From 6acc65c2a69d7ba568a738ec11bae72b49a53d90 Mon Sep 17 00:00:00 2001 From: Tue Ton <49886739+chirontt@users.noreply.github.com> Date: Wed, 30 Mar 2022 19:17:21 -0400 Subject: [PATCH] implement a graphical demo launcher in SWT --- res/org/lwjgl/demo/demos-data.properties | 1230 ++++++++++++++++++++++ src/org/lwjgl/demo/DemoLauncher.java | 460 ++++++++ 2 files changed, 1690 insertions(+) create mode 100644 res/org/lwjgl/demo/demos-data.properties create mode 100644 src/org/lwjgl/demo/DemoLauncher.java diff --git a/res/org/lwjgl/demo/demos-data.properties b/res/org/lwjgl/demo/demos-data.properties new file mode 100644 index 00000000..63bc5e28 --- /dev/null +++ b/res/org/lwjgl/demo/demos-data.properties @@ -0,0 +1,1230 @@ +# +# Copyright LWJGL. All rights reserved. +# License terms: https://www.lwjgl.org/license +# +# +# This properties file uses INI format to represent hierarchical data for a tree's display. +# +# Each [sec.ti.on] specifies a visible tree path, with nodes separated by ".", +# which represents package of a demo class, and its associated properties for the section. +# The paths have an implied 'org.lwjgl.demo.' package prefix. +# +# The values of each 'description' property were extracted from each demo class' javadoc. +# +# Note: as required by the properties file format, the following characters +# have special meanings and need be escaped: +# +# = (escaped as \=) +# : (escaped as \:) +# \ (escaped as \\) +# +# @author Tue Ton + +[bgfx] +description=BGFX - Cross-platform rendering library demos + +[bgfx.Bump] +description=bgfx demo\: 06-Bump \ +
\ + This demo is a Java port of \ + \ + https\://github.com/bkaradzic/bgfx/tree/master/examples/06-bump. +bgfxDemo=true +class=bgfx.Bump +snapshot= + +[bgfx.Cubes] +description=bgfx demo\: 01-Cubes \ +
\ + This demo is a Java port of \ + \ + https\://github.com/bkaradzic/bgfx/tree/master/examples/01-cubes. +bgfxDemo=true +class=bgfx.Cubes + +[bgfx.Metaballs] +description=bgfx demo\: 02-Metaballs \ +
\ + This demo is a Java port of \ + \ + https\://github.com/bkaradzic/bgfx/tree/master/examples/02-metaballs. +bgfxDemo=true +class=bgfx.Metaballs + +[bgfx.Raymarch] +description=bgfx demo\: 03-Raymarch \ +
\ + This demo is a Java port of \ + \ + https\://github.com/bkaradzic/bgfx/tree/master/examples/03-raymarch. +bgfxDemo=true +class=bgfx.Raymarch + +[cuda] +description=CUDA demos + +[cuda.OpenGLExample] +description=CUDA/OpenGL interop example.\ +
\ + A device kernel function is used to fill an OpenGL texture with a red color \ + gradient and GLFW is used to display that texture in a window.\ +
\ + Author\: Kai Burjack +class=cuda.OpenGLExample + +[cuda.SequencePTX] +description=A small and simple example of using PTX code to write \ + a sequence of consecutive numbers into a buffer via a device kernel function.\ +
\ + Author\: Kai Burjack +class=cuda.SequencePTX + +[game] +description=OpenGL Game demos + +[game.VoxelGameGL] +description=A simple voxel game.\ +
\ + Author\: Kai Burjack +class=game.VoxelGameGL + +[intro] +description=LWJGL 3 mini introductory series + +[intro.Intro1] +description=This is the first of a mini introductory series to working with LWJGL 3.\ +
\ + The purpose of this series is to get you comfortable with the concepts behind LWJGL 3. It will not teach you OpenGL, \ + nor will it provide you with a readily usable little engine or game.\ +
\ + Instead, we will focus on the underlying principles and concepts of LWJGL 3. These concepts cut across the whole \ + LWJGL 3 library and apply to all library bindings, not just OpenGL. Therefore, we will learn\: \ +
\ + Author\: Kai Burjack +class=intro.Intro1 + +[intro.Intro2] +description=After we learnt that LWJGL 3 provides the functions exported by a native library as static Java methods, in this \ + second part of the introductory series we will look at how to communicate data between our Java application and the \ + native library. One example of such data could be a simple array of vectors to upload to OpenGL in order to draw a \ + simple triangle.\ +
\ + It is important to know how Java can communicate data with a native library, such as OpenGL. There are generally two \ + different kinds of memory which we can allocate. \ +
\ + The downside is that we cannot transfer this kind of memory to a native library because\: \ +
\ + Using off-heap memory we can get the physical virtual memory address of the allocated memory, which will \ + also not change throughout the lifetime of the process. Therefore, we can communicate this memory address to native \ + libraries. Those native libraries can then read from or write to the memory.\ +
\ + Author\: Kai Burjack +class=intro.Intro2 + +[intro.Intro3] +description=In Intro2 we learnt how to allocate an off-heap memory buffer using MemoryUtil. This was done by first calling one of \ + the memAlloc*() methods which return a Java NIO Buffer instance representing the allocated memory region. Once we \ + were done with the buffer, we called the memFree() method to deallocate/free the off-heap memory represented by the \ + Java NIO Buffer.\ +
\ + This manual memory management is necessary when a buffer needs to live for an extended amount of time in our \ + application, meaning that the time between allocation and deallocation spans beyond one method.\ +
\ + In most scenarios however, the memory will be very short-living. One example was the allocation of the memory to fill \ + the VBO in Intro2. Memory was allocated, filled with data, given to OpenGL and then freed again.\ +
\ + LWJGL 3 provides a better way to handle such situations, which is by using the MemoryStack class. This class allows \ + to retrieve a small chunk of memory from a pre-allocated thread-local memory region of a fixed size. By default the \ + maximum size allocatable from the MemoryStack is 8 kilobytes.\ +
\ + By the way\: It is called a stack because allocations/deallocations must be issued in LIFO order, in that allocations \ + cannot be freed randomly but must be freed in the reverse allocation order. This allows to avoid any heap allocation \ + and compaction strategies.\ +
\ + Also note that the pre-allocated memory of the MemoryStack is per thread. That means, every thread will get its own \ + memory region and MemoryStack instances should not be shared among different threads.\ +
\ + Author\: Kai Burjack +class=intro.Intro3 + +[intro.Intro4] +description=In Intro3 we learnt how to allocate short-lived memory using the MemoryStack class.\ +
\ + There was one thing missing, though, which is necessary when working with manual memory management, including the \ + MemoryStack, which is\: Ensuring that the stackPop() call happens eventually. This may not be the case when the code \ + between stackPush() and stackPop() throws an exception.\ +
\ + To take care of possible exceptions, we will therefore wrap the code in a try-with-resources statement to ensure that \ + stackPop() will get called eventually.\ +
\ + Author\: Kai Burjack +class=intro.Intro4 + +[intro.Intro5] +description=In this part we will see how callbacks work. Callbacks mean any method which we can register in a native library so \ + that the native library can call us back and invoke our callback method whenever it wants to.\ +
\ + One example of where callbacks occur frequently is GLFW. GLFW provides some number of different callbacks for various \ + events that happen on a window, such as resizing, maximizing, minimizing and mouse or keyboard events.\ +
\ + Now, before we go into using callbacks with LWJGL 3 and GLFW, we should first get a clear picture of what a callback \ + looks like in a native library, which LWJGL 3 tries to provide a Java counterpart for.\ +
\ + In a native library like GLFW a callback is nothing more than a function pointer. This means that it is a physical \ + virtual memory address pointing to an executable piece of code, a function. The function pointer also has a type to \ + make it callable in C. This function type consists of the parameter types and the return type, just like a method \ + signature in Java including the return type. So, both caller and callee agree on a defined set of parameters and a \ + return type to expect when the callback function is called.\ +
\ + When LWJGL 3 maps this concept of a function type into Java, it provides the user (that means you) with a Java \ + interface type that contains a single method. This method has the same (or similar) signature and return type as the \ + native callback function. If you want to see an example, look at {@link GLFWMouseButtonCallbackI}. It is an interface \ + with a single non-default method which must be implemented and will be called whenever the native library calls the \ + callback.\ +
\ + The fact that it is an interface with a single method makes it applicable to be the target of a Java 8 Lambda method \ + or a Java 8 method reference. That means, with callbacks we need not provide an actual implementation of the callback \ + interface by either anonymously or explicitly creating a class implementing that interface, but we can use Java 8 \ + Lambda methods and Java 8 method references with a compatible signature.\ +
\ + If you are not yet familiar with Java 8 Lambda methods or Java 8 method references, please look them up on the Oracle \ + documentation. We will make use of them in the example code below.\ +
\
+ Author\: Kai Burjack
+class=intro.Intro5
+
+[opengl]
+description=OpenGL demos
+
+[opengl.PolygonDrawer]
+description=This demo showcases the PolygonsIntersection
algorithm. \
+ The outlines of a polygon can be drawn with the mouse and an intersection test is \
+ performed on every mouse movement to color the polygon in red \
+ if the mouse cursor is inside, or black if not.\
+
\ + Author\: Kai Burjack +class=opengl.PolygonDrawer + +[opengl.PolygonDrawer2] +description=Like the PolygonDrawer demo but it rotates everything around the viewport center.\ +
\ + Intersection tests and drawing at the right position still work! \:)\ +
\ + Author\: Kai Burjack +class=opengl.PolygonDrawer2 + +[opengl.SimpleDrawElements] +description=Simple Draw Elements +class=opengl.SimpleDrawElements + +[opengl.SimpleTriangleStripGrid] +description=Rendering a simple GL_TRIANGLE_STRIP grid.\ +
\
+ Author\: Kai Burjack
+class=opengl.SimpleTriangleStripGrid
+
+[opengl.UniformArrayDemo]
+description=Simple demo to showcase the use of GL20.glUniform3fv(int, FloatBuffer)
.\
+
\ + Author\: Kai Burjack +class=opengl.UniformArrayDemo + +[opengl.assimp] +description=OpenGL with Asset-Importer library demos + +[opengl.assimp.WavefrontObjDemo] +description=Shows how to load models in Wavefront obj and mlt format \ + with Assimp binding and render them with OpenGL.\ +
\ + Author\: Zhang Hai +class=opengl.assimp.WavefrontObjDemo + +[opengl.camera] +description=OpenGL Camera demos + +[opengl.camera.ArcballCameraDemo] +description=Showcases draggable arcball camera.\ +
\ + Author\: Kai Burjack +class=opengl.camera.ArcballCameraDemo + +[opengl.camera.FreeCameraDemo] +description=Simple "free fly" camera demo.\ +
\ + Author\: Kai Burjack +class=opengl.camera.FreeCameraDemo + +[opengl.fbo] +description=OpenGL Framebuffer Object demos + +[opengl.fbo.DepthEdgeShaderDemo20] +description=Uses an edge detection filter to render the edges of a mesh.\ +
\ + The edges are detected based on the reconstructed view-space position \ + of the mesh using the depth buffer.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.DepthEdgeShaderDemo20 + +[opengl.fbo.EdgeShaderDemo20] +description=Uses a Sobel edge detection filter to render the edges/outline of a mesh.\ +
\ + The edges are detected based on the view-space normals of the mesh. \ + The outlines are detected based on a simple mask written to the color alpha channel.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.EdgeShaderDemo20 + +[opengl.fbo.EdgeShaderMultisampleDemo20] +description=Same as EdgeShaderDemo20 but renders the normals on a multisampled renderbuffer.\ +
\ + This demo is also suitable for cards/drivers that do not support multisampled \ + textures via ARB_texture_multisample, because this demo uses multisampled renderbuffers \ + and uses EXT_framebuffer_blit to resolve the samples onto a single-sampled FBO.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.EdgeShaderMultisampleDemo20 + +[opengl.fbo.MultisampledFboDemo] +description=Showcases multisampled FBO rendering.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.MultisampledFboDemo + +[opengl.fbo.MultisampledFbo2Demo] +description=Showcases multisampled FBO rendering.\ +
\ + This demo first renders into a multisampled FBO and then blits/resolves into \ + a single-sampled FBO, which allows to obtain the final image as a texture.\ +
\ + Additionally, this demo then renders this texture on the screen using a \ + textured fullscreen quad.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.MultisampledFbo2Demo + +[opengl.fbo.ReadDepthBufferDemo] +description=Showcases simple reconstruction of the world position from the depth buffer.\ +
\ + It uses a depth attachment texture to render depth-only to the FBO. \ + Afterwards, the view-space or world-space coordinates are reconstructed \ + via the depth values from the depth texture.\ +
\ + In order to do this, first the inverse of either the view-projection matrix \ + is computed (for world-space reconstruction) or the inverse of the projection \ + matrix (for view-space reconstruction). This matrix is uploaded to a shader. \ + The fragment shader reads the depth values from the depth buffer and \ + transforms those values by the computed matrix.\ +
\ + Author\: Kai Burjack +class=opengl.fbo.ReadDepthBufferDemo + +[opengl.geometry] +description=OpenGL Geometry shader demos + +[opengl.geometry.GeometryShaderTest] +description=Geometry Shader Test +class=opengl.geometry.GeometryShaderTest + +[opengl.geometry.GeometryShaderTest20] +description=Geometry Shader Test 20 +class=opengl.geometry.GeometryShaderTest20 + +[opengl.geometry.SilhouetteDemo] +description=Demo how to render the silhouette/outline of a mesh using the geometry shader.\ +
\ + First, the triangles with adjacency information (GL_TRIANGLES_ADJACENCY) are calculated based on a normal \ + GL_TRIANGLES mesh. Using this, the geometry shader is invoked which checks whether each one \ + of the three edges is a silhouette edge by determining the dot product between the \ + front face and the back/adjacent face.\ +
\ + Author\: Kai Burjack +class=opengl.geometry.SilhouetteDemo + +[opengl.glfw] +description=OpenGL with GLFW demos + +[opengl.glfw.Multithreaded] +description=Showcases how you can use multithreading in a GLFW application in order to \ + separate the (blocking) winproc handling from the render loop.\ +
\ + Author\: Kai Burjack +class=opengl.glfw.Multithreaded + +[opengl.instancing] +description=OpenGL Instancing demos + +[opengl.instancing.GrassDemo] +description=Uses hardware instancing to render grass patches.\ +
\ + This demo uses per-instance attributes to position, rotate and animate the grass blades.\ +
\ + Author\: Kai Burjack +class=opengl.instancing.GrassDemo + +[opengl.raytracing] +description=OpenGL Raytracing demos + +[opengl.raytracing.AlphaGrass] +description=Alpha Grass\ +
\
+ Author\: Kai Burjack
+class=opengl.raytracing.AlphaGrass
+
+[opengl.raytracing.AtomicDemo]
+description=Same as Demo
but it uses a pseudo-random number generator based on \
+ ARBShaderAtomicCounters
.\
+
\
+ This should showcase the use of atomic counter buffers in GLSL shaders. In \
+ detail, there is a new random()
GLSL function which implicitly \
+ takes an atomic counter and increments it every time the function is called. \
+ What was once the combination of screen coordinate parameterization and time \
+ to achieve variance of the generated random value over each pixel is now a \
+ single atomic counter.\
+
\ + In later demos (especially with correct "by the book" photon mapping), which \ + use rays originating from the light source we are going to need this \ + approach, since otherwise we cannot have different random reflection \ + directions, since the direction will always depend on constant "random" \ + values. This will be explained in later demos!\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.AtomicDemo + +[opengl.raytracing.CubeTraceMerged] +description=Stackless kd-tree ray tracing with optimized/merged cubes.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.CubeTraceMerged + +[opengl.raytracing.Demo] +description=Raytracing demo.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.Demo + +[opengl.raytracing.Demo20] +description=Raytracing demo.\ +
\ + This is a port of the other raytracing demos to OpenGL 2.0, to better port it \ + further to OpenGL ES 2.0 / WebGL 1.0.\ +
\ + Since we do not have SSBO support here, we use a RGB/XYZ texture to store the \ + boxes which we then sample in the shader. We are also using a different \ + random function that does not rely on GLSL 3.30.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.Demo20 + +[opengl.raytracing.Demo33] +description=Raytracing demo.\ +
\ + This is a port to OpenGL 3.3, using the old-style GPGPU with full-screen quad and vertex/fragment shaders \ + to do general purpose computing in shaders. We also need 3.3 as opposed to 3.0, because of certain GLSL \ + functions only available there.\ +
\ + The port is meant for people that cannot use OpenGL 4.3, as it is the case for all Apple products right now.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.Demo33 + +[opengl.raytracing.Demo33Ubo] +description=Raytracing demo.\ +
\ + Same as Demo33
but using a Uniform Buffer Object to transfer the \
+ camera parameters to the shader.\
+ \
+ Author\: Kai Burjack
+class=opengl.raytracing.Demo33Ubo
+
+[opengl.raytracing.DemoSsbo]
+description=Raytracing demo.\
+ \
+ The same as Demo
but using a Shader Storage Buffer Object (SSBO) to \
+ specify the scene dynamically from the host program instead of hardcoded in \
+ the shader.\
+
\ + Also, the compute shader does not directly write into an image but instead \ + into a SSBO buffer that is afterwards uploaded via Pixel Buffer Object into a \ + texture, which is then eventually displayed on the screen. This was just some \ + left-over from a long debugging session, but I think it can stay this way to \ + showcase writing into an SSBO.\ +
\
+ Author\: Kai Burjack
+class=opengl.raytracing.DemoSsbo
+
+[opengl.raytracing.DemoSsboTrianglesStacklessKdTree]
+description=Like HybridDemoSsboTriangles
but uses the \
+ \
+ Stackless KD-Tree Traversal algorithm for binary space partitioning to avoid \
+ testing all triangles of the scene for each ray.\
+
\ + Normally, traversing a kd-tree requires recursion (or a stack), but those are not available in GLSL. So \ + "Stackless kd-tree traversal" works by connecting adjacent nodes of the tree. The connections/links are called \ + "ropes" in that paper.\ +
\
+ The kd-tree as well as the rope-building is implemented in KDTree
.\
+
\ + Author\: Kai Burjack +class=opengl.raytracing.DemoSsboTrianglesStacklessKdTree + +[opengl.raytracing.GL33KdTreeTrace] +description=Stackless kd-tree ray tracing in OpenGL 3.3 with Buffer Textures.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.GL33KdTreeTrace + +[opengl.raytracing.HybridDemo] +description=This demo is used to showcase hybrid rasterization and ray tracing to make \ + the first bounce faster.\ +
\ + The idea behind this is not new, others have done it, too, like in \ + Hybrid Rendering Demo - PowerVR Ray Tracing - GDC 2014.\ +
\ + The benefit of doing it this way is to use the rasterizer to rasterize the \ + depth/view position and normal information of a potentially complex model \ + first, storing it in a G-buffer. This essentially saves us one bounce of path \ + tracing for the first hit of the eye ray into the scene.\ +
\ + From there on we can use the ray tracer again and compute shadow rays and \ + reflection rays as usual.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.HybridDemo + +[opengl.raytracing.HybridDemoSsbo] +description=This demo is used to showcase hybrid rasterization and ray tracing to make \ + the first bounce faster.\ +
\
+ It works like the HybridDemo
, but uses a Shader Storage Buffer Object \
+ (SSBO) to let the host program dynamically specify the boxes via a memory \
+ buffer that is read by the shader.\
+
\ + Author\: Kai Burjack +class=opengl.raytracing.HybridDemoSsbo + +[opengl.raytracing.HybridDemoSsboInstancing] +description=This demo is used to showcase hybrid rasterization and ray tracing to make \ + the first bounce faster.\ +
\
+ It works like the HybridDemoSsbo
, but uses hardware instancing to \
+ rasterize the boxes. It won't be any faster, but will use less GPU memory as \
+ we only need to create a small VBO containing a unit axis-aligned box and a \
+ buffer with per-instance box data (position and size).\
+
\
+ Author\: Kai Burjack
+class=opengl.raytracing.HybridDemoSsboInstancing
+
+[opengl.raytracing.HybridDemoSsboInstancing45]
+description=This demo is identical to HybridDemoSsboInstancing
, except that only \
+ the very latest OpenGL functionality and the most modern way of doing things \
+ is being used, such as DSA texture and buffer creation and update API as well \
+ as the new DSA vertex binding/attribute setup functions introduced with \
+ OpenGL 4.5.\
+
\
+ Author\: Kai Burjack
+class=opengl.raytracing.HybridDemoSsboInstancing45
+
+[opengl.raytracing.HybridDemoSsboTriangles]
+description=Like HybridDemoSsbo
but uses a triangle mesh instead of boxes.\
+
\ + We want to get to a real ray tracer soon, and therefore we want to be able to \ + trace triangle meshes.\ +
\ + This demo uses a simple non-hierarchical AABB spatial acceleration structure, \ + to first test a ray against the AABB of all triangles of a single object in \ + the mesh before testing all triangles of the object.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.HybridDemoSsboTriangles + +[opengl.raytracing.LinearlyTransformedCosines] +description=This demo is an implementation of the paper \ + Real-Time \ + Polygonal-Light Shading with Linearly Transformed Cosines from Eric \ + Heitz, Jonathan Dupuy, Stephen Hill and David Neubelt to analytically compute \ + the direct contribution of the rectangular light without shadows.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.LinearlyTransformedCosines + +[opengl.raytracing.PhotonMappingBindlessDemo] +description=Photon mapping using bindless cube map textures.\ +
\
+ The same as PhotonMappingDemo
BUT it uses ARB_bindless_texture to \
+ have an array of cube map textures without the restriction of equal \
+ dimensions for each cube map. We want each cube map to have the dimension in \
+ proportion to the actual size of the cube it is mapped to.\
+
\ + Author\: Kai Burjack +class=opengl.raytracing.PhotonMappingBindlessDemo + +[opengl.raytracing.PhotonMappingDemo] +description=Photon mapping using cubemap array textures.\ +
\ + This demo uses a cube map array texture to hold a "photon map" for each of \ + the boxes in the scene.\ +
\ + A compute shader is used to shoot light rays into the scene and whenever they \ + hit a box the texel coordinate is computed and the "photon" is stored in the \ + corresponding face and layer of the cube map array image.\ +
\ + Afterwards, the scene is rasterized and the cube map array is sampled via a \ + samplerCubeArray. The boxes are rendered via hardware instancing and the \ + layer of the cube map array (i.e. the cube map for that particular box \ + instance) is obtained via the gl_InstanceID.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.PhotonMappingDemo + +[opengl.raytracing.TransformFeedbackDemo] +description=This demo uses transform feedback to first store the view-space positions and normals \ + of the vertices in a buffer object.\ +
\ + Afterwards, the triangles in this buffer are intersected with eye rays in a simple compute shader.\ +
\ + This demo differs from all other raytracing demos in that the scene SSBO for the compute shader is generated \ + "dynamically" via transform feedback. This allows for dynamic scenes with possible model transformations.\ +
\ + Using transform feedback to generate the scene information for ray tracing also allows for a geometry shader \ + to introduce or discard primitives and furthermore allows for tessellation control and evaluation shaders \ + to additionally alter the geometry. This fits a hybrid rendering approach where these additional shader \ + stages are being used for rasterization.\ +
\ + This demo does not use any acceleration structure such as a binary space partitioning or \ + bounding volume hierarchy but the compute shader instead tests each ray against all triangles. \ + There are algorithms for building such acceleration structures on the GPU at runtime achieving \ + interactive frame rates, such as \ + \ + Real-Time KD-Tree Construction on Graphics Hardware.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.TransformFeedbackDemo + +[opengl.raytracing.VoxelLightmapping] +description=Loads a MagicaVoxel file and builds a lightmap with ray traced ambient occlusion \ + at runtime. The scene is then rasterized normally with the lightmap sampled with linear filtering.\ +
\ + Also, implements Progressively Ordered Primitive (POP) Buffer LOD.\ +
\
+ Author\: Kai Burjack
+class=opengl.raytracing.VoxelLightmapping
+
+[opengl.raytracing.VoxelLightmapping2]
+description=Enhances VoxelLightmapping
with raytraced reflections using the greedy meshed faces/rectangles \
+ as scene representation for kd-tree tracing instead of axis-aligned boxes.\
+
\ + This allows to compute a UV coordinate for the hit point on a face and lookup the lightmap when following \ + view-dependent rays.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.VoxelLightmapping2 + +[opengl.raytracing.tutorial] +description=OpenGL Raytracing tutorials + +[opengl.raytracing.tutorial.Tutorial1] +description=This is the first part of a small code-centric tutorial on ray tracing with \ + LWJGL 3 and OpenGL's compute shaders. The way these tutorials work is by \ + simply reading JavaDoc and inline comments as you work your way through the \ + Java code in a pretty much "top-to-bottom" way. Everything necessary will be \ + mentioned and explained as your read. You should be somewhat familiar with \ + Java, OpenGL and with how LWJGL 3 translates the native OpenGL API to Java. \ + For the latter have a look at the Intro1 and later introductions.\ +
\ + The focus of this part will be to set up everything needed for a first \ + working ray tracer to render a scene of a few uncolored boxes. Later parts \ + will cover more and more advanced topics. But for now, this part will only \ + cover the following\:\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial1 + +[opengl.raytracing.tutorial.Tutorial2] +description=In this part we create a path tracer using Monte Carlo integration/simulation \ + to estimate the amount of light reaching any visible surface point in the scene.\ +
\ + That means, we now don't just shoot primary rays from the eye/camera into the \ + scene to check for simple intersections with scene geometry and then simply \ + rendering those intersections as some grayscale color. No, this time we want \ + to actually simulate the light as it bounces around in our scene.\ +
\ + For that, we more or less use the same scene from before but with an open \ + ceiling from which light can reach the inner areas of our "room". Starting \ + with the current eye/camera position, we shoot rays into the scene (just like \ + before), but unlike before when a ray hits a box we generate a reflected ray \ + and continue following that "path" of up to three bounces/rays until either \ + one of those rays escapes the room through the open ceiling into the light, \ + or not. In the last case, the contribution of that ray's light will be zero.\ +
\ + Like mentioned we use Monte Carlo integration to estimate the amount of light \ + traveling off a visible surface point towards the eye/camera. That means we \ + need to generate many rays and average their light contribution for each \ + pixel in our framebuffer. We could implement all of this in a single compute \ + shader invocation by using a pre-defined number of samples to average over, \ + say 1000. But we do not know in advance whether that number of samples \ + suffices to generate a good estimation of the actual light transport in the \ + scene and whether we complete the shader invocation before the graphics card \ + driver assumes the kernel invocation to hang and reset the driver.\ +
\ + Because of those reasons we decide to map a single iteration of the Monte \ + Carlo integration to a single shader invocation. But now we need to average \ + the framebuffer contents of the individual invocation results somehow, \ + because for any number of iterations we need the arithmetic mean/average over \ + the last N results. There is an easy way to achieve this via linearly \ + interpolating between the (N-1) average and the current iteration result N. \ + We just need to find a blending weight `a` such that\:\ +
\ +S(1) \= O(0)/2 + O(1)/2 \n\ +S(2) \= O(0)/3 + O(1)/3 + O(2)/3 \n\ +S(3) \= O(0)/4 + O(1)/4 + O(2)/4 + O(3)/4 \n\ +\ + can be formulated recursively via\:\ +
\ +S(i) \= S(i - 1) * a + O(i) * (1 - a) \ +\ + In order to achieve this, we use\:
a \= i/(i+1)
\
+ \
+ Author\: Kai Burjack
+class=opengl.raytracing.tutorial.Tutorial2
+
+[opengl.raytracing.tutorial.Tutorial3]
+description=This tutorial provides an improvement over Tutorial2
by making use of \
+ importance sampling for a faster convergence rate of the generated light \
+ transport estimate computed via our Monte Carlo integration. This means that \
+ the variance in the estimate/image will be reduced significantly for the kind \
+ of surfaces we are using in this tutorial.\
+
\
+ Previously in Tutorial2
when a ray hit a box surface we generated a \
+ new ray which was uniformly distributed over the surface hemisphere. \
+ However, for directions that are close to parallel to the surface (i.e. whose \
+ dot product with the surface normal is close to zero), any light that may \
+ have come along that direction would've gotten attenuated strongly by the \
+ cosine fall-off factor of the rendering equation and therefore would not have \
+ contributed much to the surface irradiance.\
+
\ + A much better approach would be to change the generation of new ray \ + directions based on our knowledge of the rendering equation and the BRDF of \ + the surface so as to maximize the actual contribution of any light coming \ + from those directions. To account for the cosine fall-off term we are now \ + generating sample directions whose probability distribution is directly \ + proportional to the cosine of that direction with the surface normal.\ +
\ + To present the benefit of importance sampling we are also introducing a new \ + kind of surface in this demo. Currently we were only using lambertian/diffuse \ + surfaces which reflect light in all directions equally. Now we also want \ + specular surfaces that reflect most of the incoming light around the \ + direction of perfect reflection. There will be a mode to switch between \ + uniform hemisphere sampling and importance sampling.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial3 + +[opengl.raytracing.tutorial.Tutorial4] +description=This time we are going to add a small spherical light source to our scene. \ + With that we'll see that uniform hemisphere sampling is not quite up to the \ + task and produces very strong noise/variance, since the generated sample \ + directions will very likely miss the light source.\ +
\ + To combat this, we will use "Multiple Importance" sampling introduced by Eric \ + Veach's 1998 PhD thesis "Robust Monte Carlo Methods for Light Transport \ + Simulation" (see chapter 9). The idea is to sample not based on the BRDF but \ + around the direction towards the light source. This has the potential of \ + significantly reducing the variance in the Monte Carlo estimate, especially \ + in our simple "room with a table" scene where we will place a small spherical \ + light, since that light is visible from all locations but the ones under the table.\ +
\
+ Author\: Kai Burjack
+class=opengl.raytracing.tutorial.Tutorial4
+
+[opengl.raytracing.tutorial.Tutorial4_2]
+description=This is a small modification of Tutorial4
using a rectangular area light \
+ instead of a sphere. The multiple-importance path of the shader generates samples \
+ on the rectangular light source and provides a function to compute the probability \
+ density of any given sample direction with respect to the area sampling of the \
+ rectangular light source.\
+
\
+ Author\: Kai Burjack
+class=opengl.raytracing.tutorial.Tutorial4_2
+
+[opengl.raytracing.tutorial.Tutorial4_3]
+description=Like Tutorial4_2
, but it uses a GLSL implementation of the paper \
+ A Low-Discrepancy \
+ Sampler that Distributes Monte Carlo Errors as a Blue Noise in Screen Space \
+ from Eric Heitz, Laurent Belcour, Victor Ostromoukhov, David Coeurjolly and \
+ Jean-Claude Iehl to generate samples with blue-noise characteristics.\
+
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial4_3 + +[opengl.raytracing.tutorial.Tutorial5] +description=This part adds the \ + Edge-Avoiding À-Trous \ + Wavelet Transform for fast Global Illumination Filtering for filtering \ + the path-traced result to reduce noise at the expense of blurring the result.\ +
\
+ The algorithm uses edge-stop functions based on the color, normal and \
+ position of each sample and these edge-stop functions can be tweaked by how \
+ much they should care about a change in color, normal and position. See the \
+ filter(int)
method.\
+
\
+ Author\: Kai Burjack
+class=opengl.raytracing.tutorial.Tutorial5
+
+[opengl.raytracing.tutorial.Tutorial6]
+description=In this tutorial we will trace triangle meshes imported from a scene \
+ description file via Assimp
. We will also use a binary Bounding \
+ Volume Hierarchy structure with axis-aligned bounding boxes and show how this \
+ can be traversed in a stack-less way on the GPU.\
+
\ + The strategy to traverse the BVH tree in the compute shader is explained in \ + the corresponding raytracing.glsl shader file.\ +
\
+ Author\: Kai Burjack
+class=opengl.raytracing.tutorial.Tutorial6
+
+[opengl.raytracing.tutorial.Tutorial6_2]
+description=Same as Tutorial6
but without using next/miss pointers in the BVH \
+ nodes but instead using a bitstack in the Compute Shader to keep track of \
+ whether the near or far child was visited. This means that the Compute Shader \
+ will first visit the BVH node nearest to the ray's origin.\
+
\ + This allows to traverse the BVH tree in an optimal order, quickly culling \ + many nodes when there was a triangle intersection in a near node.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial6_2 + +[opengl.raytracing.tutorial.Tutorial7] +description=This demo shows stackless kd-tree traversal, as presented in the 2007 \ + \ + Stackless KD-Tree Traversal for High Performance GPU Ray Tracing paper together \ + with hybrid rasterization.\ +
\
+ The former is a nice way to avoid unnecessary ray-triangle intersections \
+ using a kd-tree as the spatial acceleration structure. In addition, the \
+ traversal is made stackless by introducing "ropes" (i.e. pointers to neighbor \
+ nodes). See the class KDTreeForTutorial7
for the actual kd-tree \
+ implementation plus the "ropes" extension.\
+
\ + The latter is a way to accelerate shooting the primary rays and intersecting \ + them with scene geometry by simply rendering/rasterizing the scene with \ + OpenGL in the normal way. In the fragment shader we will write the \ + world-space normal and the view distance of the fragment into a texture, \ + which is then read by the path tracer compute shader. From there on the path \ + tracer will again use the default path tracing algorithm for shooting the \ + secondary rays.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial7 + +[opengl.raytracing.tutorial.Tutorial8] +description=This demo implements hybrid rasterization and path tracing together with \ + temporal anti-aliasing using screen-space sample reprojection as well as \ + spatial edge-avoiding à-trous filtering.\ +
\ + Temporal anti-aliasing with reprojection means that texels from an image \ + traced at frame `t-1` will be reused and reprojected onto their new position \ + at time `t` and blended together with the traced result at frame `t`. This \ + allows to effectively increase the sample count even when the camera moves. \ + Previously, when the camera moved, the weight factor was reset so that no \ + previous frame was blended together with the current frame.\ +
\ + In addition to temporal anti-aliasing, this demo also uses hybrid \ + rasterization + path tracing in order to accelerate the first bounce by \ + rasterizing the first eye-to-surface ray using rasterization and to produce a \ + robust G-buffer with scene depth and normal information, which will be used \ + by the temporal anti-aliasing step.\ +
\ + Also, this demo does not use a GLSL compute shader for the path tracing but \ + combines rasterization with path tracing in a fragment shader. Apart from \ + OpenGL 3.3 users being able to run this demo, this saves memory bandwidth \ + because the reprojection step can be done in a single shader invocation \ + instead of the necessary velocity and color information being written to a \ + render target which will then subsequently be read again by a compute shader. \ + In order to save fragment shader invocations in the rasterize and path trace \ + fragment shader, a previous depth-only pass is performed so that the fragment \ + shader is only executed for visible surfaces.\ +
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial8 + +[opengl.raytracing.tutorial.Tutorial8_2] +description=Example of temporal antialiasing using reprojection but without any filtering or neighbour clamping.\ +
\
+ This is like Tutorial8
but without filtering. In addition, this demo uses a velocity buffer.\
+
\
+ There are three main color render buffers. The first two will hold the previous and next frame result and the FBO \
+ ping-pongs between them. The third is an intermediate buffer to ray trace the scene into, which is then used as input \
+ to the reprojection shader. This shader also uses a fourth render buffer which holds the pixel velocity information \
+ computed by the ray tracing shader (which like in Tutorial8
is also a rasterization fragment shader).\
+
\ + Author\: Kai Burjack +class=opengl.raytracing.tutorial.Tutorial8_2 + +[opengl.shader] +description=OpenGL Shader demos + +[opengl.shader.DownsamplingDemo] +description=Computes 3 mip levels of a texture using only a single compute shader dispatch \ + and GL_KHR_shader_subgroup. \ + Then uses shared memory for mips 4 and 5.\ +
\ + Author\: Kai Burjack +class=opengl.shader.DownsamplingDemo + +[opengl.shader.GameOfLife] +description=Conway's Game of Life using OpenGL compute shader.\ +
\ + Author\: Kai Burjack +class=opengl.shader.GameOfLife + +[opengl.shader.ImmediateModeDemo] +description=Shows how to use immediate mode with a simple shader.\ +
\ + Author\: Kai Burjack +class=opengl.shader.ImmediateModeDemo + +[opengl.shader.InfiniteDraggablePlaneDemo] +description=Render an infinite XZ plane with antialiased grid pattern and allow \ + dragging the plane with the mouse and to move around with mouse/keyboard controls.\ +
\ + Author\: Kai Burjack +class=opengl.shader.InfiniteDraggablePlaneDemo + +[opengl.shader.InfinitePlaneDemo] +description=Render an infinite XZ plane with antialiased grid pattern.\ +
\ + Author\: Kai Burjack +class=opengl.shader.InfinitePlaneDemo + +[opengl.shader.NoVerticesBSplineDemo] +description=Renders a cubic B-spline without using any vertex source but fully computing the vertex positions in the vertex shader.\ +
\ + This demo implements cubic B-spline evaluation in the vertex shader and stores the control points in a Uniform Buffer Object.\ +
\ + Author\: Kai Burjack +class=opengl.shader.NoVerticesBSplineDemo + +[opengl.shader.NoVerticesGridDemo] +description=Renders a grid without using any vertex source but fully computing the vertex positions in the vertex shader.\ +
\ + Author\: Kai Burjack +class=opengl.shader.NoVerticesGridDemo + +[opengl.shader.NoVerticesPolygonDemo] +description=Renders a regular polygon without using any vertex source but fully computing the vertex positions in the vertex shader.\ +
\ + Author\: Kai Burjack +class=opengl.shader.NoVerticesPolygonDemo + +[opengl.shader.NoVerticesProjectedGridDemo] +description=Renders a projected grid without using any vertex source but fully computing the vertex positions in the vertex shader.\ +
\ + This showcases JOML's implementation of \ + Projected Grid.\ +
\ + This demo does not take care or show how to obtain a "pleasant" projector matrix. Consult section 2.4.1 in the referenced paper for more \ + guidance on how to obtain a projector matrix. \ + So, to keep things simple this demo instead just uses the camera's view-projection matrix as the projector matrix.\ +
\ + Author\: Kai Burjack +class=opengl.shader.NoVerticesProjectedGridDemo + +[opengl.shader.Planet] +description=Simple planet with clouds.\ +
\ + Author\: Kai Burjack +class=opengl.shader.Planet + +[opengl.shader.SimpleQuadAndGridDemo] +description=Simple Quad and Grid Demo\ +
\ + Author\: Kai Burjack +class=opengl.shader.SimpleQuadAndGridDemo + +[opengl.shadow] +description=OpenGL shadow demos + +[opengl.shadow.Omni2dShadow] +description=Omnidirectional 2D shadows using "1D shadow mapping".\ +
\ + Author\: Kai Burjack +class=opengl.shadow.Omni2dShadow + +[opengl.shadow.ProjectiveShadowDemo] +description=Projective Shadow Demo\ +
\ + Author\: Kai Burjack +class=opengl.shadow.ProjectiveShadowDemo + +[opengl.shadow.ShadowMappingDemo] +description=Simple demo to showcase shadow mapping with a custom shader doing perspective \ + divide and depth test (i.e. no sampler2DShadow).\ +
\ + Author\: Kai Burjack +class=opengl.shadow.ShadowMappingDemo + +[opengl.shadow.ShadowMappingDemo20] +description=Simple demo to showcase shadow mapping with a custom shader doing perspective \ + divide and depth test (i.e. no sampler2DShadow).\ +
\ + Author\: Kai Burjack +class=opengl.shadow.ShadowMappingDemo20 + +[opengl.swt] +description=OpenGL demos with SWT + +[opengl.swt.SwtAndGlfwDemo] +description=Shows how to use SWT and GLFW windows side-by-side.\ +
\
+ Author\: Kai Burjack
+class=opengl.swt.SwtAndGlfwDemo
+
+[opengl.swt.SwtDemo]
+description=OpenGL support within an SWT window.
+class=opengl.swt.SwtDemo
+
+[opengl.textures]
+description=OpenGL textures demos
+
+[opengl.textures.BillboardCubemapDemo]
+description=Like FullscreenCubemapDemo
but renders the black hole using a \
+ regular polygon that encompasses only the circular area of influence of the \
+ black hole.\
+
\ + Author\: Kai Burjack +class=opengl.textures.BillboardCubemapDemo + +[opengl.textures.EnvironmentDemo] +description=Shows how to render a spherical/equirectangular texture as environment map using a single fullscreen quad.\ +
\
+ This uses the same technique as the BillboardCubemapDemo
, FullscreenCubemapDemo
and all raytracing demos, \
+ which is to unproject the NDC corner positions into world-space using the inverse of the combined view-projection matrix. \
+ This demo then transforms the obtained cartesian coordinates on the unit sphere (which would otherwise be used for \
+ cubemap lookup) into spherical coordinates (longitude, latitude) to sample the texture using equirectangular projection.\
+
\
+ Author\: Kai Burjack
+class=opengl.textures.EnvironmentDemo
+
+[opengl.textures.EnvironmentTeapotDemo]
+description=Just like EnvironmentDemo
, but also adds a reflective teapot.\
+
\ + Author\: Kai Burjack +class=opengl.textures.EnvironmentTeapotDemo + +[opengl.textures.FullscreenCubemapDemo] +description=Loads and displays a simple starfield cubemap as well as a "black hole."\ +
\ + The starfield was generated with Spacescape.\ +
\ + Author\: Kai Burjack +class=opengl.textures.FullscreenCubemapDemo + +[opengl.textures.SimpleProceduralTextureDemo] +description=Simple Procedural Texture demo +class=opengl.textures.SimpleProceduralTextureDemo + +[opengl.textures.SimpleTexturedQuad] +description=Renders a simple textured quad using OpenGL 3.0.\ +
\ + Author\: Kai Burjack +class=opengl.textures.SimpleTexturedQuad + +[opengl.textures.SimpleTexturedSphere] +description=Renders a simple textured sphere using OpenGL 4.0 Core Profile.\ +
\ + Author\: Kai Burjack +class=opengl.textures.SimpleTexturedSphere + +[opengl.textures.Texture2DArrayMipmapping] +description=Texture 2D Array Mipmapping +class=opengl.textures.Texture2DArrayMipmapping + +[opengl.transform] +description=OpenGL transform demos + +[opengl.transform.LwjglDemo] +description=LWJGL demo \ +
\ + Author\: Kai Burjack +class=opengl.transform.LwjglDemo + +[opengl.transform.LwjglDemoLH] +description=LWJGL demo LH\ +
\ + Author\: Kai Burjack +class=opengl.transform.LwjglDemoLH + +[opengl.transform.ObliqueProjectDemo] +description=Oblique Project demo\ +
\ + Author\: Kai Burjack +class=opengl.transform.ObliqueProjectDemo + +[opengl.transform.OrientedQuads] +description=Oriented Quads\ +
\ + Author\: Kai Burjack +class=opengl.transform.OrientedQuads + +[vulkan] +description=Vulkan demos + +[vulkan.ClearScreenDemo] +description=Renders a simple cornflower blue image on a GLFW window with Vulkan.\ +
\ + Author\: Kai Burjack +class=vulkan.ClearScreenDemo + +[vulkan.ColoredRotatingQuadDemo] +description=Renders a simple rotating colored quad on a cornflower blue background on a GLFW window with Vulkan.\ +
\
+ This is like the ColoredTriangleDemo
, but adds an additional rotation. \
+ Do a diff between those two classes to see what's new.\
+
\ + Author\: Kai Burjack +class=vulkan.ColoredRotatingQuadDemo + +[vulkan.ColoredTriangleDemo] +description=Renders a simple colored triangle on a cornflower blue background on a GLFW window with Vulkan.\ +
\
+ This is like the TriangleDemo
, but adds an additional "color" vertex attribute. \
+ Do a diff between those two classes to see what's new.\
+
\ + Author\: Kai Burjack +class=vulkan.ColoredTriangleDemo + +[vulkan.InstancedSpheresDemo] +description=Renders multiple spheres on a cornflower blue background on a GLFW window with Vulkan.\ +
\
+ This demo is like TwoRotatingTrianglesDemo
but uses instanced rendering with a big UBO to hold all spheres' transformation matrices.\
+
\ + Author\: Kai Burjack +class=vulkan.InstancedSpheresDemo + +[vulkan.TriangleDemo] +description=Renders a simple triangle on a cornflower blue background on a GLFW window with Vulkan.\ +
\ + Author\: Kai Burjack +class=vulkan.TriangleDemo + +[vulkan.TwoRotatingTrianglesDemo] +description=Renders two rotating triangles on a cornflower blue background on a GLFW window with Vulkan.\ +
\
+ This is like the ColoredRotatingQuadDemo
, but it adds a depth buffer to avoid false overdraw.\
+
\ + Author\: Kai Burjack +class=vulkan.TwoRotatingTrianglesDemo + +[vulkan.TwoRotatingTrianglesInvDepthDemo] +description=Renders two rotating triangles on a cornflower blue background on a GLFW window with Vulkan.\ +
\
+ This is like the TwoRotatingTrianglesDemo
, but it uses reverse depth mapping to maximize \
+ the depth buffer precision.\
+
\ + Author\: Kai Burjack +class=vulkan.TwoRotatingTrianglesInvDepthDemo + +[vulkan.raytracing] +description=Vulkan raytracing demos + +[vulkan.raytracing.HybridMagicaVoxel] +description=Uses hybrid rasterization and ray tracing to trace shadow rays.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.HybridMagicaVoxel + +[vulkan.raytracing.ReflectiveMagicaVoxel] +description=Draws a MagicaVoxel scene containing reflective materials (windows).\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.ReflectiveMagicaVoxel + +[vulkan.raytracing.SdfBricks] +description=Renders brick models using signed distance functions together with ray tracing / spatial acceleration structure.\ +
\ + The bricks are procedural AABB geometries in a Vulkan ray tracing acceleration structure and the SDF of \ + a possibly repeated cube is evaluated in an intersection shader.\ +
\ + One AABB geometry can represent multiple bricks.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.SdfBricks + +[vulkan.raytracing.SimpleSphere] +description=VK_KHR_ray_tracing_pipeline and VK_KHR_acceleration_structure demo that draws a sphere \ + using a custom intersection shader.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.SimpleSphere + +[vulkan.raytracing.SimpleTriangle] +description=VK_KHR_ray_tracing_pipeline and VK_KHR_acceleration_structure demo that draws a triangle.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.SimpleTriangle + +[vulkan.raytracing.SimpleTriangleRayQuery] +description=VK_KHR_ray_query and VK_KHR_acceleration_structure demo that draws a triangle \ + using a compute shader.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.SimpleTriangleRayQuery + +[vulkan.raytracing.VoxelChunks] +description=Procedurally generates chunks with their BLASes.\ +
\ + Author\: Kai Burjack +class=vulkan.raytracing.VoxelChunks + diff --git a/src/org/lwjgl/demo/DemoLauncher.java b/src/org/lwjgl/demo/DemoLauncher.java new file mode 100644 index 00000000..ae5d154f --- /dev/null +++ b/src/org/lwjgl/demo/DemoLauncher.java @@ -0,0 +1,460 @@ +/* + * Copyright LWJGL. All rights reserved. + * Copyright Tue Ton. All rights reserved. + * License terms: https://www.lwjgl.org/license + */ +package org.lwjgl.demo; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.lang.management.ManagementFactory; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.TreeMap; +import java.util.stream.Stream; + +import org.eclipse.swt.SWT; +import org.eclipse.swt.browser.Browser; +import org.eclipse.swt.custom.SashForm; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; +import org.eclipse.swt.graphics.Font; +import org.eclipse.swt.graphics.FontData; +import org.eclipse.swt.graphics.Point; +import org.eclipse.swt.layout.FillLayout; +import org.eclipse.swt.layout.GridData; +import org.eclipse.swt.layout.GridLayout; +import org.eclipse.swt.layout.RowLayout; +import org.eclipse.swt.widgets.Button; +import org.eclipse.swt.widgets.Composite; +import org.eclipse.swt.widgets.Display; +import org.eclipse.swt.widgets.Group; +import org.eclipse.swt.widgets.Label; +import org.eclipse.swt.widgets.Shell; +import org.eclipse.swt.widgets.Tree; +import org.eclipse.swt.widgets.TreeItem; +import org.lwjgl.system.Platform; + +/** + * Graphical launcher for the LWJGL demos, implemented with SWT. 
+ * + * @author Tue Ton + */ +public class DemoLauncher { + + static final String DEMO_PACKAGE_PREFIX = "org.lwjgl.demo."; + + static Tree demosTree; + static Browser descriptionBrowser; + static Button launchButton; + static Group bgfxOptionsPanel; + static List