From 1eec522664976a7e7b4d278bc8752071e849a095 Mon Sep 17 00:00:00 2001 From: Maxime Date: Sat, 6 Jan 2024 18:40:57 -0500 Subject: [PATCH] docs: Caustics blog post --- content/caustics-in-webgl.mdx | 741 ++++++++++ content/immigrating-to-the-us-my-path.mdx | 2 +- core/components/MDX/MDXComponents.tsx | 3 + .../MDX/Widgets/Caustics/Sandpack.tsx | 97 ++ .../MDX/Widgets/Caustics/beautifulCaustics.ts | 360 +++++ .../MDX/Widgets/Caustics/causticsPlane.ts | 442 ++++++ .../MDX/Widgets/Caustics/dynamicCaustics.ts | 1232 +++++++++++++++++ .../MDX/Widgets/Caustics/normalProjection.ts | 159 +++ .../MDX/Widgets/Caustics/simpleCaustics.ts | 246 ++++ core/components/Tweet/Tweet.tsx | 3 +- 10 files changed, 3283 insertions(+), 2 deletions(-) create mode 100644 content/caustics-in-webgl.mdx create mode 100644 core/components/MDX/Widgets/Caustics/Sandpack.tsx create mode 100644 core/components/MDX/Widgets/Caustics/beautifulCaustics.ts create mode 100644 core/components/MDX/Widgets/Caustics/causticsPlane.ts create mode 100644 core/components/MDX/Widgets/Caustics/dynamicCaustics.ts create mode 100644 core/components/MDX/Widgets/Caustics/normalProjection.ts create mode 100644 core/components/MDX/Widgets/Caustics/simpleCaustics.ts diff --git a/content/caustics-in-webgl.mdx b/content/caustics-in-webgl.mdx new file mode 100644 index 000000000..b7181cc0c --- /dev/null +++ b/content/caustics-in-webgl.mdx @@ -0,0 +1,741 @@ +--- +title: 'Shining a light on Caustics with Shaders and React Three Fiber' +subtitle: TODO +date: '2024-01-23T08:00:00.000Z' +updated: '2024-01-23T08:00:00.000Z' +categories: [] +slug: caustics-in-webgl +type: 'blogPost' +featured: false +colorFeatured: 'linear-gradient(267deg, #FAD2A4 10.37%, #ECD5ED 58.94%, #92A6E9 98.35%);' +--- + +Since my work with [refraction and chromatic dispersion](/post/refraction-dispersion-and-other-shader-light-effects/) from last year, I have not ceased experimenting with reproducing light effects with shaders, always trying to strike the right balance between realism, aesthetics, and performance. However, there's one light effect that remained on my to-do list for this entire time, and that is **Caustics**. + +Those beautiful swirls of light can be visible when light rays **travel through a transmissive or transparent curved surface**, such as a glass of water or the surface of a shallow lake and converge on a surface after being refracted. I've been obsessing with Caustics since day one of working with shaders (ask @pixelbeat, he'll tell you). I saw countless examples reproducing the effect on Blender or WebGL, each one of them making me more keen to build my own implementation to fully understand how to render them for my React Three Fiber projects. + +// EXAMPLES CAUSTICS + +For a long time, I had this objective of reproducing a Caustic effect with my shader knowledge that was both real-time and somewhat physically based while also working with a diverse set of geometries. After working heads down, step-by-step, for a few weeks, I reached this goal and got a very satisfying result 🎉 + + + +While I documented my progress on [Twitter](https://twitter.com/MaximeHeckel/status/1730610699905143248), showcasing all the steps and my train of thought going through this project, +I wanted to dedicate a blog post to truly _shine a light on caustics_ (🥁) by walking you through the inner workings of this effect. 
+You'll see in this article how, **by leveraging normals, render targets, and some math and shader code**, you can render those beautiful and shiny swirls of light for your own creations. + + + +// SOURCES + + + +## Anatomy of a Caustic Effect in WebGL + +In this first part, we'll look at the concepts and high-level pieces behind this project. To set the right expectations from the get-go: **We're absolutely going to cheat our way through this**. Indeed, if we wanted to reproduce Caustics with a high degree of accuracy, that project would probably fall into the domain of **raytracing**, which would be: + +- Way out of reach given my current skill set as of writing this article. +- Very resource-intensive for the average computer out there, especially as we'd want the most people to be able to admire our work. + +Thus, I opted for a _simpler_ yet still somewhat physically based approach for this project: + +1. We'll **simulate** in a fragment shader the refracted rays from **a light source** going through a **target mesh**. +2. We'll render the resulting pattern in a **caustic plane** which we'll then _scale and position_ accordingly based on the position of the light source in relation to our object. + +// DIAGRAM SCENE + +Simulating how the caustic pattern works can seem quite tricky at first. However, if we look back at the definition established in the introduction, +we can get hints for how to proceed. The light pattern we're aiming for originates from rays hitting a _curved_ surface, which nudges us toward **relying on the Normal data of our target mesh**. On top of that, based on some preliminary research, it seems that _how_ our geometry curves will dictate whether the rays of light converge or diverge and thus impact how our final caustics will look. + +// DIAGRAM RAYS CONVERGE/DIVERGE + + + +If you're looking for more detailed reading on the effect itself, here are some links that were useful to me during my own research: + +- [Caustics and the Photon Tracing Kernel](https://help.otoy.com/hc/en-us/articles/14458811751067-Caustics-and-the-Photon-Tracing-Kernel) +- [What Are Caustics and How to Render Them the Right Way](https://www.chaos.com/blog/what-are-caustics-and-how-to-render-them-the-right-way) + + + +## Extracting Normals + +Let's take a stab at **extracting the Normal data of our target mesh**! With it, we'll know the "shape" of our mesh which influences the overall final look of our Caustics. Since we'll need to read that data down the line in a shader to simulate our Caustic effect, we would want to have it available as a _texture_. That means it's time to dedust your good ol' render target skills because we'll need them here. + + + +I wrote a [dedicated blog post on render targets](/posts/beautiful-and-mind-bending-effects-with-webgl-render-targets/) last year that goes through all the use cases alongside many examples and demos. If you have not read it yet or need a little refresher, take a glance at it before continuing 😊. + + + +As always, we'll start by defining our render target, or Framer Buffer Object (FBO), using the `useFBO` hook provided by `@react-three/drei`: this is where we'll render our target mesh with a "normal" material and take a snapshot of it to have that data available as a texture later on. + +```jsx {5} title=Instantiating our normalRenderTarget +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + + const normalRenderTarget = useFBO(2000, 2000, {}); + + useFrame((state) => { + const { gl } = state; + // ... 
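+    // (descriptive note: over the next steps this is where we'll swap the
+    // mesh's material for a normal material, render it into
+    // normalRenderTarget from the light's point of view, then restore the
+    // original material and feed the resulting texture to the caustics plane)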
+ }); + + return ( + <> + + + + + + + + + + ); +}; +``` + +We'll also need **a dedicated camera** for our render target, which I intuitively placed where our light source would be since we'll later need to simulate the rays going through the normals viewed from the point of view of that light source. That camera will point towards the center of the bounds of our target mesh using the `lookAt` function. + +```jsx {14-20} title=Setting up a dedicated camera for our render target +const light = new THREE.Vector3(-10, 13, -10); + +const normalRenderTarget = useFBO(2000, 2000, {}); + +const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) +); + +useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + //... +}); +``` + + + +When I first tried to get this camera setup, I noticed that moving its position could cause unwanted rotations of the camera, leading to weird inverted caustic patterns that didn't make much sense. I found out that _locking_ the `up` vector of the camera to a specific position, like in this case `THREE.Vector3(0, 1, 0)` helped to prevent this issue from happening + + + +We now have all the elements to capture our Normal data and project it onto the "caustic plane": + +- In our `useFrame` hook, we first swap the material of our mesh to use a material that renders the Normals of our mesh. In this case, I used a custom shader (you'll see later why), but you can also use `normalMaterial`. + +```jsx +// Custom Normal Material +const [normalMaterial] = useState(() => new NormalMaterial()); + +useFrame(() => { + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; +}); +``` + +- Then, we take a snapshot of our mesh by rendering it in our render target. + +```jsx +gl.setRenderTarget(normalRenderTarget); +gl.render(mesh.current, normalCamera); +``` + +- Finally, we can restore our mesh's original material and pass the resulting _texture_ in the `map` property of our temporary caustic plane material, allowing us to visualize the output. + +```jsx +mesh.current.material = originalMaterial; + +causticsPlane.current.material.map = normalRenderTarget.texture; + +gl.setRenderTarget(null); +``` + +// DIAGRAM FBO + +With this small render pipeline, we should be able to see our Normal data visible on our "caustic plane" thanks to the texture data obtained through our render target. This will serve as the foundations of our Caustic effect! + + + + + +I included a small widget in the demo above to let you move the position of the light source in this scene. Try to change the different coordinates and see how the Normals rendered on the plane change as you update the position vector of the light, i.e., the `normalCamera` used in the render target. + + + +## Building our Caustics material + +With what we just accomplished, we have, through our FBO, +**a texture representing the normals of our target mesh**. Having that data as a texture is very versatile +because not only can we render it as we just did, but more importantly **we can pass it to other shaders to do some computation**. + +Which is exactly what we're going to do in this part! 
+ +We will take our Normal data and _simulate_ light rays going through those normals and then interpret the output to create our caustics pattern. + +### Calculating caustics intensity + +At first, I didn't know how to use my Normal data to obtain the desired effect as an output. I tried my luck with using a weird mix of `sin` function in the fragment shader of my caustic plane, but that didn't yield something even remotely close to what I wanted to achieve: + + + +On top of that, I also had this idea for my Caustics effect to be able to take on additional +effects such as **chromatic aberration** or **blur**, as I really wanted the output not to be _too sharp_ +to look as natural as possible. Hence, I could not directly render the pattern onto the final plane; +instead, I'd have to **use an intermediate mesh with a custom shader material to do all the necessary +math and computation I needed**. Then, that would allow me through yet another FBO to apply as many effects to the output as I wanted on the final caustics plane itself. + +// DIAGRAM FBO + +To do so, we can leverage a `FullScreenQuad` geometry that we will not render within our scene but instead instantiate on its own and use it within our `useFrame` hook. + +```jsx title=Setting up our causticsComputeRenderTarget and FullScreenQuad +const causticsComputeRenderTarget = useFBO(2000, 2000, {}); +const [causticsQuad] = useState(() => new FullScreenQuad()); +``` + +The custom `shaderMaterial`, also attached to the `FullScreenQuad` will perform the following tasks: + +1. It will calculate the refracted ray vector from our light source going through the surface of our mesh, represented here by the Normal texture we created in the first part. +2. Use each vertex of the mesh (passed as varyings to our fragment shader) as a "ray of light" and apply the refracted ray vector to it. +3. Use partial derivatives along the `x` and `y` axes for the original and the refracted position. When multiplied, the result lets us approximate a small surface neighboring each vertex. +4. Compare the resulting surfaces to determine the intensity of the caustics. + + + +The method highlighted in steps `3` and `4` above comes directly from the article titled [Rendering Realtime Caustics in WebGL](https://medium.com/@evanwallace/rendering-realtime-caustics-in-webgl-2a99a29a0b2c) from Evan Wallace (also cited at the beginning in the sources). @N8Programs recommended looking at his technique early in this project. + +This is the **key** to making this shader a possibility, which I'm very thankful for, and I hope I didn't butcher his technique too much in here 😄 + + + +Obtaining those surfaces before and after refraction is the **key** to rendering our caustic pattern: + +- A ratio `oldArea/newArea` above `1` signifies our rays have converged. Thus, the caustic intensity should be higher. +- On the other hand, a ratio `oldArea/newArea` below `1` means that our rays have diverged and that our caustic intensity should be lower. 
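+
+To build some intuition for this ratio before jumping into the shader itself, here is a minimal CPU-side sketch of the same idea. The points, normals, and refraction index below are made-up values for illustration; in the real effect this comparison happens per fragment in the shader using partial derivatives, as shown right after.
+
+```jsx title=CPU-side intuition for the light area ratio (illustrative values)
+import * as THREE from 'three';
+
+// GLSL-style refract: incident direction I and normal N are normalized vectors
+const refract = (I, N, eta) => {
+  const dotNI = N.dot(I);
+  const k = 1.0 - eta * eta * (1.0 - dotNI * dotNI);
+  if (k < 0.0) return new THREE.Vector3(0, 0, 0); // total internal reflection
+  return I.clone()
+    .multiplyScalar(eta)
+    .sub(N.clone().multiplyScalar(eta * dotNI + Math.sqrt(k)));
+};
+
+const lightDir = new THREE.Vector3(0, -1, 0); // light shining straight down
+
+// Two neighboring points on the surface, with normals tilted by the curvature
+const p1 = new THREE.Vector3(-0.05, 0, 0);
+const p2 = new THREE.Vector3(0.05, 0, 0);
+const n1 = new THREE.Vector3(-0.2, 1, 0).normalize();
+const n2 = new THREE.Vector3(0.2, 1, 0).normalize();
+
+// Move each point along its refracted ray, like newPos in the shader
+const newP1 = p1.clone().add(refract(lightDir, n1, 1.0 / 1.25));
+const newP2 = p2.clone().add(refract(lightDir, n2, 1.0 / 1.25));
+
+const oldSpacing = p1.distanceTo(p2); // ~0.1, spacing before refraction
+const newSpacing = newP1.distanceTo(newP2); // ~0.02, the rays converged
+
+// > 1 -> rays converged -> brighter caustic; < 1 -> diverged -> dimmer
+console.log(oldSpacing / newSpacing); // ~5
+```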
+ +// DIAGRAM PROJECTION + +Below, you will find the corresponding fragment shader code that performs the steps we just highlighted: + +```glsl {20-21,23} title=CausticsComputeMaterial fragment shader +uniform sampler2D uTexture; +uniform vec3 uLight; + +varying vec2 vUv; +// Position of the vertex of the current fragment +varying vec3 vPosition; + +void main() { + vec2 uv = vUv; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea; + + gl_FragColor = vec4(vec3(value), 1.0); +} +``` + +On top of that, I applied a few _tweaks_ as I often do in my shader code. That is more subjective and enables me to reach what I originally had in mind for my Caustic shader, so take those with a grain of salt: + +```jsx {23-25} title=Extra tweaks to the final value from +uniform sampler2D uTexture; +uniform vec3 uLight; +uniform float uIntensity; + +varying vec2 vUv; +varying vec3 vPosition; + +void main() { + vec2 uv = vUv; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea; + float scale = clamp(value, 0.0, 1.0) * uIntensity; + scale *= scale; + + gl_FragColor = vec4(vec3(scale), 1.0); +} +``` + +- I added a `uIntensity` uniform so I could manually increase/decrease how _bright_ the resulting caustic effect would render. +- I made sure to `clamp` the value between 0 and 1 (see warning below). +- I squared the result to ensure the brighter areas get brighter and the dimmer areas get dimmer, thus allowing for a more striking light effect. + + + +// Showcase what happens if we do not clamp -> screenshot from Discord + + + +Finally, we can combine all that and assign what I dubbed the `CausticsComputeMaterial` to our `FullScreenQuad` and render it in a dedicated FBO. 
+ +```jsx {1,26-29,31-32} title=Using the causticsComputeMaterial in our scene +const [causticsComputeMaterial] = useState(() => new CausticsComputeMaterial()); + +useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + + causticsQuad.material = causticsComputeMaterial; + causticsQuad.material.uniforms.uTexture.value = normalRenderTarget.texture; + causticsQuad.material.uniforms.uLight.value = light; + causticsQuad.material.uniforms.uIntensity.value = intensity; + + gl.setRenderTarget(causticsComputeRenderTarget); + causticsQuad.render(gl); + + causticsPlane.current.material.map = causticsComputeRenderTarget.texture; + + gl.setRenderTarget(null); +}); +``` + +The resulting code lets us observe a glimpse of Caustics projected onto the ground ✨ + + + + + +- Try to tweak the intensity of the effect using the widget embedded in the scene. +- Try to modify the position of the light source and notice how the brighter spots of the caustics change as the pattern moves. + + + +### Creating beautiful swirls of light + +The result we just obtained looks great but presents a few _subjective_ issues that are bothering me: + +- **It looks a bit too sharp** to my taste, and because of that, we also see a lot of artifacts/grain in the final render (probably from the partial derivative not being granular enough). +- **The caustic plane does not blend with the ground**: that black frame surrounding the pattern really has to go. + +We can alleviate these issues by creating a final `causticsPlaneMaterial` that takes the texture we obtained from our `causticsComputeRenderTarget` and gently modifies it before rendering it on our plane. + +I first decided to implement a **chromatic aberration** effect on top of our caustic effect. If you're familiar with some of my work around light effects, I'm a big fan of chromatic aberration, and when applied correctly, I think it really goes a long way to make your scene/mesh look gorgeous. + + + +For this specific case, I opted to re-apply some of my shader code from a past project on [refraction](https://r3f.maximeheckel.com/refraction). 
+ + + + + +```glsl title=Refraction and Chromatic Aberration fragment shader +uniform sampler2D uTexture; +uniform float uAberration; + +varying vec2 vUv; + +const int SAMPLES = 16; + +float random(vec2 p){ + return fract(sin(dot(p.xy ,vec2(12.9898,78.233))) * 43758.5453); +} + +vec3 sat(vec3 rgb, float adjustment) { + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + vec3 intensity = vec3(dot(rgb, W)); + return mix(intensity, rgb, adjustment); +} + +void main() { + vec2 uv = vUv; + vec4 color = vec4(0.0); + + vec3 refractCol = vec3(0.0); + + for ( int i = 0; i < SAMPLES; i ++ ) { + float noiseIntensity = 0.01; + float noise = random(uv) * noiseIntensity; + float slide = float(i) / float(SAMPLES) * 0.1 + noise; + + // Apply the color shift and refraction to each color channel (r,g,b) of the texture passed in uSceneTex; + refractCol.r += texture2D(uTexture, uv + (uAberration * slide * 1.0) ).r; + refractCol.g += texture2D(uTexture, uv + (uAberration * slide * 2.0) ).g; + refractCol.b += texture2D(uTexture, uv + (uAberration * slide * 3.0) ).b; + } + // Divide by the number of layers to normalize colors (rgb values can be worth up to the value of SAMPLES) + refractCol /= float(SAMPLES); + refractCol = sat(refractCol, 1.265); + + color = vec4(refractCol.r, refractCol.g, refractCol.b, 1.0); + + gl_FragColor = vec4(color.rgb, 1.0); +} +``` + +While this shader worked as expected, it presented some issues: it created visible stripes as it moved each color channel of each texture fragment in the same direction. To work around this, I added code to _flip_ the direction of the aberration through each loop to create _some_ randomness. + +```glsl {8-9,11} title=Flipping the direction of the chromatic aberration +float flip = -0.5; + +for ( int i = 0; i < SAMPLES; i ++ ) { + float noiseIntensity = 0.01; + float noise = random(uv) * noiseIntensity; + float slide = float(i) / float(SAMPLES) * 0.1 + noise; + + float mult = i % 2 == 0 ? 1.0 : -1.0; + flip *= mult; + + vec2 dir = i % 2 == 0 ? vec2(flip, 0.0) : vec2(0.0, flip); + + // Apply the color shift and refraction to each color channel (r,g,b) of the texture passed in uSceneTex; + refractCol.r += texture2D(uTexture, uv + (uAberration * slide * dir * 1.0) ).r; + refractCol.g += texture2D(uTexture, uv + (uAberration * slide * dir * 2.0) ).g; + refractCol.b += texture2D(uTexture, uv + (uAberration * slide * dir * 3.0) ).b; +} +``` + +// BEFORE/AFTER FLIP + +Notice how this simple "flip" operation had multiple benefits: + +1. It solved the issue of the stripes that were degrading the quality of the output. +2. **It blurred the output**, making our light patterns less sharp and more natural-looking. + +That is what we precisely wanted! Although in some cases, if we look a bit closer, we can see some artifacts from the chromatic aberration, from afar, it looks quite alright (at least it does to me 😅). + +The last thing to tackle is to make our caustic plane _blend_ with the surroundings. 
We can remove the black frame visible around our light patterns by setting a couple of blending options for our `causticsPlaneMaterial` after instantiating it: + +```jsx title=Setting the proper blending option for our caustic plane to blend in +const [causticsPlaneMaterial] = useState(() => new CausticsPlaneMaterial()); +causticsPlaneMaterial.transparent = true; +causticsPlaneMaterial.blending = THREE.CustomBlending; +causticsPlaneMaterial.blendSrc = THREE.OneFactor; +causticsPlaneMaterial.blendDst = THREE.SrcAlphaFactor; +``` + +And just like that, the black frame is gone, and our caustic plane is invisible! You can see all the combined code in the code sandbox below 👇. + + + +### Scaling and positioning our Caustics + +We now have a convincing caustic effect that creates a pattern of light based on the Normal data of the target mesh. However, if we move the position of our light in the demo we just saw above, the whole scene does not feel natural. That's because we still need to do some work to _position and scale_ our caustic plane **based on the position of that light source relative to our mesh**. + +To approach this problem, I first attempted to **project the bounds of our target mesh on the ground**. By knowing where on the ground the bounds of our mesh are, I could deduce + +1. The center of the bounds: the position vector that we'll need to pass as the **position** of the caustics plane. +2. The distance from the center to the furthest projected vertex, which we could pass as the **scale** of the caustics plane. + +Doing this will make sure that the resulting size and position of the plane not only _make sense_ but also _fit_ our caustics pattern within its bounds. + + + +`@react-three/drei`'s own `Cautics` component uses the same technique to scale and position itself! Although the team behind it did a way better job at handling some edge case and avoiding the light pattern to be cut by the bounds of the plane (we'll touch upon that later). + + + +### Building a "bounding cube" for our mesh + +The first step consists of building a _bounding cube_ around our mesh. We luckily did half the work already in the first part of this article when working on getting our Normal data using the following Three.js function: + +```jsx +useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + //... +}); +``` + +The `bounds` variable contains a `min` and `max` field representing the coordinates of the minimum and maximum corners of the smallest cube containing our mesh. 
From there, we can extrapolate the remaining six corners/vertices of the bounding cube as follows: + +```jsx title=Getting the bounds vertices of our target mesh +useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + let boundsVertices = []; + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.max.z) + ); + + //... +}); +``` + +// DIAGRAM BOUND VERTICES? + +### Projecting the vertices of the bounding cube and positioning our plane + +Here, we want to use the vertices of our bounding cube and calculate their projected coordinates _in the direction of the light_ to intersect with the ground. + +The generalized formula for such projection looks as follows: + +`projectedVertex = vertex + lightDir * ((planeY - vertex.y) / lightDir.y)` + +If we transpose that formula to our code and consider our `planeY` value to be 0, since we're aiming to project on the ground, we get the following code: + +```jsx title=Projected bounding box vertices +const lightDir = new THREE.Vector3(light.x, light.y, light.z).normalize(); + +// Calculates the projected coordinates of the vertices onto the plane +// perpendicular to the light direction +const newVertices = boundsVertices.map((v) => { + const newX = v.x + lightDir.x * (-v.y / lightDir.y); + const newY = v.y + lightDir.y * (-v.y / lightDir.y); + const newZ = v.z + lightDir.z * (-v.y / lightDir.y); + + return new THREE.Vector3(newX, newY, newZ); +}); +``` + +By leveraging the projected vertices, we can now obtain the **center** position by combining those coordinates and dividing them by the total number of vertices, i.e., just doing the **average** of all coordinates. + +// DIAGRAM CENTER PROJECTION ? + +We can then assign that center coordinate as the position vector of our plane, which translates to the following code: + +```jsx title=Calculating the weighted center of our caustic plane +const centerPos = newVertices + .reduce((a, b) => a.add(b), new THREE.Vector3(0, 0, 0)) + .divideScalar(newVertices.length); + +causticsPlane.current.position.set(centerPos.x, centerPos.y, centerPos.z); +``` + +### Fitting our caustic pattern inside the plane + +Now comes the last step of this tedious process: we need to scale our plane so that no matter the position of the light, the resulting caustic pattern always _fits_ in it. + +That is tricky, and to be honest the solution I'm about to give you doesn't work 100% of the time, but it covers most of the use cases I encounter, although I could sometimes notice the pattern being cut by the invisible plane. + +My train of thought to solve this went as follows: + +- We have the projected vertices. +- We got the center position from those vertices. 
+- Hence, we can assume that the safest scale of the plane, the largest that could for sure fit our caustics, should be the distance from the center to the furthest projected vertices. + +// DIAGRAM SAFE PLANE SCALE + +Which translates to the following code: + +```jsx title=Calculating the safest scale for our plane to fit the caustic pattern +const scale = newVertices + .map((p) => + Math.sqrt(Math.pow(p.x - centerPos.x, 2), Math.pow(p.z - centerPos.z, 2)) + ) + .reduce((a, b) => Math.max(a, b), 0); + +// The scale of the plane is multiplied by this correction factor to +// avoid the caustics pattern to be cut / overflow the bounds of the plane +// my normal projection or my math must be a bit off, so I'm trying to be very conservative here +const scaleCorrection = 1.75; + +causticsPlane.current.scale.set( + scale * scaleCorrection, + scale * scaleCorrection, + scale * scaleCorrection +); +``` + + + +As said above, this technique doesn't seem to be bulletproof. I once again had to add a custom tweak by multiplying my scale by a _semi-random_ value of my choosing to handle _most_ cases 🥲 + +We'll reflect on what could have gone wrong here in the conclusion + + + +If we put all this together within our `useFrame` hook on top of what we've built in the previous part, we finally obtain the long-awaited adjustable caustic pattern ✨. + + + +Our caustic pattern looks gorgeous and behaves as expected as we move the light source around the target mesh! I hope this was worth the trouble so far because there's yet one last thing to explore to make this effect even better... + +## Dynamic Caustics + +I would lie to you if I said I wasn't happy with the result above. However, there was still something I wanted to try, and that was to see if the Caustic effect we just built could also handle **a moving/displaced mesh** and thus feel more **dynamic**. + +On top of that, our effect only really works on shapes that are either very complex or have a lot of intricate, rounded corners, limiting the pool of meshes you can use. + +// SCREENSHOT SIMPLE MESH + +Thus, I had the idea to add a bit of displacement to those meshes to increase their complexity and hope for a better caustic effect. When adding displacement to the vertices of a mesh in a vertex shader, there's one tiny aspect I never thought I had overlooked until now: **the normals are not recomputed based on the displacement out of the box**. Thus, if we were to take our target mesh and add some noise to displace its vertices, the resulting Caustic effect would unfortunately remain _unchanged_. + +To solve that, we need to **recompute our normals** on the fly based on the displacement we apply to the vertices of our mesh in our vertex shader. Luckily, the question of "how to do this" has already been answered by [Marco Fugaro](https://discourse.threejs.org/u/marcofugaro/summary) from the Three.js community! + + + +You can learn more details about his method in his post [Calculating vertex normals after displacement in the vertex shader](https://discourse.threejs.org/t/calculating-vertex-normals-after-displacement-in-the-vertex-shader/16989). We'll use the code he introduces here in our examples. + +Moreover, this method has many more applications. Recomputing normals can help making the displacement applied to materials that rely a lot on lighting look much more realistic. In the final example of this article, you'll see that I forked and modified `@react-three/drei`'s `MeshTransmissionMaterial` to do just that. 
+The resulting displaced mesh looks way better thanks to the normals at the origin of the many light effects of this material taking into account the displacement in real-time. + + + +I decided to try his method alongside a [classic Perlin 3D noise](https://github.com/hughsk/glsl-noise/blob/master/classic/3d.glsl). We can add the desired displacement and the Normal recomputation code to the vertex shader of our original Normal material we introduced in the first part. + +```glsl {7-10,12-17,31-37,39-40,42} title=Updated Normal material fragment shader +uniform float uFrequency; +uniform float uAmplitude; +uniform float time; + +// cnoise definition ... + +vec3 orthogonal(vec3 v) { + return normalize(abs(v.x) > abs(v.z) ? vec3(-v.y, v.x, 0.0) + : vec3(0.0, -v.z, v.y)); +} + +float displace(vec3 point) { + if(uDisplace) { + return cnoise(point * uFrequency + vec3(time)) * uAmplitude; + } + return 0.0; +} + +void main() { + vUv = uv; + + vec3 displacedPosition = position + normal * displace(position); + vec4 modelPosition = modelMatrix * vec4(displacedPosition, 1.0); + + vec4 viewPosition = viewMatrix * modelPosition; + vec4 projectedPosition = projectionMatrix * viewPosition; + + gl_Position = projectedPosition; + + float offset = 4.0/256.0; + vec3 tangent = orthogonal(normal); + vec3 bitangent = normalize(cross(normal, tangent)); + vec3 neighbour1 = position + tangent * offset; + vec3 neighbour2 = position + bitangent * offset; + vec3 displacedNeighbour1 = neighbour1 + normal * displace(neighbour1); + vec3 displacedNeighbour2 = neighbour2 + normal * displace(neighbour2); + + vec3 displacedTangent = displacedNeighbour1 - displacedPosition; + vec3 displacedBitangent = displacedNeighbour2 - displacedPosition; + + vec3 displacedNormal = normalize(cross(displacedTangent, displacedBitangent)); + + vNormal = displacedNormal * normalMatrix; +} +``` + +Since a time component is required for the noise to move, we need to ensure: + +- To add a time component to our Normal material. That will influence the entire pipeline we built in the previous parts, down to the final caustic effect. +- To add a time component and a displacement to the original material. Otherwise, it wouldn't make sense that a static mesh would create moving caustics. (see final example) + +```jsx title=Wiring up the target mesh's material and normal material with time, amplitude and frequency to enable dynamic caustics +//... + +mesh.current.material = normalMaterial; +mesh.current.material.side = THREE.BackSide; + +mesh.current.material.uniforms.time.value = clock.elapsedTime; +mesh.current.material.uniforms.uDisplace.value = displace; +mesh.current.material.uniforms.uAmplitude.value = amplitude; +mesh.current.material.uniforms.uFrequency.value = frequency; + +gl.setRenderTarget(normalRenderTarget); +gl.render(mesh.current, normalCamera); + +mesh.current.material = originalMaterial; +mesh.current.material.uniforms.time.value = clock.elapsedTime; +mesh.current.material.uniforms.uDisplace.value = displace; +mesh.current.material.uniforms.uAmplitude.value = amplitude; +mesh.current.material.uniforms.uFrequency.value = frequency; + +//... +``` + +We now have wired together all the parts necessary to handle dynamic caustics! Let's take some time to make a beautiful scene with some staging by adding a `Spotlight` from `@react-three/drei` and a ground plane that can bounce some light for more realism 🤌 and voilà! We have the perfect scene to showcase our beautiful Caustics ✨. 
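+
+If you're curious what that staging can look like, here is a small, illustrative sketch. The positions, angles, and material values are placeholders I picked for the example, not the exact ones used in the demo below:
+
+```jsx title=Illustrative staging with a drei SpotLight and a ground plane
+import { SpotLight } from '@react-three/drei';
+
+const Staging = () => (
+  <>
+    {/* Volumetric spotlight roughly aligned with the caustics light source */}
+    <SpotLight
+      position={[-10, 13, -10]}
+      angle={0.5}
+      penumbra={1}
+      distance={60}
+      attenuation={12}
+      anglePower={5}
+      intensity={1}
+    />
+    {/* Ground plane under the mesh to catch and bounce some of that light */}
+    <mesh rotation={[-Math.PI / 2, 0, 0]} position={[0, -5, 0]}>
+      <planeGeometry args={[100, 100]} />
+      <meshStandardMaterial color="#ffffff" roughness={0.5} />
+    </mesh>
+  </>
+);
+```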
+ + + + + +You can see that the displacement introduced through the Perlin noise influences our final caustic pattern as we use that same displacement when recomputing the normals that are the defining factor of this effect. + +Try to select different meshes and turn on/off the displacement/rotation to see the different results you can get. + + + +## Conclusion + +Whether you want them subtle, shiny, or colorful ... diff --git a/content/immigrating-to-the-us-my-path.mdx b/content/immigrating-to-the-us-my-path.mdx index 122f5bda8..c9021b6dc 100644 --- a/content/immigrating-to-the-us-my-path.mdx +++ b/content/immigrating-to-the-us-my-path.mdx @@ -1,5 +1,5 @@ --- -title: 'Immigrating to the US: My path from non-immigrant to permanent resident' +title: 'Immigrating to the US' subtitle: US immigration explained for American citizens by an immigrant. date: '2020-06-26T08:00:00.000Z' updated: '2020-06-29T08:00:00.000Z' diff --git a/core/components/MDX/MDXComponents.tsx b/core/components/MDX/MDXComponents.tsx index 59ad8ac8d..ef7d33820 100644 --- a/core/components/MDX/MDXComponents.tsx +++ b/core/components/MDX/MDXComponents.tsx @@ -220,6 +220,8 @@ const RaymarchingSandpack = dynamic( () => import('./Widgets/Raymarching/Sandpack') ); +const CausticsSandpack = dynamic(() => import('./Widgets/Caustics/Sandpack')); + const customComponents = { AnimationTypes, ClipboardAnimationDetails, @@ -279,6 +281,7 @@ const customComponents = { RefractionDispersionSandpack, RenderTargetsSandpack, RaymarchingSandpack, + CausticsSandpack, }; const MDXComponents = { diff --git a/core/components/MDX/Widgets/Caustics/Sandpack.tsx b/core/components/MDX/Widgets/Caustics/Sandpack.tsx new file mode 100644 index 000000000..51ec043b5 --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/Sandpack.tsx @@ -0,0 +1,97 @@ +import { Box, useTheme } from '@maximeheckel/design-system'; +import Sandpack from '@core/components/Code/Sandpack'; +import useGPUTier from '@core/hooks/useGPUTier'; +import { useInView } from 'react-intersection-observer'; +import normalProjection from './normalProjection'; +import simpleCaustics from './simpleCaustics'; +import beautifulCaustics from './beautifulCaustics'; +import causticsPlane from './causticsPlane'; +import dynamicCaustics from './dynamicCaustics'; + +const SceneCSSDark = ` +html { + background: #20222B; +} + +body { + height: 100%; + margin: 0; +} + +canvas { + width: 100vw; + height: 100vh; +}`; + +const SceneCSSLight = ` +html { + background: #F7F7FB; +} + +body { + height: 100%; + margin: 0; +} + +canvas { + width: 100vw; + height: 100vh; +}`; + +const CausticsSandpack = (props: any) => { + const { scene } = props; + const [ref, inView] = useInView(); + const { dark } = useTheme(); + const { tier, loading: tierLoading } = useGPUTier(); + + const autorun = tier > 2; + + const SCENES = { + scene1: normalProjection, + scene2: simpleCaustics, + scene3: beautifulCaustics, + scene4: causticsPlane, + scene5: dynamicCaustics, + }; + + return ( + + {inView && !tierLoading ? 
( + + ) : ( + + )} + + ); +}; + +export default CausticsSandpack; diff --git a/core/components/MDX/Widgets/Caustics/beautifulCaustics.ts b/core/components/MDX/Widgets/Caustics/beautifulCaustics.ts new file mode 100644 index 000000000..ea7567e02 --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/beautifulCaustics.ts @@ -0,0 +1,360 @@ +const causticsComputeFragment = ` +uniform sampler2D uTexture; +varying vec2 vUv; +varying vec3 vPosition; +uniform vec3 uLight; +uniform float uIntensity; + +void main() { + vec2 uv = vUv; + float scale = 0.0; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea * 0.2; + scale += clamp(value, 0.0, 1.0) * uIntensity; + scale *= scale; + + + gl_FragColor = vec4(vec3(scale), 1.0); +} +`; + +const CausticsComputeMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsComputeFragment.glsl"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vPosition; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + vPosition = worldPosition.xyz; + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; + +} +\`; + +const CausticsComputeMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uIntensity: { value: 1.0 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsComputeMaterial; +`; + +const NormalMaterial = `import { shaderMaterial } from "@react-three/drei"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vNormal; + +void main() { + vUv = uv; + vec4 modelViewPosition = modelViewMatrix * vec4(position, 1.0); + vNormal = normalize(normalMatrix * normal); + + // Set the final position of the vertex + gl_Position = projectionMatrix * modelViewPosition; +} +\`; + +const fragmentShader = \` + varying vec2 vUv; + varying vec3 vNormal; + + void main() { + vec3 normal = normalize(vNormal); + gl_FragColor = vec4(normal * 0.5 + 0.5, 1.0); + } +\`; + +const NormalMaterial = shaderMaterial({}, vertexShader, fragmentShader); + +export default NormalMaterial; +`; + +const causticsPlaneFragmentShader = `uniform sampler2D uTexture; +uniform float uAberration; + +varying vec2 vUv; + +const int SAMPLES = 16; + +float random(vec2 p){ + return fract(sin(dot(p.xy ,vec2(12.9898,78.233))) * 43758.5453); +} + +vec3 sat(vec3 rgb, float adjustment) { + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + vec3 intensity = vec3(dot(rgb, W)); + return mix(intensity, rgb, adjustment); +} + +void main() { + vec2 uv = vUv; + vec4 color = vec4(0.0); + + vec3 refractCol = vec3(0.0); + + float flip = -0.5; + + for ( int i = 0; i < SAMPLES; i ++ ) { + float noiseIntensity = 0.01; + // This makes the texture get "noisy": maybe worth adding noiseIntensity as a uniform + float noise = random(uv) * noiseIntensity; + // This makes layers "slide" and noisy to create the rgb color shift + float slide = float(i) / float(SAMPLES) * 0.1 + noise; + + + float mult = i % 2 == 0 ? 1.0 : -1.0; + flip *= mult; + + vec2 dir = i % 2 == 0 ? 
vec2(flip, 0.0) : vec2(0.0, flip); + + // Apply the color shift and refraction to each color channel (r,g,b) of the texture passed in uSceneTex; + refractCol.r += texture2D(uTexture, uv + (uAberration * slide * dir * 1.0) ).r; + refractCol.g += texture2D(uTexture, uv + (uAberration * slide * dir * 2.0) ).g; + refractCol.b += texture2D(uTexture, uv + (uAberration * slide * dir * 3.0) ).b; + } + // Divide by the number of layers to normalize colors (rgb values can be worth up to the value of SAMPLES) + refractCol /= float(SAMPLES); + refractCol = sat(refractCol, 1.265); + + color = vec4(refractCol.r, refractCol.g, refractCol.b, 1.0); + + gl_FragColor = vec4(color.rgb, 1.0); + + #include + #include +} +`; + +const CausticsPlaneMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsPlaneFragmentShader.glsl"; + +const vertexShader = \` +varying vec2 vUv; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; +} +\`; + +const CausticsPlaneMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uAberration: { value: 0.02 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsPlaneMaterial; +`; + +const AppCode = `import { + OrbitControls, + Environment, + MeshTransmissionMaterial, + PerspectiveCamera, + useFBO, +} from "@react-three/drei"; +import { Canvas, useFrame } from "@react-three/fiber"; +import { useControls } from "leva"; +import { useRef, useState } from "react"; +import * as THREE from "three"; +import { FullScreenQuad } from "three-stdlib"; + +import CausticsPlaneMaterial from "./CausticsPlaneMaterial"; +import CausticsComputeMaterial from "./CausticsComputeMaterial"; +import NormalMaterial from "./NormalMaterial"; + +import './scene.css'; + +const config = { + backsideThickness: 0.3, + thickness: 25, + samples: 6, + transmission: 0.9, + clearcoat: 1, + clearcoatRoughness: 0.5, + chromaticAberration: 1.5, + anisotropy: 0.2, + roughness: 0, + distortion: 0, + distortionScale: 0.09, + temporalDistortion: 0, + ior: 1.5, + color: "#ffffff", +}; + +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + + const { + light, + intensity, + chromaticAberration, + } = useControls({ + light: { + value: new THREE.Vector3(-10, 13, -10), + }, + intensity: { + value: 1.5, + step: 0.01, + min: 0, + max: 10.0, + }, + chromaticAberration: { + value: 0.19, + step: 0.001, + min: 0, + max: 0.4, + }, + }); + + const normalRenderTarget = useFBO(2000, 2000, {}); + const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) + ); + const [normalMaterial] = useState(() => new NormalMaterial()); + + + const causticsComputeRenderTarget = useFBO(2000, 2000, {}); + const [causticsQuad] = useState(() => new FullScreenQuad()); + const [causticsComputeMaterial] = useState(() => new CausticsComputeMaterial()); + + const [causticsPlaneMaterial] = useState(() => new CausticsPlaneMaterial()); + causticsPlaneMaterial.transparent = true; + causticsPlaneMaterial.blending = THREE.CustomBlending; + causticsPlaneMaterial.blendSrc = THREE.OneFactor; + causticsPlaneMaterial.blendDst = THREE.SrcAlphaFactor; + + useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + normalCamera.position.set(light.x, light.y, 
light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + + causticsQuad.material = causticsComputeMaterial; + causticsQuad.material.uniforms.uTexture.value = normalRenderTarget.texture; + causticsQuad.material.uniforms.uLight.value = light; + causticsQuad.material.uniforms.uIntensity.value = intensity; + + gl.setRenderTarget(causticsComputeRenderTarget); + causticsQuad.render(gl); + + causticsPlane.current.material = causticsPlaneMaterial; + + causticsPlane.current.material.uniforms.uTexture.value = + causticsComputeRenderTarget.texture; + causticsPlane.current.material.uniforms.uAberration.value = + chromaticAberration; + + gl.setRenderTarget(null); + }); + + return ( + <> + + + + + + + + + ); +}; + +const Scene = () => { + return ( + + + + + + + ); +}; + + +export default Scene; +`; + +const beautifulCaustics = { + '/App.js': { + code: AppCode, + active: true, + }, + '/CausticsPlaneMaterial.js': { + code: CausticsPlaneMaterial, + }, + '/causticsPlaneFragmentShader.glsl': { + code: causticsPlaneFragmentShader, + }, + '/CausticsComputeMaterial.js': { + code: CausticsComputeMaterial, + }, + '/causticsComputeFragment.glsl': { + code: causticsComputeFragment, + }, + '/NormalMaterial.js': { + code: NormalMaterial, + }, +}; + +export default beautifulCaustics; diff --git a/core/components/MDX/Widgets/Caustics/causticsPlane.ts b/core/components/MDX/Widgets/Caustics/causticsPlane.ts new file mode 100644 index 000000000..45dea6108 --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/causticsPlane.ts @@ -0,0 +1,442 @@ +const causticsComputeFragment = ` +uniform sampler2D uTexture; +varying vec2 vUv; +varying vec3 vPosition; +uniform vec3 uLight; +uniform float uIntensity; + +void main() { + vec2 uv = vUv; + float scale = 0.0; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea * 0.2; + scale += clamp(value, 0.0, 1.0) * uIntensity; + scale *= scale; + + + gl_FragColor = vec4(vec3(scale), 1.0); +} +`; + +const CausticsComputeMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsComputeFragment.glsl"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vPosition; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + vPosition = worldPosition.xyz; + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; + +} +\`; + +const CausticsComputeMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uIntensity: { value: 1.0 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsComputeMaterial; +`; + +const NormalMaterial = `import { 
shaderMaterial } from "@react-three/drei"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vNormal; + +void main() { + vUv = uv; + vec4 modelViewPosition = modelViewMatrix * vec4(position, 1.0); + vNormal = normalize(normalMatrix * normal); + + // Set the final position of the vertex + gl_Position = projectionMatrix * modelViewPosition; +} +\`; + +const fragmentShader = \` + varying vec2 vUv; + varying vec3 vNormal; + + void main() { + vec3 normal = normalize(vNormal); + gl_FragColor = vec4(normal * 0.5 + 0.5, 1.0); + } +\`; + +const NormalMaterial = shaderMaterial({}, vertexShader, fragmentShader); + +export default NormalMaterial; +`; + +const causticsPlaneFragmentShader = `uniform sampler2D uTexture; +uniform float uAberration; + +varying vec2 vUv; + +const int SAMPLES = 16; + +float random(vec2 p){ + return fract(sin(dot(p.xy ,vec2(12.9898,78.233))) * 43758.5453); +} + +vec3 sat(vec3 rgb, float adjustment) { + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + vec3 intensity = vec3(dot(rgb, W)); + return mix(intensity, rgb, adjustment); +} + +void main() { + vec2 uv = vUv; + vec4 color = vec4(0.0); + + vec3 refractCol = vec3(0.0); + + float flip = -0.5; + + for ( int i = 0; i < SAMPLES; i ++ ) { + float noiseIntensity = 0.01; + // This makes the texture get "noisy": maybe worth adding noiseIntensity as a uniform + float noise = random(uv) * noiseIntensity; + // This makes layers "slide" and noisy to create the rgb color shift + float slide = float(i) / float(SAMPLES) * 0.1 + noise; + + + float mult = i % 2 == 0 ? 1.0 : -1.0; + flip *= mult; + + vec2 dir = i % 2 == 0 ? vec2(flip, 0.0) : vec2(0.0, flip); + + // Apply the color shift and refraction to each color channel (r,g,b) of the texture passed in uSceneTex; + refractCol.r += texture2D(uTexture, uv + (uAberration * slide * dir * 1.0) ).r; + refractCol.g += texture2D(uTexture, uv + (uAberration * slide * dir * 2.0) ).g; + refractCol.b += texture2D(uTexture, uv + (uAberration * slide * dir * 3.0) ).b; + } + // Divide by the number of layers to normalize colors (rgb values can be worth up to the value of SAMPLES) + refractCol /= float(SAMPLES); + refractCol = sat(refractCol, 1.265); + + color = vec4(refractCol.r, refractCol.g, refractCol.b, 1.0); + + gl_FragColor = vec4(color.rgb, 1.0); + + #include + #include +} +`; + +const CausticsPlaneMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsPlaneFragmentShader.glsl"; + +const vertexShader = \` +varying vec2 vUv; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; +} +\`; + +const CausticsPlaneMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uAberration: { value: 0.02 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsPlaneMaterial; +`; + +const AppCode = `import { + OrbitControls, + Environment, + MeshTransmissionMaterial, + PerspectiveCamera, + useFBO, +} from "@react-three/drei"; +import { Canvas, useFrame } from "@react-three/fiber"; +import { useControls } from "leva"; +import { useRef, useState } from "react"; +import * as THREE from "three"; +import { FullScreenQuad } from "three-stdlib"; + +import CausticsPlaneMaterial from "./CausticsPlaneMaterial"; +import CausticsComputeMaterial from "./CausticsComputeMaterial"; +import NormalMaterial from "./NormalMaterial"; 
+ +import './scene.css'; + +const config = { + backsideThickness: 0.3, + thickness: 25, + samples: 6, + transmission: 0.9, + clearcoat: 1, + clearcoatRoughness: 0.5, + chromaticAberration: 1.5, + anisotropy: 0.2, + roughness: 0, + distortion: 0, + distortionScale: 0.09, + temporalDistortion: 0, + ior: 1.5, + color: "#ffffff", +}; + +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + + const { + light, + intensity, + chromaticAberration, + rotate, + } = useControls({ + light: { + value: new THREE.Vector3(-10, 13, -10), + }, + intensity: { + value: 1.5, + step: 0.01, + min: 0, + max: 10.0, + }, + chromaticAberration: { + value: 0.16, + step: 0.001, + min: 0, + max: 0.4, + }, + rotate: { + value: true, + } + }); + + const normalRenderTarget = useFBO(2000, 2000, {}); + const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) + ); + const [normalMaterial] = useState(() => new NormalMaterial()); + + + const causticsComputeRenderTarget = useFBO(2000, 2000, {}); + const [causticsQuad] = useState(() => new FullScreenQuad()); + const [causticsComputeMaterial] = useState(() => new CausticsComputeMaterial()); + + const [causticsPlaneMaterial] = useState(() => new CausticsPlaneMaterial()); + causticsPlaneMaterial.transparent = true; + causticsPlaneMaterial.blending = THREE.CustomBlending; + causticsPlaneMaterial.blendSrc = THREE.OneFactor; + causticsPlaneMaterial.blendDst = THREE.SrcAlphaFactor; + + useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + let boundsVertices = []; + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.max.z) + ); + + const lightDir = new THREE.Vector3( + light.x, + light.y, + light.z + ).normalize(); + + // Calculates the projected coordinates of the vertices onto the plane + // perpendicular to the light direction + const projectedCoordinates = boundsVertices.map((v) => + { + const newX = v.x + lightDir.x * (-v.y / lightDir.y); + const newY = v.y + lightDir.y * (-v.y / lightDir.y); + const newZ = v.z + lightDir.z * (-v.y / lightDir.y); + + return new THREE.Vector3(newX, newY, newZ); + } + ); + + // Calculates the combined spatial coordinates of the projected vertices + // and divides by the number of vertices to get the center position + const centerPos = projectedCoordinates + .reduce((a, b) => a.add(b), new THREE.Vector3(0, 0, 0)) + .divideScalar(projectedCoordinates.length); + + // Calculates the scale of the caustic plane based on the distance of the + // furthest vertex from the center (using euclidean distance) + const scale = projectedCoordinates + .map((p) => + Math.sqrt( + Math.pow(p.x - centerPos.x, 2), + Math.pow(p.z - centerPos.z, 2) + ) + ) + .reduce((a, b) => Math.max(a, b), 0); + + // The scale of the plane is multiplied by this correction factor to + // avoid the caustics pattern to be 
cut / overflow the bounds of the plane + // my normal projection or my math must be a bit off, so I'm trying to be very conservative here + const scaleCorrection = 1.75; + + causticsPlane.current.scale.set( + scale * scaleCorrection, + scale * scaleCorrection, + scale * scaleCorrection + ); + causticsPlane.current.position.set(centerPos.x, centerPos.y, centerPos.z); + + if (rotate) { + mesh.current.rotation.x += 0.005; + mesh.current.rotation.y += 0.005; + } + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + + causticsQuad.material = causticsComputeMaterial; + causticsQuad.material.uniforms.uTexture.value = normalRenderTarget.texture; + causticsQuad.material.uniforms.uLight.value = light; + causticsQuad.material.uniforms.uIntensity.value = intensity; + + gl.setRenderTarget(causticsComputeRenderTarget); + causticsQuad.render(gl); + + causticsPlane.current.material = causticsPlaneMaterial; + + causticsPlane.current.material.uniforms.uTexture.value = + causticsComputeRenderTarget.texture; + causticsPlane.current.material.uniforms.uAberration.value = + chromaticAberration; + + gl.setRenderTarget(null); + }); + + return ( + <> + + + + + + + + + ); +}; + +const Scene = () => { + return ( + + + + + + + ); +}; + + +export default Scene; +`; + +const CausticsPlane = { + '/App.js': { + code: AppCode, + active: true, + }, + '/CausticsPlaneMaterial.js': { + code: CausticsPlaneMaterial, + }, + '/causticsPlaneFragmentShader.glsl': { + code: causticsPlaneFragmentShader, + }, + '/CausticsComputeMaterial.js': { + code: CausticsComputeMaterial, + }, + '/causticsComputeFragment.glsl': { + code: causticsComputeFragment, + }, + '/NormalMaterial.js': { + code: NormalMaterial, + }, +}; + +export default CausticsPlane; diff --git a/core/components/MDX/Widgets/Caustics/dynamicCaustics.ts b/core/components/MDX/Widgets/Caustics/dynamicCaustics.ts new file mode 100644 index 000000000..8f3e58014 --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/dynamicCaustics.ts @@ -0,0 +1,1232 @@ +const vertexBase = ` +uniform float time; +uniform bool uDisplace; +uniform float uFrequency; +uniform float uAmplitude; + + +vec4 permute(vec4 x) { + return mod(((x*34.0)+1.0)*x, 289.0); +} + +vec4 taylorInvSqrt(vec4 r) { + return 1.79284291400159 - 0.85373472095314 * r; +} + +vec3 fade(vec3 t) { + return t*t*t*(t*(t*6.0-15.0)+10.0); +} + +float cnoise(vec3 P) { + vec3 Pi0 = floor(P); // Integer part for indexing + vec3 Pi1 = Pi0 + vec3(1.0); // Integer part + 1 + Pi0 = mod(Pi0, 289.0); + Pi1 = mod(Pi1, 289.0); + vec3 Pf0 = fract(P); // Fractional part for interpolation + vec3 Pf1 = Pf0 - vec3(1.0); // Fractional part - 1.0 + vec4 ix = vec4(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec4 iy = vec4(Pi0.yy, Pi1.yy); + vec4 iz0 = Pi0.zzzz; + vec4 iz1 = Pi1.zzzz; + + vec4 ixy = permute(permute(ix) + iy); + vec4 ixy0 = permute(ixy + iz0); + vec4 ixy1 = permute(ixy + iz1); + + vec4 gx0 = ixy0 / 7.0; + vec4 gy0 = fract(floor(gx0) / 7.0) - 0.5; + gx0 = fract(gx0); + vec4 gz0 = vec4(0.5) - abs(gx0) - abs(gy0); + vec4 sz0 = step(gz0, vec4(0.0)); + gx0 -= 
sz0 * (step(0.0, gx0) - 0.5); + gy0 -= sz0 * (step(0.0, gy0) - 0.5); + + vec4 gx1 = ixy1 / 7.0; + vec4 gy1 = fract(floor(gx1) / 7.0) - 0.5; + gx1 = fract(gx1); + vec4 gz1 = vec4(0.5) - abs(gx1) - abs(gy1); + vec4 sz1 = step(gz1, vec4(0.0)); + gx1 -= sz1 * (step(0.0, gx1) - 0.5); + gy1 -= sz1 * (step(0.0, gy1) - 0.5); + + vec3 g000 = vec3(gx0.x,gy0.x,gz0.x); + vec3 g100 = vec3(gx0.y,gy0.y,gz0.y); + vec3 g010 = vec3(gx0.z,gy0.z,gz0.z); + vec3 g110 = vec3(gx0.w,gy0.w,gz0.w); + vec3 g001 = vec3(gx1.x,gy1.x,gz1.x); + vec3 g101 = vec3(gx1.y,gy1.y,gz1.y); + vec3 g011 = vec3(gx1.z,gy1.z,gz1.z); + vec3 g111 = vec3(gx1.w,gy1.w,gz1.w); + + vec4 norm0 = taylorInvSqrt(vec4(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); + g000 *= norm0.x; + g010 *= norm0.y; + g100 *= norm0.z; + g110 *= norm0.w; + vec4 norm1 = taylorInvSqrt(vec4(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); + g001 *= norm1.x; + g011 *= norm1.y; + g101 *= norm1.z; + g111 *= norm1.w; + + float n000 = dot(g000, Pf0); + float n100 = dot(g100, vec3(Pf1.x, Pf0.yz)); + float n010 = dot(g010, vec3(Pf0.x, Pf1.y, Pf0.z)); + float n110 = dot(g110, vec3(Pf1.xy, Pf0.z)); + float n001 = dot(g001, vec3(Pf0.xy, Pf1.z)); + float n101 = dot(g101, vec3(Pf1.x, Pf0.y, Pf1.z)); + float n011 = dot(g011, vec3(Pf0.x, Pf1.yz)); + float n111 = dot(g111, Pf1); + + vec3 fade_xyz = fade(Pf0); + vec4 n_z = mix(vec4(n000, n100, n010, n110), vec4(n001, n101, n011, n111), fade_xyz.z); + vec2 n_yz = mix(n_z.xy, n_z.zw, fade_xyz.y); + float n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); + return 2.2 * n_xyz; +} + +vec3 orthogonal(vec3 v) { + return normalize(abs(v.x) > abs(v.z) ? vec3(-v.y, v.x, 0.0) + : vec3(0.0, -v.z, v.y)); +} + + +float displace(vec3 point) { + if(uDisplace) { + return cnoise(point * uFrequency + vec3(time)) * uAmplitude; + } + return 0.0; +} +`; + +const vertexDisplacement = ` +#include + +vec3 displacedPosition = position + normal * displace(position); +vec4 modelPosition = modelMatrix * vec4(displacedPosition, 1.0); +vec4 viewPosition = viewMatrix * modelPosition; +vec4 projectedPosition = projectionMatrix * viewPosition; +gl_Position = projectedPosition; + +float offset = 4.0/256.0; +vec3 tangent = orthogonal(normal); +vec3 bitangent = normalize(cross(normal, tangent)); +vec3 neighbour1 = position + tangent * offset; +vec3 neighbour2 = position + bitangent * offset; +vec3 displacedNeighbour1 = neighbour1 + normal * displace(neighbour1); +vec3 displacedNeighbour2 = neighbour2 + normal * displace(neighbour2); + +vec3 displacedTangent = displacedNeighbour1 - displacedPosition; +vec3 displacedBitangent = displacedNeighbour2 - displacedPosition; + +// https://upload.wikimedia.org/wikipedia/commons/d/d2/Right_hand_rule_cross_product.svg +vec3 displacedNormal = normalize(cross(displacedTangent, displacedBitangent)); +vNormal = displacedNormal * normalMatrix; +`; + +const causticsComputeFragment = ` +uniform sampler2D uTexture; +varying vec2 vUv; +varying vec3 vPosition; +uniform vec3 uLight; +uniform float uIntensity; + +void main() { + vec2 uv = vUv; + float scale = 0.0; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea * 0.2; + scale 
+= clamp(value, 0.0, 1.0) * uIntensity; + scale *= scale; + + + gl_FragColor = vec4(vec3(scale), 1.0); +} +`; + +const MeshTransmissionMaterial = `import * as THREE from "three"; +import React from "react"; +import { extend, useFrame } from "@react-three/fiber"; +import { useFBO, MeshDiscardMaterial } from "@react-three/drei"; + +import vertexShader from "!!raw-loader!./vertexBase.glsl"; +import vertexReplace from "!!raw-loader!./vertexDisplacement.glsl"; + +class MeshTransmissionMaterialImpl extends THREE.MeshPhysicalMaterial { + constructor(samples = 6, transmissionSampler = false) { + super(); + + this.uniforms = { + chromaticAberration: { value: 0.05 }, + // Transmission must always be 0, unless transmissionSampler is being used + transmission: { value: 0 }, + // Instead a workaround is used, see below for reasons why + _transmission: { value: 1 }, + transmissionMap: { value: null }, + // Roughness is 1 in THREE.MeshPhysicalMaterial but it makes little sense in a transmission material + roughness: { value: 0 }, + thickness: { value: 0 }, + thicknessMap: { value: null }, + attenuationDistance: { value: Infinity }, + attenuationColor: { value: new THREE.Color("white") }, + anisotropicBlur: { value: 0.1 }, + time: { value: 0 }, + distortion: { value: 0.0 }, + distortionScale: { value: 0.5 }, + temporalDistortion: { value: 0.0 }, + buffer: { value: null }, + uDisplace: { value: true }, + uFrequency: { value: 0.5 }, + uAmplitude: { value: 0.25 }, + }; + + this.onBeforeCompile = (shader) => { + shader.uniforms = { + ...shader.uniforms, + ...this.uniforms, + }; + + // Fix for r153-r156 anisotropy chunks + // https://github.com/mrdoob/three.js/pull/26716 + if (this.anisotropy > 0) shader.defines.USE_ANISOTROPY = ""; + + // If the transmission sampler is active inject a flag + if (transmissionSampler) shader.defines.USE_SAMPLER = ""; + // Otherwise we do use use .transmission and must therefore force USE_TRANSMISSION + // because threejs won't inject it for us + else shader.defines.USE_TRANSMISSION = ""; + + // Head + shader.fragmentShader = + /*glsl*/ \` + uniform float chromaticAberration; + uniform float anisotropicBlur; + uniform float time; + uniform float distortion; + uniform float distortionScale; + uniform float temporalDistortion; + uniform sampler2D buffer; + + vec3 random3(vec3 c) { + float j = 4096.0*sin(dot(c,vec3(17.0, 59.4, 15.0))); + vec3 r; + r.z = fract(512.0*j); + j *= .125; + r.x = fract(512.0*j); + j *= .125; + r.y = fract(512.0*j); + return r-0.5; + } + + float seed = 0.0; + uint hash( uint x ) { + x += ( x << 10u ); + x ^= ( x >> 6u ); + x += ( x << 3u ); + x ^= ( x >> 11u ); + x += ( x << 15u ); + return x; + } + + // Compound versions of the hashing algorithm I whipped together. + uint hash( uvec2 v ) { return hash( v.x ^ hash(v.y) ); } + uint hash( uvec3 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ); } + uint hash( uvec4 v ) { return hash( v.x ^ hash(v.y) ^ hash(v.z) ^ hash(v.w) ); } + + // Construct a float with half-open range [0:1] using low 23 bits. + // All zeroes yields 0.0, all ones yields the next smallest representable value below 1.0. + float floatConstruct( uint m ) { + const uint ieeeMantissa = 0x007FFFFFu; // binary32 mantissa bitmask + const uint ieeeOne = 0x3F800000u; // 1.0 in IEEE binary32 + m &= ieeeMantissa; // Keep only mantissa bits (fractional part) + m |= ieeeOne; // Add fractional part to 1.0 + float f = uintBitsToFloat( m ); // Range [1:2] + return f - 1.0; // Range [0:1] + } + + // Pseudo-random value in half-open range [0:1]. 
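+          // Hash-based pseudo-random helpers (note: added comment); rand() below supplies the per-sample jitter used by the anisotropic blur and chromatic aberration sampling loop further down.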
+ float random( float x ) { return floatConstruct(hash(floatBitsToUint(x))); } + float random( vec2 v ) { return floatConstruct(hash(floatBitsToUint(v))); } + float random( vec3 v ) { return floatConstruct(hash(floatBitsToUint(v))); } + float random( vec4 v ) { return floatConstruct(hash(floatBitsToUint(v))); } + + float rand() { + float result = random(vec3(gl_FragCoord.xy, seed)); + seed += 1.0; + return result; + } + + const float F3 = 0.3333333; + const float G3 = 0.1666667; + + float snoise(vec3 p) { + vec3 s = floor(p + dot(p, vec3(F3))); + vec3 x = p - s + dot(s, vec3(G3)); + vec3 e = step(vec3(0.0), x - x.yzx); + vec3 i1 = e*(1.0 - e.zxy); + vec3 i2 = 1.0 - e.zxy*(1.0 - e); + vec3 x1 = x - i1 + G3; + vec3 x2 = x - i2 + 2.0*G3; + vec3 x3 = x - 1.0 + 3.0*G3; + vec4 w, d; + w.x = dot(x, x); + w.y = dot(x1, x1); + w.z = dot(x2, x2); + w.w = dot(x3, x3); + w = max(0.6 - w, 0.0); + d.x = dot(random3(s), x); + d.y = dot(random3(s + i1), x1); + d.z = dot(random3(s + i2), x2); + d.w = dot(random3(s + 1.0), x3); + w *= w; + w *= w; + d *= w; + return dot(d, vec4(52.0)); + } + + float snoiseFractal(vec3 m) { + return 0.5333333* snoise(m) + +0.2666667* snoise(2.0*m) + +0.1333333* snoise(4.0*m) + +0.0666667* snoise(8.0*m); + }\n\` + shader.fragmentShader; + + // Remove transmission + shader.fragmentShader = shader.fragmentShader.replace( + "#include ", + /*glsl*/ \` + #ifdef USE_TRANSMISSION + // Transmission code is based on glTF-Sampler-Viewer + // https://github.com/KhronosGroup/glTF-Sample-Viewer + uniform float _transmission; + uniform float thickness; + uniform float attenuationDistance; + uniform vec3 attenuationColor; + #ifdef USE_TRANSMISSIONMAP + uniform sampler2D transmissionMap; + #endif + #ifdef USE_THICKNESSMAP + uniform sampler2D thicknessMap; + #endif + uniform vec2 transmissionSamplerSize; + uniform sampler2D transmissionSamplerMap; + uniform mat4 modelMatrix; + uniform mat4 projectionMatrix; + varying vec3 vWorldPosition; + vec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) { + // Direction of refracted light. + vec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior ); + // Compute rotation-independant scaling of the model matrix. + vec3 modelScale; + modelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) ); + modelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) ); + modelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) ); + // The thickness is specified in local space. + return normalize( refractionVector ) * thickness * modelScale; + } + float applyIorToRoughness( const in float roughness, const in float ior ) { + // Scale roughness with IOR so that an IOR of 1.0 results in no microfacet refraction and + // an IOR of 1.5 results in the default amount of microfacet refraction. 
+          return roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );
+        }
+        vec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {
+          float framebufferLod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior );
+          #ifdef USE_SAMPLER
+            #ifdef texture2DLodEXT
+              return texture2DLodEXT(transmissionSamplerMap, fragCoord.xy, framebufferLod);
+            #else
+              return texture2D(transmissionSamplerMap, fragCoord.xy, framebufferLod);
+            #endif
+          #else
+            return texture2D(buffer, fragCoord.xy);
+          #endif
+        }
+        vec3 applyVolumeAttenuation( const in vec3 radiance, const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {
+          if ( isinf( attenuationDistance ) ) {
+            // Attenuation distance is +∞, i.e. the transmitted color is not attenuated at all.
+            return radiance;
+          } else {
+            // Compute light attenuation using Beer's law.
+            vec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;
+            vec3 transmittance = exp( - attenuationCoefficient * transmissionDistance ); // Beer's law
+            return transmittance * radiance;
+          }
+        }
+        vec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,
+          const in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,
+          const in mat4 viewMatrix, const in mat4 projMatrix, const in float ior, const in float thickness,
+          const in vec3 attenuationColor, const in float attenuationDistance ) {
+          vec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );
+          vec3 refractedRayExit = position + transmissionRay;
+          // Project refracted vector on the framebuffer, while mapping to normalized device coordinates.
+          vec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );
+          vec2 refractionCoords = ndcPos.xy / ndcPos.w;
+          refractionCoords += 1.0;
+          refractionCoords /= 2.0;
+          // Sample framebuffer to get pixel the refracted ray hits.
+          vec4 transmittedLight = getTransmissionSample( refractionCoords, roughness, ior );
+          vec3 attenuatedColor = applyVolumeAttenuation( transmittedLight.rgb, length( transmissionRay ), attenuationColor, attenuationDistance );
+          // Get the specular component.
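+          // (note: added comment) EnvironmentBRDF approximates the specular (Fresnel) response for this view/normal pair; (1.0 - F) below keeps only the fraction of light that is transmitted rather than reflected.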
+ vec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness ); + return vec4( ( 1.0 - F ) * attenuatedColor * diffuseColor, transmittedLight.a ); + } + #endif\n\` + ); + + // Add refraction + shader.fragmentShader = shader.fragmentShader.replace( + "#include ", + /*glsl*/ \` + // Improve the refraction to use the world pos + material.transmission = _transmission; + material.transmissionAlpha = 1.0; + material.thickness = thickness; + material.attenuationDistance = attenuationDistance; + material.attenuationColor = attenuationColor; + #ifdef USE_TRANSMISSIONMAP + material.transmission *= texture2D( transmissionMap, vUv ).r; + #endif + #ifdef USE_THICKNESSMAP + material.thickness *= texture2D( thicknessMap, vUv ).g; + #endif + + vec3 pos = vWorldPosition; + vec3 v = normalize( cameraPosition - pos ); + vec3 n = inverseTransformDirection( normal, viewMatrix ); + vec3 transmission = vec3(0.0); + float transmissionR, transmissionB, transmissionG; + float randomCoords = rand(); + float thickness_smear = thickness * max(pow(roughnessFactor, 0.33), anisotropicBlur); + vec3 distortionNormal = vec3(0.0); + vec3 temporalOffset = vec3(time, -time, -time) * temporalDistortion; + if (distortion > 0.0) { + distortionNormal = distortion * vec3(snoiseFractal(vec3((pos * distortionScale + temporalOffset))), snoiseFractal(vec3(pos.zxy * distortionScale - temporalOffset)), snoiseFractal(vec3(pos.yxz * distortionScale + temporalOffset))); + } + for (float i = 0.0; i < \${samples}.0; i ++) { + vec3 sampleNorm = normalize(n + roughnessFactor * roughnessFactor * 2.0 * normalize(vec3(rand() - 0.5, rand() - 0.5, rand() - 0.5)) * pow(rand(), 0.33) + distortionNormal); + transmissionR = getIBLVolumeRefraction( + sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90, + pos, modelMatrix, viewMatrix, projectionMatrix, material.ior, material.thickness + thickness_smear * (i + randomCoords) / float(\${samples}), + material.attenuationColor, material.attenuationDistance + ).r; + transmissionG = getIBLVolumeRefraction( + sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90, + pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + chromaticAberration * (i + randomCoords) / float(\${samples})) , material.thickness + thickness_smear * (i + randomCoords) / float(\${samples}), + material.attenuationColor, material.attenuationDistance + ).g; + transmissionB = getIBLVolumeRefraction( + sampleNorm, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90, + pos, modelMatrix, viewMatrix, projectionMatrix, material.ior * (1.0 + 2.0 * chromaticAberration * (i + randomCoords) / float(\${samples})), material.thickness + thickness_smear * (i + randomCoords) / float(\${samples}), + material.attenuationColor, material.attenuationDistance + ).b; + transmission.r += transmissionR; + transmission.g += transmissionG; + transmission.b += transmissionB; + } + transmission /= \${samples}.0; + totalDiffuse = mix( totalDiffuse, transmission.rgb, material.transmission );\n\` + ); + + shader.vertexShader = vertexShader + shader.vertexShader; + + shader.vertexShader = shader.vertexShader.replace( + "#include ", + vertexReplace + ); + }; + + Object.keys(this.uniforms).forEach((name) => + Object.defineProperty(this, name, { + get: () => this.uniforms[name].value, + set: (v) => (this.uniforms[name].value = v), + }) + ); + } +} + +export const MeshTransmissionMaterial = React.forwardRef( + ( + { + 
buffer, + transmissionSampler = false, + backside = false, + side = THREE.FrontSide, + transmission = 1, + thickness = 0, + backsideThickness = 0, + samples = 10, + resolution, + backsideResolution, + background, + anisotropy, + anisotropicBlur, + uDisplace, + uFrequency, + uAmplitude, + ...props + }, + fref + ) => { + extend({ MeshTransmissionMaterial: MeshTransmissionMaterialImpl }); + + const ref = React.useRef(null); + const [discardMaterial] = React.useState(() => ); + const fboBack = useFBO(backsideResolution || resolution); + const fboMain = useFBO(resolution); + + let oldBg; + let oldTone; + let parent; + useFrame((state) => { + ref.current.time = state.clock.getElapsedTime(); + // Render only if the buffer matches the built-in and no transmission sampler is set + if (ref.current.buffer === fboMain.texture && !transmissionSampler) { + parent = ref.current.__r3f.parent; + if (parent) { + // Save defaults + oldTone = state.gl.toneMapping; + oldBg = state.scene.background; + + // Switch off tonemapping lest it double tone maps + // Save the current background and set the HDR as the new BG + // Use discardmaterial, the parent will be invisible, but it's shadows will still be cast + state.gl.toneMapping = THREE.NoToneMapping; + if (background) state.scene.background = background; + parent.material = discardMaterial; + + if (backside) { + // Render into the backside buffer + state.gl.setRenderTarget(fboBack); + state.gl.render(state.scene, state.camera); + // And now prepare the material for the main render using the backside buffer + parent.material = ref.current; + parent.material.buffer = fboBack.texture; + parent.material.thickness = backsideThickness; + parent.material.side = THREE.BackSide; + } + + // Render into the main buffer + state.gl.setRenderTarget(fboMain); + state.gl.render(state.scene, state.camera); + + parent.material = ref.current; + parent.material.thickness = thickness; + parent.material.side = side; + parent.material.buffer = fboMain.texture; + + // Set old state back + state.scene.background = oldBg; + state.gl.setRenderTarget(null); + state.gl.toneMapping = oldTone; + } + } + }); + + // Forward ref + React.useImperativeHandle(fref, () => ref.current, []); + + return ( + 0 and execute extra renders. + // The exception is when transmissionSampler is set, in which case we are using three's built in sampler. + anisotropicBlur={anisotropicBlur ?? anisotropy} + transmission={transmissionSampler ? 
transmission : 0} + thickness={thickness} + side={side} + /> + ); + } +); + +MeshTransmissionMaterial.displayName = "MeshTransmissionMaterial"; +`; + +const CausticsComputeMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsComputeFragment.glsl"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vPosition; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + vPosition = worldPosition.xyz; + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; + +} +\`; + +const CausticsComputeMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uIntensity: { value: 1.0 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsComputeMaterial; +`; + +const NormalMaterial = `import { shaderMaterial } from "@react-three/drei"; + +const vertexShader = \` +uniform float time; +uniform bool uDisplace; +uniform float uFrequency; +uniform float uAmplitude; + +varying vec2 vUv; +varying vec3 vNormal; + +vec4 permute(vec4 x) { + return mod(((x*34.0)+1.0)*x, 289.0); +} + +vec4 taylorInvSqrt(vec4 r) { + return 1.79284291400159 - 0.85373472095314 * r; +} + +vec3 fade(vec3 t) { + return t*t*t*(t*(t*6.0-15.0)+10.0); +} + +float cnoise(vec3 P) { + vec3 Pi0 = floor(P); // Integer part for indexing + vec3 Pi1 = Pi0 + vec3(1.0); // Integer part + 1 + Pi0 = mod(Pi0, 289.0); + Pi1 = mod(Pi1, 289.0); + vec3 Pf0 = fract(P); // Fractional part for interpolation + vec3 Pf1 = Pf0 - vec3(1.0); // Fractional part - 1.0 + vec4 ix = vec4(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec4 iy = vec4(Pi0.yy, Pi1.yy); + vec4 iz0 = Pi0.zzzz; + vec4 iz1 = Pi1.zzzz; + + vec4 ixy = permute(permute(ix) + iy); + vec4 ixy0 = permute(ixy + iz0); + vec4 ixy1 = permute(ixy + iz1); + + vec4 gx0 = ixy0 / 7.0; + vec4 gy0 = fract(floor(gx0) / 7.0) - 0.5; + gx0 = fract(gx0); + vec4 gz0 = vec4(0.5) - abs(gx0) - abs(gy0); + vec4 sz0 = step(gz0, vec4(0.0)); + gx0 -= sz0 * (step(0.0, gx0) - 0.5); + gy0 -= sz0 * (step(0.0, gy0) - 0.5); + + vec4 gx1 = ixy1 / 7.0; + vec4 gy1 = fract(floor(gx1) / 7.0) - 0.5; + gx1 = fract(gx1); + vec4 gz1 = vec4(0.5) - abs(gx1) - abs(gy1); + vec4 sz1 = step(gz1, vec4(0.0)); + gx1 -= sz1 * (step(0.0, gx1) - 0.5); + gy1 -= sz1 * (step(0.0, gy1) - 0.5); + + vec3 g000 = vec3(gx0.x,gy0.x,gz0.x); + vec3 g100 = vec3(gx0.y,gy0.y,gz0.y); + vec3 g010 = vec3(gx0.z,gy0.z,gz0.z); + vec3 g110 = vec3(gx0.w,gy0.w,gz0.w); + vec3 g001 = vec3(gx1.x,gy1.x,gz1.x); + vec3 g101 = vec3(gx1.y,gy1.y,gz1.y); + vec3 g011 = vec3(gx1.z,gy1.z,gz1.z); + vec3 g111 = vec3(gx1.w,gy1.w,gz1.w); + + vec4 norm0 = taylorInvSqrt(vec4(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); + g000 *= norm0.x; + g010 *= norm0.y; + g100 *= norm0.z; + g110 *= norm0.w; + vec4 norm1 = taylorInvSqrt(vec4(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); + g001 *= norm1.x; + g011 *= norm1.y; + g101 *= norm1.z; + g111 *= norm1.w; + + float n000 = dot(g000, Pf0); + float n100 = dot(g100, vec3(Pf1.x, Pf0.yz)); + float n010 = dot(g010, vec3(Pf0.x, Pf1.y, Pf0.z)); + float n110 = dot(g110, vec3(Pf1.xy, Pf0.z)); + float n001 = dot(g001, vec3(Pf0.xy, Pf1.z)); + float n101 = dot(g101, vec3(Pf1.x, Pf0.y, Pf1.z)); + float n011 = dot(g011, vec3(Pf0.x, Pf1.yz)); + float n111 = dot(g111, Pf1); + + vec3 fade_xyz = fade(Pf0); + vec4 n_z = mix(vec4(n000, n100, n010, n110), vec4(n001, n101, n011, n111), fade_xyz.z); + 
vec2 n_yz = mix(n_z.xy, n_z.zw, fade_xyz.y); + float n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); + return 2.2 * n_xyz; +} + +vec3 orthogonal(vec3 v) { + return normalize(abs(v.x) > abs(v.z) ? vec3(-v.y, v.x, 0.0) + : vec3(0.0, -v.z, v.y)); +} + +float displace(vec3 point) { + if(uDisplace) { + return cnoise(point * uFrequency + vec3(time)) * uAmplitude; + } + return 0.0; +} + +void main() { + vUv = uv; + + vec3 displacedPosition = position + normal * displace(position); + vec4 modelPosition = modelMatrix * vec4(displacedPosition, 1.0); + + + vec4 viewPosition = viewMatrix * modelPosition; + vec4 projectedPosition = projectionMatrix * viewPosition; + + gl_Position = projectedPosition; + + float offset = 4.0/256.0; + vec3 tangent = orthogonal(normal); + vec3 bitangent = normalize(cross(normal, tangent)); + vec3 neighbour1 = position + tangent * offset; + vec3 neighbour2 = position + bitangent * offset; + vec3 displacedNeighbour1 = neighbour1 + normal * displace(neighbour1); + vec3 displacedNeighbour2 = neighbour2 + normal * displace(neighbour2); + + vec3 displacedTangent = displacedNeighbour1 - displacedPosition; + vec3 displacedBitangent = displacedNeighbour2 - displacedPosition; + + // https://upload.wikimedia.org/wikipedia/commons/d/d2/Right_hand_rule_cross_product.svg + vec3 displacedNormal = normalize(cross(displacedTangent, displacedBitangent)); + + vNormal = displacedNormal * normalMatrix; +} +\`; + +const fragmentShader = \` + varying vec2 vUv; + varying vec3 vNormal; + + void main() { + vec3 normal = normalize(vNormal); + gl_FragColor = vec4(normal * 0.5 + 0.5, 1.0); + } +\`; + +const NormalMaterial = shaderMaterial( + { + time: { value: 0.0 }, + uDisplace: { value: true }, + uAmplitude: { value: 0.25 }, + uFrequency: { value: 0.75 }, + }, + vertexShader, + fragmentShader +); + +export default NormalMaterial; +`; + +const causticsPlaneFragmentShader = `uniform sampler2D uTexture; +uniform float uAberration; + +varying vec2 vUv; + +const int SAMPLES = 16; + +float random(vec2 p){ + return fract(sin(dot(p.xy ,vec2(12.9898,78.233))) * 43758.5453); +} + +vec3 sat(vec3 rgb, float adjustment) { + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + vec3 intensity = vec3(dot(rgb, W)); + return mix(intensity, rgb, adjustment); +} + +void main() { + vec2 uv = vUv; + vec4 color = vec4(0.0); + + vec3 refractCol = vec3(0.0); + + float flip = -0.5; + + for ( int i = 0; i < SAMPLES; i ++ ) { + float noiseIntensity = 0.01; + // This makes the texture get "noisy": maybe worth adding noiseIntensity as a uniform + float noise = random(uv) * noiseIntensity; + // This makes layers "slide" and noisy to create the rgb color shift + float slide = float(i) / float(SAMPLES) * 0.1 + noise; + + + float mult = i % 2 == 0 ? 1.0 : -1.0; + flip *= mult; + + vec2 dir = i % 2 == 0 ? 
vec2(flip, 0.0) : vec2(0.0, flip); + + // Apply the color shift and refraction to each color channel (r,g,b) of the texture passed in uSceneTex; + refractCol.r += texture2D(uTexture, uv + (uAberration * slide * dir * 1.0) ).r; + refractCol.g += texture2D(uTexture, uv + (uAberration * slide * dir * 2.0) ).g; + refractCol.b += texture2D(uTexture, uv + (uAberration * slide * dir * 3.0) ).b; + } + // Divide by the number of layers to normalize colors (rgb values can be worth up to the value of SAMPLES) + refractCol /= float(SAMPLES); + refractCol = sat(refractCol, 1.265); + + color = vec4(refractCol.r, refractCol.g, refractCol.b, 1.0); + + gl_FragColor = vec4(color.rgb, 1.0); + + #include + #include +} +`; + +const CausticsPlaneMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from "!!raw-loader!./causticsPlaneFragmentShader.glsl"; + +const vertexShader = \` +varying vec2 vUv; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; +} +\`; + +const CausticsPlaneMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uAberration: { value: 0.02 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsPlaneMaterial; +`; + +const AppCode = `import { + OrbitControls, + Environment, + PerspectiveCamera, + useFBO, + SpotLight, + useGLTF, +} from "@react-three/drei"; +import { Canvas, useFrame } from "@react-three/fiber"; +import { useControls } from "leva"; +import { useRef, useState, useMemo, forwardRef } from "react"; +import * as THREE from "three"; +import { FullScreenQuad } from "three-stdlib"; + +import CausticsPlaneMaterial from "./CausticsPlaneMaterial"; +import CausticsComputeMaterial from "./CausticsComputeMaterial"; +import NormalMaterial from "./NormalMaterial"; +import { MeshTransmissionMaterial } from "./MeshTransmissionMaterial"; + +import './scene.css'; + +const config = { + backsideThickness: 0.3, + thickness: 0.3, + samples: 8, + transmission: 0.9, + clearcoat: 0.4, + clearcoatRoughness: 0.1, + chromaticAberration: 1.25, + anisotropy: 0.2, + roughness: 0, + distortion: 0, + distortionScale: 0.09, + temporalDistortion: 0, + ior: 1.25, + color: "#ffffff", +}; + +const BUNNY_GEOMETRY_URL = "https://cdn.maximeheckel.com/models/bunny.glb"; + +const BunnyGeometry = forwardRef((props, ref) => { + const { nodes } = useGLTF(BUNNY_GEOMETRY_URL); + + return ( + + + + ) +}) + +const SphereGeometry = forwardRef((props, ref) => { + return ( + + + + + ) +}) + +const TorusGeometry = forwardRef((props, ref) => { + return ( + + + + + ) +}) + +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + const spotlightRef = useRef(); + + + + const { + light, + intensity, + chromaticAberration, + displace, + amplitude, + frequency, + geometry, + } = useControls({ + light: { + value: new THREE.Vector3(-10, 13, -10), + }, + geometry: { + value: "torus", + options: [ "sphere", "torus", "bunny",], + }, + intensity: { + value: 1.5, + step: 0.01, + min: 0, + max: 10.0, + }, + chromaticAberration: { + value: 0.16, + step: 0.001, + min: 0, + max: 0.4, + }, + displace: { + value: true, + }, + amplitude: { + value: 0.13, + step: 0.01, + min: 0, + max: 1, + }, + frequency: { + value: 0.65, + step: 0.01, + min: 0, + max: 4, + }, + }); + + const TargetMesh = useMemo(() => { + switch (geometry) { + case "sphere": + return 
SphereGeometry; + case "torus": + return TorusGeometry; + case "bunny": + return BunnyGeometry; + default: + return SphereGeometry; + } + }, [geometry]) + + const normalRenderTarget = useFBO(2000, 2000, {}); + const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) + ); + const [normalMaterial] = useState(() => new NormalMaterial()); + + + const causticsComputeRenderTarget = useFBO(2000, 2000, {}); + const [causticsQuad] = useState(() => new FullScreenQuad()); + const [causticsComputeMaterial] = useState(() => new CausticsComputeMaterial()); + + const [causticsPlaneMaterial] = useState(() => new CausticsPlaneMaterial()); + causticsPlaneMaterial.transparent = true; + causticsPlaneMaterial.blending = THREE.CustomBlending; + causticsPlaneMaterial.blendSrc = THREE.OneFactor; + causticsPlaneMaterial.blendDst = THREE.SrcAlphaFactor; + + useFrame((state) => { + const { gl, clock, camera } = state; + + camera.lookAt(0, 0, 0); + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + let boundsVertices = []; + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.min.x, bounds.max.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.min.y, bounds.max.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.min.z) + ); + boundsVertices.push( + new THREE.Vector3(bounds.max.x, bounds.max.y, bounds.max.z) + ); + + const lightDir = new THREE.Vector3( + light.x, + light.y, + light.z + ).normalize(); + + // Calculates the projected coordinates of the vertices onto the plane + // perpendicular to the light direction + const projectedCoordinates = boundsVertices.map((v) => + { + const newX = v.x + lightDir.x * (-v.y / lightDir.y); + const newY = v.y + lightDir.y * (-v.y / lightDir.y); + const newZ = v.z + lightDir.z * (-v.y / lightDir.y); + + return new THREE.Vector3(newX, newY, newZ); + } + ); + + // Calculates the combined spatial coordinates of the projected vertices + // and divides by the number of vertices to get the center position + const centerPos = projectedCoordinates + .reduce((a, b) => a.add(b), new THREE.Vector3(0, 0, 0)) + .divideScalar(projectedCoordinates.length); + + // Calculates the scale of the caustic plane based on the distance of the + // furthest vertex from the center (using euclidean distance) + const scale = projectedCoordinates + .map((p) => + Math.sqrt( + Math.pow(p.x - centerPos.x, 2), + Math.pow(p.z - centerPos.z, 2) + ) + ) + .reduce((a, b) => Math.max(a, b), 0); + + // The scale of the plane is multiplied by this correction factor to + // avoid the caustics pattern to be cut / overflow the bounds of the plane + // my normal projection or my math must be a bit off, so I'm trying to be very conservative here + const scaleCorrection = 1.75; + + causticsPlane.current.scale.set( + scale * scaleCorrection, + scale * scaleCorrection, + scale * scaleCorrection + ); + causticsPlane.current.position.set(centerPos.x, centerPos.y, centerPos.z); + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + 
bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + mesh.current.material.uniforms.time.value = clock.elapsedTime; + mesh.current.material.uniforms.uDisplace.value = displace; + mesh.current.material.uniforms.uAmplitude.value = amplitude; + mesh.current.material.uniforms.uFrequency.value = frequency; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + mesh.current.material.uniforms.time.value = clock.elapsedTime; + mesh.current.material.uniforms.uDisplace.value = displace; + mesh.current.material.uniforms.uAmplitude.value = amplitude; + mesh.current.material.uniforms.uFrequency.value = frequency; + + causticsQuad.material = causticsComputeMaterial; + causticsQuad.material.uniforms.uTexture.value = normalRenderTarget.texture; + causticsQuad.material.uniforms.uLight.value = light; + causticsQuad.material.uniforms.uIntensity.value = intensity; + + gl.setRenderTarget(causticsComputeRenderTarget); + causticsQuad.render(gl); + + causticsPlane.current.material = causticsPlaneMaterial; + + causticsPlane.current.material.uniforms.uTexture.value = + causticsComputeRenderTarget.texture; + causticsPlane.current.material.uniforms.uAberration.value = + chromaticAberration; + + gl.setRenderTarget(null); + + spotlightRef.current.position.set(light.x, light.y, light.z); + spotlightRef.current.distance = Math.sqrt( + Math.pow( + spotlightRef.current.position.x - causticsPlane.current.position.x, + 2 + ) + + Math.pow( + spotlightRef.current.position.y - causticsPlane.current.position.y, + 2 + ) + + Math.pow( + spotlightRef.current.position.z - causticsPlane.current.position.z, + 2 + ) + ); + }); + + return ( + <> + + + + + + + + + + + ); +}; + +const Scene = () => { + return ( + + + + + + ); +}; + + +export default Scene; +`; + +const DynamicCaustics = { + '/App.js': { + code: AppCode, + }, + '/NormalMaterial.js': { + code: NormalMaterial, + active: true, + }, + '/MeshTransmissionMaterial.js': { + code: MeshTransmissionMaterial, + }, + '/vertexDisplacement.glsl': { + code: vertexDisplacement, + }, + '/vertexBase.glsl': { + code: vertexBase, + }, + '/CausticsPlaneMaterial.js': { + code: CausticsPlaneMaterial, + hidden: true, + }, + '/causticsPlaneFragmentShader.glsl': { + code: causticsPlaneFragmentShader, + hidden: true, + }, + '/CausticsComputeMaterial.js': { + code: CausticsComputeMaterial, + hidden: true, + }, + '/causticsComputeFragment.glsl': { + code: causticsComputeFragment, + hidden: true, + }, +}; + +export default DynamicCaustics; diff --git a/core/components/MDX/Widgets/Caustics/normalProjection.ts b/core/components/MDX/Widgets/Caustics/normalProjection.ts new file mode 100644 index 000000000..7600dbfba --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/normalProjection.ts @@ -0,0 +1,159 @@ +const NormalMaterial = `import { shaderMaterial } from "@react-three/drei"; + +const surfaceVertex = \` +varying vec2 vUv; +varying vec3 vNormal; + +void main() { + vUv = uv; + vec4 modelViewPosition = modelViewMatrix * vec4(position, 1.0); + vNormal = normalize(normalMatrix * normal); + + // Set the final position of the vertex + gl_Position = projectionMatrix * modelViewPosition; +} +\`; + +const surfaceFragment = \` + varying vec2 vUv; + varying vec3 vNormal; + + void main() { + vec3 normal = normalize(vNormal); + gl_FragColor = 
vec4(normal * 0.5 + 0.5, 1.0); + } +\`; + +const NormalMaterial = shaderMaterial({}, surfaceVertex, surfaceFragment); + +export default NormalMaterial; +`; + +const AppCode = `import { + OrbitControls, + Environment, + MeshTransmissionMaterial, + PerspectiveCamera, + useFBO, +} from "@react-three/drei"; +import { Canvas, useFrame } from "@react-three/fiber"; +import { useControls } from "leva"; +import { useRef, useState } from "react"; +import * as THREE from "three"; + +import NormalMaterial from "./NormalMaterial"; + +import './scene.css'; + +const config = { + backsideThickness: 0.3, + thickness: 25, + samples: 6, + transmission: 0.9, + clearcoat: 1, + clearcoatRoughness: 0.5, + chromaticAberration: 1.5, + anisotropy: 0.2, + roughness: 0, + distortion: 0, + distortionScale: 0.09, + temporalDistortion: 0, + ior: 1.5, + color: "#ffffff", +}; + +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + + const { + light, + } = useControls({ + light: { + value: new THREE.Vector3(-10, 13, -10), + }, + }); + + const normalRenderTarget = useFBO(2000, 2000, {}); + + const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) + ); + + const [normalMaterial] = useState(() => new NormalMaterial()); + + useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + + causticsPlane.current.material.map = normalRenderTarget.texture; + + gl.setRenderTarget(null); + }); + + return ( + <> + + + + + + + + + + ); +}; + +const Scene = () => { + return ( + + + + + + + ); +}; + + +export default Scene; +`; + +const NormalProjection = { + '/App.js': { + code: AppCode, + active: true, + }, + '/NormalMaterial.js': { + code: NormalMaterial, + }, +}; + +export default NormalProjection; diff --git a/core/components/MDX/Widgets/Caustics/simpleCaustics.ts b/core/components/MDX/Widgets/Caustics/simpleCaustics.ts new file mode 100644 index 000000000..dc8b2dc58 --- /dev/null +++ b/core/components/MDX/Widgets/Caustics/simpleCaustics.ts @@ -0,0 +1,246 @@ +const causticsComputeFragment = `uniform sampler2D uTexture; +varying vec2 vUv; +varying vec3 vPosition; +uniform vec3 uLight; +uniform float uIntensity; + +void main() { + vec2 uv = vUv; + + vec3 normalTexture = texture2D(uTexture, uv).rgb; + vec3 normal = normalize(normalTexture); + vec3 lightDir = normalize(uLight); + + vec3 ray = refract(lightDir, normal, 1.0 / 1.25); + + vec3 newPos = vPosition.xyz + ray; + vec3 oldPos = vPosition.xyz; + + float lightArea = length(dFdx(oldPos)) * length(dFdy(oldPos)); + float newLightArea = length(dFdx(newPos)) * length(dFdy(newPos)); + + float value = lightArea / newLightArea * 0.2; + float scale = clamp(value, 0.0, 1.0) * uIntensity; + scale *= scale; + + gl_FragColor = vec4(vec3(scale), 1.0); +} +`; + +const CausticsComputeMaterial = `import { shaderMaterial } from "@react-three/drei"; +import * as THREE from "three"; + +import fragmentShader from 
"!!raw-loader!./causticsComputeFragment.glsl"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vPosition; + +void main() { + vUv = uv; + vec4 worldPosition = modelMatrix * vec4(position, 1.0); + vPosition = worldPosition.xyz; + + vec4 viewPosition = viewMatrix * worldPosition; + gl_Position = projectionMatrix * viewPosition; + +} +\`; + +const CausticsMaterial = shaderMaterial( + { + uLight: { value: new THREE.Vector2(0, 0, 0) }, + uTexture: { value: null }, + uIntensity: { value: 1.0 }, + }, + vertexShader, + fragmentShader +); + +export default CausticsMaterial; +`; + +const NormalMaterial = `import { shaderMaterial } from "@react-three/drei"; + +const vertexShader = \` +varying vec2 vUv; +varying vec3 vNormal; + +void main() { + vUv = uv; + vec4 modelViewPosition = modelViewMatrix * vec4(position, 1.0); + vNormal = normalize(normalMatrix * normal); + + // Set the final position of the vertex + gl_Position = projectionMatrix * modelViewPosition; +} +\`; + +const fragmentShader = \` + varying vec2 vUv; + varying vec3 vNormal; + + void main() { + vec3 normal = normalize(vNormal); + gl_FragColor = vec4(normal * 0.5 + 0.5, 1.0); + } +\`; + +const NormalMaterial = shaderMaterial({}, vertexShader, fragmentShader); + +export default NormalMaterial; +`; + +const AppCode = `import { + OrbitControls, + Environment, + MeshTransmissionMaterial, + PerspectiveCamera, + useFBO, +} from "@react-three/drei"; +import { Canvas, useFrame } from "@react-three/fiber"; +import { useControls } from "leva"; +import { useRef, useState } from "react"; +import * as THREE from "three"; +import { FullScreenQuad } from "three-stdlib"; + +import CausticsComputeMaterial from "./CausticsComputeMaterial"; +import NormalMaterial from "./NormalMaterial"; + +import './scene.css'; + +const config = { + backsideThickness: 0.3, + thickness: 25, + samples: 6, + transmission: 0.9, + clearcoat: 1, + clearcoatRoughness: 0.5, + chromaticAberration: 1.5, + anisotropy: 0.2, + roughness: 0, + distortion: 0, + distortionScale: 0.09, + temporalDistortion: 0, + ior: 1.5, + color: "#ffffff", +}; + +const Caustics = () => { + const mesh = useRef(); + const causticsPlane = useRef(); + + const { + light, + intensity, + } = useControls({ + light: { + value: new THREE.Vector3(-10, 13, -10), + }, + intensity: { + value: 1.5, + step: 0.01, + min: 0, + max: 10.0, + }, + }); + + const normalRenderTarget = useFBO(2000, 2000, {}); + const [normalCamera] = useState( + () => new THREE.PerspectiveCamera(65, 1, 0.1, 1000) + ); + const [normalMaterial] = useState(() => new NormalMaterial()); + + const causticsComputeRenderTarget = useFBO(2000, 2000, {}); + const [causticsQuad] = useState(() => new FullScreenQuad()); + const [causticsComputeMaterial] = useState(() => new CausticsComputeMaterial()); + + useFrame((state) => { + const { gl } = state; + + const bounds = new THREE.Box3().setFromObject(mesh.current, true); + + normalCamera.position.set(light.x, light.y, light.z); + normalCamera.lookAt( + bounds.getCenter(new THREE.Vector3(0, 0, 0)).x, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).y, + bounds.getCenter(new THREE.Vector3(0, 0, 0)).z + ); + normalCamera.up = new THREE.Vector3(0, 1, 0); + + const originalMaterial = mesh.current.material; + + mesh.current.material = normalMaterial; + mesh.current.material.side = THREE.BackSide; + + gl.setRenderTarget(normalRenderTarget); + gl.render(mesh.current, normalCamera); + + mesh.current.material = originalMaterial; + + causticsQuad.material = causticsComputeMaterial; + 
causticsQuad.material.uniforms.uTexture.value = normalRenderTarget.texture; + causticsQuad.material.uniforms.uLight.value = light; + causticsQuad.material.uniforms.uIntensity.value = intensity; + + gl.setRenderTarget(causticsComputeRenderTarget); + causticsQuad.render(gl); + + causticsPlane.current.material.map = causticsComputeRenderTarget.texture; + + gl.setRenderTarget(null); + }); + + return ( + <> + + + + + + + + + + ); +}; + +const Scene = () => { + return ( + + + + + + + ); +}; + + +export default Scene; +`; + +const SimpleCaustics = { + '/App.js': { + code: AppCode, + }, + '/CausticsComputeMaterial.js': { + code: CausticsComputeMaterial, + }, + '/causticsComputeFragment.glsl': { + code: causticsComputeFragment, + active: true, + }, + '/NormalMaterial.js': { + code: NormalMaterial, + }, +}; + +export default SimpleCaustics; diff --git a/core/components/Tweet/Tweet.tsx b/core/components/Tweet/Tweet.tsx index 4e6c48a89..881fd83bf 100644 --- a/core/components/Tweet/Tweet.tsx +++ b/core/components/Tweet/Tweet.tsx @@ -132,10 +132,11 @@ const Tweet = (props: Props) => { if (m.type === 'video' && !!m.video_info) { const lastVariant = m.video_info.variants.reduce( (max, obj) => { - return obj.bitrate || 0 > max.bitrate ? obj : max; + return (obj.bitrate || 0) > max.bitrate ? obj : max; }, { bitrate: 0 } as any ); + const videoSrc = lastVariant.url; if (!videoSrc || lastVariant.content_type !== 'video/mp4')