test volume post

Tom 2025-01-25 19:20:05 +00:00
parent 296ff991d9
commit 42d0a53143
10 changed files with 363 additions and 2 deletions

View File

@@ -7,7 +7,7 @@ assets: /assets/blog/heic_depth_map
 thumbnail: /assets/blog/heic_depth_map/thumbnail.png
 social_image: /assets/blog/heic_depth_map/thumbnail.png
-alt: An image of the text "{...}" to suggest the idea of a template.
+alt: An image of my face, one half a normal colour photograph and the other half a black-and-white depth map.
 model_viewer: true
 ---

View File

@@ -0,0 +1,24 @@
---
title: Volume Rendering
layout: post
excerpt:
assets: /assets/blog/volume_rendering
draft: true
thumbnail: /assets/blog/volume_rendering/thumbnail.png
social_image: /assets/blog/volume_rendering/thumbnail.png
alt: A volumetric render of a CT scan of my jaw.
model_viewer: true
---
Some text
<figure>
<img class="no-wc invertable" src="{{page.assets}}/billboard.png">
<volume-viewer model="{{page.assets}}/volume_scan.data.gz" model-metadata="{{page.assets}}/volume_scan_meta.json" camera='{"type":"perspective","position":[-3.598,-0.4154,1.971],"rotation":[0.2078,-1.06,0.1819],"zoom":1,"target":[0,0,0]}'>
</volume-viewer>
<figcaption class="no-wc">If you have JS enabled this is interactive.</figcaption>
<figcaption class="has-wc">An interactive volume rendering of a CT scan of my jaw.</figcaption>
</figure>
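The camera attribute above carries a serialised camera pose as JSON. As a minimal sketch, a string in that shape could be produced from a live THREE.js camera and OrbitControls pair like this (the field names are inferred from the attribute; the blog's actual serialisation helper is not shown in this commit):

// Hypothetical helper, inferred from the attribute format above.
function serialiseCamera(camera, controls) {
  return JSON.stringify({
    type: "perspective",
    position: camera.position.toArray(),
    rotation: camera.rotation.toArray().slice(0, 3), // Drop the Euler order string.
    zoom: camera.zoom,
    target: controls.target.toArray(),
  });
}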

View File

@@ -5,7 +5,8 @@
 }
 outline-model-viewer,
-point-cloud-viewer {
+point-cloud-viewer,
+volume-viewer {
   width: 100%;
   min-height: 300px;
   display: flex;

Binary file not shown (new image, 252 KiB).

Binary file not shown (new image, 77 KiB).

Binary file not shown.

View File

@@ -0,0 +1 @@
{"dtype": "uint8", "shape": [300, 300, 300]}

View File

@@ -0,0 +1,140 @@
export const vertexShader = `
// Attributes.
in vec3 position;

// Uniforms.
uniform mat4 modelMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec3 cameraPosition;

// Outputs.
out vec3 vOrigin;    // Ray origin in model space.
out vec3 vDirection; // Ray direction in model space.

void main() {
  // Compute the ray origin in model space.
  vOrigin = vec3(inverse(modelMatrix) * vec4(cameraPosition, 1.0)).xyz;
  // Compute the ray direction in model space.
  vDirection = position - vOrigin;
  // Compute the vertex position in clip space.
  gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`;
export const fragmentShader = `
precision highp sampler3D; // Precision for 3D texture sampling.
precision highp float;     // Precision for floating point numbers.

uniform sampler3D dataTexture; // Sampler for the volume data texture.
// uniform sampler2D colorTexture; // Sampler for the color palette texture.
uniform float samplingRate; // The sampling rate.
uniform float threshold;    // Threshold to use for isosurface-style rendering.
uniform float alphaScale;   // Scaling of the color alpha value.
uniform bool invertColor;   // Option to invert the color palette.

in vec3 vOrigin;    // The interpolated ray origin from the vertex shader.
in vec3 vDirection; // The interpolated ray direction from the vertex shader.

out vec4 frag_color; // Output fragment color.

// Sample the volume data texture.
float sampleData(vec3 coord) {
  return texture(dataTexture, coord).x;
}

// Map a data value to a color. By default high values map to dark greys;
// setting invertColor flips the mapping.
vec4 sampleColor(float value) {
  float x = invertColor ? value : 1.0 - value;
  // return texture(colorTexture, vec2(x, 0.5));
  return vec4(x, x, x, 1.0);
}

// Intersect a ray with an axis-aligned bounding box (the slab method).
// Returns the minimum and maximum intersection distances along the ray direction.
vec2 intersectAABB(vec3 rayOrigin, vec3 rayDir, vec3 boxMin, vec3 boxMax) {
  vec3 tMin = (boxMin - rayOrigin) / rayDir;
  vec3 tMax = (boxMax - rayOrigin) / rayDir;
  vec3 t1 = min(tMin, tMax);
  vec3 t2 = max(tMin, tMax);
  float tNear = max(max(t1.x, t1.y), t1.z);
  float tFar = min(min(t2.x, t2.y), t2.z);
  return vec2(tNear, tFar);
}

// Volume sampling and composition.
// Note that this function body is swapped in based on the algorithm selected in the user interface.
vec4 compose(vec4 color, vec3 entryPoint, vec3 rayDir, float samples, float tStart, float tEnd, float tIncr) {
  // Composition of samples using maximum intensity projection:
  // loop through all samples along the ray and keep the largest value.
  float density = 0.0;
  for (float i = 0.0; i < samples; i += 1.0) {
    float t = tStart + tIncr * i;     // Current distance along the ray.
    vec3 p = entryPoint + rayDir * t; // Current sampling position.
    // Sample the volume data at the current position.
    float value = sampleData(p);
    // Keep track of the maximum value seen so far.
    if (value > density) {
      density = value;
    }
    // Exit the loop early when the maximum possible value is found or the exit point is reached.
    if (density >= 1.0 || t > tEnd) {
      break;
    }
  }
  // Convert the maximum value to a color via the palette.
  color.rgb = sampleColor(density).rgb;
  // Scale the alpha value so that lower densities are more transparent.
  color.a = alphaScale * (invertColor ? 1.0 - density : density);
  return color;
}

void main() {
  // Determine the intersection of the ray with the unit box.
  vec3 rayDir = normalize(vDirection);
  vec3 aabbmin = vec3(-0.5);
  vec3 aabbmax = vec3(0.5);
  vec2 intersection = intersectAABB(vOrigin, rayDir, aabbmin, aabbmax);

  // Initialize the fragment color.
  vec4 color = vec4(0.0);

  // The intersection is valid if the near distance is smaller than the far distance.
  if (intersection.x <= intersection.y) {
    // Clamp the near distance when the camera is inside the box so we do not start sampling behind the camera.
    intersection.x = max(intersection.x, 0.0);
    // Compute the entry and exit points of the ray.
    vec3 entryPoint = vOrigin + rayDir * intersection.x;
    vec3 exitPoint = vOrigin + rayDir * intersection.y;
    // Determine the number of samples and the step size using
    // entry-exit align-corner sampling, as described in
    // "Volume Raycasting Sampling Revisited" by Steneteg et al., 2019.
    vec3 dimensions = vec3(textureSize(dataTexture, 0));
    vec3 entryToExit = exitPoint - entryPoint;
    float samples = ceil(samplingRate * length(entryToExit * (dimensions - vec3(1.0))));
    float tEnd = length(entryToExit);
    float tIncr = tEnd / samples;
    float tStart = 0.5 * tIncr;
    // Map the entry point into texture space ([0, 1]^3) to simplify texture sampling.
    // The box is a unit cube, so the model-space ray direction can be reused unscaled.
    vec3 texEntry = (entryPoint - aabbmin) / (aabbmax - aabbmin);
    // Sample the volume along the ray and convert the samples to a color.
    color = compose(color, texEntry, rayDir, samples, tStart, tEnd, tIncr);
  }

  frag_color = color;
}`;
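The comment on compose() says its body is swapped in depending on the algorithm chosen in the UI; only the maximum-intensity-projection variant appears in this commit. As a hedged sketch (my own, not from this commit), a front-to-back alpha-compositing variant, which would also give the threshold uniform a job, might look like:

vec4 compose(vec4 color, vec3 entryPoint, vec3 rayDir, float samples, float tStart, float tEnd, float tIncr) {
  // Composition of samples using front-to-back alpha blending (sketch only).
  for (float i = 0.0; i < samples; i += 1.0) {
    float t = tStart + tIncr * i;
    vec3 p = entryPoint + rayDir * t;
    float value = sampleData(p);
    // Ignore samples below the user-set threshold.
    if (value >= threshold) {
      vec4 s = sampleColor(value);
      s.a = alphaScale * value;
      // Accumulate color weighted by the remaining transparency.
      color.rgb += (1.0 - color.a) * s.a * s.rgb;
      color.a += (1.0 - color.a) * s.a;
    }
    // Stop once the ray is effectively opaque or past the exit point.
    if (color.a >= 0.95 || t > tEnd) {
      break;
    }
  }
  return color;
}

Unlike MIP, front-to-back accumulation can terminate as soon as opacity saturates, rather than only when a fully bright sample is found.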

View File

@@ -0,0 +1,193 @@
import * as THREE from "three";
import { OrbitControls } from "three/addons/controls/OrbitControls.js";
import { Timer } from "three/addons/Addons.js";
import { GUI } from "lil-gui";
import { vertexShader, fragmentShader } from "./VolumeShaders.js";
import {
  componentHTML,
  setupThreeJS,
  deserialiseCamera,
  deserialiseControls,
} from "./helpers.js";
async function load_metadata(metadata_path) {
  console.log("Loading metadata from", metadata_path);
  const metadata_res = await fetch(metadata_path);
  return await metadata_res.json();
}

async function load_model_bytes(model_path) {
  console.log("Loading model from", model_path);
  const res = await fetch(model_path);
  const buffer = await res.arrayBuffer();
  return new Uint8Array(buffer); // Create a uint8-array view of the file buffer.
}

async function load_model_bytes_gzip(model_path) {
  const ds = new DecompressionStream("gzip");
  const response = await fetch(model_path);
  const blob_in = await response.blob();
  const stream_in = blob_in.stream().pipeThrough(ds);
  const buffer = await new Response(stream_in).arrayBuffer();
  console.log("Decompressed model size", buffer.byteLength);
  return new Uint8Array(buffer);
}
async function load_model(model_path, metadata_path) {
  // If the model path ends in ".gz", assume the model is gzip-compressed.
  const model_promise = model_path.endsWith(".gz")
    ? load_model_bytes_gzip(model_path)
    : load_model_bytes(model_path);
  const [byteArray, metadata] = await Promise.all([
    model_promise,
    load_metadata(metadata_path),
  ]);
  console.log("Loaded model with metadata", metadata);
  console.log("Model shape", metadata.shape);
  console.log("Model dtype", metadata.dtype);
  const texture = new THREE.Data3DTexture(
    byteArray,         // The data values stored in the pixels of the texture.
    metadata.shape[2], // Width of the texture.
    metadata.shape[1], // Height of the texture.
    metadata.shape[0]  // Depth of the texture.
  );
  texture.format = THREE.RedFormat;       // The texture has only one channel (red).
  texture.type = THREE.UnsignedByteType;  // The data type is 8-bit unsigned integer.
  texture.minFilter = THREE.LinearFilter; // Linear filtering for minification.
  texture.magFilter = THREE.LinearFilter; // Linear filtering for magnification.
  // Repeat edge values when sampling outside the texture boundaries.
  texture.wrapS = THREE.ClampToEdgeWrapping;
  texture.wrapT = THREE.ClampToEdgeWrapping;
  texture.wrapR = THREE.ClampToEdgeWrapping;
  // Mark the texture for upload so the changes take effect.
  texture.needsUpdate = true;
  return { texture, metadata };
}
function make_box() {
  const geometry = new THREE.BoxGeometry(1, 1, 1);
  const box = new THREE.Mesh(geometry);
  box.scale.set(1, 1, 1);
  // box.scale.set(dataDescription.scale[0], dataDescription.scale[1], dataDescription.scale[2]);
  const line = new THREE.LineSegments(
    new THREE.EdgesGeometry(geometry),
    new THREE.LineBasicMaterial({ color: 0x999999 })
  );
  box.add(line);
  return box;
}

function volumeMaterial(texture, renderProps) {
  return new THREE.RawShaderMaterial({
    glslVersion: THREE.GLSL3, // Shader language version.
    uniforms: {
      dataTexture: { value: texture }, // Volume data texture.
      // colorTexture: { value: colorTexture }, // Color palette texture.
      cameraPosition: { value: new THREE.Vector3() }, // Current camera position.
      samplingRate: { value: renderProps.samplingRate }, // Sampling rate along each ray.
      threshold: { value: renderProps.threshold }, // Threshold for adjusting the volume rendering.
      alphaScale: { value: renderProps.alphaScale }, // Alpha scale of the volume rendering.
      invertColor: { value: renderProps.invertColor }, // Invert the color palette.
    },
    vertexShader: vertexShader, // Vertex shader code.
    fragmentShader: fragmentShader, // Fragment shader code.
    side: THREE.BackSide, // Render only the back faces of the box geometry.
    transparent: true, // Use alpha blending when rendering.
  });
}
export class VolumeViewer extends HTMLElement {
  constructor() {
    super();
    this.isVisible = true;
    this.shadow = this.attachShadow({ mode: "open" });
  }

  connectedCallback() {
    const { container, canvas, scene, renderer, gui } = setupThreeJS(this);
    const model = this.getAttribute("model");
    const model_metadata = this.getAttribute("model-metadata");

    // Make a box that just holds the triangles our shader will render onto.
    const box = make_box();
    scene.add(box);

    // Declare the render properties before the async load below captures them.
    const renderProps = {
      samplingRate: 1.0,
      threshold: 0.1,
      alphaScale: 1.0,
      invertColor: false,
    };

    let material = null;
    load_model(model, model_metadata).then(({ texture, metadata }) => {
      // Create the custom material with the attached shaders.
      material = volumeMaterial(texture, renderProps);
      box.material = material;
      gui
        .add(material.uniforms.samplingRate, "value", 0.1, 2.0, 0.1)
        .name("Sampling Rate");
      gui
        .add(material.uniforms.threshold, "value", 0.0, 1.0, 0.01)
        .name("Threshold");
      gui
        .add(material.uniforms.alphaScale, "value", 0.1, 2.0, 0.1)
        .name("Alpha Scale");
      gui.add(material.uniforms.invertColor, "value").name("Invert Color");
    });

    // Apply the pose from the serialised "camera" attribute. This helper was
    // imported but never called; assuming it configures this.camera, mirroring
    // how deserialiseControls configures this.controls below.
    deserialiseCamera(this);

    const render = () => renderer.render(scene, this.camera);
    this.render = render;

    // --- OrbitControls ---
    this.controls = new OrbitControls(this.camera, renderer.domElement);
    this.controls.addEventListener("change", render);
    this.controls.enableDamping = true;
    this.controls.dampingFactor = 0.25;
    this.controls.autoRotate = true;
    deserialiseControls(this);
    canvas.addEventListener("click", () => {
      this.controls.autoRotate = false;
    });

    const ambientLight = new THREE.AmbientLight(0xffffff, 0.7);
    scene.add(ambientLight);
    const dirLight = new THREE.DirectionalLight(0xffffff, 0.7);
    dirLight.position.set(5, 5, 10);
    scene.add(dirLight);

    // Define the resize handler before registering it, so the listener is not undefined.
    this.onWindowResize = () => {
      this.camera.aspect = canvas.clientWidth / canvas.clientHeight;
      this.camera.updateProjectionMatrix();
      renderer.setSize(canvas.clientWidth, canvas.clientHeight);
    };
    window.addEventListener("resize", this.onWindowResize, false);

    const timer = new Timer();
    const update = () => {
      if (this.isVisible) {
        timer.update();
        const delta = timer.getDelta();
        this.controls.update(delta);
        if (material)
          box.material.uniforms.cameraPosition.value.copy(this.camera.position);
        this.render();
        requestAnimationFrame(update);
      }
    };
    update();
  }
}
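The update() loop above stops scheduling frames once this.isVisible goes false, but nothing in this file ever toggles the flag (presumably a shared helper does). A hypothetical sketch of wiring it with an IntersectionObserver, assuming the element should pause while off-screen:

// Hypothetical wiring, not part of this commit.
const viewer = document.querySelector("volume-viewer");
new IntersectionObserver(([entry]) => {
  viewer.isVisible = entry.isIntersecting;
  // Note: the loop exits entirely when hidden, so resuming would
  // also need the update loop kicked off again.
}).observe(viewer);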

View File

@@ -17,8 +17,10 @@ import { CustomOutlinePass } from "./CustomOutlinePass.js";
 import FindSurfaces from "./FindSurfaces.js";
 import { PointCloudViewer } from "./PointCloudViewer.js";
+import { VolumeViewer } from "./VolumeViewer.js";
 customElements.define("point-cloud-viewer", PointCloudViewer);
+customElements.define("volume-viewer", VolumeViewer);
 // Todo:
 // Swap in the version of this code that has a debug GUI behind a flag