GLTFImporter

Introduction

vtkGLTFImporter can import glTF 2.0 files.

The GL Transmission Format (glTF) is an API-neutral runtime asset delivery
format. A glTF asset is represented by:

  • A JSON-formatted file (.gltf) containing a full scene description: node
    hierarchy, materials, cameras, as well as descriptor information for
    meshes, animations, and other constructs
  • Binary files (.bin) containing geometry and animation data, and other
    buffer-based data
  • Image files (.jpg, .png) for textures

Supported extensions:

  • KHR_draco_mesh_compression
  • KHR_lights_punctual
  • KHR_materials_unlit
  • KHR_materials_ior
  • KHR_materials_specular
  • KHR_materials_variants
  • EXT_texture_webp
  • EXT_texture_avif

Methods

extend

Method used to decorate a given object (publicAPI+model) with vtkGLTFImporter characteristics.

Argument Type Required Description
publicAPI Yes object on which methods will be bound (public)
model Yes object on which data structure will be bound (protected)
initialValues IGLTFImporterInitialValues No (default: {})

getActors

Get the actors.

getAnimationMixer

Get the animation mixer.

getAnimations

Get the animations.

getBaseURL

Get the base url.

getCameras

Get the cameras.

getDataAccessHelper

getUrl

Get the url of the object to load.

getVariantMappings

Get the variant mappings.

getVariants

Get the variant array.

importActors

Import the actors.

importAnimations

Import the animations.

importCameras

Import the cameras.

importLights

Import the lights.

invokeReady

Invoke the ready event.

loadData

Load the object data.

Argument Type Required Description
options IGLTFImporterOptions No

newInstance

Method used to create a new instance of vtkGLTFImporter

Argument Type Required Description
initialValues IGLTFImporterInitialValues No for pre-setting some of its content

onReady

Argument Type Required Description
callback Yes

parse

Parse data.

Argument Type Required Description
content String or ArrayBuffer Yes The content to parse.

parseAsArrayBuffer

Parse data as ArrayBuffer.

Argument Type Required Description
content ArrayBuffer Yes The content to parse.

parseAsText

Parse data as text.

Argument Type Required Description
content String Yes The content to parse.

requestData

Argument Type Required Description
inData Yes
outData Yes

setCamera

Set the camera id.

Argument Type Required Description
cameraId Yes

setDataAccessHelper

Argument Type Required Description
dataAccessHelper Yes

setDracoDecoder

Set the Draco decoder.

Argument Type Required Description
mappings Yes

setRenderer

Set the vtk Renderer.

Argument Type Required Description
renderer Yes

setUrl

Set the url of the object to load.

Argument Type Required Description
url String Yes the url of the object to load.
option IGLTFImporterOptions No The glTF importer options.

setWasmBinary

Load the WASM decoder from url and set the decoderModule

Argument Type Required Description
url Yes
binaryName Yes

switchToVariant

Switch to a variant.

Argument Type Required Description
variantIndex Yes

Source

Animations.js
import macro from 'vtk.js/Sources/macros';
import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
import { quat, vec3 } from 'gl-matrix';

const { vtkDebugMacro, vtkWarningMacro } = macro;

/**
 * Create an animation channel.
 * Binds a glTF channel to its target node and returns the channel extended
 * with an `animate(currentTime)` method that samples and applies the value.
 * @param {glTFChannel} glTFChannel
 * @param {glTFChannel[]} glTFSamplers
 * @returns {object} the channel augmented with `animate`
 */
function createAnimationChannel(glTFChannel, glTFSamplers) {
  const { path, node } = glTFChannel.target;

  // Apply a sampled quaternion by converting it to axis-angle form,
  // since vtk nodes rotate through rotateWXYZ (angle in degrees).
  const applyRotation = (value) => {
    const normalized = quat.normalize(quat.create(), value);
    const axis = new Float64Array(3);
    const angleRad = quat.getAxisAngle(axis, normalized);
    node.rotateWXYZ(
      vtkMath.degreesFromRadians(angleRad),
      axis[0],
      axis[1],
      axis[2]
    );
  };

  function applyAnimation(value) {
    if (path === 'translation') {
      node.setPosition(value[0], value[1], value[2]);
    } else if (path === 'rotation') {
      applyRotation(value);
    } else if (path === 'scale') {
      node.setScale(value[0], value[1], value[2]);
    } else {
      vtkWarningMacro(`Unsupported animation path: ${path}`);
    }
  }

  // Sample the channel's sampler at `currentTime` and push the result
  // onto the target node.
  function animate(currentTime) {
    const sampler = glTFSamplers[glTFChannel.sampler];
    applyAnimation(sampler.evaluate(currentTime, path));
  }

  return { ...glTFChannel, animate };
}

/**
 * Create an animation sampler.
 * Wraps a glTF sampler (keyframe times in `input`, flat values in `output`,
 * interpolation mode) with an `evaluate(time, path)` method.
 *
 * Fixes over the previous version:
 * - the keyframe search now restarts when time moves backwards (looping
 *   animations previously kept resolving a stale, too-late segment);
 * - STEP/LINEAR interpolation use the component count implied by `path`
 *   (4 for quaternion rotation, 3 for translation/scale) instead of a
 *   hard-coded stride.
 * @param {glTFSampler} glTFSampler
 * @returns {object} the sampler augmented with `evaluate`
 */
function createAnimationSampler(glTFSampler) {
  // Cache of the last segment start so forward playback is O(1) per frame.
  let lastKeyframeIndex = 0;

  // Number of output components per keyframe for a given target path.
  function componentsForPath(path) {
    return path === 'rotation' ? 4 : 3;
  }

  // Find the keyframes bracketing `time`; returns [t0, t1, i0, i1].
  function findKeyframes(time) {
    let i1 = lastKeyframeIndex;
    // If time jumped backwards (e.g. a looping animation), restart the
    // scan: the forward-only loop below can never decrease the index.
    if (time < glTFSampler.input[i1]) {
      i1 = 0;
    }
    while (i1 < glTFSampler.input.length - 1 && glTFSampler.input[i1] <= time) {
      i1++;
    }
    const i0 = Math.max(0, i1 - 1);
    lastKeyframeIndex = i0;
    return [glTFSampler.input[i0], glTFSampler.input[i1], i0, i1];
  }

  // STEP interpolation: hold the value of the earlier keyframe.
  function stepInterpolate(path, i0) {
    const components = componentsForPath(path);
    const startIndex = i0 * components;
    const v0 = new Array(components);
    for (let i = 0; i < components; ++i) {
      v0[i] = glTFSampler.output[startIndex + i];
    }

    return v0;
  }

  // LINEAR interpolation: lerp vec3 paths, slerp quaternion rotation.
  function linearInterpolate(path, t0, t1, i0, i1, t) {
    const ratio = (t - t0) / (t1 - t0);
    const components = componentsForPath(path);
    const startIndex = i0 * components;
    const endIndex = i1 * components;

    const v0 = new Array(components);
    const v1 = new Array(components);
    for (let i = 0; i < components; ++i) {
      v0[i] = glTFSampler.output[startIndex + i];
      v1[i] = glTFSampler.output[endIndex + i];
    }

    switch (path) {
      case 'translation':
      case 'scale':
        return vec3.lerp(vec3.create(), v0, v1, ratio);
      case 'rotation':
        return quat.slerp(quat.create(), v0, v1, ratio);
      default:
        vtkWarningMacro(`Unsupported animation path: ${path}`);
        return null;
    }
  }

  // CUBICSPLINE interpolation (Hermite basis). glTF lays the output out as
  // [inTangent, value, outTangent] per keyframe, hence the stride-3 indexing:
  // value at i*3+1, out-tangent of k0 at i0*3+2, in-tangent of k1 at i1*3.
  // NOTE(review): this assumes each of those slots is either a scalar or an
  // array-like element — it does not unpack flat per-component output the way
  // STEP/LINEAR do; confirm against the accessor layout feeding it.
  function cubicSplineInterpolate(path, t0, t1, i0, i1, time) {
    const dt = t1 - t0;
    const t = (time - t0) / dt;
    const t2 = t * t;
    const t3 = t2 * t;

    const p0 = glTFSampler.output[i0 * 3 + 1];
    const m0 = dt * glTFSampler.output[i0 * 3 + 2];
    const p1 = glTFSampler.output[i1 * 3 + 1];
    const m1 = dt * glTFSampler.output[i1 * 3];

    if (Array.isArray(p0)) {
      return p0.map((v, j) => {
        const a = 2 * t3 - 3 * t2 + 1;
        const b = t3 - 2 * t2 + t;
        const c = -2 * t3 + 3 * t2;
        const d = t3 - t2;
        return a * v + b * m0[j] + c * p1[j] + d * m1[j];
      });
    }

    const a = 2 * t3 - 3 * t2 + 1;
    const b = t3 - 2 * t2 + t;
    const c = -2 * t3 + 3 * t2;
    const d = t3 - t2;
    return a * p0 + b * m0 + c * p1 + d * m1;
  }

  /**
   * Evaluate the sampler at `time` for a given animation `path`.
   * @param {number} time - animation clock in seconds
   * @param {string} path - 'translation' | 'rotation' | 'scale'
   * @returns {Array|Float32Array|null} interpolated value
   */
  function evaluate(time, path) {
    const [t0, t1, i0, i1] = findKeyframes(time);

    let result;

    switch (glTFSampler.interpolation) {
      case 'STEP':
        result = stepInterpolate(path, i0);
        break;
      case 'LINEAR':
        result = linearInterpolate(path, t0, t1, i0, i1, time);
        break;
      case 'CUBICSPLINE':
        result = cubicSplineInterpolate(path, t0, t1, i0, i1, time);
        break;
      default:
        vtkWarningMacro(
          `Unknown interpolation method: ${glTFSampler.interpolation}`
        );
    }
    return result;
  }

  return { ...glTFSampler, evaluate };
}

/**
 * Create an animation: wraps each sampler and channel of a glTF animation,
 * wires the channels to their resolved scene nodes, and exposes an
 * `update(currentTime)` method that drives every channel.
 * @param {glTFAnimation} glTFAnimation
 * @param {Map} nodes - resolved nodes, keyed by `node-<index>`
 * @returns {object} the animation augmented with `update`
 */
function createAnimation(glTFAnimation, nodes) {
  glTFAnimation.samplers = glTFAnimation.samplers.map((glTFSampler) =>
    createAnimationSampler(glTFSampler)
  );

  glTFAnimation.channels = glTFAnimation.channels.map((glTFChannel) => {
    // Swap the node index for the resolved node instance before binding.
    glTFChannel.target.node = nodes.get(`node-${glTFChannel.target.node}`);
    return createAnimationChannel(glTFChannel, glTFAnimation.samplers);
  });

  const update = (currentTime) => {
    for (const channel of glTFAnimation.channels) {
      channel.animate(currentTime);
    }
  };

  return { ...glTFAnimation, update };
}

/**
 * Create an animation mixer that manages playback of several animations with
 * normalized blend weights.
 * @param {Map} nodes - resolved nodes, keyed by `node-<index>`
 * @param {*} accessors - forwarded to createAnimation for API compatibility
 * @returns {object} mixer API: { addAnimation, play, stop, stopAll, update }
 */
function createAnimationMixer(nodes, accessors) {
  const animations = new Map();
  const activeAnimations = new Map();

  // Register an animation under its glTF id so it can be played by name.
  function addAnimation(glTFAnimation) {
    const animation = createAnimation(glTFAnimation, nodes, accessors);
    animations.set(glTFAnimation.id, animation);
    vtkDebugMacro(`Animation "${glTFAnimation.id}" added to mixer`);
  }

  // Start (or restart) an animation from t=0 with the given blend weight.
  function play(name, weight = 1) {
    if (!animations.has(name)) {
      vtkWarningMacro(`Animation "${name}" not found in mixer`);
      return;
    }
    activeAnimations.set(name, {
      animation: animations.get(name),
      weight,
      time: 0,
    });
    vtkDebugMacro(`Playing animation "${name}" with weight ${weight}`);
  }

  function stop(name) {
    if (activeAnimations.delete(name)) {
      // Informational message: use the debug macro for consistency with
      // play()/addAnimation() (it was previously logged as a warning).
      vtkDebugMacro(`Stopped animation "${name}"`);
    } else {
      vtkWarningMacro(`Animation "${name}" was not playing`);
    }
  }

  function stopAll() {
    activeAnimations.clear();
    // Informational message, same rationale as stop().
    vtkDebugMacro('Stopped all animations');
  }

  // Advance every active animation by deltaTime seconds.
  function update(deltaTime) {
    // Total weight of all active animations, used to normalize blending.
    const totalWeight = Array.from(activeAnimations.values()).reduce(
      (sum, { weight }) => sum + weight,
      0
    );

    activeAnimations.forEach(({ animation, weight, time }, name) => {
      const normalizedWeight = totalWeight > 0 ? weight / totalWeight : 0;
      const newTime = time + deltaTime;
      // Persist the advanced clock for this animation.
      activeAnimations.set(name, { animation, weight, time: newTime });

      vtkDebugMacro(
        `Updating animation "${name}" at time ${newTime.toFixed(
          3
        )} with normalized weight ${normalizedWeight.toFixed(3)}`
      );

      // NOTE(review): createAnimation's update() currently ignores the
      // weight argument, so blending is not applied yet.
      animation.update(newTime, normalizedWeight);
    });
  }

  return { addAnimation, play, stop, stopAll, update };
}

export {
createAnimation,
createAnimationChannel,
createAnimationMixer,
createAnimationSampler,
};
Constants.js
// GLB (binary glTF) container constants: 12-byte header = magic + version
// + total length (three uint32s); each chunk starts with two uint32s
// (length, type).
export const BINARY_HEADER_MAGIC = 'glTF';
export const BINARY_HEADER_LENGTH = 12;
// Chunk type identifiers: little-endian ASCII 'JSON' and 'BIN\0'.
export const BINARY_CHUNK_TYPES = { JSON: 0x4e4f534a, BIN: 0x004e4942 };
export const BINARY_HEADER_INTS = 3;
export const BINARY_CHUNK_HEADER_INTS = 2;

// Residual attenuation targeted at a punctual light's `range` distance.
export const MIN_LIGHT_ATTENUATION = 0.01;

// Number of components for each glTF accessor type.
export const COMPONENTS = {
SCALAR: 1,
VEC2: 2,
VEC3: 3,
VEC4: 4,
MAT2: 4,
MAT3: 9,
MAT4: 16,
};

// Size in bytes of each glTF componentType (WebGL enum values).
export const BYTES = {
5120: 1, // BYTE
5121: 1, // UNSIGNED_BYTE
5122: 2, // SHORT
5123: 2, // UNSIGNED_SHORT
5125: 4, // UNSIGNED_INT
5126: 4, // FLOAT
};

// glTF primitive draw modes (match the WebGL primitive enums).
export const MODES = {
GL_POINTS: 0,
GL_LINES: 1,
GL_LINE_LOOP: 2,
GL_LINE_STRIP: 3,
GL_TRIANGLES: 4,
GL_TRIANGLE_STRIP: 5,
GL_TRIANGLE_FAN: 6,
};

// TypedArray constructor for each glTF componentType.
export const ARRAY_TYPES = {
5120: Int8Array,
5121: Uint8Array,
5122: Int16Array,
5123: Uint16Array,
5125: Uint32Array,
5126: Float32Array,
};

// WebGL sampler filter/wrap enum values used by glTF samplers.
export const GL_SAMPLER = {
NEAREST: 9728,
LINEAR: 9729,
NEAREST_MIPMAP_NEAREST: 9984,
LINEAR_MIPMAP_NEAREST: 9985,
NEAREST_MIPMAP_LINEAR: 9986,
LINEAR_MIPMAP_LINEAR: 9987,
REPEAT: 10497,
CLAMP_TO_EDGE: 33071,
MIRRORED_REPEAT: 33648,
TEXTURE_MAG_FILTER: 10240,
TEXTURE_MIN_FILTER: 10241,
TEXTURE_WRAP_S: 10242,
TEXTURE_WRAP_T: 10243,
};

// Sampler used when a texture declares none. NOTE(review): glTF leaves
// filtering implementation-defined when omitted; NEAREST magFilter is this
// importer's choice — confirm it is intended over LINEAR.
export const DEFAULT_SAMPLER = {
magFilter: GL_SAMPLER.NEAREST,
minFilter: GL_SAMPLER.LINEAR_MIPMAP_LINEAR,
wrapS: GL_SAMPLER.REPEAT,
wrapT: GL_SAMPLER.REPEAT,
};

// Maps glTF attribute semantics to the importer's internal attribute names.
export const SEMANTIC_ATTRIBUTE_MAP = {
NORMAL: 'normal',
POSITION: 'position',
TEXCOORD_0: 'texcoord0',
TEXCOORD_1: 'texcoord1',
WEIGHTS_0: 'weight',
JOINTS_0: 'joint',
COLOR_0: 'color',
TANGENT: 'tangent',
};

// glTF material alphaMode values.
export const ALPHA_MODE = {
OPAQUE: 'OPAQUE',
MASK: 'MASK',
BLEND: 'BLEND',
};
Decoder.js
import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper';
import {
BINARY_CHUNK_TYPES,
BINARY_CHUNK_HEADER_INTS,
BINARY_HEADER_INTS,
BINARY_HEADER_LENGTH,
BINARY_HEADER_MAGIC,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';

// Read one GLB chunk header (two uint32s: payload length, chunk type) at
// `headerStart` and report where the chunk payload begins.
function getChunkInfo(headerStart, data) {
  const [chunkLength, chunkType] = new Uint32Array(
    data,
    headerStart,
    BINARY_CHUNK_HEADER_INTS
  );
  return {
    start: headerStart + BINARY_CHUNK_HEADER_INTS * 4,
    length: chunkLength,
    type: chunkType,
  };
}

// Walk the GLB payload after the 12-byte file header and collect every
// chunk's { start, length, type } descriptor.
function getAllChunkInfos(data) {
  const chunkInfos = [];
  let cursor = BINARY_HEADER_INTS * 4;
  while (cursor < data.byteLength) {
    const info = getChunkInfo(cursor, data);
    chunkInfos.push(info);
    // Skip past this chunk's payload; `start` already accounts for the
    // chunk header, so the next header begins at start + length.
    cursor = info.start + info.length;
  }
  return chunkInfos;
}

/**
 * Decode the JSON chunk of a GLB file.
 * @param {object} chunkInfo - chunk descriptor ({ start, length, type })
 * @param {ArrayBuffer} data - full GLB payload
 * @returns {object} the parsed glTF JSON document
 */
function getJsonFromChunk(chunkInfo, data) {
  // Use the chunk's own offset instead of recomputing a fixed
  // header-relative offset: the previous code was only correct because the
  // spec mandates the JSON chunk first; this stays correct either way.
  const jsonSlice = new Uint8Array(data, chunkInfo.start, chunkInfo.length);
  const stringBuffer = BinaryHelper.arrayBufferToString(jsonSlice);
  return JSON.parse(stringBuffer);
}

// Copy a chunk's payload bytes out into a standalone ArrayBuffer.
function getBufferFromChunk(chunkInfo, data) {
  const { start, length } = chunkInfo;
  return data.slice(start, start + length);
}

/**
 * Parse a GLB (binary glTF) container.
 * @param {ArrayBuffer} data - full file contents
 * @returns {{json: object, buffers: ArrayBuffer[]}} the JSON document and
 * the binary chunk payloads
 * @throws {Error} on a bad magic, a legacy (pre-2.0) version, or missing
 * JSON / binary chunks
 */
function parseGLB(data) {
  let json;
  const buffers = [];

  const headerView = new DataView(data, 0, BINARY_HEADER_LENGTH);

  // Header layout: 4-byte magic, uint32 version, uint32 total length.
  const header = {
    magic: BinaryHelper.arrayBufferToString(new Uint8Array(data, 0, 4)),
    version: headerView.getUint32(4, true),
    length: headerView.getUint32(8, true),
  };

  if (header.magic !== BINARY_HEADER_MAGIC) {
    throw new Error('Unsupported glTF-Binary header.');
  } else if (header.version < 2.0) {
    throw new Error('Unsupported legacy binary file detected.');
  }

  const chunkInfos = getAllChunkInfos(data);

  chunkInfos.forEach((chunkInfo) => {
    // Keep only the first JSON chunk; collect every BIN chunk.
    if (chunkInfo.type === BINARY_CHUNK_TYPES.JSON && !json) {
      json = getJsonFromChunk(chunkInfo, data);
    } else if (chunkInfo.type === BINARY_CHUNK_TYPES.BIN) {
      buffers.push(getBufferFromChunk(chunkInfo, data));
    }
  });

  if (!json) {
    throw new Error('glTF-Binary: JSON content not found.');
  }
  // The previous check was `!buffers`, which is always false for an array,
  // so the guard never fired; test the element count instead.
  if (buffers.length === 0) {
    throw new Error('glTF-Binary: Binary chunk not found.');
  }
  return { json, buffers };
}

export default parseGLB;
Extensions.js
import macro from 'vtk.js/Sources/macros';
import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';
import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader';
import vtkLight from 'vtk.js/Sources/Rendering/Core/Light';

import { MIN_LIGHT_ATTENUATION } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';

const { vtkWarningMacro } = macro;

/**
 * Handles the KHR_materials_unlit extension.
 *
 * Unlit materials must be rendered without any lighting computation, so
 * lighting is disabled on the property (the previous code enabled it,
 * which contradicts the extension's purpose).
 *
 * @param {object} extension - The KHR_materials_unlit extension object.
 * @param {vtkProperty} property - The vtkProperty instance to update.
 */
export function handleKHRMaterialsUnlit(extension, property) {
  property.setLighting(false);
}

/**
 * Handles the KHR_materials_ior extension.
 *
 * @param {object} extension - The KHR_materials_ior extension object.
 * @param {vtkProperty} property - The vtkProperty instance to update.
 */
export function handleKHRMaterialsIor(extension, property) {
  // Per the extension spec, `ior` defaults to 1.5 when omitted.
  property.setBaseIOR(extension.ior ?? 1.5);
}

/**
 * Handles the KHR_materials_specular extension.
 * @param {object} extension - The KHR_materials_specular extension object.
 * @param {vtkProperty} property - The vtkProperty instance to update.
 */
export function handleKHRMaterialsSpecular(extension, property) {
  // Spec defaults: specularFactor = 1.0, specularColorFactor = [1, 1, 1].
  // `??` (not `||`) keeps a legitimate specularFactor of 0.
  property.setSpecular(extension.specularFactor ?? 1.0);
  property.setSpecularColor(extension.specularColorFactor ?? [1, 1, 1]);
}

/**
 * Handles the KHR_lights_punctual extension.
 *
 * @param {object} extension - The KHR_lights_punctual extension object, with
 * its `light` already resolved to the light definition.
 * @param {mat4} transformMatrix - World transform of the node carrying the light.
 * @param {object} model - Importer model; the created vtkLight is stored in
 * `model.lights`.
 */
export function handleKHRLightsPunctual(extension, transformMatrix, model) {
  const { light } = extension;

  const { color, intensity, range, spot, type } = light;

  const l = vtkLight.newInstance({
    color: color || [1, 1, 1],
    intensity: intensity || 1.0,
  });

  // Apply the global transform to the light
  l.setTransformMatrix(transformMatrix);

  // Handle range
  if (range > 0) {
    // Set quadratic values to get attenuation(range) ~= MIN_LIGHT_ATTENUATION
    l.setAttenuationValues(1, 0, 1.0 / (range * range * MIN_LIGHT_ATTENUATION));
  }

  switch (type) {
    case 'directional':
      l.setPositional(false);
      break;
    case 'point':
      l.setPositional(true);
      l.setConeAngle(90);
      break;
    case 'spot':
      l.setPositional(true);
      // glTF expresses spot.outerConeAngle in radians while vtkLight's
      // coneAngle is in degrees, so convert radians -> degrees (the
      // previous code applied the conversion in the wrong direction).
      l.setConeAngle(vtkMath.degreesFromRadians(spot.outerConeAngle));
      break;
    default:
      vtkWarningMacro(`Unsupported light type: ${type}`);
  }

  // NOTE(review): lights without a `name` all map to the `undefined` key and
  // overwrite each other — consider falling back to a generated id.
  model.lights.set(light.name, l);
}

/**
 * Handles the KHR_draco_mesh_compression extension.
 *
 * Feeds the already-resolved compressed bytes (`extension.bufferView`) to a
 * vtkDracoReader and returns the decoded geometry. The Draco decoder module
 * must have been configured beforehand (see setDracoDecoder/setWasmBinary).
 *
 * @param {object} extension - The KHR_draco_mesh_compression extension object.
 * @returns {Promise<vtkPolyData>} the decompressed primitive geometry
 */
export async function handleKHRDracoMeshCompression(extension) {
const reader = vtkDracoReader.newInstance();
reader.parse(extension.bufferView);
return reader.getOutputData();
}

/**
 * Handles the KHR_materials_variants extension.
 *
 * Records the available variant names on the model; the per-primitive
 * material mappings are resolved separately.
 *
 * @param {object} extension - The KHR_materials_variants extension object.
 * @param {object} model - The model object to update with variant information.
 */
export function handleKHRMaterialsVariants(extension, model) {
  const names = extension.variants.map(({ name }) => name);
  model.variants = names;
}
ORMTexture.worker.js
import registerWebworker from 'webworker-promise/lib/register';

/**
 * Worker entry point: decodes an encoded image and, when `channel` is given,
 * broadcasts that single channel ('r', 'g' or 'b') into the R, G and B
 * components — producing the grayscale image used for packed
 * occlusion/roughness/metallic (ORM) textures.
 *
 * @param {ArrayBuffer} imageBuffer - encoded image bytes
 * @param {string} mimeType - image MIME type (e.g. 'image/png')
 * @param {string} channel - 'r' | 'g' | 'b'; falsy leaves pixels unchanged
 * @returns {Promise<{bitmap: ImageData}>} decoded (possibly remapped) pixels
 */
registerWebworker(async ({ imageBuffer, mimeType, channel }) => {
const channelsMap = {
r: 0,
g: 1,
b: 2,
};

// Decode off the main thread via an offscreen canvas to get raw pixels.
const blob = new Blob([imageBuffer], { type: mimeType });
const img = await createImageBitmap(blob);
const canvas = new OffscreenCanvas(img.width, img.height);
const ctx = canvas.getContext('2d');

ctx.drawImage(img, 0, 0, img.width, img.height);
const bitmap = ctx.getImageData(0, 0, img.width, img.height);

if (channel) {
const idx = channelsMap[channel];
// RGBA layout: copy the selected channel into R, G and B; alpha untouched.
for (let i = 0; i < bitmap.data.length; i += 4) {
const channelValue = bitmap.data[i + idx];
bitmap.data[i] = channelValue; // red channel
bitmap.data[i + 1] = channelValue; // green channel
bitmap.data[i + 2] = channelValue; // blue channel
}
}
return { bitmap };
});
Parser.js
/* eslint-disable guard-for-in */
/* eslint-disable no-restricted-syntax */
/* eslint-disable class-methods-use-this */
import macro from 'vtk.js/Sources/macros';

import {
ALPHA_MODE,
BYTES,
COMPONENTS,
DEFAULT_SAMPLER,
GL_SAMPLER,
MODES,
SEMANTIC_ATTRIBUTE_MAP,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';

import {
getAccessorArrayTypeAndLength,
getGLEnumFromSamplerParameter,
resolveUrl,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils';

const { vtkDebugMacro, vtkWarningMacro } = macro;

// Resolves a glTF document in place: loads external buffers and images, then
// replaces every cross-reference index (scene -> node -> mesh -> accessor,
// material -> texture -> image, ...) with the object it designates, so the
// resulting tree can be walked without index lookups.
class GLTFParser {
// @param {object} glTF - { json, baseUri?, glbBuffers? } as produced by the reader
// @param {object} options - parser options; `options.baseUri` is used to resolve relative URIs
constructor(glTF, options = {}) {
const { json, baseUri = '' } = glTF;

this.glTF = glTF;
this.options = options;
this.baseUri = baseUri;
this.json = json;
this.extensions = json.extensions || {};
this.extensionsUsed = json.extensionsUsed || [];
}

// Entry point: fetch binary data, then resolve all indices.
// Returns the mutated json tree (same object as this.json).
async parse() {
const buffers = this.json.buffers || [];
this.buffers = new Array(buffers.length).fill(null);

const images = this.json.images || [];
// NOTE(review): fill({}) places the SAME object in every slot; harmless as
// long as the placeholders are always replaced before use — confirm.
this.images = new Array(images.length).fill({});
await this.loadBuffers();
await this.loadImages();
this.resolveTree();

return this.glTF.json;
}

// Resolve every top-level array. Resolution order matters: bufferViews
// before accessors/images, accessors/materials before meshes, meshes/skins
// before nodes, etc.
resolveTree() {
this.json.scenes = this.json.scenes?.map((scene, idx) =>
this.resolveScene(scene, idx)
);

this.json.cameras = this.json.cameras?.map((camera, idx) =>
this.resolveCamera(camera, idx)
);

this.json.bufferViews = this.json.bufferViews?.map((bufView, idx) =>
this.resolveBufferView(bufView, idx)
);

this.json.images = this.json.images?.map((image, idx) =>
this.resolveImage(image, idx)
);

this.json.samplers = this.json.samplers?.map((sampler, idx) =>
this.resolveSampler(sampler, idx)
);

this.json.textures = this.json.textures?.map((texture, idx) =>
this.resolveTexture(texture, idx)
);

this.json.accessors = this.json.accessors?.map((accessor, idx) =>
this.resolveAccessor(accessor, idx)
);

this.json.materials = this.json.materials?.map((material, idx) =>
this.resolveMaterial(material, idx)
);

this.json.meshes = this.json.meshes?.map((mesh, idx) =>
this.resolveMesh(mesh, idx)
);

this.json.nodes = this.json.nodes?.map((node, idx) =>
this.resolveNode(node, idx)
);

this.json.skins = this.json.skins?.map((skin, idx) =>
this.resolveSkin(skin, idx)
);

this.json.animations = this.json.animations?.map((animation, idx) =>
this.resolveAnimation(animation, idx)
);
}

// Dereference `index` into this.json[array]. Objects pass through untouched
// so already-resolved references are idempotent.
get(array, index) {
// check if already resolved
if (typeof index === 'object') {
return index;
}
const object = this.json[array] && this.json[array][index];
if (!object) {
vtkWarningMacro(`glTF file error: Could not find ${array}[${index}]`);
}
return object;
}

// Give the scene an id and swap node indices for node objects.
resolveScene(scene, index) {
scene.id = scene.id || `scene-${index}`;
scene.nodes = (scene.nodes || []).map((node) => this.get('nodes', node));
return scene;
}

// Resolve a node's children, mesh(es), camera, skin and punctual light.
resolveNode(node, index) {
node.id = node.id || `node-${index}`;
if (node.children) {
node.children = node.children.map((child) => this.get('nodes', child));
}
if (node.mesh !== undefined) {
node.mesh = this.get('meshes', node.mesh);
} else if (node.meshes !== undefined && node.meshes.length) {
// Legacy multi-mesh form: merge all primitives into a single mesh.
node.mesh = node.meshes.reduce(
(accum, meshIndex) => {
const mesh = this.get('meshes', meshIndex);
accum.id = mesh.id;
accum.primitives = accum.primitives.concat(mesh.primitives);
return accum;
},
{ primitives: [] }
);
}
if (node.camera !== undefined) {
node.camera = this.get('cameras', node.camera);
}
if (node.skin !== undefined) {
node.skin = this.get('skins', node.skin);
}

// Fill punctual lights objects
if (node.extensions?.KHR_lights_punctual) {
node.extensions.KHR_lights_punctual.light =
this.extensions?.KHR_lights_punctual.lights[
node.extensions.KHR_lights_punctual.light
];
}
return node;
}

// Resolve the skin's inverse bind matrices accessor.
resolveSkin(skin, index) {
skin.id = skin.id || `skin-${index}`;
skin.inverseBindMatrices = this.get('accessors', skin.inverseBindMatrices);
return skin;
}

// Resolve each primitive's attributes, indices, material and (when present)
// its Draco-compressed payload. Attribute semantics are renamed via
// SEMANTIC_ATTRIBUTE_MAP (POSITION -> position, ...).
resolveMesh(mesh, index) {
mesh.id = mesh.id || `mesh-${index}`;
if (mesh.primitives) {
mesh.primitives = mesh.primitives.map((primitive, idx) => {
const attributes = primitive.attributes;
primitive.name = `primitive-${idx}`;
primitive.attributes = {};
for (const attribute in attributes) {
const attr = SEMANTIC_ATTRIBUTE_MAP[attribute];
primitive.attributes[attr] = this.get(
'accessors',
attributes[attribute]
);
}
if (primitive.indices !== undefined) {
primitive.indices = this.get('accessors', primitive.indices);
}
if (primitive.material !== undefined) {
primitive.material = this.get('materials', primitive.material);
}
if (primitive.mode === undefined) {
primitive.mode = MODES.GL_TRIANGLES; // Default one
}

if (primitive.extensions?.KHR_draco_mesh_compression) {
vtkDebugMacro('Using Draco mesh compression');
// Replace the bufferView index with the raw compressed bytes, ready
// for the Draco decoder.
const bufferView = this.get(
'bufferViews',
primitive.extensions.KHR_draco_mesh_compression.bufferView
);
primitive.extensions.KHR_draco_mesh_compression.bufferView =
bufferView.data;
}

return primitive;
});
}
return mesh;
}

// Apply spec defaults (alphaMode, doubleSided, alphaCutoff, PBR factors)
// and resolve every texture reference on the material.
resolveMaterial(material, index) {
material.id = material.id || `material-${index}`;

if (material.alphaMode === undefined)
material.alphaMode = ALPHA_MODE.OPAQUE;
if (material.doubleSided === undefined) material.doubleSided = false;
if (material.alphaCutoff === undefined) material.alphaCutoff = 0.5;

if (material.normalTexture) {
material.normalTexture = { ...material.normalTexture };
material.normalTexture.texture = this.get(
'textures',
material.normalTexture.index
);
}
if (material.occlusionTexture) {
material.occlusionTexture = { ...material.occlusionTexture };
material.occlusionTexture.texture = this.get(
'textures',
material.occlusionTexture.index
);
}
if (material.emissiveTexture) {
material.emissiveTexture = { ...material.emissiveTexture };
material.emissiveTexture.texture = this.get(
'textures',
material.emissiveTexture.index
);
}
// NOTE(review): glTF's emissiveFactor is an RGB triplet, but only its first
// component is kept here (or a 0/1 flag when absent) — confirm downstream
// consumers expect a scalar.
if (!material.emissiveFactor) {
material.emissiveFactor = material.emissiveTexture ? 1 : 0;
} else material.emissiveFactor = material.emissiveFactor[0];

if (material.pbrMetallicRoughness) {
material.pbrMetallicRoughness = { ...material.pbrMetallicRoughness };
const mr = material.pbrMetallicRoughness;
if (mr.baseColorTexture) {
mr.baseColorTexture = { ...mr.baseColorTexture };
mr.baseColorTexture.texture = this.get(
'textures',
mr.baseColorTexture.index
);
}
if (mr.metallicRoughnessTexture) {
mr.metallicRoughnessTexture = { ...mr.metallicRoughnessTexture };
mr.metallicRoughnessTexture.texture = this.get(
'textures',
mr.metallicRoughnessTexture.index
);
}
} else {
// Spec defaults when the material declares no PBR block.
material.pbrMetallicRoughness = {
baseColorFactor: [1, 1, 1, 1],
metallicFactor: 1.0,
roughnessFactor: 1.0,
};
}
return material;
}

/**
 * Extract one accessor's values from an interleaved buffer by copying
 * `bytesPerElement` bytes every `byteStride` bytes, producing a tightly
 * packed ArrayBuffer.
 * @returns {ArrayBuffer} de-interleaved copy of the accessor's data
 */
getValueFromInterleavedBuffer(
buffer,
byteOffset,
byteStride,
bytesPerElement,
count
) {
const result = new Uint8Array(count * bytesPerElement);
for (let i = 0; i < count; i++) {
const elementOffset = byteOffset + i * byteStride;
result.set(
new Uint8Array(
buffer.arrayBuffer.slice(
elementOffset,
elementOffset + bytesPerElement
)
),
i * bytesPerElement
);
}
return result.buffer;
}

// Resolve the accessor's bufferView and materialize its values into a
// TypedArray (`accessor.value`), de-interleaving when a byteStride is set.
resolveAccessor(accessor, index) {
accessor.id = accessor.id || `accessor-${index}`;
if (accessor.bufferView !== undefined) {
// Draco encoded meshes don't have bufferView
accessor.bufferView = this.get('bufferViews', accessor.bufferView);
}

// Look up enums
accessor.bytesPerComponent = BYTES[accessor.componentType];
accessor.components = COMPONENTS[accessor.type];
accessor.bytesPerElement = accessor.bytesPerComponent * accessor.components;

// Create TypedArray for the accessor
// Note: The canonical way to instantiate is to ignore this array and create
// WebGLBuffer's using the bufferViews.
if (accessor.bufferView) {
const buffer = accessor.bufferView.buffer;
const { ArrayType, byteLength } = getAccessorArrayTypeAndLength(
accessor,
accessor.bufferView
);
// Absolute offset = bufferView offset + accessor offset + buffer offset.
const byteOffset =
(accessor.bufferView.byteOffset || 0) +
(accessor.byteOffset || 0) +
buffer.byteOffset;

let slicedBufffer = buffer.arrayBuffer.slice(
byteOffset,
byteOffset + byteLength
);

// Interleaved data: gather this accessor's elements into a packed copy.
if (accessor.bufferView.byteStride) {
slicedBufffer = this.getValueFromInterleavedBuffer(
buffer,
byteOffset,
accessor.bufferView.byteStride,
accessor.bytesPerElement,
accessor.count
);
}
accessor.value = new ArrayType(slicedBufffer);
}

return accessor;
}

// Resolve the texture's sampler (falling back to DEFAULT_SAMPLER) and its
// source image, honoring alternate-source texture extensions.
resolveTexture(texture, index) {
texture.id = texture.id || `texture-${index}`;
texture.sampler =
'sampler' in texture
? this.get('samplers', texture.sampler)
: DEFAULT_SAMPLER;

texture.source = this.get('images', texture.source);

// Handle texture extensions sources
if (texture.extensions !== undefined) {
const extensionsNames = Object.keys(texture.extensions);
extensionsNames.forEach((extensionName) => {
const extension = texture.extensions[extensionName];
switch (extensionName) {
case 'KHR_texture_basisu':
case 'EXT_texture_webp':
case 'EXT_texture_avif':
texture.source = this.get('images', extension.source);
break;
default:
vtkWarningMacro(`Unhandled extension: ${extensionName}`);
}
});
}
return texture;
}

// Apply default wrap/filter values and expose them keyed by GL enum in
// `sampler.parameters`.
resolveSampler(sampler, index) {
sampler.id = sampler.id || `sampler-${index}`;

if (!Object.hasOwn(sampler, 'wrapS')) sampler.wrapS = GL_SAMPLER.REPEAT;
if (!Object.hasOwn(sampler, 'wrapT')) sampler.wrapT = GL_SAMPLER.REPEAT;

if (!Object.hasOwn(sampler, 'minFilter'))
sampler.minFilter = GL_SAMPLER.LINEAR_MIPMAP_LINEAR;
if (!Object.hasOwn(sampler, 'magFilter'))
sampler.magFilter = GL_SAMPLER.NEAREST;

// Map textual parameters to GL parameter values
sampler.parameters = {};
for (const key in sampler) {
const glEnum = getGLEnumFromSamplerParameter(key);
if (glEnum !== undefined) {
sampler.parameters[glEnum] = sampler[key];
}
}
return sampler;
}

// Resolve an image's bufferView index (when the image is embedded).
resolveImage(image, index) {
image.id = image.id || `image-${index}`;
if (image.bufferView !== undefined) {
image.bufferView = this.get('bufferViews', image.bufferView);
}
return image;
}

// Attach the loaded buffer and expose the view's bytes as `data`.
resolveBufferView(bufferView, index) {
bufferView.id = bufferView.id || `bufferView-${index}`;
const bufferIndex = bufferView.buffer;
bufferView.buffer = this.buffers[bufferIndex];

const arrayBuffer = this.buffers[bufferIndex].arrayBuffer;
let byteOffset = this.buffers[bufferIndex].byteOffset || 0;

if ('byteOffset' in bufferView) {
byteOffset += bufferView.byteOffset;
}

bufferView.data = new Uint8Array(
arrayBuffer,
byteOffset,
bufferView.byteLength
);
return bufferView;
}

// Cameras only need an id; their projection parameters are used as-is.
resolveCamera(camera, index) {
camera.id = camera.id || `camera-${index}`;
return camera;
}

// Replace each sampler's input/output accessor indices with the accessors'
// materialized TypedArray values.
resolveAnimation(animation, index) {
animation.id = animation.id || `animation-${index}`;
// NOTE(review): map() is used for in-place side effects; the returned
// array is discarded (forEach would express the intent better).
animation.samplers.map((sampler) => {
sampler.input = this.get('accessors', sampler.input).value;
sampler.output = this.get('accessors', sampler.output).value;
return sampler;
});
return animation;
}

// Load every declared buffer in parallel; the uri is removed once the
// bytes are in memory.
// NOTE(review): assumes this.json.buffers exists — parse() only defaults
// the local copy; confirm buffer-less documents are handled upstream.
loadBuffers() {
const promises = this.json.buffers.map((buffer, idx) =>
this.loadBuffer(buffer, idx).then(() => {
delete buffer.uri;
})
);
return Promise.all(promises);
}

// Load a single buffer: fetch its uri, or take the matching GLB chunk.
// NOTE(review): when neither source applies, the buffer descriptor object
// itself is stored as `arrayBuffer` — verify that fallback is intentional.
async loadBuffer(buffer, index) {
let arrayBuffer = buffer;

if (buffer.uri) {
vtkDebugMacro('Loading uri', buffer.uri);
const uri = resolveUrl(buffer.uri, this.options.baseUri);
const response = await fetch(uri);
arrayBuffer = await response.arrayBuffer();
} else if (this.glTF.glbBuffers) {
arrayBuffer = this.glTF.glbBuffers[index];
}

this.buffers[index] = {
arrayBuffer,
byteOffset: 0,
byteLength: arrayBuffer.byteLength,
};
}

// Load every image in parallel, resolving with this.images once done.
// NOTE(review): the explicit Promise wrapper around Promise.all is
// redundant and never calls reject — failures surface as unhandled
// rejections rather than rejecting this promise.
loadImages() {
const images = this.json.images || [];
const promises = [];

return new Promise((resolve, reject) => {
for (let i = 0; i < images.length; ++i) {
promises.push(
Promise.resolve(
this.loadImage(images[i], i).then(() => {
vtkDebugMacro('Texture loaded ', images[i]);
})
)
);
}

Promise.all(promises).then(() => resolve(this.images));
});
}

// Load one image's bytes, from its uri or from an embedded bufferView, and
// normalize the result to `image.bufferView = { data }`.
async loadImage(image, index) {
let arrayBuffer;
let buffer;

if (image.uri) {
vtkDebugMacro('Loading texture', image.uri);
const uri = resolveUrl(image.uri, this.options.baseUri);
const response = await fetch(uri);

arrayBuffer = await response.arrayBuffer();
image.uri = uri;
image.bufferView = {
data: arrayBuffer,
};
} else if (image.bufferView) {
const bufferView = this.get('bufferViews', image.bufferView);
buffer = this.get('buffers', bufferView.buffer);

// GLB buffer
// NOTE(review): only the GLB path assigns `arrayBuffer`; for embedded
// images in a non-GLB file, `data` ends up undefined — confirm.
if (this.glTF.glbBuffers) {
buffer = this.glTF.glbBuffers[bufferView.buffer];
arrayBuffer = buffer.slice(
bufferView.byteOffset,
bufferView.byteOffset + bufferView.byteLength
);
}

image.bufferView = {
data: arrayBuffer,
};
}
}
}

export default GLTFParser;
Reader.js
import macro from 'vtk.js/Sources/macros';
import * as vtkMath from 'vtk.js/Sources/Common/Core/Math';

import vtkActor from 'vtk.js/Sources/Rendering/Core/Actor';
import vtkCamera from 'vtk.js/Sources/Rendering/Core/Camera';
import vtkDataArray from 'vtk.js/Sources/Common/Core/DataArray';
import vtkPolyData from 'vtk.js/Sources/Common/DataModel/PolyData';
import vtkMapper from 'vtk.js/Sources/Rendering/Core/Mapper';
import vtkCellArray from 'vtk.js/Sources/Common/Core/CellArray';
import vtkTransform from 'vtk.js/Sources/Common/Transform/Transform';
import GLTFParser from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Parser';
import {
ALPHA_MODE,
MODES,
SEMANTIC_ATTRIBUTE_MAP,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';
import {
createVTKTextureFromGLTFTexture,
loadImage,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Utils';
import {
handleKHRDracoMeshCompression,
handleKHRLightsPunctual,
handleKHRMaterialsIor,
handleKHRMaterialsSpecular,
handleKHRMaterialsUnlit,
handleKHRMaterialsVariants,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Extensions';

import { mat4, quat, vec3 } from 'gl-matrix';

const { vtkWarningMacro, vtkDebugMacro } = macro;

/**
 * Parses a glTF object into a fully resolved tree.
 * @param {Object} gltf - The GLTF object to parse
 * @param {Object} options - Parser options (e.g. baseUri)
 * @returns {Promise<glTF>} The parsed GLTF object
 */
async function parseGLTF(gltf, options) {
  const parser = new GLTFParser(gltf, options);
  return parser.parse();
}

/**
 * Creates VTK polydata from a glTF mesh primitive.
 * Attribute accessors are mapped onto VTK point data (points, normals,
 * scalars, texture coordinates, tangents); indices, when present, are
 * converted into a VTK cell array matched to the primitive draw mode.
 * @param {GLTFPrimitive} primitive - The glTF mesh primitive
 * @returns {vtkPolyData|null} The created VTK polydata, or null when the
 *   primitive carries no attribute data
 */
async function createPolyDataFromGLTFMesh(primitive) {
  if (!primitive || !primitive.attributes) {
    vtkWarningMacro('Primitive has no position data, skipping');
    return null;
  }

  // Draco-compressed primitives are fully decoded by the extension handler.
  if (primitive.extensions?.KHR_draco_mesh_compression) {
    return handleKHRDracoMeshCompression(
      primitive.extensions.KHR_draco_mesh_compression
    );
  }

  const polyData = vtkPolyData.newInstance();
  const cells = vtkCellArray.newInstance();
  const pointData = polyData.getPointData();

  // The callback is synchronous: the previous `async` marker made forEach
  // silently discard the returned promises. `accessor` is the value already
  // destructured from the entry, so per-name re-lookups are unnecessary.
  Object.entries(primitive.attributes).forEach(([attributeName, accessor]) => {
    switch (attributeName) {
      case SEMANTIC_ATTRIBUTE_MAP.POSITION:
        // NOTE(review): 'component' (singular) mirrors the field the original
        // code read for positions — confirm against the parser's accessors.
        polyData.getPoints().setData(accessor.value, accessor.component);
        break;
      case SEMANTIC_ATTRIBUTE_MAP.NORMAL:
        pointData.setNormals(
          vtkDataArray.newInstance({
            name: 'Normals',
            values: accessor.value,
            numberOfComponents: accessor.components,
          })
        );
        break;
      case SEMANTIC_ATTRIBUTE_MAP.COLOR_0:
        pointData.setScalars(
          vtkDataArray.newInstance({
            name: 'Scalars',
            values: accessor.value,
            numberOfComponents: accessor.components,
          })
        );
        break;
      case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_0: {
        const da = vtkDataArray.newInstance({
          name: 'TEXCOORD_0',
          values: accessor.value,
          numberOfComponents: accessor.components,
        });
        pointData.addArray(da);
        // The first UV set is the default active texture coordinates.
        pointData.setActiveTCoords(da.getName());
        break;
      }
      case SEMANTIC_ATTRIBUTE_MAP.TEXCOORD_1:
        pointData.addArray(
          vtkDataArray.newInstance({
            name: 'TEXCOORD_1',
            values: accessor.value,
            numberOfComponents: accessor.components,
          })
        );
        break;
      case SEMANTIC_ATTRIBUTE_MAP.TANGENT:
        pointData.addArray(
          vtkDataArray.newInstance({
            name: 'Tangents',
            values: accessor.value,
            numberOfComponents: accessor.components,
          })
        );
        break;
      default:
        vtkWarningMacro(`Unhandled attribute: ${attributeName}`);
    }
  });

  // Handle indices if available
  if (primitive.indices != null) {
    const indices = primitive.indices.value;
    const nCells = indices.length - 2;
    switch (primitive.mode) {
      case MODES.GL_LINE_STRIP:
      case MODES.GL_TRIANGLE_STRIP:
      case MODES.GL_LINE_LOOP:
        // The warning previously named only GL_LINE_LOOP even when the mode
        // was a strip; all three modes are unimplemented.
        vtkWarningMacro(
          'GL_LINE_STRIP, GL_TRIANGLE_STRIP and GL_LINE_LOOP are not implemented'
        );
        break;
      default:
        // Triangle soup: each VTK cell stores 1 count + 3 point ids, hence
        // the 4/3 sizing ratio relative to the raw index count.
        cells.resize((4 * indices.length) / 3);
        for (let cellId = 0; cellId < nCells; cellId += 3) {
          const cell = indices.slice(cellId, cellId + 3);
          cells.insertNextCell(cell);
        }
    }
  }

  // Attach the connectivity to the polydata slot matching the draw mode.
  switch (primitive.mode) {
    case MODES.GL_TRIANGLES:
    case MODES.GL_TRIANGLE_FAN:
      polyData.setPolys(cells);
      break;
    case MODES.GL_LINES:
    case MODES.GL_LINE_STRIP:
    case MODES.GL_LINE_LOOP:
      polyData.setLines(cells);
      break;
    case MODES.GL_POINTS:
      polyData.setVerts(cells);
      break;
    case MODES.GL_TRIANGLE_STRIP:
      polyData.setStrips(cells);
      break;
    default:
      vtkWarningMacro('Invalid primitive draw mode. Ignoring connectivity.');
  }

  return polyData;
}

/**
* Creates a VTK property from a GLTF material
* @param {object} model - The vtk model object
* @param {GLTFMaterial} material - The GLTF material
* @param {vtkActor} actor - The VTK actor
*/
async function createPropertyFromGLTFMaterial(model, material, actor) {
let metallicFactor = 1.0;
let roughnessFactor = 1.0;
const emissiveFactor = material.emissiveFactor;

const property = actor.getProperty();
const pbr = material.pbrMetallicRoughness;

if (pbr != null) {
if (
!pbr?.metallicFactor ||
pbr?.metallicFactor <= 0 ||
pbr?.metallicFactor >= 1
) {
vtkDebugMacro(
'Invalid material.pbrMetallicRoughness.metallicFactor value. Using default value instead.'
);
} else metallicFactor = pbr.metallicFactor;
if (
!pbr?.roughnessFactor ||
pbr?.roughnessFactor <= 0 ||
pbr?.roughnessFactor >= 1
) {
vtkDebugMacro(
'Invalid material.pbrMetallicRoughness.roughnessFactor value. Using default value instead.'
);
} else roughnessFactor = pbr.roughnessFactor;

const color = pbr.baseColorFactor;

if (color != null) {
property.setDiffuseColor(color[0], color[1], color[2]);
property.setOpacity(color[3]);
}

property.setMetallic(metallicFactor);
property.setRoughness(roughnessFactor);
property.setEmission(emissiveFactor);

if (pbr.baseColorTexture) {
const extensions = pbr.baseColorTexture.extensions;
const tex = pbr.baseColorTexture.texture;

if (tex.extensions != null) {
const extensionsNames = Object.keys(tex.extensions);
extensionsNames.forEach((extensionName) => {
// TODO: Handle KHR_texture_basisu extension
// const extension = tex.extensions[extensionName];
switch (extensionName) {
default:
vtkWarningMacro(`Unhandled extension: ${extensionName}`);
}
});
}

const sampler = tex.sampler;
const image = await loadImage(tex.source);
const diffuseTex = createVTKTextureFromGLTFTexture(
image,
sampler,
extensions
);

// FIXME: Workaround for textures not showing up in WebGL
const viewAPI = model.renderer.getRenderWindow();
const isWebGL = viewAPI.getViews()[0].isA('vtkOpenGLRenderWindow');
if (isWebGL) {
actor.addTexture(diffuseTex);
} else {
property.setDiffuseTexture(diffuseTex);
}
}

if (pbr.metallicRoughnessTexture) {
const extensions = pbr.metallicRoughnessTexture.extensions;
const tex = pbr.metallicRoughnessTexture.texture;
const sampler = tex.sampler;
const metallicImage = await loadImage(tex.source, 'b');
const metallicTex = createVTKTextureFromGLTFTexture(
metallicImage,
sampler,
extensions
);
property.setMetallicTexture(metallicTex);

const roughnessImage = await loadImage(tex.source, 'g');
const roughnessTex = createVTKTextureFromGLTFTexture(
roughnessImage,
sampler,
extensions
);
property.setRoughnessTexture(roughnessTex);
}

// Handle ambient occlusion texture (occlusionTexture)
if (material.occlusionTexture) {
const extensions = material.occlusionTexture.extensions;
const tex = material.occlusionTexture.texture;
const sampler = tex.sampler;
const aoImage = await loadImage(tex.source, 'r');
const aoTex = createVTKTextureFromGLTFTexture(
aoImage,
sampler,
extensions
);
property.setAmbientOcclusionTexture(aoTex);
}

// Handle emissive texture (emissiveTexture)
if (material.emissiveTexture) {
const extensions = material.emissiveTexture.extensions;
const tex = material.emissiveTexture.texture;
const sampler = tex.sampler;
const emissiveImage = await loadImage(tex.source);
const emissiveTex = createVTKTextureFromGLTFTexture(
emissiveImage,
sampler,
extensions
);
property.setEmissionTexture(emissiveTex);

// Handle mutiple Uvs
if (material.emissiveTexture.texCoord != null) {
const pd = actor.getMapper().getInputData().getPointData();
pd.setActiveTCoords(`TEXCOORD_${material.emissiveTexture.texCoord}`);
}
}

// Handle normal texture (normalTexture)
if (material.normalTexture) {
const extensions = material.normalTexture.extensions;
const tex = material.normalTexture.texture;
const sampler = tex.sampler;
const normalImage = await loadImage(tex.source);
const normalTex = createVTKTextureFromGLTFTexture(
normalImage,
sampler,
extensions
);
property.setNormalTexture(normalTex);

if (material.normalTexture.scale != null) {
property.setNormalStrength(material.normalTexture.scale);
}
}
}

// Material extensions
if (material.extensions != null) {
const extensionsNames = Object.keys(material.extensions);
extensionsNames.forEach((extensionName) => {
const extension = material.extensions[extensionName];
switch (extensionName) {
case 'KHR_materials_unlit':
handleKHRMaterialsUnlit(extension, property);
break;
case 'KHR_materials_ior':
handleKHRMaterialsIor(extension, property);
break;
case 'KHR_materials_specular':
handleKHRMaterialsSpecular(extension, property);
break;
default:
vtkWarningMacro(`Unhandled extension: ${extensionName}`);
}
});
}

if (material.alphaMode !== ALPHA_MODE.OPAQUE) {
actor.setForceTranslucent(true);
}

property.setBackfaceCulling(!material.doubleSided);
}

/**
 * Applies primitive-level glTF extensions to the importer model.
 * Currently only KHR_materials_variants is recognized; its mappings are
 * recorded per node id for later variant switching.
 * @param {string} nodeId - The GLTF node id
 * @param {*} extensions - The extensions object attached to the primitive
 * @param {*} model - The vtk model object
 */
function handlePrimitiveExtensions(nodeId, extensions, model) {
  Object.entries(extensions).forEach(([extensionName, extension]) => {
    if (extensionName === 'KHR_materials_variants') {
      model.variantMappings.set(nodeId, extension.mappings);
    } else {
      vtkWarningMacro(`Unhandled extension: ${extensionName}`);
    }
  });
}

/**
 * Creates an empty VTK actor serving as the parent prop for a glTF node.
 * The mapper is fed an empty polydata; child primitive actors attach to it.
 * @param {mat4} worldMatrix - The node's world transform
 * @returns {vtkActor} The created VTK actor
 */
async function createActorFromGTLFNode(worldMatrix) {
  const mapper = vtkMapper.newInstance();
  mapper.setColorModeToDirectScalars();
  mapper.setInputData(vtkPolyData.newInstance());

  const actor = vtkActor.newInstance();
  actor.setMapper(mapper);
  actor.setUserMatrix(worldMatrix);
  return actor;
}

/**
 * Creates a VTK actor from a single glTF mesh primitive, applying its
 * material and primitive-level extensions.
 * @param {object} model - The vtk model object
 * @param {GLTFPrimitive} primitive - The glTF primitive
 * @param {mat4} worldMatrix - The node's world transform
 * @returns {vtkActor} The created VTK actor
 */
async function createActorFromGTLFPrimitive(model, primitive, worldMatrix) {
  const mapper = vtkMapper.newInstance();
  mapper.setColorModeToDirectScalars();

  const actor = vtkActor.newInstance();
  actor.setMapper(mapper);
  actor.setUserMatrix(worldMatrix);

  mapper.setInputData(await createPolyDataFromGLTFMesh(primitive));

  // Support for materials
  if (primitive.material != null) {
    await createPropertyFromGLTFMaterial(model, primitive.material, actor);
  }

  if (primitive.extensions != null) {
    handlePrimitiveExtensions(`${primitive.name}`, primitive.extensions, model);
  }

  return actor;
}

/**
 * Wraps a glTF animation with a channel-lookup helper.
 * @param {GLTFAnimation} animation
 * @returns {object} The animation wrapper (name, channels, samplers and
 *   getChannelByTargetNode)
 */
function createGLTFAnimation(animation) {
  vtkDebugMacro('Creating animation:', animation);
  const { name, channels, samplers } = animation;
  return {
    name,
    channels,
    samplers,
    // Returns every channel whose target is the given node index.
    getChannelByTargetNode(nodeIndex) {
      return this.channels.filter((c) => c.target.node === nodeIndex);
    },
  };
}

/**
 * Computes the local transformation matrix of a glTF node.
 * Uses node.matrix verbatim when present; otherwise composes it from the
 * node's TRS properties (with glTF defaults for missing components).
 * @param {GLTFNode} node - The GLTF node
 * @returns {mat4} The transformation matrix
 */
function getTransformationMatrix(node) {
  if (node.matrix != null) {
    return mat4.clone(node.matrix);
  }
  return mat4.fromRotationTranslationScale(
    mat4.create(),
    node.rotation ?? quat.create(),
    node.translation ?? vec3.create(),
    node.scale ?? vec3.fromValues(1.0, 1.0, 1.0)
  );
}

/**
 * Recursively processes a glTF node: builds actors for its mesh primitives,
 * applies punctual lights, then descends into its children with the
 * accumulated world matrix.
 * @param {GLTFnode} node - The GLTF node
 * @param {object} model The model object
 * @param {vtkActor} parentActor The parent actor, if any
 * @param {mat4} parentMatrix The parent world matrix
 */
async function processNode(
  node,
  model,
  parentActor = null,
  parentMatrix = mat4.create()
) {
  node.transform = getTransformationMatrix(node);
  const worldMatrix = mat4.multiply(
    mat4.create(),
    parentMatrix,
    node.transform
  );

  // Create actor for the current node
  if (node.mesh != null) {
    const nodeActor = await createActorFromGTLFNode(worldMatrix);
    if (parentActor) {
      nodeActor.setParentProp(parentActor);
    }
    model.actors.set(`${node.id}`, nodeActor);

    await Promise.all(
      node.mesh.primitives.map(async (primitive) => {
        const actor = await createActorFromGTLFPrimitive(
          model,
          primitive,
          worldMatrix
        );
        actor.setParentProp(nodeActor);
        model.actors.set(`${node.id}_${primitive.name}`, actor);
      })
    );
  }

  // Handle KHRLightsPunctual extension
  if (node.extensions?.KHR_lights_punctual) {
    handleKHRLightsPunctual(
      node.extensions.KHR_lights_punctual,
      node.transform,
      model
    );
  }

  if (
    node.children &&
    Array.isArray(node.children) &&
    node.children.length > 0
  ) {
    // Actors are keyed by the stringified node id; look the parent up once
    // before recursing (the previous per-child lookup used the raw id and
    // missed the Map entry whenever node.id was a number).
    const parent = model.actors.get(`${node.id}`);
    await Promise.all(
      node.children.map((child) =>
        processNode(child, model, parent, worldMatrix)
      )
    );
  }
}

/**
 * Builds the importer's VTK objects (animations, variants, actors, lights)
 * from the parsed glTF tree stored on the model.
 * @param {object} model - The vtk model object holding glTFTree and the
 *   collections to populate
 */
async function createVTKObjects(model) {
  model.animations = model.glTFTree.animations?.map(createGLTFAnimation);

  // Document-level extensions.
  const documentExtensions = model.glTFTree?.extensions || [];
  Object.keys(documentExtensions).forEach((extensionName) => {
    const extension = model.glTFTree.extensions[extensionName];
    switch (extensionName) {
      case 'KHR_materials_variants':
        handleKHRMaterialsVariants(extension, model);
        break;
      case 'KHR_draco_mesh_compression':
        // Handled per-primitive; nothing to do at the document level.
        break;
      default:
        vtkWarningMacro(`Unhandled extension: ${extensionName}`);
    }
  });

  // Get the sceneId to process (explicit selection wins over the default).
  const sceneId = model.sceneId ?? model.glTFTree.scene;
  const scene = model.glTFTree.scenes?.length
    ? model.glTFTree.scenes[sceneId]
    : null;

  if (scene?.nodes) {
    await Promise.all(
      scene.nodes.map((node) =>
        node
          ? processNode(node, model)
          : vtkWarningMacro(`Node not found in glTF.nodes`)
      )
    );
  } else {
    vtkWarningMacro('No valid scenes found in the glTF data');
  }
}

/**
 * Converts a glTF camera definition into a vtkCamera instance.
 * @param {GLTFCamera} glTFCamera - The GLTF camera object
 * @returns {vtkCamera} The configured VTK camera
 * @throws {Error} When the camera type is neither perspective nor orthographic
 */
function GLTFCameraToVTKCamera(glTFCamera) {
  const camera = vtkCamera.newInstance();
  switch (glTFCamera.type) {
    case 'perspective': {
      const { yfov, znear, zfar } = glTFCamera.perspective;
      camera.setParallelProjection(false);
      camera.setClippingRange(znear, zfar);
      // glTF stores the vertical field of view in radians.
      camera.setViewAngle(vtkMath.degreesFromRadians(yfov));
      break;
    }
    case 'orthographic': {
      const { ymag, znear, zfar } = glTFCamera.orthographic;
      camera.setParallelProjection(true);
      camera.setClippingRange(znear, zfar);
      camera.setParallelScale(ymag);
      break;
    }
    default:
      throw new Error('Unsupported camera type');
  }
  return camera;
}

/**
 * Applies a glTF node transform to a vtkCamera.
 * Starts from the glTF identity pose (position at the origin, +Y up,
 * looking down -Z) and pushes each vector through the matrix.
 * @param {vtkCamera} camera - The camera to update (no-op when falsy)
 * @param {*} transformMatrix - The node's world transform (no-op when falsy)
 */
function applyTransformToCamera(camera, transformMatrix) {
  if (!camera || !transformMatrix) {
    return;
  }

  // At identity, camera position is origin, +y up, -z view direction
  const position = [0, 0, 0];
  const viewUp = [0, 1, 0];
  const focus = [0, 0, -1];

  const t = vtkTransform.newInstance();
  t.setMatrix(transformMatrix);

  // Transform position
  t.transformPoint(position, position);
  // NOTE(review): viewUp and the view direction go through transformPoints
  // (a point transform, translation included) rather than a vector
  // transform — confirm this matches the intended direction handling.
  t.transformPoints(viewUp, viewUp);
  t.transformPoints(focus, focus);

  // Re-anchor the transformed view direction at the new position to obtain
  // the focal point.
  focus[0] += position[0];
  focus[1] += position[1];
  focus[2] += position[2];

  // Apply the transformed values to the camera
  camera.setPosition(position);
  camera.setFocalPoint(focus);
  camera.setViewUp(viewUp);
}

export {
applyTransformToCamera,
createPropertyFromGLTFMaterial,
parseGLTF,
createVTKObjects,
GLTFCameraToVTKCamera,
};
Utils.js
import WebworkerPromise from 'webworker-promise';
import macro from 'vtk.js/Sources/macros';
import vtkTexture from 'vtk.js/Sources/Rendering/Core/Texture';
import Worker from 'vtk.js/Sources/IO/Geometry/GLTFImporter/ORMTexture.worker';
import {
BYTES,
COMPONENTS,
ARRAY_TYPES,
GL_SAMPLER,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';

const { vtkWarningMacro, vtkErrorMacro } = macro;

/**
 * Get GL enum from sampler parameter
 * @param {*} parameter The sampler parameter ('magFilter', 'minFilter',
 *   'wrapS' or 'wrapT')
 * @returns The GL enum, or undefined for an unknown parameter
 */
export function getGLEnumFromSamplerParameter(parameter) {
  const mapping = {
    magFilter: 0x2800, // GL_TEXTURE_MAG_FILTER
    minFilter: 0x2801, // GL_TEXTURE_MIN_FILTER
    wrapS: 0x2802, // GL_TEXTURE_WRAP_S
    wrapT: 0x2803, // GL_TEXTURE_WRAP_T
  };
  return mapping[parameter];
}

/**
 * Derives the typed-array constructor and element/byte lengths for a glTF
 * accessor.
 * @param {*} accessor - The glTF accessor (componentType, type, count)
 * @param {*} bufferView - The accessor's buffer view (unused, kept for API
 *   compatibility)
 * @returns {{ArrayType: *, length: number, byteLength: number}}
 */
export function getAccessorArrayTypeAndLength(accessor, bufferView) {
  const nComponents = COMPONENTS[accessor.type];
  const nElements = accessor.count * nComponents;
  return {
    ArrayType: ARRAY_TYPES[accessor.componentType],
    length: nElements,
    byteLength: nElements * BYTES[accessor.componentType],
  };
}

/**
 * Resolves a URL against the original asset path.
 * Data/blob URIs pass through untouched; everything else (absolute,
 * host-relative and relative URLs) resolves via the URL constructor.
 * @param {*} url The URL to resolve
 * @param {*} originalPath The original path to resolve the URL against
 * @returns The resolved URL or an empty string if the URL is invalid
 */
export function resolveUrl(url, originalPath) {
  // Invalid URL
  if (typeof url !== 'string' || url === '') return '';

  try {
    // Data and blob URIs are already self-contained.
    if (url.startsWith('data:') || url.startsWith('blob:')) return url;

    // Treat an extension-less base path as a directory so relative
    // references resolve inside it.
    const baseUrl = new URL(originalPath);
    if (!baseUrl.pathname.includes('.') && !baseUrl.pathname.endsWith('/')) {
      baseUrl.pathname += '/';
    }

    // Absolute (http:, https:, //), host-relative (/...) and relative URLs
    // all resolve identically through the URL constructor.
    return new URL(url, baseUrl).href;
  } catch (error) {
    vtkErrorMacro('Error resolving URL:', error);
    return '';
  }
}

/**
 * Loads an image either from an embedded bufferView (decoded in a web
 * worker, optionally extracting a single color channel) or from a URI.
 * Results are cached on the image object, keyed by channel.
 * @param {*} image - The glTF image object (mutated to hold the cache)
 * @param {*} channel - Optional color channel to extract ('r', 'g' or 'b')
 * @param {boolean} forceReLoad - When true, bypass the cache
 * @returns {Promise<*>} The decoded bitmap / HTMLImageElement, or null when
 *   the image has neither a bufferView nor a uri
 */
export async function loadImage(image, channel, forceReLoad = false) {
  // Initialize cache if it doesn't exist
  if (!image.cache) {
    image.cache = {};
  }

  // Return cached result for the channel if available and not forced to reload
  if (!forceReLoad && image.cache[channel]) {
    return image.cache[channel];
  }

  if (image.bufferView) {
    // Create the worker only when it is actually needed: it was previously
    // instantiated unconditionally and leaked on the uri / null code paths.
    const worker = new WebworkerPromise(new Worker());
    return worker
      .postMessage({
        imageBuffer: image.bufferView.data,
        mimeType: image.mimeType,
        channel,
      })
      .then((result) => {
        // Cache the bitmap based on the channel
        image.cache[channel] = result.bitmap;
        return result.bitmap;
      })
      .finally(() => {
        worker.terminate();
      });
  }

  if (image.uri) {
    vtkWarningMacro('Falling back to image uri', image.uri);
    return new Promise((resolve, reject) => {
      const img = new Image();
      img.crossOrigin = 'Anonymous';
      img.onload = () => {
        image.cache[channel] = img; // Cache the loaded image based on the channel
        resolve(img);
      };
      img.onerror = reject;
      img.src = image.uri;
    });
  }

  return null;
}

/**
 * Builds a vtkTexture from a decoded glTF image, applying the glTF
 * sampler's wrap and filter settings when present.
 * @param {*} image - Decoded image data (ImageBitmap / HTMLImageElement)
 * @param {*} sampler - Optional glTF sampler (wrap modes and filters)
 * @param {*} extensions - Texture extensions (currently unused here)
 * @returns {vtkTexture} The configured texture
 */
export function createVTKTextureFromGLTFTexture(image, sampler, extensions) {
  const texture = vtkTexture.newInstance();
  // Apply sampler settings
  if (sampler) {
    // Only a sampler with a complete wrap pair or a complete filter pair is
    // interpreted; a partial sampler falls through to the defaults below.
    if (
      ('wrapS' in sampler && 'wrapT' in sampler) ||
      ('minFilter' in sampler && 'magFilter' in sampler)
    ) {
      // NOTE(review): wrap handling tests S and T with `||`, so mixed wrap
      // modes resolve to whichever branch matches first — confirm intended.
      if (
        sampler.wrapS === GL_SAMPLER.CLAMP_TO_EDGE ||
        sampler.wrapT === GL_SAMPLER.CLAMP_TO_EDGE
      ) {
        texture.setRepeat(false);
        texture.setEdgeClamp(true);
      } else if (
        sampler.wrapS === GL_SAMPLER.REPEAT ||
        sampler.wrapT === GL_SAMPLER.REPEAT
      ) {
        texture.setRepeat(true);
        texture.setEdgeClamp(false);
      } else {
        vtkWarningMacro('Mirrored texture wrapping is not supported!');
      }

      // Any linear(-mipmap) min or mag filter enables interpolation.
      const linearFilters = [
        GL_SAMPLER.LINEAR,
        GL_SAMPLER.LINEAR_MIPMAP_NEAREST,
        GL_SAMPLER.NEAREST_MIPMAP_LINEAR,
        GL_SAMPLER.LINEAR_MIPMAP_LINEAR,
      ];

      if (
        linearFilters.includes(sampler.minFilter) ||
        linearFilters.includes(sampler.magFilter)
      ) {
        texture.setInterpolate(true);
      }
    } else {
      // Incomplete sampler: fall back to mipmapped, smooth, clamped defaults.
      texture.setMipLevel(8);
      texture.setInterpolate(true);
      texture.setEdgeClamp(true);
    }
  }

  texture.setJsImageData(image);
  return texture;
}
index.d.ts
import { vtkAlgorithm, vtkObject } from '../../../interfaces';
import HtmlDataAccessHelper from '../../Core/DataAccessHelper/HtmlDataAccessHelper';
import HttpDataAccessHelper from '../../Core/DataAccessHelper/HttpDataAccessHelper';
import JSZipDataAccessHelper from '../../Core/DataAccessHelper/JSZipDataAccessHelper';
import LiteHttpDataAccessHelper from '../../Core/DataAccessHelper/LiteHttpDataAccessHelper';

import vtkActor from '../../../Rendering/Core/Actor';
import vtkRenderer from '../../../Rendering/Core/Renderer';
import vtkCamera from '../../../Rendering/Core/Camera';

/**
 * Options accepted by loadData() / setUrl().
 */
interface IGLTFImporterOptions {
  binary?: boolean;
  compression?: string;
  progressCallback?: any;
}

/**
 * A parsed glTF animation (keyframe channels plus their samplers).
 */
export interface IGLTFAnimation {
  id: string;
  name: string;
  channels: any[];
  samplers: any[];
}

/**
 * Mixer driving one or more glTF animations over time.
 */
export interface IGLTFAnimationMixer {
  addAnimation: (glTFAnimation: object) => void;
  play: (name: string, weight?: number) => void;
  stop: (name: string) => void;
  stopAll: () => void;
  update: (deltaTime: number) => void;
}

/**
 * KHR_materials_variants mapping entry: a material index and the variant
 * indices it applies to.
 */
export interface IGLTFMaterialVariant {
  material: number;
  variants: number[];
}

/**
 * Initial values accepted by extend() / newInstance().
 */
export interface IGLTFImporterInitialValues {}

type vtkGLTFImporterBase = vtkObject &
  Omit<
    vtkAlgorithm,
    | 'getInputData'
    | 'setInputData'
    | 'setInputConnection'
    | 'getInputConnection'
    | 'addInputConnection'
    | 'addInputData'
  >;

export interface vtkGLTFImporter extends vtkGLTFImporterBase {
  /**
   * Get the actors.
   */
  getActors(): Map<string, vtkActor>;

  /**
   * Get the animation mixer.
   */
  getAnimationMixer(): IGLTFAnimationMixer;

  /**
   * Get the animations.
   */
  getAnimations(): IGLTFAnimation[];

  /**
   * Get the base url.
   */
  getBaseURL(): string;

  /**
   * Get the cameras.
   */
  getCameras(): Map<string, vtkCamera>;

  /**
   * Get the data access helper used to fetch the asset.
   */
  getDataAccessHelper():
    | HtmlDataAccessHelper
    | HttpDataAccessHelper
    | JSZipDataAccessHelper
    | LiteHttpDataAccessHelper;

  /**
   * Get the url of the object to load.
   */
  getUrl(): string;

  /**
   * Get the variant array.
   */
  getVariants(): string[];

  /**
   * Get the variant mappings.
   */
  getVariantMappings(): Map<string, IGLTFMaterialVariant[]>;

  /**
   * Import the actors.
   */
  importActors(): void;

  /**
   * Import the animations.
   */
  importAnimations(): void;

  /**
   * Import the cameras.
   */
  importCameras(): void;

  /**
   * Import the lights.
   */
  importLights(): void;

  /**
   * Invoke the ready event.
   */
  invokeReady(): void;

  /**
   * Load the object data.
   * @param {IGLTFImporterOptions} [options]
   */
  loadData(options?: IGLTFImporterOptions): Promise<any>;

  /**
   * Register a callback invoked when the asset is fully imported.
   * @param callback
   */
  onReady(callback: () => void): void;

  /**
   * Parse data.
   * @param {String | ArrayBuffer} content The content to parse.
   */
  parse(content: string | ArrayBuffer): void;

  /**
   * Parse data as ArrayBuffer.
   * @param {ArrayBuffer} content The content to parse.
   */
  parseAsArrayBuffer(content: ArrayBuffer): void;

  /**
   * Parse data as text.
   * @param {String} content The content to parse.
   */
  parseAsText(content: string): void;

  /**
   * Standard vtkAlgorithm pipeline entry point; re-parses the cached data.
   * @param inData
   * @param outData
   */
  requestData(inData: any, outData: any): void;

  /**
   * Set the data access helper used to fetch the asset.
   * @param dataAccessHelper
   */
  setDataAccessHelper(
    dataAccessHelper:
      | HtmlDataAccessHelper
      | HttpDataAccessHelper
      | JSZipDataAccessHelper
      | LiteHttpDataAccessHelper
  ): boolean;

  /**
   * Set the url of the object to load.
   * @param {String} url the url of the object to load.
   * @param {IGLTFImporterOptions} [option] The importer options (binary,
   *   compression, progressCallback).
   */
  setUrl(url: string, option?: IGLTFImporterOptions): Promise<string | any>;

  /**
   * Set the camera id.
   * @param cameraId
   */
  setCamera(cameraId: string): void;

  /**
   * Set the Draco decoder.
   * @param decoder The Draco decoder module.
   */
  setDracoDecoder(decoder: any): void;

  /**
   * Set the vtk Renderer.
   * @param renderer
   */
  setRenderer(renderer: vtkRenderer): void;

  /**
   * Switch to a variant.
   * @param variantIndex
   */
  switchToVariant(variantIndex: number): void;
}

/**
* Method used to decorate a given object (publicAPI+model) with vtkGLTFImporter characteristics.
*
* @param publicAPI object on which methods will be bounds (public)
* @param model object on which data structure will be bounds (protected)
* @param {IGLTFImporterInitialValues} [initialValues] (default: {})
*/
export function extend(
publicAPI: object,
model: object,
initialValues?: IGLTFImporterInitialValues
): void;

/**
* Method used to create a new instance of vtkGLTFImporter
* @param {IGLTFImporterInitialValues} [initialValues] for pre-setting some of its content
*/
export function newInstance(
initialValues?: IGLTFImporterInitialValues
): vtkGLTFImporter;

/**
* Load the WASM decoder from url and set the decoderModule
* @param url
* @param binaryName
*/
export function setWasmBinary(
url: string,
binaryName: string
): Promise<boolean>;

/**
* vtkGLTFImporter can import glTF 2.0 files.
*
* The GL Transmission Format (glTF) is an API-neutral runtime asset delivery
* format. A glTF asset is represented by:
* * A JSON-formatted file (.gltf) containing a full scene description: node
* hierarchy, materials, cameras, as well as descriptor information for
* meshes, animations, and other constructs
* * Binary files (.bin) containing geometry and animation data, and other
* buffer-based data
* * Image files (.jpg, .png) for textures
*
* Supported extensions:
* * KHR_draco_mesh_compression
* * KHR_lights_punctual
* * KHR_materials_unlit
* * KHR_materials_ior
* * KHR_materials_specular
* * KHR_materials_variants
* * EXT_texture_webp
* * EXT_texture_avif
*/
export declare const vtkGLTFImporter: {
newInstance: typeof newInstance;
extend: typeof extend;
};
export default vtkGLTFImporter;
index.js
import macro from 'vtk.js/Sources/macros';

import BinaryHelper from 'vtk.js/Sources/IO/Core/BinaryHelper';
import DataAccessHelper from 'vtk.js/Sources/IO/Core/DataAccessHelper';
import vtkDracoReader from 'vtk.js/Sources/IO/Geometry/DracoReader';
import {
createVTKObjects,
parseGLTF,
GLTFCameraToVTKCamera,
applyTransformToCamera,
createPropertyFromGLTFMaterial,
} from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Reader';
import parseGLB from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Decoder';
import { createAnimationMixer } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Animations';
import { BINARY_HEADER_MAGIC } from 'vtk.js/Sources/IO/Geometry/GLTFImporter/Constants';

const { vtkDebugMacro, vtkErrorMacro } = macro;

// ----------------------------------------------------------------------------
// vtkGLTFImporter methods
// ----------------------------------------------------------------------------

/**
 * vtkGLTFImporter methods.
 * Binds the importer public API onto publicAPI/model: URL fetching, glTF/GLB
 * parsing, and import of the resulting actors, cameras, lights and
 * animations into the attached renderer.
 * @param {object} publicAPI - object on which methods are bound (public)
 * @param {object} model - object holding the importer state (protected)
 */
function vtkGLTFImporter(publicAPI, model) {
  // Set our className
  model.classHierarchy.push('vtkGLTFImporter');

  // Create default dataAccessHelper if not available
  if (!model.dataAccessHelper) {
    model.dataAccessHelper = DataAccessHelper.get('http');
  }

  // Internal method to fetch Array
  // NOTE(review): fetchBinary is called without publicAPI while fetchText
  // receives it — confirm both match the DataAccessHelper signatures.
  function fetchData(url, option = {}) {
    const { compression, progressCallback } = model;
    if (option.binary) {
      return model.dataAccessHelper.fetchBinary(url, {
        compression,
        progressCallback,
      });
    }
    return model.dataAccessHelper.fetchText(publicAPI, url, {
      compression,
      progressCallback,
    });
  }

  // Set DataSet url
  publicAPI.setUrl = (url, option = { binary: true }) => {
    model.url = url;

    // Remove the file in the URL; the remainder becomes the base for
    // resolving relative buffer/image URIs.
    const path = url.split('/');
    path.pop();
    model.baseURL = path.join('/');

    model.compression = option.compression;
    model.sceneId = option.sceneId ? option.sceneId : 0;

    // Fetch metadata
    return publicAPI.loadData({
      progressCallback: option.progressCallback,
      binary: !!option.binary,
    });
  };

  // Fetch the actual data arrays
  // NOTE(review): the parse step is chained without a rejection handler;
  // fetch failures surface only through the returned promise.
  publicAPI.loadData = (option = {}) => {
    const promise = fetchData(model.url, option);
    promise.then(publicAPI.parse);
    return promise;
  };

  // Dispatch on content type: strings take the text path, anything else is
  // treated as a binary ArrayBuffer.
  // NOTE(review): the type declarations expose parseAsArrayBuffer, but the
  // binary entry point here is named parseAsBinary — confirm intended name.
  publicAPI.parse = (content) => {
    if (typeof content === 'string') {
      publicAPI.parseAsText(content);
    } else {
      publicAPI.parseAsBinary(content);
    }
  };

  // Parse a GLB or binary-backed glTF payload, build the VTK objects and
  // fire the 'ready' event.
  publicAPI.parseAsBinary = async (content) => {
    if (!content) {
      return;
    }
    // NOTE(review): model.parseData is never assigned on this path, so this
    // guard invokes modified() on every call — confirm whether binary
    // content was meant to be cached like the text path does.
    if (content !== model.parseData) {
      publicAPI.modified();
    } else {
      return;
    }

    const glTF = {};
    const options = {
      baseUri: model.baseURL,
    };

    // The first four bytes distinguish GLB (binary header magic) from a
    // plain JSON .gltf payload.
    const magic = BinaryHelper.arrayBufferToString(
      new Uint8Array(content, 0, 4)
    );

    if (magic === BINARY_HEADER_MAGIC) {
      const { json, buffers } = parseGLB(content);
      vtkDebugMacro('Loaded GLB', json, buffers);
      glTF.glbBuffers = buffers;
      glTF.json = json;
    } else {
      glTF.json = JSON.parse(BinaryHelper.arrayBufferToString(content));
    }

    // Only glTF >= 2.0 assets are supported.
    if (glTF.json.asset === undefined || glTF.json.asset.version[0] < 2) {
      vtkErrorMacro('Unsupported asset. glTF versions >=2.0 are supported.');
      return;
    }

    model.glTFTree = await parseGLTF(glTF, options);

    // Reset the per-import collections before rebuilding them.
    model.actors = new Map();
    model.cameras = new Map();
    model.lights = new Map();
    model.animations = [];
    model.variants = [];
    model.variantMappings = new Map();

    await createVTKObjects(model);

    model.scenes = model.glTFTree.scenes;

    publicAPI.invokeReady();
  };

  // NOTE(review): text content is only cached on the model here and never
  // decoded — confirm where text .gltf parsing is expected to happen
  // (requestData re-dispatches to parse, which returns here immediately).
  publicAPI.parseAsText = (content) => {
    if (!content) {
      return;
    }
    if (content !== model.parseData) {
      publicAPI.modified();
    } else {
      return;
    }

    model.parseData = content;
  };

  // vtkAlgorithm pipeline entry point: re-parse the cached content.
  publicAPI.requestData = (inData, outData) => {
    publicAPI.parse(model.parseData);
  };

  // Forward the Draco decoder module to the Draco reader used for
  // KHR_draco_mesh_compression primitives.
  publicAPI.setDracoDecoder = (decoder) => {
    vtkDracoReader.setDracoDecoder(decoder);
  };

  publicAPI.importActors = () => {
    // Add actors to renderer
    model.actors.forEach((actor) => model.renderer.addActor(actor));
  };

  publicAPI.importCameras = () => {
    // Set up camera
    model.glTFTree.cameras?.forEach((glTFcamera) => {
      const camera = GLTFCameraToVTKCamera(glTFcamera);
      model.cameras.set(glTFcamera.id, camera);
    });

    // Apply each camera-bearing node's transform to its camera.
    model.scenes.forEach((scene) => {
      scene.nodes.forEach((node) => {
        const camera = model.cameras.get(node.camera?.id);
        if (camera) {
          applyTransformToCamera(camera, node.transform);
        }
      });
    });
  };

  publicAPI.importAnimations = () => {
    // Set up animations; the mixer is only created when there is at least
    // one animation to drive.
    if (model.glTFTree.animations?.length > 0) {
      model.animationMixer = createAnimationMixer(
        model.actors,
        model.glTFTree.accessors
      );
      model.glTFTree.animations.forEach((animation) => {
        model.animationMixer.addAnimation(animation);
      });
    }
    model.animations = model.glTFTree.animations || [];
  };

  publicAPI.importLights = () => {
    // Set up lights (populated by the KHR_lights_punctual handler)
    model.lights?.forEach((light) => {
      vtkDebugMacro('Adding light', light);
      model.renderer.addLight(light);
    });
  };

  // Make the camera with the given id the renderer's active camera.
  publicAPI.setCamera = (cameraId) => {
    const camera = model.cameras.get(cameraId);

    if (!camera) {
      vtkErrorMacro(`Camera ${cameraId} not found`);
      return;
    }
    vtkDebugMacro('Setting camera', camera);
    model.renderer.setActiveCamera(camera);
  };

  // Re-apply materials on every actor that has a mapping for the requested
  // KHR_materials_variants variant index.
  publicAPI.switchToVariant = async (variantIndex) => {
    const promises = Array.from(model.actors).map(async ([nodeId, actor]) => {
      vtkDebugMacro('Switching to variant', variantIndex, 'for node', nodeId);
      const variantMappings = model.variantMappings.get(nodeId);

      if (variantMappings) {
        const mapping = variantMappings.find((m) =>
          m.variants.includes(variantIndex)
        );
        if (mapping) {
          const variantMaterial = model.glTFTree.materials[mapping.material];
          await createPropertyFromGLTFMaterial(model, variantMaterial, actor);
        }
      }
    });

    await Promise.all(promises);
  };
}

// ----------------------------------------------------------------------------
// Object factory
// ----------------------------------------------------------------------------

const DEFAULT_VALUES = {
// baseURL: null,
// dataAccessHelper: null,
// url: null,
};

// ----------------------------------------------------------------------------

/**
 * Method used to decorate a given object (publicAPI+model) with
 * vtkGLTFImporter characteristics.
 * @param {object} publicAPI - object on which methods will be bound (public)
 * @param {object} model - object on which the data structure is bound (protected)
 * @param {object} [initialValues] - pre-set model values (default: {})
 */
export function extend(publicAPI, model, initialValues = {}) {
  Object.assign(model, DEFAULT_VALUES, initialValues);

  // Build VTK API: base object, read-only accessors, setters and the
  // 'ready' event.
  macro.obj(publicAPI, model);
  const readOnlyFields = [
    'url',
    'baseURL',
    'actors',
    'scenes',
    'cameras',
    'animations',
    'animationMixer',
    'variants',
    'variantMappings',
  ];
  macro.get(publicAPI, model, readOnlyFields);
  macro.set(publicAPI, model, ['renderer', 'dracoDecoder']);
  macro.event(publicAPI, model, 'ready');

  // vtkGLTFImporter methods
  vtkGLTFImporter(publicAPI, model);

  // To support destructuring
  model.compression = model.compression || null;
  model.progressCallback = model.progressCallback || null;
}
// ----------------------------------------------------------------------------

export const newInstance = macro.newInstance(extend, 'vtkGLTFImporter');

// ----------------------------------------------------------------------------

export default {
extend,
newInstance,
};