import { strFromU8, unzipSync } from 'fflate';
import DataAccessHelper from 'vtk.js/Sources/IO/Core/DataAccessHelper'; import macro from 'vtk.js/Sources/macros';
import 'vtk.js/Sources/IO/Core/DataAccessHelper/HttpDataAccessHelper';
// ----------------------------------------------------------------------------
// vtkZipMultiDataSetReader methods
// ----------------------------------------------------------------------------

function vtkZipMultiDataSetReader(publicAPI, model) {
  // Set our className
  model.classHierarchy.push('vtkZipMultiDataSetReader');

  // Default to the HTTP data access helper when none was provided.
  if (!model.dataAccessHelper) {
    model.dataAccessHelper = DataAccessHelper.get('http');
  }

  // Internal helper: download `url` as a binary payload (ArrayBuffer).
  function fetchData(url, option = {}) {
    return model.dataAccessHelper.fetchBinary(url, option);
  }

  /**
   * Set the url of the zip archive to load and trigger loading.
   * Also derives model.baseURL: the url with its last path segment removed.
   *
   * @param {String} url - location of the zip archive
   * @param {Object} option - forwarded to the data access helper
   * @returns {Promise} resolved once the archive is fetched and parsed
   */
  publicAPI.setUrl = (url, option = {}) => {
    model.url = url;

    // Drop the filename so baseURL points at the containing directory.
    const path = url.split('/');
    path.pop();
    model.baseURL = path.join('/');

    return publicAPI.loadData(option);
  };

  /**
   * Fetch the archive at model.url and parse its contents.
   *
   * @param {Object} option - forwarded to the data access helper
   * @returns {Promise}
   */
  publicAPI.loadData = (option = {}) =>
    fetchData(model.url, option).then(publicAPI.parseAsArrayBuffer);

  /**
   * Parse a zip archive provided as an ArrayBuffer.
   * Fills model.datasets from a `datasets.json` entry and model.arrays from
   * entries whose names match `array_<type>_<id>`.
   *
   * @param {ArrayBuffer} arrayBuffer - raw zip archive content
   * @returns {Promise} rejects with an Error when input is missing or invalid
   */
  publicAPI.parseAsArrayBuffer = (arrayBuffer) => {
    if (!arrayBuffer) {
      return Promise.reject(new Error('No ArrayBuffer to parse'));
    }

    return new Promise((resolve, reject) => {
      model.arrays = [];
      try {
        // Decompress inside the try so a corrupt archive is rejected through
        // the same explicit path as a malformed entry (previously this relied
        // on the Promise executor implicitly catching the synchronous throw).
        const decompressedFiles = unzipSync(new Uint8Array(arrayBuffer));
        Object.entries(decompressedFiles).forEach(
          ([relativePath, fileData]) => {
            if (relativePath.match(/datasets\.json$/i)) {
              model.datasets = JSON.parse(strFromU8(fileData));
            }
            if (relativePath.match(/array_[a-zA-Z]+_[0-9]+/)) {
              // Entry name encodes "<...>_<type>_<id>"; keep the last two.
              const [type, id] = relativePath.split('_').slice(-2);
              model.arrays[id] = macro.newTypedArray(type, fileData.buffer);
            }
          }
        );
        resolve();
      } catch (err) {
        reject(err);
      }
    });
  };

  // vtkAlgorithm request handler — intentionally a no-op for this reader.
  publicAPI.requestData = (inData, outData) => {};
}
// ----------------------------------------------------------------------------
// Object factory
// ----------------------------------------------------------------------------

const DEFAULT_VALUES = {};

// ----------------------------------------------------------------------------

/**
 * Decorate publicAPI/model with vtkZipMultiDataSetReader behavior.
 *
 * @param {Object} publicAPI - object on which methods are attached
 * @param {Object} model - internal state of the instance
 * @param {Object} initialValues - user-provided overrides of DEFAULT_VALUES
 */
export function extend(publicAPI, model, initialValues = {}) {
  Object.assign(model, DEFAULT_VALUES, initialValues);

  // Generic object/algorithm methods
  macro.obj(publicAPI, model);
  macro.algo(publicAPI, model, 0, 0);

  // Accessors (read/write helper is settable; url/baseURL are read-only)
  macro.setGet(publicAPI, model, ['dataAccessHelper']);
  macro.get(publicAPI, model, ['url', 'baseURL']);

  // Object specific methods
  vtkZipMultiDataSetReader(publicAPI, model);
}
// ----------------------------------------------------------------------------

/** Factory returning a ready-to-use vtkZipMultiDataSetReader instance. */
export const newInstance = macro.newInstance(
  extend,
  'vtkZipMultiDataSetReader'
);

// ----------------------------------------------------------------------------

export default { newInstance, extend };