diff --git a/examples/3dtiles.html b/examples/3dtiles.html
index b25c160bce..1d8932bb73 100644
--- a/examples/3dtiles.html
+++ b/examples/3dtiles.html
@@ -90,7 +90,7 @@
var menuGlobe = new GuiTools('menuDiv', view, 300);
- itowns.Fetcher.json('layers/JSONLayers/Ortho.json').then(function (result) { return view.addLayer(result) });
+ itowns.Fetcher.json('layers/JSONLayers/OPENSM.json').then(function (result) { return view.addLayer(result) });
// function use :
// For preupdate Layer geometry :
@@ -172,7 +172,7 @@
if (interAttributes._BATCHID) {
var face = intersects[i].face.a;
var batchID = interAttributes._BATCHID.array[face];
- var batchTable = findBatchTableParent(intersects[i].object);
+ var batchTable = findBatchTableParent(intersects[i].object).json;
htmlInfo.innerHTML += 'Batch id: ' + batchID;
Object.keys(batchTable).map(function(objectKey) {
diff --git a/examples/file.html b/examples/file.html
new file mode 100644
index 0000000000..3ed0e70d9c
--- /dev/null
+++ b/examples/file.html
@@ -0,0 +1,72 @@
+
+
+ Itowns - File drop
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/gpx.html b/examples/gpx.html
index b1977cd0b1..d48d5f6c82 100644
--- a/examples/gpx.html
+++ b/examples/gpx.html
@@ -39,8 +39,8 @@
globeView.addEventListener(itowns.GLOBE_VIEW_EVENTS.GLOBE_INITIALIZED, function () {
console.info('Globe initialized');
itowns.Fetcher.xml('https://raw.githubusercontent.com/iTowns/iTowns2-sample-data/master/ULTRA2009.gpx').then(xml => itowns.GpxParser.parse(xml, { crs: globeView.referenceCrs })).then(function (gpx) {
- if (gpx) {
- globeView.scene.add(gpx);
+ if (gpx && gpx.object3d) {
+ globeView.scene.add(gpx.object3d);
globeView.controls.setTilt(45, true);
}
});
diff --git a/src/Core/Scheduler/Scheduler.js b/src/Core/Scheduler/Scheduler.js
index 3b48479927..2fa0ae9b2b 100644
--- a/src/Core/Scheduler/Scheduler.js
+++ b/src/Core/Scheduler/Scheduler.js
@@ -5,6 +5,8 @@
*/
import PriorityQueue from 'js-priority-queue';
+import CancelledCommandException from './CancelledCommandException';
+
import WMTSProvider from '../../Provider/WMTSProvider';
import WMSProvider from '../../Provider/WMSProvider';
import TileProvider from '../../Provider/TileProvider';
@@ -14,7 +16,14 @@ import PointCloudProvider from '../../Provider/PointCloudProvider';
import WFSProvider from '../../Provider/WFSProvider';
import RasterProvider from '../../Provider/RasterProvider';
import StaticProvider from '../../Provider/StaticProvider';
-import CancelledCommandException from './CancelledCommandException';
+import FileProvider from '../../Provider/FileProvider';
+
+import B3dmParser from '../../Parser/B3dmParser';
+import GeoJsonParser from '../../Parser/GeoJsonParser';
+import GpxParser from '../../Parser/GpxParser';
+import PntsParser from '../../Parser/PntsParser';
+import PotreeBinParser from '../../Parser/PotreeBinParser';
+import PotreeCinParser from '../../Parser/PotreeCinParser';
var instanceScheduler = null;
@@ -80,12 +89,14 @@ function Scheduler() {
this.hostQueues = new Map();
this.providers = {};
+ this.parsers = {};
this.maxConcurrentCommands = 16;
this.maxCommandsPerHost = 6;
// TODO: add an option to not instantiate default providers
this.initDefaultProviders();
+ this.initDefaultParsers();
}
Scheduler.prototype.constructor = Scheduler;
@@ -103,6 +114,17 @@ Scheduler.prototype.initDefaultProviders = function initDefaultProviders() {
this.addProtocolProvider('wfs', WFSProvider);
this.addProtocolProvider('rasterizer', RasterProvider);
this.addProtocolProvider('static', StaticProvider);
+ this.addProtocolProvider('file', FileProvider);
+};
+
+Scheduler.prototype.initDefaultParsers = function initDefaultParsers() {
+ // Register all parsers
+ this.addFormatParser(B3dmParser);
+ this.addFormatParser(GeoJsonParser);
+ this.addFormatParser(GpxParser);
+ this.addFormatParser(PntsParser);
+ this.addFormatParser(PotreeBinParser);
+ this.addFormatParser(PotreeCinParser);
};
Scheduler.prototype.runCommand = function runCommand(command, queue, executingCounterUpToDate) {
@@ -256,6 +278,29 @@ Scheduler.prototype.getProtocolProvider = function getProtocolProvider(protocol)
return this.providers[protocol];
};
+
+Scheduler.prototype.addFormatParser = function addFormatParser(parser) {
+ // eslint-disable-next-line no-console
+ console.log(`Registering format: ${parser.format}`);
+ if (typeof (parser.parse) !== 'function') {
+ throw new Error(`Can't add parser for ${parser.format}: missing a parse function.`);
+ }
+ var that = this;
+ function register(format) {
+ that.parsers[format] = that.parsers[format] || [];
+ that.parsers[format].push(parser);
+ }
+ register(parser.format);
+ parser.extensions.forEach(register);
+ parser.mimetypes.forEach(register);
+};
+
+Scheduler.prototype.getFormatParser = function getFormatParser(format) {
+ var parsers = this.parsers[format];
+ // format disambiguation is arbitrary for now: the first registered parser wins
+ return parsers ? parsers[0] : undefined;
+};
+
Scheduler.prototype.commandsWaitingExecutionCount = function commandsWaitingExecutionCount() {
let sum = this.defaultQueue.storage.length + this.defaultQueue.counters.executing;
for (var q of this.hostQueues) {
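
Note: a minimal sketch (not part of this patch) of how a format parser can be registered and looked up through the new Scheduler API. The CsvParser object is illustrative; only the fields read by addFormatParser (parse, format, extensions, mimetypes) plus fetchtype, which providers use to pick a Fetcher method, are meaningful.

    const CsvParser = {
        parse(text /* , options */) {
            // naive CSV split, enough to illustrate the parser contract
            return Promise.resolve(text.trim().split('\n').map(line => line.split(',')));
        },
        format: 'csv',
        extensions: ['csv'],
        mimetypes: ['text/csv'],
        fetchtype: 'text',
    };

    const scheduler = view.mainLoop.scheduler; // assumes an existing iTowns view
    scheduler.addFormatParser(CsvParser);
    // every registered key ('csv', 'text/csv') resolves to the parser;
    // the first registration wins when several parsers claim the same key
    scheduler.getFormatParser('text/csv').parse('1,2\n3,4').then(rows => console.log(rows));
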
diff --git a/src/Parser/3dTilesHeaderParser.js b/src/Parser/3dTilesHeaderParser.js
new file mode 100644
index 0000000000..ce36ad43d0
--- /dev/null
+++ b/src/Parser/3dTilesHeaderParser.js
@@ -0,0 +1,81 @@
+import utf8Decoder from '../utils/Utf8Decoder';
+
+export default {
+ /** @module 3dTilesHeaderParser */
+ /** Parse buffer and convert to JSON
+ * @function parse
+ * @param {ArrayBuffer} buffer - the input buffer.
+ * @param {Object} options - additional properties.
+ * @param {string} options.magic - expected magic string ('b3dm', 'i3dm', 'pnts' or 'cmpt').
+ * @return {Promise} - a promise that resolves with a 3dTilesHeader object.
+ *
+ */
+ parse(buffer, options) {
+ if (!buffer) {
+ throw new Error('No array buffer provided.');
+ }
+ const header = {};
+ const parsed = { header };
+
+ // Magic type is unsigned char [4]
+ header.magic = utf8Decoder.decode(new Uint8Array(buffer, 0, 4));
+ if (header.magic !== options.magic) {
+ throw new Error(`Invalid 3d-tiles header: "${header.magic}" ("${options.magic}" was expected).`);
+ }
+
+ const view = new DataView(buffer);
+ let byteOffset = 4;
+
+ header.version = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ header.byteLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ if (options.magic === 'cmpt') {
+ header.tilesLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+ } else {
+ header.FTJSONLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ header.FTBinaryLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ header.BTJSONLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ header.BTBinaryLength = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+
+ if (options.magic === 'i3dm') {
+ header.gltfFormat = view.getUint32(byteOffset, true);
+ byteOffset += Uint32Array.BYTES_PER_ELEMENT;
+ }
+ }
+
+ parsed.header = header;
+
+ if (header.FTJSONLength > 0) {
+ const json = new Uint8Array(buffer, byteOffset, header.FTJSONLength);
+ byteOffset += header.FTJSONLength;
+ parsed.featureTable = { json: JSON.parse(utf8Decoder.decode(json)) };
+ if (header.FTBinaryLength > 0) {
+ parsed.featureTable.buffer = buffer.slice(byteOffset, byteOffset + header.FTBinaryLength);
+ byteOffset += header.FTBinaryLength;
+ }
+ }
+
+ if (header.BTJSONLength > 0) {
+ const json = new Uint8Array(buffer, byteOffset, header.BTJSONLength);
+ byteOffset += header.BTJSONLength;
+ parsed.batchTable = { json: JSON.parse(utf8Decoder.decode(json)) };
+ if (header.BTBinaryLength > 0) {
+ parsed.batchTable.buffer = buffer.slice(byteOffset, byteOffset + header.BTBinaryLength);
+ byteOffset += header.BTBinaryLength;
+ }
+ }
+ parsed.byteOffset = byteOffset;
+ return Promise.resolve(parsed);
+ },
+};
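
Usage sketch for the new header parser (not part of the patch; `buffer` is assumed to hold a pnts tile fetched beforehand):

    import $3dTilesHeaderParser from './3dTilesHeaderParser';

    $3dTilesHeaderParser.parse(buffer, { magic: 'pnts' }).then((data) => {
        // data.header       -> { magic, version, byteLength, FTJSONLength, ... }
        // data.featureTable -> { json, buffer } when the tile has a feature table
        // data.batchTable   -> { json, buffer } when the tile has a batch table
        // data.byteOffset   -> where the payload following the tables starts
        console.log(data.header.version, data.byteOffset);
    });
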
diff --git a/src/Parser/B3dmParser.js b/src/Parser/B3dmParser.js
index 8f35999f60..8fbfc4f96c 100644
--- a/src/Parser/B3dmParser.js
+++ b/src/Parser/B3dmParser.js
@@ -1,9 +1,9 @@
import * as THREE from 'three';
import GLTFLoader from './GLTFLoader';
import LegacyGLTFLoader from './LegacyGLTFLoader';
-import BatchTableParser from './BatchTableParser';
import Capabilities from '../Core/System/Capabilities';
import shaderUtils from '../Renderer/Shader/ShaderUtils';
+import $3dTilesHeaderParser from './3dTilesHeaderParser';
import utf8Decoder from '../utils/Utf8Decoder';
const matrixChangeUpVectorZtoY = (new THREE.Matrix4()).makeRotationX(Math.PI / 2);
@@ -63,64 +63,25 @@ export default {
*
*/
parse(buffer, options) {
- const gltfUpAxis = options.gltfUpAxis;
- const urlBase = options.urlBase;
- if (!buffer) {
- throw new Error('No array buffer provided.');
- }
-
- const view = new DataView(buffer, 4); // starts after magic
-
- let byteOffset = 0;
- const b3dmHeader = {};
-
- // Magic type is unsigned char [4]
- b3dmHeader.magic = utf8Decoder.decode(new Uint8Array(buffer, 0, 4));
- if (b3dmHeader.magic) {
- // Version, byteLength, batchTableJSONByteLength, batchTableBinaryByteLength and batchTable types are uint32
- b3dmHeader.version = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- b3dmHeader.byteLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- b3dmHeader.FTJSONLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- b3dmHeader.FTBinaryLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- b3dmHeader.BTJSONLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- b3dmHeader.BTBinaryLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- const promises = [];
- if (b3dmHeader.BTJSONLength > 0) {
- const sizeBegin = 28 + b3dmHeader.FTJSONLength + b3dmHeader.FTBinaryLength;
- promises.push(BatchTableParser.parse(
- buffer.slice(sizeBegin, b3dmHeader.BTJSONLength + sizeBegin)));
- } else {
- promises.push(Promise.resolve({}));
- }
- // TODO: missing feature table
- promises.push(new Promise((resolve/* , reject */) => {
- const onload = (gltf) => {
+ options = options || {};
+ options.magic = 'b3dm';
+ return $3dTilesHeaderParser.parse(buffer, options).then((data) => {
+ const gltfBuffer = buffer.slice(data.byteOffset);
+ const version = new DataView(gltfBuffer, 0, 20).getUint32(4, true);
+ return new Promise((resolve/* , reject */) => {
+ function onload(gltf) {
for (const scene of gltf.scenes) {
scene.traverse(filterUnsupportedSemantics);
}
// Rotation managed
- if (gltfUpAxis === undefined || gltfUpAxis === 'Y') {
+ if (options.gltfUpAxis === 'Y') {
gltf.scene.applyMatrix(matrixChangeUpVectorZtoY);
- } else if (gltfUpAxis === 'X') {
+ } else if (options.gltfUpAxis === 'X') {
gltf.scene.applyMatrix(matrixChangeUpVectorZtoX);
}
// RTC managed
- applyOptionalCesiumRTC(buffer.slice(28 + b3dmHeader.FTJSONLength +
- b3dmHeader.FTBinaryLength + b3dmHeader.BTJSONLength +
- b3dmHeader.BTBinaryLength), gltf.scene);
+ applyOptionalCesiumRTC(gltfBuffer, gltf.scene);
const init_mesh = function f_init(mesh) {
mesh.frustumCulled = false;
@@ -145,24 +106,21 @@ export default {
};
gltf.scene.traverse(init_mesh);
- resolve(gltf);
- };
-
- const gltfBuffer = buffer.slice(28 + b3dmHeader.FTJSONLength +
- b3dmHeader.FTBinaryLength + b3dmHeader.BTJSONLength +
- b3dmHeader.BTBinaryLength);
-
- const version = new DataView(gltfBuffer, 0, 20).getUint32(4, true);
-
+ resolve({
+ object3d: gltf.scene,
+ batchTable: data.batchTable,
+ });
+ }
if (version === 1) {
- legacyGLTFLoader.parse(gltfBuffer, onload, urlBase);
+ legacyGLTFLoader.parse(gltfBuffer, onload, options.urlBase);
} else {
- glTFLoader.parse(gltfBuffer, urlBase, onload);
+ glTFLoader.parse(gltfBuffer, options.urlBase, onload);
}
- }));
- return Promise.all(promises).then(values => ({ gltf: values[1], batchTable: values[0] }));
- } else {
- throw new Error('Invalid b3dm file.');
- }
+ });
+ });
},
+ format: '3d-tiles/b3dm',
+ extensions: ['b3dm'],
+ mimetypes: ['application/octet-stream'],
+ fetchtype: 'arrayBuffer',
};
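
Sketch of the new B3dmParser contract (illustrative; `buffer`, the urlBase value and `scene` are assumptions): the promise now resolves with { object3d, batchTable } instead of { gltf, batchTable }.

    import B3dmParser from './B3dmParser';

    B3dmParser.parse(buffer, { gltfUpAxis: 'Y', urlBase: 'http://example.com/tiles/' })
        .then((result) => {
            scene.add(result.object3d); // the glTF scene, already patched for iTowns
            if (result.batchTable) {
                console.log(result.batchTable.json); // batch table decoded by 3dTilesHeaderParser
            }
        });
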
diff --git a/src/Parser/BatchTableParser.js b/src/Parser/BatchTableParser.js
deleted file mode 100644
index cd009249ec..0000000000
--- a/src/Parser/BatchTableParser.js
+++ /dev/null
@@ -1,16 +0,0 @@
-import utf8Decoder from '../utils/Utf8Decoder';
-
-export default {
- /** @module BatchTableParser */
- /** Parse batch table buffer and convert to JSON
- * @function parse
- * @param {ArrayBuffer} buffer - the batch table buffer.
- * @return {Promise} - a promise that resolves with a JSON object.
- *
- */
- parse(buffer) {
- const content = utf8Decoder.decode(new Uint8Array(buffer));
- const json = JSON.parse(content);
- return Promise.resolve(json);
- },
-};
diff --git a/src/Parser/GeoJsonParser.js b/src/Parser/GeoJsonParser.js
index 514e1cbd21..41b9e2b13d 100644
--- a/src/Parser/GeoJsonParser.js
+++ b/src/Parser/GeoJsonParser.js
@@ -256,9 +256,9 @@ export default {
/**
* Parse a GeoJSON file content and return a Feature or an array of Features.
- * @param {string} json - The GeoJSON file content to parse.
+ * @param {object|string} json - The GeoJSON file content to parse.
* @param {object} options - options controlling the parsing
- * @param {string} options.crsOut - The CRS to convert the input coordinates to.
+ * @param {string} options.crs - The CRS to convert the input coordinates to.
* @param {string} options.crsIn - override the data crs
* @param {Extent=} options.filteringExtent - Optional filter to reject features
* outside of this extent.
@@ -268,19 +268,23 @@ export default {
* @returns {Promise} - a promise resolving with a Feature or an array of Features
*/
parse(json, options = {}) {
- const crsOut = options.crsOut;
const filteringExtent = options.filteringExtent;
if (typeof (json) === 'string') {
json = JSON.parse(json);
}
- options.crsIn = options.crsIn || readCRS(json);
+ const crsIn = options.crsIn || readCRS(json);
+ const crsOut = options.crs;
switch (json.type.toLowerCase()) {
case 'featurecollection':
- return Promise.resolve(readFeatureCollection(options.crsIn, crsOut, json, filteringExtent, options));
+ return Promise.resolve(readFeatureCollection(crsIn, crsOut, json, filteringExtent, options));
case 'feature':
- return Promise.resolve(readFeature(options.crsIn, crsOut, json, filteringExtent, options));
+ return Promise.resolve(readFeature(crsIn, crsOut, json, filteringExtent, options));
default:
throw new Error(`Unsupported GeoJSON type: '${json.type}'`);
}
},
+ format: 'geojson',
+ extensions: ['json', 'geojson'],
+ mimetypes: ['application/json', 'application/geojson'],
+ fetchtype: 'json',
};
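
Example of the renamed option (`crsOut` becomes `crs`); the inline GeoJSON is illustrative, the option names mirror the unit tests below.

    import GeoJsonParser from './GeoJsonParser';

    const geojson = {
        type: 'FeatureCollection',
        features: [{
            type: 'Feature',
            geometry: { type: 'Point', coordinates: [2.33, 48.86] },
            properties: {},
        }],
    };

    GeoJsonParser.parse(geojson, { crsIn: 'EPSG:4326', crs: 'EPSG:4326', buildExtent: true })
        .then(collection => console.log(collection));
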
diff --git a/src/Parser/GpxParser.js b/src/Parser/GpxParser.js
index 5e5d721d58..d7084e4a36 100644
--- a/src/Parser/GpxParser.js
+++ b/src/Parser/GpxParser.js
@@ -166,14 +166,14 @@ function _gpxToMesh(gpxXML, options = {}) {
// gpxMesh is static data, it doesn't need matrix update
gpxMesh.matrixAutoUpdate = false;
- return gpxMesh;
+ return { object3d: gpxMesh };
}
export default {
/** @module GpxParser */
/** Parse gpx file and convert to THREE.Mesh
* @function parse
- * @param {string} xml - the gpx file or xml.
+ * @param {string | XMLDocument} xml - the gpx file or xml.
* @param {Object=} options - additional properties.
* @param {string} options.crs - the default CRS of Three.js coordinates. Should be a cartesian CRS.
* @param {boolean=} [options.enablePin=true] - draw pin for way points.
@@ -196,4 +196,8 @@ export default {
}
return Promise.resolve(_gpxToMesh(xml, options));
},
+ format: 'gpx',
+ extensions: ['gpx', 'xml'],
+ mimetypes: ['application/gpx+xml', 'application/gpx'],
+ fetchtype: 'xml',
};
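
Sketch matching the updated gpx.html example above: the parser now resolves with { object3d } rather than a bare THREE object (the URL is illustrative; `view` is an existing GlobeView).

    itowns.Fetcher.xml('http://example.com/track.gpx')
        .then(xml => itowns.GpxParser.parse(xml, { crs: view.referenceCrs }))
        .then((gpx) => {
            if (gpx && gpx.object3d) {
                view.scene.add(gpx.object3d);
            }
        });
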
diff --git a/src/Parser/PntsParser.js b/src/Parser/PntsParser.js
index 8c421b0beb..9f5ae98936 100644
--- a/src/Parser/PntsParser.js
+++ b/src/Parser/PntsParser.js
@@ -1,117 +1,71 @@
import * as THREE from 'three';
-import BatchTableParser from './BatchTableParser';
-import utf8Decoder from '../utils/Utf8Decoder';
+import $3dTilesHeaderParser from './3dTilesHeaderParser';
export default {
/** @module PntsParser */
/** Parse pnts buffer and extract THREE.Points and batch table
* @function parse
* @param {ArrayBuffer} buffer - the pnts buffer.
- * @return {Promise} - a promise that resolves with an object containig a THREE.Points (point) and a batch table (batchTable).
+ * @param {Object} options - additional properties.
+ * @return {Promise} - a promise that resolves with an object containing a THREE.Points (object3d) and a batch table (batchTable).
*
*/
- parse: function parse(buffer) {
- if (!buffer) {
- throw new Error('No array buffer provided.');
- }
- const view = new DataView(buffer);
-
- let byteOffset = 0;
- const pntsHeader = {};
- let batchTable = {};
- let point = {};
-
- // Magic type is unsigned char [4]
- pntsHeader.magic = utf8Decoder.decode(new Uint8Array(buffer, byteOffset, 4));
- byteOffset += 4;
-
- if (pntsHeader.magic) {
- // Version, byteLength, batchTableJSONByteLength, batchTableBinaryByteLength and batchTable types are uint32
- pntsHeader.version = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- pntsHeader.byteLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- pntsHeader.FTJSONLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- pntsHeader.FTBinaryLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- pntsHeader.BTJSONLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- pntsHeader.BTBinaryLength = view.getUint32(byteOffset, true);
- byteOffset += Uint32Array.BYTES_PER_ELEMENT;
-
- // binary table
- if (pntsHeader.FTBinaryLength > 0) {
- point = parseFeatureBinary(buffer, byteOffset, pntsHeader.FTJSONLength);
- }
-
- // batch table
- if (pntsHeader.BTJSONLength > 0) {
- const sizeBegin = 28 + pntsHeader.FTJSONLength + pntsHeader.FTBinaryLength;
- batchTable = BatchTableParser.parse(
- buffer.slice(sizeBegin, pntsHeader.BTJSONLength + sizeBegin));
- }
-
- const pnts = { point, batchTable };
- return Promise.resolve(pnts);
- } else {
- throw new Error('Invalid pnts file.');
- }
+ parse(buffer, options) {
+ options = options || {};
+ options.magic = 'pnts';
+ return $3dTilesHeaderParser.parse(buffer, options).then(data => ({
+ batchTable: data.batchTable,
+ object3d: parseFeatureTable(data.featureTable),
+ }));
},
+ format: '3d-tiles/pnts',
+ extensions: ['pnts'],
+ mimetypes: ['application/octet-stream'],
+ fetchtype: 'arrayBuffer',
};
-function parseFeatureBinary(array, byteOffset, FTJSONLength) {
+function parseFeatureTable(featureTable) {
+ if (!featureTable) return undefined;
// Init geometry
const geometry = new THREE.BufferGeometry();
const material = new THREE.PointsMaterial({ size: 0.05, vertexColors: THREE.VertexColors, sizeAttenuation: true });
+ const json = featureTable.json;
+ const buffer = featureTable.buffer;
+ const POINTS_LENGTH = json.POINTS_LENGTH || 0;
- // init Array feature binary
- const subArrayJson = utf8Decoder.decode(new Uint8Array(array, byteOffset, FTJSONLength));
- const parseJSON = JSON.parse(subArrayJson);
- let lengthFeature;
- if (parseJSON.POINTS_LENGTH) {
- lengthFeature = parseJSON.POINTS_LENGTH;
- }
- if (parseJSON.POSITION) {
- const byteOffsetPos = (parseJSON.POSITION.byteOffset + subArrayJson.length + byteOffset);
- const positionArray = new Float32Array(array, byteOffsetPos, lengthFeature * 3);
+ if (json.POSITION) {
+ const positionArray = new Float32Array(buffer, json.POSITION.byteOffset, POINTS_LENGTH * 3);
geometry.addAttribute('position', new THREE.BufferAttribute(positionArray, 3));
}
- if (parseJSON.RGB) {
- const byteOffsetCol = parseJSON.RGB.byteOffset + subArrayJson.length + byteOffset;
- const colorArray = new Uint8Array(array, byteOffsetCol, lengthFeature * 3);
+ if (json.RGB) {
+ const colorArray = new Uint8Array(buffer, json.RGB.byteOffset, POINTS_LENGTH * 3);
geometry.addAttribute('color', new THREE.BufferAttribute(colorArray, 3, true));
}
- if (parseJSON.POSITION_QUANTIZED) {
+ if (json.POSITION_QUANTIZED) {
throw new Error('For pnts loader, POSITION_QUANTIZED: not yet managed');
}
- if (parseJSON.RGBA) {
+ if (json.RGBA) {
throw new Error('For pnts loader, RGBA: not yet managed');
}
- if (parseJSON.RGB565) {
+ if (json.RGB565) {
throw new Error('For pnts loader, RGB565: not yet managed');
}
- if (parseJSON.NORMAL) {
+ if (json.NORMAL) {
throw new Error('For pnts loader, NORMAL: not yet managed');
}
- if (parseJSON.NORMAL_OCT16P) {
+ if (json.NORMAL_OCT16P) {
throw new Error('For pnts loader, NORMAL_OCT16P: not yet managed');
}
- if (parseJSON.BATCH_ID) {
+ if (json.BATCH_ID) {
throw new Error('For pnts loader, BATCH_ID: not yet managed');
}
// creation points with geometry and material
const points = new THREE.Points(geometry, material);
- points.realPointCount = lengthFeature;
+ points.realPointCount = POINTS_LENGTH;
// Add RTC feature
- if (parseJSON.RTC_CENTER) {
- points.position.fromArray(parseJSON.RTC_CENTER);
+ if (json.RTC_CENTER) {
+ points.position.fromArray(json.RTC_CENTER);
}
return points;
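
Usage sketch for the refactored PntsParser (`buffer` and `scene` are assumptions): the THREE.Points is now exposed as `object3d` instead of `point`.

    import PntsParser from './PntsParser';

    PntsParser.parse(buffer).then((result) => {
        if (result.object3d) {
            scene.add(result.object3d); // THREE.Points built from the feature table
        }
        if (result.batchTable) {
            console.log(result.batchTable.json);
        }
    });
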
diff --git a/src/Parser/PotreeBinParser.js b/src/Parser/PotreeBinParser.js
index f9493e5adc..d1329cd5b9 100644
--- a/src/Parser/PotreeBinParser.js
+++ b/src/Parser/PotreeBinParser.js
@@ -7,7 +7,7 @@ export default {
/** Parse .bin PotreeConverter format and convert to THREE.Points
* @function parse
* @param {ArrayBuffer} buffer - the bin buffer.
- * @return {Promise} a promise that resolves with a THREE.Points.
+ * @return {Promise} a promise that resolves as { object3d: THREE.Points }.
*
*/
parse: function parse(buffer) {
@@ -57,6 +57,10 @@ export default {
points.realPointCount = numPoints;
points.tightbbox = tightbbox;
- return Promise.resolve(points);
+ return Promise.resolve({ object3d: points });
},
+ format: 'potree/bin',
+ extensions: ['bin'],
+ mimetypes: ['binary/octet-stream'],
+ fetchtype: 'arrayBuffer',
};
diff --git a/src/Parser/PotreeCinParser.js b/src/Parser/PotreeCinParser.js
index a5dc11b78f..462f2887c5 100644
--- a/src/Parser/PotreeCinParser.js
+++ b/src/Parser/PotreeCinParser.js
@@ -6,7 +6,7 @@ export default {
/** Parse .cin PotreeConverter format (see {@link https://github.com/peppsac/PotreeConverter/tree/custom_bin}) and convert to THREE.Points
* @function parse
* @param {ArrayBuffer} buffer - the cin buffer.
- * @return {Promise} - a promise that resolves with a THREE.Points.
+ * @return {Promise} - a promise that resolves as { object3d: THREE.Points }.
*
*/
parse: function parse(buffer) {
@@ -37,6 +37,10 @@ export default {
points.realPointCount = numPoints;
points.tightbbox = tightbbox;
- return Promise.resolve(points);
+ return Promise.resolve({ object3d: points });
},
+ format: 'potree/cin',
+ extensions: ['cin'],
+ mimetypes: ['binary/octet-stream'],
+ fetchtype: 'arrayBuffer',
};
diff --git a/src/Provider/3dTilesProvider.js b/src/Provider/3dTilesProvider.js
index f1b043ab0d..51d80ee2e5 100644
--- a/src/Provider/3dTilesProvider.js
+++ b/src/Provider/3dTilesProvider.js
@@ -1,11 +1,8 @@
import * as THREE from 'three';
-import B3dmParser from '../Parser/B3dmParser';
-import PntsParser from '../Parser/PntsParser';
import Fetcher from './Fetcher';
import OBB from '../Renderer/ThreeExtended/OBB';
import Extent from '../Core/Geographic/Extent';
import { init3dTilesLayer } from '../Process/3dTilesProcessing';
-import utf8Decoder from '../utils/Utf8Decoder';
function $3dTilesIndex(tileset, baseURL) {
let counter = 0;
@@ -114,26 +111,6 @@ function getBox(volume, inverseTileTransform) {
}
}
-function b3dmToMesh(data, layer, url) {
- const urlBase = THREE.LoaderUtils.extractUrlBase(url);
- const options = {
- gltfUpAxis: layer.asset.gltfUpAxis,
- urlBase,
- overrideMaterials: layer.overrideMaterials,
- doNotPatchMaterial: layer.doNotPatchMaterial,
- opacity: layer.opacity,
- };
- return B3dmParser.parse(data, options).then((result) => {
- const batchTable = result.batchTable;
- const object3d = result.gltf.scene;
- return { batchTable, object3d };
- });
-}
-
-function pntsParse(data) {
- return PntsParser.parse(data).then(result => ({ object3d: result.point }));
-}
-
function configureTile(tile, layer, metadata, parent) {
tile.frustumCulled = false;
tile.layer = layer.id;
@@ -158,59 +135,57 @@ function configureTile(tile, layer, metadata, parent) {
}
function executeCommand(command) {
+ const scheduler = command.view.mainLoop.scheduler;
const layer = command.layer;
const metadata = command.metadata;
const tile = new THREE.Object3D();
configureTile(tile, layer, metadata, command.requester);
const path = metadata.content ? metadata.content.url : undefined;
- const setLayer = (obj) => {
+ function setLayer(obj) {
obj.layers.set(layer.threejsLayer);
+ }
+ if (!path) {
+ tile.traverse(setLayer);
+ return Promise.resolve(tile);
+ }
+ // Check if we have relative or absolute url (with tileset's lopocs for example)
+ const url = path.startsWith('http') ? path : metadata.baseURL + path;
+ const format = path.substr(path.lastIndexOf('.') + 1);
+
+ const options = {
+ gltfUpAxis: layer.asset.gltfUpAxis || 'Y',
+ urlBase: THREE.LoaderUtils.extractUrlBase(url),
+ overrideMaterials: layer.overrideMaterials,
+ doNotPatchMaterial: layer.doNotPatchMaterial,
+ opacity: layer.opacity,
};
- if (path) {
- // Check if we have relative or absolute url (with tileset's lopocs for example)
- const url = path.startsWith('http') ? path : metadata.baseURL + path;
- const supportedFormats = {
- b3dm: b3dmToMesh,
- pnts: pntsParse,
- };
- return Fetcher.arrayBuffer(url, layer.networkOptions).then((result) => {
- if (result !== undefined) {
- let func;
- const magic = utf8Decoder.decode(new Uint8Array(result, 0, 4));
- if (magic[0] === '{') {
- result = JSON.parse(utf8Decoder.decode(new Uint8Array(result)));
- const newPrefix = url.slice(0, url.lastIndexOf('/') + 1);
- layer.tileIndex.extendTileset(result, metadata.tileId, newPrefix);
- } else if (magic == 'b3dm') {
- func = supportedFormats.b3dm;
- } else if (magic == 'pnts') {
- func = supportedFormats.pnts;
- } else {
- Promise.reject(`Unsupported magic code ${magic}`);
- }
- if (func) {
- // TODO: request should be delayed if there is a viewerRequestVolume
- return func(result, layer, url).then((content) => {
- tile.content = content.object3d;
- if (content.batchTable) {
- tile.batchTable = content.batchTable;
- }
- tile.add(content.object3d);
- tile.traverse(setLayer);
- return tile;
- });
- }
- }
+
+ if (format === 'json') {
+ return Fetcher.json(url, layer.networkOptions).then((json) => {
+ layer.tileIndex.extendTileset(json, metadata.tileId, options.urlBase);
tile.traverse(setLayer);
return tile;
});
- } else {
- return new Promise((resolve) => {
+ }
+ const parser = scheduler.getFormatParser(format);
+ if (!parser) {
+ return Promise.reject(`Unsupported extension ${format} for file ${url} in 3d-tiles`);
+ }
+
+ return Fetcher[parser.fetchtype](url, layer.networkOptions)
+ .then(data => parser.parse(data, options))
+ .then((content) => {
+ if (content) {
+ tile.content = content.object3d;
+ if (content.batchTable) {
+ tile.batchTable = content.batchTable;
+ }
+ tile.add(content.object3d);
+ }
tile.traverse(setLayer);
- resolve(tile);
+ return tile;
});
- }
}
export default {
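
Illustration (not in the patch) of the dispatch now performed by executeCommand: the content URL's extension selects a registered parser, and the parser's fetchtype selects the matching Fetcher method. The URL and `view` are assumptions.

    const url = 'http://example.com/tileset/tile0.b3dm';  // illustrative URL
    const format = url.substr(url.lastIndexOf('.') + 1);  // -> 'b3dm'
    const parser = view.mainLoop.scheduler.getFormatParser(format);
    if (parser) {
        itowns.Fetcher[parser.fetchtype](url)              // arrayBuffer for b3dm
            .then(data => parser.parse(data, { urlBase: url.slice(0, url.lastIndexOf('/') + 1) }))
            .then(content => console.log(content.object3d, content.batchTable));
    }
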
diff --git a/src/Provider/FileProvider.js b/src/Provider/FileProvider.js
new file mode 100644
index 0000000000..047421cff5
--- /dev/null
+++ b/src/Provider/FileProvider.js
@@ -0,0 +1,66 @@
+import * as THREE from 'three';
+import Feature2Mesh from '../Renderer/ThreeExtended/Feature2Mesh';
+
+function readFile(file, type) {
+ return new Promise((resolve, reject) => {
+ var fr = new FileReader();
+ fr.onload = () => { resolve(fr.result); };
+ fr.onerror = () => { fr.abort(); reject(new DOMException('FileReader error.')); };
+ if (type === 'arrayBuffer') {
+ fr.readAsArrayBuffer(file);
+ } else {
+ fr.readAsBinaryString(file);
+ }
+ });
+}
+
+function assignLayer(object, layer) {
+ if (object) {
+ object.layer = layer.id;
+ object.layers.set(layer.threejsLayer);
+ for (const c of object.children) {
+ assignLayer(c, layer);
+ }
+ return object;
+ }
+}
+
+export default {
+ preprocessDataLayer(layer, view, scheduler) {
+ const file = layer.file;
+ const parser = layer.parser
+ || scheduler.getFormatParser(layer.format)
+ || scheduler.getFormatParser(file.type)
+ || scheduler.getFormatParser(file.name.split('.').pop().toLowerCase());
+ if (!parser) {
+ throw new Error(`No parser available for file "${file.name}"`);
+ }
+ var options = layer.options || {};
+ options.crs = view.referenceCrs;
+
+ layer.name = layer.name || file.name;
+ layer.update = layer.update || (() => {});
+ layer.convert = layer.convert ? layer.convert : Feature2Mesh.convert(options);
+ layer.object3d = layer.object3d || new THREE.Group();
+ layer.threejsLayer = layer.threejsLayer || view.mainLoop.gfxEngine.getUniqueThreejsLayer();
+
+ function addObject(obj) {
+ if (obj && !obj.object3d) {
+ obj.object3d = layer.convert(obj);
+ }
+ if (obj && obj.object3d) {
+ layer.object3d.add(obj.object3d);
+ assignLayer(obj.object3d, layer);
+ view.camera.camera3D.layers.enable(layer.threejsLayer);
+ view.notifyChange(true);
+ } else {
+ console.warn(obj, ' has no object3d key');
+ }
+ return obj;
+ }
+ return readFile(file, parser.fetchtype).then(content => parser.parse(content, options)).then(addObject);
+ },
+
+ executeCommand(/* command */) {},
+};
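
Sketch of how the new 'file' protocol could be wired to a drop handler; everything except `protocol: 'file'` and the `file` property (which FileProvider reads) is an assumption about the hosting page and layer options.

    document.addEventListener('dragover', event => event.preventDefault());
    document.addEventListener('drop', (event) => {
        event.preventDefault();
        const file = event.dataTransfer.files[0];
        view.addLayer({
            type: 'geometry',
            protocol: 'file', // routed to FileProvider by the Scheduler
            id: file.name,
            file,             // parser picked from layer.format, file.type or the extension
        });
    });
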
diff --git a/src/Provider/PointCloudProvider.js b/src/Provider/PointCloudProvider.js
index f88c0400aa..124b596027 100644
--- a/src/Provider/PointCloudProvider.js
+++ b/src/Provider/PointCloudProvider.js
@@ -1,8 +1,6 @@
import * as THREE from 'three';
import Fetcher from './Fetcher';
import PointCloudProcessing from '../Process/PointCloudProcessing';
-import PotreeBinParser from '../Parser/PotreeBinParser';
-import PotreeCinParser from '../Parser/PotreeCinParser';
import Picking from '../Core/Picking';
// Create an A(xis)A(ligned)B(ounding)B(ox) for the child `childIndex` of one aabb.
@@ -109,6 +107,10 @@ function findChildrenByName(node, name) {
let nextuuid = 1;
function addPickingAttribute(points) {
+ if (!points) {
+ console.warn('Pointcloud tile failed to parse to an object3d');
+ return undefined;
+ }
// generate unique id for picking
const numPoints = points.geometry.attributes.position.count;
const ids = new Uint8Array(4 * numPoints);
@@ -132,19 +134,14 @@ function addPickingAttribute(points) {
return points;
}
-
function loadPointFile(layer, url) {
- return fetch(url, layer.fetchOptions).then(foo => foo.arrayBuffer()).then((ab) => {
- if (layer.metadata.customBinFormat) {
- return PotreeCinParser.parse(ab).then(result => addPickingAttribute(result));
- } else {
- return PotreeBinParser.parse(ab).then(result => addPickingAttribute(result));
- }
- });
+ return layer.fetcher(url, layer.fetchOptions)
+ .then(buf => layer.parser.parse(buf))
+ .then(obj => addPickingAttribute(obj.object3d));
}
export default {
- preprocessDataLayer(layer) {
+ preprocessDataLayer(layer, view, scheduler) {
if (!layer.file) {
layer.file = 'cloud.js';
}
@@ -187,7 +184,7 @@ export default {
// (if `scale` is defined => we're fetching files from PotreeConverter)
if (layer.metadata.scale != undefined) {
// PotreeConverter format
- layer.metadata.customBinFormat = layer.metadata.pointAttributes === 'CIN';
+ layer.format = layer.format || (layer.metadata.pointAttributes === 'CIN' ? 'cin' : 'bin');
bbox = new THREE.Box3(
new THREE.Vector3(cloud.boundingBox.lx, cloud.boundingBox.ly, cloud.boundingBox.lz),
new THREE.Vector3(cloud.boundingBox.ux, cloud.boundingBox.uy, cloud.boundingBox.uz));
@@ -196,7 +193,7 @@ export default {
layer.metadata.scale = 1;
layer.metadata.octreeDir = `itowns/${layer.table}.points`;
layer.metadata.hierarchyStepSize = 1000000; // ignore this with lopocs
- layer.metadata.customBinFormat = true;
+ layer.format = layer.format || 'cin';
let idx = 0;
for (const entry of cloud) {
@@ -209,7 +206,8 @@ export default {
new THREE.Vector3(cloud[idx].bbox.xmin, cloud[idx].bbox.ymin, cloud[idx].bbox.zmin),
new THREE.Vector3(cloud[idx].bbox.xmax, cloud[idx].bbox.ymax, cloud[idx].bbox.zmax));
}
-
+ layer.parser = scheduler.getFormatParser(layer.format);
+ layer.fetcher = Fetcher[layer.parser.fetchtype];
return parseOctree(
layer,
@@ -234,13 +232,11 @@ export default {
parseOctree(layer, layer.metadata.hierarchyStepSize, node).then(() => command.view.notifyChange(false));
}
- const extension = layer.metadata.customBinFormat ? 'cin' : 'bin';
-
// `isLeaf` is for lopocs and allows the pointcloud server to consider that the current
// node is the last one, even if we could subdivide even further.
// It's necessary because lopocs doesn't know about the hierarchy (it generates it on the fly
// when we request .hrc files)
- const url = `${node.baseurl}/r${node.name}.${extension}?isleaf=${command.isLeaf ? 1 : 0}`;
+ const url = `${node.baseurl}/r${node.name}.${layer.format}?isleaf=${command.isLeaf ? 1 : 0}`;
return loadPointFile(layer, url).then((points) => {
points.position.copy(node.bbox.min);
diff --git a/src/Provider/RasterProvider.js b/src/Provider/RasterProvider.js
index ff7ef911f0..b0f35096ba 100644
--- a/src/Provider/RasterProvider.js
+++ b/src/Provider/RasterProvider.js
@@ -105,7 +105,7 @@ export default {
const options = {
buildExtent: true,
crsIn: layer.projection,
- crsOut: parentCrs,
+ crs: parentCrs,
filteringExtent: layer.extent,
};
diff --git a/src/Provider/WFSProvider.js b/src/Provider/WFSProvider.js
index 4d521910c8..b95828cba0 100644
--- a/src/Provider/WFSProvider.js
+++ b/src/Provider/WFSProvider.js
@@ -6,18 +6,19 @@
import Extent from '../Core/Geographic/Extent';
import URLBuilder from './URLBuilder';
-import Fetcher from './Fetcher';
-import GeoJsonParser from '../Parser/GeoJsonParser';
import Feature2Mesh from '../Renderer/ThreeExtended/Feature2Mesh';
+import Fetcher from './Fetcher';
const cache = new Map();
-function preprocessDataLayer(layer) {
+function preprocessDataLayer(layer, view, scheduler) {
if (!layer.typeName) {
throw new Error('layer.typeName is required.');
}
layer.format = layer.format || 'application/json';
+ layer.parser = scheduler.getFormatParser(layer.format);
+ layer.fetch = Fetcher[layer.parser.fetchtype];
layer.crs = layer.projection || 'EPSG:4326';
layer.version = layer.version || '2.0.2';
@@ -69,9 +70,9 @@ function getFeatures(crs, tile, layer) {
layer.convert = layer.convert ? layer.convert : Feature2Mesh.convert({});
- return Fetcher.json(urld, layer.networkOptions)
+ return layer.fetch(urld, layer.networkOptions)
.then(
- geojson => GeoJsonParser.parse(geojson, { crsOut: crs, filteringExtent: tile.extent, filter: layer.filter }),
+ data => layer.parser.parse(data, { crs, filteringExtent: tile.extent, filter: layer.filter }),
(err) => {
// special handling for 400 errors, as it probably means the config is wrong
if (err.response.status == 400) {
diff --git a/test/feature2mesh_unit_test.js b/test/feature2mesh_unit_test.js
index 09c5657d65..7341d8fbc0 100644
--- a/test/feature2mesh_unit_test.js
+++ b/test/feature2mesh_unit_test.js
@@ -11,7 +11,7 @@ proj4.defs('EPSG:3946',
'+proj=lcc +lat_1=45.25 +lat_2=46.75 +lat_0=46 +lon_0=3 +x_0=1700000 +y_0=5200000 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs');
function parse() {
- return GeoJsonParser.parse(geojson, { crsIn: 'EPSG:3946', crsOut: 'EPSG:3946', buildExtent: true });
+ return GeoJsonParser.parse(geojson, { crsIn: 'EPSG:3946', crs: 'EPSG:3946', buildExtent: true });
}
function computeAreaOfMesh(mesh) {
diff --git a/test/featureUtils_unit_test.js b/test/featureUtils_unit_test.js
index 17a6068f48..fce3fc7b14 100644
--- a/test/featureUtils_unit_test.js
+++ b/test/featureUtils_unit_test.js
@@ -6,7 +6,7 @@ import Coordinates from '../src/Core/Geographic/Coordinates';
const assert = require('assert');
const geojson = require('./data/geojson/simple.geojson.json');
-const promise = GeoJsonParser.parse(geojson, { crsOut: 'EPSG:4326', buildExtent: true });
+const promise = GeoJsonParser.parse(geojson, { crs: 'EPSG:4326', buildExtent: true });
describe('FeaturesUtils', function () {
it('should correctly parse geojson', () =>