Good Day
I have a massive data collection of ~450,000 features that I'm mapping onto 18 feature layers using "bins" of 25,000 features per feature layer. The following code generates those feature layers, and after it runs I have 18 layers comprising my ~450,000 features.
/**
 * Walks this._geoData (keyed as [prop][geometryProp][binProp]) and, for every
 * non-empty 'lines' bin, constructs a FeatureLayer and stores it under the
 * matching key in this._layers. Layers are given a visibility range of
 * maxScale 0 / minScale 3,000,000.
 *
 * Returns a Promise that resolves (with no value) once every bin has been
 * processed. BUG FIX: the original executor never called its resolve
 * callback, so callers awaiting this promise hung forever.
 *
 * NOTE(review): only the 'lines' geometry type is handled; any other
 * geometryProp falls through the switch untouched — confirm that is intended.
 */
createFeatureLayers() {
  return new Promise<void>((resolve) => {
    for (const prop in this._geoData) {
      if (!this._geoData.hasOwnProperty(prop)) { continue; }
      console.time('forLoop');
      for (const geometryProp in this._geoData[prop]) {
        if (!this._geoData[prop].hasOwnProperty(geometryProp)) { continue; }
        for (const binProp in this._geoData[prop][geometryProp]) {
          if (!this._geoData[prop][geometryProp].hasOwnProperty(binProp)) { continue; }
          const data = this._geoData[prop][geometryProp][binProp];
          // Skip empty bins — no point creating a layer with no source features.
          if (data.length === 0) { continue; }
          switch (geometryProp) {
            case 'lines': {
              // Block braces scope the const to this case (no-case-declarations).
              const featureLayer = this.buildFeatureSettings(data, '#000000');
              if (featureLayer) {
                this._layers[prop].lines[binProp] = new this.FeatureLayer(featureLayer);
                this._layers[prop].lines[binProp].maxScale = 0;
                this._layers[prop].lines[binProp].minScale = 3000000;
              }
              break;
            }
            default:
              break;
          }
        }
        // NOTE(review): incremented once per geometry group, as in the
        // original — confirm it shouldn't be once per top-level prop instead.
        this._geoDataKeyIndex++;
      }
      console.timeEnd('forLoop');
    }
    resolve(); // the loops above are synchronous, so resolving here is safe
  });
}
/**
 * Assembles the constructor-options object for a client-side polyline
 * FeatureLayer: the raw graphics as `source`, a simple renderer in the
 * given colour, and the field schema held on this._fields.
 *
 * @param data   array of features used as the layer's client-side source
 * @param colour line colour forwarded to buildRenderSettings
 * @returns FeatureLayer constructor options (never null/undefined)
 */
buildFeatureSettings(data, colour) {
  // WGS 84 — the features are stored in lat/lon.
  const spatialReference = { wkid: 4326 };
  const renderer = this.buildRenderSettings(data, colour);
  return {
    source: data,
    renderer,
    fields: this._fields,
    outFields: ['*'],
    objectIdField: 'ObjectID',
    geometryType: 'polyline',
    spatialReference,
  };
}
/**
 * Builds a simple-renderer definition that draws every feature as a
 * 3-wide line in `colour`, with a 4-wide outline in the same colour.
 *
 * NOTE(review): `data` is accepted but never read; it is kept only so the
 * caller-visible signature stays unchanged — confirm whether it can be
 * dropped at the call sites.
 *
 * @param colour CSS colour string applied to both line and outline
 */
buildRenderSettings(data, colour: string) {
  const symbol = {
    type: 'simple-line',
    size: 30,
    width: 3,
    color: colour,
    outline: {
      width: 4,
      color: colour,
    },
  };
  return { type: 'simple', symbol };
}
}
Once it's mapped onto my map view, the memory usage approximately doubles. What is the recommended way to handle massive data sets?
Are there methods available that will remove features from the map view if they're not in the current stationary view?
Are there methods that will automatically add them?
Thanks