Added polygon analysis functionality
This commit is contained in:
parent
e87f0f46aa
commit
06299cc99f
642
webapps/polygon_analysis_editor/app.js
Normal file
642
webapps/polygon_analysis_editor/app.js
Normal file
|
|
@ -0,0 +1,642 @@
|
|||
|
||||
// ---------------------------------------------------------------------------
// Application state
// ---------------------------------------------------------------------------
let rawGeoJSON = null;          // parsed upload; analyze() writes the fixed features back onto it
let cleanedGeoJSON = null;      // produced by runEdits(), offered for download
let analysisLog = [];           // lines shown in the analysis log panel
let overlapCsvData = [];        // rows for the overlap-details CSV export
let originalFileName = "polygons"; // base name for every generated download

// ---------------------------------------------------------------------------
// DOM elements
// ---------------------------------------------------------------------------
const byId = (id) => document.getElementById(id);

const dropZone = byId('dropZone');
const fileInput = byId('geojsonFile');
const fileLabelText = byId('fileLabelText');
const analyzeBtn = byId('analyzeBtn');
const resultsCard = byId('resultsCard');
const logOutput = byId('logOutput');
const downloadLogBtn = byId('downloadLogBtn');
const downloadCsvBtn = byId('downloadCsvBtn');
const editCard = byId('editCard');
const applyEditsBtn = byId('applyEditsBtn');
const editResults = byId('editResults');
const editLogOutput = byId('editLogOutput');
const downloadCleanedBtn = byId('downloadCleanedBtn');
const downloadEditLogBtn = byId('downloadEditLogBtn');

// ---------------------------------------------------------------------------
// Event wiring
// ---------------------------------------------------------------------------
// Drag highlight helper: '#e0f2f1' while hovering, the translucent green otherwise.
const setDropZoneBg = (color) => {
    dropZone.style.backgroundColor = color;
};

dropZone.addEventListener('dragover', (e) => {
    e.preventDefault();
    setDropZoneBg('#e0f2f1');
});

dropZone.addEventListener('dragleave', (e) => {
    e.preventDefault();
    setDropZoneBg('rgba(42, 171, 149, 0.05)');
});

dropZone.addEventListener('drop', (e) => {
    e.preventDefault();
    setDropZoneBg('rgba(42, 171, 149, 0.05)');
    if (e.dataTransfer.files.length) {
        fileInput.files = e.dataTransfer.files;
        handleFile(e.dataTransfer.files[0]);
    }
});

fileInput.addEventListener('change', () => {
    if (fileInput.files.length) {
        handleFile(fileInput.files[0]);
    }
});

analyzeBtn.addEventListener('click', runAnalysis);
downloadLogBtn.addEventListener('click', () => downloadText(analysisLog.join('\n'), `${originalFileName}_log.txt`));
downloadCsvBtn.addEventListener('click', downloadCsv);
applyEditsBtn.addEventListener('click', runEdits);
downloadCleanedBtn.addEventListener('click', () => downloadJson(cleanedGeoJSON, `${originalFileName}_SmartCane.geojson`));

// Logic
/**
 * Accept one dropped/selected file: remember its base name (used for all
 * generated download filenames), show it in the label, and parse it as GeoJSON.
 * On success the Analyze button is enabled and any previous results are hidden.
 *
 * @param {File} file - the GeoJSON file chosen by the user.
 */
function handleFile(file) {
    originalFileName = file.name.replace(/\.geojson$/i, '').replace(/\.json$/i, '');

    // Build the label with textContent so a hostile filename cannot inject
    // markup (the previous innerHTML interpolation rendered tags in file.name).
    fileLabelText.textContent = '';
    const nameEl = document.createElement('strong');
    nameEl.textContent = file.name;
    const hintEl = document.createElement('small');
    hintEl.textContent = 'Ready to analyze';
    fileLabelText.append(nameEl, document.createElement('br'), hintEl);

    const reader = new FileReader();
    reader.onload = (e) => {
        try {
            rawGeoJSON = JSON.parse(e.target.result);
            analyzeBtn.disabled = false;
            // Reset UI: hide any results from a previous run.
            resultsCard.classList.add('hidden');
            editCard.classList.add('hidden');
            editResults.classList.add('hidden');
        } catch (err) {
            // FIX: previously a bad file kept the old rawGeoJSON loaded while
            // the label showed the new file name, so "Analyze" could run on
            // stale data. Clear the state and the label instead.
            rawGeoJSON = null;
            analyzeBtn.disabled = true;
            fileLabelText.innerHTML = 'Drop your GeoJSON file here<br><small>or click to browse</small>';
            alert("Invalid GeoJSON file");
            console.error(err);
        }
    };
    reader.readAsText(file);
}
||||
|
||||
/**
 * Append one line to a log array and mirror the whole log into a DOM panel,
 * keeping the panel scrolled to the newest entry.
 *
 * @param {string} msg - line to append (no timestamp; matches the R script's plain text).
 * @param {string[]} [targetLog=analysisLog] - array accumulating log lines.
 * @param {HTMLElement} [targetEl=logOutput] - panel that displays the log.
 */
function log(msg, targetLog = analysisLog, targetEl = logOutput) {
    targetLog.push(msg);
    const rendered = targetLog.join('\n');
    targetEl.innerText = rendered;
    // Keep the latest line visible.
    targetEl.scrollTop = targetEl.scrollHeight;
}
||||
|
||||
/**
 * Entry point for the "Analyze Polygons" button: resets the analysis state,
 * disables the button while work is in progress, then runs analyze().
 * The setTimeout lets the browser paint the disabled/"Analyzing..." state
 * before the (synchronous, potentially heavy) geometry work starts.
 */
async function runAnalysis() {
    // Reset
    analysisLog = [];
    overlapCsvData = [];
    resultsCard.classList.remove('hidden');
    editCard.classList.add('hidden');
    analyzeBtn.disabled = true;
    analyzeBtn.innerText = "Analyzing...";

    setTimeout(async () => {
        try {
            await analyze();
            // Offer the cleanup step only after a successful analysis.
            editCard.classList.remove('hidden');
        } catch (err) {
            console.error(err);
            log(`ERROR: ${err.message}`);
        } finally {
            // FIX: the button reset was duplicated in both the success and
            // error paths; `finally` guarantees it runs exactly once either way.
            analyzeBtn.disabled = false;
            analyzeBtn.innerText = "Analyze Polygons";
        }
    }, 100);
}
||||
|
||||
/**
 * Remove consecutive duplicate vertices from every ring of a Polygon or
 * MultiPolygon feature. Other geometry types pass through untouched.
 * Mutates feature.geometry.coordinates in place and returns the feature.
 *
 * FIX: the original duplicated the ring-filter logic in both branches and
 * compared vertices via JSON.stringify on every element; a shared helper with
 * a direct numeric comparison is clearer and avoids the per-vertex serialization.
 *
 * @param {Object} feature - GeoJSON feature (geometry mutated in place).
 * @returns {Object} the same feature, for chaining.
 */
function cleanDuplicateVertices(feature) {
    // Two positions are equal when they have the same length and identical
    // values (works for [x, y] and [x, y, z] alike).
    const samePos = (a, b) => a.length === b.length && a.every((v, k) => v === b[k]);

    // Drop vertices equal to their immediate predecessor; the first vertex is
    // always kept, so runs of duplicates collapse to a single vertex.
    const dedupeRing = (ring) =>
        ring.filter((coord, i, arr) => i === 0 || !samePos(coord, arr[i - 1]));

    const geom = feature.geometry;
    if (geom.type === 'Polygon') {
        geom.coordinates = geom.coordinates.map(dedupeRing);
    } else if (geom.type === 'MultiPolygon') {
        geom.coordinates = geom.coordinates.map((polygon) => polygon.map(dedupeRing));
    }
    return feature;
}
||||
|
||||
/**
 * Analysis pipeline: remove duplicate field IDs -> drop empty geometries ->
 * clean duplicate vertices and split self-intersections -> report pairwise
 * overlaps. Findings go to the analysis log and overlapCsvData; the partially
 * fixed feature list is written back onto rawGeoJSON for the edit step.
 *
 * FIXES vs. original:
 *  - Object.keys()/Object.entries() yield string keys while getFieldNo() may
 *    return numbers; the Set/`===` lookups silently failed for numeric field
 *    IDs. All such comparisons now go through String().
 *  - The CSV category used `pct >= 30` while the deletion rule (here and in
 *    runEdits) uses `pct > 30`; the category now matches the deletion rule.
 */
async function analyze() {
    log(`Processing log for: ${originalFileName}`);
    log("==================================================");

    let features = rawGeoJSON.features;
    if (!features) {
        log("Error: No features found in GeoJSON.");
        return;
    }

    log(`${features.length} geometries found.`);

    // 1. Check duplicates FIRST
    log("\n=== Checking for duplicates... ===");
    const fieldCounts = {};
    features.forEach(f => {
        const id = getFieldNo(f);
        if (id) {
            fieldCounts[id] = (fieldCounts[id] || 0) + 1;
        }
    });

    const duplicates = Object.entries(fieldCounts).filter(([id, count]) => count > 1);

    if (duplicates.length > 0) {
        log(`✕ ${duplicates.length} duplicate field codes detected.`);
        log("Removing duplicate fields...");

        // Keys from Object.entries are strings: compare as strings so numeric
        // field IDs are actually matched and removed.
        const duplicateIds = new Set(duplicates.map(d => d[0]));
        const beforeDedup = features.length;
        features = features.filter(f => !duplicateIds.has(String(getFieldNo(f))));

        duplicates.forEach(d => {
            log(`• Field ${d[0]}: ${d[1]} occurrences (Removed)`);
        });

        log(`✓ Removed ${beforeDedup - features.length} duplicate entries!`);
    } else {
        log("✓ No duplicates found!");
    }

    // 2. Filter empty
    log("\n=== Checking empty geometries... ===");
    const initialCount = features.length;
    features = features.filter(f => f.geometry && f.geometry.coordinates && f.geometry.coordinates.length > 0);
    const emptyCount = initialCount - features.length;

    if (emptyCount > 0) {
        log(`${emptyCount} empty geometries detected! Deleting...`);
    } else {
        log(`✓ No empty geometries found.`);
    }

    // 3. Clean duplicate vertices & validity check
    log("\n=== Cleaning duplicate vertices & checking geometry validity... ===");
    let invalidCount = 0;
    let duplicateVertexCount = 0;

    let fixedFeatures = [];
    for (let f of features) {
        // Detect whether cleaning changed anything by comparing serialized geometry.
        const beforeClean = JSON.stringify(f.geometry);
        f = cleanDuplicateVertices(f);
        const afterClean = JSON.stringify(f.geometry);

        if (beforeClean !== afterClean) {
            duplicateVertexCount++;
        }

        // Drop Z if present and trim precision (Turf truncate).
        f = turf.truncate(f, {precision: 6, coordinates: 2});

        try {
            const kinks = turf.kinks(f);
            if (kinks.features.length > 0) {
                invalidCount++;
                // Self-intersecting: split into simple polygons, each inheriting
                // the original feature's properties.
                const unkinked = turf.unkinkPolygon(f);
                unkinked.features.forEach(uf => {
                    uf.properties = {...f.properties};
                    fixedFeatures.push(uf);
                });
            } else {
                fixedFeatures.push(f);
            }
        } catch (e) {
            log(`⚠ Warning: Could not process geometry for ${getFieldNo(f)}: ${e.message}`);
            // Still add it, but log the issue so the user can inspect it.
            fixedFeatures.push(f);
        }
    }

    if (duplicateVertexCount > 0) {
        log(`✓ Cleaned duplicate vertices in ${duplicateVertexCount} geometries.`);
    }

    if (invalidCount > 0) {
        log(`✕ ${invalidCount} invalid geometries detected (self-intersections). Attempting fix...`);
        log(`✓ Fixed by splitting self-intersections.`);
    } else {
        log(`✓ All geometries valid (no self-intersections).`);
    }
    features = fixedFeatures;
    log(`Current geometry count: ${features.length}`);

    // 4. Check Overlaps
    log("\n=== Checking for plot overlap... ===");

    // Bounding-box index so each feature is only intersected against nearby
    // candidates instead of every other feature.
    const tree = new RBush();
    const items = features.map((f, index) => {
        const bbox = turf.bbox(f);
        return {
            minX: bbox[0],
            minY: bbox[1],
            maxX: bbox[2],
            maxY: bbox[3],
            feature: f,
            id: getFieldNo(f) || `Unknown-${index}`,
            area: turf.area(f),
            index: index
        };
    });
    tree.load(items);

    const overlaps = [];
    const checkedPairs = new Set();
    let fieldsWithOverlap = new Set();

    for (let i = 0; i < items.length; i++) {
        const itemA = items[i];
        const candidates = tree.search(itemA);

        for (const itemB of candidates) {
            if (itemA.index === itemB.index) continue;

            // Canonical pair key so each unordered pair is checked only once.
            const pairId = [itemA.index, itemB.index].sort().join('_');
            if (checkedPairs.has(pairId)) continue;
            checkedPairs.add(pairId);

            // Intersect may throw on degenerate geometry; treat that as no overlap.
            let intersection = null;
            try {
                intersection = turf.intersect(itemA.feature, itemB.feature);
            } catch (e) {
                console.warn("Intersection error", e);
            }

            if (intersection) {
                const intersectArea = turf.area(intersection);
                // Ignore slivers below 1 m².
                if (intersectArea > 1) {
                    overlaps.push({
                        a: itemA,
                        b: itemB,
                        area: intersectArea
                    });

                    fieldsWithOverlap.add(itemA.id);
                    fieldsWithOverlap.add(itemB.id);
                }
            }
        }
    }

    if (overlaps.length > 0) {
        log(`✕ ${fieldsWithOverlap.size} fields with overlap found.`);

        // Total overlap area per field ID, to classify each field against the
        // 30% deletion threshold.
        const fieldOverlapMap = {};

        overlaps.forEach(o => {
            if (!fieldOverlapMap[o.a.id]) fieldOverlapMap[o.a.id] = 0;
            fieldOverlapMap[o.a.id] += o.area;

            if (!fieldOverlapMap[o.b.id]) fieldOverlapMap[o.b.id] = 0;
            fieldOverlapMap[o.b.id] += o.area;
        });

        let severeCount = 0;

        Object.keys(fieldOverlapMap).forEach(fid => {
            // fid is a string key; compare via String so numeric IDs match too.
            const item = items.find(it => String(it.id) === fid);
            if (!item) return;

            const totalOverlap = fieldOverlapMap[fid];
            const originalArea = item.area;
            const pct = (totalOverlap / originalArea) * 100;

            // Category matches the deletion rule (strictly greater than 30%).
            let category = "<30%";
            if (pct > 30) category = ">30%";

            overlapCsvData.push({
                Field_No: fid,
                Original_Area_ha: (originalArea / 10000).toFixed(4),
                Overlap_Area_ha: (totalOverlap / 10000).toFixed(4),
                Overlap_Percentage: pct.toFixed(1),
                Overlap_Category: category
            });

            if (pct > 30) {
                log(`Field number ${fid} overlap >30% (${pct.toFixed(1)}%) -- Will be deleted`);
                severeCount++;
            }
        });

        if (severeCount === 0) {
            log("✓ No significant overlap (>30%) detected.");
        } else {
            log(`✕ ${severeCount} fields have significant overlap (>30%).`);
        }

    } else {
        log("✓ No overlap found!");
    }

    // Save state for the edit step: the fixed/deduplicated features.
    rawGeoJSON.features = features;
}
||||
|
||||
/**
 * Field identifier lookup across the property spellings seen in source files.
 * Uses `??` (not `||`) so a legitimate falsy ID such as 0 is returned, and
 * `?.` so a feature without properties yields undefined instead of throwing.
 *
 * @param {Object} feature - GeoJSON feature.
 * @returns {string|number|undefined} the field number, or undefined if absent.
 */
function getFieldNo(feature) {
    const props = feature.properties;
    return props?.['Field No'] ?? props?.['field'] ?? props?.['Field_No'];
}
||||
|
||||
/**
 * "Apply Edits" pipeline, run against the features analyze() stored on
 * rawGeoJSON: (1) delete fields whose total overlap exceeds 30% of their own
 * area, (2) resolve remaining overlaps by trimming the smaller plot, and
 * (3) dissolve multi-part fields (same field ID) into a single feature.
 * The result is placed in cleanedGeoJSON for download.
 *
 * NOTE(review): unlike analyze(), the turf.intersect calls here are NOT
 * wrapped in try/catch — a degenerate geometry would throw and abort the
 * whole edit run mid-way; confirm whether that is acceptable.
 */
async function runEdits() {
    const editLog = [];
    editResults.classList.remove('hidden');
    editLogOutput.innerText = "Processing edits...";

    // Local logger mirroring log(), but writing to the edit-results panel.
    function logEdit(msg) {
        editLog.push(msg);
        editLogOutput.innerText = editLog.join('\n');
        editLogOutput.scrollTop = editLogOutput.scrollHeight;
    }

    // setTimeout lets the browser paint "Processing edits..." before the work.
    setTimeout(async () => {
        let features = rawGeoJSON.features;

        // Clean all features before processing (mutates geometries in place).
        features = features.map(f => cleanDuplicateVertices(f));

        let delCount = 0;

        // 1. Remove > 30% overlaps.
        // Overlaps are re-computed from scratch rather than reusing the
        // analysis results, to be precise about the current geometry state.

        // Bounding-box index so each feature only intersects nearby candidates.
        let tree = new RBush();
        let items = features.map((f, index) => ({
            minX: turf.bbox(f)[0],
            minY: turf.bbox(f)[1],
            maxX: turf.bbox(f)[2],
            maxY: turf.bbox(f)[3],
            feature: f,
            id: getFieldNo(f),
            area: turf.area(f),
            index: index
        }));
        tree.load(items);

        // Total overlap area accumulated per field ID (keys coerced to strings).
        let fieldOverlapMap = {};

        for (let i = 0; i < items.length; i++) {
            const itemA = items[i];
            const candidates = tree.search(itemA);
            for (const itemB of candidates) {
                if (itemA.index < itemB.index) { // each unordered pair checked once
                    const intersection = turf.intersect(itemA.feature, itemB.feature);
                    if (intersection) {
                        const area = turf.area(intersection);
                        if (area > 1) { // ignore slivers below 1 m²
                            if (!fieldOverlapMap[itemA.id]) fieldOverlapMap[itemA.id] = 0;
                            fieldOverlapMap[itemA.id] += area;

                            if (!fieldOverlapMap[itemB.id]) fieldOverlapMap[itemB.id] = 0;
                            fieldOverlapMap[itemB.id] += area;
                        }
                    }
                }
            }
        }

        // Identify fields whose total overlap exceeds 30% of their own area.
        const toDelete = new Set();
        Object.keys(fieldOverlapMap).forEach(fid => {
            // `==` (loose) is deliberate: fid is a string key while it.id may
            // be numeric.
            const item = items.find(it => it.id == fid);
            if (item) {
                const pct = (fieldOverlapMap[fid] / item.area) * 100;
                if (pct > 30) {
                    toDelete.add(fid);
                    logEdit(`Field ${fid} overlap ${pct.toFixed(1)}% > 30% -- ✕ Deleted!`);
                    delCount++;
                }
            }
        });

        // Apply deletion.
        // NOTE(review): toDelete holds string keys but getFieldNo() may return
        // numbers, and Set.has() compares strictly — numeric field IDs marked
        // for deletion would NOT actually be removed here. Confirm ID types.
        features = features.filter(f => !toDelete.has(getFieldNo(f)));

        if (delCount > 0) {
            logEdit(`✓ Significant overlap fixed - ${delCount} geometries deleted.`);
        } else {
            logEdit(`✓ No significant overlap detected.`);
        }

        // 2. Fix remaining overlaps (< 30%) by trimming the smaller plot.
        logEdit("\nChecking for remaining overlaps (<30%) to fix...");

        // Re-index with the remaining features (indices shifted by deletions).
        items = features.map((f, index) => ({
            minX: turf.bbox(f)[0],
            minY: turf.bbox(f)[1],
            maxX: turf.bbox(f)[2],
            maxY: turf.bbox(f)[3],
            feature: f,
            id: getFieldNo(f),
            area: turf.area(f),
            index: index
        }));
        tree = new RBush();
        tree.load(items);

        let fixCount = 0;

        // Collect all overlapping pairs up front, then process them.
        let pairs = [];
        for (let i = 0; i < items.length; i++) {
            const itemA = items[i];
            const candidates = tree.search(itemA);
            for (const itemB of candidates) {
                if (itemA.index < itemB.index) {
                    const intersection = turf.intersect(itemA.feature, itemB.feature);
                    if (intersection && turf.area(intersection) > 1) {
                        pairs.push({a: itemA, b: itemB});
                    }
                }
            }
        }

        for (const pair of pairs) {
            const f1 = pair.a.feature; // live reference into 'features'
            const f2 = pair.b.feature;

            // Re-check the intersection: an earlier pair may have trimmed one
            // of these geometries and removed the overlap already.
            // NOTE(review): a feature removed ("completely inside") by an
            // earlier pair can still appear in later pairs and be processed.
            const intersection = turf.intersect(f1, f2);
            if (!intersection || turf.area(intersection) <= 1) continue;

            const area1 = turf.area(f1);
            const area2 = turf.area(f2);

            // "Add the overlapping area to the biggest plot" — i.e. subtract
            // the overlap from the smaller plot.
            let updated = false;
            if (area1 > area2) {
                // f1 is bigger: trim f1's footprint out of f2
                // (equivalent to R's poly2 <- st_difference(poly2, st_union(poly1))).
                try {
                    const diff = turf.difference(f2, f1);
                    if (diff) {
                        f2.geometry = diff.geometry;
                        updated = true;
                        logEdit(`✓ Fixed conflict between ${getFieldNo(f1)} and ${getFieldNo(f2)} (Trimmed ${getFieldNo(f2)})`);
                    } else {
                        // Null difference: f2 lies entirely inside f1 — drop it.
                        logEdit(`! ${getFieldNo(f2)} is completely inside ${getFieldNo(f1)} - Removed ${getFieldNo(f2)}`);
                        features = features.filter(f => f !== f2);
                    }
                } catch(e) {
                    console.warn("Difference failed", e);
                }
            } else {
                // f2 is bigger or equal: trim f2's footprint out of f1.
                try {
                    const diff = turf.difference(f1, f2);
                    if (diff) {
                        f1.geometry = diff.geometry;
                        updated = true;
                        logEdit(`✓ Fixed conflict between ${getFieldNo(f1)} and ${getFieldNo(f2)} (Trimmed ${getFieldNo(f1)})`);
                    } else {
                        logEdit(`! ${getFieldNo(f1)} is completely inside ${getFieldNo(f2)} - Removed ${getFieldNo(f1)}`);
                        features = features.filter(f => f !== f1);
                    }
                } catch(e) {
                    console.warn("Difference failed", e);
                }
            }

            if (updated) fixCount++;
        }

        if (fixCount === 0) {
            logEdit("✓ No <30% overlap detected.");
        }

        // 3. Dissolve/merge polygons sharing a field ID into one feature.
        logEdit("\n=== Dissolving polygons by field ID... ===");

        // Group features by field ID (keys coerced to strings).
        const groupedByField = {};
        features.forEach(f => {
            const fieldId = getFieldNo(f);
            if (!groupedByField[fieldId]) {
                groupedByField[fieldId] = [];
            }
            groupedByField[fieldId].push(f);
        });

        let dissolveCount = 0;
        const dissolvedFeatures = [];

        Object.keys(groupedByField).forEach(fieldId => {
            const group = groupedByField[fieldId];

            if (group.length === 1) {
                // Single part: no dissolve needed.
                dissolvedFeatures.push(group[0]);
            } else {
                logEdit(` Dissolving ${group.length} polygons for field ${fieldId}...`);

                try {
                    // Fold all parts into one geometry via pairwise union.
                    let unionGeom = group[0];

                    for (let i = 1; i < group.length; i++) {
                        unionGeom = turf.union(unionGeom, group[i]);
                        if (!unionGeom) {
                            logEdit(`⚠ Warning: Could not union all parts of field ${fieldId}`);
                            // Fall back to keeping the individual parts.
                            dissolvedFeatures.push(...group);
                            return; // exits only this forEach callback
                        }
                    }

                    // Preserve the first part's properties on the merged feature.
                    unionGeom.properties = {...group[0].properties};
                    dissolvedFeatures.push(unionGeom);
                    dissolveCount++;
                    logEdit(`✓ Field ${fieldId}: Merged ${group.length} parts into 1`);
                } catch (e) {
                    logEdit(`⚠ Error dissolving field ${fieldId}: ${e.message}`);
                    // Fall back to keeping the individual parts.
                    dissolvedFeatures.push(...group);
                }
            }
        });

        if (dissolveCount > 0) {
            logEdit(`✓ Dissolved ${dissolveCount} fields with multiple parts.`);
        } else {
            logEdit(`✓ No multi-part fields detected.`);
        }

        features = dissolvedFeatures;

        // Package the cleaned result for download, preserving the source CRS.
        cleanedGeoJSON = {
            type: "FeatureCollection",
            features: features,
            crs: rawGeoJSON.crs
        };

        logEdit("\n=== Saving edited shapefiles... ===");
        logEdit(`✓ Ready to download ${features.length} geometries.`);

        // Bind (or rebind) the edit-log download to this run's log.
        downloadEditLogBtn.onclick = () => downloadText(editLog.join('\n'), `${originalFileName}_edit_log.txt`);

    }, 100);
}
||||
|
||||
/**
 * Trigger a browser download of plain text: wrap it in a Blob, click a
 * transient anchor pointing at an object URL, then release the URL.
 *
 * @param {string} content - text to save.
 * @param {string} filename - suggested name for the downloaded file.
 */
function downloadText(content, filename) {
    const blob = new Blob([content], {type: "text/plain;charset=utf-8"});
    const objectUrl = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = objectUrl;
    link.download = filename;
    link.click();
    // Free the object URL once the download has been initiated.
    URL.revokeObjectURL(objectUrl);
}
||||
|
||||
/**
 * Trigger a browser download of a value serialized as pretty-printed JSON
 * (2-space indent), served with the application/json MIME type.
 *
 * @param {Object} content - value to serialize and save.
 * @param {string} filename - suggested name for the downloaded file.
 */
function downloadJson(content, filename) {
    const serialized = JSON.stringify(content, null, 2);
    const objectUrl = URL.createObjectURL(new Blob([serialized], {type: "application/json"}));
    const link = document.createElement('a');
    link.href = objectUrl;
    link.download = filename;
    link.click();
    // Free the object URL once the download has been initiated.
    URL.revokeObjectURL(objectUrl);
}
||||
|
||||
/**
 * Export overlapCsvData as a semicolon-separated CSV (header row taken from
 * the first record's keys) via downloadText(). Alerts and bails out when
 * there is nothing to export.
 *
 * NOTE(review): values are not quoted/escaped — a field ID containing ';'
 * would break the row; confirm IDs can never contain the separator.
 */
function downloadCsv() {
    if (overlapCsvData.length === 0) {
        alert("No overlap data to download.");
        return;
    }
    const headers = Object.keys(overlapCsvData[0]);
    const lines = [headers.join(';')];
    for (const row of overlapCsvData) {
        lines.push(headers.map(h => row[h]).join(';'));
    }
    downloadText(lines.join('\n'), `${originalFileName}_overlap_details.csv`);
}
||||
220
webapps/polygon_analysis_editor/index.html
Normal file
220
webapps/polygon_analysis_editor/index.html
Normal file
|
|
@ -0,0 +1,220 @@
|
|||
<!DOCTYPE html>
<!-- Polygon Analysis & Editor page shell: upload a GeoJSON, review the
     analysis log, optionally apply automatic cleanup edits.
     All behavior lives in app.js; element IDs below are looked up there. -->
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Polygon Analysis & Editor</title>
    <link rel="stylesheet" href="../theme.css">
    <style>
        body {
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
            background: #faf8f3;
            min-height: 100vh;
            display: flex;
            flex-direction: column;
            margin: 0;
        }
        header {
            background: linear-gradient(135deg, var(--sc-primary) 0%, var(--sc-primary-light) 100%);
            color: white;
            padding: 15px 20px;
            box-shadow: 0 2px 8px rgba(0,0,0,0.1);
            display: flex;
            align-items: center;
            gap: 20px;
        }
        .back-btn {
            background: rgba(255,255,255,0.2);
            color: white;
            border: 1px solid rgba(255,255,255,0.3);
            padding: 8px 12px;
            border-radius: 4px;
            cursor: pointer;
            font-size: 14px;
            transition: background 0.3s ease;
            flex-shrink: 0;
        }
        .back-btn:hover {
            background: rgba(255,255,255,0.3);
        }
        header h1 {
            font-size: 24px;
            font-weight: 600;
            flex: 1;
            margin: 0;
        }
        .container {
            flex: 1;
            max-width: 1200px;
            margin: 30px auto;
            width: 100%;
            padding: 0 20px;
            box-sizing: border-box;
        }
        .card {
            background: white;
            padding: 25px;
            border-radius: 8px;
            box-shadow: 0 2px 8px rgba(0,0,0,0.1);
            border: 1px solid #e0d9d0;
            margin-bottom: 20px;
        }
        .card h2 {
            margin-top: 0;
            color: #333;
            border-bottom: 2px solid var(--sc-primary);
            padding-bottom: 10px;
            margin-bottom: 15px;
        }
        /* Drop zone: app.js changes its backgroundColor on drag events. */
        .file-input-wrapper {
            position: relative;
            display: inline-block;
            width: 100%;
            margin-top: 10px;
        }
        .file-input-label {
            display: block;
            padding: 30px;
            border: 2px dashed var(--sc-primary);
            border-radius: 6px;
            text-align: center;
            cursor: pointer;
            transition: all 0.3s;
            background: rgba(42, 171, 149, 0.05);
        }
        .file-input-label:hover {
            border-color: var(--sc-primary-light);
            background: rgba(37, 150, 190, 0.05);
        }
        /* The native file input is hidden; the styled label triggers it. */
        .file-input-wrapper input[type="file"] {
            display: none;
        }
        button {
            background: var(--sc-primary);
            color: white;
            border: none;
            padding: 10px 20px;
            border-radius: 4px;
            cursor: pointer;
            font-size: 14px;
            transition: background 0.3s ease;
        }
        button:hover {
            background: #238b7d;
        }
        button:disabled {
            background: #ccc;
            cursor: not-allowed;
        }
        /* Dark terminal-style panel used for both log outputs. */
        .log-output {
            background: #1e1e1e;
            color: #d4d4d4;
            padding: 15px;
            border-radius: 4px;
            font-family: 'Consolas', 'Monaco', monospace;
            white-space: pre-wrap;
            max-height: 400px;
            overflow-y: auto;
            margin-bottom: 15px;
            font-size: 13px;
        }
        .hidden {
            display: none;
        }
        .actions-row {
            display: flex;
            gap: 10px;
            margin-top: 15px;
            align-items: center;
            flex-wrap: wrap;
        }
        footer {
            background: white;
            padding: 20px;
            border-radius: 8px;
            margin-top: 20px;
            box-shadow: 0 2px 8px rgba(0,0,0,0.1);
            text-align: center;
            font-size: 13px;
            color: #666;
            border: 1px solid #e0d9d0;
        }
        /* Spinner */
        /* NOTE(review): .spinner/@keyframes are defined but no element in this
           page uses them — confirm whether they can be removed. */
        .spinner {
            border: 4px solid #f3f3f3;
            border-top: 4px solid var(--sc-primary);
            border-radius: 50%;
            width: 20px;
            height: 20px;
            animation: spin 2s linear infinite;
            display: inline-block;
            vertical-align: middle;
            margin-right: 10px;
        }
        @keyframes spin {
            0% { transform: rotate(0deg); }
            100% { transform: rotate(360deg); }
        }
    </style>
</head>
<body>
    <!-- Client-side auth gate shared with the other tools.
         NOTE(review): sessionStorage gating is trivially bypassed; this is
         UX-level gating only, not a security boundary. -->
    <script>
        if (sessionStorage.getItem('authenticated') !== 'true') {
            window.location.href = '../login.html';
        }
    </script>
    <header>
        <button class="back-btn" onclick="window.location.href='../';" title="Back to main tools">← Back</button>
        <h1>🔷 Polygon Analysis & Editor</h1>
    </header>

    <div class="container">
        <!-- Step 1: upload a GeoJSON (drag & drop or click-to-browse). -->
        <div class="card">
            <h2><span style="font-size: 24px;">🗺️</span> Upload GeoJSON</h2>
            <p>Upload a GeoJSON file containing the polygons to analyze. The tool will check for overlaps and other issues.</p>
            <div class="file-input-wrapper" id="dropZone">
                <label class="file-input-label" for="geojsonFile">
                    <div id="fileLabelText">Drop your GeoJSON file here<br><small>or click to browse</small></div>
                </label>
                <input type="file" id="geojsonFile" accept=".geojson,.json" />
            </div>
            <div style="text-align: center; margin-top: 20px;">
                <!-- Enabled by app.js once a file parses successfully. -->
                <button id="analyzeBtn" disabled>Analyze Polygons</button>
            </div>
        </div>

        <!-- Step 2: analysis results (revealed by runAnalysis()). -->
        <div class="card hidden" id="resultsCard">
            <h2>Analysis Log</h2>
            <div id="logOutput" class="log-output"></div>

            <div class="actions-row">
                <button id="downloadLogBtn">Download Log</button>
                <button id="downloadCsvBtn">Download Overlap CSV</button>
            </div>
        </div>

        <!-- Step 3: optional automatic cleanup (revealed after analysis). -->
        <div class="card hidden" id="editCard">
            <h2>Apply Edits?</h2>
            <p>Would you like to automatically clean the polygons based on the analysis? (Fields with >30% overlap will be deleted, others trimmed)</p>
            <div class="actions-row">
                <button id="applyEditsBtn">✅ Yes, Apply Edits</button>
            </div>
            <div id="editResults" class="hidden" style="margin-top: 20px;">
                <h3>Edit Results</h3>
                <div id="editLogOutput" class="log-output"></div>
                <button id="downloadCleanedBtn">Download Cleaned GeoJSON</button>
                <button id="downloadEditLogBtn">Download Edit Log</button>
            </div>
        </div>

        <footer>
            Polygon Analysis Editor | Powered by Turf.js
        </footer>
    </div>

    <!-- Libraries: Turf.js (geometry ops) and RBush (bbox index) must load
         before app.js, which uses their globals. -->
    <script src="https://cdn.jsdelivr.net/npm/@turf/turf@6/turf.min.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/rbush@3/rbush.min.js"></script>
    <script src="app.js"></script>
</body>
</html>
|
||||
Loading…
Reference in a new issue