diff --git a/webapps/polygon_analysis_editor/app.js b/webapps/polygon_analysis_editor/app.js
new file mode 100644
index 0000000..7678ffb
--- /dev/null
+++ b/webapps/polygon_analysis_editor/app.js
@@ -0,0 +1,642 @@
+
// State (module-level, shared across the analyze / edit phases)
let rawGeoJSON = null;             // parsed GeoJSON from the uploaded file; analyze() writes its cleaned features back here
let cleanedGeoJSON = null;         // FeatureCollection built by runEdits(), offered for download
let analysisLog = [];              // lines shown in the analysis log panel
let overlapCsvData = [];           // row objects for the overlap-details CSV export
let originalFileName = "polygons"; // upload name without .geojson/.json extension; prefixes all download names
+
// DOM Elements — looked up once at load; the script assumes index.html
// defines every one of these ids.
const dropZone = document.getElementById('dropZone');                 // drag & drop target
const fileInput = document.getElementById('geojsonFile');             // hidden <input type="file">
const fileLabelText = document.getElementById('fileLabelText');       // label showing selected file name
const analyzeBtn = document.getElementById('analyzeBtn');
const resultsCard = document.getElementById('resultsCard');           // analysis results container
const logOutput = document.getElementById('logOutput');               // analysis log <pre>/panel
const downloadLogBtn = document.getElementById('downloadLogBtn');
const downloadCsvBtn = document.getElementById('downloadCsvBtn');
const editCard = document.getElementById('editCard');                 // "apply edits" card, revealed after analysis
const applyEditsBtn = document.getElementById('applyEditsBtn');
const editResults = document.getElementById('editResults');
const editLogOutput = document.getElementById('editLogOutput');       // edit log panel
const downloadCleanedBtn = document.getElementById('downloadCleanedBtn');
const downloadEditLogBtn = document.getElementById('downloadEditLogBtn');
+
// Event Listeners
// Drag & drop: highlight the drop zone while a file is dragged over it,
// restore the idle background on leave/drop.
dropZone.addEventListener('dragover', (e) => {
    e.preventDefault(); // required so the browser allows dropping here
    dropZone.style.backgroundColor = '#e0f2f1';
});
dropZone.addEventListener('dragleave', (e) => {
    e.preventDefault();
    dropZone.style.backgroundColor = 'rgba(42, 171, 149, 0.05)';
});
dropZone.addEventListener('drop', (e) => {
    e.preventDefault();
    dropZone.style.backgroundColor = 'rgba(42, 171, 149, 0.05)';
    if (e.dataTransfer.files.length) {
        // Mirror the dropped file into the <input> so its state stays consistent
        fileInput.files = e.dataTransfer.files;
        handleFile(e.dataTransfer.files[0]);
    }
});
// Classic file-picker path
fileInput.addEventListener('change', (e) => {
    if (fileInput.files.length) {
        handleFile(fileInput.files[0]);
    }
});

// Button wiring; download handlers snapshot the current log/GeoJSON state at click time
analyzeBtn.addEventListener('click', runAnalysis);
downloadLogBtn.addEventListener('click', () => downloadText(analysisLog.join('\n'), `${originalFileName}_log.txt`));
downloadCsvBtn.addEventListener('click', downloadCsv);
applyEditsBtn.addEventListener('click', runEdits);
downloadCleanedBtn.addEventListener('click', () => downloadJson(cleanedGeoJSON, `${originalFileName}_SmartCane.geojson`));
+
+// Logic
+
/**
 * Load the selected file: remember its base name for download naming,
 * update the label, parse it as GeoJSON into rawGeoJSON, and reset the
 * results/edit UI. Invalid JSON triggers an alert and leaves state untouched.
 *
 * @param {File} file - The file chosen via the picker or drag & drop.
 */
function handleFile(file) {
    // Strip a trailing .geojson or .json extension for download names.
    originalFileName = file.name.replace(/\.geojson$/i, '').replace(/\.json$/i, '');
    // NOTE(review): this template literal spans a raw newline — markup (e.g. a
    // <br>) may have been lost here; also file.name is injected via innerHTML
    // unescaped. Confirm the intended label markup; prefer escaping/textContent.
    fileLabelText.innerHTML = `${file.name}
Ready to analyze`;

    const reader = new FileReader();
    reader.onload = (e) => {
        try {
            rawGeoJSON = JSON.parse(e.target.result);
            analyzeBtn.disabled = false;
            // Reset UI
            resultsCard.classList.add('hidden');
            editCard.classList.add('hidden');
            editResults.classList.add('hidden');
        } catch (err) {
            alert("Invalid GeoJSON file");
            console.error(err);
        }
    };
    reader.readAsText(file);
}
+
/**
 * Append a message to a log array and mirror the whole log into the given
 * element, keeping the view pinned to the newest line.
 *
 * @param {string} msg - Line to append (no timestamp, mirrors the R script).
 * @param {string[]} [targetLog] - Log buffer; defaults to the analysis log.
 * @param {HTMLElement} [targetEl] - Element to render into; defaults to the analysis panel.
 */
function log(msg, targetLog = analysisLog, targetEl = logOutput) {
    targetLog.push(msg);
    const rendered = targetLog.join('\n');
    targetEl.innerText = rendered;
    targetEl.scrollTop = targetEl.scrollHeight; // auto-scroll to bottom
}
+
/**
 * Click handler for the Analyze button: resets analysis state and UI,
 * then runs analyze(). The 100 ms setTimeout lets the browser repaint
 * the "Analyzing..." label before the heavy synchronous work begins.
 * On success the edit card is revealed; on failure the error is logged
 * to the analysis panel and the button is re-enabled either way.
 */
async function runAnalysis() {
    // Reset
    analysisLog = [];
    overlapCsvData = [];
    resultsCard.classList.remove('hidden');
    editCard.classList.add('hidden');
    analyzeBtn.disabled = true;
    analyzeBtn.innerText = "Analyzing...";

    // Use setTimeout to allow UI to update
    setTimeout(async () => {
        try {
            await analyze();
            analyzeBtn.disabled = false;
            analyzeBtn.innerText = "Analyze Polygons";
            editCard.classList.remove('hidden');
        } catch (err) {
            console.error(err);
            log(`ERROR: ${err.message}`);
            analyzeBtn.disabled = false;
            analyzeBtn.innerText = "Analyze Polygons";
        }
    }, 100);
}
+
/**
 * Remove consecutive duplicate vertices from every ring of a Polygon or
 * MultiPolygon feature; other geometry types pass through untouched.
 * Mutates `feature.geometry.coordinates` in place and returns the same
 * feature object (callers rely on both the mutation and the return value).
 *
 * @param {Object} feature - GeoJSON feature whose rings should be cleaned.
 * @returns {Object} The same feature, with repeated vertices dropped.
 */
function cleanDuplicateVertices(feature) {
    // Robustness: nothing to clean when the geometry is absent.
    if (!feature.geometry) return feature;

    // Compare coordinates numerically instead of JSON.stringify — same
    // result for position arrays, without serializing every vertex twice.
    const sameCoord = (a, b) =>
        a.length === b.length && a.every((v, k) => v === b[k]);
    // Keep a vertex only if it differs from the immediately preceding one.
    const dedupeRing = (ring) =>
        ring.filter((coord, i, arr) => i === 0 || !sameCoord(coord, arr[i - 1]));

    if (feature.geometry.type === 'Polygon') {
        feature.geometry.coordinates = feature.geometry.coordinates.map(dedupeRing);
    } else if (feature.geometry.type === 'MultiPolygon') {
        feature.geometry.coordinates = feature.geometry.coordinates.map(polygon =>
            polygon.map(dedupeRing)
        );
    }
    return feature;
}
+
/**
 * Full analysis pipeline over rawGeoJSON.features:
 *   1. remove ALL features whose field code occurs more than once,
 *   2. drop empty geometries,
 *   3. clean duplicate vertices, truncate to 2-D (drop Z), and split
 *      self-intersecting polygons with turf.unkinkPolygon,
 *   4. detect pairwise overlaps (RBush bbox index + turf.intersect),
 *      log fields whose total overlap exceeds 30% of their own area, and
 *      fill overlapCsvData for the CSV export.
 * Progress is written to the analysis log; the cleaned feature array is
 * stored back into rawGeoJSON.features for the later edit step.
 *
 * BUGFIX: per-field lookups now coerce ids to String before comparing —
 * Object.keys()/Set keys are strings while field ids may be numeric, so
 * the previous strict === silently skipped numeric field ids.
 */
async function analyze() {
    log(`Processing log for: ${originalFileName}`);
    log("==================================================");

    let features = rawGeoJSON.features;
    if (!features) {
        log("Error: No features found in GeoJSON.");
        return;
    }

    log(`${features.length} geometries found.`);

    // 1. Check duplicates FIRST
    log("\n=== Checking for duplicates... ===");
    const fieldCounts = {};
    features.forEach(f => {
        const id = getFieldNo(f);
        if (id) {
            fieldCounts[id] = (fieldCounts[id] || 0) + 1;
        }
    });

    const duplicates = Object.entries(fieldCounts).filter(([id, count]) => count > 1);

    if (duplicates.length > 0) {
        log(`✕ ${duplicates.length} duplicate field codes detected.`);
        log("Removing duplicate fields...");

        // All occurrences of a duplicated id are removed, not just extras.
        const duplicateIds = new Set(duplicates.map(d => d[0]));
        const beforeDedup = features.length;
        // String() coercion: the Set holds string keys from Object.entries,
        // but getFieldNo may return a number.
        features = features.filter(f => !duplicateIds.has(String(getFieldNo(f))));

        duplicates.forEach(d => {
            log(`• Field ${d[0]}: ${d[1]} occurrences (Removed)`);
        });

        log(`✓ Removed ${beforeDedup - features.length} duplicate entries!`);
    } else {
        log("✓ No duplicates found!");
    }

    // 2. Filter empty
    log("\n=== Checking empty geometries... ===");
    const initialCount = features.length;
    features = features.filter(f => f.geometry && f.geometry.coordinates && f.geometry.coordinates.length > 0);
    const emptyCount = initialCount - features.length;

    if (emptyCount > 0) {
        log(`${emptyCount} empty geometries detected! Deleting...`);
    } else {
        log(`✓ No empty geometries found.`);
    }

    // 3. Clean duplicate vertices & validity check
    log("\n=== Cleaning duplicate vertices & checking geometry validity... ===");
    let invalidCount = 0;
    let duplicateVertexCount = 0;

    let fixedFeatures = [];
    for (let f of features) {
        // Clean duplicate vertices first; serialize before/after to detect
        // whether anything was actually removed.
        const beforeClean = JSON.stringify(f.geometry);
        f = cleanDuplicateVertices(f);
        const afterClean = JSON.stringify(f.geometry);

        if (beforeClean !== afterClean) {
            duplicateVertexCount++;
        }

        // Drop Z if present (turf.truncate also rounds to 6 decimal places)
        f = turf.truncate(f, {precision: 6, coordinates: 2});

        try {
            const kinks = turf.kinks(f);
            if (kinks.features.length > 0) {
                invalidCount++;
                // Self-intersecting: split into simple polygons, each part
                // inheriting a copy of the original's properties.
                const unkinked = turf.unkinkPolygon(f);
                unkinked.features.forEach(uf => {
                    uf.properties = {...f.properties};
                    fixedFeatures.push(uf);
                });
            } else {
                fixedFeatures.push(f);
            }
        } catch (e) {
            log(`⚠ Warning: Could not process geometry for ${getFieldNo(f)}: ${e.message}`);
            // Still add it, but log the issue
            fixedFeatures.push(f);
        }
    }

    if (duplicateVertexCount > 0) {
        log(`✓ Cleaned duplicate vertices in ${duplicateVertexCount} geometries.`);
    }

    if (invalidCount > 0) {
        log(`✕ ${invalidCount} invalid geometries detected (self-intersections). Attempting fix...`);
        log(`✓ Fixed by splitting self-intersections.`);
    } else {
        log(`✓ All geometries valid (no self-intersections).`);
    }
    features = fixedFeatures;
    log(`Current geometry count: ${features.length}`);

    // 4. Check Overlaps
    log("\n=== Checking for plot overlap... ===");

    // RBush bbox index so each feature is only intersected against bbox
    // candidates instead of every other feature.
    const tree = new RBush();
    const items = features.map((f, index) => {
        const bbox = turf.bbox(f);
        return {
            minX: bbox[0],
            minY: bbox[1],
            maxX: bbox[2],
            maxY: bbox[3],
            feature: f,
            id: getFieldNo(f) || `Unknown-${index}`,
            area: turf.area(f),
            index: index
        };
    });
    tree.load(items);

    const overlaps = [];
    const checkedPairs = new Set();
    let fieldsWithOverlap = new Set();

    for (let i = 0; i < items.length; i++) {
        const itemA = items[i];
        const candidates = tree.search(itemA);

        for (const itemB of candidates) {
            if (itemA.index === itemB.index) continue;

            // Canonical pair key so each A/B pair is only intersected once
            const pairId = [itemA.index, itemB.index].sort().join('_');
            if (checkedPairs.has(pairId)) continue;
            checkedPairs.add(pairId);

            // Bbox overlap is only a hint; confirm with a real intersection
            let intersection = null;
            try {
                intersection = turf.intersect(itemA.feature, itemB.feature);
            } catch (e) {
                console.warn("Intersection error", e);
            }

            if (intersection) {
                const intersectArea = turf.area(intersection);
                if (intersectArea > 1) { // ignore slivers under 1 m²
                    overlaps.push({
                        a: itemA,
                        b: itemB,
                        area: intersectArea
                    });

                    fieldsWithOverlap.add(itemA.id);
                    fieldsWithOverlap.add(itemB.id);
                }
            }
        }
    }

    if (overlaps.length > 0) {
        log(`✕ ${fieldsWithOverlap.size} fields with overlap found.`);

        // Total overlap area per field, to decide whether it exceeds 30%.
        // Map: FieldID -> TotalOverlapArea
        const fieldOverlapMap = {};

        overlaps.forEach(o => {
            // Add to A
            if (!fieldOverlapMap[o.a.id]) fieldOverlapMap[o.a.id] = 0;
            fieldOverlapMap[o.a.id] += o.area;

            // Add to B
            if (!fieldOverlapMap[o.b.id]) fieldOverlapMap[o.b.id] = 0;
            fieldOverlapMap[o.b.id] += o.area;
        });

        // Report significant overlaps
        let severeCount = 0;

        Object.keys(fieldOverlapMap).forEach(fid => {
            // BUGFIX: fid is always a string; item.id may be numeric, so a
            // strict === never matched numeric ids and skipped them here.
            const item = items.find(it => String(it.id) === fid);
            if (!item) return;

            const totalOverlap = fieldOverlapMap[fid];
            const originalArea = item.area;
            const pct = (totalOverlap / originalArea) * 100;

            // NOTE(review): category uses >= 30 while deletion below uses
            // > 30 — kept as-is; confirm which threshold is intended.
            let category = "<30%";
            if (pct >= 30) category = ">30%";

            // Add to CSV Data
            overlapCsvData.push({
                Field_No: fid,
                Original_Area_ha: (originalArea / 10000).toFixed(4),
                Overlap_Area_ha: (totalOverlap / 10000).toFixed(4),
                Overlap_Percentage: pct.toFixed(1),
                Overlap_Category: category
            });

            if (pct > 30) {
                log(`Field number ${fid} overlap >30% (${pct.toFixed(1)}%) -- Will be deleted`);
                severeCount++;
            }
        });

        if (severeCount === 0) {
            log("✓ No significant overlap (>30%) detected.");
        } else {
            log(`✕ ${severeCount} fields have significant overlap (>30%).`);
        }

    } else {
        log("✓ No overlap found!");
    }

    // Save state for editing: runEdits() consumes rawGeoJSON.features.
    rawGeoJSON.features = features;

}
+
/**
 * Extract the field identifier from a feature, checking the property
 * aliases seen across input files: "Field No", "field", "Field_No".
 *
 * Uses `??` instead of `||` so a legitimate falsy id (e.g. 0 or "") is
 * not skipped, and optional chaining so a feature without a `properties`
 * object yields undefined instead of throwing.
 *
 * @param {Object} feature - GeoJSON feature.
 * @returns {string|number|undefined} The field id, or undefined if absent.
 */
function getFieldNo(feature) {
    return feature.properties?.['Field No']
        ?? feature.properties?.['field']
        ?? feature.properties?.['Field_No'];
}
+
/**
 * Apply the automatic fixes chosen after analysis:
 *   1. delete fields whose total overlap exceeds 30% of their own area,
 *   2. for the remaining overlapping pairs, trim the overlap off the
 *      SMALLER polygon (turf.difference); a polygon fully contained in a
 *      larger one is removed entirely,
 *   3. dissolve multiple polygons sharing one field id into a single
 *      feature (turf.union),
 * then build cleanedGeoJSON for download and wire the edit-log download.
 * Runs inside a 100 ms setTimeout so "Processing edits..." can paint first.
 */
async function runEdits() {
    const editLog = [];
    editResults.classList.remove('hidden');
    editLogOutput.innerText = "Processing edits...";

    // Local logger mirroring log(), but targeting the edit panel.
    function logEdit(msg) {
        editLog.push(msg);
        editLogOutput.innerText = editLog.join('\n');
        editLogOutput.scrollTop = editLogOutput.scrollHeight;
    }

    setTimeout(async () => {
        let features = rawGeoJSON.features;

        // Clean all features before processing
        features = features.map(f => cleanDuplicateVertices(f));

        let delCount = 0;

        // 1. Remove > 30% overlaps
        // We need to re-calculate overlaps because we want to be precise

        // Build Index again
        let tree = new RBush();
        let items = features.map((f, index) => ({
            minX: turf.bbox(f)[0],
            minY: turf.bbox(f)[1],
            maxX: turf.bbox(f)[2],
            maxY: turf.bbox(f)[3],
            feature: f,
            id: getFieldNo(f),
            area: turf.area(f),
            index: index
        }));
        tree.load(items);

        // Calculate overlap per field
        let fieldOverlapMap = {};

        // Find all overlaps
        // Note: This is slightly inefficient to re-do, but safer.
        for (let i = 0; i < items.length; i++) {
            const itemA = items[i];
            const candidates = tree.search(itemA);
            for (const itemB of candidates) {
                if (itemA.index < itemB.index) { // Only check pair once
                    // NOTE(review): unlike analyze(), this intersect call is
                    // not wrapped in try/catch — an invalid geometry would
                    // abort the whole edit pass. Confirm whether that is OK.
                    const intersection = turf.intersect(itemA.feature, itemB.feature);
                    if (intersection) {
                        const area = turf.area(intersection);
                        if (area > 1) { // ignore slivers under 1 m²
                            if (!fieldOverlapMap[itemA.id]) fieldOverlapMap[itemA.id] = 0;
                            fieldOverlapMap[itemA.id] += area;

                            if (!fieldOverlapMap[itemB.id]) fieldOverlapMap[itemB.id] = 0;
                            fieldOverlapMap[itemB.id] += area;
                        }
                    }
                }
            }
        }

        // Identify fields to delete
        const toDelete = new Set();
        Object.keys(fieldOverlapMap).forEach(fid => {
            // Loose == is deliberate: fid is a string key while item.id may
            // be numeric.
            const item = items.find(it => it.id == fid);
            if (item) {
                const pct = (fieldOverlapMap[fid] / item.area) * 100;
                if (pct > 30) {
                    toDelete.add(fid);
                    logEdit(`Field ${fid} overlap ${pct.toFixed(1)}% > 30% -- ✕ Deleted!`);
                    delCount++;
                }
            }
        });

        // Apply Deletion
        // NOTE(review): toDelete holds string keys; if getFieldNo(f) is
        // numeric, Set.has() here will not match — confirm id types.
        features = features.filter(f => !toDelete.has(getFieldNo(f)));

        if (delCount > 0) {
            logEdit(`✓ Significant overlap fixed - ${delCount} geometries deleted.`);
        } else {
            logEdit(`✓ No significant overlap detected.`);
        }

        // 2. Fix remaining overlaps (< 30%)
        // Loop through intersections and split up difference
        logEdit("\nChecking for remaining overlaps (<30%) to fix...");

        // Re-index with remaining features
        items = features.map((f, index) => ({
            minX: turf.bbox(f)[0],
            minY: turf.bbox(f)[1],
            maxX: turf.bbox(f)[2],
            maxY: turf.bbox(f)[3],
            feature: f,
            id: getFieldNo(f),
            area: turf.area(f),
            index: index
        }));
        tree = new RBush();
        tree.load(items);

        let fixCount = 0;

        // Collect the overlapping pairs up front; geometries are then
        // mutated pair-by-pair below.
        let pairs = [];
        for (let i = 0; i < items.length; i++) {
            const itemA = items[i];
            const candidates = tree.search(itemA);
            for (const itemB of candidates) {
                if (itemA.index < itemB.index) {
                    const intersection = turf.intersect(itemA.feature, itemB.feature);
                    if (intersection && turf.area(intersection) > 1) {
                        pairs.push({a: itemA, b: itemB});
                    }
                }
            }
        }

        for (const pair of pairs) {
            const f1 = pair.a.feature; // Reference to object in 'features' array
            const f2 = pair.b.feature;

            // Check if they are still valid/exist (in case we deleted one? No we filtered already)
            // Re-check intersection because one might have been modified by a previous iteration
            const intersection = turf.intersect(f1, f2);
            if (!intersection || turf.area(intersection) <= 1) continue;

            // Areas are recomputed here because earlier pairs may have
            // trimmed one of these geometries.
            const area1 = turf.area(f1);
            const area2 = turf.area(f2);

            // "Add the overlapping area to the biggest plot"
            // Means subtract overlap from the smallest plot.

            let updated = false;
            if (area1 > area2) {
                // f1 is bigger. Subtract f1 from f2.
                // poly2 <- st_difference(poly2, st_union(poly1))
                // effectively poly2 = poly2 - poly1
                try {
                    const diff = turf.difference(f2, f1);
                    if (diff) {
                        f2.geometry = diff.geometry;
                        updated = true;
                        logEdit(`✓ Fixed conflict between ${getFieldNo(f1)} and ${getFieldNo(f2)} (Trimmed ${getFieldNo(f2)})`);
                    } else {
                        // f2 completely inside f1?
                        logEdit(`! ${getFieldNo(f2)} is completely inside ${getFieldNo(f1)} - Removed ${getFieldNo(f2)}`);
                        features = features.filter(f => f !== f2);
                    }
                } catch(e) {
                    console.warn("Difference failed", e);
                }
            } else {
                // f2 is bigger or equal. Subtract f2 from f1.
                try {
                    const diff = turf.difference(f1, f2);
                    if (diff) {
                        f1.geometry = diff.geometry;
                        updated = true;
                        logEdit(`✓ Fixed conflict between ${getFieldNo(f1)} and ${getFieldNo(f2)} (Trimmed ${getFieldNo(f1)})`);
                    } else {
                        logEdit(`! ${getFieldNo(f1)} is completely inside ${getFieldNo(f2)} - Removed ${getFieldNo(f1)}`);
                        features = features.filter(f => f !== f1);
                    }
                } catch(e) {
                    console.warn("Difference failed", e);
                }
            }

            if (updated) fixCount++;
        }

        if (fixCount === 0) {
            logEdit("✓ No <30% overlap detected.");
        }

        // 3. Dissolve/Merge polygons by field ID
        logEdit("\n=== Dissolving polygons by field ID... ===");

        // Group features by field ID
        const groupedByField = {};
        features.forEach(f => {
            const fieldId = getFieldNo(f);
            if (!groupedByField[fieldId]) {
                groupedByField[fieldId] = [];
            }
            groupedByField[fieldId].push(f);
        });

        let dissolveCount = 0;
        const dissolvedFeatures = [];

        Object.keys(groupedByField).forEach(fieldId => {
            const group = groupedByField[fieldId];

            if (group.length === 1) {
                // No dissolve needed
                dissolvedFeatures.push(group[0]);
            } else {
                // Multiple polygons with same field ID - need to union them
                logEdit(`  Dissolving ${group.length} polygons for field ${fieldId}...`);

                try {
                    // Union all geometries for this field
                    let unionGeom = group[0];

                    for (let i = 1; i < group.length; i++) {
                        unionGeom = turf.union(unionGeom, group[i]);
                        if (!unionGeom) {
                            logEdit(`⚠ Warning: Could not union all parts of field ${fieldId}`);
                            // Fall back to keeping individual parts
                            dissolvedFeatures.push(...group);
                            return; // exits only this forEach callback
                        }
                    }

                    // Preserve original properties from first feature
                    unionGeom.properties = {...group[0].properties};
                    dissolvedFeatures.push(unionGeom);
                    dissolveCount++;
                    logEdit(`✓ Field ${fieldId}: Merged ${group.length} parts into 1`);
                } catch (e) {
                    logEdit(`⚠ Error dissolving field ${fieldId}: ${e.message}`);
                    // Fall back to keeping individual parts
                    dissolvedFeatures.push(...group);
                }
            }
        });

        if (dissolveCount > 0) {
            logEdit(`✓ Dissolved ${dissolveCount} fields with multiple parts.`);
        } else {
            logEdit(`✓ No multi-part fields detected.`);
        }

        features = dissolvedFeatures;

        // Prepare download (carry over the source CRS, if any)
        cleanedGeoJSON = {
            type: "FeatureCollection",
            features: features,
            crs: rawGeoJSON.crs
        };

        logEdit("\n=== Saving edited shapefiles... ===");
        logEdit(`✓ Ready to download ${features.length} geometries.`);

        // Add event listener for download edit log
        // (onclick, not addEventListener, so re-running edits replaces the
        // previous handler instead of stacking duplicates)
        downloadEditLogBtn.onclick = () => downloadText(editLog.join('\n'), `${originalFileName}_edit_log.txt`);

    }, 100);
}
+
/**
 * Trigger a browser download of `content` as a plain-text file.
 * Builds a temporary object URL over a Blob, clicks a detached <a>
 * element to start the download, then releases the URL.
 *
 * @param {string} content - Text to save.
 * @param {string} filename - Suggested download file name.
 */
function downloadText(content, filename) {
    const blob = new Blob([content], {type: "text/plain;charset=utf-8"});
    const objectUrl = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = objectUrl;
    link.download = filename;
    link.click();
    URL.revokeObjectURL(objectUrl);
}
+
/**
 * Trigger a browser download of `content` serialized as pretty-printed
 * JSON (2-space indent), via a temporary object URL and a detached <a>.
 *
 * @param {Object} content - Value to serialize and save.
 * @param {string} filename - Suggested download file name.
 */
function downloadJson(content, filename) {
    const serialized = JSON.stringify(content, null, 2);
    const blob = new Blob([serialized], {type: "application/json"});
    const objectUrl = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.href = objectUrl;
    link.download = filename;
    link.click();
    URL.revokeObjectURL(objectUrl);
}
+
/**
 * Download the overlap analysis table as a semicolon-separated CSV.
 * The header row comes from the keys of the first data row; bails out
 * with an alert when no overlap data has been collected.
 */
function downloadCsv() {
    if (overlapCsvData.length === 0) {
        alert("No overlap data to download.");
        return;
    }
    const headers = Object.keys(overlapCsvData[0]);
    const lines = [headers.join(';')];
    for (const row of overlapCsvData) {
        lines.push(headers.map(h => row[h]).join(';'));
    }
    downloadText(lines.join('\n'), `${originalFileName}_overlap_details.csv`);
}
diff --git a/webapps/polygon_analysis_editor/index.html b/webapps/polygon_analysis_editor/index.html
new file mode 100644
index 0000000..9cdaa71
--- /dev/null
+++ b/webapps/polygon_analysis_editor/index.html
@@ -0,0 +1,220 @@
+
+
+
Upload a GeoJSON file containing the polygons to analyze. The tool will check for overlaps and other issues.
+