updated website and small things

This commit is contained in:
Timon 2026-01-14 11:02:45 +01:00
parent fc7e5f1ee0
commit dfa0aa900d
14 changed files with 1760 additions and 218 deletions

15
.renvignore Normal file
View file

@ -0,0 +1,15 @@
# Ignore large Python experiment directories during renv dependency discovery
# These slow down startup and contain no R dependencies
laravel_app/
data_validation_tool/
python_app/harvest_detection_experiments/
python_app/experiments/
phase2_refinement/
webapps/
tools/
output/
renv/
*.py
*.ipynb
.git/

View file

@ -31,6 +31,7 @@ suppressPackageStartupMessages({
library(readxl)
library(here)
library(furrr)
library(future)
})
# 2. Process command line arguments
@ -118,19 +119,44 @@ main <- function() {
stop(e)
})
# 4. Generate date list for processing
# ---------------------------------
dates <- date_list(end_date, 7)
log_message(paste("Processing data for week", dates$week, "of", dates$year))
# 5. Find and filter raster files by date
# 5. Find and filter raster files by date - with grid size detection
# -----------------------------------
log_message("Searching for raster files")
# Check if tiles exist (Script 01 output)
tile_folder <- file.path("laravel_app", "storage", "app", project_dir, "daily_tiles_split")
# Check if tiles exist (Script 01 output) - detect grid size dynamically
tiles_split_base <- file.path("laravel_app", "storage", "app", project_dir, "daily_tiles_split")
# Detect grid size from daily_tiles_split folder structure
# Expected structure: daily_tiles_split/5x5/ or daily_tiles_split/10x10/ etc.
grid_size <- NA
if (dir.exists(tiles_split_base)) {
subfolders <- list.dirs(tiles_split_base, full.names = FALSE, recursive = FALSE)
# Look for grid size patterns like "5x5", "10x10", "20x20"
grid_patterns <- grep("^\\d+x\\d+$", subfolders, value = TRUE)
if (length(grid_patterns) > 0) {
grid_size <- grid_patterns[1] # Use first grid size found
log_message(paste("Detected grid size:", grid_size))
}
}
# Construct tile folder path with grid size
if (!is.na(grid_size)) {
tile_folder <- file.path(tiles_split_base, grid_size)
} else {
tile_folder <- tiles_split_base
}
use_tiles <- dir.exists(tile_folder)
# Make grid_size available globally for other functions
assign("grid_size", grid_size, envir = .GlobalEnv)
tryCatch({
if (use_tiles) {
# Use tile-based processing
@ -145,7 +171,8 @@ main <- function() {
field_boundaries_sf = field_boundaries_sf,
daily_CI_vals_dir = daily_CI_vals_dir,
cumulative_CI_vals_dir = cumulative_CI_vals_dir,
merged_final_dir = merged_final
merged_final_dir = merged_final,
grid_size = grid_size
)
} else {

File diff suppressed because it is too large Load diff

View file

@ -1,155 +0,0 @@
# Angata KPI Script Updates - 09_calculate_kpis_Angata.R
## Overview
The script has been restructured to focus on **4 required KPIs** for Angata, with legacy KPIs disabled by default but retained for future use.
## Changes Made
### 1. **Script Configuration**
- **File**: `09_calculate_kpis_Angata.R`
- **Toggle Variable**: `ENABLE_LEGACY_KPIS` (default: `FALSE`)
- Set to `TRUE` to run the 6 original KPIs
- Set to `FALSE` for Angata's 4 KPIs only
### 2. **Angata KPIs (4 Required)**
#### KPI 1: **Area Change Summary** ✅ REAL DATA
- **File**: Embedded in script as `calculate_area_change_kpi()`
- **Method**: Compares current week CI to previous week CI
- **Classification**:
- **Improving areas**: Mean change > +0.5 CI units
- **Stable areas**: Mean change between -0.5 and +0.5 CI units
- **Declining areas**: Mean change < -0.5 CI units
- **Output**: Hectares, Acres, and % of farm for each category
- **Data Type**: REAL DATA (processed from satellite imagery)
#### KPI 2: **Germination Acreage** ✅ REAL DATA
- **Function**: `calculate_germination_acreage_kpi()`
- **Germination Phase Detection**:
- **Start germination**: When 10% of field's CI > 2
- **End germination**: When 70% of field's CI ≥ 2
- **Output**:
- Count of fields in germination phase
- Count of fields in post-germination phase
- Total acres and % of farm for each phase
- **Data Type**: REAL DATA (CI-based, calculated from satellite imagery)
#### KPI 3: **Harvested Acreage** ⚠️ DUMMY DATA
- **Function**: `calculate_harvested_acreage_kpi()`
- **Current Status**: Returns zero values with clear "DUMMY DATA - Detection TBD" label
- **TODO**: Implement harvesting detection logic
- Likely indicators: CI drops below 1.5, sudden backscatter change, etc.
- **Output Format**:
- Number of harvested fields
- Total acres
- % of farm
- Clearly marked as DUMMY DATA in output table
#### KPI 4: **Mature Acreage** ⚠️ DUMMY DATA
- **Function**: `calculate_mature_acreage_kpi()`
- **Current Status**: Returns zero values with clear "DUMMY DATA - Definition TBD" label
- **Concept**: Mature fields have high and stable CI for several weeks
- **TODO**: Implement stability-based maturity detection
- Calculate CI trend over last 3-4 weeks per field
- Stability metric: low CV over period, high CI relative to field max
- Threshold: e.g., field reaches 80%+ of max CI and stable for 3+ weeks
- **Output Format**:
- Number of mature fields
- Total acres
- % of farm
- Clearly marked as DUMMY DATA in output table
### 3. **Legacy KPIs (Disabled by Default)**
These original legacy KPIs are **disabled**, but the code is preserved for future use (note: the script refers to 6 legacy KPIs, yet only 5 are listed below — verify the full set against the script):
1. Field Uniformity Summary
2. TCH Forecasted
3. Growth Decline Index
4. Weed Presence Score
5. Gap Filling Score
To enable: Set `ENABLE_LEGACY_KPIS <- TRUE` in the script
### 4. **Output & Logging**
#### Console Output (STDOUT)
```
=== ANGATA KPI CALCULATION SUMMARY ===
Report Date: [date]
Current Week: [week]
Previous Week: [week]
Total Fields Analyzed: [count]
Project: [project_name]
Calculation Time: [timestamp]
Legacy KPIs Enabled: FALSE
--- REQUIRED ANGATA KPIs ---
1. Area Change Summary (REAL DATA):
[table]
2. Germination Acreage (CI-based, REAL DATA):
[table]
3. Harvested Acreage (DUMMY DATA - Detection TBD):
[table with "DUMMY" marker]
4. Mature Acreage (DUMMY DATA - Definition TBD):
[table with "DUMMY" marker]
=== ANGATA KPI CALCULATION COMPLETED ===
```
#### File Output (RDS)
- **Location**: `laravel_app/storage/app/[project]/reports/kpis/`
- **Filename**: `[project]_kpi_summary_tables_week[XX].rds`
- **Contents**:
- `area_change_summary`: Summary table
- `germination_summary`: Summary table
- `harvested_summary`: Summary table (DUMMY)
- `mature_summary`: Summary table (DUMMY)
- Field-level results for each KPI
- Metadata (report_date, weeks, total_fields, etc.)
### 5. **Data Clarity Markers**
All tables in output clearly indicate:
- **REAL DATA**: Derived from satellite CI measurements
- **DUMMY DATA - [TBD Item]**: Placeholder values; actual method to be implemented
This prevents misinterpretation of preliminary results.
## Usage
```powershell
# Run Angata KPIs only (default, legacy disabled)
Rscript r_app/09_calculate_kpis_Angata.R 2025-11-27 7 angata
# With specific date
Rscript r_app/09_calculate_kpis_Angata.R 2025-11-20 7 angata
```
## Future Work
1. **Harvesting Detection**: Implement CI threshold + temporal pattern analysis
2. **Maturity Definition**: Define stability metrics and thresholds based on field CI ranges
3. **Legacy KPIs**: Adapt or retire based on Angata's needs
4. **Integration**: Connect outputs to reporting system (R Markdown, Word reports, etc.)
## File Structure
```
r_app/
├── 09_calculate_kpis_Angata.R (main script - UPDATED)
├── kpi_utils.R (optional - legacy functions)
├── crop_messaging_utils.R (dependencies)
├── parameters_project.R (project config)
└── growth_model_utils.R (optional)
Output:
└── laravel_app/storage/app/angata/reports/kpis/
└── angata_kpi_summary_tables_week[XX].rds
```
---
**Updated**: November 27, 2025

View file

@ -653,21 +653,27 @@ process_ci_values <- function(dates, field_boundaries, merged_final_dir,
#' Process CI values from pre-split tiles (Script 01 output)
#'
#' This function processes CI values from tiles instead of full-extent rasters.
#' Tiles are created by Script 01 and stored in daily_tiles_split/[DATE]/ folders.
#' Tiles are created by Script 01 and stored in daily_tiles_split/[GRID_SIZE]/[DATE]/ folders.
#' For each field, it aggregates CI statistics from all tiles that intersect that field.
#' Output follows the same grid structure: merged_final_tif/[GRID_SIZE]/[DATE]/
#'
#' NOTE: Processes dates SEQUENTIALLY but tiles WITHIN EACH DATE in parallel (furrr)
#' This avoids worker process communication issues while still getting good speedup.
#'
#' @param dates List of dates from date_list()
#' @param tile_folder Path to the tile folder (daily_tiles_split)
#' @param tile_folder Path to the tile folder (daily_tiles_split/[GRID_SIZE])
#' @param field_boundaries Field boundaries as vector object
#' @param field_boundaries_sf Field boundaries as SF object
#' @param daily_CI_vals_dir Directory to save daily CI values
#' @param cumulative_CI_vals_dir Directory to save cumulative CI values
#' @param merged_final_dir Directory to save processed tiles with CI band
#' @param merged_final_dir Base directory to save processed tiles with CI band
#' @param grid_size Grid size label (e.g., "5x5", "10x10") for output path structure
#' @return NULL (used for side effects)
#'
process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
field_boundaries_sf, daily_CI_vals_dir,
cumulative_CI_vals_dir, merged_final_dir) {
cumulative_CI_vals_dir, merged_final_dir,
grid_size = NA) {
# Define path for combined CI data
combined_ci_path <- here::here(cumulative_CI_vals_dir, "combined_CI_data.rds")
@ -691,11 +697,29 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
if (!file.exists(combined_ci_path)) {
safe_log("combined_CI_data.rds does not exist. Creating new file with all available tile data.")
# Process all tile dates
# Process all tile dates SEQUENTIALLY but with parallel tile processing
# Tiles within each date are processed in parallel via extract_ci_from_tiles()
all_pivot_stats <- list()
for (date in tile_dates) {
safe_log(paste("Processing tiles for date:", date))
for (i in seq_along(tile_dates)) {
date <- tile_dates[i]
# SKIP: Check if this date already has processed output tiles
if (!is.na(grid_size)) {
output_date_folder <- file.path(merged_final_dir, grid_size, date)
} else {
output_date_folder <- file.path(merged_final_dir, date)
}
if (dir.exists(output_date_folder)) {
existing_tiles <- list.files(output_date_folder, pattern = "\\.tif$")
if (length(existing_tiles) > 0) {
safe_log(paste("[", i, "/", length(tile_dates), "] SKIP:", date, "- already has", length(existing_tiles), "tiles"))
next
}
}
safe_log(paste("[", i, "/", length(tile_dates), "] Processing tiles for date:", date))
date_tile_dir <- file.path(tile_folder, date)
tile_files <- list.files(date_tile_dir, pattern = "\\.tif$", full.names = TRUE)
@ -705,15 +729,17 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
next
}
safe_log(paste(" Found", length(tile_files), "tiles for date", date))
safe_log(paste(" Found", length(tile_files), "tiles - processing in parallel"))
# Process all tiles for this date and aggregate to fields
# Tiles are processed in parallel via furrr::future_map() inside extract_ci_from_tiles()
date_stats <- extract_ci_from_tiles(
tile_files = tile_files,
date = date,
field_boundaries_sf = field_boundaries_sf,
daily_CI_vals_dir = daily_CI_vals_dir,
merged_final_tif_dir = merged_final_dir
merged_final_tif_dir = merged_final_dir,
grid_size = grid_size
)
if (!is.null(date_stats)) {
@ -735,13 +761,37 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
}
} else {
# Process only new dates
# Process only new dates SEQUENTIALLY but with parallel tile processing
safe_log("combined_CI_data.rds exists, adding new tile data.")
if (length(dates_to_process) == 0) {
safe_log("No new dates to process", "WARNING")
return(invisible(NULL))
}
safe_log(paste("Processing", length(dates_to_process), "new dates..."))
new_pivot_stats_list <- list()
for (date in dates_to_process[1:2]) {
safe_log(paste("Processing tiles for date:", date))
for (i in seq_along(dates_to_process)) {
date <- dates_to_process[i]
# SKIP: Check if this date already has processed output tiles
if (!is.na(grid_size)) {
output_date_folder <- file.path(merged_final_dir, grid_size, date)
} else {
output_date_folder <- file.path(merged_final_dir, date)
}
if (dir.exists(output_date_folder)) {
existing_tiles <- list.files(output_date_folder, pattern = "\\.tif$")
if (length(existing_tiles) > 0) {
safe_log(paste("[", i, "/", length(dates_to_process), "] SKIP:", date, "- already has", length(existing_tiles), "tiles"))
next
}
}
safe_log(paste("[", i, "/", length(dates_to_process), "] Processing tiles for date:", date))
date_tile_dir <- file.path(tile_folder, date)
tile_files <- list.files(date_tile_dir, pattern = "\\.tif$", full.names = TRUE)
@ -751,7 +801,7 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
next
}
safe_log(paste(" Found", length(tile_files), "tiles for date", date))
safe_log(paste(" Found", length(tile_files), "tiles - processing in parallel"))
# Extract CI from tiles for this date
date_stats <- extract_ci_from_tiles(
@ -759,7 +809,8 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
date = date,
field_boundaries_sf = field_boundaries_sf,
daily_CI_vals_dir = daily_CI_vals_dir,
merged_final_tif_dir = merged_final_dir
merged_final_tif_dir = merged_final_dir,
grid_size = grid_size
)
if (!is.null(date_stats)) {
@ -788,17 +839,18 @@ process_ci_values_from_tiles <- function(dates, tile_folder, field_boundaries,
#' 1. Loads tile
#' 2. Creates/extracts CI band
#' 3. Creates output raster with Red, Green, Blue, NIR, CI bands
#' 4. Saves to merged_final_tif_dir/[DATE]/ mirroring daily_tiles_split structure
#' 4. Saves to merged_final_tif_dir/[GRID_SIZE]/[DATE]/ mirroring daily_tiles_split structure
#' 5. Extracts field-level CI statistics
#' Returns statistics aggregated to field level.
#'
#' @param tile_file Path to a single tile TIF file
#' @param field_boundaries_sf Field boundaries as SF object
#' @param date Character string of the date (YYYY-MM-DD format)
#' @param merged_final_tif_dir Directory to save processed tiles with CI band
#' @param merged_final_tif_dir Base directory to save processed tiles with CI band
#' @param grid_size Grid size label (e.g., "5x5", "10x10") for output path structure
#' @return Data frame with field CI statistics for this tile, or NULL if processing failed
#'
process_single_tile <- function(tile_file, field_boundaries_sf, date, merged_final_tif_dir) {
process_single_tile <- function(tile_file, field_boundaries_sf, date, merged_final_tif_dir, grid_size = NA) {
tryCatch({
tile_filename <- basename(tile_file)
safe_log(paste(" [TILE] Loading:", tile_filename))
@ -845,8 +897,14 @@ process_single_tile <- function(tile_file, field_boundaries_sf, date, merged_fin
output_raster <- c(red_band, green_band, blue_band, nir_band, ci_band)
names(output_raster) <- c("Red", "Green", "Blue", "NIR", "CI")
# Save processed tile to merged_final_tif_dir/[DATE]/ with same filename
date_dir <- file.path(merged_final_tif_dir, date)
# Save processed tile to merged_final_tif_dir/[GRID_SIZE]/[DATE]/ with same filename
# This mirrors the input structure: daily_tiles_split/[GRID_SIZE]/[DATE]/
if (!is.na(grid_size)) {
date_dir <- file.path(merged_final_tif_dir, grid_size, date)
} else {
date_dir <- file.path(merged_final_tif_dir, date)
}
if (!dir.exists(date_dir)) {
dir.create(date_dir, recursive = TRUE, showWarnings = FALSE)
}
@ -883,7 +941,7 @@ process_single_tile <- function(tile_file, field_boundaries_sf, date, merged_fin
#' Given a set of tile files for a single date, this function:
#' 1. Loads each tile IN PARALLEL using furrr
#' 2. Creates/extracts CI band
#' 3. Saves processed tile (Red, Green, Blue, NIR, CI) to merged_final_tif_dir/[DATE]/
#' 3. Saves processed tile (Red, Green, Blue, NIR, CI) to merged_final_tif_dir/[GRID_SIZE]/[DATE]/
#' 4. Calculates field statistics from CI band
#' 5. Aggregates field statistics across tiles
#' 6. Saves individual date file (matching legacy workflow)
@ -894,24 +952,43 @@ process_single_tile <- function(tile_file, field_boundaries_sf, date, merged_fin
#' @param date Character string of the date (YYYY-MM-DD format)
#' @param field_boundaries_sf Field boundaries as SF object
#' @param daily_CI_vals_dir Directory to save individual date RDS files
#' @param merged_final_tif_dir Directory to save processed tiles with CI band (mirrors daily_tiles_split structure)
#' @param merged_final_tif_dir Base directory to save processed tiles with CI band
#' @param grid_size Grid size label (e.g., "5x5", "10x10") for output path structure
#' @return Data frame with field CI statistics for the date
#'
extract_ci_from_tiles <- function(tile_files, date, field_boundaries_sf, daily_CI_vals_dir = NULL, merged_final_tif_dir = NULL) {
extract_ci_from_tiles <- function(tile_files, date, field_boundaries_sf, daily_CI_vals_dir = NULL, merged_final_tif_dir = NULL, grid_size = NA) {
if (!inherits(field_boundaries_sf, "sf")) {
field_boundaries_sf <- sf::st_as_sf(field_boundaries_sf)
}
safe_log(paste(" Processing", length(tile_files), "tiles for date", date, "(parallel processing)"))
safe_log(paste(" Processing", length(tile_files), "tiles for date", date, "(3-tile parallel batch)"))
# Process tiles in parallel using furrr::future_map
# This replaces the sequential for loop, processing 2-4 tiles simultaneously
stats_list <- furrr::future_map(
.x = tile_files,
.f = ~ process_single_tile(.x, field_boundaries_sf, date, merged_final_tif_dir),
.options = furrr::furrr_options(seed = TRUE)
)
# Windows-compatible parallelization: Process tiles in small batches
# Use future_map with 3 workers - stable and efficient on Windows
# Set up minimal future plan (3 workers max)
future::plan(future::multisession, workers = 3)
# Process tiles using furrr (3 multisession workers, configured above)
# Use retry logic for worker stability
stats_list <- tryCatch({
furrr::future_map(
tile_files,
~ process_single_tile(.x, field_boundaries_sf, date, merged_final_tif_dir, grid_size = grid_size),
.progress = FALSE,
.options = furrr::furrr_options(seed = TRUE)
)
}, error = function(e) {
safe_log(paste("Parallel processing failed:", e$message, "- falling back to sequential"), "WARNING")
# Fallback to sequential if parallel fails
lapply(
tile_files,
function(tile_file) {
process_single_tile(tile_file, field_boundaries_sf, date, merged_final_tif_dir, grid_size = grid_size)
}
)
})
# Extract names and filter out NULL results (failed tiles)
tile_names <- basename(tile_files)

Binary file not shown.

View file

@ -56,8 +56,9 @@
border-radius: 8px;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
display: flex;
align-items: center;
gap: 10px;
flex-direction: column;
align-items: flex-start;
gap: 12px;
}
.map-controls label {
@ -70,6 +71,19 @@
font-weight: 500;
}
.map-controls-group {
display: flex;
flex-direction: column;
gap: 8px;
border-top: 1px solid #e0e0e0;
padding-top: 8px;
}
.map-controls-group:first-child {
border-top: none;
padding-top: 0;
}
.toggle-switch {
position: relative;
display: inline-block;
@ -764,14 +778,32 @@
<div class="map-container">
<div id="map"></div>
<div class="map-controls">
<label for="mapToggle">
<span class="toggle-label" id="mapTypeLabel">OSM</span>
<div class="toggle-switch">
<input type="checkbox" id="mapToggle">
<span class="toggle-slider"></span>
</div>
<span>🛰️</span>
</label>
<div class="map-controls-group">
<label for="mapToggle">
<span class="toggle-label" id="mapTypeLabel">OSM</span>
<div class="toggle-switch">
<input type="checkbox" id="mapToggle">
<span class="toggle-slider"></span>
</div>
<span>🛰️</span>
</label>
</div>
<div class="map-controls-group">
<label for="labelsToggle">
<div class="toggle-switch">
<input type="checkbox" id="labelsToggle" checked>
<span class="toggle-slider"></span>
</div>
<span>Show Labels</span>
</label>
<label for="labelPositionToggle">
<div class="toggle-switch">
<input type="checkbox" id="labelPositionToggle">
<span class="toggle-slider"></span>
</div>
<span id="labelPositionText">Center</span>
</label>
</div>
</div>
</div>
</div>
@ -807,8 +839,11 @@
let currentLayer = 'osm';
let geojsonLayer = null;
let labelsLayer = null;
let currentGeojsonData = null;
let featuresList = [];
let showLabels = true;
let labelPosition = 'center'; // 'center' or 'side'
// Toggle map layer
const mapToggle = document.getElementById('mapToggle');
@ -830,6 +865,31 @@
}
});
// Toggle labels visibility
const labelsToggle = document.getElementById('labelsToggle');
labelsToggle.addEventListener('change', () => {
showLabels = labelsToggle.checked;
if (labelsLayer) {
if (showLabels) {
labelsLayer.addTo(map);
} else {
map.removeLayer(labelsLayer);
}
}
});
// Toggle label position
const labelPositionToggle = document.getElementById('labelPositionToggle');
const labelPositionText = document.getElementById('labelPositionText');
labelPositionToggle.addEventListener('change', () => {
labelPosition = labelPositionToggle.checked ? 'side' : 'center';
labelPositionText.textContent = labelPositionToggle.checked ? 'Side' : 'Center';
// Recreate labels with new position
if (currentGeojsonData) {
loadGeojson(currentGeojsonData, '');
}
});
// Elements
const geojsonInput = document.getElementById('geojsonFile');
const fileNameDisplay = document.getElementById('fileName');
@ -863,6 +923,64 @@
setTimeout(() => el.classList.remove('active'), 5000);
}
// Resolve label overlaps using collision detection algorithm
// Resolve label overlaps with an iterative pairwise repulsion pass.
//
// Labels closer (in screen pixels) than the approximate label diagonal are
// pushed apart along the axis between them. A label is only moved when it
// stays within maxDrift degrees of latitude of its original feature, so
// labels never wander far from what they annotate.
//
// Fix: previously two labels at EXACTLY the same point (distance === 0)
// were never separated because of the `distance > 0` guard. Coincident
// labels are now pushed apart horizontally, which leaves latitude unchanged
// and therefore always passes the drift check.
//
// @param labelPositions Array of {fieldName, latlng, originalLatlng, iconAnchor, isPoint}
// @returns New array (input objects are copied, not mutated) with adjusted latlng
function resolveLabelOverlaps(labelPositions) {
    const labelWidth = 150;  // Approximate label width in pixels
    const labelHeight = 30;  // Approximate label height in pixels
    const minDistance = Math.sqrt(labelWidth * labelWidth + labelHeight * labelHeight);

    // Work on shallow copies so the caller's position objects are untouched.
    const adjustedPositions = labelPositions.map(pos => ({ ...pos }));

    // Bounded number of passes; stop early once a pass makes no adjustment.
    for (let iteration = 0; iteration < 5; iteration++) {
        let hasAdjustment = false;

        for (let i = 0; i < adjustedPositions.length; i++) {
            for (let j = i + 1; j < adjustedPositions.length; j++) {
                const pos1 = adjustedPositions[i];
                const pos2 = adjustedPositions[j];

                const p1 = map.project(pos1.latlng);
                const p2 = map.project(pos2.latlng);

                const dx = p2.x - p1.x;
                const dy = p2.y - p1.y;
                const distance = Math.sqrt(dx * dx + dy * dy);

                // If labels are too close, push them apart
                if (distance < minDistance) {
                    hasAdjustment = true;
                    // Coincident labels have no defined direction; push them
                    // horizontally (angle 0) so latitude — the quantity the
                    // maxDrift check below constrains — is preserved.
                    const angle = distance > 0 ? Math.atan2(dy, dx) : 0;
                    const pushDistance = (minDistance - distance) / 2 + 5;

                    // Move labels away from each other
                    const offset1 = map.unproject([
                        p1.x - Math.cos(angle) * pushDistance,
                        p1.y - Math.sin(angle) * pushDistance
                    ]);
                    const offset2 = map.unproject([
                        p2.x + Math.cos(angle) * pushDistance,
                        p2.y + Math.sin(angle) * pushDistance
                    ]);

                    // Only adjust if the label hasn't drifted too far (in
                    // latitude) from its original feature position.
                    const maxDrift = 0.003;
                    if (Math.abs(offset1.lat - pos1.originalLatlng.lat) < maxDrift) {
                        pos1.latlng = offset1;
                    }
                    if (Math.abs(offset2.lat - pos2.originalLatlng.lat) < maxDrift) {
                        pos2.latlng = offset2;
                    }
                }
            }
        }

        if (!hasAdjustment) break;
    }

    return adjustedPositions;
}
// Calculate area in square meters using Turf.js
function getFeatureArea(feature) {
try {
@ -957,6 +1075,11 @@
map.removeLayer(geojsonLayer);
}
// Clear previous labels
if (labelsLayer) {
map.removeLayer(labelsLayer);
}
currentGeojsonData = geojson;
featuresList = [];
@ -1018,6 +1141,86 @@
}
}).addTo(map);
// Create labels layer
labelsLayer = L.featureGroup([]);
// First pass: collect all label positions
const labelPositions = [];
features.forEach((feature, index) => {
const props = feature.properties || {};
const fieldName = props.field || props.name || props.field_name || props.fieldName || props.id || `Field ${index + 1}`;
if (feature.geometry && feature.geometry.type !== 'Point') {
// Get centroid for polygon features
const bounds = L.geoJSON(feature).getBounds();
const center = bounds.getCenter();
let labelLatlng = center;
let iconAnchor = [0, 0];
// If side position, offset label to the top-left of the bounds
if (labelPosition === 'side') {
const ne = bounds.getNorthEast();
labelLatlng = L.latLng(ne.lat, ne.lng);
iconAnchor = [-10, -30];
}
labelPositions.push({
fieldName,
latlng: labelLatlng,
originalLatlng: labelLatlng,
iconAnchor,
isPoint: false
});
} else if (feature.geometry && feature.geometry.type === 'Point') {
// For points, add label above the marker
const latlng = L.GeoJSON.coordsToLatLng(feature.geometry.coordinates);
labelPositions.push({
fieldName,
latlng: latlng,
originalLatlng: latlng,
iconAnchor: [0, 30],
isPoint: true
});
}
});
// Apply collision detection to resolve overlaps
const adjustedPositions = resolveLabelOverlaps(labelPositions);
// Second pass: create markers with adjusted positions
adjustedPositions.forEach((pos) => {
const label = L.marker(pos.latlng, {
icon: L.divIcon({
className: 'field-label',
html: `<div style="
background: rgba(102, 126, 234, 0.9);
color: white;
padding: 4px 8px;
border-radius: 4px;
font-size: 14px;
font-weight: 600;
white-space: nowrap;
box-shadow: 0 2px 4px rgba(0,0,0,0.2);
border: 2px solid white;
pointer-events: none;
text-align: center;
max-width: 200px;
overflow: hidden;
text-overflow: ellipsis;
">${pos.fieldName}</div>`,
iconSize: [null, null],
iconAnchor: pos.iconAnchor
}),
interactive: false
});
labelsLayer.addLayer(label);
});
if (showLabels) {
labelsLayer.addTo(map);
}
// Fit bounds
const bounds = geojsonLayer.getBounds();
map.fitBounds(bounds, { padding: [50, 50] });

View file

@ -162,25 +162,48 @@
});
// Handle login form
document.getElementById('loginForm').addEventListener('submit', function(e) {
document.getElementById('loginForm').addEventListener('submit', async function(e) {
e.preventDefault();
const password = document.getElementById('password').value;
const correctPassword = 'Activity3-Quaking4-Unashamed5-Penholder6';
const errorMessage = document.getElementById('errorMessage');
const button = this.querySelector('button');
if (password === correctPassword) {
// Store authentication in session
sessionStorage.setItem('authenticated', 'true');
// Disable button during request
button.disabled = true;
button.textContent = 'Verifying...';
try {
// Send password to Netlify function for server-side verification
const response = await fetch('/.netlify/functions/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ password })
});
// Redirect to main apps page
window.location.href = 'index.html';
} else {
// Show error message
errorMessage.textContent = 'Invalid password. Please try again.';
const data = await response.json();
if (response.ok) {
// Authentication successful
sessionStorage.setItem('authenticated', 'true');
window.location.href = 'index.html';
} else {
// Authentication failed
errorMessage.textContent = data.error || 'Invalid password. Please try again.';
errorMessage.classList.add('show');
document.getElementById('password').value = '';
document.getElementById('password').focus();
button.disabled = false;
button.textContent = 'Access Tools';
}
} catch (error) {
console.error('Login error:', error);
errorMessage.textContent = 'Connection error. Please check your internet and try again.';
errorMessage.classList.add('show');
document.getElementById('password').value = '';
document.getElementById('password').focus();
button.disabled = false;
button.textContent = 'Access Tools';
}
});

11
webapps/netlify.toml Normal file
View file

@ -0,0 +1,11 @@
# Netlify site configuration for the webapps deployment.
[build]
command = "# No build needed"
functions = "netlify/functions"
publish = "."
# Environment variables - Set these in Netlify UI (Site settings > Build & deploy > Environment)
# DO NOT commit actual values to git
# NOTE(review): Netlify's file-based configuration does not document a
# top-level [env] table — build-time variables belong under
# [build.environment], and function runtime variables must be set in the
# Netlify UI. Confirm these placeholder values are actually overridden in
# the UI, otherwise the functions will see no configuration at all.
# NOTE(review): a real-looking GOOGLE_SHEET_ID is committed below despite the
# "DO NOT commit" warning two lines up — verify this ID is safe to publish.
[env]
SMARTCANE_PASSWORD = "set_in_netlify_ui"
GOOGLE_SHEET_ID = "1ZHEIyhupNDHVd1EScBn0DnuiAzMFoZcAPZm3U65abkY"
GOOGLE_SHEET_PASSWORD = "optional_if_needed"

View file

@ -0,0 +1,52 @@
/**
* Netlify Function to fetch Google Sheets data securely
* Credentials are stored as environment variables, not exposed to frontend
*/
exports.handler = async (event) => {
try {
// Read credentials from environment variables (set in Netlify UI)
const sheetId = process.env.GOOGLE_SHEET_ID;
const sheetPassword = process.env.GOOGLE_SHEET_PASSWORD; // If needed for authentication
if (!sheetId) {
return {
statusCode: 500,
body: JSON.stringify({ error: 'GOOGLE_SHEET_ID not configured' })
};
}
// Construct the export URL
const csvUrl = `https://docs.google.com/spreadsheets/d/${sheetId}/export?format=csv`;
// Fetch the CSV from Google Sheets
const response = await fetch(csvUrl);
if (!response.ok) {
return {
statusCode: response.status,
body: JSON.stringify({ error: `Google Sheets returned ${response.status}` })
};
}
const csv = await response.text();
return {
statusCode: 200,
headers: {
'Content-Type': 'text/csv',
'Cache-Control': 'max-age=300' // Cache for 5 minutes
},
body: csv
};
} catch (error) {
console.error('Error fetching Google Sheet:', error);
return {
statusCode: 500,
body: JSON.stringify({
error: 'Failed to fetch data',
message: error.message
})
};
}
};

View file

@ -0,0 +1,66 @@
/**
* Netlify Function for secure login
* Password is stored as environment variable on server, never exposed to frontend
*/
exports.handler = async (event) => {
// Only allow POST requests
if (event.httpMethod !== 'POST') {
return {
statusCode: 405,
body: JSON.stringify({ error: 'Method not allowed' })
};
}
try {
// Parse request body
const body = JSON.parse(event.body);
const submittedPassword = body.password;
if (!submittedPassword) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Password required' })
};
}
// Get correct password from environment variable (set in Netlify UI)
const correctPassword = process.env.SMARTCANE_PASSWORD;
if (!correctPassword) {
console.error('SMARTCANE_PASSWORD environment variable not set');
return {
statusCode: 500,
body: JSON.stringify({ error: 'Server configuration error' })
};
}
// Compare passwords
if (submittedPassword === correctPassword) {
return {
statusCode: 200,
headers: {
'Set-Cookie': `auth_token=smartcane_${Date.now()}; Path=/; SameSite=Strict; Secure; HttpOnly`
},
body: JSON.stringify({
success: true,
message: 'Login successful'
})
};
} else {
// Add delay to prevent brute force
await new Promise(resolve => setTimeout(resolve, 1000));
return {
statusCode: 401,
body: JSON.stringify({ error: 'Invalid password' })
};
}
} catch (error) {
console.error('Login error:', error);
return {
statusCode: 500,
body: JSON.stringify({ error: 'Server error' })
};
}
};

View file

@ -225,6 +225,10 @@ function initMap() {
// Load CSV data
loadMillsData();
// Initialize Google Sheets auto-refresh
initGoogleSheetsAutoRefresh();
showGoogleSheetsSetup();
// Attach mode button listeners
attachModeListeners();
@ -277,11 +281,22 @@ function updateMeasurementPanel() {
// Load mills from CSV
async function loadMillsData() {
try {
const response = await fetch('sugar_cane_factories_africa.csv');
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
// Try to load from Google Sheets first
let csvText = await fetchGoogleSheetData();
// Fallback to local CSV if Google Sheets fails
if (!csvText) {
console.log('Falling back to local CSV file...');
const response = await fetch('sugar_cane_factories_africa.csv');
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
csvText = await response.text();
showNotification('Using local data (Google Sheet unavailable)', 'warning');
} else {
showNotification('✓ Data loaded from Google Sheet', 'success');
}
const csvText = await response.text();
parseCSV(csvText);
console.log('Mills loaded:', mills.length, mills.slice(0, 2));
renderMills();
@ -289,11 +304,10 @@ async function loadMillsData() {
updateLegend();
console.log('Legend updated');
} catch (error) {
console.error('Error loading CSV:', error);
// Show a notification to user
console.error('Error loading mills data:', error);
const notification = document.createElement('div');
notification.style.cssText = 'position: fixed; top: 20px; right: 20px; background: #ff6b6b; color: white; padding: 15px 20px; border-radius: 5px; z-index: 9999;';
notification.textContent = '⚠️ Could not load mill data. Make sure sugar_cane_factories_africa.csv is in the same folder.';
notification.textContent = '⚠️ Could not load mill data. Check console for details.';
document.body.appendChild(notification);
setTimeout(() => notification.remove(), 5000);
}

View file

@ -0,0 +1,151 @@
// Google Sheets Configuration
// This file connects to your Google Sheet for live data updates
// Client-side settings for the Google Sheets live-data integration.
// NOTE(review): the browser fetches CSV via the Netlify function
// (see fetchGoogleSheetData), so SHEET_ID / SHEET_NAME here appear to be
// informational/legacy rather than used for the request — confirm whether
// the serverless function reads its own copy of these values.
const GOOGLE_SHEETS_CONFIG = {
    // Your Google Sheet ID (from the URL)
    SHEET_ID: '1ZHEIyhupNDHVd1EScBn0DnuiAzMFoZcAPZm3U65abkY',
    // The sheet name or gid (the sheet tab you want to read from)
    // If using gid=220881066, you can reference it this way
    SHEET_NAME: 'Sheet1', // Change this to your actual sheet name if different
    // Auto-refresh interval in milliseconds (5 minutes = 300000ms)
    REFRESH_INTERVAL: 300000,
    // Enable auto-refresh (set to false to disable) — checked in
    // initGoogleSheetsAutoRefresh before the interval timer is started
    AUTO_REFRESH_ENABLED: true
};
/**
* Fetch data from Google Sheet via Netlify Function
* The function keeps credentials secret on the server
*/
/**
 * Fetch the mills CSV through the Netlify serverless proxy.
 *
 * Going through the function (rather than hitting Google Sheets directly)
 * keeps the sheet credentials out of the browser.
 *
 * @returns {Promise<string|null>} The CSV text on success, or null when the
 *   request fails (a warning notification is shown in that case so callers
 *   can fall back to local data).
 */
async function fetchGoogleSheetData() {
    try {
        const response = await fetch('/.netlify/functions/get-mills');
        if (response.ok) {
            const csvText = await response.text();
            console.log('✓ Data fetched from Netlify Function (Google Sheet)');
            return csvText;
        }
        // Non-2xx responses are routed through the same error path below.
        throw new Error(`HTTP error! status: ${response.status}`);
    } catch (error) {
        console.error('Error fetching data from Netlify Function:', error);
        showNotification('Could not fetch data. Check your connection.', 'warning');
        return null;
    }
}
/**
* Initialize auto-refresh of data from Google Sheet
*/
/**
 * Start the periodic refresh of mill data from the Google Sheet.
 *
 * Respects GOOGLE_SHEETS_CONFIG.AUTO_REFRESH_ENABLED; when enabled, every
 * REFRESH_INTERVAL ms it re-fetches the CSV, clears the existing markers,
 * re-parses and re-renders the map, and reapplies the active filters.
 * The initial load is handled separately in initApp.
 */
function initGoogleSheetsAutoRefresh() {
    if (!GOOGLE_SHEETS_CONFIG.AUTO_REFRESH_ENABLED) {
        console.log('Google Sheets auto-refresh is disabled');
        return;
    }

    console.log(`✓ Auto-refresh enabled (every ${GOOGLE_SHEETS_CONFIG.REFRESH_INTERVAL / 1000 / 60} minutes)`);

    // One refresh cycle: fetch → clear markers → parse → render → filter.
    const refreshFromSheet = async () => {
        console.log('🔄 Refreshing data from Google Sheet...');
        const csvData = await fetchGoogleSheetData();
        if (!csvData) {
            return; // fetch failed; keep the currently displayed data
        }

        // Remove stale markers before re-rendering the updated set.
        for (const marker of Object.values(millMarkers)) {
            map.removeLayer(marker);
        }
        millMarkers = {};

        parseCSV(csvData);
        renderMills();
        updateLegend();
        applyFilters();

        console.log(`✓ Updated ${mills.length} mills from Google Sheet`);
        showNotification(`Map updated with latest data (${mills.length} mills)`, 'success');
    };

    setInterval(refreshFromSheet, GOOGLE_SHEETS_CONFIG.REFRESH_INTERVAL);
}
/**
* Show notification to user
*/
/**
 * Display a transient toast notification in the top-right corner.
 *
 * The toast stays visible for 4 seconds, then fades out over 300 ms and is
 * removed from the DOM.
 *
 * @param {string} message - Text shown to the user.
 * @param {string} [type='info'] - One of 'success' | 'warning' | 'error' |
 *   'info'; any unknown value falls back to the info colour.
 */
function showNotification(message, type = 'info') {
    const palette = {
        'success': '#4CAF50',
        'warning': '#FF9800',
        'error': '#F44336',
        'info': '#2196F3'
    };

    const toast = document.createElement('div');
    toast.style.cssText = `
        position: fixed;
        top: 20px;
        right: 20px;
        background: ${palette[type] || palette.info};
        color: white;
        padding: 15px 20px;
        border-radius: 5px;
        z-index: 9999;
        box-shadow: 0 2px 8px rgba(0,0,0,0.2);
        font-weight: 500;
    `;
    toast.textContent = message;
    document.body.appendChild(toast);

    // Fade out after 4 s; remove the node once the 300 ms transition ends.
    setTimeout(() => {
        toast.style.transition = 'opacity 0.3s ease';
        toast.style.opacity = '0';
        setTimeout(() => toast.remove(), 300);
    }, 4000);
}
/**
* Provide setup instructions to the user
*/
/**
 * Print one-time setup instructions for the Google Sheets integration to the
 * browser console. Purely informational — has no effect on app state.
 */
function showGoogleSheetsSetup() {
    console.log(`
Google Sheets Integration Setup Instructions
1. Your Google Sheet is configured and ready!
2. Share the sheet with your colleagues:
   - Click "Share" in the top-right
   - Add their email addresses
   - They must have "Editor" access
3. Column headers required (case-sensitive):
   - Mill/Factory Name (or similar)
   - Country
   - Latitude
   - Longitude
   - Crushing Capacity (optional)
   - Annual Sugar Production (optional)
   - Notes (optional)
   - Data Year (optional)
4. The map will automatically update every 5 minutes
   with new data from the sheet
5. To change refresh interval, edit:
   GOOGLE_SHEETS_CONFIG.REFRESH_INTERVAL
`);
}

View file

@ -619,6 +619,7 @@
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
<script src="https://unpkg.com/leaflet-draw@1.0.4/dist/leaflet.draw.js"></script>
<script src="https://unpkg.com/leaflet-measure@3.1.0/dist/leaflet-measure.umd.js"></script>
<script src="google-sheets-config.js"></script>
<script src="app.js"></script>
</body>
</html>