# SAR Analysis Functions for Crop Monitoring
# ==========================================
#
# This script contains functions for analyzing Sentinel-1 SAR data
# Designed to integrate with existing crop_analysis_messaging.R workflow
#
# Author: Timon
# Date: August 2025

# Install any missing dependencies, then attach them.
# requireNamespace() is the correct availability check (require() attaches
# as a side effect and only reports failure through its return value).
required_packages <- c("terra", "dplyr", "sf", "ggplot2", "viridis", "tmap")
for (pkg in required_packages) {
  if (!requireNamespace(pkg, quietly = TRUE)) {
    install.packages(pkg)
  }
  library(pkg, character.only = TRUE)
}

# SAR Data Loading Functions
# ==========================

#' Load SAR mosaic for a specific week and band
#'
#' Builds the expected file name
#' ("week_<WW>_<year>_<band>.tif") inside `data_dir` and loads it with
#' terra. Returns NULL (with a warning) when the file does not exist so
#' callers can skip missing weeks.
#'
#' @param week_num Week number (26-33)
#' @param year Year (2025)
#' @param band SAR band ("VV", "VH", "VV_dB", "VH_dB", "VV_dB_filtered",
#'   "VH_dB_filtered", "mask")
#' @param data_dir Directory containing SAR mosaics
#' @return SpatRaster object, or NULL if the file is missing
load_sar_mosaic <- function(week_num, year = 2025, band = "VV_dB_filtered",
                            data_dir = "data/aura/weekly_SAR_mosaic") {
  filename <- file.path(
    data_dir,
    paste0("week_", sprintf("%02d", week_num), "_", year, "_", band, ".tif")
  )

  if (!file.exists(filename)) {
    warning(paste("SAR file not found:", filename))
    return(NULL)
  }

  # Load raster and give the layer a self-describing name; downstream
  # code (e.g. analyze_sar_field_trends) parses the week number back out
  # of this name, so keep the "_week_<n>" suffix stable.
  sar_raster <- rast(filename)
  names(sar_raster) <- paste0("SAR_", band, "_week_", week_num)
  sar_raster
}

#' Load multiple weeks of SAR data
#'
#' Loads each requested week via load_sar_mosaic() and stacks the ones
#' that exist on disk. Weeks whose files are missing are skipped (a
#' warning is issued by load_sar_mosaic), so the returned stack may have
#' fewer layers than length(week_range).
#'
#' @param week_range Vector of week numbers (e.g. 26:33)
#' @param band SAR band to load
#' @param year Year
#' @return SpatRaster stack with one layer per available week
load_sar_timeseries <- function(week_range = 26:33, band = "VV_dB_filtered",
                                year = 2025) {
  cat("Loading SAR timeseries for weeks", min(week_range), "to",
      max(week_range), "- band:", band, "\n")

  # Load every week, then drop the NULLs returned for missing files.
  sar_list <- Filter(
    Negate(is.null),
    lapply(week_range, function(week) load_sar_mosaic(week, year, band))
  )

  if (length(sar_list) == 0) {
    stop("No SAR data found for specified weeks")
  }

  # Stack all weeks
  sar_stack <- rast(sar_list)
  cat("Loaded", nlyr(sar_stack), "weeks of SAR data\n")
  sar_stack
}

# SAR Metric Calculation Functions
# ================================

#' Calculate Radar Vegetation Index (RVI)
#'
#' RVI = 4 * VH / (VV + VH), computed in linear (not dB) scale and
#' clamped to [0, 1]. Higher values indicate more vegetation volume
#' scattering.
#'
#' @param vv_stack VV polarization data (linear scale)
#' @param vh_stack VH polarization data (linear scale)
#' @return RVI values (0-1, higher = more vegetation)
calculate_rvi <- function(vv_stack, vh_stack) {
  cat("Calculating Radar Vegetation Index (RVI)...\n")

  # RVI = 4 * VH / (VV + VH)
  rvi <- (4 * vh_stack) / (vv_stack + vh_stack)

  # Constrain to 0-1 range (the ratio can exceed 1 where VH > VV/3)
  rvi[rvi < 0] <- 0
  rvi[rvi > 1] <- 1

  names(rvi) <- gsub("VH", "RVI", names(vh_stack))
  rvi
}

#' Calculate Cross-Polarization Ratio
#'
#' In dB scale the VH/VV ratio is a simple difference: VH - VV.
#'
#' @param vv_stack VV polarization (dB scale)
#' @param vh_stack VH polarization (dB scale)
#' @return Cross-pol ratio (VH/VV in dB)
calculate_cross_pol_ratio <- function(vv_stack, vh_stack) {
  cat("Calculating Cross-Polarization Ratio...\n")

  # Cross-pol ratio = VH - VV (in dB scale)
  cross_ratio <- vh_stack - vv_stack

  names(cross_ratio) <- gsub("VH", "CrossRatio", names(vh_stack))
  cross_ratio
}

#' Calculate Crop Structure Index
#'
#' CSI = (VH - VV) / (VH + VV), a normalized-difference style index in
#' linear scale; range is (-1, 1).
#'
#' @param vv_linear VV in linear scale
#' @param vh_linear VH in linear scale
#' @return Crop Structure Index
calculate_crop_structure_index <- function(vv_linear, vh_linear) {
  cat("Calculating Crop Structure Index...\n")

  # CSI = (VH - VV) / (VH + VV)
  csi <- (vh_linear - vv_linear) / (vh_linear + vv_linear)

  names(csi) <- gsub("VH", "CSI", names(vh_linear))
  csi
}

#' Calculate SAR Temporal Stability
#'
#' Per-pixel coefficient of variation (sd / |mean|) across the layers of
#' a multi-temporal stack, plus a boolean mask of "stable" pixels
#' (CV < 0.15).
#'
#' @param sar_stack Multi-temporal SAR data
#' @return List with elements `cv` (SpatRaster) and `stable_mask`
#'   (SpatRaster, TRUE where CV < 0.15)
calculate_sar_temporal_stability <- function(sar_stack) {
  cat("Calculating SAR temporal stability...\n")

  # Per-pixel mean and standard deviation over time
  sar_mean <- app(sar_stack, mean, na.rm = TRUE)
  sar_sd <- app(sar_stack, sd, na.rm = TRUE)

  # Coefficient of variation; abs() because dB values are negative
  cv <- sar_sd / abs(sar_mean)

  # Areas with CV < 0.15 are considered stable
  stable_mask <- cv < 0.15

  names(cv) <- "SAR_temporal_CV"
  names(stable_mask) <- "SAR_stable_mask"

  list(cv = cv, stable_mask = stable_mask)
}

#' Calculate SAR Change Detection
#'
#' Computes week-over-week change magnitude, percentage change and a
#' categorical classification of the change. Category codes:
#'   3 = strong increase (> 2), 2 = moderate increase (0.5, 2],
#'   1 = stable [-0.5, 0.5], 0 = moderate decrease [-2, -0.5),
#'  -1 = strong decrease (< -2). Thresholds are in the input's units
#' (dB for dB-scale stacks).
#'
#' @param sar_current Current week SAR data
#' @param sar_previous Previous week SAR data
#' @return List with `magnitude`, `percent` and `category` rasters
calculate_sar_change <- function(sar_current, sar_previous) {
  cat("Calculating SAR change detection...\n")

  # Calculate change (current - previous)
  change_magnitude <- sar_current - sar_previous

  # Percentage change relative to the previous value
  change_percent <- (change_magnitude / abs(sar_previous)) * 100

  # Categorize change. All conditions test change_magnitude (not the
  # partially-overwritten copy), and together they cover the whole real
  # line, so assignment order does not matter.
  change_category <- change_magnitude
  change_category[change_magnitude > 2] <- 3                               # Strong increase
  change_category[change_magnitude > 0.5 & change_magnitude <= 2] <- 2    # Moderate increase
  change_category[change_magnitude >= -0.5 & change_magnitude <= 0.5] <- 1 # Stable
  change_category[change_magnitude >= -2 & change_magnitude < -0.5] <- 0  # Moderate decrease
  change_category[change_magnitude < -2] <- -1                            # Strong decrease

  names(change_magnitude) <- "SAR_change_magnitude"
  names(change_percent) <- "SAR_change_percent"
  names(change_category) <- "SAR_change_category"

  list(
    magnitude = change_magnitude,
    percent = change_percent,
    category = change_category
  )
}

# Field-Level Analysis Functions
# ==============================

#' Extract SAR statistics for field boundaries
#'
#' Summarizes the pixels inside each field polygon with a set of robust
#' and classical statistics (mean, median, sd, cv, min, max, q25, q75).
#'
#' @param sar_data SAR raster data
#' @param field_boundaries SF object with field polygons
#' @param field_id_col Column name containing field IDs
#' @return SpatVector/data frame with one row of statistics per field
extract_sar_field_stats <- function(sar_data, field_boundaries,
                                    field_id_col = "Name") {
  cat("Extracting SAR statistics for", nrow(field_boundaries), "fields...\n")

  # Extract values for each field; bind = TRUE appends the statistics to
  # the polygon attributes.
  field_stats <- extract(
    sar_data, field_boundaries,
    fun = function(x) {
      c(mean = mean(x, na.rm = TRUE),
        median = median(x, na.rm = TRUE),
        sd = sd(x, na.rm = TRUE),
        cv = sd(x, na.rm = TRUE) / abs(mean(x, na.rm = TRUE)),
        min = min(x, na.rm = TRUE),
        max = max(x, na.rm = TRUE),
        q25 = quantile(x, 0.25, na.rm = TRUE),
        q75 = quantile(x, 0.75, na.rm = TRUE))
    },
    bind = TRUE
  )

  # Re-attach the field identifier so downstream joins have a stable key
  field_stats[[field_id_col]] <- field_boundaries[[field_id_col]]

  field_stats
}

#' Analyze SAR trends for individual fields
#'
#' Loads the weekly SAR stack, extracts the field-mean time series for
#' one field, and fits a simple linear trend (value ~ week).
#'
#' @param field_id Field identifier (matched against the "Name" column)
#' @param field_boundaries Field boundary polygons
#' @param week_range Range of weeks to analyze
#' @param band SAR band to analyze
#' @return List with the time series, trend statistics and summary
#'   metrics, or NULL if the field is not found
analyze_sar_field_trends <- function(field_id, field_boundaries,
                                     week_range = 26:33,
                                     band = "VV_dB_filtered") {
  cat("Analyzing SAR trends for field:", field_id, "\n")

  # Get field polygon
  field_poly <- field_boundaries[field_boundaries$Name == field_id, ]
  if (nrow(field_poly) == 0) {
    warning(paste("Field not found:", field_id))
    return(NULL)
  }

  # Load timeseries data. Missing weekly files are skipped, so the stack
  # may hold fewer layers than length(week_range).
  sar_stack <- load_sar_timeseries(week_range, band)

  # Extract field-mean statistics for each available week
  field_timeseries <- extract(sar_stack, field_poly, fun = mean,
                              na.rm = TRUE, bind = TRUE)

  # Recover the week number of each layer from its name
  # ("SAR_<band>_week_<n>", set by load_sar_mosaic) so the weeks stay
  # aligned with the values even when some files were missing.
  n_layers <- nlyr(sar_stack)
  loaded_weeks <- as.integer(sub(".*_week_", "", names(sar_stack)))

  # NOTE(review): column offset assumes exactly one attribute column
  # precedes the extracted values in field_timeseries — confirm against
  # the attribute schema of field_boundaries.
  ts_data <- data.frame(
    week = loaded_weeks,
    value = as.numeric(field_timeseries[1, 2:(n_layers + 1)]),
    field_id = field_id,
    band = band
  )

  # Linear trend needs at least 3 points for a meaningful fit
  if (nrow(ts_data) > 2) {
    trend_model <- lm(value ~ week, data = ts_data)
    trend_slope <- coef(trend_model)[2]
    trend_r2 <- summary(trend_model)$r.squared
    trend_p_value <- summary(trend_model)$coefficients[2, 4]
  } else {
    trend_slope <- NA
    trend_r2 <- NA
    trend_p_value <- NA
  }

  # Classify the trend direction; scalar condition, so plain if/else
  # (with an explicit NA guard) rather than nested ifelse().
  if (is.na(trend_slope)) {
    trend_direction <- NA
  } else if (trend_slope > 0) {
    trend_direction <- "increasing"
  } else if (trend_slope < 0) {
    trend_direction <- "decreasing"
  } else {
    trend_direction <- "stable"
  }

  # Week-to-week volatility of the field mean
  weekly_changes <- diff(ts_data$value)
  volatility <- sd(weekly_changes, na.rm = TRUE)

  list(
    field_id = field_id,
    timeseries = ts_data,
    trend_slope = trend_slope,
    trend_direction = trend_direction,
    trend_r2 = trend_r2,
    trend_p_value = trend_p_value,
    volatility = volatility,
    latest_value = tail(ts_data$value, 1),
    mean_value = mean(ts_data$value, na.rm = TRUE)
  )
}

# SAR-specific uniformity functions
# =================================

#' Calculate SAR uniformity metrics (adapted for speckle)
#'
#' Summarizes how uniform a SAR scene is. With `use_robust = TRUE`
#' (recommended: speckle produces heavy-tailed distributions) the
#' median/MAD-based CV is used; otherwise classical mean/sd. Note the
#' two modes use different category thresholds.
#'
#' @param sar_data SAR raster data
#' @param use_robust Use robust statistics (recommended for SAR)
#' @return List with uniformity metrics and a "high"/"medium"/"low"
#'   uniformity_category
calculate_sar_uniformity <- function(sar_data, use_robust = TRUE) {
  cat("Calculating SAR uniformity metrics...\n")

  if (use_robust) {
    # Median and MAD resist speckle outliers
    center_value <- global(sar_data, median, na.rm = TRUE)[1, 1]
    spread_value <- global(sar_data, mad, na.rm = TRUE)[1, 1]
    cv_robust <- spread_value / abs(center_value)

    return(list(
      median = center_value,
      mad = spread_value,
      cv_robust = cv_robust,
      uniformity_category = ifelse(cv_robust < 0.1, "high",
                                   ifelse(cv_robust < 0.2, "medium", "low"))
    ))
  } else {
    # Standard statistics
    mean_value <- global(sar_data, mean, na.rm = TRUE)[1, 1]
    sd_value <- global(sar_data, sd, na.rm = TRUE)[1, 1]
    cv_standard <- sd_value / abs(mean_value)

    return(list(
      mean = mean_value,
      sd = sd_value,
      cv_standard = cv_standard,
      uniformity_category = ifelse(cv_standard < 0.15, "high",
                                   ifelse(cv_standard < 0.25, "medium", "low"))
    ))
  }
}

# Startup banner listing the functions this script provides
cat("SAR analysis functions loaded successfully!\n")
cat("Available functions:\n")
cat("- load_sar_mosaic(): Load single week SAR data\n")
cat("- load_sar_timeseries(): Load multi-week SAR data\n")
cat("- calculate_rvi(): Radar Vegetation Index\n")
cat("- calculate_cross_pol_ratio(): Cross-polarization ratio\n")
cat("- calculate_crop_structure_index(): Crop structure metrics\n")
cat("- calculate_sar_temporal_stability(): Temporal stability analysis\n")
cat("- calculate_sar_change(): Change detection\n")
cat("- extract_sar_field_stats(): Field-level statistics\n")
cat("- analyze_sar_field_trends(): Individual field trend analysis\n")
cat("- calculate_sar_uniformity(): SAR uniformity metrics\n")