Refactor Scripts 10 and 20 for the per-field TIFF architecture

This commit is contained in:
Timon 2026-01-29 21:10:24 +01:00
parent d1f352f21c
commit 4d6bba828f
4 changed files with 679 additions and 6 deletions

View file

@ -0,0 +1,328 @@
# ==============================================================================
# SmartCane Script 10: Create Per-Field TIFFs
# ==============================================================================
#
# PURPOSE:
# Split full-farm satellite TIFFs into per-field file structure across TWO phases:
#
# PHASE 1 - MIGRATION (Legacy Data):
# Input: merged_final_tif/{DATE}.tif (5-band: R,G,B,NIR,CI - with CI calculated)
# Output: field_tiles_CI/{FIELD}/{DATE}.tif
# Status: One-time reorganization of existing data; will be removed after 2-3 weeks
#
# PHASE 2 - PROCESSING (New Downloads):
# Input: merged_tif/{DATE}.tif (4-band: R,G,B,NIR - raw from Planet API)
# Output: field_tiles/{FIELD}/{DATE}.tif
# Status: Ongoing for all new downloads; always runs (not conditional)
#
# INTEGRATION WITH DOWNSTREAM SCRIPTS:
# - Script 20 (CI Extraction):
# Reads from field_tiles/{FIELD}/{DATE}.tif
# Adds CI calculation → outputs to field_tiles_CI/{FIELD}/{DATE}.tif (5-band)
# - Script 40 (Mosaic Creation):
# Reads from field_tiles_CI/{FIELD}/{DATE}.tif (via per-field weekly aggregation)
# Creates weekly_mosaic/{FIELD}/week_{WW}.tif
#
# ARCHITECTURE:
# This script uses field/date folder organization:
# field_tiles/
# ├── field_1/
# │ ├── 2024-01-15.tif
# │ └── 2024-01-16.tif
# └── field_2/
# ├── 2024-01-15.tif
# └── 2024-01-16.tif
#
# Benefits: Upstream scripts iterate per-field → per-date, enabling clean
# aggregation for mosaics (Script 40) and KPIs (Script 80/90).
#
# ==============================================================================
library(terra)
library(sf)
# ============================================================================
# HELPER FUNCTIONS (DEFINE FIRST)
# ============================================================================
# Write a timestamped log line to stdout.
#
# Args:
#   msg - character message to print after the "[timestamp] " prefix
smartcane_log <- function(msg) {
  cat(sprintf("[%s] %s\n", Sys.time(), msg))
}
# Load field boundaries from GeoJSON
load_field_boundaries <- function(geojson_path) {
smartcane_log(paste("Loading field boundaries from:", geojson_path))
if (!file.exists(geojson_path)) {
stop("GeoJSON file not found:", geojson_path)
}
fields <- st_read(geojson_path, quiet = TRUE)
# Standardize field name property
if (!"field_name" %in% names(fields)) {
if ("field" %in% names(fields)) {
fields$field_name <- fields$field
} else if ("FIELD_ID" %in% names(fields)) {
fields$field_name <- fields$FIELD_ID
} else if ("Name" %in% names(fields)) {
fields$field_name <- fields$Name
} else {
# Default: use first non-geometry column
field_col <- names(fields)[!names(fields) %in% c("geometry", "geom")]
if (length(field_col) > 0) {
fields$field_name <- fields[[field_col[1]]]
} else {
stop("No suitable field name column found in GeoJSON")
}
}
}
smartcane_log(paste("Loaded", nrow(fields), "field(s)"))
return(fields)
}
# ============================================================================
# PROJECT SETUP
# ============================================================================
# Get project parameter from the command line; first positional argument is
# the project name, defaulting to "angata" when the script is run bare.
args <- commandArgs(trailingOnly = TRUE)
if (length(args) == 0) {
PROJECT <- "angata"
} else {
PROJECT <- args[1]
}
# Construct paths directly (avoid complex parameter initialization).
# NOTE(review): paths are relative to getwd(), so this script assumes it is
# launched from the repository root — confirm against the cron/caller setup.
base_path <- file.path(getwd(), "laravel_app", "storage", "app", PROJECT)
data_dir <- file.path(base_path, "Data")
# Echo the resolved configuration so failures are easy to diagnose from logs
smartcane_log(paste("Project:", PROJECT))
smartcane_log(paste("Base path:", base_path))
smartcane_log(paste("Data dir:", data_dir))
# Unified function to crop TIFF to field boundaries
# Called by both migration and processing phases
# Crop one full-farm TIFF into per-field TIFFs.
#
# For every field polygon intersecting the raster's bounding box, writes
# <output_base_dir>/<field_name>/<tif_date>.tif. Existing outputs are never
# overwritten, so repeated runs are idempotent. Fields are reprojected to
# the raster CRS before the intersection test and the crop.
#
# Args:
#   tif_path        - path to the source multi-band TIFF
#   tif_date        - date string used as the output file stem (YYYY-MM-DD)
#   fields          - sf object with a field_name column
#   output_base_dir - root directory of the per-field output tree
# Returns: list(created, skipped, errors) counts for this TIFF.
crop_tiff_to_fields <- function(tif_path, tif_date, fields, output_base_dir) {
  created <- 0
  skipped <- 0
  errors <- 0
  if (!file.exists(tif_path)) {
    smartcane_log(paste("ERROR: TIFF not found:", tif_path))
    return(list(created = 0, skipped = 0, errors = 1))
  }
  # FIX: named src_rast rather than "rast", which shadowed terra::rast()
  src_rast <- tryCatch({
    rast(tif_path)
  }, error = function(e) {
    smartcane_log(paste("ERROR loading raster:", e$message))
    NULL
  })
  if (is.null(src_rast)) {
    return(list(created = 0, skipped = 0, errors = 1))
  }
  # Build the raster footprint as an sf polygon in the raster's own CRS
  rast_bbox <- st_as_sfc(st_bbox(src_rast))
  st_crs(rast_bbox) <- st_crs(src_rast)
  # Reproject fields so intersection and cropping happen in the raster CRS
  fields_reprojected <- st_transform(fields, st_crs(rast_bbox))
  # Indices (into fields_reprojected, same row order as fields) of polygons
  # whose geometry intersects the raster bounding box
  overlapping_indices <- st_intersects(rast_bbox, fields_reprojected, sparse = TRUE)
  overlapping_indices <- unique(unlist(overlapping_indices))
  if (length(overlapping_indices) == 0) {
    smartcane_log(paste("No fields intersect TIFF:", basename(tif_path)))
    return(list(created = 0, skipped = 0, errors = 0))
  }
  for (field_idx in overlapping_indices) {
    field_name <- fields$field_name[field_idx]
    field_geom <- fields_reprojected[field_idx, ]
    field_dir <- file.path(output_base_dir, field_name)
    if (!dir.exists(field_dir)) {
      dir.create(field_dir, recursive = TRUE, showWarnings = FALSE)
    }
    output_path <- file.path(field_dir, paste0(tif_date, ".tif"))
    # Idempotency: never re-crop a tile that already exists on disk
    if (file.exists(output_path)) {
      skipped <- skipped + 1
      next
    }
    # FIX: tryCatch now returns a success flag so the counters stay plain
    # local assignments (the original mutated `errors` with `<<-` from
    # inside the handler, an error-prone superassignment pattern)
    ok <- tryCatch({
      field_rast <- crop(src_rast, field_geom)
      writeRaster(field_rast, output_path, overwrite = TRUE)
      TRUE
    }, error = function(e) {
      smartcane_log(paste("ERROR cropping field", field_name, ":", e$message))
      FALSE
    })
    if (ok) {
      created <- created + 1
    } else {
      errors <- errors + 1
    }
  }
  return(list(created = created, skipped = skipped, errors = errors))
}
# Migrate legacy 5-band TIFFs with CI from merged_final_tif
migrate_old_merged_final_tif <- function(merged_final_dir, field_tiles_ci_dir, fields) {
smartcane_log("\n========================================")
smartcane_log("PHASE 1: MIGRATING LEGACY DATA")
smartcane_log("========================================")
# Check if legacy directory exists
if (!dir.exists(merged_final_dir)) {
smartcane_log("No legacy merged_final_tif/ directory found - skipping migration")
return(list(total_created = 0, total_skipped = 0, total_errors = 0))
}
# Create output directory
if (!dir.exists(field_tiles_ci_dir)) {
dir.create(field_tiles_ci_dir, recursive = TRUE, showWarnings = FALSE)
}
# Find all date-pattern TIFFs in root of merged_final_tif
tiff_files <- list.files(
merged_final_dir,
pattern = "^[0-9]{4}-[0-9]{2}-[0-9]{2}\\.tif$",
full.names = TRUE
)
smartcane_log(paste("Found", length(tiff_files), "legacy TIFF(s) to migrate"))
if (length(tiff_files) == 0) {
smartcane_log("No legacy TIFFs found - skipping migration")
return(list(total_created = 0, total_skipped = 0, total_errors = 0))
}
# Process each legacy TIFF
total_created <- 0
total_skipped <- 0
total_errors <- 0
for (tif_path in tiff_files) {
tif_date <- gsub("\\.tif$", "", basename(tif_path))
smartcane_log(paste("Migrating:", tif_date))
result <- crop_tiff_to_fields(tif_path, tif_date, fields, field_tiles_ci_dir)
total_created <- total_created + result$created
total_skipped <- total_skipped + result$skipped
total_errors <- total_errors + result$errors
}
smartcane_log(paste("Migration complete: created =", total_created,
", skipped =", total_skipped, ", errors =", total_errors))
return(list(total_created = total_created, total_skipped = total_skipped,
total_errors = total_errors))
}
# Process new 4-band raw TIFFs from merged_tif
process_new_merged_tif <- function(merged_tif_dir, field_tiles_dir, fields) {
smartcane_log("\n========================================")
smartcane_log("PHASE 2: PROCESSING NEW DOWNLOADS")
smartcane_log("========================================")
# Check if download directory exists
if (!dir.exists(merged_tif_dir)) {
smartcane_log("No merged_tif/ directory found - no new data to process")
return(list(total_created = 0, total_skipped = 0, total_errors = 0))
}
# Create output directory
if (!dir.exists(field_tiles_dir)) {
dir.create(field_tiles_dir, recursive = TRUE, showWarnings = FALSE)
}
# Find all date-pattern TIFFs in root of merged_tif
tiff_files <- list.files(
merged_tif_dir,
pattern = "^[0-9]{4}-[0-9]{2}-[0-9]{2}\\.tif$",
full.names = TRUE
)
smartcane_log(paste("Found", length(tiff_files), "TIFF(s) to process"))
if (length(tiff_files) == 0) {
smartcane_log("No new TIFFs found - nothing to process")
return(list(total_created = 0, total_skipped = 0, total_errors = 0))
}
# Process each new TIFF
total_created <- 0
total_skipped <- 0
total_errors <- 0
for (tif_path in tiff_files) {
tif_date <- gsub("\\.tif$", "", basename(tif_path))
smartcane_log(paste("Processing:", tif_date))
result <- crop_tiff_to_fields(tif_path, tif_date, fields, field_tiles_dir)
total_created <- total_created + result$created
total_skipped <- total_skipped + result$skipped
total_errors <- total_errors + result$errors
}
smartcane_log(paste("Processing complete: created =", total_created,
", skipped =", total_skipped, ", errors =", total_errors))
return(list(total_created = total_created, total_skipped = total_skipped,
total_errors = total_errors))
}
# ============================================================================
# MAIN EXECUTION
# ============================================================================
smartcane_log("========================================")
smartcane_log(paste("Script 10: Per-Field TIFF Creation for", PROJECT))
smartcane_log("========================================")
# Create necessary directories
dir.create(data_dir, recursive = TRUE, showWarnings = FALSE)
# Load field boundaries; stops the script if pivot.geojson is missing
geojson_path <- file.path(data_dir, "pivot.geojson")
fields <- load_field_boundaries(geojson_path)
# Define input and output directories for both phases
merged_final_dir <- file.path(base_path, "merged_final_tif")
merged_tif_dir <- file.path(base_path, "merged_tif")
field_tiles_dir <- file.path(base_path, "field_tiles")
field_tiles_ci_dir <- file.path(base_path, "field_tiles_CI")
# PHASE 1: Migrate legacy data (if exists) - skips itself when
# merged_final_tif/ is absent or empty
migrate_result <- migrate_old_merged_final_tif(merged_final_dir, field_tiles_ci_dir, fields)
# PHASE 2: Process new downloads (always runs)
process_result <- process_new_merged_tif(merged_tif_dir, field_tiles_dir, fields)
# Final summary: counters come from the lists returned by each phase
smartcane_log("\n========================================")
smartcane_log("FINAL SUMMARY")
smartcane_log("========================================")
smartcane_log(paste("Migration: created =", migrate_result$total_created,
", skipped =", migrate_result$total_skipped,
", errors =", migrate_result$total_errors))
smartcane_log(paste("Processing: created =", process_result$total_created,
", skipped =", process_result$total_skipped,
", errors =", process_result$total_errors))
smartcane_log("Script 10 complete")
smartcane_log("========================================\n")

View file

@ -0,0 +1,240 @@
# CI_EXTRACTION_PER_FIELD.R
# =========================
# Script 20 (Refactored for Per-Field Architecture)
#
# This script reads per-field TIFFs from Script 10 output and:
# 1. Calculates Canopy Index (CI) from 4-band imagery (RGB + NIR)
# 2. Outputs 5-band TIFFs with CI as the 5th band to field_tiles_CI/{FIELD}/{DATE}.tif
# 3. Outputs per-field per-date RDS files to daily_vals/{FIELD}/{DATE}.rds
#
# Key differences from legacy Script 20:
# - Input: field_tiles/{FIELD}/{DATE}.tif (4-band, from Script 10)
# - Output: field_tiles_CI/{FIELD}/{DATE}.tif (5-band with CI)
# - Output: daily_vals/{FIELD}/{DATE}.rds (per-field CI statistics)
# - Directly extracts CI statistics per sub_field within each field
#
# Usage:
# Rscript 20_ci_extraction_per_field.R [project_dir] [end_date] [offset]
# Example: Rscript 20_ci_extraction_per_field.R angata 2026-01-02 7
suppressPackageStartupMessages({
library(sf)
library(terra)
library(tidyverse)
library(lubridate)
library(here)
})
# =============================================================================
# Load utility functions from 20_ci_extraction_utils.R
# =============================================================================
source("r_app/20_ci_extraction_utils.R")
# =============================================================================
# Main Processing
# =============================================================================
# Script 20 entry point: per-field CI extraction.
#
# Reads 4-band per-field TIFFs (Script 10 output), appends a CI band and
# writes 5-band TIFFs to field_tiles_CI/{FIELD}/{DATE}.tif, extracts
# per-sub_field CI statistics to daily_vals/{FIELD}/{DATE}.rds, then
# aggregates all daily RDS files into combined_CI_data.rds for downstream
# scripts (Script 30 etc.).
#
# Command-line args: [project_dir] [end_date YYYY-MM-DD] [offset_days]
main <- function() {
  # here() must resolve relative to /smartcane, not /smartcane/r_app
  if (basename(getwd()) == "r_app") {
    setwd("..")
  }
  # Parse command-line arguments with defaults: angata / today / 7 days back
  args <- commandArgs(trailingOnly = TRUE)
  project_dir <- if (length(args) >= 1 && args[1] != "") args[1] else "angata"
  end_date <- if (length(args) >= 2 && args[2] != "") as.Date(args[2]) else Sys.Date()
  offset <- if (length(args) >= 3 && !is.na(as.numeric(args[3]))) as.numeric(args[3]) else 7
  # parameters_project.R reads project_dir from the global environment
  assign("project_dir", project_dir, envir = .GlobalEnv)
  safe_log(sprintf("=== Script 20: CI Extraction Per-Field ==="))
  safe_log(sprintf("Project: %s | End Date: %s | Offset: %d days",
                   project_dir, format(end_date, "%Y-%m-%d"), offset))
  # 1. Load parameters (includes field boundaries setup)
  tryCatch({
    source("r_app/parameters_project.R")
    safe_log("Loaded parameters_project.R")
  }, error = function(e) {
    safe_log(sprintf("Error loading parameters: %s", e$message), "ERROR")
    stop(e)
  })
  # 2. Resolve all directory paths BEFORE anything reads setup$...
  setup <- setup_project_directories(project_dir)
  # 3. Load field/sub_field boundary polygons from the configured GeoJSON
  tryCatch({
    field_boundaries_sf <- st_read(setup$field_boundaries_path, quiet = TRUE)
    safe_log(sprintf("Loaded %d field/sub_field polygons from %s", nrow(field_boundaries_sf), setup$field_boundaries_path))
  }, error = function(e) {
    safe_log(sprintf("Error loading field boundaries from %s: %s", setup$field_boundaries_path, e$message), "ERROR")
    stop(e)
  })
  # 4. Build the list of dates in the processing window
  dates <- date_list(end_date, offset)
  safe_log(sprintf("Processing dates: %s to %s (%d dates)",
                   dates$start_date, dates$end_date, length(dates$days_filter)))
  safe_log(sprintf("Input directory: %s", setup$field_tiles_dir))
  safe_log(sprintf("Output TIF directory: %s", setup$field_tiles_ci_dir))
  safe_log(sprintf("Output RDS directory: %s", setup$daily_vals_per_field_dir))
  # 5. Enumerate the per-field folders produced by Script 10
  if (!dir.exists(setup$field_tiles_dir)) {
    safe_log(sprintf("Field tiles directory not found: %s", setup$field_tiles_dir), "ERROR")
    stop("Script 10 output not found. Run Script 10 first.")
  }
  fields <- list.dirs(setup$field_tiles_dir, full.names = FALSE, recursive = FALSE)
  fields <- fields[fields != ""] # Remove empty strings
  if (length(fields) == 0) {
    safe_log("No fields found in field_tiles directory", "WARNING")
    return()
  }
  safe_log(sprintf("Found %d fields to process", length(fields)))
  # 6. Process each field x date combination
  total_success <- 0
  total_error <- 0
  ci_results_by_date <- list()
  for (field in fields) {
    safe_log(sprintf("\n--- Processing field: %s ---", field))
    # BUG FIX: the original referenced undefined globals `field_tiles_dir`
    # and `field_tiles_ci_dir` (they exist only in Script 10); the paths
    # this script uses everywhere else live in `setup`
    field_tiles_path <- file.path(setup$field_tiles_dir, field)
    field_ci_path <- file.path(setup$field_tiles_ci_dir, field)
    field_daily_vals_path <- file.path(setup$daily_vals_per_field_dir, field)
    # Create output subdirectories for this field
    dir.create(field_ci_path, showWarnings = FALSE, recursive = TRUE)
    dir.create(field_daily_vals_path, showWarnings = FALSE, recursive = TRUE)
    for (date_str in dates$days_filter) {
      input_tif <- file.path(field_tiles_path, sprintf("%s.tif", date_str))
      output_tif <- file.path(field_ci_path, sprintf("%s.tif", date_str))
      output_rds <- file.path(field_daily_vals_path, sprintf("%s.rds", date_str))
      # Idempotency: skip dates whose TIFF and RDS both already exist
      if (file.exists(output_tif) && file.exists(output_rds)) {
        safe_log(sprintf(" %s: Already processed (skipping)", date_str))
        next
      }
      if (!file.exists(input_tif)) {
        safe_log(sprintf(" %s: Input TIFF not found (skipping)", date_str))
        next
      }
      tryCatch({
        # Load 4-band TIFF, derive CI, append as the 5th band, persist
        raster_4band <- terra::rast(input_tif)
        ci_raster <- calc_ci_from_raster(raster_4band)
        five_band <- c(raster_4band, ci_raster)
        terra::writeRaster(five_band, output_tif, overwrite = TRUE)
        # Per-sub_field CI statistics -> per-date RDS
        ci_stats <- extract_ci_by_subfield(ci_raster, field_boundaries_sf, field)
        if (!is.null(ci_stats) && nrow(ci_stats) > 0) {
          saveRDS(ci_stats, output_rds)
          safe_log(sprintf(" %s: ✓ Processed (%d sub-fields)", date_str, nrow(ci_stats)))
          # Keep an in-memory copy tagged with the date for aggregation
          ci_stats_with_date <- ci_stats %>% mutate(date = date_str)
          key <- sprintf("%s_%s", field, date_str)
          ci_results_by_date[[key]] <- ci_stats_with_date
        } else {
          safe_log(sprintf(" %s: ⚠ No CI data extracted", date_str))
        }
        total_success <- total_success + 1
      }, error = function(e) {
        safe_log(sprintf(" %s: ✗ Error - %s", date_str, e$message), "ERROR")
        total_error <<- total_error + 1
      })
    }
  }
  # 7. Summary
  safe_log(sprintf("\n=== Processing Complete ==="))
  safe_log(sprintf("Successfully processed: %d", total_success))
  safe_log(sprintf("Errors encountered: %d", total_error))
  if (total_success > 0) {
    safe_log("Output files created in:")
    safe_log(sprintf(" TIFFs: %s", setup$field_tiles_ci_dir))
    safe_log(sprintf(" RDS: %s", setup$daily_vals_per_field_dir))
  }
  # 8. Aggregate per-field daily RDS files into combined_CI_data.rds — the
  # wide-format file downstream scripts expect for backward compatibility
  safe_log("\n=== Aggregating Per-Field Daily RDS into combined_CI_data.rds ===")
  tryCatch({
    # Find all daily RDS files recursively (daily_vals/{FIELD}/{DATE}.rds)
    all_daily_files <- list.files(
      path = setup$daily_vals_per_field_dir,
      pattern = "\\.rds$",
      full.names = TRUE,
      recursive = TRUE
    )
    if (length(all_daily_files) == 0) {
      safe_log("No daily RDS files found to aggregate", "WARNING")
    } else {
      safe_log(sprintf("Aggregating %d daily RDS files into combined_CI_data.rds", length(all_daily_files)))
      # Read every daily file, stack row-wise, group by sub_field
      combined_data <- all_daily_files %>%
        purrr::map(readRDS) %>%
        purrr::list_rbind() %>%
        dplyr::group_by(sub_field)
      dir.create(setup$cumulative_CI_vals_dir, showWarnings = FALSE, recursive = TRUE)
      combined_ci_path <- file.path(setup$cumulative_CI_vals_dir, "combined_CI_data.rds")
      saveRDS(combined_data, combined_ci_path)
      safe_log(sprintf("✓ Created combined_CI_data.rds with %d rows from %d files",
                       nrow(combined_data), length(all_daily_files)))
      safe_log(sprintf(" Location: %s", combined_ci_path))
    }
  }, error = function(e) {
    safe_log(sprintf("⚠ Error aggregating to combined_CI_data.rds: %s", e$message), "WARNING")
    safe_log(" This is OK - Script 30 can still use per-field RDS files directly", "WARNING")
  })
}
# Execute main if called from command line
if (sys.nframe() == 0) {
main()
}

View file

@ -6,6 +6,10 @@
#
# Parallel Processing: Tile-based extraction uses furrr::future_map to process
# multiple tiles simultaneously (typically 2-4 tiles in parallel depending on CPU cores)
#
# Per-Field Functions (Script 20):
# - calc_ci_from_raster(): Calculate CI from 4-band raster (NDVI formula)
# - extract_ci_by_subfield(): Extract per-sub_field CI statistics from raster
#' Safe logging function that works whether log_message exists or not
#'
@ -1013,3 +1017,93 @@ extract_ci_from_tiles <- function(tile_files, date, field_boundaries_sf, daily_C
return(aggregated)
}
# =============================================================================
# Script 20 (Per-Field) Specific Functions
# =============================================================================
#' Calculate Canopy Index (CI) from 4-band raster
#'
#' Computes CI = (NIR - Red) / (NIR + Red), which is equivalent to NDVI.
#' Expects band order: Red (band 1), Green (band 2), Blue (band 3), NIR (band 4)
#'
#' @param raster_obj Loaded raster object with at least 4 bands
#' @return Raster object containing CI values
#'
#' Calculate Canopy Index (CI) from a 4-band raster
#'
#' CI = (NIR - Red) / (NIR + Red), the NDVI formula.
#' Assumed band order: Red (1), Green (2), Blue (3), NIR (4).
#'
#' @param raster_obj Loaded raster object with at least 4 bands
#' @return Raster object containing CI values
#'
calc_ci_from_raster <- function(raster_obj) {
  # Guard against rasters that cannot supply both Red and NIR bands
  if (terra::nlyr(raster_obj) < 4) {
    stop("Raster has fewer than 4 bands. Cannot calculate CI.")
  }
  red_band <- terra::subset(raster_obj, 1)
  nir_band <- terra::subset(raster_obj, 4)
  # Normalized difference of NIR and Red (equivalent to NDVI)
  (nir_band - red_band) / (nir_band + red_band)
}
#' Extract CI statistics by sub_field from a CI raster
#'
#' For a given field, masks the CI raster to each sub_field polygon
#' and calculates statistics (mean, median, sd, min, max, count).
#'
#' @param ci_raster Raster object containing CI values
#' @param field_boundaries_sf SF object containing field/sub_field polygons
#' @param field_name Character string of the field name to process
#' @return Data frame with field, sub_field, and CI statistics; NULL if field not found
#'
#' Extract CI statistics by sub_field from a CI raster
#'
#' For a given field, masks the CI raster to each sub_field polygon
#' and calculates statistics (mean, median, sd, min, max, count).
#'
#' @param ci_raster Raster object containing CI values
#' @param field_boundaries_sf SF object containing field/sub_field polygons
#' @param field_name Character string of the field name to process
#' @return Data frame with field, sub_field, and CI statistics; NULL if field not found
#'
extract_ci_by_subfield <- function(ci_raster, field_boundaries_sf, field_name) {
  # Filter to current field (field_name is not a column, so data masking
  # resolves it to the function argument here)
  field_poly <- field_boundaries_sf %>%
    filter(field == field_name)
  if (nrow(field_poly) == 0) {
    safe_log(sprintf("Field '%s' not found in boundaries", field_name), "WARNING")
    return(NULL)
  }
  results <- list()
  # BUG FIX: the original loop variable was also named `sub_field`, so
  # filter(sub_field == sub_field) compared the column with itself (always
  # TRUE) and every sub-field received statistics for the ENTIRE field.
  # Renaming the loop variable makes the filter select one sub-field.
  for (sub_name in unique(field_poly$sub_field)) {
    sub_poly <- field_poly %>% filter(sub_field == sub_name)
    # Mask CI raster to this sub-field's polygon(s)
    ci_sub <- terra::mask(ci_raster, sub_poly)
    ci_values <- terra::values(ci_sub, na.rm = TRUE)
    if (length(ci_values) > 0) {
      result_row <- data.frame(
        field = field_name,
        sub_field = sub_name,
        ci_mean = mean(ci_values, na.rm = TRUE),
        ci_median = median(ci_values, na.rm = TRUE),
        ci_sd = sd(ci_values, na.rm = TRUE),
        ci_min = min(ci_values, na.rm = TRUE),
        ci_max = max(ci_values, na.rm = TRUE),
        ci_count = length(ci_values),
        stringsAsFactors = FALSE
      )
    } else {
      # No valid pixels inside the polygon: emit an NA row so the sub-field
      # still appears in the output with a zero count
      result_row <- data.frame(
        field = field_name,
        sub_field = sub_name,
        ci_mean = NA_real_,
        ci_median = NA_real_,
        ci_sd = NA_real_,
        ci_min = NA_real_,
        ci_max = NA_real_,
        ci_count = 0,
        stringsAsFactors = FALSE
      )
    }
    results[[length(results) + 1]] <- result_row
  }
  return(dplyr::bind_rows(results))
}

View file

@ -212,7 +212,7 @@ detect_tile_structure_from_merged_final <- function(merged_final_tif_dir, daily_
# -----------------------------------
setup_project_directories <- function(project_dir, data_source = "merged_tif_8b") {
# Base directories
laravel_storage_dir <- here("laravel_app/storage/app", project_dir)
laravel_storage_dir <- here("laravel_app", "storage", "app", project_dir)
# Determine which TIF source folder to use based on data_source parameter
# Default is merged_tif_8b for newer data with cloud masking (8-band + UDM)
@ -238,15 +238,20 @@ setup_project_directories <- function(project_dir, data_source = "merged_tif_8b"
merged = merged_tif_folder, # Use data_source parameter to select folder
final = merged_final_dir
),
# New per-field directory structure (Script 10 output)
field_tiles = here(laravel_storage_dir, "field_tiles"),
field_tiles_ci = here(laravel_storage_dir, "field_tiles_CI"),
weekly_mosaic = here(laravel_storage_dir, "weekly_mosaic"),
weekly_tile_max = here(laravel_storage_dir, "weekly_tile_max"),
extracted_ci = list(
base = here(laravel_storage_dir, "Data/extracted_ci"),
daily = here(laravel_storage_dir, "Data/extracted_ci/daily_vals"),
cumulative = here(laravel_storage_dir, "Data/extracted_ci/cumulative_vals")
base = here(laravel_storage_dir, "Data", "extracted_ci"),
daily = here(laravel_storage_dir, "Data", "extracted_ci", "daily_vals"),
cumulative = here(laravel_storage_dir, "Data", "extracted_ci", "cumulative_vals"),
# New per-field daily RDS structure (Script 20 output)
daily_per_field = here(laravel_storage_dir, "Data", "extracted_ci", "daily_vals")
),
vrt = here(laravel_storage_dir, "Data/vrt"),
harvest = here(laravel_storage_dir, "Data/HarvestData")
vrt = here(laravel_storage_dir, "Data", "vrt"),
harvest = here(laravel_storage_dir, "Data", "HarvestData")
)
# Create all directories
@ -264,6 +269,12 @@ setup_project_directories <- function(project_dir, data_source = "merged_tif_8b"
merged_final = dirs$tif$final,
daily_CI_vals_dir = dirs$extracted_ci$daily,
cumulative_CI_vals_dir = dirs$extracted_ci$cumulative,
# New per-field directory paths (Script 10 & 20 outputs)
field_tiles_dir = dirs$field_tiles,
field_tiles_ci_dir = dirs$field_tiles_ci,
daily_vals_per_field_dir = dirs$extracted_ci$daily_per_field,
# Field boundaries path for all scripts
field_boundaries_path = here(laravel_storage_dir, "Data", "pivot.geojson"),
weekly_CI_mosaic = if (use_tile_mosaic) dirs$weekly_tile_max else dirs$weekly_mosaic, # SMART: Route based on tile detection
daily_vrt = dirs$vrt, # Point to Data/vrt folder where R creates VRT files from CI extraction
weekly_tile_max = dirs$weekly_tile_max, # Per-tile weekly MAX mosaics (Script 04 output)