This commit is contained in:
Martin Folkerts 2024-04-18 13:43:14 +02:00
parent 4051b802e2
commit 5886f59bd8
6 changed files with 761 additions and 758 deletions

View file

@ -30,7 +30,7 @@ public function weeksAgo()
public static function weeksAgoForYearAndWeek($year, $week) public static function weeksAgoForYearAndWeek($year, $week)
{ {
return now()->diffInWeeks(now()->setISODate($year, $week)); return (now()->week - now()->setISODate($year, $week)->week);
} }
public function getFileName() public function getFileName()

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,506 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "0c18e312-8421-47d7-84f9-ed7d5e47e7ee",
"metadata": {
"tags": []
},
"source": [
"#### Load packages and connect to SentinelHub"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "b7ca7102-5fd9-481f-90cd-3ba60e288649",
"metadata": {},
"outputs": [],
"source": [
"# $ pip install sentinelhub\n",
"# pip install gdal\n",
"\n",
"import os\n",
"import json\n",
"import datetime\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from pathlib import Path\n",
"from osgeo import gdal\n",
"\n",
"from sentinelhub import MimeType, CRS, BBox, SentinelHubRequest, SentinelHubDownloadClient, \\\n",
" DataCollection, bbox_to_dimensions, DownloadRequest, SHConfig, BBoxSplitter, read_data\n",
"\n",
"config = SHConfig()\n",
"\n",
"import time\n",
"import shutil"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "330c967c-2742-4a7a-9a61-28bfdaf8eeca",
"metadata": {},
"outputs": [],
"source": [
"#pip install pipreqs"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "49f8496a-a267-4b74-9500-a168e031ed68",
"metadata": {},
"outputs": [],
"source": [
"#import pipreqs\n",
"#pipreqs Resilience BV/4002 CMD App - General/4002 CMD Team/4002 TechnicalData/04 WP2 technical/python/Chemba_download.ipynb"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "5491a840-779c-4f0c-8164-c3de738b3298",
"metadata": {},
"outputs": [],
"source": [
"# Load SentinelHub OAuth credentials from the environment — never hardcode\n",
"# secrets in a committed notebook. The previously committed id/secret must be\n",
"# considered leaked and should be rotated in the SentinelHub dashboard.\n",
"config.sh_client_id = os.environ['SH_CLIENT_ID']\n",
"config.sh_client_secret = os.environ['SH_CLIENT_SECRET']"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "eb1fb662-0e25-4ca9-8317-c6953290842b",
"metadata": {},
"outputs": [],
"source": [
"collection_id = '4e56d0cb-c402-40ff-97bb-c2b9e6bfcf2a'\n",
"byoc = DataCollection.define_byoc(\n",
" collection_id,\n",
" name='planet_data_8b',\n",
" is_timeless=True)"
]
},
{
"cell_type": "markdown",
"id": "6adb603d-8182-48c6-a051-869e16ee7bba",
"metadata": {
"tags": []
},
"source": [
"#### Set some variables\n",
"The only place anything might need to be changed."
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "060396e0-e5ee-4b54-b211-5d8bfcba167f",
"metadata": {},
"outputs": [],
"source": [
"#project = 'chemba' #or xinavane or chemba_test_8b\n",
"#project = 'xinavane' #or xinavane or chemba_test_8b\n",
"project = 'chemba_test_8b' #or xinavane or chemba_test_8b\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "c9f79e81-dff8-4109-8d26-6c423142dcf2",
"metadata": {},
"outputs": [],
"source": [
"# Adjust the number of days needed\n",
"days = 7 #change back to 28 which is the default. 3 years is 1095 days."
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "e18bdf8f-be4b-44ab-baaa-de5de60d92cb",
"metadata": {},
"outputs": [],
"source": [
"#delete all the satellite outputs -> then True\n",
"empty_folder_question = False"
]
},
{
"cell_type": "markdown",
"id": "81bbb513-0bd2-4277-83e8-6f94051ce70b",
"metadata": {
"tags": []
},
"source": [
"#### Define functions\n",
"After this block, no manual changes to parameters are required. \n"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "3f7c8e04-4569-457b-b39d-283582c4ba36",
"metadata": {},
"outputs": [],
"source": [
"BASE_PATH = Path('../laravel_app/storage/app') / os.getenv('PROJECT_DIR', project) \n",
"BASE_PATH_SINGLE_IMAGES = Path(BASE_PATH / 'single_images')\n",
"folder_for_merged_tifs = str(BASE_PATH / 'merged_tif')\n",
"folder_for_virtual_raster = str(BASE_PATH / 'merged_virtual')\n",
" \n",
"# Check if the folders exist, and if not, create them\n",
"if not os.path.exists(BASE_PATH_SINGLE_IMAGES):\n",
" os.makedirs(BASE_PATH_SINGLE_IMAGES)\n",
" \n",
"if not os.path.exists(folder_for_merged_tifs):\n",
" os.makedirs(folder_for_merged_tifs)\n",
"\n",
"if not os.path.exists(folder_for_virtual_raster):\n",
" os.makedirs(folder_for_virtual_raster)"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "244b5752-4f02-4347-9278-f6a0a46b88f4",
"metadata": {},
"outputs": [],
"source": [
"evalscript_true_color = \"\"\"\n",
" //VERSION=3\n",
"\n",
" function setup() {\n",
" return {\n",
" input: [{\n",
" bands: \n",
" [\"CoastalBlue\", \"Blue\", \"Green\", \"GreenI\", \"Yellow\", \"Red\", \n",
" \"RedEdge\", \"NIR\", \"UDM2_Clear\"]\n",
" }],\n",
" output: {\n",
" bands: 9 \n",
" //sampleType: \"FLOAT32\"\n",
" }\n",
" };\n",
" }\n",
"\n",
" function evaluatePixel(sample) {\n",
" var scaledBlue = [2.5 * sample.Blue / 10000];\n",
" var scaledGreen = [2.5 * sample.Green / 10000];\n",
" var scaledRed = [2.5 * sample.Red / 10000];\n",
" var scaledCoastalBlue = [2.5 * sample.CoastalBlue / 10000];\n",
" var scaledGreenI = [2.5 * sample.GreenI / 10000];\n",
" var scaledYellow = [2.5 * sample.Yellow / 10000];\n",
" var scaledRedEdge = [2.5 * sample.RedEdge / 10000];\n",
" var scaledNIR = [2.5 * sample.NIR / 10000];\n",
" var UDM2_Clear = UDM2_Clear\n",
" \n",
" // Output the scaled bands\n",
" \n",
" // if (sample.UDM2_Clear != 0) { \n",
" return [\n",
" scaledCoastalBlue,\n",
" scaledBlue,\n",
" scaledGreen,\n",
" scaledGreenI,\n",
" scaledYellow,\n",
" scaledRed, \n",
" scaledRedEdge,\n",
" scaledNIR,\n",
" UDM2_Clear\n",
" ]\n",
" // } else {\n",
" // return [NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN]}\n",
" \n",
" } \n",
"\n",
"\"\"\"\n",
"\n",
"def get_true_color_request_day(time_interval, bbox, size):\n",
" return SentinelHubRequest(\n",
" evalscript=evalscript_true_color,\n",
" input_data=[\n",
" SentinelHubRequest.input_data(\n",
" data_collection=DataCollection.planet_data_8b,\n",
" time_interval=(time_interval, time_interval)\n",
" )\n",
" ],\n",
" responses=[\n",
" SentinelHubRequest.output_response('default', MimeType.TIFF)\n",
" ],\n",
" bbox=bbox,\n",
" size=size,\n",
" config=config,\n",
" data_folder=str(BASE_PATH_SINGLE_IMAGES / time_interval),\n",
"\n",
" )\n",
"\n",
"def download_function(slot, bbox, size):\n",
"    \"\"\"Download imagery for one date slot and bounding box via SentinelHub.\n",
"\n",
"    slot: 'YYYY-MM-DD' date string (also used as the data_folder name).\n",
"    bbox/size: as produced by BBox(...) and bbox_to_dimensions(...).\n",
"    Results are written to BASE_PATH_SINGLE_IMAGES/<slot>/ by the client.\n",
"    \"\"\"\n",
"    # Build the request and unwrap its single underlying DownloadRequest\n",
"    list_of_requests = [get_true_color_request_day(slot, bbox, size)]\n",
"    list_of_requests = [request.download_list[0] for request in list_of_requests]\n",
"\n",
"    # Download with multiple threads\n",
"    data = SentinelHubDownloadClient(config=config).download(list_of_requests, max_threads=15)\n",
"    # f-string with a placeholder (the old code concatenated onto a literal f-string)\n",
"    print(f' Image downloaded for {slot}')\n",
"\n",
"    # Brief pause to be gentle on the rate limit\n",
"    time.sleep(.1)\n",
"\n",
"def merge_files(slot):\n",
"    \"\"\"Mosaic all per-area response.tiff files of one date slot into one GeoTIFF.\"\"\"\n",
"\n",
"    # Each sub-request wrote a <hash>/response.tiff under the slot folder\n",
"    file_list = [f\"{x}/response.tiff\" for x in Path(BASE_PATH_SINGLE_IMAGES / slot).iterdir()]\n",
"\n",
"    # Local output paths; renamed so they no longer shadow the module-level\n",
"    # folder_for_merged_tifs / folder_for_virtual_raster folder names\n",
"    merged_tif_path = str(BASE_PATH / 'merged_tif' / f\"{slot}.tif\")\n",
"    virtual_raster_path = str(BASE_PATH / 'merged_virtual' / f\"merged{slot}.vrt\")\n",
"\n",
"    # Build the virtual raster once (the old code called BuildVRT twice)\n",
"    vrt = gdal.BuildVRT(virtual_raster_path, file_list)\n",
"    vrt = None  # dereference to flush/close the VRT before translating it\n",
"\n",
"    # Materialise the mosaic as a GeoTIFF (the old comment said JPEG, which was wrong)\n",
"    gdal.Translate(merged_tif_path, virtual_raster_path)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "848dc773-70d6-4ae6-b05c-d6ebfb41624d",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Monthly time windows:\n",
"\n",
"2024-03-19\n",
"2024-03-20\n",
"2024-03-21\n",
"2024-03-22\n",
"2024-03-23\n",
"2024-03-24\n",
"2024-03-25\n"
]
}
],
"source": [
"days_needed = int(os.environ.get(\"DAYS\", days))\n",
"date_str = os.environ.get(\"DATE\")\n",
"if date_str:\n",
"    # Parse the date string into a datetime.date object\n",
"    end = datetime.datetime.strptime(date_str, \"%Y-%m-%d\").date()\n",
"else:\n",
"    # Fall back to today's date when no DATE override is set\n",
"    end = datetime.date.today()\n",
"\n",
"start = end - datetime.timedelta(days=days_needed - 1)\n",
"\n",
"# One slot per day, formatted YYYY-MM-DD\n",
"slots = [(start + datetime.timedelta(days=i)).strftime('%Y-%m-%d') for i in range(days_needed)]\n",
"\n",
"# These are daily windows — the old 'Monthly' label was misleading\n",
"print('Daily time windows:\\n')\n",
"if len(slots) > 10:\n",
"    for slot in slots[:3]:\n",
"        print(slot)\n",
"    print(\"...\")\n",
"    for slot in slots[-3:]:\n",
"        print(slot)\n",
"else:\n",
"    for slot in slots:\n",
"        print(slot)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "93c715f7-4f7e-428e-bbb9-53a2d8f6e2c8",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Monthly time windows:\n",
"\n",
"2024-03-19\n"
]
}
],
"source": [
"end = datetime.datetime(2024, 3, 19)\n",
"start = end \n",
"days_needed =1\n",
"slots = [(start + datetime.timedelta(days=i)).strftime('%Y-%m-%d') for i in range(days_needed)]\n",
"\n",
"print('Monthly time windows:\\n')\n",
"if len(slots) > 10:\n",
" for slot in slots[:3]:\n",
" print(slot)\n",
" print(\"...\")\n",
" for slot in slots[-3:]:\n",
" print(slot)\n",
"else:\n",
" for slot in slots:\n",
" print(slot)"
]
},
{
"cell_type": "markdown",
"id": "f8ea846f-783b-4460-a951-7b522273555f",
"metadata": {},
"source": [
"#### Download images\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "1fb5dc6c-de83-4fb2-b3f7-ee9e7060a80d",
"metadata": {},
"outputs": [],
"source": [
"# NOTE(review): in the first chemba / chemba_test_8b area the first coordinate\n",
"# (34.946) is greater than the third (34.938), i.e. min_x > max_x for a\n",
"# (min_x, min_y, max_x, max_y) bbox — confirm this is intended; BBox may reorder.\n",
"if project == 'chemba':\n",
"    chosen_area = [[34.946, -17.3516, 34.938, -17.2917], [34.883, -17.3516, 34.938, -17.2917]]\n",
"\n",
"if project == 'chemba_test_8b':\n",
"    chosen_area = [[34.946, -17.3516, 34.938, -17.2917], [34.883, -17.3516, 34.938, -17.2917]]\n",
"\n",
"if project == 'xinavane':\n",
"    chosen_area = [[32.6790, -25.0333, 32.7453, -25.0235], [32.6213, -25.0647, 32.6284, -25.0570]]"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "6c02d7de-cddf-4fc3-8d23-8431415d07b8",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" Image downloaded for 2024-03-19\n",
" Image downloaded for 2024-03-19\n"
]
}
],
"source": [
"# Load areas outside the loop if they remain constant\n",
"bbox_area = json.dumps(chosen_area)\n",
"areas = json.loads(os.getenv('BBOX', bbox_area))\n",
"resolution = 3\n",
"\n",
"for slot in slots:\n",
" for area in areas:\n",
" bbox = BBox(bbox=area, crs=CRS.WGS84)\n",
" size = bbox_to_dimensions(bbox, resolution=resolution)\n",
" download_function(slot, bbox, size)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "68db3c15-6f94-432e-b315-c329e4251b21",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Merging complete\n"
]
}
],
"source": [
"for slot in slots:\n",
" merge_files(slot)\n",
"\n",
"print('Merging complete')"
]
},
{
"cell_type": "markdown",
"id": "4274d8e7-1ea3-46db-9528-069ede0b2132",
"metadata": {
"tags": []
},
"source": [
"#### Delete intermediate files\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cb3fa856-a550-4899-844a-e69209bba3ad",
"metadata": {},
"outputs": [],
"source": [
"# Folders holding intermediate artefacts that may be cleared after merging\n",
"folders_to_empty = [BASE_PATH / 'merged_virtual', BASE_PATH_SINGLE_IMAGES]\n",
"\n",
"# Function to empty folders\n",
"def empty_folders(folders, run=True):\n",
" if not run:\n",
" print(\"Skipping empty_folders function.\")\n",
" return\n",
" \n",
" for folder in folders:\n",
" try:\n",
" for filename in os.listdir(folder):\n",
" file_path = os.path.join(folder, filename)\n",
" try:\n",
" if os.path.isfile(file_path):\n",
" os.unlink(file_path)\n",
" elif os.path.isdir(file_path):\n",
" shutil.rmtree(file_path)\n",
" except Exception as e:\n",
" print(f\"Error: {e}\")\n",
" print(f\"Emptied folder: {folder}\")\n",
" except OSError as e:\n",
" print(f\"Error: {e}\")\n",
"\n",
"# Call the function to empty folders only if the 'run' parameter is set to True\n",
"empty_folders(folders_to_empty, run=empty_folder_question)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0145b399-dfad-448a-9f0d-fa975fb01ad2",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View file

@ -508,7 +508,7 @@ dir.create(merged_final)
dir.create(harvest_dir) dir.create(harvest_dir)
weeks_ago = 0 #weeks_ago = 0
# Creating weekly mosaic # Creating weekly mosaic
dates <- date_list(weeks_ago) dates <- date_list(weeks_ago)
print(dates) print(dates)
@ -743,13 +743,14 @@ if (new_project_question == TRUE) {
pivot_stats <- extracted_values %>% pivot_stats <- extracted_values %>%
map(readRDS) %>% list_rbind() %>% map(readRDS) %>% list_rbind() %>%
group_by(pivot_quadrant) %>% group_by(subField) %>%
summarise(across(everything(), ~ first(na.omit(.)))) summarise(across(everything(), ~ first(na.omit(.))))
combined_CI_data <- readRDS(here(cumulative_CI_vals_dir,"combined_CI_data.rds")) #%>% drop_na(pivot_quadrant) combined_CI_data <- readRDS(here(cumulative_CI_vals_dir,"combined_CI_data.rds")) #%>% drop_na(pivot_quadrant)
pivot_stats2 <- bind_rows(pivot_stats, combined_CI_data) pivot_stats2 <- bind_rows(pivot_stats, combined_CI_data)
# pivot_stats2 <- combined_CI_data # pivot_stats2 <- combined_CI_data
print("All CI values extracted from latest 7 images.") print("All CI values extracted from latest 7 images.")
saveRDS(combined_CI_data, here(cumulative_CI_vals_dir,"combined_CI_data.rds")) #used to save the rest of the data into one file
} }

View file

@ -137,11 +137,11 @@ AllPivots0 <- field_boundaries_sf
# pivots_dates$pivot <- factor(pivots_dates$pivot, levels = c("1.1", "1.2", "1.3", "1.4", "1.6", "1.7", "1.8", "1.9", "1.10", "1.11", "1.12", "1.13", "1.14" , "1.16" , "1.17" , "1.18" ,"2.1", "2.2", "2.3" , "2.4", "2.5", "3.1", "3.2", "3.3", "4.1", "4.2", "4.3", "4.4", "4.5", "4.6", "5.1" ,"5.2", "5.3", "5.4", "6.1", "6.2", "DL1.1", "DL1.3")) # pivots_dates$pivot <- factor(pivots_dates$pivot, levels = c("1.1", "1.2", "1.3", "1.4", "1.6", "1.7", "1.8", "1.9", "1.10", "1.11", "1.12", "1.13", "1.14" , "1.16" , "1.17" , "1.18" ,"2.1", "2.2", "2.3" , "2.4", "2.5", "3.1", "3.2", "3.3", "4.1", "4.2", "4.3", "4.4", "4.5", "4.6", "5.1" ,"5.2", "5.3", "5.4", "6.1", "6.2", "DL1.1", "DL1.3"))
AllPivots <- merge(AllPivots0, harvesting_data, by = c("Field", "subField")) #%>% AllPivots <- merge(AllPivots0, harvesting_data, by = c("Field", "subField")) #%>%
# rename(pivot = pivot.x) #%>% select(-pivot.y) #rename(Field = pivot, subField = pivot_quadrant) #%>% select(-pivot.y)
head(AllPivots) head(AllPivots)
AllPivots_merged <- AllPivots %>% AllPivots_merged <- AllPivots %>%
group_by(Field) %>% summarise(sub_area = first(sub_area)) group_by(Field) #%>% summarise(sub_area = first(sub_area))
AllPivots_merged <- st_transform(AllPivots_merged, crs = proj4string(CI)) AllPivots_merged <- st_transform(AllPivots_merged, crs = proj4string(CI))
@ -403,7 +403,7 @@ for (subgroup in unique(pivots_grouped$sub_area)) {
cat("\n # Subgroup: ", subgroup, "\n") # Add a title for the subgroup cat("\n # Subgroup: ", subgroup, "\n") # Add a title for the subgroup
subset_data <- filter(pivots_grouped, sub_area == subgroup) subset_data <- filter(pivots_grouped, sub_area == subgroup)
cat("\n") cat("\n")
walk(subset_data$Field, ~ { walk(AllPivots_merged$Field, ~ {
cat("\n") # Add an empty line for better spacing cat("\n") # Add an empty line for better spacing
ci_plot(.x) ci_plot(.x)
cat("\n") cat("\n")

Binary file not shown.