Skip to content

Instantly share code, notes, and snippets.

@GerardoLopez
Created August 1, 2025 15:28
Show Gist options
  • Select an option

  • Save GerardoLopez/76b541ffbbf13a4d829951a6c34858f2 to your computer and use it in GitHub Desktop.

Select an option

Save GerardoLopez/76b541ffbbf13a4d829951a6c34858f2 to your computer and use it in GitHub Desktop.
Spatial uncertainty calculation when upscaling data.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "5bb557a9",
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Leaf Area Index (LAI) values for a 2x2 grid of 500m MODIS pixels\n",
"lai_500m = np.array([[2.5, 3.0], [2.8, 2.6]])\n",
"\n",
"# Per-pixel LAI uncertainty (one standard deviation) at 500m\n",
"uncertainty_500m = np.array([[0.3, 0.4], [0.25, 0.35]])"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "1f6b2fcd",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Weighted mean LAI: 2.71051\n",
"Weighted variance for 500m pixels: 0.02996\n"
]
}
],
"source": [
"# Inverse-variance weights: pixels with lower uncertainty contribute more\n",
"weights_500m = 1.0 / uncertainty_500m**2\n",
"\n",
"# Weighted mean LAI: sum(w * x) / sum(w), via np.average\n",
"weighted_mean_500m = np.average(lai_500m, weights=weights_500m)\n",
"\n",
"# Biased weighted variance: weighted mean of squared deviations\n",
"weighted_variance_500m = np.average((lai_500m - weighted_mean_500m) ** 2, weights=weights_500m)\n",
"\n",
"print(f\"Weighted mean LAI: {weighted_mean_500m:.5f}\")\n",
"print(f\"Weighted variance for 500m pixels: {weighted_variance_500m:.5f}\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "20e33fdf",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Weighted mean LAI: 2.71051\n",
"Weighted var LAI: 0.02996\n"
]
}
],
"source": [
"# Simulate resampling: each 500m pixel becomes a block of 625 20m pixels\n",
"resample_factor = 25  # 500 / 20 = 25 in each dimension -> 625 pixels per cell\n",
"lai_20m = np.repeat(np.repeat(lai_500m, resample_factor, axis=0), resample_factor, axis=1)\n",
"weights_20m = np.repeat(np.repeat(weights_500m, resample_factor, axis=0), resample_factor, axis=1)\n",
"\n",
"# Normalize so each block of 625 20m pixels retains the total weight of\n",
"# its parent 500m pixel\n",
"block_area = resample_factor ** 2\n",
"weights_20m_normalized = weights_20m / block_area\n",
"\n",
"# Weighted mean and variance across the whole area.  Values and weights are\n",
"# constant within each block, so the uniform 1/block_area normalization\n",
"# cancels in both ratios and the results match the 500m computation exactly.\n",
"lai_mean_20m = np.sum(lai_20m * weights_20m_normalized) / np.sum(weights_20m_normalized)\n",
"\n",
"weighted_spread_20m = np.sum(weights_20m_normalized * (lai_20m - lai_mean_20m) ** 2)\n",
"weights_modulator_20m = np.sum(weights_20m_normalized)\n",
"\n",
"lai_var = weighted_spread_20m / weights_modulator_20m\n",
"\n",
"print(f\"Weighted mean LAI: {lai_mean_20m:.5f}\")\n",
"print(f\"Weighted var LAI: {lai_var:.5f}\")\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "worldpeatland",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment