Allow overriding the map centre and OSM zoom level from the command line, keeping the existing values as defaults. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
310 lines
10 KiB
Rust
310 lines
10 KiB
Rust
mod download;
|
||
mod grib;
|
||
mod projection;
|
||
mod render;
|
||
mod tiles;
|
||
|
||
use anyhow::{bail, Context, Result};
use chrono::{DateTime, Datelike, Timelike, Utc};
use clap::Parser;
use rayon::prelude::*;
use std::path::{Path, PathBuf};
||
|
||
// Root of the DWD open-data directory that serves ICON-D2 GRIB2 files;
// the model-run hour ("{HH}/") and parameter name are appended later.
const BASE_URL: &str = "https://opendata.dwd.de/weather/nwp/icon-d2/grib/";
// Common filename prefix shared by all ICON-D2 icosahedral-grid files.
const GRIB_NAME_BASE: &str = "icon-d2_germany_icosahedral_";
// Local directory used to cache downloaded GRIBs, OSM tiles and output PNGs.
const CACHE_DIR: &str = "cache";
|
||
|
||
// Command-line interface, parsed with clap's derive API.
// NOTE: the `///` comments on fields below are clap help text and are
// therefore part of the program's user-visible behavior.
#[derive(Parser)]
#[command(
    name = "cloud_cover",
    about = "Download and visualise DWD ICON-D2 cloud cover predictions as PNG maps",
    long_about = "Downloads total cloud cover (CLCT) predictions from the DWD open-data server \
                  and renders one PNG map per forecast hour.\n\n\
                  The timestamp must correspond to an ICON-D2 model run (every 3 hours, \
                  starting at 00 UTC). Example: \"2022-03-04T09:00:00Z\""
)]
struct Args {
    /// Model run timestamp in ISO 8601 format, e.g. "2022-03-04T09:00:00Z"
    // Positional argument; validated in main() (must parse and be a 3-hourly run).
    iso_timestamp: String,

    /// Number of forecast hours to download and render (0–48)
    // Positional argument; main() rejects values above 48.
    #[arg(value_name = "HOURS")]
    prediction_hours: u32,

    /// Disable OpenStreetMap base layer (use flat green background)
    #[arg(long)]
    no_basemap: bool,

    /// Map centre latitude in degrees (default: 52.56)
    // Defaults come from the render module so CLI and renderer stay in sync.
    #[arg(long, value_name = "LAT", default_value_t = render::DEFAULT_CENTER_LAT)]
    center_lat: f64,

    /// Map centre longitude in degrees (default: 13.08)
    #[arg(long, value_name = "LON", default_value_t = render::DEFAULT_CENTER_LON)]
    center_lon: f64,

    /// OSM tile zoom level (default: 10)
    #[arg(long, value_name = "ZOOM", default_value_t = render::DEFAULT_OSM_ZOOM)]
    zoom: u8,
}
|
||
|
||
fn main() -> Result<()> {
|
||
let args = Args::parse();
|
||
|
||
// Parse timestamp
|
||
let dt: DateTime<Utc> = args
|
||
.iso_timestamp
|
||
.parse::<DateTime<Utc>>()
|
||
.with_context(|| format!("Could not parse timestamp {:?}", args.iso_timestamp))?;
|
||
|
||
// Validate hour: ICON-D2 runs at 00, 03, 06, 09, 12, 15, 18, 21 UTC
|
||
if dt.hour() % 3 != 0 {
|
||
bail!(
|
||
"ICON-D2 only runs every 3 hours starting at 00 UTC. \
|
||
Got hour={}, which is not a multiple of 3.",
|
||
dt.hour()
|
||
);
|
||
}
|
||
|
||
if args.prediction_hours > 48 {
|
||
bail!(
|
||
"ICON-D2 predicts at most 48 hours into the future (requested {}).",
|
||
args.prediction_hours
|
||
);
|
||
}
|
||
|
||
// Build cache directory, e.g. cache/2022-03-04_09UTC/
|
||
let cache_subdir = format!(
|
||
"{:04}-{:02}-{:02}_{:02}UTC",
|
||
dt.year(),
|
||
dt.month(),
|
||
dt.day(),
|
||
dt.hour()
|
||
);
|
||
let cache_dir = PathBuf::from(CACHE_DIR).join(&cache_subdir);
|
||
std::fs::create_dir_all(&cache_dir)
|
||
.with_context(|| format!("Cannot create cache directory {}", cache_dir.display()))?;
|
||
|
||
// URL prefix for this model run: BASE_URL + "{HH}/"
|
||
let run_url = format!("{}{:02}/", BASE_URL, dt.hour());
|
||
let date_str = format!(
|
||
"{:04}{:02}{:02}{:02}",
|
||
dt.year(),
|
||
dt.month(),
|
||
dt.day(),
|
||
dt.hour()
|
||
);
|
||
|
||
// Build a dedicated thread pool for I/O-bound downloads, separate from
|
||
// the global rayon pool used for CPU-bound rendering.
|
||
let num_download_threads = (args.prediction_hours as usize + 2).min(16);
|
||
let download_pool = rayon::ThreadPoolBuilder::new()
|
||
.num_threads(num_download_threads)
|
||
.build()
|
||
.context("Failed to build download thread pool")?;
|
||
|
||
// Download coordinate grids (clat / clon) concurrently.
|
||
eprintln!("Downloading coordinate grids (clat / clon)...");
|
||
let (lats, lons) = download_pool.join(
|
||
|| download_and_decode(&run_url, &date_str, "clat", "time-invariant", None, &cache_dir).context("clat"),
|
||
|| download_and_decode(&run_url, &date_str, "clon", "time-invariant", None, &cache_dir).context("clon"),
|
||
);
|
||
let lats = lats?;
|
||
let lons = lons?;
|
||
|
||
if lats.len() != lons.len() {
|
||
bail!(
|
||
"clat and clon grids have different lengths ({} vs {})",
|
||
lats.len(),
|
||
lons.len()
|
||
);
|
||
}
|
||
eprintln!(" Grid has {} points.", lats.len());
|
||
|
||
// Download all forecast steps in parallel, then render sequentially.
|
||
eprintln!("Downloading {} forecast frame(s) in parallel...", args.prediction_hours);
|
||
let mut cloud_data: Vec<(u32, Vec<f32>)> = download_pool.install(|| {
|
||
(0..args.prediction_hours)
|
||
.into_par_iter()
|
||
.map(|step| {
|
||
let cloud = download_and_decode(
|
||
&run_url,
|
||
&date_str,
|
||
"clct",
|
||
"single-level",
|
||
Some(step),
|
||
&cache_dir,
|
||
)
|
||
.with_context(|| format!("clct step {}", step))?;
|
||
Ok::<_, anyhow::Error>((step, cloud))
|
||
})
|
||
.collect::<Result<Vec<_>>>()
|
||
})?;
|
||
cloud_data.sort_unstable_by_key(|(step, _)| *step);
|
||
|
||
// Fetch and rasterize OSM basemap (once, reused for all frames).
|
||
let basemap = if args.no_basemap {
|
||
None
|
||
} else {
|
||
let proj = render::make_projection(args.center_lat, args.center_lon, args.zoom);
|
||
let tile_cache = tiles::fetch_tiles(
|
||
&proj,
|
||
render::MAP_W,
|
||
render::MAP_H,
|
||
args.zoom,
|
||
&PathBuf::from(CACHE_DIR),
|
||
)?;
|
||
Some(tiles::rasterize_basemap(
|
||
&proj,
|
||
render::MAP_W,
|
||
render::MAP_H,
|
||
&tile_cache,
|
||
))
|
||
};
|
||
|
||
let labels = if args.no_basemap {
|
||
None
|
||
} else {
|
||
let proj = render::make_projection(args.center_lat, args.center_lon, args.zoom);
|
||
let label_cache = tiles::fetch_label_tiles(
|
||
&proj,
|
||
render::MAP_W,
|
||
render::MAP_H,
|
||
args.zoom,
|
||
&PathBuf::from(CACHE_DIR),
|
||
)?;
|
||
Some(tiles::rasterize_labels(
|
||
&proj,
|
||
render::MAP_W,
|
||
render::MAP_H,
|
||
&label_cache,
|
||
))
|
||
};
|
||
|
||
eprintln!("Rendering {} frame(s)...", args.prediction_hours);
|
||
let mut output_paths = Vec::new();
|
||
|
||
for (step, cloud) in cloud_data {
|
||
if cloud.len() != lats.len() {
|
||
bail!(
|
||
"clct step {} has {} values but coordinate grids have {}",
|
||
step,
|
||
cloud.len(),
|
||
lats.len()
|
||
);
|
||
}
|
||
|
||
let forecast_dt = dt + chrono::Duration::hours(step as i64);
|
||
let forecast_dt = forecast_dt.with_timezone(&chrono_tz::Europe::Berlin);
|
||
let title = if step == 0 {
|
||
format!(
|
||
"Conditions at {} CET",
|
||
forecast_dt.format("%Y-%m-%d %H:%M")
|
||
)
|
||
} else {
|
||
format!(
|
||
"Prediction at {} CET (+{:02}h)",
|
||
forecast_dt.format("%Y-%m-%d %H:%M"),
|
||
step
|
||
)
|
||
};
|
||
|
||
let out_name = format!("clct_{:06}.png", step + 1);
|
||
let out_path = cache_dir.join(&out_name);
|
||
|
||
render::render_frame(&out_path, &lats, &lons, &cloud, &title, basemap.as_deref(), labels.as_deref(), args.center_lat, args.center_lon, args.zoom)
|
||
.with_context(|| format!("Rendering frame {}", step))?;
|
||
|
||
eprintln!(" [{:02}/{:02}] {}", step + 1, args.prediction_hours, out_name);
|
||
output_paths.push(out_path);
|
||
}
|
||
|
||
eprintln!("\nDone. {} PNG(s) written to {}:", output_paths.len(), cache_dir.display());
|
||
for p in &output_paths {
|
||
eprintln!(" {}", p.display());
|
||
}
|
||
|
||
if output_paths.len() > 1 {
|
||
eprintln!("Building animation...");
|
||
let gif_path = cache_dir.join("clct_animation.gif");
|
||
create_gif(&output_paths, &gif_path).context("Creating GIF")?;
|
||
eprintln!(" GIF: {}", gif_path.display());
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
fn create_gif(png_paths: &[PathBuf], out_path: &PathBuf) -> Result<()> {
|
||
use gif::{Encoder, Frame, Repeat};
|
||
|
||
// Load PNGs and quantize to 256-colour palettes in parallel (the bottleneck).
|
||
let n = png_paths.len();
|
||
let frames: Vec<Frame<'static>> = png_paths
|
||
.par_iter()
|
||
.enumerate()
|
||
.map(|(i, path)| -> Result<Frame<'static>> {
|
||
let img = image::open(path)
|
||
.with_context(|| format!("Loading PNG {}", path.display()))?
|
||
.into_rgba8();
|
||
let (width, height) = img.dimensions();
|
||
let mut pixels = img.into_raw();
|
||
// speed 8: slightly better quality than the default (10)
|
||
let mut frame =
|
||
Frame::from_rgba_speed(width as u16, height as u16, &mut pixels, 8);
|
||
// Pause longer on first and last frame; 1 unit = 10 ms
|
||
frame.delay = if i == 0 || i == n - 1 { 200 } else { 40 };
|
||
Ok(frame)
|
||
})
|
||
.collect::<Result<Vec<_>>>()?;
|
||
|
||
// Write frames sequentially (GIF encoder is not thread-safe).
|
||
let file = std::fs::File::create(out_path)?;
|
||
let (w, h) = (frames[0].width, frames[0].height);
|
||
let mut encoder = Encoder::new(file, w, h, &[])?;
|
||
encoder.set_repeat(Repeat::Infinite)?;
|
||
for frame in &frames {
|
||
encoder.write_frame(frame)?;
|
||
}
|
||
Ok(())
|
||
}
|
||
|
||
/// Build the DWD URL, download the bzip2-compressed GRIB2 file, and decode it.
|
||
///
|
||
/// `param` – variable name ("clat", "clon", "clct")
|
||
/// `kind` – "time-invariant" or "single-level"
|
||
/// `step` – forecast step in hours (None for time-invariant files)
|
||
fn download_and_decode(
|
||
run_url: &str,
|
||
date_str: &str,
|
||
param: &str,
|
||
kind: &str,
|
||
step: Option<u32>,
|
||
cache_dir: &PathBuf,
|
||
) -> Result<Vec<f32>> {
|
||
let step_str = step.map(|s| format!("{:03}", s)).unwrap_or_else(|| "000".to_string());
|
||
// DWD filename pattern:
|
||
// icon-d2_germany_icosahedral_time-invariant_YYYYMMDDHH_000_0_clat.grib2.bz2
|
||
// icon-d2_germany_icosahedral_single-level_YYYYMMDDHH_NNN_2d_clct.grib2.bz2
|
||
let file_name_bz2 = format!(
|
||
"{}{}_{}_{}_{}_{}.grib2.bz2",
|
||
GRIB_NAME_BASE, kind, date_str, step_str,
|
||
if kind == "time-invariant" { "0" } else { "2d" },
|
||
param
|
||
);
|
||
|
||
let url = format!("{}{}/{}", run_url, param, file_name_bz2);
|
||
let cache_path = cache_dir.join(format!("{}.grib2", {
|
||
// cache as e.g. clat.grib2 or clct_003.grib2
|
||
if let Some(s) = step {
|
||
format!("{}_{:03}", param, s)
|
||
} else {
|
||
param.to_string()
|
||
}
|
||
}));
|
||
|
||
let grib_bytes = download::fetch_grib(&url, &cache_path)?;
|
||
let values = grib::decode(&grib_bytes)
|
||
.with_context(|| format!("GRIB2 decode failed for {}", cache_path.display()))?;
|
||
Ok(values)
|
||
}
|