chore: cleanups
Some checks are pending
Verify Latest Dependencies / Verify Latest Dependencies (push) Waiting to run
build and test / wxbox - latest (push) Waiting to run

This commit is contained in:
core 2025-05-19 20:11:00 -04:00
parent e4dcb48878
commit 5c597977ee
114 changed files with 12286 additions and 26 deletions

99
Cargo.lock generated
View file

@ -324,6 +324,12 @@ version = "1.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "byteorder-lite"
version = "0.1.0"
@ -624,6 +630,29 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "env_filter"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
dependencies = [
"log",
"regex",
]
[[package]]
name = "env_logger"
version = "0.11.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f"
dependencies = [
"anstream",
"anstyle",
"env_filter",
"jiff",
"log",
]
[[package]]
name = "equivalent"
version = "1.0.2"
@ -1294,6 +1323,30 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jiff"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806"
dependencies = [
"jiff-static",
"log",
"portable-atomic",
"portable-atomic-util",
"serde",
]
[[package]]
name = "jiff-static"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "jobserver"
version = "0.1.32"
@ -1509,13 +1562,12 @@ checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]]
name = "nexrad-data"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2aef96f687e5774386f0dfe4e95bbf98b531559426e4b3bdddd27ca3d38488"
dependencies = [
"bincode 1.3.3",
"bzip2",
"chrono",
"clap",
"env_logger",
"log",
"nexrad-decode",
"nexrad-model",
@ -1529,25 +1581,28 @@ dependencies = [
[[package]]
name = "nexrad-decode"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dab458c09a15d9a133a7935a8024022db3cd3282549c2ed000f44c4ea392213a"
dependencies = [
"bincode 1.3.3",
"chrono",
"clap",
"env_logger",
"log",
"nexrad-data",
"nexrad-model",
"serde",
"thiserror 1.0.69",
"tokio",
"uom",
]
[[package]]
name = "nexrad-model"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a13fa673733e34220daf6f2ac75051d94d66acdd3fd2127f76593b6a36d1593c"
dependencies = [
"chrono",
"serde",
"thiserror 1.0.69",
"uom",
]
[[package]]
@ -1812,6 +1867,15 @@ version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
[[package]]
name = "portable-atomic-util"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507"
dependencies = [
"portable-atomic",
]
[[package]]
name = "ppv-lite86"
version = "0.2.21"
@ -2197,6 +2261,28 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rmp"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4"
dependencies = [
"byteorder",
"num-traits",
"paste",
]
[[package]]
name = "rmp-serde"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db"
dependencies = [
"byteorder",
"rmp",
"serde",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"
@ -3571,6 +3657,7 @@ dependencies = [
"quick-xml",
"rayon",
"reqwest",
"rmp-serde",
"serde",
"strum",
"strum_macros",

View file

@ -14,3 +14,23 @@ opt-level = 3
[profile.bench]
codegen-units = 1
lto = "fat"
[workspace.dependencies]
log = { version = "0.4" }
clap = { version = "4.5", features = ["derive"] }
chrono = { version = "0.4" }
uom = { version = "0.36" }
serde = { version = "1.0", features = ["derive"] }
thiserror = { version = "1.0" }
bincode = { version = "1.3" }
reqwest = { version = "0.12", default-features = false, features = [
"rustls-tls",
] }
xml = { version = "0.8" }
bzip2 = { version = "0.4" }
bzip2-rs = { version = "0.1" }
rayon = { version = "1.10" }
tokio = { version = "1", features = ["full"] }
env_logger = { version = "0.11" }
piet = { version = "0.6.2", features = ["png"] }
piet-common = { version = "0.6.2", features = ["png"] }

View file

@ -0,0 +1,8 @@
color: 10 164 164 255 100 100 192
color: 20 64 128 255 32 64 128
color: 30 0 255 0 0 128 0
color: 40 255 255 0 255 128 0
color: 50 255 0 0 160 0 0
color: 60 255 0 255 128 0 128
color: 70 255 255 255
color: 80 128 128 128

View file

@ -0,0 +1,9 @@
color4: -30 165 165 165 0 8 230 230 255
color: 10 0 165 255 0 8 197
color: 20 16 255 8 10 126 3
color: 35 251 238 0 210 112 2
color: 50 255 0 0 171 0 1
color: 65 247 1 249 136 63 174
color: 75 255 255 255 184 184 184
color: 85 184 184 184
color: 95 184 184 184

View file

@ -0,0 +1,10 @@
color4: -10 7 59 71 0
color: 0 62 69 71 191 193 197
color: 20 135 229 125
color: 30 48 102 43
color: 35 253 227 0
color: 50 254 26 0 181 0 52
color: 60 163 0 136 254 4 250
color: 70 67 190 254 19 144 242
color: 80 166 176 150 255 231 188
color: 85 255 231 188

View file

@ -0,0 +1,255 @@
SolidColor: -32.0 115 77 172
SolidColor: -31.5 115 78 168
SolidColor: -31.0 115 79 165
SolidColor: -30.5 115 81 162
SolidColor: -30.0 116 82 158
SolidColor: -29.5 116 84 155
SolidColor: -29.0 116 85 152
SolidColor: -28.5 117 86 148
SolidColor: -28.0 117 88 145
SolidColor: -27.5 117 89 142
SolidColor: -27.0 118 91 138
SolidColor: -26.5 118 92 135
SolidColor: -26.0 118 94 132
SolidColor: -25.5 119 95 128
SolidColor: -25.0 119 96 125
SolidColor: -24.5 119 98 122
SolidColor: -24.0 120 99 118
SolidColor: -23.5 120 101 115
SolidColor: -23.0 120 102 112
SolidColor: -22.5 121 103 108
SolidColor: -22.0 121 105 105
SolidColor: -21.5 121 106 102
SolidColor: -21.0 122 108 98
SolidColor: -20.5 122 109 95
SolidColor: -20.0 122 111 92
SolidColor: -19.5 123 112 88
SolidColor: -19.0 123 113 85
SolidColor: -18.5 123 115 82
SolidColor: -18.0 124 116 78
SolidColor: -17.5 124 118 75
SolidColor: -17.0 124 119 72
SolidColor: -16.5 125 121 69
SolidColor: -16.0 127 123 72
SolidColor: -15.5 129 125 75
SolidColor: -15.0 131 127 79
SolidColor: -14.5 133 130 82
SolidColor: -14.0 135 132 85
SolidColor: -13.5 137 134 89
SolidColor: -13.0 139 137 92
SolidColor: -12.5 141 139 96
SolidColor: -12.0 144 141 99
SolidColor: -11.5 146 144 102
SolidColor: -11.0 148 146 106
SolidColor: -10.5 150 148 109
SolidColor: -10.0 152 151 113
SolidColor: -9.5 154 153 116
SolidColor: -9.0 156 155 119
SolidColor: -8.5 158 158 123
SolidColor: -8.0 161 160 126
SolidColor: -7.5 163 162 130
SolidColor: -7.0 165 165 133
SolidColor: -6.5 167 167 136
SolidColor: -6.0 169 169 140
SolidColor: -5.5 171 172 143
SolidColor: -5.0 173 174 147
SolidColor: -4.5 175 176 150
SolidColor: -4.0 178 179 154
SolidColor: -3.5 173 175 153
SolidColor: -3.0 168 171 152
SolidColor: -2.5 163 167 151
SolidColor: -2.0 158 163 150
SolidColor: -1.5 154 159 149
SolidColor: -1.0 149 155 148
SolidColor: -0.5 144 151 147
SolidColor: 0.0 139 147 146
SolidColor: 0.5 135 144 145
SolidColor: 1.0 130 140 144
SolidColor: 1.5 125 136 143
SolidColor: 2.0 120 132 142
SolidColor: 2.5 115 128 142
SolidColor: 3.0 111 124 141
SolidColor: 3.5 106 120 140
SolidColor: 4.0 101 116 139
SolidColor: 4.5 96 112 138
SolidColor: 5.0 92 109 137
SolidColor: 5.5 87 105 136
SolidColor: 6.0 82 101 135
SolidColor: 6.5 77 97 134
SolidColor: 7.0 73 93 133
SolidColor: 7.5 68 89 132
SolidColor: 8.0 63 85 131
SolidColor: 8.5 58 81 130
SolidColor: 9.0 54 78 130
SolidColor: 9.5 55 81 132
SolidColor: 10.0 57 85 134
SolidColor: 10.5 59 89 136
SolidColor: 11.0 61 93 138
SolidColor: 11.5 63 97 141
SolidColor: 12.0 65 101 143
SolidColor: 12.5 67 105 145
SolidColor: 13.0 69 109 147
SolidColor: 13.5 71 113 149
SolidColor: 14.0 73 117 152
SolidColor: 14.5 74 121 154
SolidColor: 15.0 76 125 156
SolidColor: 15.5 78 129 158
SolidColor: 16.0 80 133 160
SolidColor: 16.5 82 137 163
SolidColor: 17.0 84 141 165
SolidColor: 17.5 86 145 167
SolidColor: 18.0 88 149 169
SolidColor: 18.5 90 153 171
SolidColor: 19.0 92 157 174
SolidColor: 19.5 76 165 142
SolidColor: 20.0 60 173 110
SolidColor: 20.5 45 182 78
SolidColor: 21.0 42 175 72
SolidColor: 21.5 39 169 67
SolidColor: 22.0 37 163 62
SolidColor: 22.5 34 156 56
SolidColor: 23.0 31 150 51
SolidColor: 23.5 29 144 46
SolidColor: 24.0 26 137 40
SolidColor: 24.5 24 131 35
SolidColor: 25.0 21 125 30
SolidColor: 25.5 18 118 24
SolidColor: 26.0 16 112 19
SolidColor: 26.5 13 106 14
SolidColor: 27.0 11 100 9
SolidColor: 27.5 35 115 8
SolidColor: 28.0 59 130 7
SolidColor: 28.5 83 145 6
SolidColor: 29.0 107 161 5
SolidColor: 29.5 131 176 4
SolidColor: 30.0 155 191 3
SolidColor: 30.5 179 207 2
SolidColor: 31.0 203 222 1
SolidColor: 31.5 227 237 0
SolidColor: 32.0 252 253 0
SolidColor: 32.5 248 248 0
SolidColor: 33.0 244 243 0
SolidColor: 33.5 241 238 0
SolidColor: 34.0 237 233 0
SolidColor: 34.5 233 228 0
SolidColor: 35.0 230 223 0
SolidColor: 35.5 226 218 0
SolidColor: 36.0 222 213 0
SolidColor: 36.5 219 208 0
SolidColor: 37.0 215 203 0
SolidColor: 37.5 211 198 0
SolidColor: 38.0 208 193 0
SolidColor: 38.5 204 188 0
SolidColor: 39.0 200 183 0
SolidColor: 39.5 197 179 0
SolidColor: 40.0 250 148 0
SolidColor: 40.5 246 144 0
SolidColor: 41.0 242 141 1
SolidColor: 41.5 238 138 1
SolidColor: 42.0 234 135 2
SolidColor: 42.5 231 132 3
SolidColor: 43.0 227 129 3
SolidColor: 43.5 223 126 4
SolidColor: 44.0 219 123 5
SolidColor: 44.5 215 120 5
SolidColor: 45.0 212 116 6
SolidColor: 45.5 208 113 6
SolidColor: 46.0 204 110 7
SolidColor: 46.5 200 107 8
SolidColor: 47.0 196 104 8
SolidColor: 47.5 193 101 9
SolidColor: 48.0 189 98 10
SolidColor: 48.5 185 95 10
SolidColor: 49.0 181 92 11
SolidColor: 49.5 178 89 12
SolidColor: 50.0 249 35 11
SolidColor: 50.5 242 35 12
SolidColor: 51.0 236 35 13
SolidColor: 51.5 230 35 14
SolidColor: 52.0 223 36 15
SolidColor: 52.5 217 36 16
SolidColor: 53.0 211 36 17
SolidColor: 53.5 205 36 18
SolidColor: 54.0 198 37 19
SolidColor: 54.5 192 37 20
SolidColor: 55.0 186 37 22
SolidColor: 55.5 180 37 23
SolidColor: 56.0 173 38 24
SolidColor: 56.5 167 38 25
SolidColor: 57.0 161 38 26
SolidColor: 57.5 155 38 27
SolidColor: 58.0 148 39 28
SolidColor: 58.5 142 39 29
SolidColor: 59.0 136 39 30
SolidColor: 59.5 130 40 32
SolidColor: 60.0 202 153 180
SolidColor: 60.5 201 146 176
SolidColor: 61.0 201 139 173
SolidColor: 61.5 200 133 169
SolidColor: 62.0 200 126 166
SolidColor: 62.5 199 120 162
SolidColor: 63.0 199 113 159
SolidColor: 63.5 199 106 155
SolidColor: 64.0 198 100 152
SolidColor: 64.5 198 93 148
SolidColor: 65.0 197 87 145
SolidColor: 65.5 197 80 141
SolidColor: 66.0 196 74 138
SolidColor: 66.5 196 67 134
SolidColor: 67.0 196 60 131
SolidColor: 67.5 195 54 127
SolidColor: 68.0 195 47 124
SolidColor: 68.5 194 41 120
SolidColor: 69.0 194 34 117
SolidColor: 69.5 194 28 114
SolidColor: 70.0 154 36 224
SolidColor: 70.5 149 34 219
SolidColor: 71.0 144 33 215
SolidColor: 71.5 139 32 210
SolidColor: 72.0 134 31 206
SolidColor: 72.5 129 30 201
SolidColor: 73.0 124 29 197
SolidColor: 73.5 120 28 193
SolidColor: 74.0 115 27 188
SolidColor: 74.5 110 26 184
SolidColor: 75.0 105 24 179
SolidColor: 75.5 100 23 175
SolidColor: 76.0 95 22 170
SolidColor: 76.5 91 21 166
SolidColor: 77.0 86 20 162
SolidColor: 77.5 81 19 157
SolidColor: 78.0 76 18 153
SolidColor: 78.5 71 17 148
SolidColor: 79.0 66 16 144
SolidColor: 79.5 62 15 140
SolidColor: 80.0 132 253 255
SolidColor: 80.5 128 245 249
SolidColor: 81.0 125 238 243
SolidColor: 81.5 121 231 237
SolidColor: 82.0 118 224 231
SolidColor: 82.5 115 217 225
SolidColor: 83.0 111 210 219
SolidColor: 83.5 108 203 213
SolidColor: 84.0 105 196 207
SolidColor: 84.5 101 189 201
SolidColor: 85.0 98 181 196
SolidColor: 85.5 94 174 190
SolidColor: 86.0 91 167 184
SolidColor: 86.5 88 160 178
SolidColor: 87.0 84 153 172
SolidColor: 87.5 81 146 166
SolidColor: 88.0 78 139 160
SolidColor: 88.5 74 132 154
SolidColor: 89.0 71 125 148
SolidColor: 89.5 68 118 143
SolidColor: 90.0 161 101 73
SolidColor: 90.5 155 90 65
SolidColor: 91.0 150 80 56
SolidColor: 91.5 145 70 48
SolidColor: 92.0 140 60 40
SolidColor: 92.5 135 50 32
SolidColor: 93.0 130 40 24
SolidColor: 93.5 125 30 16
SolidColor: 94.0 120 20 8
SolidColor: 94.5 115 10 1

View file

@ -0,0 +1,8 @@
Color4: 10 164 164 255 0 100 100 192 255
Color4: 20 64 128 255 255 32 64 128 255
Color4: 30 0 255 0 255 0 128 0 255
Color4: 40 255 255 0 255 255 128 0 255
Color4: 50 255 0 0 255 160 0 0 255
Color4: 60 255 0 255 255 128 0 128 255
Color4: 70 255 255 255 255 128 128 128 255
Color4: 80 128 128 128 255

View file

@ -4,9 +4,9 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nexrad-decode = "0.1.1"
nexrad-data = "0.2"
serde = { version = "1", features = ["derive"]}
nexrad-decode = { path = "../nexrad-decode" }
nexrad-data = { path = "../nexrad-data" }
serde = { version = "1", features = ["derive"] }
toml = "0.8"
rayon = { version = "1" }
@ -15,4 +15,4 @@ criterion = { version = "0.5" }
[[bench]]
name = "parse_benchmark"
harness = false
harness = false

View file

@ -4,22 +4,25 @@ use nexrad_data::volume::File;
use nexrad_decode::messages::MessageContents;
use nexrad_decode::messages::digital_radar_data::{GenericDataBlock, RadialStatus};
use nexrad_decode::result::Error;
use std::fmt::Debug;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
pub mod sites;
#[derive(Serialize, Deserialize, Clone)]
pub struct Scan {
pub coverage_pattern_number: u16,
pub sweeps: Vec<Sweep>,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct Sweep {
pub elevation_number: u8,
pub radials: Vec<Radial>,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct Radial {
pub collection_timestamp: i64,
pub azimuth_number: u16,
@ -37,7 +40,7 @@ pub struct Radial {
pub specific_differential_phase: Option<MomentData>,
}
#[derive(Debug)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MomentData {
pub scale: f32,
pub offset: f32,
@ -88,7 +91,8 @@ pub fn parse(input: Vec<u8>) -> nexrad_data::result::Result<Scan> {
let mut vcp: Option<u16> = None;
let mut radials = vec![];
let radial_chunks: Vec<nexrad_data::result::Result<(Vec<Radial>, Option<u16>)>> = file.records()
let radial_chunks: Vec<nexrad_data::result::Result<(Vec<Radial>, Option<u16>)>> = file
.records()
.par_iter_mut()
.map(|record| {
let mut vcp = None;
@ -146,21 +150,15 @@ fn into_radial(
elevation_number_degrees: message.header.elevation_angle,
reflectivity: message.reflectivity_data_block.map(into_moment_data),
velocity: message.velocity_data_block.map(into_moment_data),
spectrum_width: message
.spectrum_width_data_block
.map(into_moment_data),
spectrum_width: message.spectrum_width_data_block.map(into_moment_data),
differential_reflectivity: message
.differential_reflectivity_data_block
.map(into_moment_data),
differential_phase: message
.differential_phase_data_block
.map(into_moment_data),
differential_phase: message.differential_phase_data_block.map(into_moment_data),
correlation_coefficient: message
.correlation_coefficient_data_block
.map(into_moment_data),
specific_differential_phase: message
.specific_diff_phase_data_block
.map(into_moment_data),
specific_differential_phase: message.specific_diff_phase_data_block.map(into_moment_data),
})
}
@ -206,4 +204,4 @@ impl Sweep {
sweeps
}
}
}

1
crates/nexrad-data/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/downloads

View file

@ -0,0 +1,56 @@
[package]
name = "nexrad-data"
version = "0.2.0"
description = "Models and functions for accessing NEXRAD data from public sources."
authors = ["Daniel Way <contact@danieldway.com>"]
repository = "https://github.com/danielway/nexrad/nexrad-data"
license = "MIT"
edition = "2021"
[features]
default = ["aws", "decode", "nexrad-model"]
decode = ["nexrad-decode", "bzip2", "serde", "bincode"]
aws = ["reqwest", "xml", "tokio"]
[dependencies]
log = { workspace = true }
thiserror = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, optional = true }
bincode = { workspace = true, optional = true }
reqwest = { workspace = true, optional = true }
xml = { workspace = true, optional = true }
bzip2 = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
nexrad-model = { version = "0.1.0", path = "../nexrad-model", optional = true }
nexrad-decode = { version = "0.1.1", path = "../nexrad-decode", optional = true }
[dev-dependencies]
clap = { workspace = true }
env_logger = { workspace = true }
tokio = { workspace = true }
[[example]]
name = "realtime"
path = "examples/realtime.rs"
required-features = ["aws", "decode"]
[[example]]
name = "archive"
path = "examples/archive.rs"
required-features = ["aws", "decode"]
[[example]]
name = "latency_analysis"
path = "examples/latency_analysis.rs"
required-features = ["aws", "decode"]
[[example]]
name = "chunk_timing"
path = "examples/chunk_timing.rs"
required-features = ["aws", "decode"]
[[example]]
name = "chunk_csv"
path = "examples/chunk_csv.rs"
required-features = ["aws", "decode"]

View file

@ -0,0 +1,48 @@
# NEXRAD Data
[![Crates.io](https://img.shields.io/crates/v/nexrad-data)](https://crates.io/crates/nexrad-data)
[![Docs.rs](https://docs.rs/nexrad-data/badge.svg)](https://docs.rs/nexrad-data)
[![Rust CI](https://github.com/danielway/nexrad/actions/workflows/ci.yml/badge.svg)](https://github.com/danielway/nexrad/actions/workflows/ci.yml)
Provides structure definitions and decoding functions for NEXRAD Archive II volume files, along with functions for
downloading both archival and real-time data from open cloud providers like AWS OpenData.
## Volume Definitions
The `nexrad-data::volume` module provides model definitions for the NEXRAD Archive II volume file format described in
the Radar Operations Center's ICD 2620010H for the Archive II/User (as of build 19.0 March 3, 2020). A `volume::File`
can be constructed with archive or real-time data. It can decode the archive volume header and provide access to LDM
`volume::Record`s which can be decompressed and decoded into a series of messages.
## AWS Open Data
NOAA uploads archive and real-time NEXRAD data to AWS Open Data S3 buckets which are publicly available. The
`nexrad-data::aws` module provides functions for listing and downloading NEXRAD data from these buckets.
### Archive Data
Historical volumes are archived by date and radar site in the `noaa-nexrad-level2` bucket. The
`nexrad-data::aws::archive` module provides functions for accessing these volumes. The `archive::list_files` function
queries volumes for a given date and radar site, returning identifiers for each volume. The `archive::download_file`
function downloads a volume file by its identifier.
### Real-Time Data
Real-time volume data is uploaded in chunks to the `unidata-nexrad-level2-chunks` bucket. 999 volume directories are
rotated through with chunks being added to each directory until they comprise a full volume. The
`nexrad-data::aws::realtime` module provides functions for accessing these chunks. The `realtime::list_chunks_in_volume`
function queries a volume for its chunks, returning identifiers for each chunk. The `realtime::download_chunk` function
downloads a chunk by its identifier. The `realtime::get_latest_volume` function can be used to identify which of the 999
volume directories contain the latest data, and the `realtime::estimate_next_chunk_time` function can be used to
estimate when the next chunk will be uploaded. Finally, the `realtime::poll_chunks` function will asynchronously poll for
new chunks in the latest volume directory, downloading them as they become available.
## Features
The APIs in this crate should be configured to require only the dependencies they need, leaving the consumer to include
as much or as little as they desire. By default, all features are included. The following named features are helpful
groupings of dependencies/behavior:
- `decode` - Enables decoding of both the volume headers and the LDM records' NEXRAD messages using `nexrad-decode`.
- `aws` - Enables accessing archive and real-time NEXRAD data from AWS Open Data.
- `nexrad-model` - Provides mappings to a common radar data model, particularly for mapping `volume::File` into a `Scan`.

View file

@ -0,0 +1,171 @@
#![cfg(all(feature = "aws", feature = "decode"))]
use chrono::{NaiveDate, NaiveTime};
use clap::Parser;
use env_logger::{Builder, Env};
use log::{debug, info, trace, warn, LevelFilter};
use nexrad_data::aws::archive::{self, download_file, list_files};
use nexrad_data::result::Result;
use nexrad_data::volume::File;
use std::fs::create_dir;
use std::io::Read;
use std::io::Write;
use std::path::Path;
// Command-line arguments for the archive download example, parsed with clap's
// derive API. The `///` doc comments on each field become the generated
// `--help` text, so they are left exactly as written.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Site identifier (e.g., KDMX)
    #[arg(default_value = "KDMX")]
    site: String,

    /// Date in YYYY-MM-DD format
    #[arg(default_value = "2022-03-05")]
    date: String,

    /// Start time in HH:MM format
    #[arg(default_value = "23:30")]
    start_time: String,

    /// Stop time in HH:MM format
    #[arg(default_value = "23:30")]
    stop_time: String,
}
// Example entry point: lists archive volume files for a site/date, selects the
// range of files bracketing [start_time, stop_time], downloads each one (with a
// local `downloads/` cache), decodes its LDM records, and logs a summary.
#[tokio::main]
async fn main() -> Result<()> {
    // Default to debug-level logging; quiet reqwest's per-connection logging.
    Builder::from_env(Env::default().default_filter_or("debug"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();

    let cli = Cli::parse();

    // Parse CLI strings eagerly; invalid input is a usage error, so panic via
    // expect rather than propagating.
    let site = &cli.site;
    let date = NaiveDate::parse_from_str(&cli.date, "%Y-%m-%d").expect("is valid date");
    let start_time =
        NaiveTime::parse_from_str(&cli.start_time, "%H:%M").expect("start is valid time");
    let stop_time = NaiveTime::parse_from_str(&cli.stop_time, "%H:%M").expect("stop is valid time");

    info!("Listing files for {} on {}...", site, date);
    let file_ids = list_files(site, &date).await?;

    if file_ids.is_empty() {
        warn!("No files found for the specified date/site to download.");
        return Ok(());
    }

    debug!("Found {} files.", file_ids.len());

    // Pick the files whose timestamps are nearest the requested start/stop
    // times; everything between them (inclusive) is processed.
    let start_index = get_nearest_file_index(&file_ids, start_time);
    debug!(
        "Nearest file to start of {:?} is {:?}.",
        start_time,
        file_ids[start_index].name()
    );

    let stop_index = get_nearest_file_index(&file_ids, stop_time);
    debug!(
        "Nearest file to stop of {:?} is {:?}.",
        stop_time,
        file_ids[stop_index].name()
    );

    debug!("Downloading {} files...", stop_index - start_index + 1);
    for file_id in file_ids
        .iter()
        .skip(start_index)
        .take(stop_index - start_index + 1)
    {
        // "_MDM" files are skipped rather than decoded.
        if file_id.name().ends_with("_MDM") {
            debug!("Skipping MDM file: {}", file_id.name());
            continue;
        }

        // Use a previously-downloaded copy from downloads/ when present;
        // otherwise download and write it to disk for next time.
        let file = if Path::new(&format!("downloads/{}", file_id.name())).exists() {
            debug!("File \"{}\" already downloaded.", file_id.name());

            let mut file =
                std::fs::File::open(format!("downloads/{}", file_id.name())).expect("open file");

            let mut buffer = Vec::new();
            file.read_to_end(&mut buffer).expect("read file");

            File::new(buffer)
        } else {
            debug!("Downloading file \"{}\"...", file_id.name());
            let file = download_file(file_id.clone()).await?;

            if !Path::new("downloads").exists() {
                trace!("Creating downloads directory...");
                create_dir("downloads").expect("create downloads directory");
            }

            trace!("Writing file to disk as: {}", file_id.name());
            let mut downloaded_file =
                std::fs::File::create(format!("downloads/{}", file_id.name()))
                    .expect("create file");

            downloaded_file
                .write_all(file.data().as_slice())
                .expect("write file");

            file
        };

        trace!("Data file size (bytes): {}", file.data().len());

        let records = file.records();
        debug!(
            "Volume with {} records. Header: {:?}",
            records.len(),
            file.header()
        );

        // Decompress (when needed) and decode every LDM record, collecting all
        // messages before summarizing the full volume.
        debug!("Decoding {} records...", records.len());
        let mut messages = Vec::new();
        for mut record in records {
            if record.compressed() {
                trace!("Decompressing LDM record...");
                record = record.decompress().expect("Failed to decompress record");
            }

            messages.extend(record.messages()?.iter().cloned());
        }

        let summary = nexrad_decode::summarize::messages(messages.as_slice());
        info!("Volume summary:\n{}", summary);
    }

    Ok(())
}
/// Returns the index of the file with the nearest time to the provided start time.
/// Returns the index of the file with the nearest time-of-day to the provided
/// start time (dates are not compared, only times).
///
/// # Panics
///
/// Panics if `files` is empty or if an identifier's date/time cannot be
/// parsed, since this example treats either as unrecoverable.
fn get_nearest_file_index(
    files: &[archive::Identifier],
    start_time: chrono::NaiveTime,
) -> usize {
    // Seed the search with the first file so the loop can skip it.
    let first_file = files.first().expect("find at least one file");
    let first_file_time = first_file
        .date_time()
        .expect("file has valid date time")
        .time();
    let mut min_diff = first_file_time
        .signed_duration_since(start_time)
        .num_seconds()
        .abs();
    let mut min_index = 0;

    // BUGFIX: enumerate() must come before skip(1). The original
    // `.skip(1).enumerate()` numbered the element at position 1 as index 0,
    // so the returned index was off by one whenever the nearest file was not
    // the first. `.enumerate().skip(1)` keeps indices aligned with `files`.
    for (index, file) in files.iter().enumerate().skip(1) {
        let file_time = file.date_time().expect("file has valid date time").time();
        let diff = file_time
            .signed_duration_since(start_time)
            .num_seconds()
            .abs();

        if diff < min_diff {
            min_diff = diff;
            min_index = index;
        }
    }

    min_index
}

View file

@ -0,0 +1,475 @@
#![cfg(all(feature = "aws", feature = "decode"))]
use chrono::{DateTime, Utc};
use clap::Parser;
use env_logger::{Builder, Env};
use log::{debug, info, warn, LevelFilter};
use nexrad_data::aws::realtime::{
download_chunk, get_latest_volume, list_chunks_in_volume, Chunk, ChunkIdentifier,
ElevationChunkMapper, VolumeIndex,
};
use nexrad_data::result::Result;
use nexrad_decode::messages::{volume_coverage_pattern, MessageContents};
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
/// A tool to generate a CSV dataset of NEXRAD chunks with timing information and metadata.
/// This script analyzes chunk timing, contents, and maps them to VCP elevation cuts.
// NOTE: the `///` doc comments below are rendered by clap as --help text and
// must not be reworded without changing the CLI's help output.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Site identifier (e.g., KDMX)
    #[arg(default_value = "KDMX")]
    site: String,

    /// Volume index to analyze. If not specified, will use volume before the latest.
    #[arg(long)]
    volume: Option<usize>,

    /// Maximum number of chunks to analyze (0 for all)
    #[arg(long, default_value = "0")]
    max_chunks: usize,

    /// Output CSV file path
    #[arg(long, default_value = "nexrad_chunks.csv")]
    output: PathBuf,
}
// Example entry point: resolves the volume directory to analyze, lists and
// time-orders its chunks, then downloads and analyzes each one, writing one
// CSV row per chunk (including a mostly-empty row on download failure).
#[tokio::main]
async fn main() -> Result<()> {
    // Default to info-level logging; quiet reqwest's per-connection logging.
    Builder::from_env(Env::default().default_filter_or("info"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();

    let cli = Cli::parse();
    let site = cli.site.clone();
    let max_chunks = cli.max_chunks;
    let output_path = cli.output.clone();

    // Use the explicitly-requested volume, or default to the directory just
    // before the latest one (the latest may still be filling).
    let volume = if let Some(vol) = cli.volume {
        VolumeIndex::new(vol)
    } else {
        let latest_result = get_latest_volume(&site).await?;
        let latest = latest_result.volume.expect("No latest volume found");
        info!("Latest volume found: {}", latest.as_number());

        // Volume directories wrap around: the one before 1 is 999.
        let prev_num = if latest.as_number() > 1 {
            latest.as_number() - 1
        } else {
            999
        };
        let prev = VolumeIndex::new(prev_num);
        info!("Using previous volume: {}", prev.as_number());
        prev
    };

    info!(
        "Listing chunks for site {} in volume {}",
        site,
        volume.as_number()
    );
    let mut chunks = list_chunks_in_volume(&site, volume, 1000).await?;
    info!(
        "Found {} chunks in volume {}",
        chunks.len(),
        volume.as_number()
    );

    if chunks.is_empty() {
        info!("No chunks found in this volume");
        return Ok(());
    }

    // Order chunks by upload time; chunks lacking a timestamp sort last
    // relative to timestamped ones and are tied among themselves.
    chunks.sort_by(|a, b| {
        if let (Some(time_a), Some(time_b)) = (a.upload_date_time(), b.upload_date_time()) {
            time_a.cmp(&time_b)
        } else if a.upload_date_time().is_some() {
            Ordering::Less
        } else if b.upload_date_time().is_some() {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    });

    // Optionally cap the number of chunks analyzed (0 means "all").
    let chunks_to_analyze = if max_chunks > 0 && max_chunks < chunks.len() {
        info!("Limiting analysis to first {} chunks", max_chunks);
        chunks.iter().take(max_chunks).cloned().collect::<Vec<_>>()
    } else {
        chunks
    };

    // Write the CSV header row. The `\` continuations keep the literal on one
    // logical line (Rust skips the newline and following indentation).
    let mut file = File::create(&output_path)?;
    writeln!(
        file,
        "chunk_name,modified_time,time_since_previous_s,message_types,data_types,\
        earliest_message_time,latest_message_time,scan_time_s,processing_time_s,\
        elevation_numbers,matched_to_vcp,elevation_angle,azimuth_range,\
        vcp_number,channel_configuration,waveform_type,super_resolution,azimuth_rate"
    )?;

    info!(
        "Analyzing {} chunks and writing CSV to {}",
        chunks_to_analyze.len(),
        output_path.display()
    );

    // State threaded across chunks: the previous chunk's upload time, and the
    // VCP definition plus its elevation mapper once one is seen.
    let mut previous_time: Option<DateTime<Utc>> = None;
    let mut vcp: Option<volume_coverage_pattern::Message> = None;
    let mut elevation_chunk_mapper: Option<ElevationChunkMapper> = None;

    for (i, chunk_id) in chunks_to_analyze.iter().enumerate() {
        let chunk_name = chunk_id.name();
        let modified_time = chunk_id.upload_date_time();

        // Calculate time difference from previous chunk (seconds, "Unknown"
        // when this chunk has no upload time, 0.0 for the first chunk).
        let time_diff = if let Some(time) = modified_time {
            previous_time
                .map(|prev| {
                    let duration = time.signed_duration_since(prev);
                    duration.num_milliseconds() as f64 / 1000.0
                })
                .unwrap_or(0.0)
                .to_string()
        } else {
            "Unknown".to_string()
        };

        info!(
            "Processing chunk {}/{}: {}",
            i + 1,
            chunks_to_analyze.len(),
            chunk_name
        );

        match download_chunk(&site, chunk_id).await {
            Ok((_, chunk)) => {
                let result =
                    analyze_chunk(&chunk, chunk_id, &mut vcp, &mut elevation_chunk_mapper)?;

                write_csv_row(
                    &mut file,
                    chunk_name.to_string(),
                    modified_time,
                    time_diff,
                    result,
                )?;

                // Only advance previous_time on success, matching the rows
                // actually analyzed.
                previous_time = modified_time;
            }
            Err(err) => {
                // Still emit a row on failure so the CSV keeps one line per
                // chunk; analysis columns are left empty.
                warn!("Failed to download chunk {}: {}", chunk_name, err);
                writeln!(
                    file,
                    "{},{},{},,,,,,,,,,,,",
                    chunk_name,
                    modified_time
                        .map(|time| time.format("%Y-%m-%dT%H:%M:%S%.3f").to_string())
                        .unwrap_or_else(|| "Unknown".to_string()),
                    time_diff
                )?;
            }
        }
    }

    info!("CSV data written to {}", output_path.display());

    Ok(())
}
/// Holds the analysis results for a chunk
struct ChunkAnalysis {
    // Labels of message types seen in the chunk.
    // NOTE(review): not populated within the visible part of analyze_chunk —
    // confirm against the rest of that function.
    message_types: Vec<String>,
    // Labels of data blocks present (e.g. "Reflectivity", "Velocity").
    data_types: Vec<String>,
    // Earliest and latest digital-radar-data message times in the chunk.
    earliest_message_time: Option<DateTime<Utc>>,
    latest_message_time: Option<DateTime<Utc>>,
    // Seconds between the earliest and latest message times.
    scan_time: Option<f64>,
    // Seconds between the latest message time and the chunk's upload time.
    processing_time: Option<f64>,
    // Distinct elevation numbers observed in the chunk's radar messages.
    elevation_numbers: HashSet<u8>,
    // Fields below are filled in by analyze_chunk; their exact semantics are
    // defined past the visible portion of that function — verify there.
    elevation_angle: Option<f64>,
    matched_to_vcp: bool,
    azimuth_range: Option<f64>,
    // VCP identifier string formatted as "VCP{number}".
    vcp_number: Option<String>,
    channel_configuration: Option<String>,
    waveform_type: Option<String>,
    super_resolution: Option<String>,
    azimuth_rate: Option<f64>,
}
/// Analyzes a chunk and returns structured data about its contents
///
/// Decodes every message in the chunk (decompressing LDM records as needed), tallies
/// message and data types, extracts radial timing and azimuth coverage, and — once a
/// VCP has been seen — matches the chunk to its elevation cut. The first VCP message
/// encountered is stored into `vcp` and used to build `elevation_chunk_mapper` for
/// this and subsequent chunks.
fn analyze_chunk(
    chunk: &Chunk,
    chunk_id: &ChunkIdentifier,
    vcp: &mut Option<volume_coverage_pattern::Message>,
    elevation_chunk_mapper: &mut Option<ElevationChunkMapper>,
) -> Result<ChunkAnalysis> {
    let mut result = ChunkAnalysis {
        message_types: Vec::new(),
        data_types: Vec::new(),
        earliest_message_time: None,
        latest_message_time: None,
        scan_time: None,
        processing_time: None,
        elevation_numbers: HashSet::new(),
        elevation_angle: None,
        matched_to_vcp: false,
        azimuth_range: None,
        vcp_number: None,
        channel_configuration: None,
        waveform_type: None,
        super_resolution: None,
        azimuth_rate: None,
    };

    // Collect decoded messages from all LDM records in the chunk.
    let mut messages = Vec::new();
    match chunk {
        Chunk::Start(file) => {
            for mut record in file.records() {
                if record.compressed() {
                    record = record.decompress()?;
                }
                messages.extend(record.messages()?);
            }
        }
        Chunk::IntermediateOrEnd(record) => {
            let mut record = record.clone();
            if record.compressed() {
                record = record.decompress()?;
            }
            messages.extend(record.messages()?);
        }
    }

    let mut message_type_counter = HashMap::new();
    let mut data_type_counter = HashMap::new();
    let mut radar_times = Vec::new();

    for message in &messages {
        let msg_type = message.header().message_type();
        *message_type_counter.entry(msg_type).or_insert(0) += 1;

        match message.contents() {
            MessageContents::VolumeCoveragePattern(chunk_vcp) => {
                debug!(
                    "Found VCP message with {} elevation cuts",
                    chunk_vcp.elevations.len()
                );

                // Remember the first VCP seen so chunks can be matched to elevation cuts.
                if vcp.is_none() {
                    *vcp = Some(*chunk_vcp.clone());
                    *elevation_chunk_mapper =
                        Some(ElevationChunkMapper::new(vcp.as_ref().unwrap()));
                }

                result.vcp_number = Some(format!("VCP{}", chunk_vcp.header.pattern_type));
            }
            MessageContents::DigitalRadarData(radar) => {
                if let Some(time) = radar.header.date_time() {
                    radar_times.push(time);
                }

                let mut add_data_type = |data_type: &str| {
                    *data_type_counter.entry(data_type.to_string()).or_insert(0) += 1;
                };

                if radar.volume_data_block.is_some() {
                    add_data_type("Volume");
                }
                if radar.elevation_data_block.is_some() {
                    add_data_type("Elevation");
                }
                if radar.radial_data_block.is_some() {
                    add_data_type("Radial");
                }
                if radar.reflectivity_data_block.is_some() {
                    add_data_type("Reflectivity");
                }
                if radar.velocity_data_block.is_some() {
                    add_data_type("Velocity");
                }
                if radar.spectrum_width_data_block.is_some() {
                    add_data_type("Spectrum Width");
                }
                if radar.differential_reflectivity_data_block.is_some() {
                    add_data_type("Differential Reflectivity");
                }
                if radar.differential_phase_data_block.is_some() {
                    add_data_type("Differential Phase");
                }
                if radar.correlation_coefficient_data_block.is_some() {
                    add_data_type("Correlation Coefficient");
                }
                if radar.specific_diff_phase_data_block.is_some() {
                    add_data_type("Specific Differential Phase");
                }

                result
                    .elevation_numbers
                    .insert(radar.header.elevation_number);

                if let Some(volume) = &radar.volume_data_block {
                    result.vcp_number =
                        Some(format!("VCP{}", volume.volume_coverage_pattern_number));
                }
            }
            _ => {}
        }
    }

    if !radar_times.is_empty() {
        radar_times.sort();

        // Both endpoints exist because `radar_times` is non-empty.
        let earliest = radar_times[0];
        let latest = radar_times[radar_times.len() - 1];
        result.earliest_message_time = Some(earliest);
        result.latest_message_time = Some(latest);

        let duration = latest.signed_duration_since(earliest);
        result.scan_time = Some(duration.num_milliseconds() as f64 / 1000.0);

        // The upload time may be unavailable; leave `processing_time` unset rather than
        // panicking (previously this called `.unwrap()` on the Option).
        if let Some(upload_time) = chunk_id.upload_date_time() {
            let proc_duration = upload_time.signed_duration_since(latest);
            result.processing_time = Some(proc_duration.num_milliseconds() as f64 / 1000.0);
        }

        // Azimuth sweep from the first to the last radial, handling wrap-around at 360°.
        let start_azimuth = messages.iter().find_map(|msg| match msg.contents() {
            MessageContents::DigitalRadarData(radar) => Some(radar.header.azimuth_angle),
            _ => None,
        });
        let end_azimuth = messages.iter().rev().find_map(|msg| match msg.contents() {
            MessageContents::DigitalRadarData(radar) => Some(radar.header.azimuth_angle),
            _ => None,
        });

        if let (Some(start), Some(end)) = (start_azimuth, end_azimuth) {
            let range = if end > start {
                end - start
            } else {
                360.0 - (start - end)
            };
            result.azimuth_range = Some(range as f64);
        }
    }

    result.message_types = message_type_counter
        .keys()
        .map(|msg_type| format!("{:?}", msg_type))
        .collect();

    result.data_types = data_type_counter.keys().cloned().collect();

    if let (Some(vcp), Some(elevation_chunk_mapper)) = (vcp, elevation_chunk_mapper) {
        // Elevation numbers are 1-based; `checked_sub` guards the index arithmetic
        // against a zero value which would otherwise underflow.
        let elevation = elevation_chunk_mapper
            .get_sequence_elevation_number(chunk_id.sequence())
            .and_then(|elevation_number| elevation_number.checked_sub(1))
            .and_then(|index| vcp.elevations.get(index));

        if let Some(elevation) = elevation {
            result.matched_to_vcp = true;
            result.elevation_angle = Some(elevation.elevation_angle_degrees());
            result.channel_configuration = Some(format!("{:?}", elevation.channel_configuration()));
            result.waveform_type = Some(format!("{:?}", elevation.waveform_type()));
            result.super_resolution = Some(format!(
                "{:?}",
                elevation.super_resolution_control_half_degree_azimuth()
            ));
            result.azimuth_rate = Some(elevation.azimuth_rate_degrees_per_second());
        }
    }

    Ok(result)
}
/// Write a CSV row with chunk analysis data
///
/// Emits one row per chunk. Multi-valued columns (message types, data types,
/// elevation numbers) are joined with ';' and sorted so the output is deterministic
/// across runs; missing optional values are written as empty fields.
fn write_csv_row(
    file: &mut File,
    chunk_name: String,
    modified_time: Option<DateTime<Utc>>,
    time_diff: String,
    mut analysis: ChunkAnalysis,
) -> Result<()> {
    analysis.message_types.sort();
    analysis.data_types.sort();

    // HashSet iteration order is unstable; sort the rendered elevation numbers
    // (lexicographically) so identical analyses always produce identical rows.
    let mut elevation_numbers = analysis
        .elevation_numbers
        .iter()
        .map(|e| e.to_string())
        .collect::<Vec<String>>();
    elevation_numbers.sort();

    writeln!(
        file,
        "{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}",
        chunk_name,
        modified_time
            .map(|time| time.format("%Y-%m-%dT%H:%M:%S%.3f").to_string())
            .unwrap_or_default(),
        time_diff,
        analysis.message_types.join(";"),
        analysis.data_types.join(";"),
        analysis
            .earliest_message_time
            .map(|time| time.format("%Y-%m-%dT%H:%M:%S%.3f").to_string())
            .unwrap_or_default(),
        analysis
            .latest_message_time
            .map(|time| time.format("%Y-%m-%dT%H:%M:%S%.3f").to_string())
            .unwrap_or_default(),
        analysis
            .scan_time
            .map(|t| format!("{:.3}", t))
            .unwrap_or_default(),
        analysis
            .processing_time
            .map(|t| format!("{:.3}", t))
            .unwrap_or_default(),
        elevation_numbers.join(";"),
        analysis
            .elevation_angle
            .map(|e| format!("{:.2}", e))
            .unwrap_or_default(),
        if analysis.matched_to_vcp { "Yes" } else { "No" },
        analysis
            .azimuth_range
            .map(|a| format!("{:.2}", a))
            .unwrap_or_default(),
        analysis.vcp_number.unwrap_or_default(),
        analysis.channel_configuration.unwrap_or_default(),
        analysis.waveform_type.unwrap_or_default(),
        analysis.super_resolution.unwrap_or_default(),
        analysis
            .azimuth_rate
            .map(|a| format!("{:.2}", a))
            .unwrap_or_default()
    )?;

    Ok(())
}

View file

@ -0,0 +1,408 @@
#![cfg(all(feature = "aws", feature = "decode"))]
use chrono::{DateTime, Utc};
use clap::Parser;
use env_logger::{Builder, Env};
use log::{debug, info, trace, LevelFilter};
use nexrad_data::aws::realtime::{
download_chunk, get_latest_volume, list_chunks_in_volume, Chunk, VolumeIndex,
};
use nexrad_data::result::Result;
use nexrad_decode::summarize;
use std::{cmp::Ordering, collections::HashMap};
/// Example to analyze timing between chunks in a NEXRAD volume and inspect their contents.
/// Displays information about the time differences between consecutive chunks and decodes
/// the data within each chunk to show message summaries.
// Command-line options for the chunk timing analysis example. The field doc
// comments double as clap's generated `--help` text, so they are user-facing.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Site identifier (e.g., KDMX)
    #[arg(default_value = "KDMX")]
    site: String,
    /// Volume index to analyze. If not specified, will use volume before the latest.
    #[arg(long)]
    volume: Option<usize>,
    /// Maximum number of chunks to analyze (0 for all)
    #[arg(long, default_value = "10")]
    max_chunks: usize,
    /// Whether to show detailed message information
    #[arg(long, default_value = "false")]
    detailed: bool,
}
#[tokio::main]
async fn main() -> Result<()> {
    // Default to "info" logging but keep reqwest's connection logs quiet.
    Builder::from_env(Env::default().default_filter_or("info"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();
    let cli = Cli::parse();
    let site = cli.site.clone();
    let max_chunks = cli.max_chunks;
    let detailed = cli.detailed;
    // Determine which volume to analyze
    let volume = if let Some(vol) = cli.volume {
        VolumeIndex::new(vol)
    } else {
        // Get the latest volume and use the previous one
        let latest_result = get_latest_volume(&site).await?;
        let latest = latest_result.volume.expect("No latest volume found");
        info!("Latest volume found: {}", latest.as_number());
        // Calculate previous volume (handle wrap around from 1 to 999)
        let prev_num = if latest.as_number() > 1 {
            latest.as_number() - 1
        } else {
            999
        };
        let prev = VolumeIndex::new(prev_num);
        info!("Using previous volume: {}", prev.as_number());
        prev
    };
    // List all chunks in the volume
    info!(
        "Listing chunks for site {} in volume {}",
        site,
        volume.as_number()
    );
    let mut chunks = list_chunks_in_volume(&site, volume, 1000).await?;
    // Sort chunks by modified time
    // Chunks with a known upload time sort before those without one.
    chunks.sort_by(|a, b| {
        if let (Some(time_a), Some(time_b)) = (a.upload_date_time(), b.upload_date_time()) {
            time_a.cmp(&time_b)
        } else if a.upload_date_time().is_some() {
            Ordering::Less
        } else if b.upload_date_time().is_some() {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    });
    info!(
        "Found {} chunks in volume {}",
        chunks.len(),
        volume.as_number()
    );
    if chunks.is_empty() {
        info!("No chunks found in this volume");
        return Ok(());
    }
    // If max_chunks is set, limit the number of chunks to analyze
    // (a value of 0 disables the limit and analyzes every chunk).
    let chunks_to_analyze = if max_chunks > 0 && max_chunks < chunks.len() {
        info!("Limiting analysis to first {} chunks", max_chunks);
        chunks.iter().take(max_chunks).cloned().collect::<Vec<_>>()
    } else {
        chunks
    };
    // Display chunk timing information and download/decode each chunk
    println!(
        "\n{:<20} {:<30} {:<15} {:<40}",
        "Chunk", "Modified Time (UTC)", "Time Since Previous", "Content Summary"
    );
    println!("{:-<110}", "");
    let mut prev_time: Option<DateTime<Utc>> = None;
    let mut vcps = std::collections::HashSet::new();
    let mut total_messages = 0;
    for chunk_id in chunks_to_analyze {
        let chunk_name = chunk_id.name();
        let modified_time = chunk_id.upload_date_time();
        // Calculate time difference
        let time_diff = if let Some(time) = modified_time {
            prev_time
                .map(|prev| {
                    let duration = time.signed_duration_since(prev);
                    format!("{:.2} seconds", duration.num_milliseconds() as f64 / 1000.0)
                })
                .unwrap_or_else(|| "N/A".to_string())
        } else {
            "N/A".to_string()
        };
        // Download and decode the chunk
        // Record the download start so decode_chunk can report message latency.
        let download_time = Utc::now();
        let content_summary = match download_chunk(&site, &chunk_id).await {
            Ok((_, chunk)) => {
                let summary = decode_chunk(&chunk, download_time, detailed)?;
                // Collect VCP information
                if let Some(vcp) = summary.vcp {
                    vcps.insert(vcp);
                }
                total_messages += summary.message_count;
                summary.summary
            }
            Err(err) => format!("Failed to download: {}", err),
        };
        println!(
            "{:<20} {:<30} {:<15} {:<40}",
            chunk_name,
            modified_time
                .map(|time| time.format("%Y-%m-%d %H:%M:%S%.3f").to_string())
                .unwrap_or_else(|| "Unknown".to_string()),
            time_diff,
            content_summary
        );
        // Store time for next iteration
        if let Some(time) = modified_time {
            prev_time = Some(time);
        }
    }
    // Display statistics about the analyzed chunks
    println!("\nAnalysis Summary:");
    println!(" Volume Coverage Patterns found: {:?}", vcps);
    println!(" Total messages decoded: {}", total_messages);
    Ok(())
}
/// Information extracted from a chunk
struct ChunkSummary {
    // One-line, human-readable description shown in the output table's summary column.
    summary: String,
    // Total number of messages decoded from the chunk's records.
    message_count: usize,
    // Volume coverage pattern string (e.g. "VCP35") if the chunk contained VCP messages.
    vcp: Option<String>,
}
/// Decodes a chunk and returns summary information
///
/// Both chunk variants are reduced to a common list of LDM records and processed by a
/// single code path (the original duplicated ~90 lines per variant). As part of that
/// consolidation, both variants now produce the same summary format — the richer
/// "msgs, azimuth, elevation, type names" form — where previously volume-start chunks
/// reported only a type count.
fn decode_chunk(
    chunk: &Chunk,
    download_time: DateTime<Utc>,
    detailed: bool,
) -> Result<ChunkSummary> {
    let mut message_count = 0;
    let mut vcp = None;
    let mut data_types = HashMap::new();
    let mut min_azimuth = f32::MAX;
    let mut max_azimuth = f32::MIN;
    let mut elevations = Vec::new();

    // Normalize both chunk variants into a list of records to decode.
    let records = match chunk {
        Chunk::Start(file) => {
            debug!("Decoding volume start chunk");
            file.records()
        }
        Chunk::IntermediateOrEnd(record) => {
            debug!("Decoding intermediate/end chunk");
            vec![record.clone()]
        }
    };

    for mut record in records {
        if record.compressed() {
            trace!("Decompressing LDM record...");
            record = record.decompress()?;
        }

        let messages = record.messages()?;
        message_count += messages.len();

        let msg_summary = summarize::messages(messages.as_slice());

        // Extract VCP information using the summary's debug format.
        if !msg_summary.volume_coverage_patterns.is_empty() {
            let vcp_str = msg_summary
                .volume_coverage_patterns
                .iter()
                .map(|v| format!("{:?}", v))
                .collect::<Vec<_>>()
                .join(",");
            vcp = Some(format!("VCP{}", vcp_str.replace("VCP", "")));
        }

        // Track azimuth range, elevation angles, and data types.
        for group in &msg_summary.message_groups {
            if let (Some(start_az), Some(end_az)) = (group.start_azimuth, group.end_azimuth) {
                min_azimuth = min_azimuth.min(start_az);
                max_azimuth = max_azimuth.max(end_az);
            }

            if let Some(elev) = group.elevation_angle {
                // Round to 2 decimal places and deduplicate.
                let rounded_elev = (elev * 100.0).round() / 100.0;
                if !elevations.contains(&rounded_elev) {
                    elevations.push(rounded_elev);
                }
            }

            if let Some(dt) = &group.data_types {
                for (key, count) in dt {
                    *data_types.entry(key.clone()).or_insert(0) += count;
                }
            }
        }

        // Print detailed message information if requested.
        if detailed {
            println!("\nChunk Contents:\n{}", msg_summary);
            if let Some(earliest) = msg_summary.earliest_collection_time {
                let latency = download_time.signed_duration_since(earliest);
                println!(
                    " Message latency: {:.2} seconds",
                    latency.num_milliseconds() as f64 / 1000.0
                );
            }
        }
    }

    let summary = format!(
        "{} msgs, {}, {}, {}",
        message_count,
        format_azimuth(min_azimuth, max_azimuth),
        format_elevations(&elevations),
        format_data_types(&data_types)
    );

    Ok(ChunkSummary {
        summary,
        message_count,
        vcp,
    })
}

/// Formats the observed azimuth range, or "Az: N/A" if no azimuth data was seen.
fn format_azimuth(min_azimuth: f32, max_azimuth: f32) -> String {
    if min_azimuth != f32::MAX && max_azimuth != f32::MIN {
        format!("Az: {:.1}°-{:.1}°", min_azimuth, max_azimuth)
    } else {
        "Az: N/A".to_string()
    }
}

/// Formats the observed elevation angles, or "El: N/A" if none were seen.
fn format_elevations(elevations: &[f32]) -> String {
    match elevations {
        [] => "El: N/A".to_string(),
        [only] => format!("El: {:.2}°", only),
        _ => format!("El: {} angles", elevations.len()),
    }
}

/// Lists up to three sorted data type names, summarizing any remainder.
fn format_data_types<V>(data_types: &HashMap<String, V>) -> String {
    if data_types.is_empty() {
        return "No data types".to_string();
    }
    let mut type_names: Vec<_> = data_types.keys().cloned().collect();
    type_names.sort();
    if type_names.len() <= 3 {
        format!("Types: {}", type_names.join(", "))
    } else {
        format!(
            "Types: {}, +{} more",
            type_names[..2].join(", "),
            type_names.len() - 2
        )
    }
}

View file

@ -0,0 +1,292 @@
#![cfg(all(feature = "aws", feature = "decode"))]
use chrono::{DateTime, SubsecRound, Utc};
use clap::Parser;
use env_logger::{Builder, Env};
use log::{debug, info, warn, LevelFilter};
use nexrad_data::result::Result;
use nexrad_data::{aws::realtime, volume};
use nexrad_decode::summarize;
use tokio::time::sleep;
// Example designed to provide concise latency analysis for NEXRAD data chunks
// Output format (single line per chunk):
// Chunk: <name> | Downloaded: <time> | AWS Latency: <value>s | First Radial Latency: <value>s | Last Radial Latency: <value>s | Attempts: <count>
// Command-line options for the latency analysis example. The field doc comments
// double as clap's generated `--help` text, so they are user-facing.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Site identifier (e.g., KDMX)
    #[arg(default_value = "KDMX")]
    site: String,
    /// The number of chunks to download
    #[arg(default_value = "10")]
    chunk_count: usize,
}
#[tokio::main]
async fn main() -> Result<()> {
    use chrono::Utc;
    use nexrad_data::aws::realtime::Chunk;
    use nexrad_data::aws::realtime::{poll_chunks, ChunkIdentifier, PollStats};
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::{mpsc, Arc, Mutex};
    use std::time::Duration;
    use tokio::task;
    // Default to "info" logging but keep reqwest's connection logs quiet.
    Builder::from_env(Env::default().default_filter_or("info"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();
    let cli = Cli::parse();
    let site = cli.site.clone();
    let desired_chunk_count = cli.chunk_count;
    let mut downloaded_chunk_count = 0;
    // Channels: decoded chunk updates, polling statistics, and the stop signal.
    let (update_tx, update_rx) = mpsc::channel::<(ChunkIdentifier, Chunk)>();
    let (stats_tx, stats_rx) = mpsc::channel::<PollStats>();
    let (stop_tx, stop_rx) = mpsc::channel::<bool>();
    // Pass download attempts and timing to the update handle
    // `attempts` is accumulated by the stats task and drained per chunk by the update task.
    let attempts = Arc::new(AtomicUsize::new(0));
    let download_time = Arc::new(Mutex::new(None::<chrono::DateTime<chrono::Utc>>));
    // Task to poll chunks
    task::spawn(async move {
        poll_chunks(&site, update_tx, Some(stats_tx), stop_rx)
            .await
            .expect("Failed to poll chunks");
    });
    // Task to timeout polling at 5 minutes
    let timeout_stop_tx = stop_tx.clone();
    task::spawn(async move {
        sleep(Duration::from_secs(300)).await;
        info!("Timeout reached, stopping...");
        timeout_stop_tx
            .send(true)
            .expect("Failed to send stop signal");
    });
    // Task to receive statistics updates to track attempts
    let attempts_clone = attempts.clone();
    let download_time_clone = download_time.clone();
    let stats_handle = task::spawn(async move {
        while let Ok(stats) = stats_rx.recv() {
            match stats {
                PollStats::NewChunk(new_chunk_stats) => {
                    debug!(
                        "New chunk download: attempts={}, download_time={:?}, upload_time={:?}",
                        new_chunk_stats.calls,
                        new_chunk_stats.download_time,
                        new_chunk_stats.upload_time
                    );
                    attempts_clone.fetch_add(new_chunk_stats.calls, Ordering::SeqCst);
                    // Record the latest download completion time if reported; clear it otherwise.
                    let mut download_time_guard = download_time_clone
                        .lock()
                        .expect("Failed to lock download time");
                    if let Some(time) = new_chunk_stats.download_time {
                        *download_time_guard = Some(time);
                    } else {
                        *download_time_guard = None;
                    }
                }
                PollStats::NewVolumeCalls(new_volume_stats) => {
                    debug!("New volume found: attempts={}", new_volume_stats);
                    attempts_clone.fetch_add(new_volume_stats, Ordering::SeqCst);
                }
                PollStats::ChunkTimings(chunk_timings) => {
                    info!("Chunk Timing Statistics:");
                    info!("{:-<100}", "");
                    info!(
                        "{:<15} | {:<20} | {:<20} | {:<15} | {:<15}",
                        "Chunk Type",
                        "Waveform Type",
                        "Channel Config",
                        "Avg Duration",
                        "Avg Attempts"
                    );
                    info!("{:-<100}", "");
                    for (characteristics, avg_duration, avg_attempts) in
                        chunk_timings.get_statistics()
                    {
                        let duration_str = avg_duration.map_or("N/A".to_string(), |d| {
                            format!("{:.2}s", d.num_milliseconds() as f64 / 1000.0)
                        });
                        let attempts_str =
                            avg_attempts.map_or("N/A".to_string(), |a| format!("{:.2}", a));
                        info!(
                            "{:<15} | {:<20} | {:<20} | {:<15} | {:<15}",
                            format!("{:?}", characteristics.chunk_type),
                            format!("{:?}", characteristics.waveform_type),
                            format!("{:?}", characteristics.channel_configuration),
                            duration_str,
                            attempts_str
                        );
                    }
                    info!("{:-<100}", "");
                }
                _ => {}
            }
        }
    });
    // Column headers for the per-chunk rows printed by process_record.
    println!(
        "{:<25} | {:<25} | {:<13} | {:<15} {:<29} | {:<8}",
        "", "", "Time Since", "", "Latency Since", ""
    );
    println!(
        "{:<25} | {:<25} | {:<13} | {:<13} | {:<13} | {:<13} | {:<8}",
        "Chunk",
        "Downloaded",
        "Last Chunk",
        "AWS Upload",
        "First Radial",
        "Last Radial",
        "Attempts"
    );
    println!("{:-<128}", "");
    // Task to receive downloaded chunks
    let update_handle = task::spawn(async move {
        let mut last_chunk_time = None;
        while let Ok((chunk_id, chunk)) = update_rx.recv() {
            let download_time = {
                match *download_time.lock().expect("Failed to lock download time") {
                    Some(time) => time,
                    None => {
                        warn!("No download time available, using current time");
                        Utc::now()
                    }
                }
            };
            // Drain the attempt counter accumulated by the stats task for this chunk.
            let chunk_attempts = attempts.load(Ordering::SeqCst);
            attempts.store(0, Ordering::SeqCst);
            match chunk {
                Chunk::Start(file) => {
                    let records = file.records();
                    debug!(
                        "Volume start chunk with {} records. Header: {:?}",
                        records.len(),
                        file.header()
                    );
                    for record in records {
                        process_record(
                            &chunk_id,
                            record,
                            download_time,
                            last_chunk_time,
                            chunk_attempts,
                        );
                    }
                }
                Chunk::IntermediateOrEnd(record) => {
                    debug!("Intermediate or end volume chunk.");
                    process_record(
                        &chunk_id,
                        record,
                        download_time,
                        last_chunk_time,
                        chunk_attempts,
                    );
                }
            }
            // Remember this chunk's upload time for the next row's "Time Since Last Chunk".
            last_chunk_time = chunk_id.upload_date_time();
            downloaded_chunk_count += 1;
            if downloaded_chunk_count >= desired_chunk_count {
                info!("Downloaded {} chunks, stopping...", desired_chunk_count);
                stop_tx.send(true).expect("Failed to send stop signal");
                break;
            }
        }
    });
    stats_handle.await.expect("Failed to join handle");
    update_handle.await.expect("Failed to join handle");
    info!("Finished downloading chunks");
    Ok(())
}
/// Decodes a single LDM record and prints a one-line latency summary for it.
///
/// Latencies are computed relative to `download_time`: from the first/last radial
/// collection times, and from the chunk's S3 upload time. Decompression and decode
/// failures are logged and skip the record rather than panicking (previously a
/// decompression failure would `expect` and abort while a decode failure only warned).
fn process_record(
    chunk_id: &realtime::ChunkIdentifier,
    mut record: volume::Record,
    download_time: DateTime<Utc>,
    last_chunk_time: Option<DateTime<Utc>>,
    attempts: usize,
) {
    debug!("Decoding LDM record...");
    if record.compressed() {
        debug!("Decompressing LDM record...");
        // Handle decompression failures gracefully, consistent with the message
        // decoding error handling below.
        record = match record.decompress() {
            Ok(decompressed) => decompressed,
            Err(e) => {
                warn!("Failed to decompress record: {}", e);
                return;
            }
        };
    }
    let messages = match record.messages() {
        Ok(msgs) => msgs,
        Err(e) => {
            warn!("Failed to decode messages: {}", e);
            return;
        }
    };
    let summary = summarize::messages(messages.as_slice());
    // Calculate latencies (NaN when the corresponding timestamp is unavailable)
    let first_radial_latency = summary
        .earliest_collection_time
        .map(|time| (download_time - time).num_milliseconds() as f64 / 1000.0)
        .unwrap_or(f64::NAN);
    let last_radial_latency = summary
        .latest_collection_time
        .map(|time| (download_time - time).num_milliseconds() as f64 / 1000.0)
        .unwrap_or(f64::NAN);
    // AWS rounds to the second for object modified times
    let rounded_download_time = download_time.round_subsecs(0);
    let aws_latency = chunk_id
        .upload_date_time()
        .map(|time| {
            if rounded_download_time < time {
                warn!("Download time is before S3 modified time: download={}, rounded download={}, s3={}", download_time, rounded_download_time, time);
            }
            (rounded_download_time - time).num_milliseconds() as f64 / 1000.0
        })
        .unwrap_or(f64::NAN);
    // Compare chunk_id.date_time() with last_chunk_time, though either could be None
    let time_since_last_chunk = match (chunk_id.upload_date_time(), last_chunk_time) {
        (Some(current), Some(last)) => {
            format!("{}", (current - last).num_milliseconds() as f64 / 1000.0)
        }
        _ => String::from("N/A"),
    };
    // Print concise output in a single line
    println!(
        "{:<25} | {:<25} | {:<12}s | {:<12}s | {:<12}s | {:<12}s | {:<8}",
        format!("{}/{}", chunk_id.volume().as_number(), chunk_id.name()),
        download_time.format("%Y-%m-%d %H:%M:%S%.3f"),
        time_since_last_chunk,
        aws_latency,
        first_radial_latency,
        last_radial_latency,
        attempts
    );
}

View file

@ -0,0 +1,150 @@
#![cfg(all(feature = "aws", feature = "decode"))]
use chrono::{DateTime, Utc};
use clap::Parser;
use env_logger::{Builder, Env};
use log::{debug, info, trace, LevelFilter};
use nexrad_data::result::Result;
use nexrad_data::{
aws::realtime::{self, poll_chunks, Chunk, ChunkIdentifier, PollStats},
volume,
};
use nexrad_decode::summarize;
use std::{sync::mpsc, time::Duration};
use tokio::{task, time::sleep};
// Example output from a real-time chunk:
// Scans from 2025-03-17 01:31:40.449 UTC to 2025-03-17 01:31:44.491 UTC (0.07m)
// VCPs: VCP35
// Messages:
// Msg 1-120: Elevation: #6 (1.36°), Azimuth: 108.2° to 167.7°, Time: 01:31:40.449 to 01:31:44.491 (4.04s)
// Data types: REF (120), SW (120), VEL (120)
// Command-line options for the real-time polling example. The field doc comments
// double as clap's generated `--help` text, so they are user-facing.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Site identifier (e.g., KDMX)
    #[arg(default_value = "KDMX")]
    site: String,
    /// The number of chunks to download
    #[arg(default_value = "10")]
    chunk_count: usize,
}
#[tokio::main]
async fn main() -> Result<()> {
    // Default to "debug" logging but keep reqwest's connection logs quiet.
    Builder::from_env(Env::default().default_filter_or("debug"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();
    let cli = Cli::parse();
    let site = cli.site.clone();
    let desired_chunk_count = cli.chunk_count;
    let mut downloaded_chunk_count = 0;
    // Channels: decoded chunk updates, polling statistics, and the stop signal.
    let (update_tx, update_rx) = mpsc::channel::<(ChunkIdentifier, Chunk)>();
    let (stats_tx, stats_rx) = mpsc::channel::<PollStats>();
    let (stop_tx, stop_rx) = mpsc::channel::<bool>();
    // Task to poll chunks
    task::spawn(async move {
        poll_chunks(&site, update_tx, Some(stats_tx), stop_rx)
            .await
            .expect("Failed to poll chunks");
    });
    // Task to timeout polling at 60 seconds
    let timeout_stop_tx = stop_tx.clone();
    task::spawn(async move {
        sleep(Duration::from_secs(60)).await;
        info!("Timeout reached, stopping...");
        timeout_stop_tx.send(true).unwrap();
    });
    // Task to receive statistics updates
    let stats_handle = task::spawn(async move {
        while let Ok(stats) = stats_rx.recv() {
            info!("Polling statistics: {:?}", stats);
        }
    });
    // Task to receive downloaded chunks
    let update_handle = task::spawn(async move {
        while let Ok((chunk_id, chunk)) = update_rx.recv() {
            let download_time = Utc::now();
            info!(
                "Downloaded chunk {} from {:?} at {:?} of size {}",
                chunk_id.name(),
                chunk_id.upload_date_time(),
                Utc::now(),
                chunk.data().len()
            );
            match chunk {
                Chunk::Start(file) => {
                    let records = file.records();
                    debug!(
                        "Volume start chunk with {} records. Header: {:?}",
                        records.len(),
                        file.header()
                    );
                    records
                        .into_iter()
                        .for_each(|record| decode_record(&chunk_id, record, download_time));
                }
                Chunk::IntermediateOrEnd(record) => {
                    debug!("Intermediate or end volume chunk.");
                    decode_record(&chunk_id, record, download_time);
                }
            }
            downloaded_chunk_count += 1;
            if downloaded_chunk_count >= desired_chunk_count {
                // Report the configured count (was previously hard-coded as "10").
                info!("Downloaded {} chunks, stopping...", desired_chunk_count);
                stop_tx.send(true).expect("Failed to send stop signal");
                break;
            }
        }
    });
    stats_handle.await.expect("Failed to join handle");
    update_handle.await.expect("Failed to join handle");
    info!("Finished downloading chunks");
    Ok(())
}
/// Decompresses (if necessary) and decodes a single LDM record, logging its message
/// summary and how far behind real time its radials and S3 upload were.
fn decode_record(
    chunk_id: &realtime::ChunkIdentifier,
    mut record: volume::Record,
    download_time: DateTime<Utc>,
) {
    debug!("Decoding LDM record...");

    if record.compressed() {
        trace!("Decompressing LDM record...");
        record = record.decompress().expect("Failed to decompress record");
    }

    let messages = record.messages().expect("Failed to decode messages");
    let summary = summarize::messages(messages.as_slice());
    info!("Record summary:\n{}", summary);

    // Seconds between a timestamp and the moment the chunk finished downloading.
    let seconds_behind =
        |time: DateTime<Utc>| (download_time - time).num_milliseconds() as f64 / 1000.0;

    let earliest_latency = summary.earliest_collection_time.map(&seconds_behind);
    let latest_latency = summary.latest_collection_time.map(&seconds_behind);
    let upload_latency = chunk_id.upload_date_time().map(&seconds_behind);

    info!(
        "Message latency: earliest {:?}, latest {:?}, uploaded: {:?}",
        earliest_latency, latest_latency, upload_latency,
    );
}

View file

@ -0,0 +1,17 @@
//!
//! Downloads archival and real-time NEXRAD level II weather radar data from AWS Open Data buckets
//! populated by NOAA.
//!
//! [AWS Open Data NOAA NEXRAD](https://registry.opendata.aws/noaa-nexrad/)
//!
//! [AWS Labs Open Data Documentation](https://github.com/awslabs/open-data-docs/tree/main/docs/noaa/noaa-nexrad)
//!
//! **NEXRAD Level II archive data**: `arn:aws:s3:::noaa-nexrad-level2`
//!
//! **NEXRAD Level II real-time data**: `arn:aws:s3:::unidata-nexrad-level2-chunks`
//!
// Completed volume files, organized by date and site.
pub mod archive;
// Near real-time chunks for in-progress volumes.
pub mod realtime;
// Shared S3 access helpers (not part of the public API).
mod s3;

View file

@ -0,0 +1,23 @@
//!
//! # Archive NEXRAD Data
//! Archived NEXRAD radar data is stored in an AWS S3 bucket by NOAA. The S3 bucket's directories
//! are organized by year, month, day, and then site. For a given date and site, each object is a
//! "volume" file which contains radar data from a full scan. The volume file starts with an
//! Archive II header which is followed by some number of compressed LDM records. These records in
//! turn contain messages which represent individual radials with radar data.
//!
//! The [crate::aws::realtime] AWS bucket provides LDM records as "chunks". Those are uploaded in
//! real-time and once a full scan has been uploaded to a volume directory, those chunks are
//! combined to create a full Archive II volume file which is uploaded to this archive bucket.
//!
// Volume file name metadata.
mod identifier;
pub use identifier::Identifier;
// Downloading a single archive volume file.
mod download_file;
pub use download_file::download_file;
// Listing the volume files available for a site and date.
mod list_files;
pub use list_files::list_files;
// S3 bucket holding the NEXRAD Level II archive data.
const ARCHIVE_BUCKET: &str = "noaa-nexrad-level2";

View file

@ -0,0 +1,22 @@
use crate::aws::archive::identifier::Identifier;
use crate::aws::archive::ARCHIVE_BUCKET;
use crate::aws::s3::download_object;
use crate::result::aws::AWSError::{DateTimeError, InvalidSiteIdentifier};
use crate::volume::File;
/// Download a data file specified by its metadata. Returns the downloaded file's encoded contents
/// which may then need to be decompressed and decoded.
pub async fn download_file(identifier: Identifier) -> crate::result::Result<File> {
    let name = identifier.name();

    // Both the collection date and the site are encoded in the file name; either may
    // be missing if the name is malformed.
    let date = identifier
        .date_time()
        .ok_or_else(|| DateTimeError(name.to_string()))?;
    let site = identifier
        .site()
        .ok_or_else(|| InvalidSiteIdentifier(name.to_string()))?;

    // Archive objects are keyed by `YYYY/MM/DD/SITE/FILENAME`.
    let key = format!("{}/{}/{}", date.format("%Y/%m/%d"), site, name);
    let object = download_object(ARCHIVE_BUCKET, &key).await?;
    Ok(File::new(object.data))
}

View file

@ -0,0 +1,36 @@
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
/// Identifying metadata for a NEXRAD archive volume file.
#[derive(Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct Identifier(String);

impl Identifier {
    /// Constructs a new identifier from the provided name.
    pub fn new(name: String) -> Self {
        Self(name)
    }

    /// The file name.
    pub fn name(&self) -> &str {
        self.0.as_str()
    }

    /// The radar site this file was produced at, e.g. KDMX.
    pub fn site(&self) -> Option<&str> {
        self.0.get(0..4)
    }

    /// This file's data collection time.
    ///
    /// Parsed from the file name: the date occupies characters 4..12 (`%Y%m%d`) and the
    /// time characters 13..19 (`%H%M%S`). Returns `None` when the name is too short or
    /// either portion fails to parse.
    pub fn date_time(&self) -> Option<DateTime<Utc>> {
        let date = NaiveDate::parse_from_str(self.0.get(4..12)?, "%Y%m%d").ok()?;
        let time = NaiveTime::parse_from_str(self.0.get(13..19)?, "%H%M%S").ok()?;
        Some(DateTime::from_naive_utc_and_offset(
            NaiveDateTime::new(date, time),
            Utc,
        ))
    }
}

View file

@ -0,0 +1,29 @@
use crate::aws::archive::identifier::Identifier;
use crate::aws::archive::ARCHIVE_BUCKET;
use crate::aws::s3::list_objects;
use crate::result::aws::AWSError::TruncatedListObjectsResponse;
use crate::result::Error::AWS;
use chrono::NaiveDate;
/// List data files for the specified site and date. This effectively returns an index of data files
/// which can then be individually downloaded.
pub async fn list_files(site: &str, date: &NaiveDate) -> crate::result::Result<Vec<Identifier>> {
    // Archive objects are keyed by `YYYY/MM/DD/SITE/FILENAME`.
    let prefix = format!("{}/{}", date.format("%Y/%m/%d"), site);

    let listing = list_objects(ARCHIVE_BUCKET, &prefix, None).await?;
    if listing.truncated {
        return Err(AWS(TruncatedListObjectsResponse));
    }

    // The file name is everything after the fourth '/' of each object key.
    Ok(listing
        .objects
        .iter()
        .map(|object| Identifier::new(object.key.split('/').skip(4).collect()))
        .collect())
}

View file

@ -0,0 +1,78 @@
//!
//! # Real-time NEXRAD Data
//! Near real-time (within seconds) NEXRAD radar data is uploaded to an AWS S3 bucket by NOAA. This
//! data is organized into a series of "volumes", each containing ~55 "chunks". This module provides
//! functions for identifying the most recent volume with data for a specified radar site and
//! downloading the chunks within that volume.
//!
//! A fixed number (999) volumes exist in the S3 bucket which are rotated through in a round-robin
//! fashion. Chunks are added to each volume approximately every 4-12 seconds with little latency
//! from the data's collection time (usually approximately 15 seconds from collection time).
//!
//! There may be gaps in the volume data, as illustrated in the real example below from KDMX:
//! ```text
//! Volume 001: 2024-08-04 10:10:07 UTC
//! ...
//! Volume 085: 2024-08-04 17:10:49 UTC
//! Volume 086: No files found.
//! ...
//! Volume 670: No files found.
//! Volume 671: 2024-08-03 00:00:21 UTC
//! ...
//! Volume 999: 2024-08-04 10:06:37 UTC
//! ```
//! The [get_latest_volume()] function will find the volume with the most recent data using a binary
//! search approach to minimize the number of network calls made. Once the latest volume is found
//! for a session, a different routine should be used to poll new data for that volume and advance
//! to the next volume when the active one is filled.
//!
mod volume_index;
pub use volume_index::*;
mod chunk;
pub use chunk::*;
mod chunk_type;
pub use chunk_type::*;
mod chunk_identifier;
pub use chunk_identifier::*;
mod download_chunk;
pub use download_chunk::*;
mod get_latest_volume;
pub use get_latest_volume::*;
mod list_chunks_in_volume;
pub use list_chunks_in_volume::*;
#[cfg(feature = "nexrad-decode")]
mod estimate_next_chunk_time;
#[cfg(feature = "nexrad-decode")]
pub use estimate_next_chunk_time::*;
#[cfg(all(feature = "nexrad-decode", feature = "bzip2"))]
mod poll_chunks;
#[cfg(all(feature = "nexrad-decode", feature = "bzip2"))]
pub use poll_chunks::*;
#[cfg(feature = "nexrad-decode")]
mod poll_stats;
#[cfg(feature = "nexrad-decode")]
pub use poll_stats::*;
#[cfg(feature = "nexrad-decode")]
mod chunk_timing_stats;
#[cfg(feature = "nexrad-decode")]
pub use chunk_timing_stats::*;
#[cfg(feature = "nexrad-decode")]
mod elevation_chunk_mapper;
#[cfg(feature = "nexrad-decode")]
pub use elevation_chunk_mapper::*;
mod search;
/// S3 bucket to which NOAA uploads near real-time NEXRAD chunk data.
const REALTIME_BUCKET: &str = "unidata-nexrad-level2-chunks";

View file

@ -0,0 +1,47 @@
use crate::result::aws::AWSError::UnrecognizedChunkFormat;
use crate::result::Error::AWS;
use crate::volume;
/// A chunk of real-time data within a volume. Chunks are ordered and when concatenated together
/// form a complete volume of radar data. All chunks contain an LDM record with radar data messages.
///
/// The lifetime parameter is carried by the [volume::Record] inside the
/// [Chunk::IntermediateOrEnd] variant.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Chunk<'a> {
    /// The start of a new volume. This chunk will begin with an Archive II volume header followed
    /// by a compressed LDM record.
    Start(volume::File),
    /// An intermediate or end chunk. This chunk will contain a compressed LDM record with radar
    /// data messages.
    IntermediateOrEnd(volume::Record<'a>),
}
impl Chunk<'_> {
    /// Creates a new chunk from the provided data. The data is expected to be in one of two formats:
    ///
    /// 1. An Archive II volume header followed by a compressed LDM record, or a "start" chunk.
    /// 2. A compressed LDM record, or an "intermediate" or "end" chunk.
    ///
    /// The chunk type is determined by the data's format.
    ///
    /// # Errors
    ///
    /// Returns [UnrecognizedChunkFormat] if the data matches neither format, including when it is
    /// too short to contain either signature.
    pub fn new(data: Vec<u8>) -> crate::result::Result<Self> {
        // Check if the data begins with an Archive II volume header, indicating a "start" chunk.
        // `get` is used instead of indexing so undersized payloads return an error, not a panic.
        if data.get(0..3) == Some(b"AR2".as_ref()) {
            let file = volume::File::new(data);
            return Ok(Self::Start(file));
        }

        // Check if the data begins with a BZ compressed record (a 4-byte control word followed by
        // the "BZ" magic), indicating an "intermediate" or "end" chunk
        if data.get(4..6) == Some(b"BZ".as_ref()) {
            let record = volume::Record::new(data);
            return Ok(Self::IntermediateOrEnd(record));
        }

        Err(AWS(UnrecognizedChunkFormat))
    }

    /// The data contained within this chunk.
    pub fn data(&self) -> &[u8] {
        match self {
            Self::Start(file) => file.data(),
            Self::IntermediateOrEnd(record) => record.data(),
        }
    }
}

View file

@ -0,0 +1,162 @@
use crate::{
aws::realtime::{ChunkType, VolumeIndex},
result::{aws::AWSError, Error, Result},
};
use chrono::{DateTime, NaiveDateTime, Utc};
#[cfg(feature = "nexrad-decode")]
use crate::aws::realtime::ElevationChunkMapper;
/// Identifies a volume chunk within the real-time NEXRAD data bucket. These chunks are uploaded
/// every few seconds and contain a portion of the radar data for a specific volume.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ChunkIdentifier {
    // These three fields are the same for all chunks in a volume
    site: String,
    volume: VolumeIndex,
    date_time_prefix: NaiveDateTime,
    // These fields identify a specific chunk within the volume
    sequence: usize,
    chunk_type: ChunkType,
    // This is derived from the other fields, formatted as
    // "<YYYYmmdd-HHMMSS prefix>-<zero-padded sequence>-<type abbreviation>"
    name: String,
    // If this chunk was downloaded, this is the upload time
    upload_date_time: Option<DateTime<Utc>>,
}
impl ChunkIdentifier {
/// Creates a new chunk identifier.
pub fn new(
site: String,
volume: VolumeIndex,
date_time_prefix: NaiveDateTime,
sequence: usize,
chunk_type: ChunkType,
upload_date_time: Option<DateTime<Utc>>,
) -> Self {
let name = format!(
"{}-{:03}-{}",
date_time_prefix.format("%Y%m%d-%H%M%S"),
sequence,
chunk_type.abbreviation()
);
Self {
site,
volume,
date_time_prefix,
sequence,
chunk_type,
name,
upload_date_time,
}
}
/// Creates a new chunk identifier by parsing a chunk name.
pub fn from_name(
site: String,
volume: VolumeIndex,
name: String,
upload_date_time: Option<DateTime<Utc>>,
) -> Result<Self> {
let date_time_prefix = NaiveDateTime::parse_from_str(&name[..15], "%Y%m%d-%H%M%S")
.map_err(|_| Error::AWS(AWSError::UnrecognizedChunkDateTime(name[..15].to_string())))?;
let sequence_str = &name[16..19];
let sequence = sequence_str.parse::<usize>().map_err(|_| {
Error::AWS(AWSError::UnrecognizedChunkSequence(
sequence_str.to_string(),
))
})?;
let chunk_type = ChunkType::from_abbreviation(
name.chars()
.last()
.ok_or(Error::AWS(AWSError::UnrecognizedChunkType(None)))?,
)?;
Ok(Self {
site,
volume,
date_time_prefix,
sequence,
chunk_type,
name,
upload_date_time,
})
}
/// The chunk's radar site identifier.
pub fn site(&self) -> &str {
&self.site
}
/// The chunk's rotating volume index.
pub fn volume(&self) -> &VolumeIndex {
&self.volume
}
/// The chunk's date and time prefix, consistent across all chunks in a volume.
pub fn date_time_prefix(&self) -> &NaiveDateTime {
&self.date_time_prefix
}
/// The sequence number of this chunk within the volume.
pub fn sequence(&self) -> usize {
self.sequence
}
/// The chunk's type.
pub fn chunk_type(&self) -> ChunkType {
self.chunk_type
}
/// The chunk's name.
pub fn name(&self) -> &str {
&self.name
}
/// The date and time this chunk was uploaded.
pub fn upload_date_time(&self) -> Option<DateTime<Utc>> {
self.upload_date_time
}
/// Identifies the next chunk's expected location.
#[cfg(feature = "nexrad-decode")]
pub fn next_chunk(&self, elevation_chunk_mapper: &ElevationChunkMapper) -> Option<NextChunk> {
let final_sequence = elevation_chunk_mapper.final_sequence();
if self.sequence == final_sequence {
return Some(NextChunk::Volume(self.volume.next()));
}
Some(NextChunk::Sequence(ChunkIdentifier::new(
self.site().to_string(),
self.volume,
self.date_time_prefix,
self.sequence + 1,
if self.sequence + 1 == final_sequence {
ChunkType::End
} else {
ChunkType::Intermediate
},
None,
)))
}
}
/// Identifies where to find the next expected chunk. Returned by
/// [ChunkIdentifier::next_chunk].
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum NextChunk {
    /// The next chunk is expected to be located in the same volume at this sequence. Once the next
    /// chunk's identifier is determined, it can be downloaded using the
    /// [crate::aws::realtime::download_chunk()] function. You may need to poll by checking if that
    /// function returns [crate::result::aws::AWSError::S3ObjectNotFoundError].
    Sequence(ChunkIdentifier),
    /// The chunk is expected to be located in the next volume. The next volume's chunks can be
    /// listed using the [crate::aws::realtime::list_chunks_in_volume()] function.
    Volume(VolumeIndex),
}

View file

@ -0,0 +1,118 @@
use crate::aws::realtime::ChunkType;
use chrono::Duration;
use nexrad_decode::messages::volume_coverage_pattern::{ChannelConfiguration, WaveformType};
use std::collections::{HashMap, VecDeque};
use std::hash::{Hash, Hasher};
/// Maximum number of timing samples to keep per chunk characteristics. Samples beyond this count
/// are evicted oldest-first, forming a rolling window.
const MAX_TIMING_SAMPLES: usize = 10;
/// Characteristics of a chunk that affect timing
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct ChunkCharacteristics {
    /// Type of the chunk
    pub chunk_type: ChunkType,
    /// Waveform type of the elevation
    pub waveform_type: WaveformType,
    /// Channel configuration of the elevation
    pub channel_configuration: ChannelConfiguration,
}
// Hashes each field's enum discriminant. Values that compare equal under the derived `PartialEq`
// have identical discriminants, so this stays consistent with `Eq`.
// NOTE(review): presumably hand-written because these enums do not all implement `Hash` — confirm.
impl Hash for ChunkCharacteristics {
    fn hash<H: Hasher>(&self, state: &mut H) {
        std::mem::discriminant(&self.chunk_type).hash(state);
        std::mem::discriminant(&self.waveform_type).hash(state);
        std::mem::discriminant(&self.channel_configuration).hash(state);
    }
}
/// Statistics for a single timing sample. Samples are kept in a rolling window of at most
/// `MAX_TIMING_SAMPLES` entries per [ChunkCharacteristics].
#[derive(Debug, Clone, Copy)]
pub(crate) struct TimingStat {
    /// Duration of the timing sample
    duration: Duration,
    /// Number of attempts to download the chunk
    attempts: usize,
}
/// Statistics for timing between chunks
#[derive(Debug, Clone, Default)]
pub struct ChunkTimingStats {
    /// Timing statistics for each chunk characteristics; newest samples at the back of each queue
    timings: HashMap<ChunkCharacteristics, VecDeque<TimingStat>>,
}
impl ChunkTimingStats {
    /// Create a new empty timing statistics
    pub fn new() -> Self {
        Self::default()
    }

    /// Record a timing sample for the given chunk characteristics, evicting the oldest sample
    /// once the rolling window exceeds `MAX_TIMING_SAMPLES`.
    pub fn add_timing(
        &mut self,
        characteristics: ChunkCharacteristics,
        duration: Duration,
        attempts: usize,
    ) {
        let samples = self.timings.entry(characteristics).or_default();
        samples.push_back(TimingStat { duration, attempts });

        // Keep only the most recent samples in the rolling window.
        while samples.len() > MAX_TIMING_SAMPLES {
            samples.pop_front();
        }
    }

    /// Get the average timing for the given chunk characteristics, or `None` when no samples
    /// have been recorded.
    pub(crate) fn get_average_timing(
        &self,
        characteristics: &ChunkCharacteristics,
    ) -> Option<Duration> {
        self.timings
            .get(characteristics)
            .filter(|samples| !samples.is_empty())
            .map(|samples| {
                let total_millis: i64 = samples
                    .iter()
                    .map(|sample| sample.duration.num_milliseconds())
                    .sum();
                // Integer division intentionally truncates toward zero.
                Duration::milliseconds(total_millis / samples.len() as i64)
            })
    }

    /// Get the average number of attempts for the given chunk characteristics, or `None` when no
    /// samples have been recorded.
    pub(crate) fn get_average_attempts(
        &self,
        characteristics: &ChunkCharacteristics,
    ) -> Option<f64> {
        self.timings
            .get(characteristics)
            .filter(|samples| !samples.is_empty())
            .map(|samples| {
                let total_attempts: usize = samples.iter().map(|sample| sample.attempts).sum();
                total_attempts as f64 / samples.len() as f64
            })
    }

    /// Get all chunk statistics for display purposes
    pub fn get_statistics(&self) -> Vec<(ChunkCharacteristics, Option<Duration>, Option<f64>)> {
        self.timings
            .keys()
            .map(|characteristics| {
                (
                    *characteristics,
                    self.get_average_timing(characteristics),
                    self.get_average_attempts(characteristics),
                )
            })
            .collect()
    }
}

View file

@ -0,0 +1,30 @@
use crate::result::{aws::AWSError, Error, Result};
/// The position of this chunk within the volume.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum ChunkType {
Start,
Intermediate,
End,
}
impl ChunkType {
/// Creates a new chunk type from an abbreviation.
pub fn from_abbreviation(c: char) -> Result<Self> {
match c {
'S' => Ok(ChunkType::Start),
'I' => Ok(ChunkType::Intermediate),
'E' => Ok(ChunkType::End),
_ => Err(Error::AWS(AWSError::UnrecognizedChunkType(Some(c)))),
}
}
/// Returns the abbreviation for this chunk type.
pub fn abbreviation(&self) -> char {
match self {
ChunkType::Start => 'S',
ChunkType::Intermediate => 'I',
ChunkType::End => 'E',
}
}
}

View file

@ -0,0 +1,27 @@
use crate::aws::realtime::{Chunk, ChunkIdentifier, REALTIME_BUCKET};
use crate::aws::s3::download_object;
/// Downloads the specified chunk from the real-time NEXRAD data bucket.
pub async fn download_chunk<'a>(
    site: &str,
    chunk_id: &ChunkIdentifier,
) -> crate::result::Result<(ChunkIdentifier, Chunk<'a>)> {
    // Chunk objects are keyed as "<site>/<volume number>/<chunk name>".
    let key = format!(
        "{}/{}/{}",
        site,
        chunk_id.volume().as_number(),
        chunk_id.name()
    );

    let object = download_object(REALTIME_BUCKET, &key).await?;

    // Re-derive the identifier so it carries the downloaded object's upload time.
    let identifier = ChunkIdentifier::from_name(
        site.to_string(),
        *chunk_id.volume(),
        chunk_id.name().to_string(),
        object.metadata.last_modified,
    )?;
    let chunk = Chunk::new(object.data)?;

    Ok((identifier, chunk))
}

View file

@ -0,0 +1,56 @@
use nexrad_decode::messages::volume_coverage_pattern;
/// Maps between real-time chunk sequence numbers and volume coverage pattern elevation numbers.
pub struct ElevationChunkMapper {
    // Index is elevation number - 1, value is the elevation's inclusive chunk sequence range
    elevation_chunk_mappings: Vec<(usize, usize)>,
}

impl ElevationChunkMapper {
    /// Create a new mapper from a volume coverage pattern.
    pub fn new(vcp: &volume_coverage_pattern::Message) -> Self {
        let mut elevation_chunk_mappings = Vec::with_capacity(vcp.elevations.len());

        // Sequence 1 is the volume's metadata chunk, so elevation data begins at sequence 2.
        let mut next_sequence = 2;
        for elevation in &vcp.elevations {
            // Each chunk holds 120 radials: super-resolution cuts (720 radials) span 6 chunks
            // while standard cuts (360 radials) span 3.
            let chunk_count = if elevation.super_resolution_control_half_degree_azimuth() {
                6
            } else {
                3
            };

            elevation_chunk_mappings.push((next_sequence, next_sequence + chunk_count - 1));
            next_sequence += chunk_count;
        }

        Self {
            elevation_chunk_mappings,
        }
    }

    /// Get the elevation number for a given sequence number. Returns None if the sequence number
    /// does not correspond to a radar scan described by the VCP.
    pub fn get_sequence_elevation_number(&self, sequence: usize) -> Option<usize> {
        // The first chunk is metadata, not a radar scan described by the VCP
        if sequence == 1 {
            return None;
        }

        self.elevation_chunk_mappings
            .iter()
            .position(|&(start, end)| (start..=end).contains(&sequence))
            .map(|elevation_index| elevation_index + 1)
    }

    /// Returns the final sequence number for the volume.
    pub fn final_sequence(&self) -> usize {
        self.elevation_chunk_mappings
            .last()
            .map_or(0, |&(_, end)| end)
    }
}

View file

@ -0,0 +1,107 @@
use crate::aws::realtime::{
ChunkCharacteristics, ChunkIdentifier, ChunkTimingStats, ChunkType, ElevationChunkMapper,
};
use chrono::Duration as ChronoDuration;
use chrono::{DateTime, Utc};
use log::debug;
use nexrad_decode::messages::volume_coverage_pattern::{self, ChannelConfiguration, WaveformType};
/// Attempts to estimate the time at which the next chunk will be available given the previous
/// chunk. Requires an [ElevationChunkMapper] to describe the relationship between chunk sequence
/// and VCP elevations. A None result indicates that the chunk is already available or that an
/// estimate cannot be made.
pub fn estimate_chunk_availability_time(
    chunk: &ChunkIdentifier,
    vcp: &volume_coverage_pattern::Message,
    elevation_chunk_mapper: &ElevationChunkMapper,
    timing_stats: Option<&ChunkTimingStats>,
) -> Option<DateTime<Utc>> {
    // The chunk should appear one estimated processing interval from now.
    estimate_chunk_processing_time(chunk, vcp, elevation_chunk_mapper, timing_stats)
        .map(|processing_time| Utc::now() + processing_time)
}
/// Attempts to estimate the time the given chunk will take to become available in the real-time S3
/// bucket following the previous chunk. Requires an [ElevationChunkMapper] to describe the
/// relationship between chunk sequence and VCP elevations. A None result indicates that an estimate
/// cannot be made.
///
/// Historical timing statistics are preferred when available; otherwise a static per-waveform
/// default is used (see [get_default_wait_time]).
pub fn estimate_chunk_processing_time(
    chunk: &ChunkIdentifier,
    vcp: &volume_coverage_pattern::Message,
    elevation_chunk_mapper: &ElevationChunkMapper,
    timing_stats: Option<&ChunkTimingStats>,
) -> Option<ChronoDuration> {
    // A volume's "start" chunk uses a fixed estimate.
    if chunk.chunk_type() == ChunkType::Start {
        return Some(ChronoDuration::seconds(10));
    }

    if let Some(elevation) = elevation_chunk_mapper
        .get_sequence_elevation_number(chunk.sequence())
        .and_then(|elevation_number| vcp.elevations.get(elevation_number - 1))
    {
        let waveform_type = elevation.waveform_type();
        let channel_config = elevation.channel_configuration();

        let characteristics = ChunkCharacteristics {
            chunk_type: chunk.chunk_type(),
            waveform_type,
            channel_configuration: channel_config,
        };

        let average_timing =
            timing_stats.and_then(|stats| stats.get_average_timing(&characteristics));
        let average_attempts =
            timing_stats.and_then(|stats| stats.get_average_attempts(&characteristics));

        // Check if we have historical timing data for this combination
        let estimated_wait_time =
            if let (Some(avg_timing), Some(avg_attempts)) = (average_timing, average_attempts) {
                // Use historical average if available
                let mut wait_time = avg_timing;

                // If we're making multiple attempts, add the average number of attempts to the wait time
                // (uses the file's `ChronoDuration` alias for consistency).
                wait_time += ChronoDuration::seconds(avg_attempts as i64 - 1);

                debug!(
                    "Using historical average timing of {}ms and {} attempts for {}ms",
                    avg_timing.num_milliseconds(),
                    avg_attempts,
                    wait_time.num_milliseconds()
                );

                wait_time
            } else {
                // Fall back to the static estimation
                let wait_time = get_default_wait_time(waveform_type, channel_config);

                debug!(
                    "No historical timing data available, using static estimation of {}ms",
                    wait_time.num_milliseconds()
                );

                wait_time
            };

        return Some(estimated_wait_time);
    }

    None
}
/// Gets the default wait time based on waveform type and channel configuration
fn get_default_wait_time(
    waveform_type: WaveformType,
    channel_config: ChannelConfiguration,
) -> ChronoDuration {
    // Longest default wait for CS waveforms, shorter for constant-phase cuts, shortest otherwise.
    let seconds = if waveform_type == WaveformType::CS {
        11
    } else if channel_config == ChannelConfiguration::ConstantPhase {
        7
    } else {
        4
    };

    ChronoDuration::seconds(seconds)
}

View file

@ -0,0 +1,38 @@
use crate::aws::realtime::list_chunks_in_volume::list_chunks_in_volume;
use crate::aws::realtime::search::search;
use crate::aws::realtime::VolumeIndex;
use chrono::{DateTime, Utc};
use std::sync::atomic::AtomicI32;
use std::sync::atomic::Ordering::Relaxed;
use std::sync::Arc;
/// Identifies the volume index with the most recent data for the specified radar site. Real-time
/// NEXRAD data is uploaded to a series of rotating volumes 0..=999, each containing ~55 chunks.
/// This function performs a binary search to find the most recent volume with data.
pub async fn get_latest_volume(site: &str) -> crate::result::Result<LatestVolumeResult> {
    // Counts listing calls made by the search; reported to the caller in the result.
    let calls = Arc::new(AtomicI32::new(0));
    // Searches 998 indices, each mapped to volume index + 1, comparing each volume's first chunk
    // upload time against the maximum representable time so the most recent volume wins.
    // NOTE(review): with 998 elements offset by 1, volume 999 is never probed — confirm intended.
    let latest_volume = search(998, DateTime::<Utc>::MAX_UTC, |volume| {
        calls.fetch_add(1, Relaxed);
        async move {
            // Only one key is needed: the probe just checks whether the volume has any data.
            let chunks = list_chunks_in_volume(site, VolumeIndex::new(volume + 1), 1).await?;
            Ok(chunks.first().and_then(|chunk| chunk.upload_date_time()))
        }
    })
    .await
    .map(|volume| volume.map(|index| VolumeIndex::new(index + 1)))?;
    Ok(LatestVolumeResult {
        volume: latest_volume,
        calls: calls.load(Relaxed) as usize,
    })
}
/// Represents the most recent volume index and the number of network calls made to find it.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct LatestVolumeResult {
    /// The most recent volume index, if found. `None` indicates no volume contained data.
    pub volume: Option<VolumeIndex>,
    /// The number of network calls made to find the most recent volume.
    pub calls: usize,
}

View file

@ -0,0 +1,28 @@
use crate::aws::realtime::{ChunkIdentifier, VolumeIndex, REALTIME_BUCKET};
use crate::aws::s3::list_objects;
/// Lists the chunks for the specified radar site and volume. The `max_keys` parameter can be used
/// to limit the number of chunks returned.
pub async fn list_chunks_in_volume(
    site: &str,
    volume: VolumeIndex,
    max_keys: usize,
) -> crate::result::Result<Vec<ChunkIdentifier>> {
    // Chunk objects are keyed as "<site>/<volume number>/<chunk name>".
    let prefix = format!("{}/{}/", site, volume.as_number());
    let listing = list_objects(REALTIME_BUCKET, &prefix, Some(max_keys)).await?;

    let mut identifiers = Vec::with_capacity(listing.objects.len());
    for object in &listing.objects {
        // The chunk name is the final '/'-separated segment of the key; fall back to the whole
        // key if it contains no separator.
        let name = object
            .key
            .rsplit('/')
            .next()
            .unwrap_or(object.key.as_ref())
            .to_string();
        identifiers.push(ChunkIdentifier::from_name(
            site.to_string(),
            volume,
            name,
            object.last_modified,
        )?);
    }

    Ok(identifiers)
}

View file

@ -0,0 +1,272 @@
use crate::aws::realtime::poll_stats::PollStats;
use crate::aws::realtime::{
download_chunk, estimate_chunk_availability_time, get_latest_volume, list_chunks_in_volume,
Chunk, ChunkCharacteristics, ChunkIdentifier, ChunkTimingStats, ElevationChunkMapper,
NewChunkStats, NextChunk, VolumeIndex,
};
use crate::result::Error;
use crate::result::{aws::AWSError, Result};
use chrono::{Duration, Utc};
use log::debug;
use nexrad_decode::messages::volume_coverage_pattern;
use std::future::Future;
use std::sync::mpsc::{Receiver, Sender};
use std::time::Duration as StdDuration;
use tokio::time::{sleep, sleep_until, Instant};
/// The number of chunks to wait before emitting timing statistics.
const CHUNKS_UNTIL_TIMING_STATS: usize = 10;
/// Polls for the latest real-time chunks from the AWS S3 bucket. When new chunks are identified,
/// they will be downloaded and sent to the provided `Sender`. If a statistics `Sender` is provided,
/// statistics from the polling process such as how many requests are being sent will be sent to it.
/// The polling process will stop when a message is received on the provided `Receiver`.
pub async fn poll_chunks(
site: &str,
tx: Sender<(ChunkIdentifier, Chunk<'_>)>,
stats_tx: Option<Sender<PollStats>>,
stop_rx: Receiver<bool>,
) -> Result<()> {
use crate::aws::realtime::ChunkType;
use log::debug;
let latest_volume_result = get_latest_volume(site).await?;
if let Some(stats_tx) = &stats_tx {
stats_tx
.send(PollStats::LatestVolumeCalls(latest_volume_result.calls))
.map_err(|_| AWSError::PollingAsyncError)?;
}
let latest_volume = latest_volume_result
.volume
.ok_or(AWSError::LatestVolumeNotFound)?;
let latest_chunk_id = get_latest_chunk(site, latest_volume)
.await?
.ok_or(AWSError::ExpectedChunkNotFound)?;
let (latest_chunk_id, latest_chunk) = download_chunk(site, &latest_chunk_id).await?;
tx.send((latest_chunk_id.clone(), latest_chunk))
.map_err(|_| AWSError::PollingAsyncError)?;
let latest_metadata_id = ChunkIdentifier::new(
site.to_string(),
latest_volume,
*latest_chunk_id.date_time_prefix(),
1,
ChunkType::Start,
None,
);
let (_, latest_metadata) = download_chunk(site, &latest_metadata_id).await?;
let vcp = get_latest_vcp(&latest_metadata)?;
debug!("Polling volume with VCP: {}", vcp.header.pattern_number);
let mut elevation_chunk_mapper = ElevationChunkMapper::new(&vcp);
// Create timing statistics for improved predictions
let mut timing_stats = ChunkTimingStats::new();
let mut previous_chunk_id = latest_chunk_id;
let mut previous_chunk_time = None;
let mut chunks_until_timing_stats = CHUNKS_UNTIL_TIMING_STATS;
loop {
if stop_rx.try_recv().is_ok() {
break;
}
let next_chunk_estimate = estimate_chunk_availability_time(
&previous_chunk_id,
&vcp,
&elevation_chunk_mapper,
Some(&timing_stats),
);
let next_chunk_time = if let Some(next_chunk_estimate) = next_chunk_estimate {
debug!(
"Estimated next chunk time: {} ({}s)",
next_chunk_estimate,
next_chunk_estimate
.signed_duration_since(Utc::now())
.num_milliseconds() as f64
/ 1000.0
);
next_chunk_estimate
} else {
debug!("Unable to estimate next chunk time, trying immediately");
Utc::now()
};
if next_chunk_time > Utc::now() {
let time_until = next_chunk_time
.signed_duration_since(Utc::now())
.to_std()
.ok();
if let Some(time_until) = time_until {
sleep_until(Instant::now() + time_until).await;
}
}
let next_chunk_id = match previous_chunk_id
.next_chunk(&elevation_chunk_mapper)
.ok_or(AWSError::FailedToDetermineNextChunk)?
{
NextChunk::Sequence(next_chunk_id) => next_chunk_id,
NextChunk::Volume(next_volume) => {
let (attempts, chunk_id) =
try_resiliently(|| get_latest_chunk_or_error(site, next_volume), 500, 10).await;
if let Some(stats_tx) = &stats_tx {
stats_tx
.send(PollStats::NewVolumeCalls(attempts))
.map_err(|_| AWSError::PollingAsyncError)?;
}
chunk_id.ok_or(AWSError::ExpectedChunkNotFound)?
}
};
let (attempts, next_chunk) =
try_resiliently(|| download_chunk(site, &next_chunk_id), 500, 5).await;
let (next_chunk_id, next_chunk) = next_chunk.ok_or(AWSError::ExpectedChunkNotFound)?;
if let (Some(chunk_time), Some(previous_chunk_time)) =
(next_chunk_id.upload_date_time(), previous_chunk_time)
{
let chunk_duration = chunk_time.signed_duration_since(previous_chunk_time);
update_timing_stats(
&mut timing_stats,
&previous_chunk_id,
&vcp,
&elevation_chunk_mapper,
chunk_duration,
attempts,
);
}
if next_chunk_id.chunk_type() == ChunkType::Start {
let vcp = get_latest_vcp(&next_chunk)?;
debug!(
"Updated polling volume's VCP to: {}",
vcp.header.pattern_number
);
elevation_chunk_mapper = ElevationChunkMapper::new(&vcp);
}
if let Some(stats_tx) = &stats_tx {
stats_tx
.send(PollStats::NewChunk(NewChunkStats {
calls: attempts,
download_time: Some(Utc::now()),
upload_time: next_chunk_id.upload_date_time(),
}))
.map_err(|_| AWSError::PollingAsyncError)?;
if chunks_until_timing_stats == 0 {
stats_tx
.send(PollStats::ChunkTimings(timing_stats))
.map_err(|_| AWSError::PollingAsyncError)?;
timing_stats = ChunkTimingStats::new();
chunks_until_timing_stats = CHUNKS_UNTIL_TIMING_STATS;
} else {
chunks_until_timing_stats -= 1;
}
}
tx.send((next_chunk_id.clone(), next_chunk))
.map_err(|_| AWSError::PollingAsyncError)?;
previous_chunk_time = next_chunk_id.upload_date_time();
previous_chunk_id = next_chunk_id;
}
Ok(())
}
/// Helper function to update timing statistics for a downloaded chunk
fn update_timing_stats(
    timing_stats: &mut ChunkTimingStats,
    chunk_id: &ChunkIdentifier,
    vcp: &volume_coverage_pattern::Message,
    elevation_chunk_mapper: &ElevationChunkMapper,
    duration: Duration,
    attempts: usize,
) {
    // Timing is only tracked for chunks that map to an elevation described by the VCP; the
    // metadata chunk (sequence 1) and unknown sequences are skipped.
    if let Some(elevation) = elevation_chunk_mapper
        .get_sequence_elevation_number(chunk_id.sequence())
        .and_then(|elevation_number| vcp.elevations.get(elevation_number - 1))
    {
        let characteristics = ChunkCharacteristics {
            chunk_type: chunk_id.chunk_type(),
            waveform_type: elevation.waveform_type(),
            channel_configuration: elevation.channel_configuration(),
        };
        timing_stats.add_timing(characteristics, duration, attempts);

        // `debug!` formats these values directly; the `&dyn Debug`/`&dyn Display` casts that were
        // here previously were unnecessary and produced identical output.
        debug!(
            "Updated timing statistics for {:?}: {}s",
            characteristics,
            duration.num_milliseconds() as f64 / 1000.0
        );
    }
}
/// Queries for the latest chunk in the specified volume. If no chunk is found, an error is returned.
async fn get_latest_chunk_or_error(site: &str, volume: VolumeIndex) -> Result<ChunkIdentifier> {
let chunks = list_chunks_in_volume(site, volume, 100).await?;
chunks
.last()
.cloned()
.ok_or(Error::AWS(AWSError::ExpectedChunkNotFound))
}
/// Queries for the latest chunk in the specified volume.
async fn get_latest_chunk(site: &str, volume: VolumeIndex) -> Result<Option<ChunkIdentifier>> {
    // NOTE: relies on the listing order placing the most recent chunk last.
    let chunks = list_chunks_in_volume(site, volume, 100).await?;
    Ok(chunks.into_iter().next_back())
}
/// Gets the volume coverage pattern from the latest metadata chunk.
fn get_latest_vcp(latest_metadata: &Chunk<'_>) -> Result<volume_coverage_pattern::Message> {
    if let Chunk::Start(file) = latest_metadata {
        for record in file.records() {
            // Records may arrive compressed; decompress before decoding messages.
            let record = if record.compressed() {
                record.decompress()?
            } else {
                record
            };
            // Return the first volume coverage pattern message found in any record.
            for message in record.messages()? {
                if let nexrad_decode::messages::MessageContents::VolumeCoveragePattern(vcp) =
                    message.contents()
                {
                    return Ok(*vcp.clone());
                }
            }
        }
    }
    Err(Error::MissingCoveragePattern)
}
/// Attempts an action with retries on an exponential backoff.
///
/// Returns the number of attempts made and the first successful result, or `None` if every
/// attempt failed. The backoff doubles after each failure starting from `wait_millis`.
async fn try_resiliently<F, R>(
    action: impl Fn() -> F,
    wait_millis: u64,
    attempts: usize,
) -> (usize, Option<R>)
where
    F: Future<Output = Result<R>>,
{
    for attempt in 0..attempts {
        if let Ok(result) = action().await {
            return (attempt + 1, Some(result));
        }

        // Back off exponentially, but skip the sleep after the final attempt so the caller is not
        // delayed before receiving the failure.
        if attempt + 1 < attempts {
            let wait = wait_millis * 2u64.pow(attempt as u32);
            sleep(StdDuration::from_millis(wait)).await;
        }
    }
    (attempts, None)
}

View file

@ -0,0 +1,37 @@
use chrono::{DateTime, TimeDelta, Utc};
use super::ChunkTimingStats;
/// Statistics from the polling process.
#[derive(Debug, Clone)]
pub enum PollStats {
    /// The number of network calls made to find the most recent volume.
    LatestVolumeCalls(usize),
    /// The number of network calls made to find a new volume.
    NewVolumeCalls(usize),
    /// Statistics for a new chunk.
    NewChunk(NewChunkStats),
    /// Periodic timing statistics for chunks by-type.
    ChunkTimings(ChunkTimingStats),
}
/// Statistics for a new chunk.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct NewChunkStats {
    /// The number of network calls made to find a new chunk.
    pub calls: usize,
    /// The time when the chunk was downloaded. `None` if the download time was not recorded.
    pub download_time: Option<DateTime<Utc>>,
    /// The time when the chunk was uploaded to S3. `None` if the upload time is unknown.
    pub upload_time: Option<DateTime<Utc>>,
}
impl NewChunkStats {
    /// The latency between when a chunk was downloaded and when it was uploaded to S3. Returns
    /// `None` unless both timestamps are known.
    pub fn latency(&self) -> Option<TimeDelta> {
        let download_time = self.download_time?;
        let upload_time = self.upload_time?;
        Some(upload_time.signed_duration_since(download_time))
    }
}

View file

@ -0,0 +1,433 @@
use std::collections::VecDeque;
use std::future::Future;
/// Performs an efficient search of elements to locate the nearest element to `target` without going
/// over. Assumes there are `element_count` elements in a rotated sorted array with zero or many
/// `None` values at the pivot point. Returns `None` if there are no values less than the `target`.
pub(crate) async fn search<F, V>(
    element_count: usize,
    target: V,
    mut f: impl FnMut(usize) -> F,
) -> crate::result::Result<Option<usize>>
where
    F: Future<Output = crate::result::Result<Option<V>>>,
    V: PartialOrd + Clone,
{
    if element_count == 0 {
        return Ok(None);
    }
    let some_target = Some(&target);
    // Best candidate index found so far: its value is <= target.
    let mut nearest = None;
    let mut first_value = f(0).await?;
    let mut first_value_ref = first_value.as_ref();
    if first_value_ref == some_target {
        return Ok(Some(0));
    }
    let mut low = 0;
    let mut high = element_count;
    // First, locate any value in the array to use as a reference point via repeated bisection
    let mut queue = VecDeque::from([(0, element_count - 1)]);
    while !queue.is_empty() {
        if let Some((start, end)) = queue.pop_front() {
            if start > end {
                continue;
            }
            let mid = (start + end) / 2;
            let mid_value = f(mid).await?;
            let mid_value_ref = mid_value.as_ref();
            // If this value is None, continue the bisection
            if mid_value_ref.is_none() {
                queue.push_back((mid + 1, end));
                if mid > 0 {
                    queue.push_back((start, mid - 1));
                }
                continue;
            }
            if mid_value_ref <= some_target {
                nearest = Some(mid);
            }
            if mid_value_ref == some_target {
                return Ok(nearest);
            }
            // A non-None reference point was found: narrow [low, high) once and stop bisecting.
            if should_search_right(first_value_ref, mid_value_ref, some_target) {
                low = mid + 1;
            } else {
                high = mid;
            }
        }
        // Reached only when the popped range held a non-None value (the `continue`s above skip
        // this): the bisection phase is complete.
        break;
    }
    if low >= high {
        return Ok(nearest);
    }
    // Move the low pointer to the first non-None value
    first_value = f(low).await?;
    first_value_ref = first_value.as_ref();
    // Now that we have a reference point, we can perform a binary search for the target
    while low < high {
        let mid = low + (high - low) / 2;
        let value = f(mid).await?;
        let value_ref = value.as_ref();
        if value_ref.is_some() && value_ref <= some_target {
            nearest = Some(mid);
        }
        if value_ref == some_target {
            return Ok(Some(mid));
        }
        if should_search_right(first_value_ref, value_ref, some_target) {
            low = mid + 1;
        } else {
            high = mid;
        }
    }
    Ok(nearest)
}
/// Returns `true` if the search should continue right, `false` if it should continue left.
fn should_search_right<V>(first: V, value: V, target: V) -> bool
where
    V: PartialOrd,
{
    // `first_wrapped`: the rotation pivot lies between `first` and `value`.
    // `target_wrapped`: the target sits in the wrapped (pre-`first`) portion of the array.
    let first_wrapped = first > value;
    let target_wrapped = target < first;
    // The original branches are boolean complements of each other (De Morgan), so compute one
    // expression and negate it when `value` is not below the target.
    let right_when_below = !first_wrapped || target_wrapped;
    if value < target {
        right_when_below
    } else {
        !right_when_below
    }
}
#[cfg(test)]
mod tests {
use super::*;
mod binary_search {
    use super::*;

    // Generates an async test that runs `search` over a fixed vector of
    // `Option` elements (indexing it via the async accessor closure) and
    // asserts the returned index.
    macro_rules! test {
        ($name:ident, $elements:expr, $target:expr, $expected:expr) => {
            #[tokio::test]
            async fn $name() {
                let result = search(
                    $elements.len(),
                    $target,
                    |i| async move { Ok($elements[i]) },
                )
                .await
                .unwrap();
                assert_eq!(result, $expected);
            }
        };
    }

    // Degenerate and tiny inputs.
    test!(empty, vec![] as Vec<Option<usize>>, 0, None);
    test!(single, vec![Some(0)], 0, Some(0));
    test!(single_under, vec![Some(1)], 0, None);
    test!(single_over, vec![Some(0)], 1, Some(0));
    test!(double_match, vec![Some(0), Some(1)], 0, Some(0));
    test!(double_over, vec![Some(0), Some(1)], 2, Some(1));
    test!(double_under, vec![Some(1), Some(2)], 0, None);
    test!(double_middle, vec![Some(0), Some(2)], 1, Some(0));
    test!(double_middle_over, vec![Some(0), Some(2)], 3, Some(1));

    // Fully-populated, sorted arrays.
    test!(
        filled,
        vec![
            Some(0),
            Some(1),
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
            Some(9),
        ],
        5,
        Some(5)
    );
    test!(
        filled_nonmatch,
        vec![
            Some(0),
            Some(1),
            Some(2),
            Some(3),
            Some(6),
            Some(7),
            Some(8),
            Some(9),
        ],
        5,
        Some(3)
    );

    // Arrays containing `None` gaps at the edges.
    test!(
        none_end,
        vec![
            Some(0),
            Some(1),
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            None,
            None,
            None,
        ],
        8,
        Some(6)
    );
    test!(
        none_beginning_no_match,
        vec![
            None,
            None,
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
        ],
        1,
        None
    );
    test!(
        none_beginning_match,
        vec![
            None,
            None,
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
        ],
        3,
        Some(4)
    );

    // `None` values surrounding the populated region.
    test!(
        none_wrapping_match,
        vec![
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
        ],
        3,
        Some(2)
    );
    test!(
        none_wrapping_no_match,
        vec![
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
        ],
        1,
        None
    );
    test!(
        none_wrapping_non_exact_match,
        vec![
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
        ],
        10,
        Some(7)
    );

    // Rotated ("wrapped") arrays whose values restart partway through.
    test!(
        wrapping_match_start,
        vec![
            Some(6),
            Some(7),
            Some(8),
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        7,
        Some(1)
    );
    test!(
        wrapping_match_end,
        vec![
            Some(6),
            Some(7),
            Some(8),
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        4,
        Some(5)
    );
    test!(
        wrapping_no_match,
        vec![
            Some(6),
            Some(7),
            Some(8),
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        1,
        None
    );

    // Rotated arrays with a `None` gap at the rotation point.
    test!(
        wrapping_none_match_start,
        vec![
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        7,
        Some(1)
    );
    test!(
        wrapping_none_no_match,
        vec![
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        1,
        None
    );
    test!(
        wrapping_none_match_end,
        vec![
            Some(6),
            Some(7),
            Some(8),
            None,
            None,
            None,
            Some(2),
            Some(3),
            Some(4),
            Some(5),
        ],
        2,
        Some(6)
    );

    // Entirely (or almost entirely) `None` arrays.
    test!(
        all_none,
        vec![None, None, None, None] as Vec<Option<usize>>,
        5,
        None
    );
    test!(
        none_middle,
        vec![None, Some(1), Some(3), None, None, None, None, None],
        100,
        Some(2)
    );
}
mod should_search_right {
    use super::*;

    // Generates a test asserting the direction decision for a
    // (first, value, target) triple.
    macro_rules! test {
        ($name:ident, $first:expr, $value:expr, $target:expr, $expected:expr) => {
            #[test]
            fn $name() {
                assert_eq!(should_search_right($first, $value, $target), $expected);
            }
        };
    }

    // Non-wrapped orderings.
    test!(simple, 2, 5, 8, true);
    test!(repeated, 2, 2, 5, true);
    test!(preceding, 1, 8, 2, false);
    // Wrapped (rotated) orderings.
    test!(wrapped_below_pivot, 8, 2, 5, true);
    test!(wrapped_above_pivot, 8, 5, 9, false);
    test!(wrapped_preceding, 6, 5, 2, false);
}
}

View file

@ -0,0 +1,26 @@
/// A volume's index in the AWS real-time NEXRAD bucket. These indexes are rotated-through as chunks
/// are accumulated and finally combined into full volumes to be archived.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct VolumeIndex(usize);

impl VolumeIndex {
    /// Creates a new volume index with the specified value.
    pub fn new(index: usize) -> Self {
        debug_assert!(index <= 999, "Volume index must be <= 999");
        Self(index)
    }

    /// Returns the volume index as a number.
    pub fn as_number(&self) -> usize {
        self.0
    }

    /// Returns the next volume index, wrapping from 999 back around to 1.
    pub fn next(&self) -> Self {
        // Indexes cycle through 1..=999: modular arithmetic maps 999 -> 1 and
        // any other index to its successor.
        Self::new(self.0 % 999 + 1)
    }
}

View file

@ -0,0 +1,10 @@
// Crate-internal S3 operations, performed over plain anonymous HTTPS.
mod list_objects;
pub(crate) use list_objects::list_objects;

mod download_object;
pub(crate) use download_object::download_object;

// Supporting data types for S3 requests and responses.
mod bucket_list_result;
mod bucket_object;
mod bucket_object_field;
mod downloaded_bucket_object;

View file

@ -0,0 +1,10 @@
use crate::aws::s3::bucket_object::BucketObject;

/// The result of a list objects request.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BucketListResult {
    /// Whether the list of objects is truncated (the bucket contained more
    /// matching keys than were returned in this response).
    pub truncated: bool,
    /// The objects returned by the request.
    pub objects: Vec<BucketObject>,
}

View file

@ -0,0 +1,12 @@
use chrono::{DateTime, Utc};

/// A bucket object returned from an S3 list objects request.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BucketObject {
    /// The key of the object.
    pub key: String,
    /// The last modified time of the object, or `None` if it was absent from
    /// the response or could not be parsed.
    pub last_modified: Option<DateTime<Utc>>,
    /// The size of the object in bytes.
    pub size: u64,
}

View file

@ -0,0 +1,12 @@
/// A field in the S3 list objects response. These are not necessarily part of the same object.
///
/// Used as parser state while walking the response XML to remember which
/// element's character data is currently being read.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BucketObjectField {
    /// Whether the list of objects is truncated. Child of `ListBucketResult`.
    IsTruncated,
    /// The key of a bucket object. Child of `Contents`.
    Key,
    /// The last modified time of a bucket object. Child of `Contents`.
    LastModified,
    /// The size of a bucket object. Child of `Contents`.
    Size,
}

View file

@ -0,0 +1,61 @@
use crate::aws::s3::bucket_object::BucketObject;
use crate::aws::s3::downloaded_bucket_object::DownloadedBucketObject;
use crate::result::aws::AWSError;
use crate::result::aws::AWSError::{S3GetObjectError, S3GetObjectRequestError, S3StreamingError};
use crate::result::Error;
use chrono::{DateTime, Utc};
use log::{debug, trace};
use reqwest::header::HeaderMap;
use reqwest::StatusCode;
/// Downloads an object from S3 and returns its contents.
///
/// Performs an anonymous HTTPS GET against the bucket's public endpoint and
/// returns the object's metadata together with its raw bytes. A 404 is mapped
/// to [`AWSError::S3ObjectNotFoundError`]; any other non-200 status is
/// surfaced as [`S3GetObjectError`] with the response body (if readable).
pub async fn download_object(
    bucket: &str,
    key: &str,
) -> crate::result::Result<DownloadedBucketObject> {
    debug!(
        "Downloading object key \"{}\" from bucket \"{}\"",
        key, bucket
    );
    let path = format!("https://{bucket}.s3.amazonaws.com/{key}");
    let response = reqwest::get(path).await.map_err(S3GetObjectRequestError)?;
    trace!(
        " Object \"{}\" download response status: {}",
        key,
        response.status()
    );

    match response.status() {
        StatusCode::NOT_FOUND => Err(Error::AWS(AWSError::S3ObjectNotFoundError)),
        StatusCode::OK => {
            let last_modified = get_last_modified_header(response.headers());
            trace!(" Object \"{}\" last modified: {:?}", key, last_modified);

            let data = response.bytes().await.map_err(S3StreamingError)?.to_vec();
            trace!(" Object \"{}\" data length: {}", key, data.len());

            Ok(DownloadedBucketObject {
                metadata: BucketObject {
                    key: key.to_string(),
                    last_modified,
                    // Size is derived from the downloaded payload length.
                    size: data.len() as u64,
                },
                data,
            })
        }
        _ => Err(Error::AWS(S3GetObjectError(response.text().await.ok()))),
    }
}
/// Extracts the `Last-Modified` header from a response and returns it as a `DateTime<Utc>`.
///
/// Returns `None` when the header is absent, is not valid visible ASCII, or
/// does not parse as an RFC 2822 date.
fn get_last_modified_header(response_headers: &HeaderMap) -> Option<DateTime<Utc>> {
    let header_value = response_headers.get("Last-Modified")?;
    let date_string = header_value.to_str().ok()?;
    let parsed = DateTime::parse_from_rfc2822(date_string).ok()?;
    Some(parsed.with_timezone(&Utc))
}

View file

@ -0,0 +1,10 @@
use crate::aws::s3::bucket_object::BucketObject;

/// A bucket object downloaded from S3, pairing its metadata with its contents.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct DownloadedBucketObject {
    /// The metadata of the object.
    pub metadata: BucketObject,
    /// The object data.
    pub data: Vec<u8>,
}

View file

@ -0,0 +1,101 @@
use crate::aws::s3::bucket_list_result::BucketListResult;
use crate::aws::s3::bucket_object::BucketObject;
use crate::aws::s3::bucket_object_field::BucketObjectField;
use crate::result::aws::AWSError;
use crate::result::aws::AWSError::S3ListObjectsError;
use chrono::{DateTime, Utc};
use log::{debug, trace, warn};
use xml::reader::XmlEvent;
use xml::EventReader;
/// Lists objects from a S3 bucket with the specified prefix. A maximum number of keys can be
/// specified to limit the number of objects returned, otherwise it will use AWS's default (1000).
///
/// Issues an anonymous HTTPS `list-type=2` request and incrementally parses the
/// XML response, accumulating one [`BucketObject`] per `<Contents>` element.
pub async fn list_objects(
    bucket: &str,
    prefix: &str,
    max_keys: Option<usize>,
) -> crate::result::Result<BucketListResult> {
    // NOTE(review): `prefix` is interpolated without URL-encoding — confirm
    // callers only pass URL-safe prefixes.
    let mut path = format!("https://{bucket}.s3.amazonaws.com?list-type=2&prefix={prefix}");
    if let Some(max_keys) = max_keys {
        path.push_str(&format!("&max-keys={}", max_keys));
    }

    debug!(
        "Listing objects in bucket \"{}\" with prefix \"{}\"",
        bucket, prefix
    );
    let response = reqwest::get(path).await.map_err(S3ListObjectsError)?;
    trace!(" List objects response status: {}", response.status());

    let body = response.text().await.map_err(S3ListObjectsError)?;
    trace!(" List objects response body length: {}", body.len());

    let parser = EventReader::new(body.as_bytes());

    let mut objects = Vec::new();
    let mut truncated = false;

    // Parser state: the `<Contents>` object currently being populated, and the
    // leaf field to which the next character data should be applied.
    let mut object: Option<BucketObject> = None;
    let mut field: Option<BucketObjectField> = None;

    for event in parser {
        match event {
            // Element open: select the target field and/or start a new object.
            Ok(XmlEvent::StartElement { name, .. }) => match name.local_name.as_ref() {
                "IsTruncated" => field = Some(BucketObjectField::IsTruncated),
                "Contents" => {
                    object = Some(BucketObject {
                        key: String::new(),
                        last_modified: None,
                        size: 0,
                    });
                }
                "Key" => field = Some(BucketObjectField::Key),
                "LastModified" => field = Some(BucketObjectField::LastModified),
                "Size" => field = Some(BucketObjectField::Size),
                _ => field = None,
            },
            // Character data: apply it to the currently-selected field.
            Ok(XmlEvent::Characters(chars)) => {
                if let Some(field) = field.as_ref() {
                    // `IsTruncated` lives outside any `<Contents>` element.
                    if field == &BucketObjectField::IsTruncated {
                        truncated = chars == "true";
                        if truncated {
                            trace!(" List objects truncated: {}", truncated);
                        }
                        continue;
                    }

                    // All remaining fields require an in-progress object.
                    let item = object.as_mut().ok_or_else(|| {
                        warn!("Expected item for object field: {:?}", field);
                        AWSError::S3ListObjectsDecodingError
                    })?;

                    match field {
                        // `push_str` tolerates the parser splitting the text
                        // into multiple `Characters` events.
                        BucketObjectField::Key => item.key.push_str(&chars),
                        BucketObjectField::LastModified => {
                            item.last_modified = DateTime::parse_from_rfc3339(&chars)
                                .ok()
                                .map(|date_time| date_time.with_timezone(&Utc));
                        }
                        BucketObjectField::Size => {
                            item.size = chars.parse().map_err(|_| {
                                warn!("Error parsing object size: {}", chars);
                                AWSError::S3ListObjectsDecodingError
                            })?;
                        }
                        _ => {}
                    }
                }
            }
            // Element close: a finished `<Contents>` yields a completed object.
            Ok(XmlEvent::EndElement { name }) => {
                if name.local_name.as_str() == "Contents" {
                    if let Some(item) = object.take() {
                        objects.push(item);
                    }
                }
            }
            _ => {}
        }
    }

    trace!(" List objects found: {}", objects.len());
    Ok(BucketListResult { truncated, objects })
}

View file

@ -0,0 +1,18 @@
//!
//! # nexrad-data
//! Provides structure definitions and decoding functions for NEXRAD Archive II volume files, along
//! with functions for downloading both archival and real-time data from open cloud providers like
//! AWS OpenData.
//!

#![forbid(unsafe_code)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![warn(clippy::correctness)]

/// Access to NEXRAD data hosted in AWS OpenData buckets (optional feature).
#[cfg(feature = "aws")]
pub mod aws;

pub mod volume;

pub mod result;

View file

@ -0,0 +1,80 @@
//!
//! Contains the Result and Error types for NEXRAD operations.
//!
use thiserror::Error as ThisError;

/// Convenience alias for results produced by this crate.
pub type Result<T> = std::result::Result<T, Error>;

/// Top-level error type for nexrad-data operations. Many variants are gated
/// on the optional feature whose code can produce them.
#[derive(ThisError, Debug)]
pub enum Error {
    #[error("data file IO error")]
    FileError(#[from] std::io::Error),
    #[error("file deserialization error")]
    #[cfg(feature = "bincode")]
    DeserializationError(#[from] bincode::Error),
    #[cfg(feature = "bzip2")]
    #[error("error decompressing uncompressed data")]
    UncompressedDataError,
    #[cfg(feature = "aws")]
    #[error(transparent)]
    AWS(#[from] aws::AWSError),
    #[cfg(feature = "nexrad-decode")]
    #[error("error decoding NEXRAD data")]
    Decode(#[from] nexrad_decode::result::Error),
    #[cfg(feature = "nexrad-model")]
    #[error("error in common model")]
    Model(#[from] nexrad_model::result::Error),
    #[cfg(feature = "nexrad-decode")]
    #[error("compressed data cannot be decoded")]
    CompressedDataError,
    #[cfg(feature = "nexrad-decode")]
    #[error("volume missing coverage pattern number")]
    MissingCoveragePattern,
    #[cfg(feature = "bzip2")]
    #[error("ldm record decompression error")]
    DecompressionError(#[from] bzip2::Error),
}
#[cfg(feature = "aws")]
pub mod aws {
    //! Error types for AWS data access.

    use thiserror::Error as ThisError;

    /// Errors produced while listing, downloading, or decoding data from AWS.
    #[derive(ThisError, Debug)]
    pub enum AWSError {
        #[error("unexpected truncated S3 list objects response")]
        TruncatedListObjectsResponse,
        #[error("error decoding date/time")]
        DateTimeError(String),
        #[error("invalid radar site identifier")]
        InvalidSiteIdentifier(String),
        #[error("chunk data in unrecognized format")]
        UnrecognizedChunkFormat,
        #[error("unrecognized chunk date time")]
        UnrecognizedChunkDateTime(String),
        #[error("unrecognized chunk sequence")]
        UnrecognizedChunkSequence(String),
        #[error("unrecognized chunk type")]
        UnrecognizedChunkType(Option<char>),
        #[error("error listing AWS S3 objects")]
        S3ListObjectsError(reqwest::Error),
        #[error("error requesting AWS S3 object")]
        S3GetObjectRequestError(reqwest::Error),
        #[error("error getting AWS S3 object")]
        S3GetObjectError(Option<String>),
        #[error("AWS S3 object not found")]
        S3ObjectNotFoundError,
        #[error("error streaming/downloading AWS S3 object")]
        S3StreamingError(reqwest::Error),
        #[error("failed to locate latest volume")]
        LatestVolumeNotFound,
        #[error("a chunk was not found as expected")]
        ExpectedChunkNotFound,
        #[error("error sending chunk to receiver")]
        PollingAsyncError,
        #[error("failed to determine next chunk")]
        FailedToDetermineNextChunk,
        #[error("error decoding S3 list objects response")]
        S3ListObjectsDecodingError,
    }
}

View file

@ -0,0 +1,25 @@
//!
//! Model definitions and decompression and decoding logic for NEXRAD Level II radar data volumes.
//!
//! Archival NEXRAD weather radar data is distributed using an archive format built atop Unidata's
//! ["Local Data Manager" (or LDM)](https://www.unidata.ucar.edu/software/ldm/) system. Archive
//! files called "volumes" contain NEXRAD Level II radar data and are composed of LDM records. They
//! start with a "volume header record" that provides basic metadata about the radar site and
//! collection time followed by a series of compressed records that contain radar messages with
//! data.
//!
//! The document "Interface Control Document for the Archive II/User" 2620010H (build 19.0 at
//! writing) describes this archive format in detail, particularly in section 7 "Archive II
//! Application Layer".
//!

mod file;
pub use file::*;

mod header;
pub use header::*;

mod record;
pub use record::*;

// Internal date/time helpers shared by the volume types.
mod util;
View file

@ -0,0 +1,81 @@
use crate::volume::{split_compressed_records, Header, Record};
use std::fmt::Debug;
/// A NEXRAD Archive II volume data file.
///
/// Wraps the file's raw encoded bytes; accessors decode the header and split
/// out LDM records on demand.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct File(Vec<u8>);
impl File {
    /// Creates a new Archive II volume file with the provided data.
    pub fn new(data: Vec<u8>) -> Self {
        Self(data)
    }

    /// The file's encoded and compressed data.
    pub fn data(&self) -> &Vec<u8> {
        &self.0
    }

    /// The file's decoded Archive II volume header.
    #[cfg(all(feature = "serde", feature = "bincode"))]
    pub fn header(&self) -> crate::result::Result<Header> {
        Header::deserialize(&mut self.0.as_slice())
    }

    /// The file's LDM records.
    ///
    /// Records begin immediately after the fixed-size volume header.
    pub fn records(&self) -> Vec<Record> {
        split_compressed_records(&self.0[size_of::<Header>()..])
    }

    /// Decodes this volume file into a common model scan containing sweeps and radials with moment
    /// data.
    #[cfg(all(feature = "nexrad-model", feature = "decode"))]
    pub fn scan(&self) -> crate::result::Result<nexrad_model::data::Scan> {
        use crate::result::Error;
        use nexrad_decode::messages::MessageContents;
        use nexrad_model::data::{Scan, Sweep};

        let mut coverage_pattern_number = None;

        let mut radials = Vec::new();
        for mut record in self.records() {
            // Records are stored compressed; decompress before decoding.
            if record.compressed() {
                record = record.decompress()?;
            }

            let messages = record.messages()?;
            for message in messages {
                let contents = message.into_contents();
                if let MessageContents::DigitalRadarData(radar_data_message) = contents {
                    // The coverage pattern number is taken from the first
                    // volume data block encountered.
                    if coverage_pattern_number.is_none() {
                        if let Some(volume_block) = &radar_data_message.volume_data_block {
                            coverage_pattern_number =
                                Some(volume_block.volume_coverage_pattern_number);
                        }
                    }

                    radials.push(radar_data_message.into_radial()?);
                }
            }
        }

        Ok(Scan::new(
            coverage_pattern_number.ok_or(Error::MissingCoveragePattern)?,
            Sweep::from_radials(radials),
        ))
    }
}
impl Debug for File {
    /// Summarizes the file without dumping raw bytes; decoded fields are only
    /// included when the corresponding features are enabled in this build.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("File");
        debug.field("data.len()", &self.data().len());

        #[cfg(all(feature = "serde", feature = "bincode"))]
        debug.field("header", &self.header());

        #[cfg(all(feature = "nexrad-model", feature = "decode"))]
        debug.field("records.len()", &self.records().len());

        debug.finish()
    }
}

View file

@ -0,0 +1,92 @@
use crate::volume::util::get_datetime;
use chrono::{DateTime, Duration, Utc};
use std::fmt;
use std::fmt::{Debug, Formatter};
/// Header for an Archive II volume file containing metadata about the radar data. This header is
/// located at the beginning of the file.
#[repr(C)]
#[derive(Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
pub struct Header {
    /// The tape's filename which indicates the version of the data. Name is in the format
    /// `AR2V0 0xx.` where `xx` indicates the version of the data.
    ///
    /// Versions:
    ///   02 = Super Resolution disabled at the RDA (pre RDA Build 12.0)
    ///   03 = Super Resolution (pre RDA Build 12.0)
    ///   04 = Recombined Super Resolution
    ///   05 = Super Resolution disabled at the RDA (RDA Build 12.0 and later)
    ///   06 = Super Resolution (RDA Build 12.0 and later)
    ///   07 = Recombined Super Resolution (RDA Build 12.0 and later)
    /// NOTE: Dual-pol data introduced in RDA Build 12.0
    tape_filename: [u8; 9],

    /// Sequential number assigned to each volume of radar data in the queue, rolling over to 001
    /// after 999.
    extension_number: [u8; 3],

    /// This volume's date represented as a count of days since 1 January 1970 00:00 GMT. It is
    /// also referred-to as a "modified Julian date" where it is the Julian date - 2440586.5.
    ///
    /// NOTE(review): `Header::date_time` maps a value of 1 to 1970-01-01 (see
    /// `get_datetime`, which subtracts one day) — confirm the intended epoch.
    date: u32,

    /// Milliseconds past midnight, GMT.
    time: u32,

    /// The ICAO identifier of the radar site.
    icao_of_radar: [u8; 4],
}
impl Header {
    /// Deserializes an Archive II header from the provided reader.
    ///
    /// Fields are decoded big-endian with fixed-size integer encoding, matching
    /// the on-disk layout.
    #[cfg(all(feature = "serde", feature = "bincode"))]
    pub fn deserialize<R: std::io::Read>(reader: &mut R) -> crate::result::Result<Self> {
        use bincode::{DefaultOptions, Options};
        Ok(DefaultOptions::new()
            .with_fixint_encoding()
            .with_big_endian()
            .deserialize_from(reader.by_ref())?)
    }

    /// The tape's filename which indicates the version of the data. Name is in the format
    /// `AR2V0 0xx.` where `xx` indicates the version of the data.
    ///
    /// Versions:
    ///   02 = Super Resolution disabled at the RDA (pre RDA Build 12.0)
    ///   03 = Super Resolution (pre RDA Build 12.0)
    ///   04 = Recombined Super Resolution
    ///   05 = Super Resolution disabled at the RDA (RDA Build 12.0 and later)
    ///   06 = Super Resolution (RDA Build 12.0 and later)
    ///   07 = Recombined Super Resolution (RDA Build 12.0 and later)
    /// NOTE: Dual-pol data introduced in RDA Build 12.0
    pub fn tape_filename(&self) -> Option<String> {
        String::from_utf8(self.tape_filename.to_vec()).ok()
    }

    /// Sequential number assigned to each volume of radar data in the queue, rolling over to 001
    /// after 999.
    pub fn extension_number(&self) -> Option<String> {
        String::from_utf8(self.extension_number.to_vec()).ok()
    }

    /// Returns the date and time of the volume.
    pub fn date_time(&self) -> Option<DateTime<Utc>> {
        // NOTE(review): `date` is stored as `u32` but silently truncated to
        // `u16` here (`get_datetime` only accepts `u16`), limiting the range
        // to ~179 years past 1970 — confirm this is acceptable.
        get_datetime(self.date as u16, Duration::milliseconds(self.time as i64))
    }

    /// The ICAO identifier of the radar site.
    pub fn icao_of_radar(&self) -> Option<String> {
        String::from_utf8(self.icao_of_radar.to_vec()).ok()
    }
}
impl Debug for Header {
    /// Formats the header using its decoded accessor values rather than the
    /// raw on-disk byte arrays.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut state = f.debug_struct("Header");
        state.field("tape_filename", &self.tape_filename());
        state.field("extension_number", &self.extension_number());
        state.field("date_time", &self.date_time());
        state.field("icao_of_radar", &self.icao_of_radar());
        state.finish()
    }
}

View file

@ -0,0 +1,132 @@
use std::fmt::Debug;
/// Backing storage for a [`Record`]: either a slice borrowed from a larger
/// buffer or an owned byte vector (e.g. produced by decompression).
#[derive(Clone, PartialEq, Eq, Hash)]
enum RecordData<'a> {
    Borrowed(&'a [u8]),
    Owned(Vec<u8>),
}
impl Debug for RecordData<'_> {
    /// Renders only the variant name and payload length, avoiding dumping the
    /// raw bytes.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            RecordData::Borrowed(data) => write!(f, "RecordData::Borrowed({} bytes)", data.len()),
            RecordData::Owned(data) => write!(f, "RecordData::Owned({} bytes)", data.len()),
        }
    }
}
/// Represents a single LDM record with its data which may be compressed.
///
/// The Unidata Local Data Manager (LDM) is a data distribution system used by the NWS to distribute
/// NEXRAD archival radar data. A NEXRAD "Archive II" file starts with an
/// [crate::volume::Header] followed by a series of compressed LDM records, each
/// containing messages with radar data.
///
/// The lifetime parameter allows a record to either borrow its bytes from a
/// larger buffer (see [`Record::from_slice`]) or own them (see [`Record::new`]).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Record<'a>(RecordData<'a>);
impl<'a> Record<'a> {
    /// Creates a new LDM record with the provided data.
    pub fn new(data: Vec<u8>) -> Self {
        Record(RecordData::Owned(data))
    }

    /// Creates a new LDM record with the provided data slice.
    pub fn from_slice(data: &'a [u8]) -> Self {
        Record(RecordData::Borrowed(data))
    }

    /// The data contained in this LDM record.
    pub fn data(&self) -> &[u8] {
        match &self.0 {
            RecordData::Borrowed(data) => data,
            RecordData::Owned(data) => data,
        }
    }

    /// Whether this LDM record's data is compressed.
    ///
    /// Checks for the bzip2 magic bytes ("BZ") immediately after the four-byte
    /// record size prefix.
    pub fn compressed(&self) -> bool {
        self.data().len() >= 6 && self.data()[4..6].as_ref() == b"BZ"
    }

    /// Decompresses this LDM record's data.
    ///
    /// Returns `UncompressedDataError` if the record is not bzip2-compressed.
    #[cfg(feature = "bzip2")]
    pub fn decompress<'b>(&self) -> crate::result::Result<Record<'b>> {
        use crate::result::Error;
        use bzip2::read::BzDecoder;
        use std::io::Read;

        if !self.compressed() {
            return Err(Error::UncompressedDataError);
        }

        // Skip the four-byte record size prefix
        let data = self.data().split_at(4).1;

        let mut decompressed_data = Vec::new();
        BzDecoder::new(data).read_to_end(&mut decompressed_data)?;
        Ok(Record::new(decompressed_data))
    }

    /// Decodes the NEXRAD level II messages contained in this LDM record.
    ///
    /// The record must already be decompressed; returns `CompressedDataError`
    /// otherwise.
    #[cfg(feature = "nexrad-decode")]
    pub fn messages(&self) -> crate::result::Result<Vec<nexrad_decode::messages::Message>> {
        use crate::result::Error;
        use nexrad_decode::messages::decode_messages;
        use std::io::Cursor;

        if self.compressed() {
            return Err(Error::CompressedDataError);
        }

        let mut reader = Cursor::new(self.data());
        Ok(decode_messages(&mut reader)?)
    }
}
impl Debug for Record<'_> {
    /// Summarizes the record (length, storage kind, compression flag) without
    /// dumping raw bytes.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("Record");
        debug.field("data.len()", &self.data().len());

        debug.field(
            "borrowed",
            match &self.0 {
                RecordData::Borrowed(_) => &true,
                RecordData::Owned(_) => &false,
            },
        );

        debug.field("compressed", &self.compressed());

        // NOTE(review): gated on feature "decode" while `messages()` is gated
        // on "nexrad-decode" — confirm the feature names are consistent, else
        // this field can never compile/appear.
        #[cfg(feature = "decode")]
        debug.field(
            "messages.len()",
            &self.messages().map(|messages| messages.len()),
        );

        debug.finish()
    }
}
/// Splits compressed LDM record data into individual records.
///
/// Each record is prefixed with a four-byte big-endian size (the value may be
/// stored negated; its magnitude is the size). The returned records retain the
/// size prefix — [`Record::decompress`] skips it when decompressing.
///
/// Truncated trailing data (an incomplete size prefix or a record body shorter
/// than its declared size) terminates splitting instead of panicking.
pub fn split_compressed_records(data: &[u8]) -> Vec<Record> {
    let mut records = Vec::new();

    let mut position = 0;
    while position < data.len() {
        // Read the four-byte size prefix; stop cleanly if fewer than four
        // bytes remain rather than panicking on an out-of-bounds slice.
        let Some(size_bytes) = data.get(position..position + 4) else {
            break;
        };

        let mut record_size = [0; 4];
        record_size.copy_from_slice(size_bytes);
        // A negated size marks the final record; the magnitude is the size.
        let record_size = i32::from_be_bytes(record_size).unsigned_abs() as usize;

        // The record spans the prefix plus `record_size` bytes of payload;
        // stop if the declared payload runs past the end of the buffer.
        let end = position + record_size + 4;
        let Some(record_data) = data.get(position..end) else {
            break;
        };

        records.push(Record::from_slice(record_data));
        position = end;
    }

    records
}

View file

@ -0,0 +1,17 @@
use chrono::{DateTime, Duration, NaiveDate, NaiveDateTime, NaiveTime, Utc};
/// Given a "modified" Julian date (date count since 1/1/1970) and a count of milliseconds since
/// midnight on that date, return an appropriate DateTime.
pub(crate) fn get_datetime(
modified_julian_date: u16,
past_midnight: Duration,
) -> Option<DateTime<Utc>> {
let count_start = NaiveDate::from_ymd_opt(1970, 1, 1)?;
let date = count_start + Duration::days(modified_julian_date as i64 - 1);
let time = NaiveTime::from_num_seconds_from_midnight_opt(0, 0)? + past_midnight;
Some(DateTime::from_naive_utc_and_offset(
NaiveDateTime::new(date, time),
Utc,
))
}

View file

@ -0,0 +1,30 @@
[package]
name = "nexrad-decode"
version = "0.1.1"
description = "Decoding functions and models for NEXRAD weather radar data."
authors = ["Daniel Way <contact@danieldway.com>"]
repository = "https://github.com/danielway/nexrad/nexrad-decode"
license = "MIT"
edition = "2021"
[features]
default = ["uom", "nexrad-model"]
[dependencies]
log = { workspace = true }
thiserror = { workspace = true }
bincode = { workspace = true }
serde = { workspace = true }
chrono = { workspace = true }
nexrad-model = { version = "0.1.0", path = "../nexrad-model", optional = true }
uom = { workspace = true, optional = true }
[dev-dependencies]
clap = { workspace = true }
env_logger = { workspace = true }
tokio = { workspace = true }
nexrad-data = { version = "0.2.0", path = "../nexrad-data" }
[[example]]
name = "elevation_angles"
path = "examples/elevation_angles.rs"

View file

@ -0,0 +1,13 @@
# NEXRAD Decode
[![Crates.io](https://img.shields.io/crates/v/nexrad-decode)](https://crates.io/crates/nexrad-decode)
[![Docs.rs](https://docs.rs/nexrad-decode/badge.svg)](https://docs.rs/nexrad-decode)
[![Rust CI](https://github.com/danielway/nexrad/actions/workflows/ci.yml/badge.svg)](https://github.com/danielway/nexrad/actions/workflows/ci.yml)
Decoding functions and models for NEXRAD weather radar data. Decoder and struct definitions are in accordance with
NOAA's WSR-88D Interface Control Document for the RDA/RPG "ICD 2620002W".
## Features
- `nexrad-model`: Provides mappings to a common model for representing NEXRAD radar data.
- `uom`: Use the `uom` crate for type-safe units of measure.

View file

@ -0,0 +1,34 @@
# NEXRAD Decode Examples
This directory contains examples demonstrating how to use the `nexrad-decode` library for working with NEXRAD weather radar data.
## Available Examples
### Elevation Angles CSV Generator
The `elevation_angles.rs` example demonstrates how to extract elevation angle data from a NEXRAD Archive II file and generate a CSV file.
The CSV has:
- Columns representing elevation numbers (elev_1, elev_2, etc.)
- Rows representing azimuth numbers
- Values at the intersection representing the elevation angle for that elevation/azimuth number combination
#### Usage
```bash
# Run with default output filename (elevation_angles.csv)
cargo run --example elevation_angles -- /path/to/archive_file.ar2
# Specify a custom output filename
cargo run --example elevation_angles -- /path/to/archive_file.ar2 --output-path custom_output.csv
```
#### Example Output Format
```
azimuth_num,elev_1,elev_2,elev_3
1,0.52,1.45,2.41
2,0.52,1.46,2.40
...
```
Note: Empty cells in the CSV indicate that no data was available for that particular elevation/azimuth number combination.

View file

@ -0,0 +1,126 @@
use clap::Parser;
use log::{debug, info, LevelFilter};
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Write};
// Command-line arguments for the elevation-angles CSV example.
// (Field doc comments below double as clap help text — do not edit casually.)
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Path to the Archive II file to process
    #[arg(required = true)]
    file_path: String,

    /// Path to save the output CSV file (defaults to 'elevation_angles.csv')
    #[arg(default_value = "elevation_angles.csv")]
    output_path: String,
}
/// Reads a NEXRAD Archive II file, extracts the elevation angle reported for
/// each (elevation number, azimuth number) pair from digital radar data
/// messages, and writes them out as a CSV matrix (rows = azimuth numbers,
/// columns = elevation numbers).
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Initialize logging
    env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
        .filter_module("reqwest::connect", LevelFilter::Info)
        .init();

    // Parse command line arguments
    let cli = Cli::parse();
    let file_path = &cli.file_path;
    let output_path = &cli.output_path;

    info!("Processing file: {}", file_path);

    // Read the file
    let mut file =
        File::open(file_path).unwrap_or_else(|_| panic!("Failed to open file: {}", file_path));
    let mut buffer = Vec::new();
    file.read_to_end(&mut buffer)?;

    // Parse the Archive II file
    let volume_file = nexrad_data::volume::File::new(buffer);

    // Create a HashMap to store elevation angles by elevation and azimuth numbers
    // Key: (elevation_number, azimuth_number), Value: elevation_angle
    let mut elevation_angles: HashMap<(u8, u16), f32> = HashMap::new();

    // Maximum elevation number and azimuth number for CSV dimensions
    let mut max_elevation_num = 0;
    let mut max_azimuth_num = 0;

    // Process all records in the file
    for mut record in volume_file.records() {
        debug!("Processing record...");
        if record.compressed() {
            debug!("Decompressing LDM record...");
            record = record.decompress().expect("Failed to decompress record");
        }

        // Extract messages from the record
        let messages = record.messages()?;

        // Process each message to extract elevation angles
        for message in messages {
            // Check if the message is a Digital Radar Data message
            if let nexrad_decode::messages::MessageContents::DigitalRadarData(digital_data) =
                message.contents()
            {
                // Access header information where the elevation data is stored
                let header = &digital_data.header;
                let elevation_num = header.elevation_number;
                let azimuth_num = header.azimuth_number;
                let elevation_angle = header.elevation_angle;

                // Update max values for dimensions
                if elevation_num > max_elevation_num {
                    max_elevation_num = elevation_num;
                }
                if azimuth_num > max_azimuth_num {
                    max_azimuth_num = azimuth_num;
                }

                // Store elevation angle information; a later message for the
                // same (elevation, azimuth) pair overwrites an earlier one
                elevation_angles.insert((elevation_num, azimuth_num), elevation_angle);

                debug!(
                    "Elevation: {}, Azimuth Number: {}, Elevation Angle: {}",
                    elevation_num, azimuth_num, elevation_angle
                );
            }
        }
    }

    info!(
        "Found elevation angles for {} elevation-azimuth combinations",
        elevation_angles.len()
    );
    info!("Maximum elevation number: {}", max_elevation_num);
    info!("Maximum azimuth number: {}", max_azimuth_num);

    // Create and write the CSV file
    let mut output_file = File::create(output_path)?;

    // Write header with elevation numbers
    write!(output_file, "azimuth_num")?;
    for elev in 1..=max_elevation_num {
        write!(output_file, ",elev_{}", elev)?;
    }
    writeln!(output_file)?;

    // Write rows for each azimuth number
    for azimuth_num in 1..=max_azimuth_num {
        write!(output_file, "{}", azimuth_num)?;

        // For each elevation in this azimuth number, write the elevation angle
        for elev_num in 1..=max_elevation_num {
            match elevation_angles.get(&(elev_num, azimuth_num)) {
                Some(angle) => write!(output_file, ",{:.2}", angle)?,
                None => write!(output_file, ",")?, // Empty value if no data for this combination
            }
        }
        writeln!(output_file)?;
    }

    info!("CSV file created successfully: {}", output_path);

    Ok(())
}

View file

@ -0,0 +1,21 @@
//!
//! # nexrad-decode
//! Decoding functions and models for NEXRAD weather radar data. Decoder and struct definitions are
//! in accordance with NOAA's WSR-88D Interface Control Document for Archive II "ICD 2620010H"
//! build 19.0.
//!
//! Optionally, the `nexrad-model` feature provides mappings to a common model for representing
//! radar data. The `uom` feature can also be used to provide type-safe units of measure.
//!

#![forbid(unsafe_code)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![warn(clippy::correctness)]
#![allow(clippy::too_many_arguments)]

pub mod messages;
pub mod result;
pub mod summarize;
// Internal deserialization helpers shared by the decoders.
mod util;
View file

@ -0,0 +1,81 @@
// One public module per supported NEXRAD Level II message family.
pub mod clutter_filter_map;
pub mod digital_radar_data;
pub mod message_header;
pub mod rda_status_data;
pub mod volume_coverage_pattern;

mod message_type;
pub use message_type::MessageType;

mod message;
pub use message::{Message, MessageContents};

// Shared definitions and numeric type aliases used across message modules.
mod definitions;
mod primitive_aliases;
use crate::messages::digital_radar_data::decode_digital_radar_data;
use crate::messages::message_header::MessageHeader;
use crate::messages::rda_status_data::decode_rda_status_message;
use crate::messages::volume_coverage_pattern::decode_volume_coverage_pattern;
use crate::result::Result;
use crate::util::deserialize;
use log::trace;
use std::io::{Read, Seek};
/// Decode a NEXRAD Level II message header from a reader.
pub fn decode_message_header<R: Read>(reader: &mut R) -> Result<MessageHeader> {
    deserialize(reader)
}
/// Decode a series of NEXRAD Level II messages from a reader.
///
/// Decoding continues until a message header can no longer be read (e.g. end
/// of input); trailing bytes that do not form a header are silently ignored,
/// while a failure decoding message *contents* is propagated as an error.
pub fn decode_messages<R: Read + Seek>(reader: &mut R) -> Result<Vec<Message>> {
    trace!("Decoding messages");
    let mut messages = Vec::new();
    while let Ok(header) = decode_message_header(reader) {
        let contents = decode_message_contents(reader, header.message_type())?;
        messages.push(Message::unsegmented(header, contents));
    }

    trace!(
        "Decoded {} messages ending at {:?}",
        messages.len(),
        reader.stream_position()
    );

    Ok(messages)
}
/// Decode the content of a NEXRAD Level II message of the specified type from a reader.
pub fn decode_message_contents<R: Read + Seek>(
    reader: &mut R,
    message_type: MessageType,
) -> Result<MessageContents> {
    let position = reader.stream_position();
    trace!("Decoding message type {:?} at {:?}", message_type, position);

    // Digital radar data messages are variable-length and are decoded directly from the
    // reader rather than from a fixed-size segment buffer.
    if message_type == MessageType::RDADigitalRadarDataGenericFormat {
        let radar_data_message = decode_digital_radar_data(reader)?;
        return Ok(MessageContents::DigitalRadarData(Box::new(
            radar_data_message,
        )));
    }

    // All other message types occupy a fixed 2432-byte segment, minus the header already read.
    let mut message_buffer = [0; 2432 - size_of::<MessageHeader>()];
    reader.read_exact(&mut message_buffer)?;
    let contents_reader = &mut message_buffer.as_ref();

    match message_type {
        MessageType::RDAStatusData => Ok(MessageContents::RDAStatusData(Box::new(
            decode_rda_status_message(contents_reader)?,
        ))),
        MessageType::RDAVolumeCoveragePattern => Ok(MessageContents::VolumeCoveragePattern(
            Box::new(decode_volume_coverage_pattern(contents_reader)?),
        )),
        // TODO: this message type is segmented which is not supported well currently
        // MessageType::RDAClutterFilterMap => {
        //     Message::ClutterFilterMap(Box::new(decode_clutter_filter_map(message_reader)?))
        // }
        _ => Ok(MessageContents::Other),
    }
}

View file

@ -0,0 +1,55 @@
//!
//! Message type 15 "Clutter Filter Map" contains information about clutter filter maps that are
//! used to filter clutter from radar products. The clutter filter map is a 3D array of elevation,
//! azimuth, and range zones that define the clutter filter behavior for radar products.
//!
mod header;
pub use header::Header;
mod message;
pub use message::Message;
mod elevation_segment;
pub use elevation_segment::ElevationSegment;
mod azimuth_segment;
pub use azimuth_segment::{AzimuthSegment, AzimuthSegmentHeader};
mod range_zone;
pub use range_zone::RangeZone;
mod definitions;
pub use definitions::*;
use crate::result::Result;
use crate::util::deserialize;
use std::io::Read;
/// Decodes a clutter filter map message type 15 from the provided reader.
///
/// The map consists of a header followed by `elevation_segment_count` elevation segments,
/// each containing one azimuth segment per degree (360 total), each of which carries a
/// variable number of range zones.
pub fn decode_clutter_filter_map<R: Read>(reader: &mut R) -> Result<Message> {
    let header: Header = deserialize(reader)?;
    let segment_count = header.elevation_segment_count as u8;
    let mut message = Message::new(header);

    for segment_number in 0..segment_count {
        let mut elevation_segment = ElevationSegment::new(segment_number);

        for azimuth_number in 0..360 {
            let azimuth_header: AzimuthSegmentHeader = deserialize(reader)?;
            let zone_count = azimuth_header.range_zone_count as usize;

            let mut azimuth_segment = AzimuthSegment::new(azimuth_header, azimuth_number);
            for _ in 0..zone_count {
                let zone = deserialize(reader)?;
                azimuth_segment.range_zones.push(zone);
            }

            elevation_segment.azimuth_segments.push(azimuth_segment);
        }

        message.elevation_segments.push(elevation_segment);
    }

    Ok(message)
}

View file

@ -0,0 +1,36 @@
use crate::messages::clutter_filter_map::range_zone::RangeZone;
use crate::messages::primitive_aliases::Integer2;
use serde::Deserialize;
/// Header information for an azimuth segment to be read directly from the Archive II file.
///
/// Fields are deserialized in declaration order, so this layout must match the on-file
/// encoding consumed by the binary deserializer.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct AzimuthSegmentHeader {
    /// The number of range zones defined in this azimuth segment, from 1 to 20.
    pub range_zone_count: Integer2,
}
/// A segment of the clutter filter map for a specific elevation and azimuth containing range zones.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AzimuthSegment {
    /// Header information for this azimuth segment. This is the portion of an azimuth segment that
    /// is read directly from the Archive II file.
    pub header: AzimuthSegmentHeader,
    /// This azimuth segment's number from 0 to 359. Each azimuth segment subtends a range of 1
    /// degree, e.g.: 0 degrees <= azimuth segment 0 < 1 degree.
    pub azimuth_segment: Integer2,
    /// The range zones defined in this azimuth segment. Populated by the decoder with
    /// `header.range_zone_count` entries.
    pub range_zones: Vec<RangeZone>,
}
impl AzimuthSegment {
    /// Builds an azimuth segment from its decoded header, pre-sizing the range zone list
    /// to the zone count advertised by the header.
    pub(crate) fn new(header: AzimuthSegmentHeader, azimuth_segment: Integer2) -> Self {
        let capacity = header.range_zone_count as usize;
        Self {
            header,
            azimuth_segment,
            range_zones: Vec::with_capacity(capacity),
        }
    }
}

View file

@ -0,0 +1,10 @@
/// Control codes indicating behavior of the clutter filter map for a range segment.
///
/// Decoded from the raw `RangeZone::op_code` value (0, 1, or 2 respectively).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum OpCode {
    /// The clutter filter is bypassed for the range segment. (raw code 0)
    BypassFilter,
    /// The bypass map is in control for the range segment. (raw code 1)
    BypassMapInControl,
    /// The clutter filter is being forced for the range segment. (raw code 2)
    ForceFilter,
}

View file

@ -0,0 +1,23 @@
use crate::messages::clutter_filter_map::azimuth_segment::AzimuthSegment;
use crate::messages::primitive_aliases::Integer1;
/// A segment of the clutter filter map for a specific elevation containing azimuth segments.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ElevationSegment {
    /// This elevation segment's number from 1 to 5 (oftentimes there are only 2) in increasing
    /// elevation from the ground.
    pub elevation_segment_number: Integer1,
    /// The azimuth segments defined in this elevation segment. The decoder populates one
    /// segment per degree (360 total).
    pub azimuth_segments: Vec<AzimuthSegment>,
}
impl ElevationSegment {
    /// Creates an empty elevation segment sized for a full 360 degrees of azimuth segments.
    pub(crate) fn new(elevation_segment_number: Integer1) -> Self {
        let azimuth_segments = Vec::with_capacity(360);
        Self {
            azimuth_segments,
            elevation_segment_number,
        }
    }
}

View file

@ -0,0 +1,40 @@
use crate::messages::primitive_aliases::Integer2;
use crate::util::get_datetime;
use chrono::{DateTime, Duration, Utc};
use serde::Deserialize;
use std::fmt::Debug;
/// Header information for a clutter filter map to be read directly from the Archive II file.
#[derive(Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct Header {
    /// The date the clutter filter map was generated represented as a count of days since 1 January
    /// 1970 00:00 GMT. It is also referred-to as a "modified Julian date" where it is the Julian
    /// date - 2440586.5.
    pub map_generation_date: Integer2,
    /// The time the clutter filter map was generated in minutes past midnight, GMT.
    pub map_generation_time: Integer2,
    /// The number of elevation segments defined in this clutter filter map. There may be 1 to 5,
    /// though there are typically 2. They will follow this header in order of increasing elevation.
    pub elevation_segment_count: Integer2,
}
impl Header {
    /// The date and time the clutter filter map was generated, if the date/time fields
    /// form a representable timestamp.
    pub fn date_time(&self) -> Option<DateTime<Utc>> {
        let minutes_past_midnight = Duration::minutes(self.map_generation_time as i64);
        get_datetime(self.map_generation_date, minutes_past_midnight)
    }
}
impl Debug for Header {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Surface the decoded timestamp rather than the raw date/time fields.
        let mut debug = f.debug_struct("Header");
        debug.field("map_generation_date_time", &self.date_time());
        debug.field("elevation_segment_count", &self.elevation_segment_count);
        debug.finish()
    }
}

View file

@ -0,0 +1,24 @@
use crate::messages::clutter_filter_map::elevation_segment::ElevationSegment;
use crate::messages::clutter_filter_map::header::Header;
use std::fmt::Debug;
/// A clutter filter map describing elevations, azimuths, and ranges containing clutter to be
/// filtered from radar products.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Message {
    /// Decoded header information for this clutter filter map.
    pub header: Header,
    /// The elevation segments defined in this clutter filter map, decoded in order of
    /// increasing elevation.
    pub elevation_segments: Vec<ElevationSegment>,
}
impl Message {
    /// Creates an empty clutter filter map sized for the segment count in the decoded header.
    pub(crate) fn new(header: Header) -> Self {
        let capacity = header.elevation_segment_count as usize;
        Self {
            header,
            elevation_segments: Vec::with_capacity(capacity),
        }
    }
}

View file

@ -0,0 +1,60 @@
use crate::messages::primitive_aliases::{Code2, Integer2};
use serde::Deserialize;
use std::fmt::Debug;
use crate::messages::clutter_filter_map::OpCode;
#[cfg(feature = "uom")]
use uom::si::f64::Length;
#[cfg(feature = "uom")]
use uom::si::length::kilometer;
/// Defines a range segment of a particular elevation and azimuth with an operation type describing
/// the clutter filter map behavior for the segment.
#[derive(Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct RangeZone {
    /// Operation code for the range zone. Raw value (0, 1, or 2); see `op_code()` for the
    /// decoded [`OpCode`] variant.
    pub op_code: Code2,
    /// Stop range per zone in km. There are 20 possible zones and not all need to be defined. The
    /// last zone must have an end range of 511km.
    pub end_range: Integer2,
}
impl RangeZone {
    /// Operation code for the range zone.
    ///
    /// # Panics
    /// Panics if the raw code is not 0, 1, or 2.
    pub fn op_code(&self) -> OpCode {
        match self.op_code {
            0 => OpCode::BypassFilter,
            1 => OpCode::BypassMapInControl,
            2 => OpCode::ForceFilter,
            invalid => panic!("Invalid OpCode: {}", invalid),
        }
    }

    /// Stop range per zone. There are 20 possible zones and not all need to be defined. The last
    /// zone must have an end range of 511km.
    #[cfg(feature = "uom")]
    pub fn end_range(&self) -> Length {
        let kilometers = self.end_range as f64;
        Length::new::<kilometer>(kilometers)
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for RangeZone {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("RangeZone");
        debug.field("op_code", &self.op_code);
        debug.field("end_range", &self.end_range);
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for RangeZone {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // With `uom` enabled, render the typed length rather than the raw integer.
        let mut debug = f.debug_struct("RangeZone");
        debug.field("op_code", &self.op_code);
        debug.field("end_range", &self.end_range());
        debug.finish()
    }
}

View file

@ -0,0 +1,10 @@
/// The possible RDA redundant channels.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum RedundantChannel {
    /// Legacy RDA, single (non-redundant) channel.
    LegacySingleChannel,
    /// Legacy RDA, redundant channel 1.
    LegacyRedundantChannel1,
    /// Legacy RDA, redundant channel 2.
    LegacyRedundantChannel2,
    /// ORDA, single (non-redundant) channel.
    ORDASingleChannel,
    /// ORDA, redundant channel 1.
    ORDARedundantChannel1,
    /// ORDA, redundant channel 2.
    ORDARedundantChannel2,
}

View file

@ -0,0 +1,114 @@
//!
//! Message type 31 "Digital Radar Data" consists of base data information such as reflectivity,
//! mean radial velocity, spectrum width, differential reflectivity, differential phase, correlation
//! coefficient, azimuth angle, elevation angle, cut type, scanning strategy, and calibration
//! parameters. The frequency and volume of the message is dependent on the scanning strategy and
//! the type of data associated with that strategy.
//!
mod header;
pub use header::Header;
mod message;
pub use message::Message;
mod data_block_id;
pub use data_block_id::DataBlockId;
mod volume_data_block;
pub use volume_data_block::VolumeDataBlock;
mod generic_data_block;
pub use generic_data_block::{GenericDataBlock, GenericDataBlockHeader};
mod elevation_data_block;
pub use elevation_data_block::ElevationDataBlock;
mod radial_data_block;
pub use radial_data_block::RadialDataBlock;
mod definitions;
pub use definitions::*;
mod spot_blanking_status;
pub use spot_blanking_status::*;
mod pointers;
pub use pointers::*;
use crate::result::{Error, Result};
use crate::util::deserialize;
use std::io::{Read, Seek, SeekFrom};
/// Decodes a digital radar data message type 31 from the provided reader.
pub fn decode_digital_radar_data<R: Read + Seek>(reader: &mut R) -> Result<Message> {
let start_position = reader.stream_position()?;
let header = deserialize(reader)?;
let mut message = Message::new(header);
let pointers_space = message.header.data_block_count as usize * size_of::<u32>();
let mut pointers_raw = vec![0; pointers_space];
reader.read_exact(&mut pointers_raw)?;
let pointers = pointers_raw
.chunks_exact(size_of::<u32>())
.map(|v| {
v.try_into()
.map_err(|_| Error::DecodingError("message pointers".to_string()))
.map(u32::from_be_bytes)
})
.collect::<Result<Vec<_>>>()?;
for pointer in pointers {
reader.seek(SeekFrom::Start(start_position + pointer as u64))?;
let data_block_id: DataBlockId = deserialize(reader)?;
reader.seek(SeekFrom::Current(-4))?;
match data_block_id.data_block_name().as_str() {
"VOL" => {
message.volume_data_block = Some(deserialize(reader)?);
}
"ELV" => {
message.elevation_data_block = Some(deserialize(reader)?);
}
"RAD" => {
message.radial_data_block = Some(deserialize(reader)?);
}
_ => {
let generic_header: GenericDataBlockHeader = deserialize(reader)?;
let mut generic_data_block = GenericDataBlock::new(generic_header);
reader.read_exact(&mut generic_data_block.encoded_data)?;
match data_block_id.data_block_name().as_str() {
"REF" => {
message.reflectivity_data_block = Some(generic_data_block);
}
"VEL" => {
message.velocity_data_block = Some(generic_data_block);
}
"SW " => {
message.spectrum_width_data_block = Some(generic_data_block);
}
"ZDR" => {
message.differential_reflectivity_data_block = Some(generic_data_block);
}
"PHI" => {
message.differential_phase_data_block = Some(generic_data_block);
}
"RHO" => {
message.correlation_coefficient_data_block = Some(generic_data_block);
}
"CFP" => {
message.specific_diff_phase_data_block = Some(generic_data_block);
}
_ => panic!("Unknown generic data block type: {:?}", data_block_id),
}
}
}
}
Ok(message)
}

View file

@ -0,0 +1,33 @@
use serde::Deserialize;
use std::fmt::Debug;
/// A digital radar data block's identifier.
#[derive(Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct DataBlockId {
    /// Data block type, e.g. "R". A single ASCII byte.
    pub data_block_type: u8,
    /// Data block name, e.g. "VOL". Three ASCII bytes, space-padded (e.g. "SW ").
    pub data_name: [u8; 3],
}
impl DataBlockId {
    /// Data block type, e.g. "R".
    pub fn data_block_type(&self) -> char {
        char::from(self.data_block_type)
    }

    /// Data block name, e.g. "VOL".
    pub fn data_block_name(&self) -> String {
        String::from_utf8_lossy(&self.data_name).into_owned()
    }
}
impl Debug for DataBlockId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render the type byte as a char and the name bytes as a string.
        let mut debug = f.debug_struct("DataBlockId");
        debug.field("data_block_type", &self.data_block_type());
        debug.field("data_block_name", &self.data_block_name());
        debug.finish()
    }
}

View file

@ -0,0 +1,62 @@
use serde::{Deserialize, Serialize};
/// Indicates whether the message is compressed and what type of compression was used.
///
/// Decoded from the raw header code; any value other than 0-2 maps to `FutureUse`.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum CompressionIndicator {
    /// Raw code 0: the message is not compressed.
    Uncompressed,
    /// Raw code 1: compressed using BZIP2.
    CompressedBZIP2,
    /// Raw code 2: compressed using ZLIB.
    CompressedZLIB,
    /// Raw code 3 (and any unrecognized code): reserved for future use.
    FutureUse,
}
/// Possible statuses for a radial describing its position within the larger scan.
///
/// Decoded from the raw radial status code (0-5).
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, Serialize, Deserialize)]
pub enum RadialStatus {
    /// Raw code 0: start of a new elevation.
    ElevationStart,
    /// Raw code 1: intermediate radial data within an elevation.
    IntermediateRadialData,
    /// Raw code 2: end of the elevation.
    ElevationEnd,
    /// Raw code 3: start of the volume scan.
    VolumeScanStart,
    /// Raw code 4: end of the volume scan.
    VolumeScanEnd,
    /// Start of new elevation which is the last in the VCP.
    ElevationStartVCPFinal,
}
/// Flags indicating special control features.
///
/// Decoded from raw code values 0-3.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ControlFlags {
    /// Raw code 0: no special control features.
    None,
    /// Raw code 1: recombined azimuthal radials.
    RecombinedAzimuthalRadials,
    /// Raw code 2: recombined range gates.
    RecombinedRangeGates,
    /// Raw code 3: recombined radials and range gates to legacy resolution.
    RecombinedRadialsAndRangeGatesToLegacyResolution,
}
/// Processing status flags.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ProcessingStatus {
    /// RxR noise. (NOTE(review): meaning inferred from the variant name — confirm against the ICD.)
    RxRNoise,
    /// CBT. (NOTE(review): meaning inferred from the variant name — confirm against the ICD.)
    CBT,
    /// Any other raw processing status value, preserved as-is.
    Other(u16),
}
/// Volume coverage pattern (VCP) definitions.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VolumeCoveragePattern {
    /// Volume coverage pattern 12.
    VCP12,
    /// Volume coverage pattern 31.
    VCP31,
    /// Volume coverage pattern 35.
    VCP35,
    /// Volume coverage pattern 112.
    VCP112,
    /// Volume coverage pattern 212.
    VCP212,
    /// Volume coverage pattern 215.
    VCP215,
}
/// The value for a data moment/radial, gate, and product. The value may be a floating-point number
/// or a special case such as "below threshold" or "range folded".
///
/// Produced by decoding a gate's raw fixed-point value with the data block's scale and offset.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ScaledMomentValue {
    /// The converted floating-point representation of the data moment value for a gate.
    Value(f32),
    /// The value for this gate was below the signal threshold.
    BelowThreshold,
    /// The value for this gate exceeded the maximum unambiguous range.
    RangeFolded,
}

View file

@ -0,0 +1,58 @@
use crate::messages::digital_radar_data::DataBlockId;
use crate::messages::primitive_aliases::{Integer2, Real4, ScaledSInteger2};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::f64::Information;
#[cfg(feature = "uom")]
use uom::si::information::byte;
/// An elevation data block.
#[derive(Clone, PartialEq, Deserialize)]
pub struct ElevationDataBlock {
    /// Data block identifier.
    pub data_block_id: DataBlockId,
    /// Size of data block in bytes.
    pub lrtup: Integer2,
    /// Atmospheric attenuation factor in dB/km.
    pub atmos: ScaledSInteger2,
    /// Scaling constant used by the signal processor for this elevation to calculate reflectivity
    /// in dB.
    pub calibration_constant: Real4,
}
impl ElevationDataBlock {
    /// Size of data block.
    #[cfg(feature = "uom")]
    pub fn lrtup(&self) -> Information {
        let bytes = self.lrtup as f64;
        Information::new::<byte>(bytes)
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for ElevationDataBlock {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("ElevationDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup);
        debug.field("atmos", &self.atmos);
        debug.field("calibration_constant", &self.calibration_constant);
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for ElevationDataBlock {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // With `uom` enabled, render the typed block size rather than the raw integer.
        let mut debug = f.debug_struct("ElevationDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup());
        debug.field("atmos", &self.atmos);
        debug.field("calibration_constant", &self.calibration_constant);
        debug.finish()
    }
}

View file

@ -0,0 +1,227 @@
use crate::messages::digital_radar_data::{ControlFlags, DataBlockId, ScaledMomentValue};
use crate::messages::primitive_aliases::{
Code1, Integer1, Integer2, Integer4, Real4, ScaledInteger2,
};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::f64::{Information, Length};
#[cfg(feature = "uom")]
use uom::si::information::byte;
#[cfg(feature = "uom")]
use uom::si::length::kilometer;
/// A generic data moment block.
#[derive(Clone, PartialEq)]
pub struct GenericDataBlock {
    /// The generic data block's header information.
    pub header: GenericDataBlockHeader,
    /// The generic data block's encoded moment data. Sized as
    /// `number_of_data_moment_gates * data_word_size / 8` bytes.
    pub encoded_data: Vec<u8>,
}
impl GenericDataBlock {
    /// Creates a new generic data moment block from the decoded header, allocating a zeroed
    /// buffer sized to hold every encoded gate value.
    pub(crate) fn new(header: GenericDataBlockHeader) -> Self {
        let word_size_bytes = header.data_word_size as usize / 8;
        let encoded_data_size = header.number_of_data_moment_gates as usize * word_size_bytes;
        Self {
            encoded_data: vec![0; encoded_data_size],
            header,
        }
    }

    /// Raw gate values for this moment/radial ordered in ascending distance from the radar. These
    /// values are stored in a fixed-point representation using the `DataMomentHeader.offset` and
    /// `DataMomentHeader.scale` fields. `decoded_data` provides decoded floating-point values.
    pub fn encoded_values(&self) -> &[u8] {
        &self.encoded_data
    }

    /// Decodes raw moment values from `encoded_data` from their fixed-point representation into
    /// their floating point representation. Additionally, identifies special values such as "below
    /// threshold" and "range folded".
    ///
    /// Each gate's value is `data_word_size` bits wide (8 or 16); multi-byte words are assembled
    /// big-endian. A scale of zero indicates the data are not scaled and are returned as-is.
    pub fn decoded_values(&self) -> Vec<ScaledMomentValue> {
        // Guard against a malformed word size smaller than one byte.
        let word_size_bytes = (self.header.data_word_size as usize / 8).max(1);

        self.encoded_data
            .chunks_exact(word_size_bytes)
            .map(|word| {
                // Reassemble the gate's raw fixed-point value from big-endian bytes. For the
                // common 8-bit case this is just the single byte.
                let raw_value = word
                    .iter()
                    .fold(0u32, |acc, &byte| (acc << 8) | u32::from(byte));

                if self.header.scale == 0.0 {
                    return ScaledMomentValue::Value(raw_value as f32);
                }

                match raw_value {
                    0 => ScaledMomentValue::BelowThreshold,
                    1 => ScaledMomentValue::RangeFolded,
                    _ => ScaledMomentValue::Value(
                        (raw_value as f32 - self.header.offset) / self.header.scale,
                    ),
                }
            })
            .collect()
    }

    /// Get moment data from this generic data block. Note that this will clone the underlying data.
    #[cfg(feature = "nexrad-model")]
    pub fn moment_data(&self) -> nexrad_model::data::MomentData {
        nexrad_model::data::MomentData::from_fixed_point(
            self.header.number_of_data_moment_gates,
            self.header.data_moment_range,
            self.header.data_moment_range_sample_interval,
            self.header.scale,
            self.header.offset,
            self.encoded_data.clone(),
        )
    }

    /// Convert this generic data block into common model moment data, minimizing data copies.
    #[cfg(feature = "nexrad-model")]
    pub fn into_moment_data(self) -> nexrad_model::data::MomentData {
        nexrad_model::data::MomentData::from_fixed_point(
            self.header.number_of_data_moment_gates,
            self.header.data_moment_range,
            self.header.data_moment_range_sample_interval,
            self.header.scale,
            self.header.offset,
            self.encoded_data,
        )
    }
}
impl Debug for GenericDataBlock {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The encoded buffer can be large; show its length instead of its contents.
        let mut debug = f.debug_struct("GenericDataBlock");
        debug.field("header", &self.header);
        debug.field("data", &self.encoded_data.len());
        debug.finish()
    }
}
/// A generic data moment block's decoded header.
#[derive(Clone, PartialEq, Deserialize)]
pub struct GenericDataBlockHeader {
    /// Data block identifier.
    pub data_block_id: DataBlockId,
    /// Reserved.
    pub reserved: Integer4,
    /// Number of data moment gates for current radial, from 0 to 1840.
    pub number_of_data_moment_gates: Integer2,
    /// Range to center of first range gate in 0.000-scaled kilometers.
    pub data_moment_range: ScaledInteger2,
    /// Size of data moment sample interval in 0.000-scaled kilometers from 0.25 to 4.0.
    pub data_moment_range_sample_interval: ScaledInteger2,
    /// Threshold parameter specifying the minimum difference in echo power between two resolution
    /// gates in dB for them to not be labeled as "overlayed".
    pub tover: ScaledInteger2,
    /// Signal-to-noise ratio threshold for valid data from -12 to 20 dB.
    pub snr_threshold: ScaledInteger2,
    /// Flags indicating special control features.
    ///
    /// Flags:
    ///   0 = None
    ///   1 = Recombined azimuthal radials
    ///   2 = Recombined range gates
    ///   3 = Recombined radials and range gates to legacy resolution
    pub control_flags: Code1,
    /// Number of bits (8 or 16) used for storing data for each data moment gate.
    pub data_word_size: Integer1,
    /// Scale factor for converting data moments to floating-point representation. A scale of
    /// zero indicates the gate values are not scaled.
    pub scale: Real4,
    /// Offset value for converting data moments to floating-point representation.
    pub offset: Real4,
}
impl GenericDataBlockHeader {
    /// Range to center of first range gate.
    #[cfg(feature = "uom")]
    pub fn data_moment_range(&self) -> Length {
        let kilometers = self.data_moment_range as f64 * 0.001;
        Length::new::<kilometer>(kilometers)
    }

    /// Size of data moment sample interval.
    #[cfg(feature = "uom")]
    pub fn data_moment_range_sample_interval(&self) -> Length {
        let kilometers = self.data_moment_range_sample_interval as f64 * 0.001;
        Length::new::<kilometer>(kilometers)
    }

    /// Flags indicating special control features.
    ///
    /// # Panics
    /// Panics if the raw flag value is not 0 through 3.
    pub fn control_flags(&self) -> ControlFlags {
        match self.control_flags {
            0 => ControlFlags::None,
            1 => ControlFlags::RecombinedAzimuthalRadials,
            2 => ControlFlags::RecombinedRangeGates,
            3 => ControlFlags::RecombinedRadialsAndRangeGatesToLegacyResolution,
            invalid => panic!("Invalid control flag value: {}", invalid),
        }
    }

    /// Size of the data moment block in bytes.
    #[cfg(feature = "uom")]
    pub fn moment_size(&self) -> Information {
        let bytes = self.number_of_data_moment_gates as f64 * self.data_word_size as f64 / 8.0;
        Information::new::<byte>(bytes)
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for GenericDataBlockHeader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("GenericDataBlockHeader");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("reserved", &self.reserved);
        debug.field(
            "number_of_data_moment_gates",
            &self.number_of_data_moment_gates,
        );
        debug.field("data_moment_range", &self.data_moment_range);
        debug.field(
            "data_moment_range_sample_interval",
            &self.data_moment_range_sample_interval,
        );
        debug.field("tover", &self.tover);
        debug.field("snr_threshold", &self.snr_threshold);
        debug.field("control_flags", &self.control_flags());
        debug.field("data_word_size", &self.data_word_size);
        debug.field("scale", &self.scale);
        debug.field("offset", &self.offset);
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for GenericDataBlockHeader {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // With `uom` enabled, render the typed range values rather than the raw integers.
        let mut debug = f.debug_struct("GenericDataBlockHeader");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("reserved", &self.reserved);
        debug.field(
            "number_of_data_moment_gates",
            &self.number_of_data_moment_gates,
        );
        debug.field("data_moment_range", &self.data_moment_range());
        debug.field(
            "data_moment_range_sample_interval",
            &self.data_moment_range_sample_interval(),
        );
        debug.field("tover", &self.tover);
        debug.field("snr_threshold", &self.snr_threshold);
        debug.field("control_flags", &self.control_flags());
        debug.field("data_word_size", &self.data_word_size);
        debug.field("scale", &self.scale);
        debug.field("offset", &self.offset);
        debug.finish()
    }
}

View file

@ -0,0 +1,234 @@
use crate::messages::digital_radar_data::spot_blanking_status::SpotBlankingStatus;
use crate::messages::digital_radar_data::{CompressionIndicator, RadialStatus};
use crate::messages::primitive_aliases::{
Code1, Integer1, Integer2, Integer4, Real4, ScaledInteger1,
};
use crate::util::get_datetime;
use chrono::{DateTime, Duration, Utc};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::angle::degree;
#[cfg(feature = "uom")]
use uom::si::f64::{Angle, Information};
#[cfg(feature = "uom")]
use uom::si::information::byte;
/// The digital radar data message header block precedes base data information for a particular
/// radial and includes parameters for that radial and information about the following data blocks.
#[derive(Clone, PartialEq, Deserialize)]
pub struct Header {
    /// ICAO radar identifier.
    pub radar_identifier: [u8; 4],
    /// Collection time in milliseconds past midnight, GMT.
    pub time: Integer4,
    /// This message's date represented as a count of days since 1 January 1970 00:00 GMT. It is
    /// also referred-to as a "modified Julian date" where it is the Julian date - 2440586.5.
    pub date: Integer2,
    /// Radial number within the elevation scan. These range up to 720, in 0.5 degree increments.
    pub azimuth_number: Integer2,
    /// Azimuth angle at which the radial was collected in degrees.
    pub azimuth_angle: Real4,
    /// Indicates if the message is compressed and what type of compression was used. This header is
    /// not compressed.
    ///
    /// Values:
    ///   0 = Uncompressed
    ///   1 = Compressed using BZIP2
    ///   2 = Compressed using ZLIB
    ///   3 = Future use
    pub compression_indicator: Code1,
    /// Spare to force halfword alignment. Not meaningful data.
    pub spare: u8,
    /// Uncompressed length of the radial in bytes (including the data header block).
    pub radial_length: Integer2,
    /// Azimuthal spacing between adjacent radials. Note this is the commanded value, not
    /// necessarily the actual spacing.
    ///
    /// Values:
    ///   1 = 0.5 degrees
    ///   2 = 1.0 degrees
    pub azimuth_resolution_spacing: Code1,
    /// The radial's status within the larger scan (e.g. first, last).
    ///
    /// Statuses:
    ///   0 = Start of elevation
    ///   1 = Intermediate radial data
    ///   2 = End of elevation
    ///   3 = Start of volume scan
    ///   4 = End of volume scan
    ///   5 = Start of new elevation which is the last in the VCP
    pub radial_status: Code1,
    /// The radial's elevation number within the volume scan.
    pub elevation_number: Integer1,
    /// The sector number within cut. A value of 0 is only valid for continuous surveillance cuts.
    pub cut_sector_number: Integer1,
    /// The radial's collection elevation angle.
    pub elevation_angle: Real4,
    /// The spot blanking status for the current radial, elevation, and volume scan.
    ///
    /// Statuses:
    ///   0 = None
    ///   1 = Radial
    ///   2 = Elevation
    ///   4 = Volume
    pub radial_spot_blanking_status: Code1,
    /// The azimuth indexing value (if keyed to constant angles).
    ///
    /// Values:
    ///   0 = No indexing
    ///   1-100 = Indexing angle of 0.01 to 1.00 degrees
    pub azimuth_indexing_mode: ScaledInteger1,
    /// The number of "data moment" blocks following this header block, from 4 to 10. There are
    /// always volume, elevation, and radial information blocks and a reflectivity data moment
    /// block. The following 6 data moment blocks are optional, depending on scanning mode. The next
    /// 10 fields on this header contain pointers to each block, if available in the message.
    pub data_block_count: Integer2,
}
impl Header {
    /// ICAO radar identifier.
    pub fn radar_identifier(&self) -> String {
        String::from_utf8_lossy(&self.radar_identifier).to_string()
    }

    /// The collection date and time for this data. Returns `None` if the raw date/time fields
    /// do not form a representable timestamp.
    pub fn date_time(&self) -> Option<DateTime<Utc>> {
        get_datetime(self.date, Duration::milliseconds(self.time as i64))
    }

    /// Azimuth angle at which the radial was collected.
    #[cfg(feature = "uom")]
    pub fn azimuth_angle(&self) -> Angle {
        Angle::new::<degree>(self.azimuth_angle as f64)
    }

    /// Whether the message is compressed and what type of compression was used.
    pub fn compression_indicator(&self) -> CompressionIndicator {
        match self.compression_indicator {
            0 => CompressionIndicator::Uncompressed,
            1 => CompressionIndicator::CompressedBZIP2,
            2 => CompressionIndicator::CompressedZLIB,
            // Any other code is reserved for future use.
            _ => CompressionIndicator::FutureUse,
        }
    }

    /// Uncompressed length of the radial (including the data header block).
    #[cfg(feature = "uom")]
    pub fn radial_length(&self) -> Information {
        Information::new::<byte>(self.radial_length as f64)
    }

    /// Azimuthal spacing between adjacent radials. This is the commanded value, not necessarily
    /// the actual spacing.
    #[cfg(feature = "uom")]
    pub fn azimuth_resolution_spacing(&self) -> Angle {
        // Raw code 1 = 0.5 degrees, 2 = 1.0 degrees.
        Angle::new::<degree>(self.azimuth_resolution_spacing as f64 * 0.5)
    }

    /// The radial's status within the larger scan.
    pub fn radial_status(&self) -> RadialStatus {
        match self.radial_status {
            0 => RadialStatus::ElevationStart,
            1 => RadialStatus::IntermediateRadialData,
            2 => RadialStatus::ElevationEnd,
            3 => RadialStatus::VolumeScanStart,
            4 => RadialStatus::VolumeScanEnd,
            // Code 5; any unrecognized code also maps here.
            _ => RadialStatus::ElevationStartVCPFinal,
        }
    }

    /// The radial's collection elevation angle.
    #[cfg(feature = "uom")]
    pub fn elevation_angle(&self) -> Angle {
        Angle::new::<degree>(self.elevation_angle as f64)
    }

    /// The spot blanking status for the current radial, elevation, and volume scan.
    pub fn radial_spot_blanking_status(&self) -> SpotBlankingStatus {
        SpotBlankingStatus::new(self.radial_spot_blanking_status)
    }

    /// The azimuth indexing value (if keyed to constant angles). Returns `None` when indexing
    /// is disabled (raw value 0).
    #[cfg(feature = "uom")]
    pub fn azimuth_indexing_mode(&self) -> Option<Angle> {
        if self.azimuth_indexing_mode == 0 {
            None
        } else {
            // Raw values 1-100 encode 0.01 to 1.00 degrees.
            Some(Angle::new::<degree>(
                self.azimuth_indexing_mode as f64 * 0.01,
            ))
        }
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for Header {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("Header");
        debug.field("radar_identifier", &self.radar_identifier());
        debug.field("date_time", &self.date_time());
        debug.field("azimuth_number", &self.azimuth_number);
        debug.field("azimuth_angle", &self.azimuth_angle);
        debug.field("compression_indicator", &self.compression_indicator());
        debug.field("radial_length", &self.radial_length);
        debug.field(
            "azimuth_resolution_spacing",
            &self.azimuth_resolution_spacing,
        );
        debug.field("radial_status", &self.radial_status());
        debug.field("elevation_number", &self.elevation_number);
        debug.field("cut_sector_number", &self.cut_sector_number);
        debug.field("elevation_angle", &self.elevation_angle);
        debug.field(
            "radial_spot_blanking_status",
            &self.radial_spot_blanking_status(),
        );
        debug.field("azimuth_indexing_mode", &self.azimuth_indexing_mode);
        debug.field("data_block_count", &self.data_block_count);
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for Header {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // With `uom` enabled, render the typed accessor values rather than the raw fields.
        let mut debug = f.debug_struct("Header");
        debug.field("radar_identifier", &self.radar_identifier());
        debug.field("date_time", &self.date_time());
        debug.field("azimuth_number", &self.azimuth_number);
        debug.field("azimuth_angle", &self.azimuth_angle());
        debug.field("compression_indicator", &self.compression_indicator());
        debug.field("radial_length", &self.radial_length());
        debug.field(
            "azimuth_resolution_spacing",
            &self.azimuth_resolution_spacing(),
        );
        debug.field("radial_status", &self.radial_status());
        debug.field("elevation_number", &self.elevation_number);
        debug.field("cut_sector_number", &self.cut_sector_number);
        debug.field("elevation_angle", &self.elevation_angle());
        debug.field(
            "radial_spot_blanking_status",
            &self.radial_spot_blanking_status(),
        );
        debug.field("azimuth_indexing_mode", &self.azimuth_indexing_mode());
        debug.field("data_block_count", &self.data_block_count);
        debug.finish()
    }
}

View file

@ -0,0 +1,151 @@
use crate::messages::digital_radar_data::{
ElevationDataBlock, GenericDataBlock, Header, RadialDataBlock, VolumeDataBlock,
};
/// The digital radar data message includes base radar data from a single radial for various
/// products.
#[derive(Debug, Clone, PartialEq)]
pub struct Message {
    /// The decoded digital radar data header.
    pub header: Header,
    /// Volume data if included in the message.
    pub volume_data_block: Option<VolumeDataBlock>,
    /// Elevation data if included in the message.
    pub elevation_data_block: Option<ElevationDataBlock>,
    /// Radial data if included in the message.
    pub radial_data_block: Option<RadialDataBlock>,
    /// Reflectivity data if included in the message.
    pub reflectivity_data_block: Option<GenericDataBlock>,
    /// Velocity data if included in the message.
    pub velocity_data_block: Option<GenericDataBlock>,
    /// Spectrum width data if included in the message.
    pub spectrum_width_data_block: Option<GenericDataBlock>,
    /// Differential reflectivity data if included in the message.
    pub differential_reflectivity_data_block: Option<GenericDataBlock>,
    /// Differential phase data if included in the message.
    pub differential_phase_data_block: Option<GenericDataBlock>,
    /// Correlation coefficient data if included in the message.
    pub correlation_coefficient_data_block: Option<GenericDataBlock>,
    /// Specific differential phase data if included in the message.
    pub specific_diff_phase_data_block: Option<GenericDataBlock>,
}
impl Message {
    /// Create a new digital radar data message with the decoded header and no data blocks.
    pub(crate) fn new(header: Header) -> Self {
        Self {
            header,
            volume_data_block: None,
            elevation_data_block: None,
            radial_data_block: None,
            reflectivity_data_block: None,
            velocity_data_block: None,
            spectrum_width_data_block: None,
            differential_reflectivity_data_block: None,
            differential_phase_data_block: None,
            correlation_coefficient_data_block: None,
            specific_diff_phase_data_block: None,
        }
    }

    /// This message's timestamp in epoch milliseconds, or
    /// [crate::result::Error::MessageMissingDateError] if the header's date/time is invalid.
    #[cfg(feature = "nexrad-model")]
    fn timestamp_millis(&self) -> crate::result::Result<i64> {
        use crate::result::Error;
        Ok(self
            .header
            .date_time()
            .ok_or(Error::MessageMissingDateError)?
            .timestamp_millis())
    }

    /// Maps this message's radial status onto the common model's equivalent variant.
    #[cfg(feature = "nexrad-model")]
    fn model_radial_status(&self) -> nexrad_model::data::RadialStatus {
        use crate::messages::digital_radar_data::RadialStatus;
        use nexrad_model::data::RadialStatus as ModelRadialStatus;
        match self.header.radial_status() {
            RadialStatus::ElevationStart => ModelRadialStatus::ElevationStart,
            RadialStatus::IntermediateRadialData => ModelRadialStatus::IntermediateRadialData,
            RadialStatus::ElevationEnd => ModelRadialStatus::ElevationEnd,
            RadialStatus::VolumeScanStart => ModelRadialStatus::VolumeScanStart,
            RadialStatus::VolumeScanEnd => ModelRadialStatus::VolumeScanEnd,
            RadialStatus::ElevationStartVCPFinal => ModelRadialStatus::ElevationStartVCPFinal,
        }
    }

    /// Get a radial from this digital radar data message, cloning each present moment data block.
    ///
    /// # Errors
    ///
    /// Returns an error if the header does not carry a valid date/time.
    #[cfg(feature = "nexrad-model")]
    pub fn radial(&self) -> crate::result::Result<nexrad_model::data::Radial> {
        use nexrad_model::data::Radial;
        Ok(Radial::new(
            self.timestamp_millis()?,
            self.header.azimuth_number,
            self.header.azimuth_angle,
            // Resolution spacing is encoded in half-degree units.
            self.header.azimuth_resolution_spacing as f32 * 0.5,
            self.model_radial_status(),
            self.header.elevation_number,
            self.header.elevation_angle,
            self.reflectivity_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.velocity_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.spectrum_width_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.differential_reflectivity_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.differential_phase_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.correlation_coefficient_data_block
                .as_ref()
                .map(|block| block.moment_data()),
            self.specific_diff_phase_data_block
                .as_ref()
                .map(|block| block.moment_data()),
        ))
    }

    /// Convert this digital radar data message into a common model radial, minimizing data copy
    /// by moving each present moment data block instead of cloning it.
    ///
    /// # Errors
    ///
    /// Returns an error if the header does not carry a valid date/time.
    #[cfg(feature = "nexrad-model")]
    pub fn into_radial(self) -> crate::result::Result<nexrad_model::data::Radial> {
        use nexrad_model::data::Radial;
        // Compute header-derived values before the data blocks are moved out of `self`.
        let timestamp_millis = self.timestamp_millis()?;
        let radial_status = self.model_radial_status();
        Ok(Radial::new(
            timestamp_millis,
            self.header.azimuth_number,
            self.header.azimuth_angle,
            // Resolution spacing is encoded in half-degree units.
            self.header.azimuth_resolution_spacing as f32 * 0.5,
            radial_status,
            self.header.elevation_number,
            self.header.elevation_angle,
            self.reflectivity_data_block
                .map(|block| block.into_moment_data()),
            self.velocity_data_block
                .map(|block| block.into_moment_data()),
            self.spectrum_width_data_block
                .map(|block| block.into_moment_data()),
            self.differential_reflectivity_data_block
                .map(|block| block.into_moment_data()),
            self.differential_phase_data_block
                .map(|block| block.into_moment_data()),
            self.correlation_coefficient_data_block
                .map(|block| block.into_moment_data()),
            self.specific_diff_phase_data_block
                .map(|block| block.into_moment_data()),
        ))
    }
}

View file

@ -0,0 +1,31 @@
use crate::messages::primitive_aliases::Integer4;
/// A pointer to a data moment within a digital radar data message.
// Derives added for consistency with DataMomentPointerType and
// DataMomentGenericPointerType below; without them the struct cannot be
// debug-printed, cloned, or compared even though both of its field types can.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct DataMomentPointer {
    /// The type of data moment that the pointer references.
    pub data_moment_type: DataMomentPointerType,
    /// The pointer to the data moment as a byte offset from the start of the message.
    pub pointer: Integer4,
}
/// The type of data moment that the pointer references.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DataMomentPointerType {
    /// Pointer to the volume data block.
    Volume,
    /// Pointer to the elevation data block.
    Elevation,
    /// Pointer to the radial data block.
    Radial,
    /// Pointer to a generic data moment block of the contained kind.
    Generic(DataMomentGenericPointerType),
}
/// The type of generic data moment that the pointer references.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DataMomentGenericPointerType {
    /// Reflectivity moment data.
    Reflectivity,
    /// Mean radial velocity moment data.
    Velocity,
    /// Spectrum width moment data.
    SpectrumWidth,
    /// Differential reflectivity moment data.
    DifferentialReflectivity,
    /// Differential phase moment data.
    DifferentialPhase,
    /// Correlation coefficient moment data.
    CorrelationCoefficient,
    /// Specific differential phase moment data.
    SpecificDiffPhase,
}

View file

@ -0,0 +1,116 @@
use crate::messages::digital_radar_data::DataBlockId;
use crate::messages::primitive_aliases::{Integer2, Real4, ScaledInteger2};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::f64::{Information, Length, Velocity};
/// A radial data moment block.
#[derive(Clone, PartialEq, Deserialize)]
pub struct RadialDataBlock {
    /// Data block identifier.
    pub data_block_id: DataBlockId,
    /// Size of data block in bytes.
    pub lrtup: Integer2,
    /// Unambiguous range, interval size, in km.
    ///
    /// NOTE(review): declared as a [ScaledInteger2] but the
    /// [RadialDataBlock::unambiguous_range] accessor applies no scale factor —
    /// confirm the encoding against the ICD.
    pub unambiguous_range: ScaledInteger2,
    /// Noise level for the horizontal channel in dBm.
    pub horizontal_channel_noise_level: Real4,
    /// Noise level for the vertical channel in dBm.
    pub vertical_channel_noise_level: Real4,
    /// Nyquist velocity in m/s, stored in hundredths (the
    /// [RadialDataBlock::nyquist_velocity] accessor applies the 0.01 scale).
    pub nyquist_velocity: ScaledInteger2,
    /// Radial flags to support RPG processing.
    pub radial_flags: Integer2,
    /// Calibration constant for the horizontal channel in dBZ.
    pub horizontal_channel_calibration_constant: Real4,
    /// Calibration constant for the vertical channel in dBZ.
    pub vertical_channel_calibration_constant: Real4,
}
impl RadialDataBlock {
    /// Size of data block.
    #[cfg(feature = "uom")]
    pub fn lrtup(&self) -> Information {
        use uom::si::information::byte;
        Information::new::<byte>(f64::from(self.lrtup))
    }

    /// Unambiguous range, interval size.
    #[cfg(feature = "uom")]
    pub fn unambiguous_range(&self) -> Length {
        use uom::si::length::kilometer;
        Length::new::<kilometer>(f64::from(self.unambiguous_range))
    }

    /// Nyquist velocity.
    #[cfg(feature = "uom")]
    pub fn nyquist_velocity(&self) -> Velocity {
        use uom::si::velocity::meter_per_second;
        // The raw field is stored in hundredths of a meter per second.
        Velocity::new::<meter_per_second>(f64::from(self.nyquist_velocity) * 0.01)
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for RadialDataBlock {
    /// Formats the block with raw (unscaled) field values.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("RadialDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup);
        debug.field("unambiguous_range", &self.unambiguous_range);
        debug.field(
            "horizontal_channel_noise_level",
            &self.horizontal_channel_noise_level,
        );
        debug.field(
            "vertical_channel_noise_level",
            &self.vertical_channel_noise_level,
        );
        debug.field("nyquist_velocity", &self.nyquist_velocity);
        debug.field("radial_flags", &self.radial_flags);
        debug.field(
            "horizontal_channel_calibration_constant",
            &self.horizontal_channel_calibration_constant,
        );
        debug.field(
            "vertical_channel_calibration_constant",
            &self.vertical_channel_calibration_constant,
        );
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for RadialDataBlock {
    /// Formats the block using the unit-aware accessors where they exist.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("RadialDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup());
        debug.field("unambiguous_range", &self.unambiguous_range());
        debug.field(
            "horizontal_channel_noise_level",
            &self.horizontal_channel_noise_level,
        );
        debug.field(
            "vertical_channel_noise_level",
            &self.vertical_channel_noise_level,
        );
        debug.field("nyquist_velocity", &self.nyquist_velocity());
        debug.field("radial_flags", &self.radial_flags);
        debug.field(
            "horizontal_channel_calibration_constant",
            &self.horizontal_channel_calibration_constant,
        );
        debug.field(
            "vertical_channel_calibration_constant",
            &self.vertical_channel_calibration_constant,
        );
        debug.finish()
    }
}

View file

@ -0,0 +1,47 @@
use crate::messages::primitive_aliases::Code1;
use std::fmt::{Debug, Formatter};
/// Statuses:
/// 0 = None
/// 1 = Radial
/// 2 = Elevation
/// 4 = Volume
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SpotBlankingStatus(Code1);
impl SpotBlankingStatus {
    pub(crate) fn new(code: Code1) -> Self {
        Self(code)
    }
    /// Returns `true` when the given bit of the status code is set.
    fn bit(&self, index: u8) -> bool {
        (self.0 >> index) & 1 == 1
    }
    /// Whether no spot blanking is active.
    pub fn none(&self) -> bool {
        self.0 == 0
    }
    /// Whether spot blanking is active for the radial.
    pub fn radial(&self) -> bool {
        self.bit(0)
    }
    /// Whether spot blanking is active for the elevation.
    pub fn elevation(&self) -> bool {
        self.bit(1)
    }
    /// Whether spot blanking is active for the volume.
    pub fn volume(&self) -> bool {
        self.bit(2)
    }
}
impl Debug for SpotBlankingStatus {
    /// Renders each decoded flag rather than the raw code.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("SpotBlankingStatus");
        debug.field("none", &self.none());
        debug.field("radial", &self.radial());
        debug.field("elevation", &self.elevation());
        debug.field("volume", &self.volume());
        debug.finish()
    }
}

View file

@ -0,0 +1,216 @@
use crate::messages::digital_radar_data::{DataBlockId, ProcessingStatus, VolumeCoveragePattern};
use crate::messages::primitive_aliases::{Integer1, Integer2, Real4, SInteger2};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::f64::{Angle, Energy, Information, Length};
/// A volume data moment block.
#[derive(Clone, PartialEq, Deserialize)]
pub struct VolumeDataBlock {
    /// Data block identifier.
    pub data_block_id: DataBlockId,
    /// Size of data block in bytes.
    pub lrtup: Integer2,
    /// Major version number.
    pub major_version_number: Integer1,
    /// Minor version number.
    pub minor_version_number: Integer1,
    /// Latitude of radar in degrees.
    pub latitude: Real4,
    /// Longitude of radar in degrees.
    pub longitude: Real4,
    /// Height of site base above sea level in meters.
    pub site_height: SInteger2,
    /// Height of feedhorn above ground in meters.
    pub feedhorn_height: Integer2,
    /// Reflectivity scaling factor without correction by ground noise scaling factors given in
    /// adaptation data message in dB.
    pub calibration_constant: Real4,
    /// Transmitter power for horizontal channel in kW.
    pub horizontal_shv_tx_power: Real4,
    /// Transmitter power for vertical channel in kW.
    pub vertical_shv_tx_power: Real4,
    /// Calibration of system ZDR in dB.
    pub system_differential_reflectivity: Real4,
    /// Initial DP (differential phase) for the system in degrees.
    pub initial_system_differential_phase: Real4,
    /// Identifies the volume coverage pattern in use.
    pub volume_coverage_pattern_number: Integer2,
    /// Processing option flags.
    ///
    /// Options:
    /// 0 = RxR noise
    /// 1 = CBT
    pub processing_status: Integer2,
    /// RPG weighted mean ZDR bias estimate in dB.
    ///
    /// NOTE(review): stored as an [Integer2] and no scale factor is applied in
    /// this file — confirm the encoding/scaling against the ICD.
    pub zdr_bias_estimate_weighted_mean: Integer2,
    /// Spare.
    pub spare: [u8; 6],
}
impl VolumeDataBlock {
    /// Size of data block.
    #[cfg(feature = "uom")]
    pub fn lrtup(&self) -> Information {
        use uom::si::information::byte;
        Information::new::<byte>(f64::from(self.lrtup))
    }
    /// Latitude of radar.
    #[cfg(feature = "uom")]
    pub fn latitude(&self) -> Angle {
        use uom::si::angle::degree;
        Angle::new::<degree>(f64::from(self.latitude))
    }
    /// Longitude of radar.
    #[cfg(feature = "uom")]
    pub fn longitude(&self) -> Angle {
        use uom::si::angle::degree;
        Angle::new::<degree>(f64::from(self.longitude))
    }
    /// Height of site base above sea level.
    #[cfg(feature = "uom")]
    pub fn site_height(&self) -> Length {
        use uom::si::length::meter;
        Length::new::<meter>(f64::from(self.site_height))
    }
    /// Height of feedhorn above ground.
    #[cfg(feature = "uom")]
    pub fn feedhorn_height(&self) -> Length {
        use uom::si::length::meter;
        Length::new::<meter>(f64::from(self.feedhorn_height))
    }
    /// Transmitter power for horizontal channel.
    #[cfg(feature = "uom")]
    pub fn horizontal_shv_tx_power(&self) -> Energy {
        use uom::si::energy::kilojoule;
        Energy::new::<kilojoule>(f64::from(self.horizontal_shv_tx_power))
    }
    /// Transmitter power for vertical channel.
    #[cfg(feature = "uom")]
    pub fn vertical_shv_tx_power(&self) -> Energy {
        use uom::si::energy::kilojoule;
        Energy::new::<kilojoule>(f64::from(self.vertical_shv_tx_power))
    }
    /// Initial DP for the system.
    #[cfg(feature = "uom")]
    pub fn initial_system_differential_phase(&self) -> Angle {
        use uom::si::angle::degree;
        Angle::new::<degree>(f64::from(self.initial_system_differential_phase))
    }
    /// Identifies the volume coverage pattern in use.
    ///
    /// NOTE(review): panics on any VCP number outside the listed set; consider
    /// a fallible variant and confirm the complete set of legal VCP numbers
    /// against the ICD.
    pub fn volume_coverage_pattern(&self) -> VolumeCoveragePattern {
        match self.volume_coverage_pattern_number {
            12 => VolumeCoveragePattern::VCP12,
            31 => VolumeCoveragePattern::VCP31,
            35 => VolumeCoveragePattern::VCP35,
            112 => VolumeCoveragePattern::VCP112,
            212 => VolumeCoveragePattern::VCP212,
            215 => VolumeCoveragePattern::VCP215,
            _ => panic!(
                "Invalid volume coverage pattern number: {}",
                self.volume_coverage_pattern_number
            ),
        }
    }
    /// Processing option flags.
    pub fn processing_status(&self) -> ProcessingStatus {
        match self.processing_status {
            0 => ProcessingStatus::RxRNoise,
            1 => ProcessingStatus::CBT,
            other => ProcessingStatus::Other(other),
        }
    }
}
#[cfg(not(feature = "uom"))]
impl Debug for VolumeDataBlock {
    /// Formats the block with raw (unscaled) field values.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("VolumeDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup);
        debug.field("major_version_number", &self.major_version_number);
        debug.field("minor_version_number", &self.minor_version_number);
        debug.field("latitude", &self.latitude);
        debug.field("longitude", &self.longitude);
        debug.field("site_height", &self.site_height);
        debug.field("feedhorn_height", &self.feedhorn_height);
        debug.field("calibration_constant", &self.calibration_constant);
        debug.field("horizontal_shv_tx_power", &self.horizontal_shv_tx_power);
        debug.field("vertical_shv_tx_power", &self.vertical_shv_tx_power);
        debug.field(
            "system_differential_reflectivity",
            &self.system_differential_reflectivity,
        );
        debug.field(
            "initial_system_differential_phase",
            &self.initial_system_differential_phase,
        );
        debug.field(
            "volume_coverage_pattern_number",
            &self.volume_coverage_pattern_number,
        );
        debug.field("processing_status", &self.processing_status());
        debug.field(
            "zdr_bias_estimate_weighted_mean",
            &self.zdr_bias_estimate_weighted_mean,
        );
        debug.field("spare", &self.spare);
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for VolumeDataBlock {
    /// Formats the block using the unit-aware accessors where they exist.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("VolumeDataBlock");
        debug.field("data_block_id", &self.data_block_id);
        debug.field("lrtup", &self.lrtup());
        debug.field("major_version_number", &self.major_version_number);
        debug.field("minor_version_number", &self.minor_version_number);
        debug.field("latitude", &self.latitude());
        debug.field("longitude", &self.longitude());
        debug.field("site_height", &self.site_height());
        debug.field("feedhorn_height", &self.feedhorn_height());
        debug.field("calibration_constant", &self.calibration_constant);
        debug.field("horizontal_shv_tx_power", &self.horizontal_shv_tx_power());
        debug.field("vertical_shv_tx_power", &self.vertical_shv_tx_power());
        debug.field(
            "system_differential_reflectivity",
            &self.system_differential_reflectivity,
        );
        debug.field(
            "initial_system_differential_phase",
            &self.initial_system_differential_phase(),
        );
        debug.field(
            "volume_coverage_pattern_number",
            &self.volume_coverage_pattern_number,
        );
        debug.field("processing_status", &self.processing_status());
        debug.field(
            "zdr_bias_estimate_weighted_mean",
            &self.zdr_bias_estimate_weighted_mean,
        );
        debug.field("spare", &self.spare);
        debug.finish()
    }
}

View file

@ -0,0 +1,59 @@
use crate::messages::clutter_filter_map;
use crate::messages::digital_radar_data;
use crate::messages::message_header::MessageHeader;
use crate::messages::rda_status_data;
use crate::messages::volume_coverage_pattern;
/// A decoded NEXRAD Level II message with its metadata header.
#[derive(Debug, Clone, PartialEq)]
pub struct Message {
    /// The decoded header common to all message types.
    header: MessageHeader,
    /// The decoded, type-specific message contents.
    contents: MessageContents,
}
impl Message {
    /// Create a new unsegmented message, pairing a decoded header with its decoded contents.
    pub(crate) fn unsegmented(header: MessageHeader, contents: MessageContents) -> Self {
        Self { header, contents }
    }
    /// This message's header.
    pub fn header(&self) -> &MessageHeader {
        &self.header
    }
    /// This message's contents.
    pub fn contents(&self) -> &MessageContents {
        &self.contents
    }
    /// Consume this message, returning ownership of its contents.
    pub fn into_contents(self) -> MessageContents {
        self.contents
    }
}
/// A decoded NEXRAD Level II message's contents.
#[derive(Debug, Clone, PartialEq)]
pub enum MessageContents {
    /// Message type 2 "RDA Status Data" contains information about the current RDA state, system
    /// control, operating status, scanning strategy, performance parameters like transmitter power
    /// and calibration, and system alarms.
    RDAStatusData(Box<rda_status_data::Message>),
    /// Message type 31 "Digital Radar Data" consists of base data information such as reflectivity,
    /// mean radial velocity, spectrum width, differential reflectivity, differential phase,
    /// correlation coefficient, azimuth angle, elevation angle, cut type, scanning strategy, and
    /// calibration parameters.
    DigitalRadarData(Box<digital_radar_data::Message>),
    /// Message type 15 "Clutter Filter Map" contains information about clutter filter maps that are
    /// used to filter clutter from radar products.
    ClutterFilterMap(Box<clutter_filter_map::Message>),
    /// Message type 5 "Volume Coverage Pattern" provides details about the volume
    /// coverage pattern being used, including detailed settings for each elevation.
    VolumeCoveragePattern(Box<volume_coverage_pattern::Message>),
    /// Any other message type; its contents are not decoded by this crate.
    Other,
}

View file

@ -0,0 +1,230 @@
use crate::messages::definitions::RedundantChannel;
use crate::messages::message_type::MessageType;
use crate::messages::primitive_aliases::{Integer1, Integer2, Integer4};
use crate::util::get_datetime;
use chrono::{DateTime, Duration, Utc};
use serde::Deserialize;
use std::fmt::Debug;
#[cfg(feature = "uom")]
use uom::si::f64::Information;
#[cfg(feature = "uom")]
use uom::si::information::byte;
/// This value (`u16::MAX`, all bits set) in the [MessageHeader::segment_size] field of a message
/// header indicates that the message is variable-length rather than segmented.
pub const VARIABLE_LENGTH_MESSAGE_SIZE: u16 = 65535;
/// Message and system configuration information prepended to the beginning of all messages.
///
/// Note that messages with a segment size of [VARIABLE_LENGTH_MESSAGE_SIZE] are not segmented and
/// instead variable-length, with the segment count and segment number positions of the header
/// (bytes 12-15) specifying the size of the full message in bytes.
#[repr(C)]
#[derive(Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct MessageHeader {
    // Leading bytes of unknown purpose; the name suggests RPG-inserted
    // metadata preceding each message — TODO confirm against the RDA/RPG ICD.
    rpg_unknown: [u8; 12],
    /// Size of this segment in half-words. Note that this only describes this segment's size,
    /// though there could be multiple segments. In the case of a variable-length message (indicated
    /// by this field being set to [VARIABLE_LENGTH_MESSAGE_SIZE]), the full message's size is
    /// determined differently. See [MessageHeader::message_size_bytes] and
    /// [MessageHeader::segment_count] for more information.
    pub segment_size: Integer2,
    /// Whether the RDA is operating on a redundant channel.
    ///
    /// Legacy:
    /// 0 = Single Channel (no bits set)
    /// 1 = Redundant Channel 1 (bit 0 set)
    /// 2 = Redundant Channel 2 (bit 1 set)
    /// ORDA:
    /// 8 = Single Channel (bit 3 set)
    /// 9 = Redundant Channel 1 (bits 0 and 3 set)
    /// 10 = Redundant Channel 2 (bits 1 and 3 set)
    pub redundant_channel: Integer1,
    /// Type discriminator.
    pub message_type: Integer1,
    /// Message sequence number.
    pub sequence_number: Integer2,
    /// This message's date represented as a count of days since 1 January 1970 00:00 GMT. It is
    /// also referred-to as a "modified Julian date" where it is the Julian date - 2440586.5.
    pub date: Integer2,
    /// Milliseconds past midnight, GMT.
    pub time: Integer4,
    /// Number of segments in this message. If the [MessageHeader::segment_size] is less than
    /// [VARIABLE_LENGTH_MESSAGE_SIZE], this field is meaningful, otherwise bytes 12-15 (this field
    /// and [MessageHeader::segment_number]) specify the size of the message in bytes.
    pub segment_count: Integer2,
    /// This message segment's number. If the [MessageHeader::segment_size] is less than
    /// [VARIABLE_LENGTH_MESSAGE_SIZE], this field is meaningful, otherwise, bytes 12-15 (this field
    /// and [MessageHeader::segment_count]) specify the size of the message in bytes.
    pub segment_number: Integer2,
}
impl MessageHeader {
/// If this message is [MessageHeader::segmented], this indicates this message segment's size.
/// Otherwise, this returns [None] and [MessageHeader::message_size] should be used to determine
/// the message's full size.
#[cfg(feature = "uom")]
pub fn segment_size(&self) -> Option<Information> {
if self.segment_size < VARIABLE_LENGTH_MESSAGE_SIZE {
Some(Information::new::<byte>((self.segment_size * 2) as f64))
} else {
None
}
}
/// Whether the RDA is operating on a redundant channel.
pub fn rda_redundant_channel(&self) -> RedundantChannel {
match self.redundant_channel {
0 => RedundantChannel::LegacySingleChannel,
1 => RedundantChannel::LegacyRedundantChannel1,
2 => RedundantChannel::LegacyRedundantChannel2,
8 => RedundantChannel::ORDASingleChannel,
9 => RedundantChannel::ORDARedundantChannel1,
10 => RedundantChannel::ORDARedundantChannel2,
_ => panic!("Invalid RDA redundant channel: {}", self.redundant_channel),
}
}
/// Message type discriminator.
pub fn message_type(&self) -> MessageType {
match self.message_type {
1 => MessageType::RDADigitalRadarData,
2 => MessageType::RDAStatusData,
3 => MessageType::RDAPerformanceMaintenanceData,
4 => MessageType::RDAConsoleMessage,
5 => MessageType::RDAVolumeCoveragePattern,
6 => MessageType::RDAControlCommands,
7 => MessageType::RPGVolumeCoveragePattern,
8 => MessageType::RPGClutterCensorZones,
9 => MessageType::RPGRequestForData,
10 => MessageType::RPGConsoleMessage,
11 => MessageType::RDALoopBackTest,
12 => MessageType::RPGLoopBackTest,
13 => MessageType::RDAClutterFilterBypassMap,
14 => MessageType::Spare1,
15 => MessageType::RDAClutterFilterMap,
16 => MessageType::ReservedFAARMSOnly1,
17 => MessageType::ReservedFAARMSOnly2,
18 => MessageType::RDAAdaptationData,
20 => MessageType::Reserved1,
21 => MessageType::Reserved2,
22 => MessageType::Reserved3,
23 => MessageType::Reserved4,
24 => MessageType::ReservedFAARMSOnly3,
25 => MessageType::ReservedFAARMSOnly4,
26 => MessageType::ReservedFAARMSOnly5,
29 => MessageType::Reserved5,
31 => MessageType::RDADigitalRadarDataGenericFormat,
32 => MessageType::RDAPRFData,
33 => MessageType::RDALogData,
_ => MessageType::Unknown(self.message_type),
}
}
/// This message's date and time in UTC.
pub fn date_time(&self) -> Option<DateTime<Utc>> {
get_datetime(self.date, Duration::milliseconds(self.time as i64))
}
/// Whether this message is segmented or variable-length. If the message is segmented, multiple
/// message segments compose the full message. If the message is variable-length as indicated by
/// the [MessageHeader::segment_size] field being set to [VARIABLE_LENGTH_MESSAGE_SIZE], the
/// full message size can be retrieved by [MessageHeader::message_size_bytes].
pub fn segmented(&self) -> bool {
self.segment_size < VARIABLE_LENGTH_MESSAGE_SIZE
}
/// If the message is [MessageHeader::segmented], this indicates the number of segments in the
/// full message, otherwise this returns [None]. [MessageHeader::message_size_bytes] can be used
/// to determine the message's full size.
pub fn segment_count(&self) -> Option<u16> {
if self.segment_size < VARIABLE_LENGTH_MESSAGE_SIZE {
Some(self.segment_count)
} else {
None
}
}
/// If the message is [MessageHeader::segmented], this indicates this segment's number/sequence
/// in the message, otherwise this returns [None]. [MessageHeader::message_size_bytes] can be
/// used to determine the message's full size.
pub fn segment_number(&self) -> Option<u16> {
if self.segment_size < VARIABLE_LENGTH_MESSAGE_SIZE {
Some(self.segment_number)
} else {
None
}
}
/// The full size of the message in bytes. If the message is [MessageHeader::segmented] then
/// this is the segment size, otherwise this is the full variable-length message size.
pub fn message_size_bytes(&self) -> u32 {
match self.segment_count() {
Some(_) => self.segment_size as u32 * 2,
None => {
let segment_number = self.segment_number as u32;
let segment_size = self.segment_size as u32;
(segment_number << 16) | (segment_size << 1)
}
}
}
/// The full size of the message. If the message is [MessageHeader::segmented] then this is the
/// segment size, otherwise this is the full variable-length message size.
#[cfg(feature = "uom")]
pub fn message_size(&self) -> Information {
match self.segment_count() {
Some(_) => {
let segment_size_bytes = self.segment_size << 1;
Information::new::<byte>(segment_size_bytes as f64)
}
None => {
let segment_number = self.segment_number as u32;
let segment_size = self.segment_size as u32;
let message_size_bytes = (segment_number << 16) | segment_size;
Information::new::<byte>(message_size_bytes as f64)
}
}
}
}
#[cfg(not(feature = "uom"))]
impl Debug for MessageHeader {
    /// Formats the header with the raw segment size and derived accessor values.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("MessageHeader");
        debug.field("segment_size", &self.segment_size);
        debug.field("redundant_channel", &self.rda_redundant_channel());
        debug.field("message_type", &self.message_type());
        debug.field("sequence_number", &self.sequence_number);
        debug.field("date_time", &self.date_time());
        debug.field("segment_count", &self.segment_count());
        debug.field("segment_number", &self.segment_number());
        debug.field("message_size_bytes", &self.message_size_bytes());
        debug.finish()
    }
}
#[cfg(feature = "uom")]
impl Debug for MessageHeader {
    /// Formats the header using the unit-aware accessors.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("MessageHeader");
        debug.field("segment_size", &self.segment_size());
        debug.field("redundant_channel", &self.rda_redundant_channel());
        debug.field("message_type", &self.message_type());
        debug.field("sequence_number", &self.sequence_number);
        debug.field("date_time", &self.date_time());
        debug.field("segment_count", &self.segment_count());
        debug.field("segment_number", &self.segment_number());
        debug.field("message_size", &self.message_size());
        debug.finish()
    }
}

View file

@ -0,0 +1,71 @@
/// The types of data messages transferred between the RDA and RPG.
///
/// Discriminants match the numeric message type codes carried in the message
/// header; [MessageType::Unknown] carries any code not listed here.
#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy, Ord, PartialOrd)]
#[repr(u8)]
pub enum MessageType {
    /// Replaced by message type 31.
    RDADigitalRadarData = 1,
    /// Metadata.
    RDAStatusData = 2,
    /// Metadata.
    RDAPerformanceMaintenanceData = 3,
    RDAConsoleMessage = 4,
    /// Metadata.
    RDAVolumeCoveragePattern = 5,
    RDAControlCommands = 6,
    RPGVolumeCoveragePattern = 7,
    RPGClutterCensorZones = 8,
    RPGRequestForData = 9,
    RPGConsoleMessage = 10,
    RDALoopBackTest = 11,
    RPGLoopBackTest = 12,
    /// No longer sent.
    RDAClutterFilterBypassMap = 13,
    Spare1 = 14,
    /// Metadata.
    RDAClutterFilterMap = 15,
    ReservedFAARMSOnly1 = 16,
    ReservedFAARMSOnly2 = 17,
    /// Metadata.
    RDAAdaptationData = 18,
    Reserved1 = 20,
    Reserved2 = 21,
    Reserved3 = 22,
    Reserved4 = 23,
    ReservedFAARMSOnly3 = 24,
    ReservedFAARMSOnly4 = 25,
    ReservedFAARMSOnly5 = 26,
    Reserved5 = 29,
    RDADigitalRadarDataGenericFormat = 31,
    RDAPRFData = 32,
    RDALogData = 33,
    /// A message type code not recognized by this decoder.
    Unknown(u8),
}

View file

@ -0,0 +1,14 @@
//!
//! Primitive aliases matching the types referenced in the ICD.
//!
/// One-byte coded value.
pub type Code1 = u8;
/// Two-byte coded value.
pub type Code2 = u16;
/// One-byte unsigned integer.
pub type Integer1 = u8;
/// Two-byte unsigned integer.
pub type Integer2 = u16;
/// Four-byte unsigned integer.
pub type Integer4 = u32;
/// Four-byte IEEE floating point value.
pub type Real4 = f32;
/// One-byte unsigned integer with an implied scale factor (applied by each field's accessor).
pub type ScaledInteger1 = u8;
/// Two-byte unsigned integer with an implied scale factor (applied by each field's accessor).
pub type ScaledInteger2 = u16;
/// Two-byte signed integer with an implied scale factor (applied by each field's accessor).
pub type ScaledSInteger2 = i16;
/// Two-byte signed integer.
pub type SInteger2 = i16;
View file

@ -0,0 +1,32 @@
//!
//! Message type 2 "RDA Status Data" contains information about the current RDA state, system
//! control, operating status, scanning strategy, performance parameters like transmitter power and
//! calibration, and system alarms. This message is sent upon wideband connection, after state or
//! control changes, at the beginning of each volume scan, and after an RPG request.
//!
pub mod alarm;
mod data_transmission_enabled;
pub use data_transmission_enabled::DataTransmissionEnabled;
use std::io::Read;
mod scan_data_flags;
pub use scan_data_flags::ScanDataFlags;
mod definitions;
pub use definitions::*;
mod message;
pub use message::Message;
mod volume_coverage_pattern;
use crate::result::Result;
use crate::util::deserialize;
pub use volume_coverage_pattern::VolumeCoveragePatternNumber;
/// Decodes an RDA status message type 2 from the provided reader.
///
/// # Errors
///
/// Returns an error if the bytes read cannot be deserialized into a [Message].
pub fn decode_rda_status_message<R: Read>(reader: &mut R) -> Result<Message> {
    deserialize(reader)
}

View file

@ -0,0 +1,12 @@
//!
//! The RDA system's alarm status data.
//!
mod model;
pub use model::*;
mod definitions;
pub use definitions::*;
mod summary;
pub use summary::*;

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,93 @@
/// An RDA alarm message definition to be referenced by an RDA status data message.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Message {
    /// The alarm code.
    code: u16,
    /// The status of the RDA as a result of the alarm, if specified.
    state: Option<State>,
    /// The type of alarm, if specified.
    alarm_type: Option<AlarmType>,
    /// The hardware device area where the alarm originated, if specified.
    device: Option<Device>,
    /// The number of samples required to trigger the alarm, if specified.
    sample: Option<u8>,
    /// The static, human-readable alarm message text.
    message: &'static str,
}
impl Message {
    /// Creates a new alarm message definition from its code and optional metadata.
    pub(crate) fn new(
        code: u16,
        state: Option<State>,
        alarm_type: Option<AlarmType>,
        device: Option<Device>,
        sample: Option<u8>,
        message: &'static str,
    ) -> Self {
        Self {
            code,
            state,
            alarm_type,
            device,
            sample,
            message,
        }
    }
    /// The alarm code.
    pub fn code(&self) -> u16 {
        self.code
    }
    /// The status of the RDA as a result of the alarm.
    pub fn state(&self) -> Option<State> {
        self.state
    }
    /// The type of alarm.
    pub fn alarm_type(&self) -> Option<AlarmType> {
        self.alarm_type
    }
    /// The hardware device area where the alarm originated.
    pub fn device(&self) -> Option<Device> {
        self.device
    }
    /// The number of samples required to trigger the alarm.
    pub fn sample(&self) -> Option<u8> {
        self.sample
    }
    /// The alarm message.
    pub fn message(&self) -> &'static str {
        self.message
    }
}
/// The status of the RDA as a result of the alarm.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum State {
    /// Maintenance is mandatory.
    MaintenanceMandatory,
    /// Maintenance is required.
    MaintenanceRequired,
    /// The RDA is inoperative.
    Inoperative,
    /// Alarm not specifically tied to state change.
    Secondary,
}
/// The different classifications of alarms.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum AlarmType {
    /// Alarm failed consecutively enough times to meet the alarm reporting count/sample threshold.
    EdgeDetected,
    /// Alarm reported each time the condition is met.
    Occurrence,
    /// Alarm reported at most once every 15 minutes when the condition is met.
    FilteredOccurrence,
}
/// The hardware device area where the alarm originated.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum Device {
    /// RDA control area.
    Control,
    /// Antenna pedestal area.
    Pedestal,
    /// Receiver area.
    Receiver,
    /// Signal processor area.
    SignalProcessor,
    /// Communications area.
    Communications,
    /// Tower/utilities area.
    TowerUtilities,
    /// Transmitter area.
    Transmitter,
}

View file

@ -0,0 +1,67 @@
use crate::messages::primitive_aliases::Code2;
use std::fmt::Debug;
/// The RDA system's active alarm types.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Summary(Code2);
impl Summary {
    /// Wraps a raw alarm-summary code.
    pub(crate) fn new(code: Code2) -> Self {
        Self(code)
    }

    /// Whether the bit at `index` of the raw code is set.
    fn flag(&self, index: u8) -> bool {
        self.0 >> index & 1 != 0
    }

    /// Whether no alarms are active.
    pub fn none(&self) -> bool {
        self.0 == 0
    }

    /// Whether the tower/utilities alarm is active.
    pub fn tower_utilities(&self) -> bool {
        self.flag(0)
    }

    /// Whether the pedestal alarm is active.
    pub fn pedestal(&self) -> bool {
        self.flag(1)
    }

    /// Whether the transmitter alarm is active.
    pub fn transmitter(&self) -> bool {
        self.flag(2)
    }

    /// Whether the receiver alarm is active.
    pub fn receiver(&self) -> bool {
        self.flag(3)
    }

    /// Whether the RDA control alarm is active.
    pub fn rda_control(&self) -> bool {
        self.flag(4)
    }

    /// Whether the communication alarm is active.
    pub fn communication(&self) -> bool {
        self.flag(5)
    }

    /// Whether the signal processor alarm is active.
    pub fn signal_processor(&self) -> bool {
        self.flag(6)
    }
}
impl Debug for Summary {
    /// Renders each alarm category as a named boolean field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = f.debug_struct("Summary");
        s.field("none", &self.none());
        s.field("tower_utilities", &self.tower_utilities());
        s.field("pedestal", &self.pedestal());
        s.field("transmitter", &self.transmitter());
        s.field("receiver", &self.receiver());
        s.field("rda_control", &self.rda_control());
        s.field("communication", &self.communication());
        s.field("signal_processor", &self.signal_processor());
        s.finish()
    }
}

View file

@ -0,0 +1,43 @@
use crate::messages::primitive_aliases::Code2;
use std::fmt::Debug;
/// The types of data that have transmission enabled.
///
/// Wraps the raw halfword. Note that "none" is itself a flag bit (bit 0),
/// not merely the absence of the other bits.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct DataTransmissionEnabled(Code2);
impl DataTransmissionEnabled {
    /// Wraps a raw data-transmission-enabled code.
    pub(crate) fn new(value: Code2) -> Self {
        Self(value)
    }

    /// Whether the bit at `index` of the raw code is set.
    fn flag(&self, index: u8) -> bool {
        self.0 >> index & 1 != 0
    }

    /// Whether the "no data types" flag is set.
    pub fn none(&self) -> bool {
        self.flag(0)
    }

    /// Whether reflectivity data has transmission enabled.
    pub fn reflectivity(&self) -> bool {
        self.flag(1)
    }

    /// Whether velocity data has transmission enabled.
    pub fn velocity(&self) -> bool {
        self.flag(2)
    }

    /// Whether spectrum width data has transmission enabled.
    pub fn spectrum_width(&self) -> bool {
        self.flag(3)
    }
}
impl Debug for DataTransmissionEnabled {
    /// Renders each transmission flag as a named boolean field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = f.debug_struct("DataTransmissionEnabled");
        s.field("none", &self.none());
        s.field("reflectivity", &self.reflectivity());
        s.field("velocity", &self.velocity());
        s.field("spectrum_width", &self.spectrum_width());
        s.finish()
    }
}

View file

@ -0,0 +1,110 @@
/// Acknowledgement of command receipt by RDA system.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum CommandAcknowledgement {
    /// A remote VCP was received (code 1).
    RemoteVCPReceived,
    /// A clutter bypass map was received (code 2).
    ClutterBypassMapReceived,
    /// Clutter censor zones were received (code 3).
    ClutterCensorZonesReceived,
    /// A redundant channel control command was accepted (code 4).
    RedundantChannelControlCommandAccepted,
}
/// The possible RDA system clutter mitigation decision statuses.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ClutterMitigationDecisionStatus {
    /// Clutter mitigation is disabled (code 0).
    Disabled,
    /// Clutter mitigation is enabled (code 1).
    Enabled,
    /// Which elevation segments of the bypass map are applied.
    BypassMapElevationSegments(Vec<u8>),
}
/// The possible RDA system auxiliary power generator states.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum AuxiliaryPowerGeneratorState {
    /// Switched to auxiliary power (code 1).
    SwitchedToAuxiliaryPower,
    /// Utility power available (code 2).
    UtilityPowerAvailable,
    /// Generator on (code 4).
    GeneratorOn,
    /// Transfer switch set to manual (code 8).
    TransferSwitchSetToManual,
    /// Commanded switchover (code 16).
    CommandedSwitchover,
}
/// The possible RDA system control authorizations.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ControlAuthorization {
    /// No control-authorization action is pending.
    NoAction,
    /// Local control has been requested.
    LocalControlRequested,
    /// Remote control has been requested (local released).
    RemoteControlRequested,
}
/// The possible RDA system control statuses.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ControlStatus {
    /// Local control only (code 2).
    LocalControlOnly,
    /// Remote (RPG) control only (code 4).
    RemoteControlOnly,
    /// Either local or remote control (code 8).
    EitherLocalOrRemoteControl,
}
/// The possible RDA system operability statuses.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum OperabilityStatus {
    /// On-line (code 2).
    OnLine,
    /// Maintenance action required (code 4).
    MaintenanceActionRequired,
    /// Maintenance action mandatory (code 8).
    MaintenanceActionMandatory,
    /// Commanded shut down (code 16).
    CommandedShutDown,
    /// Inoperable (code 32).
    Inoperable,
}
/// The possible RDA system operational modes.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum OperationalMode {
    /// Operational mode (code 4).
    Operational,
    /// Maintenance mode (code 8).
    Maintenance,
}
/// The RDA system's performance check status.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum PerformanceCheckStatus {
    /// No command pending (code 0).
    NoCommandPending,
    /// Force performance check pending (code 1).
    ForcePerformanceCheckPending,
    /// Performance check in progress (code 2).
    InProgress,
}
/// The RDA system's RMS control status.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum RMSControlStatus {
    /// Non-RMS system (code 0).
    NonRMS,
    /// RMS in control (code 2).
    RMSInControl,
    /// RDA in control (code 4).
    RDAInControl,
}
/// The possible RDA system spot blanking statuses.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum SpotBlankingStatus {
    /// Spot blanking not installed (code 0).
    NotInstalled,
    /// Spot blanking enabled (code 1).
    Enabled,
    /// Spot blanking disabled (code 4).
    Disabled,
}
/// The possible RDA system statuses.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum RDAStatus {
    /// Start-up (code 2).
    StartUp,
    /// Standby (code 4).
    Standby,
    /// Restart (code 8).
    Restart,
    /// Operate (code 16).
    Operate,
    /// Spare (code 32).
    Spare,
}
/// Whether the RDA system has super resolution enabled.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum SuperResolutionStatus {
    /// Super resolution enabled (code 2).
    Enabled,
    /// Super resolution disabled (code 4).
    Disabled,
}
/// The possible RDA system transition power source (TPS) statuses.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum TransitionPowerSourceStatus {
    /// TPS not installed (code 0).
    NotInstalled,
    /// TPS off (code 1).
    Off,
    /// TPS OK (code 3).
    OK,
    /// TPS state unknown (code 4).
    Unknown,
}

View file

@ -0,0 +1,529 @@
use crate::messages::primitive_aliases::{Code2, Integer2, SInteger2, ScaledInteger2};
use crate::messages::rda_status_data::alarm;
use crate::messages::rda_status_data::alarm::Summary;
use crate::messages::rda_status_data::data_transmission_enabled::DataTransmissionEnabled;
use crate::messages::rda_status_data::definitions::{
AuxiliaryPowerGeneratorState, ClutterMitigationDecisionStatus, CommandAcknowledgement,
ControlAuthorization, ControlStatus, OperabilityStatus, OperationalMode,
PerformanceCheckStatus, RDAStatus, RMSControlStatus, SpotBlankingStatus, SuperResolutionStatus,
TransitionPowerSourceStatus,
};
use crate::messages::rda_status_data::scan_data_flags::ScanDataFlags;
use crate::messages::rda_status_data::volume_coverage_pattern::VolumeCoveragePatternNumber;
use crate::util::get_datetime;
use chrono::{DateTime, Duration, Utc};
use serde::Deserialize;
use std::fmt::Debug;
/// The RDA status data message includes various information about the current RDA system's state,
/// including system operating status, performance parameters, and active alarms.
///
/// Fields hold the raw encoded halfwords; the accessor methods on [`Message`]
/// decode them into typed values.
#[repr(C)]
#[derive(Clone, PartialEq, Eq, Hash, Deserialize)]
pub struct Message {
    /// The RDA system's status.
    ///
    /// Statuses:
    ///   2 (bit 1) = Start-up
    ///   4 (bit 2) = Standby
    ///   8 (bit 3) = Restart
    ///   16 (bit 4) = Operate
    ///   32 (bit 5) = Spare
    ///   64 (bit 6) = Spare
    pub rda_status: Code2,

    /// The RDA system's operability status.
    ///
    /// Statuses:
    ///   2 (bit 1) = On-line
    ///   4 (bit 2) = Maintenance action required
    ///   8 (bit 3) = Maintenance action mandatory
    ///   16 (bit 4) = Commanded shut down
    ///   32 (bit 5) = Inoperable
    pub operability_status: Code2,

    /// The RDA system's control status.
    ///
    /// Statuses:
    ///   2 (bit 1) = Local control only
    ///   4 (bit 2) = Remote (RPG) control only
    ///   8 (bit 3) = Either local or remote control
    pub control_status: Code2,

    /// The RDA system's auxiliary power generator state.
    ///
    /// States:
    ///   1 (bit 0) = Switched to auxiliary power
    ///   2 (bit 1) = Utility power available
    ///   4 (bit 2) = Generator on
    ///   8 (bit 3) = Transfer switch set to manual
    ///   16 (bit 4) = Commanded switchover
    pub auxiliary_power_generator_state: Code2,

    /// The average transmitter power in watts calculated over a range of samples.
    pub average_transmitter_power: Integer2,

    /// Difference from adaptation data (delta dBZ0) in dB. Scaling is two decimal places, e.g.
    /// a value of -19800 represents -198.00 dB.
    pub horizontal_reflectivity_calibration_correction: ScaledInteger2,

    /// Which types of data have transmission enabled.
    ///
    /// Types:
    ///   1 (bit 1) = None
    ///   2 (bit 2) = Reflectivity
    ///   4 (bit 3) = Velocity
    ///   8 (bit 4) = Spectrum width
    pub data_transmission_enabled: Code2,

    /// The radar's volume coverage pattern number.
    ///
    /// The magnitude of the value identifies the pattern, and the sign indicates whether it was
    /// specified locally or remotely. Zero indicates no pattern.
    pub volume_coverage_pattern: SInteger2,

    /// The RDA system's mode of control.
    ///
    /// Modes:
    ///   0 (none) = No action
    ///   2 (bit 1) = Local control requested
    ///   4 (bit 2) = Remote control requested (local released)
    pub rda_control_authorization: Code2,

    /// The RDA system's major and minor build numbers.
    ///
    /// If the value divided by 100 is greater than 2 then the build version is the value divided
    /// by 100, otherwise it is divided by 10.
    pub rda_build_number: ScaledInteger2,

    /// Whether the RDA system is operational.
    ///
    /// Modes:
    ///   4 (bit 2) = Operational
    ///   8 (bit 3) = Maintenance
    pub operational_mode: Code2,

    /// Whether the RDA system has super resolution enabled.
    ///
    /// Statuses:
    ///   2 (bit 1) = Enabled
    ///   4 (bit 2) = Disabled
    pub super_resolution_status: Code2,

    /// The RDA system's clutter mitigation status.
    ///
    /// Bits 1-5 indicate which elevation segments of the bypass map are applied.
    ///
    /// Statuses:
    ///   0 (none) = Disabled
    ///   1 (bit 0) = Enabled
    ///   2 (bit 1) = Bypass map elevation 1 applied
    ///   4 (bit 2) = Bypass map elevation 2 applied
    ///   8 (bit 3) = Bypass map elevation 3 applied
    ///   16 (bit 4) = Bypass map elevation 4 applied
    ///   32 (bit 5) = Bypass map elevation 5 applied
    pub clutter_mitigation_decision_status: Code2,

    /// Multiple flags for the RDA system's scan and data status.
    ///
    /// Flags:
    ///   2 (bit 1) = AVSET enabled
    ///   4 (bit 2) = AVSET disabled
    ///   8 (bit 3) = EBC enablement
    ///   16 (bit 4) = RDA log data enablement
    ///   32 (bit 5) = Time series data recording enablement
    pub rda_scan_and_data_flags: Code2,

    /// The RDA system's active alarm types.
    ///
    /// Types:
    ///   0 (none) = No alarms
    ///   1 (bit 1) = Tower/utilities
    ///   2 (bit 2) = Pedestal
    ///   4 (bit 3) = Transmitter
    ///   8 (bit 4) = Receiver
    ///   16 (bit 5) = RDA control
    ///   32 (bit 6) = Communication
    ///   64 (bit 7) = Signal processor
    pub rda_alarm_summary: Code2,

    /// Acknowledgement of command receipt by RDA system.
    ///
    /// Codes:
    ///   0 (none) = No acknowledgement
    ///   1 (bit 0) = Remote VCP received
    ///   2 (bit 1) = Clutter bypass map received
    ///   3 (bit 0&1) = Clutter censor zones received
    ///   4 (bit 2) = Redundant channel control command accepted
    pub command_acknowledgement: Code2,

    /// Indicates whether this is the RDA system's controlling channel.
    ///
    /// Values:
    ///   0 (none) = Controlling channel
    ///   1 (bit 0) = Non-controlling channel
    pub channel_control_status: Code2,

    /// The RDA system's spot blanking status.
    ///
    /// Statuses:
    ///   0 (none) = Not installed
    ///   1 (bit 1) = Enabled
    ///   4 (bit 2) = Disabled
    pub spot_blanking_status: Code2,

    /// The bypass map generation date represented as a count of days since 1 January 1970 00:00 GMT.
    /// It is also referred-to as a "modified Julian date" where it is the Julian date - 2440586.5.
    pub bypass_map_generation_date: Integer2,

    /// The bypass map generation time in minutes past midnight, GMT.
    pub bypass_map_generation_time: Integer2,

    /// The clutter filter map generation date represented as a count of days since 1 January 1970
    /// 00:00 GMT. It is also referred-to as a "modified Julian date" where it is the
    /// Julian date - 2440586.5.
    pub clutter_filter_map_generation_date: Integer2,

    /// The clutter filter map generation time in minutes past midnight, GMT.
    pub clutter_filter_map_generation_time: Integer2,

    /// The RDA system's vertical reflectivity calibration correction in dB.
    pub vertical_reflectivity_calibration_correction: ScaledInteger2,

    /// The RDA system's transition power source (TPS) status.
    ///
    /// Statuses:
    ///   0 (none) = Not installed
    ///   1 (bit 0) = Off
    ///   3 (bit 0&1) = OK
    ///   4 (bit 2) = Unknown
    pub transition_power_source_status: Integer2,

    /// The RDA system's RMS control status.
    ///
    /// Statuses:
    ///   0 (none) = Non-RMS system
    ///   2 (bit 1) = RMS in control
    ///   4 (bit 2) = RDA in control
    pub rms_control_status: Code2,

    /// The RDA system's performance check status.
    ///
    /// Statuses:
    ///   0 (none) = No command pending
    ///   1 (bit 0) = Force performance check pending
    ///   2 (bit 1) = In progress
    pub performance_check_status: Code2,

    /// The RDA system's alarm codes stored per-halfword up to 14 possible codes.
    pub alarm_codes: [Integer2; 14],

    /// Flags indicating the various RDA signal processing options.
    pub signal_processor_options: Code2,

    /// Spare halfwords; not decoded by this message's accessors.
    pub spares: [Integer2; 18],

    /// Version of status message.
    pub status_version: Integer2,
}
impl Message {
    /// The RDA system's status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn rda_status(&self) -> RDAStatus {
        match self.rda_status {
            2 => RDAStatus::StartUp,
            4 => RDAStatus::Standby,
            8 => RDAStatus::Restart,
            16 => RDAStatus::Operate,
            32 => RDAStatus::Spare,
            _ => panic!("Invalid RDA status: {}", self.rda_status),
        }
    }

    /// The RDA system's operability status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn operability_status(&self) -> OperabilityStatus {
        match self.operability_status {
            2 => OperabilityStatus::OnLine,
            4 => OperabilityStatus::MaintenanceActionRequired,
            8 => OperabilityStatus::MaintenanceActionMandatory,
            16 => OperabilityStatus::CommandedShutDown,
            32 => OperabilityStatus::Inoperable,
            _ => panic!(
                "Invalid RDA operability status: {}",
                self.operability_status
            ),
        }
    }

    /// The RDA system's control status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn control_status(&self) -> ControlStatus {
        match self.control_status {
            2 => ControlStatus::LocalControlOnly,
            4 => ControlStatus::RemoteControlOnly,
            8 => ControlStatus::EitherLocalOrRemoteControl,
            _ => panic!("Invalid RDA control status: {}", self.control_status),
        }
    }

    /// The RDA system's auxiliary power generator state.
    ///
    /// Panics if the raw code is not a recognized state.
    pub fn auxiliary_power_generator_state(&self) -> AuxiliaryPowerGeneratorState {
        match self.auxiliary_power_generator_state {
            1 => AuxiliaryPowerGeneratorState::SwitchedToAuxiliaryPower,
            2 => AuxiliaryPowerGeneratorState::UtilityPowerAvailable,
            4 => AuxiliaryPowerGeneratorState::GeneratorOn,
            8 => AuxiliaryPowerGeneratorState::TransferSwitchSetToManual,
            16 => AuxiliaryPowerGeneratorState::CommandedSwitchover,
            _ => panic!(
                "Invalid RDA auxiliary power generator state: {}",
                self.auxiliary_power_generator_state
            ),
        }
    }

    /// Difference from adaptation data (delta dBZ0) in dB.
    pub fn horizontal_reflectivity_calibration_correction(&self) -> f32 {
        // Raw value is scaled to two decimal places.
        self.horizontal_reflectivity_calibration_correction as f32 / 100.0
    }

    /// The types of data that have transmission enabled.
    pub fn data_transmission_enabled(&self) -> DataTransmissionEnabled {
        DataTransmissionEnabled::new(self.data_transmission_enabled)
    }

    /// The radar's volume coverage pattern number, or `None` if no pattern
    /// is in effect (raw value of zero).
    pub fn volume_coverage_pattern(&self) -> Option<VolumeCoveragePatternNumber> {
        if self.volume_coverage_pattern == 0 {
            return None;
        }
        Some(VolumeCoveragePatternNumber::new(
            self.volume_coverage_pattern,
        ))
    }

    /// The RDA system's mode of control.
    ///
    /// Panics if the raw code is not a recognized authorization.
    pub fn rda_control_authorization(&self) -> ControlAuthorization {
        match self.rda_control_authorization {
            0 => ControlAuthorization::NoAction,
            // Codes per the field documentation (ICD 2620002): 2 = local
            // control requested, 4 = remote control requested.
            2 => ControlAuthorization::LocalControlRequested,
            4 => ControlAuthorization::RemoteControlRequested,
            _ => panic!(
                "Invalid RDA control authorization: {}",
                self.rda_control_authorization
            ),
        }
    }

    /// The RDA system's major and minor build numbers.
    pub fn rda_build_number(&self) -> f32 {
        let number = self.rda_build_number as f32;
        // Builds after version 2 are scaled by 100, earlier builds by 10.
        if number / 100.0 > 2.0 {
            return number / 100.0;
        }
        number / 10.0
    }

    /// Whether the RDA system is operational.
    ///
    /// Panics if the raw code is not a recognized mode.
    pub fn operational_mode(&self) -> OperationalMode {
        match self.operational_mode {
            4 => OperationalMode::Operational,
            8 => OperationalMode::Maintenance,
            _ => panic!("Invalid RDA operational mode: {}", self.operational_mode),
        }
    }

    /// Whether the RDA system has super resolution enabled.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn super_resolution_status(&self) -> SuperResolutionStatus {
        match self.super_resolution_status {
            2 => SuperResolutionStatus::Enabled,
            4 => SuperResolutionStatus::Disabled,
            _ => panic!(
                "Invalid RDA super resolution status: {}",
                self.super_resolution_status
            ),
        }
    }

    /// The RDA system's clutter mitigation status.
    pub fn clutter_mitigation_decision_status(&self) -> ClutterMitigationDecisionStatus {
        match self.clutter_mitigation_decision_status {
            0 => ClutterMitigationDecisionStatus::Disabled,
            1 => ClutterMitigationDecisionStatus::Enabled,
            _ => {
                // Bits 1-5 flag bypass map elevation segments 1-5; bit 0 is
                // the "enabled" flag, not a segment.
                let mut segments = Vec::new();
                for i in 1..=5 {
                    if self.clutter_mitigation_decision_status & (1 << i) != 0 {
                        segments.push(i);
                    }
                }
                ClutterMitigationDecisionStatus::BypassMapElevationSegments(segments)
            }
        }
    }

    /// Multiple flags for the RDA system's scan and data status.
    pub fn rda_scan_and_data_flags(&self) -> ScanDataFlags {
        ScanDataFlags::new(self.rda_scan_and_data_flags)
    }

    /// The RDA system's active alarm types.
    pub fn rda_alarm_summary(&self) -> Summary {
        Summary::new(self.rda_alarm_summary)
    }

    /// Acknowledgement of command receipt by RDA system, or `None` if no
    /// acknowledgement is present.
    pub fn command_acknowledgement(&self) -> Option<CommandAcknowledgement> {
        match self.command_acknowledgement {
            1 => Some(CommandAcknowledgement::RemoteVCPReceived),
            2 => Some(CommandAcknowledgement::ClutterBypassMapReceived),
            3 => Some(CommandAcknowledgement::ClutterCensorZonesReceived),
            4 => Some(CommandAcknowledgement::RedundantChannelControlCommandAccepted),
            _ => None,
        }
    }

    /// Indicates whether this is the RDA system's controlling channel.
    pub fn controlling_channel(&self) -> bool {
        // Per the field documentation: 0 = controlling channel,
        // 1 = non-controlling channel.
        self.channel_control_status & 1 == 0
    }

    /// The RDA system's spot blanking status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn spot_blanking_status(&self) -> SpotBlankingStatus {
        match self.spot_blanking_status {
            0 => SpotBlankingStatus::NotInstalled,
            1 => SpotBlankingStatus::Enabled,
            4 => SpotBlankingStatus::Disabled,
            _ => panic!(
                "Invalid RDA spot blanking status: {}",
                self.spot_blanking_status
            ),
        }
    }

    /// The bypass map generation date and time in UTC.
    pub fn bypass_map_generation_date_time(&self) -> Option<DateTime<Utc>> {
        get_datetime(
            self.bypass_map_generation_date,
            Duration::minutes(self.bypass_map_generation_time as i64),
        )
    }

    /// The clutter filter map generation date and time in UTC.
    pub fn clutter_filter_map_generation_date_time(&self) -> Option<DateTime<Utc>> {
        get_datetime(
            self.clutter_filter_map_generation_date,
            Duration::minutes(self.clutter_filter_map_generation_time as i64),
        )
    }

    /// The RDA system's transition power source (TPS) status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn transition_power_source_status(&self) -> TransitionPowerSourceStatus {
        match self.transition_power_source_status {
            0 => TransitionPowerSourceStatus::NotInstalled,
            1 => TransitionPowerSourceStatus::Off,
            3 => TransitionPowerSourceStatus::OK,
            4 => TransitionPowerSourceStatus::Unknown,
            _ => panic!(
                "Invalid RDA TPS status: {}",
                self.transition_power_source_status
            ),
        }
    }

    /// The RDA system's RMS control status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn rms_control_status(&self) -> RMSControlStatus {
        match self.rms_control_status {
            0 => RMSControlStatus::NonRMS,
            2 => RMSControlStatus::RMSInControl,
            4 => RMSControlStatus::RDAInControl,
            _ => panic!(
                "Invalid RDA RMS control status: {}",
                self.rms_control_status
            ),
        }
    }

    /// The RDA system's performance check status.
    ///
    /// Panics if the raw code is not a recognized status.
    pub fn performance_check_status(&self) -> PerformanceCheckStatus {
        match self.performance_check_status {
            0 => PerformanceCheckStatus::NoCommandPending,
            1 => PerformanceCheckStatus::ForcePerformanceCheckPending,
            2 => PerformanceCheckStatus::InProgress,
            _ => panic!(
                "Invalid RDA performance check status: {}",
                self.performance_check_status
            ),
        }
    }

    /// The RDA system's alarm messages for all non-zero alarm codes that
    /// have a known message.
    pub fn alarm_messages(&self) -> Vec<alarm::Message> {
        self.alarm_codes
            .iter()
            .filter(|&code| *code != 0)
            .filter_map(|&code| alarm::get_alarm_message(code))
            .collect()
    }
}
impl Debug for Message {
    /// Renders the message with every raw code decoded via its accessor.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = f.debug_struct("Message");
        s.field("rda_status", &self.rda_status());
        s.field("operability_status", &self.operability_status());
        s.field("control_status", &self.control_status());
        s.field(
            "auxiliary_power_generator_state",
            &self.auxiliary_power_generator_state(),
        );
        s.field("average_transmitter_power", &self.average_transmitter_power);
        s.field(
            "horizontal_reflectivity_calibration_correction",
            &self.horizontal_reflectivity_calibration_correction(),
        );
        s.field(
            "data_transmission_enabled",
            &self.data_transmission_enabled(),
        );
        s.field("volume_coverage_pattern", &self.volume_coverage_pattern());
        s.field(
            "rda_control_authorization",
            &self.rda_control_authorization(),
        );
        s.field("rda_build_number", &self.rda_build_number());
        s.field("operational_mode", &self.operational_mode());
        s.field("super_resolution_status", &self.super_resolution_status());
        s.field(
            "clutter_mitigation_decision_status",
            &self.clutter_mitigation_decision_status(),
        );
        s.field("rda_scan_and_data_flags", &self.rda_scan_and_data_flags());
        s.field("rda_alarm_summary", &self.rda_alarm_summary());
        s.field("command_acknowledgement", &self.command_acknowledgement());
        s.field("channel_control_status", &self.controlling_channel());
        s.field("spot_blanking_status", &self.spot_blanking_status());
        s.field(
            "bypass_map_generation_date_time",
            &self.bypass_map_generation_date_time(),
        );
        s.field(
            "clutter_filter_map_generation_date_time",
            &self.clutter_filter_map_generation_date_time(),
        );
        s.field(
            "vertical_reflectivity_calibration_correction",
            &self.vertical_reflectivity_calibration_correction,
        );
        s.field(
            "transition_power_source_status",
            &self.transition_power_source_status(),
        );
        s.field("rms_control_status", &self.rms_control_status());
        s.field("performance_check_status", &self.performance_check_status());
        s.field("alarm_messages", &self.alarm_messages());
        s.field("signal_processor_options", &self.signal_processor_options);
        s.field("status_version", &self.status_version);
        s.finish()
    }
}

View file

@ -0,0 +1,52 @@
use crate::messages::primitive_aliases::Code2;
use std::fmt::Debug;
/// The multiple flags for the RDA system's scan and data status.
///
/// Wraps the raw scan-and-data flags halfword; see the accessor methods for
/// the individual flags.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct ScanDataFlags(Code2);
impl ScanDataFlags {
    /// Wraps a raw scan-and-data flags code.
    pub(crate) fn new(value: Code2) -> Self {
        Self(value)
    }

    /// Whether AVSET is enabled.
    ///
    /// In debug builds, asserts that exactly one of the enabled/disabled
    /// flags is set.
    pub fn avset_enabled(&self) -> bool {
        // Flag values per the field documentation (ICD 2620002):
        // 2 = AVSET enabled, 4 = AVSET disabled.
        let enabled_flag = self.0 & 0b000010 != 0;
        let disabled_flag = self.0 & 0b000100 != 0;
        debug_assert!(
            enabled_flag ^ disabled_flag,
            "Unexpected AVSET state (expected: enabled XOR disabled)"
        );
        enabled_flag
    }

    /// Whether EBC is enabled (flag value 8).
    pub fn ebc_enabled(&self) -> bool {
        self.0 & 0b001000 != 0
    }

    /// Whether RDA log data is enabled (flag value 16).
    pub fn rda_log_data_enabled(&self) -> bool {
        self.0 & 0b010000 != 0
    }

    /// Whether time series data recording is enabled (flag value 32).
    pub fn time_series_data_recording_enabled(&self) -> bool {
        self.0 & 0b100000 != 0
    }
}
impl Debug for ScanDataFlags {
    /// Renders each scan/data flag as a named boolean field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = f.debug_struct("ScanDataFlags");
        s.field("avset_enabled", &self.avset_enabled());
        s.field("ebc_enabled", &self.ebc_enabled());
        s.field("rda_log_data_enabled", &self.rda_log_data_enabled());
        s.field(
            "time_series_data_recording_enabled",
            &self.time_series_data_recording_enabled(),
        );
        s.finish()
    }
}

View file

@ -0,0 +1,37 @@
use crate::messages::primitive_aliases::SInteger2;
use std::fmt::Debug;
/// The RDA system's volume coverage pattern number.
///
/// Wraps the raw signed value: the magnitude identifies the pattern, and the
/// sign records whether it was specified locally or remotely.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct VolumeCoveragePatternNumber(SInteger2);
impl VolumeCoveragePatternNumber {
    /// Wraps a raw signed VCP value.
    pub(crate) fn new(value: SInteger2) -> Self {
        Self(value)
    }

    /// The volume coverage pattern number (the raw value's magnitude).
    pub fn number(&self) -> i16 {
        self.0.abs()
    }

    /// Whether the pattern was specified locally (negative raw value).
    pub fn local(&self) -> bool {
        self.0.is_negative()
    }

    /// Whether the pattern was specified remotely (positive raw value).
    pub fn remote(&self) -> bool {
        self.0.is_positive()
    }
}
impl Debug for VolumeCoveragePatternNumber {
    /// Renders the decoded pattern number and its local/remote origin.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut s = f.debug_struct("VolumeCoveragePatternNumber");
        s.field("number", &self.number());
        s.field("local", &self.local());
        s.field("remote", &self.remote());
        s.finish()
    }
}

View file

@ -0,0 +1,39 @@
//!
//! Message type 5 "Volume Coverage Pattern" provides details about the volume
//! coverage pattern being used. The RDA sends the Volume Coverage Pattern message
//! upon wideband connection and at the beginning of each volume scan. The volume
//! coverage pattern message includes a header which describes how the volume is being
//! collected as well as a block for each elevation cut detailing the radar settings
//! being used for that cut.
//!
use std::io::Read;
mod definitions;
pub use definitions::*;
mod header;
pub use header::Header;
mod message;
pub use message::Message;
mod elevation_data_block;
pub use elevation_data_block::ElevationDataBlock;
use crate::result::Result;
use crate::util::deserialize;
/// Decodes a volume coverage pattern message type 5 from the provided reader.
///
/// Reads the fixed header first, then one elevation data block per elevation
/// cut declared in the header.
///
/// # Errors
///
/// Returns an error if the header or any elevation data block fails to
/// deserialize from the reader.
pub fn decode_volume_coverage_pattern<R: Read>(reader: &mut R) -> Result<Message> {
    let header: Header = deserialize(reader)?;

    // Preallocate: the header states exactly how many cuts follow.
    let mut elevations: Vec<ElevationDataBlock> =
        Vec::with_capacity(header.number_of_elevation_cuts as usize);
    for _ in 0..header.number_of_elevation_cuts {
        elevations.push(deserialize(reader)?);
    }

    Ok(Message::new(header, elevations))
}

View file

@ -0,0 +1,39 @@
/// Possible values for the VCP pattern type.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum PatternType {
    /// A constant elevation cut pattern.
    Constant,
    /// An unrecognized pattern type code.
    Unknown,
}
/// Possible values for pulse width.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum PulseWidth {
    /// Short pulse.
    Short,
    /// Long pulse.
    Long,
    /// An unrecognized pulse width code.
    Unknown,
}
/// Possible values for channel configuration.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ChannelConfiguration {
    /// Constant phase (code 0).
    ConstantPhase,
    /// Random phase (code 1).
    RandomPhase,
    /// SZ2 phase (code 2).
    SZ2Phase,
    /// An unrecognized phase code.
    UnknownPhase,
}
/// Possible values for waveform type.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum WaveformType {
    /// Contiguous Surveillance (code 1)
    CS,
    /// Contiguous Doppler with Ambiguity Resolution (code 2)
    CDW,
    /// Contiguous Doppler without Ambiguity Resolution (code 3)
    CDWO,
    /// Batch (code 4)
    B,
    /// Staggered Pulse Pair (code 5)
    SPP,
    /// An unrecognized waveform code.
    Unknown,
}

View file

@ -0,0 +1,448 @@
use serde::Deserialize;
use std::fmt::Debug;
use crate::messages::primitive_aliases::{Code1, Code2, Integer1, Integer2, ScaledSInteger2};
use crate::messages::volume_coverage_pattern::definitions::{ChannelConfiguration, WaveformType};
#[cfg(feature = "uom")]
use uom::si::{
angle::degree,
angular_velocity::degree_per_second,
f64::{Angle, AngularVelocity},
};
/// A data block for a single elevation cut.
///
/// Angle and rate fields hold raw binary-coded values; use the accessor
/// methods to decode them into degrees / degrees-per-second.
#[derive(Clone, PartialEq, Deserialize)]
pub struct ElevationDataBlock {
    /// The elevation angle for this cut
    pub elevation_angle: Code2,
    /// The channel configuration for this cut
    ///   0 => Constant Phase
    ///   1 => Random Phase
    ///   2 => SZ2 Phase
    pub channel_configuration: Code1,
    /// The waveform type for this cut
    ///   1 => Contiguous Surveillance
    ///   2 => Contiguous Doppler w/ Ambiguity Resolution
    ///   3 => Contiguous Doppler w/o Ambiguity Resolution
    ///   4 => Batch
    ///   5 => Staggered Pulse Pair
    pub waveform_type: Code1,
    /// Super resolution control values for this cut
    ///   Bit 0: 0.5 degree azimuth
    ///   Bit 1: 1/4 km reflectivity
    ///   Bit 2: Doppler to 300 km
    ///   Bit 3: Dual polarization to 300 km
    pub super_resolution_control: Code1,
    /// The pulse repetition frequency number for surveillance cuts
    pub surveillance_prf_number: Integer1,
    /// The pulse count per radial for surveillance cuts
    pub surveillance_prf_pulse_count_radial: Integer2,
    /// The azimuth rate of the cut
    pub azimuth_rate: Code2,
    /// Signal to noise ratio (SNR) threshold for reflectivity
    pub reflectivity_threshold: ScaledSInteger2,
    /// Signal to noise ratio (SNR) threshold for velocity
    pub velocity_threshold: ScaledSInteger2,
    /// Signal to noise ratio (SNR) threshold for spectrum width
    pub spectrum_width_threshold: ScaledSInteger2,
    /// Signal to noise ratio (SNR) threshold for differential reflectivity
    pub differential_reflectivity_threshold: ScaledSInteger2,
    /// Signal to noise ratio (SNR) threshold for differential phase
    pub differential_phase_threshold: ScaledSInteger2,
    /// Signal to noise ratio (SNR) threshold for correlation coefficient
    pub correlation_coefficient_threshold: ScaledSInteger2,
    /// Sector 1 Azimuth Clockwise Edge Angle (denotes start angle)
    pub sector_1_edge_angle: Code2,
    /// Sector 1 Doppler PRF Number
    pub sector_1_doppler_prf_number: Integer2,
    /// Sector 1 Doppler Pulse Count/Radial
    pub sector_1_doppler_prf_pulse_count_radial: Integer2,
    /// Supplemental Data
    ///   Bit 0: SAILS Cut
    ///   Bits 1-3: SAILS Sequence Number
    ///   Bit 4: MRLE Cut
    ///   Bits 5-7: MRLE Sequence Number
    ///   Bit 8: Spare
    ///   Bit 9: MPDA Cut
    ///   Bit 10: BASE TILT Cut
    pub supplemental_data: Code2,
    /// Sector 2 Azimuth Clockwise Edge Angle (denotes start angle)
    pub sector_2_edge_angle: Code2,
    /// Sector 2 Doppler PRF Number
    pub sector_2_doppler_prf_number: Integer2,
    /// Sector 2 Doppler Pulse Count/Radial
    pub sector_2_doppler_prf_pulse_count_radial: Integer2,
    /// The correction added to the elevation angle for this cut
    pub ebc_angle: Code2,
    /// Sector 3 Azimuth Clockwise Edge Angle (denotes start angle)
    pub sector_3_edge_angle: Code2,
    /// Sector 3 Doppler PRF Number
    pub sector_3_doppler_prf_number: Integer2,
    /// Sector 3 Doppler Pulse Count/Radial
    pub sector_3_doppler_prf_pulse_count_radial: Integer2,
    /// Reserved
    pub reserved: Integer2,
}
/// Decodes an angle as defined in table III-A of ICD 2620002W.
///
/// Bits 3-15 are binary-angle fractions of 180 degrees; bits 0-2 are ignored.
fn decode_angle(raw: Code2) -> f64 {
    (3..16)
        .filter(|shift| raw >> shift & 1 == 1)
        .map(|shift| 180.0 * f64::powf(2.0, (shift - 15) as f64))
        .sum()
}
/// Decodes an angular velocity as defined in table XI-D of ICD 2620002W.
///
/// Bits 3-14 are binary fractions of 22.5 deg/s; bit 15 is the sign bit.
fn decode_angular_velocity(raw: Code2) -> f64 {
    let magnitude: f64 = (3..15)
        .filter(|shift| raw >> shift & 1 == 1)
        .map(|shift| 22.5 * f64::powf(2.0, (shift - 14) as f64))
        .sum();

    if raw >> 15 & 1 == 1 {
        -magnitude
    } else {
        magnitude
    }
}
impl ElevationDataBlock {
    /// The elevation angle for this cut
    #[cfg(feature = "uom")]
    pub fn elevation_angle(&self) -> Angle {
        Angle::new::<degree>(decode_angle(self.elevation_angle))
    }
    /// The elevation angle for this cut, in degrees
    pub fn elevation_angle_degrees(&self) -> f64 {
        decode_angle(self.elevation_angle)
    }
    /// The channel configuration for this cut
    pub fn channel_configuration(&self) -> ChannelConfiguration {
        match self.channel_configuration {
            0 => ChannelConfiguration::ConstantPhase,
            1 => ChannelConfiguration::RandomPhase,
            2 => ChannelConfiguration::SZ2Phase,
            _ => ChannelConfiguration::UnknownPhase,
        }
    }
    /// The waveform type for this cut
    pub fn waveform_type(&self) -> WaveformType {
        match self.waveform_type {
            1 => WaveformType::CS,
            2 => WaveformType::CDW,
            3 => WaveformType::CDWO,
            4 => WaveformType::B,
            5 => WaveformType::SPP,
            _ => WaveformType::Unknown,
        }
    }
    /// Whether this cut uses super resolution 0.5 degree azimuth (bit 0)
    pub fn super_resolution_control_half_degree_azimuth(&self) -> bool {
        (self.super_resolution_control & 0x1) == 1
    }
    /// Whether this cut uses super resolution 0.25 km reflectivity (bit 1)
    pub fn super_resolution_control_quarter_km_reflectivity(&self) -> bool {
        ((self.super_resolution_control >> 1) & 0x1) == 1
    }
    /// Whether this cut uses super resolution doppler to 300 km (bit 2)
    pub fn super_resolution_control_doppler_to_300km(&self) -> bool {
        ((self.super_resolution_control >> 2) & 0x1) == 1
    }
    /// Whether this cut uses super resolution dual polarization to 300km (bit 3)
    pub fn super_resolution_control_dual_polarization_to_300km(&self) -> bool {
        ((self.super_resolution_control >> 3) & 0x1) == 1
    }
    /// The azimuth rate used for this cut
    #[cfg(feature = "uom")]
    pub fn azimuth_rate(&self) -> AngularVelocity {
        AngularVelocity::new::<degree_per_second>(decode_angular_velocity(self.azimuth_rate))
    }
    /// The azimuth rate used for this cut, in degrees per second
    pub fn azimuth_rate_degrees_per_second(&self) -> f64 {
        decode_angular_velocity(self.azimuth_rate)
    }
    /// The reflectivity threshold for this cut (raw value scaled by 1/8 dB)
    pub fn reflectivity_threshold(&self) -> f64 {
        self.reflectivity_threshold as f64 * 0.125
    }
    /// The velocity threshold for this cut (raw value scaled by 1/8 dB)
    pub fn velocity_threshold(&self) -> f64 {
        self.velocity_threshold as f64 * 0.125
    }
    /// The spectrum width threshold for this cut (raw value scaled by 1/8 dB)
    pub fn spectrum_width_threshold(&self) -> f64 {
        self.spectrum_width_threshold as f64 * 0.125
    }
    /// The differential reflectivity threshold for this cut (raw value scaled by 1/8 dB)
    pub fn differential_reflectivity_threshold(&self) -> f64 {
        self.differential_reflectivity_threshold as f64 * 0.125
    }
    /// The differential phase threshold for this cut (raw value scaled by 1/8 dB)
    pub fn differential_phase_threshold(&self) -> f64 {
        self.differential_phase_threshold as f64 * 0.125
    }
    /// The correlation coefficient threshold for this cut (raw value scaled by 1/8 dB)
    pub fn correlation_coefficient_threshold(&self) -> f64 {
        self.correlation_coefficient_threshold as f64 * 0.125
    }
    /// Sector 1 Azimuth Clockwise Edge Angle (denotes start angle)
    #[cfg(feature = "uom")]
    pub fn sector_1_edge_angle(&self) -> Angle {
        Angle::new::<degree>(decode_angle(self.sector_1_edge_angle))
    }
    /// Sector 1 Azimuth Clockwise Edge Angle (denotes start angle), in degrees
    pub fn sector_1_edge_angle_degrees(&self) -> f64 {
        decode_angle(self.sector_1_edge_angle)
    }
    /// Sector 2 Azimuth Clockwise Edge Angle (denotes start angle)
    #[cfg(feature = "uom")]
    pub fn sector_2_edge_angle(&self) -> Angle {
        Angle::new::<degree>(decode_angle(self.sector_2_edge_angle))
    }
    /// Sector 2 Azimuth Clockwise Edge Angle (denotes start angle), in degrees
    pub fn sector_2_edge_angle_degrees(&self) -> f64 {
        decode_angle(self.sector_2_edge_angle)
    }
    /// Sector 3 Azimuth Clockwise Edge Angle (denotes start angle)
    #[cfg(feature = "uom")]
    pub fn sector_3_edge_angle(&self) -> Angle {
        Angle::new::<degree>(decode_angle(self.sector_3_edge_angle))
    }
    /// Sector 3 Azimuth Clockwise Edge Angle (denotes start angle), in degrees
    pub fn sector_3_edge_angle_degrees(&self) -> f64 {
        decode_angle(self.sector_3_edge_angle)
    }
    /// The correction added to the elevation angle for this cut
    #[cfg(feature = "uom")]
    pub fn ebc_angle(&self) -> Angle {
        Angle::new::<degree>(decode_angle(self.ebc_angle))
    }
    /// The correction added to the elevation angle for this cut, in degrees
    pub fn ebc_angle_degrees(&self) -> f64 {
        decode_angle(self.ebc_angle)
    }
    /// Whether this cut is a SAILS cut (supplemental data bit 0)
    pub fn supplemental_data_sails_cut(&self) -> bool {
        (self.supplemental_data & 0x0001) == 1
    }
    /// The SAILS sequence number of this cut (supplemental data bits 1-3)
    pub fn supplemental_data_sails_sequence_number(&self) -> u8 {
        ((self.supplemental_data & 0x000E) >> 1) as u8
    }
    /// Whether this cut is an MRLE cut (supplemental data bit 4)
    pub fn supplemental_data_mrle_cut(&self) -> bool {
        ((self.supplemental_data & 0x0010) >> 4) == 1
    }
    /// The MRLE sequence number of this cut (supplemental data bits 5-7)
    pub fn supplemental_data_mrle_sequence_number(&self) -> u8 {
        ((self.supplemental_data & 0x00E0) >> 5) as u8
    }
    /// Whether this cut is an MPDA cut (supplemental data bit 9)
    pub fn supplemental_data_mpda_cut(&self) -> bool {
        ((self.supplemental_data & 0x0200) >> 9) == 1
    }
    /// Whether this cut is a BASE TILT cut (supplemental data bit 10)
    pub fn supplemental_data_base_tilt_cut(&self) -> bool {
        ((self.supplemental_data & 0x0400) >> 10) == 1
    }
}
impl Debug for ElevationDataBlock {
    // Pairs each raw bitfield word (suffixed `_raw` / shown verbatim) with its
    // decoded accessor values so debug output is readable without consulting
    // the ICD bit layouts. Field order mirrors the wire layout of the block.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("ElevationDataBlock");
        // Angle/rate fields appear as `uom` quantities when that feature is
        // enabled, otherwise as plain numeric degrees (per second).
        #[cfg(feature = "uom")]
        debug.field("elevation_angle", &self.elevation_angle());
        #[cfg(not(feature = "uom"))]
        debug.field("elevation_angle", &self.elevation_angle_degrees());
        debug.field("channel_configuration", &self.channel_configuration());
        debug.field("waveform_type", &self.waveform_type());
        debug.field(
            "super_resolution_control_raw",
            &self.super_resolution_control,
        );
        debug.field(
            "super_resolution_control_half_degree_azimuth",
            &self.super_resolution_control_half_degree_azimuth(),
        );
        debug.field(
            "super_resolution_control_quarter_km_reflectivity",
            &self.super_resolution_control_quarter_km_reflectivity(),
        );
        debug.field(
            "super_resolution_control_doppler_to_300km",
            &self.super_resolution_control_doppler_to_300km(),
        );
        debug.field(
            "super_resolution_control_dual_polarization_to_300km",
            &self.super_resolution_control_dual_polarization_to_300km(),
        );
        debug.field("surveillance_prf_number", &self.surveillance_prf_number);
        debug.field(
            "surveillance_prf_pulse_count_radial",
            &self.surveillance_prf_pulse_count_radial,
        );
        #[cfg(feature = "uom")]
        debug.field("azimuth_rate", &self.azimuth_rate());
        #[cfg(not(feature = "uom"))]
        debug.field("azimuth_rate", &self.azimuth_rate_degrees_per_second());
        // Moment thresholds, already decoded from eighth-unit increments.
        debug.field("reflectivity_threshold", &self.reflectivity_threshold());
        debug.field("velocity_threshold", &self.velocity_threshold());
        debug.field("spectrum_width_threshold", &self.spectrum_width_threshold());
        debug.field(
            "differential_reflectivity_threshold",
            &self.differential_reflectivity_threshold(),
        );
        debug.field(
            "differential_phase_threshold",
            &self.differential_phase_threshold(),
        );
        debug.field(
            "correlation_coefficient_threshold",
            &self.correlation_coefficient_threshold(),
        );
        // Per-sector doppler PRF settings.
        #[cfg(feature = "uom")]
        debug.field("sector_1_edge_angle", &self.sector_1_edge_angle());
        #[cfg(not(feature = "uom"))]
        debug.field("sector_1_edge_angle", &self.sector_1_edge_angle_degrees());
        debug.field(
            "sector_1_doppler_prf_number",
            &self.sector_1_doppler_prf_number,
        );
        debug.field(
            "sector_1_doppler_prf_pulse_count_radial",
            &self.sector_1_doppler_prf_pulse_count_radial,
        );
        #[cfg(feature = "uom")]
        debug.field("sector_2_edge_angle", &self.sector_2_edge_angle());
        #[cfg(not(feature = "uom"))]
        debug.field("sector_2_edge_angle", &self.sector_2_edge_angle_degrees());
        debug.field(
            "sector_2_doppler_prf_number",
            &self.sector_2_doppler_prf_number,
        );
        debug.field(
            "sector_2_doppler_prf_pulse_count_radial",
            &self.sector_2_doppler_prf_pulse_count_radial,
        );
        #[cfg(feature = "uom")]
        debug.field("sector_3_edge_angle", &self.sector_3_edge_angle());
        #[cfg(not(feature = "uom"))]
        debug.field("sector_3_edge_angle", &self.sector_3_edge_angle_degrees());
        debug.field(
            "sector_3_doppler_prf_number",
            &self.sector_3_doppler_prf_number,
        );
        debug.field(
            "sector_3_doppler_prf_pulse_count_radial",
            &self.sector_3_doppler_prf_pulse_count_radial,
        );
        #[cfg(feature = "uom")]
        debug.field("ebc_angle", &self.ebc_angle());
        #[cfg(not(feature = "uom"))]
        debug.field("ebc_angle", &self.ebc_angle_degrees());
        // Supplemental data word: raw value first, then decoded flags.
        debug.field("supplemental_data", &self.supplemental_data);
        debug.field(
            "supplemental_data_sails_cut",
            &self.supplemental_data_sails_cut(),
        );
        debug.field(
            "supplemental_data_sails_sequence_number",
            &self.supplemental_data_sails_sequence_number(),
        );
        debug.field(
            "supplemental_data_mrle_cut",
            &self.supplemental_data_mrle_cut(),
        );
        debug.field(
            "supplemental_data_mrle_sequence_number",
            &self.supplemental_data_mrle_sequence_number(),
        );
        debug.field(
            "supplemental_data_mpda_cut",
            &self.supplemental_data_mpda_cut(),
        );
        debug.field(
            "supplemental_data_base_tilt_cut",
            &self.supplemental_data_base_tilt_cut(),
        );
        debug.field("reserved", &self.reserved);
        debug.finish()
    }
}

View file

@ -0,0 +1,233 @@
use serde::Deserialize;
use std::fmt::Debug;
use crate::messages::primitive_aliases::{Code1, Code2, Integer1, Integer2, Integer4};
use crate::messages::volume_coverage_pattern::definitions::*;
#[cfg(feature = "uom")]
use uom::si::{f64::Velocity, velocity::meter_per_second};
/// The volume coverage pattern header block
#[derive(Clone, PartialEq, Deserialize)]
pub struct Header {
    /// Total message size in halfwords, including the header and all elevation blocks
    pub message_size: Integer2,
    /// Pattern type is always 2
    pub pattern_type: Code2,
    /// Volume Coverage Pattern Number
    pub pattern_number: Integer2,
    /// Number of elevation cuts in the complete volume scan
    pub number_of_elevation_cuts: Integer2,
    /// Volume Coverage Pattern Version Number
    pub version: Integer1,
    /// Clutter map groups are not currently implemented
    pub clutter_map_group_number: Integer1,
    /// Doppler velocity resolution.
    /// 2 -> 0.5 m/s
    /// 4 -> 1.0 m/s
    pub doppler_velocity_resolution: Code1,
    /// Pulse width values.
    /// 2 -> Short
    /// 4 -> Long
    pub pulse_width: Code1,
    /// Reserved
    pub reserved_1: Integer4,
    /// VCP sequencing values, decoded by the `vcp_sequencing_*` accessors.
    /// Bits 0-4: Number of Elevations
    /// Bits 5-6: Maximum SAILS Cuts
    /// Bit 13: Sequence Active
    /// Bit 14: Truncated VCP
    pub vcp_sequencing: Code2,
    /// VCP supplemental data, decoded by the `vcp_supplemental_data_*` accessors.
    /// Bit 0: SAILS VCP
    /// Bits 1-3: Number SAILS Cuts
    /// Bit 4: MRLE VCP
    /// Bits 5-7: Number MRLE Cuts
    /// Bits 8-10: Spare
    /// Bit 11: MPDA VCP
    /// Bit 12: BASE TILT VCP
    /// Bits 13-15: Number of BASE TILTS
    pub vcp_supplemental_data: Code2,
    /// Reserved
    pub reserved_2: Integer2,
}
impl Header {
    /// The pattern type of the volume coverage pattern
    pub fn pattern_type(&self) -> PatternType {
        // Only pattern type 2 (constant) is defined.
        if self.pattern_type == 2 {
            PatternType::Constant
        } else {
            PatternType::Unknown
        }
    }
    /// The doppler velocity resolution of this coverage pattern
    #[cfg(feature = "uom")]
    pub fn doppler_velocity_resolution(&self) -> Option<Velocity> {
        self.doppler_velocity_resolution_meters_per_second()
            .map(Velocity::new::<meter_per_second>)
    }
    /// The doppler velocity resolution of this coverage pattern in m/s
    pub fn doppler_velocity_resolution_meters_per_second(&self) -> Option<f64> {
        if self.doppler_velocity_resolution == 2 {
            Some(0.5)
        } else if self.doppler_velocity_resolution == 4 {
            Some(1.0)
        } else {
            None
        }
    }
    /// The pulse width for this VCP
    pub fn pulse_width(&self) -> PulseWidth {
        if self.pulse_width == 2 {
            PulseWidth::Short
        } else if self.pulse_width == 4 {
            PulseWidth::Long
        } else {
            PulseWidth::Unknown
        }
    }
    /// The number of elevations in the VCP (sequencing word bits 0-4)
    pub fn vcp_sequencing_number_of_elevations(&self) -> u8 {
        (self.vcp_sequencing & 0b0001_1111) as u8
    }
    /// The maximum number of SAILS cuts allowed in this VCP (sequencing word bits 5-6)
    pub fn vcp_sequencing_maximum_sails_cuts(&self) -> u8 {
        ((self.vcp_sequencing >> 5) & 0b11) as u8
    }
    /// Whether this VCP is a part of an active VCP sequence (sequencing word bit 13)
    pub fn vcp_sequencing_sequence_active(&self) -> bool {
        self.vcp_sequencing & (1 << 13) != 0
    }
    /// Whether this VCP is truncated (sequencing word bit 14)
    pub fn vcp_sequencing_truncated_vcp(&self) -> bool {
        self.vcp_sequencing & (1 << 14) != 0
    }
    /// Whether this VCP uses SAILS cuts (supplemental word bit 0)
    pub fn vcp_supplemental_data_sails_vcp(&self) -> bool {
        self.vcp_supplemental_data & 1 != 0
    }
    /// The number of SAILS cuts used by this VCP (supplemental word bits 1-3)
    pub fn vcp_supplemental_data_number_sails_cuts(&self) -> u8 {
        ((self.vcp_supplemental_data >> 1) & 0b111) as u8
    }
    /// Whether this VCP uses MRLE cuts (supplemental word bit 4)
    pub fn vcp_supplemental_data_mrle_vcp(&self) -> bool {
        self.vcp_supplemental_data & (1 << 4) != 0
    }
    /// The number of MRLE cuts used by this VCP (supplemental word bits 5-7)
    pub fn vcp_supplemental_data_number_mrle_cuts(&self) -> u8 {
        ((self.vcp_supplemental_data >> 5) & 0b111) as u8
    }
    /// Whether this VCP is a Multi-PRF Dealiasing Algorithm (MPDA) VCP (supplemental word bit 11)
    pub fn vcp_supplemental_data_mpda_vcp(&self) -> bool {
        self.vcp_supplemental_data & (1 << 11) != 0
    }
    /// Whether this VCP contains BASE TILTS (supplemental word bit 12)
    pub fn vcp_supplemental_data_base_tilt_vcp(&self) -> bool {
        self.vcp_supplemental_data & (1 << 12) != 0
    }
    /// The number of BASE TILTS in this VCP (supplemental word bits 13-15)
    pub fn vcp_supplemental_data_base_tilts(&self) -> u8 {
        ((self.vcp_supplemental_data >> 13) & 0b111) as u8
    }
}
impl Debug for Header {
    // Emits the raw sequencing/supplemental words alongside every decoded
    // accessor value so dumps are readable without consulting the bit layouts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("Header");
        debug.field("message_size", &self.message_size);
        debug.field("pattern_type", &self.pattern_type());
        debug.field("pattern_number", &self.pattern_number);
        debug.field("number_of_elevation_cuts", &self.number_of_elevation_cuts);
        debug.field("version", &self.version);
        debug.field("clutter_map_group_number", &self.clutter_map_group_number);
        // `uom` builds show a typed Velocity; otherwise plain m/s values.
        #[cfg(feature = "uom")]
        debug.field(
            "doppler_velocity_resolution",
            &self.doppler_velocity_resolution(),
        );
        #[cfg(not(feature = "uom"))]
        debug.field(
            "doppler_velocity_resolution",
            &self.doppler_velocity_resolution_meters_per_second(),
        );
        debug.field("pulse_width", &self.pulse_width());
        debug.field("reserved_1", &self.reserved_1);
        // Sequencing word: raw value first, then decoded fields.
        debug.field("vcp_sequencing_raw", &self.vcp_sequencing);
        debug.field(
            "vcp_sequencing_number_of_elevations",
            &self.vcp_sequencing_number_of_elevations(),
        );
        debug.field(
            "vcp_sequencing_maximum_sails_cuts",
            &self.vcp_sequencing_maximum_sails_cuts(),
        );
        debug.field(
            "vcp_sequencing_sequence_active",
            &self.vcp_sequencing_sequence_active(),
        );
        debug.field(
            "vcp_sequencing_truncated_vcp",
            &self.vcp_sequencing_truncated_vcp(),
        );
        // Supplemental word: raw value first, then decoded fields.
        debug.field("vcp_supplemental_data_raw", &self.vcp_supplemental_data);
        debug.field(
            "vcp_supplemental_data_sails_vcp",
            &self.vcp_supplemental_data_sails_vcp(),
        );
        debug.field(
            "vcp_supplemental_data_number_sails_cuts",
            &self.vcp_supplemental_data_number_sails_cuts(),
        );
        debug.field(
            "vcp_supplemental_data_mrle_vcp",
            &self.vcp_supplemental_data_mrle_vcp(),
        );
        debug.field(
            "vcp_supplemental_data_number_mrle_cuts",
            &self.vcp_supplemental_data_number_mrle_cuts(),
        );
        debug.field(
            "vcp_supplemental_data_mpda_vcp",
            &self.vcp_supplemental_data_mpda_vcp(),
        );
        debug.field(
            "vcp_supplemental_data_base_tilt_vcp",
            &self.vcp_supplemental_data_base_tilt_vcp(),
        );
        debug.field(
            "vcp_supplemental_data_base_tilts",
            &self.vcp_supplemental_data_base_tilts(),
        );
        debug.field("reserved_2", &self.reserved_2);
        debug.finish()
    }
}

View file

@ -0,0 +1,19 @@
use crate::messages::volume_coverage_pattern::{ElevationDataBlock, Header};
/// The volume coverage pattern message describes the scan strategy for a
/// volume: a header followed by one data block per elevation cut.
/// (The previous doc comment was copy-pasted from the digital radar data
/// message and did not describe this type.)
#[derive(Debug, Clone, PartialEq)]
pub struct Message {
    /// The decoded volume coverage pattern header.
    pub header: Header,
    /// The decoded elevation data blocks.
    pub elevations: Vec<ElevationDataBlock>,
}
impl Message {
/// Create a new volume coverage pattern message
pub(crate) fn new(header: Header, elevations: Vec<ElevationDataBlock>) -> Self {
Self { header, elevations }
}
}

View file

@ -0,0 +1,19 @@
//!
//! Contains the Result and Error types for NEXRAD operations.
//!
use thiserror::Error as ThisError;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(ThisError, Debug)]
pub enum Error {
    /// An I/O failure occurred while reading a data file.
    #[error("data file IO error")]
    FileError(#[from] std::io::Error),
    /// The file's binary contents could not be deserialized.
    #[error("file deserialization error")]
    DeserializationError(#[from] bincode::Error),
    /// The file could not be decoded; the payload describes the failure.
    #[error("file decoding error: {0}")]
    DecodingError(String),
    /// A message was expected to carry a collection date/time but did not.
    #[error("message is missing collection date/time")]
    MessageMissingDateError,
}

View file

@ -0,0 +1,260 @@
//! # Summarize Module
//!
//! The `summarize` module provides functionality for generating human-readable summaries
//! of NEXRAD radar data messages. It processes raw radar messages and organizes them into
//! logical groups based on their type and content, making it easier to understand the
//! structure and content of radar data files.
//!
//! The primary function in this module is `messages()`, which takes a slice of `Message` objects
//! and returns a `MessageSummary` containing organized information about those messages.
//! The summary includes:
//!
//! * Volume coverage patterns found in the messages
//! * Logical groupings of related messages
//! * Time range of the data collection
//! * Detailed information about radar status, scan strategies, and data types
use crate::messages::{Message, MessageContents, MessageType};
use std::collections::{HashMap, HashSet};
mod message;
pub use message::MessageSummary;
mod group;
pub use group::MessageGroupSummary;
mod rda;
pub use rda::RDAStatusInfo;
mod vcp;
pub use vcp::{VCPElevationInfo, VCPInfo};
/// Processes a collection of NEXRAD messages and generates a comprehensive summary.
///
/// This function analyzes the provided messages and organizes them into logical groups
/// based on their type and content. It extracts key information such as:
///
/// * Volume coverage patterns
/// * Time range of data collection
/// * Radar operational status
/// * Scan strategy details
/// * Data types present in each elevation cut
///
/// The function handles various message types including:
/// * Digital Radar Data messages (reflectivity, velocity, etc.)
/// * RDA Status Data messages
/// * Volume Coverage Pattern messages
/// * Other message types
///
/// Messages of the same type and characteristics are grouped together to provide
/// a more concise and understandable summary.
pub fn messages(messages: &[Message]) -> MessageSummary {
    let mut summary = MessageSummary {
        volume_coverage_patterns: HashSet::new(),
        message_groups: Vec::new(),
        earliest_collection_time: None,
        latest_collection_time: None,
    };
    // The group currently being accumulated; it is flushed into
    // `summary.message_groups` whenever the next message cannot be merged.
    let mut current_group: Option<MessageGroupSummary> = None;
    for (i, message) in messages.iter().enumerate() {
        let message_type = message.header().message_type();
        let message_time = message.header().date_time();
        match message.contents() {
            MessageContents::DigitalRadarData(radar_data) => {
                // Track the overall collection time range. The epoch guard
                // (`timestamp_millis() > 0`) discards unset times.
                // NOTE(review): the guard is only applied to the earliest
                // bound — confirm the latest bound should accept epoch times.
                if let Some(time) = message_time {
                    if (summary.earliest_collection_time.is_none()
                        || summary.earliest_collection_time > Some(time))
                        && time.timestamp_millis() > 0
                    {
                        summary.earliest_collection_time = Some(time);
                    }
                    if summary.latest_collection_time.is_none()
                        || summary.latest_collection_time < Some(time)
                    {
                        summary.latest_collection_time = Some(time);
                    }
                }
                let elevation_number = radar_data.header.elevation_number;
                // Radials merge into the current group only when it is also
                // radar data on the same elevation cut.
                let can_continue = if let Some(group) = &current_group {
                    group.message_type == MessageType::RDADigitalRadarDataGenericFormat
                        && group.elevation_number == Some(elevation_number)
                } else {
                    false
                };
                if !can_continue {
                    if let Some(group) = current_group.take() {
                        summary.message_groups.push(group);
                    }
                    current_group = Some(MessageGroupSummary {
                        message_type: MessageType::RDADigitalRadarDataGenericFormat,
                        start_time: message_time,
                        end_time: message_time,
                        message_count: 1,
                        elevation_number: Some(elevation_number),
                        elevation_angle: Some(radar_data.header.elevation_angle),
                        start_azimuth: Some(radar_data.header.azimuth_angle),
                        end_azimuth: Some(radar_data.header.azimuth_angle),
                        data_types: Some(HashMap::new()),
                        rda_status_info: None,
                        vcp_info: None,
                        // Marked continued when an earlier group already
                        // covered this elevation cut (i.e. the cut's radials
                        // were interrupted by other messages).
                        is_continued: !summary.message_groups.is_empty()
                            && summary.message_groups.iter().rev().any(|g| {
                                g.message_type == MessageType::RDADigitalRadarDataGenericFormat
                                    && g.elevation_number == Some(elevation_number)
                            }),
                        start_message_index: i,
                        end_message_index: i,
                    });
                } else if let Some(group) = &mut current_group {
                    // Extend the current group with this radial.
                    group.end_time = message_time;
                    group.message_count += 1;
                    group.end_azimuth = Some(radar_data.header.azimuth_angle);
                    group.end_message_index = i;
                }
                // Count which data moments are present in this radial.
                if let Some(group) = &mut current_group {
                    if let Some(data_types) = group.data_types.as_mut() {
                        let mut increment_count = |data_type: &str| {
                            let count = data_types.get(data_type).unwrap_or(&0) + 1;
                            data_types.insert(data_type.to_string(), count);
                        };
                        if radar_data.reflectivity_data_block.is_some() {
                            increment_count("Reflectivity");
                        }
                        if radar_data.velocity_data_block.is_some() {
                            increment_count("Velocity");
                        }
                        if radar_data.spectrum_width_data_block.is_some() {
                            increment_count("Spectrum Width");
                        }
                        if radar_data.differential_reflectivity_data_block.is_some() {
                            increment_count("Differential Reflectivity");
                        }
                        if radar_data.differential_phase_data_block.is_some() {
                            increment_count("Differential Phase");
                        }
                        if radar_data.correlation_coefficient_data_block.is_some() {
                            increment_count("Correlation Coefficient");
                        }
                        if radar_data.specific_diff_phase_data_block.is_some() {
                            increment_count("Specific Differential Phase");
                        }
                    }
                }
                // Record the VCP reported by the radial's volume data block.
                if let Some(volume_data) = &radar_data.volume_data_block {
                    summary
                        .volume_coverage_patterns
                        .insert(volume_data.volume_coverage_pattern());
                }
            }
            MessageContents::RDAStatusData(status_data) => {
                // Same time-range tracking as the radar data branch.
                if let Some(time) = message_time {
                    if (summary.earliest_collection_time.is_none()
                        || summary.earliest_collection_time > Some(time))
                        && time.timestamp_millis() > 0
                    {
                        summary.earliest_collection_time = Some(time);
                    }
                    if summary.latest_collection_time.is_none()
                        || summary.latest_collection_time < Some(time)
                    {
                        summary.latest_collection_time = Some(time);
                    }
                }
                // Each RDA status message is treated individually
                if let Some(group) = current_group.take() {
                    summary.message_groups.push(group);
                }
                current_group = Some(MessageGroupSummary {
                    message_type: MessageType::RDAStatusData,
                    start_time: message_time,
                    end_time: message_time,
                    message_count: 1,
                    elevation_number: None,
                    elevation_angle: None,
                    start_azimuth: None,
                    end_azimuth: None,
                    data_types: None,
                    rda_status_info: Some(rda::extract_rda_status_info(status_data)),
                    vcp_info: None,
                    is_continued: false,
                    start_message_index: i,
                    end_message_index: i,
                });
            }
            MessageContents::VolumeCoveragePattern(vcp_data) => {
                // Each Volume Coverage Pattern message is treated individually
                if let Some(group) = current_group.take() {
                    summary.message_groups.push(group);
                }
                current_group = Some(MessageGroupSummary {
                    message_type: MessageType::RDAVolumeCoveragePattern,
                    start_time: message_time,
                    end_time: message_time,
                    message_count: 1,
                    elevation_number: None,
                    elevation_angle: None,
                    start_azimuth: None,
                    end_azimuth: None,
                    data_types: None,
                    rda_status_info: None,
                    vcp_info: Some(vcp::extract_vcp_info(vcp_data)),
                    is_continued: false,
                    start_message_index: i,
                    end_message_index: i,
                });
            }
            _ => {
                // All other message types coalesce into runs of the same type.
                let can_combine = if let Some(group) = &current_group {
                    group.message_type == message_type
                } else {
                    false
                };
                if !can_combine {
                    if let Some(group) = current_group.take() {
                        summary.message_groups.push(group);
                    }
                    current_group = Some(MessageGroupSummary {
                        message_type,
                        start_time: message_time,
                        end_time: message_time,
                        message_count: 1,
                        elevation_number: None,
                        elevation_angle: None,
                        start_azimuth: None,
                        end_azimuth: None,
                        data_types: None,
                        rda_status_info: None,
                        vcp_info: None,
                        is_continued: false,
                        start_message_index: i,
                        end_message_index: i,
                    });
                } else if let Some(group) = &mut current_group {
                    group.end_time = message_time;
                    group.message_count += 1;
                    group.end_message_index = i;
                }
            }
        }
    }
    // Flush the final in-progress group.
    if let Some(group) = current_group {
        summary.message_groups.push(group);
    }
    summary
}

View file

@ -0,0 +1,264 @@
use super::{RDAStatusInfo, VCPInfo};
use crate::messages::MessageType;
use chrono::{DateTime, Utc};
use std::{
collections::HashMap,
fmt::{Display, Formatter, Result},
};
/// Summary of a single message or group of related messages
#[derive(Clone, PartialEq, Debug)]
pub struct MessageGroupSummary {
    /// The message type shared by every message in this group.
    pub message_type: MessageType,
    /// Collection time of the first message in the group, if known.
    pub start_time: Option<DateTime<Utc>>,
    /// Collection time of the last message in the group, if known.
    pub end_time: Option<DateTime<Utc>>,
    /// Number of messages combined into this group.
    pub message_count: usize,
    // For DigitalRadarData messages
    /// Elevation cut number of the radials in this group.
    pub elevation_number: Option<u8>,
    /// Elevation angle of the radials, in degrees.
    pub elevation_angle: Option<f32>,
    /// Azimuth of the first radial, in degrees.
    pub start_azimuth: Option<f32>,
    /// Azimuth of the last radial, in degrees.
    pub end_azimuth: Option<f32>,
    /// Counts of data moments present, keyed by moment name.
    pub data_types: Option<HashMap<String, usize>>,
    // For RDAStatusData messages
    /// Decoded RDA status details.
    pub rda_status_info: Option<RDAStatusInfo>,
    // For VolumeCoveragePattern messages
    /// Decoded volume coverage pattern details.
    pub vcp_info: Option<VCPInfo>,
    // Indicates if this group continues from a previous group
    pub is_continued: bool,
    // Absolute message indices
    /// Index of the group's first message within the input slice.
    pub start_message_index: usize,
    /// Index of the group's last message within the input slice.
    pub end_message_index: usize,
}
impl Display for MessageGroupSummary {
    // Renders a (possibly multi-line) description whose shape depends on the
    // message type: radar data, RDA status, VCP, or a generic fallback.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        if self.message_type == MessageType::RDADigitalRadarDataGenericFormat {
            write!(f, "Elevation: #{}", self.elevation_number.unwrap_or(0))?;
            if let Some(elev_angle) = self.elevation_angle {
                write!(f, " ({:.2}°)", elev_angle)?;
            }
            write!(
                f,
                ", Azimuth: {:.1}° to {:.1}°",
                self.start_azimuth.unwrap_or(0.0),
                self.end_azimuth.unwrap_or(0.0)
            )?;
            if let Some(start) = self.start_time {
                write!(f, ", Time: {}", start.format("%H:%M:%S%.3f"))?;
                if let Some(end) = self.end_time {
                    if start != end {
                        write!(f, " to {}", end.format("%H:%M:%S%.3f"))?;
                        let duration = end.signed_duration_since(start);
                        write!(f, " ({:.2}s)", duration.num_milliseconds() as f64 / 1000.0)?;
                    }
                }
            }
            // NOTE(review): `HashMap` iteration order is unspecified, so the
            // data-type listing order can vary between runs — consider sorting
            // for stable output.
            if let Some(data_types) = &self.data_types {
                if !data_types.is_empty() {
                    writeln!(f)?;
                    write!(f, " Data types: ")?;
                    let data_types: Vec<_> = data_types.iter().collect();
                    for (i, (data_type, count)) in data_types.iter().enumerate() {
                        if i > 0 {
                            write!(f, ", ")?;
                        }
                        // Use abbreviated names for common data types
                        let abbr = match data_type.as_str() {
                            "Reflectivity" => "REF",
                            "Velocity" => "VEL",
                            "Spectrum Width" => "SW",
                            "Differential Reflectivity" => "ZDR",
                            "Differential Phase" => "DP",
                            "Correlation Coefficient" => "CC",
                            "Specific Differential Phase" => "KDP",
                            _ => data_type,
                        };
                        write!(f, "{} ({})", abbr, count)?;
                    }
                }
            }
        } else if self.message_type == MessageType::RDAStatusData {
            if let Some(status_info) = &self.rda_status_info {
                write!(
                    f,
                    "RDA Status: {}, Operability: {}",
                    status_info.rda_status, status_info.operability_status
                )?;
                writeln!(f)?;
                write!(
                    f,
                    " Control: {}, Mode: {}",
                    status_info.control_status, status_info.operational_mode
                )?;
                if let Some(vcp) = status_info.vcp_number {
                    let source = if status_info.vcp_is_local {
                        "local"
                    } else {
                        "remote"
                    };
                    write!(f, ", VCP: {} ({})", vcp, source)?;
                }
                writeln!(f)?;
                write!(
                    f,
                    " Transmitter power: {} W, Reflectivity cal: {:.2} dB",
                    status_info.average_transmitter_power, status_info.reflectivity_calibration
                )?;
                writeln!(f)?;
                write!(
                    f,
                    " Super resolution: {}",
                    status_info.super_resolution_status
                )?;
                // Data transmission
                if !status_info.data_transmission_enabled.is_empty() {
                    writeln!(f)?;
                    write!(f, " Data enabled: ")?;
                    for (i, data_type) in status_info.data_transmission_enabled.iter().enumerate() {
                        if i > 0 {
                            write!(f, ", ")?;
                        }
                        write!(f, "{}", data_type)?;
                    }
                }
                // Scan flags
                if !status_info.scan_data_info.is_empty() {
                    writeln!(f)?;
                    write!(f, " Scan settings: ")?;
                    for (i, flag) in status_info.scan_data_info.iter().enumerate() {
                        if i > 0 {
                            write!(f, ", ")?;
                        }
                        write!(f, "{}", flag)?;
                    }
                }
                // Alarms
                if status_info.has_alarms {
                    writeln!(f)?;
                    write!(f, " Alarms: ")?;
                    for (i, alarm) in status_info.active_alarms.iter().enumerate() {
                        if i > 0 {
                            write!(f, ", ")?;
                        }
                        write!(f, "{}", alarm)?;
                    }
                }
            } else {
                // No decoded status available: fall back to a bare label.
                write!(f, "RDA Status Data")?;
                if self.message_count > 1 {
                    write!(f, " ({})", self.message_count)?;
                }
            }
        } else if self.message_type == MessageType::RDAVolumeCoveragePattern {
            if let Some(vcp_info) = &self.vcp_info {
                write!(
                    f,
                    "VCP: #{}, Version: {}",
                    vcp_info.pattern_number, vcp_info.version
                )?;
                // Show general VCP information
                writeln!(f)?;
                write!(
                    f,
                    " {} elevation cuts, Pulse width: {}",
                    vcp_info.number_of_elevation_cuts, vcp_info.pulse_width
                )?;
                if let Some(doppler_res) = vcp_info.doppler_velocity_resolution {
                    write!(f, ", Doppler resolution: {:.1} m/s", doppler_res)?;
                }
                // VCP features
                if !vcp_info.vcp_features.is_empty() {
                    writeln!(f)?;
                    write!(f, " Features: ")?;
                    for (i, feature) in vcp_info.vcp_features.iter().enumerate() {
                        if i > 0 {
                            write!(f, ", ")?;
                        }
                        write!(f, "{}", feature)?;
                    }
                }
                // Show all elevation cuts on separate lines
                if !vcp_info.elevations.is_empty() {
                    writeln!(f)?;
                    writeln!(f, " Elevation cuts:")?;
                    for (i, elev) in vcp_info.elevations.iter().enumerate() {
                        write!(f, " Cut #{}: {:.2}°", i + 1, elev.elevation_angle)?;
                        if let Some(cut_type) = &elev.special_cut_type {
                            write!(f, " ({})", cut_type)?;
                        }
                        // Show waveform and channel configuration
                        write!(
                            f,
                            ", {}, {}",
                            elev.waveform_type, elev.channel_configuration
                        )?;
                        // Show azimuth rate
                        write!(f, ", {:.1}°/s", elev.azimuth_rate)?;
                        // Show super resolution features if any
                        if !elev.super_resolution_features.is_empty() {
                            write!(f, ", Super res: ")?;
                            for (j, feature) in elev.super_resolution_features.iter().enumerate() {
                                if j > 0 {
                                    write!(f, ", ")?;
                                }
                                write!(f, "{}", feature)?;
                            }
                        }
                        // No trailing newline after the final cut.
                        if i < vcp_info.elevations.len() - 1 {
                            writeln!(f)?;
                        }
                    }
                }
            } else {
                write!(f, "Volume Coverage Pattern")?;
                if self.message_count > 1 {
                    write!(f, " ({})", self.message_count)?;
                }
            }
        } else {
            // Generic fallback: message type, count, and time range.
            write!(f, "{:?}", self.message_type)?;
            if self.message_count > 1 {
                write!(f, " ({})", self.message_count)?;
            }
            if let Some(start) = self.start_time {
                write!(f, ", Time: {}", start.format("%Y-%m-%d %H:%M:%S%.3f"))?;
                if let Some(end) = self.end_time {
                    if start != end {
                        write!(f, " to {}", end.format("%Y-%m-%d %H:%M:%S%.3f"))?;
                    }
                }
            }
        }
        Ok(())
    }
}

View file

@ -0,0 +1,94 @@
use chrono::{DateTime, Utc};
use crate::messages::{digital_radar_data, MessageType};
use std::{
collections::HashSet,
fmt::{Display, Formatter, Result},
};
use super::MessageGroupSummary;
/// Summary of a set of messages.
#[derive(Clone, PartialEq, Debug)]
pub struct MessageSummary {
    /// The distinct volume coverage patterns found in these messages.
    pub volume_coverage_patterns: HashSet<digital_radar_data::VolumeCoveragePattern>,
    /// All messages in sequence, with related messages grouped together
    pub message_groups: Vec<MessageGroupSummary>,
    /// Earliest collection time observed, ignoring unset (epoch) timestamps.
    pub earliest_collection_time: Option<DateTime<Utc>>,
    /// Latest collection time observed.
    pub latest_collection_time: Option<DateTime<Utc>>,
}
impl Display for MessageSummary {
    // Renders the collection time range, the set of VCPs, and then one line
    // per message group (delegating to MessageGroupSummary's Display).
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // Time information
        write!(f, "Scans from ")?;
        if let Some(start) = self.earliest_collection_time {
            write!(f, "{}", start.format("%Y-%m-%d %H:%M:%S%.3f UTC"))?;
        } else {
            write!(f, "unknown")?;
        }
        write!(f, " to ")?;
        if let Some(end) = self.latest_collection_time {
            write!(f, "{}", end.format("%Y-%m-%d %H:%M:%S%.3f UTC"))?;
            if let Some(start) = self.earliest_collection_time {
                // Total span in minutes.
                let duration = end.signed_duration_since(start);
                write!(f, " ({:.2}m)", duration.num_milliseconds() as f64 / 60000.0)?;
            }
        } else {
            write!(f, "unknown")?;
        }
        writeln!(f)?;
        // Volume coverage patterns
        // NOTE(review): `HashSet` iteration order is unspecified, so the VCP
        // listing order can vary between runs.
        write!(f, "VCPs: ")?;
        if self.volume_coverage_patterns.is_empty() {
            writeln!(f, "none")?;
        } else {
            let vcps: Vec<_> = self.volume_coverage_patterns.iter().collect();
            for (i, vcp) in vcps.iter().enumerate() {
                if i > 0 {
                    write!(f, ", ")?;
                }
                write!(f, "{:?}", vcp)?;
            }
            writeln!(f)?;
        }
        // Messages
        writeln!(f, "Messages:")?;
        for group in self.message_groups.iter() {
            // Radar data groups additionally mark continuation of an earlier
            // group covering the same elevation cut.
            let prefix = if group.message_type == MessageType::RDADigitalRadarDataGenericFormat {
                let msg_range = if group.start_message_index == group.end_message_index {
                    format!(" Msg {}", group.start_message_index + 1)
                } else {
                    format!(
                        " Msg {}-{}",
                        group.start_message_index + 1,
                        group.end_message_index + 1
                    )
                };
                if group.is_continued {
                    format!("{} (cont.)", msg_range)
                } else {
                    msg_range
                }
            } else if group.start_message_index == group.end_message_index {
                format!(" Msg {}", group.start_message_index + 1)
            } else {
                format!(
                    " Msg {}-{}",
                    group.start_message_index + 1,
                    group.end_message_index + 1
                )
            };
            writeln!(f, "{}: {}", prefix, group)?;
        }
        Ok(())
    }
}

View file

@ -0,0 +1,103 @@
/// Contains summary information from the RDA Status Data message
#[derive(Clone, PartialEq, Debug)]
pub struct RDAStatusInfo {
    /// Debug-formatted RDA status.
    pub rda_status: String,
    /// Debug-formatted RDA operability status.
    pub operability_status: String,
    /// Debug-formatted RDA control status.
    pub control_status: String,
    /// Debug-formatted operational mode.
    pub operational_mode: String,
    /// Volume coverage pattern number, when one is reported.
    pub vcp_number: Option<i16>,
    /// Whether the reported VCP is locally (rather than remotely) specified.
    pub vcp_is_local: bool,
    /// Average transmitter power, in watts.
    pub average_transmitter_power: u16,
    /// Horizontal reflectivity calibration correction, in dB.
    pub reflectivity_calibration: f32,
    /// Debug-formatted super resolution status.
    pub super_resolution_status: String,
    /// Names of data types enabled for transmission ("None" when none are).
    pub data_transmission_enabled: Vec<String>,
    /// Whether any RDA alarm is active.
    pub has_alarms: bool,
    /// Names of the active alarm categories.
    pub active_alarms: Vec<String>,
    /// Human-readable scan/data flag settings (AVSET, EBC, etc.).
    pub scan_data_info: Vec<String>,
}
/// Builds an [`RDAStatusInfo`] summary from an RDA Status Data message.
pub fn extract_rda_status_info(
    message: &crate::messages::rda_status_data::Message,
) -> RDAStatusInfo {
    // Scalar fields are filled up front; the list fields are populated below.
    let mut info = RDAStatusInfo {
        rda_status: format!("{:?}", message.rda_status()),
        operability_status: format!("{:?}", message.operability_status()),
        control_status: format!("{:?}", message.control_status()),
        operational_mode: format!("{:?}", message.operational_mode()),
        vcp_number: message.volume_coverage_pattern().map(|vcp| vcp.number()),
        vcp_is_local: message
            .volume_coverage_pattern()
            .map(|vcp| vcp.local())
            .unwrap_or(false),
        average_transmitter_power: message.average_transmitter_power,
        reflectivity_calibration: message.horizontal_reflectivity_calibration_correction(),
        super_resolution_status: format!("{:?}", message.super_resolution_status()),
        data_transmission_enabled: Vec::new(),
        has_alarms: !message.rda_alarm_summary().none(),
        active_alarms: Vec::new(),
        scan_data_info: Vec::new(),
    };

    // Enabled data moments, or the literal "None" when nothing is enabled.
    let data_enabled = message.data_transmission_enabled();
    if data_enabled.none() {
        info.data_transmission_enabled.push("None".to_string());
    } else {
        let moments = [
            (data_enabled.reflectivity(), "Reflectivity"),
            (data_enabled.velocity(), "Velocity"),
            (data_enabled.spectrum_width(), "Spectrum Width"),
        ];
        for (enabled, label) in moments {
            if enabled {
                info.data_transmission_enabled.push(label.to_string());
            }
        }
    }

    // Scan and data flag settings. AVSET is always reported (enabled or
    // disabled); the remaining flags are listed only when set.
    let flags = message.rda_scan_and_data_flags();
    let avset = if flags.avset_enabled() {
        "AVSET enabled"
    } else {
        "AVSET disabled"
    };
    info.scan_data_info.push(avset.to_string());
    if flags.ebc_enabled() {
        info.scan_data_info.push("EBC enabled".to_string());
    }
    if flags.rda_log_data_enabled() {
        info.scan_data_info.push("Log data enabled".to_string());
    }
    if flags.time_series_data_recording_enabled() {
        info.scan_data_info
            .push("Time series recording enabled".to_string());
    }

    // Active alarm categories, in the ICD's summary-bit order.
    let alarms = message.rda_alarm_summary();
    let alarm_sources = [
        (alarms.tower_utilities(), "Tower/utilities"),
        (alarms.pedestal(), "Pedestal"),
        (alarms.transmitter(), "Transmitter"),
        (alarms.receiver(), "Receiver"),
        (alarms.rda_control(), "RDA control"),
        (alarms.communication(), "Communication"),
        (alarms.signal_processor(), "Signal processor"),
    ];
    for (active, label) in alarm_sources {
        if active {
            info.active_alarms.push(label.to_string());
        }
    }

    info
}

View file

@ -0,0 +1,106 @@
/// Contains summary information from the Volume Coverage Pattern message
#[derive(Clone, PartialEq, Debug)]
pub struct VCPInfo {
    /// VCP pattern number copied from the message header.
    pub pattern_number: u16,
    /// VCP message version copied from the message header.
    pub version: u8,
    /// Number of elevation cuts copied from the message header.
    pub number_of_elevation_cuts: u16,
    /// Debug-formatted pulse width reported by the header.
    pub pulse_width: String,
    /// Doppler velocity resolution in m/s, when the header reports one.
    pub doppler_velocity_resolution: Option<f64>,
    /// Human-readable feature flags (SAILS/MRLE/MPDA/base tilts/sequencing).
    pub vcp_features: Vec<String>,
    /// One summary entry per elevation cut in the pattern.
    pub elevations: Vec<VCPElevationInfo>,
}
/// Summary information about a single elevation cut in a VCP
#[derive(Clone, PartialEq, Debug)]
pub struct VCPElevationInfo {
    /// Elevation angle of the cut, in degrees.
    pub elevation_angle: f64,
    /// Debug-formatted channel configuration for this cut.
    pub channel_configuration: String,
    /// Debug-formatted waveform type for this cut.
    pub waveform_type: String,
    /// Antenna azimuth rate, in degrees per second.
    pub azimuth_rate: f64,
    /// Human-readable super-resolution capabilities enabled for this cut.
    pub super_resolution_features: Vec<String>,
    /// Special-cut label (e.g. "SAILS 1", "MRLE 2", "MPDA", "Base tilt"),
    /// or `None` for an ordinary cut.
    pub special_cut_type: Option<String>,
}
/// Helper function to extract Volume Coverage Pattern information from a message
pub fn extract_vcp_info(message: &crate::messages::volume_coverage_pattern::Message) -> VCPInfo {
let mut vcp_features = Vec::new();
if message.header.vcp_supplemental_data_sails_vcp() {
let sails_cuts = message.header.vcp_supplemental_data_number_sails_cuts();
vcp_features.push(format!("SAILS ({} cuts)", sails_cuts));
}
if message.header.vcp_supplemental_data_mrle_vcp() {
let mrle_cuts = message.header.vcp_supplemental_data_number_mrle_cuts();
vcp_features.push(format!("MRLE ({} cuts)", mrle_cuts));
}
if message.header.vcp_supplemental_data_mpda_vcp() {
vcp_features.push("MPDA".to_string());
}
if message.header.vcp_supplemental_data_base_tilt_vcp() {
let base_tilts = message.header.vcp_supplemental_data_base_tilts();
vcp_features.push(format!("Base tilts ({} cuts)", base_tilts));
}
if message.header.vcp_sequencing_sequence_active() {
vcp_features.push("VCP sequence active".to_string());
}
if message.header.vcp_sequencing_truncated_vcp() {
vcp_features.push("Truncated VCP".to_string());
}
let mut elevations = Vec::new();
for elev in &message.elevations {
let mut super_res_features = Vec::new();
if elev.super_resolution_control_half_degree_azimuth() {
super_res_features.push("0.5° azimuth".to_string());
}
if elev.super_resolution_control_quarter_km_reflectivity() {
super_res_features.push("0.25 km reflectivity".to_string());
}
if elev.super_resolution_control_doppler_to_300km() {
super_res_features.push("Doppler to 300 km".to_string());
}
if elev.super_resolution_control_dual_polarization_to_300km() {
super_res_features.push("Dual pol to 300 km".to_string());
}
// Determine special cut type
let mut special_cut_type = None;
if elev.supplemental_data_sails_cut() {
let seq = elev.supplemental_data_sails_sequence_number();
special_cut_type = Some(format!("SAILS {}", seq));
} else if elev.supplemental_data_mrle_cut() {
let seq = elev.supplemental_data_mrle_sequence_number();
special_cut_type = Some(format!("MRLE {}", seq));
} else if elev.supplemental_data_mpda_cut() {
special_cut_type = Some("MPDA".to_string());
} else if elev.supplemental_data_base_tilt_cut() {
special_cut_type = Some("Base tilt".to_string());
}
elevations.push(VCPElevationInfo {
elevation_angle: elev.elevation_angle_degrees(),
channel_configuration: format!("{:?}", elev.channel_configuration()),
waveform_type: format!("{:?}", elev.waveform_type()),
azimuth_rate: elev.azimuth_rate_degrees_per_second(),
super_resolution_features: super_res_features,
special_cut_type,
});
}
VCPInfo {
pattern_number: message.header.pattern_number,
version: message.header.version,
number_of_elevation_cuts: message.header.number_of_elevation_cuts,
pulse_width: format!("{:?}", message.header.pulse_width()),
doppler_velocity_resolution: message
.header
.doppler_velocity_resolution_meters_per_second(),
vcp_features,
elevations,
}
}

Some files were not shown because too many files have changed in this diff Show more