mirror of https://github.com/ecmwf/eccodes.git
Merge branch 'develop' into feature/mtg2
commit 94836f5ec4
@@ -10,10 +10,23 @@ alias mars.activity = activity;
 codetable[2] experiment "grib2/destine_experiment.table" ;
 alias mars.experiment = experiment;
 
+# Generation keyword - synergise with DestinE ClimateDT and use this to version the dataset
+unsigned[1] generation = 255 : dump;
+alias mars.generation = generation;
+
+# Model keyword to index multiple models within EERIE
+codetable[2] model "grib2/destine_model.table" : dump;
+alias mars.model = model;
+
 # Climate run realization keyword, which relates to an initial condition perturbation
 unsigned[1] realization = 255 ;
 alias mars.realization = realization;
 
+# Allows simultaneous archiving of resolutions
+# high resolution for best available vs standard resolution for quick access and plotting
+codetable[2] resolution "grib2/destine_resolution.table" : dump;
+alias mars.resolution = resolution;
+
 # Remove mars domain from this data
 unalias mars.domain;
 
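The three DestinE keys added above (generation, model, resolution) behave like the existing activity and experiment keys: integer-coded, mapped to strings through the grib2/destine_*.table code tables, and aliased into the mars namespace. A minimal usage sketch in the style of the ECC-1850 test further down in this diff ($tools_dir, $eerie_sample and $temp_grib are the variables that test already uses; the expected strings come from destine_model.table and destine_resolution.table):

    ${tools_dir}/grib_set -s generation=1,model=2,resolution=1 $eerie_sample $temp_grib
    ${tools_dir}/grib_get -p generation,model:s,resolution:s $temp_grib
    # should print: 1 IFS-NEMO standard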
@@ -5,3 +5,6 @@
 3 3 Cloud-to-cloud lightning flash density (km-2 day-1)
 4 4 Total lightning flash density (km-2 day-1)
 5 5 Subgrid-scale lightning potential index (J kg-1)
+# 6-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -22,3 +22,6 @@
 38 38 Lower layer cloud top pressure (Pa)
 39 39 Error in lower layer cloud optical depth (Numeric)
 40 40 Error in lower layer cloud top pressure (Pa)
+# 41-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -2,3 +2,6 @@
 0 0 Probability of encountering marginal visual flight rule conditions (%)
 1 1 Probability of encountering low instrument flight rule conditions (%)
 2 2 Probability of encountering instrument flight rule conditions (%)
+# 3-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -8,3 +8,6 @@
 6 6 Volcanic ash cloud optical depth (Numeric)
 7 7 Volcanic ash column density (kg m-2)
 8 8 Volcanic ash particle effective radius (m)
+# 9-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -5,3 +5,6 @@
 3 3 Foundation sea-surface temperature (K)
 4 4 Estimated bias between sea-surface temperature and standard (K)
 5 5 Estimated standard deviation between sea surface temperature and standard (K)
+# 6-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -5,3 +5,6 @@
 3 3 Direct solar exposure (J m-2)
 4 4 Diffuse solar irradiance (W m-2)
 5 5 Diffuse solar exposure (J m-2)
+# 6-191 Reserved
+# 192-254 Reserved for local use
+255 255 Missing
@@ -48,7 +48,6 @@
 1039 TENEU Urban temperate needleleaf evergreen
 1040 BONDU Urban boreal needleleaf deciduous
 # 1041-1500 Reserved for tile class entries for tile models using the ECOCLIMAP-SG land cover survey
-# v8.1 groupings
 1501 GNATU Nature grouping (NO + ROCK + SNOW + BOBD + TEBD + TRBD + TEBE + TRBE + BONE + TENE + BOND + SHRB + BOGR + GRAS + TROG + C3W + C3S + C4 + FLTR + FLGR)
 1502 GNOFO Non-forest grouping (NO + ROCK + SNOW + BOGR + GRAS + TROG + C3W + C3S + C4 + FLTR + FLGR)
 1503 GFORE Forest grouping (BOBD + TEBD + TRBD + TEBE + TRBE + BONE + TENE + BOND + SHRB)
@@ -64,7 +63,6 @@
 1513 GNFOR Needleleaf forest grouping (BONE + TENE + BOND)
 1514 GDBFO Deciduous broadleaf forest grouping (BOBD + TEBD + TRBD + TEBE + SHRB)
 1515 GTGRA Temperate grassland grouping (BOGR + GRAS)
-# v9.0 groupings
 1516 GNFNF Non-forest (no FLTR) grouping (NO + ROCK + SNOW + BOGR + GRAS + TROG + C3W + C3S + C4 + FLGR)
 1517 GBFFT Broadleaf forest (with FLTR) grouping (BOBD + TEBD + TRBD + TEBE + TRBE + SHRB + FLTR)
 1518 GBTRE Broadleaf trees grouping (BOBD + TEBD + TRBD + TEBE + TRBE + FLTR)
@@ -72,7 +70,6 @@
 1520 GBDTR Broadleaf deciduous trees grouping (BOBD + TEBD + TRBD + FLTR)
 1521 GBETR Broadleaf evergreen trees grouping (TEBE + TRBE)
 1522 GNETR Needleleaf evergreen trees grouping (BONE + TENE)
-# Other groupings
 1523 GLCZU LCZ or urban grouping (LCZ1 + LCZ2 + LCZ3 + LCZ4 + LCZ5 + LCZ6 + LCZ7 + LCZ8 + LCZ9 + LCZ10)
 1524 GIWAT Inland water grouping (LAKE + RIVE)
 1525 GPTEBD Urban parks and gardens TEBD grouping (NO + GRAS + TEBDU)
@@ -19,12 +19,12 @@ unsigned[1] minuteOfEndOfOverallTimeInterval =0 : edition_specific;
 unsigned[1] secondOfEndOfOverallTimeInterval =0 : edition_specific;
 
 # Number of time range specifications describing the time intervals used to calculate the statistically-processed field
-unsigned[1] numberOfTimeRange = 1 : edition_specific;
-alias n = numberOfTimeRange;
-alias numberOfTimeRanges = numberOfTimeRange;
+unsigned[1] numberOfTimeRanges = 1 : edition_specific;
+alias n = numberOfTimeRanges;
+alias numberOfTimeRange = numberOfTimeRanges;
 
 # Total number of data values missing in statistical process
 unsigned[4] numberOfMissingInStatisticalProcess = 0 : edition_specific;
 alias totalNumberOfDataValuesMissingInStatisticalProcess=numberOfMissingInStatisticalProcess;
 
 statisticalProcessesList list(numberOfTimeRanges)
@@ -33,17 +33,17 @@ statisticalProcessesList list(numberOfTimeRanges)
 codetable[1] typeOfStatisticalProcessing ('4.10.table',masterDir,localDir) : edition_specific;
 
 # Type of time increment between successive fields used in the statistical processing
 codetable[1] typeOfTimeIncrement ('4.11.table',masterDir,localDir) = 2 : edition_specific;
 alias typeOfTimeIncrementBetweenSuccessiveFieldsUsedInTheStatisticalProcessing=typeOfTimeIncrement;
 
 # Indicator of unit of time for time range over which statistical processing is done
 codetable[1] indicatorOfUnitForTimeRange ('4.4.table',masterDir,localDir) =1 ;
 
 # Length of the time range over which statistical processing is done, in units defined by the previous octet
 unsigned[4] lengthOfTimeRange=0 ;
 
 # Indicator of unit of time for the increment between the successive fields used
 codetable[1] indicatorOfUnitForTimeIncrement ('4.4.table',masterDir,localDir)=255 ;
 
 # Time increment between successive fields, in units defined by the previous octet
 unsigned[4] timeIncrement=0 ;
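The change above renames numberOfTimeRange to numberOfTimeRanges so that the key reads as a count, and keeps the old spelling as an alias so existing scripts and filters continue to work. A minimal grib_filter sketch, modelled on the ECC-648 filter test updated later in this diff (pdt11.filter, in.grib2 and out.grib2 are placeholder names), with pdt11.filter containing:

    set productDefinitionTemplateNumber = 11;
    set numberOfTimeRanges = 3;
    set typeOfStatisticalProcessing = {3, 1, 2};
    write;

run as:

    ${tools_dir}/grib_filter -o out.grib2 pdt11.filter in.grib2

Setting "numberOfTimeRange = 3" in the filter would still work, resolved through the backward-compatibility alias kept above.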
@@ -45,7 +45,7 @@ void grib_accessor_class_g2end_step_t::init(grib_accessor* a, const long l, grib
     self->time_range_unit = grib_arguments_get_name(h, c, n++);
     self->time_range_value = grib_arguments_get_name(h, c, n++);
     self->typeOfTimeIncrement = grib_arguments_get_name(h, c, n++);
-    self->numberOfTimeRange = grib_arguments_get_name(h, c, n++);
+    self->numberOfTimeRanges = grib_arguments_get_name(h, c, n++);
 }
 
 void grib_accessor_class_g2end_step_t::dump(grib_accessor* a, grib_dumper* dumper)
@@ -201,7 +201,7 @@ static int unpack_multiple_time_ranges_long_(grib_accessor* a, long* val, size_t
     grib_accessor_g2end_step_t* self = (grib_accessor_g2end_step_t*)a;
     int i = 0, err = 0;
     grib_handle* h = grib_handle_of_accessor(a);
-    long numberOfTimeRange = 0, step_units = 0, start_step_value = 0;
+    long numberOfTimeRanges = 0, step_units = 0, start_step_value = 0;
 
     size_t count = 0;
     long arr_typeOfTimeIncrement[MAX_NUM_TIME_RANGES] = {
@@ -218,14 +218,14 @@ static int unpack_multiple_time_ranges_long_(grib_accessor* a, long* val, size_t
         return err;
     if ((err = grib_get_long_internal(h, self->step_units, &step_units)))
         return err;
-    if ((err = grib_get_long_internal(h, self->numberOfTimeRange, &numberOfTimeRange)))
+    if ((err = grib_get_long_internal(h, self->numberOfTimeRanges, &numberOfTimeRanges)))
         return err;
-    if (numberOfTimeRange > MAX_NUM_TIME_RANGES) {
+    if (numberOfTimeRanges > MAX_NUM_TIME_RANGES) {
         grib_context_log(h->context, GRIB_LOG_ERROR, "Too many time range specifications!");
         return GRIB_DECODING_ERROR;
     }
 
-    count = numberOfTimeRange;
+    count = numberOfTimeRanges;
     /* Get the arrays for the N time ranges */
     if ((err = grib_get_long_array(h, self->typeOfTimeIncrement, arr_typeOfTimeIncrement, &count)))
         return err;
@@ -259,11 +259,11 @@ static int unpack_multiple_time_ranges_double_(grib_accessor* a, double* val, si
 {
     grib_accessor_g2end_step_t* self = (grib_accessor_g2end_step_t*)a;
     int i = 0, err = 0;
     grib_handle* h = grib_handle_of_accessor(a);
-    long numberOfTimeRange = 0;
+    long numberOfTimeRanges = 0;
     long step_units = 0;
     long start_step_value = 0;
     long start_step_unit = 0;
 
     size_t count = 0;
     long arr_typeOfTimeIncrement[MAX_NUM_TIME_RANGES] = {
@@ -286,14 +286,14 @@ static int unpack_multiple_time_ranges_double_(grib_accessor* a, double* val, si
     if ((err = grib_get_long_internal(h, self->step_units, &step_units)))
         return err;
 
-    if ((err = grib_get_long_internal(h, self->numberOfTimeRange, &numberOfTimeRange)))
+    if ((err = grib_get_long_internal(h, self->numberOfTimeRanges, &numberOfTimeRanges)))
         return err;
-    if (numberOfTimeRange > MAX_NUM_TIME_RANGES) {
+    if (numberOfTimeRanges > MAX_NUM_TIME_RANGES) {
         grib_context_log(h->context, GRIB_LOG_ERROR, "Too many time range specifications!");
         return GRIB_DECODING_ERROR;
     }
 
-    count = numberOfTimeRange;
+    count = numberOfTimeRanges;
     /* Get the arrays for the N time ranges */
     if ((err = grib_get_long_array(h, self->typeOfTimeIncrement, arr_typeOfTimeIncrement, &count)))
         return err;
@@ -329,9 +329,9 @@ int grib_accessor_class_g2end_step_t::unpack_long(grib_accessor* a, long* val, s
     grib_accessor_g2end_step_t* self = (grib_accessor_g2end_step_t*)a;
     grib_handle* h = grib_handle_of_accessor(a);
     int ret = 0;
-    long start_step_value;
-    long start_step_unit;
-    long numberOfTimeRange;
+    long start_step_value = 0;
+    long start_step_unit = 0;
+    long numberOfTimeRanges = 0;
 
     if ((ret = grib_get_long_internal(h, self->start_step_value, &start_step_value)))
         return ret;
@@ -346,13 +346,13 @@ int grib_accessor_class_g2end_step_t::unpack_long(grib_accessor* a, long* val, s
         return 0;
     }
 
-    Assert(self->numberOfTimeRange);
-    if ((ret = grib_get_long_internal(h, self->numberOfTimeRange, &numberOfTimeRange)))
+    Assert(self->numberOfTimeRanges);
+    if ((ret = grib_get_long_internal(h, self->numberOfTimeRanges, &numberOfTimeRanges)))
         return ret;
-    Assert(numberOfTimeRange == 1 || numberOfTimeRange == 2);
+    Assert(numberOfTimeRanges == 1 || numberOfTimeRanges == 2);
 
     try {
-        if (numberOfTimeRange == 1) {
+        if (numberOfTimeRanges == 1) {
             ret = unpack_one_time_range_long_(a, val, len);
         }
         else {
@@ -374,7 +374,7 @@ int grib_accessor_class_g2end_step_t::unpack_double(grib_accessor* a, double* va
     int ret = 0;
     long start_step_value;
     long start_step_unit;
-    long numberOfTimeRange;
+    long numberOfTimeRanges;
 
     if ((ret = grib_get_long_internal(h, self->start_step_value, &start_step_value)))
         return ret;
@@ -389,13 +389,13 @@ int grib_accessor_class_g2end_step_t::unpack_double(grib_accessor* a, double* va
         return 0;
     }
 
-    Assert(self->numberOfTimeRange);
-    if ((ret = grib_get_long_internal(h, self->numberOfTimeRange, &numberOfTimeRange)))
+    Assert(self->numberOfTimeRanges);
+    if ((ret = grib_get_long_internal(h, self->numberOfTimeRanges, &numberOfTimeRanges)))
         return ret;
-    Assert(numberOfTimeRange == 1 || numberOfTimeRange == 2);
+    Assert(numberOfTimeRanges == 1 || numberOfTimeRanges == 2);
 
     try {
-        if (numberOfTimeRange == 1) {
+        if (numberOfTimeRanges == 1) {
             ret = unpack_one_time_range_double_(a, val, len);
         }
         else {
@@ -34,7 +34,7 @@ public:
     const char* time_range_unit;
     const char* time_range_value;
     const char* typeOfTimeIncrement;
-    const char* numberOfTimeRange;
+    const char* numberOfTimeRanges;
 };
 
 class grib_accessor_class_g2end_step_t : public grib_accessor_class_long_t
src/step.cc
@@ -24,7 +24,7 @@ namespace eccodes {
 
 Step step_from_string(const std::string& step, const Unit& force_unit)
 {
-    std::regex re("([0-9.]+)([smhDMYC]?)");
+    std::regex re("([-]?[0-9.]+)([smhDMYC]?)");
     std::smatch match;
     if (std::regex_match(step, match, re)) {
         if (match.size() == 3) {
@@ -52,19 +52,44 @@ Step step_from_string(const std::string& step, const Unit& force_unit)
             return ret;
         }
     }
-    throw std::runtime_error("Could not parse step: " + step);
+    throw std::runtime_error("Could not parse step: \"" + step + "\"");
 }
 
 std::vector<Step> parse_range(const std::string& range_str, const Unit& force_unit)
 {
+    std::regex re1("([-]?[0-9.]+[smhDMYC]?)-([-]?[0-9.]+[smhDMYC]?)");
+    std::regex re2("[-]?[0-9.]+[smhDMYC]?");
+
+    std::smatch match;
     std::vector<Step> steps;
-    std::string::size_type pos = 0;
-    std::string::size_type prev = 0;
-    while ((pos = range_str.find("-", prev)) != std::string::npos) {
-        steps.push_back(step_from_string(range_str.substr(prev, pos - prev), force_unit));
-        prev = pos + 1;
+    if (std::regex_match(range_str, match, re1)) {
+        if (match.size() == 3) {
+            std::string v1 = match[1];
+            std::string v2 = match[2];
+            steps.push_back(step_from_string(v1, force_unit));
+            steps.push_back(step_from_string(v2, force_unit));
+        }
+        else if (match.size() == 2) {
+            std::string v1 = match[1];
+            steps.push_back(step_from_string(v1, force_unit));
+        }
+        else {
+            throw std::runtime_error("Could not parse step range for accumulated data: \"" + range_str + "\"");
+        }
     }
-    steps.push_back(step_from_string(range_str.substr(prev), force_unit));
+    else if(std::regex_match(range_str, match, re2)) {
+        if (match.size() == 1) {
+            std::string v1 = match[0];
+            steps.push_back(step_from_string(v1, force_unit));
+        }
+        else {
+            throw std::runtime_error("Could not parse step range for instantaneous data: \"" + range_str + "\"");
+        }
+    }
+    else {
+        throw std::runtime_error("Could not parse step range: \"" + range_str + "\"");
+    }
 
     return steps;
 }
+
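With the optional leading minus sign now accepted both in step_from_string and in the two parse_range regexes, a range such as -48s--24s splits on the middle dash into -48s and -24s instead of tripping over the sign. A small sketch of the effect at tool level, following the ECC-1228 test added later in this diff (in.grib2 and out.grib2 are placeholders; the test itself uses the $accumulated_field file, since a step range only makes sense on an accumulation-type product):

    ${tools_dir}/grib_set -s stepRange=-48s--24s in.grib2 out.grib2
    ${tools_dir}/grib_ls -p stepRange,stepUnits out.grib2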
@@ -232,9 +232,6 @@ if( HAVE_BUILD_TOOLS )
     grib_geo_iter
     grib_to_json
     grib_to_ppm
-    grib_merge
-    big2gribex
-    grib_sub_area_check
     grib_list_keys
     grib_histogram
     bufr_get_element
@@ -82,6 +82,7 @@ done
 
 
 # Test for dumping a section
+# ---------------------------
 if [ $HAVE_JPEG -eq 0 ]; then
     # No JPEG decoding enabled so dumping section 7 will issue errors
     # but dumping non-data sections should work
@@ -110,6 +111,27 @@ ${tools_dir}/grib_dump -w count=4 $file > $temp 2>&1
 file=$data_dir/sample.grib2
 ECCODES_DEBUG=1 ${tools_dir}/grib_dump $file > $temp 2>&1
 
+# Check the right number of sections are listed in the dump
+# ---------------------------------------------------------
+file=$data_dir/sample.grib2
+${tools_dir}/grib_dump -O $file > $temp
+count=$(grep -c SECTION_ $temp)
+[ $count -eq 8 ]
+
+file=$data_dir/test_uuid.grib2
+${tools_dir}/grib_dump -wcount=1 -O $file > $temp
+count=$(grep -c SECTION_ $temp)
+[ $count -eq 7 ]
+
+file=$data_dir/regular_gaussian_model_level.grib1
+${tools_dir}/grib_dump -O $file > $temp
+count=$(grep -c SECTION_ $temp)
+[ $count -eq 4 ]
+
+file=$data_dir/missing_field.grib1
+${tools_dir}/grib_dump -O $file > $temp
+count=$(grep -c SECTION_ $temp)
+[ $count -eq 5 ]
 
 # Repeated key numberOfSection
 file=$data_dir/sample.grib2
@@ -41,5 +41,16 @@ ${tools_dir}/grib_set -s activity=1,experiment=1,realization=1 $eerie_sample $te
 grib_check_key_equals $temp_grib "activity,experiment,realization" "1 1 1"
 grib_check_key_equals $temp_grib "activity:s,experiment:s" "CMIP6 hist"
 
+# ECC-1850: Additional keys added
+# Check additional keys are present and correct
+grib_check_key_exists $eerie_sample generation,model,resolution
+grib_check_key_equals $eerie_sample "generation,model,resolution" "255 0 0"
+
+# Check an example where a few additional things are set
+${tools_dir}/grib_set -s generation=1,model=2,resolution=1 $eerie_sample $temp_grib
+
+grib_check_key_equals $temp_grib "generation,model,resolution" "1 2 1"
+grib_check_key_equals $temp_grib "model:s,resolution:s" "IFS-NEMO standard"
+
 # Clean up
 rm -f $temp_grib $eerie_sample
@@ -148,7 +148,7 @@ echo "Test ECC-648: Set codetable key to array"
 # ---------------------------------------------
 cat >$tempFilt <<EOF
 set productDefinitionTemplateNumber = 11;
-set numberOfTimeRange = 3;
+set numberOfTimeRanges = 3;
 set typeOfStatisticalProcessing = {3, 1, 2};
 write;
 EOF
@@ -62,6 +62,11 @@ fi
 instantaneous_field=$data_dir/reduced_gaussian_surface.grib2
 accumulated_field=$data_dir/reduced_gaussian_sub_area.grib2
 
+# ECC-1228: Changing template with negative forecast time
+${tools_dir}/grib_set -s stepRange=-48s--24s $accumulated_field $temp
+${tools_dir}/grib_set -s productDefinitionTemplateNumber=8 $temp $temp2
+grib_check_key_equals $temp2 "-p forecastTime,indicatorOfUnitOfTimeRange,lengthOfTimeRange,indicatorOfUnitForTimeRange" "-48 13 24 13"
+
 # ECC-1802: Relaxation of the "Step Units Rule":
 # The updated rule permits the simultaneous assignment of the same step unit to both 'stepUnits' and 'step*' keys
 in="$instantaneous_field"
@@ -38,3 +38,14 @@ grib_check_key_exists()
     # grib_get will fail if the key is not found
     $tools_dir/grib_get -p $a_key $a_file >/dev/null
 }
+
+# This is useful in situations where we intend a key to have
+# a single value (size=1). For example when we redefine a transient
+grib_check_key_is_scalar()
+{
+    a_file=$1
+    a_key=$2
+    a_val=$3
+    result=$(echo "print '|[$a_key]|';" | ${tools_dir}/grib_filter - $a_file)
+    [ "$result" = "|$a_val|" ]
+}
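The new helper above leans on grib_filter's print statement: print '|[key]|' expands the key's value between the two bar characters, so a key holding more than one value would expand to several space-separated numbers and the comparison against "|$a_val|" would fail. A usage sketch (key and value are chosen here purely for illustration):

    grib_check_key_is_scalar $temp_grib numberOfTimeRanges 3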
@@ -28,7 +28,7 @@ ecbuild_add_library( TARGET ecc_tools
 # tools binaries
 list( APPEND ecc_tools_binaries
     codes_info codes_count codes_split_file
-    grib_histogram grib_filter grib_ls grib_dump grib_merge
+    grib_histogram grib_filter grib_ls grib_dump
     grib2ppm grib_set grib_get grib_get_data grib_copy
     grib_compare codes_parser grib_index_build bufr_index_build
     bufr_ls bufr_dump bufr_set bufr_get
@@ -37,8 +37,6 @@ list( APPEND ecc_tools_binaries
     metar_dump metar_ls metar_compare metar_get metar_filter metar_copy )
 
 list( APPEND ecc_tools_binaries_extra
-    big2gribex
-    gg_sub_area_check
     grib_repair
     grib_to_json
     codes_export_resource