mirror of https://github.com/ecmwf/eccodes.git
Merge branch 'develop' into feature/ECC-1270-marsLevtype
commit 9baed56332
@ -2,7 +2,7 @@
# general configuration #
#---------------------------------#

version: 2.23.0-{build}-{branch}
version: 2.24.0-{build}-{branch}

branches:
only:
@ -21,7 +21,7 @@ cmake_minimum_required( VERSION 3.12 FATAL_ERROR )
find_package( ecbuild 3.4 REQUIRED HINTS ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/../ecbuild)

# Initialise project
project( eccodes VERSION 2.23.0 LANGUAGES C )
project( eccodes VERSION 2.24.0 LANGUAGES C )

###############################################################################
# system checks needed for eccodes_config.h and some options like MEMFS
@ -27,7 +27,6 @@ unsigned[1] methodNumber : dump ;
#
# Coordinate structure definition
#

unsigned[1] spaceUnitFlag : dump ;

unsigned[1] verticalCoordinateDefinition : dump ;

@ -41,7 +40,6 @@ unsigned[1] timeCoordinateDefinition : dump ;
#
# Position definition: mixed coordinates
#

unsigned[1] mixedCoordinateFieldFlag : dump ;

unsigned[1] coordinate1Flag : dump ;

@ -63,7 +61,6 @@ signed[4] coordinate2End : dump ;
#
# Data grid definitions
#

unsigned[1] coordinate3Flag : dump ;

unsigned[1] coordinate4Flag : dump ;

@ -87,7 +84,6 @@ flags[1] flagForNormalOrStaggeredGrid 'grib1/ocean.1.table' : dump;
#
# Auxiliary information
#

flags[1] flagForAnyFurtherInformation 'grib1/ocean.1.table' : dump;

unsigned[1] numberInHorizontalCoordinates : dump;

@ -101,13 +97,11 @@ unsigned[2] numberInTheAuxiliaryArray : dump ;
#
# Horizontal coordinate definition
#

unsigned[4] horizontalCoordinateSupplement[numberInHorizontalCoordinates] : dump;

#
# Mixed coordinate definition
#

unsigned[4] mixedCoordinateDefinition[numberInMixedCoordinateDefinition] : dump;

#

@ -120,13 +114,11 @@ if (numberInTheGridCoordinateList>0) {
#
# Auxiliary array
#

unsigned[4] auxiliary[numberInTheAuxiliaryArray] : dump;

#
# Post-auxiliary array
#

constant postAuxiliaryArrayPresent = 1;

if (flagShowingPostAuxiliaryArrayInUse == postAuxiliaryArrayPresent) {
@ -21,7 +21,6 @@ constant isectionNumber2 = "h";
|
|||
constant isectionNumber3 = "m";
|
||||
constant isectionNumber4 = "z";
|
||||
|
||||
|
||||
constant tsectionNumber3 = "v";
|
||||
constant tsectionNumber4 = "z";
|
||||
constant tsectionNumber5 = "m";
|
||||
|
@ -32,41 +31,29 @@ constant GRIB_LATITUDE = 4;
|
|||
|
||||
meta verificationDate g1verificationdate(dataDate, dataTime, endStep) : read_only;
|
||||
|
||||
|
||||
if(horizontalCoordinateDefinition == 0)
|
||||
{
|
||||
|
||||
if(coordinate1Flag == 1 )
|
||||
{
|
||||
|
||||
if (horizontalCoordinateDefinition == 0) {
|
||||
if (coordinate1Flag == 1 ) {
|
||||
# range
|
||||
|
||||
|
||||
if (averaging1Flag == P_TAVG ) {
|
||||
if(
|
||||
marsType == TYPE_OR
|
||||
if (marsType == TYPE_OR
|
||||
|| marsType == TYPE_FC
|
||||
|| marsType == TYPE_FF
|
||||
|| marsType == TYPE_FX
|
||||
)
|
||||
|| marsType == TYPE_FX)
|
||||
{
|
||||
meta marsRange evaluate((coordinate1End - coordinate1Start)/3600);
|
||||
alias mars.range = marsRange;
|
||||
}
|
||||
}
|
||||
# section
|
||||
|
||||
if (coordinate2Flag == 2) { alias mars.section = isectionNumber2; }
|
||||
if (coordinate2Flag == 3) { alias mars.section = isectionNumber3; }
|
||||
if (coordinate2Flag == 4) { alias mars.section = isectionNumber4; }
|
||||
|
||||
# levelist latitude longitude
|
||||
|
||||
if (coordinate2Flag == GRIB_DEPTH) {
|
||||
meta marsLevelist divdouble( coordinate2Start,1000 );
|
||||
meta roundedMarsLevelist round( marsLevelist ,1000);
|
||||
alias mars.levelist = roundedMarsLevelist ;
|
||||
|
||||
}
|
||||
if (coordinate2Flag == GRIB_LONGITUDE) {
|
||||
meta marsLongitude divdouble( coordinate2Start,1000000 );
|
||||
|
@ -76,12 +63,10 @@ if(horizontalCoordinateDefinition == 0)
|
|||
|
||||
if (coordinate2Flag == GRIB_LATITUDE) {
|
||||
meta marsLatitude divdouble( coordinate2Start,1000000 );
|
||||
|
||||
meta roundedMarsLatitude round( marsLatitude ,1000);
|
||||
alias mars.latitude = roundedMarsLatitude ;
|
||||
}
|
||||
|
||||
|
||||
#product
|
||||
if (averaging1Flag == 0) { alias mars.product = coordAveraging0;}
|
||||
if (averaging1Flag == 1) { alias mars.product = coordAveraging1;}
|
||||
|
@ -89,11 +74,9 @@ if(horizontalCoordinateDefinition == 0)
|
|||
if (averaging1Flag == 3) { alias mars.product = coordAveraging3;}
|
||||
|
||||
# date
|
||||
if(
|
||||
(marsType == TYPE_OR && averaging1Flag == P_TAVG)
|
||||
if ( (marsType == TYPE_OR && averaging1Flag == P_TAVG)
|
||||
|| (marsType == TYPE_OR && averaging1Flag == P_TACC)
|
||||
|| (marsType == TYPE_FX && averaging1Flag == P_TAVG)
|
||||
)
|
||||
|| (marsType == TYPE_FX && averaging1Flag == P_TAVG))
|
||||
{
|
||||
#remove mars.date;
|
||||
alias mars.date = verificationDate;
|
||||
|
@ -101,18 +84,13 @@ if(horizontalCoordinateDefinition == 0)
|
|||
constant stepZero = 0;
|
||||
alias mars.step = stepZero;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
meta coordinateIndexNumber evaluate(coordinate4Flag+coordinate3Flag);
|
||||
|
||||
# levelist latitude longitude
|
||||
|
||||
if(coordinateIndexNumber== 3)
|
||||
{
|
||||
if (coordinateIndexNumber== 3) {
|
||||
meta marsLatitude divdouble( coordinate1Start,1000000);
|
||||
meta marsLongitude divdouble( coordinate2Start,1000000);
|
||||
|
||||
|
@ -121,11 +99,9 @@ if(horizontalCoordinateDefinition == 0)
|
|||
|
||||
alias mars.latitude = roundedMarsLatitude ;
|
||||
alias mars.longitude = roundedMarsLongitude ;
|
||||
|
||||
}
|
||||
|
||||
if(coordinateIndexNumber == 4)
|
||||
{
|
||||
if (coordinateIndexNumber == 4) {
|
||||
meta marsLevelist divdouble( coordinate1Start,1000);
|
||||
meta marsLatitude divdouble( coordinate2Start,1000000);
|
||||
|
||||
|
@ -136,8 +112,7 @@ if(horizontalCoordinateDefinition == 0)
|
|||
alias mars.latitude = roundedMarsLatitude ;
|
||||
}
|
||||
|
||||
if(coordinateIndexNumber == 5)
|
||||
{
|
||||
if (coordinateIndexNumber == 5) {
|
||||
meta marsLevelist divdouble( coordinate1Start,1000);
|
||||
meta marsLongitude divdouble( coordinate2Start,1000000);
|
||||
|
||||
|
@ -146,43 +121,34 @@ if(horizontalCoordinateDefinition == 0)
|
|||
|
||||
alias mars.levelist = roundedMarsLevelist ;
|
||||
alias mars.longitude = roundedMarsLongitude ;
|
||||
|
||||
}
|
||||
|
||||
# section
|
||||
|
||||
if (coordinateIndexNumber == 3) { alias mars.section = tsectionNumber3; }
|
||||
if (coordinateIndexNumber == 4) { alias mars.section = tsectionNumber4; }
|
||||
if (coordinateIndexNumber == 5) { alias mars.section = tsectionNumber5; }
|
||||
|
||||
# range
|
||||
if (averaging1Flag == P_INST) {
|
||||
if(
|
||||
(marsType == TYPE_OR)
|
||||
if ((marsType == TYPE_OR)
|
||||
||(marsType == TYPE_FC)
|
||||
||(marsType == TYPE_CF)
|
||||
||(marsType == TYPE_PF)
|
||||
||(marsType == TYPE_FF)
|
||||
||(marsType == TYPE_OF)
|
||||
)
|
||||
||(marsType == TYPE_OF))
|
||||
{
|
||||
if (coordinate4Flag == 1){
|
||||
meta marsRange evaluate((coordinate4OfLastGridPoint - coordinate4OfFirstGridPoint)/3600);
|
||||
} else {
|
||||
|
||||
meta marsRange evaluate((coordinate3OfLastGridPoint - coordinate3OfFirstGridPoint)/3600);
|
||||
}
|
||||
|
||||
alias mars.range = marsRange;
|
||||
}
|
||||
}
|
||||
|
||||
# product
|
||||
alias mars.product = coordAveragingTims;
|
||||
# date
|
||||
|
||||
if (marsType == TYPE_OR && averaging1Flag == P_INST) {
|
||||
|
||||
#remove mars.date;
|
||||
alias mars.date = verificationDate;
|
||||
#remove mars.step;
|
||||
|
|
|
@ -1,11 +1,4 @@
|
|||
# (C) Copyright 2005- ECMWF.
|
||||
#
|
||||
# This software is licensed under the terms of the Apache Licence Version 2.0
|
||||
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
|
||||
#
|
||||
# In applying this licence, ECMWF does not waive the privileges and immunities granted to it by
|
||||
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
|
||||
#
|
||||
|
||||
section_length[3] section4Length;
|
||||
|
||||
|
@ -52,7 +45,6 @@ if(numberOfChars >= 12)
|
|||
ascii[1] char;
|
||||
}
|
||||
|
||||
|
||||
constant zero = 0;
|
||||
concept isEps(zero) { 1 = { marsType = "pf"; } }
|
||||
concept isSens(zero) { 1 = { marsType = "sf"; } }
|
||||
|
@ -84,18 +76,14 @@ if(numberOfChars >= 12)
|
|||
alias mars.class = marsClass;
|
||||
alias mars.type = marsType;
|
||||
alias mars.expver = marsExpver;
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
charValues list(numberOfChars) {
|
||||
ascii[1] char;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#reservedBytes list (numberOfReservedBytes){
|
||||
# unsigned[1] byte;
|
||||
#}
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ int main(int argc, char** argv)
set = codes_fieldset_new_from_files(0, filenames, nfiles, keys, nkeys, 0, 0, &err);
CODES_CHECK(err, 0);

/* not jet implemented */
/* not yet implemented */
/* err=codes_fieldset_apply_where(set,"(centre=='ecmf') && number==1 || step==6 "); */
/* CODES_CHECK(err,0); */

@ -57,7 +57,7 @@ int main(int argc, char** argv) {
set=grib_fieldset_new_from_files(0,filenames,nfiles,keys,nkeys,0,0,&err);
GRIB_CHECK(err,0);

/* not jet implemented */
/* not yet implemented */
/* err=grib_fieldset_apply_where(set,"(centre=='ecmf') && number==1 || step==6 "); */
/* GRIB_CHECK(err,0); */
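The two example hunks above build a fieldset from a list of input files and key names; the apply_where call stays commented out because the feature is not yet implemented. A minimal sketch of the same pattern with the codes_* names from the first hunk; the input file name and key list are placeholders, and codes_fieldset_next_handle / codes_fieldset_delete are assumed to be available alongside codes_fieldset_new_from_files:

#include "eccodes.h"

int main(void)
{
    int err = 0;
    int nfiles = 1, nkeys = 2;
    char* filenames[] = { "input.grib" };       /* placeholder input file */
    char* keys[]      = { "paramId", "step" };  /* keys the fieldset is built on */

    codes_fieldset* set = codes_fieldset_new_from_files(0, filenames, nfiles, keys, nkeys, 0, 0, &err);
    CODES_CHECK(err, 0);

    codes_handle* h = NULL;
    while ((h = codes_fieldset_next_handle(set, &err)) != NULL) {
        long step = 0;
        CODES_CHECK(codes_get_long(h, "step", &step), 0);
        codes_handle_delete(h);
    }
    codes_fieldset_delete(set);
    return 0;
}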
@ -1,18 +0,0 @@
#!/bin/sh

. ./include.sh

# To get verbose output
#$PYTHON -m unittest -v high_level_api

if [ $HAVE_PRODUCT_GRIB -eq 1 ]; then
echo 'Running tests for GRIB ...'
$PYTHON $examples_src/high_level_api_grib.py -v
rm -f test.index
fi

if [ $HAVE_PRODUCT_BUFR -eq 1 ]; then
echo 'Running tests for BUFR ...'
$PYTHON $examples_src/high_level_api_bufr.py -v
rm -f test.index
fi
@ -1,211 +0,0 @@
|
|||
#!/bin/env python
|
||||
|
||||
"""
|
||||
Unit tests for high level Python interface for BUFR.
|
||||
|
||||
Author: Daniel Lee, DWD, 2016
|
||||
"""
|
||||
|
||||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
import unittest
|
||||
|
||||
from eccodes import BufrFile, BufrMessage
|
||||
|
||||
TESTBUFR = "../../data/bufr/syno_multi.bufr"
|
||||
TEST_OUTPUT = "test-output.codes"
|
||||
|
||||
KNOWN_BUFR_KEYS = ['edition', 'masterTableNumber', 'bufrHeaderSubCentre', 'bufrHeaderCentre',
|
||||
'updateSequenceNumber', 'dataCategory', 'dataSubCategory', 'masterTablesVersionNumber',
|
||||
'localTablesVersionNumber', 'typicalYearOfCentury', 'typicalMonth', 'typicalDay',
|
||||
'typicalHour', 'typicalMinute', 'typicalDate', 'typicalTime', 'rdbType', 'newSubtype', 'rdbtimeDay',
|
||||
'rdbtimeHour', 'rdbtimeMinute', 'rdbtimeSecond', 'rectimeDay', 'rectimeHour', 'rectimeMinute', 'rectimeSecond',
|
||||
'correction1', 'correction1Part', 'correction2', 'correction2Part', 'correction3', 'correction3Part',
|
||||
'correction4', 'correction4Part', 'qualityControl', 'numberOfSubsets', 'localLatitude',
|
||||
'localLongitude', 'observedData', 'compressedData', 'unexpandedDescriptors', 'subsetNumber', '#1#blockNumber',
|
||||
'#1#blockNumber->percentConfidence', '#1#stationNumber', '#1#stationNumber->percentConfidence',
|
||||
'#1#stationType', '#1#stationType->percentConfidence', '#1#year', '#1#year->percentConfidence',
|
||||
'#1#month', '#1#month->percentConfidence', '#1#day', '#1#day->percentConfidence', '#1#hour',
|
||||
'#1#hour->percentConfidence', '#1#minute', '#1#minute->percentConfidence', '#1#latitude',
|
||||
'#1#latitude->percentConfidence', '#1#longitude', '#1#longitude->percentConfidence',
|
||||
'#1#heightOfStation', '#1#heightOfStation->percentConfidence', '#1#nonCoordinatePressure',
|
||||
'#1#nonCoordinatePressure->percentConfidence', '#1#pressureReducedToMeanSeaLevel',
|
||||
'#1#pressureReducedToMeanSeaLevel->percentConfidence', '#1#3HourPressureChange',
|
||||
'#1#3HourPressureChange->percentConfidence', '#1#characteristicOfPressureTendency',
|
||||
'#1#characteristicOfPressureTendency->percentConfidence', '#1#windDirectionAt10M',
|
||||
'#1#windDirectionAt10M->percentConfidence', '#1#windSpeedAt10M',
|
||||
'#1#windSpeedAt10M->percentConfidence',
|
||||
'#1#airTemperatureAt2M', '#1#airTemperatureAt2M->percentConfidence', '#1#dewpointTemperatureAt2M',
|
||||
'#1#dewpointTemperatureAt2M->percentConfidence', '#1#relativeHumidity',
|
||||
'#1#relativeHumidity->percentConfidence', '#1#horizontalVisibility',
|
||||
'#1#horizontalVisibility->percentConfidence', '#1#presentWeather',
|
||||
'#1#presentWeather->percentConfidence', '#1#pastWeather1', '#1#pastWeather1->percentConfidence',
|
||||
'#1#pastWeather2', '#1#pastWeather2->percentConfidence', '#1#cloudCoverTotal',
|
||||
'#1#cloudCoverTotal->percentConfidence', '#1#verticalSignificanceSurfaceObservations',
|
||||
'#1#verticalSignificanceSurfaceObservations->percentConfidence', '#1#cloudAmount',
|
||||
'#1#cloudAmount->percentConfidence', '#1#heightOfBaseOfCloud',
|
||||
'#1#heightOfBaseOfCloud->percentConfidence', '#1#cloudType', '#1#cloudType->percentConfidence',
|
||||
'#2#cloudType', '#2#cloudType->percentConfidence', '#3#cloudType', '#3#cloudType->percentConfidence',
|
||||
'#2#verticalSignificanceSurfaceObservations',
|
||||
'#2#verticalSignificanceSurfaceObservations->percentConfidence',
|
||||
'#2#cloudAmount', '#2#cloudAmount->percentConfidence', '#4#cloudType',
|
||||
'#4#cloudType->percentConfidence', '#2#heightOfBaseOfCloud',
|
||||
'#2#heightOfBaseOfCloud->percentConfidence',
|
||||
'#3#verticalSignificanceSurfaceObservations',
|
||||
'#3#verticalSignificanceSurfaceObservations->percentConfidence',
|
||||
'#3#cloudAmount', '#3#cloudAmount->percentConfidence', '#5#cloudType',
|
||||
'#5#cloudType->percentConfidence',
|
||||
'#3#heightOfBaseOfCloud', '#3#heightOfBaseOfCloud->percentConfidence',
|
||||
'#4#verticalSignificanceSurfaceObservations',
|
||||
'#4#verticalSignificanceSurfaceObservations->percentConfidence', '#4#cloudAmount',
|
||||
'#4#cloudAmount->percentConfidence',
|
||||
'#6#cloudType', '#6#cloudType->percentConfidence', '#4#heightOfBaseOfCloud',
|
||||
'#4#heightOfBaseOfCloud->percentConfidence',
|
||||
'#5#verticalSignificanceSurfaceObservations',
|
||||
'#5#verticalSignificanceSurfaceObservations->percentConfidence', '#5#cloudAmount',
|
||||
'#5#cloudAmount->percentConfidence', '#7#cloudType', '#7#cloudType->percentConfidence',
|
||||
'#5#heightOfBaseOfCloud',
|
||||
'#5#heightOfBaseOfCloud->percentConfidence', '#1#totalPrecipitationPast6Hours',
|
||||
'#1#totalPrecipitationPast6Hours->percentConfidence', '#1#totalSnowDepth',
|
||||
'#1#totalSnowDepth->percentConfidence',
|
||||
'#1#centre', '#1#generatingApplication']
|
||||
|
||||
|
||||
|
||||
class TestBufrFile(unittest.TestCase):
|
||||
"""Test BufrFile functionality."""
|
||||
|
||||
def test_memory_management(self):
|
||||
"""Messages in BufrFile can be opened and closed properly."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
self.assertEqual(len(bufr_file), 3)
|
||||
for i in range(len(bufr_file)):
|
||||
msg = BufrMessage(bufr_file)
|
||||
self.assertEqual(msg["bufrHeaderCentre"], 98)
|
||||
self.assertEqual(msg['count'], i + 1)
|
||||
self.assertEqual(len(bufr_file.open_messages), 3)
|
||||
self.assertEqual(len(bufr_file.open_messages), 0)
|
||||
|
||||
def test_message_counting_works(self):
|
||||
"""The BufrFile is aware of its messages."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg_count = len(bufr_file)
|
||||
self.assertEqual(msg_count, 3)
|
||||
|
||||
def test_iterator_protocol(self):
|
||||
"""The BufrFile allows pythonic iteration over all messages."""
|
||||
latitudes = []
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
for msg in bufr_file:
|
||||
latitudes.append(msg["localLatitude"])
|
||||
self.assertSequenceEqual(latitudes, [70.93, 77, 78.92])
|
||||
|
||||
def test_read_past_last_message(self):
|
||||
"""Trying to open message on exhausted BUFR file raises IOError."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
for _ in range(len(bufr_file)):
|
||||
BufrMessage(bufr_file)
|
||||
self.assertRaises(IOError, lambda: BufrMessage(bufr_file))
|
||||
|
||||
def test_read_invalid_file(self):
|
||||
"""Trying to open message on nonexistent file raises IOError."""
|
||||
with NamedTemporaryFile(mode='r') as f:
|
||||
with BufrFile(f.name) as bufr_file:
|
||||
self.assertRaises(IOError, lambda: BufrMessage(bufr_file))
|
||||
|
||||
|
||||
class TestBufrMessage(unittest.TestCase):
|
||||
"""Test BufrMessage functionality"""
|
||||
|
||||
def test_metadata(self):
|
||||
"""Metadata is read correctly from BufrMessage."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg = BufrMessage(bufr_file)
|
||||
msg.unpack()
|
||||
msg_keys = list(msg.keys())
|
||||
self.assertEqual(len(msg_keys), 202)
|
||||
for key in KNOWN_BUFR_KEYS:
|
||||
assert key in msg_keys
|
||||
# Size of message in bytes
|
||||
self.assertEqual(msg.size(), 220)
|
||||
self.assertEqual(len(list(msg.keys())), len(msg))
|
||||
|
||||
def test_content(self):
|
||||
"""Data values are read correctly from BufrMessage."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg = BufrMessage(bufr_file)
|
||||
msg.unpack()
|
||||
self.assertEqual(msg["airTemperatureAt2M"], 274.5)
|
||||
|
||||
# TODO: Test behaviour with missing messages (SUP-1874)
|
||||
|
||||
def test_value_setting(self):
|
||||
"""Keys can be set properly."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg = BufrMessage(bufr_file)
|
||||
key, val = "localLongitude", 5
|
||||
msg[key] = val
|
||||
self.assertEqual(msg[key], val)
|
||||
|
||||
def test_serialize(self):
|
||||
"""Message can be serialized to file."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg = BufrMessage(bufr_file)
|
||||
with open(TEST_OUTPUT, "wb") as test:
|
||||
msg.write(test)
|
||||
os.unlink(TEST_OUTPUT)
|
||||
|
||||
def test_clone(self):
|
||||
"""Messages can be used to produce clone Messages."""
|
||||
with BufrFile(TESTBUFR) as bufr_file:
|
||||
msg = BufrMessage(bufr_file)
|
||||
msg2 = BufrMessage(clone=msg)
|
||||
self.assertSequenceEqual(list(msg.keys()), list(msg2.keys()))
|
||||
|
||||
def test_copy_data(self):
|
||||
"""Can copy data section from one message to another"""
|
||||
bufr = BufrMessage(sample='BUFR3')
|
||||
with BufrFile('../../data/bufr/metar_with_2_bias.bufr') as bufr_file:
|
||||
bufrin = BufrMessage(bufr_file)
|
||||
ivalues = (
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 0, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 0)
|
||||
bufr['inputDataPresentIndicator'] = ivalues
|
||||
bufr['edition'] = 3
|
||||
bufr['masterTableNumber'] = 0
|
||||
bufr['bufrHeaderSubCentre'] = 0
|
||||
bufr['bufrHeaderCentre'] = 98
|
||||
bufr['updateSequenceNumber'] = 1
|
||||
bufr['dataCategory'] = 0
|
||||
bufr['dataSubCategory'] = 140
|
||||
bufr['masterTablesVersionNumber'] = 13
|
||||
bufr['localTablesVersionNumber'] = 1
|
||||
bufr['typicalYearOfCentury'] = 15
|
||||
bufr['typicalMonth'] = 5
|
||||
bufr['typicalDay'] = 4
|
||||
bufr['typicalHour'] = 9
|
||||
bufr['typicalMinute'] = 30
|
||||
bufr['numberOfSubsets'] = 1
|
||||
bufr['observedData'] = 1
|
||||
bufr['compressedData'] = 0
|
||||
ivalues = (
|
||||
307011, 7006, 10004, 222000, 101023, 31031, 1031, 1032, 101023, 33007,
|
||||
225000, 236000, 101023, 31031, 1031, 1032, 8024, 101001, 225255, 225000,
|
||||
236000, 101023, 31031, 1031, 1032, 8024, 101001, 225255,
|
||||
1063, 2001, 4001, 4002, 4003, 4004, 4005, 5002,
|
||||
6002, 7001, 7006, 11001, 11016, 11017, 11002)
|
||||
bufr['unexpandedDescriptors'] = ivalues
|
||||
bufrin.unpack()
|
||||
bufrin.copy_data(bufr)
|
||||
with open(TEST_OUTPUT, 'wb') as test:
|
||||
bufr.write(test)
|
||||
os.unlink(TEST_OUTPUT)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
|
@ -1,257 +0,0 @@
|
|||
#!/bin/env python
|
||||
|
||||
"""
|
||||
This is now deprecated. Use cfgrib instead
|
||||
"""
|
||||
|
||||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
import unittest
|
||||
|
||||
from eccodes import GribFile
|
||||
from eccodes import GribIndex
|
||||
from eccodes import GribMessage
|
||||
from eccodes.high_level.gribmessage import IndexNotSelectedError
|
||||
|
||||
TESTGRIB = "../../data/high_level_api.grib2"
|
||||
TEST_OUTPUT = "test-output.codes"
|
||||
TEST_INDEX = "test.index"
|
||||
TEST_KEYS = ("dataDate", "stepRange")
|
||||
TEST_VALUES = 20110225, 0
|
||||
SELECTION_DICTIONARY = {}
|
||||
for i1 in range(len(TEST_KEYS)):
|
||||
SELECTION_DICTIONARY[TEST_KEYS[i1]] = TEST_VALUES[i1]
|
||||
TEST_INDEX_OUTPUT = TESTGRIB
|
||||
TEST_STEPRANGE = ('0', '12', '18', '24', '6')
|
||||
# These keys should be available even if new keys are defined
|
||||
KNOWN_GRIB_KEYS = ['7777', 'GRIBEditionNumber', 'N', 'NV',
|
||||
'Ni', 'Nj', 'PLPresent', 'PVPresent', 'addEmptySection2',
|
||||
'addExtraLocalSection', 'alternativeRowScanning',
|
||||
'angleDivisor', 'angleSubdivisions',
|
||||
'average', 'backgroundProcess', 'basicAngleOfTheInitialProductionDomain',
|
||||
'binaryScaleFactor', 'bitMapIndicator', 'bitmapPresent',
|
||||
'bitsPerValue', 'bottomLevel', 'centre',
|
||||
'centreDescription', 'cfName', 'cfNameECMF', 'cfVarName',
|
||||
'cfVarNameECMF', 'changeDecimalPrecision', 'class',
|
||||
'climateDateFrom', 'climateDateTo', 'codedValues',
|
||||
'dataDate', 'dataRepresentationTemplateNumber', 'dataTime',
|
||||
'day', 'decimalPrecision', 'decimalScaleFactor',
|
||||
'deleteCalendarId', 'deleteExtraLocalSection', 'deletePV',
|
||||
'discipline', 'distinctLatitudes', 'distinctLongitudes',
|
||||
'editionNumber', 'endStep', 'eps',
|
||||
'experimentVersionNumber', 'extraLocalSectionPresent',
|
||||
'forecastTime', 'g2grid', 'gaussianGridName',
|
||||
'genVertHeightCoords', 'generatingProcessIdentifier',
|
||||
'getNumberOfValues', 'global', 'globalDomain',
|
||||
'grib2LocalSectionNumber', 'grib2LocalSectionPresent',
|
||||
'grib2divider', 'gridDefinitionDescription',
|
||||
'gridDefinitionTemplateNumber',
|
||||
'gridDescriptionSectionPresent', 'gridType', 'hour',
|
||||
'hoursAfterDataCutoff', 'iDirectionIncrement',
|
||||
'iDirectionIncrementGiven', 'iDirectionIncrementInDegrees',
|
||||
'iScansNegatively', 'iScansPositively', 'identifier',
|
||||
'ieeeFloats', 'ifsParam', 'ijDirectionIncrementGiven',
|
||||
'indicatorOfUnitOfTimeRange',
|
||||
'interpretationOfNumberOfPoints', 'isConstant',
|
||||
'isHindcast', 'isOctahedral', 'is_uerra',
|
||||
'jDirectionIncrementGiven', 'jPointsAreConsecutive',
|
||||
'jScansPositively', 'julianDay', 'kurtosis', 'latLonValues',
|
||||
'latitudeOfFirstGridPoint', 'latitudeOfFirstGridPointInDegrees',
|
||||
'latitudeOfLastGridPoint', 'latitudeOfLastGridPointInDegrees', 'latitudes',
|
||||
'legBaseDate', 'legBaseTime', 'legNumber',
|
||||
'lengthOfHeaders', 'level', 'localDefinitionNumber',
|
||||
'localDir', 'localTablesVersion',
|
||||
'longitudeOfFirstGridPoint', 'longitudeOfFirstGridPointInDegrees',
|
||||
'longitudeOfLastGridPoint', 'longitudeOfLastGridPointInDegrees', 'longitudes',
|
||||
'mAngleMultiplier', 'mBasicAngle', 'marsClass',
|
||||
'marsStream', 'marsType', 'masterDir', 'maximum',
|
||||
'md5Headers', 'md5Section1', 'md5Section3', 'md5Section4',
|
||||
'md5Section5', 'md5Section6', 'md5Section7', 'minimum',
|
||||
'minute', 'minutesAfterDataCutoff', 'missingValue',
|
||||
'modelName', 'month', 'name', 'nameECMF',
|
||||
'nameOfFirstFixedSurface', 'nameOfSecondFixedSurface',
|
||||
'neitherPresent', 'numberOfDataPoints',
|
||||
'numberOfForecastsInEnsemble', 'numberOfMissing',
|
||||
'numberOfOctectsForNumberOfPoints', 'numberOfSection',
|
||||
'numberOfValues', 'oceanAtmosphereCoupling',
|
||||
'offsetValuesBy', 'optimizeScaleFactor', 'packingError',
|
||||
'packingType', 'paramId', 'paramIdECMF',
|
||||
'parameterCategory', 'parameterName', 'parameterNumber',
|
||||
'parameterUnits', 'perturbationNumber', 'pressureUnits',
|
||||
'productDefinitionTemplateNumber',
|
||||
'productDefinitionTemplateNumberInternal', 'productType',
|
||||
'productionStatusOfProcessedData', 'radius',
|
||||
'referenceDate', 'referenceValue', 'referenceValueError',
|
||||
'resolutionAndComponentFlags', 'resolutionAndComponentFlags1',
|
||||
'resolutionAndComponentFlags2', 'resolutionAndComponentFlags6',
|
||||
'resolutionAndComponentFlags7', 'resolutionAndComponentFlags8',
|
||||
'scaleFactorOfEarthMajorAxis', 'scaleFactorOfEarthMinorAxis',
|
||||
'scaleFactorOfFirstFixedSurface', 'scaleFactorOfRadiusOfSphericalEarth',
|
||||
'scaleFactorOfSecondFixedSurface', 'scaleValuesBy',
|
||||
'scaledValueOfEarthMajorAxis', 'scaledValueOfEarthMinorAxis',
|
||||
'scaledValueOfFirstFixedSurface', 'scaledValueOfRadiusOfSphericalEarth',
|
||||
'scaledValueOfSecondFixedSurface', 'scanningMode',
|
||||
'scanningMode5', 'scanningMode6', 'scanningMode7',
|
||||
'scanningMode8', 'second', 'section0Length',
|
||||
'section1Length', 'section2Length', 'section2Padding',
|
||||
'section3Length', 'section3Padding', 'section4Length',
|
||||
'section5Length', 'section6Length', 'section7Length',
|
||||
'section8Length', 'sectionNumber', 'selectStepTemplateInstant', 'selectStepTemplateInterval',
|
||||
'setBitsPerValue', 'setCalendarId', 'shapeOfTheEarth',
|
||||
'shortName', 'shortNameECMF', 'significanceOfReferenceTime',
|
||||
'skewness', 'sourceOfGridDefinition', 'standardDeviation',
|
||||
'startStep', 'stepRange', 'stepType', 'stepTypeInternal',
|
||||
'stepUnits', 'stream', 'subCentre',
|
||||
'subdivisionsOfBasicAngle', 'tablesVersion',
|
||||
'tablesVersionLatest', 'tempPressureUnits', 'topLevel',
|
||||
'totalLength', 'type', 'typeOfEnsembleForecast',
|
||||
'typeOfFirstFixedSurface', 'typeOfGeneratingProcess',
|
||||
'typeOfLevel', 'typeOfOriginalFieldValues',
|
||||
'typeOfProcessedData', 'typeOfSecondFixedSurface', 'units',
|
||||
'unitsECMF', 'unitsOfFirstFixedSurface',
|
||||
'unitsOfSecondFixedSurface', 'unpackedError',
|
||||
'uvRelativeToGrid', 'validityDate', 'validityTime',
|
||||
'values', 'year']
|
||||
|
||||
|
||||
class TestGribFile(unittest.TestCase):
|
||||
"""Test GribFile functionality."""
|
||||
|
||||
def test_memory_management(self):
|
||||
"""Messages in GribFile can be opened and closed properly."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
self.assertEqual(len(grib_file), 5)
|
||||
for i in range(len(grib_file)):
|
||||
msg = GribMessage(grib_file)
|
||||
self.assertEqual(msg["shortName"], "msl")
|
||||
self.assertEqual(msg['count'], i + 1)
|
||||
self.assertEqual(len(grib_file.open_messages), 5)
|
||||
self.assertEqual(len(grib_file.open_messages), 0)
|
||||
|
||||
def test_message_counting_works(self):
|
||||
"""The GribFile is aware of its messages."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg_count = len(grib_file)
|
||||
self.assertEqual(msg_count, 5)
|
||||
|
||||
def test_iterator_protocol(self):
|
||||
"""The GribFile allows pythonic iteration over all messages."""
|
||||
step_ranges = []
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
for msg in grib_file:
|
||||
step_ranges.append(msg["stepRange"])
|
||||
self.assertSequenceEqual(step_ranges, ["0", "6", "12", "18", "24"])
|
||||
|
||||
def test_read_past_last_message(self):
|
||||
"""Trying to open message on exhausted GRIB file raises IOError."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
for _ in range(len(grib_file)):
|
||||
GribMessage(grib_file)
|
||||
self.assertRaises(IOError, lambda: GribMessage(grib_file))
|
||||
|
||||
def test_read_invalid_file(self):
|
||||
"""Trying to open message on nonexistent GRIB file raises IOError."""
|
||||
with NamedTemporaryFile(mode='r') as f:
|
||||
with GribFile(f.name) as grib_file:
|
||||
self.assertRaises(IOError, lambda: GribMessage(grib_file))
|
||||
|
||||
|
||||
class TestGribMessage(unittest.TestCase):
|
||||
"""Test GribMessage functionality."""
|
||||
|
||||
def test_metadata(self):
|
||||
"""Metadata is read correctly from GribMessage."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
msg_keys = list(msg.keys())
|
||||
for key in KNOWN_GRIB_KEYS:
|
||||
assert key in msg_keys, "key '%s' not found" % key
|
||||
# Size of message in bytes
|
||||
self.assertEqual(msg.size(), 160219)
|
||||
self.assertEqual(len(list(msg.keys())), len(msg))
|
||||
|
||||
def test_missing_message_behaviour(self):
|
||||
"""Key with MISSING value."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
self.assertTrue(msg.missing("scaleFactorOfSecondFixedSurface"))
|
||||
msg["scaleFactorOfSecondFixedSurface"] = 5
|
||||
msg.set_missing("scaleFactorOfSecondFixedSurface")
|
||||
# with self.assertRaises(KeyError):
|
||||
# msg["scaleFactorOfSecondFixedSurface"]
|
||||
|
||||
def test_value_setting(self):
|
||||
"""Keys can be set properly."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
msg["scaleFactorOfSecondFixedSurface"] = 5
|
||||
msg["values"] = [1, 2, 3]
|
||||
self.assertEqual(msg['scaleFactorOfSecondFixedSurface'], 5)
|
||||
|
||||
def test_multi_value_setting(self):
|
||||
"""Multiple keys/values can be set properly."""
|
||||
msg = GribMessage(sample='GRIB1')
|
||||
msg['paramId', 'stepType', 'edition'] = 49, 'avg', 2
|
||||
self.assertEqual(msg['shortName'], '10fg')
|
||||
# Another test
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
msg['setLocalDefinition', 'localDefinitionNumber'] = 1, 25
|
||||
msg['typeOfFirstFixedSurface', 'typeOfSecondFixedSurface'] = 1, 8
|
||||
msg[('typeOfFirstFixedSurface', 'typeOfSecondFixedSurface')] = (1, 8) # Also works
|
||||
self.assertEqual(msg['localDefinitionNumber'], 25)
|
||||
self.assertEqual(msg['typeOfLevel'], 'entireAtmosphere')
|
||||
|
||||
def test_serialize(self):
|
||||
"""Message can be serialized to file."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
with open(TEST_OUTPUT, "wb") as test:
|
||||
msg.write(test)
|
||||
os.unlink(TEST_OUTPUT)
|
||||
|
||||
def test_clone(self):
|
||||
"""Messages can be used to produce clone Messages."""
|
||||
with GribFile(TESTGRIB) as grib_file:
|
||||
msg = GribMessage(grib_file)
|
||||
msg2 = GribMessage(clone=msg)
|
||||
self.assertSequenceEqual(list(msg.keys()), list(msg2.keys()))
|
||||
|
||||
|
||||
class TestGribIndex(unittest.TestCase):
|
||||
"""Test GribIndex functionality."""
|
||||
|
||||
def test_memory_management(self):
|
||||
"""GribIndex closes GribMessages properly."""
|
||||
with GribIndex(TESTGRIB, TEST_KEYS) as idx:
|
||||
idx.select(SELECTION_DICTIONARY)
|
||||
self.assertEqual(len(idx.open_messages), 1)
|
||||
self.assertEqual(len(idx.open_messages), 0)
|
||||
|
||||
def test_create_and_serialize_index(self):
|
||||
"""GribIndex can be saved to file, file can be added to index."""
|
||||
with GribIndex(TESTGRIB, TEST_KEYS) as idx:
|
||||
idx.write(TEST_INDEX)
|
||||
with GribIndex(file_index=TEST_INDEX) as idx:
|
||||
idx.add(TESTGRIB)
|
||||
os.unlink(TEST_INDEX)
|
||||
|
||||
def test_index_comprehension(self):
|
||||
"""GribIndex understands underlying GRIB index properly."""
|
||||
with GribIndex(TESTGRIB, TEST_KEYS) as idx:
|
||||
self.assertEqual(idx.size(TEST_KEYS[1]), 5)
|
||||
self.assertSequenceEqual(idx.values(TEST_KEYS[1]), TEST_STEPRANGE)
|
||||
with self.assertRaises(IndexNotSelectedError):
|
||||
# Note: The following will issue a message to stderr:
|
||||
# ECCODES ERROR : please select a value for index key "dataDate"
|
||||
# This is expected behaviour
|
||||
idx.select({TEST_KEYS[1]: TEST_VALUES[0]})
|
||||
# Now it will be OK as we have selected all necessary keys
|
||||
idx.select(SELECTION_DICTIONARY)
|
||||
self.assertEqual(len(idx.open_messages), 1)
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
|
@ -0,0 +1,199 @@
|
|||
# (C) Copyright 2005- ECMWF.
|
||||
#
|
||||
# This software is licensed under the terms of the Apache Licence Version 2.0
|
||||
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
|
||||
#
|
||||
# In applying this licence, ECMWF does not waive the privileges and immunities
|
||||
# granted to it by virtue of its status as an intergovernmental organisation
|
||||
# nor does it submit to any jurisdiction.
|
||||
|
||||
import traceback
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import getopt
|
||||
|
||||
from eccodes import *
|
||||
|
||||
VERBOSE = 1 # verbose error reporting
|
||||
DEBUG = 0
|
||||
|
||||
|
||||
def usage():
|
||||
progname = os.path.basename(sys.argv[0])
|
||||
print("Usage: ", progname, "[options] bufr_file")
|
||||
print("Options:")
|
||||
print("\t-e generate the file defining Table B descriptors (element.table)")
|
||||
print("\t-s generate the file defining Table D descriptors (sequence.def)")
|
||||
print("")
|
||||
|
||||
|
||||
def infer_type(units, scale):
|
||||
ktype = "double"
|
||||
if re.search("^CCITT", units) or re.search("^CHARACTER", units):
|
||||
return "string"
|
||||
if re.search("CODE\s*TABLE", units):
|
||||
return "table"
|
||||
if re.search("FLAG\s*TABLE", units):
|
||||
return "flag"
|
||||
if re.search("NUMERIC", units) or int(scale) <= 0:
|
||||
return "long"
|
||||
return ktype
|
||||
|
||||
|
||||
def generate_tables(INPUT, what):
|
||||
assert what in ("element", "sequence")
|
||||
|
||||
f = open(INPUT, "rb")
|
||||
|
||||
cnt = 0
|
||||
if what == "element":
|
||||
print("#code|abbreviation|type|name|unit|scale|reference|width")
|
||||
|
||||
# loop for the messages in the file
|
||||
while 1:
|
||||
# get handle for message
|
||||
bufr = codes_bufr_new_from_file(f)
|
||||
if bufr is None:
|
||||
break
|
||||
|
||||
# Sanity check
|
||||
if codes_get(bufr, "dataCategory") != 11:
|
||||
# print('BUFR message dataCategory must be 11 (BUFR tables). Ignoring')
|
||||
codes_release(bufr)
|
||||
continue
|
||||
if codes_get(bufr, "numberOfSubsets") == 0:
|
||||
# print('BUFR message number of subsets == 0. Ignoring')
|
||||
codes_release(bufr)
|
||||
continue
|
||||
|
||||
if DEBUG:
|
||||
print(f"Processing message {cnt+1}")
|
||||
codes_set(bufr, "unpack", 1)
|
||||
|
||||
# Each table message contains three delayed replications (031001):
|
||||
# First is for Table A (skipped), second for Table B and third for Table D
|
||||
|
||||
replications = codes_get_array(bufr, "delayedDescriptorReplicationFactor")
|
||||
fDesc = codes_get_array(bufr, "fDescriptorToBeAddedOrDefined")
|
||||
xDesc = codes_get_array(bufr, "xDescriptorToBeAddedOrDefined")
|
||||
yDesc = codes_get_array(bufr, "yDescriptorToBeAddedOrDefined")
|
||||
|
||||
assert len(fDesc) == len(xDesc)
|
||||
assert len(fDesc) == len(yDesc)
|
||||
assert len(replications) > 0
|
||||
assert len(fDesc) == replications[1] + replications[2]
|
||||
|
||||
if what == "element":
|
||||
try:
|
||||
elementName1 = codes_get_array(bufr, "elementNameLine1")
|
||||
elementName2 = codes_get_array(bufr, "elementNameLine2")
|
||||
unitsName = codes_get_array(bufr, "unitsName")
|
||||
unitsScaleSign = codes_get_array(bufr, "unitsScaleSign")
|
||||
unitsScale = codes_get_array(bufr, "unitsScale")
|
||||
unitsReferenceSign = codes_get_array(bufr, "unitsReferenceSign")
|
||||
unitsReferenceValue = codes_get_array(bufr, "unitsReferenceValue")
|
||||
elementDataWidth = codes_get_array(bufr, "elementDataWidth")
|
||||
except CodesInternalError as err:
|
||||
if DEBUG:
|
||||
print("No element descriptors found")
|
||||
pass
|
||||
|
||||
for i in range(len(fDesc)):
|
||||
fd = fDesc[i]
|
||||
xd = xDesc[i]
|
||||
yd = yDesc[i]
|
||||
if fd == "0":
|
||||
# ecCodes key (abbreviation) => first word of elementNameLine1
|
||||
el1 = elementName1[i].rstrip()
|
||||
key = str.split(el1)[0]
|
||||
# description (name) => Combine rest of elementNameLine1 and elementNameLine2
|
||||
desc = " ".join(str.split(el1)[1:]) + elementName2[i].rstrip()
|
||||
if len(desc) == 0:
|
||||
desc = key
|
||||
units = unitsName[i].rstrip()
|
||||
sign = unitsScaleSign[i]
|
||||
scale = int(unitsScale[i])
|
||||
if sign == "-":
|
||||
scale = -1 * scale
|
||||
sign = unitsReferenceSign[i]
|
||||
reference = int(unitsReferenceValue[i])
|
||||
if sign == "-":
|
||||
reference = -1 * reference
|
||||
width = elementDataWidth[i].rstrip()
|
||||
ktype = infer_type(units, scale)
|
||||
print(
|
||||
f"{fd}{xd}{yd}|{key}|{ktype}|{desc}|{units}|{scale}|{reference}|{width}"
|
||||
)
|
||||
if what == "sequence":
|
||||
try:
|
||||
descriptorDefiningSequence = codes_get_array(
|
||||
bufr, "descriptorDefiningSequence"
|
||||
)
|
||||
except CodesInternalError as err:
|
||||
if DEBUG:
|
||||
print("No sequence descriptors found")
|
||||
pass
|
||||
k = 0
|
||||
rindex = 3 # From index 3 onwards we have the contents of sequences
|
||||
for i in range(len(fDesc)):
|
||||
fd = fDesc[i]
|
||||
xd = xDesc[i]
|
||||
yd = yDesc[i]
|
||||
if fd == "3":
|
||||
repls = replications[rindex]
|
||||
if repls:
|
||||
print(f'"{fd}{xd}{yd}" = [ ', end="")
|
||||
comma = ","
|
||||
for j in range(repls):
|
||||
seq = descriptorDefiningSequence[k + j]
|
||||
if j == repls - 1:
|
||||
comma = ""
|
||||
print(f"{seq}{comma} ", end="")
|
||||
k = k + repls
|
||||
rindex = rindex + 1
|
||||
print("]")
|
||||
|
||||
cnt += 1
|
||||
|
||||
codes_release(bufr)
|
||||
|
||||
f.close()
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2:
|
||||
usage()
|
||||
return 1
|
||||
try:
|
||||
what = "element"
|
||||
options = "es"
|
||||
opts, args = getopt.getopt(sys.argv[1:], options)
|
||||
for o, a in opts:
|
||||
if o == "-e":
|
||||
what = "element"
|
||||
elif o == "-s":
|
||||
what = "sequence"
|
||||
else:
|
||||
assert False, "Invalid option"
|
||||
# Check we have a BUFR file to process
|
||||
if not args:
|
||||
usage()
|
||||
return 1
|
||||
for arg in args:
|
||||
generate_tables(arg, what)
|
||||
except getopt.GetoptError as err:
|
||||
print("Error: ", err)
|
||||
usage()
|
||||
return 1
|
||||
except CodesInternalError as err:
|
||||
if VERBOSE:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
else:
|
||||
sys.stderr.write(err.msg + "\n")
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
memfs.py
@ -1,12 +1,54 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import binascii
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import binascii
|
||||
import time
|
||||
|
||||
assert len(sys.argv) > 2
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--count",
|
||||
type=int,
|
||||
default=10,
|
||||
help="Number of files to generate",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-C",
|
||||
"--chunk",
|
||||
type=int,
|
||||
default=16,
|
||||
help="Chunk size (MB)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
"--output",
|
||||
type=str,
|
||||
default="memfs_gen",
|
||||
help="Name of C file to generate",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--exclude",
|
||||
help="Exclude packages",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"dirs",
|
||||
type=str,
|
||||
nargs="+",
|
||||
help="The list of directories to process",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
start = time.time()
|
||||
print("MEMFS: starting")
|
||||
|
@ -14,29 +56,24 @@ print("MEMFS: starting")
|
|||
# Exclude experimental features e.g. GRIB3 and TAF
|
||||
# The BUFR codetables is not used in the engine
|
||||
EXCLUDED = ["grib3", "codetables", "taf", "stations"]
|
||||
EXPECTED_FCOUNT = 6
|
||||
|
||||
pos = 1
|
||||
if sys.argv[1] == "-exclude":
|
||||
product = sys.argv[2]
|
||||
if product == "bufr":
|
||||
EXCLUDED.append(product)
|
||||
EXPECTED_FCOUNT = 4
|
||||
elif product == "grib":
|
||||
EXCLUDED.extend(["grib1", "grib2"])
|
||||
EXPECTED_FCOUNT = 2
|
||||
else:
|
||||
assert False, "Invalid product %s" % product
|
||||
pos = 3
|
||||
EXCLUDE = {
|
||||
None: [],
|
||||
"bufr": ["bufr"],
|
||||
"grib": ["grib1", "grib2"],
|
||||
}
|
||||
|
||||
dirs = [os.path.realpath(x) for x in sys.argv[pos:-1]]
|
||||
EXCLUDED.extend(EXCLUDE[args.exclude])
|
||||
|
||||
|
||||
dirs = [os.path.realpath(x) for x in args.dirs]
|
||||
print("Directories: ", dirs)
|
||||
print("Excluding: ", EXCLUDED)
|
||||
|
||||
FILES = {}
|
||||
SIZES = {}
|
||||
NAMES = []
|
||||
CHUNK = 16 * 1024 * 1024 # chunk size in bytes
|
||||
CHUNK = args.chunk * 1024 * 1024 # chunk size in bytes
|
||||
|
||||
# Binary to ASCII function. Different in Python 2 and 3
|
||||
try:
|
||||
|
@ -51,10 +88,11 @@ def get_outfile_name(base, count):
|
|||
|
||||
|
||||
# The last argument is the base name of the generated C file(s)
|
||||
output_file_base = sys.argv[-1]
|
||||
output_file_base = args.output
|
||||
|
||||
buffer = None
|
||||
fcount = -1
|
||||
fcount = 0
|
||||
MAX_FCOUNT = args.count
|
||||
|
||||
for directory in dirs:
|
||||
|
||||
|
@ -69,8 +107,8 @@ for directory in dirs:
|
|||
for name in files:
|
||||
|
||||
if buffer is None:
|
||||
fcount += 1
|
||||
opath = get_outfile_name(output_file_base, fcount)
|
||||
fcount += 1
|
||||
print("MEMFS: Generating output:", opath)
|
||||
buffer = open(opath, "w")
|
||||
|
||||
|
@ -119,9 +157,19 @@ for directory in dirs:
|
|||
if buffer is not None:
|
||||
buffer.close()
|
||||
|
||||
assert fcount <= MAX_FCOUNT, fcount
|
||||
|
||||
while fcount < MAX_FCOUNT:
|
||||
opath = get_outfile_name(output_file_base, fcount)
|
||||
print("MEMFS: Generating output:", opath, "(empty)")
|
||||
with open(opath, "w") as f:
|
||||
# ISO compilers issue a warning for an empty translation unit
|
||||
# so add a dummy declaration to suppress this
|
||||
print("struct eccodes_suppress_iso_warning;/* empty */", file=f)
|
||||
fcount += 1
|
||||
|
||||
# The number of generated C files is hard coded.
|
||||
# See memfs/CMakeLists.txt
|
||||
assert fcount == EXPECTED_FCOUNT, fcount
|
||||
opath = output_file_base + "_final.c"
|
||||
print("MEMFS: Generating output:", opath)
|
||||
g = open(opath, "w")
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
# Note: the count 10 of generated C files must match
|
||||
# the 'generated_c_files' list
|
||||
list( APPEND memfs_args
|
||||
--count 10
|
||||
--output ${CMAKE_CURRENT_BINARY_DIR}/memfs_gen
|
||||
)
|
||||
|
||||
set( generated_c_files
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_000.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_001.c
|
||||
|
@ -6,35 +13,26 @@ set( generated_c_files
|
|||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_004.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_005.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_006.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_007.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_008.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_009.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_final.c)
|
||||
|
||||
set( exclude "" )
|
||||
if( NOT HAVE_PRODUCT_BUFR )
|
||||
set( exclude -exclude bufr )
|
||||
set( generated_c_files
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_000.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_001.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_002.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_003.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_004.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_final.c)
|
||||
elseif( NOT HAVE_PRODUCT_GRIB )
|
||||
set( exclude -exclude grib )
|
||||
set( generated_c_files
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_000.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_001.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_002.c
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen_final.c)
|
||||
list( APPEND memfs_args --exclude bufr)
|
||||
endif()
|
||||
|
||||
if( NOT HAVE_PRODUCT_GRIB )
|
||||
list( APPEND memfs_args --exclude grib)
|
||||
endif()
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${generated_c_files}
|
||||
COMMAND ${PYTHON_EXECUTABLE} ${PROJECT_SOURCE_DIR}/memfs.py
|
||||
${exclude}
|
||||
${memfs_args}
|
||||
${PROJECT_SOURCE_DIR}/definitions
|
||||
${PROJECT_SOURCE_DIR}/samples
|
||||
${PROJECT_SOURCE_DIR}/ifs_samples
|
||||
${CMAKE_CURRENT_BINARY_DIR}/memfs_gen
|
||||
DEPENDS ${PROJECT_SOURCE_DIR}/memfs.py)
|
||||
|
||||
set_source_files_properties(
|
||||
|
|
Binary file not shown.
|
@ -436,10 +436,9 @@ endif()


ecbuild_add_library( TARGET eccodes
SOURCES grib_api_version.c
SOURCES ${CMAKE_CURRENT_BINARY_DIR}/grib_api_version.c
# griby.c gribl.c
${grib_api_srcs}
GENERATED grib_api_version.c
#PRIVATE_LIBS ${ECCODES_EXTRA_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${CMATH_LIBRARIES}
PRIVATE_INCLUDES "${AEC_INCLUDE_DIRS}"
PRIVATE_LIBS ${ECCODES_EXTRA_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${AEC_LIBRARIES} ${PNG_LIBRARIES}
@ -269,6 +269,8 @@ static void dump(grib_action* act, FILE* f, int lvl)

static void xref(grib_action* act, FILE* f, const char* path)
{
Assert(!"xref is disabled");
#if 0
grib_action_alias* a = (grib_action_alias*)act;
if (a->target) {
fprintf(f, "bless({name=>'%s', target=>'%s', path=>'%s'},'xref::alias'),\n", act->name, a->target, path);

@ -280,6 +282,7 @@ static void xref(grib_action* act, FILE* f, const char* path)
if (act->name_space)
fprintf(f, "bless({name=>'%s.%s', path=>'%s'},'xref::unalias'),\n", act->name_space, act->name, path);
}
#endif
}

static void destroy(grib_context* context, grib_action* act)
@ -131,6 +131,7 @@ static void dump(grib_action* act, FILE* f, int lvl)
grib_context_print(act->context, f, "%s[%d] %s \n", act->op, a->len, act->name);
}

#if 0
#define F(x) \
if (flg & x) { \
fprintf(f, "%s=>1,", #x); \

@ -176,6 +177,11 @@ static void xref(grib_action* act, FILE* f, const char* path)

fprintf(f, "]}, 'xref::%s'),\n", act->op);
}
#endif
static void xref(grib_action* act, FILE* f, const char* path)
{
Assert(!"xref is disabled");
}

static int create_accessor(grib_section* p, grib_action* act, grib_loader* loader)
{
@ -407,7 +407,7 @@ bufr_descriptor* accessor_bufr_elements_table_get_descriptor(grib_accessor* a, i
v = (bufr_descriptor*)grib_context_malloc_clear(c, sizeof(bufr_descriptor));
if (!v) {
grib_context_log(c, GRIB_LOG_ERROR,
"grib_bufr_descriptor_new unable to allocate %ld bytes\n", sizeof(bufr_descriptor));
"accessor_bufr_elements_table_get_descriptor: unable to allocate %ld bytes\n", sizeof(bufr_descriptor));
*err = GRIB_OUT_OF_MEMORY;
return NULL;
}
@ -216,6 +216,7 @@ static int global_depth = -1;
#endif

#define BUFR_DESCRIPTORS_ARRAY_USED_SIZE(v) ((v)->n)
#define SILENT 1

static void __expand(grib_accessor* a, bufr_descriptors_array* unexpanded, bufr_descriptors_array* expanded,
change_coding_params* ccp, int* err)

@ -273,7 +274,7 @@ static void __expand(grib_accessor* a, bufr_descriptors_array* unexpanded, bufr_

inner_unexpanded = grib_bufr_descriptors_array_new(c, DESC_SIZE_INIT, DESC_SIZE_INCR);
for (i = 0; i < size; i++) {
vv = grib_bufr_descriptor_new(self->tablesAccessor, v[i], err);
vv = grib_bufr_descriptor_new(self->tablesAccessor, v[i], !SILENT, err);
inner_unexpanded = grib_bufr_descriptors_array_push(inner_unexpanded, vv);
}
grib_context_free(c, v);

@ -396,7 +397,7 @@ static void __expand(grib_accessor* a, bufr_descriptors_array* unexpanded, bufr_
DESCRIPTORS_POP_FRONT_OR_RETURN(unexpanded, u);
size = 1;
if (ccp->associatedFieldWidth && u->X != 31) {
bufr_descriptor* au = grib_bufr_descriptor_new(self->tablesAccessor, 999999, err);
bufr_descriptor* au = grib_bufr_descriptor_new(self->tablesAccessor, 999999, !SILENT, err);
au->width = ccp->associatedFieldWidth;
grib_bufr_descriptor_set_scale(au, 0);
strcpy(au->shortName, "associatedField");

@ -658,8 +659,12 @@ static int expand(grib_accessor* a)
unexpanded_copy = grib_bufr_descriptors_array_new(c, unexpandedSize, DESC_SIZE_INCR);
operator206yyy_width = 0;
for (i = 0; i < unexpandedSize; i++) {
bufr_descriptor* aDescriptor1 = grib_bufr_descriptor_new(self->tablesAccessor, u[i], &err);
bufr_descriptor* aDescriptor2 = grib_bufr_descriptor_new(self->tablesAccessor, u[i], &err);
bufr_descriptor *aDescriptor1, *aDescriptor2;
/* ECC-1274: clear error and only issue msg once */
err = 0;
aDescriptor1 = grib_bufr_descriptor_new(self->tablesAccessor, u[i], SILENT, &err);
err = 0;
aDescriptor2 = grib_bufr_descriptor_new(self->tablesAccessor, u[i], !SILENT, &err);

/* ECC-433: Operator 206YYY */
if (aDescriptor1->F == 2 && aDescriptor1->X == 6) {
@ -142,7 +142,6 @@ static void init(grib_accessor* a, const long l, grib_arguments* c)
|
|||
a->flags |= GRIB_ACCESSOR_FLAG_READ_ONLY;
|
||||
}
|
||||
|
||||
|
||||
static void dump(grib_accessor* a, grib_dumper* dumper)
|
||||
{
|
||||
grib_dump_string(dumper, a, NULL);
|
||||
|
@ -153,7 +152,6 @@ static int unpack_string(grib_accessor* a, char* val, size_t* len)
|
|||
/* special clim case where each mont have 30 days.. to comply with mars*/
|
||||
grib_accessor_g1day_of_the_year_date* self = (grib_accessor_g1day_of_the_year_date*)a;
|
||||
|
||||
|
||||
char tmp[1024];
|
||||
|
||||
long year = 0;
|
||||
|
@ -164,7 +162,6 @@ static int unpack_string(grib_accessor* a, char* val, size_t* len)
|
|||
long fullyear = 0;
|
||||
long fake_day_of_year = 0;
|
||||
|
||||
|
||||
size_t l;
|
||||
|
||||
grib_get_long_internal(grib_handle_of_accessor(a), self->century, ¢ury);
|
||||
|
@ -175,7 +172,6 @@ static int unpack_string(grib_accessor* a, char* val, size_t* len)
|
|||
if (*len < 1)
|
||||
return GRIB_BUFFER_TOO_SMALL;
|
||||
|
||||
|
||||
fullyear = ((century - 1) * 100 + year);
|
||||
fake_day_of_year = ((month - 1) * 30) + day;
|
||||
sprintf(tmp, "%04ld-%03ld", fullyear, fake_day_of_year);
|
||||
|
@ -189,6 +185,5 @@ static int unpack_string(grib_accessor* a, char* val, size_t* len)
|
|||
*len = l;
|
||||
strcpy(val, tmp);
|
||||
|
||||
|
||||
return GRIB_SUCCESS;
|
||||
}
|
||||
|
|
|
@ -235,14 +235,14 @@ int pack_long_unsigned_helper(grib_accessor* a, const long* val, size_t* len, in

/* Check if value fits into number of bits */
if (check) {
const long nbits = self->nbytes * 8;
/* See GRIB-23 and GRIB-262 */
if (!value_is_missing(v)) {
if (v < 0) {
if (val[0] < 0) {
grib_context_log(a->context, GRIB_LOG_ERROR,
"Key \"%s\": Trying to encode a negative value of %ld for key of type unsigned\n", a->name, v);
"Key \"%s\": Trying to encode a negative value of %ld for key of type unsigned\n", a->name, val[0]);
return GRIB_ENCODING_ERROR;
}
/* See GRIB-23 and GRIB-262 */
if (!value_is_missing(v)) {
const long nbits = self->nbytes * 8;
if (nbits < 33) {
unsigned long maxval = (1UL << nbits) - 1;
if (maxval > 0 && v > maxval) { /* See ECC-1002 */
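The hunk above is a small reporting fix: the negative-value error message now prints the caller's original value (val[0]) rather than the working variable v. A standalone sketch of the range check being performed, assuming the key is an unsigned integer stored in nbytes bytes (an illustration, not the eccodes implementation itself):

#include "grib_api_internal.h"   /* GRIB_SUCCESS, GRIB_ENCODING_ERROR */

/* Reject values that cannot be stored in an unsigned field of nbytes bytes. */
static int check_unsigned_range(long value, long nbytes)
{
    const long nbits = nbytes * 8;
    if (value < 0)
        return GRIB_ENCODING_ERROR;          /* negative values cannot be encoded as unsigned */
    if (nbits < 33) {
        const unsigned long maxval = (1UL << nbits) - 1;
        if (maxval > 0 && (unsigned long)value > maxval)
            return GRIB_ENCODING_ERROR;      /* value does not fit into nbits bits */
    }
    return GRIB_SUCCESS;
}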
@ -171,7 +171,7 @@ grib_hash_array_value* grib_double_hash_array_value_new(grib_context* c, const c
void grib_hash_array_value_delete(grib_context* c, grib_hash_array_value* v);

/* grib_bufr_descriptor.c */
bufr_descriptor* grib_bufr_descriptor_new(grib_accessor* tables_accessor, int code, int* err);
bufr_descriptor* grib_bufr_descriptor_new(grib_accessor* tables_accessor, int code, int silent, int* err);
bufr_descriptor* grib_bufr_descriptor_clone(bufr_descriptor* d);
int grib_bufr_descriptor_set_code(grib_accessor* tables_accessor, int code, bufr_descriptor* v);
void grib_bufr_descriptor_set_reference(bufr_descriptor* v, long reference);
@ -743,7 +743,6 @@ char* grib_get_field_file(grib_index* index, off_t* offset);
grib_handle* grib_handle_new_from_index(grib_index* index, int* err);
grib_handle* codes_new_from_index(grib_index* index, int message_type, int* err);
void grib_index_rewind(grib_index* index);
int grib_index_search(grib_index* index, grib_index_key* keys);
int codes_index_set_product_kind(grib_index* index, ProductKind product_kind);
int codes_index_set_unpack_bufr(grib_index* index, int unpack);
int is_index_file(const char* filename);
@ -420,7 +420,7 @@ int grib_encode_size_tb(unsigned char* p, size_t val, long* bitp, long nb)
#if OMP_PACKING
#include "grib_bits_any_endian_omp.c"
#elif VECTOR
#include "grib_bits_any_endian_vector.c"
#include "grib_bits_any_endian_vector.c" /* Experimental */
#else
#include "grib_bits_any_endian_simple.c"
#endif
@ -11,10 +11,10 @@

#include "grib_api_internal.h"

bufr_descriptor* grib_bufr_descriptor_new(grib_accessor* tables_accessor, int code, int* err)
bufr_descriptor* grib_bufr_descriptor_new(grib_accessor* tables_accessor, int code, int silent, int* err)
{
bufr_descriptor* ret = accessor_bufr_elements_table_get_descriptor(tables_accessor, code, err);
if (*err)
if (!silent && *err)
grib_context_log(tables_accessor->context, GRIB_LOG_ERROR,
"unable to get descriptor %06d from table", code);
return ret;
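Together with the prototype change above, grib_bufr_descriptor_new() now takes a silent flag so callers can suppress the "unable to get descriptor" log message and handle the error themselves. A hypothetical call site following the SILENT / !SILENT convention used in the expanded-descriptors hunk; the accessor and descriptor code are placeholders:

#include "grib_api_internal.h"

#define SILENT 1

/* Hypothetical helper: look a descriptor up twice, once quietly, once verbosely. */
static void lookup_descriptor(grib_accessor* tablesAccessor, int code)
{
    int err = 0;
    bufr_descriptor* quiet = grib_bufr_descriptor_new(tablesAccessor, code, SILENT, &err);   /* no log on failure */
    err = 0;
    bufr_descriptor* noisy = grib_bufr_descriptor_new(tablesAccessor, code, !SILENT, &err);  /* logs on failure */
    (void)quiet;
    (void)noisy;
}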
@ -423,6 +423,7 @@ int grib_fieldset_apply_where(grib_fieldset* set, const char* where_string)

print_math(m);
printf("\n");
grib_math_delete(set->context, m);
return err;
}
@ -193,6 +193,7 @@ static grib_math* readatom(grib_context* c, char** form, int* err)
p->name = strdup(buf);
Assert(p->name);
p->left = 0;
p->right = 0;

switch (**form) {
case '(':

@ -252,9 +253,9 @@ static grib_math* readpower(grib_context* c, char** form, int* err)
{
grib_math* p = readatom(c, form, err);

while (**form == '^' || (**form == '*' && *(*form + 1) == '*')) {
grib_math* q = (grib_math*)grib_context_malloc(c, sizeof(grib_math));
q->right = 0;
q->left = p;
q->arity = 2;
|
|
@ -1921,6 +1921,7 @@ void grib_index_rewind(grib_index* index)
index->rewind = 1;
}

#if 0
static grib_index_key* search_key(grib_index_key* keys, grib_index_key* to_search)
{
if (!keys || !strcmp(keys->name, to_search->name))

@ -1947,6 +1948,7 @@ int grib_index_search(grib_index* index, grib_index_key* keys)
grib_index_rewind(index);
return 0;
}
#endif

int codes_index_set_product_kind(grib_index* index, ProductKind product_kind)
{
|
|
@ -100,11 +100,13 @@ if( HAVE_BUILD_TOOLS )
grib_sh_imag
grib_sh_spectral_complex
pseudo_diag
wrap
grib_levtype
grib_grid_unstructured
grib_grid_lambert_conformal
grib_grid_polar_stereographic
grib_g1monthlydate
grib_g1day_of_the_year_date
grib_g1fcperiod)

# These tests do require data downloads
@ -23,7 +23,15 @@ for file in $ECCODES_SAMPLES_PATH/BUFR*.tmpl; do
done

# Check one specific BUFR file dump output
${tools_dir}/bufr_dump -p $ECCODES_SAMPLES_PATH/BUFR3.tmpl > $temp
input=$ECCODES_SAMPLES_PATH/BUFR3.tmpl
${tools_dir}/bufr_dump -p $input > $temp
diff ${data_dir}/BUFR3.tmpl.dump.plain.ref $temp

# Code generation
input=$ECCODES_SAMPLES_PATH/BUFR4.tmpl
for lang in C python fortran filter; do
${tools_dir}/bufr_dump -D $lang $input >/dev/null
${tools_dir}/bufr_dump -E $lang $input >/dev/null
done

rm -f $temp
@ -92,6 +92,9 @@ input=${data_dir}/spherical_model_level.grib1
${tools_dir}/grib_set -r -s packingType=spectral_simple $input $temp
grib_check_key_equals $temp packingType 'spectral_simple'
${tools_dir}/grib_ls -p numberOfCodedValues $temp
${tools_dir}/grib_ls -n statistics $temp
stats=`${tools_dir}/grib_get -F%.1f -p average,standardDeviation $temp`
[ "$stats" = "195.1 12.0" ]


rm -f $temp
@ -189,6 +189,17 @@ sample1=$ECCODES_SAMPLES_PATH/sh_ml_grib1.tmpl
|
|||
padding=`echo 'print "[padding_grid50_1]";' | ${tools_dir}/grib_filter - $sample1`
|
||||
[ "$padding" = "000000000000000000000000000000000000" ]
|
||||
|
||||
echo "Test switch statement"
|
||||
# --------------------------
|
||||
cat >temp.filt <<EOF
|
||||
switch (edition) {
|
||||
case 1: print "1";
|
||||
case 2: print "2";
|
||||
default: print "what is this?";assert(0);
|
||||
}
|
||||
EOF
|
||||
${tools_dir}/grib_filter temp.filt $ECCODES_SAMPLES_PATH/GRIB1.tmpl $ECCODES_SAMPLES_PATH/GRIB2.tmpl
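# The switch above dispatches on the integer key 'edition'. As a sketch only (not
# part of the test, and assuming string case labels behave like the integer ones
# above), the same construct can dispatch on a string-valued key such as gridType:
cat >temp.filt <<EOF
switch (gridType) {
    case "regular_ll": print "lat/lon grid";
    default: print "other grid: [gridType]";
}
EOF
${tools_dir}/grib_filter temp.filt $ECCODES_SAMPLES_PATH/GRIB2.tmpl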
# Clean up
rm -f temp_filt.grib2 temp.filt
@ -0,0 +1,25 @@
#!/bin/sh
# (C) Copyright 2005- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities granted to it by
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
#
. ./include.sh
label="grib_g1day_of_the_year_date"
sample_g1=$ECCODES_SAMPLES_PATH/GRIB1.tmpl
temp=temp.$label.grib
${tools_dir}/grib_set -s marsType=s3,marsStream=mpic $sample_g1 $temp
grib_check_key_equals $temp "mars.date:s,dayOfTheYearDate:s" "2006-076 2006-076"
${tools_dir}/grib_set -s marsType=s3,marsStream=ukmo $sample_g1 $temp
grib_check_key_equals $temp "mars.date:s,dayOfTheYearDate:s" "2006-076 2006-076"
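# dayOfTheYearDate reads as year plus day-of-year: 2006-076 is day 76 of 2006,
# i.e. 31 (Jan) + 28 (Feb) + 17 = 17 March in a non-leap year.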
# Clean up
rm -f $temp
@ -9,6 +9,8 @@
#
. ./include.sh
set -u
temp=temp.grib_iterator.grib
files="reduced_latlon_surface.grib1 \
reduced_gaussian_pressure_level.grib1 \
@ -32,3 +34,7 @@ done
# ECC-822: Increase lat/lon decimals using default grib_get_data
${tools_dir}/grib_get_data -L%12.6f%11.5f ${data_dir}/regular_latlon_surface.grib2
${tools_dir}/grib_get_data -p shortName,level ${data_dir}/regular_latlon_surface.grib2 > $temp
grep -q "Latitude Longitude Value shortName level" $temp
rm -f $temp
@ -12,12 +12,15 @@
#set -x
REDIRECT=/dev/null
sample_g1=$ECCODES_SAMPLES_PATH/reduced_gg_pl_640_grib1.tmpl
sample_g2=$ECCODES_SAMPLES_PATH/reduced_gg_pl_640_grib2.tmpl
cd ${data_dir}
rm -f local.log
# Check all GRIB2 local def files and definitions/grib2/grib2LocalSectionNumber.98.table
# Each number should appear in the table
# -----------------------------------------
g2lds=${ECCODES_DEFINITION_PATH}/grib2/local.98.*.def
for g2ld in $g2lds; do
bname=`basename $g2ld`
@ -25,11 +28,20 @@ for g2ld in $g2lds; do
grep -q "^$dnum" ${ECCODES_DEFINITION_PATH}/grib2/grib2LocalSectionNumber.98.table
done
# Use of subCentre=98 for other centre GRIBs
# ------------------------------------------
temp=temp.grib_local.grib
${tools_dir}/grib_set -s centre=edzw,subCentre=98,setLocalDefinition=1,localDefinitionNumber=1 \
$ECCODES_SAMPLES_PATH/GRIB2.tmpl $temp
grib_check_key_equals $temp section2Length,centre,expver,marsClass:i,marsType:i,marsStream:i "17 edzw 0001 1 2 1025"
rm -f $temp
${tools_dir}/grib_set -s edition=2,setLocalDefinition=1 reduced_gaussian_model_level.grib1 loc.grib2
${tools_dir}/grib_set -s setLocalDefinition=1 reduced_gaussian_model_level.grib1 loc.grib1
# conversion 1->2
# Conversion 1->2
# ----------------
for localDefinitionNumber in 1 15 26 30
do
${tools_dir}/grib_set -s localDefinitionNumber=$localDefinitionNumber,perturbationNumber=2,numberOfForecastsInEnsemble=50 loc.grib1 eps.grib1
@ -45,7 +57,8 @@ do
${tools_dir}/grib_compare -e -b param eps.grib1 eps.grib2
done
#local -> local
# Local -> local
# ---------------
for localStart in 1 7 9 20 25 26 30
do
${tools_dir}/grib_set -s localDefinitionNumber=$localStart loc.grib1 loc1.grib1
@ -60,7 +73,8 @@ do
done
done
#special types/streams
# Special types/streams
# ----------------------
${tools_dir}/grib_set -s localDefinitionNumber=1,numberOfForecastsInEnsemble=0 loc.grib1 loc1.grib1
${tools_dir}/grib_set -s edition=2 loc1.grib1 loc1.grib2
${tools_dir}/grib_get -f -p localDefinitionNumber,perturbationNumber loc1.grib2 >> local.log
@ -78,8 +92,6 @@ rm -f local.log loc.grib1 loc.grib2 loc1.grib1 loc1.grib2 eps.grib1 eps.grib2
# Delete Local Definition
# -----------------------
sample_g1=$ECCODES_SAMPLES_PATH/reduced_gg_pl_640_grib1.tmpl
sample_g2=$ECCODES_SAMPLES_PATH/reduced_gg_pl_640_grib2.tmpl
temp=temp.grib_local.grib
grib_check_key_equals $sample_g1 localUsePresent 1
${tools_dir}/grib_set -s deleteLocalDefinition=1 $sample_g1 $temp
@ -105,8 +117,16 @@ grib_check_key_equals $temp section2Length 5
rm -f $temp
# Local Definition 5
# -----------------------
# Local Definition 4: Ocean model data
# ---------------------------------------
${tools_dir}/grib_set -s \
localDefinitionNumber=4,coordinate2Flag=2,averaging1Flag=1,coordinate1Flag=1,coordinate2Start=1234 \
$sample_g1 $temp
grib_check_key_equals $temp "mars.levelist,roundedMarsLevelist:d,roundedMarsLevelist:s" "1 1.234 1.234"
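# The expected values imply that coordinate2Start is scaled by a factor of 1000:
# 1234 / 1000 = 1.234 (roundedMarsLevelist), while mars.levelist reports the
# rounded integer level 1.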
# Local Definition 5: Forecast probability data
# ---------------------------------------------
sample_g1=$ECCODES_SAMPLES_PATH/GRIB1.tmpl
temp=temp.grib_local.grib
${tools_dir}/grib_set -s setLocalDefinition=1,localDefinitionNumber=5 $sample_g1 $temp.1
@ -169,4 +189,5 @@ result=`echo 'print "[ccccIdentifiers]";' | ${tools_dir}/grib_filter - $temp`
[ "$result" = "kwbc ecmf sabm" ]
rm -f $temp $temp.1 $temp.2 $temp.3
@ -21,6 +21,8 @@
#include "eccodes.h"
int grib_fieldset_apply_where(grib_fieldset* set, const char* where_string); /*experimental*/
static void usage(const char* prog)
{
printf("Usage: %s [-n] grib_file grib_file ...\n", prog);
@ -72,6 +74,10 @@ int main(int argc, char** argv)
set = codes_fieldset_new_from_files(0, filenames, nfiles, 0, 0, 0, order_by, &err);
CODES_CHECK(err, 0);
/* grib_fieldset_apply_where not fully implemented*/
err=grib_fieldset_apply_where(set, "(centre=='ecmf') && number==1 || step==6");
CODES_CHECK(err, 0);
printf("ordering by %s\n", order_by);
printf("%d fields in the fieldset\n", codes_fieldset_count(set));
printf("n,step,param\n");
@ -19,6 +19,7 @@ input_grb=${data_dir}/reduced_gaussian_pressure_level.grib1
# --------------------------------------
$EXEC ${test_dir}/grib_nearest_test $input_grb > $temp
cat > $tempRef <<EOF
((((centre)==(ecmf))&&((number)==(1)))||((step)==(6)))
ordering by param,step
1 fields in the fieldset
n,step,param
@ -36,6 +37,7 @@ diff $tempRef $temp
# ----------------------------------------
$EXEC ${test_dir}/grib_nearest_test -n $input_grb > $temp
cat > $tempRef <<EOF
((((centre)==(ecmf))&&((number)==(1)))||((step)==(6)))
ordering by param,step
1 fields in the fieldset
n,step,param
@ -13,7 +13,8 @@
REDIRECT=/dev/null
infile=${data_dir}/regular_gaussian_surface.grib1
outfile=${data_dir}/set.grib1
outfile=${data_dir}/temp.grib_set.grib
temp=temp.grib_set.out
rm -f $outfile
@ -52,13 +53,37 @@ centre=`${tools_dir}/grib_get -p centre $outfile`
centre=`${tools_dir}/grib_get -p centre:l $outfile`
[ $centre -eq 80 ]
# Set without -s. Expected to fail
# ----------------------------------------------------
set +e
# This is expected to fail
${tools_dir}/grib_set -p levtype $infile $outfile 2> $REDIRECT > $REDIRECT
[ $? -ne 0 ]
status=$?
set -e
[ $status -ne 0 ]
# GRIB-941: encoding of grib2 angles
# Out-of-bounds value. Expected to fail
# ----------------------------------------------------
input=${data_dir}/reduced_gaussian_sub_area.grib2
set +e
${tools_dir}/grib_set -s perturbationNumber=1000 $input $outfile 2>$temp
status=$?
set -e
[ $status -ne 0 ]
grep -q "Trying to encode value of 1000 but the maximum allowable value is 255 (number of bits=8)" $temp
# Negative value for an unsigned key. Expected to fail
# ----------------------------------------------------
input=${data_dir}/reduced_gaussian_sub_area.grib2
set +e
${tools_dir}/grib_set -s perturbationNumber=-1 $input $outfile 2>$temp
status=$?
set -e
[ $status -ne 0 ]
grep -q "Trying to encode a negative value of -1 for key of type unsigned" $temp
# GRIB-941: encoding of GRIB2 angles
# -----------------------------------
angleInDegrees=130.9989
angleInMicroDegrees=130998900
files="GRIB2.tmpl regular_gg_pl_grib2.tmpl reduced_gg_pl_320_grib2.tmpl polar_stereographic_pl_grib2.tmpl"
@ -69,14 +94,17 @@ for f in $files; do
done
# GRIB-943: centre code table
# ----------------------------
${tools_dir}/grib_set -s centre=289 $ECCODES_SAMPLES_PATH/GRIB2.tmpl $outfile
${tools_dir}/grib_dump -O $outfile | grep -q 'centre = 289.*Zambia'
# ECC-539: avoid output being the same as input
# -----------------------------------------------
set +e
${tools_dir}/grib_set -s centre=0 $outfile $outfile
status=$?
set -e
[ $status -ne 0 ]
rm -f $outfile
rm -f $outfile $temp
@ -33,6 +33,8 @@ tempGrib=temp.${label}.grib
tempNetcdf=temp.${label}.nc
tempText=temp.${label}.txt
have_netcdf4=0
# Do we have ncdump?
NC_DUMPER=""
if command -v "ncdump" >/dev/null 2>&1; then
@ -51,6 +53,22 @@ if test "x$NC_DUMPER" != "x"; then
grep -q "short tp_0001" $tempText
fi
if [ $ECCODES_ON_WINDOWS -eq 0 ]; then
echo "Test HDF5 decoding ..."
# ---------------------------
# Note: this is only available in NetCDF-4. So need to check if the command worked with -k3
input=${data_dir}/sample.grib2
set +e
${tools_dir}/grib_to_netcdf -k3 -o $tempNetcdf $input 2>/dev/null
stat=$?
set -e
if [ $stat -eq 0 ]; then
have_netcdf4=1
${tools_dir}/grib_dump -TA -O $tempNetcdf
res=`${tools_dir}/grib_get -TA -p identifier $tempNetcdf`
[ "$res" = "HDF5" ]
fi
fi
grib_files="\
regular_latlon_surface.grib2 \
@ -72,14 +90,22 @@ for dt in $ncf_types; do
done
done
echo "Test creating different kinds; netcdf3 classic and large ..."
echo "Test creating different kinds ..."
# ------------------------------------------------------------------
# TODO: enable tests for netcdf4 formats too
input=${data_dir}/regular_latlon_surface.grib2
${tools_dir}/grib_to_netcdf -k 1 -o $tempNetcdf $input >/dev/null
${tools_dir}/grib_to_netcdf -k 2 -o $tempNetcdf $input >/dev/null
#${tools_dir}/grib_to_netcdf -k 3 -o $tempNetcdf $input >/dev/null
#${tools_dir}/grib_to_netcdf -k 4 -o $tempNetcdf $input >/dev/null
if [ $have_netcdf4 -eq 1 ]; then
${tools_dir}/grib_to_netcdf -k 3 -o $tempNetcdf $input >/dev/null
${tools_dir}/grib_to_netcdf -k 4 -o $tempNetcdf $input >/dev/null
fi
echo "Test shuffle and deflate ..."
# ---------------------------------
if [ $have_netcdf4 -eq 1 ]; then
input=${data_dir}/sst_globus0083.grib
${tools_dir}/grib_to_netcdf -s -d9 -k4 -o $tempNetcdf $input
fi
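# A sketch of what shuffle (-s) and deflate level 9 (-d9) buy, illustrative only
# and not part of the test; it reuses $input and $tempNetcdf from the block above
# to compare the compressed output against an uncompressed conversion:
if [ $have_netcdf4 -eq 1 ]; then
    ${tools_dir}/grib_to_netcdf -k4 -o $tempNetcdf.plain $input
    ls -l $tempNetcdf.plain $tempNetcdf
    rm -f $tempNetcdf.plain
fi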
echo "Test ECC-1060 ..."
# ----------------------
@ -15,6 +15,7 @@ label="pseudo_budg_test"
set -u
tempOut=temp.$label.txt
tempRef=temp.$label.ref
tempBud=temp.$label.bud
${tools_dir}/grib_ls -j ${data_dir}/budg > $tempOut
cat > $tempRef << EOF
@ -31,7 +32,20 @@ cat > $tempRef << EOF
EOF
diff $tempRef $tempOut
# Set date
${tools_dir}/grib_set -s date=20170102 ${data_dir}/budg $tempBud
res=`${tools_dir}/grib_get -p mars.date $tempBud`
[ "$res" = "20170102" ]
${tools_dir}/grib_dump ${data_dir}/budg
${tools_dir}/grib_dump -O ${data_dir}/budg
rm -f $tempRef $tempOut
ms=`${tools_dir}/grib_get -p mars.step ${data_dir}/budg`
[ "$ms" = "0" ]
${tools_dir}/grib_set -s mars.step=19 ${data_dir}/budg $tempOut
${tools_dir}/grib_ls -jm $tempOut
ms=`${tools_dir}/grib_get -p mars.step $tempOut`
[ "$ms" = "19" ]
rm -f $tempRef $tempOut $tempBud
@ -1,4 +1,4 @@
# bashrc to be used for running tests on windows.
# bashrc to be used for running tests on Microsoft Windows.
# In order for the functions to be usable by the tests, we need to use the -i option.
# Typical usage:
# bash --rcfile windows_testing.bashrc -ci "some/test/script.sh"
@ -8,15 +8,22 @@
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
#
# Tests for WRAP
. ./include.sh
label="wrap-test"
tempOut=temp.${label}.out
tempTxt=temp.${label}.txt
tempRef=temp.${label}.ref
REDIRECT=/dev/null
cat >${data_dir}/f.rules <<EOF
set values = { 1.0e-110, 1.5e-110, 1.005e-110 }
write;
EOF
input=$ECCODES_SAMPLES_PATH/wrap.tmpl
${tools_dir}/grib_filter ${data_dir}/f.rules ${data_dir}/ 2> $REDIRECT > $REDIRECT
${tools_dir}/grib_dump -TA -O $input
id=`${tools_dir}/grib_get -TA -p identifier $input`
[ "$id" = "WRAP" ]
# Clean up
rm -f $tempOut $tempRef $tempTxt
@ -208,7 +208,7 @@ int grib_tool_new_handle_action(grib_runtime_options* options, grib_handle* h)
if (print_keys)
for (i = 0; i < options->print_keys_count; i++)
fprintf(dump_file, ", %s", options->print_keys[i].name);
fprintf(dump_file, " %s", options->print_keys[i].name);
fprintf(dump_file, "\n");
@ -3,7 +3,7 @@ PACKAGE_NAME='eccodes'
# Package version
ECCODES_MAJOR_VERSION=2
ECCODES_MINOR_VERSION=23
ECCODES_MINOR_VERSION=24
ECCODES_REVISION_VERSION=0
ECCODES_CURRENT=1