Better BUFR data support, including multi-category messages in one data file

This commit is contained in:
wyq 2024-11-27 09:01:04 +08:00
parent be48f3c9cc
commit b776981ba6
61 changed files with 11370 additions and 104 deletions

.idea/compiler.xml generated
View File

@ -28,4 +28,9 @@
</annotationProcessing>
<bytecodeTargetLevel target="11" />
</component>
<component name="JavacSettings">
<option name="ADDITIONAL_OPTIONS_OVERRIDE">
<module name="meteoinfo-data" options="-extdirs D:\MyProgram\java\MeteoInfoDev\MeteoInfo\meteoinfo-data\lib" />
</option>
</component>
</project>

View File

@ -811,10 +811,12 @@
<inspection_tool class="CyclomaticComplexity" enabled="false" level="WARNING" enabled_by_default="false">
<option name="m_limit" value="10" />
</inspection_tool>
<inspection_tool class="CyclomaticComplexityInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="DanglingJavadoc" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="DataClassPrivateConstructor" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="DataProviderReturnType" enabled="false" level="ERROR" enabled_by_default="false" />
<inspection_tool class="DateToString" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="DeclarativeUnresolvedReference" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="DeclareCollectionAsInterface" enabled="false" level="WARNING" enabled_by_default="false">
<option name="ignoreLocalVariables" value="false" />
<option name="ignorePrivateMethodsAndFields" value="false" />
@ -955,6 +957,7 @@
<inspection_tool class="ExplicitArrayFilling" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ExplicitThis" enabled="true" level="INFORMATION" enabled_by_default="true" />
<inspection_tool class="ExplicitToImplicitClassMigration" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ExpressionComparedToItself" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ExpressionMayBeFactorized" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="ExtendsAnnotation" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ExtendsConcreteCollection" enabled="false" level="WARNING" enabled_by_default="false" />
@ -1284,6 +1287,7 @@
<inspection_tool class="InfinitePropertiesLabel" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="InfiniteRecursion" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="InfiniteTransitionLabel" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="InfixCallToOrdinary" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="InjectedReferences" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="InjectionNotApplicable" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="InlineClassDeprecatedMigration" enabled="false" level="WARNING" enabled_by_default="false" />
@ -1433,6 +1437,7 @@
<inspection_tool class="Junit4Converter" enabled="true" level="INFORMATION" enabled_by_default="true" />
<inspection_tool class="Junit4RunWithInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="JvmCoverageInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="JvmLinesOfCodeInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="KDocMissingDocumentation" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="KDocUnresolvedReference" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="KeySetIterationMayUseEntrySet" enabled="false" level="WARNING" enabled_by_default="false" />
@ -1507,10 +1512,12 @@
</inspection_tool>
<inspection_tool class="LocaleText" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="LogStatementGuardedByLogCondition" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="LogStatementNotGuardedByLogCondition" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="LoggerInitializedWithForeignClass" enabled="false" level="WARNING" enabled_by_default="false">
<option name="loggerFactoryMethodName" value="getLogger,getLogger,getLog,getLogger" />
</inspection_tool>
<inspection_tool class="LoggingConditionDisagreesWithLogStatement" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="LoggingGuardedByCondition" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="LoggingPlaceholderCountMatchesArgumentCount" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="LoggingSimilarMessage" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
<inspection_tool class="LoggingStringTemplateAsArgument" enabled="false" level="WARNING" enabled_by_default="false" />
@ -1534,12 +1541,14 @@
<inspection_tool class="MalformedDataProvider" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MalformedFormatString" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MalformedXPath" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MaliciousLibrariesLocal" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ManualArrayCopy" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ManualArrayToCollectionCopy" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ManualMinMaxCalculation" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MapGetWithNotNullAssertionOperator" enabled="true" level="INFORMATION" enabled_by_default="true" />
<inspection_tool class="MapReplaceableByEnumMap" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MappingBeforeCount" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="MarkdownDocumentationCommentsMigration" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="MarkdownIncorrectTableFormatting" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
<inspection_tool class="MarkdownIncorrectlyNumberedListItem" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="MarkdownLinkDestinationWithSpaces" enabled="true" level="WARNING" enabled_by_default="true" />
@ -2142,7 +2151,9 @@
<option name="ignoreCloneable" value="false" />
</inspection_tool>
<inspection_tool class="RedundantInnerClassModifier" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="RedundantJavaTimeOperations" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="RedundantLabelMigration" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
<inspection_tool class="RedundantLabeledReturnOnLastExpressionInLambda" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="RedundantLabeledSwitchRuleCodeBlock" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="RedundantLambdaArrow" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
<inspection_tool class="RedundantLambdaOrAnonymousFunction" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
@ -2280,6 +2291,7 @@
<inspection_tool class="ReturnSeparatedFromComputation" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="ReturnThis" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="ReuseOfLocalVariable" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="RunBlocking" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="RuntimeExec" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="RuntimeExecWithNonConstantString" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="SSBasedInspection" enabled="true" level="WARNING" enabled_by_default="true" />
@ -2422,6 +2434,7 @@
<option name="onlyWarnOnLoop" value="true" />
</inspection_tool>
<inspection_tool class="StringTemplateMigration" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="StringTemplateReverseMigration" enabled="false" level="INFORMATION" enabled_by_default="false" />
<inspection_tool class="StringToUpperWithoutLocale" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="StringTokenizer" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="StringTokenizerDelimiter" enabled="false" level="WARNING" enabled_by_default="false" />
@ -2679,6 +2692,7 @@
<inspection_tool class="UnspecifiedActionsPlace" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="UnstableApiUsage" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="UnstableTypeUsedInSignature" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="UnsupportedCharacter" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="UnsupportedChronoFieldUnitCall" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="UnusedAssignment" enabled="false" level="WARNING" enabled_by_default="false">
<option name="REPORT_PREFIX_EXPRESSIONS" value="false" />

View File

@ -42,11 +42,6 @@
<artifactId>meteoinfo-data</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>meteoinfo-projection</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>meteoinfo-image</artifactId>

View File

@ -417,9 +417,8 @@ public class RandomAccessFile implements DataInput, DataOutput {
* write will occur.
*
* @return the offset from the start of the file in bytes.
* @throws IOException if an I/O error occurrs.
*/
public long getFilePointer() throws IOException {
public long getFilePointer() {
return filePosition;
}

View File

@ -67,7 +67,7 @@ import java.util.zip.ZipInputStream;
public static String getVersion() {
String version = GlobalUtil.class.getPackage().getImplementationVersion();
if (version == null || version.equals("")) {
version = "3.9.6";
version = "3.9.7";
}
return version;
}

View File

@ -15,6 +15,7 @@
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<netcdf.version>5.6.0</netcdf.version>
<bufr.version>5.6.1-SNAPSHOT</bufr.version>
</properties>
<repositories>
@ -51,12 +52,12 @@
<dependency>
<groupId>edu.ucar</groupId>
<artifactId>cdm-core</artifactId>
<version>${netcdf.version}</version>
<version>${bufr.version}</version>
</dependency>
<dependency>
<groupId>edu.ucar</groupId>
<artifactId>bufr</artifactId>
<version>${netcdf.version}</version>
<version>${bufr.version}</version>
</dependency>
<dependency>
<groupId>edu.ucar</groupId>
@ -135,6 +136,20 @@
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
<optimize>true</optimize>
<!-- Import the local jar packages under \lib -->
<compilerArguments>
<extdirs>${project.basedir}\lib</extdirs>
</compilerArguments>
</configuration>
</plugin>
</plugins>
</build>

View File

@ -73,7 +73,7 @@ public class MeteoDataInfo {
private int _lonIdx;
private DrawType2D drawType2D;
/// <summary>
/// Is Lont/Lat
/// Is Lon/Lat
/// </summary>
public boolean IsLonLat;
/// <summary>

View File

@ -50,6 +50,7 @@ public class Variable {
private boolean isSwath = false;
private int varId;
private boolean dimVar = false;
private boolean memberOfStructure = false;
private List<Integer> levelIdxs = new ArrayList<>();
private List<Integer> varInLevelIdxs = new ArrayList<>();
private Array cachedData;
@ -560,6 +561,24 @@ public class Variable {
dimVar = value;
}
/**
* Get if the variable is a member of a structure
*
* @return Is a member of a structure or not
*/
public boolean isMemberOfStructure() {
return this.memberOfStructure;
}
/**
* Set if the variable is a member of a structure
*
* @param value Boolean
*/
public void setMemberOfStructure(boolean value) {
this.memberOfStructure = value;
}
/**
* Get level index list - for ARL data
*

View File

@ -0,0 +1,16 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
/**
* Interface for bit counters
*
* @author caron
* @since 8/8/13
*/
public interface BitCounter {
int getNumberRows();
}

View File

@ -0,0 +1,116 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import java.util.Formatter;
/**
* Count size of compressed fields
*
* @author caron
* @since Jul 4, 2008
*/
public class BitCounterCompressed implements BitCounter {
private final DataDescriptor dkey; // the field to count
private final int nrows; // number of (obs) in the compression
private final int bitOffset; // starting position of the compressed data, relative to start of data section
private int dataWidth; // bitWidth of incremental values
private BitCounterCompressed[][] nested; // used if the dkey is a structure = nested[innerRows][dkey.subkeys.size]
/**
* This counts the size of an array of Structures or Sequences, i.e. Structure(n)
*
* @param dkey is a structure or a sequence - so has subKeys
* @param n number of rows in the table
* @param bitOffset starting bit position of the compressed data, relative to the start of the data section
*/
public BitCounterCompressed(DataDescriptor dkey, int n, int bitOffset) {
this.dkey = dkey;
this.nrows = n;
this.bitOffset = bitOffset;
}
void setDataWidth(int dataWidth) {
this.dataWidth = dataWidth;
}
public int getStartingBitPos() {
return bitOffset;
}
public int getBitPos(int msgOffset) {
return bitOffset + dkey.bitWidth + 6 + dataWidth * msgOffset;
}
public int getTotalBits() {
if (nested == null)
return dkey.bitWidth + 6 + dataWidth * nrows;
else {
int totalBits = 0;
for (BitCounterCompressed[] counters : nested) {
if (counters == null)
continue;
for (BitCounterCompressed counter : counters)
if (counter != null)
totalBits += counter.getTotalBits();
}
if (dkey.replicationCountSize > 0)
totalBits += dkey.replicationCountSize + 6; // 6 bit count, 6 bit extra
return totalBits;
}
}
public BitCounterCompressed[] getNestedCounters(int innerIndex) {
return nested[innerIndex];
}
public void addNestedCounters(int innerDimensionSize) {
nested = new BitCounterCompressed[innerDimensionSize][dkey.getSubKeys().size()];
}
/**
* Number of nested fields
*
* @return 1 if no nested fields, otherwise count of nested fields
*/
public int ncounters() {
if (nested == null)
return 1;
else {
int ncounters = 0;
for (BitCounterCompressed[] counters : nested) {
if (counters == null)
continue;
for (BitCounterCompressed counter : counters)
if (counter != null)
ncounters += counter.ncounters();
}
return ncounters;
}
}
public void show(Formatter out, int indent) {
for (int i = 0; i < indent; i++)
out.format(" ");
out.format("%8d %8d %4d %s %n", getTotalBits(), bitOffset, dataWidth, dkey.name);
if (nested != null) {
for (BitCounterCompressed[] counters : nested) {
if (counters == null)
continue;
for (BitCounterCompressed counter : counters)
if (counter != null)
counter.show(out, indent + 2);
}
}
}
@Override
public int getNumberRows() {
return nrows;
}
}
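A quick worked check of the non-nested branch of getTotalBits() above (the reference value of dkey.bitWidth bits, a 6-bit field that is presumably the BUFR increment-width indicator, then dataWidth bits per row); the numbers are illustrative and not taken from the commit:

public class CompressedFieldSizeSketch {
    public static void main(String[] args) {
        int bitWidth = 12;  // width of the field's reference value (illustrative)
        int dataWidth = 5;  // width of each per-row increment (illustrative)
        int nrows = 10;     // observations in the compressed message (illustrative)
        // mirrors: dkey.bitWidth + 6 + dataWidth * nrows
        int totalBits = bitWidth + 6 + dataWidth * nrows; // 12 + 6 + 50 = 68
        System.out.println("total bits = " + totalBits);
    }
}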

View File

@ -0,0 +1,148 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.nc2.util.Indent;
import java.util.Formatter;
import java.util.HashMap;
import java.util.Map;
/**
* Counts the size of nested tables, for uncompressed messages.
* <p>
* A top-level BitCounterUncompressed counts bits for one row = obs = dataset.
* obs = new BitCounterUncompressed(root, 1, 0);
*
* @author caron
* @since May 10, 2008
*/
public class BitCounterUncompressed implements BitCounter {
private final DataDescriptor parent; // represents the table - fields/cols are the subKeys of dkey
private final int nrows; // number of rows in this table
private final int replicationCountSize; // number of bits taken up by the count variable (non-zero only for sequences)
private Map<DataDescriptor, Integer> bitPosition;
private Map<DataDescriptor, BitCounterUncompressed[]> subCounters; // nested tables; null for regular fields
private int[] startBit; // from start of data section, for each row
private int countBits; // total nbits in this table
private int bitOffset; // count bits
private static boolean debug;
/**
* This counts the size of an array of Structures or Sequences, i.e. Structure(n)
*
* @param parent is a structure or a sequence - so has subKeys
* @param nrows number of rows in the table, equals 1 for top level
* @param replicationCountSize number of bits taken up by the count variable (non-zero only for sequences)
*/
BitCounterUncompressed(DataDescriptor parent, int nrows, int replicationCountSize) {
this.parent = parent;
this.nrows = nrows;
this.replicationCountSize = replicationCountSize;
}
// not used yet
public void setBitOffset(DataDescriptor dkey) {
if (bitPosition == null)
bitPosition = new HashMap<>(2 * parent.getSubKeys().size());
bitPosition.put(dkey, bitOffset);
bitOffset += dkey.getBitWidth();
}
public int getOffset(DataDescriptor dkey) {
return bitPosition.get(dkey);
}
/**
* Track nested Tables.
*
* @param subKey subKey is a structure or a sequence - so itself has subKeys
* @param n number of rows in the nested table
* @param row which row in the parent Table this belongs to
* @param replicationCountSize number of bits taken up by the count (non-zero for sequences)
* @return nested ReplicationCounter
*/
BitCounterUncompressed makeNested(DataDescriptor subKey, int n, int row, int replicationCountSize) {
if (subCounters == null)
subCounters = new HashMap<>(5); // assumes DataDescriptor.equals is ==
// one for each row in this table
BitCounterUncompressed[] subCounter = subCounters.computeIfAbsent(subKey, k -> new BitCounterUncompressed[nrows]);
BitCounterUncompressed rc = new BitCounterUncompressed(subKey, n, replicationCountSize);
subCounter[row] = rc;
return rc;
}
public BitCounterUncompressed[] getNested(DataDescriptor subKey) {
return (subCounters == null) ? null : subCounters.get(subKey);
}
// total bits of this table and all subtables
int countBits(int startBit) {
countBits = replicationCountSize;
this.startBit = new int[nrows];
for (int i = 0; i < nrows; i++) {
this.startBit[i] = startBit + countBits;
if (debug)
System.out.println(" BitCounterUncompressed row " + i + " startBit=" + this.startBit[i]);
for (DataDescriptor nd : parent.subKeys) {
BitCounterUncompressed[] bitCounter = (subCounters == null) ? null : subCounters.get(nd);
if (bitCounter == null) // a regular field
countBits += nd.getBitWidth();
else {
if (debug)
System.out.println(" ---------> nested " + nd.getFxyName() + " starts at =" + (startBit + countBits));
countBits += bitCounter[i].countBits(startBit + countBits);
if (debug)
System.out.println(" <--------- nested " + nd.getFxyName() + " ends at =" + (startBit + countBits));
}
}
}
return countBits;
}
public int getCountBits() {
return countBits;
}
public int getNumberRows() {
return nrows;
}
public int getStartBit(int row) {
if (row >= startBit.length)
throw new IllegalStateException();
return startBit[row];
}
public void toString(Formatter f, Indent indent) {
f.format("%s dds=%s, ", indent, parent.getFxyName());
f.format("nrows=%d%n", nrows);
if (subCounters == null)
return;
indent.incr();
int count = 0;
// Map<DataDescriptor, BitCounterUncompressed[]> subCounters; // nested tables; null for regular fields
for (BitCounterUncompressed[] bcus : subCounters.values()) {
if (bcus == null)
f.format("%s%d: null", indent, count);
else {
for (BitCounterUncompressed bcu : bcus)
bcu.toString(f, indent);
}
count++;
}
indent.decr();
}
}

View File

@ -0,0 +1,180 @@
/*
* Copyright (c) 1998-2018 John Caron and University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.unidata.io.RandomAccessFile;
import java.io.EOFException;
import java.io.IOException;
/**
* Helper for reading data that has been bit packed.
*
* @author caron
* @since Apr 7, 2008
*/
public class BitReader {
private static final int BIT_LENGTH = Byte.SIZE;
private static final int BYTE_BITMASK = 0xFF;
private static final long LONG_BITMASK = Long.MAX_VALUE;
private RandomAccessFile raf;
private long startPos;
private byte[] data;
private int dataPos;
private byte bitBuf;
private int bitPos; // Current bit position in bitBuf.
// for testing
public BitReader(byte[] test) {
this.data = test;
this.dataPos = 0;
}
/**
* Constructor
*
* @param raf the RandomAccessFile
* @param startPos points to start of data in data section, in bytes
* @throws IOException on read error
*/
public BitReader(RandomAccessFile raf, long startPos) throws IOException {
this.raf = raf;
this.startPos = startPos;
raf.seek(startPos);
}
/**
* Go to the next byte in the stream
*/
public void incrByte() {
this.bitPos = 0;
}
/**
* Position file at bitOffset from startPos
*
* @param bitOffset bit offset from starting position
* @throws IOException on io error
*/
public void setBitOffset(int bitOffset) throws IOException {
if (bitOffset % 8 == 0) {
raf.seek(startPos + bitOffset / 8);
bitPos = 0;
bitBuf = 0;
} else {
raf.seek(startPos + bitOffset / 8);
bitPos = 8 - (bitOffset % 8);
bitBuf = (byte) raf.read();
bitBuf &= 0xff >> (8 - bitPos); // mask off consumed bits
}
}
public long getPos() {
if (raf != null) {
return raf.getFilePointer();
} else {
return dataPos;
}
}
/**
* Read the next nb bits and return them as an unsigned long.
*
* @param nb the number of bits to read, must satisfy 0 <= nb <= 64.
* @return result
* @throws IOException on read error
*/
public long bits2UInt(int nb) throws IOException {
assert nb <= 64;
assert nb >= 0;
long result = 0;
int bitsLeft = nb;
while (bitsLeft > 0) {
// we ran out of bits - fetch the next byte...
if (bitPos == 0) {
bitBuf = nextByte();
bitPos = BIT_LENGTH;
}
// -- retrieve bit from current byte ----------
// how many bits to read from the current byte
int size = Math.min(bitsLeft, bitPos);
// move my part to start
int myBits = bitBuf >> (bitPos - size);
// mask-off sign-extending
myBits &= BYTE_BITMASK;
// mask-off bits of next value
myBits &= ~(BYTE_BITMASK << size);
// -- put bit to result ----------------------
// where to place myBits inside of result
int shift = bitsLeft - size;
assert shift >= 0;
// put it there
result |= myBits << shift;
// -- put bit to result ----------------------
// update information on what we consumed
bitsLeft -= size;
bitPos -= size;
}
return result;
}
/**
* Read the next nb bits and return them as a signed long.
*
* @param nb the number of bits to read, must be <= 64.
* @return result
* @throws IOException on read error
*/
public long bits2SInt(int nb) throws IOException {
long result = bits2UInt(nb);
// check if we're negative
if (getBit(result, nb)) {
// it's negative! reset leading bit
result = setBit(result, nb, false);
// build 2's-complement
result = ~result & LONG_BITMASK;
result = result + 1;
}
return result;
}
private byte nextByte() throws IOException {
if (raf != null) {
int result = raf.read();
if (result == -1)
throw new EOFException();
return (byte) result;
} else {
return data[dataPos++];
}
}
// set or clear bit N, counted 1-based from the least significant bit
public static long setBit(long decimal, int N, boolean value) {
return value ? decimal | (1L << (N - 1)) : decimal & ~(1L << (N - 1));
}
// test bit N, counted 1-based from the least significant bit
public static boolean getBit(long decimal, int N) {
long constant = 1L << (N - 1);
return (decimal & constant) != 0;
}
}
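A minimal sketch of exercising the in-memory constructor above; the byte values and the 4/12-bit split are illustrative assumptions, not taken from the commit:

import java.io.IOException;
import org.meteoinfo.data.meteodata.bufr.BitReader;

public class BitReaderSketch {
    public static void main(String[] args) throws IOException {
        // 0xAB 0xCD = 1010 1011 1100 1101: read the first 4 bits, then the next 12
        BitReader reader = new BitReader(new byte[] {(byte) 0xAB, (byte) 0xCD});
        long first = reader.bits2UInt(4);   // 0xA   = 10
        long rest = reader.bits2UInt(12);   // 0xBCD = 3021
        System.out.printf("first=%d rest=%d%n", first, rest);
    }
}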

View File

@ -0,0 +1,633 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.jdom2.Element;
import org.meteoinfo.data.meteodata.bufr.point.BufrField;
import org.meteoinfo.data.meteodata.bufr.point.StandardFields;
import thredds.client.catalog.Catalog;
import ucar.ma2.*;
import ucar.nc2.Attribute;
import ucar.nc2.NetcdfFile;
import ucar.nc2.NetcdfFiles;
import ucar.nc2.Sequence;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import ucar.nc2.time.CalendarDate;
import ucar.nc2.util.Indent;
import ucar.unidata.geoloc.StationImpl;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.util.*;
/**
* Configuration for converting BUFR files to CDM
* DataDescriptor tree becomes FieldConverter tree with annotations.
*
* @author caron
* @since 8/8/13
*/
public class BufrConfig {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BufrConfig.class);
public static BufrConfig scanEntireFile(RandomAccessFile raf) {
return new BufrConfig(raf);
}
static BufrConfig openFromMessage(RandomAccessFile raf, Message m, Element iospParam) throws IOException {
BufrConfig config = new BufrConfig(raf, m);
if (iospParam != null)
config.merge(iospParam);
return config;
}
private String filename;
private Message message;
private StandardFields.StandardFieldsFromMessage standardFields;
private FieldConverter rootConverter;
private int messHash;
private FeatureType featureType;
private Map<String, BufrStation> map;
private long start = Long.MAX_VALUE;
private long end = Long.MIN_VALUE;
private boolean debug;
/*
* Open file as a stream of BUFR messages, create config file.
*
* Examine length of sequences, annotate
*
* @param bufrFilename open this file
*
* @throws java.io.IOException on IO error
*
* private BufrConfig(String bufrFilename, boolean read) throws IOException {
* this.filename = bufrFilename;
* try {
* scanBufrFile(new RandomAccessFile(bufrFilename, "r"), read);
* } catch (Exception e) {
* e.printStackTrace();
* throw new RuntimeException(e.getMessage());
* }
* }
*/
private BufrConfig(RandomAccessFile raf) {
this.filename = raf.getLocation();
try {
scanBufrFile(raf);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
}
private BufrConfig(RandomAccessFile raf, Message m) throws IOException {
this.filename = raf.getLocation();
this.message = m;
this.messHash = m.hashCode();
this.rootConverter = new FieldConverter(m.ids.getCenterId(), m.getRootDataDescriptor());
standardFields = StandardFields.extract(m);
}
public String getFilename() {
return filename;
}
public Message getMessage() {
return this.message;
}
public FieldConverter getRootConverter() {
return rootConverter;
}
public Map<String, BufrStation> getStationMap() {
return map;
}
public int getMessHash() {
return messHash;
}
public FeatureType getFeatureType() {
return featureType;
}
public FieldConverter getStandardField(BufrCdmIndexProto.FldType want) {
for (FieldConverter fld : rootConverter.flds)
if (fld.type == want)
return fld;
return null;
}
public long getStart() {
return start;
}
public long getEnd() {
return end;
}
public long getNobs() {
return countObs;
}
////////////////////////////////////////////////////////////////////////////
private void merge(Element iospParam) {
assert iospParam.getName().equals("iospParam");
Element bufr2nc = iospParam.getChild("bufr2nc", Catalog.ncmlNS);
if (bufr2nc == null)
return;
for (Element child : bufr2nc.getChildren("fld", Catalog.ncmlNS))
merge(child, rootConverter);
}
private void merge(Element jdom, FieldConverter parent) {
if (jdom == null || parent == null)
return;
FieldConverter fld = null;
// find the corresponding field
String idxName = jdom.getAttributeValue("idx");
if (idxName != null) {
try {
int idx = Integer.parseInt(idxName);
fld = parent.getChild(idx);
} catch (NumberFormatException ne) {
log.info("BufrConfig cant find Child member index={} for file = {}", idxName, filename);
}
}
if (fld == null) {
String fxyName = jdom.getAttributeValue("fxy");
if (fxyName != null) {
fld = parent.findChildByFxyName(fxyName);
if (fld == null) {
log.info("BufrConfig cant find Child member fxy={} for file = {}", fxyName, filename);
}
}
}
if (fld == null) {
String name = jdom.getAttributeValue("name");
if (name != null) {
fld = parent.findChild(name);
if (fld == null) {
log.info("BufrConfig cant find Child member name={} for file = {}", name, filename);
}
}
}
if (fld == null) {
log.info("BufrConfig must have idx, name or fxy attribute = {} for file = {}", jdom, filename);
return;
}
String action = jdom.getAttributeValue("action");
if (action != null && !action.isEmpty())
fld.setAction(action);
if (jdom.getChildren("fld") != null) {
for (Element child : jdom.getChildren("fld", Catalog.ncmlNS)) {
merge(child, fld);
}
}
}
////////////////////////////////////////////////////////////////////////////
private StandardFields.StandardFieldsFromStructure extract;
private boolean hasStations;
private boolean hasDate;
private int countObs;
private void scanBufrFile(RandomAccessFile raf) throws Exception {
NetcdfFile ncd = null;
countObs = 0;
try {
MessageScanner scanner = new MessageScanner(raf);
Message protoMessage = scanner.getFirstDataMessage();
if (protoMessage == null)
throw new IOException("No message found!");
messHash = protoMessage.hashCode();
standardFields = StandardFields.extract(protoMessage);
rootConverter = new FieldConverter(protoMessage.ids.getCenterId(), protoMessage.getRootDataDescriptor());
if (standardFields.hasStation()) {
hasStations = true;
map = new HashMap<>(1000);
}
featureType = guessFeatureType(standardFields);
hasDate = standardFields.hasTime();
ncd = NetcdfFiles.open(raf.getLocation()); // LOOK opening another raf
Attribute centerAtt = ncd.findGlobalAttribute(BufrIosp2.centerId);
int center = (centerAtt == null) ? 0 : centerAtt.getNumericValue().intValue();
Sequence seq = (Sequence) ncd.getRootGroup().findVariableLocal(BufrIosp2.obsRecordName);
extract = new StandardFields.StandardFieldsFromStructure(center, seq);
StructureDataIterator iter = seq.getStructureIterator();
processSeq(iter, rootConverter, true);
setStandardActions(rootConverter);
} finally {
if (ncd != null)
ncd.close();
}
}
private FeatureType guessFeatureType(StandardFields.StandardFieldsFromMessage standardFields) {
if (standardFields.hasStation())
return FeatureType.STATION;
if (standardFields.hasTime())
return FeatureType.POINT;
return FeatureType.ANY;
}
private void setStandardActions(FieldConverter fld) {
fld.setAction(fld.makeAction());
if (fld.flds == null)
return;
for (FieldConverter child : fld.flds)
setStandardActions(child);
}
/////////////////////////////////////////////////////////////////////////////////////
private CalendarDate today = CalendarDate.present();
private void processSeq(StructureDataIterator sdataIter, FieldConverter parent, boolean isTop) throws IOException {
try {
while (sdataIter.hasNext()) {
StructureData sdata = sdataIter.next();
if (isTop) {
countObs++;
if (hasStations)
processStations(parent, sdata);
if (hasDate) {
extract.extract(sdata);
CalendarDate date = extract.makeCalendarDate();
if (Math.abs(date.getDifferenceInMsecs(today)) > 1000L * 3600 * 24 * 100) {
extract.makeCalendarDate();
}
long msecs = date.getMillis();
if (this.start > msecs) {
this.start = msecs;
}
if (this.end < msecs) {
this.end = msecs;
}
}
}
int count = 0;
for (StructureMembers.Member m : sdata.getMembers()) {
if (m.getDataType() == DataType.SEQUENCE) {
FieldConverter fld = parent.getChild(count);
ArraySequence data = (ArraySequence) sdata.getArray(m);
int n = data.getStructureDataCount();
fld.trackSeqCounts(n);
processSeq(data.getStructureDataIterator(), fld, false);
}
count++;
}
}
} finally {
sdataIter.close();
}
}
private void processStations(FieldConverter parent, StructureData sdata) {
BufrStation station = new BufrStation();
station.read(parent, sdata);
BufrStation check = map.get(station.getName());
if (check == null)
map.put(station.getName(), station);
else {
check.count++;
if (!station.equals(check))
log.warn("bad station doesnt equal " + station + " != " + check);
}
}
public class BufrStation extends StationImpl {
public int count = 1;
void read(FieldConverter parent, StructureData sdata) {
extract.extract(sdata);
setName(extract.getStationId());
setLatitude(extract.getFieldValueD(BufrCdmIndexProto.FldType.lat));
setLongitude(extract.getFieldValueD(BufrCdmIndexProto.FldType.lon));
if (extract.hasField(BufrCdmIndexProto.FldType.stationDesc))
setDescription(extract.getFieldValueS(BufrCdmIndexProto.FldType.stationDesc));
if (extract.hasField(BufrCdmIndexProto.FldType.wmoId))
setWmoId(extract.getFieldValueS(BufrCdmIndexProto.FldType.wmoId));
if (extract.hasField(BufrCdmIndexProto.FldType.heightOfStation))
setAltitude(extract.getFieldValueD(BufrCdmIndexProto.FldType.heightOfStation));
}
/*
* void read(FieldConverter parent, StructureData sdata) {
* int count = 0;
* List<FieldConverter> flds = parent.getChildren(); // assume these track exactly the members
* for (StructureMembers.Member m : sdata.getMembers()) {
* FieldConverter fld = flds.get(count++);
* if (fld.getType() == null) continue;
*
* switch (fld.getType()) {
* case stationId:
* setName( readString(sdata, m));
* break;
* case stationDesc:
* setDescription(sdata.getScalarString(m));
* break;
* case wmoId:
* setWmoId(readString(sdata, m));
* break;
* case lat:
* setLatitude(sdata.convertScalarDouble(m));
* break;
* case lon:
* setLongitude(sdata.convertScalarDouble(m));
* break;
* case height:
* setAltitude(sdata.convertScalarDouble(m));
* break;
* case heightOfStation:
* setAltitude(sdata.convertScalarDouble(m));
* break;
* }
* }
* }
*
* String readString(StructureData sdata, StructureMembers.Member m) {
* if (m.getDataType().isString())
* return sdata.getScalarString(m);
* else if (m.getDataType().isIntegral())
* return Integer.toString(sdata.convertScalarInt(m));
* else if (m.getDataType().isNumeric())
* return Double.toString(sdata.convertScalarDouble(m));
* else return "type "+ m.getDataType();
* }
*/
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
BufrStation that = (BufrStation) o;
if (Double.compare(that.alt, alt) != 0)
return false;
if (Double.compare(that.lat, lat) != 0)
return false;
if (Double.compare(that.lon, lon) != 0)
return false;
if (!Objects.equals(desc, that.desc))
return false;
if (!name.equals(that.name))
return false;
return Objects.equals(wmoId, that.wmoId);
}
@Override
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(lat);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(lon);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(alt);
result = 31 * result + (int) (temp ^ (temp >>> 32));
result = 31 * result + name.hashCode();
result = 31 * result + (desc != null ? desc.hashCode() : 0);
result = 31 * result + (wmoId != null ? wmoId.hashCode() : 0);
return result;
}
}
/////////////////////////////////////////////////////////////////////////////////////////////////////
public static class FieldConverter implements BufrField {
DataDescriptor dds;
List<FieldConverter> flds;
BufrCdmIndexProto.FldType type;
BufrCdmIndexProto.FldAction action;
int min = Integer.MAX_VALUE;
int max;
boolean isSeq;
private FieldConverter(int center, DataDescriptor dds) {
this.dds = dds;
this.type = StandardFields.findField(center, dds.getFxyName());
if (dds.getSubKeys() != null) {
this.flds = new ArrayList<>(dds.getSubKeys().size());
for (DataDescriptor subdds : dds.getSubKeys()) {
FieldConverter subfld = new FieldConverter(center, subdds);
flds.add(subfld);
}
}
}
public String getName() {
return dds.getName();
}
public String getDesc() {
return dds.getDesc();
}
public String getUnits() {
return dds.getUnits();
}
public short getFxy() {
return dds.getFxy();
}
public String getFxyName() {
return dds.getFxyName();
}
public BufrCdmIndexProto.FldAction getAction() {
return action;
}
public BufrCdmIndexProto.FldType getType() {
return type;
}
public List<FieldConverter> getChildren() {
return flds;
}
public boolean isSeq() {
return isSeq;
}
public int getMin() {
return min;
}
public int getMax() {
return max;
}
public int getScale() {
return dds.getScale();
}
public int getReference() {
return dds.getRefVal();
}
public int getBitWidth() {
return dds.getBitWidth();
}
public void setAction(String action) {
try {
this.action = BufrCdmIndexProto.FldAction.valueOf(action);
} catch (Exception e) {
log.warn("Unknown action {}", action);
}
}
public void setAction(BufrCdmIndexProto.FldAction action) {
this.action = action;
}
FieldConverter findChild(String want) {
for (FieldConverter child : flds) {
String name = child.dds.getName();
if (name != null && name.equals(want))
return child;
}
return null;
}
FieldConverter findChildByFxyName(String fxyName) {
for (FieldConverter child : flds) {
String name = child.dds.getFxyName();
if (name != null && name.equals(fxyName))
return child;
}
return null;
}
FieldConverter getChild(int i) {
return flds.get(i);
}
void trackSeqCounts(int n) {
isSeq = true;
if (n > max)
max = n;
if (n < min)
min = n;
}
void showRange(Formatter f) {
if (!isSeq)
return;
if (max == min)
f.format(" isConstant='%d'", max);
else if (max < 2)
f.format(" isBinary='true'");
else
f.format(" range='[%d,%d]'", min, max);
}
BufrCdmIndexProto.FldAction makeAction() {
if (!isSeq)
return null;
if (max == 0)
return BufrCdmIndexProto.FldAction.remove;
if (max < 2)
return BufrCdmIndexProto.FldAction.asMissing;
else
return BufrCdmIndexProto.FldAction.asArray;
}
void show(Formatter f, Indent indent, int index) {
boolean hasContent = false;
if (isSeq)
f.format("%s<fld idx='%d' name='%s'", indent, index, dds.getName());
else
f.format("%s<fld idx='%d' fxy='%s' name='%s' desc='%s' units='%s' bits='%d'", indent, index, dds.getFxyName(),
dds.getName(), dds.getDesc(), dds.getUnits(), dds.getBitWidth());
if (type != null)
f.format(" type='%s'", type);
showRange(f);
f.format(" action='%s'", makeAction());
/*
* if (type != null) {
* f.format(">%n");
* indent.incr();
* f.format("%s<type>%s</type>%n", indent, type);
* indent.decr();
* hasContent = true;
* }
*/
if (flds != null) {
f.format(">%n");
indent.incr();
int subidx = 0;
for (FieldConverter cc : flds) {
cc.show(f, indent, subidx++);
}
indent.decr();
hasContent = true;
}
if (hasContent)
f.format("%s</fld>%n", indent);
else
f.format(" />%n");
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
public void show(Formatter out) {
if (standardFields != null)
out.format("Standard Fields%n%s%n%n", standardFields);
Indent indent = new Indent(2);
out.format("<bufr2nc location='%s' hash='%s' featureType='%s'>%n", filename, Integer.toHexString(messHash),
featureType);
indent.incr();
int index = 0;
for (FieldConverter fld : rootConverter.flds) {
fld.show(out, indent, index++);
}
indent.decr();
out.format("</bufr2nc>%n");
}
}
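A hedged sketch of driving BufrConfig directly as written above; the file path is a placeholder, and scanEntireFile() is assumed to be usable stand-alone (internally it reopens the location through NetcdfFiles, so a BUFR IOSP must be available at runtime):

import java.io.IOException;
import java.util.Formatter;
import org.meteoinfo.data.meteodata.bufr.BufrConfig;
import ucar.unidata.io.RandomAccessFile;

public class BufrConfigSketch {
    public static void main(String[] args) throws IOException {
        // hypothetical BUFR file path
        RandomAccessFile raf = new RandomAccessFile("D:/data/sample.bufr", "r");
        try {
            BufrConfig config = BufrConfig.scanEntireFile(raf);
            Formatter f = new Formatter();
            config.show(f); // prints the <bufr2nc> field tree built from the scanned messages
            System.out.println(f);
        } finally {
            raf.close();
        }
    }
}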

View File

@ -0,0 +1,142 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.unidata.io.RandomAccessFile;
import javax.annotation.concurrent.Immutable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Represents Section 3 of a BUFR message.
*
* @author caron
* @since May 10, 2008
*/
@Immutable
public class BufrDataDescriptionSection {
/**
* Offset to start of BufrDataDescriptionSection.
*/
private final long offset;
/**
* Number of data sets.
*/
private final int ndatasets;
/**
* data type (observed or compressed).
*/
private final int datatype;
/**
* List of data set descriptors.
*/
private final List<Short> descriptors = new ArrayList<>();
/**
* Constructs a BufrDataDescriptionSection object by reading section 3 from a BUFR file.
*
* @param raf RandomAccessFile, position must be on a BUFR section 3
* @throws IOException on read error
*/
public BufrDataDescriptionSection(RandomAccessFile raf) throws IOException {
offset = raf.getFilePointer();
int length = BufrNumbers.uint3(raf);
long EOS = offset + length;
// reserved byte
raf.read();
// octets 5-6 number of datasets
ndatasets = BufrNumbers.uint2(raf);
// octet 7 data type bit 2 is for compressed data 192 or 64,
// non-compressed data is 0 or 128
datatype = raf.read();
// get descriptors
int ndesc = (length - 7) / 2;
for (int i = 0; i < ndesc; i++) {
int ch1 = raf.read();
int ch2 = raf.read();
short fxy = (short) ((ch1 << 8) + (ch2));
descriptors.add(fxy);
}
// reset for any offset discrepancies
raf.seek(EOS);
}
/**
* Offset to the beginning of BufrDataDescriptionSection.
*
* @return offset in bytes of BUFR record
*/
public final long getOffset() {
return offset;
}
/**
* Number of data sets in this record.
*
* @return datasets
*/
public final int getNumberDatasets() {
return ndatasets;
}
/**
* Data type (compressed or non-compressed).
*
* @return datatype
*/
public final int getDataType() {
return datatype;
}
/**
* Observation data
*
* @return true if observation data
*/
public boolean isObserved() {
return (datatype & 0x80) != 0;
}
/**
* Is data compressed?
*
* @return true if data is compressed
*/
public boolean isCompressed() {
return (datatype & 0x40) != 0;
}
/**
* get list of data descriptors as Shorts
*
* @return descriptors as List<Short>
*/
public final List<Short> getDataDescriptors() {
return descriptors;
}
/**
* get list of data descriptors as Strings
*
* @return descriptors as List<String>
*/
public final List<String> getDescriptors() {
List<String> desc = new ArrayList<>();
for (short fxy : descriptors)
desc.add(Descriptor.makeString(fxy));
return desc;
}
}
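The descriptors above are packed FXY shorts (F in 2 bits, X in 6 bits, Y in 8 bits, matching the ch1/ch2 packing in the constructor). A small stand-alone sketch of unpacking one, with illustrative byte values:

public class FxyDecodeSketch {
    public static void main(String[] args) {
        int ch1 = 0xC7, ch2 = 0x50;             // two octets from section 3 (illustrative)
        short fxy = (short) ((ch1 << 8) + ch2); // same packing as the constructor above
        int f = (fxy & 0xFFFF) >> 14;           // 3
        int x = (fxy & 0x3F00) >> 8;            // 7
        int y = fxy & 0xFF;                     // 80
        System.out.printf("%d-%02d-%03d%n", f, x, y); // prints 3-07-080
    }
}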

View File

@ -7,7 +7,12 @@ package org.meteoinfo.data.meteodata.bufr;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import ucar.nc2.Group;
import ucar.nc2.NetcdfFile;
import ucar.nc2.iosp.AbstractIOServiceProvider;
import ucar.nc2.iosp.IOServiceProvider;
import ucar.unidata.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
@ -15,11 +20,6 @@ import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.meteoinfo.common.DataConvert;
import ucar.nc2.iosp.bufr.BufrTableLookup;
import ucar.nc2.iosp.bufr.DataDescriptor;
import ucar.nc2.iosp.bufr.Descriptor;
import ucar.nc2.iosp.bufr.Message;
import ucar.nc2.iosp.bufr.MessageScanner;
/**
*
@ -40,6 +40,59 @@ public class BufrDataInfo {
// <editor-fold desc="Get Set Methods">
// </editor-fold>
// <editor-fold desc="Methods">
public static NetcdfFile open(String location) throws IOException {
IOServiceProvider spi = new BufrIosp2();
NetcdfFile ncFile = BufrDataInfo.build(spi, location);
spi.buildFinish(ncFile);
return ncFile;
}
public static NetcdfFile build(IOServiceProvider spi, String location) throws IOException {
RandomAccessFile raf = new RandomAccessFile(location, "r");
NetcdfFile.Builder builder = NetcdfFile.builder().setIosp((AbstractIOServiceProvider) spi).setLocation(location);
try {
Group.Builder root = Group.builder().setName("");
spi.build(raf, root, null);
builder.setRootGroup(root);
String id = root.getAttributeContainer().findAttributeString("_Id", null);
if (id != null) {
builder.setId(id);
}
String title = root.getAttributeContainer().findAttributeString("_Title", null);
if (title != null) {
builder.setTitle(title);
}
} catch (IOException | RuntimeException e) {
try {
raf.close();
} catch (Throwable t2) {
}
try {
spi.close();
} catch (Throwable t1) {
}
throw e;
} catch (Throwable t) {
try {
spi.close();
} catch (Throwable t1) {
}
try {
raf.close();
} catch (Throwable t2) {
}
throw new RuntimeException(t);
}
return builder.build();
}
/**
* Read first message
*
@ -49,7 +102,7 @@ public class BufrDataInfo {
* @throws IOException
*/
public Message readFirstMessage(String fileName) throws FileNotFoundException, IOException {
ucar.unidata.io.RandomAccessFile br = new ucar.unidata.io.RandomAccessFile(fileName, "r");
RandomAccessFile br = new RandomAccessFile(fileName, "r");
MessageScanner ms = new MessageScanner(br);
Message m = ms.getFirstDataMessage();
br.close();
@ -63,7 +116,7 @@ public class BufrDataInfo {
* @throws IOException
*/
public List<Message> readMessages(String fileName) throws IOException {
ucar.unidata.io.RandomAccessFile br = new ucar.unidata.io.RandomAccessFile(fileName, "r");
RandomAccessFile br = new RandomAccessFile(fileName, "r");
MessageScanner ms = new MessageScanner(br);
List<Message> messages = new ArrayList<>();
while(ms.hasNext()) {
@ -83,11 +136,13 @@ public class BufrDataInfo {
bw = new RandomAccessFile(fileName, "rw");
} catch (FileNotFoundException ex) {
Logger.getLogger(BufrDataInfo.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Close the data file created by previos step
* Close the data file created by previous step
*/
public void closeDataFile() {
try {

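The new static open()/build() helpers suggest a usage pattern along the lines of this sketch; the file path is a placeholder, and the one-root-sequence-per-category layout follows the multi-category handling added in BufrIosp2 further below:

import org.meteoinfo.data.meteodata.bufr.BufrDataInfo;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;

public class BufrOpenSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical BUFR file containing one or more data categories
        try (NetcdfFile ncFile = BufrDataInfo.open("D:/data/multi_category.bufr")) {
            // a single-category file exposes one "obs" sequence at the root;
            // a multi-category file exposes one root sequence per category
            for (Variable v : ncFile.getVariables()) {
                System.out.println(v.getFullName() + " : " + v.getDataType());
            }
        }
    }
}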
View File

@ -0,0 +1,33 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import javax.annotation.concurrent.Immutable;
/**
* Represents Section 4 of a BUFR message.
*
* @author caron
* @since May 10, 2008
*/
@Immutable
public class BufrDataSection {
private final long dataPos;
private final int dataLength;
public BufrDataSection(long dataPos, int dataLength) {
this.dataPos = dataPos;
this.dataLength = dataLength;
}
public long getDataPos() {
return dataPos;
}
public int getDataLength() {
return dataLength;
}
}

View File

@ -0,0 +1,306 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.nc2.time.CalendarDate;
import ucar.unidata.io.RandomAccessFile;
import javax.annotation.concurrent.Immutable;
import java.io.IOException;
/**
* A class representing the IdentificationSection (section 1) of a BUFR record.
* Handles editions 2,3,4.
*
* @author Robb Kambic
* @author caron
*/
@Immutable
public class BufrIdentificationSection {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BufrIdentificationSection.class);
private static final boolean warnDate = false;
/**
* Master Table number.
*/
private final int master_table;
/**
* Identification of subcenter.
*/
private final int subcenter_id;
/**
* Identification of center.
*/
private final int center_id;
/**
* Update Sequence Number.
*/
private final int update_sequence;
/**
* Optional section exists.
*/
private final boolean hasOptionalSection;
private final int optionalSectionLen;
private final long optionalSectionPos;
/**
* Data category.
*/
private final int category;
/**
* Data sub category.
*/
private final int subCategory;
private final int localSubCategory; // edition >= 4
/**
* Table Version numbers.
*/
private final int master_table_version;
private final int local_table_version;
/**
* Time of the obs (nominal)
*/
private final int year, month, day, hour, minute, second;
private final byte[] localUse;
// *** constructors *******************************************************
/**
* Constructs a <tt>BufrIdentificationSection</tt> object from a raf.
*
* @param raf RandomAccessFile with Section 1 content
* @param is the BufrIndicatorSection, needed for the bufr edition number
* @throws IOException if raf contains no valid BUFR file
*/
public BufrIdentificationSection(RandomAccessFile raf, BufrIndicatorSection is) throws IOException {
// section 1 octet 1-3 (length of section)
int length = BufrNumbers.int3(raf);
// master table octet 4
master_table = raf.read();
if (is.getBufrEdition() < 4) {
if (length < 17)
throw new IOException("Invalid BUFR message on " + raf.getLocation());
if (is.getBufrEdition() == 2) {
subcenter_id = 255;
// Center octet 5-6
center_id = BufrNumbers.int2(raf);
} else { // edition 3
// Center octet 5
subcenter_id = raf.read();
// Center octet 6
center_id = raf.read();
}
// Update sequence number octet 7
update_sequence = raf.read();
// Optional section octet 8
int optional = raf.read();
hasOptionalSection = (optional & 0x80) != 0;
// Category octet 9
category = raf.read();
// Category octet 10
subCategory = raf.read();
localSubCategory = -1; // not used
// master table version octet 11
master_table_version = raf.read();
// local table version octet 12
local_table_version = raf.read();
// octets 13-17 (reference time of forecast)
int lyear = raf.read();
if (lyear > 100)
lyear -= 100;
year = lyear + 2000;
int tempMonth = raf.read();
month = (tempMonth == 0) ? 1 : tempMonth; // joda time does not allow 0 month
int tempDay = raf.read();
day = (tempDay == 0) ? 1 : tempDay; // joda time does not allow 0 day
hour = raf.read();
minute = raf.read();
second = 0;
if (warnDate && (tempMonth == 0 || tempDay == 0)) {
// From manual on codes
// When accuracy of the time does not define a time unit, then the value for this unit shall be set to zero
// (e.g. for a
// SYNOP observation at 09 UTC, minute = 0, second = 0).
// NCEP codes their BUFR table messages with 0/0/0 0:0:0 in edition 3
log.warn(raf.getLocation() + ": month or day is zero, set to 1. {}/{}/{} {}:{}:{}", year, tempMonth, tempDay,
hour, minute, second);
}
int n = length - 17;
localUse = new byte[n];
int nRead = raf.read(localUse);
if (nRead != localUse.length)
throw new IOException("Error reading BUFR local use field.");
} else { // BUFR Edition 4 and above are slightly different
if (length < 22)
throw new IOException("Invalid BUFR message");
// Center octet 5 - 6
center_id = BufrNumbers.int2(raf);
// Sub Center octet 7-8
subcenter_id = BufrNumbers.int2(raf);
// Update sequence number octet 9
update_sequence = raf.read();
// Optional section octet 10
int optional = raf.read();
// Most Sig. Bit = 1 : has optional section
// 0 : does not have an optional section
hasOptionalSection = (optional & 0x80) != 0;
// Category octet 11
category = raf.read();
// International Sub Category octet 12
subCategory = raf.read();
// Local Sub Category Octet 13 - just read this for now
localSubCategory = raf.read();
// master table version octet 14
master_table_version = raf.read();
// local table version octet 15
local_table_version = raf.read();
// octets 16-22 (reference time of forecast)
// Octet 16-17 is the 4-digit year
year = BufrNumbers.int2(raf);
month = raf.read();
day = raf.read();
hour = raf.read();
minute = raf.read();
second = raf.read();
int n = length - 22;
localUse = new byte[n];
int nRead = raf.read(localUse);
if (nRead != localUse.length)
throw new IOException("Error reading BUFR local use field.");
}
// skip optional section, but store position so can read if caller wants it
if (hasOptionalSection) {
int optionalLen = BufrNumbers.int3(raf);
if (optionalLen % 2 != 0)
optionalLen++;
optionalSectionLen = optionalLen - 4;
raf.skipBytes(1);
optionalSectionPos = raf.getFilePointer();
raf.skipBytes(optionalSectionLen);
} else {
optionalSectionLen = -1;
optionalSectionPos = -1;
}
}
/**
* Identification of center.
*
* @return center id as int
*/
public final int getCenterId() {
return center_id;
}
/**
* Identification of subcenter.
*
* @return subcenter as int
*/
public final int getSubCenterId() {
return subcenter_id;
}
/**
* Get update sequence.
*
* @return update_sequence
*/
public final int getUpdateSequence() {
return update_sequence;
}
/**
* return record header time as a CalendarDate
*
* @return referenceTime
*/
public final CalendarDate getReferenceTime() {
int sec = (second < 0 || second > 59) ? 0 : second;
return CalendarDate.of(null, year, month, day, hour, minute, sec);
}
public final int getCategory() {
return category;
}
public final int getSubCategory() {
return subCategory;
}
public final int getLocalSubCategory() {
return localSubCategory;
}
public final int getMasterTableId() {
return master_table;
}
public final int getMasterTableVersion() {
return master_table_version;
}
public final int getLocalTableVersion() {
return local_table_version;
}
/**
* Last bytes of the id section are "reserved for local use by ADP centers".
*
* @return local use bytes, if any.
*/
public final byte[] getLocalUseBytes() {
return localUse;
}
public final byte[] getOptionalSection(RandomAccessFile raf) throws IOException {
if (!hasOptionalSection)
return null;
byte[] optionalSection = new byte[optionalSectionLen - 4];
raf.seek(optionalSectionPos);
int nRead = raf.read(optionalSection);
if (nRead != optionalSection.length)
log.warn("Error reading optional section -- expected " + optionalSection.length + " but read " + nRead);
return optionalSection;
}
}

View File

@ -0,0 +1,65 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.unidata.io.RandomAccessFile;
import javax.annotation.concurrent.Immutable;
import java.io.IOException;
/**
* A class representing the IndicatorSection (section 0) of a BUFR record.
* Handles editions 2,3,4.
*
* @author Robb Kambic
* @author caron
*/
@Immutable
public class BufrIndicatorSection {
private final long startPos;
private final int bufrLength; // Length in bytes of BUFR record.
private final int edition;
// *** constructors *******************************************************
/**
* Constructs a <tt>BufrIndicatorSection</tt> object from a raf.
*
* @param raf RandomAccessFile with IndicatorSection content
* @throws IOException on read error
*/
public BufrIndicatorSection(RandomAccessFile raf) throws IOException {
this.startPos = raf.getFilePointer() - 4; // start of BUFR message, including "BUFR"
bufrLength = BufrNumbers.uint3(raf);
edition = raf.read();
}
/**
* Get the byte length of this BUFR record.
*
* @return length in bytes of BUFR record
*/
public final int getBufrLength() {
return bufrLength;
}
/**
* Get the edition of the BUFR specification used.
*
* @return edition number of BUFR specification
*/
public final int getBufrEdition() {
return edition;
}
/**
* Get starting position in the file. This should point to the "BUFR" chars.
*
* @return byte offset in file of start of BUFR message.
*/
public final long getStartPos() {
return startPos;
}
}

View File

@ -0,0 +1,449 @@
/*
* Copyright (c) 1998-2020 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.jdom2.Element;
import ucar.ma2.*;
import ucar.nc2.*;
import ucar.nc2.constants.DataFormatType;
import ucar.nc2.iosp.AbstractIOServiceProvider;
import ucar.nc2.util.CancelTask;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.util.*;
/**
* IOSP for BUFR data - version 2, using the preprocessor.
*
* @author caron
* @since 8/8/13
*/
public class BufrIosp2 extends AbstractIOServiceProvider {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BufrIosp2.class);
public static final String obsRecordName = "obs";
public static final String fxyAttName = "BUFR:TableB_descriptor";
public static final String centerId = "BUFR:centerId";
// debugging
private static boolean debugIter;
public static void setDebugFlags(ucar.nc2.util.DebugFlags debugFlag) {
debugIter = debugFlag.isSet("Bufr/iter");
}
//private Structure obsStructure;
//private Message protoMessage; // prototypical message: all messages in the file must be the same.
private MessageScanner scanner;
private List<Message> protoMessages; // prototypical messages: one for each data category in the file.
private List<RootVariable> rootVariables;
private HashSet<Integer> messHash;
private boolean isSingle;
private BufrConfig config;
private Element iospParam;
@Override
public boolean isValidFile(RandomAccessFile raf) throws IOException {
return MessageScanner.isValidFile(raf);
}
@Override
public boolean isBuilder() {
return true;
}
public void build(String fileName) throws IOException {
RandomAccessFile raf = new RandomAccessFile(fileName, "r");
Group.Builder rootGroup = new Group.Builder().setName("");
this.build(raf, rootGroup, null);
System.out.println(rootGroup);
}
@Override
public void build(RandomAccessFile raf, Group.Builder rootGroup, CancelTask cancelTask) throws IOException {
super.open(raf, rootGroup.getNcfile(), cancelTask);
scanner = new MessageScanner(raf);
Message protoMessage = scanner.getFirstDataMessage();
if (protoMessage == null)
throw new IOException("No data messages in the file= " + raf.getLocation());
if (!protoMessage.isTablesComplete())
throw new IllegalStateException("BUFR file has incomplete tables");
// collect prototype messages: a new prototype is recorded whenever the data category changes, so a multi-category BUFR file yields several prototypes
protoMessages = new ArrayList<>();
protoMessages.add(protoMessage);
int category = protoMessage.ids.getCategory();
while (scanner.hasNext()) {
Message message = scanner.next();
if (message.ids.getCategory() != category) {
protoMessages.add(message);
category = message.ids.getCategory();
}
}
// just get the fields
this.config = BufrConfig.openFromMessage(raf, protoMessage, iospParam); // keep the config so getDetailInfo() can use it
// this fills the netcdf object
if (this.protoMessages.size() == 1) {
new BufrIospBuilder(protoMessage, config, rootGroup, raf.getLocation());
} else {
List<BufrConfig> configs = new ArrayList<>();
for (Message message : protoMessages) {
configs.add(BufrConfig.openFromMessage(raf, message, iospParam));
}
new BufrIospBuilder(protoMessage, configs, rootGroup, raf.getLocation());
}
isSingle = false;
}
@Override
public void buildFinish(NetcdfFile ncfile) {
// support multiple root variables (one per data category) in a single BUFR data file
this.rootVariables = new ArrayList<>();
if (this.protoMessages.size() == 1) {
Structure obsStructure = (Structure) ncfile.findVariable(obsRecordName);
// The proto DataDescriptor must have a link to the Sequence object to read nested Sequences.
connectSequences(obsStructure.getVariables(), protoMessages.get(0).getRootDataDescriptor().getSubKeys());
this.rootVariables.add(new RootVariable(protoMessages.get(0), obsStructure));
} else {
for (int i = 0; i < this.protoMessages.size(); i++) {
Structure variable = (Structure) ncfile.getVariables().get(i);
Message message = protoMessages.get(i);
connectSequences(variable.getVariables(), message.getRootDataDescriptor().getSubKeys());
this.rootVariables.add(new RootVariable(message, variable));
}
}
}
private void connectSequences(List<Variable> variables, List<DataDescriptor> dataDescriptors) {
for (Variable v : variables) {
if (v instanceof Sequence) {
findDataDescriptor(dataDescriptors, v.getShortName()).ifPresent(dds -> dds.refersTo = (Sequence) v);
}
if (v instanceof Structure) { // recurse
findDataDescriptor(dataDescriptors, v.getShortName())
.ifPresent(dds -> connectSequences(((Structure) v).getVariables(), dds.getSubKeys()));
}
}
}
private Optional<DataDescriptor> findDataDescriptor(List<DataDescriptor> dataDescriptors, String name) {
Optional<DataDescriptor> ddsOpt = dataDescriptors.stream().filter(d -> name.equals(d.name)).findFirst();
if (ddsOpt.isPresent()) {
return ddsOpt;
} else {
throw new IllegalStateException("DataDescriptor does not contain " + name);
}
}
@Override
public void open(RandomAccessFile raf, NetcdfFile ncfile, CancelTask cancelTask) throws IOException {
super.open(raf, ncfile, cancelTask);
scanner = new MessageScanner(raf);
Message protoMessage = scanner.getFirstDataMessage();
if (protoMessage == null)
throw new IOException("No data messages in the file= " + ncfile.getLocation());
if (!protoMessage.isTablesComplete())
throw new IllegalStateException("BUFR file has incomplete tables");
// just get the fields
this.config = BufrConfig.openFromMessage(raf, protoMessage, iospParam); // keep the config so getDetailInfo() can use it
// this fills the netcdf object
Construct2 construct = new Construct2(protoMessage, config, ncfile);
Structure obsStructure = construct.getObsStructure();
// keep the prototype and root sequence so readData()/getStructureIterator()/getDetailInfo() also work via this path
this.protoMessages = new ArrayList<>();
this.protoMessages.add(protoMessage);
this.rootVariables = new ArrayList<>();
this.rootVariables.add(new RootVariable(protoMessage, obsStructure));
ncfile.finish();
isSingle = false;
}
// for BufrMessageViewer
public void open(RandomAccessFile raf, NetcdfFile ncfile, Message single) throws IOException {
this.raf = raf;
Message protoMessage = single;
protoMessage.getRootDataDescriptor(); // construct the data descriptors, check for complete tables
if (!protoMessage.isTablesComplete())
throw new IllegalStateException("BUFR file has incomplete tables");
BufrConfig config = BufrConfig.openFromMessage(raf, protoMessage, null);
// this fills the netcdf object
Construct2 construct = new Construct2(protoMessage, config, ncfile);
Structure obsStructure = construct.getObsStructure();
// keep the prototype and root sequence so getStructureIterator() works for the single-message case
this.protoMessages = new ArrayList<>();
this.protoMessages.add(protoMessage);
this.rootVariables = new ArrayList<>();
this.rootVariables.add(new RootVariable(protoMessage, obsStructure));
isSingle = true;
ncfile.finish();
this.ncfile = ncfile;
}
@Override
public Object sendIospMessage(Object message) {
if (message instanceof Element) {
iospParam = (Element) message;
iospParam.detach();
return true;
}
return super.sendIospMessage(message);
}
/*public BufrConfig getConfig() {
return config;
}*/
public Element getElem() {
return iospParam;
}
private int nelems = -1;
@Override
public Array readData(Variable v2, Section section) {
RootVariable rootVariable = findRootSequence(v2);
Structure obsStructure = rootVariable.getVariable();
return new ArraySequence(obsStructure.makeStructureMembers(), new SeqIter(rootVariable), nelems);
}
@Override
public StructureDataIterator getStructureIterator(Structure s, int bufferSize) {
RootVariable rootVariable = findRootSequence(s);
return isSingle ? new SeqIterSingle(rootVariable) : new SeqIter(rootVariable);
}
private Structure findRootSequence() {
return (Structure) this.ncfile.findVariable(BufrIosp2.obsRecordName);
}
// find root sequence from root variable list
private RootVariable findRootSequence(Variable var) {
for (RootVariable rootVariable : this.rootVariables) {
if (rootVariable.getVariable().getShortName().equals(var.getShortName())) {
return rootVariable;
}
}
return null;
}
// root variable contains prototype message and corresponding variable
private class RootVariable {
private Message protoMessage;
private Structure variable;
public RootVariable(Message message, Structure variable) {
this.protoMessage = message;
this.variable = variable;
}
public Message getProtoMessage() {
return this.protoMessage;
}
public Structure getVariable() {
return this.variable;
}
}
private class SeqIter implements StructureDataIterator {
StructureDataIterator currIter;
int recnum;
// add its own prototype message and observation structure
Message protoMessage;
Structure obsStructure;
SeqIter(Message message, Structure structure) {
this.protoMessage = message;
this.obsStructure = structure;
reset();
}
SeqIter(RootVariable rootVariable) {
this(rootVariable.protoMessage, rootVariable.variable);
}
@Override
public StructureDataIterator reset() {
recnum = 0;
currIter = null;
scanner.reset();
return this;
}
@Override
public boolean hasNext() throws IOException {
if (currIter == null) {
currIter = readNextMessage();
if (currIter == null) {
nelems = recnum;
return false;
}
}
if (!currIter.hasNext()) {
currIter = readNextMessage();
return hasNext();
}
return true;
}
@Override
public StructureData next() throws IOException {
recnum++;
return currIter.next();
}
private StructureDataIterator readNextMessage() throws IOException {
if (!scanner.hasNext())
return null;
Message m = scanner.next();
if (m == null) {
log.warn("BUFR scanner hasNext() true but next() null!");
return null;
}
if (m.containsBufrTable()) // data messages only
return readNextMessage();
// mixed files: skip messages whose structure differs from this iterator's prototype
if (!protoMessage.equals(m)) {
if (messHash == null)
messHash = new HashSet<>(20);
if (!messHash.contains(m.hashCode())) {
log.warn("File " + raf.getLocation() + " has different BUFR message types hash=" + protoMessage.hashCode()
+ "; skipping");
messHash.add(m.hashCode());
}
return readNextMessage();
}
ArrayStructure as = readMessage(m);
return as.getStructureDataIterator();
}
private ArrayStructure readMessage(Message m) throws IOException {
ArrayStructure as;
if (m.dds.isCompressed()) {
MessageCompressedDataReader reader = new MessageCompressedDataReader();
as = reader.readEntireMessage(obsStructure, protoMessage, m, raf, null);
} else {
MessageUncompressedDataReader reader = new MessageUncompressedDataReader();
as = reader.readEntireMessage(obsStructure, protoMessage, m, raf, null);
}
return as;
}
@Override
public int getCurrentRecno() {
return recnum - 1;
}
@Override
public void close() {
if (currIter != null)
currIter.close();
currIter = null;
if (debugIter)
System.out.printf("BUFR read recnum %d%n", recnum);
}
}
private class SeqIterSingle implements StructureDataIterator {
StructureDataIterator currIter;
int recnum;
// add its own prototype message and observation structure
Message protoMessage;
Structure obsStructure;
SeqIterSingle(Message message, Structure structure) {
protoMessage = message;
obsStructure = structure;
reset();
}
SeqIterSingle(RootVariable rootVariable) {
this(rootVariable.protoMessage, rootVariable.variable);
}
@Override
public StructureDataIterator reset() {
recnum = 0;
currIter = null;
return this;
}
@Override
public boolean hasNext() throws IOException {
if (currIter == null) {
currIter = readProtoMessage();
if (currIter == null) {
nelems = recnum;
return false;
}
}
return currIter.hasNext();
}
@Override
public StructureData next() throws IOException {
recnum++;
return currIter.next();
}
private StructureDataIterator readProtoMessage() throws IOException {
Message m = protoMessage;
ArrayStructure as;
if (m.dds.isCompressed()) {
MessageCompressedDataReader reader = new MessageCompressedDataReader();
as = reader.readEntireMessage(obsStructure, protoMessage, m, raf, null);
} else {
MessageUncompressedDataReader reader = new MessageUncompressedDataReader();
as = reader.readEntireMessage(obsStructure, protoMessage, m, raf, null);
}
return as.getStructureDataIterator();
}
@Override
public int getCurrentRecno() {
return recnum - 1;
}
@Override
public void close() {
if (currIter != null)
currIter.close();
currIter = null;
}
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@Override
public String getDetailInfo() {
Formatter ff = new Formatter();
ff.format("%s", super.getDetailInfo());
protoMessages.get(0).dump(ff);
ff.format("%n");
config.show(ff);
return ff.toString();
}
@Override
public String getFileTypeId() {
return DataFormatType.BUFR.getDescription();
}
@Override
public String getFileTypeDescription() {
return "WMO Binary Universal Form";
}
}
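A minimal usage sketch, assuming the CDM 5.x NetcdfFiles API and an illustrative file name; for a multi-category file there is one root sequence per category (named after the category) instead of a single "obs" sequence.

import ucar.ma2.StructureData;
import ucar.ma2.StructureDataIterator;
import ucar.nc2.NetcdfFile;
import ucar.nc2.NetcdfFiles;
import ucar.nc2.Sequence;
import org.meteoinfo.data.meteodata.bufr.BufrIosp2;

public class BufrReadExample {
    public static void main(String[] args) throws Exception {
        NetcdfFiles.registerIOProvider(BufrIosp2.class); // make this IOSP known to the CDM
        try (NetcdfFile ncfile = NetcdfFiles.open("sample.bufr")) { // hypothetical file name
            Sequence obs = (Sequence) ncfile.findVariable(BufrIosp2.obsRecordName);
            StructureDataIterator it = obs.getStructureIterator(-1);
            int count = 0;
            try {
                while (it.hasNext()) {
                    StructureData sdata = it.next(); // one observation record
                    count++;
                }
            } finally {
                it.close();
            }
            System.out.println("observations read: " + count);
        }
    }
}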


@ -0,0 +1,467 @@
package org.meteoinfo.data.meteodata.bufr;
import org.slf4j.Logger;
import ucar.ma2.DataType;
import ucar.nc2.*;
import ucar.nc2.constants.AxisType;
import ucar.nc2.constants.CDM;
import ucar.nc2.constants.CF;
import ucar.nc2.constants._Coordinate;
import org.meteoinfo.data.meteodata.bufr.tables.CodeFlagTables;
import java.util.*;
/**
* Construction of the Netcdf objects using builders.
*/
class BufrIospBuilder {
private static Logger log = org.slf4j.LoggerFactory.getLogger(BufrIospBuilder.class);
private static final boolean warnUnits = false;
private final Group.Builder rootGroup;
private Sequence.Builder recordStructure;
private final Formatter coordinates = new Formatter();
private int tempNo = 1; // fishy
BufrIospBuilder(Message proto, BufrConfig bufrConfig, Group.Builder root, String location) {
this.rootGroup = root;
this.recordStructure = Sequence.builder().setName(BufrIosp2.obsRecordName);
this.rootGroup.addVariable(recordStructure);
// global Attributes
AttributeContainerMutable atts = root.getAttributeContainer();
atts.addAttribute(CDM.HISTORY, "Read using CDM BufrIosp2");
if (bufrConfig.getFeatureType() != null) {
atts.addAttribute(CF.FEATURE_TYPE, bufrConfig.getFeatureType().toString());
}
atts.addAttribute("location", location);
atts.addAttribute("BUFR:categoryName", proto.getLookup().getCategoryName());
atts.addAttribute("BUFR:subCategoryName", proto.getLookup().getSubCategoryName());
atts.addAttribute("BUFR:centerName", proto.getLookup().getCenterName());
atts.addAttribute("BUFR:category", proto.ids.getCategory());
atts.addAttribute("BUFR:subCategory", proto.ids.getSubCategory());
atts.addAttribute("BUFR:localSubCategory", proto.ids.getLocalSubCategory());
atts.addAttribute(BufrIosp2.centerId, proto.ids.getCenterId());
atts.addAttribute("BUFR:subCenter", proto.ids.getSubCenterId());
atts.addAttribute("BUFR:table", proto.ids.getMasterTableId());
atts.addAttribute("BUFR:tableVersion", proto.ids.getMasterTableVersion());
atts.addAttribute("BUFR:localTableVersion", proto.ids.getLocalTableVersion());
atts.addAttribute("Conventions", "BUFR/CDM");
atts.addAttribute("BUFR:edition", proto.is.getBufrEdition());
String header = proto.getHeader();
if (header != null && !header.isEmpty()) {
atts.addAttribute("WMO Header", header);
}
makeObsRecord(bufrConfig);
String coordS = coordinates.toString();
if (!coordS.isEmpty()) {
recordStructure.addAttribute(new Attribute("coordinates", coordS));
}
}
BufrIospBuilder(Message proto, List<BufrConfig> bufrConfigs, Group.Builder root, String location) {
this.rootGroup = root;
// global Attributes
AttributeContainerMutable atts = root.getAttributeContainer();
atts.addAttribute(CDM.HISTORY, "Read using CDM BufrIosp2");
atts.addAttribute("location", location);
atts.addAttribute("BUFR:categoryName", proto.getLookup().getCategoryName());
atts.addAttribute("BUFR:subCategoryName", proto.getLookup().getSubCategoryName());
atts.addAttribute("BUFR:centerName", proto.getLookup().getCenterName());
atts.addAttribute(BufrIosp2.centerId, proto.ids.getCenterId());
atts.addAttribute("BUFR:subCenter", proto.ids.getSubCenterId());
atts.addAttribute("BUFR:table", proto.ids.getMasterTableId());
atts.addAttribute("BUFR:tableVersion", proto.ids.getMasterTableVersion());
atts.addAttribute("BUFR:localTableVersion", proto.ids.getLocalTableVersion());
atts.addAttribute("Conventions", "BUFR/CDM");
atts.addAttribute("BUFR:edition", proto.is.getBufrEdition());
String header = proto.getHeader();
if (header != null && !header.isEmpty()) {
atts.addAttribute("WMO Header", header);
}
for (BufrConfig bufrConfig : bufrConfigs) {
String varName = proto.getLookup().getCategoryName(bufrConfig.getMessage().ids.getCategory());
Sequence.Builder rs = Sequence.builder().setName(varName);
this.rootGroup.addVariable(rs);
makeObsRecord(bufrConfig, rs);
String coordS = coordinates.toString();
if (!coordS.isEmpty()) {
rs.addAttribute(new Attribute("coordinates", coordS));
}
}
}
Sequence.Builder getObsStructure() {
return recordStructure;
}
private void makeObsRecord(BufrConfig bufrConfig) {
BufrConfig.FieldConverter root = bufrConfig.getRootConverter();
for (BufrConfig.FieldConverter fld : root.flds) {
DataDescriptor dkey = fld.dds;
if (!dkey.isOkForVariable()) {
continue;
}
if (dkey.replication == 0) {
addSequence(rootGroup, recordStructure, fld);
} else if (dkey.replication > 1) {
List<BufrConfig.FieldConverter> subFlds = fld.flds;
List<DataDescriptor> subKeys = dkey.subKeys;
if (subKeys.size() == 1) { // only one member
DataDescriptor subDds = dkey.subKeys.get(0);
BufrConfig.FieldConverter subFld = subFlds.get(0);
if (subDds.dpi != null) {
addDpiStructure(recordStructure, fld, subFld);
} else if (subDds.replication == 1) { // one member not a replication
Variable.Builder v = addVariable(rootGroup, recordStructure, subFld, dkey.replication);
v.setSPobject(fld); // set the replicating field as SPI object
} else { // one member is a replication (two replications in a row)
addStructure(rootGroup, recordStructure, fld, dkey.replication);
}
} else if (subKeys.size() > 1) {
addStructure(rootGroup, recordStructure, fld, dkey.replication);
}
} else { // replication == 1
addVariable(rootGroup, recordStructure, fld, dkey.replication);
}
}
}
private void makeObsRecord(BufrConfig bufrConfig, Sequence.Builder rs) {
BufrConfig.FieldConverter root = bufrConfig.getRootConverter();
for (BufrConfig.FieldConverter fld : root.flds) {
DataDescriptor dkey = fld.dds;
if (!dkey.isOkForVariable()) {
continue;
}
if (dkey.replication == 0) {
addSequence(rootGroup, rs, fld);
} else if (dkey.replication > 1) {
List<BufrConfig.FieldConverter> subFlds = fld.flds;
List<DataDescriptor> subKeys = dkey.subKeys;
if (subKeys.size() == 1) { // only one member
DataDescriptor subDds = dkey.subKeys.get(0);
BufrConfig.FieldConverter subFld = subFlds.get(0);
if (subDds.dpi != null) {
addDpiStructure(rs, fld, subFld);
} else if (subDds.replication == 1) { // one member not a replication
Variable.Builder v = addVariable(rootGroup, rs, subFld, dkey.replication);
v.setSPobject(fld); // set the replicating field as SPI object
} else { // one member is a replication (two replications in a row)
addStructure(rootGroup, rs, fld, dkey.replication);
}
} else if (subKeys.size() > 1) {
addStructure(rootGroup, rs, fld, dkey.replication);
}
} else { // replication == 1
addVariable(rootGroup, rs, fld, dkey.replication);
}
}
}
private void addStructure(Group.Builder group, Structure.Builder parent, BufrConfig.FieldConverter fld, int count) {
DataDescriptor dkey = fld.dds;
String uname = findUniqueName(parent, fld.getName(), "struct");
dkey.name = uname; // name may need to be changed for uniqueness
Structure.Builder struct = Structure.builder().setName(uname);
struct.setDimensionsAnonymous(new int[]{count}); // anon vector
for (BufrConfig.FieldConverter subKey : fld.flds) {
addMember(group, struct, subKey);
}
parent.addMemberVariable(struct);
struct.setSPobject(fld);
}
private void addSequence(Group.Builder group, Structure.Builder parent, BufrConfig.FieldConverter fld) {
DataDescriptor dkey = fld.dds;
String uname = findUniqueName(parent, fld.getName(), "seq");
dkey.name = uname; // name may need to be changed for uniqueness
Sequence.Builder seq = Sequence.builder().setName(uname);
for (BufrConfig.FieldConverter subKey : fld.flds) {
addMember(group, seq, subKey);
}
parent.addMemberVariable(seq);
seq.setSPobject(fld);
}
private void addMember(Group.Builder group, Structure.Builder parent, BufrConfig.FieldConverter fld) {
DataDescriptor dkey = fld.dds;
if (dkey.replication == 0) {
addSequence(group, parent, fld);
} else if (dkey.replication > 1) {
List<DataDescriptor> subKeys = dkey.subKeys;
if (subKeys.size() == 1) {
BufrConfig.FieldConverter subFld = fld.flds.get(0);
Variable.Builder v = addVariable(group, parent, subFld, dkey.replication);
v.setSPobject(fld); // set the replicating field as SPI object
} else {
addStructure(group, parent, fld, dkey.replication);
}
} else {
addVariable(group, parent, fld, dkey.replication);
}
}
private void addDpiStructure(Structure.Builder parent, BufrConfig.FieldConverter parentFld,
BufrConfig.FieldConverter dpiField) {
DataDescriptor dpiKey = dpiField.dds;
String uname = findUniqueName(parent, dpiField.getName(), "struct");
dpiKey.name = uname; // name may need to be changed for uniqueness
Structure.Builder struct = Structure.builder().setName(uname);
parent.addMemberVariable(struct);
int n = parentFld.dds.replication;
struct.setDimensionsAnonymous(new int[]{n}); // anon vector
Variable.Builder v = Variable.builder().setName("name");
v.setDataType(DataType.STRING); // scalar
struct.addMemberVariable(v);
v = Variable.builder().setName("data");
v.setDataType(DataType.FLOAT); // scalar
struct.addMemberVariable(v);
struct.setSPobject(dpiField); // ??
}
private void addDpiSequence(Structure.Builder parent, BufrConfig.FieldConverter fld) {
Structure.Builder struct = Structure.builder().setName("statistics");
struct.setDimensionsAnonymous(new int[]{fld.dds.replication}); // scalar
Variable.Builder v = Variable.builder().setName("name");
v.setDataType(DataType.STRING); // scalar
struct.addMemberVariable(v);
v = Variable.builder().setName("data");
v.setDataType(DataType.FLOAT); // scalar
struct.addMemberVariable(v);
parent.addMemberVariable(struct);
}
private Variable.Builder addVariable(Group.Builder group, Structure.Builder struct, BufrConfig.FieldConverter fld,
int count) {
DataDescriptor dkey = fld.dds;
String uname = findGloballyUniqueName(fld.getName(), "unknown");
dkey.name = uname; // name may need to be changed for uniqueness
Variable.Builder v = Variable.builder().setName(uname);
if (count > 1) {
v.setDimensionsAnonymous(new int[]{count}); // anon vector
}
if (fld.getDesc() != null) {
v.addAttribute(new Attribute(CDM.LONG_NAME, fld.getDesc()));
}
if (fld.getUnits() == null) {
if (warnUnits) {
log.warn("dataDesc.units == null for " + uname);
}
} else {
String units = fld.getUnits();
if (ucar.nc2.iosp.bufr.DataDescriptor.isCodeTableUnit(units)) {
v.addAttribute(new Attribute(CDM.UNITS, "CodeTable " + fld.dds.getFxyName()));
} else if (ucar.nc2.iosp.bufr.DataDescriptor.isFlagTableUnit(units)) {
v.addAttribute(new Attribute(CDM.UNITS, "FlagTable " + fld.dds.getFxyName()));
} else if (!ucar.nc2.iosp.bufr.DataDescriptor.isInternationalAlphabetUnit(units) && !units.startsWith("Numeric")) {
v.addAttribute(new Attribute(CDM.UNITS, units));
}
}
DataDescriptor dataDesc = fld.dds;
if (dataDesc.type == 1) {
v.setDataType(DataType.CHAR);
int size = dataDesc.bitWidth / 8;
v.setDimensionsAnonymous(new int[]{size});
} else if ((dataDesc.type == 2) && CodeFlagTables.hasTable(dataDesc.fxy)) { // enum
int nbits = dataDesc.bitWidth;
int nbytes = (nbits % 8 == 0) ? nbits / 8 : nbits / 8 + 1;
CodeFlagTables ct = CodeFlagTables.getTable(dataDesc.fxy);
if (nbytes == 1) {
v.setDataType(DataType.ENUM1);
} else if (nbytes == 2) {
v.setDataType(DataType.ENUM2);
} else if (nbytes == 4) {
v.setDataType(DataType.ENUM4);
}
// v.removeAttribute(CDM.UNITS);
v.addAttribute(new Attribute("BUFR:CodeTable", ct.getName() + " (" + dataDesc.getFxyName() + ")"));
EnumTypedef type = group.findOrAddEnumTypedef(ct.getName(), ct.getMap());
v.setEnumTypeName(type.getShortName());
} else {
int nbits = dataDesc.bitWidth;
// use of unsigned seems fishy, since only time it uses high bit is for missing
// not necessarily true, just when they "add one bit" to deal with missing case
if (nbits < 9) {
v.setDataType(DataType.BYTE);
if (nbits == 8) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (short) BufrNumbers.missingValue(nbits)));
} else {
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (byte) BufrNumbers.missingValue(nbits)));
}
} else if (nbits < 17) {
v.setDataType(DataType.SHORT);
if (nbits == 16) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
} else {
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (short) BufrNumbers.missingValue(nbits)));
}
} else if (nbits < 33) {
v.setDataType(DataType.INT);
if (nbits == 32) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
} else {
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
}
} else {
v.setDataType(DataType.LONG);
v.addAttribute(new Attribute(CDM.MISSING_VALUE, BufrNumbers.missingValue(nbits)));
}
// value = scale_factor * packed + add_offset
// bpacked = (value * 10^scale - refVal)
// (bpacked + refVal) / 10^scale = value
// value = bpacked * 10^-scale + refVal * 10^-scale
// scale_factor = 10^-scale
// add_offset = refVal * 10^-scale
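// Worked example with illustrative numbers (scale=2, refVal=-3000, packed=12345):
//   value        = (12345 + (-3000)) / 10^2 = 93.45
//   scale_factor = 10^-2 = 0.01
//   add_offset   = -3000 * 0.01 = -30.0
//   0.01 * 12345 + (-30.0) = 93.45, so the CDM attributes reproduce the same value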
int scale10 = dataDesc.scale;
double scale = (scale10 == 0) ? 1.0 : Math.pow(10.0, -scale10);
if (scale10 != 0) {
v.addAttribute(new Attribute(CDM.SCALE_FACTOR, (float) scale));
}
if (dataDesc.refVal != 0) {
v.addAttribute(new Attribute(CDM.ADD_OFFSET, (float) scale * dataDesc.refVal));
}
}
annotate(v, fld);
v.addAttribute(new Attribute(BufrIosp2.fxyAttName, dataDesc.getFxyName()));
v.addAttribute(new Attribute("BUFR:bitWidth", dataDesc.bitWidth));
struct.addMemberVariable(v);
v.setSPobject(fld);
return v;
}
private String findUniqueName(Structure.Builder<?> struct, String want, String def) {
if (want == null) {
return def + tempNo++;
}
String vwant = NetcdfFiles.makeValidCdmObjectName(want);
Optional<Variable.Builder<?>> oldV = struct.findMemberVariable(vwant);
if (!oldV.isPresent()) {
return vwant;
}
int seq = 2;
while (true) {
String wantSeq = vwant + "-" + seq;
oldV = struct.findMemberVariable(wantSeq);
if (!oldV.isPresent()) {
return wantSeq;
}
seq++;
}
}
// force globally unique variable names, even when they are in different Structures.
// this allows us to promote structure members without worrying about name collisions
private Map<String, Integer> names = new HashMap<>(100);
private String findGloballyUniqueName(String want, String def) {
if (want == null) {
return def + tempNo++;
}
String vwant = NetcdfFiles.makeValidCdmObjectName(want);
Integer have = names.get(vwant);
if (have == null) {
names.put(vwant, 1);
return vwant;
} else {
have = have + 1;
String wantSeq = vwant + "-" + have;
names.put(vwant, have);
return wantSeq;
}
}
private void annotate(Variable.Builder v, BufrConfig.FieldConverter fld) {
if (fld.type == null) {
return;
}
switch (fld.type) {
case lat:
v.addAttribute(new Attribute(CDM.UNITS, CDM.LAT_UNITS));
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Lat.toString()));
coordinates.format("%s ", v.shortName);
break;
case lon:
v.addAttribute(new Attribute(CDM.UNITS, CDM.LON_UNITS));
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Lon.toString()));
coordinates.format("%s ", v.shortName);
break;
case height:
case heightOfStation:
case heightAboveStation:
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Height.toString()));
coordinates.format("%s ", v.shortName);
break;
case stationId:
v.addAttribute(new Attribute(CF.STANDARD_NAME, CF.STATION_ID));
break;
case wmoId:
v.addAttribute(new Attribute(CF.STANDARD_NAME, CF.STATION_WMOID));
break;
}
}
}


@ -0,0 +1,154 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
/**
* A class that contains static methods for converting multiple
* bytes into one float or integer.
*/
public final class BufrNumbers {
// all-ones bit patterns used to detect missing values, indexed by bit width (only widths up to 64 bits are populated)
private static final long[] missing_value = new long[2049];
static {
long accum = 0;
for (int i = 0; i < 65; i++) {
missing_value[i] = accum;
accum = accum * 2 + 1;
}
}
public static boolean isMissing(long raw, int bitWidth) {
return (raw == BufrNumbers.missing_value[bitWidth]);
}
static long missingValue(int bitWidth) {
return BufrNumbers.missing_value[bitWidth];
}
/** if missing value is not defined use this value. */
private static final int UNDEFINED = -9999;
/** Convert 2 bytes into a signed integer. */
static int int2(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
return int2(a, b);
}
/** Convert 2 bytes to a signed integer. */
private static int int2(int a, int b) {
if ((a == 0xff && b == 0xff)) // all bits set to one
return UNDEFINED;
return (1 - ((a & 128) >> 6)) * ((a & 127) << 8 | b);
}
/** Read 3 bytes and turn into a signed integer. */
static int int3(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
int c = raf.read();
return int3(a, b, c);
}
/** Convert 3 bytes to signed integer. */
private static int int3(int a, int b, int c) {
return (1 - ((a & 128) >> 6)) * ((a & 127) << 16 | b << 8 | c);
}
/** Convert 4 bytes into a signed integer. */
public static int int4(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
int c = raf.read();
int d = raf.read();
return int4(a, b, c, d);
}
/** Convert 4 bytes into a signed integer. */
private static int int4(int a, int b, int c, int d) {
// all bits set to ones
if (a == 0xff && b == 0xff && c == 0xff && d == 0xff)
return UNDEFINED;
return (1 - ((a & 128) >> 6)) * ((a & 127) << 24 | b << 16 | c << 8 | d);
} // end int4
/** Convert 2 bytes into an unsigned integer. */
static int uint2(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
return uint2(a, b);
}
/** Convert 2 bytes to an unsigned integer. */
private static int uint2(int a, int b) {
return a << 8 | b;
}
/** Read 3 bytes and convert into an unsigned integer. */
public static int uint3(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
int c = raf.read();
return uint3(a, b, c);
}
/** Convert 3 bytes into an unsigned int. */
private static int uint3(int a, int b, int c) {
return a << 16 | b << 8 | c;
}
/** Read 4 bytes and convert into a float value. */
public static float float4(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
int c = raf.read();
int d = raf.read();
return float4(a, b, c, d);
}
/** Convert 4 bytes to a float. */
private static float float4(int a, int b, int c, int d) {
int sgn, mant, exp;
mant = b << 16 | c << 8 | d;
if (mant == 0)
return 0.0f;
sgn = -(((a & 128) >> 6) - 1);
exp = (a & 127) - 64;
return (float) (sgn * Math.pow(16.0, exp - 6) * mant);
}
/** Read 8 bytes and convert into a signed long. */
public static long int8(RandomAccessFile raf) throws IOException {
int a = raf.read();
int b = raf.read();
int c = raf.read();
int d = raf.read();
int e = raf.read();
int f = raf.read();
int g = raf.read();
int h = raf.read();
return (1 - ((a & 128) >> 6))
* ((long) (a & 127) << 56 | (long) b << 48 | (long) c << 40 | (long) d << 32
| (long) e << 24 | f << 16 | g << 8 | h); // (long) cast on e avoids sign-extension when its high bit is set
}
}
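A small illustrative check of the all-ones missing-value convention, placed in the same package so the package-private missingValue() is visible; the printed values follow from the static initializer above.

package org.meteoinfo.data.meteodata.bufr;

public class BufrNumbersExample {
    public static void main(String[] args) {
        // missing_value[n] == 2^n - 1 (all bits set), per the static initializer above
        System.out.println(BufrNumbers.missingValue(8));    // 255
        System.out.println(BufrNumbers.missingValue(16));   // 65535
        System.out.println(BufrNumbers.isMissing(255, 8));  // true
        System.out.println(BufrNumbers.isMissing(254, 8));  // false
    }
}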


@ -0,0 +1,223 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.*;
import ucar.nc2.wmo.CommonCodeTable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.List;
/**
* Look up info in BUFR tables.
* Allows local center overrides for BUFR tables
*
* @author caron
* @since 8/22/13
*/
public class BufrTableLookup {
public static BufrTableLookup factory(Message m) throws IOException {
return new BufrTableLookup(m.is.getBufrEdition(), m.ids.getCenterId(), m.ids.getSubCenterId(),
m.ids.getMasterTableId(), m.ids.getMasterTableVersion(), m.ids.getLocalTableVersion(), m.ids.getCategory(),
m.ids.getSubCategory(), m.ids.getLocalSubCategory());
}
/*
* static public BufrTableLookup factory(int bufrEdition, int center, int subCenter, int masterId, int masterVersion,
* int localVersion,
* int category, int subCategory, int localSubCategory) {
* return new BufrTableLookup(bufrEdition, center, subCenter, masterId, masterVersion, localVersion, category,
* subCategory, localSubCategory);
* }
*/
//////////////////////////////////////////////////////////
private int center, subCenter, masterId, masterVersion, localVersion, bufrEdition, category, subCategory,
localSubCategory;
private BufrTableLookup(int bufrEdition, int center, int subCenter, int masterId, int masterVersion, int localVersion,
int category, int subCategory, int localSubCategory) throws IOException {
this.bufrEdition = bufrEdition;
this.center = center;
this.subCenter = subCenter;
this.masterId = masterId;
this.masterVersion = masterVersion;
this.localVersion = localVersion;
this.category = category;
this.subCategory = subCategory;
this.localSubCategory = localSubCategory;
tlookup = new TableLookup(center, subCenter, masterVersion, localVersion, category);
}
public int getBufrEdition() {
return bufrEdition;
}
public int getCenter() {
return center;
}
public int getSubCenter() {
return subCenter;
}
public int getMasterTableId() {
return masterId;
}
public int getMasterTableVersion() {
return masterVersion;
}
public int getLocalTableVersion() {
return localVersion;
}
public int getCategory() {
return category;
}
public int getSubCategory() {
return subCategory;
}
public int getLocalSubCategory() {
return localSubCategory;
}
public String getCenterName() {
String name = CommonCodeTable.getCenterNameBufr(getCenter(), getBufrEdition());
String subname = CommonCodeTable.getSubCenterName(getCenter(), getSubCenter());
if (subname != null)
name = name + " / " + subname;
return getCenter() + "." + getSubCenter() + " (" + name + ")";
}
public String getCenterNo() {
return getCenter() + "." + getSubCenter();
}
public String getTableName() {
return getMasterTableId() + "." + getMasterTableVersion() + "." + getLocalTableVersion();
}
public String getCategoryFullName() { // throws IOException {
String catName = getCategoryName();
String subcatName = getSubCategoryName();
if (subcatName != null)
return getCategoryNo() + "=" + catName + " / " + subcatName;
else
return getCategoryNo() + "=" + catName;
}
public String getSubCategoryName() { // throws IOException {
String subcatName = null;
if (center == 7)
subcatName = NcepTable.getDataSubcategory(getCategory(), getSubCategory());
if (subcatName == null)
subcatName = CommonCodeTable.getDataSubcategoy(getCategory(), getSubCategory());
return subcatName;
}
public String getCategoryName() {
return TableA.getDataCategoryName(getCategory());
}
public String getCategoryName(int cat) {
return TableA.getDataCategoryName(cat);
}
public String getCategoryNo() {
String result = getCategory() + "." + getSubCategory();
if (getLocalSubCategory() >= 0)
result += "." + getLocalSubCategory();
return result;
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
private TableLookup tlookup;
private void init() {
}
public void setTableLookup(TableLookup tlookup) {
this.tlookup = tlookup;
}
public TableA.Descriptor getDescriptorTableA(int code) {
return tlookup.getDescriptorTableA(code);
}
public TableB.Descriptor getDescriptorTableB(short fxy) {
return tlookup.getDescriptorTableB(fxy);
}
public TableD.Descriptor getDescriptorTableD(short fxy) {
return tlookup.getDescriptorTableD(fxy);
}
public String getWmoTableBName() {
return tlookup.getWmoTableBName();
}
public String getLocalTableBName() {
return tlookup.getLocalTableBName();
}
public String getLocalTableDName() {
return tlookup.getLocalTableDName();
}
public String getWmoTableDName() {
return tlookup.getWmoTableDName();
}
public BufrTables.Mode getMode() {
return tlookup.getMode();
}
public void showMissingFields(List<Short> ddsList, Formatter out) {
for (short fxy : ddsList) {
int f = (fxy & 0xC000) >> 14;
if (f == 3) {
List<Short> sublist = getDescriptorListTableD(fxy);
if (sublist == null)
out.format("%s, ", ucar.nc2.iosp.bufr.Descriptor.makeString(fxy));
else
showMissingFields(sublist, out);
} else if (f == 0) { // skip the 2- operators for now
TableB.Descriptor b = getDescriptorTableB(fxy);
if (b == null)
out.format("%s, ", ucar.nc2.iosp.bufr.Descriptor.makeString(fxy));
}
}
}
public List<String> getDescriptorListTableD(String fxy) {
short id = ucar.nc2.iosp.bufr.Descriptor.getFxy(fxy);
List<Short> seq = getDescriptorListTableD(id);
if (seq == null)
return null;
List<String> result = new ArrayList<>(seq.size());
for (Short s : seq)
result.add(Descriptor.makeString(s));
return result;
}
public List<Short> getDescriptorListTableD(short id) {
TableD.Descriptor d = getDescriptorTableD(id);
if (d != null)
return d.getSequence();
return null;
}
}
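A minimal usage sketch, placed in the same package and assuming a decoded Message m is already available (e.g. from a MessageScanner, as in BufrIosp2 above).

package org.meteoinfo.data.meteodata.bufr;

import java.io.IOException;

public class TableLookupExample {
    // print table/center/category info for a decoded BUFR message
    static void describe(Message m) throws IOException {
        BufrTableLookup lookup = BufrTableLookup.factory(m);
        System.out.println("center:   " + lookup.getCenterName());
        System.out.println("category: " + lookup.getCategoryFullName());
        System.out.println("tables:   " + lookup.getTableName());
    }
}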


@ -0,0 +1,473 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.ma2.DataType;
import ucar.ma2.InvalidRangeException;
import ucar.nc2.*;
import ucar.nc2.constants.AxisType;
import ucar.nc2.constants.CDM;
import ucar.nc2.constants.CF;
import ucar.nc2.constants._Coordinate;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import ucar.nc2.ft.point.bufr.StandardFields;
import org.meteoinfo.data.meteodata.bufr.tables.CodeFlagTables;
import java.io.IOException;
import java.util.Formatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* BufrIosp2 delegates the construction of the Netcdf objects to Construct2.
*
* @author caron
* @since 8/8/13
*/
class Construct2 {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(Construct2.class);
private static final boolean warnUnits = false;
private NetcdfFile ncfile;
private Sequence recordStructure;
private int centerId;
private Formatter coordinates = new Formatter();
Construct2(Message proto, BufrConfig bufrConfig, NetcdfFile nc) throws IOException {
this.ncfile = nc;
// dkeyRoot = dds.getDescriptorRoot();
// int nbits = dds.getTotalBits();
// int inputBytes = (nbits % 8 == 0) ? nbits / 8 : nbits / 8 + 1;
// int outputBytes = dds.getTotalBytes();
// the category
// int cat = proto.ids.getCategory();
// int subcat = proto.ids.getSubCategory();
// global Attributes
ncfile.addAttribute(null, new Attribute(CDM.HISTORY, "Read using CDM BufrIosp2"));
if (bufrConfig.getFeatureType() != null)
ncfile.addAttribute(null, CF.FEATURE_TYPE, bufrConfig.getFeatureType().toString());
ncfile.addAttribute(null, "location", nc.getLocation());
ncfile.addAttribute(null, "BUFR:categoryName", proto.getLookup().getCategoryName());
ncfile.addAttribute(null, "BUFR:subCategoryName", proto.getLookup().getSubCategoryName());
ncfile.addAttribute(null, "BUFR:centerName", proto.getLookup().getCenterName());
ncfile.addAttribute(null, new Attribute("BUFR:category", proto.ids.getCategory()));
ncfile.addAttribute(null, new Attribute("BUFR:subCategory", proto.ids.getSubCategory()));
ncfile.addAttribute(null, new Attribute("BUFR:localSubCategory", proto.ids.getLocalSubCategory()));
ncfile.addAttribute(null, new Attribute(BufrIosp2.centerId, proto.ids.getCenterId()));
ncfile.addAttribute(null, new Attribute("BUFR:subCenter", proto.ids.getSubCenterId()));
// ncfile.addAttribute(null, "BUFR:tableName", proto.ids.getMasterTableFilename()));
ncfile.addAttribute(null, new Attribute("BUFR:table", proto.ids.getMasterTableId()));
ncfile.addAttribute(null, new Attribute("BUFR:tableVersion", proto.ids.getMasterTableVersion()));
ncfile.addAttribute(null, new Attribute("BUFR:localTableVersion", proto.ids.getLocalTableVersion()));
ncfile.addAttribute(null, "Conventions", "BUFR/CDM");
ncfile.addAttribute(null, new Attribute("BUFR:edition", proto.is.getBufrEdition()));
centerId = proto.ids.getCenterId();
String header = proto.getHeader();
if (header != null && !header.isEmpty())
ncfile.addAttribute(null, new Attribute("WMO Header", header));
makeObsRecord(bufrConfig);
String coordS = coordinates.toString();
if (!coordS.isEmpty())
recordStructure.addAttribute(new Attribute("coordinates", coordS));
ncfile.finish();
}
Sequence getObsStructure() {
return recordStructure;
}
private void makeObsRecord(BufrConfig bufrConfig) {
recordStructure = new Sequence(ncfile, null, null, BufrIosp2.obsRecordName);
ncfile.addVariable(null, recordStructure);
BufrConfig.FieldConverter root = bufrConfig.getRootConverter();
for (BufrConfig.FieldConverter fld : root.flds) {
DataDescriptor dkey = fld.dds;
if (!dkey.isOkForVariable())
continue;
if (dkey.replication == 0) {
addSequence(recordStructure, fld);
} else if (dkey.replication > 1) {
List<BufrConfig.FieldConverter> subFlds = fld.flds;
List<DataDescriptor> subKeys = dkey.subKeys;
if (subKeys.size() == 1) { // only one member
DataDescriptor subDds = dkey.subKeys.get(0);
BufrConfig.FieldConverter subFld = subFlds.get(0);
if (subDds.dpi != null) {
addDpiStructure(recordStructure, fld, subFld);
} else if (subDds.replication == 1) { // one member not a replication
Variable v = addVariable(recordStructure, subFld, dkey.replication);
v.setSPobject(fld); // set the replicating field as SPI object
} else { // one member is a replication (two replications in a row)
addStructure(recordStructure, fld, dkey.replication);
}
} else if (subKeys.size() > 1) {
addStructure(recordStructure, fld, dkey.replication);
}
} else { // replication == 1
addVariable(recordStructure, fld, dkey.replication);
}
}
}
private void addStructure(Structure parent, BufrConfig.FieldConverter fld, int count) {
DataDescriptor dkey = fld.dds;
String uname = findUniqueName(parent, fld.getName(), "struct");
dkey.name = uname; // name may need to be changed for uniqueness
// String structName = dataDesc.name != null ? dataDesc.name : "struct" + structNum++;
Structure struct = new Structure(ncfile, null, parent, uname);
try {
struct.setDimensionsAnonymous(new int[]{count}); // anon vector
} catch (InvalidRangeException e) {
log.error("illegal count= " + count + " for " + fld);
}
for (BufrConfig.FieldConverter subKey : fld.flds)
addMember(struct, subKey);
parent.addMemberVariable(struct);
struct.setSPobject(fld);
}
private void addSequence(Structure parent, BufrConfig.FieldConverter fld) {
DataDescriptor dkey = fld.dds;
String uname = findUniqueName(parent, fld.getName(), "seq");
dkey.name = uname; // name may need to be changed for uniqueness
// String seqName = ftype == (FeatureType.STATION_PROFILE) ? "profile" : "seq";
// String seqName = dataDesc.name != null ? dataDesc.name : "seq" + seqNum++;
Sequence seq = new Sequence(ncfile, null, parent, uname);
seq.setDimensions(""); // scalar
for (BufrConfig.FieldConverter subKey : fld.flds)
addMember(seq, subKey);
parent.addMemberVariable(seq);
seq.setSPobject(fld);
dkey.refersTo = seq;
}
private void addMember(Structure parent, BufrConfig.FieldConverter fld) {
DataDescriptor dkey = fld.dds;
if (dkey.replication == 0)
addSequence(parent, fld);
else if (dkey.replication > 1) {
List<DataDescriptor> subKeys = dkey.subKeys;
if (subKeys.size() == 1) {
BufrConfig.FieldConverter subFld = fld.flds.get(0);
Variable v = addVariable(parent, subFld, dkey.replication);
v.setSPobject(fld); // set the replicating field as SPI object
} else {
addStructure(parent, fld, dkey.replication);
}
} else {
addVariable(parent, fld, dkey.replication);
}
}
private void addDpiStructure(Structure parent, BufrConfig.FieldConverter parentFld,
BufrConfig.FieldConverter dpiField) {
DataDescriptor dpiKey = dpiField.dds;
String uname = findUniqueName(parent, dpiField.getName(), "struct");
dpiKey.name = uname; // name may need to be changed for uniqueness
// String structName = findUnique(parent, dpiField.name);
Structure struct = new Structure(ncfile, null, parent, uname);
int n = parentFld.dds.replication;
try {
struct.setDimensionsAnonymous(new int[]{n}); // anon vector
} catch (InvalidRangeException e) {
log.error("illegal count= " + 1 + " for " + dpiField);
}
Variable v = new Variable(ncfile, null, struct, "name");
v.setDataType(DataType.STRING); // scalar
v.setDimensions(""); // scalar
struct.addMemberVariable(v);
v = new Variable(ncfile, null, struct, "data");
v.setDataType(DataType.FLOAT); // scalar
v.setDimensions(""); // scalar
struct.addMemberVariable(v);
parent.addMemberVariable(struct);
struct.setSPobject(dpiField); // ??
// add some fake dkeys corresponding to above
// DataDescriptor nameDD = new DataDescriptor();
}
private void addDpiSequence(Structure parent, BufrConfig.FieldConverter fld) {
Structure struct = new Structure(ncfile, null, parent, "statistics");
try {
struct.setDimensionsAnonymous(new int[]{fld.dds.replication}); // scalar
} catch (InvalidRangeException e) {
e.printStackTrace();
}
Variable v = new Variable(ncfile, null, struct, "name");
v.setDataType(DataType.STRING); // scalar
v.setDimensions(""); // scalar
struct.addMemberVariable(v);
v = new Variable(ncfile, null, struct, "data");
v.setDataType(DataType.FLOAT); // scalar
v.setDimensions(""); // scalar
struct.addMemberVariable(v);
parent.addMemberVariable(struct);
}
private Variable addVariable(Structure struct, BufrConfig.FieldConverter fld, int count) {
DataDescriptor dkey = fld.dds;
String uname = findGloballyUniqueName(fld.getName(), "unknown");
dkey.name = uname; // name may need to be changed for uniqueness
Variable v = new Variable(ncfile, null, struct, uname);
try {
if (count > 1)
v.setDimensionsAnonymous(new int[]{count}); // anon vector
else
v.setDimensions(""); // scalar
} catch (InvalidRangeException e) {
log.error("illegal count= " + count + " for " + fld);
}
if (fld.getDesc() != null)
v.addAttribute(new Attribute(CDM.LONG_NAME, fld.getDesc()));
if (fld.getUnits() == null) {
if (warnUnits)
log.warn("dataDesc.units == null for " + uname);
} else {
String units = fld.getUnits();
if (DataDescriptor.isCodeTableUnit(units)) {
v.addAttribute(new Attribute(CDM.UNITS, "CodeTable " + fld.dds.getFxyName()));
} else if (DataDescriptor.isFlagTableUnit(units)) {
v.addAttribute(new Attribute(CDM.UNITS, "FlagTable " + fld.dds.getFxyName()));
} else if (!DataDescriptor.isInternationalAlphabetUnit(units) && !units.startsWith("Numeric")) {
v.addAttribute(new Attribute(CDM.UNITS, units));
}
}
DataDescriptor dataDesc = fld.dds;
if (dataDesc.type == 1) {
v.setDataType(DataType.CHAR);
int size = dataDesc.bitWidth / 8;
try {
v.setDimensionsAnonymous(new int[]{size});
} catch (InvalidRangeException e) {
e.printStackTrace();
}
} else if ((dataDesc.type == 2) && CodeFlagTables.hasTable(dataDesc.fxy)) { // enum
int nbits = dataDesc.bitWidth;
int nbytes = (nbits % 8 == 0) ? nbits / 8 : nbits / 8 + 1;
CodeFlagTables ct = CodeFlagTables.getTable(dataDesc.fxy);
if (nbytes == 1)
v.setDataType(DataType.ENUM1);
else if (nbytes == 2)
v.setDataType(DataType.ENUM2);
else if (nbytes == 4)
v.setDataType(DataType.ENUM4);
// v.removeAttribute(CDM.UNITS);
v.addAttribute(new Attribute("BUFR:CodeTable", ct.getName() + " (" + dataDesc.getFxyName() + ")"));
Group g = struct.getParentGroupOrRoot();
if (g == null)
log.warn("Struct parent group is null.");
EnumTypedef enumTypedef = g.findEnumeration(ct.getName());
if (enumTypedef == null) {
enumTypedef = new EnumTypedef(ct.getName(), ct.getMap());
g.addEnumeration(enumTypedef);
}
v.setEnumTypedef(enumTypedef);
} else {
int nbits = dataDesc.bitWidth;
// use of unsigned seems fishy, since only time it uses high bit is for missing
// not necessarily true, just when they "add one bit" to deal with missing case
if (nbits < 9) {
v.setDataType(DataType.BYTE);
if (nbits == 8) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (short) BufrNumbers.missingValue(nbits)));
} else
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (byte) BufrNumbers.missingValue(nbits)));
} else if (nbits < 17) {
v.setDataType(DataType.SHORT);
if (nbits == 16) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
} else
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (short) BufrNumbers.missingValue(nbits)));
} else if (nbits < 33) {
v.setDataType(DataType.INT);
if (nbits == 32) {
v.addAttribute(new Attribute(CDM.UNSIGNED, "true"));
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
} else
v.addAttribute(new Attribute(CDM.MISSING_VALUE, (int) BufrNumbers.missingValue(nbits)));
} else {
v.setDataType(DataType.LONG);
v.addAttribute(new Attribute(CDM.MISSING_VALUE, BufrNumbers.missingValue(nbits)));
}
// value = scale_factor * packed + add_offset
// bpacked = (value * 10^scale - refVal)
// (bpacked + refVal) / 10^scale = value
// value = bpacked * 10^-scale + refVal * 10^-scale
// scale_factor = 10^-scale
// add_offset = refVal * 10^-scale
int scale10 = dataDesc.scale;
double scale = (scale10 == 0) ? 1.0 : Math.pow(10.0, -scale10);
if (scale10 != 0)
v.addAttribute(new Attribute(CDM.SCALE_FACTOR, (float) scale));
if (dataDesc.refVal != 0)
v.addAttribute(new Attribute(CDM.ADD_OFFSET, (float) scale * dataDesc.refVal));
}
annotate(v, fld);
v.addAttribute(new Attribute(BufrIosp2.fxyAttName, dataDesc.getFxyName()));
v.addAttribute(new Attribute("BUFR:bitWidth", dataDesc.bitWidth));
struct.addMemberVariable(v);
v.setSPobject(fld);
return v;
}
private int tempNo = 1;
private String findUniqueName(Structure struct, String want, String def) {
if (want == null)
return def + tempNo++;
String vwant = NetcdfFile.makeValidCdmObjectName(want);
Variable oldV = struct.findVariable(vwant);
if (oldV == null)
return vwant;
int seq = 2;
while (true) {
String wantSeq = vwant + "-" + seq;
oldV = struct.findVariable(wantSeq);
if (oldV == null)
return wantSeq;
seq++;
}
}
// force globally unique variable names, even when they are in different Structures.
// this allows us to promote structure members without worrying about name collisions
private Map<String, Integer> names = new HashMap<>(100);
private String findGloballyUniqueName(String want, String def) {
if (want == null)
return def + tempNo++;
String vwant = NetcdfFile.makeValidCdmObjectName(want);
Integer have = names.get(vwant);
if (have == null) {
names.put(vwant, 1);
return vwant;
} else {
have = have + 1;
String wantSeq = vwant + "-" + have;
names.put(vwant, have);
return wantSeq;
}
}
private void annotate(Variable v, BufrConfig.FieldConverter fld) {
if (fld.type == null)
return;
switch (fld.type) {
case lat:
v.addAttribute(new Attribute(CDM.UNITS, CDM.LAT_UNITS));
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Lat.toString()));
coordinates.format("%s ", v.getShortName());
break;
case lon:
v.addAttribute(new Attribute(CDM.UNITS, CDM.LON_UNITS));
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Lon.toString()));
coordinates.format("%s ", v.getShortName());
break;
case height:
case heightOfStation:
case heightAboveStation:
v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Height.toString()));
coordinates.format("%s ", v.getShortName());
break;
case stationId:
v.addAttribute(new Attribute(CF.STANDARD_NAME, CF.STATION_ID));
break;
case wmoId:
v.addAttribute(new Attribute(CF.STANDARD_NAME, CF.STATION_WMOID));
break;
}
}
private void annotateObs(Sequence recordStructure) {
StandardFields.StandardFieldsFromStructure extract =
new StandardFields.StandardFieldsFromStructure(centerId, recordStructure);
try (Formatter f = new Formatter()) {
String name = extract.getFieldName(BufrCdmIndexProto.FldType.lat);
if (name != null)
f.format("%s ", name);
name = extract.getFieldName(BufrCdmIndexProto.FldType.lon);
if (name != null)
f.format("%s ", name);
name = extract.getFieldName(BufrCdmIndexProto.FldType.height);
if (name != null)
f.format("%s ", name);
name = extract.getFieldName(BufrCdmIndexProto.FldType.heightAboveStation);
if (name != null)
f.format("%s ", name);
recordStructure.addAttribute(new Attribute("coordinates", f.toString()));
}
}
}


@ -0,0 +1,429 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.TableB;
import org.meteoinfo.data.meteodata.bufr.tables.TableC;
import ucar.nc2.Sequence;
import java.util.List;
import java.util.Objects;
/**
* Essentially a TableB entry, modified by any relevant TableC operators.
* TableD has been expanded.
* Replication gets made into nested DataDescriptors, which we map to Structures (fixed replication) or
* Sequences (deferred replication).
* Most of the processing is done by DataDescriptorTreeConstructor.convert().
* Here we encapsulate the final result, ready to map to the CDM.
*
* @author caron
* @since Apr 5, 2008
*/
public class DataDescriptor {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DataDescriptor.class);
////////////////////////////////
// from the TableB.Descriptor
short fxy;
int f, x, y;
String name;
private String units, desc, source;
private boolean localOverride;
boolean bad; // no descriptor found
// may get modified by TableC operators
int scale;
int refVal;
int bitWidth;
int type; // 0 = isNumeric, 1 = isString, 2 = isEnum, 3 = compound;
// replication info
List<DataDescriptor> subKeys;
int replication = 1; // number of replications, essentially dk.y when sk.f == 1
int replicationCountSize; // for delayed replication : size of count in bits
int repetitionCountSize; // for delayed repetition
AssociatedField assField; // associated field == 02 04 Y, Y number of extra bits
Sequence refersTo; // needed for nested sequence objects
DataDescriptorTreeConstructor.DataPresentIndicator dpi;
DataDescriptor() {
}
public DataDescriptor(short fxy, BufrTableLookup lookup) {
this.fxy = fxy;
this.f = (fxy & 0xC000) >> 14;
this.x = (fxy & 0x3F00) >> 8;
this.y = fxy & 0xFF;
TableB.Descriptor db;
if (f == 0) {
db = lookup.getDescriptorTableB(fxy);
if (db != null)
setDescriptor(db);
else {
bad = true;
this.name = "*NOT FOUND";
}
} else if (f == 1) // replication
this.type = 3; // compound
else if (f == 2) {
this.name = TableC.getOperatorName(x);
}
}
/**
* Test if unit string indicates that the data are 7-bit coded characters following
* the International Reference Alphabet (formally known as the International Alphabet
* No.5 (IA5)) Recommendation/International Standard from the International Telegraph
* and Telephone Consultative Committee (CCITT)
* <p>
* https://www.itu.int/rec/T-REC-T.50/en
*
* @param unitString unit
* @return If true, treat the data as 7-bit coded International Reference Alphabet Characters
*/
public static boolean isInternationalAlphabetUnit(String unitString) {
String testUnitString = unitString.toLowerCase();
return testUnitString.startsWith("ccitt");
}
/**
* Test if the unit string indicates that we are dealing with data associated with a code table
*
* @param unitString unit
* @return If true, the unit indicates we are working with data associated with a code table
*/
public static boolean isCodeTableUnit(String unitString) {
String testUnitString = unitString.toLowerCase();
return testUnitString.equalsIgnoreCase("Code Table") || testUnitString.equalsIgnoreCase("Code_Table")
|| testUnitString.startsWith("codetable");
}
/**
* Test if the unit string indicates that we are dealing with data associated with a flag table
*
* @param unitString unit
* @return If true, the unit indicates we are working with data associated with a flag table
*/
public static boolean isFlagTableUnit(String unitString) {
String testUnitString = unitString.toLowerCase();
return testUnitString.equalsIgnoreCase("Flag Table") || testUnitString.equalsIgnoreCase("Flag_Table")
|| testUnitString.startsWith("flagtable");
}
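// Typical Table B unit strings these predicates are intended to match (illustrative):
//   "CCITT IA5"  -> isInternationalAlphabetUnit (7-bit character data)
//   "CODE TABLE" -> isCodeTableUnit             (enumerated values)
//   "FLAG TABLE" -> isFlagTableUnit             (bit flags)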
private void setDescriptor(TableB.Descriptor d) {
this.name = d.getName().trim();
this.units = d.getUnits().trim();
this.desc = d.getDesc();
this.refVal = d.getRefVal();
this.scale = d.getScale();
this.bitWidth = d.getDataWidth();
this.localOverride = d.getLocalOverride();
this.source = d.getSource();
if (isInternationalAlphabetUnit(units)) {
this.type = 1; // String
}
// LOOK what about flag table ??
if (isCodeTableUnit(units)) {
this.type = 2; // enum
}
}
/*
* for dpi fields
* DataDescriptor makeStatField(String statType) {
* DataDescriptor statDD = new DataDescriptor();
* statDD.name = name + "_" + statType;
* statDD.units = units;
* statDD.refVal = 0;
*
* return statDD;
* }
*/
// for associated fields
DataDescriptor makeAssociatedField(int bitWidth) {
DataDescriptor assDD = new DataDescriptor();
assDD.name = name + "_associated_field";
assDD.units = "";
assDD.refVal = 0;
assDD.scale = 0;
assDD.bitWidth = bitWidth;
assDD.type = 0;
assDD.f = 0;
assDD.x = 31;
assDD.y = 22;
assDD.fxy = (short) ((f << 14) + (x << 8) + (y));
return assDD;
}
static class AssociatedField {
int nbits;
int nfields;
String dataFldName;
AssociatedField(int nbits) {
this.nbits = nbits;
}
}
public List<DataDescriptor> getSubKeys() {
return subKeys;
}
boolean isOkForVariable() {
return (f == 0) || (f == 1) || ((f == 2) && (x == 5)) || ((f == 2) && (x == 24) && (y == 255));
}
public boolean isLocal() {
if ((f == 0) || (f == 3)) {
return (x >= 48) || (y >= 192);
}
return false;
}
public boolean isLocalOverride() {
return localOverride;
}
public String getFxyName() {
return Descriptor.makeString(f, x, y);
}
public short getFxy() {
return fxy;
}
public String getName() {
return name;
}
public String getSource() {
return source;
}
public int getType() {
return type;
}
public int getScale() {
return scale;
}
public int getRefVal() {
return refVal;
}
public String getUnits() {
return units;
}
public String getDesc() {
return desc;
}
public float convert(long raw) {
if (ucar.nc2.iosp.bufr.BufrNumbers.isMissing(raw, bitWidth))
return Float.NaN;
// bpacked = (value * 10^scale - refVal)
// value = (bpacked + refVal) / 10^scale
float fscale = (float) Math.pow(10.0, -scale); // LOOK precompute ??
float fval = (raw + refVal);
return fscale * fval;
}
public static float convert(long raw, int scale, int refVal, int bitWidth) {
if (BufrNumbers.isMissing(raw, bitWidth))
return Float.NaN;
// bpacked = (value * 10^scale - refVal)
// value = (bpacked + refVal) / 10^scale
float fscale = (float) Math.pow(10.0, -scale); // LOOK precompute ??
float fval = (raw + refVal);
return fscale * fval;
}
/**
* Transfer info from the "proto message" to another message with the exact same structure.
*
* @param fromList transfer from here
* @param toList to here
*/
static void transferInfo(List<DataDescriptor> fromList, List<DataDescriptor> toList) { // get info from proto
// message
if (fromList.size() != toList.size())
throw new IllegalArgumentException("list sizes dont match " + fromList.size() + " != " + toList.size());
for (int i = 0; i < fromList.size(); i++) {
DataDescriptor from = fromList.get(i);
DataDescriptor to = toList.get(i);
to.refersTo = from.refersTo;
to.name = from.name;
if (from.getSubKeys() != null)
transferInfo(from.getSubKeys(), to.getSubKeys());
}
}
///////////////////////////////////////////////////////////////////////////////////////////////
private int total_nbytesCDM;
/**
* count the bits used by the data in this dd and its children
* only accurate for not compressed, and not variable length
*
* @return bits used by the data in the file
*/
int countBits() {
int total_nbits = 0;
total_nbytesCDM = 0;
for (DataDescriptor dd : subKeys) {
if (dd.subKeys != null) {
total_nbits += dd.countBits();
total_nbytesCDM += dd.total_nbytesCDM;
} else if (dd.f == 0) {
total_nbits += dd.bitWidth;
total_nbytesCDM += dd.getByteWidthCDM();
}
}
// replication
if (replication > 1) {
total_nbits *= replication;
total_nbytesCDM *= replication;
}
return total_nbits;
}
public int getBitWidth() {
return bitWidth;
}
/**
* Get the number of bytes the CDM datatype will take
*
* @return the number of bytes the CDM datatype will take
*/
int getByteWidthCDM() {
if (type == 1) // string
return bitWidth / 8;
if (type == 3) // compound
return total_nbytesCDM;
// numeric or enum
if (bitWidth < 9)
return 1;
if (bitWidth < 17)
return 2;
if (bitWidth < 33)
return 4;
return 8;
}
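/*
* Examples of the mapping above (illustrative): a 13-bit numeric field is promoted to 2 bytes,
* a 25-bit field to 4 bytes, and a CCITT IA5 string with bitWidth=192 occupies 192 / 8 = 24 bytes.
*/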
public String toString() {
String id = getFxyName();
StringBuilder sbuff = new StringBuilder();
if (f == 0) {
sbuff.append(getFxyName()).append(": ");
sbuff.append(name).append(" units=").append(units);
if (type == 0) {
sbuff.append(" scale=").append(scale).append(" refVal=").append(refVal);
sbuff.append(" nbits=").append(bitWidth);
} else if (type == 1) {
sbuff.append(" nchars=").append(bitWidth / 8);
} else {
sbuff.append(" enum nbits=").append(bitWidth);
}
} else if (f == 1) {
sbuff.append(id).append(": ").append("Replication");
if (replication != 1)
sbuff.append(" count=").append(replication);
if (replicationCountSize != 0)
sbuff.append(" replicationCountSize=").append(replicationCountSize);
if (repetitionCountSize != 0)
sbuff.append(" repetitionCountSize=").append(repetitionCountSize);
if (name != null)
sbuff.append(": " + name);
} else if (f == 2) {
String desc = TableC.getOperatorName(x);
if (desc == null)
desc = "Operator";
sbuff.append(id).append(": ").append(desc);
} else
sbuff.append(id).append(": ").append(name);
return sbuff.toString();
}
/////////////////////////////////
// stuff for the root
boolean isVarLength;
boolean isBad;
int total_nbits;
public int getTotalBits() {
return total_nbits;
}
public boolean isVarLength() {
return isVarLength;
}
////////////////////////////////////////////////////////////////////////////////////////////////
// LOOK need different hashCode, reader assumes using object id
public boolean equals2(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
DataDescriptor that = (DataDescriptor) o;
if (fxy != that.fxy)
return false;
if (replication != that.replication)
return false;
if (type != that.type)
return false;
return Objects.equals(subKeys, that.subKeys);
}
public int hashCode2() {
int result = (int) fxy;
result = 31 * result + type;
result = 31 * result + getListHash();
result = 31 * result + replication;
return result;
}
// has to use hashCode2, so can't use list.hashCode()
private int getListHash() {
if (subKeys == null)
return 0;
int result = 1;
for (DataDescriptor e : subKeys)
result = 31 * result + (e == null ? 0 : e.hashCode2());
return result;
}
}

View File

@ -0,0 +1,498 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.TableD;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Convert a list of data descriptors to a tree of DataDescriptor objects.
* Expand Table D, process table C operators.
*
* @author caron
* @since Jul 14, 2008
*/
public class DataDescriptorTreeConstructor {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DataDescriptorTreeConstructor.class);
//////////////////////////////////////////////////////////////////////////////////
private DataDescriptor root;
public DataDescriptor factory(BufrTableLookup lookup, BufrDataDescriptionSection dds) {
root = new DataDescriptor();
// convert ids to DataDescriptor
List<DataDescriptor> keys = decode(dds.getDataDescriptors(), lookup);
// deal with f3-60-4
keys = preflatten(keys);
// try to find useful struct names
grabCompoundNames(keys);
// make replicated keys into subKeys, constituting a tree
List<DataDescriptor> tree = replicate(keys);
// flatten the compounds
root.subKeys = new ArrayList<>();
flatten(root.subKeys, tree);
// process the operators
operate(root.subKeys);
// count the size
root.total_nbits = root.countBits();
return root;
}
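/*
* Minimal usage sketch (hypothetical variable names; assumes a Message m whose tables have been
* resolved). The same call is made internally by Message.getRootDataDescriptor(), which caches the
* result:
*
* DataDescriptorTreeConstructor c = new DataDescriptorTreeConstructor();
* DataDescriptor root = c.factory(m.getLookup(), m.dds);
* for (DataDescriptor dd : root.getSubKeys())
* System.out.printf("%s%n", dd);
*/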
// convert ids to DataDescriptors, expand table D
private List<DataDescriptor> decode(List<Short> keyDesc, BufrTableLookup lookup) {
if (keyDesc == null)
return null;
List<DataDescriptor> keys = new ArrayList<>();
for (short id : keyDesc) {
DataDescriptor dd = new DataDescriptor(id, lookup);
keys.add(dd);
if (dd.f == 3) {
TableD.Descriptor tdd = lookup.getDescriptorTableD(dd.fxy);
if (tdd == null || tdd.getSequence() == null) {
dd.bad = true;
} else {
dd.name = tdd.getName();
dd.subKeys = decode(tdd.getSequence(), lookup);
}
}
}
return keys;
}
// look for replication, move replicated items into subtree
private List<DataDescriptor> replicate(List<DataDescriptor> keys) {
List<DataDescriptor> tree = new ArrayList<>();
Iterator<DataDescriptor> dkIter = keys.iterator();
while (dkIter.hasNext()) {
DataDescriptor dk = dkIter.next();
if (dk.f == 1) {
dk.subKeys = new ArrayList<>();
dk.replication = dk.y; // replication count
if (dk.replication == 0) { // delayed replication
root.isVarLength = true; // variable sized data == deferred replication == sequence data
// the next one is the replication count size : does not count in field count (x)
DataDescriptor replication = dkIter.next();
// see https://github.com/Unidata/netcdf-java/issues/1282
if (replication.x == 31)
dk.replicationCountSize = replication.bitWidth;
// Not sure about the following hard-coded values, or whether the previous condition (replication.x == 31) already
// captures those cases automatically. Ideally a BUFR expert should look over these.
else if (replication.y == 0)
dk.replicationCountSize = 1; // ??
else if (replication.y == 1)
dk.replicationCountSize = 8;
else if (replication.y == 2)
dk.replicationCountSize = 16;
else if (replication.y == 11)
dk.repetitionCountSize = 8;
else if (replication.y == 12)
dk.repetitionCountSize = 16;
else
log.error("Unknown replication type= " + replication);
}
// transfer to the subKey list
for (int j = 0; j < dk.x && dkIter.hasNext(); j++) {
dk.subKeys.add(dkIter.next());
}
// recurse
dk.subKeys = replicate(dk.subKeys);
} else if ((dk.f == 3) && (dk.subKeys != null)) {
dk.subKeys = replicate(dk.subKeys); // do at all levels
}
tree.add(dk);
}
return tree;
}
/*
* Use case:
* 3-62-1 : HEADR
* 0-4-194 : FORECAST TIME
* 0-1-205 : STATION NUMBER -- 6 DIGITS
* 0-1-198 : REPORT IDENTIFIER
* 0-5-2 : Latitude (coarse accuracy)
* 0-6-2 : Longitude (coarse accuracy)
* 0-10-194: GRID-POINT ELEVATION
* 0-2-196 : CLASS OF PROFILE OUTPUT
* 3-60-2 :
* 1-01-000: replication
* 0-31-1 : Delayed descriptor replication factor
* 3-62-2 : PROFILE
* 0-10-4 : Pressure
* 0-12-1 : Temperature/dry-bulb temperature
* 0-11-3 : u-component
*
* where the 3-62-2 should be replicated.
* This is from NCEP bufrtab.ETACLS1. Not sure if others use this idiom.
*
* Use case 2:
* not just top level
* 3-61-37 : TMPSQ1 SYNOPTIC REPORT TEMPERATURE DATA
* 0-33-193: QMAT
* 0-12-101: TMDB
* 0-33-194: QMDD
* 0-12-103: TMDP
* 0-2-38 : MSST
* 0-33-218: QMST
* 0-22-43 : SST1
* 3-60-4 : DRP1BIT
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-38 : TMPSQ2 SYNOPTIC REPORT WET BULB TEMPERATURE DATA
* 0-2-39 : MWBT
* 0-12-102: TMWB
* 0-13-3 : REHU
* 3-60-4 : DRP1BIT
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-39 : TMPSQ3 SYNOPTIC REPORT MAXIMUM AND MINIMUM TEMPERATURE DATA
* 0-4-31 : DTH
* 0-12-111: MXTM
* 0-4-31 : DTH
* 0-12-112: MITM
*
* I think that a 3-60-4 should just be flattened:
* 3-61-37 : TMPSQ1 SYNOPTIC REPORT TEMPERATURE DATA
* 0-33-193: QMAT
* 0-12-101: TMDB
* 0-33-194: QMDD
* 0-12-103: TMDP
* 0-2-38 : MSST
* 0-33-218: QMST
* 0-22-43 : SST1
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-38 : TMPSQ2 SYNOPTIC REPORT WET BULB TEMPERATURE DATA
* 0-2-39 : MWBT
* 0-12-102: TMWB
* 0-13-3 : REHU
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-39 : TMPSQ3 SYNOPTIC REPORT MAXIMUM AND MINIMUM TEMPERATURE DATA
* 0-4-31 : DTH
* 0-12-111: MXTM
* 0-4-31 : DTH
* 0-12-112: MITM
*/
// LOOK this is NCEP specific !!
static boolean isNcepDRP(DataDescriptor key) {
return key.f == 3 && key.x == 60;
}
private List<DataDescriptor> preflatten(List<DataDescriptor> tree) {
if (tree == null)
return null;
// do we need to flatten, ie have f3604 ??
boolean flatten = false;
for (DataDescriptor key : tree) {
if (isNcepDRP(key))
flatten = true;
}
if (flatten) {
List<DataDescriptor> result = new ArrayList<>(tree.size());
for (DataDescriptor key : tree) {
if (isNcepDRP(key)) {
result.addAll(key.subKeys); // remove f3604
} else {
result.add(key); // leave others
}
}
tree = result;
}
// recurse
for (DataDescriptor key : tree) {
key.subKeys = preflatten(key.subKeys);
}
return tree;
}
/*
* try to grab names of compounds (structs)
* if f=1 is followed by f=3, eg:
* 0-40-20 : GQisFlagQualDetailed - Quality flag for the system
* 1-01-010: replication
* 3-40-2 : (IASI Level 1c band description)
* 0-25-140: Start channel
* 0-25-141: End channel
* 0-25-142: Channel scale factor
* 1-01-087: replication
* 3-40-3 : (IASI Level 1c 100 channels)
* 1-04-100: replication
* 2-01-136: Operator= change data width
* 0-5-42 : Channel number
* 2-01-000: Operator= change data width
* 0-14-46 : Scaled IASI radiance
* 0-2-19 : Satellite instruments
* 0-25-51 : AVHRR channel combination
* 1-01-007: replication
* 3-40-4 : (IASI Level 1c AVHRR single scene)
* 0-5-60 : Y angular position from centre of gravity
* 0-5-61 : Z angular position from centre of gravity
* 0-25-85 : Fraction of clear pixels in HIRS FOV
* ...
*
* sequence:
* 3-60-4 : DRP1BIT
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-38 : TMPSQ2 SYNOPTIC REPORT WET BULB TEMPERATURE DATA
* 0-2-39 : MWBT
* 0-12-102: TMWB
* 0-13-3 : REHU
*
* which has been preflattened into:
*
* 1-01-000: replication
* 0-31-0 : DRF1BIT
* 3-61-38 : TMPSQ2 SYNOPTIC REPORT WET BULB TEMPERATURE DATA
* 0-2-39 : MWBT
* 0-12-102: TMWB
* 0-13-3 : REHU
*
*
*/
private void grabCompoundNames(List<DataDescriptor> tree) {
for (int i = 0; i < tree.size(); i++) {
DataDescriptor key = tree.get(i);
if (key.bad)
continue;
if ((key.f == 3) && (key.subKeys != null)) {
grabCompoundNames(key.subKeys);
} else if (key.f == 1 && key.x == 1 && i < tree.size() - 1) { // replicator with 1 element
DataDescriptor nextKey = tree.get(i + 1);
if (nextKey.f == 3) { // the one element is a compound
if (nextKey.name != null && !nextKey.name.isEmpty())
key.name = nextKey.name;
} else if (key.y == 0 && i < tree.size() - 2) { // seq has an extra key before the 3
DataDescriptor nnKey = tree.get(i + 2);
if (nnKey.f == 3)
if (nnKey.name != null && !nnKey.name.isEmpty())
key.name = nnKey.name;
}
}
}
}
// flatten the compounds (type 3), but don't remove the bad ones
private void flatten(List<DataDescriptor> result, List<DataDescriptor> tree) {
for (DataDescriptor key : tree) {
if (key.bad) {
root.isBad = true;
result.add(key); // add it anyway so we can see it in debug
continue;
}
if ((key.f == 3) && (key.subKeys != null)) {
flatten(result, key.subKeys);
} else if (key.f == 1) { // flatten the subtrees
List<DataDescriptor> subTree = new ArrayList<>();
flatten(subTree, key.subKeys);
key.subKeys = subTree;
result.add(key);
} else {
result.add(key);
}
}
}
private DataDescriptor changeWidth; // 02 01 Y
private DataDescriptor changeScale; // 02 02 Y
private DataDescriptor changeRefval; // 02 03 Y
private DataDescriptor changeWtf; // 02 07 Y
private DataPresentIndicator dpi; // assume there's only one in effect at a time
private void operate(List<DataDescriptor> tree) {
if (tree == null)
return;
boolean hasAssFields = false;
// boolean hasDpiFields = false;
DataDescriptor.AssociatedField assField = null; // 02 04 Y
Iterator<DataDescriptor> iter = tree.iterator();
while (iter.hasNext()) {
DataDescriptor dd = iter.next();
if (dd.f == 2) {
if (dd.x == 1) {
changeWidth = (dd.y == 0) ? null : dd;
iter.remove();
} else if (dd.x == 2) {
changeScale = (dd.y == 0) ? null : dd;
iter.remove();
// throw new UnsupportedOperationException("2-2-Y (change scale)");
} else if (dd.x == 3) {
changeRefval = (dd.y == 255) ? null : dd;
iter.remove();
// throw new UnsupportedOperationException("2-3-Y (change reference values)"); // untested - no examples
} else if (dd.x == 4) {
assField = (dd.y == 0) ? null : new DataDescriptor.AssociatedField(dd.y);
iter.remove();
hasAssFields = true;
} else if (dd.x == 5) { // char data - this allows arbitrary string to be inserted
dd.type = 1; // String
dd.bitWidth = dd.y * 8;
dd.name = "Note";
} else if (dd.x == 6) {
// see L3-82 (3.1.6.5)
// "Y bits of data are described by the immediately following descriptor". could they speak English?
iter.remove();
if ((dd.y != 0) && iter.hasNext()) { // fnmoc using 2-6-0 as cancel (apparently)
DataDescriptor next = iter.next();
next.bitWidth = dd.y; // LOOK should it be dd.bitWidth??
}
} else if (dd.x == 7) {
changeWtf = (dd.y == 0) ? null : dd;
iter.remove();
} else if (dd.x == 36) {
if (iter.hasNext()) {
DataDescriptor dpi_dd = iter.next(); // this should be a replicated data present field
dpi = new DataPresentIndicator(tree, dpi_dd);
dd.dpi = dpi;
dpi_dd.dpi = dpi;
}
} else if ((dd.x == 37) && (dd.y == 255)) { // cancel dpi
dpi = null;
} else if ((dd.x == 24) && (dd.y == 255)) {
dd.dpi = dpi;
}
} else if (dd.subKeys != null) {
operate(dd.subKeys);
} else if (dd.f == 0) {
if (dd.type != 3) { // numeric or string or enum, not compound
if (changeWidth != null)
dd.bitWidth += changeWidth.y - 128;
if (changeScale != null)
dd.scale += changeScale.y - 128;
if (changeRefval != null)
dd.refVal += changeRefval.y - 128; // LOOK wrong
if (changeWtf != null && dd.type == 0) {
// see I.2 BUFR Table C 4
// For Table B elements, which are not CCITT IA5 (character data), code tables, or flag tables:
// 1. Add Y to the existing scale factor
// 2. Multiply the existing reference value by 10 Y
// 3. Calculate ((10 x Y) + 2) ÷ 3, disregard any fractional remainder and add the result to the existing
// bit width.
// HAHAHAHAHAHAHAHA
int y = changeWtf.y;
dd.scale += y;
dd.refVal *= Math.pow(10, y);
int wtf = ((10 * y) + 2) / 3;
dd.bitWidth += wtf;
}
}
if (assField != null) {
assField.nfields++;
dd.assField = assField;
assField.dataFldName = dd.name;
}
}
}
if (hasAssFields)
addAssFields(tree);
// if (hasDpiFields) addDpiFields(tree);
}
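/*
* Worked example of the 2-07-Y rule above (illustrative numbers): Y = 2 applied to a numeric field
* with scale=1, refVal=0, bitWidth=12 gives
*
* scale -> 1 + 2 = 3
* refVal -> 0 * 10^2 = 0
* bitWidth -> 12 + ((10 * 2) + 2) / 3 = 12 + 7 = 19 (integer division, remainder discarded)
*
* A 2-07-000 descriptor cancels the operator (changeWtf is set back to null above).
*/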
private void addAssFields(List<DataDescriptor> tree) {
if (tree == null)
return;
int index = 0;
while (index < tree.size()) {
DataDescriptor dd = tree.get(index);
if (dd.assField != null) {
DataDescriptor.AssociatedField assField = dd.assField;
if ((dd.f == 0) && (dd.x == 31) && (dd.y == 21)) { // the meaning field
dd.name = assField.dataFldName + "_associated_field_significance";
dd.assField = null;
} else {
DataDescriptor assDD = dd.makeAssociatedField(assField.nbits);
tree.add(index, assDD);
index++;
}
}
index++;
}
}
static class DataPresentIndicator {
DataDescriptor dataPresent; // replication of bit present field
List<DataDescriptor> linear; // linear list of dds
DataPresentIndicator(List<DataDescriptor> tree, DataDescriptor dpi_dd) {
this.dataPresent = dpi_dd;
linear = new ArrayList<>();
linearize(tree);
}
int getNfields() {
return dataPresent.replication;
}
private void linearize(List<DataDescriptor> tree) {
for (DataDescriptor dd : tree) {
if (dd.f == 0) {
linear.add(dd);
} else if (dd.f == 1) {
for (int i = 0; i < dd.replication; i++) // what about deferred replication?
linearize(dd.getSubKeys());
}
}
}
}
}

View File

@ -0,0 +1,35 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.nc2.util.Indent;
import java.util.Formatter;
/**
* Helper class for debugging BUFR descriptors
*
* @author caron
* @since Nov 16, 2009
*/
class DebugOut {
Formatter f;
Indent indent;
int fldno; // track fldno to compare with EU output
DebugOut(Formatter f) {
this.f = f;
this.indent = new Indent(2);
this.indent.setIndentLevel(0);
this.fldno = 1;
}
String indent() {
return indent.toString();
}
}

View File

@ -0,0 +1,122 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.TableB;
import org.meteoinfo.data.meteodata.bufr.tables.TableC;
import org.meteoinfo.data.meteodata.bufr.tables.TableD;
import java.util.Formatter;
/**
* Static methods to manipulate the f-x-y descriptors
*
* @author caron
* @since Oct 25, 2008
*/
public class Descriptor {
public static String makeString(short fxy) {
int f = (fxy & 0xC000) >> 14;
int x = (fxy & 0x3F00) >> 8;
int y = fxy & 0xFF;
return makeString(f, x, y);
}
public static String makeString(int f, int x, int y) {
return String.format("%d-%d-%d", f, x, y);
}
public static boolean isWmoRange(short fxy) {
int x = (fxy & 0x3F00) >> 8;
int y = fxy & 0xFF;
return (x < 48 && y < 192);
}
public static short getFxy(String name) {
String[] tok = name.split("-");
int f = (tok.length > 0) ? Integer.parseInt(tok[0]) : 0;
int x = (tok.length > 1) ? Integer.parseInt(tok[1]) : 0;
int y = (tok.length > 2) ? Integer.parseInt(tok[2]) : 0;
return (short) ((f << 14) + (x << 8) + (y));
}
public static short getFxy2(String fxxyyy) {
int fxy = Integer.parseInt(fxxyyy.trim());
int y = fxy % 1000;
fxy /= 1000;
int x = fxy % 100;
int f1 = fxy / 100;
return (short) ((f1 << 14) + (x << 8) + (y));
}
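/*
* Worked example (illustrative): getFxy2("301021") splits the decimal form fxxyyy into f=3, x=1,
* y=21 and packs it as (3 << 14) + (1 << 8) + 21, i.e. the bit pattern 0xC115 (negative when stored
* in a short). makeString() reverses the packing (f in the top 2 bits, x in the next 6, y in the
* low 8), so makeString(getFxy2("301021")) returns "3-1-21".
*/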
// contains a BUFR table entry
public static boolean isBufrTable(short fxy) {
int f = (fxy & 0xC000) >> 14;
int x = (fxy & 0x3F00) >> 8;
int y = (fxy & 0xFF);
return (f == 0) && (x == 0) && (y < 13);
}
public static short getFxy(short f, short x, short y) {
return (short) ((f << 14) + (x << 8) + (y));
}
private static final String[] descType = {"tableB", "replication", "tableC-operators", "tableD"};
public static void show(Formatter out, short fxy, BufrTableLookup lookup) {
int f = (fxy & 0xC000) >> 14;
if (f == 0) {
TableB.Descriptor b = lookup.getDescriptorTableB(fxy);
if (b == null)
out.format("%-8s: NOT FOUND!!", makeString(fxy));
else
out.format("%-8s: %s", b.getFxy(), b.getName());
} else if (f == 1) {
out.format("%-8s: %s", makeString(fxy), descType[1]);
} else if (f == 2) {
int x = (fxy & 0x3F00) >> 8;
out.format("%-8s: Operator= %s", makeString(fxy), TableC.getOperatorName(x));
} else if (f == 3) {
TableD.Descriptor d = lookup.getDescriptorTableD(fxy);
if (d == null)
out.format("%-8s: NOT FOUND!!", makeString(fxy));
else
out.format("%-8s: %s", d.getFxy(), d.getName());
}
}
public static String getName(short fxy, BufrTableLookup lookup) {
int f = (fxy & 0xC000) >> 14;
if (f == 0) {
TableB.Descriptor b = lookup.getDescriptorTableB(fxy);
if (b == null)
return ("**NOT FOUND!!");
else
return b.getName();
} else if (f == 1) {
return descType[1];
} else if (f == 2) {
int x = (fxy & 0x3F00) >> 8;
return TableC.getOperatorName(x);
} else if (f == 3) {
TableD.Descriptor d = lookup.getDescriptorTableD(fxy);
if (d == null)
return "**NOT FOUND!!";
else
return d.getName();
}
return "illegal F=" + f;
}
}

View File

@ -0,0 +1,321 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.TableA;
import ucar.ma2.*;
import ucar.nc2.*;
import org.meteoinfo.data.meteodata.bufr.tables.TableB;
import org.meteoinfo.data.meteodata.bufr.tables.TableD;
import org.meteoinfo.data.meteodata.bufr.tables.WmoXmlReader;
import ucar.nc2.wmo.Util;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* BUFR allows you to encode a BUFR table in BUFR.
* If the table is embedded, all entries must come from it.
* LOOK: may be NCEP specific ?
*
* @author John
* @since 8/11/11
*/
public class EmbeddedTable {
private static final boolean showB = false;
private static final boolean showD = false;
private final RandomAccessFile raf;
private final BufrIdentificationSection ids;
private List<Message> messages = new ArrayList<>();
private boolean tableRead;
private TableA a;
private TableB b;
private TableD d;
private Structure seq1, seq2, seq3, seq4;
private TableLookup tlookup;
EmbeddedTable(Message m, RandomAccessFile raf) {
this.raf = raf;
this.ids = m.ids;
a = new TableA("embed", raf.getLocation());
b = new TableB("embed", raf.getLocation());
d = new TableD("embed", raf.getLocation());
}
public void addTable(Message m) {
messages.add(m);
}
private void read2() throws IOException {
Message proto = messages.get(0);
// set the names of the root sub-key data descriptors to null, so that the following construct
// will create the seq2 and seq3 variables
DataDescriptor root = proto.getRootDataDescriptor();
for (DataDescriptor ds : root.subKeys) {
ds.name = null;
}
BufrConfig config = BufrConfig.openFromMessage(raf, proto, null);
Construct2 construct = new Construct2(proto, config, new NetcdfFileSubclass());
Sequence obs = construct.getObsStructure();
seq1 = (Structure) obs.findVariable("seq1");
seq2 = (Structure) obs.findVariable("seq2");
seq3 = (Structure) obs.findVariable("seq3");
seq4 = (Structure) seq3.findVariable("seq4");
// read all the messages
ArrayStructure data;
for (Message m : messages) {
if (!m.dds.isCompressed()) {
MessageUncompressedDataReader reader = new MessageUncompressedDataReader();
data = reader.readEntireMessage(obs, proto, m, raf, null);
} else {
MessageCompressedDataReader reader = new MessageCompressedDataReader();
data = reader.readEntireMessage(obs, proto, m, raf, null);
}
while (data.hasNext()) {
StructureData sdata = (StructureData) data.next();
add(sdata);
}
}
}
private void add(StructureData data) throws IOException {
for (StructureMembers.Member m : data.getMembers()) {
if (showB)
System.out.printf("%s%n", m);
if (m.getDataType() == DataType.SEQUENCE) {
if (m.getName().equals("seq1")) {
ArraySequence seq = data.getArraySequence(m);
StructureDataIterator iter = seq.getStructureDataIterator();
while (iter.hasNext())
addTableEntryA(iter.next());
} else if (m.getName().equals("seq2")) {
ArraySequence seq = data.getArraySequence(m);
StructureDataIterator iter = seq.getStructureDataIterator();
while (iter.hasNext())
addTableEntryB(iter.next());
} else if (m.getName().equals("seq3")) {
ArraySequence seq = data.getArraySequence(m);
StructureDataIterator iter = seq.getStructureDataIterator();
while (iter.hasNext())
addTableEntryD(iter.next());
}
}
}
}
private void addTableEntryA(StructureData sdata) {
String entry = "", line1 = "", line2 = "";
List<StructureMembers.Member> members = sdata.getMembers();
List<Variable> vars = seq1.getVariables();
for (int i = 0; i < vars.size(); i++) {
Variable v = vars.get(i);
StructureMembers.Member m = members.get(i);
Attribute att = v.attributes().findAttribute(BufrIosp2.fxyAttName);
switch (att.getStringValue()) {
case "0-0-1":
entry = sdata.getScalarString(m);
System.out.println(entry);
break;
case "0-0-2":
line1 = sdata.getScalarString(m);
System.out.println(line1);
break;
case "0-0-3":
line2 = sdata.getScalarString(m);
System.out.println(line2);
break;
}
}
int code = Integer.parseInt(entry.trim());
// split name and description from appended line 1 and 2
String desc = (line1 + line2).trim();
String name = "";
int pos = desc.indexOf(' ');
if (pos > 0) {
name = desc.substring(0, pos);
}
TableA.Descriptor d = a.addDescriptor(code, desc);
d.setName(name);
}
private void addTableEntryB(StructureData sdata) {
String name = "", units = "", signScale = null, signRef = null;
int scale = 0, refVal = 0, width = 0;
short x1 = 0, y1 = 0;
List<StructureMembers.Member> members = sdata.getMembers();
List<Variable> vars = seq2.getVariables();
for (int i = 0; i < vars.size(); i++) {
Variable v = vars.get(i);
StructureMembers.Member m = members.get(i);
String data = sdata.getScalarString(m);
if (showB)
System.out.printf("%s == %s%n", v, data);
Attribute att = v.attributes().findAttribute(BufrIosp2.fxyAttName);
switch (att.getStringValue()) {
case "0-0-10":
sdata.getScalarString(m);
break;
case "0-0-11":
String x = sdata.getScalarString(m);
x1 = Short.parseShort(x.trim());
break;
case "0-0-12":
String y = sdata.getScalarString(m);
y1 = Short.parseShort(y.trim());
break;
case "0-0-13":
name = sdata.getScalarString(m);
break;
case "0-0-14":
name += sdata.getScalarString(m); // append both lines
break;
case "0-0-15":
units = sdata.getScalarString(m);
units = WmoXmlReader.cleanUnit(units.trim());
break;
case "0-0-16":
signScale = sdata.getScalarString(m).trim();
break;
case "0-0-17":
String scaleS = sdata.getScalarString(m);
scale = Integer.parseInt(scaleS.trim());
break;
case "0-0-18":
signRef = sdata.getScalarString(m).trim();
break;
case "0-0-19":
String refS = sdata.getScalarString(m);
refVal = Integer.parseInt(refS.trim());
break;
case "0-0-20":
String widthS = sdata.getScalarString(m);
width = Integer.parseInt(widthS.trim());
break;
}
}
if (showB)
System.out.printf("%n");
// split name and description from appended line 1 and 2
String desc = null;
name = name.trim();
int pos = name.indexOf(' ');
if (pos > 0) {
desc = Util.cleanName(name.substring(pos + 1));
name = name.substring(0, pos);
name = Util.cleanName(name);
}
if ("-".equals(signScale))
scale = -1 * scale;
if ("-".equals(signRef))
refVal = -1 * refVal;
b.addDescriptor(x1, y1, scale, refVal, width, name, units, desc);
}
private void addTableEntryD(StructureData sdata) throws IOException {
String name = null;
short x1 = 0, y1 = 0;
List<Short> dds = null;
List<StructureMembers.Member> members = sdata.getMembers();
List<Variable> vars = seq3.getVariables();
for (int i = 0; i < vars.size(); i++) {
Variable v = vars.get(i);
StructureMembers.Member m = members.get(i);
if (m.getName().equals("seq4")) {
dds = getDescriptors(sdata.getArraySequence(m));
continue;
}
Attribute att = v.attributes().findAttribute(BufrIosp2.fxyAttName);
if (att != null) {
if (showD)
System.out.printf("%s == %s%n", v, sdata.getScalarString(m));
switch (att.getStringValue()) {
case "0-0-10":
sdata.getScalarString(m);
break;
case "0-0-11":
String x = sdata.getScalarString(m);
x1 = Short.parseShort(x.trim());
break;
case "0-0-12":
String y = sdata.getScalarString(m);
y1 = Short.parseShort(y.trim());
break;
case "2-5-64":
name = sdata.getScalarString(m);
break;
}
}
}
if (showD)
System.out.printf("%n");
name = Util.cleanName(name);
d.addDescriptor(x1, y1, name, dds);
}
private List<Short> getDescriptors(ArraySequence seqdata) throws IOException {
List<Short> list = new ArrayList<>();
String fxyS = null;
List<Variable> vars = seq4.getVariables();
StructureDataIterator iter = seqdata.getStructureDataIterator();
while (iter.hasNext()) {
StructureData sdata = iter.next();
List<StructureMembers.Member> members = sdata.getMembers();
for (int i = 0; i < vars.size(); i++) {
Variable v = vars.get(i);
StructureMembers.Member m = members.get(i);
String data = sdata.getScalarString(m);
if (showD)
System.out.printf("%s == %s%n", v, data);
Attribute att = v.attributes().findAttribute(BufrIosp2.fxyAttName);
if (att != null && att.getStringValue().equals("0-0-30"))
fxyS = sdata.getScalarString(m);
}
if (showD)
System.out.printf("%n");
if (fxyS != null) {
short id = Descriptor.getFxy2(fxyS);
list.add(id);
}
}
return list;
}
TableLookup getTableLookup() throws IOException {
if (!tableRead) {
read2();
tableRead = true;
tlookup = new TableLookup(ids, a, b, d);
}
return tlookup;
}
}

View File

@ -0,0 +1,384 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import com.google.re2j.Matcher;
import com.google.re2j.Pattern;
import ucar.nc2.time.CalendarDate;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.util.Formatter;
import java.util.List;
/**
* Encapsulates a complete BUFR message.
* A message has a DataDescriptor and one or more "datasets" aka "data subsets" aka "observations" aka "obs".
* Table lookup is done through getLookup().
*/
public class Message {
private static final Pattern wmoPattern = Pattern.compile(".*([IJ]..... ....) .*");
public BufrIndicatorSection is;
public BufrIdentificationSection ids;
public BufrDataDescriptionSection dds;
public BufrDataSection dataSection;
private RandomAccessFile raf;
private BufrTableLookup lookup;
private DataDescriptor root;
private String header; // wmo header
private long startPos; // starting pos in raf
private byte[] raw; // raw bytes
// bit counting
BitCounterUncompressed[] counterDatasets; // uncompressed: one for each dataset
int msg_nbits;
public Message(RandomAccessFile raf, BufrIndicatorSection is, BufrIdentificationSection ids,
BufrDataDescriptionSection dds, BufrDataSection dataSection) throws IOException {
this.raf = raf;
this.is = is;
this.ids = ids;
this.dds = dds;
this.dataSection = dataSection;
lookup = BufrTableLookup.factory(this);
}
void setTableLookup(TableLookup lookup) {
this.lookup.setTableLookup(lookup);
}
public void close() throws IOException {
if (raf != null)
raf.close();
}
/**
* Get number of datasets in this message.
*
* @return number of datasets in this message
*/
public int getNumberDatasets() {
return dds.getNumberDatasets();
}
public CalendarDate getReferenceTime() {
return ids.getReferenceTime();
}
///////////////////////////////////////////////////////////////////////////
// the WMO header is in here somewhere when the message comes over the IDD
public void setHeader(String header) {
this.header = header;
}
public String getHeader() {
return header;
}
// where the message starts in the file
public void setStartPos(long startPos) {
this.startPos = startPos;
}
public long getStartPos() {
return startPos;
}
public void setRawBytes(byte[] raw) {
this.raw = raw;
}
public byte[] getRawBytes() {
return raw;
}
public String extractWMO() {
Matcher matcher = wmoPattern.matcher(header);
if (!matcher.matches()) {
return "";
}
return matcher.group(1);
}
/**
* Get the byte length of the entire BUFR record.
*
* @return length in bytes of BUFR record
*/
public long getMessageSize() {
return is.getBufrLength();
}
/**
* Get the root of the DataDescriptor tree.
*
* @return root DataDescriptor
*/
public DataDescriptor getRootDataDescriptor() {
if (root == null)
root = new DataDescriptorTreeConstructor().factory(lookup, dds);
return root;
}
public boolean usesLocalTable() throws IOException {
DataDescriptor root = getRootDataDescriptor();
return usesLocalTable(root);
}
private boolean usesLocalTable(DataDescriptor dds) {
for (DataDescriptor key : dds.getSubKeys()) {
if (key.isLocal())
return true;
if ((key.getSubKeys() != null) && usesLocalTable(key))
return true;
}
return false;
}
/**
* Check if this message contains a BUFR table
*
* @return true if message contains a BUFR table
*/
public boolean containsBufrTable() {
for (Short key : dds.getDataDescriptors()) {
if (Descriptor.isBufrTable(key))
return true;
}
return false;
}
/**
* Check if all descriptors were found in the tables.
*
* @return true if all dds were found.
*/
public boolean isTablesComplete() {
DataDescriptor root = getRootDataDescriptor();
return !root.isBad;
}
public BufrTableLookup getLookup() {
return lookup;
}
////////////////////////////////////////////////////////////////////////
// bit counting
public boolean isBitCountOk() {
getRootDataDescriptor(); // make sure root is calculated
getTotalBits(); // make sure bits are counted
// int nbitsGiven = 8 * (dataSection.getDataLength() - 4);
int nbytesCounted = getCountedDataBytes();
int nbytesGiven = dataSection.getDataLength();
return Math.abs(nbytesCounted - nbytesGiven) <= 1; // radiosondes dataLen not even number of bytes
}
public int getCountedDataBytes() {
int msg_nbytes = msg_nbits / 8;
if (msg_nbits % 8 != 0)
msg_nbytes++;
msg_nbytes += 4;
if (msg_nbytes % 2 != 0)
msg_nbytes++; // LOOK seems to be violated by some messages
return msg_nbytes;
}
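/*
* Example of the rounding above (illustrative): 1001 counted data bits gives 1001 / 8 = 125 with a
* remainder, so 126 data bytes; plus 4 bytes for the section 4 header (the "+ 4" above) = 130;
* 130 is already even, so no padding byte is added.
*/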
public int getCountedDataBits() {
return msg_nbits;
}
/*
* Get the offset of this obs from the start of the message data.
* Use only for non compressed data.
*
* public int getBitOffset(int obsOffsetInMessage) {
* if (dds.isCompressed())
* throw new IllegalArgumentException("cant call BufrMessage.getBitOffset() on compressed message");
*
* if (!root.isVarLength)
* return root.total_nbits * obsOffsetInMessage;
*
* getTotalBits(); // make sure its been set
* return nestedTableCounter[obsOffsetInMessage].getStartBit();
* }
*/
/**
* Get the bit counter for one dataset (obs) of an uncompressed message.
*
* @param obsOffsetInMessage index of the obs in this message
* @return the BitCounterUncompressed for that obs
*/
public BitCounterUncompressed getBitCounterUncompressed(int obsOffsetInMessage) {
if (dds.isCompressed())
throw new IllegalArgumentException("cant call BufrMessage.getBitOffset() on compressed message");
calcTotalBits(null); // make sure its been set
return counterDatasets[obsOffsetInMessage];
}
/**
* This is the total number of bits taken by the data in the data section of the message.
* This is the counted number.
*
* @return total number of bits
*/
public int getTotalBits() {
if (msg_nbits == 0)
calcTotalBits(null);
return msg_nbits;
}
// sets msg_nbits as side-effect
public int calcTotalBits(Formatter out) {
try {
if (!dds.isCompressed()) {
MessageUncompressedDataReader reader = new MessageUncompressedDataReader();
reader.readData(null, this, raf, null, false, out);
} else {
MessageCompressedDataReader reader = new MessageCompressedDataReader();
reader.readData(null, this, raf, null, out);
}
} catch (IOException ioe) {
return 0;
}
return msg_nbits;
}
///////////////////////////////////////////////////////////////////
/**
* Override hashcode to be consistent with equals.
*
* @return the hash code of dds.getDescriptors()
*/
public int hashCode() {
int result = 17;
result += 37 * result + getDDShashcode();
// result += 37 * result + ids.getCenterId();
// result += 37 * result + ids.getSubCenter_id();
result += 37 * result + ids.getCategory();
result += 37 * result + ids.getSubCategory();
return result;
}
public int getDDShashcode() {
root = getRootDataDescriptor();
return root.hashCode2();
}
/**
* BufrMessage is equal if they have the same dds.
*
* @param obj other BufrMessage
* @return true if equals
*/
public boolean equals(Object obj) {
if (!(obj instanceof Message))
return false;
Message o = (Message) obj;
if (!dds.getDataDescriptors().equals(o.dds.getDataDescriptors()))
return false;
if (ids.getCenterId() != o.ids.getCenterId())
return false;
// if (ids.getSubCenter_id() != o.ids.getSubCenter_id()) return false;
if (ids.getCategory() != o.ids.getCategory())
return false;
return ids.getSubCategory() == o.ids.getSubCategory();
}
////////////////////////////////////////////////////////////////////
// perhaps move this into a helper class - started from ucar.bufr.Dump
public void showMissingFields(Formatter out) throws IOException {
lookup.showMissingFields(dds.getDataDescriptors(), out);
}
public void dump(Formatter out) { // throws IOException {
int listHash = dds.getDataDescriptors().hashCode();
out.format(" BUFR edition %d time= %s wmoHeader=%s hash=[0x%x] listHash=[0x%x] (%d) %n", is.getBufrEdition(),
getReferenceTime(), getHeader(), hashCode(), listHash, listHash);
out.format(" Category= %s %n", lookup.getCategoryFullName());
out.format(" Center= %s %n", lookup.getCenterName());
out.format(" Table= %s %n", lookup.getTableName());
out.format(" Table B= wmoTable= %s localTable= %s mode=%s%n", lookup.getWmoTableBName(),
lookup.getLocalTableBName(), lookup.getMode());
out.format(" Table D= wmoTable= %s localTable= %s%n", lookup.getWmoTableDName(), lookup.getLocalTableDName());
out.format(" DDS nsubsets=%d type=0x%x isObs=%b isCompressed=%b%n", dds.getNumberDatasets(), dds.getDataType(),
dds.isObserved(), dds.isCompressed());
long startPos = is.getStartPos();
long startData = dataSection.getDataPos();
out.format(" startPos=%d len=%d endPos=%d dataStart=%d dataLen=%d dataEnd=%d %n", startPos, is.getBufrLength(),
(startPos + is.getBufrLength()), startData, dataSection.getDataLength(),
startData + dataSection.getDataLength());
dumpDesc(out, dds.getDataDescriptors(), lookup, 4);
out.format("%n CDM Nested Table=%n");
DataDescriptor root = new DataDescriptorTreeConstructor().factory(lookup, dds);
dumpKeys(out, root, 4);
/*
* int nbits = m.getTotalBits();
* int nbytes = (nbits % 8 == 0) ? nbits / 8 : nbits / 8 + 1;
* out.format(" totalBits = %d (%d bytes) outputBytes= %d isVarLen=%s isCompressed=%s\n\n",
* nbits, nbytes, root.getByteWidthCDM(), root.isVarLength(), m.dds.isCompressed());
*/
}
private void dumpDesc(Formatter out, List<Short> desc, BufrTableLookup table, int indent) {
if (desc == null)
return;
for (Short fxy : desc) {
for (int i = 0; i < indent; i++)
out.format(" ");
Descriptor.show(out, fxy, table);
out.format("%n");
int f = (fxy & 0xC000) >> 14;
if (f == 3) {
List<Short> sublist = table.getDescriptorListTableD(fxy);
dumpDesc(out, sublist, table, indent + 2);
}
}
}
private void dumpKeys(Formatter out, DataDescriptor tree, int indent) {
for (DataDescriptor key : tree.subKeys) {
for (int i = 0; i < indent; i++)
out.format(" ");
out.format("%s%n", key);
if (key.getSubKeys() != null)
dumpKeys(out, key, indent + 2);
}
}
public void dumpHeader(Formatter out) {
out.format(" BUFR edition %d time= %s wmoHeader=%s %n", is.getBufrEdition(), getReferenceTime(), getHeader());
out.format(" Category= %d %s %s %n", lookup.getCategory(), lookup.getCategoryName(), lookup.getCategoryNo());
out.format(" Center= %s %s %n", lookup.getCenterName(), lookup.getCenterNo());
out.format(" Table= %d.%d local= %d wmoTables= %s,%s localTables= %s,%s %n", ids.getMasterTableId(),
ids.getMasterTableVersion(), ids.getLocalTableVersion(), lookup.getWmoTableBName(), lookup.getWmoTableDName(),
lookup.getLocalTableBName(), lookup.getLocalTableDName());
out.format(" DDS nsubsets=%d type=0x%x isObs=%b isCompressed=%b%n", dds.getNumberDatasets(), dds.getDataType(),
dds.isObserved(), dds.isCompressed());
}
public void dumpHeaderShort(Formatter out) {
out.format(" %s, Cat= %s, Center= %s (%s), Table= %d.%d.%d %n", getHeader(), lookup.getCategoryName(),
lookup.getCenterName(), lookup.getCenterNo(), ids.getMasterTableId(), ids.getMasterTableVersion(),
ids.getLocalTableVersion());
}
}

View File

@ -0,0 +1,539 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.ma2.*;
import ucar.nc2.Sequence;
import ucar.nc2.Structure;
import ucar.nc2.iosp.BitReader;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.HashMap;
import java.util.List;
/**
* Reads through the data of a message.
* Can count bits / transfer all or some data to an Array.
*
* @author caron
* @since Nov 15, 2009
*/
/*
* Within one message there are n obs (datasets) and s fields in each dataset.
* For compressed datasets, storage order is data(fld, obs) (obs varying fastest) :
*
* Ro1, NBINC1, I11, I12, . . . I1n
* Ro2, NBINC2, I21, I22, . . . I2n
* ...
* Ros, NBINCs, Is1, Is2, . . . Isn
*
* where Ro1, Ro2, . . . Ros are local reference values (number of bits as Table B) for field i.
* NBINC1 . . . NBINCs contain, as 6-bit quantities, the number of bits occupied by the increments that follow.
* If NBINC1 = 0, all values of element I are equal to Ro1; in such cases, the increments shall be omitted.
* For character data, NBINC shall contain the number of octets occupied by the character element.
* However, if the character data in all subsets are identical, NBINC = 0.
* Iij is the increment for the ith field and the jth obs.
*
* A replicated field (structure) takes a group of fields and replicates them.
* Let C be the entire compressed block for the ith field, as above.
*
* Ci = Roi, NBINCi, Ii1, Ii2, . . . Iin
*
* data:
*
* C1, (C2, C3)*r, ... Cs
*
* where r is set in the data descriptor, and is the same for all datasets.
*
* A delayed replicated field (sequence) takes a group of fields and replicates them, with the number of replications
* in the data :
*
* C1, dr, 6bits, (C2, C3)*dr, ... Cs
*
* where the width (nbits) of dr is set in the data descriptor. This dr must be the same for each dataset in the
* message.
* For some reason there are 6 extra bits after the dr. My guess is it was a programming mistake that is now needed.
* There is no description of this case in the spec or the guide.
*
*
* --------------------------
*
* We use an ArrayStructureMA to hold the data, and fill it sequentially as we scan the message.
* Each field is held in an Array stored in the member.getDataArray().
* An iterator is stored in member.getDataObject() which keeps track of where we are.
* For fixed length nested Structures, we need fld(dataset, inner) but we have fld(inner, dataset), so we transpose
* the dimensions before we set the iterator.
* For Sequences, inner.length is the same for all datasets in the message, but it may vary across messages. We
* only iterate over the inner sequence, never across all messages, so the implementation can be specific to the
* message.
*
*/
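/*
* Small numeric illustration of the compressed layout above (made-up numbers): a 12-bit field
* across n = 3 datasets with values 105, 103, 107 could be stored as
*
* Ro = 103 (12 bits), NBINC = 3 (6 bits), increments 2, 0, 4 (3 bits each)
*
* i.e. 12 + 6 + 3 * 3 = 27 bits instead of 3 * 12 = 36. If all three values were equal, NBINC would
* be 0 and the increments omitted. readData() below reconstructs each value as Ro + increment, and
* an all-ones increment is mapped back to the missing value for the field's original bit width.
*/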
public class MessageCompressedDataReader {
/**
* Read all datasets from a single message
*
* @param s outer variables
* @param proto prototype message, has been processed
* @param m read this message
* @param raf from this file
* @param f output bit count debugging info (may be null)
* @return ArrayStructure with all the data from the message in it.
* @throws IOException on read error
*/
public ArrayStructure readEntireMessage(Structure s, Message proto, Message m, RandomAccessFile raf, Formatter f)
throws IOException {
// transfer info (refersTo, name) from the proto message
DataDescriptor.transferInfo(proto.getRootDataDescriptor().getSubKeys(), m.getRootDataDescriptor().getSubKeys());
// allocate ArrayStructureMA for outer structure
int n = m.getNumberDatasets();
ArrayStructureMA ama = ArrayStructureMA.factoryMA(s, new int[]{n});
setIterators(ama);
// map dkey to Member recursively
HashMap<DataDescriptor, StructureMembers.Member> map = new HashMap<>(100);
associateMessage2Members(ama.getStructureMembers(), m.getRootDataDescriptor(), map);
readData(m, raf, f, new Request(ama, map, null));
return ama;
}
/**
* Read some or all datasets from a single message
*
* @param ama place data into here in order (may be null). iterators must be already set.
* @param m read this message
* @param raf from this file
* @param r which datasets, relative to this message. null == all.
* @param f output bit count debugging info (may be null)
* @throws IOException on read error
*/
public void readData(ArrayStructureMA ama, Message m, RandomAccessFile raf, Range r, Formatter f) throws IOException {
// map dkey to Member recursively
HashMap<DataDescriptor, StructureMembers.Member> map = null;
if (ama != null) {
map = new HashMap<>(2 * ama.getMembers().size());
associateMessage2Members(ama.getStructureMembers(), m.getRootDataDescriptor(), map);
}
readData(m, raf, f, new Request(ama, map, r));
}
// manage the request
private static class Request {
ArrayStructureMA ama; // data goes here, may be null
HashMap<DataDescriptor, StructureMembers.Member> map; // map of DataDescriptor to members of ama, may be null
Range r; // requested range
DpiTracker dpiTracker; // may be null
int outerRow; // if inner process needs to know what row its on
Request(ArrayStructureMA ama, HashMap<DataDescriptor, StructureMembers.Member> map, Range r) {
this.ama = ama;
this.map = map;
this.r = r;
}
boolean wantRow(int row) {
if (ama == null)
return false;
if (r == null)
return true;
return r.contains(row);
}
}
// An iterator is stored in member.getDataObject() which keeps track of where we are.
// For fixed length nested Structures, we need fld(dataset, inner1, inner2, ...) but we have
// fld(inner1, inner2, ..., dataset), so we permute the dimensions before we set the iterator.
public static void setIterators(ArrayStructureMA ama) {
StructureMembers sms = ama.getStructureMembers();
for (StructureMembers.Member sm : sms.getMembers()) {
Array data = sm.getDataArray();
if (data instanceof ArrayStructureMA) {
setIterators((ArrayStructureMA) data);
} else {
int[] shape = data.getShape();
if ((shape.length > 1) && (sm.getDataType() != DataType.CHAR)) {
Array datap;
if (shape.length == 2)
datap = data.transpose(0, 1);
else {
int[] pdims = new int[shape.length]; // (0,1,2,3...) -> (1,2,3...,0)
for (int i = 0; i < shape.length - 1; i++)
pdims[i] = i + 1;
datap = data.permute(pdims);
}
sm.setDataObject(datap.getIndexIterator());
} else {
sm.setDataObject(data.getIndexIterator());
}
}
}
}
private void associateMessage2Members(StructureMembers members, DataDescriptor parent,
HashMap<DataDescriptor, StructureMembers.Member> map) {
for (DataDescriptor dkey : parent.getSubKeys()) {
if (dkey.name == null) {
if (dkey.getSubKeys() != null)
associateMessage2Members(members, dkey, map);
continue;
}
StructureMembers.Member m = members.findMember(dkey.name);
if (m != null) {
map.put(dkey, m);
if (m.getDataType() == DataType.STRUCTURE) {
ArrayStructure nested = (ArrayStructure) m.getDataArray();
if (dkey.getSubKeys() != null)
associateMessage2Members(nested.getStructureMembers(), dkey, map);
}
} else {
if (dkey.getSubKeys() != null)
associateMessage2Members(members, dkey, map);
}
}
}
// read / count the bits in a compressed message
private int readData(Message m, RandomAccessFile raf, Formatter f, Request req) throws IOException {
BitReader reader = new BitReader(raf, m.dataSection.getDataPos() + 4);
DataDescriptor root = m.getRootDataDescriptor();
if (root.isBad)
return 0;
DebugOut out = (f == null) ? null : new DebugOut(f);
BitCounterCompressed[] counterFlds = new BitCounterCompressed[root.subKeys.size()]; // one for each field LOOK why
// not m.counterFlds ?
readData(out, reader, counterFlds, root, 0, m.getNumberDatasets(), req);
m.msg_nbits = 0;
for (BitCounterCompressed counter : counterFlds)
if (counter != null)
m.msg_nbits += counter.getTotalBits();
return m.msg_nbits;
}
/**
* @param out debug info; may be null
* @param reader raf wrapper for bit reading
* @param fldCounters one for each field
* @param parent parent.subkeys() holds the fields
* @param bitOffset bit offset from beginning of data
* @param ndatasets number of compressed datasets
* @param req for writing into the ArrayStructure;
* @return bitOffset
* @throws IOException on read error
*/
private int readData(DebugOut out, BitReader reader, BitCounterCompressed[] fldCounters, DataDescriptor parent,
int bitOffset, int ndatasets, Request req) throws IOException {
List<DataDescriptor> flds = parent.getSubKeys();
for (int fldidx = 0; fldidx < flds.size(); fldidx++) {
DataDescriptor dkey = flds.get(fldidx);
if (!dkey.isOkForVariable()) { // dds with no data to read
// the dpi nightmare
if ((dkey.f == 2) && (dkey.x == 36)) {
req.dpiTracker = new DpiTracker(dkey.dpi, dkey.dpi.getNfields());
}
if (out != null)
out.f.format("%s %d %s (%s) %n", out.indent(), out.fldno++, dkey.name, dkey.getFxyName());
continue;
}
BitCounterCompressed counter = new BitCounterCompressed(dkey, ndatasets, bitOffset);
fldCounters[fldidx] = counter;
// sequence
if (dkey.replication == 0) {
reader.setBitOffset(bitOffset);
int count = (int) reader.bits2UInt(dkey.replicationCountSize);
bitOffset += dkey.replicationCountSize;
reader.bits2UInt(6);
if (null != out)
out.f.format("%s--sequence %s bitOffset=%d replication=%s %n", out.indent(), dkey.getFxyName(), bitOffset,
count);
bitOffset += 6; // LOOK seems to be an extra 6 bits.
counter.addNestedCounters(count);
// make an ArrayObject of ArraySequence, place it into the data array
bitOffset = makeArraySequenceCompressed(out, reader, counter, dkey, bitOffset, ndatasets, count, req);
// if (null != out) out.f.format("--back %s %d %n", dkey.getFxyName(), bitOffset);
continue;
}
// structure
if (dkey.type == 3) {
if (null != out)
out.f.format("%s--structure %s bitOffset=%d replication=%s %n", out.indent(), dkey.getFxyName(), bitOffset,
dkey.replication);
// p 11 of the "standard" doesn't really describe the case of replication AND compression
counter.addNestedCounters(dkey.replication);
for (int i = 0; i < dkey.replication; i++) {
BitCounterCompressed[] nested = counter.getNestedCounters(i);
req.outerRow = i;
if (null != out) {
out.f.format("%n");
out.indent.incr();
bitOffset = readData(out, reader, nested, dkey, bitOffset, ndatasets, req);
out.indent.decr();
} else {
bitOffset = readData(null, reader, nested, dkey, bitOffset, ndatasets, req);
}
}
// if (null != out) out.f.format("--back %s %d %n", dkey.getFxyName(), bitOffset);
continue;
}
// all other fields
StructureMembers.Member member;
IndexIterator iter = null;
ArrayStructure dataDpi = null; // if iter is missing - for the dpi case
if (req.map != null) {
member = req.map.get(dkey);
iter = (IndexIterator) member.getDataObject();
if (iter == null) {
dataDpi = (ArrayStructure) member.getDataArray();
}
}
reader.setBitOffset(bitOffset); // ?? needed ??
// char data special case
if (dkey.type == 1) {
int nc = dkey.bitWidth / 8;
byte[] minValue = new byte[nc];
for (int i = 0; i < nc; i++)
minValue[i] = (byte) reader.bits2UInt(8);
int dataWidth = (int) reader.bits2UInt(6); // incremental data width in bytes
counter.setDataWidth(8 * dataWidth);
int totalWidth = dkey.bitWidth + 6 + 8 * dataWidth * ndatasets; // total width in bits for this compressed set
// of values
bitOffset += totalWidth; // bitOffset now points to the next field
if (null != out)
out.f.format("%s read %d %s (%s) bitWidth=%d defValue=%s dataWidth=%d n=%d bitOffset=%d %n", out.indent(),
out.fldno++, dkey.name, dkey.getFxyName(), dkey.bitWidth, new String(minValue, StandardCharsets.UTF_8),
dataWidth, ndatasets, bitOffset);
if (iter != null) {
for (int dataset = 0; dataset < ndatasets; dataset++) {
if (dataWidth == 0) { // use the min value
if (req.wantRow(dataset))
for (int i = 0; i < nc; i++)
iter.setCharNext((char) minValue[i]); // ??
} else { // read the incremental value
int nt = Math.min(nc, dataWidth);
byte[] incValue = new byte[nc];
for (int i = 0; i < nt; i++)
incValue[i] = (byte) reader.bits2UInt(8);
for (int i = nt; i < nc; i++) // can dataWidth < n ?
incValue[i] = 0;
if (req.wantRow(dataset))
for (int i = 0; i < nc; i++) {
int cval = incValue[i];
if (cval < 32 || cval > 126)
cval = 0; // printable ascii KLUDGE!
iter.setCharNext((char) cval); // ??
}
if (out != null)
out.f.format(" %s,", new String(incValue, StandardCharsets.UTF_8));
}
}
}
if (out != null)
out.f.format("%n");
continue;
}
// numeric fields
int useBitWidth = dkey.bitWidth;
// a dpi Field needs to be substituted
boolean isDpi = ((dkey.f == 0) && (dkey.x == 31) && (dkey.y == 31));
boolean isDpiField = false;
if ((dkey.f == 2) && (dkey.x == 24) && (dkey.y == 255)) {
isDpiField = true;
DataDescriptor dpiDD = req.dpiTracker.getDpiDD(req.outerRow);
useBitWidth = dpiDD.bitWidth;
}
long dataMin = reader.bits2UInt(useBitWidth);
int dataWidth = (int) reader.bits2UInt(6); // increment data width - always in 6 bits, so max is 2^6 - 1 = 63
if (dataWidth > useBitWidth && (null != out))
out.f.format(" BAD WIDTH ");
if (dkey.type == 1)
dataWidth *= 8; // char data count is in bytes
counter.setDataWidth(dataWidth);
int totalWidth = useBitWidth + 6 + dataWidth * ndatasets; // total width in bits for this compressed set of values
bitOffset += totalWidth; // bitOffset now points to the next field
if (null != out)
out.f.format("%s read %d, %s (%s) bitWidth=%d dataMin=%d (%f) dataWidth=%d n=%d bitOffset=%d %n", out.indent(),
out.fldno++, dkey.name, dkey.getFxyName(), useBitWidth, dataMin, dkey.convert(dataMin), dataWidth,
ndatasets, bitOffset);
// numeric fields
// if dataWidth == 0, just use min value, otherwise read the compressed value here
for (int dataset = 0; dataset < ndatasets; dataset++) {
long value = dataMin;
if (dataWidth > 0) {
long cv = reader.bits2UInt(dataWidth);
if (BufrNumbers.isMissing(cv, dataWidth))
value = BufrNumbers.missingValue(useBitWidth); // set to missing value
else // add to minimum
value += cv;
}
// workaround for malformed messages
if (dataWidth > useBitWidth) {
long missingVal = BufrNumbers.missingValue(useBitWidth);
if ((value & missingVal) != value) // overflow
value = missingVal; // replace with missing value
}
if (req.wantRow(dataset)) {
if (isDpiField) {
if (dataDpi != null) {
DataDescriptor dpiDD = req.dpiTracker.getDpiDD(req.outerRow);
StructureMembers sms = dataDpi.getStructureMembers();
StructureMembers.Member m0 = sms.getMember(0);
IndexIterator iter2 = (IndexIterator) m0.getDataObject();
iter2.setObjectNext(dpiDD.getName());
StructureMembers.Member m1 = sms.getMember(1);
iter2 = (IndexIterator) m1.getDataObject();
iter2.setFloatNext(dpiDD.convert(value));
}
} else if (iter != null) {
iter.setLongNext(value);
}
}
// since dpi must be the same for all datasets, just keep the first one
if (isDpi && (dataset == 0))
req.dpiTracker.setDpiValue(req.outerRow, value); // keep track of dpi values in the tracker - perhaps not
// expose
if ((out != null) && (dataWidth > 0))
out.f.format(" %d (%f)", value, dkey.convert(value));
}
if (out != null)
out.f.format("%n");
}
return bitOffset;
}
// read in the data into an ArrayStructureMA, holding an ArrayObject() of ArraySequence
private int makeArraySequenceCompressed(DebugOut out, BitReader reader, BitCounterCompressed bitCounterNested,
DataDescriptor seqdd, int bitOffset, int ndatasets, int count, Request req) throws IOException {
// construct ArrayStructureMA and associated map
ArrayStructureMA ama = null;
StructureMembers members = null;
HashMap<DataDescriptor, StructureMembers.Member> nmap = null;
if (req.map != null) {
Sequence seq = (Sequence) seqdd.refersTo;
int[] shape = {ndatasets, count}; // seems unlikely this can handle recursion
ama = ArrayStructureMA.factoryMA(seq, shape);
setIterators(ama);
members = ama.getStructureMembers();
nmap = new HashMap<>(2 * members.getMembers().size());
associateMessage2Members(members, seqdd, nmap);
}
Request nreq = new Request(ama, nmap, req.r);
// iterate over the number of replications, reading ndataset compressed values at each iteration
if (out != null)
out.indent.incr();
for (int i = 0; i < count; i++) {
BitCounterCompressed[] nested = bitCounterNested.getNestedCounters(i);
nreq.outerRow = i;
bitOffset = readData(out, reader, nested, seqdd, bitOffset, ndatasets, nreq);
}
if (out != null)
out.indent.decr();
// add ArraySequence to the ArrayObject in the outer structure
if (req.map != null) {
StructureMembers.Member m = req.map.get(seqdd);
ArrayObject arrObj = (ArrayObject) m.getDataArray();
// we need to break ama into separate sequences, one for each dataset
int start = 0;
for (int i = 0; i < ndatasets; i++) {
ArraySequence arrSeq = new ArraySequence(members, new SequenceIterator(start, count, ama), count);
arrObj.setObject(i, arrSeq);
start += count;
}
}
return bitOffset;
}
private static class DpiTracker {
DataDescriptorTreeConstructor.DataPresentIndicator dpi;
boolean[] isPresent;
List<DataDescriptor> dpiDD;
DpiTracker(DataDescriptorTreeConstructor.DataPresentIndicator dpi, int nPresentFlags) {
this.dpi = dpi;
isPresent = new boolean[nPresentFlags];
}
void setDpiValue(int fldidx, long value) {
isPresent[fldidx] = (value == 0); // present if the value is zero
}
DataDescriptor getDpiDD(int fldPresentIndex) {
if (dpiDD == null) {
dpiDD = new ArrayList<>();
for (int i = 0; i < isPresent.length; i++) {
if (isPresent[i])
dpiDD.add(dpi.linear.get(i));
}
}
return dpiDD.get(fldPresentIndex);
}
boolean isDpiDDs(DataDescriptor dkey) {
return (dkey.f == 2) && (dkey.x == 24) && (dkey.y == 255);
}
boolean isDpiField(DataDescriptor dkey) {
return (dkey.f == 2) && (dkey.x == 24) && (dkey.y == 255);
}
}
}

View File

@ -0,0 +1,249 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.unidata.io.KMPMatch;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;
/**
* Sequentially scans a BUFR file, extracts the messages.
*
* @author caron
* @since May 9, 2008
*/
public class MessageScanner {
// static public final int MAX_MESSAGE_SIZE = 500 * 1000; // GTS allows up to 500 Kb messages (ref?)
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(MessageScanner.class);
private static final KMPMatch matcher = new KMPMatch("BUFR".getBytes(StandardCharsets.UTF_8));
/**
* is this a valid BUFR file.
*
* @param raf check this file
* @return true if its a BUFR file
* @throws IOException on read error
*/
public static boolean isValidFile(RandomAccessFile raf) throws IOException {
raf.seek(0);
if (!raf.searchForward(matcher, 40 * 1000))
return false; // must find "BUFR" in first 40k
raf.skipBytes(4);
BufrIndicatorSection is = new BufrIndicatorSection(raf);
if (is.getBufrEdition() > 4)
return false;
// if(is.getBufrLength() > MAX_MESSAGE_SIZE) return false;
return !(is.getBufrLength() > raf.length());
}
/////////////////////////////////
private RandomAccessFile raf;
private boolean useEmbeddedTables;
private int countMsgs;
private int countObs;
private byte[] header;
private long startPos;
private long lastPos;
private boolean debug;
private EmbeddedTable embedTable;
public MessageScanner(RandomAccessFile raf) throws IOException {
this(raf, 0, true);
}
public MessageScanner(RandomAccessFile raf, long startPos, boolean useEmbeddedTables) throws IOException {
startPos = (startPos < 30) ? 0 : startPos - 30; // look for the header
this.raf = raf;
lastPos = startPos;
this.useEmbeddedTables = useEmbeddedTables;
raf.seek(startPos);
raf.order(RandomAccessFile.BIG_ENDIAN);
}
public Message getFirstDataMessage() throws IOException {
while (hasNext()) {
Message m = next();
if (m == null)
continue;
if (m.containsBufrTable())
continue; // not data
if (m.getNumberDatasets() == 0)
continue; // empty
return m;
}
return null;
}
public void reset() {
lastPos = 0;
}
public boolean hasNext() throws IOException {
if (lastPos >= raf.length())
return false;
raf.seek(lastPos);
boolean more = raf.searchForward(matcher, -1); // will scan to end for another BUFR header
if (more) {
long stop = raf.getFilePointer();
int sizeHeader = (int) (stop - lastPos);
if (sizeHeader > 30)
sizeHeader = 30;
header = new byte[sizeHeader];
startPos = stop - sizeHeader;
raf.seek(startPos);
int nRead = raf.read(header);
if (nRead != header.length) {
log.warn("Unable to read full BUFR header. Got " + nRead + " but expected " + header.length);
return false;
}
}
if (debug && countMsgs % 100 == 0)
System.out.printf("%d ", countMsgs);
return more;
}
public Message next() {
try {
long start = raf.getFilePointer();
raf.seek(start + 4);
BufrIndicatorSection is = new BufrIndicatorSection(raf);
BufrIdentificationSection ids = new BufrIdentificationSection(raf, is);
BufrDataDescriptionSection dds = new BufrDataDescriptionSection(raf);
long dataPos = raf.getFilePointer();
int dataLength = BufrNumbers.uint3(raf);
BufrDataSection dataSection = new BufrDataSection(dataPos, dataLength);
lastPos = dataPos + dataLength + 4; // position to the end message plus 1
// nbytes += lastPos - startPos;
/*
* length consistency checks
* if (is.getBufrLength() > MAX_MESSAGE_SIZE) {
* log.warn("Illegal length - BUFR message at pos "+start+" header= "+cleanup(header)+" size= "+is.getBufrLength()
* );
* return null;
* }
*/
if (is.getBufrEdition() > 4) {
log.warn("Illegal edition - BUFR message at pos " + start + " header= " + cleanup(header));
return null;
}
if (is.getBufrEdition() < 2) {
log.warn("Edition " + is.getBufrEdition() + " is not supported - BUFR message at pos " + start + " header= "
+ cleanup(header));
return null;
}
// check that end section is correct
long ending = dataPos + dataLength;
raf.seek(dataPos + dataLength);
for (int i = 0; i < 3; i++) {
if (raf.read() != 55) {
log.warn("Missing End of BUFR message at pos= {} header= {} file= {}", ending, cleanup(header),
raf.getLocation());
return null;
}
}
// allow off by one : may happen when dataLength rounded to even bytes
if (raf.read() != 55) {
raf.seek(dataPos + dataLength - 1); // see if byte before is a '7'
if (raf.read() != 55) {
log.warn("Missing End of BUFR message at pos= {} header= {} edition={} file= {}", ending, cleanup(header),
is.getBufrEdition(), raf.getLocation());
return null;
} else {
log.info("End of BUFR message off-by-one at pos= {} header= {} edition={} file= {}", ending, cleanup(header),
is.getBufrEdition(), raf.getLocation());
lastPos--;
}
}
Message m = new Message(raf, is, ids, dds, dataSection);
m.setHeader(cleanup(header));
m.setStartPos(start);
if (useEmbeddedTables && m.containsBufrTable()) {
if (embedTable == null)
embedTable = new EmbeddedTable(m, raf);
embedTable.addTable(m);
} else if (embedTable != null) {
m.setTableLookup(embedTable.getTableLookup());
}
countMsgs++;
countObs += dds.getNumberDatasets();
raf.seek(start + is.getBufrLength());
return m;
} catch (IOException ioe) {
log.error("Error reading message at " + lastPos, ioe);
lastPos = raf.getFilePointer(); // don't do an infinite loop
return null;
}
}
public TableLookup getTableLookup() throws IOException {
while (hasNext()) {
next();
}
return (embedTable != null) ? embedTable.getTableLookup() : null;
}
public byte[] getMessageBytesFromLast(Message m) throws IOException {
long startPos = m.getStartPos();
int length = (int) (lastPos - startPos);
byte[] result = new byte[length];
raf.seek(startPos);
raf.readFully(result);
return result;
}
public byte[] getMessageBytes(Message m) throws IOException {
long startPos = m.getStartPos();
int length = m.is.getBufrLength();
byte[] result = new byte[length];
raf.seek(startPos);
raf.readFully(result);
return result;
}
public int getTotalObs() {
return countObs;
}
public int getTotalMessages() {
return countMsgs;
}
// the WMO header is in here somewhere when the message comes over the IDD
private static String cleanup(byte[] h) {
byte[] bb = new byte[h.length];
int count = 0;
for (byte b : h) {
if (b >= 32 && b < 127)
bb[count++] = b;
}
return new String(bb, 0, count, StandardCharsets.UTF_8);
}
public long writeCurrentMessage(WritableByteChannel out) throws IOException {
long nbytes = lastPos - startPos;
return raf.readToByteChannel(out, startPos, nbytes);
}
}
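A minimal usage sketch for the scanner above, not part of this commit; the file path is hypothetical and the loop sits in a method that may throw IOException:

try (RandomAccessFile raf = RandomAccessFile.acquire("C:/data/obs.bufr")) {
    MessageScanner scanner = new MessageScanner(raf);
    while (scanner.hasNext()) {
        Message m = scanner.next();
        if (m == null || m.containsBufrTable())
            continue;               // skip unreadable messages and embedded table messages
        System.out.printf("message at %d: %d datasets%n", m.getStartPos(), m.getNumberDatasets());
    }
    System.out.printf("total messages=%d, total obs=%d%n", scanner.getTotalMessages(), scanner.getTotalObs());
}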

View File

@ -0,0 +1,370 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import ucar.ma2.*;
import ucar.nc2.Sequence;
import ucar.nc2.Structure;
import ucar.nc2.Variable;
import ucar.unidata.io.RandomAccessFile;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.List;
/**
* Read data for uncompressed messages.
*
* Within one message there are n obs (datasets) and s fields in each dataset.
* For uncompressed datasets, storage order is data(obs, fld) (fld varying fastest) :
*
* R11, R12, R13, . . . R1s
* R21, R22, R23, . . . R2s
* ....
* Rn1, Rn2, Rn3, . . . Rns
*
* where Rij is the jth value of the ith data subset.
* the datasets each occupy an identical number of bits, unless delayed replication is used,
* and are not necessarily aligned on octet boundaries.
*
* A replicated field (structure) takes a group of fields and replicates them:
*
* Ri1, (Ri2, Ri3)*r, . . . Ris
*
* where r is set in the data descriptor, and is the same for all datasets.
*
* A delayed replicated field (sequence) takes a group of fields and replicates them, and adds the number of
* replications
* in the data :
*
* Ri1, dri, (Ri2, Ri3)*dri, . . . Ris
*
* where the width (nbits) of dr is set in the data descriptor. This dr can be different for each dataset in the
* message.
* It can be 0. When it has a bit width of 1, it indicates an optional set of fields.
*
* --------------------------
*
* We use an ArrayStructureBB to hold the data, and fill it sequentially as we scan the message.
* Fixed length nested Structures are kept in the ArrayStructureBB.
* Variable length objects (Strings, Sequences) are added to the heap.
*/
public class MessageUncompressedDataReader {
/**
* Read all datasets from a single message
*
* @param s outer variables
* @param proto prototype message, has been processed
* @param m read this message
* @param raf from this file
* @param f output bit count debugging info (may be null)
* @return ArrayStructure with all the data from the message in it.
* @throws IOException on read error
*/
ArrayStructure readEntireMessage(Structure s, Message proto, Message m, RandomAccessFile raf, Formatter f)
throws IOException {
// transfer info from proto message
DataDescriptor.transferInfo(proto.getRootDataDescriptor().getSubKeys(), m.getRootDataDescriptor().getSubKeys());
// allocate ArrayStructureBB for outer structure
// This assumes that all of the fields and all of the datasets are being read
StructureMembers members = s.makeStructureMembers();
ArrayStructureBB.setOffsets(members);
int n = m.getNumberDatasets();
ArrayStructureBB abb = new ArrayStructureBB(members, new int[] {n});
ByteBuffer bb = abb.getByteBuffer();
bb.order(ByteOrder.BIG_ENDIAN);
boolean addTime = false; // (s.findVariable(BufrIosp2.TIME_NAME) != null);
readData(abb, m, raf, null, addTime, f);
return abb;
}
/**
* Read some or all datasets from a single message
*
* @param abb place data into here in order (may be null)
* @param m read this message
* @param raf from this file
* @param r which datasets, relative to this message. null == all.
* @param addTime add the time coordinate
* @param f output bit count debugging info (may be null)
* @return number of datasets read
* @throws IOException on read error
*/
public int readData(ArrayStructureBB abb, Message m, RandomAccessFile raf, Range r, boolean addTime, Formatter f)
throws IOException {
BitReader reader = new BitReader(raf, m.dataSection.getDataPos() + 4);
DataDescriptor root = m.getRootDataDescriptor();
if (root.isBad)
return 0;
Request req = new Request(abb, r);
int n = m.getNumberDatasets();
m.counterDatasets = new BitCounterUncompressed[n]; // one for each dataset
m.msg_nbits = 0;
// loop over the rows
int count = 0;
for (int i = 0; i < n; i++) {
if (f != null)
f.format("Count bits in observation %d%n", i);
// the top table always has exactly one "row", since we are working with a single obs
m.counterDatasets[i] = new BitCounterUncompressed(root, 1, 0);
DebugOut out = (f == null) ? null : new DebugOut(f);
req.setRow(i);
if (req.wantRow() && addTime) {
req.bb.putInt(0); // placeholder for time assumes an int
count++;
}
readData(out, reader, m.counterDatasets[i], root.subKeys, 0, req);
m.msg_nbits += m.counterDatasets[i].countBits(m.msg_nbits);
}
return count;
}
private static class Request {
ArrayStructureBB abb;
ByteBuffer bb;
Range r;
int row;
Request(ArrayStructureBB abb, Range r) {
this.abb = abb;
if (abb != null)
bb = abb.getByteBuffer();
this.r = r;
this.row = 0;
}
Request setRow(int row) {
this.row = row;
return this;
}
boolean wantRow() {
if (abb == null)
return false;
if (r == null)
return true;
return r.contains(row);
}
}
/**
* count/read the bits in one row of a "nested table", defined by List<DataDescriptor> dkeys.
*
* @param out optional debug output, may be null
* @param reader read data with this
* @param dkeys the fields of the table
* @param table put the results here
* @param nestedRow which row of the table
* @param req read data into here, may be null
* @throws IOException on read error
*/
private void readData(DebugOut out, BitReader reader, BitCounterUncompressed table, List<DataDescriptor> dkeys,
int nestedRow, Request req) throws IOException {
for (DataDescriptor dkey : dkeys) {
if (!dkey.isOkForVariable()) {// misc skip
if (out != null)
out.f.format("%s %d %s (%s) %n", out.indent(), out.fldno++, dkey.name, dkey.getFxyName());
continue;
}
// sequence
if (dkey.replication == 0) {
// find out how many objects in the sequence
int count = (int) reader.bits2UInt(dkey.replicationCountSize);
if (out != null)
out.f.format("%4d delayed replication count=%d %n", out.fldno++, count);
if ((out != null) && (count > 0)) {
out.f.format("%4d %s read sequence %s count= %d bitSize=%d start at=0x%x %n", out.fldno, out.indent(),
dkey.getFxyName(), count, dkey.replicationCountSize, reader.getPos());
}
// read the data
BitCounterUncompressed bitCounterNested = table.makeNested(dkey, count, nestedRow, dkey.replicationCountSize);
ArraySequence seq = makeArraySequenceUncompressed(out, reader, bitCounterNested, dkey, req);
if (req.wantRow()) {
int index = req.abb.addObjectToHeap(seq);
req.bb.putInt(index); // an index into the Heap
}
continue;
}
// compound
if (dkey.type == 3) {
BitCounterUncompressed nested = table.makeNested(dkey, dkey.replication, nestedRow, 0);
if (out != null)
out.f.format("%4d %s read structure %s count= %d%n", out.fldno, out.indent(), dkey.getFxyName(),
dkey.replication);
for (int i = 0; i < dkey.replication; i++) {
if (out != null) {
out.f.format("%s read row %d (struct %s) %n", out.indent(), i, dkey.getFxyName());
out.indent.incr();
readData(out, reader, nested, dkey.subKeys, i, req);
out.indent.decr();
} else {
readData(null, reader, nested, dkey.subKeys, i, req);
}
}
continue;
}
// char data
if (dkey.type == 1) {
byte[] vals = readCharData(dkey, reader, req);
if (out != null) {
String s = new String(vals, StandardCharsets.UTF_8);
out.f.format("%4d %s read char %s (%s) width=%d end at= 0x%x val=<%s>%n", out.fldno++, out.indent(),
dkey.getFxyName(), dkey.getName(), dkey.bitWidth, reader.getPos(), s);
}
continue;
}
// otherwise read a number
long val = readNumericData(dkey, reader, req);
if (out != null)
out.f.format("%4d %s read %s (%s %s) bitWidth=%d end at= 0x%x raw=%d convert=%f%n", out.fldno++, out.indent(),
dkey.getFxyName(), dkey.getName(), dkey.getUnits(), dkey.bitWidth, reader.getPos(), val, dkey.convert(val));
}
}
private byte[] readCharData(DataDescriptor dkey, BitReader reader, Request req) throws IOException {
int nchars = dkey.getByteWidthCDM();
byte[] b = new byte[nchars];
for (int i = 0; i < nchars; i++)
b[i] = (byte) reader.bits2UInt(8);
if (req.wantRow()) {
for (int i = 0; i < nchars; i++)
req.bb.put(b[i]);
}
return b;
}
private long readNumericData(DataDescriptor dkey, BitReader reader, Request req) throws IOException {
// numeric data
long result = reader.bits2UInt(dkey.bitWidth);
if (req.wantRow()) {
// place into byte buffer
if (dkey.getByteWidthCDM() == 1) {
req.bb.put((byte) result);
} else if (dkey.getByteWidthCDM() == 2) {
byte b1 = (byte) (result & 0xff);
byte b2 = (byte) ((result & 0xff00) >> 8);
req.bb.put(b2);
req.bb.put(b1);
} else if (dkey.getByteWidthCDM() == 4) {
byte b1 = (byte) (result & 0xff);
byte b2 = (byte) ((result & 0xff00) >> 8);
byte b3 = (byte) ((result & 0xff0000) >> 16);
byte b4 = (byte) ((result & 0xff000000) >> 24);
req.bb.put(b4);
req.bb.put(b3);
req.bb.put(b2);
req.bb.put(b1);
} else {
byte b1 = (byte) (result & 0xff);
byte b2 = (byte) ((result & 0xff00) >> 8);
byte b3 = (byte) ((result & 0xff0000) >> 16);
byte b4 = (byte) ((result & 0xff000000) >> 24);
byte b5 = (byte) ((result & 0xff00000000L) >> 32);
byte b6 = (byte) ((result & 0xff0000000000L) >> 40);
byte b7 = (byte) ((result & 0xff000000000000L) >> 48);
byte b8 = (byte) ((result & 0xff00000000000000L) >> 56);
req.bb.put(b8);
req.bb.put(b7);
req.bb.put(b6);
req.bb.put(b5);
req.bb.put(b4);
req.bb.put(b3);
req.bb.put(b2);
req.bb.put(b1);
}
}
return result;
}
// read in the data into an ArrayStructureBB, wrapped by an ArraySequence
private ArraySequence makeArraySequenceUncompressed(DebugOut out, BitReader reader,
BitCounterUncompressed bitCounterNested, DataDescriptor seqdd, Request req) throws IOException {
int count = bitCounterNested.getNumberRows(); // the actual number of rows in this sequence
ArrayStructureBB abb = null;
StructureMembers members = null;
if (req.wantRow()) {
Sequence seq = seqdd.refersTo;
assert seq != null;
// for the obs structure
int[] shape = {count};
// allocate ArrayStructureBB for outer structure
// LOOK why is this different from ArrayStructureBB.setOffsets() ?
int offset = 0;
members = seq.makeStructureMembers();
for (StructureMembers.Member m : members.getMembers()) {
m.setDataParam(offset);
Variable mv = seq.findVariable(m.getName());
BufrConfig.FieldConverter fld = (BufrConfig.FieldConverter) mv.getSPobject();
DataDescriptor dk = fld.dds;
if (dk.replication == 0) // LOOK
offset += 4;
else
offset += dk.getByteWidthCDM();
if (m.getStructureMembers() != null)
ArrayStructureBB.setOffsets(m.getStructureMembers());
}
abb = new ArrayStructureBB(members, shape);
ByteBuffer bb = abb.getByteBuffer();
bb.order(ByteOrder.BIG_ENDIAN);
}
Request nreq = new Request(abb, null);
// loop through nested obs
for (int i = 0; i < count; i++) {
if (out != null) {
out.f.format("%s read row %d (seq %s) %n", out.indent(), i, seqdd.getFxyName());
out.indent.incr();
readData(out, reader, bitCounterNested, seqdd.getSubKeys(), i, nreq);
out.indent.decr();
} else {
readData(null, reader, bitCounterNested, seqdd.getSubKeys(), i, nreq);
}
}
return abb != null ? new ArraySequence(members, abb.getStructureDataIterator(), count) : null;
}
}
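A hedged illustration of the storage order described in the class comment of MessageUncompressedDataReader: when every field has a fixed bit width and there is no delayed replication, the bit offset of value R(i,j) is a plain running sum. The widths below are made up.

// offset in bits of field 'fld' within observation 'obs', fixed-width fields only
static long bitOffsetOf(int obs, int fld, int[] fieldBitWidths) {
    long datasetBits = 0;
    for (int w : fieldBitWidths)
        datasetBits += w;                        // every dataset occupies the same number of bits
    long offset = (long) obs * datasetBits;      // skip the first 'obs' datasets
    for (int j = 0; j < fld; j++)
        offset += fieldBitWidths[j];             // then skip the earlier fields of this dataset
    return offset;
}
// e.g. with widths {12, 7, 19}: R(0,0) starts at bit 0 and R(1,2) at 38 + 12 + 7 = 57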

View File

@ -0,0 +1,182 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr;
import org.meteoinfo.data.meteodata.bufr.tables.BufrTables;
import org.meteoinfo.data.meteodata.bufr.tables.TableA;
import org.meteoinfo.data.meteodata.bufr.tables.TableB;
import org.meteoinfo.data.meteodata.bufr.tables.TableD;
import javax.annotation.concurrent.Immutable;
import java.io.IOException;
/**
* Encapsulates lookup into the BUFR Tables.
*
* @author caron
* @since Jul 14, 2008
*/
@Immutable
public class TableLookup {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(TableLookup.class);
private static final boolean showErrors = false;
/////////////////////////////////////////
private TableA localTableA = null;
private final TableB localTableB;
private final TableD localTableD;
private final TableB wmoTableB;
private final TableD wmoTableD;
private final BufrTables.Mode mode;
public TableLookup(int center, int subcenter, int masterTableVersion, int local, int cat) throws IOException {
this.wmoTableB = BufrTables.getWmoTableB(masterTableVersion);
this.wmoTableD = BufrTables.getWmoTableD(masterTableVersion);
BufrTables.Tables tables = BufrTables.getLocalTables(center, subcenter, masterTableVersion, local, cat);
if (tables != null) {
this.localTableB = tables.b;
this.localTableD = tables.d;
this.mode = (tables.mode == null) ? BufrTables.Mode.localOverride : tables.mode;
} else {
this.localTableB = null;
this.localTableD = null;
this.mode = BufrTables.Mode.localOverride;
}
}
public TableLookup(BufrIdentificationSection ids, TableB b, TableD d) throws IOException {
this.wmoTableB = BufrTables.getWmoTableB(ids.getMasterTableVersion());
this.wmoTableD = BufrTables.getWmoTableD(ids.getMasterTableVersion());
this.localTableB = b;
this.localTableD = d;
this.mode = BufrTables.Mode.localOverride;
}
public TableLookup(BufrIdentificationSection ids, TableA a, TableB b, TableD d) throws IOException {
this.wmoTableB = BufrTables.getWmoTableB(ids.getMasterTableVersion());
this.wmoTableD = BufrTables.getWmoTableD(ids.getMasterTableVersion());
this.localTableA = a;
this.localTableB = b;
this.localTableD = d;
this.mode = BufrTables.Mode.localOverride;
}
public String getWmoTableBName() {
return wmoTableB.getName();
}
public String getLocalTableAName() {
return localTableA == null ? "none" : localTableA.getName();
}
public String getLocalTableBName() {
return localTableB == null ? "none" : localTableB.getName();
}
public String getLocalTableDName() {
return localTableD == null ? "none" : localTableD.getName();
}
public String getWmoTableDName() {
return wmoTableD.getName();
}
public BufrTables.Mode getMode() {
return mode;
}
public TableA getLocalTableA() {
return localTableA;
}
public TableB getLocalTableB() {
return localTableB;
}
public TableD getLocalTableD() {
return localTableD;
}
public TableA.Descriptor getDescriptorTableA(int code) {
if (localTableA != null) {
return localTableA.getDescriptor(code);
} else {
return null;
}
}
public TableB.Descriptor getDescriptorTableB(short fxy) {
TableB.Descriptor b = null;
boolean isWmoRange = Descriptor.isWmoRange(fxy);
if (isWmoRange && (mode == BufrTables.Mode.wmoOnly)) {
b = wmoTableB.getDescriptor(fxy);
} else if (isWmoRange && (mode == BufrTables.Mode.wmoLocal)) {
b = wmoTableB.getDescriptor(fxy);
if ((b == null) && (localTableB != null))
b = localTableB.getDescriptor(fxy);
} else if (isWmoRange && (mode == BufrTables.Mode.localOverride)) {
if (localTableB != null)
b = localTableB.getDescriptor(fxy);
if (b == null)
b = wmoTableB.getDescriptor(fxy);
else
b.setLocalOverride(true);
} else if (!isWmoRange) {
if (localTableB != null)
b = localTableB.getDescriptor(fxy);
}
if (b == null) { // look forward in standard WMO table; often the version number of the message is wrong
b = BufrTables.getWmoTableBlatest().getDescriptor(fxy);
}
if (b == null && showErrors)
log.warn(" TableLookup cant find Table B descriptor = {} in tables {}, {} mode={}", ucar.nc2.iosp.bufr.Descriptor.makeString(fxy),
getLocalTableBName(), getWmoTableBName(), mode);
return b;
}
public TableD.Descriptor getDescriptorTableD(short fxy) {
TableD.Descriptor d = null;
boolean isWmoRange = Descriptor.isWmoRange(fxy);
if (isWmoRange && (mode == BufrTables.Mode.wmoOnly)) {
d = wmoTableD.getDescriptor(fxy);
} else if (isWmoRange && (mode == BufrTables.Mode.wmoLocal)) {
d = wmoTableD.getDescriptor(fxy);
if ((d == null) && (localTableD != null))
d = localTableD.getDescriptor(fxy);
} else if (isWmoRange && (mode == BufrTables.Mode.localOverride)) {
if (localTableD != null)
d = localTableD.getDescriptor(fxy);
if (d == null)
d = wmoTableD.getDescriptor(fxy);
else
d.setLocalOverride(true);
} else {
if (localTableD != null)
d = localTableD.getDescriptor(fxy);
}
if (d == null) { // look forward in standard WMO table; often the version number of the message is wrong
d = BufrTables.getWmoTableDlatest().getDescriptor(fxy);
}
if (d == null && showErrors)
log.warn(String.format(" TableLookup cant find Table D descriptor %s in tables %s,%s mode=%s%n",
Descriptor.makeString(fxy), getLocalTableDName(), getWmoTableDName(), mode));
return d;
}
}
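A hedged usage sketch of the lookup above; the center/table numbers are placeholders and the TableB.Descriptor accessors (getName, getUnits) are assumed from the upstream ucar class this file is adapted from:

static void describe(int center, int subcenter, int master, int local, int cat) throws IOException {
    TableLookup lookup = new TableLookup(center, subcenter, master, local, cat);
    short fxy = Descriptor.getFxy2("012101");    // 0-12-101, air temperature
    TableB.Descriptor b = lookup.getDescriptorTableB(fxy);
    if (b != null)
        System.out.printf("%s -> %s (%s)%n", Descriptor.makeString(fxy), b.getName(), b.getUnits());
}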

View File

@ -0,0 +1,278 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.point;
import org.meteoinfo.data.meteodata.bufr.BufrConfig;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import ucar.nc2.stream.NcStream;
import ucar.nc2.time.CalendarDate;
import ucar.unidata.io.RandomAccessFile;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
* Manage cdm index (ncx) for Bufr files.
* Covers BufrCdmIndexProto
* Never completed for operational use, could redo as needed
*
* @author caron
* @since 8/14/13
*/
public class BufrCdmIndex {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BufrCdmIndex.class);
public static final String MAGIC_START = "BufrCdmIndex";
public static final String NCX_IDX = ".ncx";
private static final int version = 1;
public static File calcIndexFile(String bufrFilename) {
File bufrFile = new File(bufrFilename);
String name = bufrFile.getName();
File result = new File(bufrFile.getParent(), name + BufrCdmIndex.NCX_IDX);
if (result.exists())
return result;
int pos = name.indexOf('.');
if (pos > 0) {
name = name.substring(0, pos);
result = new File(bufrFile.getParent(), name + BufrCdmIndex.NCX_IDX);
if (result.exists())
return result;
}
return null;
}
public static boolean writeIndex(String bufrFilename, BufrConfig config, File idxFile) throws IOException {
return new BufrCdmIndex().writeIndex2(bufrFilename, config, idxFile);
}
public static BufrCdmIndex readIndex(String indexFilename) throws IOException {
BufrCdmIndex index = new BufrCdmIndex();
try (RandomAccessFile raf = RandomAccessFile.acquire(indexFilename)) {
index.readIndex(raf);
}
return index;
}
/////////////////////////////////////////////////////////////////////////////////
/*
* MAGIC_START
* version
* sizeIndex
* BufrCdmIndexProto (sizeIndex bytes)
*/
private boolean writeIndex2(String bufrFilename, BufrConfig config, File indexFile) throws IOException {
if (indexFile.exists()) {
if (!indexFile.delete())
log.warn(" BufrCdmIndex cant delete index file {}", indexFile.getPath());
}
log.debug(" createIndex for {}", indexFile.getPath());
try (RandomAccessFile raf = new RandomAccessFile(indexFile.getPath(), "rw")) {
raf.order(RandomAccessFile.BIG_ENDIAN);
//// header message
raf.write(MAGIC_START.getBytes(StandardCharsets.UTF_8));
raf.writeInt(version);
// build it
BufrCdmIndexProto.BufrIndex.Builder indexBuilder = BufrCdmIndexProto.BufrIndex.newBuilder();
indexBuilder.setFilename(bufrFilename);
root = buildField(config.getRootConverter());
indexBuilder.setRoot(root);
indexBuilder.setStart(config.getStart());
indexBuilder.setEnd(config.getEnd());
indexBuilder.setNobs(config.getNobs());
Map<String, BufrConfig.BufrStation> smaps = config.getStationMap();
if (smaps != null) {
List<BufrConfig.BufrStation> stations = new ArrayList<>(smaps.values());
Collections.sort(stations);
for (BufrConfig.BufrStation s : stations) {
indexBuilder.addStations(buildStation(s));
}
}
// write it
BufrCdmIndexProto.BufrIndex index = indexBuilder.build();
byte[] b = index.toByteArray();
NcStream.writeVInt(raf, b.length); // message size
raf.write(b); // message - all in one gulp
log.debug(" file size = {} bytes", raf.length());
return true;
}
}
public static boolean writeIndex(BufrCdmIndex index, BufrField root, File indexFile) throws IOException {
if (indexFile.exists()) {
if (!indexFile.delete())
log.warn(" BufrCdmIndex cant delete index file {}", indexFile.getPath());
}
log.debug(" createIndex for {}", indexFile.getPath());
try (RandomAccessFile raf = new RandomAccessFile(indexFile.getPath(), "rw")) {
raf.order(RandomAccessFile.BIG_ENDIAN);
//// header message
raf.write(MAGIC_START.getBytes(StandardCharsets.UTF_8));
raf.writeInt(version);
// build it
BufrCdmIndexProto.BufrIndex.Builder indexBuilder = BufrCdmIndexProto.BufrIndex.newBuilder();
indexBuilder.setFilename(index.bufrFilename);
BufrCdmIndexProto.Field rootf = buildField(root);
indexBuilder.setRoot(rootf);
indexBuilder.setStart(index.start);
indexBuilder.setEnd(index.end);
indexBuilder.setNobs(index.nobs);
if (index.stations != null) {
for (BufrCdmIndexProto.Station s : index.stations) {
indexBuilder.addStations(s);
}
}
// write it
BufrCdmIndexProto.BufrIndex indexOut = indexBuilder.build();
byte[] b = indexOut.toByteArray();
NcStream.writeVInt(raf, b.length); // message size
raf.write(b); // message - all in one gulp
log.debug(" write BufrCdmIndexProto= {} bytes", b.length);
log.debug(" file size = {} bytes", raf.length());
return true;
}
}
private BufrCdmIndexProto.Station buildStation(BufrConfig.BufrStation s) {
BufrCdmIndexProto.Station.Builder builder = BufrCdmIndexProto.Station.newBuilder();
builder.setId(s.getName());
builder.setCount(s.count);
if (s.getWmoId() != null)
builder.setWmoId(s.getWmoId());
if (s.getDescription() != null)
builder.setDesc(s.getDescription());
builder.setLat(s.getLatitude());
builder.setLon(s.getLongitude());
builder.setAlt(s.getAltitude());
return builder.build();
}
private static BufrCdmIndexProto.Field buildField(BufrField fld) {
BufrCdmIndexProto.Field.Builder fldBuilder = BufrCdmIndexProto.Field.newBuilder();
fldBuilder.setFxy(fld.getFxy());
fldBuilder.setScale(fld.getScale());
fldBuilder.setReference(fld.getReference());
fldBuilder.setBitWidth(fld.getBitWidth());
if (fld.getName() != null)
fldBuilder.setName(fld.getName());
if (fld.getDesc() != null)
fldBuilder.setDesc(fld.getDesc());
if (fld.getUnits() != null)
fldBuilder.setUnits(fld.getUnits());
if (fld.getChildren() != null) {
for (BufrField child : fld.getChildren())
fldBuilder.addFlds(buildField(child));
}
if (fld.getAction() != null && fld.getAction() != BufrCdmIndexProto.FldAction.none)
fldBuilder.setAction(fld.getAction());
if (fld.getType() != null)
fldBuilder.setType(fld.getType());
if (fld.isSeq()) {
fldBuilder.setMin(fld.getMin());
fldBuilder.setMax(fld.getMax());
}
return fldBuilder.build();
}
//////////////////////////////////////////////////////////////////
public String idxFilename;
public String bufrFilename;
public BufrCdmIndexProto.Field root;
public List<BufrCdmIndexProto.Station> stations;
public long start, end;
public long nobs;
protected boolean readIndex(RandomAccessFile raf) {
this.idxFilename = raf.getLocation();
try {
raf.order(RandomAccessFile.BIG_ENDIAN);
raf.seek(0);
//// header message
if (!NcStream.readAndTest(raf, MAGIC_START.getBytes(StandardCharsets.UTF_8))) {
log.error("BufrCdmIndex {}: invalid index", raf.getLocation());
return false;
}
int indexVersion = raf.readInt();
boolean versionOk = (indexVersion == version);
if (!versionOk) {
log.warn("BufrCdmIndex {}: index found version={}, want version= {}", raf.getLocation(), indexVersion, version);
return false;
}
int size = NcStream.readVInt(raf);
if ((size < 0) || (size > 100 * 1000 * 1000)) {
log.warn("BufrCdmIndex {}: invalid or empty index ", raf.getLocation());
return false;
}
byte[] m = new byte[size];
raf.readFully(m);
BufrCdmIndexProto.BufrIndex proto = BufrCdmIndexProto.BufrIndex.parseFrom(m);
bufrFilename = proto.getFilename();
root = proto.getRoot();
stations = proto.getStationsList();
start = proto.getStart();
end = proto.getEnd();
nobs = proto.getNobs();
// showProtoRoot(root);
} catch (Throwable t) {
log.error("Error reading index " + raf.getLocation(), t);
return false;
}
return true;
}
public void showIndex(Formatter f) {
f.format("BufrCdmIndex %n");
f.format(" idxFilename=%s%n", idxFilename);
f.format(" bufrFilename=%s%n", bufrFilename);
f.format(" dates=[%s,%s]%n", CalendarDate.of(start), CalendarDate.of(end));
f.format(" nobs=%s%n", nobs);
if (stations != null) {
f.format(" # stations=%d%n", stations.size());
int count = 0;
for (BufrCdmIndexProto.Station s : stations)
count += s.getCount();
f.format(" # stations obs=%d%n", count);
}
}
}
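A hedged sketch of reading an existing ncx index that sits next to a BUFR file; the path is hypothetical and the calls can throw IOException:

File idxFile = BufrCdmIndex.calcIndexFile("C:/data/obs.bufr");
if (idxFile != null) {
    BufrCdmIndex index = BufrCdmIndex.readIndex(idxFile.getPath());
    Formatter f = new Formatter();
    index.showIndex(f);
    System.out.println(f);
}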

View File

@ -0,0 +1,416 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.point;
import org.jdom2.Element;
import thredds.client.catalog.Catalog;
import ucar.ma2.*;
import ucar.nc2.Attribute;
import ucar.nc2.Structure;
import ucar.nc2.Variable;
import ucar.nc2.VariableSimpleIF;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.dataset.SequenceDS;
import ucar.nc2.dataset.VariableDS;
import ucar.nc2.ft.*;
import ucar.nc2.ft.point.*;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import ucar.nc2.iosp.IOServiceProvider;
import org.meteoinfo.data.meteodata.bufr.BufrIosp2;
import ucar.nc2.time.CalendarDate;
import ucar.nc2.time.CalendarDateRange;
import ucar.nc2.time.CalendarDateUnit;
import ucar.nc2.util.CancelTask;
import ucar.nc2.util.Indent;
import ucar.unidata.geoloc.EarthLocation;
import ucar.unidata.geoloc.LatLonRect;
import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Use BufrConfig to make BUFR files into PointFeatureDataset
*
* @author caron
* @since 8/14/13
*/
public class BufrFeatureDatasetFactory implements FeatureDatasetFactory {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BufrFeatureDatasetFactory.class);
private static CalendarDateUnit bufrDateUnits = CalendarDateUnit.of(null, "msecs since 1970-01-01T00:00:00");
private static String bufrAltUnits = "m"; // LOOK fake
@Override
public Object isMine(FeatureType wantFeatureType, NetcdfDataset ncd, Formatter errlog) {
IOServiceProvider iosp = ncd.getIosp();
return (iosp instanceof BufrIosp2) ? true : null;
}
@Override
public FeatureType[] getFeatureTypes() {
return new FeatureType[]{FeatureType.ANY_POINT};
}
@Override
public FeatureDataset open(FeatureType ftype, NetcdfDataset ncd, Object analysis, CancelTask task, Formatter errlog)
throws IOException {
// must have an index file
File indexFile = BufrCdmIndex.calcIndexFile(ncd.getLocation());
if (indexFile == null)
return null;
BufrCdmIndex index = BufrCdmIndex.readIndex(indexFile.getPath());
return new BufrStationDataset(ncd, index);
}
private void show(Element parent, Indent indent, Formatter f) {
if (parent == null)
return;
for (Element child : parent.getChildren("fld", Catalog.ncmlNS)) {
String idx = child.getAttributeValue("idx");
String fxy = child.getAttributeValue("fxy");
String name = child.getAttributeValue("name");
String action = child.getAttributeValue("action");
f.format("%sidx='%s' fxy='%s' name='%s' action='%s'%n", indent, idx, fxy, name, action);
indent.incr();
show(child, indent, f);
indent.decr();
}
}
private void processSeq(Structure struct, Element parent) {
if (parent == null || struct == null)
return;
List<Variable> vars = struct.getVariables();
for (Element child : parent.getChildren("fld", Catalog.ncmlNS)) {
String idxS = child.getAttributeValue("idx");
int idx = Integer.parseInt(idxS);
if (idx < 0 || idx >= vars.size()) {
log.error("Bad index = {}", child);
continue;
}
Variable want = vars.get(idx);
struct.removeMemberVariable(want);
}
}
private static class BufrStationDataset extends PointDatasetImpl {
private Munge munger;
private BufrCdmIndex index;
private SequenceDS obs;
private BufrStationDataset(NetcdfDataset ncfile, BufrCdmIndex index) {
super(ncfile, FeatureType.STATION);
this.index = index;
// create the list of data variables
munger = new Munge();
obs = (SequenceDS) ncfile.findVariable(BufrIosp2.obsRecordName);
this.dataVariables = munger.makeDataVariables(index, obs);
BufrStationCollection bufrCollection = new BufrStationCollection(ncfile.getLocation());
setPointFeatureCollection(bufrCollection);
CalendarDateRange dateRange = CalendarDateRange.of(CalendarDate.of(index.start), CalendarDate.of(index.end));
setDateRange(dateRange);
}
@Override
public FeatureType getFeatureType() {
return FeatureType.STATION;
}
@Override
public void getDetailInfo(Formatter sf) {
super.getDetailInfo(sf);
index.showIndex(sf);
}
private class BufrStationCollection extends StationTimeSeriesCollectionImpl {
StandardFields.StandardFieldsFromStructure extract;
private BufrStationCollection(String name) {
super(name, null, null);
// need the center id to match the standard fields
Attribute centerAtt = netcdfDataset.findGlobalAttribute(BufrIosp2.centerId);
int center = (centerAtt == null) ? 0 : centerAtt.getNumericValue().intValue();
this.extract = new StandardFields.StandardFieldsFromStructure(center, obs);
try {
this.timeUnit = bufrDateUnits;
} catch (Exception e) {
e.printStackTrace(); // can't happen
}
this.altUnits = "m"; // LOOK fake units
}
@Override
protected StationHelper createStationHelper() {
StationHelper stationHelper = new StationHelper();
for (BufrCdmIndexProto.Station s : index.stations)
stationHelper.addStation(new BufrStation(s));
return stationHelper;
}
private class BufrStation extends StationTimeSeriesFeatureImpl {
private BufrStation(BufrCdmIndexProto.Station proto) {
super(proto.getId(), proto.getDesc(), proto.getWmoId(), proto.getLat(), proto.getLon(), proto.getAlt(),
bufrDateUnits, bufrAltUnits, proto.getCount(), StructureData.EMPTY);
}
@Override
public PointFeatureIterator getPointFeatureIterator() throws IOException {
return new BufrStationIterator(obs.getStructureIterator(), null);
}
@Nonnull
@Override
public StructureData getFeatureData() {
return StructureData.EMPTY;
}
// iterates over the records for this station
public class BufrStationIterator extends PointIteratorFromStructureData {
public BufrStationIterator(StructureDataIterator structIter, Filter filter) {
super(structIter, filter);
}
@Override
protected PointFeature makeFeature(int recnum, StructureData sdata) throws IOException {
extract.extract(sdata);
String stationId = extract.getStationId();
if (!stationId.equals(s.getName()))
return null;
CalendarDate date = extract.makeCalendarDate();
return new BufrStationPoint(s, date.getMillis(), 0, munger.munge(sdata)); // LOOK obsTime, nomTime
}
}
public class BufrStationPoint extends PointFeatureImpl implements StationFeatureHas {
StructureData sdata;
public BufrStationPoint(EarthLocation location, double obsTime, double nomTime, StructureData sdata) {
super(BufrStation.this, location, obsTime, nomTime, bufrDateUnits);
this.sdata = sdata;
}
@Nonnull
@Override
public StructureData getDataAll() {
return sdata;
}
@Nonnull
@Override
public StructureData getFeatureData() {
return sdata;
}
@Override
public StationFeature getStationFeature() {
return BufrStation.this;
}
}
}
// flatten into a PointFeatureCollection
// if empty, may return null
@Override
public PointFeatureCollection flatten(LatLonRect boundingBox, CalendarDateRange dateRange) throws IOException {
return new BufrPointFeatureCollection(boundingBox, dateRange);
}
private class BufrPointFeatureCollection extends PointCollectionImpl {
StationHelper stationsWanted;
PointFeatureIterator.Filter filter;
BufrPointFeatureCollection(LatLonRect boundingBox, CalendarDateRange dateRange) throws IOException {
super("BufrPointFeatureCollection", bufrDateUnits, bufrAltUnits);
setBoundingBox(boundingBox);
if (dateRange != null) {
getInfo();
info.setCalendarDateRange(dateRange);
}
createStationHelper();
stationsWanted = getStationHelper().subset(boundingBox);
if (dateRange != null)
filter = new PointIteratorFiltered.SpaceAndTimeFilter(null, dateRange);
}
@Override
public PointFeatureIterator getPointFeatureIterator() throws IOException {
return new BufrRecordIterator(obs.getStructureIterator(), filter);
}
// iterates once over all the records
public class BufrRecordIterator extends PointIteratorFromStructureData {
int countHere;
public BufrRecordIterator(StructureDataIterator structIter, Filter filter) {
super(structIter, filter);
}
@Override
protected PointFeature makeFeature(int recnum, StructureData sdata) throws IOException {
extract.extract(sdata);
String stationId = extract.getStationId();
StationFeature want = stationsWanted.getStation(stationId);
if (want == null)
return null;
CalendarDate date = extract.makeCalendarDate();
countHere++;
return new BufrPoint(want, date.getMillis(), 0, munger.munge(sdata));
}
@Override
public void close() {
log.debug(String.format("BufrRecordIterator passed %d features super claims %d%n", countHere,
getInfo().nfeatures));
super.close();
}
}
public class BufrPoint extends PointFeatureImpl implements StationPointFeature {
StructureData sdata;
public BufrPoint(StationFeature want, double obsTime, double nomTime, StructureData sdata) {
super(BufrPointFeatureCollection.this, want, obsTime, nomTime, bufrDateUnits);
this.sdata = sdata;
}
@Nonnull
@Override
public StructureData getDataAll() {
return sdata;
}
@Nonnull
@Override
public StructureData getFeatureData() {
return sdata;
}
@Override
public StationFeature getStation() {
return (StationFeature) location;
}
}
}
}
}
private static class Action {
BufrCdmIndexProto.FldAction what;
private Action(BufrCdmIndexProto.FldAction what) {
this.what = what;
}
}
private static class Munge {
String sdataName;
boolean needed;
protected Map<String, Action> actions = new HashMap<>(32);
protected Map<String, StructureData> missingData = new HashMap<>(32);
protected Map<String, VariableDS> vars = new HashMap<>(32);
List<VariableSimpleIF> makeDataVariables(BufrCdmIndex index, Structure obs) {
this.sdataName = obs.getShortName() + "Munged";
List<Variable> members = obs.getVariables();
List<VariableSimpleIF> result = new ArrayList<>(members.size());
List<BufrCdmIndexProto.Field> flds = index.root.getFldsList();
int count = 0;
for (Variable v : members) {
BufrCdmIndexProto.Field fld = flds.get(count++);
if (fld.getAction() != null && fld.getAction() != BufrCdmIndexProto.FldAction.none) {
needed = true;
Action act = new Action(fld.getAction());
actions.put(v.getShortName(), act);
if (fld.getAction() == BufrCdmIndexProto.FldAction.remove) {
continue; // skip
} else if (fld.getAction() == BufrCdmIndexProto.FldAction.asMissing) {
// promote the children
Structure s = (Structure) v;
for (Variable child : s.getVariables()) {
result.add(child);
vars.put(child.getShortName(), (VariableDS) child); // track ones we may have to create missing values for
}
continue;
}
}
if (v.getDataType() == DataType.SEQUENCE)
continue;
result.add(v);
}
return result;
}
StructureData munge(StructureData org) throws IOException {
return needed ? new StructureDataMunged2(org) : org;
}
// LOOK needs to be ported to immutable StructureDataComposite
private class StructureDataMunged2 extends StructureDataComposite {
StructureDataMunged2(StructureData sdata) throws IOException {
add(sdata);
for (StructureMembers.Member m : sdata.getMembers()) {
Action act = actions.get(m.getName());
if (act == null) {
// do nothing
} else if (act.what == BufrCdmIndexProto.FldAction.remove) {
this.members.hideMember(m);
} else if (act.what == BufrCdmIndexProto.FldAction.asMissing) { // 0 or 1
int pos = this.members.hideMember(m);
ArraySequence seq = sdata.getArraySequence(m);
StructureDataIterator iter = seq.getStructureDataIterator();
if (iter.hasNext()) {
add(pos, iter.next());
} else {
// missing data
add(pos, makeMissing(m, seq));
}
}
}
}
}
StructureData makeMissing(StructureMembers.Member seqm, ArraySequence seq) {
StructureData result = missingData.get(seqm.getName());
if (result != null)
return result;
StructureMembers sm = seq.getStructureMembers().toBuilder(false).build();
StructureDataW resultW = new StructureDataW(sm);
for (StructureMembers.Member m : sm.getMembers()) {
VariableDS var = vars.get(m.getName());
Array missingData = var.getMissingDataArray(m.getShape());
resultW.setMemberData(m, missingData);
}
missingData.put(seqm.getName(), resultW);
return resultW;
}
} // Munge
}
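A hedged sketch of driving this factory directly (it is normally discovered through the netCDF-Java FeatureDatasetFactory mechanism); the path is hypothetical and the BUFR IOSP must already be registered so that isMine() accepts the dataset:

NetcdfDataset ncd = NetcdfDataset.openDataset("C:/data/obs.bufr");
Formatter errlog = new Formatter();
BufrFeatureDatasetFactory fac = new BufrFeatureDatasetFactory();
if (fac.isMine(FeatureType.STATION, ncd, errlog) != null) {
    FeatureDataset fd = fac.open(FeatureType.STATION, ncd, null, null, errlog);
    if (fd != null) {                            // null when no .ncx index exists next to the file
        fd.getDetailInfo(errlog);
        System.out.println(errlog);
    }
}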

View File

@ -0,0 +1,48 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.point;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import java.util.List;
/**
* Abstraction for BUFR field.
* Used in writing index, so we can make changes in BufrCdmIndexPanel
*
* @author caron
* @since 8/20/13
*/
public interface BufrField {
String getName();
String getDesc();
String getUnits();
short getFxy();
String getFxyName();
BufrCdmIndexProto.FldAction getAction();
BufrCdmIndexProto.FldType getType();
boolean isSeq();
int getMin();
int getMax();
int getScale();
int getReference();
int getBitWidth();
List<? extends BufrField> getChildren();
}

View File

@ -0,0 +1,355 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.point;
import org.meteoinfo.data.meteodata.bufr.BufrIosp2;
import org.meteoinfo.data.meteodata.bufr.DataDescriptor;
import org.meteoinfo.data.meteodata.bufr.Message;
import ucar.ma2.DataType;
import ucar.ma2.StructureData;
import ucar.ma2.StructureMembers;
import ucar.nc2.Attribute;
import ucar.nc2.Structure;
import ucar.nc2.Variable;
import ucar.nc2.ft.point.bufr.BufrCdmIndexProto;
import ucar.nc2.time.CalendarDate;
import java.util.*;
/**
* Extract standard fields from BUFR
*
* @author caron
* @since 8/7/13
*/
public class StandardFields {
private static int nflds = 50;
private static Map<BufrCdmIndexProto.FldType, List<String>> type2Flds = new HashMap<>(2 * nflds);
private static Map<String, TypeAndOrder> fld2type = new HashMap<>(2 * nflds);
private static Map<Integer, Map<String, BufrCdmIndexProto.FldType>> locals = new HashMap<>(10);
static {
// first choice
addField("0-1-1", BufrCdmIndexProto.FldType.wmoBlock);
addField("0-1-2", BufrCdmIndexProto.FldType.wmoId);
addField("0-1-18", BufrCdmIndexProto.FldType.stationId);
addField("0-4-1", BufrCdmIndexProto.FldType.year);
addField("0-4-2", BufrCdmIndexProto.FldType.month);
addField("0-4-3", BufrCdmIndexProto.FldType.day);
addField("0-4-4", BufrCdmIndexProto.FldType.hour);
addField("0-4-5", BufrCdmIndexProto.FldType.minute);
addField("0-4-6", BufrCdmIndexProto.FldType.sec);
addField("0-5-1", BufrCdmIndexProto.FldType.lat);
addField("0-6-1", BufrCdmIndexProto.FldType.lon);
addField("0-7-30", BufrCdmIndexProto.FldType.heightOfStation);
// second choice
addField("0-1-15", BufrCdmIndexProto.FldType.stationId);
addField("0-1-19", BufrCdmIndexProto.FldType.stationId);
addField("0-4-7", BufrCdmIndexProto.FldType.sec);
addField("0-4-43", BufrCdmIndexProto.FldType.doy);
addField("0-5-2", BufrCdmIndexProto.FldType.lat);
addField("0-6-2", BufrCdmIndexProto.FldType.lon);
addField("0-7-1", BufrCdmIndexProto.FldType.heightOfStation);
// third choice
addField("0-1-62", BufrCdmIndexProto.FldType.stationId);
addField("0-1-63", BufrCdmIndexProto.FldType.stationId);
addField("0-7-2", BufrCdmIndexProto.FldType.height);
addField("0-7-10", BufrCdmIndexProto.FldType.height);
addField("0-7-7", BufrCdmIndexProto.FldType.height);
// 4th choice LOOK
addField("0-1-5", BufrCdmIndexProto.FldType.stationId);
addField("0-1-6", BufrCdmIndexProto.FldType.stationId);
// addField("0-1-7", BufrCdmIndexProto.FldType.stationId); satellite id
addField("0-1-8", BufrCdmIndexProto.FldType.stationId);
addField("0-1-10", BufrCdmIndexProto.FldType.stationId);
addField("0-1-11", BufrCdmIndexProto.FldType.stationId);
addField("0-7-6", BufrCdmIndexProto.FldType.heightAboveStation);
addField("0-7-7", BufrCdmIndexProto.FldType.heightAboveStation);
// locals
/*
* Map<String, BufrCdmIndexProto.FldType> ncep = new HashMap<String, BufrCdmIndexProto.FldType>(10);
* ncep.put("0-1-198", BufrCdmIndexProto.FldType.stationId);
* locals.put(7, ncep);
*/
Map<String, BufrCdmIndexProto.FldType> uu = new HashMap<>(10);
uu.put("0-1-194", BufrCdmIndexProto.FldType.stationId);
locals.put(59, uu);
}
private static class TypeAndOrder {
BufrCdmIndexProto.FldType type;
int order;
private TypeAndOrder(BufrCdmIndexProto.FldType type, int order) {
this.type = type;
this.order = order;
}
}
private static void addField(String fld, BufrCdmIndexProto.FldType type) {
List<String> list = type2Flds.computeIfAbsent(type, k -> new ArrayList<>());
list.add(fld); // keep in order
TypeAndOrder tao = new TypeAndOrder(type, list.size() - 1);
fld2type.put(fld, tao);
}
//////////////////////////////////////////////////////
private static TypeAndOrder findTao(int center, String key) {
Map<String, BufrCdmIndexProto.FldType> local = locals.get(center);
if (local != null) {
BufrCdmIndexProto.FldType result = local.get(key);
if (result != null)
return new TypeAndOrder(result, -1);
}
return fld2type.get(key);
}
public static BufrCdmIndexProto.FldType findField(int center, String key) {
Map<String, BufrCdmIndexProto.FldType> local = locals.get(center);
if (local != null) {
BufrCdmIndexProto.FldType result = local.get(key);
if (result != null)
return result;
}
return findStandardField(key);
}
public static BufrCdmIndexProto.FldType findStandardField(String key) {
TypeAndOrder tao = fld2type.get(key);
return (tao == null) ? null : tao.type;
}
/////////////////////////////////////////////////////////////////////////////////////////
public static StandardFieldsFromMessage extract(Message m) {
StandardFieldsFromMessage result = new StandardFieldsFromMessage();
extract(m.ids.getCenterId(), m.getRootDataDescriptor(), result);
return result;
}
private static void extract(int center, DataDescriptor dds, StandardFieldsFromMessage extract) {
for (DataDescriptor subdds : dds.getSubKeys()) {
extract.match(center, subdds);
if (subdds.getSubKeys() != null)
extract(center, subdds, extract);
}
}
public static class StandardFieldsFromMessage {
Map<BufrCdmIndexProto.FldType, List<DataDescriptor>> typeMap = new TreeMap<>();
void match(int center, DataDescriptor dds) {
String name = dds.getFxyName();
BufrCdmIndexProto.FldType type = findField(center, name);
if (type == null)
return;
// got a match
List<DataDescriptor> list = typeMap.computeIfAbsent(type, k -> new ArrayList<>(3));
list.add(dds);
}
public boolean hasStation() {
if (typeMap.get(BufrCdmIndexProto.FldType.lat) == null)
return false;
if (typeMap.get(BufrCdmIndexProto.FldType.lon) == null)
return false;
if (typeMap.get(BufrCdmIndexProto.FldType.stationId) != null)
return true;
return typeMap.get(BufrCdmIndexProto.FldType.wmoId) != null;
}
public boolean hasTime() {
if (typeMap.get(BufrCdmIndexProto.FldType.year) == null)
return false;
if (typeMap.get(BufrCdmIndexProto.FldType.month) == null)
return false;
return typeMap.get(BufrCdmIndexProto.FldType.day) != null || typeMap.get(BufrCdmIndexProto.FldType.doy) != null;
// if (typeMap.get(BufrCdmIndexProto.FldType.hour) == null) return false; // LOOK could assume 0:0 ??
// if (typeMap.get(BufrCdmIndexProto.FldType.minute) == null) return false;
}
@Override
public String toString() {
try (Formatter f = new Formatter()) {
for (BufrCdmIndexProto.FldType type : typeMap.keySet()) {
f.format(" %20s: ", type);
List<DataDescriptor> list = typeMap.get(type);
for (DataDescriptor dds : list) {
f.format(" %s", dds.getName());
if (dds.getDesc() != null)
f.format("=%s", dds.getDesc());
f.format(",");
}
f.format(" %n");
}
return f.toString();
}
}
}
public static class StandardFieldsFromStructure {
private static class Field {
TypeAndOrder tao;
String memberName;
String valueS;
int value = -1;
double valueD = Double.NaN;
double scale = 1.0;
double offset;
boolean hasScale;
private Field(TypeAndOrder tao, Variable v) {
this.tao = tao;
this.memberName = v.getShortName();
Attribute att = v.attributes().findAttribute("scale_factor");
if (att != null && !att.isString()) {
scale = att.getNumericValue().doubleValue();
hasScale = true;
}
att = v.attributes().findAttribute("add_offset");
if (att != null && !att.isString()) {
offset = att.getNumericValue().doubleValue();
hasScale = true;
}
}
}
private Map<BufrCdmIndexProto.FldType, Field> map = new HashMap<>();
public StandardFieldsFromStructure(int center, Structure obs) {
// run through all available fields - LOOK we are not recursing into sub sequences
for (Variable v : obs.getVariables()) {
Attribute att = v.attributes().findAttribute(BufrIosp2.fxyAttName);
if (att == null)
continue;
String key = att.getStringValue();
TypeAndOrder tao = findTao(center, key);
if (tao == null)
continue;
Field oldFld = map.get(tao.type);
if (oldFld == null) {
Field fld = new Field(tao, v);
map.put(tao.type, fld);
} else {
if (oldFld.tao.order < tao.order) { // replace old one
Field fld = new Field(tao, v);
map.put(tao.type, fld);
}
}
}
}
// extract standard fields values from specific StructureData
public void extract(StructureData sdata) {
StructureMembers sm = sdata.getStructureMembers();
for (Field fld : map.values()) {
StructureMembers.Member m = sm.findMember(fld.memberName);
DataType dtype = m.getDataType();
if (dtype.isString())
fld.valueS = sdata.getScalarString(m).trim();
else if (dtype.isIntegral()) {
fld.value = sdata.convertScalarInt(m);
fld.valueD = fld.value;
} else if (dtype.isNumeric())
fld.valueD = sdata.convertScalarDouble(m);
}
}
public boolean hasField(BufrCdmIndexProto.FldType type) {
return null != map.get(type);
}
public String getFieldName(BufrCdmIndexProto.FldType type) {
Field fld = map.get(type);
return (fld == null) ? null : fld.memberName;
}
public String getFieldValueS(BufrCdmIndexProto.FldType type) {
Field fld = map.get(type);
if (fld == null)
return null;
if (fld.valueS != null)
return fld.valueS;
if (fld.value != -1)
return Integer.toString(fld.value);
if (!Double.isNaN(fld.valueD))
return Double.toString(fld.valueD);
return null;
}
public int getFieldValue(BufrCdmIndexProto.FldType type) {
Field fld = map.get(type);
return (fld == null) ? -1 : fld.value;
}
public double getFieldValueD(BufrCdmIndexProto.FldType type) {
Field fld = map.get(type);
if (fld == null)
return Double.NaN;
if (fld.hasScale)
return fld.valueD * fld.scale + fld.offset;
return fld.valueD;
}
public String getStationId() {
if (hasField(BufrCdmIndexProto.FldType.stationId))
return getFieldValueS(BufrCdmIndexProto.FldType.stationId);
if (hasField(BufrCdmIndexProto.FldType.wmoBlock) && hasField(BufrCdmIndexProto.FldType.wmoId))
return getFieldValue(BufrCdmIndexProto.FldType.wmoBlock) + "/" + getFieldValue(BufrCdmIndexProto.FldType.wmoId);
if (hasField(BufrCdmIndexProto.FldType.wmoId))
return Integer.toString(getFieldValue(BufrCdmIndexProto.FldType.wmoId));
return null;
}
public CalendarDate makeCalendarDate() {
if (!hasField(BufrCdmIndexProto.FldType.year))
return null;
int year = getFieldValue(BufrCdmIndexProto.FldType.year);
int hour = !hasField(BufrCdmIndexProto.FldType.hour) ? 0 : getFieldValue(BufrCdmIndexProto.FldType.hour);
int minute = !hasField(BufrCdmIndexProto.FldType.minute) ? 0 : getFieldValue(BufrCdmIndexProto.FldType.minute);
int sec = !hasField(BufrCdmIndexProto.FldType.sec) ? 0 : getFieldValue(BufrCdmIndexProto.FldType.sec);
if (sec < 0) {
sec = 0;
} else if (sec > 0) {
Field fld = map.get(BufrCdmIndexProto.FldType.sec);
if (fld.scale != 0) {
sec = (int) (sec * fld.scale); // throw away msecs
}
if (sec < 0 || sec > 59)
sec = 0;
}
if (hasField(BufrCdmIndexProto.FldType.month) && hasField(BufrCdmIndexProto.FldType.day)) {
int month = getFieldValue(BufrCdmIndexProto.FldType.month);
int day = getFieldValue(BufrCdmIndexProto.FldType.day);
return CalendarDate.of(null, year, month, day, hour, minute, sec);
} else if (hasField(BufrCdmIndexProto.FldType.doy)) {
int doy = getFieldValue(BufrCdmIndexProto.FldType.doy);
return CalendarDate.withDoy(null, year, doy, hour, minute, sec);
}
return null;
}
}
}
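A hedged sketch of pulling the standard station and time fields out of one observation record; the center id, obs Structure and StructureData are assumed to come from an already-opened BUFR dataset:

static void showStandardFields(int center, Structure obs, StructureData sdata) {
    StandardFields.StandardFieldsFromStructure extract =
            new StandardFields.StandardFieldsFromStructure(center, obs);
    extract.extract(sdata);                      // one record from obs.getStructureIterator()
    System.out.printf("station=%s lat=%f lon=%f time=%s%n",
            extract.getStationId(),
            extract.getFieldValueD(BufrCdmIndexProto.FldType.lat),
            extract.getFieldValueD(BufrCdmIndexProto.FldType.lon),
            extract.makeCalendarDate());
}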

View File

@ -0,0 +1,195 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import org.meteoinfo.data.meteodata.bufr.Descriptor;
import ucar.nc2.wmo.CommonCodeTable;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** Read BUFR Code / Flag tables. */
public class CodeFlagTables {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(CodeFlagTables.class);
private static final String CodeFlagFilename = "wmo/BUFRCREX_37_0_0_CodeFlag_en.xml";
static Map<Short, CodeFlagTables> tableMap;
public static CodeFlagTables getTable(short id) {
if (tableMap == null)
init();
if (id == 263)
return useCC(id, 5); // 0-1-7
if (id == 526)
return useCC(id, 7); // 0-2-14
if (id == 531)
return useCC(id, 8); // 0-2-19
if (id == 5699)
return useCC(id, 3); // 0-22-67
if (id == 5700)
return useCC(id, 4); // 0-22-68
return tableMap.get(id);
}
private static CodeFlagTables useCC(short fxy, int cc) {
CodeFlagTables cft = tableMap.get(fxy);
if (cft == null) {
CommonCodeTable cct = CommonCodeTable.getTable(cc);
cft = new CodeFlagTables(fxy, cct.getTableName(), cct.getMap());
tableMap.put(fxy, cft);
}
return cft;
}
public static boolean hasTable(short id) {
if (tableMap == null)
init();
CodeFlagTables result = tableMap.get(id);
return result != null;
}
private static void init() {
tableMap = new HashMap<>(300);
init(tableMap);
}
public static Map<Short, CodeFlagTables> getTables() {
if (tableMap == null)
init();
return tableMap;
}
/*
* <Exp_CodeFlagTables_E>
* <No>837</No>
* <FXY>002119</FXY>
* <ElementName_E>Instrument operations</ElementName_E>
* <CodeFigure>0</CodeFigure>
* <EntryName_E>Intermediate frequency calibration mode (IF CAL)</EntryName_E>
* <Status>Operational</Status>
* </Exp_CodeFlagTables_E>
*
* <BUFRCREX_19_1_1_CodeFlag_en>
* <No>2905</No>
* <FXY>020042</FXY>
* <ElementName_en>Airframe icing present</ElementName_en>
* <CodeFigure>2</CodeFigure>
* <EntryName_en>Reserved</EntryName_en>
* <Status>Operational</Status>
* </BUFRCREX_19_1_1_CodeFlag_en>
*
* <BUFRCREX_22_0_1_CodeFlag_en>
* <No>3183</No>
* <FXY>020063</FXY>
* <ElementName_en>Special phenomena</ElementName_en>
* <CodeFigure>31</CodeFigure>
* <EntryName_en>Slight coloration of clouds at sunrise associated with a tropical disturbance</EntryName_en>
* <Status>Operational</Status>
* </BUFRCREX_22_0_1_CodeFlag_en>
*
*/
static void init(Map<Short, CodeFlagTables> table) {
String filename = BufrTables.RESOURCE_PATH + CodeFlagFilename;
try (InputStream is = CodeFlagTables.class.getResourceAsStream(filename)) {
SAXBuilder builder = new SAXBuilder();
builder.setExpandEntities(false);
org.jdom2.Document tdoc = builder.build(is);
Element root = tdoc.getRootElement();
List<Element> elems = root.getChildren();
for (Element elem : elems) {
String fxyS = elem.getChildText("FXY");
String desc = elem.getChildText("ElementName_en");
short fxy = Descriptor.getFxy2(fxyS);
CodeFlagTables ct = table.get(fxy);
if (ct == null) {
ct = new CodeFlagTables(fxy, desc);
table.put(fxy, ct);
}
String line = elem.getChildText("No");
String codeS = elem.getChildText("CodeFigure");
String value = elem.getChildText("EntryName_en");
if ((codeS == null) || (value == null))
continue;
if (value.toLowerCase().startsWith("reserved"))
continue;
if (value.toLowerCase().startsWith("not used"))
continue;
int code;
if (codeS.toLowerCase().contains("all")) {
code = -1;
} else
try {
code = Integer.parseInt(codeS);
} catch (NumberFormatException e) {
log.debug("NumberFormatException on line " + line + " in " + codeS);
continue;
}
ct.addValue((short) code, value);
}
} catch (IOException | JDOMException e) {
log.error("Can't read BUFR code table " + filename, e);
}
}
////////////////////////////////////////////////
// TODO Make Immutable
private short fxy;
private String name;
private Map<Integer, String> map; // needs to be integer for EnumTypedef
CodeFlagTables(short fxy, String name) {
this.fxy = fxy;
this.name = (name == null) ? fxy() : name; // StringUtil2.replace(name, ' ', "_") + "("+fxy()+")";
map = new HashMap<>(20);
}
private CodeFlagTables(short fxy, String name, Map<Integer, String> map) {
this.fxy = fxy;
this.name = (name == null) ? fxy() : name;
this.map = map;
}
public String getName() {
return name;
}
public Map<Integer, String> getMap() {
return map;
}
void addValue(int value, String text) {
map.put(value, text);
}
public short getId() {
return fxy;
}
public String fxy() {
int f = fxy >> 14;
int x = (fxy & 0xff00) >> 8;
int y = (fxy & 0xff);
return f + "-" + x + "-" + y;
}
public String toString() {
return name;
}
}
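
A minimal usage sketch, not part of the committed file: look up the code/flag table attached to one descriptor and print its entries. The FXY key 0-20-42 is only illustrative, and it assumes the WMO XML resource referenced above is on the classpath.

    short key = Descriptor.getFxy("0-20-42");                 // illustrative descriptor key
    if (CodeFlagTables.hasTable(key)) {
        CodeFlagTables table = CodeFlagTables.getTable(key);
        for (Map.Entry<Integer, String> e : table.getMap().entrySet()) {
            System.out.println(table.fxy() + " " + e.getKey() + " = " + e.getValue());
        }
    }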

View File

@ -0,0 +1,315 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
/*
* BufrRead mnemonic.java 1.0 05/09/2008
*
* @author Robb Kambic
*
* @version 1.0
*/
import com.google.re2j.Matcher;
import com.google.re2j.Pattern;
import org.meteoinfo.data.meteodata.bufr.Descriptor;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
/**
 * A class that reads an NCEP mnemonic table. Descriptors with X < 48 and Y < 192 are
 * skipped because they are already present in the latest WMO tables.
*/
public class NcepMnemonic {
// | HEADR | 362001 | TABLE D ENTRY - PROFILE COORDINATES | |
private static final Pattern fields3 = Pattern.compile("^\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|\\s+(.*)\\s*\\|");
private static final Pattern fields2 = Pattern.compile("^\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|");
private static final Pattern fields5 =
Pattern.compile("^\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|\\s+(.*)\\s+\\|");
/**
* Pattern to get 3 integers from beginning of line.
*/
private static final Pattern ints6 = Pattern.compile("^\\d{6}");
private static final int XlocalCutoff = 48;
private static final int YlocalCutoff = 192;
private static final boolean debugTable = false;
/**
* Read NCEP mnemonic BUFR tables.
*
* @return true on success.
*/
public static boolean read(InputStream ios, BufrTables.Tables tables) throws IOException {
if (ios == null)
return false;
if (tables.b == null)
tables.b = new TableB("fake", "fake");
if (tables.d == null)
tables.d = new TableD("fake", "fake");
HashMap<String, String> number = new HashMap<>(); // key = mnemonic value = fxy
HashMap<String, String> desc = new HashMap<>(); // key = mnemonic value = description
HashMap<String, String> mnseq = new HashMap<>();
try {
BufferedReader dataIS = new BufferedReader(new InputStreamReader(ios, StandardCharsets.UTF_8));
// read mnemonic table
Matcher m;
// read header info and disregard
while (true) {
String line = dataIS.readLine();
if (line == null)
throw new RuntimeException("Bad NCEP mnemonic BUFR table ");
if (line.contains("MNEMONIC"))
break;
}
// read mnemonic, number, and description
// | HEADR | 362001 | TABLE D ENTRY - PROFILE COORDINATES |
while (true) {
String line = dataIS.readLine();
if (line == null)
break;
if (line.contains("MNEMONIC"))
break;
if (line.contains("----"))
continue;
if (line.startsWith("*"))
continue;
if (line.startsWith("| "))
continue;
m = fields3.matcher(line);
if (m.find()) {
String mnu = m.group(1).trim();
String fxy = m.group(2).trim();
if (fxy.startsWith("3")) {
number.put(mnu, fxy);
desc.put(mnu, m.group(3).replace("TABLE D ENTRY - ", "").trim());
} else if (fxy.startsWith("0")) {
number.put(mnu, fxy);
desc.put(mnu, m.group(3).replace("TABLE B ENTRY - ", "").trim());
} else if (fxy.startsWith("A")) {
number.put(mnu, fxy);
desc.put(mnu, m.group(3).replace("TABLE A ENTRY - ", "").trim());
}
} else if (debugTable) {
System.out.println("bad mnemonic, number, and description: " + line);
}
}
// read in sequences using mnemonics
// | ETACLS1 | HEADR {PROFILE} SURF FLUX HYDR D10M {SLYR} XTRA |
while (true) {
String line = dataIS.readLine();
if (line == null)
break;
if (line.contains("MNEMONIC"))
break;
if (line.contains("----"))
continue;
if (line.startsWith("| "))
continue;
if (line.startsWith("*"))
continue;
m = fields2.matcher(line);
if (m.find()) {
String mnu = m.group(1).trim();
if (mnseq.containsKey(mnu)) { // concat lines with same mnu
String value = mnseq.get(mnu);
value = value + " " + m.group(2);
mnseq.put(mnu, value);
} else {
mnseq.put(mnu, m.group(2));
}
} else if (debugTable) {
System.out.println("bad sequence mnemonic: " + line);
}
}
// create sequences, replacing mnemonics with numbers
for (Map.Entry<String, String> ent : mnseq.entrySet()) {
String seq = ent.getValue();
seq = seq.replaceAll("\\<", "1-1-0 0-31-0 ");
seq = seq.replaceAll("\\>", "");
seq = seq.replaceAll("\\{", "1-1-0 0-31-1 ");
seq = seq.replaceAll("\\}", "");
seq = seq.replaceAll("\\(", "1-1-0 0-31-2 ");
seq = seq.replaceAll("\\)", "");
StringTokenizer stoke = new StringTokenizer(seq, " ");
List<Short> list = new ArrayList<>();
while (stoke.hasMoreTokens()) {
String mn = stoke.nextToken();
if (mn.charAt(1) == '-') {
list.add(Descriptor.getFxy(mn));
continue;
}
// element descriptor needs hyphens
m = ints6.matcher(mn);
if (m.find()) {
String F = mn.substring(0, 1);
String X = removeLeading0(mn.substring(1, 3));
String Y = removeLeading0(mn.substring(3));
list.add(Descriptor.getFxy(F + "-" + X + "-" + Y));
continue;
}
if (mn.startsWith("\"")) {
int idx = mn.lastIndexOf('"');
String count = mn.substring(idx + 1);
list.add(Descriptor.getFxy("1-1-" + count));
mn = mn.substring(1, idx);
}
if (mn.startsWith(".")) {
String des = mn.substring(mn.length() - 4);
mn = mn.replace(des, "....");
}
String fxy = number.get(mn);
String F = fxy.substring(0, 1);
String X = removeLeading0(fxy.substring(1, 3));
String Y = removeLeading0(fxy.substring(3));
list.add(Descriptor.getFxy(F + "-" + X + "-" + Y));
}
String fxy = number.get(ent.getKey());
String X = removeLeading0(fxy.substring(1, 3));
String Y = removeLeading0(fxy.substring(3));
// these are in latest tables
if (XlocalCutoff > Integer.parseInt(X) && YlocalCutoff > Integer.parseInt(Y))
continue;
// key = F + "-" + X + "-" + Y;
short seqX = Short.parseShort(X.trim());
short seqY = Short.parseShort(Y.trim());
tables.d.addDescriptor(seqX, seqY, ent.getKey(), list);
// short id = Descriptor.getFxy(key);
// sequences.put(Short.valueOf(id), tableD);
}
// add some static repetition sequences
// LOOK why?
List<Short> list = new ArrayList<>();
// 16 bit delayed repetition
list.add(Descriptor.getFxy("1-1-0"));
list.add(Descriptor.getFxy("0-31-2"));
tables.d.addDescriptor((short) 60, (short) 1, "", list);
// tableD = new DescriptorTableD("", "3-60-1", list, false);
// tableD.put( "3-60-1", d);
// short id = Descriptor.getFxy("3-60-1");
// sequences.put(Short.valueOf(id), tableD);
list = new ArrayList<>();
// 8 bit delayed repetition
list.add(Descriptor.getFxy("1-1-0"));
list.add(Descriptor.getFxy("0-31-1"));
tables.d.addDescriptor((short) 60, (short) 2, "", list);
// tableD = new DescriptorTableD("", "3-60-2", list, false);
// tableD.put( "3-60-2", d);
// id = Descriptor.getFxy("3-60-2");
// sequences.put(Short.valueOf(id), tableD);
list = new ArrayList<>();
// 8 bit delayed repetition
list.add(Descriptor.getFxy("1-1-0"));
list.add(Descriptor.getFxy("0-31-1"));
tables.d.addDescriptor((short) 60, (short) 3, "", list);
// tableD = new DescriptorTableD("", "3-60-3", list, false);
// tableD.put( "3-60-3", d);
// id = Descriptor.getFxy("3-60-3");
// sequences.put(Short.valueOf(id), tableD);
list = new ArrayList<>();
// 1 bit delayed repetition
list.add(Descriptor.getFxy("1-1-0"));
list.add(Descriptor.getFxy("0-31-0"));
tables.d.addDescriptor((short) 60, (short) 4, "", list);
// tableD = new DescriptorTableD("", "3-60-4", list, false);
// tableD.put( "3-60-4", d);
// id = Descriptor.getFxy("3-60-4");
// sequences.put(Short.valueOf(id), tableD);
// add in element descriptors
// MNEMONIC | SCAL | REFERENCE | BIT | UNITS
// | FTIM | 0 | 0 | 24 | SECONDS |-------------|
// tableB = new TableB(tablename, tablename);
while (true) {
String line = dataIS.readLine();
if (line == null)
break;
if (line.contains("MNEMONIC"))
break;
if (line.startsWith("| "))
continue;
if (line.startsWith("*"))
continue;
m = fields5.matcher(line);
if (m.find()) {
if (m.group(1).equals("")) {
// do nothing
} else if (number.containsKey(m.group(1).trim())) { // add descriptor to tableB
String fxy = number.get(m.group(1).trim());
String X = fxy.substring(1, 3);
String Y = fxy.substring(3);
String mnu = m.group(1).trim();
String descr = desc.get(mnu);
short x = Short.parseShort(X.trim());
short y = Short.parseShort(Y.trim());
// these are in latest tables so skip LOOK WHY
if (XlocalCutoff > x && YlocalCutoff > y)
continue;
int scale = Integer.parseInt(m.group(2).trim());
int refVal = Integer.parseInt(m.group(3).trim());
int width = Integer.parseInt(m.group(4).trim());
String units = m.group(5).trim();
tables.b.addDescriptor(x, y, scale, refVal, width, mnu, units, descr);
} else if (debugTable) {
System.out.println("bad element descriptors: " + line);
}
}
}
} finally {
ios.close();
}
// LOOK why ?
// default for NCEP
// 0; 63; 0; 0; 0; 16; Numeric; Byte count
tables.b.addDescriptor((short) 63, (short) 0, 0, 0, 16, "Byte count", "Numeric", null);
return true;
}
private static String removeLeading0(String number) {
if (number.length() == 2 && number.startsWith("0")) {
number = number.substring(1);
} else if (number.length() == 3 && number.startsWith("00")) {
number = number.substring(2);
} else if (number.length() == 3 && number.startsWith("0")) {
number = number.substring(1);
}
return number;
}
}

View File

@ -0,0 +1,94 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import ucar.unidata.util.StringUtil2;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
/**
* Ncep local table overrides
*
* @author caron
* @since 8/22/13
*/
public class NcepTable {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NcepTable.class);
private static void readNcepTable(String location) throws IOException {
try (InputStream ios = BufrTables.openStream(location)) {
BufferedReader dataIS = new BufferedReader(new InputStreamReader(ios, StandardCharsets.UTF_8));
int count = 0;
while (true) {
String line = dataIS.readLine();
if (line == null)
break;
if (line.startsWith("#"))
continue;
count++;
String[] flds = line.split(";");
if (flds.length < 3) {
log.warn("{} BAD split == {}", count, line);
continue;
}
int fldidx = 0;
try {
int cat = Integer.parseInt(flds[fldidx++].trim());
int subcat = Integer.parseInt(flds[fldidx++].trim());
String desc = StringUtil2.remove(flds[fldidx++], '"');
entries.add(new TableEntry(cat, subcat, desc));
} catch (Exception e) {
log.warn("{} {} BAD line == {}", count, fldidx, line);
}
}
}
}
private static List<TableEntry> entries;
private static class TableEntry {
public int cat, subcat;
public String value;
public TableEntry(int cat, int subcat, String value) {
this.cat = cat;
this.subcat = subcat;
this.value = value.trim();
}
}
private static void init() {
entries = new ArrayList<>(100);
String location = "resource:/resources/bufrTables/local/ncep/DataSubCategories.csv";
try {
readNcepTable(location);
} catch (IOException e) {
e.printStackTrace();
}
}
public static String getDataSubcategory(int cat, int subcat) {
if (entries == null)
init();
for (TableEntry p : entries) {
if ((p.cat == cat) && (p.subcat == subcat))
return p.value;
}
return null;
}
}
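
A minimal usage sketch, not part of the committed file; the category/sub-category pair is illustrative:

    // Resolve an NCEP local data sub-category to its human-readable name; null if unknown.
    String subcatName = NcepTable.getDataSubcategory(0, 1);
    System.out.println(subcatName != null ? subcatName : "not an NCEP local sub-category");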

View File

@ -0,0 +1,183 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
import java.io.InputStream;
import java.util.Formatter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Read standard WMO Table A (data categories).
*/
public class TableA {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(TableA.class);
private static final String TABLEA_FILENAME = "wmo/BUFR_37_0_0_TableA_en.xml";
private static Map<Integer, Descriptor> tableA;
private final String name;
private final String location;
/*
* <BUFR_19_1_1_TableA_en>
* <No>27</No>
* <CodeFigure>28</CodeFigure>
* <Meaning_en>Precision orbit (satellite)</Meaning_en>
* <Status>Operational</Status>
* </BUFR_19_1_1_TableA_en>
*
* <Exp_BUFRTableA_E>
* <No>4</No>
* <CodeFigure>3</CodeFigure>
* <Meaning_E>Vertical soundings (satellite)</Meaning_E>
* <Status>Operational</Status>
* </Exp_BUFRTableA_E>
*/
private static void init() {
String filename = BufrTables.RESOURCE_PATH + TABLEA_FILENAME;
try (InputStream is = CodeFlagTables.class.getResourceAsStream(filename)) {
HashMap<Integer, Descriptor> map = new HashMap<>(100);
SAXBuilder builder = new SAXBuilder();
builder.setExpandEntities(false);
org.jdom2.Document tdoc = builder.build(is);
Element root = tdoc.getRootElement();
List<Element> elems = root.getChildren();
for (Element elem : elems) {
String line = elem.getChildText("No");
String codeS = elem.getChildText("CodeFigure");
String desc = elem.getChildText("Meaning_en");
try {
int code = Integer.parseInt(codeS);
Descriptor descriptor = new Descriptor(code, desc);
map.put(code, descriptor);
} catch (NumberFormatException e) {
log.debug("NumberFormatException on line " + line + " in " + codeS);
}
}
tableA = map;
} catch (Exception e) {
log.error("Can't read BUFR code table " + filename, e);
}
}
public TableA(String name, String location) {
this.name = name;
this.location = location;
tableA = new HashMap<>();
}
public Descriptor getDescriptor(int code) {
if (tableA == null)
init();
return tableA.get(code);
}
/**
* data category description, from table A
*
* @param cat data category
     * @return category description, or "Unknown category=<cat>" if not found
*/
public static String getDataCategory(int cat) {
if (tableA == null)
init();
Descriptor descriptor = tableA.get(cat);
return descriptor != null ? descriptor.getDescription() : "Unknown category=" + cat;
}
/**
* data category name, from table A
*
* @param cat data category
     * @return category name, or "obs_<cat>" if not found
*/
public static String getDataCategoryName(int cat) {
if (tableA == null)
init();
Descriptor descriptor = tableA.get(cat);
return descriptor != null ? descriptor.getName() : "obs_" + cat;
}
public String getName() {
return name;
}
public String getLocation() {
return location;
}
public TableA.Descriptor addDescriptor(int code, String description) {
TableA.Descriptor d = new TableA.Descriptor(code, description);
tableA.put(code, d);
return d;
}
public static class Descriptor implements Comparable<Descriptor> {
private int code;
private String name;
private String description;
private boolean localOverride;
Descriptor(int code, String description) {
this.code = code;
this.description = description;
this.name = "obs_" + String.valueOf(code);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return this.description;
}
/**
* Get code
*
* @return Code
*/
public int getCode() {
return this.code;
}
public String toString() {
return String.valueOf(code) + " " + getName() + " " +
this.description;
}
@Override
public int compareTo(Descriptor o) {
return code - o.getCode();
}
public boolean isLocal() {
return ((code >= 102) && (code <= 239));
}
public void setLocalOverride(boolean isOverride) {
this.localOverride = isOverride;
}
public boolean getLocalOverride() {
return localOverride;
}
}
}
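
A minimal usage sketch, not part of the committed file; the data-category code is illustrative:

    int cat = 2;   // illustrative BUFR Section 1 data category
    System.out.println(TableA.getDataCategory(cat));      // human-readable description, or a placeholder
    System.out.println(TableA.getDataCategoryName(cat));  // short name, defaults to "obs_<cat>"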

View File

@ -0,0 +1,221 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import org.meteoinfo.data.meteodata.bufr.DataDescriptor;
import javax.annotation.concurrent.Immutable;
import java.util.*;
/**
* BUFR Table B - Data descriptors
*
* @author caron
* @since Sep 25, 2008
*/
public class TableB {
private final String name;
private final String location;
private final Map<Short, Descriptor> map;
public TableB(String name, String location) {
this.name = name;
this.location = location;
map = new HashMap<>();
}
public void addDescriptor(short x, short y, int scale, int refVal, int width, String name, String units,
String desc) {
short id = (short) ((x << 8) + y);
map.put(id, new Descriptor(x, y, scale, refVal, width, name, units, desc));
}
public String getName() {
return name;
}
public String getLocation() {
return location;
}
public Descriptor getDescriptor(short id) {
return map.get(id);
}
public Collection<Descriptor> getDescriptors() {
return map.values();
}
public Collection<Short> getKeys() {
return map.keySet();
}
public void show(Formatter out) {
List<Short> sortKeys = new ArrayList<>(getKeys());
Collections.sort(sortKeys);
out.format("Table B %s %n", name);
for (Short key : sortKeys) {
Descriptor dd = getDescriptor(key);
if (dd != null)
dd.show(out);
out.format("%n");
}
}
/**
* Composite pattern - collection of TableB
*/
public static class Composite extends TableB {
List<TableB> list = new ArrayList<>(3);
public Composite(String name, String location) {
super(name, location);
}
public void addTable(TableB b) {
list.add(b);
}
@Override
public Descriptor getDescriptor(short id) {
for (TableB b : list) {
Descriptor d = b.getDescriptor(id);
if (d != null)
return d;
}
return null;
}
@Override
public Collection<Descriptor> getDescriptors() {
ArrayList<Descriptor> result = new ArrayList<>(3000);
for (TableB b : list)
result.addAll(b.getDescriptors());
return result;
}
@Override
public Collection<Short> getKeys() {
ArrayList<Short> result = new ArrayList<>(3000);
for (TableB b : list)
result.addAll(b.getKeys());
return result;
}
}
// inner class
@Immutable
public class Descriptor implements Comparable<Descriptor> {
private final short x, y;
private final int scale;
private final int refVal;
private final int dataWidth;
private final String units;
private final String name;
private final String desc;
private final boolean numeric;
private boolean localOverride;
Descriptor(short x, short y, int scale, int refVal, int width, String name, String units, String desc) {
this.x = x;
this.y = y;
this.scale = scale;
this.refVal = refVal;
this.dataWidth = width;
this.name = name.trim();
this.units = units.trim().intern();
this.desc = desc;
this.numeric = !DataDescriptor.isInternationalAlphabetUnit(units);
}
public int getScale() {
return scale;
}
public int getRefVal() {
return refVal;
}
public int getDataWidth() {
return dataWidth;
}
public String getUnits() {
return units;
}
public String getName() {
return name;
}
public String getDesc() { // optional - use as long name
return desc;
}
/**
* Get fxy as a short
*
* @return fxy encoded as a short
*/
public short getId() {
return (short) ((x << 8) + y);
}
/**
* Get fxy as a String, eg 0-5-22
*
* @return fxy encoded as a String
*/
public String getFxy() {
return "0-" + x + "-" + y;
}
/**
* is descriptor numeric or String
*
* @return true if numeric
*/
public boolean isNumeric() {
return numeric;
}
public boolean isLocal() {
return ((x >= 48) || (y >= 192));
}
public void setLocalOverride(boolean isOverride) {
this.localOverride = isOverride;
}
public boolean getLocalOverride() {
return localOverride;
}
public String toString() {
Formatter out = new Formatter();
show(out);
return out.toString();
}
public String getSource() {
return getLocation();
}
void show(Formatter out) {
out.format(" %8s scale=%d refVal=%d width=%d units=(%s) name=(%s)", getFxy(), scale, refVal, dataWidth, units,
name);
}
@Override
public int compareTo(Descriptor o) {
return getId() - o.getId();
}
}
}
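
A minimal sketch, not part of the committed file, of building and querying an in-memory Table B; the entry values are illustrative rather than taken from a WMO table:

    TableB b = new TableB("example", "in-memory");
    b.addDescriptor((short) 12, (short) 101, 2, 0, 16, "Air temperature", "K", null);
    TableB.Descriptor d = b.getDescriptor((short) ((12 << 8) + 101));   // same id encoding used by addDescriptor
    System.out.println(d.getFxy() + " scale=" + d.getScale() + " width=" + d.getDataWidth() + " units=" + d.getUnits());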

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
/**
* BUFR Table C - Data operators
*
* @author caron
* @since Oct 25, 2008
*/
public class TableC {
private static final String[] tableCdesc = new String[38];
static {
tableCdesc[1] = "change data width";
tableCdesc[2] = "change scale";
tableCdesc[3] = "change reference value";
tableCdesc[4] = "add associated field";
tableCdesc[5] = "signify character";
tableCdesc[6] = "signify data width for next descriptor";
tableCdesc[7] = "increase scale, reference value, and data width";
tableCdesc[21] = "data not present";
tableCdesc[22] = "quality information follows";
tableCdesc[23] = "substituted values operator";
tableCdesc[24] = "first order statistics";
tableCdesc[25] = "difference statistics";
tableCdesc[32] = "replaced/retained values";
tableCdesc[35] = "cancel backward data reference";
tableCdesc[36] = "define data present bit-map";
tableCdesc[37] = "use/cancel data present bit-map";
}
public static String getOperatorName(int index) {
if ((index < 0) || (index >= tableCdesc.length))
return "unknown";
return (tableCdesc[index] == null) ? "unknown" : tableCdesc[index];
}
}
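
A minimal usage sketch, not part of the committed file: operator names are indexed by the X value of a 2-XX-YYY operator descriptor.

    System.out.println(TableC.getOperatorName(7));    // "increase scale, reference value, and data width"
    System.out.println(TableC.getOperatorName(99));   // out of range -> "unknown"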

View File

@ -0,0 +1,139 @@
/*
* Copyright (c) 1998-2018 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import java.util.*;
/**
* BUFR Table D - Data sequences
*
* @author caron
* @since Sep 25, 2008
*/
public class TableD {
private String name;
private String location;
private Map<Short, Descriptor> map;
public TableD(String name, String location) {
this.name = name;
this.location = location;
map = new HashMap<>();
}
public String getName() {
return name;
}
public String getLocation() {
return location;
}
public Descriptor addDescriptor(short x, short y, String name, List<Short> seq) {
short id = (short) ((3 << 14) + (x << 8) + y);
Descriptor d = new Descriptor(x, y, name, seq);
map.put(id, d);
return d;
}
public Descriptor getDescriptor(short id) {
return map.get(id);
}
public Collection<Descriptor> getDescriptors() {
return map.values();
}
public void show(Formatter out) {
Collection<Short> keys = map.keySet();
List<Short> sortKeys = new ArrayList<>(keys);
Collections.sort(sortKeys);
out.format("Table D %s %n", name);
for (Short key : sortKeys) {
Descriptor dd = map.get(key);
dd.show(out, true);
}
}
public static class Descriptor implements Comparable<Descriptor> {
private short x, y;
private String name;
private List<Short> seq;
private boolean localOverride;
Descriptor(short x, short y, String name, List<Short> seq) {
this.x = x;
this.y = y;
this.name = name;
this.seq = seq;
}
public List<Short> getSequence() {
return seq;
}
public void addFeature(short f) {
seq.add(f);
}
public String getName() {
return name;
}
/**
* Get fxy as a short
*
* @return fxy encoded as a short
*/
public short getId() {
return (short) ((3 << 14) + (x << 8) + y);
}
/**
* Get fxy as a String, eg 3-4-22
*
* @return fxy encoded as a String
*/
public String getFxy() {
return "3-" + x + "-" + y;
}
public String toString() {
return getFxy() + " " + getName();
}
public void show(Formatter out, boolean oneline) {
out.format(" %8s: name=(%s) seq=", getFxy(), name);
if (oneline) {
for (short s : seq)
out.format(" %s,", ucar.nc2.iosp.bufr.Descriptor.makeString(s));
out.format("%n");
} else {
for (short s : seq)
out.format(" %s%n", ucar.nc2.iosp.bufr.Descriptor.makeString(s));
}
}
@Override
public int compareTo(Descriptor o) {
return getId() - o.getId();
}
public boolean isLocal() {
return ((x >= 48) || (y >= 192));
}
public void setLocalOverride(boolean isOverride) {
this.localOverride = isOverride;
}
public boolean getLocalOverride() {
return localOverride;
}
}
}
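
A minimal sketch, not part of the committed file, of registering a local Table D sequence and reading it back; the child descriptors are illustrative:

    TableD d = new TableD("example", "in-memory");
    List<Short> seq = new ArrayList<>();
    seq.add((short) (1 << 8));          // 0-1-0, illustrative child descriptor
    seq.add((short) ((4 << 8) + 4));    // 0-4-4
    TableD.Descriptor entry = d.addDescriptor((short) 60, (short) 10, "example sequence", seq);
    System.out.println(entry.getFxy() + " has " + entry.getSequence().size() + " child descriptors");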

View File

@ -0,0 +1,458 @@
/*
* Copyright (c) 1998-2020 University Corporation for Atmospheric Research/Unidata
* See LICENSE for license information.
*/
package org.meteoinfo.data.meteodata.bufr.tables;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import ucar.nc2.wmo.Util;
import ucar.unidata.util.StringUtil2;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* Read WMO BUFR XML formats
*
* @author John
* @since 8/10/11
*/
public class WmoXmlReader {
private static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(WmoXmlReader.class);
public enum Version {
BUFR_14_1_0, BUFR_14_2_0, BUFR_15_1_1, BUFR_16_0_0, BUFR_WMO;
String[] getElemNamesB() {
if (this == BUFR_14_1_0) {
return new String[]{"BC_TableB_BUFR14_1_0_CREX_6_1_0", "ElementName_E"};
} else if (this == BUFR_14_2_0) {
return new String[]{"Exporting_BCTableB_E", "ElementName"};
} else if (this == BUFR_15_1_1) {
return new String[]{"Exp_JointTableB_E", "ElementName_E"};
} else if (this == BUFR_16_0_0) {
return new String[]{"Exp_BUFRCREXTableB_E", "ElementName_E"};
} else if (this == BUFR_WMO) { // from now on this is the element name
return new String[]{null, "ElementName_en"};
}
return null;
}
String[] getElemNamesD() {
if (this == BUFR_14_1_0) {
return new String[]{"B_TableD_BUFR14_1_0_CREX_6_1_0", "ElementName1_E"};
} else if (this == BUFR_14_2_0) {
return new String[]{"Exporting_BUFRTableD_E", "ElementName1"};
} else if (this == BUFR_15_1_1) {
return new String[]{"Exp_BUFRTableD_E", "ElementName_E", "ExistingElementName_E"};
} else if (this == BUFR_16_0_0) {
return new String[]{"Exp_BUFRTableD_E", "ElementName_E", "ExistingElementName_E"};
} else if (this == BUFR_WMO) {
return new String[]{null, "ElementName_en"};
}
return null;
}
}
/*
* 14.1
* <BC_TableB_BUFR14_1_0_CREX_6_1_0>
* <SNo>1</SNo>
* <Class>00</Class>
* <FXY>000001</FXY>
* <ElementName_E>Table A: entry</ElementName_E>
* <ElementName_F>Table A : entr?e</ElementName_F>
* <ElementName_R>??????? ?: ???????</ElementName_R>
* <ElementName_S>Tabla A: elemento</ElementName_S>
* <BUFR_Unit>CCITT IA5</BUFR_Unit>
* <BUFR_Scale>0</BUFR_Scale>
* <BUFR_ReferenceValue>0</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>24</BUFR_DataWidth_Bits>
* <CREX_Unit>Character</CREX_Unit>
* <CREX_Scale>0</CREX_Scale>
* <CREX_DataWidth>3</CREX_DataWidth>
* <Status>Operational</Status>
* <NotesToTable_E>Notes: (see)#BUFR14_1_0_CREX6_1_0_Notes.doc#BC_Cl000</NotesToTable_E>
* </BC_TableB_BUFR14_1_0_CREX_6_1_0>
*
* 14.2
* <Exporting_BCTableB_E>
* <No>2</No>
* <ClassNo>00</ClassNo>
* <ClassName>BUFR/CREX table entries</ClassName>
* <FXY>000002</FXY>
* <ElementName>Table A: data category description, line 1 </ElementName>
* <BUFR_Unit>CCITT IA5 </BUFR_Unit>
* <BUFR_Scale>0</BUFR_Scale>
* <BUFR_ReferenceValue>0</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>256</BUFR_DataWidth_Bits>
* <CREX_Unit>Character</CREX_Unit>
* <CREX_Scale>0</CREX_Scale>
* <CREX_DataWidth>32</CREX_DataWidth>
* <Status>Operational</Status>
* </Exporting_BCTableB_E>
*
* 15.1
* <Exp_JointTableB_E>
* <No>1</No>
* <ClassNo>00</ClassNo>
* <ClassName_E>BUFR/CREX table entries</ClassName_E>
* <FXY>000001</FXY>
* <ElementName_E>Table A: entry</ElementName_E>
* <BUFR_Unit>CCITT IA5</BUFR_Unit>
* <BUFR_Scale>0</BUFR_Scale>
* <BUFR_ReferenceValue>0</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>24</BUFR_DataWidth_Bits>
* <CREX_Unit>Character</CREX_Unit>
* <CREX_Scale>0</CREX_Scale>
* <CREX_DataWidth_Char>3</CREX_DataWidth_Char>
* <Status>Operational</Status>
* </Exp_JointTableB_E>
*
* 16.0
* <Exp_BUFRCREXTableB_E>
* <No>681</No>
* <ClassNo>13</ClassNo>
* <ClassName_E>Hydrographic and hydrological elements</ClassName_E>
* <FXY>013060</FXY>
* <ElementName_E>Total accumulated precipitation</ElementName_E>
* <BUFR_Unit>kg m-2</BUFR_Unit>
* <BUFR_Scale>1</BUFR_Scale>
* <BUFR_ReferenceValue>-1</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>17</BUFR_DataWidth_Bits>
* <CREX_Unit>kg m-2</CREX_Unit>
* <CREX_Scale>1</CREX_Scale>
* <CREX_DataWidth_Char>5</CREX_DataWidth_Char>
* <Status>Operational</Status>
* </Exp_BUFRCREXTableB_E>
*
* <BUFRCREX_17_0_0_TableB_en>
* <No>8</No>
* <ClassNo>00</ClassNo>
* <ClassName_en>BUFR/CREX table entries</ClassName_en>
* <FXY>000008</FXY>
* <ElementName_en>BUFR Local table version number</ElementName_en>
* <Note_en>(see Note 4)</Note_en>
* <BUFR_Unit>CCITT IA5</BUFR_Unit>
* <BUFR_Scale>0</BUFR_Scale>
* <BUFR_ReferenceValue>0</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>16</BUFR_DataWidth_Bits>
* <CREX_Unit>Character</CREX_Unit>
* <CREX_Scale>0</CREX_Scale>
* <CREX_DataWidth_Char>2</CREX_DataWidth_Char>
* <Status>Operational</Status>
* </BUFRCREX_17_0_0_TableB_en>
*
* <BUFRCREX_22_0_1_TableB_en>
* <No>1018</No>
* <ClassNo>21</ClassNo>
* <ClassName_en>BUFR/CREX Radar data</ClassName_en>
* <FXY>021073</FXY>
* <ElementName_en>Satellite altimeter instrument mode</ElementName_en>
* <BUFR_Unit>Flag table</BUFR_Unit>
* <BUFR_Scale>0</BUFR_Scale>
* <BUFR_ReferenceValue>0</BUFR_ReferenceValue>
* <BUFR_DataWidth_Bits>9</BUFR_DataWidth_Bits>
* <CREX_Unit>Flag table</CREX_Unit>
* <CREX_Scale>0</CREX_Scale>
* <CREX_DataWidth_Char>3</CREX_DataWidth_Char>
* <Status>Operational</Status>
* </BUFRCREX_22_0_1_TableB_en>
*/
static void readWmoXmlTableB(InputStream ios, TableB b) throws IOException {
org.jdom2.Document doc;
try {
SAXBuilder builder = new SAXBuilder();
builder.setExpandEntities(false);
doc = builder.build(ios);
} catch (JDOMException e) {
throw new IOException(e.getMessage());
}
Element root = doc.getRootElement();
// what elements do we need to parse tableB?
String[] elems = elementsUsedFromTableB(root);
List<Element> unrecognizedSequenceTermElements = new ArrayList<>();
List<Element> featList = root.getChildren();
for (Element elem : featList) {
Element ce = null;
for (int nameTest = 1; nameTest < elems.length; nameTest++) {
ce = elem.getChild(elems[nameTest]);
if (ce != null) {
break;
}
}
if (ce == null) {
unrecognizedSequenceTermElements.add(elem);
continue;
}
String name = Util.cleanName(ce.getTextNormalize());
String units = cleanUnit(elem.getChildTextNormalize("BUFR_Unit"));
int x = 0, y = 0, scale = 0, reference = 0, width = 0;
String fxy = null;
String s = null;
try {
fxy = elem.getChildTextNormalize("FXY");
int xy = Integer.parseInt(cleanNumber(fxy));
x = xy / 1000;
y = xy % 1000;
} catch (NumberFormatException e) {
log.warn(" key {} name '{}' fails parsing", fxy, name);
}
try {
s = elem.getChildTextNormalize("BUFR_Scale");
scale = Integer.parseInt(cleanNumber(s));
} catch (NumberFormatException e) {
log.warn(" key {} name '{}' has bad scale='{}'", fxy, name, s);
}
try {
s = elem.getChildTextNormalize("BUFR_ReferenceValue");
reference = Integer.parseInt(cleanNumber(s));
} catch (NumberFormatException e) {
log.warn(" key {} name '{}' has bad reference='{}'", fxy, name, s);
}
try {
s = elem.getChildTextNormalize("BUFR_DataWidth_Bits");
width = Integer.parseInt(cleanNumber(s));
} catch (NumberFormatException e) {
log.warn(" key {} name '{}' has bad width='{}'", fxy, name, s);
}
b.addDescriptor((short) x, (short) y, scale, reference, width, name, units, null);
}
if (log.isDebugEnabled()) {
logUnrecognizedElements(unrecognizedSequenceTermElements, "B", b.getLocation());
}
ios.close();
}
static String cleanNumber(String s) {
return StringUtil2.remove(s, ' ');
}
public static String cleanUnit(String unit) {
String result = StringUtil2.remove(unit, 176);
return StringUtil2.replace(result, (char) 65533, "2"); // seems to be a superscript 2 in some language
}
static String[] elementsUsedFromTableD(Element root) {
return elementsUsedFromTable(root, "D");
}
static String[] elementsUsedFromTableB(Element root) {
return elementsUsedFromTable(root, "B");
}
static String[] elementsUsedFromTable(Element root, String tableType) {
String[] elems = null;
// does the table have its own enum value? If so, use it.
for (Version v : Version.values()) {
boolean match = root.getAttributes().stream().anyMatch(attr -> attr.getValue().contains(v.toString()));
if (match) {
elems = tableType.equals("B") ? v.getElemNamesB() : v.getElemNamesD();
break;
}
}
// exact table match not found. Try seeing if the table uses
// the sequence element from a version defined in the Version enum.
// Note: will stop on the first version that works, as defined by
// the order of the Version enum. might not be correct.
if (elems == null) {
for (Version v : Version.values()) {
elems = tableType.equals("B") ? v.getElemNamesB() : v.getElemNamesD();
List<Element> featList = null;
if ((elems != null) && (elems.length > 0)) {
featList = root.getChildren(elems[0]);
}
if (featList != null && !featList.isEmpty()) {
break;
}
}
}
return elems;
}
static void logUnrecognizedElements(List<Element> unrecognizedSequenceTermElements, String tableType,
String location) {
// not every sequence entry in the WMO xml table D files is processed. This has caused trouble before.
// this is a pretty specific, low level debug message to hopefully give a clue to us in the future
// that if we are having trouble decoding BUFR messages, maybe we're not fully parsing the WMO xml TableD
// entries, and so the sequence being used might not be the full sequence necessary to decode.
if (log.isDebugEnabled()) {
if (unrecognizedSequenceTermElements.size() > 0) {
StringBuilder msgBuilder = new StringBuilder();
msgBuilder.append(String.format("%d Unprocessed sequences in WMO table %s %s",
unrecognizedSequenceTermElements.size(), tableType, location));
if (tableType.equals("D")) {
String tableDChecker = "bufr/src/test/java/ucar/nc2/iosp/bufr/tables/WmoTableDVariations.java";
msgBuilder
                        .append(String.format(". This might be ok, but to know for sure, consider running %s", tableDChecker));
}
log.debug(msgBuilder.toString());
}
}
}
/*
* <B_TableD_BUFR14_1_0_CREX_6_1_0>
* <SNo>2647</SNo>
* <Category>10</Category>
* <FXY1>310013</FXY1>
* <ElementName1_E>(AVHRR (GAC) report)</ElementName1_E>
* <FXY2>004005</FXY2>
* <ElementName2_E>Minute</ElementName2_E>
* <Remarks_E>Minute</Remarks_E>
* <Status>Operational</Status>
* </B_TableD_BUFR14_1_0_CREX_6_1_0>
*
* 14.2.0
* <Exporting_BUFRTableD_E>
* <No>2901</No>
* <Category>10</Category>
* <CategoryOfSequences>Vertical sounding sequences (satellite data)</CategoryOfSequences>
* <FXY1>310025</FXY1>
* <ElementName1>(SSMIS Temperature data record)</ElementName1>
* <FXY2>004006</FXY2>
* <Status>Operational</Status>
* </Exporting_BUFRTableD_E>
*
* 15.1.1
* <Exp_BUFRTableD_E>
* <No>102</No>
* <Category>01</Category>
* <CategoryOfSequences_E>Location and identification sequences</CategoryOfSequences_E>
* <FXY1>301034</FXY1>
* <Title_E>(Buoy/platform - fixed)</Title_E>
* <FXY2>001005</FXY2>
* <ElementName_E>Buoy/platform identifier</ElementName_E>
* <ExistingElementName_E>Buoy/platform identifier</ExistingElementName_E>
* <Status>Operational</Status>
* </Exp_BUFRTableD_E>
*
* 16.0.0
* <Exp_BUFRTableD_E>
* <No>402</No>
* <Category>02</Category>
* <CategoryOfSequences_E>Meteorological sequences common to surface data</CategoryOfSequences_E>
* <FXY1>302001</FXY1>
* <FXY2>010051</FXY2>
* <ElementName_E>Pressure reduced to mean sea level</ElementName_E>
* <ExistingElementName_E>Pressure reduced to mean sea level</ExistingElementName_E>
* <Status>Operational</Status>
* </Exp_BUFRTableD_E>
*
* <BUFR_19_1_1_TableD_en>
* <No>4</No>
* <Category>00</Category>
* <CategoryOfSequences_en>BUFR table entries sequences</CategoryOfSequences_en>
* <FXY1>300003</FXY1>
* <Title_en>(F, X, Y of descriptor to be added or defined)</Title_en>
* <FXY2>000011</FXY2>
* <ElementName_en>X descriptor to be added or defined</ElementName_en>
* <Status>Operational</Status>
* </BUFR_19_1_1_TableD_en>
*
* <BUFR_22_0_1_TableD_en>
* <No>5874</No>
* <Category>15</Category>
* <CategoryOfSequences_en>Oceanographic report sequences</CategoryOfSequences_en>
* <FXY1>315004</FXY1>
* <Title_en>(XBT temperature profile data sequence)</Title_en>
* <FXY2>025061</FXY2>
* <ElementName_en>Software identification and version number</ElementName_en>
* <Status>Operational</Status>
* </BUFR_22_0_1_TableD_en>
*
*/
static void readWmoXmlTableD(InputStream ios, TableD tableD) throws IOException {
org.jdom2.Document doc;
try {
SAXBuilder builder = new SAXBuilder();
builder.setExpandEntities(false);
doc = builder.build(ios);
} catch (JDOMException e) {
throw new IOException(e.getMessage());
}
int currSeqno = -1;
TableD.Descriptor currDesc = null;
Element root = doc.getRootElement();
// what elements do we need to parse tableD?
String[] elems = elementsUsedFromTableD(root);
List<Element> unrecognizedSequenceTermElements = new ArrayList<>();
List<Element> featList = root.getChildren();
for (Element elem : featList) {
// see if element in table is recognized
Element ce = null;
for (int nameTest = 1; nameTest < elems.length; nameTest++) {
ce = elem.getChild(elems[nameTest]);
if (ce != null) {
break;
}
}
if (ce == null) {
unrecognizedSequenceTermElements.add(elem);
continue;
}
String seqs = elem.getChildTextNormalize("FXY1");
int seq = Integer.parseInt(seqs);
if (currSeqno != seq) {
int y = seq % 1000;
int w = seq / 1000;
int x = w % 100;
String seqName = Util.cleanName(ce.getTextNormalize());
currDesc = tableD.addDescriptor((short) x, (short) y, seqName, new ArrayList<>());
currSeqno = seq;
}
String fnos = elem.getChildTextNormalize("FXY2");
int fno = Integer.parseInt(fnos);
int y = fno % 1000;
int w = fno / 1000;
int x = w % 100;
int f = w / 100;
int fxy = (f << 14) + (x << 8) + y;
currDesc.addFeature((short) fxy);
}
if (log.isDebugEnabled()) {
logUnrecognizedElements(unrecognizedSequenceTermElements, "D", tableD.getLocation());
}
ios.close();
}
}
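
A sketch, not part of the committed file, of driving the Table B reader. The XML resource name is illustrative, and since readWmoXmlTableB is package-private and throws IOException, this would run from code in the same package that declares the exception:

    TableB b = new TableB("wmo-v37", BufrTables.RESOURCE_PATH + "wmo/BUFRCREX_37_0_0_TableB_en.xml");   // illustrative resource
    try (InputStream is = WmoXmlReader.class.getResourceAsStream(b.getLocation())) {
        WmoXmlReader.readWmoXmlTableB(is, b);
    }
    System.out.println(b.getDescriptors().size() + " Table B entries loaded");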

View File

@ -5,6 +5,7 @@
*/
package org.meteoinfo.data.meteodata.netcdf;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
@ -14,6 +15,8 @@ import org.meteoinfo.data.dimarray.Dimension;
import org.meteoinfo.data.meteodata.Attribute;
import org.meteoinfo.data.meteodata.Variable;
import org.meteoinfo.ndarray.*;
import org.meteoinfo.ndarray.math.ArrayMath;
import org.meteoinfo.ndarray.math.ArrayUtil;
/**
*
@ -158,6 +161,7 @@ public class NCUtil {
var.setShortName(ncVar.getShortName());
var.setDataType(convertDataType(ncVar.getDataType()));
var.setDescription(ncVar.getDescription());
var.setMemberOfStructure(ncVar.isMemberOfStructure());
var.setDimensions(convertDimensions(ncVar.getDimensions()));
for (ucar.nc2.Attribute ncAttr : ncVar.getAttributes()) {
var.addAttribute(convertAttribute(ncAttr));
@ -184,4 +188,223 @@ public class NCUtil {
return null;
}
}
/**
     * Get packing parameters (add_offset, scale_factor and missing/fill value) from a ucar variable's attributes
     * @param var The variable
     * @return Pack data as {add_offset, scale_factor, missingValue}
*/
public static double[] getPackData(ucar.nc2.Variable var) {
double add_offset = 0, scale_factor = 1, missingValue = Double.NaN;
for (int i = 0; i < var.getAttributes().size(); i++) {
ucar.nc2.Attribute att = var.getAttributes().get(i);
String attName = att.getShortName();
if (attName.equals("add_offset")) {
add_offset = Double.parseDouble(att.getValue(0).toString());
}
if (attName.equals("scale_factor")) {
scale_factor = Double.parseDouble(att.getValue(0).toString());
}
if (attName.equals("missing_value")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
//MODIS NetCDF data
if (attName.equals("_FillValue")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
}
return new double[]{add_offset, scale_factor, missingValue};
}
/**
     * Get packing parameters (add_offset, scale_factor and missing/fill value) from a variable's attributes
     * @param var The variable
     * @return Pack data as {add_offset, scale_factor, missingValue}
*/
public static double[] getPackData(Variable var) {
double add_offset = 0, scale_factor = 1, missingValue = Double.NaN;
for (int i = 0; i < var.getAttributes().size(); i++) {
Attribute att = var.getAttributes().get(i);
String attName = att.getShortName();
if (attName.equals("add_offset")) {
add_offset = Double.parseDouble(att.getValue(0).toString());
}
if (attName.equals("scale_factor")) {
scale_factor = Double.parseDouble(att.getValue(0).toString());
}
if (attName.equals("missing_value")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
//MODIS NetCDF data
if (attName.equals("_FillValue")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
}
return new double[]{add_offset, scale_factor, missingValue};
}
/**
     * Get missing value from a variable's missing_value or _FillValue attribute
     * @param var The variable
     * @return Missing value, or NaN if neither attribute is present
*/
public static double getMissingValue(Variable var) {
double missingValue = Double.NaN;
for (int i = 0; i < var.getAttributes().size(); i++) {
Attribute att = var.getAttributes().get(i);
String attName = att.getShortName();
if (attName.equals("missing_value")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
//MODIS NetCDF data
if (attName.equals("_FillValue")) {
try {
missingValue = Double.parseDouble(att.getValue(0).toString());
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
}
return missingValue;
}
/**
     * Read a member's data array from a ucar ArrayStructure (e.g. an ArraySequence)
     *
     * @param parentArray The ucar ArrayStructure
* @param memberName Member name
* @return Read data array
*/
public static Array readSequence(ucar.ma2.ArrayStructure parentArray, String memberName) throws IOException {
ucar.ma2.StructureMembers.Member member = parentArray.findMember(memberName);
ucar.ma2.Array r = parentArray.extractMemberArray(member);
return convertArray(r);
}
/**
     * Read a member's data array from a ucar ArrayObject whose elements are ArraySequences
     *
     * @param parentArray The ucar ArrayObject with ArraySequence elements
* @param memberName Member name
* @param index Record index
* @param missingValue Missing value
* @return Read data array
*/
public static Array readSequenceRecord(ucar.ma2.ArrayObject parentArray, String memberName,
int index, double missingValue) throws IOException {
int n = (int) parentArray.getSize();
ucar.ma2.IndexIterator pIter = parentArray.getIndexIterator();
ucar.ma2.StructureMembers.Member member = null;
while (pIter.hasNext()) {
ucar.ma2.ArrayStructure sArray = (ucar.ma2.ArrayStructure) pIter.getObjectNext();
if (sArray != null) {
member = sArray.findMember(memberName);
break;
}
}
DataType dataType = convertDataType(member.getDataType());
Array r = Array.factory(dataType, new int[]{n});
pIter = parentArray.getIndexIterator();
IndexIterator rIter = r.getIndexIterator();
while (pIter.hasNext()) {
ucar.ma2.ArrayStructure sArray = (ucar.ma2.ArrayStructure) pIter.getObjectNext();
if (sArray == null) {
rIter.setObjectNext(missingValue);
} else {
member = sArray.findMember(memberName);
ucar.ma2.Array a = sArray.extractMemberArray(member);
if (a.getSize() > index) {
rIter.setObjectNext(a.getObject(index));
} else {
rIter.setObjectNext(missingValue);
}
}
}
return r;
}
/**
     * Read a member's data array from a ucar ArrayObject whose elements are ArraySequences,
     * using NaN as the missing value
     *
     * @param parentArray The ucar ArrayObject with ArraySequence elements
     * @param memberName Member name
     * @param index Record index
* @return Read data array
*/
public static Array readSequenceRecord(ucar.ma2.ArrayObject parentArray, String memberName,
int index) throws IOException {
return readSequenceRecord(parentArray, memberName, index, Double.NaN);
}
/**
     * Read a member's data array for a single station from a ucar ArrayObject whose elements are ArraySequences
     *
     * @param parentArray The ucar ArrayObject with ArraySequence elements
* @param memberName Member name
* @param index Station index
* @return Read data array
*/
public static Array readSequenceStation(ucar.ma2.ArrayObject parentArray, String memberName,
int index) throws IOException {
int n = (int) parentArray.getSize();
ucar.ma2.ArrayStructure sArray = (ucar.ma2.ArrayStructure) parentArray.getObject(index);
if (sArray == null) {
return null;
}
ucar.ma2.StructureMembers.Member member = sArray.findMember(memberName);
ucar.ma2.Array r = sArray.extractMemberArray(member);
return convertArray(r);
}
/**
* Unpack an array
* @param a The array
* @param variable The variable including packing parameters
* @return Unpacked data
*/
public static Array arrayUnPack(Array a, Variable variable) {
double[] packValues = getPackData(variable);
double addOffset = packValues[0];
double scaleFactor = packValues[1];
double missingValue = packValues[2];
return ArrayUtil.unPack(a, missingValue, scaleFactor, addOffset);
}
}
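
A minimal sketch, not part of the committed file, of the packing convention these helpers implement, using an in-memory array; the attribute values are illustrative:

    // unpacked = packed * scale_factor + add_offset, with missing_value mapped to NaN
    Array packed = Array.factory(DataType.DOUBLE, new int[]{3});
    packed.setDouble(0, 100);
    packed.setDouble(1, -9999);   // illustrative missing_value
    packed.setDouble(2, 300);
    Array unpacked = ArrayUtil.unPack(packed, -9999.0, 0.01, 273.15);  // (missingValue, scale_factor, add_offset)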

View File

@ -143,9 +143,9 @@ public class NetCDFDataInfo extends DataInfo implements IGridDataInfo, IStationD
}
/**
* Get file type identifer
* Get file type identifier
*
* @return File type identifer
* @return File type identifier
*/
public String getFileTypeId() {
return fileTypeId;
@ -1760,6 +1760,7 @@ public class NetCDFDataInfo extends DataInfo implements IGridDataInfo, IStationD
dimType = DimensionType.Y;
break;
case "time":
case "valid_time":
dimType = DimensionType.T;
break;
case "level":
@ -3100,7 +3101,10 @@ public class NetCDFDataInfo extends DataInfo implements IGridDataInfo, IStationD
scale_factor = packData[1];
missingValue = packData[2];
if (add_offset != 0 || scale_factor != 1) {
//ArrayMath.fill_value = missingValue;
data = ArrayUtil.convertToDataType(data, org.meteoinfo.ndarray.DataType.DOUBLE);
if (!Double.isNaN(missingValue)) {
ArrayMath.replaceValue(data, missingValue, Double.NaN);
}
data = ArrayMath.add(ArrayMath.mul(data, scale_factor), add_offset);
}
}

View File

@ -47,11 +47,6 @@
<artifactId>meteoinfo-image</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>meteoinfo-data</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>meteoinfo-chart</artifactId>

View File

@ -11,7 +11,6 @@ import org.meteoinfo.geometry.shape.PolylineZShape;
import org.meteoinfo.ndarray.Array;
import org.meteoinfo.ndarray.DataType;
import org.meteoinfo.ndarray.math.ArrayUtil;
import ucar.nc2.util.IO;
import java.io.*;
import java.time.LocalDateTime;

View File

@ -1,32 +1,30 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<MeteoInfo File="milconfig.xml" Type="configurefile">
<Path OpenPath="D:\Working\MIScript\Jython\mis\plot_types\contour">
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\plot_types\3d\jogl"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\plot_types\3d"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\plot_types\pie"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\map"/>
<Path OpenPath="D:\Working\MIScript\Jython\mis\io\burf">
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\map\geoshow"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\array"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\common_math"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\dataset"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\netcdf"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\common_math\fft"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\grads"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\plot_types"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\plot_types\contour"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\hdf"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\satellite"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\netcdf"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\micaps"/>
<RecentFolder Folder="D:\Working\MIScript\Jython\mis\io\burf"/>
</Path>
<File>
<OpenedFiles>
<OpenedFile File="D:\Working\MIScript\Jython\mis\plot_types\3d\jogl\slice\slice_2d.py"/>
<OpenedFile File="D:\Working\MIScript\Jython\mis\common_math\fft\fft_5.py"/>
<OpenedFile File="D:\Working\MIScript\Jython\mis\io\grads\verticle_plot_2.py"/>
<OpenedFile File="D:\Working\MIScript\Jython\mis\io\burf\bufr_gfs_1.py"/>
<OpenedFile File="D:\Working\MIScript\Jython\mis\io\burf\bufr_gdas_3.py"/>
</OpenedFiles>
<RecentFiles>
<RecentFile File="D:\Working\MIScript\Jython\mis\plot_types\3d\jogl\slice\slice_2d.py"/>
<RecentFile File="D:\Working\MIScript\Jython\mis\common_math\fft\fft_5.py"/>
<RecentFile File="D:\Working\MIScript\Jython\mis\io\grads\verticle_plot_2.py"/>
<RecentFile File="D:\Working\MIScript\Jython\mis\io\burf\bufr_gfs_1.py"/>
<RecentFile File="D:\Working\MIScript\Jython\mis\io\burf\bufr_gdas_3.py"/>
</RecentFiles>
</File>
<Font>

View File

@ -5,15 +5,23 @@ from .dimdatafile import DimDataFile
class BUFRDataFile(DimDataFile):
def __init__(self, dataset=None, access='r', bufrdata=None):
super(BUFRDataFile, self).__init__(dataset, access)
"""
Create a BUFR data file object.
        :param dataset: (*MeteoDataInfo*) Underlying dataset.
:param access: (*string*) File access.
:param bufrdata: (*object*) Bufr data.
"""
DimDataFile.__init__(self, dataset, access)
self.bufrdata = bufrdata
def write_indicator(self, bufrlen, edition=3):
"""
Write indicator section with arbitrary length.
:param bufrlen: (*int*) The total length of the message.
:param edition: (*int*) Bruf edition.
        :param edition: (*int*) Bufr edition.
:returns: (*int*) Indicator section length.
"""
@ -24,7 +32,7 @@ class BUFRDataFile(DimDataFile):
Write indicator section with correct length.
:param bufrlen: (*int*) The total length of the message.
:param edition: (*int*) Bruf edition.
        :param edition: (*int*) Bufr edition.
"""
self.bufrdata.reWriteIndicatorSection(bufrlen, edition)

View File

@ -34,23 +34,27 @@ class DimDataFile(object):
self._variables = []
if not dataset is None:
self.filename = dataset.getFileName()
for v in dataset.getDataInfo().getVariables():
self._variables.append(DimVariable(v))
self.nvar = dataset.getDataInfo().getVariableNum()
self.fill_value = dataset.getMissingValue()
self.proj = dataset.getProjectionInfo()
self.projection = self.proj
for v in dataset.getDataInfo().getVariables():
self._variables.append(DimVariable.factory(v, self))
self.arldata = arldata
self.bufrdata = bufrdata
def __getitem__(self, key):
if isinstance(key, basestring):
var = self.dataset.getDataInfo().getVariable(key)
if var is None:
print(key + ' is not a variable name')
raise ValueError()
else:
return DimVariable(self.dataset.getDataInfo().getVariable(key), self)
for var in self._variables:
if var.name == key:
return var
for var in self._variables:
if var.short_name == key:
return var
print(key + ' is not a variable name')
raise ValueError()
else:
print(key + ' is not a variable name')
raise ValueError()
@ -67,7 +71,7 @@ class DimDataFile(object):
def close(self):
"""
Close the opended dataset
Close the opened dataset
"""
if not self.dataset is None:
self.dataset.close()

View File

@ -20,41 +20,69 @@ import mipylib.numeric as np
import mipylib.miutil as miutil
import datetime
import numbers
import warnings
# Dimension variable
class DimVariable(object):
@staticmethod
def factory(variable=None, dataset=None, ncvariable=None):
"""
        Factory method: return a StructureVariable for structure data types, otherwise a DimVariable.
"""
if variable.getDataType().isStructure():
return StructureVariable(variable, dataset)
else:
return DimVariable(variable, dataset, ncvariable)
# variable must be org.meteoinfo.data.meteodata.Variable
# dataset is DimDataFile
def __init__(self, variable=None, dataset=None, ncvariable=None):
self.variable = variable
self._variable = variable
self.dataset = dataset
self.ncvariable = ncvariable
if not variable is None:
self.name = variable.getName()
self.dtype = np.dtype.fromjava(variable.getDataType())
self.dims = variable.getDimensions()
self.ndim = variable.getDimNumber()
self.attributes = variable.getAttributes()
elif not ncvariable is None:
self.name = ncvariable.getShortName()
self.dtype = ncvariable.getDataType()
self.dims = ncvariable.getDimensions()
self.ndim = len(self.dims)
self.attributes = list(ncvariable.getAttributes())
else:
self.name = None
self.dtype = None
self.dims = None
self.ndim = 0
self.attributes = None
self.proj = None if dataset is None else dataset.projection
self.projection = self.proj
@property
def name(self):
if self._variable is not None:
return self._variable.getName()
if self.ncvariable is not None:
return self.ncvariable.getFullName()
return None
@property
def short_name(self):
if self._variable is not None:
return self._variable.getShortName()
if self.ncvariable is not None:
return self.ncvariable.getShortName()
return None
def __len__(self):
len = 1
if not self.variable is None:
for dim in self.variable.getDimensions():
if not self._variable is None:
for dim in self._variable.getDimensions():
len = len * dim.getLength()
return len
@ -78,7 +106,7 @@ class DimVariable(object):
return self.__str__()
def __getitem__(self, indices):
if self.variable.getDataType() in [DataType.STRUCTURE, DataType.SEQUENCE]:
if self._variable.getDataType() in [DataType.STRUCTURE, DataType.SEQUENCE]:
if isinstance(indices, str): #metadata
return self.member_array(indices)
else:
@ -105,16 +133,17 @@ class DimVariable(object):
else:
indices1.append(ii)
indices = indices1
if len(indices) < self.ndim:
indices = list(indices)
for _ in range(self.ndim - len(indices)):
indices.append(slice(None))
indices = tuple(indices)
if len(indices) != self.ndim:
print('indices must be ' + str(self.ndim) + ' dimensions!')
return None
if self.ndim > 0:
if len(indices) < self.ndim:
indices = list(indices)
for _ in range(self.ndim - len(indices)):
indices.append(slice(None))
indices = tuple(indices)
if len(indices) != self.ndim:
print('indices must be ' + str(self.ndim) + ' dimensions!')
return None
if not self.proj is None and not self.proj.isLonLat():
xlim = None
@ -237,7 +266,7 @@ class DimVariable(object):
ranges.append(tlist)
k = tlist
elif isinstance(k, basestring):
dim = self.variable.getDimension(i)
dim = self._variable.getDimension(i)
kvalues = k.split(':')
sv = float(kvalues[0])
sidx = dim.getValueIndex(sv)
@ -265,7 +294,7 @@ class DimVariable(object):
n = abs(eidx - sidx) + 1
size.append(n)
if n > 1:
dim = self.variable.getDimension(i)
dim = self._variable.getDimension(i)
#if dim.isReverse():
# step = -step
dim = dim.extract(sidx, eidx, step)
@ -283,7 +312,7 @@ class DimVariable(object):
ranges.append(rr)
else:
if len(k) > 1:
dim = self.variable.getDimension(i)
dim = self._variable.getDimension(i)
dim = dim.extract(k)
#dim.setReverse(False)
dims.append(dim)
@ -316,6 +345,23 @@ class DimVariable(object):
"""
return np.array(self.dataset.read(self.name))
def get_pack_paras(self):
"""
Get pack parameters.
:return: missing_value, scale_factor, add_offset
"""
pack_paras = NCUtil.getPackData(self._variable)
return pack_paras[2], pack_paras[1], pack_paras[0]
def is_member(self):
"""
Whether the variable is a member of a structure.
        :return: True if the variable is a member of a structure, False otherwise.
"""
return self._variable.isMemberOfStructure()
def get_members(self):
"""
Get structure members. Only valid for Structure data type.
@ -324,7 +370,7 @@ class DimVariable(object):
"""
a = self.read()
if a._array.getDataType() != DataType.STRUCTURE:
print 'This method is only valid for structure array!'
print('This method is only valid for structure array!')
return None
a = a._array.getArrayObject()
return a.getMembers()
@ -343,28 +389,32 @@ class DimVariable(object):
a = a._array.getArrayObject()
return a.findMember(member)
def member_array(self, member, indices=None):
def member_array(self, member, index=None, rec=0):
"""
Extract member array. Only valid for Structure data type.
:param member: (*string*) Member name.
:param indices: (*slice*) Indices.
:param index: (*slice*) Index.
:param rec: (*int*) Record index.
:returns: (*array*) Extracted member array.
"""
a = self.read()
if a._array.getDataType() != DataType.STRUCTURE:
print('This method is only valid for structure array!')
return None
a = a._array.getArrayObject()
is_structure = isinstance(a, ArrayStructure)
if isinstance(member, basestring):
member = a.findMember(member)
if is_structure:
member = a.findMember(member)
else:
member = a.getObject(rec).findMember(member)
if member is None:
raise KeyError('The member %s does not exist!' % member)
self.dataset.reopen()
a = a.extractMemberArray(member)
if is_structure:
a = a.extractMemberArray(member)
else:
a = a.getObject(rec).extractMemberArray(member)
if a.getDataType() in [NCDataType.SEQUENCE, NCDataType.STRUCTURE]:
return StructureArray(a)
@ -373,11 +423,46 @@ class DimVariable(object):
if r.size == 1:
return r[0]
if not indices is None:
r = r.__getitem__(indices)
if not index is None:
r = r.__getitem__(index)
return r
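A hedged usage sketch of the reworked member_array; the file name, variable name and member name below are illustrative only, and rec matters when the underlying data hold nested sequences rather than a flat structure.

    # hypothetical usage; names are illustrative, not real BUFR identifiers
    f = addfile('sample.bufr')
    obs = f['obs']                                    # a STRUCTURE/SEQUENCE variable
    ta = obs.member_array('airTemperature')           # extract the member as an array
    ta1 = obs.member_array('airTemperature', rec=1)   # for nested sequence data, take record 1
    ta5 = obs.member_array('airTemperature', index=slice(0, 5))   # first five values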
# def member_array(self, member, indices=None):
# """
# Extract member array. Only valid for Structure data type.
#
# :param member: (*string*) Member name.
# :param indices: (*slice*) Indices.
#
# :returns: (*array*) Extracted member array.
# """
# a = self.read()
# if a._array.getDataType() != DataType.STRUCTURE:
# print('This method is only valid for structure array!')
# return None
#
# a = a._array.getArrayObject()
# if isinstance(member, basestring):
# member = a.findMember(member)
# if member is None:
# raise KeyError('The member %s not exists!' % member)
#
# self.dataset.reopen()
# a = a.extractMemberArray(member)
# if a.getDataType() in [NCDataType.SEQUENCE, NCDataType.STRUCTURE]:
# return StructureArray(a)
#
# a = NCUtil.convertArray(a)
# r = np.array(a)
# if r.size == 1:
# return r[0]
#
# if not indices is None:
# r = r.__getitem__(indices)
#
# return r
def dimlen(self, idx):
"""
Get dimension length.
@ -414,7 +499,7 @@ class DimVariable(object):
:param attr: (*string or Attribute*) Attribute or Attribute name
"""
if isinstance(attr, str):
attr = self.variable.findAttribute(attr)
attr = self._variable.findAttribute(attr)
if attr is None:
return None
v = np.array(attr.getValues())
@ -447,17 +532,17 @@ class DimVariable(object):
def adddim(self, dimtype, dimvalue):
if isinstance(dimvalue, np.NDArray):
dimvalue = dimvalue.aslist()
self.variable.addDimension(dimtype, dimvalue)
self.ndim = self.variable.getDimNumber()
self._variable.addDimension(dimtype, dimvalue)
self.ndim = self._variable.getDimNumber()
def setdim(self, dimtype, dimvalue, index=None, reverse=False):
if isinstance(dimvalue, np.NDArray):
dimvalue = dimvalue.aslist()
if index is None:
self.variable.setDimension(dimtype, dimvalue, reverse)
self._variable.setDimension(dimtype, dimvalue, reverse)
else:
self.variable.setDimension(dimtype, dimvalue, reverse, index)
self.ndim = self.variable.getDimNumber()
self._variable.setDimension(dimtype, dimvalue, reverse, index)
self.ndim = self._variable.getDimNumber()
def setdimrev(self, idx, reverse):
self.dims[idx].setReverse(reverse)
@ -487,6 +572,151 @@ class DimVariable(object):
self.ncvariable.addAttribute(ncattr)
return attr
class StructureVariable(DimVariable):
def __init__(self, variable=None, dataset=None, parent_variable=None):
"""
Structure variable.
:param variable: (*Structure*) NC Structure object.
:param dataset: (*DimDataFile*) Data file.
:param parent_variable: (*StructureVariable*) Parent structure variable.
"""
DimVariable.__init__(self, variable, dataset)
self._parent_variable = parent_variable
if dataset is not None:
datainfo = dataset.dataset.getDataInfo()
if not datainfo.isOpened():
datainfo.reOpen()
self._ncfile = datainfo.getFile()
self._ncvar = self._ncfile.findVariable(self.name)
self._variables = []
for var in self._ncvar.getVariables():
self._variables.append(MemberVariable.factory(NCUtil.convertVariable(var), dataset, self))
def __getitem__(self, key):
if isinstance(key, basestring):
for var in self._variables:
if var.name == key:
return var
for var in self._variables:
if var.short_name == key:
return var
raise ValueError(key + ' is not a variable name')
else:
return np.array(self.dataset.read(self.name))
@property
def variables(self):
"""
Get all variables.
"""
return self._variables
@property
def varnames(self):
"""
Get all variable names.
"""
names = []
for var in self._variables:
names.append(var.short_name)
return names
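A brief sketch of navigating a StructureVariable, assuming a hypothetical BUFR file whose data set exposes a structure variable; all names are illustrative.

    # hypothetical usage; file path and names are illustrative
    f = addfile('sample.bufr')
    sv = f['obs']                 # assumed to be wrapped as a StructureVariable
    print(sv.varnames)            # short names of the member variables
    mv = sv['airTemperature']     # member access by name -> MemberVariable (or nested StructureVariable)
    full = sv[:]                  # read the whole structure as an array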
class MemberVariable(DimVariable):
@staticmethod
def factory(variable=None, dataset=None, parent_variable=None):
"""
Factory method.
"""
if variable.getDataType().isStructure():
return StructureVariable(variable, dataset, parent_variable)
else:
return MemberVariable(variable, dataset, parent_variable)
def __init__(self, variable=None, dataset=None, parent_variable=None):
"""
Member variable.
:param variable: (*Variable*) NC variable object.
:param dataset: (*DimDataFile*) Data file.
:param parent_variable: (*StructureVariable*) Parent structure variable.
"""
DimVariable.__init__(self, variable, dataset)
self._parent_variable = parent_variable
if dataset is not None:
datainfo = dataset.dataset.getDataInfo()
if not datainfo.isOpened():
datainfo.reOpen()
self._ncfile = datainfo.getFile()
self._ncvar = self._ncfile.findVariable(self.name)
def __getitem__(self, key=0, station=None):
if isinstance(key, int):
return self.read_array(record=key)
elif isinstance(key, slice):
if key == slice(None):
return self.read()
else:
return self.read_array(station=key.start)
def read_array(self, record=0, station=None):
"""
Read data array.
:param record: (*int*) Record index. Default is 0.
:param station: (*int*) Station index. Default is `None`, meaning all stations.
:return: (*array*) Data array.
"""
a = self._parent_variable.read()
a = a._array.getArrayObject()
missing_value, scale_factor, add_offset = self.get_pack_paras()
is_structure = isinstance(a, ArrayStructure)
if is_structure:
r = NCUtil.readSequence(a, self.short_name)
else:
if station is None:
r = NCUtil.readSequenceRecord(a, self.short_name, record, missing_value)
else:
r = NCUtil.readSequenceStation(a, self.short_name, station)
if r is None:
return None
r = ArrayUtil.unPack(r, missing_value, scale_factor, add_offset)
return np.array(r)
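A hedged sketch of reading through a MemberVariable; record and station refer to positions in the outer sequence, and values are unpacked with the variable's missing value, scale factor and offset. Names are illustrative.

    # hypothetical usage; names are illustrative
    mv = f['obs']['airTemperature']     # a MemberVariable inside a structure variable
    a0 = mv[0]                          # read_array(record=0): first record
    a1 = mv.read_array(record=1)        # second record
    s2 = mv.read_array(station=2)       # a single station, all records (when supported)
    aa = mv[:]                          # whole member via read()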
@property
def variables(self):
"""
Get all variables.
"""
return self._variables
@property
def varnames(self):
"""
Get all variable names.
"""
names = []
for var in self._variables:
names.append(var.short_name)
return names
class StructureArray(object):
def __init__(self, array):
@ -540,12 +770,12 @@ class StructureArray(object):
else:
return self._array.getObject(rec).findMember(member)
def member_array(self, member, indices=None, rec=0):
def member_array(self, member, index=None, rec=0):
"""
Extract member array. Only valid for Structure data type.
:param member: (*string*) Member name.
:param indices: (*slice*) Indices.
:param index: (*slice*) Index.
:param rec: (*int*) Record index.
:returns: (*array*) Extracted member array.
@ -572,8 +802,8 @@ class StructureArray(object):
if r.size == 1:
return r[0]
if not indices is None:
r = r.__getitem__(indices)
if not index is None:
r = r.__getitem__(index)
return r
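A minimal sketch of member extraction from a StructureArray, mirroring the DimVariable method above; names are illustrative and rec selects the record whose nested sequence is unpacked.

    # hypothetical usage; names are illustrative
    sa = obs.member_array('levels')                  # nested SEQUENCE member -> StructureArray
    p = sa.member_array('pressure', rec=0)           # member of the first record
    p5 = sa.member_array('pressure', index=slice(0, 5), rec=0)   # first five values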
@ -583,7 +813,7 @@ class TDimVariable(object):
# variable must be org.meteoinfo.data.meteodata.Variable
# dataset is DimDataFiles
def __init__(self, variable, dataset):
self.variable = variable
self._variable = variable
self.dataset = dataset
self.name = variable.getName()
self.dtype = np.dtype.fromjava(variable.getDataType())
@ -599,7 +829,7 @@ class TDimVariable(object):
self.tnum = len(times)
def __str__(self):
if self.variable is None:
if self._variable is None:
return 'None'
r = str(self.dtype) + ' ' + self.name + '('
@ -609,7 +839,7 @@ class TDimVariable(object):
dimname = 'null'
r = r + dimname + ','
r = r[:-1] + '):'
attrs = self.variable.getAttributes()
attrs = self._variable.getAttributes()
for attr in attrs:
r = r + '\n\t' + self.name + ': ' + attr.toString()
return r
@ -628,7 +858,7 @@ class TDimVariable(object):
indices = tuple(indices)
if len(indices) != self.ndim:
print 'indices must be ' + str(self.ndim) + ' dimensions!'
print('indices must be ' + str(self.ndim) + ' dimensions!')
return None
k = indices[0]


@ -118,10 +118,13 @@ def addfile(fname, access='r', dtype='netcdf', keepopen=False, **kwargs):
meteodata = MeteoDataInfo()
meteodata.openData(fname, keepopen)
if meteodata.getDataInfo().getDataType() == MeteoDataType.RADAR:
datainfo = meteodata.getDataInfo()
if datainfo.getDataType() == MeteoDataType.RADAR:
datafile = RadarDataFile(meteodata, access=access)
elif meteodata.getDataInfo().isRadial():
elif datainfo.isRadial():
datafile = RadarDataFile(meteodata, access=access)
elif datainfo.getDataType() == MeteoDataType.NETCDF and datainfo.getFileTypeId() == 'BUFR':
datafile = BUFRDataFile(meteodata, access=access)
else:
datafile = DimDataFile(meteodata, access=access)
return datafile
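With this change a BUFR file is detected by its file type id and wrapped in a BUFRDataFile instead of a plain DimDataFile. A hedged sketch (the file name is illustrative, and addfile is assumed to be in scope as in MeteoInfoLab scripts):

    # hypothetical usage; the file name is illustrative
    f = addfile('sample.bufr')     # NETCDF data type with 'BUFR' file type id -> BUFRDataFile
    obs = f['obs']                 # access a structure variable by an illustrative name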


@ -224,6 +224,15 @@ public enum DataType {
return (this == DataType.ENUM1) || (this == DataType.ENUM2) || (this == DataType.ENUM4);
}
/**
* Is structure
*
* @return true if STRUCTURE or SEQUENCE
*/
public boolean isStructure() {
return (this == DataType.STRUCTURE || this == DataType.SEQUENCE);
}
/**
* Convert a number to this data type
* @param n The number


@ -2329,6 +2329,14 @@ public class ArrayUtil {
return r;
}
/**
* Split an Array
*
* @param a Input array
* @param sections Number of sections to split into
* @param axis The axis along which to split
* @return List of sub-arrays
*/
public static List<Array> arraySplit(Array a, int sections, int axis) {
int[] shape = a.getShape();
if (axis == -1) {
@ -2342,6 +2350,31 @@ public class ArrayUtil {
return arrays;
}
/**
* Unpack array by missing value, scale factor and add offset
*
* @param a Input array
* @param missingValue Missing value
* @param scaleFactor Scale factor
* @param addOffset Add offset
* @return Output array
*/
public static Array unPack(Array a, double missingValue, double scaleFactor, double addOffset) {
if (!Double.isNaN(missingValue)) {
a = ArrayUtil.convertToDataType(a, DataType.DOUBLE);
ArrayMath.replaceValue(a, missingValue, Double.NaN);
}
if (scaleFactor != 1) {
a = ArrayMath.mul(a, scaleFactor);
}
if (addOffset != 0) {
a = ArrayMath.add(a, addOffset);
}
return a;
}
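The unpacking rule is the usual scale/offset convention: converted = raw * scaleFactor + addOffset, with values equal to missingValue replaced by NaN first. A hedged scalar example in Python, with illustrative numbers:

    # illustrative numbers only; mirrors ArrayUtil.unPack for a single value
    raw, missing_value, scale_factor, add_offset = 2881, 9999, 0.1, -273.15
    value = float('nan') if raw == missing_value else raw * scale_factor + add_offset
    print(value)   # 2881 * 0.1 - 273.15 = 14.95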
/**
* Sort array along an axis
*


@ -35,7 +35,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.version>1.8</java.version>
<revision>3.9.6</revision>
<revision>3.9.7</revision>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<maven.compiler.release>8</maven.compiler.release>