package it.geosolutions.imageio.plugins.jhdf;

import java.awt.image.DataBuffer;
import java.lang.reflect.Array;

import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Datatype;
007:
008: public class HDFUtilities {
009:
010: /**
011: * Given a HDF Attribute, builds a String containing comma separated
012: * values related to the attribute. Some Attribute may have a int
013: * array as value.
014: *
015: * @param att
016: * a HDF <code>Attribute</code>.
017: * @return
018: * the built <code>String</code>
019: */
020:
021: public static String buildAttributeString(Attribute att) {
022:
023: //TODO: Add more type handler
024: final Datatype dataType = att.getType();
025: final int attribTypeClass = dataType.getDatatypeClass();
026: final int attribTypeSize = dataType.getDatatypeSize();
027: Object valuesList = att.getValue();
028: String attribValue = "";
029: if (valuesList != null) {
030:
031: int i = 0;
032: final StringBuffer sb = new StringBuffer();
033: switch (attribTypeClass) {
034: case Datatype.CLASS_ARRAY:
035:
036: break;
037: case Datatype.CLASS_BITFIELD:
038:
039: break;
040: case Datatype.CLASS_CHAR:
041: final String[] strValues = (String[]) valuesList;
042: final int numValues = strValues.length;
043: for (; i < numValues - 1; i++) {
044: sb.append(strValues[i]).append(",");
045: }
046: sb.append(strValues[i]);
047: break;
048: case Datatype.CLASS_FLOAT:
049: switch (attribTypeSize) {
050: case 4://32 bit floating point
051: final float[] fValues = (float[]) valuesList;
052: final int fNumValues = fValues.length;
053: for (; i < fNumValues - 1; i++) {
054: sb.append(fValues[i]).append(",");
055: }
056: sb.append(fValues[i]);
057: break;
058: case 8://64 bit floating point
059: final double[] dValues = (double[]) valuesList;
060: final int dNumValues = dValues.length;
061: for (; i < dNumValues - 1; i++) {
062: sb.append(dValues[i]).append(",");
063: }
064: sb.append(dValues[i]);
065: break;
066: }
067: break;
068: case Datatype.CLASS_INTEGER:
069: switch (attribTypeSize) {
070: case 2://16 bit integers
071: final short[] sValues = (short[]) valuesList;
072: final int sNumValues = sValues.length;
073: for (; i < sNumValues - 1; i++) {
074: sb.append(sValues[i]).append(",");
075: }
076: sb.append(sValues[i]);
077: break;
078: case 4://32 bit integers
079: final int[] iValues = (int[]) valuesList;
080: final int iNumValues = iValues.length;
081: for (; i < iNumValues - 1; i++) {
082: sb.append(iValues[i]).append(",");
083: }
084: sb.append(iValues[i]);
085: break;
086: case 8://64 bit integers
087: final long[] lValues = (long[]) valuesList;
088: final int lNumValues = lValues.length;
089: for (; i < lNumValues - 1; i++) {
090: sb.append(lValues[i]).append(",");
091: }
092: sb.append(lValues[i]);
093: break;
094: }
095: break;
096: case Datatype.CLASS_STRING:
097:
098: break;
099: }
100: attribValue = sb.toString();
101: }
102: return attribValue;
103: }
104:
105: /**
106: * Given a HDF datatype, returns a proper DataBuffer type depending on
107: * the datatype size and the datatype class.
108: *
109: * @param datatype
110: * the input datatype
111: * @return the proper buffer type
112: */
113: public static int getBufferTypeFromDataType(Datatype datatype) {
114: int buffer_type = 0;
115: final int dataTypeClass = datatype.getDatatypeClass();
116: final int dataTypeSize = datatype.getDatatypeSize();
117: final boolean isUnsigned = datatype.isUnsigned();
118: if (dataTypeClass == Datatype.CLASS_INTEGER) {
119: if (dataTypeSize == 1)
120: buffer_type = DataBuffer.TYPE_BYTE;
121: else if (dataTypeSize == 2) {
122: if (isUnsigned)
123: buffer_type = DataBuffer.TYPE_USHORT;
124: else
125: buffer_type = DataBuffer.TYPE_SHORT;
126: } else if (dataTypeSize == 4)
127: buffer_type = DataBuffer.TYPE_INT;
128: } else if (dataTypeClass == Datatype.CLASS_FLOAT)
129: if (dataTypeSize == 4)
130: buffer_type = DataBuffer.TYPE_FLOAT;
131: else if (dataTypeSize == 8)
132: buffer_type = DataBuffer.TYPE_DOUBLE;
133: return buffer_type;
134: }
135: }
|