package it.geosolutions.imageio.plugins.jhdf.pool;

import java.io.File;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
/**
 * This class represents a pool of <code>Dataset</code>s. Since the
 * subsampling / subregion mechanism modifies the original dataset, we need
 * to work on a different dataset every time the
 * <code>ImageReader.read()</code> method is called. Furthermore, when read
 * operations are multithreaded (each thread loading one tile), the problem
 * becomes even more noticeable.
 *
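 * A minimal usage sketch (illustrative only; the file name and the
 * acquire/release pattern below are assumptions, not prescribed by this
 * class):
 *
 * <pre>{@code
 * DatasetPool pool = new DatasetPool(0, new File("sample.hdf"));
 * DatasetPool.DatasetCopy copy = pool.getDatasetCopy();
 * if (copy != null) {
 *     try {
 *         Dataset dataset = copy.getDataset();
 *         // ... perform a subregion / subsampled read on dataset ...
 *     } finally {
 *         // return the copy, asking the pool to restore its properties
 *         pool.getBackDatasetCopy(copy.getCopyID(), true);
 *     }
 * }
 * }</pre>
 *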
 * <p><b>WORK IN PROGRESS - NOT YET COMPLETED</b></p>
 *
 * @author Romagnoli Daniele
 * @author Giannecchini Simone
 */
public class DatasetPool {

    private static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("it.geosolutions.imageio.plugins.jhdf.pool");

    private static final int DEFAULT_DATASETSCOPIES_LIMIT = 10;

    private static final int DEFAULT_DATASETSCOPIES_START = 4;

    private final File sourceFile;
    /**
     * <code>DatasetPool</code> constructor.
     *
     * @param imageIndex
     *            The index of the dataset within the originating file.
     * @param originatingFile
     *            The originating <code>File</code>.
     * @param datasetCopies
     *            The initial number of pooled copies of this dataset.
     * @param datasetLimit
     *            The maximum number of copies allowed for this pool.
     */
    public DatasetPool(int imageIndex, final File originatingFile,
            final int datasetCopies, final int datasetLimit) {

        datasetIndex = imageIndex;
        this.datasetLimit = datasetLimit;
        sourceFile = originatingFile;

        // retrieving the name of the originating file.
        final String fileName = originatingFile.getAbsolutePath();

        // synchronized (storedCopies) {
        for (int i = 0; i < datasetCopies; i++) {
            try {

                // retrieving a FileFormat
                FileFormat ffo = FileFormat.getInstance(fileName);
                ffo = ffo.open(fileName, FileFormat.WRITE);
                Group root = (Group) ffo.get("/");

                // setting the copy ID.
                final int uniqueID = IDgenerator++;
                final List membersList = root.getMemberList();
                final int members = membersList.size();
                final int selectedIndex = members > 1 ? imageIndex + 1
                        : imageIndex;

                // getting the required dataset.
                final Dataset dataset = (Dataset) membersList
                        .get(selectedIndex);

                // Building a new copy of this dataset.
                final DatasetCopy dsc = new DatasetCopy(uniqueID, dataset);
                nCopies++;
                availableDatasets++;
                storedCopies.put(Integer.valueOf(uniqueID), dsc);

                // initializing the original properties of the pooled dataset.
                if (i == 0)
                    originalProperties = new DatasetProperties(dsc
                            .getDataset());
            } catch (Exception e) {
                // log the failure instead of printing the stack trace to stdout
                LOGGER.log(Level.WARNING, "Unable to create a dataset copy: "
                        + e.getLocalizedMessage(), e);
            }
        }
        // }
    }

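    /**
     * <code>DatasetPool</code> constructor which uses the default limit of
     * pooled copies.
     */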
    public DatasetPool(int imageIndex, final File originatingFile,
            final int datasetCopies) {
        this(imageIndex, originatingFile, datasetCopies,
                DEFAULT_DATASETSCOPIES_LIMIT);
    }

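    /**
     * <code>DatasetPool</code> constructor which uses the default initial
     * number of copies and the default limit.
     */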
    public DatasetPool(int imageIndex, File originatingFile) {
        this(imageIndex, originatingFile, DEFAULT_DATASETSCOPIES_START,
                DEFAULT_DATASETSCOPIES_LIMIT);
    }

    /** datasets not yet in use */
    private int availableDatasets;

    /** total number of datasets within the pool */
    private int nCopies;

    /** the maximum number of datasets which may be created */
    private int datasetLimit;

    /** the index number of this (sub)dataset within the file */
    private int datasetIndex;

    /** the map containing the real pool of dataset copies */
    private TreeMap storedCopies = new TreeMap();

    /**
     * The original properties of the dataset. Since operations like
     * subregion and subsampling modify the dataset's properties (such as its
     * rank, its dimensions, ...), these properties need to be stored
     * somewhere before the dataset is modified.
     */
    private DatasetProperties originalProperties;

    private static int IDgenerator = 0;

    /**
     * Simple class representing a copy of a <code>Dataset</code>.
     */
    public final class DatasetCopy {

        /** A dataset */
        private Dataset dataset;

        /**
         * <code>true</code> if this copy of the dataset is available;
         * <code>false</code> if it is already in use by some reader/writer.
         */
        private boolean isAvailable;

        /** Represents the unique ID related to this specific copy of the dataset */
        private int copyID; // TODO: Should be long?

        public DatasetCopy(final int copyID, Dataset dataset) {
            this.copyID = copyID;
            isAvailable = true;
            // TODO: prior to calling this method, should I do a real copy?
            this.dataset = dataset;
        }

        public DatasetCopy(final int copyID) {
            this.copyID = copyID;
            isAvailable = true;

            final String fileName = sourceFile.getAbsolutePath();

            try {
                FileFormat ffo = FileFormat.getInstance(fileName);

                ffo = ffo.open(fileName, FileFormat.WRITE);
                Group root = (Group) ffo.get("/");

                // mirroring the selection rule used in the pool constructor:
                // when the root group has more than one member, the dataset
                // index is shifted by one.
                final List membersList = root.getMemberList();
                final int selectedIndex = membersList.size() > 1
                        ? datasetIndex + 1 : datasetIndex;
                final Dataset dataset = (Dataset) membersList
                        .get(selectedIndex);
                this.dataset = dataset;

            } catch (OutOfMemoryError e) {
                LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, e.getLocalizedMessage(), e);
            }
        }

        public Dataset getDataset() {
            return dataset;
        }

        public int getCopyID() {
            return copyID;
        }

        public boolean isAvailable() {
            return isAvailable;
        }

        public void setAvailable(boolean isAvailable) {
            this.isAvailable = isAvailable;
        }

        /**
         * Restores the original properties for this dataset copy. A copy of
         * each value needs to be computed.
         */
        private void restoreOriginalProperties() {
            long[] start = this.dataset.getStartDims();
            long[] stride = this.dataset.getStride();
            long[] dims = this.dataset.getDims();
            int[] selectedIndex = this.dataset.getSelectedIndex();
            long[] sizes = this.dataset.getSelectedDims();

            final int startLength = originalProperties.start.length;
            for (int i = 0; i < startLength; i++) {
                start[i] = originalProperties.start[i];
            }
            final int strideLength = originalProperties.stride.length;
            for (int i = 0; i < strideLength; i++) {
                stride[i] = originalProperties.stride[i];
            }
            final int dimsLength = originalProperties.dims.length;
            for (int i = 0; i < dimsLength; i++) {
                dims[i] = originalProperties.dims[i];
            }
            final int sizesLength = originalProperties.sizes.length;
            for (int i = 0; i < sizesLength; i++) {
                sizes[i] = originalProperties.sizes[i];
            }
            final int selectedIndexLength = originalProperties.selectedIndex.length;
            for (int i = 0; i < selectedIndexLength; i++) {
                selectedIndex[i] = originalProperties.selectedIndex[i];
            }

            this.dataset.init();

            // TODO: Is this needed?
            this.dataset.clearData();
        }
    }

    /**
     * A simple class which allows storing the properties of a dataset before
     * it is used/accessed by mechanisms which may modify those properties.
     *
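     * For instance, in the HDF Object API a subsampled read is usually set
     * up by adjusting the selection arrays in place (an illustrative sketch;
     * the chosen offsets and the factor 2 are assumptions):
     *
     * <pre>{@code
     * long[] start = dataset.getStartDims();    // selection offset
     * long[] stride = dataset.getStride();      // subsampling step
     * long[] sizes = dataset.getSelectedDims(); // selection size
     * start[0] = 0;
     * stride[0] = 2;                            // read every 2nd element
     * sizes[0] = dataset.getDims()[0] / 2;
     * // after such changes, the original values are lost unless they
     * // have been saved in a DatasetProperties instance beforehand
     * }</pre>
     *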
     */
    protected final class DatasetProperties {

        /**
         * <code>DatasetProperties</code> constructor. It copies all the
         * fields which may be changed during a parameterized read operation.
         */
        public DatasetProperties(Dataset dataset) {
            long[] start = dataset.getStartDims();
            long[] stride = dataset.getStride();
            long[] dims = dataset.getDims();
            int[] selectedIndex = dataset.getSelectedIndex();
            long[] sizes = dataset.getSelectedDims();

            final int startLength = start.length;
            this.start = new long[startLength];
            for (int i = 0; i < startLength; i++) {
                this.start[i] = start[i];
            }
            final int strideLength = stride.length;
            this.stride = new long[strideLength];
            for (int i = 0; i < strideLength; i++) {
                this.stride[i] = stride[i];
            }
            final int dimsLength = dims.length;
            this.dims = new long[dimsLength];
            for (int i = 0; i < dimsLength; i++) {
                this.dims[i] = dims[i];
            }
            final int sizesLength = sizes.length;
            this.sizes = new long[sizesLength];
            for (int i = 0; i < sizesLength; i++) {
                this.sizes[i] = sizes[i];
            }
            final int selectedIndexLength = selectedIndex.length;
            this.selectedIndex = new int[selectedIndexLength];
            for (int i = 0; i < selectedIndexLength; i++) {
                this.selectedIndex[i] = selectedIndex[i];
            }
        }

        /** The starting position of the selected dataset */
        private final long[] start;

        /**
         * The stride of the selected dataset. Essentially, the stride is the
         * number of elements to move from the start location in each
         * dimension.
         */
        private final long[] stride;

        /** The current dimension sizes of the selected dataset */
        private final long[] dims;

        /** The selected dimensions for display */
        private final int[] selectedIndex;

        /** The selected dimension sizes of the selected dataset */
        private final long[] sizes;
    }

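    /**
     * Returns <code>true</code> if at least one pooled copy is currently
     * available.
     */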
    public final boolean hasAvailableDatasets() {
        return availableDatasets > 0;
    }

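    /**
     * Retrieves an available <code>DatasetCopy</code> from the pool and
     * marks it as in use. If no copy is available but the pool limit has not
     * been reached yet, a brand new copy is created. Returns
     * <code>null</code> when the pool is exhausted.
     */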
    public DatasetCopy getDatasetCopy() {
        DatasetCopy dsc = null;

        if (hasAvailableDatasets()) {
            // synchronized (storedCopies) {
            final Set set = storedCopies.keySet();
            final Iterator iter = set.iterator();
            // scanning the pool for an available copy
            while (iter.hasNext()) {
                dsc = (DatasetCopy) storedCopies.get(iter.next());
                if (dsc.isAvailable()) {
                    dsc.setAvailable(false);
                    availableDatasets--;
                    break;
                }
            }
            // }
        } else if (nCopies < datasetLimit) {
            // I can instantiate some other DatasetCopy's

            // synchronized (storedCopies) {
            final int uniqueID = IDgenerator++;
            dsc = new DatasetCopy(uniqueID);
            nCopies++;
            storedCopies.put(Integer.valueOf(uniqueID), dsc);

            // the new copy is handed straight to the caller, so it must be
            // flagged as unavailable and must not increase the available count
            dsc.setAvailable(false);
            // }
        }
        return dsc;
    }

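    /**
     * Returns a copy to the pool, making it available again. If the copy has
     * been modified by a read operation, its original properties are
     * restored first.
     */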
    public void getBackDatasetCopy(final int copyID, boolean hasBeenModified) {
        // TODO: Add some checks
        // synchronized (storedCopies) {
        DatasetCopy dsc = (DatasetCopy) storedCopies.get(Integer
                .valueOf(copyID));
        if (hasBeenModified)
            dsc.restoreOriginalProperties();
        dsc.setAvailable(true);
        availableDatasets++;
        // }
    }

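    /** Returns the original (unmodified) properties of the pooled dataset. */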
    public DatasetProperties getOriginalProperties() {
        return originalProperties;
    }
}