/*
 * Copyright 2006 Pentaho Corporation. All rights reserved.
 * This software was developed by Pentaho Corporation and is provided under the terms
 * of the Mozilla Public License, Version 1.1, or any later version. You may not use
 * this file except in compliance with the license. If you need a copy of the license,
 * please go to http://www.mozilla.org/MPL/MPL-1.1.txt. The Original Code is the Pentaho
 * BI Platform. The Initial Developer is Pentaho Corporation.
 *
 * Software distributed under the Mozilla Public License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to
 * the license for the specific language governing your rights and limitations.
 *
 * @created Jul 13, 2005
 * @author James Dixon
 *
 */

package org.pentaho.plugin.shark;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.enhydra.shark.Shark;
import org.enhydra.shark.api.client.wfbase.BaseException;
import org.enhydra.shark.api.client.wfservice.PackageAdministration;
import org.enhydra.shark.api.client.wfservice.PackageHasActiveProcesses;
import org.enhydra.shark.api.client.wfservice.PackageInUse;
import org.enhydra.shark.api.client.wfservice.RepositoryMgr;
import org.pentaho.core.publisher.BasePublisher;
import org.pentaho.core.session.IPentahoSession;
import org.pentaho.core.system.PentahoSystem;
import org.pentaho.messages.Messages;

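/**
 * Publishes the workflow definitions in the Pentaho solution repository to the
 * Enhydra Shark workflow engine. XPDL files found under the solution root are
 * copied into Shark's external repository, new and modified packages are
 * loaded or updated in the engine, and packages that no longer exist in the
 * solution are unloaded and deleted.
 */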
public class SharkPublisher extends BasePublisher {

    private static final long serialVersionUID = 3866069850517891673L;

    private static final Log logger = LogFactory.getLog(SharkPublisher.class);

    /** XPDL files that already exist in the external repository but have changed */
    private HashMap modifiedXPDLs;

    /** XPDL files that already exist in the external repository and are unchanged */
    private HashMap unmodifiedXPDLs;

    /** XPDL files that do not yet exist in the external repository */
    private HashMap newXPDLs;

    private SharkManager shark = null;

    public Log getLogger() {
        return logger;
    }

    public String getName() {
        return Messages.getString("SharkPublisher.USER_TITLE"); //$NON-NLS-1$
    }

    public String getDescription() {
        return Messages.getString("SharkPublisher.USER_PUBLISH_DESCRIPTION"); //$NON-NLS-1$
    }

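    /**
     * Synchronizes the Shark external repository with the XPDL files found in
     * the Pentaho solution repository, then loads, updates or unloads the
     * corresponding workflow packages in the Shark engine.
     *
     * @param session the session of the user performing the publish
     * @return a localized message describing the outcome of the publish
     */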
    public String publish(IPentahoSession session) {

        try {
            unmodifiedXPDLs = new HashMap();
            modifiedXPDLs = new HashMap();
            newXPDLs = new HashMap();

            File solutionRoot = new File(PentahoSystem.getApplicationContext().getSolutionPath("")); //$NON-NLS-1$

            shark = SharkManager.getInstance(session);
            PackageAdministration packageAdmin = Shark.getInstance().getAdminInterface().getPackageAdministration();
            RepositoryMgr repository = Shark.getInstance().getRepositoryManager();

            // TODO: keep a list of the XPDL files in the solution so we can
            // delete unwanted ones

            // process the solution directories
            if (debug)
                debug(Messages.getString("SharkPublisher.DEBUG_COPYING_FILES")); //$NON-NLS-1$
            processDir(solutionRoot, repository, packageAdmin);

            // delete any subdirectories and unused XPDLs
            if (debug)
                debug(Messages.getString("SharkPublisher.DEBUG_CLEANSING_REPOSITORY")); //$NON-NLS-1$
            cleanupExternalRepository();

            // add any new XPDL files
            if (debug)
                debug(Messages.getString("SharkPublisher.DEBUG_ADDING_NEW_FILES")); //$NON-NLS-1$
            addXpdlFiles(newXPDLs, repository, packageAdmin);

            // add any unmodified files that have not yet been loaded;
            // addXpdlFiles will not re-open packages that are already open
            if (debug)
                debug(Messages.getString("SharkPublisher.DEBUG_ADDING_UNMODIFIED_FILES")); //$NON-NLS-1$
            addXpdlFiles(unmodifiedXPDLs, repository, packageAdmin);

            // update any modified XPDL files
            if (debug)
                debug(Messages.getString("SharkPublisher.DEBUG_UPDATING_FILES")); //$NON-NLS-1$
            updateXpdlFiles(modifiedXPDLs, repository, packageAdmin);

            // This is a temporary call to do maintenance tasks after the
            // publish
            SharkMaintenance maintenance = new SharkMaintenance();
            maintenance.doMaintenance(session);
        } catch (Throwable t) {
            error(Messages.getErrorString("SharkPublisher.ERROR_0009_PUBLISH_FAILED"), t); //$NON-NLS-1$
            return Messages.getString("SharkPublisher.USER_ERROR_PUBLISH_FAILED") + t.getLocalizedMessage(); //$NON-NLS-1$
        }
        return Messages.getString("SharkPublisher.USER_WORKFLOW_UPDATED"); //$NON-NLS-1$

    }

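    /**
     * Recursively walks a solution directory, copying every XPDL file it finds
     * into the Shark external repository. The external repository directory
     * itself is skipped so that already-published files are not re-processed.
     *
     * @param root the directory to process
     * @param repository the Shark repository manager
     * @param packageAdmin the Shark package administration interface
     */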
    private void processDir(File root, RepositoryMgr repository, PackageAdministration packageAdmin) {

        // process every file in this directory
        File files[] = root.listFiles();
        if (files != null) {
            for (int i = 0; i < files.length; i++) {
                File file = files[i];
                String path = file.getAbsolutePath();
                if (path.endsWith("shark\\repository\\external") //$NON-NLS-1$
                        || path.endsWith("shark/repository/external")) { //$NON-NLS-1$
                    // skip the Shark external repository itself
                    continue;
                }
                if (file.isDirectory()) {
                    // process this sub-directory
                    processDir(file, repository, packageAdmin);
                } else if (file.getName().toLowerCase().endsWith(".xpdl")) { //$NON-NLS-1$
                    // process this XPDL file
                    copyXpdlFile(file);
                }
            }
        }
    }

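    /**
     * Copies a single XPDL file from the solution into the Shark external
     * repository, comparing it against any existing copy so that it can be
     * recorded as new, modified or unmodified for the later load/update steps.
     *
     * @param xpdlFile the XPDL file in the solution repository
     */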
    private void copyXpdlFile(File xpdlFile) {

        try {
            // create a byte array to hold the file contents
            long size = xpdlFile.length();
            byte bytes[] = new byte[(int) size];
            // read the file into the byte array
            FileInputStream stream = new FileInputStream(xpdlFile);
            stream.read(bytes);
            stream.close();

            // copy this file to the Shark external repository
            File destination = new File(shark.getExternalRepositoryPath() + File.separator + xpdlFile.getName());
            boolean modified = false;
            if (destination.exists()) {
                long destinationSize = destination.length();
                if (size != destinationSize) {
                    modified = true;
                } else {
                    byte destinationBytes[] = new byte[(int) size];
                    stream = new FileInputStream(destination);
                    stream.read(destinationBytes);
                    stream.close();
                    // TODO make this compare more intelligent
                    for (int i = 0; i < size; i++) {
                        if (bytes[i] != destinationBytes[i]) {
                            modified = true;
                            break;
                        }
                    }
                }
                if (modified) {
                    modifiedXPDLs.put(destination.getName(), ""); //$NON-NLS-1$
                } else {
                    unmodifiedXPDLs.put(destination.getName(), ""); //$NON-NLS-1$
                }
            } else {
                newXPDLs.put(destination.getName(), ""); //$NON-NLS-1$
                modified = true;
            }
            if (modified) {
                // write the new or changed file into the external repository
                FileOutputStream outputStream = new FileOutputStream(destination);
                outputStream.write(bytes);
                outputStream.flush();
                outputStream.close();
            }
        } catch (FileNotFoundException e) {
            // this one is not very likely
            error(Messages.getErrorString(
                    "SharkPublisher.ERROR_0001_COULD_NOT_COPY_MISSING_FILE", xpdlFile.getName())); //$NON-NLS-1$
            e.printStackTrace();
        } catch (IOException e) {
            error(Messages.getErrorString(
                    "SharkPublisher.ERROR_0002_COULD_NOT_COPY_FILE", xpdlFile.getName())); //$NON-NLS-1$
            e.printStackTrace();
        }

    }

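    /**
     * Removes files from the Shark external repository that no longer have a
     * corresponding XPDL file in the solution: non-XPDL files are deleted
     * outright, and orphaned XPDL packages are unloaded from the engine and
     * then deleted. Packages that could not be unloaded because other packages
     * reference them are retried once their parents are gone.
     */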
    private void cleanupExternalRepository() {

        File externalRepositoryDirectory = new File(shark.getExternalRepositoryPath());
        if (!externalRepositoryDirectory.exists()) {
            error(Messages.getErrorString("SharkPublisher.ERROR_0003_EXTERNAL_REPOSITORY_DOES_NOT_EXIST")); //$NON-NLS-1$
            return;
        }

        File files[] = externalRepositoryDirectory.listFiles();
        HashMap packagesInUse = new HashMap();
        if (files != null) {
            for (int i = 0; i < files.length; i++) {
                File file = files[i];
                String fileName = file.getName();
                if (!fileName.toLowerCase().endsWith(".xpdl")) { //$NON-NLS-1$
                    // this is not an XPDL file, so delete it
                    deleteFile(file);
                } else {
                    // check to see if this XPDL is still used
                    if (!unmodifiedXPDLs.containsKey(fileName)
                            && !modifiedXPDLs.containsKey(fileName)
                            && !newXPDLs.containsKey(fileName)) {

                        try {
                            shark.unloadPackage(fileName);
                            if (!shark.isPackageLoaded(fileName))
                                deleteFile(file);
                        } catch (PackageHasActiveProcesses pe) {
                            warn(Messages.getErrorString(
                                    "SharkPublisher.ERROR_0004_UNLOAD_FAIL_PROCESSES_RUNNING", fileName)); //$NON-NLS-1$
                        } catch (PackageInUse pue) {
                            warn(Messages.getErrorString(
                                    "SharkPublisher.ERROR_0005_UNLOAD_FAILED_DUE_TO_REFERENCES", fileName)); //$NON-NLS-1$
                            packagesInUse.put(fileName, file);
                        } catch (BaseException e) {
                            error(Messages.getErrorString(
                                    "SharkPublisher.ERROR_0006_UNLOAD_FAILED", fileName), e); //$NON-NLS-1$
                        }
                    }
                }
            }
        }

        // If the package was referenced by another external package, we assume
        // at this point that the parent package has been deleted - try again to
        // delete the child
        for (Iterator it = packagesInUse.keySet().iterator(); it.hasNext();) {
            String fileName = (String) it.next();
            try {
                shark.unloadPackage(fileName);
                if (!shark.isPackageLoaded(fileName))
                    deleteFile((File) packagesInUse.get(fileName));
            } catch (Exception e) {
                error(Messages.getErrorString(
                        "SharkPublisher.ERROR_0006_UNLOAD_FAILED", fileName), e); //$NON-NLS-1$
            }
        }

    }

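    /**
     * Deletes a file or, recursively, a directory from the external repository,
     * logging whether each deletion succeeded.
     *
     * @param file the file or directory to delete
     */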
    private void deleteFile(File file) {
        if (file.isDirectory()) {
            File files[] = file.listFiles();
            if (files != null) {
                for (int i = 0; i < files.length; i++) {
                    deleteFile(files[i]);
                }
            }
        }

        if (file.delete()) {
            info(Messages.getString(
                    "SharkPublisher.INFO_DELETING_FILE", file.getAbsolutePath())); //$NON-NLS-1$
        } else {
            warn(Messages.getString(
                    "SharkPublisher.WARN_COULD_NOT_DELETE_FILE", file.getAbsolutePath())); //$NON-NLS-1$
        }
    }

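    /**
     * Opens each of the listed XPDL packages in the Shark engine if it is not
     * already open. Used for both new and previously loaded, unmodified files.
     *
     * @param xpdlFileList map whose keys are the XPDL file names to open
     * @param repository the Shark repository manager
     * @param packageAdmin the Shark package administration interface
     */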
    private void addXpdlFiles(HashMap xpdlFileList, RepositoryMgr repository, PackageAdministration packageAdmin) {

        Iterator fileListIterator = xpdlFileList.keySet().iterator();
        while (fileListIterator.hasNext()) {
            String xpdlFileName = (String) fileListIterator.next();
            try {
                String pkgId = repository.getPackageId(xpdlFileName);
                if (!packageAdmin.isPackageOpened(pkgId)) {
                    packageAdmin.openPackage(xpdlFileName);
                }
            } catch (Exception e) {
                error(Messages.getErrorString(
                        "SharkPublisher.ERROR_0007_COULD_NOT_ADD_FILE", xpdlFileName)); //$NON-NLS-1$
                e.printStackTrace();
            }
        }

    }

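    /**
     * Updates each of the listed XPDL packages in the Shark engine with the
     * newly copied file, opening the package first if it is not yet known to
     * the repository.
     *
     * @param xpdlFileList map whose keys are the XPDL file names to update
     * @param repository the Shark repository manager
     * @param packageAdmin the Shark package administration interface
     */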
    private void updateXpdlFiles(HashMap xpdlFileList, RepositoryMgr repository, PackageAdministration packageAdmin) {

        Iterator fileListIterator = xpdlFileList.keySet().iterator();
        while (fileListIterator.hasNext()) {
            String xpdlFileName = (String) fileListIterator.next();
            try {
                String pkgId = repository.getPackageId(xpdlFileName);
                if (pkgId == null) {
                    // the package is not known to the repository yet, so open it
                    packageAdmin.openPackage(xpdlFileName);
                } else {
                    packageAdmin.updatePackage(pkgId, xpdlFileName);
                }
            } catch (Exception e) {
                error(Messages.getErrorString(
                        "SharkPublisher.ERROR_0008_COULD_NOT_UPDATE_FILE", xpdlFileName)); //$NON-NLS-1$
                e.printStackTrace();
            }
        }

    }
}