package org.apache.ojb.broker.util.dbhandling;

/* Copyright 2004-2005 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.*;
import java.util.HashMap;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor;
import org.apache.ojb.broker.platforms.PlatformException;
import org.apache.ojb.broker.util.logging.LoggerFactory;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.SQLExec;
import org.apache.tools.ant.types.FileSet;
import org.apache.torque.task.TorqueDataModelTask;
import org.apache.torque.task.TorqueSQLExec;
import org.apache.torque.task.TorqueSQLTask;

/**
 * Provides basic database handling (drop, create, init) via torque.
 *
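 * <p>
 * A minimal usage sketch (the source directory and schema file name below are
 * placeholder values, and the connection descriptor <code>jcd</code> is assumed
 * to be configured elsewhere):
 * <pre>
 * TorqueDBHandling handling = new TorqueDBHandling();
 *
 * handling.setConnection(jcd);                                    // an initialized JdbcConnectionDescriptor
 * handling.addDBDefinitionFiles("schemas", "project-schema.xml");
 * handling.createDB();                                            // create the database itself
 * handling.initDB();                                              // create the tables etc.
 * </pre>
 *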
 * @author Thomas Dudziak
 */
public class TorqueDBHandling implements DBHandling {
    /** Torque db platforms */
    protected static final String TORQUE_PLATFORM_DB2 = "db2";
    protected static final String TORQUE_PLATFORM_HYPERSONIC = "hypersonic";
    protected static final String TORQUE_PLATFORM_INTERBASE = "interbase";
    protected static final String TORQUE_PLATFORM_MSSQL = "mssql";
    protected static final String TORQUE_PLATFORM_MYSQL = "mysql";
    protected static final String TORQUE_PLATFORM_ORACLE = "oracle";
    protected static final String TORQUE_PLATFORM_POSTGRESQL = "postgresql";
    protected static final String TORQUE_PLATFORM_SAPDB = "sapdb";
    protected static final String TORQUE_PLATFORM_SYBASE = "sybase";

    /** The name of the db-creation script */
    private static final String CREATION_SCRIPT_NAME = "create-db.sql";
    /** The name of the torque database mapping file */
    private static final String SQL_DB_MAP_NAME = "sqldb.map";

    /** Mapping from ojb dbms to torque database setting */
    private static HashMap _dbmsToTorqueDb = new HashMap();

    static {
        _dbmsToTorqueDb.put("db2", TORQUE_PLATFORM_DB2);
        _dbmsToTorqueDb.put("hsqldb", TORQUE_PLATFORM_HYPERSONIC);
        _dbmsToTorqueDb.put("firebird", TORQUE_PLATFORM_INTERBASE);
        _dbmsToTorqueDb.put("mssqlserver", TORQUE_PLATFORM_MSSQL);
        _dbmsToTorqueDb.put("mysql", TORQUE_PLATFORM_MYSQL);
        _dbmsToTorqueDb.put("oracle", TORQUE_PLATFORM_ORACLE);
        _dbmsToTorqueDb.put("oracle9i", TORQUE_PLATFORM_ORACLE);
        _dbmsToTorqueDb.put("postgresql", TORQUE_PLATFORM_POSTGRESQL);
        _dbmsToTorqueDb.put("sapdb", TORQUE_PLATFORM_SAPDB);
        _dbmsToTorqueDb.put("sybaseasa", TORQUE_PLATFORM_SYBASE);
        _dbmsToTorqueDb.put("sybasease", TORQUE_PLATFORM_SYBASE);
        _dbmsToTorqueDb.put("sybase", TORQUE_PLATFORM_SYBASE);
    }

    /** The jdbc connection for communicating with the db */
    private JdbcConnectionDescriptor _jcd;
    /** The target database */
    private String _targetDatabase;
    /** The working directory */
    private File _workDir;
    /** The compressed contents of the torque schemata */
    private HashMap _torqueSchemata = new HashMap();
    /** The compressed content of the creation script */
    private byte[] _creationScript;
    /** The compressed contents of the db initialization scripts */
    private HashMap _initScripts = new HashMap();

    /**
     * Creates a new handling object.
     */
    public TorqueDBHandling() {
    }

    /**
     * Sets the jdbc connection to use.
     *
     * @param jcd The connection to use
     * @throws PlatformException If the target database cannot be handled with torque
     */
    public void setConnection(JdbcConnectionDescriptor jcd) throws PlatformException {
        _jcd = jcd;

        String targetDatabase = (String) _dbmsToTorqueDb.get(_jcd.getDbms().toLowerCase());

        if (targetDatabase == null) {
            throw new PlatformException("Database " + _jcd.getDbms() + " is not supported by torque");
        }
        if (!targetDatabase.equals(_targetDatabase)) {
            _targetDatabase = targetDatabase;
            _creationScript = null;
            _initScripts.clear();
        }
    }

    /**
     * Returns the connection descriptor used by this handling object.
     *
     * @return The connection descriptor
     */
    public JdbcConnectionDescriptor getConnection() {
        return _jcd;
    }

    /**
     * Returns the torque database platform used.
     *
     * @return The target db platform
     */
    public String getTargetTorquePlatform() {
        return _targetDatabase;
    }

    /**
     * Adds the input files (in our case torque schema files) to use.
     *
     * @param srcDir          The directory containing the files
     * @param listOfFilenames The filenames in a comma-separated list
     * @throws IOException If a file could not be read
     */
    public void addDBDefinitionFiles(String srcDir, String listOfFilenames) throws IOException {
        StringTokenizer tokenizer = new StringTokenizer(listOfFilenames, ",");
        File dir = new File(srcDir);
        String filename;

        while (tokenizer.hasMoreTokens()) {
            filename = tokenizer.nextToken().trim();
            if (filename.length() > 0) {
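                // the schema content is stored under a generated name ("schemaN.xml");
                // the original file name is not preserved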
                _torqueSchemata.put("schema" + _torqueSchemata.size() + ".xml",
                                    readTextCompressed(new File(dir, filename)));
            }
        }
    }

    /**
     * Adds an input stream of a db definition (in our case, a torque schema file).
     *
     * @param schemaStream The input stream
     * @throws IOException If the stream could not be read
     */
    public void addDBDefinitionFile(InputStream schemaStream) throws IOException {
        _torqueSchemata.put("schema" + _torqueSchemata.size() + ".xml",
                            readStreamCompressed(schemaStream));
    }

    /**
     * Writes the torque schemata to files in the given directory and returns
     * a comma-separated list of the filenames.
     *
     * @param dir The directory to write the files to
     * @return The list of filenames
     * @throws IOException If an error occurred
     */
    private String writeSchemata(File dir) throws IOException {
        writeCompressedTexts(dir, _torqueSchemata);

        StringBuffer includes = new StringBuffer();

        for (Iterator it = _torqueSchemata.keySet().iterator(); it.hasNext();) {
            includes.append((String) it.next());
            if (it.hasNext()) {
                includes.append(",");
            }
        }
        return includes.toString();
    }

    /**
     * Creates the db-creation sql script (but does not execute it).
     *
     * @throws PlatformException If some error occurred
     */
    public void createCreationScript() throws PlatformException {
        Project project = new Project();
        TorqueDataModelTask modelTask = new TorqueDataModelTask();
        File tmpDir = null;
        File scriptFile = null;

        _creationScript = null;
        try {
            tmpDir = new File(getWorkDir(), "schemas");
            tmpDir.mkdir();

            String includes = writeSchemata(tmpDir);

            scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME);

            project.setBasedir(tmpDir.getAbsolutePath());

            // populating with defaults
            modelTask.setProject(project);
            modelTask.setUseClasspath(true);
            modelTask.setControlTemplate("sql/db-init/Control.vm");
            modelTask.setOutputDirectory(tmpDir);
            modelTask.setOutputFile(CREATION_SCRIPT_NAME);
            modelTask.setTargetDatabase(_targetDatabase);

            FileSet files = new FileSet();

            files.setDir(tmpDir);
            files.setIncludes(includes);
            modelTask.addFileset(files);
            modelTask.execute();

            _creationScript = readTextCompressed(scriptFile);

            deleteDir(tmpDir);
        } catch (Exception ex) {
            // clean-up
            if ((tmpDir != null) && tmpDir.exists()) {
                deleteDir(tmpDir);
            }
            throw new PlatformException(ex);
        }
    }

    /**
     * Creates the database.
     *
     * @throws PlatformException If some error occurred
     */
    public void createDB() throws PlatformException {
        if (_creationScript == null) {
            createCreationScript();
        }

        Project project = new Project();
        TorqueDataModelTask modelTask = new TorqueDataModelTask();
        File tmpDir = null;
        File scriptFile = null;

        try {
            tmpDir = new File(getWorkDir(), "schemas");
            tmpDir.mkdir();

            scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME);

            writeCompressedText(scriptFile, _creationScript);

            project.setBasedir(tmpDir.getAbsolutePath());

            // we use the ant task 'sql' to execute the creation script
            SQLExec sqlTask = new SQLExec();
            SQLExec.OnError onError = new SQLExec.OnError();

            onError.setValue("continue");
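            // "continue" makes the SQLExec task keep executing the remaining
            // statements even if an individual statement fails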
            sqlTask.setProject(project);
            sqlTask.setAutocommit(true);
            sqlTask.setDriver(_jcd.getDriver());
            sqlTask.setOnerror(onError);
            sqlTask.setUserid(_jcd.getUserName());
            sqlTask.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
            sqlTask.setUrl(getDBCreationUrl());
            sqlTask.setSrc(scriptFile);
            sqlTask.execute();

            deleteDir(tmpDir);
        } catch (Exception ex) {
            // clean-up
            if ((tmpDir != null) && tmpDir.exists()) {
                try {
                    scriptFile.delete();
                } catch (NullPointerException e) {
                    LoggerFactory.getLogger(this.getClass()).error(
                            "NPE While deleting scriptFile [" + scriptFile.getName() + "]", e);
                }
            }
            throw new PlatformException(ex);
        }
    }

    /**
     * Creates the initialization scripts (creation of tables etc.) but does
     * not execute them.
     *
     * @throws PlatformException If some error occurred
     */
    public void createInitScripts() throws PlatformException {
        Project project = new Project();
        TorqueSQLTask sqlTask = new TorqueSQLTask();
        File schemaDir = null;
        File sqlDir = null;

        _initScripts.clear();
        try {
            File tmpDir = getWorkDir();

            schemaDir = new File(tmpDir, "schemas");
            sqlDir = new File(tmpDir, "sql");
            schemaDir.mkdir();
            sqlDir.mkdir();

            String includes = writeSchemata(schemaDir);
            File sqlDbMapFile = new File(sqlDir, SQL_DB_MAP_NAME);

            sqlDbMapFile.createNewFile();
            project.setBasedir(sqlDir.getAbsolutePath());

            // populating with defaults
            sqlTask.setProject(project);
            sqlTask.setUseClasspath(true);
            sqlTask.setBasePathToDbProps("sql/base/");
            sqlTask.setControlTemplate("sql/base/Control.vm");
            sqlTask.setOutputDirectory(sqlDir);
            // we put the report in the parent directory as we don't want
            // to read it in later on
            sqlTask.setOutputFile("../report.sql.generation");
            sqlTask.setSqlDbMap(SQL_DB_MAP_NAME);
            sqlTask.setTargetDatabase(_targetDatabase);

            FileSet files = new FileSet();

            files.setDir(schemaDir);
            files.setIncludes(includes);
            sqlTask.addFileset(files);
            sqlTask.execute();

            readTextsCompressed(sqlDir, _initScripts);
            deleteDir(schemaDir);
            deleteDir(sqlDir);
        } catch (Exception ex) {
            // clean-up
            if ((schemaDir != null) && schemaDir.exists()) {
                deleteDir(schemaDir);
            }
            if ((sqlDir != null) && sqlDir.exists()) {
                deleteDir(sqlDir);
            }
            throw new PlatformException(ex);
        }
    }

    /**
     * Creates the tables according to the schema files.
     *
     * @throws PlatformException If some error occurred
     */
    public void initDB() throws PlatformException {
        if (_initScripts.isEmpty()) {
            createInitScripts();
        }

        Project project = new Project();
        TorqueSQLTask sqlTask = new TorqueSQLTask();
        File outputDir = null;

        try {
            outputDir = new File(getWorkDir(), "sql");

            outputDir.mkdir();
            writeCompressedTexts(outputDir, _initScripts);

            project.setBasedir(outputDir.getAbsolutePath());

            // executing the generated sql, but this time with a torque task
            TorqueSQLExec sqlExec = new TorqueSQLExec();
            TorqueSQLExec.OnError onError = new TorqueSQLExec.OnError();

            sqlExec.setProject(project);
            onError.setValue("continue");
            sqlExec.setAutocommit(true);
            sqlExec.setDriver(_jcd.getDriver());
            sqlExec.setOnerror(onError);
            sqlExec.setUserid(_jcd.getUserName());
            sqlExec.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
            sqlExec.setUrl(getDBManipulationUrl());
            sqlExec.setSrcDir(outputDir.getAbsolutePath());
            sqlExec.setSqlDbMap(SQL_DB_MAP_NAME);
            sqlExec.execute();

            deleteDir(outputDir);
        } catch (Exception ex) {
            // clean-up
            if (outputDir != null) {
                deleteDir(outputDir);
            }
            throw new PlatformException(ex);
        }
    }

    /**
     * Template-and-Hook method for generating the url required by the jdbc driver
     * to allow for creating a database (as opposed to accessing an already-existing
     * database).
     *
     * @return The db creation url
     */
    protected String getDBCreationUrl() {
        JdbcConnectionDescriptor jcd = getConnection();

        // currently I only know about specifics for mysql
        if (TORQUE_PLATFORM_MYSQL.equals(getTargetTorquePlatform())) {
            // we have to remove the db name as the jdbc driver would try to connect to
            // a non-existing db
            // the db-alias has this form: [host&port]/[dbname]?[options]
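            // e.g. a db-alias of "//localhost:3306/mydb?useUnicode=true" (hypothetical values)
            // becomes "//localhost:3306/?useUnicode=true": the db name is stripped, the options are kept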
            String dbAliasPrefix = jcd.getDbAlias();
            String dbAliasSuffix = "";
            int questionPos = dbAliasPrefix.indexOf('?');

            if (questionPos > 0) {
                dbAliasSuffix = dbAliasPrefix.substring(questionPos);
                dbAliasPrefix = dbAliasPrefix.substring(0, questionPos);
            }

            int slashPos = dbAliasPrefix.lastIndexOf('/');

            if (slashPos > 0) {
                // it is important that the slash at the end is present
                dbAliasPrefix = dbAliasPrefix.substring(0, slashPos + 1);
            }
            return jcd.getProtocol() + ":" + jcd.getSubProtocol() + ":" + dbAliasPrefix + dbAliasSuffix;
        } else if (TORQUE_PLATFORM_POSTGRESQL.equals(getTargetTorquePlatform())) {
            // we have to replace the db name with 'template1'
            // the db-alias has this form: [host&port]/[dbname]?[options]
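            // e.g. a db-alias of "//localhost/mydb" (hypothetical values) becomes "//localhost/template1"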
            String dbAliasPrefix = jcd.getDbAlias();
            String dbAliasSuffix = "";
            int questionPos = dbAliasPrefix.indexOf('?');

            if (questionPos > 0) {
                dbAliasSuffix = dbAliasPrefix.substring(questionPos);
                dbAliasPrefix = dbAliasPrefix.substring(0, questionPos);
            }

            int slashPos = dbAliasPrefix.lastIndexOf('/');

            if (slashPos > 0) {
                // it is important that the slash at the end is present
                dbAliasPrefix = dbAliasPrefix.substring(0, slashPos + 1);
            } else {
                dbAliasPrefix += "/";
            }
            dbAliasPrefix += "template1";
            if (dbAliasSuffix.length() > 0) {
                dbAliasPrefix += "/";
            }
            return jcd.getProtocol() + ":" + jcd.getSubProtocol() + ":" + dbAliasPrefix + dbAliasSuffix;

        } else {
            return jcd.getProtocol() + ":" + jcd.getSubProtocol() + ":" + jcd.getDbAlias();
        }
    }

    /**
     * Template-and-Hook method for generating the url required by the jdbc driver
     * to allow for modifying an existing database.
     *
     * @return The db manipulation url
     */
    protected String getDBManipulationUrl() {
        JdbcConnectionDescriptor jcd = getConnection();

        return jcd.getProtocol() + ":" + jcd.getSubProtocol() + ":" + jcd.getDbAlias();
    }

    /**
     * Reads the given text file and compresses its content.
     *
     * @param file The file
     * @return A byte array containing the GZIP-compressed content of the file
     * @throws IOException If an error occurred
     */
    private byte[] readTextCompressed(File file) throws IOException {
        return readStreamCompressed(new FileInputStream(file));
    }

    /**
     * Reads the given text stream and compresses its content.
     *
     * @param stream The input stream
     * @return A byte array containing the GZIP-compressed content of the stream
     * @throws IOException If an error occurred
     */
    private byte[] readStreamCompressed(InputStream stream) throws IOException {
        ByteArrayOutputStream bao = new ByteArrayOutputStream();
        GZIPOutputStream gos = new GZIPOutputStream(bao);
        OutputStreamWriter output = new OutputStreamWriter(gos);
        BufferedReader input = new BufferedReader(new InputStreamReader(stream));
        String line;

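        // copy line by line so that platform-specific line endings are normalized
        // to '\n' in the compressed content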
        while ((line = input.readLine()) != null) {
            output.write(line);
            output.write('\n');
        }
        input.close();
        stream.close();
        output.close();
        gos.close();
        bao.close();
        return bao.toByteArray();
    }

    /**
     * Reads the text files in the given directory and puts their content
     * in the given map after compressing it. Note that this method does not
     * traverse recursively into sub-directories.
     *
     * @param dir     The directory to process
     * @param results Map that will receive the contents (indexed by the relative filenames)
     * @throws IOException If an error occurred
     */
    private void readTextsCompressed(File dir, HashMap results) throws IOException {
        if (dir.exists() && dir.isDirectory()) {
            File[] files = dir.listFiles();

            for (int idx = 0; idx < files.length; idx++) {
                if (files[idx].isDirectory()) {
                    continue;
                }
                results.put(files[idx].getName(), readTextCompressed(files[idx]));
            }
        }
    }

    /**
     * Uncompresses the given textual content and writes it to the given file.
     *
     * @param file              The file to write to
     * @param compressedContent The compressed content
     * @throws IOException If an error occurred
     */
    private void writeCompressedText(File file, byte[] compressedContent) throws IOException {
        ByteArrayInputStream bais = new ByteArrayInputStream(compressedContent);
        GZIPInputStream gis = new GZIPInputStream(bais);
        BufferedReader input = new BufferedReader(new InputStreamReader(gis));
        BufferedWriter output = new BufferedWriter(new FileWriter(file));
        String line;

        while ((line = input.readLine()) != null) {
            output.write(line);
            output.write('\n');
        }
        input.close();
        gis.close();
        bais.close();
        output.close();
    }

    /**
     * Uncompresses the textual contents in the given map and writes them to the files
     * denoted by the keys of the map.
     *
     * @param dir      The base directory into which the files will be written
     * @param contents The map containing the contents indexed by the filename
     * @throws IOException If an error occurred
     */
    private void writeCompressedTexts(File dir, HashMap contents) throws IOException {
        String filename;

        for (Iterator nameIt = contents.keySet().iterator(); nameIt.hasNext();) {
            filename = (String) nameIt.next();
            writeCompressedText(new File(dir, filename), (byte[]) contents.get(filename));
        }
    }

    /**
     * Sets the working directory.
     *
     * @param dir The directory
     * @throws IOException If the directory does not exist or cannot be written/read
     */
    public void setWorkDir(String dir) throws IOException {
        File workDir = new File(dir);

        if (!workDir.exists() || !workDir.canWrite() || !workDir.canRead()) {
            throw new IOException("Cannot access directory " + dir);
        }
        _workDir = workDir;
    }

    /**
     * Returns the temporary directory used by java.
     *
     * @return The temporary directory
     * @throws IOException If an io error occurred
     */
    private File getWorkDir() throws IOException {
        if (_workDir == null) {
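            // create (and immediately delete) a dummy temp file solely to find out
            // which directory java uses for temporary files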
            File dummy = File.createTempFile("dummy", ".log");
            String workDir = dummy.getPath().substring(0,
                    dummy.getPath().lastIndexOf(File.separatorChar));

            if ((workDir == null) || (workDir.length() == 0)) {
                workDir = ".";
            }
            dummy.delete();
            _workDir = new File(workDir);
        }
        return _workDir;
    }

    /**
     * Little helper function that recursively deletes a directory.
     *
     * @param dir The directory
     */
    private void deleteDir(File dir) {
        if (dir.exists() && dir.isDirectory()) {
            File[] files = dir.listFiles();

            for (int idx = 0; idx < files.length; idx++) {
                if (!files[idx].exists()) {
                    continue;
                }
                if (files[idx].isDirectory()) {
                    deleteDir(files[idx]);
                } else {
                    files[idx].delete();
                }
            }
            dir.delete();
        }
    }
}
|