001: /*
002:
003: Derby - Class org.apache.derbyTesting.functionTests.tests.lang.streams
004:
005: Licensed to the Apache Software Foundation (ASF) under one or more
006: contributor license agreements. See the NOTICE file distributed with
007: this work for additional information regarding copyright ownership.
008: The ASF licenses this file to You under the Apache License, Version 2.0
009: (the "License"); you may not use this file except in compliance with
010: the License. You may obtain a copy of the License at
011:
012: http://www.apache.org/licenses/LICENSE-2.0
013:
014: Unless required by applicable law or agreed to in writing, software
015: distributed under the License is distributed on an "AS IS" BASIS,
016: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
017: See the License for the specific language governing permissions and
018: limitations under the License.
019:
020: */
021:
022: package org.apache.derbyTesting.functionTests.tests.lang;
023:
024: import java.sql.Connection;
025: import java.sql.DriverManager;
026: import java.sql.Statement;
027: import java.sql.PreparedStatement;
028: import java.sql.ResultSet;
029: import java.sql.SQLException;
030: import java.sql.SQLWarning;
031: import java.sql.Types;
032:
033: import org.apache.derby.tools.ij;
034: import org.apache.derby.tools.JDBCDisplayUtil;
035:
036: /**
037: This tests streams, and when we should not materialize it. Beetle entry 4896, 4955.
038:
039: Some of the code comes from conn/largeStreams.java. But this program figures out whether
040: a stream is materialized or not in a different way. Part of the reason is that this test
041: should be run nightly to catch regressions and shouldn't require too much disk space. It
042: figures out whether a stream is materialized or not by comparing the stack levels of different
043: cases. The stack level is when reading the last byte of the stream. According to the current
code, the stack is about 10 levels deeper when reading the stream from store page by page (not
materialized beforehand), compared to the case where the stream is materialized at the SQL language layer.
046: We don't expect this to change dramatically for some time. And this can always be adjusted
047: when needed.
048:
049: For bug 5592 - match db's limits for long varchar which is 32700. In order to enforce that limit
050: we now materialize the stream to make sure we are not trying to overstuff data in long varchar.
051: Because of this, I had to make some changes into the stack level checking for long varchars.
052: */
053:
054: public class streams {
055:
056: private static int pkCount;
057: private static Connection conn;
058:
059: public static void main(String[] args) {
060: System.out.println("Test streams starting");
061:
062: try {
063: // use the ij utility to read the property file and
064: // make the initial connection.
065: ij.getPropertyArg(args);
066: conn = ij.startJBMS();
067:
068: conn.setAutoCommit(true);
069:
070: setup();
071:
072: conn.setAutoCommit(false);
073:
074: doWork();
075:
076: conn.setAutoCommit(true);
077:
078: teardown();
079:
080: conn.close();
081:
082: } catch (Throwable e) {
083: System.out.println("FAIL: exception thrown:");
084: JDBCDisplayUtil.ShowException(System.out, e);
085: }
086:
087: System.out.println("Test streams finished");
088: }
089:
090: static void setup() throws SQLException {
091: Statement stmt = conn.createStatement();
092: stmt
093: .executeUpdate("call SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY('derby.storage.pageSize', '2048')");
094:
095: verifyCount(
096: stmt
097: .executeUpdate("create table t1 (id int, pid int, lvc long varchar, lvb long varchar for bit data)"),
098: 0);
099: verifyCount(
100: stmt
101: .executeUpdate("create table t2 (id int, pid int, lvc long varchar, lvb long varchar for bit data)"),
102: 0);
103: verifyCount(
104: stmt
105: .executeUpdate("create trigger tr21 after insert on t2 for each statement mode db2sql values 1"),
106: 0);
107: verifyCount(
108: stmt
109: .executeUpdate("create table t3 (id int not null primary key, pid int, lvc long varchar, lvb long varchar for bit data, CONSTRAINT FK1 Foreign Key(pid) REFERENCES T3 (id))"),
110: 0);
111: verifyCount(
112: stmt
113: .executeUpdate("create table t4 (id int, longcol long varchar)"),
114: 0);
115: verifyCount(
116: stmt
117: .executeUpdate("create table t5 (id int, longcol long varchar)"),
118: 0);
119: }
120:
121: static void teardown() throws SQLException {
122: Statement stmt = conn.createStatement();
123:
124: verifyCount(stmt.executeUpdate("drop table t1"), 0);
125:
126: verifyCount(stmt.executeUpdate("drop trigger tr21"), 0);
127:
128: verifyCount(stmt.executeUpdate("drop table t2"), 0);
129:
130: verifyCount(stmt.executeUpdate("drop table t3"), 0);
131:
132: verifyCount(stmt.executeUpdate("drop table t4"), 0);
133:
134: verifyCount(stmt.executeUpdate("drop table t5"), 0);
135:
136: stmt.close();
137:
138: System.out.println("teardown complete");
139: }
140:
141: static void verifyCount(int count, int expect) throws SQLException {
142: if (count != expect) {
143: System.out.println("FAIL: Expected " + expect + " got "
144: + count + " rows");
145: throw new SQLException("Wrong number of rows returned");
146: } else
147: System.out.println("PASS: expected and got " + count
148: + (count == 1 ? " row" : " rows"));
149: }
150:
151: private static void doWork() throws Exception {
152:
153: Statement s = conn.createStatement();
154:
155: System.out.println("Start testing");
156:
157: PreparedStatement ps = conn
158: .prepareStatement("insert into t1 values(?, ?, ?,?)");
159: int level1 = insertLongString(ps, 8, true);
160: System.out
161: .println("materialized insert: got reader stack level");
162:
163: ps = conn.prepareStatement("insert into t2 values(?, ?, ?,?)");
164: int level2 = insertLongString(ps, 8, true);
165: System.out
166: .println("materialized insert (for trigger): got reader stack level");
167:
168: if (level1 != level2)
169: System.out
170: .println("FAILED!! level difference not expected since streams are materialized.");
171: else
172: System.out.println("SUCCEED!! stack level as expected.");
173:
174: ps = conn.prepareStatement("insert into t3 values(?, ?, ?,?)");
175: int level3 = insertLongString(ps, 8, true);
176: System.out
177: .println("self ref foreign key insert(should not materialize): got reader stack level");
178: if (level3 == level1)
179: System.out.println("SUCCEED!! levels expected.");
180: else
181: System.out
182: .println("FAILED!! should not materialize in this case.");
183:
184: conn.rollback();
185:
186: s
187: .executeUpdate("insert into t3 values (1,1,'a',null), (2,2,'b',null), (3,3,'c',null)");
188: ps = conn
189: .prepareStatement("update t3 set id = ?, lvc = ? where pid = 2");
190: level1 = insertLongString(ps, 8, false);
191: System.out
192: .println("materialized for multiple row update: got reader stack level");
193:
194: ps = conn
195: .prepareStatement("update t3 set id = ?, lvc = ? where pid = 2 and id = 2");
196: level2 = insertLongString(ps, 8, false);
197: System.out.println("single row update: got reader stack level");
198:
199: if (level1 != level2)
200: System.out
201: .println("FAILED!! level difference not expected because streams are materialized with fix for bug 5592.");
202: else
203: System.out
204: .println("SUCCEED!! single row update materialized stream.");
205:
206: s.executeUpdate("insert into t4 values (1, 'ccccc')");
207: ps = conn.prepareStatement("insert into t4 values(?, ?)");
208: insertLongString(ps, 6, false);
209: s.executeUpdate("insert into t4 values (3, 'aaaaabbbbbb')");
210: s.executeUpdate("insert into t4 values (4, 'bbbbbb')");
211: insertLongString(ps, 5, false);
212: ResultSet rs = s
213: .executeQuery("select id, cast(longcol as varchar(8192)) lcol from t4 order by lcol");
214: if (rs.next()) // 3, aaaaabbbbbb
215: System.out.println("id = " + rs.getInt(1) + " longcol = "
216: + rs.getString(2));
217: if (rs.next()) // 4, bbbbbb
218: System.out.println("id = " + rs.getInt(1) + " longcol = "
219: + rs.getString(2));
220: for (int i = 0; i < 2; i++) {
221: if (rs.next()) {
222: String longcol = rs.getString(2);
223: int collen = longcol.length();
224: System.out.print("id = " + rs.getInt(1)
225: + " longcol length = " + collen);
226: System.out.println(" longcol = "
227: + longcol.substring(0, 5) + "..."
228: + longcol.substring(collen - 5, collen));
229: }
230: }
231: if (rs.next()) // 1, 'ccccc'
232: System.out.println("id = " + rs.getInt(1) + " longcol = "
233: + rs.getString(2));
234: if (rs.next())
235: System.out.println("FAILED, more rows left");
236: else
237: System.out.println("number of rows ok");
238:
239: s.executeUpdate("insert into t5 values (1, 'bbbbbb')");
240: ps = conn.prepareStatement("insert into t5 values(?, ?)");
241: insertLongString(ps, 5, false);
242: insertLongString(ps, 7, false);
243: s.executeUpdate("insert into t5 values (3, 'aaaaabbbbbba')");
244: s.executeUpdate("insert into t5 values (4, 'bbbbbbbbb')");
245: rs = s
246: .executeQuery("select t4.id, t4.longcol, t5.id, cast(t5.longcol as varchar(8192)) lcol from t4, t5 where cast(t4.longcol as varchar(8192)) = cast(t5.longcol as varchar(8192)) order by lcol");
247: while (rs.next()) {
248: System.out.println("t4 id = " + rs.getInt(1)
249: + " t4 longcol length = "
250: + rs.getString(2).length() + " t5 id = "
251: + rs.getInt(3) + " t5 longcol length = "
252: + rs.getString(4).length());
253: }
254:
255: System.out.println("Start testing long var binary");
256: conn.rollback();
257:
258: ps = conn.prepareStatement("insert into t1 values(?, ?, ?,?)");
259: level1 = insertLongBinary(ps, 8);
260: System.out
261: .println("non materialized insert: got reader stack level");
262:
263: ps = conn.prepareStatement("insert into t2 values(?, ?, ?,?)");
264: level2 = insertLongBinary(ps, 8);
265: System.out
266: .println("materialized insert (for trigger): got reader stack level");
267:
268: if (level1 > level2 + 5)
269: System.out.println("SUCCEED, level difference expected.");
270: else
271: System.out.println("FAILED, check stack level change.");
272:
273: ps = conn.prepareStatement("insert into t3 values(?, ?, ?,?)");
274: level3 = insertLongBinary(ps, 8);
275: System.out
276: .println("self ref foreign key insert(should not materialize): got reader stack level");
277: if (level3 == level1)
278: System.out.println("SUCCEED!! levels expected.");
279: else
280: System.out
281: .println("FAILED!! should not materialize stream in this case.");
282:
283: conn.rollback();
284: }
285:
286: private static int insertLongString(PreparedStatement ps,
287: int kchars, boolean isInsert) throws SQLException {
288: // don't end on a clean boundary
289: int chars = (kchars * 1024) + 273;
290:
291: long start = System.currentTimeMillis();
292:
293: DummyReader dr = new DummyReader(chars);
294: if (isInsert) {
295: ps.setInt(1, pkCount);
296: ps.setInt(2, pkCount++);
297: ps.setCharacterStream(3, dr, chars);
298: ps.setNull(4, Types.VARBINARY);
299: } else {
300: ps.setInt(1, 2);
301: ps.setCharacterStream(2, dr, chars);
302: }
303:
304: ps.executeUpdate();
305: long end = System.currentTimeMillis();
306:
307: System.out.println("setCharacterStream " + chars + " chars");
308:
309: return dr.readerStackLevel;
310:
311: }
312:
313: private static int insertLongBinary(PreparedStatement ps, int kbytes)
314: throws SQLException {
315:
316: // add a small number of bytes to ensure that we are not always ending on a clean Mb boundary
317: int bytes = (kbytes * 1024) + 273;
318:
319: long start = System.currentTimeMillis();
320: ps.setInt(1, pkCount);
321: ps.setInt(2, pkCount++);
322: ps.setNull(3, Types.LONGVARCHAR);
323: DummyBinary db = new DummyBinary(bytes);
324: ps.setBinaryStream(4, db, bytes);
325:
326: ps.executeUpdate();
327: long end = System.currentTimeMillis();
328:
329: System.out.println("setBinaryStream " + bytes + " bytes");
330:
331: return db.readerStackLevel;
332: }
333: }
334:
335: class DummyReader extends java.io.Reader {
336:
337: private int count;
338: public int readerStackLevel;
339:
340: DummyReader(int length) {
341: this .count = length;
342: }
343:
344: private void whereAmI() {
345: if (count == 0) {
346: readerStackLevel = -1;
347: try {
348: throw new Throwable();
349: } catch (Throwable e) {
350: try {
351: readerStackLevel = e.getStackTrace().length;
352: // System.out.println("================= stack array length is: " + readerStackLevel);
353: // e.printStackTrace();
354: } catch (NoSuchMethodError nme) {
355: DummyOutputStream dos = new DummyOutputStream();
356: DummyPrintStream dps = new DummyPrintStream(dos);
357: e.printStackTrace(dps);
358: dps.flush();
359: // System.out.println("================= print to dop level num is: " + dps.lines);
360: readerStackLevel = dps.lines;
361: // e.printStackTrace();
362: }
363: }
364: }
365: }
366:
367: public int read() {
368: if (count == 0)
369: return -1;
370:
371: count--;
372: whereAmI();
373:
374: return 'b';
375: }
376:
377: public int read(char[] buf, int offset, int length) {
378:
379: if (count == 0)
380: return -1;
381:
382: if (length > count)
383: length = count;
384:
385: count -= length;
386: whereAmI();
387:
388: java.util.Arrays.fill(buf, offset, offset + length, 'b');
389:
390: return length;
391: }
392:
393: public void close() {
394: }
395: }
396:
397: class DummyBinary extends java.io.InputStream {
398:
399: public int readerStackLevel;
400: int count;
401: byte content = 42;
402:
403: DummyBinary(int length) {
404: this .count = length;
405: }
406:
407: private void whereAmI() {
408: if (count == 0) {
409: readerStackLevel = -1;
410: try {
411: throw new Throwable();
412: } catch (Throwable e) {
413: try {
414: readerStackLevel = e.getStackTrace().length;
415: // System.out.println("================= stack array length is: " + readerStackLevel);
416: // e.printStackTrace();
417: } catch (NoSuchMethodError nme) {
418: DummyOutputStream dos = new DummyOutputStream();
419: DummyPrintStream dps = new DummyPrintStream(dos);
420: e.printStackTrace(dps);
421: dps.flush();
422: // System.out.println("================= print to dop level num is: " + dps.lines);
423: readerStackLevel = dps.lines;
424: // e.printStackTrace();
425: }
426: }
427: }
428: }
429:
430: public int read() {
431: if (count == 0)
432: return -1;
433:
434: count--;
435: whereAmI();
436: return content++;
437: }
438:
439: public int read(byte[] buf, int offset, int length) {
440:
441: if (count == 0)
442: return -1;
443:
444: if (length > count)
445: length = count;
446:
447: count -= length;
448: whereAmI();
449:
450: for (int i = 0; i < length; i++)
451: buf[offset + i] = content++;
452:
453: return length;
454: }
455:
456: public void close() {
457: }
458: }
459:
/**
 * A sink that silently discards everything written to it.  Used as the
 * target of DummyPrintStream, which only counts println calls.
 */
class DummyOutputStream extends java.io.OutputStream {

    public void write(int b) {
        // discard
    }

    public void write(byte[] b) {
        // discard
    }

    public void write(byte[] b, int off, int len) {
        // discard
    }

    public void flush() {
        // nothing buffered
    }

    public void close() {
        // nothing to release
    }
}
476:
477: class DummyPrintStream extends java.io.PrintStream {
478: public int lines;
479:
480: public DummyPrintStream(DummyOutputStream dos) {
481: super (dos);
482: }
483:
484: public void println() {
485: lines++;
486: }
487:
488: public void println(String x) {
489: lines++;
490: }
491:
492: public void println(Object x) {
493: lines++;
494: }
495:
496: public void println(char[] x) {
497: lines++;
498: }
499:
500: public void println(double x) {
501: lines++;
502: }
503:
504: public void println(float x) {
505: lines++;
506: }
507:
508: public void println(long x) {
509: lines++;
510: }
511:
512: public void println(int x) {
513: lines++;
514: }
515:
516: public void println(char x) {
517: lines++;
518: }
519:
520: public void println(boolean x) {
521: lines++;
522: }
523: }
|