Source code listing: PerformanceTestCase.java (package org.netbeans.performance.test.utilities, NetBeans IDE performance test utilities)



0001:        /*
0002:         * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
0003:         *
0004:         * Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
0005:         *
0006:         * The contents of this file are subject to the terms of either the GNU
0007:         * General Public License Version 2 only ("GPL") or the Common
0008:         * Development and Distribution License("CDDL") (collectively, the
0009:         * "License"). You may not use this file except in compliance with the
0010:         * License. You can obtain a copy of the License at
0011:         * http://www.netbeans.org/cddl-gplv2.html
0012:         * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
0013:         * specific language governing permissions and limitations under the
0014:         * License.  When distributing the software, include this License Header
0015:         * Notice in each file and include the License file at
0016:         * nbbuild/licenses/CDDL-GPL-2-CP.  Sun designates this
0017:         * particular file as subject to the "Classpath" exception as provided
0018:         * by Sun in the GPL Version 2 section of the License file that
0019:         * accompanied this code. If applicable, add the following below the
0020:         * License Header, with the fields enclosed by brackets [] replaced by
0021:         * your own identifying information:
0022:         * "Portions Copyrighted [year] [name of copyright owner]"
0023:         *
0024:         * Contributor(s):
0025:         *
0026:         * The Original Software is NetBeans. The Initial Developer of the Original
0027:         * Software is Sun Microsystems, Inc. Portions Copyright 1997-2007 Sun
0028:         * Microsystems, Inc. All Rights Reserved.
0029:         *
0030:         * If you wish your version of this file to be governed by only the CDDL
0031:         * or only the GPL Version 2, indicate your decision by adding
0032:         * "[Contributor] elects to include this software in this distribution
0033:         * under the [CDDL or GPL Version 2] license." If you do not indicate a
0034:         * single choice of license, a recipient has the option to distribute
0035:         * your version of this file under either the CDDL, the GPL Version 2 or
0036:         * to extend the choice of license to its licensees as provided above.
0037:         * However, if you add GPL Version 2 code and therefore, elected the GPL
0038:         * Version 2 license, then the option applies only if the new code is
0039:         * made subject to such option by the copyright holder.
0040:         */
0041:
0042:        package org.netbeans.performance.test.utilities;
0043:
0044:        import java.awt.Component;
0045:        import java.awt.Window;
0046:        import java.lang.ref.Reference;
0047:        import java.lang.ref.WeakReference;
0048:
0049:        import java.util.HashMap;
0050:        import java.util.Map;
0051:        import junit.framework.AssertionFailedError;
0052:
0053:        import org.netbeans.jellytools.JellyTestCase;
0054:
0055:        import org.netbeans.jemmy.QueueTool;
0056:        import org.netbeans.jemmy.JemmyProperties;
0057:        import org.netbeans.jemmy.operators.ComponentOperator;
0058:        import org.netbeans.jemmy.operators.WindowOperator;
0059:        import org.netbeans.jemmy.util.PNGEncoder;
0060:
0061:        import org.netbeans.junit.NbPerformanceTest;
0062:
0063:        import org.netbeans.performance.test.guitracker.ActionTracker;
0064:        import org.netbeans.performance.test.guitracker.LoggingRepaintManager;
0065:        import org.netbeans.performance.test.guitracker.LoggingEventQueue;
0066:
0067:        /**
0068:         * Test case with implemented support for performance test validation.
0069:         * This class provides methods for QA performance measurement.
0070:         * Implemented methods:
0071:         * <pre>
0072:         * doMeasurement();
0073:         * measureTime();
0074:         * measureMemoryUsage();
0075:         *</pre>
0076:         *
0077:         *
0078:         * The number of repeated time measurements can be set by the system property
0079:         * <b>org.netbeans.performance.repeat</b>. If the property isn't set, the time is measured only once.
0080:         *
0081:         * @author  mmirilovic@netbeans.org, rkubacki@netbeans.org, anebuzelsky@netbeans.org, mrkam@netbeans.org
0082:         */
0083:        public abstract class PerformanceTestCase extends JellyTestCase
0084:                implements  NbPerformanceTest {
0085:
0086:            private static final boolean logMemory = Boolean
0087:                    .getBoolean("org.netbeans.performance.memory.usage.log");
0088:
0089:            /**
0090:             * Constant defining the maximum time delay for "ui-response" of actions that need to react
0091:             * quickly to keep the user's flow uninterrupted. This is set to 1000 ms.
0092:             */
0093:            protected static final long WINDOW_OPEN = 1000;
0094:
0095:            /**
0096:             * Constant defining the maximum time delay for "ui-response" of actions that need to react
0097:             * instantaneously. This is set to 100 ms.
0098:             */
0099:            protected static final long UI_RESPONSE = 100;
0100:
0101:            /**
0102:             * Expected time in which the measured action should be completed.
0103:             * Usually it should be set to WINDOW_OPEN or UI_RESPONSE.
0104:             * <br><b>default</b> = UI_RESPONSE */
0105:            public long expectedTime = UI_RESPONSE;
0106:
0107:            /**
0108:             * Maximum number of iterations to wait for last paint on component/container.
0109:             * <br><b>default</b> = 10 iterations */
0110:            public int MAX_ITERATION = 10;
0111:
0112:            /**
0113:             * Defines delay between checks if the component/container is painted.
0114:             * <br><b>default</b> = 1000 ms */
0115:            public int WAIT_PAINT = 1000;
0116:
0117:            /** Wait for no event in the event queue after calling the <code>open()</code> method.
0118:             * <br><b>default</b> = 1000 ms */
0119:            public int WAIT_AFTER_OPEN = 1000;
0120:
0121:            /** Wait for no event in the event queue after calling the <code>prepare()</code> method.
0122:             * <br><b>default</b> = 250 ms */
0123:            public int WAIT_AFTER_PREPARE = 250;
0124:
0125:            /** Wait for no event in the event queue after calling the {@link close} method.
0126:             * <br><b>default</b> = 250 ms */
0127:            public int WAIT_AFTER_CLOSE = 250;
0128:
0129:            /** Factor for the wait_after_open_heuristic timeout; a negative HEURISTIC_FACTOR
0130:             * disables the heuristic */
0131:            public double HEURISTIC_FACTOR = 1.25;
0132:
0133:            /** Count of repeats */
0134:            protected static int repeat = Integer.getInteger(
0135:                    "org.netbeans.performance.repeat", 1).intValue();
0136:
0137:            /** Count of repeats for measuring memory usage */
0138:            protected static int repeat_memory = Integer.getInteger(
0139:                    "org.netbeans.performance.memory.repeat", -1).intValue();
0140:
0141:            /** Performance data. */
0142:            private static java.util.ArrayList<NbPerformanceTest.PerformanceData> data = new java.util.ArrayList<NbPerformanceTest.PerformanceData>();
0143:
0144:            /** Warmup finished flag. */
0145:            private static boolean warmupFinished = false;
0146:
0147:            /** Measure from the last MOUSE event; you can define your own, by default it's MOUSE_RELEASE */
0148:            protected int track_mouse_event = ActionTracker.TRACK_MOUSE_RELEASE;
0149:
0150:            /** Define start event - measured time will start at this event */
0151:            protected int MY_START_EVENT = MY_EVENT_NOT_AVAILABLE;
0152:
0153:            /** Define end event - measured time will end at this event */
0154:            protected int MY_END_EVENT = MY_EVENT_NOT_AVAILABLE;
0155:
0156:            /** Not set event - default for START/END events */
0157:            protected static final int MY_EVENT_NOT_AVAILABLE = -10;
0158:
0159:            /** tracker for UI activities */
0160:            private static ActionTracker tr;
0161:
0162:            private static LoggingRepaintManager rm;
0163:
0164:            private static LoggingEventQueue leq;
0165:
0166:            static {
0167:                if (repeat_memory == -1) {
0168:                    // XXX load our EQ and repaint manager
0169:                    tr = ActionTracker.getInstance();
0170:                    rm = new LoggingRepaintManager(tr);
0171:                    rm.setEnabled(true);
0172:                    leq = new LoggingEventQueue(tr);
0173:                    leq.setEnabled(true);
0174:                }
0175:            }
0176:
0177:            /** Tested component operator. */
0178:            protected ComponentOperator testedComponentOperator;
0179:
0180:            /** Map of test case names that should be changed (old name to new name). */
0181:            protected HashMap<String, String> renamedTestCaseName;
0182:
0183:            /** Use order just to identify the first and subsequent runs, not a specific run order */
0184:            public boolean useTwoOrderTypes = true;
0185:
0186:            /** Group identification for traced refs that do not have special category. */
0187:            private Object DEFAULT_REFS_GROUP = new Object();
0188:
0189:            /** Set of references to traced objects that ought to be GCed after test runs
0190:             * and their informational messages.
0191:             */
0192:            private static Map<Object, Map<Reference<Object>, String>> tracedRefs = new HashMap<Object, Map<Reference<Object>, String>>();
0193:
0194:            /**
0195:             * Creates a new instance of PerformanceTestCase
0196:             * @param testName name of the test
0197:             */
0198:            public PerformanceTestCase(String testName) {
0199:                super (testName);
0200:                renamedTestCaseName = new HashMap<String, String>();
0201:            }
0202:
0203:            /**
0204:             * Creates a new instance of PerformanceTestCase
0205:             * @param testName name of the test
0206:             * @param performanceDataName name for measured performance data, measured values are stored to results under this name
0207:             */
0208:            public PerformanceTestCase(String testName,
0209:                    String performanceDataName) {
0210:                this (testName);
0211:                setTestCaseName(testName, performanceDataName);
0212:            }
0213:
0214:            /**
0215:             * Set up the test case: wait until classpath scanning and warmup finish and reset performance data.
0216:             */
0217:            public void setUp() {
0218:                checkScanFinished();
0219:                checkWarmup();
0220:                data = new java.util.ArrayList<NbPerformanceTest.PerformanceData>();
0221:            }
0222:
0223:            /**
0224:             * Getter for LoggingRepaintManager.
0225:             * @return LoggingRepaintManager
0226:             */
0227:            protected LoggingRepaintManager repaintManager() {
0228:                return rm;
0229:            }
0230:
0231:            /**
0232:             * Tear down the test case: close all modal dialogs that remain open.
0233:             * @see close
0234:             */
0235:            public void tearDown() {
0236:                // tr = null;
0237:                //close();
0238:                closeAllModal();
0239:            }
0240:
0241:            /**
0242:             * Switch between the measurement methods.
0243:             * Every test can thus be used either to measure UI responsiveness or to look for memory leaks.
0244:             */
0245:            public void doMeasurement() {
0246:                if (repeat_memory == -1)
0247:                    measureTime();
0248:                else
0249:                    measureMemoryUsage();
0250:            }
0251:
0252:            /**
0253:             * Test that measures the time between the generated AWT event and the last paint event that
0254:             * finishes painting of the component/container.
0255:             * <p>It uses <code>ROBOT_MODEL_MASK</code> as the event dispatching model when the user's
0256:             * activity is simulated.</p>
0257:             * <p>To initialize the test, the {@link prepare()} method is invoked at the beginning
0258:             * and processing is delayed until there is a quiet period of time at least
0259:             * <code>WAIT_AFTER_PREPARE</code> ms long.</p>
0260:             * <p>The {@link open()} method is then called to perform the measured action; the
0261:             * test waits for no event for <code>WAIT_AFTER_OPEN</code> ms and
0262:             * until the component/container is fully painted, then the time is measured and reported.
0263:             * <br>
0264:             * <br>If an exception arises during measurement, the test fails and no value is reported as performance data.
0265:             * <br>If measuredTime is longer than expectedTime, the test fails.</p>
0266:             * <p>Each test should reset the state in its {@link close()} method. Again there is a wait
0267:             * for a quiet period of time after this call.</p>
0268:             */
0269:            public void measureTime() {
0270:                Exception exceptionDuringMeasurement = null;
0271:
0272:                long wait_after_open_heuristic = WAIT_AFTER_OPEN;
0273:
0274:                long[] measuredTime = new long[repeat + 1];
0275:
0276:                // issue 56091 and the applied workaround on the next line
0277:                // JemmyProperties.setCurrentDispatchingModel(JemmyProperties.ROBOT_MODEL_MASK);
0278:                JemmyProperties.setCurrentDispatchingModel(JemmyProperties
0279:                        .getCurrentDispatchingModel()
0280:                        | JemmyProperties.ROBOT_MODEL_MASK);
0281:                JemmyProperties.setCurrentTimeout(
0282:                        "EventDispatcher.RobotAutoDelay", 1);
0283:                log("----------------------- DISPATCHING MODEL = "
0284:                        + JemmyProperties.getCurrentDispatchingModel());
0285:
0286:                // filter default button on Vista - see issue 100961
0287:                if ("Windows Vista".equalsIgnoreCase(System.getProperty(
0288:                        "os.name", ""))) {
0289:                    repaintManager().addRegionFilter(
0290:                            repaintManager().VISTA_FILTER);
0291:                }
0292:
0293:                String performanceDataName = setPerformanceName();
0294:
0295:                tr.startNewEventList(performanceDataName);
0296:                tr.add(tr.TRACK_CONFIG_APPLICATION_MESSAGE, "Expected_time="
0297:                        + expectedTime + ", Repeat=" + repeat
0298:                        + ", Wait_after_prepare=" + WAIT_AFTER_PREPARE
0299:                        + ", Wait_after_open=" + WAIT_AFTER_OPEN
0300:                        + ", Wait_after_close=" + WAIT_AFTER_CLOSE
0301:                        + ", Wait_paint=" + WAIT_PAINT + ", Max_iteration="
0302:                        + MAX_ITERATION);
0303:
0304:                checkScanFinished(); // just to be sure, that during measurement we will not wait for scanning dialog
0305:
0306:                try {
0307:                    initialize();
0308:
0309:                    for (int i = 1; i <= repeat
0310:                            && exceptionDuringMeasurement == null; i++) {
0311:                        try {
0312:                            tr.startNewEventList("Iteration no." + i);
0313:                            tr.connectToAWT(true);
0314:                            prepare();
0315:                            waitNoEvent(WAIT_AFTER_PREPARE);
0316:
0317:                            // Uncomment if you want to run with analyzer tool
0318:                            // com.sun.forte.st.collector.CollectorAPI.resume ();
0319:
0320:                            // to be sure EventQueue is empty
0321:                            new QueueTool().waitEmpty();
0322:
0323:                            logMemoryUsage();
0324:
0325:                            tr.add(tr.TRACK_TRACE_MESSAGE, "OPEN - before");
0326:                            testedComponentOperator = open();
0327:                            tr.add(tr.TRACK_TRACE_MESSAGE, "OPEN - after");
0328:
0329:                            // this is to optimize delays
0330:                            long wait_time = (wait_after_open_heuristic > WAIT_AFTER_OPEN) ? WAIT_AFTER_OPEN
0331:                                    : wait_after_open_heuristic;
0332:                            tr.add(tr.TRACK_CONFIG_APPLICATION_MESSAGE,
0333:                                    "Wait_after_open_heuristic=" + wait_time);
0334:                            Thread.currentThread().sleep(wait_time);
0335:                            waitNoEvent(wait_time / 4);
0336:
0337:                            logMemoryUsage();
0338:
0339:                            // we were waiting for painting the component, but after
0340:                            // starting to use RepaintManager it's not possible, so at least
0341:                            // wait for empty EventQueue
0342:                            new QueueTool().waitEmpty();
0343:
0344:                            measuredTime[i] = getMeasuredTime();
0345:                            tr.add(tr.TRACK_APPLICATION_MESSAGE,
0346:                                    "Measured Time=" + measuredTime[i], true);
0347:                            // negative HEURISTIC_FACTOR disables heuristic
0348:                            if (HEURISTIC_FACTOR > 0) {
0349:                                wait_after_open_heuristic = (long) (measuredTime[i] * HEURISTIC_FACTOR);
0350:                            }
0351:
0352:                            log("Measured Time [" + performanceDataName + " | "
0353:                                    + i + "] = " + measuredTime[i]);
0354:
0355:                            // the measured time could be 0 (on Windows everything under 7-8 ms is logged as 0), but it shouldn't be under 0
0356:                            if (measuredTime[i] < 0)
0357:                                throw new Exception("Measured value ["
0358:                                        + measuredTime[i] + "] < 0 !!!");
0359:
0360:                            reportPerformance(performanceDataName,
0361:                                    measuredTime[i], "ms", i, expectedTime);
0362:
0363:                            getScreenshotOfMeasuredIDEInTimeOfMeasurement(i);
0364:
0365:                        } catch (Exception exc) { // catch for prepare(), open()
0366:                            log("------- [ "
0367:                                    + i
0368:                                    + " ] ---------------- Exception rises while measuring performance :"
0369:                                    + exc.getMessage());
0370:                            exc.printStackTrace(getLog());
0371:                            getScreenshot("exception_during_open");
0372:                            exceptionDuringMeasurement = exc;
0373:                            // throw new JemmyException("Exception arises during measurement:"+exc.getMessage());
0374:                        } finally { // finally for prepare(), open()
0375:                            try {
0376:                                // Uncomment if you want to run with analyzer tool
0377:                                // com.sun.forte.st.collector.CollectorAPI.pause ();
0378:
0379:                                tr
0380:                                        .add(tr.TRACK_TRACE_MESSAGE,
0381:                                                "CLOSE - before");
0382:                                close();
0383:
0384:                                closeAllModal();
0385:                                waitNoEvent(WAIT_AFTER_CLOSE);
0386:
0387:                            } catch (Exception e) { // catch for close()
0388:                                log("------- [ "
0389:                                        + i
0390:                                        + " ] ---------------- Exception rises while closing tested component :"
0391:                                        + e.getMessage());
0392:                                e.printStackTrace(getLog());
0393:                                getScreenshot("exception_during_close");
0394:                                exceptionDuringMeasurement = e;
0395:                                //throw new JemmyException("Exception arises while closing tested component :"+e.getMessage());
0396:                            } finally { // finally for close()
0397:                                tr.connectToAWT(false);
0398:                            }
0399:                        }
0400:                    }
0401:
0402:                    tr.startNewEventList("shutdown hooks");
0403:                    shutdown();
0404:                    closeAllDialogs();
0405:                    tr.add(tr.TRACK_APPLICATION_MESSAGE, "AFTER SHUTDOWN");
0406:                } catch (Exception e) { // catch for initialize(), shutdown(), closeAllDialogs()
0407:                    log("----------------------- Exception rises while shuting down / initializing:"
0408:                            + e.getMessage());
0409:                    e.printStackTrace(getLog());
0410:                    getScreenshot("exception_during_init_or_shutdown");
0411:                    // throw new JemmyException("Exception rises while shuting down :"+e.getMessage());
0412:                    exceptionDuringMeasurement = e;
0413:                } finally { // finally for initialize(), shutdown(), closeAllDialogs()
0414:                    repaintManager().resetRegionFilters();
0415:                }
0416:
0417:                dumpLog();
0418:                if (exceptionDuringMeasurement != null)
0419:                    throw new Error("Exception {"
0420:                            + exceptionDuringMeasurement.getMessage()
0421:                            + "} rises during measurement.",
0422:                            exceptionDuringMeasurement);
0423:
0424:                compare(measuredTime);
0425:
0426:            }
0427:
0428:            /**
0429:             * Test that measures memory consumption after each invocation of the measured action.
0430:             * The test finds the lowest value of measured memory consumption and computes all deltas against this value.
0431:             * This method follows the same pattern as the method used for measuring UI responsiveness,
0432:             * {@link measureTime()}. Memory consumption is computed as the difference between
0433:             * total and free heap memory. Garbage collection ({@link runGC()}) is run after each invocation of the measured action {@link open()}, before the memory reading is taken.
0434:             * <br>
0435:             * <br>If an exception arises during measurement, the test fails and no value is reported as performance data.
0436:             * <p>Each test should reset the state in its {@link close()} method. Again there is a wait
0437:             * for a quiet period of time after this call.</p>
0438:             */
0439:            public void measureMemoryUsage() {
0440:
0441:                Exception exceptionDuringMeasurement = null;
0442:                long wait_after_open_heuristic = WAIT_AFTER_OPEN;
0443:
0444:                long memoryUsageMinimum = 0;
0445:                long[] memoryUsage = new long[repeat_memory + 1];
0446:
0447:                useTwoOrderTypes = false;
0448:
0449:                // issue 56091 and the applied workaround on the next line
0450:                // JemmyProperties.setCurrentDispatchingModel(JemmyProperties.ROBOT_MODEL_MASK);
0451:                JemmyProperties.setCurrentDispatchingModel(JemmyProperties
0452:                        .getCurrentDispatchingModel()
0453:                        | JemmyProperties.ROBOT_MODEL_MASK);
0454:                JemmyProperties.setCurrentTimeout(
0455:                        "EventDispatcher.RobotAutoDelay", 1);
0456:                log("----------------------- DISPATCHING MODEL = "
0457:                        + JemmyProperties.getCurrentDispatchingModel());
0458:
0459:                checkScanFinished(); // just to be sure, that during measurement we will not wait for scanning dialog
0460:
0461:                runGC(5);
0462:
0463:                initialize();
0464:
0465:                for (int i = 1; i <= repeat_memory
0466:                        && exceptionDuringMeasurement == null; i++) {
0467:                    try {
0468:                        prepare();
0469:
0470:                        waitNoEvent(WAIT_AFTER_PREPARE);
0471:
0472:                        // Uncomment if you want to run with analyzer tool
0473:                        // com.sun.forte.st.collector.CollectorAPI.resume ();
0474:
0475:                        // to be sure EventQueue is empty
0476:                        new QueueTool().waitEmpty();
0477:
0478:                        testedComponentOperator = open();
0479:
0480:                        long wait_time = (wait_after_open_heuristic > WAIT_AFTER_OPEN) ? WAIT_AFTER_OPEN
0481:                                : wait_after_open_heuristic;
0482:                        waitNoEvent(wait_time);
0483:
0484:                        new QueueTool().waitEmpty();
0485:
0486:                    } catch (Exception exc) { // catch for prepare(), open()
0487:                        exc.printStackTrace(getLog());
0488:                        exceptionDuringMeasurement = exc;
0489:                        getScreenshot("exception_during_open");
0490:                        // throw new JemmyException("Exception arises during measurement:"+exc.getMessage());
0491:                    } finally {
0492:                        try {
0493:                            // Uncomment if you want to run with analyzer tool
0494:                            // com.sun.forte.st.collector.CollectorAPI.pause ();
0495:
0496:                            close();
0497:
0498:                            closeAllModal();
0499:                            waitNoEvent(WAIT_AFTER_CLOSE);
0500:
0501:                        } catch (Exception e) {
0502:                            e.printStackTrace(getLog());
0503:                            getScreenshot("exception_during_close");
0504:                            exceptionDuringMeasurement = e;
0505:                        } finally { // finally for initialize(), shutdown(), closeAllDialogs()
0506:                            // XXX export results?
0507:                        }
0508:                    }
0509:
0510:                    runGC(3);
0511:
0512:                    Runtime runtime = Runtime.getRuntime();
0513:                    memoryUsage[i] = runtime.totalMemory()
0514:                            - runtime.freeMemory();
0515:                    log("Used Memory [" + i + "] = " + memoryUsage[i]);
0516:
0517:                    if (memoryUsageMinimum == 0
0518:                            || memoryUsageMinimum > memoryUsage[i])
0519:                        memoryUsageMinimum = memoryUsage[i];
0520:
0521:                }
0522:
0523:                // set Performance Data Name
0524:                String performanceDataName = setPerformanceName();
0525:
0526:                // report deltas against minimum of measured values
0527:                for (int i = 1; i <= repeat_memory; i++) {
0528:                    //String performanceDataName = setPerformanceName(i);
0529:                    log("Used Memory [" + performanceDataName + " | " + i
0530:                            + "] = " + memoryUsage[i]);
0531:
0532:                    reportPerformance(performanceDataName, memoryUsage[i]
0533:                            - memoryUsageMinimum, "bytes", i);
0534:                }
0535:
0536:                try {
0537:                    shutdown();
0538:                    closeAllDialogs();
0539:                } catch (Exception e) {
0540:                    e.printStackTrace(getLog());
0541:                    getScreenshot("shutdown");
0542:                    exceptionDuringMeasurement = e;
0543:                } finally {
0544:                }
0545:
0546:                if (exceptionDuringMeasurement != null)
0547:                    throw new Error(
0548:                            "Exception rises during measurement, look at appropriate log file for stack trace(s).");
0549:
0550:            }
0551:
0552:            /**
0553:             * Initialize callback that is called once before the repeated sequence of
0554:             * tested operations is performed.
0555:             * The default implementation is empty.
0556:             */
0557:            protected void initialize() {
0558:            }
0559:
0560:            /**
0561:             * Prepare method is called at the beginning of each measurement.
0562:             * The system should be ready to perform the measured action when the work requested by
0563:             * this method is completed.
0564:             * Subclasses must implement this method.
0565:             */
0566:            public abstract void prepare();
0567:
0568:            /**
0569:             * This method should be overridden in subclasses to trigger the measured action.
0570:             * Only the last action before the UI change must be specified here
0571:             * (push a button, select a menu item, expand a tree, ...).
0572:             * The whole method is dispatched with ROBOT_MODEL_MASK during measurement.
0573:             * Subclasses must implement this method.
0574:             * @return tested component operator that will be later passed to close method
0575:             */
0576:            public abstract ComponentOperator open();
0577:
0578:            /**
0579:             * Close the opened window or the invoked popup menu.
0580:             * If the tested component controlled by testedComponentOperator is a Window it will
0581:             * be closed; otherwise the ESC key is pressed on the component.
0582:             */
0583:            public void close() {
0584:                if (testedComponentOperator != null
0585:                        && testedComponentOperator.isShowing()) {
0586:                    if (testedComponentOperator instanceof  WindowOperator)
0587:                        ((WindowOperator) testedComponentOperator).close();
0588:                    else if (testedComponentOperator instanceof  ComponentOperator) {
0589:                        testedComponentOperator
0590:                                .pushKey(java.awt.event.KeyEvent.VK_ESCAPE);
0591:                        //testedComponentOperator.pressKey(java.awt.event.KeyEvent.VK_ESCAPE);
0592:                        //testedComponentOperator.releaseKey(java.awt.event.KeyEvent.VK_ESCAPE);
0593:                    }
0594:                }
0595:            }
0596:
0597:            /**
0598:             * Shutdown method resets the state of the system when all test invocations are done.
0599:             * Default implementation is empty.
0600:             */
0601:            protected void shutdown() {
0602:            }
0603:
0604:            /**
0605:             * Method for storing and reporting measured performance value
0606:             * @param name measured value name
0607:             * @param value measured performance value
0608:             * @param unit unit name of measured value
0609:             * @param runOrder order in which the data was measured (1st, 2nd, ...)
0610:             * @param threshold the limit for an action, menu or dialog
0611:             */
0612:            public void reportPerformance(String name, long value, String unit,
0613:                    int runOrder, long threshold) {
0614:                NbPerformanceTest.PerformanceData d = new NbPerformanceTest.PerformanceData();
0615:                d.name = name == null ? getName() : name;
0616:                d.value = value;
0617:                d.unit = unit;
0618:                d.runOrder = (useTwoOrderTypes && runOrder > 1) ? 2 : runOrder;
0619:                d.threshold = threshold;
0620:                data.add(d);
0621:            }
0622:
0623:            /**
0624:             * Method for storing and reporting measured performance value
0625:             * @param name measured value name
0626:             * @param value measured performance value
0627:             * @param unit unit name of measured value
0628:             * @param runOrder order in which the data was measured (1st, 2nd, ...)
0629:             */
0630:            public void reportPerformance(String name, long value, String unit,
0631:                    int runOrder) {
0632:                NbPerformanceTest.PerformanceData d = new NbPerformanceTest.PerformanceData();
0633:                d.name = name == null ? getName() : name;
0634:                d.value = value;
0635:                d.unit = unit;
0636:                d.runOrder = (useTwoOrderTypes && runOrder > 1) ? 2 : runOrder;
0637:                data.add(d);
0638:            }
0639:
0640:            /** Registers an object to be tracked and later verified in
0641:             * {@link runTestGC}.
0642:             * @param message informational message associated with the object
0643:             * @param object traced object
0644:             * @param group mark grouping several references together to test them at once, or <CODE>null</CODE>
0645:             */
0646:            protected void reportReference(String message, Object object,
0647:                    Object group) {
0648:                Object g = group == null ? DEFAULT_REFS_GROUP : group;
0649:                if (!tracedRefs.containsKey(g)) {
0650:                    tracedRefs.put(g, new HashMap<Reference<Object>, String>());
0651:                }
0652:                tracedRefs.get(g).put(new WeakReference<Object>(object),
0653:                        message);
0654:            }
0655:
0656:            /** Generic test case checking whether all objects registered with
0657:             * {@link reportReference} can be garbage collected.
0658:             * 
0659:             * Set of traced objects is cleared after this test.
0660:             * It is supposed that this method will be added to a suite
0661:             * typically at the end.
0662:             */
0663:            protected void runTestGC(Object group) throws Exception {
0664:                Object g = group == null ? DEFAULT_REFS_GROUP : group;
0665:                try {
0666:                    AssertionFailedError afe = null;
0667:                    for (Map.Entry<Reference<Object>, String> entry : tracedRefs
0668:                            .get(g).entrySet()) {
0669:                        try {
0670:                            assertGC(entry.getValue(), entry.getKey());
0671:                        } catch (AssertionFailedError e) {
0672:                            if (afe != null) {
0673:                                Throwable t = e;
0674:                                while (t.getCause() != null) {
0675:                                    t = t.getCause();
0676:                                }
0677:                                t.initCause(afe);
0678:                            }
0679:                            afe = e;
0680:                        }
0681:                    }
0682:                    if (afe != null) {
0683:                        throw afe;
0684:                    }
0685:                } finally {
0686:                    tracedRefs.get(g).clear();
0687:                }
0688:            }
0689:
0690:            /**
0691:             * Turns off blinking of the caret in the editor.
0692:             * A method generally useful for any UI Responsiveness tests which measure actions
0693:             * in the Java editor. This method should be called from a test's initialize() method.
0694:             * @param kitClass class of the editor for which you want turn off caret blinking
0695:             */
0696:            protected void setEditorCaretFilteringOn(Class kitClass) {
0697:                org.netbeans.modules.editor.options.BaseOptions options = org.netbeans.modules.editor.options.BaseOptions
0698:                        .getOptions(kitClass);
0699:                options.setCaretBlinkRate(0);
0700:            }
0701:
0702:            /**
0703:             * Turns off blinking of the caret in the Java editor.
0704:             * A method generally useful for any UI Responsiveness tests which measure actions
0705:             * in the Java editor. This method should be called from a test's initialize() method.
0706:             */
0707:            protected void setJavaEditorCaretFilteringOn() {
0708:                setEditorCaretFilteringOn(org.netbeans.modules.editor.java.JavaKit.class);
0709:            }
0710:
0711:            /**
0712:             * Turns off blinking of the caret in the plain text editor.
0713:             * A method generally useful for any UI Responsiveness tests which measure actions
0714:             * in the plain text editor. This method should be called from a test's initialize() method.
0715:             */
0716:            protected void setPlainTextEditorCaretFilteringOn() {
0717:                setEditorCaretFilteringOn(org.netbeans.modules.editor.plain.PlainKit.class);
0718:            }
0719:
0720:            /**
0721:             * Turns off blinking of the caret in the XML editor.
0722:             * A method generally useful for any UI Responsiveness tests which measure actions
0723:             * in the XML editor. This method should be called from a test's initialize() method.
0724:             */
0725:            protected void setXMLEditorCaretFilteringOn() {
0726:                setEditorCaretFilteringOn(org.netbeans.modules.xml.text.syntax.XMLKit.class);
0727:            }
0728:
0729:            /**
0730:             * Turns off blinking of the caret in the JSP editor.
0731:             * A method generally useful for any UI Responsiveness tests which measure actions
0732:             * in the JSP editor. This method should be called from a test's initialize() method.
0733:             */
0734:            protected void setJSPEditorCaretFilteringOn() {
0735:                setEditorCaretFilteringOn(org.netbeans.modules.web.core.syntax.JSPKit.class);
0736:            }
0737:
0738:            /**
0739:             * Log the used memory size. It can help with evaluating what happened during measurement.
0740:             * If the size of the memory before and after open differs:
0741:             * <li>if it increases, there could be a memory leak</li>
0742:             * <li>if it decreases, there was a garbage collection during measurement, which prolongs the action time</li>
0743:             */
0744:            protected void logMemoryUsage() {
0745:                // log memory usage after each test case
0746:                if (logMemory) {
0747:                    Runtime runtime = Runtime.getRuntime();
0748:                    long totalMemory = runtime.totalMemory();
0749:                    long freeMemory = runtime.freeMemory();
0750:                    tr.add(tr.TRACK_APPLICATION_MESSAGE, "Memory used="
0751:                            + (totalMemory - freeMemory) + " total="
0752:                            + totalMemory);
0753:                }
0754:            }
0755:
0756:            /**
0757:             * Run garbage collection 3 times multiplied by the number defined as a parameter for this method.
0758:             * @param i number of repeats (GC runs i*3 times)
0759:             */
0760:            public void runGC(int i) {
0761:                while (i > 0) {
0762:                    try {
0763:                        System.runFinalization();
0764:                        System.gc();
0765:                        Thread.currentThread().sleep(500);
0766:                        System.gc();
0767:                        Thread.currentThread().sleep(500);
0768:                        System.gc();
0769:                        Thread.currentThread().sleep(500);
0770:                    } catch (Exception exc) {
0771:                        exc.printStackTrace(System.err);
0772:                    }
0773:                    i--;
0774:                }
0775:            }
0776:
0777:            /**
0778:             * Set name for performance data. Measured value is stored to database under this name.
0779:             * @return performance data name
0780:             */
0781:            public String setPerformanceName() {
0782:                String performanceDataName = getPerformanceName();
0783:
0784:                if (performanceDataName.equalsIgnoreCase("measureTime"))
0785:                    performanceDataName = this .getClass().getName();
0786:
0787:                return performanceDataName;
0788:            }
0789:
0790:            /**
0791:             * Compare each measured value with the expected value.
0792:             * The test fails if one of the measured values is bigger than the expected one.
0793:             * @param measuredValues array of measured values
0794:             */
0795:            public void compare(long[] measuredValues) {
0796:                boolean fail = false;
0797:                String measuredValuesString = "";
0798:
0799:                for (int i = 1; i < measuredValues.length; i++) {
0800:                    measuredValuesString = measuredValuesString + " "
0801:                            + measuredValues[i];
0802:
0803:                    if ((i > 1 && measuredValues[i] > expectedTime)
0804:                            || (i == 1 && measuredValues.length == 1 && measuredValues[i] > expectedTime))
0805:                        // fail if it's subsequent usage and it's over expected time or it's first usage without any other usages and it's over expected time
0806:                        fail = true;
0807:                    else if (i == 1 && measuredValues.length > 1
0808:                            && measuredValues[i] > 2 * expectedTime)
0809:                        // fail if it's first usage and it isn't the last one and it's over 2-times expected time
0810:                        fail = true;
0811:                }
0812:
0813:                if (fail) {
0814:                    captureScreen = false;
0815:                    fail("One of the measuredTime(s) ["
0816:                            + measuredValuesString
0817:                            + " ] > expectedTime["
0818:                            + expectedTime
0819:                            + "] - performance issue (it's ok if the first usage is in boundary <0,2*expectedTime>) .");
0820:                }
0821:            }
0822:
0823:            /**
0824:             * Ensures that all warm up tasks are already executed so the tests may begin.
0825:             */
0826:            private void checkWarmup() {
0827:                if (warmupFinished) {
0828:                    return;
0829:                }
0830:                try {
0831:                    Class cls = Class
0832:                            .forName("org.netbeans.core.WarmUpSupport"); // NOI18N
0833:                    java.lang.reflect.Field fld = cls
0834:                            .getDeclaredField("finished"); // NOI18N
0835:                    fld.setAccessible(true);
0836:
0837:                    // assume that warmup should not last more than 20sec
0838:                    for (int i = 20; i > 0; i--) {
0839:                        warmupFinished = fld.getBoolean(null);
0840:                        if (warmupFinished) {
0841:                            return;
0842:                        }
0843:                        try {
0844:                            log("checkWarmup - waiting");
0845:                            Thread.sleep(1000);
0846:                        } catch (InterruptedException ie) {
0847:                            ie.printStackTrace(System.err);
0848:                        }
0849:                    }
0850:                    fail("checkWarmup - waiting for warmup completion failed");
0851:                } catch (Exception e) {
0852:                    fail(e);
0853:                }
0854:            }
0855:
0856:            /**
0857:             * If scanning of the classpath has started, wait till the scan finishes
0858:             * (just to be sure, check it twice after a short delay).
0859:             */
0860:            public void checkScanFinished() {
0861:                org.netbeans.junit.ide.ProjectSupport.waitScanFinished();
0862:                waitNoEvent(1000);
0863:                org.netbeans.junit.ide.ProjectSupport.waitScanFinished();
0864:            }
0865:
0866:            /**
0867:             * This method returns the measured time; it goes through all data logged
0868:             * by the guitracker (LoggingEventQueue and LoggingRepaintManager).
0869:             * The measured time is the difference between:
0870:             * <ul>
0871:             *     <li> last START or
0872:             *     <li> last MOUSE_PRESS (if the measure_mouse_press property is true)
0873:             *     <li> last MOUSE_RELEASE - by default (if the measure_mouse_press property is false)
0874:             *     <li> last KEY_PRESS
0875:             * </ul>
0876:             * and
0877:             * <ul>
0878:             *     <li> last or expected paint
0879:             *     <li> last FRAME_SHOW
0880:             *     <li> last DIALOG_SHOW
0881:             *     <li> last COMPONENT_SHOW
0882:             * </ul>
0883:             * @return measured time
0884:             */
0885:            public long getMeasuredTime() {
0886:                ActionTracker.Tuple start = tr.getCurrentEvents().getFirst();
0887:                ActionTracker.Tuple end = tr.getCurrentEvents().getFirst();
0888:
0889:                for (ActionTracker.Tuple tuple : tr.getCurrentEvents()) {
0890:                    int code = tuple.getCode();
0891:
0892:                    // start 
0893:                    if (code == MY_START_EVENT) {
0894:                        start = tuple;
0895:                    } else if (MY_START_EVENT == MY_EVENT_NOT_AVAILABLE
0896:                            && (code == ActionTracker.TRACK_START
0897:                                    || code == track_mouse_event // it could be ActionTracker.TRACK_MOUSE_RELEASE (by default) or ActionTracker.TRACK_MOUSE_PRESS or ActionTracker.TRACK_MOUSE_MOVE
0898:                            || code == ActionTracker.TRACK_KEY_PRESS)) {
0899:                        start = tuple;
0900:
0901:                        //end 
0902:                    } else if (code == MY_END_EVENT) {
0903:                        end = tuple;
0904:                    } else if (MY_END_EVENT == MY_EVENT_NOT_AVAILABLE
0905:                            && (code == ActionTracker.TRACK_PAINT
0906:                                    || code == ActionTracker.TRACK_FRAME_SHOW
0907:                                    || code == ActionTracker.TRACK_DIALOG_SHOW || code == ActionTracker.TRACK_COMPONENT_SHOW)) {
0908:                        end = tuple;
0909:                    }
0910:                }
0911:
0912:                start.setMeasured(true);
0913:                end.setMeasured(true);
0914:
0915:                long result = end.getTimeMillis() - start.getTimeMillis();
0916:
0917:                if (result < 0 || start.getTimeMillis() == 0) {
0918:                    throw new IllegalStateException(
0919:                            "Measuring failed, because we start ["
0920:                                    + start.getTimeMillis() + "] > end ["
0921:                                    + end.getTimeMillis() + "] or start=0");
0922:                }
0923:                return result;
0924:            }
0925:
0926:            /**
0927:             * Data are logged to a file; this helps with evaluation of a failure
0928:             * and also shows what exactly is measured (the user can find the start event
0929:             * and the final paint/show).
0930:             */
0931:            public void dumpLog() {
0932:                tr.stopRecording();
0933:                try {
0934:                    tr.setXslLocation(getWorkDirPath());
0935:                    tr.exportAsXML(getLog("ActionTracker.xml"));
0936:                } catch (Exception ex) {
0937:                    throw new Error("Exception while generating log", ex);
0938:                }
0939:                tr.forgetAllEvents();
0940:                tr.startRecording();
0941:            }
0942:
0943:            /**
0944:             * Waits for a period of time during which no event is processed by event queue.
0945:             * @param time time to wait for after last event in EventQueue.
0946:             */
0947:            protected void waitNoEvent(long time) {
0948:                if (repeat_memory != -1) {
0949:                    try {
0950:                        synchronized (Thread.currentThread()) {
0951:                            Thread.currentThread().wait(time);
0952:                        }
0953:                    } catch (Exception exc) {
0954:                        log("Exception rises during waiting " + time + " ms");
0955:                        exc.printStackTrace(getLog());
0956:                    }
0957:                } else {
0958:                    // XXX need to reimplement
0959:                    rm.waitNoPaintEvent(time);
0960:                }
0961:            }
0962:
0963:            /**
0964:             * Getter for all measured performance data from current test
0965:             * @return PerformanceData[] performance data
0966:             */
0967:            public NbPerformanceTest.PerformanceData[] getPerformanceData() {
0968:                if (data != null)
0969:                    return data
0970:                            .toArray(new NbPerformanceTest.PerformanceData[0]);
0971:                else
0972:                    return null;
0973:            }
0974:
0975:            /**
0976:             * Setter for the test case name. Renaming a test case is useful if you have
0977:             * a test suite where the invoked test methods (with the same name) come from different classes, which is
0978:             * true if your tests extend PerformanceTestCase.
0979:             * @param oldName old TestCase name
0980:             * @param newName new TestCase name
0981:             */
0982:            public void setTestCaseName(String oldName, String newName) {
0983:                renamedTestCaseName.put(oldName, newName);
0984:            }
0985:
0986:            /**
0987:             * Getter for the test case name. It overrides getName() from the superclass. It is necessary to diversify
0988:             * method names if the test methods (with the same name) are run from different classes, which is
0989:             * the case if your tests extend PerformanceTestCase.
0990:             * @return the renamed test case name (with all '|' replaced by '-'); if it was not renamed, the result of super.getName()
0991:             */
0992:            public String getName() {
0993:                String originalTestCaseName = super .getName();
0994:
0995:                if (renamedTestCaseName.containsKey(originalTestCaseName))
0996:                    return (renamedTestCaseName.get(originalTestCaseName))
0997:                            .replace('|', '-'); // workaround for a problem on Windows, where it isn't possible to create directories containing '|'
0998:                else
0999:                    return originalTestCaseName;
1000:            }
1001:
1002:            /**
1003:             * Returns the performance data name.
1004:             * @return the renamed performance data name; if it was not renamed, the result of super.getName()
1005:             */
1006:            public String getPerformanceName() {
1007:                String originalTestCaseName = super .getName();
1008:
1009:                if (renamedTestCaseName.containsKey(originalTestCaseName))
1010:                    return renamedTestCaseName.get(originalTestCaseName);
1011:                else
1012:                    return originalTestCaseName;
1013:            }
1014:
1015:            /**
1016:             * Closes all opened dialogs.
1017:             */
1018:            public static void closeAllDialogs() {
1019:                javax.swing.JDialog dialog;
1020:                org.netbeans.jemmy.ComponentChooser chooser = new org.netbeans.jemmy.ComponentChooser() {
1021:                    public boolean checkComponent(Component comp) {
1022:                        return (comp instanceof  javax.swing.JDialog && comp
1023:                                .isShowing());
1024:                    }
1025:
1026:                    public String getDescription() {
1027:                        return ("Dialog");
1028:                    }
1029:                };
1030:                while ((dialog = (javax.swing.JDialog) org.netbeans.jemmy.DialogWaiter
1031:                        .getDialog(chooser)) != null) {
1032:                    closeDialogs(findBottomDialog(dialog, chooser), chooser);
1033:                }
1034:            }
1035:
1036:            /**
1037:             * Find the bottom dialog.
1038:             * @param dialog dialog whose owner chain is searched
1039:             * @param chooser chooser used for looking for dialogs
1040:             * @return the bottom dialog
1041:             */
1042:            private static javax.swing.JDialog findBottomDialog(
1043:                    javax.swing.JDialog dialog,
1044:                    org.netbeans.jemmy.ComponentChooser chooser) {
1045:                java.awt.Window owner = dialog.getOwner();
1046:                if (chooser.checkComponent(owner)) {
1047:                    return (findBottomDialog((javax.swing.JDialog) owner,
1048:                            chooser));
1049:                }
1050:                return (dialog);
1051:            }
1052:
1053:            /**
1054:             * Close dialogs.
1055:             * @param dialog dialog whose owned dialogs are closed first, then the dialog itself
1056:             * @param chooser chooser used for looking for dialogs
1057:             */
1058:            private static void closeDialogs(javax.swing.JDialog dialog,
1059:                    org.netbeans.jemmy.ComponentChooser chooser) {
1060:                for (Window window : dialog.getOwnedWindows()) {
1061:                    if (chooser.checkComponent(window)) {
1062:                        closeDialogs((javax.swing.JDialog) window, chooser);
1063:                    }
1064:                }
1065:                new org.netbeans.jemmy.operators.JDialogOperator(dialog)
1066:                        .close();
1067:            }
1068:
1069:            /**
1070:             * Get a screenshot - if testedComponentOperator is null, grab the whole screen in black & white;
1071:             * otherwise grab the area around the tested component (-100, -100, width+200, height+200).
1072:             * @param i order of measurement in one test case
1073:             */
1074:            protected void getScreenshotOfMeasuredIDEInTimeOfMeasurement(int i) {
1075:                try {
1076:                    if (testedComponentOperator == null) {
1077:                        PNGEncoder.captureScreen(getWorkDir().getAbsolutePath()
1078:                                + java.io.File.separator + "screen_" + i
1079:                                + ".png", PNGEncoder.BW_MODE);
1080:                    } else {
1081:                        java.awt.Point locationOnScreen = testedComponentOperator
1082:                                .getLocationOnScreen();
1083:                        java.awt.Rectangle bounds = testedComponentOperator
1084:                                .getBounds();
1085:                        java.awt.Rectangle bounds_new = new java.awt.Rectangle(
1086:                                locationOnScreen.x - 100,
1087:                                locationOnScreen.y - 100, bounds.width + 200,
1088:                                bounds.height + 200);
1089:                        java.awt.Rectangle screen_size = new java.awt.Rectangle(
1090:                                java.awt.Toolkit.getDefaultToolkit()
1091:                                        .getScreenSize());
1092:
1093:                        if (bounds_new.height > screen_size.height / 2
1094:                                || bounds_new.width > screen_size.width / 2)
1095:                            PNGEncoder.captureScreen(getWorkDir()
1096:                                    .getAbsolutePath()
1097:                                    + java.io.File.separator
1098:                                    + "screen_"
1099:                                    + i
1100:                                    + ".png", PNGEncoder.BW_MODE);
1101:                        else
1102:                            PNGEncoder.captureScreen(bounds_new, getWorkDir()
1103:                                    .getAbsolutePath()
1104:                                    + java.io.File.separator
1105:                                    + "screen_"
1106:                                    + i
1107:                                    + ".png", PNGEncoder.GREYSCALE_MODE);
1108:                        //System.err.println("XX "+rm.getRepaintedArea());
1109:                        //                PNGEncoder.captureScreen(rm.getRepaintedArea(),getWorkDir().getAbsolutePath()+java.io.File.separator+"screen_"+i+".png",PNGEncoder.GREYSCALE_MODE);
1110:                    }
1111:                } catch (Exception exc) {
1112:                    log(" Exception rises during capturing screenshot of measurement ");
1113:                    exc.printStackTrace(getLog());
1114:                }
1115:            }
1116:
1117:            /**
1118:             * Get a screenshot of the whole screen if an exception arises during the test.
1119:             * @param title title is part of the screenshot file name
1120:             */
1121:            protected void getScreenshot(String title) {
1122:                try {
1123:                    PNGEncoder.captureScreen(getWorkDir().getAbsolutePath()
1124:                            + java.io.File.separator + "error_screenshot_"
1125:                            + title + ".png");
1126:                } catch (Exception exc) {
1127:                    log(" Exception rises during capturing screenshot ");
1128:                    exc.printStackTrace(getLog());
1129:                }
1130:
1131:            }
1132:
1133:        }
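
Usage sketch (added for illustration; not part of the original listing): PerformanceTestCase is normally used by subclassing it, implementing prepare() and open(), and calling doMeasurement() from a JUnit test method. The following minimal example assumes the jellytools classes ActionNoBlock and NbDialogOperator are available on the test classpath; the menu path "Help|About", the dialog title and the class name are illustrative, not taken from the NetBeans test suite.

package org.netbeans.performance.test.utilities; // same package chosen only for brevity

import org.netbeans.jellytools.NbDialogOperator;        // assumed available (jellytools)
import org.netbeans.jellytools.actions.ActionNoBlock;   // assumed available (jellytools)
import org.netbeans.jemmy.operators.ComponentOperator;

/** Illustrative subclass: measures how long it takes to open the About dialog. */
public class OpenAboutDialogTest extends PerformanceTestCase {

    public OpenAboutDialogTest(String testName) {
        // the second argument is the name under which measured values are reported
        super(testName, "Open About dialog");
        expectedTime = WINDOW_OPEN; // 1000 ms budget for a window-opening action
    }

    /** Nothing to prepare here; heavier tests would open a project or an editor first. */
    public void prepare() {
    }

    /** The last UI gesture before the measured repaint/show. */
    public ComponentOperator open() {
        new ActionNoBlock("Help|About", null).perform();
        // the returned operator is a WindowOperator, so the inherited close() closes the dialog
        return new NbDialogOperator("About");
    }

    /** JUnit entry point; doMeasurement() switches between time and memory measurement. */
    public void testOpenAboutDialog() {
        doMeasurement();
    }
}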
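
The protected leak-checking helpers reportReference() and runTestGC() are likewise used from a subclass. The sketch below is self-contained but artificial: the tracked object is a plain Object standing in for something captured from the exercised UI (a document, node or TopComponent), and the empty prepare()/open() stubs exist only to satisfy the abstract base class.

package org.netbeans.performance.test.utilities; // same package chosen only for brevity

import org.netbeans.jemmy.operators.ComponentOperator;

/** Illustrative subclass showing reportReference()/runTestGC(). */
public class LeakCheckExampleTest extends PerformanceTestCase {

    public LeakCheckExampleTest(String testName) {
        super(testName);
    }

    /** Required by the abstract base class; nothing to prepare in this sketch. */
    public void prepare() {
    }

    /** Required by the abstract base class; no UI action is measured in this sketch. */
    public ComponentOperator open() {
        return null;
    }

    public void testTrackedObjectsCanBeGarbageCollected() throws Exception {
        Object doc = new Object(); // stands for an object captured from the exercised UI
        reportReference("Object should be collectable after the test", doc, "docs");
        doc = null;                // drop the only strong reference held by the test
        runTestGC("docs");         // assertGC fails if anything in the "docs" group is still reachable
    }
}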