org.testng.internal.Utils Java Examples
The following examples show how to use org.testng.internal.Utils.
Each example is taken from an open-source project; the source file and license are noted above each snippet.
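Most of these snippets lean on a handful of static helpers from org.testng.internal.Utils. As a quick orientation before the examples, here is a minimal sketch (a hypothetical listener, not taken from any of the projects below) that exercises the same calls the examples use: Utils.log for leveled logging, Utils.stackTrace for formatting a throwable, Utils.escapeHtml for HTML report output, and Utils.isStringEmpty for null/empty checks.

import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import org.testng.internal.Utils;

// Hypothetical listener name; shown only to illustrate the Utils calls used in the examples below.
public class UtilsSketchListener extends TestListenerAdapter {
    @Override
    public void onTestFailure(ITestResult result) {
        // Leveled logging, as in the TestRunner and TestClass examples.
        Utils.log("UtilsSketchListener", 2, "Test failed: " + result.getName());

        Throwable t = result.getThrowable();
        if (t != null) {
            // stackTrace(t, flag) returns a String array; the examples below read
            // index [0] as a short form and [1] as the full stack trace.
            String trace = Utils.stackTrace(t, false)[0];
            System.out.println(trace);
        }

        // Escape a value before writing it into an HTML report, as in PowerEmailableReporter.
        String description = result.getMethod().getDescription();
        if (!Utils.isStringEmpty(description)) {
            System.out.println(Utils.escapeHtml(description));
        }
    }
}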
Example #1
Source File: CustomJUnitReportListener.java From heat with Apache License 2.0 | 6 votes |
private void setFailedTcAttribute(XMLStringBuffer doc, ITestResult failedTestCase) {
    Properties attributesFailedTestSuites = new Properties();
    String tcName = ((HashMap<String, String>) failedTestCase.getParameters()[0]).get(PROP_TEST_ID);
    attributesFailedTestSuites.setProperty(XMLConstants.ATTR_NAME, tcName);
    long elapsedTimeMillis = failedTestCase.getEndMillis() - failedTestCase.getStartMillis();
    testRunningTotalTime += elapsedTimeMillis;
    Throwable t = failedTestCase.getThrowable();
    doc.push(XMLConstants.TESTCASE, attributesFailedTestSuites);
    if (t != null) {
        attributesFailedTestSuites.setProperty(XMLConstants.ATTR_TYPE, t.getClass().getName());
        String message = t.getMessage();
        if ((message != null) && (message.length() > 0)) {
            attributesFailedTestSuites.setProperty(XMLConstants.ATTR_MESSAGE, encodeAttr(message)); // ENCODE
        }
        doc.push(XMLConstants.FAILURE, attributesFailedTestSuites);
        doc.addCDATA(Utils.stackTrace(t, false)[0]);
        doc.pop();
    } else {
        doc.addEmptyElement(XMLConstants.FAILURE); // THIS IS AN ERROR
    }
    doc.pop();
}
Example #2
Source File: WriterTest.java From clickhouse-jdbc with Apache License 2.0 | 6 votes |
@Test
public void testTSV() throws Exception {
    File tempFile = Utils.createTempFile("");
    FileOutputStream fos = new FileOutputStream(tempFile);
    for (int i = 0; i < 1000; i++) {
        fos.write((i + "\tИмя " + i + "\n").getBytes("UTF-8"));
    }
    fos.close();

    statement
        .write()
        .table("test.writer")
        .format(ClickHouseFormat.TabSeparated)
        .data(tempFile)
        .send();

    assertTableRowCount(1000);
    ResultSet rs = statement.executeQuery(
        "SELECT count() FROM test.writer WHERE name = concat('Имя ', toString(id))");
    rs.next();
    assertEquals(rs.getInt(1), 1000);
}
Example #3
Source File: VerboseReporter.java From brooklyn-server with Apache License 2.0 | 6 votes |
/**
 * Print out test summary
 */
private void logResults() {
    //
    // Log test summary
    //
    ITestNGMethod[] ft = resultsToMethods(getFailedTests());
    StringBuilder sb = new StringBuilder("\n===============================================\n");
    sb.append(" ").append(suiteName).append("\n");
    sb.append(" Tests run: ").append(Utils.calculateInvokedMethodCount(getAllTestMethods()));
    sb.append(", Failures: ").append(Utils.calculateInvokedMethodCount(ft));
    sb.append(", Skips: ").append(Utils.calculateInvokedMethodCount(resultsToMethods(getSkippedTests())));
    int confFailures = getConfigurationFailures().size();
    int confSkips = getConfigurationSkips().size();
    if (confFailures > 0 || confSkips > 0) {
        sb.append("\n").append(" Configuration Failures: ").append(confFailures);
        sb.append(", Skips: ").append(confSkips);
    }
    sb.append("\n===============================================");
    log(sb.toString());
}
Example #4
Source File: PowerEmailableReporter.java From WebAndAppUITesting with GNU General Public License v3.0 | 4 votes |
private void generateForResult(ITestResult ans, ITestNGMethod method, int resultSetSize) {
    Object[] parameters = ans.getParameters();
    boolean hasParameters = parameters != null && parameters.length > 0;
    if (hasParameters) {
        tableStart("result", null);
        m_out.print("<tr class=\"param\">");
        for (int x = 1; x <= parameters.length; x++) {
            m_out.print("<th>Parameter #" + x + "</th>");
        }
        m_out.println("</tr>");
        m_out.print("<tr class=\"param stripe\">");
        for (Object p : parameters) {
            m_out.println("<td>" + Utils.escapeHtml(p.toString()) + "</td>");
        }
        m_out.println("</tr>");
    }
    List<String> msgs = Reporter.getOutput(ans);
    boolean hasReporterOutput = msgs.size() > 0;
    Throwable exception = ans.getThrowable();
    boolean hasThrowable = exception != null;
    if (hasReporterOutput || hasThrowable) {
        if (hasParameters) {
            m_out.print("<tr><td");
            if (parameters.length > 1) {
                m_out.print(" colspan=\"" + parameters.length + "\"");
            }
            m_out.println(">");
        } else {
            m_out.println("<div>");
        }
        if (hasReporterOutput) {
            if (hasThrowable) {
                m_out.println("<h3>Test Messages</h3>");
            }
            for (String line : msgs) {
                m_out.println(line + "<br/>");
            }
        }
        if (hasThrowable) {
            boolean wantsMinimalOutput = ans.getStatus() == ITestResult.SUCCESS;
            if (hasReporterOutput) {
                m_out.println("<h3>" + (wantsMinimalOutput ? "Expected Exception" : "Failure") + "</h3>");
            }
            generateExceptionReport(exception, method);
        }
        if (hasParameters) {
            m_out.println("</td></tr>");
        } else {
            m_out.println("</div>");
        }
    }
    if (hasParameters) {
        m_out.println("</table>");
    }
}
Example #5
Source File: PowerEmailableReporter.java From WebAndAppUITesting with GNU General Public License v3.0 | 4 votes |
protected void generateExceptionReport(Throwable exception, ITestNGMethod method) {
    m_out.print("<div class=\"stacktrace\">");
    m_out.print(Utils.stackTrace(exception, true)[0]);
    m_out.println("</div>");
}
Example #6
Source File: TestClass.java From qaf with MIT License | 4 votes |
private void log(int level, String s) {
    Utils.log("TestClass", level, s);
}
Example #7
Source File: TestRunner.java From qaf with MIT License | 4 votes |
/**
 * Main method that create a graph of methods and then pass it to the
 * graph executor to run them.
 */
private void privateRun(XmlTest xmlTest) {
    boolean parallel = xmlTest.getParallel().isParallel();

    { // parallel
        int threadCount = parallel ? xmlTest.getThreadCount() : 1;
        // Make sure we create a graph based on the intercepted methods, otherwise an interceptor
        // removing methods would cause the graph never to terminate (because it would expect
        // termination from methods that never get invoked).
        DynamicGraph<ITestNGMethod> graph = createDynamicGraph(intercept(m_allTestMethods));
        if (parallel) {
            if (graph.getNodeCount() > 0) {
                GraphThreadPoolExecutor<ITestNGMethod> executor =
                        new GraphThreadPoolExecutor<ITestNGMethod>(graph, this, threadCount, threadCount, 0,
                                TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
                executor.run();
                try {
                    long timeOut = m_xmlTest.getTimeOut(XmlTest.DEFAULT_TIMEOUT_MS);
                    Utils.log("TestRunner", 2, "Starting executor for test " + m_xmlTest.getName()
                            + " with time out:" + timeOut + " milliseconds.");
                    executor.awaitTermination(timeOut, TimeUnit.MILLISECONDS);
                    executor.shutdownNow();
                } catch (InterruptedException handled) {
                    handled.printStackTrace();
                    Thread.currentThread().interrupt();
                }
            }
        } else {
            boolean debug = false;
            List<ITestNGMethod> freeNodes = graph.getFreeNodes();
            if (debug) {
                System.out.println("Free nodes:" + freeNodes);
            }
            if (graph.getNodeCount() > 0 && freeNodes.isEmpty()) {
                throw new TestNGException("No free nodes found in:" + graph);
            }
            while (!freeNodes.isEmpty()) {
                List<IWorker<ITestNGMethod>> runnables = createWorkers(freeNodes);
                for (IWorker<ITestNGMethod> r : runnables) {
                    r.run();
                }
                graph.setStatus(freeNodes, Status.FINISHED);
                freeNodes = graph.getFreeNodes();
                if (debug) {
                    System.out.println("Free nodes:" + freeNodes);
                }
            }
        }
    }
}
Example #8
Source File: TestRunner.java From qaf with MIT License | 4 votes |
private void log(int level, String s) {
    Utils.log("TestRunner", level, s);
}
Example #9
Source File: TestRunner.java From qaf with MIT License | 4 votes |
private void log(String s) {
    Utils.log("TestRunner", 2, s);
}
Example #10
Source File: SeleniumTestsReporter.java From seleniumtestsframework with Apache License 2.0 | 4 votes |
public void afterInvocation(final IInvokedMethod method, final ITestResult result) {
    Reporter.setCurrentTestResult(result);

    // Handle Soft CustomAssertion
    if (method.isTestMethod()) {
        final List<Throwable> verificationFailures = CustomAssertion.getVerificationFailures();

        final int size = verificationFailures.size();
        if (size == 0) {
            return;
        } else if (result.getStatus() == TestResult.FAILURE) {
            return;
        }

        result.setStatus(TestResult.FAILURE);

        if (size == 1) {
            result.setThrowable(verificationFailures.get(0));
        } else {
            // create failure message with all failures and stack traces (barring the last failure)
            final StringBuilder failureMessage = new StringBuilder("!!! Many Test Failures (").append(size)
                    .append(")\n");
            for (int i = 0; i < size - 1; i++) {
                failureMessage.append("Failure ").append(i + 1).append(" of ").append(size).append("\n");
                final Throwable t = verificationFailures.get(i);
                final String fullStackTrace = Utils.stackTrace(t, false)[1];
                failureMessage.append(fullStackTrace).append("\n");
            }

            // final failure
            final Throwable last = verificationFailures.get(size - 1);
            failureMessage.append("Failure ").append(size).append(" of ").append(size).append(":\n");
            failureMessage.append(last.toString());

            // set merged throwable
            final Throwable merged = new Throwable(failureMessage.toString());
            merged.setStackTrace(last.getStackTrace());
            result.setThrowable(merged);
        }
    }
}
Example #11
Source File: VerboseReporter.java From brooklyn-server with Apache License 2.0 | 4 votes |
/**
 * Log meaningful message for passed in arguments.
 * Message itself is of form:
 * $status: "$suiteName" - $methodDeclaration ($actualArguments) finished in $x ms ($run of $totalRuns)
 *
 * @param st status of passed in itr
 * @param itr test result to be described
 * @param isConfMethod is itr describing configuration method
 */
private void logTestResult(Status st, ITestResult itr, boolean isConfMethod) {
    StringBuilder sb = new StringBuilder();
    String stackTrace = "";
    switch (st) {
        case STARTED:
            sb.append("INVOKING");
            break;
        case SKIP:
            sb.append("SKIPPED");
            stackTrace = itr.getThrowable() != null
                    ? Utils.stackTrace(itr.getThrowable(), false)[0] : "";
            break;
        case FAILURE:
            sb.append("FAILED");
            stackTrace = itr.getThrowable() != null
                    ? Utils.stackTrace(itr.getThrowable(), false)[0] : "";
            break;
        case SUCCESS:
            sb.append("PASSED");
            break;
        case SUCCESS_PERCENTAGE_FAILURE:
            sb.append("PASSED with failures");
            break;
        default:
            //not happen
            throw new RuntimeException("Unsupported test status:" + itr.getStatus());
    }
    if (isConfMethod) {
        sb.append(" CONFIGURATION: ");
    } else {
        sb.append(": ");
    }
    ITestNGMethod tm = itr.getMethod();
    int identLevel = sb.length();
    sb.append(getMethodDeclaration(tm));
    Object[] params = itr.getParameters();
    Class<?>[] paramTypes = tm.getConstructorOrMethod().getParameterTypes();
    if (null != params && params.length > 0) {
        // The error might be a data provider parameter mismatch, so make
        // a special case here
        if (params.length != paramTypes.length) {
            sb.append("Wrong number of arguments were passed by the Data Provider: found ");
            sb.append(params.length);
            sb.append(" but expected ");
            sb.append(paramTypes.length);
        } else {
            sb.append("(value(s): ");
            for (int i = 0; i < params.length; i++) {
                if (i > 0) {
                    sb.append(", ");
                }
                sb.append(Utils.toString(params[i], paramTypes[i]));
            }
            sb.append(")");
        }
    }
    if (Status.STARTED != st) {
        sb.append(" finished in ");
        sb.append(itr.getEndMillis() - itr.getStartMillis());
        sb.append(" ms");
        if (!Utils.isStringEmpty(tm.getDescription())) {
            sb.append("\n");
            for (int i = 0; i < identLevel; i++) {
                sb.append(" ");
            }
            sb.append(tm.getDescription());
        }
        if (tm.getInvocationCount() > 1) {
            sb.append(" (");
            sb.append(tm.getCurrentInvocationCount());
            sb.append(" of ");
            sb.append(tm.getInvocationCount());
            sb.append(")");
        }
        if (!Utils.isStringEmpty(stackTrace)) {
            sb.append("\n").append(stackTrace.substring(0,
                    stackTrace.lastIndexOf(System.getProperty("line.separator"))));
        }
    } else {
        if (!isConfMethod && tm.getInvocationCount() > 1) {
            sb.append(" success: ");
            sb.append(tm.getSuccessPercentage());
            sb.append("%");
        }
    }
    log(sb.toString());
}