org.junit.BeforeClass Java Examples
The following examples show how to use
org.junit.BeforeClass.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
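Before the project examples, here is a minimal sketch of how @BeforeClass is typically used in JUnit 4: the annotated method must be public, static, and void, and it runs exactly once before any test method in the class (paired with @AfterClass for teardown). The class and the ExpensiveResource helper below are hypothetical, not taken from any of the projects listed here.

import static org.junit.Assert.assertNotNull;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleSetupTest {

    // Shared, expensive fixture created once for all tests in this class
    private static ExpensiveResource resource;

    @BeforeClass
    public static void setUpBeforeClass() {
        // Runs exactly once, before the first @Test method
        resource = new ExpensiveResource();
    }

    @AfterClass
    public static void tearDownAfterClass() {
        // Runs once after all tests have finished
        resource.close();
        resource = null;
    }

    @Test
    public void resourceIsAvailable() {
        assertNotNull(resource);
    }

    // Hypothetical stand-in for a costly resource (e.g. a connection or embedded server)
    private static class ExpensiveResource {
        void close() { }
    }
}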
Example #1
Source File: HadoopS3RecoverableWriterITCase.java From flink with Apache License 2.0
@BeforeClass
public static void checkCredentialsAndSetup() throws IOException {
    // check whether credentials exist
    S3TestCredentials.assumeCredentialsAvailable();

    basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());

    // initialize configuration with valid credentials
    final Configuration conf = new Configuration();
    conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
    conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());

    conf.setLong(PART_UPLOAD_MIN_SIZE, PART_UPLOAD_MIN_SIZE_VALUE);
    conf.setInteger(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);

    final String defaultTmpDir = TEMP_FOLDER.getRoot().getAbsolutePath() + "s3_tmp_dir";
    conf.setString(CoreOptions.TMP_DIRS, defaultTmpDir);

    FileSystem.initialize(conf);

    skipped = false;
}
Example #2
Source File: Slf4jReporterTest.java From flink with Apache License 2.0
@BeforeClass
public static void setUp() {
    TestUtils.addTestAppenderForRootLogger();

    Configuration configuration = new Configuration();
    configuration.setString(MetricOptions.SCOPE_NAMING_TASK, "<host>.<tm_id>.<job_name>");

    registry = new MetricRegistryImpl(
        MetricRegistryConfiguration.fromConfiguration(configuration),
        Collections.singletonList(ReporterSetup.forReporter("slf4j", new Slf4jReporter())));
    delimiter = registry.getDelimiter();

    taskMetricGroup = new TaskManagerMetricGroup(registry, HOST_NAME, TASK_MANAGER_ID)
        .addTaskForJob(new JobID(), JOB_NAME, new JobVertexID(), new ExecutionAttemptID(), TASK_NAME, 0, 0);
    reporter = (Slf4jReporter) registry.getReporters().get(0);
}
Example #3
Source File: ListTelnetHandlerTest.java From dubbo-2.6.5 with Apache License 2.0
@BeforeClass
public static void setUp() {
    StringBuilder buf = new StringBuilder();
    StringBuilder buf2 = new StringBuilder();
    Method[] methods = DemoService.class.getMethods();
    for (Method method : methods) {
        if (buf.length() > 0) {
            buf.append("\r\n");
        }
        if (buf2.length() > 0) {
            buf2.append("\r\n");
        }
        buf2.append(method.getName());
        buf.append(ReflectUtils.getName(method));
    }
    detailMethods = buf.toString();
    methodsName = buf2.toString();
    ProtocolUtils.closeAll();
}
Example #4
Source File: GattConnectionTests.java From bitgatt with Mozilla Public License 2.0
@BeforeClass
public static void beforeClass() {
    Context appContext = mock(Context.class);
    when(appContext.getSystemService(any(String.class))).thenReturn(null);
    when(appContext.getApplicationContext()).thenReturn(appContext);
    FitbitGatt.getInstance().setAsyncOperationThreadWatchdog(mock(LooperWatchdog.class));
    FitbitGatt.getInstance().start(appContext);
    Looper mockLooper = mock(Looper.class);
    BluetoothDevice mockBluetoothDevice = mock(BluetoothDevice.class);
    when(mockBluetoothDevice.getAddress()).thenReturn(MOCK_ADDRESS);
    when(mockBluetoothDevice.getName()).thenReturn(MOCK_NAME);
    mockGatt = mock(BluetoothGatt.class);
    when(mockGatt.getDevice()).thenReturn(mockBluetoothDevice);
    connection = new GattConnection(new FitbitBluetoothDevice(mockBluetoothDevice), mockLooper);
    connection.setMockMode(true);
    BluetoothGattServer server = mock(BluetoothGattServer.class);
    serverConnection = new GattServerConnection(server, mockLooper);
    serverConnection.setMockMode(true);
}
Example #5
Source File: ResponseWriterTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() throws ServletException {
    final PathHandler root = new PathHandler();
    final ServletContainer container = ServletContainer.Factory.newInstance();

    DeploymentInfo builder = new DeploymentInfo()
        .setClassIntrospecter(TestClassIntrospector.INSTANCE)
        .setClassLoader(ResponseWriterTestCase.class.getClassLoader())
        .setContextPath("/servletContext")
        .setDeploymentName("servletContext.war")
        .addServlet(Servlets.servlet("resp", ResponseWriterServlet.class)
            .addMapping("/resp"))
        .addServlet(Servlets.servlet("respLArget", LargeResponseWriterServlet.class)
            .addMapping("/large"));

    DeploymentManager manager = container.addDeployment(builder);
    manager.deploy();
    root.addPrefixPath(builder.getContextPath(), manager.start());

    DefaultServer.setRootHandler(root);
}
Example #6
Source File: TestNcssParams.java From tds with BSD 3-Clause "New" or "Revised" License
@BeforeClass
public static void setUp() {
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    validator = factory.getValidator();

    Class c = resolver.getClass();
    InputStream is = c.getResourceAsStream("/ValidationMessages.properties");
    if (is != null) {
        try {
            resolver.load(is);
            resolver.list(System.out);
            is.close();
        } catch (IOException e) {
            e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
        }
    }
}
Example #7
Source File: BindKernelTest.java From grcuda with BSD 3-Clause "New" or "Revised" License
@BeforeClass
public static void setupUpClass() throws IOException, InterruptedException {
    // Write CUDA C source file
    File sourceFile = tempFolder.newFile("inc_kernel.cu");
    PrintWriter writer = new PrintWriter(new FileWriter(sourceFile));
    writer.write(INCREMENT_KERNEL_SOURCE);
    writer.close();
    BindKernelTest.ptxFileName = sourceFile.getParent() + File.separator + "inc_kernel.ptx";

    // Compile source file with NVCC
    Process compiler = Runtime.getRuntime().exec(
        "nvcc --ptx " + sourceFile.getAbsolutePath() + " -o " + BindKernelTest.ptxFileName);
    BufferedReader output = new BufferedReader(new InputStreamReader(compiler.getErrorStream()));
    int nvccReturnCode = compiler.waitFor();
    output.lines().forEach(System.out::println);
    assertEquals(0, nvccReturnCode);
}
Example #8
Source File: TestBase.java From WeBASE-Front with Apache License 2.0
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    // Load the Spring configuration file and create the application context
    context = new ClassPathXmlApplicationContext("applicationContext.xml");
    // ((ClassPathXmlApplicationContext) context).start();
    Service service = context.getBean(Service.class);
    service.run();
    System.out.println("start...");
    System.out.println("===================================================================");
    ChannelEthereumService channelEthereumService = new ChannelEthereumService();
    channelEthereumService.setChannelService(service);
    web3j = Web3j.build(channelEthereumService, service.getGroupId());
    // EthBlockNumber ethBlockNumber = web3.ethBlockNumber().send();
    Ok ok = Ok.deploy(web3j, credentials, new StaticGasProvider(gasPrice, gasLimit)).send();
    address = ok.getContractAddress();
    blockNumber = ok.getTransactionReceipt().get().getBlockNumber();
    blockHash = ok.getTransactionReceipt().get().getBlockHash();
    txHash = ok.getTransactionReceipt().get().getTransactionHash();
}
Example #9
Source File: BlockchainQueriesLogCacheTest.java From besu with Apache License 2.0
@BeforeClass
public static void setupClass() throws IOException {
    final Address testAddress = Address.fromHexString("0x123456");
    final Bytes testMessage = Bytes.fromHexString("0x9876");
    final Log testLog = new Log(testAddress, testMessage, List.of());
    testLogsBloomFilter = LogsBloomFilter.builder().insertLog(testLog).build();
    logsQuery = new LogsQuery(List.of(testAddress), List.of());

    for (int i = 0; i < 2; i++) {
        final RandomAccessFile file =
            new RandomAccessFile(cacheDir.newFile("logBloom-" + i + ".cache"), "rws");
        writeThreeEntries(testLogsBloomFilter, file);
        file.seek((BLOCKS_PER_BLOOM_CACHE - 3) * LogsBloomFilter.BYTE_SIZE);
        writeThreeEntries(testLogsBloomFilter, file);
    }
}
Example #10
Source File: HAQueryableStateRocksDBBackendITCase.java From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
    zkServer = new TestingServer();

    // we have to manage this manually because we have to create the ZooKeeper server
    // ahead of this
    miniClusterResource = new MiniClusterWithClientResource(
        new MiniClusterResourceConfiguration.Builder()
            .setConfiguration(getConfig())
            .setNumberTaskManagers(NUM_TMS)
            .setNumberSlotsPerTaskManager(NUM_SLOTS_PER_TM)
            .build());

    miniClusterResource.before();

    client = new QueryableStateClient("localhost", QS_PROXY_PORT_RANGE_START);

    clusterClient = miniClusterResource.getClusterClient();
}
Example #11
Source File: MockRequestTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() throws ServletException {
    final PathHandler root = new PathHandler();
    final ServletContainer container = ServletContainer.Factory.newInstance();

    ServletInfo s = new ServletInfo("servlet", HelloServlet.class).addMapping("/aa");

    DeploymentInfo builder = new DeploymentInfo()
        .setClassLoader(MockRequestTestCase.class.getClassLoader())
        .setContextPath("/servletContext")
        .setClassIntrospecter(TestClassIntrospector.INSTANCE)
        .setDeploymentName("servletContext.war")
        .addServlet(s);

    DeploymentManager manager = container.addDeployment(builder);
    manager.deploy();
    deployment = manager.getDeployment();
    root.addPrefixPath(builder.getContextPath(), manager.start());

    DefaultServer.setRootHandler(root);
}
Example #12
Source File: NFilePruningTest.java From kylin-on-parquet-v2 with Apache License 2.0
@BeforeClass
public static void beforeClass() {
    if (Shell.MAC)
        System.setProperty("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy

    sparkConf = new SparkConf().setAppName(UUID.randomUUID().toString()).setMaster("local[4]");
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
    sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
    sparkConf.set("spark.sql.shuffle.partitions", "1");
    sparkConf.set("spark.memory.fraction", "0.1"); // opt memory
    sparkConf.set("spark.shuffle.detectCorrupt", "false");

    // For sinai_poc/query03, enable implicit cross join conversion
    sparkConf.set("spark.sql.crossJoin.enabled", "true");

    ss = SparkSession.builder().config(sparkConf).getOrCreate();
    KylinSparkEnv.setSparkSession(ss);
    UdfManager.create(ss);

    System.out.println("Check spark sql config [spark.sql.catalogImplementation = "
        + ss.conf().get("spark.sql.catalogImplementation") + "]");
}
Example #13
Source File: GracefulShutdownTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() {
    shutdown = Handlers.gracefulShutdown(new HttpHandler() {
        @Override
        public void handleRequest(HttpServerExchange exchange) throws Exception {
            final CountDownLatch countDownLatch = latch2.get();
            final CountDownLatch latch = latch1.get();
            if (latch != null) {
                latch.countDown();
            }
            if (countDownLatch != null) {
                countDownLatch.await();
            }
        }
    });
    DefaultServer.setRootHandler(shutdown);
}
Example #14
Source File: TestNodeFailure.java From hadoop-ozone with Apache License 2.0
/**
 * Create a MiniDFSCluster for testing.
 *
 * @throws IOException
 */
@BeforeClass
public static void init() throws Exception {
    final OzoneConfiguration conf = new OzoneConfiguration();
    conf.setTimeDuration(
        RatisHelper.HDDS_DATANODE_RATIS_SERVER_PREFIX_KEY + "."
            + DatanodeRatisServerConfig.RATIS_FOLLOWER_SLOWNESS_TIMEOUT_KEY,
        10, TimeUnit.SECONDS);
    conf.setTimeDuration(
        RatisHelper.HDDS_DATANODE_RATIS_SERVER_PREFIX_KEY + "."
            + DatanodeRatisServerConfig.RATIS_SERVER_NO_LEADER_TIMEOUT_KEY,
        10, TimeUnit.SECONDS);
    conf.set(HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL, "2s");

    cluster = MiniOzoneCluster.newBuilder(conf)
        .setNumDatanodes(6)
        .setHbInterval(1000)
        .setHbProcessorInterval(1000)
        .build();
    cluster.waitForClusterToBeReady();

    final StorageContainerManager scm = cluster.getStorageContainerManager();
    pipelineManager = scm.getPipelineManager();
    ratisPipelines = pipelineManager.getPipelines(
        HddsProtos.ReplicationType.RATIS,
        HddsProtos.ReplicationFactor.THREE);
    timeForFailure = (int) conf.getObject(DatanodeRatisServerConfig.class)
        .getFollowerSlownessTimeout();
}
Example #15
Source File: BaseMapperTest.java From mybatis-action with Apache License 2.0
@BeforeClass
public static void init() {
    try {
        Reader reader = Resources.getResourceAsReader("mybatis-config.xml");
        sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example #16
Source File: HardCodedProfileValueSourceSpringRunnerTests.java From java-technology-stack with MIT License
@BeforeClass
public static void setProfileValue() {
    numTestsExecuted = 0;
    // Set the system property to something other than VALUE as a sanity check.
    System.setProperty(NAME, "999999999999");
}
Example #17
Source File: SystemStreamTests.java From vividus with Apache License 2.0
@BeforeClass
@BeforeAll
public static void beforeClass() {
    out = new ByteArrayOutputStream();
    System.setOut(createPrintStream(out));
    err = new ByteArrayOutputStream();
    System.setErr(createPrintStream(err));
}
Example #18
Source File: WorkflowActionSNMPTrapTest.java From hop with Apache License 2.0
@BeforeClass
public static void beforeClass() throws HopException {
    PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() );
    PluginRegistry.init();
    String passwordEncoderPluginID =
        Const.NVL( EnvUtil.getSystemProperty( Const.HOP_PASSWORD_ENCODER_PLUGIN ), "Hop" );
    Encr.init( passwordEncoderPluginID );
}
Example #19
Source File: HazelcastJetAutoConfigurationClientTests.java From hazelcast-jet-contrib with Apache License 2.0
@BeforeClass
public static void init() {
    JetConfig jetConfig = new JetConfig();
    jetConfig.configureHazelcast(hzConfig -> hzConfig
        .setClusterName("boot-starter")
        .getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false)
    );
    jetServer = Jet.newJetInstance(jetConfig);
}
Example #20
Source File: TestOzoneShellHA.java From hadoop-ozone with Apache License 2.0
/**
 * Create a MiniOzoneCluster for testing with using distributed Ozone
 * handler type.
 *
 * @throws Exception
 */
@BeforeClass
public static void init() throws Exception {
    conf = new OzoneConfiguration();

    String path = GenericTestUtils.getTempPath(
        TestOzoneShellHA.class.getSimpleName());
    baseDir = new File(path);
    baseDir.mkdirs();

    testFile = new File(path + OzoneConsts.OZONE_URI_DELIMITER + "testFile");
    testFile.getParentFile().mkdirs();
    testFile.createNewFile();

    ozoneShell = new OzoneShell();

    // Init HA cluster
    omServiceId = "om-service-test1";
    numOfOMs = 3;
    clusterId = UUID.randomUUID().toString();
    scmId = UUID.randomUUID().toString();
    cluster = MiniOzoneCluster.newHABuilder(conf)
        .setClusterId(clusterId)
        .setScmId(scmId)
        .setOMServiceId(omServiceId)
        .setNumOfOzoneManagers(numOfOMs)
        .build();
    conf.setQuietMode(false);
    cluster.waitForClusterToBeReady();
}
Example #21
Source File: LdapBackendTestNewStyleConfig2.java From deprecated-security-advanced-modules with Apache License 2.0
@BeforeClass
public static void startLdapServer() throws Exception {
    ldapServer = new EmbeddedLDAPServer();
    ldapServer.start();
    ldapServer.applyLdif("base.ldif", "base2.ldif");
    ldapPort = ldapServer.getLdapPort();
    ldapsPort = ldapServer.getLdapsPort();
}
Example #22
Source File: DefaultCharsetTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() throws ServletException {
    DeploymentUtils.setupServlet(new ServletExtension() {
            @Override
            public void handleDeployment(DeploymentInfo deploymentInfo, ServletContext servletContext) {
                deploymentInfo.setDefaultEncoding("UTF-8");
            }
        },
        servlet("servlet", DefaultCharsetServlet.class)
            .addMapping("/writer"),
        servlet("form", DefaultCharsetFormParserServlet.class)
            .addMapping("/form"));
}
Example #23
Source File: CallBuilderTest.java From das with Apache License 2.0
@BeforeClass
public static void setupDataBase() throws SQLException {
    dao = DasClientFactory.getClient(DATABASE_LOGIC_NAME);

    String[] sqls = new String[]{
        DROP_SP_WITHOUT_OUT_PARAM, CREATE_SP_WITHOUT_OUT_PARAM,
        DROP_SP_WITH_OUT_PARAM, CREATE_SP_WITH_OUT_PARAM,
        DROP_SP_WITH_IN_OUT_PARAM, CREATE_SP_WITH_IN_OUT_PARAM,
        DROP_SP_WITH_INTERMEDIATE_RESULT, CREATE_SP_WITH_INTERMEDIATE_RESULT};

    BatchUpdateBuilder b = new BatchUpdateBuilder(sqls);
    b.hints().inShard(0);
    dao.batchUpdate(b);

    b.hints().inShard(1);
    dao.batchUpdate(b);
}
Example #24
Source File: SpitterRepositoryTest.java From Project with Apache License 2.0
@BeforeClass
public static void before() {
    SPITTERS[0] = new Spitter(1L, "habuma", "password", "Craig Walls", "[email protected]", false);
    SPITTERS[1] = new Spitter(2L, "mwalls", "password", "Michael Walls", "[email protected]", true);
    SPITTERS[2] = new Spitter(3L, "chuck", "password", "Chuck Wagon", "[email protected]", false);
    SPITTERS[3] = new Spitter(4L, "artnames", "password", "Art Names", "[email protected]", true);
    SPITTERS[4] = new Spitter(5L, "newbee", "letmein", "New Bee", "[email protected]", true);
    SPITTERS[5] = new Spitter(4L, "arthur", "letmein", "Arthur Names", "[email protected]", false);
}
Example #25
Source File: TrafficControlIntegrationTest.java From data-highway with Apache License 2.0
@BeforeClass
public static void before() throws Exception {
    kafka.createTopic(ROAD_TOPIC, 1, 1);
    kafka.createTopic(PATCH_TOPIC, 1, 1);

    int port;
    try (ServerSocket socket = new ServerSocket(0)) {
        port = socket.getLocalPort();
    }

    context = new SpringApplicationBuilder(TrafficControlApp.class)
        .bannerMode(OFF)
        .properties(
            ImmutableMap
                .<String, Object> builder()
                .put("server.port", Integer.toString(port))
                .put("kafka.default.replicationFactor", "1")
                .put("kafka.bootstrapServers", kafka.bootstrapServers())
                .put("kafka.zookeeper", kafka.zKConnectString())
                .put("kafka.road.topic", ROAD_TOPIC)
                .put("kafka.road.modification.topic", PATCH_TOPIC)
                .build())
        .build()
        .run();

    kafkaStore = new KafkaStore<>(kafka.bootstrapServers(), new StringJsonNodeSerializer(), ROAD_TOPIC);
}
Example #26
Source File: ProtobufDescriptorMapTest.java From stateful-functions with Apache License 2.0
@BeforeClass
public static void setup() throws IOException {
    // Load the test file descriptor set
    InputStream descriptorStream =
        ProtobufDescriptorMap.class.getClassLoader().getResourceAsStream(TEST_DESCRIPTOR_NAME);
    FILE_DESCRIPTOR_SET = DescriptorProtos.FileDescriptorSet.parseFrom(descriptorStream);
}
Example #27
Source File: CalculatorBackwardCompatibilityUnitTest.java From hop with Apache License 2.0
@BeforeClass
public static void init() throws HopException {
    assertEquals( DEFAULT_ROUND_2_MODE, getRound2Mode() );
    setRound2Mode( OBSOLETE_ROUND_2_MODE );
    assertEquals( OBSOLETE_ROUND_2_MODE, getRound2Mode() );

    HopEnvironment.init();
}
Example #28
Source File: SecurityRedirectTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() throws ServletException {
    final PathHandler root = new PathHandler();
    final ServletContainer container = ServletContainer.Factory.newInstance();

    ServletIdentityManager identityManager = new ServletIdentityManager();
    identityManager.addUser("user1", "password1", "role1");

    DeploymentInfo builder = new DeploymentInfo()
        .setClassIntrospecter(TestClassIntrospector.INSTANCE)
        .setClassLoader(ServletPathMappingTestCase.class.getClassLoader())
        .setContextPath("/servletContext")
        .setDeploymentName("servletContext.war")
        .setResourceManager(new TestResourceLoader(SecurityRedirectTestCase.class))
        .addWelcomePages("index.html")
        .setIdentityManager(identityManager)
        .setLoginConfig(new LoginConfig("BASIC", "Test Realm"))
        .addSecurityConstraint(new SecurityConstraint()
            .addRoleAllowed("role1")
            .addWebResourceCollection(new WebResourceCollection()
                .addUrlPatterns("/index.html", "/filterpath/*")));

    DeploymentManager manager = container.addDeployment(builder);
    manager.deploy();
    root.addPrefixPath(builder.getContextPath(), manager.start());

    DefaultServer.setRootHandler(root);
}
Example #29
Source File: SuspendResumeTestCase.java From quarkus-http with Apache License 2.0
@BeforeClass
public static void setup() throws ServletException {
    final ServletContainer container = ServletContainer.Factory.newInstance();

    DeploymentInfo builder = new DeploymentInfo()
        .setClassLoader(TestMessagesReceivedInOrder.class.getClassLoader())
        .setContextPath("/")
        .setResourceManager(new TestResourceLoader(TestMessagesReceivedInOrder.class))
        .setClassIntrospecter(TestClassIntrospector.INSTANCE)
        .addServletContextAttribute(WebSocketDeploymentInfo.ATTRIBUTE_NAME,
            new WebSocketDeploymentInfo()
                .addListener(new WebSocketDeploymentInfo.ContainerReadyListener() {
                    @Override
                    public void ready(ServerWebSocketContainer c) {
                        serverContainer = c;
                    }
                })
                .addEndpoint(SuspendResumeEndpoint.class)
        )
        .setDeploymentName("servletContext.war");

    DeploymentManager manager = container.addDeployment(builder);
    manager.deploy();
    DefaultServer.setRootHandler(Handlers.path().addPrefixPath("/", manager.start()));
}
Example #30
Source File: ElasticsearchSinkTestBase.java From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void prepare() throws Exception {
    LOG.info("-------------------------------------------------------------------------");
    LOG.info(" Starting embedded Elasticsearch node ");
    LOG.info("-------------------------------------------------------------------------");

    // dynamically load version-specific implementation of the Elasticsearch embedded node environment
    Class<?> clazz = Class.forName(
        "org.apache.flink.streaming.connectors.elasticsearch.EmbeddedElasticsearchNodeEnvironmentImpl");
    embeddedNodeEnv = (EmbeddedElasticsearchNodeEnvironment) InstantiationUtil.instantiate(clazz);

    embeddedNodeEnv.start(tempFolder.newFolder(), CLUSTER_NAME);
}