Java Code Examples for com.google.common.io.Files#createTempDir()

The following examples show how to use com.google.common.io.Files#createTempDir(). They are drawn from open-source projects; the source file, project, and license are listed above each example.
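
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share. The class name and the scratch file are illustrative only: Files.createTempDir() creates a fresh, uniquely named directory under java.io.tmpdir and returns it as a java.io.File, which the caller is responsible for cleaning up. Note that recent Guava releases deprecate this method in favor of java.nio.file.Files.createTempDirectory.

import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class CreateTempDirExample {

  public static void main(String[] args) throws IOException {
    // Creates a new, uniquely named directory under java.io.tmpdir.
    File tempDir = Files.createTempDir();

    // Work inside the directory, e.g. write a scratch file.
    File scratch = new File(tempDir, "scratch.txt");
    Files.asCharSink(scratch, StandardCharsets.UTF_8).write("hello");

    // Clean up explicitly. deleteOnExit(), used in several examples below,
    // only removes the directory itself if it is empty at JVM shutdown.
    scratch.delete();
    tempDir.delete();
  }
}
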
Example 1
Source File: AbstractBaleenFileConsumerTest.java    From baleen with Apache License 2.0
@Test
public void testNoSource() throws Exception {
  File baseDir = Files.createTempDir();

  AnalysisEngine consumer =
      AnalysisEngineFactory.createEngine(
          TestFileConsumer.class,
          TypeSystemSingleton.getTypeSystemDescriptionInstance(),
          BASE_PATH,
          baseDir.getPath(),
          "extension",
          "txt");

  jCas.setDocumentText(TEXT);

  consumer.process(jCas);

  String s =
      FileUtils.file2String(
          new File(
              baseDir, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.txt"));
  assertEquals(TEXT, s);
}
 
Example 2
Source File: SwaggerGenMojoTest.java    From herd with Apache License 2.0
@Test
public void test_GetOperationsFilterShouldBeApplied() throws Exception
{
    File tempDir = Files.createTempDir();
    String outputFileName = "swagger.yaml";

    SwaggerGenMojo swaggerGenMojo = new SwaggerGenMojo();
    ReflectionTestUtils.setField(swaggerGenMojo, "outputDirectory", tempDir);
    ReflectionTestUtils.setField(swaggerGenMojo, "outputFilename", outputFileName);
    ReflectionTestUtils.setField(swaggerGenMojo, "restJavaPackage", "org.finra.herd.swaggergen.test.swaggerGenMojo.rest");
    ReflectionTestUtils.setField(swaggerGenMojo, "modelJavaPackage", "org.finra.herd.swaggergen.test.swaggerGenMojo.model");
    ReflectionTestUtils.setField(swaggerGenMojo, "modelErrorClassName", "ErrorResponse");
    ReflectionTestUtils.setField(swaggerGenMojo, "tagPatternParameter", "(?<tag>.+?)RestController");
    ReflectionTestUtils.setField(swaggerGenMojo, "title", "test_title");
    ReflectionTestUtils.setField(swaggerGenMojo, "version", "test_version");
    ReflectionTestUtils.setField(swaggerGenMojo, "basePath", "/test_basePath");
    ReflectionTestUtils.setField(swaggerGenMojo, "schemeParameters", Arrays.asList("http", "https"));
    ReflectionTestUtils.setField(swaggerGenMojo, "applyOperationsFilter", true);
    ReflectionTestUtils.setField(swaggerGenMojo, "includeOperations", new String[] {"Person.get"});

    swaggerGenMojo.execute();

    assertEquals(getResourceAsString("/yaml_GetOperationsFilter.yaml"), getFileAsString(Paths.get(tempDir.getAbsolutePath(), outputFileName)));
}
 
Example 3
Source File: SolrTestingUtility.java    From hbase-indexer with Apache License 2.0
public void start() throws Exception {
    // Make the Solr home directory
    this.tmpDir = Files.createTempDir();
    this.solrHomeDir = new File(tmpDir, "home");
    if (!this.solrHomeDir.mkdir()) {
        throw new RuntimeException("Failed to create directory " + this.solrHomeDir.getAbsolutePath());
    }
    writeSolrXml();

    // Set required system properties
    System.setProperty("solr.solr.home", solrHomeDir.getAbsolutePath());
    System.setProperty("zkHost", zkConnectString);
    System.setProperty("solr.port", Integer.toString(solrPort));

    for (Entry<String, String> entry : configProperties.entrySet()) {
        System.setProperty(entry.getKey().toString(), entry.getValue());
    }

    jettySolrRunner = createServer();
    jettySolrRunner.start();
}
 
Example 4
Source File: AvroCompactionTaskTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void testNonDedup() throws Exception {
  File basePath = Files.createTempDir();
  basePath.deleteOnExit();

  File jobDir = new File(basePath, "Identity/MemberAccount/minutely/2017/04/03/10/20_30/run_2017-04-03-10-20");
  Assert.assertTrue(jobDir.mkdirs());

  GenericRecord r1 = createRandomRecord();
  GenericRecord r2 = createRandomRecord();
  writeFileWithContent(jobDir, "file1", r1, 20);
  writeFileWithContent(jobDir, "file2", r2, 18);

  EmbeddedGobblin embeddedGobblin = createEmbeddedGobblin("non-dedup", basePath.getAbsolutePath().toString());
  JobExecutionResult result = embeddedGobblin.run();
  Assert.assertTrue(result.isSuccessful());
}
 
Example 5
Source File: EmbeddedElasticsearchNode.java    From immutables with Apache License 2.0
/**
 * Creates elastic node as single member of a cluster. Node will not be started
 * unless {@link #start()} is explicitly called.
 * <p>Need {@code synchronized} because of static caches inside ES (which are not thread safe).
 * @return instance which needs to be explicitly started (using {@link #start()})
 */
static synchronized EmbeddedElasticsearchNode create() {
  File home = Files.createTempDir();
  home.deleteOnExit();

  Settings settings = Settings.builder()
          .put("node.name", "embedded-test-elastic")
          .put("path.home", home.getAbsolutePath())
          .put("http.type", "netty4")
          // allow multiple instances to run in parallel
          .put("transport.tcp.port", 0)
          .put("http.port", 0)
          .put("network.host", "localhost")
          .build();

  return create(settings);
}
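
As the Javadoc above notes, the returned node is not started automatically; a hypothetical caller (not part of the excerpt above) would do something like:

// Create the single-member node, then start it explicitly.
EmbeddedElasticsearchNode node = EmbeddedElasticsearchNode.create();
node.start();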
 
Example 6
Source File: MetaStoreConstTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testOpenLocalPentahoMetaStore() throws Exception {
  MetaStoreConst.disableMetaStore = false;
  File tempDir = Files.createTempDir();
  String tempPath = tempDir.getAbsolutePath();
  System.setProperty( Const.PENTAHO_METASTORE_FOLDER, tempPath );
  String metaFolder = tempPath + File.separator + XmlUtil.META_FOLDER_NAME;

  // Create a metastore
  assertNotNull( MetaStoreConst.openLocalPentahoMetaStore() );
  assertTrue( ( new File( metaFolder ) ).exists() );

  // Check existing while disabling the metastore ( used for tests )
  MetaStoreConst.disableMetaStore = true;
  assertNull( MetaStoreConst.openLocalPentahoMetaStore() );

  // Check existing metastore
  MetaStoreConst.disableMetaStore = false;
  assertNotNull( MetaStoreConst.openLocalPentahoMetaStore( false ) );

  // Try to read a metastore that does not exist with allowCreate = false
  FileUtils.deleteDirectory( new File( metaFolder ) );
  assertNull( MetaStoreConst.openLocalPentahoMetaStore( false ) );
  assertFalse( ( new File( metaFolder ) ).exists() );
}
 
Example 7
Source File: SshEffectorTasksTest.java    From brooklyn-server with Apache License 2.0
@BeforeMethod(alwaysRun=true)
public void setup() throws Exception {
    app = TestApplication.Factory.newManagedInstanceForTests();
    mgmt = app.getManagementContext();
    
    LocalhostMachineProvisioningLocation lhc = mgmt.getLocationManager().createLocation(LocationSpec.create(LocalhostMachineProvisioningLocation.class));
    host = lhc.obtain();
    app.start(Arrays.asList(host));
    clearExpectedFailure();
    tempDir = Files.createTempDir();
}
 
Example 8
Source File: TestSentrySchemaTool.java    From incubator-sentry with Apache License 2.0
@Before
public void defaultSetup() throws Exception {
  sentryConf = new Configuration();
  File dbDir = new File(Files.createTempDir(), "sentry_policy_db");
  sentryConf.set(ServerConfig.SENTRY_STORE_JDBC_URL,
      "jdbc:derby:;databaseName=" + dbDir.getPath() + ";create=true");
  sentryConf.set(ServerConfig.SENTRY_STORE_JDBC_PASS, "dummy");
  schemaTool = new SentrySchemaTool("./src/main/resources", sentryConf,
      "derby");
}
 
Example 9
Source File: TempDirectoryTest.java    From jweb-cms with GNU Affero General Public License v3.0
@BeforeEach
void setup() throws IOException {
    File dir = Files.createTempDir();
    new File(dir, "1.txt").createNewFile();
    new File(dir, "dir1").mkdir();
    new File(dir, "dir1/2.txt").createNewFile();
    tempDirectory = new TempDirectory(dir.toPath());
}
 
Example 10
Source File: TestCheckpoint.java    From hadoop with Apache License 2.0
@Test
public void testLegacyOivImage() throws Exception {
  MiniDFSCluster cluster = null;
  SecondaryNameNode secondary = null;
  File tmpDir = Files.createTempDir();
  Configuration conf = new HdfsConfiguration();
  conf.set(DFSConfigKeys.DFS_NAMENODE_LEGACY_OIV_IMAGE_DIR_KEY,
      tmpDir.getAbsolutePath());
  conf.set(DFSConfigKeys.DFS_NAMENODE_NUM_CHECKPOINTS_RETAINED_KEY,
      "2");

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
            .format(true).build();

    secondary = startSecondaryNameNode(conf);

    // Checkpoint once
    secondary.doCheckpoint();
    String files1[] = tmpDir.list();
    assertEquals("Only one file is expected", 1, files1.length);

    // Perform more checkpoints and check whether retention management
    // is working.
    secondary.doCheckpoint();
    secondary.doCheckpoint();
    String files2[] = tmpDir.list();
    assertEquals("Two files are expected", 2, files2.length);

    // Verify that the first file is deleted.
    for (String fName : files2) {
      assertFalse(fName.equals(files1[0]));
    }
  } finally {
    cleanup(secondary);
    cleanup(cluster);
    tmpDir.delete();
  }
}
 
Example 11
Source File: GeoWaveIndexerTest.java    From rya with Apache License 2.0
@BeforeClass
public static void setup() throws AccumuloException, AccumuloSecurityException, IOException, InterruptedException {
    if (!IS_MOCK) {
        tempAccumuloDir = Files.createTempDir();

        accumulo = MiniAccumuloClusterFactory.newAccumuloCluster(
                new MiniAccumuloConfigImpl(tempAccumuloDir, ACCUMULO_PASSWORD),
                GeoWaveIndexerTest.class);

        accumulo.start();
    }
}
 
Example 12
Source File: JobKillServiceV3Test.java    From genie with Apache License 2.0
/**
 * Setup for the tests.
 *
 * @throws IOException if the job directory cannot be created
 */
@Before
public void setup() throws IOException {
    Assume.assumeTrue(SystemUtils.IS_OS_UNIX);
    final File tempDirectory = Files.createTempDir();
    this.genieWorkingDir = new FileSystemResource(tempDirectory);
    Files.createParentDirs(new File(tempDirectory.getPath() + "/" + ID + "/genie/x"));
    this.persistenceService = Mockito.mock(PersistenceService.class);
    this.executor = Mockito.mock(Executor.class);
    this.genieEventBus = Mockito.mock(GenieEventBus.class);
    this.processCheckerFactory = Mockito.mock(ProcessChecker.Factory.class);
    this.processChecker = Mockito.mock(ProcessChecker.class);
    this.dataServices = Mockito.mock(DataServices.class);
    Mockito.when(this.dataServices.getPersistenceService()).thenReturn(this.persistenceService);
    this.service = new JobKillServiceV3(
        HOSTNAME,
        this.dataServices,
        this.executor,
        false,
        this.genieEventBus,
        this.genieWorkingDir,
        GenieObjectMapper.getMapper(),
        this.processCheckerFactory
    );

    this.killCommand = new CommandLine("kill");
    this.killCommand.addArguments(Integer.toString(PID));
}
 
Example 13
Source File: AbstractTestWithHiveServer.java    From incubator-sentry with Apache License 2.0
public static Context createContext(Map<String, String> properties)
    throws Exception {
  fileSystem = FileSystem.get(new Configuration());
  baseDir = Files.createTempDir();
  LOGGER.info("BaseDir = " + baseDir);
  logDir = assertCreateDir(new File(baseDir, "log"));
  confDir = assertCreateDir(new File(baseDir, "etc"));
  dataDir = assertCreateDir(new File(baseDir, "data"));
  policyFile = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
  hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile.getPath(), fileSystem);
  hiveServer.start();
  return new Context(hiveServer, fileSystem,
      baseDir, dataDir, policyFile);
}
 
Example 14
Source File: CommonDatabaseMigrationConfig.java    From cloudbreak with Apache License 2.0
@Bean
public FileMigrationLoader upMigrationLoader() throws IOException {
    tempUpMigrationFolder = Files.createTempDir();
    copyInternalFilesToTempFolder(tempUpMigrationFolder, schemaLocationProviders, SchemaLocationProvider::upSubfolder);
    return new FileMigrationLoader(tempUpMigrationFolder, "UTF-8", new Properties());
}
 
Example 15
Source File: KdcLocalCluster.java    From hadoop-mini-clusters with Apache License 2.0
protected void prepareSecureConfiguration(String username) throws Exception {
    baseConf = new Configuration(false);
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, baseConf);
    baseConf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
    //baseConf.set(CommonConfigurationKeys.HADOOP_RPC_PROTECTION, "authentication");

    Class klass = this.getClass();
    String file = klass.getName();
    file = file.replace('.', '/') + ".class";
    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
    String sslConfigDir;
    if (url.getProtocol().equals("jar")) {
      File tempDir = Files.createTempDir();
      sslConfigDir = tempDir.getAbsolutePath();
      tempDir.deleteOnExit();
    } else {
      sslConfigDir = url.toURI().getPath();
      sslConfigDir = sslConfigDir.substring(0, sslConfigDir.length() - file.length() - 1);
    }
    KeyStoreTestUtil.setupSSLConfig(baseDir, sslConfigDir, baseConf, false);

    // User
    baseConf.set("hadoop.proxyuser." + username + ".hosts", "*");
    baseConf.set("hadoop.proxyuser." + username + ".groups", "*");

    // HTTP
    String spnegoPrincipal = getKrbPrincipalWithRealm(SPNEGO_USER_NAME);
    baseConf.set("hadoop.proxyuser." + SPNEGO_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.proxyuser." + SPNEGO_USER_NAME + ".hosts", "*");

    // Oozie
    String ooziePrincipal = getKrbPrincipalWithRealm(OOZIE_USER_NAME);
    baseConf.set("hadoop.proxyuser." + OOZIE_USER_NAME + ".hosts", "*");
    baseConf.set("hadoop.proxyuser." + OOZIE_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.user.group.static.mapping.overrides", OOZIE_PROXIED_USER_NAME + "=oozie");
    baseConf.set("oozie.service.HadoopAccessorService.keytab.file", getKeytabForPrincipal(OOZIE_USER_NAME));
    baseConf.set("oozie.service.HadoopAccessorService.kerberos.principal", ooziePrincipal);
    baseConf.setBoolean("oozie.service.HadoopAccessorService.kerberos.enabled", true);

    // HDFS
    String hdfsPrincipal = getKrbPrincipalWithRealm(HDFS_USER_NAME);
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, getKeytabForPrincipal(HDFS_USER_NAME));
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, getKeytabForPrincipal(HDFS_USER_NAME));
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY, getKeytabForPrincipal(SPNEGO_USER_NAME));
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

    // HBase
    String hbasePrincipal = getKrbPrincipalWithRealm(HBASE_USER_NAME);
    baseConf.set("hbase.security.authentication", "kerberos");
    baseConf.setBoolean("hbase.security.authorization", true);
    baseConf.set("hbase.regionserver.kerberos.principal", hbasePrincipal);
    baseConf.set("hbase.regionserver.keytab.file", getKeytabForPrincipal(HBASE_USER_NAME));
    baseConf.set("hbase.master.kerberos.principal", hbasePrincipal);
    baseConf.set("hbase.master.keytab.file", getKeytabForPrincipal(HBASE_USER_NAME));
    baseConf.set("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.token.TokenProvider");
    baseConf.set("hbase.rest.authentication.kerberos.keytab", getKeytabForPrincipal(SPNEGO_USER_NAME));
    baseConf.set("hbase.rest.authentication.kerberos.principal", spnegoPrincipal);
    baseConf.set("hbase.rest.kerberos.principal", hbasePrincipal);
    baseConf.set("hadoop.proxyuser." + HBASE_USER_NAME + ".groups", "*");
    baseConf.set("hadoop.proxyuser." + HBASE_USER_NAME + ".hosts", "*");

    //hbase.coprocessor.master.classes -> org.apache.hadoop.hbase.security.access.AccessController
    //hbase.coprocessor.region.classes -> org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController

    // Storm
    //String stormPrincipal = getKrbPrincipalWithRealm(STORM_USER_NAME);

    // Yarn
    String yarnPrincipal = getKrbPrincipalWithRealm(YARN_USER_NAME);
    baseConf.set("yarn.resourcemanager.keytab", getKeytabForPrincipal(YARN_USER_NAME));
    baseConf.set("yarn.resourcemanager.principal", yarnPrincipal);
    baseConf.set("yarn.nodemanager.keytab", getKeytabForPrincipal(YARN_USER_NAME));
    baseConf.set("yarn.nodemanager.principal", yarnPrincipal);

    // Mapreduce
    String mrv2Principal = getKrbPrincipalWithRealm(MRV2_USER_NAME);
    baseConf.set("mapreduce.jobhistory.keytab", getKeytabForPrincipal(MRV2_USER_NAME));
    baseConf.set("mapreduce.jobhistory.principal", mrv2Principal);
}
 
Example 16
Source File: TestWriteAheadLog.java    From bboxdb with Apache License 2.0
@Before
public void before() {
	tempDir = Files.createTempDir();
}
 
Example 17
Source File: AbstractBrooklynLauncherRebindTest.java    From brooklyn-server with Apache License 2.0
protected String newTempPersistenceContainerName() {
    File persistenceDirF = Files.createTempDir();
    Os.deleteOnExitRecursively(persistenceDirF);
    return persistenceDirF.getAbsolutePath();
}
 
Example 18
Source File: TagBasedVersionFactoryTest.java    From gradle-gitsemver with Apache License 2.0
private static Repository createRepository() throws IOException {
    File repoDir = Files.createTempDir();
    Repository repo = new FileRepository(new File(repoDir, ".git"));
    repo.create();
    return repo;
}
 
Example 19
Source File: PolicyResolver.java    From apiman-cli with Apache License 2.0
public PolicyResolver() {
    super(Files.createTempDir());
    buildInbuiltPolicyMap();
    LOGGER.debug("Inbuilt policy map: {}", inbuiltPolicyMap);
}
 
Example 20
Source File: AbstractTestWithStaticConfiguration.java    From incubator-sentry with Apache License 2.0
@BeforeClass
public static void setupTestStaticConfiguration() throws Exception {
  LOGGER.info("AbstractTestWithStaticConfiguration setupTestStaticConfiguration");
  properties = Maps.newHashMap();
  if(!policyOnHdfs) {
    policyOnHdfs = Boolean.valueOf(System.getProperty("sentry.e2etest.policyonhdfs", "false"));
  }
  if (testServerType != null) {
    properties.put("sentry.e2etest.hiveServer2Type", testServerType);
  }
  baseDir = Files.createTempDir();
  LOGGER.info("BaseDir = " + baseDir);
  logDir = assertCreateDir(new File(baseDir, "log"));
  confDir = assertCreateDir(new File(baseDir, "etc"));
  dataDir = assertCreateDir(new File(baseDir, "data"));
  policyFileLocation = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);

  String dfsType = System.getProperty(DFSFactory.FS_TYPE);
  dfs = DFSFactory.create(dfsType, baseDir, testServerType);
  fileSystem = dfs.getFileSystem();

  PolicyFile policyFile = PolicyFile.setAdminOnServer1(ADMIN1)
      .setUserGroupMapping(StaticUserGroup.getStaticMapping());
  policyFile.write(policyFileLocation);

  String policyURI;
  if (policyOnHdfs) {
    String dfsUri = FileSystem.getDefaultUri(fileSystem.getConf()).toString();
    LOGGER.error("dfsUri " + dfsUri);
    policyURI = dfsUri + System.getProperty("sentry.e2etest.hive.policy.location",
        "/user/hive/sentry");
    policyURI += "/" + HiveServerFactory.AUTHZ_PROVIDER_FILENAME;
  } else {
    policyURI = policyFileLocation.getPath();
  }

  boolean startSentry = Boolean.valueOf(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
  if ("true".equalsIgnoreCase(System.getProperty(ENABLE_SENTRY_HA, "false"))) {
    enableSentryHA = true;
  }
  if (useSentryService && (!startSentry)) {
    setupSentryService();
  }

  if (enableHiveConcurrency) {
    properties.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "true");
    properties.put(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname,
        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
    properties.put(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname,
        "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
  }

  hiveServer = create(properties, baseDir, confDir, logDir, policyURI, fileSystem);
  hiveServer.start();
  createContext();

  // Create tmp as scratch dir if it doesn't exist
  Path tmpPath = new Path("/tmp");
  if (!fileSystem.exists(tmpPath)) {
    fileSystem.mkdirs(tmpPath, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
  }
}