org.apache.beam.sdk.io.DefaultFilenamePolicy Java Examples

The following examples show how to use org.apache.beam.sdk.io.DefaultFilenamePolicy. You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: WindowedFilenamePolicy.java    From DataflowTemplates with Apache License 2.0 6 votes vote down vote up
/**
 * The windowed filename method will construct filenames per window according to the baseFile,
 * suffix, and shardTemplate supplied. Directories with date templates in them will automatically
 * have their values resolved. For example the outputDirectory of /YYYY/MM/DD would resolve to
 * /2017/01/08 on January 8th, 2017.
 */
/**
 * Constructs the filename for one shard of one window, according to the configured base
 * file, suffix, and shard template. Date templates embedded in the output directory are
 * resolved against the window; for example, an outputDirectory of /YYYY/MM/DD resolves to
 * /2017/01/08 for a window on January 8th, 2017.
 *
 * @param shardNumber the shard number of the file being produced
 * @param numShards the total number of shards being written
 * @param window the window the output belongs to; used to resolve date templates
 * @param paneInfo pane information for this window firing
 * @param outputFileHints hints from the sink (e.g. a suggested filename suffix)
 * @return the fully resolved {@link ResourceId} of the windowed output file
 */
@Override
public ResourceId windowedFilename(
        int shardNumber,
        int numShards,
        BoundedWindow window,
        PaneInfo paneInfo,
        OutputFileHints outputFileHints) {

    // Expand any date templates (YYYY/MM/DD, etc.) in the directory, then append the prefix.
    ResourceId outputFile =
            resolveWithDateTemplates(outputDirectory, window)
                    .resolve(outputFilenamePrefix.get(), StandardResolveOptions.RESOLVE_FILE);

    // Delegate shard-number/suffix formatting to Beam's standard policy in windowed mode.
    DefaultFilenamePolicy policy =
            DefaultFilenamePolicy.fromStandardParameters(
                    StaticValueProvider.of(outputFile), shardTemplate.get(), suffix.get(), true);
    ResourceId result =
            policy.windowedFilename(shardNumber, numShards, window, paneInfo, outputFileHints);
    // Pass the object itself: SLF4J formats it lazily, only if debug logging is enabled.
    LOG.debug("Windowed file name policy created: {}", result);
    return result;
}
 
Example #2
Source File: WindowedFilenamePolicy.java    From DataflowTemplates with Apache License 2.0 6 votes vote down vote up
/**
 * The windowed filename method will construct filenames per window according to the baseFile,
 * suffix, and shardTemplate supplied. Directories with date templates in them will automatically
 * have their values resolved. For example the outputDirectory of /YYYY/MM/DD would resolve to
 * /2017/01/08 on January 8th, 2017.
 */
/**
 * Builds the per-window output filename from the configured base file, suffix, and shard
 * template. Any date templates present in the output directory are expanded using the
 * window; e.g. an outputDirectory of /YYYY/MM/DD resolves to /2017/01/08 for a window on
 * January 8th, 2017.
 */
@Override
public ResourceId windowedFilename(
    int shardNumber,
    int numShards,
    BoundedWindow window,
    PaneInfo paneInfo,
    OutputFileHints outputFileHints) {

  // Expand date templates in the directory, then attach the configured filename prefix.
  ResourceId resolvedDirectory = resolveWithDateTemplates(outputDirectory, window);
  ResourceId baseFile =
      resolvedDirectory.resolve(outputFilenamePrefix.get(), StandardResolveOptions.RESOLVE_FILE);

  // Hand shard/suffix formatting off to Beam's standard windowed filename policy.
  DefaultFilenamePolicy delegate =
      DefaultFilenamePolicy.fromStandardParameters(
          StaticValueProvider.of(baseFile), shardTemplate.get(), suffix.get(), true);
  ResourceId filename =
      delegate.windowedFilename(shardNumber, numShards, window, paneInfo, outputFileHints);
  LOG.debug("Windowed file name policy created: {}", filename.toString());
  return filename;
}
 
Example #3
Source File: JdbcAvroIO.java    From dbeam with Apache License 2.0 6 votes vote down vote up
/**
 * Creates the transform that writes the query results as Avro files.
 *
 * <p>Output files are named {@code <prefix>/part-<shard template><suffix>} using Beam's
 * default (unwindowed) filename policy; trailing slashes on the supplied prefix are
 * stripped before the {@code /part} stem is appended.
 */
public static PTransform<PCollection<String>, WriteFilesResult<Void>> createWrite(
    String filenamePrefix, String filenameSuffix, Schema schema, JdbcAvroArgs jdbcAvroArgs) {
  // Normalize the prefix rather than reassigning the parameter.
  final String normalizedPrefix = filenamePrefix.replaceAll("/+$", "") + "/part";
  final ValueProvider<ResourceId> prefixProvider =
      StaticValueProvider.of(FileBasedSink.convertToFileResourceIfPossible(normalizedPrefix));
  final FileBasedSink.FilenamePolicy filenamePolicy =
      DefaultFilenamePolicy.fromStandardParameters(
          prefixProvider, DEFAULT_SHARD_TEMPLATE, filenameSuffix, false);

  final DynamicAvroDestinations<String, Void, String> destinations =
      AvroIO.constantDestinations(
          filenamePolicy,
          schema,
          ImmutableMap.of(),
          // Beam does not support zstandard, so the records are written uncompressed.
          CodecFactory.nullCodec(),
          SerializableFunctions.identity());
  return WriteFiles.to(new JdbcAvroSink<>(prefixProvider, destinations, jdbcAvroArgs));
}
 
Example #4
Source File: ExportTransform.java    From DataflowTemplates with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the filename policy for the given destination table.
 *
 * <p>The target file is {@code baseDir/<uniqueId>/<destination>.avro}, where the unique id
 * is read from the {@code uniqueIdView} side input. Shard template and suffix are left
 * null (Beam's defaults apply) and the policy is unwindowed.
 */
@Override
public FileBasedSink.FilenamePolicy getFilenamePolicy(final String destination) {
  final String uniqueId = sideInput(uniqueIdView);
  // Resolve the per-destination file path relative to the (deferred) base directory.
  final SerializableFunction<ResourceId, ResourceId> resolveFile =
      base ->
          base.resolve(
              GcsUtil.joinPath(uniqueId, destination + ".avro"),
              ResolveOptions.StandardResolveOptions.RESOLVE_FILE);
  return DefaultFilenamePolicy.fromStandardParameters(
      ValueProvider.NestedValueProvider.of(baseDir, resolveFile), null, null, false);
}
 
Example #5
Source File: PTransformMatchersTest.java    From beam with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that the runner-determined-sharding matcher accepts a {@link WriteFiles} only
 * when neither a fixed shard count nor a custom sharding function has been configured.
 */
@Test
public void writeWithRunnerDeterminedSharding() {
  ResourceId outputDirectory = LocalResources.fromString("/foo/bar", true /* isDirectory */);
  FilenamePolicy filenamePolicy =
      DefaultFilenamePolicy.fromStandardParameters(
          StaticValueProvider.of(outputDirectory),
          DefaultFilenamePolicy.DEFAULT_UNWINDOWED_SHARD_TEMPLATE,
          "",
          false);

  // The sink's write operation is never exercised here; only the sharding config matters.
  FileBasedSink<Integer, Void, Integer> sink =
      new FileBasedSink<Integer, Void, Integer>(
          StaticValueProvider.of(outputDirectory),
          DynamicFileDestinations.constant(filenamePolicy)) {
        @Override
        public WriteOperation<Void, Integer> createWriteOperation() {
          return null;
        }
      };
  WriteFiles<Integer, Void, Integer> runnerSharded = WriteFiles.to(sink);

  // No sharding configured: the runner decides, so the matcher must match.
  assertThat(
      PTransformMatchers.writeWithRunnerDeterminedSharding().matches(appliedWrite(runnerSharded)),
      is(true));

  // A fixed shard count disables runner-determined sharding.
  assertThat(
      PTransformMatchers.writeWithRunnerDeterminedSharding()
          .matches(appliedWrite(runnerSharded.withNumShards(3))),
      is(false));

  // A custom sharding function likewise disables it.
  assertThat(
      PTransformMatchers.writeWithRunnerDeterminedSharding()
          .matches(
              appliedWrite(runnerSharded.withSharding(Sum.integersGlobally().asSingletonView()))),
      is(false));
}