Java Code Examples for org.apache.beam.sdk.options.PipelineOptionsFactory#as()

The following examples show how to use org.apache.beam.sdk.options.PipelineOptionsFactory#as(). Each example is drawn from an open-source project; the source file, originating project, and license are noted above each snippet.
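PipelineOptionsFactory.as(Class) returns a dynamic proxy implementing the given PipelineOptions sub-interface, with each option initialized to its declared default. Below is a minimal sketch of the pattern, assuming a hypothetical MyOptions interface; the interface name and its inputPath option are for illustration only and do not appear in the examples that follow.

import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class MyOptionsExample {

  // Hypothetical options interface, for illustration only.
  public interface MyOptions extends PipelineOptions {
    @Description("Input path for the pipeline.")
    @Default.String("/tmp/input")
    String getInputPath();

    void setInputPath(String value);
  }

  public static void main(String[] args) {
    // as() creates a strongly-typed proxy; the getter returns the
    // @Default value until the corresponding setter is called.
    MyOptions options = PipelineOptionsFactory.as(MyOptions.class);
    options.setInputPath("gs://my-bucket/input");
    System.out.println(options.getInputPath());
  }
}

Because every PipelineOptions instance also exposes an as(Class) method, the same instance can later be viewed as another options interface via options.as(OtherOptions.class), which is why the examples below freely create runner-specific types such as FlinkPipelineOptions and DataflowPipelineOptions.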
Example 1
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldSupportIPv6Streaming() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(FlinkRunner.class);

  options.setFlinkMaster("[FE80:CD00:0000:0CDE:1257:0000:211E:729C]:1234");
  StreamExecutionEnvironment sev =
      FlinkExecutionEnvironments.createStreamExecutionEnvironment(
          options, Collections.emptyList());
  checkHostAndPort(sev, "FE80:CD00:0000:0CDE:1257:0000:211E:729C", 1234);

  options.setFlinkMaster("FE80:CD00:0000:0CDE:1257:0000:211E:729C");
  sev =
      FlinkExecutionEnvironments.createStreamExecutionEnvironment(
          options, Collections.emptyList());
  checkHostAndPort(
      sev, "FE80:CD00:0000:0CDE:1257:0000:211E:729C", RestOptions.PORT.defaultValue());
}
 
Example 2
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldTreatAutoAndEmptyHostTheSameBatch() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(FlinkRunner.class);

  ExecutionEnvironment bev =
      FlinkExecutionEnvironments.createBatchExecutionEnvironment(
          options, Collections.emptyList());

  options.setFlinkMaster("[auto]");

  ExecutionEnvironment bev2 =
      FlinkExecutionEnvironments.createBatchExecutionEnvironment(
          options, Collections.emptyList());

  assertEquals(bev.getClass(), bev2.getClass());
}
 
Example 3
Source File: FlinkPipelineExecutionEnvironmentTest.java    From beam with Apache License 2.0
@Test
public void shouldUsePreparedFilesOnRemoteStreamEnvironment() throws Exception {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(TestFlinkRunner.class);
  options.setFlinkMaster("clusterAddress");
  options.setStreaming(true);

  FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options);

  Pipeline pipeline = Pipeline.create(options);
  flinkEnv.translate(pipeline);

  StreamExecutionEnvironment streamExecutionEnvironment =
      flinkEnv.getStreamExecutionEnvironment();
  assertThat(streamExecutionEnvironment, instanceOf(RemoteStreamEnvironment.class));

  List<URL> jarFiles = getJars(streamExecutionEnvironment);

  List<URL> urlConvertedStagedFiles = convertFilesToURLs(options.getFilesToStage());

  assertThat(jarFiles, is(urlConvertedStagedFiles));
}
 
Example 4
Source File: DataflowPipelineOptionsTest.java    From beam with Apache License 2.0
@Test
public void testUserNameIsLong() {
  resetDateTimeProviderRule.setDateTimeFixed("2014-12-08T19:07:06.698Z");
  System.getProperties().put("user.name", "abcdeabcdeabcdeabcdeabcdeabcde");
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("1234567890");
  List<String> nameComponents = Splitter.on('-').splitToList(options.getJobName());
  assertEquals(4, nameComponents.size());
  assertEquals("a234567890", nameComponents.get(0));
  assertEquals("abcdeabcdeabcdeabcdeabcdeabcde", nameComponents.get(1));
  assertEquals("1208190706", nameComponents.get(2));
  // Verify the last component is a hex integer (unsigned).
  Long.parseLong(nameComponents.get(3), 16);
}
 
Example 5
Source File: DataflowWorkUnitClientTest.java    From beam with Apache License 2.0
@Before
public void setUp() throws Exception {
  MockitoAnnotations.initMocks(this);
  when(transport.buildRequest(anyString(), anyString())).thenReturn(request);
  doCallRealMethod().when(request).getContentAsString();

  Dataflow service = new Dataflow(transport, Transport.getJsonFactory(), null);
  pipelineOptions = PipelineOptionsFactory.as(DataflowWorkerHarnessOptions.class);
  pipelineOptions.setProject(PROJECT_ID);
  pipelineOptions.setJobId(JOB_ID);
  pipelineOptions.setWorkerId(WORKER_ID);
  pipelineOptions.setGcpCredential(new TestCredential());
  pipelineOptions.setDataflowClient(service);
  pipelineOptions.setRegion("us-central1");
}
 
Example 6
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldAllowPortOmissionForRemoteEnvironmentStreaming() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(FlinkRunner.class);
  options.setFlinkMaster("host");

  StreamExecutionEnvironment sev =
      FlinkExecutionEnvironments.createStreamExecutionEnvironment(
          options, Collections.emptyList());

  assertThat(sev, instanceOf(RemoteStreamEnvironment.class));
  checkHostAndPort(sev, "host", RestOptions.PORT.defaultValue());
}
 
Example 7
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldAutoSetIdleSourcesFlagWithoutCheckpointing() {
  // Checkpointing disabled, shut down sources immediately
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  FlinkExecutionEnvironments.createStreamExecutionEnvironment(options, Collections.emptyList());
  assertThat(options.getShutdownSourcesAfterIdleMs(), is(0L));
}
 
Example 8
Source File: WorkItemStatusClientTest.java    From beam with Apache License 2.0
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  executionContext = BatchModeExecutionContext.forTesting(options, "testStage");
  statusClient = new WorkItemStatusClient(workUnitClient, workItem);
}
 
Example 9
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldInferParallelismFromEnvironmentBatch() throws IOException {
  String flinkConfDir = extractFlinkConfig();

  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(TestFlinkRunner.class);
  options.setFlinkMaster("host:80");

  ExecutionEnvironment bev =
      FlinkExecutionEnvironments.createBatchExecutionEnvironment(
          options, Collections.emptyList(), flinkConfDir);

  assertThat(options.getParallelism(), is(23));
  assertThat(bev.getParallelism(), is(23));
}
 
Example 10
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldAllowPortOmissionForRemoteEnvironmentBatch() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(FlinkRunner.class);
  options.setFlinkMaster("host");

  ExecutionEnvironment bev =
      FlinkExecutionEnvironments.createBatchExecutionEnvironment(
          options, Collections.emptyList());

  assertThat(bev, instanceOf(RemoteEnvironment.class));
  checkHostAndPort(bev, "host", RestOptions.PORT.defaultValue());
}
 
Example 11
Source File: WindmillStateInternalsTest.java    From beam with Apache License 2.0
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  options = PipelineOptionsFactory.as(DataflowWorkerHarnessOptions.class);
  cache = new WindmillStateCache(options.getWorkerCacheMb());
  resetUnderTest();
}
 
Example 12
Source File: FlinkExecutionEnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void shouldParsePortForRemoteEnvironmentBatch() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(FlinkRunner.class);
  options.setFlinkMaster("host:1234");

  ExecutionEnvironment bev =
      FlinkExecutionEnvironments.createBatchExecutionEnvironment(
          options, Collections.emptyList());

  assertThat(bev, instanceOf(RemoteEnvironment.class));
  checkHostAndPort(bev, "host", 1234);
}
 
Example 13
Source File: SnowflakeCredentialsFactoryTest.java    From beam with Apache License 2.0
@Test
public void usernamePasswordTest() {
  SnowflakePipelineOptions options = PipelineOptionsFactory.as(SnowflakePipelineOptions.class);
  options.setUsername("username");
  options.setPassword("password");

  SnowflakeCredentials credentials = SnowflakeCredentialsFactory.of(options);

  assertEquals(UsernamePasswordSnowflakeCredentials.class, credentials.getClass());
}
 
Example 14
Source File: ExecutableStageDoFnOperatorTest.java    From beam with Apache License 2.0
@SuppressWarnings("rawtypes")
private ExecutableStageDoFnOperator getOperator(
    TupleTag<Integer> mainOutput,
    List<TupleTag<?>> additionalOutputs,
    DoFnOperator.MultiOutputOutputManagerFactory<Integer> outputManagerFactory,
    WindowingStrategy windowingStrategy,
    @Nullable Coder keyCoder,
    Coder windowedInputCoder) {

  FlinkExecutableStageContextFactory contextFactory =
      Mockito.mock(FlinkExecutableStageContextFactory.class);
  when(contextFactory.get(any())).thenReturn(stageContext);

  final ExecutableStagePayload stagePayload;
  if (keyCoder != null) {
    stagePayload = this.stagePayloadWithUserState;
  } else {
    stagePayload = this.stagePayload;
  }

  ExecutableStageDoFnOperator<Integer, Integer> operator =
      new ExecutableStageDoFnOperator<>(
          "transform",
          windowedInputCoder,
          Collections.emptyMap(),
          mainOutput,
          additionalOutputs,
          outputManagerFactory,
          Collections.emptyMap() /* sideInputTagMapping */,
          Collections.emptyList() /* sideInputs */,
          Collections.emptyMap() /* sideInputId mapping */,
          PipelineOptionsFactory.as(FlinkPipelineOptions.class),
          stagePayload,
          jobInfo,
          contextFactory,
          createOutputMap(mainOutput, additionalOutputs),
          windowingStrategy,
          keyCoder,
          keyCoder != null ? new KvToByteBufferKeySelector<>(keyCoder) : null);

  Whitebox.setInternalState(operator, "stateRequestHandler", stateRequestHandler);
  return operator;
}
 
Example 15
Source File: DoFnTransformTest.java    From incubator-nemo with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testCountBundle() {

  final TupleTag<String> outputTag = new TupleTag<>("main-output");
  final NemoPipelineOptions pipelineOptions = PipelineOptionsFactory.as(NemoPipelineOptions.class);
  pipelineOptions.setMaxBundleSize(3L);
  pipelineOptions.setMaxBundleTimeMills(10000000L);

  final List<Integer> bundleOutput = new ArrayList<>();

  final DoFnTransform<String, String> doFnTransform =
    new DoFnTransform<>(
      new BundleTestDoFn(bundleOutput),
      NULL_INPUT_CODER,
      NULL_OUTPUT_CODERS,
      outputTag,
      Collections.emptyList(),
      WindowingStrategy.globalDefault(),
      pipelineOptions,
      DisplayData.none(),
      DoFnSchemaInformation.create(),
      Collections.emptyMap());

  final Transform.Context context = mock(Transform.Context.class);
  final OutputCollector<WindowedValue<String>> oc = new TestOutputCollector<>();
  doFnTransform.prepare(context, oc);

  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));
  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));
  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));

  assertEquals(3, (int) bundleOutput.get(0));

  bundleOutput.clear();

  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));
  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));
  doFnTransform.onData(WindowedValue.valueInGlobalWindow("a"));

  assertEquals(3, (int) bundleOutput.get(0));

  doFnTransform.close();
}
 
Example 16
Source File: DoFnOperatorTest.java    From beam with Apache License 2.0
@Test
public void testBundleProcessingExceptionIsFatalDuringCheckpointing() throws Exception {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setMaxBundleSize(10L);
  options.setCheckpointingInterval(1L);

  TupleTag<String> outputTag = new TupleTag<>("main-output");

  StringUtf8Coder coder = StringUtf8Coder.of();
  WindowedValue.ValueOnlyWindowedValueCoder<String> windowedValueCoder =
      WindowedValue.getValueOnlyCoder(coder);

  DoFnOperator.MultiOutputOutputManagerFactory<String> outputManagerFactory =
      new DoFnOperator.MultiOutputOutputManagerFactory(
          outputTag,
          WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE));

  @SuppressWarnings("unchecked")
  DoFnOperator doFnOperator =
      new DoFnOperator<>(
          new IdentityDoFn() {
            @FinishBundle
            public void finishBundle() {
              throw new RuntimeException("something went wrong here");
            }
          },
          "stepName",
          windowedValueCoder,
          Collections.emptyMap(),
          outputTag,
          Collections.emptyList(),
          outputManagerFactory,
          WindowingStrategy.globalDefault(),
          new HashMap<>(), /* side-input mapping */
          Collections.emptyList(), /* side inputs */
          options,
          null,
          null,
          DoFnSchemaInformation.create(),
          Collections.emptyMap());

  @SuppressWarnings("unchecked")
  OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> testHarness =
      new OneInputStreamOperatorTestHarness<>(doFnOperator);

  testHarness.open();

  // start a bundle
  testHarness.processElement(
      new StreamRecord<>(WindowedValue.valueInGlobalWindow("regular element")));

  // Make sure we throw Error, not a regular Exception.
  // A regular exception would just cause the checkpoint to fail.
  assertThrows(Error.class, () -> testHarness.snapshot(0, 0));
}
 
Example 17
Source File: HourlyTeamScoreTest.java    From beam with Apache License 2.0
@Test
public void testUserScoreOptions() {
  // Constructing the options is the whole test: PipelineOptionsFactory.as()
  // validates that the HourlyTeamScore.Options interface is well-formed
  // (matching getter/setter pairs, supported types) and throws otherwise.
  PipelineOptionsFactory.as(HourlyTeamScore.Options.class);
}
 
Example 18
Source File: NemoRunner.java    From incubator-nemo with Apache License 2.0
/**
 * Creates and returns a new NemoRunner with default options.
 *
 * @return A pipeline runner with default options.
 */
public static NemoRunner create() {
  NemoPipelineOptions options = PipelineOptionsFactory.as(NemoPipelineOptions.class);
  options.setRunner(NemoRunner.class);
  return new NemoRunner(options);
}
 
Example 19
Source File: EnvironmentsTest.java    From beam with Apache License 2.0
@Test
public void createEnvironments() throws IOException {
  PortablePipelineOptions options = PipelineOptionsFactory.as(PortablePipelineOptions.class);
  options.setDefaultEnvironmentType(Environments.ENVIRONMENT_DOCKER);
  options.setDefaultEnvironmentConfig("java");
  assertThat(
      Environments.createOrGetDefaultEnvironment(options),
      is(
          Environment.newBuilder()
              .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER))
              .setPayload(
                  DockerPayload.newBuilder().setContainerImage("java").build().toByteString())
              .addAllCapabilities(Environments.getJavaCapabilities())
              .build()));
  options.setDefaultEnvironmentType(Environments.ENVIRONMENT_PROCESS);
  options.setDefaultEnvironmentConfig(
      "{\"os\": \"linux\", \"arch\": \"amd64\", \"command\": \"run.sh\", \"env\":{\"k1\": \"v1\", \"k2\": \"v2\"} }");
  assertThat(
      Environments.createOrGetDefaultEnvironment(options),
      is(
          Environment.newBuilder()
              .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS))
              .setPayload(
                  ProcessPayload.newBuilder()
                      .setOs("linux")
                      .setArch("amd64")
                      .setCommand("run.sh")
                      .putEnv("k1", "v1")
                      .putEnv("k2", "v2")
                      .build()
                      .toByteString())
              .addAllCapabilities(Environments.getJavaCapabilities())
              .build()));
  options.setDefaultEnvironmentType(Environments.ENVIRONMENT_PROCESS);
  options.setDefaultEnvironmentConfig("{\"command\": \"run.sh\"}");
  assertThat(
      Environments.createOrGetDefaultEnvironment(options),
      is(
          Environment.newBuilder()
              .setUrn(BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS))
              .setPayload(ProcessPayload.newBuilder().setCommand("run.sh").build().toByteString())
              .addAllCapabilities(Environments.getJavaCapabilities())
              .build()));
}
 
Example 20
Source File: WorkerCustomSourcesSplitOnlySourceTest.java    From beam with Apache License 2.0
@Test
public void testAllSplitsAreReturned() throws Exception {
  final long apiSizeLimitForTest = 500 * 1024;
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("TestAppName");
  options.setProject("test-project");
  options.setRegion("some-region1");
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setRunner(DataflowRunner.class);
  options.setPathValidatorClass(NoopPathValidator.class);
  // Generate a CountingSource and split it into the desired number of splits
  // (desired size = 1 byte), triggering the re-split with a larger bundle size.
  // Thus below we expect to produce 'numberOfSplits' splits.
  com.google.api.services.dataflow.model.Source source =
      WorkerCustomSourcesTest.translateIOToCloudSource(
          CountingSource.upTo(numberOfSplits), options);
  SourceSplitResponse split =
      WorkerCustomSourcesTest.performSplit(
          source, options, 1L, null /* numBundles limit */, apiSizeLimitForTest);
  assertThat(
      split.getBundles().size(),
      lessThanOrEqualTo(WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT));

  List<OffsetBasedSource<?>> originalSplits = new ArrayList<>(numberOfSplits);
  // Collect all the splits
  for (DerivedSource derivedSource : split.getBundles()) {
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    if (deserializedSource instanceof SplittableOnlyBoundedSource) {
      SplittableOnlyBoundedSource<?> splittableOnlySource =
          (SplittableOnlyBoundedSource<?>) deserializedSource;
      originalSplits.addAll((List) splittableOnlySource.split(1L, options));
    } else {
      originalSplits.add((OffsetBasedSource<?>) deserializedSource);
    }
  }

  assertEquals(numberOfSplits, originalSplits.size());
  for (int i = 0; i < originalSplits.size(); i++) {
    OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) originalSplits.get(i);
    assertEquals(i, offsetBasedSource.getStartOffset());
    assertEquals(i + 1, offsetBasedSource.getEndOffset());
  }
}