org.apache.samza.config.MapConfig Java Examples

The following examples show how to use org.apache.samza.config.MapConfig. The originating project and source file are noted above each example.
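
For orientation before the examples, here is a minimal, self-contained sketch of the class itself. It is not taken from any of the projects below: the keys mirror ones used in the examples, the task.commit.ms lookup and its default are purely illustrative, and it assumes samza-api is on the classpath. MapConfig copies a plain Map<String, String> into Samza's read-only Config, which the examples then hand to runners, stores, and job coordinators.

import java.util.HashMap;
import java.util.Map;

import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;

public class MapConfigSketch {
  public static void main(String[] args) {
    // MapConfig copies the entries of a plain map into a read-only Config.
    Map<String, String> map = new HashMap<>();
    map.put("job.name", "testName");
    map.put("job.id", "testId");
    map.put("systems.testSamza.test", "test");
    Config config = new MapConfig(map);

    // Typed getters with defaults, as used throughout the tests below.
    String jobName = config.get("job.name");
    long commitMs = config.getLong("task.commit.ms", 60000L); // absent key -> default

    // subset(prefix, false) keeps only the entries under the prefix without stripping it,
    // e.g. everything configured for the "testSamza" system (see Example #19).
    Config systemConfig = config.subset("systems.testSamza.", false);

    // Several maps/configs can be combined into one, as in Example #16.
    Config combined = new MapConfig(map, systemConfig);

    System.out.println(jobName + " " + commitMs + " " + combined);
  }
}
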
Example #1
Source File: TestDefaultCoordinatorStreamConfigFactory.java    From samza with Apache License 2.0
@Test
public void testBuildCoordinatorStreamConfigWithJobName() {
  Map<String, String> mapConfig = new HashMap<>();
  mapConfig.put("job.name", "testName");
  mapConfig.put("job.id", "testId");
  mapConfig.put("job.coordinator.system", "testSamza");
  mapConfig.put("test.only", "nothing");
  mapConfig.put("systems.testSamza.test", "test");

  Config config = factory.buildCoordinatorStreamConfig(new MapConfig(mapConfig));

  Map<String, String> expectedMap = new HashMap<>();
  expectedMap.put("job.name", "testName");
  expectedMap.put("job.id", "testId");
  expectedMap.put("systems.testSamza.test", "test");
  expectedMap.put(JobConfig.JOB_COORDINATOR_SYSTEM, "testSamza");
  expectedMap.put(JobConfig.MONITOR_PARTITION_CHANGE_FREQUENCY_MS, "300000");

  assertEquals(config, new MapConfig(expectedMap));
}
 
Example #2
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndSubQuery() throws Exception {
  int numMessages = 20;
  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql1 =
      "Insert into testavro.outputTopic(id, bool_value) select Flatten(a) as id, true as bool_value"
          + " from (select MyTestArray(id) a from testavro.SIMPLE1)";
  List<String> sqlStmts = Collections.singletonList(sql1);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<OutgoingMessageEnvelope> outMessages = new ArrayList<>(TestAvroSystemFactory.messages);

  int expectedMessages = 0;
  // Flatten de-normalizes the data, so there is a separate record for each entry in the array.
  for (int index = 1; index < numMessages; index++) {
    expectedMessages = expectedMessages + Math.max(1, index);
  }
  Assert.assertEquals(expectedMessages, outMessages.size());
}
 
Example #3
Source File: SamzaTimerInternalsFactoryTest.java    From beam with Apache License 2.0
private static KeyValueStore<ByteArray, byte[]> createStore(String name) {
  final Options options = new Options();
  options.setCreateIfMissing(true);

  RocksDbKeyValueStore rocksStore =
      new RocksDbKeyValueStore(
          new File(System.getProperty("java.io.tmpdir") + "/" + name),
          options,
          new MapConfig(),
          false,
          "beamStore",
          new WriteOptions(),
          new FlushOptions(),
          new KeyValueStoreMetrics("beamStore", new MetricsRegistryMap()));

  return new SerializedKeyValueStore<>(
      rocksStore,
      new ByteArraySerdeFactory.ByteArraySerde(),
      new ByteSerde(),
      new SerializedKeyValueStoreMetrics("beamStore", new MetricsRegistryMap()));
}
 
Example #4
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndWithBooleanCheck() throws Exception {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql1 = "Insert into testavro.outputTopic"
      + " select * from testavro.COMPLEX1 where bool_value IS TRUE";
  List<String> sqlStmts = Arrays.asList(sql1);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<OutgoingMessageEnvelope> outMessages = new ArrayList<>(TestAvroSystemFactory.messages);
  Assert.assertEquals(numMessages / 2, outMessages.size());
}
 
Example #5
Source File: TestQueryTranslator.java    From samza with Apache License 2.0
@Test (expected = SamzaException.class)
public void testTranslateStreamStreamJoin() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 1);
  String sql =
      "Insert into testavro.enrichedPageViewTopic(profileName, pageKey)"
          + " select p.name as profileName, pv.pageKey"
          + " from testavro.PAGEVIEW as pv"
          + " join testavro.PROFILE as p"
          + " on p.id = pv.profileId";
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql);
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  List<SamzaSqlQueryParser.QueryInfo> queryInfo = fetchQueryInfo(sqlStmts);
  SamzaSqlApplicationConfig samzaSqlApplicationConfig = new SamzaSqlApplicationConfig(new MapConfig(config),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSources).flatMap(Collection::stream)
          .collect(Collectors.toList()),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSink).collect(Collectors.toList()));

  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(streamApp -> { }, samzaConfig);
  QueryTranslator translator = new QueryTranslator(streamAppDesc, samzaSqlApplicationConfig);
  translator.translate(queryInfo.get(0), streamAppDesc, 0);
}
 
Example #6
Source File: TestJobModelManager.java    From samza with Apache License 2.0
@Test
public void testGetProcessorLocalityNewContainer() {
  Config config = new MapConfig(ImmutableMap.of(JobConfig.JOB_CONTAINER_COUNT, "2"));

  Map<String, Map<String, String>> localityMappings = new HashMap<>();
  // 2 containers, but only return 1 existing mapping
  localityMappings.put("0", ImmutableMap.of(SetContainerHostMapping.HOST_KEY, "abc-affinity"));
  LocalityManager mockLocalityManager = mock(LocalityManager.class);
  when(mockLocalityManager.readContainerLocality()).thenReturn(localityMappings);

  Map<String, LocationId> processorLocality = JobModelManager.getProcessorLocality(config, mockLocalityManager);

  Mockito.verify(mockLocalityManager).readContainerLocality();
  ImmutableMap<String, LocationId> expected = ImmutableMap.of(
      // found entry in existing locality
      "0", new LocationId("abc-affinity"),
      // no entry in existing locality
      "1", new LocationId("ANY_HOST"));
  Assert.assertEquals(expected, processorLocality);
}
 
Example #7
Source File: TestStreamProcessor.java    From samza with Apache License 2.0
/**
 * Should be able to create task instances from the provided task factory.
 */
@Test
public void testStreamProcessorWithAsyncStreamTaskFactory() {
  final String testSystem = "test-system";
  final String inputTopic = "numbers3";
  final String outputTopic = "output3";
  final int messageCount = 20;

  final Config configs = new MapConfig(createConfigs("1", testSystem, inputTopic, outputTopic, messageCount));
  final ExecutorService executorService = Executors.newSingleThreadExecutor();
  createTopics(inputTopic, outputTopic);
  final AsyncStreamTaskFactory stf = () -> new AsyncStreamTaskAdapter(new IdentityStreamTask(), executorService);
  final TestStubs stubs = new TestStubs(configs, stf, bootstrapServers());

  produceMessages(stubs.producer, inputTopic, messageCount);
  run(stubs.processor, stubs.shutdownLatch);
  verifyNumMessages(stubs.consumer, outputTopic, messageCount);
  executorService.shutdownNow();
}
 
Example #8
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndWithProjectionWithCase() throws Exception {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql1 = "Insert into testavro.outputTopic(id, long_value) "
      + " select id, NOT(id = 5) as bool_value, CASE WHEN id IN (5, 6, 7) THEN CAST('foo' AS VARCHAR) WHEN id < 5 THEN CAST('bars' AS VARCHAR) ELSE NULL END as string_value from testavro.SIMPLE1";
  List<String> sqlStmts = Arrays.asList(sql1);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<Integer> outMessages = TestAvroSystemFactory.messages.stream()
      .map(x -> Integer.valueOf(((GenericRecord) x.getMessage()).get("id").toString()))
      .sorted()
      .collect(Collectors.toList());
  Assert.assertEquals(numMessages, outMessages.size());
  Assert.assertTrue(IntStream.range(0, numMessages).boxed().collect(Collectors.toList()).equals(outMessages));
}
 
Example #9
Source File: TestZkJobCoordinator.java    From samza with Apache License 2.0
@Test
public void testShouldStopPartitionCountMonitorWhenStoppingTheJobCoordinator() {
  ZkKeyBuilder keyBuilder = Mockito.mock(ZkKeyBuilder.class);
  ZkClient mockZkClient = Mockito.mock(ZkClient.class);
  when(keyBuilder.getJobModelVersionBarrierPrefix()).thenReturn(TEST_BARRIER_ROOT);

  ZkUtils zkUtils = Mockito.mock(ZkUtils.class);
  when(zkUtils.getKeyBuilder()).thenReturn(keyBuilder);
  when(zkUtils.getZkClient()).thenReturn(mockZkClient);
  when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));

  ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);

  ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator("TEST_PROCESSOR_ID", new MapConfig(),
      new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));

  StreamPartitionCountMonitor monitor = Mockito.mock(StreamPartitionCountMonitor.class);
  zkJobCoordinator.debounceTimer = mockDebounceTimer;
  zkJobCoordinator.streamPartitionCountMonitor = monitor;

  zkJobCoordinator.stop();

  Mockito.verify(monitor).stop();
}
 
Example #10
Source File: TestStreamAppender.java    From samza with Apache License 2.0
@Test
public void testNonDefaultSerde() {
  System.setProperty("samza.container.name", "samza-container-1");
  String streamName = StreamAppender.getStreamName("log4jTest", "1");
  Map<String, String> map = new HashMap<String, String>();
  map.put("job.name", "log4jTest");
  map.put("job.id", "1");
  map.put("serializers.registry.log4j-string.class", LoggingEventStringSerdeFactory.class.getCanonicalName());
  map.put("systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName());
  map.put("systems.mock.streams." + streamName + ".samza.msg.serde", "log4j-string");
  map.put("task.log4j.system", "mock");
  MockSystemProducerAppender systemProducerAppender = new MockSystemProducerAppender(new MapConfig(map));
  PatternLayout layout = new PatternLayout();
  layout.setConversionPattern("%m");
  systemProducerAppender.setLayout(layout);
  systemProducerAppender.activateOptions();
  assertNotNull(systemProducerAppender.getSerde());
  assertEquals(LoggingEventStringSerde.class, systemProducerAppender.getSerde().getClass());
}
 
Example #11
Source File: TestStreamAppender.java    From samza with Apache License 2.0
@Test
public void testNonDefaultSerde() {
  System.setProperty("samza.container.name", "samza-container-1");
  String streamName = StreamAppender.getStreamName("log4jTest", "1");
  Map<String, String> map = new HashMap<String, String>();
  map.put("job.name", "log4jTest");
  map.put("job.id", "1");
  map.put("serializers.registry.log4j-string.class", LoggingEventStringSerdeFactory.class.getCanonicalName());
  map.put("systems.mock.samza.factory", MockSystemFactory.class.getCanonicalName());
  map.put("systems.mock.streams." + streamName + ".samza.msg.serde", "log4j-string");
  map.put("task.log4j.system", "mock");
  PatternLayout layout = PatternLayout.newBuilder().withPattern("%m").build();
  MockSystemProducerAppender systemProducerAppender = MockSystemProducerAppender.createAppender("testName", null, layout, false, new MapConfig(map), null);
  systemProducerAppender.start();
  assertNotNull(systemProducerAppender.getSerde());
  assertEquals(LoggingEventStringSerde.class, systemProducerAppender.getSerde().getClass());
}
 
Example #12
Source File: TestStreamProcessor.java    From samza with Apache License 2.0
/**
 * Should be able to create task instances from the provided task factory.
 */
@Test
public void testStreamProcessorWithStreamTaskFactory() {
  final String testSystem = "test-system";
  final String inputTopic = "numbers2";
  final String outputTopic = "output2";
  final int messageCount = 20;

  final Config configs = new MapConfig(createConfigs("1", testSystem, inputTopic, outputTopic, messageCount));
  createTopics(inputTopic, outputTopic);
  final TestStubs stubs = new TestStubs(configs, IdentityStreamTask::new, bootstrapServers());

  produceMessages(stubs.producer, inputTopic, messageCount);
  run(stubs.processor, stubs.shutdownLatch);
  verifyNumMessages(stubs.consumer, outputTopic, messageCount);
}
 
Example #13
Source File: TestTableConfigGenerator.java    From samza with Apache License 2.0
@Test
public void testWithSerdes() {
  List<TableDescriptor> descriptors = Arrays.asList(
      new MockLocalTableDescriptor("t1", KVSerde.of(new StringSerde(), new IntegerSerde())),
      new MockLocalTableDescriptor("t2", KVSerde.of(new StringSerde(), new IntegerSerde()))
  );
  Config jobConfig = new MapConfig(TableConfigGenerator.generateSerdeConfig(descriptors));
  JavaTableConfig javaTableConfig = new JavaTableConfig(jobConfig);
  assertNotNull(javaTableConfig.getKeySerde("t1"));
  assertNotNull(javaTableConfig.getMsgSerde("t1"));
  assertNotNull(javaTableConfig.getKeySerde("t2"));
  assertNotNull(javaTableConfig.getMsgSerde("t2"));

  MapConfig tableConfig = new MapConfig(TableConfigGenerator.generate(jobConfig, descriptors));
  javaTableConfig = new JavaTableConfig(tableConfig);
  assertNotNull(javaTableConfig.getTableProviderFactory("t1"));
  assertNotNull(javaTableConfig.getTableProviderFactory("t2"));
}
 
Example #14
Source File: TestJobModel.java    From samza with Apache License 2.0
@Test
public void testMaxChangeLogStreamPartitions() {
  Config config = new MapConfig(ImmutableMap.of("a", "b"));
  Map<TaskName, TaskModel> tasksForContainer1 = ImmutableMap.of(
      new TaskName("t1"), new TaskModel(new TaskName("t1"), ImmutableSet.of(), new Partition(0)),
      new TaskName("t2"), new TaskModel(new TaskName("t2"), ImmutableSet.of(), new Partition(1)));
  Map<TaskName, TaskModel> tasksForContainer2 = ImmutableMap.of(
      new TaskName("t3"), new TaskModel(new TaskName("t3"), ImmutableSet.of(), new Partition(2)),
      new TaskName("t4"), new TaskModel(new TaskName("t4"), ImmutableSet.of(), new Partition(3)),
      new TaskName("t5"), new TaskModel(new TaskName("t5"), ImmutableSet.of(), new Partition(4)));
  ContainerModel containerModel1 = new ContainerModel("0", tasksForContainer1);
  ContainerModel containerModel2 = new ContainerModel("1", tasksForContainer2);
  Map<String, ContainerModel> containers = ImmutableMap.of("0", containerModel1, "1", containerModel2);
  JobModel jobModel = new JobModel(config, containers);
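  // Changelog partitions 0 through 4 are assigned across the five tasks, so the maximum changelog partition count is 5.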
  assertEquals(jobModel.maxChangeLogStreamPartitions, 5);
}
 
Example #15
Source File: TestStreamProcessor.java    From samza with Apache License 2.0
/**
 * Testing a basic identity stream task - reads data from a topic and writes it to another topic
 * (without any modifications)
 *
 * <p>
 * The standalone version in this test uses KafkaSystemFactory and a SingleContainerGrouperFactory. Hence,
 * no matter how many tasks are present, they will always run in a single processor instance. This simplifies testing.
 */
@Test
public void testStreamProcessor() {
  final String testSystem = "test-system";
  final String inputTopic = "numbers";
  final String outputTopic = "output";
  final int messageCount = 20;

  final Config configs = new MapConfig(createConfigs("1", testSystem, inputTopic, outputTopic, messageCount));
  // Note: createTopics needs to be called before creating a StreamProcessor. Otherwise it fails with a
  // TopicExistsException since StreamProcessor auto-creates them.
  createTopics(inputTopic, outputTopic);
  final TestStubs stubs = new TestStubs(configs, IdentityStreamTask::new, bootstrapServers());

  produceMessages(stubs.producer, inputTopic, messageCount);
  run(stubs.processor, stubs.shutdownLatch);
  verifyNumMessages(stubs.consumer, outputTopic, messageCount);
}
 
Example #16
Source File: TranslationContext.java    From beam with Apache License 2.0 6 votes vote down vote up
/** The dummy stream created will only be used in Beam tests. */
private static InputDescriptor<OpMessage<String>, ?> createDummyStreamDescriptor(String id) {
  final GenericSystemDescriptor dummySystem =
      new GenericSystemDescriptor(id, InMemorySystemFactory.class.getName());
  final GenericInputDescriptor<OpMessage<String>> dummyInput =
      dummySystem.getInputDescriptor(id, new NoOpSerde<>());
  dummyInput.withOffsetDefault(SystemStreamMetadata.OffsetType.OLDEST);
  final Config config = new MapConfig(dummyInput.toConfig(), dummySystem.toConfig());
  final SystemFactory factory = new InMemorySystemFactory();
  final StreamSpec dummyStreamSpec = new StreamSpec(id, id, id, 1);
  factory.getAdmin(id, config).createStream(dummyStreamSpec);

  final SystemProducer producer = factory.getProducer(id, config, null);
  final SystemStream sysStream = new SystemStream(id, id);
  final Consumer<Object> sendFn =
      (msg) -> {
        producer.send(id, new OutgoingMessageEnvelope(sysStream, 0, null, msg));
      };
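  // Build one dummy element, then push it, a max-timestamp watermark, and an end-of-stream marker into the stream.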
  final WindowedValue<String> windowedValue =
      WindowedValue.timestampedValueInGlobalWindow("dummy", new Instant());

  sendFn.accept(OpMessage.ofElement(windowedValue));
  sendFn.accept(new WatermarkMessage(BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()));
  sendFn.accept(new EndOfStreamMessage(null));
  return dummyInput;
}
 
Example #17
Source File: TestSamzaSqlApplicationRunner.java    From samza with Apache License 2.0
@Test
public void testComputeSamzaConfigs() {
  Map<String, String> configs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(10);
  String sql1 = "Insert into testavro.outputTopic(id,long_value) select id, MyTest(id) as long_value from testavro.SIMPLE1";
  configs.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql1);
  configs.put(SamzaSqlApplicationRunner.RUNNER_CONFIG, SamzaSqlApplicationRunner.class.getName());
  MapConfig samzaConfig = new MapConfig(configs);
  Config newConfigs = SamzaSqlApplicationRunner.computeSamzaConfigs(true, samzaConfig);
  Assert.assertEquals(newConfigs.get(SamzaSqlApplicationRunner.RUNNER_CONFIG), LocalApplicationRunner.class.getName());
  // Check whether five new configs were added.
  Assert.assertEquals(newConfigs.size(), configs.size() + 5);

  newConfigs = SamzaSqlApplicationRunner.computeSamzaConfigs(false, samzaConfig);
  Assert.assertEquals(newConfigs.get(SamzaSqlApplicationRunner.RUNNER_CONFIG), RemoteApplicationRunner.class.getName());

  // Check whether five new configs were added.
  Assert.assertEquals(newConfigs.size(), configs.size() + 5);
}
 
Example #18
Source File: TestJobNodeConfigurationGenerator.java    From samza with Apache License 2.0
@Test
public void testConfigRewriter() {
  Map<String, String> configs = new HashMap<>(mockConfig);
  String streamCfgToOverride = String.format("streams.%s.samza.system", intermediateInputDescriptor.getStreamId());
  configs.put(String.format(JobConfig.CONFIG_REWRITER_CLASS, "mock"), MockConfigRewriter.class.getName());
  configs.put(JobConfig.CONFIG_REWRITERS, "mock");
  configs.put(String.format("job.config.rewriter.mock.%s", streamCfgToOverride), "rewritten-system");
  mockConfig = spy(new MapConfig(configs));
  mockStreamAppDesc = new StreamApplicationDescriptorImpl(getRepartitionJoinStreamApplication(), mockConfig);
  configureJobNode(mockStreamAppDesc);

  JobNodeConfigurationGenerator configureGenerator = new JobNodeConfigurationGenerator();
  JobConfig jobConfig = configureGenerator.generateJobConfig(mockJobNode, "testJobGraphJson");
  Config expectedConfig = getExpectedJobConfig(mockConfig, mockJobNode.getInEdges());
  validateJobConfig(expectedConfig, jobConfig);
  assertEquals("rewritten-system", jobConfig.get(streamCfgToOverride));
}
 
Example #19
Source File: DefaultCoordinatorStreamConfigFactory.java    From samza with Apache License 2.0
@Override
public Config buildCoordinatorStreamConfig(Config config) {
  JobConfig jobConfig = new JobConfig(config);
  String jobName = jobConfig.getName().orElseThrow(() -> new ConfigException("Missing required config: job.name"));

  String jobId = jobConfig.getJobId();

  // Build a map with just the system config and job.name/job.id. This is what's required to start the JobCoordinator.
  Map<String, String> map = config.subset(String.format(SystemConfig.SYSTEM_ID_PREFIX, jobConfig.getCoordinatorSystemName()), false);
  Map<String, String> addConfig = new HashMap<>();
  addConfig.put(JobConfig.JOB_NAME, jobName);
  addConfig.put(JobConfig.JOB_ID, jobId);
  addConfig.put(JobConfig.JOB_COORDINATOR_SYSTEM, jobConfig.getCoordinatorSystemName());
  addConfig.put(JobConfig.MONITOR_PARTITION_CHANGE_FREQUENCY_MS, String.valueOf(jobConfig.getMonitorPartitionChangeFrequency()));

  addConfig.putAll(map);
  return new MapConfig(addConfig);
}
 
Example #20
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEnd() throws SamzaSqlValidatorException {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql = "Insert into testavro.simpleOutputTopic select * from testavro.SIMPLE1";
  List<String> sqlStmts = Arrays.asList(sql);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<Integer> outMessages = TestAvroSystemFactory.messages.stream()
      .map(x -> Integer.valueOf(((GenericRecord) x.getMessage()).get("id").toString()))
      .sorted()
      .collect(Collectors.toList());
  Assert.assertEquals(numMessages, outMessages.size());
  Assert.assertTrue(IntStream.range(0, numMessages).boxed().collect(Collectors.toList()).equals(outMessages));
}
 
Example #21
Source File: TestQueryTranslator.java    From samza with Apache License 2.0
@Test (expected = SamzaException.class)
public void testTranslateStreamTableJoinWithThetaCondition() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 1);
  String sql =
      "Insert into testavro.enrichedPageViewTopic(profileName, pageKey)"
          + " select p.name as profileName, pv.pageKey"
          + " from testavro.PAGEVIEW as pv"
          + " join testavro.PROFILE.`$table` as p"
          + " on p.id <> pv.profileId";
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql);
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  List<SamzaSqlQueryParser.QueryInfo> queryInfo = fetchQueryInfo(sqlStmts);
  SamzaSqlApplicationConfig samzaSqlApplicationConfig = new SamzaSqlApplicationConfig(new MapConfig(config),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSources).flatMap(Collection::stream)
          .collect(Collectors.toList()),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSink).collect(Collectors.toList()));

  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(streamApp -> { }, samzaConfig);
  QueryTranslator translator = new QueryTranslator(streamAppDesc, samzaSqlApplicationConfig);
  translator.translate(queryInfo.get(0), streamAppDesc, 0);
}
 
Example #22
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndCompoundBooleanCheckWorkaround() throws SamzaSqlValidatorException {

  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  // BUG: Compound boolean checks don't work in Calcite, so work around it by casting the boolean to a String.
  String sql1 = "Insert into testavro.outputTopic"
      + " select * from testavro.COMPLEX1 where id >= 0 and CAST(bool_value AS VARCHAR) =  'TRUE'";
  List<String> sqlStmts = Arrays.asList(sql1);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<OutgoingMessageEnvelope> outMessages = new ArrayList<>(TestAvroSystemFactory.messages);

  Assert.assertEquals(10, outMessages.size());
}
 
Example #23
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndWithDifferentSystemSameStream() throws SamzaSqlValidatorException {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql = "Insert into testavro2.SIMPLE1 select * from testavro.SIMPLE1";
  List<String> sqlStmts = Arrays.asList(sql);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<Integer> outMessages = TestAvroSystemFactory.messages.stream()
      .map(x -> Integer.valueOf(((GenericRecord) x.getMessage()).get("id").toString()))
      .sorted()
      .collect(Collectors.toList());
  Assert.assertEquals(numMessages, outMessages.size());
  Assert.assertTrue(IntStream.range(0, numMessages).boxed().collect(Collectors.toList()).equals(outMessages));
}
 
Example #24
Source File: MockEventHubConfigFactory.java    From samza with Apache License 2.0
public static Config getEventHubConfig(EventHubSystemProducer.PartitioningMethod partitioningMethod) {
  HashMap<String, String> mapConfig = new HashMap<>();
  mapConfig.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, SYSTEM_NAME), partitioningMethod.toString());
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, SYSTEM_NAME), STREAM_NAME1 + "," + STREAM_NAME2);

  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, STREAM_NAME1), EVENTHUB_NAMESPACE);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, STREAM_NAME1), EVENTHUB_ENTITY1);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, STREAM_NAME1), EVENTHUB_KEY_NAME);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, STREAM_NAME1), EVENTHUB_KEY);

  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, STREAM_NAME2), EVENTHUB_NAMESPACE);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, STREAM_NAME2), EVENTHUB_ENTITY2);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, STREAM_NAME2), EVENTHUB_KEY_NAME);
  mapConfig.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, STREAM_NAME2), EVENTHUB_KEY);

  return new MapConfig(mapConfig);
}
 
Example #25
Source File: TestKinesisConfig.java    From samza with Apache License 2.0
@Test
public void testKinesisConfigs() {
  Map<String, String> kv = new HashMap<>();
  String system = "kinesis";
  String stream = "kinesis-stream";
  String systemConfigPrefix = String.format("systems.%s.", system);
  String ssConfigPrefix = String.format("systems.%s.streams.%s.", system, stream);

  kv.put("sensitive." + ssConfigPrefix + "aws.secretKey", "secretKey");
  kv.put(systemConfigPrefix + "aws.region", "us-east-1");
  kv.put(ssConfigPrefix + "aws.accessKey", "accessKey");

  Config config = new MapConfig(kv);
  KinesisConfig kConfig = new KinesisConfig(config);

  assertEquals("us-east-1", kConfig.getRegion(system, stream).getName());
  assertEquals("accessKey", kConfig.getStreamAccessKey(system, stream));
  assertEquals("secretKey", kConfig.getStreamSecretKey(system, stream));
}
 
Example #26
Source File: TestQueryTranslator.java    From samza with Apache License 2.0
@Test (expected = SamzaException.class)
public void testTranslateStreamTableJoinWithFullJoinOperator() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 1);
  String sql =
      "Insert into testavro.enrichedPageViewTopic(profileName, pageKey)"
          + " select p.name as profileName, pv.pageKey"
          + " from testavro.PAGEVIEW as pv"
          + " full join testavro.PROFILE.`$table` as p"
          + " on p.id = pv.profileId";
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql);
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  List<SamzaSqlQueryParser.QueryInfo> queryInfo = fetchQueryInfo(sqlStmts);
  SamzaSqlApplicationConfig samzaSqlApplicationConfig = new SamzaSqlApplicationConfig(new MapConfig(config),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSources).flatMap(Collection::stream)
          .collect(Collectors.toList()),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSink).collect(Collectors.toList()));

  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(streamApp -> { }, samzaConfig);
  QueryTranslator translator = new QueryTranslator(streamAppDesc, samzaSqlApplicationConfig);

  translator.translate(queryInfo.get(0), streamAppDesc, 0);
}
 
Example #27
Source File: TestSamzaSqlEndToEnd.java    From samza with Apache License 2.0
@Test
public void testEndToEndFanIn() throws SamzaSqlValidatorException {
  int numMessages = 20;
  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);
  String sql1 = "Insert into testavro.simpleOutputTopic select * from testavro.SIMPLE2";
  String sql2 = "Insert into testavro.simpleOutputTopic select * from testavro.SIMPLE1";
  List<String> sqlStmts = Arrays.asList(sql1, sql2);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<Integer> outMessages = TestAvroSystemFactory.messages.stream()
      .map(x -> Integer.valueOf(((GenericRecord) x.getMessage()).get("id").toString()))
      .sorted()
      .collect(Collectors.toList());
  Assert.assertEquals(numMessages * 2, outMessages.size());
  Set<Integer> outMessagesSet = new HashSet<>(outMessages);
  Assert.assertEquals(numMessages, outMessagesSet.size());
  Assert.assertTrue(IntStream.range(0, numMessages).boxed().collect(Collectors.toList()).equals(new ArrayList<>(outMessagesSet)));
}
 
Example #28
Source File: TestAllSspToSingleTaskGrouper.java    From samza with Apache License 2.0
@Test
public void testLocalStreamGroupedCorrectlyForYarn() {
  HashSet<SystemStreamPartition> allSSPs = new HashSet<>();
  HashMap<String, String> configMap = new HashMap<>();

  configMap.put("job.container.count", "2");
  configMap.put("processor.list", "0,1");

  Config config = new MapConfig(configMap);

  SystemStreamPartitionGrouper grouper = grouperFactory.getSystemStreamPartitionGrouper(config);
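  // AllSspToSingleTaskGrouper assigns every SSP to each processor, so Task-0 and Task-1 each see all four partitions.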

  Collections.addAll(allSSPs, aa0, aa1, aa2, ab0);
  Map<TaskName, Set<SystemStreamPartition>> result = grouper.group(allSSPs);
  Map<TaskName, Set<SystemStreamPartition>> expectedResult = new HashMap<>();

  HashSet<SystemStreamPartition> partitions = new HashSet<>();
  partitions.add(aa0);
  partitions.add(aa1);
  partitions.add(aa2);
  partitions.add(ab0);
  expectedResult.put(new TaskName("Task-0"), partitions);
  expectedResult.put(new TaskName("Task-1"), partitions);

  assertEquals(expectedResult, result);
}
 
Example #29
Source File: JobNodeConfigurationGenerator.java    From samza with Apache License 2.0
private void configureTables(Map<String, String> generatedConfig, Config originalConfig,
    Map<String, TableDescriptor> tables, Set<String> inputs) {
  generatedConfig.putAll(
      TableConfigGenerator.generate(
          new MapConfig(generatedConfig), new ArrayList<>(tables.values())));

  // Add side inputs to the inputs and mark the stream as bootstrap
  tables.values().forEach(tableDescriptor -> {
    if (tableDescriptor instanceof LocalTableDescriptor) {
      LocalTableDescriptor localTableDescriptor = (LocalTableDescriptor) tableDescriptor;
      List<String> sideInputs = localTableDescriptor.getSideInputs();
      if (sideInputs != null && !sideInputs.isEmpty()) {
        sideInputs.stream()
            .map(sideInput -> StreamUtil.getSystemStreamFromNameOrId(originalConfig, sideInput))
            .forEach(systemStream -> {
              inputs.add(StreamUtil.getNameFromSystemStream(systemStream));
              generatedConfig.put(String.format(StreamConfig.STREAM_PREFIX + StreamConfig.BOOTSTRAP,
                  systemStream.getSystem(), systemStream.getStream()), "true");
            });
      }
    }
  });
}
 
Example #30
Source File: TestLocalTableProvider.java    From samza with Apache License 2.0
@Test
public void testInit() {
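  // Stub the job, container, and task contexts with just enough behavior for the table provider to initialize.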
  Context context = mock(Context.class);
  JobContext jobContext = mock(JobContext.class);
  when(context.getJobContext()).thenReturn(jobContext);
  when(jobContext.getConfig()).thenReturn(new MapConfig());
  ContainerContext containerContext = mock(ContainerContext.class);
  when(context.getContainerContext()).thenReturn(containerContext);
  when(containerContext.getContainerMetricsRegistry()).thenReturn(new NoOpMetricsRegistry());
  TaskContext taskContext = mock(TaskContext.class);
  when(context.getTaskContext()).thenReturn(taskContext);
  when(taskContext.getStore(any())).thenReturn(mock(KeyValueStore.class));

  TableProvider tableProvider = createTableProvider("t1");
  tableProvider.init(context);
  Assert.assertNotNull(tableProvider.getTable());
}