avro.shaded.com.google.common.collect.Lists Java Examples
The following examples show how to use
avro.shaded.com.google.common.collect.Lists.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: AppenderConsumer.java From DBus with Apache License 2.0 | 5 votes |
/**
 * Builds the Kafka consumer for this topology.
 *
 * <p>Subscribes to the control and data topics, adds the schema topics when the
 * datasource is ORACLE or DB2, then restores any paused topics recorded in the
 * ZooKeeper node so a restart resumes in the same paused state.
 *
 * @return a consumer already subscribed to every required topic
 * @throws Exception if configuration loading, topic resolution, or consumer
 *         creation fails
 */
private Consumer<String, byte[]> createConsumer() throws Exception {
    Properties props = PropertiesHolder.getProperties(Constants.Properties.CONSUMER_CONFIG);
    props.setProperty("client.id", this.topologyId + "_consumer");
    props.setProperty("group.id", this.topologyId + "_grp");

    TopicProvider controlProvider = new ControlTopicProvider();
    TopicProvider dataProvider = new DataInputTopicProvider();
    TopicProvider schemaProvider = new SchemaTopicProvider();

    this.controlTopics = controlProvider.provideTopics();
    this.dataTopics = dataProvider.provideTopics();

    // Subscription order: control topics first, then data, then (optionally) schema.
    List<String> topics = Lists.newArrayList();
    topics.addAll(this.controlTopics);
    topics.addAll(this.dataTopics);
    if (DbusDatasourceType.ORACLE == GlobalCache.getDatasourceType()
            || DbusDatasourceType.DB2 == GlobalCache.getDatasourceType()) {
        this.schemaTopics = schemaProvider.provideTopics();
        topics.addAll(this.schemaTopics);
    }

    Consumer<String, byte[]> consumer = consumerProvider.consumer(props, topics);

    // Rebuild the paused-topic map from the persisted ZK node, then re-apply the pauses.
    Map<String, Object> persisted = zkNodeOperator.getData();
    for (Map.Entry<String, Object> entry : persisted.entrySet()) {
        TopicInfo info = TopicInfo.build((Map<String, Object>) entry.getValue());
        this.pausedTopics.put(info.getTopic(), info);
    }
    consumerProvider.pause(consumer,
            this.pausedTopics.entrySet().stream()
                    .map(Map.Entry::getValue)
                    .collect(Collectors.toList()));
    return consumer;
}
Example #2
Source File: TypeConverterUtils.java From components with Apache License 2.0 | 5 votes |
/**
 * Splits a hierarchical dot-separated path into its individual steps.
 *
 * @param path the path, optionally prefixed with a leading dot
 * @return a stack of steps where {@code pop()} yields the steps in path order
 *         (the first step is on top)
 */
public static Stack<String> getPathSteps(String path) {
    String normalized = path.startsWith(".") ? path.substring(1) : path;
    String[] steps = normalized.split("\\.");
    Stack<String> result = new Stack<String>();
    // Push in reverse so the first path step ends up on top of the stack.
    for (int i = steps.length - 1; i >= 0; i--) {
        result.push(steps[i]);
    }
    return result;
}
Example #3
Source File: FieldAttributeBasedDeltaFieldsProviderTest.java From incubator-gobblin with Apache License 2.0 | 5 votes |
@Test
public void testGetDeltaFieldNamesForNewSchema() {
    // Stub a Configuration carrying the attribute-field name and the delta property name.
    Configuration conf = mock(Configuration.class);
    when(conf.get(FieldAttributeBasedDeltaFieldsProvider.ATTRIBUTE_FIELD))
        .thenReturn("attributes_json");
    when(conf.get(FieldAttributeBasedDeltaFieldsProvider.DELTA_PROP_NAME,
                  FieldAttributeBasedDeltaFieldsProvider.DEFAULT_DELTA_PROP_NAME))
        .thenReturn(FieldAttributeBasedDeltaFieldsProvider.DEFAULT_DELTA_PROP_NAME);
    AvroDeltaFieldNameProvider provider = new FieldAttributeBasedDeltaFieldsProvider(conf);

    // A record whose schema declares delta attributes on two fields.
    Schema schema = new Schema.Parser().parse(FULL_SCHEMA_WITH_ATTRIBUTES);
    GenericRecord record = mock(GenericRecord.class);
    when(record.getSchema()).thenReturn(schema);

    List<String> deltaFields = provider.getDeltaFieldNames(record);
    Assert.assertEquals(deltaFields, Lists.newArrayList("scn2", "scn"));
}
Example #4
Source File: KafkaConsumerSpout.java From DBus with Apache License 2.0 | 4 votes |
/**
 * Rebuilds the spout's datasource state in response to a reload control message:
 * closes current resources, re-reads datasource info from ZooKeeper and the
 * database, re-creates the Kafka consumer, re-positions the data-topic offset,
 * and persists the refreshed state plus a reload status back to ZooKeeper.
 *
 * <p>Any failure is logged, reported to the collector, and rethrown as a
 * {@link RuntimeException}; the DB and ZK helpers are always closed.
 *
 * @param reloadJson: reload control msg json
 */
private void reloadConfig(String reloadJson) {
    // Tear down the current consumer/resources before rebuilding from scratch.
    close();
    ZKHelper zkHelper = null;
    DBHelper dbHelper = null;
    try {
        // Datasource name/offset come from ZooKeeper; the remaining fields from the DB.
        dsInfo = new DataSourceInfo();
        zkHelper = new ZKHelper(topologyRoot, topologyID, zkServers);
        zkHelper.loadDsNameAndOffset(dsInfo);
        dbHelper = new DBHelper(zkServers);
        dbHelper.loadDsInfo(dsInfo);
        logger.info(String.format("Spout read datasource: %s", dsInfo.toString()));

        //init consumer: manually assign partition 0 of both the data and control topics
        dataTopicPartition = new TopicPartition(dsInfo.getDataTopic(), 0);
        ctrlTopicPartition = new TopicPartition(dsInfo.getCtrlTopic(), 0);
        List<TopicPartition> topics = Arrays.asList(dataTopicPartition, ctrlTopicPartition);
        consumer = new KafkaConsumer(zkHelper.getConsumerProps());
        consumer.assign(topics);

        //skip offset
        long oldOffset = consumer.position(dataTopicPartition);
        logger.info(String.format("reloaded offset as: %d", oldOffset));
        // The configured offset directive decides where to resume:
        // "none" keeps the current position, "begin"/"end" seek to the partition
        // extremes, and anything else is parsed as an absolute offset.
        String offset = dsInfo.getDataTopicOffset();
        if (offset.equalsIgnoreCase("none")) {
            ; // do nothing
        } else if (offset.equalsIgnoreCase("begin")) {
            consumer.seekToBeginning(Lists.newArrayList(dataTopicPartition));
            logger.info(String.format("Offset seek to begin, changed as: %d", consumer.position(dataTopicPartition)));
        } else if (offset.equalsIgnoreCase("end")) {
            consumer.seekToEnd(Lists.newArrayList(dataTopicPartition));
            logger.info(String.format("Offset seek to end, changed as: %d", consumer.position(dataTopicPartition)));
        } else {
            long nOffset = Long.parseLong(offset);
            consumer.seek(dataTopicPartition, nOffset);
            logger.info(String.format("Offset changed as: %d", consumer.position(dataTopicPartition)));
        }
        // Clear the one-shot offset directive and persist the refreshed state to ZK.
        dsInfo.resetDataTopicOffset();
        zkHelper.saveDsInfo(dsInfo);
        zkHelper.saveReloadStatus(reloadJson, "dispatcher-spout", true);
    } catch (Exception ex) {
        logger.error("KafkaConsumerSpout reloadConfig():", ex);
        collector.reportError(ex);
        throw new RuntimeException(ex);
    } finally {
        // Helpers are short-lived; always release them even on failure.
        if (dbHelper != null) {
            dbHelper.close();
        }
        if (zkHelper != null) {
            zkHelper.close();
        }
    }
}
Example #5
Source File: OracleMessageHandler.java From DBus with Apache License 2.0 | 4 votes |
/**
 * Creates a message handler for Oracle datasources.
 *
 * @param rpListener       listener notified of record processing
 * @param consumerListener listener exposing consumer state, including the schema topics
 */
public OracleMessageHandler(RecordProcessListener rpListener, ConsumerListener consumerListener) {
    super(rpListener, consumerListener);
    // Copy into our own mutable list rather than aliasing the listener's collection.
    this.schemaTopics = Lists.newArrayList(consumerListener.getSchemaTopics());
    createProcessors();
}
Example #6
Source File: Db2MessageHandler.java From DBus with Apache License 2.0 | 4 votes |
/**
 * Creates a message handler for DB2 datasources.
 *
 * @param rpListener       listener notified of record processing
 * @param consumerListener listener exposing consumer state, including the schema topics
 */
public Db2MessageHandler(RecordProcessListener rpListener, ConsumerListener consumerListener) {
    super(rpListener, consumerListener);
    // Copy into our own mutable list rather than aliasing the listener's collection.
    this.schemaTopics = Lists.newArrayList(consumerListener.getSchemaTopics());
    createProcessors();
}