com.github.shyiko.mysql.binlog.event.DeleteRowsEventData Java Examples

The following examples show how to use com.github.shyiko.mysql.binlog.event.DeleteRowsEventData. Each example is taken from an open source project; the source file, project, and license are noted above each example.
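All of the examples below follow the same basic pattern: register an event listener on a BinaryLogClient and, when an event carries DeleteRowsEventData, read the table id, the included columns, and the deleted row images. The following minimal sketch shows that pattern in isolation; the connection settings are placeholders and handleDeletedRow is a hypothetical callback, while the library calls themselves (registerEventListener, connect, getTableId, getIncludedColumns, getRows) are the ones used throughout the examples.

import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.DeleteRowsEventData;
import com.github.shyiko.mysql.binlog.event.EventData;

import java.io.IOException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.BitSet;

public class DeleteRowsListenerSketch {

    public static void main(String[] args) throws IOException {
        // Placeholder connection settings - replace with your own.
        BinaryLogClient client = new BinaryLogClient("127.0.0.1", 3306, "user", "password");

        client.registerEventListener(event -> {
            EventData data = event.getData();
            if (data instanceof DeleteRowsEventData) {
                DeleteRowsEventData delete = (DeleteRowsEventData) data;
                long tableId = delete.getTableId();           // id of the table the rows were deleted from
                BitSet columns = delete.getIncludedColumns(); // which columns are present in each row image
                for (Serializable[] row : delete.getRows()) { // one row image per deleted row
                    handleDeletedRow(tableId, columns, row);
                }
            }
        });

        // Blocks until the connection is closed.
        client.connect();
    }

    // Hypothetical handler; a real application would map the row to its own record type.
    private static void handleDeletedRow(long tableId, BitSet columns, Serializable[] row) {
        System.out.println("Deleted from table " + tableId + ": " + Arrays.toString(row));
    }
}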
Example #1
Source File: BinaryLogConnectorEventMapperTest.java    From SpinalTap with Apache License 2.0
@Test
public void testDeleteEvent() {
  eventHeader.setEventType(EventType.EXT_DELETE_ROWS);
  DeleteRowsEventData eventData = new DeleteRowsEventData();
  eventData.setTableId(TABLE_ID);
  eventData.setRows(ImmutableList.of(PREV_ROW));

  Optional<BinlogEvent> binlogEvent =
      BinaryLogConnectorEventMapper.INSTANCE.map(
          new Event(eventHeader, eventData), BINLOG_FILE_POS);
  assertTrue(binlogEvent.isPresent());
  assertTrue(binlogEvent.get() instanceof DeleteEvent);
  DeleteEvent deleteEvent = (DeleteEvent) (binlogEvent.get());
  assertEquals(BINLOG_FILE_POS, deleteEvent.getBinlogFilePos());
  assertEquals(ImmutableList.of(PREV_ROW), deleteEvent.getRows());
  assertEquals(SERVER_ID, deleteEvent.getServerId());
  assertEquals(TABLE_ID, deleteEvent.getTableId());
  assertEquals(TIMESTAMP, deleteEvent.getTimestamp());
}
 
Example #2
Source File: RecordConverter.java    From datacollector with Apache License 2.0
private List<Record> toRecords(Table table,
                               EventHeader eventHeader,
                               DeleteRowsEventData eventData,
                               SourceOffset offset) {
  List<Record> res = new ArrayList<>(eventData.getRows().size());
  for (Serializable[] row : eventData.getRows()) {
    Record record = recordFactory.create(offset.format());
    Map<String, Field> fields = createHeader(table, eventHeader, offset);
    fields.put(TYPE_FIELD, create("DELETE"));
    record.getHeader().setAttribute(
        OperationType.SDC_OPERATION_TYPE,
        String.valueOf(OperationType.DELETE_CODE)
    );
    List<ColumnValue> columnValues = zipColumnsValues(eventData.getIncludedColumns(), table, row);
    Map<String, Field> data = toMap(columnValues);
    fields.put(OLD_DATA_FIELD, create(data));

    record.set(create(fields));
    res.add(record);
  }
  return res;
}
 
Example #3
Source File: EventProcessor.java    From openmessaging-connect-odar with Apache License 2.0
private void processDeleteEvent(Event event) {
    DeleteRowsEventData data = event.getData();
    Long tableId = data.getTableId();
    List<Serializable[]> list = data.getRows();

    for (Serializable[] row : list) {
        addRow(EntryType.DELETE, tableId, null, row);
    }

}
 
Example #4
Source File: RowsEvent.java    From syncer with BSD 3-Clause "New" or "Revised" License
public static List<IndexedFullRow> getIndexedRows(SimpleEventType eventType, EventData data,
                                                  Set<Integer> primaryKeys) {
  switch (eventType) {
    case UPDATE:
      return UpdateRowsEvent.getIndexedRows((UpdateRowsEventData) data);
    case WRITE:
      WriteRowsEventData write = (WriteRowsEventData) data;
      return getIndexedRows(write.getRows(), write.getIncludedColumns());
    case DELETE:
      DeleteRowsEventData delete = (DeleteRowsEventData) data;
      return getIndexedRows(delete.getRows(), delete.getIncludedColumns());
    default:
      throw new IllegalArgumentException("Unsupported event type");
  }
}
 
Example #5
Source File: RecordConverter.java    From datacollector with Apache License 2.0
public List<Record> toRecords(EnrichedEvent event) {
  EventType eventType = event.getEvent().getHeader().getEventType();
  switch (eventType) {
    case PRE_GA_WRITE_ROWS:
    case WRITE_ROWS:
    case EXT_WRITE_ROWS:
      return toRecords(
          event.getTable(),
          event.getEvent().getHeader(),
          event.getEvent().<WriteRowsEventData>getData(),
          event.getOffset()
      );
    case PRE_GA_UPDATE_ROWS:
    case UPDATE_ROWS:
    case EXT_UPDATE_ROWS:
      return toRecords(
          event.getTable(),
          event.getEvent().getHeader(),
          event.getEvent().<UpdateRowsEventData>getData(),
          event.getOffset()
      );
    case PRE_GA_DELETE_ROWS:
    case DELETE_ROWS:
    case EXT_DELETE_ROWS:
      return toRecords(
          event.getTable(),
          event.getEvent().getHeader(),
          event.getEvent().<DeleteRowsEventData>getData(),
          event.getOffset()
      );
    default:
      throw new IllegalArgumentException(String.format("EventType '%s' not supported", eventType));
  }
}
 
Example #6
Source File: BinlogServiceTest.java    From ad with Apache License 2.0
@Test
public void testBinlog() throws IOException {
    String hostname = "127.0.0.1", username = "yuwen", password = "lyp82nlf";
    int port = 3306;
    // BinaryLogClient is essentially a client that connects to the database:
    // it disguises itself as a slave and connects to the master
    BinaryLogClient client = new BinaryLogClient(
            hostname, port, username, password
    );
    // Set the binlog file to listen to; if not set, the latest binlog is used
    //client.setBinlogFilename();
    // Set the binlog position to listen from; if not set, the latest position is used
    //client.setBinlogPosition();
    // Register an event listener for changes happening in MySQL; each Event represents an event that has already occurred
    client.registerEventListener(event -> {
        // Data describing the change to a MySQL table
        EventData eventData = event.getData();
        if (eventData instanceof UpdateRowsEventData) {
            log.info("update event");
            log.debug("{}", eventData);
        } else if (eventData instanceof WriteRowsEventData) {
            log.info("write event");
            log.debug("{}", eventData);
        } else if (eventData instanceof DeleteRowsEventData) {
            log.info("delete event");
            log.debug("{}", eventData);
        }
    });
    // Connect to the master and start listening
    client.connect();


    // After starting, manually connect to MySQL and execute:
    // insert into `ad_unit_keyword` (`unit_id`, `keyword`) values (10, '标志');
    // The console then logs:
    // 15:39:17.410 [main] INFO top.ezttf.ad.service.BinlogServiceTest - write event
    // 15:39:17.459 [main] DEBUG top.ezttf.ad.service.BinlogServiceTest - WriteRowsEventData{tableId=122, includedColumns={0, 1, 2}, rows=[
    //     [13, 10, 标志]
    // ]}

    // WriteRowsEventData{tableId=118, includedColumns={0, 1, 2, 3, 4, 5, 6, 7}, rows=[
    //    [11, 666, plan, 1, 2019-01-01, 2019-01-01, Tue Jan 01 08:00:00 CST 2019, Tue Jan 01 08:00:00 CST 2019]
    //]}
}
 
Example #7
Source File: BinaryLogConsumer.java    From datacollector with Apache License 2.0
@Override
public void onEvent(Event event) {
  LOG.trace("Received event {}", event);
  EventType eventType = event.getHeader().getEventType();
  currentBinLogFileName = client.getBinlogFilename();
  switch (eventType) {
    case TABLE_MAP:
      handleTableMappingEvent((TableMapEventData) event.getData());
      break;
    case PRE_GA_WRITE_ROWS:
    case WRITE_ROWS:
    case EXT_WRITE_ROWS:
      handleRowEvent(event, event.<WriteRowsEventData>getData().getTableId());
      break;
    case PRE_GA_UPDATE_ROWS:
    case UPDATE_ROWS:
    case EXT_UPDATE_ROWS:
      handleRowEvent(event, event.<UpdateRowsEventData>getData().getTableId());
      break;
    case PRE_GA_DELETE_ROWS:
    case DELETE_ROWS:
    case EXT_DELETE_ROWS:
      handleRowEvent(event, event.<DeleteRowsEventData>getData().getTableId());
      break;
    case QUERY:
      QueryEventData queryEventData = event.getData();
      String query = queryEventData.getSql();
      if (isCommit(query)) {
        finishTx();
      } else if (isSchemaChangeQuery(query)) {
        schemaRepository.evictAll();
      }
      break;
    case XID:
      finishTx();
      break;
    case GTID:
      GtidEventData eventData = event.getData();
      currentGtidSet = client.getGtidSet();
      currentTxGtid = eventData.getGtid();
      currentTxEventSeqNo = 0;
      LOG.trace("Started new tx, gtid: {}", currentTxGtid);
      break;
    default:
      // ignore
      break;
  }
}
 
Example #8
Source File: DeleteRowsEventInfo.java    From nifi with Apache License 2.0
public DeleteRowsEventInfo(TableInfo tableInfo, Long timestamp, String binlogFilename, Long binlogPosition, DeleteRowsEventData data) {
    super(tableInfo, DELETE_EVENT, timestamp, binlogFilename, binlogPosition, data.getIncludedColumns(), data.getRows());
}