org.postgresql.PGConnection Java Examples

The following examples show how to use org.postgresql.PGConnection. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
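
Most of the examples obtain a PGConnection by unwrapping a standard JDBC Connection rather than casting it, because pooled or proxied connections are usually not PGConnection instances themselves. The snippet below is a minimal, self-contained sketch of that pattern; the JDBC URL, the credentials, and the UnwrapExample class name are placeholders for illustration and are not taken from any of the projects listed here.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

import org.postgresql.PGConnection;

public class UnwrapExample {

    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; replace with your own.
        String url = "jdbc:postgresql://localhost:5432/mydb";

        try (Connection connection = DriverManager.getConnection(url, "user", "secret")) {
            // Prefer unwrap() over a plain cast: pooled connections are usually proxies
            // that only implement java.sql.Connection.
            PGConnection pgConnection = connection.isWrapperFor(PGConnection.class)
                    ? connection.unwrap(PGConnection.class)
                    : (PGConnection) connection;

            // PGConnection exposes the PostgreSQL-specific extensions used in the
            // examples below, for instance the COPY API:
            System.out.println(pgConnection.getCopyAPI().getClass().getName());
        }
    }
}

The PostgreSqlUtils helpers from PgBulkInsert (Examples #18, #20, #27 and #30 below) package the same cast-or-unwrap checks behind a small utility class.
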
Example #1
Source File: PGCopyPreparedStatement.java    From phoebus with Eclipse Public License 1.0
@Override
public int[] executeBatch() throws SQLException {
    long res = 0;
    try {
        CopyManager cpManager = ((PGConnection) connection).getCopyAPI();
        PushbackReader reader = new PushbackReader(new StringReader(""),
                batchBuilder.length());
        reader.unread(batchBuilder.toString().toCharArray());
        res = cpManager.copyIn("COPY " + tableName + " FROM STDIN WITH CSV", reader);
        batchBuilder.setLength(0);
        reader.close();
    } catch (IOException e) {
        throw new SQLException(e);
    }
    return new int[] { (int) res };
}
 
Example #2
Source File: PGBulkLoader.java    From hop with Apache License 2.0
private void do_copy( PGBulkLoaderMeta meta, boolean wait ) throws HopException {
  data.db = getDatabase( this, meta );
  String copyCmd = getCopyCommand();
  try {
    connect();

    checkClientEncoding();

    processTruncate();

    logBasic( "Launching command: " + copyCmd );
    pgCopyOut = new PGCopyOutputStream( (PGConnection) data.db.getConnection(), copyCmd );

  } catch ( Exception ex ) {
    throw new HopException( "Error while preparing the COPY " + copyCmd, ex );
  }
}
 
Example #3
Source File: PGBulkLoader.java    From pentaho-kettle with Apache License 2.0
private void do_copy( PGBulkLoaderMeta meta, boolean wait ) throws KettleException {
  data.db = getDatabase( this, meta );
  String copyCmd = getCopyCommand();
  try {
    connect();

    checkClientEncoding();

    processTruncate();

    logBasic( "Launching command: " + copyCmd );
    pgCopyOut = new PGCopyOutputStream( (PGConnection) data.db.getConnection(), copyCmd );

  } catch ( Exception ex ) {
    throw new KettleException( "Error while preparing the COPY " + copyCmd, ex );
  }
}
 
Example #4
Source File: PostgreSQLWalDumper.java    From shardingsphere with Apache License 2.0
@Override
public void dump(final Channel channel) {
    try {
        PGConnection pgConnection = logicalReplication.createPgConnection((JDBCDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration());
        decodingPlugin = new TestDecodingPlugin(((Connection) pgConnection).unwrap(PgConnection.class).getTimestampUtils());
        PGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection,
                PostgreSQLLogPositionManager.SLOT_NAME, walPosition.getLogSequenceNumber());
        while (isRunning()) {
            ByteBuffer msg = stream.readPending();
            if (msg == null) {
                // nothing pending yet; back off briefly before polling again
                try {
                    Thread.sleep(10L);
                } catch (InterruptedException ignored) {
                }
                continue;
            }
            AbstractWalEvent event = decodingPlugin.decode(msg, stream.getLastReceiveLSN());
            pushRecord(channel, walEventConverter.convert(event));
        }
    } catch (SQLException ex) {
        throw new SyncTaskExecuteException(ex);
    }
}
 
Example #5
Source File: SimpleRowWriter.java    From PgBulkInsert with MIT License
public SimpleRowWriter(final Table table, final PGConnection connection, final boolean usePostgresQuoting) throws SQLException {
    this.table = table;
    this.isClosed = false;
    this.isOpened = false;
    this.nullCharacterHandler = (val) -> val;

    this.provider = new ValueHandlerProvider();
    this.lookup = new HashMap<>();

    for (int ordinal = 0; ordinal < table.columns.length; ordinal++) {
        lookup.put(table.columns[ordinal], ordinal);
    }

    this.writer = new PgBinaryWriter(new PGCopyOutputStream(connection, table.getCopyCommand(usePostgresQuoting), 1));

    isClosed = false;
    isOpened = true;
}
 
Example #6
Source File: SimpleRowWriterWithQuotesTest.java    From PgBulkInsert with MIT License
@Test
public void rowBasedWriterTest() throws SQLException {

    // Get the underlying PGConnection:
    PGConnection pgConnection = PostgreSqlUtils.getPGConnection(connection);

    // Define the Columns to be inserted:
    String[] columnNames = new String[] {
            "Value_int",
            "value_text"
    };

    // Create the Table Definition:
    SimpleRowWriter.Table table = new SimpleRowWriter.Table(schema, tableName, columnNames);

    // Create the Writer:
    try(SimpleRowWriter writer = new SimpleRowWriter(table, pgConnection, true)) {

        // ... write your data rows:
        for (int rowIdx = 0; rowIdx < 10000; rowIdx++) {

            // ... using startRow and work with the row, see how the order doesn't matter:
            writer.startRow((row) -> {
                row.setText("value_text", "Hi");
                row.setInteger("Value_int", 1);
            });
        }
    }

    // Now assert, that we have written 10000 entities:
    Assert.assertEquals(10000, getRowCount());
}
 
Example #7
Source File: TunnelServer.java    From tunnel with Apache License 2.0
private void createRplConn() throws SQLException {
    String url = this.jdbcConfig.getUrl();
    Properties props = new Properties();
    PGProperty.USER.set(props, this.jdbcConfig.getUsername());
    PGProperty.PASSWORD.set(props, this.jdbcConfig.getPassword());
    PGProperty.ASSUME_MIN_SERVER_VERSION.set(props, this.jdbcConfig.getMinVersion());
    PGProperty.REPLICATION.set(props, this.jdbcConfig.getRplLevel());
    PGProperty.PREFER_QUERY_MODE.set(props, "simple");

    this.connection = DriverManager.getConnection(url, props);
    this.rplConnection = this.connection.unwrap(PGConnection.class);
    log.info("GetRplConnection success,slot:{}", this.slotName);
}
 
Example #8
Source File: RDB.java    From tuffylite with Apache License 2.0
public void createTempTableIntList(String rel, Collection<Integer> vals){
	dropTable(rel);
	String sql = "CREATE TABLE " + rel + "(id INT)";
	update(sql);
	try {
		
		String loadingFile = Config.dir_working + "/createTempTableIntList";

		BufferedWriter bw = new BufferedWriter(new FileWriter(loadingFile));

		for (int pid : vals) {
			bw.write(pid + "\n");
		}
		bw.close();

		ArrayList<String> cols = new ArrayList<String>();
		cols.add("id");
		FileInputStream in = new FileInputStream(loadingFile);
		PGConnection con = (PGConnection) this.getConnection();
		sql = "COPY " + rel + StringMan.commaListParen(cols) + " FROM STDIN CSV";
		con.getCopyAPI().copyIn(sql, in);
		in.close();
		this.analyze(rel);
		FileMan.removeFile(loadingFile);
		
	} catch (Exception e) {
		e.printStackTrace();
	}
}
 
Example #9
Source File: PostgisDb.java    From hortonmachine with GNU General Public License v3.0
@SuppressWarnings("unchecked")
@Override
public void initSpatialMetadata( String options ) throws Exception {
    if (!wasInitialized) {
        Connection jdbcConnection = getJdbcConnection();

        if (jdbcConnection instanceof PGConnection) {
            // FIXME how to enter in pooled mode
            PGConnection pgconn = (PGConnection) jdbcConnection;
            pgconn.addDataType("geometry", (Class< ? extends PGobject>) Class.forName("org.postgis.PGgeometry"));
            pgconn.addDataType("box3d", (Class< ? extends PGobject>) Class.forName("org.postgis.PGbox3d"));
            pgconn.addDataType("box2d", (Class< ? extends PGobject>) Class.forName("org.postgis.PGbox2d"));
        }
        wasInitialized = true;
    }
}
 
Example #10
Source File: PostgresLargeObjectManager.java    From activemq-artemis with Apache License 2.0
private Object getLargeObjectManager() throws SQLException {
   if (shouldUseReflection) {
      try {
         Method method = realConnection.getClass().getMethod("getLargeObjectAPI");
         return method.invoke(realConnection);
      } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
         throw new SQLException("Couldn't access org.postgresql.largeobject.LargeObjectManager", ex);
      }
   } else {
      return ((PGConnection) realConnection).getLargeObjectAPI();
   }
}
 
Example #11
Source File: PostgresLargeObjectManager.java    From activemq-artemis with Apache License 2.0
public Object open(long oid, int mode) throws SQLException {
   if (shouldUseReflection) {
      Object largeObjectManager = getLargeObjectManager();
      try {
         Method method = largeObjectManager.getClass().getMethod("open", long.class, int.class);
         return method.invoke(largeObjectManager, oid, mode);
      } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
         throw new SQLException("Couldn't access org.postgresql.largeobject.LargeObjectManager", ex);
      }
   } else {
      return ((PGConnection) realConnection).getLargeObjectAPI().open(oid, mode);
   }
}
 
Example #12
Source File: PostgresLargeObjectManager.java    From activemq-artemis with Apache License 2.0
public final Long createLO() throws SQLException {
   if (shouldUseReflection) {
      Object largeObjectManager = getLargeObjectManager();
      try {
         Method method = largeObjectManager.getClass().getMethod("createLO");
         return (Long) method.invoke(largeObjectManager);
      } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
         throw new SQLException("Couldn't access org.postgresql.largeobject.LargeObjectManager", ex);
      }
   } else {
      return ((PGConnection) realConnection).getLargeObjectAPI().createLO();
   }
}
 
Example #13
Source File: LogicalReplication.java    From shardingsphere with Apache License 2.0
private PGConnection createConnection(final JDBCDataSourceConfiguration jdbcDataSourceConfiguration) throws SQLException {
    Properties props = new Properties();
    PGProperty.USER.set(props, jdbcDataSourceConfiguration.getUsername());
    PGProperty.PASSWORD.set(props, jdbcDataSourceConfiguration.getPassword());
    PGProperty.ASSUME_MIN_SERVER_VERSION.set(props, "9.6");
    PGProperty.REPLICATION.set(props, "database");
    PGProperty.PREFER_QUERY_MODE.set(props, "simple");
    return DriverManager.getConnection(jdbcDataSourceConfiguration.getJdbcUrl(), props).unwrap(PGConnection.class);
}
 
Example #14
Source File: LogicalReplication.java    From shardingsphere with Apache License 2.0
/**
 * Create PostgreSQL replication stream.
 *
 * @param pgConnection PostgreSQL connection
 * @param slotName slot name
 * @param startPosition start position
 * @return replication stream
 * @throws SQLException sql exception
 */
public PGReplicationStream createReplicationStream(final PGConnection pgConnection, final String slotName, final LogSequenceNumber startPosition) throws SQLException {
    return pgConnection.getReplicationAPI()
            .replicationStream()
            .logical()
            .withStartPosition(startPosition)
            .withSlotName(slotName)
            .withSlotOption("include-xids", true)
            .withSlotOption("skip-empty-xacts", true)
            .start();
}
 
Example #15
Source File: TunnelServer.java    From tunnel with Apache License 2.0
private void createRplConn() throws SQLException {
    String url = this.jdbcConfig.getUrl();
    Properties props = new Properties();
    PGProperty.USER.set(props, this.jdbcConfig.getUsername());
    PGProperty.PASSWORD.set(props, this.jdbcConfig.getPassword());
    PGProperty.ASSUME_MIN_SERVER_VERSION.set(props, this.jdbcConfig.getMinVersion());
    PGProperty.REPLICATION.set(props, this.jdbcConfig.getRplLevel());
    PGProperty.PREFER_QUERY_MODE.set(props, "simple");

    this.connection = DriverManager.getConnection(url, props);
    this.rplConnection = this.connection.unwrap(PGConnection.class);
    log.info("GetRplConnection success,slot:{}", this.slotName);
}
 
Example #16
Source File: App.java    From LogicalDecode with BSD 3-Clause "New" or "Revised" License
public void receiveChangesOccursBeforStartReplication() throws Exception {
    PGConnection pgConnection = (PGConnection) replicationConnection;

    LogSequenceNumber lsn = getCurrentLSN();

    Statement st = connection.createStatement();
    st.execute("insert into test_logical_table(name) values('previous value')");
    st.execute("insert into test_logical_table(name) values('previous value')");
    st.execute("insert into test_logical_table(name) values('previous value')");
    st.close();

    PGReplicationStream stream =
            pgConnection
                    .getReplicationAPI()
                    .replicationStream()
                    .logical()
                    .withSlotName(SLOT_NAME)
                    .withStartPosition(lsn)
                //    .withSlotOption("proto_version",1)
                //    .withSlotOption("publication_names", "pub1")
                   .withSlotOption("include-xids", true)
                //    .withSlotOption("skip-empty-xacts", true)
                    .withStatusInterval(10, TimeUnit.SECONDS)
                    .start();
    ByteBuffer buffer;
    while(true)
    {
        buffer = stream.readPending();
        if (buffer == null) {
            TimeUnit.MILLISECONDS.sleep(10L);
            continue;
        }

        System.out.println( toString(buffer));
        //feedback
        stream.setAppliedLSN(stream.getLastReceiveLSN());
        stream.setFlushedLSN(stream.getLastReceiveLSN());
    }

}
 
Example #17
Source File: NullTerminatingStringTest.java    From PgBulkInsert with MIT License
@Test
public void testWriterDoesNotThrowErrorForNullCharacter() throws SQLException {

    // Get the underlying PGConnection:
    PGConnection pgConnection = PostgreSqlUtils.getPGConnection(connection);

    // Define the Columns to be inserted:
    String[] columnNames = new String[] {
            "value_text"
    };

    // Create the Table Definition:
    SimpleRowWriter.Table table = new SimpleRowWriter.Table(schema, tableName, columnNames);

    boolean exceptionHasBeenThrown = false;
    try {
        // Create the Writer:
        SimpleRowWriter writer = new SimpleRowWriter(table, pgConnection);

        // ENABLE the Null Character Handler:
        writer.enableNullCharacterHandler();


        writer.startRow((row) -> {
            row.setText("value_text", "Hi\0");
        });

        // ... and make sure to close it:
        writer.close();
    } catch(Exception e) {
        exceptionHasBeenThrown = true;
    }

    Assert.assertEquals(false, exceptionHasBeenThrown);
}
 
Example #18
Source File: PostgreSqlUtils.java    From PgBulkInsert with MIT License
private static Optional<PGConnection> tryUnwrapConnection(final Connection connection) {
    try {
        if (connection.isWrapperFor(PGConnection.class)) {
            return Optional.of(connection.unwrap(PGConnection.class));
        }
    } catch (Exception e) {
        // do nothing
    }
    return Optional.empty();
}
 
Example #19
Source File: BulkWriteHandler.java    From PgBulkInsert with MIT License
@Override
public void write(List<TEntity> entities) throws Exception {
    // Obtain a new Connection and execute it in a try with resources block, so it gets closed properly:
    try(Connection connection = connectionFactory.get()) {
        // Now get the underlying PGConnection for the COPY API wrapping:
        final PGConnection pgConnection = PostgreSqlUtils.getPGConnection(connection);
        // And finally save all entities by using the COPY API:
        client.saveAll(pgConnection, entities.stream());
    }
}
 
Example #20
Source File: PostgreSqlUtils.java    From PgBulkInsert with MIT License
public static Optional<PGConnection> tryGetPGConnection(final Connection connection) {
    final Optional<PGConnection> fromCast = tryCastConnection(connection);
    if (fromCast.isPresent()) {
        return fromCast;
    }
    return tryUnwrapConnection(connection);
}
 
Example #21
Source File: NullTerminatingStringTest.java    From PgBulkInsert with MIT License
@Test
public void testWriterThrowsErrorForNullCharacter() throws SQLException {

    // Get the underlying PGConnection:
    PGConnection pgConnection = PostgreSqlUtils.getPGConnection(connection);

    // Define the Columns to be inserted:
    String[] columnNames = new String[] {
            "value_text"
    };

    // Create the Table Definition:
    SimpleRowWriter.Table table = new SimpleRowWriter.Table(schema, tableName, columnNames);

    boolean exceptionHasBeenThrown = false;

    try {
        // Create the Writer:
        try(SimpleRowWriter writer = new SimpleRowWriter(table, pgConnection)) {

            writer.startRow((row) -> {
                row.setText("value_text", "Hi\0");
            });
        }
    } catch(Exception e) {
        exceptionHasBeenThrown = true;
    }

    Assert.assertEquals(true, exceptionHasBeenThrown);
}
 
Example #22
Source File: PgBulkInsert.java    From PgBulkInsert with MIT License
@Override
public void saveAll(PGConnection connection, Stream<TEntity> entities) throws SQLException {
    // Wrap the CopyOutputStream in our own Writer:
    try (PgBinaryWriter bw = new PgBinaryWriter(new PGCopyOutputStream(connection, mapping.getCopyCommand(), 1), configuration.getBufferSize())) {
        // Insert Each Column:
        entities.forEach(entity -> saveEntitySynchonized(bw, entity));
    }
}
 
Example #23
Source File: SimpleRowWriterTest.java    From PgBulkInsert with MIT License
@Test
public void rowBasedWriterTest() throws SQLException {

    // Get the underlying PGConnection:
    PGConnection pgConnection = PostgreSqlUtils.getPGConnection(connection);

    // Define the Columns to be inserted:
    String[] columnNames = new String[] {
            "value_int",
            "value_text",
            "value_range"
    };

    // Create the Table Definition:
    SimpleRowWriter.Table table = new SimpleRowWriter.Table(schema, tableName, columnNames);

    // Create the Writer:
    try(SimpleRowWriter writer = new SimpleRowWriter(table, pgConnection)) {

        // ... write your data rows:
        for (int rowIdx = 0; rowIdx < 10000; rowIdx++) {

            // ... using startRow and work with the row, see how the order doesn't matter:
            writer.startRow((row) -> {
                row.setText("value_text", "Hi");
                row.setInteger("value_int", 1);
                row.setTsTzRange("value_range", new Range<>(
                        ZonedDateTime.of(2020, 3, 1, 0, 0, 0, 0, ZoneId.of("GMT")),
                        ZonedDateTime.of(2020, 3, 1, 0, 0, 0, 0, ZoneId.of("GMT"))));
            });
        }
    }

    // Now assert, that we have written 10000 entities:

    Assert.assertEquals(10000, getRowCount());
}
 
Example #24
Source File: PgBulkInsert.java    From PgBulkInsert with MIT License
public void saveAll(PGConnection connection, Collection<TEntity> entities) throws SQLException {
    saveAll(connection, entities.stream());
}
 
Example #25
Source File: PostgresCDCWalReceiver.java    From datacollector with Apache License 2.0
public LogSequenceNumber createReplicationStream(String startOffset)
    throws StageException, InterruptedException, TimeoutException, SQLException {

  boolean newSlot = false;
  if (!doesReplicationSlotExists(slotName)) {
    createReplicationSlot(slotName);
    newSlot = true;
  }
  obtainReplicationSlotInfo(slotName);

  connection = getConnection(this.uri, this.properties);
  PGConnection pgConnection = connection.unwrap(PGConnection.class);

  ChainedLogicalStreamBuilder streamBuilder = pgConnection
      .getReplicationAPI()
      .replicationStream()
      .logical()
      .withSlotName(slotName)
      .withSlotOption("include-xids", true)
      .withSlotOption("include-timestamp", true)
      .withSlotOption("include-lsn", true);

  LogSequenceNumber streamLsn;
  LogSequenceNumber serverFlushedLsn = LogSequenceNumber.valueOf(confirmedFlushLSN);
  if (newSlot) {
    // If the replication slot was just created, setting the start offset to an older LSN is a no-op,
    // and setting it to a future LSN is risky because the LSN could be invalid (we have to treat the
    // LSN as an opaque value). We therefore set the offset to the 'confirmed_flush_lsn' of the
    // replication slot, which happens to be the starting point of the newly created slot.
    //
    // NOTE that a DATE filter set to a future date still works as expected, because we filter by DATE.
    streamLsn = serverFlushedLsn;
  } else {

    switch (configBean.startValue) {
      case LATEST:
        // we pick up what is in the replication slot
        streamLsn = serverFlushedLsn;
        break;
      case LSN:
      case DATE:
        LogSequenceNumber configStartLsn = LogSequenceNumber.valueOf(startOffset);
        if (configStartLsn.asLong() > serverFlushedLsn.asLong()) {
          // the given LSN is newer than the last flush, we can safely forward the stream to it,
          // referenced data (by the given configStartLsn should be there)
          streamLsn = configStartLsn;
        } else {
          // we ignore the config start LSN as it is before the last flushed, not in the server anymore
          // this is the normal scenario on later pipeline restarts
          streamLsn = serverFlushedLsn;
          LOG.debug(
              "Configuration Start LSN '{}' is older than server Flushed LSN '{}', this is expected after the first pipeline run",
              configStartLsn,
              serverFlushedLsn
          );
        }
        break;
      default:
        throw new IllegalStateException("Should not happen startValue enum not handled" + configBean.startValue);
    }
  }
  streamBuilder.withStartPosition(streamLsn);

  stream = streamBuilder.start();

  LOG.debug("Starting the Stream with LSN : {}", streamLsn);

  heartBeatSender.scheduleAtFixedRate(this::sendUpdates, 1, 900, TimeUnit.MILLISECONDS);
  return streamLsn;
}
 
Example #26
Source File: SimpleRowWriter.java    From PgBulkInsert with MIT License
public SimpleRowWriter(final Table table, final PGConnection connection) throws SQLException {
    this(table, connection, false);
}
 
Example #27
Source File: PostgreSqlUtils.java    From PgBulkInsert with MIT License
private static Optional<PGConnection> tryCastConnection(final Connection connection) {
    if (connection instanceof PGConnection) {
        return Optional.of((PGConnection) connection);
    }
    return Optional.empty();
}
 
Example #28
Source File: SqlFileStore.java    From syndesis with Apache License 2.0
private static PGConnection getPostgresConnection(Connection conn) throws SQLException {
    if (conn instanceof PGConnection) {
        return PGConnection.class.cast(conn);
    }
    return conn.unwrap(PGConnection.class);
}
 
Example #29
Source File: PgSQLCopyOutputAdapter.java    From OpenRate with Apache License 2.0
/**
 * Perform data copy into database
 *
 * @param transactionNumber
 * @return true if successful, false if not
 */
public boolean performCopy(int transactionNumber) {
  try {
    // Get connection  	
    JDBCcon = DBUtil.getConnection(dataSourceName);

    // Initialize copy manager
    cpManager = JDBCcon.unwrap(PGConnection.class).getCopyAPI();

    //###################################
    // Start with copy steps	    
    for (String copyRecords : dataHolder.keySet()) {
      // Prepare copy data				
      byte[] CopyData = StringUtils.join(dataHolder.get(copyRecords), System.getProperty("line.separator")).getBytes();
      //  Start with COPY operation/s
      long numOfRowsEffected = cpManager.copyIn(this.prepareCopyStatement(copyRecords), new ByteArrayInputStream(CopyData));
      getPipeLog().debug("Copy effected " + numOfRowsEffected + " rows in module <" + getSymbolicName() + ">");

    }
  } catch (InitializationException iex) {
    // Not good. Abort the transaction
    setErrorMessageAbortTransaction("Error acquiring connection from DataSource", iex);
  } catch (SQLException Sex) {
    // Not good. Abort the transaction
    setErrorMessageAbortTransaction("Error performing copy to database", Sex);

  } catch (IOException ioe) {
    // Not good. Abort the transaction
    setErrorMessageAbortTransaction("Error closing InputStream", ioe);
  } finally {
    // Close the connection
    DBUtil.close(JDBCcon);
  }

  // We have errors. Abort.	
  if (getExceptionHandler().hasError()) {
    return false;
  }

  // Everything went well
  return true;
}
 
Example #30
Source File: PostgreSqlUtils.java    From PgBulkInsert with MIT License
public static PGConnection getPGConnection(final Connection connection) {
    return tryGetPGConnection(connection).orElseThrow(() -> new PgConnectionException("Could not obtain a PGConnection"));
}