Java Code Examples for com.mongodb.client.MongoClient#getDatabase()

The following examples show how to use com.mongodb.client.MongoClient#getDatabase(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
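Before the project examples, here is a minimal, self-contained sketch of the pattern they all share. The connection string, database, and collection names are placeholders, not taken from any project below; note that getDatabase() itself performs no I/O, since MongoDB creates the database lazily on first write.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

public class GetDatabaseExample {
    public static void main(String[] args) {
        // Placeholder connection string; adjust for your deployment.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            // getDatabase() returns a handle immediately; the database itself
            // is created on the server only when data is first written.
            MongoDatabase db = client.getDatabase("exampleDb");
            db.getCollection("exampleCollection", Document.class)
                    .insertOne(new Document("hello", "world"));
        }
    }
}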
Example 1
Source File: MongoSourceTask.java    From mongo-kafka with Apache License 2.0
private ChangeStreamIterable<Document> getChangeStreamIterable(
    final MongoSourceConfig sourceConfig, final MongoClient mongoClient) {
  String database = sourceConfig.getString(DATABASE_CONFIG);
  String collection = sourceConfig.getString(COLLECTION_CONFIG);

  Optional<List<Document>> pipeline = sourceConfig.getPipeline();
  ChangeStreamIterable<Document> changeStream;
  if (database.isEmpty()) {
    LOGGER.info("Watching all changes on the cluster");
    changeStream = pipeline.map(mongoClient::watch).orElseGet(mongoClient::watch);
  } else if (collection.isEmpty()) {
    LOGGER.info("Watching for database changes on '{}'", database);
    MongoDatabase db = mongoClient.getDatabase(database);
    changeStream = pipeline.map(db::watch).orElseGet(db::watch);
  } else {
    LOGGER.info("Watching for collection changes on '{}.{}'", database, collection);
    MongoCollection<Document> coll = mongoClient.getDatabase(database).getCollection(collection);
    changeStream = pipeline.map(coll::watch).orElseGet(coll::watch);
  }

  int batchSize = sourceConfig.getInt(BATCH_SIZE_CONFIG);
  if (batchSize > 0) {
    changeStream.batchSize(batchSize);
  }
  sourceConfig.getFullDocument().ifPresent(changeStream::fullDocument);
  sourceConfig.getCollation().ifPresent(changeStream::collation);
  return changeStream;
}
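Note that getChangeStreamIterable() only configures the stream; consuming it still requires opening a cursor. The sketch below is hypothetical and not part of the mongo-kafka source (cursor() and getResumeToken() are standard driver API; process() is a placeholder):

// Hypothetical consumer for the ChangeStreamIterable built above.
try (MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = changeStream.cursor()) {
    while (cursor.hasNext()) {
        ChangeStreamDocument<Document> event = cursor.next();
        // The resume token lets a restarted task continue from this event.
        BsonDocument resumeToken = event.getResumeToken();
        process(event, resumeToken); // placeholder for application-specific handling
    }
}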
 
Example 2
Source File: MongoDB.java    From aion with MIT License
@Override
public boolean open() {
    if (isOpen()) {
        return true;
    }

    LOG.info("Initializing MongoDB at {}", mongoClientUri);

    // Get the client and create a session for this instance
    MongoClient mongoClient =
            MongoConnectionManager.inst().getMongoClientInstance(this.mongoClientUri);
    ClientSessionOptions sessionOptions =
            ClientSessionOptions.builder()
                    .causallyConsistent(true)
                    .defaultTransactionOptions(
                            TransactionOptions.builder()
                                    .readConcern(ReadConcern.DEFAULT)
                                    .writeConcern(WriteConcern.MAJORITY)
                                    .readPreference(ReadPreference.nearest())
                                    .build())
                    .build();
    this.clientSession = mongoClient.startSession(sessionOptions);

    // Get the database and our collection. Mongo takes care of creating these if they don't
    // exist
    MongoDatabase mongoDb = mongoClient.getDatabase(MongoConstants.AION_DB_NAME);

    // Gets the collection where we will be saving our values. Mongo creates it if it doesn't
    // yet exist
    this.collection = mongoDb.getCollection(this.name, BsonDocument.class);

    LOG.info("Finished opening the Mongo connection");
    return isOpen();
}
 
Example 3
Source File: MongoOperations.java    From quarkus with Apache License 2.0
private static MongoDatabase mongoDatabase(MongoEntity entity) {
    MongoClient mongoClient = mongoClient(entity);
    if (entity != null && !entity.database().isEmpty()) {
        return mongoClient.getDatabase(entity.database());
    }
    String databaseName = getDefaultDatabaseName();
    return mongoClient.getDatabase(databaseName);
}
 
Example 4
Source File: ConfigService.java    From runelite with BSD 2-Clause "Simplified" License
@Autowired
public ConfigService(
	MongoClient mongoClient,
	@Value("${mongo.database}") String databaseName
)
{

	MongoDatabase database = mongoClient.getDatabase(databaseName);
	MongoCollection<Document> collection = database.getCollection("config");
	this.mongoCollection = collection;

	// Create unique index on _userId
	IndexOptions indexOptions = new IndexOptions().unique(true);
	collection.createIndex(Indexes.ascending("_userId"), indexOptions);
}
 
Example 5
Source File: MongoResultsWriter.java    From spring-data-dev-tools with Apache License 2.0
private void doWrite(Collection<RunResult> results) throws ParseException {

		Date now = new Date();
		StandardEnvironment env = new StandardEnvironment();

		String projectVersion = env.getProperty("project.version", "unknown");
		String gitBranch = env.getProperty("git.branch", "unknown");
		String gitDirty = env.getProperty("git.dirty", "no");
		String gitCommitId = env.getProperty("git.commit.id", "unknown");

		ConnectionString uri = new ConnectionString(this.uri);
		// Create the client from the parsed connection string so the configured
		// host is used rather than the driver's default of localhost.
		MongoClient client = MongoClients.create(uri);

		String dbName = StringUtils.hasText(uri.getDatabase()) ? uri.getDatabase() : "spring-data-mongodb-benchmarks";
		MongoDatabase db = client.getDatabase(dbName);

		String resultsJson = ResultsWriter.jsonifyResults(results).trim();
		JSONArray array = (JSONArray) new JSONParser(JSONParser.MODE_PERMISSIVE).parse(resultsJson);
		for (Object object : array) {
			JSONObject dbo = (JSONObject) object;

			String collectionName = extractClass(dbo.get("benchmark").toString());

			Document sink = new Document();
			sink.append("_version", projectVersion);
			sink.append("_branch", gitBranch);
			sink.append("_commit", gitCommitId);
			sink.append("_dirty", gitDirty);
			sink.append("_method", extractBenchmarkName(dbo.get("benchmark").toString()));
			sink.append("_date", now);
			sink.append("_snapshot", projectVersion.toLowerCase().contains("snapshot"));

			sink.putAll(dbo);

			db.getCollection(collectionName).insertOne(fixDocumentKeys(sink));
		}

		client.close();
	}
 
Example 6
Source File: DatastoreImpl.java    From morphia with Apache License 2.0
protected DatastoreImpl(final MongoClient mongoClient, final MapperOptions options, final String dbName) {
    this.mongoClient = mongoClient;
    MongoDatabase database = mongoClient.getDatabase(dbName);
    this.mapper = new Mapper(this, database.getCodecRegistry(), options);

    this.database = database
                        .withCodecRegistry(mapper.getCodecRegistry());
    this.queryFactory = options.getQueryFactory();
}
 
Example 7
Source File: SchemaUtils.java    From aws-athena-query-federation with Apache License 2.0
/**
 * This method will produce an Apache Arrow Schema for the given TableName and DocumentDB connection
 * by scanning up to the requested number of rows and using basic schema inference to determine
 * data types.
 *
 * @param client The DocumentDB connection to use for the scan operation.
 * @param table The DocumentDB TableName for which to produce an Apache Arrow Schema.
 * @param numObjToSample The number of records to scan as part of producing the Schema.
 * @return An Apache Arrow Schema representing the schema of the DocumentDB collection.
 * @note The resulting schema is a union of the schema of every row that is scanned. Presently the code does not
 * attempt to resolve conflicts if a field has different types across documents. It is recommended that you
 * use AWS Glue to define a schema for tables which may have such conflicts. In the future we may enhance this
 * method to use a reasonable default (like String) and coerce heterogeneous fields to avoid query failures, but
 * forcing explicit handling by defining the schema in AWS Glue is likely a better approach.
 */
public static Schema inferSchema(MongoClient client, TableName table, int numObjToSample)
{
    MongoDatabase db = client.getDatabase(table.getSchemaName());
    int docCount = 0;
    int fieldCount = 0;
    try (MongoCursor<Document> docs = db.getCollection(table.getTableName()).find().batchSize(numObjToSample)
            .maxScan(numObjToSample).limit(numObjToSample).iterator()) {
        if (!docs.hasNext()) {
            return SchemaBuilder.newBuilder().build();
        }
        SchemaBuilder schemaBuilder = SchemaBuilder.newBuilder();

        while (docs.hasNext()) {
            docCount++;
            Document doc = docs.next();
            for (String key : doc.keySet()) {
                fieldCount++;
                Field newField = getArrowField(key, doc.get(key));
                Types.MinorType newType = Types.getMinorTypeForArrowType(newField.getType());
                Field curField = schemaBuilder.getField(key);
                Types.MinorType curType = (curField != null) ? Types.getMinorTypeForArrowType(curField.getType()) : null;

                if (curField == null) {
                    schemaBuilder.addField(newField);
                }
                else if (newType != curType) {
                    //TODO: currently we resolve fields with mixed types by defaulting to VARCHAR. This is _not_ ideal
                    logger.warn("inferSchema: Encountered a mixed-type field[{}] {} vs {}, defaulting to String.",
                            key, curType, newType);
                    schemaBuilder.addStringField(key);
                }
                else if (curType == Types.MinorType.LIST) {
                    schemaBuilder.addField(mergeListField(key, curField, newField));
                }
                else if (curType == Types.MinorType.STRUCT) {
                    schemaBuilder.addField(mergeStructField(key, curField, newField));
                }
            }
        }

        Schema schema = schemaBuilder.build();
        if (schema.getFields().isEmpty()) {
            throw new RuntimeException("No columns found after scanning " + fieldCount + " values across " +
                    docCount + " documents. Please ensure the collection is not empty and contains at least 1 supported column type.");
        }
        return schema;
    }
    finally {
        logger.info("inferSchema: Evaluated {} field values across {} documents.", fieldCount, docCount);
    }
}
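A hypothetical call site for inferSchema() might look as follows; the endpoint, table name, and sample size are illustrative only:

// Illustrative only; connection string and names are placeholders.
try (MongoClient client = MongoClients.create("mongodb://docdb.example.com:27017")) {
    TableName table = new TableName("mydb", "mycollection");
    Schema schema = SchemaUtils.inferSchema(client, table, 10);
    for (Field field : schema.getFields()) {
        logger.info("Inferred column {} of type {}", field.getName(), field.getType());
    }
}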
 
Example 8
Source File: DocDBRecordHandler.java    From aws-athena-query-federation with Apache License 2.0
/**
 * Scans DocumentDB using the scan settings set on the requested Split by DocDBMetadataHandler.
 *
 * @see RecordHandler
 */
@Override
protected void readWithConstraint(BlockSpiller spiller, ReadRecordsRequest recordsRequest, QueryStatusChecker queryStatusChecker)
{
    TableName tableName = recordsRequest.getTableName();
    Map<String, ValueSet> constraintSummary = recordsRequest.getConstraints().getSummary();

    MongoClient client = getOrCreateConn(recordsRequest.getSplit());
    MongoDatabase db = client.getDatabase(tableName.getSchemaName());
    MongoCollection<Document> table = db.getCollection(tableName.getTableName());

    Document query = QueryUtils.makeQuery(recordsRequest.getSchema(), constraintSummary);
    Document output = QueryUtils.makeProjection(recordsRequest.getSchema());

    logger.info("readWithConstraint: query[{}] projection[{}]", query, output);

    final MongoCursor<Document> iterable = table
            .find(query)
            .projection(output)
            .batchSize(MONGO_QUERY_BATCH_SIZE).iterator();

    long numRows = 0;
    AtomicLong numResultRows = new AtomicLong(0);
    while (iterable.hasNext() && queryStatusChecker.isQueryRunning()) {
        numRows++;
        spiller.writeRows((Block block, int rowNum) -> {
            Document doc = iterable.next();

            boolean matched = true;
            for (Field nextField : recordsRequest.getSchema().getFields()) {
                Object value = TypeUtils.coerce(nextField, doc.get(nextField.getName()));
                Types.MinorType fieldType = Types.getMinorTypeForArrowType(nextField.getType());
                try {
                    switch (fieldType) {
                        case LIST:
                        case STRUCT:
                            matched &= block.offerComplexValue(nextField.getName(), rowNum, DEFAULT_FIELD_RESOLVER, value);
                            break;
                        default:
                            matched &= block.offerValue(nextField.getName(), rowNum, value);
                            break;
                    }
                    if (!matched) {
                        return 0;
                    }
                }
                catch (Exception ex) {
                    throw new RuntimeException("Error while processing field " + nextField.getName(), ex);
                }
            }

            numResultRows.getAndIncrement();
            return 1;
        });
    }

    logger.info("readWithConstraint: numRows[{}] numResultRows[{}]", numRows, numResultRows.get());
}