Java Code Examples for org.apache.jena.tdb.TDBFactory#createDataset()

The following examples show how to use org.apache.jena.tdb.TDBFactory#createDataset(). Each example lists its source file, the project it comes from, and that project's license.
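
Before the project examples, here is a minimal, self-contained sketch of the two common forms of the call: a directory path for a persistent, file-backed dataset, and no argument for an in-memory TDB dataset. This sketch is not taken from any of the projects below, and the directory name is a placeholder.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.tdb.TDBFactory;

public class TDBCreateSketch {
    public static void main(String[] args) {
        // Persistent dataset backed by files in the given directory (created if it does not exist).
        Dataset persistent = TDBFactory.createDataset("target/tdb-example");

        // In-memory TDB dataset, convenient for tests.
        Dataset inMemory = TDBFactory.createDataset();

        // TDB datasets are transactional; wrap reads and writes in transactions.
        persistent.begin(ReadWrite.READ);
        try {
            System.out.println("default model size = " + persistent.getDefaultModel().size());
        } finally {
            persistent.end();
        }

        persistent.close();
        inMemory.close();
    }
}
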
Example 1
Source File: SparqlDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@BeforeClass
public static void setUpClass() throws Exception {
    final String typeName = "SparqlSourceType";
    if ( ! DataSourceTypesRegistry.isRegistered(typeName) ) {
        DataSourceTypesRegistry.register( typeName,
                                          new SparqlDataSourceType() );
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    jena = new File(tmpdir, "ldf-sparql-test");
    jena.mkdir();
    
    dataset = TDBFactory.createDataset(jena.getAbsolutePath());

    Model model = dataset.getDefaultModel();
    InputStream in = ClassLoader.getSystemResourceAsStream("demo.nt");
    RDFDataMgr.read(model, in, Lang.NTRIPLES);

    // Dynamically-generated port comes from pom.xml configuration: build-helper-maven-plugin
    int fusekiPort = Integer.parseInt(System.getProperty("fuseki.port"));

    // Create Fuseki, loaded with the test dataset
    fuseki = FusekiServer.create().setPort(fusekiPort).add("/ds", dataset).build();
    fuseki.start();

    // Everything is in place, now create the LDF datasource                
    JsonObject config = createConfig("sparql test", "sparql test",
                                     typeName);
    
    JsonObject settings = new JsonObject();
    settings.addProperty("endpoint", "http://localhost:" + fusekiPort + "/ds");
    config.add("settings", settings);

    setDatasource(DataSourceFactory.create(config));
}
 
Example 2
Source File: JenaOntologySearch.java    From BioSolr with Apache License 2.0
private Dataset buildBaseDataset() {
	Dataset jenaData;
	
	if (StringUtils.isNotBlank(jenaConfig.getAssemblerFile())) {
		LOGGER.debug("Building dataset from assembler file {}", jenaConfig.getAssemblerFile());
		jenaData = DatasetFactory.assemble(jenaConfig.getAssemblerFile(), jenaConfig.getAssemblerDataset());
	} else if (StringUtils.isNotBlank(jenaConfig.getTdbPath())) {
		LOGGER.debug("Building dataset from TDB data at {}", jenaConfig.getTdbPath());
		jenaData = TDBFactory.createDataset(jenaConfig.getTdbPath());
	} else {
		LOGGER.debug("Building dataset from ontology URI {}", jenaConfig.getOntologyUri());
		FileManager fileManager = FileManager.get();
		Model model = fileManager.loadModel(jenaConfig.getOntologyUri());
		
		// Build the base dataset backed by the model loaded from the URI
		jenaData = DatasetFactory.create(model);
	}
	
	return jenaData;
}
 
Example 3
Source File: ExTDB5.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed dataset in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;
    
    // Potentially expensive query.
    String sparqlQueryString = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
    // See http://incubator.apache.org/jena/documentation/query/app_api.html
    
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ;
    try {
      ResultSet results = qexec.execSelect() ;
      for ( ; results.hasNext() ; )
      {
          QuerySolution soln = results.nextSolution() ;
          int count = soln.getLiteral("count").getInt() ;
          System.out.println("count = "+count) ;
      }
    } finally { qexec.close() ; }

    // Close the dataset.
    dataset.close();
    
}
 
Example 4
Source File: ExTDB4.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed dataset in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;
    
    // Potentially expensive query.
    String sparqlQueryString = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
    // See http://incubator.apache.org/jena/documentation/query/app_api.html
    
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ;
    ResultSet results = qexec.execSelect() ;
    ResultSetFormatter.out(results) ;
    qexec.close() ;

    dataset.close();
}
 
Example 5
Source File: SPARQLQueryBuilderTest.java    From inception with Apache License 2.0
/**
 * Creates a dataset description with FTS support for the RDFS label property.
 */
private static Dataset createFusekiFTSDataset()
{
    Dataset ds1 = TDBFactory.createDataset();
    Directory dir = new RAMDirectory();
    EntityDefinition eDef = new EntityDefinition("iri", "text");
    eDef.setPrimaryPredicate(org.apache.jena.vocabulary.RDFS.label);
    TextIndex tidx = new TextIndexLucene(dir, new TextIndexConfig(eDef));
    Dataset ds = TextDatasetFactory.create(ds1, tidx);
    return ds;
}
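
A dataset built this way is normally queried with the jena-text property function. The following sketch is not part of the test above; the search term is a placeholder and the imports from org.apache.jena.query are assumed.

Dataset ds = createFusekiFTSDataset();
ds.begin(ReadWrite.READ);
try {
    // text:query matches the Lucene index built over rdfs:label; 'placeholder' is a stand-in term.
    String q = String.join("\n",
            "PREFIX text: <http://jena.apache.org/text#>",
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
            "SELECT ?s ?label WHERE {",
            "  ?s text:query (rdfs:label 'placeholder') .",
            "  ?s rdfs:label ?label",
            "}");
    try (QueryExecution qexec = QueryExecutionFactory.create(QueryFactory.create(q), ds)) {
        ResultSetFormatter.out(qexec.execSelect());
    }
} finally {
    ds.end();
}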
 
Example 6
Source File: IOHelper.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given the path to an RDF/XML or TTL file, load the file as the default model
 * of a TDB dataset backed by a directory to improve processing time. Return the new dataset.
 *
 * <p>WARNING - this creates a directory at given tdbDir location!
 *
 * @param inputPath input path of RDF/XML or TTL file
 * @param tdbDir location to put TDB mappings
 * @return Dataset instantiated with triples
 * @throws JenaException if TDB directory can't be written to
 */
public static Dataset loadToTDBDataset(String inputPath, String tdbDir) throws JenaException {
  Dataset dataset;
  if (new File(tdbDir).isDirectory()) {
    dataset = TDBFactory.createDataset(tdbDir);
    if (!dataset.isEmpty()) {
      return dataset;
    }
  }
  dataset = TDBFactory.createDataset(tdbDir);
  logger.debug(String.format("Parsing input '%s' to dataset", inputPath));
  // Track parsing time
  long start = System.nanoTime();
  Model m;
  dataset.begin(ReadWrite.WRITE);
  try {
    m = dataset.getDefaultModel();
    FileManager.get().readModel(m, inputPath);
    dataset.commit();
  } catch (JenaException e) {
    dataset.abort();
    dataset.end();
    dataset.close();
    throw new JenaException(String.format(syntaxError, inputPath));
  } finally {
    dataset.end();
  }
  long time = (System.nanoTime() - start) / 1000000000;
  logger.debug(String.format("Parsing complete - took %s seconds", String.valueOf(time)));
  return dataset;
}
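
A sketch of calling this helper and then reading from the returned dataset inside a READ transaction; the input path and TDB directory below are placeholders, not values from the ROBOT project.

// Hypothetical paths; the helper creates the TDB directory if it does not already exist.
Dataset dataset = IOHelper.loadToTDBDataset("ontology.ttl", ".tdb");
dataset.begin(ReadWrite.READ);
try {
    long triples = dataset.getDefaultModel().size();
    System.out.println("Default model contains " + triples + " triples");
} finally {
    dataset.end();
}
dataset.close();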
 
Example 7
Source File: JenaTDBDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@BeforeClass
public static void setUpClass() throws Exception {
    final String typeName = "JenaSourceType";
    if ( ! DataSourceTypesRegistry.isRegistered(typeName) ) {
        DataSourceTypesRegistry.register( typeName,
                                          new JenaTDBDataSourceType() );
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    jena = new File(tmpdir, "ldf-jena-test");
    jena.mkdir();
    
    dataset = TDBFactory.createDataset(jena.getAbsolutePath());

    Model model = dataset.getDefaultModel();
    InputStream in = ClassLoader.getSystemResourceAsStream("demo.nt");
    RDFDataMgr.read(model, in, Lang.NTRIPLES);

    // Everything is in place, now create the LDF datasource                
    JsonObject config = createConfig("jena tdb test", "jena tdb test",
                                     typeName);
    
    JsonObject settings = new JsonObject();
    settings.addProperty("directory", jena.getAbsolutePath());
    config.add("settings", settings);

    setDatasource(DataSourceFactory.create(config));
}
 
Example 8
Source File: ExTDB_Txn2.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start WRITE transaction. 
    //   It's possible to read from the dataset inside the write transaction.

    //   An application can have other Datasets, in the same JVM, 
    //   tied to the same TDB database performing read
    //   transactions concurrently. If another write transaction
    //   starts, the call to dataset.begin(WRITE) blocks until
    //   the existing writer finishes.
    
    dataset.begin(ReadWrite.WRITE) ;
    try
    {
        GraphStore graphStore = GraphStoreFactory.create(dataset) ;
        // Do a SPARQL Update.
        String sparqlUpdateString = StrUtils.strjoinNL(
             "PREFIX . <http://example/>",
             "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
             ) ;

        execUpdate(sparqlUpdateString, graphStore) ;
        dataset.commit() ;
        // Or call .abort()
        
    } finally
    {
        // Notify the end of the transaction.
        // The transaction was finished at the point .commit or .abort was called.
        // .end() will force an abort() if no previous call to .commit() or .abort()
        // has occurred, so .end() helps manage the state of the transaction.
        // .end() can be called multiple times for the same .begin(WRITE)
        dataset.end() ;
    }
}
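
The execUpdate helper is defined elsewhere in ExTDB_Txn2. A minimal sketch of such a helper, using the standard Jena update API rather than the original file's code, could look like this:

private static void execUpdate(String sparqlUpdateString, GraphStore graphStore)
{
    // Parse the update and execute it against the transactional graph store.
    UpdateRequest request = UpdateFactory.create(sparqlUpdateString) ;
    UpdateProcessor proc = UpdateExecutionFactory.create(request, graphStore) ;
    proc.execute() ;
}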
 
Example 9
Source File: ExTDB_Txn1.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start READ transaction. 
    //   No updates or changes to the dataset are possible while this
    //   dataset is used for a read transaction.
    //   An application can have other Datasets, in the same JVM, 
    //   tied to the same TDB database performing read or write
    //   transactions concurrently.
    
    dataset.begin(ReadWrite.READ) ;
    try
    {
        // Do some queries
        String sparqlQueryString1 = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
        execQuery(sparqlQueryString1, dataset) ;
        
        String sparqlQueryString2 = "SELECT * { ?s ?p ?o }" ;
        execQuery(sparqlQueryString2, dataset) ;
        
        // Can also call dataset.abort() or dataset.commit() here 
    } finally
    {
        // Notify the end of the READ transaction.
        // Any use of dataset.abort() or dataset.commit() would also have ended the transaction.
        // .end() can be called multiple times for the same .begin(READ)
        dataset.end() ;
    }
}
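
Likewise, execQuery is defined elsewhere in ExTDB_Txn1. A minimal sketch of such a helper, not copied from the original file, could be:

private static void execQuery(String sparqlQueryString, Dataset dataset)
{
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ;
    try {
        // Print the result table to stdout.
        ResultSet results = qexec.execSelect() ;
        ResultSetFormatter.out(results) ;
    } finally { qexec.close() ; }
}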
 
Example 10
Source File: ExTDB1.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed Jena model in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset ds = TDBFactory.createDataset(directory) ;
    Model model = ds.getDefaultModel() ;
    
    // ... do work ...
    
    // Close the dataset.
    ds.close();
    
}
 
Example 11
Source File: ExQuadFilter.java    From xcurator with Apache License 2.0
/** Example setup - in-memory dataset with two graphs, one triple in each */
private static Dataset setup()
{
    Dataset ds = TDBFactory.createDataset() ;
    DatasetGraph dsg = ds.asDatasetGraph() ;
    Quad q1 = SSE.parseQuad("(<http://example/g1> <http://example/s> <http://example/p> <http://example/o1>)") ;
    Quad q2 = SSE.parseQuad("(<http://example/g2> <http://example/s> <http://example/p> <http://example/o2>)") ;
    dsg.add(q1) ;
    dsg.add(q2) ;
    return ds ;
}
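
For illustration only (not part of ExQuadFilter), the dataset returned by setup() can be listed with a GRAPH query to show the two named graphs:

Dataset ds = setup() ;
// List every quad in the named graphs; each graph holds one triple.
String q = "SELECT ?g ?s ?p ?o { GRAPH ?g { ?s ?p ?o } }" ;
try (QueryExecution qexec = QueryExecutionFactory.create(QueryFactory.create(q), ds)) {
    ResultSetFormatter.out(qexec.execSelect()) ;
}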
 
Example 12
Source File: JenaTDBBasedRequestProcessorForTPFs.java    From Server.Java with MIT License
public JenaTDBBasedRequestProcessorForTPFs(File tdbdir, String defaultGraph) {
    this.defaultGraph = defaultGraph;
    this.tdb = TDBFactory.createDataset(tdbdir.getAbsolutePath());
}
 
Example 13
Source File: ExTDB6.java    From xcurator with Apache License 2.0
public static void main(String[] args) throws Exception {
    /// turn off the "No BGP optimizer warning"
    TDB.setOptimizerWarningFlag(false);

    final IRIFactory iriFactory = IRIFactory.semanticWebImplementation();

    final String DATASET_DIR_NAME = "data0";
    final Dataset data0 = TDBFactory.createDataset( DATASET_DIR_NAME );

    // show the currently registered names
    for (Iterator<String> it = data0.listNames(); it.hasNext(); ) {
        out.println("NAME="+it.next());
    }

    out.println("getting named model...");
    /// this is the OWL portion
    final Model model = data0.getNamedModel( MY_NS );
    out.println("Model := "+model);

    out.println("getting graph...");
    /// this is the DATA in that MODEL
    final Graph graph = model.getGraph();
    out.println("Graph := "+graph);

    if (graph.isEmpty()) {
        final Resource product1 = model.createResource(
                iriFactory.construct( MY_NS +"product/1" )
                    .toString() );

        final Property hasName = model.createProperty( MY_NS, "#hasName");
        final Statement stmt = model.createStatement(
                product1, hasName, model.createLiteral("Beach Ball","en") );
        out.println("Statement = " + stmt);

        model.add(stmt);

        // just for fun
        out.println("Triple := " + stmt.asTriple().toString());
    } else {
        out.println("Graph is not Empty; it has "+graph.size()+" Statements");
        long t0, t1;
        t0 = System.currentTimeMillis();
        final Query q = QueryFactory.create(
                "PREFIX exns: <"+MY_NS+"#>\n"+
                "PREFIX exprod: <"+MY_NS+"product/>\n"+
                " SELECT * "
                // if you don't provide the Model to the
                // QueryExecutionFactory below, then you'll need
                // to specify the FROM;
                // you *can* always specify it, if you want
                // +" FROM <"+MY_NS+">\n"
                // +" WHERE { ?node <"+MY_NS+"#hasName> ?name }"
                // +" WHERE { ?node exns:hasName ?name }"
                // +" WHERE { exprod:1 exns:hasName ?name }"
                +" WHERE { ?res ?pred ?obj }"
                );
        out.println("Query := "+q);
        t1 = System.currentTimeMillis();
        out.println("QueryFactory.TIME="+(t1 - t0));

        t0 = System.currentTimeMillis();
        final QueryExecution qExec = QueryExecutionFactory
                // if you query the whole DataSet,
                // you have to provide a FROM in the SPARQL
                //.create(q, data0);
                .create(q, model);
        t1 = System.currentTimeMillis();
        out.println("QueryExecutionFactory.TIME="+(t1 - t0));

        try {
            t0 = System.currentTimeMillis();
            ResultSet rs = qExec.execSelect();
            t1 = System.currentTimeMillis();
            out.println("executeSelect.TIME="+(t1 - t0));
            while (rs.hasNext()) {
                QuerySolution sol = rs.next();
                out.println("Solution := "+sol);
                for (Iterator<String> names = sol.varNames(); names.hasNext(); ) {
                    final String name = names.next();
                    out.println("\t"+name+" := "+sol.get(name));
                }
            }
        } finally {
            qExec.close();
        }
    }
    out.println("closing graph");
    graph.close();
    out.println("closing model");
    model.close();
    //out.println("closing DataSetGraph");
    //dsg.close();
    out.println("closing DataSet");
    data0.close();
}