org.apache.jena.tdb.TDBFactory Java Examples

The following examples show how to use org.apache.jena.tdb.TDBFactory. Each example is taken from an open source project; the source file and its license are noted above the code.
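For orientation, here is a minimal sketch (not taken from any of the projects below; the class name and directory path are illustrative) showing the basic pattern most examples follow: open a TDB-backed dataset in a directory, work inside a transaction, and close the dataset when done.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.tdb.TDBFactory;

public class TDBFactoryBasics {
    public static void main(String[] args) {
        // Open (or create) a TDB-backed dataset persisted in the given directory.
        Dataset dataset = TDBFactory.createDataset("MyDatabases/DB1");
        dataset.begin(ReadWrite.READ);
        try {
            // ... run SPARQL queries against the dataset here ...
        } finally {
            dataset.end();
        }
        dataset.close();
    }
}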
Example #1
Source File: ExTDB_Txn3.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    DatasetGraphTransaction dsg = (DatasetGraphTransaction)TDBFactory.createDatasetGraph() ;

    // Start READ transaction. 
    dsg.begin(ReadWrite.READ) ;
    
    try
    {
        // Do some queries
        String sparqlQueryString1 = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
        execQuery(sparqlQueryString1, dsg) ;
    } finally
    {
        dsg.end() ;
    }
}
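The execQuery helper is not included in the snippet above; a minimal sketch of what it could look like, assuming a SELECT query and using DatasetFactory.wrap to turn the DatasetGraph back into a Dataset (the actual xcurator helper may differ):

public static void execQuery(String sparqlQueryString, DatasetGraph dsg)
{
    // Wrap the DatasetGraph so it can be passed to the query execution API.
    Dataset ds = DatasetFactory.wrap(dsg) ;
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, ds) ;
    try {
        ResultSet results = qexec.execSelect() ;
        ResultSetFormatter.out(results) ;
    } finally { qexec.close() ; }
}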
 
Example #2
Source File: QueryCommand.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given a command line and a list of queries, execute 'query' using TDB, writing its mappings to
 * disk.
 *
 * @param line CommandLine with options
 * @param queries List of queries
 * @throws IOException on problem running queries
 */
private static void executeOnDisk(CommandLine line, List<List<String>> queries)
    throws IOException {
  Dataset dataset = createTDBDataset(line);
  boolean keepMappings = CommandLineHelper.getBooleanValue(line, "keep-tdb-mappings", false);
  String tdbDir = CommandLineHelper.getDefaultValue(line, "tdb-directory", ".tdb");
  try {
    runQueries(line, dataset, queries);
  } finally {
    dataset.close();
    TDBFactory.release(dataset);
    if (!keepMappings) {
      boolean success = IOHelper.cleanTDB(tdbDir);
      if (!success) {
        logger.error(String.format("Unable to remove directory '%s'", tdbDir));
      }
    }
  }
}
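The createTDBDataset and runQueries helpers are defined elsewhere in QueryCommand; a minimal sketch of what createTDBDataset might do, assuming it only resolves the --tdb-directory option (the real ROBOT implementation is not shown here):

private static Dataset createTDBDataset(CommandLine line) {
  // Resolve the TDB directory (defaulting to ".tdb") and open or create a dataset there.
  String tdbDir = CommandLineHelper.getDefaultValue(line, "tdb-directory", ".tdb");
  return TDBFactory.createDataset(tdbDir);
}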
 
Example #3
Source File: ExTDB4.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed dataset in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;
    
    // Potentially expensive query.
    String sparqlQueryString = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
    // See http://incubator.apache.org/jena/documentation/query/app_api.html
    
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ;
    ResultSet results = qexec.execSelect() ;
    ResultSetFormatter.out(results) ;
    qexec.close() ;

    dataset.close();
}
 
Example #4
Source File: ExTDB5.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed dataset in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;
    
    // Potentially expensive query.
    String sparqlQueryString = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
    // See http://incubator.apache.org/jena/documentation/query/app_api.html
    
    Query query = QueryFactory.create(sparqlQueryString) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ;
    try {
      ResultSet results = qexec.execSelect() ;
      for ( ; results.hasNext() ; )
      {
          QuerySolution soln = results.nextSolution() ;
          int count = soln.getLiteral("count").getInt() ;
          System.out.println("count = "+count) ;
      }
    } finally { qexec.close() ; }

    // Close the dataset.
    dataset.close();
    
}
 
Example #5
Source File: TDBOntologyPlugin.java    From BioSolr with Apache License 2.0
@Override
public void initialise(Map<String, Object> configuration) throws PluginInitialisationException {
	LOGGER.debug("Initialising ontology plugin: {}", PLUGIN_NAME);
	if (!configuration.containsKey(ENABLED_CFGKEY)) {
		LOGGER.info("No '{}' config key - assuming plugin disabled", ENABLED_CFGKEY);
	} else {
		enabled = (Boolean)configuration.get(ENABLED_CFGKEY);
		if (enabled) {
			if (!configuration.containsKey(TDB_PATH_CFGKEY)) {
				throw new PluginInitialisationException("No " + TDB_PATH_CFGKEY + " specified - cannot create TDB dataset.");
			} else {
				this.dataset = TDBFactory.createDatasetGraph((String) configuration.get(TDB_PATH_CFGKEY));
			}
		}
	}
}
 
Example #6
Source File: SparqlDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@BeforeClass
public static void setUpClass() throws Exception {
    final String typeName = "SparqlSourceType";
    if ( ! DataSourceTypesRegistry.isRegistered(typeName) ) {
        DataSourceTypesRegistry.register( typeName,
                                          new SparqlDataSourceType() );
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    jena = new File(tmpdir, "ldf-sparql-test");
    jena.mkdir();
    
    dataset = TDBFactory.createDataset(jena.getAbsolutePath());

    Model model = dataset.getDefaultModel();
    InputStream in = ClassLoader.getSystemResourceAsStream("demo.nt");
    RDFDataMgr.read(model, in, Lang.NTRIPLES);

    // Dynamically-generated port comes from pom.xml configuration: build-helper-maven-plugin
    int fusekiPort = Integer.parseInt(System.getProperty("fuseki.port"));

    // Create Fuseki, loaded with the test dataset
    fuseki = FusekiServer.create().setPort(fusekiPort).add("/ds", dataset).build();
    fuseki.start();

    // Everything is in place, now create the LDF datasource                
    JsonObject config = createConfig("sparql test", "sparql test",
                                     typeName);
    
    JsonObject settings = new JsonObject();
    settings.addProperty("endpoint", "http://localhost:" + fusekiPort + "/ds");
    config.add("settings", settings);

    setDatasource(DataSourceFactory.create(config));
}
 
Example #7
Source File: JenaOntologySearch.java    From BioSolr with Apache License 2.0
private Dataset buildBaseDataset() {
	Dataset jenaData;
	
	if (StringUtils.isNotBlank(jenaConfig.getAssemblerFile())) {
		LOGGER.debug("Building dataset from assembler file {}", jenaConfig.getAssemblerFile());
		jenaData = DatasetFactory.assemble(jenaConfig.getAssemblerFile(), jenaConfig.getAssemblerDataset());
	} else if (StringUtils.isNotBlank(jenaConfig.getTdbPath())) {
		LOGGER.debug("Building dataset from TDB data at {}", jenaConfig.getTdbPath());
		jenaData = TDBFactory.createDataset(jenaConfig.getTdbPath());
	} else {
		LOGGER.debug("Building dataset from ontology URI {}", jenaConfig.getOntologyUri());
		FileManager fileManager = FileManager.get();
		Model model = fileManager.loadModel(jenaConfig.getOntologyUri());
		
		// Build the base dataset backed by the model loaded from the URI
		jenaData = DatasetFactory.create(model);
	}
	
	return jenaData;
}
 
Example #8
Source File: SparqlDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@AfterClass
public static void tearDownClass() throws Exception {
    fuseki.stop();

    TDBFactory.release(dataset);
    File[] files = jena.listFiles();
    for (File f : files) {
        f.delete();
    }
    jena.delete();
 
}
 
Example #9
Source File: ExQuadFilter.java    From xcurator with Apache License 2.0
/** Example setup - in-memory dataset with two graphs, one triple in each */
private static Dataset setup()
{
    Dataset ds = TDBFactory.createDataset() ;
    DatasetGraph dsg = ds.asDatasetGraph() ;
    Quad q1 = SSE.parseQuad("(<http://example/g1> <http://example/s> <http://example/p> <http://example/o1>)") ;
    Quad q2 = SSE.parseQuad("(<http://example/g2> <http://example/s> <http://example/p> <http://example/o2>)") ;
    dsg.add(q1) ;
    dsg.add(q2) ;
    return ds ;
}
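A hypothetical follow-up (not part of ExQuadFilter) that queries the dataset returned by setup() and counts the triples per named graph:

Dataset ds = setup() ;
String queryString = "SELECT ?g (count(*) AS ?count) { GRAPH ?g { ?s ?p ?o } } GROUP BY ?g" ;
QueryExecution qexec = QueryExecutionFactory.create(queryString, ds) ;
try {
    ResultSetFormatter.out(qexec.execSelect()) ;
} finally { qexec.close() ; }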
 
Example #10
Source File: ExTDB1.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    // Direct way: Make a TDB-backed Jena model in the named directory.
    String directory = "MyDatabases/DB1" ;
    Dataset ds = TDBFactory.createDataset(directory) ;
    Model model = ds.getDefaultModel() ;
    
    // ... do work ...
    
    // Close the dataset.
    ds.close();
    
}
 
Example #11
Source File: ExTDB_Txn1.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start READ transaction. 
    //   No updates or changes to the dataset are possible while this
    //   dataset is used for a read transaction.
    //   An application can have other Datasets, in the same JVM, 
    //   tied to the same TDB database performing read or write
    //   transactions concurrently.
    
    dataset.begin(ReadWrite.READ) ;
    try
    {
        // Do some queries
        String sparqlQueryString1 = "SELECT (count(*) AS ?count) { ?s ?p ?o }" ;
        execQuery(sparqlQueryString1, dataset) ;
        
        String sparqlQueryString2 = "SELECT * { ?s ?p ?o }" ;
        execQuery(sparqlQueryString2, dataset) ;
        
        // Can also call dataset.abort() or dataset.commit() here 
    } finally
    {
        // Notify the end of the READ transaction.
        // Any of dataset.abort(), dataset.commit() or dataset.end() ends the transaction;
        // .end() can be called multiple times for the same .begin(READ).
        dataset.end() ;
    }
}
 
Example #12
Source File: ExTDB_Txn2.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start WRITE transaction. 
    //   It's possible to read from the dataset inside the write transaction.

    //   An application can have other Datasets, in the same JVM, 
    //   tied to the same TDB database performing read
    //   transactions concurrently. If another write transaction
    //   starts, the call to dataset.begin(WRITE) blocks until the
    //   existing writer finishes.
    
    dataset.begin(ReadWrite.WRITE) ;
    try
    {
        GraphStore graphStore = GraphStoreFactory.create(dataset) ;
        // Do a SPARQL Update.
        String sparqlUpdateString = StrUtils.strjoinNL(
             "PREFIX . <http://example/>",
             "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
             ) ;

        execUpdate(sparqlUpdateString, graphStore) ;
        dataset.commit() ;
        // Or call .abort()
        
    } finally
    {
        // Notify the end of the transaction.
        // The transaction was finished at the point .commit or .abort was called.
        // .end will force an abort() if no previous call to .commit() or .abort()
        // has occurred, so .end() helps track the state of the transaction.
        // .end() can be called multiple times for the same .begin(WRITE)
        dataset.end() ;
    }
}
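The execUpdate helper is not included above; a minimal sketch matching the older GraphStore-based update API that this example uses (the actual xcurator helper may differ):

public static void execUpdate(String sparqlUpdateString, GraphStore graphStore)
{
    UpdateRequest request = UpdateFactory.create(sparqlUpdateString) ;
    UpdateProcessor proc = UpdateExecutionFactory.create(request, graphStore) ;
    proc.execute() ;
}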
 
Example #13
Source File: SPARQLQueryBuilderTest.java    From inception with Apache License 2.0
/**
 * Creates a dataset with FTS support for the RDFS label property.
 */
private static Dataset createFusekiFTSDataset()
{
    Dataset ds1 = TDBFactory.createDataset();
    Directory dir = new RAMDirectory();
    EntityDefinition eDef = new EntityDefinition("iri", "text");
    eDef.setPrimaryPredicate(org.apache.jena.vocabulary.RDFS.label);
    TextIndex tidx = new TextIndexLucene(dir, new TextIndexConfig(eDef));
    Dataset ds = TextDatasetFactory.create(ds1, tidx);
    return ds;
}
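A hypothetical query (not part of the test class) against the FTS-enabled dataset returned above, using the jena-text text:query property function to find resources whose indexed rdfs:label matches "example":

Dataset ds = createFusekiFTSDataset() ;
String queryString = String.join("\n",
        "PREFIX text: <http://jena.apache.org/text#>",
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
        "SELECT ?s ?label WHERE { ?s text:query 'example' ; rdfs:label ?label }") ;
QueryExecution qexec = QueryExecutionFactory.create(queryString, ds) ;
try {
    ResultSetFormatter.out(qexec.execSelect()) ;
} finally { qexec.close() ; }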
 
Example #14
Source File: JenaTDBDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@AfterClass
public static void tearDownClass() throws Exception {
    TDBFactory.release(dataset);
    File[] files = jena.listFiles();
    for (File f : files) {
        f.delete();
    }
    jena.delete();
 
}
 
Example #15
Source File: JenaTDBDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@BeforeClass
public static void setUpClass() throws Exception {
    final String typeName = "JenaSourceType";
    if ( ! DataSourceTypesRegistry.isRegistered(typeName) ) {
        DataSourceTypesRegistry.register( typeName,
                                          new JenaTDBDataSourceType() );
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    jena = new File(tmpdir, "ldf-jena-test");
    jena.mkdir();
    
    dataset = TDBFactory.createDataset(jena.getAbsolutePath());

    Model model = dataset.getDefaultModel();
    InputStream in = ClassLoader.getSystemResourceAsStream("demo.nt");
    RDFDataMgr.read(model, in, Lang.NTRIPLES);

    // Everything is in place, now create the LDF datasource                
    JsonObject config = createConfig("jena tdb test", "jena tdb test",
                                     typeName);
    
    JsonObject settings = new JsonObject();
    settings.addProperty("directory", jena.getAbsolutePath());
    config.add("settings", settings);

    setDatasource(DataSourceFactory.create(config));
}
 
Example #16
Source File: IOHelper.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given a path to an RDF/XML or TTL file and an RDF language, load the file as the default model
 * of a TDB dataset backed by a directory to improve processing time. Return the new dataset.
 *
 * <p>WARNING - this creates a directory at the given tdbDir location!
 *
 * @param inputPath input path of RDF/XML or TTL file
 * @param tdbDir location to put TDB mappings
 * @return Dataset instantiated with triples
 * @throws JenaException if TDB directory can't be written to
 */
public static Dataset loadToTDBDataset(String inputPath, String tdbDir) throws JenaException {
  Dataset dataset;
  if (new File(tdbDir).isDirectory()) {
    dataset = TDBFactory.createDataset(tdbDir);
    if (!dataset.isEmpty()) {
      return dataset;
    }
  }
  dataset = TDBFactory.createDataset(tdbDir);
  logger.debug(String.format("Parsing input '%s' to dataset", inputPath));
  // Track parsing time
  long start = System.nanoTime();
  Model m;
  dataset.begin(ReadWrite.WRITE);
  try {
    m = dataset.getDefaultModel();
    FileManager.get().readModel(m, inputPath);
    dataset.commit();
  } catch (JenaException e) {
    dataset.abort();
    dataset.end();
    dataset.close();
    throw new JenaException(String.format(syntaxError, inputPath));
  } finally {
    dataset.end();
  }
  long time = (System.nanoTime() - start) / 1000000000;
  logger.debug(String.format("Parsing complete - took %s seconds", String.valueOf(time)));
  return dataset;
}
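A hypothetical caller of loadToTDBDataset (the input path and query are illustrative): run a query inside a READ transaction, then close and release the dataset so the TDB directory can be cleaned up:

Dataset dataset = IOHelper.loadToTDBDataset("ontology.ttl", ".tdb");
dataset.begin(ReadWrite.READ);
try {
  QueryExecution qexec =
      QueryExecutionFactory.create("SELECT (count(*) AS ?count) { ?s ?p ?o }", dataset);
  try {
    ResultSetFormatter.out(qexec.execSelect());
  } finally {
    qexec.close();
  }
} finally {
  dataset.end();
}
dataset.close();
TDBFactory.release(dataset);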
 
Example #17
Source File: ReportOperation.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given an input path to an ontology and a map of options, create a Report object and run the
 * report queries specified in a profile (from options, or the default) on a TDB dataset. Return the
 * completed Report object. The labels option is not supported with TDB.
 *
 * @param inputPath path to load triples to TDB
 * @param options map of report options
 * @return Report object with violation details
 * @throws Exception on any loading, query, or reporting error
 */
public static Report getTDBReport(String inputPath, Map<String, String> options)
    throws Exception {
  String tdbDir = OptionsHelper.getOption(options, "tdb-directory", ".tdb");

  // Load dataset
  // Fail if the input path is not in RDF/XML or TTL
  Dataset dataset = IOHelper.loadToTDBDataset(inputPath, tdbDir);

  Report report;
  boolean keepMappings = OptionsHelper.optionIsTrue(options, "keep-tdb-mappings");
  try {
    report = getTDBReport(dataset, options);
  } finally {
    // Close and release
    dataset.close();
    try {
      TDBFactory.release(dataset);
    } catch (TDBTransactionException e) {
      // Do nothing - already released
    }

    if (!keepMappings) {
      // Maybe delete
      boolean success = IOHelper.cleanTDB(tdbDir);
      if (!success) {
        logger.error(String.format("Unable to remove directory '%s'", tdbDir));
      }
    }
  }

  return report;
}
 
Example #18
Source File: QueryCommand.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given a command line, an ontology, and a list of queries, run the queries over the ontology
 * with any options.
 *
 * @param line CommandLine with options
 * @param inputOntology OWLOntology to query
 * @param queries List of queries
 * @throws Exception on issue loading ontology or running queries
 */
private static void executeInMemory(
    CommandLine line, OWLOntology inputOntology, List<List<String>> queries) throws Exception {
  boolean useGraphs = CommandLineHelper.getBooleanValue(line, "use-graphs", false);
  Dataset dataset = QueryOperation.loadOntologyAsDataset(inputOntology, useGraphs);
  try {
    runQueries(line, dataset, queries);
  } finally {
    dataset.close();
    TDBFactory.release(dataset);
  }
}
 
Example #19
Source File: Zone.java    From rdf-delta with Apache License 2.0
/**
 * Create a dataset appropriate to the storage type.
 * This does <em>not</em> write the configuration details into the on-disk zone information.
 */
public DatasetGraph localStorage(LocalStorageType storage, Path dataPath) {
    switch(storage) {
        case EXTERNAL:     return null;
        case MEM:          return DatasetGraphFactory.createTxnMem();
        case TDB:
            return TDBFactory.createDatasetGraph(IOX.asLocation(dataPath));
        case TDB2:
            return DatabaseMgr.connectDatasetGraph(dataPath.toString());
        case NONE:         return null;
        default :
            throw new NotImplemented("Zone::localStorage = "+storage);
    }
}
 
Example #20
Source File: JenaTDBBasedRequestProcessorForTPFs.java    From Server.Java with MIT License
public JenaTDBBasedRequestProcessorForTPFs(File tdbdir, String defaultGraph) {
    this.defaultGraph = defaultGraph;
    this.tdb = TDBFactory.createDataset(tdbdir.getAbsolutePath());
}
 
Example #21
Source File: ExTDB6.java    From xcurator with Apache License 2.0
public static void main(String[] args) throws Exception {
    /// turn off the "No BGP optimizer warning"
    TDB.setOptimizerWarningFlag(false);

    final IRIFactory iriFactory = IRIFactory.semanticWebImplementation();

    final String DATASET_DIR_NAME = "data0";
    final Dataset data0 = TDBFactory.createDataset( DATASET_DIR_NAME );

    // show the currently registered names
    for (Iterator<String> it = data0.listNames(); it.hasNext(); ) {
        out.println("NAME="+it.next());
    }

    out.println("getting named model...");
    /// this is the OWL portion
    final Model model = data0.getNamedModel( MY_NS );
    out.println("Model := "+model);

    out.println("getting graph...");
    /// this is the DATA in that MODEL
    final Graph graph = model.getGraph();
    out.println("Graph := "+graph);

    if (graph.isEmpty()) {
        final Resource product1 = model.createResource(
                iriFactory.construct( MY_NS +"product/1" )
                    .toString() );

        final Property hasName = model.createProperty( MY_NS, "#hasName");
        final Statement stmt = model.createStatement(
                product1, hasName, model.createLiteral("Beach Ball","en") );
        out.println("Statement = " + stmt);

        model.add(stmt);

        // just for fun
        out.println("Triple := " + stmt.asTriple().toString());
    } else {
        out.println("Graph is not Empty; it has "+graph.size()+" Statements");
        long t0, t1;
        t0 = System.currentTimeMillis();
        final Query q = QueryFactory.create(
                "PREFIX exns: <"+MY_NS+"#>\n"+
                "PREFIX exprod: <"+MY_NS+"product/>\n"+
                " SELECT * "
                // if you don't provide the Model to the
                // QueryExecutionFactory below, then you'll need
                // to specify the FROM;
                // you *can* always specify it, if you want
                // +" FROM <"+MY_NS+">\n"
                // +" WHERE { ?node <"+MY_NS+"#hasName> ?name }"
                // +" WHERE { ?node exns:hasName ?name }"
                // +" WHERE { exprod:1 exns:hasName ?name }"
                +" WHERE { ?res ?pred ?obj }"
                );
        out.println("Query := "+q);
        t1 = System.currentTimeMillis();
        out.println("QueryFactory.TIME="+(t1 - t0));

        t0 = System.currentTimeMillis();
        final QueryExecution qExec = QueryExecutionFactory
                // if you query the whole DataSet,
                // you have to provide a FROM in the SPARQL
                //.create(q, data0);
                .create(q, model);
        t1 = System.currentTimeMillis();
        out.println("QueryExecutionFactory.TIME="+(t1 - t0));

        try {
            t0 = System.currentTimeMillis();
            ResultSet rs = qExec.execSelect();
            t1 = System.currentTimeMillis();
            out.println("executeSelect.TIME="+(t1 - t0));
            while (rs.hasNext()) {
                QuerySolution sol = rs.next();
                out.println("Solution := "+sol);
                for (Iterator<String> names = sol.varNames(); names.hasNext(); ) {
                    final String name = names.next();
                    out.println("\t"+name+" := "+sol.get(name));
                }
            }
        } finally {
            qExec.close();
        }
    }
    out.println("closing graph");
    graph.close();
    out.println("closing model");
    model.close();
    //out.println("closing DataSetGraph");
    //dsg.close();
    out.println("closing DataSet");
    data0.close();
}
 
Example #22
Source File: QueryCommand.java    From robot with BSD 3-Clause "New" or "Revised" License
/**
 * Given an input state and command line arguments, query the ontology. The input ontology is not
 * changed.
 *
 * @param state the state from the previous command, or null
 * @param args the command-line arguments
 * @return the unchanged state
 * @throws Exception on any problem
 */
public CommandState execute(CommandState state, String[] args) throws Exception {
  CommandLine line = CommandLineHelper.getCommandLine(getUsage(), getOptions(), args);
  if (line == null) {
    return null;
  }

  IOHelper ioHelper = CommandLineHelper.getIOHelper(line);

  // If update(s) are provided, run them, then return the OWLOntology.
  // This is different from the rest of the Query operations because it returns an ontology,
  // whereas the others return query results.
  List<String> updatePaths = CommandLineHelper.getOptionalValues(line, "update");
  if (!updatePaths.isEmpty()) {
    state = CommandLineHelper.updateInputOntology(ioHelper, state, line);
    OWLOntology inputOntology = state.getOntology();

    OWLOntology outputOntology = executeUpdate(state, inputOntology, ioHelper, updatePaths);
    CommandLineHelper.maybeSaveOutput(line, outputOntology);
    state.setOntology(outputOntology);
    return state;
  }

  boolean createTDB = CommandLineHelper.getBooleanValue(line, "create-tdb", false);
  if (createTDB) {
    // Create and close without deleting TDB directory
    Dataset dataset = createTDBDataset(line);
    dataset.close();
    TDBFactory.release(dataset);
    return state;
  }

  List<List<String>> queries = getQueries(line);

  boolean useTDB = CommandLineHelper.getBooleanValue(line, "tdb", false);
  if (useTDB) {
    // DOES NOT UPDATE STATE
    // This will not work with chained commands as it uses the `--input` option
    // Updating the state results in loading the ontology to memory
    executeOnDisk(line, queries);
  } else {
    state = CommandLineHelper.updateInputOntology(ioHelper, state, line);
    executeInMemory(line, state.getOntology(), queries);
  }

  return state;
}
 
Example #23
Source File: ExTDB2.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String assemblerFile = "Store/tdb-assembler.ttl" ;

    Dataset ds = TDBFactory.assembleDataset(assemblerFile) ;
    
    // ... do work ...
    
    ds.close() ;
}
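The assembler file itself is not included in the example; a minimal sketch of what a TDB assembler description such as Store/tdb-assembler.ttl might contain (the location is illustrative):

@prefix tdb: <http://jena.hpl.hp.com/2008/tdb#> .

<#dataset> a tdb:DatasetTDB ;
    tdb:location "MyDatabases/DB1" .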