com.hp.hpl.jena.util.FileManager Java Examples

The following examples show how to use com.hp.hpl.jena.util.FileManager. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: RDFFileManager.java    From Benchmark with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Loads the given service-request model into the default model of the shared
 * dataset, then queries it for resources typed ces:EventRequest and extracts
 * the event pattern of each match.
 *
 * @param serviceRequest file name of the request model, relative to datasetDirectory
 * @return the event pattern of the last matching ces:EventRequest (flagged as a
 *         query); a fresh empty pattern when no match is found
 */
public static EventPattern extractQueryFromDataset(String serviceRequest) {
	Model queryBase = FileManager.get().loadModel(datasetDirectory + serviceRequest);
	dataset.getDefaultModel().add(ModelFactory.createOntologyModel(ontoSpec, queryBase));

	String describeStr = queryPrefix + " select ?x  where{?x rdf:type ces:EventRequest}";
	QueryExecution qe = QueryExecutionFactory.create(describeStr, dataset);
	Map<String, EventDeclaration> edMap = new HashMap<String, EventDeclaration>();
	EventPattern ep = new EventPattern();
	ep.setQuery(true);
	try {
		ResultSet results = qe.execSelect();
		while (results.hasNext()) {
			QuerySolution row = results.next();
			RDFNode edID = row.get("x");
			// NOTE: when multiple requests match, only the last pattern is returned.
			ep = extractEDByServiceID(edID, dataset, edMap).getEp();
		}
	} finally {
		// Fix: the QueryExecution was never closed, leaking its resources.
		qe.close();
	}
	return ep;
}
 
Example #2
Source File: RDFFileManager.java    From Benchmark with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Loads the given service-request model into the default model of the shared
 * dataset, then queries it for resources typed ces:CompositionPlan and
 * extracts the event pattern of each match.
 *
 * @param serviceRequest file name of the request model, relative to datasetDirectory
 * @return the event pattern of the last matching ces:CompositionPlan (not a
 *         query pattern); a fresh empty pattern when no match is found
 */
public static EventPattern extractCompositionPlanFromDataset(String serviceRequest) {
	Model queryBase = FileManager.get().loadModel(datasetDirectory + serviceRequest);
	dataset.getDefaultModel().add(ModelFactory.createOntologyModel(ontoSpec, queryBase));

	String describeStr = queryPrefix + " select ?x  where{?x rdf:type ces:CompositionPlan}";
	QueryExecution qe = QueryExecutionFactory.create(describeStr, dataset);
	Map<String, EventDeclaration> edMap = new HashMap<String, EventDeclaration>();
	EventPattern ep = new EventPattern();
	ep.setQuery(false);
	try {
		ResultSet results = qe.execSelect();
		while (results.hasNext()) {
			QuerySolution row = results.next();
			RDFNode edID = row.get("x");
			// NOTE: when multiple plans match, only the last pattern is returned.
			ep = extractEDByServiceID(edID, dataset, edMap).getEp();
		}
	} finally {
		// Fix: the QueryExecution was never closed, leaking its resources.
		qe.close();
	}
	return ep;
}
 
Example #3
Source File: LUBM.java    From neo4jena with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the RDF input file into an in-memory model and bulk-loads it into the
 * given Neo4j-backed graph, logging the elapsed time.
 *
 * @param njgraph the Neo4j graph database receiving the triples
 * @throws IllegalArgumentException if the input file cannot be opened
 */
public static void write(GraphDatabaseService njgraph) {
	// Fix: logger was registered under Wine.class (copy-paste from Wine.java).
	Logger log = Logger.getLogger(LUBM.class);
	InputStream in = FileManager.get().open(inputFileName);
	if (in == null) {
		throw new IllegalArgumentException("File: " + inputFileName + " not found");
	}

	Model model = ModelFactory.createDefaultModel();
	try {
		// NOTE(review): "RDF" is not a standard Jena syntax name ("RDF/XML" is) — confirm.
		model.read(in, "", "RDF");
	} finally {
		// Fix: the input stream was never closed.
		try { in.close(); } catch (Exception ignored) { /* best-effort close */ }
	}
	double triples = model.size();
	log.info("Model loaded with " + triples + " triples");
	System.out.println("Model loaded with " + triples + " triples");

	NeoGraph graph = new NeoGraph(njgraph);
	graph.startBulkLoad();
	log.info("Connection created");
	Model njmodel = ModelFactory.createModelForGraph(graph);
	log.info("NeoGraph Model initiated");
	System.out.println("NeoGraph Model initiated");
	StopWatch watch = new StopWatch();
	njmodel.add(model);
	// Fix: watch.stop() was called twice, so the log and console reported
	// different (and the second, inflated) timings. Capture the value once.
	long elapsed = watch.stop();
	log.info("Storing completed (ms): " + elapsed);
	graph.stopBulkLoad();
	System.out.println("Storing completed (ms): " + elapsed);
}
 
Example #4
Source File: Wine.java    From neo4jena with Apache License 2.0 6 votes vote down vote up
/**
 * Reads the RDF input file into an in-memory model and bulk-loads it into the
 * given Neo4j-backed graph, printing the elapsed time.
 *
 * @param njgraph the Neo4j graph database receiving the triples
 * @throws IllegalArgumentException if the input file cannot be opened
 */
public static void write(GraphDatabaseService njgraph) {
	InputStream in = FileManager.get().open(inputFileName);
	if (in == null) {
		throw new IllegalArgumentException("File: " + inputFileName + " not found");
	}

	Model model = ModelFactory.createDefaultModel();
	try {
		// NOTE(review): "RDF" is not a standard Jena syntax name ("RDF/XML" is) — confirm.
		model.read(in, "", "RDF");
	} finally {
		// Fix: the input stream was never closed.
		try { in.close(); } catch (Exception ignored) { /* best-effort close */ }
	}
	double triples = model.size();
	System.out.println("Model loaded with " + triples + " triples");

	NeoGraph graph = new NeoGraph(njgraph);
	Model njmodel = ModelFactory.createModelForGraph(graph);
	graph.startBulkLoad();
	System.out.println("NeoGraph Model initiated");
	StopWatch watch = new StopWatch();
	njmodel.add(model);
	System.out.println("Storing completed (ms): " + watch.stop());
	graph.stopBulkLoad();
}
 
Example #5
Source File: ComplianceTests.java    From r2rml-parser with Apache License 2.0 6 votes vote down vote up
/**
 * Loads an N3 model from the classpath and runs a simple SPARQL SELECT over
 * it via the Spring-wired Util bean, logging the row count.
 */
@Test
public void testSparqlQuery() {
	ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("test-context.xml");
	try {
		Util util = (Util) context.getBean("util");

		Model model = ModelFactory.createDefaultModel();
		String modelFilename = "dump1-epersons.rdf";
		InputStream isMap = FileManager.get().open(modelFilename);
		try {
			model.read(isMap, null, "N3");
		} catch (Exception e) {
			// Fix: System.exit(0) killed the JVM and made a broken model look
			// like a passing test. Fail loudly and preserve the cause instead.
			throw new RuntimeException("Error reading model " + modelFilename, e);
		} finally {
			// Fix: the input stream was never closed.
			if (isMap != null) {
				try { isMap.close(); } catch (Exception ignored) { /* best-effort */ }
			}
		}
		String query = "SELECT ?x ?z WHERE {?x dc:source ?z} ";
		LocalResultSet rs = util.sparql(model, query);
		log.info("found " + String.valueOf(rs.getRows().size()));
	} finally {
		// Fix: the Spring context leaked whenever anything above threw.
		context.close();
	}
}
 
Example #6
Source File: ComplianceTests.java    From r2rml-parser with Apache License 2.0 6 votes vote down vote up
/**
 * Loads an N3 model containing reified statements, copies each underlying
 * statement into a fresh model, and logs the resulting statement count.
 */
@Test
public void createModelFromReified() {
	Model model = ModelFactory.createDefaultModel();
	String modelFilename = "example.rdf";
	InputStream isMap = FileManager.get().open(modelFilename);
	try {
		model.read(isMap, null, "N3");
	} catch (Exception e) {
		// Fix: System.exit(0) killed the JVM and made a broken model look
		// like a passing test. Fail loudly and preserve the cause instead.
		throw new RuntimeException("Error reading model " + modelFilename, e);
	} finally {
		// Fix: the input stream was never closed.
		if (isMap != null) {
			try { isMap.close(); } catch (Exception ignored) { /* best-effort */ }
		}
	}

	ArrayList<Statement> stmtToAdd = new ArrayList<Statement>();
	Model newModel = ModelFactory.createDefaultModel();
	RSIterator rsIter = model.listReifiedStatements();
	while (rsIter.hasNext()) {
		ReifiedStatement rstmt = rsIter.next();
		stmtToAdd.add(rstmt.getStatement());
	}
	rsIter.close();
	newModel.add(stmtToAdd.toArray(new Statement[stmtToAdd.size()]));

	log.info("newModel has " + newModel.listStatements().toList().size() + " statements");
}
 
Example #7
Source File: RdfReader.java    From EventCoreference with Apache License 2.0 6 votes vote down vote up
/**
 * Reads an RDF/XML file into a fresh model and dumps it to standard out.
 *
 * @param pathToRdfFile path of the RDF file, resolved via the Jena FileManager
 * @throws IllegalArgumentException if the file cannot be opened
 */
static void readRdfFile (String pathToRdfFile) {
        // create an empty model
        Model model = ModelFactory.createDefaultModel();

        // use the FileManager to find the input file
        InputStream in = FileManager.get().open( pathToRdfFile );
        if (in == null) {
            throw new IllegalArgumentException(
                    "File: " + pathToRdfFile + " not found");
        }

        // read the RDF/XML file (null base URI)
        try {
            model.read(in, null);
        } finally {
            // Fix: the input stream was never closed.
            try { in.close(); } catch (Exception ignored) { /* best-effort */ }
        }

        // write it to standard out
        model.write(System.out);
    }
 
Example #8
Source File: ValidatingRDFParser.java    From GeoTriples with Apache License 2.0 5 votes vote down vote up
/**
 * Parses the configured file into a Jena model, funnelling any parse or I/O
 * failure into the report.
 *
 * @return the loaded model, or {@code null} when loading failed (the failure
 *         has then been recorded on the report)
 */
public Model parse() {
	try {
		return FileManager.get().loadModel(file);
	} catch (JenaException ex) {
		// A RiotException cause means the file itself was malformed;
		// anything else is treated as an I/O problem.
		Throwable cause = ex.getCause();
		boolean isSyntax = cause instanceof RiotException;
		Problem problem = isSyntax ? Problem.SYNTAX_ERROR : Problem.IO_ERROR;
		String message = isSyntax ? cause.getMessage() : ex.getMessage();
		report.report(problem, message);
		return null;
	}
}
 
Example #9
Source File: AssemblerExample.java    From GeoTriples with Apache License 2.0 5 votes vote down vote up
/**
 * Demonstrates the Jena Assembler: loads an assembler specification, builds
 * the model it describes, writes it to standard out, and closes it.
 */
public static void main(String[] args) {
	// Read the assembler specification from its Turtle file.
	Model spec = FileManager.get().loadModel("doc/example/assembler.ttl");

	// Locate the resource that describes the model to assemble (":myModel").
	Resource root = spec.createResource(spec.expandPrefix(":myModel"));

	// Assemble the model, dump it, then release it.
	Model assembled = Assembler.general.openModel(root);
	assembled.write(System.out);
	assembled.close();
}
 
Example #10
Source File: JenaGraphExample.java    From GeoTriples with Apache License 2.0 5 votes vote down vote up
/**
 * Demonstrates querying a D2RQ-mapped database as a Jena graph: loads a
 * mapping, compiles it, finds all triples dated 2003, and prints their
 * subjects.
 */
public static void main(String[] args) {
	// Parse the D2RQ mapping file into a Jena model.
	Model mappingModel = FileManager.get().loadModel("doc/example/mapping-iswc.ttl");

	// Interpret the RDF as a D2RQ mapping and compile it for the engine.
	D2RQReader mappingReader = new D2RQReader(mappingModel, "http://localhost:2020/");
	CompiledMapping compiled = mappingReader.getMapping().compile();

	// Expose the compiled mapping as a queryable graph.
	GraphD2RQ graph = new GraphD2RQ(compiled);

	// Build an (ANY, dc:date, "2003"^^xsd:gYear) find pattern.
	Triple pattern = new Triple(
			Node.ANY,
			DC.date.asNode(),
			Node.createLiteral("2003", null, XSDDatatype.XSDgYear));

	// Run the query and print every matching subject.
	Iterator<Triple> matches = graph.find(pattern);
	while (matches.hasNext()) {
		System.out.println("Published in 2003: " + matches.next().getSubject());
	}
	graph.close();
}
 
Example #11
Source File: RDFFileManager.java    From Benchmark with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Builds the dataset used by the C-SPARQL engine: the service description
 * becomes the default model and the CES ontology is attached as a named model.
 *
 * @param serviceDesc file name of the service description, relative to datasetDirectory
 * @param r reasoner to use; currently unused — reasoner wiring appears to be
 *          disabled here, TODO confirm
 * @return the initialized dataset (also stored in the static {@code dataset} field)
 */
public static Dataset initializeCSPARQLContext(String serviceDesc, Reasoner r) {
	Model serviceModel = FileManager.get().loadModel(datasetDirectory + serviceDesc);
	Model cesOntology = FileManager.get().loadModel(ontologyDirectory + "ces.n3");

	dataset = DatasetFactory.create(serviceModel);
	dataset.addNamedModel(cesPrefix, cesOntology);
	return dataset;
}
 
Example #12
Source File: RDFFileManager.java    From Benchmark with GNU General Public License v3.0 5 votes vote down vote up
/**
 * (Re)creates the TDB-backed working dataset: wipes the database directory,
 * loads the ontologies, and adds the service description (with inference) to
 * the default model.
 *
 * @param serviceDesc file name of the service description, relative to datasetDirectory
 */
public static void initializeDataset(String serviceDesc) {
	// Always start from a clean TDB working directory.
	deleteDir(new File(databaseDirectory));
	if (!(new File(databaseDirectory)).mkdir()) {
		// Fix: message ran the path straight into the text; add a separator.
		System.out.println("can not create working directory: " + databaseDirectory);
	}
	DatasetGraph datasettdb = TDBFactory.createDatasetGraph(databaseDirectory);
	dataset = DatasetImpl.wrap(datasettdb);
	loadOntology(dataset);
	Model serviceBase = FileManager.get().loadModel(datasetDirectory + serviceDesc);
	dataset.getDefaultModel().add(ModelFactory.createOntologyModel(ontoSpec, serviceBase));
}
 
Example #13
Source File: RDFFileManager.java    From Benchmark with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Loads the SSN, OWL-S (Service, Grounding, Process, Profile), CES and city
 * ontologies into the dataset, each wrapped in an inference-enabled ontology
 * model built with {@code ontoSpec}.
 *
 * @param dataset the dataset that receives the named ontology models
 */
private static void loadOntology(Dataset dataset) {
	// SSN sensor ontology, registered under its own prefix.
	Model ssnBase = FileManager.get().loadModel(ontologyDirectory + "ssn.owl");
	Model ssnInf = ModelFactory.createOntologyModel(ontoSpec, ssnBase);
	dataset.addNamedModel(ssnPrefix, ssnInf);

	// NOTE(review): the four OWL-S models below are all registered under the
	// SAME name (owlsPrefix). Depending on the Dataset implementation,
	// addNamedModel with a duplicate name may replace the earlier model, so
	// only Profile.owl might survive — confirm whether a union was intended.
	Model owlService = FileManager.get().loadModel(ontologyDirectory + "Service.owl");
	Model owlServiceInf = ModelFactory.createOntologyModel(ontoSpec, owlService);
	dataset.addNamedModel(owlsPrefix, owlServiceInf);

	Model owlGrounding = FileManager.get().loadModel(ontologyDirectory + "Grounding.owl");
	Model owlGroundingInf = ModelFactory.createOntologyModel(ontoSpec, owlGrounding);
	dataset.addNamedModel(owlsPrefix, owlGroundingInf);

	Model owlProcess = FileManager.get().loadModel(ontologyDirectory + "Process.owl");
	Model owlProcessInf = ModelFactory.createOntologyModel(ontoSpec, owlProcess);
	dataset.addNamedModel(owlsPrefix, owlProcessInf);

	Model owlProfile = FileManager.get().loadModel(ontologyDirectory + "Profile.owl");
	Model owlProfileInf = ModelFactory.createOntologyModel(ontoSpec, owlProfile);
	dataset.addNamedModel(owlsPrefix, owlProfileInf);

	// CES (complex event service) ontology.
	Model cesBase = FileManager.get().loadModel(ontologyDirectory + "ces.n3");
	Model cesInf = ModelFactory.createOntologyModel(ontoSpec, cesBase);
	dataset.addNamedModel(cesPrefix, cesInf);

	// City ontology.
	Model ctBase = FileManager.get().loadModel(ontologyDirectory + "city.n3");
	Model ctInf = ModelFactory.createOntologyModel(ontoSpec, ctBase);
	dataset.addNamedModel(ctPrefix, ctInf);

	// FileManager.get().r(dataset.getNamedModel(ssnPrefix),
	// ModelFactory.createInfModel(ReasonerRegistry.getRDFSReasoner(), ssnBase));
	// FileManager.get().readModel(dataset.getNamedModel(owlPrefix), ontologyDirectory + "Service.owl");
	//
	// FileManager.get().readModel(dataset.getNamedModel(owlPrefix), ontologyDirectory + "Grounding.owl");
	// FileManager.get().readModel(dataset.getNamedModel(owlPrefix), ontologyDirectory + "Process.owl");
	// FileManager.get().readModel(dataset.getNamedModel(owlPrefix), ontologyDirectory + "Profile.owl");
	// FileManager.get().readModel(dataset.getNamedModel(cesPrefix), ontologyDirectory + "ces.n3");
}
 
Example #14
Source File: Course_Test.java    From neo4jena with Apache License 2.0 5 votes vote down vote up
/**
 * Streams the Turtle input file through a BatchHandler into the Neo4j
 * BatchInserter, printing the triple count and elapsed time.
 *
 * @param inserter the Neo4j batch inserter receiving the parsed statements
 * @throws IllegalArgumentException if the input file cannot be opened
 */
public static void write(BatchInserter inserter) {
	InputStream in = FileManager.get().open( inputFileName );
	if (in == null) {
           throw new IllegalArgumentException( "File: " + inputFileName + " not found");
       }

	Model model = ModelFactory.createDefaultModel();

	StopWatch watch = new StopWatch();
	// Register the handler so statements flow to the inserter during read.
	// NOTE(review): 500000 / 60 look like batch size and a flush interval —
	// confirm against BatchHandler's constructor.
	BatchHandler handler = new BatchHandler(inserter, 500000, 60);
	model.register(handler);

	try {
		model.read(in, "", "TTL");
	} finally {
		// Fix: the input stream was never closed.
		try { in.close(); } catch (Exception ignored) { /* best-effort */ }
	}

	double triples = model.size();
	System.out.println("Model loaded with " + triples + " triples" + " time taken: " + watch.stop());
	// (Removed a large block of dead, commented-out NeoGraph loading code.)
}
 
Example #15
Source File: Course.java    From neo4jena with Apache License 2.0 5 votes vote down vote up
/**
 * Reads the Turtle input file into an in-memory model, then bulk-loads it
 * statement-by-statement into the given Neo4j-backed graph, carrying over the
 * namespace prefixes and printing progress and timing.
 *
 * @param njgraph the Neo4j graph database receiving the triples
 * @throws IllegalArgumentException if the input file cannot be opened
 */
public static void write(GraphDatabaseService njgraph) {
	InputStream in = FileManager.get().open(inputFileName);
	if (in == null) {
		throw new IllegalArgumentException("File: " + inputFileName + " not found");
	}

	Model model = ModelFactory.createDefaultModel();
	try {
		model.read(in, "", "TTL");
	} finally {
		// Fix: the input stream was never closed.
		try { in.close(); } catch (Exception ignored) { /* best-effort */ }
	}
	double triples = model.size();
	log.info("Model loaded with " + triples + " triples");
	System.out.println("Model loaded with " + triples + " triples");
	Map<String, String> prefixMap = model.getNsPrefixMap();

	NeoGraph graph = new NeoGraph(njgraph);
	graph.getPrefixMapping().setNsPrefixes(prefixMap);
	graph.startBulkLoad();
	log.info("Connection created");
	Model njmodel = ModelFactory.createModelForGraph(graph);
	log.info("NeoGraph Model initiated");
	System.out.println("NeoGraph Model initiated");

	// Copy statements one at a time so progress can be counted.
	StmtIterator iterator = model.listStatements();
	StopWatch watch = new StopWatch();
	int count = 0;
	while (iterator.hasNext()) {
		njmodel.add(iterator.next());
		count++;
	}
	System.out.println("Total triples loaded are:" + count);
	graph.stopBulkLoad();
	System.out.println("Storing completed (ms): " + watch.stop());
}