org.apache.jena.query.Dataset Java Examples

The following examples show how to use org.apache.jena.query.Dataset. They are drawn from open-source projects; the source file, project, and license are listed above each example.
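Before the project examples, here is a minimal sketch of the core Dataset workflow using plain Jena APIs (org.apache.jena.query, org.apache.jena.riot): create an in-memory dataset, read a file into its default model, and run a SELECT query. The file name and query string are placeholders rather than code taken from any of the projects below.

Dataset dataset = DatasetFactory.create();
RDFDataMgr.read(dataset.getDefaultModel(), "data.ttl");   // placeholder input file
try (QueryExecution qexec = QueryExecutionFactory.create(
        "SELECT * WHERE { ?s ?p ?o }", dataset)) {
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
        System.out.println(results.next());
    }
}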
Example #1
Source File: JenaUtil.java    From shacl with Apache License 2.0
private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) {

    if (dataset == null) {
        dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
    }

    E_Function expr = new E_Function(function.getURI(), args);
    DatasetGraph dsg = dataset.asDatasetGraph();
    Context cxt = ARQ.getContext().copy();
    cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime());
    FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null);
    try {
        NodeValue r = expr.eval(BindingRoot.create(), env);
        if (r != null) {
            return r.asNode();
        }
    }
    catch (ExprEvalException ex) {
        // evaluation failed: treat as "no result" and return null below
    }
    return null;
}
 
Example #2
Source File: RdfFirstSuccessReader.java    From RDFUnit with Apache License 2.0
@Override
public void readDataset(Dataset dataset) throws RdfReaderException {
    StringBuilder message = new StringBuilder();
    // return the first successful attempt
    for (RdfReader r : readers) {
        try {
            r.readDataset(dataset);
            // return on first read() that does not throw an exception
            return;
        } catch (RdfReaderException e) {
            message.append("\n");
            if (e.getMessage() != null) {
                message.append(e.getMessage());
            } else {
                message.append(e);
            }
        }
    }

    throw new RdfReaderException("Cannot read from any reader: " + message.toString());
}
 
Example #3
Source File: RdfDataManagerTest.java    From rdf2neo with GNU Lesser General Public License v3.0
/**
 * Loads the test TDB used in this class with a bunch of RDF data.
 */
@BeforeClass
public static void initData ()
{
	rdfMgr.open ( TDB_PATH );
	Dataset ds = rdfMgr.getDataSet ();
	Model m = ds.getDefaultModel ();
	ds.begin ( ReadWrite.WRITE );
	try 
	{
		//if ( m.size () > 0 ) return;
		m.read ( IOUtils.openResourceReader ( "test_data.ttl" ), null, "TURTLE" );
		ds.commit ();
	}
	catch ( Exception ex ) {
		ds.abort ();
		throw new RuntimeException ( "Test error: " + ex.getMessage (), ex );
	}
	finally { 
		ds.end ();
	}
}
 
Example #4
Source File: SHACLSPARQLARQFunction.java    From shacl with Apache License 2.0
@Override
   public NodeValue executeBody(Dataset dataset, Model defaultModel, QuerySolution bindings) {
    try( QueryExecution qexec = createQueryExecution(dataset, defaultModel, bindings) ) {
        if(arqQuery.isAskType()) {
            boolean result = qexec.execAsk();
            return NodeValue.makeBoolean(result);
        }
        else {
            ResultSet rs = qexec.execSelect();
            if(rs.hasNext()) {
                QuerySolution s = rs.nextSolution();
                List<String> resultVars = rs.getResultVars();
                String varName = resultVars.get(0);
                RDFNode resultNode = s.get(varName);
                if(resultNode != null) {
                    return NodeValue.makeNode(resultNode.asNode());
                }
            }
            throw new ExprEvalException("Empty result set for SHACL function");
        }
    }
}
 
Example #5
Source File: JenaUtil.java    From shacl with Apache License 2.0
/**
 * Calls a SPARQL expression and returns the result, using some initial bindings.
 *
 * @param expression     the expression to execute (must contain absolute URIs)
 * @param initialBinding the initial bindings for the unbound variables
 * @param dataset        the query Dataset or null for default
 * @return the result or null
 */
public static Node invokeExpression(String expression, QuerySolution initialBinding, Dataset dataset) {
    if (dataset == null) {
        dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
    }
    Query query = ARQFactory.get().createExpressionQuery(expression);
    try(QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, initialBinding)) {
        ResultSet rs = qexec.execSelect();
        Node result = null;
        if (rs.hasNext()) {
            QuerySolution qs = rs.next();
            String firstVarName = rs.getResultVars().get(0);
            RDFNode rdfNode = qs.get(firstVarName);
            if (rdfNode != null) {
                result = rdfNode.asNode();
            }
        }
        return result;
    }
}
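
A hypothetical call to the utility above (JenaUtil and ARQFactory are the shacl project's own classes, not Jena APIs; the variable name and expression are purely illustrative):

QuerySolutionMap initialBinding = new QuerySolutionMap();
initialBinding.add("arg1", ResourceFactory.createTypedLiteral(2));
Node result = JenaUtil.invokeExpression("?arg1 + 3", initialBinding, null);  // null dataset -> default model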
 
Example #6
Source File: Item.java    From Processor with Apache License 2.0
@Override
public Response put(Dataset dataset)
{
    Model existing = getService().getDatasetAccessor().getModel(getURI().toString());

    if (!existing.isEmpty()) // remove existing representation
    {
        EntityTag entityTag = new EntityTag(Long.toHexString(ModelUtils.hashModel(dataset.getDefaultModel())));
        ResponseBuilder rb = getRequest().evaluatePreconditions(entityTag);
        if (rb != null)
        {
            if (log.isDebugEnabled()) log.debug("PUT preconditions were not met for resource: {} with entity tag: {}", this, entityTag);
            return rb.build();
        }
    }
    
    if (log.isDebugEnabled()) log.debug("PUT GRAPH {} to GraphStore", getURI());
    getService().getDatasetAccessor().putModel(getURI().toString(), dataset.getDefaultModel());
    
    if (existing.isEmpty()) return Response.created(getURI()).build();
    else return Response.ok(dataset).build();
}
 
Example #7
Source File: SkolemizingDatasetProvider.java    From Processor with Apache License 2.0
@Override
public Dataset process(Dataset dataset)
{
    dataset = super.process(dataset); // validation
    
    process(dataset.getDefaultModel());
    
    Iterator<String> it = dataset.listNames();
    while (it.hasNext())
    {
        String graphURI = it.next();
        process(dataset.getNamedModel(graphURI));
    }
    
    return dataset;
}
 
Example #8
Source File: TestAnnotationTools.java    From incubator-taverna-language with Apache License 2.0
@Test
public void componentStuff() throws Exception {
	Dataset dataset = annotations.annotationDatasetFor(component.getMainWorkflow());
	String query = "PREFIX comp: <http://purl.org/DP/components#> "
			+ "SELECT ?fits ?from ?to WHERE { "
			+ " GRAPH ?any { "
			+ "?w comp:fits ?fits ; "
			+ "   comp:migrates ?path . "
			+ "?path comp:fromMimetype ?from ; "
			+ "      comp:toMimetype ?to . "
			+ "  }"
			+ "}";
	
	ResultSet select = QueryExecutionFactory.create(query, dataset).execSelect();
	assertTrue(select.hasNext());
	QuerySolution solution = select.next();
	assertEquals("image/tiff", solution.getLiteral("from").toString());
	assertEquals("image/tiff", solution.getLiteral("to").toString());
	assertEquals("MigrationAction", solution.getResource("fits").getLocalName());
}
 
Example #9
Source File: TargetContainsPFunction.java    From shacl with Apache License 2.0
@Override
public QueryIterator exec(Binding binding, PropFuncArg argSubject,
		Node predicate, PropFuncArg argObject, ExecutionContext execCxt) {

	argSubject = Substitute.substitute(argSubject, binding);
	argObject = Substitute.substitute(argObject, binding);
	
	if(!argObject.getArg().isVariable()) {
		throw new ExprEvalException("Right hand side of tosh:targetContains must be a variable");
	}
	
	Node targetNode = argSubject.getArgList().get(0);
	Node shapesGraphNode = argSubject.getArgList().get(1);
	
	Model currentModel = ModelFactory.createModelForGraph(execCxt.getActiveGraph());
	Dataset dataset = new DatasetWithDifferentDefaultModel(currentModel, DatasetImpl.wrap(execCxt.getDataset()));

	Model model = dataset.getNamedModel(shapesGraphNode.getURI());
	Resource target = (Resource) model.asRDFNode(targetNode);

	Set<Node> focusNodes = new HashSet<Node>();
	SHACLUtil.addNodesInTarget(target, dataset, focusNodes);
	return new QueryIterExtendByVar(binding, (Var) argObject.getArg(), focusNodes.iterator(), execCxt);
}
 
Example #10
Source File: UpdateExecuteOperations.java    From xcurator with Apache License 2.0
public static void ex3(Dataset dataset)
{
    // Build up the request then execute it.
    // This is the preferred way for complex sequences of operations. 
    UpdateRequest request = UpdateFactory.create() ;
    request.add("DROP ALL")
           .add("CREATE GRAPH <http://example/g2>") ;
    // Different style.
    // Equivalent to request.add("...")
    UpdateFactory.parse(request, "LOAD <file:etc/update-data.ttl> INTO GRAPH <http://example/g2>") ;
    
    // And perform the operations.
    UpdateAction.execute(request, dataset) ;
    
    System.out.println("# Debug format");
    SSE.write(dataset) ;
    
    System.out.println();
    
    System.out.println("# N-Quads: S P O G") ;
    RDFDataMgr.write(System.out, dataset, Lang.NQUADS) ;
}
 
Example #11
Source File: DatasetDeclarationPlan.java    From sparql-generate with Apache License 2.0
protected final Context prepareDataset(Binding binding, Context context) {
	if (fromClauses == null || fromClauses.isEmpty()) {
		return context;
	}
	final DatasetGraph dsg = DatasetGraphFactory.createGeneral();
	fromClauses.forEach((fromClause) -> {
		if (fromClause.getGenerate() == null) {
			if (!fromClause.isNamed()) {
				addDefaultGraph(binding, context, dsg, fromClause.getName());
			} else {
				addNamedGraph(binding, context, dsg, fromClause.getName());
			}
		} else {
			SPARQLExtQuery generate = fromClause.getGenerate();
			if (!fromClause.isNamed()) {
				addDefaultGraph(binding, context, dsg, generate);
			} else {
				addNamedGraph(binding, context, dsg, generate, fromClause.getName());
			}
		}
	});
	Dataset newDataset = DatasetFactory.wrap(dsg);
	return ContextUtils.fork(context).setDataset(newDataset).fork();
}
 
Example #12
Source File: ITER_DefaultGraphNamespaces.java    From sparql-generate with Apache License 2.0
@Override
public List<List<NodeValue>> exec(List<NodeValue> args) {
    if (!args.isEmpty()) {
        LOG.debug("Expecting zero arguments.");
        throw new ExprEvalException("Expecting zero arguments.");
    }
    
    Dataset dataset = ContextUtils.getDataset(getContext());
    
    List<List<NodeValue>> output = new ArrayList<>();
    for (Map.Entry<String, String> prefix : dataset.getDefaultModel().getNsPrefixMap().entrySet()) {
        List<NodeValue> ns = new ArrayList<>();
        ns.add(new NodeValueString(prefix.getKey()));
        ns.add(new NodeValueString(prefix.getValue()));
        output.add(ns);
    }
    return output;
}
 
Example #13
Source File: DatasetDeclarationPlan.java    From sparql-generate with Apache License 2.0
private void addNamedGraph(Binding binding, Context context, DatasetGraph dsg, Expr sourceExpr) {
	String sourceURI = evalSourceURI(binding, context, sourceExpr);
	final String absURI = baseURI(sourceURI, baseURI);
	Dataset dataset = ContextUtils.getDataset(context);
	Node n = NodeFactory.createURI(absURI);
	Graph g = dsg.getGraph(n);
	if (g == null) {
		g = GraphFactory.createJenaDefaultGraph();
		dsg.addGraph(n, g);
	}
	// default: check the dataset
	if (dataset.containsNamedModel(absURI)) {
		Graph dg = dataset.getNamedModel(absURI).getGraph();
		GraphUtil.addInto(g, dg);
		return;
	}
	// fallback: load as RDF graph
	StreamRDF dest = StreamRDFLib.graph(g);
	ContextUtils.loadGraph(context, sourceURI, absURI, dest);
}
 
Example #14
Source File: SHACLUtil.java    From shacl with Apache License 2.0
/**
 * Gets all nodes from a given sh:target.
 * @param target  the value of sh:target (parameterizable or SPARQL target)
 * @param dataset  the dataset to operate on
 * @return an Iterable over the resources
 */
public static Iterable<RDFNode> getResourcesInTarget(Resource target, Dataset dataset) {
	Resource type = JenaUtil.getType(target);
	Resource executable;
	SHParameterizableTarget parameterizableTarget = null;
	if(SHFactory.isParameterizableInstance(target)) {
		executable = type;
		parameterizableTarget = SHFactory.asParameterizableTarget(target);
	}
	else {
		executable = target;
	}
	CustomTargetLanguage plugin = CustomTargets.get().getLanguageForTarget(executable);
	if(plugin != null) {
		Set<RDFNode> results = new HashSet<>();
		plugin.createTarget(executable, parameterizableTarget).addTargetNodes(dataset, results);
		return results;
	}
	else {
		return new ArrayList<>();
	}
}
 
Example #15
Source File: AnnotationTools.java    From incubator-taverna-language with Apache License 2.0
private String getLiteral(Child<?> workflowBean, String propertyUri) {
	Dataset annotations = annotationDatasetFor(workflowBean);
	URI beanUri = uritools.uriForBean(workflowBean);
	Node subject = NodeFactory.createURI(beanUri.toString());
	Node property = NodeFactory.createURI(propertyUri);

	Iterator<Quad> found = annotations.asDatasetGraph().find(null, subject,
			property, null);
	if (!found.hasNext()) {
		return null;
	}
	return found.next().getObject().toString(false);
}
 
Example #16
Source File: RdfReader.java    From RDFUnit with Apache License 2.0
default Dataset readDataset() throws RdfReaderException {
    try {
        Dataset dataset = DatasetFactory.create();
        readDataset(dataset);
        return dataset;
    } catch (Exception e) {
        throw new RdfReaderException(e);
    }
}
 
Example #17
Source File: ExTDB3.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String assemblerFile = "Store/tdb-assembler.ttl" ;
    
    // Find a particular description in the file where there are several: 
    Model spec = RDFDataMgr.loadModel(assemblerFile) ;

    // Find the right starting point for the description in some way.
    Resource root = null ;

    if ( false )
        // If you know the Resource URI:
        root = spec.createResource("http://example/myChoiceOfURI" );
    else
    {
            // Alternatively, look for a single resource of the right type.
        try {
            // Find the required description - the file can contain descriptions of many different types.
            root = GraphUtils.findRootByType(spec, VocabTDB.tDatasetTDB) ;
            if ( root == null )
                throw new JenaException("Failed to find a suitable root") ;
        } catch (TypeNotUniqueException ex)
        { throw new JenaException("Multiple types for: "+DatasetAssemblerVocab.tDataset) ; }
    }

    Dataset ds = (Dataset)Assembler.general.open(root) ;
}
 
Example #18
Source File: JenaUtil.java    From shacl with Apache License 2.0
public static Node invokeFunction3(Resource function, RDFNode argument1, RDFNode argument2, RDFNode argument3, Dataset dataset) {
	ExprList args = new ExprList();
	args.add(argument1 != null ? NodeValue.makeNode(argument1.asNode()) : new ExprVar("arg1"));
	args.add(argument2 != null ? NodeValue.makeNode(argument2.asNode()) : new ExprVar("arg2"));
	args.add(argument3 != null ? NodeValue.makeNode(argument3.asNode()) : new ExprVar("arg3"));
	return invokeFunction(function, args, dataset);
}
 
Example #19
Source File: SPARQLSubstitutions.java    From shacl with Apache License 2.0
public static QueryExecution createQueryExecution(Query query, Dataset dataset, QuerySolution bindings) {
	if(USE_TRANSFORM && bindings != null) {
		Map<Var,Node> substitutions = new HashMap<Var,Node>();
		Iterator<String> varNames = bindings.varNames();
		while(varNames.hasNext()) {
			String varName = varNames.next();
			substitutions.put(Var.alloc(varName), bindings.get(varName).asNode());
		}
		Query newQuery = JenaUtil.queryWithSubstitutions(query, substitutions);
		return ARQFactory.get().createQueryExecution(newQuery, dataset);
	}
	else {
		return ARQFactory.get().createQueryExecution(query, dataset, bindings);
	}
}
 
Example #20
Source File: UpdateExecuteOperations.java    From xcurator with Apache License 2.0
public static void main(String []args)
{
    // Create an empty, transactional in-memory Dataset (it has an empty default graph and no named graphs)
    Dataset dataset = DatasetFactory.createTxnMem() ;
    
    ex1(dataset) ;
    ex2(dataset) ;
    ex3(dataset) ;
}
 
Example #21
Source File: Templates.java    From shacl with Apache License 2.0
/**
 * Takes an instance of dash:SPARQLConstructTemplate and parameter bindings and returns a Model with the triples
 * that result from the execution of all CONSTRUCT queries in the template using the given parameter bindings.
 * @param template  the template defining the sh:construct queries to run
 * @param bindings  the initial bindings for the CONSTRUCT queries
 * @param dataset  the Dataset to query over
 * @return a Model with the constructed triples
 */
public static Model construct(Resource template, QuerySolutionMap bindings, Dataset dataset) {
	Model result = JenaUtil.createDefaultModel();
	template.listProperties(SH.construct).filterKeep(s -> s.getObject().isLiteral()).forEachRemaining(s -> {			
		String queryString = s.getString();
		Query arqQuery = ARQFactory.get().createQuery(SPARQLSubstitutions.withPrefixes(queryString, template));
		try(QueryExecution qexec = ARQFactory.get().createQueryExecution(arqQuery, dataset, bindings)) {
			qexec.execConstruct(result);
		}
	});
	return result;
}
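
A hypothetical invocation of the helper above; templateResource, the parameter name, and someResource are placeholders standing in for a real dash:SPARQLConstructTemplate and its declared parameters:

QuerySolutionMap bindings = new QuerySolutionMap();
bindings.add("subject", someResource);   // placeholder parameter binding
Model constructed = Templates.construct(templateResource, bindings, dataset);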
 
Example #22
Source File: RouteDeleteIT.java    From fcrepo-camel-toolbox with Apache License 2.0
@Before
public void setUpFuseki() throws Exception {
    final FcrepoClient client = createClient();
    final FcrepoResponse res = client.post(URI.create("http://localhost:" + FCREPO_PORT + "/fcrepo/rest"))
                .body(loadResourceAsStream("container.ttl"), "text/turtle").perform();
    fullPath = res.getLocation().toString();

    logger.info("Starting EmbeddedFusekiServer on port {}", FUSEKI_PORT);
    final Dataset ds = new DatasetImpl(createDefaultModel());
    server = FusekiEmbeddedServer.create().setPort(parseInt(FUSEKI_PORT))
                .setContextPath("/fuseki").add("/test", ds).build();
    server.start();
}
 
Example #23
Source File: RouteUpdateIT.java    From fcrepo-camel-toolbox with Apache License 2.0
@Before
public void setUpFuseki() throws Exception {
    final FcrepoClient client = createClient();
    final FcrepoResponse res = client.post(URI.create("http://localhost:" + FCREPO_PORT + "/fcrepo/rest"))
              .body(loadResourceAsStream("indexable.ttl"), "text/turtle").perform();
    fullPath = res.getLocation().toString();

    logger.info("Starting EmbeddedFusekiServer on port {}", FUSEKI_PORT);
    final Dataset ds = new DatasetImpl(createDefaultModel());
    server = FusekiEmbeddedServer.create().setPort(parseInt(FUSEKI_PORT))
                .setContextPath("/fuseki").add("/test", ds).build();
    server.start();
}
 
Example #24
Source File: ValidationEngine.java    From shacl with Apache License 2.0
/**
 * Constructs a new ValidationEngine.
 * @param dataset  the Dataset to operate on
 * @param shapesGraphURI  the URI of the shapes graph (must be in the dataset)
 * @param shapesGraph  the ShapesGraph with the shapes to validate against
 * @param report  the sh:ValidationReport object in the results Model, or null to create a new one
 */
protected ValidationEngine(Dataset dataset, URI shapesGraphURI, ShapesGraph shapesGraph, Resource report) {
	super(dataset, shapesGraph, shapesGraphURI);
	setConfiguration(new ValidationEngineConfiguration());
	if(report == null) {
		Model reportModel = JenaUtil.createMemoryModel();
		reportModel.setNsPrefixes(dataset.getDefaultModel());
		reportModel.withDefaultMappings(shapesGraph.getShapesModel());
		this.report = reportModel.createResource(SH.ValidationReport);
	}
	else {
		this.report = report;
	}
}
 
Example #25
Source File: RdfReadAndCacheReader.java    From RDFUnit with Apache License 2.0
@Override
public void readDataset(Dataset dataset) throws RdfReaderException {
    reader.readDataset(dataset);
    //If read succeeds try to write
    try {
        //TODO change this
        writer.write(dataset.getDefaultModel());
    } catch (RdfWriterException e) {
        log.warn("Could not cache RdfReader", e);
    }
}
 
Example #26
Source File: DB2QueryExecutionImpl.java    From quetzal with Eclipse Public License 2.0
public Dataset getDataset()
{
    if (ds != null)
        return ds;

    throw new RdfStoreException("Operation Not supported");
}
 
Example #27
Source File: JSTarget.java    From shacl with Apache License 2.0
@Override
public void addTargetNodes(Dataset dataset, Collection<RDFNode> results) {
	
	boolean nested = SHACLScriptEngineManager.begin();
	JSScriptEngine engine = SHACLScriptEngineManager.getCurrentEngine();

	Model model = dataset.getDefaultModel();
	JSGraph dataJSGraph = new JSGraph(model.getGraph(), engine);
	try {
		engine.executeLibraries(as);
		engine.put(SH.JS_DATA_VAR, dataJSGraph);
		
		QuerySolutionMap bindings = new QuerySolutionMap();
		if(parameterizableTarget != null) {
			parameterizableTarget.addBindings(bindings);
		}

		Object result = engine.invokeFunction(as.getFunctionName(), bindings);
		if(NashornUtil.isArray(result)) {
			for(Object obj : NashornUtil.asArray(result)) {
				Node node = JSFactory.getNode(obj);
				results.add(model.asRDFNode(node));
			}
		}
	}
	catch(Exception ex) {
		ExceptionUtil.throwUnchecked(ex);
	}
	finally {
		dataJSGraph.close();
		SHACLScriptEngineManager.end(nested);
	}
}
 
Example #28
Source File: SPARQLExtCli.java    From sparql-generate with Apache License 2.0
private static Dataset getDataset(File dir, FileConfigurations request) {
	try {
		return request.loadDataset(dir);
	} catch (Exception ex) {
		LOG.warn("Error while loading the dataset, no dataset will be used.");
		return DatasetFactory.create();
	}
}
 
Example #29
Source File: ExTDB_Txn2.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String directory = "MyDatabases/DB1" ;
    Dataset dataset = TDBFactory.createDataset(directory) ;

    // Start WRITE transaction. 
    //   It's possible to read from the dataset inside the write transaction.

    //   An application can have other Datasets, in the same JVM, 
    //   tied to the same TDB database performing read
    //   transactions concurrently. If another write transaction
    //   starts, the call to dataset.begin(WRITE) blocks until the
    //   existing writer finishes.
    
    dataset.begin(ReadWrite.WRITE) ;
    try
    {
        GraphStore graphStore = GraphStoreFactory.create(dataset) ;
        // Do a SPARQL Update.
        String sparqlUpdateString = StrUtils.strjoinNL(
             "PREFIX . <http://example/>",
             "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
             ) ;

        execUpdate(sparqlUpdateString, graphStore) ;
        dataset.commit() ;
        // Or call .abort()
        
    } finally
    {
        // Notify the end of the transaction.
        // The transaction was finished at the point .commit or .abort was called.
        // .end will force an abort() if no previous call to .commit() or .abort()
        // has occurred, so .end() helps track the state of the transaction.
        // .end() can be called multiple times for the same .begin(WRITE)
        dataset.end() ;
    }
}
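
GraphStore and GraphStoreFactory above come from older Jena releases; on current Jena the same update can be applied to the Dataset directly. A sketch, assuming org.apache.jena.system.Txn and org.apache.jena.update.UpdateAction are on the classpath:

Txn.executeWrite(dataset, () ->
    UpdateAction.parseExecute(
        "PREFIX : <http://example/> INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }",
        dataset));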
 
Example #30
Source File: RdfMultipleReader.java    From RDFUnit with Apache License 2.0
@Override
public void readDataset(Dataset dataset) throws RdfReaderException {

    for (RdfReader r : readers) {
        try {
            r.readDataset(dataset);
        } catch (RdfReaderException e) {
            throw new RdfReaderException("Cannot read from reader", e);
        }
    }


}