org.apache.jena.riot.system.StreamRDF Java Examples

The following examples show how to use org.apache.jena.riot.system.StreamRDF. Each example is taken from an open-source project; the source file, project, and license are noted above each listing.
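
Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them share: wrap a Graph in a StreamRDF sink with StreamRDFLib.graph and feed it from RDFParser. The file name "data.ttl" is a placeholder; any Turtle source would do.

import org.apache.jena.graph.Graph;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFParser;
import org.apache.jena.riot.system.StreamRDF;
import org.apache.jena.riot.system.StreamRDFLib;
import org.apache.jena.sparql.graph.GraphFactory;

public class StreamRDFMinimalExample {
    public static void main(String[] args) {
        // Collect parsed triples into an in-memory graph.
        Graph graph = GraphFactory.createDefaultGraph();
        StreamRDF dest = StreamRDFLib.graph(graph);

        // "data.ttl" is a placeholder input file.
        RDFParser.create()
                 .source("data.ttl")
                 .lang(Lang.TURTLE)
                 .parse(dest);

        System.out.println("Parsed triples: " + graph.size());
    }
}
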
Example #1
Source File: DatasetDeclarationPlan.java    From sparql-generate with Apache License 2.0
private void addNamedGraph(Binding binding, Context context, DatasetGraph dsg, Expr sourceExpr) {
	String sourceURI = evalSourceURI(binding, context, sourceExpr);
	final String absURI = baseURI(sourceURI, baseURI);
	Dataset dataset = ContextUtils.getDataset(context);
	Node n = NodeFactory.createURI(absURI);
	Graph g = dsg.getGraph(n);
	if (g == null) {
		g = GraphFactory.createJenaDefaultGraph();
		dsg.addGraph(n, g);
	}
	// default: check the dataset
	if (dataset.containsNamedModel(absURI)) {
		Graph dg = dataset.getNamedModel(absURI).getGraph();
		GraphUtil.addInto(g, dg);
		return;
	}
	// fallback: load as RDF graph
	StreamRDF dest = StreamRDFLib.graph(g);
	ContextUtils.loadGraph(context, sourceURI, absURI, dest);
}
 
Example #2
Source File: StreamingRDFWriter.java    From tarql with BSD 2-Clause "Simplified" License
public void writeTurtle(String baseIRI, PrefixMapping prefixes, boolean writeBase) {
	// Auto-register RDF prefix so that rdf:type is displayed well
	// All other prefixes come from the query and should be as author intended
	prefixes = ensureRDFPrefix(prefixes);

	if (writeBase) {
		// Jena's streaming Turtle writers don't output base even if it is provided,
		// so we write it directly.
		IndentedWriter w = new IndentedWriter(out);
		RiotLib.writeBase(w, baseIRI);
		w.flush();
	}
	
	StreamRDF writer = new WriterStreamRDFBlocks(out);
	if (dedupWindowSize > 0) {
		writer = new StreamRDFDedup(writer, dedupWindowSize);
	}
	writer.start();
	writer.base(baseIRI);
	for (Entry<String, String> e : prefixes.getNsPrefixMap().entrySet()) {
		writer.prefix(e.getKey(), e.getValue());
	}
	StreamOps.sendTriplesToStream(triples, writer);
	writer.finish();
}
 
Example #3
Source File: RDFStar2RDFTest.java    From RDFstarTools with Apache License 2.0
protected Graph convertAndLoadIntoGraph( String filename )
{
    final String fullFilename = getClass().getResource("/TurtleStar/"+filename).getFile();
    String result;
    try( ByteArrayOutputStream os = new ByteArrayOutputStream() ) {
        new RDFStar2RDF().convert(fullFilename, os);
        result = os.toString();
    }
    catch ( IOException e ) {
        fail( "Closing the output stream failed: " + e.getMessage() );
        return null;
    }

    final StringReader reader = new StringReader(result);
    final Graph g = ModelFactory.createDefaultModel().getGraph();
    final StreamRDF dest = StreamRDFLib.graph(g);

    RDFParser.create()
             .source(reader)
             .lang(Lang.TURTLE)
             .parse(dest);

    return g;
}
 
Example #4
Source File: ContextUtils.java    From sparql-generate with Apache License 2.0
public static void loadGraph(Context context, String sourceURI, String baseURI, StreamRDF dest) {
	if(getDataset(context).containsNamedModel(sourceURI)) {
		final Model model = getDataset(context).getNamedModel(sourceURI);
		StreamRDFOps.sendGraphToStream(model.getGraph(), dest);
		return;
	}
	if(!isRootContext(context)) {
		Context parentContext = (Context) context.get(PARENT_CONTEXT);
		loadGraph(parentContext, sourceURI, baseURI, dest);
		return;
	}
	final SPARQLExtStreamManager sm = (SPARQLExtStreamManager) context.get(SysRIOT.sysStreamManager);
	final String acceptHeader = "text/turtle;q=1.0,application/rdf+xml;q=0.9,*/*;q=0.1";
	final LookUpRequest request = new LookUpRequest(sourceURI, acceptHeader);
	try (TypedInputStream tin = sm.open(request);) {
		if(tin == null) {
			LOG.warn("Could not locate graph " + request);
			return;
		}
		Lang lang = RDFLanguages.contentTypeToLang(tin.getMediaType());
		RDFParser.create().source(tin).base(baseURI).context(context).lang(lang).parse(dest);
	} catch (RiotException ex) {
		LOG.warn("Error while loading graph " + sourceURI, ex);
	}
}
 
Example #5
Source File: RdfBulkUpdateRequestHandler.java    From SolRDF with Apache License 2.0
@Override
public void load(
		final SolrQueryRequest request, 
		final SolrQueryResponse response,
		final ContentStream stream, 
		final UpdateRequestProcessor processor) throws Exception {
	
	final PipedRDFIterator<Triple> iterator = new PipedRDFIterator<Triple>();
	final StreamRDF inputStream = new PipedTriplesStream(iterator);
	
	executor.submit(new Runnable() {
		@Override
		public void run() {
			try {
				RDFDataMgr.parse(
						inputStream, 
						stream.getStream(), 
						RDFLanguages.contentTypeToLang(stream.getContentType()));
			} catch (final IOException exception) {
				throw new SolrException(ErrorCode.SERVER_ERROR, exception);
			}					
		}
	});
		
	// The Graph Store Protocol indicates the target graph URI separately,
	// so the incoming Content-Type here maps to a triples loader even though
	// the indexed tuple could end up stored as a quad.
	final String graphUri = request.getParams().get(Names.GRAPH_URI_ATTRIBUTE_NAME);
	
	final DatasetGraph dataset = new LocalDatasetGraph(request, response, null, null);
	final Graph defaultGraph = graphUri == null 
			? dataset.getDefaultGraph() 
			: dataset.getGraph(NodeFactory.createURI(graphUri));
	while (iterator.hasNext()) {
		defaultGraph.add(iterator.next());
	}		
}
 
Example #6
Source File: RootPlan.java    From sparql-generate with Apache License 2.0
/**
 * Executes a GENERATE query and returns the generated RDF Graph.
 *
 * @param values
 *            the values for the query signature, or null.
 * @param context
 *            the execution context, created using {@link ContextUtils}
 * @return the RDF Graph.
 */
public Model execGenerate(final List<Binding> values, final Context context) {
	final Model model = ModelFactory.createDefaultModel();
	final StreamRDF output = new StreamRDFModel(model);
	checkContextHasNoOutput(context);
	boolean isRoot = ContextUtils.isRootContext(context);
	final Context newContext = ContextUtils.fork(context, isRoot).setGenerateOutput(output).fork();
	execGenerateStream(values, newContext);
	return model;
}
 
Example #7
Source File: DatasetDeclarationPlan.java    From sparql-generate with Apache License 2.0
private void addDefaultGraph(Binding binding, Context context, DatasetGraph dsg, Expr sourceExpr) {
	String sourceURI = evalSourceURI(binding, context, sourceExpr);
	final String absURI = baseURI(sourceURI, baseURI);
	// default: check the dataset
	Dataset dataset = ContextUtils.getDataset(context);
	if (dataset.containsNamedModel(absURI)) {
//		Node n = NodeFactory.createURI(absURI);
		Graph g = dataset.getNamedModel(absURI).getGraph();
		GraphUtil.addInto(dsg.getDefaultGraph(), g);
		return;
	}
	// fallback: load as RDF graph
	StreamRDF dest = StreamRDFLib.graph(dsg.getDefaultGraph());
	ContextUtils.loadGraph(context, sourceURI, absURI, dest);
}
 
Example #8
Source File: ContextUtils.java    From sparql-generate with Apache License 2.0
public static StreamRDF getGenerateOutput(Context context) {
	StreamRDF output = context.get(OUTPUT_GENERATE);
	if(output != null) {
		return output;
	}
	if(!isRootContext(context)) {
		Context parentContext = (Context) context.get(PARENT_CONTEXT);
		return getGenerateOutput(parentContext);
	}
	return null;
}
 
Example #9
Source File: SHACLCReader.java    From shacl with Apache License 2.0
@Override
public void read(InputStream in, String baseURI, Lang lang, StreamRDF output, Context context) {
	try {
		SHACLCLexer lexer = new SHACLCLexer(new ANTLRInputStream(in));
		read(lexer, baseURI, output);
	}
	catch(IOException ex) {
		ExceptionUtil.throwUnchecked(ex);
	}
}
 
Example #10
Source File: SHACLCReader.java    From shacl with Apache License 2.0
@Override
public void read(Reader reader, String baseURI, ContentType ct, StreamRDF output, Context context) {
	try {
		SHACLCLexer lexer = new SHACLCLexer(new ANTLRInputStream(reader));
		read(lexer, baseURI, output);
	}
	catch(IOException ex) {
		ExceptionUtil.throwUnchecked(ex);
	}
}
 
Example #11
Source File: RdfBulkUpdateRequestHandler.java    From SolRDF with Apache License 2.0
@Override
public void load( 
		final SolrQueryRequest request, 
		final SolrQueryResponse response,
		final ContentStream stream, 
		final UpdateRequestProcessor processor) throws Exception {
	
	final PipedRDFIterator<Quad> iterator = new PipedRDFIterator<Quad>();
	final StreamRDF inputStream = new PipedQuadsStream(iterator);
	
	executor.submit(new Runnable() {
		@Override
		public void run() {
			try {
				RDFDataMgr.parse(
						inputStream, 
						stream.getStream(), 
						RDFLanguages.contentTypeToLang(stream.getContentType()));
			} catch (final IOException exception) {
				throw new SolrException(ErrorCode.SERVER_ERROR, exception);
			}					
		}
	});
	
	final DatasetGraph dataset = new LocalDatasetGraph(request, response);
	while (iterator.hasNext()) {
		dataset.add(iterator.next());
	}									
}
 
Example #12
Source File: LangTurtleStar.java    From RDFstarTools with Apache License 2.0
@Override
public void read(InputStream in, String baseURI, ContentType ct, StreamRDF output, Context context) {
	final LangRIOT parser;
	if ( lang == TURTLESTAR ) {
		final Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in);
		parser = new LangTurtleStar(tokenizer, parserProfile, output);
	}
	else {
		parser = RiotParsers.createParser(in, lang, output, parserProfile);
	}

	parser.parse() ;
}
 
Example #13
Source File: ExRIOT_4.java    From xcurator with Apache License 2.0
public static void main(String...argv)
{
    String filename = "data.ttl" ;

    // This is the heart of N-Triples printing ... output is heavily buffered,
    // so the FilterSinkRDF calls flush at the end of parsing.
    Sink<Triple> output = new SinkTripleOutput(System.out, null, SyntaxLabels.createNodeToLabel()) ;
    StreamRDF filtered = new FilterSinkRDF(output, FOAF.name, FOAF.knows) ;
    
    // Call the parsing process. 
    RDFDataMgr.parse(filtered, filename) ;
}
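
The FilterSinkRDF class referenced above is not reproduced on this page. The sketch below shows one way such a filter could look; it is a hypothetical illustration (extending Jena's StreamRDFBase, forwarding only triples whose predicate is one of the given properties, and flushing the buffered sink when parsing finishes), not the original class.

import java.util.HashSet;
import java.util.Set;

import org.apache.jena.atlas.lib.Sink;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.Triple;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.riot.system.StreamRDFBase;

// Hypothetical sketch of a FilterSinkRDF-style class (not the original from ExRIOT_4).
class FilterSinkRDF extends StreamRDFBase {
    private final Sink<Triple> dest;
    private final Set<Node> predicates = new HashSet<>();

    FilterSinkRDF(Sink<Triple> dest, Property... properties) {
        this.dest = dest;
        for (Property p : properties)
            predicates.add(p.asNode());
    }

    @Override
    public void triple(Triple triple) {
        // Forward only triples whose predicate is in the allowed set.
        if (predicates.contains(triple.getPredicate()))
            dest.send(triple);
    }

    @Override
    public void finish() {
        // The sink's output is buffered, so flush it when parsing ends.
        dest.flush();
    }
}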
 
Example #14
Source File: StreamingRDFWriter.java    From tarql with BSD 2-Clause "Simplified" License
public void writeNTriples() {
	StreamRDF writer = new WriterStreamRDFPlain(new IndentedWriter(out));
	if (dedupWindowSize > 0) {
		writer = new StreamRDFDedup(writer, dedupWindowSize);
	}
	writer.start();
	StreamOps.sendTriplesToStream(triples, writer);
	writer.finish();
}
 
Example #15
Source File: StreamRDFDedupTest.java    From tarql with BSD 2-Clause "Simplified" License
@Test public void shouldPassThroughTriples() {
	StreamRDF dedup = new StreamRDFDedup(new MockStreamRDF());
	dedup.start();
	dedup.triple(triple("<a> <a> <a>"));
	dedup.triple(triple("<b> <b> <b>"));
	dedup.triple(triple("<c> <c> <c>"));
	dedup.finish();
	assertEquals(triples("<a> <a> <a>", "<b> <b> <b>", "<c> <c> <c>"), received);
}
 
Example #16
Source File: StreamRDFDedupTest.java    From tarql with BSD 2-Clause "Simplified" License
@Test public void shouldRemoveDuplicateInWindow() {
	StreamRDF dedup = new StreamRDFDedup(new MockStreamRDF());
	dedup.start();
	dedup.triple(triple("<a> <a> <a>"));
	dedup.triple(triple("<a> <a> <a>"));
	dedup.finish();
	assertEquals(triples("<a> <a> <a>"), received);
}
 
Example #17
Source File: StreamRDFDedupTest.java    From tarql with BSD 2-Clause "Simplified" License
@Test public void shouldNotRemoveDuplicateOutsideWindow() {
	StreamRDF dedup = new StreamRDFDedup(new MockStreamRDF(), 2);
	dedup.start();
	dedup.triple(triple("<a> <a> <a>"));
	dedup.triple(triple("<b> <b> <b>"));
	dedup.triple(triple("<a> <a> <a>"));
	dedup.triple(triple("<c> <c> <c>"));
	dedup.triple(triple("<a> <a> <a>"));
	dedup.finish();
	assertEquals(triples("<a> <a> <a>", "<b> <b> <b>", "<c> <c> <c>", "<a> <a> <a>"), received);
}
 
Example #18
Source File: GenerateFormPlan.java    From sparql-generate with Apache License 2.0
private synchronized void outputIfConcrete(
        final StringBuilder sb,
        final StreamRDF outputStream,
        final Triple t) {
    if (t.isConcrete()) {
        if (LOG.isTraceEnabled()) {
            Triple t2 = LogUtils.compress(t);
            sb.append("\n  ").append(t2);
        }
        outputStream.triple(t);
    }
}
 
Example #19
Source File: LangTurtleStar.java    From RDFstarTools with Apache License 2.0
public LangTurtleStar(Tokenizer tokens, ParserProfile profile, StreamRDF dest) {
    super(tokens, wrapParserProfileIfNeeded(profile), dest);

    if ( ! (tokens instanceof TokenizerText) )
        throw new IllegalArgumentException( "The given tokenizer is of an unexpected type (" + tokens.getClass().getName() + ")" );

    setCurrentGraph(null) ;
}
 
Example #20
Source File: LangTurtleStar.java    From RDFstarTools with Apache License 2.0
@Override
public void read(Reader in, String baseURI, ContentType ct, StreamRDF output, Context context) {
	final LangRIOT parser;
	if ( lang == TURTLESTAR ) {
		@SuppressWarnings("deprecation")
		final Tokenizer tokenizer = TokenizerFactory.makeTokenizer(in);
		parser = new LangTurtleStar(tokenizer, parserProfile, output);
	}
	else {
		parser = RiotParsers.createParser(in, lang, output, parserProfile);
	}

	parser.parse() ;
}
 
Example #21
Source File: RDFStarUtils.java    From RDFstarTools with Apache License 2.0
/**
 * Adds the RDF* data from the given Turtle* serialization
 * to the given {@link Graph}. 
 */
static public void populateGraphFromTurtleStarSnippet( Graph graph, String snippet )
{
    final StringReader reader = new StringReader(snippet);
    final StreamRDF dest = StreamRDFLib.graph(graph);

    RDFParser.create()
             .source(reader)
             .lang(LangTurtleStar.TURTLESTAR)
             .parse(dest);
}
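
A possible call of the helper above, as a sketch only; the IRIs are placeholders, and absolute IRIs are used so no base is needed:

// Hypothetical usage of populateGraphFromTurtleStarSnippet; the IRIs are placeholders.
Graph graph = GraphFactory.createJenaDefaultGraph();
RDFStarUtils.populateGraphFromTurtleStarSnippet(graph,
        "<< <http://example.org/s> <http://example.org/p> <http://example.org/o> >> "
        + "<http://example.org/q> <http://example.org/z> .");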
 
Example #22
Source File: RdflintParserRdfxml.java    From rdflint with MIT License
@Override
public void read(InputStream in, String baseURI, // SUPPRESS CHECKSTYLE AbbreviationAsWordInName
    ContentType ct, StreamRDF output, Context context) {
  this.input = in;
  this.xmlBase = baseURI_RDFXML(baseURI);
  this.filename = baseURI;
  this.sink = output;
  this.context = context;
  parse();
}
 
Example #23
Source File: RdflintParserRdfxml.java    From rdflint with MIT License
@Override
public void read(Reader reader, String baseURI, // SUPPRESS CHECKSTYLE AbbreviationAsWordInName
    ContentType ct, StreamRDF output, Context context) {
  this.reader = reader;
  this.xmlBase = baseURI_RDFXML(baseURI);
  this.filename = baseURI;
  this.sink = output;
  this.context = context;
  parse();
}
 
Example #24
Source File: RdflintParserRdfxml.java    From rdflint with MIT License
HandlerSink(StreamRDF output, ErrorHandler errHandler, ARP arp,
    List<RdfValidator> models, List<LintProblem> diagnosticList) {
  super(new ErrorHandlerBridge(errHandler));
  this.output = output;
  this.riotErrorHandler = errHandler;
  this.checker = new CheckerLiterals(errHandler);
  this.arp = arp;
  this.models = models;
  this.diagnosticList = diagnosticList;
}
 
Example #25
Source File: append.java    From rdf-delta with Apache License 2.0
protected RDFPatch toPatch(String fn) {
    // .gz??
    Lang lang = RDFLanguages.filenameToLang(fn);
    if ( lang != null && ( RDFLanguages.isTriples(lang) || RDFLanguages.isQuads(lang) ) ) {
        RDFChangesCollector x = new RDFChangesCollector();
        StreamRDF dest = new RDF2Patch(x);
        // dest will do the start-finish on the RDFChangesCollector via parsing.
        RDFDataMgr.parse(dest, fn);
        return x.getRDFPatch();
    }

    // Not RDF - assume a text patch.
//    String ext = FileOps.extension(fn);
//    switch(ext) {
//        case RDFPatchConst.EXT:
//            break;
//        case RDFPatchConst.EXT_B:
//            break;
//        default:
//            Log.warn(addpatch.class, "Conventionally, patches have file extension ."+RDFPatchConst.EXT);
//    }

    Path path = Paths.get(fn);
    try (InputStream in = Files.newInputStream(path)) {
        return RDFPatchOps.read(in);
    } catch (IOException ex) { throw IOX.exception(ex); }
}
 
Example #26
Source File: GenerateFormPlan.java    From sparql-generate with Apache License 2.0
private void substAndOutputForList(
        final Node subject,
        final Node predicate,
        final Node_List list,
        final StringBuilder sb,
        final Binding binding,
        final StreamRDF outputStream,
        final Context context,
        final int position,
        final Map<Node, Node> bNodeMap) {

    final Node first = ContextUtils.getNode(context, list, 0);
    final Node current = ContextUtils.getNode(context, list, position);
    final Node next = ContextUtils.getNode(context, list, position + 1);
    final Node var = list.getExpr().asVar();
    // potentially substitute subject and predicate
    Node s2 = subst(subject, bNodeMap);
    Node p2 = subst(predicate, bNodeMap);
    Triple t = new Triple(s2, p2, first);
    Triple t2 = Substitute.substitute(t, binding);
    outputIfConcrete(sb, outputStream, t2);
    // potentially substitute var
    Node var2 = subst(var, bNodeMap);
    Node var2sub = Substitute.substitute(var2, binding);
    Triple tfirst = new Triple(current, FIRST, var2sub);
    outputIfConcrete(sb, outputStream, tfirst);
    // nothing to substitute here
    Triple tRest = new Triple(current, REST, next);
    outputIfConcrete(sb, outputStream, tRest);
}
 
Example #27
Source File: GenerateFormPlan.java    From sparql-generate with Apache License 2.0
@Override
public void exec(
        final List<Var> variables,
        final List<Binding> values,
        final Context context) {
    final StreamRDF outputStream = ContextUtils.getGenerateOutput(context);
    final StringBuilder sb = new StringBuilder("Output triples");
    final int size = values.size();
    final Context newContext = ContextUtils.fork(context)
            .setSize(size)
            .fork();

    for (int i = 0; i < size; i++) {
        final Map<Node, Node> bNodeMap = new HashMap<>();
        final Binding binding = values.get(i);
        for (Triple t : bgp.getList()) {
            if (t.getObject() instanceof Node_List) {
                substAndOutputForList(t.getSubject(), t.getPredicate(), (Node_List) t.getObject(), sb, binding, outputStream, newContext, i, bNodeMap);
            } else {
                Triple t2 = TemplateLib.subst(t, binding, bNodeMap);
                outputIfConcrete(sb, outputStream, t2);
            }
        }

    }
    if (LOG.isTraceEnabled()) {
        LOG.trace(sb.toString());
    }

    for (int i = 0; i < subQueries.size(); i++) {
        RootPlan subPlan = subQueries.get(i);
        subPlan.execGenerateStream(values, newContext);
    }
}
 
Example #28
Source File: RDFPatchOps.java    From rdf-delta with Apache License 2.0
/** Provide a {@link StreamRDF} that will output in RDFPatch binary format.
 *  {@link StreamRDF#start} and {@link StreamRDF#finish}
 *  must be called; these bracket the patch in transaction markers
 *  {@code TX} and {@code TC}.
 */
public static void writeBinary(OutputStream out, Consumer<StreamRDF> action) {
    RDFChangesWriterBinary.writeBinary(out, c->{
        StreamRDF stream = new StreamPatch(c);
        action.accept(stream);
    });
}
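
A minimal usage sketch based only on the Javadoc above; the OutputStream 'out' is assumed to exist and the IRIs are placeholders:

// Hypothetical usage of RDFPatchOps.writeBinary.
Triple triple = Triple.create(
        NodeFactory.createURI("http://example.org/s"),
        NodeFactory.createURI("http://example.org/p"),
        NodeFactory.createURI("http://example.org/o"));
RDFPatchOps.writeBinary(out, stream -> {
    stream.start();          // opens the patch transaction (TX)
    stream.triple(triple);   // any StreamRDF events become patch entries
    stream.finish();         // closes the patch transaction (TC)
});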
 
Example #29
Source File: rdf2patch.java    From rdf-delta with Apache License 2.0
@Override
protected void exec() {
    StreamRDF dest  = RDFPatchOps.write(System.out);
    dest.start();
    if ( getPositional().isEmpty() )
        execOne(System.in);
    getPositional().forEach(fn->RDFDataMgr.parse(dest, fn));
    dest.finish();
}
 
Example #30
Source File: ExRIOT_5.java    From xcurator with Apache License 2.0
private void read(Item item, String baseURI, ContentType ct, StreamRDF output, Context context) {
    Graph graph = BuilderGraph.buildGraph(item) ;
    Iterator<Triple> iter = graph.find(null, null, null) ;
    for ( ; iter.hasNext() ; )
        output.triple(iter.next()) ;
}