Java Code Examples for org.apache.jena.riot.RDFDataMgr

The following are top-voted examples showing how to use org.apache.jena.riot.RDFDataMgr. They are extracted from open-source projects. You can vote up the examples you like; your votes help our system surface higher-quality examples.
Example 1
Project: trellis   File: JenaIOService.java   View source code 6 votes vote down vote up
/**
 * Writes the given dataset as JSON-LD to the supplied output stream.
 *
 * If a custom JSON-LD profile is requested and a context cache is available,
 * the remote @context document is fetched via HTTP (and cached) so the output
 * can be compacted against it; otherwise a standard JSON-LD profile is used.
 *
 * @param output the destination stream (not closed by this method)
 * @param graph the dataset to serialize
 * @param profiles requested JSON-LD profile IRIs, if any
 */
private void writeJsonLd(final OutputStream output, final DatasetGraph graph, final IRI... profiles) {
    final String profile = getCustomJsonLdProfile(profiles);
    // Compact+flatten only when a custom profile can actually be resolved via the cache.
    final RDFFormat format = nonNull(profile) && nonNull(cache) ? JSONLD_COMPACT_FLAT : getJsonLdProfile(profiles);
    final WriterDatasetRIOT writer = RDFDataMgr.createDatasetWriter(format);
    final PrefixMap pm = RiotLib.prefixMap(graph);
    final String base = null;
    final JsonLDWriteContext ctx = new JsonLDWriteContext();
    if (nonNull(profile) && nonNull(cache)) {
        LOGGER.debug("Setting JSON-LD context with profile: {}", profile);
        // Resolve the @context document for the profile, caching the result.
        // A fetch failure is logged and treated as "no context" (null).
        final String c = cache.get(profile, p -> {
            try (final TypedInputStream res = HttpOp.execHttpGet(profile)) {
                return IOUtils.toString(res.getInputStream(), UTF_8);
            } catch (final IOException | HttpException ex) {
                LOGGER.warn("Error fetching profile {}: {}", p, ex.getMessage());
                return null;
            }
        });
        if (nonNull(c)) {
            // Use the fetched context for compaction, but emit just the quoted
            // profile IRI as the "@context" value in the serialized output.
            ctx.setJsonLDContext(c);
            ctx.setJsonLDContextSubstitution("\"" + profile + "\"");
        }
    }
    writer.write(output, graph, pm, base, ctx);
}
 
Example 2
Project: trellis-io-jena   File: JenaIOService.java   View source code 6 votes vote down vote up
/**
 * Writes the given dataset as JSON-LD to the supplied output stream.
 *
 * If a custom JSON-LD profile is requested and a context cache is available,
 * the remote @context document is fetched via HTTP (and cached) so the output
 * can be compacted against it; otherwise a standard JSON-LD profile is used.
 *
 * @param output the destination stream (not closed by this method)
 * @param graph the dataset to serialize
 * @param profiles requested JSON-LD profile IRIs, if any
 */
private void writeJsonLd(final OutputStream output, final DatasetGraph graph, final IRI... profiles) {
    final String profile = getCustomJsonLdProfile(profiles);
    // Compact+flatten only when a custom profile can actually be resolved via the cache.
    final RDFFormat format = nonNull(profile) && nonNull(cache) ? JSONLD_COMPACT_FLAT : getJsonLdProfile(profiles);
    final WriterDatasetRIOT writer = RDFDataMgr.createDatasetWriter(format);
    final PrefixMap pm = RiotLib.prefixMap(graph);
    final String base = null;
    final JsonLDWriteContext ctx = new JsonLDWriteContext();
    if (nonNull(profile) && nonNull(cache)) {
        LOGGER.debug("Setting JSON-LD context with profile: {}", profile);
        // Resolve the @context document for the profile, caching the result.
        // A fetch failure is logged and treated as "no context" (null).
        final String c = cache.get(profile, p -> {
            try (final TypedInputStream res = HttpOp.execHttpGet(profile)) {
                return IOUtils.toString(res.getInputStream(), UTF_8);
            } catch (final IOException | HttpException ex) {
                LOGGER.warn("Error fetching profile {}: {}", p, ex.getMessage());
                return null;
            }
        });
        if (nonNull(c)) {
            // Use the fetched context for compaction, but emit just the quoted
            // profile IRI as the "@context" value in the serialized output.
            ctx.setJsonLDContext(c);
            ctx.setJsonLDContextSubstitution("\"" + profile + "\"");
        }
    }
    writer.write(output, graph, pm, base, ctx);
}
 
Example 3
Project: PageRankRDF   File: Parser.java   View source code 6 votes vote down vote up
/**
 * Parses an RDF dump on a background thread and returns an iterator over the
 * resulting triples.
 *
 * PipedRDFStream and PipedRDFIterator must run on different threads, so the
 * parser runs on a single-threaded executor while the caller consumes the
 * returned iterator.
 *
 * @param dump path or URI of the RDF dump to parse
 * @return an iterator producing triples as they are parsed
 */
public static PipedRDFIterator<Triple> parse(String dump){
    PipedRDFIterator<Triple> iter = new PipedRDFIterator<Triple>();
    final String d = dump;
    System.out.println(d);
    final PipedRDFStream<Triple> inputStream = new PipedTriplesStream(iter);
    // PipedRDFStream and PipedRDFIterator need to be on different threads
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // Create a runnable for our parser thread
    Runnable parser = new Runnable() {
        @Override
        public void run() {
            // Call the parsing process; it feeds triples into the iterator.
            RDFDataMgr.parse(inputStream, d);
        }
    };

    // Start the parser on another thread
    executor.submit(parser);
    // BUG FIX: shut down the executor so its worker thread does not linger
    // forever after parsing completes (shutdown() lets the already-submitted
    // task run to completion).
    executor.shutdown();
    return iter;
}
 
Example 4
Project: alvisnlp   File: RDFProjector.java   View source code 6 votes vote down vote up
/**
 * Creates a Jena model pre-populated with standard namespace prefixes and
 * loads RDF/XML content from every input stream provided by {@code source}.
 *
 * @param logger destination for progress messages
 * @return the populated model
 * @throws IOException if a source stream cannot be read
 */
private Model createModel(Logger logger) throws IOException {
		LoggingUtils.configureSilentLog4J();
		Model model = ModelFactory.createDefaultModel();
		// Standard prefixes first, then project-specific ones; later calls may
		// override earlier mappings for the same prefix.
		model.setNsPrefixes(PrefixMapping.Standard);
		model.setNsPrefix("xsd", "http://www.w3.org/2001/XMLSchema#");
		model.setNsPrefix("skos", "http://www.w3.org/2004/02/skos/core#");
		model.setNsPrefix("oboInOwl", "http://www.geneontology.org/formats/oboInOwl#");
		model.setNsPrefixes(prefixes);
		for (InputStream is : Iterators.loop(source.getInputStreams())) {
			logger.info("loading model from: " + source.getStreamName(is));
//			System.err.println("is = " + is);
//			model.read(is, null, Lang.RDFXML.toString());
			// Each stream is parsed as RDF/XML and accumulated into the same model.
			RDFDataMgr.read(model, is, Lang.RDFXML);
		}
		return model;
	}
 
Example 5
Project: RDF2PT   File: DBpediaGenderDictionary.java   View source code 6 votes vote down vote up
/**
 * Loads the gender dictionary from the bundled Turtle resource, collecting
 * the URIs of subjects whose gender property value equals the male or female
 * English-language literal.
 */
public DBpediaGenderDictionary() {
	Model model = ModelFactory.createDefaultModel();

	Literal maleLit = model.createLiteral(VALUE_MALE, "en");
	Literal femaleLit = model.createLiteral(VALUE_FEMALE, "en");

	// BUG FIX: close the classpath resource stream when parsing is done;
	// per the Jena RDFDataMgr contract, streams passed in are not closed.
	try (java.io.InputStream in = getClass().getClassLoader().getResourceAsStream(GENDER_FILE_LOCATION)) {
		RDFDataMgr.read(model, in, Lang.TURTLE);
	} catch (java.io.IOException e) {
		throw new java.io.UncheckedIOException("Failed to read " + GENDER_FILE_LOCATION, e);
	}
	StmtIterator iter = model.listStatements(null, model.createProperty(GENDER_PROPERTY), (RDFNode) null);
	while (iter.hasNext()) {
		Statement st = iter.next();
		Literal lit = st.getObject().asLiteral();
		if (lit.equals(maleLit)) {
			male.add(st.getSubject().getURI());
		} else if (lit.equals(femaleLit)) {
			female.add(st.getSubject().getURI());
		}
	}
}
 
Example 6
Project: ontonethub   File: ModelWriter.java   View source code 6 votes vote down vote up
@Override
public void writeTo(Model t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
		MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
		throws IOException, WebApplicationException {
	// Map the negotiated media type onto a Jena serialization language,
	// defaulting to Turtle for anything unrecognized. Comparison order is
	// preserved from the original implementation.
	final Lang lang;
	if (mediaType.equals(KRFormat.N3_TYPE)) {
		lang = Lang.N3;
	} else if (mediaType.equals(KRFormat.N_TRIPLE_TYPE)) {
		lang = Lang.NTRIPLES;
	} else if (mediaType.equals(KRFormat.RDF_JSON_TYPE)) {
		lang = Lang.RDFJSON;
	} else if (mediaType.equals(new MediaType("application", "json-ld"))) {
		lang = Lang.JSONLD;
	} else {
		lang = Lang.TURTLE;
	}
	// Serialize the model directly to the response entity stream.
	RDFDataMgr.write(entityStream, t, lang);
}
 
Example 7
Project: trellis   File: JenaIOService.java   View source code 6 votes vote down vote up
/**
 * Writes the given dataset as JSON-LD to the supplied output stream.
 *
 * If a custom JSON-LD profile is requested and a context cache is available,
 * the remote @context document is fetched via HTTP (and cached) so the output
 * can be compacted against it; otherwise a standard JSON-LD profile is used.
 *
 * @param output the destination stream (not closed by this method)
 * @param graph the dataset to serialize
 * @param profiles requested JSON-LD profile IRIs, if any
 */
private void writeJsonLd(final OutputStream output, final DatasetGraph graph, final IRI... profiles) {
    final String profile = getCustomJsonLdProfile(profiles);
    // Compact+flatten only when a custom profile can actually be resolved via the cache.
    final RDFFormat format = nonNull(profile) && nonNull(cache) ? JSONLD_COMPACT_FLAT : getJsonLdProfile(profiles);
    final WriterDatasetRIOT writer = RDFDataMgr.createDatasetWriter(format);
    final PrefixMap pm = RiotLib.prefixMap(graph);
    final String base = null;
    final JsonLDWriteContext ctx = new JsonLDWriteContext();
    if (nonNull(profile) && nonNull(cache)) {
        LOGGER.debug("Setting JSON-LD context with profile: {}", profile);
        // Resolve the @context document for the profile, caching the result.
        // A fetch failure is logged and treated as "no context" (null).
        final String c = cache.get(profile, p -> {
            try (final TypedInputStream res = HttpOp.execHttpGet(profile)) {
                return IOUtils.toString(res.getInputStream(), UTF_8);
            } catch (final IOException | HttpException ex) {
                LOGGER.warn("Error fetching profile {}: {}", p, ex.getMessage());
                return null;
            }
        });
        if (nonNull(c)) {
            // Use the fetched context for compaction, but emit just the quoted
            // profile IRI as the "@context" value in the serialized output.
            ctx.setJsonLDContext(c);
            ctx.setJsonLDContextSubstitution("\"" + profile + "\"");
        }
    }
    writer.write(output, graph, pm, base, ctx);
}
 
Example 8
Project: Squirrel   File: RDFAnalyzer.java   View source code 6 votes vote down vote up
/**
 * Parses the downloaded RDF file and streams the discovered URIs into the sink.
 *
 * The RDF language is derived from the HTTP Content-Type header when present,
 * otherwise guessed from the file name.
 *
 * @param curi the crawled URI the data belongs to
 * @param data the downloaded file to analyze
 * @param sink receiver for the filtered triples
 * @return the URIs collected for {@code curi}
 */
@Override
public Iterator<byte[]> analyze(CrawleableUri curi, File data, Sink sink) {
    try {
        // First, try to get the language of the data
        Lang lang = null;
        String contentType = (String) curi.getData(HttpHeaders.CONTENT_TYPE);
        if (contentType != null) {
            lang = RDFLanguages.contentTypeToLang(contentType);
        } else {
            lang = RDFLanguages.filenameToLang(data.getName(), null);
        }
        FilterSinkRDF filtered = new FilterSinkRDF(curi, sink);
        RDFDataMgr.parse(filtered, data.getAbsolutePath(), lang);
    } catch (Exception e) {
        // BUG FIX: pass the exception to the logger so the failure cause and
        // stack trace are not lost.
        LOGGER.error("Exception while analyzing. Aborting.", e);
    }
    // NOTE: removed the unused FileInputStream local — it was always null,
    // so the old finally block's closeQuietly(fin) was a no-op.
    return collector.getUris(curi);
}
 
Example 9
Project: SDA   File: OneM2MContentInstanceMapper.java   View source code 6 votes vote down vote up
/**
 * Reads a JSON sample file, maps it to a OneM2M content instance, and prints
 * the resulting RDF model as N-Triples.
 *
 * @param args unused
 * @throws IOException if the sample file cannot be read
 */
public static void main(String[] args) throws IOException {

		File f = new File("/Users/rosenc/Documents/business/[2015]icbms/json_sample1.txt");
		// BUG FIX: close the reader with try-with-resources (it leaked before)
		// and build the content with StringBuilder instead of O(n^2) string
		// concatenation in the loop.
		StringBuilder sb = new StringBuilder();
		try (BufferedReader br = new BufferedReader(new FileReader(f))) {
			String line;
			while ((line = br.readLine()) != null) {
				sb.append(line).append("\n");
			}
		}
		String s = sb.toString();

		System.out.println(s);
		Gson gson = new Gson();
		OneM2MContentInstanceDTO cont = gson.fromJson(s, OneM2MContentInstanceDTO.class);
		OneM2MContentInstanceMapper mapper = new OneM2MContentInstanceMapper(cont);

		Model model = ModelFactory.createDefaultModel();
		model.add(mapper.from());
		System.out.println("content type ; " + mapper.getContentType());
		// Serialize the model as N-Triples to stdout.
		RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);

	}
 
Example 10
Project: SDA   File: OneM2MAEDTO.java   View source code 6 votes vote down vote up
/**
 * Parses a sample OneM2M AE JSON document, maps it to RDF, and prints the
 * model as N-Triples.
 *
 * @param args unused
 */
public static void main(String[] args) {
	String sample = " {     \"_id\" : ObjectId(\"561e1e1e1ee82041fac258b6\"),     \"rn\" : \"SAE_0\",     \"ty\" : 2,     \"ri\" : \"SAE_0\",     \"pi\" : \"herit-in\",     \"lbl\" : [          \"home1\",          \"home_service\"     ],     \"et\" : \"20151203T122321\",     \"at\" : [          \"//onem2m.hubiss.com/cse1\",          \"//onem2m.hubiss.com/cse2\"     ],     \"aa\" : [          \"poa\",          \"apn\"     ],     \"apn\" : \"onem2mPlatformAdmin\",     \"api\" : \"NHeritAdmin\",     \"aei\" : \"/SAE_0\",     \"poa\" : [          \"10.101.101.111:8080\"     ],     \"rr\" : false,     \"_uri\" : \"/herit-in/herit-cse/SAE_0\",     \"ct\" : \"20151014T181926\",     \"lt\" : \"20151014T181926\" }";
	Gson gson = new Gson();
	OneM2MAEDTO cont = gson.fromJson(sample, OneM2MAEDTO.class);


	System.out.println(cont);
	
	OneM2MAEMapper mapper = new OneM2MAEMapper(cont);
	Model model = ModelFactory.createDefaultModel();
	model.add(mapper.from());
	
	// Serialize the model as N-Triples to stdout.
	RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
	
	// BUG FIX: the old code null-checked the model only *after* calling
	// isClosed() on it (a dereference), and nulling a local about to go out
	// of scope accomplished nothing. Just close the model.
	if (!model.isClosed()) {
		model.close();
	}

}
 
Example 11
Project: SDA   File: OneM2MCSEBaseDTO.java   View source code 6 votes vote down vote up
/**
 * Parses a sample OneM2M content-instance JSON document, maps it to RDF, and
 * prints the model as N-Triples.
 *
 * @param args unused
 */
public static void main(String[] args) {
	String sample = "{     \"_id\" : ObjectId(\"560c9d741ee8203c53a63569\"),     \"rn\" : \"CONTENT_INST_5\",     \"ty\" : 4,     \"ri\" : \"CONTENT_INST_5\",     \"pi\" : \"CONTAINER_37\",     \"lbl\" : [          \"cnt-switch\"     ],     \"cr\" : \"C_AE-D-GASLOCK1004\",     \"cnf\" : \"text/plain:0\",     \"cs\" : 3,     \"con\" : \"Off\",     \"_uri\" : \"/herit-in/herit-cse/ae-gaslock1004/cnt-switch/CONTENT_INST_5\",     \"ct\" : \"20151001T114156\",     \"lt\" : \"20151001T114156\" , \"or\":\"http://www.iotoasis.org/ontology/StateCondition\" }";
	Gson gson = new Gson();
	OneM2MContentInstanceDTO cont = gson.fromJson(sample, OneM2MContentInstanceDTO.class);
	System.out.println(cont);

	OneM2MContentInstanceMapper mapper = new OneM2MContentInstanceMapper(cont);
	Model model = ModelFactory.createDefaultModel();
	model.add(mapper.from());

	// Serialize the model as N-Triples to stdout.
	RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
	
	// BUG FIX: the old code null-checked the model only *after* calling
	// isClosed() on it (a dereference), and nulling a local about to go out
	// of scope accomplished nothing. Just close the model.
	if (!model.isClosed()) {
		model.close();
	}

}
 
Example 12
Project: openml-rdf   File: VocabularyBuilder.java   View source code 6 votes vote down vote up
/**
 * Build all properties for each class contained in a specification file.<br>
 * <br>
 * Specification file contains tab-separated rows as:<br>
 * [ClassName][TAB][JSON example URL]
 *
 * @param specFile path of the tab-separated specification file
 * @param inFile RDF/XML input file, relative to the working directory
 * @param outFile output RDF file, relative to the working directory
 * @throws IOException if a file cannot be read or written
 * @throws JSONException if a JSON example cannot be parsed
 */
public void buildAll(String specFile, String inFile, String outFile) throws JSONException, IOException {
	logger.info("Started.");
	String base = System.getProperty("user.dir") + "/";
	logger.info("user.dir = " + base);
	logger.info("Opening RDF file: " + base + inFile);
	Model m = RDFDataMgr.loadModel(base + inFile, Lang.RDFXML);
	// BUG FIX: try-with-resources closes the scanner even if build() throws
	// (the old code leaked both the Scanner and the FileOutputStream on error).
	try (Scanner in = new Scanner(new File(specFile))) {
		while (in.hasNextLine()) {
			String[] line = in.nextLine().split("\t");
			build(line[0], line[1], m);
		}
	}
	logger.info("Writing to file " + outFile + "...");
	// Open the output only once there is something to write, so a failed
	// build does not truncate an existing output file.
	try (FileOutputStream file = new FileOutputStream(new File(base + outFile))) {
		m.write(file);
	}
	logger.info("Done.");
}
 
Example 13
Project: BENGAL   File: DBpediaGenderDictionary.java   View source code 6 votes vote down vote up
/**
 * Loads the gender dictionary from the bundled Turtle resource, collecting
 * the URIs of subjects whose gender property value equals the male or female
 * English-language literal.
 */
public DBpediaGenderDictionary() {
	Model model = ModelFactory.createDefaultModel();

	Literal maleLit = model.createLiteral(VALUE_MALE, "en");
	Literal femaleLit = model.createLiteral(VALUE_FEMALE, "en");

	// BUG FIX: close the classpath resource stream when parsing is done;
	// per the Jena RDFDataMgr contract, streams passed in are not closed.
	try (java.io.InputStream in = getClass().getClassLoader().getResourceAsStream(GENDER_FILE_LOCATION)) {
		RDFDataMgr.read(model, in, Lang.TURTLE);
	} catch (java.io.IOException e) {
		throw new java.io.UncheckedIOException("Failed to read " + GENDER_FILE_LOCATION, e);
	}
	StmtIterator iter = model.listStatements(null, model.createProperty(GENDER_PROPERTY), (RDFNode) null);
	while (iter.hasNext()) {
		Statement st = iter.next();
		Literal lit = st.getObject().asLiteral();
		if (lit.equals(maleLit)) {
			male.add(st.getSubject().getURI());
		} else if (lit.equals(femaleLit)) {
			female.add(st.getSubject().getURI());
		}
	}
}
 
Example 14
Project: rdf-delta   File: TestRDFChangesGraph.java   View source code 6 votes vote down vote up
/**
 * Replays the change log captured in {@code bout} against a brand-new graph
 * and returns that graph.
 *
 * Closes {@code bout} first so the captured byte buffer is complete before
 * it is re-read.
 *
 * @return the fresh graph after the recorded changes have been applied, or
 *         {@code null} if {@code IO.exception} returns after an IOException
 */
private Graph replay() {
    IO.close(bout);
    // Flip to true locally when diagnosing a failing replay.
    final boolean DEBUG = false;
    
    if ( DEBUG ) {
        System.out.println("== Graph ==");
        RDFDataMgr.write(System.out, baseGraph, Lang.NQ);
        System.out.println("== Replay ==");
        String x = StrUtils.fromUTF8bytes(bout.toByteArray());
        System.out.print(x);
        System.out.println("== ==");
    }
    
    // A completely separate graph (different dataset)
    Graph graph2 = txnGraph();
    
    try(ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray())) {
        // Apply the captured patch bytes to the fresh graph.
        RDFPatchOps.applyChange(graph2, bin);
        if ( DEBUG ) {
            System.out.println("== Graph outcome ==");
            RDFDataMgr.write(System.out, graph2, Lang.NT);
            System.out.println("== ==");
        }
        return graph2;
    } catch (IOException ex) { IO.exception(ex); return null; }
}
 
Example 15
Project: rdf-delta   File: DeltaEx1_DatasetWithPatchLog.java   View source code 6 votes vote down vote up
/**
 * Demonstrates a dataset whose updates are mirrored to a textual patch log
 * on stdout: loading data.ttl inside a transaction emits each change.
 */
public static void main(String ...args) {
    // Underlying storage: a transactional in-memory dataset.
    DatasetGraph baseDsg = DatasetGraphFactory.createTxnMem();

    // Destination for changes: an RDFChanges that writes the text form of
    // each change to stdout.
    RDFChanges changeLog = RDFPatchOps.textWriter(System.out);

    // Wrap the base dataset so every update also goes to the change log.
    DatasetGraph trackedDsg = RDFPatchOps.changes(baseDsg, changeLog);

    // Expose the combined graph through the Dataset API.
    Dataset ds = DatasetFactory.wrap(trackedDsg);

    // Read data.ttl (in src/main/resources/) inside a write transaction;
    // every triple added is also emitted to the patch log.
    Txn.executeWrite(ds, () -> RDFDataMgr.read(trackedDsg, "data.ttl"));
}
 
Example 16
Project: sparql-generate   File: TestBase.java   View source code 6 votes vote down vote up
/**
 * Sets up a test fixture rooted at {@code exampleDir}.
 *
 * Initializes SPARQL-Generate, reads the location-mapping configuration from
 * the queryset and documentset configuration files, and installs a stream
 * manager that resolves files relative to the example directory.
 *
 * @param log test logger
 * @param exampleDir directory containing the example's query/document sets
 * @param name test name (unused in this constructor)
 */
public TestBase(Logger log, File exampleDir, String name) {
    this.log = log;
    this.exampleDir = exampleDir;

    log.info("constructing with " + exampleDir);

    SPARQLGenerate.init();

    // read location-mapping
    Model conf = ModelFactory.createDefaultModel();
    conf.add(RDFDataMgr.loadModel(new File(exampleDir, "queryset/configuration.ttl").toString(), Lang.TTL));
    conf.add(RDFDataMgr.loadModel(new File(exampleDir, "documentset/configuration.ttl").toString(), Lang.TTL));

    // initialize stream manager
    SPARQLGenerateStreamManager sm = SPARQLGenerateStreamManager.makeStreamManager(new LocatorFileAccept(exampleDir.toURI().getPath()));
    sm.setLocationMapper(conf);
    SPARQLGenerate.setStreamManager(sm);
}
 
Example 17
Project: ASPG   File: viz.java   View source code 6 votes vote down vote up
/**
 * Reads RDF either from a file path or directly from an inline string.
 *
 * @param s a filesystem path to an RDF file, or an inline Turtle document
 * @return the parsed model
 */
private static Model readRDF(String s) {
        Model model = ModelFactory.createDefaultModel();

        if (isPath(s)) {
            // Parse from the file referenced by the path.
            model.read(s);
        } else {
            // BUG FIX: use StandardCharsets.UTF_8 instead of the string name
            // "UTF-8" — the charset-constant overload cannot throw
            // UnsupportedEncodingException, so the old try/catch (which only
            // printed the stack trace) is no longer needed.
            RDFDataMgr.read(model,
                    new ByteArrayInputStream(s.getBytes(java.nio.charset.StandardCharsets.UTF_8)),
                    Lang.TTL);
        }
        return model;
    }
 
Example 18
Project: phenopacket-reference-implementation   File: RdfGenerator.java   View source code 6 votes vote down vote up
/**
 * Convert a PhenoPacket to RDF triples using the JSON-LD context
 * 
 * @param packet
 * @param base
 *            URI base for generated RDF; if `null` a UUID-based base will
 *            be used
 * @return model containing RDF triples
 * @throws JsonLdError
 * @throws JsonProcessingException
 */
public static Model toRdf(PhenoPacket packet, String base)
		throws JsonLdError, JsonProcessingException {
	// Ensure the packet carries a JSON-LD context, attaching the default
	// context when the caller's packet has none.
	final PhenoPacket packetWithContext = (packet.getContext() == null)
			? PhenoPacket.newBuilder(packet)
					.context(ContextUtil.defaultContextURI)
					.build()
			: packet;
	// Fall back to a UUID-based base URI when none was supplied.
	final String baseToUse = (base != null)
			? base
			: "http://phenopackets.org/local/" + UUID.randomUUID().toString() + "/";
	// Render the packet to JSON-LD text and parse it into a fresh model.
	Model rdfModel = ModelFactory.createDefaultModel();
	RDFDataMgr.read(rdfModel,
			new StringReader(JsonGenerator.render(packetWithContext)),
			baseToUse, Lang.JSONLD);
	return rdfModel;
}
 
Example 19
Project: Ifc2Rdf   File: RdfUtils.java   View source code 6 votes vote down vote up
/**
 * Exports a Jena model to an RDF file in the given format, optionally
 * gzip-compressing the output.
 *
 * @param model the model to export
 * @param filePath destination path without the format extension
 * @param format RDF serialization format (determines the file extension)
 * @param gzip whether to gzip the output (appends ".gz" to the extension)
 * @throws IOException if the file cannot be written
 */
public static void exportJenaModelToRdfFile(Model model, String filePath, RDFFormat format, boolean gzip) throws IOException {
	String fileExtension = RdfVocabulary.getRdfFileExtension(format);
	if (gzip) {
		fileExtension += ".gz";
	}

	String filePathWithExtension = FileManager.createFileNameWithExtension(filePath, fileExtension);

	logger.info(String.format("Exporting graph to file '%s' with format '%s'", filePathWithExtension, format));
	File file = FileManager.createFile(filePathWithExtension);
	// BUG FIX: try-with-resources closes both streams on every path; the old
	// code leaked the FileOutputStream when the GZIPOutputStream constructor
	// threw (it was wrapped outside the try/finally).
	try (FileOutputStream fos = new FileOutputStream(file);
			OutputStream out = gzip ? new GZIPOutputStream(fos) : fos) {
		RDFDataMgr.write(out, model, format);
	}
	logger.info(String.format("Exporting graph to file is completed, file size: %s", FileManager.getReadableFileSize(file.length())));
}
 
Example 20
Project: Ifc2Rdf   File: RdfUtils.java   View source code 6 votes vote down vote up
/**
 * Exports a Jena model to an RDF file in the given format, optionally
 * gzip-compressing the output.
 *
 * @param model the model to export
 * @param filePath destination path without the format extension
 * @param format RDF serialization format (determines the file extension)
 * @param gzip whether to gzip the output (appends ".gz" to the extension)
 * @throws IOException if the file cannot be written
 */
public static void exportJenaModelToRdfFile(Model model, String filePath, RDFFormat format, boolean gzip) throws IOException {
	String fileExtension = RdfVocabulary.getRdfFileExtension(format);
	if (gzip) {
		fileExtension += ".gz";
	}

	String filePathWithExtension = FileManager.createFileNameWithExtension(filePath, fileExtension);

	logger.info(String.format("Exporting graph to file '%s' with format '%s'", filePathWithExtension, format));
	File file = FileManager.createFile(filePathWithExtension);
	// BUG FIX: try-with-resources closes both streams on every path; the old
	// code leaked the FileOutputStream when the GZIPOutputStream constructor
	// threw (it was wrapped outside the try/finally).
	try (FileOutputStream fos = new FileOutputStream(file);
			OutputStream out = gzip ? new GZIPOutputStream(fos) : fos) {
		RDFDataMgr.write(out, model, format);
	}
	logger.info(String.format("Exporting graph to file is completed, file size: %s", FileManager.getReadableFileSize(file.length())));
}
 
Example 21
Project: Tapioca   File: VoidParsingExtractorTest.java   View source code 6 votes vote down vote up
/**
 * Test method for VoidParsingExtractor#getVoidInformation()
 * expected_5.nt
 */
@Test
public final void test052GetVoidInformation() {
	// set input file
	String inFile = "src/test/data/expected_5.nt";
	// run extraction
	try {
		// setup: pipe parsed triples from the parser to the iterator
		PipedRDFIterator<Triple> iter = new PipedRDFIterator<Triple>();
		PipedRDFStream<Triple> rdfStream = new PipedTriplesStream( iter );
		// BUG FIX: close the input stream when parsing is finished
		// (the old code never closed it).
		try (InputStream inStream = new FileInputStream( inFile )) {
			// run
			RDFDataMgr.parse(rdfStream,	inStream, "", RDFLanguages.resourceNameToLang( inFile ) );
			VoidParsingExtractor extractor = new VoidParsingExtractor();
			extractor.extract( iter );
			// print
			System.out.println( "expected_5.nt:\n  " + extractor.getVoidInformation().toString() );
			// test
			assertEquals( 11, extractor.getVoidInformation().size() );
		}
	}
	catch( Exception e ) {
		fail( e.toString() );
	}
}
 
Example 22
Project: SDA   File: OneM2MCSEBaseDTO.java   View source code 6 votes vote down vote up
/**
 * Parses a sample OneM2M content-instance JSON document, maps it to RDF, and
 * prints the model as N-Triples.
 *
 * @param args unused
 */
public static void main(String[] args) {
	String sample = "{     \"_id\" : ObjectId(\"560c9d741ee8203c53a63569\"),     \"rn\" : \"CONTENT_INST_5\",     \"ty\" : 4,     \"ri\" : \"CONTENT_INST_5\",     \"pi\" : \"CONTAINER_37\",     \"lbl\" : [          \"cnt-switch\"     ],     \"cr\" : \"C_AE-D-GASLOCK1004\",     \"cnf\" : \"text/plain:0\",     \"cs\" : 3,     \"con\" : \"Off\",     \"_uri\" : \"/herit-in/herit-cse/ae-gaslock1004/cnt-switch/CONTENT_INST_5\",     \"ct\" : \"20151001T114156\",     \"lt\" : \"20151001T114156\" , \"or\":\"http://www.iotoasis.org/ontology/StateCondition\" }";
	Gson gson = new Gson();
	OneM2MContentInstanceDTO cont = gson.fromJson(sample, OneM2MContentInstanceDTO.class);
	System.out.println(cont);

	OneM2MContentInstanceMapper mapper = new OneM2MContentInstanceMapper(cont);
	Model model = ModelFactory.createDefaultModel();
	model.add(mapper.from());

	// Serialize the model as N-Triples to stdout.
	RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
	
	// BUG FIX: the old code null-checked the model only *after* calling
	// isClosed() on it (a dereference), and nulling a local about to go out
	// of scope accomplished nothing. Just close the model.
	if (!model.isClosed()) {
		model.close();
	}

}
 
Example 23
Project: Source   File: SemanticBaseListener.java   View source code 6 votes vote down vote up
/**
 * Gets the {@link JenaBase} from the given {@link IFile}.
 * 
 * @param file
 *            the {@link IFile}
 * @return the {@link JenaBase} from the given {@link IFile} if any, <code>null</code> otherwise
 */
private JenaBase getBaseFromFile(IFile file) {
	JenaBase res = null;

	// Only attempt to load files whose extension maps to a known RDF language.
	final Lang lang = EXTENSION_TO_LANG.get(file.getFileExtension());
	if (lang != null) {
		try {
			final Model model = RDFDataMgr.loadModel(file.getLocation().toFile().getAbsolutePath(), lang);
			// An empty model yields null, i.e. "no semantic base".
			if (!model.isEmpty()) {
				res = new JenaBase(model, file.getFullPath().toString());
			}
			// CHECKSTYLE:OFF
		} catch (Exception e) {
			// CHECKSTYLE:ON
			// Loading failures are logged as warnings rather than propagated.
			Activator.getDefault().getLog().log(new Status(IStatus.WARNING, Activator.PLUGIN_ID,
					UNABLE_TO_LOAD_SEMANTIC_BASE_FROM + file.getLocation().toString(), e));
		}
	}

	return res;
}
 
Example 24
Project: marklogic-jena   File: MarkLogicDatasetGraphTest.java   View source code 6 votes vote down vote up
/**
 * Verifies that the default-graph view of a MarkLogic-backed dataset is
 * writeable: a triple added through the Graph API becomes visible to SPARQL,
 * and removing it makes the same ASK query false again.
 */
@Test
public void testWriteableView() {
    MarkLogicDatasetGraph dsg = getMarkLogicDatasetGraph("testdata/smallfile.nt");
    Graph defaultGraph = dsg.getDefaultGraph();
    // Dump the initial default graph (debug aid).
    RDFDataMgr.write(System.out, defaultGraph, RDFFormat.TURTLE);

    Triple newTriple = Triple.create(NodeFactory.createURI("http://a"),
            NodeFactory.createURI("http://b"),
            NodeFactory.createLiteral("1", XSDint));

    defaultGraph.add(newTriple);

    // The added triple should be visible through a SPARQL ASK over the dataset.
    QueryExecution qe = QueryExecutionFactory.create(
            "prefix xsd: <http://www.w3.org/2001/XMLSchema#>  ask where { <http://a> ?p  \"1\"^^xsd:int .}", dsg.toDataset());
    assertTrue(qe.execAsk());
    defaultGraph.remove(newTriple.getSubject(), newTriple.getPredicate(), newTriple.getObject());
    // After removal the same ASK must come back false.
    qe = QueryExecutionFactory.create(
            "prefix xsd: <http://www.w3.org/2001/XMLSchema#>  ask where { <http://a> ?p  \"1\"^^xsd:int .}", dsg.toDataset());
    assertFalse(qe.execAsk());
}
 
Example 25
Project: marklogic-jena   File: JenaGraphTests.java   View source code 6 votes vote down vote up
/**
 * Verifies that the default graph can be replaced by a named graph: a quad
 * added under a named graph becomes visible through the default graph after
 * setDefaultGraph(g2).
 */
@Test
public void testSetDefaultGraph_admin() {

	String file = datasource + "relative1.nt";
	// Read triples into dataset
	RDFDataMgr.read(markLogicDatasetGraphWriter, file);
	markLogicDatasetGraphWriter.sync();
	Graph g1 = markLogicDatasetGraphWriter.getDefaultGraph();

	assertTrue("did not match Triples", g1.toString().contains("#electricVehicle2"));

	// Create New graph and add triples from defaultgraph to new graph
	Triple triple = new Triple(NodeFactory.createURI("s5"), NodeFactory.createURI("p5"), NodeFactory.createURI("o5"));
	Quad quad = new Quad(NodeFactory.createURI("new-graph-fordefault"), triple);
	Node n1 = NodeFactory.createURI("new-graph-fordefault");
	markLogicDatasetGraphWriter.add(quad);
	markLogicDatasetGraphWriter.sync();
	Graph g2 = markLogicDatasetGraphWriter.getGraph(n1);
	assertTrue("did not match Triples", g2.contains(triple));
	// Set DefaultGraph to be NamedGraph
	markLogicDatasetGraphWriter.setDefaultGraph(g2);
	Graph defaultG = markLogicDatasetGraphWriter.getDefaultGraph();
	assertTrue("did not match Triples", defaultG.contains(triple));
}
 
Example 26
Project: marklogic-jena   File: JenaGraphTests.java   View source code 6 votes vote down vote up
/**
 * Verifies that reading a triple-XML file fails with a RiotException
 * (the assertions inside the try block are unreachable when the expected
 * exception is thrown).
 */
@Test
public void testCRUD_triplexml() {

	String file = datasource + "triplexml1.xml";
	Exception exp = null;
	// Read triples into dataset
	try {
		RDFDataMgr.read(markLogicDatasetGraphWriter, file);
		markLogicDatasetGraphWriter.sync();
		Graph g1 = markLogicDatasetGraphWriter.getDefaultGraph();

		assertTrue("did not match Triples", g1.toString().contains("Anna's Homepage"));

	} catch (Exception e) {
		exp = e;

	}
	// BUG FIX: check for null *before* dereferencing exp. The old condition
	// (exp.toString().contains(...) && exp != null) would throw an NPE —
	// instead of failing the assertion — when no exception was thrown.
	assertTrue(exp != null && exp.toString().contains("RiotException"));
}
 
Example 27
Project: marklogic-jena   File: JenaSPARQLUpdateTests.java   View source code 6 votes vote down vote up
/**
 * Runs two string ASK queries against data loaded from tigers.ttl: the first
 * (a player having both lastname and country) is expected to be false, the
 * second (a pitcher on the Tigers) true.
 */
@Test
public void testStringAskQuery() {
	String file = datasource + "tigers.ttl";
	// Read triples into dataset
	RDFDataMgr.read(markLogicDatasetGraph, file);
	markLogicDatasetGraph.sync();
	dataSet = DatasetFactory.create(markLogicDatasetGraph);

	// ASK: any player with a lastname that also has a country? Expect false.
	String query1 = "PREFIX  bb: <http://marklogic.com/baseball/players#>" + " ASK " + " WHERE" + " {" + " ?id bb:lastname  ?name ."
			+ " FILTER  EXISTS { ?id bb:country ?countryname }" + " }";
	QueryExecution queryExec = QueryExecutionFactory.create(query1, dataSet);
	Boolean result = queryExec.execAsk();
	assertFalse(result);

	// ASK: any pitcher on the Tigers? Expect true.
	String query2 = "PREFIX  bb: <http://marklogic.com/baseball/players#>" + "PREFIX  r: <http://marklogic.com/baseball/rules#>"
			+ " ASK WHERE" + " {" + " ?id bb:team r:Tigers." + " ?id bb:position \"pitcher\"." + " }";
	queryExec = QueryExecutionFactory.create(query2, dataSet);
	assertTrue(queryExec.execAsk());
	// Recreate the dataset graph so later tests start from a fresh handle.
	markLogicDatasetGraph.close();
	markLogicDatasetGraph = MarkLogicDatasetGraphFactory.createDatasetGraph(writerClient);

}
 
Example 28
Project: Tapioca   File: VoidParsingExtractorTest.java   View source code 6 votes vote down vote up
/**
 * Test method for VoidParsingExtractor#getVoidInformation()
 * 0.nt
 */
@Test
public final void test00GetVoidInformation() {
	// set input file
	String inFile = "src/test/data/0.nt";
	// run extraction
	try {
		// setup: pipe parsed triples from the parser to the iterator
		PipedRDFIterator<Triple> iter = new PipedRDFIterator<Triple>();
		PipedRDFStream<Triple> rdfStream = new PipedTriplesStream( iter );
		// BUG FIX: close the input stream when parsing is finished
		// (the old code never closed it).
		try (InputStream inStream = new FileInputStream( inFile )) {
			// run
			RDFDataMgr.parse(rdfStream,	inStream, "", RDFLanguages.resourceNameToLang( inFile ) );
			VoidParsingExtractor extractor = new VoidParsingExtractor();
			extractor.extract( iter );
			// print
			System.out.println( "0.nt\n  " + extractor.getVoidInformation().toString() );
			// test
			assertEquals( 0, extractor.getVoidInformation().size() );
		}
	}
	catch( Exception e ) {
		fail( e.toString() );
	}
}
 
Example 29
Project: Tapioca   File: SearchEngineBeanTest.java   View source code 6 votes vote down vote up
/**
 * Builds the shared search engine and loads the metadata model used by the
 * tests in this class.
 *
 * @throws Exception if the engine or the model cannot be created
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
	// create engine
	String[] cachefiles = { "src/test/data3/index/cache/uriToLabelCache_1.object",
			"src/test/data3/index/cache/uriToLabelCache_2.object", 
			"src/test/data3/index/cache/uriToLabelCache_3.object"
	};
	String modelfile = "src/test/data3/index/model/probAlgState.object";
	String corpusfile = "src/test/data3/index/model/lodStats_final_noFilter.corpus";
	String rdfMetaDataFile = "src/test/data3/index/model/lodstats.nt";
	int elasticport = 9300;
	engine = new SearchEngineBean( cachefiles, modelfile, corpusfile, rdfMetaDataFile, elasticport );
	
	// create model
	// BUG FIX: try-with-resources guarantees the stream is closed even if
	// parsing throws (the old code only closed it on the success path).
	model = ModelFactory.createDefaultModel();
	try (InputStream stream = new FileInputStream( new File( "src/test/data3/metadata/34.nt" ) )) {
		RDFDataMgr.read( model, stream, RDFLanguages.nameToLang( "N-TRIPLES" ) );
	}
}
 
Example 30
Project: JenaKBClient   File: KBIndividualImplTest.java   View source code 6 votes vote down vote up
/**
 * Builds two named ontology models in a TDB dataset, unions them, and writes
 * the whole dataset to a file as UTF-8 N-Quads.
 */
@Test
public void testSomeMethod2() throws Exception {
  Dataset ds = TDBFactory.createDataset("/scratch/WORK2/jena/dataset2/");
  
  OntModel model1 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym1"));
  OntModel model2 = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ds.getNamedModel("vijaym2"));
  OntClass thing = model1.createClass("http://www.w3.org/2002/07/owl#Thing");
  model1.createIndividual("http://example.com/onto1#VijayRaj", thing);
  model2.createIndividual("http://example.;cegilovcom/onto2#VijayRaj", thing);
  Model m = model1.union(model2);
  
  // BUG FIX: close the writer via try-with-resources so buffered output is
  // flushed to disk; the old code never closed the FileWriter.
  try (FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/mergetestds.xml")) {
    RDFDataMgr.write(fw, ds, RDFFormat.NQUADS_UTF8);
  }
}
 
Example 31
Project: JenaKBClient   File: KBIndividualImplTest.java   View source code 6 votes vote down vote up
/**
 * Exercises wrapping an anonymous node as an RDF resource and dumps the
 * backing dataset to a file as N-Quads.
 */
@Test
public void testRule() throws Exception {
  Node var = NodeFactory.createAnon();
  
  System.out.println("Node: " + var);
  
  KBCollection varCol = new KBCollectionImpl("http://www.w3.org/2007/rif#var");
  RDFNode varRdf = KBObjectImpl.getBaseContextModel().asRDFNode(var);
  System.out.println("RDF Node: " + varRdf);

  Resource varRes = varRdf.asResource();
  System.out.println("Resource: " + varRes.getLocalName() + ", " + varRes.getNameSpace() + ", " + varRes.getURI());
  
  Variable v = new VariableImpl("vijayvar");
  System.out.println("Var Name: " + v.getName());
  // BUG FIX: close the writer via try-with-resources so buffered output is
  // flushed to disk; the old code never closed the FileWriter.
  try (FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/test4.xml")) {
    RDFDataMgr.write(fw, KBObjectImpl.getDataset(), RDFFormat.NQUADS);
  }
}
 
Example 32
Project: JenaKBClient   File: KBIndividualImplTest.java   View source code 6 votes vote down vote up
/**
 * Creates a reified statement involving a collection, predicate, and
 * variable, then dumps the backing dataset to a file as N-Quads.
 */
@Test
public void testStatements() throws Exception {
  String bio_ns = "http://example.com/biology#";
  KBCollection human = new KBCollectionImpl(bio_ns + "human");
  Variable v = new VariableImpl ("person");
  
  KBObjectImpl.getBaseContextModel().createList(new RDFNode[] {((Resource)human.getCore()), ((Resource)v.getCore())});
  
  KBPredicate father = new KBPredicateImpl(Constants.CYC_TBOX_NS.concat("father"));
  
  Statement s;
  s = KBObjectImpl.getBaseContextModel().createStatement(((KBCollectionImpl)Constants.owlThing()).getCore(), (OntProperty)((KBPredicateImpl)father).getCore(), ((VariableImpl)v).getCore());
  KBObjectImpl.getBaseContextModel().add(s);
  
  KBObjectImpl.getBaseContextModel().createReifiedStatement(s);
  
  // BUG FIX: open the writer right before use and close it via
  // try-with-resources; the old code opened it early and never closed it,
  // so buffered output could be lost.
  try (FileWriter fw = new FileWriter("/scratch/WORK2/jena/testModels/test5.xml")) {
    RDFDataMgr.write(fw, KBObjectImpl.getDataset(), RDFFormat.NQUADS);
  }
}
 
Example 33
Project: incubator-taverna-language   File: RDFToManifest.java   View source code 6 votes vote down vote up
/**
 * Parses a JSON-LD input stream into a freshly created Jena {@link Model}.
 *
 * @param jsonIn stream containing the JSON-LD document
 * @param base base URI used to resolve relative IRIs during parsing
 * @return a new default model populated from the stream
 * @throws IOException if reading the stream fails
 * @throws RiotException if the JSON-LD is malformed
 */
protected static Model jsonLdAsJenaModel(InputStream jsonIn, URI base)
		throws IOException, RiotException {
	Model parsed = ModelFactory.createDefaultModel();

	ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
	try {
		// TAVERNA-971: set context classloader so jarcache.json is consulted
		// even through OSGi
		Thread.currentThread().setContextClassLoader(RDFToManifest.class.getClassLoader());

		// With the classloader in place, parsing needs no network access.
		RDFDataMgr.read(parsed, jsonIn, base.toASCIIString(), Lang.JSONLD);
	} finally {
		// Always restore the previous context class loader (may be null).
		Thread.currentThread().setContextClassLoader(previousLoader);
	}
	return parsed;
}
 
Example 34
Project: marklogic-spring-batch   File: RdfTripleItemReader.java   View source code 6 votes vote down vote up
/**
 * Uses RDFDataMgr to parse triples by constructing the CollectorStreamTriples as the input stream.
 * On success, {@code tripleIterator} iterates the collected triples; on any failure the reader
 * is closed and the error is logged (the method deliberately does not propagate exceptions).
 */
@SuppressWarnings("unchecked")
private void parseTriples() {

	// Fix: the original checked only fileName.length() with a message claiming a
	// null check — a null fileName would have thrown an NPE before the assertion.
	Assert.state(fileName != null && !fileName.isEmpty(), "fileName must not be null or empty.");

	try {
		inputStream = new CollectorStreamTriples();
		// (Removed the original `inputStream != null` assertion — `new` cannot return null.)
		RDFDataMgr.parse(inputStream, fileName);
		logger.info("Number of Triples [" + inputStream.getCollected().size() + "]");
		tripleIterator = (Iterator<T>) inputStream.getCollected().iterator();
		Assert.state(tripleIterator != null, "tripleIterator must not be null. Something went wrong");
	}
	catch (Exception e) {
		// Preserve the original swallow-and-continue contract, but record the
		// failure at error level with the full stack trace instead of info text.
		close();
		logger.error("Caught exception[" + e.getMessage() + "]", e);
	}

}
 
Example 35
Project: jena-inf-engine   File: DevRDFS.java   View source code 6 votes vote down vote up
/**
 * Development entry point: loads RDFS vocabulary and data into a TDB-backed
 * dataset graph and prepares an RDFS inference setup over it.
 */
public static void mainTDB(String...argv) throws IOException {
    DatasetGraphTDB dsg = ((DatasetGraphTransaction)TDBFactory.createDatasetGraph()).get() ;
    final String dir = "testing/Inf" ;

    Model vocab = RDFDataMgr.loadModel(dir + "/rdfs-vocab.ttl") ;
    Model data = RDFDataMgr.loadModel(dir + "/rdfs-data.ttl") ;

    // Read the rule file and strip '#' line comments before use.
    String rules = FileUtils.readWholeFileAsUTF8(dir + "/rdfs-min.rules")
            .replaceAll("#[^\\n]*", "") ;

    // TDB
    InferenceSetupRDFS_TDB setup = new InferenceSetupRDFS_TDB(vocab, dsg, false) ;
    //Graph graph = new GraphRDFS(setup, data.getGraph()) ;
}
 
Example 36
Project: jena-inf-engine   File: DevRDFS.java   View source code 6 votes vote down vote up
/**
 * Applies RDFS inference rules to the data graph and writes the fully
 * expanded graph to stdout as Turtle.
 */
public static void expand() throws IOException {
    final boolean combined = false ;
    final String dir = "testing/Inf" ;

    Model vocab = RDFDataMgr.loadModel("vocab.ttl") ;
    Model data = RDFDataMgr.loadModel("data.ttl") ;

    // Read the rule file, dropping '#' line comments.
    String rules = FileUtils.readWholeFileAsUTF8(dir + "/rdfs-min.rules")
            .replaceAll("#[^\\n]*", "") ;

    InferenceSetupRDFS setup = new InferenceSetupRDFS(vocab, combined) ;

    Reasoner reasoner = new GenericRuleReasoner(Rule.parseRules(rules)) ;
    InfModel m = ModelFactory.createInfModel(reasoner, vocab, data) ;

    // Destination graph that accumulates the expanded triples.
    Graph graphExpanded = Factory.createDefaultGraph() ;
    // Stream the data through the inference processor into the graph.
    StreamRDF stream = new InferenceProcessorStreamRDF(StreamRDFLib.graph(graphExpanded), setup) ;
    sendToStream(data.getGraph(), stream) ;

    RDFDataMgr.write(System.out, graphExpanded, Lang.TTL) ;
}
 
Example 37
Project: rocker   File: ModelManager.java   View source code 6 votes vote down vote up
/**
 * Loads a deduplication specification file and builds, for each related graph,
 * the list of classes required from that graph.
 *
 * @param path location of the RDF specification file
 * @return map from graph URI to its required class resources
 */
public static HashMap<String, List<Resource>> loadClassList(String path) {
	HashMap<String, List<Resource>> byGraph = new HashMap<>();
	// Load the specification file.
	Model spec = RDFDataMgr.loadModel(path);
	// Each relatedGraph statement links a dataset resource to a graph URI.
	Iterator<Statement> datasetStatements = spec.listStatements((Resource) null,
			ResourceFactory.createProperty("http://aksw.org/deduplication/relatedGraph"), (RDFNode) null);
	while (datasetStatements.hasNext()) {
		Statement stmt = datasetStatements.next();
		Resource dataset = stmt.getSubject();
		String graphUri = stmt.getObject().as(Resource.class).getURI();
		// Collect every class required for this dataset's graph.
		ArrayList<Resource> requiredClasses = new ArrayList<>();
		Iterator<RDFNode> classNodes = spec.listObjectsOfProperty(dataset,
				ResourceFactory.createProperty("http://aksw.org/deduplication/requiredClasses"));
		while (classNodes.hasNext()) {
			requiredClasses.add(classNodes.next().as(Resource.class));
		}
		byGraph.put(graphUri, requiredClasses);
	}
	return byGraph;
}
 
Example 38
Project: p3-osm-transformer   File: JenaTextConfig.java   View source code 6 votes vote down vote up
/**
 * Import the data into the data set inside a single WRITE transaction,
 * logging triple counts before and after plus the elapsed time.
 * @param dataset target dataset (its default model receives the triples)
 * @param file path or URL of the RDF file to load
 */
public void loadData(Dataset dataset, String file){
    log.info("Start loading") ;
    long startTime = System.nanoTime() ;

    dataset.begin(ReadWrite.WRITE) ;
    try {
        Model defaultModel = dataset.getDefaultModel() ;
        log.info("Number of triples before loading: " + defaultModel.size());
        RDFDataMgr.read(defaultModel, file) ;
        log.info("Number of triples after loading: " + defaultModel.size());
        dataset.commit() ;
    }
    finally {
        // end() releases the transaction; if commit() was not reached the
        // pending changes are discarded.
        dataset.end() ;
    }

    double elapsedMs = (System.nanoTime() - startTime) / 1.0e6 ;
    log.info(String.format("Finish loading - %.2fms", elapsedMs)) ;
}
 
Example 39
Project: u-qasar.platform   File: UQasarUtil.java   View source code 6 votes vote down vote up
/**
	 * Read the RDF model from files.
	 * Loads the ontology file from the data directory into a fresh OWL_MEM
	 * ontology model and installs it via {@code UQasarUtil.setUqModel}.
	 */
	public static void readSemanticModelFiles() {
		logger.debug("Reading the model from a file");
		// Read the model to an existing model
		String dataDir = UQasarUtil.getDataDirPath();
		String modelPath = "file:///" + dataDir + ONTOLOGYFILE;

		OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
		RDFDataMgr.read(model, modelPath);
		logger.debug("Model read from file " + modelPath);
		UQasarUtil.setUqModel(model);
		// Consistency fix: the original printed "Reading done." to System.out
		// while every other message went through the logger; route it through
		// the logger as well. (Dead commented-out debug code removed.)
		logger.debug("Reading done.");
	}
 
Example 40
Project: Tapioca   File: EndpointAnalyzingTask.java   View source code 6 votes vote down vote up
@Override
public void run() {
	LOGGER.info("Starting extraction from \"" + endpointName + "\"...");
	SPARQLEndpointAnalyzer analyzer = new SPARQLEndpointAnalyzer(cacheDirectory);
	try {
		if (outputFile.exists()) {
			// Skip endpoints that were already extracted in a previous run.
			LOGGER.info("There already is a file for \"" + endpointName + "\". Jumping over this endpoint.");
		} else {
			Model voidModel = analyzer.extractVoidInfo(endpointCfg);
			if (voidModel != null) {
				voidModel.setNsPrefix("void", "http://rdfs.org/ns/void#");
				// Fix: the original leaked the FileOutputStream if write() threw,
				// because close() was only reached on the success path.
				// try-with-resources closes it on every path.
				try (FileOutputStream fout = new FileOutputStream(outputFile)) {
					RDFDataMgr.write(fout, voidModel, RDFFormat.TURTLE_PRETTY);
				}
			} else {
				LOGGER.error("Error while requesting the void information of \"" + endpointName + "\".");
			}
		}
	} catch (Exception e) {
		LOGGER.error("Error while requesting and storing the void information of \"" + endpointName + "\".", e);
	} finally {
		LOGGER.info("Finished extraction from \"" + endpointName + "\"...");
	}
}