Java Code Examples for org.apache.jena.riot.RDFDataMgr#loadModel()

The following examples show how to use org.apache.jena.riot.RDFDataMgr#loadModel(). Each example is taken from an open-source project; the source file and project are named above the code.
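As a quick orientation before the project examples, here is a minimal, self-contained sketch of the basic call. It is not taken from any of the listed projects, and the file name data.ttl is a placeholder:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;

public class LoadModelSketch {
    public static void main(String[] args) {
        // loadModel parses the file into a fresh in-memory Model;
        // the syntax is inferred from the ".ttl" file extension.
        Model model = RDFDataMgr.loadModel("data.ttl");
        System.out.println("Loaded " + model.size() + " statements");
    }
}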
Example 1
Source File: GtRDFReader.java    From JedAIToolkit with Apache License 2.0
protected void performReading() {
    final Model model = RDFDataMgr.loadModel(inputFilePath);
    final StmtIterator iter = model.listStatements();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();

        final String pred = stmt.getPredicate().toString();
        if (!(pred.contains("sameAs"))) {
            continue;
        }

        final String sub = stmt.getSubject().toString();
        final String obj = stmt.getObject().toString();

        // add a new edge for every pair of duplicate entities
        int entityId1 = urlToEntityId1.get(sub);
        int entityId2 = urlToEntityId1.get(obj) + datasetLimit;

        duplicatesGraph.addEdge(entityId1, entityId2);
    }
}
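Note that the predicate test above matches any URI containing the substring "sameAs", regardless of namespace. If only owl:sameAs statements are wanted, a selector-based query is a tighter alternative; this is a hedged sketch, not part of JedAIToolkit:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.vocabulary.OWL;

static StmtIterator listSameAs(Model model) {
    // null subject and object act as wildcards, so only the
    // predicate (owl:sameAs) is constrained.
    return model.listStatements(null, OWL.sameAs, (RDFNode) null);
}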
 
Example 2
Source File: EntityRDFReader.java    From JedAIToolkit with Apache License 2.0
@Override
public List<EntityProfile> getEntityProfiles() {
    if (!entityProfiles.isEmpty()) {
        return entityProfiles;
    }

    if (inputFilePath == null) {
        Log.error("Input file path has not been set!");
        return null;
    }

    //load the rdf model from the input file
    try {
        final Model model = RDFDataMgr.loadModel(inputFilePath);
        readModel(model);
    } catch (IOException ex) {
        Log.error("Error in entities reading!", ex);
        return null;
    }

    return entityProfiles;
}
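RDFDataMgr.loadModel itself reports problems through the unchecked org.apache.jena.riot.RiotException (with RiotNotFoundException for missing files), so the checked IOException above presumably originates in readModel. A sketch of catching the RIOT exceptions directly:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RiotException;

static Model tryLoadModel(String path) {
    try {
        return RDFDataMgr.loadModel(path);
    } catch (RiotException ex) {
        // Covers parse errors as well as RiotNotFoundException.
        System.err.println("Could not load " + path + ": " + ex.getMessage());
        return null;
    }
}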
 
Example 3
Source File: ExRIOT_1.java    From xcurator with Apache License 2.0
public static void main(String...argv)
{
    Model m = ModelFactory.createDefaultModel() ;
    // read into the model.
    m.read("data.ttl") ;
    
    // Alternatively, use the RDFDataMgr, which reads from the web,
    // with content negotiation.  Plain names are assumed to be
    // local files, where the file extension indicates the syntax.
    
    Model m2 = RDFDataMgr.loadModel("data.ttl") ;
    
    // Read in more data; the remote server serves up the data
    // with the right MIME type.
    RDFDataMgr.read(m2, "http://host/some-published-data") ;
    
    
    // Read some data, but also give a hint for the syntax in case it is not
    // discovered by inspecting the file or by HTTP content negotiation.
    RDFDataMgr.read(m2, "some-more-data.out", RDFLanguages.TURTLE) ;
}
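The same syntax hint works with loadModel directly, via the loadModel(String, Lang) overload (also used in Examples 11 and 12 below); a small sketch:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFDataMgr;

static Model loadWithHint(String url) {
    // Forces the Turtle parser regardless of file extension or MIME type.
    return RDFDataMgr.loadModel(url, Lang.TURTLE);
}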
 
Example 4
Source File: ExRIOT_out2.java    From xcurator with Apache License 2.0
public static void main(String[] args)
{
    Model model = RDFDataMgr.loadModel("D.ttl") ;
    System.out.println() ;
    System.out.println("#### ---- Write as TriG") ;
    System.out.println() ;
    // This will be the default graph of the dataset written.
    RDFDataMgr.write(System.out, model, Lang.TRIG) ;
    
    // Loading Turtle as TriG reads into the default graph.
    Dataset dataset = RDFDataMgr.loadDataset("D.ttl") ;
    System.out.println() ;
    System.out.println("#### ---- Write as NQuads") ;
    System.out.println() ;
    RDFDataMgr.write(System.out, dataset, Lang.NQUADS) ;
}
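For finer control of the serialization, RDFDataMgr.write also accepts an RDFFormat, which picks a specific writer variant for a language. A sketch (assuming a model loaded as above):

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RDFFormat;

static void writeBlocks(Model model) {
    // TURTLE_BLOCKS writes subject-grouped blocks in a streaming
    // fashion, instead of buffering the whole model to pretty-print.
    RDFDataMgr.write(System.out, model, RDFFormat.TURTLE_BLOCKS);
}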
 
Example 5
Source File: RdfEntityGraphConsumerTest.java    From baleen with Apache License 2.0
@Test
public void testEntityGraphRdf()
    throws AnalysisEngineProcessException, ResourceInitializationException, IOException,
        URISyntaxException {

  processJCas(
      RdfEntityGraphConsumer.PARAM_QUERY_ENDPOINT,
      "http://localhost:3330/ds/query",
      RdfEntityGraphConsumer.PARAM_UPDATE_ENDPOINT,
      "http://localhost:3330/ds/update",
      RdfEntityGraphConsumer.PARAM_STORE_ENDPOINT,
      "http://localhost:3330/ds/data");

  Model expected = RDFDataMgr.loadModel(EXPECTED_DOCUMENT_FILE.toURI().toString());
  Model model = ds.getDefaultModel();
  Resource resource =
      expected.getResource(
          "http://baleen.dstl.gov.uk/8b408a0c7163fdfff06ced3e80d7d2b3acd9db900905c4783c28295b8c996165");
  resource.removeProperties(); // Get rid of the timestamp

  StmtIterator listStatements = expected.listStatements();
  while (listStatements.hasNext()) {
    Statement statement = listStatements.next();
    assertTrue("Missing statement " + statement.toString(), model.contains(statement));
  }
  assertTrue(model.containsAll(expected));
}
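The containment loop above (also used in Examples 6 and 7) compares statements one by one, which works here because the expected model uses stable URIs. For graphs containing blank nodes, label differences would defeat Model.contains; Model.isIsomorphicWith (used in Example 11 below) compares graphs up to blank-node renaming. A sketch:

import org.apache.jena.rdf.model.Model;

static boolean sameGraph(Model actual, Model expected) {
    // True when the two graphs are equal up to blank-node relabelling.
    return actual.isIsomorphicWith(expected);
}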
 
Example 6
Source File: RdfDocumentGraphConsumerTest.java    From baleen with Apache License 2.0
@Test
public void testDocumentGraphRdf()
    throws AnalysisEngineProcessException, ResourceInitializationException, IOException,
        URISyntaxException {

  processJCas(
      RdfDocumentGraphConsumer.PARAM_QUERY_ENDPOINT,
      "http://localhost:" + port + "/ds/query",
      RdfDocumentGraphConsumer.PARAM_UPDATE_ENDPOINT,
      "http://localhost:" + port + "/ds/update",
      RdfDocumentGraphConsumer.PARAM_STORE_ENDPOINT,
      "http://localhost:" + port + "/ds/data");

  Model expected = RDFDataMgr.loadModel(EXPECTED_DOCUMENT_FILE.toURI().toString());
  Model model = ds.getDefaultModel();
  Resource resource =
      expected.getResource(
          "http://baleen.dstl.gov.uk/8b408a0c7163fdfff06ced3e80d7d2b3acd9db900905c4783c28295b8c996165");
  resource.removeProperties(); // Get rid of the timestamp

  StmtIterator listStatements = expected.listStatements();
  while (listStatements.hasNext()) {
    Statement statement = listStatements.next();
    assertTrue("Missing statement " + statement.toString(), model.contains(statement));
  }
  assertTrue(model.containsAll(expected));
}
 
Example 7
Source File: RdfDocumentGraphConsumerTest.java    From baleen with Apache License 2.0
@Test
public void testDocumentGraphRdfWithRelationsAsLinks()
    throws AnalysisEngineProcessException, ResourceInitializationException, IOException,
        URISyntaxException {

  processJCas(
      RdfDocumentGraphConsumer.PARAM_QUERY_ENDPOINT,
      "http://localhost:" + port + "/ds/query",
      RdfDocumentGraphConsumer.PARAM_UPDATE_ENDPOINT,
      "http://localhost:" + port + "/ds/update",
      RdfDocumentGraphConsumer.PARAM_STORE_ENDPOINT,
      "http://localhost:" + port + "/ds/data",
      RdfDocumentGraphConsumer.PARAM_OUTPUT_RELATIONS_AS_LINKS,
      true);

  Model expected =
      RDFDataMgr.loadModel(EXPECTED_DOCUMENT_RELATION_AS_LINKS_FILE.toURI().toString());
  Model model = ds.getDefaultModel();
  Resource resource =
      expected.getResource(
          "http://baleen.dstl.gov.uk/8b408a0c7163fdfff06ced3e80d7d2b3acd9db900905c4783c28295b8c996165");
  resource.removeProperties(); // Get rid of the timestamp

  StmtIterator listStatements = expected.listStatements();
  while (listStatements.hasNext()) {
    Statement statement = listStatements.next();
    assertTrue("Missing statement " + statement.toString(), model.contains(statement));
  }
  assertTrue(model.containsAll(expected));
}
 
Example 8
Source File: ExTDB3.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String assemblerFile = "Store/tdb-assembler.ttl" ;
    
    // Find a particular description in the file where there are several: 
    Model spec = RDFDataMgr.loadModel(assemblerFile) ;

    // Find the right starting point for the description in some way.
    Resource root = null ;

    if ( false )
        // If you know the Resource URI:
        root = spec.createResource("http://example/myChoiceOfURI" );
    else
    {
        // Alternatively, look for a single resource of the right type.
        try {
            // Find the required description - the file can contain descriptions of many different types.
            root = GraphUtils.findRootByType(spec, VocabTDB.tDatasetTDB) ;
            if ( root == null )
                throw new JenaException("Failed to find a suitable root") ;
        } catch (TypeNotUniqueException ex)
        { throw new JenaException("Multiple types for: "+VocabTDB.tDatasetTDB) ; }
    }

    Dataset ds = (Dataset)Assembler.general.open(root) ;
}
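When the assembler file is known to describe exactly one TDB dataset, the root-finding logic above can be delegated entirely; a sketch, assuming jena-tdb's TDBFactory is on the classpath:

import org.apache.jena.query.Dataset;
import org.apache.jena.tdb.TDBFactory;

static Dataset assemble(String assemblerFile) {
    // Finds the single TDB dataset description in the file and opens it.
    return TDBFactory.assembleDataset(assemblerFile);
}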
 
Example 9
Source File: Driver.java    From rdf-delta with Apache License 2.0
public static void main(String[] args) throws InterruptedException {
        // Fix up fuseki config files.
        // DELTA_PORT => value.

        // This is a template that needs updating.
        // Server 1.

        Model model = RDFDataMgr.loadModel("fuseki-config.ttl");
        //  fuseki:name    "%DS_NAME%"
        //  delta:changes  "%LOG_URL%"
        //  delta:patchlog "%LOG_NAME%"
        //  delta:zone     "%ZONE_NAME%"
        update(model, "%DS_NAME%", DS_NAME);
        String LOG_URL = "http://localhost:"+DELTA_PORT+"/";
        update(model, "%LOG_URL%", LOG_URL);
        update(model, "%LOG_NAME%", PATCH_LOG_NAME);

        String zone1 = ZONE1.toString();
        String zone2 = ZONE2.toString();

        update(model, "%ZONE_NAME%", zone1);

        // --- Reset state.
        if ( true ) {
            FileOps.ensureDir(DELTA_DIR);
            FileOps.clearAll(DELTA_DIR);
            FileOps.ensureDir(zone1);
            FileOps.clearAll(zone1);
            FileOps.ensureDir(zone2);
            FileOps.clearAll(zone2);
        }

        DeltaServer logServer = deltaServer(DELTA_PORT, DELTA_DIR);

        try {
            logServer.start();
        }
        catch (BindException e) {
            e.printStackTrace();
            System.exit(0);
        }


//        RDFDataMgr.write(System.out,  model, Lang.TTL);
//        System.out.flush();
        FusekiServer server1 = fuseki(F1_PORT, model);
        server1.start();
        //FusekiServer server2 = fuseki2();

        int numClients = 10;
        int clientLoops = 10;

        CountDownLatch cdl1 = new CountDownLatch(numClients);
        CountDownLatch cdl2 = new CountDownLatch(numClients);
        for (int i = 0 ; i < numClients ; i++ ) {
            client(clientLoops, cdl1, cdl2);
        }
        cdl2.await();
        logServer.stop();
        System.out.println("DONE");
        System.exit(0);
    }
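The update(model, placeholder, value) helper is local to the rdf-delta project and not shown here. A hypothetical reconstruction consistent with the template comments above, replacing every literal equal to the placeholder; this is a sketch, not the project's actual implementation:

import java.util.List;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Statement;

// Hypothetical helper: substitute placeholder literals in the config model.
static void update(Model model, String placeholder, String value) {
    List<Statement> matches = model.listStatements()
            .filterKeep(s -> s.getObject().isLiteral()
                    && placeholder.equals(s.getObject().asLiteral().getString()))
            .toList();
    for (Statement s : matches) {
        model.remove(s);
        // Model.add(Resource, Property, String) creates a plain literal.
        model.add(s.getSubject(), s.getPredicate(), value);
    }
}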
 
Example 10
Source File: DeltaEx_FusekiHighAvailability.java    From rdf-delta with Apache License 2.0
public static void main2(String ...args) {
    setup();
    // Patch Log Server
    FileOps.exists(PLOG_DIR);
    FileOps.clearAll(PLOG_DIR);

    DeltaServer patchLogServer = DeltaServer.server(PLOG_PORT, PLOG_DIR);
    try { patchLogServer.start(); }
    catch (BindException ex) {
        System.err.println("Can't start the patch log server: "+ex.getMessage());
        System.exit(1);
    }

    // For high availability, need a load balancer that switches between the two Fuseki servers.

    // Fuseki server 1
    FusekiServer fuseki1 = fuseki1();
    RDFConnection conn1 = RDFConnectionFactory.connect("http://localhost:"+F1_PORT+"/ds1") ;

    // Fuseki server 2
    FusekiServer fuseki2 = fuseki2();
    RDFConnection conn2 = RDFConnectionFactory.connect("http://localhost:"+F2_PORT+"/ds2") ;

    // Some data (data.ttl is in src/main/resources).
    Model model = RDFDataMgr.loadModel("data.ttl");
    conn1.put(model);

    // Kill fuseki1.
    fuseki1.stop();

    // And fetch data.
    Model model2 = conn2.fetch();
    System.out.println();
    RDFDataMgr.write(System.out, model2, Lang.NT);
    System.out.println();

    // Remove a triple via conn2.
    conn2.update("PREFIX ex: <http://example.org/> DELETE DATA { ex:s ex:p ex:o }");

    // Restart Fuseki1.
    fuseki1 = fuseki1();
    // Not necessary.
    // conn1 = RDFConnectionFactory.connect("http://localhost:"+F1_PORT+"/ds1") ;
    Model model1 = conn1.fetch();
    System.out.println();
    // Data in Fuseki1. One less triple.
    RDFDataMgr.write(System.out, model1, Lang.NT);
    System.out.println();
}
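RDFConnection is AutoCloseable; the demo above does not close its connections, which is fine for a short example, but ordinary code would scope them with try-with-resources. A sketch, with serviceUrl standing in for one of the Fuseki endpoints:

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdfconnection.RDFConnection;
import org.apache.jena.rdfconnection.RDFConnectionFactory;
import org.apache.jena.riot.RDFDataMgr;

static void putData(String serviceUrl) {
    // try-with-resources closes the connection even if put() fails.
    try (RDFConnection conn = RDFConnectionFactory.connect(serviceUrl)) {
        Model model = RDFDataMgr.loadModel("data.ttl");
        conn.put(model);
    }
}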
 
Example 11
Source File: TestBaseGenerate.java    From sparql-generate with Apache License 2.0
@Test
    public void testPlanExecution() throws Exception {

        String query = IOUtils.toString(sm.open(new LookUpRequest(request.query, SPARQLExt.MEDIA_TYPE)), "UTF-8");

        long start0 = System.currentTimeMillis();
        long start = start0;
        SPARQLExtQuery q = (SPARQLExtQuery) QueryFactory.create(query, SPARQLExt.SYNTAX);
        long now = System.currentTimeMillis();
        log.info("needed " + (now - start) + " to parse query");
        start = now;

        // create generation plan
        RootPlan plan = PlanFactory.create(q);
        Dataset ds = request.loadDataset(exampleDir);

        now = System.currentTimeMillis();
        log.info("needed " + (now - start) + " to get ready");
        start = now;

        // execute plan

        ExecutorService executor = new ForkJoinPool(Runtime.getRuntime().availableProcessors(),
            (ForkJoinPool pool) -> {
                final ForkJoinWorkerThread worker = ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool);
                worker.setName("test-" + name + "-" + worker.getPoolIndex());
                return worker;
            },
            null, true);

        ScheduledExecutorService guard = Executors.newScheduledThreadPool(1);
        guard.schedule(()->{executor.shutdownNow();}, 15, TimeUnit.SECONDS);
        Model output;
        Context context = ContextUtils.build()
                .setPrefixMapping(q)
                .setStreamManager(sm)
                .setExecutor(executor)
                .setInputDataset(ds)
                .build();
        output = plan.execGenerate(context);
        guard.shutdownNow();

        now = System.currentTimeMillis();
        log.info("executed plan in " + (now - start));
        start = now;
        log.info("total needed " + (now - start0));

        // write output
        String fileName = exampleDir.toString() + "/output.ttl";
        FileWriter out = new FileWriter(fileName);
        try {
            output.write(out, "TTL");
            StringWriter sw = new StringWriter();
            output.write(sw, "TTL");
            LOG.debug("output is \n" + sw.toString());
        } finally {
            try {
                out.close();
            } catch (IOException closeException) {
                log.error("Error while writing to file");
            }
        }

        URI expectedOutputUri = exampleDir.toURI().resolve("expected_output.ttl");
        Model expectedOutput = RDFDataMgr.loadModel(expectedOutputUri.toString(), Lang.TTL);
//        StringWriter sw = new StringWriter();
//        expectedOutput.write(System.out, "TTL");
        System.out.println("Is isomorphic: " + output.isIsomorphicWith(expectedOutput));
        if (!output.isIsomorphicWith(expectedOutput)) {
            output.listStatements().forEachRemaining((s) -> {
                if (!expectedOutput.contains(s)) {
                    LOG.debug("expectedOutput does not contain " + s);
                }
                expectedOutput.remove(s);
            });
            expectedOutput.listStatements().forEachRemaining((s) -> {
                LOG.debug("output does not contain " + s);
            });
        }

        assertTrue("Error with test " + exampleDir.getName(), output.isIsomorphicWith(expectedOutput));
    }
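The nested try/finally around the FileWriter above can be written more compactly with try-with-resources, which closes the writer even if Model.write throws; a sketch:

import java.io.FileWriter;
import java.io.IOException;
import org.apache.jena.rdf.model.Model;

static void writeTurtle(Model output, String fileName) throws IOException {
    // The writer is closed automatically when the block exits.
    try (FileWriter out = new FileWriter(fileName)) {
        output.write(out, "TTL");
    }
}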
 
Example 12
Source File: TestAllTypes.java    From incubator-taverna-language with Apache License 2.0
@Test
	public void convert() throws Exception {
		assertFalse(allTypesWfdesc.exists());
		WorkflowBundleIO io = new WorkflowBundleIO();
		WorkflowBundle wfBundle = io.readBundle(allTypesT2flow, null);
		io.writeBundle(wfBundle, allTypesWfdesc,
				"text/vnd.wf4ever.wfdesc+turtle");
		assertTrue(allTypesWfdesc.exists());
	
		Model allTypes = RDFDataMgr.loadModel(allTypesWfdesc.getAbsolutePath(), Lang.TURTLE);

		
		
		ByteArrayOutputStream out = new ByteArrayOutputStream();

		Query query = QueryFactory.create("PREFIX wfdesc: <http://purl.org/wf4ever/wfdesc#> "
				+ "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
				+ "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
				+ "SELECT ?wf ?proc ?procType ?procLabel "
				+ "WHERE {"
				+ "	?wf a wfdesc:Workflow;"
				+ "       wfdesc:hasSubProcess ?proc. "
				+ " ?proc rdfs:label ?procLabel ."
				// Ignore non-specific types
				+ "OPTIONAL { ?proc a ?procType . FILTER (?procType != wfdesc:Description && ?procType != wfdesc:Process && ?procType != owl:Thing) }"
				+ "} " + "ORDER BY ?wf ") ;
		

		try (QueryExecution qexec = QueryExecutionFactory.create(query, allTypes)) {
		    ResultSet results = qexec.execSelect() ;
		    
		    //results.forEachRemaining(
		}
		

//		ObjectMapper mapper = new ObjectMapper();
//		JsonNode jsonNode = mapper.readValue(out.toByteArray(), JsonNode.class);
//		String oldWf = null;
//		for (JsonNode binding : jsonNode.path("results").path("bindings")) {
//			String wf = binding.path("wf").path("value").asText();
//			if (!wf.equals(oldWf)) {
//				//System.out.println(wf);
//				oldWf = wf;
//			}
//			String proc = binding.path("proc").path("value").asText();
//			assertNotNull(proc);
//			String procType = binding.path("procType").path("value").asText();
//			String procTypeShort = null;
//			if (procType == null) 
//				continue;
//			procTypeShort = URI.create(procType).getFragment();
//			assertNotNull(procTypeShort);
//			String procLabel = binding.path("procLabel").path("value").asText();
//			assertNotNull(procLabel);
//			//System.out.println(" Processor " + procLabel + " (" + procTypeShort
//			//		+ ")");
//			//System.out.println("   " + proc + " " + procType);
//		}

		out.reset();

		Query query2 = QueryFactory.create(
				"PREFIX wfdesc: <http://purl.org/wf4ever/wfdesc#> "
						+ " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
						+ " PREFIX owl: <http://www.w3.org/2002/07/owl#> "
						+ " SELECT ?wf ?fromProc ?toProc ?fromProcLabel ?toProcLabel "
						+ " WHERE {" + "	?wf a wfdesc:Workflow;"
						+ "       wfdesc:hasSubProcess ?fromProc,?toProc ;"
						+ "       wfdesc:hasDataLink ?link . "
						+ " ?link wfdesc:hasSource ?fromPort; "
						+ "      wfdesc:hasSink ?toPort ."
						+ " ?fromProc wfdesc:hasOutput ?fromPort ;"
						+ "           rdfs:label ?fromProcLabel ."
						+ " ?toProc wfdesc:hasInput ?toPort ;"
						+ "         rdfs:label ?toProcLabel ." + "} "
						+ "ORDER BY ?wf ");
		 //System.out.println(out.toString());
//		jsonNode = mapper.readValue(out.toByteArray(), JsonNode.class);
//		for (JsonNode binding : jsonNode.path("results").path("bindings")) {
//			String wf = binding.path("wf").path("value").asText();
//			if (!wf.equals(oldWf)) {
//				//System.out.println(wf);
//				oldWf = wf;
//			}
//			String fromProcLabel = binding.path("fromProcLabel").path("value")
//					.asText();
//			assertNotNull(fromProcLabel);
//			String toProcLabel = binding.path("toProcLabel").path("value")
//					.asText();
//			assertNotNull(toProcLabel);
//			String fromProc = binding.path("fromProc").path("value").asText();
//			assertNotNull(fromProc);
//			String toProc = binding.path("toProc").path("value").asText();
//			assertNotNull(toProc);
//			//System.out.print(" " + fromProcLabel);
//			//System.out.println(" -> " + toProcLabel);
//			//System.out.println("    " + fromProc + " -> " + toProc);
//		}

	}
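The commented-out results.forEachRemaining( hints at how the SELECT results would be consumed. Since ResultSet is an Iterator over QuerySolution, iteration is straightforward; a sketch using the variable names from the first query above:

import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;

static void printRows(ResultSet results) {
    // ?procType is OPTIONAL in the query, so row.get("procType") may be null.
    results.forEachRemaining((QuerySolution row) ->
            System.out.println(row.get("procLabel") + " (" + row.get("procType") + ")"));
}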