Java Code Examples for org.eclipse.rdf4j.repository.RepositoryConnection#commit()

The following examples show how to use org.eclipse.rdf4j.repository.RepositoryConnection#commit(). Each example is taken from an open-source project; the source file and its license are listed above the code.
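
All of these examples follow the same basic pattern: obtain a RepositoryConnection, add statements (inside an explicit begin() where the repository requires it), call commit() to make the changes durable, roll back on failure, and close the connection when done. The sketch below illustrates that pattern against a plain in-memory SailRepository; the repository setup and the example IRIs are illustrative assumptions, not taken from the projects listed here.

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class CommitExample {
    public static void main(String[] args) {
        Repository repo = new SailRepository(new MemoryStore()); // in-memory store, for illustration only
        ValueFactory vf = repo.getValueFactory();
        IRI cpu = vf.createIRI("urn:example:cpu");               // hypothetical example IRIs
        IRI loadPerc = vf.createIRI("urn:example:loadPerc");

        RepositoryConnection conn = repo.getConnection();
        try {
            conn.begin();                                        // start an explicit transaction
            conn.add(cpu, loadPerc, vf.createLiteral("6"));      // stage a statement
            conn.commit();                                       // make the staged changes durable
        } catch (Exception e) {
            if (conn.isActive()) {
                conn.rollback();                                 // discard uncommitted changes on failure
            }
            throw e;
        } finally {
            conn.close();
        }
        repo.shutDown();
    }
}
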
Example 1
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testRegexFilter() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI testClass = VF.createIRI(litdupsNS, "test");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.add(cpu, RDF.TYPE, testClass);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            String.format("<%s> ?p ?o.\n", cpu.stringValue()) +
            "FILTER(regex(?o, '^1'))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 1);
}
 
Example 2
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testOSPObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            "?s ?p ?o.\n" +
            "FILTER(org.apache:range(?o, '6', '8'))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 2);
}
 
Example 3
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testSPOObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n" +
            "FILTER(org.apache:range(?o, '6', '8'))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 2);
}
 
Example 4
Source File: AbstractSHACLTest.java    From rdf4j with BSD 3-Clause "New" or "Revised" License
protected void upload(Repository rep, Model dataGraph) {
	RepositoryConnection con = rep.getConnection();

	try {
		con.begin();
		con.add(dataGraph);
		con.commit();
	} catch (Exception e) {
		if (con.isActive()) {
			con.rollback();
		}
		throw e;
	} finally {
		con.close();
	}
}
 
Example 5
Source File: AbstractLuceneSailSpinTest.java    From rdf4j with BSD 3-Clause "New" or "Revised" License
@Test
public void testDistanceFunction() throws Exception {
	RepositoryConnection connection = getConnection();
	String queryStr = "prefix geo:  <" + GEO.NAMESPACE + ">" + "prefix geof: <" + GEOF.NAMESPACE + ">"
			+ "prefix search: <" + LuceneSailSchema.NAMESPACE + ">"
			+ "select ?toUri ?fromUri ?dist where {(?from ?range ?units geo:asWKT search:distance)"
			+ "search:withinDistance (?toUri ?to ?dist) ."
			+ "?toUri a <urn:geo/Landmark>. ?fromUri geo:asWKT ?from; <urn:geo/maxDistance> ?range.}";
	try {
		TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryStr);
		query.setBinding("units", GEOF.UOM_METRE);

		printTupleResult(query);
		try (TupleQueryResult result = query.evaluate()) {
			int count = countTupleResults(result);
			Assert.assertEquals(2, count);
		}
	} catch (Exception e) {
		connection.rollback();
		throw e;
	} finally {
		connection.commit();
	}
}
 
Example 6
Source File: RepositoryConfigUtil.java    From rdf4j with BSD 3-Clause "New" or "Revised" License
/**
 * Update the specified RepositoryConnection with the specified set of RepositoryConfigs. This will overwrite all
 * existing configurations in the Repository that have a Repository ID occurring in these RepositoryConfigs. Note:
 * this method begins and commits its own transaction on the connection.
 *
 * @param con     the repository connection to perform the update on
 * @param configs The RepositoryConfigs that should be added to or updated in the Repository. The RepositoryConfig's
 *                ID may already occur in the Repository, in which case all previous configuration data for that
 *                Repository will be cleared before the RepositoryConfig is added.
 * @throws RepositoryException
 * @throws RepositoryConfigException
 */
@Deprecated
public static void updateRepositoryConfigs(RepositoryConnection con, RepositoryConfig... configs)
		throws RepositoryException, RepositoryConfigException {
	ValueFactory vf = con.getRepository().getValueFactory();

	con.begin();

	for (RepositoryConfig config : configs) {
		Resource context = getContext(con, config.getID());

		if (context != null) {
			con.clear(context);
		} else {
			context = vf.createBNode();
		}

		con.add(context, RDF.TYPE, REPOSITORY_CONTEXT);

		Model graph = new LinkedHashModel();
		config.export(graph);
		con.add(graph, context);
	}

	con.commit();
}
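
Because the utility shown above manages its own transaction (begin() and commit() are called inside the method), a caller only needs an open connection. A minimal usage sketch, assuming a hypothetical repository id "example-repo" and an in-memory configuration:

import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.config.RepositoryConfig;
import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig;

public class UpdateConfigExample {
    public static void main(String[] args) {
        Repository systemRepo = new SailRepository(new MemoryStore()); // stand-in for a SYSTEM repository
        try (RepositoryConnection con = systemRepo.getConnection()) {
            // "example-repo" and the memory-store config are hypothetical values
            RepositoryConfig config = new RepositoryConfig("example-repo",
                    new SailRepositoryConfig(new MemoryStoreConfig()));
            // the deprecated utility shown above begins and commits its own transaction
            RepositoryConfigUtil.updateRepositoryConfigs(con, config);
        }
        systemRepo.shutDown();
    }
}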
 
Example 7
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testNamedGraphLoad2() throws Exception {
    InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
    assertNotNull(stream);
    RepositoryConnection conn = repository.getConnection();
    conn.add(stream, "", RDFFormat.TRIG);
    conn.commit();

    RepositoryResult<Statement> statements = conn.getStatements(null, VF.createIRI("http://www.example.org/vocabulary#name"), null, true, VF.createIRI("http://www.example.org/exampleDocument#G1"));
    int count = 0;
    while (statements.hasNext()) {
        statements.next();
        count++;
    }
    statements.close();
    assertEquals(1, count);

    conn.close();
}
 
Example 8
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testSPOPredRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc1");
    IRI loadPerc2 = VF.createIRI(litdupsNS, "loadPerc2");
    IRI loadPerc3 = VF.createIRI(litdupsNS, "loadPerc3");
    IRI loadPerc4 = VF.createIRI(litdupsNS, "loadPerc4");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc2, sev);
    conn.add(cpu, loadPerc4, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            "<" + cpu.stringValue() + "> ?p ?o.\n" +
            "FILTER(org.apache:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(2, cth.getCount());
}
 
Example 9
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testEvaluate() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI uri1 = VF.createIRI(litdupsNS, "uri1");
    conn.add(cpu, loadPerc, uri1);
    conn.commit();

    String query = "select * where {" +
            "?x <" + loadPerc.stringValue() + "> ?o1." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    assertEquals(cth.getCount(), 1);
    conn.close();
}
 
Example 10
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testEvaluateMultiLine() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI uri1 = VF.createIRI(litdupsNS, "uri1");
    IRI pred2 = VF.createIRI(litdupsNS, "pred2");
    IRI uri2 = VF.createIRI(litdupsNS, "uri2");
    conn.add(cpu, loadPerc, uri1);
    conn.add(cpu, pred2, uri2);
    conn.commit();

    String query = "select * where {" +
            "?x <" + loadPerc.stringValue() + "> ?o1." +
            "?x <" + pred2.stringValue() + "> ?o2." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true));
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 1);
}
 
Example 11
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testPOObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            "?x <" + loadPerc.stringValue() + "> ?o.\n" +
            "FILTER(org.apache:range(?o, '6', '8'))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(2, cth.getCount());
}
 
Example 12
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testPOPredRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc1");
    IRI loadPerc2 = VF.createIRI(litdupsNS, "loadPerc2");
    IRI loadPerc3 = VF.createIRI(litdupsNS, "loadPerc3");
    IRI loadPerc4 = VF.createIRI(litdupsNS, "loadPerc4");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc2, sev);
    conn.add(cpu, loadPerc4, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            "?x ?p ?o.\n" +
            "FILTER(org.apache:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 2);
}
 
Example 13
Source File: CbSailTest.java    From rya with Apache License 2.0
public void testSimpleQueryAuth() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI cpu = VF.createIRI(litdupsNS, "cpu");
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI uri1 = VF.createIRI(litdupsNS, "uri1");
    IRI uri2 = VF.createIRI(litdupsNS, "uri2");
    IRI auth1 = VF.createIRI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "auth1");
    conn.add(cpu, loadPerc, uri1, auth1);
    conn.add(cpu, loadPerc, uri2);
    conn.commit();
    conn.close();

    resultEndpoint.expectedMessageCount(1);

    //query through camel
    String query = "select * where {" +
            "<" + cpu.toString() + "> ?p ?o1." +
            "}";
    template.sendBodyAndHeader(null, CbSailComponent.SPARQL_QUERY_PROP, query);

    assertMockEndpointsSatisfied();

    resultEndpoint.expectedMessageCount(2);

    query = "select * where {" +
            "<" + cpu.toString() + "> ?p ?o1." +
            "}";
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(CbSailComponent.SPARQL_QUERY_PROP, query);
    headers.put(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, "auth1");
    template.sendBodyAndHeaders(null, headers);

    assertMockEndpointsSatisfied();
}
 
Example 14
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testEquivPropOf() throws Exception {
      if (internalInferenceEngine == null) {
          return; // infer not supported
      }

      RepositoryConnection conn = repository.getConnection();
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, VF.createIRI(litdupsNS, "ugradDegreeFrom")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UgradA"), VF.createIRI(litdupsNS, "undergradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradB"), VF.createIRI(litdupsNS, "ugradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradC"), VF.createIRI(litdupsNS, "ugraduateDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
      conn.commit();
      conn.close();

      internalInferenceEngine.refreshGraph();

      conn = repository.getConnection();

      String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:ugradDegreeFrom lit:Harvard.}";

      TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      CountTupleHandler tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(2, tupleHandler.getCount());

      conn.close();
  }
 
Example 15
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testAddStatement() throws Exception {
        RepositoryConnection conn = repository.getConnection();

        IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
        IRI uri1 = VF.createIRI(litdupsNS, "uri1");
        conn.add(cpu, loadPerc, uri1);
        conn.commit();

        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
        int count = 0;
        while (result.hasNext()) {
            count++;
            result.next();
        }
        result.close();
        assertEquals(1, count);

        // clean up
        conn.remove(cpu, loadPerc, uri1);

        // test removal
        result = conn.getStatements(cpu, loadPerc, null, true);
        count = 0;
        while (result.hasNext()) {
            count++;
            result.next();
        }
        result.close();
        assertEquals(0, count);

        conn.close();
    }
 
Example 16
Source File: SPARQL11UpdateComplianceTest.java    From rdf4j with BSD 3-Clause "New" or "Revised" License
@Override
protected void runTest() throws Exception {

	logger.debug("running {}", getName());

	RepositoryConnection con = dataRep.getConnection();
	RepositoryConnection erCon = expectedResultRepo.getConnection();
	try {
		String updateString = readUpdateString();

		con.begin();

		Update update = con.prepareUpdate(QueryLanguage.SPARQL, updateString, requestFile);
		update.setDataset(dataset);
		update.execute();

		con.commit();

		// check default graph
		logger.info("checking default graph");
		compareGraphs(Iterations.asList(con.getStatements(null, null, null, true, (Resource) null)),
				Iterations.asList(erCon.getStatements(null, null, null, true, (Resource) null)));

		for (String namedGraph : inputNamedGraphs.keySet()) {
			logger.info("checking named graph {}", namedGraph);
			IRI contextURI = con.getValueFactory().createIRI(namedGraph.replaceAll("\"", ""));
			compareGraphs(Iterations.asList(con.getStatements(null, null, null, true, contextURI)),
					Iterations.asList(erCon.getStatements(null, null, null, true, contextURI)));
		}
	} catch (Exception e) {
		if (con.isActive()) {
			con.rollback();
		}
		throw e;
	} finally {
		con.close();
		erCon.close();
	}
}
 
Example 17
Source File: SPARQLComplianceTest.java    From rdf4j with BSD 3-Clause "New" or "Revised" License
protected void upload(IRI graphURI, Resource context) throws Exception {
	RepositoryConnection con = getDataRepository().getConnection();

	try {
		con.begin();
		RDFFormat rdfFormat = Rio.getParserFormatForFileName(graphURI.toString()).orElse(RDFFormat.TURTLE);
		RDFParser rdfParser = Rio.createParser(rdfFormat, getDataRepository().getValueFactory());
		rdfParser.setVerifyData(false);
		rdfParser.setDatatypeHandling(DatatypeHandling.IGNORE);
		// rdfParser.setPreserveBNodeIDs(true);

		RDFInserter rdfInserter = new RDFInserter(con);
		rdfInserter.enforceContext(context);
		rdfParser.setRDFHandler(rdfInserter);

		URL graphURL = new URL(graphURI.toString());
		InputStream in = graphURL.openStream();
		try {
			rdfParser.parse(in, graphURI.toString());
		} finally {
			in.close();
		}

		con.commit();
	} catch (Exception e) {
		if (con.isActive()) {
			con.rollback();
		}
		throw e;
	} finally {
		con.close();
	}
}
 
Example 18
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testSubClassOf() throws Exception {
      if (internalInferenceEngine == null) {
          return; // infer not supported
      }

      RepositoryConnection conn = repository.getConnection();
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, VF.createIRI(litdupsNS, "Student")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "Student"), RDFS.SUBCLASSOF, VF.createIRI(litdupsNS, "Person")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UgradA"), RDF.TYPE, VF.createIRI(litdupsNS, "UndergraduateStudent")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentB"), RDF.TYPE, VF.createIRI(litdupsNS, "Student")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "PersonC"), RDF.TYPE, VF.createIRI(litdupsNS, "Person")));
      conn.commit();
      conn.close();

      internalInferenceEngine.refreshGraph();

      conn = repository.getConnection();

      //simple api first
      RepositoryResult<Statement> person = conn.getStatements(null, RDF.TYPE, VF.createIRI(litdupsNS, "Person"), true);
      int count = 0;
      while (person.hasNext()) {
          count++;
          person.next();
      }
      person.close();
      assertEquals(3, count);

      String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s rdf:type lit:Person.}";

      TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      CountTupleHandler tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(3, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s rdf:type lit:Student.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(2, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s rdf:type lit:UndergraduateStudent.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(1, tupleHandler.getCount());

      conn.close();
  }
 
Example 19
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testTransitiveProp() throws Exception {
      if (internalInferenceEngine == null) {
          return; // infer not supported
      }

      RepositoryConnection conn = repository.getConnection();
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "Queens"), VF.createIRI(litdupsNS, "subRegionOf"), VF.createIRI(litdupsNS, "NYC")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "NYC"), VF.createIRI(litdupsNS, "subRegionOf"), VF.createIRI(litdupsNS, "NY")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "NY"), VF.createIRI(litdupsNS, "subRegionOf"), VF.createIRI(litdupsNS, "US")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "US"), VF.createIRI(litdupsNS, "subRegionOf"), VF.createIRI(litdupsNS, "NorthAmerica")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "NorthAmerica"), VF.createIRI(litdupsNS, "subRegionOf"), VF.createIRI(litdupsNS, "World")));
      conn.commit();
      conn.close();

      internalInferenceEngine.refreshGraph();

      conn = repository.getConnection();

      String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:subRegionOf lit:NorthAmerica.}";

      TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      CountTupleHandler tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(4, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:subRegionOf lit:NY.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(2, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {lit:Queens lit:subRegionOf ?s.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(5, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {lit:NY lit:subRegionOf ?s.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(3, tupleHandler.getCount());

      conn.close();
  }
 
Example 20
Source File: RdfCloudTripleStoreConnectionTest.java    From rya with Apache License 2.0
public void testSubPropertyOf() throws Exception {
      if (internalInferenceEngine == null) {
          return; // infer not supported
      }

      RepositoryConnection conn = repository.getConnection();
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "degreeFrom")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "degreeFrom")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "memberOf")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "associatedWith")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UgradA"), VF.createIRI(litdupsNS, "undergradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradB"), VF.createIRI(litdupsNS, "gradDegreeFrom"), VF.createIRI(litdupsNS, "Yale")));
      conn.add(VF.createStatement(VF.createIRI(litdupsNS, "ProfessorC"), VF.createIRI(litdupsNS, "memberOf"), VF.createIRI(litdupsNS, "Harvard")));
      conn.commit();
      conn.close();

      internalInferenceEngine.refreshGraph();

      conn = repository.getConnection();

      String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:degreeFrom lit:Harvard.}";

      TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      CountTupleHandler tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(1, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:memberOf lit:Harvard.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(2, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:associatedWith ?o.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(3, tupleHandler.getCount());

      query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
              "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
              "PREFIX lit: <" + litdupsNS + ">\n" +
              "select * where {?s lit:gradDegreeFrom lit:Yale.}";

      tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
      tupleHandler = new CountTupleHandler();
      tupleQuery.evaluate(tupleHandler);
      assertEquals(1, tupleHandler.getCount());

      conn.close();
  }