Java Code Examples for org.openrdf.model.Statement

The following examples show how to use org.openrdf.model.Statement. These examples are extracted from open source projects; where available, the source project, source file, and license are noted above each example.
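
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing how a Statement is typically created and inspected with the standard OpenRDF/Sesame API; the example.org resource names are made up for illustration.

import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.RDF;

public class StatementBasics {
	public static void main(String[] args) {
		ValueFactory vf = ValueFactoryImpl.getInstance();

		// Hypothetical resources, for illustration only.
		URI picasso = vf.createURI("http://example.org/picasso");
		URI painter = vf.createURI("http://example.org/Painter");

		// A Statement is an immutable (subject, predicate, object) triple.
		Statement st = vf.createStatement(picasso, RDF.TYPE, painter);

		System.out.println(st.getSubject());   // http://example.org/picasso
		System.out.println(st.getPredicate()); // rdf:type
		System.out.println(st.getObject());    // http://example.org/Painter
	}
}
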
Example 1
Source Project: database   Source File: SPARQLQueryTest.java    License: GNU General Public License v2.0
protected final TupleQueryResult readExpectedTupleQueryResult()
	throws Exception
{
	TupleQueryResultFormat tqrFormat = QueryResultIO.getParserFormatForFileName(resultFileURL);

	if (tqrFormat != null) {
		InputStream in = new URL(resultFileURL).openStream();
		try {
			TupleQueryResultParser parser = QueryResultIO.createParser(tqrFormat);
			parser.setValueFactory(dataRep.getValueFactory());

			TupleQueryResultBuilder qrBuilder = new TupleQueryResultBuilder();
			parser.setQueryResultHandler(qrBuilder);

			parser.parseQueryResult(in);
			return qrBuilder.getQueryResult();
		}
		finally {
			in.close();
		}
	}
	else {
		Set<Statement> resultGraph = readExpectedGraphQueryResult();
		return DAWGTestResultSetUtil.toTupleQueryResult(resultGraph);
	}
}
 
Example 2
public void testNTriplesFile()
	throws Exception
{
	RDFParser turtleParser = createRDFParser();
	turtleParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
	turtleParser.setRDFHandler(new RDFHandlerBase() {
		public void handleStatement(Statement st)
				throws RDFHandlerException {
			if (log.isInfoEnabled())
				log.info("Statement: " + st);
		}
	});

	// Note: This is a local copy.
	InputStream in = BigdataNTriplesParser.class.getResourceAsStream(NTRIPLES_TEST_FILE);
	try {
		turtleParser.parse(in, NTRIPLES_TEST_URL);
	}
	catch (RDFParseException e) {
		fail("Failed to parse N-Triples test document: " + e.getMessage());
	}
	finally {
		in.close();
	}
}
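
For comparison with Example 2, here is a minimal sketch (not from the project above) that collects parsed statements with Rio's built-in StatementCollector instead of a custom handler; the file name "data.nt" and base URI are placeholders.

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Collection;

import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.StatementCollector;

public class CollectStatements {
	public static void main(String[] args) throws Exception {
		RDFParser parser = Rio.createParser(RDFFormat.NTRIPLES);
		StatementCollector collector = new StatementCollector();
		parser.setRDFHandler(collector);

		// "data.nt" is a placeholder path.
		InputStream in = new FileInputStream("data.nt");
		try {
			parser.parse(in, "http://example.org/base");
		} finally {
			in.close();
		}

		Collection<Statement> statements = collector.getStatements();
		System.out.println("Parsed " + statements.size() + " statements");
	}
}
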
 
Example 3
/**
 * {@inheritDoc}
 * 
 * Logs the query at INFO and logs the optimized AST at TRACE.
 */
@Override
protected Stream<Statement> _project( 
        final String queryStr, final String extQueryId) {
    
    logQuery(queryStr);
    return Code.wrapThrow(() -> {
        final BigdataSailGraphQuery query = (BigdataSailGraphQuery) 
                cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
        setMaxQueryTime(query);
        final UUID queryId = setupQuery(query.getASTContainer(), 
                                        QueryType.CONSTRUCT, extQueryId);
    
        sparqlLog.trace(() -> "optimized AST:\n"+query.optimize());
    
        /*
         * Result is closed automatically by GraphStreamer.
         */
        final GraphQueryResult result = query.evaluate();
        final Optional<Runnable> onClose = 
                Optional.of(() -> finalizeQuery(queryId));
        return new GraphStreamer<>(result, onClose).stream();
    });
    
}
 
Example 4
Source Project: mustard   Source File: RDFDataModule.java    License: MIT License
@Main
public DTGraph<String,String> createGraph() {
	Set<Statement> stmts;
	
	if (graphDepth == 0) { // graphDepth == 0 means use the full graph
		stmts = new HashSet<Statement>(dataset.getFullGraph(inference));
	} else {
		stmts = RDFUtils.getStatements4Depth(dataset, instances, graphDepth, inference);
	}
	
	stmts.removeAll(blackList);
	
	if (instances == null) { // No instances supplied, then we do not create instanceNodes
		graph = RDFUtils.statements2Graph(stmts, RDFUtils.REGULAR_LITERALS);
	} else {
		SingleDTGraph g = RDFUtils.statements2Graph(stmts, RDFUtils.REGULAR_LITERALS, instances, false);
		graph = g.getGraph();
		instanceNodes = g.getInstances();
	}
	return graph;
}
 
Example 5
Source Project: cumulusrdf   Source File: CumulusRDFSailConnection.java    License: Apache License 2.0
public CloseableIteration<? extends Statement, QueryEvaluationException> getRangeStatements(
		final Resource subj, 
		final URI pred, 
		final Literal lowerBound,
		final boolean lower_equals, 
		final Literal upperBound, 
		final boolean upper_equals, 
		final Literal equals, 
		final boolean reverse) throws QueryEvaluationException {
	try {
		return createRangeStatementIterator(subj, pred, lowerBound, lower_equals, upperBound, upper_equals, equals, reverse);
	} catch (SailException e) {
		e.printStackTrace();
		throw new QueryEvaluationException(e);
	}
}
 
Example 6
Source Project: cumulusrdf   Source File: ValueDictionaryBase.java    License: Apache License 2.0
@Override
public Iterator<byte[][]> toIDTripleIterator(final Iterator<Statement> triples) {

	return new AbstractIterator<byte[][]>() {

		@Override
		protected byte[][] computeNext() {

			if (!triples.hasNext()) {
				return endOfData();
			}

			final Statement statement = triples.next();

			try {
				return new byte[][] {
					getID(statement.getSubject(), false),
					getID(statement.getPredicate(), true),
					getID(statement.getObject(), false) };
			} catch (final DataAccessLayerException exception) {
				_log.error(MessageCatalog._00093_DATA_ACCESS_LAYER_FAILURE, exception);
				return endOfData();
			}
		}
	};
}
 
Example 7
@Test
public void testAddStatement()
	throws Exception
{
	testCon.add(bob, name, nameBob);

	assertTrue(NEWLY_ADDED, testCon.hasStatement(bob, name, nameBob, false));

	Statement statement = vf.createStatement(alice, name, nameAlice);
	testCon.add(statement);

	assertTrue(NEWLY_ADDED, testCon.hasStatement(statement, false));
	assertTrue(NEWLY_ADDED, testCon.hasStatement(alice, name, nameAlice, false));

	Repository tempRep = new SailRepository(new MemoryStore());
	tempRep.initialize();
	RepositoryConnection con = tempRep.getConnection();

	con.add(testCon.getStatements(null, null, null, false));

	assertTrue("Temp Repository should contain newly added statement",
			con.hasStatement(bob, name, nameBob, false));
	con.close();
	tempRep.shutDown();
}
 
Example 8
Source Project: database   Source File: TestFederatedQuery.java    License: GNU General Public License v2.0
/**
 * Read the expected tuple query result from the specified resource.
 * 
 * @param resultFile the name of the classpath resource holding the expected result
 * @return the expected tuple query result
 * @throws Exception if the resource cannot be read or parsed
 */
private TupleQueryResult readExpectedTupleQueryResult(final String resultFile) throws Exception
{
	final TupleQueryResultFormat tqrFormat = QueryResultIO.getParserFormatForFileName(resultFile);

	if (tqrFormat != null) {
		final InputStream in = TestFederatedQuery.class.getResourceAsStream(TEST_RESOURCE_PATH + resultFile);
		try {
			final TupleQueryResultParser parser = QueryResultIO.createParser(tqrFormat);
			parser.setValueFactory(ValueFactoryImpl.getInstance());

			final TupleQueryResultBuilder qrBuilder = new TupleQueryResultBuilder();
			parser.setTupleQueryResultHandler(qrBuilder);

			parser.parse(in);
			return qrBuilder.getQueryResult();
		}
		finally {
			in.close();
		}
	}
	else {
		final Set<Statement> resultGraph = readExpectedGraphQueryResult(resultFile);
		return DAWGTestResultSetUtil.toTupleQueryResult(resultGraph);
	}
}
 
Example 9
Source Project: GeoTriples   Source File: RMLMappingFactory.java    License: Apache License 2.0
/**
 * Extract content URIs from a term type resource.
 * 
 * @param r2rmlMappingGraph the R2RML mapping graph to query
 * @param termType the term type resource to inspect
 * @param term the term whose URI values are extracted
 * @return the set of extracted URIs, or null if no matching statements exist
 * @throws InvalidR2RMLStructureException
 */
protected static Set<URI> extractURIsFromTermMap(
		CustomSesameDataset r2rmlMappingGraph, Resource termType, R2RMLTerm term)
		throws InvalidR2RMLStructureException {
	URI p = getTermURI(r2rmlMappingGraph, term);

	List<Statement> statements = r2rmlMappingGraph.tuplePattern(termType,
			p, null);
	if (statements.isEmpty()) {
		return null;
	}
	Set<URI> uris = new HashSet<URI>();
	for (Statement statement : statements) {
		URI uri = (URI) statement.getObject();
		log.debug("[RMLMappingFactory:extractURIsFromTermMap] Extracted "
				+ term + " : " + uri.stringValue());
		uris.add(uri);
	}
	return uris;
}
 
Example 10
/**
 * Handles a statement.
 * 
 * Statements are added until the chunk size is reached. Once a chunk
 * has been added, the current transaction is committed and a new one
 * is started.
 * 
 * @param stmnt the statement to add
 * @throws RDFHandlerException if the statement cannot be added
 */
@Override
public void handleStatement(Statement stmnt) throws RDFHandlerException {
	try {
		// check if triple should be added to a specific graph
		if (dctx != null) {
			conn.add(stmnt, dctx);
		} else {
			conn.add(stmnt);
		}
		// check if chunk size is reached and transaction should be
		// committed
		count++;
		if (count >= size) {
			count = 0;
			conn.commit();
			conn.begin();
		}
	} catch (RepositoryException ex) {
		throw new RDFHandlerException(ex);
	}
}
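
To show how a handler like the one in Example 10 is typically driven, here is a minimal sketch (not from the project above) that wires an RDFHandler to a Rio parser and a repository connection with explicit transactions. The file name "data.ttl", the in-memory repository, and the chunk size of 1000 are assumptions for illustration.

import java.io.FileInputStream;
import java.io.InputStream;

import org.openrdf.model.Statement;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.RDFHandlerBase;
import org.openrdf.sail.memory.MemoryStore;

public class ChunkedLoad {
	public static void main(String[] args) throws Exception {
		Repository rep = new SailRepository(new MemoryStore());
		rep.initialize();
		final RepositoryConnection conn = rep.getConnection();
		conn.begin();

		RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
		parser.setRDFHandler(new RDFHandlerBase() {
			private int count = 0;

			@Override
			public void handleStatement(Statement st) throws RDFHandlerException {
				try {
					conn.add(st);
					// Commit in chunks, as in Example 10.
					if (++count % 1000 == 0) {
						conn.commit();
						conn.begin();
					}
				} catch (Exception e) {
					throw new RDFHandlerException(e);
				}
			}
		});

		// "data.ttl" is a placeholder path.
		InputStream in = new FileInputStream("data.ttl");
		try {
			parser.parse(in, "http://example.org/base");
			conn.commit();
		} finally {
			in.close();
			conn.close();
			rep.shutDown();
		}
	}
}
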
 
Example 11
Source Project: GeoTriples   Source File: NTriplesAlternative.java    License: Apache License 2.0
public void handleStatementIter(Collection<Statement> statements) throws RDFHandlerException {
    if (!writingStarted)
        throw new RuntimeException("Document writing has not yet been started");
    try {
        // WARNING if you want to produce blank nodes replace all the .append("<").append(X).append(">");
        // with   NTriplesUtil.append(st.getSubject(), sb);
        for(Statement st: statements){
            sb
                    .append("<")
                    .append(st.getSubject().toString())
                    .append("> <")
                    .append(st.getPredicate().toString())
                    .append("> ");
            NTriplesUtilNoEscape.append(st.getObject(), sb);
            sb.append(" .\n");
        }
    } catch (Exception e) {
        throw new RDFHandlerException(e);
    }
}
 
Example 12
Source Project: database   Source File: RDFStoreTest.java    License: GNU General Public License v2.0
@Test
public void testStatementSerialization()
	throws Exception
{
	Statement st = vf.createStatement(picasso, RDF.TYPE, painter);

	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	ObjectOutputStream out = new ObjectOutputStream(baos);
	out.writeObject(st);
	out.close();

	ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
	ObjectInputStream in = new ObjectInputStream(bais);
	Statement deserializedStatement = (Statement)in.readObject();
	in.close();

	assertTrue(st.equals(deserializedStatement));
}
 
Example 13
Source Project: cumulusrdf   Source File: ValueDictionaryBase.java    License: Apache License 2.0
@Override
public Iterator<Statement> toValueTripleIterator(final Iterator<byte[][]> triples) {

	return new AbstractIterator<Statement>() {

		@Override
		protected Statement computeNext() {

			while (triples.hasNext()) {

				final byte[][] ids = triples.next();
				try {
					return getValues(ids[0], ids[1], ids[2]);
				} catch (DataAccessLayerException exception) {
					_log.error(MessageCatalog._00093_DATA_ACCESS_LAYER_FAILURE, exception);
					return endOfData();
				}
			}

			return endOfData();
		}
	};
}
 
Example 14
Source Project: database   Source File: BigdataGASEngine.java    License: GNU General Public License v2.0
@SuppressWarnings("unchecked")
@Override
public Iterator<Statement> getEdges(final IGASContext<?, ?, ?> ctx,
        final Value u, final EdgesEnum edges) {

    final AbstractTripleStore kb = getKB();
    
    switch (edges) {
    case NoEdges:
        return EmptyIterator.DEFAULT;
    case InEdges:
        return getEdges(kb, true/* inEdges */, ctx, getIV(u));
    case OutEdges:
        return getEdges(kb, false/* inEdges */, ctx, getIV(u));
    case AllEdges: {
        final IStriterator a = getEdges(kb, true/* inEdges */, ctx, getIV(u));
        final IStriterator b = getEdges(kb, false/* inEdges */, ctx, getIV(u));
        a.append(b);
        return a;
    }
    default:
        throw new UnsupportedOperationException(edges.name());
    }

}
 
Example 15
Source Project: mustard   Source File: ThemeExperiment.java    License: MIT License
private static Map<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>> createDataSetCache(LargeClassificationDataSet data, long[] seeds, double fraction, int minSize, int maxClasses, int[] depths, boolean[] inference) {
	Map<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>> cache = new HashMap<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>>();

	for (long seed : seeds) {
		cache.put(seed, new HashMap<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>());
		data.createSubSet(seed, fraction, minSize, maxClasses);

		for (boolean inf : inference) {
			cache.get(seed).put(inf, new HashMap<Integer,Pair<SingleDTGraph, List<Double>>>());

			for (int depth : depths) {
				System.out.println("Getting Statements...");
				Set<Statement> stmts = RDFUtils.getStatements4Depth(tripleStore, data.getRDFData().getInstances(), depth, inf);
				System.out.println("# Statements: " + stmts.size());
				stmts.removeAll(new HashSet<Statement>(data.getRDFData().getBlackList()));
				System.out.println("# Statements: " + stmts.size() + ", after blackList");
				System.out.println("Building Graph...");
				SingleDTGraph graph = RDFUtils.statements2Graph(stmts, RDFUtils.REGULAR_LITERALS, data.getRDFData().getInstances(), true);
				System.out.println("Built Graph with " + graph.getGraph().nodes().size() + ", and " + graph.getGraph().links().size() + " links");

				cache.get(seed).get(inf).put(depth, new Pair<SingleDTGraph,List<Double>>(graph, new ArrayList<Double>(data.getTarget())));
			}
		}
	}
	return cache;
}
 
Example 16
Source Project: GeoTriples   Source File: RMLMappingFactory.java    License: Apache License 2.0
/**
 * Extract a content literal from a term type resource.
 * 
 * @param r2rmlMappingGraph the R2RML mapping graph to query
 * @param termType the term type resource to inspect
 * @param term the term whose literal value is extracted
 * @return the extracted literal value, or null if no matching statement exists
 * @throws InvalidR2RMLStructureException if more than one value is defined
 */
protected static String extractLiteralFromTermMap(
		CustomSesameDataset r2rmlMappingGraph, Resource termType, Enum term)
		throws InvalidR2RMLStructureException {

	URI p = getTermURI(r2rmlMappingGraph, term);

	List<Statement> statements = r2rmlMappingGraph.tuplePattern(termType,
			p, null);
	if (statements.isEmpty()) {
		return null;
	}
	if (statements.size() > 1) {
		throw new InvalidR2RMLStructureException(
				"[RMLMappingFactory:extractValueFromTermMap] " + termType
						+ " has too many " + term + " predicate defined.");
	}
	String result = statements.get(0).getObject().stringValue();
	if (log.isDebugEnabled()) {
		log.debug("[RMLMappingFactory:extractLiteralFromTermMap] Extracted "
				+ term + " : " + result);
	}
	return result;
}
 
Example 17
Source Project: mustard   Source File: RDFUtils.java    License: MIT License
/**
 * Convert a set of RDF statements into a DTGraph.
 * Literals can be treated as regular nodes (REGULAR_LITERALS), as unique nodes, i.e. one node per occurrence even when the literals are equal (REPEAT_LITERALS),
 * or ignored entirely (NO_LITERALS). The SPLIT variants (REGULAR_SPLIT_LITERALS, REPEAT_SPLIT_LITERALS) are also handled.
 * 
 * @param stmts the statements to convert
 * @param literalOption how literals should be treated
 * @return the resulting graph
 */
public static DTGraph<String,String> statements2Graph(Set<Statement> stmts, int literalOption) {
	DTGraph<String,String> graph = new LightDTGraph<String,String>();	
	Map<String, DTNode<String,String>> nodeMap = new HashMap<String, DTNode<String,String>>();
	
	for (Statement s : stmts) {
		if (s.getObject() instanceof Literal && literalOption != NO_LITERALS) {
			if (literalOption == REGULAR_LITERALS) {
				addStatement(graph, s, false, false, nodeMap);
			}
			if (literalOption == REGULAR_SPLIT_LITERALS) {
				addStatement(graph, s, false, true, nodeMap);
			}
			if (literalOption == REPEAT_LITERALS) {
				addStatement(graph, s, true, false, nodeMap);
			}
			if (literalOption == REPEAT_SPLIT_LITERALS) {
				addStatement(graph, s, true, true, nodeMap);
			}
		} else if (!(s.getObject() instanceof Literal)){
			addStatement(graph, s, false, false, nodeMap);
		}
	}	
	return graph;
}
 
Example 18
@Test
public void testSimpleGraphQuery()
	throws Exception
{
	testCon.add(alice, name, nameAlice, context2);
	testCon.add(alice, mbox, mboxAlice, context2);
	testCon.add(context2, publisher, nameAlice);

	testCon.add(bob, name, nameBob, context1);
	testCon.add(bob, mbox, mboxBob, context1);
	testCon.add(context1, publisher, nameBob);

	StringBuilder queryBuilder = new StringBuilder(128);
	queryBuilder.append(" prefix foaf: <" + FOAF_NS + ">");
	queryBuilder.append(" construct ");
	queryBuilder.append(" where { ?s foaf:name ?name . ?s foaf:mbox ?mbox . }");

	GraphQueryResult result = testCon.prepareGraphQuery(QueryLanguage.SPARQL, queryBuilder.toString()).evaluate();

	try {
		assertThat(result, is(notNullValue()));
		assertThat(result.hasNext(), is(equalTo(true)));

		while (result.hasNext()) {
			Statement st = result.next();
			if (name.equals(st.getPredicate())) {
				assertThat(st.getObject(), anyOf(is(equalTo((Value)nameAlice)), is(equalTo((Value)nameBob))));
			}
			else {
				assertThat(st.getPredicate(), is(equalTo(mbox)));
				assertThat(st.getObject(), anyOf(is(equalTo((Value)mboxAlice)), is(equalTo((Value)mboxBob))));
			}
		}
	}
	finally {
		result.close();
	}
}
 
Example 19
@Test
public void testGetStatements() throws RepositoryException {
	final RepositoryResult<Statement> stmts = con.getStatements(s, p, o, includeInferred, c);
	try {
		assertEquals(EncodeDecodeValue.encodeValue(s), remote.data.opts.getRequestParam("s"));
		assertEquals(EncodeDecodeValue.encodeValue(p), remote.data.opts.getRequestParam("p"));
		assertEquals(EncodeDecodeValue.encodeValue(o), remote.data.opts.getRequestParam("o"));
		assertEquals(EncodeDecodeValue.encodeValue(c), remote.data.opts.getRequestParam("c"));
		assertEquals(Boolean.toString(includeInferred), remote.data.opts.getRequestParam(RemoteRepositoryDecls.INCLUDE_INFERRED));
	} finally {
		stmts.close();
	}
}
 
Example 20
public void clearData() {
	try {
		// Use a single connection and make sure it is closed afterwards.
		RepositoryConnection con = rep.getConnection();
		try {
			RepositoryResult<Statement> statements = con.getStatements(null, null, null, true);
			con.remove(statements);
		} finally {
			con.close();
		}
	} catch (RepositoryException e) {
		e.printStackTrace();
		throw new SparqlTutorialException(e);
	}
}
 
Example 21
Source Project: database   Source File: BigdataGASState.java    License: GNU General Public License v2.0
@Override
public boolean isLinkAttrib(final Statement e,
        final URI linkAttribType) {
    final ISPO edge = (ISPO) e;
    if (!edge.p().equals(linkAttribType)) {
        // Edge does not use the specified link attribute type.
        return false;
    }
    if (!(edge.s() instanceof SidIV)) {
        // The subject of the edge is not a Statement.
        return false;
    }
    return true;
}
 
Example 22
Source Project: database   Source File: TestGather.java    License: GNU General Public License v2.0
@Override
public Factory<Value, Set<Statement>> getVertexStateFactory() {
    return new Factory<Value, Set<Statement>>() {
        @Override
        public Set<Statement> initialValue(Value value) {
            return new LinkedHashSet<Statement>();
        }
    };
}
 
Example 23
Source Project: database   Source File: ModelUtil.java    License: GNU General Public License v2.0
/**
 * Compares two models, defined by two statement collections, and returns
 * <tt>true</tt> if the first model is a subset of the second model.
 */
public static boolean isSubset(Iterable<? extends Statement> model1, Iterable<? extends Statement> model2)
{
	// Filter duplicates
	Set<Statement> set1 = new LinkedHashSet<Statement>();
	Iterators.addAll(model1.iterator(), set1);

	Set<Statement> set2 = new LinkedHashSet<Statement>();
	Iterators.addAll(model2.iterator(), set2);

	return isSubset(set1, set2);
}
 
Example 24
Source Project: anno4j   Source File: CRUDTest.java    License: Apache License 2.0
/**
 * Returns all statements that are present in any context of a repository having the specified subject, predicate and/or object.
 * @param connection A connection to the repository to query.
 * @param subject The subject the returned triples should have or null for any subject.
 * @param predicate The predicate the returned triples should have or null for any predicate.
 * @param object The object the returned triples should have or null for any object.
 * @return Returns the set of all triples present in the repository having the desired spo-structure.
 * @throws RepositoryException Thrown if an error occurs while querying the repository.
 */
private Collection<Statement> getStatements(RepositoryConnection connection, Resource subject, URI predicate, Value object) throws RepositoryException {
    // Query the repository:
    RepositoryResult<Statement> result = connection.getStatements(subject, predicate, object, false);

    // Fetch all statements from the result:
    Collection<Statement> statements = new HashSet<>();
    while (result.hasNext()) {
        statements.add(result.next());
    }
    return statements;
}
 
Example 25
Source Project: mustard   Source File: ThemeComputationTimeExperiment.java    License: MIT License
private static Map<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>> createDataSetCache(RDFDataSet tripleStore, LargeClassificationDataSet data, long[] seeds, double fraction, int minSize, int maxClasses, int[] depths, boolean[] inference) {
	Map<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>> cache = new HashMap<Long, Map<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>>();

	for (long seed : seeds) {
		cache.put(seed, new HashMap<Boolean, Map<Integer,Pair<SingleDTGraph, List<Double>>>>());
		data.createSubSet(seed, fraction, minSize, maxClasses);

		for (boolean inf : inference) {
			cache.get(seed).put(inf, new HashMap<Integer,Pair<SingleDTGraph, List<Double>>>());

			for (int depth : depths) {
				System.out.println("Getting Statements...");
				Set<Statement> stmts = RDFUtils.getStatements4Depth(tripleStore, data.getRDFData().getInstances(), depth, inf);
				System.out.println("# Statements: " + stmts.size());
				stmts.removeAll(new HashSet<Statement>(data.getRDFData().getBlackList()));
				System.out.println("# Statements: " + stmts.size() + ", after blackList");
				System.out.println("Building Graph...");

				SingleDTGraph graph = RDFUtils.statements2Graph(stmts, RDFUtils.REGULAR_LITERALS, data.getRDFData().getInstances(), true);

				System.out.println("Built Graph with " + graph.getGraph().nodes().size() + ", and " + graph.getGraph().links().size() + " links");

				cache.get(seed).get(inf).put(depth, new Pair<SingleDTGraph,List<Double>>(graph, new ArrayList<Double>(data.getTarget())));
			}
		}
	}
	return cache;
}
 
Example 26
Source Project: mustard   Source File: SteveExperiment.java    License: MIT License
private static Map<Long, Map<Boolean, Map<Integer, Pair<SingleDTGraph, List<Double>>>>> createDataSetCache(
		LargeClassificationDataSet data, long[] seeds, int[] depths, boolean inference) {
	Map<Long, Map<Boolean, Map<Integer, Pair<SingleDTGraph, List<Double>>>>> cache = new HashMap<Long, Map<Boolean, Map<Integer, Pair<SingleDTGraph, List<Double>>>>>();

	for (long seed : seeds) {
		cache.put(seed, new HashMap<Boolean, Map<Integer, Pair<SingleDTGraph, List<Double>>>>());
		data.createSubSet(seed, 0, 50, 0);
		cache.get(seed).put(inference, new HashMap<Integer, Pair<SingleDTGraph, List<Double>>>());

		for (int depth : depths) {
			System.out.println("Getting Statements...");
			Set<Statement> stmts = RDFUtils.getStatements4Depth(tripleStore, data.getRDFData().getInstances(),
					depth, inference);
			System.out.println("# Statements: " + stmts.size());
			stmts.removeAll(new HashSet<Statement>(data.getRDFData().getBlackList()));
			System.out.println("# Statements: " + stmts.size() + ", after blackList");
			System.out.println("Building Graph...");
			SingleDTGraph graph = RDFUtils.statements2Graph(stmts, RDFUtils.REGULAR_LITERALS, data.getRDFData()
					.getInstances(), true);
			System.out.println("Built Graph with " + graph.getGraph().nodes().size() + ", and "
					+ graph.getGraph().links().size() + " links");

			cache.get(seed)
					.get(inference)
					.put(depth,
							new Pair<SingleDTGraph, List<Double>>(graph, new ArrayList<Double>(data.getTarget())));
		}
	}
	return cache;
}
 
Example 27
Source Project: anno4j   Source File: OwlNormalizer.java    License: Apache License 2.0
private void propagateSubClassType(Resource classDef) {
	for (Resource c : findClasses(Collections.singleton(classDef))) {
		if (c.equals(RDFS.DATATYPE))
			continue;
		for (Statement stmt : ds.match(null, RDF.TYPE, c)) {
			Resource subj = stmt.getSubject();
			ds.add(subj, RDF.TYPE, classDef);
		}
	}
}
 
Example 28
@Override
public void handleStatement(final Statement st) throws RDFHandlerException {
    namespacesReady.countDown();
    if (closed)
        throw new RDFHandlerException("Result closed");
    try {
        queue.put(st);
    } catch (InterruptedException e) {
        throw new RDFHandlerException(e);
    }
}
 
Example 29
Source Project: GeoTriples   Source File: XPathProcessor.java    License: Apache License 2.0
public WorkerPerformOnNode(RMLPerformer performer, Resource subject,
		Node n, SesameDataSet dataset, TriplesMap parentTriplesMap,
		TriplesMap map, List<Statement> statements2) {
	this.performer = performer;
	this.subject = subject;
	this.n = n;
	this.dataset = dataset;
	this.parentTriplesMap = parentTriplesMap;
	this.map = map;
	this.statements = statements2;
}
 
Example 30
Source Project: database   Source File: ModelUtil.java    License: GNU General Public License v2.0
/**
 * Compares two models, defined by two statement collections, and returns
 * <tt>true</tt> if they are equal. Models are equal if they contain the same
 * set of statements. Blank node IDs are not relevant for model equality,
 * they are mapped from one model to the other by using the attached
 * properties.
 */
public static boolean equals(Set<? extends Statement> model1, Set<? extends Statement> model2) {
	// Compare the number of statements in both sets
	if (model1.size() != model2.size()) {
		return false;
	}

	return isSubsetInternal(model1, model2);
}