Java Code Examples for com.hp.hpl.jena.rdf.model.Resource#asNode()

The following examples show how to use com.hp.hpl.jena.rdf.model.Resource#asNode(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to visit the original project or source file. Related API usage examples are listed in the sidebar.
Example 1
Source File: D2RQCompiler.java    From GeoTriples with Apache License 2.0 5 votes vote down vote up
/**
 * Finishes compilation of a class map. For every class attached to the map,
 * one (subject, rdf:type, class) triple relation is emitted; afterwards the
 * resource collection for the map is registered and all collected triple
 * relations are handed over to the result.
 */
public void visitLeave(ClassMap classMap) {
	DatabaseOp classMapOp = currentClassMapOpBuilder.getOp();
	// One rdf:type triple relation per declared class of this class map.
	for (Resource clazz: classMap.getClasses()) {
		currentClassMapTripleRelations.add(new TripleRelation(
				currentSQLConnection, classMapOp, currentSubjects,
				new FixedNodeMaker(RDF.type.asNode()),
				new FixedNodeMaker(clazz.asNode())));
	}
	// Register the collection before publishing the individual relations.
	result.addResourceCollection(classMap.resource().getLocalName(),
			new ResourceCollection(result, currentSQLConnection,
					currentSubjects, classMapOp, currentClassMapTripleRelations));
	for (TripleRelation relation: currentClassMapTripleRelations) {
		result.addTripleRelation(relation);
	}
}
 
Example 2
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0 5 votes vote down vote up
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the discovered links.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graph name, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param offset				causes the solutions generated to start after 
 *                              the specified number of solutions.
 * @param limit					upper bound on the number of solutions returned.
 * @return an array of triples with the discovered links, empty on failure.
 * @since 2.0
 */
public Triple[] getDiscoveredLinks(final String sparqlEndpointURI, final String graphName, final String user, final String password, final int offset, final int limit) {
	// Omit the FROM clause when no graph is given; otherwise a null graphName
	// would produce the bogus clause "FROM <null>".
	final String fromClause = (graphName == null) ? "" : "FROM <" + graphName + "> ";
	final String query = "select * " + fromClause +
					"where {?source ?rel ?target }" +
					" ORDER BY ?source ?target" +
					" OFFSET " + offset + " LIMIT " + limit;
	final ArrayList<Triple> linksList = new ArrayList<Triple>();
	QueryExecution qexec = null;
	try {
		// Execute the query and obtain results
		qexec = QueryExecutionFactory.sparqlService(
				sparqlEndpointURI, 
				QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
		qexec.setTimeout(2000, 5000);
		final ResultSet results = qexec.execSelect() ;
		while (results.hasNext())
		{
			final QuerySolution soln = results.nextSolution() ;
			final Resource sourceResType = soln.getResource("source");
			final Resource targetResType = soln.getResource("target");
			final Resource relResType = soln.getResource("rel");
			linksList.add(new Triple(sourceResType.asNode(), relResType.asNode(), targetResType.asNode()));
		}
	} catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	} finally {
		// Release the HTTP/connection resources even when the query fails;
		// the original closed qexec only on the success path.
		if (qexec != null) {
			qexec.close();
		}
	}
	// toArray on an empty list already yields an empty array, so no special case is needed.
	return linksList.toArray(new Triple[linksList.size()]);
}
 
Example 3
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0 5 votes vote down vote up
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the ambiguous discovered links of a source URI, and adds
 * each of them to the given {@code ambiguousLink}.
 *
 * @param ambiguousLink			a {@link eu.aliada.shared.rdfstore.AmbiguousLink} that contains the source URI.  
 * @param localRes				the source resource of the link.  
 * @param extResBegin			the beginning string of the target link.  
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graph name, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @since 2.0
 */
public void getSourceURIAmbiguousLinks(final AmbiguousLink ambiguousLink, final Resource localRes, final String extResBegin, final String sparqlEndpointURI, final String graphName, final String user, final String password) {
	// Omit the FROM clause when no graph is given; otherwise a null graphName
	// would produce the bogus clause "FROM <null>".
	final String fromClause = (graphName == null) ? "" : "FROM <" + graphName + "> ";
	// NOTE(review): sourceURI and extResBegin are concatenated directly into the
	// query; if they can contain untrusted input, they should be escaped or
	// passed via a parameterized query to avoid SPARQL injection.
	final String query = "SELECT ?rel ?extRes " + fromClause +
			" WHERE {<" + ambiguousLink.getSourceURI() + "> ?rel ?extRes ." +
			" FILTER regex(?extRes, \"^" + extResBegin + "\", \"i\")" +
			" }";
	QueryExecution qexec = null;
	try {
		// Execute the query and obtain results
		qexec = QueryExecutionFactory.sparqlService(
				sparqlEndpointURI, 
				QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
		qexec.setTimeout(2000, 5000);
		final ResultSet results = qexec.execSelect() ;
		while (results.hasNext())
		{
			final QuerySolution soln = results.nextSolution() ;
			final Resource extRes = soln.getResource("extRes");
			final Resource relResType = soln.getResource("rel");
			ambiguousLink.addLink(new Triple(localRes.asNode(), relResType.asNode(), extRes.asNode()));
		}
	} catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	} finally {
		// Release the HTTP/connection resources even when the query fails;
		// the original closed qexec only on the success path.
		if (qexec != null) {
			qexec.close();
		}
	}
}
 
Example 4
Source File: PageServlet.java    From GeoTriples with Apache License 2.0 4 votes vote down vote up
/**
 * Renders the XHTML resource page for the resource addressed by the request
 * URI: derives the resource URI from the servlet path, builds an RDF
 * description of it via the D2R mapping, optionally adds server metadata,
 * and merges everything into the "resource_page.vm" Velocity template.
 *
 * @param request  the incoming HTTP request; its path encodes the resource.
 * @param response the HTTP response the rendered page is written to.
 * @throws ServletException if the servlet path does not contain the expected
 *                          page-service segment.
 * @throws IOException      on output errors while writing the response.
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
		throws IOException, ServletException {
	D2RServer server = D2RServer.fromServletContext(getServletContext());
	server.checkMappingFileChanged();
	// Strip context path and servlet path to get the resource-relative part.
	String relativeResourceURI = request.getRequestURI().substring(
			request.getContextPath().length()
					+ request.getServletPath().length());
	// Some servlet containers keep the leading slash, some don't
	if (!"".equals(relativeResourceURI)
			&& "/".equals(relativeResourceURI.substring(0, 1))) {
		relativeResourceURI = relativeResourceURI.substring(1);
	}
	// Re-attach the query string: it is part of the resource identifier here.
	if (request.getQueryString() != null) {
		relativeResourceURI = relativeResourceURI + "?"
				+ request.getQueryString();
	}

	/* Determine service stem, i.e. vocab/ in /[vocab/]page */
	int servicePos;
	if (-1 == (servicePos = request.getServletPath().indexOf(
			"/" + D2RServer.getPageServiceName())))
		throw new ServletException("Expected to find service path /"
				+ D2RServer.getPageServiceName());
	String serviceStem = request.getServletPath().substring(1,
			servicePos + 1);

	// Absolute resource URI plus the URLs of its RDF and HTML representations.
	String resourceURI = server.resourceBaseURI(serviceStem)
			+ relativeResourceURI;
	String documentURL = server.dataURL(serviceStem, relativeResourceURI);
	String pageURL = server.pageURL(serviceStem, relativeResourceURI);

	VelocityWrapper velocity = new VelocityWrapper(this, request, response);
	Context context = velocity.getContext();
	context.put("uri", resourceURI);

	// Build resource description
	Resource resource = ResourceFactory.createResource(resourceURI);
	// Vocabulary resources may be served without instance data, depending on config.
	boolean outgoingTriplesOnly = server.isVocabularyResource(resource)
			&& !server.getConfig().getVocabularyIncludeInstances();
	int limit = server.getConfig().getLimitPerPropertyBridge();
	Model description = null;
	try {
		// Page timeout is configured in seconds; ResourceDescriber expects ms.
		ResourceDescriber describer = new ResourceDescriber(
				server.getMapping(), resource.asNode(), outgoingTriplesOnly,
				limit, Math.round(server.getConfig().getPageTimeout() * 1000));
		description = ModelFactory.createModelForGraph(describer.description());
	} catch (QueryCancelledException ex) {
		// Description took longer than the configured page timeout.
		velocity.reportError(
				504, "504 Gateway Timeout", "The operation timed out.");
		return;
	}
	if (description.size() == 0) {
		velocity.reportError(404, "404 Not Found", "No resource with this identifier exists in the database.");
		return;
	}
	// Get a Resource that is attached to the description model
	resource = description.getResource(resourceURI);

	this.prefixes = server.getPrefixes(); // model();

	if (server.getConfig().serveMetadata()) {
		// create and add metadata to context
		MetadataCreator resourceMetadataCreator = new MetadataCreator(
				server, server.getConfig().getResourceMetadataTemplate(
						server, getServletContext()));

		Model metadata = resourceMetadataCreator.addMetadataFromTemplate(
				resourceURI, documentURL, pageURL);
		if (!metadata.isEmpty()) {
			// Sorted statement list about the RDF document itself, for rendering.
			List<Statement> mList = metadata.getResource(documentURL)
					.listProperties().toList();
			Collections.sort(mList, MetadataCreator.subjectSorter);
			context.put("metadata", mList);

			context.put("metadataroot", metadata.getResource(documentURL));

			// add prefixes to context
			Map<String, String> nsSet = metadata.getNsPrefixMap();
			nsSet.putAll(description.getNsPrefixMap());

			context.put("prefixes", nsSet.entrySet());
			context.put("renderedNodesMap",
					new HashMap<Resource, Boolean>());

			// add a empty map for keeping track of blank nodes aliases
			context.put("blankNodesMap", new HashMap<Resource, String>());
		} else {
			context.put("metadata", Boolean.FALSE);
		}
	} else {
		context.put("metadata", Boolean.FALSE);
	}

	context.put("rdf_link", documentURL);
	context.put("label", getBestLabel(resource));
	context.put("properties", collectProperties(description, resource));
	context.put("classmap_links", classmapLinks(resource));
	// A limit of 0 or less means "unlimited"; the template hides the hint then.
	context.put("limit_per_property_bridge", limit > 0 ? limit : null);
	velocity.mergeTemplateXHTML("resource_page.vm");
}
 
Example 5
Source File: AbstractMDRResource.java    From semanticMDR with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Wraps a plain Jena {@link Resource} as an MDR resource backed by the
 * ontology model of the given database.
 *
 * @param resource    the underlying Jena resource; its node is handed to the
 *                    enhanced-node superclass.
 * @param mdrDatabase the owning database; its OntModel serves as the
 *                    enhanced graph, and the reference is kept for later use.
 */
public AbstractMDRResource(Resource resource, MDRDatabase mdrDatabase) {
	super(resource.asNode(),
			(EnhGraph) mdrDatabase.getOntModel());
	this.mdrDatabase = mdrDatabase;
}