Java Code Examples for com.hp.hpl.jena.query.QueryExecutionFactory#sparqlService()

The following examples show how to use com.hp.hpl.jena.query.QueryExecutionFactory#sparqlService(). They are taken from open-source projects; the source file, project, and license for each example are listed above its code.
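
Most of the examples below follow the same basic pattern: build a QueryExecution for a remote endpoint with sparqlService(), execute the query, iterate over the solutions, and close the execution. The minimal sketch below illustrates that pattern; the endpoint URL and query string are placeholders chosen for illustration and do not come from any of the projects listed here.

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;

public class SparqlServiceDemo {
    public static void main(String[] args) {
        // Placeholder endpoint and query, used purely for illustration.
        String endpoint = "http://dbpedia.org/sparql";
        String queryString = "SELECT ?s WHERE { ?s ?p ?o } LIMIT 5";

        // sparqlService() builds a QueryExecution that sends the query to the remote endpoint over HTTP.
        QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, queryString);
        try {
            ResultSet results = qexec.execSelect();
            while (results.hasNext()) {
                QuerySolution solution = results.nextSolution();
                System.out.println(solution.get("s"));
            }
        } finally {
            // Release the underlying HTTP resources.
            qexec.close();
        }
    }
}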
Example 1
Source File: TrigKSTripleReader.java    From EventCoreference with Apache License 2.0
public static ArrayList<String> readEventIdsFromKs(String sparqlQuery) throws Exception {
    ArrayList<String> eventIds = new ArrayList<String>();
    //System.out.println("serviceEndpoint = " + serviceEndpoint);
    //System.out.println("sparqlQuery = " + sparqlQuery);
    //System.out.println("user = " + user);
    //System.out.println("pass = " + pass);
    HttpAuthenticator authenticator = new SimpleAuthenticator(user, pass.toCharArray());

    QueryExecution x = QueryExecutionFactory.sparqlService(serviceEndpoint, sparqlQuery, authenticator);
    ResultSet resultset = x.execSelect();
    while (resultset.hasNext()) {
        QuerySolution solution = resultset.nextSolution();
        //System.out.println("solution.toString() = " + solution.toString());
        //( ?event = <http://www.newsreader-project.eu/data/Dasym-Pilot/425819_relink_dominant.naf#ev24> )
        String currentEvent = solution.get("event").toString();
        //System.out.println("currentEvent = " + currentEvent);
        //http://www.newsreader-project.eu/data/Dasym-Pilot/425819_relink_dominant.naf#ev24
        if (!eventIds.contains(currentEvent)) {
            eventIds.add(currentEvent);
        }
    }
    x.close();
    return eventIds;
}
 
Example 2
Source File: AliadaRDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
public String crm2AliadaClass(final String crmClass) {
	final Query query = QueryFactory.create(CRM_TO_ALIADA_CLASS_P1 + crmClass + CRM_TO_ALIADA_CLASS_P2);
	ARQ.getContext().setTrue(ARQ.useSAX);
       
	QueryExecution execution = null;
	try {
		execution = QueryExecutionFactory.sparqlService("http://172.25.5.15:8890/sparql", query);
		execution.setTimeout(2000, 5000);
		final ResultSet results = execution.execSelect();
		//Iterate over the SPARQL query results and return the first match
		while (results.hasNext()) {
			QuerySolution soln = results.nextSolution();
			System.out.println(soln.get("?abstract"));
			return soln.get("?abstract").asResource().getURI();
		}
		return "NULL";
	} finally {
		try {
			execution.close();
		} catch (Exception exception) {
			// Ignore
		}
	}

}
 
Example 3
Source File: TrigKSTripleReader.java    From EventCoreference with Apache License 2.0
public static ArrayList<Statement> readTriplesFromKs(String subjectUri, String sparqlQuery){

        ArrayList<Statement> triples = new ArrayList<Statement>();
        HttpAuthenticator authenticator = new SimpleAuthenticator(user, pass.toCharArray());
        try {
            qCount++;
            QueryExecution x = QueryExecutionFactory.sparqlService(serviceEndpoint, sparqlQuery, authenticator);
            ResultSet resultset = x.execSelect();
            while (resultset.hasNext()) {
                QuerySolution solution = resultset.nextSolution();
                String relString = solution.get("predicate").toString();
                RDFNode obj = solution.get("object");
                Model model = ModelFactory.createDefaultModel();
                Resource subj = model.createResource(subjectUri);
                Statement s = createStatement(subj, ResourceFactory.createProperty(relString), obj);
                triples.add(s);
            }
            x.close();
        } catch (Exception e) {
            //e.printStackTrace();
        }
        return triples;
    }
 
Example 4
Source File: SPARQLEndPoint.java    From LodView with MIT License
public String testEndpoint(ConfigurationBean conf) {
	System.out.println("testing connection on " + conf.getEndPointUrl());
	QueryExecution qe = QueryExecutionFactory.sparqlService(conf.getEndPointUrl(), "select ?s {?s ?p ?o} LIMIT 1");
	String msg = "";
	try {
		ResultSet rs = qe.execSelect();
		if (rs.hasNext()) {
			System.out.println("is online");
			msg = "online";
		} else {
			System.out.println("is offline");
			msg = "offline";
		}
	} catch (Exception e) {
		System.out.println("is offline " + e.getMessage());
		msg = "offline " + e.getMessage();
	} finally {
		qe.close();
	}
	return msg;
}
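
A lighter-weight connectivity check can also be done with a SPARQL ASK query instead of a SELECT. The sketch below is a hypothetical variant of the test above, not code from LodView; the method name and the idea of passing the endpoint URL directly are assumptions made for illustration, while execAsk() is a standard QueryExecution method.

// Minimal sketch of an ASK-based endpoint test (hypothetical helper, not part of LodView).
public boolean isEndpointOnline(String endpointUrl) {
	QueryExecution qe = QueryExecutionFactory.sparqlService(endpointUrl, "ASK { ?s ?p ?o }");
	try {
		return qe.execAsk();   // true if the endpoint answered the ASK query
	} catch (Exception e) {
		return false;          // unreachable endpoint or invalid response
	} finally {
		qe.close();
	}
}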
 
Example 5
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param query					the query to use to look for the resources.
 * @return the {@link com.hp.hpl.jena.query.ResultSet} of the SELECT SPARQL query.
 * @since 2.0
 */
public ResultSet executeSelect(final String sparqlEndpointURI, final String user, final String password, final String query) {
	ResultSet results = null;
 	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
        // Copy the result set before closing the remote execution; otherwise the
        // ResultSet returned to the caller may no longer be readable.
        results = ResultSetFactory.copyResults(qexec.execSelect());
        qexec.close();
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
 	return results;
}
 
Example 6
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the number of resources specified in the query argument.
 *
 * @param query					the query to execute to get the number of resources.  
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @return the number of resources.
 * @since 2.0
 */
public int getNumResources(final String query, final String sparqlEndpointURI, final String user, final String password) {
	int numRes = 0;
	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
           final ResultSet results = qexec.execSelect() ;
           while (results.hasNext())
           {
           	final QuerySolution soln = results.nextSolution() ;
           	numRes = soln.getLiteral("count").getInt();
           }
        qexec.close() ;
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	return numRes;
}
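
getNumResources() expects the caller to supply a query that projects a ?count variable, which the loop above then reads with soln.getLiteral("count"). A hypothetical invocation could look like the following; the endpoint URL and credentials are placeholders, and it is assumed that RDFStoreDAO can be instantiated with a no-argument constructor.

// Hypothetical caller of getNumResources(); endpoint and credentials are placeholders.
RDFStoreDAO rdfStoreDAO = new RDFStoreDAO();
String countQuery = "SELECT (COUNT(?s) AS ?count) WHERE { ?s ?p ?o }";
int numTriples = rdfStoreDAO.getNumResources(countQuery, "http://localhost:8890/sparql", "user", "password");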
 
Example 7
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the resources specified in the query argument.
 *
 * @param query					the query to execute to get the resources.  
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param offset				causes the solutions generated to start after 
 *                              the specified number of solutions.
 * @param limit					upper bound on the number of solutions returned.
 * @return a list of {@link eu.aliada.shared.rdfstore.RetrievedResource} with the resources.
 * @since 2.0
 */
public RetrievedResource[] getResources(final String query, final String sparqlEndpointURI, final String user, final String password, final int offset, final int limit) {
	final ArrayList<RetrievedResource> resList = new ArrayList<RetrievedResource>();
 	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
           final ResultSet results = qexec.execSelect() ;
           while (results.hasNext())
           {
           	final QuerySolution soln = results.nextSolution() ;
           	final Resource res = soln.getResource("res");
       		String name = "";
           	if(soln.contains("name")) {
           		name = soln.getLiteral("name").getString();
           	}
       		final RetrievedResource retrievedRes = new RetrievedResource(res.getURI(), name);
       		resList.add(retrievedRes);
           }
        qexec.close() ;
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	if (resList.isEmpty()) {
		return new RetrievedResource[0];
	}
	return (RetrievedResource[]) resList.toArray(new RetrievedResource[resList.size()]);
}
 
Example 8
Source File: SPARQLEndPoint.java    From LodView with MIT License
public Model extractData(Model result, String IRI, String sparql, List<String> queries) throws Exception {

		// System.out.println("executing query on " + sparql);
		Resource subject = result.createResource(IRI);
		for (String query : queries) {
			QueryExecution qe = QueryExecutionFactory.sparqlService(sparql, parseQuery(query, IRI, null, -1, null));
			try {
				ResultSet rs = qe.execSelect();

				List<Statement> sl = new ArrayList<Statement>();
				while (rs.hasNext()) {
					QuerySolution qs = rs.next();
					RDFNode subject2 = qs.get("s");
					RDFNode property = qs.get("p");
					RDFNode object = qs.get("o");
					result.add(result.createStatement(subject2 != null ? subject2.asResource() : subject, property.as(Property.class), object));
				}
				result.add(sl);
			} catch (Exception e) {
				e.printStackTrace();
				throw new Exception("error in query execution: " + e.getMessage());
			} finally {
				qe.close();
			}
		}
		return result;
	}
 
Example 9
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the ambiguous discovered links of a source URI.
 *
 * @param ambiguousLink			a {@link eu.aliada.shared.rdfstore.AmbiguousLink} that contains the source URI.  
 * @param localRes				the source resource of the link.  
 * @param extResBegin			the beginning string of the target link.  
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graphName, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @since 2.0
 */
public void getSourceURIAmbiguousLinks(final AmbiguousLink ambiguousLink, final Resource localRes, final String extResBegin, final String sparqlEndpointURI, final String graphName, final String user, final String password) {
	final String query = "SELECT ?rel ?extRes FROM <" + graphName + "> " + 
			" WHERE {<" + ambiguousLink.getSourceURI() + "> ?rel ?extRes ." +
			" FILTER regex(?extRes, \"^" + extResBegin + "\", \"i\")" +
			" }";
	try {
		// Execute the query and obtain results
		final QueryExecution qexec = QueryExecutionFactory.sparqlService(
				sparqlEndpointURI, 
				QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
		qexec.setTimeout(2000, 5000);
		final ResultSet results = qexec.execSelect() ;
		while (results.hasNext())
		{
			final QuerySolution soln = results.nextSolution() ;
	    	final Resource extRes = soln.getResource("extRes");
           	final Resource relResType = soln.getResource("rel");
           	final Triple triple = new Triple(localRes.asNode(), relResType.asNode(), extRes.asNode());
	    	ambiguousLink.addLink(triple);
	    }
	    qexec.close() ;
	  } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
}
 
Example 10
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the number of ambiguous discovered links.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graphName, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @return the number of the ambiguous discovered links.
 * @since 2.0
 */
public int getNumAmbiguousDiscoveredLinks (final String sparqlEndpointURI, final String graphName, final String user, final String password) {
	final String query = "SELECT (COUNT(?localRes) AS ?count) FROM <" + graphName + "> " + 
					" WHERE {?localRes ?rel ?extRes ." +
					" BIND( str(?extRes) as ?extResStr )." +
					" BIND( SUBSTR(?extResStr, 1,14) AS ?extResBegin)" +
					" }" +
					" GROUP BY ?localRes ?extResBegin" +
					" HAVING (COUNT(?localRes) > 1)";

	int numLinks = 0;
	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
           final ResultSet results = qexec.execSelect() ;
           while (results.hasNext())
           {
           	final QuerySolution soln = results.nextSolution() ;
           	numLinks = numLinks + soln.getLiteral("count").getInt();
           }
        qexec.close() ;
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	return numLinks;
}
 
Example 11
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint,
 * to get the ambiguous discovered links.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graphName, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param offset				causes the solutions generated to start after 
 *                              the specified number of solutions.
 * @param limit					upper bound on the number of solutions returned.
 * @return a list of {@link eu.aliada.shared.rdfstore.AmbiguousLink} with the ambiguous discovered links.
 * @since 2.0
 */
public AmbiguousLink[] getAmbiguousDiscoveredLinks(final String sparqlEndpointURI, final String graphName, final String user, final String password, final int offset, final int limit) {
	final String query = "SELECT ?localRes ?extResBegin (COUNT(?localRes) AS ?count) FROM <" + graphName + "> " + 
			" WHERE {?localRes ?rel ?extRes ." +
			" BIND( str(?extRes) as ?extResStr )." +
			" BIND( SUBSTR(?extResStr, 1,14) AS ?extResBegin)" +
			" }" +
			" GROUP BY ?localRes ?extResBegin" +
			" HAVING (COUNT(?localRes) > 1)" +
			" ORDER BY ?localRes" +
			" OFFSET " + offset + " LIMIT " + limit;
	
	ArrayList<AmbiguousLink> ambiguousLinksList = new ArrayList<AmbiguousLink>();
	try {
		// Execute the query and obtain results
		final QueryExecution qexec = QueryExecutionFactory.sparqlService(
				sparqlEndpointURI, 
				QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
		qexec.setTimeout(2000, 5000);
		final ResultSet results = qexec.execSelect() ;
		while (results.hasNext())
		{
			final QuerySolution soln = results.nextSolution() ;
	    	final Resource localRes = soln.getResource("localRes");
	    	final String extResBegin = soln.getLiteral("extResBegin").getString();
	    	final AmbiguousLink ambiguousLink = new AmbiguousLink();
	    	ambiguousLink.setSourceURI(localRes.getURI());
	    	getSourceURIAmbiguousLinks(ambiguousLink, localRes, extResBegin, sparqlEndpointURI, graphName, user, password);
	    	ambiguousLinksList.add(ambiguousLink);
	    }
	    qexec.close() ;
	  } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	if (ambiguousLinksList.isEmpty()) {
		return new AmbiguousLink[0];
	}
	return (AmbiguousLink[]) ambiguousLinksList.toArray(new AmbiguousLink[ambiguousLinksList.size()]);
}
 
Example 12
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the discovered links.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graphName, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param offset				causes the solutions generated to start after 
 *                              the specified number of solutions.
 * @param limit					upper bound on the number of solutions returned.
 * @return a list of triples with the discovered links.
 * @since 2.0
 */
public Triple[] getDiscoveredLinks(final String sparqlEndpointURI, final String graphName, final String user, final String password, final int offset, final int limit) {
	final String query = "select * FROM <" + graphName + "> " + 
					"where {?source ?rel ?target }" +
					" ORDER BY ?source ?target" +
					" OFFSET " + offset + " LIMIT " + limit;
  	ArrayList<Triple> linksList = new ArrayList<Triple>();
 	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
           final ResultSet results = qexec.execSelect() ;
           while (results.hasNext())
           {
           	final QuerySolution soln = results.nextSolution() ;
           	final Resource sourceResType = soln.getResource("source");
           	final Resource targetResType = soln.getResource("target");
           	final Resource relResType = soln.getResource("rel");
       		final Triple triple = new Triple(sourceResType.asNode(), relResType.asNode(), targetResType.asNode());
       		linksList.add(triple);
           }
        qexec.close() ;
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	if (linksList.isEmpty()) {
		return new Triple[0];
	}
	return (Triple[]) linksList.toArray(new Triple[linksList.size()]);
}
 
Example 13
Source File: SparqlEndpointQuery.java    From micro-integrator with Apache License 2.0
@Override
public QueryExecution getQueryExecution() throws IOException, DataServiceFault {
	return QueryExecutionFactory.sparqlService(this.getConfig().getSparqlEndpoint(), this.getQuery());
}
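
The returned QueryExecution is neither executed nor closed here, so that responsibility falls to the caller. A hypothetical consumer (the sparqlEndpointQuery variable is assumed to be an already configured instance) might use it like this:

// Hypothetical caller; sparqlEndpointQuery is an already configured SparqlEndpointQuery instance.
QueryExecution execution = sparqlEndpointQuery.getQueryExecution();
try {
	ResultSet results = execution.execSelect();
	while (results.hasNext()) {
		System.out.println(results.nextSolution());
	}
} finally {
	execution.close();
}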
 
Example 14
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, 
 * to get the works stored in the specified graph.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param graphName 			the graphName, null in case of default graph.
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param offset				causes the solutions generated to start after 
 *                              the specified number of solutions.
 * @param limit					upper bound on the number of solutions returned.
 * @return a list of {@link eu.aliada.shared.rdfstore.RetrievedWork} with the works found.
 * @since 2.0
 */
public RetrievedWork[] getWorks(final String sparqlEndpointURI, final String graphName, final String user, final String password, final int offset, final int limit) {
	final String query = " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" +
	" PREFIX ecrm:   <http://erlangen-crm.org/current/>" +
	" PREFIX efrbroo: <http://erlangen-crm.org/efrbroo/>" +
	" SELECT DISTINCT ?work ?expr ?manif ?title FROM <" + graphName + "> " +
	" WHERE { ?work rdf:type efrbroo:F1_Work . "+
	" ?work efrbroo:R40_has_representative_expression ?expr ." +
	" ?expr efrbroo:R4_carriers_provided_by ?manif ." +
	" ?manif ecrm:P102_has_title ?apel ." +
	" ?apel ecrm:P3_has_note ?title ." +
	" }" +
	" ORDER BY ?work ?expr ?manif" +
	" OFFSET " + offset + " LIMIT " + limit;

	ArrayList<RetrievedWork> resList = new ArrayList<RetrievedWork>();
 	try {
        // Execute the query and obtain results
        final QueryExecution qexec = QueryExecutionFactory.sparqlService(
        		sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
        qexec.setTimeout(2000, 5000);
           final ResultSet results = qexec.execSelect() ;
           while (results.hasNext())
           {
           	final QuerySolution soln = results.nextSolution() ;
	    	final RetrievedWork retrievedRes = new RetrievedWork();
	    	retrievedRes.setWorkURI(soln.getResource("work").getURI());
	    	retrievedRes.setExprURI(soln.getResource("expr").getURI());
	    	retrievedRes.setManifURI(soln.getResource("manif").getURI());
	    	retrievedRes.setTitle(soln.getLiteral("title").getString());
       		
	    	String dimensions = "";
           	if(soln.contains("dimensions")) {
           		dimensions = soln.getLiteral("dimensions").getString();
           	}
	    	retrievedRes.setDimensions(dimensions);
       		
	    	String extension = "";
           	if(soln.contains("extension")) {
           		extension = soln.getLiteral("extension").getString();
           	}
	    	retrievedRes.setExtension(extension);
       		
	    	String author = "";
           	if(soln.contains("author")) {
           		author = soln.getLiteral("author").getString();
           	}
	    	retrievedRes.setAuthor(author);

	    	String publicPlace = "";
           	if(soln.contains("place_publication")) {
           		publicPlace = soln.getLiteral("place_publication").getString();
           	}
	    	retrievedRes.setPublicPlace(publicPlace);

	    	String publicDate = "";
           	if(soln.contains("date_publication")) {
           		publicDate = soln.getLiteral("date_publication").getString();
           	}
	    	retrievedRes.setPublicDate(publicDate);

	    	String edition = "";
           	if(soln.contains("edition")) {
           		edition = soln.getLiteral("edition").getString();
           	}
	    	retrievedRes.setEdition(edition);
       		
	    	resList.add(retrievedRes);
           }
        qexec.close() ;
      } catch (Exception exception) {
		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
	}
	if (resList.isEmpty()) {
		return new RetrievedWork[0];
	}
	return (RetrievedWork[]) resList.toArray(new RetrievedWork[resList.size()]);
}
 
Example 15
Source File: RDFStoreDAO.java    From aliada-tool with GNU General Public License v3.0
/**
 * It executes a SELECT SPARQL query on the SPARQL endpoint, to get the 
 * URIs of a type from ALIADA ontology.
 *
 * @param sparqlEndpointURI		the SPARQL endpoint URI.  
 * @param user					the user name for the SPARQL endpoint.
 * @param password				the password for the SPARQL endpoint.
 * @param typeLabel				the label value of the type to search for.
 * @return a list of URIs with the matched types.
 * @since 1.0
 */
public String[] getOntologyTypeURI(final String sparqlEndpointURI, final String user, final String password, final String typeLabel) {
	final String query = "select distinct ?type FROM <http://aliada-project.eu/2014/aliada-ontology#> " + 
					"where {?type a <http://www.w3.org/2004/02/skos/core#Concept> . " +
					"?type <http://www.w3.org/2004/02/skos/core#prefLabel> ?label . " +
					"FILTER regex(str(?label), \"^" + typeLabel + "$\")}";
	final ArrayList<String> typesList = new ArrayList<String>();
	QueryExecution qexec = null;
	try {
        // Execute the query and obtain results
  		qexec = QueryExecutionFactory.sparqlService(
  				sparqlEndpointURI, 
        		QueryFactory.create(query), 
				auth(sparqlEndpointURI, user, password));
           
        if (qexec instanceof QueryEngineHTTP) {
        	((QueryEngineHTTP)qexec).setTimeout(2000L, 5000L);
        }
        
        final ResultSet results = qexec.execSelect() ;
           while (results.hasNext()) {
           	final QuerySolution soln = results.nextSolution() ;
           	final Resource resType = soln.getResource("type");
       		final String type = resType.getURI();
       		typesList.add(type);
           }
  	} catch (Exception exception) {
  		LOGGER.error(MessageCatalog._00035_SPARQL_FAILED, exception, query);
  	} finally {
  		try {
			qexec.close();
		} catch (Exception e) {
			// Ignore
		}
  	}
 	
	if (typesList.isEmpty()) {
		return new String[0];
	}
	return (String[]) typesList.toArray(new String[typesList.size()]);
}
 
Example 16
Source File: ModelFactoryImpl.java    From semweb4j with BSD 2-Clause "Simplified" License
@Override
public QueryResultTable sparqlSelect(String url, String queryString) {
	log.debug("Query " + queryString);
	QueryExecution qe = QueryExecutionFactory.sparqlService(url, queryString);
	return new QueryResultTableImpl(qe);
}
 
Example 17
Source File: SolRDF.java    From SolRDF with Apache License 2.0
/**
 * Returns the {@link QueryExecution} associated with the given query.
 * 
 * @param query the SPARQL query.
 * @return the {@link QueryExecution} associated with the given query.
 */
QueryExecution execution(final String query) {
	return QueryExecutionFactory.sparqlService(
			sparqlEndpoint, 
			QueryFactory.create(query));
}