org.apache.jena.riot.web.HttpOp Java Examples

The following examples show how to use org.apache.jena.riot.web.HttpOp. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: JenaIOService.java    From trellis with Apache License 2.0 6 votes vote down vote up
/**
 * Writes {@code graph} to {@code output} as JSON-LD, honoring a custom
 * context profile when one is supplied and usable.
 *
 * @param output   destination stream (not closed here)
 * @param graph    the dataset to serialize
 * @param profiles requested JSON-LD profiles; a custom profile triggers a
 *                 remote context fetch (memoized in {@code cache})
 */
private void writeJsonLd(final OutputStream output, final DatasetGraph graph, final IRI... profiles) {
    final String profile = getCustomJsonLdProfile(profiles);
    final RDFFormat format;
    if (canUseCustomJsonLdProfile(profile)) {
        format = JSONLD_COMPACT_FLAT;
    } else {
        format = getJsonLdProfile(profiles);
    }
    final JsonLDWriteContext ctx = new JsonLDWriteContext();
    if (canUseCustomJsonLdProfile(profile)) {
        LOGGER.debug("Setting JSON-LD context with profile: {}", profile);
        // Resolve the remote context document, caching successes per profile URL.
        final String contextDocument = cache.get(profile, url -> {
            try (final TypedInputStream response = HttpOp.execHttpGet(url)) {
                return IOUtils.toString(response.getInputStream(), UTF_8);
            } catch (final IOException | HttpException ex) {
                LOGGER.warn("Error fetching profile {}: {}", url, ex.getMessage());
                return null; // a failed fetch falls back to the default context
            }
        });
        if (contextDocument != null) {
            ctx.setJsonLDContext(contextDocument);
            // Substitute the profile IRI (as a JSON string) for the inline context.
            ctx.setJsonLDContextSubstitution("\"" + profile + "\"");
        }
    }
    RDFWriter.create().format(format).context(ctx).source(graph).output(output);
}
 
Example #2
Source File: TestDeltaFusekiGood.java    From rdf-delta with Apache License 2.0 6 votes vote down vote up
// One-time environment setup: snapshot the global HttpClient, start the delta
// server and both Fuseki servers, and open RDF connections to each dataset.
@BeforeClass
public static void beforeClass() {
    try {

        // Snapshot the process-wide client so afterClass() can restore it.
        dftStdHttpClient = HttpOp.getDefaultHttpClient();

        HttpOp.setDefaultHttpClient(HttpClients.createMinimal());

        deltaServer = deltaServer(CLEAN);

        // This needs testing.
        server1 = fuseki1(CLEAN);
        server2 = fuseki2(CLEAN); // Can not create!

        URL_DPS = "http://localhost:"+D_PORT+"/";

        conn1 = RDFConnectionFactory.connect("http://localhost:"+F1_PORT+ds1) ;
        conn2 = RDFConnectionFactory.connect("http://localhost:"+F2_PORT+ds2) ;

    } catch (Throwable th) {
        // NOTE(review): swallowing Throwable here hides setup failures and lets
        // the tests run against a half-initialized environment. Consider
        // rethrowing so JUnit fails the class fast — confirm the swallow is
        // intentional before changing it.
        th.printStackTrace();
    }
}
 
Example #3
Source File: SPARQLEndpointExecution.java    From hypergraphql with Apache License 2.0 5 votes vote down vote up
/**
 * Executes the generated SPARQL SELECT query against the configured endpoint,
 * collecting the resource URIs bound to each marker variable and the union of
 * the per-solution models.
 *
 * @return the aggregated execution result (marker URIs and union model)
 */
@Override
public SPARQLExecutionResult call() {
    Map<String, Set<String>> resultSet = new HashMap<>();

    markers.forEach(marker -> resultSet.put(marker, new HashSet<>()));

    Model unionModel = ModelFactory.createDefaultModel();

    SPARQLServiceConverter converter = new SPARQLServiceConverter(schema);
    String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType);
    logger.info(sparqlQuery);

    // Basic-auth credentials for the endpoint, applied to all scopes.
    CredentialsProvider credsProvider = new BasicCredentialsProvider();
    Credentials credentials =
            new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword());
    credsProvider.setCredentials(AuthScope.ANY, credentials);
    HttpClient httpclient = HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    // NOTE(review): mutates the process-wide HttpOp default client on every
    // call — confirm this global side effect is intended.
    HttpOp.setDefaultHttpClient(httpclient);

    Query jenaQuery = QueryFactory.create(sparqlQuery);

    QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery);
    try {
        qEngine.setClient(httpclient);

        ResultSet results = qEngine.execSelect();

        // Consume all solutions before closing: collect marker URIs and merge
        // each solution's model into the union.
        results.forEachRemaining(solution -> {
            markers.stream().filter(solution::contains).forEach(marker ->
                    resultSet.get(marker).add(solution.get(marker).asResource().getURI()));

            unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema));
        });
    } finally {
        // Fix: the original leaked the query engine (and its HTTP resources).
        qEngine.close();
    }

    SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel);
    logger.info(sparqlExecutionResult);

    return sparqlExecutionResult;
}
 
Example #4
Source File: ARQFactory.java    From shacl with Apache License 2.0 5 votes vote down vote up
/**
 * Builds an HttpClient for a SPARQL service, using basic authentication when a
 * user name is supplied.
 *
 * @param serviceURI the service URI (not referenced here; retained for API compatibility)
 * @param user       the user name, or {@code null} to get the shared default client
 * @param password   the password paired with {@code user}
 * @return the default HttpClient when {@code user} is {@code null}, otherwise a
 *         client preconfigured with unscoped username/password credentials
 */
public static HttpClient buildHttpClient(String serviceURI, String user, String password) {
        if ( user == null )
            return HttpOp.getDefaultHttpClient();
        // Unscoped credentials: offered for any host/port/realm/scheme.
        // For host- or realm-scoped credentials (AuthScope(host, port, realm, scheme)),
        // see http://jena.staging.apache.org/documentation/query/http-auth.html
        CredentialsProvider credsProvider = new BasicCredentialsProvider();
        Credentials credentials = new UsernamePasswordCredentials(user, password);
        credsProvider.setCredentials(AuthScope.ANY, credentials);
        return HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    }
 
Example #5
Source File: DeltaLinkHTTP.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Fetches a single patch from the remote patch server, retrying on transient
 * failure.
 *
 * @param dsRef    data source id, sent as the datasource query parameter
 * @param param    not referenced in this method body
 * @param paramStr selector segment appended to the request URL
 * @return the patch, or {@code null} when the server returns no body or 404
 * @throws HttpException for any HTTP failure other than 404
 */
private RDFPatch fetchCommon(Id dsRef, String param, String paramStr) {
    checkLink();

    String url = remoteReceive;
    url = createURL(url, DeltaConst.paramDatasource, dsRef.asParam());
    url = appendURL(url, paramStr);
    final String s = url; // effectively-final copy for capture by the retry lambda
    try {
        RDFPatch patch =  retry(()->{
            // [NET] Network point
            // NOTE(review): this InputStream is never closed explicitly —
            // presumably it is drained/closed via RDFPatchReaderText; confirm,
            // or wrap in try-with-resources.
            InputStream in = HttpOp.execHttpGet(s) ;
            if ( in == null )
                return null ; // no response body -> no patch
            RDFPatchReaderText pr = new RDFPatchReaderText(in) ;
            RDFChangesCollector collector = new RDFChangesCollector();
            pr.apply(collector);
            return collector.getRDFPatch();
        }, ()->true, ()->"Retry fetch patch.", ()->"Failed to fetch patch.");
        return patch;
    }
    catch ( HttpException ex) {
        // 404 means "no such patch" rather than an error.
        if ( ex.getStatusCode() == HttpSC.NOT_FOUND_404 ) {
            return null ; //throw new DeltaNotFoundException(ex.getMessage());
        }
        throw ex;
    }
}
 
Example #6
Source File: PHandlerSPARQLUpdate.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/**
 * Serializes a patch as a SPARQL Update request and POSTs it, best-effort,
 * to every registered update endpoint. A failure at one endpoint does not
 * prevent delivery to the others.
 *
 * @param patch the patch to replay as a SPARQL Update
 */
@Override
public void handle(Patch patch) { 
    IndentedLineBuffer x = new IndentedLineBuffer() ;
    RDFChanges scData = new RDFChangesWriteUpdate(x) ;
    patch.play(scData);
    x.flush();
    String reqStr = x.asString() ;
    updateEndpoints.forEach((ep)->{
        try { HttpOp.execHttpPost(ep, WebContent.contentTypeSPARQLUpdate, reqStr) ; }
        // Fix: log the exception as the cause instead of dropping it — the
        // original discarded all failure detail.
        catch (HttpException ex) { DPS.LOG.warn("Failed to send to "+ep, ex) ; }
    }) ;
}
 
Example #7
Source File: TestCmdServerZkS3.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/** A GET on a nonexistent admin path must surface an HttpException. */
@Test(expected=HttpException.class)
public void deltaZkS3_3() {
    runtest(
        (endpoint)-> {
            // Would cause a 404 log message.
            // Hidden by the logging configuration.
            final String missingPath = endpoint + "$/noSuch";
            HttpOp.execHttpGet(missingPath);
        },
        endpointURL);
}
 
Example #8
Source File: Setup.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/** Set the HttpClient - close the old one if appropriate */
/*package*/ static void setHttpClient(HttpClient newHttpClient) {
    // Release the previous client before swapping in the replacement.
    HttpClient previous = HttpOp.getDefaultHttpClient() ;
    if ( previous instanceof CloseableHttpClient )
        IO.close((CloseableHttpClient)previous) ;
    HttpOp.setDefaultHttpClient(newHttpClient) ;
}
 
Example #9
Source File: TestDeltaFusekiGood.java    From rdf-delta with Apache License 2.0 5 votes vote down vote up
/** Tears down the servers and restores the process-wide default HttpClient. */
@AfterClass
public static void afterClass() {
    if ( server1 != null )
        server1.stop();
    if ( server2 != null )
        server2.stop();
    // Fix: guard against a failed beforeClass() leaving deltaServer unset,
    // consistent with the server1/server2 checks above.
    if ( deltaServer != null )
        deltaServer.stop();
    // Fix: only close a closeable client (avoids ClassCastException if a
    // non-closeable client was installed); mirrors Setup.setHttpClient.
    if ( HttpOp.getDefaultHttpClient() instanceof CloseableHttpClient )
        IO.close( ((CloseableHttpClient)HttpOp.getDefaultHttpClient()) );
    HttpOp.setDefaultHttpClient(dftStdHttpClient);
}
 
Example #10
Source File: TestDeltaFusekiBad.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** Install a fresh minimal HttpClient before each test. */
@Before
public void before() {
    HttpOp.setDefaultHttpClient(HttpClients.createMinimal());
}
 
Example #11
Source File: TestDeltaFusekiBad.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** Close the per-test HttpClient installed by before(). */
@After
public void after() {
    // Fix: only close a closeable client — the original's unconditional cast
    // would throw ClassCastException if a test swapped in a non-closeable
    // client (mirrors the instanceof guard used in Setup.setHttpClient).
    if ( HttpOp.getDefaultHttpClient() instanceof CloseableHttpClient )
        IO.close( ((CloseableHttpClient)HttpOp.getDefaultHttpClient()) );
}
 
Example #12
Source File: TestDeltaFusekiBad.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
// Capture the process-wide default HttpClient so resetHttpOp() can restore it
// after the test class finishes.
// NOTE(review): method name has a typo ("cpature"); renaming is safe since
// JUnit dispatches via the @BeforeClass annotation, not the name.
@BeforeClass
public static void cpatureHttpOp() {
    dftStdHttpClient = HttpOp.getDefaultHttpClient() ;
}
 
Example #13
Source File: TestDeltaFusekiBad.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** Restore the default HttpClient captured before the test class ran. */
@AfterClass
public static void resetHttpOp() {
    HttpOp.setDefaultHttpClient(dftStdHttpClient);
}
 
Example #14
Source File: TestCmdServerZkS3.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** The admin ping endpoint must answer a plain GET without error. */
@Test public void deltaZkS3_2() {
    runtest(
        (endpoint)-> {
            // Any HTTP failure would propagate out of execHttpGet and fail the test.
            HttpOp.execHttpGet(endpoint + "$/ping");
        },
        endpointURL);
}
 
Example #15
Source File: Setup.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** Replace the current default HttpClient with a freshly created one. */
private static void resetDefaultHttpClient() {
    HttpClient freshClient = HttpOp.createDefaultHttpClient();
    setHttpClient(freshClient);
}
 
Example #16
Source File: TestPatchFuseki.java    From rdf-delta with Apache License 2.0 4 votes vote down vote up
/** POSTs the serialized patch to {@code dest} using the RDF-patch content type. */
private static void applyPatch(String dest, RDFPatch patch) {
    final String payload = RDFPatchOps.str(patch);
    HttpOp.execHttpPost(dest, DeltaFuseki.patchContentType, payload);
}
 
Example #17
Source File: SPARQLEndpointExecution.java    From hypergraphql with Apache License 2.0 4 votes vote down vote up
/**
 * Executes the generated SPARQL SELECT query against the configured endpoint,
 * collecting the resource URIs bound to each marker variable and the union of
 * the per-solution models.
 *
 * @return the aggregated execution result (marker URIs and union model)
 */
@Override
public SPARQLExecutionResult call() {
    Map<String, Set<String>> resultSet = new HashMap<>();

    markers.forEach(marker -> resultSet.put(marker, new HashSet<>()));

    Model unionModel = ModelFactory.createDefaultModel();

    SPARQLServiceConverter converter = new SPARQLServiceConverter(schema);
    String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType);
    logger.debug(sparqlQuery);

    // Basic-auth credentials for the endpoint, applied to all scopes.
    CredentialsProvider credsProvider = new BasicCredentialsProvider();
    Credentials credentials =
            new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword());
    credsProvider.setCredentials(AuthScope.ANY, credentials);
    HttpClient httpclient = HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    // NOTE(review): mutates the process-wide HttpOp default client on every
    // call — confirm this global side effect is intended.
    HttpOp.setDefaultHttpClient(httpclient);

    ARQ.init();
    Query jenaQuery = QueryFactory.create(sparqlQuery);

    QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery);
    try {
        qEngine.setClient(httpclient);
        //qEngine.setSelectContentType(ResultsFormat.FMT_RS_XML.getSymbol());

        ResultSet results = qEngine.execSelect();

        // Consume all solutions before closing: collect marker URIs and merge
        // each solution's model into the union.
        results.forEachRemaining(solution -> {
            markers.stream().filter(solution::contains).forEach(marker ->
                    resultSet.get(marker).add(solution.get(marker).asResource().getURI()));

            unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema));
        });
    } finally {
        // Fix: the original leaked the query engine (and its HTTP resources).
        qEngine.close();
    }

    SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel);
    logger.debug("Result: {}", sparqlExecutionResult);

    return sparqlExecutionResult;
}