org.apache.jena.riot.RDFDataMgr Java Examples

The following examples show how to use org.apache.jena.riot.RDFDataMgr. Each example is taken from an open-source project; the source file, originating project, and license are noted above the code.
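
As a quick orientation before the project examples, here is a minimal sketch of the most common RDFDataMgr calls: loading a Model from a file, reading further data with an explicit syntax hint, and serialising the result. It is not taken from any of the projects below; the class name RDFDataMgrQuickStart and the file names example.ttl and more-data.rdf are placeholders.

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RDFFormat;

public class RDFDataMgrQuickStart {
    public static void main(String[] args) {
        // Load a model; the syntax is guessed from the file extension (Turtle here).
        Model model = RDFDataMgr.loadModel("example.ttl");

        // Read more data into the same model, giving an explicit syntax hint
        // in case the extension or HTTP content type is unreliable.
        RDFDataMgr.read(model, "more-data.rdf", Lang.RDFXML);

        // Write the merged model to standard output as pretty-printed Turtle.
        RDFDataMgr.write(System.out, model, RDFFormat.TURTLE_PRETTY);
    }
}

The project examples that follow show the same API in context, including dataset (quad) I/O, streaming parsers and different serialisations.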
Example #1
Source File: ExRIOT_out2.java    From xcurator with Apache License 2.0
public static void main(String[] args)
{
    Model model = RDFDataMgr.loadModel("D.ttl") ;
    System.out.println() ;
    System.out.println("#### ---- Write as TriG") ;
    System.out.println() ;
    // This will be the default graph of the dataset written.
    RDFDataMgr.write(System.out, model, Lang.TRIG) ;
    
    // Loading Turtle as TriG reads into the default graph.
    Dataset dataset = RDFDataMgr.loadDataset("D.ttl") ;
    System.out.println() ;
    System.out.println("#### ---- Write as NQuads") ;
    System.out.println() ;
    RDFDataMgr.write(System.out, dataset, Lang.NQUADS) ;
}
 
Example #2
Source File: ExRIOT_3.java    From xcurator with Apache License 2.0
public static void main(String...argv)
{
    Dataset ds = null ;
    
    // Read a TriG file into quad storage in-memory.
    ds = RDFDataMgr.loadDataset("data.trig") ;
    
    // read some (more) data into a dataset graph.
    RDFDataMgr.read(ds, "data2.trig") ;
    
    // Create a dataset,
    Dataset ds2 = DatasetFactory.createTxnMem() ;
    // read in data, indicating the syntax in case the remote end does not
    // correctly provide the HTTP content type.
    RDFDataMgr.read(ds2, "http://host/data2.unknown", Lang.TRIG) ;
}
 
Example #3
Source File: Mapper_SPARQL_Test.java    From rmlmapper-java with MIT License
private void mappingTest(String testCaseName, Class expectedException) throws Exception {
    String resourcePath = "test-cases/" + testCaseName + "-SPARQL/resource.ttl";
    String mappingPath = "./test-cases/" + testCaseName + "-SPARQL/mapping.ttl";
    String outputPath = "test-cases/" + testCaseName + "-SPARQL/output.nq";
    String tempMappingPath = replacePortInMappingFile(mappingPath, "" + PORTNUMBER_SPARQL);

    builder.add("/ds"+(1), RDFDataMgr.loadDataset(resourcePath), true);
    this.server = builder.build();
    this.server.start();

    // mapping
    if (expectedException == null) {
        doMapping(tempMappingPath, outputPath);
    } else {
        doMappingExpectError(tempMappingPath);
    }

    deleteTempMappingFile(tempMappingPath);
}
 
Example #4
Source File: GetPerspectiveRelations.java    From EventCoreference with Apache License 2.0
public static void perspectiveRelationsToTrig (String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) {
    try {
        OutputStream fos = new FileOutputStream(pathToTrigFile);
        Dataset ds = TDBFactory.createDataset();
        Model defaultModel = ds.getDefaultModel();
        //ResourcesUri.prefixModel(defaultModel);
      //  Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective");
        ResourcesUri.prefixModelGaf(defaultModel);
        String attrBase = pathToTrigFile+"_";
        JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.grasp, "wasAttributedTo", perspectiveObjects, 1);
        RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY);
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example #5
Source File: IntegrationTestSupertypeLayer.java    From SolRDF with Apache License 2.0
protected void assertIsomorphic(final Model memoryModel, final Model solrdfModel, final String uri) {
	try {
		assertTrue(solrdfModel.isIsomorphicWith(memoryModel));
	} catch (Throwable exception) {
		final StringWriter memoryModelWriter = new StringWriter();
		final StringWriter remoteModelWriter = new StringWriter();
		RDFDataMgr.write(memoryModelWriter, memoryModel, RDFFormat.NTRIPLES) ;
		RDFDataMgr.write(remoteModelWriter, solrdfModel, RDFFormat.NQUADS) ;

		final String name = uri != null ? uri : " (DEFAULT) ";
		log.debug("**** MEMORY MODEL " + name + " ****");
		log.debug(memoryModelWriter.toString());
		log.debug("");
		log.debug("**** REMOTE MODEL " + name + " ****");
		log.debug(remoteModelWriter.toString());
		log.debug("*********************************");
		throw exception;
	}
}
 
Example #6
Source File: ExRIOT_1.java    From xcurator with Apache License 2.0
public static void main(String...argv)
{
    Model m = ModelFactory.createDefaultModel() ;
    // read into the model.
    m.read("data.ttl") ;
    
    // Alternatively, use the RDFDataMgr, which reads from the web,
    // with content negotiation.  Plain names are assumed to be 
    // local files where file extension indicates the syntax.  
    
    Model m2 = RDFDataMgr.loadModel("data.ttl") ;
    
    // read in more data; the remote server serves up the data
    // with the right MIME type.
    RDFDataMgr.read(m2, "http://host/some-published-data") ;
    
    
    // Read some data but also give a hint for the syntax if it is not
    // discovered by inspecting the file or by HTTP content negotiation.
    RDFDataMgr.read(m2, "some-more-data.out", RDFLanguages.TURTLE) ;
}
 
Example #7
Source File: OneM2MContentInstanceMapper.java    From SDA with BSD 2-Clause "Simplified" License
public static void main(String[] args) throws IOException {

		File f = new File("/Users/rosenc/Documents/business/[2015]icbms/json_sample1.txt");
		BufferedReader br = new BufferedReader(new FileReader(f));
		String line = null;
		String s = "";
		while ((line = br.readLine()) != null) {
			s = s + line + "\n";
		}

		System.out.println(s);
		Gson gson = new Gson();
		OneM2MContentInstanceDTO cont = gson.fromJson(s, OneM2MContentInstanceDTO.class);
		OneM2MContentInstanceMapper mapper = new OneM2MContentInstanceMapper(cont);

		Model model = ModelFactory.createDefaultModel();
		model.add(mapper.from());
		System.out.println("content type ; " + mapper.getContentType());
		// String conversion part
		RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
		// System.out.println(mapper.getTypedContent("2k42kk"));
		// mapper.getTypedContent("2.4");

	}
 
Example #8
Source File: NtripleUtil.java    From NLIWOD with GNU Affero General Public License v3.0
private static PipedRDFIterator<Triple> fileToStreamIterator(String filename) {
	PipedRDFIterator<Triple> iter = new PipedRDFIterator<>();
	final PipedRDFStream<Triple> inputStream = new PipedTriplesStream(iter);

	// PipedRDFStream and PipedRDFIterator need to be on different threads
	ExecutorService executor = Executors.newSingleThreadExecutor();

	// Create a runnable for our parser thread
	Runnable parser = new Runnable() {

		@Override
		public void run() {
			RDFDataMgr.parse(inputStream, filename);
		}
	};

	// Start the parser on another thread
	executor.submit(parser);
	// We will consume the input on the main thread here
	// We can now iterate over data as it is parsed, parsing only runs as
	// far ahead of our consumption as the buffer size allows
	return iter;
}
 
Example #9
Source File: GtRDFReader.java    From JedAIToolkit with Apache License 2.0
protected void performReading() {
    final Model model = RDFDataMgr.loadModel(inputFilePath);
    final StmtIterator iter = model.listStatements();
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement();

        final String pred = stmt.getPredicate().toString();
        if (!(pred.contains("sameAs"))) {
            continue;
        }

        final String sub = stmt.getSubject().toString();
        final String obj = stmt.getObject().toString();

        // add a new edge for every pair of duplicate entities
        int entityId1 = urlToEntityId1.get(sub);
        int entityId2 = urlToEntityId1.get(obj) + datasetLimit;

        duplicatesGraph.addEdge(entityId1, entityId2);
    }
}
 
Example #10
Source File: Stresstest.java    From IGUANA with GNU Affero General Public License v3.0
@Override
public void init(String host, String queueName) throws IOException, TimeoutException {
	super.init(host, queueName);

	// create from construct args and class
	QueryHandlerFactory factory = new QueryHandlerFactory();
	// add Worker
	QueryHandler queryHandler = factory.createWorkerBasedQueryHandler(qhClassName, qhConstructorArgs, workers);
	queryHandler.generateQueries();

	Model tripleStats = queryHandler.generateTripleStats(taskID, iguanaResource, iguanaProperty);
	StringWriter sw = new StringWriter();
	RDFDataMgr.write(sw, tripleStats, RDFFormat.NTRIPLES);
	this.metaData.put(COMMON.SIMPLE_TRIPLE_KEY, sw.toString());
	this.metaData.put(COMMON.QUERY_STATS, tripleStats);


}
 
Example #11
Source File: RDFToManifest.java    From incubator-taverna-language with Apache License 2.0
protected static Model jsonLdAsJenaModel(InputStream jsonIn, URI base)
		throws IOException, RiotException {
	Model model = ModelFactory.createDefaultModel();

	ClassLoader oldCl = Thread.currentThread().getContextClassLoader();
	try {
		// TAVERNA-971: set context classloader so jarcache.json is consulted
		// even through OSGi
		Thread.currentThread().setContextClassLoader(RDFToManifest.class.getClassLoader());

		// Now we can parse the JSON-LD without network access
		RDFDataMgr.read(model, jsonIn, base.toASCIIString(), Lang.JSONLD);
	} finally {
		// Restore old context class loader (if any)
		Thread.currentThread().setContextClassLoader(oldCl);
	}
	return model;
}
 
Example #12
Source File: UpdateProgrammatic.java    From xcurator with Apache License 2.0
public static void main(String []args)
{
    Dataset dataset = DatasetFactory.createTxnMem() ;
    
    UpdateRequest request = UpdateFactory.create() ;
    
    request.add(new UpdateDrop(Target.ALL)) ;
    request.add(new UpdateCreate("http://example/g2")) ;
    request.add(new UpdateLoad("file:etc/update-data.ttl", "http://example/g2")) ;
    UpdateAction.execute(request, dataset) ;
    
    System.out.println("# Debug format");
    SSE.write(dataset) ;
    
    System.out.println();
    
    System.out.println("# N-Quads: S P O G") ;
    RDFDataMgr.write(System.out, dataset, Lang.NQUADS) ;
}
 
Example #13
Source File: SparqlDataSourceTest.java    From Server.Java with MIT License
/**
 *
 * @throws Exception
 */
@BeforeClass
public static void setUpClass() throws Exception {
    final String typeName = "SparqlSourceType";
    if ( ! DataSourceTypesRegistry.isRegistered(typeName) ) {
        DataSourceTypesRegistry.register( typeName,
                                          new SparqlDataSourceType() );
    }

    String tmpdir = System.getProperty("java.io.tmpdir");
    jena = new File(tmpdir, "ldf-sparql-test");
    jena.mkdir();
    
    dataset = TDBFactory.createDataset(jena.getAbsolutePath());

    Model model = dataset.getDefaultModel();
    InputStream in = ClassLoader.getSystemResourceAsStream("demo.nt");
    RDFDataMgr.read(model, in, Lang.NTRIPLES);

    // Dynamically-generated port comes from pom.xml configuration: build-helper-maven-plugin
    int fusekiPort = Integer.parseInt(System.getProperty("fuseki.port"));

    // Create Fuseki, loaded with the test dataset
    fuseki = FusekiServer.create().setPort(fusekiPort).add("/ds", dataset).build();
    fuseki.start();

    // Everything is in place, now create the LDF datasource                
    JsonObject config = createConfig("sparql test", "sparql test",
                                     typeName);
    
    JsonObject settings = new JsonObject();
    settings.addProperty("endpoint", "http://localhost:" + fusekiPort + "/ds");
    config.add("settings", settings);

    setDatasource(DataSourceFactory.create(config));
}
 
Example #14
Source File: JenaSerialization.java    From EventCoreference with Apache License 2.0
static public void serializeJenaCompositeEvents (OutputStream stream,
                                                 HashMap<String, ArrayList<CompositeEvent>> semEvents,
                                                 HashMap <String, SourceMeta> sourceMetaHashMap,
                                                 boolean ILIURI,
                                                 boolean VERBOSE_MENTIONS) {

    createModels();
    addJenaCompositeEvents(semEvents, sourceMetaHashMap, ILIURI, VERBOSE_MENTIONS);
    try {
        RDFDataMgr.write(stream, ds, RDFFormat.TRIG_PRETTY);
    } catch (Exception e) {
      //  e.printStackTrace();
    }
}
 
Example #15
Source File: DeltaEx01_DatasetWithPatchLog.java    From rdf-delta with Apache License 2.0
public static void main(String ...args) {
    // -- Base dataset
    DatasetGraph dsgBase = DatasetGraphFactory.createTxnMem();

    // -- Destination for changes.
    // Text form of output.
    OutputStream out = System.out;
    // Create an RDFChanges that writes to "out".
    RDFChanges changeLog = RDFPatchOps.textWriter(out);

    // Combined datasetgraph and changes.
    DatasetGraph dsg = RDFPatchOps.changes(dsgBase, changeLog);

    // Wrap in the Dataset API
    Dataset ds = DatasetFactory.wrap(dsg);

    // --------
    // Do something. Read in data.ttl inside a transaction.
    // (data.ttl is in src/main/resources/)
    Txn.executeWrite(ds,
        ()->RDFDataMgr.read(dsg, "data.ttl")
        );
}
 
Example #16
Source File: EntityRDFReader.java    From JedAIToolkit with Apache License 2.0
@Override
public List<EntityProfile> getEntityProfiles() {
    if (!entityProfiles.isEmpty()) {
        return entityProfiles;
    }

    if (inputFilePath == null) {
        Log.error("Input file path has not been set!");
        return null;
    }

    //load the rdf model from the input file
    try {
        final Model model = RDFDataMgr.loadModel(inputFilePath);
        readModel(model);
    } catch (IOException ex) {
        Log.error("Error in entities reading!", ex);
        return null;
    }

    return entityProfiles;
}
 
Example #17
Source File: ProcessEventObjectsStream.java    From EventCoreference with Apache License 2.0
private static void readTrigFromStream(Dataset ds, Dataset dsnew) {
    InputStream is = null;
    try {
        is = System.in;

        if (is==null){
            throw new IllegalArgumentException(
                    "No stream input!");
        }

        ByteArrayOutputStream b = cloneInputStream(is);
        InputStream is1 = new ByteArrayInputStream(b.toByteArray());
        InputStream is2 = new ByteArrayInputStream(b.toByteArray());

        RDFDataMgr.read(ds, is1, RDFLanguages.TRIG);
        RDFDataMgr.read(dsnew, is2, RDFLanguages.TRIG);

    }
    finally {
        // close the streams using close method

    }

}
 
Example #18
Source File: RdfBulkUpdateRequestHandler.java    From SolRDF with Apache License 2.0
@Override
public void load( 
		final SolrQueryRequest request, 
		final SolrQueryResponse response,
		final ContentStream stream, 
		final UpdateRequestProcessor processor) throws Exception {
	
	final PipedRDFIterator<Quad> iterator = new PipedRDFIterator<Quad>();
	final StreamRDF inputStream = new PipedQuadsStream(iterator);
	
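	// Parse the request body on a worker thread; parsed quads become available
	// to this thread through the piped iterator as they are produced.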
	executor.submit(new Runnable() {
		@Override
		public void run() {
			try {
				RDFDataMgr.parse(
						inputStream, 
						stream.getStream(), 
						RDFLanguages.contentTypeToLang(stream.getContentType()));
			} catch (final IOException exception) {
				throw new SolrException(ErrorCode.SERVER_ERROR, exception);
			}					
		}
	});
	
	final DatasetGraph dataset = new LocalDatasetGraph(request, response);
	while (iterator.hasNext()) {
		dataset.add(iterator.next());
	}									
}
 
Example #19
Source File: IntegrationTestSupertypeLayer.java    From SolRDF with Apache License 2.0
/**
 * Executes a given DESCRIBE query against a given dataset.
 * 
 * @param data the mystery guest containing test data (query and dataset)
 * @throws Exception never, otherwise the test fails.
 */
protected void describeTest(final MisteryGuest data) throws Exception {
	load(data);
	
	final Query query = QueryFactory.create(queryString(data.query));
	try {
		inMemoryExecution = QueryExecutionFactory.create(query, memoryDataset);
		
		assertTrue(
				Arrays.toString(data.datasets) + ", " + data.query,
				inMemoryExecution.execDescribe().isIsomorphicWith(
						SOLRDF_CLIENT.describe(queryString(data.query))));
	} catch (final Throwable error) {
		StringWriter writer = new StringWriter();
		RDFDataMgr.write(writer, SOLRDF_CLIENT.describe(queryString(data.query)), RDFFormat.NTRIPLES);
		log.debug("JNS\n" + writer);
		
		QueryExecution debugExecution = QueryExecutionFactory.create(query, memoryDataset);
		writer = new StringWriter();
		RDFDataMgr.write(writer, debugExecution.execDescribe(), RDFFormat.NTRIPLES);
		
		log.debug("MEM\n" + writer);
		
		debugExecution.close();
		throw error;
	} 
}
 
Example #20
Source File: ModelImplJena.java    From semweb4j with BSD 2-Clause "Simplified" License
@Override
public void writeTo(Writer writer, Syntax syntax) {
	assertModel();
	registerNamespaces(this.jenaModel);

	RDFDataMgr.write(writer, this.jenaModel, RDFWriterRegistry.defaultSerialization(getJenaLang(syntax)));
}
 
Example #21
Source File: DeltaEx02_DatasetCollectPatch.java    From rdf-delta with Apache License 2.0
public static void main(String ...args) {
    // -- Base dataset
    DatasetGraph dsgBase = DatasetGraphFactory.createTxnMem();

    // -- Destination for changes.
    // Text form of output.
    OutputStream out = System.out;
    // Create an RDFChanges that writes to "out".
    RDFChanges changeLog = RDFPatchOps.textWriter(out);


    // ---- Collect up changes.
    //RDFPatchOps.collect();
    RDFChangesCollector rcc = new RDFChangesCollector();
    DatasetGraph dsg = RDFPatchOps.changes(dsgBase, rcc);
    Dataset ds = DatasetFactory.wrap(dsg);
    Txn.executeWrite(ds,
                     ()->RDFDataMgr.read(dsg, "data.ttl")
                     );
    // Again - different bnodes.
    // Note all changes are recorded - even if they have no effect
    // (e.g. the prefix, the triple "ex:s ex:p ex:o").
    Txn.executeWrite(ds,
                     ()->RDFDataMgr.read(dsg, "data.ttl")
                     );

    // Collected (in-memory) patch.
    RDFPatch patch = rcc.getRDFPatch();
    // Write it.
    patch.apply(changeLog);
}
 
Example #22
Source File: WriteStatementsKnowledgeStore.java    From EventCoreference with Apache License 2.0
static public void main (String[] args) {
    // The dataset is expected to be initialised elsewhere (e.g. loaded with
    // RDFDataMgr.loadDataset); as written, the null value below would fail at listNames().
    Dataset dataset = null;
    String address = "http://145.100.57.176:50053/";
    ArrayList<org.openrdf.model.Statement> statements = new ArrayList<org.openrdf.model.Statement>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();

        Model namedModel = dataset.getNamedModel(name);
        StmtIterator siter = namedModel.listStatements();
        while (siter.hasNext()) {
            com.hp.hpl.jena.rdf.model.Statement s = siter.nextStatement();
            org.openrdf.model.Statement statement = castJenaOpenRdf(s, name);
            if (statement != null) {
                statements.add(statement);
            }
        }
    }
    if (DEBUG) {
        try {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            RDFDataMgr.write(os, dataset, RDFFormat.TRIG_PRETTY);
            String rdfString = new String(os.toByteArray(), "UTF-8");
            System.out.println("rdfString = " + rdfString);
            os.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // System.out.println("address = " + address);
    WriteStatementsKnowledgeStore.storeTriples(statements, address);
}
 
Example #23
Source File: ExTDB3.java    From xcurator with Apache License 2.0
public static void main(String... argv)
{
    String assemblerFile = "Store/tdb-assembler.ttl" ;
    
    // Find a particular description in the file where there are several: 
    Model spec = RDFDataMgr.loadModel(assemblerFile) ;

    // Find the right starting point for the description in some way.
    Resource root = null ;

    if ( false )
        // If you know the Resource URI:
        root = spec.createResource("http://example/myChoiceOfURI" );
    else
    {
        // Alternatively, look for a single resource of the right type.
        try {
            // Find the required description - the file can contain descriptions of many different types.
            root = GraphUtils.findRootByType(spec, VocabTDB.tDatasetTDB) ;
            if ( root == null )
                throw new JenaException("Failed to find a suitable root") ;
        } catch (TypeNotUniqueException ex)
        { throw new JenaException("Multiple types for: "+DatasetAssemblerVocab.tDataset) ; }
    }

    Dataset ds = (Dataset)Assembler.general.open(root) ;
}
 
Example #24
Source File: InitialIndexTool.java    From gerbil with GNU Affero General Public License v3.0
private static void indexStreamMem(Indexer index, InputStream in, SameAsCollectorStreamMem sink) {
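	// Parse the Turtle stream through the collector sink, then index every owl:sameAs mapping it gathered.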
	RDFDataMgr.parse(sink, in, Lang.TURTLE);
	LOGGER.info("Found {} instances of owl:sameAs", sink.getMapping().size());
	for (String key : sink.getMapping().keySet()) {
		index.indexSameAs(key, sink.getMapping().get(key));
	}
}
 
Example #25
Source File: OneM2MCSEBaseDTO.java    From SDA with BSD 2-Clause "Simplified" License
public static void main(String[] args) {
	String sample = "{     \"_id\" : ObjectId(\"560c9d741ee8203c53a63569\"),     \"rn\" : \"CONTENT_INST_5\",     \"ty\" : 4,     \"ri\" : \"CONTENT_INST_5\",     \"pi\" : \"CONTAINER_37\",     \"lbl\" : [          \"cnt-switch\"     ],     \"cr\" : \"C_AE-D-GASLOCK1004\",     \"cnf\" : \"text/plain:0\",     \"cs\" : 3,     \"con\" : \"Off\",     \"_uri\" : \"/herit-in/herit-cse/ae-gaslock1004/cnt-switch/CONTENT_INST_5\",     \"ct\" : \"20151001T114156\",     \"lt\" : \"20151001T114156\" , \"or\":\"http://www.pineone.com/campus/StateCondition\" }";
	Gson gson = new Gson();
	OneM2MContentInstanceDTO cont = gson.fromJson(sample, OneM2MContentInstanceDTO.class);
	System.out.println(cont);

	OneM2MContentInstanceMapper mapper = new OneM2MContentInstanceMapper(cont);
	Model model = ModelFactory.createDefaultModel();
	model.add(mapper.from());

	// String conversion part
	RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
}
 
Example #26
Source File: RdfWriterImpl.java    From Server.Java with MIT License
@Override
public void writeFragment(ServletOutputStream outputStream, IDataSource datasource, ILinkedDataFragment fragment, ILinkedDataFragmentRequest ldfRequest) throws Exception {
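    // Merge the fragment's metadata, data triples and hypermedia controls into one model
    // and serialise it in the negotiated content type.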
    final Model output = ModelFactory.createDefaultModel();
    output.setNsPrefixes(getPrefixes());
    output.add(fragment.getMetadata());
    output.add(fragment.getTriples());
    output.add(fragment.getControls());
    RDFDataMgr.write(outputStream, output, contentType);
}
 
Example #27
Source File: OneM2MContainerDTO.java    From SDA with BSD 2-Clause "Simplified" License
public static void main(String[] args) {
		String sample = "{     \"_id\" : ObjectId(\"561f27831ee8202c5e307d37\"),     \"rn\" : \"CONTAINER_268\",     \"ty\" : 3,     \"ri\" : \"CONTAINER_268\",     \"pi\" : \"SAE_0\",     \"lbl\" : [          \"switch\",          \"key1\",          \"key2\"     ],     \"et\" : \"20151203T122321\",     \"cr\" : \"//onem2m.herit.net/herit-cse/SAE_5\",     \"mni\" : 100,     \"mbs\" : 1.024e+006,     \"mia\" : 36000,     \"cni\" : 1,     \"cbs\" : 2,     \"_uri\" : \"/herit-in/herit-cse/SAE_0/CONTAINER_268\",     \"ct\" : \"20151015T131147\",     \"lt\" : \"20151015T131147\", \"or\":\"http://www.pineone.com/m2m/SwitchStatusSensor\" }";
		Gson gson = new Gson();
		OneM2MContainerDTO cont = gson.fromJson(sample, OneM2MContainerDTO.class);


		System.out.println(cont);
		
		OneM2MContainerMapper mapper = new OneM2MContainerMapper(cont);
		Model model = ModelFactory.createDefaultModel();
		model.add(mapper.from());
		
		// String conversion part
		RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);

		// String conversion part
//		String serviceURI = "http://219.248.137.7:13030/icbms";
//
//		DatasetAccessor	accessor = DatasetAccessorFactory.createHTTP(serviceURI);
//		accessor.deleteDefault();
//		accessor.add(model);
//		
//		
//		QueryExecution q = QueryExecutionFactory.sparqlService(serviceURI	,"select * {?s ?p ?o}"	);
//		ResultSet rs = q.execSelect();
//		ResultSetFormatter.out(rs);;
		
//		model = DatasetAccessorFactory.createHTTP(serviceURI).getModel();
//		System.out.println(model.size());

	}
 
Example #28
Source File: OneM2MAEDTO.java    From SDA with BSD 2-Clause "Simplified" License
public static void main(String[] args) {
	String sample = " {     \"_id\" : ObjectId(\"561e1e1e1ee82041fac258b6\"),     \"rn\" : \"SAE_0\",     \"ty\" : 2,     \"ri\" : \"SAE_0\",     \"pi\" : \"herit-in\",     \"lbl\" : [          \"home1\",          \"home_service\"     ],     \"et\" : \"20151203T122321\",     \"at\" : [          \"//onem2m.hubiss.com/cse1\",          \"//onem2m.hubiss.com/cse2\"     ],     \"aa\" : [          \"poa\",          \"apn\"     ],     \"apn\" : \"onem2mPlatformAdmin\",     \"api\" : \"NHeritAdmin\",     \"aei\" : \"/SAE_0\",     \"poa\" : [          \"10.101.101.111:8080\"     ],     \"rr\" : false,     \"_uri\" : \"/herit-in/herit-cse/SAE_0\",     \"ct\" : \"20151014T181926\",     \"lt\" : \"20151014T181926\" }";
	Gson gson = new Gson();
	OneM2MAEDTO cont = gson.fromJson(sample, OneM2MAEDTO.class);


	System.out.println(cont);
	
	OneM2MAEMapper mapper = new OneM2MAEMapper(cont);
	Model model = ModelFactory.createDefaultModel();
	model.add(mapper.from());
	
	// String conversion part
	RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
}
 
Example #29
Source File: RdfDocumentGraphConsumerTest.java    From baleen with Apache License 2.0
@Test
public void testDocumentGraphRdfithRelationsAsLinks()
    throws AnalysisEngineProcessException, ResourceInitializationException, IOException,
        URISyntaxException {

  processJCas(
      RdfDocumentGraphConsumer.PARAM_QUERY_ENDPOINT,
      "http://localhost:" + port + "/ds/query",
      RdfDocumentGraphConsumer.PARAM_UPDATE_ENDPOINT,
      "http://localhost:" + port + "/ds/update",
      RdfDocumentGraphConsumer.PARAM_STORE_ENDPOINT,
      "http://localhost:" + port + "/ds/data",
      RdfDocumentGraphConsumer.PARAM_OUTPUT_RELATIONS_AS_LINKS,
      true);

  Model expected =
      RDFDataMgr.loadModel(EXPECTED_DOCUMENT_RELATION_AS_LINKS_FILE.toURI().toString());
  Model model = ds.getDefaultModel();
  Resource resource =
      expected.getResource(
          "http://baleen.dstl.gov.uk/8b408a0c7163fdfff06ced3e80d7d2b3acd9db900905c4783c28295b8c996165");
  resource.removeProperties(); // Get rid of the timestamp

  StmtIterator listStatements = expected.listStatements();
  while (listStatements.hasNext()) {
    Statement statement = listStatements.next();
    assertTrue("Missing statement " + statement.toString(), model.contains(statement));
  }
  assertTrue(model.containsAll(expected));
}
 
Example #30
Source File: ModelSetImplJena.java    From semweb4j with BSD 2-Clause "Simplified" License
@Override
	public void writeTo(OutputStream out, Syntax syntax) throws IOException,
			ModelRuntimeException, SyntaxNotSupportedException {
		
		if (syntax == null) {
			throw new NullPointerException("syntax may not be null");
		}
		
		Lang jenaLang = getJenaLang(syntax);

//		if (RDFLanguages.isTriples(jenaLang)) {
//			/*
//			 * NB: Writing a ModelSet to a triple serialization loses the
//			 * context of any quads if present.
//			 */
//			Iterator<Model> it = this.getModels();
//			while (it.hasNext()) {
//				Model model = it.next();
//				model.writeTo(out, syntax);
//			}
//			this.getDefaultModel().writeTo(out, syntax);
//		}
// FIXME stuehmer: write unit test to see if this can be removed
//		else {
		try {
			RDFDataMgr.write(out, this.dataset, jenaLang);
		}
		catch (RiotException e) {
			throw new SyntaxNotSupportedException(
					"error writing syntax " + syntax + ": " + e.getMessage());
		}
	}