Java Code Examples for org.apache.hadoop.mrunit.mapreduce.MapDriver

The following examples show how to use org.apache.hadoop.mrunit.mapreduce.MapDriver. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: kylin   Source File: InMemCuboidMapperTest.java    License: Apache License 2.0 6 votes vote down vote up
@Before
public void setup() throws Exception {
    // Refresh the local "./meta" metadata directory from the test
    // configuration before anything reads it.
    createTestMetadata();
    FileUtils.deleteDirectory(new File("./meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl().toString()), new File("./meta"));

    // Look up the cube under test and wire its mapper into an MRUnit driver.
    cubeName = "test_kylin_cube_with_slr_1_new_segment";
    cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
    inMemCuboidMapper = new InMemCuboidMapper<>();
    mapDriver = MapDriver.newMapDriver(inMemCuboidMapper);

    // Stub the static lookups the mapper performs so it uses this cube's
    // scheduler and a mocked input side. NOTE(review): assumes the test class
    // runs under PowerMock (@RunWith/@PrepareForTest) — confirm on the class.
    PowerMockito.stub(PowerMockito.method(CuboidSchedulerUtil.class, "getCuboidSchedulerByMode", CubeSegment.class,
            String.class)).toReturn(cube.getCuboidScheduler());
    IMRBatchCubingInputSide mockInputSide = createMockInputSide();
    PowerMockito.stub(PowerMockito.method(MRUtil.class, "getBatchCubingInputSide")).toReturn(mockInputSide);

}
 
Example 2
Source Project: rya   Source File: ForwardChainTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testTableMapperOutput() throws Exception {
    // Serialize one statement into its SPO table row, then verify the table
    // mapper emits the fact once keyed by its subject and once by its object.
    RyaStatement statement = TestUtils.ryaStatement("x", "subOrganizationOf", "y");
    TripleRowResolver resolver = new WholeRowTripleResolver();
    Map<TABLE_LAYOUT,TripleRow> serialized = resolver.serialize(statement);
    TripleRow spoRow = serialized.get(TABLE_LAYOUT.SPO);
    byte[] empty = new byte[0];
    Key rowKey = new Key(spoRow.getRow(), spoRow.getColumnFamily(),
        spoRow.getColumnQualifier(), empty, 1);
    ResourceWritable subjectNode = new ResourceWritable();
    subjectNode.set(TestUtils.uri("x"));
    ResourceWritable objectNode = new ResourceWritable();
    objectNode.set(TestUtils.uri("y"));
    new MapDriver<Key, Value, ResourceWritable, Fact>()
        .withMapper(new ForwardChain.TableMapper(schema))
        .withInput(rowKey, new Value(empty))
        .withOutput(subjectNode, X_SUB_Y)
        .withOutput(objectNode, X_SUB_Y)
        .runTest();
}
 
Example 3
Source Project: rya   Source File: ForwardChainTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testRdfMapperOutput() throws Exception {
    // Mapping an RDF statement should emit the corresponding fact once for
    // its subject node and once for its object node.
    RyaStatement statement = TestUtils.ryaStatement("x", "subOrganizationOf", "y");
    RyaStatementWritable writable = new RyaStatementWritable();
    writable.setRyaStatement(statement);
    ResourceWritable subjectNode = new ResourceWritable();
    subjectNode.set(TestUtils.uri("x"));
    ResourceWritable objectNode = new ResourceWritable();
    objectNode.set(TestUtils.uri("y"));
    new MapDriver<LongWritable, RyaStatementWritable, ResourceWritable, Fact>()
        .withMapper(new ForwardChain.RdfMapper(schema))
        .withInput(new LongWritable(), writable)
        .withOutput(subjectNode, X_SUB_Y)
        .withOutput(objectNode, X_SUB_Y)
        .runTest();
}
 
Example 4
Source Project: rya   Source File: DuplicateEliminationTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testTableMapperOutput() throws Exception {
    // Serialize a statement into its SPO row and check that the duplicate
    // table mapper re-keys it as (fact, derivation).
    RyaStatement statement = TestUtils.ryaStatement("x", "subOrganizationOf", "y");
    TripleRowResolver resolver = new WholeRowTripleResolver();
    Map<TABLE_LAYOUT,TripleRow> serialized = resolver.serialize(statement);
    TripleRow spoRow = serialized.get(TABLE_LAYOUT.SPO);
    byte[] empty = new byte[0];
    Key rowKey = new Key(spoRow.getRow(), spoRow.getColumnFamily(),
        spoRow.getColumnQualifier(), empty, 1);
    new MapDriver<Key, Value, Fact, Derivation>()
        .withMapper(new DuplicateElimination.DuplicateTableMapper())
        .withInput(rowKey, new Value(empty))
        .withOutput(X_SUB_Y, X_SUB_Y.getDerivation())
        .runTest();
}
 
Example 5
Source Project: ApprovalTests.Java   Source File: HadoopApprovals.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Runs {@code mapper} alone, then the full {@code mapper} + {@code reducer}
 * pipeline, on a single (key, input) pair, and approves a textual report of
 * both result lists via ApprovalTests.
 *
 * Fix: the original built a MapReduceDriver up front and called setMapper on
 * it, then unconditionally reassigned the variable to a fresh generic driver
 * before any use — that first construction was dead code and is removed.
 *
 * @param mapper  mapper under test; also supplies the writable input types
 * @param reducer reducer under test
 * @param key     raw key, converted to the mapper's writable key type
 * @param input   raw value, converted to the mapper's writable value type
 * @throws Exception if driving the MR pipeline or approval verification fails
 */
public static void verifyMapReduce(SmartMapper mapper, SmartReducer reducer, Object key, Object input)
    throws Exception
{
  // Map-only pass: run the mapper and sort its output for a stable report.
  MapDriver mapDriver = new MapDriver();
  mapDriver.setMapper(mapper);
  Object writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  Object writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapDriver.withInput(writableKey, writableValue);
  List results = mapDriver.run();
  Collections.sort(results, PairComparer.INSTANCE);

  // Full map+reduce pass on fresh writables (the drivers consume inputs).
  MapReduceDriver mapReduceDriver =
      new MapReduceDriver<LongWritable, Text, Text, LongWritable, Text, LongWritable>();
  writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapReduceDriver.withInput(writableKey, writableValue);
  mapReduceDriver.setMapper(mapper);
  mapReduceDriver.setReducer(reducer);
  List finalResults = mapReduceDriver.run();

  String text = String.format("[%s]\n\n -> maps via %s to -> \n\n%s\n\n -> reduces via %s to -> \n\n%s", input,
      mapper.getClass().getSimpleName(), ArrayUtils.toString(results, Echo.INSTANCE),
      reducer.getClass().getSimpleName(), ArrayUtils.toString(finalResults, Echo.INSTANCE));
  Approvals.verify(text);
}
 
Example 6
Source Project: linden   Source File: LindenMapredTest.java    License: Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void init() throws IOException {
  // Build the shared driver around a fresh mapper, then register a single
  // index shard in the driver's configuration before any test runs.
  LindenMapper lindenMapper = new LindenMapper();
  mDriver = MapDriver.newMapDriver(lindenMapper);
  int shardCount = 1;
  Shard[] indexShards = LindenJob.createShards(indexPath, shardCount);
  Shard.setIndexShards(mDriver.getConfiguration(), indexShards);
}
 
Example 7
Source Project: dkpro-c4corpus   Source File: MRUnitTest.java    License: Apache License 2.0 5 votes vote down vote up
@Before
public void setUp()
{
    // One mapper and one reducer instance are wired into all three MRUnit
    // drivers: map-only, reduce-only, and end-to-end.
    SMSCDRMapper smsMapper = new SMSCDRMapper();
    SMSCDRReducer smsReducer = new SMSCDRReducer();
    mapDriver = MapDriver.newMapDriver(smsMapper);
    reduceDriver = ReduceDriver.newReduceDriver(smsReducer);
    mapReduceDriver = MapReduceDriver.newMapReduceDriver(smsMapper, smsReducer);
}
 
Example 8
Source Project: kylin   Source File: FactDistinctColumnsMapperTest.java    License: Apache License 2.0 5 votes vote down vote up
@Before
public void setup() throws Exception {
    // Refresh the local "./meta" metadata directory from the test
    // configuration before looking up the cube.
    createTestMetadata();
    FileUtils.deleteDirectory(new File("./meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl().toString()), new File("./meta"));

    // Resolve the cube under test and build the driver around its mapper.
    cubeName = "test_kylin_cube_with_slr_1_new_segment";
    cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    FactDistinctColumnsMapper<LongWritable> mapper = new FactDistinctColumnsMapper<>();
    mapDriver = MapDriver.newMapDriver(mapper);
}
 
Example 9
@Before
public void setup() throws Exception {
    // Refresh the local "./meta" metadata directory from the test
    // configuration before looking up the cube.
    createTestMetadata();
    FileUtils.deleteDirectory(new File("./meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl().toString()), new File("./meta"));

    // Resolve the cube under test and build the driver around its mapper.
    cubeName = "test_kylin_cube_with_slr_1_new_segment";
    cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    mapDriver = MapDriver.newMapDriver(new CalculateStatsFromBaseCuboidMapper());
}
 
Example 10
Source Project: rya   Source File: CardinalityMapperTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testOutput() throws InterruptedException, IOException {
  // Build three prospect-table rows (subject, predicate, subjectpredicate)
  // and check each maps to a (CompositeType, TripleCard) pair carrying the
  // cardinality stored in the Accumulo value.
  String subject = "urn:gem:etype#1234";
  String predicate = "urn:gem#pred";

  Text subjRow = new Text(TripleValueType.subject.name() + DELIM + subject + DELIM + 1);
  Text predRow = new Text(TripleValueType.predicate.name() + DELIM + predicate + DELIM + 2);
  Text subjPredRow = new Text(TripleValueType.subjectpredicate.name() + DELIM + subject + DELIM + predicate + DELIM + 3);

  byte[] empty = new byte[0];
  Key subjKey = new Key(subjRow.getBytes(), empty, empty, empty, 1);
  Key predKey = new Key(predRow.getBytes(), empty, empty, empty, 1);
  Key subjPredKey = new Key(subjPredRow.getBytes(), empty, empty, empty, 1);

  new MapDriver<Key, Value, CompositeType, TripleCard>()
      .withMapper(new JoinSelectProspectOutput.CardinalityMapper())
      .withInput(subjKey, new Value("25".getBytes()))
      .withInput(predKey, new Value("47".getBytes()))
      .withInput(subjPredKey, new Value("15".getBytes()))
      .withOutput(new CompositeType(subject, 1), new TripleCard(new CardinalityType(25, "subject", 1)))
      .withOutput(new CompositeType(predicate, 1), new TripleCard(new CardinalityType(47, "predicate", 2)))
      .withOutput(new CompositeType(subject + DELIM + predicate, 1), new TripleCard(new CardinalityType(15, "subjectpredicate", 3)))
      .runTest();
}
 
Example 11
Source Project: rya   Source File: JoinSelectProspectOutputTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testOutput() throws InterruptedException, IOException {
    // Three prospect-table rows map to (CompositeType, TripleCard) pairs;
    // the subjectpredicate row carries a long timestamp that must be parsed
    // into the expected CardinalityType.
    String subject = "urn:gem:etype#1234";
    String predicate = "urn:gem#pred";
    String timestamp = "798497748386999999";

    Text subjRow = new Text(TripleValueType.subject.name() + DELIM + subject + DELIM + 1);
    Text predRow = new Text(TripleValueType.predicate.name() + DELIM + predicate + DELIM + 2);
    Text subjPredRow = new Text(TripleValueType.subjectpredicate.name() + DELIM + subject + DELIM + predicate + DELIM + timestamp);

    byte[] empty = new byte[0];
    Key subjKey = new Key(subjRow.getBytes(), empty, empty, empty, 1);
    Key predKey = new Key(predRow.getBytes(), empty, empty, empty, 1);
    Key subjPredKey = new Key(subjPredRow.getBytes(), empty, empty, empty, 1);

    new MapDriver<Key, Value, CompositeType, TripleCard>()
            .withMapper(new JoinSelectProspectOutput.CardinalityMapper())
            .withInput(subjKey, new Value("25".getBytes()))
            .withInput(predKey, new Value("47".getBytes()))
            .withInput(subjPredKey, new Value("15".getBytes()))
            .withOutput(new CompositeType(subject, 1), new TripleCard(new CardinalityType(25, "subject", 1)))
            .withOutput(new CompositeType(predicate, 1), new TripleCard(new CardinalityType(47, "predicate", 2)))
            .withOutput(new CompositeType(subject + DELIM + predicate, 1),
                    new TripleCard(new CardinalityType(15, "subjectpredicate", Long.parseLong(timestamp))))
            .runTest();
}
 
Example 12
Source Project: rya   Source File: JoinSelectStatisticsSumTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testFullTripleEntry() throws InterruptedException, IOException {
  // A fully-specified triple entry should emit its subject-only and
  // predicate-only projections followed by the original entry, each paired
  // with the same CardList.
  TripleEntry fullEntry = new TripleEntry(new Text("urn:gem:etype#1234"), new Text("urn:gem#pred"), new Text("subject"), new Text("predicate"), new Text("object"));
  TripleEntry subjectOnly = new TripleEntry(new Text("urn:gem:etype#1234"), new Text(""), new Text("subject"), new Text(""), new Text("object"));
  TripleEntry predicateOnly = new TripleEntry(new Text("urn:gem#pred"), new Text(""), new Text("predicate"), new Text(""), new Text("object"));
  CardList cards = new CardList(34, 52, 63, 0, 0, 0);

  new MapDriver<TripleEntry, CardList, TripleEntry, CardList>()
      .withMapper(new JoinSelectStatisticsSum.CardinalityIdentityMapper())
      .withInput(fullEntry, cards)
      .withOutput(subjectOnly, cards)
      .withOutput(predicateOnly, cards)
      .withOutput(fullEntry, cards)
      .runTest();
}
 
Example 13
Source Project: rya   Source File: ForwardChainTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testFileMapperOutput() throws Exception {
    // Reading fact X_SUB_Y from a file should emit it once per resource it
    // mentions: subject x and object y.
    ResourceWritable subjectNode = new ResourceWritable();
    subjectNode.set(TestUtils.uri("x"));
    ResourceWritable objectNode = new ResourceWritable();
    objectNode.set(TestUtils.uri("y"));
    new MapDriver<Fact, NullWritable, ResourceWritable, Fact>()
        .withMapper(new ForwardChain.FileMapper(schema))
        .withInput(X_SUB_Y, NullWritable.get())
        .withOutput(subjectNode, X_SUB_Y)
        .withOutput(objectNode, X_SUB_Y)
        .runTest();
}
 
Example 14
Source Project: rya   Source File: DuplicateEliminationTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testFileMapperOutput() throws Exception {
    // A fact read from a file is re-keyed by itself with its derivation.
    MapDriver<Fact, NullWritable, Fact, Derivation> driver = new MapDriver<>();
    driver.withMapper(new DuplicateElimination.DuplicateFileMapper());
    driver.withInput(X_SUB_Y, NullWritable.get());
    driver.withOutput(X_SUB_Y, X_SUB_Y.getDerivation());
    driver.runTest();
}
 
Example 15
Source Project: rya   Source File: DuplicateEliminationTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testRdfMapperOutput() throws Exception {
    // An RDF statement should map to the matching fact, keyed by the fact
    // itself with its derivation as the value.
    RyaStatement statement = TestUtils.ryaStatement("x", "subOrganizationOf", "y");
    RyaStatementWritable writable = new RyaStatementWritable();
    writable.setRyaStatement(statement);
    new MapDriver<LongWritable, RyaStatementWritable, Fact, Derivation>()
        .withMapper(new DuplicateElimination.DuplicateRdfMapper())
        .withInput(new LongWritable(), writable)
        .withOutput(X_SUB_Y, X_SUB_Y.getDerivation())
        .runTest();
}
 
Example 16
Source Project: rya   Source File: DuplicateEliminationTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testInconsistencyMapperOutput() throws Exception {
    // An inconsistency derivation maps to an otherwise-empty fact that
    // carries only that derivation.
    Fact emptyFact = new Fact();
    emptyFact.setDerivation(X_DISJOINT);
    new MapDriver<Derivation, NullWritable, Fact, Derivation>()
        .withMapper(new DuplicateElimination.InconsistencyMapper())
        .withInput(X_DISJOINT, NullWritable.get())
        .withOutput(emptyFact, X_DISJOINT)
        .runTest();
}
 
Example 17
@Before
public void setUp() {
    // Independent drivers for the synthesizer's map and reduce stages.
    mapDriver = MapDriver.newMapDriver(new LouvainTableSynthesizerMapper());
    reduceDriver = ReduceDriver.newReduceDriver(new LouvainTableSynthesizerReducer());
}
 
Example 18
@Before
public void setUp() {
    // Independent drivers for the compression job's map and reduce stages.
    mapDriver = MapDriver.newMapDriver(new CommunityCompression.Map());
    reduceDriver = ReduceDriver.newReduceDriver(new CommunityCompression.Reduce());
}
 
Example 19
Source Project: Kylin   Source File: BaseCuboidMapperTest.java    License: Apache License 2.0 5 votes vote down vote up
@Before
public void setUp() throws Exception {
    createTestMetadata();

    // Refresh "../job/meta" from the test config ("hack for distributed
    // cache" per the original project) before the mapper reads metadata.
    FileUtils.deleteDirectory(new File("../job/meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl()), new File("../job/meta"));

    BaseCuboidMapper<Text> cuboidMapper = new BaseCuboidMapper<Text>();
    mapDriver = MapDriver.newMapDriver(cuboidMapper);
}
 
Example 20
@Before
public void setUp() throws Exception {
  // Driver around the no-transformation mapper; all job config goes through
  // its Configuration object.
  DerivedColumnNoTransformationPhaseMapper mapper = new DerivedColumnNoTransformationPhaseMapper();
  mapDriver = MapDriver.newMapDriver(mapper);
  Configuration configuration = mapDriver.getConfiguration();
  // Allow both Java- and Writable-serialized intermediate values.
  configuration.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization,"
      + "org.apache.hadoop.io.serializer.WritableSerialization");


  // Minimal ThirdEye schema: one table, three dimensions, two metrics, and
  // a time column.
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TABLE_NAME.toString(), "collection");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_DIMENSION_NAMES.toString(), "d1,d2,d3");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_DIMENSION_TYPES.toString(), "STRING,LONG,STRING");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_NAMES.toString(), "m1,m2");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_TYPES.toString(), "INT,INT");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TIMECOLUMN_NAME.toString(), "hoursSinceEpoch");

  // Serialize the assembled config into the job configuration as JSON.
  ThirdEyeConfig thirdeyeConfig = ThirdEyeConfig.fromProperties(props);
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_THIRDEYE_CONFIG.toString(),
      OBJECT_MAPPER.writeValueAsString(thirdeyeConfig));

  // Register Avro serialization for the input schema on the driver.
  Schema inputSchema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(AVRO_SCHEMA));
  setUpAvroSerialization(mapDriver.getConfiguration(), inputSchema);

  // Output schema for the no-transformation phase.
  Schema outputSchema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(NO_TRANSFORMATION_SCHEMA));
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_SCHEMA.toString(),
      outputSchema.toString());

  // NOTE(review): TOPK_PATH is used directly here, while the sibling setUp
  // resolves it via ClassLoader.getSystemResource — confirm which is intended.
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_TOPK_PATH.toString(),
      TOPK_PATH);

  // Fresh temporary output path per test run.
  TemporaryPath tmpPath = new TemporaryPath();
  outputPath = tmpPath.toString();
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_PATH.toString(), outputPath);
}
 
Example 21
@Before
public void setUp() throws Exception {
  // Driver around the transformation mapper; all job config goes through
  // its Configuration object.
  DerivedColumnTransformationPhaseMapper mapper = new DerivedColumnTransformationPhaseMapper();
  mapDriver = MapDriver.newMapDriver(mapper);
  Configuration configuration = mapDriver.getConfiguration();
  // Allow both Java- and Writable-serialized intermediate values.
  configuration.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization,"
      + "org.apache.hadoop.io.serializer.WritableSerialization");

  // ThirdEye schema: one table, three dimensions, two metrics, a time
  // column, plus a top-k constraint on dimension d2 keyed by metric m1.
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TABLE_NAME.toString(), "collection");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_DIMENSION_NAMES.toString(), "d1,d2,d3");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_DIMENSION_TYPES.toString(), "STRING,LONG,STRING");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_NAMES.toString(), "m1,m2");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_METRIC_TYPES.toString(), "INT,INT");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TIMECOLUMN_NAME.toString(), "hoursSinceEpoch");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TOPK_DIMENSION_NAMES.toString(), "d2,");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TOPK_METRICS.toString() + ".d2", "m1");
  props.setProperty(ThirdEyeConfigProperties.THIRDEYE_TOPK_KVALUES.toString() + ".d2", "1");

  // Serialize the assembled config into the job configuration as JSON.
  ThirdEyeConfig thirdeyeConfig = ThirdEyeConfig.fromProperties(props);
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_THIRDEYE_CONFIG.toString(),
      OBJECT_MAPPER.writeValueAsString(thirdeyeConfig));

  // Register Avro serialization for the input schema on the driver.
  Schema inputSchema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(AVRO_SCHEMA));
  setUpAvroSerialization(mapDriver.getConfiguration(), inputSchema);

  // Output schema for the transformation phase.
  Schema outputSchema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(TRANSFORMATION_SCHEMA));
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_SCHEMA.toString(),
      outputSchema.toString());

  // Top-k data is resolved from the classpath here (unlike the sibling
  // no-transformation setUp, which passes TOPK_PATH verbatim).
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_TOPK_PATH.toString(),
      ClassLoader.getSystemResource(TOPK_PATH).toString());

  // Fresh temporary output path per test run.
  TemporaryPath tmpPath = new TemporaryPath();
  outputPath = tmpPath.toString();
  configuration.set(DerivedColumnTransformationPhaseConstants.DERIVED_COLUMN_TRANSFORMATION_PHASE_OUTPUT_PATH.toString(), outputPath);

}
 
Example 22
Source Project: ApprovalTests.Java   Source File: HadoopApprovals.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Runs {@code mapper} alone on one (key, input) pair, sorts the output, and
 * approves it via ApprovalTests.
 */
public static void verifyMapping(SmartMapper mapper, Object key, Object input) throws Exception
{
  MapDriver driver = new MapDriver();
  driver.setMapper(mapper);
  // Convert the raw key/value into the mapper's declared writable types.
  driver.withInput(
      WritableUtils.createWritable(key, mapper.getKeyInType()),
      WritableUtils.createWritable(input, mapper.getValueInType()));
  List mapped = driver.run();
  Collections.sort(mapped, PairComparer.INSTANCE);
  String header = String.format("[%s]\n\n -> maps via %s to -> \n", input, mapper.getClass().getSimpleName());
  Approvals.verifyAll(header, mapped, Echo.INSTANCE);
}
 
Example 23
Source Project: kylin-on-parquet-v2   Source File: CubeHFileMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
public void setUp() {
    // Fresh mapper per test keeps driver state isolated.
    mapDriver = MapDriver.newMapDriver(new CubeHFileMapper());
}
 
Example 24
Source Project: kylin   Source File: CubeHFileMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
public void setUp() {
    // Fresh mapper per test keeps driver state isolated.
    mapDriver = MapDriver.newMapDriver(new CubeHFileMapper());
}
 
Example 25
Source Project: rya   Source File: JoinSelectMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Test
public void testOutput() throws TripleRowResolverException, IOException {

  // One statement plus every pairwise/single concatenation of its
  // components, used as expected CompositeType keys below.
  RyaStatement rya = new RyaStatement(new RyaIRI("urn:gem:etype#1234"), new RyaIRI("urn:gem#pred"), new RyaType("mydata1"));
  Text s = new Text(rya.getSubject().getData());
  Text p = new Text(rya.getPredicate().getData());
  Text o = new Text(rya.getObject().getData());
  Text sp = new Text(rya.getSubject().getData() + DELIM + rya.getPredicate().getData());
  Text so = new Text(rya.getSubject().getData() + DELIM + rya.getObject().getData());
  Text po = new Text(rya.getPredicate().getData() + DELIM + rya.getObject().getData());
  Text ps = new Text(rya.getPredicate().getData() + DELIM + rya.getSubject().getData());
  Text op = new Text(rya.getObject().getData() + DELIM + rya.getPredicate().getData());
  Text os = new Text(rya.getObject().getData() + DELIM + rya.getSubject().getData());

  // Expected TripleEntry payloads: t1-t3 rotate (subject, predicate,
  // object); t4-t9 are single-component entries tagged with the remaining
  // two-component combination name.
  TripleEntry t1 = new TripleEntry(s, p, new Text("subject"), new Text("predicate"), new Text("object"));
  TripleEntry t2 = new TripleEntry(p, o, new Text("predicate"), new Text("object"), new Text("subject"));
  TripleEntry t3 = new TripleEntry(o, s, new Text("object"), new Text("subject"), new Text("predicate"));
  TripleEntry t4 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("subjectpredicate"));
  TripleEntry t5 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("objectsubject"));
  TripleEntry t6 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("predicateobject"));
  TripleEntry t7 = new TripleEntry(s, new Text(""), new Text("subject"), new Text(""), new Text("objectpredicate"));
  TripleEntry t8 = new TripleEntry(p, new Text(""), new Text("predicate"), new Text(""), new Text("subjectobject"));
  TripleEntry t9 = new TripleEntry(o, new Text(""), new Text("object"), new Text(""), new Text("predicatesubject"));

  // Serialize the statement into its SPO table row to build the map input.
  TripleRowResolver trr = new WholeRowTripleResolver();
  Map<TABLE_LAYOUT,TripleRow> map = trr.serialize(rya);
  System.out.println(map);
  TripleRow tr = map.get(TABLE_LAYOUT.SPO);
  System.out.println("Triple row is" + tr);
  System.out.println("ColumnV is " + tr.getTimestamp());
  byte[] b = new byte[0];
  Key key = new Key(tr.getRow(), tr.getColumnFamily(), tr.getColumnQualifier(), b, 1);
  Value val = new Value(b);

  // NOTE(review): the key/payload pairings are deliberately crossed (key o
  // with t1, key sp with t4, etc.) and runTest compares outputs in order by
  // default — preserve both pairing and order when editing.
  new MapDriver<Key,Value,CompositeType,TripleCard>().withMapper(new JoinSelectSpoTableOutput.JoinSelectMapper()).withInput(key, val)
      .withOutput(new CompositeType(o, new IntWritable(2)), new TripleCard(t1)).withOutput(new CompositeType(s, new IntWritable(2)), new TripleCard(t2))
      .withOutput(new CompositeType(p, new IntWritable(2)), new TripleCard(t3)).withOutput(new CompositeType(po, new IntWritable(2)), new TripleCard(t6))
      .withOutput(new CompositeType(so, new IntWritable(2)), new TripleCard(t5)).withOutput(new CompositeType(sp, new IntWritable(2)), new TripleCard(t4))
      .withOutput(new CompositeType(op, new IntWritable(2)), new TripleCard(t7)).withOutput(new CompositeType(os, new IntWritable(2)), new TripleCard(t8))
      .withOutput(new CompositeType(ps, new IntWritable(2)), new TripleCard(t9)).runTest();

}
 
Example 26
Source Project: Kylin   Source File: CubeHFileMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
public void setUp() {
    // Fresh mapper per test keeps driver state isolated.
    mapDriver = MapDriver.newMapDriver(new CubeHFileMapper());
}
 
Example 27
Source Project: Kylin   Source File: RangeKeyDistributionMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
public void setUp() {
    // Fresh mapper per test keeps driver state isolated.
    mapDriver = MapDriver.newMapDriver(new RangeKeyDistributionMapper());
}
 
Example 28
Source Project: Kylin   Source File: RandomKeyDistributionMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
@SuppressWarnings({ "rawtypes", "unchecked" })
public void setUp() {
    // The mapper is used raw here, hence the rawtypes/unchecked suppression.
    mapDriver = MapDriver.newMapDriver(new RandomKeyDistributionMapper());
}
 
Example 29
Source Project: Kylin   Source File: MergeCuboidMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@Before
public void setUp() throws Exception {

    createTestMetadata();

    logger.info("The metadataUrl is : " + getTestConfig());

    // Drop all manager caches so this test sees freshly-loaded metadata.
    MetadataManager.clearCache();
    CubeManager.clearCache();
    ProjectManager.clearCache();
    DictionaryManager.clearCache();

    // hack for distributed cache
    // CubeManager.removeInstance(KylinConfig.createInstanceFromUri("../job/meta"));//to
    // make sure the following mapper could get latest CubeManger
    FileUtils.deleteDirectory(new File("../job/meta"));

    MergeCuboidMapper mapper = new MergeCuboidMapper();
    mapDriver = MapDriver.newMapDriver(mapper);

    // Resolve the two-segment cube and the three column refs used below:
    // lfn gets a per-segment dictionary, lsi/ssc share one dictionary.
    cubeManager = CubeManager.getInstance(getTestConfig());
    cube = cubeManager.getCube("test_kylin_cube_without_slr_left_join_ready_2_segments");
    dictionaryManager = DictionaryManager.getInstance(getTestConfig());
    lfn = cube.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
    lsi = cube.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "CAL_DT");
    ssc = cube.getDescriptor().findColumnRef("DEFAULT.TEST_CATEGORY_GROUPINGS", "META_CATEG_NAME");

    DictionaryInfo sharedDict = makeSharedDict();

    boolean isFirstSegment = true;
    for (CubeSegment segment : cube.getSegments()) {

        // Fake table signature so each segment gets a distinct dictionary path.
        TableSignature signature = new TableSignature();
        signature.setSize(100);
        signature.setLastModifiedTime(System.currentTimeMillis());
        signature.setPath("fake_dict_for" + lfn.getName() + segment.getName());

        DictionaryInfo newDictInfo = new DictionaryInfo(lfn.getTable(), lfn.getColumn().getName(), lfn.getColumn().getZeroBasedIndex(), "string", signature, "");

        // Both segments contain "aaa"; the first adds "ccc", the second "bbb",
        // so the per-segment dictionaries deliberately diverge.
        List<byte[]> values = new ArrayList<byte[]>();
        values.add(new byte[] { 97, 97, 97 });
        if (isFirstSegment)
            values.add(new byte[] { 99, 99, 99 });
        else
            values.add(new byte[] { 98, 98, 98 });
        Dictionary<?> dict = DictionaryGenerator.buildDictionaryFromValueList(newDictInfo, values);
        dictionaryManager.trySaveNewDict(dict, newDictInfo);
        ((TrieDictionary) dict).dump(System.out);

        // Attach the per-segment dictionary for lfn and the shared one for
        // lsi/ssc, then persist the cube.
        segment.putDictResPath(lfn, newDictInfo.getResourcePath());
        segment.putDictResPath(lsi, sharedDict.getResourcePath());
        segment.putDictResPath(ssc, sharedDict.getResourcePath());

        // cubeManager.saveResource(segment.getCubeInstance());
        // cubeManager.afterCubeUpdated(segment.getCubeInstance());
        cubeManager.updateCube(cube);

        isFirstSegment = false;
    }

}
 
Example 30
Source Project: Kylin   Source File: ColumnCardinalityMapperTest.java    License: Apache License 2.0 4 votes vote down vote up
@SuppressWarnings({ "rawtypes", "unchecked" })
@Before
public void setUp() {
    ColumnCardinalityMapper mapper = new ColumnCardinalityMapper();
    mapDriver = MapDriver.newMapDriver(mapper);
}