Java Code Examples for org.apache.hadoop.io.compress.DefaultCodec#setConf()
The following examples show how to use
org.apache.hadoop.io.compress.DefaultCodec#setConf().
Each example is taken from an open-source project; you can reach the original project or source file by following the links above it.
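All of the examples below share the same basic pattern: construct the codec, inject a Configuration with setConf(), and only then ask the codec for compressors or streams. As a minimal, self-contained sketch of that pattern (the class name and payload are illustrative, not taken from any project below), a DefaultCodec can round-trip a byte buffer entirely in memory:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class DefaultCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    // DefaultCodec implements Configurable; inject the Configuration
    // before creating any streams.
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());

    // Compress a small payload into an in-memory buffer.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(buffer);
    out.write("hello, codec".getBytes("UTF-8"));
    out.close();

    // Decompress and print the original payload.
    CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    byte[] decompressed = new byte[64];
    int n = in.read(decompressed);
    in.close();
    System.out.println(new String(decompressed, 0, n, "UTF-8"));
  }
}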
Example 1
Source File: TestIFile.java, from the hadoop project (Apache License 2.0)
@Test
/**
 * Create an IFile.Writer using GzipCodec since this code does not
 * have a compressor when run via the tests (ie no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path),
                                   Text.class, Text.class, codec, null);
  writer.close();
}
Example 2
Source File: TestIFile.java, from the hadoop project (Apache License 2.0)
@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in,
                                   rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();

  // test check sum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
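Note that in both IFile tests the codec variable is declared as DefaultCodec but instantiated as GzipCodec; this compiles because GzipCodec extends DefaultCodec in Hadoop. Either way, the codec is constructed directly rather than obtained from a CompressionCodecFactory, so the Configuration must be injected explicitly via setConf(): DefaultCodec implements Hadoop's Configurable interface, and it typically cannot create compressors or streams until a Configuration has been set.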
Example 3
Source File: TestIFile.java, from the big-c project (Apache License 2.0)
@Test
/**
 * Create an IFile.Writer using GzipCodec since this code does not
 * have a compressor when run via the tests (ie no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path),
                                   Text.class, Text.class, codec, null);
  writer.close();
}
Example 4
Source File: TestIFile.java, from the big-c project (Apache License 2.0)
@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in,
                                   rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();

  // test check sum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
Example 5
Source File: TestReduceTask.java, from the hadoop project (Apache License 2.0)
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
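Here the codec under test is a plain DefaultCodec rather than the GzipCodec used above; DefaultCodec wraps zlib/DEFLATE compression (its default file extension is .deflate), a common choice for intermediate data such as the reduce-side files exercised by runValueIterator().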
Example 6
Source File: TestReduceTask.java, from the big-c project (Apache License 2.0)
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 7
Source File: TestRecordWriterManager.java, from the datacollector project (Apache License 2.0)
@Test
public void testSeqFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testSeqFile(codec, SequenceFile.CompressionType.RECORD);
  testSeqFile(codec, SequenceFile.CompressionType.BLOCK);
}
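The testSeqFile() helper is project-specific and not shown here. As a rough, self-contained sketch of what writing a block-compressed SequenceFile with a configured DefaultCodec can look like (the path, key/value types, and record contents are assumptions for illustration, not taken from the datacollector project):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class SeqFileCompressionSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);

    // Illustrative local path; any writable filesystem location works.
    Path path = new Path("/tmp/compressed.seq");

    // Write one record into a block-compressed SequenceFile.
    SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(path),
        SequenceFile.Writer.keyClass(IntWritable.class),
        SequenceFile.Writer.valueClass(Text.class),
        SequenceFile.Writer.compression(
            SequenceFile.CompressionType.BLOCK, codec));
    writer.append(new IntWritable(1), new Text("first record"));
    writer.close();
  }
}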
Example 8
Source File: TestReduceTask.java, from the RDFS project (Apache License 2.0)
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 9
Source File: TestReduceTask.java, from the hadoop-gpu project (Apache License 2.0)
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 10
Source File: TestRecordWriterManager.java, from the datacollector project (Apache License 2.0)
@Test
public void testTextFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testTextFile(codec);
}