Java Code Examples for org.apache.hadoop.io.Text#equals()
The following examples show how to use org.apache.hadoop.io.Text#equals(). They are drawn from open source projects; the source file, originating project, and license are noted above each example.
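Before walking through the project examples, note what Text#equals() actually compares. Text inherits equals() from BinaryComparable, so two Text instances are equal exactly when their serialized UTF-8 bytes match, regardless of object identity; a Text never equals a plain java.lang.String. A minimal, self-contained sketch (the class name and sample values are illustrative, not taken from the projects below):

import org.apache.hadoop.io.Text;

public class TextEqualsSketch {
  public static void main(String[] args) {
    Text a = new Text("hdfs://nn:8020");
    Text b = new Text("hdfs://nn:8020");
    // equals() compares the underlying UTF-8 bytes, not references
    System.out.println(a.equals(b));                 // true
    System.out.println(a.equals(new Text("other"))); // false
    // a Text is never equal to a String, even with identical characters
    System.out.println(a.equals("hdfs://nn:8020"));  // false
  }
}

This byte-level equality is what lets the token selectors below match a requested service (a Text) directly against token.getService().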
Example 1
Source File: ClientHSTokenSelector.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked") public Token<MRDelegationTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } LOG.debug("Looking for a token with service " + service.toString()); for (Token<? extends TokenIdentifier> token : tokens) { if (LOG.isDebugEnabled()) { LOG.debug("Token kind is " + token.getKind().toString() + " and the token's service name is " + token.getService()); } if (MRDelegationTokenIdentifier.KIND_NAME.equals(token.getKind()) && service.equals(token.getService())) { return (Token<MRDelegationTokenIdentifier>) token; } } return null; }
Example 2
Source File: AuthenticationTokenSelector.java From hbase with Apache License 2.0
@Override
public Token<AuthenticationTokenIdentifier> selectToken(Text serviceName,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (serviceName != null) {
    for (Token ident : tokens) {
      if (serviceName.equals(ident.getService())
          && AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE.equals(ident.getKind())) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Returning token " + ident);
        }
        return (Token<AuthenticationTokenIdentifier>) ident;
      }
    }
  }
  LOG.debug("No matching token found");
  return null;
}
Example 3
Source File: NMTokenSelector.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") @Override public Token<NMTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } for (Token<? extends TokenIdentifier> token : tokens) { if (LOG.isDebugEnabled()) { LOG.info("Looking for service: " + service + ". Current token is " + token); } if (NMTokenIdentifier.KIND.equals(token.getKind()) && service.equals(token.getService())) { return (Token<NMTokenIdentifier>) token; } } return null; }
Example 4
Source File: ContainerTokenSelector.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") @Override public Token<ContainerTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } for (Token<? extends TokenIdentifier> token : tokens) { if (LOG.isDebugEnabled()) { LOG.info("Looking for service: " + service + ". Current token is " + token); } if (ContainerTokenIdentifier.KIND.equals(token.getKind()) && service.equals(token.getService())) { return (Token<ContainerTokenIdentifier>) token; } } return null; }
Example 5
Source File: JobTokenSelector.java From incubator-tez with Apache License 2.0
@SuppressWarnings("unchecked") @Override public Token<JobTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } for (Token<? extends TokenIdentifier> token : tokens) { if (JobTokenIdentifier.KIND_NAME.equals(token.getKind()) && service.equals(token.getService())) { return (Token<JobTokenIdentifier>) token; } } return null; }
Example 6
Source File: JobTokenSelector.java From tez with Apache License 2.0
@SuppressWarnings("unchecked") @Override public Token<JobTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } for (Token<? extends TokenIdentifier> token : tokens) { if (JobTokenIdentifier.KIND_NAME.equals(token.getKind()) && service.equals(token.getService())) { return (Token<JobTokenIdentifier>) token; } } return null; }
Example 7
Source File: TokenAspect.java From hadoop with Apache License 2.0
private static String getSchemeByKind(Text kind) {
  if (kind.equals(HftpFileSystem.TOKEN_KIND)) {
    return HftpFileSystem.SCHEME;
  } else if (kind.equals(HsftpFileSystem.TOKEN_KIND)) {
    return HsftpFileSystem.SCHEME;
  } else if (kind.equals(WebHdfsFileSystem.TOKEN_KIND)) {
    return WebHdfsFileSystem.SCHEME;
  } else if (kind.equals(SWebHdfsFileSystem.TOKEN_KIND)) {
    return SWebHdfsFileSystem.SCHEME;
  } else {
    throw new IllegalArgumentException("Unsupported scheme");
  }
}
Example 8
Source File: TokenAspect.java From hadoop with Apache License 2.0
@Override
public boolean handleKind(Text kind) {
  return kind.equals(HftpFileSystem.TOKEN_KIND)
      || kind.equals(HsftpFileSystem.TOKEN_KIND)
      || kind.equals(WebHdfsFileSystem.TOKEN_KIND)
      || kind.equals(SWebHdfsFileSystem.TOKEN_KIND);
}
Example 9
Source File: JobTokenSelector.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") @Override public Token<JobTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } for (Token<? extends TokenIdentifier> token : tokens) { if (JobTokenIdentifier.KIND_NAME.equals(token.getKind()) && service.equals(token.getService())) { return (Token<JobTokenIdentifier>) token; } } return null; }
Example 10
Source File: StramDelegationTokenSelector.java From attic-apex-core with Apache License 2.0
@Override
public Token<StramDelegationTokenIdentifier> selectToken(Text text,
    Collection<Token<? extends TokenIdentifier>> clctn) {
  Token<StramDelegationTokenIdentifier> token = null;
  if (text != null) {
    for (Token<? extends TokenIdentifier> ctoken : clctn) {
      if (StramDelegationTokenIdentifier.IDENTIFIER_KIND.equals(ctoken.getKind())
          && text.equals(ctoken.getService())) {
        token = (Token<StramDelegationTokenIdentifier>) ctoken;
      }
    }
  }
  return token;
}
Example 11
Source File: LoadGeneratorMR.java From hadoop with Apache License 2.0
@Override
public void reduce(Text key, Iterator<IntWritable> values,
    OutputCollector<Text, IntWritable> output, Reporter reporter)
    throws IOException {
  int sum = 0;
  while (values.hasNext()) {
    sum += values.next().get();
  }
  if (key.equals(OPEN_EXECTIME)) {
    executionTime[OPEN] = sum;
  } else if (key.equals(NUMOPS_OPEN)) {
    numOfOps[OPEN] = sum;
  } else if (key.equals(LIST_EXECTIME)) {
    executionTime[LIST] = sum;
  } else if (key.equals(NUMOPS_LIST)) {
    numOfOps[LIST] = sum;
  } else if (key.equals(DELETE_EXECTIME)) {
    executionTime[DELETE] = sum;
  } else if (key.equals(NUMOPS_DELETE)) {
    numOfOps[DELETE] = sum;
  } else if (key.equals(CREATE_EXECTIME)) {
    executionTime[CREATE] = sum;
  } else if (key.equals(NUMOPS_CREATE)) {
    numOfOps[CREATE] = sum;
  } else if (key.equals(WRITE_CLOSE_EXECTIME)) {
    System.out.println(WRITE_CLOSE_EXECTIME + " = " + sum);
    executionTime[WRITE_CLOSE] = sum;
  } else if (key.equals(NUMOPS_WRITE_CLOSE)) {
    numOfOps[WRITE_CLOSE] = sum;
  } else if (key.equals(TOTALOPS)) {
    totalOps = sum;
  } else if (key.equals(ELAPSED_TIME)) {
    totalTime = sum;
  }
  result.set(sum);
  output.collect(key, result);
  // System.out.println("Key = " + key + " Sum is =" + sum);
  // printResults(System.out);
}
Example 12
Source File: LoadGeneratorMR.java From big-c with Apache License 2.0
@Override
public void reduce(Text key, Iterator<IntWritable> values,
    OutputCollector<Text, IntWritable> output, Reporter reporter)
    throws IOException {
  int sum = 0;
  while (values.hasNext()) {
    sum += values.next().get();
  }
  if (key.equals(OPEN_EXECTIME)) {
    executionTime[OPEN] = sum;
  } else if (key.equals(NUMOPS_OPEN)) {
    numOfOps[OPEN] = sum;
  } else if (key.equals(LIST_EXECTIME)) {
    executionTime[LIST] = sum;
  } else if (key.equals(NUMOPS_LIST)) {
    numOfOps[LIST] = sum;
  } else if (key.equals(DELETE_EXECTIME)) {
    executionTime[DELETE] = sum;
  } else if (key.equals(NUMOPS_DELETE)) {
    numOfOps[DELETE] = sum;
  } else if (key.equals(CREATE_EXECTIME)) {
    executionTime[CREATE] = sum;
  } else if (key.equals(NUMOPS_CREATE)) {
    numOfOps[CREATE] = sum;
  } else if (key.equals(WRITE_CLOSE_EXECTIME)) {
    System.out.println(WRITE_CLOSE_EXECTIME + " = " + sum);
    executionTime[WRITE_CLOSE] = sum;
  } else if (key.equals(NUMOPS_WRITE_CLOSE)) {
    numOfOps[WRITE_CLOSE] = sum;
  } else if (key.equals(TOTALOPS)) {
    totalOps = sum;
  } else if (key.equals(ELAPSED_TIME)) {
    totalTime = sum;
  }
  result.set(sum);
  output.collect(key, result);
  // System.out.println("Key = " + key + " Sum is =" + sum);
  // printResults(System.out);
}
Example 13
Source File: CopyListing.java From circus-train with Apache License 2.0
/**
 * Validate the final resulting path listing. Checks if there are duplicate entries. If
 * preserving ACLs, checks that the file system can support ACLs. If preserving XAttrs,
 * checks that the file system can support XAttrs.
 *
 * @param pathToListFile path listing built by doBuildListing
 * @param options input options to S3MapReduceCp
 * @throws IOException on any issue while checking for duplicates
 * @throws DuplicateFileException if there are duplicates
 */
private void validateFinalListing(Path pathToListFile, S3MapReduceCpOptions options)
    throws DuplicateFileException, IOException {
  Configuration config = getConf();
  FileSystem fs = pathToListFile.getFileSystem(config);
  Path sortedList = sortListing(fs, config, pathToListFile);
  SequenceFile.Reader reader = new SequenceFile.Reader(config,
      SequenceFile.Reader.file(sortedList));
  try {
    Text lastKey = new Text("*"); // source relative path can never hold *
    CopyListingFileStatus lastFileStatus = new CopyListingFileStatus();
    Text currentKey = new Text();
    while (reader.next(currentKey)) {
      if (currentKey.equals(lastKey)) {
        CopyListingFileStatus currentFileStatus = new CopyListingFileStatus();
        reader.getCurrentValue(currentFileStatus);
        throw new DuplicateFileException("File " + lastFileStatus.getPath() + " and "
            + currentFileStatus.getPath() + " would cause duplicates. Aborting");
      }
      reader.getCurrentValue(lastFileStatus);
      lastKey.set(currentKey);
    }
  } finally {
    IOUtils.closeStream(reader);
  }
}
Example 14
Source File: TokenExtractor.java From NNAnalytics with Apache License 2.0
/**
 * Extract the last seen DelegationTokens from FSNamesystem.
 *
 * @return map of user names to last timestamp of token seen
 */
public Map<String, Long> getTokenLastLogins() {
  if (fsn == null || dtsm == null) {
    return new HashMap<String, Long>() {
      {
        put("hdfs", System.currentTimeMillis());
        put("n/a", -1L);
      }
    };
  }
  Map<String, Long> lastLogins = new HashMap<>();
  fsn.writeLock();
  try {
    Set<Map.Entry<DelegationTokenIdentifier, DelegationTokenInformation>> entries =
        dtsm.currentTokens.entrySet();
    for (Map.Entry<DelegationTokenIdentifier, DelegationTokenInformation> entry : entries) {
      Text owner = entry.getKey().getOwner();
      Text realUser = entry.getKey().getRealUser();
      String ownerStr = new KerberosName(owner.toString()).getServiceName();
      long time = entry.getKey().getIssueDate();
      lastLogins.put(ownerStr, time);
      if ((realUser != null) && (!realUser.toString().isEmpty())
          && !realUser.equals(owner)) {
        String realUserStr = new KerberosName(realUser.toString()).getServiceName();
        lastLogins.put(realUserStr, time);
      }
    }
    return lastLogins;
  } finally {
    fsn.writeUnlock();
  }
}
Example 15
Source File: IdentityAggregator.java From datawave with Apache License 2.0
protected boolean samePointer(Text row, ByteSequence pointer, Key key) {
  if (row.equals(key.getRow())) {
    ByteSequence pointer2 = parsePointer(key.getColumnQualifierData());
    return (pointer.equals(pointer2));
  }
  return false;
}
Example 16
Source File: IndexStatsSummingIterator.java From datawave with Apache License 2.0
@Override
public void next() throws IOException {
  if (src.hasTop()) {
    Key srcTK = src.getTopKey();
    Text workingRow = srcTK.getRow();
    Text currentRow = srcTK.getRow();
    long sumUnique = 0;
    long sumCount = 0;
    while (workingRow.equals(currentRow)) {
      tuple.readFields(new DataInputStream(new ByteArrayInputStream(src.getTopValue().get())));
      sumUnique += tuple.getNumberOfUniqueWords().get();
      sumCount += tuple.getWordCount().get();
      src.next();
      if (src.hasTop()) {
        srcTK = src.getTopKey();
        srcTK.getRow(currentRow);
      } else {
        break;
      }
    }
    summedValues.setNumberOfUniqueWords(sumUnique);
    summedValues.setWordCount(sumCount);
    tk = new Key(workingRow);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    summedValues.write(new DataOutputStream(baos));
    tv = new Value(baos.toByteArray());
  } else {
    tk = null;
    tv = null;
  }
}
Example 17
Source File: IndexStatsCombiningIterator.java From datawave with Apache License 2.0
@Override
public void next() throws IOException {
  if (src.hasTop()) {
    Key srcTK = src.getTopKey();
    Text workingRow = srcTK.getRow();
    Text currentRow = srcTK.getRow();
    long sumUnique = 0;
    long sumCount = 0;
    while (workingRow.equals(currentRow)) {
      tuple.readFields(new DataInputStream(new ByteArrayInputStream(src.getTopValue().get())));
      sumUnique += tuple.getNumberOfUniqueWords().get();
      sumCount += tuple.getWordCount().get();
      src.next();
      if (src.hasTop()) {
        srcTK = src.getTopKey();
        srcTK.getRow(currentRow);
      } else {
        break;
      }
    }
    weight.set(((double) sumUnique) / ((double) sumCount));
    tk = new Key(workingRow);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    weight.write(new DataOutputStream(baos));
    tv = new Value(baos.toByteArray());
  } else {
    tk = null;
    tv = null;
  }
}
Example 18
Source File: JsonDataValidationReducer.java From jumbune with GNU Lesser General Public License v3.0
public void reduce(Text key, Iterable<FileKeyViolationBean> values, Context context)
    throws IOException, InterruptedException {
  if (!key.equals(ok)) {
    createDirectory(key);
  }
  ArrayList<ReducerViolationBean> list = new ArrayList<ReducerViolationBean>();
  Integer totalKey = 0, totalLine = 0;
  for (FileKeyViolationBean value : values) {
    Integer size = value.getViolationList().size();
    Long splitEndOffset = value.getSplitEndOffset().get();
    Long totalRecEmiByMap = value.getTotalRecordsEmittByMap().get();
    String fileName = value.getFileName().toString();
    FileOffsetKey fileOffsetKey = new FileOffsetKey(fileName, splitEndOffset);
    offsetLinesMap.put(fileOffsetKey, totalRecEmiByMap);
    ArrayListWritable<JsonLineViolationBean> alw = value.getViolationList();
    Integer keyViolationSize = 0;
    for (JsonLineViolationBean jslb : alw) {
      Integer violationSize = jslb.getJsonKeyViolationList().size();
      List<JsonKeyViolationBean> jsonKVB = jslb.getJsonKeyViolationList();
      for (JsonKeyViolationBean jskvb : jsonKVB) {
        if (!key.equals(ok)) {
          ViolationPersistenceBean bean = null;
          if (key.equals(regexKey)) {
            bean = new ViolationPersistenceBean(Integer.parseInt(jskvb.getLineNumber().toString()),
                jskvb.getJsonNode().toString(), jskvb.getExpectedValue().toString(),
                jskvb.getActualValue().toString(), regexKey.toString(),
                value.getFileName().toString(), splitEndOffset);
            regexArray.add(bean);
          } else if (key.equals(missingKey)) {
            bean = new ViolationPersistenceBean(Integer.parseInt(jskvb.getLineNumber().toString()),
                jskvb.getJsonNode().toString(), jskvb.getExpectedValue().toString(),
                jskvb.getActualValue().toString(), missingKey.toString(),
                value.getFileName().toString(), splitEndOffset);
            missingArray.add(bean);
          } else if (key.equals(dataKey)) {
            bean = new ViolationPersistenceBean(Integer.parseInt(jskvb.getLineNumber().toString()),
                jskvb.getJsonNode().toString(), jskvb.getExpectedValue().toString(),
                jskvb.getActualValue().toString(), dataKey.toString(),
                value.getFileName().toString(), splitEndOffset);
            dataArray.add(bean);
          } else if (key.equals(jsonSchemaKey)) {
            bean = new ViolationPersistenceBean(Integer.parseInt(jskvb.getLineNumber().toString()),
                jskvb.getJsonNode().toString(), jskvb.getExpectedValue().toString(),
                jskvb.getActualValue().toString(), jsonSchemaKey.toString(),
                value.getFileName().toString(), splitEndOffset);
            schemaArray.add(bean);
          } else if (key.equals(nullKey)) {
            bean = new ViolationPersistenceBean(Integer.parseInt(jskvb.getLineNumber().toString()),
                jskvb.getJsonNode().toString(), jskvb.getExpectedValue().toString(),
                jskvb.getActualValue().toString(), nullKey.toString(),
                value.getFileName().toString(), splitEndOffset);
            nullTypeArray.add(bean);
          }
        }
      }
      keyViolationSize = keyViolationSize + violationSize;
    }
    totalKey = totalKey + keyViolationSize;
    totalLine = totalLine + size;
    Text text = new Text(value.getFileName());
    ReducerViolationBean reducerViolationBean = new ReducerViolationBean();
    reducerViolationBean.setFileName(text);
    reducerViolationBean.setSize(new IntWritable(keyViolationSize));
    list.add(reducerViolationBean);
  }
  ArrayListWritable<ReducerViolationBean> awb = new ArrayListWritable<ReducerViolationBean>(list);
  TotalReducerViolationBean totalReducerViolationBean = new TotalReducerViolationBean();
  totalReducerViolationBean.setReducerViolationBeanList(awb);
  totalReducerViolationBean.setTotalLineViolation(new IntWritable(totalLine));
  totalReducerViolationBean.setTotalKeyViolation(new IntWritable(totalKey));
  context.write(key, totalReducerViolationBean);
}
Example 19
Source File: ContinuousQuery.java From accumulo-examples with Apache License 2.0
private static ArrayList<Text[]> findRandomTerms(Scanner scanner, int numTerms) {
  Text currentRow = null;
  ArrayList<Text> words = new ArrayList<>();
  ArrayList<Text[]> ret = new ArrayList<>();
  Random rand = new Random();
  for (Entry<Key,Value> entry : scanner) {
    Key key = entry.getKey();
    if (currentRow == null)
      currentRow = key.getRow();
    if (!currentRow.equals(key.getRow())) {
      selectRandomWords(words, ret, rand, numTerms);
      words.clear();
      currentRow = key.getRow();
    }
    words.add(key.getColumnFamily());
  }
  selectRandomWords(words, ret, rand, numTerms);
  return ret;
}
Example 20
Source File: QueryUtil.java From accumulo-examples with Apache License 2.0
/**
 * Returns either the {@link #DIR_COLF} or a decoded string version of the colf.
 *
 * @param colf the column family
 */
public static String getType(Text colf) {
  if (colf.equals(DIR_COLF))
    return colf.toString() + ":";
  return Long.toString(Ingest.encoder.decode(colf.getBytes())) + ":";
}