/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.raid;

import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLongArray;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.net.InetSocketAddress;
import java.net.SocketException;

import javax.security.auth.login.LoginException;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.hadoop.ipc.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.HarFileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.DistributedRaidFileSystem;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.raid.StripeReader.LocationPair;
import org.apache.hadoop.raid.protocol.PolicyInfo;
import org.apache.hadoop.raid.protocol.RaidProtocol;
import org.xml.sax.SAXException;

/**
 * A {@link RaidShell} that allows browsing configured raid policies.
 */
public class RaidShell extends Configured implements Tool {
  static {
    Configuration.addDefaultResource("hdfs-default.xml");
    Configuration.addDefaultResource("hdfs-site.xml");
    Configuration.addDefaultResource("raid-default.xml");
    Configuration.addDefaultResource("raid-site.xml");
  }

  public static final Log LOG = LogFactory.getLog(
      "org.apache.hadoop.RaidShell");
  public RaidProtocol raidnode;
  RaidProtocol rpcRaidnode;
  private UnixUserGroupInformation ugi;
  volatile boolean clientRunning = true;
  private Configuration conf;
  AtomicInteger corruptCounter = new AtomicInteger();
  // One counter slot per stripe position: the data blocks plus the parity
  // blocks of the "rs" codec.
  AtomicLongArray numStrpMissingBlks = new AtomicLongArray(
      Codec.getCodec("rs").stripeLength + Codec.getCodec("rs").parityLength);
  private final PrintStream out;

  final static private String DistRaidCommand = "-distRaid";
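
  /*
   * Typical usage, as a minimal sketch (illustrative only; the real
   * command-line entry point is the Tool interface driven by ToolRunner,
   * which is not shown in this excerpt):
   *
   *   Configuration conf = new Configuration();
   *   RaidShell shell = new RaidShell(conf);
   *   shell.initializeRpc(conf, RaidNode.getAddress(conf));
   *   // operations are then issued through shell.raidnode
   */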
  /**
   * Start RaidShell.
   * <p>
   * The RaidShell connects to the specified RaidNode and performs basic
   * configuration operations.
   * @throws IOException
   */
  public RaidShell(Configuration conf) throws IOException {
    super(conf);
    this.conf = conf;
    this.out = System.out;
  }

  public RaidShell(Configuration conf, PrintStream out) throws IOException {
    super(conf);
    this.conf = conf;
    this.out = out;
  }

  void initializeRpc(Configuration conf, InetSocketAddress address)
      throws IOException {
    try {
      this.ugi = UnixUserGroupInformation.login(conf, true);
    } catch (LoginException e) {
      throw (IOException) (new IOException().initCause(e));
    }

    this.rpcRaidnode = createRPCRaidnode(address, conf, ugi);
    this.raidnode = createRaidnode(rpcRaidnode);
  }

  void initializeLocal(Configuration conf) throws IOException {
    try {
      this.ugi = UnixUserGroupInformation.login(conf, true);
    } catch (LoginException e) {
      throw (IOException) (new IOException().initCause(e));
    }
  }

  public static RaidProtocol createRaidnode(Configuration conf)
      throws IOException {
    return createRaidnode(RaidNode.getAddress(conf), conf);
  }

  public static RaidProtocol createRaidnode(InetSocketAddress raidNodeAddr,
      Configuration conf) throws IOException {
    try {
      return createRaidnode(createRPCRaidnode(raidNodeAddr, conf,
          UnixUserGroupInformation.login(conf, true)));
    } catch (LoginException e) {
      throw (IOException) (new IOException().initCause(e));
    }
  }

  private static RaidProtocol createRPCRaidnode(InetSocketAddress raidNodeAddr,
      Configuration conf, UnixUserGroupInformation ugi) throws IOException {
    LOG.info("RaidShell connecting to " + raidNodeAddr);
    return (RaidProtocol) RPC.getProxy(RaidProtocol.class,
        RaidProtocol.versionID, raidNodeAddr, ugi, conf,
        NetUtils.getSocketFactory(conf, RaidProtocol.class));
  }

  private static RaidProtocol createRaidnode(RaidProtocol rpcRaidnode)
      throws IOException {
    // Retry up to five times, sleeping 5000 ms between attempts.
    RetryPolicy createPolicy =
        RetryPolicies.retryUpToMaximumCountWithFixedSleep(
            5, 5000, TimeUnit.MILLISECONDS);

    Map<Class<? extends Exception>, RetryPolicy> remoteExceptionToPolicyMap =
        new HashMap<Class<? extends Exception>, RetryPolicy>();
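    // The rest of this method is truncated in this excerpt. A sketch of how
    // the standard Hadoop retry-proxy pattern usually completes this setup
    // (mirroring DFSClient; the "create" method name registered below is an
    // assumption, not confirmed by this excerpt):
    //
    //   Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
    //       new HashMap<Class<? extends Exception>, RetryPolicy>();
    //   exceptionToPolicyMap.put(RemoteException.class,
    //       RetryPolicies.retryByRemoteException(
    //           RetryPolicies.TRY_ONCE_THEN_FAIL, remoteExceptionToPolicyMap));
    //   RetryPolicy methodPolicy = RetryPolicies.retryByException(
    //       RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
    //   Map<String, RetryPolicy> methodNameToPolicyMap =
    //       new HashMap<String, RetryPolicy>();
    //   methodNameToPolicyMap.put("create", methodPolicy);
    //   return (RaidProtocol) RetryProxy.create(RaidProtocol.class,
    //       rpcRaidnode, methodNameToPolicyMap);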