/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.http;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.sun.jersey.spi.container.servlet.ServletContainer;
import de.thetaphi.forbiddenapis.SuppressForbidden;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.ConfServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configuration.IntegerRanges;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.server.ConnectionFactory;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.RequestLog;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.RequestLogHandler;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.FilterMapping;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlet.ServletMapping;
import org.eclipse.jetty.util.ArrayUtil;
import org.eclipse.jetty.util.MultiException;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Create a Jetty embedded server to answer http requests. The primary goal
 * is to serve up status information for the server. There are three
 * contexts:
 * <ul>
 *   <li>"/logs/" points to the log directory</li>
 *   <li>"/static/" points to common static files (src/webapps/static)</li>
 *   <li>"/" points to the jsp server code from (src/webapps/NAME)</li>
 * </ul>
 *
 * This class is a fork of the old HttpServer. HttpServer exists for
 * compatibility reasons. See HBASE-10336 for more details.
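 *
 * <p>A minimal usage sketch. The {@code Builder} methods and
 * {@code start()} shown here are the real API; the server name, port, and
 * findPort choice are illustrative assumptions, not defaults:
 * <pre>{@code
 * Configuration conf = new Configuration();
 * HttpServer2 server = new HttpServer2.Builder()
 *     .setName("example")
 *     .addEndpoint(URI.create("http://localhost:8080"))
 *     .setConf(conf)
 *     .setFindPort(true)  // scan upward for a free port if 8080 is busy
 *     .build();
 * server.start();
 * }</pre>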
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class HttpServer2 implements FilterContainer {
  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);

  public static final String HTTP_SCHEME = "http";
  public static final String HTTPS_SCHEME = "https";

  public static final String HTTP_MAX_REQUEST_HEADER_SIZE_KEY =
      "hadoop.http.max.request.header.size";
  public static final int HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT = 65536;
  public static final String HTTP_MAX_RESPONSE_HEADER_SIZE_KEY =
      "hadoop.http.max.response.header.size";
  public static final int HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT = 65536;

  public static final String HTTP_SOCKET_BACKLOG_SIZE_KEY =
      "hadoop.http.socket.backlog.size";
  public static final int HTTP_SOCKET_BACKLOG_SIZE_DEFAULT = 128;
  public static final String HTTP_MAX_THREADS_KEY = "hadoop.http.max.threads";
  public static final String HTTP_ACCEPTOR_COUNT_KEY =
      "hadoop.http.acceptor.count";
  // -1 means derive the acceptor count from the number of CPU cores
  // (Jetty's default behavior)
  public static final int HTTP_ACCEPTOR_COUNT_DEFAULT = -1;
  public static final String HTTP_SELECTOR_COUNT_KEY =
      "hadoop.http.selector.count";
  // -1 means derive the selector count from the number of CPU cores
  // (Jetty's default behavior)
  public static final int HTTP_SELECTOR_COUNT_DEFAULT = -1;
  // idle timeout in milliseconds
  public static final String HTTP_IDLE_TIMEOUT_MS_KEY =
      "hadoop.http.idle_timeout.ms";
  public static final int HTTP_IDLE_TIMEOUT_MS_DEFAULT = 10000;
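  // Sketch of tuning these knobs programmatically before building the
  // server; the numbers below are illustrative assumptions, not
  // recommended values:
  //   Configuration conf = new Configuration();
  //   conf.setInt(HTTP_MAX_THREADS_KEY, 64);        // Jetty thread pool cap
  //   conf.setInt(HTTP_ACCEPTOR_COUNT_KEY, 2);      // accept threads
  //   conf.setInt(HTTP_SELECTOR_COUNT_KEY, 4);      // NIO selector threads
  //   conf.setInt(HTTP_IDLE_TIMEOUT_MS_KEY, 30000); // connection idle timeout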
  public static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";

  public static final String FILTER_INITIALIZER_PROPERTY
      = "hadoop.http.filter.initializers";

  // The ServletContext attribute where the daemon Configuration
  // gets stored.
  public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
  public static final String ADMINS_ACL = "admins.acl";
  public static final String SPNEGO_FILTER = "SpnegoFilter";
  public static final String NO_CACHE_FILTER = "NoCacheFilter";

  public static final String BIND_ADDRESS = "bind.address";

  private final AccessControlList adminsAcl;

  private final Server webServer;

  private final HandlerCollection handlers;

  private final List<ServerConnector> listeners = Lists.newArrayList();