FAQ
HBASE-15954 REST server should log requests with TRACE instead of DEBUG

Conflicts:
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java

Conflicts:
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java

Conflicts:
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
  hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
  hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/218259c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/218259c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/218259c0

Branch: refs/heads/branch-1.1
Commit: 218259c0edcb7e8d8ee5f6a586c114fa2f33bc7f
Parents: 73a7462
Author: Enis Soztutar <enis@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <enis@apache.org>
Committed: Mon Jun 6 14:06:32 2016 -0700

----------------------------------------------------------------------
  .../hadoop/hbase/rest/MultiRowResource.java | 4 +-
  .../hbase/rest/ProtobufStreamingUtil.java | 10 +-
  .../apache/hadoop/hbase/rest/RESTServer.java | 18 +-
  .../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
  .../hadoop/hbase/rest/RegionsResource.java | 4 +-
  .../apache/hadoop/hbase/rest/RootResource.java | 4 +-
  .../apache/hadoop/hbase/rest/RowResource.java | 54 ++--
  .../hbase/rest/ScannerInstanceResource.java | 32 ++-
  .../hadoop/hbase/rest/ScannerResource.java | 18 +-
  .../hadoop/hbase/rest/SchemaResource.java | 22 +-
  .../rest/StorageClusterStatusResource.java | 4 +-
  .../rest/StorageClusterVersionResource.java | 4 +-
  .../apache/hadoop/hbase/rest/TableResource.java | 26 +-
  .../hadoop/hbase/rest/VersionResource.java | 10 +-
  .../apache/hadoop/hbase/rest/client/Client.java | 40 +--
  .../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
  .../rest/filter/RestCsrfPreventionFilter.java | 286 +++++++++++++++++++
  .../consumer/ProtobufMessageBodyConsumer.java | 6 +-
  .../hadoop/hbase/util/ConnectionCache.java | 6 +-
  19 files changed, 434 insertions(+), 123 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
            }
            model.addRow(rowModel);
          } else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
          }
        }


http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
      this.contentType = type;
      this.limit = limit;
      this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
    }

    @Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
      outStream.write(Bytes.toBytes((short)objectBytes.length));
      outStream.write(objectBytes);
      outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
    }

    private CellSetModel createModelFromResults(Result[] results) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index ad8c65d..7ccc6c1 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -127,23 +127,27 @@ public class RESTServer implements Constants {
      // check for user-defined port setting, if so override the conf
      if (commandLine != null && commandLine.hasOption("port")) {
        String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
      }

      // check if server should only process GET requests, if so override the conf
      if (commandLine != null && commandLine.hasOption("readonly")) {
        servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
      }

      // check for user-defined info server port setting, if so override the conf
      if (commandLine != null && commandLine.hasOption("infoport")) {
        String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
      }

      @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 0ecaf5a..e49298b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;

  import java.io.IOException;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.classification.InterfaceAudience;
  import org.apache.hadoop.hbase.client.Admin;
@@ -29,14 +31,13 @@ import org.apache.hadoop.hbase.security.UserProvider;
  import org.apache.hadoop.hbase.util.ConnectionCache;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;

  /**
   * Singleton class encapsulating global REST servlet state and functions.
   */
  @InterfaceAudience.Private
  public class RESTServlet implements Constants {
- private static Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
    private static RESTServlet INSTANCE;
    private final Configuration conf;
    private final MetricsREST metrics = new MetricsREST();

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c425e84..2521895 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
    @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
    @GET
    @Produces(MIMETYPE_BINARY)
    public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
      }
      servlet.getMetrics().incrementRequests(1);
      // doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
            put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
          }
          puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
          }
        }
        table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
        put.addImmutable(parts[0], parts[1], timestamp, message);
        table = servlet.getTable(tableResource.getName());
        table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
        }
        servlet.getMetrics().incrementSucessfulPutRequests(1);
        return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
        if (table != null) try {
          table.close();
        } catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
        }
      }
    }
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
      MIMETYPE_PROTOBUF_IETF})
    public Response put(final CellSetModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
          + " " + uriInfo.getQueryParameters());
      }
      return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
    @Consumes(MIMETYPE_BINARY)
    public Response putBinary(final byte[] message,
        final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
      }
      return updateBinary(message, headers, true);
    }
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
      MIMETYPE_PROTOBUF_IETF})
    public Response post(final CellSetModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
          + " " + uriInfo.getQueryParameters());
      }
      return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
    @Consumes(MIMETYPE_BINARY)
    public Response postBinary(final byte[] message,
        final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
      }
      return updateBinary(message, headers, false);
    }

    @DELETE
    public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
        table = servlet.getTable(tableResource.getName());
        table.delete(delete);
        servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
        }
      } catch (Exception e) {
        servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
        if (table != null) try {
          table.close();
        } catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
        }
      }
      return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
            .build();
        }

- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
        }
        if (!retValue) {
          servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
      } finally {
        if (table != null) try {
          table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
          LOG.debug("Exception received while closing the table", ioe);
        }
      }
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
            .build();
        }

- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
            + retValue);
        }


http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {

    public ScannerInstanceResource() throws IOException { }

- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
        ResultGenerator generator, int batch) throws IOException {
      this.id = id;
      this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
    @GET
    @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
        @QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
            .build();
        }
        if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
          // respond with 204 (No Content) if an empty cell set would be
          // returned
          if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
        if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
          // if maxRows was given as a query param, stop if we would exceed the
          // specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
            if (--maxRows == 0) {
              generator.putBack(value);
              break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
          rowModel = new RowModel(rowKey);
        }
        rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
            value.getTimestamp(), CellUtil.cloneValue(value)));
      } while (--count > 0);
      model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
    @GET
    @Produces(MIMETYPE_BINARY)
    public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
          MIMETYPE_BINARY);
      }
      servlet.getMetrics().incrementRequests(1);
      try {
        Cell value = generator.next();
        if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
          return Response.noContent().build();
        }
        ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
        response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
          Base64.encodeBytes(
            KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
        response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {

    @DELETE
    public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      if (servlet.isReadOnly()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
  import javax.ws.rs.Path;
  import javax.ws.rs.PathParam;
  import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
  import javax.ws.rs.core.Response;
  import javax.ws.rs.core.UriBuilder;
  import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
        spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
            model.getEndTime(), model.getMaxVersions());
      }
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
      try {
        Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
        String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
        ScannerInstanceResource instance =
          new ScannerInstanceResource(tableName, id, gen, model.getBatch());
        scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
        }
        UriBuilder builder = uriInfo.getAbsolutePathBuilder();
        URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
    @PUT
    @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
      }
      return update(model, true, uriInfo);
    }
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
      MIMETYPE_PROTOBUF_IETF})
    public Response post(final ScannerModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
      }
      return update(model, false, uriInfo);
    }

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
      } catch (Exception e) {
        servlet.getMetrics().incrementFailedGetRequests(1);
        return processException(e);
- }
+ }
    }

    private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
    @PUT
    @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
    @POST
    @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
        justification="Expected")
    @DELETE
    public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      if (servlet.isReadOnly()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
    @GET
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
    public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
        @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
        @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
        @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
        @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
      try {
        Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
            tableScan.setStartRow(prefixBytes);
          }
        }
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
        Table hTable = RESTServlet.getInstance().getTable(this.table);
        tableScan.setBatch(batchSize);
        tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
          String[] familysplit = csplit.trim().split(":");
          if (familysplit.length == 2) {
            if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
              tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
            } else {
              tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
              tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
            }
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
            tableScan.addFamily(Bytes.toBytes(familysplit[0]));
          }
        }

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
     * Build a response for a version request.
     * @param context servlet context
     * @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
     */
    @GET
    @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
      MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
        final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
      }
      servlet.getMetrics().incrementRequests(1);
      ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
     * Dispatch to StorageClusterVersionResource
     */
    @Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
        throws IOException {
      return new StorageClusterVersionResource();
    }

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index ebedf57..f511e03 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
    }

    /**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
     */
    public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
        (MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
      manager.shutdown();
    }
@@ -151,7 +151,7 @@ public class Client {
     * one of the members of the supplied cluster definition and iterate through
     * the list until a transaction can be successfully completed. The
     * definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
     * @param cluster the cluster definition
     * @param method the transaction method
     * @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
      long startTime = System.currentTimeMillis();
      int code = httpClient.executeMethod(method);
      long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
          method.getStatusText() + " in " + (endTime - startTime) + " ms");
      }
      return code;
@@ -250,7 +250,7 @@ public class Client {
    }

    /**
- * Send a HEAD request
+ * Send a HEAD request
     * @param path the path or URI
     * @return a Response object with response detail
     * @throws IOException
@@ -260,14 +260,14 @@ public class Client {
    }

    /**
- * Send a HEAD request
+ * Send a HEAD request
     * @param cluster the cluster definition
     * @param path the path or URI
     * @param headers the HTTP headers to include in the request
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
        throws IOException {
      HeadMethod method = new HeadMethod();
      try {
@@ -280,7 +280,7 @@ public class Client {
    }

    /**
- * Send a GET request
+ * Send a GET request
     * @param path the path or URI
     * @return a Response object with response detail
     * @throws IOException
@@ -290,7 +290,7 @@ public class Client {
    }

    /**
- * Send a GET request
+ * Send a GET request
     * @param cluster the cluster definition
     * @param path the path or URI
     * @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
    }

    /**
- * Send a GET request
+ * Send a GET request
     * @param path the path or URI
     * @param accept Accept header value
     * @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
    }

    /**
- * Send a GET request
+ * Send a GET request
     * @param cluster the cluster definition
     * @param path the path or URI
     * @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
    /**
     * Send a GET request
     * @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
     * <tt>Accept</tt> must be supplied
     * @return a Response object with response detail
     * @throws IOException
@@ -346,7 +346,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
        throws IOException {
      GetMethod method = new GetMethod();
      try {
@@ -382,7 +382,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
        byte[] content) throws IOException {
      Header[] headers = new Header[1];
      headers[0] = new Header("Content-Type", contentType);
@@ -398,7 +398,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
        throws IOException {
      return put(cluster, path, headers, content);
    }
@@ -413,7 +413,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
        byte[] content) throws IOException {
      PutMethod method = new PutMethod();
      try {
@@ -449,7 +449,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
        byte[] content) throws IOException {
      Header[] headers = new Header[1];
      headers[0] = new Header("Content-Type", contentType);
@@ -465,7 +465,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
        throws IOException {
      return post(cluster, path, headers, content);
    }
@@ -480,7 +480,7 @@ public class Client {
     * @return a Response object with response detail
     * @throws IOException
     */
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
        byte[] content) throws IOException {
      PostMethod method = new PostMethod();
      try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
              throw new ServletException("Failed to retrieve server principal", ie);
            }
          }
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
          name = name.substring(REST_PREFIX_LEN);
          props.setProperty(name, value);
        }

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
new file mode 100644
index 0000000..dbb1447
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This filter provides protection against cross site request forgery (CSRF)
+ * attacks for REST APIs. Enabling this filter on an endpoint results in the
+ * requirement of all client to send a particular (configurable) HTTP header
+ * with every request. In the absense of this header the filter will reject the
+ * attempt as a bad request.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RestCsrfPreventionFilter implements Filter {
+
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
+
+ public static final String HEADER_USER_AGENT = "User-Agent";
+ public static final String BROWSER_USER_AGENT_PARAM =
+ "browser-useragents-regex";
+ public static final String CUSTOM_HEADER_PARAM = "custom-header";
+ public static final String CUSTOM_METHODS_TO_IGNORE_PARAM =
+ "methods-to-ignore";
+ static final String BROWSER_USER_AGENTS_DEFAULT = "^Mozilla.*,^Opera.*";
+ public static final String HEADER_DEFAULT = "X-XSRF-HEADER";
+ static final String METHODS_TO_IGNORE_DEFAULT = "GET,OPTIONS,HEAD,TRACE";
+ private String headerName = HEADER_DEFAULT;
+ private Set<String> methodsToIgnore = null;
+ private Set<Pattern> browserUserAgents;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
+ if (customHeader != null) {
+ headerName = customHeader;
+ }
+ String customMethodsToIgnore =
+ filterConfig.getInitParameter(CUSTOM_METHODS_TO_IGNORE_PARAM);
+ if (customMethodsToIgnore != null) {
+ parseMethodsToIgnore(customMethodsToIgnore);
+ } else {
+ parseMethodsToIgnore(METHODS_TO_IGNORE_DEFAULT);
+ }
+
+ String agents = filterConfig.getInitParameter(BROWSER_USER_AGENT_PARAM);
+ if (agents == null) {
+ agents = BROWSER_USER_AGENTS_DEFAULT;
+ }
+ parseBrowserUserAgents(agents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
+ }
+
+ void parseBrowserUserAgents(String userAgents) {
+ String[] agentsArray = userAgents.split(",");
+ browserUserAgents = new HashSet<Pattern>();
+ for (String patternString : agentsArray) {
+ browserUserAgents.add(Pattern.compile(patternString));
+ }
+ }
+
+ void parseMethodsToIgnore(String mti) {
+ String[] methods = mti.split(",");
+ methodsToIgnore = new HashSet<String>();
+ for (int i = 0; i < methods.length; i++) {
+ methodsToIgnore.add(methods[i]);
+ }
+ }
+
+ /**
+ * This method interrogates the User-Agent String and returns whether it
+ * refers to a browser. If its not a browser, then the requirement for the
+ * CSRF header will not be enforced; if it is a browser, the requirement will
+ * be enforced.
+ * <p>
+ * A User-Agent String is considered to be a browser if it matches
+ * any of the regex patterns from browser-useragent-regex; the default
+ * behavior is to consider everything a browser that matches the following:
+ * "^Mozilla.*,^Opera.*". Subclasses can optionally override
+ * this method to use different behavior.
+ *
+ * @param userAgent The User-Agent String, or null if there isn't one
+ * @return true if the User-Agent String refers to a browser, false if not
+ */
+ protected boolean isBrowser(String userAgent) {
+ if (userAgent == null) {
+ return false;
+ }
+ for (Pattern pattern : browserUserAgents) {
+ Matcher matcher = pattern.matcher(userAgent);
+ if (matcher.matches()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Defines the minimal API requirements for the filter to execute its
+ * filtering logic. This interface exists to facilitate integration in
+ * components that do not run within a servlet container and therefore cannot
+ * rely on a servlet container to dispatch to the {@link #doFilter} method.
+ * Applications that do run inside a servlet container will not need to write
+ * code that uses this interface. Instead, they can use typical servlet
+ * container configuration mechanisms to insert the filter.
+ */
+ public interface HttpInteraction {
+
+ /**
+ * Returns the value of a header.
+ *
+ * @param header name of header
+ * @return value of header
+ */
+ String getHeader(String header);
+
+ /**
+ * Returns the method.
+ *
+ * @return method
+ */
+ String getMethod();
+
+ /**
+ * Called by the filter after it decides that the request may proceed.
+ *
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ void proceed() throws IOException, ServletException;
+
+ /**
+ * Called by the filter after it decides that the request is a potential
+ * CSRF attack and therefore must be rejected.
+ *
+ * @param code status code to send
+ * @param message response message
+ * @throws IOException if there is an I/O error
+ */
+ void sendError(int code, String message) throws IOException;
+ }
+
+ /**
+ * Handles an {@link HttpInteraction} by applying the filtering logic.
+ *
+ * @param httpInteraction caller's HTTP interaction
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ public void handleHttpInteraction(HttpInteraction httpInteraction)
+ throws IOException, ServletException {
+ if (!isBrowser(httpInteraction.getHeader(HEADER_USER_AGENT)) ||
+ methodsToIgnore.contains(httpInteraction.getMethod()) ||
+ httpInteraction.getHeader(headerName) != null) {
+ httpInteraction.proceed();
+ } else {
+ httpInteraction.sendError(HttpServletResponse.SC_BAD_REQUEST,
+ "Missing Required Header for CSRF Vulnerability Protection");
+ }
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response,
+ final FilterChain chain) throws IOException, ServletException {
+ final HttpServletRequest httpRequest = (HttpServletRequest)request;
+ final HttpServletResponse httpResponse = (HttpServletResponse)response;
+ handleHttpInteraction(new ServletFilterHttpInteraction(httpRequest,
+ httpResponse, chain));
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ /**
+ * Constructs a mapping of configuration properties to be used for filter
+ * initialization. The mapping includes all properties that start with the
+ * specified configuration prefix. Property names in the mapping are trimmed
+ * to remove the configuration prefix.
+ *
+ * @param conf configuration to read
+ * @param confPrefix configuration prefix
+ * @return mapping of configuration properties to be used for filter
+ * initialization
+ */
+ public static Map<String, String> getFilterParams(Configuration conf,
+ String confPrefix) {
+ Map<String, String> filterConfigMap = new HashMap<>();
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(confPrefix)) {
+ String value = conf.get(name);
+ name = name.substring(confPrefix.length());
+ filterConfigMap.put(name, value);
+ }
+ }
+ return filterConfigMap;
+ }
+
+ /**
+ * {@link HttpInteraction} implementation for use in the servlet filter.
+ */
+ private static final class ServletFilterHttpInteraction
+ implements HttpInteraction {
+
+ private final FilterChain chain;
+ private final HttpServletRequest httpRequest;
+ private final HttpServletResponse httpResponse;
+
+ /**
+ * Creates a new ServletFilterHttpInteraction.
+ *
+ * @param httpRequest request to process
+ * @param httpResponse response to process
+ * @param chain filter chain to forward to if HTTP interaction is allowed
+ */
+ public ServletFilterHttpInteraction(HttpServletRequest httpRequest,
+ HttpServletResponse httpResponse, FilterChain chain) {
+ this.httpRequest = httpRequest;
+ this.httpResponse = httpResponse;
+ this.chain = chain;
+ }
+
+ @Override
+ public String getHeader(String header) {
+ return httpRequest.getHeader(header);
+ }
+
+ @Override
+ public String getMethod() {
+ return httpRequest.getMethod();
+ }
+
+ @Override
+ public void proceed() throws IOException, ServletException {
+ chain.doFilter(httpRequest, httpResponse);
+ }
+
+ @Override
+ public void sendError(int code, String message) throws IOException {
+ httpResponse.sendError(code, message);
+ }
+ }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
  @Provider
  @Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
  @InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
      implements MessageBodyReader<ProtobufMessageHandler> {
    private static final Log LOG =
      LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
            baos.write(buffer, 0, read);
          }
        } while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
            inputStream);
        }
        obj = obj.getObjectFromMessage(baos.toByteArray());

http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index ccd59d4..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
  import java.util.concurrent.ConcurrentHashMap;
  import java.util.concurrent.locks.Lock;

+import org.apache.commons.logging.Log;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.ChoreService;
  import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.security.User;
  import org.apache.hadoop.hbase.security.UserProvider;
  import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;

  /**
   * A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
   */
  @InterfaceAudience.Private
  public class ConnectionCache {
- private static Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);

    private final Map<String, ConnectionInfo>
     connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {

    private final ThreadLocal<String> effectiveUserNames =
        new ThreadLocal<String>() {
+ @Override
      protected String initialValue() {
        return realUserName;
      }

Search Discussions

Discussion Posts

Previous

Follow ups

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 21 of 96 | next ›
Discussion Overview
group: commits@
categories: hbase, hadoop
posted: Jun 1, '16 at 5:35p
active: Jun 11, '16 at 5:04a
posts: 96
users: 9
website: hbase.apache.org

People

Translate

site design / logo © 2019 Grokbase