diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java index 008726aa459e..6773912703f6 100644 --- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java +++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java @@ -18,19 +18,14 @@ package org.apache.hadoop.hive.common.jsonexplain; -import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.TreeMap; import org.apache.hadoop.hive.common.jsonexplain.Op.OpType; -import org.codehaus.jackson.JsonParseException; -import org.codehaus.jackson.map.JsonMappingException; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; @@ -68,7 +63,7 @@ public final class Vertex implements Comparable{ // tag public String tag; protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); - + public static enum VertexType { MAP, REDUCE, UNION, UNKNOWN }; @@ -106,16 +101,11 @@ public void addDependency(Connection connection) throws JSONException { } /** - * @throws JSONException - * @throws JsonParseException - * @throws JsonMappingException - * @throws IOException * @throws Exception * We assume that there is a single top-level Map Operator Tree or a * Reduce Operator Tree in a vertex */ - public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, - IOException, Exception { + public void extractOpTree() throws Exception { if (vertexObject.length() != 0) { for (String key : JSONObject.getNames(vertexObject)) { if (key.equals("Map Operator Tree:")) { @@ -162,16 +152,11 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin * @param object * @param parent * @return - * @throws JSONException - * @throws JsonParseException - * 
@throws JsonMappingException - * @throws IOException * @throws Exception * assumption: each operator only has one parent but may have many * children */ - Op extractOp(JSONObject object, Op parent) throws JSONException, JsonParseException, JsonMappingException, - IOException, Exception { + Op extractOp(JSONObject object, Op parent) throws Exception { String[] names = JSONObject.getNames(object); if (names.length != 1) { throw new Exception("Expect only one operator in " + object.toString()); @@ -224,8 +209,7 @@ Op extractOp(JSONObject object, Op parent) throws JSONException, JsonParseExcept } } - public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) - throws JSONException, Exception { + public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) throws Exception { // print vertexname if (parser.printSet.contains(this) && numReduceOp <= 1) { if (type != null) { @@ -281,7 +265,7 @@ public void print(Printer printer, int indentFlag, String type, Vertex callingVe /** * We check if a vertex has multiple reduce operators. 
- * @throws JSONException + * @throws JSONException */ public void checkMultiReduceOperator(boolean rewriteObject) throws JSONException { // check if it is a reduce vertex and its children is more than 1; @@ -310,8 +294,12 @@ public void setType(String type) { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } Vertex vertex = (Vertex) o; return Objects.equals(name, vertex.name) && Objects.equals(stage, vertex.stage) && diff --git a/common/src/java/org/apache/hive/http/JMXJsonServlet.java b/common/src/java/org/apache/hive/http/JMXJsonServlet.java index 884cff1520b5..ed95084f011f 100644 --- a/common/src/java/org/apache/hive/http/JMXJsonServlet.java +++ b/common/src/java/org/apache/hive/http/JMXJsonServlet.java @@ -44,11 +44,12 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; + /* * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has * been rewritten to be read only and to output in a JSON format so it is not @@ -72,7 +73,7 @@ * attribute of a JMX bean. The format of the URL is * http://.../jmx?get=MXBeanName::AttributeName *

- * For example + * For example * * http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId * will return the cluster id of the namenode mxbean. @@ -98,23 +99,23 @@ *

* The servlet attempts to convert the JMXBeans into JSON. Each * bean's attributes will be converted to a JSON object member. - * + * * If the attribute is a boolean, a number, a string, or an array - * it will be converted to the JSON equivalent. - * + * it will be converted to the JSON equivalent. + * * If the value is a {@link CompositeData} then it will be converted * to a JSON object with the keys as the name of the JSON member and * the value is converted following these same rules. - * + * * If the value is a {@link TabularData} then it will be converted * to an array of the {@link CompositeData} elements that it contains. - * + * * All other objects will be converted to a string and output as such. - * + * * The bean's name and modelerType will be returned for all beans. * * Optional paramater "callback" should be used to deliver JSONP response. - * + * */ public class JMXJsonServlet extends HttpServlet { private static final Logger LOG = LoggerFactory.getLogger(JMXJsonServlet.class); @@ -144,7 +145,7 @@ public void init() throws ServletException { /** * Process a GET request for the specified resource. - * + * * @param request * The servlet request we are processing * @param response @@ -210,8 +211,8 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) { } // --------------------------------------------------------- Private Methods - private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, - HttpServletResponse response) + private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, + HttpServletResponse response) throws IOException { LOG.debug("Listing beans for "+qry); Set names = null; @@ -255,7 +256,7 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, + " threw an exception", e); } catch ( ReflectionException e ) { // This happens when the code inside the JMX bean (setter?? 
from the - // java docs) threw an exception, so log it and fall back on the + // java docs) threw an exception, so log it and fall back on the // class name LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e); diff --git a/common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java b/common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java index ab938849869d..71825058c5cf 100644 --- a/common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java +++ b/common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java @@ -32,10 +32,11 @@ import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; -import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * A servlet to configure log4j2. *
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAddPartitionMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAddPartitionMessage.java index ebdd29e41385..1483e3b5b115 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAddPartitionMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAddPartitionMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.AddPartitionMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; import java.util.Map; diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterPartitionMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterPartitionMessage.java index c28524165eb8..7fcf25f4e141 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterPartitionMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterPartitionMessage.java @@ -22,7 +22,7 @@ import org.apache.hive.hcatalog.common.HCatConstants; import org.apache.hive.hcatalog.messaging.AlterPartitionMessage; import org.apache.hive.hcatalog.messaging.AlterTableMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.ArrayList; import java.util.List; diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterTableMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterTableMessage.java index 9c0799b473cd..ae8a8a4f265d 100644 --- 
a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterTableMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONAlterTableMessage.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hive.hcatalog.common.HCatConstants; import org.apache.hive.hcatalog.messaging.AlterTableMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.ArrayList; import java.util.List; diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateDatabaseMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateDatabaseMessage.java index 191c4d47771c..2426fcc91cff 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateDatabaseMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateDatabaseMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.CreateDatabaseMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON Implementation of CreateDatabaseMessage. 
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateFunctionMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateFunctionMessage.java index 17d3b7301505..ea6e8ccdf9ad 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateFunctionMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateFunctionMessage.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.metastore.messaging.MessageBuilder; import org.apache.hive.hcatalog.messaging.CreateFunctionMessage; import org.apache.thrift.TException; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON Implementation of CreateFunctionMessage. diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateTableMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateTableMessage.java index 60333d5aec52..7eae8524138e 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateTableMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONCreateTableMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.CreateTableMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON implementation of CreateTableMessage. 
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropDatabaseMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropDatabaseMessage.java index 26e1c1635460..8d23a4850d8f 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropDatabaseMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropDatabaseMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.DropDatabaseMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON implementation of DropDatabaseMessage. diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropFunctionMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropFunctionMessage.java index 7fb7d1cc2f45..36cda71ad752 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropFunctionMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropFunctionMessage.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.metastore.messaging.MessageBuilder; import org.apache.hive.hcatalog.messaging.DropFunctionMessage; import org.apache.thrift.TException; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON Implementation of CreateDatabaseMessage. 
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropPartitionMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropPartitionMessage.java index e00d056f76b8..222fae52fce0 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropPartitionMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropPartitionMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.DropPartitionMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; import java.util.Map; diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropTableMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropTableMessage.java index 232270586f07..cef5382ad212 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropTableMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONDropTableMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.DropTableMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * JSON implementation of DropTableMessage. 
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONInsertMessage.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONInsertMessage.java index e05113cd332a..bdd7db897991 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONInsertMessage.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONInsertMessage.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.messaging.json; import org.apache.hive.hcatalog.messaging.InsertMessage; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; import java.util.Map; diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageDeserializer.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageDeserializer.java index ab26a14c8d8b..55075562cb72 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageDeserializer.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageDeserializer.java @@ -31,8 +31,9 @@ import org.apache.hive.hcatalog.messaging.DropTableMessage; import org.apache.hive.hcatalog.messaging.InsertMessage; import org.apache.hive.hcatalog.messaging.MessageDeserializer; -import org.codehaus.jackson.map.DeserializationConfig; -import org.codehaus.jackson.map.ObjectMapper; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; /** * MessageDeserializer implementation, for deserializing from JSON strings. @@ -42,7 +43,7 @@ public class JSONMessageDeserializer extends MessageDeserializer { static ObjectMapper mapper = new ObjectMapper(); // Thread-safe. 
static { - mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } @Override diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java index dfcdaef3e933..30808a935de5 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java @@ -27,9 +27,10 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hive.hcatalog.templeton.tool.TempletonUtils; -import org.codehaus.jackson.map.ObjectMapper; import org.eclipse.jetty.http.HttpStatus; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * Helper class to build new json objects with new top level * properties. Only add non-null entries. @@ -93,8 +94,9 @@ public static JsonBuilder createError(String msg, int errorCode) * Add a non-null value to the map. */ public JsonBuilder put(String name, Object val) { - if (val != null) + if (val != null) { map.put(name, val); + } return this; } @@ -134,7 +136,9 @@ public String buildJson() public Response buildResponse() { int status = HttpStatus.OK_200; // Server ok. if (map.containsKey("error")) + { status = HttpStatus.INTERNAL_SERVER_ERROR_500; // Generic http server error. 
+ } Object o = map.get("errorCode"); if (o != null) { if(hiveError2HttpStatusCode.containsKey(o)) { @@ -173,9 +177,9 @@ public static boolean isError(Map obj) { */ public static Map jsonToMap(String json) throws IOException { - if (!TempletonUtils.isset(json)) + if (!TempletonUtils.isset(json)) { return new HashMap(); - else { + } else { ObjectMapper mapper = new ObjectMapper(); return mapper.readValue(json, Map.class); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java index 1c3ecbe435a7..42c85200cf52 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java @@ -24,7 +24,7 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.codehaus.jackson.map.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectMapper; /** * Simple exception that will return a json error payload if thrown @@ -55,8 +55,9 @@ public static Response buildMessage(int httpCode, Map params, String msg) { HashMap err = new HashMap(); err.put("error", msg); - if (params != null) + if (params != null) { err.putAll(params); + } String json = "\"error\""; try { diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java index 3770c5301ca9..8f23b157a72f 100644 --- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java +++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java @@ -24,12 +24,13 @@ import java.util.List; import java.util.Map; -import org.codehaus.jackson.map.ObjectMapper; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static 
org.junit.Assert.assertEquals; import org.junit.Test; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * TestDesc - Test the desc objects that are correctly converted to * and from json. This also sets every field of the TableDesc object. diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java index 31df4bbc026f..dc1bb7d98671 100644 --- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java +++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java @@ -27,14 +27,16 @@ import org.apache.commons.httpclient.methods.StringRequestEntity; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; import org.apache.hadoop.hive.ql.ErrorMsg; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.type.TypeReference; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.eclipse.jetty.http.HttpStatus; import java.io.IOException; @@ -276,7 +278,7 @@ public void getPigVersion() throws Exception { Assert.assertEquals(HttpStatus.NOT_IMPLEMENTED_501, p.httpStatusCode); Map props = JsonBuilder.jsonToMap(p.responseBody); Assert.assertEquals(p.getAssertMsg(), "Pig version request not yet " + - "implemented", (String)props.get("error")); + "implemented", props.get("error")); } /** diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSysLogSerDe.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/log/syslog/TestSysLogSerDe.java similarity index 97% rename from itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSysLogSerDe.java rename to 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/log/syslog/TestSysLogSerDe.java index b3bfcfad63dc..e52c277bc591 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSysLogSerDe.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/log/syslog/TestSysLogSerDe.java @@ -20,7 +20,6 @@ import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.log.syslog.SyslogSerDe; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.io.Text; import org.junit.Assert; import org.junit.Test; diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java index a965be627bb1..22c67534ee7b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java @@ -29,7 +29,6 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -43,17 +42,13 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.registry.impl.ZkRegistryBase; import org.apache.hive.http.security.PamAuthenticator; -import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.jdbc.miniHS2.MiniHS2; -import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.server.HS2ActivePassiveHARegistry; import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient; import org.apache.hive.service.server.HiveServer2Instance; import org.apache.hive.service.server.TestHS2HttpServerPam; import org.apache.hive.service.servlet.HS2Peers; import org.apache.http.Header; -import org.apache.http.HttpException; -import org.apache.http.HttpHeaders; import 
org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; @@ -63,7 +58,6 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.StatusLine; import org.apache.http.util.EntityUtils; -import org.codehaus.jackson.map.ObjectMapper; import org.eclipse.jetty.http.HttpHeader; import org.eclipse.jetty.util.B64Code; import org.eclipse.jetty.util.StringUtil; @@ -72,6 +66,9 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; + +import com.fasterxml.jackson.databind.ObjectMapper; + import org.junit.Ignore; @Ignore("HIVE-23548") diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java index 411b5e37cfe7..17d1c497908e 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java @@ -20,6 +20,8 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryDisplay; import org.apache.hadoop.hive.ql.QueryInfo; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskResult; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState; @@ -31,6 +33,8 @@ import org.junit.Before; import org.junit.Test; +import static org.junit.Assert.assertEquals; + import java.io.StringWriter; import java.util.HashMap; import java.util.List; @@ -300,4 +304,61 @@ private void verifyDDLHtml(String stmt, String opHandle, boolean assertCondition Assert.assertTrue(html.contains("testuser")); } + static class MyTask extends Task { + + public MyTask() { + id = "x"; + } + + /** + * + */ + private static final long serialVersionUID = 1L; + + @Override + 
public String getName() { + return "my"; + + } + + @Override + public int execute() { + return 0; + } + + @Override + public StageType getType() { + return StageType.ATLAS_DUMP; + + } + + } + + @Test + public void testJSONSerialization() throws Exception { + QueryDisplay qd = new QueryDisplay(); + qd.setErrorMessage("asd"); + qd.setTaskResult("a", new TaskResult()); + qd.setExplainPlan("explainPlan"); + qd.setQueryStr("qstr"); + Task tTask = new MyTask(); + qd.updateTaskStatus(tTask); + tTask.setStarted(); + qd.updateTaskStatus(tTask); + tTask.setDone(); + qd.updateTaskStatus(tTask); + + Long ee = qd.getTaskDisplays().get(0).getElapsedTime(); + System.out.println(ee); + String json = QueryDisplay.OBJECT_MAPPER.writeValueAsString(qd); + QueryDisplay n = QueryDisplay.OBJECT_MAPPER.readValue(json, QueryDisplay.class); + + assertEquals(qd.getQueryString(), n.getQueryString()); + assertEquals(qd.getExplainPlan(), n.getExplainPlan()); + assertEquals(qd.getErrorMessage(), n.getErrorMessage()); + assertEquals(qd.getTaskDisplays().size(), n.getTaskDisplays().size()); + assertEquals(qd.getTaskDisplays().get(0).taskState, n.getTaskDisplays().get(0).taskState); + + } + } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/AppStatusBuilder.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/AppStatusBuilder.java index c2ba4dba0c58..833745227778 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/AppStatusBuilder.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/AppStatusBuilder.java @@ -25,7 +25,8 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.codehaus.jackson.annotate.JsonIgnore; + +import com.fasterxml.jackson.annotation.JsonIgnore; /** * Represents the status of the Llap application. 
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java index e3302b71f8e8..b0e27c01e79d 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/status/LlapStatusServiceDriver.java @@ -53,14 +53,15 @@ import org.apache.hadoop.yarn.service.client.ServiceClient; import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.SystemClock; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonMethod; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; -import org.codehaus.jackson.map.annotate.JsonSerialize; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + /** * Checks the status of the Llap. 
*/ @@ -297,10 +298,10 @@ private ApplicationReport getAppReport(String appName, long timeoutMs) public void outputJson(PrintWriter writer) throws LlapStatusCliException { ObjectMapper mapper = new ObjectMapper(); - mapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); - mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL); - mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_EMPTY); - mapper.setVisibility(JsonMethod.ALL, Visibility.NON_PRIVATE); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + mapper.setSerializationInclusion(Include.NON_NULL); + mapper.setSerializationInclusion(Include.NON_EMPTY); + mapper.setVisibility(PropertyAccessor.ALL, Visibility.NON_PRIVATE); try { writer.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(appStatusBuilder)); } catch (IOException e) { diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java index 59bdf53c48e8..0bb16366b52b 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java @@ -43,11 +43,12 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hive.common.util.HiveVersionInfo; import org.apache.hive.http.HttpServer; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; + public class LlapWebServices extends AbstractService { private static final Logger LOG = LoggerFactory.getLogger(LlapWebServices.class); @@ -214,7 +215,7 @@ public static class LlapPeerRegistryServlet extends HttpServlet { public void init() throws ServletException { jsonFactory = new 
JsonFactory(); } - + @Override public void doGet(HttpServletRequest request, HttpServletResponse response) { JsonGenerator jg = null; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/SystemConfigurationServlet.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/SystemConfigurationServlet.java index 46ed5bf6e2a5..a2d5944f4e99 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/SystemConfigurationServlet.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/SystemConfigurationServlet.java @@ -32,11 +32,12 @@ import org.apache.hadoop.hive.llap.LlapDaemonInfo; import org.apache.hadoop.util.Shell; import org.apache.hive.http.HttpServer; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; + import io.netty.util.NetUtil; /** diff --git a/pom.xml b/pom.xml index ff85c9785b7c..abc206258937 100644 --- a/pom.xml +++ b/pom.xml @@ -1493,6 +1493,7 @@ org.spark_project.guava.** org.apache.spark.api.java.Optional jersey.repackaged.com.google.** + org.codehaus.jackson.** true diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java index 0dafb004b6c2..cdba54c5b652 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java @@ -27,19 +27,32 @@ import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.RunningJob; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.annotate.JsonWriteNullProperties; -import org.codehaus.jackson.annotate.JsonIgnore; import org.json.JSONException; import org.json.JSONObject; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import 
com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * Some limited query information to save for WebUI. * * The class is synchronized, as WebUI may access information about a running query. */ +@JsonAutoDetect(fieldVisibility = Visibility.ANY, getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE) public class QueryDisplay { + /** + * Preferred ObjectMapper for this class. + * + * It must be used to have things work in a shaded environment (and it's also more performant). + */ + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + // Member variables private String queryStr; private String explainPlan; @@ -73,12 +86,14 @@ public enum Phase { EXECUTION, } + @JsonIgnore public String getFullLogLocation() { return LogUtils.getLogFilePath(); } - @JsonWriteNullProperties(false) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) + @JsonAutoDetect(fieldVisibility = Visibility.ANY, getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE) public static class TaskDisplay { public static final String NUMBER_OF_MAPPERS = "Number of Mappers"; @@ -277,6 +292,7 @@ public synchronized void setQueryStr(String queryStr) { this.queryStr = queryStr; } + @JsonIgnore public synchronized String getQueryString() { return returnStringOrUnknown(queryStr); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java index f98e01eab2c1..9968d7ef3c66 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/ShowUtils.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.ddl; +import
com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.FSDataOutputStream; @@ -42,7 +43,6 @@ import org.apache.hadoop.hive.serde2.io.DateWritableV2; import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; import org.apache.hive.common.util.HiveStringUtils; -import org.codehaus.jackson.map.ObjectMapper; import java.io.DataOutputStream; import java.io.IOException; @@ -80,9 +80,9 @@ public static DataOutputStream getOutputStream(Path outputFile, DDLOperationCont * 'property_name1'='property_value1', * 'property_name2'='property_value2', * ... - * + * * Properties are listed in alphabetical order. - * + * * @param properties The properties to list. * @param exclude Property names to exclude. */ @@ -105,7 +105,7 @@ public static void writeToFile(String data, String file, DDLOperationContext con if (StringUtils.isEmpty(data)) { return; } - + Path resFile = new Path(file); FileSystem fs = resFile.getFileSystem(context.getConf()); try (FSDataOutputStream out = fs.create(resFile); @@ -245,7 +245,7 @@ public static void asJson(OutputStream out, Map data) throws Hiv /** * Prints a row with the given fields into the builder. * The last field could be a multiline field, and the extra lines should be padded. - * + * * @param fields The fields to print * @param tableInfo The target builder * @param isLastLinePadded Is the last field could be printed in multiple lines, if contains newlines? @@ -285,7 +285,7 @@ public static void formatOutput(String[] fields, StringBuilder tableInfo, boolea /** * Prints a row the given fields to a formatted line. 
- * + * * @param fields The fields to print * @param tableInfo The target builder */ @@ -296,7 +296,7 @@ public static void formatOutput(String[] fields, StringBuilder tableInfo) { /** * Prints the name value pair, and if the value contains newlines, it adds one more empty field * before the two values (Assumes, the name value pair is already indented with it). - * + * * @param name The field name to print * @param value The value to print - might contain newlines * @param tableInfo The target builder @@ -311,7 +311,7 @@ public static void formatOutput(String name, String value, StringBuilder tableIn * Prints the name value pair * If the output is padded then unescape the value, so it could be printed in multiple lines. * In this case it assumes the pair is already indented with a field delimiter - * + * * @param name The field name to print * @param value The value t print * @param tableInfo The target builder @@ -327,7 +327,7 @@ public static void formatOutput(String name, String value, StringBuilder tableIn * Indent processing for multi-line values. * Values should be indented the same amount on each line. * If the first line comment starts indented by k, the following line comments should also be indented by k. - * + * * @param value the value to write * @param tableInfo the buffer to write to * @param columnWidths the widths of the previous columns @@ -352,7 +352,7 @@ private static void indentMultilineValue(String value, StringBuilder tableInfo, /** * Print the right padding, with the given column widths. 
- * + * @param tableInfo The buffer to write to * @param columnWidths The column widths */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/formatter/JsonShowResourcePlanFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/formatter/JsonShowResourcePlanFormatter.java index 2b759e3ea641..6ae036ca8d3e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/formatter/JsonShowResourcePlanFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/formatter/JsonShowResourcePlanFormatter.java @@ -21,8 +21,9 @@ import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.codehaus.jackson.JsonGenerator; -import org.codehaus.jackson.map.ObjectMapper; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.ObjectMapper; import java.io.Closeable; import java.io.DataOutputStream; @@ -35,7 +36,7 @@ public class JsonShowResourcePlanFormatter extends ShowResourcePlanFormatter { @Override public void showResourcePlans(DataOutputStream out, List resourcePlans) throws HiveException { - try (JsonGenerator generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out)) { + try (JsonGenerator generator = new ObjectMapper().getFactory().createGenerator(out)) { generator.writeStartArray(); for (WMResourcePlan plan : resourcePlans) { generator.writeStartObject(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerBaseModelObject.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerBaseModelObject.java index 2f999afbbb99..064437c7d55a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerBaseModelObject.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerBaseModelObject.java
@@ -18,22 +18,25 @@ package org.apache.hadoop.hive.ql.exec.repl.ranger; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; + +import java.util.Date; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; -import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; /** * RangerBaseModelObject class to contain common attributes of Ranger Base object. */ @JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerExportPolicyList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerExportPolicyList.java index a395feb0551e..8f6229708e14 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerExportPolicyList.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerExportPolicyList.java @@ -18,22 +18,26 @@ package org.apache.hadoop.hive.ql.exec.repl.ranger; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.map.annotate.JsonSerialize; + + +import java.util.LinkedHashMap; +import java.util.Map; import javax.xml.bind.annotation.XmlAccessType; import 
javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; -import java.util.LinkedHashMap; -import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; /** * RangerExportPolicyList class to extends RangerPolicyList class. */ @JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonInclude(Include.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerExportPolicyList extends RangerPolicyList implements java.io.Serializable { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicy.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicy.java index 733a8983a474..91119ed40ec7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicy.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicy.java @@ -18,24 +18,26 @@ package org.apache.hadoop.hive.ql.exec.repl.ranger; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import org.codehaus.jackson.map.annotate.JsonSerialize; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlRootElement; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import 
com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + /** * RangerPolicy class to contain Ranger Policy details. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -481,7 +483,7 @@ public StringBuilder toString(StringBuilder sb) { * RangerPolicyResource class to store the resource path values. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -653,7 +655,7 @@ public boolean equals(Object obj) { * RangerPolicyItem class contains ranger policy items like access and permissions. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -914,7 +916,7 @@ public boolean equals(Object obj) { * RangerDataMaskPolicyItem class. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -1005,7 +1007,7 @@ public StringBuilder toString(StringBuilder sb) { * RangerRowFilterPolicyItem class. 
*/ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -1095,7 +1097,7 @@ public StringBuilder toString(StringBuilder sb) { * RangerPolicyItemAccess class. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -1204,7 +1206,7 @@ public boolean equals(Object obj) { * RangerPolicyItemCondition class to store policy conditions. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -1325,7 +1327,7 @@ public boolean equals(Object obj) { * RangerPolicyItemDataMaskInfo store policy having datamasking. */ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) @@ -1439,7 +1441,7 @@ private StringBuilder toString(StringBuilder sb) { * Ranger policyItem Row-filter info class. 
*/ @JsonAutoDetect(fieldVisibility = Visibility.ANY) - @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) + @JsonInclude(Include.NON_NULL) @JsonIgnoreProperties(ignoreUnknown = true) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicyList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicyList.java index 3de935ac5745..3949ba0b3c9e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicyList.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ranger/RangerPolicyList.java @@ -18,22 +18,23 @@ package org.apache.hadoop.hive.ql.exec.repl.ranger; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; -import org.codehaus.jackson.map.annotate.JsonSerialize; - import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import java.util.ArrayList; import java.util.List; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + /** * RangerPolicyList class to contain List of RangerPolicy objects. 
*/ @JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY) -@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL) +@JsonInclude(Include.NON_NULL) @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class RangerPolicyList { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/AmPluginNode.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/AmPluginNode.java index f0a13f72593b..43f0fca69e67 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/AmPluginNode.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/AmPluginNode.java @@ -21,9 +21,9 @@ import org.apache.hadoop.security.token.Token; import org.apache.hive.common.util.Ref; import org.apache.tez.common.security.JobTokenIdentifier; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.annotate.JsonSerialize; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; @JsonSerialize public interface AmPluginNode { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java index bf91fd14084b..1a7fc5bc3eba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java @@ -85,12 +85,12 @@ import org.apache.tez.serviceplugins.api.ServicePluginsDescriptor; import org.apache.tez.serviceplugins.api.TaskCommunicatorDescriptor; import org.apache.tez.serviceplugins.api.TaskSchedulerDescriptor; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.annotate.JsonSerialize; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.tez.monitoring.TezJobMonitor; +import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmEvent.java index fae68ef75b13..b52e3133f248 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmEvent.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmEvent.java @@ -18,11 +18,12 @@ package org.apache.hadoop.hive.ql.exec.tez; import org.apache.hadoop.hive.ql.wm.WmContext; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.annotate.JsonSerialize; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; + /** * Workload Manager events at query level. */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmTezSession.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmTezSession.java index 6004d712c4cf..b12e0dd45f1b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmTezSession.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmTezSession.java @@ -18,20 +18,19 @@ package org.apache.hadoop.hive.ql.exec.tez; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.SettableFuture; - -import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.registry.impl.TezAmInstance; import org.apache.hive.common.util.Ref; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.annotate.JsonSerialize; + +import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.SettableFuture; @JsonSerialize public class WmTezSession extends TezSessionPoolSession implements AmPluginNode { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java index caa798c0961d..33f9a8a34d26 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java @@ -20,6 +20,10 @@ import org.apache.hadoop.hive.metastore.api.WMPoolSchedulingPolicy; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.collect.Sets; @@ -81,9 +85,6 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.Ref; import org.apache.tez.dag.api.TezConfiguration; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -262,9 +263,9 @@ public static WorkloadManager create(String yarnQueue, HiveConf conf, WMFullReso updateResourcePlanAsync(plan).get(); // Wait for the initial resource plan to be applied. 
objectMapper = new ObjectMapper(); - objectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); + objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); // serialize json based on field annotations only - objectMapper.setVisibilityChecker(objectMapper.getSerializationConfig().getDefaultVisibilityChecker() + objectMapper.setVisibility(objectMapper.getSerializationConfig().getDefaultVisibilityChecker() .withGetterVisibility(JsonAutoDetect.Visibility.NONE) .withSetterVisibility(JsonAutoDetect.Visibility.NONE)); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index 6c04abe41be0..5ea4078e2804 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -23,6 +23,7 @@ import java.io.FileNotFoundException; import java.io.IOException; +import java.io.InputStream; import java.io.Serializable; import java.net.URI; import java.net.URISyntaxException; @@ -107,10 +108,10 @@ import org.apache.hive.common.util.Ref; import org.apache.orc.FileFormatException; import org.apache.orc.impl.OrcAcidUtils; -import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import javax.annotation.concurrent.Immutable; @@ -2368,7 +2369,7 @@ static boolean isCompacted(Path baseOrDeltaDir, FileSystem fs, HdfsDirSnapshot d } Path formatFile = new Path(baseOrDeltaDir, METADATA_FILE); try (FSDataInputStream strm = fs.open(formatFile)) { - Map metaData = new ObjectMapper().readValue(strm, Map.class); + Map metaData = new ObjectMapper().readValue((InputStream)strm, Map.class); if (!CURRENT_VERSION.equalsIgnoreCase(metaData.get(Field.VERSION.toString()))) { throw new IllegalStateException("Unexpected Meta Data version: " + metaData.get(Field.VERSION)); } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java index 9a245c9d4a83..f22674fa557d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java @@ -25,8 +25,9 @@ import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.codehaus.jackson.JsonGenerator; -import org.codehaus.jackson.map.ObjectMapper; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.ObjectMapper; /** * Format table and index information for machine readability using diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java index 74dc1cf5328f..e99f3b7ecd0b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/ReplState.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.ql.parse.repl; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,16 +29,14 @@ * Logger class for Repl Events. **/ public abstract class ReplState { - @JsonIgnoreProperties private static final Logger REPL_LOG = LoggerFactory.getLogger("ReplState"); - @JsonIgnoreProperties private static final ObjectMapper mapper = new ObjectMapper(); // Thread-safe. 
static { - mapper.configure(SerializationConfig.Feature.AUTO_DETECT_GETTERS, false); - mapper.configure(SerializationConfig.Feature.AUTO_DETECT_IS_GETTERS, false); - mapper.configure(SerializationConfig.Feature.AUTO_DETECT_FIELDS, false); + mapper.configure(MapperFeature.AUTO_DETECT_GETTERS, false); + mapper.configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false); + mapper.configure(MapperFeature.AUTO_DETECT_FIELDS, false); } public enum LogTag { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java index 4be00b63bcce..aaa13ef23796 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java @@ -22,8 +22,9 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerator; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; import java.io.Closeable; import java.io.IOException; @@ -37,7 +38,7 @@ public class JsonWriter implements Closeable { public JsonWriter(FileSystem fs, Path writePath) throws IOException { OutputStream out = fs.create(writePath); - jsonGenerator = new JsonFactory().createJsonGenerator(out); + jsonGenerator = new JsonFactory().createGenerator(out); jsonGenerator.writeStartObject(); jsonGenerator.writeStringField("version", METADATA_FORMAT_VERSION); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpBegin.java index 8702cc1f9342..fd8a599b8a22 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpBegin.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpBegin.java @@ -19,7 +19,8 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; + +import com.fasterxml.jackson.annotation.JsonProperty; /** * AtlasDumpBegin. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpEnd.java index 920a01362702..32c3f13aff5c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/AtlasDumpEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * AtlasDumpEnd. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpBegin.java index 41b3a8b343f2..09c538263d4c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapDumpBegin extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpEnd.java index d2785414821d..18fcf120f9ae 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapDumpEnd extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpFunction.java index 583d99f3224f..bcbdf08460fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpFunction.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpFunction.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.dump.log.state; import org.apache.hadoop.hive.ql.parse.repl.ReplState; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapDumpFunction extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpTable.java index 455393a5d2ca..a7b1b1607eba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/BootstrapDumpTable.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.ql.parse.repl.ReplState; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapDumpTable extends ReplState { @JsonProperty diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpBegin.java index 7c0afc3260a9..15a63255b2fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalDumpBegin extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEnd.java index 99e8f9df95db..4c9b6116ee62 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalDumpEnd extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEvent.java index 61088b47fee3..a9dbc37a7ec4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEvent.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/IncrementalDumpEvent.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.dump.log.state; import org.apache.hadoop.hive.ql.parse.repl.ReplState; 
-import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalDumpEvent extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java index db3e212a3a7a..96d5439c3309 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * RangerDumpBegin. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java index d7981f96add6..c8c71bafe4ac 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/log/state/RangerDumpEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * RangerDumpEnd. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadBegin.java index f0538341cd55..708207c60040 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * Replication state for Atlas Load Begin. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadEnd.java index 276ec37b0480..9aa0b42b92df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/AtlasLoadEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * Replication state for Atlas Load End. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadBegin.java index 2596a09cd762..da28dab0737c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapLoadBegin extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadEnd.java index 52475fcdf4f0..21d11ef473a3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapLoadEnd extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadFunction.java index cd9e0b169da2..6acf0d6c842f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadFunction.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadFunction.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.log.state; import org.apache.hadoop.hive.ql.parse.repl.ReplState; 
-import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapLoadFunction extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadTable.java index 880f784c31d5..c3c2a9ade8fa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/BootstrapLoadTable.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.ql.parse.repl.ReplState; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class BootstrapLoadTable extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/DataCopyEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/DataCopyEnd.java index 1838f4cda303..204246e64018 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/DataCopyEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/DataCopyEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.parse.repl.ReplState; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class DataCopyEnd extends ReplState { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadBegin.java index f88d05f6a242..e45116bc8f9d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadBegin.java @@ 
-19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalLoadBegin extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEnd.java index c8746b7daa3b..ac71d3a9d93a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEnd.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hadoop.hive.ql.parse.repl.DumpType; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalLoadEnd extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEvent.java index a436bb6d699f..cd143be4799d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEvent.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/IncrementalLoadEvent.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.exec.repl.ReplStatsTracker; import org.apache.hadoop.hive.ql.parse.repl.ReplState; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; public class IncrementalLoadEvent extends ReplState { @JsonProperty diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java index 21534e9ec66d..081b6cc56186 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadBegin.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty; /** * RangerLoadBegin. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java index 6f2e5917d901..c720f4aa9e53 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/log/state/RangerLoadEnd.java @@ -19,7 +19,8 @@ import org.apache.hadoop.hive.ql.parse.repl.ReplState; import org.apache.hive.common.util.SuppressFBWarnings; -import org.codehaus.jackson.annotate.JsonProperty; + +import com.fasterxml.jackson.annotation.JsonProperty; /** * RangerLoadEnd. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java index a8b393c7b10f..9e7e05361f9b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java @@ -26,15 +26,13 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import com.fasterxml.jackson.core.json.JsonReadFeature; +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterators; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.io.Text; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonParser.Feature; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.type.TypeFactory; -import org.codehaus.jackson.type.JavaType; /** * UDFJson. @@ -61,11 +59,17 @@ public class UDFJson extends UDF { private static final Pattern patternKey = Pattern.compile("^([a-zA-Z0-9_\\-\\:\\s]+).*"); private static final Pattern patternIndex = Pattern.compile("\\[([0-9]+|\\*)\\]"); - private static final JavaType MAP_TYPE = TypeFactory.fromClass(Map.class); - private static final JavaType LIST_TYPE = TypeFactory.fromClass(List.class); - private final JsonFactory jsonFactory = new JsonFactory(); - private final ObjectMapper objectMapper = new ObjectMapper(jsonFactory); + private static final ObjectMapper objectMapper = new ObjectMapper(); + private static final JavaType MAP_TYPE = objectMapper.getTypeFactory().constructType(Map.class); + private static final JavaType LIST_TYPE = objectMapper.getTypeFactory().constructType(List.class); + + static { + // Allows for unescaped ASCII control characters in JSON values + objectMapper.enable(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature()); + // Enabled to accept quoting of all character backslash quoting mechanism + 
objectMapper.enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature()); + } // An LRU cache using a linked hash map static class HashCache extends LinkedHashMap { @@ -95,10 +99,6 @@ protected boolean removeEldestEntry(Map.Entry eldest) { Map mKeyMatchesCache = new HashCache(); public UDFJson() { - // Allows for unescaped ASCII control characters in JSON values - jsonFactory.enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS); - // Enabled to accept quoting of all character backslash qooting mechanism - jsonFactory.enable(Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER); } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java index 82feca293b00..6baa07d99b20 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java @@ -25,6 +25,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.core.json.JsonReadFeature; +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -35,11 +40,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.io.Text; -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonParser.Feature; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.type.TypeFactory; -import org.codehaus.jackson.type.JavaType; /** * GenericUDTFJSONTuple: this @@ -53,15 +53,15 @@ public class GenericUDTFJSONTuple extends GenericUDTF { private static final Logger LOG = 
LoggerFactory.getLogger(GenericUDTFJSONTuple.class.getName()); - private static final JsonFactory JSON_FACTORY = new JsonFactory(); + private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final JavaType MAP_TYPE = MAPPER.getTypeFactory().constructType(Map.class); static { // Allows for unescaped ASCII control characters in JSON values - JSON_FACTORY.enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS); + MAPPER.enable(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature()); // Enabled to accept quoting of all character backslash qooting mechanism - JSON_FACTORY.enable(Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER); + MAPPER.enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature()); + } - private static final ObjectMapper MAPPER = new ObjectMapper(JSON_FACTORY); - private static final JavaType MAP_TYPE = TypeFactory.fromClass(Map.class); int numCols; // number of output columns String[] paths; // array of path expressions, each of which corresponds to a column diff --git a/ql/src/java/org/apache/hadoop/hive/ql/wm/Trigger.java b/ql/src/java/org/apache/hadoop/hive/ql/wm/Trigger.java index 4adad7a1b62e..23299f0ef647 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/wm/Trigger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/wm/Trigger.java @@ -15,7 +15,7 @@ */ package org.apache.hadoop.hive.ql.wm; -import org.codehaus.jackson.map.annotate.JsonSerialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; /** * Trigger interface which gets mapped to CREATE TRIGGER .. queries. A trigger can have a name, expression and action. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/wm/WmContext.java b/ql/src/java/org/apache/hadoop/hive/ql/wm/WmContext.java index b896ddc6696e..658042f0a9c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/wm/WmContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/wm/WmContext.java @@ -33,14 +33,15 @@ import org.apache.hadoop.hive.ql.exec.tez.WmEvent; import org.apache.hadoop.hive.ql.exec.tez.monitoring.PrintSummary; import org.apache.hadoop.hive.ql.session.SessionState; -import org.codehaus.jackson.annotate.JsonAutoDetect; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.annotate.JsonProperty; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + /** * Some context information that are required for rule evaluation. 
*/ @@ -204,9 +205,9 @@ public void printJson(final SessionState.LogHelper console) { try { waitForReturnSessionEvent(); ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); + objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); // serialize json based on field annotations only - objectMapper.setVisibilityChecker(objectMapper.getSerializationConfig().getDefaultVisibilityChecker() + objectMapper.setVisibility(objectMapper.getSerializationConfig().getDefaultVisibilityChecker() .withSetterVisibility(JsonAutoDetect.Visibility.NONE)); String wmContextJson = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(this); console.printInfo(""); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java index 2ec9f72c3c8c..f108dbdc069e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java @@ -35,12 +35,13 @@ import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus; import org.apache.hadoop.hive.metastore.api.WMTrigger; import org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.show.formatter.JsonShowResourcePlanFormatter; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.map.ObjectMapper; import org.junit.After; import org.junit.Before; import org.junit.Test; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * Test class for json resource plan formatter. 
*/ diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java index b4f219497e5c..7b6af3b89307 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.codehaus.jackson.JsonNode; -import org.codehaus.jackson.node.JsonNodeFactory; import java.util.ArrayList; import java.util.Arrays; diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index f231c20b406c..aaae6c36fbbd 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -81,7 +81,6 @@ import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; import org.apache.hive.service.server.ThreadWithGarbageCleanup; -import org.codehaus.jackson.map.ObjectMapper; /** * SQLOperation. 
@@ -507,7 +506,7 @@ public String getTaskStatus() throws HiveSQLException { List statuses = driver.getQueryDisplay().getTaskDisplays(); if (statuses != null) { try (final ByteArrayOutputStream out = new ByteArrayOutputStream()) { - new ObjectMapper().writeValue(out, statuses); + QueryDisplay.OBJECT_MAPPER.writeValue(out, statuses); return out.toString(StandardCharsets.UTF_8.name()); } catch (Exception e) { throw new HiveSQLException(e); diff --git a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java index acb58c88fee7..f5dde7efcf73 100644 --- a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java +++ b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java @@ -28,10 +28,11 @@ import org.apache.hive.http.HttpConstants; import org.apache.hive.http.HttpServer; import org.apache.hive.service.server.HiveServer2; -import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * Returns "true" if this HS2 instance is leader else "false". * Invoking a "DELETE" method on this endpoint will trigger a failover if this instance is a leader. 
diff --git a/service/src/java/org/apache/hive/service/servlet/HS2Peers.java b/service/src/java/org/apache/hive/service/servlet/HS2Peers.java index ac5b76c87144..1650753d317e 100644 --- a/service/src/java/org/apache/hive/service/servlet/HS2Peers.java +++ b/service/src/java/org/apache/hive/service/servlet/HS2Peers.java @@ -31,12 +31,13 @@ import org.apache.hive.service.server.HS2ActivePassiveHARegistry; import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient; import org.apache.hive.service.server.HiveServer2Instance; -import org.codehaus.jackson.annotate.JsonIgnore; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializationConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + /** * Returns all HS2 instances in Active-Passive standy modes. */ @@ -65,7 +66,7 @@ public void setHiveServer2Instances(final Collection hiveSe @JsonIgnore public String toJson() throws IOException { ObjectMapper mapper = new ObjectMapper(); - mapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(this); } } diff --git a/service/src/java/org/apache/hive/service/servlet/QueriesRESTfulAPIServlet.java b/service/src/java/org/apache/hive/service/servlet/QueriesRESTfulAPIServlet.java index 7f9bc92aedaf..22c40b254e68 100644 --- a/service/src/java/org/apache/hive/service/servlet/QueriesRESTfulAPIServlet.java +++ b/service/src/java/org/apache/hive/service/servlet/QueriesRESTfulAPIServlet.java @@ -22,16 +22,17 @@ import org.apache.hive.service.cli.operation.OperationManager; import org.apache.hive.service.cli.session.HiveSession; import org.apache.hive.service.cli.session.SessionManager; -import 
org.codehaus.jackson.JsonGenerator; -import org.codehaus.jackson.JsonProcessingException; -import org.codehaus.jackson.Version; -import org.codehaus.jackson.map.JsonSerializer; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.SerializerProvider; -import org.codehaus.jackson.map.module.SimpleModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.Version; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.module.SimpleModule; + import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; @@ -140,7 +141,7 @@ private void sendAsJson( response.setContentType("application/json"); response.setStatus(HttpServletResponse.SC_OK); ObjectMapper mapper = new ObjectMapper(); - SimpleModule module = new SimpleModule("CustomSessionModule", new Version(1, 0, 0, null)); + SimpleModule module = new SimpleModule("CustomSessionModule", new Version(1, 0, 0, null, null, null)); module.addSerializer(HiveSession.class, new HiveSessionSerializer()); mapper.registerModule(module); diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index 120de13299da..ac19b19dbb96 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -21,7 +21,6 @@ import static org.junit.Assert.*; import java.io.ByteArrayInputStream; -import java.io.Serializable; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -40,10 +39,12 @@ import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook; import 
org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.type.TypeReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState; @@ -382,7 +383,7 @@ public void testGlobalCompileLockTimeout() throws Exception { int THREAD_COUNT = 3; @SuppressWarnings("unchecked") - FutureTask[] tasks = (FutureTask[])new FutureTask[THREAD_COUNT]; + FutureTask[] tasks = new FutureTask[THREAD_COUNT]; long longPollingTimeoutMs = 10 * 60 * 1000; // Larger than max compile duration used in test // 1st query acquires the lock and takes 20 secs to compile diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/AcidMetaDataFile.java b/storage-api/src/java/org/apache/hadoop/hive/common/AcidMetaDataFile.java index 4cef0760fe43..e4a9abd2c9d5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/AcidMetaDataFile.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/AcidMetaDataFile.java @@ -17,16 +17,17 @@ */ package org.apache.hadoop.hive.common; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.codehaus.jackson.map.ObjectMapper; - import java.io.IOException; import java.io.OutputStreamWriter; import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import com.fasterxml.jackson.databind.ObjectMapper; + /** * General facility to place a metadata file into a dir created by acid/compactor write. */