HIVE的自定义函数,有UDF,UDAF,UDTF,分别实现一进一出、多进一出、一进多出的操作。
今天只分享一个UDTF的使用,另外两个之后再分享。
场景:使用UDTF解析JSON串。
废话不多说了,直接上代码。
要想写UDTF必须继承GenericUDTF类,并实现initialize、process、close三个方法:initialize定义每行的列名及类型;process方法是对数据的操作,就是把一行拆成多行(注意一行有多列的话,需要是个集合);close方法可以不实现。
package dw.udf; import java.util.ArrayList; import java.util.Iterator; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.json.JSONArray; import org.json.JSONObject; public class JsonParse extends GenericUDTF {//集成GenericUDTF @Override public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException { if (args.length != 1) { throw new UDFArgumentLengthException("ExplodeMap takes only one argument"); } if (args[0].getCategory() != ObjectInspector.Category.PRIMITIVE) { throw new UDFArgumentException("ExplodeMap takes string as a parameter"); } ArrayList<String> fieldNames = new ArrayList<String>();//这里是列的 ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(); fieldNames.add("containerid"); fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); fieldNames.add("first_step"); fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); fieldNames.add("second_step"); fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); fieldNames.add("third_step"); fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs); } @SuppressWarnings("rawtypes") @Override public void process(Object[] args) throws HiveException { try { JSONObject obj = new JSONObject(args[0].toString()); Iterator it = obj.keys(); while (it.hasNext()) { String key = (String) it.next(); JSONArray array = 
obj.getJSONArray(key); if (key.indexOf("&") != -1) { key = key.substring(0, key.indexOf("&")); } String[] outstr = new String[4]; outstr[0] = key; for (int i = 0; i < array.length(); i++) { outstr[i + 1] = array.getString(i); } forward(outstr); } } catch (Exception e) { e.printStackTrace(); } } @Override public void close() throws HiveException { } }