1. hadoop fs -put udf_DZRI.jar /hive_udf
  2. beeline -u "jdbc:hive2://192.168.1.101:10000" -n hive -p 123456
  3. add jar hdfs://192.168.1.101/hive_udf/udf_DZRI.jar;
  4. drop permanent function trans;
  5. create permanent function trans as 'com.hive.TransportDZRI' using jar 'hdfs://192.168.1.101/hive_udf/udf_DZRI.jar';

### UDF一进一出

  • org.apache.hadoop.hive.ql.udf.generic.GenericUDF
```java
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class MyLengthUDF extends GenericUDF {

    /**
     * Initialize: validate the argument count and declare the return type.
     * Called once per query; the returned ObjectInspector tells Hive this
     * UDF produces a Java int.
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] objectInspectors) throws UDFArgumentException {
        // Exactly one argument is expected.
        if (objectInspectors.length != 1) {
            throw new UDFArgumentException("参数个数错误");
        }

        return PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    }

    /**
     * Evaluate: return the length of the input string, or 0 for a NULL input.
     * BUG FIX: the original called deferredObjects[0].get().toString() first
     * and checked for null afterwards — a NULL column value therefore threw a
     * NullPointerException and the null check was dead code. Check the raw
     * value for null BEFORE converting it to a string.
     */
    @Override
    public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
        Object value = deferredObjects[0].get();
        if (value == null) {
            return 0;
        }
        return value.toString().length();
    }

    /**
     * Shown in EXPLAIN plans. Returning null (as the original did) can break
     * plan rendering; return a descriptive call string instead.
     */
    @Override
    public String getDisplayString(String[] strings) {
        return "my_length(" + String.join(", ", strings) + ")";
    }

}
```

<a name="KyULl"></a>
### UDTF多进一出

- org.apache.hadoop.hive.ql.udf.generic.GenericUDTF
- select myudtf("hello,world,hadoop,hive",",");  

>>><br />hello<br />world<br />hadoop<br />hive
```java
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.ArrayList;
import java.util.List;

public class MyUDTF extends GenericUDTF {

    // Reusable single-column output row; cleared and refilled for every token.
    private final List<String> row = new ArrayList<>();

    /**
     * Declare the output schema: one string column with the default
     * name "_col".
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {

        // Default column name for the generated rows.
        List<String> columnNames = new ArrayList<>();
        columnNames.add("_col");

        // Each output value is a plain Java string.
        List<ObjectInspector> columnTypes = new ArrayList<>();
        columnTypes.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnTypes);
    }

    /**
     * Split the first argument by the second (a regex delimiter) and emit
     * one row per token.
     */
    @Override
    public void process(Object[] objects) throws HiveException {

        // First argument: the text to explode.
        String text = objects[0].toString();

        // Second argument: the delimiter passed to String.split.
        String delimiter = objects[1].toString();

        // Forward each token as its own single-column row.
        for (String token : text.split(delimiter)) {
            // The row list is reused — clear it before adding the next token.
            row.clear();
            row.add(token);
            forward(row);
        }

    }

    @Override
    public void close() throws HiveException {
        // Nothing to clean up.
    }
}
```

### UDAF多进一出

  • 不常用