Hadoop MapReduce自定义数据类型时注意顺序,否则报错。

自定义数据类型,实现Writable接口,重写write方法和readFields方法时,在操作字段时,必须保证顺序。如果在write方法中先写id字段,则在readFields中也要先读id字段,否则报错。示例代码如下:

package com.my.hadoop;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.io.Writable;

/**
 * A custom Hadoop Writable carrying an (id, name) pair.
 *
 * <p>The serialization order in {@link #write(DataOutput)} must exactly match the
 * deserialization order in {@link #readFields(DataInput)}: if {@code id} is written
 * first, {@code id} must be read first, otherwise deserialization corrupts the data
 * or throws.
 */
public class PairWritable implements Writable {

    private int id;
    private String name;

    /** No-arg constructor required by Hadoop, which instantiates Writables reflectively. */
    public PairWritable() {
    }

    /**
     * Creates a pair with both fields initialized.
     *
     * @param id   numeric identifier
     * @param name associated name; must be non-null if this instance will be serialized,
     *             since {@link DataOutput#writeUTF(String)} rejects null
     */
    public PairWritable(int id, String name) {
        set(id, name);
    }

    /**
     * Sets both fields at once, allowing a single instance to be reused across records
     * (the usual Hadoop pattern to avoid per-record allocation).
     */
    public void set(int id, String name) {
        this.setId(id);
        this.setName(name);
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return id + "\t" + name;
    }

    /**
     * Serializes the fields. The write order (id, then name) must stay in sync with
     * the read order in {@link #readFields(DataInput)}.
     *
     * @param dataOutput sink to write the fields to
     * @throws IOException if the underlying stream fails
     */
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeInt(id);
        dataOutput.writeUTF(name);
    }

    /**
     * Deserializes the fields in the same order they were written:
     * id first, then name.
     *
     * @param dataInput source to read the fields from
     * @throws IOException if the underlying stream fails
     */
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        this.id = dataInput.readInt();
        this.name = dataInput.readUTF();
    }

    /**
     * Value-based hash consistent with {@link #equals(Object)}. The original
     * delegated to {@code super.hashCode()} (identity hash), so two content-equal
     * pairs would land in different hash buckets.
     */
    @Override
    public int hashCode() {
        return Objects.hash(id, name);
    }

    /**
     * Value-based equality on both fields. The original delegated to
     * {@code super.equals(obj)} (reference identity), which made the override a
     * misleading no-op.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof PairWritable)) {
            return false;
        }
        PairWritable other = (PairWritable) obj;
        return id == other.id && Objects.equals(name, other.name);
    }
}
相关文章
相关标签/搜索