Virtual nodes plus weighting are supported. Since different nodes may have different resource configurations, weighting lets load be spread as evenly as possible, while virtual nodes reduce the impact on the whole hash ring when a single node goes down.
Because the key used for hashing can differ from scenario to scenario, a wrapper class Node is provided that accepts a custom key as well as a custom weight.
Thread safety: adding or removing a node means rebuilding the hash ring, so concurrent access has to be considered (not implemented here; a locking sketch follows this list). In the common case the ring only needs to be initialized once.
The hashed value is kept as a String here for generality; it can be optimized for specific scenarios. In my view there is no single best choice, only the one that fits the situation.
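For the thread-safety point above, here is a minimal sketch (not part of the code below) of how the ring could be guarded: a ReentrantReadWriteLock protects the TreeMap, lookups take the read lock, and the hypothetical addNode/removeNode methods patch the ring under the write lock. It reuses the Node wrapper defined further down; the class name and the choice of murmur3_128 are just placeholders.

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical thread-safe variant: writers patch the ring under the write lock,
// readers look up under the read lock.
public class ConcurrentConsistentHash<T> {

    private final HashFunction hashFunction = Hashing.murmur3_128();
    private final int replicas = 160;
    private final TreeMap<String, Node<T>> ring = new TreeMap<>();
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    public void addNode(Node<T> node) {
        lock.writeLock().lock();
        try {
            for (int i = 0; i < replicas * node.getWeight(); i++) {
                ring.put(hash(node.getKey() + i), node);
            }
        } finally {
            lock.writeLock().unlock();
        }
    }

    public void removeNode(Node<T> node) {
        lock.writeLock().lock();
        try {
            // the virtual-node positions are deterministic, so they can be removed one by one
            for (int i = 0; i < replicas * node.getWeight(); i++) {
                ring.remove(hash(node.getKey() + i));
            }
        } finally {
            lock.writeLock().unlock();
        }
    }

    public Node<T> getNode(String key) {
        lock.readLock().lock();
        try {
            if (ring.isEmpty()) {
                return null;
            }
            SortedMap<String, Node<T>> tail = ring.tailMap(hash(key));
            String position = tail.isEmpty() ? ring.firstKey() : tail.firstKey();
            return ring.get(position);
        } finally {
            lock.readLock().unlock();
        }
    }

    private String hash(String key) {
        return hashFunction.hashString(key, StandardCharsets.UTF_8).toString();
    }
}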
/**
 * Wrapper around a real node: lets the caller choose the key used for hashing
 * and a weight that scales the number of virtual nodes.
 */
public final class Node<T> {

    public static final int DEFAULT_WEIGHT = 1;

    // the wrapped real node
    private T target;
    // key used for hashing; defaults to target.toString()
    private String key;
    // weight; multiplies the number of virtual nodes on the ring
    private int weight = DEFAULT_WEIGHT;

    public Node(T target) {
        this.target = target;
        this.key = target.toString();
    }

    public Node(T target, String key) {
        this.target = target;
        this.key = key;
    }

    public Node(T target, String key, int weight) {
        this.target = target;
        this.key = key;
        this.weight = weight;
    }

    public T getTarget() {
        return target;
    }

    public void setTarget(T target) {
        this.target = target;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public int getWeight() {
        return weight;
    }

    public void setWeight(int weight) {
        this.weight = weight;
    }
}
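As a quick usage sketch (the addresses, the custom key and the weight are made up), a node with more capacity simply gets a larger weight; with the default of 160 replicas, a weight of 3 puts 480 virtual nodes on the ring for it.

import java.util.Arrays;
import java.util.List;

public class NodeExample {
    public static void main(String[] args) {
        // weight defaults to 1, key defaults to target.toString()
        Node<String> small = new Node<>("192.168.3.175");
        // custom key plus weight 3: three times as many virtual nodes as "small"
        Node<String> big = new Node<>("192.168.4.175:6379", "cache-big", 3);

        List<Node<String>> nodes = Arrays.asList(small, big);
        System.out.println(nodes.get(1).getWeight()); // prints 3
    }
}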
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Consistent hashing with virtual nodes and per-node weights.
 *
 * @param <T> type of the real node being wrapped
 */
public class ConsistentHash<T> {

    // hash function used both to place nodes and to look up keys
    private HashFunction hashFunction = Hashing.murmur3_32();

    // the hash ring: hashed position -> real node
    private TreeMap<String, Node<T>> nodeMappings = new TreeMap<>();

    // number of virtual nodes per unit of weight
    private int replicas = 160;

    // the real nodes
    private List<Node<T>> realNodes;

    public ConsistentHash(HashFunction hashFunction, List<Node<T>> realNodes, int replicas) {
        this.hashFunction = hashFunction;
        this.realNodes = realNodes;
        this.replicas = replicas;
    }

    public ConsistentHash(HashFunction hashFunction, List<Node<T>> realNodes) {
        this.hashFunction = hashFunction;
        this.realNodes = realNodes;
    }

    public ConsistentHash(List<Node<T>> realNodes, int replicas) {
        this.realNodes = realNodes;
        this.replicas = replicas;
    }

    public ConsistentHash(List<Node<T>> realNodes) {
        this.realNodes = realNodes;
    }

    /**
     * Build the hash ring: each real node gets replicas * weight virtual nodes.
     */
    public void init() {
        realNodes.forEach(node -> {
            for (int i = 0; i < replicas * node.getWeight(); i++) {
                nodeMappings.put(hashCode(node.getKey() + i), node);
            }
        });
    }

    /**
     * Hash a key to its position on the ring. HashCode.toString() gives the hex
     * form of the hash, a stable String that compares consistently across calls.
     */
    public String hashCode(String key) {
        return hashFunction.hashString(key, StandardCharsets.UTF_8).toString();
    }

    /**
     * Walk clockwise from the key's position and return the first node found;
     * wrap around to the first entry of the ring if nothing follows.
     */
    public Node<T> getNode(String key) {
        SortedMap<String, Node<T>> tailMap = nodeMappings.tailMap(hashCode(key));
        if (tailMap.isEmpty()) {
            return nodeMappings.get(nodeMappings.firstKey());
        }
        return tailMap.get(tailMap.firstKey());
    }

    public static void main(String[] args) {
        List<Node<String>> realNodes = Arrays.asList(
                new Node<>("127.0.0.1"), new Node<>("192.168.4.175"),
                new Node<>("192.168.3.175"), new Node<>("172.147.0.101"));
        ConsistentHash<String> consistentHash = new ConsistentHash<>(Hashing.md5(), realNodes);
        consistentHash.init();

        // count how many of 10000 keys land on each node
        List<Node<String>> one = new ArrayList<>();
        List<Node<String>> two = new ArrayList<>();
        List<Node<String>> three = new ArrayList<>();
        List<Node<String>> four = new ArrayList<>();
        for (int i = 0; i < 10000; i++) {
            Node<String> node = consistentHash.getNode("node" + i);
            if (node.getKey().equals("127.0.0.1")) {
                one.add(node);
            }
            if (node.getKey().equals("192.168.4.175")) {
                two.add(node);
            }
            if (node.getKey().equals("192.168.3.175")) {
                three.add(node);
            }
            if (node.getKey().equals("172.147.0.101")) {
                four.add(node);
            }
        }
        System.out.println(one.size());
        System.out.println(two.size());
        System.out.println(three.size());
        System.out.println(four.size());
    }
}
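To check the "limited impact" claim from the first point, the sketch below (not part of the class above; the class name is made up) builds the ring twice, once with all four nodes and once with 172.147.0.101 left out, and counts how many of the same 10000 keys change owner. With virtual nodes, only roughly the removed node's share of keys should move; the rest keep their original node.

import com.google.common.hash.Hashing;

import java.util.Arrays;
import java.util.List;

public class RemovalImpactDemo {
    public static void main(String[] args) {
        List<Node<String>> all = Arrays.asList(
                new Node<>("127.0.0.1"), new Node<>("192.168.4.175"),
                new Node<>("192.168.3.175"), new Node<>("172.147.0.101"));
        // the same nodes minus 172.147.0.101, simulating its removal
        List<Node<String>> withoutOne = all.subList(0, 3);

        ConsistentHash<String> before = new ConsistentHash<>(Hashing.md5(), all);
        ConsistentHash<String> after = new ConsistentHash<>(Hashing.md5(), withoutOne);
        before.init();
        after.init();

        int moved = 0;
        for (int i = 0; i < 10000; i++) {
            String key = "node" + i;
            if (!before.getNode(key).getKey().equals(after.getNode(key).getKey())) {
                moved++;
            }
        }
        // only keys that used to belong to the removed node should change owner
        System.out.println("keys remapped: " + moved + " of 10000");
    }
}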