介绍

HashSet和LinkedHashSet都是不可重复的元素集合,HashSet通过HashMap实现,LinkedHashSet通过LinkedHashMap来实现,所以LinkedHashSet迭代是有顺序的,和add时的顺序一致,HashSet迭代是无序的。
类图如下:
image.png

HashSet

底层利用HashMap保存所有的元素,元素都保存在map的key上,因为map的key都是不重复的,所以间接实现了去重的效果,map的value都固定是一个静态的Object对象,称之为PRESENT。

  1. public class HashSet<E>
  2. extends AbstractSet<E>
  3. implements Set<E>, Cloneable, java.io.Serializable
  4. {
  5. static final long serialVersionUID = -5024744406713321676L;
  6. // Backing map: every element of the set is stored as a key of this map.
  // Keys of a HashMap are unique, which is what gives the set its no-duplicates property.
  7. private transient HashMap<E,Object> map;
  8. // Dummy value to associate with an Object in the backing Map
  9. // (the value mapped to every key is this single shared sentinel)
  10. private static final Object PRESENT = new Object();
  11. // Default constructor: empty backing HashMap (default capacity 16, load factor 0.75)
  12. public HashSet() {
  13. map = new HashMap<>();
  14. }
  15. // Builds a set containing the elements of an existing collection
  16. public HashSet(Collection<? extends E> c) {
  17. // Capacity = c.size()/0.75 + 1, sized for the default load factor so that
  // adding all of c should not trigger a resize; never smaller than 16
  18. map = new HashMap<>(Math.max((int) (c.size()/.75f) + 1, 16));
  19. addAll(c);
  20. }
  21. // Custom initial capacity and load factor
  22. public HashSet(int initialCapacity, float loadFactor) {
  23. map = new HashMap<>(initialCapacity, loadFactor);
  24. }
  25. // Custom initial capacity (default load factor)
  26. public HashSet(int initialCapacity) {
  27. map = new HashMap<>(initialCapacity);
  28. }
  29. // dummy is never read; its only purpose is to give this overload a signature
  // distinct from the (int, float) constructor above.
  30. // Note the missing access modifier: package-private, reserved for the subclass
  // LinkedHashSet, which uses it to get a LinkedHashMap as the backing map.
  31. HashSet(int initialCapacity, float loadFactor, boolean dummy) {
  32. map = new LinkedHashMap<>(initialCapacity, loadFactor);
  33. }
  // The query operations below simply delegate to the backing map.
  34. public Iterator<E> iterator() {
  35. return map.keySet().iterator();
  36. }
  37. public int size() {
  38. return map.size();
  39. }
  40. public boolean isEmpty() {
  41. return map.isEmpty();
  42. }
  43. public boolean contains(Object o) {
  44. return map.containsKey(o);
  45. }
  46. // Adds e as a key with the fixed PRESENT value; map.put returns null
  // only when the key was absent, i.e. exactly when the set changed
  47. public boolean add(E e) {
  48. return map.put(e, PRESENT)==null;
  49. }
  50. // Removes o; map.remove returns PRESENT (the value of every stored key)
  // iff o was actually present in the set
  51. public boolean remove(Object o) {
  52. return map.remove(o)==PRESENT;
  53. }
  54. public void clear() {
  55. map.clear();
  56. }
  57. @SuppressWarnings("unchecked")
  58. public Object clone() {
  // Shallow field copy via Object.clone, then clone the backing map as well
  // so the two sets do not share structure (elements themselves are not cloned)
  59. try {
  60. HashSet<E> newSet = (HashSet<E>) super.clone();
  61. newSet.map = (HashMap<E, Object>) map.clone();
  62. return newSet;
  63. } catch (CloneNotSupportedException e) {
  64. throw new InternalError(e);
  65. }
  66. }
  // Custom serialization: writes the map's capacity, load factor and size,
  // then each element; only the keys are written, PRESENT is not serialized.
  67. private void writeObject(java.io.ObjectOutputStream s)
  68. throws java.io.IOException {
  69. // Write out any hidden serialization magic
  70. s.defaultWriteObject();
  71. // Write out HashMap capacity and load factor
  72. s.writeInt(map.capacity());
  73. s.writeFloat(map.loadFactor());
  74. // Write out size
  75. s.writeInt(map.size());
  76. // Write out all elements in the proper order.
  77. for (E e : map.keySet())
  78. s.writeObject(e);
  79. }
  // Custom deserialization: validates the stream's header fields, rebuilds the
  // backing map and re-inserts each element with the PRESENT sentinel.
  80. private void readObject(java.io.ObjectInputStream s)
  81. throws java.io.IOException, ClassNotFoundException {
  82. // Read in any hidden serialization magic
  83. s.defaultReadObject();
  84. // Read capacity and verify non-negative.
  85. int capacity = s.readInt();
  86. if (capacity < 0) {
  87. throw new InvalidObjectException("Illegal capacity: " +
  88. capacity);
  89. }
  90. // Read load factor and verify positive and non NaN.
  91. float loadFactor = s.readFloat();
  92. if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
  93. throw new InvalidObjectException("Illegal load factor: " +
  94. loadFactor);
  95. }
  96. // Read size and verify non-negative.
  97. int size = s.readInt();
  98. if (size < 0) {
  99. throw new InvalidObjectException("Illegal size: " +
  100. size);
  101. }
  102. // Set the capacity according to the size and load factor ensuring that
  103. // the HashMap is at least 25% full but clamping to maximum capacity.
  104. capacity = (int) Math.min(size * Math.min(1 / loadFactor, 4.0f),
  105. HashMap.MAXIMUM_CAPACITY);
  106. // Create backing HashMap
  // Subclass-aware: a deserialized LinkedHashSet gets a LinkedHashMap
  // so that insertion order is preserved, mirroring the dummy constructor.
  107. map = (((HashSet<?>)this) instanceof LinkedHashSet ?
  108. new LinkedHashMap<E,Object>(capacity, loadFactor) :
  109. new HashMap<E,Object>(capacity, loadFactor));
  110. // Read in all elements in the proper order.
  111. for (int i=0; i<size; i++) {
  112. @SuppressWarnings("unchecked")
  113. E e = (E) s.readObject();
  114. map.put(e, PRESENT);
  115. }
  116. }
  // Spliterator over the backing map's keys (uses HashMap's
  // package-private KeySpliterator directly)
  117. public Spliterator<E> spliterator() {
  118. return new HashMap.KeySpliterator<E,Object>(map, 0, -1, 0, 0);
  119. }
  120. }

LinkedHashSet

LinkedHashSet完全复用了HashSet的各个方法,自身只需提供不同的构造函数:它们都调用HashSet的包内构造函数(带dummy参数的那个),使底层构造出LinkedHashMap。
因为LinkedHashMap是HashMap的子类,且其节点之间维护了插入顺序的双向链表,所以LinkedHashSet的元素迭代顺序与插入顺序一致。

  1. public class LinkedHashSet<E>
  2. extends HashSet<E>
  3. implements Set<E>, Cloneable, java.io.Serializable {
  4. private static final long serialVersionUID = -2851667679971038690L;
  5. // Delegates to HashSet's package-private (int, float, boolean) constructor,
  // which actually creates a LinkedHashMap(initialCapacity, loadFactor) as the backing map
  6. public LinkedHashSet(int initialCapacity, float loadFactor) {
  7. super(initialCapacity, loadFactor, true);
  8. }
  9. // Same, with the default load factor 0.75
  10. public LinkedHashSet(int initialCapacity) {
  11. super(initialCapacity, .75f, true);
  12. }
  13. // Default: capacity 16, load factor 0.75
  14. public LinkedHashSet() {
  15. super(16, .75f, true);
  16. }
  17. // Builds a set from a collection; initial capacity is the larger of
  // 2*c.size() and 11 (note: a different sizing formula than HashSet uses)
  18. public LinkedHashSet(Collection<? extends E> c) {
  19. super(Math.max(2*c.size(), 11), .75f, true);
  20. addAll(c);
  21. }
  // Unlike HashSet's spliterator, this one reports ORDERED, because
  // iteration follows insertion order of the backing LinkedHashMap
  22. @Override
  23. public Spliterator<E> spliterator() {
  24. return Spliterators.spliterator(this, Spliterator.DISTINCT | Spliterator.ORDERED);
  25. }
  26. }