最终代码(The Final Code)

  1. use std::alloc::{self, Layout};
  2. use std::marker::PhantomData;
  3. use std::mem;
  4. use std::ops::{Deref, DerefMut};
  5. use std::ptr::{self, NonNull};
  6. struct RawVec<T> {
  7. ptr: NonNull<T>,
  8. cap: usize,
  9. _marker: PhantomData<T>,
  10. }
  11. unsafe impl<T: Send> Send for RawVec<T> {}
  12. unsafe impl<T: Sync> Sync for RawVec<T> {}
  13. impl<T> RawVec<T> {
  14. fn new() -> Self {
  15. // !0 is usize::MAX. This branch should be stripped at compile time.
  16. let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
  17. // `NonNull::dangling()` doubles as "unallocated" and "zero-sized allocation"
  18. RawVec {
  19. ptr: NonNull::dangling(),
  20. cap: cap,
  21. _marker: PhantomData,
  22. }
  23. }
  24. fn grow(&mut self) {
  25. // since we set the capacity to usize::MAX when T has size 0,
  26. // getting to here necessarily means the Vec is overfull.
  27. assert!(mem::size_of::<T>() != 0, "capacity overflow");
  28. let (new_cap, new_layout) = if self.cap == 0 {
  29. (1, Layout::array::<T>(1).unwrap())
  30. } else {
  31. // This can't overflow because we ensure self.cap <= isize::MAX.
  32. let new_cap = 2 * self.cap;
  33. // `Layout::array` checks that the number of bytes is <= usize::MAX,
  34. // but this is redundant since old_layout.size() <= isize::MAX,
  35. // so the `unwrap` should never fail.
  36. let new_layout = Layout::array::<T>(new_cap).unwrap();
  37. (new_cap, new_layout)
  38. };
  39. // Ensure that the new allocation doesn't exceed `isize::MAX` bytes.
  40. assert!(
  41. new_layout.size() <= isize::MAX as usize,
  42. "Allocation too large"
  43. );
  44. let new_ptr = if self.cap == 0 {
  45. unsafe { alloc::alloc(new_layout) }
  46. } else {
  47. let old_layout = Layout::array::<T>(self.cap).unwrap();
  48. let old_ptr = self.ptr.as_ptr() as *mut u8;
  49. unsafe { alloc::realloc(old_ptr, old_layout, new_layout.size()) }
  50. };
  51. // If allocation fails, `new_ptr` will be null, in which case we abort.
  52. self.ptr = match NonNull::new(new_ptr as *mut T) {
  53. Some(p) => p,
  54. None => alloc::handle_alloc_error(new_layout),
  55. };
  56. self.cap = new_cap;
  57. }
  58. }
  59. impl<T> Drop for RawVec<T> {
  60. fn drop(&mut self) {
  61. let elem_size = mem::size_of::<T>();
  62. if self.cap != 0 && elem_size != 0 {
  63. unsafe {
  64. alloc::dealloc(
  65. self.ptr.as_ptr() as *mut u8,
  66. Layout::array::<T>(self.cap).unwrap(),
  67. );
  68. }
  69. }
  70. }
  71. }
/// A simplified re-implementation of `std::vec::Vec`.
pub struct Vec<T> {
    // Owns the allocation: `buf.cap` slots, of which the first `len`
    // are initialized.
    buf: RawVec<T>,
    len: usize,
}
  76. impl<T> Vec<T> {
  77. fn ptr(&self) -> *mut T {
  78. self.buf.ptr.as_ptr()
  79. }
  80. fn cap(&self) -> usize {
  81. self.buf.cap
  82. }
  83. pub fn new() -> Self {
  84. Vec {
  85. buf: RawVec::new(),
  86. len: 0,
  87. }
  88. }
  89. pub fn push(&mut self, elem: T) {
  90. if self.len == self.cap() {
  91. self.buf.grow();
  92. }
  93. unsafe {
  94. ptr::write(self.ptr().add(self.len), elem);
  95. }
  96. // Can't overflow, we'll OOM first.
  97. self.len += 1;
  98. }
  99. pub fn pop(&mut self) -> Option<T> {
  100. if self.len == 0 {
  101. None
  102. } else {
  103. self.len -= 1;
  104. unsafe { Some(ptr::read(self.ptr().add(self.len))) }
  105. }
  106. }
  107. pub fn insert(&mut self, index: usize, elem: T) {
  108. assert!(index <= self.len, "index out of bounds");
  109. if self.cap() == self.len { self.buf.grow(); }
  110. unsafe {
  111. ptr::copy(
  112. self.ptr().add(index),
  113. self.ptr().add(index + 1),
  114. self.len - index,
  115. );
  116. ptr::write(self.ptr().add(index), elem);
  117. self.len += 1;
  118. }
  119. }
  120. pub fn remove(&mut self, index: usize) -> T {
  121. assert!(index < self.len, "index out of bounds");
  122. unsafe {
  123. self.len -= 1;
  124. let result = ptr::read(self.ptr().add(index));
  125. ptr::copy(
  126. self.ptr().add(index + 1),
  127. self.ptr().add(index),
  128. self.len - index,
  129. );
  130. result
  131. }
  132. }
  133. pub fn into_iter(self) -> IntoIter<T> {
  134. unsafe {
  135. let iter = RawValIter::new(&self);
  136. let buf = ptr::read(&self.buf);
  137. mem::forget(self);
  138. IntoIter {
  139. iter: iter,
  140. _buf: buf,
  141. }
  142. }
  143. }
  144. pub fn drain(&mut self) -> Drain<T> {
  145. unsafe {
  146. let iter = RawValIter::new(&self);
  147. // this is a mem::forget safety thing. If Drain is forgotten, we just
  148. // leak the whole Vec's contents. Also we need to do this *eventually*
  149. // anyway, so why not do it now?
  150. self.len = 0;
  151. Drain {
  152. iter: iter,
  153. vec: PhantomData,
  154. }
  155. }
  156. }
  157. }
  158. impl<T> Drop for Vec<T> {
  159. fn drop(&mut self) {
  160. while let Some(_) = self.pop() {}
  161. // deallocation is handled by RawVec
  162. }
  163. }
impl<T> Deref for Vec<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        // SAFETY: `ptr()` is valid for reads of `len` initialized elements.
        unsafe { std::slice::from_raw_parts(self.ptr(), self.len) }
    }
}
impl<T> DerefMut for Vec<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        // SAFETY: as above, and `&mut self` guarantees unique access.
        unsafe { std::slice::from_raw_parts_mut(self.ptr(), self.len) }
    }
}
  175. struct RawValIter<T> {
  176. start: *const T,
  177. end: *const T,
  178. }
  179. impl<T> RawValIter<T> {
  180. unsafe fn new(slice: &[T]) -> Self {
  181. RawValIter {
  182. start: slice.as_ptr(),
  183. end: if mem::size_of::<T>() == 0 {
  184. ((slice.as_ptr() as usize) + slice.len()) as *const _
  185. } else if slice.len() == 0 {
  186. slice.as_ptr()
  187. } else {
  188. slice.as_ptr().add(slice.len())
  189. },
  190. }
  191. }
  192. }
  193. impl<T> Iterator for RawValIter<T> {
  194. type Item = T;
  195. fn next(&mut self) -> Option<T> {
  196. if self.start == self.end {
  197. None
  198. } else {
  199. unsafe {
  200. let result = ptr::read(self.start);
  201. self.start = if mem::size_of::<T>() == 0 {
  202. (self.start as usize + 1) as *const _
  203. } else {
  204. self.start.offset(1)
  205. };
  206. Some(result)
  207. }
  208. }
  209. }
  210. fn size_hint(&self) -> (usize, Option<usize>) {
  211. let elem_size = mem::size_of::<T>();
  212. let len = (self.end as usize - self.start as usize) /
  213. if elem_size == 0 { 1 } else { elem_size };
  214. (len, Some(len))
  215. }
  216. }
  217. impl<T> DoubleEndedIterator for RawValIter<T> {
  218. fn next_back(&mut self) -> Option<T> {
  219. if self.start == self.end {
  220. None
  221. } else {
  222. unsafe {
  223. self.end = if mem::size_of::<T>() == 0 {
  224. (self.end as usize - 1) as *const _
  225. } else {
  226. self.end.offset(-1)
  227. };
  228. Some(ptr::read(self.end))
  229. }
  230. }
  231. }
  232. }
/// By-value iterator returned from `Vec::into_iter`.
pub struct IntoIter<T> {
    _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
    iter: RawValIter<T>,
}
  237. impl<T> Iterator for IntoIter<T> {
  238. type Item = T;
  239. fn next(&mut self) -> Option<T> {
  240. self.iter.next()
  241. }
  242. fn size_hint(&self) -> (usize, Option<usize>) {
  243. self.iter.size_hint()
  244. }
  245. }
  246. impl<T> DoubleEndedIterator for IntoIter<T> {
  247. fn next_back(&mut self) -> Option<T> {
  248. self.iter.next_back()
  249. }
  250. }
  251. impl<T> Drop for IntoIter<T> {
  252. fn drop(&mut self) {
  253. for _ in &mut *self {}
  254. }
  255. }
/// Draining iterator returned from `Vec::drain`: yields elements by value
/// while the vector keeps its allocation.
pub struct Drain<'a, T: 'a> {
    // Mutably borrows the Vec for 'a so it can't be touched mid-drain.
    vec: PhantomData<&'a mut Vec<T>>,
    iter: RawValIter<T>,
}
  260. impl<'a, T> Iterator for Drain<'a, T> {
  261. type Item = T;
  262. fn next(&mut self) -> Option<T> {
  263. self.iter.next()
  264. }
  265. fn size_hint(&self) -> (usize, Option<usize>) {
  266. self.iter.size_hint()
  267. }
  268. }
  269. impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
  270. fn next_back(&mut self) -> Option<T> {
  271. self.iter.next_back()
  272. }
  273. }
  274. impl<'a, T> Drop for Drain<'a, T> {
  275. fn drop(&mut self) {
  276. // pre-drain the iter
  277. for _ in &mut *self {}
  278. }
  279. }
// Driver plus smoke tests (rendered as hidden lines in the original
// book-style source).
fn main() {
    tests::create_push_pop();
    tests::iter_test();
    tests::test_drain();
    tests::test_zst();
    println!("All tests finished OK");
}

mod tests {
    use super::*;

    // push/pop/insert/remove round-trip on a small vector, plus mutation
    // through the DerefMut slice view.
    pub fn create_push_pop() {
        let mut v = Vec::new();
        v.push(1);
        assert_eq!(1, v.len());
        assert_eq!(1, v[0]);
        for i in v.iter_mut() {
            *i += 1;
        }
        v.insert(0, 5);
        let x = v.pop();
        assert_eq!(Some(2), x);
        assert_eq!(1, v.len());
        v.push(10);
        let x = v.remove(0);
        assert_eq!(5, x);
        assert_eq!(1, v.len());
    }

    // into_iter: front and back ends yield the expected boxed values, and
    // dropping a partially-consumed iterator frees the rest.
    pub fn iter_test() {
        let mut v = Vec::new();
        for i in 0..10 {
            v.push(Box::new(i))
        }
        let mut iter = v.into_iter();
        let first = iter.next().unwrap();
        let last = iter.next_back().unwrap();
        drop(iter);
        assert_eq!(0, *first);
        assert_eq!(9, *last);
    }

    // drain: empties the Vec (len == 0) but leaves it usable afterwards.
    pub fn test_drain() {
        let mut v = Vec::new();
        for i in 0..10 {
            v.push(Box::new(i))
        }
        {
            let mut drain = v.drain();
            let first = drain.next().unwrap();
            let last = drain.next_back().unwrap();
            assert_eq!(0, *first);
            assert_eq!(9, *last);
        }
        assert_eq!(0, v.len());
        v.push(Box::new(1));
        assert_eq!(1, *v.pop().unwrap());
    }

    // Zero-sized types: push and iterate 10 unit values; exercises the
    // ZST paths that never touch the allocator.
    pub fn test_zst() {
        let mut v = Vec::new();
        for _i in 0..10 {
            v.push(())
        }

        let mut count = 0;

        for _ in v.into_iter() {
            count += 1
        }

        assert_eq!(10, count);
    }
}