Skip to content

Commit 2391497

Browse files
committed
Rename some identifiers in RawVec and libarena.
- Use `len` more consistently for the number of elements in a vector, because that's the usual name.
- Use `additional` more consistently for the number of elements we want to add, because that's what `Vec::reserve()` uses.
- Use `cap` consistently rather than `capacity`.
- Plus a few other tweaks.

This increases consistency and conciseness.
1 parent 301d0af commit 2391497

File tree

3 files changed

+62
-79
lines changed

3 files changed

+62
-79
lines changed

src/liballoc/raw_vec.rs

+32-49
Original file line numberDiff line numberDiff line change
@@ -211,13 +211,13 @@ impl<T, A: AllocRef> RawVec<T, A> {
211211
}
212212
}
213213

214-
/// Ensures that the buffer contains at least enough space to hold
215-
/// `used_capacity + needed_extra_capacity` elements. If it doesn't already have
216-
/// enough capacity, will reallocate enough space plus comfortable slack
217-
/// space to get amortized `O(1)` behavior. Will limit this behavior
218-
/// if it would needlessly cause itself to panic.
214+
/// Ensures that the buffer contains at least enough space to hold `len +
215+
/// additional` elements. If it doesn't already have enough capacity, will
216+
/// reallocate enough space plus comfortable slack space to get amortized
217+
/// `O(1)` behavior. Will limit this behavior if it would needlessly cause
218+
/// itself to panic.
219219
///
220-
/// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate
220+
/// If `len` exceeds `self.capacity()`, this may fail to actually allocate
221221
/// the requested space. This is not really unsafe, but the unsafe
222222
/// code *you* write that relies on the behavior of this function may break.
223223
///
@@ -263,37 +263,32 @@ impl<T, A: AllocRef> RawVec<T, A> {
263263
/// # vector.push_all(&[1, 3, 5, 7, 9]);
264264
/// # }
265265
/// ```
266-
pub fn reserve(&mut self, used_capacity: usize, needed_extra_capacity: usize) {
267-
match self.try_reserve(used_capacity, needed_extra_capacity) {
266+
pub fn reserve(&mut self, len: usize, additional: usize) {
267+
match self.try_reserve(len, additional) {
268268
Err(CapacityOverflow) => capacity_overflow(),
269269
Err(AllocError { layout, .. }) => handle_alloc_error(layout),
270270
Ok(()) => { /* yay */ }
271271
}
272272
}
273273

274274
/// The same as `reserve`, but returns on errors instead of panicking or aborting.
275-
pub fn try_reserve(
276-
&mut self,
277-
used_capacity: usize,
278-
needed_extra_capacity: usize,
279-
) -> Result<(), TryReserveError> {
280-
if self.needs_to_grow(used_capacity, needed_extra_capacity) {
281-
self.grow_amortized(used_capacity, needed_extra_capacity)
275+
pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
276+
if self.needs_to_grow(len, additional) {
277+
self.grow_amortized(len, additional)
282278
} else {
283279
Ok(())
284280
}
285281
}
286282

287-
/// Ensures that the buffer contains at least enough space to hold
288-
/// `used_capacity + needed_extra_capacity` elements. If it doesn't already,
289-
/// will reallocate the minimum possible amount of memory necessary.
290-
/// Generally this will be exactly the amount of memory necessary,
291-
/// but in principle the allocator is free to give back more than what
292-
/// we asked for.
283+
/// Ensures that the buffer contains at least enough space to hold `len +
284+
/// additional` elements. If it doesn't already, will reallocate the
285+
/// minimum possible amount of memory necessary. Generally this will be
286+
/// exactly the amount of memory necessary, but in principle the allocator
287+
/// is free to give back more than we asked for.
293288
///
294-
/// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate
295-
/// the requested space. This is not really unsafe, but the unsafe
296-
/// code *you* write that relies on the behavior of this function may break.
289+
/// If `len` exceeds `self.capacity()`, this may fail to actually allocate
290+
/// the requested space. This is not really unsafe, but the unsafe code
291+
/// *you* write that relies on the behavior of this function may break.
297292
///
298293
/// # Panics
299294
///
@@ -304,8 +299,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
304299
/// # Aborts
305300
///
306301
/// Aborts on OOM.
307-
pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize) {
308-
match self.try_reserve_exact(used_capacity, needed_extra_capacity) {
302+
pub fn reserve_exact(&mut self, len: usize, additional: usize) {
303+
match self.try_reserve_exact(len, additional) {
309304
Err(CapacityOverflow) => capacity_overflow(),
310305
Err(AllocError { layout, .. }) => handle_alloc_error(layout),
311306
Ok(()) => { /* yay */ }
@@ -315,14 +310,10 @@ impl<T, A: AllocRef> RawVec<T, A> {
315310
/// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
316311
pub fn try_reserve_exact(
317312
&mut self,
318-
used_capacity: usize,
319-
needed_extra_capacity: usize,
313+
len: usize,
314+
additional: usize,
320315
) -> Result<(), TryReserveError> {
321-
if self.needs_to_grow(used_capacity, needed_extra_capacity) {
322-
self.grow_exact(used_capacity, needed_extra_capacity)
323-
} else {
324-
Ok(())
325-
}
316+
if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) }
326317
}
327318

328319
/// Shrinks the allocation down to the specified amount. If the given amount
@@ -347,8 +338,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
347338
impl<T, A: AllocRef> RawVec<T, A> {
348339
/// Returns if the buffer needs to grow to fulfill the needed extra capacity.
349340
/// Mainly used to make inlining reserve-calls possible without inlining `grow`.
350-
fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool {
351-
needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
341+
fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
342+
additional > self.capacity().wrapping_sub(len)
352343
}
353344

354345
fn capacity_from_bytes(excess: usize) -> usize {
@@ -368,22 +359,18 @@ impl<T, A: AllocRef> RawVec<T, A> {
368359
// so that all of the code that depends on `T` is within it, while as much
369360
// of the code that doesn't depend on `T` as possible is in functions that
370361
// are non-generic over `T`.
371-
fn grow_amortized(
372-
&mut self,
373-
used_capacity: usize,
374-
needed_extra_capacity: usize,
375-
) -> Result<(), TryReserveError> {
362+
fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
376363
// This is ensured by the calling contexts.
377-
debug_assert!(needed_extra_capacity > 0);
364+
debug_assert!(additional > 0);
365+
378366
if mem::size_of::<T>() == 0 {
379367
// Since we return a capacity of `usize::MAX` when `elem_size` is
380368
// 0, getting to here necessarily means the `RawVec` is overfull.
381369
return Err(CapacityOverflow);
382370
}
383371

384372
// Nothing we can really do about these checks, sadly.
385-
let required_cap =
386-
used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
373+
let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
387374

388375
// This guarantees exponential growth. The doubling cannot overflow
389376
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
@@ -416,18 +403,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
416403
// The constraints on this method are much the same as those on
417404
// `grow_amortized`, but this method is usually instantiated less often so
418405
// it's less critical.
419-
fn grow_exact(
420-
&mut self,
421-
used_capacity: usize,
422-
needed_extra_capacity: usize,
423-
) -> Result<(), TryReserveError> {
406+
fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
424407
if mem::size_of::<T>() == 0 {
425408
// Since we return a capacity of `usize::MAX` when the type size is
426409
// 0, getting to here necessarily means the `RawVec` is overfull.
427410
return Err(CapacityOverflow);
428411
}
429412

430-
let cap = used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
413+
let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
431414
let new_layout = Layout::array::<T>(cap);
432415

433416
// `finish_grow` is non-generic over `T`.

src/liballoc/vec.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -2965,12 +2965,12 @@ impl<T> Drain<'_, T> {
29652965
}
29662966

29672967
/// Makes room for inserting more elements before the tail.
2968-
unsafe fn move_tail(&mut self, extra_capacity: usize) {
2968+
unsafe fn move_tail(&mut self, additional: usize) {
29692969
let vec = self.vec.as_mut();
2970-
let used_capacity = self.tail_start + self.tail_len;
2971-
vec.buf.reserve(used_capacity, extra_capacity);
2970+
let len = self.tail_start + self.tail_len;
2971+
vec.buf.reserve(len, additional);
29722972

2973-
let new_tail_start = self.tail_start + extra_capacity;
2973+
let new_tail_start = self.tail_start + additional;
29742974
let src = vec.as_ptr().add(self.tail_start);
29752975
let dst = vec.as_mut_ptr().add(new_tail_start);
29762976
ptr::copy(src, dst, self.tail_len);

src/libarena/lib.rs

+26-26
Original file line numberDiff line numberDiff line change
@@ -146,18 +146,18 @@ impl<T> TypedArena<T> {
146146
}
147147

148148
#[inline]
149-
fn can_allocate(&self, len: usize) -> bool {
150-
let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
151-
let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
152-
available_capacity_bytes >= at_least_bytes
149+
fn can_allocate(&self, additional: usize) -> bool {
150+
let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
151+
let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
152+
available_bytes >= additional_bytes
153153
}
154154

155155
/// Ensures there's enough space in the current chunk to fit `additional` objects.
156156
#[inline]
157-
fn ensure_capacity(&self, len: usize) {
158-
if !self.can_allocate(len) {
159-
self.grow(len);
160-
debug_assert!(self.can_allocate(len));
157+
fn ensure_capacity(&self, additional: usize) {
158+
if !self.can_allocate(additional) {
159+
self.grow(additional);
160+
debug_assert!(self.can_allocate(additional));
161161
}
162162
}
163163

@@ -214,31 +214,31 @@ impl<T> TypedArena<T> {
214214
/// Grows the arena.
215215
#[inline(never)]
216216
#[cold]
217-
fn grow(&self, n: usize) {
217+
fn grow(&self, additional: usize) {
218218
unsafe {
219219
// We need the element size to convert chunk sizes (ranging from
220220
// PAGE to HUGE_PAGE bytes) to element counts.
221221
let elem_size = cmp::max(1, mem::size_of::<T>());
222222
let mut chunks = self.chunks.borrow_mut();
223-
let mut new_capacity;
223+
let mut new_cap;
224224
if let Some(last_chunk) = chunks.last_mut() {
225225
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
226226
last_chunk.entries = used_bytes / mem::size_of::<T>();
227227

228228
// If the previous chunk's capacity is less than HUGE_PAGE
229229
// bytes, then this chunk will be at least double the previous
230230
// chunk's size.
231-
new_capacity = last_chunk.storage.capacity();
232-
if new_capacity < HUGE_PAGE / elem_size {
233-
new_capacity = new_capacity.checked_mul(2).unwrap();
231+
new_cap = last_chunk.storage.capacity();
232+
if new_cap < HUGE_PAGE / elem_size {
233+
new_cap = new_cap.checked_mul(2).unwrap();
234234
}
235235
} else {
236-
new_capacity = PAGE / elem_size;
236+
new_cap = PAGE / elem_size;
237237
}
238-
// Also ensure that this chunk can fit `n`.
239-
new_capacity = cmp::max(n, new_capacity);
238+
// Also ensure that this chunk can fit `additional`.
239+
new_cap = cmp::max(additional, new_cap);
240240

241-
let chunk = TypedArenaChunk::<T>::new(new_capacity);
241+
let chunk = TypedArenaChunk::<T>::new(new_cap);
242242
self.ptr.set(chunk.start());
243243
self.end.set(chunk.end());
244244
chunks.push(chunk);
@@ -342,28 +342,28 @@ impl DroplessArena {
342342

343343
#[inline(never)]
344344
#[cold]
345-
fn grow(&self, needed_bytes: usize) {
345+
fn grow(&self, additional: usize) {
346346
unsafe {
347347
let mut chunks = self.chunks.borrow_mut();
348-
let mut new_capacity;
348+
let mut new_cap;
349349
if let Some(last_chunk) = chunks.last_mut() {
350350
// There is no need to update `last_chunk.entries` because that
351351
// field isn't used by `DroplessArena`.
352352

353353
// If the previous chunk's capacity is less than HUGE_PAGE
354354
// bytes, then this chunk will be least double the previous
355355
// chunk's size.
356-
new_capacity = last_chunk.storage.capacity();
357-
if new_capacity < HUGE_PAGE {
358-
new_capacity = new_capacity.checked_mul(2).unwrap();
356+
new_cap = last_chunk.storage.capacity();
357+
if new_cap < HUGE_PAGE {
358+
new_cap = new_cap.checked_mul(2).unwrap();
359359
}
360360
} else {
361-
new_capacity = PAGE;
361+
new_cap = PAGE;
362362
}
363-
// Also ensure that this chunk can fit `needed_bytes`.
364-
new_capacity = cmp::max(needed_bytes, new_capacity);
363+
// Also ensure that this chunk can fit `additional`.
364+
new_cap = cmp::max(additional, new_cap);
365365

366-
let chunk = TypedArenaChunk::<u8>::new(new_capacity);
366+
let chunk = TypedArenaChunk::<u8>::new(new_cap);
367367
self.ptr.set(chunk.start());
368368
self.end.set(chunk.end());
369369
chunks.push(chunk);

0 commit comments

Comments
 (0)