Auto merge of #41670 - scottmcm:slice-rotate, r=alexcrichton

Add an in-place rotate method for slices to libcore

A helpful primitive for moving chunks of data around inside a slice.

For example, if you have a range selected and are drag-and-dropping it somewhere else in the slice, a single rotate of the affected sub-range performs the move (example from [Sean Parent's talk](https://youtu.be/qH6sSOr-yk8?t=560)).
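A minimal sketch of that drag-and-drop move expressed with `rotate` (illustrative only; the doc example added in this PR wraps the same idea in a reusable `slide` helper):

```rust
#![feature(slice_rotate)]

fn main() {
    let mut v: Vec<_> = (0..10).collect();
    // The selected block [1, 2, 3] occupies indices 1..4 and is dropped just
    // before index 7: rotate the affected sub-range left by the selection's
    // length (3) so the block lands at indices 4..7.
    v[1..7].rotate(3);
    assert_eq!(v, [0, 4, 5, 6, 1, 2, 3, 7, 8, 9]);
}
```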

(If this should be an RFC instead of a PR, please let me know.)

Edit: changed example
bors 2017-06-02 07:51:20 +00:00
commit 558cd1e393
12 changed files with 285 additions and 0 deletions


@@ -190,6 +190,7 @@
- [sip_hash_13](library-features/sip-hash-13.md)
- [slice_concat_ext](library-features/slice-concat-ext.md)
- [slice_get_slice](library-features/slice-get-slice.md)
- [slice_rotate](library-features/slice-rotate.md)
- [slice_rsplit](library-features/slice-rsplit.md)
- [sort_internals](library-features/sort-internals.md)
- [sort_unstable](library-features/sort-unstable.md)


@@ -0,0 +1,7 @@
# `slice_rotate`
The tracking issue for this feature is: [#41891]
[#41891]: https://github.com/rust-lang/rust/issues/41891
------------------------


@@ -13,6 +13,7 @@
#![feature(i128_type)]
#![feature(rand)]
#![feature(repr_simd)]
#![feature(slice_rotate)]
#![feature(sort_unstable)]
#![feature(test)]


@@ -195,6 +195,11 @@ fn gen_random(len: usize) -> Vec<u64> {
rng.gen_iter::<u64>().take(len).collect()
}
fn gen_random_bytes(len: usize) -> Vec<u8> {
let mut rng = thread_rng();
rng.gen_iter::<u8>().take(len).collect()
}
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
let mut rng = thread_rng();
let mut v = gen_ascending(len);
@@ -315,3 +320,39 @@ reverse!(reverse_u64, u64, |x| x as u64);
reverse!(reverse_u128, u128, |x| x as u128);
#[repr(simd)] struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| { let x = x as f64; F64x4(x,x,x,x) });
macro_rules! rotate {
($name:ident, $gen:expr, $len:expr, $mid:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
let size = mem::size_of_val(&$gen(1)[0]);
let mut v = $gen($len * 8 / size);
b.iter(|| black_box(&mut v).rotate(($mid*8+size-1)/size));
b.bytes = (v.len() * size) as u64;
}
}
}
rotate!(rotate_tiny_by1, gen_random, 16, 1);
rotate!(rotate_tiny_half, gen_random, 16, 16/2);
rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16/2+1);
rotate!(rotate_medium_by1, gen_random, 9158, 1);
rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
rotate!(rotate_medium_half, gen_random, 9158, 9158/2);
rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158/2+1);
// Intended to use more RAM than the machine has cache
rotate!(rotate_huge_by1, gen_random, 5*1024*1024, 1);
rotate!(rotate_huge_by9199_u64, gen_random, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_strings, gen_strings, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_big, gen_big_random, 5*1024*1024, 9199);
rotate!(rotate_huge_by1234577_u64, gen_random, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_strings, gen_strings, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_big, gen_big_random, 5*1024*1024, 1234577);
rotate!(rotate_huge_half, gen_random, 5*1024*1024, 5*1024*1024/2);
rotate!(rotate_huge_half_plus_one, gen_random, 5*1024*1024, 5*1024*1024/2+1);


@@ -55,6 +55,7 @@
#![feature(shared)]
#![feature(slice_get_slice)]
#![feature(slice_patterns)]
#![cfg_attr(not(test), feature(slice_rotate))]
#![feature(slice_rsplit)]
#![cfg_attr(not(test), feature(sort_unstable))]
#![feature(specialization)]


@@ -1347,6 +1347,61 @@ impl<T> [T] {
core_slice::SliceExt::sort_unstable_by_key(self, f);
}
/// Permutes the slice in-place such that `self[mid..]` moves to the
/// beginning of the slice while `self[..mid]` moves to the end of the
/// slice. Equivalently, rotates the slice `mid` places to the left
/// or `k = self.len() - mid` places to the right.
///
/// This is a "k-rotation", a permutation in which item `i` moves to
/// position `i + k`, modulo the length of the slice. See _Elements
/// of Programming_ [§10.4][eop].
///
/// Rotation by `mid` and rotation by `k` are inverse operations.
///
/// [eop]: https://books.google.com/books?id=CO9ULZGINlsC&pg=PA178&q=k-rotation
///
/// # Panics
///
/// This function will panic if `mid` is greater than the length of the
/// slice. (Note that `mid == self.len()` does _not_ panic; it's a nop
/// rotation with `k == 0`, the inverse of a rotation with `mid == 0`.)
///
/// # Complexity
///
/// Takes linear (in `self.len()`) time.
///
/// # Examples
///
/// ```
/// #![feature(slice_rotate)]
///
/// let mut a = [1, 2, 3, 4, 5, 6, 7];
/// let mid = 2;
/// a.rotate(mid);
/// assert_eq!(&a, &[3, 4, 5, 6, 7, 1, 2]);
/// let k = a.len() - mid;
/// a.rotate(k);
/// assert_eq!(&a, &[1, 2, 3, 4, 5, 6, 7]);
///
/// use std::ops::Range;
/// fn slide<T>(slice: &mut [T], range: Range<usize>, to: usize) {
/// if to < range.start {
/// slice[to..range.end].rotate(range.start-to);
/// } else if to > range.end {
/// slice[range.start..to].rotate(range.end-range.start);
/// }
/// }
/// let mut v: Vec<_> = (0..10).collect();
/// slide(&mut v, 1..4, 7);
/// assert_eq!(&v, &[0, 4, 5, 6, 1, 2, 3, 7, 8, 9]);
/// slide(&mut v, 6..8, 1);
/// assert_eq!(&v, &[0, 3, 7, 4, 5, 6, 1, 2, 8, 9]);
/// ```
#[unstable(feature = "slice_rotate", issue = "41891")]
pub fn rotate(&mut self, mid: usize) {
core_slice::SliceExt::rotate(self, mid);
}
/// Copies the elements from `src` into `self`.
///
/// The length of `src` must be the same as `self`.


@@ -19,6 +19,7 @@
#![feature(pattern)]
#![feature(placement_in_syntax)]
#![feature(rand)]
#![feature(slice_rotate)]
#![feature(splice)]
#![feature(step_by)]
#![feature(str_escape)]


@@ -466,6 +466,41 @@ fn test_sort_stability() {
}
}
#[test]
fn test_rotate() {
let expected: Vec<_> = (0..13).collect();
let mut v = Vec::new();
// no-ops
v.clone_from(&expected);
v.rotate(0);
assert_eq!(v, expected);
v.rotate(expected.len());
assert_eq!(v, expected);
let mut zst_array = [(), (), ()];
zst_array.rotate(2);
// happy path
v = (5..13).chain(0..5).collect();
v.rotate(8);
assert_eq!(v, expected);
let expected: Vec<_> = (0..1000).collect();
// small rotations in large slice, uses ptr::copy
v = (2..1000).chain(0..2).collect();
v.rotate(998);
assert_eq!(v, expected);
v = (998..1000).chain(0..998).collect();
v.rotate(2);
assert_eq!(v, expected);
// non-small prime rotation, has a few rounds of swapping
v = (389..1000).chain(0..389).collect();
v.rotate(1000-389);
assert_eq!(v, expected);
}
#[test]
fn test_concat() {
let v: [Vec<i32>; 0] = [];


@@ -51,6 +51,7 @@ use mem;
use marker::{Copy, Send, Sync, Sized, self};
use iter_private::TrustedRandomAccess;
mod rotate;
mod sort;
#[repr(C)]
@@ -202,6 +203,9 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")]
fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
#[unstable(feature = "slice_rotate", issue = "41891")]
fn rotate(&mut self, mid: usize);
#[stable(feature = "clone_from_slice", since = "1.7.0")]
fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
@@ -635,6 +639,16 @@ impl<T> SliceExt for [T] {
self.binary_search_by(|p| p.borrow().cmp(x))
}
fn rotate(&mut self, mid: usize) {
assert!(mid <= self.len());
let k = self.len() - mid;
unsafe {
let p = self.as_mut_ptr();
rotate::ptr_rotate(mid, p.offset(mid as isize), k);
}
}
#[inline]
fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
assert!(self.len() == src.len(),

src/libcore/slice/rotate.rs (new file, 112 lines)

@@ -0,0 +1,112 @@
// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use cmp;
use mem;
use ptr;
/// Rotation is much faster if it has access to a little bit of memory. This
/// union provides a RawVec-like interface, but to a fixed-size stack buffer.
#[allow(unions_with_drop_fields)]
union RawArray<T> {
/// Ensure this is appropriately aligned for T, and is big
/// enough for two elements even if T is enormous.
typed: [T; 2],
/// For normally-sized types, especially things like u8, having more
/// than 2 in the buffer is necessary for usefulness, so pad it out
/// enough to be helpful, but not so big as to risk overflow.
_extra: [usize; 32],
}
impl<T> RawArray<T> {
fn new() -> Self {
unsafe { mem::uninitialized() }
}
fn ptr(&self) -> *mut T {
unsafe { &self.typed as *const T as *mut T }
}
fn cap() -> usize {
if mem::size_of::<T>() == 0 {
usize::max_value()
} else {
mem::size_of::<Self>() / mem::size_of::<T>()
}
}
}
/// Rotates the range `[mid-left, mid+right)` such that the element at `mid`
/// becomes the first element. Equivalently, rotates the range `left`
/// elements to the left or `right` elements to the right.
///
/// # Safety
///
/// The specified range must be valid for reading and writing.
/// The type `T` must have non-zero size.
///
/// # Algorithm
///
/// For longer rotations, swap the left-most `delta = min(left, right)`
/// elements with the right-most `delta` elements. LLVM vectorizes this,
/// which is profitable as we only reach this step for a "large enough"
/// rotation. Doing this puts `delta` elements on the larger side into the
/// correct position, leaving a smaller rotate problem. Demonstration:
///
/// ```text
/// [ 6 7 8 9 10 11 12 13 . 1 2 3 4 5 ]
/// 1 2 3 4 5 [ 11 12 13 . 6 7 8 9 10 ]
/// 1 2 3 4 5 [ 8 9 10 . 6 7 ] 11 12 13
/// 1 2 3 4 5 6 7 [ 10 . 8 9 ] 11 12 13
/// 1 2 3 4 5 6 7 [ 9 . 8 ] 10 11 12 13
/// 1 2 3 4 5 6 7 8 [ . ] 9 10 11 12 13
/// ```
///
/// Once the rotation is small enough, copy some elements into a stack
/// buffer, `memmove` the others, and move the ones back from the buffer.
pub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {
loop {
let delta = cmp::min(left, right);
if delta <= RawArray::<T>::cap() {
break;
}
ptr_swap_n(
mid.offset(-(left as isize)),
mid.offset((right-delta) as isize),
delta);
if left <= right {
right -= delta;
} else {
left -= delta;
}
}
let rawarray = RawArray::new();
let buf = rawarray.ptr();
let dim = mid.offset(-(left as isize)).offset(right as isize);
if left <= right {
ptr::copy_nonoverlapping(mid.offset(-(left as isize)), buf, left);
ptr::copy(mid, mid.offset(-(left as isize)), right);
ptr::copy_nonoverlapping(buf, dim, left);
}
else {
ptr::copy_nonoverlapping(mid, buf, right);
ptr::copy(mid.offset(-(left as isize)), dim, left);
ptr::copy_nonoverlapping(buf, mid.offset(-(left as isize)), right);
}
}
unsafe fn ptr_swap_n<T>(a: *mut T, b: *mut T, n: usize) {
for i in 0..n {
// These are nonoverlapping, so use mem::swap instead of ptr::swap
mem::swap(&mut *a.offset(i as isize), &mut *b.offset(i as isize));
}
}
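For reference, the block-swap reduction described above can also be written in safe Rust. The following sketch (an illustration only, not the libcore code) keeps swapping blocks all the way down instead of switching to the stack buffer for the final small rotation:

```rust
/// Rotate `v` left by `left` places using only block swaps.
fn rotate_sketch<T>(v: &mut [T], mut left: usize) {
    assert!(left <= v.len());
    let mut right = v.len() - left;
    let mut lo = 0;        // start of the still-unrotated window
    let mut hi = v.len();  // end of the still-unrotated window
    while left > 0 && right > 0 {
        // Swap the window's left-most `delta` elements with its right-most
        // `delta` elements; that finalizes `delta` elements on the larger side.
        let delta = left.min(right);
        for i in 0..delta {
            v.swap(lo + i, hi - delta + i);
        }
        if left <= right {
            hi -= delta;
            right -= delta;
        } else {
            lo += delta;
            left -= delta;
        }
    }
}

fn main() {
    // Same starting layout as the demonstration above: left = 8, right = 5.
    let mut a = [6, 7, 8, 9, 10, 11, 12, 13, 1, 2, 3, 4, 5];
    rotate_sketch(&mut a, 8);
    assert_eq!(a, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]);
}
```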


@@ -30,6 +30,7 @@
#![feature(raw)]
#![feature(sip_hash_13)]
#![feature(slice_patterns)]
#![feature(slice_rotate)]
#![feature(sort_internals)]
#![feature(sort_unstable)]
#![feature(specialization)]


@@ -238,6 +238,22 @@ fn test_find_rfind() {
assert_eq!(v.iter().rfind(|&&x| x <= 3), Some(&3));
}
#[test]
fn test_rotate() {
const N: usize = 600;
let a: &mut [_] = &mut [0; N];
for i in 0..N {
a[i] = i;
}
a.rotate(42);
let k = N - 42;
for i in 0..N {
assert_eq!(a[(i+k)%N], i);
}
}
#[test]
fn sort_unstable() {
let mut v = [0; 600];