aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSimon Sapin <simon.sapin@exyr.org>2017-10-14 11:53:35 +0200
committerSimon Sapin <simon.sapin@exyr.org>2017-10-16 20:19:18 +0200
commit4ee1b26b6cf315b6f026f7e96a36877285f79573 (patch)
tree4408a263d7e652f4ef0450b417424fa5ca90c17d
parent3f467023feb0fc90bf2446ae2a8b38619bdcaab9 (diff)
downloadservo-4ee1b26b6cf315b6f026f7e96a36877285f79573.tar.gz
servo-4ee1b26b6cf315b6f026f7e96a36877285f79573.zip
Give up on NonZeroUsize memory layout optimization with unstable features.
I’m not confident of the implications regarding variance, dropck, struct aliasing, etc. when this is extended to generic `*const T` and `*mut T`. Release builds should use `unstable` to enable other optimizations anyway.
-rw-r--r--components/nonzero/lib.rs41
1 file changed, 5 insertions, 36 deletions
diff --git a/components/nonzero/lib.rs b/components/nonzero/lib.rs
index 6682740f34e..665dc7448f8 100644
--- a/components/nonzero/lib.rs
+++ b/components/nonzero/lib.rs
@@ -40,9 +40,6 @@ mod imp {
#[cfg(not(feature = "unstable"))]
mod imp {
- use std::cmp;
- use std::hash;
-
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]
pub struct NonZeroU32(u32);
@@ -67,14 +64,14 @@ mod imp {
}
}
- #[derive(Clone, Copy, Debug, Eq)]
- pub struct NonZeroUsize(&'static ());
+ #[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]
+ pub struct NonZeroUsize(usize);
impl NonZeroUsize {
#[inline]
pub fn new(x: usize) -> Option<Self> {
if x != 0 {
- Some(unsafe { Self::new_unchecked(x) })
+ Some(NonZeroUsize(x))
} else {
None
}
@@ -82,40 +79,12 @@ mod imp {
#[inline]
pub unsafe fn new_unchecked(x: usize) -> Self {
- NonZeroUsize(&*(x as *const ()))
+ NonZeroUsize(x)
}
#[inline]
pub fn get(self) -> usize {
- self.0 as *const () as usize
- }
- }
-
- impl PartialEq for NonZeroUsize {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- self.get() == other.get()
- }
- }
-
- impl PartialOrd for NonZeroUsize {
- #[inline]
- fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
- self.get().partial_cmp(&other.get())
- }
- }
-
- impl Ord for NonZeroUsize {
- #[inline]
- fn cmp(&self, other: &Self) -> cmp::Ordering {
- self.get().cmp(&other.get())
- }
- }
-
- impl hash::Hash for NonZeroUsize {
- #[inline]
- fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
- self.get().hash(hasher)
+ self.0
}
}
}