| OLD | NEW | 
|---|---|
| 1 // Copyright (c) 2011, Google Inc. | 1 // Copyright (c) 2011, Google Inc. | 
| 2 // All rights reserved. | 2 // All rights reserved. | 
| 3 // | 3 // | 
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without | 
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are | 
| 6 // met: | 6 // met: | 
| 7 // | 7 // | 
| 8 //     * Redistributions of source code must retain the above copyright | 8 //     * Redistributions of source code must retain the above copyright | 
| 9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. | 
| 10 //     * Redistributions in binary form must reproduce the above | 10 //     * Redistributions in binary form must reproduce the above | 
| (...skipping 65 matching lines...) | |
| 76 void EnsureNonLoop(void* node, void* next) { | 76 void EnsureNonLoop(void* node, void* next) { | 
| 77   // We only have time to do minimal checking.  We don't traverse the list, but | 77   // We only have time to do minimal checking.  We don't traverse the list, but | 
| 78   // only look for an immediate loop (cycle back to ourself). | 78   // only look for an immediate loop (cycle back to ourself). | 
| 79   if (node != next) return; | 79   if (node != next) return; | 
| 80   Log(kCrash, __FILE__, __LINE__, "Circular loop in list detected: ", next); | 80   Log(kCrash, __FILE__, __LINE__, "Circular loop in list detected: ", next); | 
| 81 } | 81 } | 
| 82 | 82 | 
| 83 inline void* MaskPtr(void* p) { | 83 inline void* MaskPtr(void* p) { | 
| 84   // Maximize ASLR entropy and guarantee the result is an invalid address. | 84   // Maximize ASLR entropy and guarantee the result is an invalid address. | 
| 85   const uintptr_t mask = ~(reinterpret_cast<uintptr_t>(TCMalloc_SystemAlloc) | 85   const uintptr_t mask = ~(reinterpret_cast<uintptr_t>(TCMalloc_SystemAlloc) | 
| 86                            >> 13) | 1; | 86                            >> 13); | 
| 87   // Do not mask NULL pointers, otherwise we could leak address state. | 87   return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(p) ^ mask); | 
| 88   if (p) |  | 
| 89     return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(p) ^ mask); |  | 
| 90   return p; |  | 
| 91 } | 88 } | 
| 92 | 89 | 
| 93 inline void* UnmaskPtr(void* p) { | 90 inline void* UnmaskPtr(void* p) { | 
| 94   return MaskPtr(p); | 91   return MaskPtr(p); | 
| 95 } | 92 } | 
| 96 | 93 | 
| 97 // Returns value of the \|previous\| pointer w/out running a sanity | 94 // Returns value of the \|previous\| pointer w/out running a sanity | 
| 98 // check. | 95 // check. | 
| 99 inline void *FL_Previous_No_Check(void *t) { | 96 inline void *FL_Previous_No_Check(void *t) { | 
| 100   return UnmaskPtr(reinterpret_cast<void**>(t)[1]); | 97   return UnmaskPtr(reinterpret_cast<void**>(t)[1]); | 
| (...skipping 137 matching lines...) | |
| 238 | 235 | 
| 239 namespace { | 236 namespace { | 
| 240 | 237 | 
| 241 inline void FL_SetNext(void *t, void *n) { | 238 inline void FL_SetNext(void *t, void *n) { | 
| 242   tcmalloc::SLL_SetNext(t,n); | 239   tcmalloc::SLL_SetNext(t,n); | 
| 243 } | 240 } | 
| 244 | 241 | 
| 245 } | 242 } | 
| 246 | 243 | 
| 247 #endif // TCMALLOC_USE_DOUBLYLINKED_FREELIST | 244 #endif // TCMALLOC_USE_DOUBLYLINKED_FREELIST | 
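
For context on the change above: the freelist hardening relies on MaskPtr being its own inverse, which is why UnmaskPtr can simply call MaskPtr again. Below is a minimal standalone sketch of that round trip, assuming a hypothetical `SeedFunction` in place of `TCMalloc_SystemAlloc` as the mask seed; it is an illustration of the XOR-masking idea, not code from the CL.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for TCMalloc_SystemAlloc: any stable code address
// serves as the seed for the mask.
static void SeedFunction() {}

inline void* MaskPtr(void* p) {
  // XOR the stored pointer with a mask derived from a code address, mirroring
  // the new version of MaskPtr in the patch (no "| 1", no NULL special case).
  const uintptr_t mask = ~(reinterpret_cast<uintptr_t>(&SeedFunction) >> 13);
  return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(p) ^ mask);
}

inline void* UnmaskPtr(void* p) {
  // XOR with a fixed mask is an involution, so unmasking is just masking again.
  return MaskPtr(p);
}

int main() {
  int x = 0;
  void* stored = MaskPtr(&x);       // what the free list would keep in memory
  assert(UnmaskPtr(stored) == &x);  // round trip recovers the raw pointer
  std::printf("raw=%p stored=%p\n", static_cast<void*>(&x), stored);
  return 0;
}
```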