X-Git-Url: https://vcs.fsf.org/?a=blobdiff_plain;f=src%2Fsrc%2Fstore.c;h=8e776568a54fbf9511fd011292f003a20ae6bbc2;hb=4381d60bc96bed88d96e8cc6b534dd0dcd48163f;hp=3192b9774f35e3c30e168be9b8724c39a4849eb8;hpb=6440616152c46002c71a3a6413adeeb0fe435db0;p=exim.git

diff --git a/src/src/store.c b/src/src/store.c
index 3192b9774..8e776568a 100644
--- a/src/src/store.c
+++ b/src/src/store.c
@@ -126,13 +126,6 @@ static storeblock *current_block[NPOOLS];
 static void *next_yield[NPOOLS];
 static int yield_length[NPOOLS] = { -1, -1, -1, -1, -1, -1 };
 
-/* The limits of the tainted pools. Tracking these on new allocations enables
-a fast is_tainted implementation. We assume the kernel only allocates mmaps using
-one side or the other of data+heap, not both. */
-
-void * tainted_base = (void *)-1;
-void * tainted_top = (void *)0;
-
 /* pool_malloc holds the amount of memory used by the store pools; this goes up
 and down as store is reset or released. nonpool_malloc is the total got by
 malloc from other calls; this doesn't go down because it is just freed by
@@ -224,36 +217,6 @@ log_write(0, LOG_MAIN|LOG_PANIC_DIE, "Taint mismatch, %s: %s %d\n",
 	msg, func, line);
 }
 
-static void
-use_slow_taint_check(const uschar * why)
-{
-#ifndef COMPILE_UTILITY
-DEBUG(D_any)
-  debug_printf("switching to slow-mode taint checking (after %s) "
-	"taint bounds %p %p\n", why, tainted_base, tainted_top);
-#endif
-f.taint_check_slow = TRUE;
-}
-
-/* If the creation of a new tainted region results in any of the
-untainted regions appearing to be tainted, using the fast-mode test,
-we need to switch to safe-but-slow mode. */
-
-static void
-verify_all_untainted(void)
-{
-for (int pool = 0; pool < POOL_TAINT_BASE; pool++)
-  for (storeblock * b = chainbase[pool]; b; b = b->next)
-    {
-    uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
-    if (is_tainted(bc))
-      {
-      use_slow_taint_check(US"mmap");
-      return;
-      }
-    }
-}
-
 
 
 /*************************************************
@@ -845,10 +808,6 @@ if (!(yield = mmap(NULL, (size_t)size,
   log_write(0, LOG_MAIN|LOG_PANIC_DIE, "failed to mmap %d bytes of memory: "
     "called from line %d of %s", size, line, func);
 
-if (yield < tainted_base) tainted_base = yield;
-if ((top = US yield + size) > tainted_top) tainted_top = top;
-if (!f.taint_check_slow) verify_all_untainted();
-
 return store_alloc_tail(yield, size, func, line, US"Mmap");
 }
 
@@ -879,14 +838,6 @@ if (!(yield = malloc((size_t)size)))
   log_write(0, LOG_MAIN|LOG_PANIC_DIE, "failed to malloc %d bytes of memory: "
     "called from line %d in %s", size, linenumber, func);
 
-/* If malloc ever returns apparently tainted memory, which glibc
-malloc will as it uses mmap for larger requests, we must switch to
-the slower checking for tainting (checking an address against all
-the tainted pool block spans, rather than just the mmap span) */
-
-if (!f.taint_check_slow && is_tainted(yield))
-  use_slow_taint_check(US"malloc");
-
 return store_alloc_tail(yield, size, func, linenumber, US"Malloc");
 }
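
For context: the deleted lines implemented the fast/slow taint-checking scheme
their comments describe. While fast mode held, is_tainted() reduced to a single
range test against the [tainted_base, tainted_top) mmap span; as soon as any
untainted block (or a malloc result, since glibc satisfies large requests via
mmap) could fall inside that span, f.taint_check_slow forced a scan of the
tainted pool blocks instead. A minimal sketch of the two modes, reusing
store.c's names; the fast-path comparison and the b->length span test are
assumptions drawn from those comments, not the exact upstream code:

/* Sketch only. Fast mode: one range comparison against the mmap span.
Slow mode: test the address against the span of every block in every
tainted pool. */

static BOOL
is_tainted_sketch(const void * p)
{
if (!f.taint_check_slow)				/* fast mode */
  return CUS p >= CUS tainted_base && CUS p < CUS tainted_top;

for (int pool = POOL_TAINT_BASE; pool < NPOOLS; pool++)	/* slow mode */
  for (storeblock * b = chainbase[pool]; b; b = b->next)
    {
    const uschar * bc = CUS b + ALIGNED_SIZEOF_STOREBLOCK;
    if (CUS p >= bc && CUS p < bc + b->length) return TRUE;
    }
return FALSE;
}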