diff options
author | Eric Wong <normalperson@yhbt.net> | 2012-03-05 19:20:21 -0800 |
---|---|---|
committer | Eric Wong <normalperson@yhbt.net> | 2012-03-05 19:20:21 -0800 |
commit | 1d8f75976a2516f93cbf70fe9e1a8979583929f9 (patch) | |
tree | 1056a43cb212edd7ff448d0a4dbad8b71c6a0485 /trywrite.c | |
parent | 5cb6826b3a6f974d720d8f9d1589b6c06f22bf52 (diff) | |
download | cmogstored-1d8f75976a2516f93cbf70fe9e1a8979583929f9.tar.gz |
Aligned allocations are wasteful and tend to fragment the heap. They only benefit us when dealing with frequently _written_ data that we don't want unnecessarily shared/bounced between threads. So swap mog_cachealign() for xmalloc() wherever we have infrequently-written data areas.
Diffstat (limited to 'trywrite.c')
-rw-r--r-- | trywrite.c | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
@@ -12,7 +12,7 @@ struct mog_wbuf {
 static void *
 wbuf_new(size_t total, struct iovec *iov, int iovcnt)
 {
-	struct mog_wbuf *wbuf = mog_cachealign(sizeof(struct mog_wbuf) + total);
+	struct mog_wbuf *wbuf = xmalloc(sizeof(struct mog_wbuf) + total);
 	void *dst = wbuf->buf;
 	int i;