2 * Memory-only VtBlock cache.
4 * The cached Venti blocks are in the hash chains.
5 * The cached local blocks are only in the blocks array.
6 * The free blocks are in the heap, which is supposed to
7 * be indexed by second-to-last use but actually
8 * appears to be last use.
/*
 * Fields of the VtCache struct (struct header and remaining fields are
 * outside this view).  Per the file header: Venti blocks live in the hash
 * chains, local blocks only in the block array, free blocks in the heap.
 */
35 u32int now; /* ticks for usage time stamps */
36 VtBlock **hash; /* hash table for finding addresses */
38 VtBlock **heap; /* heap for finding victims */
40 VtBlock *block; /* all allocated blocks */
/* pluggable write function — see vtcachesetwrite below (vbackup overrides it) */
42 int (*write)(VtConn*, uchar[VtScoreSize], uint, uchar*, int);
43 VtBlock *dead; /* blocks we don't have memory for */
48 static void cachecheck(VtCache*);
/*
 * Allocate a VtCache serving connection z, using at most maxmem bytes.
 * Sizes the block/hash/heap arrays from maxmem, then deducts the metadata
 * cost from the data budget.  NOTE: interior lines of this function are not
 * visible here (declarations, nhash setup, error check feeding the sysfatal,
 * loop body) — comments below only describe the visible lines.
 */
51 vtcachealloc(VtConn *z, ulong maxmem)
60 c = vtmallocz(sizeof(VtCache));
/* heuristic: ~1% of maxmem per block's fixed overhead — TODO confirm intent */
61 nblock = maxmem/100/(sizeof(VtBlock)+2*sizeof(VtBlock*));
65 c->hash = vtmallocz(nblock*sizeof(VtBlock*));
66 c->heap = vtmallocz(nblock*sizeof(VtBlock*));
67 c->block = vtmallocz(nblock*sizeof(VtBlock));
/* subtract bookkeeping overhead so c->mem budgets only block data */
69 maxmem -= nblock*(sizeof(VtBlock) + 2*sizeof(VtBlock*));
70 maxmem -= sizeof(VtCache);
/* presumably reached when the adjusted budget underflows; maxmem0 is the
 * caller's original request — the saving assignment is not visible here */
72 sysfatal("cache size far too small: %lud", maxmem0);
75 for(i=0; i<nblock; i++){
88 * BUG This is here so that vbackup can override it and do some
89 * pipelining of writes. Arguably vtwrite or vtwritepacket or the
90 * cache itself should be providing this functionality.
/* Install a replacement write function on the cache (body not visible;
 * presumably just stores `write` into c->write, used by vtblockwrite). */
93 vtcachesetwrite(VtCache *c, int (*write)(VtConn*, uchar[VtScoreSize], uint, uchar*, int))
/*
 * Free the cache and all per-block data buffers.
 * Asserts no block is still referenced (a block with data must have ref==0).
 */
101 vtcachefree(VtCache *c)
108 for(i=0; i<c->nblock; i++) {
109 assert(c->block[i].data == nil || c->block[i].ref == 0);
110 vtfree(c->block[i].data);
/* Debugging: print one line of state for every block in the cache. */
120 vtcachedump(VtCache *c)
125 for(i=0; i<c->nblock; i++){
127 print("cache block %d: type %d score %V iostate %d addr %d ref %d nlock %d\n",
128 i, b->type, b->score, b->iostate, b->addr, b->ref, b->nlock);
/*
 * Internal consistency check: verifies the heap invariants (each block's
 * heap index is correct; parent not younger than child, comparing ages as
 * `used - now` to survive tick wraparound) and that heap blocks plus
 * referenced blocks account for every block.  Interior lines (now/k
 * declarations, child-index computation, refed counting) are not visible.
 */
133 cachecheck(VtCache *c)
141 for(i = 0; i < c->nheap; i++){
142 if(c->heap[i]->heap != i)
143 sysfatal("mis-heaped at %d: %d", i, c->heap[i]->heap);
/* parent (i-1)/2 must not be strictly older-used than child i */
144 if(i > 0 && c->heap[(i - 1) >> 1]->used - now > c->heap[i]->used - now)
145 sysfatal("bad heap ordering");
/* k is presumably a child index (2*i+1 then 2*i+2) — assignments not visible */
147 if(k < c->nheap && c->heap[i]->used - now > c->heap[k]->used - now)
148 sysfatal("bad heap ordering");
150 if(k < c->nheap && c->heap[i]->used - now > c->heap[k]->used - now)
151 sysfatal("bad heap ordering");
155 for(i = 0; i < c->nblock; i++){
/* a referenced block must not also be in the heap */
157 if(b->ref && b->heap == BadHeap)
159 else if(b->addr != NilBlock)
/* every block is either free (in heap) or referenced */
162 assert(c->nheap + refed == c->nblock);
164 for(i = 0; i < c->nblock; i++){
/*
 * Sift block b up from heap slot i until the min-heap-by-age property holds.
 * Ages compare as `used - now` so tick wraparound stays ordered.
 */
173 upheap(int i, VtBlock *b)
182 for(; i != 0; i = p){
/* stop once b is not older than its parent bb */
185 if(b->used - now >= bb->used - now)
/*
 * Sift block b down from heap slot i, swapping with the older-used child
 * until the heap property is restored.
 */
197 downheap(int i, VtBlock *b)
/* pick the older (smaller age) of the two children */
210 if(k + 1 < c->nheap && c->heap[k]->used - now > c->heap[k + 1]->used - now)
/* stop once b is no older than the chosen child bb */
213 if(b->used - now <= bb->used - now)
224 * Delete a block from the heap.
225 * Called with c->lk held.
/* presumably after decrementing c->nheap: the last heap entry replaces the
 * deleted slot and is re-sifted — surrounding lines not visible */
242 b = c->heap[c->nheap];
249 * Insert a block into the heap.
250 * Called with c->lk held.
/* block must not already be in the heap */
255 assert(b->heap == BadHeap);
/* append at the end and sift up */
256 upheap(b->c->nheap++, b);
260 * locate the vtBlock with the oldest second to last use.
261 * remove it from the heap, and fix up the heap.
263 /* called with c->lk held */
265 vtcachebumpblock(VtCache *c)
270 * locate the vtBlock with the oldest second to last use.
271 * remove it from the heap, and fix up the heap.
/* all blocks referenced: nothing to evict (the return/abort path is not
 * visible here) */
275 fprint(2, "vtcachebumpblock: no free blocks in vtCache");
/* victim must have been removed from the heap by now */
281 assert(b->heap == BadHeap);
285 * unchain the vtBlock from hash chain if any
/* b->prev points at the predecessor's `next` slot (or the hash bucket head),
 * so this unlinks b in O(1) */
288 *(b->prev) = b->next;
290 b->next->prev = b->prev;
/* NOTE(review): "droping" is a typo in this runtime debug string; left as-is
 * since this is a comment-only pass */
295 if(0)fprint(2, "droping %x:%V\n", b->addr, b->score);
296 /* set vtBlock to a reasonable state */
298 b->iostate = BioEmpty;
303 * evict blocks until there is enough memory for size bytes.
/* Returns a victim block whose data buffer can hold `size` bytes, evicting
 * (or parking on the dead list) as needed.  Interior lines — the dead-list
 * pop, loop structure, and c->mem accounting — are not visible here. */
306 vtcacheevict(VtCache *c, ulong size)
311 * If we were out of memory and put some blocks
312 * to the side but now we have memory, grab one.
314 if(c->mem >= size && c->dead) {
322 * Otherwise, evict until we have memory.
325 b = vtcachebumpblock(c);
/* reclaiming b's buffer would satisfy the request */
326 if(c->mem+b->size >= size)
329 * chain b onto dead list
340 * Allocate memory for block.
/* reallocate only if the existing buffer is too small or wastefully large
 * (more than twice what is needed) */
343 if(size > b->size || size <= b->size/2) {
348 b->data = vtmalloc(size);
354 * fetch a local block from the memory cache.
355 * if it's not there, load it, bumping some other Block.
356 * if we're out of free blocks, we're screwed.
/* addr is 1-based: local address N maps to c->block[N-1]; 0 and out-of-range
 * addresses are fatal.  Locking/ref lines are not visible here. */
359 vtcachelocal(VtCache *c, u32int addr, int type)
364 sysfatal("vtcachelocal: asked for nonexistent block 0");
366 sysfatal("vtcachelocal: asked for block #%ud; only %d blocks",
367 (uint)addr, c->nblock);
369 b = &c->block[addr-1];
/* a local block must still be resident and in BioLocal state */
370 if(b->addr == NilBlock || b->iostate != BioLocal)
371 sysfatal("vtcachelocal: block is not local");
374 sysfatal("vtcachelocal: block has wrong type %d != %d", b->type, type);
/* record caller for debugging */
382 b->pc = getcallerpc(&c);
/*
 * Allocate a fresh zeroed local block of the given type/size, evicting a
 * victim for its memory.  The block's local address is its 1-based index in
 * the block array, and its score is the fake local-address score (see
 * vtlocaltoglobal).
 */
387 vtcacheallocblock(VtCache *c, int type, ulong size)
392 b = vtcacheevict(c, size);
393 b->iostate = BioLocal;
/* 1-based local address derived from array position */
395 b->addr = (b - c->block)+1;
/* fill with the canonical zero content for this block type */
396 vtzeroextend(type, b->data, 0, size);
397 vtlocaltoglobal(b->addr, b->score);
402 b->pc = getcallerpc(&c);
407 * fetch a global (Venti) block from the memory cache.
408 * if it's not there, load it, bumping some other block.
/* Scores that encode a local address are delegated to vtcachelocal; otherwise
 * look up the hash chain, and on a miss evict a victim, chain it in, and read
 * the block from Venti.  Many interior lines (locking, ref counting, chatty
 * flag tests, error returns) are not visible here. */
411 vtcacheglobal(VtCache *c, uchar score[VtScoreSize], int type, ulong size)
419 fprint(2, "vtcacheglobal %V %d from %p\n", score, type, getcallerpc(&c));
420 addr = vtglobaltolocal(score);
/* local-address score: not a real Venti block */
421 if(addr != NilBlock){
423 fprint(2, "vtcacheglobal %V %d => local\n", score, type);
424 b = vtcachelocal(c, addr, type);
426 b->pc = getcallerpc(&c);
/* hash on the first four score bytes, little-endian */
430 h = (u32int)(score[0]|(score[1]<<8)|(score[2]<<16)|(score[3]<<24)) % c->nhash;
433 * look for the block in the cache
436 for(b = c->hash[h]; b != nil; b = b->next){
/* skip local blocks (addr != NilBlock) and non-matching score/type */
437 if(b->addr != NilBlock || memcmp(b->score, score, VtScoreSize) != 0 || b->type != type)
443 fprint(2, "vtcacheglobal %V %d => found in cache %p; locking\n", score, type, b);
/* a cached read failure stays cached as BioVentiError and is re-reported */
446 if(b->iostate == BioVentiError){
448 fprint(2, "cached read error for %V\n", score);
450 fprint(2, "vtcacheglobal %V %d => cache read error\n", score, type);
452 werrstr("venti i/o error");
456 fprint(2, "vtcacheglobal %V %d => found in cache; returning\n", score, type);
457 b->pc = getcallerpc(&c);
/* miss: take a victim and re-purpose it for this score */
464 b = vtcacheevict(c, size);
467 memmove(b->score, score, VtScoreSize);
468 /* chain onto correct hash */
469 b->next = c->hash[h];
472 b->next->prev = &b->next;
473 b->prev = &c->hash[h];
476 * Lock b before unlocking c, so that others wait while we read.
478 * You might think there is a race between this qlock(b) before qunlock(c)
479 * and the qlock(c) while holding a qlock(b) in vtblockwrite. However,
480 * the block here can never be the block in a vtblockwrite, so we're safe.
481 * We're certainly living on the edge.
484 fprint(2, "vtcacheglobal %V %d => bumped; locking %p\n", score, type, b);
490 n = vtread(c->z, score, type, b->data, size);
493 fprint(2, "read %V: %r\n", score);
495 fprint(2, "vtcacheglobal %V %d => bumped; read error\n", score, type);
496 b->iostate = BioVentiError;
/* short reads are padded out with the type's canonical zero content */
500 vtzeroextend(type, b->data, n, size);
501 b->iostate = BioVenti;
504 fprint(2, "vtcacheglobal %V %d => loaded into cache; returning\n", score, type);
/* NOTE(review): inconsistent with lines above, which use getcallerpc(&c);
 * getcallerpc conventionally takes the address of the FIRST argument, so
 * &b here likely records a wrong pc — confirm against upstream cache.c */
505 b->pc = getcallerpc(&b);
510 * The thread that has locked b may refer to it by
511 * multiple names. Nlock counts the number of
512 * references the locking thread holds. It will call
513 * vtblockput once per reference.
/* Caller must already hold the block lock (nlock > 0); presumably the
 * not-visible body increments b->nlock. */
516 vtblockduplock(VtBlock *b)
518 assert(b->nlock > 0);
523 * we're done with the block.
524 * unlock it. can't use it after calling this.
/* Drops one of the locking thread's references; the actual unlock / heap
 * re-insert logic between the visible lines is not shown here. */
527 vtblockput(VtBlock* b)
534 if(0)fprint(2, "vtblockput: %d: %x %d %d\n", getpid(), b->addr, c->nheap, b->iostate);
536 fprint(2, "vtblockput %p from %p\n", b, getcallerpc(&b));
542 * b->nlock should probably stay at zero while
543 * the vtBlock is unlocked, but diskThread and vtSleep
544 * conspire to assume that they can just qlock(&b->lk); vtblockput(b),
545 * so we have to keep b->nlock set to 1 even
546 * when the vtBlock is unlocked.
548 assert(b->nlock == 0);
563 /*if(b->addr != NilBlock) print("blockput %d\n", b->addr); */
/*
 * Write a local block to Venti via c->write, converting it into a global
 * (BioVenti) block: its content score replaces the local-address score and
 * the block is chained onto the matching hash bucket.  Only BioLocal blocks
 * may be written.  Locking and error-return lines are not visible here.
 */
576 vtblockwrite(VtBlock *b)
578 uchar score[VtScoreSize];
583 if(b->iostate != BioLocal){
584 werrstr("vtblockwrite: not a local block");
/* trailing zeroes are truncated before writing, per the type's convention */
589 n = vtzerotruncate(b->type, b->data, b->size);
591 if(c->write(c->z, score, b->type, b->data, n) < 0)
594 memmove(b->score, score, VtScoreSize);
597 b->addr = NilBlock; /* now on venti */
598 b->iostate = BioVenti;
/* same hash as vtcacheglobal: first four score bytes, little-endian */
599 h = (u32int)(score[0]|(score[1]<<8)|(score[2]<<16)|(score[3]<<24)) % c->nhash;
600 b->next = c->hash[h];
603 b->next->prev = &b->next;
604 b->prev = &c->hash[h];
/*
 * Copy-on-write helper: allocate a fresh local block of the same type/size
 * and copy b's data into it.  (Here getcallerpc(&b) is correct: b is this
 * function's first argument.)
 */
610 vtblockcopy(VtBlock *b)
615 bb = vtcacheallocblock(b->c, b->type, b->size);
620 memmove(bb->data, b->data, b->size);
622 bb->pc = getcallerpc(&b);
/*
 * Encode a local block address as a fake score: 16 zero bytes followed by
 * the 32-bit address, big-endian, in bytes 16-19.  (The stores for bytes
 * 18 and 19 are outside this view.)  Inverse of vtglobaltolocal.
 */
627 vtlocaltoglobal(u32int addr, uchar score[VtScoreSize])
629 memset(score, 0, 16);
630 score[16] = addr>>24;
631 score[17] = addr>>16;
/*
 * Decode a fake local-address score: if the first 16 bytes are not all zero
 * the score is a real Venti score (the NilBlock return is outside this view);
 * otherwise return the 32-bit big-endian address from bytes 16-19.
 */
638 vtglobaltolocal(uchar score[VtScoreSize])
640 static uchar zero[16];
641 if(memcmp(score, zero, 16) != 0)
643 return (score[16]<<24)|(score[17]<<16)|(score[18]<<8)|score[19];