2 * Memory-only VtBlock cache.
4 * The cached Venti blocks are in the hash chains.
5 * The cached local blocks are only in the blocks array.
6 * The free blocks are in the heap, which is supposed to
7 * be indexed by second-to-last use but actually
8 * appears to be last use.
15 int nread, ncopy, nwrite;
33 u32int now; /* ticks for usage time stamps */
34 VtBlock **hash; /* hash table for finding addresses */
36 VtBlock **heap; /* heap for finding victims */
38 VtBlock *block; /* all allocated blocks */
40 uchar *mem; /* memory for all blocks and data */
/* Forward declaration: internal consistency checker, defined below. */
44 static void cachecheck(VtCache*);

/*
 * vtcachealloc: allocate a memory-only cache of nblock blocks served
 * over Venti connection z.
 * NOTE(review): fragmentary listing — only selected lines are visible
 * and each retains its original line-number prefix; the return type,
 * locals, and most of the body are elided.
 */
47 vtcachealloc(VtConn *z, int blocksize, ulong nblock, int mode)
54 c = vtmallocz(sizeof(VtCache));
/* round the block size up to a multiple of 128 bytes */
57 c->blocksize = (blocksize + 127) & ~127;
/* one hash slot, one heap slot, one block header, and one data buffer per block */
61 c->hash = vtmallocz(nblock*sizeof(VtBlock*));
62 c->heap = vtmallocz(nblock*sizeof(VtBlock*));
63 c->block = vtmallocz(nblock*sizeof(VtBlock));
64 c->mem = vtmallocz(nblock*c->blocksize);
/* per-block initialization loop; body elided from this listing */
68 for(i=0; i<nblock; i++){
/*
 * vtcachefree: destroy a cache.  Every block must be unreferenced;
 * the asserts below enforce that.  (Fragment: frees themselves elided.)
 */
83 vtcachefree(VtCache *c)
90 for(i=0; i<c->nblock; i++)
91 assert(c->block[i].ref == 0);
/*
 * vtcachedump: debugging aid — print one line of state per cached block.
 * %V is the Plan 9 print verb for a Venti score.
 */
101 vtcachedump(VtCache *c)
106 for(i=0; i<c->nblock; i++){
108 print("cache block %d: type %d score %V iostate %d addr %d ref %d nlock %d\n",
109 i, b->type, b->score, b->iostate, b->addr, b->ref, b->nlock);
/*
 * cachecheck: sanity-check cache invariants; sysfatal on violation.
 * Heap ordering compares (used - now): unsigned subtraction against the
 * current tick makes timestamp comparison robust to wraparound of c->now.
 * Invariant checked below: every block is either referenced or on the
 * free heap, so nheap + refed == nblock.
 */
114 cachecheck(VtCache *c)
/* each heap entry must record its own index, and parent <= child */
123 for(i = 0; i < c->nheap; i++){
124 if(c->heap[i]->heap != i)
125 sysfatal("mis-heaped at %d: %d", i, c->heap[i]->heap);
126 if(i > 0 && c->heap[(i - 1) >> 1]->used - now > c->heap[i]->used - now)
127 sysfatal("bad heap ordering");
/* left child check (k = 2*i+1, assignment elided from listing) */
129 if(k < c->nheap && c->heap[i]->used - now > c->heap[k]->used - now)
130 sysfatal("bad heap ordering");
/* right child check (k incremented between these lines, elided) */
132 if(k < c->nheap && c->heap[i]->used - now > c->heap[k]->used - now)
133 sysfatal("bad heap ordering");
/* every block's data pointer must sit at its slot in the shared arena */
137 for(i = 0; i < c->nblock; i++){
139 if(b->data != &c->mem[i * size])
140 sysfatal("mis-blocked at %d", i);
/* referenced blocks are off the heap (heap == BadHeap); count them */
141 if(b->ref && b->heap == BadHeap)
143 else if(b->addr != NilBlock)
146 if(c->nheap + refed != c->nblock){
147 fprint(2, "cachecheck: nheap %d refed %d nblocks %d\n", c->nheap, refed, c->nblock);
150 assert(c->nheap + refed == c->nblock);
/* second pass: report any still-referenced blocks (leak diagnostics) */
152 for(i = 0; i < c->nblock; i++){
155 if(1)fprint(2, "a=%ud %V ref=%d\n", b->addr, b->score, b->ref);
159 if(refed > 0)fprint(2, "cachecheck: in used %d\n", refed);
/*
 * upheap: sift block b up from heap index i until its parent is not
 * younger than it.  Uses the same wraparound-safe (used - now)
 * comparison as cachecheck.  (Fragment: swap and bookkeeping elided.)
 */
163 upheap(int i, VtBlock *b)
172 for(; i != 0; i = p){
/* stop once b is no older than its parent bb */
175 if(b->used - now >= bb->used - now)
/*
 * downheap: sift block b down from heap index i, swapping with the
 * older (smaller used - now) of its two children until heap order holds.
 * (Fragment: loop header and swap elided.)
 */
187 downheap(int i, VtBlock *b)
/* pick the older of the two children */
200 if(k + 1 < c->nheap && c->heap[k]->used - now > c->heap[k + 1]->used - now)
/* stop once b is no younger than the chosen child bb */
203 if(b->used - now <= bb->used - now)
/*
 * Delete a block from the heap.
 * Called with c->lk held.
 */
/* move the last heap element into the vacated slot (re-heapify elided) */
232 b = c->heap[c->nheap];
/*
 * Insert a block into the heap.
 * Called with c->lk held.
 */
/* BadHeap marks "not on the heap"; a block may only be inserted once */
245 assert(b->heap == BadHeap);
/* append at the end and sift up; nheap grows by one */
246 upheap(b->c->nheap++, b);
/*
 * locate the vtBlock with the oldest second to last use.
 * remove it from the heap, and fix up the heap.
 */
/* called with c->lk held */
255 vtcachebumpblock(VtCache *c)
/*
 * locate the vtBlock with the oldest second to last use.
 * remove it from the heap, and fix up the heap.
 */
/* an empty heap means every block is referenced — fatal, no victim */
266 sysfatal("vtcachebumpblock: no free blocks in vtCache");
271 assert(b->heap == BadHeap);
/*
 * unchain the vtBlock from hash chain if any
 */
/* b->prev points at the previous link's next field (or the hash slot) */
278 *(b->prev) = b->next;
280 b->next->prev = b->prev;
/* NOTE(review): "droping" is a typo in this (disabled) debug message */
285 if(0)fprint(2, "droping %x:%V\n", b->addr, b->score);
/* set vtBlock to a reasonable state */
288 b->iostate = BioEmpty;
/*
 * fetch a local block from the memory cache.
 * if it's not there, load it, bumping some other Block.
 * if we're out of free blocks, we're screwed.
 */
298 vtcachelocal(VtCache *c, u32int addr, int type)
/* local addresses index the block array directly; range-check first */
302 if(addr >= c->nblock)
303 sysfatal("vtcachelocal: asked for block #%ud; only %d blocks\n",
/* NilBlock / non-BioLocal means the slot no longer holds a local block */
307 if(b->addr == NilBlock || b->iostate != BioLocal)
310 sysfatal("vtcachelocal: block is not local");
/* the caller's expected type must match what the cache holds */
315 print("%d != %d\n", b->type, type);
317 sysfatal("vtcachelocal: block has wrong type %d != %d", b->type, type);
/*
 * vtcacheallocblock: allocate a fresh local block of the given type,
 * evicting a victim if necessary.  The block's address is its index in
 * the block array, and its score is the synthetic "local" encoding of
 * that address (see vtlocaltoglobal).
 */
330 vtcacheallocblock(VtCache *c, int type)
/* reject invalid block types up front */
334 if(type >= VtMaxType)
338 b = vtcachebumpblock(c);
339 b->iostate = BioLocal;
/* pointer arithmetic: index of b within c->block */
341 b->addr = b - c->block;
/* zero the data per-type so the block starts in a canonical state */
342 vtzeroextend(type, b->data, 0, c->blocksize);
343 vtlocaltoglobal(b->addr, b->score);
/*
 * fetch a global (Venti) block from the memory cache.
 * if it's not there, load it, bumping some other block.
 */
357 vtcacheglobal(VtCache *c, uchar score[VtScoreSize], int type)
/* a score that encodes a local address is redirected to vtcachelocal */
364 addr = vtglobaltolocal(score);
366 return vtcachelocal(c, addr, type);
/* hash on the first four score bytes, little-endian */
368 h = (u32int)(score[0]|(score[1]<<8)|(score[2]<<16)|(score[3]<<24)) % c->nhash;
/*
 * look for the block in the cache
 */
374 for(b = c->hash[h]; b != nil; b = b->next){
/* skip non-matches: only venti blocks (addr == NilBlock) with the
 * same score and type count as a hit (presumably followed by continue) */
375 if(b->addr != NilBlock || memcmp(b->score, score, VtScoreSize) != 0 || b->type != type)
/* miss: evict a victim and take over its slot */
388 b = vtcachebumpblock(c);
391 memmove(b->score, score, VtScoreSize);
/* chain onto correct hash */
393 b->next = c->hash[h];
/* doubly-linked via pointer-to-next-field, so unlinking needs no head check */
396 b->next->prev = &b->next;
397 b->prev = &c->hash[h];
/*
 * Lock b before unlocking c, so that others wait while we read.
 *
 * You might think there is a race between this qlock(b) before qunlock(c)
 * and the qlock(c) while holding a qlock(b) in vtblockwrite. However,
 * the block here can never be the block in a vtblockwrite, so we're safe.
 * We're certainly living on the edge.
 */
/* fetch the data from the venti server into the block's buffer */
411 n = vtread(c->z, score, type, b->data, c->blocksize);
413 fprint(2, "vtread: %r\n");
/* mark the block poisoned so later lookups don't trust stale data */
414 b->iostate = BioVentiError;
/* pad the short read out to blocksize with the type's zero pattern */
418 vtzeroextend(type, b->data, n, c->blocksize);
419 b->iostate = BioVenti;
/*
 * The thread that has locked b may refer to it by
 * multiple names. Nlock counts the number of
 * references the locking thread holds. It will call
 * vtblockput once per reference.
 */
432 vtblockduplock(VtBlock *b)
/* only the holder of the lock may duplicate it */
434 assert(b->nlock > 0);
/*
 * we're done with the block.
 * unlock it. can't use it after calling this.
 */
443 vtblockput(VtBlock* b)
450 if(0)fprint(2, "vtblockput: %d: %x %d %d\n", getpid(), b->addr, c->nheap, b->iostate);
/*
 * b->nlock should probably stay at zero while
 * the vtBlock is unlocked, but diskThread and vtSleep
 * conspire to assume that they can just qlock(&b->lk); vtblockput(b),
 * so we have to keep b->nlock set to 1 even
 * when the vtBlock is unlocked.
 */
462 assert(b->nlock == 0);
//if(b->addr != NilBlock) print("blockput %d\n", b->addr);
/*
 * vtblockwrite: write a local block to the venti server, converting it
 * in place into a cached venti block — it acquires the server-computed
 * score and is chained onto the hash table so later vtcacheglobal
 * lookups can find it.
 */
489 vtblockwrite(VtBlock *b)
491 uchar score[VtScoreSize];
/* only local (dirty, unwritten) blocks may be written */
496 if(b->iostate != BioLocal){
498 sysfatal("vtBlockWrite: not a local block");
/* strip the type-specific zero tail before sending — venti stores
 * truncated blocks and vtzeroextend restores them on read */
502 n = vtzerotruncate(b->type, b->data, c->blocksize);
503 if(vtwrite(c->z, score, b->type, b->data, n) < 0)
/* adopt the score venti assigned and flip to venti state */
506 memmove(b->score, score, VtScoreSize);
509 b->iostate = BioVenti;
/* same little-endian hash as vtcacheglobal; chain onto that bucket */
510 h = (u32int)(score[0]|(score[1]<<8)|(score[2]<<16)|(score[3]<<24)) % c->nhash;
511 b->next = c->hash[h];
514 b->next->prev = &b->next;
515 b->prev = &c->hash[h];
/* vtcacheblocksize: accessor for the cache's (rounded-up) block size. */
521 vtcacheblocksize(VtCache *c)
/*
 * vtblockcopy: clone block b into a freshly allocated local block of
 * the same type, copying the full data buffer.  (Fragment: error
 * handling and return elided.)
 */
527 vtblockcopy(VtBlock *b)
532 bb = vtcacheallocblock(b->c, b->type);
537 memmove(bb->data, b->data, b->c->blocksize);
/*
 * vtlocaltoglobal: encode a local block address as a synthetic score —
 * 16 zero bytes followed by addr in big-endian order (bytes 16..19;
 * remaining byte stores elided from this listing).  vtglobaltolocal
 * below is the inverse.
 */
543 vtlocaltoglobal(u32int addr, uchar score[VtScoreSize])
545 memset(score, 0, 16);
546 score[16] = addr>>24;
547 score[17] = addr>>16;
/*
 * vtglobaltolocal: decode a synthetic local score back into a block
 * address.  A genuine venti score has nonzero leading bytes and is
 * rejected (the elided branch presumably returns NilBlock).
 */
554 vtglobaltolocal(uchar score[VtScoreSize])
556 static uchar zero[16];
/* only scores with 16 leading zero bytes encode a local address */
557 if(memcmp(score, zero, 16) != 0)
/* big-endian reassembly of bytes 16..19 — inverse of vtlocaltoglobal */
559 return (score[16]<<24)|(score[17]<<16)|(score[18]<<8)|score[19];