fix: oshash file close
parent f1cb7ec71f
commit 6ed78e6459

3 changed files with 7 additions and 63 deletions
@@ -1,60 +0,0 @@
import { Database } from "jsr:@db/sqlite";
import {join} from "jsr:@std/path";
let db = new Database("./db.sqlite3")
const stmt = db.prepare("SELECT id, basepath, filename from document");
let ds = [...stmt.all()];

async function oshash(
    path: string
){
    const chunkSize = 4096;
    const minFileSize = chunkSize * 2;

    const fd = await Deno.open(path);
    const st = await fd.stat();
    let hash = BigInt(st.size);

    if (st.size < minFileSize){
        throw new Error("File is too small to hash");
    }

    // read first and last chunk
    const firstChunk = new Uint8Array(chunkSize);
    await fd.read(firstChunk, 0, chunkSize, 0);
    const lastChunk = new Uint8Array(chunkSize);
    await fd.read(lastChunk, 0, chunkSize, st.size - chunkSize);
    // iterate over first and last chunk.
    // for each uint64_t, add it to the hash.
    const firstChunkView = new DataView(firstChunk.buffer);
    for (let i = 0; i < chunkSize; i += 8){
        hash += firstChunkView.getBigUint64(i, true);
        // prevent overflow
        hash = (hash & 0xFFFFFFFFFFFFFFFFn);
    }
    const lastChunkView = new DataView(lastChunk.buffer);
    for (let i = 0; i < chunkSize; i += 8){
        hash += lastChunkView.getBigUint64(i, true);
        // prevent overflow
        hash = (hash & 0xFFFFFFFFFFFFFFFFn);
    }
    return hash;
}

async function updateHash(ds: {id: number, basepath: string, filename: string}[]) {
    const content_hashs = await Promise.all(ds.map(async (d) => {
        const p = join(d.basepath, d.filename);
        return await oshash(p);
    }));
    db.transaction(() => {
        for (let i = 0; i < ds.length; i++) {
            db.run(`UPDATE document SET content_hash = ? where id = ?`, content_hashs[i].toString(), ds[i].id)
        }
    })();
}

for (let i = 0; i < ds.length; i += 32) {
    const d = ds.slice(i, i + 32);
    console.log(d.map(x => x.id));
    await updateHash(d);
}
db.close();
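Note on the deleted script: it opened one descriptor per document and never closed any of them, which is the leak this commit fixes in the shared oshash module. Deno.FsFile.read() also takes only a buffer and may return a short read, so the extra offset/length/position arguments above were silently ignored and the "last chunk" read actually continued from the current file offset. Below is a minimal sketch of the same hash with the descriptor guarded by try/finally and explicit positioned reads; oshashSafe and readChunkAt are illustrative names, not code from this repository.

// Sketch only: descriptor guarded by try/finally, positioned reads done
// explicitly. Names (oshashSafe, readChunkAt) are illustrative.

// Read exactly `size` bytes starting at byte offset `pos`.
// Deno.FsFile.read() takes only a buffer and may return short reads,
// so seek first and loop until the buffer is full.
async function readChunkAt(fd: Deno.FsFile, pos: number, size: number): Promise<Uint8Array> {
    const buf = new Uint8Array(size);
    await fd.seek(pos, Deno.SeekMode.Start);
    let filled = 0;
    while (filled < size) {
        const n = await fd.read(buf.subarray(filled));
        if (n === null) throw new Error("unexpected EOF");
        filled += n;
    }
    return buf;
}

async function oshashSafe(path: string): Promise<bigint> {
    const chunkSize = 4096;
    const fd = await Deno.open(path);
    try {
        const st = await fd.stat();
        if (st.size < chunkSize * 2) {
            // Even this early throw cannot leak fd: finally still runs.
            throw new Error("File is too small to hash");
        }
        let hash = BigInt(st.size);
        for (const chunk of [
            await readChunkAt(fd, 0, chunkSize),
            await readChunkAt(fd, st.size - chunkSize, chunkSize),
        ]) {
            const view = new DataView(chunk.buffer);
            for (let i = 0; i < chunkSize; i += 8) {
                // Sum 64-bit words, masked to emulate uint64 overflow.
                hash = (hash + view.getBigUint64(i, true)) & 0xFFFFFFFFFFFFFFFFn;
            }
        }
        return hash;
    } finally {
        fd.close();
    }
}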
@@ -51,11 +51,13 @@ export class ContentDiffHandler
			console.log("its not in waiting_list and db!!!: ", cpath);
			return;
		}
		console.log("deleted ", cpath, dbc[0].content_hash)
		const content_hash = dbc[0].content_hash;
		// When a path is changed, it shows up as a creation event
		// followed by a deletion event rather than a change event;
		// account for that here.
		const cf = this.waiting_list.getByHash(content_hash);

		if (cf) {
			// if a path is changed, update the changed path.
			console.log("update path from", cpath, "to", cf.path);
@@ -75,14 +77,15 @@ export class ContentDiffHandler
			id: dbc[0].id,
			deleted_at: Date.now(),
		});
		console.log(content_hash)
		this.tombstone.set(content_hash, dbc[0]);
	}
	private async OnCreated(cpath: string) {
		const basepath = dirname(cpath);
		const filename = basename(cpath);
		console.log("createContentFile", cpath);
		const content = createContentFile(this.content_type, cpath);
		const hash = await content.getHash();
		console.log("create", cpath, hash)
		const c = this.tombstone.get(hash);
		if (c !== undefined) {
			await this.doc_cntr.update({
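Note on the two ContentDiffHandler hunks: a deletion parks the row in a tombstone map keyed by content hash, and a later creation with the same hash is treated as a move rather than a brand-new document. A minimal sketch of that pattern follows; every name in it is illustrative, not the project's actual API.

// Sketch of delete-then-create rename detection keyed by content hash.
// All names here are illustrative, not the project's actual API.
type DocRow = { id: number; path: string; content_hash: string };

class RenameDetector {
    private tombstone = new Map<string, DocRow>();

    // A deletion parks the row under its content hash.
    onDeleted(row: DocRow): void {
        this.tombstone.set(row.content_hash, row);
    }

    // A creation with a known hash is a move; otherwise it is a new file.
    onCreated(path: string, hash: string): "moved" | "new" {
        const prev = this.tombstone.get(hash);
        if (prev === undefined) return "new";
        this.tombstone.delete(hash);
        console.log("update path from", prev.path, "to", path);
        return "moved";
    }
}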
@@ -42,5 +42,6 @@ export async function oshash(
        // prevent overflow
        hash = (hash & 0xFFFFFFFFFFFFFFFFn);
    }
    fd.close();
    return hash;
}
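Note on the fix itself: fd.close() before the return covers the normal path, but if the shared module keeps the script's "file is too small" guard, that early throw would still skip the close. A try/finally as in the sketch above, or explicit resource management, closes the descriptor on every exit path. A minimal sketch assuming Deno 1.36+ (where Deno.FsFile implements Disposable) and TypeScript 5.2 `using`; oshashUsing is an illustrative name:

// Sketch only (assumes Deno 1.36+ / TS 5.2 `using`): the descriptor is
// closed on every exit path, including throws.
export async function oshashUsing(path: string): Promise<bigint> {
    const chunkSize = 4096;
    using fd = await Deno.open(path);  // auto-closed at scope exit
    const st = await fd.stat();
    if (st.size < chunkSize * 2) {
        throw new Error("File is too small to hash");  // fd still closed
    }
    let hash = BigInt(st.size);
    // ... same two-chunk summation as in the try/finally sketch above ...
    return hash & 0xFFFFFFFFFFFFFFFFn;
}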