github.com/wbrown/gpt_bpe@v0.0.0-20250709161131-1571a6e8ad2d/js/test.js

// Smoke test and benchmark for the WASM build of gpt_bpe.
// Tokenizes a short sample string, round-trips it through decode, then
// benchmarks tokenization of the Frankenstein corpus over 100 iterations.

const fs = require('fs');

const corpus = fs.readFileSync("../resources/frankenstein.txt");

// Resolve after `ms` milliseconds; used to give the WASM module time to initialize.
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}

// Debugging helper: list every property name reachable on an object's
// prototype chain. Not used by the test below, but handy for inspecting
// what the WASM module exports.
const getMethods = (obj) => {
    let properties = new Set();
    let currentObj = obj;
    do {
        Object.getOwnPropertyNames(currentObj).map(item => properties.add(item));
    } while ((currentObj = Object.getPrototypeOf(currentObj)));
    return [...properties.keys()];
};

async function main() {
    try {
        const gpt_bpe_obj = await import('./gpt_bpe.js');
        // Give the WASM runtime a moment to finish initializing.
        await sleep(1000);

        console.log("Tokenizing sample text...");
        const sampleText = "Hello, world! This is a test.";
        const sampleTokens = gpt_bpe_obj.default.tokenize(sampleText);
        console.log("Sample tokens:", sampleTokens);

        if (sampleTokens && sampleTokens.length > 0) {
            const decodedText = gpt_bpe_obj.default.decode(sampleTokens);
            console.log("Decoded text:", decodedText);
        }

        console.log("\nRunning benchmark...");
        const times = [];
        for (let i = 0; i < 100; i++) {
            const start = process.hrtime.bigint();

            const tokens = gpt_bpe_obj.default.tokenize(corpus);

            const end = process.hrtime.bigint();
            // hrtime values are nanoseconds; convert the delta to milliseconds.
            const duration = Number(end - start) / 1000000;
            times.push(duration);

            if (i === 0) {
                console.log(`Corpus tokenized into ${tokens.length} tokens`);
            }
        }

        const avgTime = times.reduce((a, b) => a + b, 0) / times.length;
        const minTime = Math.min(...times);
        const maxTime = Math.max(...times);

        console.log(`\nBenchmark results (100 iterations):`);
        console.log(`Average: ${avgTime.toFixed(2)}ms`);
        console.log(`Min: ${minTime.toFixed(2)}ms`);
        console.log(`Max: ${maxTime.toFixed(2)}ms`);
    }
    catch (err) {
        console.error(err);
    }
}

main();
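
// Possible extension (a sketch, not part of the original test): turn the raw
// timings into a throughput figure. `reportThroughput` is a hypothetical
// helper; its arguments mirror the `times` array and the token count produced
// inside main() above.
function reportThroughput(timesMs, tokenCount) {
    // Average milliseconds per iteration -> tokens processed per second.
    const avgMs = timesMs.reduce((a, b) => a + b, 0) / timesMs.length;
    const tokensPerSecond = tokenCount / (avgMs / 1000);
    console.log(`Throughput: ${tokensPerSecond.toFixed(0)} tokens/s`);
}
// Usage: capture the token count from one benchmark iteration, then call
// reportThroughput(times, tokenCount) after the loop in main().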