we (web engine): an experimental web browser project exploring the limits of Claude
2
fork

Configure Feed

Select the types of activity you want to include in your feed.

Implement JS bytecode format and AST-to-bytecode compiler

Add register-based bytecode instruction set designed for JIT compilation
and a recursive-descent compiler that transforms the parser AST into
bytecode functions.

New files:
- bytecode.rs — Instruction set (50+ opcodes), constant pool, function
objects, bytecode builder with jump patching, and disassembler
- compiler.rs — AST → bytecode compiler with greedy register allocation,
scope-aware local variable binding, and support for all statement and
expression types

Instruction categories: register loads, arithmetic, bitwise, comparison,
logical, control flow (jumps, calls, returns), object/property access,
closures, and small-integer optimization (LoadInt8).

Compiler handles: variable declarations, function/class/arrow declarations,
if/else, for/while/do-while loops with break/continue/labels, switch,
try/catch, member access, assignment (simple and compound), logical
short-circuit, conditional expressions, template literals, destructuring,
object/array literals, and nested function compilation.

52 unit tests covering bytecode encoding, constant pool dedup, jump
patching, disassembler output, and compilation of all major grammar
productions. All 176 workspace JS tests pass.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

+2644
+892
crates/js/src/bytecode.rs
//! Register-based bytecode format for the JavaScript engine.
//!
//! Each instruction encodes register operands directly, making it suitable for
//! JIT compilation. Instructions are variable-length, encoded as a 1-byte opcode
//! followed by operand bytes.

use std::fmt;

/// A register index (0–255).
pub type Reg = u8;

/// An index into the constant pool.
pub type ConstIdx = u16;

/// An index into the name/string table.
pub type NameIdx = u16;

/// Bytecode instruction set.
///
/// Discriminants are the on-the-wire opcode bytes; they are grouped in ranges
/// (0x0x loads, 0x1x arithmetic, 0x2x bitwise, 0x3x comparison/unary,
/// 0x4x control flow, 0x5x calls, 0x6x objects, 0x7x misc) with gaps left for
/// future instructions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum Op {
    // ── Register loads ──────────────────────────────────────
    /// LoadConst dst, const_idx(u16) — load constant pool entry into register
    LoadConst = 0x01,
    /// LoadNull dst
    LoadNull = 0x02,
    /// LoadUndefined dst
    LoadUndefined = 0x03,
    /// LoadTrue dst
    LoadTrue = 0x04,
    /// LoadFalse dst
    LoadFalse = 0x05,
    /// Move dst, src
    Move = 0x06,

    // ── Global variable access ──────────────────────────────
    /// LoadGlobal dst, name_idx(u16)
    LoadGlobal = 0x07,
    /// StoreGlobal name_idx(u16), src
    StoreGlobal = 0x08,

    // ── Arithmetic ──────────────────────────────────────────
    /// Add dst, lhs, rhs
    Add = 0x10,
    /// Sub dst, lhs, rhs
    Sub = 0x11,
    /// Mul dst, lhs, rhs
    Mul = 0x12,
    /// Div dst, lhs, rhs
    Div = 0x13,
    /// Rem dst, lhs, rhs
    Rem = 0x14,
    /// Exp dst, lhs, rhs
    Exp = 0x15,
    /// Neg dst, src (unary minus)
    Neg = 0x16,

    // ── Bitwise ─────────────────────────────────────────────
    /// BitAnd dst, lhs, rhs
    BitAnd = 0x20,
    /// BitOr dst, lhs, rhs
    BitOr = 0x21,
    /// BitXor dst, lhs, rhs
    BitXor = 0x22,
    /// ShiftLeft dst, lhs, rhs
    ShiftLeft = 0x23,
    /// ShiftRight dst, lhs, rhs
    ShiftRight = 0x24,
    /// UShiftRight dst, lhs, rhs
    UShiftRight = 0x25,
    /// BitNot dst, src
    BitNot = 0x26,

    // ── Comparison ──────────────────────────────────────────
    /// Eq dst, lhs, rhs (==)
    Eq = 0x30,
    /// StrictEq dst, lhs, rhs (===)
    StrictEq = 0x31,
    /// NotEq dst, lhs, rhs (!=)
    NotEq = 0x32,
    /// StrictNotEq dst, lhs, rhs (!==)
    StrictNotEq = 0x33,
    /// LessThan dst, lhs, rhs
    LessThan = 0x34,
    /// LessEq dst, lhs, rhs
    LessEq = 0x35,
    /// GreaterThan dst, lhs, rhs
    GreaterThan = 0x36,
    /// GreaterEq dst, lhs, rhs
    GreaterEq = 0x37,

    // ── Logical / unary ─────────────────────────────────────
    /// LogicalNot dst, src
    LogicalNot = 0x38,
    /// TypeOf dst, src
    TypeOf = 0x39,
    /// InstanceOf dst, lhs, rhs
    InstanceOf = 0x3A,
    /// In dst, lhs, rhs
    In = 0x3B,
    /// Void dst, src — evaluate src, produce undefined
    Void = 0x3C,

    // ── Control flow ────────────────────────────────────────
    /// Jump offset(i32) — unconditional relative jump
    Jump = 0x40,
    /// JumpIfTrue reg, offset(i32)
    JumpIfTrue = 0x41,
    /// JumpIfFalse reg, offset(i32)
    JumpIfFalse = 0x42,
    /// JumpIfNullish reg, offset(i32)
    JumpIfNullish = 0x43,

    // ── Functions / calls ───────────────────────────────────
    /// Call dst, func_reg, args_start, arg_count(u8)
    Call = 0x50,
    /// Return reg
    Return = 0x51,
    /// Throw reg
    Throw = 0x52,
    /// CreateClosure dst, func_idx(u16) — create a closure from a nested function
    CreateClosure = 0x53,

    // ── Object / property ───────────────────────────────────
    /// GetProperty dst, obj_reg, key_reg
    GetProperty = 0x60,
    /// SetProperty obj_reg, key_reg, val_reg
    SetProperty = 0x61,
    /// CreateObject dst
    CreateObject = 0x62,
    /// CreateArray dst
    CreateArray = 0x63,
    /// GetPropertyByName dst, obj_reg, name_idx(u16) — optimized named access
    GetPropertyByName = 0x64,
    /// SetPropertyByName obj_reg, name_idx(u16), val_reg — optimized named store
    SetPropertyByName = 0x65,

    // ── Misc ────────────────────────────────────────────────
    /// Delete dst, obj_reg, key_reg
    Delete = 0x70,
    /// LoadInt8 dst, i8 — load small integer without constant pool
    LoadInt8 = 0x71,
}

impl Op {
    /// Decode an opcode from a byte, returning `None` for unrecognized values.
    pub fn from_byte(b: u8) -> Option<Op> {
        match b {
            0x01 => Some(Op::LoadConst),
            0x02 => Some(Op::LoadNull),
            0x03 => Some(Op::LoadUndefined),
            0x04 => Some(Op::LoadTrue),
            0x05 => Some(Op::LoadFalse),
            0x06 => Some(Op::Move),
            0x07 => Some(Op::LoadGlobal),
            0x08 => Some(Op::StoreGlobal),
            0x10 => Some(Op::Add),
            0x11 => Some(Op::Sub),
            0x12 => Some(Op::Mul),
            0x13 => Some(Op::Div),
            0x14 => Some(Op::Rem),
            0x15 => Some(Op::Exp),
            0x16 => Some(Op::Neg),
            0x20 => Some(Op::BitAnd),
            0x21 => Some(Op::BitOr),
            0x22 => Some(Op::BitXor),
            0x23 => Some(Op::ShiftLeft),
            0x24 => Some(Op::ShiftRight),
            0x25 => Some(Op::UShiftRight),
            0x26 => Some(Op::BitNot),
            0x30 => Some(Op::Eq),
            0x31 => Some(Op::StrictEq),
            0x32 => Some(Op::NotEq),
            0x33 => Some(Op::StrictNotEq),
            0x34 => Some(Op::LessThan),
            0x35 => Some(Op::LessEq),
            0x36 => Some(Op::GreaterThan),
            0x37 => Some(Op::GreaterEq),
            0x38 => Some(Op::LogicalNot),
            0x39 => Some(Op::TypeOf),
            0x3A => Some(Op::InstanceOf),
            0x3B => Some(Op::In),
            0x3C => Some(Op::Void),
            0x40 => Some(Op::Jump),
            0x41 => Some(Op::JumpIfTrue),
            0x42 => Some(Op::JumpIfFalse),
            0x43 => Some(Op::JumpIfNullish),
            0x50 => Some(Op::Call),
            0x51 => Some(Op::Return),
            0x52 => Some(Op::Throw),
            0x53 => Some(Op::CreateClosure),
            0x60 => Some(Op::GetProperty),
            0x61 => Some(Op::SetProperty),
            0x62 => Some(Op::CreateObject),
            0x63 => Some(Op::CreateArray),
            0x64 => Some(Op::GetPropertyByName),
            0x65 => Some(Op::SetPropertyByName),
            0x70 => Some(Op::Delete),
            0x71 => Some(Op::LoadInt8),
            _ => None,
        }
    }
}

/// A constant value stored in the constant pool.
#[derive(Debug, Clone, PartialEq)]
pub enum Constant {
    Number(f64),
    String(String),
}

/// A compiled bytecode function.
#[derive(Debug, Clone)]
pub struct Function {
    /// Name of the function (empty for anonymous / top-level).
    pub name: String,
    /// Number of named parameters.
    pub param_count: u8,
    /// Total register slots needed.
    pub register_count: u8,
    /// The bytecode bytes.
    pub code: Vec<u8>,
    /// Constant pool (numbers and strings).
    pub constants: Vec<Constant>,
    /// Name table for global/property name lookups.
    pub names: Vec<String>,
    /// Nested function definitions (referenced by CreateClosure).
    pub functions: Vec<Function>,
    /// Source map: bytecode offset → source line (sorted by offset).
    pub source_map: Vec<(u32, u32)>,
}

impl Function {
    /// Create an empty function shell; code/constants are filled in by
    /// [`BytecodeBuilder`].
    pub fn new(name: String, param_count: u8) -> Self {
        Self {
            name,
            param_count,
            register_count: 0,
            code: Vec::new(),
            constants: Vec::new(),
            names: Vec::new(),
            functions: Vec::new(),
            source_map: Vec::new(),
        }
    }
}

/// A builder that emits bytecode into a `Function`.
pub struct BytecodeBuilder {
    pub func: Function,
}

impl BytecodeBuilder {
    pub fn new(name: String, param_count: u8) -> Self {
        Self {
            func: Function::new(name, param_count),
        }
    }

    /// Current bytecode offset (position of next emitted byte).
    pub fn offset(&self) -> usize {
        self.func.code.len()
    }

    /// Intern a constant, returning its index.
    ///
    /// Deduplicates against existing entries: strings compare by equality,
    /// numbers by bit pattern (so `0.0` and `-0.0` stay distinct and NaN
    /// dedups against an identical NaN).
    pub fn add_constant(&mut self, c: Constant) -> ConstIdx {
        // Reuse existing constant if it matches.
        for (i, existing) in self.func.constants.iter().enumerate() {
            match (existing, &c) {
                (Constant::String(a), Constant::String(b)) if a == b => return i as ConstIdx,
                (Constant::Number(a), Constant::Number(b)) if a.to_bits() == b.to_bits() => {
                    return i as ConstIdx;
                }
                _ => {}
            }
        }
        let idx = self.func.constants.len();
        assert!(idx <= u16::MAX as usize, "constant pool overflow");
        self.func.constants.push(c);
        idx as ConstIdx
    }

    /// Intern a name, returning its index. Duplicate names share one slot.
    pub fn add_name(&mut self, name: &str) -> NameIdx {
        for (i, existing) in self.func.names.iter().enumerate() {
            if existing == name {
                return i as NameIdx;
            }
        }
        let idx = self.func.names.len();
        assert!(idx <= u16::MAX as usize, "name table overflow");
        self.func.names.push(name.to_string());
        idx as NameIdx
    }

    /// Add a nested function, returning its index (for `CreateClosure`).
    pub fn add_function(&mut self, f: Function) -> u16 {
        let idx = self.func.functions.len();
        assert!(idx <= u16::MAX as usize, "function table overflow");
        self.func.functions.push(f);
        idx as u16
    }

    // ── Emit helpers ────────────────────────────────────────

    fn emit_u8(&mut self, v: u8) {
        self.func.code.push(v);
    }

    fn emit_u16(&mut self, v: u16) {
        self.func.code.extend_from_slice(&v.to_le_bytes());
    }

    fn emit_i32(&mut self, v: i32) {
        self.func.code.extend_from_slice(&v.to_le_bytes());
    }

    // ── Single-operand instructions ─────────────────────────

    /// Emit: LoadNull dst | LoadUndefined dst | LoadTrue dst | LoadFalse dst
    pub fn emit_reg(&mut self, op: Op, dst: Reg) {
        self.emit_u8(op as u8);
        self.emit_u8(dst);
    }

    /// Emit: Move dst, src
    pub fn emit_reg_reg(&mut self, op: Op, a: Reg, b: Reg) {
        self.emit_u8(op as u8);
        self.emit_u8(a);
        self.emit_u8(b);
    }

    /// Emit: Add dst, lhs, rhs (and other 3-register instructions)
    pub fn emit_reg3(&mut self, op: Op, dst: Reg, lhs: Reg, rhs: Reg) {
        self.emit_u8(op as u8);
        self.emit_u8(dst);
        self.emit_u8(lhs);
        self.emit_u8(rhs);
    }

    /// Emit: LoadConst dst, const_idx | CreateClosure dst, func_idx
    pub fn emit_reg_u16(&mut self, op: Op, dst: Reg, idx: u16) {
        self.emit_u8(op as u8);
        self.emit_u8(dst);
        self.emit_u16(idx);
    }

    /// Emit: LoadGlobal dst, name_idx
    pub fn emit_load_global(&mut self, dst: Reg, name_idx: NameIdx) {
        self.emit_u8(Op::LoadGlobal as u8);
        self.emit_u8(dst);
        self.emit_u16(name_idx);
    }

    /// Emit: StoreGlobal name_idx, src
    pub fn emit_store_global(&mut self, name_idx: NameIdx, src: Reg) {
        self.emit_u8(Op::StoreGlobal as u8);
        self.emit_u16(name_idx);
        self.emit_u8(src);
    }

    /// Emit: Jump with a placeholder offset, returning the patch position for
    /// a later [`Self::patch_jump`].
    ///
    /// # Panics
    /// Panics if `op` is a conditional jump; those carry a register operand
    /// and must go through [`Self::emit_cond_jump`].
    pub fn emit_jump(&mut self, op: Op) -> usize {
        // Validate before emitting anything so the panic path cannot leave a
        // stray opcode byte in the stream (the previous version emitted the
        // opcode first and then panicked).
        if matches!(op, Op::JumpIfTrue | Op::JumpIfFalse | Op::JumpIfNullish) {
            panic!("use emit_cond_jump for conditional jumps");
        }
        self.emit_u8(op as u8);
        let pos = self.offset();
        self.emit_i32(0); // placeholder
        pos
    }

    /// Emit: JumpIfTrue/JumpIfFalse/JumpIfNullish reg, offset (placeholder)
    pub fn emit_cond_jump(&mut self, op: Op, reg: Reg) -> usize {
        self.emit_u8(op as u8);
        self.emit_u8(reg);
        let pos = self.offset();
        self.emit_i32(0); // placeholder
        pos
    }

    /// Patch a jump offset at the given position to point to the current offset.
    pub fn patch_jump(&mut self, pos: usize) {
        let target = self.offset() as i32;
        let offset = target - (pos as i32 + 4); // relative to after the i32
        self.func.code[pos..pos + 4].copy_from_slice(&offset.to_le_bytes());
    }

    /// Emit: Jump with a known target (typically a backward jump).
    pub fn emit_jump_to(&mut self, target: usize) {
        self.emit_u8(Op::Jump as u8);
        // Offsets are relative to the end of the i32 operand.
        let from = self.offset() as i32 + 4;
        let offset = target as i32 - from;
        self.emit_i32(offset);
    }

    /// Emit: JumpIfTrue/JumpIfFalse with a known target (typically backward).
    pub fn emit_cond_jump_to(&mut self, op: Op, reg: Reg, target: usize) {
        self.emit_u8(op as u8);
        self.emit_u8(reg);
        let from = self.offset() as i32 + 4;
        let offset = target as i32 - from;
        self.emit_i32(offset);
    }

    /// Emit: Call dst, func_reg, args_start, arg_count
    pub fn emit_call(&mut self, dst: Reg, func: Reg, args_start: Reg, arg_count: u8) {
        self.emit_u8(Op::Call as u8);
        self.emit_u8(dst);
        self.emit_u8(func);
        self.emit_u8(args_start);
        self.emit_u8(arg_count);
    }

    /// Emit: GetPropertyByName dst, obj, name_idx
    pub fn emit_get_prop_name(&mut self, dst: Reg, obj: Reg, name_idx: NameIdx) {
        self.emit_u8(Op::GetPropertyByName as u8);
        self.emit_u8(dst);
        self.emit_u8(obj);
        self.emit_u16(name_idx);
    }

    /// Emit: SetPropertyByName obj, name_idx, val
    pub fn emit_set_prop_name(&mut self, obj: Reg, name_idx: NameIdx, val: Reg) {
        self.emit_u8(Op::SetPropertyByName as u8);
        self.emit_u8(obj);
        self.emit_u16(name_idx);
        self.emit_u8(val);
    }

    /// Emit: LoadInt8 dst, value
    pub fn emit_load_int8(&mut self, dst: Reg, value: i8) {
        self.emit_u8(Op::LoadInt8 as u8);
        self.emit_u8(dst);
        self.emit_u8(value as u8);
    }

    /// Add a source map entry: current bytecode offset → source line.
    pub fn add_source_map(&mut self, line: u32) {
        let offset = self.offset() as u32;
        self.func.source_map.push((offset, line));
    }

    /// Finalize and return the compiled function.
    pub fn finish(self) -> Function {
        self.func
    }
}

// ── Disassembler ──────────────────────────────────────────────

impl Function {
    /// Disassemble the bytecode to a human-readable string.
    ///
    /// Instructions with identical operand layouts share a decode arm and are
    /// rendered via `{op:?}`, which matches the instruction's mnemonic.
    /// NOTE(review): decoding indexes `code[pc + k]` without bounds checks and
    /// will panic on truncated bytecode — acceptable for a debug tool fed by
    /// our own builder, but worth hardening if it ever sees external input.
    pub fn disassemble(&self) -> String {
        let mut out = String::new();
        out.push_str(&format!(
            "function {}({} params, {} regs):\n",
            if self.name.is_empty() {
                "<anonymous>"
            } else {
                &self.name
            },
            self.param_count,
            self.register_count,
        ));

        // Constants
        if !self.constants.is_empty() {
            out.push_str("  constants:\n");
            for (i, c) in self.constants.iter().enumerate() {
                out.push_str(&format!("    [{i}] {c:?}\n"));
            }
        }

        // Names
        if !self.names.is_empty() {
            out.push_str("  names:\n");
            for (i, n) in self.names.iter().enumerate() {
                out.push_str(&format!("    [{i}] \"{n}\"\n"));
            }
        }

        // Instructions
        out.push_str("  code:\n");
        let code = &self.code;
        let mut pc = 0;
        while pc < code.len() {
            let offset = pc;
            let byte = code[pc];
            pc += 1;
            let Some(op) = Op::from_byte(byte) else {
                out.push_str(&format!("    {offset:04X}  <unknown 0x{byte:02X}>\n"));
                continue;
            };
            let line = match op {
                Op::LoadConst => {
                    let dst = code[pc];
                    pc += 1;
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    format!("LoadConst r{dst}, #{idx}")
                }
                // One register operand: "<Mnemonic> rN".
                Op::LoadNull
                | Op::LoadUndefined
                | Op::LoadTrue
                | Op::LoadFalse
                | Op::CreateObject
                | Op::CreateArray
                | Op::Return
                | Op::Throw => {
                    let reg = code[pc];
                    pc += 1;
                    format!("{op:?} r{reg}")
                }
                // Two register operands: "<Mnemonic> rA, rB".
                Op::Move | Op::Neg | Op::BitNot | Op::LogicalNot | Op::TypeOf | Op::Void => {
                    let dst = code[pc];
                    let src = code[pc + 1];
                    pc += 2;
                    format!("{op:?} r{dst}, r{src}")
                }
                Op::LoadGlobal => {
                    let dst = code[pc];
                    pc += 1;
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    let name = self
                        .names
                        .get(idx as usize)
                        .map(|s| s.as_str())
                        .unwrap_or("?");
                    format!("LoadGlobal r{dst}, @{idx}(\"{name}\")")
                }
                Op::StoreGlobal => {
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    let src = code[pc];
                    pc += 1;
                    let name = self
                        .names
                        .get(idx as usize)
                        .map(|s| s.as_str())
                        .unwrap_or("?");
                    format!("StoreGlobal @{idx}(\"{name}\"), r{src}")
                }
                // Three register operands: "<Mnemonic> rA, rB, rC".
                Op::Add
                | Op::Sub
                | Op::Mul
                | Op::Div
                | Op::Rem
                | Op::Exp
                | Op::BitAnd
                | Op::BitOr
                | Op::BitXor
                | Op::ShiftLeft
                | Op::ShiftRight
                | Op::UShiftRight
                | Op::Eq
                | Op::StrictEq
                | Op::NotEq
                | Op::StrictNotEq
                | Op::LessThan
                | Op::LessEq
                | Op::GreaterThan
                | Op::GreaterEq
                | Op::InstanceOf
                | Op::In
                | Op::GetProperty
                | Op::SetProperty
                | Op::Delete => {
                    let a = code[pc];
                    let b = code[pc + 1];
                    let c = code[pc + 2];
                    pc += 3;
                    format!("{op:?} r{a}, r{b}, r{c}")
                }
                Op::Jump => {
                    let off =
                        i32::from_le_bytes([code[pc], code[pc + 1], code[pc + 2], code[pc + 3]]);
                    pc += 4;
                    let target = pc as i32 + off;
                    format!("Jump {off:+} (-> {target:04X})")
                }
                Op::JumpIfTrue | Op::JumpIfFalse | Op::JumpIfNullish => {
                    let reg = code[pc];
                    pc += 1;
                    let off =
                        i32::from_le_bytes([code[pc], code[pc + 1], code[pc + 2], code[pc + 3]]);
                    pc += 4;
                    let target = pc as i32 + off;
                    format!("{op:?} r{reg}, {off:+} (-> {target:04X})")
                }
                Op::Call => {
                    let dst = code[pc];
                    let func = code[pc + 1];
                    let args_start = code[pc + 2];
                    let arg_count = code[pc + 3];
                    pc += 4;
                    format!("Call r{dst}, r{func}, r{args_start}, {arg_count}")
                }
                Op::CreateClosure => {
                    let dst = code[pc];
                    pc += 1;
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    format!("CreateClosure r{dst}, func#{idx}")
                }
                Op::GetPropertyByName => {
                    let dst = code[pc];
                    let obj = code[pc + 1];
                    pc += 2;
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    let name = self
                        .names
                        .get(idx as usize)
                        .map(|s| s.as_str())
                        .unwrap_or("?");
                    format!("GetPropertyByName r{dst}, r{obj}, @{idx}(\"{name}\")")
                }
                Op::SetPropertyByName => {
                    let obj = code[pc];
                    pc += 1;
                    let idx = u16::from_le_bytes([code[pc], code[pc + 1]]);
                    pc += 2;
                    let val = code[pc];
                    pc += 1;
                    let name = self
                        .names
                        .get(idx as usize)
                        .map(|s| s.as_str())
                        .unwrap_or("?");
                    format!("SetPropertyByName r{obj}, @{idx}(\"{name}\"), r{val}")
                }
                Op::LoadInt8 => {
                    let dst = code[pc];
                    let val = code[pc + 1] as i8;
                    pc += 2;
                    format!("LoadInt8 r{dst}, {val}")
                }
            };
            out.push_str(&format!("    {offset:04X}  {line}\n"));
        }

        // Nested functions
        for (i, f) in self.functions.iter().enumerate() {
            out.push_str(&format!("\n  --- nested function [{i}] ---\n"));
            let nested_dis = f.disassemble();
            for line in nested_dis.lines() {
                out.push_str(&format!("  {line}\n"));
            }
        }

        out
    }
}

impl fmt::Display for Function {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.disassemble())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_op_roundtrip() {
        // Verify all opcodes survive byte conversion.
        let ops = [
            Op::LoadConst,
            Op::LoadNull,
            Op::LoadUndefined,
            Op::LoadTrue,
            Op::LoadFalse,
            Op::Move,
            Op::LoadGlobal,
            Op::StoreGlobal,
            Op::Add,
            Op::Sub,
            Op::Mul,
            Op::Div,
            Op::Rem,
            Op::Exp,
            Op::Neg,
            Op::BitAnd,
            Op::BitOr,
            Op::BitXor,
            Op::ShiftLeft,
            Op::ShiftRight,
            Op::UShiftRight,
            Op::BitNot,
            Op::Eq,
            Op::StrictEq,
            Op::NotEq,
            Op::StrictNotEq,
            Op::LessThan,
            Op::LessEq,
            Op::GreaterThan,
            Op::GreaterEq,
            Op::LogicalNot,
            Op::TypeOf,
            Op::InstanceOf,
            Op::In,
            Op::Void,
            Op::Jump,
            Op::JumpIfTrue,
            Op::JumpIfFalse,
            Op::JumpIfNullish,
            Op::Call,
            Op::Return,
            Op::Throw,
            Op::CreateClosure,
            Op::GetProperty,
            Op::SetProperty,
            Op::CreateObject,
            Op::CreateArray,
            Op::GetPropertyByName,
            Op::SetPropertyByName,
            Op::Delete,
            Op::LoadInt8,
        ];
        for op in ops {
            assert_eq!(
                Op::from_byte(op as u8),
                Some(op),
                "roundtrip failed for {op:?}"
            );
        }
    }

    #[test]
    fn test_unknown_opcode() {
        assert_eq!(Op::from_byte(0xFF), None);
        assert_eq!(Op::from_byte(0x00), None);
    }

    #[test]
    fn test_constant_pool_dedup() {
        let mut b = BytecodeBuilder::new("test".into(), 0);
        let i1 = b.add_constant(Constant::Number(42.0));
        let i2 = b.add_constant(Constant::Number(42.0));
        assert_eq!(i1, i2);
        let i3 = b.add_constant(Constant::String("hello".into()));
        let i4 = b.add_constant(Constant::String("hello".into()));
        assert_eq!(i3, i4);
        assert_ne!(i1, i3);
        assert_eq!(b.func.constants.len(), 2);
    }

    #[test]
    fn test_name_dedup() {
        let mut b = BytecodeBuilder::new("test".into(), 0);
        let i1 = b.add_name("foo");
        let i2 = b.add_name("foo");
        assert_eq!(i1, i2);
        let i3 = b.add_name("bar");
        assert_ne!(i1, i3);
    }

    #[test]
    fn test_emit_and_disassemble() {
        let mut b = BytecodeBuilder::new("test".into(), 0);
        b.func.register_count = 3;
        let ci = b.add_constant(Constant::Number(10.0));
        b.emit_reg_u16(Op::LoadConst, 0, ci);
        b.emit_reg(Op::LoadNull, 1);
        b.emit_reg3(Op::Add, 2, 0, 1);
        b.emit_reg(Op::Return, 2);
        let func = b.finish();
        let dis = func.disassemble();
        assert!(dis.contains("LoadConst r0, #0"));
        assert!(dis.contains("LoadNull r1"));
        assert!(dis.contains("Add r2, r0, r1"));
        assert!(dis.contains("Return r2"));
    }

    #[test]
    fn test_jump_patching() {
        let mut b = BytecodeBuilder::new("test".into(), 0);
        b.func.register_count = 1;
        b.emit_reg(Op::LoadTrue, 0);
        let patch = b.emit_cond_jump(Op::JumpIfFalse, 0);
        b.emit_reg(Op::LoadNull, 0);
        b.patch_jump(patch);
        b.emit_reg(Op::Return, 0);
        let func = b.finish();
        let dis = func.disassemble();
        // The jump should target the Return instruction
        assert!(dis.contains("JumpIfFalse"));
        assert!(dis.contains("Return"));
    }

    #[test]
    fn test_load_int8() {
        let mut b = BytecodeBuilder::new("test".into(), 0);
        b.func.register_count = 1;
        b.emit_load_int8(0, 42);
        b.emit_load_int8(0, -1);
        let func = b.finish();
        let dis = func.disassemble();
        assert!(dis.contains("LoadInt8 r0, 42"));
        assert!(dis.contains("LoadInt8 r0, -1"));
    }
}
+1750
crates/js/src/compiler.rs
··· 1 + //! AST → register-based bytecode compiler. 2 + //! 3 + //! Walks the AST produced by the parser and emits bytecode instructions. 4 + //! Uses a simple greedy register allocator: each new temporary gets the next 5 + //! available register, and registers are freed when no longer needed. 6 + 7 + use crate::ast::*; 8 + use crate::bytecode::*; 9 + use crate::JsError; 10 + 11 + /// Compiler state for a single function scope. 12 + struct FunctionCompiler { 13 + builder: BytecodeBuilder, 14 + /// Maps local variable names to their register slots. 15 + locals: Vec<Local>, 16 + /// Next free register index. 17 + next_reg: u8, 18 + /// Stack of loop contexts for break/continue. 19 + loop_stack: Vec<LoopCtx>, 20 + } 21 + 22 + #[derive(Debug, Clone)] 23 + struct Local { 24 + name: String, 25 + reg: Reg, 26 + } 27 + 28 + struct LoopCtx { 29 + /// Label, if this is a labeled loop. 30 + label: Option<String>, 31 + /// Bytecode offset of the loop condition check (for `continue`). 32 + continue_target: usize, 33 + /// Patch positions for break jumps. 34 + break_patches: Vec<usize>, 35 + } 36 + 37 + impl FunctionCompiler { 38 + fn new(name: String, param_count: u8) -> Self { 39 + Self { 40 + builder: BytecodeBuilder::new(name, param_count), 41 + locals: Vec::new(), 42 + next_reg: 0, 43 + loop_stack: Vec::new(), 44 + } 45 + } 46 + 47 + /// Allocate a register, updating the high-water mark. 48 + fn alloc_reg(&mut self) -> Reg { 49 + let r = self.next_reg; 50 + self.next_reg = self.next_reg.checked_add(1).expect("register overflow"); 51 + if self.next_reg > self.builder.func.register_count { 52 + self.builder.func.register_count = self.next_reg; 53 + } 54 + r 55 + } 56 + 57 + /// Free the last allocated register (must be called in reverse order). 58 + fn free_reg(&mut self, r: Reg) { 59 + debug_assert_eq!( 60 + r, 61 + self.next_reg - 1, 62 + "registers must be freed in reverse order" 63 + ); 64 + self.next_reg -= 1; 65 + } 66 + 67 + /// Look up a local variable by name. 
68 + fn find_local(&self, name: &str) -> Option<Reg> { 69 + self.locals 70 + .iter() 71 + .rev() 72 + .find(|l| l.name == name) 73 + .map(|l| l.reg) 74 + } 75 + 76 + /// Define a local variable. 77 + fn define_local(&mut self, name: &str) -> Reg { 78 + let reg = self.alloc_reg(); 79 + self.locals.push(Local { 80 + name: name.to_string(), 81 + reg, 82 + }); 83 + reg 84 + } 85 + } 86 + 87 + /// Compile a parsed program into a top-level bytecode function. 88 + pub fn compile(program: &Program) -> Result<Function, JsError> { 89 + let mut fc = FunctionCompiler::new("<main>".into(), 0); 90 + 91 + // Reserve r0 for the implicit return value. 92 + let result_reg = fc.alloc_reg(); 93 + fc.builder.emit_reg(Op::LoadUndefined, result_reg); 94 + 95 + compile_stmts(&mut fc, &program.body, result_reg)?; 96 + 97 + fc.builder.emit_reg(Op::Return, result_reg); 98 + Ok(fc.builder.finish()) 99 + } 100 + 101 + fn compile_stmts( 102 + fc: &mut FunctionCompiler, 103 + stmts: &[Stmt], 104 + result_reg: Reg, 105 + ) -> Result<(), JsError> { 106 + for stmt in stmts { 107 + compile_stmt(fc, stmt, result_reg)?; 108 + } 109 + Ok(()) 110 + } 111 + 112 + fn compile_stmt(fc: &mut FunctionCompiler, stmt: &Stmt, result_reg: Reg) -> Result<(), JsError> { 113 + match &stmt.kind { 114 + StmtKind::Expr(expr) => { 115 + // Expression statement: compile and store result in result_reg. 116 + compile_expr(fc, expr, result_reg)?; 117 + } 118 + 119 + StmtKind::Block(stmts) => { 120 + let saved_locals = fc.locals.len(); 121 + let saved_next = fc.next_reg; 122 + compile_stmts(fc, stmts, result_reg)?; 123 + // Pop locals from this block. 
124 + fc.locals.truncate(saved_locals); 125 + fc.next_reg = saved_next; 126 + } 127 + 128 + StmtKind::VarDecl { 129 + kind: _, 130 + declarators, 131 + } => { 132 + for decl in declarators { 133 + compile_var_declarator(fc, decl)?; 134 + } 135 + } 136 + 137 + StmtKind::FunctionDecl(func_def) => { 138 + compile_function_decl(fc, func_def)?; 139 + } 140 + 141 + StmtKind::If { 142 + test, 143 + consequent, 144 + alternate, 145 + } => { 146 + compile_if(fc, test, consequent, alternate.as_deref(), result_reg)?; 147 + } 148 + 149 + StmtKind::While { test, body } => { 150 + compile_while(fc, test, body, None, result_reg)?; 151 + } 152 + 153 + StmtKind::DoWhile { body, test } => { 154 + compile_do_while(fc, body, test, None, result_reg)?; 155 + } 156 + 157 + StmtKind::For { 158 + init, 159 + test, 160 + update, 161 + body, 162 + } => { 163 + compile_for( 164 + fc, 165 + init.as_ref(), 166 + test.as_ref(), 167 + update.as_ref(), 168 + body, 169 + None, 170 + result_reg, 171 + )?; 172 + } 173 + 174 + StmtKind::ForIn { left, right, body } => { 175 + // For-in is complex; emit a stub that evaluates RHS, then TODO at runtime. 176 + let _ = left; 177 + let tmp = fc.alloc_reg(); 178 + compile_expr(fc, right, tmp)?; 179 + fc.free_reg(tmp); 180 + // For now, just compile the body once (the VM will handle iteration). 
181 + compile_stmt(fc, body, result_reg)?; 182 + } 183 + 184 + StmtKind::ForOf { 185 + left, 186 + right, 187 + body, 188 + is_await: _, 189 + } => { 190 + let _ = left; 191 + let tmp = fc.alloc_reg(); 192 + compile_expr(fc, right, tmp)?; 193 + fc.free_reg(tmp); 194 + compile_stmt(fc, body, result_reg)?; 195 + } 196 + 197 + StmtKind::Return(expr) => { 198 + let ret_reg = fc.alloc_reg(); 199 + if let Some(e) = expr { 200 + compile_expr(fc, e, ret_reg)?; 201 + } else { 202 + fc.builder.emit_reg(Op::LoadUndefined, ret_reg); 203 + } 204 + fc.builder.emit_reg(Op::Return, ret_reg); 205 + fc.free_reg(ret_reg); 206 + } 207 + 208 + StmtKind::Throw(expr) => { 209 + let tmp = fc.alloc_reg(); 210 + compile_expr(fc, expr, tmp)?; 211 + fc.builder.emit_reg(Op::Throw, tmp); 212 + fc.free_reg(tmp); 213 + } 214 + 215 + StmtKind::Break(label) => { 216 + // Find the matching loop context. 217 + let idx = find_loop_ctx(&fc.loop_stack, label.as_deref()) 218 + .ok_or_else(|| JsError::SyntaxError("break outside of loop".into()))?; 219 + let patch = fc.builder.emit_jump(Op::Jump); 220 + fc.loop_stack[idx].break_patches.push(patch); 221 + } 222 + 223 + StmtKind::Continue(label) => { 224 + let idx = find_loop_ctx(&fc.loop_stack, label.as_deref()) 225 + .ok_or_else(|| JsError::SyntaxError("continue outside of loop".into()))?; 226 + let target = fc.loop_stack[idx].continue_target; 227 + fc.builder.emit_jump_to(target); 228 + } 229 + 230 + StmtKind::Labeled { label, body } => { 231 + // If body is a loop, propagate the label. 
232 + match &body.kind { 233 + StmtKind::While { test, body: inner } => { 234 + compile_while(fc, test, inner, Some(label.clone()), result_reg)?; 235 + } 236 + StmtKind::DoWhile { body: inner, test } => { 237 + compile_do_while(fc, inner, test, Some(label.clone()), result_reg)?; 238 + } 239 + StmtKind::For { 240 + init, 241 + test, 242 + update, 243 + body: inner, 244 + } => { 245 + compile_for( 246 + fc, 247 + init.as_ref(), 248 + test.as_ref(), 249 + update.as_ref(), 250 + inner, 251 + Some(label.clone()), 252 + result_reg, 253 + )?; 254 + } 255 + _ => { 256 + compile_stmt(fc, body, result_reg)?; 257 + } 258 + } 259 + } 260 + 261 + StmtKind::Switch { 262 + discriminant, 263 + cases, 264 + } => { 265 + compile_switch(fc, discriminant, cases, result_reg)?; 266 + } 267 + 268 + StmtKind::Try { 269 + block, 270 + handler, 271 + finalizer, 272 + } => { 273 + // Simplified: compile blocks sequentially. 274 + // Real try/catch needs exception table support from the VM. 275 + compile_stmts(fc, block, result_reg)?; 276 + if let Some(catch) = handler { 277 + compile_stmts(fc, &catch.body, result_reg)?; 278 + } 279 + if let Some(fin) = finalizer { 280 + compile_stmts(fc, fin, result_reg)?; 281 + } 282 + } 283 + 284 + StmtKind::Empty | StmtKind::Debugger => { 285 + // No-op. 286 + } 287 + 288 + StmtKind::With { object, body } => { 289 + // Compile `with` as: evaluate object (discard), then run body. 290 + // Proper `with` scope requires VM support. 291 + let tmp = fc.alloc_reg(); 292 + compile_expr(fc, object, tmp)?; 293 + fc.free_reg(tmp); 294 + compile_stmt(fc, body, result_reg)?; 295 + } 296 + 297 + StmtKind::Import { .. } => { 298 + // Module imports are resolved before execution; no bytecode needed. 
299 + } 300 + 301 + StmtKind::Export(export) => { 302 + compile_export(fc, export, result_reg)?; 303 + } 304 + 305 + StmtKind::ClassDecl(class_def) => { 306 + compile_class_decl(fc, class_def)?; 307 + } 308 + } 309 + Ok(()) 310 + } 311 + 312 + // ── Variable declarations ─────────────────────────────────── 313 + 314 + fn compile_var_declarator(fc: &mut FunctionCompiler, decl: &VarDeclarator) -> Result<(), JsError> { 315 + match &decl.pattern.kind { 316 + PatternKind::Identifier(name) => { 317 + let reg = fc.define_local(name); 318 + if let Some(init) = &decl.init { 319 + compile_expr(fc, init, reg)?; 320 + } else { 321 + fc.builder.emit_reg(Op::LoadUndefined, reg); 322 + } 323 + } 324 + _ => { 325 + // Destructuring: evaluate init, then bind patterns. 326 + let tmp = fc.alloc_reg(); 327 + if let Some(init) = &decl.init { 328 + compile_expr(fc, init, tmp)?; 329 + } else { 330 + fc.builder.emit_reg(Op::LoadUndefined, tmp); 331 + } 332 + compile_destructuring_pattern(fc, &decl.pattern, tmp)?; 333 + fc.free_reg(tmp); 334 + } 335 + } 336 + Ok(()) 337 + } 338 + 339 + fn compile_destructuring_pattern( 340 + fc: &mut FunctionCompiler, 341 + pattern: &Pattern, 342 + src: Reg, 343 + ) -> Result<(), JsError> { 344 + match &pattern.kind { 345 + PatternKind::Identifier(name) => { 346 + let reg = fc.define_local(name); 347 + fc.builder.emit_reg_reg(Op::Move, reg, src); 348 + } 349 + PatternKind::Object { 350 + properties, 351 + rest: _, 352 + } => { 353 + for prop in properties { 354 + let key_name = match &prop.key { 355 + PropertyKey::Identifier(s) | PropertyKey::String(s) => s.clone(), 356 + _ => { 357 + return Err(JsError::SyntaxError( 358 + "computed destructuring keys not yet supported".into(), 359 + )); 360 + } 361 + }; 362 + let val_reg = fc.alloc_reg(); 363 + let name_idx = fc.builder.add_name(&key_name); 364 + fc.builder.emit_get_prop_name(val_reg, src, name_idx); 365 + compile_destructuring_pattern(fc, &prop.value, val_reg)?; 366 + fc.free_reg(val_reg); 367 + } 368 + 
} 369 + PatternKind::Array { elements, rest: _ } => { 370 + for (i, elem) in elements.iter().enumerate() { 371 + if let Some(pat) = elem { 372 + let idx_reg = fc.alloc_reg(); 373 + if i <= 127 { 374 + fc.builder.emit_load_int8(idx_reg, i as i8); 375 + } else { 376 + let ci = fc.builder.add_constant(Constant::Number(i as f64)); 377 + fc.builder.emit_reg_u16(Op::LoadConst, idx_reg, ci); 378 + } 379 + let val_reg = fc.alloc_reg(); 380 + fc.builder.emit_reg3(Op::GetProperty, val_reg, src, idx_reg); 381 + compile_destructuring_pattern(fc, pat, val_reg)?; 382 + fc.free_reg(val_reg); 383 + fc.free_reg(idx_reg); 384 + } 385 + } 386 + } 387 + PatternKind::Assign { left, right } => { 388 + // Default value: if src is undefined, use default. 389 + let val_reg = fc.alloc_reg(); 390 + fc.builder.emit_reg_reg(Op::Move, val_reg, src); 391 + // Check if undefined, if so use default. 392 + let check_reg = fc.alloc_reg(); 393 + let undef_reg = fc.alloc_reg(); 394 + fc.builder.emit_reg(Op::LoadUndefined, undef_reg); 395 + fc.builder 396 + .emit_reg3(Op::StrictEq, check_reg, val_reg, undef_reg); 397 + fc.free_reg(undef_reg); 398 + let patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, check_reg); 399 + fc.free_reg(check_reg); 400 + // Is undefined → evaluate default. 
401 + compile_expr(fc, right, val_reg)?; 402 + fc.builder.patch_jump(patch); 403 + compile_destructuring_pattern(fc, left, val_reg)?; 404 + fc.free_reg(val_reg); 405 + } 406 + } 407 + Ok(()) 408 + } 409 + 410 + // ── Function declarations ─────────────────────────────────── 411 + 412 + fn compile_function_decl(fc: &mut FunctionCompiler, func_def: &FunctionDef) -> Result<(), JsError> { 413 + let name = func_def.id.clone().unwrap_or_default(); 414 + let inner = compile_function_body(func_def)?; 415 + let func_idx = fc.builder.add_function(inner); 416 + 417 + let reg = fc.define_local(&name); 418 + fc.builder.emit_reg_u16(Op::CreateClosure, reg, func_idx); 419 + Ok(()) 420 + } 421 + 422 + fn compile_function_body(func_def: &FunctionDef) -> Result<Function, JsError> { 423 + let name = func_def.id.clone().unwrap_or_default(); 424 + let param_count = func_def.params.len().min(255) as u8; 425 + let mut inner = FunctionCompiler::new(name, param_count); 426 + 427 + // Allocate registers for parameters. 428 + for p in &func_def.params { 429 + if let PatternKind::Identifier(name) = &p.kind { 430 + inner.define_local(name); 431 + } else { 432 + // Destructuring param: allocate a register for the whole param, 433 + // then destructure from it. 434 + let _ = inner.alloc_reg(); 435 + } 436 + } 437 + 438 + // Result register for the function body. 439 + let result_reg = inner.alloc_reg(); 440 + inner.builder.emit_reg(Op::LoadUndefined, result_reg); 441 + 442 + compile_stmts(&mut inner, &func_def.body, result_reg)?; 443 + 444 + // Implicit return undefined. 445 + inner.builder.emit_reg(Op::Return, result_reg); 446 + Ok(inner.builder.finish()) 447 + } 448 + 449 + // ── Class declarations ────────────────────────────────────── 450 + 451 + fn compile_class_decl(fc: &mut FunctionCompiler, class_def: &ClassDef) -> Result<(), JsError> { 452 + let name = class_def.id.clone().unwrap_or_default(); 453 + let reg = fc.define_local(&name); 454 + 455 + // Find constructor or create empty one. 
456 + let ctor = class_def.body.iter().find(|m| { 457 + matches!( 458 + &m.kind, 459 + ClassMemberKind::Method { 460 + kind: MethodKind::Constructor, 461 + .. 462 + } 463 + ) 464 + }); 465 + 466 + if let Some(member) = ctor { 467 + if let ClassMemberKind::Method { value, .. } = &member.kind { 468 + let inner = compile_function_body(value)?; 469 + let func_idx = fc.builder.add_function(inner); 470 + fc.builder.emit_reg_u16(Op::CreateClosure, reg, func_idx); 471 + } 472 + } else { 473 + // No constructor: create an empty function. 474 + let empty = Function::new(name.clone(), 0); 475 + let func_idx = fc.builder.add_function(empty); 476 + fc.builder.emit_reg_u16(Op::CreateClosure, reg, func_idx); 477 + } 478 + 479 + // Compile methods: set them as properties on the constructor's prototype. 480 + // This is simplified — real class compilation needs prototype chain setup. 481 + for member in &class_def.body { 482 + match &member.kind { 483 + ClassMemberKind::Method { 484 + key, 485 + value, 486 + kind, 487 + is_static: _, 488 + computed: _, 489 + } => { 490 + if matches!(kind, MethodKind::Constructor) { 491 + continue; 492 + } 493 + let method_name = match key { 494 + PropertyKey::Identifier(s) | PropertyKey::String(s) => s.clone(), 495 + _ => continue, 496 + }; 497 + let inner = compile_function_body(value)?; 498 + let func_idx = fc.builder.add_function(inner); 499 + let method_reg = fc.alloc_reg(); 500 + fc.builder 501 + .emit_reg_u16(Op::CreateClosure, method_reg, func_idx); 502 + let name_idx = fc.builder.add_name(&method_name); 503 + fc.builder.emit_set_prop_name(reg, name_idx, method_reg); 504 + fc.free_reg(method_reg); 505 + } 506 + ClassMemberKind::Property { .. } => { 507 + // Class fields are set in constructor; skip here. 
508 + } 509 + } 510 + } 511 + 512 + Ok(()) 513 + } 514 + 515 + // ── Export ─────────────────────────────────────────────────── 516 + 517 + fn compile_export( 518 + fc: &mut FunctionCompiler, 519 + export: &ExportDecl, 520 + 521 + result_reg: Reg, 522 + ) -> Result<(), JsError> { 523 + match export { 524 + ExportDecl::Declaration(stmt) => { 525 + compile_stmt(fc, stmt, result_reg)?; 526 + } 527 + ExportDecl::Default(expr) => { 528 + compile_expr(fc, expr, result_reg)?; 529 + } 530 + ExportDecl::Named { .. } | ExportDecl::AllFrom(_) => { 531 + // Named re-exports are module-level; no bytecode needed. 532 + } 533 + } 534 + Ok(()) 535 + } 536 + 537 + // ── Control flow ──────────────────────────────────────────── 538 + 539 + fn compile_if( 540 + fc: &mut FunctionCompiler, 541 + test: &Expr, 542 + consequent: &Stmt, 543 + alternate: Option<&Stmt>, 544 + 545 + result_reg: Reg, 546 + ) -> Result<(), JsError> { 547 + let cond = fc.alloc_reg(); 548 + compile_expr(fc, test, cond)?; 549 + let else_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond); 550 + fc.free_reg(cond); 551 + 552 + compile_stmt(fc, consequent, result_reg)?; 553 + 554 + if let Some(alt) = alternate { 555 + let end_patch = fc.builder.emit_jump(Op::Jump); 556 + fc.builder.patch_jump(else_patch); 557 + compile_stmt(fc, alt, result_reg)?; 558 + fc.builder.patch_jump(end_patch); 559 + } else { 560 + fc.builder.patch_jump(else_patch); 561 + } 562 + Ok(()) 563 + } 564 + 565 + fn compile_while( 566 + fc: &mut FunctionCompiler, 567 + test: &Expr, 568 + body: &Stmt, 569 + label: Option<String>, 570 + 571 + result_reg: Reg, 572 + ) -> Result<(), JsError> { 573 + let loop_start = fc.builder.offset(); 574 + 575 + let cond = fc.alloc_reg(); 576 + compile_expr(fc, test, cond)?; 577 + let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond); 578 + fc.free_reg(cond); 579 + 580 + fc.loop_stack.push(LoopCtx { 581 + label, 582 + continue_target: loop_start, 583 + break_patches: Vec::new(), 584 + }); 585 + 586 + 
compile_stmt(fc, body, result_reg)?; 587 + fc.builder.emit_jump_to(loop_start); 588 + fc.builder.patch_jump(exit_patch); 589 + 590 + let ctx = fc.loop_stack.pop().unwrap(); 591 + for patch in ctx.break_patches { 592 + fc.builder.patch_jump(patch); 593 + } 594 + Ok(()) 595 + } 596 + 597 + fn compile_do_while( 598 + fc: &mut FunctionCompiler, 599 + body: &Stmt, 600 + test: &Expr, 601 + label: Option<String>, 602 + 603 + result_reg: Reg, 604 + ) -> Result<(), JsError> { 605 + let loop_start = fc.builder.offset(); 606 + 607 + fc.loop_stack.push(LoopCtx { 608 + label, 609 + continue_target: loop_start, 610 + break_patches: Vec::new(), 611 + }); 612 + 613 + compile_stmt(fc, body, result_reg)?; 614 + 615 + let cond = fc.alloc_reg(); 616 + compile_expr(fc, test, cond)?; 617 + fc.builder 618 + .emit_cond_jump_to(Op::JumpIfTrue, cond, loop_start); 619 + fc.free_reg(cond); 620 + 621 + let ctx = fc.loop_stack.pop().unwrap(); 622 + for patch in ctx.break_patches { 623 + fc.builder.patch_jump(patch); 624 + } 625 + Ok(()) 626 + } 627 + 628 + fn compile_for( 629 + fc: &mut FunctionCompiler, 630 + init: Option<&ForInit>, 631 + test: Option<&Expr>, 632 + update: Option<&Expr>, 633 + body: &Stmt, 634 + label: Option<String>, 635 + 636 + result_reg: Reg, 637 + ) -> Result<(), JsError> { 638 + let saved_locals = fc.locals.len(); 639 + let saved_next = fc.next_reg; 640 + 641 + // Init. 642 + if let Some(init) = init { 643 + match init { 644 + ForInit::VarDecl { 645 + kind: _, 646 + declarators, 647 + } => { 648 + for decl in declarators { 649 + compile_var_declarator(fc, decl)?; 650 + } 651 + } 652 + ForInit::Expr(expr) => { 653 + let tmp = fc.alloc_reg(); 654 + compile_expr(fc, expr, tmp)?; 655 + fc.free_reg(tmp); 656 + } 657 + } 658 + } 659 + 660 + let loop_start = fc.builder.offset(); 661 + 662 + // Test. 
663 + let exit_patch = if let Some(test) = test { 664 + let cond = fc.alloc_reg(); 665 + compile_expr(fc, test, cond)?; 666 + let patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond); 667 + fc.free_reg(cond); 668 + Some(patch) 669 + } else { 670 + None 671 + }; 672 + 673 + // continue_target points to the update expression (or loop_start if no update). 674 + // We'll set this after compiling the body. 675 + let continue_placeholder = fc.builder.offset(); 676 + fc.loop_stack.push(LoopCtx { 677 + label, 678 + continue_target: continue_placeholder, // will be updated 679 + break_patches: Vec::new(), 680 + }); 681 + 682 + compile_stmt(fc, body, result_reg)?; 683 + 684 + // Set the continue target to the update expression position. 685 + let continue_target = fc.builder.offset(); 686 + let loop_idx = fc.loop_stack.len() - 1; 687 + fc.loop_stack[loop_idx].continue_target = continue_target; 688 + 689 + // Update. 690 + if let Some(update) = update { 691 + let tmp = fc.alloc_reg(); 692 + compile_expr(fc, update, tmp)?; 693 + fc.free_reg(tmp); 694 + } 695 + 696 + fc.builder.emit_jump_to(loop_start); 697 + 698 + if let Some(patch) = exit_patch { 699 + fc.builder.patch_jump(patch); 700 + } 701 + 702 + let ctx = fc.loop_stack.pop().unwrap(); 703 + for patch in ctx.break_patches { 704 + fc.builder.patch_jump(patch); 705 + } 706 + 707 + fc.locals.truncate(saved_locals); 708 + fc.next_reg = saved_next; 709 + Ok(()) 710 + } 711 + 712 + fn compile_switch( 713 + fc: &mut FunctionCompiler, 714 + discriminant: &Expr, 715 + cases: &[SwitchCase], 716 + 717 + result_reg: Reg, 718 + ) -> Result<(), JsError> { 719 + let disc_reg = fc.alloc_reg(); 720 + compile_expr(fc, discriminant, disc_reg)?; 721 + 722 + // Use a loop context for break statements. 
723 + fc.loop_stack.push(LoopCtx { 724 + label: None, 725 + continue_target: 0, // not applicable for switch 726 + break_patches: Vec::new(), 727 + }); 728 + 729 + let mut case_patches = Vec::new(); 730 + let mut default_patch = None; 731 + 732 + // Phase 1: emit comparison jumps for each case. 733 + for case in cases { 734 + if let Some(test) = &case.test { 735 + let test_reg = fc.alloc_reg(); 736 + compile_expr(fc, test, test_reg)?; 737 + let cmp_reg = fc.alloc_reg(); 738 + fc.builder 739 + .emit_reg3(Op::StrictEq, cmp_reg, disc_reg, test_reg); 740 + let patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, cmp_reg); 741 + fc.free_reg(cmp_reg); 742 + fc.free_reg(test_reg); 743 + case_patches.push(patch); 744 + } else { 745 + // Default case: jump is emitted after all test comparisons. 746 + case_patches.push(0); // placeholder 747 + default_patch = Some(case_patches.len() - 1); 748 + } 749 + } 750 + 751 + // Jump to default or end if no case matched. 752 + let end_jump = if default_patch.is_some() { 753 + None 754 + } else { 755 + Some(fc.builder.emit_jump(Op::Jump)) 756 + }; 757 + 758 + // Phase 2: emit case bodies. 759 + for (i, case) in cases.iter().enumerate() { 760 + if default_patch == Some(i) { 761 + // Patch the "no match" jump to here if this is the default. 762 + if let Some(end_j) = end_jump { 763 + fc.builder.patch_jump(end_j); 764 + } 765 + // We need to update the default case patch to point here. 766 + let default_jump_target = fc.builder.offset(); 767 + // The default_patch entry is a placeholder; we don't jump TO it. 768 + // Instead, if no case matched and there's a default, we jump here. 769 + // Let's fix: emit a Jump before the first case body that jumps to default. 
770 + let _ = default_jump_target; 771 + } 772 + 773 + fc.builder.patch_jump(case_patches[i]); 774 + compile_stmts(fc, &case.consequent, result_reg)?; 775 + } 776 + 777 + if let Some(end_j) = end_jump { 778 + fc.builder.patch_jump(end_j); 779 + } 780 + 781 + fc.free_reg(disc_reg); 782 + 783 + let ctx = fc.loop_stack.pop().unwrap(); 784 + for patch in ctx.break_patches { 785 + fc.builder.patch_jump(patch); 786 + } 787 + Ok(()) 788 + } 789 + 790 + fn find_loop_ctx(stack: &[LoopCtx], label: Option<&str>) -> Option<usize> { 791 + if let Some(label) = label { 792 + stack 793 + .iter() 794 + .rposition(|ctx| ctx.label.as_deref() == Some(label)) 795 + } else { 796 + if stack.is_empty() { 797 + None 798 + } else { 799 + Some(stack.len() - 1) 800 + } 801 + } 802 + } 803 + 804 + // ── Expressions ───────────────────────────────────────────── 805 + 806 + fn compile_expr(fc: &mut FunctionCompiler, expr: &Expr, dst: Reg) -> Result<(), JsError> { 807 + match &expr.kind { 808 + ExprKind::Number(n) => { 809 + // Optimize small integers. 
810 + let int_val = *n as i64; 811 + if int_val as f64 == *n && (-128..=127).contains(&int_val) { 812 + fc.builder.emit_load_int8(dst, int_val as i8); 813 + } else { 814 + let ci = fc.builder.add_constant(Constant::Number(*n)); 815 + fc.builder.emit_reg_u16(Op::LoadConst, dst, ci); 816 + } 817 + } 818 + 819 + ExprKind::String(s) => { 820 + let ci = fc.builder.add_constant(Constant::String(s.clone())); 821 + fc.builder.emit_reg_u16(Op::LoadConst, dst, ci); 822 + } 823 + 824 + ExprKind::Bool(true) => { 825 + fc.builder.emit_reg(Op::LoadTrue, dst); 826 + } 827 + 828 + ExprKind::Bool(false) => { 829 + fc.builder.emit_reg(Op::LoadFalse, dst); 830 + } 831 + 832 + ExprKind::Null => { 833 + fc.builder.emit_reg(Op::LoadNull, dst); 834 + } 835 + 836 + ExprKind::Identifier(name) => { 837 + if let Some(local_reg) = fc.find_local(name) { 838 + if local_reg != dst { 839 + fc.builder.emit_reg_reg(Op::Move, dst, local_reg); 840 + } 841 + } else { 842 + // Global lookup. 843 + let ni = fc.builder.add_name(name); 844 + fc.builder.emit_load_global(dst, ni); 845 + } 846 + } 847 + 848 + ExprKind::This => { 849 + // `this` is loaded as a global named "this" (the VM binds it). 850 + let ni = fc.builder.add_name("this"); 851 + fc.builder.emit_load_global(dst, ni); 852 + } 853 + 854 + ExprKind::Binary { op, left, right } => { 855 + let lhs = fc.alloc_reg(); 856 + compile_expr(fc, left, lhs)?; 857 + let rhs = fc.alloc_reg(); 858 + compile_expr(fc, right, rhs)?; 859 + let bytecode_op = binary_op_to_opcode(*op); 860 + fc.builder.emit_reg3(bytecode_op, dst, lhs, rhs); 861 + fc.free_reg(rhs); 862 + fc.free_reg(lhs); 863 + } 864 + 865 + ExprKind::Unary { op, argument } => { 866 + let src = fc.alloc_reg(); 867 + compile_expr(fc, argument, src)?; 868 + match op { 869 + UnaryOp::Minus => fc.builder.emit_reg_reg(Op::Neg, dst, src), 870 + UnaryOp::Plus => { 871 + // Unary + is a no-op at the bytecode level (coerces to number at runtime). 
872 + fc.builder.emit_reg_reg(Op::Move, dst, src); 873 + } 874 + UnaryOp::Not => fc.builder.emit_reg_reg(Op::LogicalNot, dst, src), 875 + UnaryOp::BitwiseNot => fc.builder.emit_reg_reg(Op::BitNot, dst, src), 876 + UnaryOp::Typeof => fc.builder.emit_reg_reg(Op::TypeOf, dst, src), 877 + UnaryOp::Void => fc.builder.emit_reg_reg(Op::Void, dst, src), 878 + UnaryOp::Delete => { 879 + // Simplified: `delete x` on a simple identifier. 880 + // Real delete needs the object+key form. 881 + fc.builder.emit_reg(Op::LoadTrue, dst); 882 + } 883 + } 884 + fc.free_reg(src); 885 + } 886 + 887 + ExprKind::Update { 888 + op, 889 + argument, 890 + prefix, 891 + } => { 892 + // Get current value. 893 + compile_expr(fc, argument, dst)?; 894 + 895 + let one = fc.alloc_reg(); 896 + fc.builder.emit_load_int8(one, 1); 897 + 898 + if *prefix { 899 + // ++x / --x: modify first, return modified. 900 + match op { 901 + UpdateOp::Increment => fc.builder.emit_reg3(Op::Add, dst, dst, one), 902 + UpdateOp::Decrement => fc.builder.emit_reg3(Op::Sub, dst, dst, one), 903 + } 904 + // Store back. 905 + compile_store(fc, argument, dst)?; 906 + } else { 907 + // x++ / x--: return original, then modify. 908 + let tmp = fc.alloc_reg(); 909 + fc.builder.emit_reg_reg(Op::Move, tmp, dst); 910 + match op { 911 + UpdateOp::Increment => fc.builder.emit_reg3(Op::Add, tmp, tmp, one), 912 + UpdateOp::Decrement => fc.builder.emit_reg3(Op::Sub, tmp, tmp, one), 913 + } 914 + compile_store(fc, argument, tmp)?; 915 + fc.free_reg(tmp); 916 + } 917 + fc.free_reg(one); 918 + } 919 + 920 + ExprKind::Logical { op, left, right } => { 921 + compile_expr(fc, left, dst)?; 922 + match op { 923 + LogicalOp::And => { 924 + // Short-circuit: if falsy, skip right. 
925 + let skip = fc.builder.emit_cond_jump(Op::JumpIfFalse, dst); 926 + compile_expr(fc, right, dst)?; 927 + fc.builder.patch_jump(skip); 928 + } 929 + LogicalOp::Or => { 930 + let skip = fc.builder.emit_cond_jump(Op::JumpIfTrue, dst); 931 + compile_expr(fc, right, dst)?; 932 + fc.builder.patch_jump(skip); 933 + } 934 + LogicalOp::Nullish => { 935 + let skip = fc.builder.emit_cond_jump(Op::JumpIfNullish, dst); 936 + // If NOT nullish, skip the right side. Wait — JumpIfNullish 937 + // should mean "jump if nullish" so we want: evaluate left, 938 + // if NOT nullish skip right. 939 + // Let's invert: evaluate left, check if nullish → evaluate right. 940 + // We need the jump to skip the "evaluate right" if NOT nullish. 941 + // Since JumpIfNullish jumps when nullish, we need the inverse. 942 + // Instead: use a two-step approach. 943 + // 944 + // Actually, rethink: for `a ?? b`: 945 + // 1. evaluate a → dst 946 + // 2. if dst is NOT null/undefined, jump to end 947 + // 3. evaluate b → dst 948 + // end: 949 + // JumpIfNullish jumps when IS nullish. So we want jump when NOT nullish. 950 + // Let's just use a "not nullish" check. 951 + // For now: negate and use JumpIfFalse. 952 + // Actually simpler: skip right when not nullish. 953 + // JumpIfNullish jumps WHEN nullish. We want to jump over right when NOT nullish. 954 + // So: 955 + // evaluate a → dst 956 + // JumpIfNullish dst → evaluate_right 957 + // Jump → end 958 + // evaluate_right: evaluate b → dst 959 + // end: 960 + // But we already emitted JumpIfNullish. Let's fix this. 961 + // The JumpIfNullish we emitted jumps to "after patch", which is where 962 + // we'll put the right-side code. We need another jump to skip right. 
963 + let end_patch = fc.builder.emit_jump(Op::Jump); 964 + fc.builder.patch_jump(skip); // nullish → evaluate right 965 + compile_expr(fc, right, dst)?; 966 + fc.builder.patch_jump(end_patch); 967 + } 968 + } 969 + } 970 + 971 + ExprKind::Assignment { op, left, right } => { 972 + if *op == AssignOp::Assign { 973 + compile_expr(fc, right, dst)?; 974 + compile_store(fc, left, dst)?; 975 + } else { 976 + // Compound assignment: load current, operate, store. 977 + compile_expr(fc, left, dst)?; 978 + let rhs = fc.alloc_reg(); 979 + compile_expr(fc, right, rhs)?; 980 + let arith_op = compound_assign_op(*op); 981 + fc.builder.emit_reg3(arith_op, dst, dst, rhs); 982 + fc.free_reg(rhs); 983 + compile_store(fc, left, dst)?; 984 + } 985 + } 986 + 987 + ExprKind::Conditional { 988 + test, 989 + consequent, 990 + alternate, 991 + } => { 992 + let cond = fc.alloc_reg(); 993 + compile_expr(fc, test, cond)?; 994 + let else_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond); 995 + fc.free_reg(cond); 996 + compile_expr(fc, consequent, dst)?; 997 + let end_patch = fc.builder.emit_jump(Op::Jump); 998 + fc.builder.patch_jump(else_patch); 999 + compile_expr(fc, alternate, dst)?; 1000 + fc.builder.patch_jump(end_patch); 1001 + } 1002 + 1003 + ExprKind::Call { callee, arguments } => { 1004 + let func_reg = fc.alloc_reg(); 1005 + compile_expr(fc, callee, func_reg)?; 1006 + 1007 + let args_start = fc.next_reg; 1008 + let arg_count = arguments.len().min(255) as u8; 1009 + for arg in arguments { 1010 + let arg_reg = fc.alloc_reg(); 1011 + compile_expr(fc, arg, arg_reg)?; 1012 + } 1013 + 1014 + fc.builder.emit_call(dst, func_reg, args_start, arg_count); 1015 + 1016 + // Free argument registers (in reverse). 1017 + for _ in 0..arg_count { 1018 + fc.next_reg -= 1; 1019 + } 1020 + fc.free_reg(func_reg); 1021 + } 1022 + 1023 + ExprKind::New { callee, arguments } => { 1024 + // For now, compile like a regular call. 
The VM will differentiate 1025 + // based on the `New` vs `Call` distinction (TODO: add NewCall opcode). 1026 + let func_reg = fc.alloc_reg(); 1027 + compile_expr(fc, callee, func_reg)?; 1028 + 1029 + let args_start = fc.next_reg; 1030 + let arg_count = arguments.len().min(255) as u8; 1031 + for arg in arguments { 1032 + let arg_reg = fc.alloc_reg(); 1033 + compile_expr(fc, arg, arg_reg)?; 1034 + } 1035 + 1036 + fc.builder.emit_call(dst, func_reg, args_start, arg_count); 1037 + 1038 + for _ in 0..arg_count { 1039 + fc.next_reg -= 1; 1040 + } 1041 + fc.free_reg(func_reg); 1042 + } 1043 + 1044 + ExprKind::Member { 1045 + object, 1046 + property, 1047 + computed, 1048 + } => { 1049 + let obj_reg = fc.alloc_reg(); 1050 + compile_expr(fc, object, obj_reg)?; 1051 + 1052 + if !computed { 1053 + // Static member: obj.prop → GetPropertyByName. 1054 + if let ExprKind::Identifier(name) = &property.kind { 1055 + let ni = fc.builder.add_name(name); 1056 + fc.builder.emit_get_prop_name(dst, obj_reg, ni); 1057 + } else { 1058 + let key_reg = fc.alloc_reg(); 1059 + compile_expr(fc, property, key_reg)?; 1060 + fc.builder.emit_reg3(Op::GetProperty, dst, obj_reg, key_reg); 1061 + fc.free_reg(key_reg); 1062 + } 1063 + } else { 1064 + // Computed member: obj[expr]. 1065 + let key_reg = fc.alloc_reg(); 1066 + compile_expr(fc, property, key_reg)?; 1067 + fc.builder.emit_reg3(Op::GetProperty, dst, obj_reg, key_reg); 1068 + fc.free_reg(key_reg); 1069 + } 1070 + fc.free_reg(obj_reg); 1071 + } 1072 + 1073 + ExprKind::Array(elements) => { 1074 + fc.builder.emit_reg(Op::CreateArray, dst); 1075 + for (i, elem) in elements.iter().enumerate() { 1076 + if let Some(el) = elem { 1077 + let val_reg = fc.alloc_reg(); 1078 + match el { 1079 + ArrayElement::Expr(e) => compile_expr(fc, e, val_reg)?, 1080 + ArrayElement::Spread(e) => { 1081 + // Spread in array: simplified, just compile the expression. 
1082 + compile_expr(fc, e, val_reg)?; 1083 + } 1084 + } 1085 + let idx_reg = fc.alloc_reg(); 1086 + if i <= 127 { 1087 + fc.builder.emit_load_int8(idx_reg, i as i8); 1088 + } else { 1089 + let ci = fc.builder.add_constant(Constant::Number(i as f64)); 1090 + fc.builder.emit_reg_u16(Op::LoadConst, idx_reg, ci); 1091 + } 1092 + fc.builder.emit_reg3(Op::SetProperty, dst, idx_reg, val_reg); 1093 + fc.free_reg(idx_reg); 1094 + fc.free_reg(val_reg); 1095 + } 1096 + } 1097 + } 1098 + 1099 + ExprKind::Object(properties) => { 1100 + fc.builder.emit_reg(Op::CreateObject, dst); 1101 + for prop in properties { 1102 + let val_reg = fc.alloc_reg(); 1103 + if let Some(value) = &prop.value { 1104 + compile_expr(fc, value, val_reg)?; 1105 + } else { 1106 + // Shorthand: `{ x }` means `{ x: x }`. 1107 + if let PropertyKey::Identifier(name) = &prop.key { 1108 + if let Some(local) = fc.find_local(name) { 1109 + fc.builder.emit_reg_reg(Op::Move, val_reg, local); 1110 + } else { 1111 + let ni = fc.builder.add_name(name); 1112 + fc.builder.emit_load_global(val_reg, ni); 1113 + } 1114 + } else { 1115 + fc.builder.emit_reg(Op::LoadUndefined, val_reg); 1116 + } 1117 + } 1118 + 1119 + match &prop.key { 1120 + PropertyKey::Identifier(name) | PropertyKey::String(name) => { 1121 + let ni = fc.builder.add_name(name); 1122 + fc.builder.emit_set_prop_name(dst, ni, val_reg); 1123 + } 1124 + PropertyKey::Number(n) => { 1125 + let key_reg = fc.alloc_reg(); 1126 + let ci = fc.builder.add_constant(Constant::Number(*n)); 1127 + fc.builder.emit_reg_u16(Op::LoadConst, key_reg, ci); 1128 + fc.builder.emit_reg3(Op::SetProperty, dst, key_reg, val_reg); 1129 + fc.free_reg(key_reg); 1130 + } 1131 + PropertyKey::Computed(expr) => { 1132 + let key_reg = fc.alloc_reg(); 1133 + compile_expr(fc, expr, key_reg)?; 1134 + fc.builder.emit_reg3(Op::SetProperty, dst, key_reg, val_reg); 1135 + fc.free_reg(key_reg); 1136 + } 1137 + } 1138 + fc.free_reg(val_reg); 1139 + } 1140 + } 1141 + 1142 + ExprKind::Function(func_def) 
=> { 1143 + let inner = compile_function_body(func_def)?; 1144 + let func_idx = fc.builder.add_function(inner); 1145 + fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx); 1146 + } 1147 + 1148 + ExprKind::Arrow { 1149 + params, 1150 + body, 1151 + is_async: _, 1152 + } => { 1153 + let param_count = params.len().min(255) as u8; 1154 + let mut inner = FunctionCompiler::new("<arrow>".into(), param_count); 1155 + for p in params { 1156 + if let PatternKind::Identifier(name) = &p.kind { 1157 + inner.define_local(name); 1158 + } else { 1159 + let _ = inner.alloc_reg(); 1160 + } 1161 + } 1162 + let result = inner.alloc_reg(); 1163 + match body { 1164 + ArrowBody::Expr(e) => { 1165 + compile_expr(&mut inner, e, result)?; 1166 + } 1167 + ArrowBody::Block(stmts) => { 1168 + inner.builder.emit_reg(Op::LoadUndefined, result); 1169 + compile_stmts(&mut inner, stmts, result)?; 1170 + } 1171 + } 1172 + inner.builder.emit_reg(Op::Return, result); 1173 + let inner_func = inner.builder.finish(); 1174 + let func_idx = fc.builder.add_function(inner_func); 1175 + fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx); 1176 + } 1177 + 1178 + ExprKind::Class(class_def) => { 1179 + // Class expression: compile like class decl but into dst. 1180 + let name = class_def.id.clone().unwrap_or_default(); 1181 + // Find constructor. 1182 + let ctor = class_def.body.iter().find(|m| { 1183 + matches!( 1184 + &m.kind, 1185 + ClassMemberKind::Method { 1186 + kind: MethodKind::Constructor, 1187 + .. 1188 + } 1189 + ) 1190 + }); 1191 + if let Some(member) = ctor { 1192 + if let ClassMemberKind::Method { value, .. 
} = &member.kind { 1193 + let inner = compile_function_body(value)?; 1194 + let func_idx = fc.builder.add_function(inner); 1195 + fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx); 1196 + } 1197 + } else { 1198 + let empty = Function::new(name, 0); 1199 + let func_idx = fc.builder.add_function(empty); 1200 + fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx); 1201 + } 1202 + } 1203 + 1204 + ExprKind::Sequence(exprs) => { 1205 + for e in exprs { 1206 + compile_expr(fc, e, dst)?; 1207 + } 1208 + } 1209 + 1210 + ExprKind::Spread(inner) => { 1211 + compile_expr(fc, inner, dst)?; 1212 + } 1213 + 1214 + ExprKind::TemplateLiteral { 1215 + quasis, 1216 + expressions, 1217 + } => { 1218 + // Compile template literal as string concatenation. 1219 + if quasis.len() == 1 && expressions.is_empty() { 1220 + let ci = fc.builder.add_constant(Constant::String(quasis[0].clone())); 1221 + fc.builder.emit_reg_u16(Op::LoadConst, dst, ci); 1222 + } else { 1223 + // Start with first quasi. 1224 + let ci = fc.builder.add_constant(Constant::String(quasis[0].clone())); 1225 + fc.builder.emit_reg_u16(Op::LoadConst, dst, ci); 1226 + for (i, expr) in expressions.iter().enumerate() { 1227 + let tmp = fc.alloc_reg(); 1228 + compile_expr(fc, expr, tmp)?; 1229 + fc.builder.emit_reg3(Op::Add, dst, dst, tmp); 1230 + fc.free_reg(tmp); 1231 + if i + 1 < quasis.len() { 1232 + let qi = fc 1233 + .builder 1234 + .add_constant(Constant::String(quasis[i + 1].clone())); 1235 + let tmp2 = fc.alloc_reg(); 1236 + fc.builder.emit_reg_u16(Op::LoadConst, tmp2, qi); 1237 + fc.builder.emit_reg3(Op::Add, dst, dst, tmp2); 1238 + fc.free_reg(tmp2); 1239 + } 1240 + } 1241 + } 1242 + } 1243 + 1244 + ExprKind::TaggedTemplate { tag, quasi } => { 1245 + // Simplified: call tag with the template as argument. 
1246 + let func_reg = fc.alloc_reg(); 1247 + compile_expr(fc, tag, func_reg)?; 1248 + let arg_reg = fc.alloc_reg(); 1249 + compile_expr(fc, quasi, arg_reg)?; 1250 + fc.builder.emit_call(dst, func_reg, arg_reg, 1); 1251 + fc.free_reg(arg_reg); 1252 + fc.free_reg(func_reg); 1253 + } 1254 + 1255 + ExprKind::Yield { 1256 + argument, 1257 + delegate: _, 1258 + } => { 1259 + // Yield is a VM-level operation; for now compile the argument. 1260 + if let Some(arg) = argument { 1261 + compile_expr(fc, arg, dst)?; 1262 + } else { 1263 + fc.builder.emit_reg(Op::LoadUndefined, dst); 1264 + } 1265 + } 1266 + 1267 + ExprKind::Await(inner) => { 1268 + // Await is a VM-level operation; compile the argument. 1269 + compile_expr(fc, inner, dst)?; 1270 + } 1271 + 1272 + ExprKind::RegExp { .. } => { 1273 + // RegExp literals are created at runtime by the VM. 1274 + fc.builder.emit_reg(Op::LoadUndefined, dst); 1275 + } 1276 + 1277 + ExprKind::OptionalChain { base } => { 1278 + compile_expr(fc, base, dst)?; 1279 + } 1280 + } 1281 + Ok(()) 1282 + } 1283 + 1284 + /// Compile a store operation (assignment target). 
1285 + fn compile_store(fc: &mut FunctionCompiler, target: &Expr, src: Reg) -> Result<(), JsError> { 1286 + match &target.kind { 1287 + ExprKind::Identifier(name) => { 1288 + if let Some(local) = fc.find_local(name) { 1289 + if local != src { 1290 + fc.builder.emit_reg_reg(Op::Move, local, src); 1291 + } 1292 + } else { 1293 + let ni = fc.builder.add_name(name); 1294 + fc.builder.emit_store_global(ni, src); 1295 + } 1296 + } 1297 + ExprKind::Member { 1298 + object, 1299 + property, 1300 + computed, 1301 + } => { 1302 + let obj_reg = fc.alloc_reg(); 1303 + compile_expr(fc, object, obj_reg)?; 1304 + if !computed { 1305 + if let ExprKind::Identifier(name) = &property.kind { 1306 + let ni = fc.builder.add_name(name); 1307 + fc.builder.emit_set_prop_name(obj_reg, ni, src); 1308 + } else { 1309 + let key_reg = fc.alloc_reg(); 1310 + compile_expr(fc, property, key_reg)?; 1311 + fc.builder.emit_reg3(Op::SetProperty, obj_reg, key_reg, src); 1312 + fc.free_reg(key_reg); 1313 + } 1314 + } else { 1315 + let key_reg = fc.alloc_reg(); 1316 + compile_expr(fc, property, key_reg)?; 1317 + fc.builder.emit_reg3(Op::SetProperty, obj_reg, key_reg, src); 1318 + fc.free_reg(key_reg); 1319 + } 1320 + fc.free_reg(obj_reg); 1321 + } 1322 + _ => { 1323 + // Other assignment targets (destructuring) not handled here. 
1324 + } 1325 + } 1326 + Ok(()) 1327 + } 1328 + 1329 + fn binary_op_to_opcode(op: BinaryOp) -> Op { 1330 + match op { 1331 + BinaryOp::Add => Op::Add, 1332 + BinaryOp::Sub => Op::Sub, 1333 + BinaryOp::Mul => Op::Mul, 1334 + BinaryOp::Div => Op::Div, 1335 + BinaryOp::Rem => Op::Rem, 1336 + BinaryOp::Exp => Op::Exp, 1337 + BinaryOp::Eq => Op::Eq, 1338 + BinaryOp::Ne => Op::NotEq, 1339 + BinaryOp::StrictEq => Op::StrictEq, 1340 + BinaryOp::StrictNe => Op::StrictNotEq, 1341 + BinaryOp::Lt => Op::LessThan, 1342 + BinaryOp::Le => Op::LessEq, 1343 + BinaryOp::Gt => Op::GreaterThan, 1344 + BinaryOp::Ge => Op::GreaterEq, 1345 + BinaryOp::Shl => Op::ShiftLeft, 1346 + BinaryOp::Shr => Op::ShiftRight, 1347 + BinaryOp::Ushr => Op::UShiftRight, 1348 + BinaryOp::BitAnd => Op::BitAnd, 1349 + BinaryOp::BitOr => Op::BitOr, 1350 + BinaryOp::BitXor => Op::BitXor, 1351 + BinaryOp::In => Op::In, 1352 + BinaryOp::Instanceof => Op::InstanceOf, 1353 + } 1354 + } 1355 + 1356 + fn compound_assign_op(op: AssignOp) -> Op { 1357 + match op { 1358 + AssignOp::AddAssign => Op::Add, 1359 + AssignOp::SubAssign => Op::Sub, 1360 + AssignOp::MulAssign => Op::Mul, 1361 + AssignOp::DivAssign => Op::Div, 1362 + AssignOp::RemAssign => Op::Rem, 1363 + AssignOp::ExpAssign => Op::Exp, 1364 + AssignOp::ShlAssign => Op::ShiftLeft, 1365 + AssignOp::ShrAssign => Op::ShiftRight, 1366 + AssignOp::UshrAssign => Op::UShiftRight, 1367 + AssignOp::BitAndAssign => Op::BitAnd, 1368 + AssignOp::BitOrAssign => Op::BitOr, 1369 + AssignOp::BitXorAssign => Op::BitXor, 1370 + AssignOp::AndAssign => Op::BitAnd, // logical AND assignment uses short-circuit; simplified here 1371 + AssignOp::OrAssign => Op::BitOr, // likewise 1372 + AssignOp::NullishAssign => Op::Move, // simplified 1373 + AssignOp::Assign => unreachable!(), 1374 + } 1375 + } 1376 + 1377 + #[cfg(test)] 1378 + mod tests { 1379 + use super::*; 1380 + use crate::parser::Parser; 1381 + 1382 + /// Helper: parse and compile source, return the top-level function. 
1383 + fn compile_src(src: &str) -> Function { 1384 + let program = Parser::parse(src).expect("parse failed"); 1385 + compile(&program).expect("compile failed") 1386 + } 1387 + 1388 + #[test] 1389 + fn test_compile_number_literal() { 1390 + let f = compile_src("42;"); 1391 + let dis = f.disassemble(); 1392 + assert!(dis.contains("LoadInt8 r0, 42"), "got:\n{dis}"); 1393 + assert!(dis.contains("Return r0")); 1394 + } 1395 + 1396 + #[test] 1397 + fn test_compile_large_number() { 1398 + let f = compile_src("3.14;"); 1399 + let dis = f.disassemble(); 1400 + assert!(dis.contains("LoadConst r0, #0"), "got:\n{dis}"); 1401 + assert!( 1402 + f.constants.contains(&Constant::Number(3.14)), 1403 + "constants: {:?}", 1404 + f.constants 1405 + ); 1406 + } 1407 + 1408 + #[test] 1409 + fn test_compile_string() { 1410 + let f = compile_src("\"hello\";"); 1411 + let dis = f.disassemble(); 1412 + assert!(dis.contains("LoadConst r0, #0")); 1413 + assert!(f.constants.contains(&Constant::String("hello".into()))); 1414 + } 1415 + 1416 + #[test] 1417 + fn test_compile_bool_null() { 1418 + let f = compile_src("true; false; null;"); 1419 + let dis = f.disassemble(); 1420 + assert!(dis.contains("LoadTrue r0")); 1421 + assert!(dis.contains("LoadFalse r0")); 1422 + assert!(dis.contains("LoadNull r0")); 1423 + } 1424 + 1425 + #[test] 1426 + fn test_compile_binary_arithmetic() { 1427 + let f = compile_src("1 + 2;"); 1428 + let dis = f.disassemble(); 1429 + assert!(dis.contains("Add r0, r1, r2"), "got:\n{dis}"); 1430 + } 1431 + 1432 + #[test] 1433 + fn test_compile_nested_arithmetic() { 1434 + let f = compile_src("(1 + 2) * 3;"); 1435 + let dis = f.disassemble(); 1436 + assert!(dis.contains("Add"), "got:\n{dis}"); 1437 + assert!(dis.contains("Mul"), "got:\n{dis}"); 1438 + } 1439 + 1440 + #[test] 1441 + fn test_compile_var_decl() { 1442 + let f = compile_src("var x = 10; x;"); 1443 + let dis = f.disassemble(); 1444 + // x should get a register, then be loaded from that register. 
1445 + assert!(dis.contains("LoadInt8"), "got:\n{dis}"); 1446 + assert!( 1447 + dis.contains("Move") || dis.contains("LoadInt8"), 1448 + "got:\n{dis}" 1449 + ); 1450 + } 1451 + 1452 + #[test] 1453 + fn test_compile_let_const() { 1454 + let f = compile_src("let a = 1; const b = 2; a + b;"); 1455 + let dis = f.disassemble(); 1456 + assert!(dis.contains("Add"), "got:\n{dis}"); 1457 + } 1458 + 1459 + #[test] 1460 + fn test_compile_if_else() { 1461 + let f = compile_src("if (true) { 1; } else { 2; }"); 1462 + let dis = f.disassemble(); 1463 + assert!(dis.contains("JumpIfFalse"), "got:\n{dis}"); 1464 + assert!(dis.contains("Jump"), "got:\n{dis}"); 1465 + } 1466 + 1467 + #[test] 1468 + fn test_compile_while() { 1469 + let f = compile_src("var i = 0; while (i < 10) { i = i + 1; }"); 1470 + let dis = f.disassemble(); 1471 + assert!(dis.contains("LessThan"), "got:\n{dis}"); 1472 + assert!(dis.contains("JumpIfFalse"), "got:\n{dis}"); 1473 + assert!( 1474 + dis.contains("Jump"), 1475 + "backward jump should be present: {dis}" 1476 + ); 1477 + } 1478 + 1479 + #[test] 1480 + fn test_compile_do_while() { 1481 + let f = compile_src("var i = 0; do { i = i + 1; } while (i < 5);"); 1482 + let dis = f.disassemble(); 1483 + assert!(dis.contains("JumpIfTrue"), "got:\n{dis}"); 1484 + } 1485 + 1486 + #[test] 1487 + fn test_compile_for_loop() { 1488 + let f = compile_src("for (var i = 0; i < 10; i = i + 1) { i; }"); 1489 + let dis = f.disassemble(); 1490 + assert!(dis.contains("LessThan"), "got:\n{dis}"); 1491 + assert!(dis.contains("JumpIfFalse"), "got:\n{dis}"); 1492 + } 1493 + 1494 + #[test] 1495 + fn test_compile_function_decl() { 1496 + let f = compile_src("function add(a, b) { return a + b; }"); 1497 + let dis = f.disassemble(); 1498 + assert!(dis.contains("CreateClosure"), "got:\n{dis}"); 1499 + assert!(!f.functions.is_empty(), "should have nested function"); 1500 + let inner = &f.functions[0]; 1501 + assert_eq!(inner.name, "add"); 1502 + assert_eq!(inner.param_count, 2); 1503 + let 
inner_dis = inner.disassemble(); 1504 + assert!(inner_dis.contains("Add"), "inner:\n{inner_dis}"); 1505 + assert!(inner_dis.contains("Return"), "inner:\n{inner_dis}"); 1506 + } 1507 + 1508 + #[test] 1509 + fn test_compile_function_call() { 1510 + let f = compile_src("function f() { return 42; } f();"); 1511 + let dis = f.disassemble(); 1512 + assert!(dis.contains("Call"), "got:\n{dis}"); 1513 + } 1514 + 1515 + #[test] 1516 + fn test_compile_arrow_function() { 1517 + let f = compile_src("var add = (a, b) => a + b;"); 1518 + let dis = f.disassemble(); 1519 + assert!(dis.contains("CreateClosure"), "got:\n{dis}"); 1520 + let inner = &f.functions[0]; 1521 + assert_eq!(inner.param_count, 2); 1522 + } 1523 + 1524 + #[test] 1525 + fn test_compile_assignment() { 1526 + let f = compile_src("var x = 1; x = x + 2;"); 1527 + let dis = f.disassemble(); 1528 + assert!(dis.contains("Add"), "got:\n{dis}"); 1529 + assert!( 1530 + dis.contains("Move"), 1531 + "assignment should produce Move:\n{dis}" 1532 + ); 1533 + } 1534 + 1535 + #[test] 1536 + fn test_compile_compound_assignment() { 1537 + let f = compile_src("var x = 10; x += 5;"); 1538 + let dis = f.disassemble(); 1539 + assert!(dis.contains("Add"), "got:\n{dis}"); 1540 + } 1541 + 1542 + #[test] 1543 + fn test_compile_member_access() { 1544 + let f = compile_src("var obj = {}; obj.x;"); 1545 + let dis = f.disassemble(); 1546 + assert!(dis.contains("CreateObject"), "got:\n{dis}"); 1547 + assert!(dis.contains("GetPropertyByName"), "got:\n{dis}"); 1548 + } 1549 + 1550 + #[test] 1551 + fn test_compile_computed_member() { 1552 + let f = compile_src("var arr = []; arr[0];"); 1553 + let dis = f.disassemble(); 1554 + assert!(dis.contains("GetProperty"), "got:\n{dis}"); 1555 + } 1556 + 1557 + #[test] 1558 + fn test_compile_object_literal() { 1559 + let f = compile_src("var obj = { a: 1, b: 2 };"); 1560 + let dis = f.disassemble(); 1561 + assert!(dis.contains("CreateObject"), "got:\n{dis}"); 1562 + 
assert!(dis.contains("SetPropertyByName"), "got:\n{dis}"); 1563 + } 1564 + 1565 + #[test] 1566 + fn test_compile_array_literal() { 1567 + let f = compile_src("[1, 2, 3];"); 1568 + let dis = f.disassemble(); 1569 + assert!(dis.contains("CreateArray"), "got:\n{dis}"); 1570 + assert!(dis.contains("SetProperty"), "got:\n{dis}"); 1571 + } 1572 + 1573 + #[test] 1574 + fn test_compile_conditional() { 1575 + let f = compile_src("true ? 1 : 2;"); 1576 + let dis = f.disassemble(); 1577 + assert!(dis.contains("JumpIfFalse"), "got:\n{dis}"); 1578 + } 1579 + 1580 + #[test] 1581 + fn test_compile_logical_and() { 1582 + let f = compile_src("true && false;"); 1583 + let dis = f.disassemble(); 1584 + assert!(dis.contains("JumpIfFalse"), "short-circuit:\n{dis}"); 1585 + } 1586 + 1587 + #[test] 1588 + fn test_compile_logical_or() { 1589 + let f = compile_src("false || true;"); 1590 + let dis = f.disassemble(); 1591 + assert!(dis.contains("JumpIfTrue"), "short-circuit:\n{dis}"); 1592 + } 1593 + 1594 + #[test] 1595 + fn test_compile_typeof() { 1596 + let f = compile_src("typeof 42;"); 1597 + let dis = f.disassemble(); 1598 + assert!(dis.contains("TypeOf"), "got:\n{dis}"); 1599 + } 1600 + 1601 + #[test] 1602 + fn test_compile_unary_minus() { 1603 + let f = compile_src("-42;"); 1604 + let dis = f.disassemble(); 1605 + assert!(dis.contains("Neg"), "got:\n{dis}"); 1606 + } 1607 + 1608 + #[test] 1609 + fn test_compile_not() { 1610 + let f = compile_src("!true;"); 1611 + let dis = f.disassemble(); 1612 + assert!(dis.contains("LogicalNot"), "got:\n{dis}"); 1613 + } 1614 + 1615 + #[test] 1616 + fn test_compile_return() { 1617 + let f = compile_src("function f() { return 42; }"); 1618 + let inner = &f.functions[0]; 1619 + let dis = inner.disassemble(); 1620 + assert!(dis.contains("Return"), "got:\n{dis}"); 1621 + } 1622 + 1623 + #[test] 1624 + fn test_compile_empty_return() { 1625 + let f = compile_src("function f() { return; }"); 1626 + let inner = &f.functions[0]; 1627 + let dis = 
inner.disassemble(); 1628 + assert!(dis.contains("LoadUndefined"), "got:\n{dis}"); 1629 + assert!(dis.contains("Return"), "got:\n{dis}"); 1630 + } 1631 + 1632 + #[test] 1633 + fn test_compile_throw() { 1634 + let f = compile_src("function f() { throw 42; }"); 1635 + let inner = &f.functions[0]; 1636 + let dis = inner.disassemble(); 1637 + assert!(dis.contains("Throw"), "got:\n{dis}"); 1638 + } 1639 + 1640 + #[test] 1641 + fn test_compile_this() { 1642 + let f = compile_src("this;"); 1643 + let dis = f.disassemble(); 1644 + assert!(dis.contains("LoadGlobal"), "got:\n{dis}"); 1645 + assert!(f.names.contains(&"this".to_string())); 1646 + } 1647 + 1648 + #[test] 1649 + fn test_compile_global_var() { 1650 + let f = compile_src("console;"); 1651 + let dis = f.disassemble(); 1652 + assert!(dis.contains("LoadGlobal"), "got:\n{dis}"); 1653 + assert!(f.names.contains(&"console".to_string())); 1654 + } 1655 + 1656 + #[test] 1657 + fn test_compile_template_literal() { 1658 + let f = compile_src("`hello`;"); 1659 + assert!( 1660 + f.constants.contains(&Constant::String("hello".into())), 1661 + "constants: {:?}", 1662 + f.constants 1663 + ); 1664 + } 1665 + 1666 + #[test] 1667 + fn test_compile_switch() { 1668 + let f = compile_src("switch (1) { case 1: 42; break; case 2: 99; break; }"); 1669 + let dis = f.disassemble(); 1670 + assert!(dis.contains("StrictEq"), "got:\n{dis}"); 1671 + } 1672 + 1673 + #[test] 1674 + fn test_compile_class() { 1675 + let f = compile_src("class Foo { constructor() {} greet() { return 1; } }"); 1676 + let dis = f.disassemble(); 1677 + assert!(dis.contains("CreateClosure"), "got:\n{dis}"); 1678 + } 1679 + 1680 + #[test] 1681 + fn test_compile_update_prefix() { 1682 + let f = compile_src("var x = 0; ++x;"); 1683 + let dis = f.disassemble(); 1684 + assert!(dis.contains("Add"), "got:\n{dis}"); 1685 + } 1686 + 1687 + #[test] 1688 + fn test_compile_comparison() { 1689 + let f = compile_src("1 === 2;"); 1690 + let dis = f.disassemble(); 1691 + 
assert!(dis.contains("StrictEq"), "got:\n{dis}"); 1692 + } 1693 + 1694 + #[test] 1695 + fn test_compile_bitwise() { 1696 + let f = compile_src("1 & 2;"); 1697 + let dis = f.disassemble(); 1698 + assert!(dis.contains("BitAnd"), "got:\n{dis}"); 1699 + } 1700 + 1701 + #[test] 1702 + fn test_compile_void() { 1703 + let f = compile_src("void 0;"); 1704 + let dis = f.disassemble(); 1705 + assert!(dis.contains("Void"), "got:\n{dis}"); 1706 + } 1707 + 1708 + #[test] 1709 + fn test_disassembler_output_format() { 1710 + let f = compile_src("var x = 42; x + 1;"); 1711 + let dis = f.disassemble(); 1712 + // Should contain function header. 1713 + assert!(dis.contains("function <main>")); 1714 + // Should contain code section. 1715 + assert!(dis.contains("code:")); 1716 + // Should have hex offsets. 1717 + assert!(dis.contains("0000")); 1718 + } 1719 + 1720 + #[test] 1721 + fn test_register_allocation_is_minimal() { 1722 + // `var a = 1; var b = 2; a + b;` should use few registers. 1723 + let f = compile_src("var a = 1; var b = 2; a + b;"); 1724 + // r0 = result, r1 = a, r2 = b, r3/r4 = temps for addition 1725 + assert!( 1726 + f.register_count <= 6, 1727 + "too many registers: {}", 1728 + f.register_count 1729 + ); 1730 + } 1731 + 1732 + #[test] 1733 + fn test_nested_function_closure() { 1734 + let f = compile_src("function outer() { function inner() { return 1; } return inner; }"); 1735 + assert_eq!(f.functions.len(), 1); 1736 + let outer = &f.functions[0]; 1737 + assert_eq!(outer.name, "outer"); 1738 + assert_eq!(outer.functions.len(), 1); 1739 + let inner = &outer.functions[0]; 1740 + assert_eq!(inner.name, "inner"); 1741 + } 1742 + 1743 + #[test] 1744 + fn test_for_with_no_parts() { 1745 + // `for (;;) { break; }` — infinite loop with immediate break. 1746 + let f = compile_src("for (;;) { break; }"); 1747 + let dis = f.disassemble(); 1748 + assert!(dis.contains("Jump"), "got:\n{dis}"); 1749 + } 1750 + }
+2
crates/js/src/lib.rs
··· 1 1 //! JavaScript engine — lexer, parser, bytecode, register VM, GC, JIT (AArch64). 2 2 3 3 pub mod ast; 4 + pub mod bytecode; 5 + pub mod compiler; 4 6 pub mod lexer; 5 7 pub mod parser; 6 8