1use byteorder::{ByteOrder, LittleEndian};
2use leb128;
3
4mod name_section;
5
/// A function signature: parameter and result value types, each stored
/// as a wasm valtype byte (e.g. `I32` = 0x7F, `F64` = 0x7C).
#[derive(Debug, Clone)]
pub struct FuncType {
    pub params: Vec<u8>,
    pub results: Vec<u8>,
}
11
/// A function paired with the name it is exported under.
#[derive(Debug, Clone)]
pub struct FuncExport {
    pub name: String,
    pub func: Func,
}
17
/// Kind of an export entry; selects the tag byte written in the export
/// section (func = 0x00, mem = 0x02, global = 0x03).
#[derive(Debug, PartialEq, Clone)]
pub enum ExportType {
    Func,
    Mem,
    Global,
}
24
/// Kind of an import entry; only function imports are supported here
/// (serialized with tag byte 0x00).
#[derive(Debug, PartialEq, Clone)]
pub enum ImportType {
    Func,
}
29
/// One wasm instruction: its opcode, its immediates, and — for control
/// instructions built via `new_control` — the block result type, which
/// is serialized immediately after the opcode.
#[derive(Debug, Clone)]
pub struct FuncCode {
    pub opcode: u8,
    pub immediates: Vec<Imm>,
    pub return_type: Option<u8>,
}
36
/// An instruction immediate operand.
#[derive(Debug, Clone, Copy)]
pub enum Imm {
    /// A reserved placeholder; serialized as a single 0x00 byte.
    RESERVED,

    I64(i64),
    I32(i32),
    F64(f64),
}
47impl From<i64> for Imm {
48 #[inline]
49 fn from(v: i64) -> Self {
50 Imm::I64(v)
51 }
52}
53
54impl FuncCode {
55 pub fn new0(opcode: u8) -> FuncCode {
56 FuncCode {
57 opcode,
58 immediates: vec![],
59 return_type: None,
60 }
61 }
62
63 pub fn new_control(opcode: u8, rt: u8) -> FuncCode {
64 FuncCode {
65 opcode,
66 immediates: vec![],
67 return_type: Some(rt),
68 }
69 }
70
71 pub fn new1(opcode: u8, imm: Imm) -> FuncCode {
72 FuncCode {
73 opcode,
74 immediates: vec![imm],
75 return_type: None,
76 }
77 }
78
79 pub fn new2(opcode: u8, imm1: Imm, imm2: Imm) -> FuncCode {
80 FuncCode {
81 opcode,
82 immediates: vec![imm1, imm2],
83 return_type: None,
84 }
85 }
86}
87
/// An element segment: function indices placed into table `table`
/// starting at `offset`.
#[derive(Debug, Clone)]
pub struct Element {
    pub table: u32,
    pub offset: u32,
    pub funcs: Vec<u32>,
}
94
/// A table declaration: element type plus (min, max) size limits.
#[derive(Debug, Clone)]
pub struct Table {
    pub elemtype: TableElemType,
    pub limits: (u32, u32),
}
100
/// Table element types; the discriminant is the wasm encoding byte.
#[derive(Debug, Clone, Copy)]
pub enum TableElemType {
    Funcref = 0x70,
}
105
/// A function definition: signature, local declarations, and body code.
#[derive(Debug, Clone)]
pub struct Func {
    pub sig: FuncType,
    pub locals: Vec<FuncLocal>,
    pub code: Vec<FuncCode>,
}
112
// (export name, index in the exported item's index space, export kind).
type Export = (String, usize, ExportType);
// (module name, field name, import kind, type index).
type Import = (String, String, ImportType, usize);
// (valtype byte, mutability flag, init value for the init expression).
type Global = (u8, u8, u32);
// (repeat count, valtype byte) — a run-length-encoded group of locals.
type FuncLocal = (usize, u8);
117
// Module preamble: "\0asm" magic followed by binary version 1.
pub const HEADER_MAGIC: [u8; 4] = [0x00, 0x61, 0x73, 0x6D];
pub const HEADER_VERSION: [u8; 4] = [0x01, 0x00, 0x00, 0x00];

// Section ids, in the order they must appear in the module.
pub const CUSTOM_SECTION: u8 = 0;
pub const TYPE_SECTION: u8 = 1;
pub const IMPORT_SECTION: u8 = 2;
pub const FUNCTION_SECTION: u8 = 3;
pub const TABLE_SECTION: u8 = 4;
pub const MEMORY_SECTION: u8 = 5;
pub const GLOBAL_SECTION: u8 = 6;
pub const EXPORT_SECTION: u8 = 7;
pub const START_SECTION: u8 = 8;
pub const ELEMENT_SECTION: u8 = 9;
pub const CODE_SECTION: u8 = 10;
pub const DATA_SECTION: u8 = 11;

pub const UNREACHABLE: u8 = 0x00;

// Value type (valtype) encodings; NONE (0x40) is the empty block type.
pub const NONE: u8 = 0x40;
pub const I32: u8 = 0x7F;
pub const I64: u8 = 0x7E;
pub const F32: u8 = 0x7D;
pub const F64: u8 = 0x7C;

// Constant instructions.
pub const I32_CONST: u8 = 0x41;
pub const I64_CONST: u8 = 0x42;
pub const F32_CONST: u8 = 0x43;
pub const F64_CONST: u8 = 0x44;

// Comparison, arithmetic, and conversion opcodes.
pub const I32_EQZ: u8 = 0x45;
pub const I32_EQ: u8 = 0x46;
pub const I32_NE: u8 = 0x47;
pub const I32_AND: u8 = 0x71;
pub const I32_OR: u8 = 0x72;
pub const I32_SHL: u8 = 0x74;
pub const I32_SHR_U: u8 = 0x76;
pub const I32_LT_S: u8 = 0x48;
pub const I32_LT_U: u8 = 0x49;
pub const I32_GT_S: u8 = 0x4A;
pub const I32_GT_U: u8 = 0x4B;
pub const I32_LE_S: u8 = 0x4C;
pub const I32_LE_U: u8 = 0x4D;
pub const I32_GE_S: u8 = 0x4E;
pub const I32_ADD: u8 = 0x6A;
pub const I32_SUB: u8 = 0x6B;
pub const I32_MUL: u8 = 0x6C;
pub const I32_DIV_S: u8 = 0x6D;
pub const I32_REM_S: u8 = 0x6F;
pub const F32_DIV: u8 = 0x95;
pub const F64_ADD: u8 = 0xA0;
pub const F64_SUB: u8 = 0xA1;
pub const F64_MUL: u8 = 0xA2;
pub const F64_DIV: u8 = 0xA3;
pub const F64_LE: u8 = 0x65;
pub const F64_GE: u8 = 0x66;
pub const I32_TRUNC_F32_S: u8 = 0xA8;
pub const I32_TRUNC_F64_S: u8 = 0xAA;
pub const F64_CONVERT_I32_S: u8 = 0xB7;

pub const DROP: u8 = 0x1A;

// Variable access.
pub const LOCAL_GET: u8 = 0x20;
pub const LOCAL_SET: u8 = 0x21;
pub const LOCAL_TEE: u8 = 0x22;
pub const GLOBAL_GET: u8 = 0x23;
pub const GLOBAL_SET: u8 = 0x24;

// Memory access.
pub const I32_LOAD: u8 = 0x28;
pub const I32_LOAD8_U: u8 = 0x2D;
pub const I32_STORE: u8 = 0x36;
pub const I32_STORE8: u8 = 0x3A;

// Structured control flow.
pub const BLOCK: u8 = 0x02;
pub const LOOP: u8 = 0x03;
pub const IF: u8 = 0x04;
pub const ELSE: u8 = 0x05;
pub const BR: u8 = 0x0C;
pub const BR_IF: u8 = 0x0D;

pub const END: u8 = 0x0B;
pub const RETURN: u8 = 0x0F;
pub const CALL: u8 = 0x10;
pub const CALL_INDIRECT: u8 = 0x11;
201
/// Builder that accumulates module contents and serializes the whole
/// module with `to_bytes`.
pub struct WasmCodeGen {
    // (type index into `types`, function body) pairs.
    funcs: Vec<(usize, Func)>,
    types: Vec<FuncType>,
    exports: Vec<Export>,
    elements: Vec<Element>,
    tables: Vec<Table>,
    memories: Vec<(u32, u32)>, // (min, max) limits
    data: Vec<(u32, Vec<u8>)>, // (offset, bytes) data segments
    imports: Vec<Import>,
    globals: Vec<Global>,
    func_names: Vec<name_section::Naming>, // debug names for the "name" custom section
}
215
216fn write_name(bytes: &mut Vec<u8>, name: String) {
217 write_unsigned_leb128(bytes, name.len() as u64);
218 bytes.extend(name.into_bytes());
220}
221
/// Appends `n` as unsigned LEB128. The `expect` cannot trigger: writing
/// into an in-memory `Vec<u8>` never fails.
fn write_unsigned_leb128(bytes: &mut Vec<u8>, n: u64) {
    leb128::write::unsigned(bytes, n).expect("could not write LEB128");
}
225
/// Appends `n` as signed LEB128. The `expect` cannot trigger: writing
/// into an in-memory `Vec<u8>` never fails.
fn write_signed_leb128(bytes: &mut Vec<u8>, n: i64) {
    leb128::write::signed(bytes, n).expect("could not write LEB128");
}
229
/// Appends `n` as 8 little-endian IEEE-754 bytes (the wasm f64
/// immediate encoding). Uses std `f64::to_le_bytes` instead of the
/// `byteorder` crate — same output, no external dependency.
fn write_float(bytes: &mut Vec<u8>, n: f64) {
    bytes.extend_from_slice(&n.to_le_bytes());
}
235
236fn write_unsigned_leb128_at_offset(bytes: &mut Vec<u8>, offset: usize, n: usize) {
237 bytes.remove(offset);
239
240 let mut buffer = vec![];
241
242 leb128::write::unsigned(&mut buffer, n as u64).expect("could not write LEB128");
243
244 let mut i = 0;
245 for byte in buffer {
246 bytes.insert(offset + i, byte);
247 i += 1;
248 }
249}
250
251fn write_vec_len<T>(bytes: &mut Vec<u8>, vec: &Vec<T>) {
252 write_unsigned_leb128(bytes, vec.len() as u64);
253}
254
255fn write_type_section(bytes: &mut Vec<u8>, types: &Vec<FuncType>) {
256 write_vec_len(bytes, types); for functype in types {
259 bytes.push(0x60); write_vec_len(bytes, &functype.params); for b in &functype.params {
263 bytes.push(*b);
264 }
265
266 write_vec_len(bytes, &functype.results); for b in &functype.results {
268 bytes.push(*b);
269 }
270 }
271}
272
273fn write_func_section(bytes: &mut Vec<u8>, funcs: &Vec<(usize, Func)>) {
274 write_vec_len(bytes, funcs); for func in funcs {
277 write_unsigned_leb128(bytes, func.0 as u64);
278 }
279}
280
281fn write_element_section(bytes: &mut Vec<u8>, elements: &Vec<Element>) {
282 write_vec_len(bytes, elements); for element in elements {
285 write_unsigned_leb128(bytes, element.table as u64);
286
287 let offset_expr = vec![FuncCode::new1(I32_CONST, Imm::I64(element.offset as i64))];
288 write_code_expr(bytes, &offset_expr);
289
290 write_vec_len(bytes, &element.funcs); for func in &element.funcs {
292 write_unsigned_leb128(bytes, func.clone() as u64);
293 }
294 }
295}
296
297fn write_table_section(bytes: &mut Vec<u8>, tables: &Vec<Table>) {
298 write_vec_len(bytes, tables); for table in tables {
301 bytes.push(table.elemtype as u8);
302
303 let (min, max) = table.limits;
304 bytes.push(0x01);
305 write_unsigned_leb128(bytes, min as u64);
306 write_unsigned_leb128(bytes, max as u64);
307 }
308}
309
310fn write_imports_section(bytes: &mut Vec<u8>, imports: &Vec<Import>) {
311 write_vec_len(bytes, imports); for import in imports {
314 write_name(bytes, import.0.clone());
315 write_name(bytes, import.1.clone());
316
317 match import.2 {
318 ImportType::Func => bytes.push(0x0),
319 }
320
321 write_unsigned_leb128(bytes, import.3 as u64);
322 }
323}
324
325fn write_code_local(bytes: &mut Vec<u8>, locals: &Vec<FuncLocal>) {
326 write_vec_len(bytes, locals); for local in locals {
329 write_unsigned_leb128(bytes, local.0 as u64);
330 bytes.push(local.1);
331 }
332}
333
334fn write_code_expr(bytes: &mut Vec<u8>, codes: &Vec<FuncCode>) {
335 for code in codes {
336 bytes.push(code.opcode);
337 if let Some(rt) = code.return_type {
338 write_unsigned_leb128(bytes, rt as u64);
339 }
340 for imm in &code.immediates {
341 match imm {
342 Imm::I64(n) => write_signed_leb128(bytes, *n),
343 Imm::I32(n) => write_signed_leb128(bytes, *n as i64),
344 Imm::F64(f) => write_float(bytes, *f),
345 Imm::RESERVED => bytes.push(0x0),
346 };
347 }
348 }
349
350 bytes.push(END); }
352
353fn write_code_section(bytes: &mut Vec<u8>, funcs: &Vec<(usize, Func)>) {
354 write_vec_len(bytes, funcs); for func in funcs {
357 let before_offset = bytes.len();
358 bytes.push(0x0); write_code_local(bytes, &func.1.locals);
361 write_code_expr(bytes, &func.1.code);
362
363 let after_offset = bytes.len();
364
365 let func_len = after_offset - before_offset - 1;
367
368 write_unsigned_leb128_at_offset(bytes, before_offset, func_len);
369 }
370}
371
372fn write_data_section(bytes: &mut Vec<u8>, datum: &Vec<(u32, Vec<u8>)>) {
373 write_vec_len(bytes, datum); for data in datum {
376 bytes.push(0x0); bytes.push(I32_CONST);
379 write_signed_leb128(bytes, data.0 as i64); bytes.push(END);
381
382 write_vec_len(bytes, &data.1); for b in &data.1 {
384 bytes.push(*b);
385 }
386 }
387}
388
/// Serializes the "name" custom section payload: the section name, then
/// the function-names subsection (id 1) listing index/name pairs.
/// NOTE(review): assumes `name_section::write_var_uint32` for a small
/// length always emits bytes compatible with the 1-byte placeholder
/// patch below — confirm against `name_section`.
fn write_custom_name_section(bytes: &mut Vec<u8>, names: &Vec<name_section::Naming>) {
    write_name(bytes, "name".to_string());

    // Subsection id 1 = function names.
    name_section::write_var_uint7(1, bytes);

    // One placeholder byte for the subsection payload length, patched
    // below once the payload size is known.
    let name_payload_len_offset = bytes.len();
    bytes.push(0);

    name_section::write_var_uint32(names.len() as u32, bytes);

    for name in names {
        name_section::write_var_uint32(name.index as u32, bytes);
        name_section::write_var_uint32(name.name.len() as u32, bytes);
        bytes.extend_from_slice(&name.name.as_bytes())
    }

    let after_offset = bytes.len();
    // -1: the placeholder byte itself is replaced, not counted.
    let section_len = after_offset - name_payload_len_offset - 1;

    write_unsigned_leb128_at_offset(bytes, name_payload_len_offset, section_len);
}
414
415pub fn write_custom_section(bytes: &mut Vec<u8>, name: &str, content: &[u8]) {
416 bytes.push(0); let start = bytes.len();
419 bytes.push(0);
421
422 write_name(bytes, name.to_owned());
423 bytes.extend_from_slice(content);
424
425 let after_offset = bytes.len();
426 let section_len = after_offset - start - 1;
427
428 write_unsigned_leb128_at_offset(bytes, start, section_len);
430}
431
432fn write_export_section(bytes: &mut Vec<u8>, exports: &Vec<Export>) {
433 write_vec_len(bytes, exports); for export in exports {
436 let (name, idx, export_type) = export;
437
438 write_name(bytes, name.clone());
439
440 match *export_type {
441 ExportType::Func => bytes.push(0x0),
442 ExportType::Mem => bytes.push(0x2),
443 ExportType::Global => bytes.push(0x3),
444 }
445 write_unsigned_leb128(bytes, *idx as u64);
446 }
447}
448
449fn write_memory_section(bytes: &mut Vec<u8>, memories: &Vec<(u32, u32)>) {
450 write_vec_len(bytes, memories); for mem in memories {
453 let (min, max) = mem;
454 bytes.push(0x01);
455 write_unsigned_leb128(bytes, *min as u64);
456 write_unsigned_leb128(bytes, *max as u64);
457 }
458}
459
460fn write_global_section(bytes: &mut Vec<u8>, globals: &Vec<Global>) {
461 write_vec_len(bytes, globals); for data in globals {
464 let (t, mutability, init) = data;
465 bytes.push(*t);
466 bytes.push(*mutability);
467
468 let expr = vec![FuncCode::new1(I32_CONST, Imm::I64(*init as i64))];
469 write_code_expr(bytes, &expr);
470 }
471}
472
// Emits one complete section: the id byte, a one-byte length
// placeholder, the payload written by `$write_fn`, and finally patches
// the placeholder with the real payload length. Sections whose backing
// collection is empty are skipped entirely.
macro_rules! write_section {
    ($b: expr, $o:expr, $id:expr, $write_fn:expr) => {
        if $o.len() > 0 {
            $b.push($id);
            let before_offset = $b.len();
            $b.push(0x0); // length placeholder, patched below
            $write_fn(&mut $b, &$o);

            let after_offset = $b.len();

            // -1: the placeholder byte itself is replaced, not counted.
            let section_len = after_offset - before_offset - 1;

            write_unsigned_leb128_at_offset(&mut $b, before_offset, section_len);
        }
    };
}
493
494impl WasmCodeGen {
495 pub fn new() -> WasmCodeGen {
496 WasmCodeGen {
497 types: vec![],
498 funcs: vec![],
499 tables: vec![],
500 exports: vec![],
501 elements: vec![],
502 memories: vec![],
503 data: vec![],
504 imports: vec![],
505 globals: vec![],
506 func_names: vec![],
507 }
508 }
509
510 pub fn to_bytes(&self) -> Vec<u8> {
511 let mut bytes = vec![];
512
513 bytes.extend(&HEADER_MAGIC);
514 bytes.extend(&HEADER_VERSION);
515
516 write_section!(bytes, self.types, TYPE_SECTION, write_type_section);
517 write_section!(bytes, self.imports, IMPORT_SECTION, write_imports_section);
518 write_section!(bytes, self.funcs, FUNCTION_SECTION, write_func_section);
519 write_section!(bytes, self.tables, TABLE_SECTION, write_table_section);
520 write_section!(bytes, self.memories, MEMORY_SECTION, write_memory_section);
521 write_section!(bytes, self.globals, GLOBAL_SECTION, write_global_section);
522 write_section!(bytes, self.exports, EXPORT_SECTION, write_export_section);
523 write_section!(bytes, self.elements, ELEMENT_SECTION, write_element_section);
524 write_section!(bytes, self.funcs, CODE_SECTION, write_code_section);
525 write_section!(bytes, self.data, DATA_SECTION, write_data_section);
526 write_section!(
527 bytes,
528 self.func_names,
529 CUSTOM_SECTION,
530 write_custom_name_section
531 );
532
533 bytes
534 }
535
536 pub fn set_name(&mut self, idx: usize, name: String) {
537 self.func_names.push(name_section::Naming {
538 index: idx as u32,
539 name,
540 });
541 }
542
543 pub fn add_type(&mut self, t: FuncType) -> usize {
544 let idx = self.types.len();
545 self.types.push(t);
546
547 idx
548 }
549
550 pub fn add_export(&mut self, name: String, idx: usize, export_type: ExportType) {
551 self.exports.push((name, idx, export_type));
552 }
553
554 pub fn add_func(&mut self, f: Func) -> usize {
555 let funcidx = self.funcs.len() + self.imports.len();
556
557 self.funcs.push((self.types.len(), f.clone()));
558 self.add_type(f.sig);
559
560 funcidx
561 }
562
563 pub fn add_func_with_type(&mut self, f: Func, t: u32) -> usize {
564 let funcidx = self.funcs.len() + self.imports.len();
565
566 self.funcs.push((t as usize, f.clone()));
567
568 funcidx
569 }
570
571 pub fn replace_code_func(&mut self, idx: usize, code: Vec<FuncCode>) {
572 let idx = idx - self.imports.len();
574 let (type_idx, old_func) = self.funcs[idx].clone();
575 let new_func = Func {
576 sig: old_func.sig,
577 locals: old_func.locals,
578 code,
579 };
580 self.funcs[idx] = (type_idx, new_func);
581 }
582
583 pub fn add_table(&mut self, elemtype: TableElemType, min: u32, max: u32) -> usize {
584 let idx = self.tables.len();
585
586 self.tables.push(Table {
587 elemtype,
588 limits: (min, max),
589 });
590
591 idx
592 }
593
594 pub fn add_element(&mut self, table: u32, offset: u32, funcs: Vec<u32>) -> usize {
595 let idx = self.elements.len();
596
597 self.elements.push(Element {
598 table,
599 offset,
600 funcs,
601 });
602
603 idx
604 }
605
606 pub fn add_memory(&mut self, min: u32, max: u32) -> usize {
607 assert!(self.memories.len() == 0);
608 self.memories.push((min, max));
609
610 0
611 }
612
613 pub fn add_data(&mut self, offset: u32, bytes: Vec<u8>) -> u32 {
614 self.data.push((offset, bytes.clone()));
615 bytes.len() as u32
616 }
617
618 pub fn add_import(
619 &mut self,
620 module: String,
621 name: String,
622 import_type: ImportType,
623 typeidx: usize,
624 ) -> usize {
625 let importidex = self.imports.len();
626 self.imports.push((module, name, import_type, typeidx));
627
628 importidex
629 }
630
631 pub fn add_mutable_global(&mut self, valtype: u8, init: u32) -> usize {
632 let idx = self.globals.len();
633 self.globals.push((valtype, 0x01, init));
634
635 idx
636 }
637}