6 #include <caml/mlvalues.h>
7 #include <caml/memory.h>
8 #include <caml/alloc.h>
/* Number of elements in a statically-sized array `a`.
 * Only valid on true arrays — never on pointers or decayed array
 * parameters, where sizeof gives the pointer size instead.
 * Fix: parenthesize the argument in the subscript so expression
 * arguments (e.g. ARR_SIZE(*p)) parse as intended. */
#define ARR_SIZE(a) (sizeof(a) / sizeof((a)[0]))
/* NOTE(review): fragment of Capstone's OCaml glue; the enclosing function
   header and many interleaved lines are missing from this extract, so the
   code is left byte-identical and only comments are added. */
/* Register every OCaml heap value built below as a GC root; required by the
   OCaml FFI before any caml_alloc while C code holds the values. */
31 CAMLlocal5(list, cons, rec_insn,
array,
tmp);
32 CAMLlocal4(arch_info, op_info_val, tmp2, tmp3);
/* Walk insn[] back-to-front so consing each record onto `list` leaves the
   instructions in their original (ascending) order. */
42 for (j =
c; j > 0; j--) {
43 unsigned int lcount,
i;
44 cons = caml_alloc(2, 0);
/* One 10-field OCaml record per instruction: id, address, size, bytes,
   mnemonic, op_str, regs_read, regs_write, groups, arch-specific detail. */
46 rec_insn = caml_alloc(10, 0);
47 Store_field(rec_insn, 0, Val_int(insn[j-1].
id));
/* NOTE(review): capstone's `address` is 64-bit; Val_int narrows it to the
   OCaml tagged-int width — confirm this truncation is intended. */
48 Store_field(rec_insn, 1, Val_int(insn[j-1].address));
49 Store_field(rec_insn, 2, Val_int(insn[j-1].
size));
/* Copy the raw machine-code bytes into an OCaml array (loop body is on
   lines missing from this extract). */
52 lcount = insn[j-1].size;
54 array = caml_alloc(lcount, 0);
55 for (
i = 0;
i < lcount;
i++) {
60 Store_field(rec_insn, 3,
array);
62 Store_field(rec_insn, 4, caml_copy_string(insn[j-1].mnemonic));
63 Store_field(rec_insn, 5, caml_copy_string(insn[j-1].op_str));
/* Implicit registers read, registers written, and semantic groups from the
   capstone detail block (valid only with CS_OPT_DETAIL enabled — presumably
   guarded by a check not visible in this extract; TODO confirm). */
67 lcount = (insn[j-1]).
detail->regs_read_count;
69 array = caml_alloc(lcount, 0);
70 for (
i = 0;
i < lcount;
i++) {
71 Store_field(
array,
i, Val_int(insn[j-1].
detail->regs_read[
i]));
77 Store_field(rec_insn, 6,
array);
/* Implicit registers written by this instruction -> field 7. */
80 lcount = (insn[j-1]).
detail->regs_write_count;
82 array = caml_alloc(lcount, 0);
83 for (
i = 0;
i < lcount;
i++) {
84 Store_field(
array,
i, Val_int(insn[j-1].
detail->regs_write[
i]));
90 Store_field(rec_insn, 7,
array);
/* Instruction groups (jump/call/...) -> field 8; loop body is on lines
   missing from this extract. */
93 lcount = (insn[j-1]).
detail->groups_count;
95 array = caml_alloc(lcount, 0);
96 for (
i = 0;
i < lcount;
i++) {
103 Store_field(rec_insn, 8,
array);
/* CS_ARCH_ARM: wrap the arm op_info record in variant tag 0. Switch case
   labels for the operand types are missing from this extract. */
108 arch_info = caml_alloc(1, 0);
110 op_info_val = caml_alloc(10, 0);
111 Store_field(op_info_val, 0, Val_bool(insn[j-1].
detail->arm.usermode));
112 Store_field(op_info_val, 1, Val_int(insn[j-1].
detail->arm.vector_size));
113 Store_field(op_info_val, 2, Val_int(insn[j-1].
detail->arm.vector_data));
114 Store_field(op_info_val, 3, Val_int(insn[j-1].
detail->arm.cps_mode));
115 Store_field(op_info_val, 4, Val_int(insn[j-1].
detail->arm.cps_flag));
116 Store_field(op_info_val, 5, Val_int(insn[j-1].
detail->arm.cc));
117 Store_field(op_info_val, 6, Val_bool(insn[j-1].
detail->arm.update_flags));
118 Store_field(op_info_val, 7, Val_bool(insn[j-1].
detail->arm.writeback));
119 Store_field(op_info_val, 8, Val_int(insn[j-1].
detail->arm.mem_barrier));
/* One 6-field record per operand; `tmp` carries the per-type variant
   (reg / imm / fp / mem / setend / ...), chosen by the switch below. */
121 lcount = insn[j-1].detail->arm.op_count;
123 array = caml_alloc(lcount, 0);
124 for (
i = 0;
i < lcount;
i++) {
125 tmp2 = caml_alloc(6, 0);
126 switch(insn[j-1].
detail->arm.operands[
i].type) {
129 tmp = caml_alloc(1, 1);
130 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm.operands[
i].reg));
133 tmp = caml_alloc(1, 2);
134 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm.operands[
i].imm));
137 tmp = caml_alloc(1, 3);
138 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm.operands[
i].imm));
141 tmp = caml_alloc(1, 4);
142 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm.operands[
i].imm));
145 tmp = caml_alloc(1, 5);
146 Store_field(
tmp, 0, caml_copy_double(insn[j-1].
detail->arm.operands[
i].fp));
149 tmp = caml_alloc(1, 6);
150 tmp3 = caml_alloc(5, 0);
151 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->arm.operands[
i].mem.base));
152 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->arm.operands[
i].mem.index));
153 Store_field(tmp3, 2, Val_int(insn[j-1].
detail->arm.operands[
i].mem.scale));
154 Store_field(tmp3, 3, Val_int(insn[j-1].
detail->arm.operands[
i].mem.disp));
155 Store_field(tmp3, 4, Val_int(insn[j-1].
detail->arm.operands[
i].mem.lshift));
156 Store_field(
tmp, 0, tmp3);
159 tmp = caml_alloc(1, 7);
160 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm.operands[
i].setend));
/* Fields common to every ARM operand: vector_index, (shift.type,
   shift.value), the variant payload, subtracted, access, neon_lane. */
164 tmp3 = caml_alloc(2, 0);
165 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->arm.operands[
i].shift.type));
166 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->arm.operands[
i].shift.value));
167 Store_field(tmp2, 0, Val_int(insn[j-1].
detail->arm.operands[
i].vector_index));
168 Store_field(tmp2, 1, tmp3);
169 Store_field(tmp2, 2,
tmp);
170 Store_field(tmp2, 3, Val_bool(insn[j-1].
detail->arm.operands[
i].subtracted));
171 Store_field(tmp2, 4, Val_int(insn[j-1].
detail->arm.operands[
i].access));
172 Store_field(tmp2, 5, Val_int(insn[j-1].
detail->arm.operands[
i].neon_lane));
173 Store_field(
array,
i, tmp2);
178 Store_field(op_info_val, 9,
array);
181 Store_field(arch_info, 0, op_info_val);
183 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_ARM64: variant tag 1. Same shape as the ARM branch but with
   arm64-specific operand kinds (pstate, sys, prefetch, barrier, ...);
   switch case labels are missing from this extract. */
187 arch_info = caml_alloc(1, 1);
189 op_info_val = caml_alloc(4, 0);
190 Store_field(op_info_val, 0, Val_int(insn[j-1].
detail->arm64.cc));
191 Store_field(op_info_val, 1, Val_bool(insn[j-1].
detail->arm64.update_flags));
192 Store_field(op_info_val, 2, Val_bool(insn[j-1].
detail->arm64.writeback));
194 lcount = insn[j-1].detail->arm64.op_count;
196 array = caml_alloc(lcount, 0);
197 for (
i = 0;
i < lcount;
i++) {
198 tmp2 = caml_alloc(6, 0);
199 switch(insn[j-1].
detail->arm64.operands[
i].type) {
201 tmp = caml_alloc(1, 1);
202 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].reg));
205 tmp = caml_alloc(1, 2);
206 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].imm));
209 tmp = caml_alloc(1, 3);
210 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].imm));
213 tmp = caml_alloc(1, 4);
214 Store_field(
tmp, 0, caml_copy_double(insn[j-1].
detail->arm64.operands[
i].fp));
217 tmp = caml_alloc(1, 5);
218 tmp3 = caml_alloc(3, 0);
219 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].mem.base));
220 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->arm64.operands[
i].mem.index));
221 Store_field(tmp3, 2, Val_int(insn[j-1].
detail->arm64.operands[
i].mem.disp));
222 Store_field(
tmp, 0, tmp3);
225 tmp = caml_alloc(1, 6);
226 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].reg));
229 tmp = caml_alloc(1, 7);
230 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].reg));
233 tmp = caml_alloc(1, 8);
234 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].pstate));
237 tmp = caml_alloc(1, 9);
238 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].sys));
241 tmp = caml_alloc(1, 10);
242 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].prefetch));
245 tmp = caml_alloc(1, 11);
246 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].barrier));
/* Fields common to every ARM64 operand: vector_index, vas, vess,
   (shift.type, shift.value), ext, then the variant payload. */
250 tmp3 = caml_alloc(2, 0);
251 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].shift.type));
252 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->arm64.operands[
i].shift.value));
254 Store_field(tmp2, 0, Val_int(insn[j-1].
detail->arm64.operands[
i].vector_index));
255 Store_field(tmp2, 1, Val_int(insn[j-1].
detail->arm64.operands[
i].vas));
256 Store_field(tmp2, 2, Val_int(insn[j-1].
detail->arm64.operands[
i].vess));
257 Store_field(tmp2, 3, tmp3);
258 Store_field(tmp2, 4, Val_int(insn[j-1].
detail->arm64.operands[
i].ext));
259 Store_field(tmp2, 5,
tmp);
261 Store_field(
array,
i, tmp2);
266 Store_field(op_info_val, 3,
array);
269 Store_field(arch_info, 0, op_info_val);
271 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_MIPS: variant tag 2. Operand kinds reg / imm / mem(base,disp);
   switch case labels are missing from this extract. */
275 arch_info = caml_alloc(1, 2);
277 op_info_val = caml_alloc(1, 0);
279 lcount = insn[j-1].detail->mips.op_count;
281 array = caml_alloc(lcount, 0);
282 for (
i = 0;
i < lcount;
i++) {
283 tmp2 = caml_alloc(1, 0);
284 switch(insn[j-1].
detail->mips.operands[
i].type) {
286 tmp = caml_alloc(1, 1);
287 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->mips.operands[
i].reg));
290 tmp = caml_alloc(1, 2);
291 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->mips.operands[
i].imm));
294 tmp = caml_alloc(1, 3);
295 tmp3 = caml_alloc(2, 0);
296 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->mips.operands[
i].mem.base));
297 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->mips.operands[
i].mem.disp));
298 Store_field(
tmp, 0, tmp3);
302 Store_field(tmp2, 0,
tmp);
303 Store_field(
array,
i, tmp2);
308 Store_field(op_info_val, 0,
array);
311 Store_field(arch_info, 0, op_info_val);
313 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_X86: variant tag 3, a 17-field record (prefixes, opcode bytes,
   rex/modrm/sib encoding fields, condition codes, then operands). */
317 arch_info = caml_alloc(1, 3);
319 op_info_val = caml_alloc(17, 0);
/* NOTE(review): lcount for the prefix array below is set on lines missing
   from this extract (presumably the size of the fixed prefix field). */
324 array = caml_alloc(lcount, 0);
325 for (
i = 0;
i < lcount;
i++) {
326 Store_field(
array,
i, Val_int(insn[j-1].
detail->x86.prefix[
i]));
330 Store_field(op_info_val, 0,
array);
/* Opcode bytes; lcount again set on lines not visible here. */
335 array = caml_alloc(lcount, 0);
336 for (
i = 0;
i < lcount;
i++) {
337 Store_field(
array,
i, Val_int(insn[j-1].
detail->x86.opcode[
i]));
341 Store_field(op_info_val, 1,
array);
342 Store_field(op_info_val, 2, Val_int(insn[j-1].
detail->x86.rex));
345 Store_field(op_info_val, 3, Val_int(insn[j-1].
detail->x86.addr_size));
347 Store_field(op_info_val, 4, Val_int(insn[j-1].
detail->x86.modrm));
349 Store_field(op_info_val, 5, Val_int(insn[j-1].
detail->x86.sib));
351 Store_field(op_info_val, 6, Val_int(insn[j-1].
detail->x86.disp));
353 Store_field(op_info_val, 7, Val_int(insn[j-1].
detail->x86.sib_index));
355 Store_field(op_info_val, 8, Val_int(insn[j-1].
detail->x86.sib_scale));
357 Store_field(op_info_val, 9, Val_int(insn[j-1].
detail->x86.sib_base));
359 Store_field(op_info_val, 10, Val_int(insn[j-1].
detail->x86.xop_cc));
360 Store_field(op_info_val, 11, Val_int(insn[j-1].
detail->x86.sse_cc));
361 Store_field(op_info_val, 12, Val_int(insn[j-1].
detail->x86.avx_cc));
362 Store_field(op_info_val, 13, Val_int(insn[j-1].
detail->x86.avx_sae));
363 Store_field(op_info_val, 14, Val_int(insn[j-1].
detail->x86.avx_rm));
364 Store_field(op_info_val, 15, Val_int(insn[j-1].
detail->x86.eflags));
/* Operands: here each variant block is 5 fields wide — payload plus
   size, access, avx_bcast, avx_zero_opmask. Case labels missing. */
366 lcount = insn[j-1].detail->x86.op_count;
368 array = caml_alloc(lcount, 0);
369 for (
i = 0;
i < lcount;
i++) {
370 switch(insn[j-1].
detail->x86.operands[
i].type) {
372 tmp = caml_alloc(5, 1);
373 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->x86.operands[
i].reg));
376 tmp = caml_alloc(5, 2);
377 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->x86.operands[
i].imm));
380 tmp = caml_alloc(5, 3);
381 tmp2 = caml_alloc(5, 0);
382 Store_field(tmp2, 0, Val_int(insn[j-1].
detail->x86.operands[
i].mem.segment));
383 Store_field(tmp2, 1, Val_int(insn[j-1].
detail->x86.operands[
i].mem.base));
384 Store_field(tmp2, 2, Val_int(insn[j-1].
detail->x86.operands[
i].mem.index));
385 Store_field(tmp2, 3, Val_int(insn[j-1].
detail->x86.operands[
i].mem.scale));
386 Store_field(tmp2, 4, Val_int(insn[j-1].
detail->x86.operands[
i].mem.disp));
388 Store_field(
tmp, 0, tmp2);
393 Store_field(
tmp, 1, Val_int(insn[j-1].
detail->x86.operands[
i].size));
394 Store_field(
tmp, 2, Val_int(insn[j-1].
detail->x86.operands[
i].access));
395 Store_field(
tmp, 3, Val_int(insn[j-1].
detail->x86.operands[
i].avx_bcast));
396 Store_field(
tmp, 4, Val_int(insn[j-1].
detail->x86.operands[
i].avx_zero_opmask));
397 tmp2 = caml_alloc(1, 0);
398 Store_field(tmp2, 0,
tmp);
399 Store_field(
array,
i, tmp2);
403 Store_field(op_info_val, 16,
array);
406 Store_field(arch_info, 0, op_info_val);
408 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_PPC: variant tag 4. Operand kinds reg / imm / mem(base,disp) /
   crx(scale,reg,cond); switch case labels missing from this extract. */
412 arch_info = caml_alloc(1, 4);
414 op_info_val = caml_alloc(4, 0);
416 Store_field(op_info_val, 0, Val_int(insn[j-1].
detail->ppc.bc));
417 Store_field(op_info_val, 1, Val_int(insn[j-1].
detail->ppc.bh));
418 Store_field(op_info_val, 2, Val_bool(insn[j-1].
detail->ppc.update_cr0));
420 lcount = insn[j-1].detail->ppc.op_count;
422 array = caml_alloc(lcount, 0);
423 for (
i = 0;
i < lcount;
i++) {
424 tmp2 = caml_alloc(1, 0);
425 switch(insn[j-1].
detail->ppc.operands[
i].type) {
427 tmp = caml_alloc(1, 1);
428 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->ppc.operands[
i].reg));
431 tmp = caml_alloc(1, 2);
432 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->ppc.operands[
i].imm));
435 tmp = caml_alloc(1, 3);
436 tmp3 = caml_alloc(2, 0);
437 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->ppc.operands[
i].mem.base));
438 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->ppc.operands[
i].mem.disp));
439 Store_field(
tmp, 0, tmp3);
442 tmp = caml_alloc(1, 4);
443 tmp3 = caml_alloc(3, 0);
444 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->ppc.operands[
i].crx.scale));
445 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->ppc.operands[
i].crx.reg));
446 Store_field(tmp3, 2, Val_int(insn[j-1].
detail->ppc.operands[
i].crx.cond));
447 Store_field(
tmp, 0, tmp3);
451 Store_field(tmp2, 0,
tmp);
452 Store_field(
array,
i, tmp2);
457 Store_field(op_info_val, 3,
array);
460 Store_field(arch_info, 0, op_info_val);
462 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_SPARC: variant tag 5. Operand kinds reg / imm /
   mem(base,index,disp); switch case labels missing from this extract. */
467 arch_info = caml_alloc(1, 5);
469 op_info_val = caml_alloc(3, 0);
471 Store_field(op_info_val, 0, Val_int(insn[j-1].
detail->sparc.cc));
472 Store_field(op_info_val, 1, Val_int(insn[j-1].
detail->sparc.hint));
474 lcount = insn[j-1].detail->sparc.op_count;
476 array = caml_alloc(lcount, 0);
477 for (
i = 0;
i < lcount;
i++) {
478 tmp2 = caml_alloc(1, 0);
479 switch(insn[j-1].
detail->sparc.operands[
i].type) {
481 tmp = caml_alloc(1, 1);
482 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->sparc.operands[
i].reg));
485 tmp = caml_alloc(1, 2);
486 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->sparc.operands[
i].imm));
489 tmp = caml_alloc(1, 3);
490 tmp3 = caml_alloc(3, 0);
491 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->sparc.operands[
i].mem.base));
492 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->sparc.operands[
i].mem.index));
493 Store_field(tmp3, 2, Val_int(insn[j-1].
detail->sparc.operands[
i].mem.disp));
494 Store_field(
tmp, 0, tmp3);
498 Store_field(tmp2, 0,
tmp);
499 Store_field(
array,
i, tmp2);
504 Store_field(op_info_val, 2,
array);
507 Store_field(arch_info, 0, op_info_val);
509 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_SYSZ: variant tag 6. Note mem.length/mem.disp are boxed with
   caml_copy_int64 (64-bit values), unlike the Val_int fields elsewhere.
   Switch case labels are missing from this extract. */
514 arch_info = caml_alloc(1, 6);
516 op_info_val = caml_alloc(2, 0);
518 Store_field(op_info_val, 0, Val_int(insn[j-1].
detail->sysz.cc));
520 lcount = insn[j-1].detail->sysz.op_count;
522 array = caml_alloc(lcount, 0);
523 for (
i = 0;
i < lcount;
i++) {
524 tmp2 = caml_alloc(1, 0);
525 switch(insn[j-1].
detail->sysz.operands[
i].type) {
527 tmp = caml_alloc(1, 1);
528 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->sysz.operands[
i].reg));
531 tmp = caml_alloc(1, 2);
532 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->sysz.operands[
i].reg));
535 tmp = caml_alloc(1, 3);
536 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->sysz.operands[
i].imm));
539 tmp = caml_alloc(1, 4);
540 tmp3 = caml_alloc(4, 0);
541 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->sysz.operands[
i].mem.base));
542 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->sysz.operands[
i].mem.index));
543 Store_field(tmp3, 2, caml_copy_int64(insn[j-1].
detail->sysz.operands[
i].mem.length));
544 Store_field(tmp3, 3, caml_copy_int64(insn[j-1].
detail->sysz.operands[
i].mem.disp));
545 Store_field(
tmp, 0, tmp3);
549 Store_field(tmp2, 0,
tmp);
550 Store_field(
array,
i, tmp2);
555 Store_field(op_info_val, 1,
array);
558 Store_field(arch_info, 0, op_info_val);
560 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_XCORE: variant tag 7. mem.disp/mem.direct are boxed Int64;
   switch case labels missing from this extract. */
565 arch_info = caml_alloc(1, 7);
567 op_info_val = caml_alloc(1, 0);
569 lcount = insn[j-1].detail->xcore.op_count;
571 array = caml_alloc(lcount, 0);
572 for (
i = 0;
i < lcount;
i++) {
573 tmp2 = caml_alloc(1, 0);
574 switch(insn[j-1].
detail->xcore.operands[
i].type) {
576 tmp = caml_alloc(1, 1);
577 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->xcore.operands[
i].reg));
580 tmp = caml_alloc(1, 2);
581 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->xcore.operands[
i].imm));
584 tmp = caml_alloc(1, 3);
585 tmp3 = caml_alloc(4, 0);
586 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->xcore.operands[
i].mem.base));
587 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->xcore.operands[
i].mem.index));
588 Store_field(tmp3, 2, caml_copy_int64(insn[j-1].
detail->xcore.operands[
i].mem.disp));
589 Store_field(tmp3, 3, caml_copy_int64(insn[j-1].
detail->xcore.operands[
i].mem.direct));
590 Store_field(
tmp, 0, tmp3);
594 Store_field(tmp2, 0,
tmp);
595 Store_field(
array,
i, tmp2);
600 Store_field(op_info_val, 0,
array);
603 Store_field(arch_info, 0, op_info_val);
605 Store_field(rec_insn, 9, arch_info);
/* CS_ARCH_M680X: variant tag 8. Operand kinds imm / reg / indexed(idx) /
   relative(rel) / extended(ext) / direct / constant; switch case labels
   missing from this extract. Each operand record also carries size+access. */
610 arch_info = caml_alloc(1, 8);
612 op_info_val = caml_alloc(2, 0);
613 Store_field(op_info_val, 0, Val_int(insn[j-1].
detail->m680x.flags));
615 lcount = insn[j-1].detail->m680x.op_count;
617 array = caml_alloc(lcount, 0);
618 for (
i = 0;
i < lcount;
i++) {
619 tmp2 = caml_alloc(3, 0);
620 switch(insn[j-1].
detail->m680x.operands[
i].type) {
622 tmp = caml_alloc(1, 1);
623 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].imm));
626 tmp = caml_alloc(1, 2);
627 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].reg));
630 tmp = caml_alloc(1, 3);
631 tmp3 = caml_alloc(7, 0);
632 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.base_reg));
633 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.offset_reg));
634 Store_field(tmp3, 2, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.offset));
635 Store_field(tmp3, 3, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.offset_addr));
636 Store_field(tmp3, 4, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.offset_bits));
637 Store_field(tmp3, 5, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.inc_dec));
638 Store_field(tmp3, 6, Val_int(insn[j-1].
detail->m680x.operands[
i].idx.flags));
639 Store_field(
tmp, 0, tmp3);
642 tmp = caml_alloc(1, 4);
643 tmp3 = caml_alloc(2, 0);
644 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].rel.address));
645 Store_field(tmp3, 1, Val_int(insn[j-1].
detail->m680x.operands[
i].rel.offset));
646 Store_field(
tmp, 0, tmp3);
649 tmp = caml_alloc(1, 5);
650 tmp3 = caml_alloc(2, 0);
651 Store_field(tmp3, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].ext.address));
652 Store_field(tmp3, 1, Val_bool(insn[j-1].
detail->m680x.operands[
i].ext.indirect));
653 Store_field(
tmp, 0, tmp3);
656 tmp = caml_alloc(1, 6);
657 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].direct_addr));
660 tmp = caml_alloc(1, 7);
661 Store_field(
tmp, 0, Val_int(insn[j-1].
detail->m680x.operands[
i].const_val));
665 Store_field(tmp2, 0,
tmp);
666 Store_field(tmp2, 1, Val_int(insn[j-1].
detail->m680x.operands[
i].size));
667 Store_field(tmp2, 2, Val_int(insn[j-1].
detail->m680x.operands[
i].access));
668 Store_field(
array,
i, tmp2);
673 Store_field(op_info_val, 1,
array);
676 Store_field(arch_info, 0, op_info_val);
678 Store_field(rec_insn, 9, arch_info);
/* Prepend the finished record to the accumulator: cons = rec_insn :: list
   (the `list = cons;` step is on a line missing from this extract). */
686 Store_field(cons, 0, rec_insn);
687 Store_field(cons, 1, list);
/* NOTE(review): fragment of an OCaml stub — the function signature and the
   arch/mode switch cases are missing from this extract. Decodes _arch and
   the _mode list, then reads code buffer, start address and count. */
700 CAMLparam5(_arch, _mode,
_code, _addr, _count);
707 size_t count, code_len;
709 switch (Int_val(_arch)) {
744 caml_invalid_argument(
"Invalid arch");
/* NOTE(review): a bare `return` after CAMLparam bypasses CAMLreturn's
   local-roots unwinding, which the OCaml FFI manual requires — confirm. */
745 return Val_emptylist;
/* _mode is an OCaml list of mode flags; each element is folded into the
   capstone mode mask by the (missing) switch cases. */
748 while (_mode != Val_emptylist) {
749 head =
Field(_mode, 0);
750 switch (Int_val(head)) {
833 caml_invalid_argument(
"Invalid mode");
834 return Val_emptylist;
836 _mode =
Field(_mode, 1);
841 return Val_emptylist;
845 code_len = caml_string_length(
_code);
846 addr = Int64_val(_addr);
847 count = Int64_val(_count);
/* Fragment of a second disassembly stub that takes an already-open capstone
   handle (_handle) instead of arch/mode; signature not visible here. */
854 CAMLparam5(_arch, _handle,
_code, _addr, _count);
860 handle = Int64_val(_handle);
862 arch = Int_val(_arch);
864 code_len = caml_string_length(
_code);
865 addr = Int64_val(_addr);
866 count = Int64_val(_count);
/* Fragment of an open/create stub: validates _arch, folds the _mode list
   (switch cases missing), then allocates a one-field result block.
   NOTE(review): bare `return Val_emptylist` after CAMLparam skips
   CAMLreturn's root unwinding — confirm against the OCaml FFI rules. */
873 CAMLparam2(_arch, _mode);
874 CAMLlocal2(list, head);
879 list = Val_emptylist;
881 switch (Int_val(_arch)) {
916 caml_invalid_argument(
"Invalid arch");
917 return Val_emptylist;
921 while (_mode != Val_emptylist) {
922 head =
Field(_mode, 0);
923 switch (Int_val(head)) {
1006 caml_invalid_argument(
"Invalid mode");
1007 return Val_emptylist;
1009 _mode =
Field(_mode, 1);
1013 CAMLreturn(Val_int(0));
1016 result = caml_alloc(1, 0);
/* Fragment of the cs_option wrapper: maps the OCaml option variant to a
   capstone CS_OPT_* code (cases missing here), then forwards
   handle / opt / value and returns the raw cs_err as an int. */
1023 CAMLparam3(_handle, _opt, _value);
1027 switch (Int_val(_opt)) {
1047 caml_invalid_argument(
"Invalid option");
1051 err =
cs_option(Int64_val(_handle), opt, Int64_val(_value));
1053 CAMLreturn(Val_int(
err));
/* Fragments of three name-lookup wrappers (presumably cs_reg_name,
   cs_insn_name, cs_group_name): raise Invalid_argument on a NULL result,
   otherwise copy the C string into the OCaml heap.
   NOTE(review): the third wrapper reuses the "invalid insn_id" message —
   if it wraps cs_group_name this looks like a copy-paste slip; confirm
   before changing the runtime string. */
1060 caml_invalid_argument(
"invalid reg_id");
1064 return caml_copy_string(
name);
1071 caml_invalid_argument(
"invalid insn_id");
1075 return caml_copy_string(
name);
1082 caml_invalid_argument(
"invalid insn_id");
1086 return caml_copy_string(
name);
/* Fragment of the close stub: unbox the Int64 handle; the cs_close call
   itself is on lines missing from this extract. */
1097 CAMLparam1(_handle);
1100 h = Int64_val(_handle);