| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| r = *(uint8 *) (s + n) | int64 s, immediate n | | int64 r | | movzbq n(s),r |
| r = *(uint16 *) (s + n) | int64 s, immediate n | | int64 r | | movzwq n(s),r |
| r = *(uint32 *) (s + n) | int64 s, immediate n | | int64 r | | movl n(s),r%32 |
| r = *(uint64 *) (s + n) | int64 s, immediate n | | int64 r | | movq n(s),r |
| r = *( int8 *) (s + n) | int64 s, immediate n | | int64 r | | movsbq n(s),r |
| r = *( int16 *) (s + n) | int64 s, immediate n | | int64 r | | movswq n(s),r |
| r = *( int32 *) (s + n) | int64 s, immediate n | | int64 r | | movslq n(s),r |
| r = *( int64 *) (s + n) | int64 s, immediate n | | int64 r | | movq n(s),r |
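The unsigned load forms zero-extend into the full 64-bit destination register (movzbq, movzwq, or a plain movl for 32-bit values, which the CPU zero-extends implicitly), while the signed forms sign-extend (movsbq, movswq, movslq). A minimal illustrative fragment, not part of the original table (the variable names are invented and the surrounding boilerplate of a complete qhasm function is omitted):

```
int64 p
int64 u
int64 v

u = *(uint8 *) (p + 0)   # movzbq 0(p),u : upper 56 bits of u become 0
v = *( int8 *) (p + 0)   # movsbq 0(p),v : upper 56 bits copy the sign bit
```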
| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| r = *(uint8 *) (s + t) | int64 s, int64 t | | int64 r | | movzbq (s,t),r |
| r = *(uint16 *) (s + t) | int64 s, int64 t | | int64 r | | movzwq (s,t),r |
| r = *(uint32 *) (s + t) | int64 s, int64 t | | int64 r | | movl (s,t),r%32 |
| r = *(uint64 *) (s + t) | int64 s, int64 t | | int64 r | | movq (s,t),r |
| r = *( int8 *) (s + t) | int64 s, int64 t | | int64 r | | movsbq (s,t),r |
| r = *( int16 *) (s + t) | int64 s, int64 t | | int64 r | | movswq (s,t),r |
| r = *( int32 *) (s + t) | int64 s, int64 t | | int64 r | | movslq (s,t),r |
| r = *( int64 *) (s + t) | int64 s, int64 t | | int64 r | | movq (s,t),r |
| r = *(uint8 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movzbq (s,t,8),r |
| r = *(uint16 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movzwq (s,t,8),r |
| r = *(uint32 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movl (s,t,8),r%32 |
| r = *(uint64 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movq (s,t,8),r |
| r = *( int8 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movsbq (s,t,8),r |
| r = *( int16 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movswq (s,t,8),r |
| r = *( int32 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movslq (s,t,8),r |
| r = *( int64 *) (s + t * 8) | int64 s, int64 t | | int64 r | | movq (s,t,8),r |
| r = *(uint8 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movzbq n(s,t,8),r |
| r = *(uint16 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movzwq n(s,t,8),r |
| r = *(uint32 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movl n(s,t,8),r%32 |
| r = *(uint64 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movq n(s,t,8),r |
| r = *( int8 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movsbq n(s,t,8),r |
| r = *( int16 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movswq n(s,t,8),r |
| r = *( int32 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movslq n(s,t,8),r |
| r = *( int64 *) (s + n + t * 8) | int64 s, int64 t, immediate n | | int64 r | | movq n(s,t,8),r |
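All four load addressing forms, base plus immediate (s + n), base plus index (s + t), base plus scaled index (s + t * 8), and all three combined (s + n + t * 8), map one-to-one onto x86-64 memory operands. A hypothetical fragment, not from the original reference, reading a table of uint64 entries:

```
int64 table
int64 i
int64 x
int64 y

x = *(uint64 *) (table + 0)           # movq 0(table),x
y = *(uint64 *) (table + i * 8)       # movq (table,i,8),y
x = *(uint64 *) (table + 8 + i * 8)   # movq 8(table,i,8),x
```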
| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| (uint32) r += *(uint32 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned<, carry | addl n(s),r%32 |
| (uint32) r += *(uint32 *) (s + n) + carry | int64 r, int64 s, immediate n | carry | int64 r | =, unsigned>, unsigned<, carry | adcl n(s),r%32 |
| (uint32) r -= *(uint32 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | subl n(s),r%32 |
| (uint32) r &= *(uint32 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | andl n(s),r%32 |
| (uint32) r \|= *(uint32 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | orl n(s),r%32 |
| (uint32) r ^= *(uint32 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | xorl n(s),r%32 |
| (uint32) r += *(uint32 *) (s + n + t * 8) | int64 r, int64 s, int64 t, immediate n | | int64 r | =, unsigned>, unsigned< | addl n(s,t,8),r%32 |
| (uint32) r -= *(uint32 *) (s + n + t * 8) | int64 r, int64 s, int64 t, immediate n | | int64 r | =, unsigned>, unsigned< | subl n(s,t,8),r%32 |
| (uint32) r &= *(uint32 *) (s + n + t * 8) | int64 r, int64 s, int64 t, immediate n | | int64 r | =, unsigned>, unsigned< | andl n(s,t,8),r%32 |
| (uint32) r \|= *(uint32 *) (s + n + t * 8) | int64 r, int64 s, int64 t, immediate n | | int64 r | =, unsigned>, unsigned< | orl n(s,t,8),r%32 |
| (uint32) r ^= *(uint32 *) (s + n + t * 8) | int64 r, int64 s, int64 t, immediate n | | int64 r | =, unsigned>, unsigned< | xorl n(s,t,8),r%32 |
| (uint32) r += *(uint32 *) (s + t * 8) | int64 r, int64 s, int64 t | | int64 r | =, unsigned>, unsigned< | addl (s,t,8),r%32 |
| (uint32) r -= *(uint32 *) (s + t * 8) | int64 r, int64 s, int64 t | | int64 r | =, unsigned>, unsigned< | subl (s,t,8),r%32 |
| (uint32) r &= *(uint32 *) (s + t * 8) | int64 r, int64 s, int64 t | | int64 r | =, unsigned>, unsigned< | andl (s,t,8),r%32 |
| (uint32) r \|= *(uint32 *) (s + t * 8) | int64 r, int64 s, int64 t | | int64 r | =, unsigned>, unsigned< | orl (s,t,8),r%32 |
| (uint32) r ^= *(uint32 *) (s + t * 8) | int64 r, int64 s, int64 t | | int64 r | =, unsigned>, unsigned< | xorl (s,t,8),r%32 |
| r += *(uint64 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned<, carry | addq n(s),r |
| r += *(uint64 *) (s + n) + carry | int64 r, int64 s, immediate n | carry | int64 r | =, unsigned>, unsigned<, carry | adcq n(s),r |
| r -= *(uint64 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | subq n(s),r |
| r &= *(uint64 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | andq n(s),r |
| r \|= *(uint64 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | orq n(s),r |
| r ^= *(uint64 *) (s + n) | int64 r, int64 s, immediate n | | int64 r | =, unsigned>, unsigned< | xorq n(s),r |
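The add/adc pairs above are the building blocks for multi-word arithmetic: the plain addition sets the carry flag, and the `+ carry` form consumes it (and sets it again for the next limb). A hedged sketch, not part of the original table, adding a 128-bit value held in memory at p into the register pair lo/hi:

```
int64 p
int64 lo
int64 hi

lo += *(uint64 *) (p + 0)           # addq 0(p),lo : sets carry
hi += *(uint64 *) (p + 8) + carry   # adcq 8(p),hi : consumes carry
```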
| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| r = s | stack64 s | | int64 r | | movq s,r |
| r = bottom s | stack64 s | | int64 r | | movl s,r%32 |
| r = top s | stack64 s | | int64 r | | movl 4+s,r%32 |
| r += s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | addq s,r |
| r += s + carry | int64 r, stack64 s | carry | int64 r | =, unsigned>, unsigned<, carry | adcq s,r |
| r -= s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | subq s,r |
| r &= s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | andq s,r |
| r \|= s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | orq s,r |
| r ^= s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | xorq s,r |
| (uint32) r += s | int64 r, stack64 s | | int64 r | =, unsigned>, unsigned< | addl s,r%32 |
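stack64 variables live in the current stack frame; the rows above either copy them back into a register, extract the low or high 32 bits via bottom and top, or use them directly as memory operands of arithmetic instructions. A sketch, not from the original, that also uses the int64-to-stack64 spill listed near the end of this reference:

```
int64 x
int64 lo
stack64 saved

saved = x           # movq x,saved : spill x to the stack frame
lo = bottom saved   # movl saved,lo%32 : low 32 bits of the spilled value
x += saved          # addq saved,x : stack slot used as a memory operand
```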
| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| r = &s | stack128 s | | int64 r, stack128 s | | leaq s,r |
| r = ((uint32 *)&s)[0] | stack128 s | | int64 r | | movl s,r%32 |
| r = ((uint32 *)&s)[1] | stack128 s | | int64 r | | movl 4+s,r%32 |
| r = ((uint32 *)&s)[2] | stack128 s | | int64 r | | movl 8+s,r%32 |
| r = ((uint32 *)&s)[3] | stack128 s | | int64 r | | movl 12+s,r%32 |
| r = s | stack128 s | | int6464 r | | movdqa s,r |
| r = *(int128 *) (s + n) | int64 s, immediate n | | int6464 r | | movdqa n(s),r |
| int32323232 r += s | int6464 r, stack128 s | | int6464 r | | paddd s,r |
| uint32323232 r += s | int6464 r, stack128 s | | int6464 r | | paddd s,r |
| int32323232 r -= s | int6464 r, stack128 s | | int6464 r | | psubd s,r |
| uint32323232 r -= s | int6464 r, stack128 s | | int6464 r | | psubd s,r |
| uint32323232 r += *(int128 *) (s + n) | int6464 r, int64 s, immediate n | | int6464 r | | paddd n(s),r |
| r = &s | stack512 s | | int64 r, stack512 s | | leaq s,r |
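The int6464/stack128/int128 rows cover 128-bit SSE values treated as four 32-bit lanes: movdqa moves whole 128-bit values, and paddd/psubd add or subtract lane by lane (the signed and unsigned qhasm forms assemble to the same instruction). An illustrative fragment, not part of the original reference:

```
int64 p
int6464 a
stack128 b

a = *(int128 *) (p + 0)                  # movdqa 0(p),a
uint32323232 a += b                      # paddd b,a : four 32-bit additions
uint32323232 a += *(int128 *) (p + 16)   # paddd 16(p),a
```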
| Qhasm Instruction | Input | Evaluated flags | Output | Set flags | Assembly Code |
|---|---|---|---|---|---|
| *(uint8 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movb r%8,n(s) |
| *(uint16 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movw r%16,n(s) |
| *(uint32 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movl r%32,n(s) |
| *(uint64 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movq r,n(s) |
| *( int8 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movb r%8,n(s) |
| *( int16 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movw r%16,n(s) |
| *( int32 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movl r%32,n(s) |
| *( int64 *) (s + n) = r | int64 r, int64 s, immediate n | | | | movq r,n(s) |
| *(uint8 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movb $m,n(s) |
| *(uint16 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movw $m,n(s) |
| *(uint32 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movl $m,n(s) |
| *(uint64 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movq $m,n(s) |
| *( int8 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movb $m,n(s) |
| *( int16 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movw $m,n(s) |
| *( int32 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movl $m,n(s) |
| *( int64 *) (s + n) = m | int64 s, immediate m, immediate n | | | | movq $m,n(s) |
| *(uint8 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movb r%8,(s,t) |
| *(uint16 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movw r%16,(s,t) |
| *(uint32 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movl r%32,(s,t) |
| *(uint64 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movq r,(s,t) |
| *( int8 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movb r%8,(s,t) |
| *( int16 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movw r%16,(s,t) |
| *( int32 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movl r%32,(s,t) |
| *( int64 *) (s + t) = r | int64 r, int64 s, int64 t | | | | movq r,(s,t) |
| r = s | int64 s | | stack64 r | | movq s,r |
| inplace r bottom = s | int64 s, stack64 r | | stack64 r | | movl s%32,r |
| ((uint32 *)&r)[0] = 0 | | | stack128 r | | movl $0,r |
| ((uint32 *)&r)[1] = 0 | stack128 r | | stack128 r | | movl $0,4+r |
| ((uint32 *)&r)[2] = 0 | stack128 r | | stack128 r | | movl $0,8+r |
| ((uint32 *)&r)[3] = 0 | stack128 r | | stack128 r | | movl $0,12+r |
| r = s | int6464 s | | stack128 r | | movdqa s,r |
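The store rows mirror the loads: the low 8, 16, 32, or 64 bits of a register (r%8, r%16, r%32, r) or an immediate are written out, either to memory through a pointer or to a stack variable. A short illustrative fragment, not from the original:

```
int64 out
int64 i
int64 x

*(uint64 *) (out + 0) = x    # movq x,0(out)
*(uint32 *) (out + 8) = x    # movl x%32,8(out)
*(uint8 *) (out + i) = x     # movb x%8,(out,i)
*(uint64 *) (out + 16) = 0   # movq $0,16(out)
```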