public class AMD64MacroAssembler extends AMD64Assembler
Modifier and Type | Class and Description |
---|---|
static class |
AMD64MacroAssembler.ExtendMode |
AMD64Assembler.AMD64BinaryArithmetic, AMD64Assembler.AMD64ImmOp, AMD64Assembler.AMD64MIOp, AMD64Assembler.AMD64MOp, AMD64Assembler.AMD64MROp, AMD64Assembler.AMD64Op, AMD64Assembler.AMD64RMIOp, AMD64Assembler.AMD64RMOp, AMD64Assembler.AMD64RROp, AMD64Assembler.AMD64Shift, AMD64Assembler.AMD64Z0Op, AMD64Assembler.ConditionFlag, AMD64Assembler.EvexGatherOp, AMD64Assembler.Options, AMD64Assembler.SSEMROp, AMD64Assembler.SSEOp, AMD64Assembler.VexAESOp, AMD64Assembler.VexFloatCompareOp, AMD64Assembler.VexGatherOp, AMD64Assembler.VexGeneralMoveOp, AMD64Assembler.VexGeneralPurposeRMOp, AMD64Assembler.VexGeneralPurposeRMVOp, AMD64Assembler.VexGeneralPurposeRVMOp, AMD64Assembler.VexMaskedMoveOp, AMD64Assembler.VexMoveMaskOp, AMD64Assembler.VexMoveOp, AMD64Assembler.VexMRIOp, AMD64Assembler.VexMROp, AMD64Assembler.VexOp, AMD64Assembler.VexRMIOp, AMD64Assembler.VexRMOp, AMD64Assembler.VexRRIOp, AMD64Assembler.VexRROp, AMD64Assembler.VexRVMConvertOp, AMD64Assembler.VexRVMIOp, AMD64Assembler.VexRVMOp, AMD64Assembler.VexRVMROp, AMD64Assembler.VexShiftImmOp, AMD64Assembler.VexShiftOp
AMD64BaseAssembler.AddressDisplacementAnnotation, AMD64BaseAssembler.EVEXComparisonPredicate, AMD64BaseAssembler.EVEXPrefixConfig, AMD64BaseAssembler.EVEXTuple, AMD64BaseAssembler.OperandDataAnnotation, AMD64BaseAssembler.OperandSize, AMD64BaseAssembler.VEXPrefixConfig
Assembler.CodeAnnotation, Assembler.InstructionCounter, Assembler.LabelHint
JCC_ERRATUM_MITIGATION_BOUNDARY, MODRM_IN_BYTES, OPCODE_IN_BYTES, P_0F, P_0F38, P_0F3A
DEFAULT_DISP8_SCALE, force4ByteNonZeroDisplacements
codePatchingAnnotationConsumer
Constructor and Description |
---|
AMD64MacroAssembler(TargetDescription target) |
AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues) |
AMD64MacroAssembler(TargetDescription target,
OptionValues optionValues,
boolean hasIntelJccErratum) |
Modifier and Type | Method and Description |
---|---|
void |
addqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
alignBeforeCall(boolean align,
int prefixInstructionSize)
Emits alignment before a direct call to a fixed address.
|
void |
andlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
andqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
applyMOpAndJcc(AMD64Assembler.AMD64MOp op,
AMD64BaseAssembler.OperandSize size,
Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
boolean annotateImm,
IntConsumer applyBeforeFusedPair) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Supplier<AMD64Address> src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget) |
void |
cmplAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmplAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmplAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpptr(Register src1,
AMD64Address src2) |
void |
cmpptr(Register src1,
Register src2) |
int |
cmpqAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
cmpqAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
int |
cmpqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
declAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
decqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
decrementl(Register reg) |
void |
decrementl(Register reg,
int value) |
void |
decrementq(Register reg) |
void |
decrementq(Register reg,
int value) |
int |
directCall(long address,
Register scratch) |
int |
directJmp(long address,
Register scratch) |
void |
fcos(Register dest,
Register value,
AMD64Address tmp) |
void |
flog(Register dest,
Register value,
boolean base10,
AMD64Address tmp) |
void |
fpop() |
void |
fsin(Register dest,
Register value,
AMD64Address tmp) |
void |
ftan(Register dest,
Register value,
AMD64Address tmp) |
void |
incqAndJcc(Register dst,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
incrementl(Register reg,
int value) |
void |
incrementq(Register reg) |
void |
incrementq(Register reg,
int value) |
int |
indirectCall(Register callReg)
Emits an indirect call instruction.
|
int |
indirectCall(Register callReg,
boolean mitigateDecodingAsDirectCall)
Emits an indirect call instruction.
|
boolean |
isAVX() |
static boolean |
isAVX(AMD64 arch) |
void |
loadAndExtendAVX(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
AMD64Address src,
Stride strideSrc) |
void |
loadAndExtendAVX(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
Register src,
Stride strideSrc) |
void |
loadAndExtendSSE(AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
AMD64Address src,
Stride strideSrc) |
void |
loadAndExtendSSE(AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
Register src,
Stride strideSrc) |
void |
movdbl(AMD64Address dst,
Register src) |
void |
movdbl(Register dst,
AMD64Address src) |
void |
movdbl(Register dst,
Register src) |
void |
movdqu(AVXKind.AVXSize size,
AMD64Address dst,
Register src) |
void |
movdqu(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
movdqu(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
movflt(AMD64Address dst,
Register src) |
void |
movflt(Register dst,
AMD64Address src) |
void |
movflt(Register dst,
Register src) |
void |
movlong(AMD64Address dst,
long src)
Non-atomic write of a 64-bit constant to memory.
|
void |
movptr(AMD64Address dst,
Register src) |
void |
movptr(Register dst,
AMD64Address src) |
void |
movSZx(AMD64BaseAssembler.OperandSize operandSize,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address src) |
void |
movSZx(Stride strideSrc,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
AMD64Address src)
Load one, two, four or eight bytes, according to
strideSrc, into dst and zero-
or sign-extend depending on extendMode. |
void |
packusdw(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
packuswb(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
palignr(AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
void |
palignr(AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2,
int imm8) |
void |
pand(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
pand(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pand(AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2) |
void |
pandn(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pandU(AVXKind.AVXSize size,
Register dst,
AMD64Address src,
Register tmp)
PAND with unaligned memory operand.
|
void |
pcmpeq(AVXKind.AVXSize size,
JavaKind elementKind,
Register dst,
AMD64Address src)
Compares all packed bytes/words/dwords in
dst to src . |
void |
pcmpeq(AVXKind.AVXSize vectorSize,
JavaKind elementKind,
Register dst,
Register src)
Compares all packed bytes/words/dwords in
dst to src . |
void |
pcmpeq(AVXKind.AVXSize size,
Stride elementStride,
Register dst,
AMD64Address src)
Compares all packed bytes/words/dwords in
dst to src . |
void |
pcmpeq(AVXKind.AVXSize vectorSize,
Stride elementStride,
Register dst,
Register src)
Compares all packed bytes/words/dwords in
dst to src . |
void |
pcmpeqb(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
pcmpeqb(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pcmpeqd(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
pcmpeqd(AVXKind.AVXSize vectorSize,
Register dst,
Register src) |
void |
pcmpeqw(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
pcmpeqw(AVXKind.AVXSize vectorSize,
Register dst,
Register src) |
void |
pcmpgtb(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pcmpgtd(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pmovmsk(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pmovSZx(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
AMD64Address src,
Stride strideSrc) |
void |
pmovSZx(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
Register src,
Stride strideSrc) |
void |
pmovSZx(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
Register src,
Stride strideSrc,
int displacement) |
void |
pmovSZx(AVXKind.AVXSize size,
AMD64MacroAssembler.ExtendMode extendMode,
Register dst,
Stride strideDst,
Register src,
Stride strideSrc,
Register index,
int displacement)
Load elements from address
(src, index, displacement) into vector register
dst, and zero- or sign-extend them to fit strideDst. |
void |
por(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pshufb(AVXKind.AVXSize size,
Register dst,
AMD64Address src) |
void |
pshufb(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pslld(AVXKind.AVXSize size,
Register dst,
int imm8) |
void |
pslld(AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
void |
psllw(AVXKind.AVXSize size,
Register dst,
int imm8) |
void |
psllw(AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
void |
psrld(AVXKind.AVXSize size,
Register dst,
int imm8) |
void |
psrld(AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
void |
psrlw(AVXKind.AVXSize size,
Register dst,
int imm8) |
void |
psrlw(AVXKind.AVXSize size,
Register dst,
Register src,
int imm8) |
void |
ptest(AVXKind.AVXSize size,
Register dst) |
void |
ptest(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
ptestU(AVXKind.AVXSize size,
Register dst,
AMD64Address src,
Register tmp)
PTEST with unaligned memory operand.
|
void |
pxor(AVXKind.AVXSize size,
Register dst,
Register src) |
void |
pxor(AVXKind.AVXSize size,
Register dst,
Register src1,
Register src2) |
void |
setl(AMD64Assembler.ConditionFlag cc,
Register dst) |
void |
setq(AMD64Assembler.ConditionFlag cc,
Register dst) |
void |
sublAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
sublAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
subqAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
subqAndJcc(Register dst,
Register src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
AMD64Address src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp,
IntConsumer applyBeforeFusedPair) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testAndJcc(AMD64BaseAssembler.OperandSize size,
Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testbAndJcc(Register src1,
AMD64Address src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testbAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testlAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testlAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testqAndJcc(Register src,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
testqAndJcc(Register src1,
Register src2,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
xorlAndJcc(Register dst,
AMD64Address src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
xorlAndJcc(Register dst,
int imm32,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
void |
xorqAndJcc(Register dst,
AMD64Address src,
AMD64Assembler.ConditionFlag cc,
Label branchTarget,
boolean isShortJmp) |
addl, addl, addl, addpd, addpd, addq, addq, addq, addressInBytes, addsd, addsd, aesdec, aesdeclast, aesenc, aesenclast, align, andl, andl, andpd, andpd, andq, andq, andq, bsfq, bsrl, bsrq, bswapl, bswapq, btrq, call, call, call, cdql, cdqq, clflush, clflushopt, cmovl, cmovl, cmovq, cmovq, cmpb, cmpl, cmpl, cmpl, cmpq, cmpq, cmpq, cmpw, cmpxchgb, cmpxchgl, cmpxchgq, cmpxchgw, cvtdq2pd, cvtsi2sdl, cvtsi2sdq, cvttpd2dq, cvttsd2sil, cvttsd2siq, cvttss2sil, decl, decl, decq, divsd, emit, emit, emit, emit, ensureUniquePC, ensureWithinBoundary, evmovdqu16, evmovdqu16, evmovdqu16, evmovdqu16, evmovdqu64, evmovdqu64, evpbroadcastw, evpcmpeqb, evpcmpgtb, evpcmpgtb, evpcmpuw, evpcmpuw, evpmovwb, evpmovwb, evpmovzxbw, evpmovzxbw, fcos, ffree, fincstp, fldd, fldlg2, fldln2, flds, fnstswAX, fprem, fptan, fsin, fstp, fstpd, fstps, fwait, fxch, fyl2x, getPlaceholder, getPrefixInBytes, getPrefixInBytes, getPrefixInBytes, getPrefixInBytes, hlt, illegal, imull, imulq, incl, incl, incq, int3, jcc, jcc, jcc, jcc, jccb, jmp, jmp, jmp, jmp, jmpb, jmpWithoutAlignment, kmovb, kmovb, kmovb, kmovd, kmovd, kmovd, kmovq, kmovq, kmovq, kmovw, kmovw, kmovw, kortestd, kortestq, ktestd, ktestq, lead, leaq, leave, lfence, lock, makeAddress, makeAddress, mayCrossBoundary, membar, mitigateJCCErratum, mitigateJCCErratum, movapd, movaps, movb, movb, movb, movddup, movdl, movdl, movdq, movdq, movdq, movdqa, movdqu, movdqu, movdqu, movl, movl, movl, movl, movl, movl, movlhps, movlpd, movq, movq, movq, movq, movq, movq, movsbl, movsbl, movsbq, movsbq, movsd, movsd, movsd, movslq, movslq, movslq, movss, movss, movss, movswl, movswq, movw, movw, movw, movzbl, movzbl, movzbq, movzbq, movzwl, movzwq, mulpd, mulpd, mulsd, mulsd, mulss, negl, negq, nop, nop, notl, notq, nullCheck, orl, orl, orl, orq, orq, orq, packusdw, packuswb, paddd, paddq, palignr, pand, pand, pandn, patchJumpTarget, pause, pclmulqdq, pcmpeqb, pcmpeqb, pcmpeqd, pcmpeqd, pcmpeqw, pcmpeqw, pcmpestri, pcmpestri, pcmpgtb, pcmpgtd, pextrw, 
pinsrw, pmovmskb, pmovsxbd, pmovsxbd, pmovsxbq, pmovsxbq, pmovsxbw, pmovsxbw, pmovsxdq, pmovsxdq, pmovsxwd, pmovsxwd, pmovsxwq, pmovsxwq, pmovzxbd, pmovzxbd, pmovzxbq, pmovzxbq, pmovzxbw, pmovzxbw, pmovzxdq, pmovzxdq, pmovzxwd, pmovzxwd, pmovzxwq, pmovzxwq, pop, popcntl, popfq, por, prefetchnta, prefetcht0, prefetcht1, prefetcht2, prefetchw, pshufb, pshufb, pshufd, pshuflw, pslld, pslldq, psllq, psllq, psllw, psrad, psrld, psrldq, psrlq, psrlw, psubd, psubusb, psubusb, punpcklbw, push, pushfq, pxor, rcpps, rdtsc, rdtscp, repStosb, repStosq, ret, sarl, sarl, sarq, sarq, sbbq, setb, sfence, shll, shll, shlq, shlq, shlxl, shrl, shrl, shrq, shrq, sqrtsd, subl, subl, subl, subpd, subq, subq, subqWide, subsd, subsd, testl, testl, testl, testq, testq, ucomisd, ucomiss, unpckhpd, unpcklpd, vmovdqu, vmovdqu, vmovdqu64, vmovdqu64, vpand, vpandn, vpclmulhqhqdq, vpclmulhqlqdq, vpclmullqhqdq, vpclmullqlqdq, vpclmulqdq, vpcmpeqb, vpcmpeqd, vpcmpeqw, vpmovmskb, vpmovzxbw, vpor, vpshufb, vpslld, vpslldq, vpsllw, vpsrld, vpsrldq, vpsrlw, vptest, vpxor, vpxor, vzeroupper, xaddb, xaddl, xaddq, xaddw, xchgb, xchgl, xchgq, xchgw, xorb, xorl, xorpd, xorpd, xorps, xorps, xorq
annotatePatchingImmediate, emitModRM, emitModRM, emitOperandHelper, emitOperandHelper, emitOperandHelper, emitOperandHelper, emitVEX, encode, evexPrefix, evexPrefix, getLFlag, getRXB, getRXB, inRC, isAVX512Register, isVariableLengthAVX512Register, needsRex, needsRex, prefix, prefix, prefix, prefix, prefix, prefix, prefixb, prefixq, prefixq, prefixq, rexw, setForce4ByteNonZeroDisplacements, simdPrefix, simdPrefix, simdPrefix, simdPrefix, supports, supportsCPUFeature, vexPrefix, vexPrefix, vexPrefix, vexPrefix, vexPrefix
addFeatures, bind, close, copy, createLabelName, emitByte, emitByte, emitInt, emitInt, emitLong, emitLong, emitShort, emitShort, emitString, emitString, emitString0, getByte, getFeatures, getInstructionCounter, getInt, getMachineCodeCallDisplacementOffset, getReturnAddressSize, getShort, inlineObjects, isCurrentRegionFeature, isTargetMP, nameOf, position, removeFeatures, requestLabelHint, reset, setCodePatchingAnnotationConsumer
public AMD64MacroAssembler(TargetDescription target)
public AMD64MacroAssembler(TargetDescription target, OptionValues optionValues)
public AMD64MacroAssembler(TargetDescription target, OptionValues optionValues, boolean hasIntelJccErratum)
public final void decrementq(Register reg)
public final void decrementq(Register reg, int value)
public final void incrementq(Register reg)
public final void incrementq(Register reg, int value)
public final void movptr(Register dst, AMD64Address src)
public final void movptr(AMD64Address dst, Register src)
public final void cmpptr(Register src1, Register src2)
public final void cmpptr(Register src1, AMD64Address src2)
public final void decrementl(Register reg)
public final void decrementl(Register reg, int value)
public final void incrementl(Register reg, int value)
public final void movflt(Register dst, Register src)
public final void movflt(Register dst, AMD64Address src)
public final void movflt(AMD64Address dst, Register src)
public final void movdbl(Register dst, Register src)
public final void movdbl(Register dst, AMD64Address src)
public final void movdbl(AMD64Address dst, Register src)
public final void movlong(AMD64Address dst, long src)
public final void setl(AMD64Assembler.ConditionFlag cc, Register dst)
public final void setq(AMD64Assembler.ConditionFlag cc, Register dst)
public final void flog(Register dest, Register value, boolean base10, AMD64Address tmp)
public final void fsin(Register dest, Register value, AMD64Address tmp)
public final void fcos(Register dest, Register value, AMD64Address tmp)
public final void ftan(Register dest, Register value, AMD64Address tmp)
public final void fpop()
public final void alignBeforeCall(boolean align, int prefixInstructionSize)
Emits alignment before a direct call to a fixed address: 1) when align
is true, the fixed address, i.e., the displacement of the call
instruction, should be aligned to 4 bytes; 2) when useBranchesWithin32ByteBoundary
is true, the call instruction should be aligned with a 32-byte boundary.

prefixInstructionSize - size of the additional instruction to be emitted before the call
instruction. This is used in the HotSpot inline cache convention, where a movq
instruction of the cached receiver type to the rax register must be emitted
before the call instruction.

public final int indirectCall(Register callReg)
public final int indirectCall(Register callReg, boolean mitigateDecodingAsDirectCall)
The NativeCall::is_call_before(address pc)
function in HotSpot determines that there
is a direct call instruction whose last byte is at pc - 1
if the byte at pc - 5
is 0xE8. An indirect call can thus be incorrectly decoded as a direct call if
the preceding instructions match this pattern. To avoid this,
mitigateDecodingAsDirectCall == true
will insert sufficient nops to avoid the false decoding.

public final int directCall(long address, Register scratch)
public final int directJmp(long address, Register scratch)
public void applyMOpAndJcc(AMD64Assembler.AMD64MOp op, AMD64BaseAssembler.OperandSize size, Register dst, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testlAndJcc(Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testqAndJcc(Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, AMD64Address src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testlAndJcc(Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testqAndJcc(Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void testbAndJcc(Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void testbAndJcc(Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, boolean annotateImm, IntConsumer applyBeforeFusedPair)
public final void cmplAndJcc(Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpqAndJcc(Register src, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmplAndJcc(Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final int cmpqAndJcc(Register src1, Register src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp, IntConsumer applyBeforeFusedPair)
public final void cmplAndJcc(Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final int cmpqAndJcc(Register src1, AMD64Address src2, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void cmpAndJcc(AMD64BaseAssembler.OperandSize size, Register src1, Supplier<AMD64Address> src2, AMD64Assembler.ConditionFlag cc, Label branchTarget)
public final void andlAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void andqAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void addqAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void sublAndJcc(Register dst, Register src, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void subqAndJcc(Register dst, Register src, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void sublAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void subqAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void incqAndJcc(Register dst, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void declAndJcc(Register dst, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void decqAndJcc(Register dst, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void xorlAndJcc(Register dst, int imm32, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void xorlAndJcc(Register dst, AMD64Address src, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void xorqAndJcc(Register dst, AMD64Address src, AMD64Assembler.ConditionFlag cc, Label branchTarget, boolean isShortJmp)
public final void movSZx(AMD64BaseAssembler.OperandSize operandSize, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address src)
public final void movSZx(Stride strideSrc, AMD64MacroAssembler.ExtendMode extendMode, Register dst, AMD64Address src)
Load one, two, four or eight bytes, according to strideSrc, into dst and zero-
or sign-extend depending on extendMode.

public final void pmovSZx(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, Register src, Stride strideSrc, int displacement)
public final void pmovSZx(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, Register src, Stride strideSrc, Register index, int displacement)
Load elements from address (src, index, displacement) into vector register dst,
and zero- or sign-extend them to fit strideDst.

size - vector size. May be AVXKind.AVXSize.XMM or AVXKind.AVXSize.YMM.
dst - an XMM or YMM vector register.
strideDst - target stride. Must be greater than or equal to strideSrc.
src - the source address.
strideSrc - source stride. Must be smaller than or equal to strideDst.
index - address index offset, scaled by strideSrc.
displacement - address displacement in bytes. If strideDst is greater than
strideSrc, this displacement is scaled by the ratio of the former and
latter strides, e.g. if strideDst is Stride.S4 and strideSrc is Stride.S2,
the displacement is halved.

public final void pmovSZx(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, AMD64Address src, Stride strideSrc)
public final void pmovSZx(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, Register src, Stride strideSrc)
public final void pmovmsk(AVXKind.AVXSize size, Register dst, Register src)
public final void movdqu(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void movdqu(AVXKind.AVXSize size, AMD64Address dst, Register src)
public final void movdqu(AVXKind.AVXSize size, Register dst, Register src)
public final void pcmpeq(AVXKind.AVXSize vectorSize, Stride elementStride, Register dst, Register src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public final void pcmpeq(AVXKind.AVXSize vectorSize, JavaKind elementKind, Register dst, Register src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public final void pcmpeqw(AVXKind.AVXSize vectorSize, Register dst, Register src)
public final void pcmpeqd(AVXKind.AVXSize vectorSize, Register dst, Register src)
public final void pcmpeqb(AVXKind.AVXSize size, Register dst, Register src)
public final void pcmpeq(AVXKind.AVXSize size, Stride elementStride, Register dst, AMD64Address src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public final void pcmpeq(AVXKind.AVXSize size, JavaKind elementKind, Register dst, AMD64Address src)
Compares all packed bytes/words/dwords in dst to src. Matching values are set
to all ones (0xff, 0xffff, ...), non-matching values are set to zero.

public final void pcmpeqb(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void pcmpeqw(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void pcmpeqd(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void pcmpgtb(AVXKind.AVXSize size, Register dst, Register src)
public final void pcmpgtd(AVXKind.AVXSize size, Register dst, Register src)
public final void loadAndExtendAVX(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, Register src, Stride strideSrc)
public final void loadAndExtendAVX(AVXKind.AVXSize size, AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, AMD64Address src, Stride strideSrc)
public final void loadAndExtendSSE(AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, AMD64Address src, Stride strideSrc)
public final void loadAndExtendSSE(AMD64MacroAssembler.ExtendMode extendMode, Register dst, Stride strideDst, Register src, Stride strideSrc)
public final void packuswb(AVXKind.AVXSize size, Register dst, Register src)
public final void packusdw(AVXKind.AVXSize size, Register dst, Register src)
public final void palignr(AVXKind.AVXSize size, Register dst, Register src, int imm8)
public final void palignr(AVXKind.AVXSize size, Register dst, Register src1, Register src2, int imm8)
public final void pand(AVXKind.AVXSize size, Register dst, Register src)
public final void pand(AVXKind.AVXSize size, Register dst, Register src1, Register src2)
public final void pand(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void pandU(AVXKind.AVXSize size, Register dst, AMD64Address src, Register tmp)
public final void pandn(AVXKind.AVXSize size, Register dst, Register src)
public final void por(AVXKind.AVXSize size, Register dst, Register src)
public final void pxor(AVXKind.AVXSize size, Register dst, Register src)
public final void pxor(AVXKind.AVXSize size, Register dst, Register src1, Register src2)
public final void psllw(AVXKind.AVXSize size, Register dst, int imm8)
public final void psllw(AVXKind.AVXSize size, Register dst, Register src, int imm8)
public final void psrlw(AVXKind.AVXSize size, Register dst, int imm8)
public final void psrlw(AVXKind.AVXSize size, Register dst, Register src, int imm8)
public final void pslld(AVXKind.AVXSize size, Register dst, int imm8)
public final void pslld(AVXKind.AVXSize size, Register dst, Register src, int imm8)
public final void psrld(AVXKind.AVXSize size, Register dst, int imm8)
public final void psrld(AVXKind.AVXSize size, Register dst, Register src, int imm8)
public final void pshufb(AVXKind.AVXSize size, Register dst, Register src)
public final void pshufb(AVXKind.AVXSize size, Register dst, AMD64Address src)
public final void ptest(AVXKind.AVXSize size, Register dst)
ptest
in class AMD64Assembler
public final void ptest(AVXKind.AVXSize size, Register dst, Register src)
public final void ptestU(AVXKind.AVXSize size, Register dst, AMD64Address src, Register tmp)
public boolean isAVX()
public static boolean isAVX(AMD64 arch)