8205528: Base64 encoding algorithm using AVX512 instructions
Reviewed-by: kvn, psandoz
Contributed-by: smita.kamath@intel.com
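For reference, a minimal sketch (not part of this changeset) of the Java-level path the new stub accelerates; the input literal is an arbitrary example:

    byte[] input = "some data".getBytes();        // arbitrary example input
    byte[] encoded = java.util.Base64.getEncoder().encode(input);
    // Encoder.encode0() calls the new private encodeBlock(src, sp, sl, dst, dp, isURL),
    // which C2 replaces with StubRoutines::_base64_encodeBlock when UseBASE64Intrinsics
    // is enabled (on by default on CPUs with AVX512VL and AVX512BW, see vm_version_x86.cpp).
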
--- a/src/hotspot/cpu/x86/assembler_x86.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/assembler_x86.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -133,6 +133,8 @@
_index = noreg;
_scale = no_scale;
_disp = disp;
+ _xmmindex = xnoreg;
+ _isxmmindex = false;
switch (rtype) {
case relocInfo::external_word_type:
_rspec = external_word_Relocation::spec(loc);
@@ -172,6 +174,8 @@
_scale = no_scale;
_disp = (intptr_t) loc;
_rspec = spec;
+ _xmmindex = xnoreg;
+ _isxmmindex = false;
}
#endif // _LP64
@@ -604,6 +608,21 @@
emit_operand((Register)reg, base, index, scale, disp, rspec);
}
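+// For EVEX-encoded VSIB (xmm index) forms only the low 4 bits of each register encoding fit
+// into the ModRM/SIB bytes; the upper bits are carried in the EVEX prefix, so mask with 0xf
+// before delegating to the general-purpose emit_operand.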
+void Assembler::emit_operand(XMMRegister reg, Register base, XMMRegister index,
+ Address::ScaleFactor scale, int disp,
+ RelocationHolder const& rspec) {
+ if (UseAVX > 2) {
+ int xreg_enc = reg->encoding();
+ int xmmindex_enc = index->encoding();
+ XMMRegister new_reg = as_XMMRegister(xreg_enc & 0xf);
+ XMMRegister new_index = as_XMMRegister(xmmindex_enc & 0xf);
+ emit_operand((Register)new_reg, base, (Register)new_index, scale, disp, rspec);
+ } else {
+ emit_operand((Register)reg, base, (Register)index, scale, disp, rspec);
+ }
+}
+
+
// Secret local extension to Assembler::WhichOperand:
#define end_pc_operand (_WhichOperand_limit)
@@ -1104,8 +1123,12 @@
}
void Assembler::emit_operand(XMMRegister reg, Address adr) {
- emit_operand(reg, adr._base, adr._index, adr._scale, adr._disp,
- adr._rspec);
+ if (adr.isxmmindex()) {
+ emit_operand(reg, adr._base, adr._xmmindex, adr._scale, adr._disp, adr._rspec);
+ } else {
+ emit_operand(reg, adr._base, adr._index, adr._scale, adr._disp,
+ adr._rspec);
+ }
}
// MMX operations
@@ -3419,6 +3442,15 @@
emit_int8(imm8);
}
+void Assembler::evpermi2q(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_evex(), "");
+ InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ true);
+ attributes.set_is_evex_instruction();
+ int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x76);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
void Assembler::pause() {
emit_int8((unsigned char)0xF3);
@@ -3870,6 +3902,17 @@
emit_operand(dst, src);
}
+void Assembler::vpmovzxbw(XMMRegister dst, XMMRegister src, int vector_len) {
+ assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
+ vector_len == AVX_256bit? VM_Version::supports_avx2() :
+ vector_len == AVX_512bit? VM_Version::supports_avx512bw() : 0, "");
+ InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
+ int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x30);
+ emit_int8((unsigned char) (0xC0 | encode));
+}
+
+
void Assembler::evpmovzxbw(XMMRegister dst, KRegister mask, Address src, int vector_len) {
assert(is_vector_masking(), "");
assert(VM_Version::supports_avx512vlbw(), "");
@@ -3883,7 +3926,6 @@
emit_int8(0x30);
emit_operand(dst, src);
}
-
void Assembler::evpmovwb(Address dst, XMMRegister src, int vector_len) {
assert(VM_Version::supports_avx512vlbw(), "");
assert(src != xnoreg, "sanity");
@@ -3911,6 +3953,28 @@
emit_operand(src, dst);
}
+void Assembler::evpmovdb(Address dst, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_evex(), "");
+ assert(src != xnoreg, "sanity");
+ InstructionMark im(this);
+ InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ true, /* uses_vl */ false);
+ attributes.set_address_attributes(/* tuple_type */ EVEX_QVM, /* input_size_in_bits */ EVEX_NObit);
+ attributes.set_is_evex_instruction();
+ vex_prefix(dst, 0, src->encoding(), VEX_SIMD_F3, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x31);
+ emit_operand(src, dst);
+}
+
+void Assembler::vpmovzxwd(XMMRegister dst, XMMRegister src, int vector_len) {
+ assert(vector_len == AVX_128bit? VM_Version::supports_avx() :
+ vector_len == AVX_256bit? VM_Version::supports_avx2() :
+ vector_len == AVX_512bit? VM_Version::supports_evex() : 0, "");
+ InstructionAttr attributes(vector_len, /* rex_w */ false, /* legacy_mode */ _legacy_mode_bw, /* no_mask_reg */ true, /* uses_vl */ false);
+ int encode = vex_prefix_and_encode(dst->encoding(), 0, src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x33);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
// generic
void Assembler::pop(Register dst) {
int encode = prefix_and_encode(dst->encoding());
@@ -6080,6 +6144,24 @@
emit_int8((unsigned char)(0xC0 | encode));
}
+void Assembler::evpsrlvw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_avx512bw(), "");
+ InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
+ attributes.set_is_evex_instruction();
+ int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x10);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
+void Assembler::evpsllvw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_avx512bw(), "");
+ InstructionAttr attributes(vector_len, /* rex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
+ attributes.set_is_evex_instruction();
+ int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8(0x12);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
// Shift packed integers arithmetically right by specified number of bits.
void Assembler::psraw(XMMRegister dst, int shift) {
NOT_LP64(assert(VM_Version::supports_sse2(), ""));
@@ -6181,6 +6263,15 @@
emit_operand(dst, src);
}
+void Assembler::vpandq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_evex(), "");
+ InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
+ int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
+ emit_int8((unsigned char)0xDB);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
+
void Assembler::pandn(XMMRegister dst, XMMRegister src) {
NOT_LP64(assert(VM_Version::supports_sse2(), ""));
InstructionAttr attributes(AVX_128bit, /* vex_w */ VM_Version::supports_evex(), /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
@@ -6216,6 +6307,15 @@
emit_operand(dst, src);
}
+void Assembler::vporq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len) {
+ assert(VM_Version::supports_evex(), "");
+ InstructionAttr attributes(vector_len, /* vex_w */ true, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
+ int encode = vex_prefix_and_encode(dst->encoding(), nds->encoding(), src->encoding(), VEX_SIMD_66, VEX_OPCODE_0F, &attributes);
+ emit_int8((unsigned char)0xEB);
+ emit_int8((unsigned char)(0xC0 | encode));
+}
+
+
void Assembler::pxor(XMMRegister dst, XMMRegister src) {
NOT_LP64(assert(VM_Version::supports_sse2(), ""));
InstructionAttr attributes(AVX_128bit, /* rex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
@@ -6849,6 +6949,20 @@
emit_int8((unsigned char)(0xC0 | encode));
}
+void Assembler::evpgatherdd(XMMRegister dst, KRegister mask, Address src, int vector_len) {
+ assert(VM_Version::supports_evex(), "");
+ assert(dst != xnoreg, "sanity");
+ InstructionMark im(this);
+ InstructionAttr attributes(vector_len, /* vex_w */ false, /* legacy_mode */ false, /* no_mask_reg */ false, /* uses_vl */ true);
+ attributes.set_address_attributes(/* tuple_type */ EVEX_T1S, /* input_size_in_bits */ EVEX_64bit);
+ attributes.reset_is_clear_context();
+ attributes.set_embedded_opmask_register_specifier(mask);
+ attributes.set_is_evex_instruction();
+ // swap src<->dst for encoding
+ vex_prefix(src, 0, dst->encoding(), VEX_SIMD_66, VEX_OPCODE_0F_38, &attributes);
+ emit_int8((unsigned char)0x90);
+ emit_operand(dst, src);
+}
// Carry-Less Multiplication Quadword
void Assembler::pclmulqdq(XMMRegister dst, XMMRegister src, int mask) {
@@ -7474,7 +7588,12 @@
void Assembler::vex_prefix(Address adr, int nds_enc, int xreg_enc, VexSimdPrefix pre, VexOpcode opc, InstructionAttr *attributes) {
bool vex_r = ((xreg_enc & 8) == 8) ? 1 : 0;
bool vex_b = adr.base_needs_rex();
- bool vex_x = adr.index_needs_rex();
+ bool vex_x;
+ if (adr.isxmmindex()) {
+ vex_x = adr.xmmindex_needs_rex();
+ } else {
+ vex_x = adr.index_needs_rex();
+ }
set_attributes(attributes);
attributes->set_current_assembler(this);
@@ -7511,7 +7630,13 @@
if (UseAVX > 2 && !attributes->is_legacy_mode())
{
bool evex_r = (xreg_enc >= 16);
- bool evex_v = (nds_enc >= 16);
+ bool evex_v;
+ // With a VSIB (xmm) index, EVEX.V' carries the high bit of the index register, since the index may be one of the higher-order XMM registers (16-31)
+ if (adr.isxmmindex()) {
+ evex_v = ((adr._xmmindex->encoding() > 15) ? true : false);
+ } else {
+ evex_v = (nds_enc >= 16);
+ }
attributes->set_is_evex_instruction();
evex_prefix(vex_r, vex_b, vex_x, evex_r, evex_v, nds_enc, pre, opc);
} else {
--- a/src/hotspot/cpu/x86/assembler_x86.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/assembler_x86.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -184,8 +184,10 @@
private:
Register _base;
Register _index;
+ XMMRegister _xmmindex;
ScaleFactor _scale;
int _disp;
+ bool _isxmmindex;
RelocationHolder _rspec;
// Easily misused constructors make them private
@@ -201,8 +203,10 @@
Address()
: _base(noreg),
_index(noreg),
+ _xmmindex(xnoreg),
_scale(no_scale),
- _disp(0) {
+ _disp(0),
+ _isxmmindex(false){
}
// No default displacement otherwise Register can be implicitly
@@ -211,15 +215,19 @@
Address(Register base, int disp)
: _base(base),
_index(noreg),
+ _xmmindex(xnoreg),
_scale(no_scale),
- _disp(disp) {
+ _disp(disp),
+ _isxmmindex(false){
}
Address(Register base, Register index, ScaleFactor scale, int disp = 0)
: _base (base),
_index(index),
+ _xmmindex(xnoreg),
_scale(scale),
- _disp (disp) {
+ _disp (disp),
+ _isxmmindex(false) {
assert(!index->is_valid() == (scale == Address::no_scale),
"inconsistent address");
}
@@ -227,13 +235,26 @@
Address(Register base, RegisterOrConstant index, ScaleFactor scale = times_1, int disp = 0)
: _base (base),
_index(index.register_or_noreg()),
+ _xmmindex(xnoreg),
_scale(scale),
- _disp (disp + (index.constant_or_zero() * scale_size(scale))) {
+ _disp (disp + (index.constant_or_zero() * scale_size(scale))),
+ _isxmmindex(false){
if (!index.is_register()) scale = Address::no_scale;
assert(!_index->is_valid() == (scale == Address::no_scale),
"inconsistent address");
}
+ Address(Register base, XMMRegister index, ScaleFactor scale, int disp = 0)
+ : _base (base),
+ _index(noreg),
+ _xmmindex(index),
+ _scale(scale),
+ _disp(disp),
+ _isxmmindex(true) {
+ assert(!index->is_valid() == (scale == Address::no_scale),
+ "inconsistent address");
+ }
+
Address plus_disp(int disp) const {
Address a = (*this);
a._disp += disp;
@@ -269,24 +290,29 @@
Address(Register base, ByteSize disp)
: _base(base),
_index(noreg),
+ _xmmindex(xnoreg),
_scale(no_scale),
- _disp(in_bytes(disp)) {
+ _disp(in_bytes(disp)),
+ _isxmmindex(false){
}
Address(Register base, Register index, ScaleFactor scale, ByteSize disp)
: _base(base),
_index(index),
+ _xmmindex(xnoreg),
_scale(scale),
- _disp(in_bytes(disp)) {
+ _disp(in_bytes(disp)),
+ _isxmmindex(false){
assert(!index->is_valid() == (scale == Address::no_scale),
"inconsistent address");
}
-
Address(Register base, RegisterOrConstant index, ScaleFactor scale, ByteSize disp)
: _base (base),
_index(index.register_or_noreg()),
+ _xmmindex(xnoreg),
_scale(scale),
- _disp (in_bytes(disp) + (index.constant_or_zero() * scale_size(scale))) {
+ _disp (in_bytes(disp) + (index.constant_or_zero() * scale_size(scale))),
+ _isxmmindex(false) {
if (!index.is_register()) scale = Address::no_scale;
assert(!_index->is_valid() == (scale == Address::no_scale),
"inconsistent address");
@@ -298,8 +324,10 @@
bool uses(Register reg) const { return _base == reg || _index == reg; }
Register base() const { return _base; }
Register index() const { return _index; }
+ XMMRegister xmmindex() const { return _xmmindex; }
ScaleFactor scale() const { return _scale; }
int disp() const { return _disp; }
+ bool isxmmindex() const { return _isxmmindex; }
// Convert the raw encoding form into the form expected by the constructor for
// Address. An index of 4 (rsp) corresponds to having no index, so convert
@@ -317,6 +345,10 @@
return _index != noreg &&_index->encoding() >= 8;
}
+ bool xmmindex_needs_rex() const {
+ return _xmmindex != xnoreg && _xmmindex->encoding() >= 8;
+ }
+
relocInfo::relocType reloc() const { return _rspec.type(); }
friend class Assembler;
@@ -683,6 +715,10 @@
RelocationHolder const& rspec,
int rip_relative_correction = 0);
+ void emit_operand(XMMRegister reg, Register base, XMMRegister index,
+ Address::ScaleFactor scale,
+ int disp, RelocationHolder const& rspec);
+
void emit_operand(Register reg, Address adr, int rip_relative_correction = 0);
// operands that only take the original 32bit registers
@@ -1554,6 +1590,7 @@
void vpermq(XMMRegister dst, XMMRegister src, int imm8);
void vperm2i128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8);
void vperm2f128(XMMRegister dst, XMMRegister nds, XMMRegister src, int imm8);
+ void evpermi2q(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
void pause();
@@ -1620,11 +1657,16 @@
void pmovzxbw(XMMRegister dst, Address src);
void vpmovzxbw( XMMRegister dst, Address src, int vector_len);
+ void vpmovzxbw(XMMRegister dst, XMMRegister src, int vector_len);
void evpmovzxbw(XMMRegister dst, KRegister mask, Address src, int vector_len);
void evpmovwb(Address dst, XMMRegister src, int vector_len);
void evpmovwb(Address dst, KRegister mask, XMMRegister src, int vector_len);
+ void vpmovzxwd(XMMRegister dst, XMMRegister src, int vector_len);
+
+ void evpmovdb(Address dst, XMMRegister src, int vector_len);
+
#ifndef _LP64 // no 32bit push/pop on amd64
void popl(Address dst);
#endif
@@ -2026,6 +2068,8 @@
void vpsrlw(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len);
void vpsrld(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len);
void vpsrlq(XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len);
+ void evpsrlvw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
+ void evpsllvw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
// Arithmetic shift right packed integers (only shorts and ints, no instructions for longs)
void psraw(XMMRegister dst, int shift);
@@ -2041,6 +2085,7 @@
void pand(XMMRegister dst, XMMRegister src);
void vpand(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
void vpand(XMMRegister dst, XMMRegister nds, Address src, int vector_len);
+ void vpandq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
// Andn packed integers
void pandn(XMMRegister dst, XMMRegister src);
@@ -2049,6 +2094,7 @@
void por(XMMRegister dst, XMMRegister src);
void vpor(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
void vpor(XMMRegister dst, XMMRegister nds, Address src, int vector_len);
+ void vporq(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
// Xor packed integers
void pxor(XMMRegister dst, XMMRegister src);
@@ -2119,6 +2165,8 @@
void evpbroadcastd(XMMRegister dst, Register src, int vector_len);
void evpbroadcastq(XMMRegister dst, Register src, int vector_len);
+ void evpgatherdd(XMMRegister dst, KRegister k1, Address src, int vector_len);
+
// Carry-Less Multiplication Quadword
void pclmulqdq(XMMRegister dst, XMMRegister src, int mask);
void vpclmulqdq(XMMRegister dst, XMMRegister nds, XMMRegister src, int mask);
--- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -3570,6 +3570,15 @@
}
}
+void MacroAssembler::evmovdquq(XMMRegister dst, AddressLiteral src, int vector_len, Register rscratch) {
+ if (reachable(src)) {
+ Assembler::evmovdquq(dst, as_Address(src), vector_len);
+ } else {
+ lea(rscratch, src);
+ Assembler::evmovdquq(dst, Address(rscratch, 0), vector_len);
+ }
+}
+
void MacroAssembler::movdqa(XMMRegister dst, AddressLiteral src) {
if (reachable(src)) {
Assembler::movdqa(dst, as_Address(src));
--- a/src/hotspot/cpu/x86/macroAssembler_x86.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -1095,6 +1095,10 @@
void vmovdqu(XMMRegister dst, Address src);
void vmovdqu(XMMRegister dst, XMMRegister src);
void vmovdqu(XMMRegister dst, AddressLiteral src);
+ void evmovdquq(XMMRegister dst, Address src, int vector_len) { Assembler::evmovdquq(dst, src, vector_len); }
+ void evmovdquq(XMMRegister dst, XMMRegister src, int vector_len) { Assembler::evmovdquq(dst, src, vector_len); }
+ void evmovdquq(Address dst, XMMRegister src, int vector_len) { Assembler::evmovdquq(dst, src, vector_len); }
+ void evmovdquq(XMMRegister dst, AddressLiteral src, int vector_len, Register rscratch);
// Move Aligned Double Quadword
void movdqa(XMMRegister dst, Address src) { Assembler::movdqa(dst, src); }
@@ -1208,6 +1212,8 @@
void vpcmpeqw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
void vpmovzxbw(XMMRegister dst, Address src, int vector_len);
+ void vpmovzxbw(XMMRegister dst, XMMRegister src, int vector_len) { Assembler::vpmovzxbw(dst, src, vector_len); }
+
void vpmovmskb(Register dst, XMMRegister src);
void vpmullw(XMMRegister dst, XMMRegister nds, XMMRegister src, int vector_len);
--- a/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -4547,6 +4547,408 @@
return start;
}
+ // Base64 character set, stored as one zero-extended dword per character
+ // ('A' = 0x41, 'B' = 0x42, ...) so that evpgatherdd can index it directly
+ address base64_charset_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "base64_charset");
+ address start = __ pc();
+ __ emit_data64(0x0000004200000041, relocInfo::none);
+ __ emit_data64(0x0000004400000043, relocInfo::none);
+ __ emit_data64(0x0000004600000045, relocInfo::none);
+ __ emit_data64(0x0000004800000047, relocInfo::none);
+ __ emit_data64(0x0000004a00000049, relocInfo::none);
+ __ emit_data64(0x0000004c0000004b, relocInfo::none);
+ __ emit_data64(0x0000004e0000004d, relocInfo::none);
+ __ emit_data64(0x000000500000004f, relocInfo::none);
+ __ emit_data64(0x0000005200000051, relocInfo::none);
+ __ emit_data64(0x0000005400000053, relocInfo::none);
+ __ emit_data64(0x0000005600000055, relocInfo::none);
+ __ emit_data64(0x0000005800000057, relocInfo::none);
+ __ emit_data64(0x0000005a00000059, relocInfo::none);
+ __ emit_data64(0x0000006200000061, relocInfo::none);
+ __ emit_data64(0x0000006400000063, relocInfo::none);
+ __ emit_data64(0x0000006600000065, relocInfo::none);
+ __ emit_data64(0x0000006800000067, relocInfo::none);
+ __ emit_data64(0x0000006a00000069, relocInfo::none);
+ __ emit_data64(0x0000006c0000006b, relocInfo::none);
+ __ emit_data64(0x0000006e0000006d, relocInfo::none);
+ __ emit_data64(0x000000700000006f, relocInfo::none);
+ __ emit_data64(0x0000007200000071, relocInfo::none);
+ __ emit_data64(0x0000007400000073, relocInfo::none);
+ __ emit_data64(0x0000007600000075, relocInfo::none);
+ __ emit_data64(0x0000007800000077, relocInfo::none);
+ __ emit_data64(0x0000007a00000079, relocInfo::none);
+ __ emit_data64(0x0000003100000030, relocInfo::none);
+ __ emit_data64(0x0000003300000032, relocInfo::none);
+ __ emit_data64(0x0000003500000034, relocInfo::none);
+ __ emit_data64(0x0000003700000036, relocInfo::none);
+ __ emit_data64(0x0000003900000038, relocInfo::none);
+ __ emit_data64(0x0000002f0000002b, relocInfo::none);
+ return start;
+ }
+
+ // Base64 URL-safe character set: same dword layout, with '-' and '_' in place of '+' and '/'
+ address base64url_charset_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "base64url_charset");
+ address start = __ pc();
+ __ emit_data64(0x0000004200000041, relocInfo::none);
+ __ emit_data64(0x0000004400000043, relocInfo::none);
+ __ emit_data64(0x0000004600000045, relocInfo::none);
+ __ emit_data64(0x0000004800000047, relocInfo::none);
+ __ emit_data64(0x0000004a00000049, relocInfo::none);
+ __ emit_data64(0x0000004c0000004b, relocInfo::none);
+ __ emit_data64(0x0000004e0000004d, relocInfo::none);
+ __ emit_data64(0x000000500000004f, relocInfo::none);
+ __ emit_data64(0x0000005200000051, relocInfo::none);
+ __ emit_data64(0x0000005400000053, relocInfo::none);
+ __ emit_data64(0x0000005600000055, relocInfo::none);
+ __ emit_data64(0x0000005800000057, relocInfo::none);
+ __ emit_data64(0x0000005a00000059, relocInfo::none);
+ __ emit_data64(0x0000006200000061, relocInfo::none);
+ __ emit_data64(0x0000006400000063, relocInfo::none);
+ __ emit_data64(0x0000006600000065, relocInfo::none);
+ __ emit_data64(0x0000006800000067, relocInfo::none);
+ __ emit_data64(0x0000006a00000069, relocInfo::none);
+ __ emit_data64(0x0000006c0000006b, relocInfo::none);
+ __ emit_data64(0x0000006e0000006d, relocInfo::none);
+ __ emit_data64(0x000000700000006f, relocInfo::none);
+ __ emit_data64(0x0000007200000071, relocInfo::none);
+ __ emit_data64(0x0000007400000073, relocInfo::none);
+ __ emit_data64(0x0000007600000075, relocInfo::none);
+ __ emit_data64(0x0000007800000077, relocInfo::none);
+ __ emit_data64(0x0000007a00000079, relocInfo::none);
+ __ emit_data64(0x0000003100000030, relocInfo::none);
+ __ emit_data64(0x0000003300000032, relocInfo::none);
+ __ emit_data64(0x0000003500000034, relocInfo::none);
+ __ emit_data64(0x0000003700000036, relocInfo::none);
+ __ emit_data64(0x0000003900000038, relocInfo::none);
+ __ emit_data64(0x0000005f0000002d, relocInfo::none);
+
+ return start;
+ }
+
+ address base64_bswap_mask_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "bswap_mask_base64");
+ address start = __ pc();
+ __ emit_data64(0x0504038002010080, relocInfo::none);
+ __ emit_data64(0x0b0a098008070680, relocInfo::none);
+ __ emit_data64(0x0908078006050480, relocInfo::none);
+ __ emit_data64(0x0f0e0d800c0b0a80, relocInfo::none);
+ __ emit_data64(0x0605048003020180, relocInfo::none);
+ __ emit_data64(0x0c0b0a8009080780, relocInfo::none);
+ __ emit_data64(0x0504038002010080, relocInfo::none);
+ __ emit_data64(0x0b0a098008070680, relocInfo::none);
+
+ return start;
+ }
+
+ address base64_right_shift_mask_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "right_shift_mask");
+ address start = __ pc();
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+ __ emit_data64(0x0006000400020000, relocInfo::none);
+
+ return start;
+ }
+
+ address base64_left_shift_mask_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "left_shift_mask");
+ address start = __ pc();
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+ __ emit_data64(0x0000000200040000, relocInfo::none);
+
+ return start;
+ }
+
+ address base64_and_mask_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "and_mask");
+ address start = __ pc();
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ __ emit_data64(0x3f003f003f000000, relocInfo::none);
+ return start;
+ }
+
+ address base64_gather_mask_addr() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "gather_mask");
+ address start = __ pc();
+ __ emit_data64(0xffffffffffffffff, relocInfo::none);
+ return start;
+ }
+
+// Code for generating Base64 encoding.
+// Intrinsic function prototype in Base64.java:
+// private void encodeBlock(byte[] src, int sp, int sl, byte[] dst, int dp, boolean isURL) {
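+// Register usage: r11 -> charset table, xmm12..xmm15 -> shuffle/shift/AND masks,
+// k1 -> all-ones gather mask (the incoming k1 value is saved in rbx and restored on exit).
+// The 512-bit loop consumes 72 source bytes and emits 96 encoded bytes per iteration,
+// the 256-bit loop handles 24 -> 32 bytes, and a scalar loop finishes the remaining 3-byte groups.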
+ address generate_base64_encodeBlock() {
+ __ align(CodeEntryAlignment);
+ StubCodeMark mark(this, "StubRoutines", "implEncode");
+ address start = __ pc();
+ __ enter();
+
+ // Save callee-saved registers before using them
+ __ push(r12);
+ __ push(r13);
+ __ push(r14);
+ __ push(r15);
+ __ push(rbx);
+
+ // arguments
+ const Register source = c_rarg0; // Source Array
+ const Register start_offset = c_rarg1; // start offset
+ const Register end_offset = c_rarg2; // end offset
+ const Register dest = c_rarg3; // destination array
+
+#ifndef _WIN64
+ const Register dp = c_rarg4; // Position for writing to dest array
+ const Register isURL = c_rarg5; // Base64 or URL character set
+#else
+ const Address dp_mem(rbp, 6 * wordSize); // on Win64 the 5th and 6th arguments are passed on the stack
+ const Address isURL_mem(rbp, 7 * wordSize);
+ const Register isURL = r10; // pick a volatile register not used for arguments on Windows
+ const Register dp = r12;
+ __ movl(dp, dp_mem);
+ __ movl(isURL, isURL_mem);
+#endif
+
+ const Register length = r14;
+ Label L_process80, L_process32, L_process3, L_exit, L_processdata;
+
+ // calculate length from offsets
+ __ movl(length, end_offset);
+ __ subl(length, start_offset);
+ __ cmpl(length, 0);
+ __ jcc(Assembler::lessEqual, L_exit);
+
+ // Save k1 value in rbx
+ __ kmovql(rbx, k1);
+ __ lea(r11, ExternalAddress(StubRoutines::x86::base64_charset_addr()));
+ // check whether the base64 charset (isURL = 0) or the base64 URL charset (isURL = 1) should be loaded
+ __ cmpl(isURL, 0);
+ __ jcc(Assembler::equal, L_processdata);
+ __ lea(r11, ExternalAddress(StubRoutines::x86::base64url_charset_addr()));
+
+ // load masks required for encoding data
+ __ BIND(L_processdata);
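+ // xmm16/k1: all-ones gather mask; xmm12: shuffle mask pairing each 3 source bytes with a
+ // zero pad byte; xmm13/xmm14: per-word right/left shift counts; xmm15: AND mask for the 6-bit fields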
+ __ movdqu(xmm16, ExternalAddress(StubRoutines::x86::base64_gather_mask_addr()));
+ // Set all 64 bits of the k1 mask register (a register compared with itself always matches).
+ __ evpcmpeqb(k1, xmm16, xmm16, Assembler::AVX_512bit);
+ __ evmovdquq(xmm12, ExternalAddress(StubRoutines::x86::base64_bswap_mask_addr()), Assembler::AVX_256bit, r13);
+ __ evmovdquq(xmm13, ExternalAddress(StubRoutines::x86::base64_right_shift_mask_addr()), Assembler::AVX_512bit, r13);
+ __ evmovdquq(xmm14, ExternalAddress(StubRoutines::x86::base64_left_shift_mask_addr()), Assembler::AVX_512bit, r13);
+ __ evmovdquq(xmm15, ExternalAddress(StubRoutines::x86::base64_and_mask_addr()), Assembler::AVX_512bit, r13);
+
+ // Vector Base64 implementation, producing 96 bytes of encoded data
+ __ BIND(L_process80);
+ __ cmpl(length, 80);
+ __ jcc(Assembler::below, L_process32);
+ __ evmovdquq(xmm0, Address(source, start_offset, Address::times_1, 0), Assembler::AVX_256bit);
+ __ evmovdquq(xmm1, Address(source, start_offset, Address::times_1, 24), Assembler::AVX_256bit);
+ __ evmovdquq(xmm2, Address(source, start_offset, Address::times_1, 48), Assembler::AVX_256bit);
+
+ // permute the input data so that each 128-bit lane holds the 12 source bytes it will encode
+ __ vpermq(xmm3, xmm0, 148, Assembler::AVX_256bit);
+ __ vpermq(xmm4, xmm1, 148, Assembler::AVX_256bit);
+ __ vpermq(xmm5, xmm2, 148, Assembler::AVX_256bit);
+
+ // shuffle the input so that each dword holds 3 source bytes plus a zero pad byte;
+ // this way each 128-bit lane carries 12 data bytes
+ __ vpshufb(xmm3, xmm3, xmm12, Assembler::AVX_256bit);
+ __ vpshufb(xmm4, xmm4, xmm12, Assembler::AVX_256bit);
+ __ vpshufb(xmm5, xmm5, xmm12, Assembler::AVX_256bit);
+
+ // zero-extend bytes to words; each 128-bit lane now carries two 3-byte groups (6 data bytes)
+ __ vpmovzxbw(xmm3, xmm3, Assembler::AVX_512bit);
+ __ vpmovzxbw(xmm4, xmm4, Assembler::AVX_512bit);
+ __ vpmovzxbw(xmm5, xmm5, Assembler::AVX_512bit);
+
+ // Extract bits in the pattern 6, 4+2, 2+4, 6 to turn three 8-bit bytes into four 6-bit values
+ __ evpsrlvw(xmm0, xmm3, xmm13, Assembler::AVX_512bit);
+ __ evpsrlvw(xmm1, xmm4, xmm13, Assembler::AVX_512bit);
+ __ evpsrlvw(xmm2, xmm5, xmm13, Assembler::AVX_512bit);
+
+ __ evpsllvw(xmm3, xmm3, xmm14, Assembler::AVX_512bit);
+ __ evpsllvw(xmm4, xmm4, xmm14, Assembler::AVX_512bit);
+ __ evpsllvw(xmm5, xmm5, xmm14, Assembler::AVX_512bit);
+
+ __ vpsrlq(xmm0, xmm0, 8, Assembler::AVX_512bit);
+ __ vpsrlq(xmm1, xmm1, 8, Assembler::AVX_512bit);
+ __ vpsrlq(xmm2, xmm2, 8, Assembler::AVX_512bit);
+
+ __ vpsllq(xmm3, xmm3, 8, Assembler::AVX_512bit);
+ __ vpsllq(xmm4, xmm4, 8, Assembler::AVX_512bit);
+ __ vpsllq(xmm5, xmm5, 8, Assembler::AVX_512bit);
+
+ __ vpandq(xmm3, xmm3, xmm15, Assembler::AVX_512bit);
+ __ vpandq(xmm4, xmm4, xmm15, Assembler::AVX_512bit);
+ __ vpandq(xmm5, xmm5, xmm15, Assembler::AVX_512bit);
+
+ // OR the shifted pieces together to form the four 6-bit base64 indices
+ __ vporq(xmm3, xmm3, xmm0, Assembler::AVX_512bit);
+ __ vporq(xmm4, xmm4, xmm1, Assembler::AVX_512bit);
+ __ vporq(xmm5, xmm5, xmm2, Assembler::AVX_512bit);
+
+ // Shift the indices down into the low byte of each word
+ __ vpsrlq(xmm3, xmm3, 8, Assembler::AVX_512bit);
+ __ vpsrlq(xmm4, xmm4, 8, Assembler::AVX_512bit);
+ __ vpsrlq(xmm5, xmm5, 8, Assembler::AVX_512bit);
+
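+ // Net effect per 3-byte group {b0, b1, b2}: the four 6-bit indices
+ // b0>>2, ((b0&0x03)<<4)|(b1>>4), ((b1&0x0f)<<2)|(b2>>6), b2&0x3f
+ // now sit one per 16-bit word, ready to be widened to dwords for the gather lookup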
+ // look up each 6-bit index in the base64 character set to fetch its encoding;
+ // the words are widened to dwords because the gather instruction needs dword indices
+ __ vextracti64x4(xmm6, xmm3, 0);
+ __ vpmovzxwd(xmm0, xmm6, Assembler::AVX_512bit);
+ __ vextracti64x4(xmm6, xmm3, 1);
+ __ vpmovzxwd(xmm1, xmm6, Assembler::AVX_512bit);
+
+ __ vextracti64x4(xmm6, xmm4, 0);
+ __ vpmovzxwd(xmm2, xmm6, Assembler::AVX_512bit);
+ __ vextracti64x4(xmm6, xmm4, 1);
+ __ vpmovzxwd(xmm3, xmm6, Assembler::AVX_512bit);
+
+ __ vextracti64x4(xmm4, xmm5, 0);
+ __ vpmovzxwd(xmm6, xmm4, Assembler::AVX_512bit);
+
+ __ vextracti64x4(xmm4, xmm5, 1);
+ __ vpmovzxwd(xmm7, xmm4, Assembler::AVX_512bit);
+
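+ // The gather consumes (zeroes) its mask register, so k2 is reloaded from k1 before every evpgatherdd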
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm4, k2, Address(r11, xmm0, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm5, k2, Address(r11, xmm1, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm8, k2, Address(r11, xmm2, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm9, k2, Address(r11, xmm3, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm10, k2, Address(r11, xmm6, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm11, k2, Address(r11, xmm7, Address::times_4, 0), Assembler::AVX_512bit);
+
+ // Down-convert dwords to bytes; the final output is 6 * 16 = 96 encoded bytes
+ __ evpmovdb(Address(dest, dp, Address::times_1, 0), xmm4, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 16), xmm5, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 32), xmm8, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 48), xmm9, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 64), xmm10, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 80), xmm11, Assembler::AVX_512bit);
+
+ __ addq(dest, 96);
+ __ addq(source, 72);
+ __ subq(length, 72);
+ __ jmp(L_process80);
+
+ // Vector Base64 implementation generating 32 bytes of encoded data
+ __ BIND(L_process32);
+ __ cmpl(length, 32);
+ __ jcc(Assembler::below, L_process3);
+ __ evmovdquq(xmm0, Address(source, start_offset), Assembler::AVX_256bit);
+ __ vpermq(xmm0, xmm0, 148, Assembler::AVX_256bit);
+ __ vpshufb(xmm6, xmm0, xmm12, Assembler::AVX_256bit);
+ __ vpmovzxbw(xmm6, xmm6, Assembler::AVX_512bit);
+ __ evpsrlvw(xmm2, xmm6, xmm13, Assembler::AVX_512bit);
+ __ evpsllvw(xmm3, xmm6, xmm14, Assembler::AVX_512bit);
+
+ __ vpsrlq(xmm2, xmm2, 8, Assembler::AVX_512bit);
+ __ vpsllq(xmm3, xmm3, 8, Assembler::AVX_512bit);
+ __ vpandq(xmm3, xmm3, xmm15, Assembler::AVX_512bit);
+ __ vporq(xmm1, xmm2, xmm3, Assembler::AVX_512bit);
+ __ vpsrlq(xmm1, xmm1, 8, Assembler::AVX_512bit);
+ __ vextracti64x4(xmm9, xmm1, 0);
+ __ vpmovzxwd(xmm6, xmm9, Assembler::AVX_512bit);
+ __ vextracti64x4(xmm9, xmm1, 1);
+ __ vpmovzxwd(xmm5, xmm9, Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm8, k2, Address(r11, xmm6, Address::times_4, 0), Assembler::AVX_512bit);
+ __ kmovql(k2, k1);
+ __ evpgatherdd(xmm10, k2, Address(r11, xmm5, Address::times_4, 0), Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 0), xmm8, Assembler::AVX_512bit);
+ __ evpmovdb(Address(dest, dp, Address::times_1, 16), xmm10, Assembler::AVX_512bit);
+ __ subq(length, 24);
+ __ addq(dest, 32);
+ __ addq(source, 24);
+ __ jmp(L_process32);
+
+ // Scalar data processing takes 3 bytes at a time and produces 4 bytes of encoded data
+ /* This code corresponds to the scalar version of the following snippet in Base64.java
+ ** int bits = (src[sp0++] & 0xff) << 16 |(src[sp0++] & 0xff) << 8 |(src[sp0++] & 0xff);
+ ** dst[dp0++] = (byte)base64[(bits >>> 18) & 0x3f];
+ ** dst[dp0++] = (byte)base64[(bits >>> 12) & 0x3f];
+ ** dst[dp0++] = (byte)base64[(bits >>> 6) & 0x3f];
+ ** dst[dp0++] = (byte)base64[bits & 0x3f];*/
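+ // The charset table stores one dword per character, hence the Address::times_4 scaling on the loads below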
+ __ BIND(L_process3);
+ __ cmpl(length, 3);
+ __ jcc(Assembler::below, L_exit);
+ // Read 1 byte at a time
+ __ movzbl(rax, Address(source, start_offset));
+ __ shll(rax, 0x10);
+ __ movl(r15, rax);
+ __ movzbl(rax, Address(source, start_offset, Address::times_1, 1));
+ __ shll(rax, 0x8);
+ __ movzwl(rax, rax);
+ __ orl(r15, rax);
+ __ movzbl(rax, Address(source, start_offset, Address::times_1, 2));
+ __ orl(rax, r15);
+ // Save 3 bytes read in r15
+ __ movl(r15, rax);
+ __ shrl(rax, 0x12);
+ __ andl(rax, 0x3f);
+ // rax contains the index, r11 contains base64 lookup table
+ __ movb(rax, Address(r11, rax, Address::times_4));
+ // Write the encoded byte to destination
+ __ movb(Address(dest, dp, Address::times_1, 0), rax);
+ __ movl(rax, r15);
+ __ shrl(rax, 0xc);
+ __ andl(rax, 0x3f);
+ __ movb(rax, Address(r11, rax, Address::times_4));
+ __ movb(Address(dest, dp, Address::times_1, 1), rax);
+ __ movl(rax, r15);
+ __ shrl(rax, 0x6);
+ __ andl(rax, 0x3f);
+ __ movb(rax, Address(r11, rax, Address::times_4));
+ __ movb(Address(dest, dp, Address::times_1, 2), rax);
+ __ movl(rax, r15);
+ __ andl(rax, 0x3f);
+ __ movb(rax, Address(r11, rax, Address::times_4));
+ __ movb(Address(dest, dp, Address::times_1, 3), rax);
+ __ subl(length, 3);
+ __ addq(dest, 4);
+ __ addq(source, 3);
+ __ jmp(L_process3);
+ __ BIND(L_exit);
+ // restore k1 register value
+ __ kmovql(k1, rbx);
+ __ pop(rbx);
+ __ pop(r15);
+ __ pop(r14);
+ __ pop(r13);
+ __ pop(r12);
+ __ leave();
+ __ ret(0);
+ return start;
+ }
+
/**
* Arguments:
*
@@ -5428,6 +5830,17 @@
StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
}
+ if (UseBASE64Intrinsics) {
+ StubRoutines::x86::_and_mask = base64_and_mask_addr();
+ StubRoutines::x86::_bswap_mask = base64_bswap_mask_addr();
+ StubRoutines::x86::_base64_charset = base64_charset_addr();
+ StubRoutines::x86::_url_charset = base64url_charset_addr();
+ StubRoutines::x86::_gather_mask = base64_gather_mask_addr();
+ StubRoutines::x86::_left_shift_mask = base64_left_shift_mask_addr();
+ StubRoutines::x86::_right_shift_mask = base64_right_shift_mask_addr();
+ StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
+ }
+
// Safefetch stubs.
generate_safefetch("SafeFetch32", sizeof(int), &StubRoutines::_safefetch32_entry,
&StubRoutines::_safefetch32_fault_pc,
--- a/src/hotspot/cpu/x86/stubRoutines_x86.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/stubRoutines_x86.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -45,6 +45,15 @@
address StubRoutines::x86::_k256_W_adr = NULL;
address StubRoutines::x86::_k512_W_addr = NULL;
address StubRoutines::x86::_pshuffle_byte_flip_mask_addr_sha512 = NULL;
+// Base64 masks
+address StubRoutines::x86::_bswap_mask = NULL;
+address StubRoutines::x86::_base64_charset = NULL;
+address StubRoutines::x86::_gather_mask = NULL;
+address StubRoutines::x86::_right_shift_mask = NULL;
+address StubRoutines::x86::_left_shift_mask = NULL;
+address StubRoutines::x86::_and_mask = NULL;
+address StubRoutines::x86::_url_charset = NULL;
+
#endif
address StubRoutines::x86::_pshuffle_byte_flip_mask_addr = NULL;
--- a/src/hotspot/cpu/x86/stubRoutines_x86.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/stubRoutines_x86.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -138,6 +138,14 @@
static address _k512_W_addr;
// byte flip mask for sha512
static address _pshuffle_byte_flip_mask_addr_sha512;
+ // Masks for base64
+ static address _base64_charset;
+ static address _bswap_mask;
+ static address _gather_mask;
+ static address _right_shift_mask;
+ static address _left_shift_mask;
+ static address _and_mask;
+ static address _url_charset;
#endif
// byte flip mask for sha256
static address _pshuffle_byte_flip_mask_addr;
@@ -198,6 +206,13 @@
static address k256_W_addr() { return _k256_W_adr; }
static address k512_W_addr() { return _k512_W_addr; }
static address pshuffle_byte_flip_mask_addr_sha512() { return _pshuffle_byte_flip_mask_addr_sha512; }
+ static address base64_charset_addr() { return _base64_charset; }
+ static address base64url_charset_addr() { return _url_charset; }
+ static address base64_bswap_mask_addr() { return _bswap_mask; }
+ static address base64_gather_mask_addr() { return _gather_mask; }
+ static address base64_right_shift_mask_addr() { return _right_shift_mask; }
+ static address base64_left_shift_mask_addr() { return _left_shift_mask; }
+ static address base64_and_mask_addr() { return _and_mask; }
#endif
static address pshuffle_byte_flip_mask_addr() { return _pshuffle_byte_flip_mask_addr; }
static void generate_CRC32C_table(bool is_pclmulqdq_supported);
--- a/src/hotspot/cpu/x86/vm_version_x86.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/cpu/x86/vm_version_x86.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -856,6 +856,17 @@
FLAG_SET_DEFAULT(UseGHASHIntrinsics, false);
}
+ // Base64 intrinsics: enabled by default when AVX512VL and AVX512BW are available
+ if ((UseAVX > 2) && supports_avx512vl() && supports_avx512bw()) {
+ if (FLAG_IS_DEFAULT(UseBASE64Intrinsics)) {
+ UseBASE64Intrinsics = true;
+ }
+ } else if (UseBASE64Intrinsics) {
+ if (!FLAG_IS_DEFAULT(UseBASE64Intrinsics))
+ warning("Base64 intrinsic requires EVEX instructions on this CPU");
+ FLAG_SET_DEFAULT(UseBASE64Intrinsics, false);
+ }
+
if (supports_fma() && UseSSE >= 2) { // Check UseSSE since FMA code uses SSE instructions
if (FLAG_IS_DEFAULT(UseFMA)) {
UseFMA = true;
--- a/src/hotspot/share/aot/aotCodeHeap.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/aot/aotCodeHeap.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -520,6 +520,7 @@
SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_counterMode_AESCrypt", address, StubRoutines::_counterMode_AESCrypt);
SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_ghash_processBlocks", address, StubRoutines::_ghash_processBlocks);
+ SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_base64_encodeBlock", address, StubRoutines::_base64_encodeBlock);
SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_crc32c_table_addr", address, StubRoutines::_crc32c_table_addr);
SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_updateBytesCRC32C", address, StubRoutines::_updateBytesCRC32C);
SET_AOT_GLOBAL_SYMBOL_VALUE("_aot_stub_routines_updateBytesAdler32", address, StubRoutines::_updateBytesAdler32);
--- a/src/hotspot/share/classfile/vmSymbols.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/classfile/vmSymbols.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -735,6 +735,9 @@
case vmIntrinsics::_ghash_processBlocks:
if (!UseGHASHIntrinsics) return true;
break;
+ case vmIntrinsics::_base64_encodeBlock:
+ if (!UseBASE64Intrinsics) return true;
+ break;
case vmIntrinsics::_updateBytesCRC32C:
case vmIntrinsics::_updateDirectByteBufferCRC32C:
if (!UseCRC32CIntrinsics) return true;
--- a/src/hotspot/share/classfile/vmSymbols.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/classfile/vmSymbols.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -1017,6 +1017,12 @@
do_name( implCompressMB_name, "implCompressMultiBlock0") \
do_signature(implCompressMB_signature, "([BII)I") \
\
+ /* support for java.util.Base64.Encoder */ \
+ do_class(java_util_Base64_Encoder, "java/util/Base64$Encoder") \
+ do_intrinsic(_base64_encodeBlock, java_util_Base64_Encoder, encodeBlock_name, encodeBlock_signature, F_R) \
+ do_name(encodeBlock_name, "encodeBlock") \
+ do_signature(encodeBlock_signature, "([BII[BIZ)V") \
+ \
/* support for com.sun.crypto.provider.GHASH */ \
do_class(com_sun_crypto_provider_ghash, "com/sun/crypto/provider/GHASH") \
do_intrinsic(_ghash_processBlocks, com_sun_crypto_provider_ghash, processBlocks_name, ghash_processBlocks_signature, F_S) \
--- a/src/hotspot/share/jvmci/vmStructs_jvmci.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/jvmci/vmStructs_jvmci.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -299,6 +299,7 @@
static_field(StubRoutines, _cipherBlockChaining_encryptAESCrypt, address) \
static_field(StubRoutines, _cipherBlockChaining_decryptAESCrypt, address) \
static_field(StubRoutines, _counterMode_AESCrypt, address) \
+ static_field(StubRoutines, _base64_encodeBlock, address) \
static_field(StubRoutines, _ghash_processBlocks, address) \
static_field(StubRoutines, _sha1_implCompress, address) \
static_field(StubRoutines, _sha1_implCompressMB, address) \
--- a/src/hotspot/share/opto/c2compiler.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/opto/c2compiler.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -586,6 +586,7 @@
case vmIntrinsics::_montgomerySquare:
case vmIntrinsics::_vectorizedMismatch:
case vmIntrinsics::_ghash_processBlocks:
+ case vmIntrinsics::_base64_encodeBlock:
case vmIntrinsics::_updateCRC32:
case vmIntrinsics::_updateBytesCRC32:
case vmIntrinsics::_updateByteBufferCRC32:
--- a/src/hotspot/share/opto/escape.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/opto/escape.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -1019,6 +1019,7 @@
strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_decryptAESCrypt") == 0 ||
strcmp(call->as_CallLeaf()->_name, "counterMode_AESCrypt") == 0 ||
strcmp(call->as_CallLeaf()->_name, "ghash_processBlocks") == 0 ||
+ strcmp(call->as_CallLeaf()->_name, "encodeBlock") == 0 ||
strcmp(call->as_CallLeaf()->_name, "sha1_implCompress") == 0 ||
strcmp(call->as_CallLeaf()->_name, "sha1_implCompressMB") == 0 ||
strcmp(call->as_CallLeaf()->_name, "sha256_implCompress") == 0 ||
--- a/src/hotspot/share/opto/library_call.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/opto/library_call.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -298,6 +298,7 @@
Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
bool inline_ghash_processBlocks();
+ bool inline_base64_encodeBlock();
bool inline_sha_implCompress(vmIntrinsics::ID id);
bool inline_digestBase_implCompressMB(int predicate);
bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
@@ -833,6 +834,8 @@
case vmIntrinsics::_ghash_processBlocks:
return inline_ghash_processBlocks();
+ case vmIntrinsics::_base64_encodeBlock:
+ return inline_base64_encodeBlock();
case vmIntrinsics::_encodeISOArray:
case vmIntrinsics::_encodeByteISOArray:
@@ -6084,6 +6087,35 @@
return true;
}
+bool LibraryCallKit::inline_base64_encodeBlock() {
+ address stubAddr;
+ const char *stubName;
+ assert(UseBASE64Intrinsics, "need Base64 intrinsics support");
+ assert(callee()->signature()->size() == 6, "base64_encodeBlock has 6 parameters");
+ stubAddr = StubRoutines::base64_encodeBlock();
+ stubName = "encodeBlock";
+
+ if (!stubAddr) return false;
+ Node* base64obj = argument(0);
+ Node* src = argument(1);
+ Node* offset = argument(2);
+ Node* len = argument(3);
+ Node* dest = argument(4);
+ Node* dp = argument(5);
+ Node* isURL = argument(6);
+
+ Node* src_start = array_element_address(src, intcon(0), T_BYTE);
+ assert(src_start, "source array is NULL");
+ Node* dest_start = array_element_address(dest, intcon(0), T_BYTE);
+ assert(dest_start, "destination array is NULL");
+
+ Node* base64 = make_runtime_call(RC_LEAF,
+ OptoRuntime::base64_encodeBlock_Type(),
+ stubAddr, stubName, TypePtr::BOTTOM,
+ src_start, offset, len, dest_start, dp, isURL);
+ return true;
+}
+
//------------------------------inline_sha_implCompress-----------------------
//
// Calculate SHA (i.e., SHA-1) for single-block byte[] array.
--- a/src/hotspot/share/opto/runtime.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/opto/runtime.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -1123,6 +1123,27 @@
const TypeTuple* range = TypeTuple::make(TypeFunc::Parms, fields);
return TypeFunc::make(domain, range);
}
+// Base64 encode function
+const TypeFunc* OptoRuntime::base64_encodeBlock_Type() {
+ int argcnt = 6;
+
+ const Type** fields = TypeTuple::fields(argcnt);
+ int argp = TypeFunc::Parms;
+ fields[argp++] = TypePtr::NOTNULL; // src array
+ fields[argp++] = TypeInt::INT; // offset
+ fields[argp++] = TypeInt::INT; // length
+ fields[argp++] = TypePtr::NOTNULL; // dest array
+ fields[argp++] = TypeInt::INT; // dp
+ fields[argp++] = TypeInt::BOOL; // isURL
+ assert(argp == TypeFunc::Parms + argcnt, "correct decoding");
+ const TypeTuple* domain = TypeTuple::make(TypeFunc::Parms+argcnt, fields);
+
+ // result type needed
+ fields = TypeTuple::fields(1);
+ fields[TypeFunc::Parms + 0] = NULL; // void
+ const TypeTuple* range = TypeTuple::make(TypeFunc::Parms, fields);
+ return TypeFunc::make(domain, range);
+}
//------------- Interpreter state access for on stack replacement
const TypeFunc* OptoRuntime::osr_end_Type() {
--- a/src/hotspot/share/opto/runtime.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/opto/runtime.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -291,6 +291,7 @@
static const TypeFunc* vectorizedMismatch_Type();
static const TypeFunc* ghash_processBlocks_Type();
+ static const TypeFunc* base64_encodeBlock_Type();
static const TypeFunc* updateBytesCRC32_Type();
static const TypeFunc* updateBytesCRC32C_Type();
--- a/src/hotspot/share/runtime/globals.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/runtime/globals.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -345,6 +345,9 @@
diagnostic(bool, UseGHASHIntrinsics, false, \
"Use intrinsics for GHASH versions of crypto") \
\
+ product(bool, UseBASE64Intrinsics, false, \
+ "Use intrinsics for java.util.Base64") \
+ \
product(size_t, LargePageSizeInBytes, 0, \
"Large page size (0 to let VM choose the page size)") \
range(0, max_uintx) \
--- a/src/hotspot/share/runtime/stubRoutines.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/runtime/stubRoutines.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -128,6 +128,7 @@
address StubRoutines::_cipherBlockChaining_decryptAESCrypt = NULL;
address StubRoutines::_counterMode_AESCrypt = NULL;
address StubRoutines::_ghash_processBlocks = NULL;
+address StubRoutines::_base64_encodeBlock = NULL;
address StubRoutines::_sha1_implCompress = NULL;
address StubRoutines::_sha1_implCompressMB = NULL;
--- a/src/hotspot/share/runtime/stubRoutines.hpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/runtime/stubRoutines.hpp Wed Jun 27 20:21:42 2018 -0700
@@ -172,6 +172,7 @@
static address _cipherBlockChaining_decryptAESCrypt;
static address _counterMode_AESCrypt;
static address _ghash_processBlocks;
+ static address _base64_encodeBlock;
static address _sha1_implCompress;
static address _sha1_implCompressMB;
@@ -347,7 +348,7 @@
static address cipherBlockChaining_decryptAESCrypt() { return _cipherBlockChaining_decryptAESCrypt; }
static address counterMode_AESCrypt() { return _counterMode_AESCrypt; }
static address ghash_processBlocks() { return _ghash_processBlocks; }
-
+ static address base64_encodeBlock() { return _base64_encodeBlock; }
static address sha1_implCompress() { return _sha1_implCompress; }
static address sha1_implCompressMB() { return _sha1_implCompressMB; }
static address sha256_implCompress() { return _sha256_implCompress; }
--- a/src/hotspot/share/runtime/vmStructs.cpp Wed Jun 27 19:09:17 2018 -0700
+++ b/src/hotspot/share/runtime/vmStructs.cpp Wed Jun 27 20:21:42 2018 -0700
@@ -611,6 +611,7 @@
static_field(StubRoutines, _cipherBlockChaining_decryptAESCrypt, address) \
static_field(StubRoutines, _counterMode_AESCrypt, address) \
static_field(StubRoutines, _ghash_processBlocks, address) \
+ static_field(StubRoutines, _base64_encodeBlock, address) \
static_field(StubRoutines, _updateBytesCRC32, address) \
static_field(StubRoutines, _crc_table_adr, address) \
static_field(StubRoutines, _crc32c_table_addr, address) \
--- a/src/java.base/share/classes/java/util/Base64.java Wed Jun 27 19:09:17 2018 -0700
+++ b/src/java.base/share/classes/java/util/Base64.java Wed Jun 27 20:21:42 2018 -0700
@@ -31,6 +31,7 @@
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
+import jdk.internal.HotSpotIntrinsicCandidate;
/**
* This class consists exclusively of static methods for obtaining
@@ -390,6 +391,20 @@
return new Encoder(isURL, newline, linemax, false);
}
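+ // Note: encode0 always passes sl such that sl - sp is a multiple of 3, so the loop below
+ // (and the intrinsic stub that may replace it) never sees a partial 3-byte group.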
+ @HotSpotIntrinsicCandidate
+ private void encodeBlock(byte[] src, int sp, int sl, byte[] dst, int dp, boolean isURL) {
+ char[] base64 = isURL ? toBase64URL : toBase64;
+ for (int sp0 = sp, dp0 = dp ; sp0 < sl; ) {
+ int bits = (src[sp0++] & 0xff) << 16 |
+ (src[sp0++] & 0xff) << 8 |
+ (src[sp0++] & 0xff);
+ dst[dp0++] = (byte)base64[(bits >>> 18) & 0x3f];
+ dst[dp0++] = (byte)base64[(bits >>> 12) & 0x3f];
+ dst[dp0++] = (byte)base64[(bits >>> 6) & 0x3f];
+ dst[dp0++] = (byte)base64[bits & 0x3f];
+ }
+ }
+
private int encode0(byte[] src, int off, int end, byte[] dst) {
char[] base64 = isURL ? toBase64URL : toBase64;
int sp = off;
@@ -400,15 +415,7 @@
int dp = 0;
while (sp < sl) {
int sl0 = Math.min(sp + slen, sl);
- for (int sp0 = sp, dp0 = dp ; sp0 < sl0; ) {
- int bits = (src[sp0++] & 0xff) << 16 |
- (src[sp0++] & 0xff) << 8 |
- (src[sp0++] & 0xff);
- dst[dp0++] = (byte)base64[(bits >>> 18) & 0x3f];
- dst[dp0++] = (byte)base64[(bits >>> 12) & 0x3f];
- dst[dp0++] = (byte)base64[(bits >>> 6) & 0x3f];
- dst[dp0++] = (byte)base64[bits & 0x3f];
- }
+ encodeBlock(src, sp, sl0, dst, dp, isURL);
int dlen = (sl0 - sp) / 3 * 4;
dp += dlen;
sp = sl0;
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/BinaryContainer.java Wed Jun 27 19:09:17 2018 -0700
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/BinaryContainer.java Wed Jun 27 20:21:42 2018 -0700
@@ -213,6 +213,7 @@
{"StubRoutines::_counterMode_AESCrypt", "_aot_stub_routines_counterMode_AESCrypt" },
{"StubRoutines::_ghash_processBlocks", "_aot_stub_routines_ghash_processBlocks" },
+ {"StubRoutines::_base64_encodeBlock", "_aot_stub_routines_base64_encodeBlock" },
{"StubRoutines::_crc32c_table_addr", "_aot_stub_routines_crc32c_table_addr" },
{"StubRoutines::_updateBytesCRC32C", "_aot_stub_routines_updateBytesCRC32C" },
{"StubRoutines::_updateBytesAdler32", "_aot_stub_routines_updateBytesAdler32" },
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/hotspot/jtreg/compiler/intrinsics/base64/TestBase64.java Wed Jun 27 20:21:42 2018 -0700
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @author Eric Wang <yiming.wang@oracle.com>
+ * @summary tests java.util.Base64
+ *
+ * @run main/othervm/timeout=600 -Xbatch -DcheckOutput=true
+ * compiler.intrinsics.base64.TestBase64
+ */
+
+package compiler.intrinsics.base64;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Base64;
+import java.util.Base64.Decoder;
+import java.util.Base64.Encoder;
+import java.util.Objects;
+import java.util.Random;
+
+
+public class TestBase64 {
+ static boolean checkOutput = Boolean.getBoolean("checkOutput");
+
+ public static void main(String[] args) throws Exception {
+ int iters = (args.length > 0 ? Integer.valueOf(args[0]) : 1000000);
+ System.out.println(iters + " iterations");
+
+ test0(Base64Type.BASIC, Base64.getEncoder(), Base64.getDecoder(),"plain.txt", "baseEncode.txt", iters);
+ test0(Base64Type.URLSAFE, Base64.getUrlEncoder(), Base64.getUrlDecoder(),"plain.txt", "urlEncode.txt", iters);
+ test0(Base64Type.MIME, Base64.getMimeEncoder(), Base64.getMimeDecoder(),"plain.txt", "mimeEncode.txt", iters);
+ }
+
+ public static void test0(Base64Type type, Encoder encoder, Decoder decoder, String srcFile, String encodedFile, int numIterations) throws Exception {
+
+ String[] srcLns = Files.readAllLines(Paths.get(SRCDIR, srcFile), DEF_CHARSET)
+ .toArray(new String[0]);
+ String[] encodedLns = Files.readAllLines(Paths.get(SRCDIR, encodedFile), DEF_CHARSET)
+ .toArray(new String[0]);
+
+ for (int i = 0; i < numIterations; i++) {
+ int lns = 0;
+ for (String srcStr : srcLns) {
+ String encodedStr = null;
+ if (type != Base64Type.MIME) {
+ encodedStr = encodedLns[lns++];
+ } else {
+ while (lns < encodedLns.length) {
+ String s = encodedLns[lns++];
+ if (s.length() == 0)
+ break;
+ if (encodedStr != null) {
+ encodedStr += DEFAULT_CRLF + s;
+ } else {
+ encodedStr = s;
+ }
+ }
+ if (encodedStr == null && srcStr.length() == 0) {
+ encodedStr = "";
+ }
+ }
+
+ byte[] srcArr = srcStr.getBytes(DEF_CHARSET);
+ byte[] encodedArr = encodedStr.getBytes(DEF_CHARSET);
+
+ ByteBuffer srcBuf = ByteBuffer.wrap(srcArr);
+ ByteBuffer encodedBuf = ByteBuffer.wrap(encodedArr);
+ byte[] resArr = new byte[encodedArr.length];
+
+ // test int encode(byte[], byte[])
+ int len = encoder.encode(srcArr, resArr);
+ assertEqual(len, encodedArr.length);
+ assertEqual(resArr, encodedArr);
+
+ // test byte[] encode(byte[])
+ resArr = encoder.encode(srcArr);
+ assertEqual(resArr, encodedArr);
+
+ // test ByteBuffer encode(ByteBuffer)
+ int limit = srcBuf.limit();
+ ByteBuffer resBuf = encoder.encode(srcBuf);
+ assertEqual(srcBuf.position(), limit);
+ assertEqual(srcBuf.limit(), limit);
+ assertEqual(resBuf, encodedBuf);
+ srcBuf.rewind(); // reset for next test
+
+ // test String encodeToString(byte[])
+ String resEncodeStr = encoder.encodeToString(srcArr);
+ assertEqual(resEncodeStr, encodedStr);
+
+ // test int decode(byte[], byte[])
+ resArr = new byte[srcArr.length];
+ len = decoder.decode(encodedArr, resArr);
+ assertEqual(len, srcArr.length);
+ assertEqual(resArr, srcArr);
+
+ // test byte[] decode(byte[])
+ resArr = decoder.decode(encodedArr);
+ assertEqual(resArr, srcArr);
+
+ // test ByteBuffer decode(ByteBuffer)
+ limit = encodedBuf.limit();
+ resBuf = decoder.decode(encodedBuf);
+ assertEqual(encodedBuf.position(), limit);
+ assertEqual(encodedBuf.limit(), limit);
+ assertEqual(resBuf, srcBuf);
+ encodedBuf.rewind(); // reset for next test
+
+ // test byte[] decode(String)
+ resArr = decoder.decode(encodedStr);
+ assertEqual(resArr, srcArr);
+
+ }
+ }
+ }
+
+ // helper
+ enum Base64Type {
+ BASIC, URLSAFE, MIME
+ }
+
+ private static final String SRCDIR = System.getProperty("test.src", "compiler/intrinsics/base64/");
+ private static final Charset DEF_CHARSET = StandardCharsets.US_ASCII;
+ private static final String DEF_EXCEPTION_MSG =
+ "Assertion failed! The result is not same as expected\n";
+ private static final String DEFAULT_CRLF = "\r\n";
+
+ private static void assertEqual(Object result, Object expect) {
+ if (checkOutput) {
+ if (!Objects.deepEquals(result, expect)) {
+ String resultStr = result.toString();
+ String expectStr = expect.toString();
+ if (result instanceof byte[]) {
+ resultStr = new String((byte[]) result, DEF_CHARSET);
+ }
+ if (expect instanceof byte[]) {
+ expectStr = new String((byte[]) expect, DEF_CHARSET);
+ }
+ throw new RuntimeException(DEF_EXCEPTION_MSG +
+ " result: " + resultStr + " expected: " + expectStr);
+ }
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/hotspot/jtreg/compiler/intrinsics/base64/baseEncode.txt Wed Jun 27 20:21:42 2018 -0700
@@ -0,0 +1,183 @@
+VGhpcyB0ZXN0IGRhdGEgaXMgcGFydCBvZiByZmMyMDQ1IHdoaWNoIGluY2x1ZGVzIGFsbCBjaGFyYWN0ZXJzIGF+eiBBflosIDB+OSBhbmQgYWxsIHN5bWJvbHMs
+SXQgaXMgdXNlZCB0byB0ZXN0IGphdmEudXRpbC5CYXNlNjQuRW5jb2RlciwgYW5kIHdpbGwgYmUgZW5jb2RlZCBieSBvcmcuYXBhY2hlLmNvbW1vbnMuY29kZWMuYmluYXJ5LkJhc2U2NC5qYXZh
+dG8gdGVzdCBqYXZhLnV0aWwuQmFzZTY0LkRlY29kZXI7
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgIFtQYWdlIDFd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+ICAgVGhlc2UgZG9jdW1lbnRzIGFyZSByZXZpc2lvbnMgb2YgUkZDcyAxNTIxLCAxNTIyLCBhbmQgMTU5MCwgd2hpY2g=
+ICAgdGhlbXNlbHZlcyB3ZXJlIHJldmlzaW9ucyBvZiBSRkNzIDEzNDEgYW5kIDEzNDIuICBBbiBhcHBlbmRpeCBpbiBSRkM=
+ICAgMjA0OSBkZXNjcmliZXMgZGlmZmVyZW5jZXMgYW5kIGNoYW5nZXMgZnJvbSBwcmV2aW91cyB2ZXJzaW9ucy4=
+
+VGFibGUgb2YgQ29udGVudHM=
+
+ICAgMS4gSW50cm9kdWN0aW9uIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDM=
+ICAgMi4gRGVmaW5pdGlvbnMsIENvbnZlbnRpb25zLCBhbmQgR2VuZXJpYyBCTkYgR3JhbW1hciAuLi4uICAgIDU=
+ICAgMy4gTUlNRSBIZWFkZXIgRmllbGRzIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDg=
+ICAgNC4gTUlNRS1WZXJzaW9uIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDg=
+ICAgNS4gQ29udGVudC1UeXBlIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMTA=
+ICAgNi4gQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uICAgMTQ=
+ICAgNy4gQ29udGVudC1JRCBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjY=
+ICAgOC4gQ29udGVudC1EZXNjcmlwdGlvbiBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgOS4gQWRkaXRpb25hbCBNSU1FIEhlYWRlciBGaWVsZHMgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTAuIFN1bW1hcnkgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTEuIFNlY3VyaXR5IENvbnNpZGVyYXRpb25zIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTIuIEF1dGhvcnMnIEFkZHJlc3NlcyAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjg=
+ICAgQS4gQ29sbGVjdGVkIEdyYW1tYXIgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjk=
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgIFtQYWdlIDdd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+My4gIE1JTUUgSGVhZGVyIEZpZWxkcw==
+
+ICAgTUlNRSBkZWZpbmVzIGEgbnVtYmVyIG9mIG5ldyBSRkMgODIyIGhlYWRlciBmaWVsZHMgdGhhdCBhcmUgdXNlZCB0bw==
+ICAgZGVzY3JpYmUgdGhlIGNvbnRlbnQgb2YgYSBNSU1FIGVudGl0eS4gIFRoZXNlIGhlYWRlciBmaWVsZHMgb2NjdXIgaW4=
+ICAgYXQgbGVhc3QgdHdvIGNvbnRleHRzOg==
+
+ICAgICgxKSAgIEFzIHBhcnQgb2YgYSByZWd1bGFyIFJGQyA4MjIgbWVzc2FnZSBoZWFkZXIu
+
+ICAgICgyKSAgIEluIGEgTUlNRSBib2R5IHBhcnQgaGVhZGVyIHdpdGhpbiBhIG11bHRpcGFydA==
+ICAgICAgICAgIGNvbnN0cnVjdC4=
+
+ICAgVGhlIGZvcm1hbCBkZWZpbml0aW9uIG9mIHRoZXNlIGhlYWRlciBmaWVsZHMgaXMgYXMgZm9sbG93czo=
+
+ICAgICBNSU1FLW1lc3NhZ2UtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICBmaWVsZHM=
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICB2ZXJzaW9uIENSTEY=
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9yZWQu
+
+ICAgICBNSU1FLXBhcnQtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+ICAgICAgICAgICAgICAgICAgICAgICAgICBbIGZpZWxkcyBd
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IEFueSBmaWVsZCBub3QgYmVnaW5uaW5nIHdpdGg=
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7ICJjb250ZW50LSIgY2FuIGhhdmUgbm8gZGVmaW5lZA==
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IG1lYW5pbmcgYW5kIG1heSBiZSBpZ25vcmVkLg==
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9yZWQu
+
+ICAgVGhlIHN5bnRheCBvZiB0aGUgdmFyaW91cyBzcGVjaWZpYyBNSU1FIGhlYWRlciBmaWVsZHMgd2lsbCBiZQ==
+ICAgZGVzY3JpYmVkIGluIHRoZSBmb2xsb3dpbmcgc2VjdGlvbnMu
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgW1BhZ2UgMTFd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+NS4xLiAgU3ludGF4IG9mIHRoZSBDb250ZW50LVR5cGUgSGVhZGVyIEZpZWxk
+
+ICAgSW4gdGhlIEF1Z21lbnRlZCBCTkYgbm90YXRpb24gb2YgUkZDIDgyMiwgYSBDb250ZW50LVR5cGUgaGVhZGVyIGZpZWxk
+ICAgdmFsdWUgaXMgZGVmaW5lZCBhcyBmb2xsb3dzOg==
+
+ICAgICBjb250ZW50IDo9ICJDb250ZW50LVR5cGUiICI6IiB0eXBlICIvIiBzdWJ0eXBl
+ICAgICAgICAgICAgICAgICooIjsiIHBhcmFtZXRlcik=
+ICAgICAgICAgICAgICAgIDsgTWF0Y2hpbmcgb2YgbWVkaWEgdHlwZSBhbmQgc3VidHlwZQ==
+ICAgICAgICAgICAgICAgIDsgaXMgQUxXQVlTIGNhc2UtaW5zZW5zaXRpdmUu
+
+ICAgICB0eXBlIDo9IGRpc2NyZXRlLXR5cGUgLyBjb21wb3NpdGUtdHlwZQ==
+
+ICAgICBkaXNjcmV0ZS10eXBlIDo9ICJ0ZXh0IiAvICJpbWFnZSIgLyAiYXVkaW8iIC8gInZpZGVvIiAv
+ICAgICAgICAgICAgICAgICAgICAgICJhcHBsaWNhdGlvbiIgLyBleHRlbnNpb24tdG9rZW4=
+
+ICAgICBjb21wb3NpdGUtdHlwZSA6PSAibWVzc2FnZSIgLyAibXVsdGlwYXJ0IiAvIGV4dGVuc2lvbi10b2tlbg==
+
+ICAgICBleHRlbnNpb24tdG9rZW4gOj0gaWV0Zi10b2tlbiAvIHgtdG9rZW4=
+
+ICAgICBpZXRmLXRva2VuIDo9IDxBbiBleHRlbnNpb24gdG9rZW4gZGVmaW5lZCBieSBh
+ICAgICAgICAgICAgICAgICAgICBzdGFuZGFyZHMtdHJhY2sgUkZDIGFuZCByZWdpc3RlcmVk
+ICAgICAgICAgICAgICAgICAgICB3aXRoIElBTkEuPg==
+
+ICAgICB4LXRva2VuIDo9IDxUaGUgdHdvIGNoYXJhY3RlcnMgIlgtIiBvciAieC0iIGZvbGxvd2VkLCB3aXRo
+ICAgICAgICAgICAgICAgICBubyBpbnRlcnZlbmluZyB3aGl0ZSBzcGFjZSwgYnkgYW55IHRva2VuPg==
+
+ICAgICBzdWJ0eXBlIDo9IGV4dGVuc2lvbi10b2tlbiAvIGlhbmEtdG9rZW4=
+
+ICAgICBpYW5hLXRva2VuIDo9IDxBIHB1YmxpY2x5LWRlZmluZWQgZXh0ZW5zaW9uIHRva2VuLiBUb2tlbnM=
+ICAgICAgICAgICAgICAgICAgICBvZiB0aGlzIGZvcm0gbXVzdCBiZSByZWdpc3RlcmVkIHdpdGggSUFOQQ==
+ICAgICAgICAgICAgICAgICAgICBhcyBzcGVjaWZpZWQgaW4gUkZDIDIwNDguPg==
+
+ICAgICBwYXJhbWV0ZXIgOj0gYXR0cmlidXRlICI9IiB2YWx1ZQ==
+
+ICAgICBhdHRyaWJ1dGUgOj0gdG9rZW4=
+ICAgICAgICAgICAgICAgICAgOyBNYXRjaGluZyBvZiBhdHRyaWJ1dGVz
+ICAgICAgICAgICAgICAgICAgOyBpcyBBTFdBWVMgY2FzZS1pbnNlbnNpdGl2ZS4=
+
+ICAgICB2YWx1ZSA6PSB0b2tlbiAvIHF1b3RlZC1zdHJpbmc=
+
+ICAgICB0b2tlbiA6PSAxKjxhbnkgKFVTLUFTQ0lJKSBDSEFSIGV4Y2VwdCBTUEFDRSwgQ1RMcyw=
+ICAgICAgICAgICAgICAgICBvciB0c3BlY2lhbHM+
+
+ICAgICB0c3BlY2lhbHMgOj0gICIoIiAvICIpIiAvICI8IiAvICI+IiAvICJAIiAv
+ICAgICAgICAgICAgICAgICAgICIsIiAvICI7IiAvICI6IiAvICJcIiAvIDwiPg==
+ICAgICAgICAgICAgICAgICAgICIvIiAvICJbIiAvICJdIiAvICI/IiAvICI9Ig==
+ICAgICAgICAgICAgICAgICAgIDsgTXVzdCBiZSBpbiBxdW90ZWQtc3RyaW5nLA==
+ICAgICAgICAgICAgICAgICAgIDsgdG8gdXNlIHdpdGhpbiBwYXJhbWV0ZXIgdmFsdWVz
+
+ICAgICBkZXNjcmlwdGlvbiA6PSAiQ29udGVudC1EZXNjcmlwdGlvbiIgIjoiICp0ZXh0
+
+ICAgICBlbmNvZGluZyA6PSAiQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyIgIjoiIG1lY2hhbmlzbQ==
+
+ICAgICBlbnRpdHktaGVhZGVycyA6PSBbIGNvbnRlbnQgQ1JMRiBd
+ICAgICAgICAgICAgICAgICAgICBbIGVuY29kaW5nIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICBbIGlkIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICBbIGRlc2NyaXB0aW9uIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICAqKCBNSU1FLWV4dGVuc2lvbi1maWVsZCBDUkxGICk=
+
+ICAgICBoZXgtb2N0ZXQgOj0gIj0iIDIoRElHSVQgLyAiQSIgLyAiQiIgLyAiQyIgLyAiRCIgLyAiRSIgLyAiRiIp
+ICAgICAgICAgICAgICAgOyBPY3RldCBtdXN0IGJlIHVzZWQgZm9yIGNoYXJhY3RlcnMgPiAxMjcsID0s
+ICAgICAgICAgICAgICAgOyBTUEFDRXMgb3IgVEFCcyBhdCB0aGUgZW5kcyBvZiBsaW5lcywgYW5kIGlz
+ICAgICAgICAgICAgICAgOyByZWNvbW1lbmRlZCBmb3IgYW55IGNoYXJhY3RlciBub3QgbGlzdGVkIGlu
+ICAgICAgICAgICAgICAgOyBSRkMgMjA0OSBhcyAibWFpbC1zYWZlIi4=
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+ICAgICAgICAgIG11c3QgYmUgdXNlZC4gIEFuIGVxdWFsIHNpZ24gYXMgdGhlIGxhc3QgY2hhcmFjdGVyIG9uIGE=
+ICAgICAgICAgIGVuY29kZWQgbGluZSBpbmRpY2F0ZXMgc3VjaCBhIG5vbi1zaWduaWZpY2FudCAoInNvZnQiKQ==
+ICAgICAgICAgIGxpbmUgYnJlYWsgaW4gdGhlIGVuY29kZWQgdGV4dC4=
+
+ICAgVGh1cyBpZiB0aGUgInJhdyIgZm9ybSBvZiB0aGUgbGluZSBpcyBhIHNpbmdsZSB1bmVuY29kZWQgbGluZSB0aGF0
+ICAgc2F5czo=
+
+ICAgICBOb3cncyB0aGUgdGltZSBmb3IgYWxsIGZvbGsgdG8gY29tZSB0byB0aGUgYWlkIG9mIHRoZWlyIGNvdW50cnku
+
+ICAgVGhpcyBjYW4gYmUgcmVwcmVzZW50ZWQsIGluIHRoZSBRdW90ZWQtUHJpbnRhYmxlIGVuY29kaW5nLCBhczo=
+
+ICAgICBOb3cncyB0aGUgdGltZSA9
+ICAgICBmb3IgYWxsIGZvbGsgdG8gY29tZT0=
+ICAgICAgdG8gdGhlIGFpZCBvZiB0aGVpciBjb3VudHJ5Lg==
+
+ICAgU2luY2UgdGhlIGh5cGhlbiBjaGFyYWN0ZXIgKCItIikgbWF5IGJlIHJlcHJlc2VudGVkIGFzIGl0c2VsZiBpbiB0aGU=
+ICAgUXVvdGVkLVByaW50YWJsZSBlbmNvZGluZywgY2FyZSBtdXN0IGJlIHRha2VuLCB3aGVuIGVuY2Fwc3VsYXRpbmcgYQ==
+ICAgcXVvdGVkLXByaW50YWJsZSBlbmNvZGVkIGJvZHkgaW5zaWRlIG9uZSBvciBtb3JlIG11bHRpcGFydCBlbnRpdGllcyw=
+ICAgdG8gZW5zdXJlIHRoYXQgdGhlIGJvdW5kYXJ5IGRlbGltaXRlciBkb2VzIG5vdCBhcHBlYXIgYW55d2hlcmUgaW4gdGhl
+ICAgZW5jb2RlZCBib2R5LiAgKEEgZ29vZCBzdHJhdGVneSBpcyB0byBjaG9vc2UgYSBib3VuZGFyeSB0aGF0IGluY2x1ZGVz
+ICAgYSBjaGFyYWN0ZXIgc2VxdWVuY2Ugc3VjaCBhcyAiPV8iIHdoaWNoIGNhbiBuZXZlciBhcHBlYXIgaW4gYQ==
+ICAgcXVvdGVkLXByaW50YWJsZSBib2R5LiAgU2VlIHRoZSBkZWZpbml0aW9uIG9mIG11bHRpcGFydCBtZXNzYWdlcyBpbg==
+ICAgUkZDIDIwNDYuKQ==
+
+ICAgICAhIiMkQFtcXV5ge3x9fiU=
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgW1BhZ2UgMjRd
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+
+ICAgICAgICAgICAgICAgICAgICBUYWJsZSAxOiBUaGUgQmFzZTY0IEFscGhhYmV0
+
+ICAgICBWYWx1ZSBFbmNvZGluZyAgVmFsdWUgRW5jb2RpbmcgIFZhbHVlIEVuY29kaW5nICBWYWx1ZSBFbmNvZGluZw==
+ICAgICAgICAgMCBBICAgICAgICAgICAgMTcgUiAgICAgICAgICAgIDM0IGkgICAgICAgICAgICA1MSB6
+ICAgICAgICAgMSBCICAgICAgICAgICAgMTggUyAgICAgICAgICAgIDM1IGogICAgICAgICAgICA1MiAw
+ICAgICAgICAgMiBDICAgICAgICAgICAgMTkgVCAgICAgICAgICAgIDM2IGsgICAgICAgICAgICA1MyAx
+ICAgICAgICAgMyBEICAgICAgICAgICAgMjAgVSAgICAgICAgICAgIDM3IGwgICAgICAgICAgICA1NCAy
+ICAgICAgICAgNCBFICAgICAgICAgICAgMjEgViAgICAgICAgICAgIDM4IG0gICAgICAgICAgICA1NSAz
+ICAgICAgICAgNSBGICAgICAgICAgICAgMjIgVyAgICAgICAgICAgIDM5IG4gICAgICAgICAgICA1NiA0
+ICAgICAgICAgNiBHICAgICAgICAgICAgMjMgWCAgICAgICAgICAgIDQwIG8gICAgICAgICAgICA1NyA1
+ICAgICAgICAgNyBIICAgICAgICAgICAgMjQgWSAgICAgICAgICAgIDQxIHAgICAgICAgICAgICA1OCA2
+ICAgICAgICAgOCBJICAgICAgICAgICAgMjUgWiAgICAgICAgICAgIDQyIHEgICAgICAgICAgICA1OSA3
+ICAgICAgICAgOSBKICAgICAgICAgICAgMjYgYSAgICAgICAgICAgIDQzIHIgICAgICAgICAgICA2MCA4
+ICAgICAgICAxMCBLICAgICAgICAgICAgMjcgYiAgICAgICAgICAgIDQ0IHMgICAgICAgICAgICA2MSA5
+ICAgICAgICAxMSBMICAgICAgICAgICAgMjggYyAgICAgICAgICAgIDQ1IHQgICAgICAgICAgICA2MiAr
+ICAgICAgICAxMiBNICAgICAgICAgICAgMjkgZCAgICAgICAgICAgIDQ2IHUgICAgICAgICAgICA2MyAv
+ICAgICAgICAxMyBOICAgICAgICAgICAgMzAgZSAgICAgICAgICAgIDQ3IHY=
+ICAgICAgICAxNCBPICAgICAgICAgICAgMzEgZiAgICAgICAgICAgIDQ4IHcgICAgICAgICAocGFkKSA9
+ICAgICAgICAxNSBQICAgICAgICAgICAgMzIgZyAgICAgICAgICAgIDQ5IHg=
+ICAgICAgICAxNiBRICAgICAgICAgICAgMzMgaCAgICAgICAgICAgIDUwIHk=
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/hotspot/jtreg/compiler/intrinsics/base64/mimeEncode.txt Wed Jun 27 20:21:42 2018 -0700
@@ -0,0 +1,391 @@
+VGhpcyB0ZXN0IGRhdGEgaXMgcGFydCBvZiByZmMyMDQ1IHdoaWNoIGluY2x1ZGVzIGFsbCBjaGFy
+YWN0ZXJzIGF+eiBBflosIDB+OSBhbmQgYWxsIHN5bWJvbHMs
+
+SXQgaXMgdXNlZCB0byB0ZXN0IGphdmEudXRpbC5CYXNlNjQuRW5jb2RlciwgYW5kIHdpbGwgYmUg
+ZW5jb2RlZCBieSBvcmcuYXBhY2hlLmNvbW1vbnMuY29kZWMuYmluYXJ5LkJhc2U2NC5qYXZh
+
+dG8gdGVzdCBqYXZhLnV0aWwuQmFzZTY0LkRlY29kZXI7
+
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAg
+ICAgICAgIFtQYWdlIDFd
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAg
+ICBOb3ZlbWJlciAxOTk2
+
+
+ICAgVGhlc2UgZG9jdW1lbnRzIGFyZSByZXZpc2lvbnMgb2YgUkZDcyAxNTIxLCAxNTIyLCBhbmQg
+MTU5MCwgd2hpY2g=
+
+ICAgdGhlbXNlbHZlcyB3ZXJlIHJldmlzaW9ucyBvZiBSRkNzIDEzNDEgYW5kIDEzNDIuICBBbiBh
+cHBlbmRpeCBpbiBSRkM=
+
+ICAgMjA0OSBkZXNjcmliZXMgZGlmZmVyZW5jZXMgYW5kIGNoYW5nZXMgZnJvbSBwcmV2aW91cyB2
+ZXJzaW9ucy4=
+
+
+VGFibGUgb2YgQ29udGVudHM=
+
+
+ICAgMS4gSW50cm9kdWN0aW9uIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgIDM=
+
+ICAgMi4gRGVmaW5pdGlvbnMsIENvbnZlbnRpb25zLCBhbmQgR2VuZXJpYyBCTkYgR3JhbW1hciAu
+Li4uICAgIDU=
+
+ICAgMy4gTUlNRSBIZWFkZXIgRmllbGRzIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgIDg=
+
+ICAgNC4gTUlNRS1WZXJzaW9uIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgIDg=
+
+ICAgNS4gQ29udGVudC1UeXBlIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMTA=
+
+ICAgNi4gQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4u
+Li4uICAgMTQ=
+
+ICAgNy4gQ29udGVudC1JRCBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjY=
+
+ICAgOC4gQ29udGVudC1EZXNjcmlwdGlvbiBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjc=
+
+ICAgOS4gQWRkaXRpb25hbCBNSU1FIEhlYWRlciBGaWVsZHMgLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjc=
+
+ICAgMTAuIFN1bW1hcnkgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjc=
+
+ICAgMTEuIFNlY3VyaXR5IENvbnNpZGVyYXRpb25zIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjc=
+
+ICAgMTIuIEF1dGhvcnMnIEFkZHJlc3NlcyAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjg=
+
+ICAgQS4gQ29sbGVjdGVkIEdyYW1tYXIgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u
+Li4uICAgMjk=
+
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAg
+ICAgICAgIFtQYWdlIDdd
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAg
+ICBOb3ZlbWJlciAxOTk2
+
+
+My4gIE1JTUUgSGVhZGVyIEZpZWxkcw==
+
+
+ICAgTUlNRSBkZWZpbmVzIGEgbnVtYmVyIG9mIG5ldyBSRkMgODIyIGhlYWRlciBmaWVsZHMgdGhh
+dCBhcmUgdXNlZCB0bw==
+
+ICAgZGVzY3JpYmUgdGhlIGNvbnRlbnQgb2YgYSBNSU1FIGVudGl0eS4gIFRoZXNlIGhlYWRlciBm
+aWVsZHMgb2NjdXIgaW4=
+
+ICAgYXQgbGVhc3QgdHdvIGNvbnRleHRzOg==
+
+
+ICAgICgxKSAgIEFzIHBhcnQgb2YgYSByZWd1bGFyIFJGQyA4MjIgbWVzc2FnZSBoZWFkZXIu
+
+
+ICAgICgyKSAgIEluIGEgTUlNRSBib2R5IHBhcnQgaGVhZGVyIHdpdGhpbiBhIG11bHRpcGFydA==
+
+ICAgICAgICAgIGNvbnN0cnVjdC4=
+
+
+ICAgVGhlIGZvcm1hbCBkZWZpbml0aW9uIG9mIHRoZXNlIGhlYWRlciBmaWVsZHMgaXMgYXMgZm9s
+bG93czo=
+
+
+ICAgICBNSU1FLW1lc3NhZ2UtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICBmaWVsZHM=
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICB2ZXJzaW9uIENSTEY=
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9y
+ZWQu
+
+
+ICAgICBNSU1FLXBhcnQtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICBbIGZpZWxkcyBd
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IEFueSBmaWVsZCBub3QgYmVnaW5uaW5nIHdpdGg=
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7ICJjb250ZW50LSIgY2FuIGhhdmUgbm8gZGVmaW5l
+ZA==
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IG1lYW5pbmcgYW5kIG1heSBiZSBpZ25vcmVkLg==
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9yZWQu
+
+
+ICAgVGhlIHN5bnRheCBvZiB0aGUgdmFyaW91cyBzcGVjaWZpYyBNSU1FIGhlYWRlciBmaWVsZHMg
+d2lsbCBiZQ==
+
+ICAgZGVzY3JpYmVkIGluIHRoZSBmb2xsb3dpbmcgc2VjdGlvbnMu
+
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAg
+ICAgICAgW1BhZ2UgMTFd
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAg
+ICBOb3ZlbWJlciAxOTk2
+
+
+NS4xLiAgU3ludGF4IG9mIHRoZSBDb250ZW50LVR5cGUgSGVhZGVyIEZpZWxk
+
+
+ICAgSW4gdGhlIEF1Z21lbnRlZCBCTkYgbm90YXRpb24gb2YgUkZDIDgyMiwgYSBDb250ZW50LVR5
+cGUgaGVhZGVyIGZpZWxk
+
+ICAgdmFsdWUgaXMgZGVmaW5lZCBhcyBmb2xsb3dzOg==
+
+
+ICAgICBjb250ZW50IDo9ICJDb250ZW50LVR5cGUiICI6IiB0eXBlICIvIiBzdWJ0eXBl
+
+ICAgICAgICAgICAgICAgICooIjsiIHBhcmFtZXRlcik=
+
+ICAgICAgICAgICAgICAgIDsgTWF0Y2hpbmcgb2YgbWVkaWEgdHlwZSBhbmQgc3VidHlwZQ==
+
+ICAgICAgICAgICAgICAgIDsgaXMgQUxXQVlTIGNhc2UtaW5zZW5zaXRpdmUu
+
+
+ICAgICB0eXBlIDo9IGRpc2NyZXRlLXR5cGUgLyBjb21wb3NpdGUtdHlwZQ==
+
+
+ICAgICBkaXNjcmV0ZS10eXBlIDo9ICJ0ZXh0IiAvICJpbWFnZSIgLyAiYXVkaW8iIC8gInZpZGVv
+IiAv
+
+ICAgICAgICAgICAgICAgICAgICAgICJhcHBsaWNhdGlvbiIgLyBleHRlbnNpb24tdG9rZW4=
+
+
+ICAgICBjb21wb3NpdGUtdHlwZSA6PSAibWVzc2FnZSIgLyAibXVsdGlwYXJ0IiAvIGV4dGVuc2lv
+bi10b2tlbg==
+
+
+ICAgICBleHRlbnNpb24tdG9rZW4gOj0gaWV0Zi10b2tlbiAvIHgtdG9rZW4=
+
+
+ICAgICBpZXRmLXRva2VuIDo9IDxBbiBleHRlbnNpb24gdG9rZW4gZGVmaW5lZCBieSBh
+
+ICAgICAgICAgICAgICAgICAgICBzdGFuZGFyZHMtdHJhY2sgUkZDIGFuZCByZWdpc3RlcmVk
+
+ICAgICAgICAgICAgICAgICAgICB3aXRoIElBTkEuPg==
+
+
+ICAgICB4LXRva2VuIDo9IDxUaGUgdHdvIGNoYXJhY3RlcnMgIlgtIiBvciAieC0iIGZvbGxvd2Vk
+LCB3aXRo
+
+ICAgICAgICAgICAgICAgICBubyBpbnRlcnZlbmluZyB3aGl0ZSBzcGFjZSwgYnkgYW55IHRva2Vu
+Pg==
+
+
+ICAgICBzdWJ0eXBlIDo9IGV4dGVuc2lvbi10b2tlbiAvIGlhbmEtdG9rZW4=
+
+
+ICAgICBpYW5hLXRva2VuIDo9IDxBIHB1YmxpY2x5LWRlZmluZWQgZXh0ZW5zaW9uIHRva2VuLiBU
+b2tlbnM=
+
+ICAgICAgICAgICAgICAgICAgICBvZiB0aGlzIGZvcm0gbXVzdCBiZSByZWdpc3RlcmVkIHdpdGgg
+SUFOQQ==
+
+ICAgICAgICAgICAgICAgICAgICBhcyBzcGVjaWZpZWQgaW4gUkZDIDIwNDguPg==
+
+
+ICAgICBwYXJhbWV0ZXIgOj0gYXR0cmlidXRlICI9IiB2YWx1ZQ==
+
+
+ICAgICBhdHRyaWJ1dGUgOj0gdG9rZW4=
+
+ICAgICAgICAgICAgICAgICAgOyBNYXRjaGluZyBvZiBhdHRyaWJ1dGVz
+
+ICAgICAgICAgICAgICAgICAgOyBpcyBBTFdBWVMgY2FzZS1pbnNlbnNpdGl2ZS4=
+
+
+ICAgICB2YWx1ZSA6PSB0b2tlbiAvIHF1b3RlZC1zdHJpbmc=
+
+
+ICAgICB0b2tlbiA6PSAxKjxhbnkgKFVTLUFTQ0lJKSBDSEFSIGV4Y2VwdCBTUEFDRSwgQ1RMcyw=
+
+ICAgICAgICAgICAgICAgICBvciB0c3BlY2lhbHM+
+
+
+ICAgICB0c3BlY2lhbHMgOj0gICIoIiAvICIpIiAvICI8IiAvICI+IiAvICJAIiAv
+
+ICAgICAgICAgICAgICAgICAgICIsIiAvICI7IiAvICI6IiAvICJcIiAvIDwiPg==
+
+ICAgICAgICAgICAgICAgICAgICIvIiAvICJbIiAvICJdIiAvICI/IiAvICI9Ig==
+
+ICAgICAgICAgICAgICAgICAgIDsgTXVzdCBiZSBpbiBxdW90ZWQtc3RyaW5nLA==
+
+ICAgICAgICAgICAgICAgICAgIDsgdG8gdXNlIHdpdGhpbiBwYXJhbWV0ZXIgdmFsdWVz
+
+
+ICAgICBkZXNjcmlwdGlvbiA6PSAiQ29udGVudC1EZXNjcmlwdGlvbiIgIjoiICp0ZXh0
+
+
+ICAgICBlbmNvZGluZyA6PSAiQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyIgIjoiIG1lY2hhbmlz
+bQ==
+
+
+ICAgICBlbnRpdHktaGVhZGVycyA6PSBbIGNvbnRlbnQgQ1JMRiBd
+
+ICAgICAgICAgICAgICAgICAgICBbIGVuY29kaW5nIENSTEYgXQ==
+
+ICAgICAgICAgICAgICAgICAgICBbIGlkIENSTEYgXQ==
+
+ICAgICAgICAgICAgICAgICAgICBbIGRlc2NyaXB0aW9uIENSTEYgXQ==
+
+ICAgICAgICAgICAgICAgICAgICAqKCBNSU1FLWV4dGVuc2lvbi1maWVsZCBDUkxGICk=
+
+
+ICAgICBoZXgtb2N0ZXQgOj0gIj0iIDIoRElHSVQgLyAiQSIgLyAiQiIgLyAiQyIgLyAiRCIgLyAi
+RSIgLyAiRiIp
+
+ICAgICAgICAgICAgICAgOyBPY3RldCBtdXN0IGJlIHVzZWQgZm9yIGNoYXJhY3RlcnMgPiAxMjcs
+ID0s
+
+ICAgICAgICAgICAgICAgOyBTUEFDRXMgb3IgVEFCcyBhdCB0aGUgZW5kcyBvZiBsaW5lcywgYW5k
+IGlz
+
+ICAgICAgICAgICAgICAgOyByZWNvbW1lbmRlZCBmb3IgYW55IGNoYXJhY3RlciBub3QgbGlzdGVk
+IGlu
+
+ICAgICAgICAgICAgICAgOyBSRkMgMjA0OSBhcyAibWFpbC1zYWZlIi4=
+
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAg
+ICBOb3ZlbWJlciAxOTk2
+
+
+ICAgICAgICAgIG11c3QgYmUgdXNlZC4gIEFuIGVxdWFsIHNpZ24gYXMgdGhlIGxhc3QgY2hhcmFj
+dGVyIG9uIGE=
+
+ICAgICAgICAgIGVuY29kZWQgbGluZSBpbmRpY2F0ZXMgc3VjaCBhIG5vbi1zaWduaWZpY2FudCAo
+InNvZnQiKQ==
+
+ICAgICAgICAgIGxpbmUgYnJlYWsgaW4gdGhlIGVuY29kZWQgdGV4dC4=
+
+
+ICAgVGh1cyBpZiB0aGUgInJhdyIgZm9ybSBvZiB0aGUgbGluZSBpcyBhIHNpbmdsZSB1bmVuY29k
+ZWQgbGluZSB0aGF0
+
+ICAgc2F5czo=
+
+
+ICAgICBOb3cncyB0aGUgdGltZSBmb3IgYWxsIGZvbGsgdG8gY29tZSB0byB0aGUgYWlkIG9mIHRo
+ZWlyIGNvdW50cnku
+
+
+ICAgVGhpcyBjYW4gYmUgcmVwcmVzZW50ZWQsIGluIHRoZSBRdW90ZWQtUHJpbnRhYmxlIGVuY29k
+aW5nLCBhczo=
+
+
+ICAgICBOb3cncyB0aGUgdGltZSA9
+
+ICAgICBmb3IgYWxsIGZvbGsgdG8gY29tZT0=
+
+ICAgICAgdG8gdGhlIGFpZCBvZiB0aGVpciBjb3VudHJ5Lg==
+
+
+ICAgU2luY2UgdGhlIGh5cGhlbiBjaGFyYWN0ZXIgKCItIikgbWF5IGJlIHJlcHJlc2VudGVkIGFz
+IGl0c2VsZiBpbiB0aGU=
+
+ICAgUXVvdGVkLVByaW50YWJsZSBlbmNvZGluZywgY2FyZSBtdXN0IGJlIHRha2VuLCB3aGVuIGVu
+Y2Fwc3VsYXRpbmcgYQ==
+
+ICAgcXVvdGVkLXByaW50YWJsZSBlbmNvZGVkIGJvZHkgaW5zaWRlIG9uZSBvciBtb3JlIG11bHRp
+cGFydCBlbnRpdGllcyw=
+
+ICAgdG8gZW5zdXJlIHRoYXQgdGhlIGJvdW5kYXJ5IGRlbGltaXRlciBkb2VzIG5vdCBhcHBlYXIg
+YW55d2hlcmUgaW4gdGhl
+
+ICAgZW5jb2RlZCBib2R5LiAgKEEgZ29vZCBzdHJhdGVneSBpcyB0byBjaG9vc2UgYSBib3VuZGFy
+eSB0aGF0IGluY2x1ZGVz
+
+ICAgYSBjaGFyYWN0ZXIgc2VxdWVuY2Ugc3VjaCBhcyAiPV8iIHdoaWNoIGNhbiBuZXZlciBhcHBl
+YXIgaW4gYQ==
+
+ICAgcXVvdGVkLXByaW50YWJsZSBib2R5LiAgU2VlIHRoZSBkZWZpbml0aW9uIG9mIG11bHRpcGFy
+dCBtZXNzYWdlcyBpbg==
+
+ICAgUkZDIDIwNDYuKQ==
+
+
+ICAgICAhIiMkQFtcXV5ge3x9fiU=
+
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAg
+ICAgICAgW1BhZ2UgMjRd
+
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAg
+ICBOb3ZlbWJlciAxOTk2
+
+
+
+ICAgICAgICAgICAgICAgICAgICBUYWJsZSAxOiBUaGUgQmFzZTY0IEFscGhhYmV0
+
+
+ICAgICBWYWx1ZSBFbmNvZGluZyAgVmFsdWUgRW5jb2RpbmcgIFZhbHVlIEVuY29kaW5nICBWYWx1
+ZSBFbmNvZGluZw==
+
+ICAgICAgICAgMCBBICAgICAgICAgICAgMTcgUiAgICAgICAgICAgIDM0IGkgICAgICAgICAgICA1
+MSB6
+
+ICAgICAgICAgMSBCICAgICAgICAgICAgMTggUyAgICAgICAgICAgIDM1IGogICAgICAgICAgICA1
+MiAw
+
+ICAgICAgICAgMiBDICAgICAgICAgICAgMTkgVCAgICAgICAgICAgIDM2IGsgICAgICAgICAgICA1
+MyAx
+
+ICAgICAgICAgMyBEICAgICAgICAgICAgMjAgVSAgICAgICAgICAgIDM3IGwgICAgICAgICAgICA1
+NCAy
+
+ICAgICAgICAgNCBFICAgICAgICAgICAgMjEgViAgICAgICAgICAgIDM4IG0gICAgICAgICAgICA1
+NSAz
+
+ICAgICAgICAgNSBGICAgICAgICAgICAgMjIgVyAgICAgICAgICAgIDM5IG4gICAgICAgICAgICA1
+NiA0
+
+ICAgICAgICAgNiBHICAgICAgICAgICAgMjMgWCAgICAgICAgICAgIDQwIG8gICAgICAgICAgICA1
+NyA1
+
+ICAgICAgICAgNyBIICAgICAgICAgICAgMjQgWSAgICAgICAgICAgIDQxIHAgICAgICAgICAgICA1
+OCA2
+
+ICAgICAgICAgOCBJICAgICAgICAgICAgMjUgWiAgICAgICAgICAgIDQyIHEgICAgICAgICAgICA1
+OSA3
+
+ICAgICAgICAgOSBKICAgICAgICAgICAgMjYgYSAgICAgICAgICAgIDQzIHIgICAgICAgICAgICA2
+MCA4
+
+ICAgICAgICAxMCBLICAgICAgICAgICAgMjcgYiAgICAgICAgICAgIDQ0IHMgICAgICAgICAgICA2
+MSA5
+
+ICAgICAgICAxMSBMICAgICAgICAgICAgMjggYyAgICAgICAgICAgIDQ1IHQgICAgICAgICAgICA2
+MiAr
+
+ICAgICAgICAxMiBNICAgICAgICAgICAgMjkgZCAgICAgICAgICAgIDQ2IHUgICAgICAgICAgICA2
+MyAv
+
+ICAgICAgICAxMyBOICAgICAgICAgICAgMzAgZSAgICAgICAgICAgIDQ3IHY=
+
+ICAgICAgICAxNCBPICAgICAgICAgICAgMzEgZiAgICAgICAgICAgIDQ4IHcgICAgICAgICAocGFk
+KSA9
+
+ICAgICAgICAxNSBQICAgICAgICAgICAgMzIgZyAgICAgICAgICAgIDQ5IHg=
+
+ICAgICAgICAxNiBRICAgICAgICAgICAgMzMgaCAgICAgICAgICAgIDUwIHk=
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/hotspot/jtreg/compiler/intrinsics/base64/plain.txt Wed Jun 27 20:21:42 2018 -0700
@@ -0,0 +1,183 @@
+This test data is part of rfc2045 which includes all characters a~z A~Z, 0~9 and all symbols,
+It is used to test java.util.Base64.Encoder, and will be encoded by org.apache.commons.codec.binary.Base64.java
+to test java.util.Base64.Decoder;
+
+Freed & Borenstein Standards Track [Page 1]
+RFC 2045 Internet Message Bodies November 1996
+
+ These documents are revisions of RFCs 1521, 1522, and 1590, which
+ themselves were revisions of RFCs 1341 and 1342. An appendix in RFC
+ 2049 describes differences and changes from previous versions.
+
+Table of Contents
+
+ 1. Introduction ......................................... 3
+ 2. Definitions, Conventions, and Generic BNF Grammar .... 5
+ 3. MIME Header Fields ................................... 8
+ 4. MIME-Version Header Field ............................ 8
+ 5. Content-Type Header Field ............................ 10
+ 6. Content-Transfer-Encoding Header Field ............... 14
+ 7. Content-ID Header Field .............................. 26
+ 8. Content-Description Header Field ..................... 27
+ 9. Additional MIME Header Fields ........................ 27
+ 10. Summary ............................................. 27
+ 11. Security Considerations ............................. 27
+ 12. Authors' Addresses .................................. 28
+ A. Collected Grammar .................................... 29
+
+Freed & Borenstein Standards Track [Page 7]
+RFC 2045 Internet Message Bodies November 1996
+
+3. MIME Header Fields
+
+ MIME defines a number of new RFC 822 header fields that are used to
+ describe the content of a MIME entity. These header fields occur in
+ at least two contexts:
+
+ (1) As part of a regular RFC 822 message header.
+
+ (2) In a MIME body part header within a multipart
+ construct.
+
+ The formal definition of these header fields is as follows:
+
+ MIME-message-headers := entity-headers
+ fields
+ version CRLF
+ ; The ordering of the header
+ ; fields implied by this BNF
+ ; definition should be ignored.
+
+ MIME-part-headers := entity-headers
+ [ fields ]
+ ; Any field not beginning with
+ ; "content-" can have no defined
+ ; meaning and may be ignored.
+ ; The ordering of the header
+ ; fields implied by this BNF
+ ; definition should be ignored.
+
+ The syntax of the various specific MIME header fields will be
+ described in the following sections.
+
+Freed & Borenstein Standards Track [Page 11]
+RFC 2045 Internet Message Bodies November 1996
+
+5.1. Syntax of the Content-Type Header Field
+
+ In the Augmented BNF notation of RFC 822, a Content-Type header field
+ value is defined as follows:
+
+ content := "Content-Type" ":" type "/" subtype
+ *(";" parameter)
+ ; Matching of media type and subtype
+ ; is ALWAYS case-insensitive.
+
+ type := discrete-type / composite-type
+
+ discrete-type := "text" / "image" / "audio" / "video" /
+ "application" / extension-token
+
+ composite-type := "message" / "multipart" / extension-token
+
+ extension-token := ietf-token / x-token
+
+ ietf-token := <An extension token defined by a
+ standards-track RFC and registered
+ with IANA.>
+
+ x-token := <The two characters "X-" or "x-" followed, with
+ no intervening white space, by any token>
+
+ subtype := extension-token / iana-token
+
+ iana-token := <A publicly-defined extension token. Tokens
+ of this form must be registered with IANA
+ as specified in RFC 2048.>
+
+ parameter := attribute "=" value
+
+ attribute := token
+ ; Matching of attributes
+ ; is ALWAYS case-insensitive.
+
+ value := token / quoted-string
+
+ token := 1*<any (US-ASCII) CHAR except SPACE, CTLs,
+ or tspecials>
+
+ tspecials := "(" / ")" / "<" / ">" / "@" /
+ "," / ";" / ":" / "\" / <">
+ "/" / "[" / "]" / "?" / "="
+ ; Must be in quoted-string,
+ ; to use within parameter values
+
+ description := "Content-Description" ":" *text
+
+ encoding := "Content-Transfer-Encoding" ":" mechanism
+
+ entity-headers := [ content CRLF ]
+ [ encoding CRLF ]
+ [ id CRLF ]
+ [ description CRLF ]
+ *( MIME-extension-field CRLF )
+
+ hex-octet := "=" 2(DIGIT / "A" / "B" / "C" / "D" / "E" / "F")
+ ; Octet must be used for characters > 127, =,
+ ; SPACEs or TABs at the ends of lines, and is
+ ; recommended for any character not listed in
+ ; RFC 2049 as "mail-safe".
+
+RFC 2045 Internet Message Bodies November 1996
+
+ must be used. An equal sign as the last character on a
+ encoded line indicates such a non-significant ("soft")
+ line break in the encoded text.
+
+ Thus if the "raw" form of the line is a single unencoded line that
+ says:
+
+ Now's the time for all folk to come to the aid of their country.
+
+ This can be represented, in the Quoted-Printable encoding, as:
+
+ Now's the time =
+ for all folk to come=
+ to the aid of their country.
+
+ Since the hyphen character ("-") may be represented as itself in the
+ Quoted-Printable encoding, care must be taken, when encapsulating a
+ quoted-printable encoded body inside one or more multipart entities,
+ to ensure that the boundary delimiter does not appear anywhere in the
+ encoded body. (A good strategy is to choose a boundary that includes
+ a character sequence such as "=_" which can never appear in a
+ quoted-printable body. See the definition of multipart messages in
+ RFC 2046.)
+
+ !"#$@[\]^`{|}~%
+
+Freed & Borenstein Standards Track [Page 24]
+
+RFC 2045 Internet Message Bodies November 1996
+
+
+ Table 1: The Base64 Alphabet
+
+ Value Encoding Value Encoding Value Encoding Value Encoding
+ 0 A 17 R 34 i 51 z
+ 1 B 18 S 35 j 52 0
+ 2 C 19 T 36 k 53 1
+ 3 D 20 U 37 l 54 2
+ 4 E 21 V 38 m 55 3
+ 5 F 22 W 39 n 56 4
+ 6 G 23 X 40 o 57 5
+ 7 H 24 Y 41 p 58 6
+ 8 I 25 Z 42 q 59 7
+ 9 J 26 a 43 r 60 8
+ 10 K 27 b 44 s 61 9
+ 11 L 28 c 45 t 62 +
+ 12 M 29 d 46 u 63 /
+ 13 N 30 e 47 v
+ 14 O 31 f 48 w (pad) =
+ 15 P 32 g 49 x
+ 16 Q 33 h 50 y
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/hotspot/jtreg/compiler/intrinsics/base64/urlEncode.txt Wed Jun 27 20:21:42 2018 -0700
@@ -0,0 +1,183 @@
+VGhpcyB0ZXN0IGRhdGEgaXMgcGFydCBvZiByZmMyMDQ1IHdoaWNoIGluY2x1ZGVzIGFsbCBjaGFyYWN0ZXJzIGF-eiBBflosIDB-OSBhbmQgYWxsIHN5bWJvbHMs
+SXQgaXMgdXNlZCB0byB0ZXN0IGphdmEudXRpbC5CYXNlNjQuRW5jb2RlciwgYW5kIHdpbGwgYmUgZW5jb2RlZCBieSBvcmcuYXBhY2hlLmNvbW1vbnMuY29kZWMuYmluYXJ5LkJhc2U2NC5qYXZh
+dG8gdGVzdCBqYXZhLnV0aWwuQmFzZTY0LkRlY29kZXI7
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgIFtQYWdlIDFd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+ICAgVGhlc2UgZG9jdW1lbnRzIGFyZSByZXZpc2lvbnMgb2YgUkZDcyAxNTIxLCAxNTIyLCBhbmQgMTU5MCwgd2hpY2g=
+ICAgdGhlbXNlbHZlcyB3ZXJlIHJldmlzaW9ucyBvZiBSRkNzIDEzNDEgYW5kIDEzNDIuICBBbiBhcHBlbmRpeCBpbiBSRkM=
+ICAgMjA0OSBkZXNjcmliZXMgZGlmZmVyZW5jZXMgYW5kIGNoYW5nZXMgZnJvbSBwcmV2aW91cyB2ZXJzaW9ucy4=
+
+VGFibGUgb2YgQ29udGVudHM=
+
+ICAgMS4gSW50cm9kdWN0aW9uIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDM=
+ICAgMi4gRGVmaW5pdGlvbnMsIENvbnZlbnRpb25zLCBhbmQgR2VuZXJpYyBCTkYgR3JhbW1hciAuLi4uICAgIDU=
+ICAgMy4gTUlNRSBIZWFkZXIgRmllbGRzIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDg=
+ICAgNC4gTUlNRS1WZXJzaW9uIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgIDg=
+ICAgNS4gQ29udGVudC1UeXBlIEhlYWRlciBGaWVsZCAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMTA=
+ICAgNi4gQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uICAgMTQ=
+ICAgNy4gQ29udGVudC1JRCBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjY=
+ICAgOC4gQ29udGVudC1EZXNjcmlwdGlvbiBIZWFkZXIgRmllbGQgLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgOS4gQWRkaXRpb25hbCBNSU1FIEhlYWRlciBGaWVsZHMgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTAuIFN1bW1hcnkgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTEuIFNlY3VyaXR5IENvbnNpZGVyYXRpb25zIC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjc=
+ICAgMTIuIEF1dGhvcnMnIEFkZHJlc3NlcyAuLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjg=
+ICAgQS4gQ29sbGVjdGVkIEdyYW1tYXIgLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uICAgMjk=
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgIFtQYWdlIDdd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+My4gIE1JTUUgSGVhZGVyIEZpZWxkcw==
+
+ICAgTUlNRSBkZWZpbmVzIGEgbnVtYmVyIG9mIG5ldyBSRkMgODIyIGhlYWRlciBmaWVsZHMgdGhhdCBhcmUgdXNlZCB0bw==
+ICAgZGVzY3JpYmUgdGhlIGNvbnRlbnQgb2YgYSBNSU1FIGVudGl0eS4gIFRoZXNlIGhlYWRlciBmaWVsZHMgb2NjdXIgaW4=
+ICAgYXQgbGVhc3QgdHdvIGNvbnRleHRzOg==
+
+ICAgICgxKSAgIEFzIHBhcnQgb2YgYSByZWd1bGFyIFJGQyA4MjIgbWVzc2FnZSBoZWFkZXIu
+
+ICAgICgyKSAgIEluIGEgTUlNRSBib2R5IHBhcnQgaGVhZGVyIHdpdGhpbiBhIG11bHRpcGFydA==
+ICAgICAgICAgIGNvbnN0cnVjdC4=
+
+ICAgVGhlIGZvcm1hbCBkZWZpbml0aW9uIG9mIHRoZXNlIGhlYWRlciBmaWVsZHMgaXMgYXMgZm9sbG93czo=
+
+ICAgICBNSU1FLW1lc3NhZ2UtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICBmaWVsZHM=
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICB2ZXJzaW9uIENSTEY=
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9yZWQu
+
+ICAgICBNSU1FLXBhcnQtaGVhZGVycyA6PSBlbnRpdHktaGVhZGVycw==
+ICAgICAgICAgICAgICAgICAgICAgICAgICBbIGZpZWxkcyBd
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IEFueSBmaWVsZCBub3QgYmVnaW5uaW5nIHdpdGg=
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7ICJjb250ZW50LSIgY2FuIGhhdmUgbm8gZGVmaW5lZA==
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IG1lYW5pbmcgYW5kIG1heSBiZSBpZ25vcmVkLg==
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IFRoZSBvcmRlcmluZyBvZiB0aGUgaGVhZGVy
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGZpZWxkcyBpbXBsaWVkIGJ5IHRoaXMgQk5G
+ICAgICAgICAgICAgICAgICAgICAgICAgICA7IGRlZmluaXRpb24gc2hvdWxkIGJlIGlnbm9yZWQu
+
+ICAgVGhlIHN5bnRheCBvZiB0aGUgdmFyaW91cyBzcGVjaWZpYyBNSU1FIGhlYWRlciBmaWVsZHMgd2lsbCBiZQ==
+ICAgZGVzY3JpYmVkIGluIHRoZSBmb2xsb3dpbmcgc2VjdGlvbnMu
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgW1BhZ2UgMTFd
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+NS4xLiAgU3ludGF4IG9mIHRoZSBDb250ZW50LVR5cGUgSGVhZGVyIEZpZWxk
+
+ICAgSW4gdGhlIEF1Z21lbnRlZCBCTkYgbm90YXRpb24gb2YgUkZDIDgyMiwgYSBDb250ZW50LVR5cGUgaGVhZGVyIGZpZWxk
+ICAgdmFsdWUgaXMgZGVmaW5lZCBhcyBmb2xsb3dzOg==
+
+ICAgICBjb250ZW50IDo9ICJDb250ZW50LVR5cGUiICI6IiB0eXBlICIvIiBzdWJ0eXBl
+ICAgICAgICAgICAgICAgICooIjsiIHBhcmFtZXRlcik=
+ICAgICAgICAgICAgICAgIDsgTWF0Y2hpbmcgb2YgbWVkaWEgdHlwZSBhbmQgc3VidHlwZQ==
+ICAgICAgICAgICAgICAgIDsgaXMgQUxXQVlTIGNhc2UtaW5zZW5zaXRpdmUu
+
+ICAgICB0eXBlIDo9IGRpc2NyZXRlLXR5cGUgLyBjb21wb3NpdGUtdHlwZQ==
+
+ICAgICBkaXNjcmV0ZS10eXBlIDo9ICJ0ZXh0IiAvICJpbWFnZSIgLyAiYXVkaW8iIC8gInZpZGVvIiAv
+ICAgICAgICAgICAgICAgICAgICAgICJhcHBsaWNhdGlvbiIgLyBleHRlbnNpb24tdG9rZW4=
+
+ICAgICBjb21wb3NpdGUtdHlwZSA6PSAibWVzc2FnZSIgLyAibXVsdGlwYXJ0IiAvIGV4dGVuc2lvbi10b2tlbg==
+
+ICAgICBleHRlbnNpb24tdG9rZW4gOj0gaWV0Zi10b2tlbiAvIHgtdG9rZW4=
+
+ICAgICBpZXRmLXRva2VuIDo9IDxBbiBleHRlbnNpb24gdG9rZW4gZGVmaW5lZCBieSBh
+ICAgICAgICAgICAgICAgICAgICBzdGFuZGFyZHMtdHJhY2sgUkZDIGFuZCByZWdpc3RlcmVk
+ICAgICAgICAgICAgICAgICAgICB3aXRoIElBTkEuPg==
+
+ICAgICB4LXRva2VuIDo9IDxUaGUgdHdvIGNoYXJhY3RlcnMgIlgtIiBvciAieC0iIGZvbGxvd2VkLCB3aXRo
+ICAgICAgICAgICAgICAgICBubyBpbnRlcnZlbmluZyB3aGl0ZSBzcGFjZSwgYnkgYW55IHRva2VuPg==
+
+ICAgICBzdWJ0eXBlIDo9IGV4dGVuc2lvbi10b2tlbiAvIGlhbmEtdG9rZW4=
+
+ICAgICBpYW5hLXRva2VuIDo9IDxBIHB1YmxpY2x5LWRlZmluZWQgZXh0ZW5zaW9uIHRva2VuLiBUb2tlbnM=
+ICAgICAgICAgICAgICAgICAgICBvZiB0aGlzIGZvcm0gbXVzdCBiZSByZWdpc3RlcmVkIHdpdGggSUFOQQ==
+ICAgICAgICAgICAgICAgICAgICBhcyBzcGVjaWZpZWQgaW4gUkZDIDIwNDguPg==
+
+ICAgICBwYXJhbWV0ZXIgOj0gYXR0cmlidXRlICI9IiB2YWx1ZQ==
+
+ICAgICBhdHRyaWJ1dGUgOj0gdG9rZW4=
+ICAgICAgICAgICAgICAgICAgOyBNYXRjaGluZyBvZiBhdHRyaWJ1dGVz
+ICAgICAgICAgICAgICAgICAgOyBpcyBBTFdBWVMgY2FzZS1pbnNlbnNpdGl2ZS4=
+
+ICAgICB2YWx1ZSA6PSB0b2tlbiAvIHF1b3RlZC1zdHJpbmc=
+
+ICAgICB0b2tlbiA6PSAxKjxhbnkgKFVTLUFTQ0lJKSBDSEFSIGV4Y2VwdCBTUEFDRSwgQ1RMcyw=
+ICAgICAgICAgICAgICAgICBvciB0c3BlY2lhbHM-
+
+ICAgICB0c3BlY2lhbHMgOj0gICIoIiAvICIpIiAvICI8IiAvICI-IiAvICJAIiAv
+ICAgICAgICAgICAgICAgICAgICIsIiAvICI7IiAvICI6IiAvICJcIiAvIDwiPg==
+ICAgICAgICAgICAgICAgICAgICIvIiAvICJbIiAvICJdIiAvICI_IiAvICI9Ig==
+ICAgICAgICAgICAgICAgICAgIDsgTXVzdCBiZSBpbiBxdW90ZWQtc3RyaW5nLA==
+ICAgICAgICAgICAgICAgICAgIDsgdG8gdXNlIHdpdGhpbiBwYXJhbWV0ZXIgdmFsdWVz
+
+ICAgICBkZXNjcmlwdGlvbiA6PSAiQ29udGVudC1EZXNjcmlwdGlvbiIgIjoiICp0ZXh0
+
+ICAgICBlbmNvZGluZyA6PSAiQ29udGVudC1UcmFuc2Zlci1FbmNvZGluZyIgIjoiIG1lY2hhbmlzbQ==
+
+ICAgICBlbnRpdHktaGVhZGVycyA6PSBbIGNvbnRlbnQgQ1JMRiBd
+ICAgICAgICAgICAgICAgICAgICBbIGVuY29kaW5nIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICBbIGlkIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICBbIGRlc2NyaXB0aW9uIENSTEYgXQ==
+ICAgICAgICAgICAgICAgICAgICAqKCBNSU1FLWV4dGVuc2lvbi1maWVsZCBDUkxGICk=
+
+ICAgICBoZXgtb2N0ZXQgOj0gIj0iIDIoRElHSVQgLyAiQSIgLyAiQiIgLyAiQyIgLyAiRCIgLyAiRSIgLyAiRiIp
+ICAgICAgICAgICAgICAgOyBPY3RldCBtdXN0IGJlIHVzZWQgZm9yIGNoYXJhY3RlcnMgPiAxMjcsID0s
+ICAgICAgICAgICAgICAgOyBTUEFDRXMgb3IgVEFCcyBhdCB0aGUgZW5kcyBvZiBsaW5lcywgYW5kIGlz
+ICAgICAgICAgICAgICAgOyByZWNvbW1lbmRlZCBmb3IgYW55IGNoYXJhY3RlciBub3QgbGlzdGVkIGlu
+ICAgICAgICAgICAgICAgOyBSRkMgMjA0OSBhcyAibWFpbC1zYWZlIi4=
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+ICAgICAgICAgIG11c3QgYmUgdXNlZC4gIEFuIGVxdWFsIHNpZ24gYXMgdGhlIGxhc3QgY2hhcmFjdGVyIG9uIGE=
+ICAgICAgICAgIGVuY29kZWQgbGluZSBpbmRpY2F0ZXMgc3VjaCBhIG5vbi1zaWduaWZpY2FudCAoInNvZnQiKQ==
+ICAgICAgICAgIGxpbmUgYnJlYWsgaW4gdGhlIGVuY29kZWQgdGV4dC4=
+
+ICAgVGh1cyBpZiB0aGUgInJhdyIgZm9ybSBvZiB0aGUgbGluZSBpcyBhIHNpbmdsZSB1bmVuY29kZWQgbGluZSB0aGF0
+ICAgc2F5czo=
+
+ICAgICBOb3cncyB0aGUgdGltZSBmb3IgYWxsIGZvbGsgdG8gY29tZSB0byB0aGUgYWlkIG9mIHRoZWlyIGNvdW50cnku
+
+ICAgVGhpcyBjYW4gYmUgcmVwcmVzZW50ZWQsIGluIHRoZSBRdW90ZWQtUHJpbnRhYmxlIGVuY29kaW5nLCBhczo=
+
+ICAgICBOb3cncyB0aGUgdGltZSA9
+ICAgICBmb3IgYWxsIGZvbGsgdG8gY29tZT0=
+ICAgICAgdG8gdGhlIGFpZCBvZiB0aGVpciBjb3VudHJ5Lg==
+
+ICAgU2luY2UgdGhlIGh5cGhlbiBjaGFyYWN0ZXIgKCItIikgbWF5IGJlIHJlcHJlc2VudGVkIGFzIGl0c2VsZiBpbiB0aGU=
+ICAgUXVvdGVkLVByaW50YWJsZSBlbmNvZGluZywgY2FyZSBtdXN0IGJlIHRha2VuLCB3aGVuIGVuY2Fwc3VsYXRpbmcgYQ==
+ICAgcXVvdGVkLXByaW50YWJsZSBlbmNvZGVkIGJvZHkgaW5zaWRlIG9uZSBvciBtb3JlIG11bHRpcGFydCBlbnRpdGllcyw=
+ICAgdG8gZW5zdXJlIHRoYXQgdGhlIGJvdW5kYXJ5IGRlbGltaXRlciBkb2VzIG5vdCBhcHBlYXIgYW55d2hlcmUgaW4gdGhl
+ICAgZW5jb2RlZCBib2R5LiAgKEEgZ29vZCBzdHJhdGVneSBpcyB0byBjaG9vc2UgYSBib3VuZGFyeSB0aGF0IGluY2x1ZGVz
+ICAgYSBjaGFyYWN0ZXIgc2VxdWVuY2Ugc3VjaCBhcyAiPV8iIHdoaWNoIGNhbiBuZXZlciBhcHBlYXIgaW4gYQ==
+ICAgcXVvdGVkLXByaW50YWJsZSBib2R5LiAgU2VlIHRoZSBkZWZpbml0aW9uIG9mIG11bHRpcGFydCBtZXNzYWdlcyBpbg==
+ICAgUkZDIDIwNDYuKQ==
+
+ICAgICAhIiMkQFtcXV5ge3x9fiU=
+
+RnJlZWQgJiBCb3JlbnN0ZWluICAgICAgICAgIFN0YW5kYXJkcyBUcmFjayAgICAgICAgICAgICAgICAgICAgW1BhZ2UgMjRd
+
+UkZDIDIwNDUgICAgICAgICAgICAgICAgSW50ZXJuZXQgTWVzc2FnZSBCb2RpZXMgICAgICAgICAgICBOb3ZlbWJlciAxOTk2
+
+
+ICAgICAgICAgICAgICAgICAgICBUYWJsZSAxOiBUaGUgQmFzZTY0IEFscGhhYmV0
+
+ICAgICBWYWx1ZSBFbmNvZGluZyAgVmFsdWUgRW5jb2RpbmcgIFZhbHVlIEVuY29kaW5nICBWYWx1ZSBFbmNvZGluZw==
+ICAgICAgICAgMCBBICAgICAgICAgICAgMTcgUiAgICAgICAgICAgIDM0IGkgICAgICAgICAgICA1MSB6
+ICAgICAgICAgMSBCICAgICAgICAgICAgMTggUyAgICAgICAgICAgIDM1IGogICAgICAgICAgICA1MiAw
+ICAgICAgICAgMiBDICAgICAgICAgICAgMTkgVCAgICAgICAgICAgIDM2IGsgICAgICAgICAgICA1MyAx
+ICAgICAgICAgMyBEICAgICAgICAgICAgMjAgVSAgICAgICAgICAgIDM3IGwgICAgICAgICAgICA1NCAy
+ICAgICAgICAgNCBFICAgICAgICAgICAgMjEgViAgICAgICAgICAgIDM4IG0gICAgICAgICAgICA1NSAz
+ICAgICAgICAgNSBGICAgICAgICAgICAgMjIgVyAgICAgICAgICAgIDM5IG4gICAgICAgICAgICA1NiA0
+ICAgICAgICAgNiBHICAgICAgICAgICAgMjMgWCAgICAgICAgICAgIDQwIG8gICAgICAgICAgICA1NyA1
+ICAgICAgICAgNyBIICAgICAgICAgICAgMjQgWSAgICAgICAgICAgIDQxIHAgICAgICAgICAgICA1OCA2
+ICAgICAgICAgOCBJICAgICAgICAgICAgMjUgWiAgICAgICAgICAgIDQyIHEgICAgICAgICAgICA1OSA3
+ICAgICAgICAgOSBKICAgICAgICAgICAgMjYgYSAgICAgICAgICAgIDQzIHIgICAgICAgICAgICA2MCA4
+ICAgICAgICAxMCBLICAgICAgICAgICAgMjcgYiAgICAgICAgICAgIDQ0IHMgICAgICAgICAgICA2MSA5
+ICAgICAgICAxMSBMICAgICAgICAgICAgMjggYyAgICAgICAgICAgIDQ1IHQgICAgICAgICAgICA2MiAr
+ICAgICAgICAxMiBNICAgICAgICAgICAgMjkgZCAgICAgICAgICAgIDQ2IHUgICAgICAgICAgICA2MyAv
+ICAgICAgICAxMyBOICAgICAgICAgICAgMzAgZSAgICAgICAgICAgIDQ3IHY=
+ICAgICAgICAxNCBPICAgICAgICAgICAgMzEgZiAgICAgICAgICAgIDQ4IHcgICAgICAgICAocGFkKSA9
+ICAgICAgICAxNSBQICAgICAgICAgICAgMzIgZyAgICAgICAgICAgIDQ5IHg=
+ICAgICAgICAxNiBRICAgICAgICAgICAgMzMgaCAgICAgICAgICAgIDUwIHk=