Implement type restrictions on method arguments in the callee #412

Closed
@@ -1418,6 +1418,16 @@ address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
}
#endif

if (UseTypeRestrictions) {
Label not_restricted;
__ get_method(rscratch1);
__ movzwl(rscratch1, Address(rscratch1, Method::flags_offset()));
__ andl(rscratch1, Method::_type_restrictions);
__ jcc(Assembler::zero, not_restricted);
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::restricted_parameter_checks));
__ bind(not_restricted);
}

// jvmti support
__ notify_method_entry();

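The hunk above makes the template interpreter's normal method entry test the Method::_type_restrictions flag bit and, when it is set, call into the runtime before any bytecode of the callee executes. As a readability aid, here is a plain-C++ sketch of the logic the emitted assembly corresponds to; MethodModel, TYPE_RESTRICTIONS_BIT and the bit position are illustrative placeholders, not HotSpot APIs:

#include <cstdint>
#include <cstdio>

struct MethodModel { uint16_t flags; };               // stands in for the word at Method::flags_offset()
constexpr uint16_t TYPE_RESTRICTIONS_BIT = 1u << 3;   // placeholder for the Method::_type_restrictions mask
bool UseTypeRestrictions = true;                      // models the VM flag of the same name

void restricted_parameter_checks(const MethodModel&) { // models the InterpreterRuntime slow path
  std::puts("checking restricted parameters");
}

void normal_entry_guard(const MethodModel& m) {
  if (!UseTypeRestrictions) return;                    // the whole block is skipped when the flag is off
  if ((m.flags & TYPE_RESTRICTIONS_BIT) == 0) return;  // movzwl + andl + jcc(zero, not_restricted)
  restricted_parameter_checks(m);                      // call_VM(..., restricted_parameter_checks)
}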
@@ -2728,7 +2728,7 @@ void TemplateTable::_return(TosState state) {
Label not_restricted;
__ get_method(rscratch1);
__ movzwl(rscratch1, Address(rscratch1, Method::flags_offset()));
- __ andl(rscratch1, Method::_restricted_method);
+ __ andl(rscratch1, Method::_type_restrictions);
__ jcc(Assembler::zero, not_restricted);
Register robj = LP64_ONLY(c_rarg1) NOT_LP64(rax);
__ movptr(robj, aaddress(0));
@@ -3322,18 +3322,20 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
// Assembler::StoreStore));


- Label notRestricted;
- __ movl(rdx, flags);
- __ shrl(rdx, ConstantPoolCacheEntry::has_restricted_type_shift);
- __ andl(rdx, 0x1);
- __ testl(rdx, rdx);
- __ jcc(Assembler::zero, notRestricted);
-
- __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::check_restricted_type));
- __ get_cache_and_index_at_bcp(cache, index, 1);
- load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
-
- __ bind(notRestricted);
+ if (UseTypeRestrictions) {
+   Label notRestricted;
+   __ movl(rdx, flags);
+   __ shrl(rdx, ConstantPoolCacheEntry::has_restricted_type_shift);
+   __ andl(rdx, 0x1);
+   __ testl(rdx, rdx);
+   __ jcc(Assembler::zero, notRestricted);
+
+   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::check_restricted_type));
+   __ get_cache_and_index_at_bcp(cache, index, 1);
+   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
+
+   __ bind(notRestricted);
+ }

Label notVolatile, Done;
__ movl(rdx, flags);
@@ -3931,19 +3933,6 @@ void TemplateTable::fast_xaccess(TosState state) {
//-----------------------------------------------------------------------------
// Calls

- void TemplateTable::restricted_method_check(Register method) {
-   Label not_restricted;
-   __ movptr(rscratch1, method);
-   __ movzwl(rscratch1, Address(rscratch1, Method::flags_offset()));
-   __ andl(rscratch1, Method::_restricted_method);
-   __ jcc(Assembler::zero, not_restricted);
-   __ restore_bcp();
-   __ push(method);
-   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::restricted_parameter_checks));
-   __ pop(method);
-   __ bind(not_restricted);
- }

void TemplateTable::prepare_invoke(int byte_no,
Register method, // linked method (or i-klass)
Register index, // itable index, MethodType, etc.
@@ -4057,7 +4046,6 @@ void TemplateTable::invokevirtual_helper(Register index,
// profile this call
__ profile_final_call(rax);
__ profile_arguments_type(rax, method, rbcp, true);
- restricted_method_check(method);
__ jump_from_interpreted(method, rax);

__ bind(notFinal);
@@ -4072,7 +4060,6 @@ void TemplateTable::invokevirtual_helper(Register index,
// get target Method* & entry point
__ lookup_virtual_method(rax, index, method);
__ profile_arguments_type(rdx, method, rbcp, true);
- restricted_method_check(method);
__ jump_from_interpreted(method, rdx);
}

@@ -4101,7 +4088,6 @@ void TemplateTable::invokespecial(int byte_no) {
// do the call
__ profile_call(rax);
__ profile_arguments_type(rax, rbx, rbcp, false);
- restricted_method_check(rbx);
__ jump_from_interpreted(rbx, rax);
}

@@ -4112,7 +4098,6 @@ void TemplateTable::invokestatic(int byte_no) {
// do the call
__ profile_call(rax);
__ profile_arguments_type(rax, rbx, rbcp, false);
- restricted_method_check(rbx);
__ jump_from_interpreted(rbx, rax);
}

@@ -4175,7 +4160,6 @@ void TemplateTable::invokeinterface(int byte_no) {

__ profile_final_call(rdx);
__ profile_arguments_type(rdx, rbx, rbcp, true);
- restricted_method_check(rbx);
__ jump_from_interpreted(rbx, rdx);
// no return from above
__ bind(notVFinal);
@@ -4227,8 +4211,6 @@ void TemplateTable::invokeinterface(int byte_no) {

__ profile_arguments_type(rdx, rbx, rbcp, true);

- restricted_method_check(rbx);

// do the call
// rcx: receiver
// rbx,: Method*
@@ -45,6 +45,4 @@

static void invoke_is_substitutable(Register aobj, Register bobj, Label& is_subst, Label& not_subst);

- static void restricted_method_check(Register method);

#endif // CPU_X86_TEMPLATETABLE_X86_HPP
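With restricted_method_check removed from every invoke* path above and the guard added to generate_normal_entry, the test runs once in the callee instead of at each call site, which is what the PR title describes. A schematic C++ sketch of the two placements, using toy types rather than HotSpot code:

struct MethodInfo { bool has_type_restrictions; };
void restricted_parameter_checks(const MethodInfo&) {}
void execute_bytecodes(const MethodInfo&) {}

// Old shape (removed): every dispatch path tested the callee before jumping to it.
void invoke_before(const MethodInfo& callee) {
  if (callee.has_type_restrictions) restricted_parameter_checks(callee);
  execute_bytecodes(callee);
}

// New shape: the callee's own entry performs the test, so the invoke* paths stay unchanged.
void method_entry_after(const MethodInfo& self) {
  if (self.has_type_restrictions) restricted_parameter_checks(self);
  execute_bytecodes(self);
}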
@@ -1656,6 +1656,15 @@ void GraphBuilder::method_return(Value x, bool ignore_return) {
break;
}

if (UseTypeRestrictions) {
if (method()->has_type_restrictions() && method()->restricted_return_value() != NULL) {
CheckCast* c = new CheckCast(method()->restricted_return_value(), x, copy_state_before());
append_split(c);
c->set_incompatible_class_change_check();
c->set_direct_compare(method()->restricted_return_value()->as_instance_klass()->is_final());
}
}

// Check to see whether we are inlining. If so, Return
// instructions become Gotos to the continuation point.
if (continuation() != NULL) {
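The set_direct_compare call above is driven by whether the restricted type is final: for a final class the subtype test the CheckCast performs can collapse to a single klass-pointer comparison instead of a superclass walk. A minimal standalone illustration of that distinction, with a placeholder Klass type rather than the VM's:

struct KlassModel { const KlassModel* super; };

// General subtype test: walk the superclass chain (what a non-final restriction needs).
bool is_subtype_of(const KlassModel* k, const KlassModel* restricted) {
  for (const KlassModel* c = k; c != nullptr; c = c->super)
    if (c == restricted) return true;
  return false;
}

// Final restricted type: no subclasses can exist, so one pointer compare suffices.
// This is the case set_direct_compare(true) asks C1 to emit.
bool passes_final_restriction(const KlassModel* k, const KlassModel* restricted) {
  return k == restricted;
}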
@@ -3051,6 +3060,7 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
}

bool ignore_return = scope_data()->ignore_return();
bool insert_type_restriction_check = method()->has_type_restrictions() && bci == 0;

while (!bailed_out() && last()->as_BlockEnd() == NULL &&
(code = stream()->next()) != ciBytecodeStream::EOBC() &&
@@ -3073,6 +3083,28 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
push_exception = false;
}

if (insert_type_restriction_check) {
ciSignature* sig = method()->signature();
int idx = method()->is_static() ? 0 : 1;
for (int i = 0; i < sig->count(); i++) {
if (method()->restricted_argument_at(i) != NULL) {
ciKlass* restricted_type = method()->restricted_argument_at(i);
CheckCast* c = new CheckCast(restricted_type, state()->local_at(idx), copy_state_before());
append_split(c);
c->set_incompatible_class_change_check();
// restricted types must be primitive classes
assert(restricted_type->as_instance_klass() != NULL, "Sanity check");
assert(restricted_type->as_instance_klass()->is_final(), "Sanity check");
c->set_direct_compare(true);
// Updating argument information
state()->store_local(idx, c);
}
ciType* type = sig->type_at(i);
idx += type->size();
}
insert_type_restriction_check = false;
}

// handle bytecode
switch (code) {
case Bytecodes::_nop : /* nothing to do */ break;
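In the hunk above, the pairing of signature index i with local slot idx follows JVM slot conventions: slot 0 holds the receiver for instance methods, and long/double parameters occupy two slots, so idx advances by type->size() while i advances by one. A small self-contained model of that walk, using a toy type enum instead of the ci* classes:

#include <cstdio>
#include <vector>

enum class BT { Int, Long, Float, Double, Object };
int slot_size(BT t) { return (t == BT::Long || t == BT::Double) ? 2 : 1; }

void walk_parameter_slots(const std::vector<BT>& sig, bool is_static) {
  int idx = is_static ? 0 : 1;                  // receiver sits in slot 0 when non-static
  for (size_t i = 0; i < sig.size(); ++i) {
    std::printf("param %zu lives in local slot %d\n", i, idx);
    idx += slot_size(sig[i]);                   // mirrors idx += type->size() above
  }
}

// e.g. (long, int) on an instance method reports slots 1 and 3.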
@@ -751,6 +751,7 @@ LEAF(Local, Instruction)
bool is_receiver() const { return _is_receiver; }

virtual ciType* declared_type() const { return _declared_type; }
virtual void set_declared_type(ciType* type) { _declared_type = type; }

// generic
virtual void input_values_do(ValueVisitor* f) { /* no values */ }
@@ -94,6 +94,7 @@ class StubAssemblerCodeGenClosure: public Closure {
class Runtime1: public AllStatic {
friend class VMStructs;
friend class ArrayCopyStub;
friend class SharedRuntime;

public:
enum StubID {
@@ -93,6 +93,7 @@ ciMethod::ciMethod(const methodHandle& h_m, ciInstanceKlass* holder) :
_can_be_parsed = true;
_has_reserved_stack_access = h_m->has_reserved_stack_access();
_is_overpass = h_m->is_overpass();
_has_type_restrictions = h_m->has_type_restrictions();
// Lazy fields, filled in on demand. Require allocation.
_code = NULL;
_exception_handlers = NULL;
@@ -151,6 +152,24 @@ ciMethod::ciMethod(const methodHandle& h_m, ciInstanceKlass* holder) :
if (_interpreter_invocation_count == 0)
_interpreter_invocation_count = 1;
_instructions_size = -1;
if (_has_type_restrictions) {
assert(signature()->count() == h_m()->restricted_num_param(), "Must match");
int num_param = h_m()->restricted_num_param();
Arena* arena = CURRENT_ENV->arena();
_restricted_arguments = new (arena) GrowableArray<ciKlass*>(arena, num_param, num_param, NULL);
for (int i = 0; i < num_param; i++) {
if (h_m()->restricted_param_type_at(i) != NULL) {
_restricted_arguments->at_put(i, CURRENT_ENV->get_metadata(h_m()->restricted_param_type_at(i))->as_klass());
} else {
_restricted_arguments->at_put(i, NULL);
}
}
if (h_m()->restricted_return_value() != NULL) {
_restricted_return_value = CURRENT_ENV->get_metadata(h_m()->restricted_return_value())->as_klass();
} else {
_restricted_return_value = NULL;
}
}
#ifdef ASSERT
if (ReplayCompiles) {
ciReplay::initialize(this);
@@ -186,6 +205,7 @@ ciMethod::ciMethod(ciInstanceKlass* holder,
// the holder has the wrong class loader (e.g. invokedynamic call
// sites) so we pass the accessor.
_signature = new (CURRENT_ENV->arena()) ciSignature(accessor, constantPoolHandle(), signature);
_has_type_restrictions = false;
}


@@ -74,6 +74,8 @@ class ciMethod : public ciMetadata {
ciSignature* _signature;
ciMethodData* _method_data;
ciMethodBlocks* _method_blocks;
GrowableArray<ciKlass*>* _restricted_arguments;
ciKlass* _restricted_return_value;

// Code attributes.
int _code_size;
@@ -95,6 +97,7 @@ class ciMethod : public ciMetadata {
bool _can_be_statically_bound;
bool _has_reserved_stack_access;
bool _is_overpass;
bool _has_type_restrictions;

// Lazy fields, filled in on demand
address _code;
@@ -345,6 +348,7 @@ class ciMethod : public ciMetadata {
bool is_default_method() const { return !is_abstract() && !is_private() &&
holder()->is_interface(); }
bool is_overpass () const { check_is_loaded(); return _is_overpass; }
bool has_type_restrictions () const { return _has_type_restrictions; }
bool has_loops () const;
bool has_jsrs () const;
bool is_getter () const;
@@ -381,6 +385,17 @@ class ciMethod : public ciMetadata {
// Support for the inline type calling convention
bool has_scalarized_args() const;
const GrowableArray<SigEntry>* get_sig_cc();

// RestrictedMethod support
int restricted_num_param() const {
return _restricted_arguments->length();
}
ciKlass* restricted_argument_at(int index) const {
return _restricted_arguments->at(index);
}
ciKlass* restricted_return_value() const {
return _restricted_return_value;
}
};

#endif // SHARE_CI_CIMETHOD_HPP
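A hedged usage sketch of the new accessors from a hypothetical compiler-side caller (the caller is illustrative, only the accessors added here are real): restricted_num_param() and restricted_argument_at() should be consulted only after has_type_restrictions() returns true, since _restricted_arguments is populated in the ciMethod constructor only in that case, and a NULL entry or NULL return value means the corresponding position is unrestricted.

// Hypothetical consumer; uses only the accessors added in this change.
void visit_restrictions(ciMethod* m) {
  if (!m->has_type_restrictions()) return;        // array is only allocated for restricted methods
  for (int i = 0; i < m->restricted_num_param(); i++) {
    ciKlass* k = m->restricted_argument_at(i);
    if (k != NULL) {
      // parameter i must hold a subtype of k at runtime
    }
  }
  ciKlass* ret = m->restricted_return_value();    // NULL when the return value is unrestricted
  if (ret != NULL) {
    // the returned reference must be a subtype of ret
  }
}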
@@ -3197,7 +3197,7 @@ Method* ClassFileParser::parse_method(const ClassFileStream* const cfs,

// Copy RestrictedMethod attribute if present
if (has_restricted_method_attribute) {
- m->set_restricted_method(true);
+ m->set_has_type_restrictions(true);
*(m->constMethod()->restricted_num_params_addr()) = restricted_num_params;
*(m->constMethod()->restricted_return_type_index_addr()) = restricted_return_type_index;
u2* cursor = m->constMethod()->restricted_param_type_start();
@@ -536,32 +536,23 @@ JRT_END

JRT_ENTRY(void, InterpreterRuntime::restricted_parameter_checks(JavaThread* thread))
LastFrameAccessor last_frame(thread);
- Method* caller = last_frame.method();
- constantPoolHandle cph(THREAD, caller->constants());
- Method* callee = last_frame.cache_entry()->method_if_resolved(cph);
+ Method* callee = last_frame.method();
assert(callee != NULL, "Something bad happened");
- if (callee->has_restricted_method()) {
+ if (callee->has_type_restrictions()) {
ResourceMark rm(THREAD);
Symbol* signature = callee->signature();
- ArgumentCount args(signature);
- int arg_count = args.size();
- ResourceArea *area = Thread::current()->resource_area();
- int* sizes = NEW_ARENA_ARRAY(area, int, arg_count);
- int i = 0;
+ int arg_idx = 0;
+ int local_idx = callee->is_static() ? 0 : 1;
for (SignatureStream ss(signature); !ss.at_return_type(); ss.next()) {
- sizes[i] = parameter_type_word_count(ss.type());
- i++;
- }
- int tos_idx = (int)last_frame.get_frame().interpreter_frame_expression_stack_size() - 3;
- for (int i = arg_count - 1; i >=0; --i) {
- Klass* k = callee->restricted_param_type_at(i);
+ Klass* k = callee->restricted_param_type_at(arg_idx);
if (k != NULL) {
- oop arg = *(oop*)last_frame.get_frame().interpreter_frame_expression_stack_at(tos_idx);
+ oop arg = *(oop*)last_frame.get_frame().interpreter_frame_local_at(local_idx);
if (!arg->klass()->is_subtype_of(k)) {
THROW(vmSymbols::java_lang_IncompatibleClassChangeError());
}
}
- tos_idx -= sizes[i];
+ local_idx += parameter_type_word_count(ss.type());
+ arg_idx++;
}
}
JRT_END
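The rewritten entry reads the arguments out of the callee frame's local slots, walking forward from slot 0 (or 1 for instance methods), instead of indexing backwards into the caller's expression stack; that is what allows the check to live in the callee. A self-contained model of the loop's slot arithmetic and failure behaviour, with placeholder types, where the C++ exception stands in for THROW(vmSymbols::java_lang_IncompatibleClassChangeError()):

#include <functional>
#include <stdexcept>
#include <vector>

struct OopModel {};
struct ParamModel {
  int words;                                        // parameter_type_word_count: 2 for long/double, 1 otherwise
  std::function<bool(const OopModel&)> restriction; // empty => unrestricted parameter
};

void restricted_parameter_checks_model(const std::vector<ParamModel>& params,
                                       const std::vector<OopModel>& locals,
                                       bool is_static) {
  int local_idx = is_static ? 0 : 1;                // slot 0 is the receiver for instance methods
  for (const ParamModel& p : params) {
    if (p.restriction && !p.restriction(locals[local_idx]))
      throw std::runtime_error("IncompatibleClassChangeError");
    local_idx += p.words;                           // advance by the parameter's word count
  }
}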
@@ -571,7 +562,7 @@ JRT_ENTRY(void, InterpreterRuntime::restricted_return_value_check(JavaThread* th
assert(last_frame.bytecode().code() == Bytecodes::_areturn, "Only areturn should have such checks");
Method* method = last_frame.method();
constantPoolHandle cph(THREAD, method->constants());
if (method->constMethod()->has_restricted_method()) {
if (method->constMethod()->has_type_restrictions()) {
Klass* k = method->restricted_return_value();
if (k != NULL && !obj->klass()->is_subtype_of(k)) {
THROW(vmSymbols::java_lang_IncompatibleClassChangeError());