Allow late lookup for @CriticalNative methods.
Test: Add and enable tests in 178-app-image-native-method
Test: Add and enable tests in jni_compiler_test
Test: Manually step through the new stub in GDB and check
      that backtrace works at various points.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_taimen-userdebug boots.
Test: run-gtests.sh
Test: testrunner.py --target --optimizing
Bug: 112189621
Change-Id: If094e5062acbb99eefa88f2afb4815f93730cb82
vmarko committed Feb 19, 2020
1 parent 99d91d1 commit fa458ac
Showing 52 changed files with 1,717 additions and 535 deletions.
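For context, @CriticalNative (dalvik.annotation.optimization.CriticalNative) marks a static native method that uses only primitive types and receives neither a JNIEnv* nor a jclass argument. Before this change such methods had to be registered explicitly via RegisterNatives(); this change lets the runtime also resolve them lazily through the JNI stub on first call. A minimal sketch of the shape of such a method, with hypothetical class and method names:

#include <jni.h>

// Java side (assumed):
//   class MyNatives {
//     @dalvik.annotation.optimization.CriticalNative
//     static native int add(int a, int b);
//   }
//
// Native side: note there is no JNIEnv* and no jclass parameter. With late
// lookup, the runtime can find this exported symbol on the first call instead
// of requiring an explicit RegisterNatives() call.
extern "C" JNIEXPORT jint JNICALL Java_MyNatives_add(jint a, jint b) {
  return a + b;
}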
69 changes: 49 additions & 20 deletions compiler/jni/jni_compiler_test.cc
@@ -45,10 +45,25 @@ extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_bar(JNIEnv*, jobject, jint
return count + 1;
}

// Note: JNI name mangling "_" -> "_1".
extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_bar_1Fast(JNIEnv*, jobject, jint count) {
return count + 1;
}

extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_sbar(JNIEnv*, jclass, jint count) {
return count + 1;
}

// Note: JNI name mangling "_" -> "_1".
extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_sbar_1Fast(JNIEnv*, jclass, jint count) {
return count + 1;
}

// Note: JNI name mangling "_" -> "_1".
extern "C" JNIEXPORT jint JNICALL Java_MyClassNatives_sbar_1Critical(jint count) {
return count + 1;
}
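
For reference, the mangling rule assumed by the "_1" symbol names above, per the JNI specification: the C symbol is Java_<class>_<method>, with "/" package separators mapped to "_" and each "_" in a Java name escaped as "_1". A hypothetical example:

// Java method MyClassNatives.sbar_Critical(int) is looked up as the C symbol
//   Java_MyClassNatives_sbar_1Critical
// An overloaded method would additionally get "__" plus its mangled argument
// signature appended (the JNI "long" symbol name).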

// TODO: In the Baker read barrier configuration, add checks to ensure
// the Marking Register's value is correct.

@@ -71,6 +86,11 @@ static bool IsCurrentJniCritical() {
return gCurrentJni == static_cast<uint32_t>(JniKind::kCritical);
}

// Is the current native method under test @FastNative?
static bool IsCurrentJniFast() {
return gCurrentJni == static_cast<uint32_t>(JniKind::kFast);
}

// Is the current native method a plain-old non-annotated native?
static bool IsCurrentJniNormal() {
return gCurrentJni == static_cast<uint32_t>(JniKind::kNormal);
@@ -352,6 +372,7 @@ class JniCompilerTest : public CommonCompilerTest {
void MaxParamNumberImpl();
void WithoutImplementationImpl();
void WithoutImplementationRefReturnImpl();
void StaticWithoutImplementationImpl();
void StackArgsIntsFirstImpl();
void StackArgsFloatsFirstImpl();
void StackArgsMixedImpl();
@@ -373,9 +394,7 @@ jobject JniCompilerTest::class_loader_;

// Test the normal compiler and normal generic JNI only.
// The following features are unsupported in @FastNative:
// 1) JNI stubs (lookup via dlsym) when methods aren't explicitly registered
// 2) synchronized keyword
// -- TODO: We can support (1) if we remove the mutator lock assert during stub lookup.
// 1) synchronized keyword
# define JNI_TEST_NORMAL_ONLY(TestName) \
TEST_F(JniCompilerTest, TestName ## NormalCompiler) { \
ScopedCheckHandleScope top_handle_scope_check; \
@@ -612,8 +631,8 @@ struct make_jni_test_decorator&lt;R(JNIEnv*, jobject, Args...), fn&gt; {
#define NORMAL_JNI_ONLY_NOWRAP(func) \
({ ASSERT_TRUE(IsCurrentJniNormal()); reinterpret_cast<void*>(&(func)); })
// Same as above, but with nullptr, when we want to test the stub functionality.
#define NORMAL_JNI_ONLY_NULLPTR \
({ ASSERT_TRUE(IsCurrentJniNormal()); nullptr; })
#define NORMAL_OR_FAST_JNI_ONLY_NULLPTR \
({ ASSERT_TRUE(IsCurrentJniNormal() || IsCurrentJniFast()); nullptr; })


int gJava_MyClassNatives_foo_calls[kJniKindCount] = {};
@@ -636,8 +655,8 @@ void JniCompilerTest::CompileAndRunNoArgMethodImpl() {
JNI_TEST(CompileAndRunNoArgMethod)

void JniCompilerTest::CompileAndRunIntMethodThroughStubImpl() {
SetUpForTest(false, "bar", "(I)I", NORMAL_JNI_ONLY_NULLPTR);
// calling through stub will link with &Java_MyClassNatives_bar
SetUpForTest(false, "bar", "(I)I", NORMAL_OR_FAST_JNI_ONLY_NULLPTR);
// calling through stub will link with &Java_MyClassNatives_bar{,_1Fast}

std::string reason;
ASSERT_TRUE(Runtime::Current()->GetJavaVM()->
@@ -648,12 +667,12 @@ void JniCompilerTest::CompileAndRunIntMethodThroughStubImpl() {
EXPECT_EQ(25, result);
}

// TODO: Support @FastNative and @CriticalNative through stubs.
JNI_TEST_NORMAL_ONLY(CompileAndRunIntMethodThroughStub)
// Note: @CriticalNative is only for static methods.
JNI_TEST(CompileAndRunIntMethodThroughStub)

void JniCompilerTest::CompileAndRunStaticIntMethodThroughStubImpl() {
SetUpForTest(true, "sbar", "(I)I", NORMAL_JNI_ONLY_NULLPTR);
// calling through stub will link with &Java_MyClassNatives_sbar
SetUpForTest(true, "sbar", "(I)I", nullptr);
// calling through stub will link with &Java_MyClassNatives_sbar{,_1Fast,_1Critical}

std::string reason;
ASSERT_TRUE(Runtime::Current()->GetJavaVM()->
@@ -664,8 +683,7 @@ void JniCompilerTest::CompileAndRunStaticIntMethodThroughStubImpl() {
EXPECT_EQ(43, result);
}

// TODO: Support @FastNative and @CriticalNative through stubs.
JNI_TEST_NORMAL_ONLY(CompileAndRunStaticIntMethodThroughStub)
JNI_TEST_CRITICAL(CompileAndRunStaticIntMethodThroughStub)

int gJava_MyClassNatives_fooI_calls[kJniKindCount] = {};
jint Java_MyClassNatives_fooI(JNIEnv*, jobject, jint x) {
@@ -1894,17 +1912,15 @@ void JniCompilerTest::WithoutImplementationImpl() {
// This will lead to error messages in the log.
ScopedLogSeverity sls(LogSeverity::FATAL);

SetUpForTest(false, "withoutImplementation", "()V", NORMAL_JNI_ONLY_NULLPTR);
SetUpForTest(false, "withoutImplementation", "()V", NORMAL_OR_FAST_JNI_ONLY_NULLPTR);

env_->CallVoidMethod(jobj_, jmethod_);

EXPECT_TRUE(Thread::Current()->IsExceptionPending());
EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE);
}

// TODO: Don't test @FastNative here since it goes through a stub lookup (unsupported) which would
// normally fail with an exception, but fails with an assert.
JNI_TEST_NORMAL_ONLY(WithoutImplementation)
JNI_TEST(WithoutImplementation)

void JniCompilerTest::WithoutImplementationRefReturnImpl() {
// This will lead to error messages in the log.
@@ -1913,16 +1929,29 @@ void JniCompilerTest::WithoutImplementationRefReturnImpl() {
SetUpForTest(false,
"withoutImplementationRefReturn",
"()Ljava/lang/Object;",
NORMAL_JNI_ONLY_NULLPTR);
NORMAL_OR_FAST_JNI_ONLY_NULLPTR);

env_->CallObjectMethod(jobj_, jmethod_);

EXPECT_TRUE(Thread::Current()->IsExceptionPending());
EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE);
}

// TODO: Should work for @FastNative too.
JNI_TEST_NORMAL_ONLY(WithoutImplementationRefReturn)
JNI_TEST(WithoutImplementationRefReturn)

void JniCompilerTest::StaticWithoutImplementationImpl() {
// This will lead to error messages in the log.
ScopedLogSeverity sls(LogSeverity::FATAL);

SetUpForTest(true, "staticWithoutImplementation", "()V", nullptr);

env_->CallStaticVoidMethod(jklass_, jmethod_);

EXPECT_TRUE(Thread::Current()->IsExceptionPending());
EXPECT_TRUE(env_->ExceptionCheck() == JNI_TRUE);
}

JNI_TEST_CRITICAL(StaticWithoutImplementation)

void Java_MyClassNatives_stackArgsIntsFirst(JNIEnv*, jclass, jint i1, jint i2, jint i3,
jint i4, jint i5, jint i6, jint i7, jint i8, jint i9,
13 changes: 7 additions & 6 deletions compiler/jni/quick/arm/calling_convention_arm.cc
@@ -18,6 +18,7 @@

#include <android-base/logging.h>

#include "arch/arm/jni_frame_arm.h"
#include "arch/instruction_set.h"
#include "base/macros.h"
#include "handle_scope-inl.h"
@@ -38,7 +39,7 @@ static const Register kJniArgumentRegisters[] = {
R0, R1, R2, R3
};

static const size_t kJniArgumentRegisterCount = arraysize(kJniArgumentRegisters);
static_assert(kJniArgumentRegisterCount == arraysize(kJniArgumentRegisters));

//
// Managed calling convention constants.
@@ -121,10 +122,6 @@ static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&cal
static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);

// The AAPCS requires 8-byte alignment. This is not as strict as the Managed ABI stack alignment.
static constexpr size_t kAapcsStackAlignment = 8u;
static_assert(kAapcsStackAlignment < kStackAlignment);

static constexpr ManagedRegister kAapcsCalleeSaveRegisters[] = {
// Core registers.
ArmManagedRegister::FromCoreRegister(R4),
@@ -448,7 +445,11 @@ size_t ArmJniCallingConvention::OutArgSize() const {
if (is_critical_native_ && (size != 0u || GetShorty()[0] == 'F' || GetShorty()[0] == 'D')) {
size += kFramePointerSize; // We need to spill LR with the args.
}
return RoundUp(size, kAapcsStackAlignment);
size_t out_args_size = RoundUp(size, kAapcsStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
DCHECK_EQ(out_args_size, GetCriticalNativeOutArgsSize(GetShorty(), NumArgs() + 1u));
}
return out_args_size;
}
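
A back-of-the-envelope check of the sizing logic above; this sketch assumes 4-byte stack slots, kFramePointerSize == 4, and that size counts only the args that do not fit in R0-R3:

// Hypothetical @CriticalNative method taking six 32-bit int args:
//   - R0-R3 carry the first four args; the last two go on the stack: size = 8.
//   - size != 0u, so LR is spilled with the args: size = 8 + 4 = 12.
//   - RoundUp(12, kAapcsStackAlignment) = 16 bytes of out args.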

ArrayRef<const ManagedRegister> ArmJniCallingConvention::CalleeSaveRegisters() const {
2 changes: 0 additions & 2 deletions compiler/jni/quick/arm/calling_convention_arm.h
@@ -23,8 +23,6 @@
namespace art {
namespace arm {

constexpr size_t kFramePointerSize = static_cast<size_t>(PointerSize::k32);

class ArmManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
ArmManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
22 changes: 10 additions & 12 deletions compiler/jni/quick/arm64/calling_convention_arm64.cc
@@ -18,6 +18,7 @@

#include <android-base/logging.h>

#include "arch/arm64/jni_frame_arm64.h"
#include "arch/instruction_set.h"
#include "handle_scope-inl.h"
#include "utils/arm64/managed_register_arm64.h"
@@ -27,28 +28,25 @@ namespace arm64 {

static_assert(kArm64PointerSize == PointerSize::k64, "Unexpected ARM64 pointer size");

// Up to how many float-like (float, double) args can be enregistered.
// The rest of the args must go on the stack.
constexpr size_t kMaxFloatOrDoubleRegisterArguments = 8u;
// Up to how many integer-like (pointers, objects, longs, int, short, bool, etc) args can be
// enregistered. The rest of the args must go on the stack.
constexpr size_t kMaxIntLikeRegisterArguments = 8u;

static const XRegister kXArgumentRegisters[] = {
X0, X1, X2, X3, X4, X5, X6, X7
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kXArgumentRegisters));

static const WRegister kWArgumentRegisters[] = {
W0, W1, W2, W3, W4, W5, W6, W7
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kWArgumentRegisters));

static const DRegister kDArgumentRegisters[] = {
D0, D1, D2, D3, D4, D5, D6, D7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kDArgumentRegisters));

static const SRegister kSArgumentRegisters[] = {
S0, S1, S2, S3, S4, S5, S6, S7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kSArgumentRegisters));

static constexpr ManagedRegister kCalleeSaveRegisters[] = {
// Core registers.
@@ -114,10 +112,6 @@ static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&cal
static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);

// The AAPCS64 requires 16-byte alignment. This is the same as the Managed ABI stack alignment.
static constexpr size_t kAapcs64StackAlignment = 16u;
static_assert(kAapcs64StackAlignment == kStackAlignment);

static constexpr ManagedRegister kAapcs64CalleeSaveRegisters[] = {
// Core registers.
Arm64ManagedRegister::FromXRegister(X19),
@@ -334,7 +328,11 @@ size_t Arm64JniCallingConvention::OutArgSize() const {
if (is_critical_native_ && (size != 0u || RequiresSmallResultTypeExtension())) {
size += kFramePointerSize; // We need to spill LR with the args.
}
return RoundUp(size, kStackAlignment);
size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
if (UNLIKELY(IsCriticalNative())) {
DCHECK_EQ(out_args_size, GetCriticalNativeOutArgsSize(GetShorty(), NumArgs() + 1u));
}
return out_args_size;
}

ArrayRef<const ManagedRegister> Arm64JniCallingConvention::CalleeSaveRegisters() const {
2 changes: 0 additions & 2 deletions compiler/jni/quick/arm64/calling_convention_arm64.h
@@ -23,8 +23,6 @@
namespace art {
namespace arm64 {

constexpr size_t kFramePointerSize = static_cast<size_t>(PointerSize::k64);

class Arm64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
Arm64ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)
16 changes: 9 additions & 7 deletions compiler/jni/quick/x86/calling_convention_x86.cc
@@ -19,6 +19,7 @@
#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "arch/x86/jni_frame_x86.h"
#include "handle_scope-inl.h"
#include "utils/x86/managed_register_x86.h"

@@ -51,9 +52,6 @@ static constexpr uint32_t CalculateCoreCalleeSpillMask(
static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = 0u;

static constexpr size_t kNativeStackAlignment = 16; // IA-32 cdecl requires 16 byte alignment.
static_assert(kNativeStackAlignment == kStackAlignment);

static constexpr ManagedRegister kNativeCalleeSaveRegisters[] = {
// Core registers.
X86ManagedRegister::FromCpuRegister(EBX),
@@ -268,8 +266,8 @@ size_t X86JniCallingConvention::OutArgSize() const {
static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u);
static_assert((kFpCalleeSpillMask & ~kNativeFpCalleeSpillMask) == 0u);

if (is_critical_native_) {
// Add return address size for @CriticalNative
if (UNLIKELY(IsCriticalNative())) {
// Add return address size for @CriticalNative.
// For normal native the return PC is part of the managed stack frame instead of out args.
size += kFramePointerSize;
// For @CriticalNative, we can make a tail call if there are no stack args
Expand All @@ -281,13 +279,17 @@ size_t X86JniCallingConvention::OutArgSize() const {
GetShorty()[0] != 'F' && GetShorty()[0] != 'D' && !RequiresSmallResultTypeExtension());
if (return_type_ok && size == kFramePointerSize) {
// Note: This is not aligned to kNativeStackAlignment but that's OK for tail call.
DCHECK_EQ(size, kFramePointerSize);
static_assert(kFramePointerSize < kNativeStackAlignment);
DCHECK_EQ(kFramePointerSize, GetCriticalNativeOutArgsSize(GetShorty(), NumArgs() + 1u));
return kFramePointerSize;
}
}

return RoundUp(size, kNativeStackAlignment);
size_t out_args_size = RoundUp(size, kNativeStackAlignment);
if (UNLIKELY(IsCriticalNative())) {
DCHECK_EQ(out_args_size, GetCriticalNativeOutArgsSize(GetShorty(), NumArgs() + 1u));
}
return out_args_size;
}
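
A concrete walk-through of the tail-call fast path above, assuming kFramePointerSize == 4, kNativeStackAlignment == 16, and size == 0 for a method with no stack args:

// Hypothetical no-arg @CriticalNative method returning jint:
//   - size == 0: there are no stack args to pass.
//   - Adding the return address gives size = kFramePointerSize = 4.
//   - A jint return needs no FP register move and no small-type extension,
//     so return_type_ok holds and the early return yields 4 bytes, skipping
//     the RoundUp to 16 and allowing the stub to tail-call the native code.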

ArrayRef<const ManagedRegister> X86JniCallingConvention::CalleeSaveRegisters() const {
2 changes: 0 additions & 2 deletions compiler/jni/quick/x86/calling_convention_x86.h
@@ -23,8 +23,6 @@
namespace art {
namespace x86 {

constexpr size_t kFramePointerSize = static_cast<size_t>(PointerSize::k32);

class X86ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
public:
X86ManagedRuntimeCallingConvention(bool is_static, bool is_synchronized, const char* shorty)