bpo-44525: Specialize `CALL_FUNCTION` for C function calls (GH-26934)

Author: Ken Jin, 2021-10-20 07:16:36 +08:00 (committed by GitHub)
Parent: 3592980f91
Commit: 3163e68c34
7 changed files with 365 additions and 50 deletions

Include/internal/pycore_code.h

@@ -309,6 +309,7 @@ int _Py_Specialize_LoadMethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *na
int _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, _Py_CODEUNIT *instr);
int _Py_Specialize_BinaryAdd(PyObject *left, PyObject *right, _Py_CODEUNIT *instr);
int _Py_Specialize_BinaryMultiply(PyObject *left, PyObject *right, _Py_CODEUNIT *instr);
int _Py_Specialize_CallFunction(PyObject *callable, _Py_CODEUNIT *instr, int nargs, SpecializedCacheEntry *cache, PyObject *builtins);
#define PRINT_SPECIALIZATION_STATS 0
#define PRINT_SPECIALIZATION_STATS_DETAILED 0

Include/opcode.h (generated)

@@ -148,29 +148,34 @@ extern "C" {
#define BINARY_SUBSCR_LIST_INT 40
#define BINARY_SUBSCR_TUPLE_INT 41
#define BINARY_SUBSCR_DICT 42
#define JUMP_ABSOLUTE_QUICK 43
#define LOAD_ATTR_ADAPTIVE 44
#define LOAD_ATTR_INSTANCE_VALUE 45
#define LOAD_ATTR_WITH_HINT 46
#define LOAD_ATTR_SLOT 47
#define LOAD_ATTR_MODULE 48
#define LOAD_GLOBAL_ADAPTIVE 58
#define LOAD_GLOBAL_MODULE 80
#define LOAD_GLOBAL_BUILTIN 81
#define LOAD_METHOD_ADAPTIVE 87
#define LOAD_METHOD_CACHED 88
#define LOAD_METHOD_CLASS 120
#define LOAD_METHOD_MODULE 122
#define LOAD_METHOD_NO_DICT 123
#define STORE_ATTR_ADAPTIVE 127
#define STORE_ATTR_INSTANCE_VALUE 128
#define STORE_ATTR_SLOT 134
#define STORE_ATTR_WITH_HINT 140
#define LOAD_FAST__LOAD_FAST 143
#define STORE_FAST__LOAD_FAST 149
#define LOAD_FAST__LOAD_CONST 150
#define LOAD_CONST__LOAD_FAST 151
#define STORE_FAST__STORE_FAST 153
#define CALL_FUNCTION_ADAPTIVE 43
#define CALL_FUNCTION_BUILTIN_O 44
#define CALL_FUNCTION_BUILTIN_FAST 45
#define CALL_FUNCTION_LEN 46
#define CALL_FUNCTION_ISINSTANCE 47
#define JUMP_ABSOLUTE_QUICK 48
#define LOAD_ATTR_ADAPTIVE 58
#define LOAD_ATTR_INSTANCE_VALUE 80
#define LOAD_ATTR_WITH_HINT 81
#define LOAD_ATTR_SLOT 87
#define LOAD_ATTR_MODULE 88
#define LOAD_GLOBAL_ADAPTIVE 120
#define LOAD_GLOBAL_MODULE 122
#define LOAD_GLOBAL_BUILTIN 123
#define LOAD_METHOD_ADAPTIVE 127
#define LOAD_METHOD_CACHED 128
#define LOAD_METHOD_CLASS 134
#define LOAD_METHOD_MODULE 140
#define LOAD_METHOD_NO_DICT 143
#define STORE_ATTR_ADAPTIVE 149
#define STORE_ATTR_INSTANCE_VALUE 150
#define STORE_ATTR_SLOT 151
#define STORE_ATTR_WITH_HINT 153
#define LOAD_FAST__LOAD_FAST 154
#define STORE_FAST__LOAD_FAST 158
#define LOAD_FAST__LOAD_CONST 159
#define LOAD_CONST__LOAD_FAST 167
#define STORE_FAST__STORE_FAST 168
#define DO_TRACING 255
#ifdef NEED_OPCODE_JUMP_TABLES
static uint32_t _PyOpcode_RelativeJump[8] = {

Lib/opcode.py

@@ -232,6 +232,11 @@ _specialized_instructions = [
"BINARY_SUBSCR_LIST_INT",
"BINARY_SUBSCR_TUPLE_INT",
"BINARY_SUBSCR_DICT",
"CALL_FUNCTION_ADAPTIVE",
"CALL_FUNCTION_BUILTIN_O",
"CALL_FUNCTION_BUILTIN_FAST",
"CALL_FUNCTION_LEN",
"CALL_FUNCTION_ISINSTANCE",
"JUMP_ABSOLUTE_QUICK",
"LOAD_ATTR_ADAPTIVE",
"LOAD_ATTR_INSTANCE_VALUE",


@@ -0,0 +1,10 @@
Set up initial specialization infrastructure for the ``CALL_FUNCTION`` opcode.
Implemented initial specializations for C function calls:
* ``CALL_FUNCTION_BUILTIN_O`` for ``METH_O`` flag.
* ``CALL_FUNCTION_BUILTIN_FAST`` for ``METH_FASTCALL`` flag without keywords.
* ``CALL_FUNCTION_LEN`` for ``len(o)``.
* ``CALL_FUNCTION_ISINSTANCE`` for ``isinstance(o, t)``.
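For context, these are the kinds of call sites the new instructions target (an illustrative Python sketch; which ``METH_*`` calling convention a given builtin actually uses is an implementation detail of that builtin, so ``ord`` and ``divmod`` below are assumptions chosen as typical ``METH_O`` and ``METH_FASTCALL`` examples):

obj = [1, 2, 3]
len(obj)              # candidate for CALL_FUNCTION_LEN (the exact len object is cached)
isinstance(obj, int)  # candidate for CALL_FUNCTION_ISINSTANCE (the exact isinstance object is cached)
ord("a")              # candidate for CALL_FUNCTION_BUILTIN_O (METH_O: exactly one positional argument)
divmod(7, 3)          # candidate for CALL_FUNCTION_BUILTIN_FAST (METH_FASTCALL without keywords)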

Python/ceval.c

@@ -4660,6 +4660,7 @@ check_eval_breaker:
TARGET(CALL_FUNCTION) {
PREDICTED(CALL_FUNCTION);
STAT_INC(CALL_FUNCTION, unquickened);
PyObject *function;
nargs = oparg;
kwnames = NULL;
@@ -4717,6 +4718,151 @@ check_eval_breaker:
DISPATCH();
}
TARGET(CALL_FUNCTION_ADAPTIVE) {
SpecializedCacheEntry *cache = GET_CACHE();
if (cache->adaptive.counter == 0) {
next_instr--;
int nargs = cache->adaptive.original_oparg;
if (_Py_Specialize_CallFunction(
PEEK(nargs + 1), next_instr, nargs, cache, BUILTINS()) < 0) {
goto error;
}
DISPATCH();
}
else {
STAT_INC(CALL_FUNCTION, deferred);
cache->adaptive.counter--;
oparg = cache->adaptive.original_oparg;
JUMP_TO_INSTRUCTION(CALL_FUNCTION);
}
}
TARGET(CALL_FUNCTION_BUILTIN_O) {
assert(cframe.use_tracing == 0);
/* Builtin METH_O functions */
PyObject *callable = SECOND();
DEOPT_IF(!PyCFunction_CheckExact(callable), CALL_FUNCTION);
DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O, CALL_FUNCTION);
_PyAdaptiveEntry *cache0 = &GET_CACHE()[0].adaptive;
record_cache_hit(cache0);
STAT_INC(CALL_FUNCTION, hit);
PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable);
PyObject *arg = POP();
PyObject *res = cfunc(PyCFunction_GET_SELF(callable), arg);
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Clear the stack of the function object. */
Py_DECREF(arg);
Py_DECREF(callable);
SET_TOP(res);
if (res == NULL) {
goto error;
}
DISPATCH();
}
TARGET(CALL_FUNCTION_BUILTIN_FAST) {
assert(cframe.use_tracing == 0);
/* Builtin METH_FASTCALL functions, without keywords */
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
int nargs = cache0->original_oparg;
PyObject **pfunc = &PEEK(nargs + 1);
PyObject *callable = *pfunc;
DEOPT_IF(!PyCFunction_CheckExact(callable), CALL_FUNCTION);
DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL,
CALL_FUNCTION);
record_cache_hit(cache0);
STAT_INC(CALL_FUNCTION, hit);
PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable);
/* res = func(self, args, nargs) */
PyObject *res = ((_PyCFunctionFast)(void(*)(void))cfunc)(
PyCFunction_GET_SELF(callable),
&PEEK(nargs),
nargs);
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Clear the stack of the function object. */
while (stack_pointer > pfunc) {
PyObject *x = POP();
Py_DECREF(x);
}
PUSH(res);
if (res == NULL) {
/* Not deopting because this doesn't mean our optimization was
wrong. `res` can be NULL for valid reasons. Eg. getattr(x,
'invalid'). In those cases an exception is set, so we must
handle it.
*/
goto error;
}
DISPATCH();
}
TARGET(CALL_FUNCTION_LEN) {
assert(cframe.use_tracing == 0);
/* len(o) */
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyObjectCache *cache1 = &caches[-1].obj;
assert(cache0->original_oparg == 1);
PyObject *callable = SECOND();
DEOPT_IF(callable != cache1->obj, CALL_FUNCTION);
record_cache_hit(cache0);
STAT_INC(CALL_FUNCTION, hit);
Py_ssize_t len_i = PyObject_Length(TOP());
if (len_i < 0) {
goto error;
}
PyObject *res = PyLong_FromSsize_t(len_i);
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Clear the stack of the function object. */
Py_DECREF(POP());
Py_DECREF(callable);
SET_TOP(res);
if (res == NULL) {
goto error;
}
DISPATCH();
}
TARGET(CALL_FUNCTION_ISINSTANCE) {
assert(cframe.use_tracing == 0);
/* isinstance(o, o2) */
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyObjectCache *cache1 = &caches[-1].obj;
assert(cache0->original_oparg == 2);
PyObject *callable = THIRD();
DEOPT_IF(callable != cache1->obj, CALL_FUNCTION);
record_cache_hit(cache0);
STAT_INC(CALL_FUNCTION, hit);
int retval = PyObject_IsInstance(SECOND(), TOP());
if (retval < 0) {
goto error;
}
PyObject *res = PyBool_FromLong(retval);
assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
/* Clear the stack of the function object. */
Py_DECREF(POP());
Py_DECREF(POP());
Py_DECREF(callable);
SET_TOP(res);
if (res == NULL) {
goto error;
}
DISPATCH();
}
TARGET(CALL_FUNCTION_EX) {
PREDICTED(CALL_FUNCTION_EX);
PyObject *func, *callargs, *kwargs = NULL, *result;
@@ -4985,6 +5131,7 @@ MISS_WITH_CACHE(LOAD_ATTR)
MISS_WITH_CACHE(STORE_ATTR)
MISS_WITH_CACHE(LOAD_GLOBAL)
MISS_WITH_CACHE(LOAD_METHOD)
MISS_WITH_CACHE(CALL_FUNCTION)
MISS_WITH_OPARG_COUNTER(BINARY_SUBSCR)
MISS_WITH_OPARG_COUNTER(BINARY_ADD)
MISS_WITH_OPARG_COUNTER(BINARY_MULTIPLY)
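The handlers above cooperate through the per-site cache: CALL_FUNCTION_ADAPTIVE counts down and then asks the specializer to rewrite the instruction, while the specialized forms re-check their guards with DEOPT_IF and fall back to the generic CALL_FUNCTION when a guard fails. A self-contained toy model of that flow (a sketch only; the CallSite class, the warm-up value of 8, and the len-only rule are inventions for illustration, not CPython's actual machinery):

class CallSite:
    # Toy model of one quickened CALL_FUNCTION call site.
    def __init__(self):
        self.counter = 8          # stand-in for the adaptive cache counter
        self.fast_target = None   # stand-in for the cached callable identity

    def call(self, func, *args):
        if self.fast_target is not None:
            # Specialized path: guard on the exact callable, like DEOPT_IF above.
            if func is self.fast_target:
                return func(*args)            # cache hit, fast path
            self.fast_target = None           # guard failed: deopt to the generic path
        elif self.counter == 0:
            if func is len:                   # toy rule: only calls to len() get specialized
                self.fast_target = func
        else:
            self.counter -= 1                 # still deferred
        return func(*args)                    # generic CALL_FUNCTION behaviour

site = CallSite()
for _ in range(20):
    site.call(len, [1, 2, 3])                 # warms up, then takes the fast path
site.call(sorted, [3, 1, 2])                  # different callable at the same site: deopts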

Python/opcode_targets.h

@@ -42,12 +42,12 @@ static void *opcode_targets[256] = {
&&TARGET_BINARY_SUBSCR_LIST_INT,
&&TARGET_BINARY_SUBSCR_TUPLE_INT,
&&TARGET_BINARY_SUBSCR_DICT,
&&TARGET_CALL_FUNCTION_ADAPTIVE,
&&TARGET_CALL_FUNCTION_BUILTIN_O,
&&TARGET_CALL_FUNCTION_BUILTIN_FAST,
&&TARGET_CALL_FUNCTION_LEN,
&&TARGET_CALL_FUNCTION_ISINSTANCE,
&&TARGET_JUMP_ABSOLUTE_QUICK,
&&TARGET_LOAD_ATTR_ADAPTIVE,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_WITH_EXCEPT_START,
&&TARGET_GET_AITER,
&&TARGET_GET_ANEXT,
@@ -57,7 +57,7 @@ static void *opcode_targets[256] = {
&&TARGET_INPLACE_ADD,
&&TARGET_INPLACE_SUBTRACT,
&&TARGET_INPLACE_MULTIPLY,
&&TARGET_LOAD_GLOBAL_ADAPTIVE,
&&TARGET_LOAD_ATTR_ADAPTIVE,
&&TARGET_INPLACE_MODULO,
&&TARGET_STORE_SUBSCR,
&&TARGET_DELETE_SUBSCR,
@@ -79,15 +79,15 @@ static void *opcode_targets[256] = {
&&TARGET_INPLACE_AND,
&&TARGET_INPLACE_XOR,
&&TARGET_INPLACE_OR,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LIST_TO_TUPLE,
&&TARGET_RETURN_VALUE,
&&TARGET_IMPORT_STAR,
&&TARGET_SETUP_ANNOTATIONS,
&&TARGET_YIELD_VALUE,
&&TARGET_LOAD_METHOD_ADAPTIVE,
&&TARGET_LOAD_METHOD_CACHED,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_MODULE,
&&TARGET_POP_EXCEPT,
&&TARGET_STORE_NAME,
&&TARGET_DELETE_NAME,
@@ -119,46 +119,46 @@ static void *opcode_targets[256] = {
&&TARGET_IS_OP,
&&TARGET_CONTAINS_OP,
&&TARGET_RERAISE,
&&TARGET_LOAD_METHOD_CLASS,
&&TARGET_LOAD_GLOBAL_ADAPTIVE,
&&TARGET_JUMP_IF_NOT_EXC_MATCH,
&&TARGET_LOAD_METHOD_MODULE,
&&TARGET_LOAD_METHOD_NO_DICT,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_FAST,
&&TARGET_STORE_FAST,
&&TARGET_DELETE_FAST,
&&TARGET_STORE_ATTR_ADAPTIVE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_METHOD_ADAPTIVE,
&&TARGET_LOAD_METHOD_CACHED,
&&TARGET_GEN_START,
&&TARGET_RAISE_VARARGS,
&&TARGET_CALL_FUNCTION,
&&TARGET_MAKE_FUNCTION,
&&TARGET_BUILD_SLICE,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_LOAD_METHOD_CLASS,
&&TARGET_MAKE_CELL,
&&TARGET_LOAD_CLOSURE,
&&TARGET_LOAD_DEREF,
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_LOAD_METHOD_MODULE,
&&TARGET_CALL_FUNCTION_KW,
&&TARGET_CALL_FUNCTION_EX,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_LOAD_METHOD_NO_DICT,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
&&TARGET_MAP_ADD,
&&TARGET_LOAD_CLASSDEREF,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_STORE_ATTR_ADAPTIVE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_MATCH_CLASS,
&&TARGET_STORE_FAST__STORE_FAST,
&&_unknown_opcode,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_LOAD_METHOD,
&&TARGET_CALL_METHOD,
&&TARGET_LIST_EXTEND,
@@ -166,8 +166,8 @@ static void *opcode_targets[256] = {
&&TARGET_DICT_MERGE,
&&TARGET_DICT_UPDATE,
&&TARGET_CALL_METHOD_KW,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_STORE_FAST__STORE_FAST,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,

Python/specialize.c

@@ -128,6 +128,7 @@ _Py_GetSpecializationStats(void) {
err += add_stat_dict(stats, BINARY_MULTIPLY, "binary_multiply");
err += add_stat_dict(stats, BINARY_SUBSCR, "binary_subscr");
err += add_stat_dict(stats, STORE_ATTR, "store_attr");
err += add_stat_dict(stats, CALL_FUNCTION, "call_function");
if (err < 0) {
Py_DECREF(stats);
return NULL;
@@ -185,6 +186,7 @@ _Py_PrintSpecializationStats(void)
print_stats(out, &_specialization_stats[BINARY_MULTIPLY], "binary_multiply");
print_stats(out, &_specialization_stats[BINARY_SUBSCR], "binary_subscr");
print_stats(out, &_specialization_stats[STORE_ATTR], "store_attr");
print_stats(out, &_specialization_stats[CALL_FUNCTION], "call_function");
if (out != stderr) {
fclose(out);
}
@@ -235,6 +237,7 @@ static uint8_t adaptive_opcodes[256] = {
[BINARY_ADD] = BINARY_ADD_ADAPTIVE,
[BINARY_MULTIPLY] = BINARY_MULTIPLY_ADAPTIVE,
[BINARY_SUBSCR] = BINARY_SUBSCR_ADAPTIVE,
[CALL_FUNCTION] = CALL_FUNCTION_ADAPTIVE,
[STORE_ATTR] = STORE_ATTR_ADAPTIVE,
};
@@ -246,6 +249,7 @@ static uint8_t cache_requirements[256] = {
[BINARY_ADD] = 0,
[BINARY_MULTIPLY] = 0,
[BINARY_SUBSCR] = 0,
[CALL_FUNCTION] = 2, /* _PyAdaptiveEntry and _PyObjectCache */
[STORE_ATTR] = 2, /* _PyAdaptiveEntry and _PyAttrCache */
};
@@ -457,6 +461,15 @@ _Py_Quicken(PyCodeObject *code) {
#define SPEC_FAIL_NON_FUNCTION_SCOPE 11
#define SPEC_FAIL_DIFFERENT_TYPES 12
/* Call function */
#define SPEC_FAIL_PYCFUNCTION 10
#define SPEC_FAIL_PYCFUNCTION_WITH_KEYWORDS 13
#define SPEC_FAIL_PYCFUNCTION_FAST_WITH_KEYWORDS 14
#define SPEC_FAIL_PYCFUNCTION_NOARGS 15
#define SPEC_FAIL_BAD_CALL_FLAGS 16
#define SPEC_FAIL_PYTHON_FUNCTION 17
#define SPEC_FAIL_IMMUTABLE_CLASS 18
static int
specialize_module_load_attr(
@@ -1222,3 +1235,137 @@ success:
assert(!PyErr_Occurred());
return 0;
}
#if COLLECT_SPECIALIZATION_STATS_DETAILED
static int
builtin_call_fail_kind(int ml_flags)
{
switch (ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O |
METH_KEYWORDS | METH_METHOD)) {
case METH_VARARGS:
return SPEC_FAIL_PYCFUNCTION;
case METH_VARARGS | METH_KEYWORDS:
return SPEC_FAIL_PYCFUNCTION_WITH_KEYWORDS;
case METH_FASTCALL | METH_KEYWORDS:
return SPEC_FAIL_PYCFUNCTION_FAST_WITH_KEYWORDS;
case METH_NOARGS:
return SPEC_FAIL_PYCFUNCTION_NOARGS;
/* This case should never happen with PyCFunctionObject -- only
PyMethodObject. See zlib.compressobj()'s methods for an example.
*/
case METH_METHOD | METH_FASTCALL | METH_KEYWORDS:
default:
return SPEC_FAIL_BAD_CALL_FLAGS;
}
}
#endif
static int
specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
SpecializedCacheEntry *cache, PyObject *builtins)
{
_PyObjectCache *cache1 = &cache[-1].obj;
if (PyCFunction_GET_FUNCTION(callable) == NULL) {
return 1;
}
switch (PyCFunction_GET_FLAGS(callable) &
(METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O |
METH_KEYWORDS | METH_METHOD)) {
case METH_O: {
if (nargs != 1) {
SPECIALIZATION_FAIL(CALL_FUNCTION, SPEC_FAIL_OUT_OF_RANGE);
return 1;
}
/* len(o) */
PyObject *builtin_len = PyDict_GetItemString(builtins, "len");
if (callable == builtin_len) {
cache1->obj = builtin_len; // borrowed
*instr = _Py_MAKECODEUNIT(CALL_FUNCTION_LEN,
_Py_OPARG(*instr));
return 0;
}
*instr = _Py_MAKECODEUNIT(CALL_FUNCTION_BUILTIN_O,
_Py_OPARG(*instr));
return 0;
}
case METH_FASTCALL: {
if (nargs == 2) {
/* isinstance(o1, o2) */
PyObject *builtin_isinstance = PyDict_GetItemString(
builtins, "isinstance");
if (callable == builtin_isinstance) {
cache1->obj = builtin_isinstance; // borrowed
*instr = _Py_MAKECODEUNIT(CALL_FUNCTION_ISINSTANCE,
_Py_OPARG(*instr));
return 0;
}
}
*instr = _Py_MAKECODEUNIT(CALL_FUNCTION_BUILTIN_FAST,
_Py_OPARG(*instr));
return 0;
}
default:
SPECIALIZATION_FAIL(CALL_FUNCTION,
builtin_call_fail_kind(PyCFunction_GET_FLAGS(callable)));
return 1;
}
}
#if COLLECT_SPECIALIZATION_STATS_DETAILED
static int
call_fail_kind(PyObject *callable)
{
if (PyFunction_Check(callable)) {
return SPEC_FAIL_PYTHON_FUNCTION;
}
// new-style bound methods
else if (PyInstanceMethod_Check(callable)) {
return SPEC_FAIL_METHOD;
}
else if (PyMethod_Check(callable)) {
return SPEC_FAIL_METHOD;
}
// builtin method
else if (PyCMethod_Check(callable)) {
return SPEC_FAIL_METHOD;
}
else if (PyType_Check(callable)) {
PyTypeObject *type = Py_TYPE(callable);
return PyType_HasFeature(type, Py_TPFLAGS_IMMUTABLETYPE) ?
SPEC_FAIL_IMMUTABLE_CLASS : SPEC_FAIL_MUTABLE_CLASS;
}
return SPEC_FAIL_OTHER;
}
#endif
/* TODO:
- Specialize calling types.
- Specialize python function calls.
*/
int
_Py_Specialize_CallFunction(
PyObject *callable, _Py_CODEUNIT *instr,
int nargs, SpecializedCacheEntry *cache,
PyObject *builtins)
{
int fail;
if (PyCFunction_CheckExact(callable)) {
fail = specialize_c_call(callable, instr, nargs, cache, builtins);
}
else {
SPECIALIZATION_FAIL(CALL_FUNCTION, call_fail_kind(callable));
fail = 1;
}
_PyAdaptiveEntry *cache0 = &cache->adaptive;
if (fail) {
STAT_INC(CALL_FUNCTION, specialization_failure);
assert(!PyErr_Occurred());
cache_backoff(cache0);
}
else {
STAT_INC(CALL_FUNCTION, specialization_success);
assert(!PyErr_Occurred());
cache0->counter = saturating_start();
}
return 0;
}
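Read top to bottom, the specializer first requires an exact PyCFunction, then picks a specialized opcode from the callable's METH_* flags and the argument count, caching the exact len/isinstance object for the identity guards used in ceval.c. A rough Python mirror of that branch structure (a sketch under stated assumptions: specialize_c_call_model and its string results are invented names, and the flags argument stands in for PyCFunction_GET_FLAGS):

import builtins

def specialize_c_call_model(callable_obj, nargs, flags):
    # flags is a stand-in for the METH_* calling-convention flag of the C function.
    if flags == "METH_O":
        if nargs != 1:
            return None                        # SPEC_FAIL_OUT_OF_RANGE
        if callable_obj is builtins.len:
            return "CALL_FUNCTION_LEN"         # cache the exact len object for the guard
        return "CALL_FUNCTION_BUILTIN_O"
    if flags == "METH_FASTCALL":
        if nargs == 2 and callable_obj is builtins.isinstance:
            return "CALL_FUNCTION_ISINSTANCE"  # cache the exact isinstance object
        return "CALL_FUNCTION_BUILTIN_FAST"
    return None                                # other flag combinations are not specialized yet

print(specialize_c_call_model(len, 1, "METH_O"))                # CALL_FUNCTION_LEN
print(specialize_c_call_model(isinstance, 2, "METH_FASTCALL"))  # CALL_FUNCTION_ISINSTANCE
print(specialize_c_call_model(abs, 1, "METH_O"))                # CALL_FUNCTION_BUILTIN_O

On failure the adaptive instruction does not give up permanently: _Py_Specialize_CallFunction applies cache_backoff, so the call site is re-examined once the counter runs down again.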