#Import GREQUESTS (async requests) to python x32 and x64 #Get/Set GlobalDict value #IntegrationOrchestrator tool
parent
145d8eb9cc
commit
fa8c5b0f2d
Binary file not shown.
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,26 @@
|
||||
|
||||
Except when otherwise stated (look for LICENSE files in directories or
|
||||
information at the beginning of each file) all software and
|
||||
documentation is licensed as follows:
|
||||
|
||||
The MIT License
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
|
@ -0,0 +1,36 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: cffi
|
||||
Version: 1.12.3
|
||||
Summary: Foreign Function Interface for Python calling C code.
|
||||
Home-page: http://cffi.readthedocs.org
|
||||
Author: Armin Rigo, Maciej Fijalkowski
|
||||
Author-email: python-cffi@googlegroups.com
|
||||
License: MIT
|
||||
Platform: UNKNOWN
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.6
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.2
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Requires-Dist: pycparser
|
||||
|
||||
|
||||
CFFI
|
||||
====
|
||||
|
||||
Foreign Function Interface for Python calling C code.
|
||||
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
|
||||
|
||||
Contact
|
||||
-------
|
||||
|
||||
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
|
||||
|
||||
|
@ -0,0 +1,44 @@
|
||||
_cffi_backend.cp37-win32.pyd,sha256=LbpnOkcB1o-4UFT2SiLEwknE-4x7oLjK6Dg7vMn412I,139264
|
||||
cffi-1.12.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
cffi-1.12.3.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294
|
||||
cffi-1.12.3.dist-info/METADATA,sha256=OA_DlKzbYB72gWWz0R2ds_RJzTIzvFx5cnHf5NXTRuo,1140
|
||||
cffi-1.12.3.dist-info/RECORD,,
|
||||
cffi-1.12.3.dist-info/WHEEL,sha256=u4Efs1ohefs6i6rm-BSBkNuQzwmF2Y4Na3ETsbtk2VM,102
|
||||
cffi-1.12.3.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
|
||||
cffi-1.12.3.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
|
||||
cffi/__init__.py,sha256=XPx-ySmw7OmYmr-7iXd3YoXhXj1HQLHYviMKpmAuWLc,513
|
||||
cffi/__pycache__/__init__.cpython-37.pyc,,
|
||||
cffi/__pycache__/api.cpython-37.pyc,,
|
||||
cffi/__pycache__/backend_ctypes.cpython-37.pyc,,
|
||||
cffi/__pycache__/cffi_opcode.cpython-37.pyc,,
|
||||
cffi/__pycache__/commontypes.cpython-37.pyc,,
|
||||
cffi/__pycache__/cparser.cpython-37.pyc,,
|
||||
cffi/__pycache__/error.cpython-37.pyc,,
|
||||
cffi/__pycache__/ffiplatform.cpython-37.pyc,,
|
||||
cffi/__pycache__/lock.cpython-37.pyc,,
|
||||
cffi/__pycache__/model.cpython-37.pyc,,
|
||||
cffi/__pycache__/pkgconfig.cpython-37.pyc,,
|
||||
cffi/__pycache__/recompiler.cpython-37.pyc,,
|
||||
cffi/__pycache__/setuptools_ext.cpython-37.pyc,,
|
||||
cffi/__pycache__/vengine_cpy.cpython-37.pyc,,
|
||||
cffi/__pycache__/vengine_gen.cpython-37.pyc,,
|
||||
cffi/__pycache__/verifier.cpython-37.pyc,,
|
||||
cffi/_cffi_errors.h,sha256=6nFQ-4dRQI1bXRoSeqdvyKU33TmutQJB_2fAhWSzdl8,3856
|
||||
cffi/_cffi_include.h,sha256=JuFfmwpRE65vym3Nxr9vDMOIEuv21tXdarkL1l2WNms,12149
|
||||
cffi/_embedding.h,sha256=PuNkRzXjURiRh7tXzVdIn0RD9pTJx04ZokHbcEO_3OY,17226
|
||||
cffi/api.py,sha256=Q07iwDD0FRwWa2fx2ZzQft69iJs9aNR52fvrtUy3EY4,41800
|
||||
cffi/backend_ctypes.py,sha256=_WkpD1SJel5gJovV-0u8hw-XvD3Efapqm9pIAEHTHn4,42449
|
||||
cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724
|
||||
cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689
|
||||
cffi/cparser.py,sha256=dcVqrRob1zqrCO--RZ6e-TtobJ7VMDpCU85W6QJ-N-4,40874
|
||||
cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
|
||||
cffi/ffiplatform.py,sha256=HMXqR8ks2wtdsNxGaWpQ_PyqIvtiuos_vf1qKCy-cwg,4046
|
||||
cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
|
||||
cffi/model.py,sha256=AYyjS26uiFKXtkm43qmStpy9zfGh5HVJF4UETYFBt6w,21682
|
||||
cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
|
||||
cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
|
||||
cffi/recompiler.py,sha256=LGqj7GPuq4KIG4axrN5G0Oy6YGmrLbBA0bHE-jCl6Oo,62711
|
||||
cffi/setuptools_ext.py,sha256=qc6arfrSzm4RNT5oJz6d5td7KJ-pHfI7bqYD0X4Q-08,8848
|
||||
cffi/vengine_cpy.py,sha256=hdyjjZNijLrg_uGMnnFyC-7GG_LxWtwB8BlS2vvVDQ0,41470
|
||||
cffi/vengine_gen.py,sha256=Zkq0-EdeZwn6qUvf_CI8iUEs2UxVIvDmKCH1j0-y0GI,26676
|
||||
cffi/verifier.py,sha256=J9Enz2rbJb9CHPqWlWQ5uQESoyr0uc7MNWugchjXBv4,11207
|
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.33.1)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp37-cp37m-win32
|
||||
|
@ -0,0 +1,3 @@
|
||||
[distutils.setup_keywords]
|
||||
cffi_modules = cffi.setuptools_ext:cffi_modules
|
||||
|
@ -0,0 +1,2 @@
|
||||
_cffi_backend
|
||||
cffi
|
@ -0,0 +1,14 @@
|
||||
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
||||
'FFIError']
|
||||
|
||||
from .api import FFI
|
||||
from .error import CDefError, FFIError, VerificationError, VerificationMissing
|
||||
from .error import PkgConfigError
|
||||
|
||||
__version__ = "1.12.3"
|
||||
__version_info__ = (1, 12, 3)
|
||||
|
||||
# The verifier module file names are based on the CRC32 of a string that
|
||||
# contains the following version number. It may be older than __version__
|
||||
# if nothing is clearly incompatible.
|
||||
__version_verifier_modules__ = "0.8.6"
|
@ -0,0 +1,147 @@
|
||||
#ifndef CFFI_MESSAGEBOX
|
||||
# ifdef _MSC_VER
|
||||
# define CFFI_MESSAGEBOX 1
|
||||
# else
|
||||
# define CFFI_MESSAGEBOX 0
|
||||
# endif
|
||||
#endif
|
||||
|
||||
|
||||
#if CFFI_MESSAGEBOX
|
||||
/* Windows only: logic to take the Python-CFFI embedding logic
|
||||
initialization errors and display them in a background thread
|
||||
with MessageBox. The idea is that if the whole program closes
|
||||
as a result of this problem, then likely it is already a console
|
||||
program and you can read the stderr output in the console too.
|
||||
If it is not a console program, then it will likely show its own
|
||||
dialog to complain, or generally not abruptly close, and for this
|
||||
case the background thread should stay alive.
|
||||
*/
|
||||
static void *volatile _cffi_bootstrap_text;
|
||||
|
||||
static PyObject *_cffi_start_error_capture(void)
|
||||
{
|
||||
PyObject *result = NULL;
|
||||
PyObject *x, *m, *bi;
|
||||
|
||||
if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
|
||||
(void *)1, NULL) != NULL)
|
||||
return (PyObject *)1;
|
||||
|
||||
m = PyImport_AddModule("_cffi_error_capture");
|
||||
if (m == NULL)
|
||||
goto error;
|
||||
|
||||
result = PyModule_GetDict(m);
|
||||
if (result == NULL)
|
||||
goto error;
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
bi = PyImport_ImportModule("builtins");
|
||||
#else
|
||||
bi = PyImport_ImportModule("__builtin__");
|
||||
#endif
|
||||
if (bi == NULL)
|
||||
goto error;
|
||||
PyDict_SetItemString(result, "__builtins__", bi);
|
||||
Py_DECREF(bi);
|
||||
|
||||
x = PyRun_String(
|
||||
"import sys\n"
|
||||
"class FileLike:\n"
|
||||
" def write(self, x):\n"
|
||||
" try:\n"
|
||||
" of.write(x)\n"
|
||||
" except: pass\n"
|
||||
" self.buf += x\n"
|
||||
"fl = FileLike()\n"
|
||||
"fl.buf = ''\n"
|
||||
"of = sys.stderr\n"
|
||||
"sys.stderr = fl\n"
|
||||
"def done():\n"
|
||||
" sys.stderr = of\n"
|
||||
" return fl.buf\n", /* make sure the returned value stays alive */
|
||||
Py_file_input,
|
||||
result, result);
|
||||
Py_XDECREF(x);
|
||||
|
||||
error:
|
||||
if (PyErr_Occurred())
|
||||
{
|
||||
PyErr_WriteUnraisable(Py_None);
|
||||
PyErr_Clear();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
#pragma comment(lib, "user32.lib")
|
||||
|
||||
static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
|
||||
{
|
||||
Sleep(666); /* may be interrupted if the whole process is closing */
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
|
||||
L"Python-CFFI error",
|
||||
MB_OK | MB_ICONERROR);
|
||||
#else
|
||||
MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
|
||||
"Python-CFFI error",
|
||||
MB_OK | MB_ICONERROR);
|
||||
#endif
|
||||
_cffi_bootstrap_text = NULL;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void _cffi_stop_error_capture(PyObject *ecap)
|
||||
{
|
||||
PyObject *s;
|
||||
void *text;
|
||||
|
||||
if (ecap == (PyObject *)1)
|
||||
return;
|
||||
|
||||
if (ecap == NULL)
|
||||
goto error;
|
||||
|
||||
s = PyRun_String("done()", Py_eval_input, ecap, ecap);
|
||||
if (s == NULL)
|
||||
goto error;
|
||||
|
||||
/* Show a dialog box, but in a background thread, and
|
||||
never show multiple dialog boxes at once. */
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
text = PyUnicode_AsWideCharString(s, NULL);
|
||||
#else
|
||||
text = PyString_AsString(s);
|
||||
#endif
|
||||
|
||||
_cffi_bootstrap_text = text;
|
||||
|
||||
if (text != NULL)
|
||||
{
|
||||
HANDLE h;
|
||||
h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
|
||||
NULL, 0, NULL);
|
||||
if (h != NULL)
|
||||
CloseHandle(h);
|
||||
}
|
||||
/* decref the string, but it should stay alive as 'fl.buf'
|
||||
in the small module above. It will really be freed only if
|
||||
we later get another similar error. So it's a leak of at
|
||||
most one copy of the small module. That's fine for this
|
||||
situation which is usually a "fatal error" anyway. */
|
||||
Py_DECREF(s);
|
||||
PyErr_Clear();
|
||||
return;
|
||||
|
||||
error:
|
||||
_cffi_bootstrap_text = NULL;
|
||||
PyErr_Clear();
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
static PyObject *_cffi_start_error_capture(void) { return NULL; }
|
||||
static void _cffi_stop_error_capture(PyObject *ecap) { }
|
||||
|
||||
#endif
|
@ -0,0 +1,308 @@
|
||||
#define _CFFI_
|
||||
|
||||
/* We try to define Py_LIMITED_API before including Python.h.
|
||||
|
||||
Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
|
||||
Py_REF_DEBUG are not defined. This is a best-effort approximation:
|
||||
we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
|
||||
the same works for the other two macros. Py_DEBUG implies them,
|
||||
but not the other way around.
|
||||
|
||||
Issue #350 is still open: on Windows, the code here causes it to link
|
||||
with PYTHON36.DLL (for example) instead of PYTHON3.DLL. A fix was
|
||||
attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
|
||||
does not make PYTHON3.DLL available, and so the "correctly" compiled
|
||||
version would not run inside a virtualenv. We will re-apply the fix
|
||||
after virtualenv has been fixed for some time. For explanation, see
|
||||
issue #355. For a workaround if you want PYTHON3.DLL and don't worry
|
||||
about virtualenv, see issue #350. See also 'py_limited_api' in
|
||||
setuptools_ext.py.
|
||||
*/
|
||||
#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
|
||||
# include <pyconfig.h>
|
||||
# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
|
||||
# define Py_LIMITED_API
|
||||
# endif
|
||||
#endif
|
||||
|
||||
#include <Python.h>
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
#include <stddef.h>
|
||||
#include "parse_c_type.h"
|
||||
|
||||
/* this block of #ifs should be kept exactly identical between
|
||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||
and cffi/_cffi_include.h */
|
||||
#if defined(_MSC_VER)
|
||||
# include <malloc.h> /* for alloca() */
|
||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||
typedef __int8 int8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
typedef __int8 int_least8_t;
|
||||
typedef __int16 int_least16_t;
|
||||
typedef __int32 int_least32_t;
|
||||
typedef __int64 int_least64_t;
|
||||
typedef unsigned __int8 uint_least8_t;
|
||||
typedef unsigned __int16 uint_least16_t;
|
||||
typedef unsigned __int32 uint_least32_t;
|
||||
typedef unsigned __int64 uint_least64_t;
|
||||
typedef __int8 int_fast8_t;
|
||||
typedef __int16 int_fast16_t;
|
||||
typedef __int32 int_fast32_t;
|
||||
typedef __int64 int_fast64_t;
|
||||
typedef unsigned __int8 uint_fast8_t;
|
||||
typedef unsigned __int16 uint_fast16_t;
|
||||
typedef unsigned __int32 uint_fast32_t;
|
||||
typedef unsigned __int64 uint_fast64_t;
|
||||
typedef __int64 intmax_t;
|
||||
typedef unsigned __int64 uintmax_t;
|
||||
# else
|
||||
# include <stdint.h>
|
||||
# endif
|
||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||
# ifndef __cplusplus
|
||||
typedef unsigned char _Bool;
|
||||
# endif
|
||||
# endif
|
||||
#else
|
||||
# include <stdint.h>
|
||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||
# include <alloca.h>
|
||||
# endif
|
||||
#endif
|
||||
|
||||
#ifdef __GNUC__
|
||||
# define _CFFI_UNUSED_FN __attribute__((unused))
|
||||
#else
|
||||
# define _CFFI_UNUSED_FN /* nothing */
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
# ifndef _Bool
|
||||
typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/********** CPython-specific section **********/
|
||||
#ifndef PYPY_VERSION
|
||||
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
# define PyInt_FromLong PyLong_FromLong
|
||||
#endif
|
||||
|
||||
#define _cffi_from_c_double PyFloat_FromDouble
|
||||
#define _cffi_from_c_float PyFloat_FromDouble
|
||||
#define _cffi_from_c_long PyInt_FromLong
|
||||
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
|
||||
#define _cffi_from_c_longlong PyLong_FromLongLong
|
||||
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
|
||||
#define _cffi_from_c__Bool PyBool_FromLong
|
||||
|
||||
#define _cffi_to_c_double PyFloat_AsDouble
|
||||
#define _cffi_to_c_float PyFloat_AsDouble
|
||||
|
||||
#define _cffi_from_c_int(x, type) \
|
||||
(((type)-1) > 0 ? /* unsigned */ \
|
||||
(sizeof(type) < sizeof(long) ? \
|
||||
PyInt_FromLong((long)x) : \
|
||||
sizeof(type) == sizeof(long) ? \
|
||||
PyLong_FromUnsignedLong((unsigned long)x) : \
|
||||
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
|
||||
(sizeof(type) <= sizeof(long) ? \
|
||||
PyInt_FromLong((long)x) : \
|
||||
PyLong_FromLongLong((long long)x)))
|
||||
|
||||
#define _cffi_to_c_int(o, type) \
|
||||
((type)( \
|
||||
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
|
||||
: (type)_cffi_to_c_i8(o)) : \
|
||||
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
|
||||
: (type)_cffi_to_c_i16(o)) : \
|
||||
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
|
||||
: (type)_cffi_to_c_i32(o)) : \
|
||||
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
|
||||
: (type)_cffi_to_c_i64(o)) : \
|
||||
(Py_FatalError("unsupported size for type " #type), (type)0)))
|
||||
|
||||
#define _cffi_to_c_i8 \
|
||||
((int(*)(PyObject *))_cffi_exports[1])
|
||||
#define _cffi_to_c_u8 \
|
||||
((int(*)(PyObject *))_cffi_exports[2])
|
||||
#define _cffi_to_c_i16 \
|
||||
((int(*)(PyObject *))_cffi_exports[3])
|
||||
#define _cffi_to_c_u16 \
|
||||
((int(*)(PyObject *))_cffi_exports[4])
|
||||
#define _cffi_to_c_i32 \
|
||||
((int(*)(PyObject *))_cffi_exports[5])
|
||||
#define _cffi_to_c_u32 \
|
||||
((unsigned int(*)(PyObject *))_cffi_exports[6])
|
||||
#define _cffi_to_c_i64 \
|
||||
((long long(*)(PyObject *))_cffi_exports[7])
|
||||
#define _cffi_to_c_u64 \
|
||||
((unsigned long long(*)(PyObject *))_cffi_exports[8])
|
||||
#define _cffi_to_c_char \
|
||||
((int(*)(PyObject *))_cffi_exports[9])
|
||||
#define _cffi_from_c_pointer \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
|
||||
#define _cffi_to_c_pointer \
|
||||
((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
|
||||
#define _cffi_get_struct_layout \
|
||||
not used any more
|
||||
#define _cffi_restore_errno \
|
||||
((void(*)(void))_cffi_exports[13])
|
||||
#define _cffi_save_errno \
|
||||
((void(*)(void))_cffi_exports[14])
|
||||
#define _cffi_from_c_char \
|
||||
((PyObject *(*)(char))_cffi_exports[15])
|
||||
#define _cffi_from_c_deref \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
|
||||
#define _cffi_to_c \
|
||||
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
|
||||
#define _cffi_from_c_struct \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
|
||||
#define _cffi_to_c_wchar_t \
|
||||
((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
|
||||
#define _cffi_from_c_wchar_t \
|
||||
((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
|
||||
#define _cffi_to_c_long_double \
|
||||
((long double(*)(PyObject *))_cffi_exports[21])
|
||||
#define _cffi_to_c__Bool \
|
||||
((_Bool(*)(PyObject *))_cffi_exports[22])
|
||||
#define _cffi_prepare_pointer_call_argument \
|
||||
((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
|
||||
PyObject *, char **))_cffi_exports[23])
|
||||
#define _cffi_convert_array_from_object \
|
||||
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
|
||||
#define _CFFI_CPIDX 25
|
||||
#define _cffi_call_python \
|
||||
((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
|
||||
#define _cffi_to_c_wchar3216_t \
|
||||
((int(*)(PyObject *))_cffi_exports[26])
|
||||
#define _cffi_from_c_wchar3216_t \
|
||||
((PyObject *(*)(int))_cffi_exports[27])
|
||||
#define _CFFI_NUM_EXPORTS 28
|
||||
|
||||
struct _cffi_ctypedescr;
|
||||
|
||||
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
|
||||
|
||||
#define _cffi_type(index) ( \
|
||||
assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
|
||||
(struct _cffi_ctypedescr *)_cffi_types[index])
|
||||
|
||||
static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
|
||||
const struct _cffi_type_context_s *ctx)
|
||||
{
|
||||
PyObject *module, *o_arg, *new_module;
|
||||
void *raw[] = {
|
||||
(void *)module_name,
|
||||
(void *)version,
|
||||
(void *)_cffi_exports,
|
||||
(void *)ctx,
|
||||
};
|
||||
|
||||
module = PyImport_ImportModule("_cffi_backend");
|
||||
if (module == NULL)
|
||||
goto failure;
|
||||
|
||||
o_arg = PyLong_FromVoidPtr((void *)raw);
|
||||
if (o_arg == NULL)
|
||||
goto failure;
|
||||
|
||||
new_module = PyObject_CallMethod(
|
||||
module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
|
||||
|
||||
Py_DECREF(o_arg);
|
||||
Py_DECREF(module);
|
||||
return new_module;
|
||||
|
||||
failure:
|
||||
Py_XDECREF(module);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
#ifdef HAVE_WCHAR_H
|
||||
typedef wchar_t _cffi_wchar_t;
|
||||
#else
|
||||
typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
|
||||
#endif
|
||||
|
||||
_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 2)
|
||||
return (uint16_t)_cffi_to_c_wchar_t(o);
|
||||
else
|
||||
return (uint16_t)_cffi_to_c_wchar3216_t(o);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 2)
|
||||
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
|
||||
else
|
||||
return _cffi_from_c_wchar3216_t((int)x);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 4)
|
||||
return (int)_cffi_to_c_wchar_t(o);
|
||||
else
|
||||
return (int)_cffi_to_c_wchar3216_t(o);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 4)
|
||||
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
|
||||
else
|
||||
return _cffi_from_c_wchar3216_t(x);
|
||||
}
|
||||
|
||||
|
||||
/********** end CPython-specific section **********/
|
||||
#else
|
||||
_CFFI_UNUSED_FN
|
||||
static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
|
||||
# define _cffi_call_python _cffi_call_python_org
|
||||
#endif
|
||||
|
||||
|
||||
#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
|
||||
|
||||
#define _cffi_prim_int(size, sign) \
|
||||
((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
|
||||
(size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
|
||||
(size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
|
||||
(size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
|
||||
_CFFI__UNKNOWN_PRIM)
|
||||
|
||||
#define _cffi_prim_float(size) \
|
||||
((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
|
||||
(size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
|
||||
(size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
|
||||
_CFFI__UNKNOWN_FLOAT_PRIM)
|
||||
|
||||
#define _cffi_check_int(got, got_nonpos, expected) \
|
||||
((got_nonpos) == (expected <= 0) && \
|
||||
(got) == (unsigned long long)expected)
|
||||
|
||||
#ifdef MS_WIN32
|
||||
# define _cffi_stdcall __stdcall
|
||||
#else
|
||||
# define _cffi_stdcall /* nothing */
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
@ -0,0 +1,518 @@
|
||||
|
||||
/***** Support code for embedding *****/
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
|
||||
#if defined(_WIN32)
|
||||
# define CFFI_DLLEXPORT __declspec(dllexport)
|
||||
#elif defined(__GNUC__)
|
||||
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
|
||||
#else
|
||||
# define CFFI_DLLEXPORT /* nothing */
|
||||
#endif
|
||||
|
||||
|
||||
/* There are two global variables of type _cffi_call_python_fnptr:
|
||||
|
||||
* _cffi_call_python, which we declare just below, is the one called
|
||||
by ``extern "Python"`` implementations.
|
||||
|
||||
* _cffi_call_python_org, which on CPython is actually part of the
|
||||
_cffi_exports[] array, is the function pointer copied from
|
||||
_cffi_backend.
|
||||
|
||||
After initialization is complete, both are equal. However, the
|
||||
first one remains equal to &_cffi_start_and_call_python until the
|
||||
very end of initialization, when we are (or should be) sure that
|
||||
concurrent threads also see a completely initialized world, and
|
||||
only then is it changed.
|
||||
*/
|
||||
#undef _cffi_call_python
|
||||
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
|
||||
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
|
||||
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
|
||||
|
||||
|
||||
#ifndef _MSC_VER
|
||||
/* --- Assuming a GCC not infinitely old --- */
|
||||
# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
|
||||
# define cffi_write_barrier() __sync_synchronize()
|
||||
# if !defined(__amd64__) && !defined(__x86_64__) && \
|
||||
!defined(__i386__) && !defined(__i386)
|
||||
# define cffi_read_barrier() __sync_synchronize()
|
||||
# else
|
||||
# define cffi_read_barrier() (void)0
|
||||
# endif
|
||||
#else
|
||||
/* --- Windows threads version --- */
|
||||
# include <Windows.h>
|
||||
# define cffi_compare_and_swap(l,o,n) \
|
||||
(InterlockedCompareExchangePointer(l,n,o) == (o))
|
||||
# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
|
||||
# define cffi_read_barrier() (void)0
|
||||
static volatile LONG _cffi_dummy;
|
||||
#endif
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
# ifndef _MSC_VER
|
||||
# include <pthread.h>
|
||||
static pthread_mutex_t _cffi_embed_startup_lock;
|
||||
# else
|
||||
static CRITICAL_SECTION _cffi_embed_startup_lock;
|
||||
# endif
|
||||
static char _cffi_embed_startup_lock_ready = 0;
|
||||
#endif
|
||||
|
||||
static void _cffi_acquire_reentrant_mutex(void)
|
||||
{
|
||||
static void *volatile lock = NULL;
|
||||
|
||||
while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: pthread_mutex_init() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
if (!_cffi_embed_startup_lock_ready) {
|
||||
# ifndef _MSC_VER
|
||||
pthread_mutexattr_t attr;
|
||||
pthread_mutexattr_init(&attr);
|
||||
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
|
||||
pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
|
||||
# else
|
||||
InitializeCriticalSection(&_cffi_embed_startup_lock);
|
||||
# endif
|
||||
_cffi_embed_startup_lock_ready = 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
|
||||
;
|
||||
|
||||
#ifndef _MSC_VER
|
||||
pthread_mutex_lock(&_cffi_embed_startup_lock);
|
||||
#else
|
||||
EnterCriticalSection(&_cffi_embed_startup_lock);
|
||||
#endif
|
||||
}
|
||||
|
||||
static void _cffi_release_reentrant_mutex(void)
|
||||
{
|
||||
#ifndef _MSC_VER
|
||||
pthread_mutex_unlock(&_cffi_embed_startup_lock);
|
||||
#else
|
||||
LeaveCriticalSection(&_cffi_embed_startup_lock);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
/********** CPython-specific section **********/
|
||||
#ifndef PYPY_VERSION
|
||||
|
||||
#include "_cffi_errors.h"
|
||||
|
||||
|
||||
#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
|
||||
|
||||
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
|
||||
|
||||
static void _cffi_py_initialize(void)
|
||||
{
|
||||
/* XXX use initsigs=0, which "skips initialization registration of
|
||||
signal handlers, which might be useful when Python is
|
||||
embedded" according to the Python docs. But review and think
|
||||
if it should be a user-controllable setting.
|
||||
|
||||
XXX we should also give a way to write errors to a buffer
|
||||
instead of to stderr.
|
||||
|
||||
XXX if importing 'site' fails, CPython (any version) calls
|
||||
exit(). Should we try to work around this behavior here?
|
||||
*/
|
||||
Py_InitializeEx(0);
|
||||
}
|
||||
|
||||
static int _cffi_initialize_python(void)
|
||||
{
|
||||
/* This initializes Python, imports _cffi_backend, and then the
|
||||
present .dll/.so is set up as a CPython C extension module.
|
||||
*/
|
||||
int result;
|
||||
PyGILState_STATE state;
|
||||
PyObject *pycode=NULL, *global_dict=NULL, *x;
|
||||
PyObject *builtins;
|
||||
|
||||
state = PyGILState_Ensure();
|
||||
|
||||
/* Call the initxxx() function from the present module. It will
|
||||
create and initialize us as a CPython extension module, instead
|
||||
of letting the startup Python code do it---it might reimport
|
||||
the same .dll/.so and get maybe confused on some platforms.
|
||||
It might also have troubles locating the .dll/.so again for all
|
||||
I know.
|
||||
*/
|
||||
(void)_CFFI_PYTHON_STARTUP_FUNC();
|
||||
if (PyErr_Occurred())
|
||||
goto error;
|
||||
|
||||
/* Now run the Python code provided to ffi.embedding_init_code().
|
||||
*/
|
||||
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
|
||||
"<init code for '" _CFFI_MODULE_NAME "'>",
|
||||
Py_file_input);
|
||||
if (pycode == NULL)
|
||||
goto error;
|
||||
global_dict = PyDict_New();
|
||||
if (global_dict == NULL)
|
||||
goto error;
|
||||
builtins = PyEval_GetBuiltins();
|
||||
if (builtins == NULL)
|
||||
goto error;
|
||||
if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
|
||||
goto error;
|
||||
x = PyEval_EvalCode(
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
(PyCodeObject *)
|
||||
#endif
|
||||
pycode, global_dict, global_dict);
|
||||
if (x == NULL)
|
||||
goto error;
|
||||
Py_DECREF(x);
|
||||
|
||||
/* Done! Now if we've been called from
|
||||
_cffi_start_and_call_python() in an ``extern "Python"``, we can
|
||||
only hope that the Python code did correctly set up the
|
||||
corresponding @ffi.def_extern() function. Otherwise, the
|
||||
general logic of ``extern "Python"`` functions (inside the
|
||||
_cffi_backend module) will find that the reference is still
|
||||
missing and print an error.
|
||||
*/
|
||||
result = 0;
|
||||
done:
|
||||
Py_XDECREF(pycode);
|
||||
Py_XDECREF(global_dict);
|
||||
PyGILState_Release(state);
|
||||
return result;
|
||||
|
||||
error:;
|
||||
{
|
||||
/* Print as much information as potentially useful.
|
||||
Debugging load-time failures with embedding is not fun
|
||||
*/
|
||||
PyObject *ecap;
|
||||
PyObject *exception, *v, *tb, *f, *modules, *mod;
|
||||
PyErr_Fetch(&exception, &v, &tb);
|
||||
ecap = _cffi_start_error_capture();
|
||||
f = PySys_GetObject((char *)"stderr");
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString(
|
||||
"Failed to initialize the Python-CFFI embedding logic:\n\n", f);
|
||||
}
|
||||
|
||||
if (exception != NULL) {
|
||||
PyErr_NormalizeException(&exception, &v, &tb);
|
||||
PyErr_Display(exception, v, tb);
|
||||
}
|
||||
Py_XDECREF(exception);
|
||||
Py_XDECREF(v);
|
||||
Py_XDECREF(tb);
|
||||
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
|
||||
"\ncompiled with cffi version: 1.12.3"
|
||||
"\n_cffi_backend module: ", f);
|
||||
modules = PyImport_GetModuleDict();
|
||||
mod = PyDict_GetItemString(modules, "_cffi_backend");
|
||||
if (mod == NULL) {
|
||||
PyFile_WriteString("not loaded", f);
|
||||
}
|
||||
else {
|
||||
v = PyObject_GetAttrString(mod, "__file__");
|
||||
PyFile_WriteObject(v, f, 0);
|
||||
Py_XDECREF(v);
|
||||
}
|
||||
PyFile_WriteString("\nsys.path: ", f);
|
||||
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
|
||||
PyFile_WriteString("\n\n", f);
|
||||
}
|
||||
_cffi_stop_error_capture(ecap);
|
||||
}
|
||||
result = -1;
|
||||
goto done;
|
||||
}
|
||||
|
||||
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
/* This does the basic initialization of Python. It can be called
|
||||
completely concurrently from unrelated threads. It assumes
|
||||
that we don't hold the GIL before (if it exists), and we don't
|
||||
hold it afterwards.
|
||||
|
||||
(What it really does used to be completely different in Python 2
|
||||
and Python 3, with the Python 2 solution avoiding the spin-lock
|
||||
around the Py_InitializeEx() call. However, after recent changes
|
||||
to CPython 2.7 (issue #358) it no longer works. So we use the
|
||||
Python 3 solution everywhere.)
|
||||
|
||||
This initializes Python by calling Py_InitializeEx().
|
||||
Important: this must not be called concurrently at all.
|
||||
So we use a global variable as a simple spin lock. This global
|
||||
variable must be from 'libpythonX.Y.so', not from this
|
||||
cffi-based extension module, because it must be shared from
|
||||
different cffi-based extension modules.
|
||||
|
||||
In Python < 3.8, we choose
|
||||
_PyParser_TokenNames[0] as a completely arbitrary pointer value
|
||||
that is never written to. The default is to point to the
|
||||
string "ENDMARKER". We change it temporarily to point to the
|
||||
next character in that string. (Yes, I know it's REALLY
|
||||
obscure.)
|
||||
|
||||
In Python >= 3.8, this string array is no longer writable, so
|
||||
instead we pick PyCapsuleType.tp_version_tag. We can't change
|
||||
Python < 3.8 because someone might use a mixture of cffi
|
||||
embedded modules, some of which were compiled before this file
|
||||
changed.
|
||||
*/
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
# if PY_VERSION_HEX < 0x03080000
|
||||
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
|
||||
char *old_value, *locked_value;
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
locked_value = old_value + 1;
|
||||
if (old_value[0] == 'E') {
|
||||
assert(old_value[1] == 'N');
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
assert(old_value[0] == 'N');
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# else
|
||||
int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
|
||||
int old_value, locked_value;
|
||||
assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
locked_value = -42;
|
||||
if (old_value == 0) {
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
assert(old_value == locked_value);
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/* call Py_InitializeEx() */
|
||||
{
|
||||
PyGILState_STATE state = PyGILState_UNLOCKED;
|
||||
if (!Py_IsInitialized())
|
||||
_cffi_py_initialize();
|
||||
else
|
||||
state = PyGILState_Ensure();
|
||||
|
||||
PyEval_InitThreads();
|
||||
PyGILState_Release(state);
|
||||
}
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
/* release the lock */
|
||||
while (!cffi_compare_and_swap(lock, locked_value, old_value))
|
||||
;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/********** end CPython-specific section **********/
|
||||
|
||||
|
||||
#else
|
||||
|
||||
|
||||
/********** PyPy-specific section **********/
|
||||
|
||||
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
|
||||
|
||||
static struct _cffi_pypy_init_s {
|
||||
const char *name;
|
||||
void (*func)(const void *[]);
|
||||
const char *code;
|
||||
} _cffi_pypy_init = {
|
||||
_CFFI_MODULE_NAME,
|
||||
(void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
|
||||
_CFFI_PYTHON_STARTUP_CODE,
|
||||
};
|
||||
|
||||
extern int pypy_carefully_make_gil(const char *);
|
||||
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
|
||||
}
|
||||
|
||||
static int _cffi_initialize_python(void)
|
||||
{
|
||||
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
|
||||
}
|
||||
|
||||
/********** end PyPy-specific section **********/
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef __GNUC__
|
||||
__attribute__((noinline))
|
||||
#endif
|
||||
static _cffi_call_python_fnptr _cffi_start_python(void)
|
||||
{
|
||||
/* Delicate logic to initialize Python. This function can be
|
||||
called multiple times concurrently, e.g. when the process calls
|
||||
its first ``extern "Python"`` functions in multiple threads at
|
||||
once. It can also be called recursively, in which case we must
|
||||
ignore it. We also have to consider what occurs if several
|
||||
different cffi-based extensions reach this code in parallel
|
||||
threads---it is a different copy of the code, then, and we
|
||||
can't have any shared global variable unless it comes from
|
||||
'libpythonX.Y.so'.
|
||||
|
||||
Idea:
|
||||
|
||||
* _cffi_carefully_make_gil(): "carefully" call
|
||||
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
|
||||
|
||||
* then we use a (local) custom lock to make sure that a call to this
|
||||
cffi-based extension will wait if another call to the *same*
|
||||
extension is running the initialization in another thread.
|
||||
It is reentrant, so that a recursive call will not block, but
|
||||
only one from a different thread.
|
||||
|
||||
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
|
||||
At this point, concurrent calls to Py_InitializeEx() are not
|
||||
possible: we have the GIL.
|
||||
|
||||
* do the rest of the specific initialization, which may
|
||||
temporarily release the GIL but not the custom lock.
|
||||
Only release the custom lock when we are done.
|
||||
*/
|
||||
static char called = 0;
|
||||
|
||||
if (_cffi_carefully_make_gil() != 0)
|
||||
return NULL;
|
||||
|
||||
_cffi_acquire_reentrant_mutex();
|
||||
|
||||
/* Here the GIL exists, but we don't have it. We're only protected
|
||||
from concurrency by the reentrant mutex. */
|
||||
|
||||
/* This file only initializes the embedded module once, the first
|
||||
time this is called, even if there are subinterpreters. */
|
||||
if (!called) {
|
||||
called = 1; /* invoke _cffi_initialize_python() only once,
|
||||
but don't set '_cffi_call_python' right now,
|
||||
otherwise concurrent threads won't call
|
||||
this function at all (we need them to wait) */
|
||||
if (_cffi_initialize_python() == 0) {
|
||||
/* now initialization is finished. Switch to the fast-path. */
|
||||
|
||||
/* We would like nobody to see the new value of
|
||||
'_cffi_call_python' without also seeing the rest of the
|
||||
data initialized. However, this is not possible. But
|
||||
the new value of '_cffi_call_python' is the function
|
||||
'cffi_call_python()' from _cffi_backend. So: */
|
||||
cffi_write_barrier();
|
||||
/* ^^^ we put a write barrier here, and a corresponding
|
||||
read barrier at the start of cffi_call_python(). This
|
||||
ensures that after that read barrier, we see everything
|
||||
done here before the write barrier.
|
||||
*/
|
||||
|
||||
assert(_cffi_call_python_org != NULL);
|
||||
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
else {
|
||||
/* initialization failed. Reset this to NULL, even if it was
|
||||
already set to some other value. Future calls to
|
||||
_cffi_start_python() are still forced to occur, and will
|
||||
always return NULL from now on. */
|
||||
_cffi_call_python_org = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
_cffi_release_reentrant_mutex();
|
||||
|
||||
return (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
|
||||
static
|
||||
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
|
||||
{
|
||||
_cffi_call_python_fnptr fnptr;
|
||||
int current_err = errno;
|
||||
#ifdef _MSC_VER
|
||||
int current_lasterr = GetLastError();
|
||||
#endif
|
||||
fnptr = _cffi_start_python();
|
||||
if (fnptr == NULL) {
|
||||
fprintf(stderr, "function %s() called, but initialization code "
|
||||
"failed. Returning 0.\n", externpy->name);
|
||||
memset(args, 0, externpy->size_of_result);
|
||||
}
|
||||
#ifdef _MSC_VER
|
||||
SetLastError(current_lasterr);
|
||||
#endif
|
||||
errno = current_err;
|
||||
|
||||
if (fnptr != NULL)
|
||||
fnptr(externpy, args);
|
||||
}
|
||||
|
||||
|
||||
/* The cffi_start_python() function makes sure Python is initialized
|
||||
and our cffi module is set up. It can be called manually from the
|
||||
user C code. The same effect is obtained automatically from any
|
||||
dll-exported ``extern "Python"`` function. This function returns
|
||||
-1 if initialization failed, 0 if all is OK. */
|
||||
_CFFI_UNUSED_FN
|
||||
static int cffi_start_python(void)
|
||||
{
|
||||
if (_cffi_call_python == &_cffi_start_and_call_python) {
|
||||
if (_cffi_start_python() == NULL)
|
||||
return -1;
|
||||
}
|
||||
cffi_read_barrier();
|
||||
return 0;
|
||||
}
|
||||
|
||||
#undef cffi_compare_and_swap
|
||||
#undef cffi_write_barrier
|
||||
#undef cffi_read_barrier
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
@ -0,0 +1,961 @@
|
||||
import sys, types
|
||||
from .lock import allocate_lock
|
||||
from .error import CDefError
|
||||
from . import model
|
||||
|
||||
try:
|
||||
callable
|
||||
except NameError:
|
||||
# Python 3.1
|
||||
from collections import Callable
|
||||
callable = lambda x: isinstance(x, Callable)
|
||||
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
# Python 3.x
|
||||
basestring = str
|
||||
|
||||
_unspecified = object()
|
||||
|
||||
|
||||
|
||||
class FFI(object):
|
||||
r'''
|
||||
The main top-level class that you instantiate once, or once per module.
|
||||
|
||||
Example usage:
|
||||
|
||||
ffi = FFI()
|
||||
ffi.cdef("""
|
||||
int printf(const char *, ...);
|
||||
""")
|
||||
|
||||
C = ffi.dlopen(None) # standard library
|
||||
-or-
|
||||
C = ffi.verify() # use a C compiler: verify the decl above is right
|
||||
|
||||
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
||||
'''
|
||||
|
||||
def __init__(self, backend=None):
|
||||
"""Create an FFI instance. The 'backend' argument is used to
|
||||
select a non-default backend, mostly for tests.
|
||||
"""
|
||||
if backend is None:
|
||||
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
||||
# _cffi_backend.so compiled.
|
||||
import _cffi_backend as backend
|
||||
from . import __version__
|
||||
if backend.__version__ != __version__:
|
||||
# bad version! Try to be as explicit as possible.
|
||||
if hasattr(backend, '__file__'):
|
||||
# CPython
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__,
|
||||
backend.__version__, backend.__file__))
|
||||
else:
|
||||
# PyPy
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__, backend.__version__))
|
||||
# (If you insist you can also try to pass the option
|
||||
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
||||
# rely on it! It's probably not going to work well.)
|
||||
|
||||
from . import cparser
|
||||
self._backend = backend
|
||||
self._lock = allocate_lock()
|
||||
self._parser = cparser.Parser()
|
||||
self._cached_btypes = {}
|
||||
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
||||
self._new_types = types.ModuleType('new_types').__dict__
|
||||
self._function_caches = []
|
||||
self._libraries = []
|
||||
self._cdefsources = []
|
||||
self._included_ffis = []
|
||||
self._windows_unicode = None
|
||||
self._init_once_cache = {}
|
||||
self._cdef_version = None
|
||||
self._embedding = None
|
||||
self._typecache = model.get_typecache(backend)
|
||||
if hasattr(backend, 'set_ffi'):
|
||||
backend.set_ffi(self)
|
||||
for name in list(backend.__dict__):
|
||||
if name.startswith('RTLD_'):
|
||||
setattr(self, name, getattr(backend, name))
|
||||
#
|
||||
with self._lock:
|
||||
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
||||
self.BCharA = self._get_cached_btype(model.char_array_type)
|
||||
if isinstance(backend, types.ModuleType):
|
||||
# _cffi_backend: attach these constants to the class
|
||||
if not hasattr(FFI, 'NULL'):
|
||||
FFI.NULL = self.cast(self.BVoidP, 0)
|
||||
FFI.CData, FFI.CType = backend._get_types()
|
||||
else:
|
||||
# ctypes backend: attach these constants to the instance
|
||||
self.NULL = self.cast(self.BVoidP, 0)
|
||||
self.CData, self.CType = backend._get_types()
|
||||
self.buffer = backend.buffer
|
||||
|
||||
def cdef(self, csource, override=False, packed=False, pack=None):
|
||||
"""Parse the given C source. This registers all declared functions,
|
||||
types, and global variables. The functions and global variables can
|
||||
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
|
||||
The types can be used in 'ffi.new()' and other functions.
|
||||
If 'packed' is specified as True, all structs declared inside this
|
||||
cdef are packed, i.e. laid out without any field alignment at all.
|
||||
Alternatively, 'pack' can be a small integer, and requests for
|
||||
alignment greater than that are ignored (pack=1 is equivalent to
|
||||
packed=True).
|
||||
"""
|
||||
self._cdef(csource, override=override, packed=packed, pack=pack)
|
||||
|
||||
def embedding_api(self, csource, packed=False, pack=None):
|
||||
self._cdef(csource, packed=packed, pack=pack, dllexport=True)
|
||||
if self._embedding is None:
|
||||
self._embedding = ''
|
||||
|
||||
def _cdef(self, csource, override=False, **options):
|
||||
if not isinstance(csource, str): # unicode, on Python 2
|
||||
if not isinstance(csource, basestring):
|
||||
raise TypeError("cdef() argument must be a string")
|
||||
csource = csource.encode('ascii')
|
||||
with self._lock:
|
||||
self._cdef_version = object()
|
||||
self._parser.parse(csource, override=override, **options)
|
||||
self._cdefsources.append(csource)
|
||||
if override:
|
||||
for cache in self._function_caches:
|
||||
cache.clear()
|
||||
finishlist = self._parser._recomplete
|
||||
if finishlist:
|
||||
self._parser._recomplete = []
|
||||
for tp in finishlist:
|
||||
tp.finish_backend_type(self, finishlist)
|
||||
|
||||
def dlopen(self, name, flags=0):
|
||||
"""Load and return a dynamic library identified by 'name'.
|
||||
The standard C library can be loaded by passing None.
|
||||
Note that functions and types declared by 'ffi.cdef()' are not
|
||||
linked to a particular library, just like C headers; in the
|
||||
library we only look for the actual (untyped) symbols.
|
||||
"""
|
||||
assert isinstance(name, basestring) or name is None
|
||||
with self._lock:
|
||||
lib, function_cache = _make_ffi_library(self, name, flags)
|
||||
self._function_caches.append(function_cache)
|
||||
self._libraries.append(lib)
|
||||
return lib
|
||||
|
||||
def dlclose(self, lib):
|
||||
"""Close a library obtained with ffi.dlopen(). After this call,
|
||||
access to functions or variables from the library will fail
|
||||
(possibly with a segmentation fault).
|
||||
"""
|
||||
type(lib).__cffi_close__(lib)
|
||||
|
||||
def _typeof_locked(self, cdecl):
|
||||
# call me with the lock!
|
||||
key = cdecl
|
||||
if key in self._parsed_types:
|
||||
return self._parsed_types[key]
|
||||
#
|
||||
if not isinstance(cdecl, str): # unicode, on Python 2
|
||||
cdecl = cdecl.encode('ascii')
|
||||
#
|
||||
type = self._parser.parse_type(cdecl)
|
||||
really_a_function_type = type.is_raw_function
|
||||
if really_a_function_type:
|
||||
type = type.as_function_pointer()
|
||||
btype = self._get_cached_btype(type)
|
||||
result = btype, really_a_function_type
|
||||
self._parsed_types[key] = result
|
||||
return result
|
||||
|
||||
def _typeof(self, cdecl, consider_function_as_funcptr=False):
|
||||
# string -> ctype object
|
||||
try:
|
||||
result = self._parsed_types[cdecl]
|
||||
except KeyError:
|
||||
with self._lock:
|
||||
result = self._typeof_locked(cdecl)
|
||||
#
|
||||
btype, really_a_function_type = result
|
||||
if really_a_function_type and not consider_function_as_funcptr:
|
||||
raise CDefError("the type %r is a function type, not a "
|
||||
"pointer-to-function type" % (cdecl,))
|
||||
return btype
|
||||
|
||||
def typeof(self, cdecl):
|
||||
"""Parse the C type given as a string and return the
|
||||
corresponding <ctype> object.
|
||||
It can also be used on 'cdata' instance to get its C type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
return self._typeof(cdecl)
|
||||
if isinstance(cdecl, self.CData):
|
||||
return self._backend.typeof(cdecl)
|
||||
if isinstance(cdecl, types.BuiltinFunctionType):
|
||||
res = _builtin_function_type(cdecl)
|
||||
if res is not None:
|
||||
return res
|
||||
if (isinstance(cdecl, types.FunctionType)
|
||||
and hasattr(cdecl, '_cffi_base_type')):
|
||||
with self._lock:
|
||||
return self._get_cached_btype(cdecl._cffi_base_type)
|
||||
raise TypeError(type(cdecl))
|
||||
|
||||
def sizeof(self, cdecl):
|
||||
"""Return the size in bytes of the argument. It can be a
|
||||
string naming a C type, or a 'cdata' instance.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
BType = self._typeof(cdecl)
|
||||
return self._backend.sizeof(BType)
|
||||
else:
|
||||
return self._backend.sizeof(cdecl)
|
||||
|
||||
def alignof(self, cdecl):
|
||||
"""Return the natural alignment size in bytes of the C type
|
||||
given as a string.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.alignof(cdecl)
|
||||
|
||||
def offsetof(self, cdecl, *fields_or_indexes):
|
||||
"""Return the offset of the named field inside the given
|
||||
structure or array, which must be given as a C type name.
|
||||
You can give several field names in case of nested structures.
|
||||
You can also give numeric values which correspond to array
|
||||
items, in case of an array type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
|
||||
|
||||
def new(self, cdecl, init=None):
|
||||
"""Allocate an instance according to the specified C type and
|
||||
return a pointer to it. The specified C type must be either a
|
||||
pointer or an array: ``new('X *')`` allocates an X and returns
|
||||
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
||||
n X'es and returns an array referencing it (which works
|
||||
mostly like a pointer, like in C). You can also use
|
||||
``new('X[]', n)`` to allocate an array of a non-constant
|
||||
length n.
|
||||
|
||||
The memory is initialized following the rules of declaring a
|
||||
global variable in C: by default it is zero-initialized, but
|
||||
an explicit initializer can be given which can be used to
|
||||
fill all or part of the memory.
|
||||
|
||||
When the returned <cdata> object goes out of scope, the memory
|
||||
is freed. In other words the returned <cdata> object has
|
||||
ownership of the value of type 'cdecl' that it points to. This
|
||||
means that the raw data can be used as long as this object is
|
||||
kept alive, but must not be used for a longer time. Be careful
|
||||
about that when copying the pointer to the memory somewhere
|
||||
else, e.g. into another structure.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.newp(cdecl, init)
|
||||
|
||||
def new_allocator(self, alloc=None, free=None,
|
||||
should_clear_after_alloc=True):
|
||||
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
||||
but uses the provided low-level 'alloc' and 'free' functions.
|
||||
|
||||
'alloc' is called with the size as argument. If it returns NULL, a
|
||||
MemoryError is raised. 'free' is called with the result of 'alloc'
|
||||
as argument. Both can be either Python function or directly C
|
||||
functions. If 'free' is None, then no free function is called.
|
||||
If both 'alloc' and 'free' are None, the default is used.
|
||||
|
||||
If 'should_clear_after_alloc' is set to False, then the memory
|
||||
returned by 'alloc' is assumed to be already cleared (or you are
|
||||
fine with garbage); otherwise CFFI will clear it.
|
||||
"""
|
||||
compiled_ffi = self._backend.FFI()
|
||||
allocator = compiled_ffi.new_allocator(alloc, free,
|
||||
should_clear_after_alloc)
|
||||
def allocate(cdecl, init=None):
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return allocator(cdecl, init)
|
||||
return allocate
|
||||
|
||||
def cast(self, cdecl, source):
|
||||
"""Similar to a C cast: returns an instance of the named C
|
||||
type initialized with the given 'source'. The source is
|
||||
casted between integers or pointers of any type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.cast(cdecl, source)
|
||||
|
||||
def string(self, cdata, maxlen=-1):
|
||||
"""Return a Python string (or unicode string) from the 'cdata'.
|
||||
If 'cdata' is a pointer or array of characters or bytes, returns
|
||||
the null-terminated string. The returned string extends until
|
||||
the first null character, or at most 'maxlen' characters. If
|
||||
'cdata' is an array then 'maxlen' defaults to its length.
|
||||
|
||||
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
||||
string following the same rules.
|
||||
|
||||
If 'cdata' is a single character or byte or a wchar_t, returns
|
||||
it as a string or unicode string.
|
||||
|
||||
If 'cdata' is an enum, returns the value of the enumerator as a
|
||||
string, or 'NUMBER' if the value is out of range.
|
||||
"""
|
||||
return self._backend.string(cdata, maxlen)
|
||||
|
||||
def unpack(self, cdata, length):
|
||||
"""Unpack an array of C data of the given length,
|
||||
returning a Python string/unicode/list.
|
||||
|
||||
If 'cdata' is a pointer to 'char', returns a byte string.
|
||||
It does not stop at the first null. This is equivalent to:
|
||||
ffi.buffer(cdata, length)[:]
|
||||
|
||||
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
|
||||
'length' is measured in wchar_t's; it is not the size in bytes.
|
||||
|
||||
If 'cdata' is a pointer to anything else, returns a list of
|
||||
'length' items. This is a faster equivalent to:
|
||||
[cdata[i] for i in range(length)]
|
||||
"""
|
||||
return self._backend.unpack(cdata, length)
|
||||
|
||||
#def buffer(self, cdata, size=-1):
|
||||
# """Return a read-write buffer object that references the raw C data
|
||||
# pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
||||
# an array. Can be passed to functions expecting a buffer, or directly
|
||||
# manipulated with:
|
||||
#
|
||||
# buf[:] get a copy of it in a regular string, or
|
||||
# buf[idx] as a single character
|
||||
# buf[:] = ...
|
||||
# buf[idx] = ... change the content
|
||||
# """
|
||||
# note that 'buffer' is a type, set on this instance by __init__
|
||||
|
||||
def from_buffer(self, cdecl, python_buffer=_unspecified,
|
||||
require_writable=False):
|
||||
"""Return a cdata of the given type pointing to the data of the
|
||||
given Python object, which must support the buffer interface.
|
||||
Note that this is not meant to be used on the built-in types
|
||||
str or unicode (you can build 'char[]' arrays explicitly)
|
||||
but only on objects containing large quantities of raw data
|
||||
in some other format, like 'array.array' or numpy arrays.
|
||||
|
||||
The first argument is optional and default to 'char[]'.
|
||||
"""
|
||||
if python_buffer is _unspecified:
|
||||
cdecl, python_buffer = self.BCharA, cdecl
|
||||
elif isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.from_buffer(cdecl, python_buffer,
|
||||
require_writable)
|
||||
|
||||
def memmove(self, dest, src, n):
|
||||
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
||||
|
||||
Like the C function memmove(), the memory areas may overlap;
|
||||
apart from that it behaves like the C function memcpy().
|
||||
|
||||
'src' can be any cdata ptr or array, or any Python buffer object.
|
||||
'dest' can be any cdata ptr or array, or a writable Python buffer
|
||||
object. The size to copy, 'n', is always measured in bytes.
|
||||
|
||||
Unlike other methods, this one supports all Python buffer including
|
||||
byte strings and bytearrays---but it still does not support
|
||||
non-contiguous buffers.
|
||||
"""
|
||||
return self._backend.memmove(dest, src, n)
|
||||
|
||||
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
||||
"""Return a callback object or a decorator making such a
|
||||
callback object. 'cdecl' must name a C function pointer type.
|
||||
The callback invokes the specified 'python_callable' (which may
|
||||
be provided either directly or via a decorator). Important: the
|
||||
callback object must be manually kept alive for as long as the
|
||||
callback may be invoked from the C level.
|
||||
"""
|
||||
def callback_decorator_wrap(python_callable):
|
||||
if not callable(python_callable):
|
||||
raise TypeError("the 'python_callable' argument "
|
||||
"is not callable")
|
||||
return self._backend.callback(cdecl, python_callable,
|
||||
error, onerror)
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
||||
if python_callable is None:
|
||||
return callback_decorator_wrap # decorator mode
|
||||
else:
|
||||
return callback_decorator_wrap(python_callable) # direct mode
|
||||
|
||||
def getctype(self, cdecl, replace_with=''):
|
||||
"""Return a string giving the C type 'cdecl', which may be itself
|
||||
a string or a <ctype> object. If 'replace_with' is given, it gives
|
||||
extra text to append (or insert for more complicated C types), like
|
||||
a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
replace_with = replace_with.strip()
|
||||
if (replace_with.startswith('*')
|
||||
and '&[' in self._backend.getcname(cdecl, '&')):
|
||||
replace_with = '(%s)' % replace_with
|
||||
elif replace_with and not replace_with[0] in '[(':
|
||||
replace_with = ' ' + replace_with
|
||||
return self._backend.getcname(cdecl, replace_with)
|
||||
|
||||
def gc(self, cdata, destructor, size=0):
|
||||
"""Return a new cdata object that points to the same
|
||||
data. Later, when this new cdata object is garbage-collected,
|
||||
'destructor(old_cdata_object)' will be called.
|
||||
|
||||
The optional 'size' gives an estimate of the size, used to
|
||||
trigger the garbage collection more eagerly. So far only used
|
||||
on PyPy. It tells the GC that the returned object keeps alive
|
||||
roughly 'size' bytes of external memory.
|
||||
"""
|
||||
return self._backend.gcp(cdata, destructor, size)
|
||||
|
||||
def _get_cached_btype(self, type):
|
||||
assert self._lock.acquire(False) is False
|
||||
# call me with the lock!
|
||||
try:
|
||||
BType = self._cached_btypes[type]
|
||||
except KeyError:
|
||||
finishlist = []
|
||||
BType = type.get_cached_btype(self, finishlist)
|
||||
for type in finishlist:
|
||||
type.finish_backend_type(self, finishlist)
|
||||
return BType
|
||||
|
||||
def verify(self, source='', tmpdir=None, **kwargs):
|
||||
"""Verify that the current ffi signatures compile on this
|
||||
machine, and return a dynamic library object. The dynamic
|
||||
library can be used to call functions and access global
|
||||
variables declared in this 'ffi'. The library is compiled
|
||||
by the C compiler: it gives you C-level API compatibility
|
||||
(including calling macros). This is unlike 'ffi.dlopen()',
|
||||
which requires binary compatibility in the signatures.
|
||||
"""
|
||||
from .verifier import Verifier, _caller_dir_pycache
|
||||
#
|
||||
# If set_unicode(True) was called, insert the UNICODE and
|
||||
# _UNICODE macro declarations
|
||||
if self._windows_unicode:
|
||||
self._apply_windows_unicode(kwargs)
|
||||
#
|
||||
# Set the tmpdir here, and not in Verifier.__init__: it picks
|
||||
# up the caller's directory, which we want to be the caller of
|
||||
# ffi.verify(), as opposed to the caller of Veritier().
|
||||
tmpdir = tmpdir or _caller_dir_pycache()
|
||||
#
|
||||
# Make a Verifier() and use it to load the library.
|
||||
self.verifier = Verifier(self, source, tmpdir, **kwargs)
|
||||
lib = self.verifier.load_library()
|
||||
#
|
||||
# Save the loaded library for keep-alive purposes, even
|
||||
# if the caller doesn't keep it alive itself (it should).
|
||||
self._libraries.append(lib)
|
||||
return lib
|
||||
|
||||
def _get_errno(self):
|
||||
return self._backend.get_errno()
|
||||
def _set_errno(self, errno):
|
||||
self._backend.set_errno(errno)
|
||||
errno = property(_get_errno, _set_errno, None,
|
||||
"the value of 'errno' from/to the C calls")
|
||||
|
||||
def getwinerror(self, code=-1):
|
||||
return self._backend.getwinerror(code)
|
||||
|
||||
def _pointer_to(self, ctype):
|
||||
with self._lock:
|
||||
return model.pointer_cache(self, ctype)
|
||||
|
||||
def addressof(self, cdata, *fields_or_indexes):
|
||||
"""Return the address of a <cdata 'struct-or-union'>.
|
||||
If 'fields_or_indexes' are given, returns the address of that
|
||||
field or array item in the structure or array, recursively in
|
||||
case of nested structures.
|
||||
"""
|
||||
try:
|
||||
ctype = self._backend.typeof(cdata)
|
||||
except TypeError:
|
||||
if '__addressof__' in type(cdata).__dict__:
|
||||
return type(cdata).__addressof__(cdata, *fields_or_indexes)
|
||||
raise
|
||||
if fields_or_indexes:
|
||||
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
||||
else:
|
||||
if ctype.kind == "pointer":
|
||||
raise TypeError("addressof(pointer)")
|
||||
offset = 0
|
||||
ctypeptr = self._pointer_to(ctype)
|
||||
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
||||
|
||||
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
||||
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
||||
for field1 in fields_or_indexes:
|
||||
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
||||
offset += offset1
|
||||
return ctype, offset
|
||||
|
||||
def include(self, ffi_to_include):
|
||||
"""Includes the typedefs, structs, unions and enums defined
|
||||
in another FFI instance. Usage is similar to a #include in C,
|
||||
where a part of the program might include types defined in
|
||||
another part for its own usage. Note that the include()
|
||||
method has no effect on functions, constants and global
|
||||
variables, which must anyway be accessed directly from the
|
||||
lib object returned by the original FFI instance.
|
||||
"""
|
||||
if not isinstance(ffi_to_include, FFI):
|
||||
raise TypeError("ffi.include() expects an argument that is also of"
|
||||
" type cffi.FFI, not %r" % (
|
||||
type(ffi_to_include).__name__,))
|
||||
if ffi_to_include is self:
|
||||
raise ValueError("self.include(self)")
|
||||
with ffi_to_include._lock:
|
||||
with self._lock:
|
||||
self._parser.include(ffi_to_include._parser)
|
||||
self._cdefsources.append('[')
|
||||
self._cdefsources.extend(ffi_to_include._cdefsources)
|
||||
self._cdefsources.append(']')
|
||||
self._included_ffis.append(ffi_to_include)
|
||||
|
||||
def new_handle(self, x):
    """Wrap the Python object 'x' in a new 'void *' cdata handle."""
    make_handle = self._backend.newp_handle
    return make_handle(self.BVoidP, x)
|
||||
|
||||
def from_handle(self, x):
    """Return the Python object carried by the 'void *' handle cdata 'x'."""
    backend = self._backend
    return backend.from_handle(x)
|
||||
|
||||
def release(self, x):
    """Release now the resources held by the cdata 'x', without waiting
    for garbage collection."""
    backend = self._backend
    backend.release(x)
|
||||
|
||||
def set_unicode(self, enabled_flag):
    """Windows: if 'enabled_flag' is True, enable the UNICODE and
    _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
    to be (pointers to) wchar_t.  If 'enabled_flag' is False,
    declare these types to be (pointers to) plain 8-bit characters.
    This is mostly for backward compatibility; you usually want True.
    """
    if self._windows_unicode is not None:
        # Only once: earlier cdef()s may already depend on the choice.
        raise ValueError("set_unicode() can only be called once")
    enabled_flag = bool(enabled_flag)
    if enabled_flag:
        # Wide-character variants of the Windows TCHAR family.
        self.cdef("typedef wchar_t TBYTE;"
                  "typedef wchar_t TCHAR;"
                  "typedef const wchar_t *LPCTSTR;"
                  "typedef const wchar_t *PCTSTR;"
                  "typedef wchar_t *LPTSTR;"
                  "typedef wchar_t *PTSTR;"
                  "typedef TBYTE *PTBYTE;"
                  "typedef TCHAR *PTCHAR;")
    else:
        # Narrow 8-bit variants.
        self.cdef("typedef char TBYTE;"
                  "typedef char TCHAR;"
                  "typedef const char *LPCTSTR;"
                  "typedef const char *PCTSTR;"
                  "typedef char *LPTSTR;"
                  "typedef char *PTSTR;"
                  "typedef TBYTE *PTBYTE;"
                  "typedef TCHAR *PTCHAR;")
    self._windows_unicode = enabled_flag
|
||||
|
||||
def _apply_windows_unicode(self, kwds):
    # Add the UNICODE/_UNICODE preprocessor macros to the 'define_macros'
    # distutils keyword; builds a new list so the caller's list (if any)
    # is not mutated.
    defmacros = kwds.get('define_macros', ())
    if not isinstance(defmacros, (list, tuple)):
        raise TypeError("'define_macros' must be a list or tuple")
    defmacros = list(defmacros) + [('UNICODE', '1'),
                                   ('_UNICODE', '1')]
    kwds['define_macros'] = defmacros
|
||||
|
||||
def _apply_embedding_fix(self, kwds):
    # must include an argument like "-lpython2.7" for the compiler
    def ensure(key, value):
        # Append 'value' to the list kwds[key], creating the list if
        # needed, without duplicating entries.
        lst = kwds.setdefault(key, [])
        if value not in lst:
            lst.append(value)
    #
    if '__pypy__' in sys.builtin_module_names:
        import os
        if sys.platform == "win32":
            # we need 'libpypy-c.lib'.  Current distributions of
            # pypy (>= 4.1) contain it as 'libs/python27.lib'.
            pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
            if hasattr(sys, 'prefix'):
                ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
        else:
            # we need 'libpypy-c.{so,dylib}', which should be by
            # default located in 'sys.prefix/bin' for installed
            # systems.
            if sys.version_info < (3,):
                pythonlib = "pypy-c"
            else:
                pythonlib = "pypy3-c"
            if hasattr(sys, 'prefix'):
                ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
        # On uninstalled pypy's, the libpypy-c is typically found in
        # .../pypy/goal/.
        if hasattr(sys, 'prefix'):
            ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
    else:
        if sys.platform == "win32":
            template = "python%d%d"
            if hasattr(sys, 'gettotalrefcount'):
                # debug build of CPython: link against python##_d.lib
                template += '_d'
        else:
            try:
                import sysconfig
            except ImportError:    # 2.6
                from distutils import sysconfig
            template = "python%d.%d"
            if sysconfig.get_config_var('DEBUG_EXT'):
                template += sysconfig.get_config_var('DEBUG_EXT')
        # major/minor version extracted from sys.hexversion
        pythonlib = (template %
                (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
        if hasattr(sys, 'abiflags'):
            pythonlib += sys.abiflags
    ensure('libraries', pythonlib)
    if sys.platform == "win32":
        ensure('extra_link_args', '/MANIFEST')
|
||||
|
||||
def set_source(self, module_name, source, source_extension='.c', **kwds):
    """Record the C source for the out-of-line module 'module_name'.
    'source' is a string of C code, or None for dlopen()-style pure
    Python (ABI mode) modules.  Can only be called once per ffi object.
    Extra keyword arguments are forwarded to distutils at compile time.
    """
    import os
    if hasattr(self, '_assigned_source'):
        raise ValueError("set_source() cannot be called several times "
                         "per ffi object")
    # NOTE: 'basestring' is a py2/py3 compatibility alias defined
    # elsewhere in this file.
    if not isinstance(module_name, basestring):
        raise TypeError("'module_name' must be a string")
    if os.sep in module_name or (os.altsep and os.altsep in module_name):
        raise ValueError("'module_name' must not contain '/': use a dotted "
                         "name to make a 'package.module' location")
    self._assigned_source = (str(module_name), source,
                             source_extension, kwds)
|
||||
|
||||
def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
                         source_extension='.c', **kwds):
    """Same as set_source(), but additionally fetches compiler/linker
    flags for the given list of pkg-config package names and merges
    them into the keyword arguments.
    """
    from . import pkgconfig
    if not isinstance(pkgconfig_libs, list):
        raise TypeError("the pkgconfig_libs argument must be a list "
                        "of package names")
    kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
    pkgconfig.merge_flags(kwds, kwds2)
    self.set_source(module_name, source, source_extension, **kwds)
|
||||
|
||||
def distutils_extension(self, tmpdir='build', verbose=True):
    """Return a distutils Extension object building this ffi's C
    extension module; the generated C source is written under 'tmpdir'.
    """
    from distutils.dir_util import mkpath
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
            return self.verifier.get_extension()
        raise ValueError("set_source() must be called before"
                         " distutils_extension()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("distutils_extension() is only for C extension "
                        "modules, not for dlopen()-style pure Python "
                        "modules")
    mkpath(tmpdir)
    # Writes (or refreshes) the C source; does not invoke the compiler.
    ext, updated = recompile(self, module_name,
                             source, tmpdir=tmpdir, extradir=tmpdir,
                             source_extension=source_extension,
                             call_c_compiler=False, **kwds)
    if verbose:
        if updated:
            sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
        else:
            sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
    return ext
|
||||
|
||||
def emit_c_code(self, filename):
    """Generate the C source for this ffi's extension module and write
    it to 'filename', without invoking the C compiler.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before emit_c_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("emit_c_code() is only for C extension modules, "
                        "not for dlopen()-style pure Python modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
||||
|
||||
def emit_python_code(self, filename):
    """Generate the out-of-line ABI-mode Python source for this ffi and
    write it to 'filename'.  Only usable after set_source(module_name,
    None), i.e. for dlopen()-style pure Python modules.

    Raises ValueError if set_source() was not called, and TypeError if
    a C source was assigned (use emit_c_code() for that case).
    """
    if not hasattr(self, '_assigned_source'):
        # Bug fix: this message used to say "emit_c_code()" (copy-paste
        # from the sibling method above).
        raise ValueError("set_source() must be called before"
                         " emit_python_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is not None:
        raise TypeError("emit_python_code() is only for dlopen()-style "
                        "pure Python modules, not for C extension modules")
    # Import lazily and only after the arguments are validated, so that
    # the usage errors above are reported even if recompiler cannot load.
    from .recompiler import recompile
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
||||
|
||||
def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
    """The 'target' argument gives the final file name of the
    compiled DLL.  Use '*' to force distutils' choice, suitable for
    regular CPython C API modules.  Use a file name ending in '.*'
    to ask for the system's default extension for dynamic libraries
    (.so/.dll/.dylib).

    The default is '*' when building a non-embedded C API extension,
    and (module_name + '.*') when building an embedded library.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before compile()")
    module_name, source, source_extension, kwds = self._assigned_source
    return recompile(self, module_name, source, tmpdir=tmpdir,
                     target=target, source_extension=source_extension,
                     compiler_verbose=verbose, debug=debug, **kwds)
|
||||
|
||||
def init_once(self, func, tag):
    """Call 'func()' at most once per 'tag', thread-safely, caching the
    result; later calls with the same tag return the cached value.
    """
    # Read _init_once_cache[tag], which is either (False, lock) if
    # we're calling the function now in some thread, or (True, result).
    # Don't call setdefault() in most cases, to avoid allocating and
    # immediately freeing a lock; but still use setdefault() to avoid
    # races.
    try:
        x = self._init_once_cache[tag]
    except KeyError:
        x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
    # Common case: we got (True, result), so we return the result.
    if x[0]:
        return x[1]
    # Else, it's a lock.  Acquire it to serialize the following tests.
    with x[1]:
        # Read again from _init_once_cache the current status.
        x = self._init_once_cache[tag]
        if x[0]:
            return x[1]
        # Call the function and store the result back.
        result = func()
        self._init_once_cache[tag] = (True, result)
        return result
|
||||
|
||||
def embedding_init_code(self, pysource):
    """Set the Python source code that runs when the compiled embedded
    library is first loaded.  Can only be called once.
    """
    if self._embedding:
        raise ValueError("embedding_init_code() can only be called once")
    # fix 'pysource' before it gets dumped into the C file:
    # - remove empty lines at the beginning, so it starts at "line 1"
    # - dedent, if all non-empty lines are indented
    # - check for SyntaxErrors
    import re
    match = re.match(r'\s*\n', pysource)
    if match:
        pysource = pysource[match.end():]
    lines = pysource.splitlines() or ['']
    # 'prefix' starts as the first line's indentation and is shrunk to
    # the longest whitespace prefix common to all non-blank lines.
    prefix = re.match(r'\s*', lines[0]).group()
    for i in range(1, len(lines)):
        line = lines[i]
        if line.rstrip():
            while not line.startswith(prefix):
                prefix = prefix[:-1]
    i = len(prefix)
    lines = [line[i:]+'\n' for line in lines]
    pysource = ''.join(lines)
    #
    # Raise SyntaxError now, at build time, rather than at load time.
    compile(pysource, "cffi_init", "exec")
    #
    self._embedding = pysource
|
||||
|
||||
def def_extern(self, *args, **kwds):
    """In-line FFI objects do not support 'extern "Python"' callbacks;
    this always raises ValueError.  Use an out-of-line API-mode FFI."""
    message = ("ffi.def_extern() is only available on API-mode FFI "
               "objects")
    raise ValueError(message)
|
||||
|
||||
def list_types(self):
    """Returns the user type names known to this FFI instance.
    This returns a tuple containing three lists of names:
    (typedef_names, names_of_structs, names_of_unions)
    """
    buckets = {'typedef ': [], 'struct ': [], 'union ': []}
    for key in self._parser._declarations:
        for prefix, bucket in buckets.items():
            if key.startswith(prefix):
                bucket.append(key[len(prefix):])
                break
    result = (buckets['typedef '], buckets['struct '], buckets['union '])
    for bucket in result:
        bucket.sort()
    return result
|
||||
|
||||
|
||||
def _load_backend_lib(backend, name, flags):
    # Resolve 'name' to a shared library and load it through the backend.
    # 'name' may be None (the C library / whole process), a path-like
    # string (tried directly first), or a bare library name (resolved
    # with ctypes.util.find_library).
    import os
    if name is None:
        if sys.platform != "win32":
            return backend.load_library(None, flags)
        name = "c"    # Windows: load_library(None) fails, but this works
                      # on Python 2 (backward compatibility hack only)
    first_error = None
    if '.' in name or '/' in name or os.sep in name:
        # Looks like a path: try loading it directly.
        try:
            return backend.load_library(name, flags)
        except OSError as e:
            first_error = e
    import ctypes.util
    path = ctypes.util.find_library(name)
    if path is None:
        if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
            raise OSError("dlopen(None) cannot work on Windows for Python 3 "
                          "(see http://bugs.python.org/issue23606)")
        msg = ("ctypes.util.find_library() did not manage "
               "to locate a library called %r" % (name,))
        if first_error is not None:
            # Report the direct-load failure too, not just the lookup one.
            msg = "%s. Additionally, %s" % (first_error, msg)
        raise OSError(msg)
    return backend.load_library(path, flags)
|
||||
|
||||
def _make_ffi_library(ffi, libname, flags):
    # Build the lazy 'lib' object returned by ffi.dlopen().  Attributes
    # are created on first access by the closures below, based on the
    # declarations recorded in ffi._parser.
    backend = ffi._backend
    backendlib = _load_backend_lib(backend, libname, flags)
    #
    def accessor_function(name):
        # Load the C function 'name' and cache it on the instance dict.
        key = 'function ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        value = backendlib.load_function(BType, name)
        library.__dict__[name] = value
    #
    def accessor_variable(name):
        # Expose the global variable 'name' as a read/write property on
        # the FFILibrary class (so assignment writes through to C).
        key = 'variable ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        read_variable = backendlib.read_variable
        write_variable = backendlib.write_variable
        setattr(FFILibrary, name, property(
            lambda self: read_variable(BType, name),
            lambda self, value: write_variable(BType, name, value)))
    #
    def addressof_var(name):
        # Return (and cache) a cdata pointer to the global variable.
        try:
            return addr_variables[name]
        except KeyError:
            with ffi._lock:
                if name not in addr_variables:
                    key = 'variable ' + name
                    tp, _ = ffi._parser._declarations[key]
                    BType = ffi._get_cached_btype(tp)
                    if BType.kind != 'array':
                        BType = model.pointer_cache(ffi, BType)
                    p = backendlib.load_function(BType, name)
                    addr_variables[name] = p
            return addr_variables[name]
    #
    def accessor_constant(name):
        raise NotImplementedError("non-integer constant '%s' cannot be "
                                  "accessed from a dlopen() library" % (name,))
    #
    def accessor_int_constant(name):
        library.__dict__[name] = ffi._parser._int_constants[name]
    #
    accessors = {}            # name -> accessor closure
    accessors_version = [False]   # last ffi._cdef_version seen (1-elem list)
    addr_variables = {}       # name -> cached address cdata
    #
    def update_accessors():
        # Rebuild the 'accessors' table if new cdef()s happened since
        # the last call (detected via ffi._cdef_version identity).
        if accessors_version[0] is ffi._cdef_version:
            return
        #
        for key, (tp, _) in ffi._parser._declarations.items():
            if not isinstance(tp, model.EnumType):
                tag, name = key.split(' ', 1)
                if tag == 'function':
                    accessors[name] = accessor_function
                elif tag == 'variable':
                    accessors[name] = accessor_variable
                elif tag == 'constant':
                    accessors[name] = accessor_constant
            else:
                # Enums: each enumerator becomes its own integer constant.
                for i, enumname in enumerate(tp.enumerators):
                    def accessor_enum(name, tp=tp, i=i):
                        tp.check_not_partial()
                        library.__dict__[name] = tp.enumvalues[i]
                    accessors[enumname] = accessor_enum
        for name in ffi._parser._int_constants:
            accessors.setdefault(name, accessor_int_constant)
        accessors_version[0] = ffi._cdef_version
    #
    def make_accessor(name):
        with ffi._lock:
            if name in library.__dict__ or name in FFILibrary.__dict__:
                return # added by another thread while waiting for the lock
            if name not in accessors:
                update_accessors()
                if name not in accessors:
                    raise AttributeError(name)
            accessors[name](name)
    #
    class FFILibrary(object):
        def __getattr__(self, name):
            # Missing attribute: materialize it, then retry normally.
            make_accessor(name)
            return getattr(self, name)
        def __setattr__(self, name, value):
            # Route writes through the class property if one exists
            # (global variables), otherwise plain instance assignment.
            try:
                property = getattr(self.__class__, name)
            except AttributeError:
                make_accessor(name)
                setattr(self, name, value)
            else:
                property.__set__(self, value)
        def __dir__(self):
            with ffi._lock:
                update_accessors()
                return accessors.keys()
        def __addressof__(self, name):
            # Used by ffi.addressof(lib, "name"): functions come from the
            # instance dict, variables via addressof_var().
            if name in library.__dict__:
                return library.__dict__[name]
            if name in FFILibrary.__dict__:
                return addressof_var(name)
            make_accessor(name)
            if name in library.__dict__:
                return library.__dict__[name]
            if name in FFILibrary.__dict__:
                return addressof_var(name)
            raise AttributeError("cffi library has no function or "
                                 "global variable named '%s'" % (name,))
        def __cffi_close__(self):
            backendlib.close_lib()
            self.__dict__.clear()
    #
    if libname is not None:
        # Give the class a more informative name, best-effort only.
        try:
            if not isinstance(libname, str):    # unicode, on Python 2
                libname = libname.encode('utf-8')
            FFILibrary.__name__ = 'FFILibrary_%s' % libname
        except UnicodeError:
            pass
    library = FFILibrary()
    return library, library.__dict__
|
||||
|
||||
def _builtin_function_type(func):
    # a hack to make at least ffi.typeof(builtin_function) work,
    # if the builtin function was obtained by 'vengine_cpy'.
    # Returns the cached BType of 'func', or None if it cannot be found.
    import sys
    try:
        module = sys.modules[func.__module__]
        ffi = module._cffi_original_ffi
        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
        tp = types_of_builtin_funcs[func]
    except (KeyError, AttributeError, TypeError):
        # Not a vengine_cpy-generated builtin: give up silently.
        return None
    else:
        with ffi._lock:
            return ffi._get_cached_btype(tp)
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,187 @@
|
||||
from .error import VerificationError
|
||||
|
||||
class CffiOp(object):
    """One (opcode, argument) pair in the generated type tables.

    An 'op' of None means 'arg' is already a literal C expression string.
    """

    def __init__(self, op, arg):
        self.op = op
        self.arg = arg

    def as_c_expr(self):
        """Render this opcode as a C expression string."""
        if self.op is None:
            assert isinstance(self.arg, str)
            return '(_cffi_opcode_t)(%s)' % (self.arg,)
        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (CLASS_NAME[self.op], self.arg)

    def as_python_bytes(self):
        """Render this opcode as four big-endian escaped bytes."""
        if self.op is None and self.arg.isdigit():
            value = int(self.arg)     # non-negative: '-' not in self.arg
            if value >= 2**31:
                raise OverflowError("cannot emit %r: limited to 2**31-1"
                                    % (self.arg,))
            return format_four_bytes(value)
        if isinstance(self.arg, str):
            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
        return format_four_bytes((self.arg << 8) | self.op)

    def __str__(self):
        classname = CLASS_NAME.get(self.op, self.op)
        return '(%s %s)' % (classname, self.arg)
|
||||
|
||||
def format_four_bytes(num):
    """Encode 'num' as four big-endian bytes, written out as the source
    text of Python string escapes (e.g. '\\x12\\x34\\x56\\x78')."""
    pieces = []
    for shift in (24, 16, 8, 0):
        pieces.append('\\x%02X' % ((num >> shift) & 0xFF))
    return ''.join(pieces)
|
||||
|
||||
# Opcode numbers used in the compiled type tables.  All odd, so that the
# low bit distinguishes opcodes from pointers in the C tables.
OP_PRIMITIVE       = 1
OP_POINTER         = 3
OP_ARRAY           = 5
OP_OPEN_ARRAY      = 7
OP_STRUCT_UNION    = 9
OP_ENUM            = 11
OP_FUNCTION        = 13
OP_FUNCTION_END    = 15
OP_NOOP            = 17
OP_BITFIELD        = 19
OP_TYPENAME        = 21
OP_CPYTHON_BLTN_V  = 23   # varargs
OP_CPYTHON_BLTN_N  = 25   # noargs
OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
OP_CONSTANT        = 29
OP_CONSTANT_INT    = 31
OP_GLOBAL_VAR      = 33
OP_DLOPEN_FUNC     = 35
OP_DLOPEN_CONST    = 37
OP_GLOBAL_VAR_F    = 39
OP_EXTERN_PYTHON   = 41

# Indexes of the primitive C types, as used with OP_PRIMITIVE.
PRIM_VOID          = 0
PRIM_BOOL          = 1
PRIM_CHAR          = 2
PRIM_SCHAR         = 3
PRIM_UCHAR         = 4
PRIM_SHORT         = 5
PRIM_USHORT        = 6
PRIM_INT           = 7
PRIM_UINT          = 8
PRIM_LONG          = 9
PRIM_ULONG         = 10
PRIM_LONGLONG      = 11
PRIM_ULONGLONG     = 12
PRIM_FLOAT         = 13
PRIM_DOUBLE        = 14
PRIM_LONGDOUBLE    = 15

PRIM_WCHAR         = 16
PRIM_INT8          = 17
PRIM_UINT8         = 18
PRIM_INT16         = 19
PRIM_UINT16        = 20
PRIM_INT32         = 21
PRIM_UINT32        = 22
PRIM_INT64         = 23
PRIM_UINT64        = 24
PRIM_INTPTR        = 25
PRIM_UINTPTR       = 26
PRIM_PTRDIFF       = 27
PRIM_SIZE          = 28
PRIM_SSIZE         = 29
PRIM_INT_LEAST8    = 30
PRIM_UINT_LEAST8   = 31
PRIM_INT_LEAST16   = 32
PRIM_UINT_LEAST16  = 33
PRIM_INT_LEAST32   = 34
PRIM_UINT_LEAST32  = 35
PRIM_INT_LEAST64   = 36
PRIM_UINT_LEAST64  = 37
PRIM_INT_FAST8     = 38
PRIM_UINT_FAST8    = 39
PRIM_INT_FAST16    = 40
PRIM_UINT_FAST16   = 41
PRIM_INT_FAST32    = 42
PRIM_UINT_FAST32   = 43
PRIM_INT_FAST64    = 44
PRIM_UINT_FAST64   = 45
PRIM_INTMAX        = 46
PRIM_UINTMAX       = 47
PRIM_FLOATCOMPLEX  = 48
PRIM_DOUBLECOMPLEX = 49
PRIM_CHAR16        = 50
PRIM_CHAR32        = 51

_NUM_PRIM          = 52
# Negative sentinels: the exact primitive must be figured out at runtime.
_UNKNOWN_PRIM          = -1
_UNKNOWN_FLOAT_PRIM    = -2
_UNKNOWN_LONG_DOUBLE   = -3

# Special struct index for the C 'FILE' type.
_IO_FILE_STRUCT        = -1

# Maps C primitive type names to the PRIM_* indexes above.
PRIMITIVE_TO_INDEX = {
    'char':               PRIM_CHAR,
    'short':              PRIM_SHORT,
    'int':                PRIM_INT,
    'long':               PRIM_LONG,
    'long long':          PRIM_LONGLONG,
    'signed char':        PRIM_SCHAR,
    'unsigned char':      PRIM_UCHAR,
    'unsigned short':     PRIM_USHORT,
    'unsigned int':       PRIM_UINT,
    'unsigned long':      PRIM_ULONG,
    'unsigned long long': PRIM_ULONGLONG,
    'float':              PRIM_FLOAT,
    'double':             PRIM_DOUBLE,
    'long double':        PRIM_LONGDOUBLE,
    'float _Complex':     PRIM_FLOATCOMPLEX,
    'double _Complex':    PRIM_DOUBLECOMPLEX,
    '_Bool':              PRIM_BOOL,
    'wchar_t':            PRIM_WCHAR,
    'char16_t':           PRIM_CHAR16,
    'char32_t':           PRIM_CHAR32,
    'int8_t':             PRIM_INT8,
    'uint8_t':            PRIM_UINT8,
    'int16_t':            PRIM_INT16,
    'uint16_t':           PRIM_UINT16,
    'int32_t':            PRIM_INT32,
    'uint32_t':           PRIM_UINT32,
    'int64_t':            PRIM_INT64,
    'uint64_t':           PRIM_UINT64,
    'intptr_t':           PRIM_INTPTR,
    'uintptr_t':          PRIM_UINTPTR,
    'ptrdiff_t':          PRIM_PTRDIFF,
    'size_t':             PRIM_SIZE,
    'ssize_t':            PRIM_SSIZE,
    'int_least8_t':       PRIM_INT_LEAST8,
    'uint_least8_t':      PRIM_UINT_LEAST8,
    'int_least16_t':      PRIM_INT_LEAST16,
    'uint_least16_t':     PRIM_UINT_LEAST16,
    'int_least32_t':      PRIM_INT_LEAST32,
    'uint_least32_t':     PRIM_UINT_LEAST32,
    'int_least64_t':      PRIM_INT_LEAST64,
    'uint_least64_t':     PRIM_UINT_LEAST64,
    'int_fast8_t':        PRIM_INT_FAST8,
    'uint_fast8_t':       PRIM_UINT_FAST8,
    'int_fast16_t':       PRIM_INT_FAST16,
    'uint_fast16_t':      PRIM_UINT_FAST16,
    'int_fast32_t':       PRIM_INT_FAST32,
    'uint_fast32_t':      PRIM_UINT_FAST32,
    'int_fast64_t':       PRIM_INT_FAST64,
    'uint_fast64_t':      PRIM_UINT_FAST64,
    'intmax_t':           PRIM_INTMAX,
    'uintmax_t':          PRIM_UINTMAX,
    }

# Bit flags describing a struct/union in the generated tables.
F_UNION         = 0x01
F_CHECK_FIELDS  = 0x02
F_PACKED        = 0x04
F_EXTERNAL      = 0x08
F_OPAQUE        = 0x10

# Same flags keyed by their C macro name (_CFFI_F_*), for code generation.
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
                             'F_EXTERNAL', 'F_OPAQUE']])

# Reverse map: opcode number -> name without the 'OP_' prefix.
CLASS_NAME = {}
for _name, _value in list(globals().items()):
    if _name.startswith('OP_') and isinstance(_value, int):
        CLASS_NAME[_value] = _name[3:]
|
@ -0,0 +1,80 @@
|
||||
import sys
from . import model
from .error import FFIError


# Maps a "common type" name to either a model BaseType, an equivalent C
# declaration string, or the marker 'set-unicode-needed' (Windows TCHAR
# family, usable only after ffi.set_unicode()).
COMMON_TYPES = {}

try:
    # fetch "bool" and all simple Windows types
    from _cffi_backend import _get_common_types
    _get_common_types(COMMON_TYPES)
except ImportError:
    pass

COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above

# Every known '..._t' primitive resolves to itself.
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    if _type.endswith('_t'):
        COMMON_TYPES[_type] = _type
del _type

# Memoization cache for resolve_common_type(): name -> (result, quals).
_CACHE = {}
|
||||
|
||||
def resolve_common_type(parser, commontype):
    # Resolve a "common type" name to a (model type, qualifiers) pair,
    # caching results globally in _CACHE.
    try:
        return _CACHE[commontype]
    except KeyError:
        cdecl = COMMON_TYPES.get(commontype, commontype)
        if not isinstance(cdecl, str):
            result, quals = cdecl, 0    # cdecl is already a BaseType
        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
            result, quals = model.PrimitiveType(cdecl), 0
        elif cdecl == 'set-unicode-needed':
            raise FFIError("The Windows type %r is only available after "
                           "you call ffi.set_unicode()" % (commontype,))
        else:
            if commontype == cdecl:
                # No mapping found and the name does not parse by itself.
                raise FFIError(
                    "Unsupported type: %r. Please look at "
        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
                    "and file an issue if you think this type should really "
                    "be supported." % (commontype,))
            result, quals = parser.parse_type_and_quals(cdecl)  # recursive

        assert isinstance(result, model.BaseTypeByIdentity)
        _CACHE[commontype] = result, quals
        return result, quals
|
||||
|
||||
|
||||
# ____________________________________________________________
|
||||
# extra types for Windows (most of them are in commontypes.c)
|
||||
|
||||
|
||||
def win_common_types():
    # Extra Windows-only common types (most are in commontypes.c in the
    # C backend).  The TCHAR family maps to the 'set-unicode-needed'
    # marker until ffi.set_unicode() chooses narrow or wide characters.
    return {
        "UNICODE_STRING": model.StructType(
            "_UNICODE_STRING",
            ["Length",
             "MaximumLength",
             "Buffer"],
            [model.PrimitiveType("unsigned short"),
             model.PrimitiveType("unsigned short"),
             model.PointerType(model.PrimitiveType("wchar_t"))],
            [-1, -1, -1]),
        "PUNICODE_STRING": "UNICODE_STRING *",
        "PCUNICODE_STRING": "const UNICODE_STRING *",

        "TBYTE": "set-unicode-needed",
        "TCHAR": "set-unicode-needed",
        "LPCTSTR": "set-unicode-needed",
        "PCTSTR": "set-unicode-needed",
        "LPTSTR": "set-unicode-needed",
        "PTSTR": "set-unicode-needed",
        "PTBYTE": "set-unicode-needed",
        "PTCHAR": "set-unicode-needed",
        }

if sys.platform == 'win32':
    COMMON_TYPES.update(win_common_types())
|
@ -0,0 +1,931 @@
|
||||
from . import model
from .commontypes import COMMON_TYPES, resolve_common_type
from .error import FFIError, CDefError
try:
    # prefer the vendored copy of pycparser, if this distribution has one
    from . import _pycparser as pycparser
except ImportError:
    import pycparser
import weakref, re, sys

try:
    if sys.version_info < (3,):
        import thread as _thread
    else:
        import _thread
    lock = _thread.allocate_lock()
except ImportError:
    # no thread support available: parsing is not protected by a lock
    lock = None
|
||||
|
||||
def _workaround_for_static_import_finders():
    # Issue #392: packaging tools like cx_Freeze can not find these
    # because pycparser uses exec dynamic import.  This is an obscure
    # workaround.  This function is never called.
    import pycparser.yacctab
    import pycparser.lextab
|
||||
|
||||
# Pseudo-filename reported in error messages for cdef() strings.
CDEF_SOURCE_STRING = "<cdef source string>"
# C /*...*/ and //... comments.
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                        re.DOTALL | re.MULTILINE)
# "#define NAME value" lines (value may continue with backslash-newline).
_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
                       r"\b((?:[^\n\\]|\\.)*?)$",
                       re.DOTALL | re.MULTILINE)
# "= ...," / "...}" forms marking partial enums.
_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
# "[...]" partial array lengths.
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
_r_words = re.compile(r"\w+|\S")
_parser_cache = None
_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
# __stdcall / WINAPI / __cdecl calling-convention markers.
_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
_r_cdecl = re.compile(r"\b__cdecl\b")
# extern "Python" / "Python+C" / "C+Python" blocks.
_r_extern_python = re.compile(r'\bextern\s*"'
                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
_r_star_const_space = re.compile(       # matches "* const "
    r"[*]\s*((const|volatile|restrict)\b\s*)+")
# "int ..."-style and "double ..."-style unknown-exact-type declarations.
_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
                              r"\.\.\.")
_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
|
||||
|
||||
def _get_parser():
    # Return the process-wide pycparser.CParser, creating it lazily on
    # first use (construction is expensive).
    global _parser_cache
    if _parser_cache is None:
        _parser_cache = pycparser.CParser()
    return _parser_cache
|
||||
|
||||
def _workaround_for_old_pycparser(csource):
    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
    # for "char***(*const)".  This means we can't tell the difference
    # afterwards.  But "char(*const(***))" gives us the right syntax
    # tree.  The issue only occurs if there are several stars in
    # sequence with no parenthesis inbetween, just possibly qualifiers.
    # Attempt to fix it by adding some parentheses in the source: each
    # time we see "* const" or "* const *", we add an opening
    # parenthesis before each star---the hard part is figuring out where
    # to close them.
    parts = []
    while True:
        match = _r_star_const_space.search(csource)
        if not match:
            break
        #print repr(''.join(parts)+csource), '=>',
        parts.append(csource[:match.start()])
        parts.append('('); closing = ')'
        parts.append(match.group())   # e.g. "* const "
        endpos = match.end()
        if csource.startswith('*', endpos):
            parts.append('('); closing += ')'
        # Scan forward to the end of the declarator: stop at a top-level
        # ',', ';', '=' or unbalanced ')'; that is where 'closing' goes.
        level = 0
        i = endpos
        while i < len(csource):
            c = csource[i]
            if c == '(':
                level += 1
            elif c == ')':
                if level == 0:
                    break
                level -= 1
            elif c in ',;=':
                if level == 0:
                    break
            i += 1
        csource = csource[endpos:i] + closing + csource[i:]
        #print repr(''.join(parts)+csource)
    parts.append(csource)
    return ''.join(parts)
|
||||
|
||||
def _preprocess_extern_python(csource):
    # input: `extern "Python" int foo(int);` or
    #        `extern "Python" { int foo(int); }`
    # output:
    #     void __cffi_extern_python_start;
    #     int foo(int);
    #     void __cffi_extern_python_stop;
    #
    # input: `extern "Python+C" int foo(int);`
    # output:
    #     void __cffi_extern_python_plus_c_start;
    #     int foo(int);
    #     void __cffi_extern_python_stop;
    parts = []
    while True:
        match = _r_extern_python.search(csource)
        if not match:
            break
        # the regex consumed one extra character after the closing quote
        endpos = match.end() - 1
        #print
        #print ''.join(parts)+csource
        #print '=>'
        parts.append(csource[:match.start()])
        if 'C' in match.group(1):
            parts.append('void __cffi_extern_python_plus_c_start; ')
        else:
            parts.append('void __cffi_extern_python_start; ')
        if csource[endpos] == '{':
            # grouping variant
            closing = csource.find('}', endpos)
            if closing < 0:
                raise CDefError("'extern \"Python\" {': no '}' found")
            if csource.find('{', endpos + 1, closing) >= 0:
                raise NotImplementedError("cannot use { } inside a block "
                                          "'extern \"Python\" { ... }'")
            parts.append(csource[endpos+1:closing])
            csource = csource[closing+1:]
        else:
            # non-grouping variant
            semicolon = csource.find(';', endpos)
            if semicolon < 0:
                raise CDefError("'extern \"Python\": no ';' found")
            parts.append(csource[endpos:semicolon+1])
            csource = csource[semicolon+1:]
        parts.append(' void __cffi_extern_python_stop;')
        #print ''.join(parts)+csource
        #print
    parts.append(csource)
    return ''.join(parts)
|
||||
|
||||
def _warn_for_string_literal(csource):
    # Warn if the cdef source still contains a double quote: string
    # literals have no meaning in cdefs and can confuse the regex-based
    # pre-parsing steps above.
    if '"' in csource:
        import warnings
        warnings.warn("String literal found in cdef() or type source. "
                      "String literals are ignored here, but you should "
                      "remove them anyway because some character sequences "
                      "confuse pre-parsing.")
|
||||
|
||||
def _preprocess(csource):
    """Rewrite cdef() source into plain C that pycparser can handle.

    Returns a tuple ``(csource, macros)`` where *macros* maps the names of
    ``#define NAME VALUE`` lines found in the source to their (stripped)
    textual values.  The various ``...`` forms are renamed to
    ``__dotdotdot*__`` placeholder identifiers that later stages recognize.
    """
    # Remove comments.  NOTE: this only works because the cdef() section
    # should not contain any string literal!
    csource = _r_comment.sub(' ', csource)
    # Remove the "#define FOO x" lines, recording them in 'macros'
    macros = {}
    for match in _r_define.finditer(csource):
        macroname, macrovalue = match.groups()
        # join lines continued with a backslash into one value
        macrovalue = macrovalue.replace('\\\n', '').strip()
        macros[macroname] = macrovalue
    csource = _r_define.sub('', csource)
    #
    if pycparser.__version__ < '2.14':
        csource = _workaround_for_old_pycparser(csource)
    #
    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
    # It doesn't make sense for the return type of a function to be
    # "volatile volatile const", so we abuse it to detect __stdcall...
    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
    # syntax, so we place the "volatile" before the opening parenthesis.
    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
    csource = _r_cdecl.sub(' ', csource)
    #
    # Replace `extern "Python"` with start/end markers
    csource = _preprocess_extern_python(csource)
    #
    # Now there should not be any string literal left; warn if we get one
    _warn_for_string_literal(csource)
    #
    # Replace "[...]" with "[__dotdotdotarray__]"
    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
    #
    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
    # occur only at the end of enums; at the end of structs we have "...;}"
    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
    # giving an unknown value.
    # Iterate in reverse so earlier replacements don't shift the positions
    # of later matches.
    matches = list(_r_partial_enum.finditer(csource))
    for number, match in enumerate(reversed(matches)):
        p = match.start()
        if csource[p] == '=':
            p2 = csource.find('...', p, match.end())
            assert p2 > p
            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p2+3:])
        else:
            assert csource[p:p+3] == '...'
            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p+3:])
    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
    # Replace all remaining "..." with the same name, "__dotdotdot__",
    # which is declared with a typedef for the purpose of C parsing.
    return csource.replace('...', ' __dotdotdot__ '), macros
|
||||
|
||||
def _common_type_names(csource):
    """Return the set of COMMON_TYPES names that *csource* appears to use.

    A "usage" is approximated as the appearance of the word, minus a
    "definition" of the type, which is the last word in a "typedef"
    statement.  Approximative only but should be fine for all the common
    types.
    """
    watched = set(COMMON_TYPES)
    watched.update([';', ',', '(', ')', 'typedef'])
    found = set()
    in_typedef = False
    depth = 0          # parenthesis nesting inside the current typedef
    prev = ''
    for tok in _r_words.findall(csource):
        if tok in watched:
            if tok == ';':
                # end of a typedef: the word just before ';' is the name
                # being *defined*, not a usage -- retract it
                if in_typedef:
                    found.discard(prev)
                    watched.discard(prev)
                    in_typedef = False
            elif tok == 'typedef':
                in_typedef = True
                depth = 0
            elif tok == '(':
                depth += 1
            elif tok == ')':
                depth -= 1
            elif tok == ',':
                # "typedef int a, b;" -- 'a' is also a definition
                if in_typedef and depth == 0:
                    found.discard(prev)
                    watched.discard(prev)
            else:    # tok in COMMON_TYPES
                found.add(tok)
        prev = tok
    return found
|
||||
|
||||
|
||||
class Parser(object):
    """Parse cdef()-style C declarations and record what they declare.

    Uses pycparser on a preprocessed version of the source (see
    _preprocess()) and fills self._declarations with entries keyed by
    strings like 'typedef NAME', 'function NAME', 'struct NAME', etc.
    """

    def __init__(self):
        self._declarations = {}                 # 'kind name' -> (type, quals)
        self._included_declarations = set()     # types pulled in via include()
        self._anonymous_counter = 0             # for '$N' anonymous struct names
        self._structnode2type = weakref.WeakKeyDictionary()
        self._options = {}                      # set for the duration of parse()
        self._int_constants = {}                # name -> int value
        self._recomplete = []                   # types that must be re-completed
        self._uses_new_feature = None           # first new-style '...' usage seen

    def _parse(self, csource):
        """Preprocess and pycparser-parse *csource*.

        Returns (ast, macros, csource) where csource is the preprocessed
        text, kept around to locate buggy source lines in error messages.
        """
        csource, macros = _preprocess(csource)
        # XXX: for more efficiency we would need to poke into the
        # internals of CParser...  the following registers the
        # typedefs, because their presence or absence influences the
        # parsing itself (but what they are typedef'ed to plays no role)
        ctn = _common_type_names(csource)
        typenames = []
        for name in sorted(self._declarations):
            if name.startswith('typedef '):
                name = name[8:]
                typenames.append(name)
                ctn.discard(name)
        typenames += sorted(ctn)
        #
        csourcelines = []
        csourcelines.append('# 1 "<cdef automatic initialization code>"')
        for typename in typenames:
            csourcelines.append('typedef int %s;' % typename)
        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
                            ' __dotdotdot__;')
        # this forces pycparser to consider the following in the file
        # called <cdef source string> from line 1
        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
        csourcelines.append(csource)
        fullcsource = '\n'.join(csourcelines)
        if lock is not None:
            lock.acquire()     # pycparser is not thread-safe...
        try:
            ast = _get_parser().parse(fullcsource)
        except pycparser.c_parser.ParseError as e:
            self.convert_pycparser_error(e, csource)
        finally:
            if lock is not None:
                lock.release()
        # csource will be used to find buggy source text
        return ast, macros, csource

    def _convert_pycparser_error(self, e, csource):
        """Return the source line a pycparser error refers to, or None."""
        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
        # and interpret that as a line number.  This will not work if
        # the user gives explicit ``# NUM "FILE"`` directives.
        line = None
        msg = str(e)
        match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
        if match:
            linenum = int(match.group(1), 10)
            csourcelines = csource.splitlines()
            if 1 <= linenum <= len(csourcelines):
                line = csourcelines[linenum-1]
        return line

    def convert_pycparser_error(self, e, csource):
        """Re-raise a pycparser error as a CDefError with source context."""
        line = self._convert_pycparser_error(e, csource)

        msg = str(e)
        if line:
            msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
        else:
            msg = 'parse error\n%s' % (msg,)
        raise CDefError(msg)

    def parse(self, csource, override=False, packed=False, pack=None,
              dllexport=False):
        """Parse *csource* and record its declarations.

        'packed' is a legacy boolean (equivalent to pack=1); 'pack' must
        be a power of two.  Options are only in effect for this call.
        """
        if packed:
            if packed != True:
                raise ValueError("'packed' should be False or True; use "
                                 "'pack' to give another value")
            if pack:
                raise ValueError("cannot give both 'pack' and 'packed'")
            pack = 1
        elif pack:
            if pack & (pack - 1):
                raise ValueError("'pack' must be a power of two, not %r" %
                                 (pack,))
        else:
            pack = 0
        prev_options = self._options
        try:
            self._options = {'override': override,
                             'packed': pack,
                             'dllexport': dllexport}
            self._internal_parse(csource)
        finally:
            # always restore options so nested/subsequent parses are clean
            self._options = prev_options

    def _internal_parse(self, csource):
        ast, macros, csource = self._parse(csource)
        # add the macros
        self._process_macros(macros)
        # find the first "__dotdotdot__" and use that as a separator
        # between the repeated typedefs and the real csource
        iterator = iter(ast.ext)
        for decl in iterator:
            if decl.name == '__dotdotdot__':
                break
        else:
            assert 0
        current_decl = None
        #
        try:
            self._inside_extern_python = '__cffi_extern_python_stop'
            for decl in iterator:
                current_decl = decl
                if isinstance(decl, pycparser.c_ast.Decl):
                    self._parse_decl(decl)
                elif isinstance(decl, pycparser.c_ast.Typedef):
                    if not decl.name:
                        raise CDefError("typedef does not declare any name",
                                        decl)
                    quals = 0
                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
                            decl.type.type.names[-1].startswith('__dotdotdot')):
                        realtype = self._get_unknown_type(decl)
                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
                          isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
                          isinstance(decl.type.type.type,
                                     pycparser.c_ast.IdentifierType) and
                          decl.type.type.type.names[-1].startswith('__dotdotdot')):
                        realtype = self._get_unknown_ptr_type(decl)
                    else:
                        realtype, quals = self._get_type_and_quals(
                            decl.type, name=decl.name, partial_length_ok=True)
                    self._declare('typedef ' + decl.name, realtype, quals=quals)
                elif decl.__class__.__name__ == 'Pragma':
                    pass    # skip pragma, only in pycparser 2.15
                else:
                    raise CDefError("unexpected <%s>: this construct is valid "
                                    "C but not valid in cdef()" %
                                    decl.__class__.__name__, decl)
        except CDefError as e:
            # attach the offending declaration node for better error output
            if len(e.args) == 1:
                e.args = e.args + (current_decl,)
            raise
        except FFIError as e:
            msg = self._convert_pycparser_error(e, csource)
            if msg:
                e.args = (e.args[0] + "\n *** Err: %s" % msg,)
            raise

    def _add_constants(self, key, val):
        """Record an integer constant; reject conflicting redefinitions."""
        if key in self._int_constants:
            if self._int_constants[key] == val:
                return     # ignore identical double declarations
            raise FFIError(
                "multiple declarations of constant: %s" % (key,))
        self._int_constants[key] = val

    def _add_integer_constant(self, name, int_str):
        """Parse a C integer literal string and record it as a constant."""
        int_str = int_str.lower().rstrip("ul")
        neg = int_str.startswith('-')
        if neg:
            int_str = int_str[1:]
        # "010" is not valid oct in py3
        if (int_str.startswith("0") and int_str != '0'
                and not int_str.startswith("0x")):
            int_str = "0o" + int_str[1:]
        pyvalue = int(int_str, 0)
        if neg:
            pyvalue = -pyvalue
        self._add_constants(name, pyvalue)
        self._declare('macro ' + name, pyvalue)

    def _process_macros(self, macros):
        """Register '#define' values: integer literals or a literal '...'."""
        for key, value in macros.items():
            value = value.strip()
            if _r_int_literal.match(value):
                self._add_integer_constant(key, value)
            elif value == '...':
                self._declare('macro ' + key, value)
            else:
                raise CDefError(
                    'only supports one of the following syntax:\n'
                    ' #define %s ... (literally dot-dot-dot)\n'
                    ' #define %s NUMBER (with NUMBER an integer'
                    ' constant, decimal/hex/octal)\n'
                    'got:\n'
                    ' #define %s %s'
                    % (key, key, key, value))

    def _declare_function(self, tp, quals, decl):
        """Declare a function, tagged according to the extern-"Python" mode."""
        tp = self._get_type_pointer(tp, quals)
        if self._options.get('dllexport'):
            tag = 'dllexport_python '
        elif self._inside_extern_python == '__cffi_extern_python_start':
            tag = 'extern_python '
        elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
            tag = 'extern_python_plus_c '
        else:
            tag = 'function '
        self._declare(tag + decl.name, tp)

    def _parse_decl(self, decl):
        """Process one pycparser Decl node (function, struct, var, const...)."""
        node = decl.type
        if isinstance(node, pycparser.c_ast.FuncDecl):
            tp, quals = self._get_type_and_quals(node, name=decl.name)
            assert isinstance(tp, model.RawFunctionType)
            self._declare_function(tp, quals, decl)
        else:
            if isinstance(node, pycparser.c_ast.Struct):
                self._get_struct_union_enum_type('struct', node)
            elif isinstance(node, pycparser.c_ast.Union):
                self._get_struct_union_enum_type('union', node)
            elif isinstance(node, pycparser.c_ast.Enum):
                self._get_struct_union_enum_type('enum', node)
            elif not decl.name:
                raise CDefError("construct does not declare any variable",
                                decl)
            #
            if decl.name:
                tp, quals = self._get_type_and_quals(node,
                                                     partial_length_ok=True)
                if tp.is_raw_function:
                    self._declare_function(tp, quals, decl)
                elif (tp.is_integer_type() and
                        hasattr(decl, 'init') and
                        hasattr(decl.init, 'value') and
                        _r_int_literal.match(decl.init.value)):
                    self._add_integer_constant(decl.name, decl.init.value)
                elif (tp.is_integer_type() and
                        isinstance(decl.init, pycparser.c_ast.UnaryOp) and
                        decl.init.op == '-' and
                        hasattr(decl.init.expr, 'value') and
                        _r_int_literal.match(decl.init.expr.value)):
                    self._add_integer_constant(decl.name,
                                               '-' + decl.init.expr.value)
                elif (tp is model.void_type and
                        decl.name.startswith('__cffi_extern_python_')):
                    # hack: `extern "Python"` in the C source is replaced
                    # with "void __cffi_extern_python_start;" and
                    # "void __cffi_extern_python_stop;"
                    self._inside_extern_python = decl.name
                else:
                    if self._inside_extern_python != '__cffi_extern_python_stop':
                        raise CDefError(
                            "cannot declare constants or "
                            "variables with 'extern \"Python\"'")
                    if (quals & model.Q_CONST) and not tp.is_array_type:
                        self._declare('constant ' + decl.name, tp, quals=quals)
                    else:
                        self._declare('variable ' + decl.name, tp, quals=quals)

    def parse_type(self, cdecl):
        """Parse a single type declaration and return the type only."""
        return self.parse_type_and_quals(cdecl)[0]

    def parse_type_and_quals(self, cdecl):
        """Parse a single type declaration; return (type, qualifiers)."""
        # wrap the type in a dummy function argument to make it parseable
        ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
        assert not macros
        exprnode = ast.ext[-1].type.args.params[0]
        if isinstance(exprnode, pycparser.c_ast.ID):
            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
        return self._get_type_and_quals(exprnode.type)

    def _declare(self, name, obj, included=False, quals=0):
        """Record (obj, quals) under 'name', rejecting conflicting redecls."""
        if name in self._declarations:
            prevobj, prevquals = self._declarations[name]
            if prevobj is obj and prevquals == quals:
                return
            if not self._options.get('override'):
                raise FFIError(
                    "multiple declarations of %s (for interactive usage, "
                    "try cdef(xx, override=True))" % (name,))
        assert '__dotdotdot__' not in name.split()
        self._declarations[name] = (obj, quals)
        if included:
            self._included_declarations.add(obj)

    def _extract_quals(self, type):
        """Return the model.Q_* bitmask for a TypeDecl/PtrDecl node."""
        quals = 0
        if isinstance(type, (pycparser.c_ast.TypeDecl,
                             pycparser.c_ast.PtrDecl)):
            if 'const' in type.quals:
                quals |= model.Q_CONST
            if 'volatile' in type.quals:
                quals |= model.Q_VOLATILE
            if 'restrict' in type.quals:
                quals |= model.Q_RESTRICT
        return quals

    def _get_type_pointer(self, type, quals, declname=None):
        """Build a pointer type, naming anonymous struct pointers if possible."""
        if isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        if (isinstance(type, model.StructOrUnionOrEnum) and
                type.name.startswith('$') and type.name[1:].isdigit() and
                type.forcename is None and declname is not None):
            return model.NamedPointerType(type, declname, quals)
        return model.PointerType(type, quals)

    def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
        """Convert a pycparser type node to (model type, qualifier bitmask)."""
        # first, dereference typedefs, if we have it already parsed, we're good
        if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
                isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
                len(typenode.type.names) == 1 and
                ('typedef ' + typenode.type.names[0]) in self._declarations):
            tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
            quals |= self._extract_quals(typenode)
            return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.ArrayDecl):
            # array type
            if typenode.dim is None:
                length = None
            else:
                length = self._parse_constant(
                    typenode.dim, partial_length_ok=partial_length_ok)
            tp, quals = self._get_type_and_quals(typenode.type,
                                partial_length_ok=partial_length_ok)
            return model.ArrayType(tp, length), quals
        #
        if isinstance(typenode, pycparser.c_ast.PtrDecl):
            # pointer type
            itemtype, itemquals = self._get_type_and_quals(typenode.type)
            tp = self._get_type_pointer(itemtype, itemquals, declname=name)
            quals = self._extract_quals(typenode)
            return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.TypeDecl):
            quals = self._extract_quals(typenode)
            type = typenode.type
            if isinstance(type, pycparser.c_ast.IdentifierType):
                # assume a primitive type.  get it from .names, but reduce
                # synonyms to a single chosen combination
                names = list(type.names)
                if names != ['signed', 'char']:    # keep this unmodified
                    prefixes = {}
                    while names:
                        name = names[0]
                        if name in ('short', 'long', 'signed', 'unsigned'):
                            prefixes[name] = prefixes.get(name, 0) + 1
                            del names[0]
                        else:
                            break
                    # ignore the 'signed' prefix below, and reorder the others
                    newnames = []
                    for prefix in ('unsigned', 'short', 'long'):
                        for i in range(prefixes.get(prefix, 0)):
                            newnames.append(prefix)
                    if not names:
                        names = ['int']    # implicitly
                    if names == ['int']:   # but kill it if 'short' or 'long'
                        if 'short' in prefixes or 'long' in prefixes:
                            names = []
                    names = newnames + names
                ident = ' '.join(names)
                if ident == 'void':
                    return model.void_type, quals
                if ident == '__dotdotdot__':
                    raise FFIError(':%d: bad usage of "..."' %
                                   typenode.coord.line)
                tp0, quals0 = resolve_common_type(self, ident)
                return tp0, (quals | quals0)
            #
            if isinstance(type, pycparser.c_ast.Struct):
                # 'struct foobar'
                tp = self._get_struct_union_enum_type('struct', type, name)
                return tp, quals
            #
            if isinstance(type, pycparser.c_ast.Union):
                # 'union foobar'
                tp = self._get_struct_union_enum_type('union', type, name)
                return tp, quals
            #
            if isinstance(type, pycparser.c_ast.Enum):
                # 'enum foobar'
                tp = self._get_struct_union_enum_type('enum', type, name)
                return tp, quals
        #
        if isinstance(typenode, pycparser.c_ast.FuncDecl):
            # a function type
            return self._parse_function_type(typenode, name), 0
        #
        # nested anonymous structs or unions end up here
        if isinstance(typenode, pycparser.c_ast.Struct):
            return self._get_struct_union_enum_type('struct', typenode, name,
                                                    nested=True), 0
        if isinstance(typenode, pycparser.c_ast.Union):
            return self._get_struct_union_enum_type('union', typenode, name,
                                                    nested=True), 0
        #
        raise FFIError(":%d: bad or unsupported type declaration" %
                       typenode.coord.line)

    def _parse_function_type(self, typenode, funcname=None):
        """Build a model.RawFunctionType from a pycparser FuncDecl node."""
        params = list(getattr(typenode.args, 'params', []))
        for i, arg in enumerate(params):
            if not hasattr(arg, 'type'):
                raise CDefError("%s arg %d: unknown type '%s'"
                    " (if you meant to use the old C syntax of giving"
                    " untyped arguments, it is not supported)"
                    % (funcname or 'in expression', i + 1,
                       getattr(arg, 'name', '?')))
        ellipsis = (
            len(params) > 0 and
            isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
            isinstance(params[-1].type.type,
                       pycparser.c_ast.IdentifierType) and
            params[-1].type.type.names == ['__dotdotdot__'])
        if ellipsis:
            params.pop()
            if not params:
                raise CDefError(
                    "%s: a function with only '(...)' as argument"
                    " is not correct C" % (funcname or 'in expression'))
        args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
                for argdeclnode in params]
        if not ellipsis and args == [model.void_type]:
            args = []
        result, quals = self._get_type_and_quals(typenode.type)
        # the 'quals' on the result type are ignored.  HACK: we abuse them
        # to detect __stdcall functions: we textually replace "__stdcall"
        # with "volatile volatile const" above.
        abi = None
        if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
            if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
                abi = '__stdcall'
        return model.RawFunctionType(tuple(args), result, ellipsis, abi)

    def _as_func_arg(self, type, quals):
        """Apply C's argument decay rules (array->pointer, func->func ptr)."""
        if isinstance(type, model.ArrayType):
            return model.PointerType(type.item, quals)
        elif isinstance(type, model.RawFunctionType):
            return type.as_function_pointer()
        else:
            return type

    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
        """Return (and cache) the model type for a struct/union/enum node."""
        # First, a level of caching on the exact 'type' node of the AST.
        # This is obscure, but needed because pycparser "unrolls" declarations
        # such as "typedef struct { } foo_t, *foo_p" and we end up with
        # an AST that is not a tree, but a DAG, with the "type" node of the
        # two branches foo_t and foo_p of the trees being the same node.
        # It's a bit silly but detecting "DAG-ness" in the AST tree seems
        # to be the only way to distinguish this case from two independent
        # structs.  See test_struct_with_two_usages.
        try:
            return self._structnode2type[type]
        except KeyError:
            pass
        #
        # Note that this must handle parsing "struct foo" any number of
        # times and always return the same StructType object.  Additionally,
        # one of these times (not necessarily the first), the fields of
        # the struct can be specified with "struct foo { ...fields... }".
        # If no name is given, then we have to create a new anonymous struct
        # with no caching; in this case, the fields are either specified
        # right now or never.
        #
        force_name = name
        name = type.name
        #
        # get the type or create it if needed
        if name is None:
            # 'force_name' is used to guess a more readable name for
            # anonymous structs, for the common case "typedef struct { } foo".
            if force_name is not None:
                explicit_name = '$%s' % force_name
            else:
                self._anonymous_counter += 1
                explicit_name = '$%d' % self._anonymous_counter
            tp = None
        else:
            explicit_name = name
            key = '%s %s' % (kind, name)
            tp, _ = self._declarations.get(key, (None, None))
        #
        if tp is None:
            if kind == 'struct':
                tp = model.StructType(explicit_name, None, None, None)
            elif kind == 'union':
                tp = model.UnionType(explicit_name, None, None, None)
            elif kind == 'enum':
                if explicit_name == '__dotdotdot__':
                    raise CDefError("Enums cannot be declared with ...")
                tp = self._build_enum_type(explicit_name, type.values)
            else:
                raise AssertionError("kind = %r" % (kind,))
            if name is not None:
                self._declare(key, tp)
        else:
            if kind == 'enum' and type.values is not None:
                raise NotImplementedError(
                    "enum %s: the '{}' declaration should appear on the first "
                    "time the enum is mentioned, not later" % explicit_name)
        if not tp.forcename:
            tp.force_the_name(force_name)
        if tp.forcename and '$' in tp.name:
            self._declare('anonymous %s' % tp.forcename, tp)
        #
        self._structnode2type[type] = tp
        #
        # enums: done here
        if kind == 'enum':
            return tp
        #
        # is there a 'type.decls'?  If yes, then this is the place in the
        # C sources that declare the fields.  If no, then just return the
        # existing type, possibly still incomplete.
        if type.decls is None:
            return tp
        #
        if tp.fldnames is not None:
            raise CDefError("duplicate declaration of struct %s" % name)
        fldnames = []
        fldtypes = []
        fldbitsize = []
        fldquals = []
        for decl in type.decls:
            if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
                    ''.join(decl.type.names) == '__dotdotdot__'):
                # XXX pycparser is inconsistent: 'names' should be a list
                # of strings, but is sometimes just one string.  Use
                # str.join() as a way to cope with both.
                self._make_partial(tp, nested)
                continue
            if decl.bitsize is None:
                bitsize = -1
            else:
                bitsize = self._parse_constant(decl.bitsize)
            self._partial_length = False
            type, fqual = self._get_type_and_quals(decl.type,
                                                   partial_length_ok=True)
            if self._partial_length:
                self._make_partial(tp, nested)
            if isinstance(type, model.StructType) and type.partial:
                self._make_partial(tp, nested)
            fldnames.append(decl.name or '')
            fldtypes.append(type)
            fldbitsize.append(bitsize)
            fldquals.append(fqual)
        tp.fldnames = tuple(fldnames)
        tp.fldtypes = tuple(fldtypes)
        tp.fldbitsize = tuple(fldbitsize)
        tp.fldquals = tuple(fldquals)
        if fldbitsize != [-1] * len(fldbitsize):
            if isinstance(tp, model.StructType) and tp.partial:
                raise NotImplementedError("%s: using both bitfields and '...;'"
                                          % (tp,))
        tp.packed = self._options.get('packed')
        if tp.completed:    # must be re-completed: it is not opaque any more
            tp.completed = 0
            self._recomplete.append(tp)
        return tp

    def _make_partial(self, tp, nested):
        """Mark a struct/union as partial ('...' among its fields)."""
        if not isinstance(tp, model.StructOrUnion):
            raise CDefError("%s cannot be partial" % (tp,))
        if not tp.has_c_name() and not nested:
            raise NotImplementedError("%s is partial but has no C name" %(tp,))
        tp.partial = True

    def _parse_constant(self, exprnode, partial_length_ok=False):
        """Evaluate a constant expression node to a Python int.

        Supports integer/char literals, unary +/-, previously-defined
        constants, binary +/-, and the '[...]' array-length placeholder
        (only when partial_length_ok).
        """
        # for now, limited to expressions that are an immediate number
        # or positive/negative number
        if isinstance(exprnode, pycparser.c_ast.Constant):
            s = exprnode.value
            if '0' <= s[0] <= '9':
                s = s.rstrip('uUlL')
                try:
                    if s.startswith('0'):
                        return int(s, 8)
                    else:
                        return int(s, 10)
                except ValueError:
                    if len(s) > 1:
                        if s.lower()[0:2] == '0x':
                            return int(s, 16)
                        elif s.lower()[0:2] == '0b':
                            return int(s, 2)
                    raise CDefError("invalid constant %r" % (s,))
            elif s[0] == "'" and s[-1] == "'" and (
                    len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
                return ord(s[-2])
            else:
                raise CDefError("invalid constant %r" % (s,))
        #
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                exprnode.op == '+'):
            return self._parse_constant(exprnode.expr)
        #
        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                exprnode.op == '-'):
            return -self._parse_constant(exprnode.expr)
        # load previously defined int constant
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name in self._int_constants):
            return self._int_constants[exprnode.name]
        #
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name == '__dotdotdotarray__'):
            if partial_length_ok:
                self._partial_length = True
                return '...'
            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
                           "the actual array length in this context"
                           % exprnode.coord.line)
        #
        if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
                exprnode.op == '+'):
            return (self._parse_constant(exprnode.left) +
                    self._parse_constant(exprnode.right))
        #
        if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
                exprnode.op == '-'):
            return (self._parse_constant(exprnode.left) -
                    self._parse_constant(exprnode.right))
        #
        raise FFIError(":%d: unsupported expression: expected a "
                       "simple numeric constant" % exprnode.coord.line)

    def _build_enum_type(self, explicit_name, decls):
        """Build a model.EnumType; decls is None for an opaque enum."""
        if decls is not None:
            partial = False
            enumerators = []
            enumvalues = []
            nextenumvalue = 0
            for enum in decls.enumerators:
                if _r_enum_dotdotdot.match(enum.name):
                    partial = True
                    continue
                if enum.value is not None:
                    nextenumvalue = self._parse_constant(enum.value)
                enumerators.append(enum.name)
                enumvalues.append(nextenumvalue)
                self._add_constants(enum.name, nextenumvalue)
                nextenumvalue += 1
            enumerators = tuple(enumerators)
            enumvalues = tuple(enumvalues)
            tp = model.EnumType(explicit_name, enumerators, enumvalues)
            tp.partial = partial
        else:   # opaque enum
            tp = model.EnumType(explicit_name, (), ())
        return tp

    def include(self, other):
        """Copy declarations and constants from another Parser instance."""
        for name, (tp, quals) in other._declarations.items():
            if name.startswith('anonymous $enum_$'):
                continue   # fix for test_anonymous_enum_include
            kind = name.split(' ', 1)[0]
            if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
                self._declare(name, tp, included=True, quals=quals)
        for k, v in other._int_constants.items():
            self._add_constants(k, v)

    def _get_unknown_type(self, decl):
        """Map a '__dotdotdot*__' typedef to the matching unknown model type."""
        typenames = decl.type.type.names
        if typenames == ['__dotdotdot__']:
            return model.unknown_type(decl.name)

        if typenames == ['__dotdotdotint__']:
            if self._uses_new_feature is None:
                self._uses_new_feature = "'typedef int... %s'" % decl.name
            return model.UnknownIntegerType(decl.name)

        if typenames == ['__dotdotdotfloat__']:
            # note: not for 'long double' so far
            if self._uses_new_feature is None:
                self._uses_new_feature = "'typedef float... %s'" % decl.name
            return model.UnknownFloatType(decl.name)

        raise FFIError(':%d: unsupported usage of "..." in typedef'
                       % decl.coord.line)

    def _get_unknown_ptr_type(self, decl):
        """Map a 'typedef ... *name' declaration to an unknown pointer type."""
        if decl.type.type.type.names == ['__dotdotdot__']:
            return model.unknown_ptr_type(decl.name)
        raise FFIError(':%d: unsupported usage of "..." in typedef'
                       % decl.coord.line)
|
@ -0,0 +1,31 @@
|
||||
|
||||
class FFIError(Exception):
    """Error raised by cffi at the FFI level (e.g. conflicting or
    unsupported declarations)."""
    # report 'cffi.FFIError' rather than the internal module path
    __module__ = 'cffi'
|
||||
|
||||
class CDefError(Exception):
    """Error raised for invalid C declarations passed to cdef()."""
    # report 'cffi.CDefError' rather than the internal module path
    __module__ = 'cffi'

    def __str__(self):
        # When a second argument carrying pycparser coordinates is
        # attached, prefix the message with "file:line: ".
        prefix = ''
        try:
            coord = self.args[1].coord
            prefix = '%s:%d: ' % (coord.file, coord.line)
        except (AttributeError, TypeError, IndexError):
            pass
        return '%s%s' % (prefix, self.args[0])
|
||||
|
||||
class VerificationError(Exception):
    """An error raised when verification fails."""
    # report 'cffi.VerificationError' rather than the internal module path
    __module__ = 'cffi'
|
||||
|
||||
class VerificationMissing(Exception):
    """An error raised when incomplete structures are passed into
    cdef, but no verification has been done."""
    # report 'cffi.VerificationMissing' rather than the internal module path
    __module__ = 'cffi'
|
||||
|
||||
class PkgConfigError(Exception):
    """An error raised for missing modules in pkg-config."""
    # report 'cffi.PkgConfigError' rather than the internal module path
    __module__ = 'cffi'
|
@ -0,0 +1,127 @@
|
||||
import sys, os
|
||||
from .error import VerificationError
|
||||
|
||||
|
||||
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
|
||||
'extra_objects', 'depends']
|
||||
|
||||
def get_extension(srcfilename, modname, sources=(), **kwds):
    """Build a distutils Extension named *modname*.

    *srcfilename* becomes the first source file; each entry of *sources*
    is appended after path normalization.  Extra keyword arguments are
    forwarded to Extension() unchanged.
    """
    # Must run before anything from distutils is imported (Windows fix).
    _hack_at_distutils()
    from distutils.core import Extension
    allsources = [srcfilename] + [os.path.normpath(src) for src in sources]
    return Extension(name=modname, sources=allsources, **kwds)
|
||||
|
||||
def compile(tmpdir, ext, compiler_verbose=0, debug=None):
    """Compile a C extension module using distutils.

    Builds *ext* into *tmpdir* and returns the absolute path of the
    produced shared object.  NOTE: intentionally shadows the builtin
    'compile' inside this module.
    """

    _hack_at_distutils()
    # Snapshot the environment: distutils mutates os.environ.
    saved_environ = os.environ.copy()
    try:
        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
        outputfilename = os.path.abspath(outputfilename)
    finally:
        # workaround for a distutils bug where some env vars can
        # become longer and longer every time it is used: restore any
        # key whose value changed (newly-added keys are left in place)
        for key, value in saved_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value
    return outputfilename
|
||||
|
||||
def _build(tmpdir, ext, compiler_verbose=0, debug=None):
    """Drive distutils' build_ext on a throwaway Distribution and return
    the path of the single output file.

    Raises VerificationError on compile/link failure.
    """
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors, distutils.log
    #
    dist = Distribution({'ext_modules': [ext]})
    dist.parse_config_files()
    options = dist.get_option_dict('build_ext')
    if debug is None:
        # follow the interpreter's own debug flag by default
        debug = sys.flags.debug
    options['debug'] = ('ffiplatform', debug)
    options['force'] = ('ffiplatform', True)
    options['build_lib'] = ('ffiplatform', tmpdir)
    options['build_temp'] = ('ffiplatform', tmpdir)
    #
    try:
        # temporarily take control of distutils' logging verbosity,
        # restoring the previous threshold afterwards
        old_level = distutils.log.set_threshold(0) or 0
        try:
            distutils.log.set_verbosity(compiler_verbose)
            dist.run_command('build_ext')
            cmd_obj = dist.get_command_obj('build_ext')
            # exactly one output is expected for a single ext_module
            [soname] = cmd_obj.get_outputs()
        finally:
            distutils.log.set_threshold(old_level)
    except (distutils.errors.CompileError,
            distutils.errors.LinkError) as e:
        # surface toolchain failures as cffi's own exception type
        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    #
    return soname
|
||||
|
||||
try:
    from os.path import samefile
except ImportError:
    # platforms without os.path.samefile (e.g. old Windows): fall back
    # to comparing absolute paths, which ignores links but is good enough
    def samefile(f1, f2):
        return os.path.abspath(f1) == os.path.abspath(f2)
|
||||
|
||||
def maybe_relative_path(path):
    """Return *path* rewritten relative to the current directory when the
    current directory is one of its ancestors; otherwise return *path*
    unchanged."""
    if not os.path.isabs(path):
        return path                  # nothing to do, already relative
    trailing = []
    current = path
    while True:
        parent, component = os.path.split(current)
        if parent == current or not parent:
            return path              # reached the root: cannot relativize
        trailing.append(component)
        try:
            if samefile(parent, os.curdir):
                # 'parent' is the cwd: rejoin the peeled-off components
                return os.path.join(*reversed(trailing))
        except OSError:
            pass                     # e.g. an ancestor that does not exist
        current = parent
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
try:
    # Python 2: accept both int and long; use the fast cStringIO.
    int_or_long = (int, long)
    import cStringIO
except NameError:
    int_or_long = int      # Python 3: a single integer type
    import io as cStringIO

def _flatten(x, f):
    """Serialize *x* into *f* as a canonical, bencode-like string.

    Supports str, dict (keys sorted for determinism), list/tuple and
    integers, recursively; anything else raises TypeError.
    """
    if isinstance(x, str):
        f.write('%ds%s' % (len(x), x))
    elif isinstance(x, dict):
        sorted_keys = sorted(x.keys())
        f.write('%dd' % len(sorted_keys))
        for k in sorted_keys:
            _flatten(k, f)
            _flatten(x[k], f)
    elif isinstance(x, (list, tuple)):
        f.write('%dl' % len(x))
        for item in x:
            _flatten(item, f)
    elif isinstance(x, int_or_long):
        f.write('%di' % (x,))
    else:
        raise TypeError(
            "the keywords to verify() contains unsupported object %r" % (x,))

def flatten(x):
    """Return the canonical string form of *x* (see _flatten); used to
    fingerprint the keyword arguments of verify()."""
    buf = cStringIO.StringIO()
    _flatten(x, buf)
    return buf.getvalue()
|
||||
|
||||
def _hack_at_distutils():
|
||||
# Windows-only workaround for some configurations: see
|
||||
# https://bugs.python.org/issue23246 (Python 2.7 with
|
||||
# a specific MS compiler suite download)
|
||||
if sys.platform == "win32":
|
||||
try:
|
||||
import setuptools # for side-effects, patches distutils
|
||||
except ImportError:
|
||||
pass
|
@ -0,0 +1,30 @@
|
||||
import sys

# Obtain allocate_lock() from whichever thread module this interpreter
# provides; fall back to the dummy (single-threaded) implementation when
# real threads are unavailable.
if sys.version_info < (3,):
    try:
        from thread import allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
else:
    try:
        from _thread import allocate_lock
    except ImportError:
        from _dummy_thread import allocate_lock
|
||||
|
||||
|
||||
##import sys
|
||||
##l1 = allocate_lock
|
||||
|
||||
##class allocate_lock(object):
|
||||
## def __init__(self):
|
||||
## self._real = l1()
|
||||
## def __enter__(self):
|
||||
## for i in range(4, 0, -1):
|
||||
## print sys._getframe(i).f_code
|
||||
## print
|
||||
## return self._real.__enter__()
|
||||
## def __exit__(self, *args):
|
||||
## return self._real.__exit__(*args)
|
||||
## def acquire(self, f):
|
||||
## assert f is False
|
||||
## return self._real.acquire(f)
|
@ -0,0 +1,614 @@
|
||||
import types
|
||||
import weakref
|
||||
|
||||
from .lock import allocate_lock
|
||||
from .error import CDefError, VerificationError, VerificationMissing
|
||||
|
||||
# type qualifiers, stored as a bitmask
Q_CONST    = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04

def qualify(quals, replace_with):
    """Return *replace_with* prefixed with the C qualifier keywords whose
    Q_* bits are set in *quals* (each with a single leading space)."""
    # __restrict is applied last so it ends up outermost.  It seems that
    # __restrict is supported by gcc and msvc.  If you hit some different
    # compiler, add a #define in _cffi_include.h for it (and in its
    # copies, documented there).
    for bit, keyword in ((Q_CONST, 'const'),
                         (Q_VOLATILE, 'volatile'),
                         (Q_RESTRICT, '__restrict')):
        if quals & bit:
            replace_with = ' %s %s' % (keyword, replace_with.lstrip())
    return replace_with
|
||||
|
||||
|
||||
class BaseTypeByIdentity(object):
    """Root of the model type hierarchy; instances compare by identity.

    Subclasses set 'c_name_with_marker': the C name of the type with a
    single '&' marker at the spot where a variable name would go.
    """
    is_array_type = False
    is_raw_function = False

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        """Render this type's C declaration with *replace_with* (e.g. a
        variable name) substituted at the marker, applying the Q_* bits
        in *quals*.  Raises VerificationError if the name contains '$'
        (i.e. an internal, not-really-C name)."""
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            # '*name' substituted into an array type needs parentheses to
            # keep the pointer binding tighter than the brackets
            if replace_with.startswith('*') and '&[' in result:
                replace_with = '(%s)' % replace_with
            elif not replace_with[0] in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        # the C name without any marker, for display purposes
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # '$' flags names that are internal placeholders, not real C
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type for this model type, building and
        caching it on first use (per-ffi cache)."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # the (name, value) pairs that define this type, per '_attrs_'
        return [(name, getattr(self, name)) for name in self._attrs_]
|
||||
|
||||
|
||||
class BaseType(BaseTypeByIdentity):
    """A model type compared by value: two instances of the same class
    whose '_attrs_' items are equal are interchangeable, and hash
    consistently with equality."""

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return self._get_items() == other._get_items()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, tuple(self._get_items())))
|
||||
|
||||
|
||||
class VoidType(BaseType):
    """The C 'void' type."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

# module-level singleton: all uses of 'void' share this instance
void_type = VoidType()
|
||||
|
||||
|
||||
class BasePrimitiveType(BaseType):
    """Base for primitive-like types (known or unknown-size)."""
    def is_complex_type(self):
        # overridden in PrimitiveType for _Complex kinds
        return False
|
||||
|
||||
|
||||
class PrimitiveType(BasePrimitiveType):
    """A named C primitive type.

    ALL_PRIMITIVE_TYPES maps each accepted name to a one-letter kind:
    'c' = character, 'i' = integer, 'f' = float, 'j' = complex.
    """
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char':               'c',
        'short':              'i',
        'int':                'i',
        'long':               'i',
        'long long':          'i',
        'signed char':        'i',
        'unsigned char':      'i',
        'unsigned short':     'i',
        'unsigned int':       'i',
        'unsigned long':      'i',
        'unsigned long long': 'i',
        'float':              'f',
        'double':             'f',
        'long double':        'f',
        'float _Complex':     'j',
        'double _Complex':    'j',
        '_Bool':              'i',
        # the following types are not primitive in the C sense
        'wchar_t':            'c',
        'char16_t':           'c',
        'char32_t':           'c',
        'int8_t':             'i',
        'uint8_t':            'i',
        'int16_t':            'i',
        'uint16_t':           'i',
        'int32_t':            'i',
        'uint32_t':           'i',
        'int64_t':            'i',
        'uint64_t':           'i',
        'int_least8_t':       'i',
        'uint_least8_t':      'i',
        'int_least16_t':      'i',
        'uint_least16_t':     'i',
        'int_least32_t':      'i',
        'uint_least32_t':     'i',
        'int_least64_t':      'i',
        'uint_least64_t':     'i',
        'int_fast8_t':        'i',
        'uint_fast8_t':       'i',
        'int_fast16_t':       'i',
        'uint_fast16_t':      'i',
        'int_fast32_t':       'i',
        'uint_fast32_t':      'i',
        'int_fast64_t':       'i',
        'uint_fast64_t':      'i',
        'intptr_t':           'i',
        'uintptr_t':          'i',
        'intmax_t':           'i',
        'uintmax_t':          'i',
        'ptrdiff_t':          'i',
        'size_t':             'i',
        'ssize_t':            'i',
        }

    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
    def is_complex_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)
|
||||
|
||||
|
||||
class UnknownIntegerType(BasePrimitiveType):
    """An integer type of unknown size/signedness, from
    'typedef int... name;' -- usable only after compilation."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_integer_type(self):
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)
|
||||
|
||||
class UnknownFloatType(BasePrimitiveType):
    """A float type of unknown size, from 'typedef float... name;' --
    usable only after compilation."""
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("float type '%s' can only be used after "
                                  "compilation" % self.name)
|
||||
|
||||
|
||||
class BaseFunctionType(BaseType):
    """Common base of function types: argument types, result type, a
    variadic flag, and an optional calling-convention string (abi)."""
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        # Build the C rendering: 'result (pattern)(arg1, arg2, ...)',
        # with '...' appended for variadic and 'void' for no arguments.
        reprargs = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            reprargs.append('...')
        reprargs = reprargs or ['void']
        replace_with = self._base_pattern % (', '.join(reprargs),)
        if abi is not None:
            # insert the abi keyword just after the opening parenthesis
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))
|
||||
|
||||
|
||||
class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        raise CDefError("cannot render the type %r: it is a function "
                        "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        # the pointer-to-function form the backend can actually represent
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
|
||||
|
||||
|
||||
class FunctionPtrType(BaseFunctionType):
    """A pointer-to-function type, e.g. 'int (*)(int)'."""
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = []
        for tp in self.args:
            args.append(tp.get_cached_btype(ffi, finishlist))
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    # backend without stdcall support: silently fall back
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        # the plain-function counterpart (see RawFunctionType)
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
|
||||
|
||||
|
||||
class PointerType(BaseType):
    """A (possibly qualified) pointer to another model type."""
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            # pointer-to-array needs parentheses: 'int (*)[5]'
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        # can_delay=True: a pointer can be built before its target is
        # fully completed (e.g. pointers to not-yet-finished structs)
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)

# shared singletons for 'void *' and 'const void *'
voidp_type = PointerType(void_type)

def ConstPointerType(totype):
    # convenience factory for 'const totype *'
    return PointerType(totype, Q_CONST)

const_voidp_type = ConstPointerType(void_type)
|
||||
|
||||
|
||||
class NamedPointerType(PointerType):
    """A pointer type known under a typedef name; renders as the name
    itself rather than the expanded '... *' form."""
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        # override the rendering set up by PointerType.__init__
        self.c_name_with_marker = name + '&'
|
||||
|
||||
|
||||
class ArrayType(BaseType):
    """An array of 'item'; 'length' is an int, None for '[]', or the
    string '...' for a length to be deduced later."""
    _attrs_ = ('item', 'length')
    is_array_type = True

    def __init__(self, item, length):
        self.item = item
        self.length = length
        #
        if length is None:
            brackets = '&[]'
        elif length == '...':
            brackets = '&[/*...*/]'
        else:
            brackets = '&[%s]' % length
        self.c_name_with_marker = (
            self.item.c_name_with_marker.replace('&', brackets))

    def resolve_length(self, newlength):
        # return a copy with the '...' length replaced by a real one
        return ArrayType(self.item, newlength)

    def build_backend_type(self, ffi, finishlist):
        if self.length == '...':
            raise CDefError("cannot render the type %r: unknown length" %
                            (self,))
        self.item.get_cached_btype(ffi, finishlist)   # force the item BType
        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)

# shared singleton for 'char[]'
char_array_type = ArrayType(PrimitiveType('char'), None)
|
||||
|
||||
|
||||
class StructOrUnionOrEnum(BaseTypeByIdentity):
    """Common base of struct/union/enum types.

    'forcename', when set, overrides the default "kind name" rendering
    (used e.g. for typedef'ed or internal '$'-prefixed names).
    """
    _attrs_ = ('name',)
    forcename = None

    def build_c_name_with_marker(self):
        name = self.forcename or '%s %s' % (self.kind, self.name)
        self.c_name_with_marker = name + '&'

    def force_the_name(self, forcename):
        self.forcename = forcename
        self.build_c_name_with_marker()

    def get_official_name(self):
        assert self.c_name_with_marker.endswith('&')
        return self.c_name_with_marker[:-1]
|
||||
|
||||
|
||||
class StructOrUnion(StructOrUnionOrEnum):
    """A struct or union type.

    Field data lives in the parallel tuples fldnames/fldtypes/fldbitsize
    /fldquals; fldtypes is None for an opaque declaration.  'completed'
    tracks the backend-completion protocol: 0 = not started, 1 = in
    progress (used to detect recursion), 2 = done.
    """
    fixedlayout = None   # set to (fieldofs, fieldsize, totalsize, align)
                         # when the layout comes from the compiler
    completed = 0
    partial = False
    packed = 0

    def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
        self.name = name
        self.fldnames = fldnames
        self.fldtypes = fldtypes
        self.fldbitsize = fldbitsize
        self.fldquals = fldquals
        self.build_c_name_with_marker()

    def anonymous_struct_fields(self):
        # yield the types of fields that are anonymous nested structs
        if self.fldtypes is not None:
            for name, type in zip(self.fldnames, self.fldtypes):
                if name == '' and isinstance(type, StructOrUnion):
                    yield type

    def enumfields(self, expand_anonymous_struct_union=True):
        """Yield (name, type, bitsize, quals) for each field, optionally
        recursing into anonymous nested structs/unions."""
        fldquals = self.fldquals
        if fldquals is None:
            fldquals = (0,) * len(self.fldnames)
        for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
                                              self.fldbitsize, fldquals):
            if (name == '' and isinstance(type, StructOrUnion)
                    and expand_anonymous_struct_union):
                # nested anonymous struct/union
                for result in type.enumfields():
                    yield result
            else:
                yield (name, type, bitsize, quals)

    def force_flatten(self):
        # force the struct or union to have a declaration that lists
        # directly all fields returned by enumfields(), flattening
        # nested anonymous structs/unions.
        names = []
        types = []
        bitsizes = []
        fldquals = []
        for name, type, bitsize, quals in self.enumfields():
            names.append(name)
            types.append(type)
            bitsizes.append(bitsize)
            fldquals.append(quals)
        self.fldnames = tuple(names)
        self.fldtypes = tuple(types)
        self.fldbitsize = tuple(bitsizes)
        self.fldquals = tuple(fldquals)

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
                                                     can_delay)
        if not can_delay:
            # complete the struct now unless the caller can wait
            self.finish_backend_type(ffi, finishlist)
        return BType

    def finish_backend_type(self, ffi, finishlist):
        """Fill in the backend struct's fields (second completion phase);
        idempotent, and detects recursive declarations."""
        if self.completed:
            if self.completed != 2:
                raise NotImplementedError("recursive structure declaration "
                                          "for '%s'" % (self.name,))
            return
        BType = ffi._cached_btypes[self]
        #
        self.completed = 1   # mark in-progress to catch recursion
        #
        if self.fldtypes is None:
            pass    # not completing it: it's an opaque struct
            #
        elif self.fixedlayout is None:
            # layout left to the backend: pass fields as declared
            fldtypes = [tp.get_cached_btype(ffi, finishlist)
                        for tp in self.fldtypes]
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
            extra_flags = ()
            if self.packed:
                if self.packed == 1:
                    extra_flags = (8,)    # SF_PACKED
                else:
                    # extra_flags = (SF_PACKED, max_field_alignment)
                    extra_flags = (0, self.packed)
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  -1, -1, *extra_flags)
            #
        else:
            # layout dictated by the compiler: verify sizes and resolve
            # any '...'-length arrays against the measured field sizes
            fldtypes = []
            fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
            for i in range(len(self.fldnames)):
                fsize = fieldsize[i]
                ftype = self.fldtypes[i]
                #
                if isinstance(ftype, ArrayType) and ftype.length == '...':
                    # fix the length to match the total size
                    BItemType = ftype.item.get_cached_btype(ffi, finishlist)
                    nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
                    if nrest != 0:
                        self._verification_error(
                            "field '%s.%s' has a bogus size?" % (
                            self.name, self.fldnames[i] or '{}'))
                    ftype = ftype.resolve_length(nlen)
                    self.fldtypes = (self.fldtypes[:i] + (ftype,) +
                                     self.fldtypes[i+1:])
                #
                BFieldType = ftype.get_cached_btype(ffi, finishlist)
                if isinstance(ftype, ArrayType) and ftype.length is None:
                    # open-ended array field: must occupy no measured size
                    assert fsize == 0
                else:
                    bitemsize = ffi.sizeof(BFieldType)
                    if bitemsize != fsize:
                        self._verification_error(
                            "field '%s.%s' is declared as %d bytes, but is "
                            "really %d bytes" % (self.name,
                                                 self.fldnames[i] or '{}',
                                                 bitemsize, fsize))
                fldtypes.append(BFieldType)
            #
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  totalsize, totalalignment)
        self.completed = 2

    def _verification_error(self, msg):
        raise VerificationError(msg)

    def check_not_partial(self):
        # a '...;' struct without a compiler-provided layout is unusable
        if self.partial and self.fixedlayout is None:
            raise VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        # register for the second completion phase (finish_backend_type)
        finishlist.append(self)
        #
        return global_cache(self, ffi, 'new_%s_type' % self.kind,
                            self.get_official_name(), key=self)
|
||||
|
||||
|
||||
class StructType(StructOrUnion):
    # 'kind' selects both the C rendering and the backend constructor
    kind = 'struct'
|
||||
|
||||
|
||||
class UnionType(StructOrUnion):
    # same field machinery as StructType, different kind
    kind = 'union'
|
||||
|
||||
|
||||
class EnumType(StructOrUnionOrEnum):
    """An enum type with parallel 'enumerators'/'enumvalues' tuples and
    an optional explicit base integer type."""
    kind = 'enum'
    partial = False
    partial_resolved = False

    def __init__(self, name, enumerators, enumvalues, baseinttype=None):
        self.name = name
        self.enumerators = enumerators
        self.enumvalues = enumvalues
        self.baseinttype = baseinttype
        self.build_c_name_with_marker()

    def force_the_name(self, forcename):
        StructOrUnionOrEnum.force_the_name(self, forcename)
        if self.forcename is None:
            # keep a stable internal '$'-name even when un-forcing
            name = self.get_official_name()
            self.forcename = '$' + name.replace(' ', '_')

    def check_not_partial(self):
        if self.partial and not self.partial_resolved:
            raise VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        base_btype = self.build_baseinttype(ffi, finishlist)
        return global_cache(self, ffi, 'new_enum_type',
                            self.get_official_name(),
                            self.enumerators, self.enumvalues,
                            base_btype, key=self)

    def build_baseinttype(self, ffi, finishlist):
        """Pick the underlying integer btype: the explicit one if given,
        otherwise the smallest of (unsigned) int/long that fits all
        enumerator values."""
        if self.baseinttype is not None:
            return self.baseinttype.get_cached_btype(ffi, finishlist)
        #
        if self.enumvalues:
            smallest_value = min(self.enumvalues)
            largest_value = max(self.enumvalues)
        else:
            import warnings
            try:
                # XXX! The goal is to ensure that the warnings.warn()
                # will not suppress the warning. We want to get it
                # several times if we reach this point several times.
                __warningregistry__.clear()
            except NameError:
                pass
            warnings.warn("%r has no values explicitly defined; "
                          "guessing that it is equivalent to 'unsigned int'"
                          % self._get_c_name())
            smallest_value = largest_value = 0
        if smallest_value < 0:   # needs a signed type
            sign = 1
            candidate1 = PrimitiveType("int")
            candidate2 = PrimitiveType("long")
        else:
            sign = 0
            candidate1 = PrimitiveType("unsigned int")
            candidate2 = PrimitiveType("unsigned long")
        btype1 = candidate1.get_cached_btype(ffi, finishlist)
        btype2 = candidate2.get_cached_btype(ffi, finishlist)
        size1 = ffi.sizeof(btype1)
        size2 = ffi.sizeof(btype2)
        # range checks: [-2**(8*size-1), 2**(8*size-sign))
        if (smallest_value >= ((-1) << (8*size1-1)) and
                largest_value < (1 << (8*size1-sign))):
            return btype1
        if (smallest_value >= ((-1) << (8*size2-1)) and
                largest_value < (1 << (8*size2-sign))):
            return btype2
        raise CDefError("%s values don't all fit into either 'long' "
                        "or 'unsigned long'" % self._get_c_name())
|
||||
|
||||
def unknown_type(name, structname=None):
    """Return an opaque StructType standing in for the unknown type
    *name*; *structname* defaults to '$name' (a deliberately invalid C
    identifier, so it can never be rendered by accident)."""
    if structname is None:
        structname = '$%s' % (name,)
    result = StructType(structname, None, None, None)
    result.force_the_name(name)
    result.origin = "unknown_type"
    return result
|
||||
|
||||
def unknown_ptr_type(name, structname=None):
    """Return a NamedPointerType named *name* pointing to a fresh opaque
    struct ('$$name' by default, a non-C internal name)."""
    if structname is None:
        structname = '$$%s' % name
    tp = StructType(structname, None, None, None)
    return NamedPointerType(tp, name)
|
||||
|
||||
|
||||
# Lock protecting the shared type caches below and in global_cache().
global_lock = allocate_lock()
# Cache shared by all ffi objects backed by the _cffi_backend C module.
_typecache_cffi_backend = weakref.WeakValueDictionary()

def get_typecache(backend):
    # returns _typecache_cffi_backend if backend is the _cffi_backend
    # module, or type(backend).__typecache if backend is an instance of
    # CTypesBackend (or some FakeBackend class during tests)
    if isinstance(backend, types.ModuleType):
        return _typecache_cffi_backend
    with global_lock:
        # lazily attach one cache per backend class
        if not hasattr(type(backend), '__typecache'):
            type(backend).__typecache = weakref.WeakValueDictionary()
        return type(backend).__typecache
|
||||
|
||||
def global_cache(srctype, ffi, funcname, *args, **kwds):
    """Call ffi._backend.<funcname>(*args), caching the result in
    ffi._typecache keyed by 'key' (default: (funcname, args)).

    *srctype* is only used to enrich error messages.
    """
    key = kwds.pop('key', (funcname, args))
    assert not kwds
    try:
        return ffi._typecache[key]
    except KeyError:
        pass
    try:
        res = getattr(ffi._backend, funcname)(*args)
    except NotImplementedError as e:
        # re-raise with the source type for a more helpful message
        raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
    # note that setdefault() on WeakValueDictionary is not atomic
    # and contains a rare bug (http://bugs.python.org/issue19542);
    # we have to use a lock and do it ourselves
    cache = ffi._typecache
    with global_lock:
        res1 = cache.get(key)
        if res1 is None:
            cache[key] = res
            return res
        else:
            # another thread won the race: keep its instance
            return res1
|
||||
|
||||
def pointer_cache(ffi, BType):
    # cached 'pointer to BType'; srctype '?' is only for error messages
    return global_cache('?', ffi, 'new_pointer_type', BType)
|
||||
|
||||
def attach_exception_info(e, name):
    """Prepend '<name>: ' to the first argument of exception *e*, in
    place, when that argument is exactly a str (other payload types are
    left untouched)."""
    args = e.args
    if args and type(args[0]) is str:
        e.args = ('%s: %s' % (name, args[0]),) + args[1:]
|
@ -0,0 +1,181 @@
|
||||
|
||||
/* This part is from file 'cffi/parse_c_type.h'.  It is copied at the
   beginning of C sources generated by CFFI's ffi.set_source(). */

typedef void *_cffi_opcode_t;

/* An opcode packs an 8-bit operation code with an argument shifted
   left by 8 bits, all inside a pointer-sized value. */
#define _CFFI_OP(opcode, arg)   (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
#define _CFFI_GETOP(cffi_opcode)    ((unsigned char)(uintptr_t)cffi_opcode)
#define _CFFI_GETARG(cffi_opcode)   (((intptr_t)cffi_opcode) >> 8)

/* Operation codes (odd numbers, so they can't collide with aligned
   pointers stored in the same slots). */
#define _CFFI_OP_PRIMITIVE       1
#define _CFFI_OP_POINTER         3
#define _CFFI_OP_ARRAY           5
#define _CFFI_OP_OPEN_ARRAY      7
#define _CFFI_OP_STRUCT_UNION    9
#define _CFFI_OP_ENUM           11
#define _CFFI_OP_FUNCTION       13
#define _CFFI_OP_FUNCTION_END   15
#define _CFFI_OP_NOOP           17
#define _CFFI_OP_BITFIELD       19
#define _CFFI_OP_TYPENAME       21
#define _CFFI_OP_CPYTHON_BLTN_V 23   // varargs
#define _CFFI_OP_CPYTHON_BLTN_N 25   // noargs
#define _CFFI_OP_CPYTHON_BLTN_O 27   // O  (i.e. a single arg)
#define _CFFI_OP_CONSTANT       29
#define _CFFI_OP_CONSTANT_INT   31
#define _CFFI_OP_GLOBAL_VAR     33
#define _CFFI_OP_DLOPEN_FUNC    35
#define _CFFI_OP_DLOPEN_CONST   37
#define _CFFI_OP_GLOBAL_VAR_F   39
#define _CFFI_OP_EXTERN_PYTHON  41

/* Primitive type codes, used as the argument of _CFFI_OP_PRIMITIVE. */
#define _CFFI_PRIM_VOID          0
#define _CFFI_PRIM_BOOL          1
#define _CFFI_PRIM_CHAR          2
#define _CFFI_PRIM_SCHAR         3
#define _CFFI_PRIM_UCHAR         4
#define _CFFI_PRIM_SHORT         5
#define _CFFI_PRIM_USHORT        6
#define _CFFI_PRIM_INT           7
#define _CFFI_PRIM_UINT          8
#define _CFFI_PRIM_LONG          9
#define _CFFI_PRIM_ULONG        10
#define _CFFI_PRIM_LONGLONG     11
#define _CFFI_PRIM_ULONGLONG    12
#define _CFFI_PRIM_FLOAT        13
#define _CFFI_PRIM_DOUBLE       14
#define _CFFI_PRIM_LONGDOUBLE   15

#define _CFFI_PRIM_WCHAR        16
#define _CFFI_PRIM_INT8         17
#define _CFFI_PRIM_UINT8        18
#define _CFFI_PRIM_INT16        19
#define _CFFI_PRIM_UINT16       20
#define _CFFI_PRIM_INT32        21
#define _CFFI_PRIM_UINT32       22
#define _CFFI_PRIM_INT64        23
#define _CFFI_PRIM_UINT64       24
#define _CFFI_PRIM_INTPTR       25
#define _CFFI_PRIM_UINTPTR      26
#define _CFFI_PRIM_PTRDIFF      27
#define _CFFI_PRIM_SIZE         28
#define _CFFI_PRIM_SSIZE        29
#define _CFFI_PRIM_INT_LEAST8   30
#define _CFFI_PRIM_UINT_LEAST8  31
#define _CFFI_PRIM_INT_LEAST16  32
#define _CFFI_PRIM_UINT_LEAST16 33
#define _CFFI_PRIM_INT_LEAST32  34
#define _CFFI_PRIM_UINT_LEAST32 35
#define _CFFI_PRIM_INT_LEAST64  36
#define _CFFI_PRIM_UINT_LEAST64 37
#define _CFFI_PRIM_INT_FAST8    38
#define _CFFI_PRIM_UINT_FAST8   39
#define _CFFI_PRIM_INT_FAST16   40
#define _CFFI_PRIM_UINT_FAST16  41
#define _CFFI_PRIM_INT_FAST32   42
#define _CFFI_PRIM_UINT_FAST32  43
#define _CFFI_PRIM_INT_FAST64   44
#define _CFFI_PRIM_UINT_FAST64  45
#define _CFFI_PRIM_INTMAX       46
#define _CFFI_PRIM_UINTMAX      47
#define _CFFI_PRIM_FLOATCOMPLEX 48
#define _CFFI_PRIM_DOUBLECOMPLEX 49
#define _CFFI_PRIM_CHAR16       50
#define _CFFI_PRIM_CHAR32       51

#define _CFFI__NUM_PRIM         52
/* negative codes: sizes to be discovered at runtime */
#define _CFFI__UNKNOWN_PRIM           (-1)
#define _CFFI__UNKNOWN_FLOAT_PRIM     (-2)
#define _CFFI__UNKNOWN_LONG_DOUBLE    (-3)

#define _CFFI__IO_FILE_STRUCT         (-1)


struct _cffi_global_s {
    const char *name;
    void *address;
    _cffi_opcode_t type_op;
    void *size_or_direct_fn;  // OP_GLOBAL_VAR: size, or 0 if unknown
                              // OP_CPYTHON_BLTN_*: addr of direct function
};

struct _cffi_getconst_s {
    unsigned long long value;
    const struct _cffi_type_context_s *ctx;
    int gindex;
};

struct _cffi_struct_union_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_STRUCT_UNION
    int flags;               // _CFFI_F_* flags below
    size_t size;
    int alignment;
    int first_field_index;   // -> _cffi_fields array
    int num_fields;
};
#define _CFFI_F_UNION         0x01   // is a union, not a struct
#define _CFFI_F_CHECK_FIELDS  0x02   // complain if fields are not in the
                                     // "standard layout" or if some are missing
#define _CFFI_F_PACKED        0x04   // for CHECK_FIELDS, assume a packed struct
#define _CFFI_F_EXTERNAL      0x08   // in some other ffi.include()
#define _CFFI_F_OPAQUE        0x10   // opaque

struct _cffi_field_s {
    const char *name;
    size_t field_offset;
    size_t field_size;
    _cffi_opcode_t field_type_op;
};

struct _cffi_enum_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_ENUM
    int type_prim;           // _CFFI_PRIM_xxx
    const char *enumerators; // comma-delimited string
};

struct _cffi_typename_s {
    const char *name;
    int type_index;   /* if opaque, points to a possibly artificial
                         OP_STRUCT which is itself opaque */
};

/* Top-level descriptor emitted once per generated module. */
struct _cffi_type_context_s {
    _cffi_opcode_t *types;
    const struct _cffi_global_s *globals;
    const struct _cffi_field_s *fields;
    const struct _cffi_struct_union_s *struct_unions;
    const struct _cffi_enum_s *enums;
    const struct _cffi_typename_s *typenames;
    int num_globals;
    int num_struct_unions;
    int num_enums;
    int num_typenames;
    const char *const *includes;
    int num_types;
    int flags;      /* future extension */
};

struct _cffi_parse_info_s {
    const struct _cffi_type_context_s *ctx;
    _cffi_opcode_t *output;
    unsigned int output_size;
    size_t error_location;
    const char *error_message;
};

struct _cffi_externpy_s {
    const char *name;
    size_t size_of_result;
    void *reserved1, *reserved2;
};

#ifdef _CFFI_INTERNAL
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
static int search_in_globals(const struct _cffi_type_context_s *ctx,
                             const char *search, size_t search_len);
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
                                   const char *search, size_t search_len);
#endif
|
@ -0,0 +1,121 @@
|
||||
# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
|
||||
import sys, os, subprocess
|
||||
|
||||
from .error import PkgConfigError
|
||||
|
||||
|
||||
def merge_flags(cfg1, cfg2):
    """Merge every entry of *cfg2* into *cfg1* in place and return *cfg1*.

    Keys absent from *cfg1* are adopted as-is.  For keys present in both,
    the two values must each be a list of strings and cfg2's items are
    appended to cfg1's list, e.g.::

        merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
        -> {"libraries": ["one", "two"]}

    Raises TypeError when a shared key does not map to a list.
    """
    for key, extra in cfg2.items():
        if key in cfg1:
            current = cfg1[key]
            if not isinstance(current, list):
                raise TypeError("cfg1[%r] should be a list of strings" % (key,))
            if not isinstance(extra, list):
                raise TypeError("cfg2[%r] should be a list of strings" % (key,))
            current.extend(extra)
        else:
            cfg1[key] = extra
    return cfg1
|
||||
|
||||
|
||||
def call(libname, flag, encoding=None):
    """Run ``pkg-config <flag> <libname>`` and return its decoded output.

    :param libname: library spec passed to pkg-config (may include a
        version constraint, e.g. ``"libfoo >= 1.8"``).
    :param flag: one pkg-config option such as ``--cflags`` or ``--libs``.
    :param encoding: encoding used to decode pkg-config's output; defaults
        to the current ``sys.getfilesystemencoding()``.
    :raises PkgConfigError: if pkg-config cannot be run, exits non-zero,
        or produces undecodable or backslash-escaped output.
    """
    if encoding is None:
        # Resolve the default lazily: the old 'encoding=sys.getfilesystem-
        # encoding()' default was evaluated once at import time, freezing
        # whatever encoding was active when this module was first loaded.
        encoding = sys.getfilesystemencoding()
    a = ["pkg-config", "--print-errors"]
    a.append(flag)
    a.append(libname)
    try:
        pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except EnvironmentError as e:
        raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))

    bout, berr = pc.communicate()
    if pc.returncode != 0:
        # best effort: report pkg-config's own error text, decoded if possible
        try:
            berr = berr.decode(encoding)
        except Exception:
            pass
        raise PkgConfigError(berr.strip())

    if sys.version_info >= (3,) and not isinstance(bout, str):   # Python 3.x
        try:
            bout = bout.decode(encoding)
        except UnicodeDecodeError:
            raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
                                 "be decoded with encoding %r:\n%r" %
                                 (flag, libname, encoding, bout))

    # backslashes only make sense on platforms where '\' is a path
    # separator (os.altsep == '\\'); elsewhere they would be escapes we
    # cannot interpret reliably
    if os.altsep != '\\' and '\\' in bout:
        raise PkgConfigError("pkg-config %s %s returned an unsupported "
                             "backslash-escaped output:\n%r" %
                             (flag, libname, bout))
    return bout
|
||||
|
||||
|
||||
def flags_from_pkgconfig(libs):
    r"""Return compiler line flags for FFI.set_source based on pkg-config output

    Usage
        ...
        ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])

    If pkg-config is installed on build machine, then arguments include_dirs,
    library_dirs, libraries, define_macros, extra_compile_args and
    extra_link_args are extended with an output of pkg-config for libfoo and
    libbar.

    Raises PkgConfigError in case the pkg-config call fails.
    """

    def _with_prefix(string, prefix):
        # options starting with `prefix`, with the prefix itself chopped off
        return [x[2:] for x in string.split() if x.startswith(prefix)]

    def _without_prefixes(string, prefixes):
        # options starting with none of `prefixes`, kept verbatim
        return [x for x in string.split() if not x.startswith(prefixes)]

    def _as_macro(x):
        # convert "-Dfoo=bar" to the ("foo", "bar") tuple expected by
        # distutils; a bare "-Dfoo" becomes ("foo", None)
        x = x[2:]                      # drop "-D"
        if '=' in x:
            return tuple(x.split("=", 1))
        return (x, None)

    def _flags_for(libname):
        # query pkg-config twice and sort its options into the distutils
        # keyword arguments understood by FFI.set_source()
        all_cflags = call(libname, "--cflags")
        all_libs = call(libname, "--libs")
        return {
            "include_dirs": _with_prefix(all_cflags, "-I"),
            "library_dirs": _with_prefix(all_libs, "-L"),
            "libraries": _with_prefix(all_libs, "-l"),
            "define_macros": [_as_macro(x) for x in all_cflags.split()
                              if x.startswith("-D")],
            "extra_compile_args": _without_prefixes(all_cflags, ("-I", "-D")),
            "extra_link_args": _without_prefixes(all_libs, ("-L", "-l")),
        }

    # merge the per-library results together
    merged = {}
    for libname in libs:
        merge_flags(merged, _flags_for(libname))
    return merged
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,217 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
try:
    # Python 2: 'basestring' covers both str and unicode.
    basestring
except NameError:
    # Python 3.x: no 'basestring' builtin; fall back to 'str' so the
    # isinstance() checks below work on both major versions.
    basestring = str
|
||||
|
||||
def error(msg):
    """Abort setup() by raising DistutilsSetupError with *msg*."""
    # imported lazily so this module can load without distutils present
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)
|
||||
|
||||
|
||||
def execfile(filename, glob):
    """Execute *filename* in the namespace *glob* (Python-3 'execfile').

    Used instead of __import__() to run the build script: a normal import
    can misfire because intermediate __init__.py files may themselves try
    to import the very module we are about to generate.
    """
    with open(filename) as f:
        source = f.read() + '\n'    # trailing newline: Python 2.6 compatibility
    exec(compile(source, filename, 'exec'), glob, glob)
|
||||
|
||||
|
||||
def add_cffi_module(dist, mod_spec):
    """Register one 'path/build.py:ffi_variable' spec on the distribution.

    Runs the build script, fetches the FFI object named in the spec, and
    adds either a generated pure-Python module or a C extension module to
    'dist', depending on whether set_source() received a C source.
    Configuration mistakes are reported through error() (DistutilsSetupError).
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        # the user may have written a dotted module path by mistake;
        # suggest the equivalent file-path spelling if that file exists
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    # run the build script in a fresh namespace (deliberately not a real
    # import; see execfile() above)
    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()   # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # copy first: do not mutate the kwds dict stored on the ffi object
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    # source is None for ABI-mode (pure Python) modules
    if source is None:
        _add_py_module(dist, ffi, module_name)
    else:
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
|
||||
|
||||
def _set_py_limited_api(Extension, kwds):
|
||||
"""
|
||||
Add py_limited_api to kwds if setuptools >= 26 is in use.
|
||||
Do not alter the setting if it already exists.
|
||||
Setuptools takes care of ignoring the flag on Python 2 and PyPy.
|
||||
|
||||
CPython itself should ignore the flag in a debugging version
|
||||
(by not listing .abi3.so in the extensions it supports), but
|
||||
it doesn't so far, creating troubles. That's why we check
|
||||
for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
|
||||
of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
|
||||
|
||||
On Windows, with CPython <= 3.4, it's better not to use py_limited_api
|
||||
because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
|
||||
For now we'll skip py_limited_api on all Windows versions to avoid an
|
||||
inconsistent mess.
|
||||
"""
|
||||
if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
|
||||
and sys.platform != 'win32'):
|
||||
import setuptools
|
||||
try:
|
||||
setuptools_major_version = int(setuptools.__version__.partition('.')[0])
|
||||
if setuptools_major_version >= 26:
|
||||
kwds['py_limited_api'] = True
|
||||
except ValueError: # certain development versions of setuptools
|
||||
# If we don't know the version number of setuptools, we
|
||||
# try to set 'py_limited_api' anyway. At worst, we get a
|
||||
# warning.
|
||||
kwds['py_limited_api'] = True
|
||||
return kwds
|
||||
|
||||
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    """Append a C Extension for 'module_name' to dist.ext_modules and
    install a build_ext subclass that generates the C source (via
    recompiler.make_c_source) just before it is compiled."""
    from distutils.core import Extension
    # We are a setuptools extension. Need this build_ext for py_limited_api.
    from setuptools.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    # '$PLACEHOLDER' is swapped for the real generated .c path in run()
    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    kwds = _set_py_limited_api(Extension, kwds)
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        # generate the C source for 'ext' into tmpdir and return its path
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code.  To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # chain on top of whatever build_ext the project already configured
    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
|
||||
|
||||
|
||||
def _add_py_module(dist, ffi, module_name):
    """Register the ABI-mode (pure Python) module 'module_name': hook
    build_py and build_ext so the .py file is generated (via
    recompiler.make_py_source) into the build tree."""
    from distutils.dir_util import mkpath
    from setuptools.command.build_py import build_py
    from setuptools.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        # write the generated module to 'py_file', creating parent dirs
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
        def get_source_files(self):
            # This is called from 'setup.py sdist' only.  Exclude
            # the generate .py module in this case.
            saved_py_modules = self.py_modules
            try:
                if saved_py_modules:
                    self.py_modules = [m for m in saved_py_modules
                                         if m != module_name]
                return base_class.get_source_files(self)
            finally:
                self.py_modules = saved_py_modules
    dist.cmdclass['build_py'] = build_py_make_mod

    # distutils and setuptools have no notion I could find of a
    # generated python module.  If we don't add module_name to
    # dist.py_modules, then things mostly work but there are some
    # combination of options (--root and --record) that will miss
    # the module.  So we add it here, which gives a few apparently
    # harmless warnings about not finding the file outside the
    # build directory.
    # Then we need to hack more in get_source_files(); see above.
    if dist.py_modules is None:
        dist.py_modules = []
    dist.py_modules.append(module_name)

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod
|
||||
|
||||
def cffi_modules(dist, attr, value):
    """setuptools entry point for the 'cffi_modules=' setup() keyword.

    Accepts a single spec string or a list of them and registers each
    one on the distribution.
    """
    assert attr == 'cffi_modules'
    specs = [value] if isinstance(value, basestring) else value
    for cffi_module in specs:
        add_cffi_module(dist, cffi_module)
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,675 @@
|
||||
#
|
||||
# DEPRECATED: implementation for ffi.verify()
|
||||
#
|
||||
import sys, os
|
||||
import types
|
||||
|
||||
from . import model
|
||||
from .error import VerificationError
|
||||
|
||||
|
||||
class VGenericEngine(object):
|
||||
_class_key = 'g'
|
||||
_gen_python_module = False
|
||||
|
||||
    def __init__(self, verifier):
        """Bind this engine to a Verifier and reset per-build state."""
        self.verifier = verifier
        self.ffi = verifier.ffi
        # C symbols the generated extension must export
        self.export_symbols = []
        # {tp: (layout_list, cname)} -- layouts read back from C, awaiting
        # the cross-check done in _loaded_struct_or_union()
        self._struct_pending_verification = {}
|
||||
|
||||
    def patch_extension_kwds(self, kwds):
        """Inject our (still-empty) export_symbols list into the
        Extension keyword arguments."""
        # add 'export_symbols' to the dictionary.  Note that we add the
        # list before filling it.  When we fill it, it will thus also show
        # up in kwds['export_symbols'].
        kwds.setdefault('export_symbols', self.export_symbols)
|
||||
|
||||
def find_module(self, module_name, path, so_suffixes):
|
||||
for so_suffix in so_suffixes:
|
||||
basename = module_name + so_suffix
|
||||
if path is None:
|
||||
path = sys.path
|
||||
for dirname in path:
|
||||
filename = os.path.join(dirname, basename)
|
||||
if os.path.isfile(filename):
|
||||
return filename
|
||||
|
||||
def collect_types(self):
|
||||
pass # not needed in the generic engine
|
||||
|
||||
    def _prnt(self, what=''):
        # Write one line of generated C source (plus newline) to self._f.
        self._f.write(what + '\n')
|
||||
|
||||
    def write_source_to_f(self):
        """Emit the complete C source of the extension module to self._f."""
        prnt = self._prnt
        # first paste some standard set of lines that are mostly '#include'
        prnt(cffimod_header)
        # then paste the C source given by the user, verbatim.
        prnt(self.verifier.preamble)
        #
        # call generate_gen_xxx_decl(), for every xxx found from
        # ffi._parser._declarations.  This generates all the functions.
        self._generate('decl')
        #
        # on Windows, distutils insists on putting init_cffi_xyz in
        # 'export_symbols', so instead of fighting it, just give up and
        # give it one
        if sys.platform == 'win32':
            if sys.version_info >= (3,):
                prefix = 'PyInit_'
            else:
                prefix = 'init'
            modname = self.verifier.get_module_name()
            prnt("void %s%s(void) { }\n" % (prefix, modname))
|
||||
|
||||
    def load_library(self, flags=0):
        """Load the compiled module and return a populated FFILibrary."""
        # import it with the CFFI backend
        backend = self.ffi._backend
        # needs to make a path that contains '/', on Posix
        filename = os.path.join(os.curdir, self.verifier.modulefilename)
        module = backend.load_library(filename, flags)
        #
        # call loading_gen_struct() to get the struct layout inferred by
        # the C compiler
        self._load(module, 'loading')

        # build the FFILibrary class and instance, this is a module subclass
        # because modules are expected to have usually-constant-attributes and
        # in PyPy this means the JIT is able to treat attributes as constant,
        # which we want.
        class FFILibrary(types.ModuleType):
            _cffi_generic_module = module
            _cffi_ffi = self.ffi
            # names added by the loaded_gen_xxx() steps; returned by dir()
            _cffi_dir = []
            def __dir__(self):
                return FFILibrary._cffi_dir
        library = FFILibrary("")
        #
        # finally, call the loaded_gen_xxx() functions.  This will set
        # up the 'library' object.
        self._load(module, 'loaded', library=library)
        return library
|
||||
|
||||
def _get_declarations(self):
|
||||
lst = [(key, tp) for (key, (tp, qual)) in
|
||||
self.ffi._parser._declarations.items()]
|
||||
lst.sort()
|
||||
return lst
|
||||
|
||||
    def _generate(self, step_name):
        """Dispatch every declaration to self._generate_gen_<kind>_<step_name>.

        A missing method means this kind of declaration is not supported
        by verify(); failures are annotated with the declaration name.
        """
        for name, tp in self._get_declarations():
            # 'name' is e.g. 'function foo' -> kind='function', realname='foo'
            kind, realname = name.split(' ', 1)
            try:
                method = getattr(self, '_generate_gen_%s_%s' % (kind,
                                                                step_name))
            except AttributeError:
                raise VerificationError(
                    "not implemented in verify(): %r" % name)
            try:
                method(tp, realname)
            except Exception as e:
                model.attach_exception_info(e, name)
                raise
|
||||
|
||||
    def _load(self, module, step_name, **kwds):
        """Dispatch every declaration to self._<step_name>_gen_<kind>,
        forwarding extra keyword arguments (e.g. library=...)."""
        for name, tp in self._get_declarations():
            kind, realname = name.split(' ', 1)
            method = getattr(self, '_%s_gen_%s' % (step_name, kind))
            try:
                method(tp, realname, module, **kwds)
            except Exception as e:
                model.attach_exception_info(e, name)
                raise
|
||||
|
||||
    def _generate_nothing(self, tp, name):
        # Shared no-op 'generate' step for kinds that need no C code.
        pass
|
||||
|
||||
    def _loaded_noop(self, tp, name, module, **kwds):
        # Shared no-op 'loading'/'loaded' step.
        pass
|
||||
|
||||
# ----------
|
||||
# typedefs: generates no code so far
|
||||
|
||||
_generate_gen_typedef_decl = _generate_nothing
|
||||
_loading_gen_typedef = _loaded_noop
|
||||
_loaded_gen_typedef = _loaded_noop
|
||||
|
||||
# ----------
|
||||
# function declarations
|
||||
|
||||
    def _generate_gen_function_decl(self, tp, name):
        """Emit the exported C wrapper '_cffi_f_<name>' for function 'name'.

        Struct/union arguments are passed by pointer and a struct/union
        result is written through an extra leading '*r' out-argument; the
        Python side undoes this in _make_struct_wrapper().
        """
        assert isinstance(tp, model.FunctionPtrType)
        if tp.ellipsis:
            # cannot support vararg functions better than this: check for its
            # exact type (including the fixed arguments), and build it as a
            # constant function pointer (no _cffi_f_%s wrapper)
            self._generate_gen_const(False, name, tp)
            return
        prnt = self._prnt
        numargs = len(tp.args)
        argnames = []
        for i, type in enumerate(tp.args):
            indirection = ''
            if isinstance(type, model.StructOrUnion):
                indirection = '*'
            argnames.append('%sx%d' % (indirection, i))
        context = 'argument of %s' % name
        arglist = [type.get_c_name(' %s' % arg, context)
                   for type, arg in zip(tp.args, argnames)]
        tpresult = tp.result
        if isinstance(tpresult, model.StructOrUnion):
            # struct result -> extra '*r' out-argument, wrapper returns void
            arglist.insert(0, tpresult.get_c_name(' *r', context))
            tpresult = model.void_type
        arglist = ', '.join(arglist) or 'void'
        wrappername = '_cffi_f_%s' % name
        self.export_symbols.append(wrappername)
        if tp.abi:
            abi = tp.abi + ' '
        else:
            abi = ''
        funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
        context = 'result of %s' % name
        prnt(tpresult.get_c_name(funcdecl, context))
        prnt('{')
        #
        if isinstance(tp.result, model.StructOrUnion):
            result_code = '*r = '
        elif not isinstance(tp.result, model.VoidType):
            result_code = 'return '
        else:
            result_code = ''
        prnt('  %s%s(%s);' % (result_code, name, ', '.join(argnames)))
        prnt('}')
        prnt()
|
||||
|
||||
_loading_gen_function = _loaded_noop
|
||||
|
||||
    def _loaded_gen_function(self, tp, name, module, library):
        """Bind function 'name' from the compiled module onto 'library'.

        Functions whose C wrapper passes structs/unions by pointer get
        matching Python-side wrappers (via _make_struct_wrapper) so the
        caller still sees by-value semantics.
        """
        assert isinstance(tp, model.FunctionPtrType)
        if tp.ellipsis:
            # vararg case: loaded as a constant function pointer
            newfunction = self._load_constant(False, tp, name, module)
        else:
            indirections = []
            base_tp = tp
            if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
                    or isinstance(tp.result, model.StructOrUnion)):
                # rebuild the function type with pointer indirections,
                # mirroring what _generate_gen_function_decl() emitted
                indirect_args = []
                for i, typ in enumerate(tp.args):
                    if isinstance(typ, model.StructOrUnion):
                        typ = model.PointerType(typ)
                        indirections.append((i, typ))
                    indirect_args.append(typ)
                indirect_result = tp.result
                if isinstance(indirect_result, model.StructOrUnion):
                    if indirect_result.fldtypes is None:
                        raise TypeError("'%s' is used as result type, "
                                        "but is opaque" % (
                                            indirect_result._get_c_name(),))
                    indirect_result = model.PointerType(indirect_result)
                    indirect_args.insert(0, indirect_result)
                    indirections.insert(0, ("result", indirect_result))
                    indirect_result = model.void_type
                tp = model.FunctionPtrType(tuple(indirect_args),
                                           indirect_result, tp.ellipsis)
            BFunc = self.ffi._get_cached_btype(tp)
            wrappername = '_cffi_f_%s' % name
            newfunction = module.load_function(BFunc, wrappername)
            for i, typ in indirections:
                newfunction = self._make_struct_wrapper(newfunction, i, typ,
                                                        base_tp)
        setattr(library, name, newfunction)
        type(library)._cffi_dir.append(name)
|
||||
|
||||
    def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
        """Wrap 'oldfunc' to convert argument 'i' (or the result, when i
        is the string "result") between by-value struct and the pointer
        form expected by the generated C wrapper."""
        backend = self.ffi._backend
        BType = self.ffi._get_cached_btype(tp)
        if i == "result":
            ffi = self.ffi
            def newfunc(*args):
                # allocate the out-struct, let C fill it, return by value
                res = ffi.new(BType)
                oldfunc(res, *args)
                return res[0]
        else:
            def newfunc(*args):
                # copy argument i into a fresh pointer-to-struct
                args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
                return oldfunc(*args)
        newfunc._cffi_base_type = base_tp
        return newfunc
|
||||
|
||||
# ----------
|
||||
# named structs
|
||||
|
||||
    def _generate_gen_struct_decl(self, tp, name):
        # 'struct xyz' declaration: emit its check/layout helper functions.
        assert name == tp.name
        self._generate_struct_or_union_decl(tp, 'struct', name)
|
||||
|
||||
    def _loading_gen_struct(self, tp, name, module):
        # 'loading' step for a named struct.
        self._loading_struct_or_union(tp, 'struct', name, module)
|
||||
|
||||
    def _loaded_gen_struct(self, tp, name, module, **kwds):
        # 'loaded' step for a named struct.
        self._loaded_struct_or_union(tp)
|
||||
|
||||
    def _generate_gen_union_decl(self, tp, name):
        # 'union xyz' declaration: emit its check/layout helper functions.
        assert name == tp.name
        self._generate_struct_or_union_decl(tp, 'union', name)
|
||||
|
||||
    def _loading_gen_union(self, tp, name, module):
        # 'loading' step for a named union.
        self._loading_struct_or_union(tp, 'union', name, module)
|
||||
|
||||
    def _loaded_gen_union(self, tp, name, module, **kwds):
        # 'loaded' step for a named union.
        self._loaded_struct_or_union(tp)
|
||||
|
||||
    def _generate_struct_or_union_decl(self, tp, prefix, name):
        """Emit two C helpers for 'prefix name' (e.g. 'struct foo'):

        - _cffi_check_...: compiles field accesses so the C compiler
          flags type mismatches at build time;
        - _cffi_layout_...: exported; returns, one index at a time,
          [total size, alignment, (offset, size) per field, -1].
        """
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
        cname = ('%s %s' % (prefix, name)).strip()
        #
        prnt = self._prnt
        prnt('static void %s(%s *p)' % (checkfuncname, cname))
        prnt('{')
        prnt('  /* only to generate compile-time warnings or errors */')
        prnt('  (void)p;')
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if (isinstance(ftype, model.PrimitiveType)
                    and ftype.is_integer_type()) or fbitsize >= 0:
                # accept all integers, but complain on float or double
                prnt('  (void)((p->%s) << 1);' % fname)
            else:
                # only accept exactly the type declared.
                try:
                    prnt('  { %s = &p->%s; (void)tmp; }' % (
                        ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                        fname))
                except VerificationError as e:
                    prnt('  /* %s */' % str(e))   # cannot verify it, ignore
        prnt('}')
        self.export_symbols.append(layoutfuncname)
        prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
        prnt('{')
        prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
        prnt('  static intptr_t nums[] = {')
        prnt('    sizeof(%s),' % cname)
        prnt('    offsetof(struct _cffi_aligncheck, y),')
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if fbitsize >= 0:
                continue      # xxx ignore fbitsize for now
            prnt('    offsetof(%s, %s),' % (cname, fname))
            if isinstance(ftype, model.ArrayType) and ftype.length is None:
                # open-ended array: record a size of 0
                prnt('    0,  /* %s */' % ftype._get_c_name())
            else:
                prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
        prnt('    -1')
        prnt('  };')
        prnt('  return nums[i];')
        prnt('  /* the next line is not executed, but compiled */')
        prnt('  %s(0);' % (checkfuncname,))
        prnt('}')
        prnt()
|
||||
|
||||
    def _loading_struct_or_union(self, tp, prefix, name, module):
        """Call the generated _cffi_layout_... function and record its
        [size, alignment, (offset, size)*] sequence: partial structs get
        their layout forced from it, complete ones are queued for the
        cross-check in _loaded_struct_or_union()."""
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
        #
        BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
        function = module.load_function(BFunc, layoutfuncname)
        # read values one index at a time until the -1 terminator
        layout = []
        num = 0
        while True:
            x = function(num)
            if x < 0: break
            layout.append(x)
            num += 1
        if isinstance(tp, model.StructOrUnion) and tp.partial:
            # use the function()'s sizes and offsets to guide the
            # layout of the struct
            totalsize = layout[0]
            totalalignment = layout[1]
            fieldofs = layout[2::2]
            fieldsize = layout[3::2]
            tp.force_flatten()
            assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
            tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
        else:
            cname = ('%s %s' % (prefix, name)).strip()
            self._struct_pending_verification[tp] = layout, cname
|
||||
|
||||
    def _loaded_struct_or_union(self, tp):
        """Cross-check our computed layout of 'tp' against the sizes and
        offsets reported by the C compiler; raise VerificationError on
        any mismatch."""
        if tp.fldnames is None:
            return     # nothing to do with opaque structs
        self.ffi._get_cached_btype(tp)   # force 'fixedlayout' to be considered

        if tp in self._struct_pending_verification:
            # check that the layout sizes and offsets match the real ones
            def check(realvalue, expectedvalue, msg):
                if realvalue != expectedvalue:
                    raise VerificationError(
                        "%s (we have %d, but C compiler says %d)"
                        % (msg, expectedvalue, realvalue))
            ffi = self.ffi
            BStruct = ffi._get_cached_btype(tp)
            layout, cname = self._struct_pending_verification.pop(tp)
            check(layout[0], ffi.sizeof(BStruct), "wrong total size")
            check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
            i = 2
            for fname, ftype, fbitsize, fqual in tp.enumfields():
                if fbitsize >= 0:
                    continue        # xxx ignore fbitsize for now
                check(layout[i], ffi.offsetof(BStruct, fname),
                      "wrong offset for field %r" % (fname,))
                if layout[i+1] != 0:
                    # a recorded size of 0 means an open-ended array:
                    # skip the size check in that case
                    BField = ffi._get_cached_btype(ftype)
                    check(layout[i+1], ffi.sizeof(BField),
                          "wrong size for field %r" % (fname,))
                i += 2
            assert i == len(layout)
|
||||
|
||||
# ----------
|
||||
# 'anonymous' declarations. These are produced for anonymous structs
|
||||
# or unions; the 'name' is obtained by a typedef.
|
||||
|
||||
    def _generate_gen_anonymous_decl(self, tp, name):
        # Anonymous struct/union/enum known via a typedef: dispatch with
        # an empty C prefix.
        if isinstance(tp, model.EnumType):
            self._generate_gen_enum_decl(tp, name, '')
        else:
            self._generate_struct_or_union_decl(tp, '', name)
|
||||
|
||||
    def _loading_gen_anonymous(self, tp, name, module):
        # 'loading' step for an anonymous struct/union/enum.
        if isinstance(tp, model.EnumType):
            self._loading_gen_enum(tp, name, module, '')
        else:
            self._loading_struct_or_union(tp, '', name, module)
|
||||
|
||||
    def _loaded_gen_anonymous(self, tp, name, module, **kwds):
        # 'loaded' step for an anonymous struct/union/enum.
        if isinstance(tp, model.EnumType):
            self._loaded_gen_enum(tp, name, module, **kwds)
        else:
            self._loaded_struct_or_union(tp)
|
||||
|
||||
# ----------
|
||||
# constants, likely declared with '#define'
|
||||
|
||||
    def _generate_gen_const(self, is_int, name, tp=None, category='const',
                            check_value=None):
        """Emit the exported accessor '_cffi_<category>_<name>' in one of
        three shapes: a checker (when 'check_value' is given), an integer
        reader with a 'long long *' out-argument and a sign indicator, or
        a plain getter returning the value (by const pointer for structs).
        """
        prnt = self._prnt
        funcname = '_cffi_%s_%s' % (category, name)
        self.export_symbols.append(funcname)
        if check_value is not None:
            assert is_int
            assert category == 'const'
            prnt('int %s(char *out_error)' % funcname)
            prnt('{')
            self._check_int_constant_value(name, check_value)
            prnt('  return 0;')
            prnt('}')
        elif is_int:
            assert category == 'const'
            prnt('int %s(long long *out_value)' % funcname)
            prnt('{')
            prnt('  *out_value = (long long)(%s);' % (name,))
            # return value tells Python whether the constant is <= 0, so
            # it can undo sign-extension (see _load_constant)
            prnt('  return (%s) <= 0;' % (name,))
            prnt('}')
        else:
            assert tp is not None
            assert check_value is None
            if category == 'var':
                ampersand = '&'
            else:
                ampersand = ''
            extra = ''
            if category == 'const' and isinstance(tp, model.StructOrUnion):
                extra = 'const *'
                ampersand = '&'
            prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
            prnt('{')
            prnt('  return (%s%s);' % (ampersand, name))
            prnt('}')
        prnt()
|
||||
|
||||
def _generate_gen_constant_decl(self, tp, name):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
self._generate_gen_const(is_int, name, tp)
|
||||
|
||||
_loading_gen_constant = _loaded_noop
|
||||
|
||||
    def _load_constant(self, is_int, tp, name, module, check_value=None):
        """Read constant 'name' back from the compiled module; mirrors the
        three C shapes emitted by _generate_gen_const()."""
        funcname = '_cffi_const_%s' % name
        if check_value is not None:
            assert is_int
            # value already known: only run the generated checker
            self._load_known_int_constant(module, funcname)
            value = check_value
        elif is_int:
            BType = self.ffi._typeof_locked("long long*")[0]
            BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
            function = module.load_function(BFunc, funcname)
            p = self.ffi.new(BType)
            negative = function(p)
            value = int(p[0])
            if value < 0 and not negative:
                # C reported the constant as > 0: undo the sign-extension
                # of the 'long long' read
                BLongLong = self.ffi._typeof_locked("long long")[0]
                value += (1 << (8*self.ffi.sizeof(BLongLong)))
        else:
            assert check_value is None
            # generic getter; struct constants come back as a pointer
            fntypeextra = '(*)(void)'
            if isinstance(tp, model.StructOrUnion):
                fntypeextra = '*' + fntypeextra
            BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
            function = module.load_function(BFunc, funcname)
            value = function()
            if isinstance(tp, model.StructOrUnion):
                value = value[0]
        return value
|
||||
|
||||
def _loaded_gen_constant(self, tp, name, module, library):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
value = self._load_constant(is_int, tp, name, module)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
# ----------
|
||||
# enums
|
||||
|
||||
    def _check_int_constant_value(self, name, value):
        """Emit C code verifying at run time that 'name' equals 'value';
        on mismatch it sprintf's a message into 'out_error' (provided by
        the enclosing generated function) and returns -1."""
        prnt = self._prnt
        # compare in the signedness matching the expected value's sign
        if value <= 0:
            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
                name, name, value))
        else:
            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
                name, name, value))
        prnt('    char buf[64];')
        prnt('    if ((%s) <= 0)' % name)
        prnt('        sprintf(buf, "%%ld", (long)(%s));' % name)
        prnt('    else')
        prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
             name)
        prnt('    sprintf(out_error, "%s has the real value %s, not %s",')
        prnt('            "%s", buf, "%d");' % (name[:100], value))
        prnt('    return -1;')
        prnt('  }')
|
||||
|
||||
    def _load_known_int_constant(self, module, funcname):
        """Run the generated checker 'funcname'; raise VerificationError
        with the message it wrote if the C value does not match."""
        BType = self.ffi._typeof_locked("char[]")[0]
        BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
        function = module.load_function(BFunc, funcname)
        # 256-char buffer for the error message filled in by C
        p = self.ffi.new(BType, 256)
        if function(p) < 0:
            error = self.ffi.string(p)
            if sys.version_info >= (3,):
                error = str(error, 'utf-8')
            raise VerificationError(error)
|
||||
|
||||
def _enum_funcname(self, prefix, name):
|
||||
# "$enum_$1" => "___D_enum____D_1"
|
||||
name = name.replace('$', '___D_')
|
||||
return '_cffi_e_%s_%s' % (prefix, name)
|
||||
|
||||
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
|
||||
if tp.partial:
|
||||
for enumerator in tp.enumerators:
|
||||
self._generate_gen_const(True, enumerator)
|
||||
return
|
||||
#
|
||||
funcname = self._enum_funcname(prefix, name)
|
||||
self.export_symbols.append(funcname)
|
||||
prnt = self._prnt
|
||||
prnt('int %s(char *out_error)' % funcname)
|
||||
prnt('{')
|
||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
||||
self._check_int_constant_value(enumerator, enumvalue)
|
||||
prnt(' return 0;')
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
|
||||
if tp.partial:
|
||||
enumvalues = [self._load_constant(True, tp, enumerator, module)
|
||||
for enumerator in tp.enumerators]
|
||||
tp.enumvalues = tuple(enumvalues)
|
||||
tp.partial_resolved = True
|
||||
else:
|
||||
funcname = self._enum_funcname(prefix, name)
|
||||
self._load_known_int_constant(module, funcname)
|
||||
|
||||
def _loaded_gen_enum(self, tp, name, module, library):
|
||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
||||
setattr(library, enumerator, enumvalue)
|
||||
type(library)._cffi_dir.append(enumerator)
|
||||
|
||||
# ----------
|
||||
# macros: for now only for integers
|
||||
|
||||
def _generate_gen_macro_decl(self, tp, name):
|
||||
if tp == '...':
|
||||
check_value = None
|
||||
else:
|
||||
check_value = tp # an integer
|
||||
self._generate_gen_const(True, name, check_value=check_value)
|
||||
|
||||
_loading_gen_macro = _loaded_noop
|
||||
|
||||
def _loaded_gen_macro(self, tp, name, module, library):
|
||||
if tp == '...':
|
||||
check_value = None
|
||||
else:
|
||||
check_value = tp # an integer
|
||||
value = self._load_constant(True, tp, name, module,
|
||||
check_value=check_value)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
# ----------
|
||||
# global variables
|
||||
|
||||
def _generate_gen_variable_decl(self, tp, name):
|
||||
if isinstance(tp, model.ArrayType):
|
||||
if tp.length == '...':
|
||||
prnt = self._prnt
|
||||
funcname = '_cffi_sizeof_%s' % (name,)
|
||||
self.export_symbols.append(funcname)
|
||||
prnt("size_t %s(void)" % funcname)
|
||||
prnt("{")
|
||||
prnt(" return sizeof(%s);" % (name,))
|
||||
prnt("}")
|
||||
tp_ptr = model.PointerType(tp.item)
|
||||
self._generate_gen_const(False, name, tp_ptr)
|
||||
else:
|
||||
tp_ptr = model.PointerType(tp)
|
||||
self._generate_gen_const(False, name, tp_ptr, category='var')
|
||||
|
||||
_loading_gen_variable = _loaded_noop
|
||||
|
||||
    def _loaded_gen_variable(self, tp, name, module, library):
        """Attach the global variable 'name' to the library object.

        Arrays are fetched once as a pointer constant (resolving a '...'
        length via the generated sizeof() helper) and, when the length
        is known, cast to a true array cdata.  Non-array globals become
        a read/write property on the library *class* that dereferences
        the variable's address on each access.
        """
        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
                                              # sense that "a=..." is forbidden
            if tp.length == '...':
                funcname = '_cffi_sizeof_%s' % (name,)
                BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
                function = module.load_function(BFunc, funcname)
                size = function()
                BItemType = self.ffi._get_cached_btype(tp.item)
                # The total byte size must be an exact multiple of the
                # item size, or the cdef'ed item type is wrong.
                length, rest = divmod(size, self.ffi.sizeof(BItemType))
                if rest != 0:
                    raise VerificationError(
                        "bad size: %r does not seem to be an array of %s" %
                        (name, tp.item))
                tp = tp.resolve_length(length)
            tp_ptr = model.PointerType(tp.item)
            value = self._load_constant(False, tp_ptr, name, module)
            # 'value' is a <cdata 'type *'> which we have to replace with
            # a <cdata 'type[N]'> if the N is actually known
            if tp.length is not None:
                BArray = self.ffi._get_cached_btype(tp)
                value = self.ffi.cast(BArray, value)
            setattr(library, name, value)
            type(library)._cffi_dir.append(name)
            return
        # remove ptr=<cdata 'int *'> from the library instance, and replace
        # it by a property on the class, which reads/writes into ptr[0].
        funcname = '_cffi_var_%s' % name
        BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
        function = module.load_function(BFunc, funcname)
        ptr = function()
        def getter(library):
            return ptr[0]
        def setter(library, value):
            ptr[0] = value
        setattr(type(library), name, property(getter, setter))
        type(library)._cffi_dir.append(name)
|
||||
|
||||
cffimod_header = r'''
|
||||
#include <stdio.h>
|
||||
#include <stddef.h>
|
||||
#include <stdarg.h>
|
||||
#include <errno.h>
|
||||
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
||||
|
||||
/* this block of #ifs should be kept exactly identical between
|
||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||
and cffi/_cffi_include.h */
|
||||
#if defined(_MSC_VER)
|
||||
# include <malloc.h> /* for alloca() */
|
||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||
typedef __int8 int8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
typedef __int8 int_least8_t;
|
||||
typedef __int16 int_least16_t;
|
||||
typedef __int32 int_least32_t;
|
||||
typedef __int64 int_least64_t;
|
||||
typedef unsigned __int8 uint_least8_t;
|
||||
typedef unsigned __int16 uint_least16_t;
|
||||
typedef unsigned __int32 uint_least32_t;
|
||||
typedef unsigned __int64 uint_least64_t;
|
||||
typedef __int8 int_fast8_t;
|
||||
typedef __int16 int_fast16_t;
|
||||
typedef __int32 int_fast32_t;
|
||||
typedef __int64 int_fast64_t;
|
||||
typedef unsigned __int8 uint_fast8_t;
|
||||
typedef unsigned __int16 uint_fast16_t;
|
||||
typedef unsigned __int32 uint_fast32_t;
|
||||
typedef unsigned __int64 uint_fast64_t;
|
||||
typedef __int64 intmax_t;
|
||||
typedef unsigned __int64 uintmax_t;
|
||||
# else
|
||||
# include <stdint.h>
|
||||
# endif
|
||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||
# ifndef __cplusplus
|
||||
typedef unsigned char _Bool;
|
||||
# endif
|
||||
# endif
|
||||
#else
|
||||
# include <stdint.h>
|
||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||
# include <alloca.h>
|
||||
# endif
|
||||
#endif
|
||||
'''
|
@ -0,0 +1,306 @@
|
||||
#
|
||||
# DEPRECATED: implementation for ffi.verify()
|
||||
#
|
||||
import sys, os, binascii, shutil, io
|
||||
from . import __version_verifier_modules__
|
||||
from . import ffiplatform
|
||||
from .error import VerificationError
|
||||
|
||||
# Python 3.3+ exposes the extension-module suffixes via importlib; older
# versions (including Python 2) still use the deprecated 'imp' module.
if sys.version_info >= (3, 3):
    import importlib.machinery
    def _extension_suffixes():
        # Return a copy: callers may mutate the list.
        return importlib.machinery.EXTENSION_SUFFIXES[:]
else:
    import imp
    def _extension_suffixes():
        return [suffix for suffix, _, type in imp.get_suffixes()
                if type == imp.C_EXTENSION]


# Buffer with 'native string' semantics: StringIO on Python 3, and on
# Python 2 a BytesIO that transparently ASCII-encodes unicode writes.
if sys.version_info >= (3,):
    NativeIO = io.StringIO
else:
    class NativeIO(io.BytesIO):
        def write(self, s):
            if isinstance(s, unicode):
                s = s.encode('ascii')
            super(NativeIO, self).write(s)
|
||||
|
||||
|
||||
class Verifier(object):
    """Driver for the deprecated ffi.verify() pipeline.

    Generates C source for the cdef'ed declarations (delegating to a
    "vengine" backend), compiles it into an extension module, and loads
    it.  Generated files are cached under a module name derived from the
    configuration, so repeated identical runs reuse earlier output.
    """

    def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
                 ext_package=None, tag='', force_generic_engine=False,
                 source_extension='.c', flags=None, relative_to=None, **kwds):
        if ffi._parser._uses_new_feature:
            raise VerificationError(
                "feature not supported with ffi.verify(), but only "
                "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
        self.ffi = ffi
        self.preamble = preamble
        if not modulename:
            # 'flattened_kwds' feeds the hash below; only needed when we
            # must invent a module name ourselves.
            flattened_kwds = ffiplatform.flatten(kwds)
        vengine_class = _locate_engine_class(ffi, force_generic_engine)
        self._vengine = vengine_class(self)
        self._vengine.patch_extension_kwds(kwds)
        self.flags = flags
        self.kwds = self.make_relative_to(kwds, relative_to)
        #
        if modulename:
            if tag:
                raise TypeError("can't specify both 'modulename' and 'tag'")
        else:
            # Derive a deterministic module name from everything that can
            # affect the generated code, so that different configurations
            # never collide in the cache directory.
            key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
                               preamble, flattened_kwds] +
                              ffi._cdefsources)
            if sys.version_info >= (3,):
                key = key.encode('utf-8')
            k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
            # NOTE(review): lstrip('0x') strips any run of leading '0'/'x'
            # characters, not just the literal '0x' prefix (contrast k2
            # below, which uses lstrip('0')).  Harmless here -- the name
            # only needs to be deterministic and unique -- but it looks
            # unintentional; confirm before "fixing", since any change
            # invalidates existing cached modules.
            k1 = k1.lstrip('0x').rstrip('L')
            k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
            k2 = k2.lstrip('0').rstrip('L')
            modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
                                              k1, k2)
        suffix = _get_so_suffixes()[0]
        self.tmpdir = tmpdir or _caller_dir_pycache()
        self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
        self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
        self.ext_package = ext_package
        self._has_source = False
        self._has_module = False

    def write_source(self, file=None):
        """Write the C source code.  It is produced in 'self.sourcefilename',
        which can be tweaked beforehand."""
        with self.ffi._lock:
            if self._has_source and file is None:
                raise VerificationError(
                    "source code already written")
            self._write_source(file)

    def compile_module(self):
        """Write the C source code (if not done already) and compile it.
        This produces a dynamic link library in 'self.modulefilename'."""
        with self.ffi._lock:
            if self._has_module:
                raise VerificationError("module already compiled")
            if not self._has_source:
                self._write_source()
            self._compile_module()

    def load_library(self):
        """Get a C module from this Verifier instance.
        Returns an instance of a FFILibrary class that behaves like the
        objects returned by ffi.dlopen(), but that delegates all
        operations to the C module.  If necessary, the C code is written
        and compiled first.
        """
        with self.ffi._lock:
            if not self._has_module:
                # Prefer an already-compiled module from a previous run.
                self._locate_module()
                if not self._has_module:
                    if not self._has_source:
                        self._write_source()
                    self._compile_module()
            return self._load_library()

    def get_module_name(self):
        """Return the bare module name, without platform suffixes."""
        basename = os.path.basename(self.modulefilename)
        # kill both the .so extension and the other .'s, as introduced
        # by Python 3: 'basename.cpython-33m.so'
        basename = basename.split('.', 1)[0]
        # and the _d added in Python 2 debug builds --- but try to be
        # conservative and not kill a legitimate _d
        if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
            basename = basename[:-2]
        return basename

    def get_extension(self):
        """Return a distutils Extension describing the generated source,
        writing that source first if needed."""
        ffiplatform._hack_at_distutils()  # backward compatibility hack
        if not self._has_source:
            # Re-check under the lock: another thread may have written
            # the source between the two tests.
            with self.ffi._lock:
                if not self._has_source:
                    self._write_source()
        sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
        modname = self.get_module_name()
        return ffiplatform.get_extension(sourcename, modname, **self.kwds)

    def generates_python_module(self):
        # Whether the active vengine emits a Python wrapper module (as
        # opposed to a plain C extension); delegated to the backend.
        return self._vengine._gen_python_module

    def make_relative_to(self, kwds, relative_to):
        """Return a copy of 'kwds' with every file-name list rebased onto
        the directory of 'relative_to'; a no-op when 'relative_to' has no
        directory part."""
        if relative_to and os.path.dirname(relative_to):
            dirname = os.path.dirname(relative_to)
            kwds = kwds.copy()
            for key in ffiplatform.LIST_OF_FILE_NAMES:
                if key in kwds:
                    lst = kwds[key]
                    if not isinstance(lst, (list, tuple)):
                        raise TypeError("keyword '%s' should be a list or tuple"
                                        % (key,))
                    lst = [os.path.join(dirname, fn) for fn in lst]
                    kwds[key] = lst
        return kwds

    # ----------

    def _locate_module(self):
        # Look for an existing compiled module: first at the expected
        # path, then (if absent) inside 'ext_package' when one was given.
        if not os.path.isfile(self.modulefilename):
            if self.ext_package:
                try:
                    pkg = __import__(self.ext_package, None, None, ['__doc__'])
                except ImportError:
                    return      # cannot import the package itself, give up
                    # (e.g. it might be called differently before installation)
                path = pkg.__path__
            else:
                path = None
            filename = self._vengine.find_module(self.get_module_name(), path,
                                                 _get_so_suffixes())
            if filename is None:
                return
            self.modulefilename = filename
        self._vengine.collect_types()
        self._has_module = True

    def _write_source_to(self, file):
        # The vengine writes through 'self._vengine._f'; install the
        # target file there only for the duration of the call.
        self._vengine._f = file
        try:
            self._vengine.write_source_to_f()
        finally:
            del self._vengine._f

    def _write_source(self, file=None):
        if file is not None:
            self._write_source_to(file)
        else:
            # Write our source file to an in memory file.
            f = NativeIO()
            self._write_source_to(f)
            source_data = f.getvalue()

            # Determine if this matches the current file
            if os.path.exists(self.sourcefilename):
                with open(self.sourcefilename, "r") as fp:
                    needs_written = not (fp.read() == source_data)
            else:
                needs_written = True

            # Actually write the file out if it doesn't match
            if needs_written:
                _ensure_dir(self.sourcefilename)
                with open(self.sourcefilename, "w") as fp:
                    fp.write(source_data)

            # Set this flag
            self._has_source = True

    def _compile_module(self):
        # compile this C source
        tmpdir = os.path.dirname(self.sourcefilename)
        outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
        try:
            same = ffiplatform.samefile(outputfilename, self.modulefilename)
        except OSError:
            same = False
        if not same:
            _ensure_dir(self.modulefilename)
            shutil.move(outputfilename, self.modulefilename)
        self._has_module = True

    def _load_library(self):
        assert self._has_module
        if self.flags is not None:
            return self._vengine.load_library(self.flags)
        else:
            return self._vengine.load_library()
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
# Set to True by the test suite to force the portable VGenericEngine
# even when the _cffi_backend C extension is available.
_FORCE_GENERIC_ENGINE = False      # for tests
|
||||
|
||||
def _locate_engine_class(ffi, force_generic_engine):
    """Pick the code-generation backend: the CPython-API engine when the
    ffi really runs on this interpreter's _cffi_backend extension, and
    the generic (pure-API) engine otherwise."""
    use_generic = force_generic_engine or _FORCE_GENERIC_ENGINE
    if not use_generic:
        if '__pypy__' in sys.builtin_module_names:
            use_generic = True
        else:
            try:
                import _cffi_backend
            except ImportError:
                _cffi_backend = '?'
            # The CPython engine is only valid when the ffi is bound to
            # this exact backend module.
            use_generic = ffi._backend is not _cffi_backend
    if use_generic:
        from . import vengine_gen
        return vengine_gen.VGenericEngine
    from . import vengine_cpy
    return vengine_cpy.VCPythonEngine
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
# Overridden via set_tmpdir(); when falsy, _caller_dir_pycache() derives
# the output directory from the environment or the caller's location.
_TMPDIR = None
|
||||
|
||||
def _caller_dir_pycache():
    """Return the directory for generated files: the set_tmpdir()
    override, then $CFFI_TMPDIR, then a '__pycache__' directory next to
    the code that called into cffi."""
    if _TMPDIR:
        return _TMPDIR
    env_dir = os.environ.get('CFFI_TMPDIR')
    if env_dir:
        return env_dir
    # _getframe(2): skip this helper and its caller inside cffi, landing
    # on the user code that triggered the build.
    caller_file = sys._getframe(2).f_code.co_filename
    return os.path.abspath(
        os.path.join(os.path.dirname(caller_file), '__pycache__'))
|
||||
|
||||
def set_tmpdir(dirname):
    """Set the temporary directory to use instead of __pycache__.

    Passing a falsy value (e.g. None) restores the default lookup
    performed by _caller_dir_pycache().
    """
    global _TMPDIR
    _TMPDIR = dirname
|
||||
|
||||
def cleanup_tmpdir(tmpdir=None, keep_so=False):
    """Clean up the temporary directory by removing all files in it
    called `_cffi_*.{c,so}` as well as the `build` subdirectory.

    :param tmpdir: directory to clean; defaults to the caller's
        '__pycache__' directory (see _caller_dir_pycache()).
    :param keep_so: when true, only the generated '.c' sources are
        removed and the compiled extension modules are kept.
    """
    tmpdir = tmpdir or _caller_dir_pycache()
    try:
        filelist = os.listdir(tmpdir)
    except OSError:
        return          # directory missing or unreadable: nothing to do
    if keep_so:
        suffix = '.c'   # only remove .c files
    else:
        suffix = _get_so_suffixes()[0].lower()
    for fn in filelist:
        # Hoist the lowercased name instead of recomputing it per test.
        lower_fn = fn.lower()
        if lower_fn.startswith('_cffi_') and (
                lower_fn.endswith(suffix) or lower_fn.endswith('.c')):
            try:
                os.unlink(os.path.join(tmpdir, fn))
            except OSError:
                pass
    # Breadth-first removal of everything under 'build': the list doubles
    # as the traversal queue (appending while iterating is deliberate).
    # Directories themselves are left in place, files are unlinked.
    # 'subdir' replaces the previous loop variable 'dir', which shadowed
    # the builtin of the same name.
    clean_dir = [os.path.join(tmpdir, 'build')]
    for subdir in clean_dir:
        try:
            for fn in os.listdir(subdir):
                fn = os.path.join(subdir, fn)
                if os.path.isdir(fn):
                    clean_dir.append(fn)
                else:
                    os.unlink(fn)
        except OSError:
            pass
|
||||
|
||||
def _get_so_suffixes():
    """Return the file suffixes used for C extension modules, with a
    platform-appropriate fallback when none are registered."""
    suffixes = _extension_suffixes()
    if suffixes:
        return suffixes
    # bah, no C_EXTENSION available.  Occurs on pypy without cpyext
    return [".pyd"] if sys.platform == 'win32' else [".so"]
|
||||
|
||||
def _ensure_dir(filename):
|
||||
dirname = os.path.dirname(filename)
|
||||
if dirname and not os.path.isdir(dirname):
|
||||
os.makedirs(dirname)
|
@ -0,0 +1,62 @@
|
||||
Gevent is written and maintained by
|
||||
|
||||
Denis Bilenko
|
||||
Matt Iversen
|
||||
Steffen Prince
|
||||
Jason Madden
|
||||
|
||||
and the contributors (ordered by the date of first contribution):
|
||||
|
||||
Jason Toffaletti
|
||||
Mike Barton
|
||||
Ludvig Ericson
|
||||
Marcus Cavanaugh
|
||||
Matt Goodall
|
||||
Ralf Schmitt
|
||||
Daniele Varrazzo
|
||||
Nicholas Piël
|
||||
Örjan Persson
|
||||
Uriel Katz
|
||||
Ted Suzman
|
||||
Randall Leeds
|
||||
Erik Näslund
|
||||
Alexey Borzenkov
|
||||
David Hain
|
||||
Dmitry Chechik
|
||||
Ned Rockson
|
||||
Tommie Gannert
|
||||
Shaun Lindsay
|
||||
Andreas Blixt
|
||||
Nick Barkas
|
||||
Galfy Pundee
|
||||
Alexander Boudkar
|
||||
Damien Churchill
|
||||
Tom Lynn
|
||||
Shaun Cutts
|
||||
David LaBissoniere
|
||||
Alexandre Kandalintsev
|
||||
Geert Jansen
|
||||
Vitaly Kruglikov
|
||||
Saúl Ibarra Corretgé
|
||||
Oliver Beattie
|
||||
Bobby Powers
|
||||
Anton Patrushev
|
||||
Jan-Philip Gehrcke
|
||||
Alex Gaynor
|
||||
陈小玉
|
||||
Philip Conrad
|
||||
Heungsub Lee
|
||||
Ron Rothman
|
||||
|
||||
See https://github.com/gevent/gevent/graphs/contributors for more info.
|
||||
|
||||
Gevent is inspired by and uses some code from eventlet which was written by
|
||||
|
||||
Bob Ipollito
|
||||
Donovan Preston
|
||||
|
||||
The win32util module is taken from Twisted. The tblib module is taken from python-tblib by Ionel Cristian Mărieș.
|
||||
|
||||
Some modules (local, ssl) contain code from the Python standard library.
|
||||
|
||||
If your code is used in gevent and you are not mentioned above, please contact the maintainer.
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,25 @@
|
||||
MIT License
|
||||
|
||||
Except when otherwise stated (look at the beginning of each file) the software
|
||||
and the documentation in this project are copyrighted by:
|
||||
|
||||
Denis Bilenko and the contributors, http://www.gevent.org
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,367 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: gevent
|
||||
Version: 1.4.0
|
||||
Summary: Coroutine-based network library
|
||||
Home-page: http://www.gevent.org/
|
||||
Author: Denis Bilenko
|
||||
Author-email: denis.bilenko@gmail.com
|
||||
Maintainer: Jason Madden
|
||||
Maintainer-email: jason@nextthought.com
|
||||
License: MIT
|
||||
Project-URL: Bug Tracker, https://github.com/gevent/gevent/issues
|
||||
Project-URL: Source Code, https://github.com/gevent/gevent/
|
||||
Project-URL: Documentation, http://www.gevent.org
|
||||
Keywords: greenlet coroutine cooperative multitasking light threads monkey
|
||||
Platform: UNKNOWN
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Operating System :: MacOS :: MacOS X
|
||||
Classifier: Operating System :: POSIX
|
||||
Classifier: Operating System :: Microsoft :: Windows
|
||||
Classifier: Topic :: Internet
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Development Status :: 4 - Beta
|
||||
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
|
||||
Description-Content-Type: text/x-rst
|
||||
Requires-Dist: greenlet (>=0.4.14) ; platform_python_implementation == "CPython"
|
||||
Requires-Dist: cffi (>=1.11.5) ; sys_platform == "win32" and platform_python_implementation == "CPython"
|
||||
Provides-Extra: dnspython
|
||||
Requires-Dist: dnspython ; extra == 'dnspython'
|
||||
Requires-Dist: idna ; extra == 'dnspython'
|
||||
Provides-Extra: doc
|
||||
Requires-Dist: repoze.sphinx.autointerface ; extra == 'doc'
|
||||
Provides-Extra: events
|
||||
Requires-Dist: zope.event ; extra == 'events'
|
||||
Requires-Dist: zope.interface ; extra == 'events'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: zope.interface ; extra == 'test'
|
||||
Requires-Dist: zope.event ; extra == 'test'
|
||||
Requires-Dist: requests ; extra == 'test'
|
||||
Requires-Dist: objgraph ; extra == 'test'
|
||||
Requires-Dist: psutil ; (platform_python_implementation == "CPython" or sys_platform != "win32") and extra == 'test'
|
||||
Requires-Dist: futures ; (python_version == "2.7") and extra == 'test'
|
||||
Requires-Dist: mock ; (python_version == "2.7") and extra == 'test'
|
||||
Requires-Dist: coverage (>=5.0a3) ; (sys_platform != "win32") and extra == 'test'
|
||||
Requires-Dist: coveralls (>=1.0) ; (sys_platform != "win32") and extra == 'test'
|
||||
|
||||
========
|
||||
gevent
|
||||
========
|
||||
|
||||
.. image:: https://travis-ci.org/gevent/gevent.svg?branch=master
|
||||
:target: https://travis-ci.org/gevent/gevent
|
||||
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/q4kl21ng2yo2ixur?svg=true
|
||||
:target: https://ci.appveyor.com/project/denik/gevent
|
||||
|
||||
.. image:: https://coveralls.io/repos/gevent/gevent/badge.svg?branch=master&service=github
|
||||
:target: https://coveralls.io/github/gevent/gevent?branch=master
|
||||
|
||||
..
|
||||
This file is included in README.rst from the top-level
|
||||
so it is limited to pure ReST markup, not Sphinx.
|
||||
|
||||
|
||||
|
||||
gevent is a coroutine_ -based Python_ networking library that uses
|
||||
`greenlet <https://greenlet.readthedocs.io>`_ to provide a high-level synchronous API on top of the `libev`_
|
||||
or `libuv`_ event loop.
|
||||
|
||||
Features include:
|
||||
|
||||
|
||||
* Fast event loop based on `libev`_ or `libuv`_.
|
||||
* Lightweight execution units based on greenlets.
|
||||
* API that re-uses concepts from the Python standard library (for
|
||||
examples there are `events`_ and
|
||||
`queues`_).
|
||||
* `Cooperative sockets with SSL support <http://www.gevent.org/api/index.html#networking>`_
|
||||
* `Cooperative DNS queries <http://www.gevent.org/dns.html>`_ performed through a threadpool,
|
||||
dnspython, or c-ares.
|
||||
* `Monkey patching utility <http://www.gevent.org/intro.html#monkey-patching>`_ to get 3rd party modules to become cooperative
|
||||
* TCP/UDP/HTTP servers
|
||||
* Subprocess support (through `gevent.subprocess`_)
|
||||
* Thread pools
|
||||
|
||||
gevent is `inspired by eventlet`_ but features a more consistent API,
|
||||
simpler implementation and better performance. Read why others `use
|
||||
gevent`_ and check out the list of the `open source projects based on
|
||||
gevent`_.
|
||||
|
||||
gevent was written by `Denis Bilenko <http://denisbilenko.com/>`_.
|
||||
|
||||
Since version 1.1, gevent is maintained by Jason Madden for
|
||||
`NextThought <https://nextthought.com>`_ with help from the
|
||||
`contributors <https://github.com/gevent/gevent/graphs/contributors>`_
|
||||
and is licensed under the MIT license.
|
||||
|
||||
See `what's new`_ in the latest major release.
|
||||
|
||||
Check out the detailed changelog_ for this version.
|
||||
|
||||
.. _events: http://www.gevent.org/api/gevent.event.html#gevent.event.Event
|
||||
.. _queues: http://www.gevent.org/api/gevent.queue.html#gevent.queue.Queue
|
||||
.. _gevent.subprocess: http://www.gevent.org/api/gevent.subprocess.html#module-gevent.subprocess
|
||||
|
||||
.. _coroutine: https://en.wikipedia.org/wiki/Coroutine
|
||||
.. _Python: http://python.org
|
||||
.. _libev: http://software.schmorp.de/pkg/libev.html
|
||||
.. _libuv: http://libuv.org
|
||||
.. _inspired by eventlet: http://blog.gevent.org/2010/02/27/why-gevent/
|
||||
.. _use gevent: http://groups.google.com/group/gevent/browse_thread/thread/4de9703e5dca8271
|
||||
.. _open source projects based on gevent: https://github.com/gevent/gevent/wiki/Projects
|
||||
.. _what's new: http://www.gevent.org/whatsnew_1_3.html
|
||||
.. _changelog: http://www.gevent.org/changelog.html
|
||||
|
||||
|
||||
Read the documentation online at http://www.gevent.org.
|
||||
|
||||
Post feedback and issues on the `bug tracker`_, `mailing list`_, blog_
|
||||
and `twitter (@gevent)`_.
|
||||
|
||||
|
||||
===============================
|
||||
Installation and Requirements
|
||||
===============================
|
||||
|
||||
.. _installation:
|
||||
|
||||
..
|
||||
This file is included in README.rst so it is limited to plain
|
||||
ReST markup, not Sphinx.
|
||||
|
||||
Supported Platforms
|
||||
===================
|
||||
|
||||
`gevent 1.3`_ runs on Python 2.7 and Python 3. Releases 3.4, 3.5 and
|
||||
3.6 of Python 3 are supported. (Users of older versions of Python 2
|
||||
need to install gevent 1.0.x (2.5), 1.1.x (2.6) or 1.2.x (<=2.7.8);
|
||||
gevent 1.2 can be installed on Python 3.3.) gevent requires the
|
||||
`greenlet <https://greenlet.readthedocs.io>`_ library and will install
|
||||
the `cffi`_ library by default on Windows.
|
||||
|
||||
gevent 1.3 also runs on PyPy 5.5 and above, although 5.9 or above is
|
||||
strongly recommended. On PyPy, there are no external dependencies.
|
||||
|
||||
gevent is tested on Windows, OS X, and Linux, and should run on most
|
||||
other Unix-like operating systems (e.g., FreeBSD, Solaris, etc.)
|
||||
|
||||
.. note:: On Windows using the libev backend, gevent is
|
||||
limited to a maximum of 1024 open sockets due to
|
||||
`limitations in libev`_. This limitation should not exist
|
||||
with the default libuv backend.
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
.. note::
|
||||
|
||||
This section is about installing released versions of gevent
|
||||
as distributed on the `Python Package Index`_
|
||||
|
||||
.. _Python Package Index: http://pypi.org/project/gevent
|
||||
|
||||
gevent and greenlet can both be installed with `pip`_, e.g., ``pip
|
||||
install gevent``. Installation using `buildout
|
||||
<http://docs.buildout.org/en/latest/>`_ is also supported.
|
||||
|
||||
On Windows, OS X, and Linux, both gevent and greenlet are
|
||||
distributed as binary `wheels`_.
|
||||
|
||||
.. tip::
|
||||
|
||||
You need Pip 8.0 or later, or buildout 2.10.0 to install the
|
||||
binary wheels.
|
||||
|
||||
.. tip::
|
||||
|
||||
On Linux, you'll need to install gevent from source if you wish to
|
||||
use the libuv loop implementation. This is because the `manylinux1
|
||||
<https://www.python.org/dev/peps/pep-0513/>`_ specification for the
|
||||
distributed wheels does not support libuv. The `cffi`_ library
|
||||
*must* be installed at build time.
|
||||
|
||||
|
||||
Installing From Source
|
||||
----------------------
|
||||
|
||||
If you are unable to use the binary wheels (for platforms where no
|
||||
pre-built wheels are available or if wheel installation is disabled,
|
||||
e.g., for libuv support on Linux), here are some things you need to know.
|
||||
|
||||
- You can install gevent from source with ``pip install --no-binary
|
||||
gevent gevent``.
|
||||
|
||||
- You'll need a working C compiler that can build Python extensions.
|
||||
On some platforms, you may need to install Python development
|
||||
packages.
|
||||
|
||||
- Installing from source requires ``setuptools``. This is installed
|
||||
automatically in virtual environments and by buildout. However,
|
||||
gevent uses :pep:`496` environment markers in ``setup.py``.
|
||||
Consequently, you'll need a version of setuptools newer than 25
|
||||
(mid 2016) to install gevent from source; a version that's too old
|
||||
will produce a ``ValueError``. Older versions of pipenv may also
|
||||
`have issues installing gevent for this reason
|
||||
<https://github.com/pypa/pipenv/issues/2113>`_.
|
||||
|
||||
- To build the libuv backend (which is required on Windows and
|
||||
optional elsewhere), or the CFFI-based libev backend, you must
|
||||
install `cffi`_ before attempting to install gevent on CPython (on
|
||||
PyPy this step is not necessary).
|
||||
|
||||
|
||||
Common Installation Issues
|
||||
--------------------------
|
||||
|
||||
The following are some common installation problems and solutions for
|
||||
those compiling gevent from source.
|
||||
|
||||
- Some Linux distributions are now mounting their temporary
|
||||
directories with the ``noexec`` option. This can cause a standard
|
||||
``pip install gevent`` to fail with an error like ``cannot run C
|
||||
compiled programs``. One fix is to mount the temporary directory
|
||||
without that option. Another may be to use the ``--build`` option to
|
||||
``pip install`` to specify another directory. See `issue #570
|
||||
<https://github.com/gevent/gevent/issues/570>`_ and `issue #612
|
||||
<https://github.com/gevent/gevent/issues/612>`_ for examples.
|
||||
|
||||
- Also check for conflicts with environment variables like ``CFLAGS``. For
|
||||
example, see `Library Updates <http://www.gevent.org/whatsnew_1_1.html#library-updates-label>`_.
|
||||
|
||||
- Users of a recent SmartOS release may need to customize the
|
||||
``CPPFLAGS`` (the environment variable containing the default
|
||||
options for the C preprocessor) if they are using the libev shipped
|
||||
with gevent. See `Operating Systems
|
||||
<http://www.gevent.org/whatsnew_1_1.html#operating-systems-label>`_
|
||||
for more information.
|
||||
|
||||
- If you see ``ValueError: ("Expected ',' or end-of-list in", "cffi >=
|
||||
1.11.5 ; sys_platform == 'win32' and platform_python_implementation
|
||||
== 'CPython'", 'at', " ; sys_platform == 'win32' and
|
||||
platform_python_implementation == 'CPython'")``, the version of
|
||||
setuptools is too old. Install a more recent version of setuptools.
|
||||
|
||||
|
||||
Extra Dependencies
|
||||
==================
|
||||
|
||||
gevent has no runtime dependencies outside the standard library,
|
||||
greenlet and (on some platforms) `cffi`_. However, there are a
|
||||
number of additional libraries that extend gevent's functionality and
|
||||
will be used if they are available.
|
||||
|
||||
The `psutil <https://pypi.org/project/psutil>`_ library is needed to
|
||||
monitor memory usage.
|
||||
|
||||
`zope.event <https://pypi.org/project/zope.event>`_ is highly
|
||||
recommended for configurable event support; it can be installed with
|
||||
the ``events`` extra, e.g., ``pip install gevent[events]``.
|
||||
|
||||
`dnspython <https://pypi.org/project/dnspython>`_ is required for the
|
||||
new pure-Python resolver, and on Python 2, so is `idna
|
||||
<https://pypi.org/project/idna>`_. They can be installed with the
|
||||
``dnspython`` extra.
|
||||
|
||||
|
||||
Development
|
||||
===========
|
||||
|
||||
To install the latest development version::
|
||||
|
||||
pip install setuptools cffi 'cython>=0.28' git+git://github.com/gevent/gevent.git#egg=gevent
|
||||
|
||||
.. note::
|
||||
|
||||
You will not be able to run gevent's test suite using that method.
|
||||
|
||||
To hack on gevent (using a virtualenv)::
|
||||
|
||||
$ git clone https://github.com/gevent/gevent.git
|
||||
$ cd gevent
|
||||
$ virtualenv env
|
||||
$ source env/bin/activate
|
||||
(env) $ pip install -r dev-requirements.txt
|
||||
|
||||
.. note::
|
||||
|
||||
The notes above about installing from source apply here as well.
|
||||
The ``dev-requirements.txt`` file takes care of the library
|
||||
prerequisites (CFFI, Cython), but having a working C compiler that
|
||||
can create Python extensions is up to you.
|
||||
|
||||
|
||||
Running Tests
|
||||
-------------
|
||||
|
||||
There are a few different ways to run the tests. To simply run the
|
||||
tests on one version of Python during development, begin with the
|
||||
above instructions to install gevent in a virtual environment and then
|
||||
run::
|
||||
|
||||
(env) $ python -mgevent.tests
|
||||
|
||||
Before submitting a pull request, it's a good idea to run the tests
|
||||
across all supported versions of Python, and to check the code quality
|
||||
using prospector. This is what is done on Travis CI. Locally it
|
||||
can be done using tox::
|
||||
|
||||
pip install tox
|
||||
tox
|
||||
|
||||
The testrunner accepts a ``--coverage`` argument to enable code
|
||||
coverage metrics through the `coverage.py`_ package. That would go
|
||||
something like this::
|
||||
|
||||
python -m gevent.tests --coverage
|
||||
coverage combine
|
||||
coverage html -i
|
||||
<open htmlcov/index.html>
|
||||
|
||||
Continuous integration
|
||||
----------------------
|
||||
|
||||
A test suite is run for every push and pull request submitted. Travis
|
||||
CI is used to test on Linux, and `AppVeyor`_ runs the builds on
|
||||
Windows.
|
||||
|
||||
.. image:: https://travis-ci.org/gevent/gevent.svg?branch=master
|
||||
:target: https://travis-ci.org/gevent/gevent
|
||||
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/q4kl21ng2yo2ixur?svg=true
|
||||
:target: https://ci.appveyor.com/project/denik/gevent
|
||||
|
||||
|
||||
Builds on Travis CI automatically submit updates to `coveralls.io`_ to
|
||||
monitor test coverage.
|
||||
|
||||
.. image:: https://coveralls.io/repos/gevent/gevent/badge.svg?branch=master&service=github
|
||||
:target: https://coveralls.io/github/gevent/gevent?branch=master
|
||||
|
||||
.. note:: On Debian, you will probably need ``libpythonX.Y-testsuite``
|
||||
installed to run all the tests.
|
||||
|
||||
.. _coverage.py: https://pypi.python.org/pypi/coverage/
|
||||
.. _coveralls.io: https://coveralls.io/github/gevent/gevent
|
||||
.. _`pip`: https://pip.pypa.io/en/stable/installing/
|
||||
.. _`wheels`: http://pythonwheels.com
|
||||
.. _`gevent 1.3`: whatsnew_1_3.html
|
||||
|
||||
.. _`cffi`: https://cffi.readthedocs.io
|
||||
.. _`limitations in libev`: http://pod.tst.eu/http://cvs.schmorp.de/libev/ev.pod#WIN32_PLATFORM_LIMITATIONS_AND_WORKA
|
||||
.. _AppVeyor: https://ci.appveyor.com/project/denik/gevent
|
||||
|
||||
|
||||
.. _bug tracker: https://github.com/gevent/gevent/wiki/Projects
|
||||
.. _mailing list: http://groups.google.com/group/gevent
|
||||
.. _blog: http://blog.gevent.org
|
||||
.. _twitter (@gevent): http://twitter.com/gevent
|
||||
|
||||
|
@ -0,0 +1,94 @@
|
||||
gevent is licensed under the MIT license. See the LICENSE file for the
|
||||
complete license.
|
||||
|
||||
Portions of this software may have other licenses.
|
||||
|
||||
=============================================
|
||||
|
||||
greentest/2.7
|
||||
greentest/2.7.8
|
||||
greentest/2.7pypy
|
||||
greentest/3.3
|
||||
greentest/3.4
|
||||
greentest/3.5
|
||||
-----------------
|
||||
|
||||
Copyright (c) 2001-2016 Python Software Foundation; All Rights Reserved
|
||||
|
||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||
--------------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||
otherwise using this software ("Python") in source or binary form and
|
||||
its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
distribute, and otherwise use Python alone or in any derivative version,
|
||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
i.e., "Copyright (c) 2001-2016 Python Software Foundation; All Rights
|
||||
Reserved" are retained in Python alone or in any derivative version prepared
|
||||
by Licensee.
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python.
|
||||
|
||||
4. PSF is making Python available to Licensee on an "AS IS"
|
||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. Nothing in this License Agreement shall be deemed to create any
|
||||
relationship of agency, partnership, or joint venture between PSF and
|
||||
Licensee. This License Agreement does not grant permission to use PSF
|
||||
trademarks or trade name in a trademark sense to endorse or promote
|
||||
products or services of Licensee, or any third party.
|
||||
|
||||
8. By copying, installing or otherwise using Python, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
============================================
|
||||
|
||||
gevent/libuv/_corecffi_source.c
|
||||
gevent/libuv/_corecffi_cdef.c
|
||||
|
||||
Originally based on code from https://github.com/veegee/guv
|
||||
|
||||
Copyright (c) 2014 V G
|
||||
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
|
||||
===========================================
|
@ -0,0 +1,563 @@
|
||||
gevent-1.4.0.dist-info/AUTHORS,sha256=IS4ttuioANx5ucZqOXHiezC9ys2nkpxl1M_8f77Rleo,1303
|
||||
gevent-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
gevent-1.4.0.dist-info/LICENSE,sha256=TUa8EdGeOFPVQyWXO44sUwkPVjinvyf6H18SMseJAfc,1235
|
||||
gevent-1.4.0.dist-info/METADATA,sha256=sWhcP3zm6JeF0jrJNoZmE_WJ1tirRm_QGd9018zsewg,14012
|
||||
gevent-1.4.0.dist-info/NOTICE,sha256=ZJOCR8qaV_7kwRZWQEuTwxMCkYfhPaeHySe2xkpoBYM,4004
|
||||
gevent-1.4.0.dist-info/RECORD,,
|
||||
gevent-1.4.0.dist-info/WHEEL,sha256=5cvWootWBSRa7expZAttvqZiGBlccnXzLfZShBiXXSk,102
|
||||
gevent-1.4.0.dist-info/entry_points.txt,sha256=j3Bs4dZY03xbORf-NbA6xkzNErzi1OVktpPF8rFrRQA,96
|
||||
gevent-1.4.0.dist-info/top_level.txt,sha256=fpElGiTe2fdw27vmNxdV5MQpyndjzWZMk5TB_NMYPSI,7
|
||||
gevent/__abstract_linkable.cp37-win32.pyd,sha256=LAlnYI_INjfCwxSXTdUCVFj7e8G9xnMgKc7TEo1R9Qo,59904
|
||||
gevent/__abstract_linkable.pxd,sha256=PVN1gd3tfa_yMOYZ7d-kjc1No2sz7Wie_IIgsx3-x_Q,1469
|
||||
gevent/__greenlet_primitives.cp37-win32.pyd,sha256=I7WI_R8dOMrKW2YkUxv3O279snaHeaE9FvhCgpkmPO0,47104
|
||||
gevent/__greenlet_primitives.pxd,sha256=GW2sdrRzeRtvjqQ7D-oay0BTQ7FEVt2Tt7KUn6KVjNU,1101
|
||||
gevent/__hub_local.cp37-win32.pyd,sha256=ekXxSNi16OUlMqpxvoILJVIW8B53zDNXbuCy03Tn5kc,43520
|
||||
gevent/__hub_local.pxd,sha256=DTX_cs_K3W_QZxRZnDkER897FMrBGB8DuBumW0HFQqk,522
|
||||
gevent/__hub_primitives.cp37-win32.pyd,sha256=ZQ2a2PLzVYX02V8wUDjLELR-3d6VU1k4r_d2LoMFrgk,103424
|
||||
gevent/__hub_primitives.pxd,sha256=jMZVuU8c5LDF5ACKmGYQgbZinRaf2cRxdYbUNFeoR8o,1972
|
||||
gevent/__ident.cp37-win32.pyd,sha256=B-hCbDozJ2mfqq0MNtkk9gZC9ieEzdiBIq642la0qYs,39424
|
||||
gevent/__ident.pxd,sha256=toMQT_XCLLeNB9rbjRHp5h9vdBHNbL5tplra0BUSiDQ,496
|
||||
gevent/__imap.cp37-win32.pyd,sha256=t2QX79aVwvxoE7TPa-McGvaMYaY0AomzZQaZod-VU4w,72192
|
||||
gevent/__imap.pxd,sha256=BIgNRMvjzrChq4PwAHnEOhOr5h-9Wte0s4USf7KPqsg,1103
|
||||
gevent/__init__.py,sha256=LmqW8f68068GxkaRliZHHpDt_Ox5kp6Em-HUQf90vl8,5245
|
||||
gevent/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/__pycache__/_abstract_linkable.cpython-37.pyc,,
|
||||
gevent/__pycache__/_compat.cpython-37.pyc,,
|
||||
gevent/__pycache__/_config.cpython-37.pyc,,
|
||||
gevent/__pycache__/_fileobjectcommon.cpython-37.pyc,,
|
||||
gevent/__pycache__/_fileobjectposix.cpython-37.pyc,,
|
||||
gevent/__pycache__/_greenlet_primitives.cpython-37.pyc,,
|
||||
gevent/__pycache__/_hub_local.cpython-37.pyc,,
|
||||
gevent/__pycache__/_hub_primitives.cpython-37.pyc,,
|
||||
gevent/__pycache__/_ident.cpython-37.pyc,,
|
||||
gevent/__pycache__/_imap.cpython-37.pyc,,
|
||||
gevent/__pycache__/_interfaces.cpython-37.pyc,,
|
||||
gevent/__pycache__/_monitor.cpython-37.pyc,,
|
||||
gevent/__pycache__/_patcher.cpython-37.pyc,,
|
||||
gevent/__pycache__/_semaphore.cpython-37.pyc,,
|
||||
gevent/__pycache__/_socket2.cpython-37.pyc,,
|
||||
gevent/__pycache__/_socket3.cpython-37.pyc,,
|
||||
gevent/__pycache__/_socketcommon.cpython-37.pyc,,
|
||||
gevent/__pycache__/_ssl2.cpython-37.pyc,,
|
||||
gevent/__pycache__/_ssl3.cpython-37.pyc,,
|
||||
gevent/__pycache__/_sslgte279.cpython-37.pyc,,
|
||||
gevent/__pycache__/_tblib.cpython-37.pyc,,
|
||||
gevent/__pycache__/_threading.cpython-37.pyc,,
|
||||
gevent/__pycache__/_tracer.cpython-37.pyc,,
|
||||
gevent/__pycache__/_util.cpython-37.pyc,,
|
||||
gevent/__pycache__/_util_py2.cpython-37.pyc,,
|
||||
gevent/__pycache__/_waiter.cpython-37.pyc,,
|
||||
gevent/__pycache__/ares.cpython-37.pyc,,
|
||||
gevent/__pycache__/backdoor.cpython-37.pyc,,
|
||||
gevent/__pycache__/baseserver.cpython-37.pyc,,
|
||||
gevent/__pycache__/builtins.cpython-37.pyc,,
|
||||
gevent/__pycache__/core.cpython-37.pyc,,
|
||||
gevent/__pycache__/event.cpython-37.pyc,,
|
||||
gevent/__pycache__/events.cpython-37.pyc,,
|
||||
gevent/__pycache__/exceptions.cpython-37.pyc,,
|
||||
gevent/__pycache__/fileobject.cpython-37.pyc,,
|
||||
gevent/__pycache__/greenlet.cpython-37.pyc,,
|
||||
gevent/__pycache__/hub.cpython-37.pyc,,
|
||||
gevent/__pycache__/local.cpython-37.pyc,,
|
||||
gevent/__pycache__/lock.cpython-37.pyc,,
|
||||
gevent/__pycache__/monkey.cpython-37.pyc,,
|
||||
gevent/__pycache__/os.cpython-37.pyc,,
|
||||
gevent/__pycache__/pool.cpython-37.pyc,,
|
||||
gevent/__pycache__/pywsgi.cpython-37.pyc,,
|
||||
gevent/__pycache__/queue.cpython-37.pyc,,
|
||||
gevent/__pycache__/resolver_ares.cpython-37.pyc,,
|
||||
gevent/__pycache__/resolver_thread.cpython-37.pyc,,
|
||||
gevent/__pycache__/select.cpython-37.pyc,,
|
||||
gevent/__pycache__/server.cpython-37.pyc,,
|
||||
gevent/__pycache__/signal.cpython-37.pyc,,
|
||||
gevent/__pycache__/socket.cpython-37.pyc,,
|
||||
gevent/__pycache__/ssl.cpython-37.pyc,,
|
||||
gevent/__pycache__/subprocess.cpython-37.pyc,,
|
||||
gevent/__pycache__/thread.cpython-37.pyc,,
|
||||
gevent/__pycache__/threading.cpython-37.pyc,,
|
||||
gevent/__pycache__/threadpool.cpython-37.pyc,,
|
||||
gevent/__pycache__/time.cpython-37.pyc,,
|
||||
gevent/__pycache__/timeout.cpython-37.pyc,,
|
||||
gevent/__pycache__/util.cpython-37.pyc,,
|
||||
gevent/__pycache__/win32util.cpython-37.pyc,,
|
||||
gevent/__semaphore.cp37-win32.pyd,sha256=4GqduzGpcRfjgAT-LgftRa1nG8q4lqRazFUtgTAJrak,66048
|
||||
gevent/__semaphore.pxd,sha256=h600zR6GZTPXMYxpMmNeL4gd01mlraW10yZf2Clt8RU,665
|
||||
gevent/__tracer.cp37-win32.pyd,sha256=84PAc8NV1zlRTFrb01y8bbaXLYPsOIbQ8gQ_TTIz2_4,73728
|
||||
gevent/__tracer.pxd,sha256=m7uZ-bUN4D8Av_NE9xUJfRkiCnu5BGZ4RjNJvHHosHo,884
|
||||
gevent/__waiter.cp37-win32.pyd,sha256=IHvAU2KBf8aGaJI66dd2EosyJeO_2lCMlqseMjDYulk,64000
|
||||
gevent/__waiter.pxd,sha256=oYVauEKGS6MFNVNjsp5H_sxHEjqBc_ha2oEbE7d9DcE,1129
|
||||
gevent/_abstract_linkable.c,sha256=uqEpUaCamxodFMprktiwhx9iEdoL3X_BVEp84OQ-NUU,345981
|
||||
gevent/_abstract_linkable.html,sha256=JR0imfGj_tZKKy8rfuWjDLtOY-oqZo8DbCxP-yIsZY8,169474
|
||||
gevent/_abstract_linkable.py,sha256=jWNoSAdXtnRlJhodxFP-wvOoV9RmMQHkrUtuSaPPbaI,8416
|
||||
gevent/_compat.py,sha256=W8P-VbE5JgGUtUc9eOAKleDFBdi_IBWIEQySgeZDVX4,5028
|
||||
gevent/_config.py,sha256=zFnJyoiW39i97TavK9U5fvxPO3O6n3B2MBP1bv0Z8dc,20240
|
||||
gevent/_event.cp37-win32.pyd,sha256=vms6MGIlBeo3KqgIBqQ4t1Gk-WqqtUk2xDXz5EYqt90,88064
|
||||
gevent/_event.pxd,sha256=-jkoHbYbLcvKC1FBwgOady2vUrSofRhHVMASlhs-oKg,628
|
||||
gevent/_ffi/__init__.py,sha256=BTBgjjvO4ecQBPbReBhem-0zvy1Mq6jXf5dMrykGIhs,493
|
||||
gevent/_ffi/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/_ffi/__pycache__/callback.cpython-37.pyc,,
|
||||
gevent/_ffi/__pycache__/loop.cpython-37.pyc,,
|
||||
gevent/_ffi/__pycache__/watcher.cpython-37.pyc,,
|
||||
gevent/_ffi/callback.py,sha256=XR8Nll5CiX4QKDwflPLpWj1-BaUGcwka0QvO3qqpN8s,1651
|
||||
gevent/_ffi/loop.py,sha256=Yjl9FKSl1eXfFsCi5xrlxKUwIfUKIDbFuRvo_9E1ErI,28728
|
||||
gevent/_ffi/watcher.py,sha256=BUIJ-bzOgfAqvL8nVmd5x_jQ_X7t79mHe0BgEHRW3ys,20551
|
||||
gevent/_fileobjectcommon.py,sha256=KDySdmDpBDsMUmBe-LAtbep_GBhpfpWk3pSX5WC1U5s,9652
|
||||
gevent/_fileobjectposix.py,sha256=Q3HFvNsn2Snqo2QSDz1RTlEjWQBLbwGjlxSKO0k0Fk0,13796
|
||||
gevent/_greenlet.cp37-win32.pyd,sha256=fQ-1EtOJsgIKDAhwfvhzFwsnMINiTA21HjozSOFmIws,191488
|
||||
gevent/_greenlet.pxd,sha256=IWVlcZLwVDLykNwnEf6wSQFWp8PfpOld53AytbiTcrg,4375
|
||||
gevent/_greenlet_primitives.c,sha256=6h1iAjjXOQ1OtjztyfKTF3EhaSQ6R3LAc2sV7lRgTZI,263097
|
||||
gevent/_greenlet_primitives.html,sha256=7Z5Uk-xSe3yP0a8nY6_eWFyH7JHirqLEKi4IIaatPUw,92705
|
||||
gevent/_greenlet_primitives.py,sha256=DeeZeRb_FSMKcKamkUrH4TXsDHCBeoFI0ic4_LLI7Sg,3259
|
||||
gevent/_hub_local.c,sha256=tO2t0f6iQZz6fqi-whm4eOIpIVJQq63jizh9at3e1yA,227773
|
||||
gevent/_hub_local.html,sha256=VCIfCDQqrH6YkgDZ5ZT1Rlqo6ynmiYaPEFVPS6qW814,83545
|
||||
gevent/_hub_local.py,sha256=lECCOC5_CeP7Jby4zj1Lebm5SUbRhaQJMytNe_zI8_k,2739
|
||||
gevent/_hub_primitives.c,sha256=5guinh-mdlZChsI0UJrPjuV18WuYGUOvz1OJBcs4d-U,528532
|
||||
gevent/_hub_primitives.html,sha256=9GOjtOWJIpHCgztA_xupnzjCTloYUkY8grYbtc97FT0,373917
|
||||
gevent/_hub_primitives.py,sha256=1IWA3lkQNZSkot8OsqQCwSB4cNeM2ytif3FPZyyHYTE,12946
|
||||
gevent/_ident.c,sha256=F3RYBX5_aVUnJ6VJL8FQFGMMysk6HKOD18gwpTVbA6Q,213241
|
||||
gevent/_ident.html,sha256=Qg9DeQVa4yxhw3FjxtXnRiI2dfCYYmsjhvr8MLsj3EQ,61604
|
||||
gevent/_ident.py,sha256=eyA7pXNr3saDDZwJfXjn8U69wS2iWbrJcGjcAEcEbDk,2245
|
||||
gevent/_imap.c,sha256=fVCHuTGHxrO1396K62eeP_nvwOHxF0ccS_gTkHpc9AI,421402
|
||||
gevent/_imap.html,sha256=0fGaZ42tSbEGFx5jIcnbB_s_6WtKZpQ1M_8zdP4MYI4,165842
|
||||
gevent/_imap.py,sha256=RubqWMTHdK4vLPzLY4vptCnbNfsfLRi9_uNy9Fw-XLM,7710
|
||||
gevent/_interfaces.py,sha256=0MwiLJ_mI-fN6HewTmgljM93Q9GJSx02f5Pf4BUCb8w,6983
|
||||
gevent/_local.cp37-win32.pyd,sha256=06YgQSzV1rTDBPIsFmYeM1Jw1hzBXmvdXNdu0Au8SdU,107008
|
||||
gevent/_local.pxd,sha256=NImltr7cRrnk-7zRZ2lXUigD3v4xOSqZWz0kd2F2BoU,2792
|
||||
gevent/_monitor.py,sha256=ArTvznAbobIICMOuV55T5Kbs2U0LBI6tjeOPcS07xRs,11901
|
||||
gevent/_patcher.py,sha256=-KdbfhqMLEG3PDBrqIXuYYZWPWanseOgGiARIPX8-qY,4118
|
||||
gevent/_queue.cp37-win32.pyd,sha256=C8GMdDKCtywDagMBfYeckl0veiw9ZZV1z2PhfX3wA1s,173056
|
||||
gevent/_queue.pxd,sha256=qBieZQPOfYWbiu3FIWF1nqCP8tLe-y4zrxtNtwLKjY8,1391
|
||||
gevent/_semaphore.c,sha256=vzZJYV-Hh2Ec5_Qu7Ge96shpJ-QXtJD4-DDz8B0fTPE,349794
|
||||
gevent/_semaphore.html,sha256=1-PupnHnpQlJaRByqOIuhEI6W98Hp7H1wXllfwqQPks,172337
|
||||
gevent/_semaphore.py,sha256=5M9rHCYEnU-9Bi4DZyDvQCNtp_BVm15P0h-61qbes-I,6686
|
||||
gevent/_socket2.py,sha256=FXl1CyVXG0CWI1xrcfrigtwv9VYo3ReTIlWhW4cy2Jg,16534
|
||||
gevent/_socket3.py,sha256=9V2yjAIcICi0lxyfdQ6GtLEZ0I8Ne45AGBlG6_AtxwY,28893
|
||||
gevent/_socketcommon.py,sha256=tD3JqqWor88DYbceWzXrdQDX_8hz4kpGaSvQqV_UX7w,13454
|
||||
gevent/_ssl2.py,sha256=EnUQAc8ouigaMJAJVuO0FE9xmYjrtLf2NhNGB9dqX30,16951
|
||||
gevent/_ssl3.py,sha256=cZUs3JiTzPMBuRq0cMuPfWctbGharkbJL8Il2criV60,27103
|
||||
gevent/_sslgte279.py,sha256=UMNcclu1pKHcKKDwAB-B8Vctd42oxRAccrN2xcJ12V0,27490
|
||||
gevent/_tblib.py,sha256=ce8x51beuHDdXob1LvvqW7XSNS0q1rsuXtKzkuBL83Q,13246
|
||||
gevent/_threading.py,sha256=Ah9cozibYKrfdzTyh30qNGdhyKpoHywLbgFZeM6Nyo0,5291
|
||||
gevent/_tracer.c,sha256=iHHu6XRjWY_MenUsIBL5G80_eD_ibVYO59cXTCCFkKU,404267
|
||||
gevent/_tracer.html,sha256=ahBikt-K41EvjRW7dB1Y7lmQITROOWtGmAY29ew_0bw,229792
|
||||
gevent/_tracer.py,sha256=EdNhOERSRYk21We8C6LdgD02m_rvI45QOYAJXPZwMmE,6235
|
||||
gevent/_util.py,sha256=aOapAAnsrl1NqBOAkRRpwYZxu0QorXCVPGuW7L2cu1A,5035
|
||||
gevent/_util_py2.py,sha256=og4n5HlSiFY6-NWoZiOJfQ3r47wMn3PgFiONHGPLAyA,506
|
||||
gevent/_waiter.c,sha256=wavFMBpoFzalUdusiYuELaCXa-AbGk3PmWQgc9nCXVM,354890
|
||||
gevent/_waiter.html,sha256=aHjAH9jAKMcjKyGwAx2lFMVy4TOyvT68ebtXAER9Yi4,172037
|
||||
gevent/_waiter.py,sha256=2Wr2da5g49-IJtPI-TWy7Di1RhMHhmkE0QDERVPNo1g,7265
|
||||
gevent/ares.py,sha256=KJvKlPIqupEi51HaVx0Yli4HU97IhtFSN4cgIKJKLh4,336
|
||||
gevent/backdoor.py,sha256=-mXkfGChbQnC-vdPKBwzT937KcGzYPDh0rlKoe9u_B8,7123
|
||||
gevent/baseserver.py,sha256=yWX_FckWVfm3Go-O_xaDUw33ajqUXnAtfTWZ0UymEm4,15389
|
||||
gevent/builtins.py,sha256=H6T0k0yufU6qsnOZAQpXjQBwXF6L2-FdWK-c36IZocU,4718
|
||||
gevent/core.py,sha256=XgaVreHocvO9JCVby3JCo8ixbllJL08V9OrA7ETDaHs,479
|
||||
gevent/event.c,sha256=o_fa8VvtKFZzDqksDV-bhCGQcRDoCvmnw40Iy_HD6Fw,453770
|
||||
gevent/event.html,sha256=5DUg00J20K9LTGr01BWzhtZGEijY5-q82Wf5L8xe8i8,283514
|
||||
gevent/event.py,sha256=ptwIV0ogu8Yew2v-F115uSdLp-Te04yCFCulKLaoeFU,13571
|
||||
gevent/events.py,sha256=YTVY_G_ORI9VFcls-whRvr3GpsYKCpwVWxnPzt2YFVA,15611
|
||||
gevent/exceptions.py,sha256=KDmYwbSQjtMbl-wwyIsIQRGLBK20HdTdKpFrGPau2Vw,2093
|
||||
gevent/fileobject.py,sha256=gM2GT14akHsWCbIlCDonc9gi2Tq3tjffn4uVjWGccto,1892
|
||||
gevent/greenlet.c,sha256=VmpHOoDNLzzYaBNC6sxjqqzjtaift1JVSyOJNNR1Ggg,1023793
|
||||
gevent/greenlet.html,sha256=Jfuvo9GryoJcxfFfop36hBX5xDtz1fYMOqZTFBmbJn8,800938
|
||||
gevent/greenlet.py,sha256=1X2Mn6cxp62smf59S5z7RD4iMOOJn38t4SP7zxZNhDI,36559
|
||||
gevent/hub.py,sha256=OXjVswgRyrOooy83jz_y_hnYhcoCLwr_rzR61uABWYQ,27333
|
||||
gevent/libev/__init__.py,sha256=I6hpYFJCnbBBDrousKzZ7Ql--mnfAFwfM2q1BuxcMfI,169
|
||||
gevent/libev/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/libev/__pycache__/_corecffi_build.cpython-37.pyc,,
|
||||
gevent/libev/__pycache__/corecffi.cpython-37.pyc,,
|
||||
gevent/libev/__pycache__/watcher.cpython-37.pyc,,
|
||||
gevent/libev/_corecffi_build.py,sha256=dvd3DMajYmV6vPHHfKbMV3s7rolGWOHaqf7nSDdyfL0,2374
|
||||
gevent/libev/_corecffi_cdef.c,sha256=l9-BecMsvpKaPDerD8zWtjPIhbg9LDszf2m4X0BXZS0,6679
|
||||
gevent/libev/_corecffi_source.c,sha256=xYFjS9IxF8RQKpS9HG6A1xMB7XPFO16sbcs6Fv5MPXk,2078
|
||||
gevent/libev/callbacks.c,sha256=_6gnWL5t0__7FuzMdQq2otP8Z6ammfE31MzwvlE7ZpQ,5983
|
||||
gevent/libev/callbacks.h,sha256=WTsA3IguzFiDUb9EQi3qQewg9goN4azoeQdb3m0LgVs,1029
|
||||
gevent/libev/corecext.c,sha256=fC0AI_uZXcq-gQAjgXA0x4gfb0OnyK-_hnvE1lB2lXg,951856
|
||||
gevent/libev/corecext.cp37-win32.pyd,sha256=Fx9ovIGXVGxpQUZjId8RYMkerExu2UTrF5PAkHTj7T4,217600
|
||||
gevent/libev/corecext.h,sha256=KLlGMFw9sgLonvmJdDJxnFGEock2E9Tx5M0JWxvJslw,4018
|
||||
gevent/libev/corecext.html,sha256=PSSE94dGxO-MFSILAIibbo4b39gtOu_z0HzXrYLl2vE,1037242
|
||||
gevent/libev/corecext.pyx,sha256=dUmw0dF4aLTQpz5ujw-tvgx3mytiWK5UDLlRo60CNp0,44499
|
||||
gevent/libev/corecffi.py,sha256=LqFjBgQngupsRdva06hXkUy7hf4k8GG9g3DJJHEaemg,11915
|
||||
gevent/libev/libev.h,sha256=H_4mpixGJB1kyFg2uG_e5hNkYWCbc4b5AbCKyLY-llc,2800
|
||||
gevent/libev/libev.pxd,sha256=3Nw5UjV2MQS5SrwejGzyHExqWBQ_8CkKDflVUL3D_mc,5891
|
||||
gevent/libev/libev_vfd.h,sha256=ud1OzvgVJdl_EzGPp9w2Ljs0wYKOm4a5c7IIeZ-bK0s,5825
|
||||
gevent/libev/stathelper.c,sha256=kHXlvNryG-6NnAmFRS0owynyAtWjHQ3qhyp3gcdgA2s,5017
|
||||
gevent/libev/watcher.py,sha256=7y07l7ilDyDTgyUJg-q2fhSYmImox11CTd0GCu1B7QA,7926
|
||||
gevent/libuv/__init__.py,sha256=I6hpYFJCnbBBDrousKzZ7Ql--mnfAFwfM2q1BuxcMfI,169
|
||||
gevent/libuv/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/libuv/__pycache__/_corecffi_build.cpython-37.pyc,,
|
||||
gevent/libuv/__pycache__/loop.cpython-37.pyc,,
|
||||
gevent/libuv/__pycache__/watcher.cpython-37.pyc,,
|
||||
gevent/libuv/_corecffi.cp37-win32.pyd,sha256=CRGnxMPUmYRZCHLYCfRFboI_tpglKcsaKvW_LptnlY4,128000
|
||||
gevent/libuv/_corecffi_build.py,sha256=2Z1xENB2jMkloYR1IahcR_gee5fq-jNxPb0lKDSMSko,8787
|
||||
gevent/libuv/_corecffi_cdef.c,sha256=Cg5viD68beoK4cEmA_ZNODv03rssb42Nl0PawNBNOyM,12922
|
||||
gevent/libuv/_corecffi_source.c,sha256=oy2RQKKWqCjpI3_rha_0T-vrr77-KBm79KpJawSpGNE,5795
|
||||
gevent/libuv/loop.py,sha256=c821BlJTXO6kJgUcSTjyQpvCEV14w22x1wMvnhAaaIM,23684
|
||||
gevent/libuv/watcher.py,sha256=1W4t-wrhwifBqLGwhabOkI4yNGsl3yf48ZJckJPemuo,26791
|
||||
gevent/local.c,sha256=Bf-NUM8iptXj1UKHRG7VkKeZq6ugmO3ZBabZfNSWctY,560357
|
||||
gevent/local.html,sha256=HpcSYyrazt9WjiKwHd8Qut8Y3RA4Lh2n88_y1JweV1Y,340512
|
||||
gevent/local.py,sha256=eQbEf5OCApOGLsWA2IMfxAN5fudLxSzn-5X8kTBGy6w,20617
|
||||
gevent/lock.py,sha256=Taq0qWkFjz554UhzZEmlP5ifedan3DuLCWJxn6ZGn1s,8082
|
||||
gevent/monkey.py,sha256=Ngjo5ZvSSHW3ys5zNPDi-aZ_FV4zfW6RtzAWcObDDE4,40683
|
||||
gevent/os.py,sha256=_1YwH4wyx3omsyfI7bzlXypIHdbTiXhKbDQc8MLEi9M,19554
|
||||
gevent/pool.py,sha256=zTnB7IM4Wa-SDq5G2DkaJ5GZZXunN1sVJk2_ijOqDl8,25584
|
||||
gevent/pywsgi.py,sha256=-Aya6QNUfwcf0Y0CIoLOqwvrAALwwoRa_k-jymfHCyk,60568
|
||||
gevent/queue.c,sha256=ZLmAuM-ZgyiKNczRfHIoI4haH-RepUz3Hbj642p8BcA,915680
|
||||
gevent/queue.html,sha256=ZXfdFLRrNpYraLzNu3xVXnpXUOghcX86cxB0BUHutlE,725133
|
||||
gevent/queue.py,sha256=JbvQulA3dhdB26ciX6PrppJMi_RoxSeN8Maf6BckloI,22806
|
||||
gevent/resolver/__init__.py,sha256=opqBGkhgE-Q7ex5wq2UKUL-XTSF6kCSeHHPes-Z-8GY,3690
|
||||
gevent/resolver/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/resolver/__pycache__/ares.cpython-37.pyc,,
|
||||
gevent/resolver/__pycache__/blocking.cpython-37.pyc,,
|
||||
gevent/resolver/__pycache__/dnspython.cpython-37.pyc,,
|
||||
gevent/resolver/__pycache__/thread.cpython-37.pyc,,
|
||||
gevent/resolver/ares.py,sha256=EHMPHZmmrq3VsTWck3OLA07AidtieN1RUucEz89yg8s,13829
|
||||
gevent/resolver/blocking.py,sha256=5ubBMewB7X-JouMKIlf_s2JNw4KJ_EqmNVUg4PrrSaA,1216
|
||||
gevent/resolver/cares.c,sha256=_csBn7Mqn5QCUbLbwOems5Dr0OzHqlTG733_AS4pjJQ,618987
|
||||
gevent/resolver/cares.cp37-win32.pyd,sha256=Z9c2YfHdmQZUOdrC3oZFX2gcmRVLOC-4_yN0-bh4PKc,154624
|
||||
gevent/resolver/cares.html,sha256=ZgWXkxq8XsTaBRM_R-Of66-KyGrEt1XGKWzKEs0XlGY,461050
|
||||
gevent/resolver/cares.pyx,sha256=G581qVmtMs5fNOS25Ue2U-Y8xqpVxiMO_4SqcEZakHg,16578
|
||||
gevent/resolver/cares_ntop.h,sha256=-YEPNUD3Ibx3hv6_II1iQ-nVOVsxbcG4gBLrnjTaUyI,148
|
||||
gevent/resolver/cares_pton.h,sha256=nfvWc----WPHJSyMHtAsB6yJWStxg2YdYwQMNDQMcuw,217
|
||||
gevent/resolver/dnshelper.c,sha256=wruRYzUEK9NK3txHOVeKbjBtTeMTgDhTEGRcRzOoRmY,4033
|
||||
gevent/resolver/dnspython.py,sha256=TpXTzmAZAlc6IpXUqv8VK9HfFB98vtINtOZUc8CUqOs,25168
|
||||
gevent/resolver/libcares.pxd,sha256=rwX-IiuFFBUEq2ESZ7GIEwqWDjZx7PfXSfupwOOVcAc,2770
|
||||
gevent/resolver/thread.py,sha256=lNlqGlEZfL0SViJx6Ow_S7EL1CNcUGcvWDuYyxLAR2o,2493
|
||||
gevent/resolver_ares.py,sha256=s5Jo9Z0b-zKxSWcIvW5onaFE2OrfqLuNnTPlOoxFxEQ,486
|
||||
gevent/resolver_thread.py,sha256=jcKcEVCXwyRqcsDUZmryQ9hc-83yztgaM4kuTKHOvaw,504
|
||||
gevent/select.py,sha256=x8nHoAV_2i5Poi-MTWMJZxYMouXDNUr4iWfs47P-uKs,10187
|
||||
gevent/server.py,sha256=xOgzEEle5wJ4yTfB5U-kYkRiH9B6nx20S8DCG_Nt-f0,9591
|
||||
gevent/signal.py,sha256=7uu4a7v0kGyOqauqqdtZxAVQy8IrdjWHKM6SpqAvhko,4915
|
||||
gevent/socket.py,sha256=xz-ntpsujqpXQ14lLyAnGOlW7679Quisw-r9m8HZWqQ,4870
|
||||
gevent/ssl.py,sha256=N5qr4kd8jXmKfxYOqiPBAFRV4n9FZiZHgDFetHIbc_k,1200
|
||||
gevent/subprocess.py,sha256=Bo8CCTwRJ2d08r2mmHn0IseEHMlFScZ-pJ9zyqUp8kc,68291
|
||||
gevent/testing/__init__.py,sha256=hkVMAVkgmlsjJGUlTIuAPayGGd81kpFy5LvoQ8PdqCI,3976
|
||||
gevent/testing/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/errorhandler.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/exception.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/flaky.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/hub.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/leakcheck.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/modules.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/monkey_test.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/openfiles.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/params.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/patched_tests_setup.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/six.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/skipping.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/sockets.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/switching.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/sysinfo.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/testcase.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/testrunner.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/timing.cpython-37.pyc,,
|
||||
gevent/testing/__pycache__/util.cpython-37.pyc,,
|
||||
gevent/testing/coveragesite/__pycache__/sitecustomize.cpython-37.pyc,,
|
||||
gevent/testing/coveragesite/sitecustomize.py,sha256=GSOkHhxLE_pjOHuUn4InKmmuLyIGSIumySFVVSmc4Vo,558
|
||||
gevent/testing/errorhandler.py,sha256=0S16ylXH_epEJBV9jZ4Ukd8-zlFdeb0FCv8eJyo0g8Q,2015
|
||||
gevent/testing/exception.py,sha256=yQHF9Ebom2JAKUq70mLsdFk9p4eorpK36O-3iH1LL1Q,1265
|
||||
gevent/testing/flaky.py,sha256=x-IujIZGK_m2FYRyi4RxKMZhLfxq25p47En4DAlYhCs,4104
|
||||
gevent/testing/hub.py,sha256=tH0vKcSW_C5vqcPr4cdf-ucKQ5YjcP4kHFG8T1VDfSc,1568
|
||||
gevent/testing/leakcheck.py,sha256=CyxY7HiZW_kxpVgztdX_hYIZhgrAwMxTm-7xlt_nwNo,7013
|
||||
gevent/testing/modules.py,sha256=YszQZAglfns-HabYDWmFjoDRX5Oxhh67DRaMU4z5gQY,3182
|
||||
gevent/testing/monkey_test.py,sha256=QFHQgmxlTE8RVzakU4QZ5yaaamYrvCLAGKO1leNM_Fw,2432
|
||||
gevent/testing/openfiles.py,sha256=TYorMzGRNQD4lznfDU0NNyxiEMsF_h6t5CqKO9DApU0,4869
|
||||
gevent/testing/params.py,sha256=NP8muVgzgVrC9wudC7gQTYTJpllKG3GQVpQI-UL-iig,3033
|
||||
gevent/testing/patched_tests_setup.py,sha256=TwxLVQBaLT8OekKkaQrY5KpIm3AFMY3eBG9vNJTu_zk,49460
|
||||
gevent/testing/six.py,sha256=uOHGN3XiRVQViBTFgaTac8nBy5azRODOFWvkcQVbtdA,1004
|
||||
gevent/testing/skipping.py,sha256=R1EzxSRTnOaKlr7jBaVxue5v0CnCTmTuHBh-NHwIVl0,3668
|
||||
gevent/testing/sockets.py,sha256=dMxbn7khWbBWqDwjEZithklFCK2yphiwgJOliYZvxRI,1914
|
||||
gevent/testing/switching.py,sha256=6idIaCiHKFZF8aibeHjxIdZi38cxXCKuhQRHUT1YWoo,2708
|
||||
gevent/testing/sysinfo.py,sha256=0LOpveTMOw2mv3cLQwLXRTpaAQ4jEXrpFA3O9s_oPWQ,4184
|
||||
gevent/testing/testcase.py,sha256=0GJFaSjK1Pr-QZn5M9q5SdFP1MXzlQ70v751N46ZNOY,12783
|
||||
gevent/testing/testrunner.py,sha256=hEG_r9VXTlxqgkB91PMHpC1u5s1ppzOmSrrIrFXl3nE,16385
|
||||
gevent/testing/timing.py,sha256=mhsdL-_Q0UMQyVjrIQGNlHphaw0Z_TBrB5hvzRUyxxk,4949
|
||||
gevent/testing/util.py,sha256=CehaaTcFyUUyJbDK5Rqs8hcn1lGfwF7C7Ep7xB8TbSg,12915
|
||||
gevent/tests/2_7_keycert.pem,sha256=xPXi5idPcQVbrhgxBqF2TNGm6sSZ2aLVVEt6DWzplL8,1783
|
||||
gevent/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
gevent/tests/__main__.py,sha256=EMw-OppCjl-heu15mLg-cf400NS1Ikuy96OisvLoKLM,179
|
||||
gevent/tests/__pycache__/__init__.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/__main__.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_blocks_at_top_level.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_import_import_patch.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_import_patch.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_import_wait.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_imports_at_top_level.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/_imports_imports_at_top_level.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/getaddrinfo_module.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/known_failures.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/lock_tests.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__GreenletExit.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test___config.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test___example_servers.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test___ident.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test___monitor.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test___monkey_patching.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__all__.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__api.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__api_timeout.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__ares_host_result.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__ares_timeout.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__backdoor.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__close_backend_fd.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__compat.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_async.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_callback.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_fork.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_loop_run.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_loop_run_sig_mod.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_stat.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_timer.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__core_watcher.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__destroy.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__destroy_default_loop.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__doctests.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__environ.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__event.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__events.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__example_echoserver.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__example_portforwarder.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__example_udp_client.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__example_udp_server.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__examples.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__exc_info.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__execmodules.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__fileobject.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__getaddrinfo_import.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__greenio.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__greenlet.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__greenletset.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__greenness.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__hub.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__hub_join.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__hub_join_timeout.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__import_blocking_in_greenlet.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__import_wait.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue112.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue230.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue330.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue467.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue6.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue600.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue607.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue639.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issue_728.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__issues461_471.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__iwait.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__joinall.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__local.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__loop_callback.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__makefile_ref.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__memleak.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_builtins_future.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_hub_in_thread.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_logging.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_multiple_imports.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_queue.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_scope.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_selectors.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_sigchld.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_sigchld_2.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_sigchld_3.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_ssl_warning.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_ssl_warning2.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__monkey_ssl_warning3.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__nondefaultloop.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__order.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__os.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__pool.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__pywsgi.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__queue.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__real_greenlet.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__refcount.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__refcount_core.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__select.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__semaphore.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__server.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__server_pywsgi.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__signal.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__sleep0.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_close.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_dns.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_dns6.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_errors.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_ex.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_send_memoryview.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_ssl.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socket_timeout.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__socketpair.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__ssl.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__subprocess.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__subprocess_interrupted.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__subprocess_poll.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__systemerror.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_2.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_before_monkey.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_holding_lock_while_monkey.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_monkey_in_thread.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_native_before_monkey.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_patched_local.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threading_vs_settrace.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threadpool.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__threadpool_executor_patched.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__timeout.cpython-37.pyc,,
|
||||
gevent/tests/__pycache__/test__util.cpython-37.pyc,,
|
||||
gevent/tests/_blocks_at_top_level.py,sha256=Hp36RFiC0djMSfvUHZsu8pVttpc7Hbmv_7VGq6xW630,48
|
||||
gevent/tests/_import_import_patch.py,sha256=IbgraY7KaPggcX1JNVkUQTTBSboegF_VWSDFJp38buI,28
|
||||
gevent/tests/_import_patch.py,sha256=_PWRiLjpsFyhT2CxTDIE9ZVS9gcCFqzQGFKel00zc2s,47
|
||||
gevent/tests/_import_wait.py,sha256=8353o30STWbRg53op9CWmTXfElU6VV4klLdqiq7Jmjg,570
|
||||
gevent/tests/_imports_at_top_level.py,sha256=9SCo81uRMT8xWbDFUBhbc_EwAoii9oygwOBSSNWfWWI,55
|
||||
gevent/tests/_imports_imports_at_top_level.py,sha256=VcIaDELcdgeEMqO_Cndy0XMjx05h5eG4_F_12giOSDs,345
|
||||
gevent/tests/badcert.pem,sha256=JioQeRZkHH8hGsWJjAF3U1zQvcWqhyzG6IOEJpTY9SE,1928
|
||||
gevent/tests/badkey.pem,sha256=gaBK9px_gG7DmrLKxfD6f6i-toAmARBTVfs-YGFRQF0,2162
|
||||
gevent/tests/getaddrinfo_module.py,sha256=oFyeNRywc3QO5HlpuV5DVcpUbml8hFn86pbWm_mGQX8,116
|
||||
gevent/tests/hosts_file.txt,sha256=07jEX3FicSKuiUJbQ_14H0MP8v7r35h_usGUmScPnSM,290909
|
||||
gevent/tests/https_svn_python_org_root.pem,sha256=wOB3Onnc62Iu9kEFd8GcHhd_suucYjpJNA3jyfHeJWA,2569
|
||||
gevent/tests/keycert.pem,sha256=r0KE1WH9eV6X4mUykpCY5Dm8_robBSi4zwMcGBPtMi4,1872
|
||||
gevent/tests/known_failures.py,sha256=Z_1njrvmPuHBgQdMlkUeem6Io6-h3AwuRJM5X7sfIl8,16153
|
||||
gevent/tests/lock_tests.py,sha256=FAHkMkAQSAofnKSYo4Q1yUz5DnMmXURfhp-zMbFzQUs,21861
|
||||
gevent/tests/monkey_package/__main__.py,sha256=uXMcIdKy25HEJtowa4Xd-uwrXQK-ca15TZ9knYMtwFc,180
|
||||
gevent/tests/monkey_package/__pycache__/__main__.cpython-37.pyc,,
|
||||
gevent/tests/monkey_package/__pycache__/issue302monkey.cpython-37.pyc,,
|
||||
gevent/tests/monkey_package/__pycache__/script.cpython-37.pyc,,
|
||||
gevent/tests/monkey_package/issue302monkey.py,sha256=DMdW7B0MVcpTMKk94hW4kxqBzbBXFs4QQBwsFN844dQ,1095
|
||||
gevent/tests/monkey_package/script.py,sha256=2bXmWybFAoMYldTLN41r9eqnsCVK1QXnaEcl1EeQstc,391
|
||||
gevent/tests/nullcert.pem,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
gevent/tests/server.crt,sha256=B4Oj77EfnNyQIPmD9UxB_0QZuUT5evuDcGhSvx9ClRk,887
|
||||
gevent/tests/server.key,sha256=ZtnbGm6-aP6CTTJPoBv1QoIEz6SgeOfgHKuWTJ4cxhQ,891
|
||||
gevent/tests/sha256.pem,sha256=trYsA7FY0hyVoV1AoGNwZ_s6T89eiiOIFexoNRv029s,2065
|
||||
gevent/tests/test__GreenletExit.py,sha256=qHtC7KnjCG039F_VGDXnsrhyWaQXvfcmgnVB2Rfa_Vk,127
|
||||
gevent/tests/test___config.py,sha256=b-Aj8uJnuqYUFkM5BNZLp_coMjBS2GZnMgKh4r-rfCs,4817
|
||||
gevent/tests/test___example_servers.py,sha256=Wi13Mlr3jFX3Lb7Wo3L4I5Z5VJ5ABU0Tma-T8mFP830,4741
|
||||
gevent/tests/test___ident.py,sha256=gZk0QW6gQy8_ZhB_mNwCSqno7P7N4ItkWs8U3DZfogU,2101
|
||||
gevent/tests/test___monitor.py,sha256=HhzValkaHYp-y0QzhzDzMYU4ZoSWiWfsCHsIqo8QeLU,12573
|
||||
gevent/tests/test___monkey_patching.py,sha256=5VD3iRMovke9RUgI16nXAtXNgnFMQO_-dDp1HKmNEco,3659
|
||||
gevent/tests/test__all__.py,sha256=HBRkZEDjChGPleSuNzUV9Zxs6aMMLphDaHJ3xTIhXx4,8964
|
||||
gevent/tests/test__api.py,sha256=zJF6cfQKnPscZio9ErpkY2Mn2NNPOzNnzX4J8ujx4ww,4550
|
||||
gevent/tests/test__api_timeout.py,sha256=j2P5BHLGkLTvYg-NzcE90-gxXGzc6wxoomF43C05Bxg,6307
|
||||
gevent/tests/test__ares_host_result.py,sha256=OXQIzDgE-rz3Bj-_s6PjbicMGaPqnAPmtSa3wWzk6iI,873
|
||||
gevent/tests/test__ares_timeout.py,sha256=7plIQnTiPeoWkF-yHxbuqAe6ohKsMJP4ZUGgW3yzfTY,1257
|
||||
gevent/tests/test__backdoor.py,sha256=_pHAgWh4w34KkEwPa46FF7sb6Xn81sUonJyuSjBXcgI,4193
|
||||
gevent/tests/test__close_backend_fd.py,sha256=K7ZbNpu9Xj1aF1jHHDmPr0iHzdIQje8yFsFWfzZyguE,1964
|
||||
gevent/tests/test__compat.py,sha256=YBE4IJwtRozcCRqeZXY9dkxqjZ6m2xS0Pk1ceApjvnE,1439
|
||||
gevent/tests/test__core.py,sha256=0-JaW0m9zRvFESvya34Saomy62-__KTKpfsvQJRBvZw,5668
|
||||
gevent/tests/test__core_async.py,sha256=vySo8IqVpKPhd_ljN_cEmxV_ng5ORxKBzJY-Gy-7tL4,547
|
||||
gevent/tests/test__core_callback.py,sha256=1zjZvKOavnYIzXiJpgtn0JjTSpwXA2Syx5U1OueEN54,471
|
||||
gevent/tests/test__core_fork.py,sha256=DNq33dpct7xiD6A_M1atR2L9wi27y-KCwcoy5hCt-ms,1830
|
||||
gevent/tests/test__core_loop_run.py,sha256=Yi1xKGcqGWaBVeu54XGDWISFklYXIc7r2d-Xgz71gsU,668
|
||||
gevent/tests/test__core_loop_run_sig_mod.py,sha256=qaD4Q7Z0UXU_pevsXMjaylPoABFc-rfNToXi36ygIEE,597
|
||||
gevent/tests/test__core_stat.py,sha256=YvqLSe-9j5tIFC6MoPQhD5_0MdBtxrbVagp4o0jzpw8,3754
|
||||
gevent/tests/test__core_timer.py,sha256=e6VG-IHLiQ3OkrTOYGiLMX4VdU6RLG3UoA69uao2xG8,4330
|
||||
gevent/tests/test__core_watcher.py,sha256=ULftUAJqrMLYgzItmSzEosgeagKbI72m0oheKn14vYo,3573
|
||||
gevent/tests/test__destroy.py,sha256=UjbbX-16T0Kjpb2Y0IHhnE7n3-6PD18eKTxdFCHlBaM,1682
|
||||
gevent/tests/test__destroy_default_loop.py,sha256=JAja9tOyflLkKonpJW4TDmHHKVOnnSMx95_OxnfEkwA,1657
|
||||
gevent/tests/test__doctests.py,sha256=aZqNLQDOpyvFYkhmqgXyDRhtV3CnN50H4OnZkp0vC0E,3613
|
||||
gevent/tests/test__environ.py,sha256=FHBRtFqtDQbDU4qBBb5XHGRDd7Myh6wj4KJFVqmB2lg,520
|
||||
gevent/tests/test__event.py,sha256=QHboAng3_0tuw_AnyCQFIHFnGs3LgaV70Gz4cEqk6ek,7511
|
||||
gevent/tests/test__events.py,sha256=8yEdTm-oZdWvODHhIACN_ZOZQCH-oqZ_yDGbuAR_wQk,1268
|
||||
gevent/tests/test__example_echoserver.py,sha256=uS8U1qV3wV_9zK40ZMP41dplVgC1RruSrgGr0gzK5vI,1197
|
||||
gevent/tests/test__example_portforwarder.py,sha256=AgA1rWRkrkt4Jz4D6hxFLR109Ca18-Q5Bl3PssKgZ3U,2030
|
||||
gevent/tests/test__example_udp_client.py,sha256=42k_6BAWaIsy876GUJJI48SJJCGPtBnBoeAStSQPX38,786
|
||||
gevent/tests/test__example_udp_server.py,sha256=HKE94iVWq-qs-K5ROcIIOkZETGVWU1erPOE_dinvz_0,512
|
||||
gevent/tests/test__examples.py,sha256=bKo_UR91wK3iGbh4z6TP3du0OfKgpDnj7ku0-RTzcOo,2246
|
||||
gevent/tests/test__exc_info.py,sha256=23iWivgGUbroxrgJ9r1Be2NDiv8E4zEVUB9tnOPI9WI,1381
|
||||
gevent/tests/test__execmodules.py,sha256=1D-P4MmCOldJf-Dc-dVKrJgDnDj2km0MGPlqV6IMZjI,871
|
||||
gevent/tests/test__fileobject.py,sha256=1UTOw0-pTgKEDmyCAbPANiQRWFtuF0KH294m9QlhGqI,8682
|
||||
gevent/tests/test__getaddrinfo_import.py,sha256=Z4rjp1EFnUQin20Qxg_w7m1Fkkn21LuKTPzvUPoPIqg,334
|
||||
gevent/tests/test__greenio.py,sha256=ar48aU9uSrfFsPslpeE6Q19SPSs8_u9Hdr-RVrM2Mr4,4559
|
||||
gevent/tests/test__greenlet.py,sha256=uesiAG-p2b5LKNbkVwBhR2ioF6MjpluMlAolC7eYTHA,25050
|
||||
gevent/tests/test__greenletset.py,sha256=6jGMzRbFGNdLmub8-eVIw9iOZfEQ0GaXdqvzbNxvpEE,4490
|
||||
gevent/tests/test__greenness.py,sha256=4iiDkwZmagF_7cxqUKBL1LnERP-h8AnPH96YVA6Lt7s,2605
|
||||
gevent/tests/test__hub.py,sha256=ktpNQLmZFs2Ncx0MFtRbXU3nIRleMyeRHKClemm7oho,11233
|
||||
gevent/tests/test__hub_join.py,sha256=hVk84a1yZ6liO2ovSlunzwC4fPJ8cZAk0wvOoxui1f8,485
|
||||
gevent/tests/test__hub_join_timeout.py,sha256=D7_mRFIRY-yPrxa8YbCLtSH75PfLjRePy069yJgZRF4,2905
|
||||
gevent/tests/test__import_blocking_in_greenlet.py,sha256=TnqXgCo-JsrpoWuIDXbdn555kuXSj4fdSGRGoXZJr3w,431
|
||||
gevent/tests/test__import_wait.py,sha256=cEy4415W-meuNeXm6s8b1meXtvrmSj7nieOV8hNvgYY,143
|
||||
gevent/tests/test__issue112.py,sha256=kqropEsaNonQ70O6nHGZCGUoueLdB6LgRB73a4SZyqU,370
|
||||
gevent/tests/test__issue230.py,sha256=wqiwH_D2Qt1A2CJ_tWZAv50p5AoMis_ZRhEE1Qz58Vk,392
|
||||
gevent/tests/test__issue330.py,sha256=KlMXB1G1rvRqN5PzsJPs3YKwAsxPps52_9MoUtoMXNc,2259
|
||||
gevent/tests/test__issue467.py,sha256=q02FmOgt9IA1zBE6dqCsKs1AxeJ2_-s62SiNIyZXev8,948
|
||||
gevent/tests/test__issue6.py,sha256=_VDF4xiqM1HJev4wAUIaD3ELVG4HlnTWowpbdbwqXWc,1350
|
||||
gevent/tests/test__issue600.py,sha256=LhkXP-F1cdczD1oFlJbeGUegOLHGWOg9IVJN9XfEgRE,1306
|
||||
gevent/tests/test__issue607.py,sha256=uGWlqsn9ySDg40WXrpoRBJXoDDSzK5yDpcl7I6frc2o,1080
|
||||
gevent/tests/test__issue639.py,sha256=ZeDfYeBgB4FxgGsm9RPSoNnjlcvf_eThUMlut-5Y5GI,55
|
||||
gevent/tests/test__issue_728.py,sha256=1u6WSToRxMYe70aLU5vMhrWSZ_OHtwN9oP6L4UXXywg,212
|
||||
gevent/tests/test__issues461_471.py,sha256=q6B4wAl5nbrrOxFNoKTzcp1hc4Z-etD042p8t0NQidQ,3576
|
||||
gevent/tests/test__iwait.py,sha256=uzef1gKSo8dDbciyjZobklIXNDdc-B0ehEKb3iIn2Bg,1205
|
||||
gevent/tests/test__joinall.py,sha256=RF92Mw-qLxdrnJHE0xfuSMLuM_LfFveDsXth35j8C_M,111
|
||||
gevent/tests/test__local.py,sha256=SrkwrrprNxQspzYqDXZ7GidI0wfO7Og-vUVE4vOGeq8,11742
|
||||
gevent/tests/test__loop_callback.py,sha256=AMDWCjzrH8fr3dvMsrEFIzRMB2AjKcRq0ZfYt9fE57E,161
|
||||
gevent/tests/test__makefile_ref.py,sha256=a4wMUCY21dRsP2bDiCsOBWi1DqAxUzu5HS_hts5NSVI,17102
|
||||
gevent/tests/test__memleak.py,sha256=ijqrQ7qUwrWxdjLUKN4rtVPoTZVLv9LJphkd1GFxp8Y,1275
|
||||
gevent/tests/test__monkey.py,sha256=eT5rgqT2j3m-iobo8pSo8a7vGflYz9_saPzF7Im0DY8,5995
|
||||
gevent/tests/test__monkey_builtins_future.py,sha256=ZUJj7wWz9jEa9vDPSdEPrjqewiUwBspmtgh7RN8LymA,521
|
||||
gevent/tests/test__monkey_hub_in_thread.py,sha256=iMWv4a8Agy_llZypYxXo62kSB7LLTdNG5u9N_eHKIg8,520
|
||||
gevent/tests/test__monkey_logging.py,sha256=6uqY5n7TfzyVaiIFSBnSkW0OluQgcuFu2u7k9PC9q-w,1121
|
||||
gevent/tests/test__monkey_multiple_imports.py,sha256=QwmJJ4r3RXOQhti_5vj3Let0zllXzq4GwDY8NqzJUuQ,296
|
||||
gevent/tests/test__monkey_queue.py,sha256=jXMUF1DYdbSvwoFvReQFH86f0GfnTMrUNkP-R_eqels,12335
|
||||
gevent/tests/test__monkey_scope.py,sha256=JhgnSgGBvNYka-dqP9m9kLMLVEA8JZ2qZ7L1w95kzHM,1770
|
||||
gevent/tests/test__monkey_selectors.py,sha256=Oca3HpplLS9r6jSfFvM0E-NVoZEO4Zxeg-eQaD848iY,599
|
||||
gevent/tests/test__monkey_sigchld.py,sha256=f9o9x5LjUie6L2Z4n8keUPB1VlzsF7lX0DJOTQnn_FI,2103
|
||||
gevent/tests/test__monkey_sigchld_2.py,sha256=scXDckYto7VYr4oTPYpvMgcfL5AOavVpOtdKA2jMxEM,1580
|
||||
gevent/tests/test__monkey_sigchld_3.py,sha256=TKc67r-c7u6jG3QVN_KDPy01PYrQQkRimnH0bcPAeU8,1471
|
||||
gevent/tests/test__monkey_ssl_warning.py,sha256=-UkFSgrOLE_jmmeIOqs_sFIJ-LSVmvuXZKjN7r1W_nY,1022
|
||||
gevent/tests/test__monkey_ssl_warning2.py,sha256=NRlZ8-s-doOC6xNkQbaiVPIaqOtFBfEmQzyrKsUukww,1255
|
||||
gevent/tests/test__monkey_ssl_warning3.py,sha256=WZEOHQoewYAuYJu0f8UMjpmRzaR0B-sf0wBhvaRKTEQ,1330
|
||||
gevent/tests/test__nondefaultloop.py,sha256=Y3IrgT8SF3SmO3A1IlvC0nF4GCqxzvKES0KqvO72crE,204
|
||||
gevent/tests/test__order.py,sha256=iI8wh316sNia20IkHx7wSnE_LKdCsse6Q89xVkQev1U,1125
|
||||
gevent/tests/test__os.py,sha256=zoRh5Z5138wSU_sWgncYbSXSfr-v7HzqJ7MBnfdXUSc,5827
|
||||
gevent/tests/test__pool.py,sha256=33weK0xDFzH_-_PWrrBj-iIwxkAbpGDoMEZFUd07D3Q,17926
|
||||
gevent/tests/test__pywsgi.py,sha256=gtMSayFtxMuiMmwzWFpMQEBReYdIXVXhbCmqxAfzKDY,64069
|
||||
gevent/tests/test__queue.py,sha256=HTQVGLSkJYYesHqOSBq20DLb1ibzykCYVK-i6-NM9S8,12918
|
||||
gevent/tests/test__real_greenlet.py,sha256=58odp7leEc7C-CBIE_8Fposz8qS3y2nZp-DrPAuVo7E,464
|
||||
gevent/tests/test__refcount.py,sha256=Di1TWn7evn9elIiLtKjvq4g0Aeat0N0QCFZPmeGHzz8,5099
|
||||
gevent/tests/test__refcount_core.py,sha256=MaDRNKzNoDoCPFTZuHMpehszx7eMxUw83uTzjKIJZJI,509
|
||||
gevent/tests/test__select.py,sha256=kefxR5a1oKULgsNKb1NDnqRwpycL5swCcUUCtw0pIHI,3843
|
||||
gevent/tests/test__semaphore.py,sha256=bjRsbb7iHO_CRUcJOf8NOkwArvepyIeY2OPWueT3gFU,2542
|
||||
gevent/tests/test__server.py,sha256=Wrqz_M1--WYRobmtKFv5Py-uppvXJJIqLHIXvf1IvAg,16756
|
||||
gevent/tests/test__server_pywsgi.py,sha256=NI0mP2XbtsmCEfPj3OwKnzIpWruf15QCukCGnY5O4n0,2919
|
||||
gevent/tests/test__signal.py,sha256=aruabTlycpxjUwORKfB4JUS029dU9Wq8GsYWNFmicjU,4923
|
||||
gevent/tests/test__sleep0.py,sha256=uoruOPjsaPk1m0thN_4UppH4kW4k9fHQXDuLXnc3u5k,139
|
||||
gevent/tests/test__socket.py,sha256=Eb34BqW7dvSjkX-9UckAFuW7vxSPQ2ly0z4Jpp5wQhE,14704
|
||||
gevent/tests/test__socket_close.py,sha256=z-ecsccygna2cczb0SDkbwwee0f6JziWRvssm_PxHJI,1807
|
||||
gevent/tests/test__socket_dns.py,sha256=U8x2L4qxzfdglLrr0qNEpalShr7koEslvRbyMR_w0S4,24226
|
||||
gevent/tests/test__socket_dns6.py,sha256=trqgbuh3uGZoFNEcU5TtakLJeXQIpA9w-OtgLmSX5TI,2453
|
||||
gevent/tests/test__socket_errors.py,sha256=AnMt04cPU4iQjIYet_CyLttK1FO7t6mMPXk8QWmjXPs,1718
|
||||
gevent/tests/test__socket_ex.py,sha256=gOL_NKAVHf4WTsdDOv8KPDYMqiyu9XOtxMtYLbBpx08,1128
|
||||
gevent/tests/test__socket_send_memoryview.py,sha256=_C_8FbQhmCUi8pCqTYh0tSeTMy5K6JHptS8NqWkYtNc,886
|
||||
gevent/tests/test__socket_ssl.py,sha256=aS6ACvSO6K30qHwhSFXBXyIkg6RZ9-YFbpd-uunDQMo,828
|
||||
gevent/tests/test__socket_timeout.py,sha256=fJh7XqjJGlol-Lkr7echpJqD7pQBGUaLd5RO6APVf-M,1407
|
||||
gevent/tests/test__socketpair.py,sha256=mhxBAxWxWSnse0_NfEfA3_Cfh07V2tOkX4ygCMfhKJQ,851
|
||||
gevent/tests/test__ssl.py,sha256=e7iSoq07_lAkz8-UOPsW_v5wHyatGCNiSJQas_t7eGs,4070
|
||||
gevent/tests/test__subprocess.py,sha256=QVLlWF4zsUfonzQf1CYWPfQrAV3HD3URXk-1dhqFe84,18984
|
||||
gevent/tests/test__subprocess_interrupted.py,sha256=Blz-qIHuXGdSpI9D__zNn0WEcjUwSbaEhN7c_ikZ9_M,790
|
||||
gevent/tests/test__subprocess_poll.py,sha256=Y7CYIeryaY-WVXN9fGUhLDLqzq1KrSQKGdfUF1Ry0SQ,179
|
||||
gevent/tests/test__systemerror.py,sha256=SFKdXD3MbGhHnp6jwAOeaOFvrhLuoXmv0o3I7NL6NCk,3184
|
||||
gevent/tests/test__threading.py,sha256=s8nR6ik-dEkzR_3Nb2nftPfEW7M9pJVArqB0xPEaTJU,1443
|
||||
gevent/tests/test__threading_2.py,sha256=lbmU3HWtTCzWMVaGJI_mzwYJ3PBvUiU9JtIxsXJgz7k,22811
|
||||
gevent/tests/test__threading_before_monkey.py,sha256=Z-bNhXH218tp6vFFwqGmdL2EnSR26ydp9MZh9tH1zY4,675
|
||||
gevent/tests/test__threading_holding_lock_while_monkey.py,sha256=OnttJRH3ukPydDLll5KThIOEiBV1MkucUGIdO4eHDMA,337
|
||||
gevent/tests/test__threading_monkey_in_thread.py,sha256=jRbwWX-SA95tIoxITU4Yq83kRGO3Z8ZfDI5YVcrN004,2393
|
||||
gevent/tests/test__threading_native_before_monkey.py,sha256=qQe7WMSc_KCWhejfq-Hfa4Yv4Gb0vRP0F0c05GD9RQ0,1661
|
||||
gevent/tests/test__threading_patched_local.py,sha256=sXtfMuPgAEF5gl646OM-MELvQX3MZACtyU54ClWAku8,523
|
||||
gevent/tests/test__threading_vs_settrace.py,sha256=2iiHS0DWnHJ0gSLvVLcaKxoF-SofUEs_7exrSecDfr0,5051
|
||||
gevent/tests/test__threadpool.py,sha256=chJVc6BDfJ24MkKDTkwd-aFE-VrSDHdm2pmsZPkk6d8,19480
|
||||
gevent/tests/test__threadpool_executor_patched.py,sha256=UMJlfgDDCykK9gc-mC8w3JjeLJsk_k7DGoBfBKhb_Co,379
|
||||
gevent/tests/test__timeout.py,sha256=uRjOchrp6NVrjkxrCW9UMd6r5iheRe8EjzpW5XDD7Bg,5243
|
||||
gevent/tests/test__util.py,sha256=lA-L0D3a0IcefEwJm0E1qRxrvJ3kkKxBjwJfnr0t8v4,10149
|
||||
gevent/tests/test_server.crt,sha256=B4Oj77EfnNyQIPmD9UxB_0QZuUT5evuDcGhSvx9ClRk,887
|
||||
gevent/tests/test_server.key,sha256=ZtnbGm6-aP6CTTJPoBv1QoIEz6SgeOfgHKuWTJ4cxhQ,891
|
||||
gevent/tests/tests_that_dont_do_leakchecks.txt,sha256=hqT3OFiGvKj8V8jugeRR42mLIZ9tS8xHRQK5sS4sYR8,204
|
||||
gevent/tests/tests_that_dont_monkeypatch.txt,sha256=DeqqzP8rEzNtssCO1OPDwLJSyb3JSkQuEI9cAkpnATI,568
|
||||
gevent/tests/tests_that_dont_use_resolver.txt,sha256=vE3Mhrrtx_N7WB1_fxT9dySMRiAO3eMH5LNhuWol84A,3171
|
||||
gevent/tests/wrongcert.pem,sha256=6n4u7wcalNKCtnMsq7J3Y7uOiez901ZLiH38oE0jGUM,1880
|
||||
gevent/thread.py,sha256=XktB9n2NaVYycKhRVoYjGwizq0kjU2ohihgSyjdQssU,3608
|
||||
gevent/threading.py,sha256=hOmCvuScgvK54WFYjbru76pg6Z1-iJkOqaDf4CgQ-AY,8634
|
||||
gevent/threadpool.py,sha256=CNnBZXQ7t67I9VMlKFzWg42d8MZkIAhXWAdMHbBsI8M,20228
|
||||
gevent/time.py,sha256=C0eRlHq0rBxy9tC_SsIywkYaBNlwO1bc04qFi2OceB4,491
|
||||
gevent/timeout.py,sha256=kURx8Rwi9__4OPbKQoB_UPHYGd_WoOsm1dz5r60h_Rc,12692
|
||||
gevent/util.py,sha256=RF24Ji8psB5ZC4r7BLUFK3cNSKSOH6NcubA4umaPxOQ,19617
|
||||
gevent/win32util.py,sha256=WBk_YNf_kk3QF3PMUdScqgM_PreF4OBhfXq2W5264n0,3637
|
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.32.3)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp37-cp37m-win32
|
||||
|
@ -0,0 +1,3 @@
|
||||
[gevent.plugins.monkey.will_patch_all]
|
||||
signal_os_incompat = gevent.monkey:_subscribe_signal_os
|
||||
|
@ -0,0 +1 @@
|
||||
gevent
|
Binary file not shown.
@ -0,0 +1,53 @@
|
||||
cimport cython
|
||||
|
||||
from gevent.__greenlet_primitives cimport SwitchOutGreenletWithLoop
|
||||
from gevent.__hub_local cimport get_hub_noargs as get_hub
|
||||
|
||||
cdef InvalidSwitchError
|
||||
cdef Timeout
|
||||
cdef bint _greenlet_imported
|
||||
|
||||
cdef extern from "greenlet/greenlet.h":
|
||||
|
||||
ctypedef class greenlet.greenlet [object PyGreenlet]:
|
||||
pass
|
||||
|
||||
# These are actually macros and so much be included
|
||||
# (defined) in each .pxd, as are the two functions
|
||||
# that call them.
|
||||
greenlet PyGreenlet_GetCurrent()
|
||||
void PyGreenlet_Import()
|
||||
|
||||
cdef inline greenlet getcurrent():
|
||||
return PyGreenlet_GetCurrent()
|
||||
|
||||
cdef inline void greenlet_init():
|
||||
global _greenlet_imported
|
||||
if not _greenlet_imported:
|
||||
PyGreenlet_Import()
|
||||
_greenlet_imported = True
|
||||
|
||||
cdef void _init()
|
||||
|
||||
cdef class AbstractLinkable(object):
|
||||
# We declare the __weakref__ here in the base (even though
|
||||
# that's not really what we want) as a workaround for a Cython
|
||||
# issue we see reliably on 3.7b4 and sometimes on 3.6. See
|
||||
# https://github.com/cython/cython/issues/2270
|
||||
cdef object __weakref__
|
||||
|
||||
cdef readonly SwitchOutGreenletWithLoop hub
|
||||
|
||||
cdef _notifier
|
||||
cdef set _links
|
||||
cdef bint _notify_all
|
||||
|
||||
cpdef rawlink(self, callback)
|
||||
cpdef bint ready(self)
|
||||
cpdef unlink(self, callback)
|
||||
|
||||
cdef _check_and_notify(self)
|
||||
cpdef _notify_links(self)
|
||||
cdef _wait_core(self, timeout, catch=*)
|
||||
cdef _wait_return_value(self, waited, wait_success)
|
||||
cdef _wait(self, timeout=*)
|
Binary file not shown.
@ -0,0 +1,47 @@
|
||||
cimport cython
|
||||
|
||||
# This file must not cimport anything from gevent.
|
||||
cdef get_objects
|
||||
cdef wref
|
||||
|
||||
cdef BlockingSwitchOutError
|
||||
|
||||
|
||||
cdef extern from "greenlet/greenlet.h":
|
||||
|
||||
ctypedef class greenlet.greenlet [object PyGreenlet]:
|
||||
pass
|
||||
|
||||
# These are actually macros and so much be included
|
||||
# (defined) in each .pxd, as are the two functions
|
||||
# that call them.
|
||||
greenlet PyGreenlet_GetCurrent()
|
||||
object PyGreenlet_Switch(greenlet self, void* args, void* kwargs)
|
||||
void PyGreenlet_Import()
|
||||
|
||||
@cython.final
|
||||
cdef inline greenlet getcurrent():
|
||||
return PyGreenlet_GetCurrent()
|
||||
|
||||
cdef bint _greenlet_imported
|
||||
|
||||
cdef inline void greenlet_init():
|
||||
global _greenlet_imported
|
||||
if not _greenlet_imported:
|
||||
PyGreenlet_Import()
|
||||
_greenlet_imported = True
|
||||
|
||||
cdef inline object _greenlet_switch(greenlet self):
|
||||
return PyGreenlet_Switch(self, NULL, NULL)
|
||||
|
||||
cdef class TrackedRawGreenlet(greenlet):
|
||||
pass
|
||||
|
||||
cdef class SwitchOutGreenletWithLoop(TrackedRawGreenlet):
|
||||
cdef public loop
|
||||
|
||||
cpdef switch(self)
|
||||
cpdef switch_out(self)
|
||||
|
||||
|
||||
cpdef list get_reachable_greenlets()
|
Binary file not shown.
@ -0,0 +1,17 @@
|
||||
from gevent.__greenlet_primitives cimport SwitchOutGreenletWithLoop
|
||||
|
||||
cdef _threadlocal
|
||||
|
||||
cpdef get_hub_class()
|
||||
cpdef SwitchOutGreenletWithLoop get_hub_if_exists()
|
||||
cpdef set_hub(SwitchOutGreenletWithLoop hub)
|
||||
cpdef get_loop()
|
||||
cpdef set_loop(loop)
|
||||
|
||||
# We can't cdef this, it won't do varargs.
|
||||
# cpdef WaitOperationsGreenlet get_hub(*args, **kwargs)
|
||||
|
||||
# XXX: TODO: Move the definition of TrackedRawGreenlet
|
||||
# into a file that can be cython compiled so get_hub can
|
||||
# return that.
|
||||
cpdef SwitchOutGreenletWithLoop get_hub_noargs()
|
Binary file not shown.
@ -0,0 +1,73 @@
|
||||
cimport cython
|
||||
|
||||
from gevent.__greenlet_primitives cimport SwitchOutGreenletWithLoop
|
||||
from gevent.__hub_local cimport get_hub_noargs as get_hub
|
||||
|
||||
from gevent.__waiter cimport Waiter
|
||||
from gevent.__waiter cimport MultipleWaiter
|
||||
|
||||
cdef InvalidSwitchError
|
||||
cdef _waiter
|
||||
cdef _greenlet_primitives
|
||||
cdef traceback
|
||||
cdef _timeout_error
|
||||
cdef Timeout
|
||||
|
||||
|
||||
cdef extern from "greenlet/greenlet.h":
|
||||
|
||||
ctypedef class greenlet.greenlet [object PyGreenlet]:
|
||||
pass
|
||||
|
||||
# These are actually macros and so much be included
|
||||
# (defined) in each .pxd, as are the two functions
|
||||
# that call them.
|
||||
greenlet PyGreenlet_GetCurrent()
|
||||
void PyGreenlet_Import()
|
||||
|
||||
@cython.final
|
||||
cdef inline greenlet getcurrent():
|
||||
return PyGreenlet_GetCurrent()
|
||||
|
||||
cdef bint _greenlet_imported
|
||||
|
||||
cdef inline void greenlet_init():
|
||||
global _greenlet_imported
|
||||
if not _greenlet_imported:
|
||||
PyGreenlet_Import()
|
||||
_greenlet_imported = True
|
||||
|
||||
|
||||
cdef class WaitOperationsGreenlet(SwitchOutGreenletWithLoop):
|
||||
|
||||
cpdef wait(self, watcher)
|
||||
cpdef cancel_wait(self, watcher, error, close_watcher=*)
|
||||
cpdef _cancel_wait(self, watcher, error, close_watcher)
|
||||
|
||||
cdef class _WaitIterator:
|
||||
cdef SwitchOutGreenletWithLoop _hub
|
||||
cdef MultipleWaiter _waiter
|
||||
cdef _switch
|
||||
cdef _timeout
|
||||
cdef _objects
|
||||
cdef _timer
|
||||
cdef Py_ssize_t _count
|
||||
cdef bint _begun
|
||||
|
||||
|
||||
cdef _begin(self)
|
||||
cdef _cleanup(self)
|
||||
|
||||
cpdef __enter__(self)
|
||||
cpdef __exit__(self, typ, value, tb)
|
||||
|
||||
|
||||
cpdef iwait_on_objects(objects, timeout=*, count=*)
|
||||
cpdef wait_on_objects(objects=*, timeout=*, count=*)
|
||||
|
||||
cdef _primitive_wait(watcher, timeout, timeout_exc, WaitOperationsGreenlet hub)
|
||||
cpdef wait_on_watcher(watcher, timeout=*, timeout_exc=*, WaitOperationsGreenlet hub=*)
|
||||
cpdef wait_read(fileno, timeout=*, timeout_exc=*)
|
||||
cpdef wait_write(fileno, timeout=*, timeout_exc=*, event=*)
|
||||
cpdef wait_readwrite(fileno, timeout=*, timeout_exc=*, event=*)
|
||||
cpdef wait_on_socket(socket, watcher, timeout_exc=*)
|
Binary file not shown.
@ -0,0 +1,26 @@
|
||||
cimport cython
|
||||
|
||||
cdef extern from "Python.h":
|
||||
|
||||
ctypedef class weakref.ref [object PyWeakReference]:
|
||||
pass
|
||||
|
||||
cdef heappop
|
||||
cdef heappush
|
||||
cdef object WeakKeyDictionary
|
||||
cdef type ref
|
||||
|
||||
@cython.internal
|
||||
@cython.final
|
||||
cdef class ValuedWeakRef(ref):
|
||||
cdef object value
|
||||
|
||||
@cython.final
|
||||
cdef class IdentRegistry:
|
||||
cdef object _registry
|
||||
cdef list _available_idents
|
||||
|
||||
@cython.final
|
||||
cpdef object get_ident(self, obj)
|
||||
@cython.final
|
||||
cpdef _return_ident(self, ValuedWeakRef ref)
|
Binary file not shown.
@ -0,0 +1,45 @@
|
||||
cimport cython
|
||||
from gevent._greenlet cimport Greenlet
|
||||
from gevent.__semaphore cimport Semaphore
|
||||
from gevent._queue cimport UnboundQueue
|
||||
|
||||
@cython.freelist(100)
|
||||
@cython.internal
|
||||
@cython.final
|
||||
cdef class Failure:
|
||||
cdef readonly exc
|
||||
cdef raise_exception
|
||||
|
||||
cdef inline _raise_exc(Failure failure)
|
||||
|
||||
cdef class IMapUnordered(Greenlet):
|
||||
cdef bint _zipped
|
||||
cdef func
|
||||
cdef iterable
|
||||
cdef spawn
|
||||
cdef Semaphore _result_semaphore
|
||||
cdef int _outstanding_tasks
|
||||
cdef int _max_index
|
||||
|
||||
cdef readonly UnboundQueue queue
|
||||
cdef readonly bint finished
|
||||
|
||||
cdef _inext(self)
|
||||
cdef _ispawn(self, func, item, int item_index)
|
||||
|
||||
# Passed to greenlet.link
|
||||
cpdef _on_result(self, greenlet)
|
||||
# Called directly
|
||||
cdef _on_finish(self, exception)
|
||||
|
||||
cdef _iqueue_value_for_success(self, greenlet)
|
||||
cdef _iqueue_value_for_failure(self, greenlet)
|
||||
cdef _iqueue_value_for_self_finished(self)
|
||||
cdef _iqueue_value_for_self_failure(self, exception)
|
||||
|
||||
cdef class IMap(IMapUnordered):
|
||||
cdef int index
|
||||
cdef dict _results
|
||||
|
||||
@cython.locals(index=int)
|
||||
cdef _inext(self)
|
@ -0,0 +1,180 @@
|
||||
# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details.
|
||||
"""
|
||||
gevent is a coroutine-based Python networking library that uses greenlet
|
||||
to provide a high-level synchronous API on top of libev event loop.
|
||||
|
||||
See http://www.gevent.org/ for the documentation.
|
||||
|
||||
.. versionchanged:: 1.3a2
|
||||
Add the `config` object.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
_version_info = namedtuple('version_info',
|
||||
('major', 'minor', 'micro', 'releaselevel', 'serial'))
|
||||
|
||||
#: The programatic version identifier. The fields have (roughly) the
|
||||
#: same meaning as :data:`sys.version_info`
|
||||
#: .. deprecated:: 1.2
|
||||
#: Use ``pkg_resources.parse_version(__version__)`` (or the equivalent
|
||||
#: ``packaging.version.Version(__version__)``).
|
||||
version_info = _version_info(1, 4, 0, 'dev', 0)
|
||||
|
||||
#: The human-readable PEP 440 version identifier.
|
||||
#: Use ``pkg_resources.parse_version(__version__)`` or
|
||||
#: ``packaging.version.Version(__version__)`` to get a machine-usable
|
||||
#: value.
|
||||
__version__ = '1.4.0'
|
||||
|
||||
|
||||
__all__ = [
|
||||
'get_hub',
|
||||
'Greenlet',
|
||||
'GreenletExit',
|
||||
'spawn',
|
||||
'spawn_later',
|
||||
'spawn_raw',
|
||||
'iwait',
|
||||
'wait',
|
||||
'killall',
|
||||
'Timeout',
|
||||
'with_timeout',
|
||||
'getcurrent',
|
||||
'sleep',
|
||||
'idle',
|
||||
'kill',
|
||||
'signal', # deprecated
|
||||
'signal_handler',
|
||||
'fork',
|
||||
'reinit',
|
||||
'getswitchinterval',
|
||||
'setswitchinterval',
|
||||
# Added in 1.3a2
|
||||
'config',
|
||||
]
|
||||
|
||||
|
||||
import sys
|
||||
if sys.platform == 'win32':
|
||||
# trigger WSAStartup call
|
||||
import socket # pylint:disable=unused-import,useless-suppression
|
||||
del socket
|
||||
|
||||
try:
|
||||
# Floating point number, in number of seconds,
|
||||
# like time.time
|
||||
getswitchinterval = sys.getswitchinterval
|
||||
setswitchinterval = sys.setswitchinterval
|
||||
except AttributeError:
|
||||
# Running on Python 2
|
||||
_switchinterval = 0.005
|
||||
|
||||
def getswitchinterval():
|
||||
return _switchinterval
|
||||
|
||||
def setswitchinterval(interval):
|
||||
# Weed out None and non-numbers. This is not
|
||||
# exactly exception compatible with the Python 3
|
||||
# versions.
|
||||
if interval > 0:
|
||||
global _switchinterval
|
||||
_switchinterval = interval
|
||||
|
||||
from gevent._config import config
|
||||
from gevent._hub_local import get_hub
|
||||
from gevent._hub_primitives import iwait_on_objects as iwait
|
||||
from gevent._hub_primitives import wait_on_objects as wait
|
||||
|
||||
from gevent.greenlet import Greenlet, joinall, killall
|
||||
joinall = joinall # export for pylint
|
||||
spawn = Greenlet.spawn
|
||||
spawn_later = Greenlet.spawn_later
|
||||
#: The singleton configuration object for gevent.
|
||||
config = config
|
||||
|
||||
from gevent.timeout import Timeout, with_timeout
|
||||
from gevent.hub import getcurrent, GreenletExit, spawn_raw, sleep, idle, kill, reinit
|
||||
try:
|
||||
from gevent.os import fork
|
||||
except ImportError:
|
||||
__all__.remove('fork')
|
||||
|
||||
# See https://github.com/gevent/gevent/issues/648
|
||||
# A temporary backwards compatibility shim to enable users to continue
|
||||
# to treat 'from gevent import signal' as a callable, to matter whether
|
||||
# the 'gevent.signal' module has been imported first
|
||||
from gevent.hub import signal as _signal_class
|
||||
signal_handler = _signal_class
|
||||
from gevent import signal as _signal_module
|
||||
|
||||
# The object 'gevent.signal' must:
|
||||
# - be callable, returning a gevent.hub.signal;
|
||||
# - answer True to isinstance(gevent.signal(...), gevent.signal);
|
||||
# - answer True to isinstance(gevent.signal(...), gevent.hub.signal)
|
||||
# - have all the attributes of the module 'gevent.signal';
|
||||
# - answer True to isinstance(gevent.signal, types.ModuleType) (optional)
|
||||
|
||||
# The only way to do this is to use a metaclass, an instance of which (a class)
|
||||
# is put in sys.modules and is substituted for gevent.hub.signal.
|
||||
# This handles everything except the last one.
|
||||
|
||||
|
||||
class _signal_metaclass(type):
|
||||
|
||||
def __getattr__(cls, name):
|
||||
return getattr(_signal_module, name)
|
||||
|
||||
def __setattr__(cls, name, value):
|
||||
setattr(_signal_module, name, value)
|
||||
|
||||
def __instancecheck__(cls, instance):
|
||||
return isinstance(instance, _signal_class)
|
||||
|
||||
def __dir__(cls):
|
||||
return dir(_signal_module)
|
||||
|
||||
|
||||
class signal(object):
|
||||
|
||||
__doc__ = _signal_module.__doc__
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
return _signal_class(*args, **kwargs)
|
||||
|
||||
|
||||
# The metaclass is applied after the class declaration
|
||||
# for Python 2/3 compatibility
|
||||
signal = _signal_metaclass(str("signal"),
|
||||
(),
|
||||
dict(signal.__dict__))
|
||||
|
||||
sys.modules['gevent.signal'] = signal
|
||||
sys.modules['gevent.hub'].signal = signal
|
||||
|
||||
del sys
|
||||
|
||||
|
||||
# the following makes hidden imports visible to freezing tools like
|
||||
# py2exe. see https://github.com/gevent/gevent/issues/181
|
||||
# This is not well maintained or tested, though, so it likely becomes
|
||||
# outdated on each major release.
|
||||
|
||||
def __dependencies_for_freezing(): # pragma: no cover
|
||||
# pylint:disable=unused-import
|
||||
from gevent import core
|
||||
from gevent import resolver_thread
|
||||
from gevent import resolver_ares
|
||||
from gevent import socket as _socket
|
||||
from gevent import threadpool
|
||||
from gevent import thread
|
||||
from gevent import threading
|
||||
from gevent import select
|
||||
from gevent import subprocess
|
||||
import pprint
|
||||
import traceback
|
||||
import signal as _signal
|
||||
|
||||
del __dependencies_for_freezing
|
Binary file not shown.
@ -0,0 +1,23 @@
|
||||
cimport cython
|
||||
|
||||
from gevent.__abstract_linkable cimport AbstractLinkable
|
||||
cdef Timeout
|
||||
|
||||
|
||||
cdef class Semaphore(AbstractLinkable):
|
||||
cdef public int counter
|
||||
|
||||
cpdef bint locked(self)
|
||||
cpdef int release(self) except -1000
|
||||
# We don't really want this to be public, but
|
||||
# threadpool uses it
|
||||
cpdef _start_notify(self)
|
||||
cpdef int wait(self, object timeout=*) except -1000
|
||||
cpdef bint acquire(self, int blocking=*, object timeout=*) except -1000
|
||||
cpdef __enter__(self)
|
||||
cpdef __exit__(self, object t, object v, object tb)
|
||||
|
||||
cdef class BoundedSemaphore(Semaphore):
|
||||
cdef readonly int _initial_value
|
||||
|
||||
cpdef int release(self) except -1000
|
Binary file not shown.
@ -0,0 +1,43 @@
|
||||
cimport cython
|
||||
|
||||
cdef sys
|
||||
cdef traceback
|
||||
|
||||
cdef settrace
|
||||
cdef getcurrent
|
||||
|
||||
cdef format_run_info
|
||||
|
||||
cdef perf_counter
|
||||
cdef gmctime
|
||||
|
||||
|
||||
cdef class GreenletTracer:
|
||||
cpdef readonly object active_greenlet
|
||||
cpdef readonly object previous_trace_function
|
||||
cpdef readonly Py_ssize_t greenlet_switch_counter
|
||||
|
||||
cdef bint _killed
|
||||
|
||||
cpdef _trace(self, str event, tuple args)
|
||||
|
||||
@cython.locals(did_switch=bint)
|
||||
cpdef did_block_hub(self, hub)
|
||||
|
||||
cpdef kill(self)
|
||||
|
||||
@cython.internal
|
||||
cdef class _HubTracer(GreenletTracer):
|
||||
cpdef readonly object hub
|
||||
cpdef readonly double max_blocking_time
|
||||
|
||||
|
||||
cdef class HubSwitchTracer(_HubTracer):
|
||||
cpdef readonly double last_entered_hub
|
||||
|
||||
cdef class MaxSwitchTracer(_HubTracer):
|
||||
cpdef readonly double max_blocking
|
||||
cpdef readonly double last_switch
|
||||
|
||||
@cython.locals(switched_at=double)
|
||||
cpdef _trace(self, str event, tuple args)
|
Binary file not shown.
@ -0,0 +1,48 @@
|
||||
cimport cython
|
||||
|
||||
from gevent.__greenlet_primitives cimport SwitchOutGreenletWithLoop
|
||||
from gevent.__hub_local cimport get_hub_noargs as get_hub
|
||||
|
||||
cdef sys
|
||||
cdef ConcurrentObjectUseError
|
||||
|
||||
|
||||
cdef bint _greenlet_imported
|
||||
cdef _NONE
|
||||
|
||||
cdef extern from "greenlet/greenlet.h":
|
||||
|
||||
ctypedef class greenlet.greenlet [object PyGreenlet]:
|
||||
pass
|
||||
|
||||
# These are actually macros and so much be included
|
||||
# (defined) in each .pxd, as are the two functions
|
||||
# that call them.
|
||||
greenlet PyGreenlet_GetCurrent()
|
||||
void PyGreenlet_Import()
|
||||
|
||||
cdef inline greenlet getcurrent():
|
||||
return PyGreenlet_GetCurrent()
|
||||
|
||||
cdef inline void greenlet_init():
|
||||
global _greenlet_imported
|
||||
if not _greenlet_imported:
|
||||
PyGreenlet_Import()
|
||||
_greenlet_imported = True
|
||||
|
||||
cdef class Waiter:
|
||||
cdef readonly SwitchOutGreenletWithLoop hub
|
||||
cdef readonly greenlet greenlet
|
||||
cdef readonly value
|
||||
cdef _exception
|
||||
|
||||
cpdef get(self)
|
||||
cpdef clear(self)
|
||||
|
||||
# cpdef of switch leads to parameter errors...
|
||||
#cpdef switch(self, value)
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class MultipleWaiter(Waiter):
|
||||
cdef list _values
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,202 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False
|
||||
"""
|
||||
Internal module, support for the linkable protocol for "event" like objects.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from gevent._hub_local import get_hub_noargs as get_hub
|
||||
|
||||
from gevent.exceptions import InvalidSwitchError
|
||||
from gevent.timeout import Timeout
|
||||
|
||||
locals()['getcurrent'] = __import__('greenlet').getcurrent
|
||||
locals()['greenlet_init'] = lambda: None
|
||||
|
||||
__all__ = [
|
||||
'AbstractLinkable',
|
||||
]
|
||||
|
||||
class AbstractLinkable(object):
|
||||
# Encapsulates the standard parts of the linking and notifying
|
||||
# protocol common to both repeatable events (Event, Semaphore) and
|
||||
# one-time events (AsyncResult).
|
||||
|
||||
__slots__ = ('hub', '_links', '_notifier', '_notify_all', '__weakref__')
|
||||
|
||||
def __init__(self):
|
||||
# Before this implementation, AsyncResult and Semaphore
|
||||
# maintained the order of notifications, but Event did not.
|
||||
|
||||
# In gevent 1.3, before Semaphore extended this class,
|
||||
# that was changed to not maintain the order. It was done because
|
||||
# Event guaranteed to only call callbacks once (a set) but
|
||||
# AsyncResult had no such guarantees.
|
||||
|
||||
# Semaphore likes to maintain order of callbacks, though,
|
||||
# so when it was added we went back to a list implementation
|
||||
# for storing callbacks. But we want to preserve the unique callback
|
||||
# property, so we manually check.
|
||||
|
||||
# We generally don't expect to have so many waiters (for any of those
|
||||
# objects) that testing membership and removing is a bottleneck.
|
||||
|
||||
# In PyPy 2.6.1 with Cython 0.23, `cdef public` or `cdef
|
||||
# readonly` or simply `cdef` attributes of type `object` can appear to leak if
|
||||
# a Python subclass is used (this is visible simply
|
||||
# instantiating this subclass if _links=[]). Our _links and
|
||||
# _notifier are such attributes, and gevent.thread subclasses
|
||||
# this class. Thus, we carefully manage the lifetime of the
|
||||
# objects we put in these attributes so that, in the normal
|
||||
# case of a semaphore used correctly (deallocated when it's not
|
||||
# locked and no one is waiting), the leak goes away (because
|
||||
# these objects are back to None). This can also be solved on PyPy
|
||||
# by simply not declaring these objects in the pxd file, but that doesn't work for
|
||||
# CPython ("No attribute...")
|
||||
# See https://github.com/gevent/gevent/issues/660
|
||||
self._links = set()
|
||||
self._notifier = None
|
||||
# This is conceptually a class attribute, defined here for ease of access in
|
||||
# cython. If it's true, when notifiers fire, all existing callbacks are called.
|
||||
# If its false, we only call callbacks as long as ready() returns true.
|
||||
self._notify_all = True
|
||||
# we don't want to do get_hub() here to allow defining module-level objects
|
||||
# without initializing the hub
|
||||
self.hub = None
|
||||
|
||||
def linkcount(self):
|
||||
# For testing: how many objects are linked to this one?
|
||||
return len(self._links)
|
||||
|
||||
def ready(self):
|
||||
# Instances must define this
|
||||
raise NotImplementedError
|
||||
|
||||
def _check_and_notify(self):
|
||||
# If this object is ready to be notified, begin the process.
|
||||
if self.ready() and self._links and not self._notifier:
|
||||
if self.hub is None:
|
||||
self.hub = get_hub()
|
||||
|
||||
self._notifier = self.hub.loop.run_callback(self._notify_links)
|
||||
|
||||
def rawlink(self, callback):
|
||||
"""
|
||||
Register a callback to call when this object is ready.
|
||||
|
||||
*callback* will be called in the :class:`Hub
|
||||
<gevent.hub.Hub>`, so it must not use blocking gevent API.
|
||||
*callback* will be passed one argument: this instance.
|
||||
"""
|
||||
if not callable(callback):
|
||||
raise TypeError('Expected callable: %r' % (callback, ))
|
||||
|
||||
self._links.add(callback)
|
||||
self._check_and_notify()
|
||||
|
||||
def unlink(self, callback):
|
||||
"""Remove the callback set by :meth:`rawlink`"""
|
||||
self._links.discard(callback)
|
||||
|
||||
if not self._links and self._notifier is not None:
|
||||
# If we currently have one queued, de-queue it.
|
||||
# This will break a reference cycle.
|
||||
# (self._notifier -> self._notify_links -> self)
|
||||
# But we can't set it to None in case it was actually running.
|
||||
self._notifier.stop()
|
||||
|
||||
|
||||
def _notify_links(self):
|
||||
# We release self._notifier here. We are called by it
|
||||
# at the end of the loop, and it is now false in a boolean way (as soon
|
||||
# as this method returns).
|
||||
notifier = self._notifier
|
||||
# We were ready() at the time this callback was scheduled;
|
||||
# we may not be anymore, and that status may change during
|
||||
# callback processing. Some of our subclasses will want to
|
||||
# notify everyone that the status was once true, even though not it
|
||||
# may not be anymore.
|
||||
todo = set(self._links)
|
||||
try:
|
||||
for link in todo:
|
||||
if not self._notify_all and not self.ready():
|
||||
break
|
||||
|
||||
if link not in self._links:
|
||||
# Been removed already by some previous link. OK, fine.
|
||||
continue
|
||||
try:
|
||||
link(self)
|
||||
except: # pylint:disable=bare-except
|
||||
# We're running in the hub, so getcurrent() returns
|
||||
# a hub.
|
||||
self.hub.handle_error((link, self), *sys.exc_info()) # pylint:disable=undefined-variable
|
||||
finally:
|
||||
if getattr(link, 'auto_unlink', None):
|
||||
# This attribute can avoid having to keep a reference to the function
|
||||
# *in* the function, which is a cycle
|
||||
self.unlink(link)
|
||||
finally:
|
||||
# We should not have created a new notifier even if callbacks
|
||||
# released us because we loop through *all* of our links on the
|
||||
# same callback while self._notifier is still true.
|
||||
assert self._notifier is notifier
|
||||
self._notifier = None
|
||||
|
||||
# Our set of active links changed, and we were told to stop on the first
|
||||
# time we went unready. See if we're ready, and if so, go around
|
||||
# again.
|
||||
if not self._notify_all and todo != self._links:
|
||||
self._check_and_notify()
|
||||
|
||||
def _wait_core(self, timeout, catch=Timeout):
|
||||
# The core of the wait implementation, handling
|
||||
# switching and linking. If *catch* is set to (),
|
||||
# a timeout that elapses will be allowed to be raised.
|
||||
# Returns a true value if the wait succeeded without timing out.
|
||||
switch = getcurrent().switch # pylint:disable=undefined-variable
|
||||
self.rawlink(switch)
|
||||
try:
|
||||
with Timeout._start_new_or_dummy(timeout) as timer:
|
||||
try:
|
||||
if self.hub is None:
|
||||
self.hub = get_hub()
|
||||
result = self.hub.switch()
|
||||
if result is not self: # pragma: no cover
|
||||
raise InvalidSwitchError('Invalid switch into Event.wait(): %r' % (result, ))
|
||||
return True
|
||||
except catch as ex:
|
||||
if ex is not timer:
|
||||
raise
|
||||
# test_set_and_clear and test_timeout in test_threading
|
||||
# rely on the exact return values, not just truthish-ness
|
||||
return False
|
||||
finally:
|
||||
self.unlink(switch)
|
||||
|
||||
def _wait_return_value(self, waited, wait_success):
|
||||
# pylint:disable=unused-argument
|
||||
# Subclasses should override this to return a value from _wait.
|
||||
# By default we return None.
|
||||
return None # pragma: no cover all extent subclasses override
|
||||
|
||||
def _wait(self, timeout=None):
|
||||
if self.ready():
|
||||
return self._wait_return_value(False, False)
|
||||
|
||||
gotit = self._wait_core(timeout)
|
||||
return self._wait_return_value(True, gotit)
|
||||
|
||||
def _init():
|
||||
greenlet_init() # pylint:disable=undefined-variable
|
||||
|
||||
_init()
|
||||
|
||||
|
||||
from gevent._util import import_c_accel
|
||||
import_c_accel(globals(), 'gevent.__abstract_linkable')
|
@ -0,0 +1,160 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
internal gevent python 2/python 3 bridges. Not for external use.
|
||||
"""
|
||||
|
||||
from __future__ import print_function, absolute_import, division
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] >= 3
|
||||
PYPY = hasattr(sys, 'pypy_version_info')
|
||||
WIN = sys.platform.startswith("win")
|
||||
LINUX = sys.platform.startswith('linux')
|
||||
OSX = sys.platform == 'darwin'
|
||||
|
||||
|
||||
PURE_PYTHON = PYPY or os.getenv('PURE_PYTHON')
|
||||
|
||||
## Types
|
||||
|
||||
if PY3:
|
||||
string_types = (str,)
|
||||
integer_types = (int,)
|
||||
text_type = str
|
||||
native_path_types = (str, bytes)
|
||||
thread_mod_name = '_thread'
|
||||
|
||||
else:
|
||||
import __builtin__ # pylint:disable=import-error
|
||||
string_types = (__builtin__.basestring,)
|
||||
text_type = __builtin__.unicode
|
||||
integer_types = (int, __builtin__.long)
|
||||
native_path_types = string_types
|
||||
thread_mod_name = 'thread'
|
||||
|
||||
def NativeStrIO():
|
||||
import io
|
||||
return io.BytesIO() if str is bytes else io.StringIO()
|
||||
|
||||
## Exceptions
|
||||
if PY3:
|
||||
def reraise(t, value, tb=None): # pylint:disable=unused-argument
|
||||
if value.__traceback__ is not tb and tb is not None:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
def exc_clear():
|
||||
pass
|
||||
|
||||
else:
|
||||
from gevent._util_py2 import reraise # pylint:disable=import-error,no-name-in-module
|
||||
reraise = reraise # export
|
||||
exc_clear = sys.exc_clear
|
||||
|
||||
## import locks
|
||||
try:
|
||||
# In Python 3.4 and newer in CPython and PyPy3,
|
||||
# imp.acquire_lock and imp.release_lock are delegated to
|
||||
# '_imp'. (Which is also used by importlib.) 'imp' itself is
|
||||
# deprecated. Avoid that warning.
|
||||
import _imp as imp
|
||||
except ImportError:
|
||||
import imp
|
||||
imp_acquire_lock = imp.acquire_lock
|
||||
imp_release_lock = imp.release_lock
|
||||
|
||||
## Functions
|
||||
if PY3:
|
||||
iteritems = dict.items
|
||||
itervalues = dict.values
|
||||
xrange = range
|
||||
izip = zip
|
||||
|
||||
else:
|
||||
iteritems = dict.iteritems # python 3: pylint:disable=no-member
|
||||
itervalues = dict.itervalues # python 3: pylint:disable=no-member
|
||||
xrange = __builtin__.xrange
|
||||
from itertools import izip # python 3: pylint:disable=no-member,no-name-in-module
|
||||
izip = izip
|
||||
|
||||
# fspath from 3.6 os.py, but modified to raise the same exceptions as the
|
||||
# real native implementation.
|
||||
# Define for testing
|
||||
def _fspath(path):
|
||||
"""
|
||||
Return the path representation of a path-like object.
|
||||
|
||||
If str or bytes is passed in, it is returned unchanged. Otherwise the
|
||||
os.PathLike interface is used to get the path representation. If the
|
||||
path representation is not str or bytes, TypeError is raised. If the
|
||||
provided path is not str, bytes, or os.PathLike, TypeError is raised.
|
||||
"""
|
||||
if isinstance(path, native_path_types):
|
||||
return path
|
||||
|
||||
# Work from the object's type to match method resolution of other magic
|
||||
# methods.
|
||||
path_type = type(path)
|
||||
try:
|
||||
path_type_fspath = path_type.__fspath__
|
||||
except AttributeError:
|
||||
raise TypeError("expected str, bytes or os.PathLike object, "
|
||||
"not " + path_type.__name__)
|
||||
|
||||
path_repr = path_type_fspath(path)
|
||||
if isinstance(path_repr, native_path_types):
|
||||
return path_repr
|
||||
|
||||
raise TypeError("expected {}.__fspath__() to return str or bytes, "
|
||||
"not {}".format(path_type.__name__,
|
||||
type(path_repr).__name__))
|
||||
try:
|
||||
from os import fspath # pylint: disable=unused-import,no-name-in-module
|
||||
except ImportError:
|
||||
# if not available, use the Python version as transparently as
|
||||
# possible
|
||||
fspath = _fspath
|
||||
fspath.__name__ = 'fspath'
|
||||
|
||||
try:
|
||||
from os import fsencode # pylint: disable=unused-import,no-name-in-module
|
||||
except ImportError:
|
||||
encoding = sys.getfilesystemencoding() or ('utf-8' if not WIN else 'mbcs')
|
||||
errors = 'strict' if WIN and encoding == 'mbcs' else 'surrogateescape'
|
||||
|
||||
# Added in 3.2, so this is for Python 2.7. Note that it doesn't have
|
||||
# sys.getfilesystemencodeerrors(), which was added in 3.6
|
||||
def fsencode(filename):
|
||||
"""Encode filename (an os.PathLike, bytes, or str) to the filesystem
|
||||
encoding with 'surrogateescape' error handler, return bytes unchanged.
|
||||
On Windows, use 'strict' error handler if the file system encoding is
|
||||
'mbcs' (which is the default encoding).
|
||||
"""
|
||||
filename = fspath(filename) # Does type-checking of `filename`.
|
||||
if isinstance(filename, bytes):
|
||||
return filename
|
||||
|
||||
try:
|
||||
return filename.encode(encoding, errors)
|
||||
except LookupError:
|
||||
# Can't encode it, and the error handler doesn't
|
||||
# exist. Probably on Python 2 with an astral character.
|
||||
# Not sure how to handle this.
|
||||
raise UnicodeEncodeError("Can't encode path to filesystem encoding")
|
||||
|
||||
|
||||
## Clocks
|
||||
try:
|
||||
# Python 3.3+ (PEP 418)
|
||||
from time import perf_counter
|
||||
perf_counter = perf_counter
|
||||
except ImportError:
|
||||
import time
|
||||
|
||||
if sys.platform == "win32":
|
||||
perf_counter = time.clock
|
||||
else:
|
||||
perf_counter = time.time
|
@ -0,0 +1,701 @@
|
||||
# Copyright (c) 2018 gevent. See LICENSE for details.
|
||||
"""
|
||||
gevent tunables.
|
||||
|
||||
This should be used as ``from gevent import config``. That variable
|
||||
is an object of :class:`Config`.
|
||||
|
||||
.. versionadded:: 1.3a2
|
||||
"""
|
||||
|
||||
from __future__ import print_function, absolute_import, division
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import textwrap
|
||||
|
||||
from gevent._compat import string_types
|
||||
from gevent._compat import WIN
|
||||
|
||||
__all__ = [
|
||||
'config',
|
||||
]
|
||||
|
||||
ALL_SETTINGS = []
|
||||
|
||||
class SettingType(type):
|
||||
# pylint:disable=bad-mcs-classmethod-argument
|
||||
|
||||
def __new__(cls, name, bases, cls_dict):
|
||||
if name == 'Setting':
|
||||
return type.__new__(cls, name, bases, cls_dict)
|
||||
|
||||
cls_dict["order"] = len(ALL_SETTINGS)
|
||||
if 'name' not in cls_dict:
|
||||
cls_dict['name'] = name.lower()
|
||||
|
||||
if 'environment_key' not in cls_dict:
|
||||
cls_dict['environment_key'] = 'GEVENT_' + cls_dict['name'].upper()
|
||||
|
||||
|
||||
new_class = type.__new__(cls, name, bases, cls_dict)
|
||||
new_class.fmt_desc(cls_dict.get("desc", ""))
|
||||
new_class.__doc__ = new_class.desc
|
||||
ALL_SETTINGS.append(new_class)
|
||||
|
||||
if new_class.document:
|
||||
setting_name = cls_dict['name']
|
||||
|
||||
def getter(self):
|
||||
return self.settings[setting_name].get()
|
||||
|
||||
def setter(self, value): # pragma: no cover
|
||||
# The setter should never be hit, Config has a
|
||||
# __setattr__ that would override. But for the sake
|
||||
# of consistency we provide one.
|
||||
self.settings[setting_name].set(value)
|
||||
|
||||
prop = property(getter, setter, doc=new_class.__doc__)
|
||||
|
||||
setattr(Config, cls_dict['name'], prop)
|
||||
return new_class
|
||||
|
||||
def fmt_desc(cls, desc):
|
||||
desc = textwrap.dedent(desc).strip()
|
||||
if hasattr(cls, 'shortname_map'):
|
||||
desc += (
|
||||
"\n\nThis is an importable value. It can be "
|
||||
"given as a string naming an importable object, "
|
||||
"or a list of strings in preference order and the first "
|
||||
"successfully importable object will be used. (Separate values "
|
||||
"in the environment variable with commas.) "
|
||||
"It can also be given as the callable object itself (in code). "
|
||||
)
|
||||
if cls.shortname_map:
|
||||
desc += "Shorthand names for default objects are %r" % (list(cls.shortname_map),)
|
||||
if getattr(cls.validate, '__doc__'):
|
||||
desc += '\n\n' + textwrap.dedent(cls.validate.__doc__).strip()
|
||||
if isinstance(cls.default, str) and hasattr(cls, 'shortname_map'):
|
||||
default = "`%s`" % (cls.default,)
|
||||
else:
|
||||
default = "`%r`" % (cls.default,)
|
||||
desc += "\n\nThe default value is %s" % (default,)
|
||||
desc += ("\n\nThe environment variable ``%s`` "
|
||||
"can be used to control this." % (cls.environment_key,))
|
||||
setattr(cls, "desc", desc)
|
||||
return desc
|
||||
|
||||
def validate_invalid(value):
|
||||
raise ValueError("Not a valid value: %r" % (value,))
|
||||
|
||||
def validate_bool(value):
|
||||
"""
|
||||
This is a boolean value.
|
||||
|
||||
In the environment variable, it may be given as ``1``, ``true``,
|
||||
``on`` or ``yes`` for `True`, or ``0``, ``false``, ``off``, or
|
||||
``no`` for `False`.
|
||||
"""
|
||||
if isinstance(value, string_types):
|
||||
value = value.lower().strip()
|
||||
if value in ('1', 'true', 'on', 'yes'):
|
||||
value = True
|
||||
elif value in ('0', 'false', 'off', 'no') or not value:
|
||||
value = False
|
||||
else:
|
||||
raise ValueError("Invalid boolean string: %r" % (value,))
|
||||
return bool(value)
|
||||
|
||||
def validate_anything(value):
|
||||
return value
|
||||
|
||||
convert_str_value_as_is = validate_anything
|
||||
|
||||
class Setting(object):
|
||||
name = None
|
||||
value = None
|
||||
validate = staticmethod(validate_invalid)
|
||||
default = None
|
||||
environment_key = None
|
||||
document = True
|
||||
|
||||
desc = """\
|
||||
|
||||
A long ReST description.
|
||||
|
||||
The first line should be a single sentence.
|
||||
|
||||
"""
|
||||
|
||||
def _convert(self, value):
|
||||
if isinstance(value, string_types):
|
||||
return value.split(',')
|
||||
return value
|
||||
|
||||
def _default(self):
|
||||
result = os.environ.get(self.environment_key, self.default)
|
||||
result = self._convert(result)
|
||||
return result
|
||||
|
||||
def get(self):
|
||||
# If we've been specifically set, return it
|
||||
if 'value' in self.__dict__:
|
||||
return self.value
|
||||
# Otherwise, read from the environment and reify
|
||||
# so we return consistent results.
|
||||
self.value = self.validate(self._default())
|
||||
return self.value
|
||||
|
||||
def set(self, val):
|
||||
self.value = self.validate(self._convert(val))
|
||||
|
||||
|
||||
Setting = SettingType('Setting', (Setting,), dict(Setting.__dict__))
|
||||
|
||||
def make_settings():
|
||||
"""
|
||||
Return fresh instances of all classes defined in `ALL_SETTINGS`.
|
||||
"""
|
||||
settings = {}
|
||||
for setting_kind in ALL_SETTINGS:
|
||||
setting = setting_kind()
|
||||
assert setting.name not in settings
|
||||
settings[setting.name] = setting
|
||||
return settings
|
||||
|
||||
|
||||
class Config(object):
|
||||
"""
|
||||
Global configuration for gevent.
|
||||
|
||||
There is one instance of this object at ``gevent.config``. If you
|
||||
are going to make changes in code, instead of using the documented
|
||||
environment variables, you need to make the changes before using
|
||||
any parts of gevent that might need those settings. For example::
|
||||
|
||||
>>> from gevent import config
|
||||
>>> config.fileobject = 'thread'
|
||||
|
||||
>>> from gevent import fileobject
|
||||
>>> fileobject.FileObject.__name__
|
||||
'FileObjectThread'
|
||||
|
||||
.. versionadded:: 1.3a2
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.settings = make_settings()
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in self.settings:
|
||||
raise AttributeError("No configuration setting for: %r" % name)
|
||||
return self.settings[name].get()
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name != "settings" and name in self.settings:
|
||||
self.set(name, value)
|
||||
else:
|
||||
super(Config, self).__setattr__(name, value)
|
||||
|
||||
def set(self, name, value):
|
||||
if name not in self.settings:
|
||||
raise AttributeError("No configuration setting for: %r" % name)
|
||||
self.settings[name].set(value)
|
||||
|
||||
def __dir__(self):
|
||||
return list(self.settings)
|
||||
|
||||
|
||||
class ImportableSetting(object):
|
||||
|
||||
def _import_one_of(self, candidates):
|
||||
assert isinstance(candidates, list)
|
||||
if not candidates:
|
||||
raise ImportError('Cannot import from empty list')
|
||||
|
||||
for item in candidates[:-1]:
|
||||
try:
|
||||
return self._import_one(item)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return self._import_one(candidates[-1])
|
||||
|
||||
def _import_one(self, path, _MISSING=object()):
|
||||
if not isinstance(path, string_types):
|
||||
return path
|
||||
|
||||
if '.' not in path or '/' in path:
|
||||
raise ImportError("Cannot import %r. "
|
||||
"Required format: [package.]module.class. "
|
||||
"Or choose from %r"
|
||||
% (path, list(self.shortname_map)))
|
||||
|
||||
|
||||
module, item = path.rsplit('.', 1)
|
||||
module = importlib.import_module(module)
|
||||
x = getattr(module, item, _MISSING)
|
||||
if x is _MISSING:
|
||||
raise ImportError('Cannot import %r from %r' % (item, module))
|
||||
return x
|
||||
|
||||
shortname_map = {}
|
||||
|
||||
def validate(self, value):
|
||||
if isinstance(value, type):
|
||||
return value
|
||||
return self._import_one_of([self.shortname_map.get(x, x) for x in value])
|
||||
|
||||
def get_options(self):
|
||||
result = {}
|
||||
for name, val in self.shortname_map.items():
|
||||
try:
|
||||
result[name] = self._import_one(val)
|
||||
except ImportError as e:
|
||||
result[name] = e
|
||||
return result
|
||||
|
||||
|
||||
class BoolSettingMixin(object):
|
||||
validate = staticmethod(validate_bool)
|
||||
# Don't do string-to-list conversion.
|
||||
_convert = staticmethod(convert_str_value_as_is)
|
||||
|
||||
|
||||
class IntSettingMixin(object):
|
||||
# Don't do string-to-list conversion.
|
||||
def _convert(self, value):
|
||||
if value:
|
||||
return int(value)
|
||||
|
||||
validate = staticmethod(validate_anything)
|
||||
|
||||
|
||||
class _PositiveValueMixin(object):
|
||||
|
||||
def validate(self, value):
|
||||
if value is not None and value <= 0:
|
||||
raise ValueError("Must be positive")
|
||||
return value
|
||||
|
||||
|
||||
class FloatSettingMixin(_PositiveValueMixin):
|
||||
def _convert(self, value):
|
||||
if value:
|
||||
return float(value)
|
||||
|
||||
|
||||
class ByteCountSettingMixin(_PositiveValueMixin):
|
||||
|
||||
_MULTIPLES = {
|
||||
# All keys must be the same size.
|
||||
'kb': 1024,
|
||||
'mb': 1024 * 1024,
|
||||
'gb': 1024 * 1024 * 1024,
|
||||
}
|
||||
|
||||
_SUFFIX_SIZE = 2
|
||||
|
||||
def _convert(self, value):
|
||||
if not value or not isinstance(value, str):
|
||||
return value
|
||||
value = value.lower()
|
||||
for s, m in self._MULTIPLES.items():
|
||||
if value[-self._SUFFIX_SIZE:] == s:
|
||||
return int(value[:-self._SUFFIX_SIZE]) * m
|
||||
return int(value)
|
||||
|
||||
|
||||
class Resolver(ImportableSetting, Setting):
|
||||
|
||||
desc = """\
|
||||
The callable that will be used to create
|
||||
:attr:`gevent.hub.Hub.resolver`.
|
||||
|
||||
See :doc:`dns` for more information.
|
||||
"""
|
||||
|
||||
default = [
|
||||
'thread',
|
||||
'dnspython',
|
||||
'ares',
|
||||
'block',
|
||||
]
|
||||
|
||||
shortname_map = {
|
||||
'ares': 'gevent.resolver.ares.Resolver',
|
||||
'thread': 'gevent.resolver.thread.Resolver',
|
||||
'block': 'gevent.resolver.blocking.Resolver',
|
||||
'dnspython': 'gevent.resolver.dnspython.Resolver',
|
||||
}
|
||||
|
||||
|
||||
|
||||
class Threadpool(ImportableSetting, Setting):
|
||||
|
||||
desc = """\
|
||||
The kind of threadpool we use.
|
||||
"""
|
||||
|
||||
default = 'gevent.threadpool.ThreadPool'
|
||||
|
||||
|
||||
class Loop(ImportableSetting, Setting):
|
||||
|
||||
desc = """\
|
||||
The kind of the loop we use.
|
||||
|
||||
On Windows, this defaults to libuv, while on
|
||||
other platforms it defaults to libev.
|
||||
|
||||
"""
|
||||
|
||||
default = [
|
||||
'libev-cext',
|
||||
'libev-cffi',
|
||||
'libuv-cffi',
|
||||
] if not WIN else [
|
||||
'libuv-cffi',
|
||||
'libev-cext',
|
||||
'libev-cffi',
|
||||
]
|
||||
|
||||
shortname_map = {
|
||||
'libev-cext': 'gevent.libev.corecext.loop',
|
||||
'libev-cffi': 'gevent.libev.corecffi.loop',
|
||||
'libuv-cffi': 'gevent.libuv.loop.loop',
|
||||
}
|
||||
|
||||
shortname_map['libuv'] = shortname_map['libuv-cffi']
|
||||
|
||||
|
||||
class FormatContext(ImportableSetting, Setting):
|
||||
name = 'format_context'
|
||||
|
||||
# using pprint.pformat can override custom __repr__ methods on dict/list
|
||||
# subclasses, which can be a security concern
|
||||
default = 'pprint.saferepr'
|
||||
|
||||
|
||||
class LibevBackend(Setting):
|
||||
name = 'libev_backend'
|
||||
environment_key = 'GEVENT_BACKEND'
|
||||
|
||||
desc = """\
|
||||
The backend for libev, such as 'select'
|
||||
"""
|
||||
|
||||
default = None
|
||||
|
||||
validate = staticmethod(validate_anything)
|
||||
|
||||
|
||||
class FileObject(ImportableSetting, Setting):
|
||||
desc = """\
|
||||
The kind of ``FileObject`` we will use.
|
||||
|
||||
See :mod:`gevent.fileobject` for a detailed description.
|
||||
|
||||
"""
|
||||
environment_key = 'GEVENT_FILE'
|
||||
|
||||
default = [
|
||||
'posix',
|
||||
'thread',
|
||||
]
|
||||
|
||||
shortname_map = {
|
||||
'thread': 'gevent._fileobjectcommon.FileObjectThread',
|
||||
'posix': 'gevent._fileobjectposix.FileObjectPosix',
|
||||
'block': 'gevent._fileobjectcommon.FileObjectBlock'
|
||||
}
|
||||
|
||||
|
||||
class WatchChildren(BoolSettingMixin, Setting):
    desc = """\
    Should we *not* watch children with the event loop watchers?

    This is an advanced setting.

    See :mod:`gevent.os` for a detailed description.
    """
    # NOTE: the setting is phrased negatively ("disable"), so the
    # False default means child-watching *is* enabled.
    name = 'disable_watch_children'
    environment_key = 'GEVENT_NOWAITPID'
    default = False
||||
class TraceMalloc(IntSettingMixin, Setting):
    name = 'trace_malloc'
    # Deliberately reuses the interpreter's own variable name so one
    # environment setting controls both (see desc).
    environment_key = 'PYTHONTRACEMALLOC'
    default = False

    desc = """\
    Should FFI objects track their allocation?

    This is only useful for low-level debugging.

    On Python 3, this environment variable is built in to the
    interpreter, and it may also be set with the ``-X
    tracemalloc`` command line argument.

    On Python 2, gevent interprets this argument and adds extra
    tracking information for FFI objects.
    """
||||
class TrackGreenletTree(BoolSettingMixin, Setting):
    name = 'track_greenlet_tree'
    environment_key = 'GEVENT_TRACK_GREENLET_TREE'
    # Enabled by default; disabling trades introspection for spawn speed.
    default = True

    desc = """\
    Should `Greenlet` objects track their spawning tree?

    Setting this to a false value will make spawning `Greenlet`
    objects and using `spawn_raw` faster, but the
    ``spawning_greenlet``, ``spawn_tree_locals`` and ``spawning_stack``
    will not be captured.

    .. versionadded:: 1.3b1
    """
|
||||
## Monitoring settings
|
||||
# All env keys should begin with GEVENT_MONITOR
|
||||
|
||||
class MonitorThread(BoolSettingMixin, Setting):
    name = 'monitor_thread'
    environment_key = 'GEVENT_MONITOR_THREAD_ENABLE'
    # Off by default; see desc for how to start monitoring manually.
    default = False

    desc = """\
    Should each hub start a native OS thread to monitor
    for problems?

    Such a thread will periodically check to see if the event loop
    is blocked for longer than `max_blocking_time`, producing output on
    the hub's exception stream (stderr by default) if it detects this condition.

    If this setting is true, then this thread will be created
    the first time the hub is switched to,
    or you can call :meth:`gevent.hub.Hub.start_periodic_monitoring_thread` at any
    time to create it (from the same thread that will run the hub). That function
    will return an instance of :class:`gevent.events.IPeriodicMonitorThread`
    to which you can add your own monitoring functions. That function
    also emits an event of :class:`gevent.events.PeriodicMonitorThreadStartedEvent`.

    .. seealso:: `max_blocking_time`

    .. versionadded:: 1.3b1
    """
|
||||
class MaxBlockingTime(FloatSettingMixin, Setting):
    name = 'max_blocking_time'
    # This environment key doesn't follow the convention because it's
    # meant to match a key used by existing projects
    environment_key = 'GEVENT_MAX_BLOCKING_TIME'
    # Seconds the loop may block before the monitor warns.
    default = 0.1

    desc = """\
    If the `monitor_thread` is enabled, this is
    approximately how long (in seconds)
    the event loop will be allowed to block before a warning is issued.

    This function depends on using `greenlet.settrace`, so installing
    your own trace function after starting the monitoring thread will
    cause this feature to misbehave unless you call the function
    returned by `greenlet.settrace`. If you install a tracing function *before*
    the monitoring thread is started, it will still be called.

    .. note:: In the unlikely event of creating and using multiple different
       gevent hubs in the same native thread in a short period of time,
       especially without destroying the hubs, false positives may be reported.

    .. versionadded:: 1.3b1
    """
||||
class MonitorMemoryPeriod(FloatSettingMixin, Setting):
    name = 'memory_monitor_period'

    environment_key = 'GEVENT_MONITOR_MEMORY_PERIOD'
    # Seconds between memory-usage checks; gevent enforces a floor.
    default = 5

    desc = """\
    If `monitor_thread` is enabled, this is approximately how long
    (in seconds) we will go between checking the processes memory usage.

    Checking the memory usage is relatively expensive on some operating
    systems, so this should not be too low. gevent will place a floor
    value on it.
    """
|
||||
|
||||
class MonitorMemoryMaxUsage(ByteCountSettingMixin, Setting):
    # Optional memory cap; when exceeded, monitoring events are emitted.
    name = 'max_memory_usage'

    environment_key = 'GEVENT_MONITOR_MEMORY_MAX'
    # No cap by default: memory events are only emitted when the user
    # explicitly chooses a limit.
    default = None

    # Fixed typo in the user-facing documentation: "gigibytes" -> "gigabytes".
    desc = """\
    If `monitor_thread` is enabled,
    then if memory usage exceeds this amount (in bytes), events will
    be emitted. See `gevent.events`. In the environment variable, you can use
    a suffix of 'kb', 'mb' or 'gb' to specify the value in kilobytes, megabytes
    or gigabytes.

    There is no default value for this setting. If you wish to
    cap memory usage, you must choose a value.
    """
|
||||
|
||||
# The ares settings are all interpreted by
|
||||
# gevent/resolver/ares.pyx, so we don't do
|
||||
# any validation here.
|
||||
|
||||
class AresSettingMixin(object):
    # Mixin for c-ares resolver settings. The values are interpreted by
    # gevent/resolver/ares.pyx, so no validation or conversion is done here.

    # Not included in the generated documentation by default.
    document = False

    @property
    def kwarg_name(self):
        # Strip the leading 'ares_' prefix (5 characters): the setting
        # 'ares_flags' becomes the resolver keyword argument 'flags'.
        return self.name[5:]

    # Accept anything; ares.pyx does the real interpretation.
    validate = staticmethod(validate_anything)

    # Pass environment strings through unmodified.
    _convert = staticmethod(convert_str_value_as_is)
|
||||
|
||||
class AresFlags(AresSettingMixin, Setting):
    # Raw flags passed through to the c-ares channel.
    name = 'ares_flags'
    default = None
    environment_key = 'GEVENTARES_FLAGS'
|
||||
|
||||
class AresTimeout(AresSettingMixin, Setting):
    # Documented (despite deprecation) so users can find the pointer
    # to the replacement setting.
    document = True
    name = 'ares_timeout'
    default = None
    environment_key = 'GEVENTARES_TIMEOUT'
    desc = """\

    .. deprecated:: 1.3a2
       Prefer the :attr:`resolver_timeout` setting. If both are set,
       the results are not defined.
    """
|
||||
|
||||
class AresTries(AresSettingMixin, Setting):
    # Number of query attempts, passed through to c-ares.
    name = 'ares_tries'
    default = None
    environment_key = 'GEVENTARES_TRIES'
|
||||
|
||||
class AresNdots(AresSettingMixin, Setting):
    # The 'ndots' resolver option, passed through to c-ares.
    name = 'ares_ndots'
    default = None
    environment_key = 'GEVENTARES_NDOTS'
|
||||
|
||||
class AresUDPPort(AresSettingMixin, Setting):
    # UDP port for DNS queries, passed through to c-ares.
    name = 'ares_udp_port'
    default = None
    environment_key = 'GEVENTARES_UDP_PORT'
|
||||
|
||||
class AresTCPPort(AresSettingMixin, Setting):
    # TCP port for DNS queries, passed through to c-ares.
    name = 'ares_tcp_port'
    default = None
    environment_key = 'GEVENTARES_TCP_PORT'
|
||||
|
||||
class AresServers(AresSettingMixin, Setting):
    document = True
    name = 'ares_servers'
    default = None
    environment_key = 'GEVENTARES_SERVERS'
    desc = """\
    A list of strings giving the IP addresses of nameservers for the ares resolver.

    In the environment variable, these strings are separated by commas.

    .. deprecated:: 1.3a2
       Prefer the :attr:`resolver_nameservers` setting. If both are set,
       the results are not defined.
    """
|
||||
|
||||
# Generic nameservers, works for dnspython and ares.
|
||||
class ResolverNameservers(AresSettingMixin, Setting):
    # Generic nameserver list; honored by both the dnspython and ares
    # resolvers (the blocking and threaded resolvers ignore it).
    document = True
    name = 'resolver_nameservers'
    default = None
    environment_key = 'GEVENT_RESOLVER_NAMESERVERS'
    desc = """\
    A list of strings giving the IP addresses of nameservers for the (non-system) resolver.

    In the environment variable, these strings are separated by commas.

    .. rubric:: Resolver Behaviour

    * blocking

      Ignored

    * Threaded

      Ignored

    * dnspython

      If this setting is not given, the dnspython resolver will
      load nameservers to use from ``/etc/resolv.conf``
      or the Windows registry. This setting replaces any nameservers read
      from those means. Note that the file and registry are still read
      for other settings.

      .. caution:: dnspython does not validate the members of the list.
         An improper address (such as a hostname instead of IP) has
         undefined results, including hanging the process.

    * ares

      Similar to dnspython, but with more platform and compile-time
      options. ares validates that the members of the list are valid
      addresses.
    """

    # Normal string-to-list rules. But still validate_anything.
    _convert = Setting._convert

    # TODO: In the future, support reading a resolv.conf file
    # *other* than /etc/resolv.conf, and do that both on Windows
    # and other platforms. Also offer the option to disable the system
    # configuration entirely.

    @property
    def kwarg_name(self):
        # Both resolvers accept this as the 'servers' keyword, so
        # override the mixin's 'ares_'-prefix stripping.
        return 'servers'
|
||||
|
||||
# Generic timeout, works for dnspython and ares
|
||||
class ResolverTimeout(FloatSettingMixin, AresSettingMixin, Setting):
    # Generic timeout; honored by both the dnspython and ares resolvers.
    document = True
    name = 'resolver_timeout'
    environment_key = 'GEVENT_RESOLVER_TIMEOUT'
    desc = """\
    The total amount of time that the DNS resolver will spend making queries.

    Only the ares and dnspython resolvers support this.

    .. versionadded:: 1.3a2
    """

    @property
    def kwarg_name(self):
        # Passed to the resolver constructors as the 'timeout' keyword.
        return 'timeout'
|
||||
|
||||
# The singleton configuration object for this process.
config = Config()

# Go ahead and attempt to import the loop when this class is
# instantiated. The hub won't work if the loop can't be found. This
# can solve problems with the class being imported from multiple
# threads at once, leading to one of the imports failing.
# factories are themselves handled lazily. See #687.

# Don't cache it though, in case the user re-configures through the
# API.

try:
    Loop().get()
except ImportError: # pragma: no cover
    # NOTE(review): the ImportError is deliberately swallowed here;
    # presumably it resurfaces later when a hub actually needs the
    # loop -- confirm before relying on this.
    pass
|
Binary file not shown.
@ -0,0 +1,30 @@
|
||||
# Cython declaration file for gevent._event.

cimport cython

from gevent.__hub_local cimport get_hub_noargs as get_hub
from gevent.__abstract_linkable cimport AbstractLinkable

# Module-level Python objects assigned in the implementation file;
# declared here so Cython treats them as typed module globals.
cdef _None
cdef reraise
cdef dump_traceback
cdef load_traceback

cdef Timeout

cdef class Event(AbstractLinkable):
    # Internal boolean state flag; presumably backs set()/is_set() --
    # see the implementation file.
    cdef bint _flag

cdef class AsyncResult(AbstractLinkable):
    # The stored result value, if any.
    cdef readonly _value
    # The stored exception information tuple, if the result is an error.
    cdef readonly tuple _exc_info

    # For the use of _imap.py
    cdef public int _imap_task_index

    # '=*' marks arguments with default values in the implementation.
    cpdef get(self, block=*, timeout=*)
    cpdef bint successful(self)

    cpdef wait(self, timeout=*)
    cpdef bint done(self)

    cpdef bint cancel(self)
    cpdef bint cancelled(self)
|
@ -0,0 +1,27 @@
|
||||
"""
|
||||
Internal helpers for FFI implementations.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
def _dbg(*args, **kwargs):
|
||||
# pylint:disable=unused-argument
|
||||
pass
|
||||
|
||||
#_dbg = print
|
||||
|
||||
def _pid_dbg(*args, **kwargs):
|
||||
kwargs['file'] = sys.stderr
|
||||
print(os.getpid(), *args, **kwargs)
|
||||
|
||||
# Numeric debug levels, ordered by increasing verbosity.
CRITICAL = 1
ERROR = 3
DEBUG = 5
TRACE = 9

# Resolve GEVENT_DEBUG (a level name such as "error" or "trace",
# case-insensitive, defaulting to CRITICAL) to its numeric value by
# looking the upper-cased name up in this module's namespace.
# NOTE(review): an unknown name raises KeyError at import time -- this
# looks like deliberate fail-fast behavior; confirm before changing.
GEVENT_DEBUG_LEVEL = vars()[os.getenv("GEVENT_DEBUG", 'CRITICAL').upper()]

# At TRACE verbosity, replace the no-op _dbg with the PID-stamped printer.
if GEVENT_DEBUG_LEVEL >= TRACE:
    _dbg = _pid_dbg
|
@ -0,0 +1,58 @@
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
# Public API of this module: only the callback class.
__all__ = [
    'callback',
]
|
||||
|
||||
|
||||
# Shared empty tuple: when *args is captured but empty, reuse this
# constant instead of keeping a fresh per-call tuple alive.
_NOARGS = ()


class callback(object):
    """
    A deferred callable: a function plus its positional arguments.

    ``stop()`` (alias ``close()``) cancels the callback by dropping
    both references, which also marks the instance falsy/not-pending.
    """

    __slots__ = ('callback', 'args')

    def __init__(self, cb, args):
        self.callback = cb
        # Substitute the shared empty tuple for falsy args (None or ()).
        self.args = args if args else _NOARGS

    def stop(self):
        # Dropping both references cancels the callback and makes the
        # instance report neither pending nor truthy.
        self.callback = None
        self.args = None

    close = stop

    # ``bool()`` and ``pending`` intentionally differ:
    # bool() is used to decide whether to schedule another callback, so
    # it stays true while the callback is pending *or* currently running;
    # ``pending`` follows libev watcher semantics and is cleared just
    # before the callback actually runs.

    def __nonzero__(self):
        # Truthy while pending or executing. Relies on
        # loop._run_callbacks setting ``args`` to None afterwards.
        return self.args is not None
    __bool__ = __nonzero__

    @property
    def pending(self):
        return self.callback is not None

    def _format(self):
        return ''

    def __repr__(self):
        pieces = ["<%s at 0x%x" % (self.__class__.__name__, id(self))]
        if self.pending:
            pieces.append(" pending")
        if self.callback is not None:
            pieces.append(" callback=%r" % (self.callback, ))
        if self.args is not None:
            pieces.append(" args=%r" % (self.args, ))
        if self.callback is None and self.args is None:
            pieces.append(" stopped")
        pieces.append(">")
        return ''.join(pieces)
|
@ -0,0 +1,713 @@
|
||||
"""
|
||||
Basic loop implementation for ffi-based cores.
|
||||
"""
|
||||
# pylint: disable=too-many-lines, protected-access, redefined-outer-name, not-callable
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from collections import deque
|
||||
import sys
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from gevent._ffi import _dbg
|
||||
from gevent._ffi import GEVENT_DEBUG_LEVEL
|
||||
from gevent._ffi import TRACE
|
||||
from gevent._ffi.callback import callback
|
||||
from gevent._compat import PYPY
|
||||
|
||||
from gevent import getswitchinterval
|
||||
|
||||
__all__ = [
|
||||
'AbstractLoop',
|
||||
'assign_standard_callbacks',
|
||||
]
|
||||
|
||||
|
||||
class _EVENTSType(object):
|
||||
def __repr__(self):
|
||||
return 'gevent.core.EVENTS'
|
||||
|
||||
EVENTS = GEVENT_CORE_EVENTS = _EVENTSType()
|
||||
|
||||
|
||||
#####
|
||||
## Note on CFFI objects, callbacks and the lifecycle of watcher objects
|
||||
#
|
||||
# Each subclass of `watcher` allocates a C structure of the
|
||||
# appropriate type e.g., struct gevent_ev_io and holds this pointer in
|
||||
# its `_gwatcher` attribute. When that watcher instance is garbage
|
||||
# collected, then the C structure is also freed. The C structure is
|
||||
# passed to libev from the watcher's start() method and then to the
|
||||
# appropriate C callback function, e.g., _gevent_ev_io_callback, which
|
||||
# passes it back to python's _python_callback where we need the
|
||||
# watcher instance. Therefore, as long as that callback is active (the
|
||||
# watcher is started), the watcher instance must not be allowed to get
|
||||
# GC'd---any access at the C level or even the FFI level to the freed
|
||||
# memory could crash the process.
|
||||
#
|
||||
# However, the typical idiom calls for writing something like this:
|
||||
# loop.io(fd, python_cb).start()
|
||||
# thus forgetting the newly created watcher subclass and allowing it to be immediately
|
||||
# GC'd. To combat this, when the watcher is started, it places itself into the loop's
|
||||
# `_keepaliveset`, and it only removes itself when the watcher's `stop()` method is called.
|
||||
# Often, this is the *only* reference keeping the watcher object, and hence its C structure,
|
||||
# alive.
|
||||
#
|
||||
# This is slightly complicated by the fact that the python-level
|
||||
# callback, called from the C callback, could choose to manually stop
|
||||
# the watcher. When we return to the C level callback, we now have an
|
||||
# invalid pointer, and attempting to pass it back to Python (e.g., to
|
||||
# handle an error) could crash. Hence, _python_callback,
|
||||
# _gevent_io_callback, and _python_handle_error cooperate to make sure
|
||||
# that the watcher instance stays in the loops `_keepaliveset` while
|
||||
# the C code could be running---and if it gets removed, to not call back
|
||||
# to Python again.
|
||||
# See also https://github.com/gevent/gevent/issues/676
|
||||
####
|
||||
class AbstractCallbacks(object):
    """
    The Python-side functions invoked from C for a CFFI-based loop.

    Instances are registered with the FFI library by
    ``assign_standard_callbacks``; ``self.callbacks`` keeps the
    resulting cdata objects alive. See the lifecycle discussion in the
    module-level comment above.
    """

    def __init__(self, ffi):
        self.ffi = ffi
        self.callbacks = []
        # At normal debug levels, bypass the tracing wrapper below and
        # bind the FFI's from_handle directly on the instance.
        if GEVENT_DEBUG_LEVEL < TRACE:
            self.from_handle = ffi.from_handle

    def from_handle(self, handle): # pylint:disable=method-hidden
        # Tracing variant; shadowed per-instance in __init__ unless the
        # debug level is TRACE or higher.
        x = self.ffi.from_handle(handle)
        return x

    def python_callback(self, handle, revents):
        """
        Returns an integer having one of three values:

        - -1
          An exception occurred during the callback and you must call
          :func:`_python_handle_error` to deal with it. The Python watcher
          object will have the exception tuple saved in ``_exc_info``.
        - 1
          Everything went according to plan. You should check to see if the libev
          watcher is still active, and call :func:`python_stop` if it is not. This will
          clean up the memory. Finding the watcher still active at the event loop level,
          but not having stopped itself at the gevent level is a buggy scenario and
          shouldn't happen.
        - 2
          Everything went according to plan, but the watcher has already
          been stopped. Its memory may no longer be valid.

        This function should never return 0, as that's the default value that
        Python exceptions will produce.
        """
        #print("Running callback", handle)
        orig_ffi_watcher = None
        try:
            # Even dereferencing the handle needs to be inside the try/except;
            # if we don't return normally (e.g., a signal) then we wind up going
            # to the 'onerror' handler (unhandled_onerror), which
            # is not what we want; that can permanently wedge the loop depending
            # on which callback was executing.
            # XXX: See comments in that function. We may be able to restart and do better?
            if not handle:
                # Hmm, a NULL handle. That's not supposed to happen.
                # We can easily get into a loop if we deref it and allow that
                # to raise.
                _dbg("python_callback got null handle")
                return 1
            the_watcher = self.from_handle(handle)
            orig_ffi_watcher = the_watcher._watcher
            args = the_watcher.args
            if args is None:
                # Legacy behaviour from corecext: convert None into ()
                # See test__core_watcher.py
                args = _NOARGS
            if args and args[0] == GEVENT_CORE_EVENTS:
                args = (revents, ) + args[1:]
            #print("Calling function", the_watcher.callback, args)
            the_watcher.callback(*args)
        except: # pylint:disable=bare-except
            _dbg("Got exception servicing watcher with handle", handle, sys.exc_info())
            # It's possible for ``the_watcher`` to be undefined (UnboundLocalError)
            # if we threw an exception (signal) on the line that created that variable.
            # This is typically the case with a signal under libuv
            try:
                the_watcher
            except UnboundLocalError:
                the_watcher = self.from_handle(handle)
            the_watcher._exc_info = sys.exc_info()
            # Depending on when the exception happened, the watcher
            # may or may not have been stopped. We need to make sure its
            # memory stays valid so we can stop it at the ev level if needed.
            # If its loop is gone, it has already been stopped,
            # see https://github.com/gevent/gevent/issues/1295 for a case where
            # that happened
            if the_watcher.loop is not None:
                the_watcher.loop._keepaliveset.add(the_watcher)
            return -1
        else:
            if (the_watcher.loop is not None
                    and the_watcher in the_watcher.loop._keepaliveset
                    and the_watcher._watcher is orig_ffi_watcher):
                # The callback neither stopped the watcher nor reset
                # its underlying FFI watcher and restarted itself (libuv's
                # io watchers MAY do the latter).
                # The normal, expected scenario when we find the watcher still
                # in the keepaliveset is that it is still active at the event loop
                # level, so we don't expect that python_stop gets called.
                #_dbg("The watcher has not stopped itself, possibly still active", the_watcher)
                return 1
            return 2 # it stopped itself

    def python_handle_error(self, handle, _revents):
        # Called from C after python_callback returned -1.
        _dbg("Handling error for handle", handle)
        if not handle:
            return
        try:
            watcher = self.from_handle(handle)
            exc_info = watcher._exc_info
            del watcher._exc_info
            # In the past, we passed the ``watcher`` itself as the context,
            # which typically meant that the Hub would just print
            # the exception. This is a problem because sometimes we can't
            # detect signals until late in ``python_callback``; specifically,
            # test_selectors.py:DefaultSelectorTest.test_select_interrupt_exc
            # installs a SIGALRM handler that raises an exception. That exception can happen
            # before we enter ``python_callback`` or at any point within it because of the way
            # libuv swallows signals. By passing None, we get the exception propagated into
            # the main greenlet (which is probably *also* not what we always want, but
            # I see no way to distinguish the cases).
            watcher.loop.handle_error(None, *exc_info)
        finally:
            # XXX Since we're here on an error condition, and we
            # made sure that the watcher object was put in loop._keepaliveset,
            # what about not stopping the watcher? Looks like a possible
            # memory leak?
            # XXX: This used to do "if revents & (libev.EV_READ | libev.EV_WRITE)"
            # before stopping. Why?
            try:
                watcher.stop()
            except: # pylint:disable=bare-except
                watcher.loop.handle_error(watcher, *sys.exc_info())
            return # pylint:disable=lost-exception

    def unhandled_onerror(self, t, v, tb):
        # This is supposed to be called for signals, etc.
        # This is the onerror= value for CFFI.
        # If we return None, C will get a value of 0/NULL;
        # if we raise, CFFI will print the exception and then
        # return 0/NULL; (unless error= was configured)
        # If things go as planned, we return the value that asks
        # C to call back and check on if the watcher needs to be closed or
        # not.

        # XXX: TODO: Could this cause events to be lost? Maybe we need to return
        # a value that causes the C loop to try the callback again?
        # at least for signals under libuv, which are delivered at very odd times.
        # Hopefully the event still shows up when we poll the next time.
        watcher = None
        handle = tb.tb_frame.f_locals['handle'] if tb is not None else None
        if handle: # handle could be NULL
            watcher = self.from_handle(handle)
        if watcher is not None:
            watcher.loop.handle_error(None, t, v, tb)
            return 1

        # Raising it causes a lot of noise from CFFI
        print("WARNING: gevent: Unhandled error with no watcher",
              file=sys.stderr)
        traceback.print_exception(t, v, tb)

    def python_stop(self, handle):
        # Called from C to stop (and thereby release) a watcher.
        if not handle: # pragma: no cover
            print(
                "WARNING: gevent: Unable to dereference handle; not stopping watcher. "
                "Native resources may leak. This is most likely a bug in gevent.",
                file=sys.stderr)
            # The alternative is to crash with no helpful information
            # NOTE: Raising exceptions here does nothing, they're swallowed by CFFI.
            # Since the C level passed in a null pointer, even dereferencing the handle
            # will just produce some exceptions.
            return
        watcher = self.from_handle(handle)
        watcher.stop()

    if not PYPY:
        def python_check_callback(self, watcher_ptr): # pylint:disable=unused-argument
            # If we have the onerror callback, this is a no-op; all the real
            # work to rethrow the exception is done by the onerror callback

            # NOTE: Unlike the rest of the functions, this is called with a pointer
            # to the C level structure, *not* a pointer to the void* that represents a
            # <cdata> for the Python Watcher object.
            pass
    else: # PyPy
        # On PyPy, we need the function to have some sort of body, otherwise
        # the signal exceptions don't always get caught, *especially* with
        # libuv (however, there's no reason to expect this to only be a libuv
        # issue; it's just that we don't depend on the periodic signal timer
        # under libev, so the issue is much more pronounced under libuv)
        # test_socket's test_sendall_interrupted can hang.
        # See https://github.com/gevent/gevent/issues/1112

        def python_check_callback(self, watcher_ptr): # pylint:disable=unused-argument
            # Things we've tried that *don't* work:
            # greenlet.getcurrent()
            # 1 + 1
            try:
                raise MemoryError()
            except MemoryError:
                pass

    def python_prepare_callback(self, watcher_ptr):
        # Runs the loop's deferred callbacks just before it blocks.
        loop = self._find_loop_from_c_watcher(watcher_ptr)
        if loop is None: # pragma: no cover
            print("WARNING: gevent: running prepare callbacks from a destroyed handle: ",
                  watcher_ptr)
            return
        loop._run_callbacks()

    def check_callback_onerror(self, t, v, tb):
        # onerror= handler for the check/prepare callbacks above.
        watcher_ptr = tb.tb_frame.f_locals['watcher_ptr'] if tb is not None else None
        if watcher_ptr:
            loop = self._find_loop_from_c_watcher(watcher_ptr)
            if loop is not None:
                # None as the context argument causes the exception to be raised
                # in the main greenlet.
                loop.handle_error(None, t, v, tb)
                return None
        raise v # Let CFFI print

    def _find_loop_from_c_watcher(self, watcher_ptr):
        # Subclasses map a C watcher pointer back to the Python loop.
        raise NotImplementedError()
|
||||
|
||||
|
||||
|
||||
def assign_standard_callbacks(ffi, lib, callbacks_class, extras=()): # pylint:disable=unused-argument
    """
    Instantiate *callbacks_class* and register its standard methods
    (plus any named ``extras``, given as ``(name, error_func)`` pairs)
    as CFFI 'extern Python' callbacks on *ffi*.

    Returns the callbacks instance, which keeps the registered cdata
    objects alive.
    """
    # callbacks keeps these cdata objects alive at the python level
    callbacks = callbacks_class(ffi)
    extras = tuple([(getattr(callbacks, name), error) for name, error in extras])
    for (func, error_func) in ((callbacks.python_callback, None),
                               (callbacks.python_handle_error, None),
                               (callbacks.python_stop, None),
                               (callbacks.python_check_callback,
                                callbacks.check_callback_onerror),
                               (callbacks.python_prepare_callback,
                                callbacks.check_callback_onerror)) + extras:
        # The name of the callback function matches the 'extern Python' declaration.
        error_func = error_func or callbacks.unhandled_onerror
        callback = ffi.def_extern(onerror=error_func)(func)
        # keep alive the cdata
        # (def_extern returns the original function, and it requests that
        # the function be "global", so maybe it keeps a hard reference to it somewhere now
        # unlike ffi.callback(), and we don't need to do this?)
        callbacks.callbacks.append(callback)

    # At this point, the library C variable (static function, actually)
    # is filled in.

    return callbacks
|
||||
|
||||
|
||||
# Python 2/3 compatibility shims for string and integer type checks.
if sys.version_info[0] >= 3:
    basestring = (bytes, str)
    integer_types = (int,)
else:
    import __builtin__ # pylint:disable=import-error
    basestring = (__builtin__.basestring,)
    integer_types = (int, __builtin__.long)
|
||||
|
||||
|
||||
|
||||
|
||||
# Shared empty tuple for empty *args (see gevent._ffi.callback).
_NOARGS = ()

# How many deferred callbacks to run before re-checking the clock
# against the switch interval (used by AbstractLoop._run_callbacks).
CALLBACK_CHECK_COUNT = 50
||||
|
||||
class AbstractLoop(object):
|
||||
# pylint:disable=too-many-public-methods,too-many-instance-attributes
|
||||
|
||||
error_handler = None
|
||||
|
||||
_CHECK_POINTER = None
|
||||
|
||||
_TIMER_POINTER = None
|
||||
_TIMER_CALLBACK_SIG = None
|
||||
|
||||
_PREPARE_POINTER = None
|
||||
|
||||
starting_timer_may_update_loop_time = False
|
||||
|
||||
# Subclasses should set this in __init__ to reflect
|
||||
# whether they were the default loop.
|
||||
_default = None
|
||||
|
||||
def __init__(self, ffi, lib, watchers, flags=None, default=None):
|
||||
self._ffi = ffi
|
||||
self._lib = lib
|
||||
self._ptr = None
|
||||
self._handle_to_self = self._ffi.new_handle(self) # XXX: Reference cycle?
|
||||
self._watchers = watchers
|
||||
self._in_callback = False
|
||||
self._callbacks = deque()
|
||||
# Stores python watcher objects while they are started
|
||||
self._keepaliveset = set()
|
||||
self._init_loop_and_aux_watchers(flags, default)
|
||||
|
||||
|
||||
def _init_loop_and_aux_watchers(self, flags=None, default=None):
|
||||
|
||||
self._ptr = self._init_loop(flags, default)
|
||||
|
||||
|
||||
# self._check is a watcher that runs in each iteration of the
|
||||
# mainloop, just after the blocking call. It's point is to handle
|
||||
# signals. It doesn't run watchers or callbacks, it just exists to give
|
||||
# CFFI a chance to raise signal exceptions so we can handle them.
|
||||
self._check = self._ffi.new(self._CHECK_POINTER)
|
||||
self._check.data = self._handle_to_self
|
||||
self._init_and_start_check()
|
||||
|
||||
# self._prepare is a watcher that runs in each iteration of the mainloop,
|
||||
# just before the blocking call. It's where we run deferred callbacks
|
||||
# from self.run_callback. This cooperates with _setup_for_run_callback()
|
||||
# to schedule self._timer0 if needed.
|
||||
self._prepare = self._ffi.new(self._PREPARE_POINTER)
|
||||
self._prepare.data = self._handle_to_self
|
||||
self._init_and_start_prepare()
|
||||
|
||||
# A timer we start and stop on demand. If we have callbacks,
|
||||
# too many to run in one iteration of _run_callbacks, we turn this
|
||||
# on so as to have the next iteration of the run loop return to us
|
||||
# as quickly as possible.
|
||||
# TODO: There may be a more efficient way to do this using ev_timer_again;
|
||||
# see the "ev_timer" section of the ev manpage (http://linux.die.net/man/3/ev)
|
||||
# Alternatively, setting the ev maximum block time may also work.
|
||||
self._timer0 = self._ffi.new(self._TIMER_POINTER)
|
||||
self._timer0.data = self._handle_to_self
|
||||
self._init_callback_timer()
|
||||
|
||||
# TODO: We may be able to do something nicer and use the existing python_callback
|
||||
# combined with onerror and the class check/timer/prepare to simplify things
|
||||
# and unify our handling
|
||||
|
||||
def _init_loop(self, flags, default):
|
||||
"""
|
||||
Called by __init__ to create or find the loop. The return value
|
||||
is assigned to self._ptr.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _init_and_start_check(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _init_and_start_prepare(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _init_callback_timer(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _stop_callback_timer(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _start_callback_timer(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _check_callback_handle_error(self, t, v, tb):
|
||||
self.handle_error(None, t, v, tb)
|
||||
|
||||
def _run_callbacks(self): # pylint:disable=too-many-branches
|
||||
# When we're running callbacks, its safe for timers to
|
||||
# update the notion of the current time (because if we're here,
|
||||
# we're not running in a timer callback that may let other timers
|
||||
# run; this is mostly an issue for libuv).
|
||||
|
||||
# That's actually a bit of a lie: on libev, self._timer0 really is
|
||||
# a timer, and so sometimes this is running in a timer callback, not
|
||||
# a prepare callback. But that's OK, libev doesn't suffer from cascading
|
||||
# timer expiration and its safe to update the loop time at any
|
||||
# moment there.
|
||||
self.starting_timer_may_update_loop_time = True
|
||||
try:
|
||||
count = CALLBACK_CHECK_COUNT
|
||||
now = self.now()
|
||||
expiration = now + getswitchinterval()
|
||||
self._stop_callback_timer()
|
||||
while self._callbacks:
|
||||
cb = self._callbacks.popleft() # pylint:disable=assignment-from-no-return
|
||||
count -= 1
|
||||
self.unref() # XXX: libuv doesn't have a global ref count!
|
||||
callback = cb.callback
|
||||
cb.callback = None
|
||||
args = cb.args
|
||||
if callback is None or args is None:
|
||||
# it's been stopped
|
||||
continue
|
||||
|
||||
try:
|
||||
callback(*args)
|
||||
except: # pylint:disable=bare-except
|
||||
# If we allow an exception to escape this method (while we are running the ev callback),
|
||||
# then CFFI will print the error and libev will continue executing.
|
||||
# There are two problems with this. The first is that the code after
|
||||
# the loop won't run. The second is that any remaining callbacks scheduled
|
||||
# for this loop iteration will be silently dropped; they won't run, but they'll
|
||||
# also not be *stopped* (which is not a huge deal unless you're looking for
|
||||
# consistency or checking the boolean/pending status; the loop doesn't keep
|
||||
# a reference to them like it does to watchers...*UNLESS* the callback itself had
|
||||
# a reference to a watcher; then I don't know what would happen, it depends on
|
||||
# the state of the watcher---a leak or crash is not totally inconceivable).
|
||||
# The Cython implementation in core.ppyx uses gevent_call from callbacks.c
|
||||
# to run the callback, which uses gevent_handle_error to handle any errors the
|
||||
# Python callback raises...it unconditionally simply prints any error raised
|
||||
# by loop.handle_error and clears it, so callback handling continues.
|
||||
# We take a similar approach (but are extra careful about printing)
|
||||
try:
|
||||
self.handle_error(cb, *sys.exc_info())
|
||||
except: # pylint:disable=bare-except
|
||||
try:
|
||||
print("Exception while handling another error", file=sys.stderr)
|
||||
traceback.print_exc()
|
||||
except: # pylint:disable=bare-except
|
||||
pass # Nothing we can do here
|
||||
finally:
|
||||
# NOTE: this must be reset here, because cb.args is used as a flag in
|
||||
# the callback class so that bool(cb) of a callback that has been run
|
||||
# becomes False
|
||||
cb.args = None
|
||||
|
||||
# We've finished running one group of callbacks
|
||||
# but we may have more, so before looping check our
|
||||
# switch interval.
|
||||
if count == 0 and self._callbacks:
|
||||
count = CALLBACK_CHECK_COUNT
|
||||
self.update_now()
|
||||
if self.now() >= expiration:
|
||||
now = 0
|
||||
break
|
||||
|
||||
# Update the time before we start going again, if we didn't
|
||||
# just do so.
|
||||
if now != 0:
|
||||
self.update_now()
|
||||
|
||||
if self._callbacks:
|
||||
self._start_callback_timer()
|
||||
finally:
|
||||
self.starting_timer_may_update_loop_time = False
|
||||
|
||||
def _stop_aux_watchers(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def destroy(self):
|
||||
if self._ptr:
|
||||
try:
|
||||
if not self._can_destroy_loop(self._ptr):
|
||||
return False
|
||||
self._stop_aux_watchers()
|
||||
self._destroy_loop(self._ptr)
|
||||
finally:
|
||||
# not ffi.NULL, we don't want something that can be
|
||||
# passed to C and crash later. This will create nice friendly
|
||||
# TypeError from CFFI.
|
||||
self._ptr = None
|
||||
del self._handle_to_self
|
||||
del self._callbacks
|
||||
del self._keepaliveset
|
||||
|
||||
return True
|
||||
|
||||
def _can_destroy_loop(self, ptr):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _destroy_loop(self, ptr):
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def ptr(self):
|
||||
return self._ptr
|
||||
|
||||
@property
|
||||
def WatcherType(self):
|
||||
return self._watchers.watcher
|
||||
|
||||
@property
|
||||
def MAXPRI(self):
|
||||
return 1
|
||||
|
||||
@property
|
||||
def MINPRI(self):
|
||||
return 1
|
||||
|
||||
def _handle_syserr(self, message, errno):
|
||||
try:
|
||||
errno = os.strerror(errno)
|
||||
except: # pylint:disable=bare-except
|
||||
traceback.print_exc()
|
||||
try:
|
||||
message = '%s: %s' % (message, errno)
|
||||
except: # pylint:disable=bare-except
|
||||
traceback.print_exc()
|
||||
self.handle_error(None, SystemError, SystemError(message), None)
|
||||
|
||||
def handle_error(self, context, type, value, tb):
|
||||
handle_error = None
|
||||
error_handler = self.error_handler
|
||||
if error_handler is not None:
|
||||
# we do want to do getattr every time so that setting Hub.handle_error property just works
|
||||
handle_error = getattr(error_handler, 'handle_error', error_handler)
|
||||
handle_error(context, type, value, tb)
|
||||
else:
|
||||
self._default_handle_error(context, type, value, tb)
|
||||
|
||||
def _default_handle_error(self, context, type, value, tb): # pylint:disable=unused-argument
|
||||
# note: Hub sets its own error handler so this is not used by gevent
|
||||
# this is here to make core.loop usable without the rest of gevent
|
||||
# Should cause the loop to stop running.
|
||||
traceback.print_exception(type, value, tb)
|
||||
|
||||
|
||||
def run(self, nowait=False, once=False):
|
||||
raise NotImplementedError()
|
||||
|
||||
def reinit(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def ref(self):
|
||||
# XXX: libuv doesn't do it this way
|
||||
raise NotImplementedError()
|
||||
|
||||
def unref(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def break_(self, how=None):
|
||||
raise NotImplementedError()
|
||||
|
||||
def verify(self):
|
||||
pass
|
||||
|
||||
def now(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def update_now(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def update(self):
|
||||
import warnings
|
||||
warnings.warn("'update' is deprecated; use 'update_now'",
|
||||
DeprecationWarning,
|
||||
stacklevel=2)
|
||||
self.update_now()
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), self._format())
|
||||
|
||||
@property
|
||||
def default(self):
|
||||
return self._default if self._ptr else False
|
||||
|
||||
@property
|
||||
def iteration(self):
|
||||
return -1
|
||||
|
||||
@property
|
||||
def depth(self):
|
||||
return -1
|
||||
|
||||
@property
|
||||
def backend_int(self):
|
||||
return 0
|
||||
|
||||
@property
|
||||
def backend(self):
|
||||
return "default"
|
||||
|
||||
@property
|
||||
def pendingcnt(self):
|
||||
return 0
|
||||
|
||||
def io(self, fd, events, ref=True, priority=None):
|
||||
return self._watchers.io(self, fd, events, ref, priority)
|
||||
|
||||
def timer(self, after, repeat=0.0, ref=True, priority=None):
|
||||
return self._watchers.timer(self, after, repeat, ref, priority)
|
||||
|
||||
def signal(self, signum, ref=True, priority=None):
|
||||
return self._watchers.signal(self, signum, ref, priority)
|
||||
|
||||
def idle(self, ref=True, priority=None):
|
||||
return self._watchers.idle(self, ref, priority)
|
||||
|
||||
def prepare(self, ref=True, priority=None):
|
||||
return self._watchers.prepare(self, ref, priority)
|
||||
|
||||
def check(self, ref=True, priority=None):
|
||||
return self._watchers.check(self, ref, priority)
|
||||
|
||||
def fork(self, ref=True, priority=None):
|
||||
return self._watchers.fork(self, ref, priority)
|
||||
|
||||
def async_(self, ref=True, priority=None):
|
||||
return self._watchers.async_(self, ref, priority)
|
||||
|
||||
# Provide BWC for those that can use 'async' as is
|
||||
locals()['async'] = async_
|
||||
|
||||
if sys.platform != "win32":
|
||||
|
||||
def child(self, pid, trace=0, ref=True):
|
||||
return self._watchers.child(self, pid, trace, ref)
|
||||
|
||||
def install_sigchld(self):
|
||||
pass
|
||||
|
||||
def stat(self, path, interval=0.0, ref=True, priority=None):
|
||||
return self._watchers.stat(self, path, interval, ref, priority)
|
||||
|
||||
def callback(self, priority=None):
|
||||
return callback(self, priority)
|
||||
|
||||
def _setup_for_run_callback(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def run_callback(self, func, *args):
|
||||
# If we happen to already be running callbacks (inside
|
||||
# _run_callbacks), this could happen almost immediately,
|
||||
# without the loop cycling.
|
||||
cb = callback(func, args)
|
||||
self._callbacks.append(cb)
|
||||
self._setup_for_run_callback()
|
||||
|
||||
return cb
|
||||
|
||||
def _format(self):
|
||||
if not self._ptr:
|
||||
return 'destroyed'
|
||||
msg = self.backend
|
||||
if self.default:
|
||||
msg += ' default'
|
||||
msg += ' pending=%s' % self.pendingcnt
|
||||
msg += self._format_details()
|
||||
return msg
|
||||
|
||||
def _format_details(self):
|
||||
msg = ''
|
||||
fileno = self.fileno() # pylint:disable=assignment-from-none
|
||||
try:
|
||||
activecnt = self.activecnt
|
||||
except AttributeError:
|
||||
activecnt = None
|
||||
if activecnt is not None:
|
||||
msg += ' ref=' + repr(activecnt)
|
||||
if fileno is not None:
|
||||
msg += ' fileno=' + repr(fileno)
|
||||
#if sigfd is not None and sigfd != -1:
|
||||
# msg += ' sigfd=' + repr(sigfd)
|
||||
return msg
|
||||
|
||||
def fileno(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def activecnt(self):
|
||||
if not self._ptr:
|
||||
raise ValueError('operation on destroyed loop')
|
||||
return 0
|
@ -0,0 +1,641 @@
|
||||
"""
|
||||
Useful base classes for watchers. The available
|
||||
watchers will depend on the specific event loop.
|
||||
"""
|
||||
# pylint:disable=not-callable
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import signal as signalmodule
|
||||
import functools
|
||||
import warnings
|
||||
|
||||
from gevent._config import config
|
||||
|
||||
try:
|
||||
from tracemalloc import get_object_traceback
|
||||
|
||||
def tracemalloc(init):
|
||||
# PYTHONTRACEMALLOC env var controls this on Python 3.
|
||||
return init
|
||||
except ImportError: # Python < 3.4
|
||||
|
||||
if config.trace_malloc:
|
||||
# Use the same env var to turn this on for Python 2
|
||||
import traceback
|
||||
|
||||
class _TB(object):
|
||||
__slots__ = ('lines',)
|
||||
|
||||
def __init__(self, lines):
|
||||
# These end in newlines, which we don't want for consistency
|
||||
self.lines = [x.rstrip() for x in lines]
|
||||
|
||||
def format(self):
|
||||
return self.lines
|
||||
|
||||
def tracemalloc(init):
|
||||
@functools.wraps(init)
|
||||
def traces(self, *args, **kwargs):
|
||||
init(self, *args, **kwargs)
|
||||
self._captured_malloc = _TB(traceback.format_stack())
|
||||
return traces
|
||||
|
||||
def get_object_traceback(obj):
|
||||
return obj._captured_malloc
|
||||
|
||||
else:
|
||||
def get_object_traceback(_obj):
|
||||
return None
|
||||
|
||||
def tracemalloc(init):
|
||||
return init
|
||||
|
||||
from gevent._compat import fsencode
|
||||
|
||||
from gevent._ffi import _dbg # pylint:disable=unused-import
|
||||
from gevent._ffi import GEVENT_DEBUG_LEVEL
|
||||
from gevent._ffi import DEBUG
|
||||
from gevent._ffi.loop import GEVENT_CORE_EVENTS
|
||||
from gevent._ffi.loop import _NOARGS
|
||||
|
||||
ALLOW_WATCHER_DEL = GEVENT_DEBUG_LEVEL >= DEBUG
|
||||
|
||||
__all__ = [
|
||||
|
||||
]
|
||||
|
||||
try:
|
||||
ResourceWarning
|
||||
except NameError:
|
||||
class ResourceWarning(Warning):
|
||||
"Python 2 fallback"
|
||||
|
||||
class _NoWatcherResult(int):
|
||||
|
||||
def __repr__(self):
|
||||
return "<NoWatcher>"
|
||||
|
||||
_NoWatcherResult = _NoWatcherResult(0)
|
||||
|
||||
def events_to_str(event_field, all_events):
|
||||
result = []
|
||||
for (flag, string) in all_events:
|
||||
c_flag = flag
|
||||
if event_field & c_flag:
|
||||
result.append(string)
|
||||
event_field = event_field & (~c_flag)
|
||||
if not event_field:
|
||||
break
|
||||
if event_field:
|
||||
result.append(hex(event_field))
|
||||
return '|'.join(result)
|
||||
|
||||
|
||||
def not_while_active(func):
|
||||
@functools.wraps(func)
|
||||
def nw(self, *args, **kwargs):
|
||||
if self.active:
|
||||
raise ValueError("not while active")
|
||||
func(self, *args, **kwargs)
|
||||
return nw
|
||||
|
||||
def only_if_watcher(func):
|
||||
@functools.wraps(func)
|
||||
def if_w(self):
|
||||
if self._watcher:
|
||||
return func(self)
|
||||
return _NoWatcherResult
|
||||
return if_w
|
||||
|
||||
|
||||
class LazyOnClass(object):
|
||||
|
||||
@classmethod
|
||||
def lazy(cls, cls_dict, func):
|
||||
"Put a LazyOnClass object in *cls_dict* with the same name as *func*"
|
||||
cls_dict[func.__name__] = cls(func)
|
||||
|
||||
def __init__(self, func, name=None):
|
||||
self.name = name or func.__name__
|
||||
self.func = func
|
||||
|
||||
def __get__(self, inst, klass):
|
||||
if inst is None: # pragma: no cover
|
||||
return self
|
||||
|
||||
val = self.func(inst)
|
||||
setattr(klass, self.name, val)
|
||||
return val
|
||||
|
||||
|
||||
class AbstractWatcherType(type):
|
||||
"""
|
||||
Base metaclass for watchers.
|
||||
|
||||
To use, you will:
|
||||
|
||||
- subclass the watcher class defined from this type.
|
||||
- optionally subclass this type
|
||||
"""
|
||||
# pylint:disable=bad-mcs-classmethod-argument
|
||||
|
||||
_FFI = None
|
||||
_LIB = None
|
||||
|
||||
def __new__(cls, name, bases, cls_dict):
|
||||
if name != 'watcher' and not cls_dict.get('_watcher_skip_ffi'):
|
||||
cls._fill_watcher(name, bases, cls_dict)
|
||||
if '__del__' in cls_dict and not ALLOW_WATCHER_DEL: # pragma: no cover
|
||||
raise TypeError("CFFI watchers are not allowed to have __del__")
|
||||
return type.__new__(cls, name, bases, cls_dict)
|
||||
|
||||
@classmethod
|
||||
def _fill_watcher(cls, name, bases, cls_dict):
|
||||
# TODO: refactor smaller
|
||||
# pylint:disable=too-many-locals
|
||||
if name.endswith('_'):
|
||||
# Strip trailing _ added to avoid keyword duplications
|
||||
# e.g., async_
|
||||
name = name[:-1]
|
||||
|
||||
def _mro_get(attr, bases, error=True):
|
||||
for b in bases:
|
||||
try:
|
||||
return getattr(b, attr)
|
||||
except AttributeError:
|
||||
continue
|
||||
if error: # pragma: no cover
|
||||
raise AttributeError(attr)
|
||||
_watcher_prefix = cls_dict.get('_watcher_prefix') or _mro_get('_watcher_prefix', bases)
|
||||
|
||||
if '_watcher_type' not in cls_dict:
|
||||
watcher_type = _watcher_prefix + '_' + name
|
||||
cls_dict['_watcher_type'] = watcher_type
|
||||
elif not cls_dict['_watcher_type'].startswith(_watcher_prefix):
|
||||
watcher_type = _watcher_prefix + '_' + cls_dict['_watcher_type']
|
||||
cls_dict['_watcher_type'] = watcher_type
|
||||
|
||||
active_name = _watcher_prefix + '_is_active'
|
||||
|
||||
def _watcher_is_active(self):
|
||||
return getattr(self._LIB, active_name)
|
||||
|
||||
LazyOnClass.lazy(cls_dict, _watcher_is_active)
|
||||
|
||||
watcher_struct_name = cls_dict.get('_watcher_struct_name')
|
||||
if not watcher_struct_name:
|
||||
watcher_struct_pattern = (cls_dict.get('_watcher_struct_pattern')
|
||||
or _mro_get('_watcher_struct_pattern', bases, False)
|
||||
or 'struct %s')
|
||||
watcher_struct_name = watcher_struct_pattern % (watcher_type,)
|
||||
|
||||
def _watcher_struct_pointer_type(self):
|
||||
return self._FFI.typeof(watcher_struct_name + ' *')
|
||||
|
||||
LazyOnClass.lazy(cls_dict, _watcher_struct_pointer_type)
|
||||
|
||||
callback_name = (cls_dict.get('_watcher_callback_name')
|
||||
or _mro_get('_watcher_callback_name', bases, False)
|
||||
or '_gevent_generic_callback')
|
||||
|
||||
def _watcher_callback(self):
|
||||
return self._FFI.addressof(self._LIB, callback_name)
|
||||
|
||||
LazyOnClass.lazy(cls_dict, _watcher_callback)
|
||||
|
||||
def _make_meth(name, watcher_name):
|
||||
def meth(self):
|
||||
lib_name = self._watcher_type + '_' + name
|
||||
return getattr(self._LIB, lib_name)
|
||||
meth.__name__ = watcher_name
|
||||
return meth
|
||||
|
||||
for meth_name in 'start', 'stop', 'init':
|
||||
watcher_name = '_watcher' + '_' + meth_name
|
||||
if watcher_name not in cls_dict:
|
||||
LazyOnClass.lazy(cls_dict, _make_meth(meth_name, watcher_name))
|
||||
|
||||
def new_handle(cls, obj):
|
||||
return cls._FFI.new_handle(obj)
|
||||
|
||||
def new(cls, kind):
|
||||
return cls._FFI.new(kind)
|
||||
|
||||
class watcher(object):
|
||||
|
||||
_callback = None
|
||||
_args = None
|
||||
_watcher = None
|
||||
# self._handle has a reference to self, keeping it alive.
|
||||
# We must keep self._handle alive for ffi.from_handle() to be
|
||||
# able to work. We only fill this in when we are started,
|
||||
# and when we are stopped we destroy it.
|
||||
# NOTE: This is a GC cycle, so we keep it around for as short
|
||||
# as possible.
|
||||
_handle = None
|
||||
|
||||
@tracemalloc
|
||||
def __init__(self, _loop, ref=True, priority=None, args=_NOARGS):
|
||||
self.loop = _loop
|
||||
self.__init_priority = priority
|
||||
self.__init_args = args
|
||||
self.__init_ref = ref
|
||||
self._watcher_full_init()
|
||||
|
||||
|
||||
def _watcher_full_init(self):
|
||||
priority = self.__init_priority
|
||||
ref = self.__init_ref
|
||||
args = self.__init_args
|
||||
|
||||
self._watcher_create(ref)
|
||||
|
||||
if priority is not None:
|
||||
self._watcher_ffi_set_priority(priority)
|
||||
|
||||
try:
|
||||
self._watcher_ffi_init(args)
|
||||
except:
|
||||
# Let these be GC'd immediately.
|
||||
# If we keep them around to when *we* are gc'd,
|
||||
# they're probably invalid, meaning any native calls
|
||||
# we do then to close() them are likely to fail
|
||||
self._watcher = None
|
||||
raise
|
||||
self._watcher_ffi_set_init_ref(ref)
|
||||
|
||||
@classmethod
|
||||
def _watcher_ffi_close(cls, ffi_watcher):
|
||||
pass
|
||||
|
||||
def _watcher_create(self, ref): # pylint:disable=unused-argument
|
||||
self._watcher = self._watcher_new()
|
||||
|
||||
def _watcher_new(self):
|
||||
return type(self).new(self._watcher_struct_pointer_type) # pylint:disable=no-member
|
||||
|
||||
def _watcher_ffi_set_init_ref(self, ref):
|
||||
pass
|
||||
|
||||
def _watcher_ffi_set_priority(self, priority):
|
||||
pass
|
||||
|
||||
def _watcher_ffi_init(self, args):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _watcher_ffi_start(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _watcher_ffi_stop(self):
|
||||
self._watcher_stop(self.loop._ptr, self._watcher)
|
||||
|
||||
def _watcher_ffi_ref(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _watcher_ffi_unref(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _watcher_ffi_start_unref(self):
|
||||
# While a watcher is active, we don't keep it
|
||||
# referenced. This allows a timer, for example, to be started,
|
||||
# and still allow the loop to end if there is nothing
|
||||
# else to do. see test__order.TestSleep0 for one example.
|
||||
self._watcher_ffi_unref()
|
||||
|
||||
def _watcher_ffi_stop_ref(self):
|
||||
self._watcher_ffi_ref()
|
||||
|
||||
# A string identifying the type of libev object we watch, e.g., 'ev_io'
|
||||
# This should be a class attribute.
|
||||
_watcher_type = None
|
||||
# A class attribute that is the callback on the libev object that init's the C struct,
|
||||
# e.g., libev.ev_io_init. If None, will be set by _init_subclasses.
|
||||
_watcher_init = None
|
||||
# A class attribute that is the callback on the libev object that starts the C watcher,
|
||||
# e.g., libev.ev_io_start. If None, will be set by _init_subclasses.
|
||||
_watcher_start = None
|
||||
# A class attribute that is the callback on the libev object that stops the C watcher,
|
||||
# e.g., libev.ev_io_stop. If None, will be set by _init_subclasses.
|
||||
_watcher_stop = None
|
||||
# A cffi ctype object identifying the struct pointer we create.
|
||||
# This is a class attribute set based on the _watcher_type
|
||||
_watcher_struct_pointer_type = None
|
||||
# The attribute of the libev object identifying the custom
|
||||
# callback function for this type of watcher. This is a class
|
||||
# attribute set based on the _watcher_type in _init_subclasses.
|
||||
_watcher_callback = None
|
||||
_watcher_is_active = None
|
||||
|
||||
def close(self):
|
||||
if self._watcher is None:
|
||||
return
|
||||
|
||||
self.stop()
|
||||
_watcher = self._watcher
|
||||
self._watcher = None
|
||||
self._watcher_set_data(_watcher, self._FFI.NULL) # pylint: disable=no-member
|
||||
self._watcher_ffi_close(_watcher)
|
||||
self.loop = None
|
||||
|
||||
def _watcher_set_data(self, the_watcher, data):
|
||||
# This abstraction exists for the sole benefit of
|
||||
# libuv.watcher.stat, which "subclasses" uv_handle_t.
|
||||
# Can we do something to avoid this extra function call?
|
||||
the_watcher.data = data
|
||||
return data
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, t, v, tb):
|
||||
self.close()
|
||||
|
||||
if ALLOW_WATCHER_DEL:
|
||||
def __del__(self):
|
||||
if self._watcher:
|
||||
tb = get_object_traceback(self)
|
||||
tb_msg = ''
|
||||
if tb is not None:
|
||||
tb_msg = '\n'.join(tb.format())
|
||||
tb_msg = '\nTraceback:\n' + tb_msg
|
||||
warnings.warn("Failed to close watcher %r%s" % (self, tb_msg),
|
||||
ResourceWarning)
|
||||
|
||||
# may fail if __init__ did; will be harmlessly printed
|
||||
self.close()
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
formats = self._format()
|
||||
result = "<%s at 0x%x%s" % (self.__class__.__name__, id(self), formats)
|
||||
if self.pending:
|
||||
result += " pending"
|
||||
if self.callback is not None:
|
||||
fself = getattr(self.callback, '__self__', None)
|
||||
if fself is self:
|
||||
result += " callback=<bound method %s of self>" % (self.callback.__name__)
|
||||
else:
|
||||
result += " callback=%r" % (self.callback, )
|
||||
if self.args is not None:
|
||||
result += " args=%r" % (self.args, )
|
||||
if self.callback is None and self.args is None:
|
||||
result += " stopped"
|
||||
result += " watcher=%s" % (self._watcher)
|
||||
result += " handle=%s" % (self._watcher_handle)
|
||||
result += " ref=%s" % (self.ref)
|
||||
return result + ">"
|
||||
|
||||
@property
|
||||
def _watcher_handle(self):
|
||||
if self._watcher:
|
||||
return self._watcher.data
|
||||
|
||||
def _format(self):
|
||||
return ''
|
||||
|
||||
@property
|
||||
def ref(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _get_callback(self):
|
||||
return self._callback
|
||||
|
||||
def _set_callback(self, cb):
|
||||
if not callable(cb) and cb is not None:
|
||||
raise TypeError("Expected callable, not %r" % (cb, ))
|
||||
if cb is None:
|
||||
if '_callback' in self.__dict__:
|
||||
del self._callback
|
||||
else:
|
||||
self._callback = cb
|
||||
callback = property(_get_callback, _set_callback)
|
||||
|
||||
def _get_args(self):
|
||||
return self._args
|
||||
|
||||
def _set_args(self, args):
|
||||
if not isinstance(args, tuple) and args is not None:
|
||||
raise TypeError("args must be a tuple or None")
|
||||
if args is None:
|
||||
if '_args' in self.__dict__:
|
||||
del self._args
|
||||
else:
|
||||
self._args = args
|
||||
|
||||
args = property(_get_args, _set_args)
|
||||
|
||||
def start(self, callback, *args):
|
||||
if callback is None:
|
||||
raise TypeError('callback must be callable, not None')
|
||||
self.callback = callback
|
||||
self.args = args or _NOARGS
|
||||
self.loop._keepaliveset.add(self)
|
||||
self._handle = self._watcher_set_data(self._watcher, type(self).new_handle(self)) # pylint:disable=no-member
|
||||
self._watcher_ffi_start()
|
||||
self._watcher_ffi_start_unref()
|
||||
|
||||
def stop(self):
|
||||
if self._callback is None:
|
||||
assert self.loop is None or self not in self.loop._keepaliveset
|
||||
return
|
||||
self._watcher_ffi_stop_ref()
|
||||
self._watcher_ffi_stop()
|
||||
self.loop._keepaliveset.discard(self)
|
||||
self._handle = None
|
||||
self._watcher_set_data(self._watcher, self._FFI.NULL) # pylint:disable=no-member
|
||||
self.callback = None
|
||||
self.args = None
|
||||
|
||||
def _get_priority(self):
|
||||
return None
|
||||
|
||||
@not_while_active
|
||||
def _set_priority(self, priority):
|
||||
pass
|
||||
|
||||
priority = property(_get_priority, _set_priority)
|
||||
|
||||
|
||||
@property
|
||||
def active(self):
|
||||
if self._watcher is not None and self._watcher_is_active(self._watcher):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def pending(self):
|
||||
return False
|
||||
|
||||
watcher = AbstractWatcherType('watcher', (object,), dict(watcher.__dict__))
|
||||
|
||||
class IoMixin(object):
|
||||
|
||||
EVENT_MASK = 0
|
||||
|
||||
def __init__(self, loop, fd, events, ref=True, priority=None, _args=None):
|
||||
# Win32 only works with sockets, and only when we use libuv, because
|
||||
# we don't use _open_osfhandle. See libuv/watchers.py:io for a description.
|
||||
if fd < 0:
|
||||
raise ValueError('fd must be non-negative: %r' % fd)
|
||||
if events & ~self.EVENT_MASK:
|
||||
raise ValueError('illegal event mask: %r' % events)
|
||||
self._fd = fd
|
||||
super(IoMixin, self).__init__(loop, ref=ref, priority=priority,
|
||||
args=_args or (fd, events))
|
||||
|
||||
def start(self, callback, *args, **kwargs):
|
||||
args = args or _NOARGS
|
||||
if kwargs.get('pass_events'):
|
||||
args = (GEVENT_CORE_EVENTS, ) + args
|
||||
super(IoMixin, self).start(callback, *args)
|
||||
|
||||
def _format(self):
|
||||
return ' fd=%d' % self._fd
|
||||
|
||||
class TimerMixin(object):
|
||||
_watcher_type = 'timer'
|
||||
|
||||
def __init__(self, loop, after=0.0, repeat=0.0, ref=True, priority=None):
|
||||
if repeat < 0.0:
|
||||
raise ValueError("repeat must be positive or zero: %r" % repeat)
|
||||
self._after = after
|
||||
self._repeat = repeat
|
||||
super(TimerMixin, self).__init__(loop, ref=ref, priority=priority, args=(after, repeat))
|
||||
|
||||
def start(self, callback, *args, **kw):
|
||||
update = kw.get("update", self.loop.starting_timer_may_update_loop_time)
|
||||
if update:
|
||||
# Quoth the libev doc: "This is a costly operation and is
|
||||
# usually done automatically within ev_run(). This
|
||||
# function is rarely useful, but when some event callback
|
||||
# runs for a very long time without entering the event
|
||||
# loop, updating libev's idea of the current time is a
|
||||
# good idea."
|
||||
|
||||
# 1.3 changed the default for this to False *unless* the loop is
|
||||
# running a callback; see libuv for details. Note that
|
||||
# starting Timeout objects still sets this to true.
|
||||
|
||||
self.loop.update_now()
|
||||
super(TimerMixin, self).start(callback, *args)
|
||||
|
||||
def again(self, callback, *args, **kw):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class SignalMixin(object):
|
||||
_watcher_type = 'signal'
|
||||
|
||||
def __init__(self, loop, signalnum, ref=True, priority=None):
|
||||
if signalnum < 1 or signalnum >= signalmodule.NSIG:
|
||||
raise ValueError('illegal signal number: %r' % signalnum)
|
||||
# still possible to crash on one of libev's asserts:
|
||||
# 1) "libev: ev_signal_start called with illegal signal number"
|
||||
# EV_NSIG might be different from signal.NSIG on some platforms
|
||||
# 2) "libev: a signal must not be attached to two different loops"
|
||||
# we probably could check that in LIBEV_EMBED mode, but not in general
|
||||
self._signalnum = signalnum
|
||||
super(SignalMixin, self).__init__(loop, ref=ref, priority=priority, args=(signalnum, ))
|
||||
|
||||
|
||||
class IdleMixin(object):
|
||||
_watcher_type = 'idle'
|
||||
|
||||
|
||||
class PrepareMixin(object):
|
||||
_watcher_type = 'prepare'
|
||||
|
||||
|
||||
class CheckMixin(object):
|
||||
_watcher_type = 'check'
|
||||
|
||||
|
||||
class ForkMixin(object):
|
||||
_watcher_type = 'fork'
|
||||
|
||||
|
||||
class AsyncMixin(object):
|
||||
_watcher_type = 'async'
|
||||
|
||||
def send(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def pending(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class ChildMixin(object):
|
||||
|
||||
# hack for libuv which doesn't extend watcher
|
||||
_CALL_SUPER_INIT = True
|
||||
|
||||
def __init__(self, loop, pid, trace=0, ref=True):
|
||||
if not loop.default:
|
||||
raise TypeError('child watchers are only available on the default loop')
|
||||
loop.install_sigchld()
|
||||
self._pid = pid
|
||||
if self._CALL_SUPER_INIT:
|
||||
super(ChildMixin, self).__init__(loop, ref=ref, args=(pid, trace))
|
||||
|
||||
def _format(self):
|
||||
return ' pid=%r rstatus=%r' % (self.pid, self.rstatus)
|
||||
|
||||
@property
|
||||
def pid(self):
|
||||
return self._pid
|
||||
|
||||
@property
|
||||
def rpid(self):
|
||||
# The received pid, the result of the waitpid() call.
|
||||
return self._rpid
|
||||
|
||||
_rpid = None
|
||||
_rstatus = 0
|
||||
|
||||
@property
|
||||
def rstatus(self):
|
||||
return self._rstatus
|
||||
|
||||
class StatMixin(object):
|
||||
|
||||
@staticmethod
|
||||
def _encode_path(path):
|
||||
return fsencode(path)
|
||||
|
||||
def __init__(self, _loop, path, interval=0.0, ref=True, priority=None):
|
||||
# Store the encoded path in the same attribute that corecext does
|
||||
self._paths = self._encode_path(path)
|
||||
|
||||
# Keep the original path to avoid re-encoding, especially on Python 3
|
||||
self._path = path
|
||||
|
||||
# Although CFFI would automatically convert a bytes object into a char* when
|
||||
# calling ev_stat_init(..., char*, ...), on PyPy the char* pointer is not
|
||||
# guaranteed to live past the function call. On CPython, only with a constant/interned
|
||||
# bytes object is the pointer guaranteed to last path the function call. (And since
|
||||
# Python 3 is pretty much guaranteed to produce a newly-encoded bytes object above, thats
|
||||
# rarely the case). Therefore, we must keep a reference to the produced cdata object
|
||||
# so that the struct ev_stat_watcher's `path` pointer doesn't become invalid/deallocated
|
||||
self._cpath = self._FFI.new('char[]', self._paths)
|
||||
|
||||
self._interval = interval
|
||||
super(StatMixin, self).__init__(_loop, ref=ref, priority=priority,
|
||||
args=(self._cpath,
|
||||
interval))
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self._path
|
||||
|
||||
@property
|
||||
def attr(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def prev(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def interval(self):
|
||||
return self._interval
|
@ -0,0 +1,281 @@
|
||||
from __future__ import absolute_import, print_function, division
|
||||
|
||||
try:
|
||||
from errno import EBADF
|
||||
except ImportError:
|
||||
EBADF = 9
|
||||
|
||||
import os
|
||||
from io import TextIOWrapper
|
||||
import functools
|
||||
import sys
|
||||
|
||||
|
||||
from gevent.hub import _get_hub_noargs as get_hub
|
||||
from gevent._compat import integer_types
|
||||
from gevent._compat import reraise
|
||||
from gevent.lock import Semaphore, DummySemaphore
|
||||
|
||||
class cancel_wait_ex(IOError):
|
||||
|
||||
def __init__(self):
|
||||
super(cancel_wait_ex, self).__init__(
|
||||
EBADF, 'File descriptor was closed in another greenlet')
|
||||
|
||||
|
||||
class FileObjectClosed(IOError):
    """
    Raised when a method is invoked on a FileObject wrapper that has
    already been closed.
    """

    def __init__(self):
        IOError.__init__(
            self,
            EBADF, 'Bad file descriptor (FileObject was closed)')
|
||||
class FileObjectBase(object):
    """
    Internal base class to ensure a level of consistency
    between FileObjectPosix and FileObjectThread.

    Wraps an :class:`io.IOBase`-like object and copies a fixed set of its
    methods directly onto each instance.
    """

    # Methods copied from the wrapped IO object onto the instance,
    # provided the IO object implements them and this class does not.
    _delegate_methods = (
        # General methods
        'flush',
        'fileno',
        'writable',
        'readable',
        'seek',
        'seekable',
        'tell',

        # Read
        'read',
        'readline',
        'readlines',
        'read1',

        # Write
        'write',
        'writelines',
        'truncate',
    )

    # Whether we are translating universal newlines or not.
    _translate = False

    _translate_encoding = None
    _translate_errors = None

    def __init__(self, io, closefd):
        """
        :param io: An io.IOBase-like object.
        :param closefd: Not consulted by this base class itself; saved
            (and handed to ``_do_close``) for subclass compatibility.
        """
        self._io = io
        self._close = closefd

        if self._translate:
            # translate_newlines assigns through the `io` property,
            # which re-runs method delegation for us.
            self.translate_newlines(None, self._translate_encoding, self._translate_errors)
        else:
            self._do_delegate_methods()

    def _get_io(self):
        return self._io

    def _set_io(self, new_io):
        # Historically we either hand-wrote all the delegation methods
        # to use self.io, or we simply used __getattr__ to look them up at
        # runtime. This meant people could change the io attribute on the fly
        # and it would mostly work (subprocess.py used to do that). We don't
        # recommend that, but we still support it.
        self._io = new_io
        self._do_delegate_methods()

    io = property(_get_io, _set_io)
    del _get_io
    del _set_io

    def _do_delegate_methods(self):
        for name in self._delegate_methods:
            underlying = getattr(self._io, name, None)
            defined_here = hasattr(type(self), name)
            if underlying and not defined_here:
                setattr(self, name, self._wrap_method(underlying))
            elif hasattr(self, name) and not defined_here:
                # A previous IO object supplied this method but the
                # current one does not; drop the stale delegation.
                delattr(self, name)

    def _wrap_method(self, method):
        """
        Hook for subclasses: wrap a method being copied into our
        dictionary from the underlying io object to do something
        special or different, if necessary. Default is identity.
        """
        return method

    def translate_newlines(self, mode, *text_args, **text_kwargs):
        wrapper = TextIOWrapper(self._io, *text_args, **text_kwargs)
        if mode:
            wrapper.mode = mode
        # Assigning through the property refreshes the delegated methods.
        self.io = wrapper
        self._translate = True

    @property
    def closed(self):
        """True if the file is closed"""
        return self._io is None

    def close(self):
        underlying = self._io
        if underlying is None:
            return

        # Drop the reference *before* _do_close so that even if closing
        # raises, this wrapper is considered closed.
        self._io = None
        self._do_close(underlying, self._close)

    def _do_close(self, fobj, closefd):
        raise NotImplementedError()

    def __getattr__(self, name):
        if self._io is None:
            raise FileObjectClosed()
        return getattr(self._io, name)

    def __repr__(self):
        return '<%s _fobj=%r%s>' % (self.__class__.__name__, self.io, self._extra_repr())

    def _extra_repr(self):
        return ''

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
||||
|
||||
class FileObjectBlock(FileObjectBase):
    """
    A file wrapper that performs IO with ordinary blocking semantics
    (no cooperation with the event loop).
    """

    def __init__(self, fobj, *args, **kwargs):
        """
        :param fobj: A file-like object, or an integer file descriptor
            to hand to :func:`os.fdopen` together with ``*args``.
        :keyword close: When true (the default), closing this wrapper
            also closes the underlying object.
        """
        closefd = kwargs.pop('close', True)
        if kwargs:
            raise TypeError('Unexpected arguments: %r' % kwargs.keys())
        if isinstance(fobj, integer_types):
            # we cannot support close=False here, since the fdopen
            # object will close the descriptor when it is closed.
            if not closefd:
                raise TypeError('FileObjectBlock does not support close=False on an fd.')
            fobj = os.fdopen(fobj, *args)
        FileObjectBase.__init__(self, fobj, closefd)

    def _do_close(self, fobj, closefd):
        # The underlying object is always closed (close=False plus a raw
        # fd was already rejected in __init__).
        fobj.close()
||||
|
||||
class FileObjectThread(FileObjectBase):
    """
    A file-like object wrapping another file-like object, performing all blocking
    operations on that object in a background thread.

    .. caution::
        Attempting to change the threadpool or lock of an existing FileObjectThread
        has undefined consequences.

    .. versionchanged:: 1.1b1
       The file object is closed using the threadpool. Note that whether or
       not this action is synchronous or asynchronous is not documented.

    """

    def __init__(self, fobj, mode=None, bufsize=-1, close=True, threadpool=None, lock=True):
        """
        :param fobj: The underlying file-like object to wrap, or an integer fileno
           that will be pass to :func:`os.fdopen` along with *mode* and *bufsize*.
        :keyword mode: Only used when *fobj* is an integer fileno; passed to
           :func:`os.fdopen`. When ``None`` (the default), *bufsize* must be -1.
        :keyword bufsize: Only used when *fobj* is an integer fileno and *mode*
           is given; passed to :func:`os.fdopen`.
        :keyword threadpool: The pool in which to run blocking operations.
           Defaults to the current hub's threadpool.
        :keyword bool lock: If True (the default) then all operations will
           be performed one-by-one. Note that this does not guarantee that, if using
           this file object from multiple threads/greenlets, operations will be performed
           in any particular order, only that no two operations will be attempted at the
           same time. You can also pass your own :class:`gevent.lock.Semaphore` to synchronize
           file operations with an external resource.
        :keyword bool close: If True (the default) then when this object is closed,
           the underlying object is closed as well.
        """
        closefd = close
        # Default to the hub's shared threadpool unless the caller supplied one.
        self.threadpool = threadpool or get_hub().threadpool
        self.lock = lock
        if self.lock is True:
            self.lock = Semaphore()
        elif not self.lock:
            # A falsy lock means "no locking": substitute a no-op semaphore.
            self.lock = DummySemaphore()
        if not hasattr(self.lock, '__enter__'):
            raise TypeError('Expected a Semaphore or boolean, got %r' % type(self.lock))
        if isinstance(fobj, integer_types):
            if not closefd:
                # we cannot do this, since fdopen object will close the descriptor
                raise TypeError('FileObjectThread does not support close=False on an fd.')
            if mode is None:
                assert bufsize == -1, "If you use the default mode, you can't choose a bufsize"
                fobj = os.fdopen(fobj)
            else:
                fobj = os.fdopen(fobj, mode, bufsize)

        # A one-element mutable cell shared with the wrappers created by
        # _wrap_method, so they can detect closure without referring to self.
        self.__io_holder = [fobj] # signal for _wrap_method
        super(FileObjectThread, self).__init__(fobj, closefd)

    def _do_close(self, fobj, closefd):
        self.__io_holder[0] = None # for _wrap_method
        try:
            # Flush through the threadpool, serialized by our lock.
            with self.lock:
                self.threadpool.apply(fobj.flush)
        finally:
            if closefd:
                # Note that we're not taking the lock; older code
                # did fobj.close() without going through the threadpool at all,
                # so acquiring the lock could potentially introduce deadlocks
                # that weren't present before. Avoiding the lock doesn't make
                # the existing race condition any worse.
                # We wrap the close in an exception handler and re-raise directly
                # to avoid the (common, expected) IOError from being logged by the pool
                def close(_fobj=fobj):
                    try:
                        _fobj.close()
                    except: # pylint:disable=bare-except
                        return sys.exc_info()
                    finally:
                        _fobj = None
                del fobj

                exc_info = self.threadpool.apply(close)
                del close

                if exc_info:
                    reraise(*exc_info)

    def _do_delegate_methods(self):
        super(FileObjectThread, self)._do_delegate_methods()
        # Provide read1 for readable objects that lack it, so buffered
        # wrappers can use it; plain read is an acceptable substitute here.
        if not hasattr(self, 'read1') and 'r' in getattr(self._io, 'mode', ''):
            self.read1 = self.read
        self.__io_holder[0] = self._io

    def _extra_repr(self):
        return ' threadpool=%r' % (self.threadpool,)

    def __iter__(self):
        return self

    def next(self):
        line = self.readline()
        if line:
            return line
        raise StopIteration
    __next__ = next

    def _wrap_method(self, method):
        # NOTE: We are careful to avoid introducing a refcycle
        # within self. Our wrapper cannot refer to self.
        io_holder = self.__io_holder
        lock = self.lock
        threadpool = self.threadpool

        @functools.wraps(method)
        def thread_method(*args, **kwargs):
            if io_holder[0] is None:
                # This is different than FileObjectPosix, etc,
                # because we want to save the expensive trip through
                # the threadpool.
                raise FileObjectClosed()
            # Serialize, then run the blocking call in the pool.
            with lock:
                return threadpool.apply(method, args, kwargs)

        return thread_method
|
@ -0,0 +1,357 @@
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
import io
|
||||
from io import BufferedReader
|
||||
from io import BufferedWriter
|
||||
from io import BytesIO
|
||||
from io import DEFAULT_BUFFER_SIZE
|
||||
from io import RawIOBase
|
||||
from io import UnsupportedOperation
|
||||
|
||||
from gevent._compat import reraise
|
||||
from gevent._fileobjectcommon import cancel_wait_ex
|
||||
from gevent._fileobjectcommon import FileObjectBase
|
||||
from gevent.hub import get_hub
|
||||
from gevent.os import _read
|
||||
from gevent.os import _write
|
||||
from gevent.os import ignored_errors
|
||||
from gevent.os import make_nonblocking
|
||||
|
||||
|
||||
class GreenFileDescriptorIO(RawIOBase):
    """
    A :class:`io.RawIOBase` over a file descriptor that is put in
    non-blocking mode; reads and writes that would block instead wait
    cooperatively on the hub's io watchers.
    """

    # Note that RawIOBase has a __del__ method that calls
    # self.close(). (In C implementations like CPython, this is
    # the type's tp_dealloc slot; prior to Python 3, the object doesn't
    # appear to have a __del__ method, even though it functionally does)

    _read_event = None
    _write_event = None
    _closed = False
    # Tri-state: None means "not yet probed with lseek".
    _seekable = None

    def __init__(self, fileno, mode='r', closefd=True):
        RawIOBase.__init__(self) # Python 2: pylint:disable=no-member,non-parent-init-called

        self._closefd = closefd
        self._fileno = fileno
        make_nonblocking(fileno)
        readable = 'r' in mode
        writable = 'w' in mode

        self.hub = get_hub()
        io_watcher = self.hub.loop.io
        try:
            # The 1 and 2 arguments are the loop's read/write event
            # masks (presumably READ and WRITE; verify against the
            # watcher implementation in use).
            if readable:
                self._read_event = io_watcher(fileno, 1)

            if writable:
                self._write_event = io_watcher(fileno, 2)
        except:
            # If anything goes wrong, it's important to go ahead and
            # close these watchers *now*, especially under libuv, so
            # that they don't get eventually reclaimed by the garbage
            # collector at some random time, thanks to the C level
            # slot (even though we don't seem to have any actual references
            # at the Python level). Previously, if we didn't close now,
            # that random close in the future would cause issues if we had duplicated
            # the fileno (if a wrapping with statement had closed an open fileobject,
            # for example)

            # test__fileobject can show a failure if this doesn't happen
            # TRAVIS=true GEVENT_LOOP=libuv python -m gevent.tests.test__fileobject \
            #    TestFileObjectPosix.test_seek TestFileObjectThread.test_bufsize_0
            self.close()
            raise

    def readable(self):
        # Readable iff a read watcher was created ('r' was in mode).
        return self._read_event is not None

    def writable(self):
        # Writable iff a write watcher was created ('w' was in mode).
        return self._write_event is not None

    def seekable(self):
        # Probe once with a no-op lseek and cache the result.
        if self._seekable is None:
            try:
                os.lseek(self._fileno, 0, os.SEEK_CUR)
            except OSError:
                self._seekable = False
            else:
                self._seekable = True
        return self._seekable

    def fileno(self):
        return self._fileno

    @property
    def closed(self):
        return self._closed

    def __destroy_events(self):
        # Detach and cancel both watchers, waking any greenlets blocked
        # on them with cancel_wait_ex.
        read_event = self._read_event
        write_event = self._write_event
        hub = self.hub
        self.hub = self._read_event = self._write_event = None

        if read_event is not None:
            hub.cancel_wait(read_event, cancel_wait_ex, True)
        if write_event is not None:
            hub.cancel_wait(write_event, cancel_wait_ex, True)

    def close(self):
        if self._closed:
            return
        self.flush()
        # TODO: Can we use 'read_event is not None and write_event is
        # not None' to mean _closed?
        self._closed = True
        self.__destroy_events()
        fileno = self._fileno
        if self._closefd:
            self._fileno = None
            os.close(fileno)

    # RawIOBase provides a 'read' method that will call readall() if
    # the `size` was missing or -1 and otherwise call readinto(). We
    # want to take advantage of this to avoid single byte reads when
    # possible. This is highlighted by a bug in BufferedIOReader that
    # calls read() in a loop when its readall() method is invoked;
    # this was fixed in Python 3.3, but we still need our workaround for 2.7. See
    # https://github.com/gevent/gevent/issues/675)
    def __read(self, n):
        if self._read_event is None:
            raise UnsupportedOperation('read')
        # Retry the non-blocking read, waiting on the read watcher
        # whenever it fails with an "expected" error (e.g. EAGAIN).
        while True:
            try:
                return _read(self._fileno, n)
            except (IOError, OSError) as ex:
                if ex.args[0] not in ignored_errors:
                    raise
            self.hub.wait(self._read_event)

    def readall(self):
        # Accumulate chunks until EOF (empty read).
        ret = BytesIO()
        while True:
            data = self.__read(DEFAULT_BUFFER_SIZE)
            if not data:
                break
            ret.write(data)
        return ret.getvalue()

    def readinto(self, b):
        data = self.__read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            # Python 2 array.array doesn't accept a bytes slice
            # assignment directly; convert first.
            import array
            if not isinstance(b, array.array):
                raise err
            b[:n] = array.array(b'b', data)
        return n

    def write(self, b):
        if self._write_event is None:
            raise UnsupportedOperation('write')
        # Same retry strategy as __read, on the write watcher.
        while True:
            try:
                return _write(self._fileno, b)
            except (IOError, OSError) as ex:
                if ex.args[0] not in ignored_errors:
                    raise
            self.hub.wait(self._write_event)

    def seek(self, offset, whence=0):
        try:
            return os.lseek(self._fileno, offset, whence)
        except IOError: # pylint:disable=try-except-raise
            raise
        except OSError as ex: # pylint:disable=duplicate-except
            # Python 2.x
            # make sure on Python 2.x we raise an IOError
            # as documented for RawIOBase.
            # See https://github.com/gevent/gevent/issues/1323
            reraise(IOError, IOError(*ex.args), sys.exc_info()[2])
||||
|
||||
|
||||
class FlushingBufferedWriter(BufferedWriter):
    """
    A :class:`io.BufferedWriter` that flushes after every write,
    simulating an unbuffered (bufsize=0) stream while still exposing
    the BufferedWriter interface.
    """

    def write(self, b):
        written = BufferedWriter.write(self, b)
        self.flush()
        return written
||||
|
||||
|
||||
class FileObjectPosix(FileObjectBase):
    """
    A file-like object that operates on non-blocking files but
    provides a synchronous, cooperative interface.

    .. caution::
         This object is only effective wrapping files that can be used meaningfully
         with :func:`select.select` such as sockets and pipes.

         In general, on most platforms, operations on regular files
         (e.g., ``open('a_file.txt')``) are considered non-blocking
         already, even though they can take some time to complete as
         data is copied to the kernel and flushed to disk: this time
         is relatively bounded compared to sockets or pipes, though.
         A :func:`~os.read` or :func:`~os.write` call on such a file
         will still effectively block for some small period of time.
         Therefore, wrapping this class around a regular file is
         unlikely to make IO gevent-friendly: reading or writing large
         amounts of data could still block the event loop.

         If you'll be working with regular files and doing IO in large
         chunks, you may consider using
         :class:`~gevent.fileobject.FileObjectThread` or
         :func:`~gevent.os.tp_read` and :func:`~gevent.os.tp_write` to bypass this
         concern.

    .. note::
         Random read/write (e.g., ``mode='rwb'``) is not supported.
         For that, use :class:`io.BufferedRWPair` around two instance of this
         class.

    .. tip::
         Although this object provides a :meth:`fileno` method and so
         can itself be passed to :func:`fcntl.fcntl`, setting the
         :data:`os.O_NONBLOCK` flag will have no effect (reads will
         still block the greenlet, although other greenlets can run).
         However, removing that flag *will cause this object to no
         longer be cooperative* (other greenlets will no longer run).

         You can use the internal ``fileio`` attribute of this object
         (a :class:`io.RawIOBase`) to perform non-blocking byte reads.
         Note, however, that once you begin directly using this
         attribute, the results from using methods of *this* object
         are undefined, especially in text mode. (See :issue:`222`.)

    .. versionchanged:: 1.1
       Now uses the :mod:`io` package internally. Under Python 2, previously
       used the undocumented class :class:`socket._fileobject`. This provides
       better file-like semantics (and portability to Python 3).
    .. versionchanged:: 1.2a1
       Document the ``fileio`` attribute for non-blocking reads.
    """

    #: platform specific default for the *bufsize* parameter
    default_bufsize = io.DEFAULT_BUFFER_SIZE

    def __init__(self, fobj, mode='rb', bufsize=-1, close=True):
        """
        :param fobj: Either an integer fileno, or an object supporting the
            usual :meth:`socket.fileno` method. The file *will* be
            put in non-blocking mode using :func:`gevent.os.make_nonblocking`.
        :keyword str mode: The manner of access to the file, one of "rb", "rU" or "wb"
            (where the "b" or "U" can be omitted).
            If "U" is part of the mode, universal newlines will be used. On Python 2,
            if 't' is not in the mode, this will result in returning byte (native) strings;
            putting 't' in the mode will return text strings. This may cause
            :exc:`UnicodeDecodeError` to be raised.
        :keyword int bufsize: If given, the size of the buffer to use. The default
            value means to use a platform-specific default
            Other values are interpreted as for the :mod:`io` package.
            Buffering is ignored in text mode.

        .. versionchanged:: 1.3a1

           On Python 2, enabling universal newlines no longer forces unicode
           IO.

        .. versionchanged:: 1.2a1

           A bufsize of 0 in write mode is no longer forced to be 1.
           Instead, the underlying buffer is flushed after every write
           operation to simulate a bufsize of 0. In gevent 1.0, a
           bufsize of 0 was flushed when a newline was written, while
           in gevent 1.1 it was flushed when more than one byte was
           written. Note that this may have performance impacts.
        """

        if isinstance(fobj, int):
            fileno = fobj
            fobj = None
        else:
            fileno = fobj.fileno()
        if not isinstance(fileno, int):
            raise TypeError('fileno must be int: %r' % fileno)

        # Keep the caller's mode string for error messages; reduce the
        # working copy to just the access letter ('r'/'w') plus flags.
        orig_mode = mode
        mode = (mode or 'rb').replace('b', '')
        if 'U' in mode:
            self._translate = True
            # bytes is str only on Python 2.
            if bytes is str and 't' not in mode:
                # We're going to be producing unicode objects, but
                # universal newlines doesn't do that in the stdlib,
                # so fix that to return str objects. The fix is two parts:
                # first, set an encoding on the stream that can round-trip
                # all bytes, and second, decode all bytes once they've been read.
                self._translate_encoding = 'latin-1'
                import functools

                def wrap_method(m):
                    if m.__name__.startswith("read"):
                        @functools.wraps(m)
                        def wrapped(*args, **kwargs):
                            result = m(*args, **kwargs)
                            assert isinstance(result, unicode) # pylint:disable=undefined-variable
                            return result.encode('latin-1')
                        return wrapped
                    return m
                # NOTE: stored as a plain instance attribute, so it is
                # called unbound (receives only the method argument).
                self._wrap_method = wrap_method
            mode = mode.replace('U', '')
        else:
            self._translate = False

        mode = mode.replace('t', '')

        if len(mode) != 1 and mode not in 'rw': # pragma: no cover
            # Python 3 builtin `open` raises a ValueError for invalid modes;
            # Python 2 ignores it. In the past, we raised an AssertionError, if __debug__ was
            # enabled (which it usually was). Match Python 3 because it makes more sense
            # and because __debug__ may not be enabled.
            # NOTE: This is preventing a mode like 'rwb' for binary random access;
            # that code was never tested and was explicitly marked as "not used"
            raise ValueError('mode can only be [rb, rU, wb], not %r' % (orig_mode,))

        self._orig_bufsize = bufsize
        if bufsize < 0 or bufsize == 1:
            bufsize = self.default_bufsize
        elif bufsize == 0:
            # io buffers require a positive size; a bufsize of 0 is
            # emulated via FlushingBufferedWriter below.
            bufsize = 1

        if mode == 'r':
            IOFamily = BufferedReader
        else:
            assert mode == 'w'
            IOFamily = BufferedWriter
            if self._orig_bufsize == 0:
                # We could also simply pass self.fileio as *io*, but this way
                # we at least consistently expose a BufferedWriter in our *io*
                # attribute.
                IOFamily = FlushingBufferedWriter

        self._fobj = fobj
        # This attribute is documented as available for non-blocking reads.
        self.fileio = GreenFileDescriptorIO(fileno, mode, closefd=close)

        buffered_fobj = IOFamily(self.fileio, bufsize)

        super(FileObjectPosix, self).__init__(buffered_fobj, close)

    def _do_close(self, fobj, closefd):
        try:
            fobj.close()
            # self.fileio already knows whether or not to close the
            # file descriptor
            self.fileio.close()
        finally:
            self._fobj = None
            self.fileio = None

    def __iter__(self):
        # Delegate iteration directly to the buffered IO object.
        return self._io
|
Binary file not shown.
@ -0,0 +1,177 @@
|
||||
# cython: auto_pickle=False
# Cython .pxd declaration file backing gevent's compiled greenlet module.

cimport cython
from gevent.__ident cimport IdentRegistry
from gevent.__hub_local cimport get_hub_noargs as get_hub
from gevent.__waiter cimport Waiter

cdef bint _PYPY
cdef sys_getframe
cdef sys_exc_info
cdef Timeout
cdef GreenletExit
cdef InvalidSwitchError

cdef extern from "greenlet/greenlet.h":

    ctypedef class greenlet.greenlet [object PyGreenlet]:
        pass

    # These are actually macros and so must be included
    # (defined) in each .pxd, as are the two functions
    # that call them.
    greenlet PyGreenlet_GetCurrent()
    void PyGreenlet_Import()

@cython.final
cdef inline greenlet getcurrent():
    return PyGreenlet_GetCurrent()

cdef bint _greenlet_imported

cdef inline void greenlet_init():
    # Import the greenlet C API exactly once per process.
    global _greenlet_imported
    if not _greenlet_imported:
        PyGreenlet_Import()
        _greenlet_imported = True

cdef extern from "Python.h":

    ctypedef class types.CodeType [object PyCodeObject]:
        pass

cdef extern from "frameobject.h":

    ctypedef class types.FrameType [object PyFrameObject]:
        cdef CodeType f_code
        cdef int f_lineno
        # We can't declare this in the object, because it's
        # allowed to be NULL, and Cython can't handle that.
        # We have to go through the python machinery to get a
        # proper None instead.
        # cdef FrameType f_back

cdef void _init()

cdef class SpawnedLink:
    cdef public object callback


@cython.final
cdef class SuccessSpawnedLink(SpawnedLink):
    pass

@cython.final
cdef class FailureSpawnedLink(SpawnedLink):
    pass

# Lightweight stand-in for an interpreter frame, used to record
# spawning stacks cheaply.
@cython.final
@cython.internal
@cython.freelist(1000)
cdef class _Frame:
    cdef readonly CodeType f_code
    cdef readonly int f_lineno
    cdef readonly _Frame f_back


@cython.final
@cython.locals(frames=list,frame=FrameType)
cdef inline list _extract_stack(int limit)

@cython.final
@cython.locals(previous=_Frame, frame=tuple, f=_Frame)
cdef _Frame _Frame_from_list(list frames)


cdef class Greenlet(greenlet):
    cdef readonly object value
    cdef readonly tuple args
    cdef readonly dict kwargs
    cdef readonly object spawning_greenlet
    cdef public dict spawn_tree_locals

    # This is accessed with getattr() dynamically so it
    # must be visible to Python
    cdef readonly list _spawning_stack_frames

    cdef list _links
    cdef tuple _exc_info
    cdef object _notifier
    cdef object _start_event
    cdef str _formatted_info
    cdef object _ident

    cpdef bint has_links(self)
    cpdef join(self, timeout=*)
    cpdef bint ready(self)
    cpdef bint successful(self)
    cpdef rawlink(self, object callback)
    cpdef str _formatinfo(self)

    @cython.locals(reg=IdentRegistry)
    cdef _get_minimal_ident(self)


    cdef bint __started_but_aborted(self)
    cdef bint __start_cancelled_by_kill(self)
    cdef bint __start_pending(self)
    cdef bint __never_started_or_killed(self)
    cdef bint __start_completed(self)
    cdef __handle_death_before_start(self, tuple args)

    cdef __cancel_start(self)

    cdef _report_result(self, object result)
    cdef _report_error(self, tuple exc_info)
    # This is used as the target of a callback
    # from the loop, and so needs to be a cpdef
    cpdef _notify_links(self)

    # Hmm, declaring _raise_exception causes issues when _imap
    # is also compiled.
    # TypeError: wrap() takes exactly one argument (0 given)
    # cpdef _raise_exception(self)



# Declare a bunch of imports as cdefs so they can
# be accessed directly as static vars without
# doing a module global lookup. This is especially important
# for spawning greenlets.
cdef _greenlet__init__
cdef _threadlocal
cdef get_hub_class
cdef wref

cdef dump_traceback
cdef load_traceback
cdef Waiter
cdef wait
cdef iwait
cdef reraise
cpdef GEVENT_CONFIG


# No-op event used in place of a real start event once a start has
# been cancelled or completed.
@cython.final
@cython.internal
cdef class _dummy_event:
    cdef readonly bint pending
    cdef readonly bint active

    cpdef stop(self)
    cpdef start(self, cb)
    cpdef close(self)

cdef _dummy_event _cancelled_start_event
cdef _dummy_event _start_completed_event


@cython.locals(diehards=list)
cdef _killall3(list greenlets, object exception, object waiter)
cdef _killall(list greenlets, object exception)

@cython.locals(done=list)
cpdef joinall(greenlets, timeout=*, raise_error=*, count=*)

cdef set _spawn_callbacks
cdef void _call_spawn_callbacks(Greenlet gr) except *
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# copyright (c) 2018 gevent. See LICENSE.
|
||||
# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False
|
||||
"""
|
||||
A collection of primitives used by the hub, and suitable for
|
||||
compilation with Cython because of their frequency of use.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
from weakref import ref as wref
|
||||
from gc import get_objects
|
||||
|
||||
from greenlet import greenlet
|
||||
|
||||
from gevent.exceptions import BlockingSwitchOutError
|
||||
|
||||
|
||||
# In Cython, we define these as 'cdef inline' functions. The
|
||||
# compilation unit cannot have a direct assignment to them (import
|
||||
# is assignment) without generating a 'lvalue is not valid target'
|
||||
# error.
|
||||
locals()['getcurrent'] = __import__('greenlet').getcurrent
|
||||
locals()['greenlet_init'] = lambda: None
|
||||
locals()['_greenlet_switch'] = greenlet.switch
|
||||
|
||||
__all__ = [
|
||||
'TrackedRawGreenlet',
|
||||
'SwitchOutGreenletWithLoop',
|
||||
]
|
||||
|
||||
class TrackedRawGreenlet(greenlet):
    """
    A raw greenlet that records who spawned it and shares spawn-tree
    locals, without capturing a spawning stack.
    """

    def __init__(self, function, parent):
        greenlet.__init__(self, function, parent)
        # See greenlet.py's Greenlet class. We capture the cheap
        # parts to maintain the tree structure, but we do not capture
        # the stack because that's too expensive for 'spawn_raw'.
        spawner = getcurrent() # pylint:disable=undefined-variable
        self.spawning_greenlet = wref(spawner)
        # See Greenlet for how trees are maintained.
        try:
            tree_locals = spawner.spawn_tree_locals
        except AttributeError:
            tree_locals = {}
            if spawner.parent:
                # Only propagate onto greenlets that themselves
                # have a parent.
                spawner.spawn_tree_locals = tree_locals
        self.spawn_tree_locals = tree_locals
||||
|
||||
|
||||
class SwitchOutGreenletWithLoop(TrackedRawGreenlet):
    """
    A tracked greenlet that, when switched to, first gives the current
    greenlet a chance to clean up via its ``switch_out`` hook.
    """
    # Subclasses must define:
    # - self.loop
    # This class defines loop in its .pxd for Cython, which lets us
    # avoid circular dependencies with the hub.

    def switch(self):
        caller_switch_out = getattr(getcurrent(), 'switch_out', None) # pylint:disable=undefined-variable
        if caller_switch_out is not None:
            caller_switch_out()
        return _greenlet_switch(self) # pylint:disable=undefined-variable

    def switch_out(self):
        # Switching *out of* this greenlet means a blocking call was
        # attempted inside the event loop itself.
        raise BlockingSwitchOutError('Impossible to call blocking function in the event loop callback')
||||
|
||||
|
||||
def get_reachable_greenlets():
    """
    Return every greenlet object the garbage collector can see, except
    those flagged with a truthy ``greenlet_tree_is_ignored`` attribute.
    """
    # We compile this loop with Cython so that it's faster, and so that
    # the GIL isn't dropped at unpredictable times during the loop.
    # Dropping the GIL could lead to accessing partly constructed objects
    # in undefined states (particularly, tuples). This helps close a hole
    # where a `SystemError: Objects/tupleobject.c bad argument to internal function`
    # could get raised. (Note that this probably doesn't completely close the hole,
    # if other threads have dropped the GIL, but hopefully the speed makes that
    # more rare.) See https://github.com/gevent/gevent/issues/1302
    found = []
    for candidate in get_objects():
        if not isinstance(candidate, greenlet):
            continue
        if getattr(candidate, 'greenlet_tree_is_ignored', False):
            continue
        found.append(candidate)
    return found
|
||||
|
||||
def _init():
    """One-time module setup: pull in the greenlet C API when compiled."""
    greenlet_init() # pylint:disable=undefined-variable
||||
|
||||
_init()
|
||||
|
||||
from gevent._util import import_c_accel
|
||||
import_c_accel(globals(), 'gevent.__greenlet_primitives')
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,101 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# copyright 2018 gevent. See LICENSE
|
||||
"""
|
||||
Maintains the thread local hub.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
from gevent._compat import thread_mod_name
|
||||
|
||||
__all__ = [
|
||||
'get_hub',
|
||||
'get_hub_noargs',
|
||||
'get_hub_if_exists',
|
||||
]
|
||||
|
||||
# These must be the "real" native thread versions,
|
||||
# not monkey-patched.
|
||||
# We are imported early enough (by gevent/__init__) that
|
||||
# we can rely on not being monkey-patched in any way yet.
|
||||
class _Threadlocal(__import__(thread_mod_name)._local):
    """Per-thread storage for the hub type, the hub, and the loop."""

    def __init__(self):
        # Having an initializer means callers can test ``is None``
        # instead of catching AttributeError — cleaner code, and it
        # sidesteps some corner cases (like #687).
        super(_Threadlocal, self).__init__()
        for slot in ('Hub', 'loop', 'hub'):
            setattr(self, slot, None)
|
||||
|
||||
_threadlocal = _Threadlocal()
|
||||
|
||||
Hub = None # Set when gevent.hub is imported
|
||||
|
||||
def get_hub_class():
    """Return the type of hub to use for the current thread.

    If there's no type of hub for the current thread yet,
    'gevent.hub.Hub' is used (and cached in the thread-local).
    """
    cached = _threadlocal.Hub
    if cached is not None:
        return cached
    _threadlocal.Hub = Hub
    return Hub
|
||||
|
||||
def set_default_hub_class(hubtype):
    """Install *hubtype* as the module-level default hub type."""
    global Hub
    Hub = hubtype
|
||||
|
||||
def get_hub(*args, **kwargs):
    """
    Return the hub for the current thread.

    If a hub does not exist in the current thread, a new one is
    created of the type returned by :func:`get_hub_class`.

    .. deprecated:: 1.3b1
       The ``*args`` and ``**kwargs`` arguments are deprecated. They were
       only used when the hub was created, and so were non-deterministic---to be
       sure they were used, *all* callers had to pass them, or they were order-dependent.
       Use ``set_hub`` instead.
    """
    existing = _threadlocal.hub
    if existing is not None:
        return existing
    new_hub = get_hub_class()(*args, **kwargs)
    _threadlocal.hub = new_hub
    return new_hub
|
||||
|
||||
def get_hub_noargs():
    # Just like get_hub, but cheaper to call because it takes no
    # arguments or kwargs. A copy of this also lives in
    # gevent/greenlet.py.
    existing = _threadlocal.hub
    if existing is not None:
        return existing
    new_hub = get_hub_class()()
    _threadlocal.hub = new_hub
    return new_hub
|
||||
|
||||
def get_hub_if_exists():
    """Return the hub for the current thread.

    Unlike :func:`get_hub`, this never creates one: it returns
    ``None`` if no hub has been created yet.
    """
    return _threadlocal.hub
|
||||
|
||||
|
||||
def set_hub(hub):
    # Unconditionally install *hub* as the current thread's hub.
    _threadlocal.hub = hub

def get_loop():
    # Return the event loop stored for the current thread (may be None).
    return _threadlocal.loop

def set_loop(loop):
    # Unconditionally install *loop* as the current thread's event loop.
    _threadlocal.loop = loop
|
||||
|
||||
# Overwrite the pure-Python definitions in this module with the
# compiled Cython accelerator versions, when they are available.
from gevent._util import import_c_accel
import_c_accel(globals(), 'gevent.__hub_local')
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,394 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# copyright (c) 2018 gevent. See LICENSE.
|
||||
# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False,binding=True
|
||||
"""
|
||||
A collection of primitives used by the hub, and suitable for
|
||||
compilation with Cython because of their frequency of use.
|
||||
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import traceback
|
||||
|
||||
from gevent.exceptions import InvalidSwitchError
|
||||
from gevent.exceptions import ConcurrentObjectUseError
|
||||
|
||||
from gevent import _greenlet_primitives
|
||||
from gevent import _waiter
|
||||
from gevent._util import _NONE
|
||||
from gevent._hub_local import get_hub_noargs as get_hub
|
||||
from gevent.timeout import Timeout
|
||||
|
||||
# In Cython, we define these as 'cdef inline' functions. The
|
||||
# compilation unit cannot have a direct assignment to them (import
|
||||
# is assignment) without generating a 'lvalue is not valid target'
|
||||
# error.
|
||||
locals()['getcurrent'] = __import__('greenlet').getcurrent
|
||||
locals()['greenlet_init'] = lambda: None
|
||||
locals()['Waiter'] = _waiter.Waiter
|
||||
locals()['MultipleWaiter'] = _waiter.MultipleWaiter
|
||||
locals()['SwitchOutGreenletWithLoop'] = _greenlet_primitives.SwitchOutGreenletWithLoop
|
||||
|
||||
__all__ = [
|
||||
'WaitOperationsGreenlet',
|
||||
'iwait_on_objects',
|
||||
'wait_on_objects',
|
||||
'wait_read',
|
||||
'wait_write',
|
||||
'wait_readwrite',
|
||||
]
|
||||
|
||||
class WaitOperationsGreenlet(SwitchOutGreenletWithLoop): # pylint:disable=undefined-variable
    """A greenlet (the hub) that knows how to block on event-loop watchers."""

    def wait(self, watcher):
        """
        Wait until the *watcher* (which must not be started) is ready.

        The current greenlet will be unscheduled during this time.
        """
        waiter = Waiter(self) # pylint:disable=undefined-variable
        watcher.start(waiter.switch, waiter)
        try:
            switched_in_with = waiter.get()
            if switched_in_with is not waiter:
                raise InvalidSwitchError('Invalid switch into %s: %r (expected %r)' % (
                    getcurrent(), # pylint:disable=undefined-variable
                    switched_in_with, waiter))
        finally:
            watcher.stop()

    def cancel_wait(self, watcher, error, close_watcher=False):
        """
        Cancel an in-progress call to :meth:`wait` by throwing the given *error*
        in the waiting greenlet.

        .. versionchanged:: 1.3a1
           Added the *close_watcher* parameter. If true, the watcher
           will be closed after the exception is thrown. The watcher should then
           be discarded. Closing the watcher is important to release native resources.
        .. versionchanged:: 1.3a2
           Allow the *watcher* to be ``None``. No action is taken in that case.
        """
        if watcher is None:
            # Presumably already closed.
            # See https://github.com/gevent/gevent/issues/1089
            return
        if watcher.callback is None:
            # Nobody is waiting; at most, release native resources.
            if close_watcher:
                watcher.close()
            return
        self.loop.run_callback(self._cancel_wait, watcher, error, close_watcher)

    def _cancel_wait(self, watcher, error, close_watcher):
        # Re-check liveness: the watcher may have fired or been stopped
        # by the time this scheduled callback actually runs.
        was_active = watcher.active
        callback = watcher.callback
        if close_watcher:
            watcher.close()
        if not was_active:
            return
        # The callback should be greenlet.switch(). It may or may not be None.
        target = getattr(callback, '__self__', None)
        if target is not None:
            target.throw(error)
|
||||
|
||||
|
||||
class _WaitIterator(object):
    """Yields *objects* as each becomes ready, honoring *timeout* and *count*."""

    def __init__(self, objects, hub, timeout, count):
        self._hub = hub
        self._waiter = MultipleWaiter(hub) # pylint:disable=undefined-variable
        self._switch = self._waiter.switch
        self._timeout = timeout
        self._objects = objects

        self._timer = None
        self._begun = False

        # Even when only one result will be returned, *every* object
        # must be rawlink()ed so we notice whichever finishes first.
        if count is None:
            self._count = len(objects)
        else:
            self._count = min(count, len(objects))

    def _begin(self):
        if self._begun:
            return

        self._begun = True

        # XXX: If iteration doesn't actually happen, we
        # could leave these links around!
        for target in self._objects:
            target.rawlink(self._switch)

        if self._timeout is not None:
            timer = self._hub.loop.timer(self._timeout, priority=-1)
            self._timer = timer
            timer.start(self._switch, self)

    def __iter__(self):
        return self

    def __next__(self):
        self._begin()

        if not self._count:
            # Exhausted
            self._cleanup()
            raise StopIteration()

        self._count -= 1
        try:
            ready = self._waiter.get()
            self._waiter.clear()
            if ready is self:
                # Timer expired, no more
                self._cleanup()
                raise StopIteration()
            return ready
        except:
            self._cleanup()
            raise

    next = __next__

    def _cleanup(self):
        # Idempotent: safe to call more than once.
        if self._timer is not None:
            self._timer.close()
            self._timer = None

        pending, self._objects = self._objects, ()
        for target in pending:
            unlink = getattr(target, 'unlink', None)
            if unlink is None:
                continue
            try:
                unlink(self._switch)
            except: # pylint:disable=bare-except
                traceback.print_exc()

    def __enter__(self):
        return self

    def __exit__(self, typ, value, tb):
        self._cleanup()
|
||||
|
||||
|
||||
def iwait_on_objects(objects, timeout=None, count=None):
    """
    Iteratively yield *objects* as they are ready, until all (or *count*) are ready
    or *timeout* expired.

    If you will only be consuming a portion of the *objects*, you should
    do so inside a ``with`` block on this object to avoid leaking resources::

        with gevent.iwait((a, b, c)) as it:
            for i in it:
                if i is a:
                    break

    :param objects: A sequence (supporting :func:`len`) containing objects
       implementing the wait protocol (rawlink() and unlink()).
    :keyword int count: If not `None`, then a number specifying the maximum number
       of objects to wait for. If ``None`` (the default), all objects
       are waited for.
    :keyword float timeout: If given, specifies a maximum number of seconds
       to wait. If the timeout expires before the desired waited-for objects
       are available, then this method returns immediately.

    .. seealso:: :func:`wait`

    .. versionchanged:: 1.1a1
       Add the *count* parameter.
    .. versionchanged:: 1.1a2
       No longer raise :exc:`LoopExit` if our caller switches greenlets
       in between items yielded by this function.
    .. versionchanged:: 1.4
       Add support to use the returned object as a context manager.
    """
    # QQQ would be nice to support iterable here that can be generated slowly (why?)
    hub = get_hub()
    if objects is None:
        # No objects: just wait for the loop itself to finish.
        return [hub.join(timeout=timeout)]
    return _WaitIterator(objects, hub, timeout, count)
|
||||
|
||||
|
||||
def wait_on_objects(objects=None, timeout=None, count=None):
    """
    Wait for ``objects`` to become ready or for event loop to finish.

    If ``objects`` is provided, it must be a list containing objects
    implementing the wait protocol (rawlink() and unlink() methods):

    - :class:`gevent.Greenlet` instance
    - :class:`gevent.event.Event` instance
    - :class:`gevent.lock.Semaphore` instance
    - :class:`gevent.subprocess.Popen` instance

    If ``objects`` is ``None`` (the default), ``wait()`` blocks until
    the current event loop has nothing to do (or until ``timeout`` passes):

    - all greenlets have finished
    - all servers were stopped
    - all event loop watchers were stopped.

    If ``count`` is ``None`` (the default), wait for all ``objects``
    to become ready.

    If ``count`` is a number, wait for (up to) ``count`` objects to become
    ready. (For example, if count is ``1`` then the function exits
    when any object in the list is ready).

    If ``timeout`` is provided, it specifies the maximum number of
    seconds ``wait()`` will block.

    Returns the list of ready objects, in the order in which they were
    ready.

    .. seealso:: :func:`iwait`
    """
    if objects is None:
        # Nothing specific to wait on: block until the loop is idle.
        return get_hub().join(timeout=timeout)
    return list(iwait_on_objects(objects, timeout, count))
|
||||
|
||||
_timeout_error = Exception
|
||||
|
||||
def set_default_timeout_error(e):
|
||||
global _timeout_error
|
||||
_timeout_error = e
|
||||
|
||||
def _primitive_wait(watcher, timeout, timeout_exc, hub):
    # Shared implementation behind wait_on_socket/wait_on_watcher:
    # block the current greenlet on *watcher*, optionally bounded by
    # *timeout* seconds, raising *timeout_exc* on expiry.
    if watcher.callback is not None:
        raise ConcurrentObjectUseError('This socket is already used by another greenlet: %r'
                                       % (watcher.callback, ))

    if hub is None:
        hub = get_hub()

    if timeout is None:
        # Unbounded wait; no timer needed.
        hub.wait(watcher)
        return

    # Only construct the default error lazily, when actually needed.
    if timeout_exc is not _NONE or timeout is None:
        exc = timeout_exc
    else:
        exc = _timeout_error('timed out')

    with Timeout._start_new_or_dummy(timeout, exc):
        hub.wait(watcher)
|
||||
|
||||
# Suitable to be bound as an instance method
def wait_on_socket(socket, watcher, timeout_exc=None):
    if socket is None or watcher is None:
        # test__hub TestCloseSocketWhilePolling, on Python 2; Python 3
        # catches the EBADF differently.
        raise ConcurrentObjectUseError("The socket has already been closed by another greenlet")
    exc = _NONE if timeout_exc is None else timeout_exc
    _primitive_wait(watcher, socket.timeout, exc, socket.hub)
|
||||
|
||||
def wait_on_watcher(watcher, timeout=None, timeout_exc=_NONE, hub=None):
    """
    wait(watcher, timeout=None, [timeout_exc=None]) -> None

    Block the current greenlet until *watcher* is ready.

    If *timeout* is non-negative, then *timeout_exc* is raised after
    *timeout* second has passed.

    If :func:`cancel_wait` is called on *io* by another greenlet,
    raise an exception in this blocking greenlet
    (``socket.error(EBADF, 'File descriptor was closed in another
    greenlet')`` by default).

    :param io: An event loop watcher, most commonly an IO watcher obtained from
        :meth:`gevent.core.loop.io`
    :keyword timeout_exc: The exception to raise if the timeout expires.
        By default, a :class:`socket.timeout` exception is raised.
        If you pass a value for this keyword, it is interpreted as for
        :class:`gevent.timeout.Timeout`.

    :raises ~gevent.hub.ConcurrentObjectUseError: If the *watcher* is
        already started.
    """
    # Thin public wrapper; all the work happens in _primitive_wait.
    _primitive_wait(watcher, timeout, timeout_exc, hub)
|
||||
|
||||
|
||||
def wait_read(fileno, timeout=None, timeout_exc=_NONE):
    """
    wait_read(fileno, timeout=None, [timeout_exc=None]) -> None

    Block the current greenlet until *fileno* is ready to read.

    For the meaning of the other parameters and possible exceptions,
    see :func:`wait`.

    .. seealso:: :func:`cancel_wait`
    """
    hub = get_hub()
    watcher = hub.loop.io(fileno, 1)  # 1 == READ event mask
    try:
        return wait_on_watcher(watcher, timeout, timeout_exc, hub)
    finally:
        # Always release native resources, even on timeout/cancel.
        watcher.close()
|
||||
|
||||
|
||||
def wait_write(fileno, timeout=None, timeout_exc=_NONE, event=_NONE):
    """
    wait_write(fileno, timeout=None, [timeout_exc=None]) -> None

    Block the current greenlet until *fileno* is ready to write.

    For the meaning of the other parameters and possible exceptions,
    see :func:`wait`.

    .. deprecated:: 1.1
       The keyword argument *event* is ignored. Applications should not pass this parameter.
       In the future, doing so will become an error.

    .. seealso:: :func:`cancel_wait`
    """
    # pylint:disable=unused-argument
    hub = get_hub()
    watcher = hub.loop.io(fileno, 2)  # 2 == WRITE event mask
    try:
        return wait_on_watcher(watcher, timeout, timeout_exc, hub)
    finally:
        # Always release native resources, even on timeout/cancel.
        watcher.close()
|
||||
|
||||
|
||||
def wait_readwrite(fileno, timeout=None, timeout_exc=_NONE, event=_NONE):
    """
    wait_readwrite(fileno, timeout=None, [timeout_exc=None]) -> None

    Block the current greenlet until *fileno* is ready to read or
    write.

    For the meaning of the other parameters and possible exceptions,
    see :func:`wait`.

    .. deprecated:: 1.1
       The keyword argument *event* is ignored. Applications should not pass this parameter.
       In the future, doing so will become an error.

    .. seealso:: :func:`cancel_wait`
    """
    # pylint:disable=unused-argument
    hub = get_hub()
    watcher = hub.loop.io(fileno, 3)  # 3 == READ | WRITE event mask
    try:
        return wait_on_watcher(watcher, timeout, timeout_exc, hub)
    finally:
        # Always release native resources, even on timeout/cancel.
        watcher.close()
|
||||
|
||||
|
||||
def _init():
    # greenlet_init is the lambda no-op installed via locals() above in
    # the pure-Python build; the Cython build substitutes its own
    # initializer (see the 'cdef inline' comment earlier in this module).
    greenlet_init() # pylint:disable=undefined-variable

_init()

# Overwrite the pure-Python definitions in this module with the
# compiled Cython accelerator versions, when they are available.
from gevent._util import import_c_accel
import_c_accel(globals(), 'gevent.__hub_primitives')
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,884 @@
|
||||
<!DOCTYPE html>
|
||||
<!-- Generated by Cython 0.29.2 -->
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>Cython: _ident.py</title>
|
||||
<style type="text/css">
|
||||
|
||||
body.cython { font-family: courier; font-size: 12; }
|
||||
|
||||
.cython.tag { }
|
||||
.cython.line { margin: 0em }
|
||||
.cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }
|
||||
|
||||
.cython.line .run { background-color: #B0FFB0; }
|
||||
.cython.line .mis { background-color: #FFB0B0; }
|
||||
.cython.code.run { border-left: 8px solid #B0FFB0; }
|
||||
.cython.code.mis { border-left: 8px solid #FFB0B0; }
|
||||
|
||||
.cython.code .py_c_api { color: red; }
|
||||
.cython.code .py_macro_api { color: #FF7000; }
|
||||
.cython.code .pyx_c_api { color: #FF3000; }
|
||||
.cython.code .pyx_macro_api { color: #FF7000; }
|
||||
.cython.code .refnanny { color: #FFA000; }
|
||||
.cython.code .trace { color: #FFA000; }
|
||||
.cython.code .error_goto { color: #FFA000; }
|
||||
|
||||
.cython.code .coerce { color: #008000; border: 1px dotted #008000 }
|
||||
.cython.code .py_attr { color: #FF0000; font-weight: bold; }
|
||||
.cython.code .c_attr { color: #0000FF; }
|
||||
.cython.code .py_call { color: #FF0000; font-weight: bold; }
|
||||
.cython.code .c_call { color: #0000FF; }
|
||||
|
||||
.cython.score-0 {background-color: #FFFFff;}
|
||||
.cython.score-1 {background-color: #FFFFe7;}
|
||||
.cython.score-2 {background-color: #FFFFd4;}
|
||||
.cython.score-3 {background-color: #FFFFc4;}
|
||||
.cython.score-4 {background-color: #FFFFb6;}
|
||||
.cython.score-5 {background-color: #FFFFaa;}
|
||||
.cython.score-6 {background-color: #FFFF9f;}
|
||||
.cython.score-7 {background-color: #FFFF96;}
|
||||
.cython.score-8 {background-color: #FFFF8d;}
|
||||
.cython.score-9 {background-color: #FFFF86;}
|
||||
.cython.score-10 {background-color: #FFFF7f;}
|
||||
.cython.score-11 {background-color: #FFFF79;}
|
||||
.cython.score-12 {background-color: #FFFF73;}
|
||||
.cython.score-13 {background-color: #FFFF6e;}
|
||||
.cython.score-14 {background-color: #FFFF6a;}
|
||||
.cython.score-15 {background-color: #FFFF66;}
|
||||
.cython.score-16 {background-color: #FFFF62;}
|
||||
.cython.score-17 {background-color: #FFFF5e;}
|
||||
.cython.score-18 {background-color: #FFFF5b;}
|
||||
.cython.score-19 {background-color: #FFFF57;}
|
||||
.cython.score-20 {background-color: #FFFF55;}
|
||||
.cython.score-21 {background-color: #FFFF52;}
|
||||
.cython.score-22 {background-color: #FFFF4f;}
|
||||
.cython.score-23 {background-color: #FFFF4d;}
|
||||
.cython.score-24 {background-color: #FFFF4b;}
|
||||
.cython.score-25 {background-color: #FFFF48;}
|
||||
.cython.score-26 {background-color: #FFFF46;}
|
||||
.cython.score-27 {background-color: #FFFF44;}
|
||||
.cython.score-28 {background-color: #FFFF43;}
|
||||
.cython.score-29 {background-color: #FFFF41;}
|
||||
.cython.score-30 {background-color: #FFFF3f;}
|
||||
.cython.score-31 {background-color: #FFFF3e;}
|
||||
.cython.score-32 {background-color: #FFFF3c;}
|
||||
.cython.score-33 {background-color: #FFFF3b;}
|
||||
.cython.score-34 {background-color: #FFFF39;}
|
||||
.cython.score-35 {background-color: #FFFF38;}
|
||||
.cython.score-36 {background-color: #FFFF37;}
|
||||
.cython.score-37 {background-color: #FFFF36;}
|
||||
.cython.score-38 {background-color: #FFFF35;}
|
||||
.cython.score-39 {background-color: #FFFF34;}
|
||||
.cython.score-40 {background-color: #FFFF33;}
|
||||
.cython.score-41 {background-color: #FFFF32;}
|
||||
.cython.score-42 {background-color: #FFFF31;}
|
||||
.cython.score-43 {background-color: #FFFF30;}
|
||||
.cython.score-44 {background-color: #FFFF2f;}
|
||||
.cython.score-45 {background-color: #FFFF2e;}
|
||||
.cython.score-46 {background-color: #FFFF2d;}
|
||||
.cython.score-47 {background-color: #FFFF2c;}
|
||||
.cython.score-48 {background-color: #FFFF2b;}
|
||||
.cython.score-49 {background-color: #FFFF2b;}
|
||||
.cython.score-50 {background-color: #FFFF2a;}
|
||||
.cython.score-51 {background-color: #FFFF29;}
|
||||
.cython.score-52 {background-color: #FFFF29;}
|
||||
.cython.score-53 {background-color: #FFFF28;}
|
||||
.cython.score-54 {background-color: #FFFF27;}
|
||||
.cython.score-55 {background-color: #FFFF27;}
|
||||
.cython.score-56 {background-color: #FFFF26;}
|
||||
.cython.score-57 {background-color: #FFFF26;}
|
||||
.cython.score-58 {background-color: #FFFF25;}
|
||||
.cython.score-59 {background-color: #FFFF24;}
|
||||
.cython.score-60 {background-color: #FFFF24;}
|
||||
.cython.score-61 {background-color: #FFFF23;}
|
||||
.cython.score-62 {background-color: #FFFF23;}
|
||||
.cython.score-63 {background-color: #FFFF22;}
|
||||
.cython.score-64 {background-color: #FFFF22;}
|
||||
.cython.score-65 {background-color: #FFFF22;}
|
||||
.cython.score-66 {background-color: #FFFF21;}
|
||||
.cython.score-67 {background-color: #FFFF21;}
|
||||
.cython.score-68 {background-color: #FFFF20;}
|
||||
.cython.score-69 {background-color: #FFFF20;}
|
||||
.cython.score-70 {background-color: #FFFF1f;}
|
||||
.cython.score-71 {background-color: #FFFF1f;}
|
||||
.cython.score-72 {background-color: #FFFF1f;}
|
||||
.cython.score-73 {background-color: #FFFF1e;}
|
||||
.cython.score-74 {background-color: #FFFF1e;}
|
||||
.cython.score-75 {background-color: #FFFF1e;}
|
||||
.cython.score-76 {background-color: #FFFF1d;}
|
||||
.cython.score-77 {background-color: #FFFF1d;}
|
||||
.cython.score-78 {background-color: #FFFF1c;}
|
||||
.cython.score-79 {background-color: #FFFF1c;}
|
||||
.cython.score-80 {background-color: #FFFF1c;}
|
||||
.cython.score-81 {background-color: #FFFF1c;}
|
||||
.cython.score-82 {background-color: #FFFF1b;}
|
||||
.cython.score-83 {background-color: #FFFF1b;}
|
||||
.cython.score-84 {background-color: #FFFF1b;}
|
||||
.cython.score-85 {background-color: #FFFF1a;}
|
||||
.cython.score-86 {background-color: #FFFF1a;}
|
||||
.cython.score-87 {background-color: #FFFF1a;}
|
||||
.cython.score-88 {background-color: #FFFF1a;}
|
||||
.cython.score-89 {background-color: #FFFF19;}
|
||||
.cython.score-90 {background-color: #FFFF19;}
|
||||
.cython.score-91 {background-color: #FFFF19;}
|
||||
.cython.score-92 {background-color: #FFFF19;}
|
||||
.cython.score-93 {background-color: #FFFF18;}
|
||||
.cython.score-94 {background-color: #FFFF18;}
|
||||
.cython.score-95 {background-color: #FFFF18;}
|
||||
.cython.score-96 {background-color: #FFFF18;}
|
||||
.cython.score-97 {background-color: #FFFF17;}
|
||||
.cython.score-98 {background-color: #FFFF17;}
|
||||
.cython.score-99 {background-color: #FFFF17;}
|
||||
.cython.score-100 {background-color: #FFFF17;}
|
||||
.cython.score-101 {background-color: #FFFF16;}
|
||||
.cython.score-102 {background-color: #FFFF16;}
|
||||
.cython.score-103 {background-color: #FFFF16;}
|
||||
.cython.score-104 {background-color: #FFFF16;}
|
||||
.cython.score-105 {background-color: #FFFF16;}
|
||||
.cython.score-106 {background-color: #FFFF15;}
|
||||
.cython.score-107 {background-color: #FFFF15;}
|
||||
.cython.score-108 {background-color: #FFFF15;}
|
||||
.cython.score-109 {background-color: #FFFF15;}
|
||||
.cython.score-110 {background-color: #FFFF15;}
|
||||
.cython.score-111 {background-color: #FFFF15;}
|
||||
.cython.score-112 {background-color: #FFFF14;}
|
||||
.cython.score-113 {background-color: #FFFF14;}
|
||||
.cython.score-114 {background-color: #FFFF14;}
|
||||
.cython.score-115 {background-color: #FFFF14;}
|
||||
.cython.score-116 {background-color: #FFFF14;}
|
||||
.cython.score-117 {background-color: #FFFF14;}
|
||||
.cython.score-118 {background-color: #FFFF13;}
|
||||
.cython.score-119 {background-color: #FFFF13;}
|
||||
.cython.score-120 {background-color: #FFFF13;}
|
||||
.cython.score-121 {background-color: #FFFF13;}
|
||||
.cython.score-122 {background-color: #FFFF13;}
|
||||
.cython.score-123 {background-color: #FFFF13;}
|
||||
.cython.score-124 {background-color: #FFFF13;}
|
||||
.cython.score-125 {background-color: #FFFF12;}
|
||||
.cython.score-126 {background-color: #FFFF12;}
|
||||
.cython.score-127 {background-color: #FFFF12;}
|
||||
.cython.score-128 {background-color: #FFFF12;}
|
||||
.cython.score-129 {background-color: #FFFF12;}
|
||||
.cython.score-130 {background-color: #FFFF12;}
|
||||
.cython.score-131 {background-color: #FFFF12;}
|
||||
.cython.score-132 {background-color: #FFFF11;}
|
||||
.cython.score-133 {background-color: #FFFF11;}
|
||||
.cython.score-134 {background-color: #FFFF11;}
|
||||
.cython.score-135 {background-color: #FFFF11;}
|
||||
.cython.score-136 {background-color: #FFFF11;}
|
||||
.cython.score-137 {background-color: #FFFF11;}
|
||||
.cython.score-138 {background-color: #FFFF11;}
|
||||
.cython.score-139 {background-color: #FFFF11;}
|
||||
.cython.score-140 {background-color: #FFFF11;}
|
||||
.cython.score-141 {background-color: #FFFF10;}
|
||||
.cython.score-142 {background-color: #FFFF10;}
|
||||
.cython.score-143 {background-color: #FFFF10;}
|
||||
.cython.score-144 {background-color: #FFFF10;}
|
||||
.cython.score-145 {background-color: #FFFF10;}
|
||||
.cython.score-146 {background-color: #FFFF10;}
|
||||
.cython.score-147 {background-color: #FFFF10;}
|
||||
.cython.score-148 {background-color: #FFFF10;}
|
||||
.cython.score-149 {background-color: #FFFF10;}
|
||||
.cython.score-150 {background-color: #FFFF0f;}
|
||||
.cython.score-151 {background-color: #FFFF0f;}
|
||||
.cython.score-152 {background-color: #FFFF0f;}
|
||||
.cython.score-153 {background-color: #FFFF0f;}
|
||||
.cython.score-154 {background-color: #FFFF0f;}
|
||||
.cython.score-155 {background-color: #FFFF0f;}
|
||||
.cython.score-156 {background-color: #FFFF0f;}
|
||||
.cython.score-157 {background-color: #FFFF0f;}
|
||||
.cython.score-158 {background-color: #FFFF0f;}
|
||||
.cython.score-159 {background-color: #FFFF0f;}
|
||||
.cython.score-160 {background-color: #FFFF0f;}
|
||||
.cython.score-161 {background-color: #FFFF0e;}
|
||||
.cython.score-162 {background-color: #FFFF0e;}
|
||||
.cython.score-163 {background-color: #FFFF0e;}
|
||||
.cython.score-164 {background-color: #FFFF0e;}
|
||||
.cython.score-165 {background-color: #FFFF0e;}
|
||||
.cython.score-166 {background-color: #FFFF0e;}
|
||||
.cython.score-167 {background-color: #FFFF0e;}
|
||||
.cython.score-168 {background-color: #FFFF0e;}
|
||||
.cython.score-169 {background-color: #FFFF0e;}
|
||||
.cython.score-170 {background-color: #FFFF0e;}
|
||||
.cython.score-171 {background-color: #FFFF0e;}
|
||||
.cython.score-172 {background-color: #FFFF0e;}
|
||||
.cython.score-173 {background-color: #FFFF0d;}
|
||||
.cython.score-174 {background-color: #FFFF0d;}
|
||||
.cython.score-175 {background-color: #FFFF0d;}
|
||||
.cython.score-176 {background-color: #FFFF0d;}
|
||||
.cython.score-177 {background-color: #FFFF0d;}
|
||||
.cython.score-178 {background-color: #FFFF0d;}
|
||||
.cython.score-179 {background-color: #FFFF0d;}
|
||||
.cython.score-180 {background-color: #FFFF0d;}
|
||||
.cython.score-181 {background-color: #FFFF0d;}
|
||||
.cython.score-182 {background-color: #FFFF0d;}
|
||||
.cython.score-183 {background-color: #FFFF0d;}
|
||||
.cython.score-184 {background-color: #FFFF0d;}
|
||||
.cython.score-185 {background-color: #FFFF0d;}
|
||||
.cython.score-186 {background-color: #FFFF0d;}
|
||||
.cython.score-187 {background-color: #FFFF0c;}
|
||||
.cython.score-188 {background-color: #FFFF0c;}
|
||||
.cython.score-189 {background-color: #FFFF0c;}
|
||||
.cython.score-190 {background-color: #FFFF0c;}
|
||||
.cython.score-191 {background-color: #FFFF0c;}
|
||||
.cython.score-192 {background-color: #FFFF0c;}
|
||||
.cython.score-193 {background-color: #FFFF0c;}
|
||||
.cython.score-194 {background-color: #FFFF0c;}
|
||||
.cython.score-195 {background-color: #FFFF0c;}
|
||||
.cython.score-196 {background-color: #FFFF0c;}
|
||||
.cython.score-197 {background-color: #FFFF0c;}
|
||||
.cython.score-198 {background-color: #FFFF0c;}
|
||||
.cython.score-199 {background-color: #FFFF0c;}
|
||||
.cython.score-200 {background-color: #FFFF0c;}
|
||||
.cython.score-201 {background-color: #FFFF0c;}
|
||||
.cython.score-202 {background-color: #FFFF0c;}
|
||||
.cython.score-203 {background-color: #FFFF0b;}
|
||||
.cython.score-204 {background-color: #FFFF0b;}
|
||||
.cython.score-205 {background-color: #FFFF0b;}
|
||||
.cython.score-206 {background-color: #FFFF0b;}
|
||||
.cython.score-207 {background-color: #FFFF0b;}
|
||||
.cython.score-208 {background-color: #FFFF0b;}
|
||||
.cython.score-209 {background-color: #FFFF0b;}
|
||||
.cython.score-210 {background-color: #FFFF0b;}
|
||||
.cython.score-211 {background-color: #FFFF0b;}
|
||||
.cython.score-212 {background-color: #FFFF0b;}
|
||||
.cython.score-213 {background-color: #FFFF0b;}
|
||||
.cython.score-214 {background-color: #FFFF0b;}
|
||||
.cython.score-215 {background-color: #FFFF0b;}
|
||||
.cython.score-216 {background-color: #FFFF0b;}
|
||||
.cython.score-217 {background-color: #FFFF0b;}
|
||||
.cython.score-218 {background-color: #FFFF0b;}
|
||||
.cython.score-219 {background-color: #FFFF0b;}
|
||||
.cython.score-220 {background-color: #FFFF0b;}
|
||||
.cython.score-221 {background-color: #FFFF0b;}
|
||||
.cython.score-222 {background-color: #FFFF0a;}
|
||||
.cython.score-223 {background-color: #FFFF0a;}
|
||||
.cython.score-224 {background-color: #FFFF0a;}
|
||||
.cython.score-225 {background-color: #FFFF0a;}
|
||||
.cython.score-226 {background-color: #FFFF0a;}
|
||||
.cython.score-227 {background-color: #FFFF0a;}
|
||||
.cython.score-228 {background-color: #FFFF0a;}
|
||||
.cython.score-229 {background-color: #FFFF0a;}
|
||||
.cython.score-230 {background-color: #FFFF0a;}
|
||||
.cython.score-231 {background-color: #FFFF0a;}
|
||||
.cython.score-232 {background-color: #FFFF0a;}
|
||||
.cython.score-233 {background-color: #FFFF0a;}
|
||||
.cython.score-234 {background-color: #FFFF0a;}
|
||||
.cython.score-235 {background-color: #FFFF0a;}
|
||||
.cython.score-236 {background-color: #FFFF0a;}
|
||||
.cython.score-237 {background-color: #FFFF0a;}
|
||||
.cython.score-238 {background-color: #FFFF0a;}
|
||||
.cython.score-239 {background-color: #FFFF0a;}
|
||||
.cython.score-240 {background-color: #FFFF0a;}
|
||||
.cython.score-241 {background-color: #FFFF0a;}
|
||||
.cython.score-242 {background-color: #FFFF0a;}
|
||||
.cython.score-243 {background-color: #FFFF0a;}
|
||||
.cython.score-244 {background-color: #FFFF0a;}
|
||||
.cython.score-245 {background-color: #FFFF0a;}
|
||||
.cython.score-246 {background-color: #FFFF09;}
|
||||
.cython.score-247 {background-color: #FFFF09;}
|
||||
.cython.score-248 {background-color: #FFFF09;}
|
||||
.cython.score-249 {background-color: #FFFF09;}
|
||||
.cython.score-250 {background-color: #FFFF09;}
|
||||
.cython.score-251 {background-color: #FFFF09;}
|
||||
.cython.score-252 {background-color: #FFFF09;}
|
||||
.cython.score-253 {background-color: #FFFF09;}
|
||||
.cython.score-254 {background-color: #FFFF09;}
|
||||
</style>
|
||||
</head>
|
||||
<body class="cython">
|
||||
<p><span style="border-bottom: solid 1px grey;">Generated by Cython 0.29.2</span></p>
|
||||
<p>
|
||||
<span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
|
||||
Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
|
||||
</p>
|
||||
<p>Raw output: <a href="_ident.c">_ident.c</a></p>
|
||||
<div class="cython"><pre class="cython line score-0"> <span class="">01</span>: # -*- coding: utf-8 -*-</pre>
|
||||
<pre class="cython line score-0"> <span class="">02</span>: # Copyright 2018 gevent contributors. See LICENSE for details.</pre>
|
||||
<pre class="cython line score-0"> <span class="">03</span>: # cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False</pre>
|
||||
<pre class="cython line score-0"> <span class="">04</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">05</span>: from __future__ import absolute_import</pre>
|
||||
<pre class="cython line score-0"> <span class="">06</span>: from __future__ import division</pre>
|
||||
<pre class="cython line score-0"> <span class="">07</span>: from __future__ import print_function</pre>
|
||||
<pre class="cython line score-0"> <span class="">08</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">09</span>: </pre>
|
||||
<pre class="cython line score-16" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">10</span>: from weakref import WeakKeyDictionary</pre>
|
||||
<pre class='cython code score-16 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_WeakKeyDictionary);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_WeakKeyDictionary);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_WeakKeyDictionary);
|
||||
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_weakref, __pyx_t_1, 0);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_2, __pyx_n_s_WeakKeyDictionary);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_XGOTREF</span>(__pyx_v_6gevent_7__ident_WeakKeyDictionary);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_v_6gevent_7__ident_WeakKeyDictionary, __pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
</pre><pre class="cython line score-11" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">11</span>: from weakref import ref</pre>
|
||||
<pre class='cython code score-11 '> __pyx_t_2 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_ref);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_ref);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_2, 0, __pyx_n_s_ref);
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_weakref, __pyx_t_2, 0);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">12</span>: </pre>
|
||||
<pre class="cython line score-16" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">13</span>: from heapq import heappop</pre>
|
||||
<pre class='cython code score-16 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_heappop);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_heappop);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_heappop);
|
||||
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_heapq, __pyx_t_1, 0);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_2, __pyx_n_s_heappop);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_XGOTREF</span>(__pyx_v_6gevent_7__ident_heappop);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_v_6gevent_7__ident_heappop, __pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
</pre><pre class="cython line score-16" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">14</span>: from heapq import heappush</pre>
|
||||
<pre class='cython code score-16 '> __pyx_t_2 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_heappush);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_heappush);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_2, 0, __pyx_n_s_heappush);
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_heapq, __pyx_t_2, 0);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_1, __pyx_n_s_heappush);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_2);
|
||||
<span class='refnanny'>__Pyx_XGOTREF</span>(__pyx_v_6gevent_7__ident_heappush);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_v_6gevent_7__ident_heappush, __pyx_t_2);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">15</span>: </pre>
|
||||
<pre class="cython line score-13" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">16</span>: __all__ = [</pre>
|
||||
<pre class='cython code score-13 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_IdentRegistry);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_IdentRegistry);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_IdentRegistry);
|
||||
if (<span class='py_c_api'>PyDict_SetItem</span>(__pyx_d, __pyx_n_s_all, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 16, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">17</span>: 'IdentRegistry',</pre>
|
||||
<pre class="cython line score-0"> <span class="">18</span>: ]</pre>
|
||||
<pre class="cython line score-0"> <span class="">19</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">20</span>: class ValuedWeakRef(ref):</pre>
|
||||
<pre class="cython line score-0"> <span class="">21</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">22</span>: A weak ref with an associated value.</pre>
|
||||
<pre class="cython line score-0"> <span class="">23</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">24</span>: </pre>
|
||||
<pre class="cython line score-15" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">25</span>: __slots__ = ('value',)</pre>
|
||||
<pre class='cython code score-15 '> if (<span class='py_c_api'>PyDict_SetItem</span>((PyObject *)__pyx_ptype_6gevent_7__ident_ValuedWeakRef->tp_dict, __pyx_n_s_slots, __pyx_tuple_) < 0) <span class='error_goto'>__PYX_ERR(0, 25, __pyx_L1_error)</span>
|
||||
<span class='py_c_api'>PyType_Modified</span>(__pyx_ptype_6gevent_7__ident_ValuedWeakRef);
|
||||
/* … */
|
||||
__pyx_tuple_ = <span class='py_c_api'>PyTuple_Pack</span>(1, __pyx_n_s_value);<span class='error_goto'> if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 25, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_tuple_);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_tuple_);
|
||||
</pre><pre class="cython line score-0"> <span class="">26</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">27</span>: </pre>
|
||||
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">28</span>: class IdentRegistry(object):</pre>
|
||||
<pre class='cython code score-0 '>struct __pyx_vtabstruct_6gevent_7__ident_IdentRegistry {
|
||||
PyObject *(*get_ident)(struct __pyx_obj_6gevent_7__ident_IdentRegistry *, PyObject *, int __pyx_skip_dispatch);
|
||||
PyObject *(*_return_ident)(struct __pyx_obj_6gevent_7__ident_IdentRegistry *, struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *, int __pyx_skip_dispatch);
|
||||
};
|
||||
static struct __pyx_vtabstruct_6gevent_7__ident_IdentRegistry *__pyx_vtabptr_6gevent_7__ident_IdentRegistry;
|
||||
</pre><pre class="cython line score-0"> <span class="">29</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">30</span>: Maintains a unique mapping of (small) positive integer identifiers</pre>
|
||||
<pre class="cython line score-0"> <span class="">31</span>: to objects that can be weakly referenced.</pre>
|
||||
<pre class="cython line score-0"> <span class="">32</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">33</span>: It is guaranteed that no two objects will have the the same</pre>
|
||||
<pre class="cython line score-0"> <span class="">34</span>: identifier at the same time, as long as those objects are</pre>
|
||||
<pre class="cython line score-0"> <span class="">35</span>: also uniquely hashable.</pre>
|
||||
<pre class="cython line score-0"> <span class="">36</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">37</span>: </pre>
|
||||
<pre class="cython line score-16" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">38</span>: def __init__(self):</pre>
|
||||
<pre class='cython code score-16 '>/* Python wrapper */
|
||||
static int __pyx_pw_6gevent_7__ident_13IdentRegistry_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
|
||||
static int __pyx_pw_6gevent_7__ident_13IdentRegistry_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
|
||||
int __pyx_r;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("__init__ (wrapper)", 0);
|
||||
if (unlikely(<span class='py_macro_api'>PyTuple_GET_SIZE</span>(__pyx_args) > 0)) {
|
||||
<span class='pyx_c_api'>__Pyx_RaiseArgtupleInvalid</span>("__init__", 1, 0, 0, <span class='py_macro_api'>PyTuple_GET_SIZE</span>(__pyx_args)); return -1;}
|
||||
if (unlikely(__pyx_kwds) && unlikely(<span class='py_c_api'>PyDict_Size</span>(__pyx_kwds) > 0) && unlikely(!<span class='pyx_c_api'>__Pyx_CheckKeywordStrings</span>(__pyx_kwds, "__init__", 0))) return -1;
|
||||
__pyx_r = __pyx_pf_6gevent_7__ident_13IdentRegistry___init__(((struct __pyx_obj_6gevent_7__ident_IdentRegistry *)__pyx_v_self));
|
||||
|
||||
/* function exit code */
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
static int __pyx_pf_6gevent_7__ident_13IdentRegistry___init__(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self) {
|
||||
int __pyx_r;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("__init__", 0);
|
||||
/* … */
|
||||
/* function exit code */
|
||||
__pyx_r = 0;
|
||||
goto __pyx_L0;
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_3);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = -1;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
</pre><pre class="cython line score-0"> <span class="">39</span>: # {obj -> (ident, wref(obj))}</pre>
|
||||
<pre class="cython line score-18" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">40</span>: self._registry = WeakKeyDictionary()</pre>
|
||||
<pre class='cython code score-18 '> <span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_6gevent_7__ident_WeakKeyDictionary);
|
||||
__pyx_t_2 = __pyx_v_6gevent_7__ident_WeakKeyDictionary; __pyx_t_3 = NULL;
|
||||
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_2))) {
|
||||
__pyx_t_3 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_2);
|
||||
if (likely(__pyx_t_3)) {
|
||||
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_3);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_2, function);
|
||||
}
|
||||
}
|
||||
__pyx_t_1 = (__pyx_t_3) ? <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_2, __pyx_t_3) : <span class='pyx_c_api'>__Pyx_PyObject_CallNoArg</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_3); __pyx_t_3 = 0;
|
||||
if (unlikely(!__pyx_t_1)) <span class='error_goto'>__PYX_ERR(0, 40, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_v_self->_registry);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_v_self->_registry);
|
||||
__pyx_v_self->_registry = __pyx_t_1;
|
||||
__pyx_t_1 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">41</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">42</span>: # A heap of numbers that have been used and returned</pre>
|
||||
<pre class="cython line score-6" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">43</span>: self._available_idents = []</pre>
|
||||
<pre class='cython code score-6 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(0);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 43, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_v_self->_available_idents);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_v_self->_available_idents);
|
||||
__pyx_v_self->_available_idents = ((PyObject*)__pyx_t_1);
|
||||
__pyx_t_1 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">44</span>: </pre>
|
||||
<pre class="cython line score-31" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">45</span>: def get_ident(self, obj):</pre>
|
||||
<pre class='cython code score-31 '>static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_3get_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/
|
||||
static PyObject *__pyx_f_6gevent_7__ident_13IdentRegistry_get_ident(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self, PyObject *__pyx_v_obj, CYTHON_UNUSED int __pyx_skip_dispatch) {
|
||||
PyObject *__pyx_v_ident = NULL;
|
||||
struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *__pyx_v_vref = NULL;
|
||||
PyObject *__pyx_r = NULL;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("get_ident", 0);
|
||||
/* … */
|
||||
/* function exit code */
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_4);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_8);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry.get_ident", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = 0;
|
||||
__pyx_L0:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_v_ident);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>((PyObject *)__pyx_v_vref);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
/* Python wrapper */
|
||||
static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_3get_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_obj); /*proto*/
|
||||
static char __pyx_doc_6gevent_7__ident_13IdentRegistry_2get_ident[] = "IdentRegistry.get_ident(self, obj)\n\n Retrieve the identifier for *obj*, creating one\n if necessary.\n ";
|
||||
static PyMethodDef __pyx_mdef_6gevent_7__ident_13IdentRegistry_3get_ident = {"get_ident", (PyCFunction)__pyx_pw_6gevent_7__ident_13IdentRegistry_3get_ident, METH_O, __pyx_doc_6gevent_7__ident_13IdentRegistry_2get_ident};
|
||||
static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_3get_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_obj) {
|
||||
PyObject *__pyx_r = 0;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("get_ident (wrapper)", 0);
|
||||
__pyx_r = __pyx_pf_6gevent_7__ident_13IdentRegistry_2get_ident(((struct __pyx_obj_6gevent_7__ident_IdentRegistry *)__pyx_v_self), ((PyObject *)__pyx_v_obj));
|
||||
|
||||
/* function exit code */
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
static PyObject *__pyx_pf_6gevent_7__ident_13IdentRegistry_2get_ident(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self, PyObject *__pyx_v_obj) {
|
||||
PyObject *__pyx_r = NULL;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("get_ident", 0);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
|
||||
__pyx_t_1 = __pyx_f_6gevent_7__ident_13IdentRegistry_get_ident(__pyx_v_self, __pyx_v_obj, 1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
__pyx_r = __pyx_t_1;
|
||||
__pyx_t_1 = 0;
|
||||
goto __pyx_L0;
|
||||
|
||||
/* function exit code */
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry.get_ident", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = NULL;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
/* … */
|
||||
__pyx_tuple__2 = <span class='py_c_api'>PyTuple_Pack</span>(2, __pyx_n_s_self, __pyx_n_s_obj);<span class='error_goto'> if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 45, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_tuple__2);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_tuple__2);
|
||||
/* … */
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_CyFunction_NewEx</span>(&__pyx_mdef_6gevent_7__ident_13IdentRegistry_3get_ident, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentRegistry_get_ident, NULL, __pyx_n_s_gevent___ident, __pyx_d, ((PyObject *)__pyx_codeobj__3));<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
if (<span class='py_c_api'>PyDict_SetItem</span>((PyObject *)__pyx_ptype_6gevent_7__ident_IdentRegistry->tp_dict, __pyx_n_s_get_ident, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 45, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
<span class='py_c_api'>PyType_Modified</span>(__pyx_ptype_6gevent_7__ident_IdentRegistry);
|
||||
__pyx_codeobj__3 = (PyObject*)<span class='pyx_c_api'>__Pyx_PyCode_New</span>(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_gevent__ident_py, __pyx_n_s_get_ident, 45, __pyx_empty_bytes);<span class='error_goto'> if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(0, 45, __pyx_L1_error)</span>
|
||||
</pre><pre class="cython line score-0"> <span class="">46</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">47</span>: Retrieve the identifier for *obj*, creating one</pre>
|
||||
<pre class="cython line score-0"> <span class="">48</span>: if necessary.</pre>
|
||||
<pre class="cython line score-0"> <span class="">49</span>: """</pre>
|
||||
<pre class="cython line score-0"> <span class="">50</span>: </pre>
|
||||
<pre class="cython line score-8" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">51</span>: try:</pre>
|
||||
<pre class='cython code score-8 '> {
|
||||
/*try:*/ {
|
||||
/* … */
|
||||
}
|
||||
__pyx_L3_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_4); __pyx_t_4 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5); __pyx_t_5 = 0;
|
||||
/* … */
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_2);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_3);
|
||||
<span class='pyx_c_api'>__Pyx_ExceptionReset</span>(__pyx_t_1, __pyx_t_2, __pyx_t_3);
|
||||
goto __pyx_L1_error;
|
||||
__pyx_L7_try_return:;
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_2);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_3);
|
||||
<span class='pyx_c_api'>__Pyx_ExceptionReset</span>(__pyx_t_1, __pyx_t_2, __pyx_t_3);
|
||||
goto __pyx_L0;
|
||||
__pyx_L4_exception_handled:;
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_1);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_2);
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_t_3);
|
||||
<span class='pyx_c_api'>__Pyx_ExceptionReset</span>(__pyx_t_1, __pyx_t_2, __pyx_t_3);
|
||||
}
|
||||
</pre><pre class="cython line score-6" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">52</span>: return self._registry[obj][0]</pre>
|
||||
<pre class='cython code score-6 '> <span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
|
||||
__pyx_t_4 = <span class='pyx_c_api'>__Pyx_PyObject_GetItem</span>(__pyx_v_self->_registry, __pyx_v_obj);<span class='error_goto'> if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 52, __pyx_L3_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_4);
|
||||
__pyx_t_5 = <span class='pyx_c_api'>__Pyx_GetItemInt</span>(__pyx_t_4, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 52, __pyx_L3_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_4); __pyx_t_4 = 0;
|
||||
__pyx_r = __pyx_t_5;
|
||||
__pyx_t_5 = 0;
|
||||
goto __pyx_L7_try_return;
|
||||
</pre><pre class="cython line score-4" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">53</span>: except KeyError:</pre>
|
||||
<pre class='cython code score-4 '> __pyx_t_6 = <span class='pyx_c_api'>__Pyx_PyErr_ExceptionMatches</span>(__pyx_builtin_KeyError);
|
||||
if (__pyx_t_6) {
|
||||
<span class='pyx_c_api'>__Pyx_ErrRestore</span>(0,0,0);
|
||||
goto __pyx_L4_exception_handled;
|
||||
}
|
||||
goto __pyx_L5_except_error;
|
||||
__pyx_L5_except_error:;
|
||||
</pre><pre class="cython line score-0"> <span class="">54</span>: pass</pre>
|
||||
<pre class="cython line score-0"> <span class="">55</span>: </pre>
|
||||
<pre class="cython line score-1" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">56</span>: if self._available_idents:</pre>
|
||||
<pre class='cython code score-1 '> __pyx_t_7 = (__pyx_v_self->_available_idents != Py_None)&&(<span class='py_macro_api'>PyList_GET_SIZE</span>(__pyx_v_self->_available_idents) != 0);
|
||||
if (__pyx_t_7) {
|
||||
/* … */
|
||||
goto __pyx_L9;
|
||||
}
|
||||
</pre><pre class="cython line score-0"> <span class="">57</span>: # Take the smallest free number</pre>
|
||||
<pre class="cython line score-15" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">58</span>: ident = heappop(self._available_idents)</pre>
|
||||
<pre class='cython code score-15 '> <span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_6gevent_7__ident_heappop);
|
||||
__pyx_t_4 = __pyx_v_6gevent_7__ident_heappop; __pyx_t_8 = NULL;
|
||||
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_4))) {
|
||||
__pyx_t_8 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_4);
|
||||
if (likely(__pyx_t_8)) {
|
||||
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_4);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_8);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_4, function);
|
||||
}
|
||||
}
|
||||
__pyx_t_5 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_8, __pyx_v_self->_available_idents) : <span class='pyx_c_api'>__Pyx_PyObject_CallOneArg</span>(__pyx_t_4, __pyx_v_self->_available_idents);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_8); __pyx_t_8 = 0;
|
||||
if (unlikely(!__pyx_t_5)) <span class='error_goto'>__PYX_ERR(0, 58, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_4); __pyx_t_4 = 0;
|
||||
__pyx_v_ident = __pyx_t_5;
|
||||
__pyx_t_5 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">59</span>: else:</pre>
|
||||
<pre class="cython line score-0"> <span class="">60</span>: # Allocate a bigger one</pre>
|
||||
<pre class="cython line score-12" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">61</span>: ident = len(self._registry)</pre>
|
||||
<pre class='cython code score-12 '> /*else*/ {
|
||||
__pyx_t_5 = __pyx_v_self->_registry;
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_5);
|
||||
__pyx_t_9 = <span class='py_c_api'>PyObject_Length</span>(__pyx_t_5);<span class='error_goto'> if (unlikely(__pyx_t_9 == ((Py_ssize_t)-1))) __PYX_ERR(0, 61, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_5); __pyx_t_5 = 0;
|
||||
__pyx_t_5 = <span class='py_c_api'>PyInt_FromSsize_t</span>(__pyx_t_9);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 61, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
__pyx_v_ident = __pyx_t_5;
|
||||
__pyx_t_5 = 0;
|
||||
}
|
||||
__pyx_L9:;
|
||||
</pre><pre class="cython line score-0"> <span class="">62</span>: </pre>
|
||||
<pre class="cython line score-13" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">63</span>: vref = ValuedWeakRef(obj, self._return_ident)</pre>
|
||||
<pre class='cython code score-13 '> __pyx_t_5 = <span class='pyx_c_api'>__Pyx_PyObject_GetAttrStr</span>(((PyObject *)__pyx_v_self), __pyx_n_s_return_ident);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 63, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
__pyx_t_4 = <span class='py_c_api'>PyTuple_New</span>(2);<span class='error_goto'> if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 63, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_4);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_obj);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_v_obj);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_4, 0, __pyx_v_obj);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_5);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_4, 1, __pyx_t_5);
|
||||
__pyx_t_5 = 0;
|
||||
__pyx_t_5 = <span class='pyx_c_api'>__Pyx_PyObject_Call</span>(((PyObject *)__pyx_ptype_6gevent_7__ident_ValuedWeakRef), __pyx_t_4, NULL);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 63, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_4); __pyx_t_4 = 0;
|
||||
__pyx_v_vref = ((struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *)__pyx_t_5);
|
||||
__pyx_t_5 = 0;
|
||||
</pre><pre class="cython line score-2" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">64</span>: vref.value = ident # pylint:disable=assigning-non-slot,attribute-defined-outside-init</pre>
|
||||
<pre class='cython code score-2 '> <span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_ident);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_v_ident);
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_v_vref->value);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_v_vref->value);
|
||||
__pyx_v_vref->value = __pyx_v_ident;
|
||||
</pre><pre class="cython line score-15" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">65</span>: self._registry[obj] = (ident, vref)</pre>
|
||||
<pre class='cython code score-15 '> __pyx_t_5 = <span class='py_c_api'>PyTuple_New</span>(2);<span class='error_goto'> if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 65, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_ident);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_v_ident);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_5, 0, __pyx_v_ident);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(((PyObject *)__pyx_v_vref));
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(((PyObject *)__pyx_v_vref));
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_5, 1, ((PyObject *)__pyx_v_vref));
|
||||
if (unlikely(<span class='py_c_api'>PyObject_SetItem</span>(__pyx_v_self->_registry, __pyx_v_obj, __pyx_t_5) < 0)) <span class='error_goto'>__PYX_ERR(0, 65, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_5); __pyx_t_5 = 0;
|
||||
</pre><pre class="cython line score-2" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">66</span>: return ident</pre>
|
||||
<pre class='cython code score-2 '> <span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_ident);
|
||||
__pyx_r = __pyx_v_ident;
|
||||
goto __pyx_L0;
|
||||
</pre><pre class="cython line score-0"> <span class="">67</span>: </pre>
|
||||
<pre class="cython line score-31" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">68</span>: def _return_ident(self, vref):</pre>
|
||||
<pre class='cython code score-31 '>static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_5_return_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_vref); /*proto*/
|
||||
static PyObject *__pyx_f_6gevent_7__ident_13IdentRegistry__return_ident(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self, struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *__pyx_v_vref, CYTHON_UNUSED int __pyx_skip_dispatch) {
|
||||
PyObject *__pyx_r = NULL;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("_return_ident", 0);
|
||||
/* … */
|
||||
/* function exit code */
|
||||
__pyx_r = Py_None; <span class='pyx_macro_api'>__Pyx_INCREF</span>(Py_None);
|
||||
goto __pyx_L0;
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_3);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_4);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_7);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry._return_ident", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = 0;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
/* Python wrapper */
|
||||
static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_5_return_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_vref); /*proto*/
|
||||
static char __pyx_doc_6gevent_7__ident_13IdentRegistry_4_return_ident[] = "IdentRegistry._return_ident(self, ValuedWeakRef vref)";
|
||||
static PyMethodDef __pyx_mdef_6gevent_7__ident_13IdentRegistry_5_return_ident = {"_return_ident", (PyCFunction)__pyx_pw_6gevent_7__ident_13IdentRegistry_5_return_ident, METH_O, __pyx_doc_6gevent_7__ident_13IdentRegistry_4_return_ident};
|
||||
static PyObject *__pyx_pw_6gevent_7__ident_13IdentRegistry_5_return_ident(PyObject *__pyx_v_self, PyObject *__pyx_v_vref) {
|
||||
PyObject *__pyx_r = 0;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("_return_ident (wrapper)", 0);
|
||||
if (unlikely(!<span class='pyx_c_api'>__Pyx_ArgTypeTest</span>(((PyObject *)__pyx_v_vref), __pyx_ptype_6gevent_7__ident_ValuedWeakRef, 1, "vref", 0))) <span class='error_goto'>__PYX_ERR(0, 68, __pyx_L1_error)</span>
|
||||
__pyx_r = __pyx_pf_6gevent_7__ident_13IdentRegistry_4_return_ident(((struct __pyx_obj_6gevent_7__ident_IdentRegistry *)__pyx_v_self), ((struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *)__pyx_v_vref));
|
||||
|
||||
/* function exit code */
|
||||
goto __pyx_L0;
|
||||
__pyx_L1_error:;
|
||||
__pyx_r = NULL;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
static PyObject *__pyx_pf_6gevent_7__ident_13IdentRegistry_4_return_ident(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self, struct __pyx_obj_6gevent_7__ident_ValuedWeakRef *__pyx_v_vref) {
|
||||
PyObject *__pyx_r = NULL;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("_return_ident", 0);
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_r);
|
||||
__pyx_t_1 = __pyx_f_6gevent_7__ident_13IdentRegistry__return_ident(__pyx_v_self, __pyx_v_vref, 1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
__pyx_r = __pyx_t_1;
|
||||
__pyx_t_1 = 0;
|
||||
goto __pyx_L0;
|
||||
|
||||
/* function exit code */
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry._return_ident", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = NULL;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_XGIVEREF</span>(__pyx_r);
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
/* … */
|
||||
__pyx_tuple__4 = <span class='py_c_api'>PyTuple_Pack</span>(2, __pyx_n_s_self, __pyx_n_s_vref);<span class='error_goto'> if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 68, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_tuple__4);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_tuple__4);
|
||||
/* … */
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_CyFunction_NewEx</span>(&__pyx_mdef_6gevent_7__ident_13IdentRegistry_5_return_ident, __Pyx_CYFUNCTION_CCLASS, __pyx_n_s_IdentRegistry__return_ident, NULL, __pyx_n_s_gevent___ident, __pyx_d, ((PyObject *)__pyx_codeobj__5));<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
if (<span class='py_c_api'>PyDict_SetItem</span>((PyObject *)__pyx_ptype_6gevent_7__ident_IdentRegistry->tp_dict, __pyx_n_s_return_ident, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 68, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
<span class='py_c_api'>PyType_Modified</span>(__pyx_ptype_6gevent_7__ident_IdentRegistry);
|
||||
</pre><pre class="cython line score-0"> <span class="">69</span>: # By the time this is called, self._registry has been</pre>
|
||||
<pre class="cython line score-0"> <span class="">70</span>: # updated</pre>
|
||||
<pre class="cython line score-0" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">71</span>: if heappush is not None:</pre>
|
||||
<pre class='cython code score-0 '> __pyx_t_1 = (__pyx_v_6gevent_7__ident_heappush != Py_None);
|
||||
__pyx_t_2 = (__pyx_t_1 != 0);
|
||||
if (__pyx_t_2) {
|
||||
/* … */
|
||||
}
|
||||
</pre><pre class="cython line score-0"> <span class="">72</span>: # Under some circumstances we can get called</pre>
|
||||
<pre class="cython line score-0"> <span class="">73</span>: # when the interpreter is shutting down, and globals</pre>
|
||||
<pre class="cython line score-0"> <span class="">74</span>: # aren't available any more.</pre>
|
||||
<pre class="cython line score-39" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">75</span>: heappush(self._available_idents, vref.value)</pre>
|
||||
<pre class='cython code score-39 '> <span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_6gevent_7__ident_heappush);
|
||||
__pyx_t_4 = __pyx_v_6gevent_7__ident_heappush; __pyx_t_5 = NULL;
|
||||
__pyx_t_6 = 0;
|
||||
if (CYTHON_UNPACK_METHODS && unlikely(<span class='py_c_api'>PyMethod_Check</span>(__pyx_t_4))) {
|
||||
__pyx_t_5 = <span class='py_macro_api'>PyMethod_GET_SELF</span>(__pyx_t_4);
|
||||
if (likely(__pyx_t_5)) {
|
||||
PyObject* function = <span class='py_macro_api'>PyMethod_GET_FUNCTION</span>(__pyx_t_4);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_5);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(function);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF_SET</span>(__pyx_t_4, function);
|
||||
__pyx_t_6 = 1;
|
||||
}
|
||||
}
|
||||
#if CYTHON_FAST_PYCALL
|
||||
if (<span class='py_c_api'>PyFunction_Check</span>(__pyx_t_4)) {
|
||||
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_self->_available_idents, __pyx_v_vref->value};
|
||||
__pyx_t_3 = <span class='pyx_c_api'>__Pyx_PyFunction_FastCall</span>(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 75, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5); __pyx_t_5 = 0;
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_3);
|
||||
} else
|
||||
#endif
|
||||
#if CYTHON_FAST_PYCCALL
|
||||
if (<span class='pyx_c_api'>__Pyx_PyFastCFunction_Check</span>(__pyx_t_4)) {
|
||||
PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_self->_available_idents, __pyx_v_vref->value};
|
||||
__pyx_t_3 = <span class='pyx_c_api'>__Pyx_PyCFunction_FastCall</span>(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 75, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_5); __pyx_t_5 = 0;
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_3);
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
__pyx_t_7 = <span class='py_c_api'>PyTuple_New</span>(2+__pyx_t_6);<span class='error_goto'> if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 75, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_7);
|
||||
if (__pyx_t_5) {
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_5); <span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL;
|
||||
}
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_self->_available_idents);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_v_self->_available_idents);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_7, 0+__pyx_t_6, __pyx_v_self->_available_idents);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_v_vref->value);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_v_vref->value);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_7, 1+__pyx_t_6, __pyx_v_vref->value);
|
||||
__pyx_t_3 = <span class='pyx_c_api'>__Pyx_PyObject_Call</span>(__pyx_t_4, __pyx_t_7, NULL);<span class='error_goto'> if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 75, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_3);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_7); __pyx_t_7 = 0;
|
||||
}
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_4); __pyx_t_4 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_3); __pyx_t_3 = 0;
|
||||
</pre><pre class="cython line score-0"> <span class="">76</span>: </pre>
|
||||
<pre class="cython line score-3" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">77</span>: def __len__(self):</pre>
|
||||
<pre class='cython code score-3 '>/* Python wrapper */
|
||||
static Py_ssize_t __pyx_pw_6gevent_7__ident_13IdentRegistry_7__len__(PyObject *__pyx_v_self); /*proto*/
|
||||
static Py_ssize_t __pyx_pw_6gevent_7__ident_13IdentRegistry_7__len__(PyObject *__pyx_v_self) {
|
||||
Py_ssize_t __pyx_r;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("__len__ (wrapper)", 0);
|
||||
__pyx_r = __pyx_pf_6gevent_7__ident_13IdentRegistry_6__len__(((struct __pyx_obj_6gevent_7__ident_IdentRegistry *)__pyx_v_self));
|
||||
|
||||
/* function exit code */
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
|
||||
static Py_ssize_t __pyx_pf_6gevent_7__ident_13IdentRegistry_6__len__(struct __pyx_obj_6gevent_7__ident_IdentRegistry *__pyx_v_self) {
|
||||
Py_ssize_t __pyx_r;
|
||||
<span class='refnanny'>__Pyx_RefNannyDeclarations</span>
|
||||
<span class='refnanny'>__Pyx_RefNannySetupContext</span>("__len__", 0);
|
||||
/* … */
|
||||
/* function exit code */
|
||||
__pyx_L1_error:;
|
||||
<span class='pyx_macro_api'>__Pyx_XDECREF</span>(__pyx_t_1);
|
||||
<span class='pyx_c_api'>__Pyx_AddTraceback</span>("gevent.__ident.IdentRegistry.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename);
|
||||
__pyx_r = -1;
|
||||
__pyx_L0:;
|
||||
<span class='refnanny'>__Pyx_RefNannyFinishContext</span>();
|
||||
return __pyx_r;
|
||||
}
|
||||
</pre><pre class="cython line score-7" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">78</span>: return len(self._registry)</pre>
|
||||
<pre class='cython code score-7 '> __pyx_t_1 = __pyx_v_self->_registry;
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_t_1);
|
||||
__pyx_t_2 = <span class='py_c_api'>PyObject_Length</span>(__pyx_t_1);<span class='error_goto'> if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 78, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
__pyx_r = __pyx_t_2;
|
||||
goto __pyx_L0;
|
||||
</pre><pre class="cython line score-0"> <span class="">79</span>: </pre>
|
||||
<pre class="cython line score-0"> <span class="">80</span>: </pre>
|
||||
<pre class="cython line score-19" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">81</span>: from gevent._util import import_c_accel</pre>
|
||||
<pre class='cython code score-19 '> __pyx_t_1 = <span class='py_c_api'>PyList_New</span>(1);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_import_c_accel);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_import_c_accel);
|
||||
<span class='py_macro_api'>PyList_SET_ITEM</span>(__pyx_t_1, 0, __pyx_n_s_import_c_accel);
|
||||
__pyx_t_2 = <span class='pyx_c_api'>__Pyx_Import</span>(__pyx_n_s_gevent__util, __pyx_t_1, 0);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 81, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_ImportFrom</span>(__pyx_t_2, __pyx_n_s_import_c_accel);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
if (<span class='py_c_api'>PyDict_SetItem</span>(__pyx_d, __pyx_n_s_import_c_accel, __pyx_t_1) < 0) <span class='error_goto'>__PYX_ERR(0, 81, __pyx_L1_error)</span>
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
</pre><pre class="cython line score-17" onclick="(function(s){s.display=s.display==='block'?'none':'block'})(this.nextElementSibling.style)">+<span class="">82</span>: import_c_accel(globals(), 'gevent.__ident')</pre>
|
||||
<pre class='cython code score-17 '> <span class='pyx_c_api'>__Pyx_GetModuleGlobalName</span>(__pyx_t_2, __pyx_n_s_import_c_accel);<span class='error_goto'> if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 82, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_2);
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_Globals</span>();<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
__pyx_t_3 = <span class='py_c_api'>PyTuple_New</span>(2);<span class='error_goto'> if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 82, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_3);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_t_1);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_3, 0, __pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_INCREF</span>(__pyx_n_s_gevent___ident);
|
||||
<span class='refnanny'>__Pyx_GIVEREF</span>(__pyx_n_s_gevent___ident);
|
||||
<span class='py_macro_api'>PyTuple_SET_ITEM</span>(__pyx_t_3, 1, __pyx_n_s_gevent___ident);
|
||||
__pyx_t_1 = 0;
|
||||
__pyx_t_1 = <span class='pyx_c_api'>__Pyx_PyObject_Call</span>(__pyx_t_2, __pyx_t_3, NULL);<span class='error_goto'> if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error)</span>
|
||||
<span class='refnanny'>__Pyx_GOTREF</span>(__pyx_t_1);
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_2); __pyx_t_2 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_3); __pyx_t_3 = 0;
|
||||
<span class='pyx_macro_api'>__Pyx_DECREF</span>(__pyx_t_1); __pyx_t_1 = 0;
|
||||
</pre></div></body></html>
|
@ -0,0 +1,82 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2018 gevent contributors. See LICENSE for details.
|
||||
# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
from weakref import WeakKeyDictionary
|
||||
from weakref import ref
|
||||
|
||||
from heapq import heappop
|
||||
from heapq import heappush
|
||||
|
||||
__all__ = [
|
||||
'IdentRegistry',
|
||||
]
|
||||
|
||||
class ValuedWeakRef(ref):
    """
    A weak ref with an associated value.

    Subclassing :class:`weakref.ref` with ``__slots__`` lets instances
    carry one extra attribute while staying small.  The ``value`` slot
    is not set here; it is assigned externally (see
    ``IdentRegistry.get_ident``, which stores the ident in it so the
    weakref callback can recycle the number).
    """

    # Single extra attribute; assigned by IdentRegistry.get_ident.
    __slots__ = ('value',)
|
||||
|
||||
|
||||
class IdentRegistry(object):
    """
    Maintains a unique mapping of (small) positive integer identifiers
    to objects that can be weakly referenced.

    It is guaranteed that no two objects will have the same identifier
    at the same time, as long as those objects are also uniquely
    hashable.
    """

    def __init__(self):
        # Maps a live object to a tuple (ident, ValuedWeakRef(obj)).
        self._registry = WeakKeyDictionary()

        # A min-heap of identifiers that were handed out and have
        # since been released (their objects died).
        self._available_idents = []

    def get_ident(self, obj):
        """
        Retrieve the identifier for *obj*, creating one
        if necessary.
        """
        entry = self._registry.get(obj)
        if entry is not None:
            return entry[0]

        if self._available_idents:
            # Reuse the smallest previously-released number.
            ident = heappop(self._available_idents)
        else:
            # Nothing to reuse: allocate a new, larger number.
            ident = len(self._registry)

        # The weakref callback (_return_ident) recycles the ident when
        # *obj* is collected; stash the ident on the ref itself.
        vref = ValuedWeakRef(obj, self._return_ident)
        vref.value = ident # pylint:disable=assigning-non-slot,attribute-defined-outside-init
        self._registry[obj] = (ident, vref)
        return ident

    def _return_ident(self, vref):
        # Runs as the weakref callback; by this point self._registry
        # has already dropped the entry for the dead object.
        if heappush is not None:
            # Under some circumstances we can get called when the
            # interpreter is shutting down and module globals have been
            # cleared to None; skip the bookkeeping then.
            heappush(self._available_idents, vref.value)

    def __len__(self):
        return len(self._registry)
|
||||
|
||||
|
||||
from gevent._util import import_c_accel
# Swap in the compiled (Cython) implementations from 'gevent.__ident'
# for the pure-Python definitions above, when the accelerator module is
# available — presumably; confirm the exact semantics in
# gevent._util.import_c_accel.
import_c_accel(globals(), 'gevent.__ident')
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,227 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2018 gevent
|
||||
# cython: auto_pickle=False,embedsignature=True,always_allow_keywords=False,infer_types=True
|
||||
|
||||
"""
|
||||
Iterators across greenlets or AsyncResult objects.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
from gevent import _semaphore
|
||||
from gevent import queue
|
||||
|
||||
|
||||
__all__ = [
|
||||
'IMapUnordered',
|
||||
'IMap',
|
||||
]
|
||||
|
||||
# NOTE(review): at module top level ``locals()`` *is* the module's global
# namespace, so these statements simply bind Greenlet, Semaphore and
# UnboundQueue as module globals.  The indirect spelling (instead of
# plain imports/assignments) presumably keeps static tools and the
# Cython compilation of this module from treating them as ordinary
# top-level definitions — note the ``pylint:disable=undefined-variable``
# comments at every use site below.  Confirm against the matching .pxd.
locals()['Greenlet'] = __import__('gevent').Greenlet
locals()['Semaphore'] = _semaphore.Semaphore
locals()['UnboundQueue'] = queue.UnboundQueue
|
||||
|
||||
|
||||
class Failure(object):
    """
    A marker placed on the result queue when a task greenlet (or the
    spawning greenlet itself) raised instead of producing a value.

    ``exc`` is the exception instance; ``raise_exception``, when not
    None, is a callable invoked to raise it (consumed by ``_raise_exc``).
    """

    __slots__ = ('exc', 'raise_exception')

    def __init__(self, exc, raise_exception=None):
        self.raise_exception = raise_exception
        self.exc = exc
|
||||
|
||||
|
||||
def _raise_exc(failure):
|
||||
# For cython.
|
||||
if failure.raise_exception:
|
||||
failure.raise_exception()
|
||||
else:
|
||||
raise failure.exc
|
||||
|
||||
class IMapUnordered(Greenlet): # pylint:disable=undefined-variable
    """
    An iterator of map results, yielded in whatever order the
    individual task greenlets happen to finish.
    """

    def __init__(self, func, iterable, spawn, maxsize=None, _zipped=False):
        """
        An iterator that applies *func* to each item of *iterable* in a
        greenlet created by *spawn*, yielding results as they become
        available.

        :param callable spawn: The function we use to create new greenlets.
        :keyword int maxsize: If given and not-None, specifies the maximum number of
            finished results that will be allowed to accumulate awaiting the reader;
            more than that number of results will cause map function greenlets to begin
            to block. This is most useful if there is a great disparity in the speed of
            the mapping code and the consumer and the results consume a great deal of resources.

            Using a bound is more computationally expensive than not using a bound.

        .. versionchanged:: 1.1b3
            Added the *maxsize* parameter.
        """
        Greenlet.__init__(self) # pylint:disable=undefined-variable
        self.spawn = spawn
        self._zipped = _zipped
        self.func = func
        self.iterable = iterable
        # Results (and Failure markers) flow through this queue; it is
        # unbounded because the semaphore below does the bounding.
        self.queue = UnboundQueue() # pylint:disable=undefined-variable


        if maxsize:
            # Bounding the queue is not enough if we want to keep from
            # accumulating objects; the result value will be around as
            # the greenlet's result, blocked on self.queue.put(), and
            # we'll go on to spawn another greenlet, which in turn can
            # create the result. So we need a semaphore to prevent a
            # greenlet from exiting while the queue is full so that we
            # don't spawn the next greenlet (assuming that self.spawn
            # is of course bounded). (Alternatively we could have the
            # greenlet itself do the insert into the pool, but that
            # takes some rework).
            #
            # Given the use of a semaphore at this level, sizing the queue becomes
            # redundant, and that lets us avoid having to use self.link() instead
            # of self.rawlink() to avoid having blocking methods called in the
            # hub greenlet.
            self._result_semaphore = Semaphore(maxsize) # pylint:disable=undefined-variable
        else:
            self._result_semaphore = None

        # Count of spawned task greenlets that have not yet reported
        # back through _on_result.
        self._outstanding_tasks = 0
        # The index (zero based) of the maximum number of
        # results we will have.
        self._max_index = -1
        # Set once the end-of-iteration sentinel has been (or is about
        # to be) queued; guards against queueing it twice.
        self.finished = False


    # We're iterating in a different greenlet than we're running.
    def __iter__(self):
        return self

    def __next__(self):
        if self._result_semaphore is not None:
            # Free a slot so one more finished task may deliver its result.
            self._result_semaphore.release()
        value = self._inext()
        if isinstance(value, Failure):
            # Either a task failed, or this is the end-of-iteration /
            # spawner-failed sentinel; raise the wrapped exception.
            _raise_exc(value)
        return value

    next = __next__ # Py2

    def _inext(self):
        # Overridden by IMap to re-sequence results.
        return self.queue.get()

    def _ispawn(self, func, item, item_index):
        if self._result_semaphore is not None:
            self._result_semaphore.acquire()
        self._outstanding_tasks += 1
        g = self.spawn(func, item) if not self._zipped else self.spawn(func, *item)
        # Remember this task's position in the input order; IMap uses it
        # to restore ordering.
        g._imap_task_index = item_index
        g.rawlink(self._on_result)
        return g

    def _run(self): # pylint:disable=method-hidden
        try:
            func = self.func
            for item in self.iterable:
                self._max_index += 1
                self._ispawn(func, item, self._max_index)
            self._on_finish(None)
        except BaseException as e:
            self._on_finish(e)
            raise
        finally:
            # Drop references so the callables/iterable (and whatever
            # they capture) can be collected once spawning is over.
            self.spawn = None
            self.func = None
            self.iterable = None
            self._result_semaphore = None

    def _on_result(self, greenlet):
        # This method will be called in the hub greenlet (we rawlink)
        self._outstanding_tasks -= 1
        count = self._outstanding_tasks
        finished = self.finished
        # self.ready(): has this (the spawning) greenlet finished running?
        ready = self.ready()
        put_finished = False

        if ready and count <= 0 and not finished:
            # This was the last outstanding task: after queueing its
            # result, also queue the end-of-iteration sentinel.
            finished = self.finished = True
            put_finished = True

        if greenlet.successful():
            self.queue.put(self._iqueue_value_for_success(greenlet))
        else:
            self.queue.put(self._iqueue_value_for_failure(greenlet))

        if put_finished:
            self.queue.put(self._iqueue_value_for_self_finished())

    def _on_finish(self, exception):
        # Called in this greenlet.
        if self.finished:
            return

        if exception is not None:
            # The spawning loop itself raised; report that to the reader.
            self.finished = True
            self.queue.put(self._iqueue_value_for_self_failure(exception))
            return

        if self._outstanding_tasks <= 0:
            # Every task already reported back before spawning ended.
            self.finished = True
            self.queue.put(self._iqueue_value_for_self_finished())

    # The _iqueue_value_for_* methods produce what actually goes on the
    # queue; IMap overrides them to attach ordering information.

    def _iqueue_value_for_success(self, greenlet):
        return greenlet.value

    def _iqueue_value_for_failure(self, greenlet):
        # getattr with no default is equivalent to attribute access here.
        return Failure(greenlet.exception, getattr(greenlet, '_raise_exception'))

    def _iqueue_value_for_self_finished(self):
        # A Failure wrapping StopIteration is the end-of-results sentinel.
        return Failure(StopIteration())

    def _iqueue_value_for_self_failure(self, exception):
        return Failure(exception, self._raise_exception)
|
||||
|
||||
|
||||
class IMap(IMapUnordered):
    # A specialization of IMapUnordered that returns items
    # in the order in which they were generated, not
    # the order in which they finish.

    def __init__(self, *args, **kwargs):
        # The result dictionary: {index: value} — buffers results that
        # arrived before their turn.
        self._results = {}

        # The index of the result to return next.
        self.index = 0
        IMapUnordered.__init__(self, *args, **kwargs)

    def _inext(self):
        # Return the result for self.index, pulling from the buffer if
        # it already finished, otherwise draining the queue (buffering
        # any out-of-order arrivals) until it shows up.
        #
        # NOTE(review): ``queue.get()`` can switch greenlets, and
        # ``self.index`` is deliberately re-read on every loop iteration
        # and at the ``+= 1``; preserve those re-reads if modifying.
        try:
            value = self._results.pop(self.index)
        except KeyError:
            # Wait for our index to finish.
            while 1:
                index, value = self.queue.get()
                if index == self.index:
                    break
                else:
                    self._results[index] = value
        self.index += 1
        return value

    # Each queued value is tagged with the task's input position so
    # _inext can restore ordering.

    def _iqueue_value_for_success(self, greenlet):
        return (greenlet._imap_task_index, IMapUnordered._iqueue_value_for_success(self, greenlet))

    def _iqueue_value_for_failure(self, greenlet):
        return (greenlet._imap_task_index, IMapUnordered._iqueue_value_for_failure(self, greenlet))

    def _iqueue_value_for_self_finished(self):
        # _max_index + 1 is one past the last real result, so the
        # sentinel is consumed only after every real result.
        return (self._max_index + 1, IMapUnordered._iqueue_value_for_self_finished(self))

    def _iqueue_value_for_self_failure(self, exception):
        return (self._max_index + 1, IMapUnordered._iqueue_value_for_self_failure(self, exception))
|
||||
|
||||
from gevent._util import import_c_accel
# Swap in the compiled (Cython) implementations from 'gevent.__imap'
# for the pure-Python definitions above, when the accelerator module is
# available — presumably; see gevent._util.import_c_accel for details.
import_c_accel(globals(), 'gevent.__imap')
|
@ -0,0 +1,230 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2018 gevent contributors. See LICENSE for details.
|
||||
"""
|
||||
Interfaces gevent uses that don't belong any one place.
|
||||
|
||||
This is not a public module, these interfaces are not
|
||||
currently exposed to the public, they mostly exist for
|
||||
documentation and testing purposes.
|
||||
|
||||
.. versionadded:: 1.3b2
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from gevent._util import Interface
|
||||
from gevent._util import Attribute
|
||||
|
||||
# pylint:disable=no-method-argument, unused-argument, no-self-argument
|
||||
|
||||
__all__ = [
|
||||
'ILoop',
|
||||
'IWatcher',
|
||||
]
|
||||
|
||||
class ILoop(Interface):
    """
    The common interface expected for all event loops.

    .. caution::
       This is an internal, low-level interface. It may change
       between minor versions of gevent.

    .. rubric:: Watchers

    The methods that create event loop watchers are `io`, `timer`,
    `signal`, `idle`, `prepare`, `check`, `fork`, `async_`, `child`,
    `stat`. These all return various types of :class:`IWatcher`.

    All of those methods have one or two common arguments. *ref* is a
    boolean saying whether the event loop is allowed to exit even if
    this watcher is still started. *priority* is event loop specific.
    """

    default = Attribute("Boolean indicating whether this is the default loop")

    approx_timer_resolution = Attribute(
        # Fixed typo: "minimun" -> "minimum".
        "Floating point number of seconds giving (approximately) the minimum "
        "resolution of a timer (and hence the minimum value the sleep can sleep for). "
        "On libuv, this is fixed by the library, but on libev it is just a guess "
        "and the actual value is system dependent."
    )

    def run(nowait=False, once=False):
        """
        Run the event loop.

        This is usually called automatically by the hub greenlet, but
        in special cases (when the hub is *not* running) you can use
        this to control how the event loop runs (for example, to integrate
        it with another event loop).
        """

    def now():
        """
        now() -> float

        Return the loop's notion of the current time.

        This may not necessarily be related to :func:`time.time` (it
        may have a different starting point), but it must be expressed
        in fractional seconds (the same *units* used by :func:`time.time`).
        """

    def update_now():
        """
        Update the loop's notion of the current time.

        .. versionadded:: 1.3
           In the past, this was available as ``update``. This is still
           available as an alias but will be removed in the future.
        """

    def destroy():
        """
        Clean up resources used by this loop.

        If you create loops
        (especially loops that are not the default) you *should* call
        this method when you are done with the loop.

        .. caution::

            As an implementation note, the libev C loop implementation has a
            finalizer (``__del__``) that destroys the object, but the libuv
            and libev CFFI implementations do not. The C implementation may change.

        """

    def io(fd, events, ref=True, priority=None):
        """
        Create and return a new IO watcher for the given *fd*.

        *events* is a bitmask specifying which events to watch
        for. 1 means read, and 2 means write.
        """

    def timer(after, repeat=0.0, ref=True, priority=None):
        """
        Create and return a timer watcher that will fire after *after* seconds.

        If *repeat* is given, the timer will continue to fire every *repeat* seconds.
        """

    def signal(signum, ref=True, priority=None):
        """
        Create and return a signal watcher for the signal *signum*,
        one of the constants defined in :mod:`signal`.

        This is platform and event loop specific.
        """

    def idle(ref=True, priority=None):
        """
        Create and return a watcher that fires when the event loop is idle.
        """

    def prepare(ref=True, priority=None):
        """
        Create and return a watcher that fires before the event loop
        polls for IO.

        .. caution:: This method is not supported by libuv.
        """

    def check(ref=True, priority=None):
        """
        Create and return a watcher that fires after the event loop
        polls for IO.
        """

    def fork(ref=True, priority=None):
        """
        Create a watcher that fires when the process forks.

        Availability: POSIX
        """

    def async_(ref=True, priority=None):
        """
        Create a watcher that fires when triggered, possibly
        from another thread.

        .. versionchanged:: 1.3
           This was previously just named ``async``; for compatibility
           with Python 3.7 where ``async`` is a keyword it was renamed.
           On older versions of Python the old name is still around, but
           it will be removed in the future.
        """

    # Interface methods are conditionally defined just like regular ones.
    if sys.platform != "win32":

        def child(pid, trace=0, ref=True):
            """
            Create a watcher that fires for events on the child with process ID *pid*.

            This is platform specific and not available on Windows.
            """

    def stat(path, interval=0.0, ref=True, priority=None):
        """
        Create a watcher that monitors the filesystem item at *path*.

        If the operating system doesn't support event notifications
        from the filesystem, poll for changes every *interval* seconds.
        """

    def run_callback(func, *args):
        """
        Run the *func* passing it *args* at the next opportune moment.

        This is a way of handing control to the event loop and deferring
        an action.
        """
|
||||
|
||||
class IWatcher(Interface):
|
||||
"""
|
||||
An event loop watcher.
|
||||
|
||||
These objects call their *callback* function when the event
|
||||
loop detects the event has happened.
|
||||
|
||||
.. important:: You *must* call :meth:`close` when you are
|
||||
done with this object to avoid leaking native resources.
|
||||
"""
|
||||
|
||||
def start(callback, *args, **kwargs):
|
||||
"""
|
||||
Have the event loop begin watching for this event.
|
||||
|
||||
When the event is detected, *callback* will be called with
|
||||
*args*.
|
||||
|
||||
.. caution::
|
||||
|
||||
Not all watchers accept ``**kwargs``,
|
||||
and some watchers define special meanings for certain keyword args.
|
||||
"""
|
||||
|
||||
def stop():
|
||||
"""
|
||||
Have the event loop stop watching this event.
|
||||
|
||||
In the future you may call :meth:`start` to begin watching
|
||||
again.
|
||||
"""
|
||||
|
||||
def close():
|
||||
"""
|
||||
Dispose of any native resources associated with the watcher.
|
||||
|
||||
If we were active, stop.
|
||||
|
||||
Attempting to operate on this object after calling close is
|
||||
undefined. You should dispose of any references you have to it
|
||||
after calling this method.
|
||||
"""
|
Binary file not shown.
@ -0,0 +1,113 @@
|
||||
# cython: auto_pickle=False
|
||||
|
||||
cimport cython
|
||||
from gevent._greenlet cimport Greenlet
|
||||
|
||||
cdef bint _PYPY
|
||||
cdef ref
|
||||
cdef copy
|
||||
|
||||
cdef object _marker
|
||||
cdef str key_prefix
|
||||
cdef bint _greenlet_imported
|
||||
|
||||
|
||||
cdef extern from "greenlet/greenlet.h":
|
||||
|
||||
ctypedef class greenlet.greenlet [object PyGreenlet]:
|
||||
pass
|
||||
|
||||
# These are actually macros and so much be included
|
||||
# (defined) in each .pxd, as are the two functions
|
||||
# that call them.
|
||||
greenlet PyGreenlet_GetCurrent()
|
||||
void PyGreenlet_Import()
|
||||
|
||||
cdef inline greenlet getcurrent():
|
||||
return PyGreenlet_GetCurrent()
|
||||
|
||||
cdef inline void greenlet_init():
|
||||
global _greenlet_imported
|
||||
if not _greenlet_imported:
|
||||
PyGreenlet_Import()
|
||||
_greenlet_imported = True
|
||||
|
||||
|
||||
cdef void _init()
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class _wrefdict(dict):
|
||||
cdef object __weakref__
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class _greenlet_deleted:
|
||||
cdef object idt
|
||||
cdef object wrdicts
|
||||
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class _local_deleted:
|
||||
cdef str key
|
||||
cdef object wrthread
|
||||
cdef _greenlet_deleted greenlet_deleted
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class _localimpl:
|
||||
cdef str key
|
||||
cdef dict dicts
|
||||
cdef tuple localargs
|
||||
cdef dict localkwargs
|
||||
cdef tuple localtypeid
|
||||
cdef object __weakref__
|
||||
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class _localimpl_dict_entry:
|
||||
cdef object wrgreenlet
|
||||
cdef dict localdict
|
||||
|
||||
@cython.locals(localdict=dict, key=str,
|
||||
greenlet_deleted=_greenlet_deleted,
|
||||
local_deleted=_local_deleted)
|
||||
cdef dict _localimpl_create_dict(_localimpl self,
|
||||
greenlet greenlet,
|
||||
object idt)
|
||||
|
||||
cdef set _local_attrs
|
||||
|
||||
cdef class local:
|
||||
cdef _localimpl _local__impl
|
||||
cdef set _local_type_get_descriptors
|
||||
cdef set _local_type_set_or_del_descriptors
|
||||
cdef set _local_type_del_descriptors
|
||||
cdef set _local_type_set_descriptors
|
||||
cdef set _local_type_vars
|
||||
cdef type _local_type
|
||||
|
||||
@cython.locals(entry=_localimpl_dict_entry,
|
||||
dct=dict, duplicate=dict,
|
||||
instance=local)
|
||||
cpdef local __copy__(local self)
|
||||
|
||||
|
||||
@cython.locals(impl=_localimpl,dct=dict,
|
||||
dct=dict, entry=_localimpl_dict_entry)
|
||||
cdef inline dict _local_get_dict(local self)
|
||||
|
||||
@cython.locals(entry=_localimpl_dict_entry)
|
||||
cdef _local__copy_dict_from(local self, _localimpl impl, dict duplicate)
|
||||
|
||||
@cython.locals(mro=list, gets=set, dels=set, set_or_del=set,
|
||||
type_self=type, type_attr=type,
|
||||
sets=set)
|
||||
cdef tuple _local_find_descriptors(local self)
|
||||
|
||||
@cython.locals(result=list, local_impl=_localimpl,
|
||||
entry=_localimpl_dict_entry, k=str,
|
||||
greenlet_dict=dict)
|
||||
cpdef all_local_dicts_for_greenlet(greenlet greenlet)
|
@ -0,0 +1,325 @@
|
||||
# Copyright (c) 2018 gevent. See LICENSE for details.
|
||||
from __future__ import print_function, absolute_import, division
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from weakref import ref as wref
|
||||
|
||||
from greenlet import getcurrent
|
||||
|
||||
from gevent import config as GEVENT_CONFIG
|
||||
from gevent.monkey import get_original
|
||||
from gevent.events import notify
|
||||
from gevent.events import EventLoopBlocked
|
||||
from gevent.events import MemoryUsageThresholdExceeded
|
||||
from gevent.events import MemoryUsageUnderThreshold
|
||||
from gevent.events import IPeriodicMonitorThread
|
||||
from gevent.events import implementer
|
||||
|
||||
from gevent._tracer import GreenletTracer
|
||||
from gevent._compat import thread_mod_name
|
||||
from gevent._compat import perf_counter
|
||||
|
||||
|
||||
|
||||
__all__ = [
|
||||
'PeriodicMonitoringThread',
|
||||
]
|
||||
|
||||
get_thread_ident = get_original(thread_mod_name, 'get_ident')
|
||||
start_new_thread = get_original(thread_mod_name, 'start_new_thread')
|
||||
thread_sleep = get_original('time', 'sleep')
|
||||
|
||||
|
||||
|
||||
class MonitorWarning(RuntimeWarning):
|
||||
"""The type of warnings we emit."""
|
||||
|
||||
|
||||
class _MonitorEntry(object):
|
||||
|
||||
__slots__ = ('function', 'period', 'last_run_time')
|
||||
|
||||
def __init__(self, function, period):
|
||||
self.function = function
|
||||
self.period = period
|
||||
self.last_run_time = 0
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.function == other.function and self.period == other.period
|
||||
|
||||
def __repr__(self):
|
||||
return repr((self.function, self.period, self.last_run_time))
|
||||
|
||||
|
||||
@implementer(IPeriodicMonitorThread)
|
||||
class PeriodicMonitoringThread(object):
|
||||
# This doesn't extend threading.Thread because that gets monkey-patched.
|
||||
# We use the low-level 'start_new_thread' primitive instead.
|
||||
|
||||
# The amount of seconds we will sleep when we think we have nothing
|
||||
# to do.
|
||||
inactive_sleep_time = 2.0
|
||||
|
||||
# The absolute minimum we will sleep, regardless of
|
||||
# what particular monitoring functions want to say.
|
||||
min_sleep_time = 0.005
|
||||
|
||||
# The minimum period in seconds at which we will check memory usage.
|
||||
# Getting memory usage is fairly expensive.
|
||||
min_memory_monitor_period = 2
|
||||
|
||||
# A list of _MonitorEntry objects: [(function(hub), period, last_run_time))]
|
||||
# The first entry is always our entry for self.monitor_blocking
|
||||
_monitoring_functions = None
|
||||
|
||||
# The calculated min sleep time for the monitoring functions list.
|
||||
_calculated_sleep_time = None
|
||||
|
||||
# A boolean value that also happens to capture the
|
||||
# memory usage at the time we exceeded the threshold. Reset
|
||||
# to 0 when we go back below.
|
||||
_memory_exceeded = 0
|
||||
|
||||
# The instance of GreenletTracer we're using
|
||||
_greenlet_tracer = None
|
||||
|
||||
def __init__(self, hub):
|
||||
self._hub_wref = wref(hub, self._on_hub_gc)
|
||||
self.should_run = True
|
||||
|
||||
# Must be installed in the thread that the hub is running in;
|
||||
# the trace function is threadlocal
|
||||
assert get_thread_ident() == hub.thread_ident
|
||||
self._greenlet_tracer = GreenletTracer()
|
||||
|
||||
self._monitoring_functions = [_MonitorEntry(self.monitor_blocking,
|
||||
GEVENT_CONFIG.max_blocking_time)]
|
||||
self._calculated_sleep_time = GEVENT_CONFIG.max_blocking_time
|
||||
# Create the actual monitoring thread. This is effectively a "daemon"
|
||||
# thread.
|
||||
self.monitor_thread_ident = start_new_thread(self, ())
|
||||
|
||||
# We must track the PID to know if your thread has died after a fork
|
||||
self.pid = os.getpid()
|
||||
|
||||
def _on_fork(self):
|
||||
# Pseudo-standard method that resolver_ares and threadpool
|
||||
# also have, called by hub.reinit()
|
||||
pid = os.getpid()
|
||||
if pid != self.pid:
|
||||
self.pid = pid
|
||||
self.monitor_thread_ident = start_new_thread(self, ())
|
||||
|
||||
@property
|
||||
def hub(self):
|
||||
return self._hub_wref()
|
||||
|
||||
|
||||
def monitoring_functions(self):
|
||||
# Return a list of _MonitorEntry objects
|
||||
|
||||
# Update max_blocking_time each time.
|
||||
mbt = GEVENT_CONFIG.max_blocking_time # XXX: Events so we know when this changes.
|
||||
if mbt != self._monitoring_functions[0].period:
|
||||
self._monitoring_functions[0].period = mbt
|
||||
self._calculated_sleep_time = min(x.period for x in self._monitoring_functions)
|
||||
return self._monitoring_functions
|
||||
|
||||
def add_monitoring_function(self, function, period):
|
||||
if not callable(function):
|
||||
raise ValueError("function must be callable")
|
||||
|
||||
if period is None:
|
||||
# Remove.
|
||||
self._monitoring_functions = [
|
||||
x for x in self._monitoring_functions
|
||||
if x.function != function
|
||||
]
|
||||
elif period <= 0:
|
||||
raise ValueError("Period must be positive.")
|
||||
else:
|
||||
# Add or update period
|
||||
entry = _MonitorEntry(function, period)
|
||||
self._monitoring_functions = [
|
||||
x if x.function != function else entry
|
||||
for x in self._monitoring_functions
|
||||
]
|
||||
if entry not in self._monitoring_functions:
|
||||
self._monitoring_functions.append(entry)
|
||||
self._calculated_sleep_time = min(x.period for x in self._monitoring_functions)
|
||||
|
||||
def calculate_sleep_time(self):
|
||||
min_sleep = self._calculated_sleep_time
|
||||
if min_sleep <= 0:
|
||||
# Everyone wants to be disabled. Sleep for a longer period of
|
||||
# time than usual so we don't spin unnecessarily. We might be
|
||||
# enabled again in the future.
|
||||
return self.inactive_sleep_time
|
||||
return max((min_sleep, self.min_sleep_time))
|
||||
|
||||
def kill(self):
|
||||
if not self.should_run:
|
||||
# Prevent overwriting trace functions.
|
||||
return
|
||||
# Stop this monitoring thread from running.
|
||||
self.should_run = False
|
||||
# Uninstall our tracing hook
|
||||
self._greenlet_tracer.kill()
|
||||
|
||||
def _on_hub_gc(self, _):
|
||||
self.kill()
|
||||
|
||||
def __call__(self):
|
||||
# The function that runs in the monitoring thread.
|
||||
# We cannot use threading.current_thread because it would
|
||||
# create an immortal DummyThread object.
|
||||
getcurrent().gevent_monitoring_thread = wref(self)
|
||||
|
||||
try:
|
||||
while self.should_run:
|
||||
functions = self.monitoring_functions()
|
||||
assert functions
|
||||
sleep_time = self.calculate_sleep_time()
|
||||
|
||||
thread_sleep(sleep_time)
|
||||
|
||||
# Make sure the hub is still around, and still active,
|
||||
# and keep it around while we are here.
|
||||
hub = self.hub
|
||||
if not hub:
|
||||
self.kill()
|
||||
|
||||
if self.should_run:
|
||||
this_run = perf_counter()
|
||||
for entry in functions:
|
||||
f = entry.function
|
||||
period = entry.period
|
||||
last_run = entry.last_run_time
|
||||
if period and last_run + period <= this_run:
|
||||
entry.last_run_time = this_run
|
||||
f(hub)
|
||||
del hub # break our reference to hub while we sleep
|
||||
|
||||
except SystemExit:
|
||||
pass
|
||||
except: # pylint:disable=bare-except
|
||||
# We're a daemon thread, so swallow any exceptions that get here
|
||||
# during interpreter shutdown.
|
||||
if not sys or not sys.stderr: # pragma: no cover
|
||||
# Interpreter is shutting down
|
||||
pass
|
||||
else:
|
||||
hub = self.hub
|
||||
if hub is not None:
|
||||
# XXX: This tends to do bad things like end the process, because we
|
||||
# try to switch *threads*, which can't happen. Need something better.
|
||||
hub.handle_error(self, *sys.exc_info())
|
||||
|
||||
def monitor_blocking(self, hub):
|
||||
# Called periodically to see if the trace function has
|
||||
# fired to switch greenlets. If not, we will print
|
||||
# the greenlet tree.
|
||||
|
||||
# For tests, we return a true value when we think we found something
|
||||
# blocking
|
||||
|
||||
did_block = self._greenlet_tracer.did_block_hub(hub)
|
||||
if not did_block:
|
||||
return
|
||||
|
||||
active_greenlet = did_block[1]
|
||||
report = self._greenlet_tracer.did_block_hub_report(
|
||||
hub, active_greenlet,
|
||||
dict(greenlet_stacks=False, current_thread_ident=self.monitor_thread_ident))
|
||||
|
||||
stream = hub.exception_stream
|
||||
for line in report:
|
||||
# Printing line by line may interleave with other things,
|
||||
# but it should also prevent a "reentrant call to print"
|
||||
# when the report is large.
|
||||
print(line, file=stream)
|
||||
|
||||
notify(EventLoopBlocked(active_greenlet, GEVENT_CONFIG.max_blocking_time, report))
|
||||
return (active_greenlet, report)
|
||||
|
||||
def ignore_current_greenlet_blocking(self):
|
||||
self._greenlet_tracer.ignore_current_greenlet_blocking()
|
||||
|
||||
def monitor_current_greenlet_blocking(self):
|
||||
self._greenlet_tracer.monitor_current_greenlet_blocking()
|
||||
|
||||
def _get_process(self): # pylint:disable=method-hidden
|
||||
try:
|
||||
# The standard library 'resource' module doesn't provide
|
||||
# a standard way to get the RSS measure, only the maximum.
|
||||
# You might be tempted to try to compute something by adding
|
||||
# together text and data sizes, but on many systems those come back
|
||||
# zero. So our only option is psutil.
|
||||
from psutil import Process, AccessDenied
|
||||
# Make sure it works (why would we be denied access to our own process?)
|
||||
try:
|
||||
proc = Process()
|
||||
proc.memory_full_info()
|
||||
except AccessDenied: # pragma: no cover
|
||||
proc = None
|
||||
except ImportError:
|
||||
proc = None
|
||||
|
||||
self._get_process = lambda: proc
|
||||
return proc
|
||||
|
||||
def can_monitor_memory_usage(self):
|
||||
return self._get_process() is not None
|
||||
|
||||
def install_monitor_memory_usage(self):
|
||||
# Start monitoring memory usage, if possible.
|
||||
# If not possible, emit a warning.
|
||||
if not self.can_monitor_memory_usage():
|
||||
import warnings
|
||||
warnings.warn("Unable to monitor memory usage. Install psutil.",
|
||||
MonitorWarning)
|
||||
return
|
||||
|
||||
self.add_monitoring_function(self.monitor_memory_usage,
|
||||
max(GEVENT_CONFIG.memory_monitor_period,
|
||||
self.min_memory_monitor_period))
|
||||
|
||||
def monitor_memory_usage(self, _hub):
|
||||
max_allowed = GEVENT_CONFIG.max_memory_usage
|
||||
if not max_allowed:
|
||||
# They disabled it.
|
||||
return -1 # value for tests
|
||||
|
||||
rusage = self._get_process().memory_full_info()
|
||||
# uss only documented available on Windows, Linux, and OS X.
|
||||
# If not available, fall back to rss as an aproximation.
|
||||
mem_usage = getattr(rusage, 'uss', 0) or rusage.rss
|
||||
|
||||
event = None # Return value for tests
|
||||
|
||||
if mem_usage > max_allowed:
|
||||
if mem_usage > self._memory_exceeded:
|
||||
# We're still growing
|
||||
event = MemoryUsageThresholdExceeded(
|
||||
mem_usage, max_allowed, rusage)
|
||||
notify(event)
|
||||
self._memory_exceeded = mem_usage
|
||||
else:
|
||||
# we're below. Were we above it last time?
|
||||
if self._memory_exceeded:
|
||||
event = MemoryUsageUnderThreshold(
|
||||
mem_usage, max_allowed, rusage, self._memory_exceeded)
|
||||
notify(event)
|
||||
self._memory_exceeded = 0
|
||||
|
||||
return event
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s at %s in thread %s greenlet %r for %r>' % (
|
||||
self.__class__.__name__,
|
||||
hex(id(self)),
|
||||
hex(self.monitor_thread_ident),
|
||||
getcurrent(),
|
||||
self._hub_wref())
|
@ -0,0 +1,127 @@
|
||||
# Copyright 2018 gevent. See LICENSE for details.
|
||||
|
||||
# Portions of the following are inspired by code from eventlet. I
|
||||
# believe they are distinct enough that no eventlet copyright would
|
||||
# apply (they are not a copy or substantial portion of the eventlot
|
||||
# code).
|
||||
|
||||
# Added in gevent 1.3a2. Not public in that release.
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import importlib
|
||||
import sys
|
||||
|
||||
from gevent._compat import PY3
|
||||
from gevent._compat import iteritems
|
||||
from gevent._compat import imp_acquire_lock
|
||||
from gevent._compat import imp_release_lock
|
||||
|
||||
|
||||
from gevent.builtins import __import__ as _import
|
||||
|
||||
|
||||
MAPPING = {
|
||||
'gevent.local': '_threading_local',
|
||||
'gevent.socket': 'socket',
|
||||
'gevent.select': 'select',
|
||||
'gevent.ssl': 'ssl',
|
||||
'gevent.thread': '_thread' if PY3 else 'thread',
|
||||
'gevent.subprocess': 'subprocess',
|
||||
'gevent.os': 'os',
|
||||
'gevent.threading': 'threading',
|
||||
'gevent.builtins': 'builtins' if PY3 else '__builtin__',
|
||||
'gevent.signal': 'signal',
|
||||
'gevent.time': 'time',
|
||||
'gevent.queue': 'queue' if PY3 else 'Queue',
|
||||
}
|
||||
|
||||
_PATCH_PREFIX = '__g_patched_module_'
|
||||
|
||||
class _SysModulesPatcher(object):
|
||||
|
||||
def __init__(self, importing):
|
||||
self._saved = {}
|
||||
self.importing = importing
|
||||
self.green_modules = {
|
||||
stdlib_name: importlib.import_module(gevent_name)
|
||||
for gevent_name, stdlib_name
|
||||
in iteritems(MAPPING)
|
||||
}
|
||||
self.orig_imported = frozenset(sys.modules)
|
||||
|
||||
def _save(self):
|
||||
for modname in self.green_modules:
|
||||
self._saved[modname] = sys.modules.get(modname, None)
|
||||
|
||||
self._saved[self.importing] = sys.modules.get(self.importing, None)
|
||||
# Anything we've already patched regains its original name during this
|
||||
# process
|
||||
for mod_name, mod in iteritems(sys.modules):
|
||||
if mod_name.startswith(_PATCH_PREFIX):
|
||||
orig_mod_name = mod_name[len(_PATCH_PREFIX):]
|
||||
self._saved[mod_name] = sys.modules.get(orig_mod_name, None)
|
||||
self.green_modules[orig_mod_name] = mod
|
||||
|
||||
def _replace(self):
|
||||
# Cover the target modules so that when you import the module it
|
||||
# sees only the patched versions
|
||||
for name, mod in iteritems(self.green_modules):
|
||||
sys.modules[name] = mod
|
||||
|
||||
def _restore(self):
|
||||
for modname, mod in iteritems(self._saved):
|
||||
if mod is not None:
|
||||
sys.modules[modname] = mod
|
||||
else:
|
||||
try:
|
||||
del sys.modules[modname]
|
||||
except KeyError:
|
||||
pass
|
||||
# Anything from the same package tree we imported this time
|
||||
# needs to be saved so we can restore it later, and so it doesn't
|
||||
# leak into the namespace.
|
||||
pkg_prefix = self.importing.split('.', 1)[0]
|
||||
for modname, mod in list(iteritems(sys.modules)):
|
||||
if (modname not in self.orig_imported
|
||||
and modname != self.importing
|
||||
and not modname.startswith(_PATCH_PREFIX)
|
||||
and modname.startswith(pkg_prefix)):
|
||||
sys.modules[_PATCH_PREFIX + modname] = mod
|
||||
del sys.modules[modname]
|
||||
|
||||
def __exit__(self, t, v, tb):
|
||||
try:
|
||||
self._restore()
|
||||
finally:
|
||||
imp_release_lock()
|
||||
|
||||
def __enter__(self):
|
||||
imp_acquire_lock()
|
||||
self._save()
|
||||
self._replace()
|
||||
|
||||
|
||||
def import_patched(module_name):
|
||||
"""
|
||||
Import *module_name* with gevent monkey-patches active,
|
||||
and return the greened module.
|
||||
|
||||
Any sub-modules that were imported by the package are also
|
||||
saved.
|
||||
|
||||
"""
|
||||
patched_name = _PATCH_PREFIX + module_name
|
||||
if patched_name in sys.modules:
|
||||
return sys.modules[patched_name]
|
||||
|
||||
|
||||
# Save the current module state, and restore on exit,
|
||||
# capturing desirable changes in the modules package.
|
||||
with _SysModulesPatcher(module_name):
|
||||
sys.modules.pop(module_name, None)
|
||||
|
||||
module = _import(module_name, {}, {}, module_name.split('.')[:-1])
|
||||
sys.modules[patched_name] = module
|
||||
|
||||
return module
|
Binary file not shown.
@ -0,0 +1,74 @@
|
||||
cimport cython
|
||||
from gevent.__waiter cimport Waiter
|
||||
from gevent._event cimport Event
|
||||
|
||||
cdef _heappush
|
||||
cdef _heappop
|
||||
cdef _heapify
|
||||
|
||||
@cython.final
|
||||
cdef _safe_remove(deq, item)
|
||||
|
||||
@cython.final
|
||||
@cython.internal
|
||||
cdef class ItemWaiter(Waiter):
|
||||
cdef readonly item
|
||||
cdef readonly queue
|
||||
|
||||
cdef class Queue:
|
||||
cdef __weakref__
|
||||
cdef readonly hub
|
||||
cdef readonly queue
|
||||
|
||||
cdef getters
|
||||
cdef putters
|
||||
|
||||
cdef _event_unlock
|
||||
cdef Py_ssize_t _maxsize
|
||||
|
||||
cpdef _get(self)
|
||||
cpdef _put(self, item)
|
||||
cpdef _peek(self)
|
||||
|
||||
cpdef Py_ssize_t qsize(self)
|
||||
cpdef bint empty(self)
|
||||
cpdef bint full(self)
|
||||
|
||||
cpdef put(self, item, block=*, timeout=*)
|
||||
cpdef put_nowait(self, item)
|
||||
|
||||
cdef __get_or_peek(self, method, block, timeout)
|
||||
|
||||
cpdef get(self, block=*, timeout=*)
|
||||
cpdef get_nowait(self)
|
||||
cpdef peek(self, block=*, timeout=*)
|
||||
cpdef peek_nowait(self)
|
||||
|
||||
cdef _schedule_unlock(self)
|
||||
|
||||
@cython.final
|
||||
cdef class UnboundQueue(Queue):
|
||||
pass
|
||||
|
||||
cdef class PriorityQueue(Queue):
|
||||
pass
|
||||
|
||||
cdef class LifoQueue(Queue):
|
||||
pass
|
||||
|
||||
cdef class JoinableQueue(Queue):
|
||||
cdef Event _cond
|
||||
cdef readonly int unfinished_tasks
|
||||
|
||||
|
||||
cdef class Channel:
|
||||
cdef __weakref__
|
||||
cdef readonly getters
|
||||
cdef readonly putters
|
||||
cdef readonly hub
|
||||
cdef _event_unlock
|
||||
|
||||
cpdef get(self, block=*, timeout=*)
|
||||
cpdef get_nowait(self)
|
||||
|
||||
cdef _schedule_unlock(self)
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue