This commit is contained in:
2026-04-10 15:06:59 +02:00
parent 3031b7153b
commit e5a4711004
7806 changed files with 1918528 additions and 335 deletions

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
import warnings

from numba.core.errors import NumbaPendingDeprecationWarning

# The pycc module requires setuptools.
try:
    import setuptools
except ImportError:
    msg = "The 'setuptools' package is required at runtime for pycc support."
    raise ImportError(msg)

# Public API
from .cc import CC
from .decorators import export, exportmany

# If use of anything is attempted through the `pycc` import path this warning
# will be shown.
__pycc_deprecation_doc_url = ("https://numba.readthedocs.io/en/stable/"
                              "reference/deprecation.html"
                              "#deprecation-of-the-numba-pycc-module")
__pycc_pending_deprecation_message = ("The 'pycc' module is pending "
                                      "deprecation. Replacement technology is "
                                      "being developed.\n\n"
                                      "Pending Deprecation in Numba 0.57.0. "
                                      "For more information please see: "
                                      f"{__pycc_deprecation_doc_url}")

# Warn once at import time; stacklevel=2 makes the warning point at the
# module doing the import rather than at this file.
_pend_dep = NumbaPendingDeprecationWarning(__pycc_pending_deprecation_message)
warnings.warn(_pend_dep, stacklevel=2)

View File

@@ -0,0 +1,309 @@
from setuptools import distutils as dutils
from setuptools.command import build_ext
from setuptools.extension import Extension
import os
import shutil
import sys
import tempfile
from numba.core import typing, sigutils
from numba.core.compiler_lock import global_compiler_lock
from numba.pycc.compiler import ModuleCompiler, ExportEntry
from numba.pycc.platform import Toolchain
from numba import cext
# Convenience aliases for the distutils components re-exported by setuptools.
dir_util = dutils.dir_util
log = dutils.log
# Extra C helper sources that must be linked into every pycc-built module.
extension_libs = cext.get_extension_libs()
class CC(object):
    """
    An ahead-of-time compiler to create extension modules that don't
    depend on Numba.
    """

    # NOTE: using ccache can speed up repetitive builds
    # (especially for the mixin modules)

    # C support sources compiled into every produced extension module.
    _mixin_sources = ['modulemixin.c',] + extension_libs

    # -flto strips all unused helper functions, which 1) makes the
    # produced output much smaller and 2) can make the linking step faster.
    # (the Windows linker seems to do this by default, judging by the results)
    _extra_cflags = {
        # Comment out due to odd behavior with GCC 4.9+ with LTO
        # 'posix': ['-flto'],
    }

    _extra_ldflags = {
        # Comment out due to odd behavior with GCC 4.9+ with LTO
        # 'posix': ['-flto'],
    }

    def __init__(self, extension_name, source_module=None):
        """
        *extension_name* is the unqualified name of the extension module
        to produce.  *source_module* optionally identifies the module the
        exported functions live in (a module object or dotted name); by
        default the caller's module is used, discovered via the call stack.
        """
        if '.' in extension_name:
            raise ValueError("basename should be a simple module name, not "
                             "qualified name")
        self._basename = extension_name
        self._init_function = 'pycc_init_' + extension_name
        self._exported_functions = {}
        # Resolve source module name and directory
        # NOTE: inspects the immediate caller's frame, so CC() must be
        # instantiated directly from the module defining the exports.
        f = sys._getframe(1)
        if source_module is None:
            dct = f.f_globals
            source_module = dct['__name__']
        elif hasattr(source_module, '__name__'):
            dct = source_module.__dict__
            source_module = source_module.__name__
        else:
            dct = sys.modules[source_module].__dict__
        self._source_path = dct.get('__file__', '')
        self._source_module = source_module
        self._toolchain = Toolchain()
        self._verbose = False
        # By default, output in directory of caller module
        self._output_dir = os.path.dirname(self._source_path)
        self._output_file = self._toolchain.get_ext_filename(extension_name)
        self._use_nrt = True
        self._target_cpu = ''

    @property
    def name(self):
        """
        The name of the extension module to create.
        """
        return self._basename

    @property
    def output_file(self):
        """
        The specific output file (a DLL) that will be generated.
        """
        return self._output_file

    @output_file.setter
    def output_file(self, value):
        self._output_file = value

    @property
    def output_dir(self):
        """
        The directory the output file will be put in.
        """
        return self._output_dir

    @output_dir.setter
    def output_dir(self, value):
        self._output_dir = value

    @property
    def use_nrt(self):
        # Whether the Numba runtime (NRT) is linked into the module.
        return self._use_nrt

    @use_nrt.setter
    def use_nrt(self, value):
        self._use_nrt = value

    @property
    def target_cpu(self):
        """
        The target CPU model for code generation.
        """
        return self._target_cpu

    @target_cpu.setter
    def target_cpu(self, value):
        self._target_cpu = value

    @property
    def verbose(self):
        """
        Whether to display detailed information when compiling.
        """
        return self._verbose

    @verbose.setter
    def verbose(self, value):
        self._verbose = value

    def export(self, exported_name, sig):
        """
        Mark a function for exporting in the extension module.

        *exported_name* is the symbol name in the produced module and
        *sig* a Numba signature (string or signature object).  Returns a
        decorator to apply to the Python function.
        """
        fn_args, fn_retty = sigutils.normalize_signature(sig)
        sig = typing.signature(fn_retty, *fn_args)
        if exported_name in self._exported_functions:
            raise KeyError("duplicated export symbol %s" % (exported_name))

        def decorator(func):
            entry = ExportEntry(exported_name, sig, func)
            self._exported_functions[exported_name] = entry
            return func

        return decorator

    @property
    def _export_entries(self):
        # Sorted by symbol name so generated code is deterministic.
        return sorted(self._exported_functions.values(),
                      key=lambda entry: entry.symbol)

    def _get_mixin_sources(self):
        # Absolute paths of the C support sources, relative to this package.
        here = os.path.dirname(__file__)
        mixin_sources = self._mixin_sources[:]
        if self._use_nrt:
            mixin_sources.append('../core/runtime/nrt.cpp')
        return [os.path.join(here, f) for f in mixin_sources]

    def _get_mixin_defines(self):
        # Macro definitions required by modulemixin.c
        return [
            ('PYCC_MODULE_NAME', self._basename),
            ('PYCC_USE_NRT', int(self._use_nrt)),
        ]

    def _get_extra_cflags(self):
        # Per-platform flags first (sys.platform), else per-OS-family
        # (os.name) flags.
        extra_cflags = self._extra_cflags.get(sys.platform, [])
        if not extra_cflags:
            extra_cflags = self._extra_cflags.get(os.name, [])
        return extra_cflags

    def _get_extra_ldflags(self):
        extra_ldflags = self._extra_ldflags.get(sys.platform, [])
        if not extra_ldflags:
            extra_ldflags = self._extra_ldflags.get(os.name, [])
        # helperlib uses pthread on linux. make sure we are linking to it.
        # NOTE(review): if _extra_ldflags ever gains a 'linux'/'posix'
        # entry, the append below would mutate the shared class-level
        # list — confirm a copy is taken before that happens.
        if sys.platform.startswith("linux"):
            if "-pthread" not in extra_ldflags:
                extra_ldflags.append('-pthread')
        return extra_ldflags

    def _compile_mixins(self, build_dir):
        # Compile the C support sources; returns the object file paths.
        sources = self._get_mixin_sources()
        macros = self._get_mixin_defines()
        include_dirs = self._toolchain.get_python_include_dirs()
        extra_cflags = self._get_extra_cflags()
        # XXX distutils creates a whole subtree inside build_dir,
        # e.g. /tmp/test_pycc/home/antoine/numba/numba/pycc/modulemixin.o
        objects = self._toolchain.compile_objects(sources, build_dir,
                                                  include_dirs=include_dirs,
                                                  macros=macros,
                                                  extra_cflags=extra_cflags)
        return objects

    @global_compiler_lock
    def _compile_object_files(self, build_dir):
        # Lower all exported functions into a single native object file.
        compiler = ModuleCompiler(self._export_entries, self._basename,
                                  self._use_nrt, cpu_name=self._target_cpu)
        compiler.external_init_function = self._init_function
        temp_obj = os.path.join(build_dir,
                                os.path.splitext(self._output_file)[0] + '.o')
        log.info("generating LLVM code for '%s' into %s",
                 self._basename, temp_obj)
        compiler.write_native_object(temp_obj, wrap=True)
        return [temp_obj], compiler.dll_exports

    @global_compiler_lock
    def compile(self):
        """
        Compile the extension module.
        """
        self._toolchain.verbose = self.verbose
        build_dir = tempfile.mkdtemp(prefix='pycc-build-%s-' % self._basename)
        # Compile object file
        objects, dll_exports = self._compile_object_files(build_dir)
        # Compile mixins
        objects += self._compile_mixins(build_dir)
        # Then create shared library
        extra_ldflags = self._get_extra_ldflags()
        output_dll = os.path.join(self._output_dir, self._output_file)
        libraries = self._toolchain.get_python_libraries()
        library_dirs = self._toolchain.get_python_library_dirs()
        self._toolchain.link_shared(output_dll, objects,
                                    libraries, library_dirs,
                                    export_symbols=dll_exports,
                                    extra_ldflags=extra_ldflags)
        # NOTE(review): build_dir is leaked if any step above raises —
        # consider try/finally, unless keeping it aids debugging.
        shutil.rmtree(build_dir)

    def distutils_extension(self, **kwargs):
        """
        Create a distutils extension object that can be used in your
        setup.py.
        """
        macros = kwargs.pop('macros', []) + self._get_mixin_defines()
        depends = kwargs.pop('depends', []) + [self._source_path]
        extra_compile_args = (kwargs.pop('extra_compile_args', [])
                              + self._get_extra_cflags())
        extra_link_args = (kwargs.pop('extra_link_args', [])
                           + self._get_extra_ldflags())
        include_dirs = (kwargs.pop('include_dirs', [])
                        + self._toolchain.get_python_include_dirs())
        libraries = (kwargs.pop('libraries', [])
                     + self._toolchain.get_python_libraries())
        library_dirs = (kwargs.pop('library_dirs', [])
                        + self._toolchain.get_python_library_dirs())
        # Keep the package prefix (e.g. "pkg.sub.") of the source module,
        # empty when the source module is top-level.
        python_package_path = self._source_module[:self._source_module.rfind('.')+1]
        ext = _CCExtension(name=python_package_path + self._basename,
                           sources=self._get_mixin_sources(),
                           depends=depends,
                           define_macros=macros,
                           include_dirs=include_dirs,
                           libraries=libraries,
                           library_dirs=library_dirs,
                           extra_compile_args=extra_compile_args,
                           extra_link_args=extra_link_args,
                           **kwargs)
        ext.monkey_patch_distutils()
        ext._cc = self
        return ext
class _CCExtension(Extension):
    """
    A Numba-specific Extension subclass to LLVM-compile pure Python code
    to an extension module.
    """

    # The owning CC instance; set by CC.distutils_extension().
    _cc = None
    # Guard so the build_ext patch below is installed only once per process.
    _distutils_monkey_patched = False

    def _prepare_object_files(self, build_ext):
        # Called from the patched build_ext just before building: compiles
        # the exported functions to object files under build_temp and
        # registers them for linking.
        cc = self._cc
        dir_util.mkpath(os.path.join(build_ext.build_temp,
                                     *self.name.split('.')[:-1]))
        objects, _ = cc._compile_object_files(build_ext.build_temp)
        # Add generated object files for linking
        self.extra_objects = objects

    @classmethod
    def monkey_patch_distutils(cls):
        """
        Monkey-patch distutils with our own build_ext class knowing
        about pycc-compiled extensions modules.
        """
        if cls._distutils_monkey_patched:
            return

        _orig_build_ext = build_ext.build_ext

        class _CC_build_ext(_orig_build_ext):
            def build_extension(self, ext):
                # Generate the pycc object files first, then let the
                # stock build_ext compile and link as usual.
                if isinstance(ext, _CCExtension):
                    ext._prepare_object_files(self)
                _orig_build_ext.build_extension(self, ext)

        build_ext.build_ext = _CC_build_ext
        cls._distutils_monkey_patched = True

View File

@@ -0,0 +1,512 @@
# -*- coding: utf-8 -*-
import logging
import os
import sys
from llvmlite import ir
from llvmlite.binding import Linkage
from numba.pycc import llvm_types as lt
from numba.core.cgutils import create_constant_array
from numba.core.compiler import compile_extra, Flags
from numba.core.compiler_lock import global_compiler_lock
from numba.core.registry import cpu_target
from numba.core.runtime import nrtdynmod
from numba.core import cgutils
from numba.core.environment import lookup_environment
logger = logging.getLogger(__name__)

__all__ = ['ModuleCompiler']

# Frequently used LLVM IR constants.
NULL = ir.Constant(lt._void_star, None)
ZERO = ir.Constant(lt._int32, 0)
ONE = ir.Constant(lt._int32, 1)
# METH_VARARGS (0x1) | METH_KEYWORDS (0x2), per CPython's methodobject.h.
METH_VARARGS_AND_KEYWORDS = ir.Constant(lt._int32, 1|2)
def get_header():
    """Return C source for a header declaring the numeric typedefs
    (complex64/128/256, float32/64/128) used by pycc-generated headers.
    """
    import numpy
    import textwrap

    # NumPy only exposes complex256 on platforms whose ``long double``
    # is wider than ``double``; %d renders the bool as 0/1.
    have_longdouble = hasattr(numpy, 'complex256')
    template = textwrap.dedent("""\
#include <stdint.h>
#ifndef HAVE_LONGDOUBLE
#define HAVE_LONGDOUBLE %d
#endif
typedef struct {
float real;
float imag;
} complex64;
typedef struct {
double real;
double imag;
} complex128;
#if HAVE_LONGDOUBLE
typedef struct {
long double real;
long double imag;
} complex256;
#endif
typedef float float32;
typedef double float64;
#if HAVE_LONGDOUBLE
typedef long double float128;
#endif
""")
    return template % have_longdouble
class ExportEntry(object):
    """
    A simple record describing one symbol exported from a pycc module:
    its public name, its Numba signature and the Python callable.
    """

    def __init__(self, symbol, signature, function):
        self.symbol = symbol
        self.signature = signature
        self.function = function

    def __repr__(self):
        # The wrapped callable is deliberately omitted from the repr.
        return f"ExportEntry({self.symbol!r}, {self.signature!r})"
class _ModuleCompiler(object):
    """A base class to compile Python modules to a single shared library or
    extension module.

    :param export_entries: a list of ExportEntry instances.
    :param module_name: the name of the exported module.
    """

    #: Structure used to describe a method of an extension type.
    #: struct PyMethodDef {
    #:     const char *ml_name;  /* The name of the built-in function/method */
    #:     PyCFunction ml_meth;  /* The C function that implements it */
    #:     int ml_flags;         /* Combination of METH_xxx flags, which mostly
    #:                              describe the args expected by the C func */
    #:     const char *ml_doc;   /* The __doc__ attribute, or NULL */
    #: };
    method_def_ty = ir.LiteralStructType((lt._int8_star,
                                          lt._void_star,
                                          lt._int32,
                                          lt._int8_star))
    method_def_ptr = ir.PointerType(method_def_ty)

    # The structure type constructed by PythonAPI.serialize_uncached()
    # when updating this, also make sure to update `env_def_t` struct in
    # numba/pycc/modulemixin.c
    env_def_ty = ir.LiteralStructType((lt._void_star,
                                       lt._int32,
                                       lt._void_star,
                                       lt._void_star,
                                       lt._int32))
    env_def_ptr = ir.PointerType(env_def_ty)

    def __init__(self, export_entries, module_name, use_nrt=False,
                 **aot_options):
        self.module_name = module_name
        # Set via the *wrap* argument of the write_*() entry points;
        # when True a CPython extension-module wrapper is also emitted.
        self.export_python_wrap = False
        # Symbol names that must remain visible in the produced DLL.
        self.dll_exports = []
        self.export_entries = export_entries
        # Used by the CC API but not the legacy API
        self.external_init_function = None
        self.use_nrt = use_nrt
        self.typing_context = cpu_target.typing_context
        # Dedicated AOT codegen context keyed by the module name.
        self.context = cpu_target.target_context.with_aot_codegen(
            self.module_name, **aot_options)

    def _mangle_method_symbol(self, func_name):
        # Private symbol name for the CPython wrapper of *func_name*.
        return "._pycc_method_%s" % (func_name,)

    def _emit_python_wrapper(self, llvm_module):
        """Emit generated Python wrapper and extension module code.
        """
        raise NotImplementedError

    @global_compiler_lock
    def _cull_exports(self):
        """Read all the exported functions/modules in the translator
        environment, and join them into a single LLVM module.

        Returns the finalized codegen library.  Also populates
        exported_function_types, function_environments, environment_gvs
        and extra_environments as side effects.
        """
        self.exported_function_types = {}
        self.function_environments = {}
        self.environment_gvs = {}
        self.extra_environments = {}
        codegen = self.context.codegen()
        library = codegen.create_library(self.module_name)
        # Generate IR for all exported functions
        flags = Flags()
        flags.no_compile = True
        if not self.export_python_wrap:
            # Bare DLL exports need neither CPython nor cfunc wrappers.
            flags.no_cpython_wrapper = True
            flags.no_cfunc_wrapper = True
        if self.use_nrt:
            flags.nrt = True
            # Compile NRT helpers
            nrt_module, _ = nrtdynmod.create_nrt_module(self.context)
            library.add_ir_module(nrt_module)
        for entry in self.export_entries:
            cres = compile_extra(self.typing_context, self.context,
                                 entry.function,
                                 entry.signature.args,
                                 entry.signature.return_type, flags,
                                 locals={}, library=library)
            # Fix up dynamic exc globals
            module = library._final_module
            for gv in module.functions:
                if gv.name.startswith("__excinfo_unwrap_args"):
                    gv.linkage = "linkonce_odr"
            func_name = cres.fndesc.llvm_func_name
            llvm_func = cres.library.get_function(func_name)
            if self.export_python_wrap:
                # Hide the raw function; expose only the mangled wrapper.
                llvm_func.linkage = 'internal'
                wrappername = cres.fndesc.llvm_cpython_wrapper_name
                wrapper = cres.library.get_function(wrappername)
                wrapper.name = self._mangle_method_symbol(entry.symbol)
                wrapper.linkage = 'external'
                fnty = cres.target_context.call_conv.get_function_type(
                    cres.fndesc.restype, cres.fndesc.argtypes)
                self.exported_function_types[entry] = fnty
                self.function_environments[entry] = cres.environment
                self.environment_gvs[entry] = cres.fndesc.env_name
                # Search for extra environments from linked libraries
                for linkedlib in library._linking_libraries:
                    linkedmod = linkedlib._final_module
                    # Find environments
                    for gv in linkedmod.global_variables:
                        gvn = gv.name
                        if gvn.startswith("_ZN08NumbaEnv"):
                            env = lookup_environment(gvn)
                            if env is not None:
                                if env.can_cache():
                                    self.extra_environments[gvn] = env
            else:
                # Plain export: the function itself carries the symbol name.
                llvm_func.name = entry.symbol
                self.dll_exports.append(entry.symbol)
        if self.export_python_wrap:
            wrapper_module = library.create_ir_module("wrapper")
            self._emit_python_wrapper(wrapper_module)
            library.add_ir_module(wrapper_module)
        # Hide all functions in the DLL except those explicitly exported
        library.finalize()
        for fn in library.get_defined_functions():
            if fn.name not in self.dll_exports:
                if fn.linkage in {Linkage.private, Linkage.internal}:
                    # Private/Internal linkage must have "default" visibility
                    fn.visibility = "default"
                else:
                    fn.visibility = 'hidden'
        return library

    def write_llvm_bitcode(self, output, wrap=False, **kws):
        # Compile the exports and write LLVM bitcode to *output*.
        self.export_python_wrap = wrap
        library = self._cull_exports()
        with open(output, 'wb') as fout:
            fout.write(library.emit_bitcode())

    def write_native_object(self, output, wrap=False, **kws):
        # Compile the exports and write a native object file to *output*.
        self.export_python_wrap = wrap
        library = self._cull_exports()
        with open(output, 'wb') as fout:
            fout.write(library.emit_native_object())

    def emit_type(self, tyobj):
        # Map a Numba type to the C type name used by the generated
        # header; fixed-width int names get the stdint.h "_t" suffix.
        ret_val = str(tyobj)
        if 'int' in ret_val:
            if ret_val.endswith(('8', '16', '32', '64')):
                ret_val += "_t"
        return ret_val

    def emit_header(self, output):
        # Write a C header with prototypes for all exported functions,
        # alongside *output* (same basename, '.h' suffix).
        fname, ext = os.path.splitext(output)
        with open(fname + '.h', 'w') as fout:
            fout.write(get_header())
            fout.write("\n/* Prototypes */\n")
            for export_entry in self.export_entries:
                name = export_entry.symbol
                restype = self.emit_type(export_entry.signature.return_type)
                args = ", ".join(self.emit_type(argtype)
                                 for argtype in export_entry.signature.args)
                fout.write("extern %s %s(%s);\n" % (restype, name, args))

    def _emit_method_array(self, llvm_module):
        """
        Collect exported methods and emit a PyMethodDef array.

        :returns: a pointer to the PyMethodDef array.
        """
        method_defs = []
        for entry in self.export_entries:
            name = entry.symbol
            llvm_func_name = self._mangle_method_symbol(name)
            fnty = self.exported_function_types[entry]
            lfunc = ir.Function(llvm_module, fnty, llvm_func_name)
            method_name = self.context.insert_const_string(llvm_module, name)
            method_def_const = ir.Constant.literal_struct(
                (method_name,
                 ir.Constant.bitcast(lfunc, lt._void_star),
                 METH_VARARGS_AND_KEYWORDS,
                 NULL))
            method_defs.append(method_def_const)
        # PyMethodDef arrays are terminated by an all-NULL sentinel entry.
        sentinel = ir.Constant.literal_struct([NULL, NULL, ZERO, NULL])
        method_defs.append(sentinel)
        method_array_init = create_constant_array(self.method_def_ty,
                                                  method_defs)
        method_array = cgutils.add_global_variable(llvm_module,
                                                   method_array_init.type,
                                                   '.module_methods')
        method_array.initializer = method_array_init
        method_array.linkage = 'internal'
        method_array_ptr = ir.Constant.gep(method_array, [ZERO, ZERO])
        return method_array_ptr

    def _emit_environment_array(self, llvm_module, builder, pyapi):
        """
        Emit an array of env_def_t structures (see modulemixin.c)
        storing the pickled environment constants for each of the
        exported functions.
        """
        env_defs = []
        for entry in self.export_entries:
            env = self.function_environments[entry]
            # Constants may be unhashable so avoid trying to cache them
            env_def = pyapi.serialize_uncached(env.consts)
            env_defs.append(env_def)
        # Append extra environments
        env_defs.extend([
            pyapi.serialize_uncached(env.consts)
            for env in self.extra_environments.values()
        ])
        env_defs_init = create_constant_array(self.env_def_ty, env_defs)
        gv = self.context.insert_unique_const(llvm_module,
                                              '.module_environments',
                                              env_defs_init)
        return gv.gep([ZERO, ZERO])

    def _emit_envgvs_array(self, llvm_module, builder, pyapi):
        """
        Emit an array of Environment pointers that needs to be filled at
        initialization.
        """
        env_setters = []
        for entry in self.export_entries:
            envgv_name = self.environment_gvs[entry]
            gv = self.context.declare_env_global(llvm_module, envgv_name)
            envgv = gv.bitcast(lt._void_star)
            env_setters.append(envgv)
        # Append extra environments
        env_setters.extend([
            self.context.declare_env_global(
                llvm_module,
                envgv_name
            ).bitcast(lt._void_star)
            for envgv_name in self.extra_environments
        ])
        # The array ends with NULL
        env_setters.append(lt._void_star(None))
        env_setters_init = create_constant_array(lt._void_star, env_setters)
        gv = self.context.insert_unique_const(llvm_module,
                                              '.module_envgvs',
                                              env_setters_init)
        return gv.gep([ZERO, ZERO])

    def _emit_module_init_code(self, llvm_module, builder, modobj,
                               method_array, env_array, envgv_array):
        """
        Emit call to "external" init function, if any.

        Returns the call's result value (an i32 status) or None when no
        external init function was configured.
        """
        if self.external_init_function:
            fnty = ir.FunctionType(lt._int32,
                                   [modobj.type, self.method_def_ptr,
                                    self.env_def_ptr, envgv_array.type])
            fn = ir.Function(llvm_module, fnty, self.external_init_function)
            return builder.call(fn, [modobj, method_array, env_array,
                                     envgv_array])
        else:
            return None
class ModuleCompiler(_ModuleCompiler):
    """Compile exported functions into a full CPython extension module:
    emits the PyModuleDef data and the PyInit_<name> entry point."""

    # Helper building a pointer-to-function LLVM type.
    _ptr_fun = lambda ret, *args: ir.PointerType(ir.FunctionType(ret, args))

    #: typedef int (*visitproc)(PyObject *, void *);
    # NOTE(review): these callback types use lt._int8 where the C
    # prototypes say "int" — confirm whether that is intentional (the
    # slots are only ever initialized to NULL below).
    visitproc_ty = _ptr_fun(lt._int8,
                            lt._pyobject_head_p)

    #: typedef int (*inquiry)(PyObject *);
    inquiry_ty = _ptr_fun(lt._int8,
                          lt._pyobject_head_p)

    #: typedef int (*traverseproc)(PyObject *, visitproc, void *);
    traverseproc_ty = _ptr_fun(lt._int8,
                               lt._pyobject_head_p,
                               visitproc_ty,
                               lt._void_star)

    # typedef void (*freefunc)(void *)
    freefunc_ty = _ptr_fun(lt._int8,
                           lt._void_star)

    # PyObject* (*m_init)(void);
    m_init_ty = _ptr_fun(lt._int8)

    _char_star = lt._int8_star

    #: typedef struct PyModuleDef_Base {
    #:   PyObject_HEAD
    #:   PyObject* (*m_init)(void);
    #:   Py_ssize_t m_index;
    #:   PyObject* m_copy;
    #: } PyModuleDef_Base;
    module_def_base_ty = ir.LiteralStructType(
        (
            lt._pyobject_head,
            m_init_ty,
            lt._llvm_py_ssize_t,
            lt._pyobject_head_p
        ))

    #: This struct holds all information that is needed to create a module object.
    #: typedef struct PyModuleDef{
    #:   PyModuleDef_Base m_base;
    #:   const char* m_name;
    #:   const char* m_doc;
    #:   Py_ssize_t m_size;
    #:   PyMethodDef *m_methods;
    #:   inquiry m_reload;
    #:   traverseproc m_traverse;
    #:   inquiry m_clear;
    #:   freefunc m_free;
    #: }PyModuleDef;
    module_def_ty = ir.LiteralStructType(
        (
            module_def_base_ty,
            _char_star,
            _char_star,
            lt._llvm_py_ssize_t,
            _ModuleCompiler.method_def_ptr,
            inquiry_ty,
            traverseproc_ty,
            inquiry_ty,
            freefunc_ty
        ))

    @property
    def module_create_definition(self):
        """
        Return the signature and name of the Python C API function to
        initialize the module.
        """
        signature = ir.FunctionType(lt._pyobject_head_p,
                                    (ir.PointerType(self.module_def_ty),
                                     lt._int32))
        name = "PyModule_Create2"
        if lt._trace_refs_:
            # Py_TRACE_REFS (debug) builds use a dedicated entry point.
            name += "TraceRefs"
        return signature, name

    @property
    def module_init_definition(self):
        """
        Return the name and signature of the module's initialization function.
        """
        signature = ir.FunctionType(lt._pyobject_head_p, ())
        return signature, "PyInit_" + self.module_name

    def _emit_python_wrapper(self, llvm_module):
        """Emit the PyInit_<module> function plus the static PyModuleDef
        data it references into *llvm_module*."""
        # Figure out the Python C API module creation function, and
        # get a LLVM function for it.
        create_module_fn = ir.Function(llvm_module,
                                       *self.module_create_definition)
        create_module_fn.linkage = 'external'
        # Define a constant string for the module name.
        mod_name_const = self.context.insert_const_string(llvm_module,
                                                          self.module_name)
        mod_def_base_init = ir.Constant.literal_struct(
            (lt._pyobject_head_init,                  # PyObject_HEAD
             ir.Constant(self.m_init_ty, None),       # m_init
             ir.Constant(lt._llvm_py_ssize_t, None),  # m_index
             ir.Constant(lt._pyobject_head_p, None),  # m_copy
             )
        )
        mod_def_base = cgutils.add_global_variable(llvm_module,
                                                   mod_def_base_init.type,
                                                   '.module_def_base')
        mod_def_base.initializer = mod_def_base_init
        mod_def_base.linkage = 'internal'
        method_array = self._emit_method_array(llvm_module)
        mod_def_init = ir.Constant.literal_struct(
            (mod_def_base_init,                        # m_base
             mod_name_const,                           # m_name
             ir.Constant(self._char_star, None),       # m_doc
             ir.Constant(lt._llvm_py_ssize_t, -1),     # m_size
             method_array,                             # m_methods
             ir.Constant(self.inquiry_ty, None),       # m_reload
             ir.Constant(self.traverseproc_ty, None),  # m_traverse
             ir.Constant(self.inquiry_ty, None),       # m_clear
             ir.Constant(self.freefunc_ty, None)       # m_free
             )
        )
        # Define a constant string for the module name.
        mod_def = cgutils.add_global_variable(llvm_module, mod_def_init.type,
                                              '.module_def')
        mod_def.initializer = mod_def_init
        mod_def.linkage = 'internal'
        # Define the module initialization function.
        mod_init_fn = ir.Function(llvm_module, *self.module_init_definition)
        entry = mod_init_fn.append_basic_block('Entry')
        builder = ir.IRBuilder(entry)
        pyapi = self.context.get_python_api(builder)
        mod = builder.call(create_module_fn,
                           (mod_def,
                            ir.Constant(lt._int32, sys.api_version)))
        # Test if module has been created correctly.
        # (XXX for some reason comparing with the NULL constant fails llvm
        # with an assertion in pydebug mode)
        with builder.if_then(cgutils.is_null(builder, mod)):
            builder.ret(NULL.bitcast(mod_init_fn.type.pointee.return_type))
        env_array = self._emit_environment_array(llvm_module, builder, pyapi)
        envgv_array = self._emit_envgvs_array(llvm_module, builder, pyapi)
        ret = self._emit_module_init_code(llvm_module, builder, mod,
                                          method_array, env_array,
                                          envgv_array)
        # The external init function (when present) returns non-zero on
        # failure, in which case PyInit_* must return NULL.
        if ret is not None:
            with builder.if_then(cgutils.is_not_null(builder, ret)):
                # Init function errored out
                builder.ret(ir.Constant(mod.type, None))
        builder.ret(mod)
        # Keep the PyInit_* symbol visible for CPython's importer.
        self.dll_exports.append(mod_init_fn.name)

View File

@@ -0,0 +1,72 @@
import re
import warnings
from numba.core import typing, sigutils
from numba.pycc.compiler import ExportEntry
# Registry is okay to be a global because we are using pycc as a standalone
# commandline tool.
export_registry = []


def export(prototype):
    """
    Legacy decorator: register a function for export under *prototype*,
    a string of the form "symbol signature" (e.g. "mult float(float, float)").
    Deprecated in favour of the numba.pycc.CC API.
    """
    warnings.warn("export() is deprecated, use the numba.pycc.CC API instead",
                  DeprecationWarning, stacklevel=2)
    sym, sig = parse_prototype(prototype)

    def wrappped(func):
        fn_argtys, fn_retty = sigutils.normalize_signature(sig)
        signature = typing.signature(fn_retty, *fn_argtys)
        entry = ExportEntry(symbol=sym, signature=signature, function=func)
        export_registry.append(entry)
        # BUG FIX: a decorator must return the function, otherwise the
        # decorated name is silently rebound to None.
        return func

    return wrappped
def exportmany(prototypes):
    """
    Legacy decorator: register one function for export under each of the
    given prototype strings.  Deprecated in favour of the numba.pycc.CC API.
    """
    warnings.warn("exportmany() is deprecated, use the numba.pycc.CC API instead",
                  DeprecationWarning, stacklevel=2)

    def wrapped(func):
        # Apply the single-prototype decorator once per prototype string.
        for prototype in prototypes:
            export(prototype)(func)

    return wrapped
def process_input_files(inputs):
    """
    Read input source files for execution of legacy @export / @exportmany
    decorators.
    """
    for path in inputs:
        # Compile with the file name attached so tracebacks point at it.
        with open(path) as stream:
            source = stream.read()
        code = compile(source, path, 'exec')
        exec(code)
def clear_export_registry():
    """Empty the export registry in place (preserving list identity)."""
    del export_registry[:]
# --------------------------------- Internal ---------------------------------
# A valid export symbol: a letter or underscore followed by word characters.
re_symbol = re.compile(r'[_a-z][_a-z0-9]*', re.I)


def parse_prototype(text):
    """Split a prototype string of the form "symbol function-type"
    (e.g. "mult float(float, float)") into its two components.

    Returns
    ---------
    (symbol_string, functype_string)
    """
    match = re_symbol.match(text)
    if not match:
        raise ValueError("Invalid function name for export prototype")
    symbol = match.group(0)
    # Everything past the symbol and the single separating space.
    functype = text[match.end(0) + 1:]
    return symbol, functype

View File

@@ -0,0 +1,37 @@
import sys
import ctypes
import struct as struct_
import llvmlite.ir
from llvmlite.ir import Constant
# True on --with-trace-refs (Py_TRACE_REFS) CPython builds, which prepend
# two extra pointers to every PyObject; sys.getobjects only exists there.
_trace_refs_ = hasattr(sys, 'getobjects')
# Pointer width of the current platform, in bits.
_plat_bits = struct_.calcsize('@P') * 8

_int8 = llvmlite.ir.IntType(8)
_int32 = llvmlite.ir.IntType(32)
# LLVM IR has no void*; i8* stands in for both char* and void*.
_void_star = llvmlite.ir.PointerType(_int8)
_int8_star = _void_star

# Py_ssize_t is modeled as an integer with the platform's size_t width.
_sizeof_py_ssize_t = ctypes.sizeof(getattr(ctypes, 'c_size_t'))
_llvm_py_ssize_t = llvmlite.ir.IntType(_sizeof_py_ssize_t * 8)

if _trace_refs_:
    # PyObject_HEAD with the extra _ob_next/_ob_prev debug pointers.
    _pyobject_head = llvmlite.ir.LiteralStructType([_void_star, _void_star,
                                                    _llvm_py_ssize_t, _void_star])
    _pyobject_head_init = Constant.literal_struct([
        Constant(_void_star, None),        # _ob_next
        Constant(_void_star, None),        # _ob_prev
        Constant(_llvm_py_ssize_t, 1),     # ob_refcnt
        Constant(_void_star, None),        # ob_type
        ])
else:
    # Standard PyObject_HEAD: ob_refcnt + ob_type.
    _pyobject_head = llvmlite.ir.LiteralStructType([_llvm_py_ssize_t, _void_star])
    _pyobject_head_init = Constant.literal_struct([
        Constant(_llvm_py_ssize_t, 1),     # ob_refcnt
        Constant(_void_star, None),        # ob_type
        ])

_pyobject_head_p = llvmlite.ir.PointerType(_pyobject_head)

View File

@@ -0,0 +1,221 @@
/*
 * This C file is compiled and linked into pycc-generated shared objects.
 * It provides the Numba helper functions for runtime use in pycc-compiled
 * functions.
 */

#include "../_numba_common.h"
#include "../_pymodule.h"

/* Define all runtime-required symbols in this C module, but do not
   export them outside the shared library if possible. */
#define NUMBA_EXPORT_FUNC(_rettype) VISIBILITY_HIDDEN _rettype
#define NUMBA_EXPORT_DATA(_vartype) VISIBILITY_HIDDEN _vartype

#define PYCC_COMPILING

#include "../_helperlib.c"
#include "../_dynfunc.c"

#if PYCC_USE_NRT
#include "../core/runtime/_nrt_python.c"
#include "../core/runtime/nrt.h"
#endif

/* Python 3.12+ moved _Py_HashSecret behind the internal core API, so it
   must be pulled in explicitly with Py_BUILD_CORE defined. */
#if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION >= 12)
#define Py_BUILD_CORE 1
#include "internal/pycore_pyhash.h"
#undef Py_BUILD_CORE
#endif

/* Defines hashsecret variables (see issue #6386) */
int64_t _numba_hashsecret_siphash_k0;
int64_t _numba_hashsecret_siphash_k1;
int64_t _numba_hashsecret_djbx33a_suffix;
/* NOTE: import_array() is macro, not a function. It returns NULL on
failure */
static void *
wrap_import_array(void) {
import_array();
return (void *) 1;
}
/* Initialize the NumPy C API; returns non-zero on success, 0 on failure. */
static int
init_numpy(void) {
    return wrap_import_array() != NULL;
}
#ifndef PYCC_MODULE_NAME
#error PYCC_MODULE_NAME must be defined
#endif

/* Preprocessor trick: need to use two levels of macros otherwise
   PYCC_MODULE_NAME would not get expanded */
#define __PYCC(prefix, modname) prefix ## modname
#define _PYCC(prefix, modname) __PYCC(prefix, modname)
#define PYCC(prefix) _PYCC(prefix, PYCC_MODULE_NAME)

/* Silence warnings about unused functions */
VISIBILITY_HIDDEN void **PYCC(_unused_) = {
    (void *) Numba_make_generator,
};

/* The structure type constructed by PythonAPI.serialize_uncached()
   (keep in sync with env_def_ty in numba/pycc/compiler.py) */
typedef struct {
    const char *data;     /* pickled environment constants */
    int len;              /* byte length of *data* */
    const char *hashbuf;  /* hash buffer passed to numba_unpickle */
    const char *func_ptr; // pointer to unwrap dyn args function
    int alloc_flag;
} env_def_t;

/* Environment GlobalVariable address type */
typedef void **env_gv_t;
/*
 * Recreate an environment object from a env_def_t structure.
 * Returns a new reference, or NULL with a Python error set.
 */
static EnvironmentObject *
recreate_environment(PyObject *module, env_def_t env)
{
    EnvironmentObject *envobj;
    PyObject *env_consts;

    /* Unpickle the constants serialized at compile time. */
    env_consts = numba_unpickle(env.data, env.len, env.hashbuf);
    if (env_consts == NULL)
        return NULL;
    if (!PyList_Check(env_consts)) {
        PyErr_Format(PyExc_TypeError,
                     "environment constants should be a list, got '%s'",
                     Py_TYPE(env_consts)->tp_name);
        Py_DECREF(env_consts);
        return NULL;
    }
    envobj = env_new_empty(&EnvironmentType);
    if (envobj == NULL) {
        Py_DECREF(env_consts);
        return NULL;
    }
    /* envobj takes over our reference to env_consts. */
    envobj->consts = env_consts;
    /* PyModule_GetDict returns a borrowed reference, hence the INCREF
       once we know it is valid. */
    envobj->globals = PyModule_GetDict(module);
    if (envobj->globals == NULL) {
        Py_DECREF(envobj);
        return NULL;
    }
    Py_INCREF(envobj->globals);
    return envobj;
}
/*
 * Subroutine to initialize all resources required for running the
 * pycc-compiled functions.  Returns 0 on success, -1 on failure with a
 * Python error set.
 */
int
PYCC(pycc_init_) (PyObject *module, PyMethodDef *defs,
                  env_def_t *envs,
                  env_gv_t *envgvs)
{
    /* Aligns hashsecret with values in current python process so that
     * hashes computed inside the pycc module are correct if imported
     * by the current process. Imports in a new process get the right
     * hash secret through:
     * `numba.cpython.hashing._load_hashsecret`.
     */
    _numba_hashsecret_siphash_k0 = _Py_HashSecret.siphash.k0;
    _numba_hashsecret_siphash_k1 = _Py_HashSecret.siphash.k1;
    _numba_hashsecret_djbx33a_suffix = _Py_HashSecret.djbx33a.suffix;

    PyMethodDef *fdef;
    PyObject *modname = NULL;
    PyObject *docobj = NULL;
    int i;

    if (!init_numpy()) {
        goto error;
    }
    if (init_dynfunc_module(module)) {
        goto error;
    }
    /* Initialize random generation. */
    numba_rnd_ensure_global_init();

#if PYCC_USE_NRT
    /* Bring up the Numba runtime before any compiled function can
       allocate through it. */
    NRT_MemSys_init();
    if (init_nrt_python_module(module)) {
        goto error;
    }
#endif

    modname = PyObject_GetAttrString(module, "__name__");
    if (modname == NULL) {
        goto error;
    }
    /* Empty docstring for all compiled functions */
    docobj = PyString_FromString("");
    if (docobj == NULL) {
        goto error;
    }

    /* Overwrite C method objects with our own Closure objects, in order
     * to make their environments available to the compiled functions.
     */
    for (i = 0, fdef = defs; fdef->ml_name != NULL; i++, fdef++) {
        PyObject *func;
        PyObject *nameobj;
        EnvironmentObject *envobj;

        envobj = recreate_environment(module, envs[i]);
        if (envobj == NULL) {
            goto error;
        }
        nameobj = PyString_FromString(fdef->ml_name);
        if (nameobj == NULL) {
            Py_DECREF(envobj);
            goto error;
        }
        // Store the environment pointer into the global
        /* NOTE(review): only a borrowed pointer survives the DECREF
           below — presumably the closure built by pycfunction_new keeps
           the environment alive; confirm. */
        *envgvs[i] = envobj;
        func = pycfunction_new(module, nameobj, docobj,
                               fdef->ml_meth, envobj, NULL);
        Py_DECREF(envobj);
        Py_DECREF(nameobj);
        if (func == NULL) {
            goto error;
        }
        if (PyObject_SetAttrString(module, fdef->ml_name, func)) {
            Py_DECREF(func);
            goto error;
        }
        Py_DECREF(func);
    }

    /* Recreate other environment objects
       envgvs is expected to end with a NULL pointer.
    */
    for (; envgvs[i]; ++i) {
        EnvironmentObject *envobj;
        envobj = recreate_environment(module, envs[i]);
        if (envobj == NULL) {
            goto error;
        }
        // Store the environment pointer into the global
        *envgvs[i] = envobj;
        Py_DECREF(envobj);
    }

    Py_DECREF(docobj);
    Py_DECREF(modname);
    return 0;

error:
    Py_XDECREF(docobj);
    Py_XDECREF(modname);
    return -1;
}

View File

@@ -0,0 +1,218 @@
import setuptools
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
import numpy as np
import functools
import os
import subprocess
import sys
from tempfile import mkdtemp
from contextlib import contextmanager
from pathlib import Path
# Wire in distutils components from setuptools
# (setuptools vendors its own copy of distutils, so go through the
# `setuptools.distutils` namespace rather than importing distutils directly).
CCompiler = setuptools.distutils.ccompiler.CCompiler
new_compiler = setuptools.distutils.ccompiler.new_compiler
customize_compiler = setuptools.distutils.sysconfig.customize_compiler
log = setuptools.distutils.log
# Per-platform file suffixes, keyed by the first three characters of
# sys.platform ('win' for win32/windows; everything else falls back to
# 'default').  Value layout: (DLL suffix, Python C extension suffix).
_configs = {
    # DLL suffix, Python C extension suffix
    'win': ('.dll', '.pyd'),
    'default': ('.so', '.so'),
}
def get_configs(arg):
    """
    Return one of the current platform's file suffixes.

    *arg* indexes into the ``_configs`` entry for this platform:
    0 selects the shared-library (DLL) suffix, 1 the Python C
    extension suffix.
    """
    platform_key = sys.platform[:3]
    suffixes = _configs.get(platform_key, _configs['default'])
    return suffixes[arg]
# Convenience accessors: index 0 is the shared-library (DLL) suffix,
# index 1 is the Python C extension suffix.
find_shared_ending = functools.partial(get_configs, 0)
find_pyext_ending = functools.partial(get_configs, 1)
@contextmanager
def _gentmpfile(suffix):
# windows locks the tempfile so use a tempdir + file, see
# https://github.com/numba/numba/issues/3304
try:
tmpdir = mkdtemp()
ntf = open(os.path.join(tmpdir, "temp%s" % suffix), 'wt')
yield ntf
finally:
try:
ntf.close()
os.remove(ntf)
except Exception:
pass
else:
os.rmdir(tmpdir)
@functools.lru_cache(maxsize=1)
def external_compiler_works():
    """
    Returns True if the "external compiler" bound in numpy.distutils is present
    and working, False otherwise.

    The result is cached (lru_cache(maxsize=1)), so the probe compile
    runs at most once per process.
    """
    compiler = new_compiler()
    customize_compiler(compiler)
    # Probe both the C and the C++ toolchain with a trivial program.
    for suffix in ['.c', '.cxx']:
        try:
            with _gentmpfile(suffix) as ntf:
                simple_c = "int main(void) { return 0; }"
                ntf.write(simple_c)
                ntf.flush()
                # Close before compiling: Windows won't let the compiler
                # open a file this process still holds open.
                ntf.close()
                # *output_dir* is set to avoid the compiler putting temp files
                # in the current directory.
                compiler.compile([ntf.name], output_dir=Path(ntf.name).anchor)
        except Exception: # likely CompileError or file system issue
            return False
    return True
class _DummyExtension(object):
    # Minimal stand-in for setuptools.Extension: build_ext.get_libraries()
    # only reads the `.libraries` attribute of the extension it is given.
    libraries = []
class Toolchain(object):
    """
    Facade over the setuptools/distutils compiler machinery, used to
    compile C sources into object files and link them into shared
    libraries / Python C extensions.

    Raises RuntimeError at construction time if no working external
    C compiler can be found.
    """

    def __init__(self):
        # Fail fast with an actionable, platform-specific message when
        # no working C compiler is available.
        if not external_compiler_works():
            self._raise_external_compiler_error()
        self._verbose = False
        # Platform-appropriate compiler, customized with the settings
        # (CC, CFLAGS, ...) recorded when Python itself was built.
        self._compiler = new_compiler()
        customize_compiler(self._compiler)
        # A dummy build_ext command supplies the Python library/include
        # directories and the extension-filename logic.
        self._build_ext = build_ext(Distribution())
        self._build_ext.finalize_options()
        self._py_lib_dirs = self._build_ext.library_dirs
        self._py_include_dirs = self._build_ext.include_dirs
        # Numpy headers, plus libm on non-Windows platforms (the MSVC
        # CRT provides the math functions implicitly on Windows).
        np_compile_args = {'include_dirs': [np.get_include(),],}
        if sys.platform == 'win32':
            np_compile_args['libraries'] = []
        else:
            np_compile_args['libraries'] = ['m',]
        self._math_info = np_compile_args

    @property
    def verbose(self):
        # Whether distutils echoes the commands it executes.
        return self._verbose

    @verbose.setter
    def verbose(self, value):
        self._verbose = value
        # DEBUG will let Numpy spew many messages, so stick to INFO
        # to print commands executed by distutils
        log.set_threshold(log.INFO if value else log.WARN)

    def _raise_external_compiler_error(self):
        """
        Raise RuntimeError with a platform-specific hint about how to
        install a working C compiler (conda package names where known).
        """
        basemsg = ("Attempted to compile AOT function without the "
                   "compiler used by `numpy.distutils` present.")
        conda_msg = "If using conda try:\n\n#> conda install %s"
        plt = sys.platform
        if plt.startswith('linux'):
            # 32- vs 64-bit Python needs the matching conda toolchain.
            if sys.maxsize <= 2 ** 32:
                compilers = ['gcc_linux-32', 'gxx_linux-32']
            else:
                compilers = ['gcc_linux-64', 'gxx_linux-64']
            msg = "%s %s" % (basemsg, conda_msg % ' '.join(compilers))
        elif plt.startswith('darwin'):
            compilers = ['clang_osx-64', 'clangxx_osx-64']
            msg = "%s %s" % (basemsg, conda_msg % ' '.join(compilers))
        elif plt.startswith('win32'):
            winmsg = "Cannot find suitable msvc."
            msg = "%s %s" % (basemsg, winmsg)
        else:
            msg = "Unknown platform %s" % plt
        raise RuntimeError(msg)

    def compile_objects(self, sources, output_dir,
                        include_dirs=(), depends=(), macros=(),
                        extra_cflags=None):
        """
        Compile the given source files into a separate object file each,
        all beneath the *output_dir*. A list of paths to object files
        is returned.

        *macros* has the same format as in distutils: a list of 1- or 2-tuples.
        If a 1-tuple (name,), the given name is considered undefined by
        the C preprocessor.
        If a 2-tuple (name, value), the given name is expanded into the
        given value by the C preprocessor.
        """
        objects = self._compiler.compile(sources,
                                         output_dir=output_dir,
                                         include_dirs=include_dirs,
                                         depends=depends,
                                         macros=macros or [],
                                         extra_preargs=extra_cflags)
        return objects

    def link_shared(self, output, objects, libraries=(),
                    library_dirs=(), export_symbols=(),
                    extra_ldflags=None):
        """
        Create a shared library *output* linking the given *objects*
        and *libraries* (all strings).
        """
        output_dir, output_filename = os.path.split(output)
        self._compiler.link(CCompiler.SHARED_OBJECT, objects,
                            output_filename, output_dir,
                            libraries, library_dirs,
                            export_symbols=export_symbols,
                            extra_preargs=extra_ldflags)

    def get_python_libraries(self):
        """
        Get the library arguments necessary to link with Python.
        """
        libs = self._build_ext.get_libraries(_DummyExtension())
        if sys.platform == 'win32':
            # Under Windows, need to link explicitly against the CRT,
            # as the MSVC compiler would implicitly do.
            # (XXX msvcrtd in pydebug mode?)
            libs = libs + ['msvcrt']
        return libs + self._math_info['libraries']

    def get_python_library_dirs(self):
        """
        Get the library directories necessary to link with Python.
        """
        return list(self._py_lib_dirs)

    def get_python_include_dirs(self):
        """
        Get the include directories necessary to compile against the Python
        and Numpy C APIs.
        """
        return list(self._py_include_dirs) + self._math_info['include_dirs']

    def get_ext_filename(self, ext_name):
        """
        Given a C extension's module name, return its intended filename.
        """
        return self._build_ext.get_ext_filename(ext_name)
def _quote_arg(arg):
"""
Quote the argument for safe use in a shell command line.
"""
# If there is a quote in the string, assume relevants parts of the
# string are already quoted (e.g. '-I"C:\\Program Files\\..."')
if '"' not in arg and ' ' in arg:
return '"%s"' % arg
return arg
def _is_sequence(arg):
if isinstance(arg, (str, bytes)):
return False
try:
len(arg)
return True
except Exception:
return False