From f40fb59f5025c6d07ef2e4bd47f9bbf78b05e989 Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner
Date: Thu, 19 Dec 2024 14:44:56 -0600
Subject: [PATCH 01/33] Fix install docs

---
 doc/misc.rst | 29 ++++++++++++++---------------
 1 file changed, 14 insertions(+), 15 deletions(-)

diff --git a/doc/misc.rst b/doc/misc.rst
index 6729b29b..5db2ede6 100644
--- a/doc/misc.rst
+++ b/doc/misc.rst
@@ -5,28 +5,27 @@ This command should install :mod:`islpy`::
 
     pip install islpy
 
-You may need to run this with :command:`sudo`.
-If you don't already have `pip `_,
-run this beforehand::
+For a more manual installation from source, `download the source
+`__, unpack it, and say::
 
-    curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py
-    python get-pip.py
+    pip install -v .
 
-For a more manual installation, `download the source
-`_, unpack it, and say::
+You may also clone its git repository::
 
-    python setup.py install
+    git clone --recursive https://github.com/inducer/islpy.git
 
-You may also clone its git repository::
+The following attempts an editable installation; note, however,
+that this may run into various issues and is not well supported
+by the build tools::
 
-    git clone --recursive http://git.tiker.net/trees/islpy.git
-    git clone --recursive git://github.com/inducer/islpy
+    $ pip install --no-build-isolation -e .
 
-Wiki and FAQ
-============
+Support
+=======
 
-A `wiki page `_ is also available, where install
-instructions and an FAQ will grow over time.
+You can try posting questions or comments at the
+`GitHub Discussions site `__
+for islpy.
 
 For a mailing list, please consider using the `isl list
 `_ until they tell us to get

From ffe384285dc5c58b0e7a40a8200428024583ee84 Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner
Date: Thu, 19 Dec 2024 14:45:20 -0600
Subject: [PATCH 02/33] gen_wrap: generate function signatures

---
 doc/reference.rst        |  2 +-
 gen_wrap.py              | 94 ++++++++++++++++++++--------------
 src/wrapper/wrap_isl.cpp |  5 +++
 3 files changed, 52 insertions(+), 49 deletions(-)

diff --git a/doc/reference.rst b/doc/reference.rst
index 6bec390e..37a843fa 100644
--- a/doc/reference.rst
+++ b/doc/reference.rst
@@ -92,7 +92,7 @@ Convenience
 Lifetime Helpers
 ^^^^^^^^^^^^^^^^
 
-.. class:: ffi_callback_handle
+.. class:: CallbackLifetimeHandle
 
     Some callbacks, notably those in :class:`AstBuild`, need to outlive the
    function call to which they're passed.
These callback return a callback diff --git a/gen_wrap.py b/gen_wrap.py index 8fb1d4f0..c2eb17a2 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -876,10 +876,11 @@ def write_wrapper(outf, meth): input_args = [] post_call = [] extra_ret_vals = [] - extra_ret_descrs = [] + extra_ret_types = [] preamble = [] arg_names = [] + arg_sigs = [] checks.append("isl_ctx *islpy_ctx = nullptr;") @@ -906,8 +907,7 @@ def write_wrapper(outf, meth): if (meth.cls in ["ast_build", "ast_print_options"] and meth.name.startswith("set_")): extra_ret_vals.append(f"py_{arg.name}") - extra_ret_descrs.append("(opaque handle to " - "manage callback lifetime)") + extra_ret_types.append("CallbackLifetimeHandle ") input_args.append(f"py::object py_{arg.name}") passed_args.append(cb_name) @@ -915,6 +915,7 @@ def write_wrapper(outf, meth): preamble.append(get_callback(cb_name, arg)) + arg_sigs.append(f"{arg.name}: Callable") docs.append(":param {name}: callback({args})".format( name=arg.name, args=", ".join( @@ -931,10 +932,10 @@ def write_wrapper(outf, meth): doc_cls = arg.base_type if doc_cls.startswith("isl_"): doc_cls = doc_cls[4:] - if doc_cls == "unsigned long": + else: doc_cls = "int" - docs.append(f":param {arg.name}: :class:`{doc_cls}`") + arg_sigs.append(f"{arg.name}: {doc_cls}") elif arg.base_type in ["char", "const char"] and arg.ptr == "*": if arg.semantics is SEM_KEEP: @@ -949,7 +950,7 @@ def _arg_to_const_str(arg: Argument) -> str: input_args.append(f"{_arg_to_const_str(arg)}{arg.base_type} *{arg.name}") - docs.append(f":param {arg.name}: string") + arg_sigs.append(f"{arg.name}: str") elif arg.base_type in ["int", "isl_bool"] and arg.ptr == "*": if arg.name in ["exact", "tight"]: @@ -959,8 +960,7 @@ def _arg_to_const_str(arg: Argument) -> str: extra_ret_vals.append(f"(bool) arg_{arg.name}") else: extra_ret_vals.append(f"arg_{arg.name}") - extra_ret_descrs.append( - f"{arg.name} ({to_py_class(arg.base_type)})") + extra_ret_types.append(to_py_class(arg.base_type)) arg_names.pop() else: raise SignatureNotSupported("int *") @@ -968,7 +968,6 @@ def _arg_to_const_str(arg: Argument) -> str: elif arg.base_type == "isl_val" and arg.ptr == "*" and arg_idx > 0: # {{{ val input argument - arg_descr = f":param {arg.name}: :class:`Val`" input_args.append(f"py::object py_{arg.name}") checks.append(""" std::unique_ptr unique_arg_%(name)s; @@ -1013,7 +1012,7 @@ def _arg_to_const_str(arg: Argument) -> str: post_call.append(f"unique_arg_{arg.name}.release();") passed_args.append(f"unique_arg_{arg.name}->m_data") - docs.append(arg_descr) + arg_sigs.append(f"{arg.name}: Val | int") # }}} @@ -1021,10 +1020,8 @@ def _arg_to_const_str(arg: Argument) -> str: # {{{ isl types input arguments arg_cls = arg.base_type[4:] - arg_descr = f":param {arg.name}: :class:`{to_py_class(arg_cls)}`" if arg_idx == 0 and meth.is_mutator: - arg_descr += " (mutated in-place)" input_args.append(f"py::object py_{arg.name}") checks.append(""" isl::%(cls)s &arg_%(name)s( @@ -1038,7 +1035,7 @@ def _arg_to_const_str(arg: Argument) -> str: "cls": arg_cls}) passed_args.append(f"arg_{arg.name}.m_data") post_call.append(f"arg_{arg.name}.invalidate();") - arg_descr += " (mutated in-place)" + docs.append("..note::\n {arg.name} is mutated in-place.\n\n") else: if arg.semantics is None and arg.base_type != "isl_ctx": @@ -1077,7 +1074,9 @@ def _arg_to_const_str(arg: Argument) -> str: input_args.append(f"{arg_cls} &arg_{arg.name}") post_call.append(f"arg_{arg.name}.invalidate();") passed_args.append(f"arg_{arg.name}.m_data") - arg_descr += " (:ref:`becomes invalid `)" + 
docs.append( + "..note::\n {arg.name} " + ":ref:`becomes invalid `)\n\n") else: passed_args.append(f"arg_{arg.name}.m_data") input_args.append(f"{arg_cls} const &arg_{arg.name}") @@ -1092,7 +1091,7 @@ def _arg_to_const_str(arg: Argument) -> str: islpy_ctx = {arg.base_type}_get_ctx(arg_{arg.name}.m_data); """) - docs.append(arg_descr) + arg_sigs.append(f"{arg.name}: {to_py_class(arg_cls)}") # }}} @@ -1118,8 +1117,7 @@ def _arg_to_const_str(arg: Argument) -> str: """ % {"name": arg.name, "ret_cls": ret_cls}) extra_ret_vals.append(f"py_ret_{arg.name}") - extra_ret_descrs.append( - f"{arg.name} (:class:`{to_py_class(ret_cls)}`)") + extra_ret_types.append(to_py_class(ret_cls)) # }}} @@ -1142,7 +1140,7 @@ def _arg_to_const_str(arg: Argument) -> str: arg_idx += 1 - processed_return_type = f"{meth.return_base_type} {meth.return_ptr}" + processed_return_type = f"{meth.return_base_type} {meth.return_ptr}".strip() if meth.return_base_type == "void" and not meth.return_ptr: result_capture = "" @@ -1168,18 +1166,18 @@ def _arg_to_const_str(arg: Argument) -> str: if meth.name.startswith("is_") or meth.name.startswith("has_"): processed_return_type = "bool" - ret_descr = processed_return_type + ret_type = processed_return_type if extra_ret_vals: if len(extra_ret_vals) == 1: processed_return_type = "py::object" body.append(f"return py::object(result, {extra_ret_vals[0]});") - ret_descr = extra_ret_descrs[0] + ret_type, = extra_ret_types else: processed_return_type = "py::object" body.append("return py::make_tuple(result, {});".format( ", ".join(extra_ret_vals))) - ret_descr = "tuple: ({})".format(", ".join(extra_ret_descrs)) + ret_type = f"tuple[{', '.join(extra_ret_types)}]" else: body.append("return result;") @@ -1195,18 +1193,18 @@ def _arg_to_const_str(arg: Argument) -> str: assert not (meth.name.startswith("is_") or meth.name.startswith("has_")) - ret_descr = processed_return_type + ret_type = "None" if extra_ret_vals: if len(extra_ret_vals) == 1: processed_return_type = "py::object" body.append(f"return py::object({extra_ret_vals[0]});") - ret_descr = extra_ret_descrs[0] + ret_type, = extra_ret_types else: processed_return_type = "py::object" body.append("return py::make_tuple({});".format( ", ".join(extra_ret_vals))) - ret_descr = "tuple: ({})".format(", ".join(extra_ret_descrs)) + ret_type = f"tuple[{', '.join(extra_ret_types)}]" else: body.append("return result;") @@ -1221,18 +1219,18 @@ def _arg_to_const_str(arg: Argument) -> str: """) processed_return_type = "bool" - ret_descr = "bool" + ret_type = "bool" if extra_ret_vals: if len(extra_ret_vals) == 1: processed_return_type = "py::object" body.append(f"return py::object({extra_ret_vals[0]});") - ret_descr = extra_ret_descrs[0] + ret_type, = extra_ret_types else: processed_return_type = "py::object" body.append("return py::make_tuple({});".format( ", ".join(extra_ret_vals))) - ret_descr = "tuple: ({})".format(", ".join(extra_ret_descrs)) + ret_type = f"tuple[{', '.join(extra_ret_types)}]" else: body.append("return result;") @@ -1245,7 +1243,7 @@ def _arg_to_const_str(arg: Argument) -> str: raise NotImplementedError("extra ret val with safe type") body.append("return result;") - ret_descr = processed_return_type + ret_type = "int" # }}} @@ -1263,7 +1261,8 @@ def _arg_to_const_str(arg: Argument) -> str: body.append(f"arg_{meth.args[0].name}.take_possession_of(result);") body.append(f"return py_{meth.args[0].name};") - ret_descr = f":class:`{to_py_class(ret_cls)}` (self)" + ret_type = to_py_class(ret_cls) + docs.append("..note::\n Returns 
*self*.\n\n") else: processed_return_type = "py::object" isl_obj_ret_val = \ @@ -1272,10 +1271,9 @@ def _arg_to_const_str(arg: Argument) -> str: if extra_ret_vals: isl_obj_ret_val = "py::make_tuple({}, {})".format( isl_obj_ret_val, ", ".join(extra_ret_vals)) - ret_descr = "tuple: (:class:`{}`, {})".format( - to_py_class(ret_cls), ", ".join(extra_ret_descrs)) + ret_type = f"tuple[{', '.join(extra_ret_types)}]" else: - ret_descr = f":class:`{to_py_class(ret_cls)}`" + ret_type = to_py_class(ret_cls) if meth.return_semantics is None and ret_cls != "ctx": raise Undocumented(meth) @@ -1308,7 +1306,7 @@ def _arg_to_const_str(arg: Argument) -> str: if meth.return_semantics is SEM_GIVE: body.append("free(result);") - ret_descr = "string" + ret_type = "str" elif (meth.return_base_type == "void" and meth.return_ptr == "*" @@ -1317,7 +1315,7 @@ def _arg_to_const_str(arg: Argument) -> str: body.append(""" return py::borrow((PyObject *) result); """) - ret_descr = "a user-specified python object" + ret_type = "object" processed_return_type = "py::object" elif meth.return_base_type == "void" and not meth.return_ptr: @@ -1325,13 +1323,13 @@ def _arg_to_const_str(arg: Argument) -> str: processed_return_type = "py::object" if len(extra_ret_vals) == 1: body.append(f"return {extra_ret_vals[0]};") - ret_descr = extra_ret_descrs[0] + ret_type, = extra_ret_types else: body.append("return py::make_tuple({});".format( ", ".join(extra_ret_vals))) - ret_descr = "tuple: {}".format(", ".join(extra_ret_descrs)) + ret_type = f"tuple[{', '.join(extra_ret_types)}]" else: - ret_descr = "None" + ret_type = "None" else: raise SignatureNotSupported( @@ -1351,17 +1349,16 @@ def _arg_to_const_str(arg: Argument) -> str: inputs=", ".join(input_args), body="\n".join(body))) - docs = (["{}({})".format(meth.name, ", ".join(arg_names)), - "", *docs, f":return: {ret_descr}"]) + sig_str = f"({', '.join(arg_sigs)}) -> {ret_type}" - return arg_names, "\n".join(docs) + return arg_names, "\n".join(docs), sig_str # }}} # {{{ exposer generator -def write_exposer(outf, meth, arg_names, doc_str): +def write_exposer(outf, meth, arg_names, doc_str, sig_str): func_name = f"isl::{meth.cls}_{meth.name}" py_name = meth.name @@ -1388,12 +1385,13 @@ def write_exposer(outf, meth, arg_names, doc_str): "This function is not part of the officially public isl API. 
" "Use at your own risk.") - doc_str_arg = ', "{}"'.format(doc_str.replace("\n", "\\n")) - wrap_class = CLASS_MAP.get(meth.cls, meth.cls) outf.write(f'wrap_{wrap_class}.def{"_static" if meth.is_static else ""}(' - f'"{py_name}", {func_name}{args_str+doc_str_arg});\n') + f'"{py_name}", {func_name}{args_str}' + f', py::sig("def {py_name}{sig_str}")' + f', "{py_name}{sig_str}\\n{doc_str.replace("\n", "\\n")}"' + ');\n') if meth.name == "read_from_str": assert meth.is_static @@ -1443,13 +1441,13 @@ def write_wrappers(expf, wrapf, methods): continue try: - arg_names, doc_str = write_wrapper(wrapf, meth) - write_exposer(expf, meth, arg_names, doc_str) + arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) + write_exposer(expf, meth, arg_names, doc_str, sig_str) except Undocumented: undoc.append(str(meth)) except Retry: - arg_names, doc_str = write_wrapper(wrapf, meth) - write_exposer(expf, meth, arg_names, doc_str) + arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) + write_exposer(expf, meth, arg_names, doc_str, sig_str) except SignatureNotSupported: _, e, _ = sys.exc_info() print(f"SKIP (sig not supported: {e}): {meth}") diff --git a/src/wrapper/wrap_isl.cpp b/src/wrapper/wrap_isl.cpp index 57af55c1..183363ae 100644 --- a/src/wrapper/wrap_isl.cpp +++ b/src/wrapper/wrap_isl.cpp @@ -43,6 +43,9 @@ namespace isl } return nullptr; } + + // bogus, unused, just in service of type annotation + struct callback_lifetime_handle { }; } @@ -224,6 +227,8 @@ NB_MODULE(_isl, m) m.def("isl_version", [] () { return isl_version(); }); + py::class_ wrap_cb_lifetime_handle(m, "CallbackLifetimeHandle"); + islpy_expose_part1(m); islpy_expose_part2(m); islpy_expose_part3(m); From 2ed9591e3c9d01eca49d9999d8c5e44e937fb26f Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 14:45:28 -0600 Subject: [PATCH 03/33] Add py.typed marker --- islpy/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 islpy/py.typed diff --git a/islpy/py.typed b/islpy/py.typed new file mode 100644 index 00000000..e69de29b From be1fd9ae4c9ed58fbd42fb6baaf9bbd5fba5fe20 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 14:45:45 -0600 Subject: [PATCH 04/33] Sphinx: remove signature processing --- doc/conf.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 0ab41894..f64b5f1a 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -23,22 +23,6 @@ } -def autodoc_process_signature(app, what, name, obj, options, signature, - return_annotation): - from inspect import ismethod - if ismethod(obj) and obj.__doc__: - import re - pattern = r"^[ \n]*%s(\([a-z_0-9, ]+\))" % re.escape(obj.__name__) - func_match = re.match(pattern, obj.__doc__) - - if func_match is not None: - signature = func_match.group(1) - elif obj.__name__ == "is_valid": - signature = "()" - - return (signature, return_annotation) - - def autodoc_process_docstring(app, what, name, obj, options, lines): # clear out redundant pybind-generated member list if any("Members" in ln for ln in lines): @@ -93,4 +77,3 @@ def gen_method_string(meth_name): def setup(app): app.connect("autodoc-process-docstring", autodoc_process_docstring) - app.connect("autodoc-process-signature", autodoc_process_signature) From 6dbb52dc3b106a5a26c5a4c9a34fda67f10ad97a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 14:46:20 -0600 Subject: [PATCH 05/33] Remove deprecated SuppressedWarnings --- islpy/__init__.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git 
a/islpy/__init__.py b/islpy/__init__.py index 707f5eff..21f2f6fa 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -1240,20 +1240,6 @@ def affs_from_space(space): return result -class SuppressedWarnings: - def __init__(self, ctx): - from warnings import warn - warn("islpy.SuppressedWarnings is a deprecated no-op and will be removed " - "in 2023. Simply remove the use of it to avoid this warning.", - DeprecationWarning, stacklevel=1) - - def __enter__(self): - pass - - def __exit__(self, type, value, traceback): - pass - - # {{{ give sphinx something to import so we can produce docs def _define_doc_link_names(): From 1125278d885948db03987fab14e833d6b8020adf Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 15:02:59 -0600 Subject: [PATCH 06/33] __init__: import from _isl, don't assign --- islpy/__init__.py | 276 ++++++++++++++++++++++++++++++---------------- 1 file changed, 179 insertions(+), 97 deletions(-) diff --git a/islpy/__init__.py b/islpy/__init__.py index 21f2f6fa..229fd7fa 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -86,105 +86,97 @@ def clear_cache(obj): # }}} -Error = _isl.Error - # {{{ name imports -isl_version = _isl.isl_version - -Context = _isl.Context -IdList = _isl.IdList -ValList = _isl.ValList -BasicSetList = _isl.BasicSetList -BasicMapList = _isl.BasicMapList -SetList = _isl.SetList -MapList = _isl.MapList -UnionSetList = _isl.UnionSetList -ConstraintList = _isl.ConstraintList -AffList = _isl.AffList -PwAffList = _isl.PwAffList -PwMultiAffList = _isl.PwMultiAffList -AstExprList = _isl.AstExprList -AstNodeList = _isl.AstNodeList - -QPolynomialList = _isl.QPolynomialList -PwQPolynomialList = _isl.PwQPolynomialList -PwQPolynomialFoldList = _isl.PwQPolynomialFoldList - -UnionPwAffList = _isl.UnionPwAffList -UnionPwMultiAffList = _isl.UnionPwMultiAffList -UnionMapList = _isl.UnionMapList -UnionSetList = _isl.UnionSetList - -IdToAstExpr = _isl.IdToAstExpr -Printer = _isl.Printer -Val = _isl.Val -MultiVal = _isl.MultiVal -Vec = _isl.Vec -Mat = _isl.Mat -FixedBox = _isl.FixedBox -Aff = _isl.Aff -PwAff = _isl.PwAff -UnionPwAff = _isl.UnionPwAff -MultiAff = _isl.MultiAff -MultiPwAff = _isl.MultiPwAff -PwMultiAff = _isl.PwMultiAff -UnionPwMultiAff = _isl.UnionPwMultiAff -UnionPwAffList = _isl.UnionPwAffList -MultiUnionPwAff = _isl.MultiUnionPwAff -Id = _isl.Id -MultiId = _isl.MultiId -Constraint = _isl.Constraint -Space = _isl.Space -LocalSpace = _isl.LocalSpace -BasicSet = _isl.BasicSet -BasicMap = _isl.BasicMap -Set = _isl.Set -Map = _isl.Map -UnionMap = _isl.UnionMap -UnionSet = _isl.UnionSet -Point = _isl.Point -Vertex = _isl.Vertex -Cell = _isl.Cell -Vertices = _isl.Vertices -StrideInfo = _isl.StrideInfo -QPolynomialFold = _isl.QPolynomialFold -PwQPolynomialFold = _isl.PwQPolynomialFold -UnionPwQPolynomialFold = _isl.UnionPwQPolynomialFold -UnionPwQPolynomial = _isl.UnionPwQPolynomial -QPolynomial = _isl.QPolynomial -PwQPolynomial = _isl.PwQPolynomial -Term = _isl.Term -ScheduleConstraints = _isl.ScheduleConstraints -ScheduleNode = _isl.ScheduleNode -Schedule = _isl.Schedule -AccessInfo = _isl.AccessInfo -Flow = _isl.Flow -Restriction = _isl.Restriction -UnionAccessInfo = _isl.UnionAccessInfo -UnionFlow = _isl.UnionFlow -AstExpr = _isl.AstExpr -AstNode = _isl.AstNode -AstPrintOptions = _isl.AstPrintOptions -AstBuild = _isl.AstBuild - -error = _isl.error -stat = _isl.stat -dim_type = _isl.dim_type -schedule_node_type = _isl.schedule_node_type -ast_expr_op_type = _isl.ast_expr_op_type -ast_expr_type = _isl.ast_expr_type -ast_node_type 
= _isl.ast_node_type -ast_loop_type = _isl.ast_loop_type -fold = _isl.fold -format = _isl.format -yaml_style = _isl.yaml_style -bound = _isl.bound -on_error = _isl.on_error -schedule_algorithm = _isl.schedule_algorithm - -# backward compatibility -ast_op_type = _isl.ast_expr_op_type +from islpy._isl import ( + AccessInfo, + Aff, + AffList, + AstBuild, + AstExpr, + AstExprList, + AstNode, + AstNodeList, + AstPrintOptions, + BasicMap, + BasicMapList, + BasicSet, + BasicSetList, + Cell, + Constraint, + ConstraintList, + Context, + Error, + FixedBox, + Flow, + Id, + IdList, + IdToAstExpr, + LocalSpace, + Map, + MapList, + Mat, + MultiAff, + MultiId, + MultiPwAff, + MultiUnionPwAff, + MultiVal, + Point, + Printer, + PwAff, + PwAffList, + PwMultiAff, + PwMultiAffList, + PwQPolynomial, + PwQPolynomialFold, + PwQPolynomialFoldList, + PwQPolynomialList, + QPolynomial, + QPolynomialFold, + QPolynomialList, + Restriction, + Schedule, + ScheduleConstraints, + ScheduleNode, + Set, + SetList, + Space, + StrideInfo, + Term, + UnionAccessInfo, + UnionFlow, + UnionMap, + UnionMapList, + UnionPwAff, + UnionPwAffList, + UnionPwMultiAff, + UnionPwMultiAffList, + UnionPwQPolynomial, + UnionPwQPolynomialFold, + UnionSet, + UnionSetList, + Val, + ValList, + Vec, + Vertex, + Vertices, + ast_expr_op_type, + ast_expr_type, + ast_loop_type, + ast_node_type, + bound, + dim_type, + error, + fold, + format, + isl_version, + on_error, + schedule_algorithm, + schedule_node_type, + stat, + yaml_style, +) + # }}} @@ -1253,5 +1245,95 @@ class Div: # }}} +__all__ = ( + "AccessInfo", + "Aff", + "AffList", + "AstBuild", + "AstExpr", + "AstExprList", + "AstNode", + "AstNodeList", + "AstPrintOptions", + "BasicMap", + "BasicMapList", + "BasicSet", + "BasicSetList", + "Cell", + "Constraint", + "ConstraintList", + "Context", + "Error", + "FixedBox", + "Flow", + "Id", + "IdList", + "IdToAstExpr", + "LocalSpace", + "Map", + "MapList", + "Mat", + "MultiAff", + "MultiId", + "MultiPwAff", + "MultiUnionPwAff", + "MultiVal", + "Point", + "Printer", + "PwAff", + "PwAffList", + "PwMultiAff", + "PwMultiAffList", + "PwQPolynomial", + "PwQPolynomialFold", + "PwQPolynomialFoldList", + "PwQPolynomialList", + "QPolynomial", + "QPolynomialFold", + "QPolynomialList", + "Restriction", + "Schedule", + "ScheduleConstraints", + "ScheduleNode", + "Set", + "SetList", + "Space", + "StrideInfo", + "Term", + "UnionAccessInfo", + "UnionFlow", + "UnionMap", + "UnionMapList", + "UnionPwAff", + "UnionPwAffList", + "UnionPwAffList", + "UnionPwMultiAff", + "UnionPwMultiAffList", + "UnionPwQPolynomial", + "UnionPwQPolynomialFold", + "UnionSet", + "UnionSetList", + "UnionSetList", + "Val", + "ValList", + "Vec", + "Vertex", + "Vertices", + "ast_expr_op_type", + "ast_expr_type", + "ast_loop_type", + "ast_node_type", + "bound", + "dim_type", + "error", + "fold", + "format", + "isl_version", + "on_error", + "schedule_algorithm", + "schedule_node_type", + "stat", + "yaml_style", +) # vim: foldmethod=marker From 86393883e2802e18e30d90195881fc37fbe50982 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 15:03:18 -0600 Subject: [PATCH 07/33] Annotate pwaff_get_pieces --- islpy/__init__.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/islpy/__init__.py b/islpy/__init__.py index 229fd7fa..ad9f721a 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -20,7 +20,7 @@ THE SOFTWARE. 
""" -from typing import Any, Callable, Optional, TypeVar, cast +from typing import Any, Callable, Optional, Sequence, TypeVar, cast import islpy._isl as _isl from islpy.version import VERSION, VERSION_TEXT # noqa @@ -636,11 +636,7 @@ def obj_get_var_names(self, dimtype): # {{{ piecewise - def pwaff_get_pieces(self): - """ - :return: list of (:class:`Set`, :class:`Aff`) - """ - + def pwaff_get_pieces(self) -> Sequence[tuple[Set, Aff]]: result = [] def append_tuple(*args): From 0ad3681a42df4a8a300bf967e566d43c6940343c Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 May 2025 15:06:50 -0500 Subject: [PATCH 08/33] doc: Delete a now-extraneous comment on Python 2's long integers --- doc/reference.rst | 6 ------ 1 file changed, 6 deletions(-) diff --git a/doc/reference.rst b/doc/reference.rst index 37a843fa..bc188eb4 100644 --- a/doc/reference.rst +++ b/doc/reference.rst @@ -39,12 +39,6 @@ append a `.copy()` to that argument. (Note that you will notice if an object got deleted for you accidentally, as the next operation on it will simply fail with an exception.) -Integers --------- - -Whenever an integer argument is required, :mod:`islpy` supports regular Python -integers and Python :class:`long` integers. It will return Python long integers. - .. _automatic-casts: Automatic Casts From 13de9b929a0d3ac18b44ea913bb23f5efbb4c4f9 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 15:03:34 -0600 Subject: [PATCH 09/33] Fix an import shadow in __init__ --- islpy/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/islpy/__init__.py b/islpy/__init__.py index ad9f721a..55df9be8 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -205,8 +205,8 @@ def _read_from_str_wrapper(cls, context, s, dims_with_apostrophes): cls_from_str = cls.read_from_str(context, s) # Apostrophes in dim names have been lost, put them back - for dim_name, (dim_type, dim_idx) in dims_with_apostrophes.items(): - cls_from_str = cls_from_str.set_dim_name(dim_type, dim_idx, dim_name) + for dim_name, (dt, dim_idx) in dims_with_apostrophes.items(): + cls_from_str = cls_from_str.set_dim_name(dt, dim_idx, dim_name) return cls_from_str From da6da603088f8dc66fd7c2e2839ce01331c102b3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Dec 2024 15:03:43 -0600 Subject: [PATCH 10/33] Configure ruff ignores for stub file --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 1cf63e16..08b66914 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,6 +68,9 @@ known-local-folder = [ ] lines-after-imports = 2 +[tool.ruff.lint.per-file-ignores] +"islpy/*.pyi" = ["N801", "E501", "I001", "F401", "E202"] + [tool.cibuildwheel] # nanobind does not support Py<3.8 # i686 does not have enough memory for LTO to complete From 9bf7d0a9920ed8ffe6e3f4173c66d46b25cc7bd7 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:37:08 -0500 Subject: [PATCH 11/33] Bump min Python version to 3.10 --- .github/workflows/ci.yml | 4 ++-- CMakeLists.txt | 2 +- examples/demo.py | 4 ++-- pyproject.toml | 4 ++-- setup.py | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 55bb930a..37830d31 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.10', '3.x'] + python-version: ['3.10', '3.12', '3.x'] steps: - uses: actions/checkout@v4 - @@ -42,7 
+42,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.10', '3.x'] + python-version: ['3.10', '3.12', '3.x'] steps: - uses: actions/checkout@v4 - diff --git a/CMakeLists.txt b/CMakeLists.txt index eed3c649..e25ec7f9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required(VERSION 3.15...3.27) project(islpy) -find_package(Python 3.8 COMPONENTS Interpreter Development.Module REQUIRED) +find_package(Python 3.10 COMPONENTS Interpreter Development.Module REQUIRED) # Force Release build by default if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) diff --git a/examples/demo.py b/examples/demo.py index 26ae0985..2ae07491 100644 --- a/examples/demo.py +++ b/examples/demo.py @@ -63,10 +63,10 @@ def plot_basic_set(bset, *args, **kwargs): codes[0] = Path.MOVETO pathdata = [ - (code, tuple(coord)) for code, coord in zip(codes, vertex_pts)] + (code, tuple(coord)) for code, coord in zip(codes, vertex_pts, strict=True)] pathdata.append((Path.CLOSEPOLY, (0, 0))) - codes, verts = zip(*pathdata) + codes, verts = zip(*pathdata, strict=True) path = mpath.Path(verts, codes) patch = mpatches.PathPatch(path, **kwargs) pt.gca().add_patch(patch) diff --git a/pyproject.toml b/pyproject.toml index 08b66914..f995abea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ exclude = [ "isl", "aksetup_helper.py" ] -target-version = "py38" +target-version = "py310" [tool.ruff.lint] @@ -74,7 +74,7 @@ lines-after-imports = 2 [tool.cibuildwheel] # nanobind does not support Py<3.8 # i686 does not have enough memory for LTO to complete -skip = ["pp*", "cp3[67]-*", "*_i686"] +skip = ["pp*", "cp3[6789]-*", "*_i686"] test-requires = "pytest" test-command = "pytest {project}/test" diff --git a/setup.py b/setup.py index df62be3c..bfb74c7e 100644 --- a/setup.py +++ b/setup.py @@ -310,7 +310,7 @@ def main(): packages=find_packages(), - python_requires="~=3.8", + python_requires="~=3.10", setup_requires=setup_requires, extras_require={ "test": ["pytest>=2"], From ec674e0cd597e51f6d3a8c02462b5ce6c880f059 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:37:32 -0500 Subject: [PATCH 12/33] setup.py: Add commented-out bit showing how to enable a debug build --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index bfb74c7e..57f29958 100644 --- a/setup.py +++ b/setup.py @@ -208,6 +208,8 @@ def main(): with open("README.rst") as readme_f: readme = readme_f.read() + # cmake_args.append("-DCMAKE_BUILD_TYPE=Debug") + if conf["USE_SHIPPED_ISL"]: cmake_args.append("-DUSE_SHIPPED_ISL:bool=1") isl_inc_dirs = ["isl-supplementary", "isl/include", "isl"] From f925ccf578d95a70d1041f8b89b64db028828f47 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:44:54 -0500 Subject: [PATCH 13/33] Add bare-bones basedpyright config --- pyproject.toml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index f995abea..b2253a0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,3 +82,30 @@ test-command = "pytest {project}/test" [tool.cibuildwheel.macos.environment] # Needed for full C++17 support MACOSX_DEPLOYMENT_TARGET = "10.14" + + +[tool.basedpyright] +reportImplicitStringConcatenation = "none" +reportUnnecessaryIsInstance = "none" +reportUnusedCallResult = "none" +reportExplicitAny = "none" +reportUnreachable = "none" + +# This reports even cycles that are qualified by 'if TYPE_CHECKING'. Not what +# we care about at this moment. 
+# https://github.com/microsoft/pyright/issues/746 +reportImportCycles = "none" +pythonVersion = "3.10" +pythonPlatform = "All" + +[[tool.basedpyright.executionEnvironments]] +root = "islpy/_monkeypatch.py" +reportUnknownArgumentType = "hint" +reportAttributeAccessIssue = "none" +reportPrivateUsage = "none" + +[[tool.basedpyright.executionEnvironments]] +root = "islpy/_isl.pyi" +reportUnannotatedClassAttribute = "none" +reportImplicitOverride = "none" + From 1a423e49a56f6970b16e2e2a8aa8e4def0e177d3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:46:26 -0500 Subject: [PATCH 14/33] Enable TC ruff checks --- gen_wrap.py | 5 +++-- pyproject.toml | 3 +-- setup.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index c2eb17a2..98ff39c9 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -23,9 +23,10 @@ import os import re import sys +from collections.abc import Mapping, Sequence from dataclasses import dataclass from os.path import join -from typing import ClassVar, List, Mapping, Sequence +from typing import ClassVar, TextIO SEM_TAKE = "take" @@ -110,7 +111,7 @@ class Argument: class CallbackArgument: name: str return_semantics: str - return_decl_words: List[str] + return_decl_words: list[str] return_base_type: str return_ptr: str args: Sequence[Argument] diff --git a/pyproject.toml b/pyproject.toml index b2253a0c..454a0299 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,14 +44,13 @@ extend-select = [ "UP", # pyupgrade "RUF", # ruff "W", # pycodestyle + "TC", ] extend-ignore = [ "C90", # McCabe complexity "E221", # multiple spaces before operator "E226", # missing whitespace around arithmetic operator "E402", # module-level import not at top of file - "UP006", # updated annotations due to __future__ import - "UP007", # updated annotations due to __future__ import "UP031", # use f-strings instead of % "UP032", # use f-strings instead of .format ] diff --git a/setup.py b/setup.py index 57f29958..22f4ae08 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ import shutil import sys -from typing import List, Sequence +from collections.abc import Sequence # Needed for aksetup to be found @@ -77,7 +77,7 @@ def get_config_schema(): def _get_isl_sources(use_shipped_imath: bool, use_imath_sio: bool) -> Sequence[str]: - extra_objects: List[str] = [] + extra_objects: list[str] = [] from glob import glob isl_blocklist = [ From 38c8dbbbbb425a85dcf1a9a5a8304fec02899dfc Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:47:01 -0500 Subject: [PATCH 15/33] Add some type annotations to gen_wrap --- gen_wrap.py | 56 ++++++++++++++++++++++++++--------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index 98ff39c9..26f145cd 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -71,7 +71,7 @@ class SignatureNotSupported(ValueError): # noqa: N818 pass -def to_py_class(cls): +def to_py_class(cls: str): if cls.startswith("isl_"): cls = cls[4:] @@ -102,7 +102,7 @@ def to_py_class(cls): class Argument: is_const: bool name: str - semantics: str + semantics: str | None base_type: str ptr: str @@ -221,7 +221,7 @@ def __repr__(self): "ast_build", ] } -CLASSES = [] +CLASSES: list[str] = [] for cls_list in PART_TO_CLASSES.values(): CLASSES.extend(cls_list) @@ -300,9 +300,9 @@ def __repr__(self): r"\s*\)") -def filter_semantics(words): - semantics = [] - other_words = [] +def filter_semantics(words: Sequence[str]): + semantics: list[str] = [] + other_words: list[str] = [] 
for w in words: if w in ISL_SEM_TO_SEM: semantics.append(ISL_SEM_TO_SEM[w]) @@ -316,7 +316,7 @@ def filter_semantics(words): return None, other_words -def split_at_unparenthesized_commas(s): +def split_at_unparenthesized_commas(s: str): paren_level = 0 i = 0 last_start = 0 @@ -336,7 +336,7 @@ def split_at_unparenthesized_commas(s): yield s[last_start:i] -def parse_arg(arg): +def parse_arg(arg: str) -> CallbackArgument | Argument: if "(*" in arg: arg_match = FUNC_PTR_RE.match(arg) assert arg_match is not None, f"fptr: {arg}" @@ -429,7 +429,7 @@ class FunctionData: "2exp": "two_exp" } - def __init__(self, include_dirs): + def __init__(self, include_dirs: Sequence[str]): self.classes_to_methods = {} self.include_dirs = include_dirs self.seen_c_names = set() @@ -454,7 +454,7 @@ def get_header_contents(self, fname): finally: inf.close() - def get_header_hashes(self, fnames): + def get_header_hashes(self, fnames: Sequence[str]): import hashlib h = hashlib.sha256() h.update(b"v1-") @@ -465,7 +465,7 @@ def get_header_hashes(self, fnames): preprocessed_dir = "preproc-headers" macro_headers: ClassVar[Sequence[str]] = ["isl/multi.h", "isl/list.h"] - def get_preprocessed_header(self, fname): + def get_preprocessed_header(self, fname: str) -> str: header_hash = self.get_header_hashes( [*self.macro_headers, fname]) @@ -502,11 +502,11 @@ def get_preprocessed_header(self, fname): # {{{ read_header - def read_header(self, fname): + def read_header(self, fname: str): lines = self.get_preprocessed_header(fname).split("\n") # heed continuations, split at semicolons - new_lines = [] + new_lines: list[str] = [] i = 0 while i < len(lines): my_line = lines[i].strip() @@ -580,7 +580,7 @@ def read_header(self, fname): # {{{ parse_decl - def parse_decl(self, decl): + def parse_decl(self, decl: str): decl_match = DECL_RE.match(decl) if decl_match is None: print(f"WARNING: func decl regexp not matched: {decl}") @@ -868,20 +868,20 @@ def get_callback(cb_name, cb): # {{{ wrapper generator -def write_wrapper(outf, meth): - body = [] - checks = [] - docs = [] +def write_wrapper(outf: TextIO, meth: Method): + body: list[str] = [] + checks: list[str] = [] + docs: list[str] = [] - passed_args = [] - input_args = [] - post_call = [] - extra_ret_vals = [] - extra_ret_types = [] - preamble = [] + passed_args: list[str] = [] + input_args: list[str] = [] + post_call: list[str] = [] + extra_ret_vals: list[str] = [] + extra_ret_types: list[str] = [] + preamble: list[str] = [] - arg_names = [] - arg_sigs = [] + arg_names: list[str] = [] + arg_sigs: list[str] = [] checks.append("isl_ctx *islpy_ctx = nullptr;") @@ -1359,7 +1359,7 @@ def _arg_to_const_str(arg: Argument) -> str: # {{{ exposer generator -def write_exposer(outf, meth, arg_names, doc_str, sig_str): +def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: str): func_name = f"isl::{meth.cls}_{meth.name}" py_name = meth.name @@ -1419,7 +1419,7 @@ def write_exposer(outf, meth, arg_names, doc_str, sig_str): wrapped_isl_functions = set() -def write_wrappers(expf, wrapf, methods): +def write_wrappers(expf, wrapf, methods: Sequence[Method]): undoc = [] for meth in methods: From 4325774505bfd7cc0d6459dadd261364ffc9ded8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:47:47 -0500 Subject: [PATCH 16/33] Fix (apparently-3.12+) escapes-within-f-strings --- gen_wrap.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gen_wrap.py b/gen_wrap.py index 26f145cd..87557241 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ 
-1388,10 +1388,12 @@ def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: wrap_class = CLASS_MAP.get(meth.cls, meth.cls) + newline = "\n" + escaped_newline = "\\n" outf.write(f'wrap_{wrap_class}.def{"_static" if meth.is_static else ""}(' f'"{py_name}", {func_name}{args_str}' f', py::sig("def {py_name}{sig_str}")' - f', "{py_name}{sig_str}\\n{doc_str.replace("\n", "\\n")}"' + f', "{py_name}{sig_str}\\n{doc_str.replace(newline, escaped_newline)}"' ');\n') if meth.name == "read_from_str": From c22be6dcab209c44d7e95af31ee09ab321e32ed7 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 13:48:26 -0500 Subject: [PATCH 17/33] Split monkeypatching out of islpy/__init__.py into separate file, add types --- gen_wrap.py | 11 + islpy/__init__.py | 864 +---------------------------------- islpy/_monkeypatch.py | 1015 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 1034 insertions(+), 856 deletions(-) create mode 100644 islpy/_monkeypatch.py diff --git a/gen_wrap.py b/gen_wrap.py index 87557241..45fe3ea4 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1396,6 +1396,17 @@ def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: f', "{py_name}{sig_str}\\n{doc_str.replace(newline, escaped_newline)}"' ');\n') + if meth.name == "get_space": + outf.write(f'wrap_{wrap_class}.def_prop_ro(' + f'"space", {func_name}{args_str}' + ', py::sig("def space(self) -> Space")' + ');\n') + + if meth.name in ["get_user", "get_name"]: + outf.write(f'wrap_{wrap_class}.def_prop_ro(' + f'"{meth.name[4:]}", {func_name}{args_str}' + ');\n') + if meth.name == "read_from_str": assert meth.is_static outf.write(f'wrap_{wrap_class}.def("__init__",' diff --git a/islpy/__init__.py b/islpy/__init__.py index 55df9be8..19cf3146 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -20,72 +20,14 @@ THE SOFTWARE. """ -from typing import Any, Callable, Optional, Sequence, TypeVar, cast +from typing import Literal -import islpy._isl as _isl +import islpy._isl as _isl # noqa: F401 from islpy.version import VERSION, VERSION_TEXT # noqa __version__ = VERSION_TEXT -# {{{ copied verbatim from pytools to avoid numpy/pytools dependency - -F = TypeVar("F", bound=Callable[..., Any]) - - -class _HasKwargs: - pass - - -def _memoize_on_first_arg(function: F, cache_dict_name: Optional[str] = None) -> F: - """Like :func:`memoize_method`, but for functions that take the object - in which do memoization information is stored as first argument. - - Supports cache deletion via ``function_name.clear_cache(self)``. - """ - from sys import intern - - if cache_dict_name is None: - cache_dict_name = intern( - f"_memoize_dic_{function.__module__}{function.__name__}" - ) - - def wrapper(obj, *args, **kwargs): - if kwargs: - key = (_HasKwargs, frozenset(kwargs.items()), *args) - else: - key = args - - try: - return getattr(obj, cache_dict_name)[key] - except AttributeError: - attribute_error = True - except KeyError: - attribute_error = False - - result = function(obj, *args, **kwargs) - if attribute_error: - object.__setattr__(obj, cache_dict_name, {key: result}) - return result - else: - getattr(obj, cache_dict_name)[key] = result - return result - - def clear_cache(obj): - object.__delattr__(obj, cache_dict_name) - - from functools import update_wrapper - new_wrapper = update_wrapper(wrapper, function) - - # type-ignore because mypy has a point here, stuffing random attributes - # into the function's dict is moderately sketchy. 
- new_wrapper.clear_cache = clear_cache # type: ignore[attr-defined] - - return cast(F, new_wrapper) - -# }}} - - # {{{ name imports from islpy._isl import ( @@ -177,800 +119,23 @@ def clear_cache(obj): yaml_style, ) - -# }}} +# importing _monkeypatch has the side effect of actually monkeypatching +from islpy._monkeypatch import _CHECK_DIM_TYPES, EXPR_CLASSES -_CHECK_DIM_TYPES = [ - dim_type.in_, dim_type.param, dim_type.set] +# }}} -ALL_CLASSES = tuple(getattr(_isl, cls) for cls in dir(_isl) if cls[0].isupper()) -EXPR_CLASSES = tuple(cls for cls in ALL_CLASSES - if "Aff" in cls.__name__ or "Polynomial" in cls.__name__) DEFAULT_CONTEXT = Context() -def _get_default_context(): +def _get_default_context() -> Context: """A callable to get the default context for the benefit of Python's ``__reduce__`` protocol. """ return DEFAULT_CONTEXT -def _read_from_str_wrapper(cls, context, s, dims_with_apostrophes): - """A callable to reconstitute instances from strings for the benefit - of Python's ``__reduce__`` protocol. - """ - cls_from_str = cls.read_from_str(context, s) - - # Apostrophes in dim names have been lost, put them back - for dim_name, (dt, dim_idx) in dims_with_apostrophes.items(): - cls_from_str = cls_from_str.set_dim_name(dt, dim_idx, dim_name) - - return cls_from_str - - -def _add_functionality(): - import islpy._isl as _isl # noqa - - # {{{ dim_type - - def dim_type_reduce(v): - return (dim_type, (int(v),)) - - dim_type.__reduce__ = dim_type_reduce - - # }}} - - # {{{ Context - - def context_reduce(self): - if self._wraps_same_instance_as(DEFAULT_CONTEXT): - return (_get_default_context, ()) - else: - return (Context, ()) - - def context_eq(self, other): - return isinstance(other, Context) and self._wraps_same_instance_as(other) - - def context_ne(self, other): - return not self.__eq__(other) - - Context.__reduce__ = context_reduce - Context.__eq__ = context_eq - Context.__ne__ = context_ne - - # }}} - - # {{{ generic initialization, pickling - - def generic_reduce(self): - ctx = self.get_ctx() - prn = Printer.to_str(ctx) - prn = getattr(prn, f"print_{self._base_name}")(self) - - # Reconstructing from string will remove apostrophes in dim names, - # so keep track of dim names with apostrophes - dims_with_apostrophes = { - dname: pos for dname, pos in self.get_var_dict().items() - if "'" in dname} - - return ( - _read_from_str_wrapper, - (type(self), ctx, prn.get_str(), dims_with_apostrophes)) - - for cls in ALL_CLASSES: - if hasattr(cls, "read_from_str"): - cls.__reduce__ = generic_reduce - - # }}} - - # {{{ printing - - def generic_str(self): - prn = Printer.to_str(self.get_ctx()) - getattr(prn, f"print_{self._base_name}")(self) - return prn.get_str() - - def generic_repr(self): - prn = Printer.to_str(self.get_ctx()) - getattr(prn, f"print_{self._base_name}")(self) - return f'{type(self).__name__}("{prn.get_str()}")' - - for cls in ALL_CLASSES: - if (hasattr(cls, "_base_name") - and hasattr(Printer, f"print_{cls._base_name}")): - cls.__str__ = generic_str - cls.__repr__ = generic_repr - - if not hasattr(cls, "__hash__"): - raise AssertionError(f"not hashable: {cls}") - - # }}} - - # {{{ Python set-like behavior - - def obj_or(self, other): - try: - return self.union(other) - except TypeError: - return NotImplemented - - def obj_and(self, other): - try: - return self.intersect(other) - except TypeError: - return NotImplemented - - def obj_sub(self, other): - try: - return self.subtract(other) - except TypeError: - return NotImplemented - - for cls in [BasicSet, BasicMap, Set, Map]: - 
cls.__or__ = obj_or - cls.__ror__ = obj_or - cls.__and__ = obj_and - cls.__rand__ = obj_and - cls.__sub__ = obj_sub - - # }}} - - # {{{ Space - - def space_get_id_dict(self, dimtype=None): - """Return a dictionary mapping variable :class:`Id` instances to tuples - of (:class:`dim_type`, index). - - :param dimtype: None to get all variables, otherwise - one of :class:`dim_type`. - """ - result = {} - - def set_dim_id(name, tp, idx): - if name in result: - raise RuntimeError(f"non-unique var id '{name}' encountered") - result[name] = tp, idx - - if dimtype is None: - types = _CHECK_DIM_TYPES - else: - types = [dimtype] - - for tp in types: - for i in range(self.dim(tp)): - name = self.get_dim_id(tp, i) - if name is not None: - set_dim_id(name, tp, i) - - return result - - def space_get_var_dict(self, dimtype=None, ignore_out=False): - """Return a dictionary mapping variable names to tuples of - (:class:`dim_type`, index). - - :param dimtype: None to get all variables, otherwise - one of :class:`dim_type`. - """ - result = {} - - def set_dim_name(name, tp, idx): - if name in result: - raise RuntimeError(f"non-unique var name '{name}' encountered") - result[name] = tp, idx - - if dimtype is None: - types = _CHECK_DIM_TYPES - if ignore_out: - types = types[:] - types.remove(dim_type.out) - else: - types = [dimtype] - - for tp in types: - for i in range(self.dim(tp)): - name = self.get_dim_name(tp, i) - if name is not None: - set_dim_name(name, tp, i) - - return result - - def space_create_from_names(ctx, set=None, in_=None, out=None, params=()): - """Create a :class:`Space` from lists of variable names. - - :param set_: names of `set`-type variables. - :param in_: names of `in`-type variables. - :param out: names of `out`-type variables. - :param params: names of parameter-type variables. - """ - dt = dim_type - - if set is not None: - if in_ is not None or out is not None: - raise RuntimeError("must pass only one of set / (in_,out)") - - result = Space.set_alloc(ctx, nparam=len(params), - dim=len(set)) - - for i, name in enumerate(set): - result = result.set_dim_name(dt.set, i, name) - - elif in_ is not None and out is not None: - if set is not None: - raise RuntimeError("must pass only one of set / (in_,out)") - - result = Space.alloc(ctx, nparam=len(params), - n_in=len(in_), n_out=len(out)) - - for i, name in enumerate(in_): - result = result.set_dim_name(dt.in_, i, name) - - for i, name in enumerate(out): - result = result.set_dim_name(dt.out, i, name) - else: - raise RuntimeError("invalid parameter combination") - - for i, name in enumerate(params): - result = result.set_dim_name(dt.param, i, name) - - return result - - Space.create_from_names = staticmethod(space_create_from_names) - Space.get_var_dict = space_get_var_dict - Space.get_id_dict = space_get_id_dict - - # }}} - - # {{{ coefficient wrangling - - def obj_set_coefficients(self, dim_tp, args): - """ - :param dim_tp: :class:`dim_type` - :param args: :class:`list` of coefficients, for indices `0..len(args)-1`. - - .. versionchanged:: 2011.3 - New for :class:`Aff` - """ - for i, coeff in enumerate(args): - self = self.set_coefficient_val(dim_tp, i, coeff) - - return self - - def obj_set_coefficients_by_name(self, iterable, name_to_dim=None): - """Set the coefficients and the constant. - - :param iterable: a :class:`dict` or iterable of :class:`tuple` - instances mapping variable names to their coefficients. - The constant is set to the value of the key '1'. - - .. 
versionchanged:: 2011.3 - New for :class:`Aff` - """ - try: - iterable = list(iterable.items()) - except AttributeError: - pass - - if name_to_dim is None: - name_to_dim = self.get_space().get_var_dict() - - for name, coeff in iterable: - if name == 1: - self = self.set_constant_val(coeff) - else: - tp, idx = name_to_dim[name] - self = self.set_coefficient_val(tp, idx, coeff) - - return self - - def obj_get_coefficients_by_name(self, dimtype=None, dim_to_name=None): - """Return a dictionary mapping variable names to coefficients. - - :param dimtype: None to get all variables, otherwise - one of :class:`dim_type`. - - .. versionchanged:: 2011.3 - New for :class:`Aff` - """ - if dimtype is None: - types = _CHECK_DIM_TYPES - else: - types = [dimtype] - - result = {} - for tp in types: - for i in range(self.get_space().dim(tp)): - coeff = self.get_coefficient_val(tp, i) - if coeff: - if dim_to_name is None: - name = self.get_dim_name(tp, i) - else: - name = dim_to_name[tp, i] - - result[name] = coeff - - const = self.get_constant_val() - if const: - result[1] = const - - return result - - for coeff_class in [Constraint, Aff]: - coeff_class.set_coefficients = obj_set_coefficients - coeff_class.set_coefficients_by_name = obj_set_coefficients_by_name - coeff_class.get_coefficients_by_name = obj_get_coefficients_by_name - - # }}} - - # {{{ Id - - Id.user = property(Id.get_user) - Id.name = property(Id.get_name) - - # }}} - - # {{{ Constraint - - def eq_from_names(space, coefficients=None): - """Create a constraint `const + coeff_1*var_1 +... == 0`. - - :param space: :class:`Space` - :param coefficients: a :class:`dict` or iterable of :class:`tuple` - instances mapping variable names to their coefficients - The constant is set to the value of the key '1'. - - .. versionchanged:: 2011.3 - Eliminated the separate *const* parameter. - """ - if coefficients is None: - coefficients = {} - c = Constraint.equality_alloc(space) - return c.set_coefficients_by_name(coefficients) - - def ineq_from_names(space, coefficients=None): - """Create a constraint `const + coeff_1*var_1 +... >= 0`. - - :param space: :class:`Space` - :param coefficients: a :class:`dict` or iterable of :class:`tuple` - instances mapping variable names to their coefficients - The constant is set to the value of the key '1'. - - .. versionchanged:: 2011.3 - Eliminated the separate *const* parameter. 
- """ - if coefficients is None: - coefficients = {} - c = Constraint.inequality_alloc(space) - return c.set_coefficients_by_name(coefficients) - - Constraint.eq_from_names = staticmethod(eq_from_names) - Constraint.ineq_from_names = staticmethod(ineq_from_names) - - # }}} - - def basic_obj_get_constraints(self): - """Get a list of constraints.""" - result = [] - self.foreach_constraint(result.append) - return result - - # {{{ BasicSet - - BasicSet.get_constraints = basic_obj_get_constraints - - # }}} - - # {{{ BasicMap - - BasicMap.get_constraints = basic_obj_get_constraints - - # }}} - - # {{{ Set - - def set_get_basic_sets(self): - """Get the list of :class:`BasicSet` instances in this :class:`Set`.""" - result = [] - self.foreach_basic_set(result.append) - return result - - Set.get_basic_sets = set_get_basic_sets - BasicSet.get_basic_sets = set_get_basic_sets - - # }}} - - # {{{ Map - - def map_get_basic_maps(self): - """Get the list of :class:`BasicMap` instances in this :class:`Map`.""" - result = [] - self.foreach_basic_map(result.append) - return result - - Map.get_basic_maps = map_get_basic_maps - - # }}} - - # {{{ common functionality - - def obj_get_id_dict(self, dimtype=None): - """Return a dictionary mapping :class:`Id` instances to tuples of - (:class:`dim_type`, index). - - :param dimtype: None to get all variables, otherwise - one of :class:`dim_type`. - """ - return self.get_space().get_id_dict(dimtype) - - @_memoize_on_first_arg - def obj_get_var_dict(self, dimtype=None): - """Return a dictionary mapping variable names to tuples of - (:class:`dim_type`, index). - - :param dimtype: None to get all variables, otherwise - one of :class:`dim_type`. - """ - return self.get_space().get_var_dict( - dimtype, ignore_out=isinstance(self, EXPR_CLASSES)) - - def obj_get_var_ids(self, dimtype): - """Return a list of :class:`Id` instances for :class:`dim_type` *dimtype*.""" - return [self.get_dim_name(dimtype, i) for i in range(self.dim(dimtype))] - - @_memoize_on_first_arg - def obj_get_var_names(self, dimtype): - """Return a list of dim names (in order) for :class:`dim_type` *dimtype*.""" - return [self.get_dim_name(dimtype, i) for i in range(self.dim(dimtype))] - - for cls in ALL_CLASSES: - if hasattr(cls, "get_space") and cls is not Space: - cls.get_id_dict = obj_get_id_dict - cls.get_var_dict = obj_get_var_dict - cls.get_var_ids = obj_get_var_ids - cls.get_var_names = obj_get_var_names - cls.space = property(cls.get_space) - - # }}} - - # {{{ piecewise - - def pwaff_get_pieces(self) -> Sequence[tuple[Set, Aff]]: - result = [] - - def append_tuple(*args): - result.append(args) - - self.foreach_piece(append_tuple) - return result - - def pwqpolynomial_get_pieces(self): - """ - :return: list of (:class:`Set`, :class:`QPolynomial`) - """ - - result = [] - - def append_tuple(*args): - result.append(args) - - self.foreach_piece(append_tuple) - return result - - def pw_get_aggregate_domain(self): - """ - :return: a :class:`Set` that is the union of the domains of all pieces - """ - - result = Set.empty(self.get_domain_space()) - for dom, _ in self.get_pieces(): - result = result.union(dom) - - return result - - PwAff.get_pieces = pwaff_get_pieces - Aff.get_pieces = pwaff_get_pieces - PwAff.get_aggregate_domain = pw_get_aggregate_domain - - PwQPolynomial.get_pieces = pwqpolynomial_get_pieces - PwQPolynomial.get_aggregate_domain = pw_get_aggregate_domain - - # }}} - - # {{{ QPolynomial - - def qpolynomial_get_terms(self): - """Get the list of :class:`Term` instances in this 
:class:`QPolynomial`.""" - result = [] - self.foreach_term(result.append) - return result - - QPolynomial.get_terms = qpolynomial_get_terms - - # }}} - - # {{{ PwQPolynomial - - def pwqpolynomial_eval_with_dict(self, value_dict): - """Evaluates *self* for the parameters specified by - *value_dict*, which maps parameter names to their values. - """ - - pt = Point.zero(self.space.params()) - - for i in range(self.space.dim(dim_type.param)): - par_name = self.space.get_dim_name(dim_type.param, i) - pt = pt.set_coordinate_val( - dim_type.param, i, value_dict[par_name]) - - return self.eval(pt).to_python() - - PwQPolynomial.eval_with_dict = pwqpolynomial_eval_with_dict - - # }}} - - # {{{ arithmetic - - def _number_to_expr_like(template, num): - number_aff = Aff.zero_on_domain(template.get_domain_space()) - number_aff = number_aff.set_constant_val(num) - - if isinstance(template, Aff): - return number_aff - if isinstance(template, QPolynomial): - return QPolynomial.from_aff(number_aff) - - # everything else is piecewise - - if template.get_pieces(): - number_pw_aff = PwAff.empty(template.get_space()) - for set, _ in template.get_pieces(): - number_pw_aff = set.indicator_function().cond( - number_aff, number_pw_aff) - else: - number_pw_aff = PwAff.alloc( - Set.universe(template.domain().space), - number_aff) - - if isinstance(template, PwAff): - return number_pw_aff - - elif isinstance(template, PwQPolynomial): - return PwQPolynomial.from_pw_aff(number_pw_aff) - - else: - raise TypeError("unexpected template type") - - ARITH_CLASSES = (Aff, PwAff, QPolynomial, PwQPolynomial) # noqa - - def expr_like_add(self, other): - if not isinstance(other, ARITH_CLASSES): - other = _number_to_expr_like(self, other) - - try: - return self.add(other) - except TypeError: - return NotImplemented - - def expr_like_sub(self, other): - if not isinstance(other, ARITH_CLASSES): - other = _number_to_expr_like(self, other) - - try: - return self.sub(other) - except TypeError: - return NotImplemented - - def expr_like_rsub(self, other): - if not isinstance(other, ARITH_CLASSES): - other = _number_to_expr_like(self, other) - - return -self + other - - def expr_like_mul(self, other): - if not isinstance(other, ARITH_CLASSES): - other = _number_to_expr_like(self, other) - - try: - return self.mul(other) - except TypeError: - return NotImplemented - - def expr_like_floordiv(self, other): - return self.scale_down_val(other).floor() - - for expr_like_class in ARITH_CLASSES: - expr_like_class.__add__ = expr_like_add - expr_like_class.__radd__ = expr_like_add - expr_like_class.__sub__ = expr_like_sub - expr_like_class.__rsub__ = expr_like_rsub - expr_like_class.__mul__ = expr_like_mul - expr_like_class.__rmul__ = expr_like_mul - expr_like_class.__neg__ = expr_like_class.neg - - for qpoly_class in [QPolynomial, PwQPolynomial]: - qpoly_class.__pow__ = qpoly_class.pow - - for aff_class in [Aff, PwAff]: - aff_class.__mod__ = aff_class.mod_val - aff_class.__floordiv__ = expr_like_floordiv - - # }}} - - # {{{ Val - - def val_rsub(self, other): - return -self + other - - def val_bool(self): - return not self.is_zero() - - def val_repr(self): - return f'{type(self).__name__}("{self.to_str()}")' - - def val_to_python(self): - if not self.is_int(): - raise ValueError("can only convert integer Val to python") - - return int(self.to_str()) - - Val.__add__ = Val.add - Val.__radd__ = Val.add - Val.__sub__ = Val.sub - Val.__rsub__ = val_rsub - Val.__mul__ = Val.mul - Val.__rmul__ = Val.mul - Val.__neg__ = Val.neg - Val.__mod__ = Val.mod - 
Val.__bool__ = Val.__nonzero__ = val_bool - - Val.__lt__ = Val.lt - Val.__gt__ = Val.gt - Val.__le__ = Val.le - Val.__ge__ = Val.ge - Val.__eq__ = Val.eq - Val.__ne__ = Val.ne - - Val.__repr__ = val_repr - Val.__str__ = Val.to_str - Val.to_python = val_to_python - - # }}} - - # {{{ rich comparisons - - def obj_eq(self, other): - assert self.get_ctx() == other.get_ctx(), ( - "Equality-comparing two objects from different ISL Contexts " - "will likely lead to entertaining (but never useful) results. " - "In particular, Spaces with matching names will no longer be " - "equal.") - - return self.is_equal(other) - - def obj_ne(self, other): - return not self.__eq__(other) - - for cls in ALL_CLASSES: - if hasattr(cls, "is_equal"): - cls.__eq__ = obj_eq - cls.__ne__ = obj_ne - - def obj_lt(self, other): - return self.is_strict_subset(other) - - def obj_le(self, other): - return self.is_subset(other) - - def obj_gt(self, other): - return other.is_strict_subset(self) - - def obj_ge(self, other): - return other.is_subset(self) - - for cls in [BasicSet, BasicMap, Set, Map]: - cls.__lt__ = obj_lt - cls.__le__ = obj_le - cls.__gt__ = obj_gt - cls.__ge__ = obj_ge - - # }}} - - # {{{ project_out_except - - def obj_project_out_except(obj, names, types): - """ - :param types: list of :class:`dim_type` determining - the types of axes to project out - :param names: names of axes matching the above which - should be left alone by the projection - - .. versionadded:: 2011.3 - """ - - for tp in types: - while True: - space = obj.get_space() - var_dict = space.get_var_dict(tp) - - all_indices = set(range(space.dim(tp))) - leftover_indices = {var_dict[name][1] for name in names - if name in var_dict} - project_indices = all_indices-leftover_indices - if not project_indices: - break - - min_index = min(project_indices) - count = 1 - while min_index+count in project_indices: - count += 1 - - obj = obj.project_out(tp, min_index, count) - - return obj - - # }}} - - # {{{ eliminate_except - - def obj_eliminate_except(obj, names, types): - """ - :param types: list of :class:`dim_type` determining - the types of axes to eliminate - :param names: names of axes matching the above which - should be left alone by the eliminate - - .. versionadded:: 2011.3 - """ - - for tp in types: - space = obj.get_space() - var_dict = space.get_var_dict(tp) - to_eliminate = ( - set(range(space.dim(tp))) - - {var_dict[name][1] for name in names - if name in var_dict}) - - while to_eliminate: - min_index = min(to_eliminate) - count = 1 - while min_index+count in to_eliminate: - count += 1 - - obj = obj.eliminate(tp, min_index, count) - - to_eliminate -= set(range(min_index, min_index+count)) - - return obj - - # }}} - - # {{{ add_constraints - - def obj_add_constraints(obj, constraints): - """ - .. 
versionadded:: 2011.3 - """ - - for cns in constraints: - obj = obj.add_constraint(cns) - - return obj - - # }}} - - for c in [BasicSet, BasicMap, Set, Map]: - c.project_out_except = obj_project_out_except - c.add_constraints = obj_add_constraints - - for c in [BasicSet, Set]: - c.eliminate_except = obj_eliminate_except - - -_add_functionality() - - def _back_to_basic(new_obj, old_obj): # Work around set_dim_id not being available for Basic{Set,Map} if isinstance(old_obj, BasicSet) and isinstance(new_obj, Set): @@ -1107,7 +272,7 @@ def align_spaces(obj, template, obj_bigger_ok=False, across_dim_types=None): template = type(template).from_params(template) if isinstance(template, EXPR_CLASSES): - dim_types = _CHECK_DIM_TYPES[:] + dim_types = list(_CHECK_DIM_TYPES) dim_types.remove(dim_type.out) else: dim_types = _CHECK_DIM_TYPES @@ -1188,7 +353,7 @@ def make_zero_and_vars(set_vars, params=(), ctx=None): return affs_from_space(space) -def affs_from_space(space): +def affs_from_space(space: Space) -> dict[Literal[0] | str, PwAff]: """ :return: a dictionary from variable names (in *set_vars* and *params*) to :class:`PwAff` instances that represent each of the @@ -1228,19 +393,6 @@ def affs_from_space(space): return result -# {{{ give sphinx something to import so we can produce docs - -def _define_doc_link_names(): - class Div: - pass - - _isl.Div = Div - - -_define_doc_link_names() - -# }}} - __all__ = ( "AccessInfo", "Aff", diff --git a/islpy/_monkeypatch.py b/islpy/_monkeypatch.py new file mode 100644 index 00000000..473431cf --- /dev/null +++ b/islpy/_monkeypatch.py @@ -0,0 +1,1015 @@ +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Literal, + Protocol, + TypeAlias, + TypeVar, + cast, +) + + +if TYPE_CHECKING: + import islpy._isl as _isl +else: + import sys + if "_isl" not in sys.modules: + import islpy._isl as _isl + else: + # This is used for monkeypatching during stub generation. + # See stubgen/stubgen.py and CMakeLists for orchestration details. + import _isl + + +ALL_CLASSES: tuple[type, ...] = tuple( + getattr(_isl, cls) for cls in dir(_isl) if cls[0].isupper()) +EXPR_CLASSES: tuple[type, ...] = tuple(cls for cls in ALL_CLASSES + if "Aff" in cls.__name__ or "Polynomial" in cls.__name__) +ARITH_CLASSES: tuple[type, ...] = ( + _isl.Aff, _isl.PwAff, _isl.QPolynomial, _isl.PwQPolynomial) + +_CHECK_DIM_TYPES: tuple[_isl.dim_type, ...] 
= ( + _isl.dim_type.in_, _isl.dim_type.param, _isl.dim_type.set) + + +# {{{ typing helpers + +SelfT = TypeVar("SelfT") + +BasicT = TypeVar("BasicT", _isl.BasicSet, _isl.BasicMap) + +AffOrConstraintT = TypeVar("AffOrConstraintT", _isl.Aff, _isl.Constraint) +AffLikeT = TypeVar("AffLikeT", _isl.Aff, _isl.PwAff) +ExprLike: TypeAlias = _isl.Aff | _isl.PwAff | _isl.QPolynomial | _isl.PwQPolynomial +ExprLikeT = TypeVar("ExprLikeT", _isl.Aff, _isl.PwAff, + _isl.QPolynomial, _isl.PwQPolynomial + ) +SetLikeT = TypeVar("SetLikeT", bound=_isl.BasicSet | _isl.Set) +SetOrMap: TypeAlias = _isl.BasicSet | _isl.Set | _isl.BasicMap | _isl.Map +SetOrMapT = TypeVar("SetOrMapT", bound=SetOrMap) + +HasSpace: TypeAlias = ( + _isl.Space + | _isl.Aff + | _isl.BasicMap + | _isl.BasicSet + | _isl.Constraint + | _isl.LocalSpace + | _isl.Map + | _isl.MultiAff + | _isl.MultiId + | _isl.MultiPwAff + | _isl.MultiUnionPwAff + | _isl.MultiVal + | _isl.Point + | _isl.PwAff + | _isl.PwMultiAff + | _isl.PwQPolynomial + | _isl.PwQPolynomialFold + | _isl.QPolynomial + | _isl.QPolynomialFold + | _isl.Set + | _isl.UnionMap + | _isl.UnionPwAff + | _isl.UnionPwMultiAff + | _isl.UnionPwQPolynomial + | _isl.UnionPwQPolynomialFold + | _isl.UnionSet + ) + + +class IslObject(Protocol): + def get_ctx(self) -> _isl.Context: + ... + + def _wraps_same_instance_as(self, other: object) -> bool: + ... + + _base_name: ClassVar[str] + +# }}} + + +# {{{ copied verbatim from pytools to avoid numpy/pytools dependency + +F = TypeVar("F", bound=Callable[..., Any]) + + +class _HasKwargs: + pass + + +def _memoize_on_first_arg(function: F, cache_dict_name: str | None = None) -> F: + """Like :func:`memoize_method`, but for functions that take the object + in which do memoization information is stored as first argument. + + Supports cache deletion via ``function_name.clear_cache(self)``. + """ + from sys import intern + + if cache_dict_name is None: + cache_dict_name = intern( + f"_memoize_dic_{function.__module__}{function.__name__}" + ) + + def wrapper(obj, *args, **kwargs): + if kwargs: + key = (_HasKwargs, frozenset(kwargs.items()), *args) + else: + key = args + + try: + return getattr(obj, cache_dict_name)[key] + except AttributeError: + attribute_error = True + except KeyError: + attribute_error = False + + result = function(obj, *args, **kwargs) + if attribute_error: + object.__setattr__(obj, cache_dict_name, {key: result}) + return result + else: + getattr(obj, cache_dict_name)[key] = result + return result + + def clear_cache(obj): + object.__delattr__(obj, cache_dict_name) + + from functools import update_wrapper + new_wrapper = update_wrapper(wrapper, function) + + # type-ignore because mypy has a point here, stuffing random attributes + # into the function's dict is moderately sketchy. + new_wrapper.clear_cache = clear_cache # type: ignore[attr-defined] + + return cast("F", new_wrapper) + +# }}} + + +def _read_from_str_wrapper(cls, context, s, dims_with_apostrophes): + """A callable to reconstitute instances from strings for the benefit + of Python's ``__reduce__`` protocol. 
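+
+    Besides parsing *s*, this also restores dimension names containing
+    apostrophes (recorded in *dims_with_apostrophes*), which the string
+    round-trip would otherwise lose.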
+ """ + cls_from_str = cls.read_from_str(context, s) + + # Apostrophes in dim names have been lost, put them back + for dim_name, (dt, dim_idx) in dims_with_apostrophes.items(): + cls_from_str = cls_from_str.set_dim_name(dt, dim_idx, dim_name) + + return cls_from_str + + +def dim_type_reduce(self: _isl.dim_type): + return (_isl.dim_type, (int(self),)) + + +def context_reduce(self: _isl.Context): + from islpy import DEFAULT_CONTEXT, _get_default_context + if self._wraps_same_instance_as(DEFAULT_CONTEXT): + return (_get_default_context, ()) + else: + return (_isl.Context, ()) + + +def context_eq(self: IslObject, other: object): + return isinstance(other, _isl.Context) and self._wraps_same_instance_as(other) + + +def context_ne(self: object, other: object) -> bool: + return not self.__eq__(other) + + +def generic_reduce(self: IslObject): + ctx = self.get_ctx() + prn = _isl.Printer.to_str(ctx) + prn = getattr(prn, f"print_{self._base_name}")(self) + + # Reconstructing from string will remove apostrophes in dim names, + # so keep track of dim names with apostrophes + dims_with_apostrophes = { + dname: pos for dname, pos in self.get_var_dict().items() + if "'" in dname} + + return ( + _read_from_str_wrapper, + (type(self), ctx, prn.get_str(), dims_with_apostrophes)) + + +def generic_str(self: IslObject) -> str: + prn = _isl.Printer.to_str(self.get_ctx()) + getattr(prn, f"print_{self._base_name}")(self) + return prn.get_str() + + +def generic_repr(self: IslObject) -> str: + prn = _isl.Printer.to_str(self.get_ctx()) + getattr(prn, f"print_{self._base_name}")(self) + return f'{type(self).__name__}("{prn.get_str()}")' + + +def space_get_id_dict( + self: _isl.Space, + dimtype: _isl.dim_type | None = None + ) -> Mapping[_isl.Id, tuple[_isl.dim_type, int]]: + """Return a dictionary mapping variable :class:`Id` instances to tuples + of (:class:`dim_type`, index). + + :param dimtype: None to get all variables, otherwise + one of :class:`dim_type`. + """ + result = {} + + def set_dim_id(name, tp, idx): + if name in result: + raise RuntimeError(f"non-unique var id '{name}' encountered") + result[name] = tp, idx + + if dimtype is None: + types = _CHECK_DIM_TYPES + else: + types = [dimtype] + + for tp in types: + for i in range(self.dim(tp)): + name = self.get_dim_id(tp, i) + if name is not None: + set_dim_id(name, tp, i) + + return result + + +def space_get_var_dict( + self: _isl.Space, + dimtype: _isl.dim_type | None = None, + ignore_out: bool = False + ) -> Mapping[str, tuple[_isl.dim_type, int]]: + """Return a dictionary mapping variable names to tuples of + (:class:`dim_type`, index). + + :param dimtype: None to get all variables, otherwise + one of :class:`dim_type`. + """ + result: dict[str, tuple[_isl.dim_type, int]] = {} + + def set_dim_name(name: str, tp: _isl.dim_type, idx: int): + if name in result: + raise RuntimeError(f"non-unique var name '{name}' encountered") + result[name] = tp, idx + + if dimtype is None: + types = list(_CHECK_DIM_TYPES) + if ignore_out: + types = types[:] + types.remove(_isl.dim_type.out) + else: + types = [dimtype] + + for tp in types: + for i in range(self.dim(tp)): + name = self.get_dim_name(tp, i) + if name is not None: + set_dim_name(name, tp, i) + + return result + + +def space_create_from_names( + ctx: _isl.Context, + set: Sequence[str] | None = None, + in_: Sequence[str] | None = None, + out: Sequence[str] | None = None, + params: Sequence[str] = () + ) -> _isl.Space: + """Create a :class:`Space` from lists of variable names. 
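+    Exactly one of *set* or the pair (*in_*, *out*) must be supplied.
+    For example (variable names purely illustrative)::
+
+        space = Space.create_from_names(ctx, set=["i", "j"], params=["n"])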
+ + :param set_: names of `set`-type variables. + :param in_: names of `in`-type variables. + :param out: names of `out`-type variables. + :param params: names of parameter-type variables. + """ + dt = _isl.dim_type + + if set is not None: + if in_ is not None or out is not None: + raise RuntimeError("must pass only one of set / (in_,out)") + + result = _isl.Space.set_alloc(ctx, nparam=len(params), + dim=len(set)) + + for i, name in enumerate(set): + result = result.set_dim_name(dt.set, i, name) + + elif in_ is not None and out is not None: + if set is not None: + raise RuntimeError("must pass only one of set / (in_,out)") + + result = _isl.Space.alloc(ctx, nparam=len(params), + n_in=len(in_), n_out=len(out)) + + for i, name in enumerate(in_): + result = result.set_dim_name(dt.in_, i, name) + + for i, name in enumerate(out): + result = result.set_dim_name(dt.out, i, name) + else: + raise RuntimeError("invalid parameter combination") + + for i, name in enumerate(params): + result = result.set_dim_name(dt.param, i, name) + + return result + + +def obj_or(self: SetOrMapT, other: object) -> SetOrMapT: + try: + return self.union(other) + except TypeError: + return NotImplemented + + +def obj_and(self: SetOrMapT, other: object) -> SetOrMapT: + try: + return self.intersect(other) + except TypeError: + return NotImplemented + + +def obj_sub(self: SetOrMapT, other: object) -> SetOrMapT: + try: + return self.subtract(other) + except TypeError: + return NotImplemented + + +def obj_set_coefficients( + self: AffOrConstraintT, + dim_tp: _isl.dim_type, + args: Sequence[_isl.Val | int], + ) -> AffOrConstraintT: + """ + :param dim_tp: :class:`dim_type` + :param args: :class:`list` of coefficients, for indices `0..len(args)-1`. + + .. versionchanged:: 2011.3 + New for :class:`Aff` + """ + for i, coeff in enumerate(args): + self = self.set_coefficient_val(dim_tp, i, coeff) + + return self + + +def obj_set_coefficients_by_name( + self: AffOrConstraintT, + iterable: Iterable[tuple[str | Literal[1], _isl.Val | int]] + | Mapping[str | Literal[1], _isl.Val | int], + name_to_dim: Mapping[str, tuple[_isl.dim_type, int]] | None = None, + ) -> AffOrConstraintT: + """Set the coefficients and the constant. + + :param iterable: a :class:`dict` or iterable of :class:`tuple` + instances mapping variable names to their coefficients. + The constant is set to the value of the key '1'. + + .. versionchanged:: 2011.3 + New for :class:`Aff` + """ + try: + coeff_iterable: Iterable[tuple[str | Literal[1], _isl.Val | int]] = \ + list(iterable.items()) + except AttributeError: + coeff_iterable = \ + cast("Iterable[tuple[str | Literal[1], _isl.Val | int]]", iterable) + + if name_to_dim is None: + name_to_dim = obj_get_var_dict(self) + + for name, coeff in coeff_iterable: + if name == 1: + self = self.set_constant_val(coeff) + else: + assert name + tp, idx = name_to_dim[name] + self = self.set_coefficient_val(tp, idx, coeff) + + return self + + +def obj_get_coefficients_by_name( + self: _isl.Constraint | _isl.Aff, + dimtype: _isl.dim_type | None = None, + dim_to_name: Mapping[tuple[_isl.dim_type, int], str] | None = None, + ) -> dict[str | Literal[1], _isl.Val]: + """Return a dictionary mapping variable names to coefficients. + + :param dimtype: None to get all variables, otherwise + one of :class:`dim_type`. + + .. 
versionchanged:: 2011.3 + New for :class:`Aff` + """ + if dimtype is None: + types: Sequence[_isl.dim_type] = _CHECK_DIM_TYPES + else: + types = [dimtype] + + result: dict[Literal[1] | str, _isl.Val] = {} + for tp in types: + for i in range(self.get_space().dim(tp)): + coeff = self.get_coefficient_val(tp, i) + if coeff: + if dim_to_name is None: + name = self.get_dim_name(tp, i) + assert name + else: + name = dim_to_name[tp, i] + + result[name] = coeff + + const = self.get_constant_val() + if const: + result[1] = const + + return result + + +def eq_from_names( + space: _isl.Space, + coefficients: Mapping[str | Literal[1], _isl.Val | int] | None = None + ) -> _isl.Constraint: + """Create a constraint `const + coeff_1*var_1 +... == 0`. + + :param space: :class:`Space` + :param coefficients: a :class:`dict` or iterable of :class:`tuple` + instances mapping variable names to their coefficients + The constant is set to the value of the key '1'. + + .. versionchanged:: 2011.3 + Eliminated the separate *const* parameter. + """ + if coefficients is None: + coefficients = {} + c = _isl.Constraint.equality_alloc(space) + return obj_set_coefficients_by_name(c, coefficients) + + +def ineq_from_names( + space: _isl.Space, + coefficients: Mapping[str | Literal[1], _isl.Val | int] | None = None + ) -> _isl.Constraint: + """Create a constraint `const + coeff_1*var_1 +... >= 0`. + + :param space: :class:`Space` + :param coefficients: a :class:`dict` or iterable of :class:`tuple` + instances mapping variable names to their coefficients + The constant is set to the value of the key '1'. + + .. versionchanged:: 2011.3 + Eliminated the separate *const* parameter. + """ + if coefficients is None: + coefficients = {} + c = _isl.Constraint.inequality_alloc(space) + return obj_set_coefficients_by_name(c, coefficients) + + +def basic_obj_get_constraints( + self: _isl.BasicSet | _isl.BasicMap + ) -> list[_isl.Constraint]: + """Get a list of constraints.""" + result: list[_isl.Constraint] = [] + self.foreach_constraint(result.append) + return result + + +def set_get_basic_sets(self: _isl.Set | _isl.BasicSet) -> list[_isl.BasicSet]: + """Get the list of :class:`BasicSet` instances in this :class:`Set`.""" + result: list[_isl.BasicSet] = [] + self.foreach_basic_set(result.append) + return result + + +def map_get_basic_maps(self: _isl.Map) -> list[_isl.BasicMap]: + """Get the list of :class:`BasicMap` instances in this :class:`Map`.""" + result: list[_isl.BasicMap] = [] + self.foreach_basic_map(result.append) + return result + + +def obj_get_id_dict( + self: HasSpace, + dimtype: _isl.dim_type | None = None + ) -> Mapping[_isl.Id, tuple[_isl.dim_type, int]]: + """Return a dictionary mapping :class:`Id` instances to tuples of + (:class:`dim_type`, index). + + :param dimtype: None to get all variables, otherwise + one of :class:`dim_type`. + """ + return self.get_space().get_id_dict(dimtype) + + +@_memoize_on_first_arg +def obj_get_var_dict( + self: HasSpace, + dimtype: _isl.dim_type | None = None + ) -> Mapping[str, tuple[_isl.dim_type, int]]: + """Return a dictionary mapping variable names to tuples of + (:class:`dim_type`, index). + + :param dimtype: None to get all variables, otherwise + one of :class:`dim_type`. 
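+
+    For instance, for a set ``[n] -> { [i, j] : ... }``, the result is
+    roughly::
+
+        {"n": (dim_type.param, 0), "i": (dim_type.set, 0), "j": (dim_type.set, 1)}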
+ """ + return self.get_space().get_var_dict( + dimtype, ignore_out=isinstance(self, EXPR_CLASSES)) + + +def obj_get_var_ids( + self: HasSpace, + dimtype: _isl.dim_type + ) -> Sequence[str]: + """Return a list of :class:`Id` instances for :class:`dim_type` *dimtype*.""" + return [self.get_dim_name(dimtype, i) for i in range(self.dim(dimtype))] + + +@_memoize_on_first_arg +def obj_get_var_names(self: HasSpace, dimtype: _isl.dim_type) -> Sequence[str]: + """Return a list of dim names (in order) for :class:`dim_type` *dimtype*.""" + return [self.get_dim_name(dimtype, i) + for i in range(self.dim(dimtype))] + + +def pwaff_get_pieces(self: _isl.PwAff | _isl.Aff) -> list[tuple[_isl.Set, _isl.Aff]]: + result: list[tuple[_isl.Set, _isl.Aff]] = [] + + def append_tuple(s: _isl.Set, v: _isl.Aff): + result.append((s, v)) + + self.foreach_piece(append_tuple) + return result + + +def pwqpolynomial_get_pieces( + self: _isl.PwQPolynomial + ) -> list[tuple[_isl.Set, _isl.QPolynomial]]: + """ + :return: list of (:class:`Set`, :class:`QPolynomial`) + """ + + result: list[tuple[_isl.Set, _isl.QPolynomial]] = [] + + def append_tuple(s: _isl.Set, v: _isl.QPolynomial): + result.append((s, v)) + + self.foreach_piece(append_tuple) + return result + + +def pw_get_aggregate_domain(self: _isl.PwAff | _isl.PwQPolynomial) -> _isl.Set: + """ + :return: a :class:`Set` that is the union of the domains of all pieces + """ + + result = _isl.Set.empty(self.get_domain_space()) + for dom, _ in self.get_pieces(): + result = result.union(cast("_isl.Set", dom)) + + return result + + +def qpolynomial_get_terms(self: _isl.QPolynomial) -> list[_isl.Term]: + """Get the list of :class:`Term` instances in this :class:`QPolynomial`.""" + result: list[_isl.Term] = [] + self.foreach_term(result.append) + return result + + +def pwqpolynomial_eval_with_dict( + self: _isl.PwQPolynomial, + value_dict: Mapping[str, int | _isl.Val] + ) -> int: + """Evaluates *self* for the parameters specified by + *value_dict*, which maps parameter names to their values. 
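+
+    For example, given a :class:`PwQPolynomial` ``pwqp`` in a single
+    parameter ``n`` (names purely illustrative)::
+
+        value = pwqp.eval_with_dict({"n": 10})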
+ """ + + pt = _isl.Point.zero(self.space.params()) + + for i in range(self.space.dim(_isl.dim_type.param)): + par_name = self.space.get_dim_name(_isl.dim_type.param, i) + assert par_name + pt = pt.set_coordinate_val( + _isl.dim_type.param, i, value_dict[par_name]) + + return self.eval(pt).to_python() + + +def _number_to_expr_like(template: ExprLikeT, num: int | _isl.Val) -> ExprLikeT: + number_aff = _isl.Aff.zero_on_domain(template.get_domain_space()) + number_aff = number_aff.set_constant_val(num) + + if isinstance(template, _isl.Aff): + return number_aff + if isinstance(template, _isl.QPolynomial): + return _isl.QPolynomial.from_aff(number_aff) + + # everything else is piecewise + + if template.get_pieces(): + number_pw_aff = _isl.PwAff.empty(template.get_space()) + for set, _ in template.get_pieces(): + number_pw_aff = set.indicator_function().cond( + number_aff, number_pw_aff) + else: + number_pw_aff = _isl.PwAff.alloc( + _isl.Set.universe(template.domain().space), + number_aff) + + if isinstance(template, _isl.PwAff): + return number_pw_aff + + elif isinstance(template, _isl.PwQPolynomial): + return _isl.PwQPolynomial.from_pw_aff(number_pw_aff) + + else: + raise TypeError("unexpected template type") + + +def expr_like_add(self: ExprLikeT, other: ExprLikeT | int | _isl.Val) -> ExprLikeT: + if not isinstance(other, ExprLike): + other = _number_to_expr_like(self, other) + + try: + return self.add(other) + except TypeError: + return NotImplemented + + +def expr_like_sub(self: ExprLikeT, other: ExprLikeT | int | _isl.Val): + if not isinstance(other, ExprLike): + other = _number_to_expr_like(self, other) + + try: + return self.sub(other) + except TypeError: + return NotImplemented + + +def expr_like_rsub(self: ExprLikeT, other: ExprLikeT | int | _isl.Val) -> ExprLikeT: + if not isinstance(other, ExprLike): + other = _number_to_expr_like(self, other) + + return -self + other + + +def expr_like_mul(self: ExprLikeT, other: ExprLikeT | int | _isl.Val) -> ExprLikeT: + if not isinstance(other, ExprLike): + other = _number_to_expr_like(self, other) + + try: + return self.mul(other) + except TypeError: + return NotImplemented + + +def expr_like_floordiv(self: AffLikeT, other: _isl.Val) -> AffLikeT: + return self.scale_down_val(other).floor() + + +def val_rsub(self: _isl.Val, other: _isl.Val) -> _isl.Val: + return -self + other + + +def val_bool(self: _isl.Val) -> bool: + return not self.is_zero() + + +def val_repr(self: _isl.Val) -> str: + return f'{type(self).__name__}("{self.to_str()}")' + + +def val_to_python(self: _isl.Val) -> int: + if not self.is_int(): + raise ValueError("can only convert integer Val to python") + + return int(self.to_str()) + + +def obj_eq(self: IslObject, other: object) -> bool: + assert self.get_ctx() == other.get_ctx(), ( + "Equality-comparing two objects from different ISL Contexts " + "will likely lead to entertaining (but never useful) results. 
" + "In particular, Spaces with matching names will no longer be " + "equal.") + + return self.is_equal(other) + + +def obj_ne(self: object, other: object) -> bool: + return not self.__eq__(other) + + +for cls in ALL_CLASSES: + if hasattr(cls, "is_equal"): + cls.__eq__ = obj_eq + cls.__ne__ = obj_ne + + +def obj_lt(self: SetOrMapT, other: SetOrMapT) -> bool: + return self.is_strict_subset(other) + + +def obj_le(self: SetOrMapT, other: SetOrMapT) -> bool: + return self.is_subset(other) + + +def obj_gt(self: SetOrMapT, other: SetOrMapT) -> bool: + return other.is_strict_subset(self) + + +def obj_ge(self: SetOrMapT, other: SetOrMapT) -> bool: + return other.is_subset(self) + + +# {{{ project_out_except + +def obj_project_out_except( + obj: SetLikeT, + names: Collection[str], + types: Collection[_isl.dim_type] + ) -> SetLikeT: + """ + :param types: list of :class:`dim_type` determining + the types of axes to project out + :param names: names of axes matching the above which + should be left alone by the projection + + .. versionadded:: 2011.3 + """ + + for tp in types: + while True: + space = obj.get_space() + var_dict = space.get_var_dict(tp) + + all_indices = set(range(space.dim(tp))) + leftover_indices = {var_dict[name][1] for name in names + if name in var_dict} + project_indices = all_indices-leftover_indices + if not project_indices: + break + + min_index = min(project_indices) + count = 1 + while min_index+count in project_indices: + count += 1 + + obj = obj.project_out(tp, min_index, count) + + return obj + +# }}} + + +# {{{ eliminate_except + +def obj_eliminate_except( + obj: SetLikeT, + names: Collection[str], + types: Collection[_isl.dim_type] + ) -> SetLikeT: + """ + :param types: list of :class:`dim_type` determining + the types of axes to eliminate + :param names: names of axes matching the above which + should be left alone by the eliminate + + .. versionadded:: 2011.3 + """ + + for tp in types: + space = obj.get_space() + var_dict = space.get_var_dict(tp) + to_eliminate = ( + set(range(space.dim(tp))) + - {var_dict[name][1] for name in names + if name in var_dict}) + + while to_eliminate: + min_index = min(to_eliminate) + count = 1 + while min_index+count in to_eliminate: + count += 1 + + obj = obj.eliminate(tp, min_index, count) + + to_eliminate -= set(range(min_index, min_index+count)) + + return obj + +# }}} + + +# {{{ add_constraints + +def obj_add_constraints(obj: BasicT, constraints: Iterable[_isl.Constraint]) -> BasicT: + """ + .. 
versionadded:: 2011.3 + """ + + for cns in constraints: + obj = obj.add_constraint(cns) + + return obj + +# }}} + + +def _add_functionality() -> None: + _isl.dim_type.__reduce__ = dim_type_reduce + + # {{{ Context + + _isl.Context.__reduce__ = context_reduce + _isl.Context.__eq__ = context_eq + _isl.Context.__ne__ = context_ne + + # }}} + + # {{{ generic initialization, pickling + + for cls in ALL_CLASSES: + if hasattr(cls, "read_from_str"): + cls.__reduce__ = generic_reduce + + # }}} + + # {{{ printing + + for cls in ALL_CLASSES: + if (hasattr(cls, "_base_name") + and hasattr(_isl.Printer, f"print_{cls._base_name}")): + cls.__str__ = generic_str + cls.__repr__ = generic_repr + + if not hasattr(cls, "__hash__"): + raise AssertionError(f"not hashable: {cls}") + + # }}} + + # {{{ Python set-like behavior + + for cls in [_isl.BasicSet, _isl.BasicMap, _isl.Set, _isl.Map]: + cls.__or__ = obj_or + cls.__ror__ = obj_or + cls.__and__ = obj_and + cls.__rand__ = obj_and + cls.__sub__ = obj_sub + + # }}} + + # {{{ Space + + _isl.Space.create_from_names = staticmethod(space_create_from_names) + _isl.Space.get_var_dict = space_get_var_dict + _isl.Space.get_id_dict = space_get_id_dict + + # }}} + + # {{{ coefficient wrangling + + for coeff_class in [_isl.Constraint, _isl.Aff]: + coeff_class.set_coefficients = obj_set_coefficients + coeff_class.set_coefficients_by_name = obj_set_coefficients_by_name + coeff_class.get_coefficients_by_name = obj_get_coefficients_by_name + + # }}} + + # {{{ Constraint + + _isl.Constraint.eq_from_names = staticmethod(eq_from_names) + _isl.Constraint.ineq_from_names = staticmethod(ineq_from_names) + + # }}} + + # {{{ BasicSet + + _isl.BasicSet.get_constraints = basic_obj_get_constraints + + # }}} + + # {{{ BasicMap + + _isl.BasicMap.get_constraints = basic_obj_get_constraints + + # }}} + + # {{{ Set + + _isl.Set.get_basic_sets = set_get_basic_sets + _isl.BasicSet.get_basic_sets = set_get_basic_sets + + # }}} + + # {{{ Map + + _isl.Map.get_basic_maps = map_get_basic_maps + + # }}} + + +# {{{ common functionality + +for cls in ALL_CLASSES: + if hasattr(cls, "get_space") and cls is not _isl.Space: + cls.get_id_dict = obj_get_id_dict + cls.get_var_dict = obj_get_var_dict + cls.get_var_ids = obj_get_var_ids + cls.get_var_names = obj_get_var_names + + # }}} + + # {{{ piecewise + + _isl.PwAff.get_pieces = pwaff_get_pieces + _isl.Aff.get_pieces = pwaff_get_pieces + _isl.PwAff.get_aggregate_domain = pw_get_aggregate_domain + + _isl.PwQPolynomial.get_pieces = pwqpolynomial_get_pieces + _isl.PwQPolynomial.get_aggregate_domain = pw_get_aggregate_domain + + # }}} + + _isl.QPolynomial.get_terms = qpolynomial_get_terms + + _isl.PwQPolynomial.eval_with_dict = pwqpolynomial_eval_with_dict + + # {{{ arithmetic + + for expr_like_class in ARITH_CLASSES: + expr_like_class.__add__ = expr_like_add + expr_like_class.__radd__ = expr_like_add + expr_like_class.__sub__ = expr_like_sub + expr_like_class.__rsub__ = expr_like_rsub + expr_like_class.__mul__ = expr_like_mul + expr_like_class.__rmul__ = expr_like_mul + expr_like_class.__neg__ = expr_like_class.neg + + for qpoly_class in [_isl.QPolynomial, _isl.PwQPolynomial]: + qpoly_class.__pow__ = qpoly_class.pow + + for aff_class in [_isl.Aff, _isl.PwAff]: + aff_class.__mod__ = aff_class.mod_val + aff_class.__floordiv__ = expr_like_floordiv + + # }}} + + # {{{ Val + + val_cls = _isl.Val + + val_cls.__add__ = val_cls.add + val_cls.__radd__ = val_cls.add + val_cls.__sub__ = val_cls.sub + val_cls.__rsub__ = val_rsub + val_cls.__mul__ = val_cls.mul + 
val_cls.__rmul__ = val_cls.mul + val_cls.__neg__ = val_cls.neg + val_cls.__mod__ = val_cls.mod + val_cls.__bool__ = val_cls.__nonzero__ = val_bool + + val_cls.__lt__ = val_cls.lt + val_cls.__gt__ = val_cls.gt + val_cls.__le__ = val_cls.le + val_cls.__ge__ = val_cls.ge + val_cls.__eq__ = val_cls.eq + val_cls.__ne__ = val_cls.ne + + val_cls.__repr__ = val_repr + val_cls.__str__ = val_cls.to_str + val_cls.to_python = val_to_python + + # }}} + + # {{{ rich comparisons + + for cls in [_isl.BasicSet, _isl.BasicMap, _isl.Set, _isl.Map]: + cls.__lt__ = obj_lt + cls.__le__ = obj_le + cls.__gt__ = obj_gt + cls.__ge__ = obj_ge + + # }}} + + for c in [_isl.BasicSet, _isl.BasicMap, _isl.Set, _isl.Map]: + c.project_out_except = obj_project_out_except + c.add_constraints = obj_add_constraints + + for c in [_isl.BasicSet, _isl.Set]: + c.eliminate_except = obj_eliminate_except + + +_add_functionality() From d012fbbd9ffbb71f16f471073ada23988d4547ff Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 14:07:31 -0500 Subject: [PATCH 18/33] Add a stub generator --- pyproject.toml | 5 ++- stubgen/stubgen.py | 84 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 stubgen/stubgen.py diff --git a/pyproject.toml b/pyproject.toml index 454a0299..e9b456a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,10 @@ known-local-folder = [ lines-after-imports = 2 [tool.ruff.lint.per-file-ignores] -"islpy/*.pyi" = ["N801", "E501", "I001", "F401", "E202"] +"islpy/*.pyi" = [ + "N801", "N802", "E501", "I001", "F401", "E202", "E203", "Q000", + "RUF012" +] [tool.cibuildwheel] # nanobind does not support Py<3.8 diff --git a/stubgen/stubgen.py b/stubgen/stubgen.py new file mode 100644 index 00000000..55be1876 --- /dev/null +++ b/stubgen/stubgen.py @@ -0,0 +1,84 @@ +import argparse +import importlib +import sys +from collections.abc import Callable +from pathlib import Path +from typing import TYPE_CHECKING, Any, cast + +from nanobind.stubgen import StubGen as StubGenBase +from typing_extensions import override + + +if TYPE_CHECKING: + import enum + + +class StubGen(StubGenBase): + # can be removed once https://github.com/wjakob/nanobind/pull/1055 is merged + @override + def put_function(self, + fn: Callable[..., Any], + name: str | None = None, + parent: object | None = None + ): + fn_module = getattr(fn, "__module__", None) + + if (name and fn_module + and fn_module != self.module.__name__ + and parent is not None): + self.import_object(fn_module, name=None) + rhs = f"{fn_module}.{fn.__qualname__}" + if type(fn) is staticmethod: + rhs = f"staticmethod({rhs})" + self.write_ln(f"{name} = {rhs}\n") + + return + + super().put_function(fn, name, parent) + + @override + def put(self, + value: object, + name: str | None = None, + parent: object | None = None + ) -> None: + if name == "in_" and parent and parent.__name__ == "dim_type": + # https://github.com/wjakob/nanobind/discussions/1066 + self.write_ln(f"{name} = {cast('enum.Enum', value).value}") + + super().put(value, name, parent) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-m", "--module", default="islpy._isl") + parser.add_argument("--exec", nargs="+") + parser.add_argument("--python-path", nargs="+") + parser.add_argument("-o", "--output-dir", default="../islpy") + args = parser.parse_args() + output_path = Path(cast("str", args.output_dir)) + + sys.path.extend(cast("list[str]", args.python_path or [])) + + mod = importlib.import_module(cast("str", 
args.module)) + for fname in cast("list[str]", args.exec or []): + execdict = {"__name__": "islpy._monkeypatch"} + with open(fname) as inf: + exec(compile(inf.read(), fname, "exec"), execdict) + + sg = StubGen( + module=mod, + quiet=True, + recursive=False, + include_docstrings=False, + ) + sg.put(mod) + prefix_lines = "\n".join([ + "from collections.abc import Callable", + ]) + with open(output_path / "_isl.pyi", "w") as outf: + outf.write(f"{prefix_lines}\n{sg.get()}") + + +if __name__ == "__main__": + main() From bc37c53ac8d1672ba342b87d2b0c43186c77e788 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 14:14:48 -0500 Subject: [PATCH 19/33] Limit Github actions PR concurrency --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37830d31..bd2d57e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,10 @@ on: schedule: - cron: '17 3 * * 0' +concurrency: + group: ${{ github.head_ref || github.ref_name }} + cancel-in-progress: true + jobs: ruff: name: Ruff From f6d4ed61b17b1436d9e5fc42771437188a04209e Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 22 May 2025 15:37:17 -0500 Subject: [PATCH 20/33] CMakeLists: move to 4-wide indentation --- CMakeLists.txt | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e25ec7f9..7891b21c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,27 +4,27 @@ find_package(Python 3.10 COMPONENTS Interpreter Development.Module REQUIRED) # Force Release build by default if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE) - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") + set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") endif() # Detect the installed nanobind package and import it into CMake execute_process( - COMMAND "${Python_EXECUTABLE}" -m nanobind --cmake_dir - OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE NB_DIR) + COMMAND "${Python_EXECUTABLE}" -m nanobind --cmake_dir + OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE NB_DIR) list(APPEND CMAKE_PREFIX_PATH "${NB_DIR}") find_package(nanobind CONFIG REQUIRED) nanobind_add_module( - _isl - NB_STATIC # Build static libnanobind (the extension module itself remains a shared library) - NOMINSIZE # Optimize for speed, not for size - LTO # Enable LTO - src/wrapper/wrap_isl.cpp - src/wrapper/wrap_isl_part1.cpp - src/wrapper/wrap_isl_part2.cpp - src/wrapper/wrap_isl_part3.cpp - ${ISL_SOURCES} + _isl + NB_STATIC # Build static libnanobind (the extension module itself remains a shared library) + NOMINSIZE # Optimize for speed, not for size + LTO # Enable LTO + src/wrapper/wrap_isl.cpp + src/wrapper/wrap_isl_part1.cpp + src/wrapper/wrap_isl_part2.cpp + src/wrapper/wrap_isl_part3.cpp + ${ISL_SOURCES} ) # Work around https://github.com/inducer/islpy/issues/120. 
@@ -33,31 +33,31 @@ nanobind_add_module( set_source_files_properties(${ISL_SOURCES} PROPERTIES COMPILE_DEFINITIONS __OPTIMIZE_SIZE__) if(USE_IMATH_FOR_MP) - target_compile_definitions(_isl PRIVATE USE_IMATH_FOR_MP=1) + target_compile_definitions(_isl PRIVATE USE_IMATH_FOR_MP=1) endif() if(USE_IMATH_SIO) - target_compile_definitions(_isl PRIVATE USE_SMALL_INT_OPT=1) + target_compile_definitions(_isl PRIVATE USE_SMALL_INT_OPT=1) endif() if(USE_GMP_FOR_MP) - target_compile_definitions(_isl PRIVATE USE_GMP_FOR_MP=1) + target_compile_definitions(_isl PRIVATE USE_GMP_FOR_MP=1) endif() if(USE_BARVINOK) - target_compile_definitions(_isl PRIVATE ISLPY_INCLUDE_BARVINOK=1) - target_include_directories(_isl PRIVATE ${BARVINOK_INC_DIRS}) - target_link_directories(_isl PRIVATE ${BARVINOK_LIB_DIRS}) - target_link_libraries(_isl PRIVATE ${BARVINOK_LIB_NAMES}) + target_compile_definitions(_isl PRIVATE ISLPY_INCLUDE_BARVINOK=1) + target_include_directories(_isl PRIVATE ${BARVINOK_INC_DIRS}) + target_link_directories(_isl PRIVATE ${BARVINOK_LIB_DIRS}) + target_link_libraries(_isl PRIVATE ${BARVINOK_LIB_NAMES}) endif() target_include_directories(_isl PRIVATE ${ISL_INC_DIRS}) if(USE_SHIPPED_ISL) - target_compile_definitions(_isl PRIVATE GIT_HEAD_ID="${ISL_GIT_HEAD_ID}") + target_compile_definitions(_isl PRIVATE GIT_HEAD_ID="${ISL_GIT_HEAD_ID}") else() - target_link_directories(_isl PRIVATE ${ISL_LIB_DIRS}) - target_link_libraries(_isl PRIVATE ${ISL_LIB_NAMES}) + target_link_directories(_isl PRIVATE ${ISL_LIB_DIRS}) + target_link_libraries(_isl PRIVATE ${ISL_LIB_NAMES}) endif() install(TARGETS _isl LIBRARY DESTINATION .) From dead6119e5dbbd12b65e8a7a9433ec986efa581a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 May 2025 15:38:31 -0500 Subject: [PATCH 21/33] Fix doc build for typed islpy --- doc/conf.py | 17 +++++++++-------- doc/ref_fundamental.rst | 2 -- doc/reference.rst | 14 ++++++++++++++ 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index f64b5f1a..45ebc9c1 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -23,11 +23,7 @@ } -def autodoc_process_docstring(app, what, name, obj, options, lines): - # clear out redundant pybind-generated member list - if any("Members" in ln for ln in lines): - del lines[:] - +def autodoc_process_docstring(app, what, name, obj, options, lines: list[str]): arg_list_re = re.compile(r"^([a-zA-Z0-9_]+)\((.*?)\)") from inspect import isclass, isroutine @@ -37,9 +33,9 @@ def autodoc_process_docstring(app, what, name, obj, options, lines): if isroutine(getattr(obj, nm)) and (not nm.startswith("_") or nm in UNDERSCORE_WHITELIST)] - def gen_method_string(meth_name): + def gen_method_string(meth_name: str): try: - result = ":meth:`%s`" % meth_name + result: str = ":meth:`%s`" % meth_name meth_obj = getattr(obj, meth_name) if meth_obj.__doc__ is None: return result @@ -66,7 +62,7 @@ def gen_method_string(meth_name): for meth_name in methods] + lines for nm in methods: - underscore_autodoc = [] + underscore_autodoc: list[str] = [] if nm in UNDERSCORE_WHITELIST: underscore_autodoc.append(".. automethod:: %s" % nm) @@ -75,5 +71,10 @@ def gen_method_string(meth_name): lines.extend(underscore_autodoc) +autodoc_default_options = { + "undoc-members": True, +} + + def setup(app): app.connect("autodoc-process-docstring", autodoc_process_docstring) diff --git a/doc/ref_fundamental.rst b/doc/ref_fundamental.rst index 9183b173..0e449e69 100644 --- a/doc/ref_fundamental.rst +++ b/doc/ref_fundamental.rst @@ -44,8 +44,6 @@ Value .. 
autoclass:: Val :members: - .. automethod:: to_python - Multi-Value ----------- diff --git a/doc/reference.rst b/doc/reference.rst index bc188eb4..02ff7e23 100644 --- a/doc/reference.rst +++ b/doc/reference.rst @@ -177,4 +177,18 @@ Canonical Names for Internal Module See :class:`islpy.Printer`. +.. currentmodule:: islpy._monkeypatch + +.. class:: SetLikeT + + A type variable with an upper bound of :class:`islpy.BasicSet` | :class:`islpy.Set`. + +.. class:: AffOrConstraintT + + A type variable with an upper bound of :class:`islpy.Aff` | :class:`islpy.Constraint`. + +.. class:: BasicT + + A type variable with an upper bound of :class:`islpy.BasicSet` | :class:`islpy.BasicMap`. + .. vim: sw=4 From d5807caf3ab601e36981112ad64cbfa1184ebf29 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 29 May 2025 13:00:33 -0500 Subject: [PATCH 22/33] Switch to scikit build core --- CMakeLists.txt | 192 +++++++- MANIFEST.in | 43 -- Makefile.in | 14 - README_SETUP.txt | 34 -- aksetup_helper.py | 1011 ---------------------------------------- build-with-barvinok.sh | 23 +- configure.py | 6 - gen_wrap.py | 40 +- islpy/version.py | 13 +- pyproject.toml | 65 ++- setup.py | 326 ------------- 11 files changed, 289 insertions(+), 1478 deletions(-) delete mode 100644 MANIFEST.in delete mode 100644 Makefile.in delete mode 100644 README_SETUP.txt delete mode 100644 aksetup_helper.py delete mode 100755 configure.py delete mode 100644 setup.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 7891b21c..941ae86c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,13 +1,16 @@ +# Useful setting for looking at build commands (passed to pip install): +# --config-settings=cmake.define.CMAKE_VERBOSE_MAKEFILE=ON +# +# To build with debug info: Run pip install with +# --config-settings=cmake.build-type=Debug +# Note that setting CMAKE_BUILD_TYPE to Debug here does not suffice: +# scikit build core will still silently strip the debug symbols: +# https://github.com/scikit-build/scikit-build-core/issues/875 + cmake_minimum_required(VERSION 3.15...3.27) project(islpy) find_package(Python 3.10 COMPONENTS Interpreter Development.Module REQUIRED) -# Force Release build by default -if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." 
FORCE) - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") -endif() - # Detect the installed nanobind package and import it into CMake execute_process( COMMAND "${Python_EXECUTABLE}" -m nanobind --cmake_dir @@ -15,6 +18,161 @@ execute_process( list(APPEND CMAKE_PREFIX_PATH "${NB_DIR}") find_package(nanobind CONFIG REQUIRED) +option(USE_SHIPPED_ISL "Use shipped ISL" 1) +option(USE_SHIPPED_IMATH "Use shipped IMATH" 1) +option(USE_IMATH_FOR_MP "Use IMATH for multiprecision arithmetic" 1) +option(USE_IMATH_SIO "Use IMATH small-integer optimization" 1) +option(USE_GMP_FOR_MP "Use GMP" 0) +option(USE_BARVINOK "Use Barvinok (beware of GPL license)" 0) + +if(USE_SHIPPED_ISL) + if(USE_BARVINOK) + message(FATAL_ERROR "Using barvinok is not compatible with shipped ISL") + endif() + set(ISL_SOURCES + isl/isl_schedule.c + isl/isl_ast_build_expr.c + isl/isl_sample.c + isl/isl_coalesce.c + isl/isl_fold.c + isl/isl_schedule_read.c + isl/isl_aff_map.c + isl/isl_scheduler_clustering.c + isl/isl_flow.c + isl/isl_map_subtract.c + isl/uset_to_umap.c + isl/isl_hash.c + isl/isl_aff.c + isl/isl_transitive_closure.c + isl/isl_map_simplify.c + isl/print.c + isl/basis_reduction_tab.c + isl/isl_schedule_constraints.c + isl/isl_sort.c + isl/isl_ast.c + isl/bset_to_bmap.c + isl/bset_from_bmap.c + isl/isl_schedule_band.c + isl/isl_bernstein.c + isl/uset_from_umap.c + isl/isl_scheduler.c + isl/isl_set_to_ast_graft_list.c + isl/isl_convex_hull.c + isl/isl_schedule_tree.c + isl/isl_tarjan.c + isl/isl_equalities.c + isl/isl_constraint.c + isl/isl_union_map.c + isl/isl_bound.c + isl/isl_stride.c + isl/set_list_from_map_list_inl.c + isl/isl_farkas.c + isl/isl_tab_pip.c + isl/set_to_map.c + isl/set_from_map.c + isl/isl_lp.c + isl/isl_ffs.c + isl/isl_id_to_ast_expr.c + isl/isl_val.c + isl/isl_set_list.c + isl/isl_space.c + isl/isl_tab.c + isl/isl_map.c + isl/isl_version.c + isl/isl_stream.c + isl/isl_local_space.c + isl/isl_id_to_pw_aff.c + isl/isl_ilp.c + isl/isl_range.c + isl/isl_point.c + isl/isl_schedule_node.c + isl/isl_polynomial.c + isl/isl_options.c + isl/isl_morph.c + isl/isl_deprecated.c + isl/isl_ctx.c + isl/isl_seq.c + isl/isl_box.c + isl/isl_output.c + isl/isl_factorization.c + isl/isl_printer.c + isl/dep.c + isl/isl_id_to_id.c + isl/isl_ast_build.c + isl/isl_ast_codegen.c + isl/isl_obj.c + isl/isl_scheduler_scc.c + isl/isl_vec.c + isl/isl_map_list.c + isl/isl_vertices.c + isl/isl_arg.c + isl/isl_mat.c + isl/isl_id.c + isl/isl_affine_hull.c + isl/isl_scan.c + isl/isl_map_to_basic_set.c + isl/isl_blk.c + isl/isl_dim_map.c + isl/isl_local.c + isl/isl_reordering.c + isl/isl_ast_graft.c + isl/isl_input.c + ) + set(ISL_INC_DIRS + ${CMAKE_SOURCE_DIR}/isl-supplementary + ${CMAKE_SOURCE_DIR}/isl/include + ${CMAKE_SOURCE_DIR}/isl + ) + if(USE_IMATH_FOR_MP) + if(USE_SHIPPED_IMATH) + list(APPEND ISL_SOURCES + isl/isl_imath.c + isl/imath/imath.c + isl/imath/imrat.c + isl/imath/gmp_compat.c + ) + list(APPEND ISL_INC_DIRS isl/imath) + endif() + if(USE_IMATH_SIO) + list(APPEND ISL_SOURCES + isl/isl_int_sioimath.c + isl/isl_val_sioimath.c + ) + endif() + endif() +else() + set(ISL_SOURCES) + if(NOT ISL_LIB_NAMES) + set(ISL_LIB_NAMES isl) + if(USE_BARVINOK) + list(PREPEND ISL_LIB_NAMES barvinok) + endif() + endif() +endif() + +set(ISLPY_GENERATED_SOURCE + ${CMAKE_BINARY_DIR}/generated/gen-expose-part1.inc + ${CMAKE_BINARY_DIR}/generated/gen-expose-part2.inc + ${CMAKE_BINARY_DIR}/generated/gen-expose-part3.inc + ${CMAKE_BINARY_DIR}/generated/gen-wrap-part1.inc + 
${CMAKE_BINARY_DIR}/generated/gen-wrap-part2.inc + ${CMAKE_BINARY_DIR}/generated/gen-wrap-part3.inc +) + +if(USE_BARVINOK) + set(ISLPY_GENERATION_FLAGS --barvinok) +else() + set(ISLPY_GENERATION_FLAGS) +endif() + +add_custom_command( + OUTPUT ${ISLPY_GENERATED_SOURCE} + COMMAND ${Python_EXECUTABLE} ${CMAKE_SOURCE_DIR}/gen_wrap.py + -o ${CMAKE_BINARY_DIR}/generated + -I ${ISL_INC_DIRS} + ${ISLPY_GENERATION_FLAGS} +) + nanobind_add_module( _isl NB_STATIC # Build static libnanobind (the extension module itself remains a shared library) @@ -25,7 +183,9 @@ nanobind_add_module( src/wrapper/wrap_isl_part2.cpp src/wrapper/wrap_isl_part3.cpp ${ISL_SOURCES} + ${ISLPY_GENERATED_SOURCE} ) +target_include_directories(_isl PRIVATE ${CMAKE_BINARY_DIR}/generated) # Work around https://github.com/inducer/islpy/issues/120. # See https://stackoverflow.com/questions/43554227/extern-inline-func-results-in-undefined-reference-error @@ -54,12 +214,28 @@ endif() target_include_directories(_isl PRIVATE ${ISL_INC_DIRS}) if(USE_SHIPPED_ISL) - target_compile_definitions(_isl PRIVATE GIT_HEAD_ID="${ISL_GIT_HEAD_ID}") + target_compile_definitions(_isl PRIVATE GIT_HEAD_ID="included-with-islpy") else() target_link_directories(_isl PRIVATE ${ISL_LIB_DIRS}) target_link_libraries(_isl PRIVATE ${ISL_LIB_NAMES}) endif() -install(TARGETS _isl LIBRARY DESTINATION .) +install(TARGETS _isl LIBRARY DESTINATION islpy) + +set(ISLPY_STUB_FILE ${CMAKE_BINARY_DIR}/_isl.pyi) +add_custom_command( + OUTPUT ${ISLPY_STUB_FILE} + COMMAND ${Python_EXECUTABLE} ${CMAKE_SOURCE_DIR}/stubgen/stubgen.py + -o ${CMAKE_BINARY_DIR} + --exec ${CMAKE_SOURCE_DIR}/islpy/_monkeypatch.py + --python-path ${CMAKE_BINARY_DIR} + -m _isl + DEPENDS _isl +) +add_custom_target( + _isl_stub + ALL DEPENDS ${CMAKE_BINARY_DIR}/_isl.pyi +) +install(FILES ${ISLPY_STUB_FILE} DESTINATION islpy) # vim: sw=2 diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 676dbb18..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,43 +0,0 @@ -include isl/*.ac -include isl/*.c -include isl/*.h -include isl/imath/*.c -include isl/imath/*.h -include isl/imath_wrap/*.h -include isl/imath_wrap/*.c -include isl/include/isl/*.h -include isl/include/isl/*.c -include isl/include/isl/deprecated/*.h -include isl-supplementary/isl/*.h -include isl-supplementary/*.h - -include build-with-barvinok.sh - -include src/wrapper/*.h -include src/wrapper/*.hpp -include src/wrapper/*.cpp - -include gen_wrap.py - -include test/*.py -include examples/*.py - -include doc/*.rst -include doc/images/*png -include doc/Makefile -include doc/conf.py -include doc/_static/*.css -include doc/_templates/*.html - -include configure.py -include Makefile.in -include aksetup_helper.py -include README_SETUP.txt -include CMakeLists.txt - -include README.rst -include CITATION.cff -include isl/LICENSE -include isl/imath/LICENSE - -recursive-exclude preproc-headers * diff --git a/Makefile.in b/Makefile.in deleted file mode 100644 index 0d3bfaf3..00000000 --- a/Makefile.in +++ /dev/null @@ -1,14 +0,0 @@ -.PHONY : all install clean tags - -all: tags - ${PYTHON_EXE} setup.py build - -install: all tags - ${PYTHON_EXE} setup.py install - -clean: - rm -Rf build - rm -f tags - -tags: - ctags -R src || true diff --git a/README_SETUP.txt b/README_SETUP.txt deleted file mode 100644 index 07cbb551..00000000 --- a/README_SETUP.txt +++ /dev/null @@ -1,34 +0,0 @@ -Hi, welcome. - -This Python package uses aksetup for installation, which means that -installation should be easy and quick. 
- -If you don't want to continue reading, just try the regular - - ./configure.py --help - ./configure.py --some-options - make - sudo make install - -That should do the trick. (By the way: If a config option says "several ok", -then you may specify several values, separated by commas.) - -aksetup also supports regular distutils installation, without using -configure: - - python setup.py build - sudo python setup.py install - -In this case, configuration is obtained from files in this order: - -/etc/aksetup-defaults.py -$HOME/.aksetup-defaults.py -$PACKAGEDIR/siteconf.py - -Once you've run configure, you can copy options from your siteconf.py file to -one of these files, and you won't ever have to configure them again manually. -In fact, you may pass the options "--update-user" and "--update-global" to -configure, and it will automatically update these files for you. - -This is particularly handy if you want to perform an unattended or automatic -installation via easy_install. diff --git a/aksetup_helper.py b/aksetup_helper.py deleted file mode 100644 index 57e82ebd..00000000 --- a/aksetup_helper.py +++ /dev/null @@ -1,1011 +0,0 @@ -import os -import sys -try: - from setuptools import Extension - from setuptools.command.build_ext import ( # noqa: N812 - build_ext as BaseBuildExtCommand) - -except ImportError: - class Extension: - pass - - class BaseBuildExtCommand: - pass - - -def count_down_delay(delay): - from time import sleep - while delay: - sys.stdout.write("Continuing in %d seconds... \r" % delay) - sys.stdout.flush() - delay -= 1 - sleep(1) - print("") - - -DASH_SEPARATOR = 75 * "-" - - -def setup(*args, **kwargs): - from setuptools import setup - try: - setup(*args, **kwargs) - except KeyboardInterrupt: - raise - except SystemExit: - raise - except Exception: - print(DASH_SEPARATOR) - print("Sorry, your build failed. Try rerunning configure.py with " - "different options.") - print(DASH_SEPARATOR) - raise - - -def get_numpy_incpath(): - from os.path import join, dirname, exists - from importlib.util import find_spec - origin = find_spec("numpy").origin - if origin is None: - raise RuntimeError("origin of numpy package not found") - - pathname = dirname(origin) - for p in [ - join(pathname, "_core", "include"), # numpy 2 onward - join(pathname, "core", "include"), # numpy prior to 2 - ]: - if exists(join(p, "numpy", "arrayobject.h")): - return p - - raise RuntimeError("no valid path for numpy found") - - -class NumpyExtension(Extension): - # nicked from - # http://mail.python.org/pipermail/distutils-sig/2007-September/008253.html - # solution by Michael Hoffmann - def __init__(self, *args, **kwargs): - Extension.__init__(self, *args, **kwargs) - self._include_dirs = self.include_dirs - del self.include_dirs # restore overwritten property - - def get_additional_include_dirs(self): - return [get_numpy_incpath()] - - def get_include_dirs(self): - return self._include_dirs + self.get_additional_include_dirs() - - def set_include_dirs(self, value): - self._include_dirs = value - - def del_include_dirs(self): - pass - - include_dirs = property(get_include_dirs, set_include_dirs, del_include_dirs) - - -class ExtensionUsingNumpy(Extension): - """Unlike :class:`NumpyExtension`, this class does not require numpy to be - importable upon extension module creation, allowing ``setup_requires=["numpy"]`` - to work. 
On the other hand, it requires the use of:: - - setup(..., - cmdclass={'build_ext': NumpyBuildExtCommand}) - - or - - setup(..., - cmdclass={'build_ext': PybindBuildExtCommand}) - """ - - -class NumpyBuildExtCommand(BaseBuildExtCommand): - def build_extension(self, extension): - # We add the numpy include dir right before building the - # extension, in order to avoid having to import numpy when - # the setup script is imported, which would prevent - # installation before manual installation of numpy. - if isinstance(extension, ExtensionUsingNumpy): - numpy_incpath = get_numpy_incpath() - if numpy_incpath not in extension.include_dirs: - extension.include_dirs.append(numpy_incpath) - - BaseBuildExtCommand.build_extension(self, extension) - - -# {{{ tools - -def flatten(lst): - """For an iterable of sub-iterables, generate each member of each - sub-iterable in turn, i.e. a flattened version of that super-iterable. - - Example: Turn [[a,b,c],[d,e,f]] into [a,b,c,d,e,f]. - """ - for sublist in lst: - for j in sublist: - yield j - - -def humanize(sym_str): - words = sym_str.lower().replace("_", " ").split(" ") - return " ".join([word.capitalize() for word in words]) - -# }}} - - -# {{{ siteconf handling - -def get_config(schema=None, warn_about_no_config=True): - if schema is None: - from setup import get_config_schema - schema = get_config_schema() - - if (not schema.have_config() and not schema.have_global_config() - and warn_about_no_config): - print("*************************************************************") - print("*** I have detected that you have not run configure.py.") - print("*************************************************************") - print("*** Additionally, no global config files were found.") - print("*** I will go ahead with the default configuration.") - print("*** In all likelihood, this will not work out.") - print("*** ") - print("*** See README_SETUP.txt for more information.") - print("*** ") - print("*** If the build does fail, just re-run configure.py with the") - print("*** correct arguments, and then retry. 
Good luck!") - print("*************************************************************") - print("*** HIT Ctrl-C NOW IF THIS IS NOT WHAT YOU WANT") - print("*************************************************************") - - count_down_delay(delay=10) - - config = expand_options(schema.read_config()) - schema.update_config_from_and_modify_command_line(config, sys.argv) - return config - - -def hack_distutils(debug=False, fast_link=True, what_opt=3): - # hack distutils.sysconfig to eliminate debug flags - # stolen from mpi4py - - def remove_prefixes(optlist, bad_prefixes): - for bad_prefix in bad_prefixes: - for i, flag in enumerate(optlist): - if flag.startswith(bad_prefix): - optlist.pop(i) - break - return optlist - - if not sys.platform.lower().startswith("win"): - from distutils import sysconfig - - cvars = sysconfig.get_config_vars() - - bad_prefixes = ["-g", "-O", "-Wstrict-prototypes", "-DNDEBUG"] - - cflags = cvars.get("OPT") - if cflags: - cflags = remove_prefixes(cflags.split(), bad_prefixes) - if debug: - cflags.append("-g") - else: - if what_opt is None: - pass - else: - cflags.append("-O%s" % what_opt) - cflags.append("-DNDEBUG") - - cvars["OPT"] = str.join(" ", cflags) - - cflags = cvars.get("CONFIGURE_CFLAGS") - if cflags: - cflags = remove_prefixes(cflags.split(), bad_prefixes) - cvars["CONFIGURE_CFLAGS"] = str.join(" ", cflags) - - if "BASECFLAGS" in cvars: - cvars["CFLAGS"] = cvars["BASECFLAGS"] + " " + cvars.get("OPT", "") - else: - assert "CFLAGS" in cvars - - if fast_link: - for varname in ["LDSHARED", "BLDSHARED"]: - ldsharedflags = cvars.get(varname) - if ldsharedflags: - ldsharedflags = remove_prefixes(ldsharedflags.split(), - ["-Wl,-O"]) - cvars[varname] = str.join(" ", ldsharedflags) - -# }}} - - -# {{{ configure guts - -def default_or(a, b): - if a is None: - return b - else: - return a - - -def expand_str(s, options): - import re - - def my_repl(match): - sym = match.group(1) - try: - repl = options[sym] - except KeyError: - from os import environ - repl = environ[sym] - - return expand_str(repl, options) - - return re.subn(r"\$\{([a-zA-Z0-9_]+)\}", my_repl, s)[0] - - -def expand_value(v, options): - if isinstance(v, str): - return expand_str(v, options) - elif isinstance(v, list): - result = [] - for i in v: - try: - exp_i = expand_value(i, options) - except Exception: - pass - else: - result.append(exp_i) - - return result - else: - return v - - -def expand_options(options): - return dict( - (k, expand_value(v, options)) for k, v in options.items()) - - -class ConfigSchema: - def __init__(self, options, conf_file="siteconf.py", conf_dir=os.path.dirname(__file__)): - self.optdict = dict((opt.name, opt) for opt in options) - self.options = options - self.conf_dir = conf_dir - self.conf_file = conf_file - - from os.path import expanduser - self.user_conf_file = expanduser("~/.aksetup-defaults.py") - - if not sys.platform.lower().startswith("win"): - self.global_conf_file = "/etc/aksetup-defaults.py" - else: - self.global_conf_file = None - - def get_conf_file(self): - import os - return os.path.join(self.conf_dir, self.conf_file) - - def set_conf_dir(self, conf_dir): - self.conf_dir = conf_dir - - def get_default_config(self): - return dict((opt.name, opt.default) for opt in self.options) - - def read_config_from_pyfile(self, filename): - result = {} - filevars = {} - infile = open(filename, "r") - try: - contents = infile.read() - finally: - infile.close() - - exec(compile(contents, filename, "exec"), filevars) - - for key, value in filevars.items(): - if key in 
self.optdict: - result[key] = value - - return result - - def update_conf_file(self, filename, config): - result = {} - filevars = {} - - try: - exec(compile(open(filename, "r").read(), filename, "exec"), filevars) - except IOError: - pass - - if "__builtins__" in filevars: - del filevars["__builtins__"] - - for key, value in config.items(): - if value is not None: - filevars[key] = value - - keys = list(filevars.keys()) - keys.sort() - - outf = open(filename, "w") - for key in keys: - outf.write("%s = %s\n" % (key, repr(filevars[key]))) - outf.close() - - return result - - def update_user_config(self, config): - self.update_conf_file(self.user_conf_file, config) - - def update_global_config(self, config): - if self.global_conf_file is not None: - self.update_conf_file(self.global_conf_file, config) - - def get_default_config_with_files(self): - result = self.get_default_config() - - import os - - confignames = [] - if self.global_conf_file is not None: - confignames.append(self.global_conf_file) - confignames.append(self.user_conf_file) - - for fn in confignames: - if os.access(fn, os.R_OK): - result.update(self.read_config_from_pyfile(fn)) - - return result - - def have_global_config(self): - import os - result = os.access(self.user_conf_file, os.R_OK) - - if self.global_conf_file is not None: - result = result or os.access(self.global_conf_file, os.R_OK) - - return result - - def have_config(self): - import os - return os.access(self.get_conf_file(), os.R_OK) - - def update_from_python_snippet(self, config, py_snippet, filename): - filevars = {} - exec(compile(py_snippet, filename, "exec"), filevars) - - for key, value in filevars.items(): - if key in self.optdict: - config[key] = value - elif key == "__builtins__": - pass - else: - raise KeyError("invalid config key in %s: %s" % ( - filename, key)) - - def update_config_from_and_modify_command_line(self, config, argv): - cfg_prefix = "--conf:" - - i = 0 - while i < len(argv): - arg = argv[i] - - if arg.startswith(cfg_prefix): - del argv[i] - self.update_from_python_snippet( - config, arg[len(cfg_prefix):], "") - else: - i += 1 - - return config - - def read_config(self): - import os - cfile = self.get_conf_file() - - result = self.get_default_config_with_files() - if os.access(cfile, os.R_OK): - with open(cfile, "r") as inf: - py_snippet = inf.read() - self.update_from_python_snippet(result, py_snippet, cfile) - - return result - - def add_to_configparser(self, parser, def_config=None): - if def_config is None: - def_config = self.get_default_config_with_files() - - for opt in self.options: - default = default_or(def_config.get(opt.name), opt.default) - opt.add_to_configparser(parser, default) - - def get_from_configparser(self, options): - result = {} - for opt in self.options: - result[opt.name] = opt.take_from_configparser(options) - return result - - def write_config(self, config): - outf = open(self.get_conf_file(), "w") - for opt in self.options: - value = config[opt.name] - if value is not None: - outf.write("%s = %s\n" % (opt.name, repr(config[opt.name]))) - outf.close() - - def make_substitutions(self, config): - return dict((opt.name, opt.value_to_str(config[opt.name])) - for opt in self.options) - - -class Option(object): - def __init__(self, name, default=None, help=None): - self.name = name - self.default = default - self.help = help - - def as_option(self): - return self.name.lower().replace("_", "-") - - def metavar(self): - last_underscore = self.name.rfind("_") - return self.name[last_underscore+1:] - - def 
get_help(self, default): - result = self.help - if self.default: - result += " (default: %s)" % self.value_to_str( - default_or(default, self.default)) - return result - - def value_to_str(self, default): - return default - - def add_to_configparser(self, parser, default=None): - default = default_or(default, self.default) - default_str = self.value_to_str(default) - parser.add_option( - "--" + self.as_option(), dest=self.name, - default=default_str, - metavar=self.metavar(), help=self.get_help(default)) - - def take_from_configparser(self, options): - return getattr(options, self.name) - - -class Switch(Option): - def add_to_configparser(self, parser, default=None): - if not isinstance(self.default, bool): - raise ValueError("Switch options must have a default") - - if default is None: - default = self.default - - option_name = self.as_option() - - if default: - option_name = "no-" + option_name - action = "store_false" - else: - action = "store_true" - - parser.add_option( - "--" + option_name, - dest=self.name, - help=self.get_help(default), - default=default, - action=action) - - -class StringListOption(Option): - def value_to_str(self, default): - if default is None: - return None - - return ",".join([str(el).replace(",", r"\,") for el in default]) - - def get_help(self, default): - return Option.get_help(self, default) + " (several ok)" - - def take_from_configparser(self, options): - opt = getattr(options, self.name) - if opt is None: - return None - else: - if opt: - import re - sep = re.compile(r"(?= 64 breaks editable builds: - # https://github.com/scikit-build/scikit-build/pull/737#issuecomment-1215573830 - # setuptools < 64 is incompatible with Python 3.12. - # So: no editable builds on Python 3.12, for now. - "setuptools>=42,<64;python_version<'3.12'", - "setuptools>=64;python_version>='3.12'", - - "wheel>=0.34.2", - "scikit-build", - "nanobind>=1.3", + "scikit-build-core >=0.9.3", + "nanobind >=1.9.2", "pcpp", - # Added dynamically in setup.py if needed - # "cmake>=3.18", - # "ninja", + # stubgen uses @override :/ + "typing_extensions>=4.5", +] + +[project] +name = "islpy" +version = "2025.2" +description = "Wrapper around isl, an integer set library" +readme = "README.rst" +license = "MIT" +authors = [ + { name = "Andreas Kloeckner", email = "inform@tiker.net" }, +] +requires-python = "~=3.10" + +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Other Audience", + "Intended Audience :: Science/Research", + "Programming Language :: C++", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Scientific/Engineering :: Visualization", + "Topic :: Software Development :: Libraries", +] + +[project.urls] +Documentation = "https://documen.tician.de/islpy" +Repository = "https://github.com/inducer/islpy" + + +[dependency-groups] +dev = [ + "pytest>=2", ] -build-backend = "setuptools.build_meta" [tool.inducer-ci-support] disable-editable-pip-install = true +[tool.scikit-build] +sdist.exclude = [ + ".github", + "run-*.sh", +] + +# FIXME: Comment out before committing +# Use with --no-build-isolation for fast development builds +# build-dir = "build/{wheel_tag}" + [tool.ruff] preview = true exclude = [ diff --git a/setup.py b/setup.py deleted file mode 100644 index 22f4ae08..00000000 --- a/setup.py +++ /dev/null @@ -1,326 +0,0 @@ -#!/usr/bin/env python - 
-__copyright__ = """ -Copyright (C) 2011-20 Andreas Kloeckner -""" - -__license__ = """ -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""" - -import shutil -import sys -from collections.abc import Sequence - - -# Needed for aksetup to be found -sys.path.extend(["."]) - - -def get_config_schema(): - from aksetup_helper import ( - ConfigSchema, - IncludeDir, - Libraries, - LibraryDir, - StringListOption, - Switch, - ) - - default_cxxflags = [ - # Required for pybind11: - # https://pybind11.readthedocs.io/en/stable/faq.html#someclass-declared-with-greater-visibility-than-the-type-of-its-field-someclass-member-wattributes - "-fvisibility=hidden" - ] - - return ConfigSchema([ - Switch("USE_SHIPPED_ISL", True, "Use included copy of isl"), - Switch("USE_SHIPPED_IMATH", True, "Use included copy of imath in isl"), - Switch("USE_GMP", True, "Use gmp in external isl"), - Switch("USE_BARVINOK", False, "Include wrapper for Barvinok"), - Switch("USE_IMATH_SIO", True, "When using imath, use small-integer " - "optimization"), - - IncludeDir("GMP", []), - LibraryDir("GMP", []), - Libraries("GMP", ["gmp"]), - - IncludeDir("ISL", ["/usr/include"]), - LibraryDir("ISL", []), - Libraries("ISL", ["isl"]), - - IncludeDir("BARVINOK", []), - LibraryDir("BARVINOK", []), - Libraries("BARVINOK", ["barvinok", "polylibgmp"]), - - StringListOption("CXXFLAGS", default_cxxflags, - help="Any extra C++ compiler options to include"), - StringListOption("LDFLAGS", [], - help="Any extra linker options to include"), - ]) - - -def _get_isl_sources(use_shipped_imath: bool, use_imath_sio: bool) -> Sequence[str]: - extra_objects: list[str] = [] - - from glob import glob - isl_blocklist = [ - "_templ.c", - "_templ_yaml.c", - "mp_get", - "extract_key.c", - "isl_multi_templ.c", - "isl_multi_apply_set.c", - "isl_multi_gist.c", - "isl_multi_coalesce.c", - "isl_multi_intersect.c", - "isl_multi_floor.c", - "isl_multi_apply_union_set.c", - "isl_multi_cmp.c", - "isl_multi_pw_aff_explicit_domain.c", - "isl_multi_hash.c", - "isl_multi_dims.c", - "isl_multi_explicit_domain.c", - "isl_multi_no_explicit_domain.c", - "isl_multi_align_set.c", - "isl_multi_align_union_set.c", - "isl_multi_union_pw_aff_explicit_domain.c", - "isl_union_templ.c", - "isl_union_multi.c", - "isl_union_eval.c", - "isl_union_neg.c", - "isl_union_single.c", - "isl_pw_hash.c", - "isl_pw_eval.c", - "isl_pw_union_opt.c", - "isl_type_check_match_range_multi_val.c", - ] - - for fn in glob("isl/*.c"): - blocklisted = False - for bl in isl_blocklist: - if bl in fn: - blocklisted = True - break - - if 
"no_piplib" in fn: - pass - elif "piplib" in fn: - blocklisted = True - - if "gmp" in fn: - if use_shipped_imath: - continue - if "imath" in fn: - if not use_shipped_imath: - continue - - if "sioimath" in fn and not use_imath_sio: - continue - if "isl_val_imath" in fn and use_imath_sio: - continue - - if "isl_ast_int.c" in fn and use_shipped_imath: - continue - - inf = open(fn, encoding="utf-8") - try: - contents = inf.read() - finally: - inf.close() - - if "int main(" not in contents and not blocklisted: - extra_objects.append(fn) - - if use_shipped_imath: - extra_objects.extend([ - "isl/imath/imath.c", - "isl/imath/imrat.c", - "isl/imath/gmp_compat.c", - # "isl/imath_wrap/imath.c", - # "isl/imath_wrap/imrat.c", - # "isl/imath_wrap/gmp_compat.c", - ]) - - return extra_objects - - -def main(): - import nanobind # noqa: F401 - from setuptools import find_packages - from skbuild import setup - - # {{{ import aksetup_helper bits - - prev_path = sys.path[:] - # FIXME skbuild seems to remove this. Why? - sys.path.append(".") - - from aksetup_helper import check_git_submodules, get_config - from gen_wrap import gen_wrapper - - sys.path = prev_path - - # }}} - - check_git_submodules() - - conf = get_config(get_config_schema(), warn_about_no_config=False) - - cmake_args = [] - - INCLUDE_DIRS = ["src/wrapper"] # noqa: N806 - LIBRARY_DIRS = [] # noqa: N806 - LIBRARIES = [] # noqa: N806 - - LIBRARY_DIRS.extend(conf["ISL_LIB_DIR"]) - LIBRARIES.extend(conf["ISL_LIBNAME"]) - - INCLUDE_DIRS.extend(conf["ISL_INC_DIR"]) - - if not (conf["USE_SHIPPED_ISL"] and conf["USE_SHIPPED_IMATH"]) and \ - conf["USE_GMP"]: - INCLUDE_DIRS.extend(conf["GMP_INC_DIR"]) - LIBRARY_DIRS.extend(conf["GMP_LIB_DIR"]) - LIBRARIES.extend(conf["GMP_LIBNAME"]) - - init_filename = "islpy/version.py" - with open(init_filename) as version_f: - version_py = version_f.read() - exec(compile(version_py, init_filename, "exec"), conf) - - with open("README.rst") as readme_f: - readme = readme_f.read() - - # cmake_args.append("-DCMAKE_BUILD_TYPE=Debug") - - if conf["USE_SHIPPED_ISL"]: - cmake_args.append("-DUSE_SHIPPED_ISL:bool=1") - isl_inc_dirs = ["isl-supplementary", "isl/include", "isl"] - - if conf["USE_SHIPPED_IMATH"]: - cmake_args.append("-DUSE_IMATH_FOR_MP:bool=1") - if conf["USE_IMATH_SIO"]: - cmake_args.append("-DUSE_IMATH_SIO:bool=1") - - isl_inc_dirs.append("isl/imath") - else: - cmake_args.append("-DUSE_GMP_FOR_MP:bool=1") - - extra_objects = _get_isl_sources( - use_shipped_imath=conf["USE_SHIPPED_IMATH"], - use_imath_sio=conf["USE_IMATH_SIO"]) - - cmake_args.append(f"-DISL_INC_DIRS:LIST={';'.join(isl_inc_dirs)}") - - cmake_args.append(f"-DISL_SOURCES:list={';'.join(extra_objects)}") - - with open("isl/configure.ac") as inf: - isl_version_line, = (ln for ln in inf - if ln.strip().startswith("versioninfo")) - - _, isl_version = isl_version_line.strip().split("=") - isl_version = isl_version.replace(":", ".") - - cmake_args.append(f"-DISL_GIT_HEAD_ID=isl-{isl_version}-included-with-islpy") - else: - if conf["ISL_INC_DIR"]: - cmake_args.append(f"-DISL_INC_DIRS:LIST=" - f"{';'.join(conf['ISL_INC_DIR'])}") - - if conf["ISL_LIB_DIR"]: - cmake_args.append(f"-DISL_LIB_DIRS:LIST=" - f"{';'.join(conf['ISL_LIB_DIR'])}") - - cmake_args.append(f"-DISL_LIB_NAMES={';'.join(conf['ISL_LIBNAME'])}") - - cmake_args.append("-DISL_SOURCES:list=") - - isl_inc_dirs = conf["ISL_INC_DIR"] - - if conf["USE_BARVINOK"]: - if conf["USE_SHIPPED_ISL"]: - raise RuntimeError("barvinok wrapper is not compatible with using " - "shipped isl") - if 
conf["USE_SHIPPED_IMATH"]: - raise RuntimeError("barvinok wrapper is not compatible with using " - "shipped imath") - - cmake_args.append("-DUSE_BARVINOK:bool=1") - cmake_args.append( - f"-DBARVINOK_INC_DIRS:LIST={';'.join(conf['BARVINOK_INC_DIR'])}") - cmake_args.append( - f"-DBARVINOK_LIB_DIRS:LIST={';'.join(conf['BARVINOK_LIB_DIR'])}") - cmake_args.append( - f"-DBARVINOK_LIB_NAMES:LIST={';'.join(conf['BARVINOK_LIBNAME'])}") - - isl_inc_dirs.extend(conf["BARVINOK_INC_DIR"]) - - if conf["CXXFLAGS"]: - cmake_args.append(f"-DCMAKE_CXX_FLAGS:STRING=" - f"{' '.join(conf['CXXFLAGS'])}") - - gen_wrapper(isl_inc_dirs, include_barvinok=conf["USE_BARVINOK"]) - - setup_requires = [] - if shutil.which("cmake") is None: - setup_requires += ["cmake>=3.18"] - if shutil.which("ninja") is None: - setup_requires += ["ninja"] - - setup(name="islpy", - version=conf["VERSION_TEXT"], - description="Wrapper around isl, an integer set library", - long_description=readme, - long_description_content_type="text/x-rst", - author="Andreas Kloeckner", - author_email="inform@tiker.net", - license="MIT", - url="http://documen.tician.de/islpy", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: Other Audience", - "Intended Audience :: Science/Research", - "Natural Language :: English", - "Programming Language :: C++", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Topic :: Multimedia :: Graphics :: 3D Modeling", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Mathematics", - "Topic :: Scientific/Engineering :: Physics", - "Topic :: Scientific/Engineering :: Visualization", - "Topic :: Software Development :: Libraries", - ], - - packages=find_packages(), - - python_requires="~=3.10", - setup_requires=setup_requires, - extras_require={ - "test": ["pytest>=2"], - }, - cmake_args=cmake_args, - cmake_install_dir="islpy", - ) - - -if __name__ == "__main__": - main() From 3e039e2662730ee6fc782208cce06b53c53fdac7 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 May 2025 10:24:33 -0500 Subject: [PATCH 23/33] gen_wrap: Remove logic to store preprocessed headers --- gen_wrap.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index fddfae64..86345562 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -464,31 +464,9 @@ def get_header_hashes(self, fnames: Sequence[str]): h.update(self.get_header_contents(fname).encode()) return h.hexdigest() - preprocessed_dir = "preproc-headers" macro_headers: ClassVar[Sequence[str]] = ["isl/multi.h", "isl/list.h"] def get_preprocessed_header(self, fname: str) -> str: - header_hash = self.get_header_hashes( - [*self.macro_headers, fname]) - - # cache preprocessed headers to avoid install-time - # dependency on pcpp - import errno - try: - os.mkdir(self.preprocessed_dir) - except OSError as err: - if err.errno == errno.EEXIST: - pass - else: - raise - - prepro_fname = join(self.preprocessed_dir, header_hash) - try: - with open(prepro_fname) as inf: - return inf.read() - except OSError: - pass - print(f"preprocessing {fname}...") macro_header_contents = [ self.get_header_contents(mh) @@ -497,9 +475,6 @@ def get_preprocessed_header(self, fname: str) -> str: prepro_header = preprocess_with_macros( macro_header_contents, self.get_header_contents(fname)) - with open(prepro_fname, "w") as outf: - outf.write(prepro_header) - return prepro_header # {{{ read_header From 5072a6df040657b48f18563e962a3d2916b67a13 Mon Sep 17 
00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 May 2025 14:05:18 -0500 Subject: [PATCH 24/33] gen_wrap: improve some types --- gen_wrap.py | 100 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 60 insertions(+), 40 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index 86345562..a59ef8a8 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + __copyright__ = "Copyright (C) 2011-20 Andreas Kloeckner" __license__ = """ @@ -21,14 +24,17 @@ """ import argparse -import os import re import sys -from collections.abc import Mapping, Sequence -from dataclasses import dataclass -from os.path import join +from dataclasses import dataclass, field from pathlib import Path -from typing import ClassVar, TextIO +from typing import TYPE_CHECKING, ClassVar, TextIO + +from typing_extensions import override + + +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence SEM_TAKE = "take" @@ -112,11 +118,11 @@ class Argument: @dataclass class CallbackArgument: name: str - return_semantics: str + return_semantics: str | None return_decl_words: list[str] return_base_type: str return_ptr: str - args: Sequence[Argument] + args: Sequence[Argument | CallbackArgument] @dataclass @@ -124,10 +130,10 @@ class Method: cls: str name: str c_name: str - return_semantics: str + return_semantics: str | None return_base_type: str return_ptr: str - args: Sequence[Argument] + args: Sequence[Argument | CallbackArgument] is_exported: bool is_constructor: bool mutator_veto: bool = False @@ -138,23 +144,26 @@ def __post_init__(self): if not self.is_static: self.args[0].name = "self" + @property + def first_arg(self) -> Argument: + first_arg = self.args[0] + assert isinstance(first_arg, Argument) + return first_arg + @property def is_static(self): return not (self.args - and self.args[0].base_type.startswith(f"isl_{self.cls}")) + and self.first_arg.base_type.startswith(f"isl_{self.cls}")) @property def is_mutator(self): return (not self.is_static - and self.args[0].semantics is SEM_TAKE - and self.return_ptr == "*" == self.args[0].ptr - and self.return_base_type == self.args[0].base_type + and self.first_arg.semantics is SEM_TAKE + and self.return_ptr == "*" == self.first_arg.ptr + and self.return_base_type == self.first_arg.base_type and self.return_semantics is SEM_GIVE and not self.mutator_veto - and self.args[0].base_type in NON_COPYABLE_WITH_ISL_PREFIX) - - def __repr__(self): - return f"" + and self.first_arg.base_type in NON_COPYABLE_WITH_ISL_PREFIX) # }}} @@ -425,18 +434,18 @@ def on_directive_handle(self, directive, toks, ifpassthru, precedingtoks): # {{{ FunctionData (includes parser) +@dataclass class FunctionData: INVALID_PY_IDENTIFIER_RENAMING_MAP: ClassVar[Mapping[str, str]] = { "2exp": "two_exp" } - def __init__(self, include_dirs: Sequence[str]): - self.classes_to_methods = {} - self.include_dirs = include_dirs - self.seen_c_names = set() + include_dirs: Sequence[str] + classes_to_methods: dict[str, list[Method]] = field(default_factory=dict) + seen_c_names: set[str] = field(default_factory=set) - def get_header_contents(self, fname): + def get_header_contents(self, fname: str): from os.path import join success = False for inc_dir in self.include_dirs: @@ -702,10 +711,10 @@ def parse_decl(self, decl: str): # {{{ get_callback -def get_callback(cb_name, cb): - pre_call = [] - passed_args = [] - post_call = [] +def get_callback(cb_name: str, cb: CallbackArgument): + pre_call: list[str] = [] + passed_args: list[str] = [] + post_call: list[str] = 
[] assert cb.args[-1].name == "user" @@ -845,6 +854,16 @@ def get_callback(cb_name, cb): # {{{ wrapper generator +@dataclass(frozen=True) +class TypeSignature: + arg_types: Sequence[str] + ret_type: str + + @override + def __str__(self) -> str: + return f"({', '.join(self.arg_types)}) -> {self.ret_type}" + + def write_wrapper(outf: TextIO, meth: Method): body: list[str] = [] checks: list[str] = [] @@ -858,7 +877,7 @@ def write_wrapper(outf: TextIO, meth: Method): preamble: list[str] = [] arg_names: list[str] = [] - arg_sigs: list[str] = [] + arg_types: list[str] = [] checks.append("isl_ctx *islpy_ctx = nullptr;") @@ -893,7 +912,7 @@ def write_wrapper(outf: TextIO, meth: Method): preamble.append(get_callback(cb_name, arg)) - arg_sigs.append(f"{arg.name}: Callable") + arg_types.append(f"{arg.name}: Callable") docs.append(":param {name}: callback({args})".format( name=arg.name, args=", ".join( @@ -913,7 +932,7 @@ def write_wrapper(outf: TextIO, meth: Method): else: doc_cls = "int" - arg_sigs.append(f"{arg.name}: {doc_cls}") + arg_types.append(f"{arg.name}: {doc_cls}") elif arg.base_type in ["char", "const char"] and arg.ptr == "*": if arg.semantics is SEM_KEEP: @@ -928,7 +947,7 @@ def _arg_to_const_str(arg: Argument) -> str: input_args.append(f"{_arg_to_const_str(arg)}{arg.base_type} *{arg.name}") - arg_sigs.append(f"{arg.name}: str") + arg_types.append(f"{arg.name}: str") elif arg.base_type in ["int", "isl_bool"] and arg.ptr == "*": if arg.name in ["exact", "tight"]: @@ -990,7 +1009,7 @@ def _arg_to_const_str(arg: Argument) -> str: post_call.append(f"unique_arg_{arg.name}.release();") passed_args.append(f"unique_arg_{arg.name}->m_data") - arg_sigs.append(f"{arg.name}: Val | int") + arg_types.append(f"{arg.name}: Val | int") # }}} @@ -1069,7 +1088,10 @@ def _arg_to_const_str(arg: Argument) -> str: islpy_ctx = {arg.base_type}_get_ctx(arg_{arg.name}.m_data); """) - arg_sigs.append(f"{arg.name}: {to_py_class(arg_cls)}") + if arg.name == "self": + arg_types.append(f"{arg.name}") + else: + arg_types.append(f"{arg.name}: {to_py_class(arg_cls)}") # }}} @@ -1327,9 +1349,7 @@ def _arg_to_const_str(arg: Argument) -> str: inputs=", ".join(input_args), body="\n".join(body))) - sig_str = f"({', '.join(arg_sigs)}) -> {ret_type}" - - return arg_names, "\n".join(docs), sig_str + return arg_names, "\n".join(docs), TypeSignature(arg_types, ret_type) # }}} @@ -1428,25 +1448,25 @@ def write_wrappers(expf, wrapf, methods: Sequence[Method]): if val_versions: # no need to expose C integer versions of things print("SKIP (val version available): {} -> {}".format( - meth, ", ".join(str(s) for s in val_versions))) + meth.c_name, ", ".join(m.c_name for m in val_versions))) continue try: arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) write_exposer(expf, meth, arg_names, doc_str, sig_str) except Undocumented: - undoc.append(str(meth)) + undoc.append(meth) except Retry: arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) write_exposer(expf, meth, arg_names, doc_str, sig_str) except SignatureNotSupported: _, e, _ = sys.exc_info() - print(f"SKIP (sig not supported: {e}): {meth}") + print(f"SKIP (sig not supported: {e}): {meth.c_name}") else: wrapped_isl_functions.add(meth.name) - pass - print("SKIP ({} undocumented methods): {}".format(len(undoc), ", ".join(undoc))) + print("SKIP ({} undocumented methods): {}" + .format(len(undoc), ", ".join(m.c_name for m in undoc))) ADD_VERSIONS = { From e7387d91baffb3a1e27544a6c7f2d9e8960639bc Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 May 2025 11:48:32 
-0500 Subject: [PATCH 25/33] gen_wrap: Fix type annotation for tuple types --- gen_wrap.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gen_wrap.py b/gen_wrap.py index a59ef8a8..33da9b97 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1271,7 +1271,8 @@ def _arg_to_const_str(arg: Argument) -> str: if extra_ret_vals: isl_obj_ret_val = "py::make_tuple({}, {})".format( isl_obj_ret_val, ", ".join(extra_ret_vals)) - ret_type = f"tuple[{', '.join(extra_ret_types)}]" + ret_types = [to_py_class(ret_cls), * extra_ret_types] + ret_type = f"tuple[{', '.join(ret_types)}]" else: ret_type = to_py_class(ret_cls) From 64f4e8461f7cccc4ccad94f08673f6450d29d38a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 May 2025 12:12:17 -0500 Subject: [PATCH 26/33] gen_wrap: annotate callback types --- gen_wrap.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/gen_wrap.py b/gen_wrap.py index 33da9b97..2a51cc8c 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -80,6 +80,12 @@ class SignatureNotSupported(ValueError): # noqa: N818 def to_py_class(cls: str): + if cls == "isl_bool": + return "bool" + + if cls == "int": + return cls + if cls.startswith("isl_"): cls = cls[4:] @@ -864,6 +870,24 @@ def __str__(self) -> str: return f"({', '.join(self.arg_types)}) -> {self.ret_type}" +def get_cb_type_sig(cb: CallbackArgument) -> str: + arg_types: list[str] = [] + + for arg in cb.args: + assert isinstance(arg, Argument) + if arg.name == "user": + continue + + arg_types.append(to_py_class(arg.base_type)) + + if cb.return_base_type == "isl_stat": + ret_type = "None" + else: + ret_type = to_py_class(cb.return_base_type) + + return f"Callable[[{', '.join(arg_types)}], {ret_type}]" + + def write_wrapper(outf: TextIO, meth: Method): body: list[str] = [] checks: list[str] = [] @@ -912,7 +936,7 @@ def write_wrapper(outf: TextIO, meth: Method): preamble.append(get_callback(cb_name, arg)) - arg_types.append(f"{arg.name}: Callable") + arg_types.append(f"{arg.name}: {get_cb_type_sig(arg)}") docs.append(":param {name}: callback({args})".format( name=arg.name, args=", ".join( From 1ef5fd557c0f26e11a3551cc9840da4e3e94747e Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 May 2025 14:16:02 -0500 Subject: [PATCH 27/33] gen_wrap: Refactor wrapper/exposer invocation --- gen_wrap.py | 67 +++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 47 insertions(+), 20 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index 2a51cc8c..545f7739 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1381,7 +1381,14 @@ def _arg_to_const_str(arg: Argument) -> str: # {{{ exposer generator -def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: str): +def write_exposer( + outf: TextIO, + meth: Method, + arg_names: Sequence[str], + doc_str: str, + type_sig: TypeSignature, + class_to_methods: Mapping[str, Sequence[Method]], + ): func_name = f"isl::{meth.cls}_{meth.name}" py_name = meth.name @@ -1402,20 +1409,15 @@ def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: # if meth.is_static: # doc_str = "(static method)\n" + doc_str - if not meth.is_exported: - doc_str = doc_str + ( - "\n\n.. warning::\n\n " - "This function is not part of the officially public isl API. 
" - "Use at your own risk.") - wrap_class = CLASS_MAP.get(meth.cls, meth.cls) newline = "\n" escaped_newline = "\\n" + escaped_doc_str = doc_str.replace(newline, escaped_newline) outf.write(f'wrap_{wrap_class}.def{"_static" if meth.is_static else ""}(' f'"{py_name}", {func_name}{args_str}' - f', py::sig("def {py_name}{sig_str}")' - f', "{py_name}{sig_str}\\n{doc_str.replace(newline, escaped_newline)}"' + f', py::sig("def {py_name}{type_sig}")' + f', "{py_name}{type_sig}\\n{escaped_doc_str}"' ');\n') if meth.name == "get_space": @@ -1451,12 +1453,40 @@ def write_exposer(outf: TextIO, meth: Method, arg_names, doc_str: str, sig_str: # }}} -wrapped_isl_functions = set() +wrapped_isl_functions: set[str] = set() + + +def wrap_and_expose( + meth: Method, + wrapf: TextIO, + expf: TextIO, + class_to_methods: Mapping[str, Sequence[Method]], + ): + arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) + + if not meth.is_exported: + doc_str = doc_str + ( + "\n\n.. warning::\n\n " + "This function is not part of the officially public isl API. " + "Use at your own risk.") + + write_exposer(expf, meth, arg_names, doc_str, sig_str, + class_to_methods=class_to_methods) -def write_wrappers(expf, wrapf, methods: Sequence[Method]): - undoc = [] +def write_wrappers( + expf: TextIO, + wrapf: TextIO, + classes_to_methods: Mapping[str, Sequence[Method]], + classes: Sequence[str], + ): + undoc: list[Method] = [] + methods = [ + m + for cls in classes + for m in classes_to_methods.get(cls, []) + ] for meth in methods: # print "TRY_WRAP:", meth if meth.name.endswith("_si") or meth.name.endswith("_ui"): @@ -1477,13 +1507,13 @@ def write_wrappers(expf, wrapf, methods: Sequence[Method]): continue try: - arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) - write_exposer(expf, meth, arg_names, doc_str, sig_str) + wrap_and_expose(meth, + wrapf=wrapf, expf=expf, class_to_methods=classes_to_methods) except Undocumented: undoc.append(meth) except Retry: - arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) - write_exposer(expf, meth, arg_names, doc_str, sig_str) + wrap_and_expose(meth, + wrapf=wrapf, expf=expf, class_to_methods=classes_to_methods) except SignatureNotSupported: _, e, _ = sys.exc_info() print(f"SKIP (sig not supported: {e}): {meth.c_name}") @@ -1607,11 +1637,8 @@ def gen_wrapper(include_dirs: Sequence[str], if isl_version is None or ADD_VERSIONS.get(cls) is None or ADD_VERSIONS.get(cls) <= isl_version] + write_wrappers(expf, wrapf, fdata.classes_to_methods, classes) - write_wrappers(expf, wrapf, [ - meth - for cls in classes - for meth in fdata.classes_to_methods.get(cls, [])]) # {{{ add automatic 'self' upcasts From 70a5c83a19a32236c0ccb62117732b7a5ed28f2b Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 May 2025 14:23:31 -0500 Subject: [PATCH 28/33] gen_wrap: Refactor generation of self-upcasts --- gen_wrap.py | 139 +++++++++++++++++++--------------------------------- 1 file changed, 50 insertions(+), 89 deletions(-) diff --git a/gen_wrap.py b/gen_wrap.py index 545f7739..6eb02fc0 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -171,6 +171,10 @@ def is_mutator(self): and not self.mutator_veto and self.first_arg.base_type in NON_COPYABLE_WITH_ISL_PREFIX) + def arg_types(self) -> tuple[str, ...]: + return tuple(arg.base_type if isinstance(arg, Argument) else "callable" + for arg in self.args) + # }}} @@ -248,6 +252,18 @@ def is_mutator(self): "options": "ctx", } +AUTO_UPCASTS: Mapping[str, tuple[str, ...]] = { + "pw_aff": ("aff", ), + "union_pw_aff": ("aff", "pw_aff", ), + 
"local_space": ("space", ), + "pw_multi_aff": ("multi_aff", ), + "union_pw_multi_aff": ("multi_aff", "pw_multi_aff", ), + "set": ("basic_set", ), + "union_set": ("basic_set", "set", ), + "map": ("basic_map", ), + "union_map": ("basic_map", "map", ), +} + # }}} @@ -1387,7 +1403,7 @@ def write_exposer( arg_names: Sequence[str], doc_str: str, type_sig: TypeSignature, - class_to_methods: Mapping[str, Sequence[Method]], + meth_to_overloads: dict[tuple[str, str], list[Method]], ): func_name = f"isl::{meth.cls}_{meth.name}" py_name = meth.name @@ -1450,6 +1466,32 @@ def write_exposer( f' isl::handle_isl_error(ctx, "isl_{meth.cls}_read_from_str");' '}, py::arg("s"), py::arg("context").none(true)=py::none());\n') + if not meth.is_static: + for basic_cls in AUTO_UPCASTS.get(meth.cls, []): + basic_overloads = meth_to_overloads.setdefault((basic_cls, meth.name), []) + if any(basic_meth + for basic_meth in basic_overloads + if (basic_meth.is_static + or meth.arg_types()[1:] == basic_meth.arg_types()[1:]) + ): + continue + + basic_overloads.append(meth) + + upcast_doc_str = (f"{doc_str}\n\nUpcast from " + f":class:`{to_py_class(basic_cls)}` to " + f":class:`{to_py_class(meth.cls)}`.") + escaped_doc_str = upcast_doc_str.replace(newline, escaped_newline) + outf.write(f"// automatic upcast to {meth.cls}\n") + outf.write(f'wrap_{basic_cls}.def(' + # Do not be tempted to pass 'arg_str' here, it will + # prevent implicit conversion. + # https://github.com/wjakob/nanobind/issues/1061 + f'"{py_name}", {func_name}' + f', py::sig("def {py_name}{type_sig}")' + f', "{py_name}{type_sig}\\n{escaped_doc_str}"' + ');\n') + # }}} @@ -1460,7 +1502,7 @@ def wrap_and_expose( meth: Method, wrapf: TextIO, expf: TextIO, - class_to_methods: Mapping[str, Sequence[Method]], + meth_to_overloads: dict[tuple[str, str], list[Method]], ): arg_names, doc_str, sig_str = write_wrapper(wrapf, meth) @@ -1471,7 +1513,7 @@ def wrap_and_expose( "Use at your own risk.") write_exposer(expf, meth, arg_names, doc_str, sig_str, - class_to_methods=class_to_methods) + meth_to_overloads=meth_to_overloads) def write_wrappers( @@ -1487,6 +1529,9 @@ def write_wrappers( for cls in classes for m in classes_to_methods.get(cls, []) ] + meth_to_overloads = { + (m.cls, m.name): [m] for m in methods + } for meth in methods: # print "TRY_WRAP:", meth if meth.name.endswith("_si") or meth.name.endswith("_ui"): @@ -1508,12 +1553,12 @@ def write_wrappers( try: wrap_and_expose(meth, - wrapf=wrapf, expf=expf, class_to_methods=classes_to_methods) + wrapf=wrapf, expf=expf, meth_to_overloads=meth_to_overloads) except Undocumented: undoc.append(meth) except Retry: wrap_and_expose(meth, - wrapf=wrapf, expf=expf, class_to_methods=classes_to_methods) + wrapf=wrapf, expf=expf, meth_to_overloads=meth_to_overloads) except SignatureNotSupported: _, e, _ = sys.exc_info() print(f"SKIP (sig not supported: {e}): {meth.c_name}") @@ -1533,56 +1578,6 @@ def write_wrappers( } -upcasts = {} - - -def add_upcasts(basic_class, special_class, fmap, expf): - - def my_ismethod(method): - if method.name.endswith("_si") or method.name.endswith("_ui"): - return False - - if method.name not in wrapped_isl_functions: - return False - - if method.is_static: - return False - - return True - - expf.write(f"\n// {{{{{{ Upcasts from {basic_class} to {special_class}\n\n") - - for special_method in fmap[special_class]: - if not my_ismethod(special_method): - continue - - found = False - - for basic_method in fmap[basic_class]: - if basic_method.name == special_method.name: - found = True - break - - if 
found: - if not my_ismethod(basic_method): - continue - - else: - if (basic_class in upcasts - and special_method.name in upcasts[basic_class]): - continue - - upcasts.setdefault(basic_class, []).append(special_method.name) - - doc_str = (f'"\\n\\nUpcast from :class:`{to_py_class(basic_class)}`' - + f' to :class:`{to_py_class(special_class)}`\\n"') - - expf.write(f'wrap_{basic_class}.def("{special_method.name}", ' - f"isl::{special_class}_{special_method.name}, {doc_str});\n") - - expf.write("\n// }}}\n\n") - - def gen_wrapper(include_dirs: Sequence[str], *, output_dir: str | None = None, @@ -1639,40 +1634,6 @@ def gen_wrapper(include_dirs: Sequence[str], or ADD_VERSIONS.get(cls) <= isl_version] write_wrappers(expf, wrapf, fdata.classes_to_methods, classes) - - # {{{ add automatic 'self' upcasts - - # note: automatic upcasts for method arguments are provided through - # 'implicitly_convertible'. - - if part == "part1": - add_upcasts("aff", "pw_aff", fdata.classes_to_methods, expf) - add_upcasts("pw_aff", "union_pw_aff", fdata.classes_to_methods, expf) - add_upcasts("aff", "union_pw_aff", fdata.classes_to_methods, expf) - - add_upcasts("space", "local_space", fdata.classes_to_methods, expf) - - add_upcasts("multi_aff", "pw_multi_aff", fdata.classes_to_methods, expf) - add_upcasts("pw_multi_aff", "union_pw_multi_aff", - fdata.classes_to_methods, expf) - add_upcasts("multi_aff", "union_pw_multi_aff", - fdata.classes_to_methods, expf) - - elif part == "part2": - add_upcasts("basic_set", "set", fdata.classes_to_methods, expf) - add_upcasts("set", "union_set", fdata.classes_to_methods, expf) - add_upcasts("basic_set", "union_set", fdata.classes_to_methods, expf) - - add_upcasts("basic_map", "map", fdata.classes_to_methods, expf) - add_upcasts("map", "union_map", fdata.classes_to_methods, expf) - add_upcasts("basic_map", "union_map", fdata.classes_to_methods, expf) - - elif part == "part3": - # empty - pass - - # }}} - expf.close() wrapf.close() From 29d0d6f3c140c029d961ee765de58d11aac014d8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 29 May 2025 17:06:43 -0500 Subject: [PATCH 29/33] gen_wrap: let stubs allow for automatic upcasts --- gen_wrap.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/gen_wrap.py b/gen_wrap.py index 6eb02fc0..d614eb87 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1131,7 +1131,12 @@ def _arg_to_const_str(arg: Argument) -> str: if arg.name == "self": arg_types.append(f"{arg.name}") else: - arg_types.append(f"{arg.name}: {to_py_class(arg_cls)}") + acceptable_arg_classes = ( + arg_cls, + *AUTO_UPCASTS.get(arg_cls, ())) + arg_annotation = " | ".join( + to_py_class(ac) for ac in acceptable_arg_classes) + arg_types.append(f"{arg.name}: {arg_annotation}") # }}} From 427ecf51a419500854ad95fea4d3bc4d23b49271 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sat, 31 May 2025 13:16:30 -0500 Subject: [PATCH 30/33] gen_wrap: Reject unexpected self classes --- gen_wrap.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gen_wrap.py b/gen_wrap.py index d614eb87..c4bfcad4 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -926,6 +926,13 @@ def write_wrapper(outf: TextIO, meth: Method): arg = meth.args[arg_idx] arg_names.append(arg.name) + if (arg_idx == 0 + and not meth.is_static + and isinstance(arg, Argument) + and arg.base_type.startswith("isl_") + and arg.base_type[4:] != meth.cls): + raise Undocumented(f"unexpected self class: {meth.c_name}") + if isinstance(arg, CallbackArgument): has_userptr = ( arg_idx + 1 < len(meth.args) From 
c63a7e598e7b6619903dc24a7b8f0c63439c1074 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sat, 31 May 2025 13:16:52 -0500 Subject: [PATCH 31/33] gen_wrap: get_dim_name can return None --- gen_wrap.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gen_wrap.py b/gen_wrap.py index c4bfcad4..0e81e564 100644 --- a/gen_wrap.py +++ b/gen_wrap.py @@ -1359,7 +1359,10 @@ def _arg_to_const_str(arg: Argument) -> str: if meth.return_semantics is SEM_GIVE: body.append("free(result);") - ret_type = "str" + if meth.name == "get_dim_name": + ret_type = "str | None" + else: + ret_type = "str" elif (meth.return_base_type == "void" and meth.return_ptr == "*" From a1b58b81363ca036bf405b2f73859148d758283f Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Jun 2025 13:19:20 -0500 Subject: [PATCH 32/33] Add some types in islpy.__init__ --- doc/reference.rst | 1 + islpy/__init__.py | 60 +++++++++++++++++++++++++++++++---------------- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/doc/reference.rst b/doc/reference.rst index 02ff7e23..1150f920 100644 --- a/doc/reference.rst +++ b/doc/reference.rst @@ -136,6 +136,7 @@ Output Helper functions ^^^^^^^^^^^^^^^^ +.. autoclass:: AlignableT .. autofunction:: align_spaces .. autofunction:: align_two diff --git a/islpy/__init__.py b/islpy/__init__.py index 19cf3146..2ada80b0 100644 --- a/islpy/__init__.py +++ b/islpy/__init__.py @@ -20,10 +20,10 @@ THE SOFTWARE. """ -from typing import Literal +from collections.abc import Collection, Sequence +from typing import Literal, TypeAlias, TypeVar -import islpy._isl as _isl # noqa: F401 -from islpy.version import VERSION, VERSION_TEXT # noqa +from islpy.version import VERSION, VERSION_TEXT __version__ = VERSION_TEXT @@ -126,6 +126,19 @@ # }}} +# {{{ typing helpers + +Alignable: TypeAlias = ( + Space + | Set | Map + | BasicSet | BasicMap + | Aff | PwAff +) +AlignableT = TypeVar("AlignableT", bound=Alignable) + +# }}} + + DEFAULT_CONTEXT = Context() @@ -165,8 +178,14 @@ def _set_dim_id(obj, dt, idx, id): return _back_to_basic(obj.set_dim_id(dt, idx, id), obj) -def _align_dim_type(template_dt, obj, template, obj_bigger_ok, obj_names, - template_names): +def _align_dim_type( + template_dt: dim_type, + obj: AlignableT, + template: AlignableT, + obj_bigger_ok: bool, + obj_names: Collection[str], + template_names: Collection[str], + ) -> AlignableT: # {{{ deal with Aff, PwAff @@ -246,7 +265,11 @@ def _align_dim_type(template_dt, obj, template, obj_bigger_ok, obj_names, return obj -def align_spaces(obj, template, obj_bigger_ok=False, across_dim_types=None): +def align_spaces( + obj: AlignableT, + template: Alignable, + obj_bigger_ok: bool = False, + ) -> AlignableT: """ Try to make the space in which *obj* lives the same as that of *template* by adding/matching named dimensions. @@ -255,12 +278,6 @@ def align_spaces(obj, template, obj_bigger_ok=False, across_dim_types=None): has more dimensions than *template*. """ - if across_dim_types is not None: - from warnings import warn - warn("across_dim_types is deprecated and should no longer be used. 
" - "It never had any effect anyway.", - DeprecationWarning, stacklevel=2) - have_any_param_domains = ( isinstance(obj, (Set, BasicSet)) and isinstance(template, (Set, BasicSet)) @@ -295,24 +312,25 @@ def align_spaces(obj, template, obj_bigger_ok=False, across_dim_types=None): return obj -def align_two(obj1, obj2, across_dim_types=None): +def align_two( + obj1: AlignableT, + obj2: AlignableT, + ) -> tuple[AlignableT, AlignableT]: """Align the spaces of two objects, potentially modifying both of them. See also :func:`align_spaces`. """ - if across_dim_types is not None: - from warnings import warn - warn("across_dim_types is deprecated and should no longer be used. " - "It never had any effect anyway.", - DeprecationWarning, stacklevel=2) - obj1 = align_spaces(obj1, obj2, obj_bigger_ok=True) obj2 = align_spaces(obj2, obj1, obj_bigger_ok=True) return (obj1, obj2) -def make_zero_and_vars(set_vars, params=(), ctx=None): +def make_zero_and_vars( + set_vars: Sequence[str], + params: Sequence[str] = (), + ctx: Context | None = None + ) -> dict[str | Literal[0], PwAff]: """ :arg set_vars: an iterable of variable names, or a comma-separated string :arg params: an iterable of variable names, or a comma-separated string @@ -394,6 +412,8 @@ def affs_from_space(space: Space) -> dict[Literal[0] | str, PwAff]: __all__ = ( + "VERSION", + "VERSION_TEXT", "AccessInfo", "Aff", "AffList", From 5ca23cb20fee5c50ae020f73be23d1dd4ad22fd8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sun, 1 Jun 2025 14:52:35 -0500 Subject: [PATCH 33/33] Add a basedpyright CI --- .basedpyright/baseline.json | 214 ++++++++++++++++++++++++++++++++++++ .github/workflows/ci.yml | 16 +++ 2 files changed, 230 insertions(+) create mode 100644 .basedpyright/baseline.json diff --git a/.basedpyright/baseline.json b/.basedpyright/baseline.json new file mode 100644 index 00000000..032b1581 --- /dev/null +++ b/.basedpyright/baseline.json @@ -0,0 +1,214 @@ +{ + "files": { + "./islpy/_isl.pyi": [ + { + "code": "reportAssignmentType", + "range": { + "startColumn": 13, + "endColumn": 15, + "lineCount": 1 + } + }, + { + "code": "reportAssignmentType", + "range": { + "startColumn": 13, + "endColumn": 15, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportIncompatibleMethodOverride", + "range": { + "startColumn": 8, + "endColumn": 14, + "lineCount": 1 + } + }, + { + "code": "reportIncompatibleMethodOverride", + "range": { + "startColumn": 8, + "endColumn": 14, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 19, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 38, + "lineCount": 1 + 
} + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 37, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 37, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 36, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 23, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 22, + "lineCount": 1 + } + }, + { + "code": "reportOverlappingOverload", + "range": { + "startColumn": 8, + "endColumn": 12, + "lineCount": 1 + } + } + ] + } +} \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd2d57e1..b826cec0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,6 +41,22 @@ jobs: curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh . ./build-and-test-py-project.sh + basedpyright: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: "Main Script" + run: | + curl -L -O https://tiker.net/ci-support-v0 + . ./ci-support-v0 + build_py_project_in_venv + pip install nanobind typing-extensions basedpyright + (cd stubgen; python stubgen.py) + basedpyright islpy/_isl.pyi + examples: name: Examples Linux on Py${{ matrix.python-version }} runs-on: ubuntu-latest