Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
71 changes: 70 additions & 1 deletion Lib/test/test_generated_cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def skip_if_different_mount_drives():

test_tools.skip_if_missing("cases_generator")
with test_tools.imports_under_tool("cases_generator"):
from analyzer import StackItem
from analyzer import StackItem, analyze_forest
from cwriter import CWriter
import parser
from stack import Local, Stack
Expand Down Expand Up @@ -2638,5 +2638,74 @@ def test_replace_opocode_uop_reject_array_effects(self):
"Pure evaluation cannot take array-like inputs"):
self.run_cases_test(input, input2, output)

class TestAnalyzer(unittest.TestCase):
    """Tests for analyzer.add_macro() recording-uop placement rules (gh-148285)."""

    # ---- shared DSL fragments -----------------------------------------------

    _SPECIALIZE_OP = """\
specializing op(_SPECIALIZE_DUMMY, (counter/1, value -- value)) {
}
"""
    _RECORD_OP = """\
op(_RECORD_DUMMY, (value -- value)) {
RECORD_VALUE(PyStackRef_AsPyObjectBorrow(value));
}
"""
    _WORKER_OP = """\
op(_WORKER_DUMMY, (value -- res)) {
res = value;
}
"""

    def _parse_and_analyze(self, dsl: str) -> None:
        """Parse a raw DSL fragment and run analyze_forest() on it."""
        analyze_forest(parse_src(dsl))

    def _build_src(self, macro_line: str) -> str:
        """Return the shared op fragments with *macro_line* appended."""
        return "".join(
            (self._SPECIALIZE_OP, self._RECORD_OP, self._WORKER_OP, macro_line)
        )

    # ---- test cases ---------------------------------------------------------

    def test_recording_uop_after_specializing(self):
        """Valid: recording uop directly follows a specializing uop."""
        dsl = self._build_src(
            "macro(VALID_DIRECT) = _SPECIALIZE_DUMMY + _RECORD_DUMMY + _WORKER_DUMMY;\n"
        )
        # Must not raise.
        self._parse_and_analyze(dsl)

    def test_recording_uop_after_specializing_with_cache(self):
        """Valid: recording uop follows a specializing uop with a cache effect between them.

        CacheEffect entries are transparent — they must not close the gate that
        allows a recording uop to follow a specializing uop.
        """
        dsl = self._build_src(
            "macro(VALID_CACHE) = _SPECIALIZE_DUMMY + unused/1 + _RECORD_DUMMY + _WORKER_DUMMY;\n"
        )
        # Must not raise.
        self._parse_and_analyze(dsl)

    def test_recording_uop_after_non_specializing_raises(self):
        """Invalid: recording uop after a plain (non-specializing) uop must be rejected."""
        dsl = self._build_src(
            "macro(INVALID) = _WORKER_DUMMY + _RECORD_DUMMY;\n"
        )
        with self.assertRaisesRegex(
            SyntaxError,
            r"Recording uop _RECORD_DUMMY must be first in macro "
            r"or immediately follow a specializing uop",
        ):
            self._parse_and_analyze(dsl)


# Allow running this test file directly (outside the regrtest harness).
if __name__ == "__main__":
    unittest.main()
11 changes: 11 additions & 0 deletions Python/optimizer_bytecodes.c
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,10 @@ dummy_func(void) {
}
else {
sym_set_const(owner, type);
if ((((PyTypeObject *)type)->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) == 0) {
PyType_Watch(TYPE_WATCHER_ID, type);
_Py_BloomFilter_Add(dependencies, type);
}
}
}
}
Expand Down Expand Up @@ -1213,6 +1217,13 @@ dummy_func(void) {
(void)framesize;
}

    // Abstract-interpreter pass for the _CHECK_IS_NOT_PY_CALLABLE guard.
    // If the callable's type is statically known (sym_get_type returns
    // non-NULL) and is neither PyFunction_Type nor PyMethod_Type, the op
    // is replaced with a _NOP via ADD_OP — presumably because the runtime
    // guard can then never fail; confirm against the executor's semantics
    // for _CHECK_IS_NOT_PY_CALLABLE. The stack effect is unchanged:
    // callable plus oparg+1 unused slots pass through untouched.
    op(_CHECK_IS_NOT_PY_CALLABLE, (callable, unused, unused[oparg] -- callable, unused, unused[oparg])) {
        PyTypeObject *type = sym_get_type(callable);
        if (type && type != &PyFunction_Type && type != &PyMethod_Type) {
            ADD_OP(_NOP, 0, 0);
        }
    }

op(_PUSH_FRAME, (new_frame -- )) {
SYNC_SP();
if (!CURRENT_FRAME_IS_INIT_SHIM()) {
Expand Down
17 changes: 13 additions & 4 deletions Tools/cases_generator/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1132,7 +1132,11 @@ def add_macro(
macro: parser.Macro, instructions: dict[str, Instruction], uops: dict[str, Uop]
) -> None:
parts: list[Part] = []
first = True
# Tracks the last "plain" uop (neither specializing nor recording).
# CacheEffect, flush, specializing, and recording uops all leave it
# unchanged, so prev_uop being non-None is sufficient to mean
# "a disqualifying uop has been seen before this recording uop".
prev_uop: Uop | None = None
for part in macro.uops:
match part:
case parser.OpName():
Expand All @@ -1144,12 +1148,16 @@ def add_macro(
f"No Uop named {part.name}", macro.tokens[0]
)
uop = uops[part.name]
if uop.properties.records_value and not first:
if uop.properties.records_value and prev_uop is not None:
raise analysis_error(
f"Recording uop {part.name} must be first in macro",
f"Recording uop {part.name} must be first in macro "
f"or immediately follow a specializing uop",
macro.tokens[0])
parts.append(uop)
first = False
# Only plain worker uops set prev_uop; specializing and
# recording uops are excluded so the check above stays simple.
if not uop.properties.records_value and "specializing" not in uop.annotations:
prev_uop = uop
case parser.CacheEffect():
parts.append(Skip(part.size))
case _:
Expand All @@ -1158,6 +1166,7 @@ def add_macro(
add_instruction(macro.first_token, macro.name, parts, instructions)



def add_family(
pfamily: parser.Family,
instructions: dict[str, Instruction],
Expand Down
Loading