Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
26010da
Removable castable field
gramalingam Nov 7, 2025
e4d8b8f
Prepare to replace Variable by ir.Value
gramalingam Nov 7, 2025
c8708ef
Create ir.Values, first pass
gramalingam Nov 8, 2025
2738841
A minor fix
gramalingam Nov 8, 2025
0e8efef
Various bug fixes
gramalingam Nov 8, 2025
a577f1a
Final cleanup
gramalingam Nov 8, 2025
dbda8e4
Fix matmul fusion testcase
gramalingam Nov 8, 2025
85937ed
First updates to builder
gramalingam Nov 8, 2025
9534b95
Create nodes
gramalingam Nov 11, 2025
957be40
Adding ir Graph step 1
gramalingam Nov 15, 2025
09a0c16
IRFunction cleanup docstring
gramalingam Nov 15, 2025
1d73d52
Further partial cleanup of IR
gramalingam Nov 15, 2025
ddce3b3
Ir builder cleanup
gramalingam Nov 15, 2025
7b8cd9b
IR cleanup
gramalingam Nov 15, 2025
fae6609
IR builder cleanup
gramalingam Nov 15, 2025
6b87407
IR builder cleanup
gramalingam Nov 15, 2025
0047814
Fix attribute error
gramalingam Nov 15, 2025
1917af1
Default value for attr parameter
gramalingam Nov 16, 2025
6b01c98
Fix to-graph-proto
gramalingam Nov 16, 2025
74ca996
Opset imports
gramalingam Nov 16, 2025
d855c2c
Minor fix
gramalingam Nov 16, 2025
4372082
Run lint
gramalingam Nov 16, 2025
cf1c12a
More cleanup
gramalingam Nov 16, 2025
29ec139
More cleanup
gramalingam Nov 17, 2025
95a76a2
More cleanup
gramalingam Nov 17, 2025
053e7c5
Remove unused
gramalingam Nov 17, 2025
46239f1
Minor cleanup
gramalingam Nov 17, 2025
f7eb6d1
Merge branch 'main' into rama/converter
gramalingam Nov 17, 2025
dda7977
Move to_model_proto
gramalingam Nov 17, 2025
4527f92
More cleanup
gramalingam Nov 17, 2025
fd3c14a
More cleanup
gramalingam Nov 17, 2025
09bc3d3
More cleanup
gramalingam Nov 17, 2025
5e17ae1
Address lint warning
gramalingam Nov 17, 2025
3e4b59c
Remove unused code
gramalingam Nov 17, 2025
cdfc749
More cleanup
gramalingam Nov 17, 2025
a46dcc2
Fix lint issue
gramalingam Nov 17, 2025
e0281a3
Add support for type annotation
gramalingam Nov 19, 2025
c7d1eb5
ir builder cleanup
gramalingam Nov 19, 2025
fa95a93
Cleanup IRBuilder
gramalingam Nov 20, 2025
86e10dd
More cleanup
gramalingam Nov 20, 2025
62627cb
Cleanup irbuilder
gramalingam Nov 26, 2025
811bb0a
minor fixes
gramalingam Dec 19, 2025
4f97263
Address PR feedback
gramalingam Dec 19, 2025
ac77c92
Address PR feedback
gramalingam Dec 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 7 additions & 10 deletions onnxscript/_internal/autocast.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,30 +189,27 @@ def get_type_info(x):
def static_cast_inputs(
converter_: converter.Converter,
op_schema: Optional[OpSchema],
args: Sequence[Optional[converter.Variable]],
args: Sequence[Optional[ir.Value]],
) -> tuple[str, ...]:
"""Used for autocast during script-translation.
This is meant to transform expressions like "Add(X, 1)" to "Add(X, CastLike(1, X))"
Polymorphic constants (like 0 and 1) are cast to the type of other operands as needed.
"""

def get_type_info(x: Optional[converter.Variable]) -> Optional[converter.Variable]:
def get_type_info(x: Optional[ir.Value]) -> Optional[ir.Value]:
"""Returns x back if x can serve as the target-type for a cast (as the second
argument of CastLike) and None otherwise. In the expression "Add(X, 1), 1 is
castable, while X can serve as the target-type.
"""
return None if x is None or x.is_castable else x
return None if x is None or converter_.is_castable(x.name) else x

def cast_like(
x: Optional[converter.Variable], y: Optional[converter.Variable]
) -> Optional[str]:
def cast_like(x: Optional[ir.Value], y: Optional[ir.Value]) -> Optional[str]:
if x is None:
return None
if x.is_castable and y is not None:
if converter_.is_castable(x.name) and y is not None:
# Polymorphic constant x is cast to the type of y:
x_cast = converter_.generate_unique_name(f"{x.name}_cast")
converter_.emit([x_cast], "CastLike", [x.name, y.name])
return x_cast
return x.name
return converter_.emit1([x_cast], "CastLike", [x, y])
return x

return cast_inputs(get_type_info, cast_like, op_schema, args)
43 changes: 43 additions & 0 deletions onnxscript/_internal/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

import numpy as np
import onnx
import onnx_ir as ir

from onnxscript import tensor

Expand Down Expand Up @@ -87,6 +88,41 @@
raise ValueError(f"Value of type {type(val)} is invalid as an ONNX input/output.")


def value_to_type(val):
    """Return a ``(type, shape)`` pair describing a python-value.

    The first element is an ``ir.TypeProtocol`` (``ir.TensorType`` or
    ``ir.SequenceType``) and the second is the value's shape (a sequence of
    dims, ``[]`` for scalars, or ``None`` when the shape cannot be inferred,
    e.g. for an empty list).

    Raises:
        ValueError: if ``val`` has no suitable ONNX type representation.
    """
    if isinstance(val, (np.ndarray, tensor.Tensor)):
        elem_type = onnx.helper.np_dtype_to_tensor_dtype(val.dtype)  # noqa: TID251
        shape = val.shape
        return (ir.TensorType(elem_type), shape)
    elif isinstance(val, int):
        # Plain python ints are mapped to INT32 scalars.
        return (ir.TensorType(onnx.TensorProto.INT32), [])
    elif isinstance(val, (float, np.float32)):
        return (ir.TensorType(onnx.TensorProto.FLOAT), [])
    elif isinstance(val, list):
        if len(val) > 0:
            # Infer the sequence element type from the first element.
            # (Renamed from `type` to avoid shadowing the builtin; also fixes
            # the CodeQL "potentially uninitialized local" alert.)
            elem_type, shape = value_to_type(val[0])
            return ir.SequenceType(elem_type), shape
        # Edge-case. Cannot determine a suitable ONNX type for an empty list.
        # Should be using a typed-value instead.
        # Treated as a sequence of tensors of float-type.
        return ir.SequenceType(ir.TensorType(onnx.TensorProto.FLOAT)), None
    elif isinstance(val, numbers.Number):
        # Fallback for other numeric types (e.g. np.int64): infer dtype via numpy.
        nparray = np.array(val)
        elem_type = onnx.helper.np_dtype_to_tensor_dtype(nparray.dtype)  # noqa: TID251
        return ir.TensorType(elem_type), []
    raise ValueError(f"Value of type {type(val)} is invalid as an ONNX input/output.")

Check failure

Code scanning / CodeQL

Potentially uninitialized local variable Error

Local variable 'type' may be used before it is initialized.

Copilot Autofix

AI 2 days ago

In general, this class of problem is fixed by ensuring that any local variable is always initialized before being read, or by removing/renaming suspicious locals so the analysis is straightforward. Here, the simplest, behavior‑preserving fix is to avoid using the name type as a local variable in value_to_type. That variable is only used as the element type for a sequence (ir.SequenceType(type)), and analogous code elsewhere uses names like elem_type. We can rename the variable to elem_type and update its single use. This does not change logic or outputs, it only clarifies intent and ensures static analysis no longer thinks there is a potentially uninitialized local.

Concretely, in onnxscript/_internal/utils.py within value_to_type, in the elif isinstance(val, list) branch, update the line type, shape = value_to_type(val[0]) to elem_type, shape = value_to_type(val[0]) and the following return ir.SequenceType(type), shape to return ir.SequenceType(elem_type), shape. No imports or additional definitions are required, and no other parts of the file need to change.

Suggested changeset 1
onnxscript/_internal/utils.py

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/onnxscript/_internal/utils.py b/onnxscript/_internal/utils.py
--- a/onnxscript/_internal/utils.py
+++ b/onnxscript/_internal/utils.py
@@ -104,8 +104,8 @@
         return (ir.TensorType(elem_type), shape)
     elif isinstance(val, list):
         if len(val) > 0:
-            type, shape = value_to_type(val[0])
-            return ir.SequenceType(type), shape
+            elem_type, shape = value_to_type(val[0])
+            return ir.SequenceType(elem_type), shape
         # Edge-case. Cannot determine a suitable ONNX type for an empty list.
         # Should be using a typed-value instead.
         # Treated as a sequence of tensors of float-type.
EOF
@@ -104,8 +104,8 @@
return (ir.TensorType(elem_type), shape)
elif isinstance(val, list):
if len(val) > 0:
type, shape = value_to_type(val[0])
return ir.SequenceType(type), shape
elem_type, shape = value_to_type(val[0])
return ir.SequenceType(elem_type), shape
# Edge-case. Cannot determine a suitable ONNX type for an empty list.
# Should be using a typed-value instead.
# Treated as a sequence of tensors of float-type.
Copilot is powered by AI and may make mistakes. Always verify output.
Unable to commit as this autofix suggestion is now outdated


def value_to_ir_value(name: str, val) -> ir.Value:
    """Return an ir.Value representation of a python-value.

    Args:
        name: the name to assign to the resulting ir.Value.
        val: a python-value convertible by ``value_to_type``.

    Raises:
        ValueError: if ``val`` has no suitable ONNX type representation
            (propagated from ``value_to_type``).
    """
    # Renamed from `type` to avoid shadowing the builtin.
    value_type, shape = value_to_type(val)
    return ir.Value(name=name, type=value_type, shape=shape)


def values_to_value_infos(name_values):
"""Create a list of ValueInfoProto from a list of (name, value) pairs,
skipping any None values.
Expand All @@ -96,3 +132,10 @@
for (name, val) in name_values
if val is not None
]


def values_to_ir_values(name_values):
    """Convert ``(name, value)`` pairs into a list of ir.Value objects.

    Pairs whose value is ``None`` are skipped.
    """
    return [
        value_to_ir_value(name, value)
        for name, value in name_values
        if value is not None
    ]
Loading
Loading