python.data-structure
=========================

dictionary
^^^^^^^^^^

.. note::

    Tags: :doc:`python.data-structure <python.data-structure>`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    import torch


    def dictionary(x, y):
        """
        Dictionary structures are inlined and flattened along tracing.
        """
        elements = {}
        elements["x2"] = x * x
        y = y * elements["x2"]
        return {"y": y}

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, l_x_: "f32[3, 2]", l_y_: "i64[]"):
                mul: "f32[3, 2]" = torch.ops.aten.mul.Tensor(l_x_, l_x_); l_x_ = None
                mul_1: "f32[3, 2]" = torch.ops.aten.mul.Tensor(l_y_, mul); l_y_ = mul = None
                return (mul_1,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_x_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_y_'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='mul_1'), target=None)])
    Range constraints: {}
    Equality constraints: []
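The result above can be reproduced with ``torch.export.export``. The sketch below is an illustration rather than part of the generated example: the wrapper class name, the sample inputs (a ``f32[3, 2]`` tensor and a scalar ``i64`` tensor, chosen to match the traced signature above), and the assumption of a recent PyTorch where ``export`` takes an ``nn.Module`` are all ours.

.. code-block:: python

    import torch


    class DictionaryExample(torch.nn.Module):  # hypothetical wrapper, not from ExportDB
        def forward(self, x, y):
            elements = {}                  # the dict is a purely Python-level container
            elements["x2"] = x * x
            y = y * elements["x2"]
            return {"y": y}                # flattened to a single tensor output in the graph


    example_inputs = (torch.randn(3, 2), torch.tensor(4))
    ep = torch.export.export(DictionaryExample(), example_inputs)
    print(ep)                              # no dict construction appears in the graph
    print(ep.module()(*example_inputs))    # output is re-packed into {'y': tensor(...)}

The dictionary only exists at the pytree boundary: the exported graph carries flat tensor inputs and outputs, while ``ep.module()`` re-packs the result into ``{"y": ...}`` for the caller.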
fn_with_kwargs
^^^^^^^^^^^^^^

.. note::

    Tags: :doc:`python.data-structure <python.data-structure>`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    import torch


    def fn_with_kwargs(pos0, tuple0, *myargs, mykw0, **mykwargs):
        """
        Keyword arguments are not supported at the moment.
        """
        out = pos0
        for arg in tuple0:
            out = out * arg
        for arg in myargs:
            out = out * arg
        out = out * mykw0
        out = out * mykwargs["input0"] * mykwargs["input1"]
        return out

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, out: "f32[4]", arg: "f32[4]", arg_1: "f32[4]", arg_2: "f32[4]", arg_3: "f32[4]", l_mykw0_: "f32[4]", l_mykwargs_input0_: "f32[4]", l_mykwargs_input1_: "f32[4]"):
                mul: "f32[4]" = torch.ops.aten.mul.Tensor(out, arg); out = arg = None
                mul_1: "f32[4]" = torch.ops.aten.mul.Tensor(mul, arg_1); mul = arg_1 = None
                mul_2: "f32[4]" = torch.ops.aten.mul.Tensor(mul_1, arg_2); mul_1 = arg_2 = None
                mul_3: "f32[4]" = torch.ops.aten.mul.Tensor(mul_2, arg_3); mul_2 = arg_3 = None
                mul_4: "f32[4]" = torch.ops.aten.mul.Tensor(mul_3, l_mykw0_); mul_3 = l_mykw0_ = None
                mul_5: "f32[4]" = torch.ops.aten.mul.Tensor(mul_4, l_mykwargs_input0_); mul_4 = l_mykwargs_input0_ = None
                mul_6: "f32[4]" = torch.ops.aten.mul.Tensor(mul_5, l_mykwargs_input1_); mul_5 = l_mykwargs_input1_ = None
                return (mul_6,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='out'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='arg'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='arg_1'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='arg_2'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='arg_3'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_mykw0_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_mykwargs_input0_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_mykwargs_input1_'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='mul_6'), target=None)])
    Range constraints: {}
    Equality constraints: []

list_contains
^^^^^^^^^^^^^

.. note::

    Tags: :doc:`torch.dynamic-shape <torch.dynamic-shape>`, :doc:`python.data-structure <python.data-structure>`, :doc:`python.assert <python.assert>`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    import torch


    def list_contains(x):
        """
        List containment relation can be checked on a dynamic shape or constants.
        """
        assert x.size(-1) in [6, 2]
        assert x.size(0) not in [4, 5, 6]
        assert "monkey" not in ["cow", "pig"]
        return x + x

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, l_x_: "f32[3, 2]"):
                add: "f32[3, 2]" = torch.ops.aten.add.Tensor(l_x_, l_x_); l_x_ = None
                return (add,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_x_'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='add'), target=None)])
    Range constraints: {}
    Equality constraints: []

list_unpack
^^^^^^^^^^^

.. note::

    Tags: :doc:`python.data-structure <python.data-structure>`, :doc:`python.control-flow <python.control-flow>`

    Support Level: SUPPORTED

Original source code:

.. code-block:: python

    from typing import List

    import torch


    def list_unpack(args: List[torch.Tensor]):
        """
        Lists are treated as static construct, therefore unpacking should be
        erased after tracing.
        """
        x, *y = args
        return x + y[0]

Result:

.. code-block::

    ExportedProgram:
        class GraphModule(torch.nn.Module):
            def forward(self, x: "f32[3, 2]", l_args_1_: "i64[]", arg2: "i64[]"):
                add: "f32[3, 2]" = torch.ops.aten.add.Tensor(x, l_args_1_); x = l_args_1_ = None
                return (add,)

    Graph signature: ExportGraphSignature(input_specs=[InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='x'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='l_args_1_'), target=None), InputSpec(kind=<InputKind.USER_INPUT: 1>, arg=TensorArgument(name='arg2'), target=None)], output_specs=[OutputSpec(kind=<OutputKind.USER_OUTPUT: 1>, arg=TensorArgument(name='add'), target=None)])
    Range constraints: {}
    Equality constraints: []
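As an illustration only (not part of the generated example), the ``list_unpack`` behaviour can be checked with ``torch.export.export``. The wrapper class name and the sample list (one ``f32[3, 2]`` tensor plus two scalar ``i64`` tensors, chosen to match the traced signature above) are assumptions.

.. code-block:: python

    from typing import List

    import torch


    class ListUnpackExample(torch.nn.Module):  # hypothetical wrapper, not from ExportDB
        def forward(self, args: List[torch.Tensor]):
            x, *y = args        # plain Python unpacking of a static list
            return x + y[0]


    example_args = ([torch.randn(3, 2), torch.tensor(2), torch.tensor(3)],)
    ep = torch.export.export(ListUnpackExample(), example_args)
    print(ep)  # a single aten.add; the third list element remains only as an unused input

The list itself never appears in the exported graph: ``torch.export`` flattens the container into individual tensor placeholders via pytree, which is why the unpacking is erased after tracing.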