5 changes: 4 additions & 1 deletion infra/ansible/ansible.cfg
@@ -9,8 +9,11 @@ callbacks_enabled = profile_tasks
localhost_warning = False
# Make output human-readable.
stdout_callback = yaml
# Ansible 2.19 requires this option to be set so that string variables can
# still be used as booleans in conditionals.
allow_broken_conditionals = true

[inventory]
# Silence warning about no inventory.
# This option is available since Ansible 2.14 (available only with Python 3.9+).
inventory_unparsed_warning = False
inventory_unparsed_warning = False
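
For context, a minimal sketch (hypothetical, not part of this PR) of the kind of conditional the new setting keeps working: `enable_feature` holds a string rather than a native boolean, and under Ansible 2.19's stricter conditional evaluation it would be rejected unless `allow_broken_conditionals` is enabled or the value is cast with `| bool`.

```yaml
# Hypothetical playbook snippet, not from this repository: a string-typed
# variable used directly in `when:`. With Ansible 2.19 defaults this is
# rejected as a "broken" (non-boolean) conditional; with
# allow_broken_conditionals = true it evaluates as in earlier releases.
- hosts: localhost
  gather_facts: false
  vars:
    enable_feature: "true"   # a string, not a boolean
  tasks:
    - name: Runs only when enable_feature is truthy
      ansible.builtin.debug:
        msg: "feature enabled"
      when: enable_feature
```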
7 changes: 7 additions & 0 deletions test/stablehlo/test_unbounded_dynamism.py
@@ -25,6 +25,7 @@

class UnboundedDynamismExportTest(unittest.TestCase):

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_add(self):
args = (torch.rand((10, 197, 768)), torch.rand((10, 197, 768)))
dynamic_shapes = (({0: Dim("dim")}, {0: Dim("dim")}),)
@@ -78,6 +79,7 @@ def test_addmm(self):
# Hit stablehlo.dot shape refinement error when inferencing saved_model in TF.
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_bmm(self):
args = (
torch.rand((24, 197, 64)),
@@ -99,6 +101,7 @@ def test_bmm(self):
self.assertTrue(os.path.exists(os.path.join(tempdir, 'saved_model.pb')))
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_bmm_dynamic_out_dim(self):
args = (
torch.rand((8, 128, 256)),
@@ -141,6 +144,7 @@ def test_bmm_dynamic_reduction_dim(self):
self.assertTrue(os.path.exists(os.path.join(tempdir, 'saved_model.pb')))
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_cat(self):
args = (torch.rand((10, 1, 768)), torch.rand((10, 196, 768)))
dynamic_shapes = (({0: Dim("dim")}, {0: Dim("dim")}),)
@@ -240,6 +244,7 @@ def test_cumsum(self):
self.assertTrue(os.path.exists(os.path.join(tempdir, 'saved_model.pb')))
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_div(self):
args = (torch.rand((10, 12, 197)), torch.rand((10, 12, 197)))
dynamic_shapes = (({0: Dim("dim")}, {0: Dim("dim")}),)
@@ -340,6 +345,7 @@ def forward(self, x):
self.assertTrue(os.path.exists(os.path.join(tempdir, 'saved_model.pb')))
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_mul(self):
args = (torch.rand((10, 2, 768)), torch.rand((10, 2, 768)))
dynamic_shapes = (({0: Dim("dim")}, {0: Dim("dim")}),)
@@ -571,6 +577,7 @@ def test_softmax(self):
self.assertTrue(os.path.exists(os.path.join(tempdir, 'saved_model.pb')))
compare_exported_program_and_saved_model_result(ep, tempdir, args)

@unittest.skip("https://github.com/pytorch/xla/issues/9637")
def test_sub(self):
args = (torch.rand((10, 1, 1, 10)), torch.rand((10, 1, 1, 10)))
dynamic_shapes = (({0: Dim("dim")}, {0: Dim("dim")}),)