test_batch.py

# -*- coding: utf-8 -*-
"""
:codeauthor: Nicole Thomas <nicole@saltstack.com>
"""
from __future__ import absolute_import, print_function, unicode_literals

import pytest

import salt.utils.platform
from tests.support.case import ShellCase


@pytest.mark.windows_whitelisted
@pytest.mark.usefixtures("salt_sub_minion")
class BatchTest(ShellCase):
    """
    Integration tests for the salt.cli.batch module
    """

    if salt.utils.platform.is_windows():
        run_timeout = 180
    else:
        run_timeout = 30

    @pytest.mark.slow_test(seconds=60)  # Test takes >30 and <=60 seconds
    def test_batch_run(self):
        """
        Tests executing a simple batch command to help catch regressions
        """
        ret = "Executing run on [{0}]".format(repr("sub_minion"))
        # -b 50% runs the command on half of the matched minions at a time
        cmd = self.run_salt(
            '"*minion" test.echo "batch testing" -b 50%', timeout=self.run_timeout,
        )
        self.assertIn(ret, cmd)

    @pytest.mark.slow_test(seconds=30)  # Test takes >10 and <=30 seconds
    def test_batch_run_number(self):
        """
        Tests executing a simple batch command using a numeric batch size instead
        of a percentage, with the full --batch-size CLI flag.
        """
        ret = "Executing run on [{0}, {1}]".format(repr("minion"), repr("sub_minion"))
        # --batch-size 2 runs the command on two minions at a time
        cmd = self.run_salt(
            '"*minion" test.ping --batch-size 2', timeout=self.run_timeout,
        )
        self.assertIn(ret, cmd)

    @pytest.mark.slow_test(seconds=60)  # Test takes >30 and <=60 seconds
    def test_batch_run_grains_targeting(self):
        """
        Tests executing a batch command using a percentage divisor as well as grains
        targeting.
        """
        os_grain = ""
        sub_min_ret = "Executing run on [{0}]".format(repr("sub_minion"))
        min_ret = "Executing run on [{0}]".format(repr("minion"))

        # Fetch the "os" grain from the minion; skip the minion ID header line
        # ("minion:") and keep the value that follows.
        for item in self.run_salt("minion grains.get os"):
            if item != "minion:":
                os_grain = item
        os_grain = os_grain.strip()

        cmd = self.run_salt(
            '-C "G@os:{0} and not localhost" -b 25% test.ping'.format(os_grain),
            timeout=self.run_timeout,
        )
        self.assertIn(sub_min_ret, cmd)
        self.assertIn(min_ret, cmd)

    @pytest.mark.slow_test(seconds=120)  # Test takes >60 and <=120 seconds
    def test_batch_exit_code(self):
        """
        Test that a failed state returns a non-zero exit code in batch mode
        """
        cmd = self.run_salt(
            ' "*" state.single test.fail_without_changes name=test_me -b 25%',
            with_retcode=True,
            timeout=self.run_timeout,
        )
        # With with_retcode=True the exit code is the last element of the output
        self.assertEqual(cmd[-1], 2)

    # Test for failhard + batch. The best possible solution here would be
    # something like assertRaises(StopIteration), but that is impossible due to
    # the nature of test execution via fork().
    @pytest.mark.slow_test(seconds=30)  # Test takes >10 and <=30 seconds
    def test_batch_module_stopping_after_error(self):
        """
        Test that a failed command stops the batch run
        """
        minions_list = []
        retcode = None

        # Executing salt with batch: 1 and with failhard. It should stop after
        # the first error.
        cmd = self.run_salt(
            '"*minion" test.retcode 42 -b 1 --out=yaml --failhard',
            timeout=self.run_timeout,
        )

        # Parse the output. The idea is to fetch the number of minions run and
        # the retcode of the execution. The retcode var could be overwritten if
        # failhard is broken, but the minion-count check should still fail.
        for line in cmd:
            if line.startswith("Executing run on"):
                minions_list.append(line)
            if line.startswith("retcode"):
                retcode = line[-1]

        # We expect only one minion to have been run
        self.assertEqual(1, len(minions_list))
        # We expect to find a retcode in the output
        self.assertIsNot(None, retcode)
        # We expect retcode to be non-zero
        self.assertNotEqual(0, retcode)

    @pytest.mark.slow_test(seconds=30)  # Test takes >10 and <=30 seconds
    def test_batch_state_stopping_after_error(self):
        """
        Test that a failed state stops the batch run
        """
        minions_list = []
        retcode = None

        # Executing salt with batch: 1 and with failhard. It should stop after
        # the first error.
        cmd = self.run_salt(
            '"*minion" state.single test.fail_without_changes name=test_me -b 1 --out=yaml --failhard',
            timeout=self.run_timeout,
        )

        # Parse the output. The idea is to fetch the number of minions run and
        # the retcode of the execution. The retcode var could be overwritten if
        # failhard is broken, but the minion-count check should still fail.
        for line in cmd:
            if line.startswith("Executing run on"):
                minions_list.append(line)
            if line.startswith("retcode"):
                retcode = line[-1]

        # We expect only one minion to have been run
        self.assertEqual(1, len(minions_list))
        # We expect to find a retcode in the output
        self.assertIsNot(None, retcode)
        # We expect retcode to be non-zero
        self.assertNotEqual(0, retcode)
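
    # NOTE: the helper below is an illustrative sketch, not part of the original
    # test module. It factors out the output-parsing loop shared by the two
    # "stopping after error" tests above; the method name _parse_batch_output is
    # assumed, and it deliberately preserves the existing parsing behavior
    # (the retcode is captured as the last character of the "retcode: ..." line).
    def _parse_batch_output(self, cmd):
        """
        Return (minions_list, retcode) parsed from batch CLI output lines.
        """
        minions_list = []
        retcode = None
        for line in cmd:
            if line.startswith("Executing run on"):
                minions_list.append(line)
            if line.startswith("retcode"):
                retcode = line[-1]
        return minions_list, retcode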