# -*- coding: utf-8 -*-
"""
Tests for the archive state
"""
from __future__ import absolute_import, print_function, unicode_literals

import errno
import logging
import os

import salt.utils.files
import salt.utils.platform
from tests.support.case import ModuleCase
from tests.support.helpers import Webserver, skip_if_not_root, slowTest
from tests.support.mixins import SaltReturnAssertsMixin
from tests.support.runtests import RUNTIME_VARS

# Setup logging
log = logging.getLogger(__name__)

ARCHIVE_DIR = (
    os.path.join("c:/", "tmp") if salt.utils.platform.is_windows() else "/tmp/archive"
)
ARCHIVE_NAME = "custom.tar.gz"
ARCHIVE_TAR_SOURCE = "http://localhost:{0}/{1}".format(9999, ARCHIVE_NAME)
ARCHIVE_TAR_HASH = "md5=7643861ac07c30fe7d2310e9f25ca514"
ARCHIVE_TAR_SHA_HASH = (
    "sha256=9591159d86f0a180e4e0645b2320d0235e23e66c66797df61508bf185e0ac1d2"
)
ARCHIVE_TAR_BAD_HASH = "md5=d41d8cd98f00b204e9800998ecf8427e"
ARCHIVE_TAR_HASH_UPPER = "md5=7643861AC07C30FE7D2310E9F25CA514"
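
# The tests below exercise the ``archive.extracted`` state by calling it
# directly through ``run_state``. For reference, a minimal SLS equivalent is
# sketched here (illustrative only; the URL and port are placeholders, since
# the real source URL is built from the Webserver helper in setUpClass):
#
#   /tmp/archive:
#     archive.extracted:
#       - source: http://localhost:9999/custom.tar.gz
#       - source_hash: md5=7643861ac07c30fe7d2310e9f25ca514
#       - archive_format: tar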


class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
    """
    Validate the archive state
    """

    @classmethod
    def setUpClass(cls):
        cls.webserver = Webserver()
        cls.webserver.start()
        cls.archive_tar_source = cls.webserver.url("custom.tar.gz")
        cls.archive_local_tar_source = "file://{0}".format(
            os.path.join(RUNTIME_VARS.BASE_FILES, ARCHIVE_NAME)
        )
        cls.untar_file = os.path.join(ARCHIVE_DIR, "custom/README")

    @classmethod
    def tearDownClass(cls):
        cls.webserver.stop()

    def setUp(self):
        self._clear_archive_dir()

    def tearDown(self):
        self._clear_archive_dir()
        try:
            salt.utils.files.rm_rf(
                os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "cache", "archive_hash")
            )
        except OSError:
            # some tests do not create the archive_hash directory
            pass
    @staticmethod
    def _clear_archive_dir():
        try:
            salt.utils.files.rm_rf(ARCHIVE_DIR)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

    def _check_extracted(self, path):
        """
        function to check if file was extracted
        """
        log.debug("Checking for extracted file: %s", path)
        self.assertTrue(os.path.isfile(path))

    def run_function(self, *args, **kwargs):  # pylint: disable=arguments-differ
        ret = super(ArchiveTest, self).run_function(*args, **kwargs)
        log.debug("ret = %s", ret)
        return ret

    def run_state(self, *args, **kwargs):  # pylint: disable=arguments-differ
        ret = super(ArchiveTest, self).run_state(*args, **kwargs)
        log.debug("ret = %s", ret)
        return ret
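
    # NOTE: Tests that pull from the local tornado webserver skip themselves
    # when the returned data contains "Timeout", so a flaky local test server
    # does not show up as a failure of the archive state itself.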

    def test_archive_extracted_skip_verify(self):
        """
        test archive.extracted with skip_verify
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            archive_format="tar",
            skip_verify=True,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    def test_archive_extracted_with_source_hash(self):
        """
        test archive.extracted without skip_verify; an external source is
        needed to ensure source_hash is verified correctly
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_HASH,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    @skip_if_not_root
    def test_archive_extracted_with_root_user_and_group(self):
        """
        test archive.extracted with user and group set to "root"
        """
        r_group = "root"
        if salt.utils.platform.is_darwin():
            r_group = "wheel"
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_HASH,
            user="root",
            group=r_group,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    @slowTest
    def test_archive_extracted_with_strip_in_options(self):
        """
        test archive.extracted with --strip in options
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            source_hash=ARCHIVE_TAR_HASH,
            options="--strip=1",
            enforce_toplevel=False,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, "README"))

    def test_archive_extracted_with_strip_components_in_options(self):
        """
        test archive.extracted with --strip-components in options
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            source_hash=ARCHIVE_TAR_HASH,
            options="--strip-components=1",
            enforce_toplevel=False,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, "README"))

    @slowTest
    def test_archive_extracted_without_archive_format(self):
        """
        test archive.extracted with no archive_format option
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            source_hash=ARCHIVE_TAR_HASH,
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    def test_archive_extracted_with_cmd_unzip_false(self):
        """
        test archive.extracted using use_cmd_unzip argument as false
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_tar_source,
            source_hash=ARCHIVE_TAR_HASH,
            use_cmd_unzip=False,
            archive_format="tar",
        )
        if "Timeout" in ret:
            self.skipTest("Timeout talking to local tornado server.")
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    def test_local_archive_extracted(self):
        """
        test archive.extracted with local file
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    def test_local_archive_extracted_skip_verify(self):
        """
        test archive.extracted with local file, bad hash and skip_verify
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_BAD_HASH,
            skip_verify=True,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    @slowTest
    def test_local_archive_extracted_with_source_hash(self):
        """
        test archive.extracted with local file and valid hash
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_HASH,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    @slowTest
    def test_local_archive_extracted_with_bad_source_hash(self):
        """
        test archive.extracted with local file and bad hash
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_BAD_HASH,
        )
        self.assertSaltFalseReturn(ret)

    def test_local_archive_extracted_with_uppercase_source_hash(self):
        """
        test archive.extracted with local file and uppercase hash
        """
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            source_hash=ARCHIVE_TAR_HASH_UPPER,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

    @slowTest
    def test_archive_extracted_with_non_base_saltenv(self):
        """
        test archive.extracted with a saltenv other than `base`
        """
        ret = self.run_function(
            "state.sls",
            ["issue45893"],
            pillar={"issue45893.name": ARCHIVE_DIR},
            saltenv="prod",
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, self.untar_file))

    @slowTest
    def test_local_archive_extracted_with_skip_files_list_verify(self):
        """
        test archive.extracted with local file and skip_files_list_verify set to True
        """
        expected_comment = (
            "existing source sum is the same as the expected one and "
            "skip_files_list_verify argument was set to True. "
            "Extraction is not needed"
        )
        # Clearing the minion cache at the start to ensure that different tests of
        # skip_files_list_verify won't affect each other
        self.run_function("saltutil.clear_cache")
        self.run_function("saltutil.sync_all")
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            skip_files_list_verify=True,
            source_hash_update=True,
            keep_source=True,
            source_hash=ARCHIVE_TAR_SHA_HASH,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

        # Run the state a second time; with the source kept and the hash
        # unchanged, extraction should be skipped with the expected comment.
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            skip_files_list_verify=True,
            source_hash_update=True,
            keep_source=True,
            source_hash=ARCHIVE_TAR_SHA_HASH,
        )
        self.assertSaltTrueReturn(ret)
        self.assertInSaltComment(expected_comment, ret)

    def test_local_archive_extracted_with_skip_files_list_verify_and_keep_source_is_false(
        self,
    ):
        """
        test archive.extracted with local file, skip_files_list_verify set to True
        and keep_source set to False
        """
        expected_comment = (
            "existing source sum is the same as the expected one and "
            "skip_files_list_verify argument was set to True. "
            "Extraction is not needed"
        )
        # Clearing the minion cache at the start to ensure that different tests of
        # skip_files_list_verify won't affect each other
        self.run_function("saltutil.clear_cache")
        self.run_function("saltutil.sync_all")
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            skip_files_list_verify=True,
            source_hash_update=True,
            keep_source=False,
            source_hash=ARCHIVE_TAR_SHA_HASH,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)

        # Even with keep_source=False, a second run should still detect the
        # matching source hash and skip extraction with the expected comment.
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            skip_files_list_verify=True,
            source_hash_update=True,
            keep_source=False,
            source_hash=ARCHIVE_TAR_SHA_HASH,
        )
        self.assertSaltTrueReturn(ret)
        self.assertInSaltComment(expected_comment, ret)

    @slowTest
    def test_local_archive_extracted_trim_output(self):
        """
        test archive.extracted with local file and trim_output set to 1
        """
        expected_changes = {
            "directories_created": ["/tmp/archive/"],
            "extracted_files": ["custom"],
        }
        ret = self.run_state(
            "archive.extracted",
            name=ARCHIVE_DIR,
            source=self.archive_local_tar_source,
            archive_format="tar",
            skip_files_list_verify=True,
            source_hash_update=True,
            source_hash=ARCHIVE_TAR_SHA_HASH,
            trim_output=1,
        )
        self.assertSaltTrueReturn(ret)
        self._check_extracted(self.untar_file)
        # Look up the single state return by its
        # "<state>_|-<id>_|-<name>_|-<function>" key
        state_ret = ret["archive_|-/tmp/archive_|-/tmp/archive_|-extracted"]
        self.assertTrue(
            state_ret["comment"].endswith("Output was trimmed to 1 number of lines")
        )
        self.assertEqual(state_ret["changes"], expected_changes)