import time
import io
import binascii

import pytest
import mock

from Connection import ConnectionServer
from Content.ContentManager import VerifyError
from File import FileServer
from File import FileRequest
from Worker import WorkerManager
from Peer import Peer
from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked
from Test import Spy
from util import Msgpack

@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestBigfile:
    privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
    piece_size = 1024 * 1024

    def createBigfile(self, site, inner_path="data/optional.any.iso", pieces=10):
        f = site.storage.open(inner_path, "w")
        for i in range(pieces * 100):
            f.write(("Test%s" % i).ljust(10, "-") * 1000)
        f.close()

        assert site.content_manager.sign("content.json", self.privatekey)

        return inner_path
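
    # Layout of the generated file (derived from createBigfile above): each loop
    # iteration writes 1000 copies of a 10-byte record ("Test<i>" right-padded with
    # "-"), i.e. 10,000 bytes per iteration and 10,000,000 bytes in total, hashed as
    # ten 1 MiB pieces (the last one partial). Offset 5 * 1024 * 1024 = 5,242,880
    # falls on a record boundary inside iteration i=524, so reads at the 5 MiB mark
    # below start with b"Test524"; offset 9 * 1024 * 1024 starts mid-record, which
    # gives b"943---T".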

    def testPiecemapCreate(self, site):
        inner_path = self.createBigfile(site)
        content = site.storage.loadJson("content.json")
        assert "data/optional.any.iso" in content["files_optional"]

        file_node = content["files_optional"][inner_path]
        assert file_node["size"] == 10 * 1000 * 1000
        assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6"
        assert file_node["piecemap"] == inner_path + ".piecemap.msgpack"

        piecemap = Msgpack.unpack(site.storage.open(file_node["piecemap"], "rb").read())["optional.any.iso"]

        assert len(piecemap["sha512_pieces"]) == 10
        assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
        assert binascii.hexlify(piecemap["sha512_pieces"][0]) == b"a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
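
    # The .piecemap.msgpack file is a msgpack map keyed by the big file's base name;
    # as exercised above it carries (at least) a "sha512_pieces" list with one entry
    # per 1 MiB piece. Each entry is 32 raw bytes (the hex digest above is 64
    # characters), which suggests a SHA-512 hash truncated to 256 bits, though that
    # is an inference from this test rather than something it asserts directly.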

    def testVerifyPiece(self, site):
        inner_path = self.createBigfile(site)

        # Verify all 10 pieces
        f = site.storage.open(inner_path, "rb")
        for i in range(10):
            piece = io.BytesIO(f.read(1024 * 1024))
            piece.seek(0)
            site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
        f.close()

        # Try to verify piece 0 against the hash of piece 1
        with pytest.raises(VerifyError) as err:
            i = 1
            f = site.storage.open(inner_path, "rb")
            piece = io.BytesIO(f.read(1024 * 1024))
            f.close()
            site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
        assert "Invalid hash" in str(err.value)

    def testSparseFile(self, site):
        inner_path = "sparsefile"

        # Create a 100MB sparse file
        site.storage.createSparseFile(inner_path, 100 * 1024 * 1024)

        # Write to the beginning of the file
        s = time.time()
        f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
        time_write_start = time.time() - s

        # Write to the end of the file
        s = time.time()
        f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
        time_write_end = time.time() - s

        # Verify writes
        f = site.storage.open(inner_path)
        assert f.read(10) == b"hellostart"
        f.seek(99 * 1024 * 1024)
        assert f.read(8) == b"helloend"
        f.close()

        site.storage.delete(inner_path)

        # Writing to the end should not take much longer than writing to the start
        assert time_write_end <= max(0.1, time_write_start * 1.1)

    def testRangedFileRequest(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))

        assert len(buff.getvalue()) == 1 * 1024 * 1024  # Correct block size
        assert buff.getvalue().startswith(b"Test524")  # Correct data
        buff.seek(0)
        assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff)  # Correct hash

        connection.close()
        client.stop()
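
    # Ranged requests throughout these tests use the "<inner_path>|<start>-<end>"
    # form with byte offsets and an exclusive end (the 5 MiB-6 MiB request above
    # returns exactly 1 MiB); "<inner_path>|all", used further down, asks for every
    # piece of a big file.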

    def testRangedFileDownload(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Make sure both the file and the piecemap are in the optional hashfield
        file_info = site.content_manager.getFileInfo(inner_path)
        assert site.content_manager.hashfield.hasHash(file_info["sha512"])

        piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"]
        assert site.content_manager.hashfield.hasHash(piecemap_hash)

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        peer_client = site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
        assert not bad_files

        # client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring()
        # assert client_piecefield == "1" * 10

        # Download the blocks at offsets 5 MiB and 9 MiB
        site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
        site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))

        # Verify the first block has not been downloaded
        f = site_temp.storage.open(inner_path)
        assert f.read(10) == b"\0" * 10
        # Verify the requested blocks have been downloaded
        f.seek(5 * 1024 * 1024)
        assert f.read(7) == b"Test524"
        f.seek(9 * 1024 * 1024)
        assert f.read(7) == b"943---T"

        # Verify hashfield
        assert set(site_temp.content_manager.hashfield) == set([18343, 43727])  # 18343: data/optional.any.iso, 43727: data/optional.any.iso.piecemap.msgpack

    def testOpenBigfile(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Open virtual file
        assert not site_temp.storage.isFile(inner_path)

        with site_temp.storage.openBigfile(inner_path) as f:
            with Spy.Spy(FileRequest, "route") as requests:
                f.seek(5 * 1024 * 1024)
                assert f.read(7) == b"Test524"

                f.seek(9 * 1024 * 1024)
                assert f.read(7) == b"943---T"

            assert len(requests) == 4  # 1x piecemap + 1x getpiecefield + 2x pieces

            assert set(site_temp.content_manager.hashfield) == set([18343, 43727])

            assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
            assert f.sha512 in site_temp.getSettingsCache()["piecefields"]

            # Requesting already downloaded data should trigger no new requests
            with Spy.Spy(FileRequest, "route") as requests:
                f.seek(5 * 1024 * 1024)
                assert f.read(7) == b"Test524"

            assert len(requests) == 0

            # Multi-block read that overflows into following blocks
            with Spy.Spy(FileRequest, "route") as requests:
                f.seek(5 * 1024 * 1024)  # We already have this block
                data = f.read(1024 * 1024 * 3)  # The read spills over into the next two blocks, which still need downloading
                assert data.startswith(b"Test524")
                assert data.endswith(b"Test838-")
                assert b"\0" not in data  # No null bytes allowed

            assert len(requests) == 2  # Two blocks downloaded

            # Out of range request
            f.seek(5 * 1024 * 1024)
            data = f.read(1024 * 1024 * 30)
            assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024)

            f.seek(30 * 1024 * 1024)
            data = f.read(1024 * 1024 * 30)
            assert len(data) == 0

    @pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked])
    def testPiecefield(self, piecefield_obj, site):
        testdatas = [
            b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01",
            b"\x00\x01\x00\x01\x00\x01" * 10 + b"\x00\x01" * 90 + b"\x01\x00" * 400 + b"\x00" * 4999,
            b"\x01" * 10000,
            b"\x00" * 10000
        ]
        for testdata in testdatas:
            piecefield = piecefield_obj()

            piecefield.frombytes(testdata)

            assert piecefield.tobytes() == testdata
            assert piecefield[0] == testdata[0]
            assert piecefield[100] == testdata[100]
            assert piecefield[1000] == testdata[1000]
            assert piecefield[len(testdata) - 1] == testdata[len(testdata) - 1]

            packed = piecefield.pack()
            piecefield_new = piecefield_obj()
            piecefield_new.unpack(packed)

            assert piecefield.tobytes() == piecefield_new.tobytes()
            assert piecefield_new.tobytes() == testdata
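
    # Both piecefield flavours are exercised through the same byte-per-piece view
    # (frombytes/tobytes, indexing) plus a pack()/unpack() round trip; the "Packed"
    # variant presumably keeps the field bit-packed internally, but this test relies
    # only on the shared interface. The tostring() form checked elsewhere in this
    # file ("0000010001", "0000011100") renders one character per 1 MiB piece, with
    # "1" meaning the piece has been downloaded.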

    def testFileGet(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        site_temp.connection_server = FileServer(file_server.ip, 1545)
        site_temp.connection_server.sites[site_temp.address] = site_temp
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Download the second block
        with site_temp.storage.openBigfile(inner_path) as f:
            f.seek(1024 * 1024)
            assert f.read(1024)[0:1] != b"\0"

        # Make sure the first block has not been downloaded
        with site_temp.storage.open(inner_path) as f:
            assert f.read(1024)[0:1] == b"\0"

        peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)

        # Requesting the first block should fail (the client only has the second block)
        assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1))
        # Requesting the second block should succeed
        assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2))

    def benchmarkPeerMemory(self, site, file_server):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        import psutil, os
        meminfo = psutil.Process(os.getpid()).memory_info

        mem_s = meminfo()[0]
        s = time.time()
        for i in range(25000):
            site.addPeer(file_server.ip, i)
        print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024))  # 0.082s MEM: + 6800KB
        print(list(site.peers.values())[0].piecefields)
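
    # Note: benchmarkPeerMemory does not match pytest's default "test*" collection
    # pattern, so it is assumed to be run manually (for example from a debugging
    # session, or by renaming it temporarily) rather than as part of the suite.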

    def testUpdatePiecefield(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        server1 = file_server
        server1.sites[site.address] = site
        server2 = FileServer(file_server.ip, 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2

        # Add file_server as peer to client
        server2_peer1 = site_temp.addPeer(file_server.ip, 1544)

        # Testing piecefield sync
        assert len(server2_peer1.piecefields) == 0
        assert server2_peer1.updatePiecefields()  # Query piecefields from peer
        assert len(server2_peer1.piecefields) > 0

    def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        server1 = file_server
        server1.sites[site.address] = site
        server2 = FileServer(file_server.ip, 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2

        # Add file_server as peer to client
        server2_peer1 = site_temp.addPeer(file_server.ip, 1544)  # Working

        site_temp.downloadContent("content.json", download_files=False)
        site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

        # Add fake peers that claim to have the optional files downloaded
        for i in range(5):
            fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544)
            fake_peer.hashfield = site.content_manager.hashfield
            fake_peer.has_hashfield = True

        with Spy.Spy(WorkerManager, "addWorker") as requests:
            site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
            site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024))

        # Parts should only be requested from peer1, as the other peers do not have the requested parts in their piecefields
        assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0

    def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        server1 = file_server
        server1.sites[site.address] = site
        server2 = FileServer(file_server.ip, 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2

        sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

        # Create 10 fake peers, each owning a different piece
        for i in range(10):
            peer = Peer(file_server.ip, 1544, site_temp, server2)
            peer.piecefields[sha512][i] = b"\x01"
            peer.updateHashfield = mock.MagicMock(return_value=False)
            peer.updatePiecefields = mock.MagicMock(return_value=False)
            peer.findHashIds = mock.MagicMock(return_value={"nope": []})
            peer.hashfield = site.content_manager.hashfield
            peer.has_hashfield = True
            peer.key = "Peer:%s" % i
            site_temp.peers["Peer:%s" % i] = peer

        site_temp.downloadContent("content.json", download_files=False)
        site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

        with Spy.Spy(Peer, "getFile") as requests:
            for i in range(10):
                site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))

        assert len(requests) == 10
        for i in range(10):
            assert requests[i][0] == site_temp.peers["Peer:%s" % i]  # Every piece should be requested from the peer that owns it

    def testDownloadStats(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Open virtual file
        assert not site_temp.storage.isFile(inner_path)

        # Check size before downloads
        assert site_temp.settings["size"] < 10 * 1024 * 1024
        assert site_temp.settings["optional_downloaded"] == 0

        size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"]
        size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]

        with site_temp.storage.openBigfile(inner_path) as f:
            assert b"\0" not in f.read(1024)
            assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

        with site_temp.storage.openBigfile(inner_path) as f:
            # Don't count twice
            assert b"\0" not in f.read(1024)
            assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

            # Add second block
            assert b"\0" not in f.read(1024 * 1024)
            assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

    def testPrebuffer(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Open virtual file
        assert not site_temp.storage.isFile(inner_path)

        with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
            with Spy.Spy(FileRequest, "route") as requests:
                f.seek(5 * 1024 * 1024)
                assert f.read(7) == b"Test524"

            # assert len(requests) == 3  # 1x piecemap + 1x getpiecefield + 1x for pieces
            assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2

            time.sleep(0.5)  # Wait for the prebuffer to download

            sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
            assert site_temp.storage.piecefields[sha512].tostring() == "0000011100"

            # No prebuffer beyond the end of the file
            f.seek(9 * 1024 * 1024)
            assert b"\0" not in f.read(7)
            assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

    def testDownloadAllPieces(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Open virtual file
        assert not site_temp.storage.isFile(inner_path)

        with Spy.Spy(FileRequest, "route") as requests:
            site_temp.needFile("%s|all" % inner_path)

        assert len(requests) == 12  # piecemap.msgpack + getPiecefields + 10 pieces

        # Already downloaded pieces should not be requested again
        with Spy.Spy(FileRequest, "route") as requests:
            site_temp.needFile("%s|all" % inner_path)

        assert len(requests) == 0

    def testFileSize(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer(file_server.ip, 1545)
        site_temp.connection_server = client
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        # Open virtual file
        assert not site_temp.storage.isFile(inner_path)

        # Download first block
        site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024))
        assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10  # Size on the disk should be smaller than the real size

        site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
        assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path)

    def testFileRename(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        site_temp.connection_server = FileServer(file_server.ip, 1545)
        site_temp.connection_server.sites[site_temp.address] = site_temp
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        with Spy.Spy(FileRequest, "route") as requests:
            site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size))

        assert len([req for req in requests if req[1] == "streamFile"]) == 2  # 1 piece + piecemap

        # Rename the file
        inner_path_new = inner_path.replace(".iso", "-new.iso")
        site.storage.rename(inner_path, inner_path_new)
        site.storage.delete("data/optional.any.iso.piecemap.msgpack")
        assert site.content_manager.sign("content.json", self.privatekey, remove_missing_optional=True)

        files_optional = site.content_manager.contents["content.json"]["files_optional"].keys()

        assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional
        assert "data/optional.any.iso.piecemap.msgpack" not in files_optional
        assert "data/optional.any.iso" not in files_optional

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)  # Wait for download
            assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0

            with site_temp.storage.openBigfile(inner_path_new, prebuffer=0) as f:
                f.read(1024)

                # First piece already downloaded
                assert [req for req in requests if req[1] == "streamFile"] == []

                # Second piece needs to be downloaded + changed piecemap
                f.seek(self.piece_size)
                f.read(1024)
                assert [req[3]["inner_path"] for req in requests if req[1] == "streamFile"] == [inner_path_new + ".piecemap.msgpack", inner_path_new]

    @pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30])
    def testNullFileRead(self, file_server, site, site_temp, size):
        inner_path = "data/optional.iso"

        f = site.storage.open(inner_path, "w")
        f.write("\0" * size)
        f.close()
        assert site.content_manager.sign("content.json", self.privatekey)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        site_temp.connection_server = FileServer(file_server.ip, 1545)
        site_temp.connection_server.sites[site_temp.address] = site_temp
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

        if "piecemap" in site.content_manager.getFileInfo(inner_path):  # Bigfile
            site_temp.needFile(inner_path + "|all")
        else:
            site_temp.needFile(inner_path)

        assert site_temp.storage.getSize(inner_path) == size
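
# The site, site_temp and file_server fixtures (and the resetSettings /
# resetTempSettings fixtures applied to the class) are assumed to be provided by the
# test suite's conftest.py; the exact command used to run this file depends on the
# project's pytest configuration.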