Gather.py

# ---------------------------------------------------------------------
# ------------------------- Plight Rising -----------------------------
# -----------------------------txtsd-----------------------------------
# ---------------------------------------------------------------------
"""Does gathering and Pinkerton and feeds your lair"""
# Imports -------------------------------------------------------------
import time
import datetime
import random
import re
import sys
import requests
from configobj import ConfigObj
from validate import Validator
from bs4 import BeautifulSoup as bs
# End Imports ---------------------------------------------------------

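# __init__ below reads its settings from config.ini, validated against
# config.spec. A minimal sketch of the keys it looks up (illustrative only --
# the authoritative types and defaults live in config.spec, and the values
# shown here are placeholders):
#
#   [account]
#   pinkerton = True
#   feed = True
#   [[gather]]
#   area = fire        # one of the element names mapped to an id below
#   action = digging   # gathering activity; sent as the 'action' URL parameter
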
class Gather:

    def __init__(self, acc):
        self.acc = acc
        self.userID = acc.getID()
        self.configspec = ConfigObj('config.spec', encoding='UTF8', list_values=False)
        self.config = ConfigObj('config.ini', configspec=self.configspec)
        val = Validator()
        test = self.config.validate(val, preserve_errors=True)
        self.area = {
            'earth': '1',
            'plague': '2',
            'wind': '3',
            'water': '4',
            'lightning': '5',
            'ice': '6',
            'shadow': '7',
            'light': '8',
            'arcane': '9',
            'nature': '10',
            'fire': '11',
        }
        self.areachoice = self.area[self.config['account']['gather']['area']]
        self.action = self.config['account']['gather']['action']
        self.pinkerton = self.config['account']['pinkerton']
        self.feed = self.config['account']['feed']
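
    # getItems() is handed the HTML of a gather (or Pinkerton) response. It
    # collects the rel-links of the 'clue' anchors (one per item received),
    # fetches each item's tooltip page, and prints the captured count together
    # with the item's subtype and name. Parsing is keyed off the order of the
    # tooltip's stripped strings, so an unrecognised layout falls through to
    # the "New type of item found." branch.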
    def getItems(self, html_):

        def fix_text(s):
            # Tooltip strings come back mis-decoded; re-decode latin1 -> utf8
            # and normalise ellipses and curly apostrophes.
            return s.encode('latin1').decode('utf8').replace('\u2026', '...').replace('\u2019', "'")

        def star_rating(page):
            # 1-5 star rating taken from the tooltip image name, 0 when absent.
            match = re.search(r"tooltip_(\d+)star\.", page)
            return int(match.group(1)) if match else 0

        things = re.findall(r'<a rel="(.*?)" class="clue"[\s\S]*?>(\d+?)</div>', html_.text)
        for x in things:
            self.check = False
            self.tried = 0
            while not self.check:
                if self.tried < 3:
                    try:
                        html = self.acc.get('http://flightrising.com/' + x[0],
                                            head={
                                                'Accept': 'text/html, */*; q=0.01',
                                                'X-Requested-With': 'XMLHttpRequest',
                                            },
                                            referer='/main.php?p=gather&action=' + self.action
                                            )
                        self.check = True
                    except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_0_error_getItems')
                        self.tried += 1
                else:
                    print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                    sys.exit()
            # info = re.findall(r">([^\s].*?)</div>", html.text)
            # rating = re.search(r"tooltip_(\d*)star\.", html.text)
            soup = bs(html.text, 'html.parser')
            strings = list(soup.stripped_strings)
            found = {}
            try:
                if strings[1] in ('Food', 'Insect', 'Meat', 'Plant', 'Seafood'):
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    found['desc'] = fix_text(strings[2])
                    found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['fp'] = int(re.search(r"(\d+)", strings[4]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] in ('Materials', 'Dragonmade Material', 'Minerals & Ores', 'Organic Material'):
                    if len(strings) == 3:
                        found['name'] = fix_text(strings[0])
                        found['subtype'] = 'NIL'
                        found['desc'] = fix_text(strings[1])
                        found['cost'] = int(re.search(r"(\d+)", strings[2]).group(1))
                    else:
                        found['name'] = fix_text(strings[0])
                        found['subtype'] = strings[1]
                        found['desc'] = fix_text(strings[2])
                        found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] == 'Apparel':
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    found['desc'] = fix_text(strings[2])
                    found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] == 'Familiar':
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    # Some familiar descriptions are already valid UTF-8; only re-decode when needed.
                    if re.search('\\u2019', strings[2]):
                        found['desc'] = strings[2].replace('\u2026', '...').replace('\u2019', "'")
                    else:
                        found['desc'] = fix_text(strings[2])
                    found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] in ('Battle', 'Energy Stone', 'Battle Item', 'Augment Stone', 'Accessory Stone', 'Ability Stone'):
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    found['desc'] = fix_text(strings[2])
                    found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['level'] = int(re.search(r"(\d+)", strings[4]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] == 'Other':
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    found['desc'] = fix_text(strings[2])
                    found['cost'] = int(re.search(r"(\d+)", strings[3]).group(1))
                    found['rating'] = star_rating(html.text)
                elif strings[1] == 'Skins':
                    found['name'] = fix_text(strings[0])
                    found['subtype'] = strings[1]
                    found['desc'] = fix_text(strings[2])
                    found['rating'] = star_rating(html.text)
                else:
                    print('New type of item found.')
            except Exception:
                print("Derp", found)
            try:
                print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + x[1] + ' [' + found['subtype'] + '] [' + found['name'] + ']')
                # for y in info:
                #     print(' [' + y + ']', end='')
                # print(' [Rating: ' + rating.group(1) + ']')
            except Exception:
                print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Unable to print.')
        if re.search("level_up.png", html_.text):
            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + "Your [" + self.action + "] leveled up!")
        print("-")
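
    # gather() drives one full daily run: it loads the gather page, reads the
    # "Turns Left Today" counter, then posts one gather request per remaining
    # turn (with short randomised pauses between requests) and hands every
    # response to getItems(). If enabled in config.ini, it finishes the session
    # by grabbing Pinkerton's daily item and feeding the lair.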
    def gather(self):
        self.check = False
        self.tried = 0
        while not self.check:
            if self.tried < 3:
                try:
                    html = self.acc.get('/main.php',
                                        param={
                                            'p': 'gather'
                                        },
                                        referer='/main.php?p=hoard'
                                        )
                    self.check = True
                except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                    print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_1_error_pregather')
                    self.tried += 1
            else:
                print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                sys.exit()
        # Parse page, look for remaining turns
        turns = re.search(r"Turns Left Today:[\s\S]*?(\d+)[\s]*</div>", html.text)
        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Turns remaining today: ' + turns.group(1))
        if int(turns.group(1)) > 0:
            self.check = False
            self.tried = 0
            while not self.check:
                if self.tried < 3:
                    try:
                        time.sleep(random.uniform(2, 4))
                        html2 = self.acc.post('/main.php',
                                              param={
                                                  'p': 'gather',
                                                  'action': self.action,
                                              },
                                              data={
                                                  'gather': self.areachoice,
                                              },
                                              head={
                                                  'Cache-Control': 'max-age=0',
                                              },
                                              referer='/main.php?p=gather'
                                              )
                        self.check = True
                    except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_2_error_first')
                        self.tried += 1
                else:
                    print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                    sys.exit()
            # Look for and print acquired items and level-ups
            self.getItems(html2)
            for x in range(0, int(turns.group(1)) - 1):
                self.check = False
                self.tried = 0
                while not self.check:
                    if self.tried < 3:
                        try:
                            time.sleep(random.uniform(1, 2))
                            html3 = self.acc.post('/main.php',
                                                  param={
                                                      'p': 'gather',
                                                      'action': self.action,
                                                  },
                                                  data={
                                                      'gather': self.areachoice,
                                                  },
                                                  head={
                                                      'Cache-Control': 'max-age=0',
                                                  },
                                                  referer='/main.php?p=gather&action=' + self.action
                                                  )
                            self.check = True
                        except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_3_error_subsequent')
                            self.tried += 1
                    else:
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                        sys.exit()
                # Look for and print acquired items and level-ups
                self.getItems(html3)
        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Done Gathering')
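
        # Pinkerton: load the tradepost and its 'pile' page first; the daily
        # item is only requested from ol_pinkpile.php when the grab button is
        # not already disabled (i.e. it has not been claimed yet today).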
        # Pinkerton section
        if self.pinkerton:
            time.sleep(random.uniform(2, 4))
            print('\n' + '[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Visiting Pinkerton')
            self.check = False
            self.tried = 0
            while not self.check:
                if self.tried < 3:
                    try:
                        html4 = self.acc.get('/main.php',
                                             param={
                                                 'p': 'tradepost',
                                             },
                                             referer='/main.php?p=hoard'
                                             )
                        html5 = self.acc.get('/main.php',
                                             param={
                                                 'p': 'tradepost',
                                                 'lot': 'pile',
                                             },
                                             referer='/main.php?p=tradepost'
                                             )
                        self.check = True
                    except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_4_error_Pinkerton1')
                        self.tried += 1
                else:
                    print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                    sys.exit()
            if re.search('disabled="disabled"', html5.text):
                print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + "Already grabbed today's item.")
            else:
                time.sleep(random.uniform(2, 4))
                self.check = False
                self.tried = 0
                while not self.check:
                    if self.tried < 3:
                        try:
                            html6 = self.acc.post('/includes/ol/ol_pinkpile.php',
                                                  head={
                                                      'Accept': '*/*',
                                                      'X-Requested-With': 'XMLHttpRequest',
                                                  },
                                                  referer='/main.php?p=tradepost&lot=pile'
                                                  )
                            self.check = True
                        except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_5_error_Pinkerton1')
                            self.tried += 1
                    else:
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                        sys.exit()
                link = re.search(r'<a rel="(.*?)" class="clue"[\s\S]*?>', html6.text)
                self.check = False
                self.tried = 0
                while not self.check:
                    if self.tried < 3:
                        try:
                            html7 = self.acc.get('http://flightrising.com/' + link.group(1),
                                                 head={
                                                     'Accept': 'text/html, */*; q=0.01',
                                                     'X-Requested-With': 'XMLHttpRequest',
                                                 },
                                                 referer='/main.php?p=tradepost&lot=pile'
                                                 )
                            self.check = True
                        except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_6_error_Pinkerton2')
                            self.tried += 1
                    else:
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                        sys.exit()
                info = re.findall(r'>([^\s].*?)</div>', html7.text)
                rating = re.search(r'tooltip_(\d*)star\.', html7.text)
                print('[' + str(datetime.datetime.now().time())[:-3] + '] ', end='')
                for x in info:
                    print('[' + x + '] ', end='')
                print(' [Rating: ' + (rating.group(1) if rating else '0') + ']')
            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + "Done Pinking\n")
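
        # Feeding: a single POST to feed.php with the lair page as referer;
        # the response body is not inspected, so the script only reports that
        # the request went through.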
        # Feeding section
        if self.feed:
            time.sleep(random.uniform(2, 4))
            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Feeding dergs')
            self.check = False
            self.tried = 0
            while not self.check:
                if self.tried < 3:
                    try:
                        html8 = self.acc.post('/includes/ol/feed.php',
                                              head={
                                                  'Accept': '*/*',
                                                  'X-Requested-With': 'XMLHttpRequest',
                                              },
                                              referer='/main.php?p=lair&id=' + self.userID
                                              )
                        self.check = True
                    except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                        print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Gather_7_error_Feed')
                        self.tried += 1
                else:
                    print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad network. Try again later.')
                    sys.exit()
            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + "Done Feeding\n")
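
# Usage sketch (assumptions, not part of this module): the `acc` object passed
# to Gather() is expected to expose getID() plus get()/post() wrappers around a
# logged-in requests session, accepting `param`, `data`, `head`, and `referer`
# keyword arguments and raising the usual requests exceptions on network
# trouble. With whatever login class the rest of the project provides
# (the name `Account` here is hypothetical):
#
#   acc = Account('username', 'password')
#   Gather(acc).gather()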