#!/usr/bin/env python3
# coding: utf-8
# ▓▓▓▓▓▓▓▓▓▓
# ░▓ Author ▓ Abdullah <https://abdullah.today/>
# ░▓▓▓▓▓▓▓▓▓▓
# ░░░░░░░░░░
import subprocess
import sys

import requests
from bs4 import BeautifulSoup
  10. # Getting values
  11. get_url_1, get_url_2 = subprocess.run(['gpg', '-dq',
  12. '/home/ak/.local/share/misc/url-1.gpg'], check=True, stdout=subprocess.PIPE,
  13. stderr=subprocess.PIPE, encoding='utf-8'), subprocess.run(['gpg', '-dq',
  14. '/home/ak/.local/share/misc/url-2.gpg'], check=True,
  15. stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')
  16. url_1, url_2 = get_url_1.stdout[:-1], get_url_2.stdout[:-1]
  17. user = subprocess.sys.argv[1]
  18. try:
  19. tries = subprocess.sys.argv[2]
  20. except IndexError:
  21. tries = 0
  22. def get_info_1(user):
  23. r = requests.post(url_1, data={'number': user})
  24. soup = BeautifulSoup(r.content, 'html.parser')
  25. for data in soup.find_all('tr'):
  26. print(data.get_text(separator=': -------> ', strip=True))
  27. def get_info_2(user):
  28. r = requests.post(url_2, data={'cnnum': user})
  29. soup = BeautifulSoup(r.content, 'html.parser')
  30. for data in soup.find_all('tr'):
  31. print(data.get_text(separator=': -------> ', strip=True))
  32. if tries == str(1):
  33. get_info_1(user)
  34. elif tries == str(2):
  35. get_info_2(user)
  36. elif tries == 0:
  37. get_info_1(user)
  38. get_info_2(user)