reqhead

#!/usr/bin/env python3
""" Status Checker to detect response codes and server information """
# full source: https://github.com/angela-d/request-headers-checker
# license: GPLv3
# author: angela-d
import pprint
from json import dumps, loads
import sys
from ssl import create_default_context
import socket
from argparse import ArgumentParser
from requests import get, exceptions

# font colors
GREEN = '\033[32m'
YELLOW = '\033[33m'
CLR_COLOR = '\033[m'  # reset color to default


def startup(entered_url):
    """ Startup """
    try:
        if entered_url is None:
            print(
                GREEN
                + "\t\tEnter the url you wish to investigate, including the http(s)://"
                + CLR_COLOR
            )
            entered_url = input("URL: ")
        get_http_info(entered_url)
    # if a user chooses to cancel their session before processing a url
    except KeyboardInterrupt:
        print('\nExiting..')
        sys.exit(0)
    except EOFError:
        print('\nExiting..')
        sys.exit(0)


def get_http_info(check_url):
    """ Parse the user-entered URL """
    # try & circumvent user agent blocking for python requests
    spoof_agent = {
        'Accept-Language': 'en-US,en;q=0.5',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/66.0',  # pylint: disable=C0301
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Referer': 'https://www.startpage.com'  # the HTTP header name is spelled "Referer"
    }
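    # requests.get() follows redirects by default, so access_url.history holds
    # any intermediate redirect responses; an empty history means a direct hit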
    try:
        access_url = get(check_url, headers=spoof_agent)
        if len(access_url.history) < 1:
            # normal request
            print(GREEN + '\n\t\t----- Headers for', check_url, ' -----' + CLR_COLOR)
            print("\nHTTP Response: " + str(access_url.status_code))
            request_header = dumps(dict(access_url.headers))
            format_headers = pprint.PrettyPrinter(indent=2)
            format_headers.pprint(loads(request_header))
            # obtain ssl issuer information (if https)
            if 'https:' in check_url:
                print(GREEN + '\n\t\t----- SSL Headers for', check_url, ' -----' + CLR_COLOR)
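                # strip the scheme, then anything after the first slash or
                # question mark, leaving the bare hostname for the TLS handshake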
                hostname = check_url.split("//")[-1].split("/")[0].split('?')[0]
                context = create_default_context()
                server = context.wrap_socket(socket.socket(), server_hostname=hostname)
                server.connect((hostname, 443))
                cert = server.getpeercert()
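                # getpeercert() returns 'subject' and 'issuer' as tuples of RDN
                # tuples, e.g. ((('commonName', 'example.org'),), ...); flatten
                # each into a dict keyed by attribute name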
                issued = dict(extract[0] for extract in cert['subject'])
                issuer = dict(extract[0] for extract in cert['issuer'])
                print('SSL cert issued to', issued['commonName'], 'by', issuer['commonName'],
                      'on', cert['notBefore'], 'expires', cert['notAfter'])
        else:
            # redirects detected
            print(YELLOW + '\n\t\t----- Redirect(s) detected for', check_url, ' -----' + CLR_COLOR)
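            # re-request without following redirects so the original 3xx status
            # and its Location header are visible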
            redirect_status = get(check_url, allow_redirects=False)
            print(redirect_status.status_code, ' detected from', check_url,
                  'to', redirect_status.headers['Location'],
                  '\n\t\x1B[3mRun a check on ' + redirect_status.headers['Location']
                  + ' instead.\x1B[23m\n\n')
            startup(entered_url=None)
    # some errors encountered
    except exceptions.InvalidSchema as show_error:
        print(show_error)
        startup(entered_url=None)
    except exceptions.MissingSchema as show_error:
        print(show_error)
        startup(entered_url=None)
    except exceptions.ConnectionError:
        print("Could not connect; ensure the URL is valid and try again.")
        sys.exit(1)
    except exceptions.Timeout:
        print("Timeout. Server might be offline or not responding.")
        sys.exit(1)
    except exceptions.TooManyRedirects:
        print("Infinite Redirects. Site is poorly configured.")
        sys.exit(1)


if __name__ == '__main__':
    PARSE = ArgumentParser(description='A Python tool to get header \
information from a website.\nhttp:// or https:// are required.')
    PARSE.add_argument(
        '--version', '-v',
        action='version',
        version='%(prog)s 2.0.0'
    )
    PARSE.add_argument(
        '--url', '-url',
        action='store_true'
    )
    NAMESPACE = PARSE.parse_known_args()
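    # parse_known_args() returns (namespace, leftover_args); a bare URL passed
    # on the command line lands in the leftover list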
    if len(NAMESPACE[1]) == 1:
        ARGTUP = NAMESPACE[1]
        startup(ARGTUP[0])
    else:
        startup(entered_url=None)
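
# assumed invocation examples (script name taken from the file header above):
#   ./reqhead https://example.com    -> print headers for that URL
#   ./reqhead                        -> prompt interactively for a URL
#   ./reqhead --version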