#!/usr/bin/env python

import json
import random
import requests
import signal
import sys
import threading
import time
import datetime
import argparse

# Connection-type selectors (the value of the --connection option).
dHTTP, dHTTPS, dMixed = 0, 1, 2

### ------------------ Defaults ---------------------
# Fallback values used when the matching command-line option is absent.
user       = 'admin'
password   = 'password'
connection = dMixed            # mix HTTP and HTTPS requests by default
NumThreads = 3
http_port  = '80'              # ports kept as strings: they are spliced into URLs
https_port = '443'
file_list  = 'webFiles.json'
### -------------------------------------------------

# ---------------------------------------------------------------------------
# Command-line handling.
#
# Instead of manually copying each option over the defaults when it is not
# None (which re-implements what argparse already does), hand the defaults
# defined above straight to argparse via default=/type= and read the parsed
# values back.  The resulting module-level settings are identical.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description="Stresses the receiver's web server",
                                 formatter_class=argparse.RawDescriptionHelpFormatter)

parser.add_argument("IPAddr", help="IP Address of the receiver")
parser.add_argument("--user", default=user, help="Username")
parser.add_argument("--password", default=password, help="Password")
parser.add_argument("--connection", type=int, default=connection,
                    help="Connection type (0=HTTP, 1=HTTPS, 2=HTTP+HTTPS - default 2)")
parser.add_argument("--http_port", default=http_port, help="Port for http connection - default 80")
parser.add_argument("--https_port", default=https_port, help="Port for https connection - default 443")
parser.add_argument("--num_threads", type=int, default=NumThreads, help="Number of threads (default = 3)")
parser.add_argument("--file_list", default=file_list, help="JSON file that contains a list of files to download")
# By default we set the proxy to 'None'. If you set the following flag, python's requests will get
# the proxy setting from your environment variable. args.use_env_proxy will default to False
parser.add_argument('--use_env_proxy', help='Use the proxy from env variable http_proxy - default no proxy', action='store_true')
parser.add_argument("--on_off", nargs=2, type=int, help="Provide an on/off duty cycle for enabling the stress test (in sec), ie --on_off 1800 900")
args = parser.parse_args()

# Duty cycle (seconds); both remain None when --on_off was not supplied,
# which the main loop uses to mean "run continuously".
on  = None
off = None
if args.on_off:
  on, off = args.on_off

# Set by the main loop to pause the workers during the "off" phase.
suspend = False

# Effective settings: argparse has already substituted the defaults above.
server        = args.IPAddr
user          = args.user
password      = args.password
connection    = args.connection
http_port     = args.http_port
https_port    = args.https_port
NumThreads    = args.num_threads
file_list     = args.file_list
use_env_proxy = args.use_env_proxy

def signal_handler(signal, frame):
  """SIGINT handler: tell the worker threads to stop, wait for them, exit."""
  global ThreadsActive
  ThreadsActive = False
  print('You pressed Ctrl+C!')
  print("Exiting")
  # Wait for every worker to notice the cleared flag and finish its loop.
  for worker in httpThread:
    worker.join()
  sys.exit("CTRL-C")

def httpWorker(num):
  """Worker thread body: hammer the receiver's web server with GET requests.

  Loops until the module-level ThreadsActive flag is cleared.  While the
  module-level 'suspend' flag is set (the "off" phase of the duty cycle)
  the thread just idles.  Each iteration fetches one randomly chosen file
  over HTTP or HTTPS (depending on the --connection setting) and logs the
  response time, size, status code and compression used.

  Args:
    num: thread index, used only to tag the log output.
  """
  global ValidPage

  while(ThreadsActive == True):
    if(suspend == True):
      time.sleep(1)
    else:
      # The file list provides files that may be installed. Depending on the build,
      # some of these may be missing. We'll keep track of any 404s and not try that
      # page again.
      #
      # BUG FIX: if every page has ended up marked invalid we would busy-spin
      # forever on the 'continue' below (the reset used to happen only in the
      # exception handler).  Reset the whole list here and carry on.
      if(not any(ValidPage)):
        for i in range(MaxFiles):
          ValidPage[i] = True

      index = random.randint(0,MaxFiles-1)
      if(ValidPage[index] == False):
        continue

      connectType = 'http'

      # Randomly pick HTTP vs HTTPS when running in mixed mode.
      rand = random.randint(0,1)
      if( (connection == dHTTPS) or ( (connection == dMixed) and (rand == 0) ) ):
        connectType += 's'
        link = connectType + '://' + server + ':' + https_port
      else:
        link = connectType + '://' + server + ':' + http_port

      link += webFiles[index]['file']

      # Use rand to control whether we ask for the data gzipped (or not)
      rand = random.randint(0,1)
      ISOStr = datetime.datetime.now().isoformat()
      try:
        # Depending on Rand we'll allow gzipped data (or not) this will alter the workload and code
        # path in the receiver.
        if(rand == 0):
          headers={'Accept-Encoding':'gzip'}
        else:
          headers={'Accept-Encoding':'None'}

        # Note requests.get by default does not have a timeout. Therefore, if something goes wrong it will
        # block. Instead we set a 10 second timeout.
        if(use_env_proxy):
          r = requests.get(link, auth=(user,password), verify=False,headers=headers, timeout=10)
        else:
          r = requests.get(link, auth=(user,password), verify=False,headers=headers, timeout=10, proxies={'http':None,'https':None})

        # File missing on this build: remember so we never request it again.
        if(r.status_code == 404):
          ValidPage[index] = False

        if('Content-Encoding' in r.headers.keys()):
          compression = r.headers['Content-Encoding']
        else:
          compression = 'None'

        if('Content-Length' in r.headers.keys()):
          length=r.headers['Content-Length']
        else:
          length=0

        print(ISOStr,"Thread=",num,"Response Time=",r.elapsed.total_seconds(),"[s] Content Length = ",length,"HTTP Ret=",r.status_code,"Compress=",compression,link)
      except SystemExit:
        print(ISOStr,"system exit exception")
        quit()
      except Exception as e:
        print(ISOStr,'Bad Request {l} - Error! Code: {c}, Message, {m}'.format(l = link,c = type(e).__name__, m = str(e)))
        # Under unknown conditions we end with very few valid pages (must get 404s). When we get an 
        # error reset the list
        for i in range(MaxFiles):
          ValidPage[i] = True
#
# -------------- Code Start ----------------
#

signal.signal(signal.SIGINT, signal_handler)

print('Using file list: ',file_list)

# Load the list of candidate files once; every worker shares it read-only.
with open(file_list,'r') as f:
  webFiles = json.load(f)

MaxFiles = len(webFiles)

# One flag per file; a worker clears an entry when that file returns 404.
ValidPage = [True] * MaxFiles

# As the certificate from our receivers can't be authenticated we're going to turn
# off validating it. However, this causes a warning so make sure we suppress that.
requests.packages.urllib3.disable_warnings()

# Kick off the worker threads.
ThreadsActive = True
httpThread = [threading.Thread(target=httpWorker, args=(idx,)) for idx in range(NumThreads)]
for worker in httpThread:
  worker.start()

# Main loop: with no duty cycle configured just sleep; otherwise toggle the
# 'suspend' flag so the workers alternate between 'on' seconds of load and
# 'off' seconds of idling.
while True:
  if on is None:
    time.sleep(1)
  else:
    suspend = False
    time.sleep(on)
    suspend = True
    time.sleep(off)

