This repository has been archived by the owner on Feb 3, 2021. It is now read-only.

syntax fixes #3

Merged · 2 commits · Jun 16, 2017
bin/spark-cluster-create · 37 additions, 36 deletions
@@ -10,59 +10,60 @@ except ImportError:
     import ConfigParser as configparser
 
 # Path to config
-_config_path = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
+CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
 
 if __name__ == '__main__':
 
-    _pool_id = None
-    _vm_count = None
-    _vm_size = None
-    _custom_script = None
-
-    _wait = True
+    pool_id = None
+    vm_count = None
+    vm_size = None
+    custom_script = None
+    wait = True
 
     # parse arguments
-    parser = argparse.ArgumentParser(prog="az_spark")
-
-    parser.add_argument("--cluster-id", required=True,
-                        help="the unique name of your spark cluster")
-    parser.add_argument("--cluster-size", type=int, required=True,
-                        help="number of vms in your cluster")
-    parser.add_argument("--cluster-vm-size", required=True,
-                        help="size of each vm in your cluster")
-    parser.add_argument("--custom-script",
-                        help="absolute path of custom bash script (.sh) to run on each node")
+    parser = argparse.ArgumentParser(prog='az_spark')
+
+    parser.add_argument('--id', dest='cluster_id', required=True,
+                        help='The unique id of your spark cluster')
+    parser.add_argument('--size', type=int, required=True,
+                        help='Number of vms in your cluster')
+    parser.add_argument('--vm-size', required=True,
+                        help='VM size for nodes in your cluster')
+    parser.add_argument('--custom-script',
+                        help='Absolute path of custom bash script (.sh) to run on each node')
+    parser.add_argument('--wait', dest='wait', action='store_true')
+    parser.add_argument('--no-wait', dest='wait', action='store_false')
+    parser.set_defaults(wait=False)
 
     args = parser.parse_args()
 
-    print()
     if args.cluster_id is not None:
-        _pool_id = args.cluster_id
-        print("spark cluster id: %s" % _pool_id)
+        pool_id = args.cluster_id
 
-    if args.cluster_size is not None:
-        _vm_count = args.cluster_size
-        print("spark cluster size: %i" % _vm_count)
+    if args.size is not None:
+        vm_count = args.size
 
-    if args.cluster_vm_size is not None:
-        _vm_size = args.cluster_vm_size
-        print("spark cluster vm size: %s" % _vm_size)
+    if args.vm_size is not None:
+        vm_size = args.vm_size
 
     if args.custom_script is not None:
-        _custom_script = args.custom_script
-        print("path to custom script: %s" % _custom_script)
+        custom_script = args.custom_script
 
     if args.wait is not None:
         if args.wait == False:
-            _wait = False
-            print("wait for cluster: %r" % _wait)
+            wait = False
 
+    print('-------------------------------------------')
+    print('spark cluster id: {}'.format(pool_id))
+    print('spark cluster size: {}'.format(vm_count))
+    print('spark cluster vm size: {}'.format(vm_size))
+    print('path to custom script: {}'.format(custom_script))
+    print('wait for cluster: {}'.format(wait))
+    print('-------------------------------------------')
 
     # Read config file
     global_config = configparser.ConfigParser()
-    global_config.read(_config_path)
+    global_config.read(CONFIG_PATH)
 
     # Set up batch configuration
     batch_account_key = global_config.get('Batch', 'batchaccountkey')

@@ -90,8 +91,8 @@ if __name__ == '__main__':
     clusterlib.create_cluster(
         batch_client,
         blob_client,
-        _custom_script,
-        _pool_id,
-        _vm_count,
-        _vm_size,
-        _wait)
+        custom_script,
+        pool_id,
+        vm_count,
+        vm_size,
+        wait)
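The new --wait/--no-wait pair uses argparse's paired store_true/store_false idiom: both flags write to the same dest, and set_defaults picks the behavior when neither flag is given. A minimal standalone sketch of that pattern (the prog string and prints are illustrative only, not part of the PR):

```python
import argparse

# Both flags share dest='wait'; whichever appears last on the command
# line wins, and set_defaults covers the "neither flag given" case.
parser = argparse.ArgumentParser(prog='demo')
parser.add_argument('--wait', dest='wait', action='store_true')
parser.add_argument('--no-wait', dest='wait', action='store_false')
parser.set_defaults(wait=False)

print(parser.parse_args([]).wait)             # False (the default)
print(parser.parse_args(['--wait']).wait)     # True
print(parser.parse_args(['--no-wait']).wait)  # False
```

One side effect worth noting: because set_defaults guarantees args.wait is never None, the script's `if args.wait is not None` guard is always true, and only the `args.wait == False` check does any work.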
bin/spark-cluster-create-user · 25 additions, 35 deletions
@@ -10,42 +10,44 @@ except ImportError:
     import ConfigParser as configparser
 
 # Path to config
-_config_path = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
+CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
 
 if __name__ == '__main__':
 
-    _pool_id = None
-    _username = 'admin'
-    _password = 'pass123!'
+    pool_id = None
+    username = 'admin'
+    password = 'pass123!'
 
     # parse arguments
-    parser = argparse.ArgumentParser(prog="az_spark")
+    parser = argparse.ArgumentParser(prog='az_spark')
 
-    parser.add_argument("--cluster-id", required=True,
-                        help="the unique name of your spark cluster")
-    parser.add_argument("-u", "--user",
-                        help="the relative path to your spark app in your directory")
-    parser.add_argument("-p", "--password",
-                        help="the relative path to your spark app in your directory")
+    parser.add_argument('--id', dest='cluster_id', required=True,
+                        help='The unique id of your spark cluster')
+    parser.add_argument('-u', '--username',
+                        help='The username to access your spark cluster\'s head node')
+    parser.add_argument('-p', '--password',
+                        help='The password to access your spark cluster\'s head node')
 
     args = parser.parse_args()
 
-    print()
     if args.cluster_id is not None:
-        _pool_id = args.cluster_id
-        print("spark cluster id: %s" % _pool_id)
+        pool_id = args.cluster_id
 
-    if args.user is not None:
-        _username = args.user
-        print("az_spark username: %s" % _username)
+    if args.username is not None:
+        username = args.username
 
     if args.password is not None:
-        _password = args.password
-        print("az_spark password: %s" % _password)
+        password = args.password
 
+    print('-------------------------------------------')
+    print('spark cluster id: {}'.format(pool_id))
+    print('username: {}'.format(username))
+    print('password: {}'.format(password))
+    print('-------------------------------------------')
 
     # Read config file
     global_config = configparser.ConfigParser()
-    global_config.read(_config_path)
+    global_config.read(CONFIG_PATH)
 
     # Set up batch configuration
     batch_account_key = global_config.get('Batch', 'batchaccountkey')

@@ -61,19 +63,7 @@ if __name__ == '__main__':
     # get ssh command
     clusterlib.create_user(
         batch_client,
-        _pool_id,
-        _username,
-        _password)
+        pool_id,
+        username,
+        password)
 
-
-
-
-
-
-
-
-
-
-
-
-
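The script still seeds username and password with fallback values and overwrites them only when the corresponding flag is passed. The same behavior can be expressed more compactly by letting argparse own the defaults; a sketch of that alternative (not part of this PR):

```python
import argparse

parser = argparse.ArgumentParser(prog='az_spark')
parser.add_argument('--id', dest='cluster_id', required=True,
                    help='The unique id of your spark cluster')
# argparse substitutes the default when the flag is absent,
# so the seed variables and None-checks go away entirely.
parser.add_argument('-u', '--username', default='admin',
                    help="The username to access your spark cluster's head node")
parser.add_argument('-p', '--password', default='pass123!',
                    help="The password to access your spark cluster's head node")

args = parser.parse_args()
print(args.cluster_id, args.username, args.password)
```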
bin/spark-cluster-delete · 9 additions, 11 deletions
@@ -10,28 +10,26 @@ except ImportError:
     import ConfigParser as configparser
 
 # Path to config
-_config_path = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
+CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
 
 if __name__ == '__main__':
 
-    _pool_id = None
+    pool_id = None
 
     # parse arguments
-    parser = argparse.ArgumentParser(prog="az_spark")
-
-    parser.add_argument("--cluster-id", required=True,
-                        help="the unique name of your spark cluster")
+    parser = argparse.ArgumentParser(prog='az_spark')
+
+    parser.add_argument(dest='cluster_id',
+                        help='The unique id of your spark cluster')
 
     args = parser.parse_args()
 
-    print()
     if args.cluster_id is not None:
-        _pool_id = args.cluster_id
-        print("delete cluster id: %s" % _pool_id)
+        pool_id = args.cluster_id
 
     # Read config file
     global_config = configparser.ConfigParser()
-    global_config.read(_config_path)
+    global_config.read(CONFIG_PATH)
 
     # Set up batch configuration
     batch_account_key = global_config.get('Batch', 'batchaccountkey')

@@ -47,5 +45,5 @@ if __name__ == '__main__':
     # Delete specified cluster
     clusterlib.delete_cluster(
         batch_client,
-        _pool_id)
+        pool_id)
 
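spark-cluster-delete (and spark-cluster-get below) now take the cluster id as a positional argument rather than a --cluster-id flag, passing only dest= to add_argument. With no flag strings, argparse treats the argument as a required positional whose value lands on args.cluster_id and whose dest doubles as the metavar in usage output. A quick standalone check of that form:

```python
import argparse

parser = argparse.ArgumentParser(prog='az_spark')
# No '-'/'--' strings, so this is a required positional argument;
# dest names the attribute on args (and the metavar shown in --help).
parser.add_argument(dest='cluster_id',
                    help='The unique id of your spark cluster')

args = parser.parse_args(['my-cluster'])
print(args.cluster_id)  # my-cluster
```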
bin/spark-cluster-get · 8 additions, 12 deletions
@@ -10,30 +10,26 @@ except ImportError:
     import ConfigParser as configparser
 
 # Path to config
-_config_path = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
+CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
 
 if __name__ == '__main__':
 
-    _pool_id = None
-    _vm_count = None
-    _vm_size = None
-    _wait = True
+    pool_id = None
 
     # parse arguments
-    parser = argparse.ArgumentParser(prog="az_spark")
+    parser = argparse.ArgumentParser(prog='az_spark')
 
-    parser.add_argument("--cluster-id", required=True,
-                        help="the unique name of your spark cluster")
+    parser.add_argument(dest='cluster_id',
+                        help='The unique id of your spark cluster')
 
     args = parser.parse_args()
 
-    print()
     if args.cluster_id is not None:
-        _pool_id = args.cluster_id
+        pool_id = args.cluster_id
 
     # Read config file
     global_config = configparser.ConfigParser()
-    global_config.read(_config_path)
+    global_config.read(CONFIG_PATH)
 
     # Set up batch configuration
     batch_account_key = global_config.get('Batch', 'batchaccountkey')

@@ -49,4 +45,4 @@ if __name__ == '__main__':
     # create spark cluster
     clusterlib.get_cluster_details(
         batch_client,
-        _pool_id)
+        pool_id)
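Each hunk above begins at the tail of the same Python 2/3 compatibility import, which the collapsed diff view truncates. For reference, the full idiom these scripts use is the standard configparser shim:

```python
try:
    # Python 3 module name
    import configparser
except ImportError:
    # Python 2 fallback, aliased so the rest of the script is version-agnostic
    import ConfigParser as configparser
```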
bin/spark-cluster-list · 4 additions, 10 deletions
@@ -10,22 +10,17 @@ except ImportError:
     import ConfigParser as configparser
 
 # Path to config
-_config_path = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
+CONFIG_PATH = os.path.join(os.path.dirname(__file__), '../configuration.cfg')
 
 if __name__ == '__main__':
 
-    _pool_id = None
-    _vm_count = None
-    _vm_size = None
-    _wait = True
-
     # parse arguments
-    parser = argparse.ArgumentParser(prog="az_spark")
+    parser = argparse.ArgumentParser(prog='az_spark')
     args = parser.parse_args()
 
     # Read config file
     global_config = configparser.ConfigParser()
-    global_config.read(_config_path)
+    global_config.read(CONFIG_PATH)
 
     # Set up batch configuration
     batch_account_key = global_config.get('Batch', 'batchaccountkey')

@@ -39,5 +34,4 @@ if __name__ == '__main__':
         batch_service_url)
 
     # create spark cluster
-    clusterlib.list_clusters(
-        batch_client)
+    clusterlib.list_clusters(batch_client)
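All five scripts then read configuration.cfg and pull their Azure Batch settings from its [Batch] section. Only the batchaccountkey option is visible in these hunks; the other option names in this sketch are assumptions inferred from the batch_service_url variable in the surrounding context:

```python
import configparser

config = configparser.ConfigParser()
config.read('configuration.cfg')  # the real scripts resolve this path relative to the script file

# 'batchaccountkey' appears in the diffs above; the other option names are assumed.
batch_account_key = config.get('Batch', 'batchaccountkey')
batch_account_name = config.get('Batch', 'batchaccountname')  # assumed option name
batch_service_url = config.get('Batch', 'batchserviceurl')    # assumed option name
```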