@@ -310,6 +310,10 @@ def parse_args():
310310 "--additional-tags" , type = "string" , default = "" ,
311311 help = "Additional tags to set on the machines; tags are comma-separated, while name and " +
312312 "value are colon separated; ex: \" Task:MySparkProject,Env:production\" " )
313+ parser .add_option (
314+ "--tag-volumes" , action = "store_true" , default = False ,
315+ help = "Apply the tags given in --additional-tags to any EBS volumes " +
316+ "attached to master and slave instances." )
313317 parser .add_option (
314318 "--copy-aws-credentials" , action = "store_true" , default = False ,
315319 help = "Add AWS credentials to hadoop configuration to allow Spark to access S3" )
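As context for the option above: `--tag-volumes` only has an effect when `--additional-tags` is also given, since it reuses the same parsed tag dictionary. The following is a minimal illustration (not part of the patch) of how an `--additional-tags` value in the comma/colon format from the help text is parsed; `additional_tags_opt` is an invented stand-in for `opts.additional_tags`, and the parsing mirrors the expression in `launch_cluster()` below.

```python
# Illustration only: mirrors the tag parsing used in launch_cluster() below.
additional_tags_opt = "Task:MySparkProject,Env:production"  # example --additional-tags value

additional_tags = {}
if additional_tags_opt.strip():
    additional_tags = dict(
        map(str.strip, tag.split(':', 1)) for tag in additional_tags_opt.split(',')
    )

print(additional_tags)  # {'Task': 'MySparkProject', 'Env': 'production'}
```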
@@ -751,16 +755,28 @@ def launch_cluster(conn, opts, cluster_name):
             map(str.strip, tag.split(':', 1)) for tag in opts.additional_tags.split(',')
         )
 
+    print('Applying tags to master nodes')
     for master in master_nodes:
         master.add_tags(
             dict(additional_tags, Name='{cn}-master-{iid}'.format(cn=cluster_name, iid=master.id))
         )
 
+    print('Applying tags to slave nodes')
     for slave in slave_nodes:
         slave.add_tags(
             dict(additional_tags, Name='{cn}-slave-{iid}'.format(cn=cluster_name, iid=slave.id))
         )
 
+    if opts.tag_volumes:
+        if len(additional_tags) > 0:
+            print('Applying tags to volumes')
+            all_instance_ids = [x.id for x in master_nodes + slave_nodes]
+            volumes = conn.get_all_volumes(filters={'attachment.instance-id': all_instance_ids})
+            for v in volumes:
+                v.add_tags(additional_tags)
+        else:
+            print('--tag-volumes has no effect without --additional-tags')
+
     # Return all the instances
     return (master_nodes, slave_nodes)
 
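The volume-tagging block above finds every EBS volume attached to the cluster with a single filtered `get_all_volumes` call rather than walking each instance's block device mapping. As a standalone sketch of that step, assuming boto 2 (the library this script uses) and AWS credentials available in the environment; the helper name, region, and instance IDs are hypothetical, while `connect_to_region`, `get_all_volumes` with an `attachment.instance-id` filter, and `add_tags` are the boto 2 calls the patch relies on:

```python
# Sketch only, not part of the patch: tag every EBS volume attached to the
# given instances, assuming boto 2 and credentials in the environment.
import boto.ec2

def tag_attached_volumes(region, instance_ids, tags):  # hypothetical helper
    conn = boto.ec2.connect_to_region(region)
    # One filtered API call returns all volumes attached to any of the instances.
    volumes = conn.get_all_volumes(filters={'attachment.instance-id': instance_ids})
    for v in volumes:
        v.add_tags(tags)

# Example usage with made-up values:
# tag_attached_volumes('us-east-1', ['i-0123456789abcdef0'],
#                      {'Task': 'MySparkProject', 'Env': 'production'})
```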