I have an Ansible playbook that creates EC2 instances dynamically, and that part works fine. I add the new instances to a host group, and I then want Ansible to run a script on them. However, the play fails while waiting for the connection to come up, even though I am providing the same key that Ansible used to create the instances.
- name: Deploy EC2 Master node
  hosts: localhost
  gather_facts: False
  vars_files:
    - ~/aws-common/automation/ansible/config/var_input.yml
  tasks:
    - name: Get AWS credentials
      sts_assume_role:
        role_arn: "{{ role_arn }}"
        role_session_name: "{{ role_session_name }}"
      register: assumed_role
    - name: Provision a set of instances
      ec2:
        key_name: "{{ key_name }}"
        group: "{{ group }}"
        instance_type: "{{ instance_type }}"
        region: "{{ region }}"
        image: "{{ image }}"
        wait: "{{ wait }}"
        wait_timeout: "{{ wait_timeout }}"
        count: "{{ count }}"
        instance_profile_name: "{{ instance_profile_name }}"
        instance_tags:
          Name: "{{ Name }}"
          deployer: "{{ deployer }}"
          resourceowner: "{{ resourceowner }}"
        monitoring: "{{ monitoring }}"
        vpc_subnet_id: "{{ vpc_subnet_id }}"
        assign_public_ip: "{{ assign_public_ip }}"
        aws_access_key: "{{ assumed_role.sts_creds.access_key }}"
        aws_secret_key: "{{ assumed_role.sts_creds.secret_key }}"
        security_token: "{{ assumed_role.sts_creds.session_token }}"
        volumes:
          - device_name: /dev/sda1   # HadoopMaster root volume, mounted at /
            volume_type: gp2
            encrypted: true
            volume_size: 100
            delete_on_termination: true
          - device_name: /dev/sdf    # HadoopMaster data volume, mounted at /sso/sfw
            volume_type: gp2
            encrypted: true
            volume_size: 70
            delete_on_termination: true
      register: ec2
    - name: Add new instance to host group
      add_host:
        hostname: "{{ item.private_ip }}"
        groupname: launched
      loop: "{{ ec2.instances }}"
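    # A possible variant, not from my playbook above: attach the connection
    # details to each dynamically added host, so plays against "launched"
    # know which user and key to use (user and key path assumed from the
    # command line at the end of this question).
    # - name: Add new instance to host group
    #   add_host:
    #     hostname: "{{ item.private_ip }}"
    #     groupname: launched
    #     ansible_user: ec2-user
    #     ansible_ssh_private_key_file: ~/aws-common/automation/ansible/files/kafka.ppk
    #   loop: "{{ ec2.instances }}"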
    - name: Wait for SSH to come up
      wait_for_connection:
        delay: 60
        timeout: 320
      delegate_to: "{{ item.private_dns_name }}"
      loop: "{{ ec2.instances }}"
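    # A sketch of an alternative wait that needs no SSH credentials at all:
    # check from the control node that port 22 is open (this assumes the
    # control node can reach the instances' private IPs directly).
    # - name: Wait for SSH port to open
    #   wait_for:
    #     host: "{{ item.private_ip }}"
    #     port: 22
    #     delay: 60
    #     timeout: 320
    #   loop: "{{ ec2.instances }}"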
- name: Mounting the attached EBS volumes
  hosts: launched
  remote_user: ec2-user
  gather_facts: false
  tasks:
    - name: Run a script with arguments (free form)
      script: ~/aws-common/automation/ansible/files/formatandmount.sh
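The contents of formatandmount.sh are not shown here. Purely for illustration, a minimal sketch of what such a script might do for the second volume; the /dev/sdf device and /sso/sfw mount point are taken from the volume comments above, everything else is an assumption:

#!/bin/bash
# Hypothetical sketch only -- not the real formatandmount.sh.
set -e
DEVICE=/dev/sdf       # second EBS volume from the play (may appear as /dev/xvdf on Xen instances)
MOUNTPOINT=/sso/sfw   # mount point suggested by the volume comment

# Create a filesystem only if the device does not already have one.
if ! blkid "$DEVICE" >/dev/null 2>&1; then
    mkfs -t ext4 "$DEVICE"
fi

mkdir -p "$MOUNTPOINT"
mount "$DEVICE" "$MOUNTPOINT"

# Persist the mount across reboots.
grep -q "^$DEVICE" /etc/fstab || echo "$DEVICE $MOUNTPOINT ext4 defaults,nofail 0 2" >> /etc/fstab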
[desind@comdpmactl01 files]$ cat ~/ansible.cfg
[defaults]
inventory = hosts
executable=/etc/ansible-wrapper
retry_files_enabled = False
allow_world_readable_tmpfiles = True
remote_tmp = /tmp/.ansible-${USER}/tmp
#log_path = /home/desind/myAnsible/test.log
command_warnings = False
[ssh_connection]
# Adding ForwardAgent=yes so 'copy wallet to backup directory' works.
# The ssh chain is vspoem04 -> vspoem01 -> ansible_fqdn.
# Without ForwardAgent, the vspoem01 -> ansible_fqdn hop fails.
ssh_args = -o ControlMaster=no -o ForwardAgent=yes
pipelining = True
host_key_checking = False
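One way to supply the key and remote user to every host, including dynamically added ones, is through [defaults] in ansible.cfg. This is a suggestion rather than part of my current config; private_key_file and remote_user are standard options:

[defaults]
private_key_file = ~/aws-common/automation/ansible/files/kafka.ppk
remote_user = ec2-user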
Command: /sso/sfw/python2/bin/ansible-playbook test2.yaml --extra-vars='ansible_python_interpreter=/usr/bin/python' -i ansible_ssh_private_key_file=~/aws-common/automation/ansible/files/kafka.ppk --user ec2-user
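Note that -i selects an inventory source, so the ansible_ssh_private_key_file=... argument above is most likely being parsed as an inventory path rather than reaching SSH. A sketch of the invocation using the standard --private-key flag instead, assuming kafka.ppk is an OpenSSH-format key (a PuTTY .ppk would first need converting with puttygen):

/sso/sfw/python2/bin/ansible-playbook test2.yaml \
  --extra-vars='ansible_python_interpreter=/usr/bin/python' \
  --private-key ~/aws-common/automation/ansible/files/kafka.ppk \
  --user ec2-user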