stephen.cranefield / ansible_spark_openstack
Point to var files
master
1 parent 2228f93
commit 0cf993a51b6d1558929d97b6c37ac64e879d9713
authored by Johan Dahlberg on 4 Mar 2015
Showing 2 changed files (in the diffs below, lines added by this commit are prefixed with +):
create_spark_cloud_playbook.yml
deploy_spark_playbook.yml
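Both playbooks now load their settings from vars/main.yml, which is not part of this commit. Below is a minimal sketch of what that file would need to provide, inferred from the variables the two playbooks reference; every value is a placeholder, not taken from the repository.

# Hypothetical vars/main.yml (illustrative only, not part of this commit).
# Keys are the variables referenced by the playbooks; values are placeholders.
instance_state: present                 # passed to nova_compute state
key_name: my-keypair                    # name of an existing OpenStack keypair
network_id: 11111111-2222-3333-4444-555555555555   # UUID used for net-id
nbr_of_slaves: 3                        # upper bound of the with_sequence loop
user: ubuntu                            # login user whose ~/.ssh is populated
ssh_keys_to_use: files/cluster_key      # key file copied to the instances

With such a file in place, the cluster would be created and provisioned by running the two playbooks in order with ansible-playbook (create_spark_cloud_playbook.yml first, then deploy_spark_playbook.yml against an inventory containing the new hosts), with OpenStack credentials made available to the nova_compute module.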
create_spark_cloud_playbook.yml
 ---
 - hosts: localhost
   connection: local
+  vars_files:
+    - vars/main.yml
   tasks:
     - name: Create a new spark master instance
       nova_compute:
         state: "{{ instance_state }}"
         name: spark-master
         image_id: 9bf957ba-a0ce-4513-ba8c-e80d42ea9faf
         key_name: "{{ key_name }}"
         wait_for: 200
         flavor_id: 4
         nics:
           - net-id: "{{ network_id }}"
         security_groups: spark,default
         meta:
           hostname: spark-master
           ansible_host_groups: spark_masters,default
       register: spark_master
     - name: Create a new spark slaves
       nova_compute:
         state: "{{ instance_state }}"
         name: "{{ item }}"
         image_id: 9bf957ba-a0ce-4513-ba8c-e80d42ea9faf
         key_name: "{{ key_name }}"
         wait_for: 200
         flavor_id: 4
         nics:
           - net-id: "{{ network_id }}"
         security_groups: spark,default
         meta:
           hostname: "{{ item }}"
           ansible_host_groups: spark_slaves,default
       register: spark_slaves
       with_sequence: start=0 end="{{ nbr_of_slaves }}" format=spark-slave%02x
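The slave-creation task above names the instances with format=spark-slave%02x, i.e. a zero-padded hexadecimal index (spark-slave00, spark-slave01, ..., spark-slave0a for index 10). A stand-alone play to preview the generated names, illustrative only and not part of the repository:

# Illustrative only: prints the names the with_sequence loop would generate
# for indices 0..3 (spark-slave00 .. spark-slave03).
- hosts: localhost
  connection: local
  gather_facts: no
  tasks:
    - name: show generated slave names
      debug: msg="{{ item }}"
      with_sequence: start=0 end=3 format=spark-slave%02x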
deploy_spark_playbook.yml
 ---
 # ------------------------
 # Deploy the general stuff
 # ------------------------
 - hosts: all
   sudo: yes
+  vars_files:
+    - vars/main.yml
   pre_tasks:
     - name: Update APT cache
       apt: update_cache=yes
   tasks:
     - name: install java
       apt: name=openjdk-7-jre state=present update_cache=yes
     - name: disable net.ipv6.conf.all.disable_ipv6
       sysctl: name=net.ipv6.conf.all.disable_ipv6 value=1 state=present
     - name: disable net.ipv6.conf.default.disable_ipv6
       sysctl: name=net.ipv6.conf.default.disable_ipv6 value=1 state=present
     - name: disable net.ipv6.conf.lo.disable_ipv6
       sysctl: name=net.ipv6.conf.lo.disable_ipv6 value=1 state=present
     - name: distribute host file
       template: src=templates/hosts.j2 dest=/etc/hosts
     - name: deploy ssh-keys
       copy: src={{ssh_keys_to_use}} dest=/home/{{ user }}/.ssh/
       register: ssh_key
     - name: distribute ssh config
       template: src=templates/config.j2 dest=/home/{{ user }}/.ssh/config
       register: ssh_debug
     - name: download spark
       get_url: url=http://d3kbcqa49mib13.cloudfront.net/spark-1.2.1-bin-hadoop2.4.tgz dest=/opt/ sha256sum=8e618cf67b3090acf87119a96e5e2e20e51f6266c44468844c185122b492b454
     - name: unzip spark
       unarchive: copy=no src=/opt/spark-1.2.1-bin-hadoop2.4.tgz dest=/opt
     - name: deploy slaves configuration
       template: src=templates/slaves.j2 dest=/opt/spark-1.2.1-bin-hadoop2.4/conf/slaves

 # --------------------------------------------------
 # Kick of spark (making the master start the slaves)
 # --------------------------------------------------
 - hosts: spark_masters
   tasks:
     - name: stop spark master (if running)
       command: /opt/spark-1.2.1-bin-hadoop2.4/sbin/stop-master.sh
     - name: start spark master
       shell: SPARK_MASTER_IP="{{ ansible_hostname }}" /opt/spark-1.2.1-bin-hadoop2.4/sbin/start-master.sh
     - name: stop the slaves (if running)
       shell: /opt/spark-1.2.1-bin-hadoop2.4/sbin/stop-slaves.sh
     - name: start the slaves
       shell: /opt/spark-1.2.1-bin-hadoop2.4/sbin/start-slaves.sh
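The spark_masters play above only fires the stop/start scripts and never verifies that the daemons actually came up. A hedged extra play one could append after it, assuming Spark's default standalone ports (7077 for the master RPC endpoint, 8080 for its Web UI); this is a sketch, not part of the commit:

# Optional sanity check (illustrative, not in the repository): block until the
# Spark master is listening on its default RPC and Web UI ports.
- hosts: spark_masters
  tasks:
    - name: wait for the Spark master RPC port
      wait_for: port=7077 timeout=120
    - name: wait for the Spark master Web UI
      wait_for: port=8080 timeout=120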