#!/bin/bash

# ANSI colour escape sequences used for the status messages printed
# throughout the installer.
lBlue='\033[1;34m'
green='\033[0;32m'
orange='\033[0;33m'
NC='\033[0m'
red='\033[0;31m'
yellow='\033[1;33m'
dGrey='\033[1;30m'
lGrey='\033[1;37m'
lCyan='\033[1;36m'

# Remember the launch directory so generated files can reference it by
# absolute path.
wd=$(pwd)
| 13 | |
| 14 | |
# Clean up any artifacts left behind by prior executions so this run
# starts from a known state.
rm -fr .keys
rm -f ansible/hosts/cluster
rm -f ansible/host_vars/*

# Source the configuration information; it is expected to define (at
# least) $hosts and $iUser (the initial login user on each target host).
# Fail with a clear message if it is missing rather than sourcing nothing
# and producing confusing downstream errors.
if [ ! -f install.cfg ]; then
	echo -e "${red}Configuration file ${yellow}install.cfg${red} not found!!${NC}"
	exit 1
fi
. install.cfg
| 22 | |
# Abort early with a clear message if the configuration didn't provide
# any target hosts.  (The config file actually sourced above is
# install.cfg, so name that file in the hint — the old message said
# "installer.cfg".)
if [ -z "$hosts" ]; then
	echo -e "${red}No hosts specified!!${NC}"
	echo -e "${red}Did you forget to update the config file ${yellow}install.cfg${red}?${NC}"
	exit 1
fi
| 28 | |
# Create the key directory (idempotent; a prior run's copy was removed
# during cleanup).
mkdir -p .keys

# Start a fresh ansible inventory for the cluster.
echo "[cluster]" > ansible/hosts/cluster

# Silence SSH and avoid interactive host-key prompts for every connection
# this script makes.  NOTE(review): this clobbers any existing
# ~/.ssh/config for the invoking user.
mkdir -p ~/.ssh
rm -f ~/.ssh/config
echo "Host *" > ~/.ssh/config
echo " StrictHostKeyChecking no" >> ~/.ssh/config
echo " UserKnownHostsFile /dev/null" >> ~/.ssh/config

# Root needs the same ssh behaviour for the playbook runs done via sudo;
# make sure /root/.ssh exists before copying into it.
sudo mkdir -p /root/.ssh
sudo cp ~/.ssh/config /root/.ssh/config
| 42 | |
# Per-host provisioning loop.  For each host named in $hosts this:
#   1) generates a dedicated ssh key-pair (stored under .keys/),
#   2) builds a one-shot ~/.bash_login bootstrap script that creates a
#      passwordless-sudo "voltha" account and installs the public key,
#   3) copies the script to the host and logs in once so it executes and
#      then removes itself,
#   4) registers the host in the ansible inventory, and
#   5) opens a reverse ssh tunnel so the host can pull images from the
#      local docker registry via localhost:5000.
for i in $hosts
do
	# Generate the key for the host
	echo -e "${lBlue}Generating the key-pair for communication with host ${yellow}$i${NC}"
	ssh-keygen -f ./$i -t rsa -N ''
	mv $i .keys

	# Generate the pre-configuration script.  The heredoc body runs on the
	# remote host as the initial user: it grants the (future) voltha user
	# passwordless sudo and stages the voltha home dir and ssh key material.
	echo -e "${lBlue}Creating the pre-configuration script${NC}"
	cat <<HERE > bash_login.sh
#!/bin/bash
echo "voltha ALL=(ALL) NOPASSWD:ALL" > tmp
sudo chown root.root tmp
sudo mv tmp /etc/sudoers.d/voltha
sudo mkdir /home/voltha
mkdir voltha_ssh
ssh-keygen -f ~/voltha_ssh/id_rsa -t rsa -N ''
sudo mv voltha_ssh /home/voltha/.ssh
HERE
	# Append the freshly generated public key as voltha's authorized_keys,
	# finish the account setup, then have the script delete itself and log
	# out so the interactive ssh session below returns.
	echo "sudo cat <<HERE > /home/voltha/.ssh/authorized_keys" >> bash_login.sh
	cat $i.pub >> bash_login.sh
	echo "HERE" >> bash_login.sh
	echo "chmod 400 /home/voltha/.ssh/authorized_keys" >> bash_login.sh
	echo "sudo useradd -b /home -d /home/voltha voltha -s /bin/bash" >> bash_login.sh
	echo "sudo chown -R voltha.voltha /home/voltha" >> bash_login.sh
	echo "echo 'voltha:voltha' | sudo chpasswd" >> bash_login.sh
	echo "rm .bash_login" >> bash_login.sh
	echo "logout" >> bash_login.sh
	rm $i.pub
	# Copy the pre-config file to the VM.  In test mode (".test" directory
	# present) a pre-populated key is used instead of password auth.
	echo -e "${lBlue}Transfering pre-configuration script to ${yellow}$i${NC}"
	if [ -d ".test" ]; then
		echo -e "${red}Test mode set!!${lBlue} Using pre-populated ssh key for ${yellow}$i${NC}"
		scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i .test/$i bash_login.sh $iUser@$i:.bash_login
	else
		scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no bash_login.sh $iUser@$i:.bash_login
	fi
	rm bash_login.sh

	# Run the pre-config file on the VM: the interactive login triggers
	# ~/.bash_login, which performs the setup and then logs out.
	echo -e "${lBlue}Running the pre-configuration script on ${yellow}$i${NC}"
	if [ -d ".test" ]; then
		echo -e "${red}Test mode set!!${lBlue} Using pre-populated ssh key for ${yellow}$i${NC}"
		ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i .test/$i $iUser@$i
	else
		ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no $iUser@$i
	fi

	# Configure ansible and ssh for silent operation: add the host to the
	# inventory and point ansible at its private key.
	echo -e "${lBlue}Configuring ansible${NC}"
	echo $i >> ansible/hosts/cluster
	echo "ansible_ssh_private_key_file: $wd/.keys/$i" > ansible/host_vars/$i

	# Create the tunnel to the registry to allow pulls from localhost
	# (-f backgrounds the ssh process, -N opens no remote shell).
	echo -e "${lBlue}Creating a secure shell tunnel to the registry for ${yellow}$i${NC}"
	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i .keys/$i -f voltha@$i -R 5000:localhost:5000 -N

done
# Add the dependent software list to the cluster variables
echo -e "${lBlue}Setting up dependent software${NC}"
# Delete any grub updates since the boot disk is almost
# guaranteed not to be the same device as the installer.
mkdir grub_updates
sudo mv deb_files/*grub* grub_updates
# Sort the packages in dependency order to get rid of scary non-errors
# that are issued by ansible.
# NOTE(review): this dependency-sorting step is currently disabled;
# presumably it can be re-enabled by uncommenting once sort_packages.sh
# is validated — confirm before relying on it.
#echo -e "${lBlue}Dependency sorting dependent software${NC}"
#./sort_packages.sh
#echo "deb_files:" >> ansible/group_vars/all
#for i in `cat sortedDebs.txt`
#do
#echo " - $i" >> ansible/group_vars/all
#done
Sergio Slobodrian | ee4b2bc | 2017-06-05 10:08:59 -0400 | [diff] [blame] | 116 | |
# Distribute the generated ssh keys to the cluster-host ansible role so
# that the hosts can log into one another without passwords.
printf '%b\n' "${lBlue}Propagating ssh keys${NC}"
cp -r .keys ansible/roles/cluster-host/files

# Launch the main playbook against the freshly built cluster inventory.
printf '%b\n' "${lBlue}Running ansible${NC}"
cp ansible/ansible.cfg .ansible.cfg
sudo ansible-playbook ansible/voltha.yml -i ansible/hosts/cluster
| 125 | |
# All of the cluster hosts need to be rebooted because of the software
# installs.  Reboot them and wait patiently until they all come back.
# Note this destroys the registry tunnel which is no longer needed.
# (The banner is printed once, before the loop, rather than once per
# host as before; "wich" typo fixed.)
echo -e "${lBlue}Rebooting cluster hosts${NC}"
hList=""
for i in $hosts
do
	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i ".keys/$i" "voltha@$i" sudo telinit 6
	hList="$i $hList"
done

# Give the hosts time to shut down so that pings stop working or the
# script just falls through the next loop and the rest fails.
echo -e "${lBlue}Waiting for shutdown${NC}"
sleep 5
| 142 | |
# Poll the rebooted hosts until every one of them answers a ping again,
# removing each host from the pending list as it comes back.
while [ -n "$hList" ];
do
	echo -e "${lBlue}Waiting for hosts to reboot ${yellow}$hList${NC}"
	for i in $hList
	do
		if ping -q -c 1 "$i" > /dev/null 2>&1; then
			# Escape the dots so an IP address is a literal sed pattern,
			# then drop this host from the list and tidy stray spaces.
			ipExpr=$(echo "$i" | sed -e "s/\./[.]/g")
			hList=$(echo "$hList" | sed -e "s/$ipExpr//" -e "s/^ //" -e "s/ $//")
		fi
	done
	# Pause between polling rounds instead of busy-waiting.
	sleep 5
done
| 158 | |
# Now initialize the docker swarm cluster with managers.  The first
# server listed in $hosts becomes the primary swarm manager; the rest
# are backup managers that join the swarm.  In the future, worker nodes
# will likely be added.

echo "[swarm-master]" > ansible/hosts/swarm-master
echo "[swarm-master-backup]" > ansible/hosts/swarm-master-backup

first=true
for host in $hosts
do
	if [ "$first" = "true" ]; then
		# First host: primary manager; record its address for the plays.
		echo "$host" >> ansible/hosts/swarm-master
		echo "swarm_master_addr: \"$host\"" >> ansible/group_vars/all
		first=false
	else
		echo "$host" >> ansible/hosts/swarm-master-backup
	fi
done
sudo ansible-playbook ansible/swarm.yml -i ansible/hosts/swarm-master
sudo ansible-playbook ansible/swarm.yml -i ansible/hosts/swarm-master-backup
sudo ansible-playbook ansible/voltha.yml -i ansible/hosts/swarm-master
Sergio Slobodrian | d24189e | 2017-06-10 23:27:15 -0400 | [diff] [blame] | 181 | |