PageRenderTime 53ms CodeModel.GetById 24ms RepoModel.GetById 1ms app.codeStats 0ms

/provisioning/set_up_my_bpro.yml

https://codeberg.org/keks24/set-up-my-bpro
YAML | 312 lines | 242 code | 43 blank | 27 comment | 0 complexity | 36dd154cdec4daa6de21948da674a298 MD5 | raw file
Possible License(s): Apache-2.0
---
#############################################################################
# Copyright 2018 Ramon Fischer                                              #
#                                                                           #
# Licensed under the Apache License, Version 2.0 (the "License");           #
# you may not use this file except in compliance with the License.          #
# You may obtain a copy of the License at                                   #
#                                                                           #
#     http://www.apache.org/licenses/LICENSE-2.0                            #
#                                                                           #
# Unless required by applicable law or agreed to in writing, software       #
# distributed under the License is distributed on an "AS IS" BASIS,         #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  #
# See the License for the specific language governing permissions and       #
# limitations under the License.                                            #
#############################################################################
  17. - hosts: "all"
  18. pre_tasks:
  19. - name: check, if credentials file exists
  20. local_action:
  21. module: "stat"
  22. path: "/vagrant/provisioning/group_vars/credentials.yml"
  23. register: "CREDENTIALS"
  24. failed_when: "CREDENTIALS.stat.exists == false"
  25. tags:
  26. - pre_tasks
  27. - include_vars: "/vagrant/provisioning/group_vars/credentials.yml"
  28. when:
  29. - CREDENTIALS.stat.exists == true
  30. tags:
  31. - pre_tasks
  32. - name: install python packages for provisioning via ansible
  33. raw: "apt-get install python python3"
  34. tags:
  35. - pre_tasks
  36. - name: create directory for ansible dotfile switches
  37. file:
  38. path: "/usr/local/etc/.ansible_dotfile_switches"
  39. owner: "root"
  40. group: "staff"
  41. mode: "0755"
  42. state: "directory"
  43. tags:
  44. - pre_tasks
  45. - name: upgrade installed packages
  46. package:
  47. upgrade: "dist"
  48. update_cache: "yes"
  49. cache_valid_time: "{{ CACHE_VALID_TIME }}"
  50. tags:
  51. - pre_tasks
  52. - name: check, if root password has been already set
  53. stat:
  54. path: "/usr/local/etc/.ansible_dotfile_switches/root_password_set"
  55. register: "ROOT_PASSWORD_SET"
  56. tags:
  57. - pre_tasks
  58. - name: set 20 characters random password for user "root"
  59. user:
  60. name: "root"
  61. group: "root"
  62. password: "{{ lookup('password', '/home/vagrant/credentials/password_super_user_root chars=ascii_letters,digits,hexdigits,punctuation') | password_hash('sha512') }}"
  63. state: "present"
  64. when:
  65. - ROOT_PASSWORD_SET.stat.exists == false
  66. tags:
  67. - pre_tasks
  68. - name: create indicator
  69. file:
  70. path: "/usr/local/etc/.ansible_dotfile_switches/root_password_set"
  71. owner: "root"
  72. group: "staff"
  73. mode: "0644"
  74. state: "touch"
  75. when:
  76. - ROOT_PASSWORD_SET.stat.exists == false
  77. tags:
  78. - pre_tasks
  79. - name: create group "ansible"
  80. group:
  81. name: "ansible"
  82. state: "present"
  83. tags:
  84. - pre_tasks
  85. # create user "ansible" for future provisioning
  86. - name: create user "ansible" with 20 characters random password
  87. user:
  88. name: "ansible"
  89. group: "ansible"
  90. groups:
  91. - "sudo"
  92. password: "{{ lookup('password', '/home/vagrant/credentials/password_privileged_user_ansible chars=ascii_letters,digits,hexdigits,punctuation') | password_hash('sha512') }}"
  93. shell: "/bin/bash"
  94. home: "/home/ansible/"
  95. update_password: "on_create"
  96. create_home: "yes"
  97. append: "yes"
  98. state: "present"
  99. tags:
  100. - pre_tasks
  101. - name: set authorised key for user "ansible"
  102. authorized_key:
  103. user: "ansible"
  104. key: "{{ VAULT_ANSIBLE_SSH_PUBLIC_KEY }}"
  105. exclusive: "yes"
  106. state: "present"
  107. tags:
  108. - pre_tasks
  109. roles:
  110. - { role: "remove_dotfile_switches",
  111. REMOVE_ALL: "no",
  112. REMOVE_FILES_IN_LIST: "no",
  113. FILE_DELETE_LIST: ["", ""] }
  114. - { role: "hostname",
  115. HOSTNAME: "bananapi",
  116. DOMAIN_NAMES: "bananapi.local" }
  117. - "tmpfs"
  118. - { role: "sudo",
  119. USERNAME: "pi",
  120. SUDO_USER: "yes",
  121. RESTRICT_SU: "yes" }
  122. - { role: "harden_sshd",
  123. PERMIT_ROOT_LOGIN: "no",
  124. USERNAME: "pi",
  125. SUDO_USER: "yes",
  126. SET_ALLOW_GROUPS_IN_LIST: "yes",
  127. ALLOW_GROUP_LIST: ["ssh"],
  128. LOCAL_TEST: "yes" }
  129. - "netselect-apt"
  130. - "common"
  131. - "ntp"
  132. - { role: "fail2ban",
  133. IGNORE_IP_ADDRESS: "{{ VAULT_SERVER_NET_IP_ADDRESS }}",
  134. DEFAULT_BAN_TIME: "86400",
  135. DEFAULT_FIND_TIME: "3600" }
  136. - { role: "clamav",
  137. INFECTED_FILES_DIRECTORY: "/usr/local/etc/clamav/infected_files" }
  138. - { role: "ssh_banner",
  139. FONT_NAME: "slant",
  140. BANNER_TEXT: "{{ VAULT_DDCLIENT_DYN_DNS_FQDN }}" }
  141. - { role: "motd",
  142. BACKUP_OLD_STATIC_MOTD: "no",
  143. BACKUP_OLD_DYNAMIC_MOTD: "no" }
  144. - { role: "unattended-upgrades",
  145. RECIPIENT_EMAIL_ADDRESS: "{{ VAULT_RECIPIENT_EMAIL_ADDRESS }}",
  146. HOSTNAME: "{{ VAULT_POSTFIX_HOSTNAME }}",
  147. SMTP_FQDN: "{{ VAULT_POSTFIX_SMTP_FQDN }}",
  148. SMTP_PORT: "{{ VAULT_POSTFIX_SMTP_PORT }}",
  149. RELAY_EMAIL_ADDRESS: "{{ VAULT_POSTFIX_RELAY_EMAIL_ADDRESS }}",
  150. RELAY_EMAIL_PASSWORD: "{{ VAULT_POSTFIX_RELAY_EMAIL_PASSWORD }}" }
  151. - { role: "neovim",
  152. USERNAME: "root" }
  153. - { role: "neovim",
  154. USERNAME: "pi",
  155. SUDO_USER: "yes" }
  156. - { role: "zsh",
  157. USERNAME: "root" }
  158. - { role: "zsh",
  159. USERNAME: "pi" }
  160. - { role: "tmux",
  161. USERNAME: "root",
  162. WEMUX_HOST_USER: "root" }
  163. - { role: "tmux",
  164. USERNAME: "pi",
  165. SUDO_USER: "yes",
  166. WEMUX_HOST_USER: "root" }
  167. - { role: "gitalias",
  168. USERNAME: "pi",
  169. SUDO_USER: "yes" }
  170. - { role: "apache",
  171. APACHE_USERNAME: "web-bananapi",
  172. DOMAIN_NAME: "bananapi.local",
  173. HAS_WWW_PREFIX: "no",
  174. HAS_SSL: "no" }
  175. - { role: "nfs_share",
  176. ALLOWED_IP_ADDRESS: "{{ VAULT_SERVER_NET_IP_ADDRESS }}",
  177. SDA1_UUID: "{{ VAULT_SDA1_UUID }}",
  178. SDA2_UUID: "{{ VAULT_SDA2_UUID }}",
  179. NFS_OPTIONS: "rw,sync,no_subtree_check,root_squash",
  180. USERNAME: "pi",
  181. SUDO_USER: "yes" }
  182. - { role: "aria2c",
  183. APACHE_USERNAME: "web-aria2ui",
  184. DOMAIN_NAME: "aria2ui.bananapi.local",
  185. HAS_WWW_PREFIX: "no",
  186. HAS_SSL: "no",
  187. USE_SYSTEMD_SERVICE_FILE: "yes",
  188. RPC_LISTEN_PORT: "6800",
  189. INTERFACE: "{{ VAULT_SERVER_STATIC_IP_ADDRESS }}",
  190. ALLOWED_IP_ADDRESS: "{{ VAULT_SERVER_NET_IP_ADDRESS }}" }
  191. - { role: "mplayer",
  192. WITH_FFMPEG: "yes" }
  193. - { role: "ddclient",
  194. DDCLIENT_PROTOCOL: "{{ VAULT_DDCLIENT_PROTOCOL }}",
  195. DDCLIENT_USE: "{{ VAULT_DDCLIENT_SYNCHRONISE_METHOD }}",
  196. DDCLIENT_TARGET: "{{ VAULT_DDCLIENT_TARGET }}",
  197. DDCLIENT_SSL: "yes",
  198. DDCLIENT_SERVER: "{{ VAULT_DDCLIENT_UPDATE_SERVER }}",
  199. DDCLIENT_LOGIN: "{{ VAULT_DEDYN_USERNAME }}",
  200. DDCLIENT_PASSWORD: "{{ VAULT_DEDYN_PASSWORD }}",
  201. DDCLIENT_DYN_DNS_FQDN: "{{ VAULT_DDCLIENT_DYN_DNS_FQDN }}",
  202. LETS_ENCRYPT_EMAIL_ADDRESS: "{{ VAULT_LETS_ENCRYPT_EMAIL_ADDRESS }}",
  203. WITH_LETS_ENCRYPT: "yes" }
  204. - { role: "purge_unnecessary_packages",
  205. PURGE_PACKAGES_IN_LIST: "yes",
  206. PACKAGE_PURGE_LIST: ["command-not-found", "nano", "vim-common", "vim-tiny"] }
  207. post_tasks:
  208. - name: update file database
  209. shell: "updatedb"
  210. tags:
  211. - post_tasks
  212. - debug:
  213. msg:
  214. - " ######################"
  215. - " # IMPORTANT MESSAGES #"
  216. - " ######################"
  217. - ""
  218. - " ----------------------> list usage here <------------------------"
  219. - "refactor me"
  220. - "motd old files location"
  221. - " 1) /home/vagrant/new_ssh_port_live: a new port number has been generated on the target server which can be found on the 'bpro-provision-vm': vagrant ssh bpro-provison-vm"
  222. - " 1.1) log in to target server:"
  223. - " 1.1.1) execute: 'ssh pi@<target_server_ip> -p <ssh_port_number>'"
  224. - " 1.2) log in to 'bpro-development-vm:'"
  225. - " 1.2.1) execute: 'vagrant ssh bpro-development-vm'"
  226. - " 1.2.2) if the switch 'LOCAL_TEST: no' for role 'harden_sshd' was used, go to the directory where the 'Vagrantfile' is saved and execute:"
  227. - " 'ssh -i .vagrant/machines/bpro-development-vm/virtualbox/private_key vagrant@172.28.128.9 -p <ssh_port_number>'"
  228. - " !! WARNING !! You neither will be able to run 'vagrant up --provision' nor 'vagrant provision' again because vagrant uses ssh port 22. execute: 'vagrant destroy -f && vagrant up --provision' to rebuild the setup"
  229. - " 2) /home/vagrant/credentials: generated credentials can be found on the 'bpro-provision-vm': vagrant ssh bpro-provision-vm"
  230. - " 3) become root: on the target server, first log in as user 'pi' and then execute 'sudo -i' to become root"
  231. - " 4) /etc/apt/sources.list: check, if the selected mirror: {{ lookup('file', '/home/vagrant/fastest_mirror') }} on the 'bpro-development-vm' and on the target server is official and is not discontinued! For more information go to: 'https://www.debian.org/mirror/list'"
  232. - " 5) nfs share: check, if the setup was successfull by executing on the client: 'showmount --exports <ip_of_nfs_server>'"
  233. - " on the server also make sure that the hard drive partitions - sda1 and sda2 - were sucessfully mounted: 'mount | grep sda', 'ls /media/{BACKUP,BANANAPI}/', 'vi /etc/fstab'"
  234. tags:
  235. - post_tasks
  236. # live system only
  237. - hosts: "live"
  238. pre_tasks:
  239. - name: check, if deploy file exists
  240. local_action:
  241. module: "stat"
  242. path: "/vagrant/.deploy_to_live"
  243. register: "DEPLOY_TO_LIVE"
  244. tags:
  245. - pre_tasks
  246. roles:
  247. - { role: "remove_dotfile_switches",
  248. REMOVE_ALL: "no",
  249. REMOVE_FILES_IN_LIST: "yes",
  250. FILE_DELETE_LIST: ["sshd_settings_set"],
  251. when: DEPLOY_TO_LIVE.stat.exists == true }
  252. - { role: "harden_sshd",
  253. PERMIT_ROOT_LOGIN: "no",
  254. USERNAME: "pi",
  255. SUDO_USER: "yes",
  256. SET_ALLOW_GROUPS_IN_LIST: "yes",
  257. ALLOW_GROUP_LIST: ["ssh"],
  258. LOCAL_TEST: "no",
  259. when: DEPLOY_TO_LIVE.stat.exists == true }
# how to call roles dynamically with "host_vars"?!
# issue: https://github.com/ansible/ansible/issues/18341
#- hosts: <some_host_in_inventory_file>
#  tasks:
#    - include_role:
#        name: "<role_name>"
#      vars: "{{ item }}"
#      with_items: "{{ hostvars[inventory_hostname].<some_host_in_inventory_file> }}"
#      when:
#        - hostvars[inventory_hostname].<some_host_in_inventory_file> is defined