Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions roles/splunk/tasks/configure_apps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,9 @@
delegate_to: localhost
changed_when: false

- name: Apply cluster bundle
include_tasks: splunk_bundle_apply.yml

# Conditional for block
when:
- git_apps is defined
Expand Down
70 changes: 0 additions & 70 deletions roles/splunk/tasks/install_apps.yml
Original file line number Diff line number Diff line change
@@ -1,75 +1,5 @@
---
# This task MUST be called by configure_apps.yml to work correctly. Do NOT call this task directly via the deployment_task var!
- name: Set correct handler for master-apps
set_fact:
handler: "apply indexer cluster bundle"
when: app_dest == 'etc/master-apps'

- name: Set correct handler for deployment-apps
set_fact:
handler: "reload deployment server"
when: app_dest == 'etc/deployment-apps'

- name: Set correct handler for shcluster/apps
set_fact:
handler: "apply shcluster-bundle"
when: app_dest == 'etc/shcluster/apps'

- name: "Set default restart splunk handler for all other paths (e.g. etc/auth)"
set_fact:
handler: "restart splunk"
when:
- app_dest != 'etc/shcluster/apps'
- app_dest != 'etc/deployment-apps'
- app_dest != 'etc/master-apps'

- name: Check if correct vars are defined for SHC app management
block:
- name: Trigger failure if target_shc_group_name var is undefined
fail:
msg: "Please add a target_shc_group_name variable to the host_vars for your SH Deployer host(s) before proceeding."
when: target_shc_group_name is not defined

- name: Trigger failure if splunk_admin_username var is undefined
fail:
msg: "Please add a splunk_admin_username variable to the host_vars for your SHC hosts before proceeding."
when: splunk_admin_username is not defined

- name: Trigger failure if splunk_admin_password var is undefined
fail:
msg:
- "Please add a splunk_admin_password variable to the host_vars or group_vars for your SHC hosts before proceeding."
- "Tip: To encrypt the var value, you can use: ansible-vault encrypt_string --ask-vault-pass 'var_value_to_encrypt' --name 'splunk_admin_password' then, use the --ask-vault-pass argument when running the play."
when: splunk_admin_password is not defined

- name: Get SHC status from first SH under the "target_shc_group_name" inventory group specified in the shdeployer host_vars
uri:
url: "https://{{ groups[target_shc_group_name] | first }}:{{ splunkd_port }}/services/shcluster/status"
method: GET
user: "{{ splunk_admin_username }}"
password: "{{ splunk_admin_password }}"
validate_certs: false
return_content: true
status_code: 200
body:
output_mode=json
no_log: true
changed_when: false
register: shc_status_response

- name: Set vars for deployment based on SHC status
set_fact:
service_ready_state: "{{ shc_status_response.json.entry[0].content.captain.service_ready_flag }}"
deploy_target: "{{ shc_status_response.json.entry[0].content.captain.mgmt_uri }}"

- name: Trigger failure if SHC is NOT ready
fail:
msg: "The SHC has service ready state of FALSE. Please investigate and re-run Ansible play after resolving."
when:
- not service_ready_state

when: handler == "apply shcluster-bundle"

# Note: By using the synchronize module, if the repo already exists on the target host, we are able to only update the diff while preserving the local/ folder
- name: "Synchronize {{ item.name }} repo from local Ansible host to {{ splunk_home }}/{{ app_dest }}/{{ item.name }} on remote host"
synchronize:
Expand Down
15 changes: 15 additions & 0 deletions roles/splunk/tasks/splunk_apply_indexer_cluster_bundle.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
---
# Apply the indexer cluster bundle from the cluster manager.
# Expects splunk_home, splunk_auth, splunk_nix_user and the retry tuning vars
# (splunk_apply_cluster_bundle_retries / splunk_apply_cluster_bundle_delay)
# to be provided by the role. Callers may optionally set extra_args.
- name: apply indexer cluster bundle
  # Folded scalar keeps this a single command line; -auth value is quoted so
  # credentials containing spaces survive the command module's shlex split.
  # extra_args is defaulted inline: defining it in task `vars` as
  # "{{ extra_args | default('') }}" is self-referential and makes Ansible
  # fail with "recursive loop detected in template string".
  ansible.builtin.command: >-
    {{ splunk_home }}/bin/splunk apply cluster-bundle
    --answer-yes --skip-validation
    -auth '{{ splunk_auth }}' {{ extra_args | default('') }}
  become: true
  become_user: "{{ splunk_nix_user }}"
  register: apply_cluster_bundle_result
  changed_when: apply_cluster_bundle_result.rc == 0
  failed_when: apply_cluster_bundle_result.rc != 0
  # retries/delay are ignored by Ansible unless an `until` condition is set
  # (retries is forced to 1 without it), so the original never retried.
  until: apply_cluster_bundle_result.rc == 0
  retries: "{{ splunk_apply_cluster_bundle_retries }}"
  delay: "{{ splunk_apply_cluster_bundle_delay }}"
  # no_log keeps the -auth credentials out of task output and logs
  no_log: true
  when: "'clustermanager' in group_names"
16 changes: 16 additions & 0 deletions roles/splunk/tasks/splunk_apply_shcluster_bundle.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
---
# Apply the SHC bundle from the deployer. deploy_target is normally set by
# splunk_bundle_apply.yml (set_fact from the SHC captain's mgmt_uri); if it
# is not set, fall back to the first host of the target_shc_group_name group.
- name: apply shcluster-bundle
  # Defaults for deploy_target and extra_args are applied inline: the original
  # task `vars` entries referenced themselves ("{{ deploy_target | default(...) }}")
  # which Ansible rejects with "recursive loop detected in template string" —
  # and that also poisoned the `deploy_target is defined` test in `when`.
  ansible.builtin.command: >-
    {{ splunk_home }}/bin/splunk apply shcluster-bundle
    -preserve-lookups true --answer-yes
    -auth '{{ splunk_auth }}' {{ extra_args | default('') }}
    -target {{ deploy_target | default('https://' ~ (groups[target_shc_group_name] | first)) }}
  become: true
  become_user: "{{ splunk_nix_user }}"
  register: apply_shcluster_bundle_result
  changed_when: apply_shcluster_bundle_result.rc == 0
  failed_when: apply_shcluster_bundle_result.rc != 0
  # `until` is required for retries/delay to take effect; without it Ansible
  # forces retries to 1 and the task ran exactly once.
  until: apply_shcluster_bundle_result.rc == 0
  retries: "{{ splunk_apply_shcluster_bundle_retries }}"
  delay: "{{ splunk_apply_shcluster_bundle_delay }}"
  # no_log keeps the -auth credentials out of task output and logs
  no_log: true
  when: "'shdeployer' in group_names"
79 changes: 79 additions & 0 deletions roles/splunk/tasks/splunk_bundle_apply.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
---
# Centralized bundle deployment so that all segments of the cluster can use
# one bundle-deployment task file. Most of this is taken from install_apps.yml,
# repurposed so the bundle application runs at most once per host when
# installing apps. Selects a handler name from the host's inventory groups,
# then includes the matching splunk_<handler>.yml task file.

- name: Set correct handler for master-apps
  ansible.builtin.set_fact:
    handler: "apply_indexer_cluster_bundle"
  when: "'clustermanager' in group_names"

- name: Set correct handler for deployment-apps
  ansible.builtin.set_fact:
    handler: "reload_deployment_server"
  when: "'deploymentserver' in group_names"

- name: Set correct handler for shcluster/apps
  ansible.builtin.set_fact:
    handler: "apply_shcluster_bundle"
  when: "'shdeployer' in group_names"

- name: "Set default restart splunk handler for all other paths (e.g. etc/auth)"
  ansible.builtin.set_fact:
    handler: "restart"
  when:
    - "'clustermanager' not in group_names"
    - "'shdeployer' not in group_names"
    - "'deploymentserver' not in group_names"

# Pre-flight validation for SHC bundle pushes: confirm required vars exist and
# the SHC captain reports a service-ready state before attempting the push.
- name: Check if correct vars are defined for SHC app management
  block:
    - name: Trigger failure if target_shc_group_name var is undefined
      ansible.builtin.fail:
        msg: "Please add a target_shc_group_name variable to the host_vars for your SH Deployer host(s) before proceeding."
      when: target_shc_group_name is not defined

    - name: Trigger failure if splunk_admin_username var is undefined
      ansible.builtin.fail:
        msg: "Please add a splunk_admin_username variable to the host_vars for your SHC hosts before proceeding."
      when: splunk_admin_username is not defined

    - name: Trigger failure if splunk_admin_password var is undefined
      ansible.builtin.fail:
        msg:
          - "Please add a splunk_admin_password variable to the host_vars or group_vars for your SHC hosts before proceeding."
          - "Tip: To encrypt the var value, you can use: ansible-vault encrypt_string --ask-vault-pass 'var_value_to_encrypt' --name 'splunk_admin_password' then, use the --ask-vault-pass argument when running the play."
      when: splunk_admin_password is not defined

    - name: Get SHC status from first SH under the "target_shc_group_name" inventory group specified in the shdeployer host_vars
      ansible.builtin.uri:
        url: "https://{{ groups[target_shc_group_name] | first }}:{{ splunkd_port }}/services/shcluster/status"
        method: GET
        user: "{{ splunk_admin_username }}"
        password: "{{ splunk_admin_password }}"
        validate_certs: false
        return_content: true
        status_code: 200
        body:
          output_mode=json
      no_log: true
      changed_when: false
      register: shc_status_response

    - name: Set vars for deployment based on SHC status
      ansible.builtin.set_fact:
        service_ready_state: "{{ shc_status_response.json.entry[0].content.captain.service_ready_flag }}"
        deploy_target: "{{ shc_status_response.json.entry[0].content.captain.mgmt_uri }}"

    - name: Trigger failure if SHC is NOT ready
      ansible.builtin.fail:
        msg: "The SHC has service ready state of FALSE. Please investigate and re-run Ansible play after resolving."
      when:
        - not service_ready_state

  when: handler == "apply_shcluster_bundle"

# BUG FIX: the original used "{{ '_'.join(('splunk', handler, '.yml')) }}",
# which joins EVERY element with '_' and renders e.g.
# "splunk_apply_shcluster_bundle_.yml" — a filename that never exists.
# NOTE(review): handler "restart" resolves to splunk_restart.yml — confirm
# that task file exists in this role.
- name: Run bundle apply based on name of file
  ansible.builtin.include_tasks: "splunk_{{ handler }}.yml"
11 changes: 11 additions & 0 deletions roles/splunk/tasks/splunk_reload_deployment_server.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
---
# Reload the deployment server so deployment-apps changes are pushed to
# clients. Expects splunk_home, splunk_auth and splunk_nix_user from the role;
# callers may optionally set extra_args and timeout_value (seconds).
- name: Reload deployment server
  # timeout_value and extra_args are defaulted inline: the original task
  # `vars` entries referenced themselves ("{{ extra_args | default('') }}"),
  # which Ansible rejects with "recursive loop detected in template string".
  # -auth value is quoted so credentials containing spaces survive the
  # command module's shlex split.
  ansible.builtin.command: >-
    {{ splunk_home }}/bin/splunk reload deploy-server
    -auth '{{ splunk_auth }}'
    -timeout {{ timeout_value | default('600') }}
    {{ extra_args | default('') }}
  become: true
  become_user: "{{ splunk_nix_user }}"
  # no_log keeps the -auth credentials out of task output and logs
  no_log: true
  when: "'deploymentserver' in group_names"