
Commit 4556a7c

Automate disk snapshots in addition to S3.
1 parent d750fbc commit 4556a7c

File tree

8 files changed: +152, -66 lines


roles/snapshots/defaults/main.yml

+5
@@ -0,0 +1,5 @@
+---
+snapshot_disk_name: ipfs_disk
+snapshot_disk_path: /var/lib/elasticsearch/backup
+snapshot_s3_name: ipfs_s3
+snapshot_s3_bucket: ipfs-search-snapshots
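
These defaults can be overridden per host or at run time without editing the role; a minimal sketch (the playbook name snapshots.yml is hypothetical):

# Hypothetical playbook applying the snapshots role; point snapshots at a staging bucket and path.
ansible-playbook snapshots.yml -e snapshot_s3_bucket=my-staging-snapshots -e snapshot_disk_path=/srv/es-backup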

roles/snapshots/files/ipfs-search-snapshot.sh

-3
This file was deleted.

roles/snapshots/handlers/tasks.yml

+3
@@ -0,0 +1,3 @@
+---
+- name: restart elasticsearch
+  systemd: daemon_reload=yes state=restarted name=elasticsearch

roles/snapshots/tasks/disk.yml

+34
@@ -0,0 +1,34 @@
+---
+- name: Check snapshot repository
+  uri:
+    url: "http://127.0.0.1:9200/_snapshot/{{ snapshot_disk_name }}"
+    status_code: 200, 404
+  register: snapshot_check
+- name: Create snapshot repository
+  uri:
+    url: "http://127.0.0.1:9200/_snapshot/{{ snapshot_disk_name }}"
+    method: PUT
+    body_format: json
+    body: |
+      {
+        "type": "fs",
+        "settings": {
+          "location": "{{ snapshot_disk_path }}",
+          "compress": true,
+          "chunk_size": "524288000B"
+        }
+      }
+  when: snapshot_check.status == 404
+- name: Install snapshot script
+  template: src=ipfs-search-snapshot.sh dest=/usr/local/bin/ipfs-search-snapshot-disk.sh mode=755
+  vars:
+    - snapshot_name: "{{ snapshot_disk_name }}"
+- name: Elasticsearch daily snapshot cronjob
+  cron:
+    name: "Elasticsearch disk snapshot"
+    cron_file: /etc/crontab
+    minute: 16
+    hour: 4
+    backup: yes
+    user: nobody
+    job: "/usr/local/bin/ipfs-search-snapshot-disk.sh"

roles/snapshots/tasks/main.yml

+8-63
@@ -1,64 +1,9 @@
 ---
-- name: Check snapshot repository
-  uri:
-    url: http://127.0.0.1:9200/_snapshot/ipfs_s3
-    status_code: 200, 404
-  register: snapshot_check
-- name: Check keystore
-  command: /usr/share/elasticsearch/bin/elasticsearch-keystore list
-  register: keystore
-  failed_when: keystore.rc not in [0, 65]
-- name: Loading secrets
-  include_vars:
-    file: ../../vault/secrets.yml
-  when: |
-    keystore.rc == 65 or \
-    's3.client.default.access_key' not in keystore.stdout" or \
-    's3.client.default.secret_key' not in keystore.stdout
-- name: Create keystore
-  command: /usr/share/elasticsearch/bin/elasticsearch-keystore create
-  when: keystore.rc == 65
-- name: Add S3 access key
-  command: /usr/share/elasticsearch/bin/elasticsearch-keystore add s3.client.default.access_key --stdin
-  args:
-    stdin: "{{ s3_access_key }}"
-  when: "'s3.client.default.access_key' not in keystore.stdout"
-  register: added_access_key
-- name: Add S3 secret key
-  command: /usr/share/elasticsearch/bin/elasticsearch-keystore add s3.client.default.secret_key --stdin
-  args:
-    stdin: "{{ s3_secret_key }}"
-  when: "'s3.client.default.secret_key' not in keystore.stdout"
-  register: added_secret_key
-- name: Restart elasticsearch
-  systemd: daemon_reload=yes state=restarted name=elasticsearch
-  when: added_access_key.changed or added_secret_key.changed
-- name: Wait for elasticsearch to be ready
-  wait_for:
-    host: 127.0.0.1
-    port: 9200
-- name: Create snapshot repository
-  uri:
-    url: http://127.0.0.1:9200/_snapshot/ipfs_s3
-    method: PUT
-    body_format: json
-    body: |
-      {
-        "type": "s3",
-        "settings": {
-          "bucket": "ipfs-search-snapshots",
-          "storage_class": "standard_ia"
-        }
-      }
-  when: snapshot_check.status == 404
-- name: Install snapshot script
-  copy: src=ipfs-search-snapshot.sh dest=/usr/local/bin/ipfs-search-snapshot.sh mode=755
-- name: Elasticsearch daily snapshot cronjob
-  cron:
-    name: "Elasticsearch snapshot"
-    cron_file: /etc/crontab
-    minute: 16
-    hour: 3
-    backup: yes
-    user: nobody
-    job: "/usr/local/bin/ipfs-search-snapshot.sh"
+- name: Configuring S3 snapshots
+  import_tasks: s3.yml
+  tags:
+    - snapshots-s3
+- name: Configuring disk snapshots
+  import_tasks: disk.yml
+  tags:
+    - snapshots-disk
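
With main.yml reduced to two tagged imports, either half of the role can be applied on its own; a minimal sketch (site.yml stands in for whatever playbook applies the snapshots role):

# Run only the disk-snapshot tasks.
ansible-playbook site.yml --tags snapshots-disk

# Run only the S3 tasks (keystore, repository, cronjob).
ansible-playbook site.yml --tags snapshots-s3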

roles/snapshots/tasks/s3.yml

+67
@@ -0,0 +1,67 @@
+---
+- name: Check snapshot repository
+  uri:
+    url: "http://127.0.0.1:9200/_snapshot/{{ snapshot_s3_name }}"
+    status_code: 200, 404
+  register: snapshot_check
+- name: Check keystore
+  command: /usr/share/elasticsearch/bin/elasticsearch-keystore list
+  register: keystore
+  failed_when: keystore.rc not in [0, 65]
+- name: Loading secrets
+  include_vars:
+    file: ../../vault/secrets.yml
+  when: |
+    keystore.rc == 65 or
+    's3.client.default.access_key' not in keystore.stdout or
+    's3.client.default.secret_key' not in keystore.stdout
+- name: Create keystore
+  command: /usr/share/elasticsearch/bin/elasticsearch-keystore create
+  when: keystore.rc == 65
+- name: Add S3 access key
+  command: /usr/share/elasticsearch/bin/elasticsearch-keystore add s3.client.default.access_key --stdin
+  args:
+    stdin: "{{ s3_access_key }}"
+  when: "'s3.client.default.access_key' not in keystore.stdout"
+  register: added_access_key
+- name: Add S3 secret key
+  command: /usr/share/elasticsearch/bin/elasticsearch-keystore add s3.client.default.secret_key --stdin
+  args:
+    stdin: "{{ s3_secret_key }}"
+  when: "'s3.client.default.secret_key' not in keystore.stdout"
+  register: added_secret_key
+- name: Restart elasticsearch
+  systemd: daemon_reload=yes state=restarted name=elasticsearch
+  when: added_access_key.changed or added_secret_key.changed
+- name: Wait for elasticsearch to be ready
+  wait_for:
+    host: 127.0.0.1
+    port: 9200
+- name: Create snapshot repository
+  uri:
+    url: "http://127.0.0.1:9200/_snapshot/{{ snapshot_s3_name }}"
+    method: PUT
+    body_format: json
+    body: |
+      {
+        "type": "s3",
+        "settings": {
+          "bucket": "{{ snapshot_s3_bucket }}",
+          "storage_class": "standard_ia",
+          "compress": true
+        }
+      }
+  when: snapshot_check.status == 404
+- name: Install snapshot script
+  template: src=ipfs-search-snapshot.sh dest=/usr/local/bin/ipfs-search-snapshot-s3.sh mode=755
+  vars:
+    - snapshot_name: "{{ snapshot_s3_name }}"
+- name: Elasticsearch daily snapshot cronjob
+  cron:
+    name: "Elasticsearch S3 snapshot"
+    cron_file: /etc/crontab
+    minute: 16
+    hour: 3
+    backup: yes
+    user: nobody
+    job: "/usr/local/bin/ipfs-search-snapshot-s3.sh"
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+IPFS=""
+SNAPSHOT_DIR=""
+KEY_NAME=""
+
+echo "Publishing most recent IPFS snapshot to: $KEY_NAME"
+
+OLD_HASH=""
+echo "Latest snapshot: $OLD_HASH"
+
+# Add dir to IPFS
+NEW_HASH=`$IPFS add -w --nocopy --fscache $SNAPSHOT_DIR`
+STATUS=$?
+
+if [[ $STATUS == "0" ]]; then
+    echo "Success creating snapshot: $NEW_HASH"
+
+    echo "Replacing pin for $OLD_HASH with $NEW_HASH"
+    $IPFS pin update $OLD_HASH $NEW_HASH
+
+    echo "Publishing to $KEY_NAME"
+    $IPFS name publish --key $KEY_NAME $NEW_HASH
+
+    echo "Performing filestore garbage collection"
+    $IPFS filestore gc
+else
+    echo "Error creating new snapshot!"
+    exit -1
+fi
+
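
To see what a key currently points at after a publish, the IPNS record can be resolved by hand; a hedged sketch, assuming the go-ipfs CLI (the key id placeholder below comes from the list command):

# List keys with their IPNS ids, then resolve the snapshot key to the latest hash.
ipfs key list -l
ipfs name resolve /ipns/<key-id>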
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+curl -s -XPUT http://127.0.0.1:9200/_snapshot/{{ snapshot_name }}/snapshot_`date +'%y%m%d_%H%M'` | jq -e '.accepted' > /dev/null
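
The template only checks that Elasticsearch accepted the snapshot request; whether a snapshot actually completed has to be checked separately, e.g. against the repository itself (ipfs_s3 from the role defaults is used here as an example):

# List all snapshots in the repository with their state (SUCCESS, PARTIAL, FAILED).
curl -s http://127.0.0.1:9200/_snapshot/ipfs_s3/_all | jq -r '.snapshots[] | "\(.snapshot) \(.state)"'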
