import os
import re
import json
import stat
import tempfile
from collections import OrderedDict
from datetime import datetime
from distutils.version import LooseVersion as Version
from io import StringIO

from django.conf import settings
from django.core.management.base import BaseCommand
from elasticsearch import Elasticsearch

from cyborgbackup.main.expect import run
from cyborgbackup.main.models import Job, Repository
from cyborgbackup.main.models.settings import Setting
from cyborgbackup.main.utils.common import get_ssh_version
from cyborgbackup.main.utils.encryption import decrypt_field

# Client for the catalog index; the code below expects Elasticsearch on
# localhost:9200.
es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

OPENSSH_KEY_ERROR = u'''\
It looks like you're trying to use a private key in OpenSSH format, which \
isn't supported by the installed version of OpenSSH on this instance. \
Try upgrading OpenSSH or providing your private key in a different format. \
'''


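# Catalog documents indexed by this command each mirror one file entry from
# `borg list --json-lines` (path, mode, owner, group, type, size, healthy,
# mtime), tagged with the originating job and archive name.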
class Command(BaseCommand):
    """Rebuild the catalog from all enabled repositories."""

    help = 'Rebuild Catalog from all Repositories.'

    cleanup_paths = []
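
    # Typical invocation (the command name is an assumption based on this
    # module's purpose; adjust to where the file lives in your project):
    #
    #   python manage.py rebuild_catalog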

    def get_password_prompts(self, **kwargs):
        '''
        Map compiled prompt patterns to entries of kwargs['passwords'] so
        that run_pexpect can answer passphrase prompts, e.g. a prompt
        "Enter passphrase for .../credential_foo_ssh_key:" is answered with
        the password stored under 'credential_foo_ssh_key' (name
        illustrative).
        '''
        d = OrderedDict()
        for k, v in kwargs['passwords'].items():
            d[re.compile(r'Enter passphrase for .*' + k + r':\s*?$', re.M)] = k
            # Unanchored variant, in case the prompt is not at end of line.
            d[re.compile(r'Enter passphrase for .*' + k, re.M)] = k
        d[re.compile(r'Bad passphrase, try again for .*:\s*?$', re.M)] = ''
        return d

    def get_ssh_key_path(self, instance, **kwargs):
        '''
        If using an SSH key, return the path for use by ssh-agent.
        '''
        private_data_files = kwargs.get('private_data_files', {})
        if 'ssh' in private_data_files.get('credentials', {}):
            return private_data_files['credentials']['ssh']

        return ''

    def build_passwords(self, job, **kwargs):
        '''
        Build a dictionary of passwords for SSH private key, SSH user, sudo/su.
        Keys are of the form 'credential_<setting.key>'.
        '''
        passwords = {}
        for setting in Setting.objects.filter(key__contains='ssh_key'):
            password_setting = Setting.objects.get(key=setting.key.replace('ssh_key', 'ssh_password'))
            passwords['credential_{}'.format(setting.key)] = decrypt_field(password_setting, 'value')
        return passwords

    def build_private_data(self, instance, **kwargs):
        '''
        Return SSH private key data (only if stored in DB as ssh_key_data).
        Return structure is a dict of the form:

        {
            'credentials': {
                <cyborgbackup.main.models.Setting>: 'decrypted private key data',
                ...
            }
        }
        '''
        private_data = {'credentials': {}}
        for setting in Setting.objects.filter(key__contains='ssh_key'):
            # If we were sent SSH credentials, decrypt them and send them
            # back (they will be written to a temporary file).
            private_data['credentials'][setting] = decrypt_field(setting, 'value') or ''

        return private_data

    def build_private_data_dir(self, instance, **kwargs):
        '''
        Create a temporary directory for job-related files, e.g.
        /tmp/cyborgbackup_<pk>_XXXXXX, accessible only to the current user.
        '''
        path = tempfile.mkdtemp(prefix='cyborgbackup_%s_' % instance.pk, dir='/tmp/')
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        self.cleanup_paths.append(path)
        return path

    def build_private_data_files(self, instance, **kwargs):
        '''
        Creates temporary files (or named pipes) containing the private data.
        Returns a dictionary of the form:

        {
            'credentials': {
                'ssh': '/path/to/decrypted/data'
            }
        }

        where the value is a list of paths if several keys were written.
        '''
        private_data = self.build_private_data(instance, **kwargs)
        private_data_files = {'credentials': {}}
        if private_data is not None:
            ssh_ver = get_ssh_version()
            ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0")
            openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5")
            for setting, data in private_data.get('credentials', {}).items():
                # Bail out now if a private key was provided in OpenSSH format
                # and we're running an earlier version (<6.5).
                if 'OPENSSH PRIVATE KEY' in data and not openssh_keys_supported:
                    raise RuntimeError(OPENSSH_KEY_ERROR)
            listpaths = []
            for setting, data in private_data.get('credentials', {}).items():
                # OpenSSH formatted keys must have a trailing newline to be
                # accepted by ssh-add.
                if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
                    data += '\n'
                # For credentials used with ssh-add, write to a named pipe which
                # will be read then closed, instead of leaving the SSH key on disk.
                if setting and not ssh_too_old:
                    name = 'credential_{}'.format(setting.key)
                    path = os.path.join(kwargs['private_data_dir'], name)
                    run.open_fifo_write(path, data)
                    listpaths.append(path)
            if len(listpaths) > 1:
                private_data_files['credentials']['ssh'] = listpaths
            elif len(listpaths) == 1:
                private_data_files['credentials']['ssh'] = listpaths[0]

        return private_data_files

    def launch_command(self, cmd, instance, key, path, **kwargs):
        '''Run a Borg command under pexpect and return its stdout lines.'''
        cwd = '/tmp/'
        env = {}
        env['BORG_PASSPHRASE'] = key
        env['BORG_REPO'] = path
        env['BORG_RELOCATED_REPO_ACCESS_IS_OK'] = 'yes'
        env['BORG_RSH'] = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
        args = cmd
        safe_args = args

        kwargs['private_data_dir'] = self.build_private_data_dir(instance, **kwargs)
        kwargs['private_data_files'] = self.build_private_data_files(instance, **kwargs)
        kwargs['passwords'] = self.build_passwords(instance, **kwargs)

        expect_passwords = {}
        for k, v in self.get_password_prompts(**kwargs).items():
            expect_passwords[k] = kwargs['passwords'].get(v, '') or ''

        _kw = dict(
            expect_passwords=expect_passwords,
            job_timeout=getattr(settings, 'DEFAULT_JOB_TIMEOUT', 0),
            idle_timeout=getattr(settings, 'JOB_RUN_IDLE_TIMEOUT', None),
            extra_update_fields={},
            pexpect_timeout=getattr(settings, 'PEXPECT_TIMEOUT', 5),
        )
        stdout_handle = StringIO()

        ssh_key_path = self.get_ssh_key_path(instance, **kwargs)
        # If an SSH key is available, run the command under an ssh-agent
        # that has the key loaded.
        if ssh_key_path:
            ssh_auth_sock = os.path.join(kwargs['private_data_dir'], 'ssh_auth.sock')
            args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
            safe_args = run.wrap_args_with_ssh_agent(safe_args, ssh_key_path, ssh_auth_sock)

        status, rc = run.run_pexpect(
            args, cwd, env, stdout_handle, **_kw
        )

        lines = stdout_handle.getvalue().splitlines()
        return lines

    def handle(self, *args, **kwargs):
        # Sanity check: Is there already a running job on the System?
        jobs = Job.objects.filter(status="running")
        if jobs.exists():
            print('A job is already running, exiting.')
            return

        repos = Repository.objects.filter(enabled=True)
        repoArchives = []
        if repos.exists():
            for repo in repos:
                lines = self.launch_command(["borg", "list", "::"], repo, repo.repository_key, repo.path, **kwargs)

                for line in lines:
                    # The archive name is the first whitespace-separated field.
                    archive_name = line.split(' ')[0]
                    for archtype in ('rootfs', 'vm', 'mysql', 'postgresql', 'config', 'piped', 'mail', 'folders'):
                        if '{}-'.format(archtype) in archive_name:
                            repoArchives.append(archive_name)

        entries = Job.objects.filter(job_type='job', status='successful')
        if entries.exists():
            for entry in entries:
                if entry.archive_name != '' and entry.archive_name not in repoArchives:
                    print('Delete {} from catalog'.format(entry.archive_name))
                    # Catalog.objects.filter(archive_name=entry.archive_name).delete()
                    # entry.archive_name = ''
                    # entry.save()

        for repo in repos:
            jobs = Job.objects.filter(policy__repository_id=repo.pk,
                                      status='successful',
                                      job_type='job').order_by('-finished')
            if jobs.exists():
                for job in jobs:
                    if job.archive_name and job.archive_name != '':
                        search_object = {'query': {'bool': {'must': [{
                            'term': {
                                'archive_name.keyword': job.archive_name,
                            }
                        }]}}}
                        res = es.search(index="catalog", body=search_object)
                        if res['hits']['total']['value'] == 0:
                            lines = self.launch_command(["borg",
                                                         "list",
                                                         "--json-lines",
                                                         "::{}".format(job.archive_name)],
                                                        repo,
                                                        repo.repository_key,
                                                        repo.path,
                                                        **kwargs)
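                            # Each stdout line is one JSON object. A borg
                            # `list --json-lines` entry looks roughly like
                            # (values illustrative):
                            #   {"type": "-", "mode": "-rw-r--r--",
                            #    "user": "root", "group": "root",
                            #    "path": "etc/hostname", "healthy": true,
                            #    "size": 10,
                            #    "mtime": "2019-01-01T12:00:00.000000"}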
                            # Offset of the local timezone from UTC, rounded
                            # to whole hours (seconds / 1800 / 2 == hours).
                            hoursTimezone = round(
                                (round(
                                    (datetime.now() - datetime.utcnow()).total_seconds()) / 1800)
                                / 2)
                            for line in lines:
                                data = None
                                try:
                                    data = json.loads(line)
                                except Exception:
                                    # Skip lines that are not valid JSON.
                                    pass
                                if data:
                                    search_object = {'query': {'bool': {'must': [{
                                        'term': {
                                            'path.keyword': data['path']
                                        }
                                    }, {
                                        'term': {
                                            'archive_name.keyword': job.archive_name,
                                        }
                                    }]}}}
                                    res = es.search(index="catalog", body=search_object)
                                    if res['hits']['total']['value'] == 0:
                                        print("Insert {} {}".format(job.archive_name, data['path']))
                                        es.index(index='catalog', doc_type='_doc', body={
                                            'path': data['path'],
                                            'job': job.pk,
                                            'archive_name': job.archive_name,
                                            'mode': data['mode'],
                                            'owner': data['user'],
                                            'group': data['group'],
                                            'type': data['type'],
                                            'size': data['size'],
                                            'healthy': data['healthy'],
                                            # Assumes a positive, single-digit
                                            # UTC offset, e.g. '+0100'.
                                            'mtime': '{}+0{}00'.format(data['mtime'].replace('T', ' '),
                                                                       hoursTimezone)
                                        })