1
|
0
|
import os
|
2
|
0
|
import re
|
3
|
0
|
import json
|
4
|
0
|
import stat
|
5
|
0
|
import tempfile
|
6
|
0
|
from datetime import datetime
|
7
|
0
|
from io import StringIO
|
8
|
0
|
from django.db import transaction
|
9
|
0
|
from collections import OrderedDict
|
10
|
0
|
from django.conf import settings
|
11
|
0
|
from distutils.version import LooseVersion as Version
|
12
|
0
|
from django.core.management.base import BaseCommand
|
13
|
0
|
from cyborgbackup.main.models import Job, Repository, Catalog
|
14
|
0
|
from cyborgbackup.main.expect import run
|
15
|
0
|
from cyborgbackup.main.models.settings import Setting
|
16
|
0
|
from cyborgbackup.main.utils.common import get_ssh_version
|
17
|
0
|
from cyborgbackup.main.utils.encryption import decrypt_field
|
18
|
|
|
19
|
0
|
# Error message raised when a private key in OpenSSH's native format is
# supplied but the locally-installed OpenSSH is too old (< 6.5) to read it.
# The backslash continuations keep the message on a single logical line.
OPENSSH_KEY_ERROR = u'''\
It looks like you're trying to use a private key in OpenSSH format, which \
isn't supported by the installed version of OpenSSH on this instance. \
Try upgrading OpenSSH or providing your private key in a different format. \
'''
|
24
|
|
|
25
|
|
|
26
|
0
|
class Command(BaseCommand):
    """Rebuild the Catalog from all enabled Borg repositories.

    Lists the archives present in each repository, prunes catalog entries
    whose archive no longer exists, then re-imports file entries for the
    remaining archives.
    """
    # Shown by `manage.py help`.
    help = 'Rebuild Catalog from all Repositories.'

    # Temp directories created by build_private_data_dir, remembered here so
    # they can be cleaned up later. NOTE(review): this is a mutable
    # class-level list shared by all instances — confirm a single Command
    # instance per process is the intended usage.
    cleanup_paths = []
|
32
|
|
|
33
|
0
|
def get_password_prompts(self, **kwargs):
    """Map compiled prompt regexes to the name of the password that answers them.

    For every entry in ``kwargs['passwords']`` two patterns are registered
    (with and without the trailing ``:``), both answered by that password's
    name. A final catch-all "Bad passphrase" retry prompt maps to the empty
    string. Insertion order is preserved via OrderedDict.
    """
    prompts = OrderedDict()
    for pw_name in kwargs['passwords']:
        anchored = r'Enter passphrase for .*' + pw_name + r':\s*?$'
        loose = r'Enter passphrase for .*' + pw_name
        prompts[re.compile(anchored, re.M)] = pw_name
        prompts[re.compile(loose, re.M)] = pw_name
    # Retrying with an empty answer lets the runner detect a bad passphrase.
    prompts[re.compile(r'Bad passphrase, try again for .*:\s*?$', re.M)] = ''
    return prompts
|
40
|
|
|
41
|
0
|
def get_ssh_key_path(self, instance, **kwargs):
    '''
    If using an SSH key, return the path for use by ssh-agent.

    Looks up kwargs['private_data_files']['credentials']['ssh'] and returns
    it when present; returns an empty string otherwise.
    '''
    credentials = kwargs.get('private_data_files', {}).get('credentials', {})
    return credentials.get('ssh', '')
|
50
|
|
|
51
|
0
|
def build_passwords(self, job, **kwargs):
    '''
    Build a dictionary of passwords for SSH private key, SSH user, sudo/su.

    For every Setting whose key contains 'ssh_key', the matching
    '*ssh_password*' Setting is fetched and decrypted; the result is keyed
    as 'credential_<ssh_key setting key>'.

    NOTE(review): raises Setting.DoesNotExist if a key setting has no
    password counterpart — confirm that pairing is guaranteed elsewhere.
    '''
    passwords = {}
    for setting in Setting.objects.filter(key__contains='ssh_key'):
        # Renamed from 'set' to stop shadowing the builtin.
        password_setting = Setting.objects.get(
            key=setting.key.replace('ssh_key', 'ssh_password'))
        passwords['credential_{}'.format(setting.key)] = decrypt_field(password_setting, 'value')
    return passwords
|
60
|
|
|
61
|
0
|
def build_private_data(self, instance, **kwargs):
    '''
    Return SSH private key data (only if stored in DB as ssh_key_data).
    Return structure is a dict of the form:
        {'credentials': {<Setting>: '<decrypted key text or empty string>'}}
    '''
    credentials = {}
    for key_setting in Setting.objects.filter(key__contains='ssh_key'):
        # If we were sent SSH credentials, decrypt them and send them
        # back (they will be written to a temporary file).
        credentials[key_setting] = decrypt_field(key_setting, 'value') or ''
    return {'credentials': credentials}
|
73
|
|
|
74
|
0
|
def build_private_data_dir(self, instance, **kwargs):
    '''
    Create a temporary directory for job-related files.

    The directory name embeds the instance primary key, is restricted to
    owner-only access, and is recorded in self.cleanup_paths for later
    removal. Returns the directory path.
    '''
    prefix = 'cyborgbackup_%s_' % instance.pk
    private_dir = tempfile.mkdtemp(prefix=prefix, dir='/tmp/')
    # Owner rwx only (0700); S_IRWXU == S_IRUSR | S_IWUSR | S_IXUSR.
    os.chmod(private_dir, stat.S_IRWXU)
    self.cleanup_paths.append(private_dir)
    return private_dir
|
82
|
|
|
83
|
0
|
def build_private_data_files(self, instance, **kwargs):
    '''
    Creates temporary files containing the private data.
    Returns a dictionary i.e.,

    {
        'credentials': {
            <cyborgbackup.main.models.Credential>: '/path/to/decrypted/data',
            <cyborgbackup.main.models.Credential>: '/path/to/decrypted/data',
            <cyborgbackup.main.models.Credential>: '/path/to/decrypted/data',
        }
    }

    Requires kwargs['private_data_dir'] (set by the caller) as the
    destination directory for the key FIFOs.
    '''
    private_data = self.build_private_data(instance, **kwargs)
    private_data_files = {'credentials': {}}
    if private_data is not None:
        # Probe the local OpenSSH version once; both flags below derive
        # from it. "unknown" is treated as too old / unsupported.
        ssh_ver = get_ssh_version()
        ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0")
        openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5")
        for sets, data in private_data.get('credentials', {}).items():
            # Bail out now if a private key was provided in OpenSSH format
            # and we're running an earlier version (<6.5).
            if 'OPENSSH PRIVATE KEY' in data and not openssh_keys_supported:
                raise RuntimeError(OPENSSH_KEY_ERROR)
        listpaths = []
        for sets, data in private_data.get('credentials', {}).items():
            # OpenSSH formatted keys must have a trailing newline to be
            # accepted by ssh-add.
            if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
                data += '\n'
            # For credentials used with ssh-add, write to a named pipe which
            # will be read then closed, instead of leaving the SSH key on disk.
            # NOTE(review): when ssh_too_old is True the key is silently
            # skipped — confirm that is the intended fallback.
            if sets and not ssh_too_old:
                name = 'credential_{}'.format(sets.key)
                path = os.path.join(kwargs['private_data_dir'], name)
                run.open_fifo_write(path, data)
                listpaths.append(path)
        # A single key is exposed as a plain path, several as a list; callers
        # (get_ssh_key_path) consume whichever shape is present.
        if len(listpaths) > 1:
            private_data_files['credentials']['ssh'] = listpaths
        elif len(listpaths) == 1:
            private_data_files['credentials']['ssh'] = listpaths[0]
    return private_data_files
|
126
|
|
|
127
|
0
|
def launch_command(self, cmd, instance, key, path, **kwargs):
    '''
    Run a Borg command via pexpect and return its stdout as a list of lines.

    cmd       -- argv-style command list (e.g. ["borg", "list", "::"]).
    instance  -- object the private-data helpers operate on (a Repository here).
    key       -- Borg repository passphrase, exported as BORG_PASSPHRASE.
    path      -- Borg repository location, exported as BORG_REPO.

    NOTE(review): the process exit status from run.run_pexpect is discarded;
    callers only see the captured output — confirm failures are acceptable
    to ignore here.
    '''
    cwd = '/tmp/'
    # Environment for Borg: passphrase, repo, and a relaxed SSH transport
    # (host-key checking disabled for unattended runs).
    env = {}
    env['BORG_PASSPHRASE'] = key
    env['BORG_REPO'] = path
    env['BORG_RELOCATED_REPO_ACCESS_IS_OK'] = 'yes'
    env['BORG_RSH'] = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    args = cmd
    # safe_args mirrors args (presumably for redacted logging upstream).
    safe_args = args

    # Stage decrypted credentials and passphrases for the expect runner.
    kwargs['private_data_dir'] = self.build_private_data_dir(instance, **kwargs)
    kwargs['private_data_files'] = self.build_private_data_files(instance, **kwargs)
    kwargs['passwords'] = self.build_passwords(instance, **kwargs)

    # Resolve each prompt regex to the concrete password string (or '').
    expect_passwords = {}
    for k, v in self.get_password_prompts(**kwargs).items():
        expect_passwords[k] = kwargs['passwords'].get(v, '') or ''

    _kw = dict(
        expect_passwords=expect_passwords,
        job_timeout=getattr(settings, 'DEFAULT_JOB_TIMEOUT', 0),
        idle_timeout=getattr(settings, 'JOB_RUN_IDLE_TIMEOUT', None),
        extra_update_fields={},
        pexpect_timeout=getattr(settings, 'PEXPECT_TIMEOUT', 5),
    )
    stdout_handle = StringIO()

    ssh_key_path = self.get_ssh_key_path(instance, **kwargs)
    # If we're executing on an isolated host, don't bother adding the
    # key to the agent in this environment
    if ssh_key_path:
        ssh_auth_sock = os.path.join(kwargs['private_data_dir'], 'ssh_auth.sock')
        args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
        safe_args = run.wrap_args_with_ssh_agent(safe_args, ssh_key_path, ssh_auth_sock)

    status, rc = run.run_pexpect(
        args, cwd, env, stdout_handle, **_kw
    )

    lines = stdout_handle.getvalue().splitlines()
    return lines
|
168
|
|
|
169
|
0
|
def handle(self, *args, **kwargs):
    """Rebuild the Catalog from every enabled repository.

    Steps: refuse to run alongside an active job; collect all archive names
    from each enabled repository; delete catalog entries whose archive has
    disappeared; then re-import file entries for every remaining archive.
    """
    # Sanity check: Is there already a running job on the System?
    jobs = Job.objects.filter(status="running")
    if jobs.exists():
        print('A job is already running, exiting.')
        return

    repos = Repository.objects.filter(enabled=True)
    repo_archives = []
    if repos.exists():
        for repo in repos:
            # Ask Borg for every archive present in this repository.
            lines = self.launch_command(["borg", "list", "::"], repo, repo.repository_key, repo.path, **kwargs)

            for line in lines:
                archive_name = line.split(' ')[0]
                # Keep only archives produced by a known backup type.
                # ('archive_type' renamed from 'type' to avoid shadowing
                # the builtin.)
                for archive_type in ('rootfs', 'vm', 'mysql', 'postgresql', 'config', 'piped', 'mail', 'folders'):
                    if '{}-'.format(archive_type) in archive_name:
                        repo_archives.append(archive_name)

        # Drop catalog entries whose archive no longer exists anywhere.
        entries = Job.objects.filter(job_type='job', status='successful')
        if entries.exists():
            for entry in entries:
                if entry.archive_name != '' and entry.archive_name not in repo_archives:
                    print('Delete {} from catalog'.format(entry.archive_name))
                    Catalog.objects.filter(archive_name=entry.archive_name).delete()
                    entry.archive_name = ''
                    entry.save()

        # Loop-invariant: local-vs-UTC offset rounded to whole hours.
        # NOTE(review): the '+0{}00' format below only handles positive
        # single-digit offsets — confirm/replace with tz-aware datetimes.
        hours_timezone = round((round((datetime.now() - datetime.utcnow()).total_seconds()) / 1800) / 2)

        for repo in repos:
            jobs = Job.objects.filter(policy__repository_id=repo.pk,
                                      status='successful',
                                      job_type='job').order_by('-finished')
            if jobs.exists():
                for job in jobs:
                    # Fix: removed a hard-coded archive-name comparison
                    # ('rootfs-dave.milkywan.cloud-2019-02-22_02-00') that was
                    # a debugging leftover and prevented rebuilding any other
                    # archive, contradicting this command's purpose.
                    if job.archive_name:
                        lines = self.launch_command(["borg",
                                                     "list",
                                                     "--json-lines",
                                                     "::{}".format(job.archive_name)],
                                                    repo,
                                                    repo.repository_key,
                                                    repo.path,
                                                    **kwargs)
                        with transaction.atomic():
                            for line in lines:
                                try:
                                    data = json.loads(line)
                                    entries = Catalog.objects.filter(archive_name=job.archive_name,
                                                                     path=data['path'],
                                                                     size=data['size'])
                                    if not entries.exists():
                                        entry = Catalog()
                                        entry.path = data['path']
                                        entry.job = job
                                        entry.archive_name = job.archive_name
                                        entry.mode = data['mode']
                                        entry.owner = data['user']
                                        entry.group = data['group']
                                        entry.type = data['type']
                                        entry.size = data['size']
                                        entry.healthy = data['healthy']
                                        entry.mtime = '{}+0{}00'.format(data['mtime'].replace('T', ' '),
                                                                        hours_timezone)
                                        entry.save()
                                except Exception as e:
                                    # Best-effort: one malformed JSON line must
                                    # not abort the whole rebuild.
                                    print(e)
                                    continue
|