1
|
0
|
import os
|
2
|
0
|
import re
|
3
|
0
|
import stat
|
4
|
0
|
import shutil
|
5
|
0
|
import tempfile
|
6
|
0
|
from io import StringIO
|
7
|
0
|
from collections import OrderedDict
|
8
|
0
|
from django.conf import settings
|
9
|
0
|
from elasticsearch import Elasticsearch
|
10
|
0
|
from distutils.version import LooseVersion as Version
|
11
|
0
|
from cyborgbackup.main.expect import run
|
12
|
0
|
from cyborgbackup.main.models.settings import Setting
|
13
|
0
|
from cyborgbackup.main.utils.common import get_ssh_version
|
14
|
0
|
from cyborgbackup.main.utils.encryption import decrypt_field
|
15
|
|
|
16
|
|
# Django
|
17
|
0
|
from django.core.management.base import BaseCommand
|
18
|
0
|
from django.db import transaction
|
19
|
|
|
20
|
|
# CyBorgBackup
|
21
|
0
|
from cyborgbackup.main.models import Job, Repository
|
22
|
|
|
23
|
0
|
# The Elasticsearch connection is configured as a single "host:port" string
# in Django settings; split it so host and port can be passed separately.
es_conf = settings.ELASTICSEARCH_DSL['default']['hosts'].split(':')
# Module-level client, used by Command.cleanup_jobs to purge catalog entries
# for deleted archives.  Created at import time.
es = Elasticsearch([{'host': es_conf[0], 'port': int(es_conf[1])}])

# Message raised when a private key in OpenSSH format is supplied but the
# installed OpenSSH is too old (< 6.5) to read that format
# (see Command.build_private_data_files).
OPENSSH_KEY_ERROR = u'''\
It looks like you're trying to use a private key in OpenSSH format, which \
isn't supported by the installed version of OpenSSH on this instance. \
Try upgrading OpenSSH or providing your private key in an different format. \
'''
|
31
|
|
|
32
|
|
|
33
|
0
|
class Command(BaseCommand):
    '''
    Management command to clean up old jobs.

    Lists the archives still present in every enabled Borg repository, then
    deletes Job database entries (and their Elasticsearch catalog documents)
    whose archive no longer exists.
    '''

    help = 'Remove old jobs from the database.'

    # Temporary directories created during a run, recorded for cleanup.
    # NOTE(review): this is a class attribute shared across instances —
    # presumably acceptable for a one-shot management command.
    cleanup_paths = []

    def add_arguments(self, parser):
        """Register the command-line options for this command."""
        parser.add_argument('--dry-run', dest='dry_run', action='store_true',
                            default=False, help='Dry run mode (show items that would '
                            'be removed)')
        parser.add_argument('--jobs', dest='only_jobs', action='store_true',
                            default=True,
                            help='Remove jobs')

    def get_password_prompts(self, **kwargs):
        '''
        Map compiled prompt regexes to the password name that answers them.

        Expects kwargs['passwords'] to be the dict built by build_passwords;
        returns an OrderedDict so prompt patterns are tried in a stable order.
        '''
        prompts = OrderedDict()
        # Only the password *names* matter here; values are looked up later.
        for cred_name in kwargs['passwords']:
            # Match ssh-add passphrase prompts with and without the trailing
            # colon (prompt text varies between OpenSSH versions).
            prompts[re.compile(r'Enter passphrase for .*'+cred_name+r':\s*?$', re.M)] = cred_name
            prompts[re.compile(r'Enter passphrase for .*'+cred_name, re.M)] = cred_name
        # A "Bad passphrase" prompt gets an empty answer so the run fails fast
        # instead of hanging.
        prompts[re.compile(r'Bad passphrase, try again for .*:\s*?$', re.M)] = ''
        return prompts

    def get_ssh_key_path(self, instance, **kwargs):
        '''
        If using an SSH key, return the path for use by ssh-agent.

        Returns '' when no SSH credential file was written.
        '''
        private_data_files = kwargs.get('private_data_files', {})
        if 'ssh' in private_data_files.get('credentials', {}):
            return private_data_files['credentials']['ssh']

        return ''

    def build_passwords(self, job, **kwargs):
        '''
        Build a dictionary of passwords for the stored SSH private keys.

        Keys are 'credential_<setting key>'; values are the decrypted
        passphrases from the matching *ssh_password* setting.
        '''
        passwords = {}
        for key_setting in Setting.objects.filter(key__contains='ssh_key'):
            # Fixed: the original bound the matching setting to the name
            # `set`, shadowing the builtin.
            try:
                password_setting = Setting.objects.get(
                    key=key_setting.key.replace('ssh_key', 'ssh_password'))
            except Setting.DoesNotExist:
                # No passphrase stored for this key (e.g. unencrypted key);
                # skip instead of crashing the whole cleanup run.
                continue
            passwords['credential_{}'.format(key_setting.key)] = decrypt_field(password_setting, 'value')
        return passwords

    def build_private_data(self, instance, **kwargs):
        '''
        Return SSH private key data (only if stored in DB as ssh_key_data).
        Return structure is a dict of the form:
            {'credentials': {<Setting>: '<decrypted key material>', ...}}
        '''
        private_data = {'credentials': {}}
        for key_setting in Setting.objects.filter(key__contains='ssh_key'):
            # If we were sent SSH credentials, decrypt them and send them
            # back (they will be written to a temporary file).
            private_data['credentials'][key_setting] = decrypt_field(key_setting, 'value') or ''

        return private_data

    def build_private_data_dir(self, instance, **kwargs):
        '''
        Create a private (0700) temporary directory for job-related files
        and remember it for later cleanup.
        '''
        path = tempfile.mkdtemp(prefix='cyborgbackup_%s_' % instance.pk, dir='/tmp/')
        # Owner-only permissions: the directory will hold decrypted keys.
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        self.cleanup_paths.append(path)
        return path

    def build_private_data_files(self, instance, **kwargs):
        '''
        Creates temporary files containing the private data.
        Returns a dictionary i.e.,

        {
            'credentials': {
                'ssh': '/path/to/fifo'   # or a list of paths when several
            }                            # keys are written
        }

        Raises RuntimeError when an OpenSSH-format key is supplied but the
        local OpenSSH is too old (< 6.5) to read it.
        '''
        private_data = self.build_private_data(instance, **kwargs)
        private_data_files = {'credentials': {}}
        if private_data is not None:
            ssh_ver = get_ssh_version()
            ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0")
            openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5")
            for credential, data in private_data.get('credentials', {}).items():
                # Bail out now if a private key was provided in OpenSSH format
                # and we're running an earlier version (<6.5).
                if 'OPENSSH PRIVATE KEY' in data and not openssh_keys_supported:
                    raise RuntimeError(OPENSSH_KEY_ERROR)
            listpaths = []
            for credential, data in private_data.get('credentials', {}).items():
                # OpenSSH formatted keys must have a trailing newline to be
                # accepted by ssh-add.
                if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
                    data += '\n'
                # For credentials used with ssh-add, write to a named pipe which
                # will be read then closed, instead of leaving the SSH key on disk.
                if credential and not ssh_too_old:
                    name = 'credential_{}'.format(credential.key)
                    path = os.path.join(kwargs['private_data_dir'], name)
                    run.open_fifo_write(path, data)
                    listpaths.append(path)
            if len(listpaths) > 1:
                private_data_files['credentials']['ssh'] = listpaths
            elif len(listpaths) == 1:
                private_data_files['credentials']['ssh'] = listpaths[0]

        return private_data_files

    def launch_command(self, cmd, instance, key, path, **kwargs):
        '''
        Run a Borg command through pexpect with the repository environment
        configured, answering passphrase prompts as needed.

        Returns the command's captured stdout as a list of lines.
        '''
        cwd = '/tmp/'
        env = {
            'BORG_PASSPHRASE': key,
            'BORG_REPO': path,
            'BORG_RELOCATED_REPO_ACCESS_IS_OK': 'yes',
            'BORG_RSH': 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null',
        }
        args = cmd

        kwargs['private_data_dir'] = self.build_private_data_dir(instance, **kwargs)
        # Fixed: the cleanup of the directory holding decrypted key material
        # now runs in a finally block so it also happens when run_pexpect
        # (or any of the build_* helpers below) raises.
        try:
            kwargs['private_data_files'] = self.build_private_data_files(instance, **kwargs)
            kwargs['passwords'] = self.build_passwords(instance, **kwargs)

            expect_passwords = {}
            for pattern, password_name in self.get_password_prompts(**kwargs).items():
                expect_passwords[pattern] = kwargs['passwords'].get(password_name, '') or ''

            _kw = dict(
                expect_passwords=expect_passwords,
                job_timeout=getattr(settings, 'DEFAULT_JOB_TIMEOUT', 0),
                idle_timeout=getattr(settings, 'JOB_RUN_IDLE_TIMEOUT', None),
                extra_update_fields={},
                pexpect_timeout=getattr(settings, 'PEXPECT_TIMEOUT', 5),
            )
            stdout_handle = StringIO()

            ssh_key_path = self.get_ssh_key_path(instance, **kwargs)
            if ssh_key_path:
                # Make the key available through ssh-agent for the duration
                # of the command.  (Removed the dead `safe_args` local the
                # original computed but never used.)
                ssh_auth_sock = os.path.join(kwargs['private_data_dir'], 'ssh_auth.sock')
                args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)

            status, rc = run.run_pexpect(
                args, cwd, env, stdout_handle, **_kw
            )

            lines = stdout_handle.getvalue().splitlines()
        finally:
            shutil.rmtree(kwargs['private_data_dir'])
        return lines

    def cleanup_jobs(self):
        '''
        Delete Job entries whose archive no longer exists in any enabled
        repository, together with their Elasticsearch catalog documents.

        Returns a (skipped, deleted) tuple of counts (the original always
        returned (0, 0), which made handle() report wrong numbers).
        '''
        # Sanity check: Is there already a running job on the System?
        jobs = Job.objects.filter(status="running")
        if jobs.exists():
            print('A job is already running, exiting.')
            return 0, 0

        # Collect the archive names still present in the repositories.
        repos = Repository.objects.filter(enabled=True)
        repo_archives = []
        if repos.exists():
            for repo in repos:
                lines = self.launch_command(["borg", "list", "::"], repo, repo.repository_key, repo.path)

                for line in lines:
                    archive_name = line.split(' ')[0]
                    # Fixed: loop variable renamed from `type`, which
                    # shadowed the builtin.
                    for archive_type in ('rootfs', 'vm', 'mysql', 'postgresql', 'config', 'piped', 'mail', 'folders'):
                        if '{}-'.format(archive_type) in archive_name:
                            repo_archives.append(archive_name)
        print(repo_archives)

        skipped = deleted = 0
        entries = Job.objects.filter(job_type='job')
        if entries.exists():
            for entry in entries:
                # A truthy archive_name already implies it is non-empty, so
                # the original's extra `!= ''` check was redundant.
                if entry.archive_name and entry.archive_name not in repo_archives:
                    action_text = 'would delete' if self.dry_run else 'deleting'
                    print('{} {}'.format(action_text, entry.archive_name))
                    if not self.dry_run:
                        es.delete_by_query(index="catalog", doc_type='_doc',
                                           body={"query": {"match": {"archive_name": entry.archive_name}}})
                        entry.delete()
                    deleted += 1
                else:
                    skipped += 1

        return skipped, deleted

    @transaction.atomic
    def handle(self, *args, **options):
        """Entry point: run each requested cleanup_* method in a transaction."""
        self.verbosity = int(options.get('verbosity', 1))
        self.dry_run = bool(options.get('dry_run', False))

        # Determine which models to clean up; default to all of them when
        # no --<model> flag was given.
        model_names = ('jobs',)
        models_to_cleanup = set()
        for m in model_names:
            if options.get('only_%s' % m, False):
                models_to_cleanup.add(m)
        if not models_to_cleanup:
            models_to_cleanup.update(model_names)

        for m in model_names:
            if m in models_to_cleanup:
                skipped, deleted = getattr(self, 'cleanup_%s' % m)()
                if self.dry_run:
                    print('{}: {} would be deleted, {} would be skipped.'.format(m.replace('_', ' '),
                                                                                 deleted, skipped))
                else:
                    print('{}: {} deleted, {} skipped.'.format(m.replace('_', ' '), deleted, skipped))
|