uptpy.py
"""
uptpy - Python FTP uploader with minimum overhead and zero extra dependencies.
I'd like a webspace updated as quickly & reliably as possible! How to do that?
We need to know what actually needs updating instead of just pushing everything
without checking. For that we download a manifest file listing the files of each
target directory. If no such file is present yet, the remote dirs are scanned
initially. Then we loop over a local manifest and sync anything that differs.
Afterwards we update the remote manifest.
Pros:
* checks are done super quickly
* actual update uploads are as compact as possible
* deletions are handled as well (of files once uploaded via uptpy)
* any other remote dirs and files are ignored by default
Cons:
* initially ALL files need to be uploaded (we "could" instead download first
  and update only if necessary. TBD)
* If a file is removed remotely, uptpy won't notice. (We could do a post-
  check to restore any missing files. TBD)
* we need this remote manifest file (which could potentially be found on the
  server and reveal file names and hashes) (we could however obfuscate it at
  least a little.)
"""
import os
import sys
import json
import time
import ftplib
import hashlib
import logging
import posixpath
logging.basicConfig()
log = logging.getLogger('uptpy')
log.setLevel(logging.DEBUG)
__version__ = '1.1.0'
__version_info__ = (1, 1, 0)
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
REMOTE_MANIFEST = '_uptpy.json'
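# The manifest maps relative directory paths to their files and metadata,
# mirroring what scan_local() produces. For illustration (a sketch with
# made-up names and values):
#
#   {
#       "": {"index.html": {"size": 1234, "hash": "<sha256 hexdigest>"}},
#       "css": {"style.css": {"size": 567, "hash": "<sha256 hexdigest>"}}
#   }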
IGNORE_DOT_NAMES = True
ENCODING = 'utf8'
# ENCODING = 'latin-1'
# ENCODING = 'cp437'
# If this is set to True uptpy will perform an initial remote scan! Anything that's
# not also in the local path will then be deleted!! Use this only if you want to
# match local and remote 1 to 1! Otherwise uptpy will ONLY care about the local
# path files and ignore anything else that's on the server.
SCAN_REMOTE = False
MSG_DELE = '250 DELE command successful'
ERROR_UNICODE_REASON = 'invalid continuation byte'
def update(host='', user='', passwd='', local_path='', remote_path='', in_ftp=None):
# type: (str, str, str, str, str, ftplib.FTP | None) -> int
"""Perform
Log into FTP, check remote against given local path, resolve diffs.
* dir missing remotely: create dir, upload all files
* dir missing locally: remove containing files, delete dir
* file missing remotely: upload
* file missing locally: delete remotely
* file different locally: upload
"""
start_time = time.time()
ftp, _ftp_created = get_ftp(host, user, passwd, ENCODING, in_ftp)
remote_dirs = load_manifest(ftp, remote_path)
if not remote_dirs:
if SCAN_REMOTE:
remote_dirs = scan_remote(ftp, remote_path)
else:
remote_dirs = {'': {}}
local_dirs = scan_local(local_path)
if remote_dirs == local_dirs:
        log.info(
            'No difference! All good! (check took %.3fs)', time.time() - start_time
        )
return 0
num_changes = 0
for rel_dir, dir_data in local_dirs.items():
# dir missing remotely
if rel_dir not in remote_dirs:
mkdirs(ftp, remote_path, rel_dir)
remote_dirs[rel_dir] = {}
for file_name, ldata in dir_data.items():
rel_path = posixpath.join(rel_dir, file_name)
if file_name in remote_dirs[rel_dir]:
rdata = remote_dirs[rel_dir][file_name]
# all good if file exists and is identical
if rdata.get('hash') == ldata['hash']:
continue
log.info('File different! %s', rel_path)
# upload if missing or changed
_upload(ftp, rel_path, local_path, remote_path)
num_changes += 1
continue
# delete files no longer local
for file_name in set(remote_dirs[rel_dir]).difference(dir_data):
try:
result = ftp.delete(posixpath.join(remote_path, rel_dir, file_name))
if result == MSG_DELE:
log.info('DELE: %s', posixpath.join(rel_dir, file_name))
num_changes += 1
except ftplib.error_perm:
pass
# dir needs to be deleted
for dir_name in sorted(set(remote_dirs).difference(local_dirs), reverse=True):
        # Might be a directory with no files of its own but with subdirs that should stay up!
if any(ld.startswith(dir_name) for ld in local_dirs):
continue
for file_name in remote_dirs[dir_name]:
try:
result = ftp.delete(posixpath.join(remote_path, dir_name, file_name))
                if result == MSG_DELE:
log.info('DELE: %s', posixpath.join(dir_name, file_name))
num_changes += 1
except ftplib.error_perm:
pass
dir_path = posixpath.join(remote_path, dir_name)
try:
res = ftp.rmd(dir_path)
if res == '250 RMD command successful':
log.info('RMD: %s', dir_path)
except ftplib.error_perm as error:
log.error('Error deleting "%s"!\n%s', dir_path, error)
update_manifest(ftp, local_dirs, local_path, remote_path)
if _ftp_created:
ftp.close()
return num_changes
def update_manifest(ftp, data, local_path, remote_path):
"""Update the remote manifest."""
if not local_path:
local_path = THIS_DIR
tmp_local_mani = os.path.join(local_path, REMOTE_MANIFEST)
with open(tmp_local_mani, 'w') as file_obj:
json.dump(data, file_obj, sort_keys=True)
_upload(ftp, REMOTE_MANIFEST, local_path, remote_path)
def _upload(ftp, rel_path, local_path, remote_path):
# type: (ftplib.FTP, str, str, str) -> bool
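    """Upload local_path/rel_path to remote_path/rel_path; return True on success."""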
local_file_path = os.path.join(local_path, rel_path)
rel_path = rel_path.replace(os.path.sep, posixpath.sep)
with open(local_file_path, 'rb') as file_obj:
try:
res = ftp.storbinary(
'STOR %s' % posixpath.join(remote_path, rel_path), file_obj
)
if res == '226 Transfer complete':
log.info('STOR: %s', rel_path)
return True
log.error(res)
except ftplib.error_perm:
log.exception('Error uploading file "%s"', rel_path)
log.info(
'local file: %s exists:%s',
local_file_path,
os.path.isfile(local_file_path),
)
            parent_dir = posixpath.dirname(posixpath.join(remote_path, rel_path))
            log.info('remote parent dir: %s', parent_dir)
return False
def scan_remote(ftp, remote_path, ignores=None):
# type: (ftplib.FTP, str, list[str] | None) -> dict[str, dict[str, dict[str, str|int]]]
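    """Recursively scan remote_path via MLSD and return a manifest-style dict."""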
log.info('Scanning remote path: %s ...', remote_path)
data = {}
t0 = time.time()
_scan_remote(ftp, remote_path, '', data, ignores)
    log.info('_scan_remote took %.3fs', time.time() - t0)
return data
def _scan_remote(ftp, root, path, data, ignores):
# type: (ftplib.FTP, str, str, dict[str, dict[str, str]], list[str] | None) -> None
files = {}
try:
for name, item in ftp.mlsd(posixpath.join(root, path)):
            if (IGNORE_DOT_NAMES and name.startswith('.')) or _is_ignored(name, ignores):
continue
if item['type'] == 'file':
files[name] = {'size': int(item['size'])}
elif item['type'] == 'dir':
_scan_remote(ftp, root, posixpath.join(path, name), data, ignores)
except UnicodeDecodeError as error:
if error.reason == ERROR_UNICODE_REASON and ftp.encoding.lower() in (
'utf8',
'utf-8',
):
raise Exception(
'Try with a different encoding like `latin-1` or `cp437`!'
) from error
raise error
    # Collect remote dirs whether they contain files or not,
    # so that empty folders can be deleted later.
data[path] = files
log.info('dir: %s - %i files', path, len(files))
def scan_local(root, ignores=None):
# type: (str, list[str] | None) -> dict[str, dict[str, dict[str, str | int]]]
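    """Recursively scan root and return {rel_dir: {file_name: {'size', 'hash'}}}."""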
data = {}
_scan_local(root, '', data, ignores)
return data
def _scan_local(root, path, data, ignores):
# type: (str, str, dict[str, dict[str, dict[str, str | int]]], list[str] | None) -> None
has_files, has_dirs = False, False
_path = path.replace(os.path.sep, posixpath.sep)
for item in os.scandir(os.path.join(root, path)):
# for dirpath, _, filenames in os.walk(os.path.join(root, path)):
if IGNORE_DOT_NAMES and item.name.startswith('.'):
continue
rel_path = os.path.join(path, item.name)
if _is_ignored(rel_path, ignores):
continue
if item.is_file():
data.setdefault(_path, {})[item.name] = {
'size': os.path.getsize(item.path),
'hash': _hsh(item.path),
}
has_files = True
elif item.is_dir():
_scan_local(root, rel_path, data, ignores)
has_dirs = True
    # Collect the directory if it only has subdirs but no files.
if not has_files and has_dirs:
data.setdefault(_path, {})
def _is_ignored(name, ignores):
    # type: (str, list[str] | None) -> bool
    """
    Check name or relative path against provided ignores.
    """
    if name == REMOTE_MANIFEST:
        return True
    if ignores is None:
        return False
    # Simple matching: treat ignores as exact names or path prefixes.
    name = name.replace(os.path.sep, posixpath.sep)
    return any(
        name == ignore or name.startswith(ignore.rstrip(posixpath.sep) + posixpath.sep)
        for ignore in ignores
    )
def get_ftp(host, user, passwd, encoding=ENCODING, in_ftp=None):
# type: (str, str, str, str, ftplib.FTP | None) -> tuple[ftplib.FTP, bool]
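    """Return (ftp, created): reuse in_ftp if given, otherwise connect and log in."""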
if in_ftp is not None:
return in_ftp, False
log.info('Connecting to "%s" ...', host)
try:
ftp = ftplib.FTP(host, encoding=encoding)
except Exception as error:
        raise Exception('Error creating connection to "%s"\n%s' % (host, error)) from error
result = ftp.login(user, passwd)
log.info(result)
log.info(ftp.getwelcome())
return ftp, True
def _hsh(local_path):
# type: (str) -> str
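    """Return the SHA-256 hex digest of the file at local_path, read in 64 KiB chunks."""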
hasherobj = hashlib.sha256()
size = pow(2, 16)
with open(local_path, 'rb') as fobj:
buf = fobj.read(size)
len_buf = len(buf)
while len_buf > 0:
hasherobj.update(buf)
buf = fobj.read(size)
len_buf = len(buf)
return hasherobj.hexdigest()
def load_manifest(ftp, remote_path):
# type: (ftplib.FTP, str) -> dict[str, dict[str, dict[str, str | int]]]
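    """Fetch and parse the remote manifest; return an empty dict if missing or invalid."""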
try:
content = read_remote(ftp, posixpath.join(remote_path, REMOTE_MANIFEST))
except ftplib.error_perm:
return {}
try:
data = json.loads(content)
except json.JSONDecodeError:
return {}
return data
def read_remote(ftp, path):
# type: (ftplib.FTP, str) -> str
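    """Download a remote text file via RETR and return its content as a string."""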
lines = []
ftp.retrlines('RETR ' + path, lines.append)
return '\n'.join(lines)
def mkdirs(ftp, root, path=''):
# type: (ftplib.FTP, str, str) -> None
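    """Create root/path on the server, creating intermediate directories as needed."""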
if path:
path = path.replace(os.path.sep, posixpath.sep)
path = posixpath.join(root, path)
else:
path = root
parts = path.split(posixpath.sep)
created = ''
for i in range(1, len(parts) + 1):
this_dir = posixpath.join(*parts[:i])
        # Try and skip error 550. It's not very specific:
        # "Requested action not taken. File unavailable (e.g., file not found, no access)."
        # see: https://en.wikipedia.org/wiki/List_of_FTP_server_return_codes
        # But it can't mean the parent dir is missing (we're walking the path from the top)
        # and without listing we cannot easily check for presence anyway. Trying first is
        # also quicker than listing!
try:
ftp.mkd(this_dir)
except ftplib.error_perm as error:
if error.args[0].startswith('550'):
continue
raise error
created = posixpath.join(*parts[:i])
if created:
log.info('MKD: %s', created)
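# Example command line invocation (a sketch; host, credentials and paths are
# placeholders for illustration):
#
#   python uptpy.py ftp.example.com myuser mypassword ./public_html /htdocs
#
# The five arguments map to update(host, user, passwd, local_path, remote_path).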
if __name__ == '__main__':
    if sys.argv[1:]:
        update(*sys.argv[1:6])