feat: Fortschritt und Statusverfolgung für Uploads hinzugefügt; Versionsnummer auf 1.0.49 erhöht
This commit is contained in:
224
app.py
224
app.py
@@ -10,6 +10,7 @@ import threading
|
||||
import subprocess
|
||||
import time
|
||||
import posixpath
|
||||
import select
|
||||
import uuid as _uuid_mod
|
||||
import urllib.request as _urlreq
|
||||
import urllib.error as _urlerr
|
||||
@@ -874,6 +875,14 @@ upload_state = {
|
||||
'running': False,
|
||||
'current': '',
|
||||
'results': [],
|
||||
'progress': 0,
|
||||
'total': 0,
|
||||
'done': 0,
|
||||
'bytes_total': 0,
|
||||
'bytes_done': 0,
|
||||
'current_file': '',
|
||||
'eta_sec': None,
|
||||
'speed_bps': 0,
|
||||
}
|
||||
upload_lock = threading.Lock()
|
||||
|
||||
@@ -896,6 +905,76 @@ def _rclone_obscure(pw):
|
||||
return r.stdout.strip()
|
||||
|
||||
|
||||
def _parse_percent(text: str):
|
||||
m = re.search(r'(\d+(?:\.\d+)?)%', text)
|
||||
if not m:
|
||||
return None
|
||||
try:
|
||||
return max(0.0, min(100.0, float(m.group(1))))
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _rclone_copyto_progress(src: Path, dest: str, base_done: int,
                            file_size: int, total_bytes: int, start_ts: float,
                            timeout: int = 7200):
    """Copy one file with ``rclone copyto`` while streaming progress updates.

    rclone is started with ``--progress --stats 1s --stats-one-line`` so it
    emits a one-line status (carriage-return separated) on stderr roughly
    every second.  That stream is read character by character; whenever a
    full line is assembled, any percentage found in it is converted to
    absolute bytes and published into the shared ``upload_state`` dict
    (``bytes_done``, ``progress``, ``speed_bps``, ``eta_sec``) under
    ``upload_lock``.

    Args:
        src: Local source file to upload.
        dest: rclone destination (remote connection string + path).
        base_done: Bytes already uploaded before this file, so the
            published totals reflect the whole batch.
        file_size: Size of *src* in bytes (used to turn percent → bytes).
        total_bytes: Total byte count of the entire upload batch.
        start_ts: Timestamp when the batch started (basis for speed/ETA).
        timeout: Hard per-file wall-clock limit in seconds.

    Returns:
        subprocess.CompletedProcess: rclone's return code plus the stderr
        collected during streaming; a synthetic ``returncode=1`` result is
        returned on timeout or any unexpected error (this function never
        raises).
    """
    args = [
        'rclone', '--config', str(RCLONE_CONF),
        'copyto', str(src), dest,
        '--retries', '1',
        '--progress',
        '--stats', '1s',
        '--stats-one-line',
    ]
    try:
        # text=True + bufsize=1 gives line-buffered str pipes; stderr is
        # still consumed one char at a time below because rclone separates
        # status updates with '\r', not '\n'.
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             text=True, bufsize=1)
        started = time.time()
        stderr_parts = []  # every char read, so the full stderr can be returned
        buf = ''           # current (incomplete) status line
        while True:
            if p.poll() is not None:
                break
            if time.time() - started > timeout:
                p.kill()
                return subprocess.CompletedProcess(args, 1, stdout='', stderr=f'Timeout nach {timeout}s')

            # Poll stderr with a short timeout so the loop can also notice
            # process exit and the overall timeout.
            ready, _, _ = select.select([p.stderr], [], [], 0.2) if p.stderr else ([], [], [])
            if not ready:
                time.sleep(0.1)
                continue
            chunk = p.stderr.read(1)
            if not chunk:
                continue
            stderr_parts.append(chunk)
            if chunk not in ('\r', '\n'):
                buf += chunk
                continue

            # End of a status line: try to extract a percentage from it.
            pct = _parse_percent(buf)
            buf = ''
            if pct is not None:
                transferred = int(file_size * pct / 100)
                bytes_done = base_done + transferred
                elapsed = time.time() - start_ts
                # Suppress speed/ETA for the first second to avoid wild values.
                speed = bytes_done / elapsed if elapsed > 1 else 0
                eta = int((total_bytes - bytes_done) / speed) if speed > 0 and total_bytes > bytes_done else 0
                with upload_lock:
                    upload_state.update(bytes_done=bytes_done,
                                        progress=int(bytes_done / total_bytes * 100) if total_bytes else 100,
                                        speed_bps=int(speed), eta_sec=eta)

        # Process has exited: drain whatever is still buffered in the pipes.
        stdout, stderr_tail = p.communicate(timeout=5)
        if stderr_tail:
            stderr_parts.append(stderr_tail)
        return subprocess.CompletedProcess(args, p.returncode, stdout=stdout or '',
                                           stderr=''.join(stderr_parts))
    except subprocess.TimeoutExpired:
        # communicate() above timed out — report as timeout.
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=f'Timeout nach {timeout}s')
    except Exception as e:
        # Never propagate: callers treat any failure via the returncode.
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=str(e))
|
||||
|
||||
|
||||
def _remote_name(tid):
|
||||
return f'picopy_{tid}'
|
||||
|
||||
@@ -905,14 +984,23 @@ def _join_remote_path(*parts) -> str:
|
||||
|
||||
|
||||
def _remote_exists(remote_path: str) -> bool:
    """Return True when *remote_path* exists on the remote.

    Existence is derived from ``_remote_size``: a path exists exactly
    when a size can be determined for it.
    """
    size = _remote_size(remote_path)
    return size is not None
|
||||
|
||||
|
||||
def _remote_size(remote_path: str):
    """Return the size in bytes of the remote file at *remote_path*.

    Runs ``rclone lsjson`` on the path and reads the ``Size`` field from
    the result.  Returns ``None`` when the listing fails, the path does
    not exist, or no size can be determined — callers (``_remote_exists``)
    treat ``None`` as "does not exist".
    """
    # NOTE(review): the diff rendering interleaved the old bool-returning
    # lines with this implementation; reconstructed the coherent
    # size-returning version (stale `return False` / `return bool(...)`
    # lines removed).
    r = _rclone('lsjson', remote_path, timeout=20)
    if r.returncode != 0:
        return None
    try:
        data = json.loads(r.stdout or '[]')
        # lsjson normally yields a list of entries; tolerate a single dict.
        if isinstance(data, dict):
            return data.get('Size')
        if isinstance(data, list) and data:
            item = data[0]
            return item.get('Size') if isinstance(item, dict) else None
        return None
    except (json.JSONDecodeError, ValueError):
        return None
|
||||
|
||||
|
||||
def _remote_unique_rel_path(t: dict, rel_path: str) -> str:
|
||||
@@ -1007,12 +1095,17 @@ def run_uploads(local_dir: Path, cfg: dict):
|
||||
return
|
||||
|
||||
with upload_lock:
|
||||
upload_state.update(running=True, results=[], current='')
|
||||
upload_state.update(running=True, results=[], current='',
|
||||
progress=0, total=0, done=0,
|
||||
bytes_total=0, bytes_done=0,
|
||||
current_file='', eta_sec=None, speed_bps=0)
|
||||
|
||||
for t in targets:
|
||||
name = t.get('name', t['id'])
|
||||
with upload_lock:
|
||||
upload_state['current'] = name
|
||||
upload_state.update(current=name, progress=0, total=0, done=0,
|
||||
bytes_total=0, bytes_done=0,
|
||||
current_file='', eta_sec=None, speed_bps=0)
|
||||
|
||||
add_log(f'Upload >> {name}...')
|
||||
dest_root = t.get('dest_path', 'PiCopy').strip('/')
|
||||
@@ -1049,43 +1142,78 @@ def run_uploads(local_dir: Path, cfg: dict):
|
||||
add_log(f'Upload {name}: mkdir rc={mk.returncode}'
|
||||
+ (f' err={mk.stderr.strip()[:100]}' if mk.returncode != 0 else ''))
|
||||
|
||||
# 3. Kopieren
|
||||
# 3. Kopieren mit Fortschritt
|
||||
add_log(f'Upload {name}: starte copy von {local_dir}')
|
||||
dup_mode = cfg.get('duplicate_handling', 'skip')
|
||||
if dup_mode == 'rename':
|
||||
errors = []
|
||||
for f in sorted(local_dir.rglob('*')):
|
||||
if not f.is_file():
|
||||
continue
|
||||
rel = f.relative_to(local_dir).as_posix()
|
||||
remote_rel = _remote_unique_rel_path(t, _join_remote_path(dest_rel, rel))
|
||||
rr = _rclone('copyto', str(f), _smb_conn(t, remote_rel),
|
||||
'--retries', '1', timeout=7200)
|
||||
if rr.returncode != 0:
|
||||
errors.append(rr.stderr.strip() or f'{rel}: unbekannter Fehler')
|
||||
if len(errors) >= 5:
|
||||
break
|
||||
r = subprocess.CompletedProcess(
|
||||
args=['rclone', 'copyto'],
|
||||
returncode=1 if errors else 0,
|
||||
stdout='',
|
||||
stderr='\n'.join(errors),
|
||||
)
|
||||
else:
|
||||
copy_args = [
|
||||
'copy', str(local_dir), dest,
|
||||
'--create-empty-src-dirs',
|
||||
'--transfers', '1',
|
||||
'--retries', '1',
|
||||
]
|
||||
files = sorted(f for f in local_dir.rglob('*') if f.is_file())
|
||||
dirs = sorted(d for d in local_dir.rglob('*') if d.is_dir())
|
||||
bytes_total = sum(f.stat().st_size for f in files)
|
||||
with upload_lock:
|
||||
upload_state.update(total=len(files), bytes_total=bytes_total,
|
||||
progress=100 if not files else 0)
|
||||
|
||||
for d in dirs:
|
||||
rel_dir = d.relative_to(local_dir).as_posix()
|
||||
_rclone('mkdir', _smb_conn(t, _join_remote_path(dest_rel, rel_dir)), timeout=30)
|
||||
|
||||
errors = []
|
||||
skipped = 0
|
||||
start_ts = time.time()
|
||||
for idx, f in enumerate(files, start=1):
|
||||
rel = f.relative_to(local_dir).as_posix()
|
||||
fsize = f.stat().st_size
|
||||
remote_rel = _join_remote_path(dest_rel, rel)
|
||||
with upload_lock:
|
||||
upload_state.update(done=idx, current_file=rel,
|
||||
progress=int(idx / len(files) * 100) if files else 100)
|
||||
|
||||
if dup_mode == 'skip':
|
||||
copy_args.append('--size-only')
|
||||
r = _rclone(*copy_args, timeout=7200)
|
||||
remote_size = _remote_size(_smb_conn(t, remote_rel))
|
||||
if remote_size == fsize:
|
||||
skipped += 1
|
||||
with upload_lock:
|
||||
bd = upload_state['bytes_done'] + fsize
|
||||
elapsed = time.time() - start_ts
|
||||
speed = bd / elapsed if elapsed > 1 else 0
|
||||
eta = int((bytes_total - bd) / speed) if speed > 0 and bytes_total > bd else 0
|
||||
upload_state.update(bytes_done=bd,
|
||||
progress=int(bd / bytes_total * 100) if bytes_total else 100,
|
||||
speed_bps=int(speed), eta_sec=eta)
|
||||
continue
|
||||
elif dup_mode == 'rename':
|
||||
remote_rel = _remote_unique_rel_path(t, remote_rel)
|
||||
|
||||
with upload_lock:
|
||||
base_done = upload_state['bytes_done']
|
||||
rr = _rclone_copyto_progress(f, _smb_conn(t, remote_rel),
|
||||
base_done, fsize, bytes_total, start_ts)
|
||||
if rr.returncode != 0:
|
||||
errors.append(rr.stderr.strip() or f'{rel}: unbekannter Fehler')
|
||||
if len(errors) >= 5:
|
||||
break
|
||||
|
||||
with upload_lock:
|
||||
bd = base_done + fsize
|
||||
elapsed = time.time() - start_ts
|
||||
speed = bd / elapsed if elapsed > 1 else 0
|
||||
eta = int((bytes_total - bd) / speed) if speed > 0 and bytes_total > bd else 0
|
||||
upload_state.update(bytes_done=bd,
|
||||
progress=int(bd / bytes_total * 100) if bytes_total else 100,
|
||||
speed_bps=int(speed), eta_sec=eta)
|
||||
|
||||
r = subprocess.CompletedProcess(
|
||||
args=['rclone', 'copyto'],
|
||||
returncode=1 if errors else 0,
|
||||
stdout='',
|
||||
stderr='\n'.join(errors),
|
||||
)
|
||||
ok = r.returncode == 0
|
||||
err = ''
|
||||
if not ok:
|
||||
err = r.stderr.strip() or 'Unbekannter Fehler'
|
||||
add_log(f'Upload {name}: rclone stderr: {err[:300]}')
|
||||
elif skipped:
|
||||
add_log(f'Upload {name}: {skipped} Dateien übersprungen')
|
||||
|
||||
with upload_lock:
|
||||
upload_state['results'].append({'name': name, 'ok': ok, 'msg': err})
|
||||
@@ -1094,6 +1222,7 @@ def run_uploads(local_dir: Path, cfg: dict):
|
||||
with upload_lock:
|
||||
upload_state['running'] = False
|
||||
upload_state['current'] = ''
|
||||
upload_state['current_file'] = ''
|
||||
|
||||
|
||||
# -- Flask Routes --------------------------------------------------------------
|
||||
@@ -1848,6 +1977,17 @@ body{background:var(--bg);color:var(--txt);font-family:-apple-system,BlinkMacSys
|
||||
<div id="upload-block" style="display:none;margin-top:.75rem;padding:.65rem .85rem;background:var(--bg2);border-radius:.5rem;border:1px solid var(--brd)">
|
||||
<div class="sec" style="margin-top:0">Fernkopie</div>
|
||||
<div id="upload-current" style="font-size:.83rem;color:var(--acc)"></div>
|
||||
<div id="upload-prog" style="display:none;margin-top:.45rem">
|
||||
<div class="prog-track"><div class="prog-fill" id="upload-fill" style="width:0%"></div></div>
|
||||
<div class="meta-row">
|
||||
<span class="pill acc" id="upload-pct"></span>
|
||||
<span class="pill" id="upload-files"></span>
|
||||
<span class="pill" id="upload-bytes"></span>
|
||||
<span class="pill acc" id="upload-eta" style="display:none"></span>
|
||||
<span class="pill" id="upload-speed" style="display:none"></span>
|
||||
</div>
|
||||
<div id="upload-file" style="font-size:.74rem;color:var(--sub);margin-top:.3rem;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;font-family:monospace"></div>
|
||||
</div>
|
||||
<div id="upload-results" style="margin-top:.3rem;display:flex;flex-direction:column;gap:.2rem"></div>
|
||||
</div>
|
||||
|
||||
@@ -2598,7 +2738,8 @@ function fileIcon(n){
|
||||
return'📄';
|
||||
}
|
||||
function fmtBytes(b){
|
||||
if(!b)return'';
|
||||
if(b==null)return'';
|
||||
if(b===0)return'0 B';
|
||||
if(b<1024)return b+' B';
|
||||
if(b<1048576)return(b/1024).toFixed(1)+' KB';
|
||||
if(b<1073741824)return(b/1048576).toFixed(1)+' MB';
|
||||
@@ -2730,6 +2871,19 @@ async function poll(){
|
||||
if(u.running||u.results.length){
|
||||
ub.style.display='block';
|
||||
$('upload-current').innerHTML=u.running?'⚡ '+u.current+'...':'';
|
||||
const up=$('upload-prog'),uf=$('upload-fill');
|
||||
const pct=Math.max(0,Math.min(100,u.progress||0));
|
||||
if(u.running){
|
||||
up.style.display='block'; uf.style.width=pct+'%';
|
||||
$('upload-pct').textContent=pct+'%';
|
||||
$('upload-files').textContent=(u.done||0)+' / '+(u.total||0)+' Dateien';
|
||||
$('upload-bytes').textContent=fmtBytes(u.bytes_done||0)+' / '+fmtBytes(u.bytes_total||0);
|
||||
const ue=fmtETA(u.eta_sec); $('upload-eta').style.display=ue?'':'none'; $('upload-eta').textContent=ue?'⏱ '+ue:'';
|
||||
const us=fmtSpd(u.speed_bps); $('upload-speed').style.display=us?'':'none'; $('upload-speed').textContent=us?'⚡ '+us:'';
|
||||
$('upload-file').textContent=u.current_file||'';
|
||||
}else{
|
||||
up.style.display='none';
|
||||
}
|
||||
$('upload-results').innerHTML=u.results.map(r=>`<div style="font-size:.79rem;color:${r.ok?'var(--grn)':'var(--red)'}">${r.ok?'✓':'✗'} ${r.name}${r.msg?' - '+r.msg:''}</div>`).join('');
|
||||
}else ub.style.display='none';
|
||||
}catch(e){}
|
||||
|
||||
Reference in New Issue
Block a user