Refactor code structure for improved readability and maintainability
This commit is contained in:
0
picopy/__init__.py
Normal file
0
picopy/__init__.py
Normal file
98
picopy/config.py
Normal file
98
picopy/config.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""PiCopy – Konfiguration, Pfade, Konstanten, Logging."""
|
||||
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
RAW_BASE = 'https://git.leuschner.dev/Tobias/PiCopy/raw/branch/main'
VERSION_FILE = Path(__file__).parent.parent / 'version.txt'


def load_installed_version():
    """Return the installed version from version.txt.

    An empty file yields the baseline '1.0.4'; an unreadable file yields
    the placeholder 'X.X.X'.
    """
    try:
        text = VERSION_FILE.read_text(encoding='utf-8').strip()
        return text or '1.0.4'
    except Exception:
        return 'X.X.X'
|
||||
|
||||
|
||||
# Version of the currently installed code, read once at import time.
VERSION = load_installed_version()

# Installation layout under /opt/picopy.
BASE_DIR = Path('/opt/picopy')
CONFIG_FILE = BASE_DIR / 'config.json'
STATE_FILE = BASE_DIR / 'state.json'
LOG_DIR = BASE_DIR / 'logs'
LOG_FILE = LOG_DIR / 'picopy.log'
INTERNAL_DEST_DIR = BASE_DIR / 'internal'
# Create the log directory before logging.basicConfig opens the file handler.
LOG_DIR.mkdir(parents=True, exist_ok=True)
HISTORY_FILE = BASE_DIR / 'history.json'
MAX_HISTORY = 100  # maximum number of copy-history entries kept on disk

# Log to the log file and to the stream (console/journal) simultaneously.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(message)s',
    handlers=[logging.FileHandler(LOG_FILE), logging.StreamHandler()]
)
log = logging.getLogger('picopy')

# NetworkManager connection names used elsewhere in the project.
NM_AP_CON = 'PiCopy-AP'
NM_CLIENT_CON = 'PiCopy-WiFi'
WIFI_BOOT_WAIT = 25  # seconds to wait at startup before the AP is started
|
||||
|
||||
# Factory defaults; load_cfg() overlays the on-disk config.json on top.
DEFAULT_CONFIG = {
    # USB
    'source_ports': [],  # [{port, label}, ...]
    'source_port': None, 'source_label': '',  # legacy single-source fields (migrated)
    'dest_port': None, 'dest_label': '',
    'dest_type': 'usb', 'internal_dest_label': 'Interner Speicher',
    'internal_share_enabled': False,
    # Destination folder naming and copy behaviour.
    'folder_format': '%Y-%m-%d', 'add_time': True,
    'subfolder': True, 'auto_copy': True,
    'file_filter': '', 'exclude_system': True,  # extension whitelist / skip OS files
    'duplicate_handling': 'skip',  # 'skip' or 'rename' (see copy engine)
    'verify_checksum': False, 'delete_source': False,
    # WiFi
    'wifi_ssid': '', 'wifi_password': '',
    # NOTE(review): default AP password 'PiCopy,' is only 7 characters; WPA2
    # passphrases require 8-63 — confirm this default is intended.
    'ap_ssid': 'PiCopy', 'ap_password': 'PiCopy,',
    # WireGuard
    'wireguard_auto': False,
}
|
||||
|
||||
|
||||
def load_cfg():
    """Return the configuration: DEFAULT_CONFIG overlaid with config.json.

    A corrupt JSON file is logged, renamed to ``.corrupt`` (best effort)
    and the defaults are used; any other read error only logs a warning.
    """
    merged = DEFAULT_CONFIG.copy()
    try:
        if CONFIG_FILE.exists():
            on_disk = json.loads(CONFIG_FILE.read_text(encoding='utf-8'))
            merged.update(on_disk)
    except (json.JSONDecodeError, ValueError) as e:
        log.error(f'config.json korrupt ({e}), verwende Standardwerte')
        try:
            CONFIG_FILE.rename(CONFIG_FILE.with_suffix('.corrupt'))
        except Exception:
            pass
    except Exception as e:
        log.warning(f'config.json nicht lesbar: {e}')
    return merged
|
||||
|
||||
|
||||
def save_cfg(cfg):
    """Persist *cfg* to CONFIG_FILE atomically as pretty-printed JSON."""
    payload = json.dumps(cfg, indent=2)
    _atomic_write(CONFIG_FILE, payload)
|
||||
|
||||
|
||||
def _atomic_write(path: Path, content: str) -> None:
|
||||
"""Schreibt atomar: erst .tmp, dann os.replace() - sicher bei Stromausfall."""
|
||||
tmp = path.with_suffix(path.suffix + '.tmp')
|
||||
try:
|
||||
tmp.write_text(content, encoding='utf-8')
|
||||
with open(tmp, 'rb') as fh:
|
||||
os.fsync(fh.fileno()) # Daten wirklich auf Datenträger schreiben
|
||||
os.replace(str(tmp), str(path)) # Atomares Umbenennen (POSIX-Garantie)
|
||||
except Exception:
|
||||
try: tmp.unlink(missing_ok=True)
|
||||
except Exception: pass
|
||||
raise
|
||||
|
||||
|
||||
def _fmt_bytes(b):
|
||||
if b < 1024: return f'{b} B'
|
||||
if b < 1024**2: return f'{b/1024:.1f} KB'
|
||||
if b < 1024**3: return f'{b/1024**2:.1f} MB'
|
||||
return f'{b/1024**3:.2f} GB'
|
||||
415
picopy/copy_engine.py
Normal file
415
picopy/copy_engine.py
Normal file
@@ -0,0 +1,415 @@
|
||||
"""PiCopy – Kopierlogik: do_copy, check_auto_copy, usb_monitor."""
|
||||
|
||||
import hashlib as _hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import load_cfg, _fmt_bytes, log
|
||||
from picopy.state import (
|
||||
copy_state, copy_lock, save_state, append_history, add_log
|
||||
)
|
||||
from picopy.usb import usb_devices, ensure_mount, internal_dest_device
|
||||
|
||||
_copy_thread: threading.Thread | None = None
|
||||
|
||||
SYSTEM_EXCLUDES = {
|
||||
'.DS_Store', 'Thumbs.db', 'thumbs.db', 'desktop.ini',
|
||||
'.Spotlight-V100', '.Trashes', '.fseventsd', '.TemporaryItems',
|
||||
'.VolumeIcon.icns', 'RECYCLER', '$RECYCLE.BIN',
|
||||
'System Volume Information', '.DocumentRevisions-V100',
|
||||
}
|
||||
|
||||
|
||||
def _should_copy(f: Path, cfg: dict) -> bool:
|
||||
if cfg.get('exclude_system'):
|
||||
for part in f.parts:
|
||||
if part in SYSTEM_EXCLUDES:
|
||||
return False
|
||||
if f.name.startswith('._'):
|
||||
return False
|
||||
filt = cfg.get('file_filter', '').strip()
|
||||
if filt:
|
||||
allowed = {e.strip().lower().lstrip('.') for e in filt.split(',') if e.strip()}
|
||||
if f.suffix.lower().lstrip('.') not in allowed:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _unique_path(p: Path) -> Path:
|
||||
stem, suffix, parent = p.stem, p.suffix, p.parent
|
||||
i = 1
|
||||
while True:
|
||||
candidate = parent / f'{stem}_({i}){suffix}'
|
||||
if not candidate.exists():
|
||||
return candidate
|
||||
i += 1
|
||||
|
||||
|
||||
def _file_md5(p: Path) -> str:
|
||||
h = _hashlib.md5()
|
||||
with open(p, 'rb') as f:
|
||||
for chunk in iter(lambda: f.read(65536), b''):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def _resolve_source_ports(cfg) -> list:
|
||||
"""Gibt source_ports als [{port, label}]-Liste zurück. Migriert altes source_port-Feld."""
|
||||
ports = cfg.get('source_ports') or []
|
||||
if not ports and cfg.get('source_port'):
|
||||
ports = [{'port': cfg['source_port'], 'label': cfg.get('source_label', '')}]
|
||||
return ports
|
||||
|
||||
|
||||
def _configured_destination(cfg, devs):
|
||||
if cfg.get('dest_type') == 'internal':
|
||||
return internal_dest_device(cfg)
|
||||
return next((d for d in devs if d['usb_port'] == cfg.get('dest_port')), None)
|
||||
|
||||
|
||||
def do_copy(src_devs, dst_dev, cfg):
    """Copy files from one or more source devices onto one destination.

    Full pipeline: mount destination and sources, collect and filter
    files, free-space check, phase 1 copy, optional phase 2 checksum
    verification, optional phase 3 source deletion, then an optional
    NAS upload in a background thread.  Progress and log lines are
    published via the shared ``copy_state`` dict (always accessed under
    ``copy_lock``); a history entry is appended in ``finally`` whether
    the run succeeded or not.

    NOTE: ``add_log`` and ``save_state`` acquire ``copy_lock`` themselves,
    so they must never be called while this function holds the lock.
    """
    dst_mp = None
    dst_owned = False
    src_mounts = []  # [(src_dev, src_mp, src_owned)] — for unmounting in finally
    _upload_thread = None
    # Accumulator for the history entry written in finally.
    _hist = {
        'start': time.time(),
        'ok': False, 'copied': 0, 'skipped': 0, 'errors': 0,
        'bytes': 0, 'error_msg': '',
    }
    try:
        # Reset shared progress state for a fresh run.
        with copy_lock:
            copy_state.update(running=True, progress=0, error=None,
                              done=0, total=0, logs=[], current='',
                              bytes_total=0, bytes_done=0,
                              start_ts=time.time(), eta_sec=None, speed_bps=0,
                              phase='copy',
                              space_warning=False, space_needed=0, space_free=0,
                              last_success_file='')
        save_state()
        n = len(src_devs)
        add_log(f'Kopiervorgang gestartet ({n} Quelle{"n" if n != 1 else ""})')

        dst_mp, dst_owned = ensure_mount(dst_dev)
        if not dst_mp:
            raise RuntimeError(f'Ziel nicht mountbar: {dst_dev["device"]}')
        add_log(f'Ziel: {dst_mp} ({dst_dev["label"]})')

        # Destination folder name: configured date format, optionally + time.
        ts = datetime.now()
        date_str = ts.strftime(cfg['folder_format'])
        if cfg.get('add_time'):
            date_str += '_' + ts.strftime('%H%M%S')

        # -- Mount all sources & collect files ------------------------------
        # source_data: [(src_dev, src_path, files, dst_dir, incomplete_marker)]
        source_data = []
        total = 0
        bytes_total = 0

        for src_dev in src_devs:
            # Cooperative cancellation: UI clears copy_state['running'].
            with copy_lock:
                cancelled = not copy_state['running']
            if cancelled:
                add_log('Abgebrochen')
                return

            src_mp_i, src_owned_i = ensure_mount(src_dev)
            src_mounts.append((src_dev, src_mp_i, src_owned_i))
            if not src_mp_i:
                add_log(f'Quelle nicht mountbar: {src_dev["device"]} - übersprungen')
                continue

            add_log(f'Quelle: {src_mp_i} ({src_dev["label"]})')
            src_path = Path(src_mp_i)
            all_files = [f for f in src_path.rglob('*') if f.is_file()]
            files = [f for f in all_files if _should_copy(f, cfg)]
            n_filtered = len(all_files) - len(files)
            if n_filtered:
                add_log(f'{n_filtered} Dateien gefiltert ({src_dev["label"]})')

            # Sanitize the source label for use as a directory name.
            label = re.sub(r'[^\w\-]', '_', src_dev.get('label', 'source'))
            dst_dir_i = Path(dst_mp) / date_str
            if cfg.get('subfolder'):
                dst_dir_i = dst_dir_i / label
            dst_dir_i.mkdir(parents=True, exist_ok=True)
            add_log(f'Zielordner: {dst_dir_i}')

            # Remove temp files left over from a previously aborted run.
            for stale in dst_dir_i.rglob('*.picopy_tmp'):
                stale.unlink(missing_ok=True)

            # Marker flags this folder as incomplete until the run finishes.
            incomplete_marker_i = dst_dir_i / '.picopy_incomplete'
            import json as _json  # local import: json is not imported at module level
            incomplete_marker_i.write_text(_json.dumps({
                'started': datetime.now().isoformat(),
                'source': src_dev.get('label', ''),
            }))

            total += len(files)
            bytes_total += sum(f.stat().st_size for f in files)
            source_data.append((src_dev, src_path, files, dst_dir_i, incomplete_marker_i))

        with copy_lock:
            copy_state['total'] = total
            copy_state['bytes_total'] = bytes_total
        add_log(f'{total} Dateien gesamt ({_fmt_bytes(bytes_total)})')

        # -- Free-space check ------------------------------------------------
        try:
            dst_free = shutil.disk_usage(dst_mp).free
        except Exception:
            dst_free = 0
        if bytes_total > 0 and dst_free < bytes_total:
            # Insufficient space: warn and later refuse to delete the source.
            with copy_lock:
                copy_state.update(space_warning=True,
                                  space_needed=bytes_total,
                                  space_free=dst_free)
            add_log(
                f'⚠ Nicht genug Speicherplatz! '
                f'Benötigt: {_fmt_bytes(bytes_total)}, '
                f'Verfügbar: {_fmt_bytes(dst_free)} – '
                f'Quelle wird nicht gelöscht'
            )
        save_state()

        # -- Phase 1: copy (all sources) -------------------------------------
        dup_mode = cfg.get('duplicate_handling', 'skip')
        all_copied_pairs = []  # (src_file, dst_file) of successful copies
        skipped = 0
        io_errors = 0
        global_done = 0  # processed-file counter across all sources

        for src_dev_i, src_path_i, files_i, dst_dir_i, _ in source_data:
            if len(src_devs) > 1:
                add_log(f'Kopiere: {src_dev_i["label"]}')
            for f in files_i:
                with copy_lock:
                    cancelled = not copy_state['running']
                if cancelled:
                    add_log('Abgebrochen')
                    return
                global_done += 1
                rel = f.relative_to(src_path_i)
                dst_f = dst_dir_i / rel
                try:
                    dst_f.parent.mkdir(parents=True, exist_ok=True)
                except OSError as mkdir_err:
                    io_errors += 1
                    add_log(f'⚠ Verzeichnis nicht erstellbar ({dst_f.parent.name}): {mkdir_err}')
                    with copy_lock:
                        copy_state.update(done=global_done,
                                          progress=int(global_done/total*100) if total else 100,
                                          current=str(f.name))
                    continue

                if dst_f.exists():
                    if dup_mode == 'skip':
                        # Same size => treat as already copied; size mismatch
                        # => assume an earlier interrupted copy and redo it.
                        if dst_f.stat().st_size == f.stat().st_size:
                            skipped += 1
                            with copy_lock:
                                copy_state.update(done=global_done,
                                                  progress=int(global_done/total*100) if total else 100,
                                                  current=str(f.name))
                            continue
                        else:
                            add_log(f'Unvollständige Datei, wird neu kopiert: {f.name}')
                    elif dup_mode == 'rename':
                        dst_f = _unique_path(dst_f)

                fsize = f.stat().st_size
                # Copy to a temp name, then atomically rename into place so a
                # crash never leaves a half-written file under the final name.
                tmp_f = dst_f.with_name(dst_f.name + '.picopy_tmp')
                try:
                    shutil.copy2(f, tmp_f)
                    os.replace(str(tmp_f), str(dst_f))
                except OSError as copy_err:
                    try:
                        tmp_f.unlink(missing_ok=True)
                    except Exception:
                        pass
                    io_errors += 1
                    add_log(f'⚠ Fehler bei {f.name}: {copy_err}')
                    with copy_lock:
                        copy_state.update(done=global_done,
                                          progress=int(global_done/total*100) if total else 100,
                                          current=str(f.name))
                    continue
                all_copied_pairs.append((f, dst_f))

                # Update byte counters, speed and ETA under the lock.
                with copy_lock:
                    copy_state['bytes_done'] += fsize
                    copy_state['last_success_file'] = str(dst_f)
                    bd = copy_state['bytes_done']
                    bt = copy_state['bytes_total']
                    elapsed = time.time() - copy_state['start_ts']
                    speed = bd / elapsed if elapsed > 1 else 0
                    eta = int((bt - bd) / speed) if speed > 0 and bt > bd else 0
                    copy_state.update(done=global_done,
                                      progress=int(global_done/total*100) if total else 100,
                                      current=str(f.name), speed_bps=int(speed), eta_sec=eta)
                # Persist progress periodically (save_state takes the lock itself).
                if global_done % 20 == 0:
                    save_state()

        msg_parts = [f'{len(all_copied_pairs)} kopiert']
        if skipped:
            msg_parts.append(f'{skipped} übersprungen')
        if io_errors:
            msg_parts.append(f'{io_errors} Fehler (I/O)')

        # -- Phase 2: verify --------------------------------------------------
        verify_errors = 0
        verified_pairs = list(all_copied_pairs)  # default: everything counts as verified

        if cfg.get('verify_checksum') and all_copied_pairs:
            with copy_lock:
                copy_state.update(phase='verify', progress=0, done=0,
                                  total=len(all_copied_pairs), current='',
                                  eta_sec=None, speed_bps=0)
            add_log(f'Verifiziere {len(all_copied_pairs)} Dateien...')
            verified_pairs = []

            for i, (src_f, dst_f) in enumerate(all_copied_pairs):
                with copy_lock:
                    cancelled = not copy_state['running']
                    if not cancelled:
                        copy_state.update(done=i+1,
                                          progress=int((i+1)/len(all_copied_pairs)*100),
                                          current=src_f.name)
                if cancelled:
                    add_log('Abgebrochen')
                    return
                if _file_md5(src_f) == _file_md5(dst_f):
                    verified_pairs.append((src_f, dst_f))
                else:
                    # Checksum mismatch: drop the bad copy from the destination.
                    verify_errors += 1
                    add_log(f'⚠ Prüfsummenfehler: {src_f.name}')
                    try:
                        dst_f.unlink()
                    except Exception:
                        pass

            if verify_errors:
                msg_parts.append(f'{verify_errors} Prüfsummenfehler!')
                add_log(f'Verifizierung: {verify_errors} Fehler!')
            else:
                add_log(f'Alle {len(verified_pairs)} Dateien verifiziert ✓')

        # -- Phase 3: delete source ------------------------------------------
        # Only delete when space sufficed and nothing failed verification.
        if cfg.get('delete_source') and verified_pairs:
            with copy_lock:
                _space_warn = copy_state.get('space_warning', False)
            if _space_warn:
                add_log('Quelldateien NICHT gelöscht (Speicherplatz unzureichend)')
            elif verify_errors:
                add_log('Quelldateien NICHT gelöscht (Prüfsummenfehler)')
            else:
                with copy_lock:
                    copy_state.update(phase='delete', current='')
                add_log(f'Lösche {len(verified_pairs)} Quelldateien...')
                del_errors = 0
                for src_f, _ in verified_pairs:
                    try:
                        src_f.unlink()
                    except Exception as e:
                        del_errors += 1
                        log.warning(f'Löschen fehlgeschlagen: {src_f}: {e}')
                if del_errors:
                    msg_parts.append(f'{del_errors} Löschfehler')
                else:
                    add_log('Quelle geleert ✓')

        # Flush filesystem buffers, then clear the incomplete markers.
        subprocess.run(['sync'], capture_output=True)
        for _, _, _, _, incomplete_marker_i in source_data:
            try:
                incomplete_marker_i.unlink(missing_ok=True)
            except Exception:
                pass

        with copy_lock:
            copy_state['last_copy'] = datetime.now().isoformat()
            _hist['bytes'] = copy_state['bytes_done']
            _hist.update(ok=True, copied=len(all_copied_pairs),
                         skipped=skipped, errors=io_errors)
        add_log('Fertig! ' + ', '.join(msg_parts))

        # Optional NAS upload of the freshly transferred files (background).
        dst_dir_root = Path(dst_mp) / date_str
        upload_files = [dst_f for _, dst_f in verified_pairs if dst_f.exists()]
        if upload_files:
            from picopy.upload import run_uploads
            _upload_thread = threading.Thread(
                target=run_uploads,
                args=(dst_dir_root, cfg, upload_files),
                daemon=True
            )
            _upload_thread.start()
        elif any(t.get('enabled') for t in cfg.get('upload_targets', [])):
            add_log('NAS-Upload: keine neu auf das Ziel übertragenen Dateien')

    except Exception as e:
        log.exception('Copy failed')
        with copy_lock:
            copy_state['error'] = str(e)
            _hist['error_msg'] = str(e)
        add_log(f'Fehler: {e}')

    finally:
        # Wait for the NAS upload to finish before unmounting anything.
        if _upload_thread is not None and _upload_thread.is_alive():
            add_log('Warte auf NAS-Upload vor Unmount...')
            _upload_thread.join()
        subprocess.run(['sync'], capture_output=True)
        # Unmount only what this function mounted itself ("owned" mounts).
        for _, src_mp_i, src_owned_i in src_mounts:
            if src_owned_i and src_mp_i:
                subprocess.run(['umount', src_mp_i], capture_output=True)
        if dst_owned and dst_mp:
            subprocess.run(['umount', dst_mp], capture_output=True)
        with copy_lock:
            copy_state['running'] = False
            copy_state['current'] = ''
            copy_state['phase'] = 'idle'
        save_state()
        # Persist one history entry for this run (success or failure).
        append_history({
            'ts': datetime.now().isoformat(),
            'duration': int(time.time() - _hist['start']),
            'sources': [d.get('label', d.get('device', '?')) for d in src_devs],
            'dest': dst_dev.get('label', dst_dev.get('device', '?')) if dst_dev else '?',
            'copied': _hist['copied'],
            'skipped': _hist['skipped'],
            'errors': _hist['errors'],
            'bytes': _hist['bytes'],
            'ok': _hist['ok'],
            'error': _hist['error_msg'],
        })
|
||||
|
||||
|
||||
def check_auto_copy():
    """Start a background copy when configured sources and destination are present.

    Does nothing when auto-copy is disabled, no sources/destination are
    configured, a copy is already running, or a previous error is pending.
    """
    cfg = load_cfg()
    src_ports = _resolve_source_ports(cfg)
    if not (cfg.get('auto_copy') and src_ports):
        return
    if cfg.get('dest_type') != 'internal' and not cfg.get('dest_port'):
        return
    with copy_lock:
        busy = copy_state['running'] or copy_state['error']
    if busy:
        return
    devs = usb_devices()
    connected = []
    for sp in src_ports:
        match = next((d for d in devs if d['usb_port'] == sp['port']), None)
        if match is not None:
            connected.append(match)
    dst = _configured_destination(cfg, devs)
    if connected and dst:
        log.info(f'Auto-Copy: {len(connected)} Quelle(n) und Ziel verbunden')
        threading.Thread(target=do_copy, args=(connected, dst, cfg), daemon=True).start()
|
||||
|
||||
|
||||
def usb_monitor():
    """Watch udev block-device events and trigger auto-copy on insertion.

    Blocks forever polling the udev netlink monitor; logs a warning and
    returns when pyudev is not installed.
    """
    try:
        import pyudev
        context = pyudev.Context()
        monitor = pyudev.Monitor.from_netlink(context)
        monitor.filter_by(subsystem='block', device_type='disk')
        for device in iter(monitor.poll, None):
            if device.action == 'add':
                log.info(f'USB eingesteckt: {device.device_node}')
                # Short delay so partitions/filesystems are ready before copying.
                threading.Timer(3.0, check_auto_copy).start()
    except ImportError:
        log.warning('pyudev nicht verfügbar')
|
||||
151
picopy/samba.py
Normal file
151
picopy/samba.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""PiCopy – Interner Speicher/Samba: internal_share_state, alle Samba-Funktionen."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import INTERNAL_DEST_DIR, load_cfg, save_cfg, log
|
||||
|
||||
# Samba config file and the markers delimiting the PiCopy-managed block in it.
SAMBA_CONF = Path('/etc/samba/smb.conf')
SAMBA_BEGIN = '# BEGIN PICOPY INTERNAL SHARE'
SAMBA_END = '# END PICOPY INTERNAL SHARE'

# Snapshot of the internal-storage Samba share, served to the web UI.
internal_share_state = {
    'installed': False,    # smbd binary present on PATH?
    'enabled': False,      # share enabled in config.json?
    'active': False,       # smbd systemd unit currently active?
    'path': str(INTERNAL_DEST_DIR),
    'share': 'PiCopy',     # Samba share name (matches the [PiCopy] section)
    'pkg_running': False,  # apt-get install of samba in progress?
    'pkg_error': None,     # last package-installation error, if any
    'error': None,         # last share-configuration error, if any
}
# Guards internal_share_state across worker and web threads.
internal_share_lock = threading.Lock()
|
||||
|
||||
|
||||
def _internal_usage():
    """Return path/total/used/free disk stats for the internal storage dir.

    Creates the directory first so disk_usage never fails on a fresh install.
    """
    INTERNAL_DEST_DIR.mkdir(parents=True, exist_ok=True)
    du = shutil.disk_usage(INTERNAL_DEST_DIR)
    return {
        'path': str(INTERNAL_DEST_DIR),
        'total': du.total,
        'used': du.used,
        'free': du.free,
    }
|
||||
|
||||
|
||||
def smbd_installed():
    """Return True when the smbd binary is available on PATH."""
    return bool(shutil.which('smbd'))
|
||||
|
||||
|
||||
def _systemctl(*args, timeout=20):
|
||||
try:
|
||||
return subprocess.run(['systemctl'] + list(args), capture_output=True,
|
||||
text=True, timeout=timeout)
|
||||
except Exception as e:
|
||||
return subprocess.CompletedProcess(['systemctl'] + list(args), 1,
|
||||
stdout='', stderr=str(e))
|
||||
|
||||
|
||||
def _smbd_active():
    """Return True when the smbd systemd unit reports 'active'."""
    if not smbd_installed():
        return False
    result = _systemctl('is-active', 'smbd', timeout=5)
    if result.returncode != 0:
        return False
    return result.stdout.strip() == 'active'
|
||||
|
||||
|
||||
def internal_share_update_state():
    """Refresh internal_share_state from config and system, return a copy."""
    cfg = load_cfg()
    usage = _internal_usage()
    fresh = dict(
        installed=smbd_installed(),
        enabled=bool(cfg.get('internal_share_enabled')),
        active=_smbd_active(),
        path=usage['path'],
        total=usage['total'],
        used=usage['used'],
        free=usage['free'],
    )
    with internal_share_lock:
        internal_share_state.update(fresh)
        return dict(internal_share_state)
|
||||
|
||||
|
||||
def _write_samba_share(enabled: bool):
    """Add or remove the PiCopy share block in /etc/samba/smb.conf.

    Any previous marker-delimited PiCopy block is stripped first; when
    *enabled*, a fresh read-only guest share for INTERNAL_DEST_DIR is
    appended.  The config file is replaced atomically via os.replace().
    """
    old = SAMBA_CONF.read_text(encoding='utf-8') if SAMBA_CONF.exists() else ''
    # DOTALL so the BEGIN..END block is matched across lines.
    pattern = re.compile(rf'\n?{re.escape(SAMBA_BEGIN)}.*?{re.escape(SAMBA_END)}\n?', re.S)
    cleaned = pattern.sub('\n', old).rstrip() + '\n'
    if enabled:
        INTERNAL_DEST_DIR.mkdir(parents=True, exist_ok=True)
        INTERNAL_DEST_DIR.chmod(0o755)  # readable by the Samba guest user
        block = f"""
{SAMBA_BEGIN}
[PiCopy]
path = {INTERNAL_DEST_DIR}
browseable = yes
read only = yes
guest ok = yes
force user = root
{SAMBA_END}
"""
        cleaned += block
    # Atomic replace so a crash never leaves a half-written smb.conf.
    tmp = SAMBA_CONF.with_suffix('.conf.picopy_tmp')
    tmp.write_text(cleaned, encoding='utf-8')
    os.replace(str(tmp), str(SAMBA_CONF))
|
||||
|
||||
|
||||
def _install_samba_if_needed():
    """Install the samba package via apt-get when smbd is missing.

    Returns ``(ok, error_message)``.  Progress and errors are mirrored
    into internal_share_state ('pkg_running' / 'pkg_error') for the UI;
    'pkg_running' is always cleared in the finally block.
    """
    if smbd_installed():
        return True, ''
    with internal_share_lock:
        internal_share_state.update(pkg_running=True, pkg_error=None)
    try:
        r = subprocess.run(['apt-get', 'install', '-y', 'samba'],
                           capture_output=True, text=True, timeout=300,
                           env={**os.environ, 'DEBIAN_FRONTEND': 'noninteractive'})
        if r.returncode != 0:
            # Surface the last stderr line as a compact error message.
            err = (r.stderr.strip().splitlines()[-1]
                   if r.stderr.strip() else 'samba-Installation fehlgeschlagen')
            with internal_share_lock:
                internal_share_state['pkg_error'] = err
            return False, err
        return True, ''
    except Exception as e:
        with internal_share_lock:
            internal_share_state['pkg_error'] = str(e)
        return False, str(e)
    finally:
        with internal_share_lock:
            internal_share_state['pkg_running'] = False
|
||||
|
||||
|
||||
def set_internal_share_enabled(enabled: bool):
    """Enable or disable the internal Samba share.

    Returns ``(ok, error_message)``.  Enabling installs samba on demand;
    disabling while samba is absent only persists the disabled flag.  On
    success the config flag is saved and the cached state refreshed.
    """
    ok, err = (True, '')
    if enabled:
        ok, err = _install_samba_if_needed()
        if not ok:
            return False, err
    elif not smbd_installed():
        # samba not installed: nothing to reconfigure, just persist the flag.
        cfg = load_cfg()
        cfg['internal_share_enabled'] = False
        save_cfg(cfg)
        internal_share_update_state()
        return True, ''
    try:
        _write_samba_share(enabled)
        if enabled:
            _systemctl('enable', '--now', 'smbd', timeout=60)
            _systemctl('restart', 'smbd', timeout=60)
        else:
            # Restart so the removed share block takes effect.
            _systemctl('restart', 'smbd', timeout=60)
        cfg = load_cfg()
        cfg['internal_share_enabled'] = enabled
        save_cfg(cfg)
        internal_share_update_state()
        return True, ''
    except Exception as e:
        with internal_share_lock:
            internal_share_state['error'] = str(e)
        return False, str(e)
|
||||
73
picopy/state.py
Normal file
73
picopy/state.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""PiCopy – Kopierstatus, Verlauf, add_log."""
|
||||
|
||||
import json
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import (
|
||||
STATE_FILE, HISTORY_FILE, MAX_HISTORY,
|
||||
_atomic_write, log
|
||||
)
|
||||
|
||||
# Shared progress/status of the copy engine; accessed under copy_lock.
copy_state = {
    'running': False, 'progress': 0,       # active flag, percent complete
    'total': 0, 'done': 0, 'current': '',  # file counts and current file name
    'error': None, 'last_copy': None, 'logs': [],
    'bytes_total': 0, 'bytes_done': 0,
    'start_ts': None, 'eta_sec': None, 'speed_bps': 0,
    'phase': 'idle',                       # 'idle' | 'copy' | 'verify' | 'delete'
    'space_warning': False, 'space_needed': 0, 'space_free': 0,
    'last_success_file': '',               # path of the last successfully copied file
}
# Protects copy_state (including its 'logs' list) across worker and web threads.
copy_lock = threading.Lock()
|
||||
|
||||
|
||||
def load_state():
    """Restore copy_state from disk; a corrupt file is renamed aside.

    The restored state is always forced to 'not running' so a restart
    never resumes as if a copy were still in progress.
    """
    global copy_state
    try:
        if not STATE_FILE.exists():
            return
        persisted = json.loads(STATE_FILE.read_text(encoding='utf-8'))
        persisted['running'] = False
        persisted['current'] = ''
        copy_state.update(persisted)
    except (json.JSONDecodeError, ValueError) as e:
        log.warning(f'state.json korrupt ({e}), starte mit leerem Zustand')
        try:
            STATE_FILE.rename(STATE_FILE.with_suffix('.corrupt'))
        except Exception:
            pass
    except Exception as e:
        log.warning(f'state.json nicht lesbar: {e}')
|
||||
|
||||
|
||||
def save_state():
    """Best-effort persist of copy_state (snapshot taken under the lock)."""
    try:
        with copy_lock:
            snapshot = dict(copy_state)
        _atomic_write(STATE_FILE, json.dumps(snapshot))
    except Exception:
        pass  # persisting state must never break a running copy
|
||||
|
||||
|
||||
def load_history() -> list:
    """Return the saved history list, or [] when missing or unreadable."""
    history: list = []
    try:
        if HISTORY_FILE.exists():
            history = json.loads(HISTORY_FILE.read_text(encoding='utf-8'))
    except Exception:
        history = []
    return history
|
||||
|
||||
|
||||
def append_history(entry: dict):
    """Prepend *entry* to the on-disk history, keeping at most MAX_HISTORY items."""
    entries = [entry] + load_history()
    try:
        _atomic_write(HISTORY_FILE, json.dumps(entries[:MAX_HISTORY]))
    except Exception as e:
        log.warning(f'Verlauf speichern fehlgeschlagen: {e}')
|
||||
|
||||
|
||||
def add_log(msg):
    """Log *msg* and append it to the in-memory UI log (last 200 entries).

    Acquires copy_lock itself — callers must not hold the lock.
    """
    log.info(msg)
    stamp = datetime.now().strftime('%H:%M:%S')
    with copy_lock:
        copy_state['logs'].append({'t': stamp, 'm': msg})
        copy_state['logs'] = copy_state['logs'][-200:]
|
||||
222
picopy/system.py
Normal file
222
picopy/system.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""PiCopy – Systeminfo, Format-Drives, Update-System."""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
import urllib.request as _urlreq
|
||||
|
||||
from picopy.config import BASE_DIR, RAW_BASE, VERSION, _atomic_write, log
|
||||
from picopy.state import copy_state
|
||||
|
||||
# State of the self-update check, shown in the web UI.
update_state = {
    'current': VERSION,    # installed version
    'latest': None,        # newest version string seen on the server
    'available': False,    # latest > current?
    'checking': False,     # a check is currently in progress
    'error': None,         # last check error, if any
    'last_checked': None,  # ISO timestamp of the last successful check
}
# Guards update_state across web and background threads.
update_lock = threading.Lock()

# State of a running/finished drive-format operation.
format_state = {'running': False, 'error': None, 'done': False, 'fs': '', 'device': ''}

# Supported target filesystems for the format feature.
# 'cmd' builds the mkfs invocation; 'pkg' is the Debian package providing it.
FORMAT_FILESYSTEMS = {
    'exfat': {
        'label': 'exFAT',
        'desc': 'Empfohlen – Mac & Windows, keine 4-GB-Dateigrößenbeschränkung',
        'cmd': lambda dev, name: ['mkfs.exfat', '-n', name, dev],
        'pkg': 'exfatprogs',
    },
    'fat32': {
        'label': 'FAT32',
        'desc': 'Mac & Windows, max. 4 GB pro Datei',
        # Label truncated to 11 chars (FAT32 volume-label limit).
        'cmd': lambda dev, name: ['mkfs.vfat', '-F', '32', '-n', name[:11], dev],
        'pkg': 'dosfstools',
    },
    'ntfs': {
        'label': 'NTFS',
        'desc': 'Windows nativ, Mac nur lesen',
        # -f = fast format; label truncated to 32 chars (NTFS limit).
        'cmd': lambda dev, name: ['mkfs.ntfs', '-f', '-L', name[:32], dev],
        'pkg': 'ntfs-3g',
    },
}

# All files that must be downloaded during a self-update.
UPDATE_FILES = [
    'app.py',
    'version.txt',
    'PiCopy_Logo.png',
    'picopy/__init__.py',
    'picopy/config.py',
    'picopy/state.py',
    'picopy/usb.py',
    'picopy/copy_engine.py',
    'picopy/wifi.py',
    'picopy/wireguard.py',
    'picopy/samba.py',
    'picopy/upload.py',
    'picopy/system.py',
    'routes/__init__.py',
    'routes/copy_routes.py',
    'routes/wifi_routes.py',
    'routes/wireguard_routes.py',
    'routes/upload_routes.py',
    'routes/system_routes.py',
    'routes/browse_routes.py',
    'templates/index.html',
]
|
||||
|
||||
|
||||
def get_sysinfo() -> dict:
    """Collect CPU temperature, RAM usage and root-disk usage.

    Every section is independent; unreadable sources yield None values
    so the returned dict always carries the full set of keys.
    """
    info: dict = {}
    # CPU temperature (Raspberry Pi thermal zones); first readable zone wins.
    for zone_path in ('/sys/class/thermal/thermal_zone0/temp',
                      '/sys/class/thermal/thermal_zone1/temp'):
        try:
            millideg = int(Path(zone_path).read_text().strip())
        except Exception:
            info['cpu_temp'] = None
            continue
        info['cpu_temp'] = round(millideg / 1000, 1)
        break
    # RAM from /proc/meminfo (values are in KiB).
    try:
        meminfo: dict = {}
        for line in Path('/proc/meminfo').read_text().splitlines():
            fields = line.split()
            if len(fields) >= 2:
                meminfo[fields[0].rstrip(':')] = int(fields[1])
        total_kib = meminfo.get('MemTotal', 0)
        used_kib = total_kib - meminfo.get('MemAvailable', 0)
        info['ram_total'] = round(total_kib / 1024)
        info['ram_used'] = round(used_kib / 1024)
        info['ram_pct'] = round(used_kib / total_kib * 100) if total_kib else 0
    except Exception:
        info['ram_total'] = info['ram_used'] = info['ram_pct'] = None
    # Root filesystem (SD card) usage in GB.
    try:
        du = shutil.disk_usage('/')
        info['disk_total'] = round(du.total / 1e9, 1)
        info['disk_used'] = round(du.used / 1e9, 1)
        info['disk_pct'] = round(du.used / du.total * 100) if du.total else 0
    except Exception:
        info['disk_total'] = info['disk_used'] = info['disk_pct'] = None
    return info
|
||||
|
||||
|
||||
def _vtuple(v):
|
||||
try:
|
||||
return tuple(int(x) for x in v.strip().lstrip('v').split('.'))
|
||||
except Exception:
|
||||
return (0,)
|
||||
|
||||
|
||||
def check_for_updates():
    """Fetch RAW_BASE/version.txt and refresh update_state accordingly.

    Re-entry safe: returns immediately when a check is already running
    (the 'checking' flag is set/cleared under update_lock).  Network
    errors are recorded in update_state['error'] instead of raised.
    """
    with update_lock:
        if update_state['checking']:
            return  # another thread is already checking
        update_state['checking'] = True
        update_state['error'] = None

    try:
        req = _urlreq.urlopen(f'{RAW_BASE}/version.txt', timeout=10)
        latest = req.read().decode().strip()
        # Compare numerically so e.g. 1.10 > 1.9.
        avail = _vtuple(latest) > _vtuple(VERSION)
        with update_lock:
            update_state.update(latest=latest, available=avail,
                                last_checked=datetime.now().isoformat())
        if avail:
            log.info(f'Update verfügbar: {VERSION} -> {latest}')
    except Exception as e:
        with update_lock:
            update_state['error'] = str(e)
        log.warning(f'Update-Check fehlgeschlagen: {e}')
    finally:
        with update_lock:
            update_state['checking'] = False
|
||||
|
||||
|
||||
def update_check_loop():
    """Background loop: check for updates shortly after start, then every 6 hours."""
    interval = 6 * 3600
    time.sleep(5)  # one-off check shortly after service start
    while True:
        check_for_updates()
        time.sleep(interval)
|
||||
|
||||
|
||||
def install_update():
    """Download all module files, validate, and swap them in atomically.

    Order matters: version.txt, app.py (syntax-checked via compile())
    and the logo are fetched up front so an obviously broken release
    aborts before any file on disk is touched.  Each file is then
    written to a ``.tmp`` sibling, fsynced and os.replace()d into place.
    Finally the picopy service is restarted to load the new code.
    Raises on network/validation errors before any file was replaced.
    """
    log.info('Update wird heruntergeladen...')

    # Fetch the new version string first.
    vreq = _urlreq.urlopen(f'{RAW_BASE}/version.txt', timeout=10)
    new_version = vreq.read().decode().strip()

    # Download app.py and verify that it at least parses.
    req = _urlreq.urlopen(f'{RAW_BASE}/app.py', timeout=60)
    new_app_code = req.read().decode()
    compile(new_app_code, 'app.py', 'exec')  # raises SyntaxError on bad code

    # Logo (binary asset).
    logo_req = _urlreq.urlopen(f'{RAW_BASE}/PiCopy_Logo.png', timeout=30)
    logo_data = logo_req.read()

    # Write all files.
    for rel_path in UPDATE_FILES:
        dest = BASE_DIR / rel_path
        dest.parent.mkdir(parents=True, exist_ok=True)

        url = f'{RAW_BASE}/{rel_path}'
        if rel_path == 'app.py':
            content_bytes = new_app_code.encode('utf-8')
        elif rel_path == 'version.txt':
            content_bytes = (new_version + '\n').encode('utf-8')
        elif rel_path == 'PiCopy_Logo.png':
            content_bytes = logo_data
        else:
            try:
                r = _urlreq.urlopen(url, timeout=60)
                content_bytes = r.read()
            except Exception as e:
                # Download failure for one file: keep the old version on disk.
                log.warning(f'Update: {rel_path} konnte nicht heruntergeladen werden: {e}')
                continue

        # Atomic per-file replace: tmp file + fsync + rename.
        tmp = dest.with_suffix(dest.suffix + '.tmp')
        tmp.write_bytes(content_bytes)
        with open(tmp, 'rb') as fh:
            os.fsync(fh.fileno())
        os.replace(str(tmp), str(dest))

    log.info('Update installiert - starte Dienst neu...')
    # Popen (not run): the restart kills this very process, so don't wait.
    subprocess.Popen(['systemctl', 'restart', 'picopy'])
|
||||
|
||||
|
||||
def do_format(fs: str, name: str, dev: str):
    """Format drive *dev* with filesystem *fs* and volume label *name*.

    Runs in a worker thread; outcome is reported solely through the shared
    format_state dict (running / error / done). The device is unmounted
    first (best effort).
    """
    format_state.update(running=True, error=None, done=False, fs=fs, device=dev)
    try:
        # Unmount first in case the device is currently mounted
        subprocess.run(['umount', dev], capture_output=True)

        cmd = FORMAT_FILESYSTEMS[fs]['cmd'](dev, name)
        r = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
        if r.returncode != 0:
            err = r.stderr.strip() or r.stdout.strip() or 'Unbekannter Fehler'
            # Helpful message when the mkfs tool's package is missing
            pkg = FORMAT_FILESYSTEMS[fs]['pkg']
            if 'not found' in err or r.returncode == 127:
                err = f'Befehl nicht gefunden – bitte installieren: apt install {pkg}'
            format_state.update(error=err)
            return
        format_state.update(done=True)
        log.info(f'Formatierung {fs} auf {dev} abgeschlossen')
    except subprocess.TimeoutExpired:
        format_state.update(error='Timeout – Formatierung dauerte zu lange')
    except Exception as e:
        format_state.update(error=str(e))
    finally:
        # Always clear the running flag, even on unexpected errors
        format_state['running'] = False
|
||||
379
picopy/upload.py
Normal file
379
picopy/upload.py
Normal file
@@ -0,0 +1,379 @@
|
||||
"""PiCopy – NAS-Upload (rclone): upload_state, upload_lock, alle rclone-Helpers, run_uploads."""
|
||||
|
||||
import json
|
||||
import posixpath
|
||||
import re
|
||||
import select
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import BASE_DIR, load_cfg, log
|
||||
from picopy.state import add_log
|
||||
|
||||
RCLONE_CONF = BASE_DIR / 'rclone.conf'
|
||||
|
||||
# Shared progress/status of the NAS upload worker; guard all access with upload_lock.
upload_state = {
    'running': False,      # an upload pass is currently active
    'current': '',         # name of the target currently being uploaded to
    'results': [],         # per-target results: {'name', 'ok', 'msg'}
    'progress': 0,         # overall progress in percent (0-100)
    'total': 0,            # number of files to transfer for the current target
    'done': 0,             # files handled so far (including skipped ones)
    'bytes_total': 0,      # total payload size in bytes
    'bytes_done': 0,       # bytes transferred (or skipped) so far
    'current_file': '',    # relative path of the file being transferred
    'eta_sec': None,       # estimated remaining seconds (None = unknown)
    'speed_bps': 0,        # current throughput in bytes per second
}
upload_lock = threading.Lock()
|
||||
|
||||
|
||||
def _rclone(*args, timeout=60):
    """Run an rclone command with the PiCopy config file; never raises.

    Returns the CompletedProcess, or a synthetic one (returncode 1, error
    text in stderr) on timeout or any other failure.
    """
    cmd = ['rclone', '--config', str(RCLONE_CONF), *args]
    try:
        return subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
    except subprocess.TimeoutExpired:
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=f'Timeout nach {timeout}s')
    except Exception as exc:
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=str(exc))
|
||||
|
||||
|
||||
def _rclone_obscure(pw):
    """Return *pw* encoded with `rclone obscure` (rclone's password format)."""
    result = subprocess.run(['rclone', 'obscure', pw],
                            capture_output=True, text=True, timeout=10)
    return result.stdout.strip()
|
||||
|
||||
|
||||
def _parse_percent(text: str):
|
||||
m = re.search(r'(\d+(?:\.\d+)?)%', text)
|
||||
if not m:
|
||||
return None
|
||||
try:
|
||||
return max(0.0, min(100.0, float(m.group(1))))
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _rclone_copyto_progress(src: Path, dest: str, base_done: int,
                            file_size: int, total_bytes: int, start_ts: float,
                            timeout: int = 7200):
    """Copy one file via `rclone copyto`, streaming progress into upload_state.

    rclone's --stats output arrives on stderr as \\r-terminated lines, so the
    stream is read character-by-character with select(); each completed line
    is scanned for a percentage which is converted into absolute byte counts
    and written into upload_state (bytes_done / progress / speed_bps /
    eta_sec) under upload_lock.

    base_done   -- bytes already uploaded before this file started
    file_size   -- size of *src* in bytes
    total_bytes -- total payload of the whole upload pass
    start_ts    -- timestamp when the pass started (for speed/ETA estimates)

    Returns a CompletedProcess; never raises (timeout and unexpected errors
    are mapped to returncode 1 with the message in stderr).
    """
    args = [
        'rclone', '--config', str(RCLONE_CONF),
        'copyto', str(src), dest,
        '--retries', '1',
        '--progress',
        '--stats', '1s',
        '--stats-one-line',
    ]
    try:
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             text=True, bufsize=1)
        started = time.time()
        stderr_parts = []
        buf = ''  # accumulates the current stats line up to \r or \n
        while True:
            if p.poll() is not None:
                break
            if time.time() - started > timeout:
                p.kill()
                return subprocess.CompletedProcess(args, 1, stdout='', stderr=f'Timeout nach {timeout}s')

            # Non-blocking wait for stderr data (0.2 s), then back off briefly
            ready, _, _ = select.select([p.stderr], [], [], 0.2) if p.stderr else ([], [], [])
            if not ready:
                time.sleep(0.1)
                continue
            chunk = p.stderr.read(1)
            if not chunk:
                continue
            stderr_parts.append(chunk)
            if chunk not in ('\r', '\n'):
                buf += chunk
                continue

            # End of a stats line: parse the percentage and publish progress
            pct = _parse_percent(buf)
            buf = ''
            if pct is not None:
                transferred = int(file_size * pct / 100)
                bytes_done = base_done + transferred
                elapsed = time.time() - start_ts
                # Only trust the rate after the first second to avoid spikes
                speed = bytes_done / elapsed if elapsed > 1 else 0
                eta = int((total_bytes - bytes_done) / speed) if speed > 0 and total_bytes > bytes_done else 0
                with upload_lock:
                    upload_state.update(bytes_done=bytes_done,
                                        progress=int(bytes_done / total_bytes * 100) if total_bytes else 100,
                                        speed_bps=int(speed), eta_sec=eta)

        # Process exited: drain remaining output
        stdout, stderr_tail = p.communicate(timeout=5)
        if stderr_tail:
            stderr_parts.append(stderr_tail)
        return subprocess.CompletedProcess(args, p.returncode, stdout=stdout or '',
                                           stderr=''.join(stderr_parts))
    except subprocess.TimeoutExpired:
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=f'Timeout nach {timeout}s')
    except Exception as e:
        return subprocess.CompletedProcess(args, 1, stdout='', stderr=str(e))
|
||||
|
||||
|
||||
def _remote_name(tid):
|
||||
return f'picopy_{tid}'
|
||||
|
||||
|
||||
def _join_remote_path(*parts) -> str:
|
||||
return '/'.join(str(p).strip('/') for p in parts if str(p).strip('/'))
|
||||
|
||||
|
||||
def _remote_exists(remote_path: str) -> bool:
    """True when *remote_path* exists on the remote (its size can be read)."""
    size = _remote_size(remote_path)
    return size is not None
|
||||
|
||||
|
||||
def _remote_size(remote_path: str):
|
||||
r = _rclone('lsjson', remote_path, timeout=20)
|
||||
if r.returncode != 0:
|
||||
return None
|
||||
try:
|
||||
data = json.loads(r.stdout or '[]')
|
||||
if isinstance(data, dict):
|
||||
return data.get('Size')
|
||||
if isinstance(data, list) and data:
|
||||
item = data[0]
|
||||
return item.get('Size') if isinstance(item, dict) else None
|
||||
return None
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _remote_unique_rel_path(t: dict, rel_path: str) -> str:
    """Return *rel_path*, or a '_(n)'-suffixed variant not yet present on
    SMB target *t* (used by duplicate-handling mode 'rename')."""
    if not _remote_exists(_smb_conn(t, rel_path)):
        return rel_path

    folder = posixpath.dirname(rel_path)
    base = posixpath.basename(rel_path)
    stem, ext = posixpath.splitext(base)
    counter = 1
    candidate = _join_remote_path(folder, f'{stem}_({counter}){ext}')
    while _remote_exists(_smb_conn(t, candidate)):
        counter += 1
        candidate = _join_remote_path(folder, f'{stem}_({counter}){ext}')
    return candidate
|
||||
|
||||
|
||||
def _smb_conn(t: dict, path: str = '') -> str:
    """Build an rclone destination string for a stored SMB target.

    With rclone's SMB backend the share is the first path segment after the
    remote name (remote:share/folder); host and login live in the remote
    configuration. Targets with an ``id`` use their named remote; otherwise
    an inline ``:smb,...`` connection string is assembled from the dict.
    """
    remote_path = _join_remote_path(t.get('smb_share', ''), path)
    target_id = t.get('id')
    if target_id:
        return f'{_remote_name(target_id)}:{remote_path}'

    host = t.get('smb_host', '')
    if not host:
        return f':{remote_path}'
    pieces = [f':smb,host={host}']
    user = t.get('smb_user')
    if user:
        pieces.append(f',user={user}')
    password = t.get('smb_pass')
    if password:
        pieces.append(f',pass={password}')
    pieces.append(f':{remote_path}')
    return ''.join(pieces)
|
||||
|
||||
|
||||
def configure_smb_remote(tid, host, share, user, pw):
    """(Re)create the rclone SMB remote for target *tid*.

    The share itself is not stored in the remote — it becomes the first path
    segment at use time (see _smb_conn). Any existing remote with the same
    name is deleted first. Returns (ok, stderr_text).
    """
    remote = _remote_name(tid)
    _rclone('config', 'delete', remote)
    create_args = ['config', 'create', remote, 'smb', f'host={host}']
    if user:
        create_args.append(f'user={user}')
    if pw:
        create_args.append(f'pass={_rclone_obscure(pw)}')
    result = _rclone(*create_args)
    return result.returncode == 0, result.stderr.strip()
|
||||
|
||||
|
||||
def delete_remote(tid):
    """Remove the stored rclone remote for target *tid* (best effort)."""
    remote = _remote_name(tid)
    _rclone('config', 'delete', remote)
|
||||
|
||||
|
||||
def test_remote(tid):
    """Check connectivity and write access for upload target *tid*.

    Three steps against the stored SMB remote: list the share root, create
    the configured destination folder, and create + immediately delete a
    hidden test directory to prove write access.

    Returns (ok, error_message); error_message is '' on success.
    """
    cfg = load_cfg()
    targets = cfg.get('upload_targets', [])
    # Fall back to a minimal dict so _smb_conn still addresses the named remote
    t = next((x for x in targets if x['id'] == tid), {'id': tid})
    dest_root = t.get('dest_path', 'PiCopy').strip('/')
    root = _smb_conn(t)
    dest = _smb_conn(t, dest_root)
    test_dir_name = '.picopy_writetest'
    test_dir = _smb_conn(t, f'{dest_root}/{test_dir_name}' if dest_root else test_dir_name)
    # 1. Check the connection
    r = _rclone('lsd', root, timeout=15)
    if r.returncode != 0:
        err = r.stderr.strip().splitlines()[-1] if r.stderr.strip() else 'Verbindung fehlgeschlagen'
        return False, f'Verbindung: {err}'
    # 2. Check destination folder and write access: create the destination,
    #    then create a test directory and delete it again right away
    mk = _rclone('mkdir', dest, timeout=15)
    if mk.returncode != 0:
        err = mk.stderr.strip().splitlines()[-1] if mk.stderr.strip() else 'Zielordner konnte nicht angelegt werden'
        return False, f'Zielordner: {err}'
    rw = _rclone('mkdir', test_dir, timeout=15)
    if rw.returncode != 0:
        err = rw.stderr.strip().splitlines()[-1] if rw.stderr.strip() else 'Schreiben fehlgeschlagen'
        return False, f'Kein Schreibzugriff: {err}'
    _rclone('rmdir', test_dir, timeout=10)
    return True, ''
|
||||
|
||||
|
||||
def run_uploads(local_dir: Path, cfg: dict, upload_files=None):
    """Upload the most recently written local files to all enabled remote targets.

    local_dir    -- the locally created date folder whose contents to upload
    cfg          -- config snapshot from the copy run (used for duplicate_handling)
    upload_files -- optional explicit file list; when given, only files inside
                    local_dir are uploaded, otherwise everything under it

    Progress is published through the shared upload_state dict (guarded by
    upload_lock). Per target: connectivity check, destination mkdir, then a
    per-file copy loop honoring the duplicate mode ('skip' compares remote
    size, 'rename' finds a unique name). Aborts a target after 5 file errors.
    """
    # Reload the config so changes made in the meantime (e.g. a target
    # being disabled) are taken into account
    current_cfg = load_cfg()
    targets = [t for t in current_cfg.get('upload_targets', []) if t.get('enabled')]
    if not targets:
        return

    with upload_lock:
        upload_state.update(running=True, results=[], current='',
                            progress=0, total=0, done=0,
                            bytes_total=0, bytes_done=0,
                            current_file='', eta_sec=None, speed_bps=0)

    for t in targets:
        name = t.get('name', t['id'])
        with upload_lock:
            upload_state.update(current=name, progress=0, total=0, done=0,
                                bytes_total=0, bytes_done=0,
                                current_file='', eta_sec=None, speed_bps=0)

        add_log(f'Upload >> {name}...')
        dest_root = t.get('dest_path', 'PiCopy').strip('/')
        root = _smb_conn(t)
        # local_dir is the locally created date folder. The NAS should get
        # the same structure as the destination drive: dest/date/...
        dest_rel = _join_remote_path(dest_root, local_dir.name)
        dest = _smb_conn(t, dest_rel)
        share = t.get('smb_share', '')
        dest_label = _join_remote_path(share, dest_rel) or '/'
        add_log(f'Upload {name}: Ziel {dest_label}')

        # Check the source directory
        if not local_dir.exists():
            err = f'Quellverzeichnis nicht gefunden: {local_dir}'
            add_log(f'Upload {name}: ✗ {err}')
            with upload_lock:
                upload_state['results'].append({'name': name, 'ok': False, 'msg': err})
            continue

        # 1. Check the connection
        conn = _rclone('lsd', root, timeout=15)
        add_log(f'Upload {name}: Verbindung rc={conn.returncode}')
        if conn.returncode != 0:
            err = (conn.stderr.strip().splitlines()[-1] if conn.stderr.strip()
                   else 'NAS nicht erreichbar')
            add_log(f'Upload {name}: ✗ {err}')
            with upload_lock:
                upload_state['results'].append({'name': name, 'ok': False, 'msg': err})
            continue

        # 2. Create the destination folder
        mk = _rclone('mkdir', dest, timeout=30)
        add_log(f'Upload {name}: mkdir rc={mk.returncode}'
                + (f' err={mk.stderr.strip()[:100]}' if mk.returncode != 0 else ''))

        # 3. Copy with progress reporting
        add_log(f'Upload {name}: starte copy von {local_dir}')
        dup_mode = cfg.get('duplicate_handling', 'skip')
        if upload_files is None:
            files = sorted(f for f in local_dir.rglob('*') if f.is_file())
        else:
            # Keep only existing files that actually live inside local_dir
            files = []
            for f in upload_files:
                f = Path(f)
                try:
                    f.relative_to(local_dir)
                except ValueError:
                    continue
                if f.is_file():
                    files.append(f)
            files = sorted(files)
        # All parent directories that must exist on the remote
        dirs = sorted({p for f in files for p in f.relative_to(local_dir).parents
                       if str(p) != '.'})
        bytes_total = sum(f.stat().st_size for f in files)
        with upload_lock:
            upload_state.update(total=len(files), bytes_total=bytes_total,
                                progress=100 if not files else 0)

        for d in dirs:
            _rclone('mkdir', _smb_conn(t, _join_remote_path(dest_rel, d.as_posix())), timeout=30)

        errors = []
        skipped = 0
        start_ts = time.time()
        for idx, f in enumerate(files, start=1):
            rel = f.relative_to(local_dir).as_posix()
            fsize = f.stat().st_size
            remote_rel = _join_remote_path(dest_rel, rel)
            with upload_lock:
                upload_state.update(done=idx, current_file=rel,
                                    progress=int(idx / len(files) * 100) if files else 100)

            if dup_mode == 'skip':
                # Same size on the remote counts as "already uploaded"
                remote_size = _remote_size(_smb_conn(t, remote_rel))
                if remote_size == fsize:
                    skipped += 1
                    with upload_lock:
                        bd = upload_state['bytes_done'] + fsize
                        elapsed = time.time() - start_ts
                        speed = bd / elapsed if elapsed > 1 else 0
                        eta = int((bytes_total - bd) / speed) if speed > 0 and bytes_total > bd else 0
                        upload_state.update(bytes_done=bd,
                                            progress=int(bd / bytes_total * 100) if bytes_total else 100,
                                            speed_bps=int(speed), eta_sec=eta)
                    continue
            elif dup_mode == 'rename':
                remote_rel = _remote_unique_rel_path(t, remote_rel)

            with upload_lock:
                base_done = upload_state['bytes_done']
            rr = _rclone_copyto_progress(f, _smb_conn(t, remote_rel),
                                         base_done, fsize, bytes_total, start_ts)
            if rr.returncode != 0:
                errors.append(rr.stderr.strip() or f'{rel}: unbekannter Fehler')
                # Give up on this target after 5 failed files
                if len(errors) >= 5:
                    break

            with upload_lock:
                bd = base_done + fsize
                elapsed = time.time() - start_ts
                speed = bd / elapsed if elapsed > 1 else 0
                eta = int((bytes_total - bd) / speed) if speed > 0 and bytes_total > bd else 0
                upload_state.update(bytes_done=bd,
                                    progress=int(bd / bytes_total * 100) if bytes_total else 100,
                                    speed_bps=int(speed), eta_sec=eta)

        # Summarize the per-file loop as one synthetic CompletedProcess
        r = subprocess.CompletedProcess(
            args=['rclone', 'copyto'],
            returncode=1 if errors else 0,
            stdout='',
            stderr='\n'.join(errors),
        )
        ok = r.returncode == 0
        err = ''
        if not ok:
            err = r.stderr.strip() or 'Unbekannter Fehler'
            add_log(f'Upload {name}: rclone stderr: {err[:300]}')
        elif skipped:
            add_log(f'Upload {name}: {skipped} Dateien übersprungen')

        with upload_lock:
            upload_state['results'].append({'name': name, 'ok': ok, 'msg': err})
        add_log(f'Upload {name}: {"✓ OK" if ok else "✗ Fehler - " + err}')

    with upload_lock:
        upload_state['running'] = False
        upload_state['current'] = ''
        upload_state['current_file'] = ''
|
||||
131
picopy/usb.py
Normal file
131
picopy/usb.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""PiCopy – USB-Erkennung: usb_devices, usb_port_of, ensure_mount, cleanup_stale_mounts."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import INTERNAL_DEST_DIR, log
|
||||
|
||||
|
||||
def usb_port_of(dev_name):
    """Return the physical USB port path of a block device (e.g. '2-2').

    Primarily via udevadm, with a sysfs readlink fallback; returns None when
    no port segment can be determined.
    """
    # Primary: udevadm (more reliable)
    try:
        r = subprocess.run(
            ['udevadm', 'info', '-q', 'path', '-n', f'/dev/{dev_name}'],
            capture_output=True, text=True, timeout=5
        )
        if r.returncode == 0:
            port = None
            # Keep the LAST matching '<bus>-<port...>' segment: the deepest
            # one identifies the physical port on hubs
            for seg in r.stdout.strip().split('/'):
                if re.fullmatch(r'\d+-[\d.]+', seg):
                    port = seg
            if port:
                return port
    except Exception:
        pass
    # Fallback: resolve the sysfs symlink and scan its path segments
    try:
        real = Path(f'/sys/block/{dev_name}').resolve()
        port = None
        for seg in str(real).split('/'):
            # ':' excludes USB interface nodes like '2-2:1.0'
            if re.fullmatch(r'\d+[\-\d.]+', seg) and ':' not in seg:
                port = seg
        return port
    except Exception:
        return None
|
||||
|
||||
|
||||
def usb_devices():
    """List USB block devices via `lsblk -J`.

    Returns a list of dicts with device, usb_port, mount, label and size.
    Drives with partitions yield one entry per partition; a partitionless
    drive yields one entry for the whole device. Returns [] when lsblk fails.
    """
    try:
        out = subprocess.check_output(
            ['lsblk', '-J', '-o', 'NAME,TRAN,MOUNTPOINT,LABEL,SIZE,MODEL'],
            timeout=10, text=True
        )
        data = json.loads(out)
    except Exception as e:
        log.error(f'lsblk: {e}')
        return []

    result = []
    for bd in data.get('blockdevices', []):
        # Only devices attached via the USB transport
        if bd.get('tran') != 'usb':
            continue
        name = bd['name']
        port = usb_port_of(name)
        # Display-name fallback chain: filesystem label -> drive model -> device name
        model = (bd.get('label') or bd.get('model') or name).strip()
        for child in (bd.get('children') or []):
            result.append({
                'device': f'/dev/{child["name"]}',
                'usb_port': port,
                'mount': child.get('mountpoint') or '',
                'label': (child.get('label') or model).strip(),
                'size': child.get('size') or bd.get('size') or '',
            })
        if not bd.get('children'):
            result.append({
                'device': f'/dev/{name}',
                'usb_port': port,
                'mount': bd.get('mountpoint') or '',
                'label': model,
                'size': bd.get('size') or '',
            })
    return result
|
||||
|
||||
|
||||
def ensure_mount(dev_info):
    """Ensure the device described by *dev_info* is mounted.

    Returns (mountpoint, we_mounted_it); (None, False) when mounting fails.
    The internal-storage pseudo device maps to INTERNAL_DEST_DIR and is
    never counted as mounted-by-us.
    """
    if dev_info.get('internal'):
        INTERNAL_DEST_DIR.mkdir(parents=True, exist_ok=True)
        return str(INTERNAL_DEST_DIR), False

    existing = dev_info.get('mount')
    if existing:
        return existing, False

    device = dev_info['device']
    mountpoint = f'/mnt/picopy{device.replace("/","_")}'
    os.makedirs(mountpoint, exist_ok=True)
    result = subprocess.run(['mount', device, mountpoint], capture_output=True)
    if result.returncode:
        log.error(f'mount failed: {result.stderr.decode()}')
        return None, False
    return mountpoint, True
|
||||
|
||||
|
||||
def cleanup_stale_mounts() -> None:
    """Unmount leftover PiCopy mounts from a previous run (e.g. after power loss)."""
    try:
        with open('/proc/mounts') as fh:
            stale = [entry.split()[1] for entry in fh if '/mnt/picopy' in entry]
        for mountpoint in stale:
            log.info(f'Bereinige veralteten Mount: {mountpoint}')
            # Lazy unmount so busy filesystems are detached as well
            subprocess.run(['umount', '-l', mountpoint], capture_output=True)
    except Exception as e:
        log.warning(f'Stale-Mount-Bereinigung fehlgeschlagen: {e}')
|
||||
|
||||
|
||||
def internal_dest_device(cfg=None):
    """Return a pseudo device dict describing the internal storage target.

    Mirrors the shape of usb_devices() entries so the internal destination
    can be handled like a USB drive; 'usb_port' carries the sentinel
    '__internal__' and 'internal' is True. *cfg* is optional; the config is
    loaded when omitted.
    """
    # Function-level import — presumably to avoid a circular import with
    # picopy.config at module load time; confirm before moving to the top.
    from picopy.config import load_cfg, _fmt_bytes
    cfg = cfg or load_cfg()
    usage = _internal_usage()
    return {
        'device': 'internal',
        'usb_port': '__internal__',
        'mount': str(INTERNAL_DEST_DIR),
        'label': cfg.get('internal_dest_label') or 'Interner Speicher',
        'size': _fmt_bytes(usage['free']) + ' frei',
        'internal': True,
    }
|
||||
|
||||
|
||||
def _internal_usage():
    """Disk usage of the internal destination directory.

    Creates the directory if missing and returns a dict with path plus
    total/used/free in bytes.
    """
    import shutil
    INTERNAL_DEST_DIR.mkdir(parents=True, exist_ok=True)
    du = shutil.disk_usage(INTERNAL_DEST_DIR)
    return {
        'path': str(INTERNAL_DEST_DIR),
        'total': du.total,
        'used': du.used,
        'free': du.free,
    }
|
||||
167
picopy/wifi.py
Normal file
167
picopy/wifi.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""PiCopy – WiFi: wifi_state, wifi_lock, nm(), Helpers, wifi_monitor."""
|
||||
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
|
||||
from picopy.config import (
|
||||
NM_AP_CON, NM_CLIENT_CON, WIFI_BOOT_WAIT,
|
||||
load_cfg, log
|
||||
)
|
||||
|
||||
# Shared WiFi status for the UI and the monitor thread; guard with wifi_lock.
wifi_state = {
    'mode': 'unknown',  # 'client' | 'ap' | 'disconnected'
    'ssid': '',         # SSID of the client network, or of our own AP
    'ip': '',           # wlan0 IPv4 address ('' when disconnected)
}
wifi_lock = threading.Lock()
|
||||
|
||||
|
||||
def nm(*args):
    """Run an nmcli command (20 s timeout) and return the CompletedProcess."""
    cmd = ['nmcli', *args]
    return subprocess.run(cmd, capture_output=True, text=True, timeout=20)
|
||||
|
||||
|
||||
def get_wlan0_info():
    """Return {'state', 'connection'} for the wlan0 device via nmcli.

    Both fields are '' when wlan0 is not listed.
    """
    result = nm('-t', '-f', 'DEVICE,STATE,CONNECTION', 'dev')
    for row in result.stdout.splitlines():
        fields = row.split(':')
        if not fields or fields[0] != 'wlan0':
            continue
        state = fields[1] if len(fields) > 1 else ''
        # Re-join in case the connection name itself contains ':'
        connection = ':'.join(fields[2:]) if len(fields) > 2 else ''
        return {'state': state, 'connection': connection}
    return {'state': '', 'connection': ''}
|
||||
|
||||
|
||||
def get_wifi_ip():
    """Return wlan0's IPv4 address (without prefix length), or ''."""
    result = nm('-t', '-f', 'IP4.ADDRESS', 'dev', 'show', 'wlan0')
    for row in result.stdout.splitlines():
        if 'IP4.ADDRESS' not in row:
            continue
        addr = row.split(':')[-1].split('/')[0].strip()
        if addr:
            return addr
    return ''
|
||||
|
||||
|
||||
def is_client_connected():
    """Truthy when wlan0 is connected as a client (i.e. not running our AP)."""
    info = get_wlan0_info()
    if info['state'] != 'connected':
        return False
    conn = info['connection']
    # Keep the original truthiness semantics: an empty connection name is falsy
    return conn and NM_AP_CON not in conn
|
||||
|
||||
|
||||
def is_ap_active():
    """True when the PiCopy hotspot connection is currently activated."""
    result = nm('-t', '-f', 'NAME,STATE', 'con', 'show', '--active')
    for row in result.stdout.splitlines():
        if NM_AP_CON in row and 'activated' in row:
            return True
    return False
|
||||
|
||||
|
||||
def start_ap(ssid, password):
    """Create and activate the PiCopy hotspot on wlan0.

    Any previous hotspot profile is deleted first. Returns True on success.
    """
    log.info(f'Starte AP: {ssid}')
    nm('con', 'delete', NM_AP_CON)
    time.sleep(1)  # give NetworkManager a moment to settle
    result = nm('dev', 'wifi', 'hotspot',
                'ifname', 'wlan0',
                'ssid', ssid,
                'password', password,
                'con-name', NM_AP_CON)
    if result.returncode != 0:
        log.error(f'AP Fehler: {result.stderr}')
        return False
    log.info('AP gestartet')
    return True
|
||||
|
||||
|
||||
def stop_ap():
    """Deactivate the PiCopy hotspot connection (best effort)."""
    log.info('Stoppe AP')
    nm('con', 'down', NM_AP_CON)
|
||||
|
||||
|
||||
def connect_client_wifi(ssid, password):
    """Connect wlan0 to *ssid* as a client, replacing any stored profile.

    Returns True when nmcli reports success.
    """
    log.info(f'Verbinde mit WiFi: {ssid}')
    # Drop the previous PiCopy client profile before creating a new one
    nm('con', 'delete', NM_CLIENT_CON)
    time.sleep(1)
    result = nm('dev', 'wifi', 'connect', ssid,
                'password', password,
                'name', NM_CLIENT_CON,
                'ifname', 'wlan0')
    if result.returncode != 0:
        log.error(f'WiFi-Verbindung fehlgeschlagen: {result.stderr.strip()}')
        return False
    log.info(f'Verbunden mit {ssid}')
    return True
|
||||
|
||||
|
||||
def scan_wifi_networks():
    """Trigger a WiFi rescan and return the visible networks.

    Returns a list of {'ssid', 'signal', 'security'} dicts, deduplicated by
    SSID and sorted by descending signal strength.

    NOTE(review): nmcli -t escapes ':' inside fields; an SSID containing a
    colon would be mis-split by the naive split below — confirm whether that
    case matters here.
    """
    nm('dev', 'wifi', 'rescan')
    time.sleep(2)  # give the rescan a moment before listing
    r = nm('-t', '-f', 'SSID,SIGNAL,SECURITY', 'dev', 'wifi', 'list')
    seen, nets = set(), []
    for line in r.stdout.splitlines():
        parts = line.split(':')
        if len(parts) >= 2:
            ssid = parts[0].strip()
            signal = parts[1].strip() if len(parts) > 1 else '0'
            security = ':'.join(parts[2:]).strip() if len(parts) > 2 else ''
            if ssid and ssid not in seen:
                seen.add(ssid)
                nets.append({'ssid': ssid, 'signal': int(signal) if signal.isdigit() else 0, 'security': security})
    return sorted(nets, key=lambda x: -x['signal'])
|
||||
|
||||
|
||||
def update_wifi_state():
    """Refresh the shared wifi_state dict from the current nmcli status."""
    info = get_wlan0_info()
    if info['state'] == 'connected':
        if NM_AP_CON in info['connection']:
            # Our own hotspot is up; 10.42.0.1 is the address used for the AP
            with wifi_lock:
                wifi_state.update(mode='ap',
                                  ssid=load_cfg().get('ap_ssid', 'PiCopy'),
                                  ip='10.42.0.1')
        else:
            ip = get_wifi_ip()
            with wifi_lock:
                wifi_state.update(mode='client',
                                  ssid=info['connection'],
                                  ip=ip)
    else:
        with wifi_lock:
            wifi_state.update(mode='disconnected', ssid='', ip='')
|
||||
|
||||
|
||||
def wifi_monitor():
    """Background thread: keep WiFi up, falling back to AP mode.

    After an initial boot wait (WIFI_BOOT_WAIT), every 30 s: refresh the
    state, and when disconnected first try the configured client network,
    then start the access point as a fallback. Errors are logged and never
    kill the loop.
    """
    log.info(f'WiFi-Monitor: warte {WIFI_BOOT_WAIT}s auf Verbindung...')
    time.sleep(WIFI_BOOT_WAIT)

    while True:
        try:
            update_wifi_state()
            with wifi_lock:
                mode = wifi_state['mode']

            if mode == 'disconnected':
                cfg = load_cfg()
                ssid = cfg.get('wifi_ssid', '')
                pw = cfg.get('wifi_password', '')

                connected = False
                if ssid:
                    connected = connect_client_wifi(ssid, pw)
                    if connected:
                        time.sleep(5)  # wait for DHCP before re-reading the state
                        update_wifi_state()

                if not connected:
                    ap_ssid = cfg.get('ap_ssid', 'PiCopy')
                    ap_pw = cfg.get('ap_password', 'PiCopy,')
                    if start_ap(ap_ssid, ap_pw):
                        time.sleep(3)
                        with wifi_lock:
                            wifi_state.update(mode='ap', ssid=ap_ssid, ip='10.42.0.1')

        except Exception as e:
            log.error(f'WiFi-Monitor Fehler: {e}')

        time.sleep(30)
|
||||
152
picopy/wireguard.py
Normal file
152
picopy/wireguard.py
Normal file
@@ -0,0 +1,152 @@
|
||||
"""PiCopy – WireGuard VPN: wg_state, wg_lock, alle wg_* Funktionen, wg_monitor."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
from picopy.config import log
|
||||
|
||||
WG_CONF = Path('/etc/wireguard/picopy.conf')
|
||||
WG_IFACE = 'picopy'
|
||||
|
||||
|
||||
def wg_is_installed():
    """True when the wg-quick binary is available on PATH."""
    binary = shutil.which('wg-quick')
    return binary is not None
|
||||
|
||||
|
||||
# Shared WireGuard status; guard all access with wg_lock.
wg_state = {
    'connected': False,   # the picopy interface is up
    'ip': '',             # tunnel IPv4 address when connected
    'peer': '',           # public key of the first peer (from `wg show`)
    'error': None,        # last connect error message, or None
    'has_config': False,  # /etc/wireguard/picopy.conf exists
    'installed': False,   # wg-quick found on PATH
    'pkg_running': False, # an apt install/remove is in progress
    'pkg_action': '',     # 'install' or 'remove' while pkg_running
    'pkg_error': None,    # last apt error message, or None
}
wg_lock = threading.Lock()
|
||||
|
||||
|
||||
def wg_update_state():
    """Refresh the shared wg_state dict from `wg show` and `ip addr`.

    Covers three cases: tools not installed, interface down, interface up
    (then also resolves the tunnel IPv4 and the first peer key).
    """
    inst = wg_is_installed()
    has_conf = WG_CONF.exists()
    if not inst:
        with wg_lock:
            wg_state.update(installed=False, connected=False, ip='', peer='',
                            has_config=has_conf)
        return
    r = subprocess.run(['wg', 'show', WG_IFACE],
                       capture_output=True, text=True, timeout=5)
    if r.returncode != 0:
        # Interface does not exist -> not connected
        with wg_lock:
            wg_state.update(installed=True, connected=False, ip='', peer='',
                            has_config=has_conf)
        return
    ip_r = subprocess.run(['ip', '-4', 'addr', 'show', WG_IFACE],
                          capture_output=True, text=True, timeout=5)
    ip = ''
    for line in ip_r.stdout.splitlines():
        if line.strip().startswith('inet '):
            # 'inet 10.0.0.2/24 ...' -> take the address without the prefix
            ip = line.strip().split()[1].split('/')[0]
            break
    peer = ''
    for line in r.stdout.splitlines():
        if line.startswith('peer:'):
            peer = line.split(':', 1)[-1].strip()
            break
    with wg_lock:
        wg_state.update(installed=True, connected=True, ip=ip, peer=peer,
                        error=None, has_config=has_conf)
|
||||
|
||||
|
||||
def wg_connect():
    """Bring the WireGuard tunnel up via wg-quick.

    Returns True on success. On failure the last meaningful stderr line is
    stored in wg_state['error'] ('[#]' command-echo lines are filtered out,
    and a missing-resolvconf error is replaced by a user-friendly hint).
    """
    if not WG_CONF.exists():
        with wg_lock:
            wg_state['error'] = 'Keine Konfiguration vorhanden'
        return False
    r = subprocess.run(['wg-quick', 'up', WG_IFACE],
                       capture_output=True, text=True, timeout=30)
    if r.returncode == 0:
        time.sleep(1)  # give the interface a moment before reading its state
        wg_update_state()
        log.info('WireGuard verbunden')
        return True
    lines = r.stderr.strip().splitlines() if r.stderr.strip() else []
    # wg-quick echoes every command it runs prefixed with '[#]'; skip those
    real_errors = [l for l in lines if not l.strip().startswith('[#]')]
    err = (real_errors[-1] if real_errors else lines[-1] if lines else 'Unbekannter Fehler')
    if 'resolvconf' in err and 'not found' in err:
        err = 'resolvconf fehlt - bitte WireGuard deinstallieren und neu installieren (openresolv wird dann mitinstalliert)'
    with wg_lock:
        wg_state.update(connected=False, error=err)
    log.error(f'WireGuard Fehler: {err}')
    return False
|
||||
|
||||
|
||||
def wg_disconnect():
    """Bring the WireGuard interface down and reset the shared state.

    Returns True when wg-quick succeeded; the state is cleared either way.
    """
    result = subprocess.run(['wg-quick', 'down', WG_IFACE],
                            capture_output=True, text=True, timeout=15)
    with wg_lock:
        wg_state.update(connected=False, ip='', peer='', error=None)
    log.info('WireGuard getrennt')
    return result.returncode == 0
|
||||
|
||||
|
||||
def _wg_apt(action: str, packages: list):
    """Run apt-get install/remove for *packages* and track it in pkg_* state.

    Silently returns when another package operation is already running.
    Runs non-interactively with a 5-minute timeout; on failure the last
    stderr line is kept as pkg_error. Always clears pkg_running/pkg_action
    and refreshes the WireGuard state at the end.
    """
    with wg_lock:
        if wg_state['pkg_running']:
            return
        wg_state.update(pkg_running=True, pkg_action=action, pkg_error=None)
    try:
        cmd = ['apt-get', action, '-y'] + packages
        r = subprocess.run(cmd, capture_output=True, text=True, timeout=300,
                           env={**os.environ, 'DEBIAN_FRONTEND': 'noninteractive'})
        if r.returncode != 0:
            err = (r.stderr.strip().splitlines()[-1]
                   if r.stderr.strip() else f'apt-get {action} fehlgeschlagen')
            log.error(f'WireGuard apt {action}: {err}')
            with wg_lock:
                wg_state['pkg_error'] = err
        else:
            log.info(f'WireGuard apt {action} abgeschlossen')
    except Exception as e:
        with wg_lock:
            wg_state['pkg_error'] = str(e)
    finally:
        with wg_lock:
            wg_state['pkg_running'] = False
            wg_state['pkg_action'] = ''
        # Must run OUTSIDE the lock: wg_update_state acquires wg_lock itself
        # and threading.Lock is not reentrant.
        wg_update_state()
|
||||
|
||||
|
||||
def wg_install():
    """Install WireGuard (plus openresolv for DNS handling) via apt."""
    _wg_apt('install', ['wireguard', 'wireguard-tools', 'openresolv'])
|
||||
|
||||
|
||||
def wg_uninstall():
    """Tear down the tunnel, then remove the WireGuard packages via apt."""
    wg_disconnect()
    _wg_apt('remove', ['wireguard', 'wireguard-tools'])
|
||||
|
||||
|
||||
def wg_save_config(content: str):
    """Write the WireGuard config file with restrictive permissions (0600).

    Returns (ok, error_message); error_message is '' on success.
    """
    try:
        WG_CONF.parent.mkdir(parents=True, exist_ok=True)
        WG_CONF.write_text(content, encoding='utf-8')
        WG_CONF.chmod(0o600)
    except Exception as exc:
        return False, str(exc)
    return True, ''
|
||||
|
||||
|
||||
def wg_monitor():
    """Background thread: refresh the WireGuard state every 10 seconds.

    Errors are swallowed deliberately — a failed probe must not kill the loop.
    """
    while True:
        try:
            wg_update_state()
        except Exception:
            pass
        time.sleep(10)
|
||||
Reference in New Issue
Block a user