2026-04-14 07:21:33 +09:00
|
|
|
#!/bin/bash
# hermes-sync: synchronize hermes state (sqlite DB, memories, skills)
# between machines through a shared git repository.
set -e

# Git working tree used as the sync transport.
SYNC_DIR="${SYNC_DIR:-$HOME/.hermes-sync}"
# Local hermes installation that is exported from / restored into.
HERMES_HOME="${HERMES_HOME:-$HOME/.hermes}"

# Export HOSTNAME so the embedded python3 heredocs below can read it via
# os.environ (HOSTNAME is not reliably exported under cron/non-interactive
# shells; the heredocs fall back to socket.gethostname() when unset).
HOSTNAME=$(hostname)
export HOSTNAME

cd "$SYNC_DIR"

echo "[$(date '+%H:%M:%S')] Sync from $HOSTNAME..."
|
2026-04-14 07:41:50 +09:00
|
|
|
|
2026-04-14 08:29:14 +09:00
|
|
|
# ── Step 0: Clean local state_merged.db to avoid git pull conflicts ──────
# The merged db is produced by this script, not tracked, and can conflict
# with remote when pulling. Remove it before fetch+merge. Also drop any
# stale -wal/-shm sidecars so a later open cannot replay an old journal
# (Step 4 performs the same sidecar cleanup on its output).
rm -f "$SYNC_DIR/state_merged.db" \
      "$SYNC_DIR/state_merged.db-wal" \
      "$SYNC_DIR/state_merged.db-shm"
|
|
|
|
|
|
2026-04-20 18:28:06 +08:00
|
|
|
# ── Step 1: Export local state.db (via temp dir to avoid lock & WAL issues) ─
python3 << 'PYEOF'
"""Export ~/.hermes/state.db to ~/.hermes-sync/state_<hostname>.db.

Checkpoints the WAL first, copies db/-wal/-shm through a temp dir, then
verifies the copy opens and has the expected tables before publishing.
"""
import sqlite3, os, shutil, tempfile

# Hostname: prefer the exported shell variable, fall back to the socket API.
# (Computed once here; the original inlined this expression a second time.)
hostname = os.environ.get('HOSTNAME') or __import__('socket').gethostname()
local_db = os.path.join(os.path.expanduser('~/.hermes'), 'state.db')
export_db = os.path.join(os.path.expanduser('~/.hermes-sync'), f'state_{hostname}.db')

tmpdir = tempfile.mkdtemp(prefix='hs_exp_')
try:
    # Step 1a: Open and checkpoint (forces WAL -> main db).
    # NOTE(review): the connection is opened mode=ro; a wal_checkpoint may be
    # refused on a read-only connection — confirm; the -wal/-shm copy below
    # keeps the export consistent either way.
    conn = sqlite3.connect(f'file:{local_db}?mode=ro', uri=True)
    jm = conn.execute('PRAGMA journal_mode').fetchone()[0]
    if jm == 'wal':
        # WAL mode: checkpoint first to flush WAL into main db
        conn.execute('PRAGMA wal_checkpoint(TRUNCATE)')
        print(f'WAL checkpointed ({jm})')
    conn.close()

    # Step 1b: Copy all three files (db + -wal + -shm) if they exist
    tmp_db = os.path.join(tmpdir, 'db')
    for suffix in ['', '-wal', '-shm']:
        src = local_db + suffix
        if os.path.exists(src):
            shutil.copy2(src, tmp_db + suffix)

    # Step 1c: Verify (open read-only to confirm it's not corrupted)
    test = sqlite3.connect(f'file:{tmp_db}?mode=ro', uri=True)
    r = test.execute('SELECT COUNT(*) FROM sessions').fetchone()[0]
    m = test.execute('SELECT COUNT(*) FROM messages').fetchone()[0]
    test.close()

    # Publish the verified copy (plus any sidecars) into the sync dir.
    shutil.copy2(tmp_db, export_db)
    for suffix in ['-wal', '-shm']:
        if os.path.exists(tmp_db + suffix):
            shutil.copy2(tmp_db + suffix, export_db + suffix)

    print(f'Exported: {r}s/{m}m (journal={jm})')
finally:
    shutil.rmtree(tmpdir, ignore_errors=True)
PYEOF
|
|
|
|
|
|
|
|
|
|
# ── Step 2: Git stage ────────────────────────────────────────────────────
git add -A

# HAS_LOCAL records whether anything differs from HEAD, staged or not.
HAS_LOCAL=false
git diff --cached --quiet && git diff --quiet || HAS_LOCAL=true
|
|
|
|
|
|
2026-04-14 07:16:37 +08:00
|
|
|
# ── Step 3: Fetch + merge ─────────────────────────────────────────────────
# Reconcile with origin/main. If local work exists it is parked in a stash
# so the merge runs on a clean tree, then re-applied afterwards.
git fetch origin main

# Merge only when: HEAD exists, origin/main resolves, and HEAD is NOT
# already an ancestor of origin/main (i.e. the remote has commits we lack).
if git rev-parse HEAD >/dev/null 2>&1 && \
   git rev-parse origin/main >/dev/null 2>&1 && \
   ! git merge-base --is-ancestor HEAD origin/main 2>/dev/null; then
    echo "Merging remote..."
    if [ "$HAS_LOCAL" = true ]; then
        # Park uncommitted changes; "|| true" keeps set -e from aborting.
        git stash push -m "local $(date)" 2>/dev/null || true
        if ! git merge origin/main --no-edit 2>/dev/null; then
            # Conflict: keep our copies of the sync script and memory file,
            # then record the resolution as a commit (best effort).
            git checkout --ours sync.sh memories/MEMORY.md 2>/dev/null || true
            git add -A
            git commit -m "Auto-resolve $(date)" 2>/dev/null || true
        fi
        # Re-apply the parked changes (stash message starts with "local ").
        if git stash list | grep -q "local "; then
            git stash pop 2>/dev/null || true
            # Try to linearize onto origin/main; if the rebase fails, abort
            # it and fall back to a plain merge — failures are tolerated.
            git rebase origin/main 2>/dev/null || {
                git rebase --abort 2>/dev/null || true
                git merge origin/main --no-edit 2>/dev/null || true
            }
        fi
    else
        # No local changes: merge, then ff-only, and as a last resort
        # hard-reset to the remote (nothing local is at stake here).
        git merge origin/main --no-edit 2>/dev/null || \
        git merge --ff-only origin/main 2>/dev/null || \
        git reset --hard origin/main
    fi
fi
|
|
|
|
|
|
2026-04-14 07:16:37 +08:00
|
|
|
# ── Step 4: Merge all state_*.db → state_merged.db ──────────────────────
python3 << 'PYEOF'
# Merge every per-host state_<host>.db in the sync dir into one
# state_merged.db: schema is cloned from one reference DB via iterdump,
# then sessions (INSERT OR REPLACE) and messages (INSERT OR IGNORE) are
# folded in from each DB. All reads go through temp copies so the
# originals are never touched.
import sqlite3, os, glob, shutil, tempfile

sync_dir = os.path.expanduser('~/.hermes-sync')
merged_path = os.path.join(sync_dir, 'state_merged.db')
# state_*.db matches state_merged.db too; filter it back out.
db_files = sorted(glob.glob(os.path.join(sync_dir, 'state_*.db')))
db_files = [f for f in db_files if not f.endswith('_merged.db')]
print(f'Merging {len(db_files)} DBs')

tmpdir = tempfile.mkdtemp(prefix='hs_merge_')
tmp_merged = os.path.join(tmpdir, 'merged.db')

try:
    # Create merged DB (DELETE journal avoids WAL complications during merge)
    conn = sqlite3.connect(tmp_merged)
    conn.execute('PRAGMA journal_mode=DELETE')
    conn.execute('PRAGMA locking_mode=NORMAL')
    conn.execute('PRAGMA synchronous=FULL')

    # Use one reference DB to get exact schema (handles FTS, etc.)
    # NOTE(review): db_files[0] raises IndexError when no per-host DBs
    # exist yet — confirm Step 1 always runs first on every machine.
    ref_db = db_files[0]
    ref_copy = os.path.join(tmpdir, 'ref.db')
    shutil.copy2(ref_db, ref_copy)
    if os.path.exists(ref_db + '-wal'):
        shutil.copy2(ref_db + '-wal', ref_copy + '-wal')
    if os.path.exists(ref_db + '-shm'):
        shutil.copy2(ref_db + '-shm', ref_copy + '-shm')

    ref = sqlite3.connect(f'file:{ref_copy}?mode=ro', uri=True)
    for line in ref.iterdump():
        # Skip FTS virtual table data rows (they are derived, not real data)
        if line.startswith('INSERT INTO messages_fts') or \
           line.startswith('DELETE FROM messages_fts'):
            continue
        try:
            conn.execute(line)
        except Exception as e:
            pass # Ignore schema errors from partial DDL
    ref.close()
    # Remove the reference copies (the -shm sidecar, if any, is left for
    # the tmpdir cleanup in the finally block).
    os.remove(ref_copy)
    if os.path.exists(ref_copy + '-wal'):
        os.remove(ref_copy + '-wal')

    # Copy sessions (INSERT OR REPLACE to dedupe by PK)
    for db_file in db_files:
        name = os.path.basename(db_file)

        # Copy to temp with WAL files
        tmp_copy = os.path.join(tmpdir, name)
        shutil.copy2(db_file, tmp_copy)
        if os.path.exists(db_file + '-wal'):
            shutil.copy2(db_file + '-wal', tmp_copy + '-wal')
        if os.path.exists(db_file + '-shm'):
            shutil.copy2(db_file + '-shm', tmp_copy + '-shm')

        src = sqlite3.connect(f'file:{tmp_copy}?mode=ro', uri=True)

        # Per-source stats, logged for the sync transcript.
        s_cnt = src.execute('SELECT COUNT(*) FROM sessions').fetchone()[0]
        m_cnt = src.execute('SELECT COUNT(*) FROM messages').fetchone()[0]
        jm = src.execute('PRAGMA journal_mode').fetchone()[0]
        print(f' {name}: {s_cnt}s/{m_cnt}m journal={jm}')

        # Sessions: last writer wins on primary-key collision.
        sess_rows = src.execute('SELECT * FROM sessions').fetchall()
        sess_cols = len(src.execute('PRAGMA table_info(sessions)').fetchall())
        for row in sess_rows:
            conn.execute(f'INSERT OR REPLACE INTO sessions VALUES ({",".join(["?"]*sess_cols)})', row)

        # Messages: first writer wins (duplicates silently skipped).
        msg_rows = src.execute('SELECT * FROM messages').fetchall()
        msg_cols = len(src.execute('PRAGMA table_info(messages)').fetchall())
        for row in msg_rows:
            conn.execute(f'INSERT OR IGNORE INTO messages VALUES ({",".join(["?"]*msg_cols)})', row)

        src.close()
        os.remove(tmp_copy)
        for suf in ['-wal', '-shm']:
            if os.path.exists(tmp_copy + suf):
                os.remove(tmp_copy + suf)

    conn.commit()
    conn.close()

    # Publish: replace any existing merged DB atomically-ish (remove, copy).
    if os.path.exists(merged_path):
        os.remove(merged_path)
    shutil.copy2(tmp_merged, merged_path)
    # Ensure no WAL on merged (merge output should be clean DELETE)
    for suf in ['-wal', '-shm']:
        if os.path.exists(merged_path + suf):
            os.remove(merged_path + suf)
    print(f'Merged: {os.path.getsize(merged_path)/1024:.0f} KB')

finally:
    shutil.rmtree(tmpdir, ignore_errors=True)
PYEOF
|
|
|
|
|
|
2026-04-14 08:23:33 +09:00
|
|
|
# ── Step 5: Stage merged DB (it was just created in Step 4) ─────────────
# NOTE(review): Step 0's comment calls the merged db "not tracked", yet it
# is staged here (and committed by Step 6 when HAS_LOCAL=true) — confirm
# which is intended; tracking it is what makes Step 0's cleanup necessary.
git add state_merged.db 2>/dev/null || true
|
|
|
|
|
|
|
|
|
|
# ── Step 6: Push ─────────────────────────────────────────────────────────
# Commit and push only when Step 2 detected local changes; a rejected push
# triggers one pull-then-retry cycle before giving up.
if [ "$HAS_LOCAL" != true ]; then
    echo "No local changes"
else
    git commit -m "Sync $(date '+%Y-%m-%d %H:%M')" 2>/dev/null || true
    if ! git push origin main 2>&1; then
        echo "Push rejected, pulling..."
        git pull origin main --no-edit 2>/dev/null || true
        git push origin main 2>&1 || echo "Push failed"
    fi
fi
|
|
|
|
|
|
2026-04-14 08:23:48 +09:00
|
|
|
# ── Step 7: Restore merged state to local hermes ─────────────────────────
python3 << 'PYEOF'
"""Install the merged DB as the live ~/.hermes/state.db.

Verifies a temp copy of the merged DB first, keeps a .bak of the current
DB while swapping, and tolerates a missing local DB / missing ~/.hermes
(first run on a fresh machine) instead of crashing and aborting the
remaining sync steps under the script's set -e.
"""
import sqlite3, os, shutil, tempfile

hermes_home = os.path.expanduser('~/.hermes')
merged_path = os.path.join(os.path.expanduser('~/.hermes-sync'), 'state_merged.db')
local_db = os.path.join(hermes_home, 'state.db')

if not os.path.exists(merged_path):
    print('No merged DB, skipping restore')
else:
    tmpdir = tempfile.mkdtemp(prefix='hs_rest_')
    try:
        # Step 7a: Ensure merged db is fully checkpointed (DELETE journal mode)
        # NOTE(review): connection is mode=ro; a wal_checkpoint may be
        # refused on it — the -wal/-shm copy below covers WAL regardless.
        merge_conn = sqlite3.connect(f'file:{merged_path}?mode=ro', uri=True)
        merge_jm = merge_conn.execute('PRAGMA journal_mode').fetchone()[0]
        if merge_jm == 'wal':
            merge_conn.execute('PRAGMA wal_checkpoint(TRUNCATE)')
            print(f'Restored merged DB had WAL, checkpointed ({merge_jm})')
        merge_conn.close()

        # Work on a private copy so verification cannot disturb the original.
        tmp_db = os.path.join(tmpdir, 'db')
        shutil.copy2(merged_path, tmp_db)
        for suf in ['-wal', '-shm']:
            if os.path.exists(merged_path + suf):
                shutil.copy2(merged_path + suf, tmp_db + suf)

        # Step 7b: Verify before overwriting
        test = sqlite3.connect(f'file:{tmp_db}?mode=ro', uri=True)
        r = test.execute('SELECT COUNT(*) FROM sessions').fetchone()[0]
        m = test.execute('SELECT COUNT(*) FROM messages').fetchone()[0]
        test.execute('PRAGMA integrity_check')  # verify
        test.close()

        # Swap in the verified DB. On a fresh machine there may be no
        # existing DB (or even no ~/.hermes dir): create the dir and skip
        # the backup instead of raising FileNotFoundError.
        os.makedirs(hermes_home, exist_ok=True)
        have_backup = os.path.exists(local_db)
        if have_backup:
            shutil.copy2(local_db, local_db + '.bak')
        shutil.copy2(tmp_db, local_db)
        # Keep the local WAL sidecars consistent with what was installed:
        # copy ours over, or remove stale ones left from the old DB.
        for suf in ['-wal', '-shm']:
            if os.path.exists(tmp_db + suf):
                shutil.copy2(tmp_db + suf, local_db + suf)
            elif os.path.exists(local_db + suf):
                os.remove(local_db + suf)
        if have_backup:
            os.remove(local_db + '.bak')
        print(f'Restored: {r}s/{m}m')
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)
PYEOF
|
|
|
|
|
|
2026-04-14 08:23:48 +09:00
|
|
|
# ── Step 8: Sync memories + skills (additive) ────────────────────────────
# Ensure the destination exists on a fresh machine: without it the cp below
# fails silently (2>/dev/null || true) and memories never sync. The skills
# branch already does the equivalent mkdir -p for its directory.
mkdir -p "$HERMES_HOME/memories"
cp "$SYNC_DIR/memories/MEMORY.md" "$HERMES_HOME/memories/MEMORY.md" 2>/dev/null || true

if [ -d "$SYNC_DIR/skills" ]; then
    mkdir -p "$HERMES_HOME/skills"
    # Additive copy: never overwrite an existing local skill. Prefer rsync,
    # fall back to cp -n when rsync is unavailable.
    rsync -a --ignore-existing "$SYNC_DIR/skills/" "$HERMES_HOME/skills/" 2>/dev/null || \
        cp -rn "$SYNC_DIR/skills/"* "$HERMES_HOME/skills/" 2>/dev/null || true
fi

echo "[$(date '+%H:%M:%S')] Done"
|