#!/usr/bin/env python3
"""
Sync team 12 (万物苏网络) from production → test env.

- Team 4 (洁雯团队) already exists on both sides with an identical user list; it is left untouched.
- Team 12 (万物苏网络) does not exist in the test DB and is copied in full from production:
    accounts_team               → 1 row   (id=12 preserved)
    accounts_user               → 11 rows (team-12 members, source ids preserved)
    generation_assetgroup       → 62 rows (AUTO id, old→new mapping kept)
    generation_asset            → N rows  (AUTO id, group_id remapped)
    accounts_loginrecord        → N rows  (AUTO id)
    accounts_loginanomaly       → N rows  (AUTO id, login_record_id remapped)
    accounts_activesession      → N rows  (AUTO id)
    accounts_adminauditlog      → N rows  (operator_id IN team-12 users, AUTO id)
    generation_generationrecord → 440 rows (AUTO id)

Usage:
    python3 migrate_from_prod.py            # dry run, transaction rolled back
    python3 migrate_from_prod.py --commit   # actually write to the test env
"""
import sys
|
||
import pymysql
|
||
|
||
PROD = dict(
|
||
host='mysql-d9bb4e81696d-public.rds.volces.com',
|
||
port=3306, user='zyc', password='Zyc188208',
|
||
database='video_auto', charset='utf8mb4',
|
||
autocommit=False, cursorclass=pymysql.cursors.DictCursor,
|
||
)
|
||
TEST = dict(
|
||
host='mysql-8351f937d637-public.rds.volces.com',
|
||
port=3306, user='zyc', password='Zyc188208',
|
||
database='video_auto', charset='utf8mb4',
|
||
autocommit=False,
|
||
)
|
||
|
||
TEAM_ID = 12
|
||
|
||
|
||
def fetch_all(cur, sql, *params):
|
||
cur.execute(sql, params)
|
||
return cur.fetchall()
|
||
|
||
|
||
def main():
|
||
commit = '--commit' in sys.argv
|
||
|
||
print('Connecting to PROD (read-only fetch)...')
|
||
prod = pymysql.connect(**PROD)
|
||
pcur = prod.cursor()
|
||
|
||
# 1) team
|
||
team = fetch_all(pcur, 'SELECT * FROM accounts_team WHERE id=%s', TEAM_ID)
|
||
assert len(team) == 1, f'Expected 1 team, got {len(team)}'
|
||
team_row = team[0]
|
||
|
||
# 2) users
|
||
users = fetch_all(pcur, 'SELECT * FROM accounts_user WHERE team_id=%s ORDER BY id', TEAM_ID)
|
||
user_ids = [u['id'] for u in users]
|
||
print(f'team={team_row["name"]} users={len(users)} ids={user_ids}')
|
||
|
||
# 3) assetgroups
|
||
agroups = fetch_all(pcur, 'SELECT * FROM generation_assetgroup WHERE team_id=%s ORDER BY id', TEAM_ID)
|
||
group_ids = [g['id'] for g in agroups]
|
||
|
||
# 4) assets — group_id in agroup set
|
||
if group_ids:
|
||
ph = ','.join(['%s'] * len(group_ids))
|
||
assets = fetch_all(pcur, f'SELECT * FROM generation_asset WHERE group_id IN ({ph}) ORDER BY id', *group_ids)
|
||
else:
|
||
assets = []
|
||
|
||
# 5) login records (team_id = TEAM_ID OR user_id IN users)
|
||
if user_ids:
|
||
ph = ','.join(['%s'] * len(user_ids))
|
||
lrs = fetch_all(pcur, f'SELECT * FROM accounts_loginrecord WHERE user_id IN ({ph}) ORDER BY id', *user_ids)
|
||
else:
|
||
lrs = []
|
||
|
||
# 6) login anomalies (team_id = TEAM_ID)
|
||
las = fetch_all(pcur, 'SELECT * FROM accounts_loginanomaly WHERE team_id=%s ORDER BY id', TEAM_ID)
|
||
|
||
# 7) active sessions
|
||
if user_ids:
|
||
ph = ','.join(['%s'] * len(user_ids))
|
||
ases = fetch_all(pcur, f'SELECT * FROM accounts_activesession WHERE user_id IN ({ph}) ORDER BY id', *user_ids)
|
||
else:
|
||
ases = []
|
||
|
||
# 8) admin audit logs (operator_id in team12 users)
|
||
if user_ids:
|
||
ph = ','.join(['%s'] * len(user_ids))
|
||
als = fetch_all(pcur, f'SELECT * FROM accounts_adminauditlog WHERE operator_id IN ({ph}) ORDER BY id', *user_ids)
|
||
else:
|
||
als = []
|
||
|
||
# 9) generation records
|
||
if user_ids:
|
||
ph = ','.join(['%s'] * len(user_ids))
|
||
gens = fetch_all(pcur, f'SELECT * FROM generation_generationrecord WHERE user_id IN ({ph}) ORDER BY id', *user_ids)
|
||
else:
|
||
gens = []
|
||
|
||
# 10) team anomaly config
|
||
tacs = fetch_all(pcur, 'SELECT * FROM accounts_teamanomalyconfig WHERE team_id=%s', TEAM_ID)
|
||
|
||
prod.close()
|
||
|
||
print(f'Fetched: team=1 users={len(users)} assetgroups={len(agroups)} assets={len(assets)} '
|
||
f'loginrecords={len(lrs)} loginanomalies={len(las)} activesessions={len(ases)} '
|
||
f'adminauditlogs={len(als)} generationrecords={len(gens)} teamanomalyconfig={len(tacs)}')
|
||
|
||
# --- target test DB schema may have extra fields or be identical; we fetch column list to be safe ---
|
||
print('\nConnecting to TEST DB for write...')
|
||
test = pymysql.connect(**TEST)
|
||
tcur = test.cursor()
|
||
|
||
def get_test_cols(tbl):
|
||
tcur.execute(f"SHOW COLUMNS FROM `{tbl}`")
|
||
return [row[0] for row in tcur.fetchall()]
|
||
|
||
def align_row(src_row, test_cols, overrides=None, drop_id=True):
|
||
"""Produce (cols, values) aligned to test schema.
|
||
- Drop id if drop_id=True (AUTO_INCREMENT)
|
||
- Apply overrides {col: value}
|
||
- Fill missing columns with sensible defaults (empty string / NULL)
|
||
"""
|
||
overrides = overrides or {}
|
||
cols, vals = [], []
|
||
for c in test_cols:
|
||
if drop_id and c == 'id':
|
||
continue
|
||
if c in overrides:
|
||
vals.append(overrides[c])
|
||
elif c in src_row:
|
||
vals.append(src_row[c])
|
||
else:
|
||
# new NOT-NULL column in test schema not present in prod — fill empty str
|
||
vals.append('')
|
||
cols.append(c)
|
||
return cols, vals
|
||
|
||
def ins(tbl, cols, vals):
|
||
ph = ','.join(['%s'] * len(cols))
|
||
sql = f"INSERT INTO `{tbl}` ({','.join('`'+c+'`' for c in cols)}) VALUES ({ph})"
|
||
tcur.execute(sql, vals)
|
||
return tcur.lastrowid
|
||
|
||
try:
|
||
tcur.execute('SET FOREIGN_KEY_CHECKS = 0')
|
||
|
||
# 1) accounts_team — preserve id
|
||
print('\n[1/10] accounts_team')
|
||
team_cols_test = get_test_cols('accounts_team')
|
||
c, v = align_row(team_row, team_cols_test, drop_id=False)
|
||
ins('accounts_team', c, v)
|
||
print(f' inserted team id={TEAM_ID}')
|
||
|
||
# 2) accounts_user — preserve id
|
||
print('\n[2/10] accounts_user')
|
||
user_cols_test = get_test_cols('accounts_user')
|
||
for u in users:
|
||
c, v = align_row(u, user_cols_test, drop_id=False)
|
||
ins('accounts_user', c, v)
|
||
print(f' inserted {len(users)} users')
|
||
|
||
# 3) accounts_teamanomalyconfig
|
||
print('\n[3/10] accounts_teamanomalyconfig')
|
||
if tacs:
|
||
tac_cols_test = get_test_cols('accounts_teamanomalyconfig')
|
||
for t in tacs:
|
||
c, v = align_row(t, tac_cols_test, drop_id=True)
|
||
ins('accounts_teamanomalyconfig', c, v)
|
||
print(f' inserted {len(tacs)} rows')
|
||
else:
|
||
print(' 0 rows')
|
||
|
||
# 4) generation_assetgroup — AUTO id, keep map
|
||
print('\n[4/10] generation_assetgroup')
|
||
ag_cols_test = get_test_cols('generation_assetgroup')
|
||
ag_map = {}
|
||
for g in agroups:
|
||
c, v = align_row(g, ag_cols_test, drop_id=True)
|
||
new_id = ins('generation_assetgroup', c, v)
|
||
ag_map[g['id']] = new_id
|
||
print(f' inserted {len(ag_map)} rows')
|
||
|
||
# 5) generation_asset — AUTO id, remap group_id
|
||
print('\n[5/10] generation_asset')
|
||
a_cols_test = get_test_cols('generation_asset')
|
||
for a in assets:
|
||
ov = {'group_id': ag_map[a['group_id']]}
|
||
c, v = align_row(a, a_cols_test, overrides=ov, drop_id=True)
|
||
ins('generation_asset', c, v)
|
||
print(f' inserted {len(assets)} rows')
|
||
|
||
# 6) accounts_loginrecord — AUTO id, keep map
|
||
print('\n[6/10] accounts_loginrecord')
|
||
lr_cols_test = get_test_cols('accounts_loginrecord')
|
||
lr_map = {}
|
||
for lr in lrs:
|
||
c, v = align_row(lr, lr_cols_test, drop_id=True)
|
||
new_id = ins('accounts_loginrecord', c, v)
|
||
lr_map[lr['id']] = new_id
|
||
print(f' inserted {len(lr_map)} rows')
|
||
|
||
# 7) accounts_loginanomaly — AUTO id, remap login_record_id
|
||
print('\n[7/10] accounts_loginanomaly')
|
||
la_cols_test = get_test_cols('accounts_loginanomaly')
|
||
skipped_la = 0
|
||
for la in las:
|
||
if la['login_record_id'] not in lr_map:
|
||
# login_record not fetched (shouldn't happen if schema consistent) → skip
|
||
skipped_la += 1
|
||
continue
|
||
ov = {'login_record_id': lr_map[la['login_record_id']]}
|
||
c, v = align_row(la, la_cols_test, overrides=ov, drop_id=True)
|
||
ins('accounts_loginanomaly', c, v)
|
||
print(f' inserted {len(las)-skipped_la} rows (skipped {skipped_la})')
|
||
|
||
# 8) accounts_activesession
|
||
print('\n[8/10] accounts_activesession')
|
||
as_cols_test = get_test_cols('accounts_activesession')
|
||
for a in ases:
|
||
c, v = align_row(a, as_cols_test, drop_id=True)
|
||
ins('accounts_activesession', c, v)
|
||
print(f' inserted {len(ases)} rows')
|
||
|
||
# 9) accounts_adminauditlog
|
||
print('\n[9/10] accounts_adminauditlog')
|
||
al_cols_test = get_test_cols('accounts_adminauditlog')
|
||
for al in als:
|
||
c, v = align_row(al, al_cols_test, drop_id=True)
|
||
ins('accounts_adminauditlog', c, v)
|
||
print(f' inserted {len(als)} rows')
|
||
|
||
# 10) generation_generationrecord
|
||
print('\n[10/10] generation_generationrecord')
|
||
g_cols_test = get_test_cols('generation_generationrecord')
|
||
for g in gens:
|
||
c, v = align_row(g, g_cols_test, drop_id=True)
|
||
ins('generation_generationrecord', c, v)
|
||
print(f' inserted {len(gens)} rows')
|
||
|
||
tcur.execute('SET FOREIGN_KEY_CHECKS = 1')
|
||
|
||
if commit:
|
||
test.commit()
|
||
print('\n✅ COMMITTED to test DB')
|
||
else:
|
||
test.rollback()
|
||
print('\n🔎 Rolled back (use --commit to persist)')
|
||
|
||
except Exception as e:
|
||
test.rollback()
|
||
print(f'\n❌ Error: {e}')
|
||
raise
|
||
finally:
|
||
test.close()
|
||
|
||
|
||
if __name__ == '__main__':
|
||
main()
|