# -*- coding: UTF-8 -*-
|
|
import api
|
|
import config
|
|
import logging
|
|
import utils
|
|
import copy
|
|
import time
|
|
|
|
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
|
|
logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)
|
|
|
|
def get_all_records(client: api.Client, access_token: str, app_token: str, table_id: str):
    """Fetch every record from a bitable table, following pagination.

    Args:
        client: Lark/Feishu API client.
        access_token: Tenant access token used to authenticate the requests.
        app_token: Token of the bitable app containing the table.
        table_id: ID of the table to read.

    Returns:
        list: All record items across every page (empty list if none).
    """
    records = []
    page_token = None
    while True:
        resp = client.get_records_list(access_token, app_token, table_id, page_token=page_token)
        records.extend(resp.get('items') or [])
        page_token = resp.get('page_token')
        # Stop when the API says there are no more pages, or when it claims
        # more pages but omits the cursor — continuing with page_token=None
        # would refetch page 1 forever.
        if not resp.get('has_more') or not page_token:
            break
    return records
|
|
|
|
|
|
def get_all_fields(client: api.Client, access_token: str, app_token: str, table_id: str):
    """Fetch every field definition of a bitable table, following pagination.

    Args:
        client: Lark/Feishu API client.
        access_token: Tenant access token used to authenticate the requests.
        app_token: Token of the bitable app containing the table.
        table_id: ID of the table whose fields are listed.

    Returns:
        list: All field items across every page (empty list if none).
    """
    fields = []
    page_token = None
    while True:
        resp = client.get_fields_list(access_token, app_token, table_id, page_token=page_token)
        fields.extend(resp.get('items') or [])
        page_token = resp.get('page_token')
        # Stop when the API says there are no more pages, or when it claims
        # more pages but omits the cursor — continuing with page_token=None
        # would refetch page 1 forever.
        if not resp.get('has_more') or not page_token:
            break
    return fields
|
|
|
|
|
|
def get_all_tables(client: api.Client, access_token: str, app_token: str):
    """Fetch every table of a bitable app, following pagination.

    Args:
        client: Lark/Feishu API client.
        access_token: Tenant access token used to authenticate the requests.
        app_token: Token of the bitable app whose tables are listed.

    Returns:
        list: All table items across every page (empty list if none).
    """
    tables = []
    page_token = None
    while True:
        resp = client.get_tables_list(access_token, app_token, page_token=page_token)
        tables.extend(resp.get('items') or [])
        page_token = resp.get('page_token')
        # Stop when the API says there are no more pages, or when it claims
        # more pages but omits the cursor — continuing with page_token=None
        # would refetch page 1 forever.
        if not resp.get('has_more') or not page_token:
            break
    return tables
|
|
|
|
|
|
def copy_single_table(client: api.Client, access_token: str, source_app_token: str, source_table_id: str, target_app_token: str, table_name: str):
    """Copy one table (schema and records) from a source bitable app into a target app.

    Creates a new table named `table_name` in the target app, recreates the
    source fields there (updating in place the default fields Feishu adds to a
    new table when names collide), then bulk-inserts all source records.
    Per-field and per-chunk API failures are logged and skipped, so the copy
    is best-effort rather than all-or-nothing.

    Args:
        client: Lark/Feishu API client.
        access_token: Tenant access token.
        source_app_token: Bitable app containing the source table.
        source_table_id: ID of the table to copy.
        target_app_token: Bitable app to copy into.
        table_name: Name for the newly created target table.

    Returns:
        The table_id of the newly created target table.
    """
    logging.info(f"Copying table '{table_name}' ({source_table_id}) from {source_app_token} to {target_app_token}")

    fields = get_all_fields(client, access_token, source_app_token, source_table_id)

    # Create Target Table
    target_table_id = client.create_table(access_token, target_app_token, table_name)

    # A freshly created table comes with default fields; map name -> field_id
    # so matching source fields update them in place instead of duplicating.
    target_initial_fields = get_all_fields(client, access_token, target_app_token, target_table_id)
    initial_field_names = {f['field_name']: f['field_id'] for f in target_initial_fields}

    # Maps source field name -> target field_id for every field we recreated.
    target_field_map = {}

    for f in fields:
        name = f['field_name']
        ftype = f['type']
        fprop = copy.deepcopy(f.get('property'))

        # Strip source-assigned option IDs; the target app generates its own.
        if fprop and 'options' in fprop:
            for opt in fprop['options']:
                opt.pop('id', None)

        if name in initial_field_names:
            field_id = initial_field_names[name]
            try:
                client.update_field(access_token, target_app_token, target_table_id, field_id, name, ftype, fprop)
                target_field_map[name] = field_id
            except utils.LarkException as e:
                logging.error(f"Failed to update default field '{name}': {e}")
        else:
            try:
                resp = client.add_field(access_token, target_app_token, target_table_id, name, ftype, fprop)
                target_field_map[name] = resp['field_id']
            except utils.LarkException as e:
                logging.error(f"Failed to create field '{name}': {e}")

    # Read Records from Source Table
    records = get_all_records(client, access_token, source_app_token, source_table_id)

    # Keep only values for fields that were successfully recreated above.
    target_records = [
        {'fields': {fname: fvalue
                    for fname, fvalue in r.get('fields', {}).items()
                    if fname in target_field_map}}
        for r in records
    ]

    # Feishu's batch-create endpoint caps one request at 500 records.
    CHUNK_SIZE = 500
    for i in range(0, len(target_records), CHUNK_SIZE):
        # Slices produced by a stepped range are never empty, so no guard needed.
        chunk = target_records[i:i + CHUNK_SIZE]
        try:
            client.batch_create_records(access_token, target_app_token, target_table_id, chunk)
        except utils.LarkException as e:
            logging.error(f"Failed to insert chunk of records: {e}")

    logging.info(f"Successfully copied table '{table_name}' to {target_table_id}!")
    return target_table_id
|
|
|
|
|
|
def import_all_tables(client: api.Client, access_token: str, source_app_tokens: list, target_app_token: str):
    """Copy every table from each source bitable app into the target app.

    Errors are caught and logged per source app, so a failure in one source
    does not abort the remaining ones.
    """
    for source_app_token in source_app_tokens:
        logging.info(f"Fetching tables from source app: {source_app_token}")
        try:
            for table in get_all_tables(client, access_token, source_app_token):
                copy_single_table(
                    client,
                    access_token,
                    source_app_token,
                    table['table_id'],
                    target_app_token,
                    table['name'],
                )
        except Exception as e:
            logging.error(f"Failed to copy tables from {source_app_token}: {e}")
|
|
|
|
if __name__ == "__main__":
    client = api.Client(config.LARK_HOST)

    try:
        access_token = client.get_tenant_access_token(config.APP_ID, config.APP_SECRET)
    except Exception as e:
        logging.error(f"Could not get access token: {e}")
        # raise SystemExit instead of calling exit(): the exit() builtin is
        # injected by the site module and is not guaranteed to be available.
        raise SystemExit(1)

    logging.info(f"Using App ID: {config.APP_ID}")

    # Setup source and target tokens. You can put multiple source app tokens in a list.
    SOURCE_APP_TOKENS = [
        config.MERGE_SOURCE_APP_TOKEN_1,  # Or replace with an actual App token string
        # "another_source_app_token",
    ]
    TARGET_APP_TOKEN = config.MERGE_TARGET_APP_TOKEN

    # Refuse to run against the placeholder token shipped with the default config.
    is_placeholder = (config.MERGE_SOURCE_APP_TOKEN_1 == "source_app_token_1")
    if is_placeholder:
        logging.warning("Please update config.py or environment variables with valid MERGE_* tokens to execute.")
    else:
        import_all_tables(client, access_token, SOURCE_APP_TOKENS, TARGET_APP_TOKEN)
|