#!/bin/bash

echo "☁️ Exporting data from TiDB Cloud..."

# Check if .env exists
if [ ! -f .env ]; then
    echo "❌ .env file not found!"
    echo "📝 Please create .env file with your TiDB Cloud credentials"
    exit 1
fi
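
# For reference, a .env for this script might look like the following
# (placeholder values, not real credentials; TABLES is a comma-separated
# list of the tables to export and the table names here are hypothetical):
#
#   TEST_DB_HOST=gateway01.us-west-2.prod.aws.tidbcloud.com
#   TEST_DB_PORT=4000
#   TEST_DB_USER=xxxxxxxx.root
#   TEST_DB_PASSWORD=changeme
#   DATABASE_NAME=workflow_local
#   TABLES=users,workflows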

# Source environment variables
source .env

# Validate required variables (TABLES drives the export loops below)
if [ -z "$TEST_DB_HOST" ] || [ -z "$TEST_DB_USER" ] || [ -z "$TEST_DB_PASSWORD" ] || [ -z "$TABLES" ]; then
    echo "❌ Missing required settings in .env"
    echo "📝 Required: TEST_DB_HOST, TEST_DB_USER, TEST_DB_PASSWORD, TABLES"
    exit 1
fi

# Create export directory
EXPORT_DIR="/tmp/tidb-cloud-export"
mkdir -p "$EXPORT_DIR"

# Test connection
echo "🔍 Testing connection to TiDB Cloud..."
if ! mysql -h "$TEST_DB_HOST" -P "${TEST_DB_PORT:-4000}" -u "$TEST_DB_USER" -p"$TEST_DB_PASSWORD" -e "SELECT 1" >/dev/null 2>&1; then
    echo "❌ Cannot connect to TiDB Cloud"
    exit 1
fi
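
# Note: TiDB Cloud Serverless requires TLS. If the plain connection above is
# rejected, a TLS variant along these lines may be needed (the flags require a
# reasonably recent mysql client, and the CA path below is the Debian/Ubuntu
# bundle - an assumption about your system):
#
#   mysql --ssl-mode=VERIFY_IDENTITY --ssl-ca=/etc/ssl/certs/ca-certificates.crt \
#       -h "$TEST_DB_HOST" -P "${TEST_DB_PORT:-4000}" -u "$TEST_DB_USER" -p"$TEST_DB_PASSWORD" -e "SELECT 1"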

echo "✅ Connected successfully"

# Export schema using SQL queries
echo "📦 Exporting schema..."

# Create database statement
echo "CREATE DATABASE IF NOT EXISTS \`${DATABASE_NAME:-workflow_local}\`;
USE \`${DATABASE_NAME:-workflow_local}\`;
" > "$EXPORT_DIR/schema.sql"

# Get table schemas
for table in ${TABLES//,/ }; do
    echo "-- Table: $table" >> "$EXPORT_DIR/schema.sql"
    # SHOW CREATE TABLE in batch mode returns one line per table with the DDL
    # in column 2 and newlines escaped as \n, so unescape them after cutting
    mysql -h "$TEST_DB_HOST" -P "${TEST_DB_PORT:-4000}" -u "$TEST_DB_USER" -p"$TEST_DB_PASSWORD" \
        -N -s -e "SHOW CREATE TABLE \`${DATABASE_NAME:-workflow_local}\`.\`$table\`;" \
        | cut -f2 | sed 's/\\n/\n/g' >> "$EXPORT_DIR/schema.sql"
    echo ";" >> "$EXPORT_DIR/schema.sql"
    echo "" >> "$EXPORT_DIR/schema.sql"
done

# Check if export was successful (the CREATE DATABASE header is always
# written, so look for at least one CREATE TABLE statement rather than
# just a non-empty file)
if ! grep -q "CREATE TABLE" "$EXPORT_DIR/schema.sql"; then
    echo "❌ Schema export failed - no tables exported"
    exit 1
fi
echo "✅ Schema exported to $EXPORT_DIR/schema.sql"

# Export data using SQL
echo "📦 Exporting data..."

# Clear data file
> "$EXPORT_DIR/data.sql"

# Export data for each table
for table in ${TABLES//,/ }; do
    echo "-- Data for table: $table" >> "$EXPORT_DIR/data.sql"
    # Simple approach: dump each table as tab-separated values (mysql batch
    # output; the sed strips the header row) and convert to INSERTs later
    mysql -h "$TEST_DB_HOST" -P "${TEST_DB_PORT:-4000}" -u "$TEST_DB_USER" -p"$TEST_DB_PASSWORD" \
        -e "SELECT * FROM \`${DATABASE_NAME:-workflow_local}\`.\`$table\`;" \
        | sed '1d' > "$EXPORT_DIR/${table}.csv"

    # If we have data, record which file still needs converting
    if [ -s "$EXPORT_DIR/${table}.csv" ]; then
        # This is a simplified approach - for production use you'd want a more
        # robust CSV to SQL converter; see the commented sketch below this loop
        echo "-- Note: Data export for $table requires manual conversion from CSV" >> "$EXPORT_DIR/data.sql"
        echo "-- CSV file location: $EXPORT_DIR/${table}.csv" >> "$EXPORT_DIR/data.sql"
    fi
done
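
# A minimal sketch of the TSV-to-INSERT conversion referenced above. It quotes
# every column as a string, doubles up embedded single quotes, and assumes no
# tabs, newlines, or NULLs inside the data - a starting point, not a robust
# converter. Uncomment to append generated INSERTs to data.sql:
#
# for table in ${TABLES//,/ }; do
#     [ -s "$EXPORT_DIR/${table}.csv" ] || continue
#     awk -F'\t' -v table="$table" -v q="'" '{
#         row = "INSERT INTO `" table "` VALUES ("
#         for (i = 1; i <= NF; i++) {
#             v = $i
#             gsub(q, q q, v)   # escape single quotes for SQL
#             row = row (i > 1 ? ", " : "") q v q
#         }
#         print row ");"
#     }' "$EXPORT_DIR/${table}.csv" >> "$EXPORT_DIR/data.sql"
# done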

echo "⚠️ Data export completed - CSV files created for manual import"
echo "📂 Export completed successfully!"
echo "   Schema: $EXPORT_DIR/schema.sql"
echo "   Data CSV files:"
for table in ${TABLES//,/ }; do
    echo "   $EXPORT_DIR/${table}.csv"
done