
Use Alembic for database version control and migration management

drggboy 4 weeks ago
commit bcdf820419
9 changed files with 1051 additions and 0 deletions

  1. alembic.ini  +89 −0
  2. app/database.py  +19 −0
  3. app/models/orm_models.py  BIN
  4. db_migrate.py  +218 −0
  5. migrations/README.md  +167 −0
  6. migrations/env.py  +100 −0
  7. migrations/script.py.mako  +27 −0
  8. migrations/versions/66f4104d9498_init.py  +296 −0
  9. reset_db.py  +135 −0

+ 89 - 0
alembic.ini

@@ -0,0 +1,89 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = migrations
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to migrations/versions.  When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat migrations/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = postgresql://%(DB_USER)s:%(DB_PASSWORD)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s
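+# Note: the %(DB_USER)s-style placeholders above are not read from the shell
+# environment by Alembic itself; migrations/env.py fills them in at runtime
+# via config.set_section_option().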
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts.  See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stdout,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S

+ 19 - 0
app/database.py

@@ -22,6 +22,7 @@ DB_PASSWORD = os.getenv("DB_PASSWORD", "root")
 DB_HOST = os.getenv("DB_HOST", "localhost")
 DB_PORT = os.getenv("DB_PORT", "5432")
 DB_NAME = os.getenv("DB_NAME", "testdb")
+print(f"Database config: {DB_USER}@{DB_HOST}:{DB_PORT}/{DB_NAME}")  # avoid echoing DB_PASSWORD
 
 # Build the database connection URL
 SQLALCHEMY_DATABASE_URL = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
@@ -71,3 +72,21 @@ def get_db():
         raise
     finally:
         db.close()
+
+from sqlalchemy import text  # needed to execute plain SQL strings on SQLAlchemy 1.4+
+
+def execute_sql(sql_statement):
+    """
+    Execute a raw SQL statement.
+    
+    Args:
+        sql_statement: the SQL statement to execute
+    
+    Returns:
+        The execution result
+    """
+    try:
+        with engine.begin() as connection:
+            # Plain strings are not executable on SQLAlchemy 1.4+; wrap them in text()
+            if isinstance(sql_statement, str):
+                sql_statement = text(sql_statement)
+            result = connection.execute(sql_statement)
+            return result
+    except SQLAlchemyError as e:
+        logger.error(f"Failed to execute SQL statement: {str(e)}")
+        raise
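+
+# Illustrative usage (example statement only; plain strings are wrapped in text() internally):
+#   execute_sql('SELECT COUNT(*) FROM surveydata')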

BIN
app/models/orm_models.py


+ 218 - 0
db_migrate.py

@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+数据库迁移辅助脚本
+
+此脚本提供简单的命令行接口来执行常见的数据库迁移操作。
+"""
+
+import argparse
+import os
+import sys
+import importlib
+import traceback
+from alembic.config import Config
+from alembic import command
+
+# Set the environment variables directly so that every component uses the
+# same database connection settings
+os.environ["DB_USER"] = "postgres"
+os.environ["DB_PASSWORD"] = "123456789Qq"
+os.environ["DB_HOST"] = "localhost"
+os.environ["DB_PORT"] = "5432"
+os.environ["DB_NAME"] = "soilgd"
+
+def get_alembic_config():
+    """
+    Get the Alembic configuration.
+    
+    Returns:
+        An Alembic Config object
+    """
+    # Locate alembic.ini relative to this script
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    alembic_ini_path = os.path.join(script_dir, 'alembic.ini')
+    print(f"Using config file: {alembic_ini_path}")
+    
+    # Create the config object
+    alembic_cfg = Config(alembic_ini_path)
+    return alembic_cfg
+
+
+def create_migration(message, auto=True):
+    """
+    Create a new migration.
+    
+    Args:
+        message: migration description
+        auto: whether to autogenerate the migration contents
+    """
+    cfg = get_alembic_config()
+    try:
+        print(f"Creating migration: {message}, autogenerate: {auto}")
+        command.revision(cfg, message=message, autogenerate=auto)
+        print(f"Successfully created migration '{message}'")
+        return 0
+    except Exception as e:
+        print(f"Failed to create migration: {str(e)}")
+        print("Traceback:")
+        traceback.print_exc()
+        return 1
+
+
+def upgrade(target="head"):
+    """
+    Upgrade the database.
+    
+    Args:
+        target: target revision, defaults to the latest ("head")
+    """
+    cfg = get_alembic_config()
+    try:
+        command.upgrade(cfg, target)
+        print(f"Successfully upgraded the database to '{target}'")
+        return 0
+    except Exception as e:
+        print(f"Failed to upgrade the database: {str(e)}")
+        return 1
+
+
+def downgrade(target="-1"):
+    """
+    Downgrade the database.
+    
+    Args:
+        target: target revision, defaults to the previous one
+    """
+    cfg = get_alembic_config()
+    try:
+        command.downgrade(cfg, target)
+        print(f"Successfully downgraded the database to '{target}'")
+        return 0
+    except Exception as e:
+        print(f"Failed to downgrade the database: {str(e)}")
+        return 1
+
+
+def show_history():
+    """Show the migration history."""
+    cfg = get_alembic_config()
+    try:
+        command.history(cfg)
+        return 0
+    except Exception as e:
+        print(f"Failed to show the history: {str(e)}")
+        return 1
+
+
+def show_current():
+    """Show the current version."""
+    cfg = get_alembic_config()
+    try:
+        command.current(cfg)
+        return 0
+    except Exception as e:
+        print(f"Failed to show the current version: {str(e)}")
+        return 1
+
+
+def stamp_revision(target="head"):
+    """
+    Stamp the database as being at the given revision without running any migrations.
+    
+    Args:
+        target: target revision, defaults to the latest ("head")
+    """
+    cfg = get_alembic_config()
+    try:
+        command.stamp(cfg, target)
+        print(f"Successfully stamped the database as '{target}'")
+        return 0
+    except Exception as e:
+        print(f"Failed to stamp the database version: {str(e)}")
+        return 1
+
+
+def execute_sql(sql_statement):
+    """
+    Execute a SQL statement directly.
+    
+    Args:
+        sql_statement: the SQL statement to execute
+    """
+    try:
+        from app.database import execute_sql
+        execute_sql(sql_statement)
+        print(f"Successfully executed SQL: {sql_statement}")
+        return 0
+    except Exception as e:
+        print(f"Failed to execute SQL: {str(e)}")
+        return 1
+
+
+def main():
+    """Entry point."""
+    parser = argparse.ArgumentParser(description='Database migration helper')
+    subparsers = parser.add_subparsers(dest='command', help='command')
+    
+    # Check that alembic is installed
+    try:
+        importlib.import_module('alembic')
+    except ImportError:
+        print("Error: alembic module not found; install it first: pip install alembic")
+        return 1
+    
+    # Create a migration
+    create_parser = subparsers.add_parser('create', help='create a new migration')
+    create_parser.add_argument('message', help='migration description')
+    create_parser.add_argument('--manual', action='store_true', help='create the migration manually (no autogenerate)')
+    
+    # Upgrade the database
+    upgrade_parser = subparsers.add_parser('upgrade', help='upgrade the database')
+    upgrade_parser.add_argument('target', nargs='?', default='head', help='target revision, defaults to the latest')
+    
+    # Downgrade the database
+    downgrade_parser = subparsers.add_parser('downgrade', help='downgrade the database')
+    downgrade_parser.add_argument('target', nargs='?', default='-1', help='target revision, defaults to the previous one')
+    
+    # Stamp the database version
+    stamp_parser = subparsers.add_parser('stamp', help='stamp the database version without running migrations')
+    stamp_parser.add_argument('target', nargs='?', default='head', help='target revision, defaults to the latest')
+    
+    # Execute SQL
+    sql_parser = subparsers.add_parser('sql', help='execute a SQL statement')
+    sql_parser.add_argument('statement', help='the SQL statement to execute')
+    
+    # Show the history
+    subparsers.add_parser('history', help='show the migration history')
+    
+    # Show the current version
+    subparsers.add_parser('current', help='show the current version')
+    
+    args = parser.parse_args()
+    
+    if args.command == 'create':
+        return create_migration(args.message, not args.manual)
+    elif args.command == 'upgrade':
+        return upgrade(args.target)
+    elif args.command == 'downgrade':
+        return downgrade(args.target)
+    elif args.command == 'stamp':
+        return stamp_revision(args.target)
+    elif args.command == 'sql':
+        return execute_sql(args.statement)
+    elif args.command == 'history':
+        return show_history()
+    elif args.command == 'current':
+        return show_current()
+    else:
+        parser.print_help()
+        return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
+

+ 167 - 0
migrations/README.md

@@ -0,0 +1,167 @@
+# Database Migration Guide
+
+This project uses Alembic for database version control and migration management.
+
+## Using the db_migrate.py script
+
+The project ships with a `db_migrate.py` helper script that simplifies migration
+operations. It is recommended over invoking alembic commands directly.
+
+### Creating a new migration
+
+Autogenerate a migration script (from model changes):
+
+```bash
+python db_migrate.py create "describe the change"
+```
+
+Create an empty migration script manually:
+
+```bash
+python db_migrate.py create "describe the change" --manual
+```
+
+### Applying migrations
+
+Upgrade to the latest version:
+
+```bash
+python db_migrate.py upgrade
+```
+
+Upgrade to a specific version:
+
+```bash
+python db_migrate.py upgrade <revision>
+```
+
+### Downgrading
+
+Downgrade to the previous version:
+
+```bash
+python db_migrate.py downgrade
+```
+
+Downgrade to a specific version:
+
+```bash
+python db_migrate.py downgrade <revision>
+```
+
+### Stamping the database version
+
+Mark the database as being at a given version without actually running migrations:
+
+```bash
+python db_migrate.py stamp head
+```
+
+This is especially useful after restoring the database from a SQL backup, since
+it avoids re-running migrations whose effects are already present in the
+restored schema.
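+
+For example (illustrative; assumes a plain SQL dump named `backup.sql`):
+
+```bash
+psql -U postgres -d soilgd -f backup.sql
+python db_migrate.py stamp head
+```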
+
+### Inspecting migration state
+
+Show the current version:
+
+```bash
+python db_migrate.py current
+```
+
+Show the migration history:
+
+```bash
+python db_migrate.py history
+```
+
+### Executing SQL statements
+
+Execute a SQL statement directly:
+
+```bash
+python db_migrate.py sql "SQL statement"
+```
+
+## Raw Alembic command reference
+
+The commands below use Alembic directly and are listed for reference only;
+prefer the `db_migrate.py` script described above.
+
+### Creating a new migration
+
+Autogenerate a migration script (from model changes):
+
+```bash
+alembic revision --autogenerate -m "describe the change"
+```
+
+Create an empty migration script manually:
+
+```bash
+alembic revision -m "describe the change"
+```
+
+### Applying migrations
+
+Upgrade to the latest version:
+
+```bash
+alembic upgrade head
+```
+
+Upgrade to a specific version:
+
+```bash
+alembic upgrade <revision>
+```
+
+Upgrade by a number of steps:
+
+```bash
+alembic upgrade +<n>
+```
+
+### Downgrading
+
+Downgrade to the previous version:
+
+```bash
+alembic downgrade -1
+```
+
+Downgrade to a specific version:
+
+```bash
+alembic downgrade <revision>
+```
+
+Downgrade by a number of steps:
+
+```bash
+alembic downgrade -<n>
+```
+
+### Inspecting migration state
+
+Show the current version:
+
+```bash
+alembic current
+```
+
+Show the migration history:
+
+```bash
+alembic history
+```
+
+Show the SQL for a given migration range:
+
+```bash
+alembic upgrade <revision>:head --sql
+```
+
+## Migration workflow
+
+1. Modify the SQLAlchemy models
+2. Generate a migration script: `python db_migrate.py create "describe the change"`
+3. Review the generated script for correctness (see the sketch below)
+4. Apply the migration: `python db_migrate.py upgrade`
+5. Commit the migration script to version control
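+
+As a sketch of what step 3 involves, the script generated in step 2 might
+contain operations like the following (hypothetical column, shown for
+illustration only; `op` and `sa` are imported by the template in
+`migrations/script.py.mako`):
+
+```python
+def upgrade():
+    # Review autogenerated operations before applying them
+    op.add_column('surveydata', sa.Column('sampled_at', sa.DateTime(), nullable=True))
+
+
+def downgrade():
+    op.drop_column('surveydata', 'sampled_at')
+```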

+ 100 - 0
migrations/env.py

@@ -0,0 +1,100 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+# Type import to work around autogenerate compatibility issues
+from sqlalchemy.types import NullType
+# Import GeoAlchemy2 so its custom column types are available to migrations
+import geoalchemy2
+
+from alembic import context
+import os
+import sys
+
+# Add the project root to the Python path
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+# Import Base (and, via it, the models) from orm_models
+from app.models.orm_models import Base
+# Import the database connection settings
+from app.database import DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Set the database connection URL parameters
+section = config.config_ini_section
+config.set_section_option(section, 'DB_USER', DB_USER)
+config.set_section_option(section, 'DB_PASSWORD', DB_PASSWORD)
+config.set_section_option(section, 'DB_HOST', DB_HOST)
+config.set_section_option(section, 'DB_PORT', DB_PORT)
+config.set_section_option(section, 'DB_NAME', DB_NAME)
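+# The values set above replace the %(DB_USER)s-style placeholders in the
+# sqlalchemy.url entry of alembic.ini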
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()

+ 27 - 0
migrations/script.py.mako

@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    """升级数据库到当前版本"""
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    """将数据库降级到上一版本"""
+    ${downgrades if downgrades else "pass"}

+ 296 - 0
migrations/versions/66f4104d9498_init.py

@@ -0,0 +1,296 @@
+"""init
+
+Revision ID: 66f4104d9498
+Revises: 
+Create Date: 2025-05-10 18:05:19.822335
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import geoalchemy2
+
+
+# revision identifiers, used by Alembic.
+revision = '66f4104d9498'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """升级数据库到当前版本"""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('Conventional_land_information',
+    sa.Column('Lon', sa.Float(precision=53), nullable=False, comment='Longitude (°)'),
+    sa.Column('Lat', sa.Float(precision=53), nullable=False, comment='Latitude (°)'),
+    sa.Column('Altitude', sa.Numeric(), nullable=True, comment='Altitude (m)'),
+    sa.Column('Landform', sa.Text(), nullable=True, comment='Landform type'),
+    sa.Column('Landuse', sa.Text(), nullable=True, comment='Land use type'),
+    sa.Column('Soiltype', sa.Text(), nullable=True, comment='Soil type'),
+    sa.Column('Lithologic_type', sa.Text(), nullable=True, comment='Lithologic type'),
+    sa.PrimaryKeyConstraint('Lon', 'Lat'),
+    comment='Conventional land information'
+    )
+    op.create_table('Point_information',
+    sa.Column('Lon', sa.Float(precision=53), nullable=False, comment='Longitude (°)'),
+    sa.Column('Lat', sa.Float(precision=53), nullable=False, comment='Latitude (°)'),
+    sa.Column('Province', sa.Text(), nullable=False),
+    sa.Column('City', sa.Text(), nullable=True, comment='City'),
+    sa.Column('District', sa.Text(), nullable=True, comment='County'),
+    sa.Column('Point_id', sa.Integer(), autoincrement=True, nullable=False, comment='Unique ID'),
+    sa.PrimaryKeyConstraint('Point_id'),
+    comment='Point information'
+    )
+    op.create_table('fifty_thousand_survey_data',
+    sa.Column('gid', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('id', sa.Float(precision=53), nullable=True),
+    sa.Column('dwmc', sa.String(length=254), nullable=True),
+    sa.Column('r_dwmc', sa.String(length=254), nullable=True),
+    sa.Column('lat', sa.Float(precision=53), nullable=True),
+    sa.Column('lon', sa.Float(precision=53), nullable=True),
+    sa.Column('xmc', sa.String(length=254), nullable=True),
+    sa.Column('zmc', sa.String(length=254), nullable=True),
+    sa.Column('cmc', sa.String(length=254), nullable=True),
+    sa.Column('tql_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_as', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('ph', sa.Float(precision=53), nullable=True),
+    sa.Column('zzm_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('zzm_as', sa.Float(precision=53), nullable=True),
+    sa.Column('zzm_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('wzm_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('wzm_as', sa.Float(precision=53), nullable=True),
+    sa.Column('wzm_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('tcb_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('tcb_as', sa.Float(precision=53), nullable=True),
+    sa.Column('tcb_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('mcb_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('mcb_as', sa.Float(precision=53), nullable=True),
+    sa.Column('mcb_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_as', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('z_mzs_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('z_mzs_as', sa.Float(precision=53), nullable=True),
+    sa.Column('z_mzs_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('w_mzs_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('w_mzs_as', sa.Float(precision=53), nullable=True),
+    sa.Column('w_mzs_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_max', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_max', sa.Float(precision=53), nullable=True),
+    sa.Column('s_tfj_max', sa.Float(precision=53), nullable=True),
+    sa.Column('s_mfj_max', sa.Float(precision=53), nullable=True),
+    sa.Column('h_tfj_max', sa.Float(precision=53), nullable=True),
+    sa.Column('h_mfj_max', sa.Float(precision=53), nullable=True),
+    sa.Column('s_pdtj', sa.String(length=254), nullable=True),
+    sa.Column('h_pdtj', sa.String(length=254), nullable=True),
+    sa.Column('s_xtfx', sa.String(length=254), nullable=True),
+    sa.Column('h_xtfx', sa.String(length=254), nullable=True),
+    sa.Column('mzs_cd_max', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_as_max', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_pd_max', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_cd_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_as_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_pb_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_cd_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_as_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_pb_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('s_cd_pdtj', sa.String(length=254), nullable=True),
+    sa.Column('s_cd_pdtj1', sa.String(length=254), nullable=True),
+    sa.Column('s_pb_pdtj', sa.String(length=254), nullable=True),
+    sa.Column('s_cd_xtfx', sa.String(length=254), nullable=True),
+    sa.Column('s_as_xtfx', sa.String(length=254), nullable=True),
+    sa.Column('s_pb_xtfx', sa.String(length=254), nullable=True),
+    sa.Column('s_cd_fz', sa.Float(precision=53), nullable=True),
+    sa.Column('s_as_fz', sa.Float(precision=53), nullable=True),
+    sa.Column('cd_as_pdtj', sa.String(length=254), nullable=True),
+    sa.Column('cd_as_fhwr', sa.String(length=254), nullable=True),
+    sa.Column('wrlx', sa.String(length=254), nullable=True),
+    sa.Column('wrlx_pb', sa.String(length=254), nullable=True),
+    sa.Column('cec__cmol_', sa.Float(precision=53), nullable=True),
+    sa.Column('tom_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_as', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_as', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('nl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('fl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('sl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('nl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('fl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('sl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('trlx', sa.String(length=254), nullable=True),
+    sa.Column('dtpa_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('lin_suan_er_qing_an', sa.Float(precision=53), nullable=True),
+    sa.Column('dtpa_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_as', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_cr', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_hg', sa.Float(precision=53), nullable=True),
+    sa.Column('f_2_00mm', sa.String(length=254), nullable=True),
+    sa.Column('2_1mm', sa.Float(precision=53), nullable=True),
+    sa.Column('1_0_5mm', sa.Float(precision=53), nullable=True),
+    sa.Column('0_5_0_25mm', sa.Float(precision=53), nullable=True),
+    sa.Column('0_25_0_05m', sa.Float(precision=53), nullable=True),
+    sa.Column('0_05_0_02m', sa.Float(precision=53), nullable=True),
+    sa.Column('0_02_0_002', sa.Float(precision=53), nullable=True),
+    sa.Column('f_0_002mm', sa.Float(precision=53), nullable=True),
+    sa.Column('ph_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('ph_fj_2', sa.Float(precision=53), nullable=True),
+    sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='POINT', from_text='ST_GeomFromEWKT', name='geometry'), nullable=True),
+    sa.PrimaryKeyConstraint('gid')
+    )
+    # Index creation is commented out (the matching drops in downgrade() are disabled as well)
+    # op.create_index('idx_fifty_thousand_survey_data_geom', 'fifty_thousand_survey_data', ['geom'], unique=False, postgresql_using='gist')
+    # op.create_index(op.f('ix_fifty_thousand_survey_data_geom'), 'fifty_thousand_survey_data', ['geom'], unique=False)
+    op.create_table('raster_table',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('rast', geoalchemy2.types.Raster(from_text='raster', name='raster'), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    # op.create_index(op.f('ix_raster_table_rast'), 'raster_table', ['rast'], unique=False)
+    op.create_table('surveydata',
+    sa.Column('id', sa.Float(precision=53), autoincrement=True, nullable=False),
+    sa.Column('dwmc', sa.String(length=50), nullable=True),
+    sa.Column('r_dwmc', sa.String(length=50), nullable=True),
+    sa.Column('lat', sa.Numeric(precision=9, scale=6), nullable=True),
+    sa.Column('lon', sa.Numeric(precision=9, scale=6), nullable=True),
+    sa.Column('xmc', sa.String(length=50), nullable=True),
+    sa.Column('zmc', sa.String(length=50), nullable=True),
+    sa.Column('cmc', sa.String(length=50), nullable=True),
+    sa.Column('tql_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tql_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tql_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('ph', sa.Numeric(precision=4, scale=2), nullable=True),
+    sa.Column('zzm_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('zzm_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('zzm_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('wzm_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('wzm_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('wzm_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tcb_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tcb_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tcb_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('mcb_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('mcb_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('mcb_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tzs_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tzs_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tzs_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('z_mzs_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('z_mzs_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('z_mzs_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('w_mzs_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('w_mzs_as', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('w_mzs_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tzs_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('mzs_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('s_tfj_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('s_mfj_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('h_tfj_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('h_mfj_max', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('s_pdtj', sa.String(length=10), nullable=True),
+    sa.Column('h_pdtj', sa.String(length=10), nullable=True),
+    sa.Column('s_xtfx', sa.String(length=50), nullable=True),
+    sa.Column('h_xtfx', sa.String(length=50), nullable=True),
+    sa.Column('cec_cmol', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('tom_g_kg', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('trlx', sa.String(length=50), nullable=True),
+    sa.Column('dtpa_cd', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('dtpa_pb', sa.Numeric(precision=10, scale=6), nullable=True),
+    sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='POINT', srid=4326, from_text='ST_GeomFromEWKT', name='geometry'), nullable=True),
+    sa.Column('mzs_cd_max', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_as_max', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_pd_max', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_cd_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_as_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('tzs_pb_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_cd_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_as_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('mzs_pb_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('s_cd_pdtj', sa.String(), nullable=True),
+    sa.Column('s_cd_pdtj1', sa.String(), nullable=True),
+    sa.Column('s_pb_pdtj', sa.String(), nullable=True),
+    sa.Column('s_cd_xtfx', sa.String(), nullable=True),
+    sa.Column('s_as_xtfx', sa.String(), nullable=True),
+    sa.Column('s_pb_xtfx', sa.String(), nullable=True),
+    sa.Column('s_cd_fz', sa.Float(precision=53), nullable=True),
+    sa.Column('s_as_fz', sa.Float(precision=53), nullable=True),
+    sa.Column('cd_as_pdtj', sa.String(), nullable=True),
+    sa.Column('cd_as_fhwr', sa.String(), nullable=True),
+    sa.Column('wrlx', sa.String(), nullable=True),
+    sa.Column('wrlx_pb', sa.String(), nullable=True),
+    sa.Column('cec__cmol_', sa.Float(precision=53), nullable=True),
+    sa.Column('tom_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_as', sa.Float(precision=53), nullable=True),
+    sa.Column('mmax_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_as', sa.Float(precision=53), nullable=True),
+    sa.Column('bcf_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('nl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('fl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('sl_g_kg_', sa.Float(precision=53), nullable=True),
+    sa.Column('nl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('fl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('sl_zb', sa.Float(precision=53), nullable=True),
+    sa.Column('lin_suan_er_qing_an', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_cd', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_as', sa.Float(precision=53), nullable=True),
+    sa.Column('zb_pb', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_cr', sa.Float(precision=53), nullable=True),
+    sa.Column('tql_hg', sa.Float(precision=53), nullable=True),
+    sa.Column('f_2_00mm', sa.String(), nullable=True),
+    sa.Column('2_1mm', sa.Float(precision=53), nullable=True),
+    sa.Column('1_0_5mm', sa.Float(precision=53), nullable=True),
+    sa.Column('0_5_0_25mm', sa.Float(precision=53), nullable=True),
+    sa.Column('0_25_0_05m', sa.Float(precision=53), nullable=True),
+    sa.Column('0_05_0_02m', sa.Float(precision=53), nullable=True),
+    sa.Column('0_02_0_002', sa.Float(precision=53), nullable=True),
+    sa.Column('f_0_002mm', sa.Float(precision=53), nullable=True),
+    sa.Column('ph_fj', sa.Float(precision=53), nullable=True),
+    sa.Column('ph_fj_2', sa.Float(precision=53), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    # op.create_index('idx_surveydata_geom', 'surveydata', ['geom'], unique=False, postgresql_using='gist')
+    # op.create_index(op.f('ix_surveydata_geom'), 'surveydata', ['geom'], unique=False)
+    op.create_table('unit_ceil',
+    sa.Column('gid', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('OBJECTID', sa.Float(precision=53), nullable=True),
+    sa.Column('BSM', sa.String(length=20), nullable=True),
+    sa.Column('PXZQDM', sa.String(length=2), nullable=True),
+    sa.Column('PXZQMC', sa.String(length=50), nullable=True),
+    sa.Column('CXZQDM', sa.String(length=4), nullable=True),
+    sa.Column('CXZQMC', sa.String(length=50), nullable=True),
+    sa.Column('SUM_NYDTBM', sa.Numeric(), nullable=True),
+    sa.Column('XCDYBM', sa.String(length=20), nullable=True),
+    sa.Column('Shape_Leng', sa.Numeric(), nullable=True),
+    sa.Column('Shape_Area', sa.Numeric(), nullable=True),
+    sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='GEOMETRYZM', srid=4490, dimension=4, from_text='ST_GeomFromEWKT', name='geometry'), nullable=True),
+    sa.PrimaryKeyConstraint('gid')
+    )
+    # op.create_index('idx_unit_ceil_geom', 'unit_ceil', ['geom'], unique=False, postgresql_using='gist')
+    # op.create_index(op.f('ix_unit_ceil_geom'), 'unit_ceil', ['geom'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    """Downgrade the database to the previous revision"""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Index drops are commented out to mirror the disabled index creation in upgrade()
+    # op.drop_index(op.f('ix_unit_ceil_geom'), table_name='unit_ceil')
+    # op.drop_index('idx_unit_ceil_geom', table_name='unit_ceil', postgresql_using='gist')
+    op.drop_table('unit_ceil')
+    # op.drop_index(op.f('ix_surveydata_geom'), table_name='surveydata')
+    # op.drop_index('idx_surveydata_geom', table_name='surveydata', postgresql_using='gist')
+    op.drop_table('surveydata')
+    # op.drop_index(op.f('ix_raster_table_rast'), table_name='raster_table')
+    op.drop_table('raster_table')
+    # op.drop_index(op.f('ix_fifty_thousand_survey_data_geom'), table_name='fifty_thousand_survey_data')
+    # op.drop_index('idx_fifty_thousand_survey_data_geom', table_name='fifty_thousand_survey_data', postgresql_using='gist')
+    op.drop_table('fifty_thousand_survey_data')
+    op.drop_table('Point_information')
+    op.drop_table('Conventional_land_information')
+    # ### end Alembic commands ###

+ 135 - 0
reset_db.py

@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+数据库重置脚本
+
+这个脚本用于清理数据库并准备新的迁移
+"""
+
+import os
+import sys
+from sqlalchemy import text, inspect, create_engine
+import re
+import logging
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+# Database connection settings
+DB_USER = "postgres"
+DB_PASSWORD = "123456789Qq"
+DB_HOST = "localhost"
+DB_PORT = "5432"
+DB_NAME = "soilgd"
+print(f"连接到数据库: {DB_USER}@{DB_HOST}:{DB_PORT}/{DB_NAME}")
+
+# Build the database connection URL
+SQLALCHEMY_DATABASE_URL = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
+
+# Create the database engine
+engine = create_engine(
+    SQLALCHEMY_DATABASE_URL,
+    pool_size=5,
+    max_overflow=10,
+    pool_timeout=30,
+    pool_recycle=1800
+)
+
+# Tables to drop (in order)
+TABLES_TO_DROP = [
+    'fifty_thousand_survey_data',
+    'surveydata',
+    'unit_ceil',
+    'raster_table',
+    'Conventional_land_information',
+    'Point_information',
+    'alembic_version'
+]
+
+# System tables that must not be dropped
+SYSTEM_TABLES = [
+    'spatial_ref_sys',  # PostGIS system table
+    'geography_columns',
+    'geometry_columns',
+    'raster_columns',
+    'raster_overviews'
+]
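+# These tables are created by the PostGIS extension; dropping them would break
+# spatial support, so get_all_tables() filters them out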
+
+def get_all_tables():
+    """Return the names of all tables in the database (excluding system tables)."""
+    inspector = inspect(engine)
+    tables = inspector.get_table_names()
+    # Exclude system tables
+    return [t for t in tables if t not in SYSTEM_TABLES]
+
+def get_table_case_insensitive(table_name):
+    """Find tables whose name matches case-insensitively."""
+    all_tables = get_all_tables()
+    return [t for t in all_tables if t.lower() == table_name.lower()]
+
+def reset_database():
+    """Reset the database by dropping all target tables."""
+    print("\n==== Starting database reset ====")
+
+    # List all tables before dropping
+    before_tables = get_all_tables()
+    print(f"\nTables before dropping: {before_tables}")
+    
+    try:
+        for table_name in TABLES_TO_DROP:
+            # Find the table's actual name in the database (case may differ)
+            actual_tables = get_table_case_insensitive(table_name)
+
+            if not actual_tables:
+                print(f"Table '{table_name}' does not exist, skipping")
+                continue
+
+            for actual_table in actual_tables:
+                try:
+                    with engine.begin() as conn:
+                        print(f"\nTrying to drop table '{actual_table}'...")
+                        # Quote the table name to preserve its case
+                        sql = f'DROP TABLE IF EXISTS "{actual_table}" CASCADE'
+                        conn.execute(text(sql))
+                        print(f"Executed SQL: {sql}")
+
+                    # Verify the drop after the transaction has committed; checking
+                    # inside the with-block would query over a separate connection
+                    # that cannot yet see the uncommitted DROP
+                    check_tables = get_table_case_insensitive(actual_table)
+                    if not check_tables:
+                        print(f"✓ Table '{actual_table}' was dropped successfully")
+                    else:
+                        print(f"✗ Failed to drop table '{actual_table}': it still exists")
+                except Exception as e:
+                    print(f"Error while dropping table '{actual_table}': {str(e)}")
+        
+        # List all tables after the drops
+        after_tables = get_all_tables()
+        print("\n==== Drop operations finished ====")
+        print(f"Tables before: {len(before_tables)} - {before_tables}")
+        print(f"Tables after: {len(after_tables)} - {after_tables}")
+
+        # Find tables that should have been dropped but still exist
+        remaining_tables = []
+        for table in before_tables:
+            # Check whether this table was scheduled for deletion
+            should_be_deleted = any(re.match(f"^{t}$", table, re.IGNORECASE) for t in TABLES_TO_DROP)
+            if should_be_deleted and table in after_tables:
+                remaining_tables.append(table)
+
+        if remaining_tables:
+            print(f"\nWarning: the following tables should have been dropped but still exist: {remaining_tables}")
+            print("This may be caused by insufficient permissions or table dependencies")
+        else:
+            print("\nAll target tables were dropped successfully")
+
+        print("\n==== Database reset complete ====")
+        return 0
+    except Exception as e:
+        print(f"\nError while resetting the database: {str(e)}")
+        return 1
+
+if __name__ == '__main__':
+    sys.exit(reset_database())