2025-12-01 17:21:38 +08:00
parent 32fee2b8ab
commit fab8c13cb3
7511 changed files with 996300 additions and 0 deletions

View File

@@ -0,0 +1,53 @@
"""rename api provider description
Revision ID: 00bacef91f18
Revises: 8ec536f3c800
Create Date: 2024-01-07 04:07:34.482983
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
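# This dialect check recurs throughout this commit: PostgreSQL keeps the
# auto-generated DDL, while other dialects (MySQL in practice) substitute
# portable equivalents such as models.types.LongText for sa.Text.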
# revision identifiers, used by Alembic.
revision = '00bacef91f18'
down_revision = '8ec536f3c800'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
batch_op.drop_column('description_str')
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
batch_op.drop_column('description_str')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
batch_op.drop_column('description')
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
batch_op.drop_column('description')
# ### end Alembic commands ###
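Note: models.types.LongText is referenced throughout this commit but defined elsewhere. As a rough sketch (an assumption about, not a copy of, the actual models.types implementation), such a cross-dialect type is typically a SQLAlchemy TypeDecorator that renders as TEXT on PostgreSQL and LONGTEXT on MySQL:

# Hypothetical sketch of a cross-dialect LongText type; not part of this commit.
from sqlalchemy import Text
from sqlalchemy.dialects.mysql import LONGTEXT
from sqlalchemy.types import TypeDecorator

class LongText(TypeDecorator):
    impl = Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        # LONGTEXT on MySQL, plain TEXT on PostgreSQL and everything else.
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(Text())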

View File

@@ -0,0 +1,33 @@
"""add workflow tool label and tool bindings idx
Revision ID: 03f98355ba0e
Revises: 9e98fbaffb88
Create Date: 2024-05-25 07:17:00.539125
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '03f98355ba0e'
down_revision = '9e98fbaffb88'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('tool_label_bindings', schema=None) as batch_op:
batch_op.create_unique_constraint('unique_tool_label_bind', ['tool_id', 'label_name'])
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('label', sa.String(length=255), server_default='', nullable=False))
def downgrade():
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.drop_column('label')
with op.batch_alter_table('tool_label_bindings', schema=None) as batch_op:
batch_op.drop_constraint('unique_tool_label_bind', type_='unique')

View File

@@ -0,0 +1,56 @@
"""update AppModelConfig and add table TracingAppConfig
Revision ID: 04c602f5dc9b
Revises: 4ff534e1eb11
Create Date: 2024-06-12 07:49:07.666510
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '04c602f5dc9b'
down_revision = '4ff534e1eb11'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
)
else:
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tracing_app_configs')
# ### end Alembic commands ###
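Note: models.types.StringUUID appears in most tables in this commit without its definition. A minimal sketch, assuming the usual TypeDecorator approach (hypothetical, not the actual models.types code):

# Hypothetical sketch of a cross-dialect StringUUID type; not part of this commit.
import uuid
from sqlalchemy import CHAR
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.types import TypeDecorator

class StringUUID(TypeDecorator):
    impl = CHAR(36)
    cache_ok = True

    def load_dialect_impl(self, dialect):
        # Native uuid column on PostgreSQL, CHAR(36) on MySQL and others.
        if dialect.name == "postgresql":
            return dialect.type_descriptor(UUID())
        return dialect.type_descriptor(CHAR(36))

    def process_bind_param(self, value, dialect):
        # Normalize to the canonical 36-character string form.
        return str(uuid.UUID(str(value))) if value is not None else None

    def process_result_value(self, value, dialect):
        return str(value) if value is not None else None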

View File

@@ -0,0 +1,72 @@
"""add api tool privacy
Revision ID: 053da0c1d756
Revises: 4829e54d2fee
Create Date: 2024-01-12 06:47:21.656262
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '053da0c1d756'
down_revision = '4829e54d2fee'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_conversation_variables',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('variables_str', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
)
else:
op.create_table('tool_conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('variables_str', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), nullable=True))
batch_op.alter_column('icon',
existing_type=sa.VARCHAR(length=256),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('icon',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=256),
existing_nullable=False)
batch_op.drop_column('privacy_policy')
op.drop_table('tool_conversation_variables')
# ### end Alembic commands ###

View File

@@ -0,0 +1,44 @@
"""remove tool id from model invoke
Revision ID: 114eed84c228
Revises: c71211c8f604
Create Date: 2024-01-10 04:40:57.257824
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '114eed84c228'
down_revision = 'c71211c8f604'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.drop_column('tool_id')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
else:
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
# ### end Alembic commands ###

View File

@@ -0,0 +1,45 @@
"""add dataset permission tenant id
Revision ID: 161cadc1af8d
Revises: 7e6a8693e07a
Create Date: 2024-07-05 14:30:59.472593
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '161cadc1af8d'
down_revision = '7e6a8693e07a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', sa.UUID(), nullable=False))
    else:
        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
batch_op.drop_column('tenant_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""empty message
Revision ID: 16830a790f0f
Revises: 380c6aa5a70d
Create Date: 2024-02-01 08:21:31.111119
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '16830a790f0f'
down_revision = '380c6aa5a70d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_account_joins', schema=None) as batch_op:
batch_op.add_column(sa.Column('current', sa.Boolean(), server_default=sa.text('false'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_account_joins', schema=None) as batch_op:
batch_op.drop_column('current')
# ### end Alembic commands ###

View File

@@ -0,0 +1,128 @@
"""add provider model support
Revision ID: 16fa53d9faec
Revises: 8d2d099ceb74
Create Date: 2023-08-06 16:57:51.248337
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '16fa53d9faec'
down_revision = '8d2d099ceb74'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
)
else:
op.create_table('provider_models',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=True),
sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
)
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.create_index('provider_model_tenant_id_provider_idx', ['tenant_id', 'provider_name'], unique=False)
if _is_pg(conn):
op.create_table('tenant_default_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
)
else:
op.create_table('tenant_default_models',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
)
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.create_index('tenant_default_model_tenant_id_provider_type_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)
if _is_pg(conn):
op.create_table('tenant_preferred_model_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
)
else:
op.create_table('tenant_preferred_model_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
)
with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
batch_op.create_index('tenant_preferred_model_provider_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
batch_op.drop_index('tenant_preferred_model_provider_tenant_provider_idx')
op.drop_table('tenant_preferred_model_providers')
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.drop_index('tenant_default_model_tenant_id_provider_type_idx')
op.drop_table('tenant_default_models')
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.drop_index('provider_model_tenant_id_provider_idx')
op.drop_table('provider_models')
# ### end Alembic commands ###

View File

@@ -0,0 +1,42 @@
"""add-keyworg-table-storage-type
Revision ID: 17b5ab037c40
Revises: a8f9b3c45e4a
Create Date: 2024-04-01 09:48:54.232201
"""
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '17b5ab037c40'
down_revision = 'a8f9b3c45e4a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
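    # PostgreSQL accepts the explicit "'database'::character varying" cast in a
    # server default; other dialects get a plain quoted literal instead.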
if _is_pg(conn):
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False))
else:
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'"), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.drop_column('data_source_type')
# ### end Alembic commands ###

View File

@@ -0,0 +1,37 @@
"""modify provider model name length
Revision ID: 187385f442fc
Revises: 88072f0caa04
Create Date: 2024-01-02 07:18:43.887428
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '187385f442fc'
down_revision = '88072f0caa04'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.alter_column('model_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.alter_column('model_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""update tools original_url length
Revision ID: 1787fbae959a
Revises: eeb2e349e6ac
Create Date: 2024-08-09 08:01:12.817620
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '1787fbae959a'
down_revision = 'eeb2e349e6ac'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('original_url',
existing_type=sa.VARCHAR(length=255),
type_=sa.String(length=2048),
existing_nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('original_url',
existing_type=sa.String(length=2048),
type_=sa.VARCHAR(length=255),
existing_nullable=True)
# ### end Alembic commands ###

View File

@@ -0,0 +1,73 @@
"""support conversation variables
Revision ID: 63a83fcf12ba
Revises: 1787fbae959a
Create Date: 2024-08-13 06:33:07.950379
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '63a83fcf12ba'
down_revision = '1787fbae959a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('workflow__conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
)
else:
op.create_table('workflow__conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('data', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
)
with op.batch_alter_table('workflow__conversation_variables', schema=None) as batch_op:
batch_op.create_index(batch_op.f('workflow__conversation_variables_app_id_idx'), ['app_id'], unique=False)
batch_op.create_index(batch_op.f('workflow__conversation_variables_created_at_idx'), ['created_at'], unique=False)
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False))
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
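            # MySQL cannot attach a server-side DEFAULT to TEXT/LONGTEXT columns
            # (before 8.0.13), so a client-side default is used here; existing
            # rows are presumably filled with MySQL's implicit '' default.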
batch_op.add_column(sa.Column('conversation_variables', models.types.LongText(), default='{}', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.drop_column('conversation_variables')
with op.batch_alter_table('workflow__conversation_variables', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('workflow__conversation_variables_created_at_idx'))
batch_op.drop_index(batch_op.f('workflow__conversation_variables_app_id_idx'))
op.drop_table('workflow__conversation_variables')
# ### end Alembic commands ###

View File

@@ -0,0 +1,33 @@
"""add conversations.dialogue_count
Revision ID: 8782057ff0dc
Revises: 63a83fcf12ba
Create Date: 2024-08-14 13:54:25.161324
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '8782057ff0dc'
down_revision = '63a83fcf12ba'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.add_column(sa.Column('dialogue_count', sa.Integer(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.drop_column('dialogue_count')
# ### end Alembic commands ###

View File

@@ -0,0 +1,72 @@
"""add-tidb-auth-binding
Revision ID: 0251a1c768cc
Revises: bbadea11becb
Create Date: 2024-08-15 09:56:59.012490
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '0251a1c768cc'
down_revision = 'bbadea11becb'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),
sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False),
sa.Column('account', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
)
else:
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),
sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'"), nullable=False),
sa.Column('account', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
)
with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op:
batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False)
batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False)
batch_op.create_index('tidb_auth_bindings_created_at_idx', ['created_at'], unique=False)
batch_op.create_index('tidb_auth_bindings_tenant_idx', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op:
batch_op.drop_index('tidb_auth_bindings_tenant_idx')
batch_op.drop_index('tidb_auth_bindings_created_at_idx')
batch_op.drop_index('tidb_auth_bindings_active_idx')
batch_op.drop_index('tidb_auth_bindings_status_idx')
op.drop_table('tidb_auth_bindings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""app and site icon type
Revision ID: a6be81136580
Revises: 8782057ff0dc
Create Date: 2024-08-15 10:01:24.697888
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = 'a6be81136580'
down_revision = '8782057ff0dc'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('icon_type', sa.String(length=255), nullable=True))
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.add_column(sa.Column('icon_type', sa.String(length=255), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.drop_column('icon_type')
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.drop_column('icon_type')
# ### end Alembic commands ###

View File

@@ -0,0 +1,28 @@
"""rename workflow__conversation_variables to workflow_conversation_variables
Revision ID: 2dbe42621d96
Revises: a6be81136580
Create Date: 2024-08-20 04:55:38.160010
"""
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '2dbe42621d96'
down_revision = 'a6be81136580'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.rename_table('workflow__conversation_variables', 'workflow_conversation_variables')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.rename_table('workflow_conversation_variables', 'workflow__conversation_variables')
# ### end Alembic commands ###

View File

@@ -0,0 +1,52 @@
"""add created_by and updated_by to app, modelconfig, and site
Revision ID: d0187d6a88dd
Revises: 2dbe42621d96
Create Date: 2024-08-25 04:41:18.157397
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = "d0187d6a88dd"
down_revision = "2dbe42621d96"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("app_model_configs", schema=None) as batch_op:
batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
with op.batch_alter_table("apps", schema=None) as batch_op:
batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
with op.batch_alter_table("sites", schema=None) as batch_op:
batch_op.add_column(sa.Column("created_by", models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column("updated_by", models.types.StringUUID(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("sites", schema=None) as batch_op:
batch_op.drop_column("updated_by")
batch_op.drop_column("created_by")
with op.batch_alter_table("apps", schema=None) as batch_op:
batch_op.drop_column("updated_by")
batch_op.drop_column("created_by")
with op.batch_alter_table("app_model_configs", schema=None) as batch_op:
batch_op.drop_column("updated_by")
batch_op.drop_column("created_by")
# ### end Alembic commands ###

View File

@@ -0,0 +1,45 @@
"""add use_icon_as_answer_icon fields for app and site
Revision ID: 030f4915f36a
Revises: d0187d6a88dd
Create Date: 2024-09-01 12:55:45.129687
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = "030f4915f36a"
down_revision = "d0187d6a88dd"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("apps", schema=None) as batch_op:
batch_op.add_column(
sa.Column("use_icon_as_answer_icon", sa.Boolean(), server_default=sa.text("false"), nullable=False)
)
with op.batch_alter_table("sites", schema=None) as batch_op:
batch_op.add_column(
sa.Column("use_icon_as_answer_icon", sa.Boolean(), server_default=sa.text("false"), nullable=False)
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("sites", schema=None) as batch_op:
batch_op.drop_column("use_icon_as_answer_icon")
with op.batch_alter_table("apps", schema=None) as batch_op:
batch_op.drop_column("use_icon_as_answer_icon")
# ### end Alembic commands ###

View File

@@ -0,0 +1,46 @@
"""add parent_message_id to messages
Revision ID: d57ba9ebb251
Revises: 675b5321501b
Create Date: 2024-09-11 10:12:45.826265
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'd57ba9ebb251'
down_revision = '675b5321501b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.add_column(sa.Column('parent_message_id', models.types.StringUUID(), nullable=True))
# Set parent_message_id for existing messages to distinguish them from new messages with actual parent IDs or NULLs
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Use uuid_nil() function
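        # (uuid_nil() is provided by the uuid-ossp extension, which these
        # migrations already assume for uuid_generate_v4())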
op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
else:
# MySQL: Use a specific UUID value to represent nil
op.execute("UPDATE messages SET parent_message_id = '00000000-0000-0000-0000-000000000000' WHERE parent_message_id IS NULL")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_column('parent_message_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,80 @@
"""update-retrieval-resource
Revision ID: 6af6a521a53e
Revises: ec3df697ebbb
Create Date: 2024-09-24 09:22:43.570120
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '6af6a521a53e'
down_revision = 'd57ba9ebb251'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=True)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=True)
else:
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('document_id',
existing_type=models.types.StringUUID(),
nullable=True)
batch_op.alter_column('data_source_type',
existing_type=models.types.LongText(),
nullable=True)
batch_op.alter_column('segment_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=False)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=False)
else:
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('segment_id',
existing_type=models.types.StringUUID(),
nullable=False)
batch_op.alter_column('data_source_type',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('document_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,109 @@
"""external_knowledge_api
Revision ID: 33f5fac87f29
Revises: 6af6a521a53e
Create Date: 2024-09-25 04:34:57.249436
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '33f5fac87f29'
down_revision = '6af6a521a53e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('settings', sa.Text(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
)
else:
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('settings', models.types.LongText(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
)
with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op:
batch_op.create_index('external_knowledge_apis_name_idx', ['name'], unique=False)
batch_op.create_index('external_knowledge_apis_tenant_idx', ['tenant_id'], unique=False)
if _is_pg(conn):
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_id', sa.Text(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
)
else:
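        # Bounded VARCHAR here instead of TEXT so that
        # external_knowledge_bindings_external_knowledge_idx (created below)
        # stays indexable on MySQL, which cannot index TEXT without a prefix length.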
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_id', sa.String(length=512), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.create_index('external_knowledge_bindings_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('external_knowledge_bindings_external_knowledge_api_idx', ['external_knowledge_api_id'], unique=False)
batch_op.create_index('external_knowledge_bindings_external_knowledge_idx', ['external_knowledge_id'], unique=False)
batch_op.create_index('external_knowledge_bindings_tenant_idx', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.drop_index('external_knowledge_bindings_tenant_idx')
batch_op.drop_index('external_knowledge_bindings_external_knowledge_idx')
batch_op.drop_index('external_knowledge_bindings_external_knowledge_api_idx')
batch_op.drop_index('external_knowledge_bindings_dataset_idx')
op.drop_table('external_knowledge_bindings')
with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op:
batch_op.drop_index('external_knowledge_apis_tenant_idx')
batch_op.drop_index('external_knowledge_apis_name_idx')
op.drop_table('external_knowledge_apis')
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""increase max length of builtin tool provider
Revision ID: ddcc8bbef391
Revises: a91b476a53de
Create Date: 2024-09-29 08:35:58.062698
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ddcc8bbef391'
down_revision = 'a91b476a53de' # HEAD OF PLUGIN BRANCH
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=256),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.String(length=256),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,48 @@
"""fix wrong service-api history
Revision ID: d8e744d88ed6
Revises: 33f5fac87f29
Create Date: 2024-10-09 13:29:23.548498
"""
from alembic import op
from constants import UUID_NIL
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd8e744d88ed6'
down_revision = '33f5fac87f29'
branch_labels = None
depends_on = None
# (UTC) release date of v0.9.0
v0_9_0_release_date = '2024-09-29 12:00:00'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
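    # Interpolating into the SQL string is acceptable here: UUID_NIL and the
    # release date are fixed module-level constants, never user input.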
sql = f"""UPDATE
messages
SET
parent_message_id = '{UUID_NIL}'
WHERE
invoke_from = 'service-api'
AND parent_message_id IS NULL
AND created_at >= '{v0_9_0_release_date}';"""
op.execute(sql)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
sql = f"""UPDATE
messages
SET
parent_message_id = NULL
WHERE
invoke_from = 'service-api'
AND parent_message_id = '{UUID_NIL}'
AND created_at >= '{v0_9_0_release_date}';"""
op.execute(sql)
# ### end Alembic commands ###

View File

@@ -0,0 +1,81 @@
"""add name and size to tool_files
Revision ID: bbadea11becb
Revises: d8e744d88ed6
Create Date: 2024-10-10 05:16:14.764268
"""
import sqlalchemy as sa
from alembic import op, context
# revision identifiers, used by Alembic.
revision = "bbadea11becb"
down_revision = "d8e744d88ed6"
branch_labels = None
depends_on = None
def _is_pg(conn):
return conn.dialect.name == "postgresql"
def upgrade():
def _has_name_or_size_column() -> bool:
# We cannot access the database in offline mode, so assume
# the "name" and "size" columns do not exist.
if context.is_offline_mode():
# Log a warning message to inform the user that the database schema cannot be inspected
# in offline mode, and the generated SQL may not accurately reflect the actual execution.
op.execute(
"-- Executing in offline mode, assuming the name and size columns do not exist.\n"
"-- The generated SQL may differ from what will actually be executed.\n"
"-- Please review the migration script carefully!"
)
return False
# Use SQLAlchemy inspector to get the columns of the 'tool_files' table
inspector = sa.inspect(conn)
columns = [col["name"] for col in inspector.get_columns("tool_files")]
# If 'name' or 'size' columns already exist, exit the upgrade function
if "name" in columns or "size" in columns:
return True
return False
# ### commands auto generated by Alembic - please adjust! ###
# Get the database connection
conn = op.get_bind()
if _has_name_or_size_column():
return
if _is_pg(conn):
# PostgreSQL: Keep original syntax
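        # Pattern: add the columns as nullable, backfill existing rows, then
        # tighten to NOT NULL so the ALTER succeeds on populated tables.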
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.add_column(sa.Column("name", sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.String(length=255), nullable=False)
batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.drop_column("size")
batch_op.drop_column("name")
# ### end Alembic commands ###

View File

@@ -0,0 +1,58 @@
"""add_white_list
Revision ID: 43fa78bc3b7d
Revises: 0251a1c768cc
Create Date: 2024-10-22 09:59:23.713716
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '43fa78bc3b7d'
down_revision = '0251a1c768cc'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
)
else:
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
)
with op.batch_alter_table('whitelists', schema=None) as batch_op:
batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('whitelists', schema=None) as batch_op:
batch_op.drop_index('whitelists_tenant_idx')
op.drop_table('whitelists')
# ### end Alembic commands ###

View File

@@ -0,0 +1,53 @@
"""add_tenant_plugin_permisisons
Revision ID: 08ec4f75af5e
Revises: ddcc8bbef391
Create Date: 2024-10-28 07:20:39.711124
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '08ec4f75af5e'
down_revision = 'ddcc8bbef391'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
)
else:
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_plugin_permissions')
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""Add upload_files.source_url
Revision ID: d3f6769a94a3
Revises: 43fa78bc3b7d
Create Date: 2024-11-01 04:34:23.816198
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd3f6769a94a3'
down_revision = '43fa78bc3b7d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.add_column(sa.Column('source_url', sa.String(length=255), server_default='', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.drop_column('source_url')
# ### end Alembic commands ###

View File

@@ -0,0 +1,52 @@
"""rename conversation variables index name
Revision ID: 93ad8c19c40b
Revises: d3f6769a94a3
Create Date: 2024-11-01 04:49:53.100250
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '93ad8c19c40b'
down_revision = 'd3f6769a94a3'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if conn.dialect.name == 'postgresql':
# Rename indexes for PostgreSQL
op.execute('ALTER INDEX workflow__conversation_variables_app_id_idx RENAME TO workflow_conversation_variables_app_id_idx')
op.execute('ALTER INDEX workflow__conversation_variables_created_at_idx RENAME TO workflow_conversation_variables_created_at_idx')
else:
# For other databases, use the original drop and create method
with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op:
batch_op.drop_index('workflow__conversation_variables_app_id_idx')
batch_op.drop_index('workflow__conversation_variables_created_at_idx')
batch_op.create_index(batch_op.f('workflow_conversation_variables_app_id_idx'), ['app_id'], unique=False)
batch_op.create_index(batch_op.f('workflow_conversation_variables_created_at_idx'), ['created_at'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if conn.dialect.name == 'postgresql':
# Rename indexes back for PostgreSQL
op.execute('ALTER INDEX workflow_conversation_variables_app_id_idx RENAME TO workflow__conversation_variables_app_id_idx')
op.execute('ALTER INDEX workflow_conversation_variables_created_at_idx RENAME TO workflow__conversation_variables_created_at_idx')
else:
# For other databases, use the original drop and create method
with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('workflow_conversation_variables_created_at_idx'))
batch_op.drop_index(batch_op.f('workflow_conversation_variables_app_id_idx'))
batch_op.create_index('workflow__conversation_variables_created_at_idx', ['created_at'], unique=False)
batch_op.create_index('workflow__conversation_variables_app_id_idx', ['app_id'], unique=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,65 @@
"""update upload_files.source_url
Revision ID: f4d7ce70a7ca
Revises: 93ad8c19c40b
Create Date: 2024-11-01 05:40:03.531751
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f4d7ce70a7ca'
down_revision = '93ad8c19c40b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
existing_nullable=False,
existing_default=sa.text("''"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
existing_nullable=False,
existing_default=sa.text("''"))
# ### end Alembic commands ###

View File

@@ -0,0 +1,112 @@
"""update type of custom_disclaimer to TEXT
Revision ID: d07474999927
Revises: f4d7ce70a7ca
Create Date: 2024-11-01 06:22:27.981398
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models as models
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'd07474999927'
down_revision = 'f4d7ce70a7ca'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
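    # Backfill NULLs first so the NOT NULL constraints applied below succeed
    # on tables that already contain rows.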
op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
if _is_pg(conn):
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
else:
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
# ### end Alembic commands ###

View File

@@ -0,0 +1,125 @@
"""update workflows graph, features and updated_at
Revision ID: 09a8d1878d9b
Revises: d07474999927
Create Date: 2024-11-01 06:23:59.579186
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '09a8d1878d9b'
down_revision = 'd07474999927'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=False)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=False)
op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('graph',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('features',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=sa.TIMESTAMP(),
nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=True)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=sa.TIMESTAMP(),
nullable=True)
batch_op.alter_column('features',
existing_type=models.types.LongText(),
nullable=True)
batch_op.alter_column('graph',
existing_type=models.types.LongText(),
nullable=True)
if _is_pg(conn):
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
else:
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=True)
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=True)
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""add_created_at_index_for_messages
Revision ID: 01d6889832f7
Revises: 09a8d1878d9b
Create Date: 2024-11-12 09:25:05.527827
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '01d6889832f7'
down_revision = '09a8d1878d9b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.create_index('message_created_at_idx', ['created_at'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_index('message_created_at_idx')
# ### end Alembic commands ###

View File

@@ -0,0 +1,85 @@
"""parent-child-index
Revision ID: e19037032219
Revises: d7999dfa4aae
Create Date: 2024-11-22 07:01:17.550037
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'e19037032219'
down_revision = 'd7999dfa4aae'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('word_count', sa.Integer(), nullable=False),
sa.Column('index_node_id', sa.String(length=255), nullable=True),
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('indexing_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('error', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
)
else:
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('content', models.types.LongText(), nullable=False),
sa.Column('word_count', sa.Integer(), nullable=False),
sa.Column('index_node_id', sa.String(length=255), nullable=True),
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
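        # Plain quoted literal here: the '...'::character varying cast in the PostgreSQL branch above is PG-only syntax.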
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('indexing_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('error', models.types.LongText(), nullable=True),
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
)
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
batch_op.create_index('child_chunk_dataset_id_idx', ['tenant_id', 'dataset_id', 'document_id', 'segment_id', 'index_node_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
batch_op.drop_index('child_chunk_dataset_id_idx')
op.drop_table('child_chunks')
# ### end Alembic commands ###

View File

@@ -0,0 +1,33 @@
"""add exceptions_count field to WorkflowRun model
Revision ID: cf8f4fc45278
Revises: 01d6889832f7
Create Date: 2024-11-28 05:53:21.576178
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cf8f4fc45278'
down_revision = '01d6889832f7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.add_column(sa.Column('exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.drop_column('exceptions_count')
# ### end Alembic commands ###

View File

@@ -0,0 +1,58 @@
"""remove unused tool_providers
Revision ID: 11b07f66c737
Revises: cf8f4fc45278
Create Date: 2024-12-19 17:46:25.780116
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '11b07f66c737'
down_revision = 'cf8f4fc45278'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tool_providers')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_providers',
sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
else:
op.create_table('tool_providers',
sa.Column('id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), autoincrement=False, nullable=True),
sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
# ### end Alembic commands ###

View File

@@ -0,0 +1,38 @@
"""add retry_index field to node-execution model
Revision ID: e1944c35e15e
Revises: 11b07f66c737
Create Date: 2024-12-20 06:28:30.287197
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e1944c35e15e'
down_revision = '11b07f66c737'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
# We don't need these fields anymore, but this file is already merged into the main branch,
# so we need to keep this file for the sake of history, and this change will be reverted in the next migration.
# with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
# batch_op.add_column(sa.Column('retry_index', sa.Integer(), server_default=sa.text('0'), nullable=True))
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
# with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
# batch_op.drop_column('retry_index')
pass
# ### end Alembic commands ###

View File

@@ -0,0 +1,44 @@
"""remove workflow_node_executions.retry_index if exists
Revision ID: d7999dfa4aae
Revises: e1944c35e15e
Create Date: 2024-12-23 11:54:15.344543
"""
from alembic import op, context
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "d7999dfa4aae"
down_revision = "e1944c35e15e"
branch_labels = None
depends_on = None
def upgrade():
def _has_retry_index_column() -> bool:
if context.is_offline_mode():
# Log a warning message to inform the user that the database schema cannot be inspected
# in offline mode, and the generated SQL may not accurately reflect the actual execution.
op.execute(
'-- Executing in offline mode: assuming the "retry_index" column does not exist.\n'
"-- The generated SQL may differ from what will actually be executed.\n"
"-- Please review the migration script carefully!"
)
return False
conn = op.get_bind()
inspector = inspect(conn)
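        # The column exists only where the (since-neutered) e1944c35e15e migration actually added it.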
return "retry_index" in [col["name"] for col in inspector.get_columns("workflow_node_executions")]
has_column = _has_retry_index_column()
if has_column:
with op.batch_alter_table("workflow_node_executions", schema=None) as batch_op:
batch_op.drop_column("retry_index")
def downgrade():
# No downgrade needed as we don't want to restore the column
pass

View File

@@ -0,0 +1,65 @@
"""add_auto_disabled_dataset_logs
Revision ID: 923752d42eb6
Revises: e19037032219
Create Date: 2024-12-25 11:37:55.467101
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '923752d42eb6'
down_revision = 'e19037032219'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
)
else:
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
)
with op.batch_alter_table('dataset_auto_disable_logs', schema=None) as batch_op:
batch_op.create_index('dataset_auto_disable_log_created_atx', ['created_at'], unique=False)
batch_op.create_index('dataset_auto_disable_log_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('dataset_auto_disable_log_tenant_idx', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_auto_disable_logs', schema=None) as batch_op:
batch_op.drop_index('dataset_auto_disable_log_tenant_idx')
batch_op.drop_index('dataset_auto_disable_log_dataset_idx')
batch_op.drop_index('dataset_auto_disable_log_created_atx')
op.drop_table('dataset_auto_disable_logs')
# ### end Alembic commands ###

View File

@@ -0,0 +1,41 @@
"""change workflow_runs.total_tokens to bigint
Revision ID: a91b476a53de
Revises: 923752d42eb6
Create Date: 2025-01-01 20:00:01.207369
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a91b476a53de'
down_revision = '923752d42eb6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.alter_column('total_tokens',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False,
existing_server_default=sa.text('0'))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.alter_column('total_tokens',
existing_type=sa.BigInteger(),
type_=sa.INTEGER(),
existing_nullable=False,
existing_server_default=sa.text('0'))
# ### end Alembic commands ###

View File

@@ -0,0 +1,60 @@
"""add_rate_limit_logs
Revision ID: f051706725cc
Revises: ee79d9b1c156
Create Date: 2025-01-14 06:17:35.536388
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f051706725cc'
down_revision = 'ee79d9b1c156'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
)
else:
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
)
with op.batch_alter_table('rate_limit_logs', schema=None) as batch_op:
batch_op.create_index('rate_limit_log_operation_idx', ['operation'], unique=False)
batch_op.create_index('rate_limit_log_tenant_idx', ['tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('rate_limit_logs', schema=None) as batch_op:
batch_op.drop_index('rate_limit_log_tenant_idx')
batch_op.drop_index('rate_limit_log_operation_idx')
op.drop_table('rate_limit_logs')
# ### end Alembic commands ###

View File

@@ -0,0 +1,134 @@
"""add_metadata_function
Revision ID: d20049ed0af6
Revises: f051706725cc
Create Date: 2025-02-27 09:17:48.903213
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'd20049ed0af6'
down_revision = 'f051706725cc'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
else:
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op:
batch_op.create_index('dataset_metadata_binding_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_document_idx', ['document_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_metadata_idx', ['metadata_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_tenant_idx', ['tenant_id'], unique=False)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
)
with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op:
batch_op.create_index('dataset_metadata_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('dataset_metadata_tenant_idx', ['tenant_id'], unique=False)
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('built_in_field_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False))
if _is_pg(conn):
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True)
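            # A GIN index lets PostgreSQL serve JSONB containment queries (e.g. @>) on doc_metadata efficiently.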
batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
else:
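        # MySQL has no JSONB type or GIN indexes, so no equivalent change is applied here.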
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.drop_index('document_metadata_idx', postgresql_using='gin')
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_nullable=True)
else:
pass
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.drop_column('built_in_field_enabled')
with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op:
batch_op.drop_index('dataset_metadata_tenant_idx')
batch_op.drop_index('dataset_metadata_dataset_idx')
op.drop_table('dataset_metadatas')
with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op:
batch_op.drop_index('dataset_metadata_binding_tenant_idx')
batch_op.drop_index('dataset_metadata_binding_metadata_idx')
batch_op.drop_index('dataset_metadata_binding_document_idx')
batch_op.drop_index('dataset_metadata_binding_dataset_idx')
op.drop_table('dataset_metadata_bindings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""extend_provider_name_column
Revision ID: 4413929e1ec2
Revises: 08ec4f75af5e
Create Date: 2025-03-03 03:04:58.181493
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4413929e1ec2'
down_revision = '08ec4f75af5e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,42 @@
"""add marked_name and marked_comment in workflows
Revision ID: ee79d9b1c156
Revises: 5511c782ee4c
Create Date: 2025-03-03 14:36:05.750346
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ee79d9b1c156'
down_revision = '5511c782ee4c'
branch_labels = None
depends_on = None
def _is_pg(conn):
return conn.dialect.name == "postgresql"
def upgrade():
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default=''))
batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default=''))
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('marked_name', sa.String(length=255), nullable=False, server_default=''))
batch_op.add_column(sa.Column('marked_comment', sa.String(length=255), nullable=False, server_default=''))
def downgrade():
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.drop_column('marked_comment')
batch_op.drop_column('marked_name')

View File

@@ -0,0 +1,64 @@
"""extend provider column
Revision ID: 5511c782ee4c
Revises: 4413929e1ec2
Create Date: 2025-03-07 03:15:05.364804
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5511c782ee4c'
down_revision = '4413929e1ec2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,43 @@
"""change documentsegment and childchunk indexes
Revision ID: 6a9f914f656c
Revises: d20049ed0af6
Create Date: 2025-03-29 22:27:24.789481
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6a9f914f656c'
down_revision = 'd20049ed0af6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
batch_op.create_index('child_chunks_node_idx', ['index_node_id', 'dataset_id'], unique=False)
batch_op.create_index('child_chunks_segment_idx', ['segment_id'], unique=False)
with op.batch_alter_table('document_segments', schema=None) as batch_op:
batch_op.drop_index('document_segment_dataset_node_idx')
batch_op.create_index('document_segment_node_dataset_idx', ['index_node_id', 'dataset_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('document_segments', schema=None) as batch_op:
batch_op.drop_index('document_segment_node_dataset_idx')
batch_op.create_index('document_segment_dataset_node_idx', ['dataset_id', 'index_node_id'], unique=False)
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
batch_op.drop_index('child_chunks_segment_idx')
batch_op.drop_index('child_chunks_node_idx')
# ### end Alembic commands ###

View File

@@ -0,0 +1,33 @@
"""add index for workflow_conversation_variables.conversation_id
Revision ID: d28f2004b072
Revises: 6a9f914f656c
Create Date: 2025-05-14 14:03:36.713828
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd28f2004b072'
down_revision = '6a9f914f656c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op:
batch_op.create_index(batch_op.f('workflow_conversation_variables_conversation_id_idx'), ['conversation_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('workflow_conversation_variables_conversation_id_idx'))
# ### end Alembic commands ###

View File

@@ -0,0 +1,77 @@
"""add WorkflowDraftVariable model
Revision ID: 2adcbe1f5dfb
Revises: d28f2004b072
Create Date: 2025-05-15 15:31:03.128680
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = "2adcbe1f5dfb"
down_revision = "d28f2004b072"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),
sa.Column("last_edited_at", sa.DateTime(), nullable=True),
sa.Column("node_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=255), nullable=False),
sa.Column("selector", sa.String(length=255), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=False),
sa.Column("value", sa.Text(), nullable=False),
sa.Column("visible", sa.Boolean(), nullable=False),
sa.Column("editable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
)
else:
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),
sa.Column("last_edited_at", sa.DateTime(), nullable=True),
sa.Column("node_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=255), nullable=False),
sa.Column("selector", sa.String(length=255), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=False),
sa.Column("value", models.types.LongText(), nullable=False),
sa.Column("visible", sa.Boolean(), nullable=False),
sa.Column("editable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
# Dropping `workflow_draft_variables` also drops any index associated with it.
op.drop_table("workflow_draft_variables")
# ### end Alembic commands ###

View File

@@ -0,0 +1,80 @@
"""`workflow_draft_varaibles` add `node_execution_id` column, add an index for `workflow_node_executions`.
Revision ID: 4474872b0ee6
Revises: 2adcbe1f5dfb
Create Date: 2025-06-06 14:24:44.213018
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4474872b0ee6'
down_revision = '2adcbe1f5dfb'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
# `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
# context manager to wrap the index creation statement.
# Reference:
#
# - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
# - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
with op.get_context().autocommit_block():
op.create_index(
op.f('workflow_node_executions_tenant_id_idx'),
"workflow_node_executions",
['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
unique=False,
postgresql_concurrently=True,
)
else:
op.create_index(
op.f('workflow_node_executions_tenant_id_idx'),
"workflow_node_executions",
['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
unique=False,
)
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.add_column(sa.Column('node_execution_id', models.types.StringUUID(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
    # `DROP INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
    # context manager to wrap the index removal statement.
    # Reference:
    #
    # - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
    # - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
conn = op.get_bind()
if _is_pg(conn):
with op.get_context().autocommit_block():
op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True)
else:
op.drop_index(op.f('workflow_node_executions_tenant_id_idx'))
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.drop_column('node_execution_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,66 @@
"""remove sequence_number from workflow_runs
Revision ID: 0ab65e1cc7fa
Revises: 4474872b0ee6
Create Date: 2025-06-19 16:33:13.377215
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0ab65e1cc7fa'
down_revision = '4474872b0ee6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('workflow_run_tenant_app_sequence_idx'))
batch_op.drop_column('sequence_number')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
# WARNING: This downgrade CANNOT recover the original sequence_number values!
# The original sequence numbers are permanently lost after the upgrade.
# This downgrade will regenerate sequence numbers based on created_at order,
# which may result in different values than the original sequence numbers.
#
# If you need to preserve original sequence numbers, use the alternative
# migration approach that creates a backup table before removal.
# Step 1: Add sequence_number column as nullable first
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.add_column(sa.Column('sequence_number', sa.INTEGER(), autoincrement=False, nullable=True))
# Step 2: Populate sequence_number values based on created_at order within each app
# NOTE: This recreates sequence numbering logic but values will be different
# from the original sequence numbers that were removed in the upgrade
connection = op.get_bind()
connection.execute(sa.text("""
UPDATE workflow_runs
SET sequence_number = subquery.row_num
FROM (
SELECT id, ROW_NUMBER() OVER (
PARTITION BY tenant_id, app_id
ORDER BY created_at, id
) as row_num
FROM workflow_runs
) subquery
WHERE workflow_runs.id = subquery.id
"""))
# Step 3: Make the column NOT NULL and add the index
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.alter_column('sequence_number', nullable=False)
batch_op.create_index(batch_op.f('workflow_run_tenant_app_sequence_idx'), ['tenant_id', 'app_id', 'sequence_number'], unique=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,108 @@
"""add mcp server tool and app server
Revision ID: 58eb7bdb93fe
Revises: 0ab65e1cc7fa
Create Date: 2025-06-25 09:36:07.510570
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '58eb7bdb93fe'
down_revision = '0ab65e1cc7fa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
else:
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False),
sa.Column('parameters', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
if _is_pg(conn):
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', sa.Text(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)
else:
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', models.types.LongText(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tool_mcp_providers')
op.drop_table('app_mcp_servers')
# ### end Alembic commands ###

View File

@@ -0,0 +1,101 @@
"""add uuidv7 function in SQL
Revision ID: 1c9ba48be8e4
Revises: 58eb7bdb93fe
Create Date: 2025-07-02 23:32:38.484499
"""
"""
The functions in this file come from https://github.com/dverite/postgres-uuidv7-sql/, with minor modifications.
LICENSE:
# Copyright and License
Copyright (c) 2024, Daniel Vérité
Permission to use, copy, modify, and distribute this software and its documentation for any purpose, without fee, and without a written agreement is hereby granted, provided that the above copyright notice and this paragraph and the following two paragraphs appear in all copies.
In no event shall Daniel Vérité be liable to any party for direct, indirect, special, incidental, or consequential damages, including lost profits, arising out of the use of this software and its documentation, even if Daniel Vérité has been advised of the possibility of such damage.
Daniel Vérité specifically disclaims any warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose. The software provided hereunder is on an "AS IS" basis, and Daniel Vérité has no obligations to provide maintenance, support, updates, enhancements, or modifications.
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '1c9ba48be8e4'
down_revision = '58eb7bdb93fe'
branch_labels: None = None
depends_on: None = None
def upgrade():
# This implementation differs slightly from the original uuidv7 function in
# https://github.com/dverite/postgres-uuidv7-sql/.
# The ability to specify source timestamp has been removed because its type signature is incompatible with
# PostgreSQL 18's `uuidv7` function. This capability is rarely needed in practice, as IDs can be
# generated and controlled within the application layer.
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Create uuidv7 functions
op.execute(sa.text(r"""
/* Main function to generate a uuidv7 value with millisecond precision */
CREATE FUNCTION uuidv7() RETURNS uuid
AS
$$
-- Replace the first 48 bits of a uuidv4 with the current
-- number of milliseconds since 1970-01-01 UTC
-- and set the "ver" field to 7 by setting additional bits
SELECT encode(
set_bit(
set_bit(
overlay(uuid_send(gen_random_uuid()) placing
substring(int8send((extract(epoch from clock_timestamp()) * 1000)::bigint) from
3)
from 1 for 6),
52, 1),
53, 1), 'hex')::uuid;
$$ LANGUAGE SQL VOLATILE PARALLEL SAFE;
COMMENT ON FUNCTION uuidv7 IS
'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness';
"""))
op.execute(sa.text(r"""
CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid
AS
$$
/* uuid fields: version=0b0111, variant=0b10 */
SELECT encode(
overlay('\x00000000000070008000000000000000'::bytea
placing substring(int8send(floor(extract(epoch from $1) * 1000)::bigint) from 3)
from 1 for 6),
'hex')::uuid;
$$ LANGUAGE SQL STABLE STRICT PARALLEL SAFE;
COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS
'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0. As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.';
"""
))
else:
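        # Non-PostgreSQL backends: no SQL function is created; UUIDv7 values can instead be generated in the application layer.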
pass
def downgrade():
conn = op.get_bind()
if _is_pg(conn):
op.execute(sa.text("DROP FUNCTION uuidv7"))
op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))
else:
pass

View File

@@ -0,0 +1,98 @@
"""tool oauth
Revision ID: 71f5020c6470
Revises: 1c9ba48be8e4
Create Date: 2025-06-24 17:05:43.118647
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '71f5020c6470'
down_revision = '1c9ba48be8e4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
)
else:
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
)
if _is_pg(conn):
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
)
else:
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
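        # NOTE: length 255 here (vs 512 on PostgreSQL), presumably to keep the composite unique key within InnoDB's index size limit.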
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
)
if _is_pg(conn):
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
else:
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'"), nullable=False))
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'"), nullable=False))
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider'])
batch_op.drop_column('credential_type')
batch_op.drop_column('is_default')
batch_op.drop_column('name')
op.drop_table('tool_oauth_tenant_clients')
op.drop_table('tool_oauth_system_clients')
# ### end Alembic commands ###

View File

@@ -0,0 +1,51 @@
"""update models
Revision ID: 1a83934ad6d1
Revises: 71f5020c6470
Create Date: 2025-07-21 09:35:48.774794
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1a83934ad6d1'
down_revision = '71f5020c6470'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.alter_column('server_identifier',
existing_type=sa.VARCHAR(length=24),
type_=sa.String(length=64),
existing_nullable=False)
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('tool_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=128),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.alter_column('tool_name',
existing_type=sa.String(length=128),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.alter_column('server_identifier',
existing_type=sa.String(length=64),
type_=sa.VARCHAR(length=24),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,34 @@
"""oauth_refresh_token
Revision ID: 375fe79ead14
Revises: 1a83934ad6d1
Create Date: 2025-07-22 00:19:45.599636
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '375fe79ead14'
down_revision = '1a83934ad6d1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('expires_at', sa.BigInteger(), server_default=sa.text('-1'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.drop_column('expires_at')
# ### end Alembic commands ###

View File

@@ -0,0 +1,63 @@
"""add_tenant_plugin_autoupgrade_table
Revision ID: 8bcc02c9bd07
Revises: 375fe79ead14
Create Date: 2025-07-23 15:08:50.161441
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '8bcc02c9bd07'
down_revision = '375fe79ead14'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
)
else:
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
sa.Column('exclude_plugins', sa.JSON(), nullable=False),
sa.Column('include_plugins', sa.JSON(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tenant_plugin_auto_upgrade_strategies')
# ### end Alembic commands ###
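The two create_table branches above differ only in the list-column type (native ARRAY on PostgreSQL, JSON elsewhere) and the id/timestamp server defaults. A hypothetical helper could collapse the list-type duplication (the function name is mine, not the project's):

# hypothetical helper for the dialect branch above (name is an assumption)
import sqlalchemy as sa

def plugin_list_type(conn):
    # PostgreSQL has native string arrays; other dialects fall back to JSON
    if conn.dialect.name == "postgresql":
        return sa.ARRAY(sa.String(length=255))
    return sa.JSON()

Each branch would then pass plugin_list_type(conn) for exclude_plugins and include_plugins, leaving only the server-default differences to the dialect check.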

View File

@@ -0,0 +1,39 @@
"""manual dataset field update
Revision ID: 532b3f888abf
Revises: 8bcc02c9bd07
Create Date: 2025-07-24 14:50:48.779833
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '532b3f888abf'
down_revision = '8bcc02c9bd07'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
if _is_pg(conn):
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
else:
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
def downgrade():
conn = op.get_bind()
if _is_pg(conn):
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
else:
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
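The same default change can also be expressed through Alembic's portable alter_column, which renders dialect-appropriate DDL without hand-written casts. A sketch, assuming status is a VARCHAR(255) (the column length is not visible in this diff):

# portable equivalent of the raw ALTER statements above (sketch; column length assumed)
import sqlalchemy as sa
from alembic import op

def upgrade_portable():
    with op.batch_alter_table('tidb_auth_bindings') as batch_op:
        batch_op.alter_column('status',
                              existing_type=sa.String(length=255),
                              server_default='CREATING')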

View File

@@ -0,0 +1,33 @@
"""add timeout for tool_mcp_providers
Revision ID: fa8b0fa6f407
Revises: 532b3f888abf
Create Date: 2025-08-07 11:15:31.517985
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fa8b0fa6f407'
down_revision = '532b3f888abf'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('timeout', sa.Float(), server_default=sa.text('30'), nullable=False))
batch_op.add_column(sa.Column('sse_read_timeout', sa.Float(), server_default=sa.text('300'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_mcp_providers', schema=None) as batch_op:
batch_op.drop_column('sse_read_timeout')
batch_op.drop_column('timeout')
# ### end Alembic commands ###

View File

@@ -0,0 +1,240 @@
"""Add provider multi credential support
Revision ID: e8446f481c1e
Revises: fa8b0fa6f407
Create Date: 2025-08-09 15:53:54.341341
"""
from alembic import op, context
from libs.uuid_utils import uuidv7
import models as models
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'e8446f481c1e'
down_revision = 'fa8b0fa6f407'
branch_labels = None
depends_on = None
def upgrade():
# Create provider_credentials table
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
)
else:
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
)
# Create index for provider_credentials
with op.batch_alter_table('provider_credentials', schema=None) as batch_op:
batch_op.create_index('provider_credential_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False)
# Add credential_id to providers table
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True))
# Add credential_id to load_balancing_model_configs table
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True))
if not context.is_offline_mode():
migrate_existing_providers_data()
else:
op.execute(
'-- [IMPORTANT] Data migration skipped!!!\n'
"-- You should manually run data migration function `migrate_existing_providers_data`\n"
f"-- inside file {__file__}\n"
"-- Please review the migration script carefully!"
)
# Remove encrypted_config column from providers table after migration
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.drop_column('encrypted_config')
def migrate_existing_providers_data():
"""migrate providers table data to provider_credentials"""
conn = op.get_bind()
# Define table structure for data manipulation
if _is_pg(conn):
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
else:
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
if _is_pg(conn):
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
else:
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
# Query all existing providers data
existing_providers = conn.execute(
sa.select(providers_table.c.id, providers_table.c.tenant_id,
providers_table.c.provider_name, providers_table.c.encrypted_config,
providers_table.c.created_at, providers_table.c.updated_at)
.where(providers_table.c.encrypted_config.isnot(None))
).fetchall()
# Iterate through each provider and insert into provider_credentials
    for provider in existing_providers:
        if not provider.encrypted_config or provider.encrypted_config.strip() == '':
            continue
        credential_id = str(uuidv7())
# Insert into provider_credentials table
conn.execute(
provider_credential_table.insert().values(
id=credential_id,
tenant_id=provider.tenant_id,
provider_name=provider.provider_name,
credential_name='API_KEY1', # Use a default name
encrypted_config=provider.encrypted_config,
created_at=provider.created_at,
updated_at=provider.updated_at
)
)
# Update original providers table, set credential_id
conn.execute(
providers_table.update()
.where(providers_table.c.id == provider.id)
.values(
credential_id=credential_id,
)
)
def downgrade():
# Re-add encrypted_config column to providers table
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
# Migrate data back from provider_credentials to providers
if not context.is_offline_mode():
migrate_data_back_to_providers()
else:
op.execute(
'-- [IMPORTANT] Data migration skipped!!!\n'
"-- You should manually run data migration function `migrate_data_back_to_providers`\n"
f"-- inside file {__file__}\n"
"-- Please review the migration script carefully!"
)
# Remove credential_id columns
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.drop_column('credential_id')
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.drop_column('credential_id')
# Drop provider_credentials table
op.drop_table('provider_credentials')
def migrate_data_back_to_providers():
"""Migrate data back from provider_credentials to providers table for downgrade"""
# Define table structure for data manipulation
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', sa.Text()),
column('credential_id', models.types.StringUUID()),
)
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
)
# Get database connection
conn = op.get_bind()
# Query providers that have credential_id
providers_with_credentials = conn.execute(
sa.select(providers_table.c.id, providers_table.c.credential_id)
.where(providers_table.c.credential_id.isnot(None))
).fetchall()
# For each provider, get the credential data and update providers table
for provider in providers_with_credentials:
credential = conn.execute(
sa.select(provider_credential_table.c.encrypted_config)
.where(provider_credential_table.c.id == provider.credential_id)
).fetchone()
if credential:
# Update providers table with encrypted_config from credential
conn.execute(
providers_table.update()
.where(providers_table.c.id == provider.id)
.values(encrypted_config=credential.encrypted_config)
)
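After a row-by-row data migration like migrate_existing_providers_data, a count comparison is a quick way to confirm nothing was left behind before trusting that the dropped encrypted_config column is gone for good. A sketch with a placeholder DSN:

# post-migration sanity check (sketch; the DSN is a placeholder)
import sqlalchemy as sa

engine = sa.create_engine("postgresql://user:pass@localhost/dify")  # placeholder DSN
with engine.connect() as conn:
    linked = conn.execute(sa.text(
        "SELECT count(*) FROM providers WHERE credential_id IS NOT NULL"
    )).scalar_one()
    rows = conn.execute(sa.text(
        "SELECT count(*) FROM provider_credentials"
    )).scalar_one()
    print(f"providers linked: {linked}, credential rows: {rows}")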

View File

@@ -0,0 +1,254 @@
"""Add provider model multi credential support
Revision ID: 0e154742a5fa
Revises: e8446f481c1e
Create Date: 2025-08-13 16:05:42.657730
"""
from alembic import op, context
from libs.uuid_utils import uuidv7
import models as models
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '0e154742a5fa'
down_revision = 'e8446f481c1e'
branch_labels = None
depends_on = None
def upgrade():
# Create provider_model_credentials table
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
)
else:
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
)
# Create index for provider_model_credentials
with op.batch_alter_table('provider_model_credentials', schema=None) as batch_op:
batch_op.create_index('provider_model_credential_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_name', 'model_type'], unique=False)
# Add credential_id to provider_models table
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True))
# Add credential_source_type to load_balancing_model_configs table
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_source_type', sa.String(length=40), nullable=True))
if not context.is_offline_mode():
# Migrate existing provider_models data
migrate_existing_provider_models_data()
else:
op.execute(
'-- [IMPORTANT] Data migration skipped!!!\n'
"-- You should manually run data migration function `migrate_existing_provider_models_data`\n"
f"-- inside file {__file__}\n"
"-- Please review the migration script carefully!"
)
# Remove encrypted_config column from provider_models table after migration
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.drop_column('encrypted_config')
def migrate_existing_provider_models_data():
"""migrate provider_models table data to provider_model_credentials"""
conn = op.get_bind()
# Define table structure for data manipulation
if _is_pg(conn):
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
else:
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
if _is_pg(conn):
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
else:
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
# Query all existing provider_models data with encrypted_config
existing_provider_models = conn.execute(
sa.select(provider_models_table.c.id, provider_models_table.c.tenant_id,
provider_models_table.c.provider_name, provider_models_table.c.model_name,
provider_models_table.c.model_type, provider_models_table.c.encrypted_config,
provider_models_table.c.created_at, provider_models_table.c.updated_at)
.where(provider_models_table.c.encrypted_config.isnot(None))
).fetchall()
# Iterate through each provider_model and insert into provider_model_credentials
for provider_model in existing_provider_models:
if not provider_model.encrypted_config or provider_model.encrypted_config.strip() == '':
continue
credential_id = str(uuidv7())
# Insert into provider_model_credentials table
conn.execute(
provider_model_credentials_table.insert().values(
id=credential_id,
tenant_id=provider_model.tenant_id,
provider_name=provider_model.provider_name,
model_name=provider_model.model_name,
model_type=provider_model.model_type,
credential_name='API_KEY1', # Use a default name
encrypted_config=provider_model.encrypted_config,
created_at=provider_model.created_at,
updated_at=provider_model.updated_at
)
)
# Update original provider_models table, set credential_id
conn.execute(
provider_models_table.update()
.where(provider_models_table.c.id == provider_model.id)
.values(credential_id=credential_id)
)
def downgrade():
# Re-add encrypted_config column to provider_models table
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
else:
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
if not context.is_offline_mode():
# Migrate data back from provider_model_credentials to provider_models
migrate_data_back_to_provider_models()
else:
op.execute(
'-- [IMPORTANT] Data migration skipped!!!\n'
"-- You should manually run data migration function `migrate_data_back_to_provider_models`\n"
f"-- inside file {__file__}\n"
"-- Please review the migration script carefully!"
)
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.drop_column('credential_id')
# Remove credential_source_type column from load_balancing_model_configs
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.drop_column('credential_source_type')
# Drop provider_model_credentials table
op.drop_table('provider_model_credentials')
def migrate_data_back_to_provider_models():
"""Migrate data back from provider_model_credentials to provider_models table for downgrade"""
# Define table structure for data manipulation
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('encrypted_config', sa.Text()),
column('credential_id', models.types.StringUUID()),
)
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('encrypted_config', sa.Text()),
)
# Get database connection
conn = op.get_bind()
# Query provider_models that have credential_id
provider_models_with_credentials = conn.execute(
sa.select(provider_models_table.c.id, provider_models_table.c.credential_id)
.where(provider_models_table.c.credential_id.isnot(None))
).fetchall()
# For each provider_model, get the credential data and update provider_models table
for provider_model in provider_models_with_credentials:
credential = conn.execute(
sa.select(provider_model_credentials_table.c.encrypted_config)
.where(provider_model_credentials_table.c.id == provider_model.credential_id)
).fetchone()
if credential:
# Update provider_models table with encrypted_config from credential
conn.execute(
provider_models_table.update()
.where(provider_models_table.c.id == provider_model.id)
.values(encrypted_config=credential.encrypted_config)
)
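Both data migrations mint ids with uuidv7() rather than uuid4(): v7 UUIDs carry a millisecond timestamp in their leading bits, so rows inserted in sequence stay roughly clustered in the primary key index. A tiny demonstration, assuming libs.uuid_utils is importable outside a migration context:

# sketch: uuidv7 ids sort by creation time (import outside migrations is an assumption)
import time
from libs.uuid_utils import uuidv7

first = uuidv7()
time.sleep(0.002)                # land in a later millisecond
second = uuidv7()
assert str(first) < str(second)  # the leading bits encode a ms-precision timestamp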

View File

@@ -0,0 +1,66 @@
"""empty message
Revision ID: 8d289573e1da
Revises: 0e154742a5fa
Create Date: 2025-08-20 17:47:17.015695
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '8d289573e1da'
down_revision = '0e154742a5fa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
)
else:
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uris', sa.JSON(), default='[]', nullable=False),
sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
)
with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
batch_op.create_index('oauth_provider_app_client_id_idx', ['client_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
batch_op.drop_index('oauth_provider_app_client_id_idx')
op.drop_table('oauth_provider_apps')
# ### end Alembic commands ###
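One detail worth noting above: the scope default is wrapped in sa.text("'...'") with explicit inner quotes, while other defaults pass plain strings. Both forms work because SQLAlchemy quotes a plain-string server_default for you but emits a text() construct verbatim. A sketch of the two spellings (the column here is illustrative only):

# plain string vs. pre-quoted text() server defaults (illustrative sketch)
import sqlalchemy as sa

auto_quoted = sa.Column('scope', sa.String(length=255), server_default="read:name")
pre_quoted = sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name'"))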

View File

@@ -0,0 +1,32 @@
"""chore: add workflow app log run id index
Revision ID: b95962a3885c
Revises: 8d289573e1da
Create Date: 2025-08-29 15:34:09.838623
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b95962a3885c'
down_revision = '8d289573e1da'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op:
batch_op.create_index('workflow_app_log_workflow_run_id_idx', ['workflow_run_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op:
batch_op.drop_index('workflow_app_log_workflow_run_id_idx')
# ### end Alembic commands ###

View File

@@ -0,0 +1,36 @@
"""add_headers_to_mcp_provider
Revision ID: c20211f18133
Revises: b95962a3885c
Create Date: 2025-08-29 10:07:54.163626
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'c20211f18133'
down_revision = 'b95962a3885c'
branch_labels = None
depends_on = None
def upgrade():
# Add encrypted_headers column to tool_mcp_providers table
conn = op.get_bind()
if _is_pg(conn):
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
else:
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
def downgrade():
# Remove encrypted_headers column from tool_mcp_providers table
op.drop_column('tool_mcp_providers', 'encrypted_headers')

View File

@@ -0,0 +1,42 @@
"""Add credential status for provider table
Revision ID: cf7c38a32b2d
Revises: c20211f18133
Create Date: 2025-09-11 15:37:17.771298
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'cf7c38a32b2d'
down_revision = 'c20211f18133'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'"), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.drop_column('credential_status')
# ### end Alembic commands ###

View File

@@ -0,0 +1,392 @@
"""knowledge_pipeline_migrate
Revision ID: 68519ad5cd18
Revises: cf7c38a32b2d
Create Date: 2025-09-17 15:15:50.697885
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '68519ad5cd18'
down_revision = 'cf7c38a32b2d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
else:
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
if _is_pg(conn):
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
)
else:
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
)
if _is_pg(conn):
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', sa.Text(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
)
else:
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=128), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', models.types.LongText(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
)
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)
if _is_pg(conn):
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
sa.Column('datasource_info', sa.Text(), nullable=False),
sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
sa.Column('input_data', sa.JSON(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
)
else:
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
sa.Column('datasource_info', models.types.LongText(), nullable=False),
sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
sa.Column('input_data', sa.JSON(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
)
with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)
if _is_pg(conn):
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
sa.Column('privacy_policy', sa.String(length=255), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
)
else:
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('yaml_content', models.types.LongText(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
sa.Column('privacy_policy', sa.String(length=255), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
)
if _is_pg(conn):
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('yaml_content', models.types.LongText(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
)
with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)
if _is_pg(conn):
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.Text(), nullable=False),
sa.Column('provider_name', sa.Text(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
)
else:
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', models.types.LongText(), nullable=False),
sa.Column('provider_name', models.types.LongText(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
)
if _is_pg(conn):
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
)
else:
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
)
if _is_pg(conn):
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
        sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'),
sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
sa.Column('value_type', sa.String(20), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
)
else:
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
        sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'),
sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
sa.Column('value_type', sa.String(20), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
)
if _is_pg(conn):
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
else:
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.add_column(sa.Column('file_id', models.types.StringUUID(), nullable=True, comment='Reference to WorkflowDraftVariableFile if variable is offloaded to external storage'))
        batch_op.add_column(
            sa.Column(
                'is_default_value', sa.Boolean(), nullable=False,
                server_default=sa.text("FALSE"),
                comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',
            )
        )
batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('rag_pipeline_variables', models.types.LongText(), default='{}', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.drop_column('rag_pipeline_variables')
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.drop_index('workflow_draft_variable_file_id_idx')
batch_op.drop_column('is_default_value')
batch_op.drop_column('file_id')
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.drop_column('enable_api')
batch_op.drop_column('chunk_structure')
batch_op.drop_column('pipeline_id')
batch_op.drop_column('runtime_mode')
batch_op.drop_column('icon_info')
batch_op.drop_column('keyword_number')
op.drop_table('workflow_node_execution_offload')
op.drop_table('workflow_draft_variable_files')
op.drop_table('pipelines')
op.drop_table('pipeline_recommended_plugins')
with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
batch_op.drop_index('pipeline_customized_template_tenant_idx')
op.drop_table('pipeline_customized_templates')
op.drop_table('pipeline_built_in_templates')
with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
batch_op.drop_index('document_pipeline_execution_logs_document_id_idx')
op.drop_table('document_pipeline_execution_logs')
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.drop_index('datasource_provider_auth_type_provider_idx')
op.drop_table('datasource_providers')
op.drop_table('datasource_oauth_tenant_params')
op.drop_table('datasource_oauth_params')
# ### end Alembic commands ###
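The non-PostgreSQL branches above lean heavily on dialect-aware column types such as models.types.LongText and models.types.AdjustedJSON. Their implementations are not part of this diff; a minimal sketch of what a LongText-style type can look like (names and details are assumptions):

# minimal sketch of a dialect-aware long-text type (the project's real
# models.types.LongText is not shown in this diff and may differ)
import sqlalchemy as sa
from sqlalchemy.types import TypeDecorator

class LongTextSketch(TypeDecorator):
    """Plain TEXT on PostgreSQL, LONGTEXT on MySQL."""
    impl = sa.Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "mysql":
            from sqlalchemy.dialects.mysql import LONGTEXT
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(sa.Text())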

View File

@@ -0,0 +1,35 @@
"""add app_mode for messsage
Revision ID: d98acf217d43
Revises: 68519ad5cd18
Create Date: 2025-10-14 16:18:08.568011
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd98acf217d43'
down_revision = '68519ad5cd18'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.add_column(sa.Column('app_mode', sa.String(length=255), nullable=True))
batch_op.create_index('message_app_mode_idx', ['app_mode'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_index('message_app_mode_idx')
batch_op.drop_column('app_mode')
# ### end Alembic commands ###

View File

@@ -0,0 +1,47 @@
"""remove-builtin-template-user
Revision ID: ae662b25d9bc
Revises: d98acf217d43
Create Date: 2025-10-21 14:30:28.566192
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'ae662b25d9bc'
down_revision = 'd98acf217d43'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.drop_column('updated_by')
batch_op.drop_column('created_by')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))
# ### end Alembic commands ###
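Caveat on the downgrade above: re-adding created_by with nullable=False fails on any non-empty pipeline_built_in_templates table, because existing rows have no value to satisfy the constraint. A common pattern is add-nullable, backfill, then tighten; a sketch (the backfill value is a placeholder, not part of this migration):

# sketch: downgrade that survives existing rows (backfill value is a placeholder)
import sqlalchemy as sa
from alembic import op
import models

def downgrade_with_backfill():
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), nullable=True))
    op.execute("UPDATE pipeline_built_in_templates SET created_by = '00000000-0000-0000-0000-000000000000'")
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.alter_column('created_by', existing_type=models.types.StringUUID(), nullable=False)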

View File

@@ -0,0 +1,58 @@
"""add WorkflowPause model
Revision ID: 03f8dcbc611e
Revises: ae662b25d9bc
Create Date: 2025-10-22 16:11:31.805407
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = "03f8dcbc611e"
down_revision = "ae662b25d9bc"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table(
"workflow_pauses",
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
)
else:
op.create_table(
"workflow_pauses",
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("workflow_pauses")
# ### end Alembic commands ###

View File

@@ -0,0 +1,380 @@
"""introduce_trigger
Revision ID: 669ffd70119c
Revises: 03f8dcbc611e
Create Date: 2025-10-30 15:18:49.549156
"""
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7
from models.enums import AppTriggerStatus, AppTriggerType
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '669ffd70119c'
down_revision = '03f8dcbc611e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_trigger_pkey')
)
else:
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_trigger_pkey')
)
with op.batch_alter_table('app_triggers', schema=None) as batch_op:
batch_op.create_index('app_trigger_tenant_app_idx', ['tenant_id', 'app_id'], unique=False)
if _is_pg(conn):
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx')
)
else:
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx')
)
if _is_pg(conn):
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
)
else:
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
)
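    # Subscriptions carry provider credentials plus expiry bookkeeping; per the
    # column comments, expires_at and credential_expires_at use -1 to mean
    # "never expires".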
if _is_pg(conn):
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'),
sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'),
sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'),
sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'),
sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'),
sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'),
sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'),
sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
)
else:
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'),
sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'),
sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'),
sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'),
sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'),
sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'),
sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'),
sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
)
with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op:
batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True)
batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False)
batch_op.create_index('idx_trigger_providers_tenant_provider', ['tenant_id', 'provider_id'], unique=False)
if _is_pg(conn):
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=512), nullable=False),
sa.Column('event_name', sa.String(length=255), nullable=False),
sa.Column('subscription_id', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
)
else:
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=512), nullable=False),
sa.Column('event_name', sa.String(length=255), nullable=False),
sa.Column('subscription_id', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
)
with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op:
batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False)
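    # The composite index above supports lookups by
    # (tenant_id, subscription_id, event_name), e.g. when dispatching incoming
    # plugin events.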
if _is_pg(conn):
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('cron_expression', sa.String(length=255), nullable=False),
sa.Column('timezone', sa.String(length=64), nullable=False),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
)
else:
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('cron_expression', sa.String(length=255), nullable=False),
sa.Column('timezone', sa.String(length=64), nullable=False),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
)
with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op:
batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False)
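    # next_run_at is indexed so a scheduler can cheaply poll for plans that are
    # due to run.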
if _is_pg(conn):
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
sa.Column('root_node_id', sa.String(length=255), nullable=True),
sa.Column('trigger_metadata', sa.Text(), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('trigger_data', sa.Text(), nullable=False),
sa.Column('inputs', sa.Text(), nullable=False),
sa.Column('outputs', sa.Text(), nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('error', sa.Text(), nullable=True),
sa.Column('queue_name', sa.String(length=100), nullable=False),
sa.Column('celery_task_id', sa.String(length=255), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('elapsed_time', sa.Float(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', sa.String(length=255), nullable=False),
sa.Column('triggered_at', sa.DateTime(), nullable=True),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
)
else:
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
sa.Column('root_node_id', sa.String(length=255), nullable=True),
sa.Column('trigger_metadata', models.types.LongText(), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('trigger_data', models.types.LongText(), nullable=False),
sa.Column('inputs', models.types.LongText(), nullable=False),
sa.Column('outputs', models.types.LongText(), nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('error', models.types.LongText(), nullable=True),
sa.Column('queue_name', sa.String(length=100), nullable=False),
sa.Column('celery_task_id', sa.String(length=255), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('elapsed_time', sa.Float(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', sa.String(length=255), nullable=False),
sa.Column('triggered_at', sa.DateTime(), nullable=True),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
)
with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op:
batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False)
batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False)
batch_op.create_index('workflow_trigger_log_tenant_app_idx', ['tenant_id', 'app_id'], unique=False)
batch_op.create_index('workflow_trigger_log_workflow_id_idx', ['workflow_id'], unique=False)
batch_op.create_index('workflow_trigger_log_workflow_run_idx', ['workflow_run_id'], unique=False)
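    # Trigger logs are indexed on each expected access path: tenant/app,
    # workflow, workflow run, status, and creation time.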
if _is_pg(conn):
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('webhook_id', sa.String(length=24), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
)
else:
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('webhook_id', sa.String(length=24), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
)
with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op:
batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False)
with op.batch_alter_table('celery_taskmeta', schema=None) as batch_op:
batch_op.alter_column('task_id',
existing_type=sa.VARCHAR(length=155),
nullable=False)
batch_op.alter_column('status',
existing_type=sa.VARCHAR(length=50),
nullable=False)
with op.batch_alter_table('celery_tasksetmeta', schema=None) as batch_op:
batch_op.alter_column('taskset_id',
existing_type=sa.VARCHAR(length=155),
nullable=False)
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.drop_column('credential_status')
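    # providers.credential_status is retired here; the downgrade below restores
    # it with its previous 'active' server default.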
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'"), autoincrement=False, nullable=True))
with op.batch_alter_table('celery_tasksetmeta', schema=None) as batch_op:
batch_op.alter_column('taskset_id',
existing_type=sa.VARCHAR(length=155),
nullable=True)
with op.batch_alter_table('celery_taskmeta', schema=None) as batch_op:
batch_op.alter_column('status',
existing_type=sa.VARCHAR(length=50),
nullable=True)
batch_op.alter_column('task_id',
existing_type=sa.VARCHAR(length=155),
nullable=True)
with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op:
batch_op.drop_index('workflow_webhook_trigger_tenant_idx')
op.drop_table('workflow_webhook_triggers')
with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op:
batch_op.drop_index('workflow_trigger_log_workflow_run_idx')
batch_op.drop_index('workflow_trigger_log_workflow_id_idx')
batch_op.drop_index('workflow_trigger_log_tenant_app_idx')
batch_op.drop_index('workflow_trigger_log_status_idx')
batch_op.drop_index('workflow_trigger_log_created_at_idx')
op.drop_table('workflow_trigger_logs')
with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op:
batch_op.drop_index('workflow_schedule_plan_next_idx')
op.drop_table('workflow_schedule_plans')
with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op:
batch_op.drop_index('workflow_plugin_trigger_tenant_subscription_idx')
op.drop_table('workflow_plugin_triggers')
with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op:
batch_op.drop_index('idx_trigger_providers_tenant_provider')
batch_op.drop_index('idx_trigger_providers_tenant_endpoint')
batch_op.drop_index('idx_trigger_providers_endpoint')
op.drop_table('trigger_subscriptions')
op.drop_table('trigger_oauth_tenant_clients')
op.drop_table('trigger_oauth_system_clients')
with op.batch_alter_table('app_triggers', schema=None) as batch_op:
batch_op.drop_index('app_trigger_tenant_app_idx')
op.drop_table('app_triggers')
# ### end Alembic commands ###

View File

@@ -0,0 +1,131 @@
"""empty message
Revision ID: 09cfdda155d1
Revises: 669ffd70119c
Create Date: 2025-11-15 21:02:32.472885
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql, mysql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '09cfdda155d1'
down_revision = '669ffd70119c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
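    # Type tightening: several VARCHAR/TEXT columns are narrowed, and the
    # plugin include/exclude lists move from a Postgres ARRAY to portable JSON
    # (converted in place via to_jsonb(...)::json).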
if _is_pg(conn):
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.VARCHAR(length=255),
type_=sa.String(length=128),
existing_nullable=False)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.alter_column('external_knowledge_id',
existing_type=sa.TEXT(),
type_=sa.String(length=512),
existing_nullable=False)
with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
batch_op.alter_column('exclude_plugins',
existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
type_=sa.JSON(),
existing_nullable=False,
postgresql_using='to_jsonb(exclude_plugins)::json')
batch_op.alter_column('include_plugins',
existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
type_=sa.JSON(),
existing_nullable=False,
postgresql_using='to_jsonb(include_plugins)::json')
with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
else:
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=mysql.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=mysql.TIMESTAMP(),
type_=sa.DateTime(),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=512),
existing_nullable=False)
with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=512),
existing_nullable=False)
with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
batch_op.alter_column('include_plugins',
existing_type=sa.JSON(),
type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
existing_nullable=False)
batch_op.alter_column('exclude_plugins',
existing_type=sa.JSON(),
type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
existing_nullable=False)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.alter_column('external_knowledge_id',
existing_type=sa.String(length=512),
type_=sa.TEXT(),
existing_nullable=False)
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.String(length=128),
type_=sa.VARCHAR(length=255),
existing_nullable=False)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=sa.DateTime(),
type_=mysql.TIMESTAMP(),
existing_nullable=False)
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=mysql.VARCHAR(length=512),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,43 @@
"""add message files into agent thought
Revision ID: 23db93619b9d
Revises: 8ae9bc661daa
Create Date: 2024-01-18 08:46:37.302657
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '23db93619b9d'
down_revision = '8ae9bc661daa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
else:
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.drop_column('message_files')
# ### end Alembic commands ###

View File

@@ -0,0 +1,78 @@
"""add_app_anntation_setting
Revision ID: 246ba09cbbdb
Revises: 714aafe25d39
Create Date: 2023-12-14 11:26:12.287264
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '246ba09cbbdb'
down_revision = '714aafe25d39'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
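    # Annotation settings move out of app_model_configs.annotation_reply into a
    # dedicated table; the old column is dropped at the end of this upgrade.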
if _is_pg(conn):
op.create_table('app_annotation_settings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', postgresql.UUID(), nullable=False),
sa.Column('created_user_id', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_user_id', postgresql.UUID(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
)
else:
op.create_table('app_annotation_settings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', models.types.StringUUID(), nullable=False),
sa.Column('created_user_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_user_id', models.types.StringUUID(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
)
with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
batch_op.create_index('app_annotation_settings_app_idx', ['app_id'], unique=False)
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.drop_column('annotation_reply')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
batch_op.drop_index('app_annotation_settings_app_idx')
op.drop_table('app_annotation_settings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,43 @@
"""add app tracing
Revision ID: 2a3aebbbf4bb
Revises: c031d46af369
Create Date: 2024-06-17 10:08:54.803701
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '2a3aebbbf4bb'
down_revision = 'c031d46af369'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
else:
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.drop_column('tracing')
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""add is_universal in apps
Revision ID: 2beac44e5f5f
Revises: a5b56fb053ef
Create Date: 2023-07-07 12:11:29.156057
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2beac44e5f5f'
down_revision = 'a5b56fb053ef'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_universal', sa.Boolean(), server_default=sa.text('false'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.drop_column('is_universal')
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""add_qa_document_language
Revision ID: 2c8af9671032
Revises: 5022897aaceb
Create Date: 2023-08-01 18:57:27.294973
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2c8af9671032'
down_revision = '5022897aaceb'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.add_column(sa.Column('doc_language', sa.String(length=255), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.drop_column('doc_language')
# ### end Alembic commands ###

View File

@@ -0,0 +1,58 @@
"""add_tenant_id_in_api_token
Revision ID: 2e9819ca5b28
Revises: ab23c11305d4
Create Date: 2023-09-22 15:41:01.243183
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '2e9819ca5b28'
down_revision = 'ab23c11305d4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
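    # api_tokens switch from per-dataset to per-tenant scoping: dataset_id is
    # dropped and an indexed tenant_id takes its place.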
if _is_pg(conn):
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
batch_op.drop_column('dataset_id')
else:
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
batch_op.drop_column('dataset_id')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
batch_op.drop_index('api_token_tenant_idx')
batch_op.drop_column('tenant_id')
else:
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
batch_op.drop_index('api_token_tenant_idx')
batch_op.drop_column('tenant_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,43 @@
"""add tool labels to agent thought
Revision ID: 380c6aa5a70d
Revises: dfb3b7f477da
Create Date: 2024-01-24 10:58:15.644445
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '380c6aa5a70d'
down_revision = 'dfb3b7f477da'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))
else:
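        # MySQL cannot attach a DEFAULT to TEXT-like columns (before 8.0.13),
        # so the '{}' default is applied client-side at insert time rather than
        # as a server_default.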
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_labels_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.drop_column('tool_labels_str')
# ### end Alembic commands ###

View File

@@ -0,0 +1,57 @@
"""add tool label bings
Revision ID: 3b18fea55204
Revises: 7bdef072e63a
Create Date: 2024-05-14 09:27:18.857890
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3b18fea55204'
down_revision = '7bdef072e63a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
)
else:
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
)
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), server_default='', nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.drop_column('privacy_policy')
op.drop_table('tool_label_bindings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,94 @@
"""add-tags-and-binding-table
Revision ID: 3c7cac9521c6
Revises: c3311b089690
Create Date: 2024-04-11 06:17:34.278594
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3c7cac9521c6'
down_revision = 'c3311b089690'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
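    # Generic tagging: 'tags' stores (tenant, type, name) records and
    # 'tag_bindings' links a tag to an arbitrary target_id.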
if _is_pg(conn):
op.create_table('tag_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('tag_id', postgresql.UUID(), nullable=True),
sa.Column('target_id', postgresql.UUID(), nullable=True),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
)
else:
op.create_table('tag_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('tag_id', models.types.StringUUID(), nullable=True),
sa.Column('target_id', models.types.StringUUID(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
)
with op.batch_alter_table('tag_bindings', schema=None) as batch_op:
batch_op.create_index('tag_bind_tag_id_idx', ['tag_id'], unique=False)
batch_op.create_index('tag_bind_target_id_idx', ['target_id'], unique=False)
if _is_pg(conn):
op.create_table('tags',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_pkey')
)
else:
op.create_table('tags',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_pkey')
)
with op.batch_alter_table('tags', schema=None) as batch_op:
batch_op.create_index('tag_name_idx', ['name'], unique=False)
batch_op.create_index('tag_type_idx', ['type'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tags', schema=None) as batch_op:
batch_op.drop_index('tag_type_idx')
batch_op.drop_index('tag_name_idx')
op.drop_table('tags')
with op.batch_alter_table('tag_bindings', schema=None) as batch_op:
batch_op.drop_index('tag_bind_target_id_idx')
batch_op.drop_index('tag_bind_tag_id_idx')
op.drop_table('tag_bindings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,125 @@
"""add_assistant_app
Revision ID: 3ef9b2b6bee6
Revises: 89c7899ca936
Create Date: 2024-01-05 15:26:25.117551
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3ef9b2b6bee6'
down_revision = '89c7899ca936'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
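    # Tables backing the assistant/tool feature: API tool providers, built-in
    # provider credentials, and apps published as tools.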
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_api_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', sa.Text(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('description_str', sa.Text(), nullable=False),
sa.Column('tools_str', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_api_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', models.types.LongText(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('description_str', models.types.LongText(), nullable=False),
sa.Column('tools_str', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_builtin_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_builtin_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_published_apps',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('llm_description', sa.Text(), nullable=False),
sa.Column('query_description', sa.Text(), nullable=False),
sa.Column('query_name', sa.String(length=40), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('author', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_published_apps',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('llm_description', models.types.LongText(), nullable=False),
sa.Column('query_description', models.types.LongText(), nullable=False),
sa.Column('query_name', sa.String(length=40), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('author', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tool_published_apps')
op.drop_table('tool_builtin_providers')
op.drop_table('tool_api_providers')
# ### end Alembic commands ###

View File

@@ -0,0 +1,33 @@
"""'add_max_active_requests'
Revision ID: 408176b91ad3
Revises: 7e6a8693e07a
Create Date: 2024-07-04 09:25:14.029023
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '408176b91ad3'
down_revision = '161cadc1af8d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('max_active_requests', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.drop_column('max_active_requests')
# ### end Alembic commands ###

View File

@@ -0,0 +1,82 @@
"""conversation columns set nullable
Revision ID: 42e85ed5564d
Revises: f9107f83abab
Create Date: 2024-03-07 08:30:29.133614
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '42e85ed5564d'
down_revision = 'f9107f83abab'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
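    # Relax NOT NULL: a conversation may exist without an app_model_config or
    # explicit model info, so these columns become nullable.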
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=True)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True)
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=True)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('app_model_config_id',
existing_type=models.types.StringUUID(),
nullable=True)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True)
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('app_model_config_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,31 @@
"""add-annotation-histoiry-score
Revision ID: 46976cc39132
Revises: e1901f623fd0
Create Date: 2023-12-13 04:39:59.302971
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '46976cc39132'
down_revision = 'e1901f623fd0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
batch_op.add_column(sa.Column('score', sa.Float(), server_default=sa.text('0'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
batch_op.drop_column('score')
# ### end Alembic commands ###

View File

@@ -0,0 +1,39 @@
"""modify default model name length
Revision ID: 47cc7df8c4f3
Revises: 3c7cac9521c6
Create Date: 2024-05-10 09:48:09.046298
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '47cc7df8c4f3'
down_revision = '3c7cac9521c6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.alter_column('model_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.alter_column('model_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,57 @@
"""add tool file
Revision ID: 4823da1d26cf
Revises: 053da0c1d756
Create Date: 2024-01-15 11:37:16.782718
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4823da1d26cf'
down_revision = '053da0c1d756'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_files',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('file_key', sa.String(length=255), nullable=False),
sa.Column('mimetype', sa.String(length=255), nullable=False),
sa.Column('original_url', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
)
else:
op.create_table('tool_files',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('file_key', sa.String(length=255), nullable=False),
sa.Column('mimetype', sa.String(length=255), nullable=False),
sa.Column('original_url', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tool_files')
# ### end Alembic commands ###

View File

@@ -0,0 +1,61 @@
"""change message chain id to nullable
Revision ID: 4829e54d2fee
Revises: 114eed84c228
Create Date: 2024-01-12 03:42:27.362415
"""
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4829e54d2fee'
down_revision = '114eed84c228'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=True)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -0,0 +1,75 @@
"""update_dataset_model_field_null_available
Revision ID: 4bcffcd64aa4
Revises: 853f9b9cd3b6
Create Date: 2023-08-28 20:58:50.077056
"""
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4bcffcd64aa4'
down_revision = '853f9b9cd3b6'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
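    # Only the server_default text differs per dialect here: PostgreSQL keeps
    # the '::character varying' casts, MySQL uses plain string literals.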
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'openai'::character varying"))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'text-embedding-ada-002'"))
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'openai'"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'openai'::character varying"))
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'openai'"))
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'text-embedding-ada-002'"))
# ### end Alembic commands ###

View File

@@ -0,0 +1,163 @@
"""add load balancing
Revision ID: 4e99a8df00ff
Revises: 64a70a7aab8b
Create Date: 2024-05-10 12:08:09.812736
"""
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4e99a8df00ff'
down_revision = '64a70a7aab8b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
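    # Load balancing support: per-model endpoint configs plus a settings row
    # per model that toggles load_balancing_enabled.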
if _is_pg(conn):
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
)
else:
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=True),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
)
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.create_index('load_balancing_model_config_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)
if _is_pg(conn):
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
)
else:
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
)
with op.batch_alter_table('provider_model_settings', schema=None) as batch_op:
batch_op.create_index('provider_model_setting_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('provider_orders', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('provider_orders', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False)
with op.batch_alter_table('provider_model_settings', schema=None) as batch_op:
batch_op.drop_index('provider_model_setting_tenant_provider_model_idx')
op.drop_table('provider_model_settings')
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.drop_index('load_balancing_model_config_tenant_provider_model_idx')
op.drop_table('load_balancing_model_configs')
# ### end Alembic commands ###
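# Note: the PostgreSQL branch relies on uuid_generate_v4(), which is provided
# by the uuid-ossp extension rather than the core server. Whether the extension
# is already installed is a deployment assumption; a guard like this sketch,
# called at the top of upgrade(), would make the CREATE TABLE self-sufficient:
def _ensure_uuid_ossp(conn):
    """Install uuid-ossp on PostgreSQL if it is missing (no-op elsewhere)."""
    if _is_pg(conn):
        op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')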

View File

@@ -0,0 +1,33 @@
"""add workflow to site
Revision ID: 4ff534e1eb11
Revises: 7b45942e39bb
Create Date: 2024-06-21 04:16:03.419634
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '4ff534e1eb11'
down_revision = '7b45942e39bb'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.add_column(sa.Column('show_workflow_steps', sa.Boolean(), server_default=sa.text('true'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.drop_column('show_workflow_steps')
# ### end Alembic commands ###

View File

@@ -0,0 +1,49 @@
"""add model name in embedding
Revision ID: 5022897aaceb
Revises: bf0aec5ba2cf
Create Date: 2023-08-11 14:38:15.499460
"""
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '5022897aaceb'
down_revision = 'bf0aec5ba2cf'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False))
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'"), nullable=False))
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['hash'])
batch_op.drop_column('model_name')
# ### end Alembic commands ###
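# Note: downgrade() shrinks the unique key from ('model_name', 'hash') back to
# ('hash',), which fails if the same hash was stored under more than one model.
# A fail-fast check along these lines could run before the constraint swap;
# the deduplication policy itself is left to the operator (a sketch only):
def _assert_hash_unique(conn):
    """Abort the downgrade early if ('hash',) alone is no longer unique."""
    dup = conn.execute(sa.text(
        "SELECT hash FROM embeddings GROUP BY hash HAVING COUNT(*) > 1 LIMIT 1"
    )).first()
    if dup is not None:
        raise RuntimeError("duplicate embedding hashes; deduplicate before downgrading")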

View File

@@ -0,0 +1,65 @@
"""update model
Revision ID: 53bf8af60645
Revises: 8e5588e6412e
Create Date: 2024-07-24 08:06:55.291031
"""
import sqlalchemy as sa
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '53bf8af60645'
down_revision = '8e5588e6412e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False,
existing_server_default=sa.text("''"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False,
existing_server_default=sa.text("''"))
# ### end Alembic commands ###

View File

@@ -0,0 +1,57 @@
"""enable tool file without conversation id
Revision ID: 563cf8bf777b
Revises: b5429b71023c
Create Date: 2024-03-14 04:54:56.679506
"""
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '563cf8bf777b'
down_revision = 'b5429b71023c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=True)
else:
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###
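# Note: restoring NOT NULL in downgrade() only succeeds once no NULL
# conversation_id rows remain. How to resolve them is a policy decision; this
# sketch simply drops orphaned rows (an assumption; a backfill may be
# preferable in practice):
def _purge_orphan_tool_files():
    """Remove tool_files rows that no longer reference a conversation."""
    op.execute("DELETE FROM tool_files WHERE conversation_id IS NULL")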

View File

@@ -0,0 +1,45 @@
"""Custom Disclaimer
Revision ID: 5fda94355fce
Revises: 47cc7df8c4f3
Create Date: 2024-05-10 20:04:45.806549
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '5fda94355fce'
down_revision = '47cc7df8c4f3'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_disclaimer', sa.String(length=255), nullable=True))
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_disclaimer', sa.String(length=255), nullable=True))
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_disclaimer', sa.String(length=255), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.drop_column('custom_disclaimer')
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.drop_column('custom_disclaimer')
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.drop_column('custom_disclaimer')
# ### end Alembic commands ###

View File

@@ -0,0 +1,41 @@
"""add last active at
Revision ID: 614f77cecc48
Revises: a45f4dfde53b
Create Date: 2023-06-15 13:33:00.357467
"""
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '614f77cecc48'
down_revision = 'a45f4dfde53b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
else:
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.drop_column('last_active_at')
# ### end Alembic commands ###
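# Note: the branches differ only in how the timestamp default is spelled:
# 'CURRENT_TIMESTAMP(0)' pins second precision on PostgreSQL, while
# sa.func.current_timestamp() lets SQLAlchemy render the portable form. A
# sketch of a shared helper (illustrative, not used above):
def _now_default(conn):
    """Dialect-appropriate server_default for timestamp columns."""
    return sa.text('CURRENT_TIMESTAMP(0)') if _is_pg(conn) else sa.func.current_timestamp()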

View File

@@ -0,0 +1,22 @@
"""merge branches
Revision ID: 63f9175e515b
Revises: 2a3aebbbf4bb, b69ca54b9208
Create Date: 2024-06-26 09:46:36.573505
"""
import models as models
# revision identifiers, used by Alembic.
revision = '63f9175e515b'
down_revision = ('2a3aebbbf4bb', 'b69ca54b9208')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
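# Note: a merge revision carries no schema changes; it only stitches the two
# divergent heads named in down_revision back into a single lineage. Alembic
# generates a file like this one with, for example:
#
#     alembic merge -m "merge branches" 2a3aebbbf4bb b69ca54b9208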

View File

@@ -0,0 +1,32 @@
"""add workflow run index
Revision ID: 64a70a7aab8b
Revises: 03f98355ba0e
Create Date: 2024-05-28 12:32:00.276061
"""
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '64a70a7aab8b'
down_revision = '03f98355ba0e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.create_index('workflow_run_tenant_app_sequence_idx', ['tenant_id', 'app_id', 'sequence_number'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.drop_index('workflow_run_tenant_app_sequence_idx')
# ### end Alembic commands ###

File diff suppressed because it is too large

View File

@@ -0,0 +1,35 @@
"""add node_execution_id into node_executions
Revision ID: 675b5321501b
Revises: 030f4915f36a
Create Date: 2024-08-12 10:54:02.259331
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '675b5321501b'
down_revision = '030f4915f36a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
batch_op.add_column(sa.Column('node_execution_id', sa.String(length=255), nullable=True))
batch_op.create_index('workflow_node_execution_id_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'node_execution_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
batch_op.drop_index('workflow_node_execution_id_idx')
batch_op.drop_column('node_execution_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,86 @@
"""add_dataset_retriever_resource
Revision ID: 6dcb43972bdc
Revises: 4bcffcd64aa4
Create Date: 2023-09-06 16:51:27.385844
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '6dcb43972bdc'
down_revision = '4bcffcd64aa4'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_retriever_resources',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('message_id', postgresql.UUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', postgresql.UUID(), nullable=False),
sa.Column('dataset_name', sa.Text(), nullable=False),
sa.Column('document_id', postgresql.UUID(), nullable=False),
sa.Column('document_name', sa.Text(), nullable=False),
sa.Column('data_source_type', sa.Text(), nullable=False),
sa.Column('segment_id', postgresql.UUID(), nullable=False),
sa.Column('score', sa.Float(), nullable=True),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('hit_count', sa.Integer(), nullable=True),
sa.Column('word_count', sa.Integer(), nullable=True),
sa.Column('segment_position', sa.Integer(), nullable=True),
sa.Column('index_node_hash', sa.Text(), nullable=True),
sa.Column('retriever_from', sa.Text(), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
)
else:
op.create_table('dataset_retriever_resources',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_name', models.types.LongText(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('document_name', models.types.LongText(), nullable=False),
sa.Column('data_source_type', models.types.LongText(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('score', sa.Float(), nullable=True),
sa.Column('content', models.types.LongText(), nullable=False),
sa.Column('hit_count', sa.Integer(), nullable=True),
sa.Column('word_count', sa.Integer(), nullable=True),
sa.Column('segment_position', sa.Integer(), nullable=True),
sa.Column('index_node_hash', models.types.LongText(), nullable=True),
sa.Column('retriever_from', models.types.LongText(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
)
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.create_index('dataset_retriever_resource_message_id_idx', ['message_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.drop_index('dataset_retriever_resource_message_id_idx')
op.drop_table('dataset_retriever_resources')
# ### end Alembic commands ###
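# Note: models.types.LongText is assumed here to be a type that renders as
# LONGTEXT on MySQL (plain sa.Text caps at ~64 KB there) and degrades to TEXT
# elsewhere. A sketch of how such a type can be built; the real definition
# lives in models/types.py:
def _long_text_sketch():
    from sqlalchemy.dialects.mysql import LONGTEXT
    return sa.Text().with_variant(LONGTEXT(), "mysql")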

View File

@@ -0,0 +1,70 @@
"""add_dataset_collection_binding
Revision ID: 6e2cfb077b04
Revises: 77e83833755c
Create Date: 2023-09-13 22:16:48.027810
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '6e2cfb077b04'
down_revision = '77e83833755c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_collection_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
)
else:
op.create_table('dataset_collection_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
)
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.drop_column('collection_binding_id')
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
batch_op.drop_index('provider_model_name_idx')
op.drop_table('dataset_collection_bindings')
# ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""add-embedding-cache-created_at_index
Revision ID: 6e957a32015b
Revises: fecff1c3da27
Create Date: 2024-07-19 17:21:34.414705
"""
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = '6e957a32015b'
down_revision = 'fecff1c3da27'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.create_index('created_at_idx', ['created_at'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.drop_index('created_at_idx')
# ### end Alembic commands ###
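# Note: an index on created_at typically exists to serve a time-based eviction
# job for the embedding cache. The retention window below is an assumption,
# shown purely to illustrate the query shape this index accelerates
# (PostgreSQL syntax):
#
#     DELETE FROM embeddings WHERE created_at < NOW() - INTERVAL '30 days'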

Some files were not shown because too many files have changed in this diff