import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# Alembic revision identifiers, consumed by the migration framework.
revision = '001_expert_training_models'  # unique id of this revision
down_revision = None  # None: this is the first migration in the chain
branch_labels = None
depends_on = None

def upgrade() -> None:
    """Create the ``hub_global`` schema and the expert-training tables.

    Tables: expert_profiles, expert_contributions, annotation_tasks,
    training_datasets and dataset_samples (the latter holding an FK to
    training_datasets). Secondary indexes back the common lookup columns.
    """
    schema = 'hub_global'

    def _audit_cols():
        # Bookkeeping columns shared by every table in this revision:
        # creation/update timestamps plus soft-delete markers.
        return [
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), nullable=False),
            sa.Column('is_deleted', sa.Boolean(), server_default='false'),
            sa.Column('deleted_at', sa.DateTime(), nullable=True),
            sa.Column('deleted_by', sa.Integer(), nullable=True),
        ]

    op.execute('CREATE SCHEMA IF NOT EXISTS hub_global')

    op.create_table(
        'expert_profiles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('expertise_areas', postgresql.ARRAY(sa.Text()), nullable=False),
        sa.Column('skills', postgresql.ARRAY(sa.Text()), nullable=False),
        sa.Column('bio', sa.Text(), nullable=True),
        sa.Column('total_annotations', sa.Integer(), server_default='0'),
        sa.Column('total_reviews', sa.Integer(), server_default='0'),
        sa.Column('avg_quality_score', sa.Float(), server_default='0.0'),
        sa.Column('reputation_score', sa.Integer(), server_default='0'),
        sa.Column('meta_data', postgresql.JSONB(), server_default='{}'),
        *_audit_cols(),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_index('ix_expert_profiles_id', 'expert_profiles', ['id'],
                    schema=schema)
    # One profile per user, hence the unique index.
    op.create_index('ix_expert_profiles_user_id', 'expert_profiles', ['user_id'],
                    unique=True, schema=schema)

    op.create_table(
        'expert_contributions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('contribution_type', sa.String(50), nullable=False),
        sa.Column('task_id', sa.Integer(), nullable=False),
        sa.Column('quality_score', sa.Float(), nullable=True),
        *_audit_cols(),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_index('ix_expert_contributions_user_id', 'expert_contributions',
                    ['user_id'], schema=schema)

    op.create_table(
        'annotation_tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('task_type', sa.String(50), nullable=False),
        sa.Column('status', sa.String(50), server_default='pending'),
        sa.Column('priority', sa.String(50), server_default='medium'),
        sa.Column('deadline', sa.DateTime(), nullable=True),
        sa.Column('assigned_to', sa.Integer(), nullable=True),
        sa.Column('assigned_by', sa.Integer(), nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('reviewed_by', sa.Integer(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('reviewer_feedback', sa.Text(), nullable=True),
        sa.Column('annotation_data', postgresql.JSONB(), nullable=True),
        sa.Column('meta_data', postgresql.JSONB(), server_default='{}'),
        *_audit_cols(),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_index('ix_annotation_tasks_status', 'annotation_tasks', ['status'],
                    schema=schema)
    op.create_index('ix_annotation_tasks_assigned_to', 'annotation_tasks',
                    ['assigned_to'], schema=schema)

    op.create_table(
        'training_datasets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('dataset_type', sa.String(50), nullable=False),
        sa.Column('version', sa.Integer(), server_default='1'),
        # NOTE(review): presumably points at a prior training_datasets row;
        # no FK constraint is declared here — confirm that is intentional.
        sa.Column('parent_version_id', sa.Integer(), nullable=True),
        sa.Column('total_samples', sa.Integer(), server_default='0'),
        sa.Column('train_samples', sa.Integer(), server_default='0'),
        sa.Column('validation_samples', sa.Integer(), server_default='0'),
        sa.Column('test_samples', sa.Integer(), server_default='0'),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('meta_data', postgresql.JSONB(), server_default='{}'),
        *_audit_cols(),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )

    op.create_table(
        'dataset_samples',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dataset_id', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('label', sa.String(255), nullable=False),
        sa.Column('split', sa.String(50), server_default='train'),
        sa.Column('source_task_id', sa.Integer(), nullable=True),
        sa.Column('meta_data', postgresql.JSONB(), server_default='{}'),
        *_audit_cols(),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['dataset_id'], ['hub_global.training_datasets.id']),
        schema=schema,
    )

def downgrade() -> None:
    """Drop every table created by this revision, then the schema itself.

    Tables are dropped child-first (dataset_samples holds an FK to
    training_datasets) so foreign-key constraints never block the drops;
    each table's indexes are removed implicitly with the table.
    """
    op.drop_table('dataset_samples', schema='hub_global')
    op.drop_table('training_datasets', schema='hub_global')
    op.drop_table('annotation_tasks', schema='hub_global')
    op.drop_table('expert_contributions', schema='hub_global')
    op.drop_table('expert_profiles', schema='hub_global')
    # Reverse the CREATE SCHEMA issued by upgrade(); without this the
    # migration was not a true inverse. Deliberately non-CASCADE so the
    # downgrade fails loudly if any other revision parked objects in
    # hub_global, rather than silently destroying them.
    op.execute('DROP SCHEMA IF EXISTS hub_global')
