feat: 新增多模块功能与服务实现
新增广告计划、用户资产、B2B交易、合规规则等核心模型；实现爬虫工作器、贸易服务、现金流预测等业务服务；添加RBAC权限测试、压力测试等测试用例；完善扩展程序的消息处理与内容脚本功能；重构应用入口与文档生成器；更新项目规则与业务闭环分析文档。
This commit is contained in:
677
server/src/services/DevOpsService.ts
Normal file
677
server/src/services/DevOpsService.ts
Normal file
@@ -0,0 +1,677 @@
|
||||
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as path from 'path';
import * as zlib from 'zlib';
import db from '../config/database';
import { logger } from '../utils/logger';
|
||||
/**
 * Options for creating a database backup via {@link DevOpsService.createBackup}.
 */
export interface BackupConfig {
  /** Tenant that owns the backup; scopes queries and audit rows. */
  tenantId: string;
  shopId: string;
  taskId: string;
  /** Correlation id propagated into logs and audit tables. */
  traceId: string;
  businessType: 'TOC' | 'TOB';
  /**
   * Recorded in the audit row. NOTE(review): createBackup currently dumps full
   * table contents regardless of this value — confirm before relying on
   * INCREMENTAL/DIFFERENTIAL semantics.
   */
  backupType: 'FULL' | 'INCREMENTAL' | 'DIFFERENTIAL';
  /** Explicit table list; when omitted, all `cf_`-prefixed tables are backed up. */
  tables?: string[];
  /** Tables to skip even if selected. */
  excludeTables?: string[];
  /** gzip + base64 the serialized payload before writing. */
  compression: boolean;
  /** AES-256-CBC encrypt the payload (key from BACKUP_ENCRYPTION_KEY). */
  encryption: boolean;
  /** Retention in days, stored on the audit row (enforcement not shown here). */
  retentionDays: number;
  /** Optional schedule metadata; not consumed by the visible code. */
  schedule?: {
    enabled: boolean;
    cron: string;
  };
}
|
||||
|
||||
/**
 * Outcome of a completed backup, mirrored into the `cf_backups` table.
 */
export interface BackupResult {
  success: boolean;
  /** Generated id of the form `BK-<timestamp>-<random>`. */
  backupId: string;
  backupType: string;
  /** Absolute path of the written backup file. */
  filePath: string;
  /** Size in bytes of the file on disk. */
  fileSize: number;
  tablesBackedUp: number;
  /** Total rows across all backed-up tables. */
  recordsCount: number;
  /** SHA-256 hex digest of the file content as written (post-compression/encryption). */
  checksum: string;
  startedAt: Date;
  completedAt: Date;
  /** Wall-clock duration in milliseconds. */
  duration: number;
}
|
||||
|
||||
/**
 * Options for restoring a backup via {@link DevOpsService.restoreBackup}.
 */
export interface RestoreConfig {
  tenantId: string;
  shopId: string;
  taskId: string;
  /** Correlation id propagated into logs and audit tables. */
  traceId: string;
  businessType: 'TOC' | 'TOB';
  /** Id of the backup row in `cf_backups` to restore from. */
  backupId: string;
  /** Subset of tables to restore; when omitted, every table in the backup is restored. */
  targetTables?: string[];
  /** Run a checksum/record-count verification of the backup after restoring. */
  verifyAfterRestore: boolean;
}
|
||||
|
||||
/**
 * Outcome of a restore, mirrored into the `cf_restore_logs` table.
 */
export interface RestoreResult {
  success: boolean;
  backupId: string;
  tablesRestored: number;
  recordsRestored: number;
  startedAt: Date;
  completedAt: Date;
  /** Wall-clock duration in milliseconds. */
  duration: number;
  /** Present only when `verifyAfterRestore` was requested. */
  verificationResult?: {
    success: boolean;
    checksumMatch: boolean;
    recordCountMatch: boolean;
  };
}
|
||||
|
||||
/**
 * Input for {@link DevOpsService.configureMonitoring}: a set of metrics and
 * the alert channels that notifications should go through.
 */
export interface MonitoringConfig {
  tenantId: string;
  shopId: string;
  taskId: string;
  /** Correlation id propagated into logs and audit tables. */
  traceId: string;
  businessType: 'TOC' | 'TOB';
  /** Metrics to persist into `cf_monitoring_metrics`. */
  metrics: MetricConfig[];
  /** Channels to persist into `cf_alert_channels`. */
  alertChannels: AlertChannelConfig[];
}
|
||||
|
||||
/**
 * A single monitored metric with warning/critical thresholds.
 */
export interface MetricConfig {
  /** Used directly as the `cf_monitoring_metrics` primary key. */
  metricId: string;
  metricName: string;
  metricType: 'CPU' | 'MEMORY' | 'DISK' | 'NETWORK' | 'DATABASE' | 'API_LATENCY' | 'ERROR_RATE' | 'CUSTOM';
  threshold: {
    warning: number;
    critical: number;
  };
  /** Evaluation window; presumably seconds (schema default is 300) — TODO confirm. */
  evaluationWindow: number;
  enabled: boolean;
}
|
||||
|
||||
/**
 * A notification channel for alerts; `config` holds channel-specific settings
 * (serialized to JSON when persisted).
 */
export interface AlertChannelConfig {
  /** Used directly as the `cf_alert_channels` primary key. */
  channelId: string;
  channelType: 'EMAIL' | 'SLACK' | 'WEBHOOK' | 'SMS';
  /** Channel-specific settings, e.g. address or webhook URL — shape not validated here. */
  config: Record<string, any>;
  enabled: boolean;
}
|
||||
|
||||
/**
 * An alerting rule tying a metric to a threshold condition and channels.
 *
 * NOTE(review): this type is exported but not referenced anywhere in the
 * visible code (configureMonitoring never persists alert rules) — confirm
 * whether a consumer exists or this is dead surface area.
 */
export interface AlertRule {
  ruleId: string;
  ruleName: string;
  metricId: string;
  /** Comparison applied to the metric value vs. `threshold`. */
  condition: 'GT' | 'LT' | 'EQ' | 'GTE' | 'LTE';
  threshold: number;
  severity: 'WARNING' | 'CRITICAL' | 'INFO';
  /** Channel ids to notify when the rule fires. */
  channels: string[];
  /** Minimum time between repeated notifications; units not established here. */
  cooldownPeriod: number;
  enabled: boolean;
}
|
||||
|
||||
/**
 * Outcome of {@link DevOpsService.configureMonitoring}.
 */
export interface MonitoringResult {
  success: boolean;
  /** Generated id of the form `MON-<timestamp>-<random>`. */
  configId: string;
  metricsConfigured: number;
  /**
   * NOTE(review): configureMonitoring fills this with the count of *enabled
   * metrics*, not actual AlertRule rows — the name is misleading.
   */
  alertRulesConfigured: number;
  /** Count of enabled channels persisted. */
  channelsConfigured: number;
  createdAt: Date;
}
|
||||
|
||||
/**
 * Options for {@link DevOpsService.verifyBackup}.
 */
export interface BackupVerificationConfig {
  tenantId: string;
  shopId: string;
  taskId: string;
  /** Correlation id propagated into logs and audit tables. */
  traceId: string;
  businessType: 'TOC' | 'TOB';
  /** Id of the backup row in `cf_backups` to verify. */
  backupId: string;
  /** Compare the stored SHA-256 checksum against the file on disk. */
  verifyChecksum: boolean;
  /** Compare the stored record count against the parsed payload. */
  verifyRecordCount: boolean;
  /** Spot-check random records from each table. */
  sampleVerification: boolean;
  /** Per-table sample size; required for sampling to actually run. */
  sampleSize?: number;
}
|
||||
|
||||
/**
 * Outcome of a backup verification, mirrored into `cf_backup_verifications`.
 */
export interface BackupVerificationResult {
  /** True only when both checksum and record-count checks passed. */
  success: boolean;
  backupId: string;
  checksumValid: boolean;
  recordCountValid: boolean;
  /** Present only when sample verification was requested and ran. */
  sampleVerificationResult?: {
    samplesChecked: number;
    samplesValid: number;
    /** Percentage (0–100) of sampled records that looked structurally valid. */
    accuracy: number;
  };
  startedAt: Date;
  completedAt: Date;
  /** Wall-clock duration in milliseconds. */
  duration: number;
}
|
||||
|
||||
export class DevOpsService {
|
||||
  // Root directory for backup artifacts, resolved against the process working directory.
  private static readonly BACKUP_DIR = path.join(process.cwd(), 'backups');
|
||||
|
||||
static async createBackup(config: BackupConfig): Promise<BackupResult> {
|
||||
const { tenantId, shopId, taskId, traceId, businessType, backupType, tables, compression, encryption } = config;
|
||||
|
||||
logger.info(`[DevOpsService] Creating backup - type: ${backupType}, tenantId: ${tenantId}, traceId: ${traceId}`);
|
||||
|
||||
const backupId = `BK-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
const startedAt = new Date();
|
||||
|
||||
try {
|
||||
if (!fs.existsSync(this.BACKUP_DIR)) {
|
||||
fs.mkdirSync(this.BACKUP_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
const tablesToBackup = tables || await this.getAllTables();
|
||||
let recordsCount = 0;
|
||||
const backupData: Record<string, any[]> = {};
|
||||
|
||||
for (const table of tablesToBackup) {
|
||||
if (config.excludeTables?.includes(table)) continue;
|
||||
|
||||
const data = await db(table).select('*');
|
||||
backupData[table] = data;
|
||||
recordsCount += data.length;
|
||||
}
|
||||
|
||||
const fileName = `${backupId}.json`;
|
||||
const filePath = path.join(this.BACKUP_DIR, fileName);
|
||||
|
||||
let content = JSON.stringify(backupData, null, 2);
|
||||
|
||||
if (compression) {
|
||||
const zlib = require('zlib');
|
||||
content = zlib.gzipSync(content).toString('base64');
|
||||
}
|
||||
|
||||
if (encryption) {
|
||||
const crypto = require('crypto');
|
||||
const key = process.env.BACKUP_ENCRYPTION_KEY || 'default-key-32-chars-long!!!!!'; // 32 chars for AES-256
|
||||
const iv = crypto.randomBytes(16);
|
||||
const cipher = crypto.createCipheriv('aes-256-cbc', Buffer.from(key), iv);
|
||||
let encrypted = cipher.update(content, 'utf8', 'hex');
|
||||
encrypted += cipher.final('hex');
|
||||
content = iv.toString('hex') + ':' + encrypted;
|
||||
}
|
||||
|
||||
fs.writeFileSync(filePath, content);
|
||||
|
||||
const checksum = this.calculateChecksum(content);
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const completedAt = new Date();
|
||||
const duration = completedAt.getTime() - startedAt.getTime();
|
||||
|
||||
await db('cf_backups').insert({
|
||||
id: backupId,
|
||||
tenant_id: tenantId,
|
||||
shop_id: shopId,
|
||||
task_id: taskId,
|
||||
trace_id: traceId,
|
||||
business_type: businessType,
|
||||
backup_type: backupType,
|
||||
file_path: filePath,
|
||||
file_size: fileSize,
|
||||
tables_backed_up: Object.keys(backupData).length,
|
||||
records_count: recordsCount,
|
||||
checksum,
|
||||
compression,
|
||||
encryption,
|
||||
retention_days: config.retentionDays,
|
||||
started_at: startedAt,
|
||||
completed_at: completedAt,
|
||||
duration,
|
||||
status: 'COMPLETED',
|
||||
});
|
||||
|
||||
logger.info(`[DevOpsService] Backup completed - backupId: ${backupId}, tables: ${Object.keys(backupData).length}, records: ${recordsCount}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
backupId,
|
||||
backupType,
|
||||
filePath,
|
||||
fileSize,
|
||||
tablesBackedUp: Object.keys(backupData).length,
|
||||
recordsCount,
|
||||
checksum,
|
||||
startedAt,
|
||||
completedAt,
|
||||
duration,
|
||||
};
|
||||
} catch (error: any) {
|
||||
logger.error(`[DevOpsService] Backup failed - backupId: ${backupId}, error: ${error.message}`);
|
||||
|
||||
await db('cf_backups').insert({
|
||||
id: backupId,
|
||||
tenant_id: tenantId,
|
||||
shop_id: shopId,
|
||||
task_id: taskId,
|
||||
trace_id: traceId,
|
||||
business_type: businessType,
|
||||
backup_type: backupType,
|
||||
started_at: startedAt,
|
||||
status: 'FAILED',
|
||||
error_message: error.message,
|
||||
});
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async restoreBackup(config: RestoreConfig): Promise<RestoreResult> {
|
||||
const { tenantId, shopId, taskId, traceId, businessType, backupId, targetTables, verifyAfterRestore } = config;
|
||||
|
||||
logger.info(`[DevOpsService] Restoring backup - backupId: ${backupId}, tenantId: ${tenantId}, traceId: ${traceId}`);
|
||||
|
||||
const startedAt = new Date();
|
||||
|
||||
try {
|
||||
const backup = await db('cf_backups')
|
||||
.where({ id: backupId, tenant_id: tenantId })
|
||||
.first();
|
||||
|
||||
if (!backup) {
|
||||
throw new Error(`Backup not found: ${backupId}`);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(backup.file_path)) {
|
||||
throw new Error(`Backup file not found: ${backup.file_path}`);
|
||||
}
|
||||
|
||||
let content = fs.readFileSync(backup.file_path, 'utf8');
|
||||
|
||||
if (backup.encryption) {
|
||||
const crypto = require('crypto');
|
||||
const key = process.env.BACKUP_ENCRYPTION_KEY || 'default-key-32-chars-long!!!!!';
|
||||
const parts = content.split(':');
|
||||
const iv = Buffer.from(parts[0], 'hex');
|
||||
const encrypted = parts[1];
|
||||
const decipher = crypto.createDecipheriv('aes-256-cbc', Buffer.from(key), iv);
|
||||
let decrypted = decipher.update(encrypted, 'hex', 'utf8');
|
||||
decrypted += decipher.final('utf8');
|
||||
content = decrypted;
|
||||
}
|
||||
|
||||
if (backup.compression) {
|
||||
const zlib = require('zlib');
|
||||
content = zlib.gunzipSync(Buffer.from(content, 'base64')).toString();
|
||||
}
|
||||
|
||||
const backupData: Record<string, any[]> = JSON.parse(content);
|
||||
const tablesToRestore = targetTables || Object.keys(backupData);
|
||||
let recordsRestored = 0;
|
||||
|
||||
for (const table of tablesToRestore) {
|
||||
if (!backupData[table]) continue;
|
||||
|
||||
await db(table).del();
|
||||
|
||||
const batchSize = 1000;
|
||||
const data = backupData[table];
|
||||
|
||||
for (let i = 0; i < data.length; i += batchSize) {
|
||||
const batch = data.slice(i, i + batchSize);
|
||||
await db(table).insert(batch);
|
||||
}
|
||||
|
||||
recordsRestored += data.length;
|
||||
}
|
||||
|
||||
const completedAt = new Date();
|
||||
const duration = completedAt.getTime() - startedAt.getTime();
|
||||
|
||||
let verificationResult;
|
||||
if (verifyAfterRestore) {
|
||||
verificationResult = await this.verifyBackup({
|
||||
tenantId,
|
||||
shopId,
|
||||
taskId,
|
||||
traceId,
|
||||
businessType,
|
||||
backupId,
|
||||
verifyChecksum: true,
|
||||
verifyRecordCount: true,
|
||||
sampleVerification: false,
|
||||
});
|
||||
}
|
||||
|
||||
await db('cf_restore_logs').insert({
|
||||
id: `RS-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
|
||||
tenant_id: tenantId,
|
||||
shop_id: shopId,
|
||||
task_id: taskId,
|
||||
trace_id: traceId,
|
||||
business_type: businessType,
|
||||
backup_id: backupId,
|
||||
tables_restored: tablesToRestore.length,
|
||||
records_restored: recordsRestored,
|
||||
started_at: startedAt,
|
||||
completed_at: completedAt,
|
||||
duration,
|
||||
status: 'COMPLETED',
|
||||
});
|
||||
|
||||
logger.info(`[DevOpsService] Restore completed - backupId: ${backupId}, tables: ${tablesToRestore.length}, records: ${recordsRestored}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
backupId,
|
||||
tablesRestored: tablesToRestore.length,
|
||||
recordsRestored,
|
||||
startedAt,
|
||||
completedAt,
|
||||
duration,
|
||||
verificationResult: verificationResult ? {
|
||||
success: verificationResult.success,
|
||||
checksumMatch: verificationResult.checksumValid,
|
||||
recordCountMatch: verificationResult.recordCountValid,
|
||||
} : undefined,
|
||||
};
|
||||
} catch (error: any) {
|
||||
logger.error(`[DevOpsService] Restore failed - backupId: ${backupId}, error: ${error.message}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async configureMonitoring(config: MonitoringConfig): Promise<MonitoringResult> {
|
||||
const { tenantId, shopId, taskId, traceId, businessType, metrics, alertChannels } = config;
|
||||
|
||||
logger.info(`[DevOpsService] Configuring monitoring - tenantId: ${tenantId}, metrics: ${metrics.length}, channels: ${alertChannels.length}, traceId: ${traceId}`);
|
||||
|
||||
const configId = `MON-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
|
||||
try {
|
||||
await db('cf_monitoring_configs').insert({
|
||||
id: configId,
|
||||
tenant_id: tenantId,
|
||||
shop_id: shopId,
|
||||
task_id: taskId,
|
||||
trace_id: traceId,
|
||||
business_type: businessType,
|
||||
created_at: new Date(),
|
||||
});
|
||||
|
||||
for (const metric of metrics) {
|
||||
await db('cf_monitoring_metrics').insert({
|
||||
id: metric.metricId,
|
||||
config_id: configId,
|
||||
metric_name: metric.metricName,
|
||||
metric_type: metric.metricType,
|
||||
threshold_warning: metric.threshold.warning,
|
||||
threshold_critical: metric.threshold.critical,
|
||||
evaluation_window: metric.evaluationWindow,
|
||||
enabled: metric.enabled,
|
||||
});
|
||||
}
|
||||
|
||||
for (const channel of alertChannels) {
|
||||
await db('cf_alert_channels').insert({
|
||||
id: channel.channelId,
|
||||
config_id: configId,
|
||||
channel_type: channel.channelType,
|
||||
config: JSON.stringify(channel.config),
|
||||
enabled: channel.enabled,
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[DevOpsService] Monitoring configured - configId: ${configId}, metrics: ${metrics.length}, channels: ${alertChannels.length}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
configId,
|
||||
metricsConfigured: metrics.length,
|
||||
alertRulesConfigured: metrics.filter(m => m.enabled).length,
|
||||
channelsConfigured: alertChannels.filter(c => c.enabled).length,
|
||||
createdAt: new Date(),
|
||||
};
|
||||
} catch (error: any) {
|
||||
logger.error(`[DevOpsService] Monitoring configuration failed - error: ${error.message}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
static async verifyBackup(config: BackupVerificationConfig): Promise<BackupVerificationResult> {
|
||||
const { tenantId, shopId, taskId, traceId, businessType, backupId, verifyChecksum, verifyRecordCount, sampleVerification, sampleSize } = config;
|
||||
|
||||
logger.info(`[DevOpsService] Verifying backup - backupId: ${backupId}, tenantId: ${tenantId}, traceId: ${traceId}`);
|
||||
|
||||
const startedAt = new Date();
|
||||
|
||||
try {
|
||||
const backup = await db('cf_backups')
|
||||
.where({ id: backupId, tenant_id: tenantId })
|
||||
.first();
|
||||
|
||||
if (!backup) {
|
||||
throw new Error(`Backup not found: ${backupId}`);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(backup.file_path)) {
|
||||
throw new Error(`Backup file not found: ${backup.file_path}`);
|
||||
}
|
||||
|
||||
let content = fs.readFileSync(backup.file_path, 'utf8');
|
||||
|
||||
if (backup.encryption) {
|
||||
const crypto = require('crypto');
|
||||
const key = process.env.BACKUP_ENCRYPTION_KEY || 'default-key-32-chars-long!!!!!';
|
||||
const parts = content.split(':');
|
||||
const iv = Buffer.from(parts[0], 'hex');
|
||||
const encrypted = parts[1];
|
||||
const decipher = crypto.createDecipheriv('aes-256-cbc', Buffer.from(key), iv);
|
||||
let decrypted = decipher.update(encrypted, 'hex', 'utf8');
|
||||
decrypted += decipher.final('utf8');
|
||||
content = decrypted;
|
||||
}
|
||||
|
||||
if (backup.compression) {
|
||||
const zlib = require('zlib');
|
||||
content = zlib.gunzipSync(Buffer.from(content, 'base64')).toString();
|
||||
}
|
||||
|
||||
const checksumValid = verifyChecksum ? this.calculateChecksum(content) === backup.checksum : true;
|
||||
|
||||
const backupData: Record<string, any[]> = JSON.parse(content);
|
||||
let totalRecords = 0;
|
||||
for (const table of Object.keys(backupData)) {
|
||||
totalRecords += backupData[table].length;
|
||||
}
|
||||
const recordCountValid = verifyRecordCount ? totalRecords === backup.records_count : true;
|
||||
|
||||
let sampleVerificationResult;
|
||||
if (sampleVerification && sampleSize) {
|
||||
let samplesChecked = 0;
|
||||
let samplesValid = 0;
|
||||
|
||||
for (const table of Object.keys(backupData)) {
|
||||
const data = backupData[table];
|
||||
const sampleIndices = this.getRandomIndices(data.length, Math.min(sampleSize, data.length));
|
||||
|
||||
for (const index of sampleIndices) {
|
||||
samplesChecked++;
|
||||
const record = data[index];
|
||||
if (record && typeof record === 'object') {
|
||||
samplesValid++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sampleVerificationResult = {
|
||||
samplesChecked,
|
||||
samplesValid,
|
||||
accuracy: samplesChecked > 0 ? (samplesValid / samplesChecked) * 100 : 0,
|
||||
};
|
||||
}
|
||||
|
||||
const completedAt = new Date();
|
||||
const duration = completedAt.getTime() - startedAt.getTime();
|
||||
|
||||
await db('cf_backup_verifications').insert({
|
||||
id: `BV-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
|
||||
tenant_id: tenantId,
|
||||
shop_id: shopId,
|
||||
task_id: taskId,
|
||||
trace_id: traceId,
|
||||
business_type: businessType,
|
||||
backup_id: backupId,
|
||||
checksum_valid: checksumValid,
|
||||
record_count_valid: recordCountValid,
|
||||
sample_verification_result: sampleVerificationResult ? JSON.stringify(sampleVerificationResult) : null,
|
||||
started_at: startedAt,
|
||||
completed_at: completedAt,
|
||||
duration,
|
||||
});
|
||||
|
||||
logger.info(`[DevOpsService] Backup verification completed - backupId: ${backupId}, checksumValid: ${checksumValid}, recordCountValid: ${recordCountValid}`);
|
||||
|
||||
return {
|
||||
success: checksumValid && recordCountValid,
|
||||
backupId,
|
||||
checksumValid,
|
||||
recordCountValid,
|
||||
sampleVerificationResult,
|
||||
startedAt,
|
||||
completedAt,
|
||||
duration,
|
||||
};
|
||||
} catch (error: any) {
|
||||
logger.error(`[DevOpsService] Backup verification failed - backupId: ${backupId}, error: ${error.message}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async getAllTables(): Promise<string[]> {
|
||||
const tables = await db.raw("SHOW TABLES");
|
||||
const dbName = Object.keys(tables[0])[0];
|
||||
return tables.map((row: any) => row[dbName]).filter((name: string) => name.startsWith('cf_'));
|
||||
}
|
||||
|
||||
private static calculateChecksum(content: string): string {
|
||||
const crypto = require('crypto');
|
||||
return crypto.createHash('sha256').update(content).digest('hex');
|
||||
}
|
||||
|
||||
private static getRandomIndices(max: number, count: number): number[] {
|
||||
const indices = new Set<number>();
|
||||
while (indices.size < count && indices.size < max) {
|
||||
indices.add(Math.floor(Math.random() * max));
|
||||
}
|
||||
return Array.from(indices);
|
||||
}
|
||||
|
||||
static async initializeTables(): Promise<void> {
|
||||
if (!(await db.schema.hasTable('cf_backups'))) {
|
||||
await db.schema.createTable('cf_backups', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('tenant_id').notNullable();
|
||||
table.string('shop_id').notNullable();
|
||||
table.string('task_id').notNullable();
|
||||
table.string('trace_id').notNullable();
|
||||
table.enum('business_type', ['TOC', 'TOB']).notNullable();
|
||||
table.enum('backup_type', ['FULL', 'INCREMENTAL', 'DIFFERENTIAL']).notNullable();
|
||||
table.string('file_path').notNullable();
|
||||
table.bigInteger('file_size');
|
||||
table.integer('tables_backed_up');
|
||||
table.integer('records_count');
|
||||
table.string('checksum');
|
||||
table.boolean('compression').defaultTo(false);
|
||||
table.boolean('encryption').defaultTo(false);
|
||||
table.integer('retention_days').defaultTo(30);
|
||||
table.timestamp('started_at').notNullable();
|
||||
table.timestamp('completed_at');
|
||||
table.integer('duration');
|
||||
table.enum('status', ['PENDING', 'RUNNING', 'COMPLETED', 'FAILED']).defaultTo('PENDING');
|
||||
table.text('error_message');
|
||||
|
||||
table.index(['tenant_id', 'shop_id']);
|
||||
table.index(['status']);
|
||||
table.index(['created_at']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_backups table');
|
||||
}
|
||||
|
||||
if (!(await db.schema.hasTable('cf_restore_logs'))) {
|
||||
await db.schema.createTable('cf_restore_logs', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('tenant_id').notNullable();
|
||||
table.string('shop_id').notNullable();
|
||||
table.string('task_id').notNullable();
|
||||
table.string('trace_id').notNullable();
|
||||
table.enum('business_type', ['TOC', 'TOB']).notNullable();
|
||||
table.string('backup_id').notNullable();
|
||||
table.integer('tables_restored');
|
||||
table.integer('records_restored');
|
||||
table.timestamp('started_at').notNullable();
|
||||
table.timestamp('completed_at');
|
||||
table.integer('duration');
|
||||
table.enum('status', ['PENDING', 'RUNNING', 'COMPLETED', 'FAILED']).defaultTo('PENDING');
|
||||
table.text('error_message');
|
||||
|
||||
table.index(['tenant_id', 'shop_id']);
|
||||
table.index(['backup_id']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_restore_logs table');
|
||||
}
|
||||
|
||||
if (!(await db.schema.hasTable('cf_monitoring_configs'))) {
|
||||
await db.schema.createTable('cf_monitoring_configs', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('tenant_id').notNullable();
|
||||
table.string('shop_id').notNullable();
|
||||
table.string('task_id').notNullable();
|
||||
table.string('trace_id').notNullable();
|
||||
table.enum('business_type', ['TOC', 'TOB']).notNullable();
|
||||
table.timestamp('created_at').defaultTo(db.fn.now());
|
||||
|
||||
table.index(['tenant_id', 'shop_id']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_monitoring_configs table');
|
||||
}
|
||||
|
||||
if (!(await db.schema.hasTable('cf_monitoring_metrics'))) {
|
||||
await db.schema.createTable('cf_monitoring_metrics', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('config_id').notNullable();
|
||||
table.string('metric_name').notNullable();
|
||||
table.enum('metric_type', ['CPU', 'MEMORY', 'DISK', 'NETWORK', 'DATABASE', 'API_LATENCY', 'ERROR_RATE', 'CUSTOM']).notNullable();
|
||||
table.decimal('threshold_warning', 10, 2);
|
||||
table.decimal('threshold_critical', 10, 2);
|
||||
table.integer('evaluation_window').defaultTo(300);
|
||||
table.boolean('enabled').defaultTo(true);
|
||||
|
||||
table.foreign('config_id').references('cf_monitoring_configs.id');
|
||||
table.index(['config_id']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_monitoring_metrics table');
|
||||
}
|
||||
|
||||
if (!(await db.schema.hasTable('cf_alert_channels'))) {
|
||||
await db.schema.createTable('cf_alert_channels', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('config_id').notNullable();
|
||||
table.enum('channel_type', ['EMAIL', 'SLACK', 'WEBHOOK', 'SMS']).notNullable();
|
||||
table.json('config').notNullable();
|
||||
table.boolean('enabled').defaultTo(true);
|
||||
|
||||
table.foreign('config_id').references('cf_monitoring_configs.id');
|
||||
table.index(['config_id']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_alert_channels table');
|
||||
}
|
||||
|
||||
if (!(await db.schema.hasTable('cf_backup_verifications'))) {
|
||||
await db.schema.createTable('cf_backup_verifications', (table) => {
|
||||
table.string('id').primary();
|
||||
table.string('tenant_id').notNullable();
|
||||
table.string('shop_id').notNullable();
|
||||
table.string('task_id').notNullable();
|
||||
table.string('trace_id').notNullable();
|
||||
table.enum('business_type', ['TOC', 'TOB']).notNullable();
|
||||
table.string('backup_id').notNullable();
|
||||
table.boolean('checksum_valid');
|
||||
table.boolean('record_count_valid');
|
||||
table.json('sample_verification_result');
|
||||
table.timestamp('started_at').notNullable();
|
||||
table.timestamp('completed_at');
|
||||
table.integer('duration');
|
||||
|
||||
table.index(['tenant_id', 'shop_id']);
|
||||
table.index(['backup_id']);
|
||||
});
|
||||
logger.info('[DevOpsService] Created cf_backup_verifications table');
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user