
Implement MariaDB connector (pool); import from SQLite on startup

Christian Kahlau 2 years ago
Parent
Commit
8a16dc6cd7

+ 3 - 1
.dockerignore

@@ -1 +1,3 @@
-**/node_modules
+**/node_modules
+server/data
+server/db

+ 2 - 0
server.docker-compose.yml

@@ -16,6 +16,8 @@ services:
       - 8880:80
     volumes:
       - ./server/data:/home/node/monitoring/data
+    links:
+      - mariadb
   mariadb:
     build:
       context: server/mysql

+ 1 - 0
server.dockerfile

@@ -36,6 +36,7 @@ COPY --from=build_helper ${APPDIR}/server/public/ ./public
 COPY ./server/package.json .
 
 RUN npm install --omit=dev
+COPY ./server/database.json .
 
 RUN mkdir data
 VOLUME ${APPDIR}/data

+ 12 - 0
server/database.json

@@ -0,0 +1,12 @@
+{
+  "db": {
+    "driver": "mysql",
+    "multipleStatements": true,
+    "host": "mariadb",
+    "port": 3306,
+    "database": "hbbq-monitoring",
+    "user": "monitor",
+    "password": "i4mGr00ti4mGr00t"
+  },
+  "sql-file": true
+}

+ 62 - 0
server/mysql/init-db.sql

@@ -9,5 +9,67 @@ CREATE USER 'monitor'@'%' IDENTIFIED BY 'i4mGr00ti4mGr00t';
 USE `hbbq-monitoring`;
 GRANT ALL PRIVILEGES ON `hbbq-monitoring`.* TO 'monitor'@'%' IDENTIFIED BY 'i4mGr00ti4mGr00t';
 
+CREATE TABLE `Server` (
+  `ID` INT PRIMARY KEY AUTO_INCREMENT,
+  `Title` VARCHAR(64) NOT NULL,
+  `FQDN` VARCHAR(512) NOT NULL,
+  UNIQUE INDEX UQ_Server_Title (`Title`),
+  UNIQUE INDEX UQ_Server_FQDN (`FQDN`)
+);
+
+CREATE TABLE `HealthCheckConfig` (
+  `ID` INT PRIMARY KEY AUTO_INCREMENT,
+  `ServerID` INT NOT NULL,
+  `Type` VARCHAR(16) NOT NULL,
+  `Title` VARCHAR(512) NOT NULL,
+  CONSTRAINT FK_HealthCheckConfig_Server FOREIGN KEY(`ServerID`) REFERENCES `Server`(`ID`) ON DELETE CASCADE,
+  UNIQUE INDEX UQ_HealthCheckConfig_1 (`ServerID`, `Type`, `Title`)
+);
+
+CREATE TABLE `HealthCheckParams` (
+  `ID` INT PRIMARY KEY AUTO_INCREMENT,
+  `ConfigID` INT NOT NULL,
+  `Type` VARCHAR(16) NOT NULL,
+  `Key` VARCHAR(32) NOT NULL,
+  `Value` TEXT NOT NULL,
+  CONSTRAINT FK_HealthCheckParams_HealthCheckConfig FOREIGN KEY(`ConfigID`) REFERENCES `HealthCheckConfig`(`ID`) ON DELETE CASCADE
+);
+
+CREATE TABLE `HealthCheckDataEntry` (
+  `ID` BIGINT PRIMARY KEY AUTO_INCREMENT,
+  `ConfigID` INT NOT NULL,
+  `Timestamp` TIMESTAMP NOT NULL,
+  `Status` SMALLINT NOT NULL,
+  `Message` TEXT,
+  CONSTRAINT FK_HealthCheckDataEntry_HealthCheckConfig FOREIGN KEY(`ConfigID`) REFERENCES `HealthCheckConfig`(`ID`) ON DELETE CASCADE
+);
+
+CREATE TABLE `ServerConfig` (
+  `ID` INT PRIMARY KEY AUTO_INCREMENT,
+  `ServerID` INT NOT NULL,
+  `Key` VARCHAR(32) NOT NULL,
+  `Value` TEXT NOT NULL,
+  CONSTRAINT FK_ServerConfig_Server FOREIGN KEY(`ServerID`) REFERENCES `Server`(`ID`) ON DELETE CASCADE,
+  UNIQUE INDEX UQ_ServerConfig_1(`ServerID`, `Key`)
+);
+
+CREATE TABLE `ServerDataEntry` (
+  `ID` BIGINT PRIMARY KEY AUTO_INCREMENT,
+  `ServerID` INT NOT NULL,
+  `Timestamp` TIMESTAMP NOT NULL,
+  CONSTRAINT FK_ServerDataEntry_Server FOREIGN KEY(`ServerID`) REFERENCES `Server`(`ID`) ON DELETE CASCADE,
+  UNIQUE INDEX UQ_ServerDataEntry_1(`ServerID`, `Timestamp`)
+);
+
+CREATE TABLE `ServerDataValue` (
+  `ID` BIGINT PRIMARY KEY AUTO_INCREMENT,
+  `EntryID` BIGINT NOT NULL,
+  `Type` VARCHAR(512) NOT NULL,
+  `Key` VARCHAR(32) NOT NULL,
+  `Value` DOUBLE NOT NULL,
+  CONSTRAINT FK_ServerDataValue_ServerDataEntry FOREIGN KEY(`EntryID`) REFERENCES `ServerDataEntry`(`ID`) ON DELETE CASCADE,
+  UNIQUE INDEX UQ_ServerDataValue_1(`EntryID`, `Type`, `Key`)
+);
+
 COMMIT;
 SET AUTOCOMMIT = 1;

+ 2 - 0
server/package.json

@@ -14,10 +14,12 @@
     "express": "^4.18.1",
     "firebase-admin": "^11.4.1",
     "moment": "^2.29.4",
+    "mysql": "^2.18.1",
     "sqlite3": "^5.1.1"
   },
   "devDependencies": {
     "@types/express": "^4.17.14",
+    "@types/mysql": "^2.15.21",
     "@types/node": "^18.7.19",
     "@types/sqlite3": "^3.1.8",
     "typescript": "^4.8.3"

+ 2 - 2
server/src/ctrl/controller-pool.interface.ts

@@ -1,9 +1,9 @@
-import { Database } from './database.class';
+import { DataProvider } from './data-provider.interface';
 import { HttpCheckController } from './http-check-controller.class';
 import { ServerConnector } from './server-connector.class';
 
 export interface ControllerPool {
-  db: Database;
+  db: DataProvider;
   serverConnector: ServerConnector;
   httpChecks: HttpCheckController;
 }

+ 24 - 0
server/src/ctrl/data-provider.interface.ts

@@ -0,0 +1,24 @@
+import { ServiceCheckData } from '../../../common/lib/http-check-data.module';
+import { ServiceChangedStatus } from '../lib/service-changed-status.enum';
+import { HealthCheckDataProvider } from './health-check-data-provider.interface';
+
+export interface DataProvider extends HealthCheckDataProvider {
+  set onError(listener: (error: any) => void);
+  open(migrate?: boolean): Promise<void>;
+  getAllServerConfigs(): Promise<Server[]>;
+  insertServerData(serverID: number, data: ReducedData[]): Promise<void>;
+  getServerDataTypes(serverID: number): Promise<ServerDataTypesConfig[]>;
+  queryServerData(serverID: number, type: ServerDataType, from: Date, to: Date): Promise<ServerData[]>;
+  queryServerStats(serverID: number, type: ServerDataType, from: Date, to: Date): Promise<ReducedValuesPerc>;
+  getHttpCheckConfigs(serverID?: number): Promise<Array<HttpCheckConfig | null>>;
+  saveHttpCheckConfig(
+    serverID: number,
+    conf: HttpCheckConfig
+  ): Promise<{
+    status: ServiceChangedStatus;
+    result: HttpCheckConfig;
+  }>;
+  deleteHealthCheckConfig(serverID: number, confID: number): Promise<boolean>;
+  queryServiceCheckData(serverID: number, confID: number, from: Date, to: Date): Promise<ServiceCheckData[]>;
+  queryServiceCheckLogs(serverID: number, confID: number, from: Date, to: Date): Promise<ServiceCheckData[]>;
+}
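
Note: both the existing SQLiteDatabase and the new MariaDBDatabase implement this interface, so callers can stay backend-agnostic. A minimal sketch (the listServers helper below is illustrative, not part of the commit):

import { DataProvider } from './data-provider.interface';

// Works against either backend, since both implement DataProvider.
async function listServers(db: DataProvider): Promise<void> {
  await db.open();
  const servers = await db.getAllServerConfigs();
  for (const server of servers) {
    console.log(server.id, server.title, server.fqdn);
  }
}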

+ 5 - 4
server/src/ctrl/http-check-controller.class.ts

@@ -5,8 +5,9 @@ import defaults from '../../../common/defaults.module';
 import { HttpCheckStatus } from '../../../common/lib/http-check-data.module';
 import { Logger } from '../../../common/util/logger.class';
 
+import { ServiceChangedStatus } from '../lib/service-changed-status.enum';
 import { Timer } from '../timer.class';
-import { Database, ServiceChangedStatus } from './database.class';
+import { SQLiteDatabase } from './sqlite-database.class';
 import { FCMController } from './fcm-controller.class';
 import { HealthCheckDataProvider } from './health-check-data-provider.interface';
 
@@ -15,10 +16,10 @@ type ContentCheckError = { type: 'contentCheck'; status: HttpCheckStatus; messag
 
 export class HttpCheckController {
   private subscriptions: Array<Subscriber> = [];
-  private db!: Database;
+  private db!: SQLiteDatabase;
 
   constructor() {
-    this.db = new Database();
+    this.db = new SQLiteDatabase();
     (async () => {
       try {
         await this.db.open();
@@ -34,7 +35,7 @@ export class HttpCheckController {
           await this.runCheck(conf, this.db);
         }
       } catch (err) {
-        Logger.error('[FATAL] Initializing ServerConnector failed:', err);
+        Logger.error('[FATAL] Initializing HttpCheckController failed:', err);
         Logger.error('[EXITING]');
         process.exit(1);
       }

+ 110 - 0
server/src/ctrl/mariadb-connector.class.ts

@@ -0,0 +1,110 @@
+import { QueryOptions, PoolConnection, Pool } from 'mysql';
+import { Logger } from '../../../common/util/logger.class';
+
+const MAX_CONNECTION_RETRIES = 60;
+
+export class MariaDBConnector {
+  private _conn?: PoolConnection;
+  private _onError = (err: any) => {};
+
+  constructor(private pool: Pool) {}
+
+  public set onError(e: (err: any) => void) {
+    this._onError = e;
+  }
+
+  public query(options: string | QueryOptions, values: any): Promise<Array<object | object[]>> {
+    return new Promise(async (resolve, reject) => {
+      try {
+        if (!this._conn) return reject('Database not opened.');
+        this._conn.query(options, values, (err, results) => (err ? reject(err) : resolve(results)));
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+
+  public beginTransaction() {
+    return new Promise<void>((resolve, reject) => {
+      try {
+        if (!this._conn) return reject('Database not opened.');
+        this._conn.beginTransaction(err => (err ? reject(err) : resolve()));
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+
+  public commit() {
+    return new Promise<void>((resolve, reject) => {
+      try {
+        if (!this._conn) return reject('Database not opened.');
+        this._conn.commit(err => (err ? reject(err) : resolve()));
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+
+  public async rollback() {
+    return new Promise<void>((resolve, reject) => {
+      try {
+        if (!this._conn) return reject('Database not opened.');
+        this._conn.rollback(err => (err ? reject(err) : resolve()));
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+
+  public async connect(): Promise<void> {
+    let retries = 1;
+    let lasterror: any | null = null;
+    while (retries <= MAX_CONNECTION_RETRIES) {
+      try {
+        Logger.info(`[INFO] Connecting mariadb connection pool (${retries}/${MAX_CONNECTION_RETRIES}) ...`);
+        await this._connect();
+        lasterror = null;
+        break;
+      } catch (e) {
+        lasterror = e;
+        await new Promise<void>(res => setTimeout(res, 1000));
+      } finally {
+        retries++;
+      }
+    }
+    if (lasterror) throw lasterror;
+  }
+
+  private async _connect(): Promise<void> {
+    return new Promise((resolve, reject) => {
+      try {
+        if (!this._conn || !['connected', 'authenticated'].includes(this._conn?.state)) {
+          return this.pool.getConnection((err, conn) => {
+            if (err) return reject(err);
+            this._conn = conn;
+            this._conn.on('error', this._onError);
+
+            resolve();
+          });
+        }
+        resolve();
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+
+  public close(): Promise<void> {
+    return new Promise(async (resolve, reject) => {
+      try {
+        if (this._conn && ['connected', 'authenticated'].includes(this._conn?.state)) {
+          this._conn.release();
+        }
+        resolve();
+      } catch (e) {
+        reject(e);
+      }
+    });
+  }
+}
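
Usage sketch for the connector (the pool options shown here are placeholders; in the application the pool comes from MariaDBPoolFactory and database.json, and the password is omitted):

import { createPool } from 'mysql';
import { MariaDBConnector } from './mariadb-connector.class';

async function example(): Promise<void> {
  const pool = createPool({ host: 'mariadb', port: 3306, user: 'monitor', password: '...', database: 'hbbq-monitoring', multipleStatements: true });
  const conn = new MariaDBConnector(pool);
  conn.onError = err => console.error('[DB.ONERROR]', err);

  await conn.connect(); // retries up to MAX_CONNECTION_RETRIES times, one second apart
  await conn.beginTransaction();
  try {
    await conn.query('INSERT INTO `Server`(`Title`, `FQDN`) VALUES (?, ?)', ['demo', 'demo.example.org']);
    await conn.commit();
  } catch (err) {
    await conn.rollback();
    throw err;
  } finally {
    await conn.close(); // releases the connection back to the pool
  }
}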

+ 633 - 0
server/src/ctrl/mariadb-database.class.ts

@@ -0,0 +1,633 @@
+import moment from 'moment';
+import { Pool } from 'mysql';
+
+import defaults from '../../../common/defaults.module';
+import { ServiceConfig, validateParamType } from '../../../common/interfaces/service-config.interface';
+import { HttpCheckData, HttpCheckStatus, ServiceCheckData, ServiceCheckDataEntry } from '../../../common/lib/http-check-data.module';
+import { Logger } from '../../../common/util/logger.class';
+
+import { DatabaseException } from '../lib/database.exception';
+import { ServiceChangedStatus } from '../lib/service-changed-status.enum';
+import { ValidationException } from '../lib/validation-exception.class';
+import { DataProvider } from './data-provider.interface';
+// TODO: import { DBMigration } from './db-migration.class';
+import { MariaDBConnector } from './mariadb-connector.class';
+
+export class MariaDBDatabase implements DataProvider {
+  private db: MariaDBConnector;
+  private _onError: (error: any) => void = err => console.error('[DB.ONERROR]', err);
+
+  constructor(pool: Pool) {
+    this.db = new MariaDBConnector(pool);
+  }
+
+  public set onError(listener: (error: any) => void) {
+    this._onError = listener;
+  }
+
+  public async open(migrate = false) {
+    try {
+      this.db.onError = this._onError.bind(this);
+      await this.db.connect();
+
+      Logger.info('[INFO] Opened MariaDB Connection');
+
+      if (migrate) {
+        //TODO: RUN DB MIGRATIONS
+        // const mig = new DBMigration(this.db);
+        // await mig.update();
+      }
+    } catch (err) {
+      Logger.error('[FATAL] Initializing Database failed:', err);
+      Logger.error('[EXITING]');
+      process.exit(1);
+    }
+  }
+
+  public async getAllServerConfigs(): Promise<Server[]> {
+    const res = await this.db.query(
+      `SELECT 
+        Server.*,
+        ServerConfig.Key,
+        ServerConfig.Value
+        FROM Server
+        LEFT OUTER JOIN ServerConfig ON Server.ID = ServerConfig.ServerID
+        ORDER BY Server.Title, ServerConfig.Key`,
+      []
+    );
+
+    return res.reduce((res: Server[], line: any, i) => {
+      const serverID = line['ID'];
+      let server: Server;
+      if (i === 0 || res[res.length - 1].id !== serverID) {
+        server = { id: serverID, title: line['Title'], fqdn: line['FQDN'], config: {} };
+        res.push(server);
+      } else {
+        server = res[res.length - 1];
+      }
+
+      if (!!line['Key']) {
+        server.config[line['Key']] = line['Value'];
+      }
+
+      return res;
+    }, [] as Server[]);
+  }
+
+  public async insertServerData(serverID: number, data: ReducedData[]) {
+    if (!data.length) return;
+
+    await this.db.beginTransaction();
+    try {
+      for (const entry of data) {
+        const result = await this.db.query(
+          `INSERT INTO ServerDataEntry(ServerID, Timestamp) VALUES(?, ?);
+          SELECT LAST_INSERT_ID() as ID;`,
+          [serverID, entry.time.getTime()]
+        );
+
+        if (!result || result.length < 2) throw new DatabaseException('Unexpected result during insertServerData');
+
+        let entryID = (result[1] as any[])[0]['ID'];
+
+        for (const type of Object.keys(entry).filter(t => !['time', 'hdd'].includes(t))) {
+          for (const key of Object.keys((entry as any)[type])) {
+            await this.db.query('INSERT INTO ServerDataValue(EntryID, Type, `Key`, Value) VALUES(?, ?, ?, ?);', [
+              entryID,
+              type,
+              key,
+              (entry as any)[type][key]
+            ]);
+          }
+        }
+
+        if (entry.hdd) {
+          for (const mount of Object.keys(entry.hdd)) {
+            for (const key of Object.keys(entry.hdd[mount])) {
+              await this.db.query('INSERT INTO ServerDataValue(EntryID, Type, `Key`, Value) VALUES(?, ?, ?, ?);', [
+                entryID,
+                `hdd:${mount}`,
+                key,
+                (entry.hdd[mount] as any)[key]
+              ]);
+            }
+          }
+        }
+      }
+      await this.db.commit();
+    } catch (err) {
+      await this.db.rollback();
+      throw err;
+    }
+  }
+
+  public async getServerDataTypes(serverID: number) {
+    const results: any[] = await this.db.query(
+      `
+      SELECT
+        ServerDataValue.Type 
+      FROM ServerDataEntry
+      JOIN ServerDataValue ON ServerDataEntry.ID = ServerDataValue.EntryID
+      WHERE ServerDataEntry.ServerID = ?
+      GROUP BY ServerDataValue.Type
+      ORDER BY ServerDataValue.Type;
+    `,
+      [serverID]
+    );
+
+    return results.reduce((res: Array<ServerDataTypesConfig>, { Type: type }) => {
+      if (!type.startsWith('hdd:')) {
+        res.push({ type });
+      } else {
+        let hdd = res.find(c => c.type === 'hdd');
+        if (!hdd) {
+          hdd = { type: 'hdd', subtypes: [] };
+          res.push(hdd);
+        }
+        hdd.subtypes?.push({ type: type.substring(4) });
+      }
+      return res;
+    }, []) as Array<ServerDataTypesConfig>;
+  }
+
+  public async queryServerData(serverID: number, type: ServerDataType, from: Date, to: Date): Promise<ServerData[]> {
+    const diffMs = moment(to).diff(moment(from));
+    const sectionMs = Math.floor(diffMs / 100);
+    const select_max = type !== 'cpu';
+    const select_types = select_max ? [type, type, type] : [type, type];
+    const result: any[] = await this.db.query(
+      `
+      SELECT 
+        CEIL(Timestamp / ?) * ? as 'Timegroup',
+        AVG(VALUE_AVG.Value) as 'avg',
+        MAX(VALUE_PEAK.Value) as 'peak'${
+          select_max
+            ? `,
+        MAX(VALUE_MAX.Value) as 'max'`
+            : ''
+        }
+      FROM ServerDataEntry
+      JOIN ServerDataValue AS VALUE_AVG ON ServerDataEntry.ID = VALUE_AVG.EntryID AND VALUE_AVG.Type = ? AND VALUE_AVG.Key = 'avg'
+      JOIN ServerDataValue AS VALUE_PEAK ON ServerDataEntry.ID = VALUE_PEAK.EntryID AND VALUE_PEAK.Type = ? AND VALUE_PEAK.Key = 'peak'
+      ${
+        select_max
+          ? "JOIN ServerDataValue AS VALUE_MAX ON ServerDataEntry.ID = VALUE_MAX.EntryID AND VALUE_MAX.Type = ? AND VALUE_MAX.Key = 'max'"
+          : ''
+      }
+      WHERE ServerDataEntry.ServerID = ?
+      AND ServerDataEntry.Timestamp BETWEEN ? AND ?
+      GROUP BY Timegroup
+      ORDER BY Timegroup;
+    `,
+      [sectionMs, sectionMs, ...select_types, serverID, from.getTime(), to.getTime()]
+    );
+
+    return result.map(r => ({ time: new Date(r.Timegroup), avg: r.avg, peak: r.peak, max: r.max }));
+  }
+
+  public async queryServerStats(serverID: number, type: ServerDataType, from: Date, to: Date): Promise<ReducedValuesPerc> {
+    const select_max = type !== 'cpu';
+    const select_types = select_max ? [type, type, type] : [type, type];
+    const result: any[] = await this.db.query(
+      `
+      SELECT 
+        AVG(VALUE_AVG.Value) as 'avg',
+        AVG(VALUE_PEAK.Value) as 'peak'${
+          select_max
+            ? `,
+        MAX(VALUE_MAX.Value) as 'max'`
+            : ''
+        }
+      FROM ServerDataEntry
+      JOIN ServerDataValue AS VALUE_AVG ON ServerDataEntry.ID = VALUE_AVG.EntryID AND VALUE_AVG.Type = ? AND VALUE_AVG.Key = 'avg'
+      JOIN ServerDataValue AS VALUE_PEAK ON ServerDataEntry.ID = VALUE_PEAK.EntryID AND VALUE_PEAK.Type = ? AND VALUE_PEAK.Key = 'peak'
+      ${
+        select_max
+          ? "JOIN ServerDataValue AS VALUE_MAX ON ServerDataEntry.ID = VALUE_MAX.EntryID AND VALUE_MAX.Type = ? AND VALUE_MAX.Key = 'max'"
+          : ''
+      }
+      WHERE ServerDataEntry.ServerID = ?
+      AND ServerDataEntry.Timestamp BETWEEN ? AND ?;
+    `,
+      [...select_types, serverID, from.getTime(), to.getTime()]
+    );
+
+    const row = result[0];
+    if (Object.keys(row).includes('max')) {
+      return {
+        avg: ((row['avg'] as number) / (row['max'] as number)) * 100,
+        peak: ((row['peak'] as number) / (row['max'] as number)) * 100
+      };
+    } else {
+      return {
+        avg: row['avg'] as number,
+        peak: row['peak'] as number
+      };
+    }
+  }
+
+  private async getHealthCheckConfigs(serverID?: number, type = 'http') {
+    const res: any[] = await this.db.query(
+      `SELECT 
+        HealthCheckConfig.*,
+        HealthCheckParams.Type as '_ParamType',
+        HealthCheckParams.Key as '_ParamKey',
+        HealthCheckParams.Value as '_ParamValue'
+        FROM HealthCheckConfig
+        LEFT OUTER JOIN HealthCheckParams ON HealthCheckConfig.ID = HealthCheckParams.ConfigID
+        WHERE HealthCheckConfig.Type = ?
+        ${!!serverID ? 'AND HealthCheckConfig.ServerID = ?' : ''}
+        ORDER BY HealthCheckConfig.Title, _ParamType, _ParamKey`,
+      [type, serverID]
+    );
+
+    return this.configFromResultRows(res);
+  }
+
+  public async getHttpCheckConfigs(serverID?: number) {
+    return (await this.getHealthCheckConfigs(serverID)).map(this.httpCheckConfigFrom);
+  }
+
+  private async getHealthCheckConfigByID(serverID: number, configID: number) {
+    if (!serverID && !configID) return null;
+
+    const res: any[] = await this.db.query(
+      `SELECT 
+        HealthCheckConfig.*,
+        HealthCheckParams.Type as '_ParamType',
+        HealthCheckParams.Key as '_ParamKey',
+        HealthCheckParams.Value as '_ParamValue'
+        FROM HealthCheckConfig
+        LEFT OUTER JOIN HealthCheckParams ON HealthCheckConfig.ID = HealthCheckParams.ConfigID
+        WHERE HealthCheckConfig.ID = ?
+        AND HealthCheckConfig.ServerID = ?
+        ORDER BY HealthCheckConfig.Title, _ParamType, _ParamKey`,
+      [configID, serverID]
+    );
+
+    if (!res.length) return null;
+
+    const configs = this.configFromResultRows(res);
+
+    return configs[0];
+  }
+
+  public async getHttpCheckConfigByID(serverID: number, configID: number) {
+    return this.httpCheckConfigFrom(await this.getHealthCheckConfigByID(serverID, configID));
+  }
+
+  public async saveHttpCheckConfig(serverID: number, conf: HttpCheckConfig) {
+    const validationErrors = this.validateHttpCheckConfig(conf);
+    if (validationErrors) throw new ValidationException('Validation of HttpCheckConfig object failed', validationErrors);
+
+    conf.serverId = serverID;
+    let status = ServiceChangedStatus.None;
+
+    const oldConf = await this.getHttpCheckConfigByID(serverID, conf.id);
+    await this.db.beginTransaction();
+    try {
+      if (oldConf) {
+        // UPDATE
+        Logger.debug('[DEBUG] Updating HealthCheckConfig', conf.title, `(${oldConf.id})`);
+        if (oldConf.title !== conf.title) {
+          await this.db.query('UPDATE HealthCheckConfig SET Title = ? WHERE ID = ?', [conf.title, oldConf.id]);
+        }
+
+        let updValues: any[][] = [];
+        if (oldConf.url !== conf.url) updValues.push([conf.url, conf.id, 'url']);
+        if (oldConf.interval !== conf.interval) {
+          updValues.push([conf.interval, conf.id, 'interval']);
+          status = ServiceChangedStatus.Rescheduled;
+        }
+        if (oldConf.timeout !== conf.timeout) updValues.push([conf.timeout ?? defaults.serviceChecks.httpTimeout, conf.id, 'timeout']);
+        if (oldConf.active !== conf.active) {
+          updValues.push([conf.active ?? defaults.serviceChecks.active ? 1 : 0, conf.id, 'active']);
+          status = conf.active ?? defaults.serviceChecks.active ? ServiceChangedStatus.Activated : ServiceChangedStatus.Deactivated;
+        }
+        if (oldConf.notify !== conf.notify) updValues.push([conf.notify ?? defaults.serviceChecks.notify ? 1 : 0, conf.id, 'notify']);
+        if (oldConf.notifyThreshold !== conf.notifyThreshold)
+          updValues.push([conf.notifyThreshold ?? defaults.serviceChecks.notifyThreshold, conf.id, 'notifyThreshold']);
+        if (updValues.length) {
+          for (const data of updValues) {
+            await this.db.query('UPDATE HealthCheckParams SET Value = ? WHERE ConfigID = ? AND `Key` = ?;', data);
+          }
+        }
+
+        const res = await this.db.query('SELECT * FROM HealthCheckParams WHERE ConfigID = ? AND `Key` = "check";', [conf.id]);
+        updValues = [];
+        const delIDs: number[] = [];
+        res.forEach((row: any, i) => {
+          if (i < conf.checks.length) {
+            updValues.push([JSON.stringify(conf.checks[i]), row['ID']]);
+          } else {
+            delIDs.push(row['ID']);
+          }
+        });
+
+        if (delIDs.length) {
+          const delSql = `DELETE FROM HealthCheckParams WHERE ID IN (${delIDs.map(() => '?').join(',')});`;
+          await this.db.query(delSql, delIDs);
+        }
+
+        if (updValues.length) {
+          for (const data of updValues) {
+            await this.db.query('UPDATE HealthCheckParams SET Value = ? WHERE ID = ?;', data);
+          }
+        }
+        const insValues = conf.checks.filter((c, i) => i > res.length - 1).map(c => [conf.id, 'regexp', 'check', JSON.stringify(c)]);
+        if (insValues.length) {
+          for (const data of insValues) {
+            await this.db.query('INSERT INTO HealthCheckParams(ConfigID, Type, `Key`, Value) VALUES(?, ?, ?, ?);', data);
+          }
+        }
+      } else {
+        // INSERT
+        Logger.debug('[DEBUG] Inserting new HealthCheckConfig', conf.title);
+        const res = await this.db.query(
+          `INSERT INTO HealthCheckConfig(ServerID, Type, Title) VALUES(?, ?, ?);
+          SELECT LAST_INSERT_ID() as ID;`,
+          [serverID, 'http', conf.title]
+        );
+
+        if (!res || res.length < 2) throw new DatabaseException('Unexpected result during saveHttpCheckConfig');
+        conf.id = (res[1] as any[])[0]['ID'];
+
+        if (conf.active ?? defaults.serviceChecks.active) {
+          status = ServiceChangedStatus.Created;
+        }
+
+        const insCheckValues = conf.checks.map(c => [conf.id, 'regexp', 'check', c]);
+        await this.db.query(
+          `INSERT INTO HealthCheckParams(ConfigID, Type, \`Key\`, Value) VALUES
+          (?, ?, ?, ?),
+          (?, ?, ?, ?),
+          (?, ?, ?, ?),
+          (?, ?, ?, ?),
+          (?, ?, ?, ?),
+          (?, ?, ?, ?)${conf.checks.length ? `,${insCheckValues.map(() => '(?, ?, ?, ?)').join(',')}` : ''}`,
+          [
+            ...[conf.id, 'text', 'url', conf.url],
+            ...[conf.id, 'boolean', 'active', conf.active ?? defaults.serviceChecks.active ? 1 : 0],
+            ...[conf.id, 'number', 'interval', conf.interval],
+            ...[conf.id, 'number', 'timeout', conf.timeout ?? defaults.serviceChecks.httpTimeout],
+            ...[conf.id, 'boolean', 'notify', conf.notify ?? defaults.serviceChecks.notify],
+            ...[conf.id, 'number', 'notifyThreshold', conf.notifyThreshold ?? defaults.serviceChecks.notifyThreshold],
+            ...conf.checks.reduce((ret, check) => [...ret, conf.id, 'regexp', 'check', JSON.stringify(check)], [] as any[])
+          ]
+        );
+      }
+
+      await this.db.commit();
+      return { status, result: conf };
+    } catch (err) {
+      await this.db.rollback();
+      throw err;
+    }
+  }
+
+  async deleteHealthCheckConfig(serverID: number, confID: number) {
+    const conf = await this.getHealthCheckConfigByID(serverID, confID);
+    if (!conf) return false;
+
+    await this.db.query('DELETE FROM HealthCheckConfig WHERE ID = ?;', [confID]);
+    return true;
+  }
+
+  async insertHealthCheckData(confID: number, time: Date, status: HttpCheckStatus, message: string) {
+    const res = await this.db.query(
+      `INSERT INTO HealthCheckDataEntry(ConfigID, Timestamp, Status, Message) VALUES(?, ?, ?, ?);
+      SELECT LAST_INSERT_ID() as ID;`,
+      [confID, time.getTime(), status, message]
+    );
+
+    if (!res || res.length < 2) throw new DatabaseException('Unexpected result during insertHealthCheckData');
+    const id = (res[1] as any[])[0]['ID'];
+
+    return {
+      id,
+      configId: confID,
+      time,
+      status,
+      message
+    } as HttpCheckData;
+  }
+
+  async queryServiceCheckData(serverID: number, confID: number, from: Date, to: Date) {
+    const result = await this.db.query(
+      `
+      SELECT DataEntryChanges.*
+      FROM HealthCheckConfig
+      JOIN (
+        SELECT * FROM (
+          SELECT 
+            *
+          FROM HealthCheckDataEntry
+          WHERE ConfigID = ?
+          AND Timestamp BETWEEN ? AND ?
+          ORDER BY ID
+          LIMIT 0, 1
+        ) AS FIRST_STATE
+
+        UNION
+
+        SELECT 
+          ID,
+          ConfigID,
+          Timestamp,
+          Status,
+          Message
+        FROM
+        (
+          SELECT 
+            HealthCheckDataEntry.*, 
+            LAG(Status) OVER (ORDER BY ConfigID, Timestamp) AS previous_state, 
+            LAG(Message) OVER (ORDER BY ConfigID, Timestamp) AS previous_msg
+          FROM HealthCheckDataEntry
+          WHERE ConfigID = ?
+        ) AS STATE_CHANGES
+        WHERE Status <> previous_state
+        AND Message <> previous_msg
+        
+        UNION
+        
+        SELECT * FROM (
+          SELECT 
+            *
+          FROM HealthCheckDataEntry
+          WHERE ConfigID = ?
+          AND Timestamp BETWEEN ? AND ?
+          ORDER BY ID DESC
+          LIMIT 0, 1
+        ) AS LAST_STATE
+        ORDER BY ConfigID, Timestamp
+      ) AS DataEntryChanges ON DataEntryChanges.ConfigID = HealthCheckConfig.ID
+      WHERE HealthCheckConfig.ServerID = ?
+        AND DataEntryChanges.Timestamp BETWEEN ? AND ?
+      ORDER BY Timestamp, ID;`,
+      [confID, from.getTime(), to.getTime(), confID, confID, from.getTime(), to.getTime(), serverID, from.getTime(), to.getTime()]
+    );
+
+    const mapByTimestamp = this.mapServiceCheckDataByTimestamp(result);
+
+    const arr: ServiceCheckData[] = [];
+    for (const entry of mapByTimestamp.entries()) {
+      arr.push({
+        time: new Date(entry[0]),
+        data: entry[1]
+      });
+    }
+    return arr;
+  }
+
+  public async queryServiceCheckLogs(serverID: number, confID: number, from: Date, to: Date) {
+    const result = await this.db.query(
+      `
+      SELECT HealthCheckDataEntry.*
+      FROM HealthCheckConfig
+      JOIN HealthCheckDataEntry ON HealthCheckDataEntry.ConfigID = HealthCheckConfig.ID
+      WHERE HealthCheckConfig.ID = ?
+        AND HealthCheckConfig.ServerID = ?
+        AND HealthCheckDataEntry.Timestamp BETWEEN ? AND ?
+      ORDER BY Timestamp, ConfigID;`,
+      [confID, serverID, from.getTime(), to.getTime()]
+    );
+
+    const mapByTimestamp = this.mapServiceCheckDataByTimestamp(result);
+
+    const arr: ServiceCheckData[] = [];
+    for (const entry of mapByTimestamp.entries()) {
+      arr.push({
+        time: new Date(entry[0]),
+        data: entry[1]
+      });
+    }
+    return arr;
+  }
+
+  public async getLastErrors(confID: number, threshold: number) {
+    const result = await this.db.query(
+      `SELECT * FROM HealthCheckDataEntry
+        WHERE ConfigID = ?
+        AND Timestamp IN (
+          SELECT Timestamp 
+          FROM HealthCheckDataEntry
+          WHERE ConfigID = ?
+          GROUP BY Timestamp
+          ORDER BY Timestamp DESC
+          LIMIT 0, ?
+        )
+        ORDER BY Timestamp DESC, ID DESC`,
+      [confID, confID, threshold]
+    );
+
+    const mapByTimestamp = this.mapServiceCheckDataByTimestamp(result);
+    const errors: ServiceCheckData[] = [];
+    for (const entry of mapByTimestamp.entries()) {
+      const time = entry[0];
+      const data = entry[1];
+
+      const errorData = data.filter(d => d.status !== HttpCheckStatus.OK);
+      if (!errorData.length) break;
+
+      errors.push({
+        time: new Date(time),
+        data: errorData
+      });
+    }
+
+    return errors;
+  }
+
+  private mapServiceCheckDataByTimestamp(rows: any[]) {
+    return rows.reduce((res: Map<number, ServiceCheckDataEntry[]>, row) => {
+      const time: number = row['Timestamp'];
+      if (!res.has(time)) res.set(time, []);
+      res.get(time)?.push({
+        status: row['Status'] as number,
+        message: row['Message']
+      });
+      return res;
+    }, new Map()) as Map<number, ServiceCheckDataEntry[]>;
+  }
+
+  private configFromResultRows(rows: any[]) {
+    return rows.reduce((res: ServiceConfig[], line, i) => {
+      const configID = line['ID'];
+      let config: ServiceConfig;
+      if (i === 0 || res[res.length - 1].id !== configID) {
+        config = {
+          id: configID,
+          title: line['Title'],
+          type: line['Type'],
+          serverId: line['ServerID'],
+          params: []
+        };
+        res.push(config);
+      } else {
+        config = res[res.length - 1];
+      }
+
+      if (!!line['_ParamKey']) {
+        const type = validateParamType(line['_ParamType']);
+        const key = line['_ParamKey'];
+        if (key === 'check') {
+          let checkParam = config.params.find(c => c.key === 'check');
+          if (!checkParam) {
+            config.params.push(
+              (checkParam = {
+                key: 'check',
+                type: 'regexp',
+                value: []
+              })
+            );
+          }
+          (checkParam.value as string[]).push(line['_ParamValue']);
+        } else {
+          config.params.push({
+            type,
+            key,
+            value: type === 'number' ? Number(line['_ParamValue']) : type === 'boolean' ? Boolean(Number(line['_ParamValue'])) : line['_ParamValue']
+          });
+        }
+      }
+
+      return res;
+    }, [] as ServiceConfig[]);
+  }
+
+  private httpCheckConfigFrom(hcConf: ServiceConfig | null): HttpCheckConfig | null {
+    if (!hcConf) return null;
+    const params = {
+      url: hcConf.params?.find(p => p.key === 'url')?.value as string,
+      active: (hcConf.params?.find(p => p.key === 'active')?.value as boolean) ?? defaults.serviceChecks.active,
+      interval: hcConf.params?.find(p => p.key === 'interval')?.value as number,
+      timeout: (hcConf.params?.find(p => p.key === 'timeout')?.value as number) ?? defaults.serviceChecks.httpTimeout,
+      notify: (hcConf.params?.find(p => p.key === 'notify')?.value as boolean) ?? defaults.serviceChecks.notify,
+      notifyThreshold: (hcConf.params?.find(p => p.key === 'notifyThreshold')?.value as number) ?? defaults.serviceChecks.notifyThreshold,
+      checks: hcConf.params?.reduce(
+        (res, p) => (p.key === 'check' && Array.isArray(p.value) ? [...res, ...p.value.map(c => JSON.parse(c))] : res),
+        [] as string[]
+      )
+    };
+    return {
+      id: hcConf.id,
+      title: hcConf.title,
+      type: hcConf.type,
+      serverId: hcConf.serverId,
+      ...params
+    };
+  }
+
+  private validateHttpCheckConfig(conf: Partial<HttpCheckConfig>): { [key: string]: string } | null {
+    const errors = {} as any;
+    if (!conf) return { null: 'Object was null or undefined' };
+    if (!conf.title?.trim().length) errors['required|title'] = `Field 'title' is required.`;
+    if (!conf.url?.trim().length) errors['required|url'] = `Field 'url' is required.`;
+    if ((!conf.interval && conf.interval !== 0) || Number.isNaN(Number(conf.interval))) errors['required|interval'] = `Field 'interval' is required.`;
+
+    if (!conf.checks || !Array.isArray(conf.checks))
+      errors['required|checks'] = `Field 'checks' is required and must be an array of check expressions.`;
+
+    return Object.keys(errors).length ? errors : null;
+  }
+}

+ 201 - 0
server/src/ctrl/mariadb-importer.class.ts

@@ -0,0 +1,201 @@
+import moment from 'moment';
+import { Pool as MariaPool } from 'mysql';
+
+import { Logger } from '../../../common/util/logger.class';
+
+import { SQLiteDatabase as SQLiteDB } from './sqlite-database.class';
+import { MariaDBConnector } from './mariadb-connector.class';
+
+const CHUNK_SIZE = 5000;
+
+export class MariaDBImporter {
+  private oldDb!: SQLiteDB;
+  private newDb!: MariaDBConnector;
+
+  constructor(pool: MariaPool) {
+    this.oldDb = new SQLiteDB();
+    this.newDb = new MariaDBConnector(pool);
+  }
+
+  async connect() {
+    try {
+      await this.oldDb.open();
+      await this.newDb.connect();
+    } catch (e) {
+      Logger.error('[FATAL] Initializing MariaDBImporter failed:', e);
+      Logger.error('[EXITING]');
+      process.exit(1);
+    }
+  }
+
+  async runImport() {
+    // await this.newDb.beginTransaction();
+    try {
+      await this.cutoffOldData(moment().add(-4, 'months').toDate());
+      await this.truncateTables();
+
+      await this.importServer();
+      await this.importServerConfig();
+      await this.importServerDataEntry();
+      await this.importServerDataValue();
+      await this.importHealthCheckConfig();
+      await this.importHealthCheckParams();
+      await this.importHealthCheckDataEntry();
+
+      // await this.newDb.commit();
+    } catch (e) {
+      Logger.error('[ERROR] Import to MariaDB failed:', e);
+      // await this.newDb.rollback();
+      process.exit(2);
+    }
+  }
+
+  private async cutoffOldData(cutoffDate: Date) {
+    Logger.info('[INFO]', 'Cutting off old DataEntries before', cutoffDate);
+    await this.oldDb.run('DELETE FROM `ServerDataEntry` WHERE `Timestamp` < ?;', [cutoffDate.getTime()]);
+    await this.oldDb.run('DELETE FROM `HealthCheckDataEntry` WHERE `Timestamp` < ?;', [cutoffDate.getTime()]);
+  }
+
+  private async truncateTables() {
+    Logger.info('[INFO]', 'Truncating all Tables in MariaDB ...');
+    await this.newDb.query(
+      `
+      SET autocommit = OFF;
+      START TRANSACTION;
+      SET FOREIGN_KEY_CHECKS=0;
+
+      TRUNCATE TABLE \`ServerDataValue\`;
+      TRUNCATE TABLE \`ServerDataEntry\`;
+      TRUNCATE TABLE \`ServerConfig\`;
+      
+      TRUNCATE TABLE \`HealthCheckDataEntry\`;
+      TRUNCATE TABLE \`HealthCheckParams\`;
+      TRUNCATE TABLE \`HealthCheckConfig\`;
+
+      TRUNCATE TABLE \`Server\`;
+
+      COMMIT;
+      SET FOREIGN_KEY_CHECKS=1;
+      SET autocommit = ON;
+    `,
+      []
+    );
+  }
+
+  private async importServer() {
+    Logger.info('[INFO]', 'Importing Server Table ...');
+    const res = await this.oldDb.stmt('SELECT * FROM `Server`;', []);
+    for (const row of res.rows) {
+      await this.newDb.query('INSERT INTO `Server`(`ID`, `Title`, `FQDN`) VALUES (?, ?, ?)', [row['ID'], row['Title'], row['FQDN']]);
+    }
+  }
+
+  private async importServerConfig() {
+    Logger.info('[INFO]', 'Importing ServerConfig Table ...');
+    const res = await this.oldDb.stmt('SELECT * FROM `ServerConfig`;', []);
+    for (const row of res.rows) {
+      await this.newDb.query('INSERT INTO `ServerConfig`(`ID`, `ServerID`, `Key`, `Value`) VALUES (?, ?, ?, ?)', [
+        row['ID'],
+        row['ServerID'],
+        row['Key'],
+        row['Value']
+      ]);
+    }
+  }
+
+  private async importServerDataEntry() {
+    Logger.info('[INFO]', 'Importing ServerDataEntry Table ...');
+    let res = await this.oldDb.stmt('SELECT COUNT(*) as Count FROM `ServerDataEntry`;', []);
+    const count = res.rows[0]['Count'] as number;
+    let offset = 0;
+    let pageSize = Math.min(CHUNK_SIZE, count);
+    while (offset + pageSize <= count) {
+      Logger.info('[INFO]', `Importing ServerDataEntry (${offset}/${count}) ...`);
+      res = await this.oldDb.stmt('SELECT * FROM `ServerDataEntry` LIMIT ? OFFSET ?;', [pageSize, offset]);
+
+      if (!res.rows.length) break;
+
+      const sql = 'INSERT INTO `ServerDataEntry`(`ID`, `ServerID`, `Timestamp`) VALUES ' + res.rows.map(() => '(?,?,?)').join(',') + ';';
+      const data = res.rows.reduce((res, row) => [...res, row['ID'], row['ServerID'], new Date(row['Timestamp'])], []);
+
+      await this.newDb.query(sql, data);
+
+      offset += pageSize;
+      pageSize = Math.min(pageSize, count - offset);
+    }
+  }
+
+  private async importServerDataValue() {
+    Logger.info('[INFO]', 'Importing ServerDataValue Table ...');
+    let res = await this.oldDb.stmt('SELECT COUNT(*) as Count FROM `ServerDataValue`;', []);
+    const count = res.rows[0]['Count'] as number;
+    let offset = 0;
+    let pageSize = Math.min(CHUNK_SIZE, count);
+    while (offset + pageSize <= count) {
+      Logger.info('[INFO]', `Importing ServerDataValue (${offset}/${count}) ...`);
+      const res = await this.oldDb.stmt('SELECT * FROM `ServerDataValue` LIMIT ? OFFSET ?;', [pageSize, offset]);
+
+      if (!res.rows.length) break;
+
+      const sql = 'INSERT INTO `ServerDataValue`(`ID`, `EntryID`, `Type`, `Key`, `Value`) VALUES' + res.rows.map(() => '(?,?,?,?,?)').join(',') + ';';
+      const data = res.rows.reduce((res, row) => [...res, row['ID'], row['EntryID'], row['Type'], row['Key'], row['Value']], []);
+
+      await this.newDb.query(sql, data);
+
+      offset += pageSize;
+      pageSize = Math.min(pageSize, count - offset);
+    }
+  }
+
+  private async importHealthCheckConfig() {
+    Logger.info('[INFO]', 'Importing HealthCheckConfig Table ...');
+    const res = await this.oldDb.stmt('SELECT * FROM `HealthCheckConfig`;', []);
+    for (const row of res.rows) {
+      await this.newDb.query('INSERT INTO `HealthCheckConfig`(`ID`, `ServerID`, `Type`, `Title`) VALUES(?, ?, ?, ?)', [
+        row['ID'],
+        row['ServerID'],
+        row['Type'],
+        row['Title']
+      ]);
+    }
+  }
+
+  private async importHealthCheckParams() {
+    Logger.info('[INFO]', 'Importing HealthCheckParams Table ...');
+    const res = await this.oldDb.stmt('SELECT * FROM `HealthCheckParams`;', []);
+    for (const row of res.rows) {
+      await this.newDb.query('INSERT INTO `HealthCheckParams`(`ID`, `ConfigID`, `Type`, `Key`, `Value`) VALUES (?, ?, ?, ?, ?)', [
+        row['ID'],
+        row['ConfigID'],
+        row['Type'],
+        row['Key'],
+        row['Value']
+      ]);
+    }
+  }
+
+  private async importHealthCheckDataEntry() {
+    Logger.info('[INFO]', 'Importing HealthCheckDataEntry Table ...');
+    let res = await this.oldDb.stmt('SELECT COUNT(*) as Count FROM `HealthCheckDataEntry`;', []);
+    const count = res.rows[0]['Count'] as number;
+    let offset = 0;
+    let pageSize = Math.min(CHUNK_SIZE, count);
+    while (offset + pageSize <= count) {
+      Logger.info('[INFO]', `Importing HealthCheckDataEntry (${offset}/${count}) ...`);
+      const res = await this.oldDb.stmt('SELECT * FROM `HealthCheckDataEntry` LIMIT ? OFFSET ?;', [pageSize, offset]);
+
+      if (!res.rows.length) break;
+
+      const sql =
+        'INSERT INTO `HealthCheckDataEntry`(`ID`, `ConfigID`, `Timestamp`, `Status`, `Message`) VALUES ' +
+        res.rows.map(() => '(?, ?, ?, ?, ?)').join(',') +
+        ';';
+      const data = res.rows.reduce((res, row) => [...res, row['ID'], row['ConfigID'], new Date(row['Timestamp']), row['Status'], row['Message']], []);
+
+      await this.newDb.query(sql, data);
+
+      offset += pageSize;
+      pageSize = Math.min(pageSize, count - offset);
+    }
+  }
+}

+ 25 - 0
server/src/ctrl/mariadb-poolfactory.class.ts

@@ -0,0 +1,25 @@
+import fs from 'fs';
+import fsp from 'fs/promises';
+import { createPool, Pool } from 'mysql';
+
+export class MariaDBPoolFactory {
+  public static async createConnectionPool(): Promise<Pool> {
+    let dbconfigFile = 'database.json';
+    if (fs.existsSync('/run/secrets/database.json')) {
+      dbconfigFile = '/run/secrets/database.json';
+    }
+
+    const dbconfig = JSON.parse(await fsp.readFile(dbconfigFile, { encoding: 'utf-8' }));
+    const connectionData = {
+      connectionLimit: 10,
+      host: dbconfig.db.host,
+      multipleStatements: dbconfig.db.multipleStatements,
+      port: dbconfig.db.port,
+      database: dbconfig.db.database,
+      user: dbconfig.db.user,
+      password: dbconfig.db.password,
+      charset: 'utf8mb4_unicode_ci'
+    };
+    return createPool(connectionData);
+  }
+}
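
A sketch of how the factory is consumed (the startup code in server/src/index.ts below does essentially this); because the factory prefers /run/secrets/database.json when it exists, the credentials bundled in the image can be overridden at deploy time:

import { MariaDBDatabase } from './mariadb-database.class';
import { MariaDBPoolFactory } from './mariadb-poolfactory.class';

async function bootstrap(): Promise<void> {
  const pool = await MariaDBPoolFactory.createConnectionPool();
  const db = new MariaDBDatabase(pool);
  await db.open(); // pass true once the DB migrations noted in the TODO above are wired up
  const servers = await db.getAllServerConfigs();
  console.log(`${servers.length} servers configured`);
}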

+ 3 - 3
server/src/ctrl/server-connector.class.ts

@@ -4,16 +4,16 @@ import moment from 'moment';
 import defaults from '../../../common/defaults.module';
 import { Logger } from '../../../common/util/logger.class';
 
-import { Database } from './database.class';
+import { SQLiteDatabase } from './sqlite-database.class';
 import { FCMController } from './fcm-controller.class';
 import { Timer } from '../timer.class';
 
 export class ServerConnector {
   private subscriptions: Array<{ id: number; interval: number; server: Server }> = [];
-  private db!: Database;
+  private db!: SQLiteDatabase;
 
   constructor() {
-    this.db = new Database();
+    this.db = new SQLiteDatabase();
     (async () => {
       try {
         await this.db.open();

+ 3 - 10
server/src/ctrl/database.class.ts → server/src/ctrl/sqlite-database.class.ts

@@ -9,20 +9,13 @@ import { ServiceConfig, validateParamType } from '../../../common/interfaces/ser
 import { HttpCheckData, HttpCheckStatus, ServiceCheckData, ServiceCheckDataEntry } from '../../../common/lib/http-check-data.module';
 import { Logger } from '../../../common/util/logger.class';
 
+import { ServiceChangedStatus } from '../lib/service-changed-status.enum';
 import { ValidationException } from '../lib/validation-exception.class';
+import { DataProvider } from './data-provider.interface';
 import { DBMigration } from './db-migration.class';
-import { HealthCheckDataProvider } from './health-check-data-provider.interface';
 import { SQLiteController } from './sqlite-controller.base';
 
-export enum ServiceChangedStatus {
-  None,
-  Created,
-  Activated,
-  Deactivated,
-  Rescheduled
-}
-
-export class Database extends SQLiteController implements HealthCheckDataProvider {
+export class SQLiteDatabase extends SQLiteController implements DataProvider {
   public set onError(listener: (error: any) => void) {
     this._onError = listener;
   }

+ 21 - 3
server/src/index.ts

@@ -1,11 +1,18 @@
+import fs from 'fs';
+import fsp from 'fs/promises';
+import path from 'path';
+
 import { Logger, LogLevel } from '../../common/util/logger.class';
 
 import { ControllerPool } from './ctrl/controller-pool.interface';
-import { Database } from './ctrl/database.class';
 import { HttpCheckController } from './ctrl/http-check-controller.class';
+import { MariaDBDatabase } from './ctrl/mariadb-database.class';
+import { MariaDBImporter } from './ctrl/mariadb-importer.class';
+import { MariaDBPoolFactory } from './ctrl/mariadb-poolfactory.class';
 import { ServerConnector } from './ctrl/server-connector.class';
 import { Webserver } from './webserver.class';
 import { Timer } from './timer.class';
+import moment from 'moment';
 
 const LOG_LEVEL: LogLevel = (process.env.LOG_LEVEL as LogLevel) || 'INFO';
 Logger.logLevel = LOG_LEVEL;
@@ -16,8 +23,19 @@ process.on('SIGTERM', exitGracefully);
 
 let pool: ControllerPool;
 (async () => {
-  const db = new Database();
-  await db.open(true);
+  const connPool = await MariaDBPoolFactory.createConnectionPool();
+  const sqliteDir = path.resolve(process.cwd(), process.env.DATA_DIR || 'data');
+  const doneFile = path.resolve(sqliteDir, 'import_done.txt');
+
+  if (fs.existsSync(sqliteDir) && !fs.existsSync(doneFile)) {
+    const mig = new MariaDBImporter(connPool);
+    await mig.connect();
+    await mig.runImport();
+    await fsp.writeFile(doneFile, moment().format('YYYY-MM-DD[T]HH:mm:ss.SSSZZ'), { encoding: 'utf-8' });
+  }
+
+  const db = new MariaDBDatabase(connPool);
+  await db.open();
 
   pool = {
     db,

+ 5 - 0
server/src/lib/database.exception.ts

@@ -0,0 +1,5 @@
+export class DatabaseException extends Error {
+  constructor(message: string) {
+    super(message);
+  }
+}

+ 7 - 0
server/src/lib/service-changed-status.enum.ts

@@ -0,0 +1,7 @@
+export enum ServiceChangedStatus {
+  None,
+  Created,
+  Activated,
+  Deactivated,
+  Rescheduled
+}

+ 1 - 1
server/src/webhdl/services-api-handler.class.ts

@@ -4,8 +4,8 @@ import { HttpCheckData, HttpCheckStatus, ServiceCheckData, ServiceCheckDataEntry
 import { HttpStatusException } from '../../../common/lib/http-status.exception';
 
 import { ControllerPool } from '../ctrl/controller-pool.interface';
-import { ServiceChangedStatus } from '../ctrl/database.class';
 import { HealthCheckDataProvider } from '../ctrl/health-check-data-provider.interface';
+import { ServiceChangedStatus } from '../lib/service-changed-status.enum';
 import { WebHandler } from './web-handler.base';
 
 export class ServicesAPIHandler extends WebHandler {