migrating to typescript

Anatoly 2018-09-30 01:48:27 +03:00
parent 2b89ad87e5
commit 7650646cd5
10 changed files with 55 additions and 94 deletions
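
At its core, this commit removes the default-exported, fire-and-forget generateError from ErrorGenerator.ts and replaces it with a named, Promise-returning generateError exported from FsOps.ts; every call site in the diffs below is updated to await it. A minimal sketch of that call-site change, assuming the post-commit signature; the enclosing reportFailure function is illustrative only, not part of the commit:

// Before (ErrorGenerator.ts, deleted below): returns void, nothing to await.
//     import generateError from './ErrorGenerator';
//     generateError(conversion, '\t--[someCaller] Something went wrong...', sql);
//
// After (FsOps.ts): returns Promise<void>, so callers wait for the log write to finish.
import { generateError } from './FsOps';
import Conversion from './Conversion';

async function reportFailure(conversion: Conversion, sql: string): Promise<void> {
    await generateError(conversion, '\t--[reportFailure] Something went wrong...', sql);
}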

@@ -21,7 +21,7 @@
 import * as mysql from 'mysql';
 import { MysqlError, Pool as MySQLPool, PoolConnection } from 'mysql';
 import { Pool as PgPool, PoolClient, QueryResult } from 'pg';
-import generateError from './ErrorGenerator';
+import { generateError } from './FsOps';
 import Conversion from './Conversion';
 import generateReport from './ReportGenerator';
 import DBVendors from './DBVendors';
@@ -43,14 +43,14 @@ export default class DBAccess {
     /**
      * Ensures MySQL connection pool existence.
      */
-    private _getMysqlConnection(): void {
+    private async _getMysqlConnection(): Promise<void> {
         if (!this._conversion._mysql) {
             this._conversion._sourceConString.connectionLimit = this._conversion._maxDbConnectionPoolSize;
             this._conversion._sourceConString.multipleStatements = true;
             const pool: MySQLPool = mysql.createPool(this._conversion._sourceConString);

             if (!pool) {
-                generateError(this._conversion, '\t--[getMysqlConnection] Cannot connect to MySQL server...');
+                await generateError(this._conversion, '\t--[getMysqlConnection] Cannot connect to MySQL server...');
                 process.exit();
             }
@@ -61,21 +61,21 @@ export default class DBAccess {
     /**
      * Ensures PostgreSQL connection pool existence.
      */
-    private _getPgConnection(): void {
+    private async _getPgConnection(): Promise<void> {
         if (!this._conversion._pg) {
             this._conversion._targetConString.max = this._conversion._maxDbConnectionPoolSize;
             const pool: PgPool = new PgPool(this._conversion._targetConString);

             if (!pool) {
-                generateError(this._conversion, '\t--[getPgConnection] Cannot connect to PostgreSQL server...');
+                await generateError(this._conversion, '\t--[getPgConnection] Cannot connect to PostgreSQL server...');
                 process.exit();
             }

             this._conversion._pg = pool;

-            this._conversion._pg.on('error', (error: Error) => {
+            this._conversion._pg.on('error', async (error: Error) => {
                 const message: string = `Cannot connect to PostgreSQL server...\n' ${ error.message }\n${ error.stack }`;
-                generateError(this._conversion, message);
+                await generateError(this._conversion, message);
                 generateReport(this._conversion, message);
             });
         }
@@ -85,9 +85,8 @@ export default class DBAccess {
     /**
      * Obtains PoolConnection instance.
      */
     public getMysqlClient(): Promise<PoolConnection> {
-        this._getMysqlConnection();
-
-        return new Promise<PoolConnection>((resolve, reject) => {
+        return new Promise<PoolConnection>(async (resolve, reject) => {
+            await this._getMysqlConnection();
             (<MySQLPool>this._conversion._mysql).getConnection((err: MysqlError | null, connection: PoolConnection) => {
                 return err ? reject(err) : resolve(connection);
             });
@@ -97,8 +96,8 @@ export default class DBAccess {
     /**
      * Obtains PoolClient instance.
      */
-    public getPgClient(): Promise<PoolClient> {
-        this._getPgConnection();
+    public async getPgClient(): Promise<PoolClient> {
+        await this._getPgConnection();
         return (<PgPool>this._conversion._pg).connect();
     }
@@ -106,20 +105,20 @@ export default class DBAccess {
     /**
      * Runs a query on the first available idle client and returns its result.
      * Note, the pool does the acquiring and releasing of the client internally.
      */
-    public runPgPoolQuery(sql: string): Promise<QueryResult> {
-        this._getPgConnection();
+    public async runPgPoolQuery(sql: string): Promise<QueryResult> {
+        await this._getPgConnection();
         return (<PgPool>this._conversion._pg).query(sql);
     }

     /**
      * Releases MySQL or PostgreSQL connection back to appropriate pool.
      */
-    public releaseDbClient(dbClient?: PoolConnection | PoolClient): void {
+    public async releaseDbClient(dbClient?: PoolConnection | PoolClient): Promise<void> {
         try {
             (<PoolConnection | PoolClient>dbClient).release();
             dbClient = undefined;
         } catch (error) {
-            generateError(this._conversion, `\t--[DBAccess::releaseDbClient] ${ error }`);
+            await generateError(this._conversion, `\t--[DBAccess::releaseDbClient] ${ error }`);
         }
     }
@@ -127,9 +126,9 @@ export default class DBAccess {
     /**
      * Checks if there are no more queries to be sent using current client.
      * In such case the client should be released.
      */
-    private _releaseDbClientIfNecessary(client: PoolConnection | PoolClient, shouldHoldClient: boolean): void {
+    private async _releaseDbClientIfNecessary(client: PoolConnection | PoolClient, shouldHoldClient: boolean): Promise<void> {
         if (!shouldHoldClient) {
-            this.releaseDbClient(client);
+            await this.releaseDbClient(client);
         }
     }
@@ -154,7 +153,7 @@ export default class DBAccess {
            client = vendor === DBVendors.PG ? await this.getPgClient() : await this.getMysqlClient();
        } catch (error) {
            // An error occurred when tried to obtain a client from one of pools.
-           generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
+           await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
            return processExitOnError ? process.exit() : { client: client, data: undefined, error: error };
        }
    }
@@ -180,11 +179,11 @@ export default class DBAccess {
                sql = (<PoolConnection>client).format(sql, bindings);
            }

-           (<PoolConnection>client).query(sql, (error: MysqlError | null, data: any) => {
-               this._releaseDbClientIfNecessary((<PoolConnection>client), shouldReturnClient);
+           (<PoolConnection>client).query(sql, async (error: MysqlError | null, data: any) => {
+               await this._releaseDbClientIfNecessary((<PoolConnection>client), shouldReturnClient);

                if (error) {
-                   generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
+                   await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
                    return processExitOnError ? process.exit() : reject({ client: client, data: undefined, error: error });
                }
@@ -206,11 +205,11 @@ export default class DBAccess {
    ): Promise<DBAccessQueryResult> {
        try {
            const data: any = Array.isArray(bindings) ? await (<PoolClient>client).query(sql, bindings) : await (<PoolClient>client).query(sql);
-           this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
+           await this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
            return { client: client, data: data, error: undefined };
        } catch (error) {
-           this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
-           generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
+           await this._releaseDbClientIfNecessary((<PoolClient>client), shouldReturnClient); // Sets the client undefined.
+           await generateError(this._conversion, `\t--[${ caller }] ${ error }`, sql);
            return processExitOnError ? process.exit() : { client: client, data: undefined, error: error };
        }
    }
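
With getMysqlClient, getPgClient, runPgPoolQuery and releaseDbClient now asynchronous, a caller interacts with DBAccess roughly as sketched below; the dbAccess parameter and the surrounding checkTarget function are illustrative assumptions, not lines from this commit:

import { QueryResult } from 'pg';
import { PoolConnection } from 'mysql';
import DBAccess from './DBAccess';

// Illustrative usage sketch of the now-Promise-based DBAccess accessors.
async function checkTarget(dbAccess: DBAccess): Promise<void> {
    // Pool creation (and any connection-error logging) is awaited inside the accessor.
    const result: QueryResult = await dbAccess.runPgPoolQuery('SELECT 1 AS alive;');
    console.log(result.rows);

    // Obtaining and releasing a MySQL client are both awaited as well.
    const mysqlClient: PoolConnection = await dbAccess.getMysqlClient();
    await dbAccess.releaseDbClient(mysqlClient);
}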

@@ -19,8 +19,7 @@
  * @author Anatoly Khaytovich <anatolyuss@gmail.com>
  */
 import * as csvStringify from './CsvStringifyModified';
-import { log } from './FsOps';
-import generateError from './ErrorGenerator';
+import { log, generateError } from './FsOps';
 import Conversion from './Conversion';
 import DBAccess from './DBAccess';
 import DBAccessQueryResult from './DBAccessQueryResult';
@@ -72,7 +71,7 @@ async function deleteChunk(conv: Conversion, dataPoolId: number, client: PoolCli
    try {
        await client.query(sql);
    } catch (error) {
-       generateError(conv, `\t--[DataLoader::deleteChunk] ${ error }`, sql);
+       await generateError(conv, `\t--[DataLoader::deleteChunk] ${ error }`, sql);
    } finally {
        dbAccess.releaseDbClient(client);
    }
@@ -88,7 +87,7 @@ function buildChunkQuery(tableName: string, selectFieldList: string, offset: num
 /**
  * Processes data-loading error.
  */
-function processDataError(
+async function processDataError(
    conv: Conversion,
    streamError: string,
    sql: string,
@@ -97,7 +96,7 @@ function processDataError(
    dataPoolId: number,
    client: PoolClient
 ): Promise<void> {
-   generateError(conv, `\t--[populateTableWorker] ${ streamError }`, sqlCopy);
+   await generateError(conv, `\t--[populateTableWorker] ${ streamError }`, sqlCopy);
    const rejectedData: string = `\t--[populateTableWorker] Error loading table data:\n${ sql }\n`;
    log(conv, rejectedData, path.join(conv._logsDirPath, `${ tableName }.log`));
    return deleteChunk(conv, dataPoolId, client);
@@ -131,7 +130,7 @@ async function populateTableWorker(
        csvStringify(result.data, async (csvError: any, csvString: string) => {
            if (csvError) {
-               generateError(conv, `\t--[${ logTitle }] ${ csvError }`);
+               await generateError(conv, `\t--[${ logTitle }] ${ csvError }`);
                return resolvePopulateTableWorker();
            }

@@ -20,9 +20,8 @@
  */
 import { ChildProcess, fork } from 'child_process';
 import * as path from 'path';
-import { log } from './FsOps';
+import { log, generateError } from './FsOps';
 import Conversion from './Conversion';
-import generateError from './ErrorGenerator';
 import MessageToDataLoader from './MessageToDataLoader';
 import processConstraints from './ConstraintsProcessor';
 import decodeBinaryData from './BinaryDataDecoder';
@@ -30,11 +29,11 @@ import decodeBinaryData from './BinaryDataDecoder';
 /**
  * Kills a process specified by the pid.
  */
-function killProcess(pid: number, conversion: Conversion): void {
+async function killProcess(pid: number, conversion: Conversion): Promise<void> {
    try {
        process.kill(pid);
    } catch (killError) {
-       generateError(conversion, `\t--[killProcess] ${ killError }`);
+       await generateError(conversion, `\t--[killProcess] ${ killError }`);
    }
 }
@@ -119,7 +118,7 @@ async function pipeData(conversion: Conversion, dataLoaderPath: string, options:
    const bandwidth: number[] = fillBandwidth(conversion);
    const chunksToLoad: any[] = bandwidth.map((index: number) => conversion._dataPool[index]);

-   loaderProcess.on('message', (signal: any) => {
+   loaderProcess.on('message', async (signal: any) => {
        if (typeof signal === 'object') {
            conversion._dicTables[signal.tableName].totalRowsInserted += signal.rowsInserted;
            const msg: string = `\t--[pipeData] For now inserted: ${ conversion._dicTables[signal.tableName].totalRowsInserted } rows,
@@ -129,7 +128,7 @@ async function pipeData(conversion: Conversion, dataLoaderPath: string, options:
            return;
        }

-       killProcess(loaderProcess.pid, conversion);
+       await killProcess(loaderProcess.pid, conversion);
        conversion._processedChunks += chunksToLoad.length;
        return pipeData(conversion, dataLoaderPath, options);
    });

@@ -1,42 +0,0 @@
-/*
- * This file is a part of "NMIG" - the database migration tool.
- *
- * Copyright (C) 2016 - present, Anatoly Khaytovich <anatolyuss@gmail.com>
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program (please see the "LICENSE.md" file).
- * If not, see <http://www.gnu.org/licenses/gpl.txt>.
- *
- * @author Anatoly Khaytovich <anatolyuss@gmail.com>
- */
-import * as fs from 'fs';
-import { log } from './FsOps';
-import Conversion from './Conversion';
-
-/**
- * Writes a ditailed error message to the "/errors-only.log" file
- */
-export default (conversion: Conversion, message: string, sql: string = ''): void => {
-    message += `\n\n\tSQL: ${sql}\n\n`;
-    const buffer: Buffer = Buffer.from(message, conversion._encoding);
-    log(conversion, message, undefined, true);
-
-    fs.open(conversion._errorLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
-        if (!error) {
-            fs.write(fd, buffer, 0, buffer.length, null, () => {
-                fs.close(fd, () => {
-                    // Each async function MUST have a callback (according to Node.js >= 7).
-                });
-            });
-        }
-    });
-}

@@ -32,11 +32,13 @@ export function generateError(conversion: Conversion, message: string, sql: stri
        log(conversion, message, undefined, true);

        fs.open(conversion._errorLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
-           if (!error) {
-               fs.write(fd, buffer, 0, buffer.length, null, () => {
-                   fs.close(fd, () => resolve());
-               });
+           if (error) {
+               return resolve();
            }
+
+           fs.write(fd, buffer, 0, buffer.length, null, () => {
+               fs.close(fd, () => resolve());
+           });
        });
    });
 }
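
Read together with the deleted ErrorGenerator.ts above, the promisified generateError in FsOps.ts plausibly has the shape sketched below. The new Promise wrapper and the full signature are assumptions inferred from the resolve() calls and the truncated hunk header; those lines are not shown in this diff:

import * as fs from 'fs';
import Conversion from './Conversion';

// Assumed shape of the Promise-based generateError in FsOps.ts; log() is the sibling
// export defined elsewhere in the same module (see the hunk above).
export function generateError(conversion: Conversion, message: string, sql: string = ''): Promise<void> {
    return new Promise<void>(resolve => {
        message += `\n\n\tSQL: ${ sql }\n\n`;
        const buffer: Buffer = Buffer.from(message, conversion._encoding);
        log(conversion, message, undefined, true);

        fs.open(conversion._errorLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
            if (error) {
                // Could not open "errors-only.log"; resolve anyway so the caller never hangs.
                return resolve();
            }

            fs.write(fd, buffer, 0, buffer.length, null, () => {
                fs.close(fd, () => resolve());
            });
        });
    });
}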

@@ -18,8 +18,7 @@
  *
  * @author Anatoly Khaytovich <anatolyuss@gmail.com>
  */
-import { log } from './FsOps';
-import generateError from './ErrorGenerator';
+import { log, generateError } from './FsOps';
 import Conversion from './Conversion';
 import DBAccess from './DBAccess';
 import DBAccessQueryResult from './DBAccessQueryResult';
@@ -97,7 +96,7 @@ export async function createTable(conversion: Conversion, tableName: string): Pr
    const result: DBAccessQueryResult = await dbAccess.query(logTitle, sqlAddDataChunkIdColumn, DBVendors.PG, false, false);

    if (result.error) {
-       generateError(conversion, `\t--[${ logTitle }] ${ result.error }`, sqlAddDataChunkIdColumn);
+       await generateError(conversion, `\t--[${ logTitle }] ${ result.error }`, sqlAddDataChunkIdColumn);
    }

    return;

@@ -45,7 +45,7 @@ async function getColumnTypes(testSchemaProcessor: TestSchemaProcessor): Promise
    );

    if (result.error) {
-       testSchemaProcessor.processFatalError(result.error);
+       await testSchemaProcessor.processFatalError(result.error);
    }

    return result.data.rows;

@@ -42,7 +42,7 @@ async function retrieveData(testSchemaProcessor: TestSchemaProcessor): Promise<a
    );

    if (result.error) {
-       testSchemaProcessor.processFatalError(result.error);
+       await testSchemaProcessor.processFatalError(result.error);
    }

    return result.data.rows[0];

@@ -42,7 +42,7 @@ async function hasSchemaCreated(testSchemaProcessor: TestSchemaProcessor): Promi
    );

    if (result.error) {
-       testSchemaProcessor.processFatalError(result.error);
+       await testSchemaProcessor.processFatalError(result.error);
    }

    return !!result.data.rows[0].exists;

@@ -30,10 +30,15 @@ import loadStructureToMigrate from '../../src/StructureLoader';
 import pipeData from '../../src/DataPipeManager';
 import { createStateLogsTable } from '../../src/MigrationStateManager';
 import { createDataPoolTable, readDataPool } from '../../src/DataPoolManager';
-import generateError from '../../src/ErrorGenerator';
-import { log } from '../../src/FsOps';
-import { readConfig, readExtraConfig, createLogsDirectory, readDataTypesMap } from '../../src/FsOps';
 import { checkConnection, getLogo } from '../../src/BootProcessor';
+import {
+    readConfig,
+    readExtraConfig,
+    createLogsDirectory,
+    readDataTypesMap,
+    log,
+    generateError
+} from '../../src/FsOps';

 export default class TestSchemaProcessor {
     /**
@@ -57,9 +62,9 @@ export default class TestSchemaProcessor {
    /**
     * Stops the process in case of fatal error.
     */
-   public processFatalError(error: string): void {
+   public async processFatalError(error: string): Promise<void> {
        console.log(error);
-       generateError(<Conversion>this.conversion, error);
+       await generateError(<Conversion>this.conversion, error);
        process.exit();
    }