Question

I'm looking for an efficient way to take a raw SQL file and have it executed synchronously against a Postgres database, akin to running it through psql.

I have an SQL file which creates all databases, imports data, and so on. I need to execute this using node.js but cannot find any module which does it automatically. For the node.js application itself we use node-postgres ('pg'), knex.js and bookshelf.js; I assume, though, that pg is the best fit for this.

One alternative I can think of is to read the full file, split it by semicolons, replace newlines with spaces, trim any duplicate whitespace, then feed the statements into pg one by one so that they're executed sequentially rather than concurrently (a rough sketch of this idea follows the clarifications below). I'd be surprised if this were truly the most efficient way, and also if no libraries existed yet to solve it. I'm a little hesitant to jump into it, since SQL syntax can be tricky in itself and I might accidentally mangle it.

Some clarifications in advance:

  • psql cannot be used as it's not installed on the target machine
  • I've chosen to develop and source-control the SQL statements in native SQL form, because it's a lot easier for a DBA to use and manipulate
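
A rough sketch of the split-and-run idea, using pg directly (this is hypothetical and assumes no semicolons occur inside string literals, function bodies or dollar-quoted blocks):

const fs = require('fs');
const { Client } = require('pg');

async function runSqlFile(path, connectionString) {
  const client = new Client({ connectionString });
  await client.connect();
  const statements = fs.readFileSync(path, 'utf8')
    .split(';')
    .map((s) => s.trim())
    .filter((s) => s.length > 0);
  for (const statement of statements) {
    await client.query(statement); // one statement at a time, in order
  }
  await client.end();
}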

Solution 2

I've written the following function, which works for my case. It would have been much simpler if it weren't for:

  • Using batch to manage concurrency
  • Having the tricky PostgreSQL COPY case to consider

Code snippet:

var fs = require('fs');

// `client` is assumed to be a connected node-postgres client, and
// `batch` an array of task functions that gets executed sequentially
// elsewhere (see the sketch after this function).
function processSQLFile(fileName) {

  // Extract SQL statements from the file. Assumes ';' only ever
  // appears as a statement terminator.
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm, " ") // turn newlines into spaces
    .replace(/\s+/g, ' ') // collapse excess whitespace
    .split(";") // split into individual statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function(el) { return el.length != 0; }); // drop empty entries

  // Queue each SQL statement so they run one after another
  queries.forEach(function(query) {
    batch.push(function(done) {
      if (query.indexOf("COPY") === 0) { // COPY - needs special treatment
        var regexp = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/gmi;
        var matches = regexp.exec(query);
        var table = matches[1];
        var fileName = matches[2];
        // Rewrite COPY ... FROM <file> as COPY ... FROM STDIN and stream
        // the file in ourselves. client.copyFrom comes from older
        // node-postgres releases; current versions expose the same
        // functionality through the pg-copy-streams package.
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + fileName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else { // Other queries don't need special treatment
        client.query(query, function(err, result) {
          done();
        });
      }
    });
  });
}
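
The batch array used above is only populated here; the code that drains it isn't shown in the original. A minimal sequential runner could look like this (a sketch with hypothetical names, not the author's actual implementation):

var batch = [];

// Run the queued tasks in order: each task receives a `done` callback,
// and the next task only starts once the previous one has invoked it.
function runBatch(tasks, finished) {
  var i = 0;
  (function next() {
    if (i >= tasks.length) return finished();
    tasks[i++](next);
  })();
}

// Usage: queue the statements, then drain the batch.
// processSQLFile('init_database.sql');
// runBatch(batch, function() { console.log('all statements executed'); });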

Beware that this would fail if you used semicolons anywhere except to terminate SQL statements.
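
For instance, a semicolon inside a string literal would be split in the wrong place (a hypothetical illustration):

var stmt = "INSERT INTO notes (body) VALUES ('step 1; step 2');";

stmt.split(";");
// => ["INSERT INTO notes (body) VALUES ('step 1", " step 2')", ""]
// Both fragments are invalid SQL, so execution would fail.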

OTHER TIPS

You can just separate consecutive queries with a semicolon when passing them to client.query.

That works:

var pg = require('pg');

pg.connect('postgres://test:test@localhost/test', function(err, client, done){
    client.query('CREATE TABLE test (test VARCHAR(255)); INSERT INTO test VALUES(\'test\')', function(err, result){
        done(); // release the client only once the query has completed
    });
});

And consequently, that works too:

var pg = require('pg');
var fs = require('fs');

var sql = fs.readFileSync('init_database.sql').toString();

pg.connect('postgres://test:test@localhost/test', function(err, client, done){
    if(err){
        console.log('error: ', err);
        process.exit(1);
    }
    client.query(sql, function(err, result){
        done();
        if(err){
            console.log('error: ', err);
            process.exit(1);
        }
        process.exit(0);
    });
});
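
Note that pg.connect was removed in node-postgres 7, so on current versions of pg the same thing would be written against the Pool API; a sketch using the same connection details:

const fs = require('fs');
const { Pool } = require('pg');

const pool = new Pool({ connectionString: 'postgres://test:test@localhost/test' });
const sql = fs.readFileSync('init_database.sql').toString();

// pool.query checks out a client, runs the multi-statement string
// and returns the client to the pool automatically.
pool.query(sql)
  .then(() => process.exit(0))
  .catch((err) => {
    console.log('error: ', err);
    process.exit(1);
  });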

The @databases/pg client supports running SQL files out of the box:

const createPool = require('@databases/pg');
const {sql} = require('@databases/pg');

const db = createPool();

db.query(sql.file('my-file.sql')).catch(ex => {
  console.error(ex);
  process.exitCode = 1;
}).then(() => db.dispose());

It also supports having multiple statements in a single call to db.query:

const createPool = require('@databases/pg');
const {sql} = require('@databases/pg');

const db = createPool();

db.query(sql`
  INSERT INTO users (name) VALUES (${'Forbes'});
  SELECT * FROM users;
`).then(
  results => console.log(results)
).catch(ex => {
  console.error(ex);
  process.exitCode = 1;
}).then(() => db.dispose());

In this example, each statement is run in sequence, and the result of the last statement is returned.

The following, which just reads a file into a string and runs it using query seems to work for me:

const fs = require("fs");
const { Pool } = require("pg");

// host, port, user, password and database are assumed to be defined,
// and this snippet to be running inside an async function.
const pool = new Pool({ host, port, user, password, database });
const dbClient = await pool.connect();

const sql = fs.readFileSync("/path/to/file.sql", "utf8");
await dbClient.query(sql);

In case it also helps, here is further code to run all "*.sql" files in a directory in alphabetical order:

const pathWithSqlFiles = "/path/to/sqldir";
const filenames = fs
  .readdirSync(pathWithSqlFiles, { withFileTypes: true })
  .filter((item) => !item.isDirectory() && item.name.toLowerCase().endsWith(".sql"))
  .map((item) => item.name)
  .sort(); // readdirSync doesn't guarantee order, so sort explicitly
for (const filename of filenames) {
  const sql = fs.readFileSync(`${pathWithSqlFiles}/${filename}`, "utf8");
  await dbClient.query(sql);
}

(Don't forget to release the client and close the pool at some point after this; note that a client checked out with pool.connect() is returned with release(), while end() is for the pool itself.)
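
A minimal sketch of that cleanup, assuming the pool and dbClient from the snippets above:

dbClient.release(); // return the checked-out client to the pool
await pool.end();   // then close the pool's remaining connections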

There are many ways to import a database from an SQL file. The simplest and fastest is to run this command in your terminal, in the directory where the file is saved (-f executes the commands from the file, -a echoes each statement as it runs):

psql -h localhost -U postgres -d myDataBase -a -f myFile.sql

Or you can read and parse the file through node.js and run it yourself, along the lines of the processSQLFile function shown in Solution 2 above, but that takes more work.
