Generate an unlimited stream of JSON schema instances using json-schema-faker, faker, or chance, and insert the data into a supported database (e.g. NeDB, MongoDB, PostgreSQL, MSSQL).
npm install
# run docker container
docker-compose up -d
npm test
# stop docker container
docker-compose down
Test Environment
- Intel® Core™ i7-3520M CPU @ 2.90GHz × 4
- 7.5 GiB RAM
- Linux 64 bit
JSON schema
// person.json
{
"type": "object",
"properties": {
"name": {
"type": "string",
"faker": "name.findName"
}
},
"required": [
"name"
]
}
Install
npm i fakerdb
// examples/faker-stdout.js
// examples/faker-stdout.js
// Pipe generated JSON-schema instances to stdout, first from an
// in-memory schema object and then from a schema file on disk.
const { replay, jsf } = require('fakerdb');
const faker = require('faker');
const jsonSchemaFaker = require('json-schema-faker');
const path = require('path');

// Register faker so `"faker": "..."` hints in schemas resolve to faker calls.
jsonSchemaFaker.extend('faker', () => faker);

const SCHEMA_OBJ = {
  type: 'object',
  properties: {
    name: {
      type: 'string',
      faker: 'name.findName'
    }
  },
  required: [
    'name'
  ]
};

// Emit 2 instances generated from the in-memory schema object.
replay(SCHEMA_OBJ, { replay: 2 })
  .pipe(jsf(jsonSchemaFaker))
  .pipe(process.stdout);

// Emit 2 instances generated from a schema file.
const SCHEMA_FILE = path.join(__dirname, './schema/person.json');
replay(SCHEMA_FILE, { replay: 2 })
  .pipe(jsf(jsonSchemaFaker))
  .pipe(process.stdout);
// examples/faker-nedb.js
// examples/faker-nedb.js
// Generate schema instances and insert them into a local NeDB datastore.
const { generate } = require('fakerdb');
const Datasource = require('nedb');
const path = require('path');

// Local file-backed datastore; autoload opens it immediately.
const db = new Datasource({ filename: 'faker.db', autoload: true });

const REPLAY = 1000; // total number of documents to generate
const SCHEMA = path.join(__dirname, './schema/person.json');
// Insert in blocks of 1000 documents per batch.
const OPTS = { replay: REPLAY, insert: { blockSize: 1000 } };

generate(db, SCHEMA, OPTS);
npm i -g fakerdb
fakerdb --help
cd examples/faker
fakerdb g -r 200000 -i person.json -f custom-jsf.js -b 1000 -o people.db