-
Notifications
You must be signed in to change notification settings - Fork 1
/
build.js
72 lines (60 loc) · 1.72 KB
/
build.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
'use strict'
const {promisify} = require('util')
const {pipeline} = require('stream')
const {exec} = require('child_process')
const toCsv = require('csv-write-stream')
const {createWriteStream} = require('fs')
const sortBy = require('lodash/sortBy')
const path = require('path')
const {writeFile, unlink} = require('fs').promises
const full = require('./full')
// Sort-key columns per GTFS file: the rows of each file are ordered by
// these fields before being serialised. Built on a null prototype so the
// `set in orders` lookup elsewhere can never match an inherited
// Object.prototype member.
const orders = Object.create(null)
orders['agency'] = ['agency_id']
orders['stops'] = ['stop_id']
orders['routes'] = ['route_id']
orders['trips'] = ['trip_id']
orders['stop_times'] = ['trip_id', 'stop_sequence']
orders['calendar'] = ['service_id']
orders['calendar_dates'] = ['service_id', 'date']
orders['shapes'] = ['shape_id', 'shape_pt_sequence']
orders['frequencies'] = ['trip_id', 'start_time']
// Promise-returning wrappers around the callback-style stream & child_process APIs.
const pPipeline = promisify(pipeline)
const pExec = promisify(exec)

// Serialise `rows` (an array of plain row objects) as CSV into the file at
// `dest`, with `cols` as the header columns. Resolves once the file has been
// fully written; rejects if either the CSV encoder or the file stream errors.
const writeCsv = async (dest, cols, rows) => {
	const csv = toCsv({headers: cols})
	const done = pPipeline(csv, createWriteStream(dest))
	for (const row of rows) csv.write(row)
	csv.end()
	await done
}
// Build entry point: serialises every dataset in `full` both as a GTFS CSV
// file (./gtfs/<set>.txt) and as JSON (./json/<set>.json), then zips all
// CSV files into ./gtfs.zip. Exits non-zero on any failure.
;(async () => {
	for (const [set, data] of Object.entries(full)) {
		// A dataset is either an array of rows or a single row object;
		// arrays with a known sort order are sorted for stable output.
		const rows = Array.isArray(data)
			? (set in orders ? sortBy(data, orders[set]) : data)
			: [data]
		// Header is the union of keys across all rows — rows may be sparse.
		const cols = Array.from(new Set(rows.flatMap(row => Object.keys(row))))

		// The GTFS & JSON outputs for a set are independent — write them in parallel.
		await Promise.all([
			writeCsv(path.join(__dirname, 'gtfs', set + '.txt'), cols, rows),
			writeFile(path.join(__dirname, 'json', set + '.json'), JSON.stringify(data)),
		])
	}

	// Remove any stale archive first; a missing file is fine.
	try {
		await unlink(path.join(__dirname, 'gtfs.zip'))
	} catch (err) {
		// `err` is always truthy here, only its code matters.
		if (err.code !== 'ENOENT') throw err
	}
	// -r recurse, -D no directory entries, -9 best compression.
	await pExec('zip -r -D -9 ../gtfs.zip *.txt', {
		cwd: path.join(__dirname, 'gtfs'),
	})
})()
.catch((err) => {
	console.error(err)
	process.exit(1)
})