merge /stations/all into /stations

and move lib/* to routes/*
Jannis R 2020-05-01 18:10:21 +02:00
parent 39272bd7a5
commit cf8a00dd53
No known key found for this signature in database
GPG key ID: 0FE83946296A88A5
7 changed files with 182 additions and 138 deletions


@@ -1,27 +0,0 @@
'use strict'

let raw = require('db-stations/full.json')

let data = {}
for (let key in raw) {
  if (!Object.prototype.hasOwnProperty.call(raw, key)) continue
  const station = Object.assign({}, raw[key]) // clone
  // todo: remove this remapping (breaking change!)
  station.coordinates = station.location
  delete station.location
  data[station.id] = station
}
data = JSON.stringify(data) + '\n'
raw = null

const allStations = (req, res, next) => {
  // res.sendFile(rawPath, {
  //   maxAge: 10 * 24 * 3600 * 1000 // 10 days
  // }, next)
  res.set('content-type', 'application/json')
  res.send(data)
}

module.exports = allStations

lib/db-stations.js Normal file

@@ -0,0 +1,40 @@
'use strict'

const {statSync} = require('fs')
const {full: readRawStations} = require('db-stations')

// We don't have access to the publish date+time of the npm package,
// so we use the ctime of db-stations/full.ndjson as an approximation.
// todo: this is brittle, find a better way, e.g. a build script
const timeModified = statSync(require.resolve('db-stations/full.ndjson')).ctime

const pStations = new Promise((resolve, reject) => {
  let raw = readRawStations()
  raw.once('error', reject)

  let data = Object.create(null)
  raw.on('data', (station) => {
    data[station.id] = station
    // also index the station by its RIL100 identifiers …
    if (Array.isArray(station.ril100Identifiers)) {
      for (const ril100 of station.ril100Identifiers) {
        data[ril100.rilIdentifier] = station
      }
    }
    // … and by its additional IDs
    if (Array.isArray(station.additionalIds)) {
      for (const addId of station.additionalIds) {
        data[addId] = station
      }
    }
  })

  raw.once('end', () => {
    raw = null
    resolve({data, timeModified})
  })
})
pStations.catch((err) => {
  console.error(err)
  process.exit(1) // todo: is this appropriate?
})

module.exports = pStations
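
For reference, consumers resolve the exported promise and read stations from the index; since RIL100 identifiers and additional IDs are indexed too, several keys may map to the same station object. A minimal consumer sketch, not part of this commit (the station ID is an assumed example):

// minimal consumer sketch – '8011160' is assumed to be the EVA ID of Berlin Hbf
const pStations = require('./lib/db-stations')

pStations.then(({data, timeModified}) => {
  const station = data['8011160']
  if (station) console.log(station.name, timeModified.toUTCString())
})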


@@ -1,40 +0,0 @@
'use strict'

const stations = require('db-stations')

const err400 = (msg) => {
  const err = new Error(msg)
  err.statusCode = 400
  return err
}

// This is terribly inefficient, because we read all stations for every request.
// todo: optimize it
const route = (req, res, next) => {
  const id = req.params.id.trim()

  const stream = stations.full()
  let found = false

  const onStation = (station) => {
    if (station.id !== id) return
    found = true
    stream.removeListener('data', onStation)
    res.json(station)
    next('/station/:id')
  }
  stream.on('data', onStation)

  const onEnd = () => {
    if (!found) return next(err400('Station not found.'))
  }
  stream.once('end', onEnd)

  stream.once('error', (err) => {
    stream.removeListener('data', onStation)
    stream.removeListener('end', onEnd)
    next(err)
  })
}

module.exports = route


@@ -1,70 +0,0 @@
'use strict'

const autocomplete = require('db-stations-autocomplete')
const allStations = require('db-stations/full.json')
const parse = require('cli-native').to
const createFilter = require('db-stations/create-filter')
const ndjson = require('ndjson')

const hasProp = (o, k) => Object.prototype.hasOwnProperty.call(o, k)

const err400 = (msg) => {
  const err = new Error(msg)
  err.statusCode = 400
  return err
}

const complete = (req, res, next) => {
  const limit = req.query.results && parseInt(req.query.results) || 3
  const fuzzy = parse(req.query.fuzzy) === true
  const completion = parse(req.query.completion) !== false
  const results = autocomplete(req.query.query, limit, fuzzy, completion)

  const data = []
  for (let result of results) {
    // todo: make this more efficient
    const station = allStations.find(s => s.id === result.id)
    if (!station) continue

    data.push(Object.assign(result, station))
  }

  res.json(data)
  next()
}

const filter = (req, res, next) => {
  if (Object.keys(req.query).length === 0) {
    return next(err400('Missing properties.'))
  }

  const selector = Object.create(null)
  for (let prop in req.query) {
    const val = parse(req.query[prop])
    // todo: derhuerst/db-rest#2
    if (prop.slice(0, 12) === 'coordinates.') {
      prop = prop.slice(12)
    }
    selector[prop] = val
  }
  const filter = createFilter(selector)

  res.type('application/x-ndjson')
  const out = ndjson.stringify()
  out
    .once('error', next)
    .pipe(res)
    .once('finish', () => next())

  for (let station of allStations) {
    if (filter(station)) out.write(station)
  }
  out.end()
}

const route = (req, res, next) => {
  if (req.query.query) complete(req, res, next)
  else filter(req, res, next)
}

module.exports = route

package.json

@@ -26,9 +26,10 @@
     "db-hafas": "^3.0.1",
     "db-stations": "^2.4.0",
     "db-stations-autocomplete": "^2.1.0",
+    "etag": "^1.8.1",
     "hafas-client-health-check": "^1.0.1",
     "hafas-rest-api": "^1.2.1",
-    "ndjson": "^1.5.0"
+    "serve-buffer": "^2.0.0"
   },
   "scripts": {
     "start": "node index.js"

routes/station.js Normal file

@@ -0,0 +1,28 @@
'use strict'

const pStations = require('../lib/db-stations')

const err404 = (msg) => {
  const err = new Error(msg)
  err.statusCode = 404
  return err
}

const stationRoute = (req, res, next) => {
  const id = req.params.id.trim()

  pStations
  .then(({data, timeModified}) => {
    const station = data[id]
    if (!station) {
      next(err404('Station not found.'))
      return
    }

    res.setHeader('Last-Modified', timeModified.toUTCString())
    res.json(station)
  })
  .catch(next)
}

module.exports = stationRoute
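
The handler follows the Express (req, res, next) signature, so it can be mounted directly; the mount path and port below are assumptions, the actual wiring is not part of this diff:

// assumed wiring, not shown in this commit
const express = require('express')
const stationRoute = require('./routes/station')

const app = express()
app.get('/stations/:id', stationRoute)
app.listen(3000)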

routes/stations.js Normal file

@@ -0,0 +1,112 @@
'use strict'

const computeEtag = require('etag')
const serveBuffer = require('serve-buffer')
const autocomplete = require('db-stations-autocomplete')
const parse = require('cli-native').to
const createFilter = require('db-stations/create-filter')
let pAllStations = require('../lib/db-stations')

const JSON_MIME = 'application/json'
const NDJSON_MIME = 'application/x-ndjson'

const toNdjsonBuf = (data) => {
  const chunks = []
  let i = 0, bytes = 0
  for (const id in data) {
    const sep = i++ === 0 ? '' : '\n'
    const buf = Buffer.from(sep + JSON.stringify(data[id]), 'utf8')
    chunks.push(buf)
    bytes += buf.length
  }
  return Buffer.concat(chunks, bytes)
}

// Pre-serialize the full dataset as JSON & NDJSON (with ETags) once,
// so that unfiltered requests can be answered straight from a buffer.
pAllStations = pAllStations.then(({data, timeModified}) => {
  const asJson = Buffer.from(JSON.stringify(data), 'utf8')
  const asNdjson = toNdjsonBuf(data)
  return {
    stations: data,
    timeModified,
    asJson: {data: asJson, etag: computeEtag(asJson)},
    asNdjson: {data: asNdjson, etag: computeEtag(asNdjson)},
  }
})
.catch((err) => {
  console.error(err)
  process.exit(1)
})

const err = (msg, statusCode = 500) => {
  const err = new Error(msg)
  err.statusCode = statusCode
  return err
}

const complete = (req, res, next, q, allStations, onStation, onEnd) => {
  const limit = q.results && parseInt(q.results) || 3
  const fuzzy = parse(q.fuzzy) === true
  const completion = parse(q.completion) !== false
  const results = autocomplete(q.query, limit, fuzzy, completion)

  const data = []
  for (const result of results) {
    const station = allStations[result.id]
    if (!station) continue
    Object.assign(result, station)
    onStation(result)
  }
  onEnd()
}

const filter = (req, res, next, q, allStations, onStation, onEnd) => {
  const selector = Object.create(null)
  for (const prop in q) selector[prop] = parse(q[prop])
  const filter = createFilter(selector)

  for (const id in allStations) {
    const station = allStations[id]
    if (filter(station)) onStation(station)
  }
  onEnd()
}

const stationsRoute = (req, res, next) => {
  const t = req.accepts([JSON_MIME, NDJSON_MIME])
  if (t !== JSON_MIME && t !== NDJSON_MIME) {
    return next(err(JSON_MIME + ' or ' + NDJSON_MIME, 406))
  }

  const head = t === JSON_MIME ? '{\n' : ''
  const sep = t === JSON_MIME ? ',\n' : '\n'
  const tail = t === JSON_MIME ? '\n}\n' : '\n'
  let i = 0
  const onStation = (s) => {
    const j = JSON.stringify(s)
    const field = t === JSON_MIME ? `"${s.id}":` : ''
    res.write(`${i++ === 0 ? head : sep}${field}${j}`)
  }
  const onEnd = () => {
    if (i > 0) res.end(tail)
    else res.end(head + tail)
  }

  const q = req.query
  pAllStations
  .then(({stations, timeModified, asJson, asNdjson}) => {
    res.setHeader('Last-Modified', timeModified.toUTCString())
    if (Object.keys(req.query).length === 0) {
      // no query parameters → serve the pre-serialized buffer
      const data = t === JSON_MIME ? asJson.data : asNdjson.data
      const etag = t === JSON_MIME ? asJson.etag : asNdjson.etag
      serveBuffer(req, res, data, {timeModified, etag})
    } else if (q.query) {
      complete(req, res, next, q, stations, onStation, onEnd)
    } else {
      filter(req, res, next, q, stations, onStation, onEnd)
    }
  })
  .catch(next)
}

module.exports = stationsRoute
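
The merged /stations route now covers three cases in one handler: with no query parameters it serves the pre-serialized dataset (JSON or NDJSON, depending on the Accept header) via serve-buffer, with ?query= it runs the autocompletion, and with any other parameters it filters via db-stations/create-filter. A usage sketch with assumed mount path and port, not part of this commit:

// assumed wiring & example requests, not shown in this commit
const express = require('express')
const stationsRoute = require('./routes/stations')

const app = express()
app.get('/stations', stationsRoute)
app.listen(3000, () => {
  // full dataset, served from the pre-computed buffers:
  //   GET /stations   with Accept: application/json
  //   GET /stations   with Accept: application/x-ndjson
  // autocompletion (formerly a separate code path):
  //   GET /stations?query=hannover&results=5
  // filtering: any other query parameters are parsed and
  // passed to db-stations/create-filter as a selector
})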