
Commit

Merge pull request #58 from rdf-ext/v3
feat!: cleanup and refactoring
bergos authored Feb 18, 2024
2 parents ed40279 + ea5c341 commit 008710f
Showing 54 changed files with 3,574 additions and 3,022 deletions.
16 changes: 9 additions & 7 deletions .github/workflows/ci.yaml → .github/workflows/test.yaml
@@ -1,19 +1,21 @@
name: CI
on: push
name: Test
on:
- pull_request
- push
jobs:
test:
runs-on: ubuntu-20.04
strategy:
matrix:
node:
- '14'
- '16'
- '18'
- '19'
- '20'
- '21'
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
- run: npm install
- run: npm test
- uses: codecov/codecov-action@v3
26 changes: 0 additions & 26 deletions BaseClient.js

This file was deleted.

26 changes: 26 additions & 0 deletions CHANGELOG.md
@@ -0,0 +1,26 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [3.0.0] - 2024-02-18

### Added

- ESM support
- exports for all classes in the `index.js` package entrypoint

### Changed

- options like `endpointUrl`, `user`, and `password` are attached to the client object, which makes it possible to create
  new client instances from existing ones
- methods that return `Readable` stream objects are sync
- updated dependencies

### Removed

- CommonJS support
- `BaseClient` and `Endpoint` class
- automatic request splitting for Graph Store uploads
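
The option handling noted under Changed can be sketched as follows. This is a minimal sketch, not taken from the repository: it assumes a `StreamClient.js` entrypoint alongside the `ParsingClient.js` entrypoint shown later in this diff, and the endpoint URL and credentials are placeholders.

import ParsingClient from 'sparql-http-client/ParsingClient.js'
import StreamClient from 'sparql-http-client/StreamClient.js'

// placeholder endpoint and credentials, for illustration only
const streamClient = new StreamClient({
  endpointUrl: 'https://example.org/sparql',
  user: 'alice',
  password: 'secret'
})

// the options are attached to the client object, so a second client
// can be created by reading them back from the existing instance
const parsingClient = new ParsingClient({
  endpointUrl: streamClient.endpointUrl,
  user: streamClient.user,
  password: streamClient.password
})
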
136 changes: 0 additions & 136 deletions Endpoint.js

This file was deleted.

84 changes: 68 additions & 16 deletions ParsingClient.js
@@ -1,33 +1,85 @@
const Endpoint = require('./Endpoint')
const ParsingQuery = require('./ParsingQuery')
const BaseClient = require('./BaseClient')
import DataModelFactory from '@rdfjs/data-model/Factory.js'
import DatasetFactory from '@rdfjs/dataset/Factory.js'
import Environment from '@rdfjs/environment'
import isDatasetCoreFactory from './lib/isDatasetCoreFactory.js'
import ParsingQuery from './ParsingQuery.js'
import SimpleClient from './SimpleClient.js'

const defaultFactory = new Environment([DataModelFactory, DatasetFactory])

/**
* A client implementation which parses SPARQL responses into RDF/JS dataset (CONSTRUCT/DESCRIBE) or JSON objects (SELECT)
*
* It does not provide a store
* A client implementation based on {@link ParsingQuery} that parses SPARQL results into RDF/JS DatasetCore objects
* (CONSTRUCT/DESCRIBE) or an array of objects (SELECT). It does not provide a store interface.
*
* @extends SimpleClient
* @property {ParsingQuery} query
*
* @example
* // read the height of the Eiffel Tower from Wikidata with a SELECT query
*
* import ParsingClient from 'sparql-http-client/ParsingClient.js'
*
* const endpointUrl = 'https://query.wikidata.org/sparql'
* const query = `
* PREFIX wd: <http://www.wikidata.org/entity/>
* PREFIX p: <http://www.wikidata.org/prop/>
* PREFIX ps: <http://www.wikidata.org/prop/statement/>
* PREFIX pq: <http://www.wikidata.org/prop/qualifier/>
*
* SELECT ?value WHERE {
* wd:Q243 p:P2048 ?height.
*
* ?height pq:P518 wd:Q24192182;
* ps:P2048 ?value .
* }`
*
* const client = new ParsingClient({ endpointUrl })
* const result = await client.query.select(query)
*
* for (const row of result) {
* for (const [key, value] of Object.entries(row)) {
* console.log(`${key}: ${value.value} (${value.termType})`)
* }
* }
*/
class ParsingClient extends BaseClient {
class ParsingClient extends SimpleClient {
/**
* @param {Object} options
* @param {string} options.endpointUrl SPARQL Query endpoint URL
* @param {string} [options.endpointUrl] SPARQL query endpoint URL
* @param {factory} [options.factory] RDF/JS factory
* @param {fetch} [options.fetch=nodeify-fetch] fetch implementation
* @param {HeadersInit} [options.headers] HTTP headers to send with every endpoint request
* @param {Headers} [options.headers] headers sent with every request
* @param {string} [options.password] password used for basic authentication
* @param {string} [options.storeUrl] Graph Store URL
* @param {string} [options.updateUrl] SPARQL Update endpoint URL
* @param {string} [options.storeUrl] SPARQL Graph Store URL
* @param {string} [options.updateUrl] SPARQL update endpoint URL
* @param {string} [options.user] user used for basic authentication
* @param {factory} [options.factory] RDF/JS DataFactory
*/
constructor (options) {
constructor ({
endpointUrl,
factory = defaultFactory,
fetch,
headers,
password,
storeUrl,
updateUrl,
user
}) {
super({
endpoint: new Endpoint(options),
factory: options.factory,
endpointUrl,
factory,
fetch,
headers,
password,
storeUrl,
updateUrl,
user,
Query: ParsingQuery
})

if (!isDatasetCoreFactory(this.factory)) {
throw new Error('the given factory doesn\'t implement the DatasetCoreFactory interface')
}
}
}

module.exports = ParsingClient
export default ParsingClient
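
A minimal usage sketch for the constructor above: the factory passed in must implement the DatasetCoreFactory interface, which the combination of the data-model and dataset factories from this diff satisfies. The Wikidata endpoint URL is taken from the JSDoc example; the rest is illustrative.

import DataModelFactory from '@rdfjs/data-model/Factory.js'
import DatasetFactory from '@rdfjs/dataset/Factory.js'
import Environment from '@rdfjs/environment'
import ParsingClient from 'sparql-http-client/ParsingClient.js'

// combining the data-model and dataset factories yields a factory that
// passes the DatasetCoreFactory check in the ParsingClient constructor
const factory = new Environment([DataModelFactory, DatasetFactory])

const client = new ParsingClient({
  endpointUrl: 'https://query.wikidata.org/sparql',
  factory
})
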
51 changes: 22 additions & 29 deletions ParsingQuery.js
@@ -1,47 +1,40 @@
const { array } = require('get-stream')
const StreamQuery = require('./StreamQuery')
import chunks from 'stream-chunks/chunks.js'
import StreamQuery from './StreamQuery.js'

/**
* Extends StreamQuery by materialising the SPARQL response streams
* A query implementation that wraps the results of the {@link StreamQuery} into RDF/JS DatasetCore objects
* (CONSTRUCT/DESCRIBE) or an array of objects (SELECT).
*
* @extends StreamQuery
*/
class ParsingQuery extends StreamQuery {
/**
* @param {Object} init
* @param {Endpoint} init.endpoint
*/
constructor ({ endpoint }) {
super({ endpoint })
}

/**
* Performs a query which returns triples
* Sends a request for a CONSTRUCT or DESCRIBE query
*
* @param {string} query
* @param {Object} [options]
* @param {HeadersInit} [options.headers] HTTP request headers
* @param {'get'|'postUrlencoded'|'postDirect'} [options.operation='get']
* @return {Promise<Quad[]>}
* @param {string} query CONSTRUCT or DESCRIBE query
* @param {Object} options
* @param {Headers} [options.headers] additional request headers
* @param {'get'|'postUrlencoded'|'postDirect'} [options.operation='get'] SPARQL Protocol operation
* @return {Promise<DatasetCore>}
*/
async construct (query, options = {}) {
const stream = await super.construct(query, options)
async construct (query, { headers, operation } = {}) {
const quads = await chunks(await super.construct(query, { headers, operation }))

return array(stream)
return this.client.factory.dataset(quads)
}

/**
* Performs a SELECT query which returns binding tuples
* Sends a request for a SELECT query
*
* @param {string} query
* @param {string} query SELECT query
* @param {Object} [options]
* @param {HeadersInit} [options.headers] HTTP request headers
* @param {'get'|'postUrlencoded'|'postDirect'} [options.operation='get']
* @param {Headers} [options.headers] additional request headers
* @param {'get'|'postUrlencoded'|'postDirect'} [options.operation='get'] SPARQL Protocol operation
* @return {Promise<Array<Object.<string, Term>>>}
*/
async select (query, options = {}) {
const stream = await super.select(query, options)

return array(stream)
async select (query, { headers, operation } = {}) {
return chunks(await super.select(query, { headers, operation }))
}
}

module.exports = ParsingQuery
export default ParsingQuery
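
Since `construct` now resolves to an RDF/JS DatasetCore and `select` to an array of row objects keyed by variable name, both methods can be awaited directly. A minimal sketch, assuming a `ParsingClient` with a placeholder endpoint URL and placeholder queries:

import ParsingClient from 'sparql-http-client/ParsingClient.js'

const client = new ParsingClient({ endpointUrl: 'https://example.org/sparql' })

// CONSTRUCT: the resolved value is a DatasetCore and can be iterated as quads
const dataset = await client.query.construct('CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 10')

for (const quad of dataset) {
  console.log(`${quad.subject.value} ${quad.predicate.value} ${quad.object.value}`)
}

// SELECT: the resolved value is an array of row objects
const rows = await client.query.select('SELECT ?s WHERE { ?s ?p ?o } LIMIT 10')

console.log(rows.map(row => row.s.value))
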