diff --git a/backend/lib/azimutt_web/templates/website/converters/_editors-script.html.heex b/backend/lib/azimutt_web/templates/website/converters/_editors-script.html.heex
index 345c345fd..1372a32d0 100644
--- a/backend/lib/azimutt_web/templates/website/converters/_editors-script.html.heex
+++ b/backend/lib/azimutt_web/templates/website/converters/_editors-script.html.heex
@@ -1,4 +1,4 @@
-
+
diff --git a/libs/aml/package.json b/libs/aml/package.json
index c48be1b91..859433f6f 100644
--- a/libs/aml/package.json
+++ b/libs/aml/package.json
@@ -1,6 +1,6 @@
{
"name": "@azimutt/aml",
- "version": "0.1.7",
+ "version": "0.1.8",
"description": "Parse and Generate AML: the easiest language to define database schema.",
"keywords": ["DSL", "language", "database", "parser"],
"homepage": "https://azimutt.app/aml",
diff --git a/libs/aml/src/amlAst.ts b/libs/aml/src/amlAst.ts
index bb459e0e4..31a805d99 100644
--- a/libs/aml/src/amlAst.ts
+++ b/libs/aml/src/amlAst.ts
@@ -7,52 +7,59 @@ import {
TokenPosition
} from "@azimutt/models";
+// statements
export type AmlAst = StatementAst[]
export type StatementAst = NamespaceStatement | EntityStatement | RelationStatement | TypeStatement | EmptyStatement
-export type NamespaceStatement = { statement: 'Namespace', line: number, schema?: IdentifierToken, catalog?: IdentifierToken, database?: IdentifierToken } & ExtraAst
-export type EntityStatement = { statement: 'Entity', name: IdentifierToken, view?: TokenInfo, alias?: IdentifierToken, attrs?: AttributeAstNested[] } & NamespaceRefAst & ExtraAst
-export type RelationStatement = { statement: 'Relation', src: AttributeRefCompositeAst, ref: AttributeRefCompositeAst, srcCardinality: RelationCardinality, refCardinality: RelationCardinality, polymorphic?: RelationPolymorphicAst } & ExtraAst & { warning?: TokenInfo }
-export type TypeStatement = { statement: 'Type', name: IdentifierToken, content?: TypeContentAst } & NamespaceRefAst & ExtraAst
-export type EmptyStatement = { statement: 'Empty', comment?: CommentToken }
+export type NamespaceStatement = { kind: 'Namespace', line: number, schema?: IdentifierAst, catalog?: IdentifierAst, database?: IdentifierAst } & ExtraAst
+export type EntityStatement = { kind: 'Entity', name: IdentifierAst, view?: TokenInfo, alias?: IdentifierAst, attrs?: AttributeAstNested[] } & NamespaceRefAst & ExtraAst
+export type RelationStatement = { kind: 'Relation', src: AttributeRefCompositeAst, srcCardinality: RelationCardinalityAst, polymorphic?: RelationPolymorphicAst, refCardinality: RelationCardinalityAst, ref: AttributeRefCompositeAst } & ExtraAst & { warning?: TokenInfo }
+export type TypeStatement = { kind: 'Type', name: IdentifierAst, content?: TypeContentAst } & NamespaceRefAst & ExtraAst
+export type EmptyStatement = { kind: 'Empty', comment?: CommentAst }
-export type AttributeAstFlat = { nesting: TokenInfo & {depth: number}, name: IdentifierToken, nullable?: TokenInfo } & AttributeTypeAst & AttributeConstraintsAst & { relation?: AttributeRelationAst } & ExtraAst
-export type AttributeAstNested = { path: IdentifierToken[], nullable?: TokenInfo } & AttributeTypeAst & AttributeConstraintsAst & { relation?: AttributeRelationAst } & ExtraAst & { attrs?: AttributeAstNested[], warning?: TokenInfo }
-export type AttributeTypeAst = { type?: IdentifierToken, enumValues?: AttributeValueAst[], defaultValue?: AttributeValueAst }
-export type AttributeConstraintsAst = { primaryKey?: AttributeConstraintAst, index?: AttributeConstraintAst, unique?: AttributeConstraintAst, check?: AttributeCheckAst }
-export type AttributeConstraintAst = { keyword: TokenInfo, name?: IdentifierToken }
-export type AttributeCheckAst = AttributeConstraintAst & { predicate?: ExpressionToken }
-export type AttributeRelationAst = { ref: AttributeRefCompositeAst, srcCardinality: RelationCardinality, refCardinality: RelationCardinality, polymorphic?: RelationPolymorphicAst, warning?: TokenInfo }
+// clauses
+export type AttributeAstFlat = { nesting: {token: TokenInfo, depth: number}, name: IdentifierAst, nullable?: TokenInfo } & AttributeTypeAst & { constraints?: AttributeConstraintAst[] } & ExtraAst
+export type AttributeAstNested = { path: IdentifierAst[], nullable?: TokenInfo } & AttributeTypeAst & { constraints?: AttributeConstraintAst[] } & ExtraAst & { attrs?: AttributeAstNested[], warning?: TokenInfo }
+export type AttributeTypeAst = { type?: IdentifierAst, enumValues?: AttributeValueAst[], defaultValue?: AttributeValueAst }
+export type AttributeConstraintAst = AttributePkAst | AttributeUniqueAst | AttributeIndexAst | AttributeCheckAst | AttributeRelationAst
+export type AttributePkAst = { kind: 'PrimaryKey', token: TokenInfo, name?: IdentifierAst }
+export type AttributeUniqueAst = { kind: 'Unique', token: TokenInfo, name?: IdentifierAst }
+export type AttributeIndexAst = { kind: 'Index', token: TokenInfo, name?: IdentifierAst }
+export type AttributeCheckAst = { kind: 'Check', token: TokenInfo, name?: IdentifierAst, predicate?: ExpressionAst }
+export type AttributeRelationAst = { kind: 'Relation', token: TokenInfo, srcCardinality: RelationCardinalityAst, polymorphic?: RelationPolymorphicAst, refCardinality: RelationCardinalityAst, ref: AttributeRefCompositeAst, warning?: TokenInfo }
+export type RelationCardinalityAst = { kind: RelationCardinality, token: TokenInfo }
export type RelationPolymorphicAst = { attr: AttributePathAst, value: AttributeValueAst }
export type TypeContentAst = TypeAliasAst | TypeEnumAst | TypeStructAst | TypeCustomAst
-export type TypeAliasAst = { kind: 'alias', name: IdentifierToken }
-export type TypeEnumAst = { kind: 'enum', values: AttributeValueAst[] }
-export type TypeStructAst = { kind: 'struct', attrs: AttributeAstNested[] }
-export type TypeCustomAst = { kind: 'custom', definition: ExpressionToken }
+export type TypeAliasAst = { kind: 'Alias', name: IdentifierAst }
+export type TypeEnumAst = { kind: 'Enum', values: AttributeValueAst[] }
+export type TypeStructAst = { kind: 'Struct', attrs: AttributeAstNested[] }
+export type TypeCustomAst = { kind: 'Custom', definition: ExpressionAst }
-export type NamespaceRefAst = { database?: IdentifierToken, catalog?: IdentifierToken, schema?: IdentifierToken }
-export type EntityRefAst = { entity: IdentifierToken } & NamespaceRefAst
-export type AttributePathAst = IdentifierToken & { path?: IdentifierToken[] }
+// basic parts
+export type NamespaceRefAst = { database?: IdentifierAst, catalog?: IdentifierAst, schema?: IdentifierAst }
+export type EntityRefAst = { entity: IdentifierAst } & NamespaceRefAst
+export type AttributePathAst = IdentifierAst & { path?: IdentifierAst[] }
export type AttributeRefAst = EntityRefAst & { attr: AttributePathAst, warning?: TokenInfo }
export type AttributeRefCompositeAst = EntityRefAst & { attrs: AttributePathAst[], warning?: TokenInfo }
-export type AttributeValueAst = NullToken | DecimalToken | IntegerToken | BooleanToken | ExpressionToken | IdentifierToken // TODO: add date
+export type AttributeValueAst = NullAst | DecimalAst | IntegerAst | BooleanAst | ExpressionAst | IdentifierAst // TODO: add date
-export type ExtraAst = { properties?: PropertiesAst, doc?: DocToken, comment?: CommentToken }
+export type ExtraAst = { properties?: PropertiesAst, doc?: DocAst, comment?: CommentAst }
export type PropertiesAst = PropertyAst[]
-export type PropertyAst = { key: IdentifierToken, sep?: TokenInfo, value?: PropertyValueAst }
-export type PropertyValueAst = NullToken | DecimalToken | IntegerToken | BooleanToken | ExpressionToken | IdentifierToken | PropertyValueAst[]
+export type PropertyAst = { key: IdentifierAst, sep?: TokenInfo, value?: PropertyValueAst }
+export type PropertyValueAst = NullAst | DecimalAst | IntegerAst | BooleanAst | ExpressionAst | IdentifierAst | PropertyValueAst[]
+export type DocAst = { kind: 'Doc', token: TokenInfo, value: string, multiLine?: boolean }
-// basic tokens
-export type NullToken = { token: 'Null' } & TokenInfo
-export type DecimalToken = { token: 'Decimal', value: number } & TokenInfo
-export type IntegerToken = { token: 'Integer', value: number } & TokenInfo
-export type BooleanToken = { token: 'Boolean', value: boolean } & TokenInfo
-export type ExpressionToken = { token: 'Expression', value: string } & TokenInfo
-export type IdentifierToken = { token: 'Identifier', value: string } & TokenInfo
-export type DocToken = { token: 'Doc', value: string } & TokenPosition
-export type CommentToken = { token: 'Comment', value: string } & TokenPosition
+// elements
+export type ExpressionAst = { kind: 'Expression', token: TokenInfo, value: string }
+export type IdentifierAst = { kind: 'Identifier', token: TokenInfo, value: string, quoted?: boolean }
+export type IntegerAst = { kind: 'Integer', token: TokenInfo, value: number }
+export type DecimalAst = { kind: 'Decimal', token: TokenInfo, value: number }
+export type BooleanAst = { kind: 'Boolean', token: TokenInfo, value: boolean }
+export type NullAst = { kind: 'Null', token: TokenInfo }
+export type CommentAst = { kind: 'Comment', token: TokenInfo, value: string }
+// helpers
export type TokenInfo = TokenPosition & { issues?: TokenIssue[] }
export type TokenIssue = { message: string, kind: string, level: ParserErrorLevel }
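Aside (not part of the diff): the nodes above are now discriminated by `kind` with a separate `token: TokenInfo`, and attribute constraints become a single `constraints` list instead of the fixed `primaryKey`/`index`/`unique`/`check` fields. A minimal TypeScript sketch of how a consumer narrows them, assuming it sits next to amlAst.ts so the relative import resolves; `primaryKeyNames` is illustrative, not an AML API:

import {AttributeAstNested} from "./amlAst";

// Collect the names of primary key constraints declared on one attribute:
// narrowing on `kind` inside the callback gives access to the variant-specific fields.
function primaryKeyNames(attr: AttributeAstNested): string[] {
    return (attr.constraints || []).flatMap(c => c.kind === 'PrimaryKey' && c.name ? [c.name.value] : [])
}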
diff --git a/libs/aml/src/amlBuilder.ts b/libs/aml/src/amlBuilder.ts
index f324518a5..3149a1ac2 100644
--- a/libs/aml/src/amlBuilder.ts
+++ b/libs/aml/src/amlBuilder.ts
@@ -38,15 +38,15 @@ import packageJson from "../package.json";
import {
AmlAst,
AttributeAstNested,
- AttributeConstraintAst,
AttributePathAst,
+ AttributePkAst,
AttributeRelationAst,
AttributeValueAst,
EntityRefAst,
EntityStatement,
- ExpressionToken,
+ ExpressionAst,
ExtraAst,
- IdentifierToken,
+ IdentifierAst,
NamespaceStatement,
PropertiesAst,
PropertyValue,
@@ -61,12 +61,12 @@ import {duplicated, notFound} from "./errors";
export function buildDatabase(ast: AmlAst, start: number, parsed: number): {db: Database, errors: ParserError[]} {
const db: Database = {entities: [], relations: [], types: []}
const errors: ParserError[] = []
- const statements = ast.filter(s => s.statement !== 'Empty')
+ const statements = ast.filter(s => s.kind !== 'Empty')
const entityRelations = buildTypesAndEntities(db, errors, statements)
buildRelations(db, errors, statements, entityRelations) // all entities need to be built to perform some relation checks
const comments: {line: number, comment: string}[] = []
- ast.filter(s => s.statement === 'Empty').forEach(stmt => {
- if (stmt.comment) comments.push({line: stmt.comment.position.start.line, comment: stmt.comment.value})
+ ast.filter(s => s.kind === 'Empty').forEach(stmt => {
+ if (stmt.comment) comments.push({line: stmt.comment.token.position.start.line, comment: stmt.comment.value})
})
const done = Date.now()
const extra = removeEmpty({
@@ -76,7 +76,7 @@ export function buildDatabase(ast: AmlAst, start: number, parsed: number): {db:
parsingTimeMs: parsed - start,
formattingTimeMs: done - parsed,
comments,
- namespaces: ast.filter(s => s.statement === 'Namespace').map((s, i) => removeUndefined({line: s.line, ...buildNamespace(i, s), comment: s.comment?.value}))
+ namespaces: ast.filter(s => s.kind === 'Namespace').map((s, i) => removeUndefined({line: s.line, ...buildNamespace(i, s), comment: s.comment?.value}))
})
return {db: removeEmpty({
entities: db.entities?.sort((a, b) => a.extra?.line && b.extra?.line ? a.extra.line - b.extra.line : a.name.toLowerCase().localeCompare(b.name.toLowerCase())),
@@ -91,12 +91,12 @@ function buildTypesAndEntities(db: Database, errors: ParserError[], ast: AmlAst)
const relations: InlineRelation[] = []
ast.forEach((stmt, i) => {
const index = i + 1
- if (stmt.statement === 'Namespace') {
+ if (stmt.kind === 'Namespace') {
namespace = buildNamespace(index, stmt)
- } else if (stmt.statement === 'Type') {
+ } else if (stmt.kind === 'Type') {
const type = buildType(namespace, index, stmt)
- addType(db, errors, type, mergePositions([stmt.database, stmt.catalog, stmt.schema, stmt.name]))
- } else if (stmt.statement === 'Entity') {
+ addType(db, errors, type, mergePositions([stmt.database, stmt.catalog, stmt.schema, stmt.name].map(v => v?.token)))
+ } else if (stmt.kind === 'Entity') {
if (!db.entities) db.entities = []
const res = buildEntity(namespace, index, stmt)
const ref = entityToRef(res.entity)
@@ -106,7 +106,7 @@ function buildTypesAndEntities(db: Database, errors: ParserError[], ast: AmlAst)
errors.push(duplicated(
`Entity ${entityToId(res.entity)}`,
prev.extra?.line ? prev.extra.line : undefined,
- mergePositions([stmt.database, stmt.catalog, stmt.schema, stmt.name])
+ mergePositions([stmt.database, stmt.catalog, stmt.schema, stmt.name].map(v => v?.token))
))
db.entities[prevIndex] = mergeEntity(prev, res.entity)
} else {
@@ -128,18 +128,18 @@ function buildRelations(db: Database, errors: ParserError[], ast: AmlAst, attrRe
db,
errors,
buildRelationAttribute(db.entities || [], aliases, r.statement, r.entity, r.attrs, r.ref),
- mergePositions([r.ref.ref.database, r.ref.ref.catalog, r.ref.ref.schema, r.ref.ref.entity, ...r.ref.ref.attrs, ...r.ref.ref.attrs.flatMap(a => a.path)])
+ mergePositions([r.ref.ref.database, r.ref.ref.catalog, r.ref.ref.schema, r.ref.ref.entity, ...r.ref.ref.attrs, ...r.ref.ref.attrs.flatMap(a => a.path)].map(v => v?.token))
))
ast.forEach((stmt, i) => {
const index = i + 1
- if (stmt.statement === 'Namespace') {
+ if (stmt.kind === 'Namespace') {
namespace = buildNamespace(index, stmt)
- } else if (stmt.statement === 'Relation') {
+ } else if (stmt.kind === 'Relation') {
addRelation(
db,
errors,
buildRelationStatement(db.entities || [], aliases, namespace, index, stmt),
- mergePositions([stmt.src.database, stmt.src.catalog, stmt.src.schema, stmt.src.entity, ...(stmt.ref?.attrs || []), ...(stmt.ref?.attrs || []).flatMap(a => a.path)])
+ mergePositions([stmt.src.database, stmt.src.catalog, stmt.src.schema, stmt.src.entity, ...(stmt.ref?.attrs || []), ...(stmt.ref?.attrs || []).flatMap(a => a.path)].map(v => v?.token))
)
} else {
// last to be built, types & relations are already built
@@ -200,26 +200,27 @@ function buildEntity(namespace: Namespace, statement: number, e: EntityStatement
const validAttrs = (e.attrs || []).filter(a => !a.path.some(p => p === undefined)) // `path` can be `[undefined]` on invalid input :/
const attrs = validAttrs.map(a => buildAttribute(namespace, statement, a, {...entityNamespace, entity: e.name.value}))
const flatAttrs = flattenAttributes(validAttrs).filter(a => !a.path.some(p => p === undefined)) // nested attributes can have `path` be `[undefined]` on invalid input :/
- const pkAttrs = flatAttrs.filter(a => a.primaryKey)
- const indexes: Index[] = buildIndexes(flatAttrs.map(a => a.index ? {path: a.path, ...a.index} : undefined).filter(isNotUndefined))
- const uniques: Index[] = buildIndexes(flatAttrs.map(a => a.unique ? {path: a.path, ...a.unique} : undefined).filter(isNotUndefined)).map(u => ({...u, unique: true}))
- const checks: Check[] = buildIndexes(flatAttrs.map(a => a.check ? {path: a.path, ...a.check} : undefined).filter(isNotUndefined)).map(c => ({...c, predicate: c.predicate || ''}))
+ const constraints = flatAttrs.flatMap(a => (a.constraints || []).map(c => ({path: a.path, ...c})))
+ const pkAttrs: ({ path: IdentifierAst[] } & AttributePkAst)[] = constraints.filter(c => c.kind === 'PrimaryKey')
+ const indexes: Index[] = buildIndexes(constraints.filter(c => c.kind === 'Index').map(c => ({path: c.path, name: c.name})))
+ const uniques: Index[] = buildIndexes(constraints.filter(c => c.kind === 'Unique').map(c => ({path: c.path, name: c.name}))).map(u => ({...u, unique: true}))
+ const checks: Check[] = buildIndexes(constraints.filter(c => c.kind === 'Check').map(c => ({path: c.path, name: c.name, predicate: c.predicate}))).map(c => ({...c, predicate: c.predicate || ''}))
return {
entity: removeEmpty({
...entityNamespace,
name: e.name.value,
kind: e.view || e.properties?.find(p => p.key.value === 'view') ? 'view' as const : undefined,
- def: e.properties?.flatMap(p => p.key.value === 'view' && p.value && !Array.isArray(p.value) && p.value.token === 'Identifier' ? [p.value.value.replaceAll(/\\n/g, '\n')] : [])[0],
+ def: e.properties?.flatMap(p => p.key.value === 'view' && p.value && !Array.isArray(p.value) && p.value.kind === 'Identifier' ? [p.value.value.replaceAll(/\\n/g, '\n')] : [])[0],
attrs: attrs.map(a => a.attribute),
pk: pkAttrs.length > 0 ? removeUndefined({
- name: pkAttrs.map(a => a.primaryKey?.name?.value).find(isNotUndefined),
+ name: pkAttrs.map(a => a.name?.value).find(isNotUndefined),
attrs: pkAttrs.map(a => a.path.map(p => p.value)),
}) : undefined,
indexes: uniques.concat(indexes),
checks: checks,
doc: e.doc?.value,
stats: undefined,
- extra: buildExtra({line: e.name.position.start.line, statement, alias: e.alias?.value, comment: e.comment?.value}, e, ['view'])
+ extra: buildExtra({line: e.name.token.position.start.line, statement, alias: e.alias?.value, comment: e.comment?.value}, e, ['view'])
}),
relations: attrs.flatMap(a => a.relations),
types: attrs.flatMap(a => a.types),
@@ -231,9 +232,9 @@ type InlineType = {type: Type, position: TokenPosition}
function buildAttribute(namespace: Namespace, statement: number, a: AttributeAstNested, entity: EntityRef): { attribute: Attribute, relations: InlineRelation[], types: InlineType[] } {
const {entity: _, ...entityNamespace} = entity
- const numType = a.enumValues && a.enumValues.length <= 2 && a.enumValues.every(v => v.token === 'Integer') ? '(' + a.enumValues.map(stringifyAttrValue).join(',') + ')' : '' // types with num parameter (varchar(10), decimal(2,3)...)
+ const numType = a.enumValues && a.enumValues.length <= 2 && a.enumValues.every(v => v.kind === 'Integer') ? '(' + a.enumValues.map(stringifyAttrValue).join(',') + ')' : '' // types with num parameter (varchar(10), decimal(2,3)...)
const enumType: InlineType[] = buildTypeInline(entityNamespace, statement, a, numType)
- const relation: InlineRelation[] = a.relation ? [{namespace, statement, entity, attrs: [a.path.map(p => p.value)], ref: a.relation}] : []
+ const relation: InlineRelation[] = (a.constraints || []).filter(c => c.kind === 'Relation').map(c => ({namespace, statement, entity, attrs: [a.path.map(p => p.value)], ref: c}))
const validAttrs = (a.attrs || []).filter(aa => !aa.path.some(p => p === undefined)) // `path` can be `[undefined]` on invalid input :/
const nested = validAttrs.map(aa => buildAttribute(namespace, statement, aa, entity))
return {
@@ -258,22 +259,22 @@ function buildAttributeType(a: AttributeAstNested, ext: string): AttributeType {
}
function buildAttrValue(v: AttributeValueAst): AttributeValue {
- if (v.token === 'Null') return null
- if (v.token === 'Decimal') return v.value
- if (v.token === 'Integer') return v.value
- if (v.token === 'Boolean') return v.value
- if (v.token === 'Expression') return '`' + v.value + '`'
- if (v.token === 'Identifier') return v.value
+ if (v.kind === 'Null') return null
+ if (v.kind === 'Decimal') return v.value
+ if (v.kind === 'Integer') return v.value
+ if (v.kind === 'Boolean') return v.value
+ if (v.kind === 'Expression') return '`' + v.value + '`'
+ if (v.kind === 'Identifier') return v.value
return isNever(v)
}
function stringifyAttrValue(v: AttributeValueAst): string {
- if (v.token === 'Null') return 'null'
- if (v.token === 'Decimal') return v.value.toString()
- if (v.token === 'Integer') return v.value.toString()
- if (v.token === 'Boolean') return v.value ? 'true' : 'false'
- if (v.token === 'Expression') return v.value
- if (v.token === 'Identifier') return v.value
+ if (v.kind === 'Null') return 'null'
+ if (v.kind === 'Decimal') return v.value.toString()
+ if (v.kind === 'Integer') return v.value.toString()
+ if (v.kind === 'Boolean') return v.value ? 'true' : 'false'
+ if (v.kind === 'Expression') return v.value
+ if (v.kind === 'Identifier') return v.value
return isNever(v)
}
@@ -284,7 +285,7 @@ function flattenAttributes(attributes: AttributeAstNested[]): AttributeAstNested
})
}
-function buildIndexes(indexes: (AttributeConstraintAst & { path: IdentifierToken[], predicate?: ExpressionToken })[]): {name?: string, attrs: AttributePath[], predicate?: string}[] {
+function buildIndexes(indexes: { path: IdentifierAst[], name?: IdentifierAst, predicate?: ExpressionAst }[]): {name?: string, attrs: AttributePath[], predicate?: string}[] {
const indexesByName: Record = groupBy(indexes.map(i => ({name: i.name?.value || '', path: i.path.map(n => n.value), predicate: i.predicate?.value})), i => i.name)
const singleIndexes: {name?: string, attrs: AttributePath[], predicate?: string}[] = (indexesByName[''] || []).map(i => removeUndefined({attrs: [i.path], predicate: i.predicate}))
const compositeIndexes: {name?: string, attrs: AttributePath[], predicate?: string}[] = Object.entries(indexesByName).filter(([k, _]) => k !== '').map(([name, values]) => removeUndefined({name, attrs: values.map(v => v.path), predicate: values.map(v => v.predicate).find(p => !!p)}))
@@ -293,14 +294,14 @@ function buildIndexes(indexes: (AttributeConstraintAst & { path: IdentifierToken
function buildRelationStatement(entities: Entity[], aliases: Record, namespace: Namespace, statement: number, r: RelationStatement): Relation | undefined {
const [srcEntity, srcAlias] = buildEntityRef(r.src, namespace, aliases)
- return buildRelation(entities, aliases, statement, r.src.entity.position.start.line, srcEntity, srcAlias, r.src.attrs.map(buildAttrPath), r, r, false)
+ return buildRelation(entities, aliases, statement, r.src.entity.token.position.start.line, srcEntity, srcAlias, r.src.attrs.map(buildAttrPath), r, r, false)
}
function buildRelationAttribute(entities: Entity[], aliases: Record, statement: number, srcEntity: EntityRef, srcAttrs: AttributePath[], r: AttributeRelationAst): Relation | undefined {
- return buildRelation(entities, aliases, statement, r.ref.entity?.position.start.line, srcEntity, undefined, srcAttrs, r, undefined, true)
+ return buildRelation(entities, aliases, statement, r.ref.entity?.token.position.start.line, srcEntity, undefined, srcAttrs, r, undefined, true)
}
-function buildRelation(entities: Entity[], aliases: Record, statement: number, line: number, srcEntity: EntityRef, srcAlias: string | undefined, srcAttrs: AttributePath[], rel: AttributeRelationAst, extra: ExtraAst | undefined, inline: boolean): Relation | undefined {
+function buildRelation(entities: Entity[], aliases: Record, statement: number, line: number, srcEntity: EntityRef, srcAlias: string | undefined, srcAttrs: AttributePath[], rel: Omit, extra: ExtraAst | undefined, inline: boolean): Relation | undefined {
// TODO: report an error instead of just ignoring?
if (!rel.ref || !rel.ref.entity.value || !rel.ref.attrs || rel.ref.attrs.some(a => a.value === undefined)) return undefined // `ref` can be undefined or with empty entity or undefined attrs on invalid input :/
const [refEntity, refAlias] = buildEntityRef(rel.ref, {}, aliases) // current namespace not used for relation ref, good idea???
@@ -310,8 +311,8 @@ function buildRelation(entities: Entity[], aliases: Record, s
return removeUndefined({
name: undefined,
origin: undefined,
- src: removeUndefined({...srcEntity, attrs: srcAttrs2, cardinality: rel.srcCardinality === 'n' ? undefined : rel.srcCardinality}),
- ref: removeUndefined({...refEntity, attrs: refAttrs, cardinality: rel.refCardinality === '1' ? undefined : rel.refCardinality}),
+ src: removeUndefined({...srcEntity, attrs: srcAttrs2, cardinality: rel.srcCardinality?.kind === 'n' ? undefined : rel.srcCardinality?.kind}),
+ ref: removeUndefined({...refEntity, attrs: refAttrs, cardinality: rel.refCardinality?.kind === '1' ? undefined : rel.refCardinality?.kind}),
polymorphic: rel.polymorphic ? {attribute: buildAttrPath(rel.polymorphic.attr), value: buildAttrValue(rel.polymorphic.value)} : undefined,
doc: extra?.doc?.value,
extra: buildExtra({line, statement, inline: inline ? true : undefined, natural, srcAlias, refAlias, comment: extra?.comment?.value}, extra || {}, []),
@@ -334,8 +335,8 @@ function buildAttrPath(a: AttributePathAst): AttributePath {
function buildTypeInline(namespace: Namespace, statement: number, a: AttributeAstNested, numType: string): InlineType[] {
return a.type && a.enumValues && !numType ? [{
- type: {...namespace, name: a.type.value, values: a.enumValues.map(stringifyAttrValue), extra: {line: a.enumValues[0].position.start.line, statement, inline: true}},
- position: mergePositions(a.enumValues)
+ type: {...namespace, name: a.type.value, values: a.enumValues.map(stringifyAttrValue), extra: {line: a.enumValues[0].token.position.start.line, statement, inline: true}},
+ position: mergePositions(a.enumValues.map(v => v.token))
}] : []
}
@@ -343,14 +344,14 @@ function buildType(namespace: Namespace, statement: number, t: TypeStatement): T
const astNamespace = removeUndefined({schema: t.schema?.value, catalog: t.catalog?.value, database: t.database?.value})
const typeNamespace = {...namespace, ...astNamespace}
const content = t.content ? buildTypeContent(namespace, statement, t.content, {...typeNamespace, entity: t.name.value}) : {}
- return removeUndefined({...typeNamespace, name: t.name.value, ...content, doc: t.doc?.value, extra: buildExtra({line: t.name.position.start.line, statement, comment: t.comment?.value}, t, [])})
+ return removeUndefined({...typeNamespace, name: t.name.value, ...content, doc: t.doc?.value, extra: buildExtra({line: t.name.token.position.start.line, statement, comment: t.comment?.value}, t, [])})
}
function buildTypeContent(namespace: Namespace, statement: number, t: TypeContentAst, entity: EntityRef): {alias?: string, values?: string[], attrs?: Attribute[], definition?: string} {
- if (t.kind === 'alias') return {alias: t.name.value}
- if (t.kind === 'enum') return {values: t.values.map(stringifyAttrValue)}
- if (t.kind === 'struct') return {attrs: t.attrs.map(a => buildAttribute(namespace, statement, a, entity).attribute)}
- if (t.kind === 'custom') return {definition: t.definition.value}
+ if (t.kind === 'Alias') return {alias: t.name.value}
+ if (t.kind === 'Enum') return {values: t.values.map(stringifyAttrValue)}
+ if (t.kind === 'Struct') return {attrs: t.attrs.map(a => buildAttribute(namespace, statement, a, entity).attribute)}
+ if (t.kind === 'Custom') return {definition: t.definition.value}
return isNever(t)
}
@@ -363,11 +364,11 @@ function buildExtra(extra: Extra, v: {properties?: PropertiesAst}, ignore: strin
function buildPropValue(v: PropertyValueAst): PropertyValue {
if (Array.isArray(v)) return v.map(buildPropValue) as PropertyValueBasic[] // ignore nested arrays
- if (v.token === 'Null') return null
- if (v.token === 'Decimal') return v.value
- if (v.token === 'Integer') return v.value
- if (v.token === 'Boolean') return v.value
- if (v.token === 'Expression') return '`' + v.value + '`'
- if (v.token === 'Identifier') return v.value
+ if (v.kind === 'Null') return null
+ if (v.kind === 'Decimal') return v.value
+ if (v.kind === 'Integer') return v.value
+ if (v.kind === 'Boolean') return v.value
+ if (v.kind === 'Expression') return '`' + v.value + '`'
+ if (v.kind === 'Identifier') return v.value
return isNever(v)
}
diff --git a/libs/aml/src/amlGenerator.test.ts b/libs/aml/src/amlGenerator.test.ts
index f8805648d..d7421d29d 100644
--- a/libs/aml/src/amlGenerator.test.ts
+++ b/libs/aml/src/amlGenerator.test.ts
@@ -1,6 +1,6 @@
import * as fs from "fs";
import {describe, expect, test} from "@jest/globals";
-import {Database, parseJsonDatabase, ParserResult, tokenPosition} from "@azimutt/models";
+import {Database, parseJsonDatabase, ParserResult, TokenPosition} from "@azimutt/models";
import {generateAml, parseAml} from "./index";
import {genEntity} from "./amlGenerator";
import {duplicated, legacy} from "./errors";
@@ -176,7 +176,7 @@ type range \`(subtype = float8, subtype_diff = float8mi)\` # custom type
extra: {}
}
const parsed = parseAmlTest(input)
- expect(parsed).toEqual({result: db, errors: [{message: 'Type status already defined at line 2', kind: 'Duplicated', level: 'warning', ...tokenPosition(66, 81, 5, 17, 5, 32)}]})
+ expect(parsed).toEqual({result: db, errors: [{message: 'Type status already defined at line 2', kind: 'Duplicated', level: 'warning', ...token(66, 81, 5, 5, 17, 32)}]})
expect(generateAml(parsed.result || {})).toEqual(input)
})
test('bad schema', () => {
@@ -190,8 +190,8 @@ type range \`(subtype = float8, subtype_diff = float8mi)\` # custom type
extra: {}
},
errors: [
- {message: "Expecting token of type --> NewLine <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(2, 4, 1, 3, 1, 5)},
- {message: "Expecting token of type --> NewLine <-- but found --> 'schema' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(6, 11, 1, 7, 1, 12)},
+ {message: "Expecting token of type --> NewLine <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...token(2, 4)},
+ {message: "Expecting token of type --> NewLine <-- but found --> 'schema' <--", kind: 'MismatchedTokenException', level: 'error', ...token(6, 11)},
]
})
})
@@ -219,7 +219,7 @@ public.users
public.users
id uuid pk
- name varchar`).errors).toEqual([duplicated('Entity public.users', 2, tokenPosition(43, 54, 6, 1, 6, 12))])
+ name varchar`).errors).toEqual([duplicated('Entity public.users', 2, token(43, 54, 6, 6, 1, 12))])
expect(parseAml(`
public.users
@@ -230,7 +230,7 @@ namespace public
users
id uuid pk
- name varchar`).errors).toEqual([duplicated('Entity public.users', 2, tokenPosition(61, 65, 8, 1, 8, 5))])
+ name varchar`).errors).toEqual([duplicated('Entity public.users', 2, token(61, 65, 8, 8, 1, 5))])
})
test('duplicate relation', () => {
expect(parseAml(`
@@ -242,7 +242,7 @@ posts
author uuid -> users(id)
rel posts(author) -> users(id)
-`).errors).toEqual([duplicated('Relation posts(author)->users(id)', 7, tokenPosition(72, 96, 9, 5, 9, 29))])
+`).errors).toEqual([duplicated('Relation posts(author)->users(id)', 7, token(72, 96, 9, 9, 5, 29))])
})
test('duplicate type', () => {
expect(parseAml(`
@@ -251,7 +251,7 @@ public.posts
status status(draft, published)
type public.status (pending, wip, done)
-`).errors).toEqual([duplicated('Type public.status', 4, tokenPosition(67, 79, 6, 6, 6, 18))])
+`).errors).toEqual([duplicated('Type public.status', 4, token(67, 79, 6, 6, 6, 18))])
})
})
@@ -263,23 +263,23 @@ type public.status (pending, wip, done)
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], extra: {}}
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int -\n')).toEqual({
- result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], extra: {}},
- errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Dash]\n 2. [LowerThan]\n 3. [GreaterThan]\nbut found: '\n'", kind: 'NoViableAltException', level: 'error', ...tokenPosition(39, 39, 5, 15, 5, 15)}]
+ result: {entities: [users, {name: 'posts', extra: {line: 4, statement: 2}}], extra: {}},
+ errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Dash]\n 2. [LowerThan]\n 3. [GreaterThan]\nbut found: '\n'", kind: 'NoViableAltException', level: 'error', ...token(39, 39, 5, 5, 15, 15)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int ->\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], extra: {}},
- errors: [{message: "Expecting token of type --> Identifier <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(40, 40, 5, 16, 5, 16)}]
+ errors: [{message: "Expecting token of type --> Identifier <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...token(40, 40, 5, 5, 16, 16)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int -> users\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], relations: [{src: {entity: 'posts', attrs: [['author']]}, ref: {entity: 'users', attrs: [['id']]}, extra: {line: 5, statement: 2, natural: 'ref', inline: true}}], extra: {}},
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int -> users(\n')).toEqual({
- result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], extra: {}},
- errors: [{message: "Expecting: expecting at least one iteration which starts with one of these possible Token sequences::\n <[WhiteSpace] ,[Identifier]>\nbut found: '\n'", kind: 'EarlyExitException', level: 'error', ...tokenPosition(47, 47, 5, 23, 5, 23)}]
+ result: {entities: [users, {name: 'posts', extra: {line: 4, statement: 2}}], extra: {}},
+ errors: [{message: "Expecting: expecting at least one iteration which starts with one of these possible Token sequences::\n <[WhiteSpace] ,[Identifier]>\nbut found: '\n'", kind: 'EarlyExitException', level: 'error', ...token(47, 47, 5, 5, 23, 23)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int -> users(id\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], relations: [{src: {entity: 'posts', attrs: [['author']]}, ref: {entity: 'users', attrs: [['id']]}, extra: {line: 5, statement: 2, inline: true}}], extra: {}},
- errors: [{message: "Expecting token of type --> RParen <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(49, 49, 5, 25, 5, 25)}]
+ errors: [{message: "Expecting token of type --> ParenRight <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...token(49, 49, 5, 5, 25, 25)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int -> users(id)\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], relations: [{src: {entity: 'posts', attrs: [['author']]}, ref: {entity: 'users', attrs: [['id']]}, extra: {line: 5, statement: 2, inline: true}}], extra: {}},
@@ -287,15 +287,15 @@ type public.status (pending, wip, done)
expect(parseAmlTest('users\n id int pk\n\nposts\n author int - users(id)\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], relations: [{src: {entity: 'posts', attrs: [['author']]}, ref: {entity: 'users', attrs: [['id']]}, extra: {line: 5, statement: 2, inline: true}}], extra: {}},
- errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Dash]\n 2. [LowerThan]\n 3. [GreaterThan]\nbut found: ' '", kind: 'NoViableAltException', level: 'error', ...tokenPosition(39, 39, 5, 15, 5, 15)}]
+ errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Dash]\n 2. [LowerThan]\n 3. [GreaterThan]\nbut found: ' '", kind: 'NoViableAltException', level: 'error', ...token(39, 39, 5, 5, 15, 15)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int users(id)\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}, {name: 'id', extra: {line: 5, statement: 3}}], extra: {}},
// TODO handle error better to not generate a fake entity (id)
errors: [
- {message: "Expecting token of type --> NewLine <-- but found --> 'users' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(39, 43, 5, 15, 5, 19)},
- {message: "Expecting token of type --> NewLine <-- but found --> '(' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(44, 44, 5, 20, 5, 20)},
- {message: "Expecting token of type --> NewLine <-- but found --> ')' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(47, 47, 5, 23, 5, 23)}
+ {message: "Expecting token of type --> NewLine <-- but found --> 'users' <--", kind: 'MismatchedTokenException', level: 'error', ...token(39, 43, 5, 5, 15, 19)},
+ {message: "Expecting token of type --> NewLine <-- but found --> '(' <--", kind: 'MismatchedTokenException', level: 'error', ...token(44, 44, 5, 5, 20, 20)},
+ {message: "Expecting token of type --> NewLine <-- but found --> ')' <--", kind: 'MismatchedTokenException', level: 'error', ...token(47, 47, 5, 5, 23, 23)}
]
})
})
@@ -306,19 +306,19 @@ type public.status (pending, wip, done)
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int f\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}, {name: 'f', extra: {line: 5, statement: 3}}], extra: {}},
- errors: [{message: "Expecting token of type --> NewLine <-- but found --> 'f' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(38, 38, 5, 14, 5, 14)}]
+ errors: [{message: "Expecting token of type --> NewLine <-- but found --> 'f' <--", kind: 'MismatchedTokenException', level: 'error', ...token(38, 38, 5, 5, 14, 14)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int fk\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], extra: {}},
errors: [
- {message: "Expecting token of type --> Identifier <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(40, 40, 5, 16, 5, 16)},
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(38, 39, 5, 14, 5, 15)},
+ {message: "Expecting token of type --> Identifier <-- but found --> '\n' <--", kind: 'MismatchedTokenException', level: 'error', ...token(40, 40, 5, 5, 16, 16)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(38, 39, 5, 5, 14, 15)},
]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int fk users\n')).toEqual({
result: {entities: [users, {name: 'posts', attrs: [{name: 'author', type: 'int'}], extra: {line: 4, statement: 2}}], relations: [{src: {entity: 'posts', attrs: [['author']]}, ref: {entity: 'users', attrs: [['id']]}, extra: {line: 5, statement: 2, natural: 'ref', inline: true}}], extra: {}},
// TODO: an error should be reported here
- errors: [{...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(38, 39, 5, 14, 5, 15)}]
+ errors: [{...legacy('"fk" is legacy, replace it with "->"'), ...token(38, 39, 5, 5, 14, 15)}]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int fk users.\n')).toEqual({
result: {
@@ -327,7 +327,7 @@ type public.status (pending, wip, done)
extra: {}
},
errors: [
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(38, 39, 5, 14, 5, 15)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(38, 39, 5, 5, 14, 15)},
]
})
expect(parseAmlTest('users\n id int pk\n\nposts\n author int fk users.id\n')).toEqual({
@@ -337,8 +337,8 @@ type public.status (pending, wip, done)
extra: {}
},
errors: [
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(38, 39, 5, 14, 5, 15)},
- {...legacy('"users.id" is the legacy way, use "users(id)" instead'), ...tokenPosition(41, 48, 5, 17, 5, 24)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(38, 39, 5, 5, 14, 15)},
+ {...legacy('"users.id" is the legacy way, use "users(id)" instead'), ...token(41, 48, 5, 5, 17, 24)},
]
})
})
@@ -429,8 +429,8 @@ talks
extra: {}
},
errors: [
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(117, 118, 10, 15, 10, 16)},
- {...legacy('"users.id" is the legacy way, use "users(id)" instead'), ...tokenPosition(120, 127, 10, 18, 10, 25)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(117, 118, 10, 10, 15, 16)},
+ {...legacy('"users.id" is the legacy way, use "users(id)" instead'), ...token(120, 127, 10, 10, 18, 25)},
]
})
})
@@ -498,15 +498,15 @@ fk admins.id -> public.users.id
extra: {comments: [{line: 7, comment: 'How to define a table and it\'s columns'}]}
},
errors: [
- {...legacy('"=" is legacy, replace it with ":"'), ...tokenPosition(113, 113, 8, 20, 8, 20)},
- {...legacy('"=" is legacy, replace it with ":"'), ...tokenPosition(122, 122, 8, 29, 8, 29)},
- {...legacy('"=" is legacy, replace it with ":"'), ...tokenPosition(131, 131, 8, 38, 8, 38)},
- {...legacy('"=email LIKE \'%@%\'" is the legacy way, use expression instead "(`email LIKE \'%@%\'`)"'), ...tokenPosition(440, 458, 14, 31, 14, 49)},
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(460, 461, 14, 51, 14, 52)},
- {...legacy('"emails.email" is the legacy way, use "emails(email)" instead'), ...tokenPosition(463, 474, 14, 54, 14, 65)},
- {...legacy('"fk" is legacy, replace it with "rel"'), ...tokenPosition(585, 586, 20, 1, 20, 2)},
- {...legacy('"admins.id" is the legacy way, use "admins(id)" instead'), ...tokenPosition(588, 596, 20, 4, 20, 12)},
- {...legacy('"public.users.id" is the legacy way, use "public.users(id)" instead'), ...tokenPosition(601, 615, 20, 17, 20, 31)},
+ {...legacy('"=" is legacy, replace it with ":"'), ...token(113, 113, 8, 8, 20, 20)},
+ {...legacy('"=" is legacy, replace it with ":"'), ...token(122, 122, 8, 8, 29, 29)},
+ {...legacy('"=" is legacy, replace it with ":"'), ...token(131, 131, 8, 8, 38, 38)},
+ {...legacy('"=email LIKE \'%@%\'" is the legacy way, use expression instead "(`email LIKE \'%@%\'`)"'), ...token(440, 458, 14, 14, 31, 49)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(460, 461, 14, 14, 51, 52)},
+ {...legacy('"emails.email" is the legacy way, use "emails(email)" instead'), ...token(463, 474, 14, 14, 54, 65)},
+ {...legacy('"fk" is legacy, replace it with "rel"'), ...token(585, 586, 20, 20, 1, 2)},
+ {...legacy('"admins.id" is the legacy way, use "admins(id)" instead'), ...token(588, 596, 20, 20, 4, 12)},
+ {...legacy('"public.users.id" is the legacy way, use "public.users(id)" instead'), ...token(601, 615, 20, 20, 17, 31)},
]
})
})
@@ -577,12 +577,12 @@ roles
expect(parsed).toEqual({
result: db,
errors: [
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(35, 36, 2, 22, 2, 23)},
- {...legacy('"contacts.id" is the legacy way, use "contacts(id)" instead'), ...tokenPosition(38, 48, 2, 25, 2, 35)},
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(68, 69, 3, 19, 3, 20)},
- {...legacy('"roles.id" is the legacy way, use "roles(id)" instead'), ...tokenPosition(71, 78, 3, 22, 3, 29)},
- {...legacy('"fk" is legacy, replace it with "->"'), ...tokenPosition(182, 183, 12, 28, 12, 29)},
- {...legacy('"contacts.id" is the legacy way, use "contacts(id)" instead'), ...tokenPosition(185, 195, 12, 31, 12, 41)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(35, 36, 2, 2, 22, 23)},
+ {...legacy('"contacts.id" is the legacy way, use "contacts(id)" instead'), ...token(38, 48, 2, 2, 25, 35)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(68, 69, 3, 3, 19, 20)},
+ {...legacy('"roles.id" is the legacy way, use "roles(id)" instead'), ...token(71, 78, 3, 3, 22, 29)},
+ {...legacy('"fk" is legacy, replace it with "->"'), ...token(182, 183, 12, 12, 28, 29)},
+ {...legacy('"contacts.id" is the legacy way, use "contacts(id)" instead'), ...token(185, 195, 12, 12, 31, 41)},
]
})
expect(generateAml(parsed.result || {}, true)).toEqual(input.trim() + '\n')
@@ -663,3 +663,7 @@ function parseAmlTest(aml: string): ParserResult {
.replaceAll(/\n/g, '\\n')
.replaceAll(/,message:'([^"]*?)',position:/g, ',message:"$1",position:'))
}*/
+
+function token(start: number, end: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): TokenPosition {
+ return {offset: {start: start, end: end}, position: {start: {line: lineStart || 1, column: columnStart || start + 1}, end: {line: lineEnd || 1, column: columnEnd || end + 1}}}
+}
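Aside (not part of the diff): the new `token` helper takes lines before columns, i.e. (offsetStart, offsetEnd, lineStart, lineEnd, columnStart, columnEnd), whereas the old `tokenPosition` interleaved them as (start, end, lineStart, columnStart, lineEnd, columnEnd). An illustrative check of the mapping, assuming the helper above is in scope:

expect(token(66, 81, 5, 5, 17, 32)).toEqual({
    offset: {start: 66, end: 81},
    position: {start: {line: 5, column: 17}, end: {line: 5, column: 32}}
})
// With only offsets given, line defaults to 1 and column to offset + 1:
expect(token(2, 4)).toEqual({
    offset: {start: 2, end: 4},
    position: {start: {line: 1, column: 3}, end: {line: 1, column: 5}}
})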
diff --git a/libs/aml/src/amlParser.test.ts b/libs/aml/src/amlParser.test.ts
index 74a0f4261..e81277196 100644
--- a/libs/aml/src/amlParser.test.ts
+++ b/libs/aml/src/amlParser.test.ts
@@ -1,7 +1,17 @@
import {describe, expect, test} from "@jest/globals";
import {removeFieldsDeep} from "@azimutt/utils";
-import {tokenPosition} from "@azimutt/models";
-import {AttributeRelationAst} from "./amlAst";
+import {TokenPosition} from "@azimutt/models";
+import {
+ BooleanAst,
+ CommentAst,
+ DecimalAst,
+ DocAst,
+ ExpressionAst,
+ IdentifierAst,
+ IntegerAst,
+ NullAst,
+ TokenIssue
+} from "./amlAst";
import {nestAttributes, parseAmlAst, parseRule} from "./amlParser";
import {badIndent, legacy} from "./errors";
@@ -15,16 +25,16 @@ users
id uuid pk
name varchar
`
- const ast = [{statement: 'Empty'}, {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(1, 5, 2, 1, 2, 5)},
+ const ast = [{kind: 'Empty'}, {
+ kind: 'Entity',
+ name: identifier('users', 1, 5, 2, 2, 1, 5),
attrs: [{
- path: [{token: 'Identifier', value: 'id', ...tokenPosition(9, 10, 3, 3, 3, 4)}],
- type: {token: 'Identifier', value: 'uuid', ...tokenPosition(12, 15, 3, 6, 3, 9)},
- primaryKey: {keyword: tokenPosition(17, 18, 3, 11, 3, 12)},
+ path: [identifier('id', 9, 10, 3, 3, 3, 4)],
+ type: identifier('uuid', 12, 15, 3, 3, 6, 9),
+ constraints: [{kind: 'PrimaryKey', token: token(17, 18, 3, 3, 11, 12)}],
}, {
- path: [{token: 'Identifier', value: 'name', ...tokenPosition(22, 25, 4, 3, 4, 6)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(27, 33, 4, 8, 4, 14)},
+ path: [identifier('name', 22, 25, 4, 4, 3, 6)],
+ type: identifier('varchar', 27, 33, 4, 4, 8, 14),
}]
}]
expect(parseAmlAst(input, {strict: false})).toEqual({result: ast})
@@ -36,616 +46,611 @@ posts
comments
`
const ast = [
- {statement: 'Empty'},
- {statement: 'Entity', name: {token: 'Identifier', value: 'users', ...tokenPosition(1, 5, 2, 1, 2, 5)}},
- {statement: 'Entity', name: {token: 'Identifier', value: 'posts', ...tokenPosition(7, 11, 3, 1, 3, 5)}},
- {statement: 'Entity', name: {token: 'Identifier', value: 'comments', ...tokenPosition(13, 20, 4, 1, 4, 8)}},
+ {kind: 'Empty'},
+ {kind: 'Entity', name: identifier('users', 1, 5, 2, 2, 1, 5)},
+ {kind: 'Entity', name: identifier('posts', 7, 11, 3, 3, 1, 5)},
+ {kind: 'Entity', name: identifier('comments', 13, 20, 4, 4, 1, 8)},
]
expect(parseAmlAst(input, {strict: false})).toEqual({result: ast})
})
describe('namespaceStatementRule', () => {
test('schema', () => {
expect(parseRule(p => p.namespaceStatementRule(), 'namespace public\n')).toEqual({result: {
- statement: 'Namespace',
+ kind: 'Namespace',
line: 1,
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(10, 15, 1, 11, 1, 16)},
+ schema: identifier('public', 10),
}})
})
test('catalog', () => {
expect(parseRule(p => p.namespaceStatementRule(), 'namespace core.public\n')).toEqual({result: {
- statement: 'Namespace',
+ kind: 'Namespace',
line: 1,
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(10, 13, 1, 11, 1, 14)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(15, 20, 1, 16, 1, 21)},
+ catalog: identifier('core', 10),
+ schema: identifier('public', 15),
}})
})
test('database', () => {
expect(parseRule(p => p.namespaceStatementRule(), 'namespace analytics.core.public\n')).toEqual({result: {
- statement: 'Namespace',
+ kind: 'Namespace',
line: 1,
- database: {token: 'Identifier', value: 'analytics', ...tokenPosition(10, 18, 1, 11, 1, 19)},
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(20, 23, 1, 21, 1, 24)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(25, 30, 1, 26, 1, 31)},
+ database: identifier('analytics', 10),
+ catalog: identifier('core', 20),
+ schema: identifier('public', 25),
}})
})
test('extra', () => {
expect(parseRule(p => p.namespaceStatementRule(), 'namespace public | a note # and a comment\n')).toEqual({result: {
- statement: 'Namespace',
+ kind: 'Namespace',
line: 1,
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(10, 15, 1, 11, 1, 16)},
- doc: {token: 'Doc', value: 'a note', ...tokenPosition(17, 25, 1, 18, 1, 26)},
- comment: {token: 'Comment', value: 'and a comment', ...tokenPosition(26, 40, 1, 27, 1, 41)},
+ schema: identifier('public', 10),
+ doc: doc('a note', 17),
+ comment: comment('and a comment', 26),
}})
})
test('empty catalog', () => {
expect(parseRule(p => p.namespaceStatementRule(), 'namespace analytics..public\n')).toEqual({result: {
- statement: 'Namespace',
+ kind: 'Namespace',
line: 1,
- database: {token: 'Identifier', value: 'analytics', ...tokenPosition(10, 18, 1, 11, 1, 19)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(21, 26, 1, 22, 1, 27)},
+ database: identifier('analytics', 10),
+ schema: identifier('public', 21),
}})
})
})
describe('entityRule', () => {
test('basic', () => {
- expect(parseRule(p => p.entityRule(), 'users\n')).toEqual({result: {statement: 'Entity', name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)}}})
+ expect(parseRule(p => p.entityRule(), 'users\n')).toEqual({result: {kind: 'Entity', name: identifier('users', 0)}})
})
test('namespace', () => {
expect(parseRule(p => p.entityRule(), 'public.users\n')).toEqual({result: {
- statement: 'Entity',
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(0, 5, 1, 1, 1, 6)},
- name: {token: 'Identifier', value: 'users', ...tokenPosition(7, 11, 1, 8, 1, 12)},
+ kind: 'Entity',
+ schema: identifier('public', 0),
+ name: identifier('users', 7),
}})
expect(parseRule(p => p.entityRule(), 'core.public.users\n')).toEqual({result: {
- statement: 'Entity',
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(0, 3, 1, 1, 1, 4)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(5, 10, 1, 6, 1, 11)},
- name: {token: 'Identifier', value: 'users', ...tokenPosition(12, 16, 1, 13, 1, 17)},
+ kind: 'Entity',
+ catalog: identifier('core', 0),
+ schema: identifier('public', 5),
+ name: identifier('users', 12),
}})
expect(parseRule(p => p.entityRule(), 'ax.core.public.users\n')).toEqual({result: {
- statement: 'Entity',
- database: {token: 'Identifier', value: 'ax', ...tokenPosition(0, 1, 1, 1, 1, 2)},
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(3, 6, 1, 4, 1, 7)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(8, 13, 1, 9, 1, 14)},
- name: {token: 'Identifier', value: 'users', ...tokenPosition(15, 19, 1, 16, 1, 20)},
+ kind: 'Entity',
+ database: identifier('ax', 0),
+ catalog: identifier('core', 3),
+ schema: identifier('public', 8),
+ name: identifier('users', 15),
}})
})
test('view', () => {
expect(parseRule(p => p.entityRule(), 'users*\n')).toEqual({result: {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
- view: tokenPosition(5, 5, 1, 6, 1, 6)
+ kind: 'Entity',
+ name: identifier('users', 0),
+ view: token(5, 5)
}})
})
test('alias', () => {
expect(parseRule(p => p.entityRule(), 'users as u\n')).toEqual({result: {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
- alias: {token: 'Identifier', value: 'u', ...tokenPosition(9, 9, 1, 10, 1, 10)},
+ kind: 'Entity',
+ name: identifier('users', 0),
+ alias: identifier('u', 9),
}})
})
test('extra', () => {
expect(parseRule(p => p.entityRule(), 'users {domain: auth} | list users # sample comment\n')).toEqual({result: {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
+ kind: 'Entity',
+ name: identifier('users', 0),
properties: [{
- key: {token: 'Identifier', value: 'domain', ...tokenPosition(7, 12, 1, 8, 1, 13)},
- sep: tokenPosition(13, 13, 1, 14, 1, 14),
- value: {token: 'Identifier', value: 'auth', ...tokenPosition(15, 18, 1, 16, 1, 19)},
+ key: identifier('domain', 7),
+ sep: token(13, 13),
+ value: identifier('auth', 15),
}],
- doc: {token: 'Doc', value: 'list users', ...tokenPosition(21, 33, 1, 22, 1, 34)},
- comment: {token: 'Comment', value: 'sample comment', ...tokenPosition(34, 49, 1, 35, 1, 50)},
+ doc: doc('list users', 21),
+ comment: comment('sample comment', 34),
}})
})
test('attributes', () => {
expect(parseRule(p => p.entityRule(), 'users\n id uuid pk\n name varchar\n')).toEqual({result: {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
+ kind: 'Entity',
+ name: identifier('users', 0),
attrs: [{
- path: [{token: 'Identifier', value: 'id', ...tokenPosition(8, 9, 2, 3, 2, 4)}],
- type: {token: 'Identifier', value: 'uuid', ...tokenPosition(11, 14, 2, 6, 2, 9)},
- primaryKey: {keyword: tokenPosition(16, 17, 2, 11, 2, 12)},
+ path: [identifier('id', 8, 9, 2, 2, 3, 4)],
+ type: identifier('uuid', 11, 14, 2, 2, 6, 9),
+ constraints: [{kind: 'PrimaryKey', token: token(16, 17, 2, 2, 11, 12)}],
}, {
- path: [{token: 'Identifier', value: 'name', ...tokenPosition(21, 24, 3, 3, 3, 6)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(26, 32, 3, 8, 3, 14)},
+ path: [identifier('name', 21, 24, 3, 3, 3, 6)],
+ type: identifier('varchar', 26, 32, 3, 3, 8, 14),
}],
}})
expect(parseRule(p => p.entityRule(), 'users\n id uuid pk\n name json\n first string\n')).toEqual({result: {
- statement: 'Entity',
- name: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
+ kind: 'Entity',
+ name: identifier('users', 0),
attrs: [{
- path: [{token: 'Identifier', value: 'id', ...tokenPosition(8, 9, 2, 3, 2, 4)}],
- type: {token: 'Identifier', value: 'uuid', ...tokenPosition(11, 14, 2, 6, 2, 9)},
- primaryKey: {keyword: tokenPosition(16, 17, 2, 11, 2, 12)},
+ path: [identifier('id', 8, 9, 2, 2, 3, 4)],
+ type: identifier('uuid', 11, 14, 2, 2, 6, 9),
+ constraints: [{kind: 'PrimaryKey', token: token(16, 17, 2, 2, 11, 12)}],
}, {
- path: [{token: 'Identifier', value: 'name', ...tokenPosition(21, 24, 3, 3, 3, 6)}],
- type: {token: 'Identifier', value: 'json', ...tokenPosition(26, 29, 3, 8, 3, 11)},
+ path: [identifier('name', 21, 24, 3, 3, 3, 6)],
+ type: identifier('json', 26, 29, 3, 3, 8, 11),
attrs: [{
- path: [{token: 'Identifier', value: 'name', ...tokenPosition(21, 24, 3, 3, 3, 6)}, {token: 'Identifier', value: 'first', ...tokenPosition(37, 41, 4, 7, 4, 11)}],
- type: {token: 'Identifier', value: 'string', ...tokenPosition(43, 48, 4, 13, 4, 18)},
- warning: {issues: [badIndent(1, 2)], ...tokenPosition(31, 36, 4, 1, 4, 6)}
+ path: [identifier('name', 21, 24, 3, 3, 3, 6), identifier('first', 37, 41, 4, 4, 7, 11)],
+ type: identifier('string', 43, 48, 4, 4, 13, 18),
+ warning: {issues: [badIndent(1, 2)], ...token(31, 36, 4, 4, 1, 6)}
}]
}],
}})
})
describe('attributeRule', () => {
test('name', () => {
- expect(parseRule(p => p.attributeRule(), ' id\n')).toEqual({result: {nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)}, name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)}}})
- expect(parseRule(p => p.attributeRule(), ' "index"\n')).toEqual({result: {nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)}, name: {token: 'Identifier', value: 'index', ...tokenPosition(2, 8, 1, 3, 1, 9)}}})
+ expect(parseRule(p => p.attributeRule(), ' id\n')).toEqual({result: {nesting: {depth: 0, token: token(0, 1)}, name: identifier('id', 2)}})
+ expect(parseRule(p => p.attributeRule(), ' "index"\n')).toEqual({result: {nesting: {depth: 0, token: token(0, 1)}, name: {...identifier('index', 2, 8), quoted: true}}})
+ expect(parseRule(p => p.attributeRule(), ' fk_col\n')).toEqual({result: {nesting: {depth: 0, token: token(0, 1)}, name: identifier('fk_col', 2)}})
})
test('type', () => {
expect(parseRule(p => p.attributeRule(), ' id uuid\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'uuid', ...tokenPosition(5, 8, 1, 6, 1, 9)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: identifier('uuid', 5),
}})
expect(parseRule(p => p.attributeRule(), ' name "varchar(12)"\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'name', ...tokenPosition(2, 5, 1, 3, 1, 6)},
- type: {token: 'Identifier', value: 'varchar(12)', ...tokenPosition(7, 19, 1, 8, 1, 20)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('name', 2),
+ type: {...identifier('varchar(12)', 7, 19), quoted: true},
}})
expect(parseRule(p => p.attributeRule(), ' bio "character varying"\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'bio', ...tokenPosition(2, 4, 1, 3, 1, 5)},
- type: {token: 'Identifier', value: 'character varying', ...tokenPosition(6, 24, 1, 7, 1, 25)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('bio', 2),
+ type: {...identifier('character varying', 6, 24), quoted: true},
}})
expect(parseRule(p => p.attributeRule(), ' id "type"\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'type', ...tokenPosition(5, 10, 1, 6, 1, 11)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: {...identifier('type', 5, 10), quoted: true},
}})
})
test('enum', () => {
expect(parseRule(p => p.attributeRule(), ' status post_status(draft, published, archived)\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'status', ...tokenPosition(2, 7, 1, 3, 1, 8)},
- type: {token: 'Identifier', value: 'post_status', ...tokenPosition(9, 19, 1, 10, 1, 20)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('status', 2),
+ type: identifier('post_status', 9),
enumValues: [
- {token: 'Identifier', value: 'draft', ...tokenPosition(21, 25, 1, 22, 1, 26)},
- {token: 'Identifier', value: 'published', ...tokenPosition(28, 36, 1, 29, 1, 37)},
- {token: 'Identifier', value: 'archived', ...tokenPosition(39, 46, 1, 40, 1, 47)},
+ identifier('draft', 21),
+ identifier('published', 28),
+ identifier('archived', 39),
],
}})
})
test('default', () => {
expect(parseRule(p => p.attributeRule(), ' id int=0\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(5, 7, 1, 6, 1, 8)},
- defaultValue: {token: 'Integer', value: 0, ...tokenPosition(9, 9, 1, 10, 1, 10)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: identifier('int', 5),
+ defaultValue: integer(0, 9),
}})
expect(parseRule(p => p.attributeRule(), ' price decimal=41.9\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'price', ...tokenPosition(2, 6, 1, 3, 1, 7)},
- type: {token: 'Identifier', value: 'decimal', ...tokenPosition(8, 14, 1, 9, 1, 15)},
- defaultValue: {token: 'Decimal', value: 41.9, ...tokenPosition(16, 19, 1, 17, 1, 20)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('price', 2),
+ type: identifier('decimal', 8),
+ defaultValue: decimal(41.9, 16),
}})
expect(parseRule(p => p.attributeRule(), ' role varchar=guest\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'role', ...tokenPosition(2, 5, 1, 3, 1, 6)},
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(7, 13, 1, 8, 1, 14)},
- defaultValue: {token: 'Identifier', value: 'guest', ...tokenPosition(15, 19, 1, 16, 1, 20)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('role', 2),
+ type: identifier('varchar', 7),
+ defaultValue: identifier('guest', 15),
}})
expect(parseRule(p => p.attributeRule(), ' is_admin boolean=false\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'is_admin', ...tokenPosition(2, 9, 1, 3, 1, 10)},
- type: {token: 'Identifier', value: 'boolean', ...tokenPosition(11, 17, 1, 12, 1, 18)},
- defaultValue: {token: 'Boolean', value: false, ...tokenPosition(19, 23, 1, 20, 1, 24)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('is_admin', 2),
+ type: identifier('boolean', 11),
+ defaultValue: boolean(false, 19),
}})
expect(parseRule(p => p.attributeRule(), ' created_at timestamp=`now()`\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'created_at', ...tokenPosition(2, 11, 1, 3, 1, 12)},
- type: {token: 'Identifier', value: 'timestamp', ...tokenPosition(13, 21, 1, 14, 1, 22)},
- defaultValue: {token: 'Expression', value: 'now()', ...tokenPosition(23, 29, 1, 24, 1, 30)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('created_at', 2),
+ type: identifier('timestamp', 13),
+ defaultValue: expression('now()', 23),
}})
expect(parseRule(p => p.attributeRule(), ' source varchar=null\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'source', ...tokenPosition(2, 7, 1, 3, 1, 8)},
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(9, 15, 1, 10, 1, 16)},
- defaultValue: {token: 'Null', ...tokenPosition(17, 20, 1, 18, 1, 21)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('source', 2),
+ type: identifier('varchar', 9),
+ defaultValue: null_(17),
}})
// TODO: handle `[]` default value? Ex: ' tags varchar[]=[]\n' instead of ' tags varchar[]="[]"\n'
// TODO: handle `{}` default value? Ex: ' details json={}\n' instead of ' details json="{}"\n'
})
test('nullable', () => {
expect(parseRule(p => p.attributeRule(), ' id nullable\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- nullable: tokenPosition(5, 12, 1, 6, 1, 13),
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ nullable: token(5, 12),
}})
expect(parseRule(p => p.attributeRule(), ' id int nullable\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(5, 7, 1, 6, 1, 8)},
- nullable: tokenPosition(9, 16, 1, 10, 1, 17),
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: identifier('int', 5),
+ nullable: token(9, 16),
}})
})
test('pk', () => {
expect(parseRule(p => p.attributeRule(), ' id pk\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- primaryKey: {keyword: tokenPosition(5, 6, 1, 6, 1, 7)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'PrimaryKey', token: token(5, 6)}],
}})
expect(parseRule(p => p.attributeRule(), ' id int pk\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(5, 7, 1, 6, 1, 8)},
- primaryKey: {keyword: tokenPosition(9, 10, 1, 10, 1, 11)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: identifier('int', 5),
+ constraints: [{kind: 'PrimaryKey', token: token(9, 10)}],
}})
expect(parseRule(p => p.attributeRule(), ' id int pk=pk_name\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(5, 7, 1, 6, 1, 8)},
- primaryKey: {keyword: tokenPosition(9, 10, 1, 10, 1, 11), name: {token: 'Identifier', value: 'pk_name', ...tokenPosition(12, 18, 1, 13, 1, 19)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ type: identifier('int', 5),
+ constraints: [{kind: 'PrimaryKey', token: token(9, 10), name: identifier('pk_name', 12)}],
}})
})
test('index', () => {
expect(parseRule(p => p.attributeRule(), ' id index\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- index: {keyword: tokenPosition(5, 9, 1, 6, 1, 10)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Index', token: token(5, 9)}],
}})
expect(parseRule(p => p.attributeRule(), ' id index=id_idx\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- index: {keyword: tokenPosition(5, 9, 1, 6, 1, 10), name: {token: 'Identifier', value: 'id_idx', ...tokenPosition(11, 16, 1, 12, 1, 17)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Index', token: token(5, 9), name: identifier('id_idx', 11)}],
}})
expect(parseRule(p => p.attributeRule(), ' id index = "idx \\" id"\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- index: {keyword: tokenPosition(5, 9, 1, 6, 1, 10), name: {token: 'Identifier', value: 'idx " id', ...tokenPosition(13, 23, 1, 14, 1, 24)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Index', token: token(5, 9), name: {...identifier('idx " id', 13, 23), quoted: true}}],
}})
})
test('unique', () => {
expect(parseRule(p => p.attributeRule(), ' id unique\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- unique: {keyword: tokenPosition(5, 10, 1, 6, 1, 11)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Unique', token: token(5, 10)}],
}})
expect(parseRule(p => p.attributeRule(), ' id unique=id_uniq\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- unique: {keyword: tokenPosition(5, 10, 1, 6, 1, 11), name: {token: 'Identifier', value: 'id_uniq', ...tokenPosition(12, 18, 1, 13, 1, 19)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Unique', token: token(5, 10), name: identifier('id_uniq', 12)}],
}})
})
test('check', () => {
expect(parseRule(p => p.attributeRule(), ' id check\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- check: {keyword: tokenPosition(5, 9, 1, 6, 1, 10)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Check', token: token(5, 9)}],
}})
expect(parseRule(p => p.attributeRule(), ' id check=id_chk\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- check: {keyword: tokenPosition(5, 9, 1, 6, 1, 10), name: {token: 'Identifier', value: 'id_chk', ...tokenPosition(11, 16, 1, 12, 1, 17)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Check', token: token(5, 9), name: identifier('id_chk', 11)}],
}})
expect(parseRule(p => p.attributeRule(), ' id check(`id > 0`)\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- check: {keyword: tokenPosition(5, 9, 1, 6, 1, 10), predicate: {token: 'Expression', value: 'id > 0', ...tokenPosition(11, 18, 1, 12, 1, 19)}},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{kind: 'Check', token: token(5, 9), predicate: expression('id > 0', 11)}],
}})
expect(parseRule(p => p.attributeRule(), ' id check(`id > 0`)=id_chk\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- check: {
- keyword: tokenPosition(5, 9, 1, 6, 1, 10),
- predicate: {token: 'Expression', value: 'id > 0', ...tokenPosition(11, 18, 1, 12, 1, 19)},
- name: {token: 'Identifier', value: 'id_chk', ...tokenPosition(21, 26, 1, 22, 1, 27)}
- },
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ constraints: [{
+ kind: 'Check',
+ token: token(5, 9),
+ predicate: expression('id > 0', 11),
+ name: identifier('id_chk', 21)
+ }],
}})
})
test('relation', () => {
expect(parseRule(p => p.attributeRule(), ' user_id -> users(id)\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'user_id', ...tokenPosition(2, 8, 1, 3, 1, 9)},
- relation: {srcCardinality: 'n', refCardinality: '1', ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(13, 17, 1, 14, 1, 18)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(19, 20, 1, 20, 1, 21)}],
- }}
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('user_id', 2),
+ constraints: [{
+ kind: 'Relation',
+ token: token(10, 11),
+ refCardinality: {kind: '1', token: token(10, 10)},
+ srcCardinality: {kind: 'n', token: token(11, 11)},
+ ref: {entity: identifier('users', 13), attrs: [identifier('id', 19)]}
+ }]
}})
expect(parseRule(p => p.attributeRule(), ' user_id -> users\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'user_id', ...tokenPosition(2, 8, 1, 3, 1, 9)},
- relation: {srcCardinality: 'n', refCardinality: '1', ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(13, 17, 1, 14, 1, 18)},
- attrs: [],
- }}
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('user_id', 2),
+ constraints: [{
+ kind: 'Relation',
+ token: token(10, 11),
+ refCardinality: {kind: '1', token: token(10, 10)},
+ srcCardinality: {kind: 'n', token: token(11, 11)},
+ ref: {entity: identifier('users', 13), attrs: []}
+ }]
}})
})
test('properties', () => {
expect(parseRule(p => p.attributeRule(), ' id {tag: pii}\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- properties: [{key: {token: 'Identifier', value: 'tag', ...tokenPosition(6, 8, 1, 7, 1, 9)}, sep: tokenPosition(9, 9, 1, 10, 1, 10), value: {token: 'Identifier', value: 'pii', ...tokenPosition(11, 13, 1, 12, 1, 14)}}],
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ properties: [{key: identifier('tag', 6), sep: token(9, 9), value: identifier('pii', 11)}],
}})
})
test('note', () => {
expect(parseRule(p => p.attributeRule(), ' id | some note\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- doc: {token: 'Doc', value: 'some note', ...tokenPosition(5, 15, 1, 6, 1, 16)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ doc: doc('some note', 5, 15),
}})
})
test('comment', () => {
expect(parseRule(p => p.attributeRule(), ' id # a comment\n')).toEqual({result: {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(2, 3, 1, 3, 1, 4)},
- comment: {token: 'Comment', value: 'a comment', ...tokenPosition(5, 15, 1, 6, 1, 16)},
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 2),
+ comment: comment('a comment', 5),
+ }})
+ })
+ test('several identical constraints', () => {
+ expect(parseRule(p => p.attributeRule(), ' item_id int nullable index index=idx check(`item_id > 0`) check(`item_id < 0`) -kind=users> public.users(id) -kind=posts> posts(id)\n')).toEqual({result: {
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('item_id', 2),
+ type: identifier('int', 10),
+ nullable: token(14, 21),
+ constraints: [
+ {kind: 'Index', token: token(23, 27)},
+ {kind: 'Index', token: token(29, 33), name: identifier('idx', 35)},
+ {kind: 'Check', token: token(39, 43), predicate: expression('item_id > 0', 45)},
+ {kind: 'Check', token: token(60, 64), predicate: expression('item_id < 0', 66)},
+ {
+ kind: 'Relation',
+ token: token(81, 92),
+ refCardinality: {kind: '1', token: token(81, 81)},
+ polymorphic: {attr: identifier('kind', 82), value: identifier('users', 87)},
+ srcCardinality: {kind: 'n', token: token(92, 92)},
+ ref: {schema: identifier('public', 94), entity: identifier('users', 101), attrs: [identifier('id', 107)]}
+ },
+ {
+ kind: 'Relation',
+ token: token(111, 122),
+ refCardinality: {kind: '1', token: token(111, 111)},
+ polymorphic: {attr: identifier('kind', 112), value: identifier('posts', 117)},
+ srcCardinality: {kind: 'n', token: token(122, 122)},
+ ref: {entity: identifier('posts', 124), attrs: [identifier('id', 130)]}
+ }
+ ]
}})
})
test('all', () => {
expect(parseRule(p => p.attributeRule(), ' id int(8, 9, 10)=8 nullable pk unique index=idx check(`id > 0`) -kind=users> public.users(id) { tag : pii , owner:PANDA} | some note # comment\n')).toEqual({result: {
- nesting: {depth: 1, ...tokenPosition(0, 3, 1, 1, 1, 4)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(4, 5, 1, 5, 1, 6)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(7, 9, 1, 8, 1, 10)},
- enumValues: [{value: 8, token: 'Integer', ...tokenPosition(11, 11, 1, 12, 1, 12)}, {value: 9, token: 'Integer', ...tokenPosition(14, 14, 1, 15, 1, 15)}, {value: 10, token: 'Integer', ...tokenPosition(17, 18, 1, 18, 1, 19)}],
- defaultValue: {token: 'Integer', value: 8, ...tokenPosition(21, 21, 1, 22, 1, 22)},
- nullable: tokenPosition(23, 30, 1, 24, 1, 31),
- primaryKey: {keyword: tokenPosition(32, 33, 1, 33, 1, 34)},
- index: {keyword: tokenPosition(42, 46, 1, 43, 1, 47), name: {token: 'Identifier', value: 'idx', ...tokenPosition(48, 50, 1, 49, 1, 51)}},
- unique: {keyword: tokenPosition(35, 40, 1, 36, 1, 41)},
- check: {keyword: tokenPosition(52, 56, 1, 53, 1, 57), predicate: {token: 'Expression', value: 'id > 0', ...tokenPosition(58, 65, 1, 59, 1, 66)}},
- relation: {
- srcCardinality: 'n',
- refCardinality: '1',
- ref: {schema: {token: 'Identifier', value: 'public', ...tokenPosition(81, 86, 1, 82, 1, 87)}, entity: {token: 'Identifier', value: 'users', ...tokenPosition(88, 92, 1, 89, 1, 93)}, attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(94, 95, 1, 95, 1, 96)}]},
- polymorphic: {attr: {token: 'Identifier', value: 'kind', ...tokenPosition(69, 72, 1, 70, 1, 73)}, value: {token: 'Identifier', value: 'users', ...tokenPosition(74, 78, 1, 75, 1, 79)}}
- },
+ nesting: {depth: 1, token: token(0, 3)},
+ name: identifier('id', 4),
+ type: identifier('int', 7),
+ enumValues: [integer(8, 11), integer(9, 14), integer(10, 17)],
+ defaultValue: integer(8, 21),
+ nullable: token(23, 30),
+ constraints: [
+ {kind: 'PrimaryKey', token: token(32, 33)},
+ {kind: 'Unique', token: token(35, 40)},
+ {kind: 'Index', token: token(42, 46), name: identifier('idx', 48)},
+ {kind: 'Check', token: token(52, 56), predicate: expression('id > 0', 58)},
+ {
+ kind: 'Relation',
+ token: token(68, 79),
+ refCardinality: {kind: '1', token: token(68, 68)},
+ polymorphic: {attr: identifier('kind', 69), value: identifier('users', 74)},
+ srcCardinality: {kind: 'n', token: token(79, 79)},
+ ref: {schema: identifier('public', 81), entity: identifier('users', 88), attrs: [identifier('id', 94)]},
+ },
+ ],
properties: [
- {key: {token: 'Identifier', value: 'tag', ...tokenPosition(100, 102, 1, 101, 1, 103)}, sep: tokenPosition(104, 104, 1, 105, 1, 105), value: {token: 'Identifier', value: 'pii', ...tokenPosition(106, 108, 1, 107, 1, 109)}},
- {key: {token: 'Identifier', value: 'owner', ...tokenPosition(112, 116, 1, 113, 1, 117)}, sep: tokenPosition(117, 117, 1, 118, 1, 118), value: {token: 'Identifier', value: 'PANDA', ...tokenPosition(118, 122, 1, 119, 1, 123)}},
+ {key: identifier('tag', 100), sep: token(104, 104), value: identifier('pii', 106)},
+ {key: identifier('owner', 112), sep: token(117, 117), value: identifier('PANDA', 118)},
],
- doc: {token: 'Doc', value: 'some note', ...tokenPosition(125, 136, 1, 126, 1, 137)},
- comment: {token: 'Comment', value: 'comment', ...tokenPosition(137, 145, 1, 138, 1, 146)},
+ doc: doc('some note', 125),
+ comment: comment('comment', 137),
}})
})
test('error', () => {
- expect(parseRule(p => p.attributeRule(), ' 12\n')).toEqual({result: {nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)}}, errors: [{message: "Expecting token of type --> Identifier <-- but found --> '12' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(2, 3, 1, 3, 1, 4)}]})
+ expect(parseRule(p => p.attributeRule(), ' 12\n')).toEqual({result: {nesting: {depth: 0, token: token(0, 1)}}, errors: [{message: "Expecting token of type --> Identifier <-- but found --> '12' <--", kind: 'MismatchedTokenException', level: 'error', ...token(2, 3)}]})
})
})
})
describe('relationRule', () => {
test('basic', () => {
expect(parseRule(p => p.relationRule(), 'rel groups(owner) -> users(id)\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: '1',
- src: {
- entity: {token: 'Identifier', value: 'groups', ...tokenPosition(4, 9, 1, 5, 1, 10)},
- attrs: [{token: 'Identifier', value: 'owner', ...tokenPosition(11, 15, 1, 12, 1, 16)}],
- },
- ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(21, 25, 1, 22, 1, 26)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(27, 28, 1, 28, 1, 29)}],
- },
+ kind: 'Relation',
+ src: {entity: identifier('groups', 4), attrs: [identifier('owner', 11)]},
+ refCardinality: {kind: '1', token: token(18, 18)},
+ srcCardinality: {kind: 'n', token: token(19, 19)},
+ ref: {entity: identifier('users', 21), attrs: [identifier('id', 27)]},
}})
})
test('one-to-one', () => {
expect(parseRule(p => p.relationRule(), 'rel profiles(id) -- users(id)\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: '1',
- refCardinality: '1',
- src: {
- entity: {token: 'Identifier', value: 'profiles', ...tokenPosition(4, 11, 1, 5, 1, 12)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(13, 14, 1, 14, 1, 15)}],
- },
- ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(20, 24, 1, 21, 1, 25)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(26, 27, 1, 27, 1, 28)}],
- },
+ kind: 'Relation',
+ src: {entity: identifier('profiles', 4), attrs: [identifier('id', 13)]},
+ refCardinality: {kind: '1', token: token(17, 17)},
+ srcCardinality: {kind: '1', token: token(18, 18)},
+ ref: {entity: identifier('users', 20), attrs: [identifier('id', 26)]},
}})
})
test('many-to-many', () => {
expect(parseRule(p => p.relationRule(), 'rel groups(id) <> users(id)\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: 'n',
- src: {
- entity: {token: 'Identifier', value: 'groups', ...tokenPosition(4, 9, 1, 5, 1, 10)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(11, 12, 1, 12, 1, 13)}],
- },
- ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(18, 22, 1, 19, 1, 23)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(24, 25, 1, 25, 1, 26)}],
- },
+ kind: 'Relation',
+ src: {entity: identifier('groups', 4), attrs: [identifier('id', 11)]},
+ refCardinality: {kind: 'n', token: token(15, 15)},
+ srcCardinality: {kind: 'n', token: token(16, 16)},
+ ref: {entity: identifier('users', 18), attrs: [identifier('id', 24)]},
}})
})
test('composite', () => {
expect(parseRule(p => p.relationRule(), 'rel audit(user_id, role_id) -> user_roles(user_id, role_id)\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: '1',
- src: {
- entity: {token: 'Identifier', value: 'audit', ...tokenPosition(4, 8, 1, 5, 1, 9)},
- attrs: [
- {token: 'Identifier', value: 'user_id', ...tokenPosition(10, 16, 1, 11, 1, 17)},
- {token: 'Identifier', value: 'role_id', ...tokenPosition(19, 25, 1, 20, 1, 26)},
- ],
- },
- ref: {
- entity: {token: 'Identifier', value: 'user_roles', ...tokenPosition(31, 40, 1, 32, 1, 41)},
- attrs: [
- {token: 'Identifier', value: 'user_id', ...tokenPosition(42, 48, 1, 43, 1, 49)},
- {token: 'Identifier', value: 'role_id', ...tokenPosition(51, 57, 1, 52, 1, 58)},
- ],
- },
+ kind: 'Relation',
+ src: {entity: identifier('audit', 4), attrs: [identifier('user_id', 10), identifier('role_id', 19)]},
+ refCardinality: {kind: '1', token: token(28, 28)},
+ srcCardinality: {kind: 'n', token: token(29, 29)},
+ ref: {entity: identifier('user_roles', 31), attrs: [identifier('user_id', 42), identifier('role_id', 51)]},
}})
})
test('polymorphic', () => {
expect(parseRule(p => p.relationRule(), 'rel events(item_id) -item_kind=User> users(id)\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: '1',
- src: {
- entity: {token: 'Identifier', value: 'events', ...tokenPosition(4, 9, 1, 5, 1, 10)},
- attrs: [{token: 'Identifier', value: 'item_id', ...tokenPosition(11, 17, 1, 12, 1, 18)}],
- },
- ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(37, 41, 1, 38, 1, 42)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(43, 44, 1, 44, 1, 45)}],
- },
- polymorphic: {
- attr: {token: 'Identifier', value: 'item_kind', ...tokenPosition(21, 29, 1, 22, 1, 30)},
- value: {token: 'Identifier', value: 'User', ...tokenPosition(31, 34, 1, 32, 1, 35)},
- }
+ kind: 'Relation',
+ src: {entity: identifier('events', 4), attrs: [identifier('item_id', 11)]},
+ refCardinality: {kind: '1', token: token(20, 20)},
+ polymorphic: {attr: identifier('item_kind', 21), value: identifier('User', 31)},
+ srcCardinality: {kind: 'n', token: token(35, 35)},
+ ref: {entity: identifier('users', 37), attrs: [identifier('id', 43)]}
}})
})
test('extra', () => {
expect(parseRule(p => p.relationRule(), 'rel groups(owner) -> users(id) {color: red} | a note # a comment\n')).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: '1',
- src: {
- entity: {token: 'Identifier', value: 'groups', ...tokenPosition(4, 9, 1, 5, 1, 10)},
- attrs: [{token: 'Identifier', value: 'owner', ...tokenPosition(11, 15, 1, 12, 1, 16)}],
- },
- ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(21, 25, 1, 22, 1, 26)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(27, 28, 1, 28, 1, 29)}],
- },
- properties: [{
- key: {token: 'Identifier', value: 'color', ...tokenPosition(32, 36, 1, 33, 1, 37)},
- sep: tokenPosition(37, 37, 1, 38, 1, 38),
- value: {token: 'Identifier', value: 'red', ...tokenPosition(39, 41, 1, 40, 1, 42)}
- }],
- doc: {token: 'Doc', value: 'a note', ...tokenPosition(44, 52, 1, 45, 1, 53)},
- comment: {token: 'Comment', value: 'a comment', ...tokenPosition(53, 63, 1, 54, 1, 64)},
+ kind: 'Relation',
+ src: {entity: identifier('groups', 4), attrs: [identifier('owner', 11)]},
+ refCardinality: {kind: '1', token: token(18, 18)},
+ srcCardinality: {kind: 'n', token: token(19, 19)},
+ ref: {entity: identifier('users', 21), attrs: [identifier('id', 27)]},
+ properties: [{key: identifier('color', 32), sep: token(37, 37), value: identifier('red', 39)}],
+ doc: doc('a note', 44),
+ comment: comment('a comment', 53),
}})
})
test('bad', () => {
- expect(parseRule(p => p.relationRule(), 'bad')).toEqual({errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Relation]\n 2. [ForeignKey]\nbut found: 'bad'", kind: 'NoViableAltException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
+ expect(parseRule(p => p.relationRule(), 'bad')).toEqual({errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [Relation]\n 2. [ForeignKey]\nbut found: 'bad'", kind: 'NoViableAltException', level: 'error', ...token(0, 2)}]})
})
})
describe('typeRule', () => {
test('empty', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
}})
})
test('alias', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status varchar\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'alias', name: {token: 'Identifier', value: 'varchar', ...tokenPosition(16, 22, 1, 17, 1, 23)}},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Alias', name: identifier('varchar', 16)},
}})
})
test('enum', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status (new, "in progress", done)\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'enum', values: [
- {token: 'Identifier', value: 'new', ...tokenPosition(17, 19, 1, 18, 1, 20)},
- {token: 'Identifier', value: 'in progress', ...tokenPosition(22, 34, 1, 23, 1, 35)},
- {token: 'Identifier', value: 'done', ...tokenPosition(37, 40, 1, 38, 1, 41)},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Enum', values: [
+ identifier('new', 17),
+ {...identifier('in progress', 22, 34), quoted: true},
+ identifier('done', 37),
]}
}})
})
test('struct', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status {internal varchar, public varchar}\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'struct', attrs: [{
- path: [{token: 'Identifier', value: 'internal', ...tokenPosition(17, 24, 1, 18, 1, 25)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(26, 32, 1, 27, 1, 33)},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Struct', attrs: [{
+ path: [identifier('internal', 17)],
+ type: identifier('varchar', 26),
}, {
- path: [{token: 'Identifier', value: 'public', ...tokenPosition(35, 40, 1, 36, 1, 41)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(42, 48, 1, 43, 1, 49)},
+ path: [identifier('public', 35)],
+ type: identifier('varchar', 42),
}]}
}})
// FIXME: would be nice to have this alternative but the $.MANY fails, see `typeRule`
/*expect(parseRule(p => p.typeRule(), 'type bug_status\n internal varchar\n public varchar\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'struct', attrs: [{
- path: [{token: 'Identifier', value: 'internal', ...tokenPosition(18, 25, 2, 3, 2, 10)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(27, 33, 2, 12, 2, 18)},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Struct', attrs: [{
+ path: [identifier('internal', 18, 25, 2, 2, 3, 10)],
+ type: identifier('varchar', 27, 33, 2, 2, 12, 18),
}, {
- path: [{token: 'Identifier', value: 'public', ...tokenPosition(37, 42, 3, 3, 3, 8)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(44, 50, 3, 10, 3, 16)},
+ path: [identifier('public', 37, 42, 3, 3, 3, 8)],
+ type: identifier('varchar', 44, 50, 3, 3, 10, 16),
}]}
}})*/
})
test('custom', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status `range(subtype = float8, subtype_diff = float8mi)`\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'custom', definition: {token: 'Expression', value: 'range(subtype = float8, subtype_diff = float8mi)', ...tokenPosition(16, 65, 1, 17, 1, 66)}}
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Custom', definition: expression('range(subtype = float8, subtype_diff = float8mi)', 16)}
}})
})
test('namespace', () => {
expect(parseRule(p => p.typeRule(), 'type reporting.public.bug_status varchar\n')).toEqual({result: {
- statement: 'Type',
- catalog: {token: 'Identifier', value: 'reporting', ...tokenPosition(5, 13, 1, 6, 1, 14)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(15, 20, 1, 16, 1, 21)},
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(22, 31, 1, 23, 1, 32)},
- content: {kind: 'alias', name: {token: 'Identifier', value: 'varchar', ...tokenPosition(33, 39, 1, 34, 1, 40)}},
+ kind: 'Type',
+ catalog: identifier('reporting', 5),
+ schema: identifier('public', 15),
+ name: identifier('bug_status', 22),
+ content: {kind: 'Alias', name: identifier('varchar', 33)},
}})
})
test('metadata', () => {
expect(parseRule(p => p.typeRule(), 'type bug_status varchar {tags: seo} | a note # a comment\n')).toEqual({result: {
- statement: 'Type',
- name: {token: 'Identifier', value: 'bug_status', ...tokenPosition(5, 14, 1, 6, 1, 15)},
- content: {kind: 'alias', name: {token: 'Identifier', value: 'varchar', ...tokenPosition(16, 22, 1, 17, 1, 23)}},
+ kind: 'Type',
+ name: identifier('bug_status', 5),
+ content: {kind: 'Alias', name: identifier('varchar', 16)},
properties: [{
- key: {token: 'Identifier', value: 'tags', ...tokenPosition(25, 28, 1, 26, 1, 29)},
- sep: tokenPosition(29, 29, 1, 30, 1, 30),
- value: {token: 'Identifier', value: 'seo', ...tokenPosition(31, 33, 1, 32, 1, 34)}
+ key: identifier('tags', 25),
+ sep: token(29, 29),
+ value: identifier('seo', 31)
}],
- doc: {token: 'Doc', value: 'a note', ...tokenPosition(36, 44, 1, 37, 1, 45)},
- comment: {token: 'Comment', value: 'a comment', ...tokenPosition(45, 55, 1, 46, 1, 56)},
+ doc: doc('a note', 36),
+ comment: comment('a comment', 45),
}})
})
// TODO: test bad
})
describe('emptyStatementRule', () => {
- test('basic', () => expect(parseRule(p => p.emptyStatementRule(), '\n')).toEqual({result: {statement: 'Empty'}}))
- test('with spaces', () => expect(parseRule(p => p.emptyStatementRule(), ' \n')).toEqual({result: {statement: 'Empty'}}))
- test('with comment', () => expect(parseRule(p => p.emptyStatementRule(), ' # hello\n')).toEqual({result: {statement: 'Empty', comment: {token: 'Comment', value: 'hello', ...tokenPosition(1, 7, 1, 2, 1, 8)}}}))
+ test('basic', () => expect(parseRule(p => p.emptyStatementRule(), '\n')).toEqual({result: {kind: 'Empty'}}))
+ test('with spaces', () => expect(parseRule(p => p.emptyStatementRule(), ' \n')).toEqual({result: {kind: 'Empty'}}))
+ test('with comment', () => expect(parseRule(p => p.emptyStatementRule(), ' # hello\n')).toEqual({result: {kind: 'Empty', comment: comment('hello', 1)}}))
})
describe('legacy', () => {
test('attribute type', () => {
// as `varchar(12)` is valid on both v1 & v2 but has different meaning, it's handled when building AML, see aml-legacy.test.ts
expect(parseRule(p => p.attributeRule(), ' name varchar(12)\n').result).toEqual({
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'name', ...tokenPosition(2, 5, 1, 3, 1, 6)},
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(7, 13, 1, 8, 1, 14)},
- enumValues: [{token: 'Integer', value: 12, ...tokenPosition(15, 16, 1, 16, 1, 17)}]
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('name', 2),
+ type: identifier('varchar', 7),
+ enumValues: [integer(12, 15)]
})
})
test('attribute relation', () => {
- const v1 = parseRule(p => p.attributeRule(), ' user_id fk users.id\n').result?.relation as AttributeRelationAst
- const v2 = parseRule(p => p.attributeRule(), ' user_id -> users(id)\n').result?.relation
- expect(v1).toEqual({
- srcCardinality: 'n',
- refCardinality: '1',
+ const v1 = parseRule(p => p.attributeRule(), ' user_id fk users.id\n').result?.constraints
+ const v2 = parseRule(p => p.attributeRule(), ' user_id -> users(id)\n').result?.constraints
+ expect(v1).toEqual([{
+ kind: 'Relation',
+ token: token(10, 11),
+ refCardinality: {kind: '1', token: token(10, 11)},
+ srcCardinality: {kind: 'n', token: token(10, 11)},
ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(13, 17, 1, 14, 1, 18)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(19, 20, 1, 20, 1, 21)}],
- warning: {...tokenPosition(13, 20, 1, 14, 1, 21), issues: [legacy('"users.id" is the legacy way, use "users(id)" instead')]}
+ entity: identifier('users', 13),
+ attrs: [identifier('id', 19)],
+ warning: {...token(13, 20), issues: [legacy('"users.id" is the legacy way, use "users(id)" instead')]}
},
- warning: {...tokenPosition(10, 11, 1, 11, 1, 12), issues: [legacy('"fk" is legacy, replace it with "->"')]}
- })
- expect(removeFieldsDeep(v1, ['warning'])).toEqual(v2)
+ warning: {...token(10, 11), issues: [legacy('"fk" is legacy, replace it with "->"')]}
+ }])
+ expect(removeFieldsDeep(v1, ['token', 'warning'])).toEqual(removeFieldsDeep(v2, ['token']))
})
test('standalone relation', () => {
const v1 = parseRule(p => p.relationRule(), 'fk groups.owner -> users.id\n')
const v2 = parseRule(p => p.relationRule(), 'rel groups(owner) -> users(id)\n')
expect(v1).toEqual({result: {
- statement: 'Relation',
- srcCardinality: 'n',
- refCardinality: '1',
+ kind: 'Relation',
src: {
- entity: {token: 'Identifier', value: 'groups', ...tokenPosition(3, 8, 1, 4, 1, 9)},
- attrs: [{token: 'Identifier', value: 'owner', ...tokenPosition(10, 14, 1, 11, 1, 15)}],
- warning: {...tokenPosition(3, 14, 1, 4, 1, 15), issues: [legacy('"groups.owner" is the legacy way, use "groups(owner)" instead')]}
+ entity: identifier('groups', 3),
+ attrs: [identifier('owner', 10)],
+ warning: {...token(3, 14), issues: [legacy('"groups.owner" is the legacy way, use "groups(owner)" instead')]}
},
+ refCardinality: {kind: '1', token: token(16, 16)},
+ srcCardinality: {kind: 'n', token: token(17, 17)},
ref: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(19, 23, 1, 20, 1, 24)},
- attrs: [{token: 'Identifier', value: 'id', ...tokenPosition(25, 26, 1, 26, 1, 27)}],
- warning: {...tokenPosition(19, 26, 1, 20, 1, 27), issues: [legacy('"users.id" is the legacy way, use "users(id)" instead')]}
+ entity: identifier('users', 19),
+ attrs: [identifier('id', 25)],
+ warning: {...token(19, 26), issues: [legacy('"users.id" is the legacy way, use "users(id)" instead')]}
},
- warning: {...tokenPosition(0, 1, 1, 1, 1, 2), issues: [legacy('"fk" is legacy, replace it with "rel"')]}
+ warning: {...token(0, 1), issues: [legacy('"fk" is legacy, replace it with "rel"')]}
}})
expect(removeFieldsDeep(v1, ['offset', 'position', 'warning'])).toEqual(removeFieldsDeep(v2, ['offset', 'position']))
})
@@ -653,9 +658,9 @@ comments
const v1 = parseRule(p => p.attributeRefRule(), 'users.settings:github')
const v2 = parseRule(p => p.attributeRefRule(), 'users(settings.github)')
expect(v1).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
- attr: {token: 'Identifier', value: 'settings', ...tokenPosition(6, 13, 1, 7, 1, 14), path: [{token: 'Identifier', value: 'github', ...tokenPosition(15, 20, 1, 16, 1, 21)}]},
- warning: {...tokenPosition(0, 20, 1, 1, 1, 21), issues: [legacy('"users.settings:github" is the legacy way, use "users(settings.github)" instead')]}
+ entity: identifier('users', 0),
+ attr: {...identifier('settings', 6), path: [identifier('github', 15)]},
+ warning: {...token(0, 20), issues: [legacy('"users.settings:github" is the legacy way, use "users(settings.github)" instead')]}
}})
expect(removeFieldsDeep(v1, ['warning'])).toEqual(v2)
expect(removeFieldsDeep(parseRule(p => p.attributeRefRule(), 'public.users.settings:github'), ['warning'])).toEqual(parseRule(p => p.attributeRefRule(), 'public.users(settings.github)'))
@@ -664,239 +669,269 @@ comments
const v1 = parseRule(p => p.attributeRefCompositeRule(), 'users.settings:github')
const v2 = parseRule(p => p.attributeRefCompositeRule(), 'users(settings.github)')
expect(v1).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
- attrs: [{token: 'Identifier', value: 'settings', ...tokenPosition(6, 13, 1, 7, 1, 14), path: [{token: 'Identifier', value: 'github', ...tokenPosition(15, 20, 1, 16, 1, 21)}]}],
- warning: {...tokenPosition(0, 20, 1, 1, 1, 21), issues: [legacy('"users.settings:github" is the legacy way, use "users(settings.github)" instead')]},
+ entity: identifier('users', 0),
+ attrs: [{...identifier('settings', 6), path: [identifier('github', 15)]}],
+ warning: {...token(0, 20), issues: [legacy('"users.settings:github" is the legacy way, use "users(settings.github)" instead')]},
}})
expect(removeFieldsDeep(v1, ['warning'])).toEqual(v2)
expect(removeFieldsDeep(parseRule(p => p.attributeRefCompositeRule(), 'public.users.settings:github'), ['warning'])).toEqual(parseRule(p => p.attributeRefCompositeRule(), 'public.users(settings.github)'))
})
test('properties', () => {
expect(parseRule(p => p.propertiesRule(), '{color=red}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'color', ...tokenPosition(1, 5, 1, 2, 1, 6)},
- sep: {...tokenPosition(6, 6, 1, 7, 1, 7), issues: [legacy('"=" is legacy, replace it with ":"')]},
- value: {token: 'Identifier', value: 'red', ...tokenPosition(7, 9, 1, 8, 1, 10)},
+ key: identifier('color', 1),
+ sep: {...token(6, 6), issues: [legacy('"=" is legacy, replace it with ":"')]},
+ value: identifier('red', 7),
}]})
})
test('check identifier', () => {
const v1 = parseRule(p => p.attributeRule(), ' age int check="age > 0"\n').result
const v2 = parseRule(p => p.attributeRule(), ' age int check(`age > 0`)\n').result
expect(v1).toEqual({
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {value: 'age', token: 'Identifier', ...tokenPosition(2, 4, 1, 3, 1, 5)},
- type: {value: 'int', token: 'Identifier', ...tokenPosition(6, 8, 1, 7, 1, 9)},
- check: {
- keyword: tokenPosition(10, 14, 1, 11, 1, 15),
- predicate: {value: 'age > 0', token: 'Expression', ...tokenPosition(15, 24, 1, 16, 1, 25), issues: [legacy('"=age > 0" is the legacy way, use expression instead "(`age > 0`)"')]},
- },
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('age', 2),
+ type: identifier('int', 6),
+ constraints: [{
+ kind: 'Check',
+ token: token(10, 14),
+ predicate: expression('age > 0', 15, 24, 1, 1, 16, 25, [legacy('"=age > 0" is the legacy way, use expression instead "(`age > 0`)"')]),
+ }],
})
- expect(removeFieldsDeep(v1, ['issues', 'offset', 'position'])).toEqual(removeFieldsDeep(v2, ['issues', 'offset', 'position']))
+ expect(removeFieldsDeep(v1, ['issues', 'offset', 'position', 'quoted'])).toEqual(removeFieldsDeep(v2, ['issues', 'offset', 'position']))
})
})
describe('common', () => {
test('integerRule', () => {
- expect(parseRule(p => p.integerRule(), '12')).toEqual({result: {token: 'Integer', value: 12, ...tokenPosition(0, 1, 1, 1, 1, 2)}})
- expect(parseRule(p => p.integerRule(), '1.2')).toEqual({errors: [{message: "Expecting token of type --> Integer <-- but found --> '1.2' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
- expect(parseRule(p => p.integerRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Integer <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
+ expect(parseRule(p => p.integerRule(), '12')).toEqual({result: integer(12, 0)})
+ expect(parseRule(p => p.integerRule(), '1.2')).toEqual({errors: [{message: "Expecting token of type --> Integer <-- but found --> '1.2' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 2)}]})
+ expect(parseRule(p => p.integerRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Integer <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 2)}]})
})
test('decimalRule', () => {
- expect(parseRule(p => p.decimalRule(), '1.2')).toEqual({result: {token: 'Decimal', value: 1.2, ...tokenPosition(0, 2, 1, 1, 1, 3)}})
- expect(parseRule(p => p.decimalRule(), '12')).toEqual({errors: [{message: "Expecting token of type --> Decimal <-- but found --> '12' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 1, 1, 1, 1, 2)}]})
- expect(parseRule(p => p.decimalRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Decimal <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
+ expect(parseRule(p => p.decimalRule(), '1.2')).toEqual({result: decimal(1.2, 0)})
+ expect(parseRule(p => p.decimalRule(), '12')).toEqual({errors: [{message: "Expecting token of type --> Decimal <-- but found --> '12' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 1)}]})
+ expect(parseRule(p => p.decimalRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Decimal <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 2)}]})
})
test('identifierRule', () => {
- expect(parseRule(p => p.identifierRule(), 'id')).toEqual({result: {token: 'Identifier', value: 'id', ...tokenPosition(0, 1, 1, 1, 1, 2)}})
- expect(parseRule(p => p.identifierRule(), 'user_id')).toEqual({result: {token: 'Identifier', value: 'user_id', ...tokenPosition(0, 6, 1, 1, 1, 7)}})
- expect(parseRule(p => p.identifierRule(), 'C##INVENTORY')).toEqual({result: {token: 'Identifier', value: 'C##INVENTORY', ...tokenPosition(0, 11, 1, 1, 1, 12)}})
- expect(parseRule(p => p.identifierRule(), '"my col"')).toEqual({result: {token: 'Identifier', value: 'my col', ...tokenPosition(0, 7, 1, 1, 1, 8)}})
- expect(parseRule(p => p.identifierRule(), '"varchar[]"')).toEqual({result: {token: 'Identifier', value: 'varchar[]', ...tokenPosition(0, 10, 1, 1, 1, 11)}})
- expect(parseRule(p => p.identifierRule(), '"my \\"new\\" col"')).toEqual({result: {token: 'Identifier', value: 'my "new" col', ...tokenPosition(0, 15, 1, 1, 1, 16)}})
- expect(parseRule(p => p.identifierRule(), 'bad col')).toEqual({result: {token: 'Identifier', value: 'bad', ...tokenPosition(0, 2, 1, 1, 1, 3)}, errors: [{message: "Redundant input, expecting EOF but found: ", kind: 'NotAllInputParsedException', level: 'error', ...tokenPosition(3, 3, 1, 4, 1, 4)}]})
+ expect(parseRule(p => p.identifierRule(), 'id')).toEqual({result: identifier('id', 0)})
+ expect(parseRule(p => p.identifierRule(), 'user_id')).toEqual({result: identifier('user_id', 0)})
+ expect(parseRule(p => p.identifierRule(), 'C##INVENTORY')).toEqual({result: identifier('C##INVENTORY', 0)})
+ expect(parseRule(p => p.identifierRule(), '"my col"')).toEqual({result: {...identifier('my col', 0, 7), quoted: true}})
+ expect(parseRule(p => p.identifierRule(), '"varchar[]"')).toEqual({result: {...identifier('varchar[]', 0, 10), quoted: true}})
+ expect(parseRule(p => p.identifierRule(), '"my \\"new\\" col"')).toEqual({result: {...identifier('my "new" col', 0, 15), quoted: true}})
+ expect(parseRule(p => p.identifierRule(), 'bad col')).toEqual({result: identifier('bad', 0), errors: [{message: "Redundant input, expecting EOF but found: ", kind: 'NotAllInputParsedException', level: 'error', ...token(3, 3)}]})
})
test('commentRule', () => {
- expect(parseRule(p => p.commentRule(), '# a comment')).toEqual({result: {token: 'Comment', value: 'a comment', ...tokenPosition(0, 10, 1, 1, 1, 11)}})
- expect(parseRule(p => p.commentRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Comment <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
+ expect(parseRule(p => p.commentRule(), '# a comment')).toEqual({result: comment('a comment', 0)})
+ expect(parseRule(p => p.commentRule(), 'bad')).toEqual({errors: [{message: "Expecting token of type --> Comment <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 2)}]})
})
test('noteRule', () => {
- expect(parseRule(p => p.docRule(), '| a note')).toEqual({result: {token: 'Doc', value: 'a note', ...tokenPosition(0, 7, 1, 1, 1, 8)}})
- expect(parseRule(p => p.docRule(), '| "a # note"')).toEqual({result: {token: 'Doc', value: 'a # note', ...tokenPosition(0, 11, 1, 1, 1, 12)}})
- expect(parseRule(p => p.docRule(), '|||\n a note\n multiline\n|||')).toEqual({result: {token: 'Doc', value: 'a note\nmultiline', ...tokenPosition(0, 29, 1, 1, 4, 3)}})
- expect(parseRule(p => p.docRule(), 'bad')).toEqual({errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [DocMultiline]\n 2. [Doc]\nbut found: 'bad'", kind: 'NoViableAltException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)}]})
+ expect(parseRule(p => p.docRule(), '| a note')).toEqual({result: doc('a note', 0, 7)})
+ expect(parseRule(p => p.docRule(), '| "a # note"')).toEqual({result: doc('a # note', 0, 11)})
+ expect(parseRule(p => p.docRule(), '|||\n a note\n multiline\n|||')).toEqual({result: {...doc('a note\nmultiline', 0, 29, 1, 4, 1, 3), multiLine: true}})
+ expect(parseRule(p => p.docRule(), 'bad')).toEqual({errors: [{message: "Expecting: one of these possible Token sequences:\n 1. [DocMultiline]\n 2. [Doc]\nbut found: 'bad'", kind: 'NoViableAltException', level: 'error', ...token(0, 2)}]})
})
test('propertiesRule', () => {
expect(parseRule(p => p.propertiesRule(), '{}')).toEqual({result: []})
- expect(parseRule(p => p.propertiesRule(), '{flag}')).toEqual({result: [{key: {token: 'Identifier', value: 'flag', ...tokenPosition(1, 4, 1, 2, 1, 5)}}]})
+ expect(parseRule(p => p.propertiesRule(), '{flag}')).toEqual({result: [{key: identifier('flag', 1)}]})
expect(parseRule(p => p.propertiesRule(), '{color: red}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'color', ...tokenPosition(1, 5, 1, 2, 1, 6)},
- sep: tokenPosition(6, 6, 1, 7, 1, 7),
- value: {token: 'Identifier', value: 'red', ...tokenPosition(8, 10, 1, 9, 1, 11)}
+ key: identifier('color', 1),
+ sep: token(6, 6),
+ value: identifier('red', 8)
}]})
expect(parseRule(p => p.propertiesRule(), '{size: 12}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'size', ...tokenPosition(1, 4, 1, 2, 1, 5)},
- sep: tokenPosition(5, 5, 1, 6, 1, 6),
- value: {token: 'Integer', value: 12, ...tokenPosition(7, 8, 1, 8, 1, 9)}
+ key: identifier('size', 1),
+ sep: token(5, 5),
+ value: integer(12, 7)
}]})
expect(parseRule(p => p.propertiesRule(), '{tags: []}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'tags', ...tokenPosition(1, 4, 1, 2, 1, 5)},
- sep: tokenPosition(5, 5, 1, 6, 1, 6),
+ key: identifier('tags', 1),
+ sep: token(5, 5),
value: []
}]})
expect(parseRule(p => p.propertiesRule(), '{tags: [pii, deprecated]}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'tags', ...tokenPosition(1, 4, 1, 2, 1, 5)},
- sep: tokenPosition(5, 5, 1, 6, 1, 6),
- value: [{token: 'Identifier', value: 'pii', ...tokenPosition(8, 10, 1, 9, 1, 11)}, {token: 'Identifier', value: 'deprecated', ...tokenPosition(13, 22, 1, 14, 1, 23)}]
+ key: identifier('tags', 1),
+ sep: token(5, 5),
+ value: [identifier('pii', 8), identifier('deprecated', 13)]
}]})
expect(parseRule(p => p.propertiesRule(), '{color:red, size : 12 , deprecated}')).toEqual({result: [{
- key: {token: 'Identifier', value: 'color', ...tokenPosition(1, 5, 1, 2, 1, 6)},
- sep: tokenPosition(6, 6, 1, 7, 1, 7),
- value: {token: 'Identifier', value: 'red', ...tokenPosition(7, 9, 1, 8, 1, 10)}
+ key: identifier('color', 1),
+ sep: token(6, 6),
+ value: identifier('red', 7)
}, {
- key: {token: 'Identifier', value: 'size', ...tokenPosition(12, 15, 1, 13, 1, 16)},
- sep: tokenPosition(17, 17, 1, 18, 1, 18),
- value: {token: 'Integer', value: 12, ...tokenPosition(19, 20, 1, 20, 1, 21)}
+ key: identifier('size', 12),
+ sep: token(17, 17),
+ value: integer(12, 19)
}, {
- key: {token: 'Identifier', value: 'deprecated', ...tokenPosition(24, 33, 1, 25, 1, 34)}
+ key: identifier('deprecated', 24)
}]})
// bad
expect(parseRule(p => p.propertiesRule(), 'bad')).toEqual({errors: [
- {message: "Expecting token of type --> LCurly <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 2, 1, 1, 1, 3)},
- {message: "Expecting token of type --> RCurly <-- but found --> '' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(NaN, -1, -1, -1, -1, -1)},
+ {message: "Expecting token of type --> CurlyLeft <-- but found --> 'bad' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 2)},
+ {message: "Expecting token of type --> CurlyRight <-- but found --> '' <--", kind: 'MismatchedTokenException', level: 'error', ...token(-1, -1, -1, -1, -1, -1)},
]})
- expect(parseRule(p => p.propertiesRule(), '{')).toEqual({errors: [{message: "Expecting token of type --> RCurly <-- but found --> '' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(NaN, -1, -1, -1, -1, -1)}]})
+ expect(parseRule(p => p.propertiesRule(), '{')).toEqual({errors: [{message: "Expecting token of type --> CurlyRight <-- but found --> '' <--", kind: 'MismatchedTokenException', level: 'error', ...token(-1, -1, -1, -1, -1, -1)}]})
})
test('extraRule', () => {
expect(parseRule(p => p.extraRule(), '')).toEqual({result: {}})
expect(parseRule(p => p.extraRule(), '{key: value} | some note # a comment')).toEqual({result: {
properties: [{
- key: {token: 'Identifier', value: 'key', ...tokenPosition(1, 3, 1, 2, 1, 4)},
- sep: tokenPosition(4, 4, 1, 5, 1, 5),
- value: {token: 'Identifier', value: 'value', ...tokenPosition(6, 10, 1, 7, 1, 11)}
+ key: identifier('key', 1),
+ sep: token(4, 4),
+ value: identifier('value', 6)
}],
- doc: {token: 'Doc', value: 'some note', ...tokenPosition(13, 24, 1, 14, 1, 25)},
- comment: {token: 'Comment', value: 'a comment', ...tokenPosition(25, 35, 1, 26, 1, 36)},
+ doc: doc('some note', 13),
+ comment: comment('a comment', 25),
}})
})
test('entityRefRule', () => {
- expect(parseRule(p => p.entityRefRule(), 'users')).toEqual({result: {entity: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)}}})
+ expect(parseRule(p => p.entityRefRule(), 'users')).toEqual({result: {entity: identifier('users', 0)}})
expect(parseRule(p => p.entityRefRule(), 'public.users')).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(7, 11, 1, 8, 1, 12)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(0, 5, 1, 1, 1, 6)},
+ entity: identifier('users', 7),
+ schema: identifier('public', 0),
}})
expect(parseRule(p => p.entityRefRule(), 'core.public.users')).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(12, 16, 1, 13, 1, 17)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(5, 10, 1, 6, 1, 11)},
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(0, 3, 1, 1, 1, 4)},
+ entity: identifier('users', 12),
+ schema: identifier('public', 5),
+ catalog: identifier('core', 0),
}})
expect(parseRule(p => p.entityRefRule(), 'analytics.core.public.users')).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(22, 26, 1, 23, 1, 27)},
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(15, 20, 1, 16, 1, 21)},
- catalog: {token: 'Identifier', value: 'core', ...tokenPosition(10, 13, 1, 11, 1, 14)},
- database: {token: 'Identifier', value: 'analytics', ...tokenPosition(0, 8, 1, 1, 1, 9)},
+ entity: identifier('users', 22),
+ schema: identifier('public', 15),
+ catalog: identifier('core', 10),
+ database: identifier('analytics', 0),
}})
- expect(parseRule(p => p.entityRefRule(), '42')).toEqual({errors: [{message: "Expecting token of type --> Identifier <-- but found --> '42' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 1, 1, 1, 1, 2)}]})
+ expect(parseRule(p => p.entityRefRule(), '42')).toEqual({errors: [{message: "Expecting token of type --> Identifier <-- but found --> '42' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 1)}]})
})
test('columnPathRule', () => {
- expect(parseRule(p => p.attributePathRule(), 'details')).toEqual({result: {token: 'Identifier', value: 'details', ...tokenPosition(0, 6, 1, 1, 1, 7)}})
+ expect(parseRule(p => p.attributePathRule(), 'details')).toEqual({result: identifier('details', 0)})
expect(parseRule(p => p.attributePathRule(), 'details.address.street')).toEqual({result: {
- token: 'Identifier',
- value: 'details',
- ...tokenPosition(0, 6, 1, 1, 1, 7),
- path: [
- {token: 'Identifier', value: 'address', ...tokenPosition(8, 14, 1, 9, 1, 15)},
- {token: 'Identifier', value: 'street', ...tokenPosition(16, 21, 1, 17, 1, 22)}
- ],
+ ...identifier('details', 0),
+ path: [identifier('address', 8), identifier('street', 16)],
}})
- expect(parseRule(p => p.attributePathRule(), '42')).toEqual({errors: [{message: "Expecting token of type --> Identifier <-- but found --> '42' <--", kind: 'MismatchedTokenException', level: 'error', ...tokenPosition(0, 1, 1, 1, 1, 2)}]})
+ expect(parseRule(p => p.attributePathRule(), '42')).toEqual({errors: [{message: "Expecting token of type --> Identifier <-- but found --> '42' <--", kind: 'MismatchedTokenException', level: 'error', ...token(0, 1)}]})
})
test('columnRefRule', () => {
expect(parseRule(p => p.attributeRefRule(), 'users(id)')).toEqual({result: {
- entity: {token: 'Identifier', value: 'users', ...tokenPosition(0, 4, 1, 1, 1, 5)},
- attr: {token: 'Identifier', value: 'id', ...tokenPosition(6, 7, 1, 7, 1, 8)},
+ entity: identifier('users', 0),
+ attr: identifier('id', 6),
}})
expect(parseRule(p => p.attributeRefRule(), 'public.events(details.item_id)')).toEqual({result: {
- schema: {token: 'Identifier', value: 'public', ...tokenPosition(0, 5, 1, 1, 1, 6)},
- entity: {token: 'Identifier', value: 'events', ...tokenPosition(7, 12, 1, 8, 1, 13)},
- attr: {token: 'Identifier', value: 'details', ...tokenPosition(14, 20, 1, 15, 1, 21), path: [{token: 'Identifier', value: 'item_id', ...tokenPosition(22, 28, 1, 23, 1, 29)}]},
+ schema: identifier('public', 0),
+ entity: identifier('events', 7),
+ attr: {...identifier('details', 14), path: [identifier('item_id', 22)]},
}})
})
test('columnRefCompositeRule', () => {
expect(parseRule(p => p.attributeRefCompositeRule(), 'user_roles(user_id, role_id)')).toEqual({result: {
- entity: {token: 'Identifier', value: 'user_roles', ...tokenPosition(0, 9, 1, 1, 1, 10)},
+ entity: identifier('user_roles', 0),
attrs: [
- {token: 'Identifier', value: 'user_id', ...tokenPosition(11, 17, 1, 12, 1, 18)},
- {token: 'Identifier', value: 'role_id', ...tokenPosition(20, 26, 1, 21, 1, 27)},
+ identifier('user_id', 11),
+ identifier('role_id', 20),
],
}})
})
test('columnValueRule', () => {
- expect(parseRule(p => p.attributeValueRule(), '42')).toEqual({result: {token: 'Integer', value: 42, ...tokenPosition(0, 1, 1, 1, 1, 2)}})
- expect(parseRule(p => p.attributeValueRule(), '2.0')).toEqual({result: {token: 'Decimal', value: 2, ...tokenPosition(0, 2, 1, 1, 1, 3)}})
- expect(parseRule(p => p.attributeValueRule(), '3.14')).toEqual({result: {token: 'Decimal', value: 3.14, ...tokenPosition(0, 3, 1, 1, 1, 4)}})
- expect(parseRule(p => p.attributeValueRule(), 'User')).toEqual({result: {token: 'Identifier', value: 'User', ...tokenPosition(0, 3, 1, 1, 1, 4)}})
- expect(parseRule(p => p.attributeValueRule(), '"a user"')).toEqual({result: {token: 'Identifier', value: 'a user', ...tokenPosition(0, 7, 1, 1, 1, 8)}})
+ expect(parseRule(p => p.attributeValueRule(), '42')).toEqual({result: integer(42, 0)})
+ expect(parseRule(p => p.attributeValueRule(), '2.0')).toEqual({result: decimal(2, 0, 2)})
+ expect(parseRule(p => p.attributeValueRule(), '3.14')).toEqual({result: decimal(3.14, 0)})
+ expect(parseRule(p => p.attributeValueRule(), 'User')).toEqual({result: identifier('User', 0)})
+ expect(parseRule(p => p.attributeValueRule(), '"a user"')).toEqual({result: {...identifier('a user', 0, 7), quoted: true}})
})
})
describe('utils', () => {
test('nestAttributes', () => {
expect(nestAttributes([])).toEqual([])
expect(nestAttributes([{
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'id', ...tokenPosition(8, 9, 2, 3, 2, 4)},
- type: {token: 'Identifier', value: 'int', ...tokenPosition(11, 13, 2, 6, 2, 8)},
- primaryKey: {keyword: tokenPosition(15, 16, 2, 10, 2, 11)}
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('id', 8, 9, 2, 2, 3, 4),
+ type: identifier('int', 11, 13, 2, 2, 6, 8),
+ constraints: [{kind: 'PrimaryKey', token: token(15, 16, 2, 2, 10, 11)}]
}, {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'name', ...tokenPosition(20, 23, 3, 3, 3, 6)},
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(25, 31, 3, 8, 3, 14)}
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('name', 20, 23, 3, 3, 3, 6),
+ type: identifier('varchar', 25, 31, 3, 3, 8, 14)
}, {
- nesting: {depth: 0, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)},
- type: {token: 'Identifier', value: 'json', ...tokenPosition(44, 47, 4, 12, 4, 15)}
+ nesting: {depth: 0, token: token(0, 1)},
+ name: identifier('settings', 35, 42, 4, 4, 3, 10),
+ type: identifier('json', 44, 47, 4, 4, 12, 15)
}, {
- nesting: {depth: 1, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'address', ...tokenPosition(53, 59, 5, 5, 5, 11)},
- type: {token: 'Identifier', value: 'json', ...tokenPosition(61, 64, 5, 13, 5, 16)}
+ nesting: {depth: 1, token: token(0, 1)},
+ name: identifier('address', 53, 59, 5, 5, 5, 11),
+ type: identifier('json', 61, 64, 5, 5, 13, 16)
}, {
- nesting: {depth: 2, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'street', ...tokenPosition(72, 77, 6, 7, 6, 12)},
- type: {token: 'Identifier', value: 'string', ...tokenPosition(79, 84, 6, 14, 6, 19)}
+ nesting: {depth: 2, token: token(0, 1)},
+ name: identifier('street', 72, 77, 6, 6, 7, 12),
+ type: identifier('string', 79, 84, 6, 6, 14, 19)
}, {
- nesting: {depth: 2, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'city', ...tokenPosition(92, 95, 7, 7, 7, 10)},
- type: {token: 'Identifier', value: 'string', ...tokenPosition(97, 102, 7, 12, 7, 17)}
+ nesting: {depth: 2, token: token(0, 1)},
+ name: identifier('city', 92, 95, 7, 7, 7, 10),
+ type: identifier('string', 97, 102, 7, 7, 12, 17)
}, {
- nesting: {depth: 1, ...tokenPosition(0, 1, 1, 1, 1, 2)},
- name: {token: 'Identifier', value: 'github', ...tokenPosition(108, 113, 8, 5, 8, 10)},
- type: {token: 'Identifier', value: 'string', ...tokenPosition(115, 120, 8, 12, 8, 17)}
+ nesting: {depth: 1, token: token(0, 1)},
+ name: identifier('github', 108, 113, 8, 8, 5, 10),
+ type: identifier('string', 115, 120, 8, 8, 12, 17)
}])).toEqual([{
- path: [{token: 'Identifier', value: 'id', ...tokenPosition(8, 9, 2, 3, 2, 4)}],
- type: {token: 'Identifier', value: 'int', ...tokenPosition(11, 13, 2, 6, 2, 8)},
- primaryKey: {keyword: tokenPosition(15, 16, 2, 10, 2, 11)},
+ path: [identifier('id', 8, 9, 2, 2, 3, 4)],
+ type: identifier('int', 11, 13, 2, 2, 6, 8),
+ constraints: [{kind: 'PrimaryKey', token: token(15, 16, 2, 2, 10, 11)}],
}, {
- path: [{token: 'Identifier', value: 'name', ...tokenPosition(20, 23, 3, 3, 3, 6)}],
- type: {token: 'Identifier', value: 'varchar', ...tokenPosition(25, 31, 3, 8, 3, 14)},
+ path: [identifier('name', 20, 23, 3, 3, 3, 6)],
+ type: identifier('varchar', 25, 31, 3, 3, 8, 14),
}, {
- path: [{token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)}],
- type: {token: 'Identifier', value: 'json', ...tokenPosition(44, 47, 4, 12, 4, 15)},
+ path: [identifier('settings', 35, 42, 4, 4, 3, 10)],
+ type: identifier('json', 44, 47, 4, 4, 12, 15),
attrs: [{
- path: [{token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)}, {token: 'Identifier', value: 'address', ...tokenPosition(53, 59, 5, 5, 5, 11)}],
- type: {token: 'Identifier', value: 'json', ...tokenPosition(61, 64, 5, 13, 5, 16)},
+ path: [identifier('settings', 35, 42, 4, 4, 3, 10), identifier('address', 53, 59, 5, 5, 5, 11)],
+ type: identifier('json', 61, 64, 5, 5, 13, 16),
attrs: [{
- path: [{token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)}, {token: 'Identifier', value: 'address', ...tokenPosition(53, 59, 5, 5, 5, 11)}, {token: 'Identifier', value: 'street', ...tokenPosition(72, 77, 6, 7, 6, 12)}],
- type: {token: 'Identifier', value: 'string', ...tokenPosition(79, 84, 6, 14, 6, 19)},
+ path: [identifier('settings', 35, 42, 4, 4, 3, 10), identifier('address', 53, 59, 5, 5, 5, 11), identifier('street', 72, 77, 6, 6, 7, 12)],
+ type: identifier('string', 79, 84, 6, 6, 14, 19),
}, {
- path: [{token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)}, {token: 'Identifier', value: 'address', ...tokenPosition(53, 59, 5, 5, 5, 11)}, {token: 'Identifier', value: 'city', ...tokenPosition(92, 95, 7, 7, 7, 10)}],
- type: {token: 'Identifier', value: 'string', ...tokenPosition(97, 102, 7, 12, 7, 17)},
+ path: [identifier('settings', 35, 42, 4, 4, 3, 10), identifier('address', 53, 59, 5, 5, 5, 11), identifier('city', 92, 95, 7, 7, 7, 10)],
+ type: identifier('string', 97, 102, 7, 7, 12, 17),
}]
}, {
- path: [{token: 'Identifier', value: 'settings', ...tokenPosition(35, 42, 4, 3, 4, 10)}, {token: 'Identifier', value: 'github', ...tokenPosition(108, 113, 8, 5, 8, 10)}],
- type: {token: 'Identifier', value: 'string', ...tokenPosition(115, 120, 8, 12, 8, 17)},
+ path: [identifier('settings', 35, 42, 4, 4, 3, 10), identifier('github', 108, 113, 8, 8, 5, 10)],
+ type: identifier('string', 115, 120, 8, 8, 12, 17),
}]
}])
})
- test('tokenPosition has expected structure', () => {
- expect(tokenPosition(1, 2, 3, 4, 5, 6)).toEqual({offset: {start: 1, end: 2}, position: {start: {line: 3, column: 4}, end: {line: 5, column: 6}}})
- })
})
})
+
+function doc(value: string, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): DocAst {
+ return {kind: 'Doc', token: token(start, end || start + value.length + 2, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function comment(value: string, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): CommentAst {
+ return {kind: 'Comment', token: token(start, end || start + value.length + 1, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function expression(value: string, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number, issues?: TokenIssue[]): ExpressionAst {
+ const t = token(start, end || start + value.length + 1, lineStart, lineEnd, columnStart, columnEnd)
+ return {kind: 'Expression', token: issues ? {...t, issues} : t, value}
+}
+
+function identifier(value: string, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): IdentifierAst {
+ return {kind: 'Identifier', token: token(start, end || start + value.length - 1, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function integer(value: number, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): IntegerAst {
+ return {kind: 'Integer', token: token(start, end || start + value.toString().length - 1, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function decimal(value: number, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): DecimalAst {
+ return {kind: 'Decimal', token: token(start, end || start + value.toString().length - 1, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function boolean(value: boolean, start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): BooleanAst {
+ return {kind: 'Boolean', token: token(start, end || start + value.toString().length - 1, lineStart, lineEnd, columnStart, columnEnd), value}
+}
+
+function null_(start: number, end?: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): NullAst {
+ return {kind: 'Null', token: token(start, end || start + 3, lineStart, lineEnd, columnStart, columnEnd)}
+}
+
+function token(start: number, end: number, lineStart?: number, lineEnd?: number, columnStart?: number, columnEnd?: number): TokenPosition {
+ return {offset: {start: start, end: end}, position: {start: {line: lineStart || 1, column: columnStart || start + 1}, end: {line: lineEnd || 1, column: columnEnd || end + 1}}}
+}
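
A quick illustration of the new test builders (a reading sketch, not part of the patch): each helper derives the token position from the value, so the inline literals above collapse into a single call.

// identifier('users', 7), as used in the entityRefRule test, expands to the literal the
// test previously spelled out by hand: `end` defaults to start + value.length - 1 and
// line/column default to line 1.
const expanded: IdentifierAst = {
    kind: 'Identifier',
    value: 'users',
    token: {offset: {start: 7, end: 11}, position: {start: {line: 1, column: 8}, end: {line: 1, column: 12}}},
}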
diff --git a/libs/aml/src/amlParser.ts b/libs/aml/src/amlParser.ts
index f4c6dae99..d0ee4250e 100644
--- a/libs/aml/src/amlParser.ts
+++ b/libs/aml/src/amlParser.ts
@@ -14,7 +14,6 @@ import {
ParserErrorLevel,
ParserResult,
positionStartAdd,
- RelationCardinality,
removeQuotes,
TokenPosition
} from "@azimutt/models";
@@ -24,30 +23,33 @@ import {
AttributeAstNested,
AttributeCheckAst,
AttributeConstraintAst,
- AttributeConstraintsAst,
+ AttributeIndexAst,
AttributePathAst,
+ AttributePkAst,
AttributeRefAst,
AttributeRefCompositeAst,
AttributeRelationAst,
AttributeTypeAst,
+ AttributeUniqueAst,
AttributeValueAst,
- BooleanToken,
- CommentToken,
- DecimalToken,
- DocToken,
+ BooleanAst,
+ CommentAst,
+ DecimalAst,
+ DocAst,
EmptyStatement,
EntityRefAst,
EntityStatement,
- ExpressionToken,
+ ExpressionAst,
ExtraAst,
- IdentifierToken,
- IntegerToken,
+ IdentifierAst,
+ IntegerAst,
NamespaceRefAst,
NamespaceStatement,
- NullToken,
+ NullAst,
PropertiesAst,
PropertyAst,
PropertyValueAst,
+ RelationCardinalityAst,
RelationPolymorphicAst,
RelationStatement,
StatementAst,
@@ -62,53 +64,54 @@ import {
import {badIndent, legacy} from "./errors";
// special
-const WhiteSpace = createToken({name: 'WhiteSpace', pattern: /[ \t]+/})
-const Identifier = createToken({ name: 'Identifier', pattern: /\b[a-zA-Z_][a-zA-Z0-9_#]*\b|"([^\\"]|\\\\|\\"|\\n)*"/ })
-const Expression = createToken({ name: 'Expression', pattern: /`[^`]+`/ })
+const Comment = createToken({ name: 'Comment', pattern: /#[^\n]*/ })
const Doc = createToken({ name: 'Doc', pattern: /\|(\s+"([^\\"]|\\\\|\\")*"|([^ ]#|[^#\n])*)/ }) // # is included in doc if not preceded by a space
const DocMultiline = createToken({ name: 'DocMultiline', pattern: /\|\|\|[^]*?\|\|\|/, line_breaks: true })
-const Comment = createToken({ name: 'Comment', pattern: /#[^\n]*/ })
+const Expression = createToken({ name: 'Expression', pattern: /`[^`]+`/ })
+const Identifier = createToken({ name: 'Identifier', pattern: /\b[a-zA-Z_][a-zA-Z0-9_#]*\b|"([^\\"]|\\\\|\\"|\\n)*"/ })
+const NewLine = createToken({ name: 'NewLine', pattern: /\r?\n/ })
+const WhiteSpace = createToken({name: 'WhiteSpace', pattern: /[ \t]+/})
// values
-const Null = createToken({ name: 'Null', pattern: /null/i })
const Decimal = createToken({ name: 'Decimal', pattern: /\d+\.\d+/ })
const Integer = createToken({ name: 'Integer', pattern: /\d+/, longer_alt: Decimal })
const String = createToken({ name: 'String', pattern: /'([^\\']|\\\\|\\')*'/ })
-const Boolean = createToken({ name: 'Boolean', pattern: /true|false/i, longer_alt: Identifier })
-const valueTokens: TokenType[] = [Integer, Decimal, String, Boolean, Null]
+const valueTokens: TokenType[] = [Integer, Decimal, String]
// keywords
-const Namespace = createToken({ name: 'Namespace', pattern: /namespace/i, longer_alt: Identifier })
-const As = createToken({ name: 'As', pattern: /as/i, longer_alt: Identifier })
-const Nullable = createToken({ name: 'Nullable', pattern: /nullable/i, longer_alt: Identifier })
-const PrimaryKey = createToken({ name: 'PrimaryKey', pattern: /pk/i, longer_alt: Identifier })
-const Index = createToken({ name: 'Index', pattern: /index/i, longer_alt: Identifier })
-const Unique = createToken({ name: 'Unique', pattern: /unique/i, longer_alt: Identifier })
-const Check = createToken({ name: 'Check', pattern: /check/i, longer_alt: Identifier })
-const Relation = createToken({ name: 'Relation', pattern: /rel/i, longer_alt: Identifier })
-const Type = createToken({ name: 'Type', pattern: /type/i, longer_alt: Identifier })
-const keywordTokens: TokenType[] = [Namespace, As, Nullable, PrimaryKey, Index, Unique, Check, Relation, Type]
+const As = createToken({ name: 'As', pattern: /\bas\b/i, longer_alt: Identifier })
+const Check = createToken({ name: 'Check', pattern: /\bcheck\b/i, longer_alt: Identifier })
+const False = createToken({ name: 'False', pattern: /\bfalse\b/i, longer_alt: Identifier })
+const Index = createToken({ name: 'Index', pattern: /\bindex\b/i, longer_alt: Identifier })
+const Namespace = createToken({ name: 'Namespace', pattern: /\bnamespace\b/i, longer_alt: Identifier })
+const Null = createToken({ name: 'Null', pattern: /\bnull\b/i, longer_alt: Identifier })
+const Nullable = createToken({ name: 'Nullable', pattern: /\bnullable\b/i, longer_alt: Identifier })
+const PrimaryKey = createToken({ name: 'PrimaryKey', pattern: /\bpk\b/i, longer_alt: Identifier })
+const Relation = createToken({ name: 'Relation', pattern: /\brel\b/i, longer_alt: Identifier })
+const True = createToken({ name: 'True', pattern: /\btrue\b/i, longer_alt: Identifier })
+const Type = createToken({ name: 'Type', pattern: /\btype\b/i, longer_alt: Identifier })
+const Unique = createToken({ name: 'Unique', pattern: /\bunique\b/i, longer_alt: Identifier })
+const keywordTokens: TokenType[] = [As, Check, False, Index, Namespace, Null, Nullable, PrimaryKey, Relation, True, Type, Unique]
// chars
-const NewLine = createToken({ name: 'NewLine', pattern: /\r?\n/ })
const Asterisk = createToken({ name: 'Asterisk', pattern: /\*/ })
-const Dot = createToken({ name: 'Dot', pattern: /\./ })
-const Comma = createToken({ name: 'Comma', pattern: /,/ })
+const BracketLeft = createToken({ name: 'BracketLeft', pattern: /\[/ })
+const BracketRight = createToken({ name: 'BracketRight', pattern: /]/ })
const Colon = createToken({ name: 'Colon', pattern: /:/ })
-const Equal = createToken({ name: 'Equal', pattern: /=/ })
+const Comma = createToken({ name: 'Comma', pattern: /,/ })
+const CurlyLeft = createToken({ name: 'CurlyLeft', pattern: /\{/ })
+const CurlyRight = createToken({ name: 'CurlyRight', pattern: /}/ })
const Dash = createToken({ name: 'Dash', pattern: /-/ })
+const Dot = createToken({ name: 'Dot', pattern: /\./ })
+const Equal = createToken({ name: 'Equal', pattern: /=/ })
const GreaterThan = createToken({ name: 'GreaterThan', pattern: />/ })
 const LowerThan = createToken({ name: 'LowerThan', pattern: /</ })
-const LParen = createToken({ name: 'LParen', pattern: /\(/ })
-const RParen = createToken({ name: 'RParen', pattern: /\)/ })
-const LBracket = createToken({ name: 'LBracket', pattern: /\[/ })
-const RBracket = createToken({ name: 'RBracket', pattern: /]/ })
-const LCurly = createToken({ name: 'LCurly', pattern: /\{/ })
-const RCurly = createToken({ name: 'RCurly', pattern: /}/ })
-const charTokens: TokenType[] = [Asterisk, Dot, Comma, Colon, Equal, Dash, GreaterThan, LowerThan, LParen, RParen, LBracket, RBracket, LCurly, RCurly]
+const ParenLeft = createToken({ name: 'ParenLeft', pattern: /\(/ })
+const ParenRight = createToken({ name: 'ParenRight', pattern: /\)/ })
+const charTokens: TokenType[] = [Asterisk, BracketLeft, BracketRight, Colon, Comma, CurlyLeft, CurlyRight, Dash, Dot, Equal, GreaterThan, LowerThan, ParenLeft, ParenRight]
// legacy tokens
-const ForeignKey = createToken({ name: 'ForeignKey', pattern: /fk/i })
+const ForeignKey = createToken({ name: 'ForeignKey', pattern: /\bfk\b/i, longer_alt: Identifier })
const legacyTokens: TokenType[] = [ForeignKey]
// token order is important as they are tried in order, so the Identifier must be last
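
To make that ordering constraint concrete, here is a minimal lexing sketch (assuming Chevrotain's standard Lexer API; the printed token stream is illustrative):

import {Lexer} from "chevrotain"

// Keywords are listed before Identifier, so "pk" lexes as PrimaryKey while "pkey"
// still ends up as an Identifier. If Identifier came first in allTokens, every
// keyword would be swallowed by the Identifier pattern.
const lexer = new Lexer(allTokens)
console.log(lexer.tokenize('id uuid pk').tokens.map(t => t.tokenType.name))
// roughly: ['Identifier', 'WhiteSpace', 'Identifier', 'WhiteSpace', 'PrimaryKey']
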
@@ -117,172 +120,292 @@ const allTokens: TokenType[] = [WhiteSpace, NewLine, ...charTokens, ...keywordTo
const defaultPos: number = -1 // used when error position is undefined
class AmlParser extends EmbeddedActionsParser {
- // common
- nullRule: () => NullToken
- decimalRule: () => DecimalToken
- integerRule: () => IntegerToken
- booleanRule: () => BooleanToken
- expressionRule: () => ExpressionToken
- identifierRule: () => IdentifierToken
- docRule: () => DocToken
- commentRule: () => CommentToken
- propertiesRule: () => PropertiesAst
- extraRule: () => ExtraAst
- entityRefRule: () => EntityRefAst
- attributePathRule: () => AttributePathAst
- attributeRefRule: () => AttributeRefAst
- attributeRefCompositeRule: () => AttributeRefCompositeAst
- attributeValueRule: () => AttributeValueAst
-
- // namespace
+ // top level
+ amlRule: () => AmlAst
+ // statements
+ statementRule: () => StatementAst
namespaceStatementRule: () => NamespaceStatement
-
- // entity
- attributeRule: () => AttributeAstFlat
entityRule: () => EntityStatement
-
- // relation
relationRule: () => RelationStatement
-
- // type
typeRule: () => TypeStatement
-
- // general
emptyStatementRule: () => EmptyStatement
- statementRule: () => StatementAst
- amlRule: () => AmlAst
+ // clauses
+ attributeRule: () => AttributeAstFlat
+ // basic parts
+ entityRefRule: () => EntityRefAst
+ attributeRefRule: () => AttributeRefAst
+ attributeRefCompositeRule: () => AttributeRefCompositeAst
+ attributePathRule: () => AttributePathAst
+ attributeValueRule: () => AttributeValueAst
+ extraRule: () => ExtraAst
+ propertiesRule: () => PropertiesAst
+ docRule: () => DocAst
+ commentRule: () => CommentAst
+ // elements
+ expressionRule: () => ExpressionAst
+ identifierRule: () => IdentifierAst
+ integerRule: () => IntegerAst
+ decimalRule: () => DecimalAst
+ booleanRule: () => BooleanAst
+ nullRule: () => NullAst
constructor(tokens: TokenType[], recovery: boolean) {
super(tokens, {recoveryEnabled: recovery})
const $ = this
- // common rules
- this.nullRule = $.RULE<() => NullToken>('nullRule', () => {
- const token = $.CONSUME(Null)
- return {token: 'Null', ...tokenPosition(token)}
- })
+ // statements
- this.decimalRule = $.RULE<() => DecimalToken>('decimalRule', () => {
- const token = $.CONSUME(Decimal)
- return {token: 'Decimal', value: parseFloat(token.image), ...tokenPosition(token)}
+ this.amlRule = $.RULE<() => AmlAst>('amlRule', () => {
+ let stmts: StatementAst[] = []
+ $.MANY(() => stmts.push($.SUBRULE($.statementRule)))
+ return stmts.filter(isNotUndefined) // can be undefined on invalid input :/
})
- this.integerRule = $.RULE<() => IntegerToken>('integerRule', () => {
- const token = $.CONSUME(Integer)
- return {token: 'Integer', value: parseInt(token.image), ...tokenPosition(token)}
- })
+ this.statementRule = $.RULE<() => StatementAst>('statementRule', () => $.OR([
+ {ALT: () => $.SUBRULE($.namespaceStatementRule)},
+ {ALT: () => $.SUBRULE($.entityRule)},
+ {ALT: () => $.SUBRULE($.relationRule)},
+ {ALT: () => $.SUBRULE($.typeRule)},
+ {ALT: () => $.SUBRULE($.emptyStatementRule)},
+ ]))
- this.booleanRule = $.RULE<() => BooleanToken>('booleanRule', () => {
- const token = $.CONSUME(Boolean)
- return {token: 'Boolean', value: token.image.toLowerCase() === 'true', ...tokenPosition(token)}
+ this.namespaceStatementRule = $.RULE<() => NamespaceStatement>('namespaceStatementRule', () => {
+ const keyword = $.CONSUME(Namespace)
+ $.SUBRULE(whitespaceRule)
+ const namespace = $.OPTION(() => $.SUBRULE(namespaceRule)) || {}
+ const extra = $.SUBRULE($.extraRule)
+ $.CONSUME(NewLine)
+ return {kind: 'Namespace', line: keyword.startLine || defaultPos, ...namespace, ...extra}
})
- this.expressionRule = $.RULE<() => ExpressionToken>('expressionRule', () => {
- const token = $.CONSUME(Expression)
- return {token: 'Expression', value: token.image.slice(1, -1), ...tokenPosition(token)}
+ this.entityRule = $.RULE<() => EntityStatement>('entityRule', () => {
+ const {entity, ...namespace} = $.SUBRULE($.entityRefRule)
+ const view = $.OPTION(() => $.CONSUME(Asterisk))
+ $.SUBRULE(whitespaceRule)
+ const alias = $.OPTION2(() => {
+ $.CONSUME(As)
+ $.CONSUME(WhiteSpace)
+ return $.SUBRULE($.identifierRule)
+ })
+ $.SUBRULE2(whitespaceRule)
+ const extra = $.SUBRULE($.extraRule)
+ $.CONSUME(NewLine)
+ const attrs: AttributeAstFlat[] = []
+ $.MANY(() => {
+ const attr = $.SUBRULE($.attributeRule)
+ if (attr?.name?.value) attrs.push(attr) // name can be '' on invalid input :/
+ })
+ return removeEmpty({kind: 'Entity' as const, name: entity, view: view ? tokenInfo(view) : undefined, ...namespace, alias, ...extra, attrs: nestAttributes(attrs)})
})
- this.identifierRule = $.RULE<() => IdentifierToken>('identifierRule', () => {
- const token = $.CONSUME(Identifier)
- if (token.image.startsWith('"')) {
- return {token: 'Identifier', value: token.image.slice(1, -1).replaceAll(/\\"/g, '"'), ...tokenPosition(token)}
- } else {
- return {token: 'Identifier', value: token.image, ...tokenPosition(token)}
- }
+ this.relationRule = $.RULE<() => RelationStatement>('relationRule', () => {
+ const warning = $.OR([
+ {ALT: () => {$.CONSUME(Relation); return undefined}},
+ {ALT: () => tokenInfo($.CONSUME(ForeignKey), [legacy('"fk" is legacy, replace it with "rel"')])}
+ ])
+ $.CONSUME(WhiteSpace)
+ const src = $.SUBRULE($.attributeRefCompositeRule)
+ $.SUBRULE(whitespaceRule)
+ const {ref, srcCardinality, refCardinality, polymorphic} = $.SUBRULE(attributeRelationRule) || {} // returns undefined on invalid input :/
+ $.SUBRULE2(whitespaceRule)
+ const extra = $.SUBRULE($.extraRule)
+ $.CONSUME(NewLine)
+ return removeUndefined({kind: 'Relation' as const, src, ref, srcCardinality, refCardinality, polymorphic, ...extra, warning})
})
- this.docRule = $.RULE<() => DocToken>('docRule', () => {
- return $.OR([{
- ALT: () => {
- const token = $.CONSUME(DocMultiline)
- return {token: 'Doc', value: stripIndent(token.image.slice(3, -3)), ...tokenPosition(token)}
- }
- }, {
- ALT: () => {
- const token = $.CONSUME(Doc)
- return {token: 'Doc', value: removeQuotes(token.image.slice(1).trim().replaceAll(/\\#/g, '#')), ...tokenPosition(token)}
- }
- }])
+ this.typeRule = $.RULE<() => TypeStatement>('typeRule', () => {
+ $.CONSUME(Type)
+ $.CONSUME(WhiteSpace)
+ const {entity, ...namespace} = $.SUBRULE(this.entityRefRule) || {} // returns undefined on invalid input :/
+ $.SUBRULE(whitespaceRule)
+ const content = $.OPTION(() => $.OR([
+ {ALT: () => $.SUBRULE(typeEnumRule)},
+ {ALT: () => $.SUBRULE(typeStructRule)},
+ {ALT: () => $.SUBRULE(typeCustomRule)},
+ {ALT: () => $.SUBRULE(typeAliasRule)},
+ ]))
+ $.SUBRULE2(whitespaceRule)
+ const extra = $.SUBRULE($.extraRule)
+ $.CONSUME(NewLine)
+ /* if (content === undefined) {
+ const attrs: AttributeAstFlat[] = []
+ // FIXME: $.MANY fails with `TypeError: Cannot read properties of undefined (reading 'call')` at recognizer_engine.ts:517:30 (manyInternalLogic), before calling the callback, no idea why :/
+ $.MANY(() => attrs.push($.SUBRULE($.attributeRule)))
+ if (attrs.length > 0) content = {kind: 'struct', attrs: nestAttributes(attrs)}
+ } */
+ return {kind: 'Type', ...namespace, name: entity, content, ...extra}
})
- this.commentRule = $.RULE<() => CommentToken>('commentRule', () => {
- const token = $.CONSUME(Comment)
- return {token: 'Comment', value: token.image.slice(1).trim(), ...tokenPosition(token)}
+ this.emptyStatementRule = $.RULE<() => EmptyStatement>('emptyStatementRule', () => {
+ $.SUBRULE(whitespaceRule)
+ const comment = $.OPTION(() => $.SUBRULE($.commentRule))
+ $.CONSUME(NewLine)
+ return removeUndefined({kind: 'Empty' as const, comment})
})
- const propertyValueRule = $.RULE<() => PropertyValueAst>('propertyValueRule', () => {
- return $.OR([
- { ALT: () => $.SUBRULE($.nullRule) },
- { ALT: () => $.SUBRULE($.decimalRule) },
- { ALT: () => $.SUBRULE($.integerRule) },
- { ALT: () => $.SUBRULE($.booleanRule) },
- { ALT: () => $.SUBRULE($.expressionRule) },
- { ALT: () => $.SUBRULE($.identifierRule) },
- { ALT: () => {
- $.CONSUME(LBracket)
- const values: PropertyValueAst[] = []
- $.MANY_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const value = $.SUBRULE(propertyValueRule)
- if (value) values.push(value) // on invalid input, `value` can be undefined :/
- $.OPTION2(() => $.CONSUME2(WhiteSpace))
- }
- })
- $.CONSUME(RBracket)
- return values
+ // clauses
+
+ this.attributeRule = $.RULE<() => AttributeAstFlat>('attributeRule', () => {
+ const spaces = $.CONSUME(WhiteSpace)
+ const depth = Math.round(spaces.image.split('').reduce((i, c) => c === '\t' ? i + 1 : i + 0.5, 0)) - 1
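+        // e.g. one tab or two spaces => depth 0, two tabs or four spaces => depth 1 (a tab counts 1, a space 0.5)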
+ const nesting = {token: tokenInfo(spaces), depth}
+ const attr = $.SUBRULE(attributeInnerRule)
+ $.SUBRULE(whitespaceRule)
+ const extra = $.SUBRULE($.extraRule)
+ $.CONSUME(NewLine)
+ return removeUndefined({...attr, nesting, ...extra})
+ })
+ const attributeInnerRule = $.RULE<() => AttributeAstFlat>('attributeInnerRule', () => {
+ const name = $.SUBRULE($.identifierRule)
+ $.SUBRULE(whitespaceRule)
+ const {type, enumValues, defaultValue} = $.SUBRULE(attributeTypeRule) || {} // returns undefined on invalid input :/
+ $.SUBRULE2(whitespaceRule)
+ const nullable = $.OPTION(() => $.CONSUME(Nullable))
+ $.SUBRULE3(whitespaceRule)
+ const constraints: AttributeConstraintAst[] = []
+ $.MANY({DEF: () => constraints.push($.SUBRULE(attributeConstraintRule))})
+ const nesting = {token: {offset: {start: 0, end: 0}, position: {start: {line: 0, column: 0}, end: {line: 0, column: 0}}}, depth: 0} // unused placeholder
+ return removeEmpty({nesting, name, type, enumValues, defaultValue, nullable: nullable ? tokenInfo(nullable) : undefined, constraints: constraints.filter(isNotUndefined)})
+ }, {resyncEnabled: true})
+ const attributeTypeRule = $.RULE<() => AttributeTypeAst>('attributeTypeRule', () => {
+ const res = $.OPTION(() => {
+ const type = $.SUBRULE($.identifierRule)
+ const enumValues = $.OPTION2(() => {
+ $.CONSUME(ParenLeft)
+ const values: AttributeValueAst[] = []
+ $.AT_LEAST_ONE_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ values.push($.SUBRULE($.attributeValueRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(ParenRight)
+ return values.filter(isNotUndefined) // can be undefined on invalid input :/
+ })
+ const defaultValue = $.OPTION3(() => {
+ $.CONSUME(Equal)
+ return $.SUBRULE2($.attributeValueRule)
+ })
+ return {type, enumValues, defaultValue}
+ })
+ return {type: res?.type, enumValues: res?.enumValues, defaultValue: res?.defaultValue}
+ })
+ const attributeConstraintRule = $.RULE<() => AttributeConstraintAst>('attributeConstraintRule', () => $.OR([
+ {ALT: () => $.SUBRULE(attributePkRule)},
+ {ALT: () => $.SUBRULE(attributeUniqueRule)},
+ {ALT: () => $.SUBRULE(attributeIndexRule)},
+ {ALT: () => $.SUBRULE(attributeCheckRule)},
+ {ALT: () => $.SUBRULE(attributeRelationRule)},
+ ]))
+ const attributePkRule = $.RULE<() => AttributePkAst>('attributePkRule', () => {
+ const token = $.CONSUME(PrimaryKey)
+ $.SUBRULE(whitespaceRule)
+ const name = $.SUBRULE(attributeConstraintNameRule)
+ return removeUndefined({kind: 'PrimaryKey' as const, token: tokenInfo(token), name})
+ })
+ const attributeUniqueRule = $.RULE<() => AttributeUniqueAst>('attributeUniqueRule', () => {
+ const token = $.CONSUME(Unique)
+ $.SUBRULE(whitespaceRule)
+ const name = $.SUBRULE(attributeConstraintNameRule)
+ return removeUndefined({kind: 'Unique' as const, token: tokenInfo(token), name})
+ })
+ const attributeIndexRule = $.RULE<() => AttributeIndexAst>('attributeIndexRule', () => {
+ const token = $.CONSUME(Index)
+ $.SUBRULE(whitespaceRule)
+ const name = $.SUBRULE(attributeConstraintNameRule)
+ return removeUndefined({kind: 'Index' as const, token: tokenInfo(token), name})
+ })
+ const attributeCheckRule = $.RULE<() => AttributeCheckAst>('attributeCheckRule', () => {
+ const token = $.CONSUME(Check)
+ $.SUBRULE(whitespaceRule)
+ const predicate = $.OPTION(() => {
+ $.CONSUME(ParenLeft)
+ const res = $.SUBRULE($.expressionRule)
+ $.CONSUME(ParenRight)
+ $.SUBRULE2(whitespaceRule)
+ return res
+ })
+ const name = $.SUBRULE(attributeConstraintNameRule)
+ if (!predicate && name && [' ', '<', '>', '=', 'IN'].some(c => name.value.includes(c))) {
+            // no definition and a name that looks like a predicate => switch to the legacy syntax (the predicate was in the name)
+ const def: ExpressionAst = {kind: 'Expression' as const, token: {...positionStartAdd(name.token, -1), issues: [legacy(`"=${name.value}" is the legacy way, use expression instead "(\`${name.value}\`)"`)]}, value: name.value}
+ return removeUndefined({kind: 'Check' as const, token: tokenInfo(token), predicate: def})
+ } else {
+ return removeUndefined({kind: 'Check' as const, token: tokenInfo(token), predicate, name})
+ }
+ })
+ const attributeRelationRule = $.RULE<() => AttributeRelationAst>('attributeRelationRule', () => {
+ const {token, srcCardinality, refCardinality, polymorphic, warning} = $.OR([
+ {ALT: () => {
+ const refCardinality = $.SUBRULE(relationCardinalityRule)
+ const polymorphic = $.OPTION(() => $.SUBRULE(relationPolymorphicRule))
+ const srcCardinality = $.SUBRULE2(relationCardinalityRule)
+ const token = mergePositions([refCardinality?.token, srcCardinality?.token])
+ return {token, refCardinality, polymorphic, srcCardinality, warning: undefined}
}},
+ {ALT: () => {
+ const token = tokenInfo($.CONSUME(ForeignKey))
+ return {token, srcCardinality: {kind: 'n' as const, token}, refCardinality: {kind: '1' as const, token}, polymorphic: undefined, warning: {...token, issues: [legacy('"fk" is legacy, replace it with "->"')]}}
+ }}
])
+ $.SUBRULE(whitespaceRule)
+ const ref = $.SUBRULE($.attributeRefCompositeRule)
+ $.SUBRULE2(whitespaceRule)
+ return removeUndefined({kind: 'Relation' as const, token, srcCardinality, refCardinality, polymorphic, ref, warning})
})
- const propertyRule = $.RULE<() => PropertyAst>('propertyRule', () => {
- const key = $.SUBRULE($.identifierRule)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const value = $.OPTION2(() => {
- const sep = $.OR([
- {ALT: () => tokenInfo($.CONSUME(Colon)) },
- {ALT: () => tokenInfoLegacy($.CONSUME(Equal), '"=" is legacy, replace it with ":"') },
- ])
- $.OPTION3(() => $.CONSUME2(WhiteSpace))
- return {sep, value: $.SUBRULE(propertyValueRule)}
+ const attributeConstraintNameRule = $.RULE<() => IdentifierAst | undefined>('attributeConstraintNameRule', () => {
+ return $.OPTION(() => {
+ $.CONSUME(Equal)
+ $.SUBRULE2(whitespaceRule)
+ const res = $.SUBRULE($.identifierRule)
+ $.SUBRULE3(whitespaceRule)
+ return res
})
- return {key, ...value}
})
- this.propertiesRule = $.RULE<() => PropertiesAst>('propertiesRule', () => {
- const props: PropertiesAst = []
- $.CONSUME(LCurly)
- $.MANY_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- props.push($.SUBRULE(propertyRule))
- $.OPTION2(() => $.CONSUME2(WhiteSpace))
- }
- })
- $.CONSUME(RCurly)
- return props.filter(p => p !== undefined) // can be undefined on invalid input :/
+
+ const relationCardinalityRule = $.RULE<() => RelationCardinalityAst>('relationCardinalityRule', () => $.OR([
+ {ALT: () => ({kind: '1' as const, token: tokenInfo($.CONSUME(Dash))})},
+ {ALT: () => ({kind: 'n' as const, token: tokenInfo($.CONSUME(LowerThan))})},
+ {ALT: () => ({kind: 'n' as const, token: tokenInfo($.CONSUME(GreaterThan))})},
+ ]))
+ const relationPolymorphicRule = $.RULE<() => RelationPolymorphicAst>('relationPolymorphicRule', () => {
+ const attr = $.SUBRULE($.attributePathRule)
+ $.CONSUME(Equal)
+ const value = $.SUBRULE($.attributeValueRule)
+ return {attr, value}
})
- this.extraRule = $.RULE<() => ExtraAst>('extraRule', () => {
- const properties = $.OPTION(() => $.SUBRULE($.propertiesRule))
- $.OPTION2(() => $.CONSUME(WhiteSpace))
- const doc = $.OPTION3(() => $.SUBRULE2($.docRule))
- $.OPTION4(() => $.CONSUME2(WhiteSpace))
- const comment = $.OPTION5(() => $.SUBRULE3($.commentRule))
- return removeUndefined({properties, doc, comment})
+ const typeAliasRule = $.RULE<() => TypeAliasAst>('typeAliasRule', () => ({kind: 'Alias', name: $.SUBRULE($.identifierRule)}))
+ const typeEnumRule = $.RULE<() => TypeEnumAst>('typeEnumRule', () => {
+ $.CONSUME(ParenLeft)
+ const values: AttributeValueAst[] = []
+ $.MANY_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ values.push($.SUBRULE($.attributeValueRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(ParenRight)
+ return {kind: 'Enum', values}
})
+ const typeStructRule = $.RULE<() => TypeStructAst>('typeStructRule', () => {
+ $.CONSUME(CurlyLeft)
+ const attrs: AttributeAstFlat[] = []
+ $.MANY_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ attrs.push($.SUBRULE(attributeInnerRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(CurlyRight)
+ return {kind: 'Struct', attrs: nestAttributes(attrs)}
+ })
+ const typeCustomRule = $.RULE<() => TypeCustomAst>('typeCustomRule', () => ({kind: 'Custom', definition: $.SUBRULE($.expressionRule)}))
+
+ // basic parts
const namespaceRule = $.RULE<() => NamespaceRefAst>('namespaceRule', () => {
const first = $.SUBRULE($.identifierRule)
- const second = $.OPTION3(() => {
- const dot = $.CONSUME(Dot)
- return {dot, id: $.OPTION4(() => $.SUBRULE2($.identifierRule))}
- })
- const third = $.OPTION5(() => {
- const dot = $.CONSUME2(Dot)
- return {dot, id: $.OPTION6(() => $.SUBRULE3($.identifierRule))}
- })
- $.OPTION7(() => $.CONSUME(WhiteSpace))
+ const second = $.OPTION(() => ({dot: $.CONSUME(Dot), id: $.OPTION2(() => $.SUBRULE2($.identifierRule))}))
+ const third = $.OPTION3(() => ({dot: $.CONSUME2(Dot), id: $.OPTION4(() => $.SUBRULE3($.identifierRule))}))
+ $.SUBRULE(whitespaceRule)
if (second && third) return removeUndefined({database: first, catalog: second.id, schema: third.id})
if (second) return removeUndefined({catalog: first, schema: second.id})
return {schema: first}
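
As a reading aid (not from the patch), the cascade above always assigns the last identifier to the schema and the earlier ones outward:

// namespace public                 => {schema: public}
// namespace core.public            => {catalog: core, schema: public}
// namespace analytics.core.public  => {database: analytics, catalog: core, schema: public}
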
@@ -290,394 +413,202 @@ class AmlParser extends EmbeddedActionsParser {
this.entityRefRule = $.RULE<() => EntityRefAst>('entityRefRule', () => {
const first = $.SUBRULE($.identifierRule)
- const second = $.OPTION3(() => {
- const dot = $.CONSUME(Dot)
- return {dot, id: $.OPTION4(() => $.SUBRULE2($.identifierRule))}
- })
- const third = $.OPTION5(() => {
- const dot = $.CONSUME2(Dot)
- return {dot, id: $.OPTION6(() => $.SUBRULE3($.identifierRule))}
- })
- const fourth = $.OPTION7(() => {
- const dot = $.CONSUME3(Dot)
- return {dot, id: $.OPTION8(() => $.SUBRULE4($.identifierRule))}
- })
- $.OPTION9(() => $.CONSUME(WhiteSpace))
+ const second = $.OPTION(() => ({dot: $.CONSUME(Dot), id: $.OPTION2(() => $.SUBRULE2($.identifierRule))}))
+ const third = $.OPTION3(() => ({dot: $.CONSUME2(Dot), id: $.OPTION4(() => $.SUBRULE3($.identifierRule))}))
+ const fourth = $.OPTION5(() => ({dot: $.CONSUME3(Dot), id: $.OPTION6(() => $.SUBRULE4($.identifierRule))}))
+ $.SUBRULE(whitespaceRule)
if (second && third && fourth && fourth.id) return removeUndefined({database: first, catalog: second.id, schema: third.id, entity: fourth.id})
if (second && third && third.id) return removeUndefined({catalog: first, schema: second.id, entity: third.id})
if (second && second.id) return removeUndefined({schema: first, entity: second.id})
return {entity: first}
})
- this.attributePathRule = $.RULE<() => AttributePathAst>('attributePathRule', () => {
- const attr = $.SUBRULE($.identifierRule)
- const path: IdentifierToken[] = []
- $.MANY(() => {
- $.CONSUME(Dot)
- path.push($.SUBRULE2($.identifierRule))
- })
- return removeEmpty({...attr, path})
- })
-
- const legacyAttributePathRule = $.RULE<() => IdentifierToken[]>('legacyAttributePathRule', () => {
- const path: IdentifierToken[] = []
- $.MANY(() => {
- $.CONSUME(Colon)
- path.push($.SUBRULE($.identifierRule))
- })
- return path
- })
-
this.attributeRefRule = $.RULE<() => AttributeRefAst>('attributeRefRule', () => {
const entity = $.SUBRULE($.entityRefRule)
- return $.OR([{
- ALT: () => {
- $.CONSUME(LParen)
+ return $.OR([
+ {ALT: () => {
+ $.CONSUME(ParenLeft)
const attr = $.SUBRULE($.attributePathRule)
- $.CONSUME(RParen)
+ $.CONSUME(ParenRight)
return {...entity, attr}
- }
- }, {
- ALT: () => {
+ }},
+ {ALT: () => {
// legacy fallback
if (!entity.schema) return removeUndefined({schema: entity.catalog, entity: entity.schema, attr: entity.entity}) // not finished, so no warning
const path = $.SUBRULE(legacyAttributePathRule)
const v1 = `${entity.catalog ? entity.catalog.value + '.' : ''}${entity.schema.value}.${entity.entity.value}${path.map(p => ':' + p.value).join('')}`
const v2 = `${entity.catalog ? entity.catalog.value + '.' : ''}${entity.schema.value}(${entity.entity.value}${path.map(p => '.' + p.value).join('')})`
const warning: TokenInfo = {
- ...mergePositions([entity.catalog, entity.schema, entity.entity, ...path].filter(isNotUndefined)),
+ ...mergePositions([entity.catalog, entity.schema, entity.entity, ...path].map(v => v?.token).filter(isNotUndefined)),
issues: [legacy(`"${v1}" is the legacy way, use "${v2}" instead`)]
}
return removeUndefined({schema: entity.catalog, entity: entity.schema, attr: removeEmpty({...entity.entity, path}), warning})
- }
- }])
+ }}
+ ])
})
this.attributeRefCompositeRule = $.RULE<() => AttributeRefCompositeAst>('attributeRefCompositeRule', () => {
const entity = $.SUBRULE($.entityRefRule)
- return $.OR([{
- ALT: () => {
- $.CONSUME(LParen)
+ return $.OR([
+ {ALT: () => {
+ $.CONSUME(ParenLeft)
const attrs: AttributePathAst[] = []
- $.AT_LEAST_ONE_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- attrs.push($.SUBRULE($.attributePathRule))
- $.OPTION2(() => $.CONSUME2(WhiteSpace))
- }
- })
- $.CONSUME(RParen)
+ $.AT_LEAST_ONE_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ attrs.push($.SUBRULE($.attributePathRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(ParenRight)
return {...entity, attrs}
- }
- }, {
- // legacy fallback
- ALT: () => {
+ }},
+ {ALT: () => {
+ // legacy fallback
if (!entity.schema) return removeUndefined({entity: entity.entity, attrs: []}) // relation without attributes
const path = $.SUBRULE(legacyAttributePathRule)
const v1 = `${entity.catalog ? entity.catalog.value + '.' : ''}${entity.schema.value}.${entity.entity.value}${path.map(p => ':' + p.value).join('')}`
const v2 = `${entity.catalog ? entity.catalog.value + '.' : ''}${entity.schema.value}(${entity.entity.value}${path.map(p => '.' + p.value).join('')})`
const warning: TokenInfo = {
- ...mergePositions([entity.catalog, entity.schema, entity.entity, ...path].filter(isNotUndefined)),
+ ...mergePositions([entity.catalog, entity.schema, entity.entity, ...path].map(v => v?.token).filter(isNotUndefined)),
issues: [legacy(`"${v1}" is the legacy way, use "${v2}" instead`)]
}
return removeUndefined({schema: entity.catalog, entity: entity.schema, attrs: [removeEmpty({...entity.entity, path})], warning})
- }
- }])
- })
-
- this.attributeValueRule = $.RULE<() => AttributeValueAst>('attributeValueRule', () => {
- return $.OR([
- { ALT: () => $.SUBRULE($.nullRule) },
- { ALT: () => $.SUBRULE($.integerRule) },
- { ALT: () => $.SUBRULE($.decimalRule) },
- { ALT: () => $.SUBRULE($.booleanRule) },
- { ALT: () => $.SUBRULE($.expressionRule) },
- { ALT: () => $.SUBRULE($.identifierRule) },
+ }}
])
})
- // namespace rules
- this.namespaceStatementRule = $.RULE<() => NamespaceStatement>('namespaceStatementRule', () => {
- const keyword = $.CONSUME(Namespace)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const namespace = $.OPTION2(() => $.SUBRULE(namespaceRule)) || {}
- const extra = $.SUBRULE($.extraRule)
- $.CONSUME(NewLine)
- return {statement: 'Namespace', line: keyword.startLine || defaultPos, ...namespace, ...extra}
- })
-
- // entity rules
- const attributeTypeRule = $.RULE<() => AttributeTypeAst>('attributeTypeRule', () => {
- const res = $.OPTION(() => {
- const type = $.SUBRULE($.identifierRule)
- const enumValues = $.OPTION2(() => {
- $.CONSUME(LParen)
- const values: AttributeValueAst[] = []
- $.AT_LEAST_ONE_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION3(() => $.CONSUME(WhiteSpace))
- const value = $.SUBRULE($.attributeValueRule)
- if (value) values.push(value) // can be undefined on invalid input :/
- $.OPTION4(() => $.CONSUME2(WhiteSpace))
- }
- })
- $.CONSUME(RParen)
- return values
- })
- const defaultValue = $.OPTION5(() => {
- $.CONSUME(Equal)
- return $.SUBRULE2($.attributeValueRule)
- })
- return {type, enumValues, defaultValue}
- })
- return {type: res?.type, enumValues: res?.enumValues, defaultValue: res?.defaultValue}
- })
- const attributeConstraintPkRule = $.RULE<() => AttributeConstraintAst>('attributeConstraintPkRule', () => {
- const token = $.CONSUME(PrimaryKey)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const name = $.OPTION2(() => {
- $.CONSUME(Equal)
- $.OPTION3(() => $.CONSUME2(WhiteSpace))
- const res = $.SUBRULE($.identifierRule)
- $.OPTION4(() => $.CONSUME3(WhiteSpace))
- return res
- })
- return removeUndefined({keyword: tokenInfo(token), name})
- })
- const attributeConstraintIndexRule = $.RULE<() => AttributeConstraintAst>('attributeConstraintIndexRule', () => {
- const token = $.CONSUME(Index)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const name = $.OPTION2(() => {
- $.CONSUME(Equal)
- $.OPTION3(() => $.CONSUME2(WhiteSpace))
- const res = $.SUBRULE($.identifierRule)
- $.OPTION4(() => $.CONSUME3(WhiteSpace))
- return res
- })
- return removeUndefined({keyword: tokenInfo(token), name})
- })
- const attributeConstraintUniqueRule = $.RULE<() => AttributeConstraintAst>('attributeConstraintUniqueRule', () => {
- const token = $.CONSUME(Unique)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const name = $.OPTION2(() => {
- $.CONSUME(Equal)
- $.OPTION3(() => $.CONSUME2(WhiteSpace))
- const res = $.SUBRULE($.identifierRule)
- $.OPTION4(() => $.CONSUME3(WhiteSpace))
- return res
- })
- return removeUndefined({keyword: tokenInfo(token), name})
- })
- const attributeConstraintCheckRule = $.RULE<() => AttributeCheckAst>('attributeConstraintCheckRule', () => {
- const token = $.CONSUME(Check)
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const predicate = $.OPTION2(() => {
- $.CONSUME(LParen)
- const res = $.SUBRULE($.expressionRule)
- $.CONSUME(RParen)
- $.OPTION3(() => $.CONSUME2(WhiteSpace))
- return res
- })
- const name = $.OPTION4(() => {
- $.CONSUME(Equal)
- $.OPTION5(() => $.CONSUME3(WhiteSpace))
- const res = $.SUBRULE($.identifierRule)
- $.OPTION6(() => $.CONSUME4(WhiteSpace))
- return res
+ this.attributePathRule = $.RULE<() => AttributePathAst>('attributePathRule', () => {
+ const attr = $.SUBRULE($.identifierRule)
+ const path: IdentifierAst[] = []
+ $.MANY(() => {
+ $.CONSUME(Dot)
+ path.push($.SUBRULE2($.identifierRule))
})
- if (!predicate && name && [' ', '<', '>', '=', 'IN'].some(c => name.value.includes(c))) {
- // no definition and a name that look like a predicate => switch to the legacy syntax (predicate was in the name)
- const def = {...positionStartAdd(name, -1), token: 'Expression' as const, issues: [legacy(`"=${name.value}" is the legacy way, use expression instead "(\`${name.value}\`)"`)]}
- return removeUndefined({keyword: tokenInfo(token), predicate: def})
- } else {
- return removeUndefined({keyword: tokenInfo(token), predicate, name})
- }
- })
- const attributeConstraintsRule = $.RULE<() => AttributeConstraintsAst>('attributeConstraintsRule', () => {
- const primaryKey = $.OPTION(() => $.SUBRULE(attributeConstraintPkRule))
- $.OPTION2(() => $.CONSUME(WhiteSpace))
- const unique = $.OPTION3(() => $.SUBRULE(attributeConstraintUniqueRule))
- $.OPTION4(() => $.CONSUME2(WhiteSpace))
- const index = $.OPTION5(() => $.SUBRULE(attributeConstraintIndexRule))
- $.OPTION6(() => $.CONSUME3(WhiteSpace))
- const check = $.OPTION7(() => $.SUBRULE(attributeConstraintCheckRule))
- return removeUndefined({primaryKey, index, unique, check})
- })
- const attributeRelationRule = $.RULE<() => AttributeRelationAst>('attributeRelationRule', () => {
- const {srcCardinality, refCardinality, polymorphic, warning} = $.OR([{
- ALT: () => {
- const refCardinality = $.SUBRULE(relationCardinalityRule)
- const polymorphic = $.OPTION(() => $.SUBRULE(relationPolymorphicRule))
- const srcCardinality = $.SUBRULE2(relationCardinalityRule)
- return {srcCardinality, refCardinality, polymorphic, warning: undefined}
- }
- }, {
- ALT: () => {
- const warning = tokenInfoLegacy($.CONSUME(ForeignKey), '"fk" is legacy, replace it with "->"')
- return {srcCardinality: 'n' as const, refCardinality: '1' as const, polymorphic: undefined, warning}
- }
- }])
- $.OPTION2(() => $.CONSUME(WhiteSpace))
- const ref = $.SUBRULE2($.attributeRefCompositeRule)
- return removeUndefined({ref, srcCardinality, refCardinality, polymorphic, warning})
- })
- const attributeRuleInner = $.RULE<() => AttributeAstFlat>('attributeRuleInner', () => {
- const name = $.SUBRULE($.identifierRule)
- $.OPTION(() => $.CONSUME2(WhiteSpace))
- const {type, enumValues, defaultValue} = $.SUBRULE(attributeTypeRule) || {} // returns undefined on invalid input :/
- $.OPTION2(() => $.CONSUME3(WhiteSpace))
- const nullable = $.OPTION3(() => $.CONSUME(Nullable))
- $.OPTION4(() => $.CONSUME4(WhiteSpace))
- const constraints = $.SUBRULE(attributeConstraintsRule)
- const nesting = {depth: 0, offset: {start: 0, end: 0}, position: {start: {line: 0, column: 0}, end: {line: 0, column: 0}}} // unused placeholder
- return removeUndefined({nesting, name, type, enumValues, defaultValue, nullable: nullable ? tokenInfo(nullable) : undefined, ...constraints})
- }, {resyncEnabled: true})
- this.attributeRule = $.RULE<() => AttributeAstFlat>('attributeRule', () => {
- const spaces = $.CONSUME(WhiteSpace)
- const depth = Math.round(spaces.image.split('').reduce((i, c) => c === '\t' ? i + 1 : i + 0.5, 0)) - 1
- const nesting = {...tokenInfo(spaces), depth}
- const attr = $.SUBRULE(attributeRuleInner)
- $.OPTION(() => $.CONSUME2(WhiteSpace))
- const relation = $.OPTION3(() => $.SUBRULE(attributeRelationRule))
- $.OPTION4(() => $.CONSUME3(WhiteSpace))
- const extra = $.SUBRULE($.extraRule)
- $.CONSUME(NewLine)
- return removeUndefined({...attr, nesting, relation, ...extra})
+ return removeEmpty({...attr, path})
})
-
- this.entityRule = $.RULE<() => EntityStatement>('entityRule', () => {
- const {entity, ...namespace} = $.SUBRULE($.entityRefRule)
- const view = $.OPTION(() => $.CONSUME(Asterisk))
- $.OPTION2(() => $.CONSUME(WhiteSpace))
- const alias = $.OPTION3(() => {
- $.CONSUME(As)
- $.CONSUME2(WhiteSpace)
- return $.SUBRULE($.identifierRule)
- })
- $.OPTION4(() => $.CONSUME3(WhiteSpace))
- const extra = $.SUBRULE($.extraRule)
- $.CONSUME(NewLine)
- const attrs: AttributeAstFlat[] = []
+ const legacyAttributePathRule = $.RULE<() => IdentifierAst[]>('legacyAttributePathRule', () => {
+ const path: IdentifierAst[] = []
$.MANY(() => {
- const attr = $.SUBRULE($.attributeRule)
- if (attr?.name?.value) attrs.push(attr) // name can be '' on invalid input :/
+ $.CONSUME(Colon)
+ path.push($.SUBRULE($.identifierRule))
})
- return removeEmpty({statement: 'Entity' as const, name: entity, view: view ? tokenInfo(view) : undefined, ...namespace, alias, ...extra, attrs: nestAttributes(attrs)})
+ return path
})
- // relation rules
- const relationCardinalityRule = $.RULE<() => RelationCardinality>('relationCardinalityRule', () => {
- return $.OR([
- { ALT: () => { $.CONSUME(Dash); return '1' } },
- { ALT: () => { $.CONSUME(LowerThan); return 'n' } },
- { ALT: () => { $.CONSUME(GreaterThan); return 'n' } },
- ])
- })
- const relationPolymorphicRule = $.RULE<() => RelationPolymorphicAst>('relationPolymorphicRule', () => {
- const attr = $.SUBRULE($.attributePathRule)
- $.CONSUME(Equal)
- const value = $.SUBRULE($.attributeValueRule)
- return {attr, value}
- })
- this.relationRule = $.RULE<() => RelationStatement>('relationRule', () => {
- const warning = $.OR([
- {ALT: () => {$.CONSUME(Relation); return undefined}},
- {ALT: () => tokenInfoLegacy($.CONSUME(ForeignKey), '"fk" is legacy, replace it with "rel"')}
- ])
- $.CONSUME(WhiteSpace)
- const src = $.SUBRULE($.attributeRefCompositeRule)
- $.OPTION(() => $.CONSUME2(WhiteSpace))
- const {ref, srcCardinality, refCardinality, polymorphic} = $.SUBRULE(attributeRelationRule) || {} // returns undefined on invalid input :/
- $.OPTION2(() => $.CONSUME3(WhiteSpace))
- const extra = $.SUBRULE($.extraRule)
- $.CONSUME(NewLine)
- return removeUndefined({statement: 'Relation' as const, src, ref, srcCardinality, refCardinality, polymorphic, ...extra, warning})
- })
+ this.attributeValueRule = $.RULE<() => AttributeValueAst>('attributeValueRule', () => $.OR([
+ {ALT: () => $.SUBRULE($.nullRule)},
+ {ALT: () => $.SUBRULE($.integerRule)},
+ {ALT: () => $.SUBRULE($.decimalRule)},
+ {ALT: () => $.SUBRULE($.booleanRule)},
+ {ALT: () => $.SUBRULE($.expressionRule)},
+ {ALT: () => $.SUBRULE($.identifierRule)},
+ ]))
- // type rules
- const typeAliasRule = $.RULE<() => TypeAliasAst>('typeAliasRule', () => {
- return { kind: 'alias', name: $.SUBRULE($.identifierRule) }
+ this.extraRule = $.RULE<() => ExtraAst>('extraRule', () => {
+ const properties = $.OPTION(() => $.SUBRULE($.propertiesRule))
+ $.SUBRULE(whitespaceRule)
+ const doc = $.OPTION2(() => $.SUBRULE($.docRule))
+ $.SUBRULE2(whitespaceRule)
+ const comment = $.OPTION3(() => $.SUBRULE($.commentRule))
+ return removeUndefined({properties, doc, comment})
})
- const typeEnumRule = $.RULE<() => TypeEnumAst>('typeEnumRule', () => {
- $.CONSUME(LParen)
- const values: AttributeValueAst[] = []
- $.MANY_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- values.push($.SUBRULE($.attributeValueRule))
- $.OPTION2(() => $.CONSUME2(WhiteSpace))
- }
- })
- $.CONSUME(RParen)
- return { kind: 'enum', values }
+
+ this.propertiesRule = $.RULE<() => PropertiesAst>('propertiesRule', () => {
+ $.CONSUME(CurlyLeft)
+ const props: PropertiesAst = []
+ $.MANY_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ props.push($.SUBRULE(propertyRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(CurlyRight)
+ return props.filter(isNotUndefined) // can be undefined on invalid input :/
})
- const typeStructRule = $.RULE<() => TypeStructAst>('typeStructRule', () => {
- $.CONSUME(LCurly)
- const attrs: AttributeAstFlat[] = []
- $.MANY_SEP({
- SEP: Comma,
- DEF: () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- attrs.push($.SUBRULE(attributeRuleInner))
- $.OPTION2(() => $.CONSUME2(WhiteSpace))
- }
+ const propertyRule = $.RULE<() => PropertyAst>('propertyRule', () => {
+ const key = $.SUBRULE($.identifierRule)
+ $.SUBRULE(whitespaceRule)
+ const value = $.OPTION(() => {
+ const sep = $.OR([
+ {ALT: () => tokenInfo($.CONSUME(Colon))},
+ {ALT: () => tokenInfo($.CONSUME(Equal), [legacy('"=" is legacy, replace it with ":"')])},
+ ])
+ $.SUBRULE2(whitespaceRule)
+ return {sep, value: $.SUBRULE(propertyValueRule)}
})
- $.CONSUME(RCurly)
- return { kind: 'struct', attrs: nestAttributes(attrs) }
+ return {key, ...value}
})
- const typeCustomRule = $.RULE<() => TypeCustomAst>('typeCustomRule', () => {
- const definition = $.SUBRULE($.expressionRule)
- return { kind: 'custom', definition }
+ const propertyValueRule = $.RULE<() => PropertyValueAst>('propertyValueRule', () => $.OR([
+ {ALT: () => $.SUBRULE($.nullRule)},
+ {ALT: () => $.SUBRULE($.decimalRule)},
+ {ALT: () => $.SUBRULE($.integerRule)},
+ {ALT: () => $.SUBRULE($.booleanRule)},
+ {ALT: () => $.SUBRULE($.expressionRule)},
+ {ALT: () => $.SUBRULE($.identifierRule)},
+ {ALT: () => {
+ $.CONSUME(BracketLeft)
+ const values: PropertyValueAst[] = []
+ $.MANY_SEP({SEP: Comma, DEF: () => {
+ $.SUBRULE(whitespaceRule)
+ values.push($.SUBRULE(propertyValueRule))
+ $.SUBRULE2(whitespaceRule)
+ }})
+ $.CONSUME(BracketRight)
+ return values.filter(isNotUndefined) // can be undefined on invalid input :/
+ }},
+ ]))
+
+ this.docRule = $.RULE<() => DocAst>('docRule', () => $.OR([
+ {ALT: () => {
+ const token = $.CONSUME(DocMultiline)
+ return {kind: 'Doc', token: tokenInfo(token), value: stripIndent(token.image.slice(3, -3)), multiLine: true}
+ }},
+ {ALT: () => {
+ const token = $.CONSUME(Doc)
+ return {kind: 'Doc', token: tokenInfo(token), value: removeQuotes(token.image.slice(1).trim().replaceAll(/\\#/g, '#'))}
+ }}
+ ]))
+
+ this.commentRule = $.RULE<() => CommentAst>('commentRule', () => {
+ const token = $.CONSUME(Comment)
+ return {kind: 'Comment', token: tokenInfo(token), value: token.image.slice(1).trim()}
})
- this.typeRule = $.RULE<() => TypeStatement>('typeRule', () => {
- $.CONSUME(Type)
- $.CONSUME(WhiteSpace)
- const {entity, ...namespace} = $.SUBRULE(this.entityRefRule) || {} // returns undefined on invalid input :/
- $.OPTION(() => $.CONSUME2(WhiteSpace))
- let content = $.OPTION2(() => $.OR([
- { ALT: () => $.SUBRULE(typeEnumRule) },
- { ALT: () => $.SUBRULE(typeStructRule) },
- { ALT: () => $.SUBRULE(typeCustomRule) },
- { ALT: () => $.SUBRULE(typeAliasRule) },
- ]))
- $.OPTION3(() => $.CONSUME3(WhiteSpace))
- const extra = $.SUBRULE($.extraRule)
- $.CONSUME(NewLine)
- /* if (content === undefined) {
- const attrs: AttributeAstFlat[] = []
- // FIXME: $.MANY fails with `TypeError: Cannot read properties of undefined (reading 'call')` at recognizer_engine.ts:517:30 (manyInternalLogic), before calling the callback, no idea why :/
- $.MANY(() => attrs.push($.SUBRULE($.attributeRule)))
- if (attrs.length > 0) content = {kind: 'struct', attrs: nestAttributes(attrs)}
- } */
- return {statement: 'Type', ...namespace, name: entity, content, ...extra}
+
+ // elements
+
+ this.expressionRule = $.RULE<() => ExpressionAst>('expressionRule', () => {
+ const token = $.CONSUME(Expression)
+ return {kind: 'Expression', token: tokenInfo(token), value: token.image.slice(1, -1)}
})
- this.emptyStatementRule = $.RULE<() => EmptyStatement>('emptyStatementRule', () => {
- $.OPTION(() => $.CONSUME(WhiteSpace))
- const comment = $.OPTION2(() => $.SUBRULE($.commentRule))
- $.CONSUME(NewLine)
- return removeUndefined({statement: 'Empty' as const, comment})
+
+ this.identifierRule = $.RULE<() => IdentifierAst>('identifierRule', () => {
+ const token = $.CONSUME(Identifier)
+ if (token.image.startsWith('"') && token.image.endsWith('"')) {
+ return {kind: 'Identifier', token: tokenInfo(token), value: token.image.slice(1, -1).replaceAll(/\\"/g, '"'), quoted: true}
+ } else {
+ return {kind: 'Identifier', token: tokenInfo(token), value: token.image}
+ }
})
- // general rules
- this.statementRule = $.RULE<() => StatementAst>('statementRule', () => {
- return $.OR([
- { ALT: () => $.SUBRULE($.namespaceStatementRule) },
- { ALT: () => $.SUBRULE($.entityRule) },
- { ALT: () => $.SUBRULE($.relationRule) },
- { ALT: () => $.SUBRULE($.typeRule) },
- { ALT: () => $.SUBRULE($.emptyStatementRule) },
- ])
+ this.integerRule = $.RULE<() => IntegerAst>('integerRule', () => {
+ const neg = $.OPTION(() => $.CONSUME(Dash))
+ const token = $.CONSUME(Integer)
+ return neg ? {kind: 'Integer', token: tokenInfo2(neg, token), value: parseInt(neg.image + token.image)} : {kind: 'Integer', token: tokenInfo(token), value: parseInt(token.image)}
})
- this.amlRule = $.RULE<() => AmlAst>('amlRule', () => {
- let stmts: StatementAst[] = []
- $.MANY(() => stmts.push($.SUBRULE($.statementRule)))
- return stmts
+ this.decimalRule = $.RULE<() => DecimalAst>('decimalRule', () => {
+ const neg = $.OPTION(() => $.CONSUME(Dash))
+ const token = $.CONSUME(Decimal)
+ return neg ? {kind: 'Decimal', token: tokenInfo2(neg, token), value: parseFloat(neg.image + token.image)} : {kind: 'Decimal', token: tokenInfo(token), value: parseFloat(token.image)}
})
+ this.booleanRule = $.RULE<() => BooleanAst>('booleanRule', () => $.OR([
+ {ALT: () => ({kind: 'Boolean', token: tokenInfo($.CONSUME(True)), value: true})},
+ {ALT: () => ({kind: 'Boolean', token: tokenInfo($.CONSUME(False)), value: false})},
+ ]))
+
+ this.nullRule = $.RULE<() => NullAst>('nullRule', () => ({kind: 'Null', token: tokenInfo($.CONSUME(Null))}))
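Hedged examples of the literal rules above, written as expected value shapes; token info is omitted and the cases are illustrative, not test fixtures from the repository:

const literalExamples: Array<{input: string, expected: object}> = [
    {input: '12',   expected: {kind: 'Integer', value: 12}},
    {input: '-1.5', expected: {kind: 'Decimal', value: -1.5}}, // Dash + Decimal merged via tokenInfo2
    {input: 'true', expected: {kind: 'Boolean', value: true}},
    {input: 'null', expected: {kind: 'Null'}},
]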
+
+ const whitespaceRule = $.RULE<() => IToken | undefined>('whitespaceRule', () => $.OPTION(() => $.CONSUME(WhiteSpace)))
+
this.performSelfAnalysis()
}
}
@@ -698,7 +629,6 @@ export function parseRule<T>(parse: (p: AmlParser) => T, input: string, strict:
export function parseAmlAst(input: string, opts: { strict?: boolean }): ParserResult<AmlAst> {
return parseRule(p => p.amlRule(), input, opts.strict || false)
- .map(statements => statements.filter(s => s !== undefined)) // can be undefined on invalid input :/
}
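A minimal usage sketch of parseAmlAst; the import path is an assumption (the module name is not shown here), and the chained map reuses the ParserResult API the removed filter relied on:

import {parseAmlAst} from "./amlParser" // hypothetical path

parseAmlAst('users\n  id uuid pk\n  name varchar\n', {strict: false})
    .map(ast => ast.filter(s => s.kind !== 'Empty').length) // e.g. count meaningful statements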
function formatLexerError(err: ILexingError): ParserError {
@@ -722,25 +652,29 @@ function tokenInfo(token: IToken, issues?: TokenIssue[]): TokenInfo {
return removeEmpty({...tokenPosition(token), issues})
}
-function tokenInfoLegacy(token: IToken, message: string): TokenInfo {
- return tokenInfo(token, [legacy(message)])
+function tokenInfo2(start: IToken | undefined, end: IToken | undefined, issues?: TokenIssue[]): TokenInfo {
+ return removeEmpty({...mergePositions([start, end].map(t => t ? tokenPosition(t) : undefined)), issues})
}
function tokenPosition(token: IToken): TokenPosition {
return {
- offset: {start: token.startOffset, end: token.endOffset || defaultPos},
+ offset: {start: pos(token.startOffset), end: pos(token.endOffset)},
position: {
- start: {line: token.startLine || defaultPos, column: token.startColumn || defaultPos},
- end: {line: token.endLine || defaultPos, column: token.endColumn || defaultPos}
+ start: {line: pos(token.startLine), column: pos(token.startColumn)},
+ end: {line: pos(token.endLine), column: pos(token.endColumn)}
}
}
}
+function pos(value: number | undefined): number {
+ return value !== undefined && !isNaN(value) ? value : defaultPos
+}
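The pos() fallback above normalizes the undefined or NaN positions Chevrotain can produce on error-recovery tokens; a hedged illustration:

pos(42)        // -> 42
pos(undefined) // -> defaultPos
pos(NaN)       // -> defaultPos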
+
// utils functions
export function nestAttributes(attributes: AttributeAstFlat[]): AttributeAstNested[] {
const results: AttributeAstNested[] = []
- let path: IdentifierToken[] = []
+ let path: IdentifierAst[] = []
let parents: AttributeAstNested[] = []
let curNesting = 0
attributes.forEach(function(attribute) {
@@ -754,7 +688,7 @@ export function nestAttributes(attributes: AttributeAstFlat[]): AttributeAstNest
results.push(parents[0]) // add top level attrs to results
} else if (nesting.depth > curNesting) { // deeper: append to `path` & `parents`
curNesting = curNesting + 1 // go only one level deeper at the time (even if nesting is higher)
- const warning = nesting.depth > curNesting ? {offset: nesting.offset, position: nesting.position, issues: [...nesting.issues || [], badIndent(curNesting, nesting.depth)]} : undefined
+ const warning = nesting.depth > curNesting ? {...nesting.token, issues: [...nesting.token.issues || [], badIndent(curNesting, nesting.depth)]} : undefined
path = [...path, name]
parents = [...parents, removeUndefined({path, ...values, warning})]
parents[parents.length - 2].attrs = [...(parents[parents.length - 2].attrs || []), parents[parents.length - 1]] // add to parent