Skip to content

Commit

Permalink
Add negative lookahead to tokenizer (#130)
Browse files · Browse the repository at this point in the history
* Add negative lookahead to tokenizer

* Update tokenizer.test.ts

removed console.log

* bump
  • Loading branch information
nachobibian authored Apr 1, 2024
1 parent c23a60a commit e989459
Show file tree
Hide file tree
Showing 6 changed files with 36 additions and 5 deletions.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "scim2-parse-filter",
"version": "0.2.8",
"version": "0.2.10",
"description": "This is a fork of https://www.npmjs.com/package/scim2-filter version 0.2.0 with bug corrections.",
"main": "lib/src/index.js",
"directories": {
Expand Down
2 changes: 1 addition & 1 deletion src/parser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ export type Token = {
export function tokenizer(f: string): Token[] {
const ret: Token[] = [];
let rest = f;
const patterns = /^(?:(\s+)|(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?)|("(?:[^"]|\\.|\n)*")|([[()]|]\.?)|(\w[-\w._:\/%]*))/;
const patterns = /^(?:(\s+)|(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?(?![-\w._:\/\)\s]))|("(?:[^"]|\\.|\n)*")|([[()]|]\.?)|(\w[-\w._:\/%]*))/;
let n;
while ((n = patterns.exec(rest))) {
if (n[1] || n[0].length === 0) {
Expand Down
18 changes: 17 additions & 1 deletion test/parse.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { eq, op, pr, and, or, v } from "./test_util";
import { Filter, parse } from "../src";
import { Filter, parse, stringify } from "../src";
import { assert } from "chai";

// When modifying or adding to these tests,
Expand Down Expand Up @@ -267,4 +267,20 @@ describe('parse', () => {
eq("name", "xxx"))
);
});

describe('attrPath start with number', () => {
test('064869bf-be25-466f-803d-004a0540574b eq "bjensen"', eq("064869bf-be25-466f-803d-004a0540574b", "bjensen"));

it('consistent parse and stringify', () => {
const f : Filter = {
op: 'eq',
attrPath: '064869bf-be25-466f-803d-004a0540574b',
compValue: 'bjensen'
}
const string = stringify(f)
const ff = parse(string)

assert.deepEqual(f, ff)
})
})
});
8 changes: 8 additions & 0 deletions test/stringify.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -253,4 +253,12 @@ describe('stringify', () => {
)
);
});

it('consistent stringify and parse', () => {
const text = '064869bf-be25-466f-803d-004a0540574b eq "bjensen"'
const f = parse(text)
const string = stringify(f)

assert.deepEqual(text, string)
})
});
7 changes: 7 additions & 0 deletions test/tokenizer.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ describe("tokenizer", () => {
);
});

it("0Field1 eq -12", () => {
assert.deepEqual(
[tok("0Field1", "Word"), tok("eq", "Word"), tok("-12", "Number"), EOT],
tokenizer("0Field1 eq -12")
);
});

it("sub-attribute after ValPath", () => {
assert.deepEqual(
tokenizer('emails[type eq "work"].value eq "user@example.com"'),
Expand Down

0 comments on commit e989459

Please sign in to comment.