
Add wildcard functionality
blakeembrey committed Jun 4, 2024
1 parent db3beff commit 01086a0
Showing 2 changed files with 124 additions and 52 deletions.
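
In effect, a trailing "*" now captures the remaining path segments into a numbered parameter. Below is a rough usage sketch of the behaviour the new MATCH_TESTS entries encode (an illustration, not part of the commit, assuming the package's exported match function):

import { match } from "path-to-regexp";

// A trailing "*" captures the remaining segments into a numbered
// parameter, split on the delimiter into an array of segments.
const matchTest = match("/test/*");

matchTest("/test/route/nested");
// => { path: "/test/route/nested", index: 0, params: { "0": ["route", "nested"] } }

matchTest("/test");
// => false (the previously skipped test expected this to match)

// A bare "/*" still accepts the root path, leaving the parameter undefined.
const matchAll = match("/*");

matchAll("/");
// => { path: "/", index: 0, params: { "0": undefined } }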
src/index.spec.ts: 85 changes (71 additions, 14 deletions)
@@ -34,6 +34,12 @@ const PARSER_TESTS: ParserTestSet[] = [
path: "/",
expected: ["/"],
},
{
path: "/:test",
expected: [
{ name: "test", prefix: "/", suffix: "", pattern: "", modifier: "" },
],
},
];

const COMPILE_TESTS: CompileTestSet[] = [
@@ -61,6 +67,14 @@ const COMPILE_TESTS: CompileTestSet[] = [
{ input: { id: "123" }, expected: "/test/" },
],
},
{
path: "/:0",
tests: [
{ input: undefined, expected: null },
{ input: {}, expected: null },
{ input: { 0: "123" }, expected: "/123" },
],
},
{
path: "/:test",
tests: [
@@ -2648,9 +2662,6 @@ const MATCH_TESTS: MatchTestSet[] = [
},
{
path: "#/*",
testOptions: {
skip: true,
},
tests: [
{
input: "#/",
@@ -2675,14 +2686,11 @@
},
{
path: "/entity/:id/*",
testOptions: {
skip: true,
},
tests: [
{
input: "/entity/foo",
matches: ["/entity/foo", "foo", undefined],
expected: { path: "/entity/foo", index: 0, params: { id: "foo" } },
matches: null,
expected: false,
},
{
input: "/entity/foo/",
@@ -2693,14 +2701,11 @@
},
{
path: "/test/*",
testOptions: {
skip: true,
},
tests: [
{
input: "/test",
matches: ["/test", undefined],
expected: { path: "/test", index: 0, params: {} },
matches: null,
expected: false,
},
{
input: "/test/",
@@ -2712,6 +2717,58 @@
matches: ["/test/route", "route"],
expected: { path: "/test/route", index: 0, params: { "0": ["route"] } },
},
{
input: "/test/route/nested",
matches: ["/test/route/nested", "route/nested"],
expected: {
path: "/test/route/nested",
index: 0,
params: { "0": ["route", "nested"] },
},
},
],
},

/**
* Asterisk wildcard.
*/
{
path: "/*",
tests: [
{
input: "/",
matches: ["/", undefined],
expected: { path: "/", index: 0, params: { "0": undefined } },
},
{
input: "/route",
matches: ["/route", "route"],
expected: { path: "/route", index: 0, params: { "0": ["route"] } },
},
{
input: "/route/nested",
matches: ["/route/nested", "route/nested"],
expected: {
path: "/route/nested",
index: 0,
params: { "0": ["route", "nested"] },
},
},
],
},
{
path: "*",
tests: [
{
input: "/",
matches: ["/", "/"],
expected: { path: "/", index: 0, params: { "0": ["", ""] } },
},
{
input: "/test",
matches: ["/test", "/test"],
expected: { path: "/test", index: 0, params: { "0": ["", "test"] } },
},
],
},
];
@@ -2730,7 +2787,7 @@ describe("path-to-regexp", () => {
prefix: "/",
suffix: "",
modifier: "",
pattern: "[^\\/]+?",
pattern: "",
},
];

src/index.ts: 91 changes (53 additions, 38 deletions)
@@ -1,9 +1,6 @@
const DEFAULT_PREFIXES = "./";
const DEFAULT_DELIMITER = "/";
const GROUPS_RE = /\((?:\?<(.*?)>)?(?!\?)/g;
const NOOP_VALUE = (value: string) => value;
const ID_START = /^[$_\p{ID_Start}]$/u;
const ID_CONTINUE = /^[$_\u200C\u200D\p{ID_Continue}]$/u;
const ID_CHAR = /^\p{XID_Continue}$/u;

/**
* Encode a string into another string.
@@ -92,6 +89,7 @@ type TokenType =
| "END"
// Reserved for use.
| "!"
| "@"
| ";";

/**
@@ -105,6 +103,7 @@ interface LexToken {

const SIMPLE_TOKENS: Record<string, TokenType> = {
"!": "!",
"@": "@",
";": ";",
"*": "*",
"+": "+",
@@ -136,14 +135,14 @@ function lexer(str: string) {
}

if (value === ":") {
let name = chars[++i];
let name = "";

if (!ID_START.test(name)) {
throw new TypeError(`Missing parameter name at ${i}`);
while (ID_CHAR.test(chars[++i])) {
name += chars[i];
}

while (ID_CONTINUE.test(chars[++i])) {
name += chars[i];
if (!name) {
throw new TypeError(`Missing parameter name at ${i}`);
}

tokens.push({ type: "NAME", index: i, value: name });
@@ -248,11 +247,10 @@ export class TokenData {
*/
export function parse(str: string, options: ParseOptions = {}): TokenData {
const {
prefixes = DEFAULT_PREFIXES,
prefixes = "./",
delimiter = DEFAULT_DELIMITER,
encodePath = NOOP_VALUE,
} = options;
const defaultPattern = `[^${escape(delimiter)}]+?`;
const tokens: Token[] = [];
const it = lexer(str);
let key = 0;
@@ -265,6 +263,7 @@ export function parse(str: string, options: ParseOptions = {}): TokenData {

if (name || pattern) {
let prefix = char || "";
const modifier = it.modifier();

if (!prefixes.includes(prefix)) {
path += prefix;
@@ -281,10 +280,10 @@ export function parse(str: string, options: ParseOptions = {}): TokenData {
encodePath,
delimiter,
name || String(key++),
pattern || defaultPattern,
pattern,
prefix,
"",
it.modifier(),
modifier,
),
);
continue;
@@ -301,6 +300,22 @@ export function parse(str: string, options: ParseOptions = {}): TokenData {
path = "";
}

const asterisk = it.tryConsume("*");
if (asterisk) {
tokens.push(
toKey(
encodePath,
delimiter,
String(key++),
`[^${escape(delimiter)}]*`,
"",
"",
asterisk,
),
);
continue;
}

const open = it.tryConsume("{");
if (open) {
const prefix = it.text();
@@ -315,7 +330,7 @@ export function parse(str: string, options: ParseOptions = {}): TokenData {
encodePath,
delimiter,
name || (pattern ? String(key++) : ""),
name && !pattern ? defaultPattern : pattern || "",
pattern,
prefix,
suffix,
it.modifier(),
@@ -445,14 +460,15 @@ function compileTokens<P extends ParamData>(
} = options;
const reFlags = flags(options);
const stringify = toStringify(loose);
const keyToRegexp = toKeyRegexp(stringify, data.delimiter);

// Compile all the tokens into regexps.
const encoders: Array<(data: ParamData) => string> = data.tokens.map(
(token) => {
const fn = tokenToFunction(token, encode);
if (!validate || typeof token === "string") return fn;

const pattern = keyToRegexp(token, stringify);
const pattern = keyToRegexp(token);
const validRe = new RegExp(`^${pattern}$`, reFlags);

return (data) => {
@@ -516,16 +532,9 @@ function matchRegexp<P extends ParamData>(

const decoders = re.keys.map((key) => {
if (key.separator) {
const re = new RegExp(
`(${key.pattern})(?:${stringify(key.separator)}|$)`,
"g",
);
const re = new RegExp(stringify(key.separator), "g");

return (value: string) => {
const result: string[] = [];
for (const m of value.matchAll(re)) result.push(decode(m[1]));
return result;
};
return (value: string) => value.split(re).map(decode);
}

return decode;
@@ -613,14 +622,15 @@ function tokensToRegexp(
loose = DEFAULT_DELIMITER,
} = options;
const stringify = toStringify(loose);
const keyToRegexp = toKeyRegexp(stringify, data.delimiter);
let pattern = start ? "^" : "";

for (const token of data.tokens) {
if (typeof token === "string") {
pattern += stringify(token);
} else {
if (token.name) keys.push(token);
pattern += keyToRegexp(token, stringify);
pattern += keyToRegexp(token);
}
}

@@ -636,21 +646,26 @@
/**
* Convert a token into a regexp string (re-used for path validation).
*/
function keyToRegexp(key: Key, stringify: Encode): string {
const prefix = stringify(key.prefix);
const suffix = stringify(key.suffix);

if (key.name) {
if (key.separator) {
const mod = key.modifier === "*" ? "?" : "";
const split = stringify(key.separator);
return `(?:${prefix}((?:${key.pattern})(?:${split}(?:${key.pattern}))*)${suffix})${mod}`;
} else {
return `(?:${prefix}(${key.pattern})${suffix})${key.modifier}`;
function toKeyRegexp(stringify: Encode, delimiter: string) {
const segmentPattern = `[^${escape(delimiter)}]+?`;

return (key: Key) => {
const prefix = stringify(key.prefix);
const suffix = stringify(key.suffix);

if (key.name) {
const pattern = key.pattern || segmentPattern;
if (key.separator) {
const mod = key.modifier === "*" ? "?" : "";
const split = stringify(key.separator);
return `(?:${prefix}((?:${pattern})(?:${split}(?:${pattern}))*)${suffix})${mod}`;
} else {
return `(?:${prefix}(${pattern})${suffix})${key.modifier}`;
}
}
} else {

return `(?:${prefix}${suffix})${key.modifier}`;
}
};
}

/**
