Skip to content

Commit

Permalink
rename to semanticTokenTypes, semanticTokenModifiers & semanticTokenStyleDefaults
Browse files Browse the repository at this point in the history
  • Loading branch information
aeschli committed Jan 30, 2020
1 parent ab175a0 commit c887569
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 33 deletions.
6 changes: 3 additions & 3 deletions extensions/vscode-colorize-tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,19 @@
"vscode": "1.1.5"
},
"contributes": {
"tokenTypes": [
"semanticTokenTypes": [
{
"id": "testToken",
"description": "A test token"
}
],
"tokenModifiers": [
"semanticTokenModifiers": [
{
"id": "testModifier",
"description": "A test modifier"
}
],
"tokenStyleDefaults": [
"semanticTokenStyleDefaults": [
{
"selector": "testToken.testModifier",
"light": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,81 +41,81 @@ const colorPattern = '^#([0-9A-Fa-f]{6})([0-9A-Fa-f]{2})?$';
const tokenClassificationRegistry: ITokenClassificationRegistry = getTokenClassificationRegistry();

const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenTypeExtensionPoint[]>({
extensionPoint: 'tokenTypes',
extensionPoint: 'semanticTokenTypes',
jsonSchema: {
description: nls.localize('contributes.tokenTypes', 'Contributes semantic token types.'),
description: nls.localize('contributes.semanticTokenTypes', 'Contributes semantic token types.'),
type: 'array',
items: {
type: 'object',
properties: {
id: {
type: 'string',
description: nls.localize('contributes.tokenTypes.id', 'The identifier of the token type'),
description: nls.localize('contributes.semanticTokenTypes.id', 'The identifier of the semantic token type'),
pattern: typeAndModifierIdPattern,
patternErrorMessage: nls.localize('contributes.tokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'),
patternErrorMessage: nls.localize('contributes.semanticTokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'),
},
description: {
type: 'string',
description: nls.localize('contributes.color.description', 'The description of the token type'),
description: nls.localize('contributes.color.description', 'The description of the semantic token type'),
}
}
}
}
});

const tokenModifierExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenModifierExtensionPoint[]>({
extensionPoint: 'tokenModifiers',
extensionPoint: 'semanticTokenModifiers',
jsonSchema: {
description: nls.localize('contributes.tokenModifiers', 'Contributes semantic token modifiers.'),
description: nls.localize('contributes.semanticTokenModifiers', 'Contributes semantic token modifiers.'),
type: 'array',
items: {
type: 'object',
properties: {
id: {
type: 'string',
description: nls.localize('contributes.tokenModifiers.id', 'The identifier of the token modifier'),
description: nls.localize('contributes.semanticTokenModifiers.id', 'The identifier of the semantic token modifier'),
pattern: typeAndModifierIdPattern,
patternErrorMessage: nls.localize('contributes.tokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')
patternErrorMessage: nls.localize('contributes.semanticTokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')
},
description: {
description: nls.localize('contributes.tokenModifiers.description', 'The description of the token modifier')
description: nls.localize('contributes.semanticTokenModifiers.description', 'The description of the semantic token modifier')
}
}
}
}
});

const tokenStyleDefaultsExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenStyleDefaultExtensionPoint[]>({
extensionPoint: 'tokenStyleDefaults',
extensionPoint: 'semanticTokenStyleDefaults',
jsonSchema: {
description: nls.localize('contributes.tokenStyleDefaults', 'Contributes semantic token style default.'),
description: nls.localize('contributes.semanticTokenStyleDefaults', 'Contributes semantic token style defaults.'),
type: 'array',
items: {
type: 'object',
properties: {
selector: {
type: 'string',
description: nls.localize('contributes.tokenStyleDefaults.selector', 'The selector matching token types and modifiers.'),
description: nls.localize('contributes.semanticTokenStyleDefaults.selector', 'The selector matching token types and modifiers.'),
pattern: selectorPattern,
patternErrorMessage: nls.localize('contributes.tokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'),
patternErrorMessage: nls.localize('contributes.semanticTokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'),
},
scopes: {
type: 'array',
description: nls.localize('contributes.scopes.light', 'A list of textmate scopes that are matched against the current color theme to find a default style'),
description: nls.localize('contributes.scopes.light', 'A list of TextMate scopes that are matched against the current color theme to find a default style'),
items: {
type: 'string'
}
},
light: {
description: nls.localize('contributes.tokenStyleDefaults.light', 'The default style used for light themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.light', 'The default style used for light themes'),
$ref: textmateColorSettingsSchemaId
},
dark: {
description: nls.localize('contributes.tokenStyleDefaults.dark', 'The default style used for dark themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.dark', 'The default style used for dark themes'),
$ref: textmateColorSettingsSchemaId
},
highContrast: {
description: nls.localize('contributes.tokenStyleDefaults.hc', 'The default style used for high contrast themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.hc', 'The default style used for high contrast themes'),
$ref: textmateColorSettingsSchemaId
}
}
Expand Down Expand Up @@ -154,7 +154,7 @@ export class TokenClassificationExtensionPoints {
}
if (style.fontStyle) {
if (typeof style.fontStyle !== 'string' || !style.fontStyle.match(fontStylePattern)) {
collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be a one or a compination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint));
collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be one or a combination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint));
return undefined;
}
}
Expand All @@ -167,11 +167,11 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenTypeConfiguration', "'configuration.tokenType' must be an array"));
collector.error(nls.localize('invalid.semanticTokenTypeConfiguration', "'configuration.semanticTokenType' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (validateTypeOrModifier(contribution, 'tokenType', collector)) {
if (validateTypeOrModifier(contribution, 'semanticTokenType', collector)) {
tokenClassificationRegistry.registerTokenType(contribution.id, contribution.description);
}
}
Expand All @@ -189,11 +189,11 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenModifierConfiguration', "'configuration.tokenModifier' must be an array"));
collector.error(nls.localize('invalid.semanticTokenModifierConfiguration', "'configuration.semanticTokenModifier' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (validateTypeOrModifier(contribution, 'tokenModifier', collector)) {
if (validateTypeOrModifier(contribution, 'semanticTokenModifier', collector)) {
tokenClassificationRegistry.registerTokenModifier(contribution.id, contribution.description);
}
}
Expand All @@ -211,31 +211,31 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;

if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenStyleDefaultConfiguration', "'configuration.tokenStyleDefaults' must be an array"));
collector.error(nls.localize('invalid.semanticTokenStyleDefaultConfiguration', "'configuration.semanticTokenStyleDefaults' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (typeof contribution.selector !== 'string' || contribution.selector.length === 0) {
collector.error(nls.localize('invalid.selector', "'configuration.tokenStyleDefaults.selector' must be defined and can not be empty"));
collector.error(nls.localize('invalid.selector', "'configuration.semanticTokenStyleDefaults.selector' must be defined and can not be empty"));
continue;
}
if (!contribution.selector.match(selectorPattern)) {
collector.error(nls.localize('invalid.selector.format', "'configuration.tokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*"));
collector.error(nls.localize('invalid.selector.format', "'configuration.semanticTokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*"));
continue;
}

const tokenStyleDefault: TokenStyleDefaults = {};

if (contribution.scopes) {
if ((!Array.isArray(contribution.scopes) || contribution.scopes.some(s => typeof s !== 'string'))) {
collector.error(nls.localize('invalid.scopes', "If defined, 'configuration.tokenStyleDefaults.scopes' must be an array or strings"));
collector.error(nls.localize('invalid.scopes', "If defined, 'configuration.semanticTokenStyleDefaults.scopes' must be an array or strings"));
continue;
}
tokenStyleDefault.scopesToProbe = [contribution.scopes];
}
tokenStyleDefault.light = validateStyle(contribution.light, 'tokenStyleDefaults.light', collector);
tokenStyleDefault.dark = validateStyle(contribution.dark, 'tokenStyleDefaults.dark', collector);
tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'tokenStyleDefaults.highContrast', collector);
tokenStyleDefault.light = validateStyle(contribution.light, 'semanticTokenStyleDefaults.light', collector);
tokenStyleDefault.dark = validateStyle(contribution.dark, 'semanticTokenStyleDefaults.dark', collector);
tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'semanticTokenStyleDefaults.highContrast', collector);

const [type, ...modifiers] = contribution.selector.split('.');
const classification = tokenClassificationRegistry.getTokenClassification(type, modifiers);
Expand Down

0 comments on commit c887569

Please sign in to comment.