Variables
Const ANON
ANON: RegExp = regex.and(/\[/, regex.many(WS), /\]/)
AliasArgumentToken
AliasArgumentToken: TokenType
Const BAD_FIXTURES_DIR
BAD_FIXTURES_DIR: string = path.join(__dirname, 'fixtures', 'bad')
Const BLANK_NODE_LABEL
BLANK_NODE_LABEL: RegExp = regex.and(/_:/, regex.or(PN_CHARS_U, /\d/), regex.option(regex.and(regex.many(regex.or(PN_CHARS, /\./)), PN_CHARS)))
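Terminals like this one are built with the module's `regex` helpers, which compose larger RegExps by splicing together the `.source` of smaller ones. A hypothetical re-implementation of those combinators for orientation (the real helpers may differ in grouping details):

```typescript
// Hypothetical sketch of the `regex` combinators used throughout this module.
const and = (...parts: RegExp[]): RegExp =>
  new RegExp(parts.map((part) => `(?:${part.source})`).join(''));
const or = (...parts: RegExp[]): RegExp =>
  new RegExp(parts.map((part) => `(?:${part.source})`).join('|'));
const many = (part: RegExp): RegExp => new RegExp(`(?:${part.source})*`);
const option = (part: RegExp): RegExp => new RegExp(`(?:${part.source})?`);

// ANON built this way matches '[]' with any internal whitespace:
const ANON_DEMO = and(/\[/, many(/[\u0020\u0009\u000d\u000a]/), /\]/);
ANON_DEMO.test('[  ]'); // true
```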
Const BOOLEAN_PATTERN
BOOLEAN_PATTERN: RegExp = /true|false/
BindDirectiveToken
BindDirectiveToken: TokenType
Const CATCH_ALL
CATCH_ALL: RegExp = /[\s\S]*/
Const CATCH_ALL_AT_LEAST_ONE
CATCH_ALL_AT_LEAST_ONE: RegExp = /[\s\S]+/
Const CATEGORY_PATTERN
CATEGORY_PATTERN: RegExp = /^categor(?:y|ies)/i
ConfigDirectiveToken
ConfigDirectiveToken: TokenType
Const DECIMAL
DECIMAL: RegExp = /(\d*\.\d+)|(\d+\.\d*)/
Const DECIMAL_NEGATIVE
DECIMAL_NEGATIVE: RegExp = regex.and(/-/, DECIMAL)
Const DECIMAL_POSITIVE
DECIMAL_POSITIVE: RegExp = regex.and(/\+/, DECIMAL)
Const DOUBLE
DOUBLE: RegExp = regex.or(regex.and(/\d+\.\d*/, EXPONENT), regex.and(/\.\d+/, EXPONENT), regex.and(/\d+/, EXPONENT))
Const DOUBLE_NEGATIVE
DOUBLE_NEGATIVE: RegExp = regex.and(/-/, DOUBLE)
Const DOUBLE_POSITIVE
DOUBLE_POSITIVE: RegExp = regex.and(/\+/, DOUBLE)
Const ECHAR
ECHAR: RegExp = /\\[tbnrf"'\\]/
Const ESCAPED_CHARACTER_PATTERN
ESCAPED_CHARACTER_PATTERN: RegExp = /\\["\\/bfnrt]/
Const ESCAPED_UNICODE_PATTERN
ESCAPED_UNICODE_PATTERN: RegExp = /\\u[0-9A-Fa-f]{4}/
Const EXPONENT
EXPONENT: RegExp = /[eE][+-]?\d+/
Const EXPONENT_PART_PATTERN
EXPONENT_PART_PATTERN: RegExp = /[eE][+-]?[0-9]+/
Const EndThen
EndThen: TokenType = createToken({ name: 'EndThen', pattern: '}', pop_mode: true })
Const EnumValueToken
EnumValueToken: TokenType = createToken({
  name: 'EnumValueToken',
  pattern: Lexer.NA, // pure category, no explicit match of its own
})
Const FROM_BLOCK_END_MATCHER
FROM_BLOCK_END_MATCHER: RegExp = /^\s*to\s*{/i
Const FROM_JSON_BLOCK_END_MATCHER
FROM_JSON_BLOCK_END_MATCHER: RegExp = /((?:.|\s)*?)to\s*{/i
FilterDirectiveToken
FilterDirectiveToken: TokenType
Const FragmentName
FragmentName: TokenType = createToken({ name: 'FragmentName', pattern: Lexer.NA })
Const GOOD_FIXTURES_DIR
GOOD_FIXTURES_DIR: string = path.join(__dirname, 'fixtures', 'good')
GraphArgumentToken
GraphArgumentToken: TokenType
Const GroupGraphPattern
GroupGraphPattern: TokenType = createToken({
  name: 'GroupGraphPattern', // This name is useful for error messages in real-time parsing
  pattern: (text, startOffset = 0) => {
    // Capture a single brace and then anything up to its closing brace.
    if (text[startOffset] !== '{') {
      return null;
    }
    let unclosedBraceCount = 1;
    let cursor;
    for (
      cursor = startOffset + 1;
      cursor < text.length && unclosedBraceCount > 0;
      cursor++
    ) {
      if (text[cursor] === '{') {
        unclosedBraceCount++;
      } else if (text[cursor] === '}') {
        unclosedBraceCount--;
      }
    }
    if (unclosedBraceCount > 0) {
      return null;
    }
    return CATCH_ALL_AT_LEAST_ONE.exec(text.slice(startOffset, cursor));
  },
  line_breaks: true,
  pop_mode: true,
})
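Chevrotain accepts a function as a token `pattern`: it receives the full text and a start offset, and returns a `RegExpExecArray` or `null` to decline. A standalone, hypothetical restatement of the brace-matching logic above:

```typescript
// Not exported by this library; shown only to illustrate the custom pattern.
const matchGroupGraphPattern = (
  text: string,
  startOffset = 0
): RegExpExecArray | null => {
  if (text[startOffset] !== '{') {
    return null; // not at a group graph pattern
  }
  let unclosedBraceCount = 1;
  let cursor = startOffset + 1;
  while (cursor < text.length && unclosedBraceCount > 0) {
    if (text[cursor] === '{') {
      unclosedBraceCount++;
    } else if (text[cursor] === '}') {
      unclosedBraceCount--;
    }
    cursor++;
  }
  if (unclosedBraceCount > 0) {
    return null; // unbalanced braces: let other token types try
  }
  return /[\s\S]+/.exec(text.slice(startOffset, cursor));
};

matchGroupGraphPattern('{ ?s ?p ?o }');   // matches the full block
matchGroupGraphPattern('{ { nested } }'); // brace counting handles nesting
matchGroupGraphPattern('{ unbalanced');   // null
```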
Const HEX
HEX: RegExp = /[0-9A-Fa-f]/
Const INTEGER
INTEGER: RegExp = /\d+/
Const INTEGER_NEGATIVE
INTEGER_NEGATIVE: RegExp = regex.and(/-/, INTEGER)
Const INTEGER_PART_PATTERN
INTEGER_PART_PATTERN: RegExp = /\-?(?:0|[1-9][0-9]*)/
Const INTEGER_POSITIVE
INTEGER_POSITIVE: RegExp = regex.and(/\+/, INTEGER)
Const IRIREF
IRIREF: RegExp = /<[^<>\\{}|\^`\u0000-\u0020]*>/
Const If
If: TokenType = createToken({ name: 'If', pattern: /if/i, push_mode: LexerMode.IFCLAUSE })
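`push_mode` and `pop_mode` drive Chevrotain's multi-mode lexing, which is how `If`/`Then`/`EndThen` scope the SRS clause tokens. A minimal, self-contained sketch (not millan's real mode table):

```typescript
import { createToken, Lexer } from 'chevrotain';

// Matching `If` pushes a mode; a token with pop_mode: true returns to the
// previous mode, limiting which token types are even attempted in between.
const WhiteSpace = createToken({ name: 'WhiteSpace', pattern: /\s+/, group: Lexer.SKIPPED });
const IfDemo = createToken({ name: 'If', pattern: /if/i, push_mode: 'ifClause' });
const OpenClause = createToken({ name: 'OpenClause', pattern: '{' });
const ClauseBody = createToken({ name: 'ClauseBody', pattern: /[^{}]+/, line_breaks: true });
const CloseClause = createToken({ name: 'CloseClause', pattern: '}', pop_mode: true });

const demoLexer = new Lexer({
  defaultMode: 'main',
  modes: {
    main: [WhiteSpace, IfDemo],
    ifClause: [WhiteSpace, OpenClause, ClauseBody, CloseClause],
  },
});

demoLexer.tokenize('IF { ?x a ?y }').tokens.map((t) => t.tokenType.name);
// ['If', 'OpenClause', 'ClauseBody', 'CloseClause']
```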
IfArgumentToken
IfArgumentToken: TokenType
IncludeDirectiveToken
IncludeDirectiveToken: TokenType
Const LANGTAG
LANGTAG: RegExp = /@[a-zA-Z]+(-[a-zA-Z0-9]+)*/
Const NAME_PATTERN
NAME_PATTERN: RegExp = /[_A-Za-z][_0-9A-Za-z]*/
Const NIL
NIL: RegExp = regex.and(/\(/, regex.many(WS), /\)/)
Const NULL_PATTERN
NULL_PATTERN: RegExp = /null/
Const Name
Name: TokenType = createToken({ name: 'Name', pattern: NAME_PATTERN })
Const ON_PATTERN
ON_PATTERN: RegExp = /on/
Const PERCENT
PERCENT: RegExp = regex.and(/%/, HEX, HEX)
Const PLX
PLX: RegExp = regex.or(PERCENT, PN_LOCAL_ESC)
Const PNAME_LN
PNAME_LN: RegExp = regex.and(PNAME_NS, PN_LOCAL)
Const PNAME_LN_TOKEN
PNAME_LN_TOKEN: TokenType = createToken({ name: 'PNAME_LN', pattern: PNAME_LN })
Const PNAME_NS
PNAME_NS: RegExp = regex.and(regex.option(PN_PREFIX), /:/)
Const PN_CHARS
PN_CHARS: RegExp = regex.or(PN_CHARS_U, /-/, /\d/, /\u00b7/, /[\u0300-\u036f]/, /[\u203f-\u2040]/)
Const PN_CHARS_BASE
PN_CHARS_BASE: RegExp = /[A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDBFF][\uDC00-\uDFFF]/
Const PN_CHARS_U
PN_CHARS_U: RegExp = regex.or(PN_CHARS_BASE, /_/)
Const PN_LOCAL
PN_LOCAL: RegExp = regex.and(regex.or(PN_CHARS_U, /:/, /\d/, PLX), regex.option(regex.and(regex.many(regex.or(PN_CHARS, /\./, /:/, PLX)), regex.or(PN_CHARS, /:/, PLX))))
Const PN_LOCAL_ESC
PN_LOCAL_ESC: RegExp = /\\[_~.\-!\$&'()*+,=\/?#@%;]/
Const PN_PREFIX
PN_PREFIX: RegExp = regex.and(PN_CHARS_BASE, regex.option(regex.and(regex.many(regex.or(PN_CHARS, /\./)), PN_CHARS)))
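These terminals are unanchored fragments intended for token composition, so anchor them before testing whole strings. Illustrative checks only:

```typescript
// Wrap a fragment so it must match the entire input.
const anchored = (fragment: RegExp) => new RegExp(`^(?:${fragment.source})$`);

anchored(PNAME_NS).test('ex:');           // true — prefix with empty local part
anchored(PNAME_LN).test('ex:Person');     // true — prefixed name
anchored(PNAME_LN).test('ex:');           // false — PNAME_LN requires a PN_LOCAL
anchored(BLANK_NODE_LABEL).test('_:b1');  // true
```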
Const Punctuator
Punctuator: TokenType = createToken({ name: 'Punctuator', pattern: Lexer.NA })
Const Rule
Rule: TokenType = createToken({ name: 'Rule', pattern: /rule/i })
Const STRING_CHARACTER_PATTERN
STRING_CHARACTER_PATTERN: RegExp = regex.and(/"/, regex.many(regex.or(STRING_SOURCE_CHARACTER_PATTERN, ESCAPED_UNICODE_PATTERN, ESCAPED_CHARACTER_PATTERN)), /"/)
Const STRING_LITERAL1
STRING_LITERAL1: RegExp = regex.and(/'/, regex.many(regex.or(/[^\u0027\u005C\u000A\u000D]/, ECHAR)), /'/)
Const STRING_LITERAL2
STRING_LITERAL2: RegExp = regex.and(/"/, regex.many(regex.or(/[^\u0022\u005C\u000A\u000D]/, ECHAR)), /"/)
Const STRING_LITERAL_LONG1
STRING_LITERAL_LONG1: RegExp = regex.and(/'''/, regex.many(regex.and(regex.option(regex.or(/'/, /''/)), regex.or(/[^'\\]/, ECHAR))), /'''/)
Const STRING_LITERAL_LONG1_TOKEN
STRING_LITERAL_LONG1_TOKEN: TokenType = createToken({ name: 'STRING_LITERAL_LONG1', pattern: STRING_LITERAL_LONG1 })
Const STRING_LITERAL_LONG2
STRING_LITERAL_LONG2: RegExp = regex.and(/"""/, regex.many(regex.and(regex.option(regex.or(/"/, /""/)), regex.or(/[^"\\]/, ECHAR))), /"""/)
Const STRING_LITERAL_LONG2_TOKEN
STRING_LITERAL_LONG2_TOKEN: TokenType = createToken({ name: 'STRING_LITERAL_LONG2', pattern: STRING_LITERAL_LONG2 })
Const STRING_SOURCE_CHARACTER_PATTERN
STRING_SOURCE_CHARACTER_PATTERN: RegExp = /[\u0009\u0020\u0021\u0023-\u005B\u005D-\uFFFF]/
SkipDirectiveToken
SkipDirectiveToken: TokenType
Const SparqlReceivingStardogDirective
SparqlReceivingStardogDirective: TokenType = createToken({ name: 'SparqlReceivingStardogDirective', pattern: Lexer.NA })
Const StardogArgument
StardogArgument: TokenType = createToken({ name: 'StardogArgument', pattern: Lexer.NA })
Const StardogDirective
StardogDirective: TokenType = createToken({ name: 'StardogDirective', pattern: Lexer.NA })
Const StringValueToken
StringValueToken: TokenType = createToken({ name: 'StringValueToken', pattern: Lexer.NA })
Const Then
Then: TokenType = createToken({ name: 'Then', pattern: /then/i, push_mode: LexerMode.THENCLAUSE })
ToArgumentToken
ToArgumentToken: TokenType
Const TopLevel
TopLevel: TokenType = createToken({ name: 'TopLevel', pattern: Lexer.NA })
Const TriplesBlock
TriplesBlock: TokenType = createToken({
  name: 'TriplesBlock', // This name is useful for error messages in real-time parsing
  pattern: /[^{}]+/,
  line_breaks: true,
})
Const UNKNOWN
UNKNOWN: TokenType = createToken({ name: 'UNKNOWN', pattern: /\w+/i })
Const VAR1
VAR1: RegExp = regex.and(/\?/, VARNAME)
Const VAR2
VAR2: RegExp = regex.and(/\$/, VARNAME)
Const VARNAME
VARNAME: RegExp = regex.and(regex.or(PN_CHARS_U, /\d/), regex.many(regex.or(PN_CHARS_U, /\d/, /\u00b7/, /[\u0300-\u036f]/, /[\u203f-\u2040]/)))
Const WS
WS: RegExp = /[\u0020\u0009\u000d\u000a]/
Const allInvalidQueries
allInvalidQueries: FileAndContents[] = [...getAllFileContents(join(__dirname, 'fixtures', 'sparql11', 'ebnf', 'badDog'))]
Const allValidQueries
allValidQueries: FileAndContents[] = [...getAllFileContents(join(__dirname, 'fixtures', 'sparql11', 'ebnf', 'goodDog'))]
Const baseTokens
baseTokens: TokenType[] = [
  sparqlTokenMap.NIL, sparqlTokenMap.ANON, sparqlTokenMap.LCurly, sparqlTokenMap.RCurly, sparqlTokenMap.LParen, sparqlTokenMap.RParen,
  sparqlTokenMap.WhiteSpace, sparqlTokenMap.IRIREF, sparqlTokenMap.LANGTAG, sparqlTokenMap.DOUBLE, sparqlTokenMap.DECIMAL, sparqlTokenMap.INTEGER,
  sparqlTokenMap.DOUBLE_POSITIVE, sparqlTokenMap.DECIMAL_POSITIVE, sparqlTokenMap.INTEGER_POSITIVE, sparqlTokenMap.DOUBLE_NEGATIVE,
  sparqlTokenMap.DECIMAL_NEGATIVE, sparqlTokenMap.INTEGER_NEGATIVE, sparqlTokenMap.STRING_LITERAL1, sparqlTokenMap.STRING_LITERAL2,
  sparqlTokenMap.STRING_LITERAL_LONG1, sparqlTokenMap.STRING_LITERAL_LONG2, sparqlTokenMap.PNAME_NS, sparqlTokenMap.PNAME_LN,
  sparqlTokenMap.BLANK_NODE_LABEL, sparqlTokenMap.VAR1, sparqlTokenMap.VAR2, sparqlTokenMap.Comment, sparqlTokenMap.SELECT, sparqlTokenMap.CONSTRUCT,
  sparqlTokenMap.DISTINCT, sparqlTokenMap.Star, sparqlTokenMap.WHERE, sparqlTokenMap.GROUP_BY, sparqlTokenMap.ORDER_BY, sparqlTokenMap.BY,
  sparqlTokenMap.Period, sparqlTokenMap.QuestionMark, sparqlTokenMap.Plus, sparqlTokenMap.Minus, sparqlTokenMap.LBracket, sparqlTokenMap.RBracket,
  sparqlTokenMap.PERCENT, sparqlTokenMap.BASE, sparqlTokenMap.PREFIX, sparqlTokenMap.DESCRIBE, sparqlTokenMap.ASK, sparqlTokenMap.FROM,
  sparqlTokenMap.REDUCED, sparqlTokenMap.NAMED, sparqlTokenMap.HAVING, sparqlTokenMap.ASC, sparqlTokenMap.DESC, sparqlTokenMap.OFFSET,
  sparqlTokenMap.LIMIT, sparqlTokenMap.VALUES, sparqlTokenMap.LOAD, sparqlTokenMap.SILENT, sparqlTokenMap.INTO, sparqlTokenMap.AS,
  sparqlTokenMap.CLEAR, sparqlTokenMap.DROP, sparqlTokenMap.CREATE, sparqlTokenMap.ADD, sparqlTokenMap.TO, sparqlTokenMap.MOVE, sparqlTokenMap.COPY,
  sparqlTokenMap.INSERT_DATA, sparqlTokenMap.DELETE_DATA, sparqlTokenMap.DELETE_WHERE, sparqlTokenMap.WITH, sparqlTokenMap.DELETE,
  sparqlTokenMap.INSERT, sparqlTokenMap.USING, sparqlTokenMap.DEFAULT, sparqlTokenMap.GRAPH, sparqlTokenMap.ALL, sparqlTokenMap.OPTIONAL,
  sparqlTokenMap.SERVICE, sparqlTokenMap.BIND, sparqlTokenMap.UNDEF, sparqlTokenMap.MINUS, sparqlTokenMap.UNION, sparqlTokenMap.FILTER,
  sparqlTokenMap.LANGMATCHES, sparqlTokenMap.LANG, sparqlTokenMap.DATATYPE, sparqlTokenMap.BOUND, sparqlTokenMap.IRI, sparqlTokenMap.URI,
  sparqlTokenMap.BNODE, sparqlTokenMap.RAND, sparqlTokenMap.ABS, sparqlTokenMap.CEIL, sparqlTokenMap.FLOOR, sparqlTokenMap.ROUND,
  sparqlTokenMap.CONCAT, sparqlTokenMap.STRLEN, sparqlTokenMap.UCASE, sparqlTokenMap.LCASE, sparqlTokenMap.ENCODE_FOR_URI, sparqlTokenMap.CONTAINS,
  sparqlTokenMap.STRSTARTS, sparqlTokenMap.STRENDS, sparqlTokenMap.STRBEFORE, sparqlTokenMap.STRAFTER, sparqlTokenMap.YEAR, sparqlTokenMap.MONTH,
  sparqlTokenMap.DAY, sparqlTokenMap.HOURS, sparqlTokenMap.MINUTES, sparqlTokenMap.SECONDS, sparqlTokenMap.TIMEZONE, sparqlTokenMap.TZ,
  sparqlTokenMap.NOW, sparqlTokenMap.UUID, sparqlTokenMap.STRUUID, sparqlTokenMap.MD5, sparqlTokenMap.SHA1, sparqlTokenMap.SHA256,
  sparqlTokenMap.SHA384, sparqlTokenMap.SHA512, sparqlTokenMap.COALESCE, sparqlTokenMap.IF, sparqlTokenMap.STRLANG, sparqlTokenMap.STRDT,
  sparqlTokenMap.STR, sparqlTokenMap.sameTerm, sparqlTokenMap.isIRI, sparqlTokenMap.isURI, sparqlTokenMap.isBLANK, sparqlTokenMap.isLITERAL,
  sparqlTokenMap.isNUMERIC, sparqlTokenMap.REGEX, sparqlTokenMap.SUBSTR, sparqlTokenMap.REPLACE, sparqlTokenMap.EXISTS, sparqlTokenMap.NOT_EXISTS,
  sparqlTokenMap.COUNT, sparqlTokenMap.SUM, sparqlTokenMap.MIN, sparqlTokenMap.MAX_LENGTH, sparqlTokenMap.MAX, sparqlTokenMap.AVG,
  sparqlTokenMap.SAMPLE, sparqlTokenMap.GROUP_CONCAT, sparqlTokenMap.SEPARATOR, sparqlTokenMap.TRUE, sparqlTokenMap.FALSE, sparqlTokenMap.Semicolon,
  sparqlTokenMap.Comma, sparqlTokenMap.ForwardSlash, sparqlTokenMap.DoubleCaret, sparqlTokenMap.Caret, sparqlTokenMap.LogicalOr, sparqlTokenMap.Pipe,
  sparqlTokenMap.LogicalAnd, sparqlTokenMap.NotEquals, sparqlTokenMap.Bang, sparqlTokenMap.Equals, sparqlTokenMap.LessThanEquals,
  sparqlTokenMap.GreaterThanEquals, sparqlTokenMap.LEmbed, sparqlTokenMap.REmbed, sparqlTokenMap.LessThan, sparqlTokenMap.GreaterThan,
  sparqlTokenMap.IN, sparqlTokenMap.NOT_IN, sparqlTokenMap.A, sparqlTokenMap.UNKNOWN,
]
Const basicFixture
basicFixture: string = `ex:OtherPerson
  a :Thing .

ex:PersonShape
  a sh:NodeShape ;
  sh:targetClass ex:Person ; # Applies to all persons
  sh:property [ # _:b1
    sh:path ex:ssn ; # constrains the values of ex:ssn
    sh:maxCount 1 ;
    sh:datatype xsd:string ;
    sh:pattern "^\\d{3}-\\d{2}-\\d{4}$" ;
  ] ;
  sh:property [ # _:b2
    sh:path ex:worksFor ;
    sh:class ex:Company ;
    sh:nodeKind sh:IRI ;
  ] ;
  sh:closed true ;
  <http://www.w3.org/ns/shacl#ignoredProperties> ( rdf:type ) .`
Const categoryTokens
categoryTokens: any[] = Object.keys(categoryTokenMap).map((key) => categoryTokenMap[key])
Const conditionalDirectiveTokens
conditionalDirectiveTokens: TokenType[] = [SkipDirectiveToken, IncludeDirectiveToken, FilterDirectiveToken]
Const defaultNamespacesMap
defaultNamespacesMap: object = Object.freeze(
  ['', 'rdf', 'rdfs', 'xsd', 'owl', 'stardog'].reduce(
    (namespacesMap, prefix) => ({ ...namespacesMap, [prefix]: true }),
    {}
  )
)
Const disallowedSparqlLiteralTokenNames
disallowedSparqlLiteralTokenNames: string[] = [
  sparqlTokenMap.DOUBLE, sparqlTokenMap.DECIMAL, sparqlTokenMap.INTEGER,
  sparqlTokenMap.DOUBLE_POSITIVE, sparqlTokenMap.DECIMAL_POSITIVE, sparqlTokenMap.INTEGER_POSITIVE,
  sparqlTokenMap.DOUBLE_NEGATIVE, sparqlTokenMap.DECIMAL_NEGATIVE, sparqlTokenMap.INTEGER_NEGATIVE,
  sparqlTokenMap.STRING_LITERAL1, sparqlTokenMap.STRING_LITERAL2,
  sparqlTokenMap.STRING_LITERAL_LONG1, sparqlTokenMap.STRING_LITERAL_LONG2,
].map((token) => token.tokenName)
Const disallowedSparqlTokenNames
disallowedSparqlTokenNames: string[] = Object.keys(disallowedSparqlTokenNameToRuleMap)
Const escapeSequence
escapeSequence: RegExp = /\\u([a-fA-F0-9]{4})|\\U([a-fA-F0-9]{8})|\\[uU]|\\(.)/g
Const escapedIri
escapedIri: RegExp = /^<((?:[^ <>{}\\]|\\[uU])+)>[ \t]*/
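A global escape regex shaped like `escapeSequence` (alternatives for `\uXXXX`, `\UXXXXXXXX`, malformed `\u`/`\U`, and single-character escapes) is conventionally paired with `String.prototype.replace`. A sketch of that conventional usage, mirroring the N3.js approach; not necessarily this library's exact unescaping code:

```typescript
// Map of single-character escapes; anything unlisted is treated as invalid.
const escapeReplacements: { [escaped: string]: string } = {
  '\\': '\\', "'": "'", '"': '"',
  n: '\n', r: '\r', t: '\t', f: '\f', b: '\b',
};

function unescape(item: string): string {
  return item.replace(escapeSequence, (_, unicode4, unicode8, escapedChar) => {
    if (unicode4) return String.fromCharCode(parseInt(unicode4, 16));
    if (unicode8) return String.fromCodePoint(parseInt(unicode8, 16));
    const replacement = escapeReplacements[escapedChar];
    // The bare \u / \U alternative captures nothing, so it lands here too.
    if (replacement === undefined) throw new Error('Invalid escape sequence');
    return replacement;
  });
}

unescape('caf\\u00E9'); // 'café'
```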
Const finalTokens
finalTokens: TokenType[] = [FragmentName, EnumValueToken, Name, StringValueToken, Punctuator]
Const fixture
fixture: string = `ex:PersonShape
  a sh:NodeShape ;
  sh:targetClass ex:Person ; # Applies to all persons
  sh:property [ # _:b1
    sh:path ex:ssn ; # constrains the values of ex:ssn
    sh:maxCount 1 ;
    sh:datatype xsd:string ;
    sh:pattern "^\\d{3}-\\d{2}-\\d{4}$" ;
  ] ;
  sh:property [ # _:b2
    sh:path ex:worksFor ;
    sh:class ex:Company ;
    sh:nodeKind sh:IRI ;
  ] ;
  sh:closed true ;
  <http://www.w3.org/ns/shacl#ignoredProperties> ( rdf:type ) .`
Const fixtureSuites
fixtureSuites: string[] = ['shacl-core/complex', 'shacl-core/misc', 'shacl-core/node', 'shacl-core/path', 'shacl-core/property', 'shacl-core/targets', 'shacl-sparql/component', 'shacl-sparql/node', 'shacl-sparql/pre-binding', 'shacl-sparql/property']
Const getShaclTokenMap
getShaclTokenMap: function = memoize((prefixes: { shacl: string; xsd: string }) => {
  const prefixWithShacl = makePrefixer(prefixes.shacl);
  const prefixWithXsd = makePrefixer(prefixes.xsd);
  // Add the prefixed local names to the SHACL token map now that we know the
  // prefixes.
  const shaclTokenMap = localNames.reduce((tokenMap, localName) => {
    const tokenName = `SHACL_${localName}`;
    const prefixedTokenName = `${tokenName}_prefixed`;
    return {
      ...tokenMap,
      [prefixedTokenName]: createToken({
        name: prefixedTokenName,
        pattern: prefixWithShacl(localName),
        categories: [tokenMap[tokenName], turtleTokenMap.PNAME_LN],
      }),
    };
  }, shaclUnprefixedTokenMap);
  // Add the prefixed local names to the XSD token map now that we know the
  // prefixes.
  return xsdLocalNames.reduce((tokenMap, localName) => {
    const tokenName = `SHACL_xsd_${localName}`;
    const prefixedTokenName = `${tokenName}_prefixed`;
    return {
      ...tokenMap,
      [prefixedTokenName]: createToken({
        name: prefixedTokenName,
        pattern: `${prefixWithXsd(localName)}`,
        categories: [tokenMap[tokenName], turtleTokenMap.PNAME_LN],
      }),
    };
  }, shaclTokenMap);
}, isDeepEqual)
Type declaration

Parameters

- prefixes: object
  - shacl: string
  - xsd: string
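An illustrative usage sketch, assuming `NodeShape` is among the listed SHACL local names and that `makePrefixer` produces strings like `sh:NodeShape`; because the function is memoized with `isDeepEqual`, repeated calls with equal prefixes return the same map:

```typescript
const tokenMap = getShaclTokenMap({ shacl: 'sh', xsd: 'xsd' });

tokenMap.SHACL_NodeShape;          // category token (pattern: Lexer.NA)
tokenMap.SHACL_NodeShape_IRI;      // matches <http://www.w3.org/ns/shacl#NodeShape>
tokenMap.SHACL_NodeShape_prefixed; // matches sh:NodeShape
```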
Const getShaclTokenTypes
getShaclTokenTypes: function = memoize((prefixes: { shacl: string; xsd: string }) => {
  const tokenMap = getShaclTokenMap(prefixes);
  const { pnameTokens, iriTokens } = Object.keys(tokenMap)
    .sort(reverseSort)
    .reduce(
      (accumulator, key) => {
        if (key.endsWith('_IRI')) {
          if (iriIndex < pnameIndex) {
            accumulator.iriTokens.push(tokenMap[key.slice(0, -4)]);
          }
          accumulator.iriTokens.push(tokenMap[key]);
        } else if (key.endsWith('_prefixed')) {
          if (pnameIndex < iriIndex) {
            accumulator.pnameTokens.push(tokenMap[key.slice(0, -9)]);
          }
          accumulator.pnameTokens.push(tokenMap[key]);
        }
        return accumulator;
      },
      { pnameTokens: [], iriTokens: [] }
    );
  if (pnameIndex < iriIndex) {
    return [
      ...turtleTokenTypes.slice(0, pnameIndex),
      ...categoryTokens,
      ...pnameTokens,
      ...turtleTokenTypes.slice(pnameIndex, iriIndex),
      ...iriTokens,
      ...turtleTokenTypes.slice(iriIndex),
    ];
  } else {
    return [
      ...turtleTokenTypes.slice(0, iriIndex),
      ...categoryTokens,
      ...iriTokens,
      ...turtleTokenTypes.slice(iriIndex, pnameIndex),
      ...pnameTokens,
      ...turtleTokenTypes.slice(pnameIndex),
    ];
  }
}, isDeepEqual)
Type declaration

- (prefixes: object): TokenType[]

Parameters

- prefixes: object
  - shacl: string
  - xsd: string

Returns TokenType[]
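Illustrative usage: the result is the Turtle token order with the SHACL/XSD token types spliced in ahead of the generic `PNAME_NS`/`IRIREF` tokens, so a Chevrotain lexer tries the more specific tokens first. Assuming `NodeShape` is a listed local name:

```typescript
import { Lexer } from 'chevrotain';

const shaclLexer = new Lexer(getShaclTokenTypes({ shacl: 'sh', xsd: 'xsd' }));
shaclLexer.tokenize('sh:NodeShape');
// lexes as SHACL_NodeShape_prefixed rather than a bare PNAME_LN
```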
Const graphQlTokens
graphQlTokens: TokenType[] = []
Const graphqlTokens
graphqlTokens: any = require('./tokens')
Const illegalIriChars
illegalIriChars: RegExp = /[\x00-\x20<>\\"\{\}\|\^\`]/
Const indexOfIriRef
indexOfIriRef: any = turtleTokenTypes.indexOf(turtleTokenMap.IRIREF)
Const indexOfPnCharsBase
indexOfPnCharsBase: any = turtleTokenTypes.indexOf(turtleTokenMap.PN_CHARS_BASE)
Const indexOfSelect
indexOfSelect: number = baseTokens.indexOf(sparqlTokenMap.SELECT)
Const invalidTestsFilenames
invalidTestsFilenames: string[] = [
  'qualifiedValueShape-001.ttl', // references `sh:nodeShape` from old, not current, SHACL spec
  'shacl-shacl-data-shapes.ttl', // has SHACL results (different from SHACL itself) that parser can't handle
  'path-complex-002-shapes.ttl', // has SHACL results (different from SHACL itself) that parser can't handle
  'path-strange-001.ttl', // spec says that a shape has at most one value for `sh:path`, yet this has more
  'path-strange-002.ttl', // spec says that a shape has at most one value for `sh:path`, yet this has more
]
Const iriIndex
iriIndex: any = turtleTokenTypes.indexOf(turtleTokenMap.IRIREF)
Const lexer
lexer: Lexer = new Lexer(turtleTokenTypes)
Const localNameToCategoryMap
localNameToCategoryMap: object = Object.keys(localNamesByCategory).reduce(
  (nameToCategoryMap, category) => {
    const categoryLocalNames = localNamesByCategory[category];
    categoryLocalNames.forEach(
      (localName) => (nameToCategoryMap[localName] = category)
    );
    return nameToCategoryMap;
  },
  {}
)
Const localNames
localNames: string[] = Object.keys(localNameToCategoryMap)
Const manyValuesForSingleValueFixture
manyValuesForSingleValueFixture: string = `ex:OtherPerson
  a :Thing .

ex:PersonShape
  a sh:NodeShape ;
  sh:targetClass ex:Person ; # Applies to all persons
  sh:property [ # _:b1
    sh:path ex:ssn ; # constrains the values of ex:ssn
    sh:maxCount 1 ;
    sh:datatype xsd:string, xsd:boolean ; # invalid
    sh:pattern "^\\d{3}-\\d{2}-\\d{4}$" ;
  ] .`
Const mixedShaclAndTurtleFixture
mixedShaclAndTurtleFixture: string = `ex:OtherPerson
  a :Thing .

ex:PersonShape
  a sh:NodeShape ;
  :loves ex:Somebody ;
  sh:targetClass ex:Person, ex:Human ;
  sh:property [ # _:b1
    sh:path ex:ssn ; # constrains the values of ex:ssn
    sh:maxCount 1 ;
    sh:datatype xsd:string ;
    sh:pattern "^\\d{3}-\\d{2}-\\d{4}$" ;
  ] .`
Const nonStandardTokens
nonStandardTokens: TokenType[] = [...pathsTokens, sparqlTokenMap.UNNEST, sparqlTokenMap.VALIDATE, sparqlTokenMap.SHAPES, sparqlTokenMap.SHAPE, sparqlTokenMap.PER]
Const pathsTokens
pathsTokens: TokenType[] = [sparqlTokenMap.START, sparqlTokenMap.END, sparqlTokenMap.VIA, sparqlTokenMap.CYCLIC, sparqlTokenMap.PATHS_SHORTEST, sparqlTokenMap.PATHS_ALL, sparqlTokenMap.PATHS]
Const pnameIndex
pnameIndex: any = turtleTokenTypes.indexOf(sparqlTokenMap.PNAME_NS)
Const shaclIriNamespace
shaclIriNamespace: "http://www.w3.org/ns/shacl#" = "http://www.w3.org/ns/shacl#"
Const shaclTokens
shaclTokens: any = require('./tokens')
Const shaclUnprefixedTokenMap
shaclUnprefixedTokenMap: object = localNames.reduce((tokenMap, localName) => {
  const category = localNameToCategoryMap[localName];
  const categoryToken = categoryTokenMap[category];
  const tokenName = `SHACL_${localName}`;
  const iriTokenName = `${tokenName}_IRI`;
  // Category token that will select either a SHACL IRI or a SHACL PN_LOCAL:
  const iriOrPrefixCategoryToken = createToken({
    name: tokenName,
    pattern: Lexer.NA,
    categories: categoryToken ? [categoryToken] : [],
  });
  return {
    ...tokenMap,
    [tokenName]: iriOrPrefixCategoryToken,
    [iriTokenName]: createToken({
      name: iriTokenName,
      pattern: `<${shaclIriNamespace}${localName}>`,
      categories: [iriOrPrefixCategoryToken, turtleTokenMap.IRIREF],
    }),
  };
}, xsdUnprefixedTokenMap)
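A minimal sketch of the category-token pattern used here, with hypothetical token names: a token whose pattern is `Lexer.NA` never matches input itself, but concrete tokens declare it in `categories`, so a parser rule that consumes the category accepts any of the concrete forms.

```typescript
import { createToken, Lexer } from 'chevrotain';

// Abstract category: never matched directly.
const ShaclPath = createToken({ name: 'ShaclPath', pattern: Lexer.NA });
// Concrete IRI form, a member of the category.
const ShaclPathIri = createToken({
  name: 'ShaclPathIri',
  pattern: '<http://www.w3.org/ns/shacl#path>',
  categories: [ShaclPath],
});
// Concrete prefixed form, also a member of the category.
const ShaclPathPrefixed = createToken({
  name: 'ShaclPathPrefixed',
  pattern: 'sh:path',
  categories: [ShaclPath],
});
// In a parser rule, this.CONSUME(ShaclPath) matches either concrete token.
```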
Const smsOnlyTokens
smsOnlyTokens: TokenType[] = [smsTokenMap.Template, smsTokenMap.Sql, smsTokenMap.GraphQl, smsTokenMap.Json, smsTokenMap.Csv, smsTokenMap.Mapping, smsTokenMap.SqlBlock, smsTokenMap.JsonBlock, smsTokenMap.GraphQlBlock]
Const smsTokenTypes
smsTokenTypes: any[] = [...smsOnlyTokens, ...stardogSparqlTokens]
Const smsTokens
smsTokens: any = require('./tokens')
Const sparqlReceivingStardogDirectiveTokens
sparqlReceivingStardogDirectiveTokens: TokenType[] = [BindDirectiveToken, ...conditionalDirectiveTokens]
Const sparqlTokenTypes
sparqlTokenTypes: TokenType[] = [...baseTokens, ...nonStandardTokens]
Const sparqlTokens
sparqlTokens: any = require('./sparql/tokens')
Const srsTokenTypes
srsTokenTypes: TokenType[] = [Rule, If, Then, EndThen, sparqlTokenMap.LCurly, ...turtleTokenTypes, GroupGraphPattern, TriplesBlock]
Const srsTokens
srsTokens: any = require('./tokens')
Const stardogArguments
stardogArguments: StardogArgumentHolder = [
  'orderBy',
  'first',
  'to',
  'if',
  'alias',
  'graph',
  'offset',
  'limit',
  'iri',
]
  .sort()
  .reduce(
    (accumulator, name) => {
      const key = `${name[0].toUpperCase()}${name.slice(1)}ArgumentToken`;
      const categories = [Name, EnumValueToken, FragmentName, StardogArgument];
      if (['orderBy', 'first', 'limit', 'offset'].includes(name)) {
        categories.push(TopLevel);
      }
      const token = createToken({
        name: key,
        pattern: name,
        categories,
        longer_alt: Name,
      });
      return {
        ...accumulator,
        tokenMap: { ...accumulator.tokenMap, [key]: token },
        orderedTokens: accumulator.orderedTokens.concat(token),
      };
    },
    { tokenMap: {}, orderedTokens: [] } as StardogArgumentHolder
  )
Const stardogDirectives
stardogDirectives: StardogDirectiveHolder = [
  'optional',
  'bind',
  'hide',
  'skip',
  'include',
  'filter',
  'prefix',
  'config',
]
  .sort()
  .reduce(
    (accumulator, name) => {
      const key = `${name[0].toUpperCase()}${name.slice(1)}DirectiveToken`;
      const categories = [Name, EnumValueToken, FragmentName, StardogDirective];
      if (['prefix', 'config'].includes(name)) {
        categories.push(TopLevel);
      }
      if (['bind', 'skip', 'include', 'filter'].includes(name)) {
        categories.push(SparqlReceivingStardogDirective);
      }
      const token = createToken({
        name: key,
        pattern: name,
        categories,
        longer_alt: Name,
      });
      return {
        ...accumulator,
        tokenMap: { ...accumulator.tokenMap, [key]: token },
        orderedTokens: accumulator.orderedTokens.concat(token),
      };
    },
    { tokenMap: {}, orderedTokens: [] } as StardogDirectiveHolder
  )
Const stardogGraphQlParser
Const stardogGraphQlTokens
stardogGraphQlTokens: TokenType[] = [
  ...graphQlTokens,
  ...stardogDirectives.orderedTokens,
  ...stardogArguments.orderedTokens,
  stardogOrderByArgumentFieldPropertyToken,
  stardogOrderByArgumentDescPropertyToken,
  StardogDirective,
  SparqlReceivingStardogDirective,
  StardogArgument,
  TopLevel,
]
Const stardogOrderByArgumentDescPropertyToken
stardogOrderByArgumentDescPropertyToken: TokenType = createToken({
  name: 'OrderByArgumentDescPropertyToken',
  pattern: 'desc',
  categories: [Name, EnumValueToken, FragmentName],
  longer_alt: Name,
})
Const stardogOrderByArgumentFieldPropertyToken
stardogOrderByArgumentFieldPropertyToken: TokenType = createToken({
  name: 'OrderByArgumentFieldPropertyToken',
  pattern: 'field',
  categories: [Name, EnumValueToken, FragmentName],
  longer_alt: Name,
})
Const stardogSparqlTokens
stardogSparqlTokens: any = [...baseTokens.slice(0, indexOfSelect), ...nonStandardTokens, ...baseTokens.slice(indexOfSelect)]
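The splice position matters because a Chevrotain lexer tries token types in array order and the first match wins, so keyword tokens must come before any catch-all that could also match them. A hypothetical, self-contained demonstration (not millan code):

```typescript
import { createToken, Lexer } from 'chevrotain';

const Ws = createToken({ name: 'Ws', pattern: /\s+/, group: Lexer.SKIPPED });
const Select = createToken({ name: 'SELECT', pattern: /SELECT/i });
const Word = createToken({ name: 'Word', pattern: /\w+/ }); // catch-all

new Lexer([Ws, Select, Word]).tokenize('select x').tokens[0].tokenType.name;
// 'SELECT' — with [Ws, Word, Select] instead, 'select' would lex as 'Word'
```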
Const stringLiteralLongQuote
stringLiteralLongQuote: RegExp = /^"""([^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*)"""/
Const stringLiteralLongSingleQuote
stringLiteralLongSingleQuote: RegExp = /^'''([^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*)'''/
Const stringLiteralQuote
stringLiteralQuote: RegExp = /^"((?:[^"\\\r\n]|\\.)*)"(?=[^"])/
Const stringLiteralSingleQuote
stringLiteralSingleQuote: RegExp = /^'((?:[^'\\\r\n]|\\.)*)'(?=[^'])/
Const subExpressionMatcher
subExpressionMatcher: RegExp = /(?:[A-Z]+Expression|ValueLogical)$/i
Const trigLexer
trigLexer: Lexer = new Lexer(trigTokenTypes)
Const trigTokenTypes
trigTokenTypes: TokenType[] = [...turtleTokenTypes.slice(0, indexOfPnCharsBase), sparqlTokenMap.GRAPH, ...turtleTokenTypes.slice(indexOfPnCharsBase)]
Const trigTokens
trigTokens: any = require('./tokens')
Const turtleLexer
turtleLexer: Lexer = new Lexer(turtleTokenTypes)
Const turtleTokenTypes
turtleTokenTypes: any = [
  turtleTokenMap.Comment, sparqlTokenMap.ANON, sparqlTokenMap.LBracket, sparqlTokenMap.RBracket, sparqlTokenMap.LCurly, sparqlTokenMap.RCurly,
  sparqlTokenMap.LParen, sparqlTokenMap.RParen, sparqlTokenMap.WhiteSpace, turtleTokenMap.TRUE, turtleTokenMap.FALSE, sparqlTokenMap.Comma,
  sparqlTokenMap.Semicolon, sparqlTokenMap.PNAME_NS, sparqlTokenMap.A, sparqlTokenMap.PREFIX, sparqlTokenMap.BASE, sparqlTokenMap.PNAME_LN,
  sparqlTokenMap.BLANK_NODE_LABEL, turtleTokenMap.TTL_BASE, turtleTokenMap.TTL_PREFIX, sparqlTokenMap.LANGTAG, turtleTokenMap.DOUBLE,
  turtleTokenMap.DECIMAL, sparqlTokenMap.Period, sparqlTokenMap.DoubleCaret, turtleTokenMap.LEmbed, turtleTokenMap.REmbed, turtleTokenMap.IRIREF,
  turtleTokenMap.STRING_LITERAL_LONG_SINGLE_QUOTE, turtleTokenMap.STRING_LITERAL_LONG_QUOTE, turtleTokenMap.STRING_LITERAL_QUOTE,
  turtleTokenMap.STRING_LITERAL_SINGLE_QUOTE, turtleTokenMap.INTEGER, turtleTokenMap.EXPONENT, turtleTokenMap.PLX, sparqlTokenMap.PERCENT,
  turtleTokenMap.HEX, turtleTokenMap.PN_CHARS_BASE, turtleTokenMap.PN_CHARS_U, turtleTokenMap.PN_CHARS, turtleTokenMap.PN_PREFIX,
  turtleTokenMap.PN_LOCAL, turtleTokenMap.PN_LOCAL_ESC, turtleTokenMap.ECHAR, turtleTokenMap.UCHAR, turtleTokenMap.UNKNOWN,
]
Const turtleTokens
turtleTokens: any = require('./tokens')
Const unescapedIri
unescapedIri: RegExp = /^<([^\x00-\x20<>\\"\{\}\|\^\`]*)>[ \t]*/
Const unescapedStringLiteralQuote
unescapedStringLiteralQuote: RegExp = /^"([^"\\\r\n]+)"/
Const unescapedStringLiteralSingleQuote
unescapedStringLiteralSingleQuote: RegExp = /^'([^'\\\r\n]+)'/
Const unicodeRegexp
unicodeRegexp: RegExp = /[\0-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/
Const upperLowerIfFixtures
upperLowerIfFixtures: object = ['IF', 'if', 'iF', 'If'].reduce(
  (accumulator, val, index) => ({
    ...accumulator,
    ['upperLowerIf' + index]:
      'PREFIX test: <http://test.com/test/0.1/>\n' +
      val +
      ' {\n' + // If
      ' ?X test:hasSibling ?Y . ?Y rdf:type test:Man\n' +
      '}\n' +
      'THEN {\n' +
      ' ?X test:hasBrother ?Y\n' +
      '}\n',
  }),
  {}
)
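The reducer yields keys `upperLowerIf0` through `upperLowerIf3`; index 1 corresponds to `val === 'if'`, for example:

```typescript
upperLowerIfFixtures['upperLowerIf1'] ===
  'PREFIX test: <http://test.com/test/0.1/>\n' +
  'if {\n' +
  ' ?X test:hasSibling ?Y . ?Y rdf:type test:Man\n' +
  '}\n' +
  'THEN {\n' +
  ' ?X test:hasBrother ?Y\n' +
  '}\n'; // true
```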
Const xsdIriNamespace
xsdIriNamespace: "http://www.w3.org/2001/XMLSchema#" = "http://www.w3.org/2001/XMLSchema#"
Const xsdLocalNames
xsdLocalNames: Object = getAsTypedTuple('boolean', 'integer', 'string', 'date', 'dateTime', 'anyURI')
Const xsdUnprefixedTokenMap
xsdUnprefixedTokenMap: object = xsdLocalNames.reduce((tokenMap, localName) => {
  const tokenName = `SHACL_xsd_${localName}`; // category token name
  const iriTokenName = `${tokenName}_IRI`; // IRI token name
  // Category token that will ultimately select either an XSD IRI or an XSD PN_LOCAL:
  const iriOrPrefixCategoryToken = createToken({
    name: tokenName,
    pattern: Lexer.NA,
  });
  return {
    ...tokenMap,
    [tokenName]: iriOrPrefixCategoryToken,
    [iriTokenName]: createToken({
      name: iriTokenName,
      pattern: `<${xsdIriNamespace}${localName}>`,
      categories: [iriOrPrefixCategoryToken, turtleTokenMap.IRIREF],
    }),
  };
}, {})