diff --git a/src/language/__tests__/lexer-test.js b/src/language/__tests__/lexer-test.js index 6c1b23c554..55e88dd6ed 100644 --- a/src/language/__tests__/lexer-test.js +++ b/src/language/__tests__/lexer-test.js @@ -8,7 +8,8 @@ import dedent from '../../jsutils/dedent'; import inspect from '../../jsutils/inspect'; import { GraphQLError } from '../../error'; import { Source } from '../source'; -import { createLexer, TokenKind, isPunctuatorToken } from '../lexer'; +import { TokenKind } from '../tokenKind'; +import { createLexer, isPunctuatorToken } from '../lexer'; function lexOne(str) { const lexer = createLexer(new Source(str)); diff --git a/src/language/__tests__/parser-test.js b/src/language/__tests__/parser-test.js index 7e879659e2..535712d340 100644 --- a/src/language/__tests__/parser-test.js +++ b/src/language/__tests__/parser-test.js @@ -5,7 +5,7 @@ import { inspect as nodeInspect } from 'util'; import { expect } from 'chai'; import { describe, it } from 'mocha'; import { Kind } from '../kinds'; -import { TokenKind } from '../lexer'; +import { TokenKind } from '../tokenKind'; import { parse, parseValue, parseType } from '../parser'; import { Source } from '../source'; import dedent from '../../jsutils/dedent'; diff --git a/src/language/ast.js b/src/language/ast.js index 22dcfa9930..c32a199fd7 100644 --- a/src/language/ast.js +++ b/src/language/ast.js @@ -1,7 +1,7 @@ // @flow strict import { type Source } from './source'; -import { type TokenKindEnum } from './lexer'; +import { type TokenKindEnum } from './tokenKind'; /** * Contains a range of UTF-8 character offsets and token references that diff --git a/src/language/index.js b/src/language/index.js index 7cb97111c3..a27bf36486 100644 --- a/src/language/index.js +++ b/src/language/index.js @@ -8,8 +8,11 @@ export type { SourceLocation } from './location'; export { Kind } from './kinds'; export type { KindEnum } from './kinds'; -export { createLexer, TokenKind } from './lexer'; -export type { Lexer, 
TokenKindEnum } from './lexer'; +export { TokenKind } from './tokenKind'; +export type { TokenKindEnum } from './tokenKind'; + +export { createLexer } from './lexer'; +export type { Lexer } from './lexer'; export { parse, parseValue, parseType } from './parser'; export type { ParseOptions } from './parser'; diff --git a/src/language/lexer.js b/src/language/lexer.js index e19cf32606..af70d6274d 100644 --- a/src/language/lexer.js +++ b/src/language/lexer.js @@ -3,6 +3,7 @@ import defineToJSON from '../jsutils/defineToJSON'; import { type Token } from './ast'; import { type Source } from './source'; +import { type TokenKindEnum, TokenKind } from './tokenKind'; import { syntaxError } from '../error/syntaxError'; import { dedentBlockStringValue } from './blockString'; @@ -90,40 +91,6 @@ export type Lexer = { ... }; -/** - * An exported enum describing the different kinds of tokens that the - * lexer emits. - */ -export const TokenKind = Object.freeze({ - SOF: '<SOF>', - EOF: '<EOF>', - BANG: '!', - DOLLAR: '$', - AMP: '&', - PAREN_L: '(', - PAREN_R: ')', - SPREAD: '...', - COLON: ':', - EQUALS: '=', - AT: '@', - BRACKET_L: '[', - BRACKET_R: ']', - BRACE_L: '{', - PIPE: '|', - BRACE_R: '}', - NAME: 'Name', - INT: 'Int', - FLOAT: 'Float', - STRING: 'String', - BLOCK_STRING: 'BlockString', - COMMENT: 'Comment', -}); - -/** - * The enum type representing the token kinds values. 
- */ -export type TokenKindEnum = $Values<typeof TokenKind>; - // @internal export function isPunctuatorToken(token: Token) { const kind = token.kind; diff --git a/src/language/parser.js b/src/language/parser.js index d83581c7b4..45d31ca710 100644 --- a/src/language/parser.js +++ b/src/language/parser.js @@ -5,13 +5,8 @@ import defineToJSON from '../jsutils/defineToJSON'; import { Source } from './source'; import { type GraphQLError } from '../error/GraphQLError'; import { syntaxError } from '../error/syntaxError'; -import { - type Lexer, - type TokenKindEnum, - TokenKind, - getTokenDesc, - createLexer, -} from './lexer'; +import { type TokenKindEnum, TokenKind } from './tokenKind'; +import { type Lexer, getTokenDesc, createLexer } from './lexer'; import { type Location, type Token, diff --git a/src/language/tokenKind.js b/src/language/tokenKind.js new file mode 100644 index 0000000000..0dd0afc8c9 --- /dev/null +++ b/src/language/tokenKind.js @@ -0,0 +1,35 @@ +// @flow strict + +/** + * An exported enum describing the different kinds of tokens that the + * lexer emits. + */ +export const TokenKind = Object.freeze({ + SOF: '<SOF>', + EOF: '<EOF>', + BANG: '!', + DOLLAR: '$', + AMP: '&', + PAREN_L: '(', + PAREN_R: ')', + SPREAD: '...', + COLON: ':', + EQUALS: '=', + AT: '@', + BRACKET_L: '[', + BRACKET_R: ']', + BRACE_L: '{', + PIPE: '|', + BRACE_R: '}', + NAME: 'Name', + INT: 'Int', + FLOAT: 'Float', + STRING: 'String', + BLOCK_STRING: 'BlockString', + COMMENT: 'Comment', +}); + +/** + * The enum type representing the token kinds values. 
+ */ +export type TokenKindEnum = $Values<typeof TokenKind>; diff --git a/src/utilities/buildASTSchema.js b/src/utilities/buildASTSchema.js index 5148e40769..3694e81cfe 100644 --- a/src/utilities/buildASTSchema.js +++ b/src/utilities/buildASTSchema.js @@ -9,7 +9,7 @@ import { type ObjMap } from '../jsutils/ObjMap'; import { valueFromAST } from './valueFromAST'; import { assertValidSDL } from '../validation/validate'; import { dedentBlockStringValue } from '../language/blockString'; -import { TokenKind } from '../language/lexer'; +import { TokenKind } from '../language/tokenKind'; import { type ParseOptions, parse } from '../language/parser'; import { type Source } from '../language/source'; import { getDirectiveValues } from '../execution/values'; diff --git a/src/utilities/stripIgnoredCharacters.js b/src/utilities/stripIgnoredCharacters.js index 4efcc35f74..a5be9337bb 100644 --- a/src/utilities/stripIgnoredCharacters.js +++ b/src/utilities/stripIgnoredCharacters.js @@ -2,7 +2,8 @@ import inspect from '../jsutils/inspect'; import { Source } from '../language/source'; -import { createLexer, TokenKind, isPunctuatorToken } from '../language/lexer'; +import { TokenKind } from '../language/tokenKind'; +import { createLexer, isPunctuatorToken } from '../language/lexer'; import { dedentBlockStringValue, getBlockStringIndentation,