From f9ae3e4f2b529a2f59a0fd0f9dae1bb01c94f9a7 Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Thu, 3 Dec 2015 10:44:24 -0800 Subject: [PATCH 01/18] Initial support for globs in tsconfig.json --- src/compiler/commandLineParser.ts | 573 +++++++++++++++++- src/compiler/core.ts | 70 +++ src/compiler/diagnosticMessages.json | 4 + src/compiler/sys.ts | 46 +- src/compiler/types.ts | 21 + src/harness/external/chai.d.ts | 3 + src/harness/harness.ts | 28 +- src/harness/harnessLanguageService.ts | 15 +- src/harness/projectsRunner.ts | 19 +- src/harness/rwcRunner.ts | 9 +- src/services/shims.ts | 62 +- .../cases/unittests/cachingInServerLSHost.ts | 46 +- tests/cases/unittests/expandFiles.ts | 249 ++++++++ tests/cases/unittests/session.ts | 30 +- 14 files changed, 1087 insertions(+), 88 deletions(-) create mode 100644 tests/cases/unittests/expandFiles.ts diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 034b7022e8232..f850ac00dba68 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -495,46 +495,51 @@ namespace ts { }; function getFileNames(): string[] { - let fileNames: string[] = []; + let fileNames: string[]; if (hasProperty(json, "files")) { - if (json["files"] instanceof Array) { - fileNames = map(json["files"], s => combinePaths(basePath, s)); + if (isArray(json["files"])) { + fileNames = json["files"]; } else { errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "files", "Array")); } } - else { - const filesSeen: Map = {}; - const exclude = json["exclude"] instanceof Array ? map(json["exclude"], normalizeSlashes) : undefined; - const supportedExtensions = getSupportedExtensions(options); - Debug.assert(indexOf(supportedExtensions, ".ts") < indexOf(supportedExtensions, ".d.ts"), "Changed priority of extensions to pick"); - - // Get files of supported extensions in their order of resolution - for (const extension of supportedExtensions) { - const filesInDirWithExtension = host.readDirectory(basePath, extension, exclude); - for (const fileName of filesInDirWithExtension) { - // .ts extension would read the .d.ts extension files too but since .d.ts is lower priority extension, - // lets pick them when its turn comes up - if (extension === ".ts" && fileExtensionIs(fileName, ".d.ts")) { - continue; - } - // If this is one of the output extension (which would be .d.ts and .js if we are allowing compilation of js files) - // do not include this file if we included .ts or .tsx file with same base name as it could be output of the earlier compilation - if (extension === ".d.ts" || (options.allowJs && contains(supportedJavascriptExtensions, extension))) { - const baseName = fileName.substr(0, fileName.length - extension.length); - if (hasProperty(filesSeen, baseName + ".ts") || hasProperty(filesSeen, baseName + ".tsx")) { - continue; - } - } + let includeSpecs: string[]; + if (hasProperty(json, "include")) { + if (isArray(json["include"])) { + includeSpecs = json["include"]; + } + else { + errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "include", "Array")); + } + } + + let excludeSpecs: string[]; + if (hasProperty(json, "exclude")) { + if (isArray(json["exclude"])) { + excludeSpecs = json["exclude"]; + } + else { + errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "exclude", "Array")); + } + } - filesSeen[fileName] = true; - fileNames.push(fileName); + if (fileNames === undefined && includeSpecs 
=== undefined) { + includeSpecs = ["**/*.ts"]; + if (options.jsx) { + includeSpecs.push("**/*.tsx"); + } + + if (options.allowJs) { + includeSpecs.push("**/*.js"); + if (options.jsx) { + includeSpecs.push("**/*.jsx"); } } } - return fileNames; + + return expandFiles(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors); } } @@ -584,4 +589,514 @@ namespace ts { return { options, errors }; } + + // Simplified whitelist, forces escaping of any non-word (or digit), non-whitespace character. + const reservedCharacterPattern = /[^\w\s]/g; + + const enum ExpandResult { + Ok, + Error + } + + /** + * Expands an array of file specifications. + * + * @param fileNames The literal file names to include. + * @param includeSpecs The file specifications to expand. + * @param excludeSpecs The file specifications to exclude. + * @param basePath The base path for any relative file specifications. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. + */ + export function expandFiles(fileNames: string[], includeSpecs: string[], excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors?: Diagnostic[]): string[] { + basePath = normalizePath(basePath); + basePath = removeTrailingDirectorySeparator(basePath); + + const excludePattern = includeSpecs ? createExcludeRegularExpression(excludeSpecs, basePath, options, host, errors) : undefined; + const fileSet = createFileMap(host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper); + + // include every literal file. + if (fileNames) { + for (const fileName of fileNames) { + const path = toPath(fileName, basePath, caseSensitiveKeyMapper); + if (!fileSet.contains(path)) { + fileSet.set(path, path); + } + } + } + + // expand and include the provided files into the file set. + if (includeSpecs) { + for (let includeSpec of includeSpecs) { + includeSpec = normalizePath(includeSpec); + includeSpec = removeTrailingDirectorySeparator(includeSpec); + expandFileSpec(basePath, includeSpec, 0, excludePattern, options, host, errors, fileSet); + } + } + + const output = fileSet.reduce(addFileToOutput, []); + return output; + } + + /** + * Expands a file specification with wildcards. + * + * @param basePath The directory to expand. + * @param fileSpec The original file specification. + * @param start The starting offset in the file specification. + * @param excludePattern A pattern used to exclude a file specification. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. + * @param fileSet The set of matching files. + * @param isExpandingRecursiveDirectory A value indicating whether the file specification includes a recursive directory wildcard prior to the start of this segment. + */ + function expandFileSpec(basePath: string, fileSpec: string, start: number, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[], fileSet: FileMap, isExpandingRecursiveDirectory?: boolean): ExpandResult { + // Skip expansion if the base path matches an exclude pattern. + if (isExcludedPath(excludePattern, basePath)) { + return ExpandResult.Ok; + } + + // Find the offset of the next wildcard in the file specification + let offset = indexOfWildcard(fileSpec, start); + if (offset < 0) { + // There were no more wildcards, so include the file. 
+ const path = toPath(fileSpec.substring(start), basePath, caseSensitiveKeyMapper); + includeFile(path, excludePattern, options, host, fileSet); + return ExpandResult.Ok; + } + + // Find the last directory separator before the wildcard to get the leading path. + offset = fileSpec.lastIndexOf(directorySeparator, offset); + if (offset > start) { + // The wildcard occurs in a later segment, include remaining path up to + // wildcard in prefix. + basePath = combinePaths(basePath, fileSpec.substring(start, offset)); + + // Skip this wildcard path if the base path now matches an exclude pattern. + if (isExcludedPath(excludePattern, basePath)) { + return ExpandResult.Ok; + } + + start = offset + 1; + } + + // Find the offset of the next directory separator to extract the wildcard path segment. + offset = getEndOfPathSegment(fileSpec, start); + + // Check if the current offset is the beginning of a recursive directory pattern. + if (isRecursiveDirectoryWildcard(fileSpec, start, offset)) { + if (offset >= fileSpec.length) { + // If there is no file specification following the recursive directory pattern + // we cannot match any files, so we will ignore this pattern. + return ExpandResult.Ok; + } + + // Stop expansion if a file specification contains more than one recursive directory pattern. + if (isExpandingRecursiveDirectory) { + if (errors) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, fileSpec)); + } + + return ExpandResult.Error; + } + + // Expand the recursive directory pattern. + return expandRecursiveDirectory(basePath, fileSpec, offset + 1, excludePattern, options, host, errors, fileSet); + } + + // Match the entries in the directory against the wildcard pattern. + const pattern = createRegularExpressionFromWildcard(fileSpec, start, offset, host); + + // If there are no more directory separators (the offset is at the end of the file specification), then + // this must be a file. + if (offset >= fileSpec.length) { + const files = host.readFileNames(basePath); + for (const extension of getSupportedExtensions(options)) { + for (const file of files) { + if (fileExtensionIs(file, extension)) { + const path = toPath(file, basePath, caseSensitiveKeyMapper); + + // .ts extension would read the .d.ts extension files too but since .d.ts is lower priority extension, + // lets pick them when its turn comes up. + if (extension === ".ts" && fileExtensionIs(file, ".d.ts")) { + continue; + } + + // If this is one of the output extension (which would be .d.ts and .js if we are allowing compilation of js files) + // do not include this file if we included .ts or .tsx file with same base name as it could be output of the earlier compilation + if (extension === ".d.ts" || (options.allowJs && contains(supportedJavascriptExtensions, extension))) { + if (fileSet.contains(changeExtension(path, ".ts")) || fileSet.contains(changeExtension(path, ".tsx"))) { + continue; + } + } + + // This wildcard has no further directory to process, so include the file. + includeFile(path, excludePattern, options, host, fileSet); + } + } + } + } + else { + const directories = host.readDirectoryNames(basePath); + for (const directory of directories) { + // If this was a directory, process the directory. 
+ const path = toPath(directory, basePath, caseSensitiveKeyMapper); + if (expandFileSpec(path, fileSpec, offset + 1, excludePattern, options, host, errors, fileSet, isExpandingRecursiveDirectory) === ExpandResult.Error) { + return ExpandResult.Error; + } + } + } + + return ExpandResult.Ok; + } + + /** + * Expands a `**` recursive directory wildcard. + * + * @param basePath The directory to recursively expand. + * @param fileSpec The original file specification. + * @param start The starting offset in the file specification. + * @param excludePattern A pattern used to exclude a file specification. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. + * @param fileSet The set of matching files. + */ + function expandRecursiveDirectory(basePath: string, fileSpec: string, start: number, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[], fileSet: FileMap): ExpandResult { + // expand the non-recursive part of the file specification against the prefix path. + if (expandFileSpec(basePath, fileSpec, start, excludePattern, options, host, errors, fileSet, /*isExpandingRecursiveDirectory*/ true) === ExpandResult.Error) { + return ExpandResult.Error; + } + + // Recursively expand each subdirectory. + const directories = host.readDirectoryNames(basePath); + for (const entry of directories) { + const path = combinePaths(basePath, entry); + if (expandRecursiveDirectory(path, fileSpec, start, excludePattern, options, host, errors, fileSet) === ExpandResult.Error) { + return ExpandResult.Error; + } + } + + return ExpandResult.Ok; + } + + /** + * Attempts to include a file in a file set. + * + * @param file The file to include. + * @param excludePattern A pattern used to exclude a file specification. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param fileSet The set of matching files. + */ + function includeFile(file: Path, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, fileSet: FileMap): void { + // Ignore the file if it should be excluded. + if (isExcludedPath(excludePattern, file)) { + return; + } + + // Ignore the file if it doesn't exist. + if (!host.fileExists(file)) { + return; + } + + // Ignore the file if it does not have a supported extension. + if (!options.allowNonTsExtensions && !isSupportedSourceFileName(file, options)) { + return; + } + + if (!fileSet.contains(file)) { + fileSet.set(file, file); + } + } + + /** + * Adds a file to an array of files. + * + * @param output The output array. + * @param file The file path. + */ + function addFileToOutput(output: string[], file: string) { + output.push(file); + return output; + } + + /** + * Determines whether a path should be excluded. + * + * @param excludePattern A pattern used to exclude a file specification. + * @param path The path to test for exclusion. + */ + function isExcludedPath(excludePattern: RegExp, path: string) { + return excludePattern ? excludePattern.test(path) : false; + } + + /** + * Creates a regular expression from a glob-style wildcard. + * + * @param fileSpec The file specification. + * @param start The starting offset in the file specification. + * @param end The end offset in the file specification. + * @param host The host used to resolve files and directories. 
+ */ + function createRegularExpressionFromWildcard(fileSpec: string, start: number, end: number, host: ParseConfigHost): RegExp { + const pattern = createPatternFromWildcard(fileSpec, start, end); + return new RegExp("^" + pattern + "$", host.useCaseSensitiveFileNames ? "" : "i"); + } + + /** + * Creates a pattern from a wildcard segment. + * + * @param fileSpec The file specification. + * @param start The starting offset in the file specification. + * @param end The end offset in the file specification. + */ + function createPatternFromWildcard(fileSpec: string, start: number, end: number): string { + let pattern = ""; + let offset = indexOfWildcard(fileSpec, start); + while (offset >= 0 && offset < end) { + if (offset > start) { + // Escape and append the non-wildcard portion to the regular expression + pattern += escapeRegularExpressionText(fileSpec, start, offset); + } + + const charCode = fileSpec.charCodeAt(offset); + if (charCode === CharacterCodes.asterisk) { + // Append a multi-character (zero or more characters) pattern to the regular expression + pattern += "[^/]*"; + } + else if (charCode === CharacterCodes.question) { + // Append a single-character (zero or one character) pattern to the regular expression + pattern += "[^/]"; + } + + start = offset + 1; + offset = indexOfWildcard(fileSpec, start); + } + + // Escape and append any remaining non-wildcard portion. + if (start < end) { + pattern += escapeRegularExpressionText(fileSpec, start, end); + } + + return pattern; + } + + /** + * Creates a regular expression from a glob-style wildcard used to exclude a file. + * + * @param excludeSpecs The file specifications to exclude. + * @param basePath The prefix path. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. + */ + function createExcludeRegularExpression(excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): RegExp { + // Ignore an empty exclusion list + if (!excludeSpecs || excludeSpecs.length === 0) { + return undefined; + } + + basePath = escapeRegularExpressionText(basePath, 0, basePath.length); + + let pattern = ""; + for (const excludeSpec of excludeSpecs) { + const excludePattern = createExcludePattern(excludeSpec, basePath, options, host, errors); + if (excludePattern) { + if (pattern.length > 0) { + pattern += "|"; + } + + pattern += "(" + excludePattern + ")"; + } + } + + if (pattern.length > 0) { + return new RegExp("^(" + pattern + ")($|/)", host.useCaseSensitiveFileNames ? "" : "i"); + } + + return undefined; + } + + /** + * Creates a pattern for used to exclude a file. + * + * @param excludeSpec The file specification to exclude. + * @param basePath The base path for the exclude pattern. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. + */ + function createExcludePattern(excludeSpec: string, basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): string { + if (!excludeSpec) { + return undefined; + } + + excludeSpec = normalizePath(excludeSpec); + excludeSpec = removeTrailingDirectorySeparator(excludeSpec); + + let pattern = isRootedDiskPath(excludeSpec) ? 
"" : basePath; + let hasRecursiveDirectoryWildcard = false; + let segmentStart = 0; + let segmentEnd = getEndOfPathSegment(excludeSpec, segmentStart); + while (segmentStart < segmentEnd) { + if (isRecursiveDirectoryWildcard(excludeSpec, segmentStart, segmentEnd)) { + if (hasRecursiveDirectoryWildcard) { + if (errors) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, excludeSpec)); + } + + return undefined; + } + + // As an optimization, if the recursive directory is the last + // wildcard, or is followed by only `*` or `*.ts`, don't add the + // remaining pattern and exit the loop. + if (canElideRecursiveDirectorySegment(excludeSpec, segmentEnd, options, host)) { + break; + } + + hasRecursiveDirectoryWildcard = true; + pattern += "(/.+)?"; + } + else { + if (pattern) { + pattern += directorySeparator; + } + + pattern += createPatternFromWildcard(excludeSpec, segmentStart, segmentEnd); + } + + segmentStart = segmentEnd + 1; + segmentEnd = getEndOfPathSegment(excludeSpec, segmentStart); + } + + return pattern; + } + + /** + * Determines whether a recursive directory segment can be elided when + * building a regular expression to exclude a path. + * + * @param excludeSpec The file specification used to exclude a path. + * @param segmentEnd The end position of the recursive directory segment. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + */ + function canElideRecursiveDirectorySegment(excludeSpec: string, segmentEnd: number, options: CompilerOptions, host: ParseConfigHost) { + // If there are no segments after this segment, the pattern for this segment may be elided. + if (segmentEnd + 1 >= excludeSpec.length) { + return true; + } + + // If the following segment is a wildcard that may be elided, the pattern for this segment may be elided. + return canElideWildcardSegment(excludeSpec, segmentEnd + 1, options, host); + } + + /** + * Determines whether a wildcard segment can be elided when building a + * regular expression to exclude a path. + * + * @param excludeSpec The file specification used to exclude a path. + * @param segmentStart The starting position of the segment. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + */ + function canElideWildcardSegment(excludeSpec: string, segmentStart: number, options: CompilerOptions, host: ParseConfigHost) { + const charCode = excludeSpec.charCodeAt(segmentStart); + if (charCode === CharacterCodes.asterisk) { + const end = excludeSpec.length; + + // If the segment consists only of `*`, we may elide this segment. + if (segmentStart + 1 === end) { + return true; + } + + // If the segment consists only of `*.ts`, and we do not allow + // any other extensions for source files, we may elide this segment. + if (!options.allowNonTsExtensions && !options.jsx && !options.allowJs && segmentStart + 4 === end) { + const segment = excludeSpec.substr(segmentStart); + return fileExtensionIs(host.useCaseSensitiveFileNames ? segment : segment.toLowerCase(), ".ts"); + } + } + return false; + } + + /** + * Escape regular expression reserved tokens. + * + * @param text The text to escape. + * @param start The starting offset in the string. + * @param end The ending offset in the string. 
+ */ + function escapeRegularExpressionText(text: string, start: number, end: number) { + return text.substring(start, end).replace(reservedCharacterPattern, "\\$&"); + } + + /** + * Determines whether the wildcard at the current offset is a recursive directory wildcard. + * + * @param fileSpec The file specification. + * @param segmentStart The starting offset of a segment in the file specification. + * @param segmentEnd The ending offset of a segment in the file specification. + */ + function isRecursiveDirectoryWildcard(fileSpec: string, segmentStart: number, segmentEnd: number) { + return segmentEnd - segmentStart === 2 && + fileSpec.charCodeAt(segmentStart) === CharacterCodes.asterisk && + fileSpec.charCodeAt(segmentStart + 1) === CharacterCodes.asterisk; + } + + /** + * Gets the index of the next wildcard character in a file specification. + * + * @param fileSpec The file specification. + * @param start The starting offset in the file specification. + */ + function indexOfWildcard(fileSpec: string, start: number): number { + for (let i = start; i < fileSpec.length; ++i) { + const ch = fileSpec.charCodeAt(i); + if (ch === CharacterCodes.asterisk || ch === CharacterCodes.question) { + return i; + } + } + + return -1; + } + + /** + * Get the end position of a path segment, either the index of the next directory separator or + * the provided end position. + * + * @param fileSpec The file specification. + * @param segmentStart The start offset in the file specification. + */ + function getEndOfPathSegment(fileSpec: string, segmentStart: number): number { + const end = fileSpec.length; + if (segmentStart >= end) { + return end; + } + + const offset = fileSpec.indexOf(directorySeparator, segmentStart); + return offset < 0 ? end : offset; + } + + /** + * Gets a case sensitive key. + * + * @param key The original key. + */ + function caseSensitiveKeyMapper(key: string) { + return key; + } + + /** + * Gets a case insensitive key. + * + * @param key The original key. + */ + function caseInsensitiveKeyMapper(key: string) { + return key.toLowerCase(); + } } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index cae7bd8210341..aae4b54b5c7e6 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -25,6 +25,7 @@ namespace ts { contains, remove, forEachValue: forEachValueInMap, + reduce, clear }; @@ -34,6 +35,10 @@ namespace ts { } } + function reduce(callback: (memo: U, value: T, key: Path) => U, initial: U) { + return reduceProperties(files, callback, initial); + } + // path should already be well-formed so it does not need to be normalized function get(path: Path): T { return files[toKey(path)]; @@ -490,6 +495,24 @@ namespace ts { return a < b ? Comparison.LessThan : Comparison.GreaterThan; } + export function compareStrings(a: string, b: string, ignoreCase?: boolean): Comparison { + if (a === b) return Comparison.EqualTo; + if (a === undefined) return Comparison.LessThan; + if (b === undefined) return Comparison.GreaterThan; + if (ignoreCase) { + if (String.prototype.localeCompare) { + const result = a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" }); + return result < 0 ? Comparison.LessThan : result > 0 ? Comparison.GreaterThan : Comparison.EqualTo; + } + + a = a.toUpperCase(); + b = b.toUpperCase(); + if (a === b) return Comparison.EqualTo; + } + + return a < b ? Comparison.LessThan : Comparison.GreaterThan; + } + function getDiagnosticFileName(diagnostic: Diagnostic): string { return diagnostic.file ? 
diagnostic.file.fileName : undefined; } @@ -756,6 +779,49 @@ namespace ts { return path1 + directorySeparator + path2; } + /** + * Removes a trailing directory separator from a path. + * @param path The path. + */ + export function removeTrailingDirectorySeparator(path: string) { + if (path.charAt(path.length - 1) === directorySeparator) { + return path.substr(0, path.length - 1); + } + + return path; + } + + /** + * Adds a trailing directory separator to a path, if it does not already have one. + * @param path The path. + */ + export function ensureTrailingDirectorySeparator(path: string) { + if (path.charAt(path.length - 1) !== directorySeparator) { + return path + directorySeparator; + } + + return path; + } + + export function comparePaths(a: string, b: string, currentDirectory: string, ignoreCase?: boolean) { + if (a === b) return Comparison.EqualTo; + if (a === undefined) return Comparison.LessThan; + if (b === undefined) return Comparison.GreaterThan; + a = removeTrailingDirectorySeparator(a); + b = removeTrailingDirectorySeparator(b); + const aComponents = getNormalizedPathComponents(a, currentDirectory); + const bComponents = getNormalizedPathComponents(b, currentDirectory); + const sharedLength = Math.min(aComponents.length, bComponents.length); + for (let i = 0; i < sharedLength; ++i) { + const result = compareStrings(aComponents[i], bComponents[i], ignoreCase); + if (result !== Comparison.EqualTo) { + return result; + } + } + + return compareValues(aComponents.length, bComponents.length); + } + export function fileExtensionIs(path: string, extension: string): boolean { const pathLen = path.length; const extLen = extension.length; @@ -794,6 +860,10 @@ namespace ts { return path; } + export function changeExtension(path: T, newExtension: string): T { + return (removeFileExtension(path) + newExtension); + } + const backslashOrDoubleQuote = /[\"\\]/g; const escapedCharsRegExp = /[\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g; const escapedCharsMap: Map = { diff --git a/src/compiler/diagnosticMessages.json b/src/compiler/diagnosticMessages.json index 0c19f6d8247a0..6a1d0e5ce650f 100644 --- a/src/compiler/diagnosticMessages.json +++ b/src/compiler/diagnosticMessages.json @@ -2020,6 +2020,10 @@ "category": "Error", "code": 5009 }, + "File specification cannot contain multiple recursive directory wildcards ('**'): '{0}'.": { + "category": "Error", + "code": 5011 + }, "Cannot read file '{0}': {1}": { "category": "Error", "code": 5012 diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 3a90ca42fa152..fc8e90ac38fc3 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -17,6 +17,8 @@ namespace ts { getExecutingFilePath(): string; getCurrentDirectory(): string; readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readFileNames(path: string): string[]; + readDirectoryNames(path: string): string[]; getMemoryUsage?(): number; exit(exitCode?: number): void; } @@ -155,6 +157,16 @@ namespace ts { } } + function readFileNames(path: string): string[] { + const folder = fso.GetFolder(path || "."); + return getNames(folder.files); + } + + function readDirectoryNames(path: string): string[] { + const folder = fso.GetFolder(path || "."); + return getNames(folder.directories); + } + return { args, newLine: "\r\n", @@ -185,6 +197,8 @@ namespace ts { return new ActiveXObject("WScript.Shell").CurrentDirectory; }, readDirectory, + readFileNames, + readDirectoryNames, exit(exitCode?: number): void { try { WScript.Quit(exitCode); @@ -281,7 +295,7 @@ namespace ts { 
// REVIEW: for now this implementation uses polling. // The advantage of polling is that it works reliably // on all os and with network mounted files. - // For 90 referenced files, the average time to detect + // For 90 referenced files, the average time to detect // changes is 2*msInterval (by default 5 seconds). // The overhead of this is .04 percent (1/2500) with // average pause of < 1 millisecond (and max @@ -381,6 +395,30 @@ namespace ts { } } + function readFileNames(path: string): string[] { + const entries = _fs.readdirSync(path || "."); + const files: string[] = []; + for (const entry of entries) { + const stat = _fs.statSync(combinePaths(path, entry)); + if (stat.isFile()) { + files.push(entry); + } + } + return files.sort(); + } + + function readDirectoryNames(path: string): string[] { + const entries = _fs.readdirSync(path || "."); + const directories: string[] = []; + for (const entry of entries) { + const stat = _fs.statSync(combinePaths(path, entry)); + if (stat.isDirectory()) { + directories.push(entry); + } + } + return directories.sort(); + } + return { args: process.argv.slice(2), newLine: _os.EOL, @@ -406,7 +444,7 @@ namespace ts { }; }, watchDirectory: (path, callback, recursive) => { - // Node 4.0 `fs.watch` function supports the "recursive" option on both OSX and Windows + // Node 4.0 `fs.watch` function supports the "recursive" option on both OSX and Windows // (ref: https://github.com/nodejs/node/pull/2649 and https://github.com/Microsoft/TypeScript/issues/4643) return _fs.watch( path, @@ -426,7 +464,7 @@ namespace ts { return _path.resolve(path); }, fileExists(path: string): boolean { - return _fs.existsSync(path); + return _fs.existsSync(path) && _fs.statSync(path).isFile(); }, directoryExists(path: string) { return _fs.existsSync(path) && _fs.statSync(path).isDirectory(); @@ -443,6 +481,8 @@ namespace ts { return process.cwd(); }, readDirectory, + readFileNames, + readDirectoryNames, getMemoryUsage() { if (global.gc) { global.gc(); diff --git a/src/compiler/types.ts b/src/compiler/types.ts index dbd0322aa1802..ffd9ddc206d1d 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -14,6 +14,7 @@ namespace ts { remove(fileName: Path): void; forEachValue(f: (key: Path, v: T) => void): void; + reduce(f: (memo: U, value: T, key: Path) => U, initial: U): U; clear(): void; } @@ -1582,7 +1583,27 @@ namespace ts { } export interface ParseConfigHost { + useCaseSensitiveFileNames: boolean; + readDirectory(rootDir: string, extension: string, exclude: string[]): string[]; + + /** + * Gets a value indicating whether the specified path exists. + * @param path The path to test. + */ + fileExists(path: string): boolean; + + /** + * Reads the files names in the directory. + * @param rootDir The directory path. + */ + readFileNames(rootDir: string): string[]; + + /** + * Reads the directory names in the directory. + * @param rootDir The directory path. 
+ */ + readDirectoryNames(rootDir: string): string[]; } export interface WriteFileCallback { diff --git a/src/harness/external/chai.d.ts b/src/harness/external/chai.d.ts index 814de75e7b254..fc25980d3a099 100644 --- a/src/harness/external/chai.d.ts +++ b/src/harness/external/chai.d.ts @@ -167,6 +167,9 @@ declare module chai { module assert { function equal(actual: any, expected: any, message?: string): void; function notEqual(actual: any, expected: any, message?: string): void; + function deepEqual(actual: any, expected: any, message?: string): void; + function notDeepEqual(actual: any, expected: any, message?: string): void; + function lengthOf(object: any[], length: number, message?: string): void; function isTrue(value: any, message?: string): void; function isFalse(value: any, message?: string): void; function isNull(value: any, message?: string): void; diff --git a/src/harness/harness.ts b/src/harness/harness.ts index 15f3813e54d2c..d8ccc512343c5 100644 --- a/src/harness/harness.ts +++ b/src/harness/harness.ts @@ -1,7 +1,7 @@ // // Copyright (c) Microsoft Corporation. All rights reserved. -// +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -282,7 +282,7 @@ namespace Utils { break; case "parserContextFlags": - // Clear the flag that are produced by aggregating child values.. That is ephemeral + // Clear the flag that are produced by aggregating child values.. That is ephemeral // data we don't care about in the dump. We only care what the parser set directly // on the ast. let value = n.parserContextFlags & ts.ParserContextFlags.ParserGeneratedFlags; @@ -356,7 +356,7 @@ namespace Utils { assert.equal(node1.kind, node2.kind, "node1.kind !== node2.kind"); assert.equal(node1.flags, node2.flags, "node1.flags !== node2.flags"); - // call this on both nodes to ensure all propagated flags have been set (and thus can be + // call this on both nodes to ensure all propagated flags have been set (and thus can be // compared). 
assert.equal(ts.containsParseError(node1), ts.containsParseError(node2)); assert.equal(node1.parserContextFlags, node2.parserContextFlags, "node1.parserContextFlags !== node2.parserContextFlags"); @@ -436,6 +436,8 @@ namespace Harness { getExecutingFilePath(): string; exit(exitCode?: number): void; readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readDirectoryNames(path: string): string[]; + readFileNames(path: string): string[]; } export var IO: IO; @@ -474,6 +476,8 @@ namespace Harness { export const fileExists: typeof IO.fileExists = fso.FileExists; export const log: typeof IO.log = global.WScript && global.WScript.StdOut.WriteLine; export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); + export const readDirectoryNames: typeof IO.readDirectoryNames = path => ts.sys.readDirectoryNames(path); + export const readFileNames: typeof IO.readFileNames = path => ts.sys.readFileNames(path); export function createDirectory(path: string) { if (directoryExists(path)) { @@ -544,6 +548,8 @@ namespace Harness { export const log: typeof IO.log = s => console.log(s); export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); + export const readDirectoryNames: typeof IO.readDirectoryNames = path => ts.sys.readDirectoryNames(path); + export const readFileNames: typeof IO.readFileNames = path => ts.sys.readFileNames(path); export function createDirectory(path: string) { if (!directoryExists(path)) { @@ -752,6 +758,14 @@ namespace Harness { export function readDirectory(path: string, extension?: string, exclude?: string[]) { return listFiles(path).filter(f => !extension || ts.fileExtensionIs(f, extension)); } + + export function readDirectoryNames(path: string): string[] { + return []; + } + + export function readFileNames(path: string) { + return readDirectory(path); + } } } @@ -777,7 +791,7 @@ namespace Harness { (emittedFile: string, emittedLine: number, emittedColumn: number, sourceFile: string, sourceLine: number, sourceColumn: number, sourceName: string): void; } - // Settings + // Settings export let userSpecifiedRoot = ""; export let lightMode = false; @@ -816,7 +830,7 @@ namespace Harness { fileName: string, sourceText: string, languageVersion: ts.ScriptTarget) { - // We'll only assert invariants outside of light mode. + // We'll only assert invariants outside of light mode. const shouldAssertInvariants = !Harness.lightMode; // Only set the parent nodes if we're asserting invariants. We don't need them otherwise. @@ -911,7 +925,7 @@ namespace Harness { libFiles?: string; } - // Additional options not already in ts.optionDeclarations + // Additional options not already in ts.optionDeclarations const harnessOptionDeclarations: ts.CommandLineOption[] = [ { name: "allowNonTsExtensions", type: "boolean" }, { name: "useCaseSensitiveFileNames", type: "boolean" }, @@ -1149,7 +1163,7 @@ namespace Harness { errLines.forEach(e => outputLines.push(e)); // do not count errors from lib.d.ts here, they are computed separately as numLibraryDiagnostics - // if lib.d.ts is explicitly included in input files and there are some errors in it (i.e. because of duplicate identifiers) + // if lib.d.ts is explicitly included in input files and there are some errors in it (i.e. 
because of duplicate identifiers) // then they will be added twice thus triggering 'total errors' assertion with condition // 'totalErrorsReportedInNonLibraryFiles + numLibraryDiagnostics + numTest262HarnessDiagnostics, diagnostics.length diff --git a/src/harness/harnessLanguageService.ts b/src/harness/harnessLanguageService.ts index 3c7814df56258..e12105e3393ea 100644 --- a/src/harness/harnessLanguageService.ts +++ b/src/harness/harnessLanguageService.ts @@ -259,6 +259,12 @@ namespace Harness.LanguageService { readDirectory(rootDir: string, extension: string): string { throw new Error("NYI"); } + readDirectoryNames(path: string): string { + throw new Error("Not implemented."); + } + readFileNames(path: string): string { + throw new Error("Not implemented."); + } fileExists(fileName: string) { return this.getScriptInfo(fileName) !== undefined; } readFile(fileName: string) { const snapshot = this.nativeHost.getScriptSnapshot(fileName); @@ -572,6 +578,14 @@ namespace Harness.LanguageService { throw new Error("Not implemented Yet."); } + readDirectoryNames(path: string): string[] { + throw new Error("Not implemented."); + } + + readFileNames(path: string): string[] { + throw new Error("Not implemented."); + } + watchFile(fileName: string, callback: (fileName: string) => void): ts.FileWatcher { return { close() { } }; } @@ -644,4 +658,3 @@ namespace Harness.LanguageService { getPreProcessedFileInfo(fileName: string, fileContents: string): ts.PreProcessedFileInfo { throw new Error("getPreProcessedFileInfo is not available using the server interface."); } } } - \ No newline at end of file diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index 5210266345759..655c2f07c2bd9 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -210,7 +210,14 @@ class ProjectRunner extends RunnerBase { } const configObject = result.config; - const configParseResult = ts.parseJsonConfigFileContent(configObject, { readDirectory }, ts.getDirectoryPath(configFileName), compilerOptions); + const configParseHost: ts.ParseConfigHost = { + useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), + fileExists, + readDirectory, + readDirectoryNames, + readFileNames + }; + const configParseResult = ts.parseJsonConfigFileContent(configObject, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions); if (configParseResult.errors.length > 0) { return { moduleKind, @@ -237,7 +244,7 @@ class ProjectRunner extends RunnerBase { mapRoot: testCase.resolveMapRoot && testCase.mapRoot ? Harness.IO.resolvePath(testCase.mapRoot) : testCase.mapRoot, sourceRoot: testCase.resolveSourceRoot && testCase.sourceRoot ? 
Harness.IO.resolvePath(testCase.sourceRoot) : testCase.sourceRoot, module: moduleKind, - moduleResolution: ts.ModuleResolutionKind.Classic, // currently all tests use classic module resolution kind, this will change in the future + moduleResolution: ts.ModuleResolutionKind.Classic, // currently all tests use classic module resolution kind, this will change in the future }; // Set the values specified using json const optionNameMap: ts.Map = {}; @@ -278,6 +285,14 @@ class ProjectRunner extends RunnerBase { return result; } + function readDirectoryNames(path: string) { + return Harness.IO.readDirectoryNames(getFileNameInTheProjectTest(path)); + } + + function readFileNames(path: string) { + return Harness.IO.readFileNames(getFileNameInTheProjectTest(path)); + } + function fileExists(fileName: string): boolean { return Harness.IO.fileExists(getFileNameInTheProjectTest(fileName)); } diff --git a/src/harness/rwcRunner.ts b/src/harness/rwcRunner.ts index ce570a7d6ad01..5b4e2a7a3d96d 100644 --- a/src/harness/rwcRunner.ts +++ b/src/harness/rwcRunner.ts @@ -76,7 +76,14 @@ namespace RWC { if (tsconfigFile) { const tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path); const parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content); - const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, Harness.IO, ts.getDirectoryPath(tsconfigFile.path)); + const configParseHost: ts.ParseConfigHost = { + useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), + fileExists: Harness.IO.fileExists, + readDirectory: Harness.IO.readDirectory, + readDirectoryNames: Harness.IO.readDirectoryNames, + readFileNames: Harness.IO.readFileNames, + }; + const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, configParseHost, ts.getDirectoryPath(tsconfigFile.path)); fileNames = configParseResult.fileNames; opts.options = ts.extend(opts.options, configParseResult.options); } diff --git a/src/services/shims.ts b/src/services/shims.ts index 6b656ea2738fb..4c35f5aaf1b64 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -60,7 +60,7 @@ namespace ts { getNewLine?(): string; getProjectVersion?(): string; useCaseSensitiveFileNames?(): boolean; - + getModuleResolutionsForFile?(fileName: string): string; } @@ -73,6 +73,9 @@ namespace ts { * when enumerating the directory. */ readDirectory(rootDir: string, extension: string, exclude?: string): string; + readDirectoryNames?(rootDir: string): string; + readFileNames?(rootDir: string): string; + useCaseSensitiveFileNames?: boolean; } /// @@ -272,12 +275,12 @@ namespace ts { private files: string[]; private loggingEnabled = false; private tracingEnabled = false; - + public resolveModuleNames: (moduleName: string[], containingFile: string) => ResolvedModule[]; - + constructor(private shimHost: LanguageServiceShimHost) { // if shimHost is a COM object then property check will become method call with no arguments. - // 'in' does not have this effect. + // 'in' does not have this effect. 
if ("getModuleResolutionsForFile" in this.shimHost) { this.resolveModuleNames = (moduleNames: string[], containingFile: string) => { let resolutionsInFile = >JSON.parse(this.shimHost.getModuleResolutionsForFile(containingFile)); @@ -407,7 +410,18 @@ namespace ts { export class CoreServicesShimHostAdapter implements ParseConfigHost { + public useCaseSensitiveFileNames: boolean; + constructor(private shimHost: CoreServicesShimHost) { + if (typeof shimHost.useCaseSensitiveFileNames === "boolean") { + this.useCaseSensitiveFileNames = shimHost.useCaseSensitiveFileNames; + } + else if (sys) { + this.useCaseSensitiveFileNames = sys.useCaseSensitiveFileNames; + } + else { + this.useCaseSensitiveFileNames = true; + } } public readDirectory(rootDir: string, extension: string, exclude: string[]): string[] { @@ -424,11 +438,41 @@ namespace ts { } return JSON.parse(encoded); } - + + public readDirectoryNames(path: string): string[] { + if (this.shimHost.readDirectory) { + const encoded = this.shimHost.readDirectoryNames(path); + return JSON.parse(encoded); + } + + if (sys) { + path = normalizePath(path); + path = ensureTrailingDirectorySeparator(path); + return sys.readDirectoryNames(path); + } + + return []; + } + + public readFileNames(path: string): string[] { + if (this.shimHost.readFileNames) { + const encoded = this.shimHost.readFileNames(path); + return JSON.parse(encoded); + } + + if (sys) { + path = normalizePath(path); + path = ensureTrailingDirectorySeparator(path); + return sys.readFileNames(path); + } + + return []; + } + public fileExists(fileName: string): boolean { return this.shimHost.fileExists(fileName); } - + public readFile(fileName: string): string { return this.shimHost.readFile(fileName); } @@ -940,7 +984,7 @@ namespace ts { private forwardJSONCall(actionDescription: string, action: () => any): any { return forwardJSONCall(this.logger, actionDescription, action, this.logPerformance); } - + public resolveModuleName(fileName: string, moduleName: string, compilerOptionsJson: string): string { return this.forwardJSONCall(`resolveModuleName('${fileName}')`, () => { let compilerOptions = JSON.parse(compilerOptionsJson); @@ -949,14 +993,14 @@ namespace ts { resolvedFileName: result.resolvedModule ? 
result.resolvedModule.resolvedFileName: undefined, failedLookupLocations: result.failedLookupLocations }; - }); + }); } public getPreProcessedFileInfo(fileName: string, sourceTextSnapshot: IScriptSnapshot): string { return this.forwardJSONCall( "getPreProcessedFileInfo('" + fileName + "')", () => { - // for now treat files as JavaScript + // for now treat files as JavaScript var result = preProcessFile(sourceTextSnapshot.getText(0, sourceTextSnapshot.getLength()), /* readImportFiles */ true, /* detectJavaScriptImports */ true); var convertResult = { referencedFiles: [], diff --git a/tests/cases/unittests/cachingInServerLSHost.ts b/tests/cases/unittests/cachingInServerLSHost.ts index 88b44a693b940..f0a138c27a196 100644 --- a/tests/cases/unittests/cachingInServerLSHost.ts +++ b/tests/cases/unittests/cachingInServerLSHost.ts @@ -39,6 +39,8 @@ module ts { readDirectory: (path: string, extension?: string, exclude?: string[]): string[] => { throw new Error("NYI"); }, + readDirectoryNames: (path: string): string[] => { throw new Error("NYI"); }, + readFileNames: (path: string): string[] => { throw new Error("NYI"); }, exit: (exitCode?: number) => { }, watchFile: (path, callback) => { @@ -69,8 +71,8 @@ module ts { let projectService = new server.ProjectService(serverHost, logger); let rootScriptInfo = projectService.openFile(rootFile, /* openedByClient */true); let project = projectService.createInferredProject(rootScriptInfo); - project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); - return { + project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); + return { project, rootScriptInfo }; @@ -87,22 +89,22 @@ module ts { name: "c:/f1.ts", content: `foo()` }; - + let serverHost = createDefaultServerHost({ [root.name]: root, [imported.name]: imported }); let { project, rootScriptInfo } = createProject(root.name, serverHost); // ensure that imported file was found let diags = project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.equal(diags.length, 1); - + let originalFileExists = serverHost.fileExists; { // patch fileExists to make sure that disk is not touched serverHost.fileExists = (fileName): boolean => { - assert.isTrue(false, "fileExists should not be called"); + assert.isTrue(false, "fileExists should not be called"); return false; }; - + let newContent = `import {x} from "f1" var x: string = 1;`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); @@ -123,7 +125,7 @@ module ts { }; let newContent = `import {x} from "f2"`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); - + try { // trigger synchronization to make sure that LSHost will try to find 'f2' module on disk project.compilerService.languageService.getSemanticDiagnostics(imported.name); @@ -132,7 +134,7 @@ module ts { catch(e) { assert.isTrue(e.message.indexOf(`Could not find file: '${imported.name}'.`) === 0); } - + assert.isTrue(fileExistsIsCalled); } { @@ -140,45 +142,45 @@ module ts { serverHost.fileExists = (fileName): boolean => { if (fileName === "lib.d.ts") { return false; - } + } fileExistsCalled = true; assert.isTrue(fileName.indexOf('/f1.') !== -1); return originalFileExists(fileName); }; - + let newContent = `import {x} from "f1"`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.isTrue(fileExistsCalled); - + // setting 
compiler options discards module resolution cache fileExistsCalled = false; - + let opts = ts.clone(project.projectOptions); opts.compilerOptions = ts.clone(opts.compilerOptions); opts.compilerOptions.target = ts.ScriptTarget.ES5; project.setProjectOptions(opts); - + project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.isTrue(fileExistsCalled); } }); - + it("loads missing files from disk", () => { let root: File = { name: 'c:/foo.ts', content: `import {x} from "bar"` }; - + let imported: File = { name: 'c:/bar.d.ts', content: `export var y = 1` - }; - + }; + let fileMap: Map = { [root.name]: root }; let serverHost = createDefaultServerHost(fileMap); let originalFileExists = serverHost.fileExists; - + let fileExistsCalledForBar = false; serverHost.fileExists = fileName => { if (fileName === "lib.d.ts") { @@ -187,22 +189,22 @@ module ts { if (!fileExistsCalledForBar) { fileExistsCalledForBar = fileName.indexOf("/bar.") !== -1; } - + return originalFileExists(fileName); }; - + let { project, rootScriptInfo } = createProject(root.name, serverHost); let diags = project.compilerService.languageService.getSemanticDiagnostics(root.name); assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called"); assert.isTrue(diags.length === 1, "one diagnostic expected"); assert.isTrue(typeof diags[0].messageText === "string" && ((diags[0].messageText).indexOf("Cannot find module") === 0), "should be 'cannot find module' message"); - + // assert that import will success once file appear on disk fileMap[imported.name] = imported; fileExistsCalledForBar = false; rootScriptInfo.editContent(0, rootScriptInfo.content.length, `import {y} from "bar"`) - + diags = project.compilerService.languageService.getSemanticDiagnostics(root.name); assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called"); assert.isTrue(diags.length === 0); diff --git a/tests/cases/unittests/expandFiles.ts b/tests/cases/unittests/expandFiles.ts new file mode 100644 index 0000000000000..a9d12a7326057 --- /dev/null +++ b/tests/cases/unittests/expandFiles.ts @@ -0,0 +1,249 @@ +/// +/// + +describe("expandFiles", () => { + it("fail", () => { + assert.isTrue(false, "just checking"); + }); + + const basePath = "c:/dev/"; + const caseInsensitiveHost = createMockParseConfigHost( + basePath, + /*files*/ [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.ts", + "c:/dev/b.js", + "c:/dev/c.d.ts", + "c:/dev/z/a.ts", + "c:/dev/z/abz.ts", + "c:/dev/z/aba.ts", + "c:/dev/z/b.ts", + "c:/dev/z/bbz.ts", + "c:/dev/z/bba.ts", + "c:/dev/x/a.ts", + "c:/dev/x/aa.ts", + "c:/dev/x/b.ts", + "c:/dev/x/y/a.ts", + "c:/dev/x/y/b.ts" + ], + /*ignoreCase*/ true); + + const caseSensitiveHost = createMockParseConfigHost( + basePath, + /*files*/ [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.ts", + "c:/dev/b.js", + "c:/dev/A.ts", + "c:/dev/B.ts", + "c:/dev/c.d.ts", + "c:/dev/z/a.ts", + "c:/dev/z/abz.ts", + "c:/dev/z/aba.ts", + "c:/dev/z/b.ts", + "c:/dev/z/bbz.ts", + "c:/dev/z/bba.ts", + "c:/dev/x/a.ts", + "c:/dev/x/b.ts", + "c:/dev/x/y/a.ts", + "c:/dev/x/y/b.ts", + ], + /*ignoreCase*/ false); + + const expect = _chai.expect; + describe("with literal file list", () => { + it("without exclusions", () => { + const fileNames = ["a.ts", "b.ts"]; + const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); + }); + it("missing files are still present", () => { + 
const fileNames = ["z.ts", "x.ts"]; + const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/z.ts", "c:/dev/x.ts"]); + }); + it("are not removed due to excludes", () => { + const fileNames = ["a.ts", "b.ts"]; + const excludeSpecs = ["b.ts"]; + const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, excludeSpecs, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); + }); + }); + + describe("with literal include list", () => { + it("without exclusions", () => { + const includeSpecs = ["a.ts", "b.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); + }); + it("with non .ts file extensions are excluded", () => { + const includeSpecs = ["a.js", "b.js"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, []); + }); + it("with missing files are excluded", () => { + const includeSpecs = ["z.ts", "x.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, []); + }); + it("with literal excludes", () => { + const includeSpecs = ["a.ts", "b.ts"]; + const excludeSpecs = ["b.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts"]); + }); + it("with wildcard excludes", () => { + const includeSpecs = ["a.ts", "b.ts", "z/a.ts", "z/abz.ts", "z/aba.ts", "x/b.ts"]; + const excludeSpecs = ["*.ts", "z/??z.ts", "*/b.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/z/a.ts", "c:/dev/z/aba.ts"]); + }); + it("with recursive excludes", () => { + const includeSpecs = ["a.ts", "b.ts", "x/a.ts", "x/b.ts", "x/y/a.ts", "x/y/b.ts"]; + const excludeSpecs = ["**/b.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts"]); + }); + it("with case sensitive exclude", () => { + const includeSpecs = ["B.ts"]; + const excludeSpecs = ["**/b.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseSensitiveHost); + assert.deepEqual(results, ["c:/dev/B.ts"]); + }); + }); + + describe("with wildcard include list", () => { + it("same named declarations are excluded", () => { + const includeSpecs = ["*.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"]); + }); + it("`*` matches only ts files", () => { + const includeSpecs = ["*"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"]); + }); + it("`?` matches only a single character", () => { + const includeSpecs = ["x/?.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, 
/*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/x/a.ts", "c:/dev/x/b.ts"]); + }); + it("with recursive directory", () => { + const includeSpecs = ["**/a.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts", "c:/dev/z/a.ts"]); + }); + it("case sensitive", () => { + const includeSpecs = ["**/A.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseSensitiveHost); + assert.deepEqual(results, ["c:/dev/A.ts"]); + }); + it("with missing files are excluded", () => { + const includeSpecs = ["*/z.ts"]; + const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, []); + }); + it("always include literal files", () => { + const fileNames = ["a.ts"]; + const includeSpecs = ["*/z.ts"]; + const excludeSpecs = ["**/a.ts"]; + const results = ts.expandFiles(fileNames, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); + assert.deepEqual(results, ["c:/dev/a.ts"]); + }); + }); + + function createMockParseConfigHost(basePath: string, files: string[], ignoreCase: boolean): ts.ParseConfigHost { + const fileSet: ts.Map = {}; + const directorySet: ts.Map<{ files: ts.Map; directories: ts.Map; }> = {}; + + files.sort((a, b) => ts.comparePaths(a, b, basePath, ignoreCase)); + for (const file of files) { + addFile(ts.getNormalizedAbsolutePath(file, basePath)); + } + + return { + useCaseSensitiveFileNames: !ignoreCase, + fileExists, + readDirectory, + readFileNames, + readDirectoryNames + }; + + function fileExists(path: string): boolean { + const fileKey = ignoreCase ? path.toLowerCase() : path; + return ts.hasProperty(fileSet, fileKey); + } + + function directoryExists(path: string): boolean { + const directoryKey = ignoreCase ? path.toLowerCase() : path; + return ts.hasProperty(directorySet, directoryKey); + } + + function readDirectory(rootDir: string, extension?: string, exclude?: string[]): string[] { + throw new Error("Not implemented"); + } + + function readFileNames(path: string) { + const files = getDirectoryEntry(path).files; + const result: string[] = []; + ts.forEachKey(files, key => { result.push(key); }); + result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); + return result; + } + + function readDirectoryNames(path: string) { + const directories = getDirectoryEntry(path).directories; + const result: string[] = []; + ts.forEachKey(directories, key => { result.push(key); }); + result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); + return result; + } + + function getDirectoryEntry(path: string) { + path = ts.getNormalizedAbsolutePath(path, basePath); + path = ts.removeTrailingDirectorySeparator(path); + const directoryKey = ignoreCase ? path.toLowerCase() : path; + return ts.getProperty(directorySet, directoryKey); + } + + function addFile(file: string) { + const fileKey = ignoreCase ? file.toLowerCase() : file; + if (!ts.hasProperty(fileSet, fileKey)) { + fileSet[fileKey] = file; + const name = ts.getBaseFileName(file); + const parent = ts.getDirectoryPath(file); + addToDirectory(parent, name, "file"); + } + } + + function addDirectory(directory: string) { + directory = ts.removeTrailingDirectorySeparator(directory); + const directoryKey = ignoreCase ? 
directory.toLowerCase() : directory; + if (!ts.hasProperty(directorySet, directoryKey)) { + directorySet[directoryKey] = { files: {}, directories: {} }; + const name = ts.getBaseFileName(directory); + const parent = ts.getDirectoryPath(directory); + if (parent !== directory) { + addToDirectory(parent, name, "directory"); + } + } + } + + function addToDirectory(directory: string, entry: string, type: "file" | "directory") { + addDirectory(directory); + directory = ts.removeTrailingDirectorySeparator(directory); + const directoryKey = ignoreCase ? directory.toLowerCase() : directory; + const entryKey = ignoreCase ? entry.toLowerCase() : entry; + const directoryEntry = directorySet[directoryKey]; + const entries = type === "file" ? directoryEntry.files : directoryEntry.directories; + if (!ts.hasProperty(entries, entryKey)) { + entries[entryKey] = entry; + } + } + } +}); + diff --git a/tests/cases/unittests/session.ts b/tests/cases/unittests/session.ts index c1dfd508942fd..8e35baa391022 100644 --- a/tests/cases/unittests/session.ts +++ b/tests/cases/unittests/session.ts @@ -16,6 +16,8 @@ namespace ts.server { getExecutingFilePath(): string { return void 0; }, getCurrentDirectory(): string { return void 0; }, readDirectory(): string[] { return []; }, + readDirectoryNames(): string[] { return []; }, + readFileNames(): string[] { return []; }, exit(): void {} }; const mockLogger: Logger = { @@ -28,7 +30,7 @@ namespace ts.server { endGroup(): void {}, msg(s: string, type?: string): void {}, }; - + describe("the Session class", () => { let session: Session; let lastSent: protocol.Message; @@ -204,7 +206,7 @@ namespace ts.server { .to.throw(`Protocol handler already exists for command "${command}"`); }); }); - + describe("event", () => { it("can format event responses and send them", () => { const evt = "notify-test"; @@ -315,7 +317,7 @@ namespace ts.server { responseRequired: true })); } - + send(msg: protocol.Message) { this.client.handle(msg); } @@ -323,7 +325,7 @@ namespace ts.server { enqueue(msg: protocol.Request) { this.queue.unshift(msg); } - + handleRequest(msg: protocol.Request) { let response: protocol.Response; try { @@ -345,7 +347,7 @@ namespace ts.server { } } } - + class InProcClient { private server: InProcSession; private seq = 0; @@ -379,7 +381,7 @@ namespace ts.server { connect(session: InProcSession): void { this.server = session; } - + execute(command: string, args: any, callback: (resp: protocol.Response) => void): void { if (!this.server) { return; @@ -394,7 +396,7 @@ namespace ts.server { this.callbacks[this.seq] = callback; } }; - + it("can be constructed and respond to commands", (done) => { const cli = new InProcClient(); const session = new InProcSession(cli); @@ -402,23 +404,23 @@ namespace ts.server { data: true }; const toEvent = { - data: false + data: false }; let responses = 0; // Connect the client cli.connect(session); - + // Add an event handler cli.on("testevent", (eventinfo) => { expect(eventinfo).to.equal(toEvent); responses++; expect(responses).to.equal(1); }); - + // Trigger said event from the server session.event(toEvent, "testevent"); - + // Queue an echo command cli.execute("echo", toEcho, (resp) => { assert(resp.success, resp.message); @@ -426,7 +428,7 @@ namespace ts.server { expect(responses).to.equal(2); expect(resp.body).to.deep.equal(toEcho); }); - + // Queue a configure command cli.execute("configure", { hostInfo: "unit test", @@ -436,10 +438,10 @@ namespace ts.server { }, (resp) => { assert(resp.success, resp.message); responses++; - 
expect(responses).to.equal(3); + expect(responses).to.equal(3); done(); }); - + // Consume the queue and trigger the callbacks session.consumeQueue(); }); From 30575dbd7cc4ac62c044e6d1eef77de3affd11ce Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Mon, 7 Dec 2015 14:58:13 -0800 Subject: [PATCH 02/18] Added caching, more tests --- Jakefile.js | 3 +- src/compiler/commandLineParser.ts | 474 ++++++++++++++++------ src/compiler/core.ts | 88 ++++- src/compiler/diagnosticMessages.json | 4 + src/compiler/types.ts | 20 +- src/harness/external/chai.d.ts | 6 +- src/harness/harnessLanguageService.ts | 1 + src/harness/projectsRunner.ts | 5 + src/harness/rwcRunner.ts | 1 + src/server/editorServices.ts | 57 ++- src/services/shims.ts | 13 + tests/cases/unittests/expandFiles.ts | 540 ++++++++++++++++++-------- 12 files changed, 924 insertions(+), 288 deletions(-) diff --git a/Jakefile.js b/Jakefile.js index 6243ce8ff97ea..a3f1d31f26896 100644 --- a/Jakefile.js +++ b/Jakefile.js @@ -160,7 +160,8 @@ var harnessSources = harnessCoreSources.concat([ "reuseProgramStructure.ts", "cachingInServerLSHost.ts", "moduleResolution.ts", - "tsconfigParsing.ts" + "tsconfigParsing.ts", + "expandFiles.ts" ].map(function (f) { return path.join(unittestsDirectory, f); })).concat([ diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index f850ac00dba68..6499ec6327fc4 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -488,13 +488,15 @@ namespace ts { const { options: optionsFromJsonConfigFile, errors } = convertCompilerOptionsFromJson(json["compilerOptions"], basePath); const options = extend(existingOptions, optionsFromJsonConfigFile); + const { fileNames, wildcardDirectories } = getFileNames(); return { options, - fileNames: getFileNames(), - errors + fileNames, + errors, + wildcardDirectories }; - function getFileNames(): string[] { + function getFileNames(): ExpandResult { let fileNames: string[]; if (hasProperty(json, "files")) { if (isArray(json["files"])) { @@ -526,17 +528,7 @@ namespace ts { } if (fileNames === undefined && includeSpecs === undefined) { - includeSpecs = ["**/*.ts"]; - if (options.jsx) { - includeSpecs.push("**/*.tsx"); - } - - if (options.allowJs) { - includeSpecs.push("**/*.js"); - if (options.jsx) { - includeSpecs.push("**/*.jsx"); - } - } + includeSpecs = ["**/*"]; } return expandFiles(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors); @@ -593,11 +585,45 @@ namespace ts { // Simplified whitelist, forces escaping of any non-word (or digit), non-whitespace character. const reservedCharacterPattern = /[^\w\s]/g; - const enum ExpandResult { + const enum ExpansionState { Ok, Error } + interface ExpansionContext { + /** A pattern used to exclude a file specification. */ + excludePattern: RegExp; + /** Compiler options. */ + options: CompilerOptions; + /** The host used to resolve files and directories. */ + host: ParseConfigHost; + /** Errors to report. */ + errors: Diagnostic[]; + /** The set of literal files. */ + literalFiles: FileMap; + /** The set of files matching a wildcard. */ + wildcardFiles: FileMap; + /** Directories to be watched. */ + wildcardDirectories: FileMap; + /** Supported extensions. */ + supportedExtensions: string[]; + /** + * Path cache, used to reduce calls to the file system. `true` indicates a file exists, + * `false` indicates a file or directory does not exist. A DirectoryResult + * indicates the file and subdirectory names in a directory. 
*/
+ cache: FileMap;
+ }
+
+ const enum FileSystemEntryKind {
+ File,
+ Directory
+ }
+
+ interface DirectoryResult {
+ files?: string[];
+ directories?: string[];
+ }
+
 /**
 * Expands an array of file specifications.
 *
@@ -609,62 +635,118 @@ namespace ts {
 * @param host The host used to resolve files and directories.
 * @param errors An array for diagnostic reporting.
 */
- export function expandFiles(fileNames: string[], includeSpecs: string[], excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors?: Diagnostic[]): string[] {
+ export function expandFiles(fileNames: string[], includeSpecs: string[], excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors?: Diagnostic[]): ExpandResult {
 basePath = normalizePath(basePath);
 basePath = removeTrailingDirectorySeparator(basePath);
+ // The exclude spec list is converted into a regular expression, which allows us to quickly
+ // test whether a file or directory should be excluded before recursively traversing the
+ // file system.
 const excludePattern = includeSpecs ? createExcludeRegularExpression(excludeSpecs, basePath, options, host, errors) : undefined;
- const fileSet = createFileMap(host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper);
+ const keyMapper = host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper;
+
+ // Literal file names (provided via the "files" array in tsconfig.json) are stored in a
+ // file map with a possibly invariant key. We use this map later when including
+ // wildcard paths.
+ const literalFiles = createFileMap(keyMapper);
+
+ // Wildcard paths (provided via the "include" array in tsconfig.json) are stored in a
+ // file map with a possibly invariant key. We use this map to store paths matched
+ // via wildcard, and to handle extension priority.
+ const wildcardFiles = createFileMap(keyMapper);
+
+ // Wildcard directories (provided as part of a wildcard path) are stored in a
+ // file map that marks whether it was a regular wildcard match (with a `*` or `?` token),
+ // or a recursive directory. This information is used by filesystem watchers to monitor for
+ // new entries in these paths.
+ const wildcardDirectories = createFileMap(keyMapper);
+
+ // To reduce the overhead of disk I/O (and marshalling to managed code when hosted in
+ // Visual Studio), file system queries are cached during the expansion session.
+ // If present, a cache entry can be one of three values:
+ // - A `false` value indicates the file or directory did not exist.
+ // - A `true` value indicates the path is a file and it exists.
+ // - An object value indicates the path is a directory and exists. The object may have
+ // zero, one, or both of the following properties:
+ // - A "files" array, which contains the file names in the directory.
+ // - A "directories" array, which contains the subdirectory names in the directory.
+ const cache = createFileMap(keyMapper);
+
+ // Rather than re-querying them for each file and file spec, we query the supported extensions
+ // once and store them on the expansion context.
+ const supportedExtensions = getSupportedExtensions(options);
+
+ // The expansion context holds references to shared information for the various expansion
+ // operations to reduce the overhead of closures.
+ const context: ExpansionContext = { + options, + host, + errors, + excludePattern, + literalFiles, + wildcardFiles, + wildcardDirectories, + supportedExtensions, + cache + }; - // include every literal file. + // Literal files are always included verbatim. An "include" or "exclude" specification cannot + // remove a literal file. if (fileNames) { for (const fileName of fileNames) { const path = toPath(fileName, basePath, caseSensitiveKeyMapper); - if (!fileSet.contains(path)) { - fileSet.set(path, path); + if (!literalFiles.contains(path)) { + literalFiles.set(path, path); } } } - // expand and include the provided files into the file set. + // Each "include" specification is expanded and matching files are added. if (includeSpecs) { for (let includeSpec of includeSpecs) { includeSpec = normalizePath(includeSpec); includeSpec = removeTrailingDirectorySeparator(includeSpec); - expandFileSpec(basePath, includeSpec, 0, excludePattern, options, host, errors, fileSet); + expandFileSpec(includeSpec, basePath, 0, context); } } - const output = fileSet.reduce(addFileToOutput, []); - return output; + return { + fileNames: wildcardFiles.reduce(addFileToOutput, literalFiles.reduce(addFileToOutput, [])), + wildcardDirectories: wildcardDirectories.reduce>(addDirectoryToOutput, {}), + }; } /** * Expands a file specification with wildcards. * - * @param basePath The directory to expand. * @param fileSpec The original file specification. + * @param basePath The directory to expand. This path must exist. * @param start The starting offset in the file specification. - * @param excludePattern A pattern used to exclude a file specification. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - * @param errors An array for diagnostic reporting. - * @param fileSet The set of matching files. + * @param context The expansion context. * @param isExpandingRecursiveDirectory A value indicating whether the file specification includes a recursive directory wildcard prior to the start of this segment. */ - function expandFileSpec(basePath: string, fileSpec: string, start: number, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[], fileSet: FileMap, isExpandingRecursiveDirectory?: boolean): ExpandResult { + function expandFileSpec(fileSpec: string, basePath: Path, start: number, context: ExpansionContext, isExpandingRecursiveDirectory?: boolean): ExpansionState { + // A file specification must always point to a file. As a result, we always assume the + // path segment following the last directory separator points to a file. The only + // exception is when the final path segment is the recursive directory pattern "**", in + // which case we report an error. + const { host, options, errors, wildcardFiles, wildcardDirectories, excludePattern, cache, supportedExtensions } = context; + // Skip expansion if the base path matches an exclude pattern. - if (isExcludedPath(excludePattern, basePath)) { - return ExpandResult.Ok; + if (isExcludedPath(basePath, excludePattern)) { + return ExpansionState.Ok; } - // Find the offset of the next wildcard in the file specification + // Find the offset of the next wildcard in the file specification. If there are no more + // wildcards, we can include the file if it exists and isn't excluded. let offset = indexOfWildcard(fileSpec, start); if (offset < 0) { - // There were no more wildcards, so include the file. 
const path = toPath(fileSpec.substring(start), basePath, caseSensitiveKeyMapper); - includeFile(path, excludePattern, options, host, fileSet); - return ExpandResult.Ok; + if (!isExcludedPath(path, excludePattern) && pathExists(path, FileSystemEntryKind.File, context)) { + includeFile(path, context, /*wildcardHasExtension*/ true); + } + + return ExpansionState.Ok; } // Find the last directory separator before the wildcard to get the leading path. @@ -672,11 +754,11 @@ namespace ts { if (offset > start) { // The wildcard occurs in a later segment, include remaining path up to // wildcard in prefix. - basePath = combinePaths(basePath, fileSpec.substring(start, offset)); + basePath = toPath(fileSpec.substring(start, offset), basePath, caseSensitiveKeyMapper); // Skip this wildcard path if the base path now matches an exclude pattern. - if (isExcludedPath(excludePattern, basePath)) { - return ExpandResult.Ok; + if (isExcludedPath(basePath, excludePattern) || !pathExists(basePath, FileSystemEntryKind.Directory, context)) { + return ExpansionState.Ok; } start = offset + 1; @@ -687,23 +769,34 @@ namespace ts { // Check if the current offset is the beginning of a recursive directory pattern. if (isRecursiveDirectoryWildcard(fileSpec, start, offset)) { - if (offset >= fileSpec.length) { - // If there is no file specification following the recursive directory pattern - // we cannot match any files, so we will ignore this pattern. - return ExpandResult.Ok; - } - // Stop expansion if a file specification contains more than one recursive directory pattern. if (isExpandingRecursiveDirectory) { if (errors) { errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, fileSpec)); } - return ExpandResult.Error; + return ExpansionState.Error; + } + + if (offset >= fileSpec.length) { + // If there is no file specification following the recursive directory pattern + // then we report an error as we cannot match any files. + if (errors) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, fileSpec)); + } + + return ExpansionState.Error; } + // Keep track of the recursive wildcard directory + wildcardDirectories.set(basePath, WatchDirectoryFlags.Recursive); + // Expand the recursive directory pattern. - return expandRecursiveDirectory(basePath, fileSpec, offset + 1, excludePattern, options, host, errors, fileSet); + return expandRecursiveDirectory(fileSpec, basePath, offset + 1, context); + } + + if (!isExpandingRecursiveDirectory) { + wildcardDirectories.set(basePath, WatchDirectoryFlags.None); } // Match the entries in the directory against the wildcard pattern. @@ -712,103 +805,224 @@ namespace ts { // If there are no more directory separators (the offset is at the end of the file specification), then // this must be a file. if (offset >= fileSpec.length) { - const files = host.readFileNames(basePath); - for (const extension of getSupportedExtensions(options)) { - for (const file of files) { - if (fileExtensionIs(file, extension)) { - const path = toPath(file, basePath, caseSensitiveKeyMapper); - - // .ts extension would read the .d.ts extension files too but since .d.ts is lower priority extension, - // lets pick them when its turn comes up. 
- if (extension === ".ts" && fileExtensionIs(file, ".d.ts")) { - continue; - } + const wildcardHasExtension = fileSegmentHasExtension(fileSpec, start); + const fileNames = readDirectory(basePath, FileSystemEntryKind.File, context); + for (const fileName of fileNames) { + // Skip the file if it doesn't match the pattern. + if (!pattern.test(fileName)) { + continue; + } - // If this is one of the output extension (which would be .d.ts and .js if we are allowing compilation of js files) - // do not include this file if we included .ts or .tsx file with same base name as it could be output of the earlier compilation - if (extension === ".d.ts" || (options.allowJs && contains(supportedJavascriptExtensions, extension))) { - if (fileSet.contains(changeExtension(path, ".ts")) || fileSet.contains(changeExtension(path, ".tsx"))) { - continue; - } - } + const path = toPath(fileName, basePath, caseSensitiveKeyMapper); - // This wildcard has no further directory to process, so include the file. - includeFile(path, excludePattern, options, host, fileSet); - } + // If we have excluded this path, we should skip the file. + if (isExcludedPath(path, excludePattern)) { + continue; } + + // This wildcard has no further directory to process, so include the file. + includeFile(path, context, wildcardHasExtension); } } else { - const directories = host.readDirectoryNames(basePath); - for (const directory of directories) { - // If this was a directory, process the directory. - const path = toPath(directory, basePath, caseSensitiveKeyMapper); - if (expandFileSpec(path, fileSpec, offset + 1, excludePattern, options, host, errors, fileSet, isExpandingRecursiveDirectory) === ExpandResult.Error) { - return ExpandResult.Error; + const directoryNames = readDirectory(basePath, FileSystemEntryKind.Directory, context); + for (const directoryName of directoryNames) { + if (pattern.test(directoryName)) { + const newBasePath = toPath(directoryName, basePath, caseSensitiveKeyMapper); + + // Expand the entries in this directory. + if (expandFileSpec(fileSpec, newBasePath, offset + 1, context, isExpandingRecursiveDirectory) === ExpansionState.Error) { + return ExpansionState.Error; + } } } } - return ExpandResult.Ok; + return ExpansionState.Ok; } /** * Expands a `**` recursive directory wildcard. * - * @param basePath The directory to recursively expand. * @param fileSpec The original file specification. + * @param basePath The directory to recursively expand. * @param start The starting offset in the file specification. - * @param excludePattern A pattern used to exclude a file specification. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - * @param errors An array for diagnostic reporting. - * @param fileSet The set of matching files. + * @param context The expansion context. */ - function expandRecursiveDirectory(basePath: string, fileSpec: string, start: number, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[], fileSet: FileMap): ExpandResult { - // expand the non-recursive part of the file specification against the prefix path. - if (expandFileSpec(basePath, fileSpec, start, excludePattern, options, host, errors, fileSet, /*isExpandingRecursiveDirectory*/ true) === ExpandResult.Error) { - return ExpandResult.Error; + function expandRecursiveDirectory(fileSpec: string, basePath: Path, start: number, context: ExpansionContext): ExpansionState { + // Skip the directory if it is excluded. 
+ if (isExcludedPath(basePath, context.excludePattern)) {
+ return ExpansionState.Ok;
+ }
+
+ // Expand the non-recursive part of the file specification against the prefix path.
+ if (expandFileSpec(fileSpec, basePath, start, context, /*isExpandingRecursiveDirectory*/ true) === ExpansionState.Error) {
+ return ExpansionState.Error;
 }
 // Recursively expand each subdirectory.
- const directories = host.readDirectoryNames(basePath);
- for (const entry of directories) {
- const path = combinePaths(basePath, entry);
- if (expandRecursiveDirectory(path, fileSpec, start, excludePattern, options, host, errors, fileSet) === ExpandResult.Error) {
- return ExpandResult.Error;
+ const directoryNames = readDirectory(basePath, FileSystemEntryKind.Directory, context);
+ for (const directoryName of directoryNames) {
+ const newBasePath = toPath(directoryName, basePath, caseSensitiveKeyMapper);
+ if (expandRecursiveDirectory(fileSpec, newBasePath, start, context) === ExpansionState.Error) {
+ return ExpansionState.Error;
 }
 }
- return ExpandResult.Ok;
+ return ExpansionState.Ok;
 }
 /**
 * Attempts to include a file in a file set.
 *
 * @param file The file to include.
- * @param excludePattern A pattern used to exclude a file specification.
- * @param options Compiler options.
- * @param host The host used to resolve files and directories.
- * @param fileSet The set of matching files.
+ * @param context The expansion context.
+ * @param wildcardHasExtension A value indicating whether the wildcard supplied an explicit extension.
 */
- function includeFile(file: Path, excludePattern: RegExp, options: CompilerOptions, host: ParseConfigHost, fileSet: FileMap): void {
- // Ignore the file if it should be excluded.
- if (isExcludedPath(excludePattern, file)) {
+ function includeFile(file: Path, context: ExpansionContext, wildcardHasExtension: boolean): void {
+ const { options, literalFiles, wildcardFiles, excludePattern, supportedExtensions } = context;
+
+ // Ignore the file if it does not have a supported extension.
+ if ((!wildcardHasExtension || !options.allowNonTsExtensions) && !isSupportedSourceFileName(file, options)) {
 return;
 }
- // Ignore the file if it doesn't exist.
- if (!host.fileExists(file)) {
+ // If we have already included a literal or wildcard path with a
+ // higher priority extension, we should skip this file.
+ //
+ // This handles cases where we may encounter both .ts and
+ // .d.ts (or .js if "allowJs" is enabled) in the same
+ // directory when they are compilation outputs.
+ const extensionPriority = getExtensionPriority(file, supportedExtensions);
+ if (hasFileWithHigherPriorityExtension(file, extensionPriority, context)) {
 return;
 }
- // Ignore the file if it does not have a supported extension.
- if (!options.allowNonTsExtensions && !isSupportedSourceFileName(file, options)) {
- return;
+ // We may have included a wildcard path with a lower priority
+ // extension due to the user-defined order of entries in the
+ // "include" array. If there is a lower priority extension in the
+ // same directory, we should remove it.
+ removeWildcardFilesWithLowerPriorityExtension(file, extensionPriority, context);
+
+ if (!literalFiles.contains(file) && !wildcardFiles.contains(file)) {
+ wildcardFiles.set(file, file);
 }
+ }
+
+ /**
+ * Tests whether a path exists and is a specific kind of item. Results are
+ * cached for performance.
+ *
+ * @param path The path to test.
+ * @param kind The kind of file system entry to find.
+ * @param context The expansion context.
 */
+ function pathExists(path: Path, kind: FileSystemEntryKind, context: ExpansionContext) {
+ const { cache, host } = context;
+ const entry = cache.get(path);
+ if (entry === false) {
+ // If the entry is strictly `false` then the path doesn't exist, regardless of its kind.
+ return false;
+ }
+ else if (entry === true) {
+ // If the entry is strictly `true` then a file exists at this path.
+ return kind === FileSystemEntryKind.File;
+ }
+ else if (typeof entry === "object") {
+ // If the entry is an object, then a directory exists at this path.
+ return kind === FileSystemEntryKind.Directory;
+ }
+ else {
+ // The entry does not exist in the cache, so we need to check the host.
+ if (kind === FileSystemEntryKind.File) {
+ const result = host.fileExists(path);
+ cache.set(path, result);
+ return result;
+ }
+ else if (kind === FileSystemEntryKind.Directory) {
+ const result = host.directoryExists(path);
+ cache.set(path, result ? {} : false);
+ return result;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Reads the contents of a directory for a specific kind of item. Results are
+ * cached for performance.
+ *
+ * @param basePath The path to the directory. The path must already exist.
+ * @param kind The kind of file system entry to find.
+ * @param context The expansion context.
+ */
+ function readDirectory(basePath: Path, kind: FileSystemEntryKind, context: ExpansionContext) {
+ const { cache, host } = context;
+
+ let entry = cache.get(basePath);
+ if (entry === undefined) {
+ entry = {};
+ cache.set(basePath, entry);
+ }
+
+ if (typeof entry === "object") {
+ if (kind === FileSystemEntryKind.File) {
+ if (entry.files === undefined) {
+ entry.files = host.readFileNames(basePath);
+ }
+
+ return entry.files;
+ }
+ else if (kind === FileSystemEntryKind.Directory) {
+ if (entry.directories === undefined) {
+ entry.directories = host.readDirectoryNames(basePath);
+ }
+
+ return entry.directories;
+ }
+ }
+
+ return [];
+ }
- if (!fileSet.contains(file)) {
- fileSet.set(file, file);
+ /**
+ * Determines whether a literal or wildcard file has already been included that has a higher
+ * extension priority.
+ *
+ * @param file The path to the file.
+ * @param extensionPriority The priority of the extension.
+ * @param context The expansion context.
+ */
+ function hasFileWithHigherPriorityExtension(file: Path, extensionPriority: ExtensionPriority, context: ExpansionContext) {
+ const { literalFiles, wildcardFiles, supportedExtensions } = context;
+ const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority);
+ for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; ++i) {
+ const higherPriorityExtension = supportedExtensions[i];
+ const higherPriorityPath = changeExtension(file, higherPriorityExtension);
+ if (literalFiles.contains(higherPriorityPath) || wildcardFiles.contains(higherPriorityPath)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Removes files included via wildcard expansion with a lower extension priority that have
+ * already been included.
+ *
+ * @param file The path to the file.
+ * @param extensionPriority The priority of the extension.
+ * @param context The expansion context.
+ */ + function removeWildcardFilesWithLowerPriorityExtension(file: Path, extensionPriority: ExtensionPriority, context: ExpansionContext) { + const { wildcardFiles, supportedExtensions } = context; + const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority); + for (let i = nextExtensionPriority; i < supportedExtensions.length; ++i) { + const lowerPriorityExtension = supportedExtensions[i]; + const lowerPriorityPath = changeExtension(file, lowerPriorityExtension); + wildcardFiles.remove(lowerPriorityPath); } } @@ -823,16 +1037,48 @@ namespace ts { return output; } + /** + * Adds a watched directory to an output map. + * + * @param output The output map. + * @param flags The directory flags. + * @param directory The directory path. + */ + function addDirectoryToOutput(output: Map, flags: WatchDirectoryFlags, directory: string) { + output[directory] = flags; + return output; + } + /** * Determines whether a path should be excluded. * - * @param excludePattern A pattern used to exclude a file specification. * @param path The path to test for exclusion. + * @param excludePattern A pattern used to exclude a file specification. */ - function isExcludedPath(excludePattern: RegExp, path: string) { + function isExcludedPath(path: string, excludePattern: RegExp) { return excludePattern ? excludePattern.test(path) : false; } + /** + * Determines whether a file segment contains a valid extension. + * + * @param fileSpec The file specification. + * @param segmentStart The offset to the start of the file segment in the specification. + */ + function fileSegmentHasExtension(fileSpec: string, segmentStart: number) { + // if the final path segment does not have a . token, the file does not have an extension. + if (fileSpec.indexOf(".", segmentStart) === -1) { + return false; + } + + // if the extension for the final path segment is (".*"), then the file does not have an extension. + if (fileExtensionIs(fileSpec, ".*")) { + return false; + } + + return true; + } + /** * Creates a regular expression from a glob-style wildcard. * @@ -858,17 +1104,17 @@ namespace ts { let offset = indexOfWildcard(fileSpec, start); while (offset >= 0 && offset < end) { if (offset > start) { - // Escape and append the non-wildcard portion to the regular expression + // Escape and append the non-wildcard portion to the regular expression. pattern += escapeRegularExpressionText(fileSpec, start, offset); } const charCode = fileSpec.charCodeAt(offset); if (charCode === CharacterCodes.asterisk) { - // Append a multi-character (zero or more characters) pattern to the regular expression - pattern += "[^/]*"; + // Append a multi-character (zero or more characters) pattern to the regular expression. + pattern += "[^/]*?"; } else if (charCode === CharacterCodes.question) { - // Append a single-character (zero or one character) pattern to the regular expression + // Append a single-character pattern to the regular expression. pattern += "[^/]"; } @@ -1054,8 +1300,8 @@ namespace ts { * @param fileSpec The file specification. * @param start The starting offset in the file specification. 
*/ - function indexOfWildcard(fileSpec: string, start: number): number { - for (let i = start; i < fileSpec.length; ++i) { + function indexOfWildcard(fileSpec: string, start: number, end: number = fileSpec.length): number { + for (let i = start; i < end; ++i) { const ch = fileSpec.charCodeAt(i); if (ch === CharacterCodes.asterisk || ch === CharacterCodes.question) { return i; diff --git a/src/compiler/core.ts b/src/compiler/core.ts index aae4b54b5c7e6..998766e8d9deb 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -26,7 +26,8 @@ namespace ts { remove, forEachValue: forEachValueInMap, reduce, - clear + clear, + mergeFrom }; function forEachValueInMap(f: (key: Path, value: T) => void) { @@ -61,6 +62,16 @@ namespace ts { files = {}; } + function mergeFrom(other: FileMap) { + other.forEachValue(mergeFromOther); + } + + function mergeFromOther(key: Path, value: T) { + if (!contains(key)) { + set(key, value); + } + } + function toKey(path: Path): string { return keyMapper ? keyMapper(path) : path; } @@ -822,6 +833,28 @@ namespace ts { return compareValues(aComponents.length, bComponents.length); } + export function containsPath(parent: string, child: string, currentDirectory: string, ignoreCase?: boolean) { + if (parent === undefined || child === undefined) return false; + if (parent === child) return true; + parent = removeTrailingDirectorySeparator(parent); + child = removeTrailingDirectorySeparator(child); + if (parent === child) return true; + const parentComponents = getNormalizedPathComponents(parent, currentDirectory); + const childComponents = getNormalizedPathComponents(child, currentDirectory); + if (childComponents.length < parentComponents.length) { + return false; + } + + for (let i = 0; i < parentComponents.length; ++i) { + const result = compareStrings(parentComponents[i], childComponents[i], ignoreCase); + if (result !== Comparison.EqualTo) { + return false; + } + } + + return true; + } + export function fileExtensionIs(path: string, extension: string): boolean { const pathLen = path.length; const extLen = extension.length; @@ -850,6 +883,59 @@ namespace ts { return false; } + /** + * Extension boundaries by priority. Lower numbers indicate higher priorities, and are + * aligned to the offset of the highest priority extension in the + * allSupportedExtensions array. + */ + export const enum ExtensionPriority { + TypeScriptFiles = 0, + DeclarationAndJavaScriptFiles = 2, + Limit = 5, + + Highest = TypeScriptFiles, + Lowest = DeclarationAndJavaScriptFiles, + } + + export function getExtensionPriority(path: string, supportedExtensions: string[]): ExtensionPriority { + for (let i = supportedExtensions.length - 1; i >= 0; i--) { + if (fileExtensionIs(path, supportedExtensions[i])) { + return adjustExtensionPriority(i); + } + } + + // If its not in the list of supported extensions, this is likely a + // TypeScript file with a non-ts extension + return ExtensionPriority.Highest; + } + + /** + * Adjusts an extension priority to be the highest priority within the same range. + */ + export function adjustExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority { + if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) { + return ExtensionPriority.TypeScriptFiles; + } + else if (extensionPriority < ExtensionPriority.Limit) { + return ExtensionPriority.DeclarationAndJavaScriptFiles; + } + else { + return ExtensionPriority.Limit; + } + } + + /** + * Gets the next lowest extension priority for a given priority. 
+ */ + export function getNextLowestExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority { + if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) { + return ExtensionPriority.DeclarationAndJavaScriptFiles; + } + else { + return ExtensionPriority.Limit; + } + } + const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"]; export function removeFileExtension(path: string): string { for (const ext of extensionsToRemove) { diff --git a/src/compiler/diagnosticMessages.json b/src/compiler/diagnosticMessages.json index 6a1d0e5ce650f..c607bace4efec 100644 --- a/src/compiler/diagnosticMessages.json +++ b/src/compiler/diagnosticMessages.json @@ -2020,6 +2020,10 @@ "category": "Error", "code": 5009 }, + "File specification cannot end in a recursive directory wildcard ('**'): '{0}'.": { + "category": "Error", + "code": 5010 + }, "File specification cannot contain multiple recursive directory wildcards ('**'): '{0}'.": { "category": "Error", "code": 5011 diff --git a/src/compiler/types.ts b/src/compiler/types.ts index ffd9ddc206d1d..3d16f70d04995 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -15,6 +15,7 @@ namespace ts { forEachValue(f: (key: Path, v: T) => void): void; reduce(f: (memo: U, value: T, key: Path) => U, initial: U): U; + mergeFrom(other: FileMap): void; clear(): void; } @@ -1588,11 +1589,17 @@ namespace ts { readDirectory(rootDir: string, extension: string, exclude: string[]): string[]; /** - * Gets a value indicating whether the specified path exists. + * Gets a value indicating whether the specified path exists and is a file. * @param path The path to test. */ fileExists(path: string): boolean; + /** + * Gets a value indicating whether the specified path exists and is a directory. + * @param path The path to test. + */ + directoryExists(path: string): boolean; + /** * Reads the files names in the directory. * @param rootDir The directory path. 
@@ -2460,6 +2467,17 @@ namespace ts { options: CompilerOptions; fileNames: string[]; errors: Diagnostic[]; + wildcardDirectories?: Map; + } + + export const enum WatchDirectoryFlags { + None = 0, + Recursive = 1 << 0, + } + + export interface ExpandResult { + fileNames: string[]; + wildcardDirectories: Map; } /* @internal */ diff --git a/src/harness/external/chai.d.ts b/src/harness/external/chai.d.ts index fc25980d3a099..fba0024d254ee 100644 --- a/src/harness/external/chai.d.ts +++ b/src/harness/external/chai.d.ts @@ -167,12 +167,14 @@ declare module chai { module assert { function equal(actual: any, expected: any, message?: string): void; function notEqual(actual: any, expected: any, message?: string): void; - function deepEqual(actual: any, expected: any, message?: string): void; - function notDeepEqual(actual: any, expected: any, message?: string): void; + function deepEqual(actual: T, expected: T, message?: string): void; + function notDeepEqual(actual: T, expected: T, message?: string): void; function lengthOf(object: any[], length: number, message?: string): void; function isTrue(value: any, message?: string): void; function isFalse(value: any, message?: string): void; function isNull(value: any, message?: string): void; function isNotNull(value: any, message?: string): void; + function isUndefined(value: any, message?: string): void; + function isDefined(value: any, message?: string): void; } } \ No newline at end of file diff --git a/src/harness/harnessLanguageService.ts b/src/harness/harnessLanguageService.ts index e12105e3393ea..521d17cfc8fce 100644 --- a/src/harness/harnessLanguageService.ts +++ b/src/harness/harnessLanguageService.ts @@ -266,6 +266,7 @@ namespace Harness.LanguageService { throw new Error("Not implemented."); } fileExists(fileName: string) { return this.getScriptInfo(fileName) !== undefined; } + directoryExists(directoryName: string) { return false; } readFile(fileName: string) { const snapshot = this.nativeHost.getScriptSnapshot(fileName); return snapshot && snapshot.getText(0, snapshot.getLength()); diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index 655c2f07c2bd9..76c7952428a4b 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -213,6 +213,7 @@ class ProjectRunner extends RunnerBase { const configParseHost: ts.ParseConfigHost = { useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), fileExists, + directoryExists, readDirectory, readDirectoryNames, readFileNames @@ -297,6 +298,10 @@ class ProjectRunner extends RunnerBase { return Harness.IO.fileExists(getFileNameInTheProjectTest(fileName)); } + function directoryExists(directoryName: string): boolean { + return Harness.IO.directoryExists(getFileNameInTheProjectTest(directoryName)); + } + function getSourceFileText(fileName: string): string { let text: string = undefined; try { diff --git a/src/harness/rwcRunner.ts b/src/harness/rwcRunner.ts index 5b4e2a7a3d96d..826f6b6726068 100644 --- a/src/harness/rwcRunner.ts +++ b/src/harness/rwcRunner.ts @@ -79,6 +79,7 @@ namespace RWC { const configParseHost: ts.ParseConfigHost = { useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), fileExists: Harness.IO.fileExists, + directoryExists: Harness.IO.directoryExists, readDirectory: Harness.IO.readDirectory, readDirectoryNames: Harness.IO.readDirectoryNames, readFileNames: Harness.IO.readFileNames, diff --git a/src/server/editorServices.ts b/src/server/editorServices.ts index 0305b7595c276..0e1c1c97f0370 100644 --- 
a/src/server/editorServices.ts +++ b/src/server/editorServices.ts @@ -119,7 +119,7 @@ namespace ts.server { if (!resolution) { const existingResolution = currentResolutionsInFile && ts.lookUp(currentResolutionsInFile, moduleName); if (moduleResolutionIsValid(existingResolution)) { - // ok, it is safe to use existing module resolution results + // ok, it is safe to use existing module resolution results resolution = existingResolution; } else { @@ -144,8 +144,8 @@ namespace ts.server { } if (resolution.resolvedModule) { - // TODO: consider checking failedLookupLocations - // TODO: use lastCheckTime to track expiration for module name resolution + // TODO: consider checking failedLookupLocations + // TODO: use lastCheckTime to track expiration for module name resolution return true; } @@ -354,6 +354,7 @@ namespace ts.server { export interface ProjectOptions { // these fields can be present in the project file files?: string[]; + wildcardDirectories?: ts.Map; compilerOptions?: ts.CompilerOptions; } @@ -362,6 +363,7 @@ namespace ts.server { projectFilename: string; projectFileWatcher: FileWatcher; directoryWatcher: FileWatcher; + directoriesWatchedForWildcards: Map; // Used to keep track of what directories are watched for this project directoriesWatchedForTsconfig: string[] = []; program: ts.Program; @@ -510,7 +512,7 @@ namespace ts.server { openFileRootsConfigured: ScriptInfo[] = []; // a path to directory watcher map that detects added tsconfig files directoryWatchersForTsconfig: ts.Map = {}; - // count of how many projects are using the directory watcher. If the + // count of how many projects are using the directory watcher. If the // number becomes 0 for a watcher, then we should close it. directoryWatchersRefCount: ts.Map = {}; hostConfiguration: HostConfiguration; @@ -590,11 +592,11 @@ namespace ts.server { // We check if the project file list has changed. If so, we update the project. if (!arrayIsEqualTo(currentRootFiles && currentRootFiles.sort(), newRootFiles && newRootFiles.sort())) { // For configured projects, the change is made outside the tsconfig file, and - // it is not likely to affect the project for other files opened by the client. We can + // it is not likely to affect the project for other files opened by the client. We can // just update the current project. this.updateConfiguredProject(project); - // Call updateProjectStructure to clean up inferred projects we may have + // Call updateProjectStructure to clean up inferred projects we may have // created for the new files this.updateProjectStructure(); } @@ -739,6 +741,8 @@ namespace ts.server { if (project.isConfiguredProject()) { project.projectFileWatcher.close(); project.directoryWatcher.close(); + forEachValue(project.directoriesWatchedForWildcards, watcher => { watcher.close(); }); + delete project.directoriesWatchedForWildcards; this.configuredProjects = copyListRemovingItem(project, this.configuredProjects); } else { @@ -816,8 +820,8 @@ namespace ts.server { * @param info The file that has been closed or newly configured */ closeOpenFile(info: ScriptInfo) { - // Closing file should trigger re-reading the file content from disk. This is - // because the user may chose to discard the buffer content before saving + // Closing file should trigger re-reading the file content from disk. This is + // because the user may chose to discard the buffer content before saving // to the disk, and the server's version of the file can be out of sync. 
info.svc.reloadFromFile(info.fileName); @@ -915,8 +919,8 @@ namespace ts.server { } /** - * This function is to update the project structure for every projects. - * It is called on the premise that all the configured projects are + * This function is to update the project structure for every projects. + * It is called on the premise that all the configured projects are * up to date. */ updateProjectStructure() { @@ -970,7 +974,7 @@ namespace ts.server { if (rootFile.defaultProject && rootFile.defaultProject.isConfiguredProject()) { // If the root file has already been added into a configured project, - // meaning the original inferred project is gone already. + // meaning the original inferred project is gone already. if (!rootedProject.isConfiguredProject()) { this.removeProject(rootedProject); } @@ -1075,9 +1079,9 @@ namespace ts.server { } /** - * This function tries to search for a tsconfig.json for the given file. If we found it, + * This function tries to search for a tsconfig.json for the given file. If we found it, * we first detect if there is already a configured project created for it: if so, we re-read - * the tsconfig file content and update the project; otherwise we create a new one. + * the tsconfig file content and update the project; otherwise we create a new one. */ openOrUpdateConfiguredProjectForFile(fileName: string) { const searchPath = ts.normalizePath(getDirectoryPath(fileName)); @@ -1215,7 +1219,8 @@ namespace ts.server { else { const projectOptions: ProjectOptions = { files: parsedCommandLine.fileNames, - compilerOptions: parsedCommandLine.options + wildcardDirectories: parsedCommandLine.wildcardDirectories, + compilerOptions: parsedCommandLine.options, }; return { succeeded: true, projectOptions }; } @@ -1241,12 +1246,30 @@ namespace ts.server { } project.finishGraph(); project.projectFileWatcher = this.host.watchFile(configFilename, _ => this.watchedProjectConfigFileChanged(project)); - this.log("Add recursive watcher for: " + ts.getDirectoryPath(configFilename)); + + const configDirectoryPath = ts.getDirectoryPath(configFilename); + + this.log("Add recursive watcher for: " + configDirectoryPath); project.directoryWatcher = this.host.watchDirectory( - ts.getDirectoryPath(configFilename), + configDirectoryPath, path => this.directoryWatchedForSourceFilesChanged(project, path), /*recursive*/ true ); + + project.directoriesWatchedForWildcards = reduceProperties(projectOptions.wildcardDirectories, (watchers, flag, directory) => { + if (comparePaths(configDirectoryPath, directory, ".", !this.host.useCaseSensitiveFileNames) !== Comparison.EqualTo) { + const recursive = (flag & WatchDirectoryFlags.Recursive) !== 0; + this.log(`Add ${ recursive ? "recursive " : ""}watcher for: ${directory}`); + watchers[directory] = this.host.watchDirectory( + directory, + path => this.directoryWatchedForSourceFilesChanged(project, path), + recursive + ); + } + + return watchers; + }, >{}); + return { success: true, project: project }; } } @@ -1280,7 +1303,7 @@ namespace ts.server { info = this.openFile(fileName, /*openedByClient*/ false); } else { - // if the root file was opened by client, it would belong to either + // if the root file was opened by client, it would belong to either // openFileRoots or openFileReferenced. 
if (info.isOpen) { if (this.openFileRoots.indexOf(info) >= 0) { diff --git a/src/services/shims.ts b/src/services/shims.ts index 4c35f5aaf1b64..a7f85d2445230 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -75,6 +75,7 @@ namespace ts { readDirectory(rootDir: string, extension: string, exclude?: string): string; readDirectoryNames?(rootDir: string): string; readFileNames?(rootDir: string): string; + directoryExists?(path: string): boolean; useCaseSensitiveFileNames?: boolean; } @@ -473,6 +474,18 @@ namespace ts { return this.shimHost.fileExists(fileName); } + public directoryExists(directoryName: string): boolean { + if (this.shimHost.directoryExists) { + return this.shimHost.directoryExists(directoryName); + } + + if (sys) { + return sys.directoryExists(directoryName); + } + + return false; + } + public readFile(fileName: string): string { return this.shimHost.readFile(fileName); } diff --git a/tests/cases/unittests/expandFiles.ts b/tests/cases/unittests/expandFiles.ts index a9d12a7326057..3aac938f178ec 100644 --- a/tests/cases/unittests/expandFiles.ts +++ b/tests/cases/unittests/expandFiles.ts @@ -1,164 +1,395 @@ /// /// -describe("expandFiles", () => { - it("fail", () => { - assert.isTrue(false, "just checking"); - }); +namespace ts { + const caseInsensitiveBasePath = "c:/dev/"; + const caseInsensitiveHost = createMockParseConfigHost(/*ignoreCase*/ true, caseInsensitiveBasePath, [ + "a.ts", + "a.d.ts", + "a.js", + "b.ts", + "b.js", + "c.d.ts", + "z/a.ts", + "z/abz.ts", + "z/aba.ts", + "z/b.ts", + "z/bbz.ts", + "z/bba.ts", + "x/a.ts", + "x/aa.ts", + "x/b.ts", + "x/y/a.ts", + "x/y/b.ts", + "js/a.js", + "js/b.js", + ]); - const basePath = "c:/dev/"; - const caseInsensitiveHost = createMockParseConfigHost( - basePath, - /*files*/ [ - "c:/dev/a.ts", - "c:/dev/a.d.ts", - "c:/dev/a.js", - "c:/dev/b.ts", - "c:/dev/b.js", - "c:/dev/c.d.ts", - "c:/dev/z/a.ts", - "c:/dev/z/abz.ts", - "c:/dev/z/aba.ts", - "c:/dev/z/b.ts", - "c:/dev/z/bbz.ts", - "c:/dev/z/bba.ts", - "c:/dev/x/a.ts", - "c:/dev/x/aa.ts", - "c:/dev/x/b.ts", - "c:/dev/x/y/a.ts", - "c:/dev/x/y/b.ts" - ], - /*ignoreCase*/ true); - - const caseSensitiveHost = createMockParseConfigHost( - basePath, - /*files*/ [ - "c:/dev/a.ts", - "c:/dev/a.d.ts", - "c:/dev/a.js", - "c:/dev/b.ts", - "c:/dev/b.js", - "c:/dev/A.ts", - "c:/dev/B.ts", - "c:/dev/c.d.ts", - "c:/dev/z/a.ts", - "c:/dev/z/abz.ts", - "c:/dev/z/aba.ts", - "c:/dev/z/b.ts", - "c:/dev/z/bbz.ts", - "c:/dev/z/bba.ts", - "c:/dev/x/a.ts", - "c:/dev/x/b.ts", - "c:/dev/x/y/a.ts", - "c:/dev/x/y/b.ts", - ], - /*ignoreCase*/ false); - - const expect = _chai.expect; - describe("with literal file list", () => { - it("without exclusions", () => { - const fileNames = ["a.ts", "b.ts"]; - const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); - }); - it("missing files are still present", () => { - const fileNames = ["z.ts", "x.ts"]; - const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/z.ts", "c:/dev/x.ts"]); - }); - it("are not removed due to excludes", () => { - const fileNames = ["a.ts", "b.ts"]; - const excludeSpecs = ["b.ts"]; - const results = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, excludeSpecs, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); - }); - }); + const 
caseSensitiveBasePath = "/dev/"; + const caseSensitiveHost = createMockParseConfigHost(/*ignoreCase*/ false, caseSensitiveBasePath, [ + "a.ts", + "a.d.ts", + "a.js", + "b.ts", + "b.js", + "A.ts", + "B.ts", + "c.d.ts", + "z/a.ts", + "z/abz.ts", + "z/aba.ts", + "z/b.ts", + "z/bbz.ts", + "z/bba.ts", + "x/a.ts", + "x/b.ts", + "x/y/a.ts", + "x/y/b.ts", + "js/a.js", + "js/b.js", + ]); - describe("with literal include list", () => { - it("without exclusions", () => { - const includeSpecs = ["a.ts", "b.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts"]); - }); - it("with non .ts file extensions are excluded", () => { - const includeSpecs = ["a.js", "b.js"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, []); - }); - it("with missing files are excluded", () => { - const includeSpecs = ["z.ts", "x.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, []); - }); - it("with literal excludes", () => { - const includeSpecs = ["a.ts", "b.ts"]; - const excludeSpecs = ["b.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts"]); - }); - it("with wildcard excludes", () => { - const includeSpecs = ["a.ts", "b.ts", "z/a.ts", "z/abz.ts", "z/aba.ts", "x/b.ts"]; - const excludeSpecs = ["*.ts", "z/??z.ts", "*/b.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/z/a.ts", "c:/dev/z/aba.ts"]); - }); - it("with recursive excludes", () => { - const includeSpecs = ["a.ts", "b.ts", "x/a.ts", "x/b.ts", "x/y/a.ts", "x/y/b.ts"]; - const excludeSpecs = ["**/b.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts"]); - }); - it("with case sensitive exclude", () => { - const includeSpecs = ["B.ts"]; - const excludeSpecs = ["**/b.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, basePath, {}, caseSensitiveHost); - assert.deepEqual(results, ["c:/dev/B.ts"]); - }); - }); + const caseInsensitiveMixedExtensionHost = createMockParseConfigHost(/*ignoreCase*/ true, caseInsensitiveBasePath, [ + "a.ts", + "a.d.ts", + "a.js", + "b.tsx", + "b.d.ts", + "b.jsx", + "c.tsx", + "c.js", + "d.js", + "e.jsx", + "f.other" + ]); - describe("with wildcard include list", () => { - it("same named declarations are excluded", () => { - const includeSpecs = ["*.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"]); - }); - it("`*` matches only ts files", () => { - const includeSpecs = ["*"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"]); + describe("expandFiles", () => { + describe("with literal file list", () => { + it("without 
exclusions", () => { + const fileNames = ["a.ts", "b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("missing files are still present", () => { + const fileNames = ["z.ts", "x.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/z.ts", "c:/dev/x.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("are not removed due to excludes", () => { + const fileNames = ["a.ts", "b.ts"]; + const excludeSpecs = ["b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); }); - it("`?` matches only a single character", () => { - const includeSpecs = ["x/?.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/x/a.ts", "c:/dev/x/b.ts"]); - }); - it("with recursive directory", () => { - const includeSpecs = ["**/a.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts", "c:/dev/z/a.ts"]); - }); - it("case sensitive", () => { - const includeSpecs = ["**/A.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseSensitiveHost); - assert.deepEqual(results, ["c:/dev/A.ts"]); + + describe("with literal include list", () => { + it("without exclusions", () => { + const includeSpecs = ["a.ts", "b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with non .ts file extensions are excluded", () => { + const includeSpecs = ["a.js", "b.js"]; + const expected: ts.ExpandResult = { + fileNames: [], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with missing files are excluded", () => { + const includeSpecs = ["z.ts", "x.ts"]; + const expected: ts.ExpandResult = { + fileNames: [], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with literal excludes", () => { + const includeSpecs = ["a.ts", "b.ts"]; + const excludeSpecs = ["b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, 
caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with wildcard excludes", () => { + const includeSpecs = ["a.ts", "b.ts", "z/a.ts", "z/abz.ts", "z/aba.ts", "x/b.ts"]; + const excludeSpecs = ["*.ts", "z/??z.ts", "*/b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/z/a.ts", "c:/dev/z/aba.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with recursive excludes", () => { + const includeSpecs = ["a.ts", "b.ts", "x/a.ts", "x/b.ts", "x/y/a.ts", "x/y/b.ts"]; + const excludeSpecs = ["**/b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with case sensitive exclude", () => { + const includeSpecs = ["B.ts"]; + const excludeSpecs = ["**/b.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["/dev/B.ts"], + wildcardDirectories: {}, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseSensitiveBasePath, {}, caseSensitiveHost); + assert.deepEqual(actual, expected); + }); }); - it("with missing files are excluded", () => { - const includeSpecs = ["*/z.ts"]; - const results = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, []); + + describe("with wildcard include list", () => { + it("same named declarations are excluded", () => { + const includeSpecs = ["*.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("`*` matches only ts files", () => { + const includeSpecs = ["*"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("`?` matches only a single character", () => { + const includeSpecs = ["x/?.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/x/a.ts", "c:/dev/x/b.ts"], + wildcardDirectories: { + "c:/dev/x": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with recursive directory", () => { + const includeSpecs = ["**/a.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts", "c:/dev/z/a.ts"], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("case sensitive", 
() => { + const includeSpecs = ["**/A.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["/dev/A.ts"], + wildcardDirectories: { + "/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseSensitiveBasePath, {}, caseSensitiveHost); + assert.deepEqual(actual, expected); + }); + it("with missing files are excluded", () => { + const includeSpecs = ["*/z.ts"]; + const expected: ts.ExpandResult = { + fileNames: [], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("always include literal files", () => { + const fileNames = ["a.ts"]; + const includeSpecs = ["*/z.ts"]; + const excludeSpecs = ["**/a.ts"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/a.ts"], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.expandFiles(fileNames, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("exclude folders", () => { + const includeSpecs = ["**/*"]; + const excludeSpecs = ["z", "x"]; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("exclude .js files when allowJs=false", () => { + const includeSpecs = ["js/*"]; + const expected: ts.ExpandResult = { + fileNames: [], + wildcardDirectories: { + "c:/dev/js": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); + it("include .js files when allowJs=true", () => { + const includeSpecs = ["js/*"]; + const expected: ts.ExpandResult = { + fileNames: ["c:/dev/js/a.js", "c:/dev/js/b.js"], + wildcardDirectories: { + "c:/dev/js": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, { allowJs: true }, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); }); - it("always include literal files", () => { - const fileNames = ["a.ts"]; - const includeSpecs = ["*/z.ts"]; - const excludeSpecs = ["**/a.ts"]; - const results = ts.expandFiles(fileNames, includeSpecs, excludeSpecs, basePath, {}, caseInsensitiveHost); - assert.deepEqual(results, ["c:/dev/a.ts"]); + + describe("when called from parseJsonConfigFileContent", () => { + it("with jsx=none, allowJs=false", () => { + const json: any = { + "compilerOptions": { + "jsx": "none", + "allowJs": false + } + }; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + }); + it("with 
jsx=preserve, allowJs=false", () => { + const json: any = { + "compilerOptions": { + "jsx": "preserve", + "allowJs": false + } + }; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + }); + it("with jsx=none, allowJs=true", () => { + const json: any = { + "compilerOptions": { + "jsx": "none", + "allowJs": true + } + }; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + "c:/dev/d.js", + "c:/dev/e.jsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + }); + it("with jsx=preserve, allowJs=true", () => { + const json: any = { + "compilerOptions": { + "jsx": "preserve", + "allowJs": true + } + }; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + "c:/dev/d.js", + "c:/dev/e.jsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + }); }); }); - function createMockParseConfigHost(basePath: string, files: string[], ignoreCase: boolean): ts.ParseConfigHost { + interface DirectoryEntry { + files: ts.Map; + directories: ts.Map; + } + + interface TestParseConfigHost extends ts.ParseConfigHost { + basePath: string; + } + + function createMockParseConfigHost(ignoreCase: boolean, basePath: string, files: string[]): TestParseConfigHost { const fileSet: ts.Map = {}; - const directorySet: ts.Map<{ files: ts.Map; directories: ts.Map; }> = {}; + const directorySet: ts.Map = {}; + const emptyDirectory: DirectoryEntry = { files: {}, directories: {} }; files.sort((a, b) => ts.comparePaths(a, b, basePath, ignoreCase)); for (const file of files) { @@ -167,18 +398,24 @@ describe("expandFiles", () => { return { useCaseSensitiveFileNames: !ignoreCase, + basePath, fileExists, + directoryExists, readDirectory, readFileNames, readDirectoryNames }; function fileExists(path: string): boolean { + path = ts.getNormalizedAbsolutePath(path, basePath); + path = ts.removeTrailingDirectorySeparator(path); const fileKey = ignoreCase ? path.toLowerCase() : path; return ts.hasProperty(fileSet, fileKey); } function directoryExists(path: string): boolean { + path = ts.getNormalizedAbsolutePath(path, basePath); + path = ts.removeTrailingDirectorySeparator(path); const directoryKey = ignoreCase ? 
path.toLowerCase() : path; return ts.hasProperty(directorySet, directoryKey); } @@ -188,7 +425,7 @@ describe("expandFiles", () => { } function readFileNames(path: string) { - const files = getDirectoryEntry(path).files; + const { files } = getDirectoryEntry(path) || emptyDirectory; const result: string[] = []; ts.forEachKey(files, key => { result.push(key); }); result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); @@ -196,7 +433,7 @@ describe("expandFiles", () => { } function readDirectoryNames(path: string) { - const directories = getDirectoryEntry(path).directories; + const { directories } = getDirectoryEntry(path); // || emptyDirectory; const result: string[] = []; ts.forEachKey(directories, key => { result.push(key); }); result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); @@ -245,5 +482,4 @@ describe("expandFiles", () => { } } } -}); - +} \ No newline at end of file From def3ba10854c2cb72bf7f60b71e6e459dfb79efb Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Mon, 7 Dec 2015 15:32:32 -0800 Subject: [PATCH 03/18] Added stubs to ChakraHost interface for readDirectoryNames/readFileNames --- src/compiler/sys.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 11a7646bd0997..5ff8dbf679661 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -62,6 +62,8 @@ namespace ts { readFile(path: string): string; writeFile(path: string, contents: string): void; readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readDirectoryNames(path: string): string[]; + readFileNames(path: string): string[]; }; export var sys: System = (function () { @@ -537,6 +539,8 @@ namespace ts { getExecutingFilePath: () => ChakraHost.executingFile, getCurrentDirectory: () => ChakraHost.currentDirectory, readDirectory: ChakraHost.readDirectory, + readFileNames: ChakraHost.readFileNames, + readDirectoryNames: ChakraHost.readDirectoryNames, exit: ChakraHost.quit, }; } From c3c3bca6fab303c94fe3ff369c9749081d30e40e Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Tue, 8 Dec 2015 10:54:23 -0800 Subject: [PATCH 04/18] Fixed typos in comments --- src/compiler/commandLineParser.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 6499ec6327fc4..4d2480ffef946 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -646,12 +646,12 @@ namespace ts { const keyMapper = host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper; // Literal file names (provided via the "files" array in tsconfig.json) are stored in a - // file map with a possibly invariant key. We use this map later when when including + // file map with a possibly case insensitive key. We use this map later when when including // wildcard paths. const literalFiles = createFileMap(keyMapper); - // Wildcard paths (provided via the "includes" array in tscofnig.json) are stored in a - // file map with a possibly invariant key. We use this map to store paths matched + // Wildcard paths (provided via the "includes" array in tsconfig.json) are stored in a + // file map with a possibly case insensitive key. We use this map to store paths matched // via wildcard, and to handle extension priority. const wildcardFiles = createFileMap(keyMapper); @@ -672,7 +672,7 @@ namespace ts { // - A "directories" array, which contains the subdirectory names in the directory. 
const cache = createFileMap(keyMapper); - // Rather than requery this for each file and filespec, we querythe supported extensions + // Rather than requery this for each file and filespec, we query the supported extensions // once and store it on the expansion context. const supportedExtensions = getSupportedExtensions(options); From bf9af46e5a1d35a5d83f95e0260dff1689967440 Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Tue, 8 Dec 2015 10:57:04 -0800 Subject: [PATCH 05/18] Changed name of 'reduce' method added to FileSet --- src/compiler/commandLineParser.ts | 4 ++-- src/compiler/core.ts | 4 ++-- src/compiler/types.ts | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 4d2480ffef946..b4339e78f2adc 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -711,8 +711,8 @@ namespace ts { } return { - fileNames: wildcardFiles.reduce(addFileToOutput, literalFiles.reduce(addFileToOutput, [])), - wildcardDirectories: wildcardDirectories.reduce>(addDirectoryToOutput, {}), + fileNames: wildcardFiles.reduceProperties(addFileToOutput, literalFiles.reduceProperties(addFileToOutput, [])), + wildcardDirectories: wildcardDirectories.reduceProperties>(addDirectoryToOutput, {}), }; } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index 998766e8d9deb..3e6981996e199 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -25,7 +25,7 @@ namespace ts { contains, remove, forEachValue: forEachValueInMap, - reduce, + reduceProperties: reducePropertiesInMap, clear, mergeFrom }; @@ -36,7 +36,7 @@ namespace ts { } } - function reduce(callback: (memo: U, value: T, key: Path) => U, initial: U) { + function reducePropertiesInMap(callback: (memo: U, value: T, key: Path) => U, initial: U) { return reduceProperties(files, callback, initial); } diff --git a/src/compiler/types.ts b/src/compiler/types.ts index 4b84c27be99c1..4c62904f4e843 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -14,7 +14,7 @@ namespace ts { remove(fileName: Path): void; forEachValue(f: (key: Path, v: T) => void): void; - reduce(f: (memo: U, value: T, key: Path) => U, initial: U): U; + reduceProperties(f: (memo: U, value: T, key: Path) => U, initial: U): U; mergeFrom(other: FileMap): void; clear(): void; } From d8572508ee0650e58a7602098498b1309c301d2d Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Mon, 14 Dec 2015 15:21:12 -0800 Subject: [PATCH 06/18] Heavily revised implementation that relies on an updated 'readDirectory' API. 
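
readDirectory now takes the extension, exclude, and include lists directly, and the new
ts.matchFiles helper in core.ts performs the glob filtering over raw directory listings
supplied by the host. The sketch below is illustrative only (the in-memory tree and the
expected result are assumptions for this note, not code in the patch); it shows how a
host-provided enumerator is expected to plug into matchFiles:

    // Hypothetical in-memory layout, used only to illustrate the new contract.
    const tree: ts.Map<ts.FileSystemEntries> = {
        "c:/dev": { files: ["a.ts", "a.d.ts", "b.js"], directories: ["x"] },
        "c:/dev/x": { files: ["c.ts"], directories: [] }
    };

    // matchFiles walks the tree from the base path, keeping files that match an
    // include pattern and one of the supported extensions, and skipping any file
    // or directory matched by an exclude pattern.
    const matched = ts.matchFiles(
        /*path*/ "c:/dev",
        /*extensions*/ [".ts", ".tsx", ".d.ts"],
        /*excludes*/ ["x"],
        /*includes*/ ["**/*"],
        /*useCaseSensitiveFileNames*/ false,
        /*currentDirectory*/ "c:/dev",
        path => tree[path] || { files: [], directories: [] });

    // Expected, per the matching logic in this change:
    //   ["c:/dev/a.ts", "c:/dev/a.d.ts"]
    // "b.js" is filtered out by the extension list, and "x" is never visited
    // because it matches the exclude pattern. Extension priority (dropping
    // a.d.ts when a.ts is present) is still applied later, in expandFiles.

Keeping the traversal and regular-expression handling in one shared helper means every
host (node, WScript, Chakra, the test harness) gets identical glob semantics from the
same raw file/directory listings.
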
--- Jakefile.js | 1 + src/compiler/commandLineParser.ts | 731 +++--------------- src/compiler/core.ts | 203 ++++- src/compiler/sys.ts | 119 +-- src/compiler/types.ts | 22 +- src/harness/harness.ts | 39 +- src/harness/harnessLanguageService.ts | 10 +- src/harness/loggedIO.ts | 9 +- src/harness/projectsRunner.ts | 15 +- src/harness/rwcRunner.ts | 3 - src/harness/vfs.ts | 160 ++++ src/services/shims.ts | 76 +- .../cases/unittests/cachingInServerLSHost.ts | 4 +- tests/cases/unittests/expandFiles.ts | 259 +++---- tests/cases/unittests/session.ts | 2 - 15 files changed, 629 insertions(+), 1024 deletions(-) create mode 100644 src/harness/vfs.ts diff --git a/Jakefile.js b/Jakefile.js index 58677e19a75c6..0ce1d5a95ece2 100644 --- a/Jakefile.js +++ b/Jakefile.js @@ -131,6 +131,7 @@ var languageServiceLibrarySources = [ var harnessCoreSources = [ "harness.ts", + "vfs.ts", "sourceMapRecorder.ts", "harnessLanguageService.ts", "fourslash.ts", diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index b4339e78f2adc..c81b3b63244fc 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -582,47 +582,8 @@ namespace ts { return { options, errors }; } - // Simplified whitelist, forces escaping of any non-word (or digit), non-whitespace character. - const reservedCharacterPattern = /[^\w\s]/g; - - const enum ExpansionState { - Ok, - Error - } - - interface ExpansionContext { - /** A pattern used to exclude a file specification. */ - excludePattern: RegExp; - /** Compiler options. */ - options: CompilerOptions; - /** The host used to resolve files and directories. */ - host: ParseConfigHost; - /** Errors to report. */ - errors: Diagnostic[]; - /** The set of literal files. */ - literalFiles: FileMap; - /** The set of files matching a wildcard. */ - wildcardFiles: FileMap; - /** Directories to be watched. */ - wildcardDirectories: FileMap; - /** Supported extensions. */ - supportedExtensions: string[]; - /** - * Path cache, used to reduce calls to the file system. `true` indicates a file exists, - * `false` indicates a file or directory does not exist. A DirectoryResult - * indicates the file and subdirectory names in a directory. */ - cache: FileMap; - } - - const enum FileSystemEntryKind { - File, - Directory - } - - interface DirectoryResult { - files?: string[]; - directories?: string[]; - } + const invalidTrailingRecursionPattern = /(^|\/)\*\*\/?$/; + const invalidMultipleRecursionPatterns = /(^|\/)\*\*\/(.*\/)?\*\*($|\/)/; /** * Expands an array of file specifications. @@ -642,348 +603,143 @@ namespace ts { // The exclude spec list is converted into a regular expression, which allows us to quickly // test whether a file or directory should be excluded before recursively traversing the // file system. - const excludePattern = includeSpecs ? createExcludeRegularExpression(excludeSpecs, basePath, options, host, errors) : undefined; const keyMapper = host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper; // Literal file names (provided via the "files" array in tsconfig.json) are stored in a // file map with a possibly case insensitive key. We use this map later when when including // wildcard paths. - const literalFiles = createFileMap(keyMapper); + const literalFileMap: Map = {}; // Wildcard paths (provided via the "includes" array in tsconfig.json) are stored in a // file map with a possibly case insensitive key. We use this map to store paths matched // via wildcard, and to handle extension priority. 
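    // For example, if a wildcard matches both "a.ts" and "a.d.ts" in the same directory,
    // only the higher priority "a.ts" entry survives in this map (see
    // hasFileWithHigherPriorityExtension and removeWildcardFilesWithLowerPriorityExtension below).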
- const wildcardFiles = createFileMap(keyMapper); + const wildcardFileMap: Map = {}; // Wildcard directories (provided as part of a wildcard path) are stored in a // file map that marks whether it was a regular wildcard match (with a `*` or `?` token), // or a recursive directory. This information is used by filesystem watchers to monitor for // new entries in these paths. - const wildcardDirectories = createFileMap(keyMapper); - - // To reduce the overhead of disk I/O (and marshalling to managed code when hosted in - // Visual Studio), file system queries are cached during the expansion session. - // If present, a cache entry can be one of three values: - // - A `false` value indicates the file or directory did not exist. - // - A `true` value indicates the path is a file and it exists. - // - An object value indicates the path is a directory and exists. The object may have - // zero, one, or both of the following properties: - // - A "files" array, which contains the file names in the directory. - // - A "directories" array, which contains the subdirectory names in the directory. - const cache = createFileMap(keyMapper); + const wildcardDirectories: Map = getWildcardDirectories(includeSpecs, basePath, host.useCaseSensitiveFileNames); // Rather than requery this for each file and filespec, we query the supported extensions // once and store it on the expansion context. const supportedExtensions = getSupportedExtensions(options); - // The expansion context holds references to shared information for the various expansion - // operations to reduce the overhead of closures. - const context: ExpansionContext = { - options, - host, - errors, - excludePattern, - literalFiles, - wildcardFiles, - wildcardDirectories, - supportedExtensions, - cache - }; - // Literal files are always included verbatim. An "include" or "exclude" specification cannot // remove a literal file. if (fileNames) { for (const fileName of fileNames) { - const path = toPath(fileName, basePath, caseSensitiveKeyMapper); - if (!literalFiles.contains(path)) { - literalFiles.set(path, path); - } + const file = combinePaths(basePath, fileName); + literalFileMap[keyMapper(file)] = file; } } - // Each "include" specification is expanded and matching files are added. if (includeSpecs) { - for (let includeSpec of includeSpecs) { - includeSpec = normalizePath(includeSpec); - includeSpec = removeTrailingDirectorySeparator(includeSpec); - expandFileSpec(includeSpec, basePath, 0, context); - } - } - - return { - fileNames: wildcardFiles.reduceProperties(addFileToOutput, literalFiles.reduceProperties(addFileToOutput, [])), - wildcardDirectories: wildcardDirectories.reduceProperties>(addDirectoryToOutput, {}), - }; - } - - /** - * Expands a file specification with wildcards. - * - * @param fileSpec The original file specification. - * @param basePath The directory to expand. This path must exist. - * @param start The starting offset in the file specification. - * @param context The expansion context. - * @param isExpandingRecursiveDirectory A value indicating whether the file specification includes a recursive directory wildcard prior to the start of this segment. - */ - function expandFileSpec(fileSpec: string, basePath: Path, start: number, context: ExpansionContext, isExpandingRecursiveDirectory?: boolean): ExpansionState { - // A file specification must always point to a file. As a result, we always assume the - // path segment following the last directory separator points to a file. 
The only - // exception is when the final path segment is the recursive directory pattern "**", in - // which case we report an error. - const { host, options, errors, wildcardFiles, wildcardDirectories, excludePattern, cache, supportedExtensions } = context; - - // Skip expansion if the base path matches an exclude pattern. - if (isExcludedPath(basePath, excludePattern)) { - return ExpansionState.Ok; - } - - // Find the offset of the next wildcard in the file specification. If there are no more - // wildcards, we can include the file if it exists and isn't excluded. - let offset = indexOfWildcard(fileSpec, start); - if (offset < 0) { - const path = toPath(fileSpec.substring(start), basePath, caseSensitiveKeyMapper); - if (!isExcludedPath(path, excludePattern) && pathExists(path, FileSystemEntryKind.File, context)) { - includeFile(path, context, /*wildcardHasExtension*/ true); - } - - return ExpansionState.Ok; - } - - // Find the last directory separator before the wildcard to get the leading path. - offset = fileSpec.lastIndexOf(directorySeparator, offset); - if (offset > start) { - // The wildcard occurs in a later segment, include remaining path up to - // wildcard in prefix. - basePath = toPath(fileSpec.substring(start, offset), basePath, caseSensitiveKeyMapper); - - // Skip this wildcard path if the base path now matches an exclude pattern. - if (isExcludedPath(basePath, excludePattern) || !pathExists(basePath, FileSystemEntryKind.Directory, context)) { - return ExpansionState.Ok; - } - - start = offset + 1; - } - - // Find the offset of the next directory separator to extract the wildcard path segment. - offset = getEndOfPathSegment(fileSpec, start); - - // Check if the current offset is the beginning of a recursive directory pattern. - if (isRecursiveDirectoryWildcard(fileSpec, start, offset)) { - // Stop expansion if a file specification contains more than one recursive directory pattern. - if (isExpandingRecursiveDirectory) { - if (errors) { - errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, fileSpec)); - } - - return ExpansionState.Error; - } - - if (offset >= fileSpec.length) { - // If there is no file specification following the recursive directory pattern - // then we report an error as we cannot match any files. - if (errors) { - errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, fileSpec)); - } - - return ExpansionState.Error; - } - - // Keep track of the recursive wildcard directory - wildcardDirectories.set(basePath, WatchDirectoryFlags.Recursive); - - // Expand the recursive directory pattern. - return expandRecursiveDirectory(fileSpec, basePath, offset + 1, context); - } - - if (!isExpandingRecursiveDirectory) { - wildcardDirectories.set(basePath, WatchDirectoryFlags.None); - } - - // Match the entries in the directory against the wildcard pattern. - const pattern = createRegularExpressionFromWildcard(fileSpec, start, offset, host); - - // If there are no more directory separators (the offset is at the end of the file specification), then - // this must be a file. - if (offset >= fileSpec.length) { - const wildcardHasExtension = fileSegmentHasExtension(fileSpec, start); - const fileNames = readDirectory(basePath, FileSystemEntryKind.File, context); - for (const fileName of fileNames) { - // Skip the file if it doesn't match the pattern. 
- if (!pattern.test(fileName)) { + includeSpecs = validateSpecs(includeSpecs, errors, /*allowTrailingRecursion*/ false); + if (excludeSpecs) { + excludeSpecs = validateSpecs(excludeSpecs, errors, /*allowTrailingRecursion*/ true); + } + + for (const file of host.readDirectory(basePath, supportedExtensions, excludeSpecs, includeSpecs)) { + // If we have already included a literal or wildcard path with a + // higher priority extension, we should skip this file. + // + // This handles cases where we may encounter both .ts and + // .d.ts (or .js if "allowJs" is enabled) in the same + // directory when they are compilation outputs. + if (hasFileWithHigherPriorityExtension(file, literalFileMap, wildcardFileMap, supportedExtensions, keyMapper)) { continue; } - const path = toPath(fileName, basePath, caseSensitiveKeyMapper); + // We may have included a wildcard path with a lower priority + // extension due to the user-defined order of entries in the + // "include" array. If there is a lower priority extension in the + // same directory, we should remove it. + removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper); - // If we have excluded this path, we should skip the file. - if (isExcludedPath(path, excludePattern)) { - continue; + const key = keyMapper(file); + if (!hasProperty(literalFileMap, key) && !hasProperty(wildcardFileMap, key)) { + wildcardFileMap[key] = file; } - - // This wildcard has no further directory to process, so include the file. - includeFile(path, context, wildcardHasExtension); } } - else { - const directoryNames = readDirectory(basePath, FileSystemEntryKind.Directory, context); - for (const directoryName of directoryNames) { - if (pattern.test(directoryName)) { - const newBasePath = toPath(directoryName, basePath, caseSensitiveKeyMapper); - // Expand the entries in this directory. - if (expandFileSpec(fileSpec, newBasePath, offset + 1, context, isExpandingRecursiveDirectory) === ExpansionState.Error) { - return ExpansionState.Error; - } - } - } - } - - return ExpansionState.Ok; + const literalFiles = reduceProperties(literalFileMap, addFileToOutput, []); + const wildcardFiles = reduceProperties(wildcardFileMap, addFileToOutput, []); + wildcardFiles.sort(host.useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive); + return { + fileNames: literalFiles.concat(wildcardFiles), + wildcardDirectories + }; } - /** - * Expands a `**` recursive directory wildcard. - * - * @param fileSpec The original file specification. - * @param basePath The directory to recursively expand. - * @param start The starting offset in the file specification. - * @param context The expansion context. - */ - function expandRecursiveDirectory(fileSpec: string, basePath: Path, start: number, context: ExpansionContext): ExpansionState { - // Skip the directory if it is excluded. - if (isExcludedPath(basePath, context.excludePattern)) { - return ExpansionState.Ok; - } - - // Expand the non-recursive part of the file specification against the prefix path. - if (expandFileSpec(fileSpec, basePath, start, context, /*isExpandingRecursiveDirectory*/ true) === ExpansionState.Error) { - return ExpansionState.Error; - } - - // Recursively expand each subdirectory. 
- const directoryNames = readDirectory(basePath, FileSystemEntryKind.Directory, context); - for (const directoryName of directoryNames) { - const newBasePath = toPath(directoryName, basePath, caseSensitiveKeyMapper); - if (expandRecursiveDirectory(fileSpec, newBasePath, start, context) === ExpansionState.Error) { - return ExpansionState.Error; + function validateSpecs(specs: string[], errors: Diagnostic[], allowTrailingRecursion: boolean) { + const validSpecs: string[] = []; + for (const spec of specs) { + if (!allowTrailingRecursion && invalidTrailingRecursionPattern.test(spec)) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, spec)); } - } - - return ExpansionState.Ok; - } - - /** - * Attempts to include a file in a file set. - * - * @param file The file to include. - * @param context The expansion context. - * @param wildcardHasExtension A value indicating whether the wildcard supplied an explicit extension. - */ - function includeFile(file: Path, context: ExpansionContext, wildcardHasExtension: boolean): void { - const { options, literalFiles, wildcardFiles, excludePattern, supportedExtensions } = context; - - // Ignore the file if it does not have a supported extension. - if ((!wildcardHasExtension || !options.allowNonTsExtensions) && !isSupportedSourceFileName(file, options)) { - return; - } - - // If we have already included a literal or wildcard path with a - // higher priority extension, we should skip this file. - // - // This handles cases where we may encounter both .ts and - // .d.ts (or .js if "allowJs" is enabled) in the same - // directory when they are compilation outputs. - const extensionPriority = getExtensionPriority(file, supportedExtensions); - if (hasFileWithHigherPriorityExtension(file, extensionPriority, context)) { - return; - } - - // We may have included a wildcard path with a lower priority - // extension due to the user-defined order of entries in the - // "include" array. If there is a lower priority extension in the - // same directory, we should remove it. - removeWildcardFilesWithLowerPriorityExtension(file, extensionPriority, context); - - if (!literalFiles.contains(file) && !wildcardFiles.contains(file)) { - wildcardFiles.set(file, file); - } - } - - /** - * Tests whether a path exists and is a specific kind of item. Results are - * cached for performance. - * - * @param path The path to tests. - * @param kind The kind of file system entry to find. - * @param context The expansion context. - */ - function pathExists(path: Path, kind: FileSystemEntryKind, context: ExpansionContext) { - const { cache, host } = context; - const entry = cache.get(path); - if (entry === false) { - // If the entry is strictly `false` then the path doesn`t exist, regardless of its kind. - return false; - } - else if (entry === true) { - // If the entry is strictly `true` then a file exists at this path. - return kind === FileSystemEntryKind.File; - } - else if (typeof entry === "object") { - // If the entry is an object, then a directory exists at this path. - return kind === FileSystemEntryKind.Directory; - } - else { - // The entry does not exist in the cache, so we need to check the host. 
- if (kind === FileSystemEntryKind.File) { - const result = host.fileExists(path); - cache.set(path, result); - return result; + else if (invalidMultipleRecursionPatterns.test(spec)) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, spec)); } - else if (kind === FileSystemEntryKind.Directory) { - const result = host.directoryExists(path); - cache.set(path, result ? {} : false); - return result; + else { + validSpecs.push(spec); } } - return false; + return validSpecs; } + const watchRecursivePattern = /\/[^/]*?[*?][^/]*\//; + const wildcardDirectoryPattern = /^[^*?]*(?=\/[^/]*[*?])/; + /** - * Reads the contents of a directory for a specific kind of item. Results are - * cached for performance. - * - * @param basePath The path to the directory. The path must already exist. - * @param kind The kind of file system entry to find. - * @param context The expansion context. + * Gets directories in a set of include patterns that should be watched for changes. */ - function readDirectory(basePath: Path, kind: FileSystemEntryKind, context: ExpansionContext) { - const { cache, host } = context; - - let entry = cache.get(basePath); - if (entry === undefined) { - entry = {}; - cache.set(basePath, entry); - } - - if (typeof entry === "object") { - if (kind === FileSystemEntryKind.File) { - if (entry.files === undefined) { - entry.files = host.readFileNames(basePath); + function getWildcardDirectories(includes: string[], path: string, useCaseSensitiveFileNames: boolean) { + // We watch a directory recursively if it contains a wildcard anywhere in a directory segment + // of the pattern: + // + // /a/b/**/d - Watch /a/b recursively to catch changes to any d in any subfolder recursively + // /a/b/*/d - Watch /a/b recursively to catch any d in any immediate subfolder, even if a new subfolder is added + // + // We watch a directory without recursion if it contains a wildcard in the file segment of + // the pattern: + // + // /a/b/* - Watch /a/b directly to catch any new file + // /a/b/a?z - Watch /a/b directly to catch any new file matching a?z + const wildcardDirectories: Map = {}; + if (includes !== undefined) { + const recursiveKeys: string[] = []; + for (const include of includes) { + const name = combinePaths(path, include); + const match = wildcardDirectoryPattern.exec(name); + if (match) { + const key = useCaseSensitiveFileNames ? match[0] : match[0].toLowerCase(); + const flags = watchRecursivePattern.test(name) ? WatchDirectoryFlags.Recursive : WatchDirectoryFlags.None; + const existingFlags = getProperty(wildcardDirectories, key); + if (existingFlags === undefined || existingFlags < flags) { + wildcardDirectories[key] = flags; + if (flags === WatchDirectoryFlags.Recursive) { + recursiveKeys.push(key); + } + } } - - return entry.files; } - else if (kind === FileSystemEntryKind.Directory) { - if (entry.directories === undefined) { - entry.directories = host.readDirectoryNames(basePath); - } - return entry.directories; + // Remove any subpaths under an existing recursively watched directory. + for (const key in wildcardDirectories) { + if (hasProperty(wildcardDirectories, key)) { + for (const recursiveKey in recursiveKeys) { + if (containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { + delete wildcardDirectories[key]; + } + } + } } } - return []; + return wildcardDirectories; } /** @@ -994,13 +750,13 @@ namespace ts { * @param extensionPriority The priority of the extension. 
* @param context The expansion context. */ - function hasFileWithHigherPriorityExtension(file: Path, extensionPriority: ExtensionPriority, context: ExpansionContext) { - const { literalFiles, wildcardFiles, supportedExtensions } = context; + function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { + const extensionPriority = getExtensionPriority(file, extensions); const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority); for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; ++i) { - const higherPriorityExtension = supportedExtensions[i]; - const higherPriorityPath = changeExtension(file, higherPriorityExtension); - if (literalFiles.contains(higherPriorityPath) || wildcardFiles.contains(higherPriorityPath)) { + const higherPriorityExtension = extensions[i]; + const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension)); + if (hasProperty(literalFiles, higherPriorityPath) || hasProperty(wildcardFiles, higherPriorityPath)) { return true; } } @@ -1016,13 +772,13 @@ namespace ts { * @param extensionPriority The priority of the extension. * @param context The expansion context. */ - function removeWildcardFilesWithLowerPriorityExtension(file: Path, extensionPriority: ExtensionPriority, context: ExpansionContext) { - const { wildcardFiles, supportedExtensions } = context; + function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { + const extensionPriority = getExtensionPriority(file, extensions); const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority); - for (let i = nextExtensionPriority; i < supportedExtensions.length; ++i) { - const lowerPriorityExtension = supportedExtensions[i]; - const lowerPriorityPath = changeExtension(file, lowerPriorityExtension); - wildcardFiles.remove(lowerPriorityPath); + for (let i = nextExtensionPriority; i < extensions.length; ++i) { + const lowerPriorityExtension = extensions[i]; + const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension)); + delete wildcardFiles[lowerPriorityPath]; } } @@ -1037,297 +793,6 @@ namespace ts { return output; } - /** - * Adds a watched directory to an output map. - * - * @param output The output map. - * @param flags The directory flags. - * @param directory The directory path. - */ - function addDirectoryToOutput(output: Map, flags: WatchDirectoryFlags, directory: string) { - output[directory] = flags; - return output; - } - - /** - * Determines whether a path should be excluded. - * - * @param path The path to test for exclusion. - * @param excludePattern A pattern used to exclude a file specification. - */ - function isExcludedPath(path: string, excludePattern: RegExp) { - return excludePattern ? excludePattern.test(path) : false; - } - - /** - * Determines whether a file segment contains a valid extension. - * - * @param fileSpec The file specification. - * @param segmentStart The offset to the start of the file segment in the specification. - */ - function fileSegmentHasExtension(fileSpec: string, segmentStart: number) { - // if the final path segment does not have a . token, the file does not have an extension. - if (fileSpec.indexOf(".", segmentStart) === -1) { - return false; - } - - // if the extension for the final path segment is (".*"), then the file does not have an extension. 
- if (fileExtensionIs(fileSpec, ".*")) { - return false; - } - - return true; - } - - /** - * Creates a regular expression from a glob-style wildcard. - * - * @param fileSpec The file specification. - * @param start The starting offset in the file specification. - * @param end The end offset in the file specification. - * @param host The host used to resolve files and directories. - */ - function createRegularExpressionFromWildcard(fileSpec: string, start: number, end: number, host: ParseConfigHost): RegExp { - const pattern = createPatternFromWildcard(fileSpec, start, end); - return new RegExp("^" + pattern + "$", host.useCaseSensitiveFileNames ? "" : "i"); - } - - /** - * Creates a pattern from a wildcard segment. - * - * @param fileSpec The file specification. - * @param start The starting offset in the file specification. - * @param end The end offset in the file specification. - */ - function createPatternFromWildcard(fileSpec: string, start: number, end: number): string { - let pattern = ""; - let offset = indexOfWildcard(fileSpec, start); - while (offset >= 0 && offset < end) { - if (offset > start) { - // Escape and append the non-wildcard portion to the regular expression. - pattern += escapeRegularExpressionText(fileSpec, start, offset); - } - - const charCode = fileSpec.charCodeAt(offset); - if (charCode === CharacterCodes.asterisk) { - // Append a multi-character (zero or more characters) pattern to the regular expression. - pattern += "[^/]*?"; - } - else if (charCode === CharacterCodes.question) { - // Append a single-character pattern to the regular expression. - pattern += "[^/]"; - } - - start = offset + 1; - offset = indexOfWildcard(fileSpec, start); - } - - // Escape and append any remaining non-wildcard portion. - if (start < end) { - pattern += escapeRegularExpressionText(fileSpec, start, end); - } - - return pattern; - } - - /** - * Creates a regular expression from a glob-style wildcard used to exclude a file. - * - * @param excludeSpecs The file specifications to exclude. - * @param basePath The prefix path. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - * @param errors An array for diagnostic reporting. - */ - function createExcludeRegularExpression(excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): RegExp { - // Ignore an empty exclusion list - if (!excludeSpecs || excludeSpecs.length === 0) { - return undefined; - } - - basePath = escapeRegularExpressionText(basePath, 0, basePath.length); - - let pattern = ""; - for (const excludeSpec of excludeSpecs) { - const excludePattern = createExcludePattern(excludeSpec, basePath, options, host, errors); - if (excludePattern) { - if (pattern.length > 0) { - pattern += "|"; - } - - pattern += "(" + excludePattern + ")"; - } - } - - if (pattern.length > 0) { - return new RegExp("^(" + pattern + ")($|/)", host.useCaseSensitiveFileNames ? "" : "i"); - } - - return undefined; - } - - /** - * Creates a pattern for used to exclude a file. - * - * @param excludeSpec The file specification to exclude. - * @param basePath The base path for the exclude pattern. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - * @param errors An array for diagnostic reporting. 
- */ - function createExcludePattern(excludeSpec: string, basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): string { - if (!excludeSpec) { - return undefined; - } - - excludeSpec = normalizePath(excludeSpec); - excludeSpec = removeTrailingDirectorySeparator(excludeSpec); - - let pattern = isRootedDiskPath(excludeSpec) ? "" : basePath; - let hasRecursiveDirectoryWildcard = false; - let segmentStart = 0; - let segmentEnd = getEndOfPathSegment(excludeSpec, segmentStart); - while (segmentStart < segmentEnd) { - if (isRecursiveDirectoryWildcard(excludeSpec, segmentStart, segmentEnd)) { - if (hasRecursiveDirectoryWildcard) { - if (errors) { - errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, excludeSpec)); - } - - return undefined; - } - - // As an optimization, if the recursive directory is the last - // wildcard, or is followed by only `*` or `*.ts`, don't add the - // remaining pattern and exit the loop. - if (canElideRecursiveDirectorySegment(excludeSpec, segmentEnd, options, host)) { - break; - } - - hasRecursiveDirectoryWildcard = true; - pattern += "(/.+)?"; - } - else { - if (pattern) { - pattern += directorySeparator; - } - - pattern += createPatternFromWildcard(excludeSpec, segmentStart, segmentEnd); - } - - segmentStart = segmentEnd + 1; - segmentEnd = getEndOfPathSegment(excludeSpec, segmentStart); - } - - return pattern; - } - - /** - * Determines whether a recursive directory segment can be elided when - * building a regular expression to exclude a path. - * - * @param excludeSpec The file specification used to exclude a path. - * @param segmentEnd The end position of the recursive directory segment. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - */ - function canElideRecursiveDirectorySegment(excludeSpec: string, segmentEnd: number, options: CompilerOptions, host: ParseConfigHost) { - // If there are no segments after this segment, the pattern for this segment may be elided. - if (segmentEnd + 1 >= excludeSpec.length) { - return true; - } - - // If the following segment is a wildcard that may be elided, the pattern for this segment may be elided. - return canElideWildcardSegment(excludeSpec, segmentEnd + 1, options, host); - } - - /** - * Determines whether a wildcard segment can be elided when building a - * regular expression to exclude a path. - * - * @param excludeSpec The file specification used to exclude a path. - * @param segmentStart The starting position of the segment. - * @param options Compiler options. - * @param host The host used to resolve files and directories. - */ - function canElideWildcardSegment(excludeSpec: string, segmentStart: number, options: CompilerOptions, host: ParseConfigHost) { - const charCode = excludeSpec.charCodeAt(segmentStart); - if (charCode === CharacterCodes.asterisk) { - const end = excludeSpec.length; - - // If the segment consists only of `*`, we may elide this segment. - if (segmentStart + 1 === end) { - return true; - } - - // If the segment consists only of `*.ts`, and we do not allow - // any other extensions for source files, we may elide this segment. - if (!options.allowNonTsExtensions && !options.jsx && !options.allowJs && segmentStart + 4 === end) { - const segment = excludeSpec.substr(segmentStart); - return fileExtensionIs(host.useCaseSensitiveFileNames ? 
segment : segment.toLowerCase(), ".ts"); - } - } - return false; - } - - /** - * Escape regular expression reserved tokens. - * - * @param text The text to escape. - * @param start The starting offset in the string. - * @param end The ending offset in the string. - */ - function escapeRegularExpressionText(text: string, start: number, end: number) { - return text.substring(start, end).replace(reservedCharacterPattern, "\\$&"); - } - - /** - * Determines whether the wildcard at the current offset is a recursive directory wildcard. - * - * @param fileSpec The file specification. - * @param segmentStart The starting offset of a segment in the file specification. - * @param segmentEnd The ending offset of a segment in the file specification. - */ - function isRecursiveDirectoryWildcard(fileSpec: string, segmentStart: number, segmentEnd: number) { - return segmentEnd - segmentStart === 2 && - fileSpec.charCodeAt(segmentStart) === CharacterCodes.asterisk && - fileSpec.charCodeAt(segmentStart + 1) === CharacterCodes.asterisk; - } - - /** - * Gets the index of the next wildcard character in a file specification. - * - * @param fileSpec The file specification. - * @param start The starting offset in the file specification. - */ - function indexOfWildcard(fileSpec: string, start: number, end: number = fileSpec.length): number { - for (let i = start; i < end; ++i) { - const ch = fileSpec.charCodeAt(i); - if (ch === CharacterCodes.asterisk || ch === CharacterCodes.question) { - return i; - } - } - - return -1; - } - - /** - * Get the end position of a path segment, either the index of the next directory separator or - * the provided end position. - * - * @param fileSpec The file specification. - * @param segmentStart The start offset in the file specification. - */ - function getEndOfPathSegment(fileSpec: string, segmentStart: number): number { - const end = fileSpec.length; - if (segmentStart >= end) { - return end; - } - - const offset = fileSpec.indexOf(directorySeparator, segmentStart); - return offset < 0 ? end : offset; - } - /** * Gets a case sensitive key. * diff --git a/src/compiler/core.ts b/src/compiler/core.ts index 3e6981996e199..465d4d229d88a 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -25,9 +25,7 @@ namespace ts { contains, remove, forEachValue: forEachValueInMap, - reduceProperties: reducePropertiesInMap, clear, - mergeFrom }; function forEachValueInMap(f: (key: Path, value: T) => void) { @@ -36,10 +34,6 @@ namespace ts { } } - function reducePropertiesInMap(callback: (memo: U, value: T, key: Path) => U, initial: U) { - return reduceProperties(files, callback, initial); - } - // path should already be well-formed so it does not need to be normalized function get(path: Path): T { return files[toKey(path)]; @@ -62,16 +56,6 @@ namespace ts { files = {}; } - function mergeFrom(other: FileMap) { - other.forEachValue(mergeFromOther); - } - - function mergeFromOther(key: Path, value: T) { - if (!contains(key)) { - set(key, value); - } - } - function toKey(path: Path): string { return keyMapper ? keyMapper(path) : path; } @@ -131,6 +115,15 @@ namespace ts { return -1; } + export function indexOfAnyCharCode(text: string, charCodes: number[], start?: number): number { + for (let i = start || 0, len = text.length; i < len; ++i) { + if (contains(charCodes, text.charCodeAt(i))) { + return i; + } + } + return -1; + } + export function countWhere(array: T[], predicate: (x: T) => boolean): number { let count = 0; if (array) { @@ -524,6 +517,10 @@ namespace ts { return a < b ? 
Comparison.LessThan : Comparison.GreaterThan; } + export function compareStringsCaseInsensitive(a: string, b: string) { + return compareStrings(a, b, /*ignoreCase*/ true); + } + function getDiagnosticFileName(diagnostic: Diagnostic): string { return diagnostic.file ? diagnostic.file.fileName : undefined; } @@ -861,6 +858,180 @@ namespace ts { return pathLen > extLen && path.substr(pathLen - extLen, extLen) === extension; } + export function fileExtensionIsAny(path: string, extensions: string[]): boolean { + for (const extension of extensions) { + if (fileExtensionIs(path, extension)) { + return true; + } + } + + return false; + } + + + // Reserved characters, forces escaping of any non-word (or digit), non-whitespace character. + // It may be inefficient (we could just match (/[-[\]{}()*+?.,\\^$|#\s]/g), but this is future + // proof. + const reservedCharacterPattern = /[^\w\s\/]/g; + const wildcardCharCodes = [CharacterCodes.asterisk, CharacterCodes.question]; + + export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude", useCaseSensitiveFileNames: boolean) { + if (specs === undefined || specs.length === 0) { + return undefined; + } + + let pattern = ""; + let hasWrittenSubpattern = false; + spec: for (const spec of specs) { + if (!spec) { + continue; + } + + let subpattern = ""; + let hasRecursiveDirectoryWildcard = false; + let hasWrittenComponent = false; + const components = getNormalizedPathComponents(spec, basePath); + if (usage !== "exclude" && components[components.length - 1] === "**") { + continue spec; + } + + // getNormalizedPathComponents includes the separator for the root component. + // We need to remove to create our regex correctly. + components[0] = removeTrailingDirectorySeparator(components[0]); + + let optionalCount = 0; + for (const component of components) { + if (component === "**") { + if (hasRecursiveDirectoryWildcard) { + continue spec; + } + + subpattern += "(/.+?)?"; + hasRecursiveDirectoryWildcard = true; + hasWrittenComponent = true; + } + else { + if (usage === "directories") { + subpattern += "("; + optionalCount++; + } + + if (hasWrittenComponent) { + subpattern += directorySeparator; + } + + subpattern += component.replace(reservedCharacterPattern, replaceWildcardCharacter); + hasWrittenComponent = true; + } + } + + while (optionalCount > 0) { + subpattern += ")?"; + optionalCount--; + } + + if (hasWrittenSubpattern) { + pattern += "|"; + } + + pattern += "(" + subpattern + ")"; + hasWrittenSubpattern = true; + } + + if (!pattern) { + return undefined; + } + + return new RegExp("^(" + pattern + (usage === "exclude" ? ")($|/)" : ")$"), useCaseSensitiveFileNames ? "" : "i"); + } + + function replaceWildcardCharacter(match: string) { + return match === "*" ? "[^/]*" : match === "?" ? 
"[^/]" : "\\" + match; + } + + export interface FileSystemEntries { + files: string[]; + directories: string[]; + } + + export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] { + path = normalizePath(path); + currentDirectory = normalizePath(currentDirectory); + const absolutePath = combinePaths(currentDirectory, path); + const includeFileRegex = getRegularExpressionForWildcard(includes, absolutePath, "files", useCaseSensitiveFileNames); + const includeDirectoryRegex = getRegularExpressionForWildcard(includes, absolutePath, "directories", useCaseSensitiveFileNames); + const excludeRegex = getRegularExpressionForWildcard(excludes, absolutePath, "exclude", useCaseSensitiveFileNames); + const result: string[] = []; + for (const basePath of getBasePaths(path, includes, useCaseSensitiveFileNames)) { + visitDirectory(basePath, combinePaths(currentDirectory, basePath)); + } + return result; + + function visitDirectory(path: string, absolutePath: string) { + const { files, directories } = getFileSystemEntries(path); + + for (const current of files) { + const name = combinePaths(path, current); + const absoluteName = combinePaths(absolutePath, current); + if ((!extensions || fileExtensionIsAny(name, extensions)) && + (!includeFileRegex || includeFileRegex.test(absoluteName)) && + (!excludeRegex || !excludeRegex.test(absoluteName))) { + result.push(name); + } + } + + for (const current of directories) { + const name = combinePaths(path, current); + const absoluteName = combinePaths(absolutePath, current); + if ((!includeDirectoryRegex || includeDirectoryRegex.test(absoluteName)) && + (!excludeRegex || !excludeRegex.test(absoluteName))) { + visitDirectory(name, absoluteName); + } + } + } + } + + /** + * Computes the unique non-wildcard base paths amongst the provided include patterns. + */ + function getBasePaths(path: string, includes: string[], useCaseSensitiveFileNames: boolean) { + // Storage for our results in the form of literal paths (e.g. the paths as written by the user). + const basePaths: string[] = [path]; + if (includes) { + // Storage for literal base paths amongst the include patterns. + const includeBasePaths: string[] = []; + for (const include of includes) { + if (isRootedDiskPath(include)) { + const wildcardOffset = indexOfAnyCharCode(include, wildcardCharCodes); + const includeBasePath = wildcardOffset < 0 + ? removeTrailingDirectorySeparator(getDirectoryPath(include)) + : include.substring(0, include.lastIndexOf(directorySeparator, wildcardOffset)); + + // Append the literal and canonical candidate base paths. + includeBasePaths.push(includeBasePath); + } + } + + // Sort the offsets array using either the literal or canonical path representations. + includeBasePaths.sort(useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive); + + // Iterate over each include base path and include unique base paths that are not a + // subpath of an existing base path + include: for (let i = 0; i < includeBasePaths.length; ++i) { + const includeBasePath = includeBasePaths[i]; + for (let j = 0; j < basePaths.length; ++j) { + if (containsPath(basePaths[j], includeBasePath, path, !useCaseSensitiveFileNames)) { + continue include; + } + } + + basePaths.push(includeBasePath); + } + } + + return basePaths; + } + /** * List of supported extensions in order of file resolution precedence. 
*/ diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 5ff8dbf679661..cad64adb4cf52 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -16,9 +16,7 @@ namespace ts { createDirectory(path: string): void; getExecutingFilePath(): string; getCurrentDirectory(): string; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; - readFileNames(path: string): string[]; - readDirectoryNames(path: string): string[]; + readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[]; getMemoryUsage?(): number; exit(exitCode?: number): void; } @@ -61,9 +59,7 @@ namespace ts { resolvePath(path: string): string; readFile(path: string): string; writeFile(path: string, contents: string): void; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; - readDirectoryNames(path: string): string[]; - readFileNames(path: string): string[]; + readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[]; }; export var sys: System = (function () { @@ -71,6 +67,7 @@ namespace ts { function getWScriptSystem(): System { const fso = new ActiveXObject("Scripting.FileSystemObject"); + const shell = new ActiveXObject("WScript.Shell"); const fileStream = new ActiveXObject("ADODB.Stream"); fileStream.Type = 2 /*text*/; @@ -150,38 +147,20 @@ namespace ts { return result.sort(); } - function readDirectory(path: string, extension?: string, exclude?: string[]): string[] { - const result: string[] = []; - exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s))); - visitDirectory(path); - return result; - function visitDirectory(path: string) { + function getAccessibleFileSystemEntries(path: string): FileSystemEntries { + try { const folder = fso.GetFolder(path || "."); const files = getNames(folder.files); - for (const current of files) { - const name = combinePaths(path, current); - if ((!extension || fileExtensionIs(name, extension)) && !contains(exclude, getCanonicalPath(name))) { - result.push(name); - } - } - const subfolders = getNames(folder.subfolders); - for (const current of subfolders) { - const name = combinePaths(path, current); - if (!contains(exclude, getCanonicalPath(name))) { - visitDirectory(name); - } - } + const directories = getNames(folder.subfolders); + return { files, directories }; + } + catch (e) { + return { files: [], directories: [] }; } } - function readFileNames(path: string): string[] { - const folder = fso.GetFolder(path || "."); - return getNames(folder.files); - } - - function readDirectoryNames(path: string): string[] { - const folder = fso.GetFolder(path || "."); - return getNames(folder.directories); + function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] { + return matchFiles(path, extensions, excludes, includes, /*useCaseSensitiveFileNames*/ false, shell.CurrentDirectory, getAccessibleFileSystemEntries); } return { @@ -211,11 +190,9 @@ namespace ts { return WScript.ScriptFullName; }, getCurrentDirectory() { - return new ActiveXObject("WScript.Shell").CurrentDirectory; + return shell.CurrentDirectory; }, readDirectory, - readFileNames, - readDirectoryNames, exit(exitCode?: number): void { try { WScript.Quit(exitCode); @@ -385,56 +362,30 @@ namespace ts { return useCaseSensitiveFileNames ? 
path.toLowerCase() : path; } - function readDirectory(path: string, extension?: string, exclude?: string[]): string[] { - const result: string[] = []; - exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s))); - visitDirectory(path); - return result; - function visitDirectory(path: string) { - const files = _fs.readdirSync(path || ".").sort(); + function getAccessibleFileSystemEntries(path: string): FileSystemEntries { + try { + const entries = _fs.readdirSync(path || ".").sort(); + const files: string[] = []; const directories: string[] = []; - for (const current of files) { - const name = combinePaths(path, current); - if (!contains(exclude, getCanonicalPath(name))) { - const stat = _fs.statSync(name); - if (stat.isFile()) { - if (!extension || fileExtensionIs(name, extension)) { - result.push(name); - } - } - else if (stat.isDirectory()) { - directories.push(name); - } + for (const entry of entries) { + const name = combinePaths(path, entry); + const stat = _fs.statSync(name); + if (stat.isFile()) { + files.push(entry); + } + else if (stat.isDirectory()) { + directories.push(entry); } } - for (const current of directories) { - visitDirectory(current); - } + return { files, directories }; } - } - - function readFileNames(path: string): string[] { - const entries = _fs.readdirSync(path || "."); - const files: string[] = []; - for (const entry of entries) { - const stat = _fs.statSync(combinePaths(path, entry)); - if (stat.isFile()) { - files.push(entry); - } + catch (e) { + return { files: [], directories: [] }; } - return files.sort(); } - function readDirectoryNames(path: string): string[] { - const entries = _fs.readdirSync(path || "."); - const directories: string[] = []; - for (const entry of entries) { - const stat = _fs.statSync(combinePaths(path, entry)); - if (stat.isDirectory()) { - directories.push(entry); - } - } - return directories.sort(); + function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] { + return matchFiles(path, extensions, excludes, includes, useCaseSensitiveFileNames, process.cwd(), getAccessibleFileSystemEntries); } return { @@ -482,7 +433,7 @@ namespace ts { return _path.resolve(path); }, fileExists(path: string): boolean { - return _fs.existsSync(path) && _fs.statSync(path).isFile(); + return _fs.existsSync(path); }, directoryExists(path: string) { return _fs.existsSync(path) && _fs.statSync(path).isDirectory(); @@ -499,8 +450,6 @@ namespace ts { return process.cwd(); }, readDirectory, - readFileNames, - readDirectoryNames, getMemoryUsage() { if (global.gc) { global.gc(); @@ -539,8 +488,6 @@ namespace ts { getExecutingFilePath: () => ChakraHost.executingFile, getCurrentDirectory: () => ChakraHost.currentDirectory, readDirectory: ChakraHost.readDirectory, - readFileNames: ChakraHost.readFileNames, - readDirectoryNames: ChakraHost.readDirectoryNames, exit: ChakraHost.quit, }; } @@ -560,6 +507,4 @@ namespace ts { return undefined; // Unsupported host } })(); -} - - +} \ No newline at end of file diff --git a/src/compiler/types.ts b/src/compiler/types.ts index 4c62904f4e843..3ddc0b447a94e 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -14,8 +14,6 @@ namespace ts { remove(fileName: Path): void; forEachValue(f: (key: Path, v: T) => void): void; - reduceProperties(f: (memo: U, value: T, key: Path) => U, initial: U): U; - mergeFrom(other: FileMap): void; clear(): void; } @@ -1586,31 +1584,13 @@ namespace ts { export interface ParseConfigHost { useCaseSensitiveFileNames: boolean; 
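        // Illustrative aside (not part of this patch; the paths and specs below are made up):
        // a conforming host now receives plain arrays for every spec, e.g.
        //   host.readDirectory("/proj", [".ts", ".tsx", ".d.ts"], ["node_modules"], ["src/**/*"])
        // and, per getRegularExpressionForWildcard in core.ts, an include spec such as
        // "src/**/*" is expected to be matched with a pattern roughly like
        //   ^((/proj/src(/.+?)?/[^/]*))$
        // since "**" expands to "(/.+?)?", "*" to "[^/]*" and "?" to "[^/]".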
- readDirectory(rootDir: string, extension: string, exclude: string[]): string[]; + readDirectory(rootDir: string, extensions: string[], excludes: string[], includes: string[]): string[]; /** * Gets a value indicating whether the specified path exists and is a file. * @param path The path to test. */ fileExists(path: string): boolean; - - /** - * Gets a value indicating whether the specified path exists and is a directory. - * @param path The path to test. - */ - directoryExists(path: string): boolean; - - /** - * Reads the files names in the directory. - * @param rootDir The directory path. - */ - readFileNames(rootDir: string): string[]; - - /** - * Reads the directory names in the directory. - * @param rootDir The directory path. - */ - readDirectoryNames(rootDir: string): string[]; } export interface WriteFileCallback { diff --git a/src/harness/harness.ts b/src/harness/harness.ts index d8ccc512343c5..509c193666f29 100644 --- a/src/harness/harness.ts +++ b/src/harness/harness.ts @@ -23,6 +23,7 @@ /// /// /// +/// /* tslint:disable:no-null */ // Block scoped definitions work poorly for global variables, temporarily enable var @@ -435,9 +436,7 @@ namespace Harness { args(): string[]; getExecutingFilePath(): string; exit(exitCode?: number): void; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; - readDirectoryNames(path: string): string[]; - readFileNames(path: string): string[]; + readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[]; } export var IO: IO; @@ -475,9 +474,7 @@ namespace Harness { export const directoryExists: typeof IO.directoryExists = fso.FolderExists; export const fileExists: typeof IO.fileExists = fso.FileExists; export const log: typeof IO.log = global.WScript && global.WScript.StdOut.WriteLine; - export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); - export const readDirectoryNames: typeof IO.readDirectoryNames = path => ts.sys.readDirectoryNames(path); - export const readFileNames: typeof IO.readFileNames = path => ts.sys.readFileNames(path); + export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include); export function createDirectory(path: string) { if (directoryExists(path)) { @@ -547,9 +544,7 @@ namespace Harness { export const fileExists: typeof IO.fileExists = fs.existsSync; export const log: typeof IO.log = s => console.log(s); - export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); - export const readDirectoryNames: typeof IO.readDirectoryNames = path => ts.sys.readDirectoryNames(path); - export const readFileNames: typeof IO.readFileNames = path => ts.sys.readFileNames(path); + export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include); export function createDirectory(path: string) { if (!directoryExists(path)) { @@ -755,16 +750,22 @@ namespace Harness { Http.writeToServerSync(serverRoot + path, "WRITE", contents); } - export function readDirectory(path: string, extension?: string, exclude?: string[]) { - return listFiles(path).filter(f => !extension || ts.fileExtensionIs(f, extension)); - } - - export function readDirectoryNames(path: string): string[] { - return []; - } - - export function readFileNames(path: string) { - return readDirectory(path); + 
export function readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]) { + const fs = new Utils.VirtualFileSystem(path, useCaseSensitiveFileNames()); + for (const file in listFiles(path)) { + fs.addFile(file); + } + return ts.matchFiles(path, extension, exclude, include, useCaseSensitiveFileNames(), getCurrentDirectory(), path => { + const entry = fs.traversePath(path); + if (entry && entry.isDirectory()) { + const directory = entry; + return { + files: ts.map(directory.getFiles(), f => f.name), + directories: ts.map(directory.getDirectories(), d => d.name) + }; + } + return { files: [], directories: [] }; + }); } } } diff --git a/src/harness/harnessLanguageService.ts b/src/harness/harnessLanguageService.ts index 521d17cfc8fce..4db96f344dfc7 100644 --- a/src/harness/harnessLanguageService.ts +++ b/src/harness/harnessLanguageService.ts @@ -575,18 +575,10 @@ namespace Harness.LanguageService { return this.host.getCurrentDirectory(); } - readDirectory(path: string, extension?: string): string[] { + readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[] { throw new Error("Not implemented Yet."); } - readDirectoryNames(path: string): string[] { - throw new Error("Not implemented."); - } - - readFileNames(path: string): string[] { - throw new Error("Not implemented."); - } - watchFile(fileName: string, callback: (fileName: string) => void): ts.FileWatcher { return { close() { } }; } diff --git a/src/harness/loggedIO.ts b/src/harness/loggedIO.ts index 0bae91f7976fc..a6df6382bd0c4 100644 --- a/src/harness/loggedIO.ts +++ b/src/harness/loggedIO.ts @@ -62,8 +62,9 @@ interface IOLog { }[]; directoriesRead: { path: string, - extension: string, + extension: string[], exclude: string[], + include: string[], result: string[] }[]; } @@ -217,9 +218,9 @@ namespace Playback { memoize(path => findResultByPath(wrapper, replayLog.filesRead, path).contents)); wrapper.readDirectory = recordReplay(wrapper.readDirectory, underlying)( - (path, extension, exclude) => { - const result = (underlying).readDirectory(path, extension, exclude); - const logEntry = { path, extension, exclude, result }; + (path, extension, exclude, include) => { + const result = (underlying).readDirectory(path, extension, exclude, include); + const logEntry = { path, extension, exclude, include, result }; recordLog.directoriesRead.push(logEntry); return result; }, diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index 76c7952428a4b..bff391cf9af3a 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -213,10 +213,7 @@ class ProjectRunner extends RunnerBase { const configParseHost: ts.ParseConfigHost = { useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), fileExists, - directoryExists, readDirectory, - readDirectoryNames, - readFileNames }; const configParseResult = ts.parseJsonConfigFileContent(configObject, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions); if (configParseResult.errors.length > 0) { @@ -276,8 +273,8 @@ class ProjectRunner extends RunnerBase { : ts.normalizeSlashes(testCase.projectRoot) + "/" + ts.normalizeSlashes(fileName); } - function readDirectory(rootDir: string, extension: string, exclude: string[]): string[] { - const harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude); + function readDirectory(rootDir: string, extension: string[], exclude: string[], include: string[]): string[] { + const 
harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude, include); const result: string[] = []; for (let i = 0; i < harnessReadDirectoryResult.length; i++) { result[i] = ts.getRelativePathToDirectoryOrUrl(testCase.projectRoot, harnessReadDirectoryResult[i], @@ -286,14 +283,6 @@ class ProjectRunner extends RunnerBase { return result; } - function readDirectoryNames(path: string) { - return Harness.IO.readDirectoryNames(getFileNameInTheProjectTest(path)); - } - - function readFileNames(path: string) { - return Harness.IO.readFileNames(getFileNameInTheProjectTest(path)); - } - function fileExists(fileName: string): boolean { return Harness.IO.fileExists(getFileNameInTheProjectTest(fileName)); } diff --git a/src/harness/rwcRunner.ts b/src/harness/rwcRunner.ts index 826f6b6726068..791c684c79c16 100644 --- a/src/harness/rwcRunner.ts +++ b/src/harness/rwcRunner.ts @@ -79,10 +79,7 @@ namespace RWC { const configParseHost: ts.ParseConfigHost = { useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), fileExists: Harness.IO.fileExists, - directoryExists: Harness.IO.directoryExists, readDirectory: Harness.IO.readDirectory, - readDirectoryNames: Harness.IO.readDirectoryNames, - readFileNames: Harness.IO.readFileNames, }; const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, configParseHost, ts.getDirectoryPath(tsconfigFile.path)); fileNames = configParseResult.fileNames; diff --git a/src/harness/vfs.ts b/src/harness/vfs.ts new file mode 100644 index 0000000000000..66bbd715fbc60 --- /dev/null +++ b/src/harness/vfs.ts @@ -0,0 +1,160 @@ +/// +namespace Utils { + export class VirtualFileSystemEntry { + fileSystem: VirtualFileSystem; + name: string; + + constructor(fileSystem: VirtualFileSystem, name: string) { + this.fileSystem = fileSystem; + this.name = name; + } + + isDirectory() { return false; } + isFile() { return false; } + isFileSystem() { return false; } + } + + export class VirtualFile extends VirtualFileSystemEntry { + content: string; + isFile() { return true; } + } + + export abstract class VirtualFileSystemContainer extends VirtualFileSystemEntry { + abstract getFileSystemEntries(): VirtualFileSystemEntry[]; + + getFileSystemEntry(name: string): VirtualFileSystemEntry { + for (const entry of this.getFileSystemEntries()) { + if (this.fileSystem.sameName(entry.name, name)) { + return entry; + } + } + return undefined; + } + + getDirectories(): VirtualDirectory[] { + return ts.filter(this.getFileSystemEntries(), entry => entry.isDirectory()); + } + + getFiles(): VirtualFile[] { + return ts.filter(this.getFileSystemEntries(), entry => entry.isFile()); + } + + getDirectory(name: string): VirtualDirectory { + const entry = this.getFileSystemEntry(name); + return entry.isDirectory() ? entry : undefined; + } + + getFile(name: string): VirtualFile { + const entry = this.getFileSystemEntry(name); + return entry.isFile() ? 
entry : undefined; + } + } + + export class VirtualDirectory extends VirtualFileSystemContainer { + private entries: VirtualFileSystemEntry[] = []; + + isDirectory() { return true; } + + getFileSystemEntries() { return this.entries.slice(); } + + addDirectory(name: string): VirtualDirectory { + const entry = this.getFileSystemEntry(name); + if (entry === undefined) { + const directory = new VirtualDirectory(this.fileSystem, name); + this.entries.push(directory); + return directory; + } + else if (entry.isDirectory()) { + return entry; + } + else { + return undefined; + } + } + + addFile(name: string, content?: string): VirtualFile { + const entry = this.getFileSystemEntry(name); + if (entry === undefined) { + const file = new VirtualFile(this.fileSystem, name); + file.content = content; + this.entries.push(file); + return file; + } + else if (entry.isFile()) { + const file = entry; + file.content = content; + return file; + } + else { + return undefined; + } + } + } + + export class VirtualFileSystem extends VirtualFileSystemContainer { + private root: VirtualDirectory; + + currentDirectory: string; + useCaseSensitiveFileNames: boolean; + + constructor(currentDirectory: string, useCaseSensitiveFileNames: boolean) { + super(undefined, ""); + this.fileSystem = this; + this.root = new VirtualDirectory(this, ""); + this.currentDirectory = currentDirectory; + this.useCaseSensitiveFileNames = useCaseSensitiveFileNames; + } + + isFileSystem() { return true; } + + getFileSystemEntries() { return this.root.getFileSystemEntries(); } + + addDirectory(path: string) { + const components = ts.getNormalizedPathComponents(path, this.currentDirectory); + let directory: VirtualDirectory = this.root; + for (const component of components) { + directory = directory.addDirectory(component); + if (directory === undefined) { + break; + } + } + + return directory; + } + + addFile(path: string, content?: string) { + const absolutePath = ts.getNormalizedAbsolutePath(path, this.currentDirectory); + const fileName = ts.getBaseFileName(path); + const directoryPath = ts.getDirectoryPath(absolutePath); + const directory = this.addDirectory(directoryPath); + return directory ? directory.addFile(fileName, content) : undefined; + } + + fileExists(path: string) { + const entry = this.traversePath(path); + return entry !== undefined && entry.isFile(); + } + + sameName(a: string, b: string) { + return this.useCaseSensitiveFileNames ? a === b : a.toLowerCase() === b.toLowerCase(); + } + + traversePath(path: string) { + let directory: VirtualDirectory = this.root; + for (const component of ts.getNormalizedPathComponents(path, this.currentDirectory)) { + const entry = directory.getFileSystemEntry(component); + if (entry === undefined) { + return undefined; + } + else if (entry.isDirectory()) { + directory = entry; + } + else { + return entry; + } + } + + return directory; + } + } +} \ No newline at end of file diff --git a/src/services/shims.ts b/src/services/shims.ts index a7f85d2445230..cf6f3885a2c3a 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -72,10 +72,7 @@ namespace ts { * @param exclude A JSON encoded string[] containing the paths to exclude * when enumerating the directory. 
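         * For illustration only (not part of this patch; the paths are made up), a conforming
         * host round-trip is expected to look roughly like:
         *   shimHost.readDirectory("/proj", JSON.stringify([".ts", ".tsx"]), JSON.stringify(["node_modules"]), JSON.stringify(["src/**"]))
         * returning a JSON encoded string[] (e.g. '["/proj/src/app.ts"]') that the adapter
         * shown further below parses back into an array of file names.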
*/ - readDirectory(rootDir: string, extension: string, exclude?: string): string; - readDirectoryNames?(rootDir: string): string; - readFileNames?(rootDir: string): string; - directoryExists?(path: string): boolean; + readDirectory(rootDir: string, extension: string, exclude?: string, include?: string): string; useCaseSensitiveFileNames?: boolean; } @@ -425,70 +422,43 @@ namespace ts { } } - public readDirectory(rootDir: string, extension: string, exclude: string[]): string[] { - // Wrap the API changes for 1.5 release. This try/catch - // should be removed once TypeScript 1.5 has shipped. + public readDirectory(rootDir: string, extensions: string[], exclude: string[], include: string[]): string[] { + // Wrap the API changes for 1.8 release. This try/catch + // should be removed once TypeScript 1.8 has shipped. // Also consider removing the optional designation for // the exclude param at this time. - var encoded: string; try { - encoded = this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude)); + return JSON.parse(this.shimHost.readDirectory( + rootDir, + JSON.stringify(extensions), + JSON.stringify(exclude), + JSON.stringify(include))); } catch (e) { - encoded = this.shimHost.readDirectory(rootDir, extension); - } - return JSON.parse(encoded); - } - - public readDirectoryNames(path: string): string[] { - if (this.shimHost.readDirectory) { - const encoded = this.shimHost.readDirectoryNames(path); - return JSON.parse(encoded); - } - - if (sys) { - path = normalizePath(path); - path = ensureTrailingDirectorySeparator(path); - return sys.readDirectoryNames(path); - } - - return []; - } - - public readFileNames(path: string): string[] { - if (this.shimHost.readFileNames) { - const encoded = this.shimHost.readFileNames(path); - return JSON.parse(encoded); - } - - if (sys) { - path = normalizePath(path); - path = ensureTrailingDirectorySeparator(path); - return sys.readFileNames(path); + let results: string[] = []; + for (const extension of extensions) { + for (const file of this.readDirectoryFallback(rootDir, extension, exclude)) + { + if (!contains(results, file)) { + results.push(file); + } + } + } + return results; } - - return []; } public fileExists(fileName: string): boolean { return this.shimHost.fileExists(fileName); } - public directoryExists(directoryName: string): boolean { - if (this.shimHost.directoryExists) { - return this.shimHost.directoryExists(directoryName); - } - - if (sys) { - return sys.directoryExists(directoryName); - } - - return false; - } - public readFile(fileName: string): string { return this.shimHost.readFile(fileName); } + + private readDirectoryFallback(rootDir: string, extension: string, exclude: string[]) { + return JSON.parse(this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude))); + } } function simpleForwardCall(logger: Logger, actionDescription: string, action: () => any, logPerformance: boolean): any { diff --git a/tests/cases/unittests/cachingInServerLSHost.ts b/tests/cases/unittests/cachingInServerLSHost.ts index f0a138c27a196..0d2a9ebf54d40 100644 --- a/tests/cases/unittests/cachingInServerLSHost.ts +++ b/tests/cases/unittests/cachingInServerLSHost.ts @@ -36,11 +36,9 @@ module ts { getCurrentDirectory: (): string => { return ""; }, - readDirectory: (path: string, extension?: string, exclude?: string[]): string[] => { + readDirectory: (path: string, extension?: string[], exclude?: string[], include?: string[]): string[] => { throw new Error("NYI"); }, - readDirectoryNames: (path: string): string[] => { throw new 
Error("NYI"); }, - readFileNames: (path: string): string[] => { throw new Error("NYI"); }, exit: (exitCode?: number) => { }, watchFile: (path, callback) => { diff --git a/tests/cases/unittests/expandFiles.ts b/tests/cases/unittests/expandFiles.ts index 3aac938f178ec..02065407e27df 100644 --- a/tests/cases/unittests/expandFiles.ts +++ b/tests/cases/unittests/expandFiles.ts @@ -2,65 +2,91 @@ /// namespace ts { + class MockParseConfigHost extends Utils.VirtualFileSystem implements ParseConfigHost { + constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) { + super(currentDirectory, ignoreCase); + for (const file of files) { + this.addFile(file); + } + } + + readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) { + return matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => this.getAccessibleFileSystemEntries(path)); + } + + getAccessibleFileSystemEntries(path: string) { + const entry = this.traversePath(path); + if (entry && entry.isDirectory()) { + const directory = entry; + return { + files: map(directory.getFiles(), f => f.name), + directories: map(directory.getDirectories(), d => d.name) + }; + } + return { files: [], directories: [] }; + } + } + const caseInsensitiveBasePath = "c:/dev/"; - const caseInsensitiveHost = createMockParseConfigHost(/*ignoreCase*/ true, caseInsensitiveBasePath, [ - "a.ts", - "a.d.ts", - "a.js", - "b.ts", - "b.js", - "c.d.ts", - "z/a.ts", - "z/abz.ts", - "z/aba.ts", - "z/b.ts", - "z/bbz.ts", - "z/bba.ts", - "x/a.ts", - "x/aa.ts", - "x/b.ts", - "x/y/a.ts", - "x/y/b.ts", - "js/a.js", - "js/b.js", + const caseInsensitiveHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.ts", + "c:/dev/b.js", + "c:/dev/c.d.ts", + "c:/dev/z/a.ts", + "c:/dev/z/abz.ts", + "c:/dev/z/aba.ts", + "c:/dev/z/b.ts", + "c:/dev/z/bbz.ts", + "c:/dev/z/bba.ts", + "c:/dev/x/a.ts", + "c:/dev/x/aa.ts", + "c:/dev/x/b.ts", + "c:/dev/x/y/a.ts", + "c:/dev/x/y/b.ts", + "c:/dev/js/a.js", + "c:/dev/js/b.js", + "c:/ext/ext.ts" ]); const caseSensitiveBasePath = "/dev/"; - const caseSensitiveHost = createMockParseConfigHost(/*ignoreCase*/ false, caseSensitiveBasePath, [ - "a.ts", - "a.d.ts", - "a.js", - "b.ts", - "b.js", - "A.ts", - "B.ts", - "c.d.ts", - "z/a.ts", - "z/abz.ts", - "z/aba.ts", - "z/b.ts", - "z/bbz.ts", - "z/bba.ts", - "x/a.ts", - "x/b.ts", - "x/y/a.ts", - "x/y/b.ts", - "js/a.js", - "js/b.js", + const caseSensitiveHost = new MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, [ + "/dev/a.ts", + "/dev/a.d.ts", + "/dev/a.js", + "/dev/b.ts", + "/dev/b.js", + "/dev/A.ts", + "/dev/B.ts", + "/dev/c.d.ts", + "/dev/z/a.ts", + "/dev/z/abz.ts", + "/dev/z/aba.ts", + "/dev/z/b.ts", + "/dev/z/bbz.ts", + "/dev/z/bba.ts", + "/dev/x/a.ts", + "/dev/x/b.ts", + "/dev/x/y/a.ts", + "/dev/x/y/b.ts", + "/dev/js/a.js", + "/dev/js/b.js", ]); - const caseInsensitiveMixedExtensionHost = createMockParseConfigHost(/*ignoreCase*/ true, caseInsensitiveBasePath, [ - "a.ts", - "a.d.ts", - "a.js", - "b.tsx", - "b.d.ts", - "b.jsx", - "c.tsx", - "c.js", - "d.js", - "e.jsx", - "f.other" + const caseInsensitiveMixedExtensionHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.tsx", + "c:/dev/b.d.ts", + "c:/dev/b.jsx", + "c:/dev/c.tsx", + "c:/dev/c.js", + "c:/dev/d.js", + 
"c:/dev/e.jsx", + "c:/dev/f.other" ]); describe("expandFiles", () => { @@ -226,7 +252,7 @@ namespace ts { const expected: ts.ExpandResult = { fileNames: [], wildcardDirectories: { - "c:/dev": ts.WatchDirectoryFlags.None + "c:/dev": ts.WatchDirectoryFlags.Recursive }, }; const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); @@ -239,7 +265,7 @@ namespace ts { const expected: ts.ExpandResult = { fileNames: ["c:/dev/a.ts"], wildcardDirectories: { - "c:/dev": ts.WatchDirectoryFlags.None + "c:/dev": ts.WatchDirectoryFlags.Recursive }, }; const actual = ts.expandFiles(fileNames, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); @@ -283,6 +309,23 @@ namespace ts { const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, { allowJs: true }, caseInsensitiveHost); assert.deepEqual(actual, expected); }); + it("include paths outside of the project", () => { + const includeSpecs = ["*", "c:/ext/*"]; + const expected: ts.ExpandResult = { + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts", + "c:/ext/ext.ts", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None, + "c:/ext": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + assert.deepEqual(actual, expected); + }); }); describe("when called from parseJsonConfigFileContent", () => { @@ -376,110 +419,4 @@ namespace ts { }); }); }); - - interface DirectoryEntry { - files: ts.Map; - directories: ts.Map; - } - - interface TestParseConfigHost extends ts.ParseConfigHost { - basePath: string; - } - - function createMockParseConfigHost(ignoreCase: boolean, basePath: string, files: string[]): TestParseConfigHost { - const fileSet: ts.Map = {}; - const directorySet: ts.Map = {}; - const emptyDirectory: DirectoryEntry = { files: {}, directories: {} }; - - files.sort((a, b) => ts.comparePaths(a, b, basePath, ignoreCase)); - for (const file of files) { - addFile(ts.getNormalizedAbsolutePath(file, basePath)); - } - - return { - useCaseSensitiveFileNames: !ignoreCase, - basePath, - fileExists, - directoryExists, - readDirectory, - readFileNames, - readDirectoryNames - }; - - function fileExists(path: string): boolean { - path = ts.getNormalizedAbsolutePath(path, basePath); - path = ts.removeTrailingDirectorySeparator(path); - const fileKey = ignoreCase ? path.toLowerCase() : path; - return ts.hasProperty(fileSet, fileKey); - } - - function directoryExists(path: string): boolean { - path = ts.getNormalizedAbsolutePath(path, basePath); - path = ts.removeTrailingDirectorySeparator(path); - const directoryKey = ignoreCase ? 
path.toLowerCase() : path; - return ts.hasProperty(directorySet, directoryKey); - } - - function readDirectory(rootDir: string, extension?: string, exclude?: string[]): string[] { - throw new Error("Not implemented"); - } - - function readFileNames(path: string) { - const { files } = getDirectoryEntry(path) || emptyDirectory; - const result: string[] = []; - ts.forEachKey(files, key => { result.push(key); }); - result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); - return result; - } - - function readDirectoryNames(path: string) { - const { directories } = getDirectoryEntry(path); // || emptyDirectory; - const result: string[] = []; - ts.forEachKey(directories, key => { result.push(key); }); - result.sort((a, b) => ts.compareStrings(a, b, ignoreCase)); - return result; - } - - function getDirectoryEntry(path: string) { - path = ts.getNormalizedAbsolutePath(path, basePath); - path = ts.removeTrailingDirectorySeparator(path); - const directoryKey = ignoreCase ? path.toLowerCase() : path; - return ts.getProperty(directorySet, directoryKey); - } - - function addFile(file: string) { - const fileKey = ignoreCase ? file.toLowerCase() : file; - if (!ts.hasProperty(fileSet, fileKey)) { - fileSet[fileKey] = file; - const name = ts.getBaseFileName(file); - const parent = ts.getDirectoryPath(file); - addToDirectory(parent, name, "file"); - } - } - - function addDirectory(directory: string) { - directory = ts.removeTrailingDirectorySeparator(directory); - const directoryKey = ignoreCase ? directory.toLowerCase() : directory; - if (!ts.hasProperty(directorySet, directoryKey)) { - directorySet[directoryKey] = { files: {}, directories: {} }; - const name = ts.getBaseFileName(directory); - const parent = ts.getDirectoryPath(directory); - if (parent !== directory) { - addToDirectory(parent, name, "directory"); - } - } - } - - function addToDirectory(directory: string, entry: string, type: "file" | "directory") { - addDirectory(directory); - directory = ts.removeTrailingDirectorySeparator(directory); - const directoryKey = ignoreCase ? directory.toLowerCase() : directory; - const entryKey = ignoreCase ? entry.toLowerCase() : entry; - const directoryEntry = directorySet[directoryKey]; - const entries = type === "file" ? 
directoryEntry.files : directoryEntry.directories; - if (!ts.hasProperty(entries, entryKey)) { - entries[entryKey] = entry; - } - } - } } \ No newline at end of file diff --git a/tests/cases/unittests/session.ts b/tests/cases/unittests/session.ts index 8e35baa391022..7c44b895ce051 100644 --- a/tests/cases/unittests/session.ts +++ b/tests/cases/unittests/session.ts @@ -16,8 +16,6 @@ namespace ts.server { getExecutingFilePath(): string { return void 0; }, getCurrentDirectory(): string { return void 0; }, readDirectory(): string[] { return []; }, - readDirectoryNames(): string[] { return []; }, - readFileNames(): string[] { return []; }, exit(): void {} }; const mockLogger: Logger = { From 94a5327e9a5209af798415f0e18dce48ac074668 Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Wed, 16 Dec 2015 13:42:17 -0800 Subject: [PATCH 07/18] more tests --- src/compiler/commandLineParser.ts | 44 ++- tests/cases/unittests/expandFiles.ts | 545 +++++++++++++++++++++------ 2 files changed, 454 insertions(+), 135 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index c81b3b63244fc..a5cd7abc50522 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -531,7 +531,7 @@ namespace ts { includeSpecs = ["**/*"]; } - return expandFiles(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors); + return matchFileNames(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors); } } @@ -589,14 +589,14 @@ namespace ts { * Expands an array of file specifications. * * @param fileNames The literal file names to include. - * @param includeSpecs The file specifications to expand. - * @param excludeSpecs The file specifications to exclude. + * @param include The wildcard file specifications to include. + * @param exclude The wildcard file specifications to exclude. * @param basePath The base path for any relative file specifications. * @param options Compiler options. * @param host The host used to resolve files and directories. * @param errors An array for diagnostic reporting. */ - export function expandFiles(fileNames: string[], includeSpecs: string[], excludeSpecs: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors?: Diagnostic[]): ExpandResult { + function matchFileNames(fileNames: string[], include: string[], exclude: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): ExpandResult { basePath = normalizePath(basePath); basePath = removeTrailingDirectorySeparator(basePath); @@ -615,11 +615,19 @@ namespace ts { // via wildcard, and to handle extension priority. const wildcardFileMap: Map = {}; + if (include) { + include = validateSpecs(include, errors, /*allowTrailingRecursion*/ false); + } + + if (exclude) { + exclude = validateSpecs(exclude, errors, /*allowTrailingRecursion*/ true); + } + // Wildcard directories (provided as part of a wildcard path) are stored in a // file map that marks whether it was a regular wildcard match (with a `*` or `?` token), // or a recursive directory. This information is used by filesystem watchers to monitor for // new entries in these paths. 
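        // Illustrative only (not part of this patch): for include specs such as
        //   ["src/**/*", "scripts/*"] under a base path of "c:/proj",
        // the map is expected to come out roughly as
        //   { "c:/proj/src": WatchDirectoryFlags.Recursive, "c:/proj/scripts": WatchDirectoryFlags.None }
        // since a wildcard in a non-final directory segment (such as "**") forces a recursive
        // watch, while a wildcard only in the final segment needs just a direct watch of that
        // directory.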
- const wildcardDirectories: Map = getWildcardDirectories(includeSpecs, basePath, host.useCaseSensitiveFileNames); + const wildcardDirectories: Map = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames); // Rather than requery this for each file and filespec, we query the supported extensions // once and store it on the expansion context. @@ -634,13 +642,8 @@ namespace ts { } } - if (includeSpecs) { - includeSpecs = validateSpecs(includeSpecs, errors, /*allowTrailingRecursion*/ false); - if (excludeSpecs) { - excludeSpecs = validateSpecs(excludeSpecs, errors, /*allowTrailingRecursion*/ true); - } - - for (const file of host.readDirectory(basePath, supportedExtensions, excludeSpecs, includeSpecs)) { + if (include && include.length > 0) { + for (const file of host.readDirectory(basePath, supportedExtensions, exclude, include)) { // If we have already included a literal or wildcard path with a // higher priority extension, we should skip this file. // @@ -677,10 +680,10 @@ namespace ts { const validSpecs: string[] = []; for (const spec of specs) { if (!allowTrailingRecursion && invalidTrailingRecursionPattern.test(spec)) { - errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, spec)); + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, spec)); } else if (invalidMultipleRecursionPatterns.test(spec)) { - errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, spec)); + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, spec)); } else { validSpecs.push(spec); @@ -696,7 +699,7 @@ namespace ts { /** * Gets directories in a set of include patterns that should be watched for changes. */ - function getWildcardDirectories(includes: string[], path: string, useCaseSensitiveFileNames: boolean) { + function getWildcardDirectories(include: string[], exclude: string[], path: string, useCaseSensitiveFileNames: boolean) { // We watch a directory recursively if it contains a wildcard anywhere in a directory segment // of the pattern: // @@ -708,11 +711,16 @@ namespace ts { // // /a/b/* - Watch /a/b directly to catch any new file // /a/b/a?z - Watch /a/b directly to catch any new file matching a?z + const excludeRegExp = getRegularExpressionForWildcard(exclude, path, "exclude", useCaseSensitiveFileNames); const wildcardDirectories: Map = {}; - if (includes !== undefined) { + if (include !== undefined) { const recursiveKeys: string[] = []; - for (const include of includes) { - const name = combinePaths(path, include); + for (const file of include) { + const name = combinePaths(path, file); + if (excludeRegExp && excludeRegExp.test(name)) { + continue; + } + const match = wildcardDirectoryPattern.exec(name); if (match) { const key = useCaseSensitiveFileNames ? 
match[0] : match[0].toLowerCase(); diff --git a/tests/cases/unittests/expandFiles.ts b/tests/cases/unittests/expandFiles.ts index 02065407e27df..bbda14a2cfa8d 100644 --- a/tests/cases/unittests/expandFiles.ts +++ b/tests/cases/unittests/expandFiles.ts @@ -92,189 +92,376 @@ namespace ts { describe("expandFiles", () => { describe("with literal file list", () => { it("without exclusions", () => { - const fileNames = ["a.ts", "b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + const json = { + files: [ + "a.ts", + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("missing files are still present", () => { - const fileNames = ["z.ts", "x.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/z.ts", "c:/dev/x.ts"], + const json = { + files: [ + "z.ts", + "x.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/z.ts", + "c:/dev/x.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("are not removed due to excludes", () => { - const fileNames = ["a.ts", "b.ts"]; - const excludeSpecs = ["b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + const json = { + files: [ + "a.ts", + "b.ts" + ], + exclude: [ + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(fileNames, /*includeSpecs*/ undefined, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); }); describe("with literal include list", () => { it("without exclusions", () => { - const includeSpecs = ["a.ts", "b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/b.ts"], + const json = { + include: [ + "a.ts", + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with non .ts file extensions are excluded", () => { - const includeSpecs = ["a.js", "b.js"]; - const expected: ts.ExpandResult = { + const json = { + include: [ + "a.js", + "b.js" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], fileNames: [], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, 
caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with missing files are excluded", () => { - const includeSpecs = ["z.ts", "x.ts"]; - const expected: ts.ExpandResult = { + const json = { + include: [ + "z.ts", + "x.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], fileNames: [], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with literal excludes", () => { - const includeSpecs = ["a.ts", "b.ts"]; - const excludeSpecs = ["b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts"], + const json = { + include: [ + "a.ts", + "b.ts" + ], + exclude: [ + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with wildcard excludes", () => { - const includeSpecs = ["a.ts", "b.ts", "z/a.ts", "z/abz.ts", "z/aba.ts", "x/b.ts"]; - const excludeSpecs = ["*.ts", "z/??z.ts", "*/b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/z/a.ts", "c:/dev/z/aba.ts"], + const json = { + include: [ + "a.ts", + "b.ts", + "z/a.ts", + "z/abz.ts", + "z/aba.ts", + "x/b.ts" + ], + exclude: [ + "*.ts", + "z/??z.ts", + "*/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/z/a.ts", + "c:/dev/z/aba.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with recursive excludes", () => { - const includeSpecs = ["a.ts", "b.ts", "x/a.ts", "x/b.ts", "x/y/a.ts", "x/y/b.ts"]; - const excludeSpecs = ["**/b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts"], + const json = { + include: [ + "a.ts", + "b.ts", + "x/a.ts", + "x/b.ts", + "x/y/a.ts", + "x/y/b.ts" + ], + exclude: [ + "**/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts" + ], wildcardDirectories: {}, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with case sensitive exclude", () => { - const includeSpecs = ["B.ts"]; - const excludeSpecs = ["**/b.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["/dev/B.ts"], + const json = { + include: [ + "B.ts" + ], + exclude: [ + "**/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "/dev/B.ts" + ], wildcardDirectories: {}, }; - const actual = 
ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseSensitiveBasePath, {}, caseSensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath); assert.deepEqual(actual, expected); }); }); describe("with wildcard include list", () => { it("same named declarations are excluded", () => { - const includeSpecs = ["*.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"], + const json = { + include: [ + "*.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], wildcardDirectories: { "c:/dev": ts.WatchDirectoryFlags.None }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("`*` matches only ts files", () => { - const includeSpecs = ["*"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/b.ts", "c:/dev/c.d.ts"], + const json = { + include: [ + "*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], wildcardDirectories: { "c:/dev": ts.WatchDirectoryFlags.None }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("`?` matches only a single character", () => { - const includeSpecs = ["x/?.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/x/a.ts", "c:/dev/x/b.ts"], + const json = { + include: [ + "x/?.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/x/a.ts", + "c:/dev/x/b.ts" + ], wildcardDirectories: { "c:/dev/x": ts.WatchDirectoryFlags.None }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("with recursive directory", () => { - const includeSpecs = ["**/a.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts", "c:/dev/x/a.ts", "c:/dev/x/y/a.ts", "c:/dev/z/a.ts"], + const json = { + include: [ + "**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts", + "c:/dev/z/a.ts" + ], wildcardDirectories: { "c:/dev": ts.WatchDirectoryFlags.Recursive }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("case sensitive", () => { - const includeSpecs = ["**/A.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["/dev/A.ts"], + const json = { + include: [ + "**/A.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "/dev/A.ts" + ], wildcardDirectories: { 
"/dev": ts.WatchDirectoryFlags.Recursive }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseSensitiveBasePath, {}, caseSensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath); assert.deepEqual(actual, expected); }); it("with missing files are excluded", () => { - const includeSpecs = ["*/z.ts"]; - const expected: ts.ExpandResult = { + const json = { + include: [ + "*/z.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], fileNames: [], wildcardDirectories: { "c:/dev": ts.WatchDirectoryFlags.Recursive }, }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("always include literal files", () => { - const fileNames = ["a.ts"]; - const includeSpecs = ["*/z.ts"]; - const excludeSpecs = ["**/a.ts"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/a.ts"], + const json = { + files: [ + "a.ts" + ], + include: [ + "*/z.ts" + ], + exclude: [ + "**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts" + ], wildcardDirectories: { "c:/dev": ts.WatchDirectoryFlags.Recursive }, }; - const actual = ts.expandFiles(fileNames, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("exclude folders", () => { - const includeSpecs = ["**/*"]; - const excludeSpecs = ["z", "x"]; - const expected: ts.ExpandResult = { + const json = { + include: [ + "**/*" + ], + exclude: [ + "z", + "x" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], fileNames: [ "c:/dev/a.ts", "c:/dev/b.ts", @@ -284,34 +471,66 @@ namespace ts { "c:/dev": ts.WatchDirectoryFlags.Recursive } }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, excludeSpecs, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("exclude .js files when allowJs=false", () => { - const includeSpecs = ["js/*"]; - const expected: ts.ExpandResult = { + const json = { + compilerOptions: { + allowJs: false + }, + include: [ + "js/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: false + }, + errors: [], fileNames: [], wildcardDirectories: { "c:/dev/js": ts.WatchDirectoryFlags.None } }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("include .js files when allowJs=true", () => { - const includeSpecs = ["js/*"]; - const expected: ts.ExpandResult = { - fileNames: ["c:/dev/js/a.js", "c:/dev/js/b.js"], + const json = { + compilerOptions: { + allowJs: true + }, + include: [ + "js/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: true + }, + errors: [], + fileNames: [ + "c:/dev/js/a.js", + "c:/dev/js/b.js" + ], wildcardDirectories: { "c:/dev/js": ts.WatchDirectoryFlags.None } 
}; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, { allowJs: true }, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); it("include paths outside of the project", () => { - const includeSpecs = ["*", "c:/ext/*"]; - const expected: ts.ExpandResult = { + const json = { + include: [ + "*", + "c:/ext/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], fileNames: [ "c:/dev/a.ts", "c:/dev/b.ts", @@ -323,20 +542,20 @@ namespace ts { "c:/ext": ts.WatchDirectoryFlags.None } }; - const actual = ts.expandFiles(/*fileNames*/ undefined, includeSpecs, /*excludeSpecs*/ undefined, caseInsensitiveBasePath, {}, caseInsensitiveHost); + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); assert.deepEqual(actual, expected); }); - }); - - describe("when called from parseJsonConfigFileContent", () => { it("with jsx=none, allowJs=false", () => { - const json: any = { - "compilerOptions": { - "jsx": "none", - "allowJs": false + const json = { + compilerOptions: { + allowJs: false } }; - const expected: ts.ExpandResult = { + const expected: ts.ParsedCommandLine = { + options: { + allowJs: false + }, + errors: [], fileNames: [ "c:/dev/a.ts", "c:/dev/b.tsx", @@ -347,17 +566,21 @@ namespace ts { } }; const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); - assert.deepEqual(actual.fileNames, expected.fileNames); - assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual, expected); }); it("with jsx=preserve, allowJs=false", () => { - const json: any = { - "compilerOptions": { - "jsx": "preserve", - "allowJs": false + const json = { + compilerOptions: { + jsx: "preserve", + allowJs: false } }; - const expected: ts.ExpandResult = { + const expected: ts.ParsedCommandLine = { + options: { + jsx: ts.JsxEmit.Preserve, + allowJs: false + }, + errors: [], fileNames: [ "c:/dev/a.ts", "c:/dev/b.tsx", @@ -368,17 +591,19 @@ namespace ts { } }; const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); - assert.deepEqual(actual.fileNames, expected.fileNames); - assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual, expected); }); it("with jsx=none, allowJs=true", () => { - const json: any = { - "compilerOptions": { - "jsx": "none", - "allowJs": true + const json = { + compilerOptions: { + allowJs: true } }; - const expected: ts.ExpandResult = { + const expected: ts.ParsedCommandLine = { + options: { + allowJs: true + }, + errors: [], fileNames: [ "c:/dev/a.ts", "c:/dev/b.tsx", @@ -391,17 +616,21 @@ namespace ts { } }; const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); - assert.deepEqual(actual.fileNames, expected.fileNames); - assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual, expected); }); it("with jsx=preserve, allowJs=true", () => { - const json: any = { - "compilerOptions": { - "jsx": "preserve", - "allowJs": true + const json = { + compilerOptions: { + jsx: "preserve", + allowJs: true } }; - const expected: ts.ExpandResult = { + const expected: ts.ParsedCommandLine = { + options: { + jsx: ts.JsxEmit.Preserve, + allowJs: true + }, + errors: [], fileNames: 
[ "c:/dev/a.ts", "c:/dev/b.tsx", @@ -414,8 +643,90 @@ namespace ts { } }; const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); - assert.deepEqual(actual.fileNames, expected.fileNames); - assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual, expected); + }); + describe("with trailing recursive directory", () => { + it("in includes", () => { + const json = { + include: [ + "**" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**") + ], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("in excludes", () => { + const json = { + include: [ + "**/*" + ], + exclude: [ + "**" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + }); + describe("with multiple recursive directory patterns", () => { + it("in includes", () => { + const json = { + include: [ + "**/x/**/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**/*") + ], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("in excludes", () => { + const json = { + include: [ + "**/a.ts" + ], + exclude: [ + "**/x/**" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**") + ], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts", + "c:/dev/z/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); }); }); }); From 6f85fe9ac467727bd33f607c95e7eaa6912ae57b Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Wed, 16 Dec 2015 15:18:25 -0800 Subject: [PATCH 08/18] Minor update to shims.ts for forthcoming VS support for globs. --- src/services/shims.ts | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/src/services/shims.ts b/src/services/shims.ts index cf6f3885a2c3a..46056eb110fff 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -73,7 +73,7 @@ namespace ts { * when enumerating the directory. 
*/ readDirectory(rootDir: string, extension: string, exclude?: string, include?: string): string; - useCaseSensitiveFileNames?: boolean; + useCaseSensitiveFileNames?(): boolean; } /// @@ -411,15 +411,7 @@ namespace ts { public useCaseSensitiveFileNames: boolean; constructor(private shimHost: CoreServicesShimHost) { - if (typeof shimHost.useCaseSensitiveFileNames === "boolean") { - this.useCaseSensitiveFileNames = shimHost.useCaseSensitiveFileNames; - } - else if (sys) { - this.useCaseSensitiveFileNames = sys.useCaseSensitiveFileNames; - } - else { - this.useCaseSensitiveFileNames = true; - } + this.useCaseSensitiveFileNames = this.shimHost.useCaseSensitiveFileNames ? this.shimHost.useCaseSensitiveFileNames() : false; } public readDirectory(rootDir: string, extensions: string[], exclude: string[], include: string[]): string[] { @@ -435,7 +427,7 @@ namespace ts { JSON.stringify(include))); } catch (e) { - let results: string[] = []; + const results: string[] = []; for (const extension of extensions) { for (const file of this.readDirectoryFallback(rootDir, extension, exclude)) { From d23df3430ee26f9a5a078c84f5aae229598156d0 Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Wed, 16 Dec 2015 15:49:31 -0800 Subject: [PATCH 09/18] Detailed comments for regular expressions and renamed some files. --- Jakefile.js | 4 +- src/compiler/commandLineParser.ts | 55 ++++++++++++++++++- src/harness/harness.ts | 2 +- src/harness/{vfs.ts => virtualFileSystem.ts} | 0 .../{expandFiles.ts => matchFiles.ts} | 3 +- 5 files changed, 57 insertions(+), 7 deletions(-) rename src/harness/{vfs.ts => virtualFileSystem.ts} (100%) rename tests/cases/unittests/{expandFiles.ts => matchFiles.ts} (97%) diff --git a/Jakefile.js b/Jakefile.js index cf7f69d41dc73..d46c2437ccbc4 100644 --- a/Jakefile.js +++ b/Jakefile.js @@ -131,7 +131,7 @@ var languageServiceLibrarySources = [ var harnessCoreSources = [ "harness.ts", - "vfs.ts", + "virtualFileSystem.ts", "sourceMapRecorder.ts", "harnessLanguageService.ts", "fourslash.ts", @@ -163,7 +163,7 @@ var harnessSources = harnessCoreSources.concat([ "cachingInServerLSHost.ts", "moduleResolution.ts", "tsconfigParsing.ts", - "expandFiles.ts" + "matchFiles.ts" ].map(function (f) { return path.join(unittestsDirectory, f); })).concat([ diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index a5cd7abc50522..6935cf28bcdac 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -582,9 +582,61 @@ namespace ts { return { options, errors }; } + /** + * Tests for a path that ends in a recursive directory wildcard. + * Matches **, /**, /**\/, and /**\/, but not a**b. + * + * NOTE: used \/ in place of / above to avoid ending the comment. + * + * Breakdown: + * (^|\/) # matches either the beginning of the string or a directory separator. + * \*\* # matches the recursive directory wildcard "**". + * \/?$ # matches an optional trailing directory separator at the end of the string. + */ const invalidTrailingRecursionPattern = /(^|\/)\*\*\/?$/; + + /** + * Tests for a path with multiple recursive directory wildcards. + * Matches **\/** and **\/a/**, but not **\/a**b. + * + * NOTE: used \/ in place of / above to avoid ending the comment. + * + * Breakdown: + * (^|\/) # matches either the beginning of the string or a directory separator. + * \*\*\/ # matches a recursive directory wildcard "**" followed by a directory separator. + * (.*\/)? # optionally matches any number of characters followed by a directory separator. 
+ * \*\* # matches a recursive directory wildcard "**" + * ($|\/) # matches either the end of the string or a directory separator. + */ const invalidMultipleRecursionPatterns = /(^|\/)\*\*\/(.*\/)?\*\*($|\/)/; + /** + * Tests for a path containing a wildcard character in a directory component of the path. + * Matches /*\/, /?/, and /a*b/, but not /a/ or /a/*. + * + * NOTE: used \/ in place of / above to avoid ending the comment. + * + * Breakdown: + * \/ # matches a directory separator. + * [^/]*? # matches any number of characters excluding directory separators (non-greedy). + * [*?] # matches either a wildcard character (* or ?) + * [^/]* # matches any number of characters excluding directory separators (greedy). + * \/ # matches a directory separator. + */ + const watchRecursivePattern = /\/[^/]*?[*?][^/]*\//; + + /** + * Matches the portion of a wildcard path that does not contain wildcards. + * Matches /a of /a/*, or /a/b/c of /a/b/c/?/d. + * + * Breakdown: + * ^ # matches the beginning of the string + * [^*?]* # matches any number of non-wildcard characters + * (?=\/[^/]*[*?]) # lookahead that matches a directory separator followed by + * # a path component that contains at least one wildcard character (* or ?). + */ + const wildcardDirectoryPattern = /^[^*?]*(?=\/[^/]*[*?])/; + /** * Expands an array of file specifications. * @@ -693,9 +745,6 @@ namespace ts { return validSpecs; } - const watchRecursivePattern = /\/[^/]*?[*?][^/]*\//; - const wildcardDirectoryPattern = /^[^*?]*(?=\/[^/]*[*?])/; - /** * Gets directories in a set of include patterns that should be watched for changes. */ diff --git a/src/harness/harness.ts b/src/harness/harness.ts index 509c193666f29..f33cafc456d1e 100644 --- a/src/harness/harness.ts +++ b/src/harness/harness.ts @@ -23,7 +23,7 @@ /// /// /// -/// +/// /* tslint:disable:no-null */ // Block scoped definitions work poorly for global variables, temporarily enable var diff --git a/src/harness/vfs.ts b/src/harness/virtualFileSystem.ts similarity index 100% rename from src/harness/vfs.ts rename to src/harness/virtualFileSystem.ts diff --git a/tests/cases/unittests/expandFiles.ts b/tests/cases/unittests/matchFiles.ts similarity index 97% rename from tests/cases/unittests/expandFiles.ts rename to tests/cases/unittests/matchFiles.ts index bbda14a2cfa8d..faccd31b88f40 100644 --- a/tests/cases/unittests/expandFiles.ts +++ b/tests/cases/unittests/matchFiles.ts @@ -1,5 +1,6 @@ /// /// +/// namespace ts { class MockParseConfigHost extends Utils.VirtualFileSystem implements ParseConfigHost { @@ -89,7 +90,7 @@ namespace ts { "c:/dev/f.other" ]); - describe("expandFiles", () => { + describe("matchFiles", () => { describe("with literal file list", () => { it("without exclusions", () => { const json = { From c2249177f58eb8221a9d4709f65cb900e207d15a Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Mon, 4 Jan 2016 11:37:25 -0800 Subject: [PATCH 10/18] Comment cleanup --- src/compiler/commandLineParser.ts | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 6935cf28bcdac..48acf3055ad1e 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -584,9 +584,9 @@ namespace ts { /** * Tests for a path that ends in a recursive directory wildcard. - * Matches **, /**, /**\/, and /**\/, but not a**b. + * Matches **, \**, **\, and \**\, but not a**b. * - * NOTE: used \/ in place of / above to avoid ending the comment. 
+ * NOTE: used \ in place of / above to avoid issues with multiline comments. * * Breakdown: * (^|\/) # matches either the beginning of the string or a directory separator. @@ -597,9 +597,9 @@ namespace ts { /** * Tests for a path with multiple recursive directory wildcards. - * Matches **\/** and **\/a/**, but not **\/a**b. + * Matches **\** and **\a\**, but not **\a**b. * - * NOTE: used \/ in place of / above to avoid ending the comment. + * NOTE: used \ in place of / above to avoid issues with multiline comments. * * Breakdown: * (^|\/) # matches either the beginning of the string or a directory separator. @@ -612,9 +612,9 @@ namespace ts { /** * Tests for a path containing a wildcard character in a directory component of the path. - * Matches /*\/, /?/, and /a*b/, but not /a/ or /a/*. + * Matches \*\, \?\, and \a*b\, but not \a\ or \a\*. * - * NOTE: used \/ in place of / above to avoid ending the comment. + * NOTE: used \ in place of / above to avoid issues with multiline comments. * * Breakdown: * \/ # matches a directory separator. @@ -627,7 +627,9 @@ namespace ts { /** * Matches the portion of a wildcard path that does not contain wildcards. - * Matches /a of /a/*, or /a/b/c of /a/b/c/?/d. + * Matches \a of \a\*, or \a\b\c of \a\b\c\?\d. + * + * NOTE: used \ in place of / above to avoid issues with multiline comments. * * Breakdown: * ^ # matches the beginning of the string From c1205ebdfbfebc94b35abab251237bdfbff213c5 Mon Sep 17 00:00:00 2001 From: Ron Buckton Date: Tue, 5 Jan 2016 10:11:44 -0800 Subject: [PATCH 11/18] Fixed new linter warnings --- src/compiler/commandLineParser.ts | 4 ++-- src/compiler/core.ts | 10 +++++----- src/compiler/sys.ts | 10 +--------- src/harness/projectsRunner.ts | 4 ---- 4 files changed, 8 insertions(+), 20 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 759b443e74240..52f9ef05f37b1 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -816,7 +816,7 @@ namespace ts { function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { const extensionPriority = getExtensionPriority(file, extensions); const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority); - for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; ++i) { + for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) { const higherPriorityExtension = extensions[i]; const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension)); if (hasProperty(literalFiles, higherPriorityPath) || hasProperty(wildcardFiles, higherPriorityPath)) { @@ -838,7 +838,7 @@ namespace ts { function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { const extensionPriority = getExtensionPriority(file, extensions); const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority); - for (let i = nextExtensionPriority; i < extensions.length; ++i) { + for (let i = nextExtensionPriority; i < extensions.length; i++) { const lowerPriorityExtension = extensions[i]; const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension)); delete wildcardFiles[lowerPriorityPath]; diff --git a/src/compiler/core.ts b/src/compiler/core.ts index 137a603407a4c..d0f27a2046f61 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -116,7 +116,7 @@ namespace ts { 
} export function indexOfAnyCharCode(text: string, charCodes: number[], start?: number): number { - for (let i = start || 0, len = text.length; i < len; ++i) { + for (let i = start || 0, len = text.length; i < len; i++) { if (contains(charCodes, text.charCodeAt(i))) { return i; } @@ -824,7 +824,7 @@ namespace ts { const aComponents = getNormalizedPathComponents(a, currentDirectory); const bComponents = getNormalizedPathComponents(b, currentDirectory); const sharedLength = Math.min(aComponents.length, bComponents.length); - for (let i = 0; i < sharedLength; ++i) { + for (let i = 0; i < sharedLength; i++) { const result = compareStrings(aComponents[i], bComponents[i], ignoreCase); if (result !== Comparison.EqualTo) { return result; @@ -846,7 +846,7 @@ namespace ts { return false; } - for (let i = 0; i < parentComponents.length; ++i) { + for (let i = 0; i < parentComponents.length; i++) { const result = compareStrings(parentComponents[i], childComponents[i], ignoreCase); if (result !== Comparison.EqualTo) { return false; @@ -1021,9 +1021,9 @@ namespace ts { // Iterate over each include base path and include unique base paths that are not a // subpath of an existing base path - include: for (let i = 0; i < includeBasePaths.length; ++i) { + include: for (let i = 0; i < includeBasePaths.length; i++) { const includeBasePath = includeBasePaths[i]; - for (let j = 0; j < basePaths.length; ++j) { + for (let j = 0; j < basePaths.length; j++) { if (containsPath(basePaths[j], includeBasePath, path, !useCaseSensitiveFileNames)) { continue include; } diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 9b97c99eaa397..2c23e4016eb30 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -139,10 +139,6 @@ namespace ts { } } - function getCanonicalPath(path: string): string { - return path.toLowerCase(); - } - function getNames(collection: any): string[] { const result: string[] = []; for (let e = new Enumerator(collection); !e.atEnd(); e.moveNext()) { @@ -357,10 +353,6 @@ namespace ts { } } - function getCanonicalPath(path: string): string { - return useCaseSensitiveFileNames ? 
path.toLowerCase() : path; - } - function getAccessibleFileSystemEntries(path: string): FileSystemEntries { try { const entries = _fs.readdirSync(path || ".").sort(); @@ -501,4 +493,4 @@ namespace ts { return undefined; // Unsupported host } })(); -} +} diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index bcb91fbbfdaf3..769e72d385127 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -287,10 +287,6 @@ class ProjectRunner extends RunnerBase { return Harness.IO.fileExists(getFileNameInTheProjectTest(fileName)); } - function directoryExists(directoryName: string): boolean { - return Harness.IO.directoryExists(getFileNameInTheProjectTest(directoryName)); - } - function getSourceFileText(fileName: string): string { let text: string = undefined; try { From db856431e8ff5413a35f4e536f1a078dcd2a8200 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Wed, 25 May 2016 17:07:36 -0700 Subject: [PATCH 12/18] Fixing linter and test errors --- src/compiler/commandLineParser.ts | 15 ++++++++++++++- src/compiler/core.ts | 2 +- src/compiler/sys.ts | 6 +----- src/services/shims.ts | 8 ++++---- tests/cases/unittests/tsconfigParsing.ts | 21 --------------------- 5 files changed, 20 insertions(+), 32 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 8dcc21b4d1b34..9fec0b1083d9a 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -713,6 +713,16 @@ namespace ts { errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "exclude", "Array")); } } + else { + // By default, exclude common package folders + excludeSpecs = ["node_modules", "bower_components", "jspm_packages"]; + } + + // Always exclude the output directory unless explicitly included + const outDir = json["compilerOptions"] && json["compilerOptions"]["outDir"]; + if (outDir) { + excludeSpecs.push(outDir); + } if (fileNames === undefined && includeSpecs === undefined) { includeSpecs = ["**/*"]; @@ -885,7 +895,6 @@ namespace ts { */ function matchFileNames(fileNames: string[], include: string[], exclude: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): ExpandResult { basePath = normalizePath(basePath); - basePath = removeTrailingDirectorySeparator(basePath); // The exclude spec list is converted into a regular expression, which allows us to quickly // test whether a file or directory should be excluded before recursively traversing the @@ -941,6 +950,10 @@ namespace ts { continue; } + if (IgnoreFileNamePattern.test(file)) { + continue; + } + // We may have included a wildcard path with a lower priority // extension due to the user-defined order of entries in the // "include" array. 
If there is a lower priority extension in the diff --git a/src/compiler/core.ts b/src/compiler/core.ts index ae489ad553c12..cc3c2b2811476 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -1067,7 +1067,7 @@ namespace ts { return basePaths; } - + export function ensureScriptKind(fileName: string, scriptKind?: ScriptKind): ScriptKind { // Using scriptKind as a condition handles both: // - 'scriptKind' is unspecified and thus it is `undefined` diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 3191b8b0056a9..8e5e76974bf75 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -367,7 +367,7 @@ namespace ts { const files: string[] = []; const directories: string[] = []; for (const entry of entries) { - // This is necessary because on some file system node fails to exclude + // This is necessary because on some file system node fails to exclude // "." and "..". See https://github.com/nodejs/node/issues/4002 if (entry === "." || entry === "..") { continue; @@ -391,10 +391,6 @@ namespace ts { function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] { return matchFiles(path, extensions, excludes, includes, useCaseSensitiveFileNames, process.cwd(), getAccessibleFileSystemEntries); } - - function getCanonicalPath(path: string): string { - return useCaseSensitiveFileNames ? path : path.toLowerCase(); - } const enum FileSystemEntryKind { File, diff --git a/src/services/shims.ts b/src/services/shims.ts index 0610c0e0b57e6..ba9ccdce2dc77 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -81,7 +81,7 @@ namespace ts { */ readDirectory(rootDir: string, extension: string, exclude?: string, include?: string, depth?: number): string; useCaseSensitiveFileNames?(): boolean; - trace(s: string): void; + trace(s: string): void; } /// @@ -432,10 +432,10 @@ namespace ts { public directoryExists: (directoryName: string) => boolean; public realpath: (path: string) => string; - public useCaseSensitiveFileNames: boolean; + public useCaseSensitiveFileNames: boolean; constructor(private shimHost: CoreServicesShimHost) { - this.useCaseSensitiveFileNames = this.shimHost.useCaseSensitiveFileNames ? this.shimHost.useCaseSensitiveFileNames() : false; + this.useCaseSensitiveFileNames = this.shimHost.useCaseSensitiveFileNames ? 
this.shimHost.useCaseSensitiveFileNames() : false; if ("directoryExists" in this.shimHost) { this.directoryExists = directoryName => this.shimHost.directoryExists(directoryName); } @@ -453,7 +453,7 @@ namespace ts { JSON.stringify(extensions), JSON.stringify(exclude), JSON.stringify(include), - depth + depth )); } catch (e) { diff --git a/tests/cases/unittests/tsconfigParsing.ts b/tests/cases/unittests/tsconfigParsing.ts index 581575c94794d..daa2b2bcdf099 100644 --- a/tests/cases/unittests/tsconfigParsing.ts +++ b/tests/cases/unittests/tsconfigParsing.ts @@ -50,27 +50,6 @@ namespace ts { const host: ParseConfigHost = new MockParseConfigHost(basePath, true, allFileList); const parsed = ts.parseJsonConfigFileContent(json, host, basePath, /*existingOptions*/ undefined, configFileName); assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort())); - - function mockReadDirectory(rootDir: string, extension: string, exclude: string[]): string[] { - const result: string[] = []; - const fullExcludeDirectories = ts.map(exclude, directory => combinePaths(rootDir, directory)); - for (const file of allFileList) { - let shouldExclude = false; - for (const fullExcludeDirectorie of fullExcludeDirectories) { - if (file.indexOf(fullExcludeDirectorie) >= 0) { - shouldExclude = true; - break; - } - } - if (shouldExclude) { - continue; - } - if (fileExtensionIs(file, extension)) { - result.push(file); - } - } - return result; - } } it("returns empty config for file with only whitespaces", () => { From c340c88706939febc9e6a52f28e3089b94294973 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Wed, 25 May 2016 17:08:24 -0700 Subject: [PATCH 13/18] Bringing back excludes error and fixing faulty test --- src/compiler/commandLineParser.ts | 3 ++ src/harness/virtualFileSystem.ts | 26 +++++++++++++++ tests/cases/unittests/matchFiles.ts | 31 ++--------------- tests/cases/unittests/tsconfigParsing.ts | 42 +++++------------------- 4 files changed, 41 insertions(+), 61 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 9fec0b1083d9a..d96e3b30cc9cd 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -713,6 +713,9 @@ namespace ts { errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "exclude", "Array")); } } + else if (hasProperty(json, "excludes")) { + errors.push(createCompilerDiagnostic(Diagnostics.Unknown_option_excludes_Did_you_mean_exclude)); + } else { // By default, exclude common package folders excludeSpecs = ["node_modules", "bower_components", "jspm_packages"]; diff --git a/src/harness/virtualFileSystem.ts b/src/harness/virtualFileSystem.ts index 66bbd715fbc60..30192b8b8ec4d 100644 --- a/src/harness/virtualFileSystem.ts +++ b/src/harness/virtualFileSystem.ts @@ -1,4 +1,5 @@ /// +/// namespace Utils { export class VirtualFileSystemEntry { fileSystem: VirtualFileSystem; @@ -157,4 +158,29 @@ namespace Utils { return directory; } } + + export class MockParseConfigHost extends VirtualFileSystem implements ts.ParseConfigHost { + constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) { + super(currentDirectory, ignoreCase); + for (const file of files) { + this.addFile(file); + } + } + + readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) { + return ts.matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => 
this.getAccessibleFileSystemEntries(path)); + } + + getAccessibleFileSystemEntries(path: string) { + const entry = this.traversePath(path); + if (entry && entry.isDirectory()) { + const directory = entry; + return { + files: ts.map(directory.getFiles(), f => f.name), + directories: ts.map(directory.getDirectories(), d => d.name) + }; + } + return { files: [], directories: [] }; + } + } } \ No newline at end of file diff --git a/tests/cases/unittests/matchFiles.ts b/tests/cases/unittests/matchFiles.ts index 792bad51899b7..c2d13cf0393a3 100644 --- a/tests/cases/unittests/matchFiles.ts +++ b/tests/cases/unittests/matchFiles.ts @@ -3,33 +3,8 @@ /// namespace ts { - class MockParseConfigHost extends Utils.VirtualFileSystem implements ParseConfigHost { - constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) { - super(currentDirectory, ignoreCase); - for (const file of files) { - this.addFile(file); - } - } - - readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) { - return matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => this.getAccessibleFileSystemEntries(path)); - } - - getAccessibleFileSystemEntries(path: string) { - const entry = this.traversePath(path); - if (entry && entry.isDirectory()) { - const directory = entry; - return { - files: map(directory.getFiles(), f => f.name), - directories: map(directory.getDirectories(), d => d.name) - }; - } - return { files: [], directories: [] }; - } - } - const caseInsensitiveBasePath = "c:/dev/"; - const caseInsensitiveHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + const caseInsensitiveHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ "c:/dev/a.ts", "c:/dev/a.d.ts", "c:/dev/a.js", @@ -53,7 +28,7 @@ namespace ts { ]); const caseSensitiveBasePath = "/dev/"; - const caseSensitiveHost = new MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, [ + const caseSensitiveHost = new Utils.MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, [ "/dev/a.ts", "/dev/a.d.ts", "/dev/a.js", @@ -76,7 +51,7 @@ namespace ts { "/dev/js/b.js", ]); - const caseInsensitiveMixedExtensionHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + const caseInsensitiveMixedExtensionHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ "c:/dev/a.ts", "c:/dev/a.d.ts", "c:/dev/a.js", diff --git a/tests/cases/unittests/tsconfigParsing.ts b/tests/cases/unittests/tsconfigParsing.ts index daa2b2bcdf099..9b833e64a1117 100644 --- a/tests/cases/unittests/tsconfigParsing.ts +++ b/tests/cases/unittests/tsconfigParsing.ts @@ -2,31 +2,6 @@ /// namespace ts { - class MockParseConfigHost extends Utils.VirtualFileSystem implements ParseConfigHost { - constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) { - super(currentDirectory, ignoreCase); - for (const file of files) { - this.addFile(file); - } - } - - readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) { - return matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => this.getAccessibleFileSystemEntries(path)); - } - - getAccessibleFileSystemEntries(path: string) { - const entry = this.traversePath(path); - if (entry && entry.isDirectory()) { - const 
directory = entry; - return { - files: map(directory.getFiles(), f => f.name), - directories: map(directory.getDirectories(), d => d.name) - }; - } - return { files: [], directories: [] }; - } - } - describe('parseConfigFileTextToJson', () => { function assertParseResult(jsonText: string, expectedConfigObject: { config?: any; error?: Diagnostic }) { let parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText); @@ -41,13 +16,14 @@ namespace ts { function assertParseErrorWithExcludesKeyword(jsonText: string) { let parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText); - let parsedCommand = ts.parseJsonConfigFileContent(parsed, ts.sys, "tests/cases/unittests"); - assert.isTrue(undefined !== parsedCommand.errors); + let parsedCommand = ts.parseJsonConfigFileContent(parsed.config, ts.sys, "tests/cases/unittests"); + assert.isTrue(parsedCommand.errors && parsedCommand.errors.length === 1 && + parsedCommand.errors[0].code === ts.Diagnostics.Unknown_option_excludes_Did_you_mean_exclude.code); } function assertParseFileList(jsonText: string, configFileName: string, basePath: string, allFileList: string[], expectedFileList: string[]) { const json = JSON.parse(jsonText); - const host: ParseConfigHost = new MockParseConfigHost(basePath, true, allFileList); + const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList); const parsed = ts.parseJsonConfigFileContent(json, host, basePath, /*existingOptions*/ undefined, configFileName); assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort())); } @@ -125,19 +101,19 @@ namespace ts { assertParseResult( `{ "compilerOptions": { - "lib": "es5" + "lib": ["es5"] } }`, { - config: { compilerOptions: { lib: "es5" } } + config: { compilerOptions: { lib: ["es5"] } } }); assertParseResult( `{ "compilerOptions": { - "lib": "es5,es6" + "lib": ["es5", "es6"] } }`, { - config: { compilerOptions: { lib: "es5,es6" } } + config: { compilerOptions: { lib: ["es5", "es6"] } } }); }); @@ -145,7 +121,7 @@ namespace ts { assertParseErrorWithExcludesKeyword( `{ "compilerOptions": { - "lib": "es5" + "lib": ["es5"] }, "excludes": [ "foge.ts" From aa5c51c51677600f7ac1960f7d953864b70275bc Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Thu, 26 May 2016 10:17:43 -0700 Subject: [PATCH 14/18] Fixing lint errors --- src/compiler/sys.ts | 4 ++-- tests/cases/unittests/cachingInServerLSHost.ts | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index bdbe04e904457..de7d24b229617 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -425,10 +425,10 @@ namespace ts { function directoryExists(path: string): boolean { return fileSystemEntryExists(path, FileSystemEntryKind.Directory); } - + function getDirectories(path: string): string[] { return filter(_fs.readdirSync(path), p => fileSystemEntryExists(combinePaths(path, p), FileSystemEntryKind.Directory)); - } + } return { args: process.argv.slice(2), diff --git a/tests/cases/unittests/cachingInServerLSHost.ts b/tests/cases/unittests/cachingInServerLSHost.ts index b58ca9898bc7c..65f3cb1ed9b97 100644 --- a/tests/cases/unittests/cachingInServerLSHost.ts +++ b/tests/cases/unittests/cachingInServerLSHost.ts @@ -82,7 +82,7 @@ namespace ts { const projectService = new server.ProjectService(serverHost, logger); const rootScriptInfo = projectService.openFile(rootFile, /* openedByClient */true); const project = projectService.createInferredProject(rootScriptInfo); - project.setProjectOptions( {files: 
[rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); + project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); return { project, rootScriptInfo @@ -145,7 +145,7 @@ namespace ts { catch (e) { assert.isTrue(e.message.indexOf(`Could not find file: '${imported.name}'.`) === 0); } - + assert.isTrue(fileExistsIsCalled); } { From 0415b95fd268ffdf1f14f045dc99d5cefb3e8878 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Tue, 31 May 2016 10:11:04 -0700 Subject: [PATCH 15/18] Passing regular expressions to native hosts --- src/compiler/commandLineParser.ts | 5 ++-- src/compiler/core.ts | 38 +++++++++++++++++++++++++------ src/compiler/sys.ts | 7 ++++-- src/services/shims.ts | 11 ++++++--- 4 files changed, 47 insertions(+), 14 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index d21fcd3b89d39..7674dcdd06380 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -1021,13 +1021,14 @@ namespace ts { // // /a/b/* - Watch /a/b directly to catch any new file // /a/b/a?z - Watch /a/b directly to catch any new file matching a?z - const excludeRegExp = getRegularExpressionForWildcard(exclude, path, "exclude", useCaseSensitiveFileNames); + const rawExcludeRegex = getRegularExpressionForWildcard(exclude, path, "exclude"); + const excludeRegex = rawExcludeRegex && new RegExp(rawExcludeRegex, useCaseSensitiveFileNames ? "" : "i"); const wildcardDirectories: Map = {}; if (include !== undefined) { const recursiveKeys: string[] = []; for (const file of include) { const name = combinePaths(path, file); - if (excludeRegExp && excludeRegExp.test(name)) { + if (excludeRegex && excludeRegex.test(name)) { continue; } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index cc3c2b2811476..02d018194311f 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -911,7 +911,7 @@ namespace ts { const reservedCharacterPattern = /[^\w\s\/]/g; const wildcardCharCodes = [CharacterCodes.asterisk, CharacterCodes.question]; - export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude", useCaseSensitiveFileNames: boolean) { + export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude") { if (specs === undefined || specs.length === 0) { return undefined; } @@ -978,7 +978,7 @@ namespace ts { return undefined; } - return new RegExp("^(" + pattern + (usage === "exclude" ? ")($|/)" : ")$"), useCaseSensitiveFileNames ? "" : "i"); + return "^(" + pattern + (usage === "exclude" ? 
")($|/)" : ")$"); } function replaceWildcardCharacter(match: string) { @@ -990,15 +990,39 @@ namespace ts { directories: string[]; } - export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] { + interface FileMatcherPatterns { + includeFilePattern: string; + includeDirectoryPattern: string; + excludePattern: string; + basePaths: string[]; + } + + export function getFileMatcherPatterns(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string): FileMatcherPatterns { path = normalizePath(path); currentDirectory = normalizePath(currentDirectory); const absolutePath = combinePaths(currentDirectory, path); - const includeFileRegex = getRegularExpressionForWildcard(includes, absolutePath, "files", useCaseSensitiveFileNames); - const includeDirectoryRegex = getRegularExpressionForWildcard(includes, absolutePath, "directories", useCaseSensitiveFileNames); - const excludeRegex = getRegularExpressionForWildcard(excludes, absolutePath, "exclude", useCaseSensitiveFileNames); + + return { + includeFilePattern: getRegularExpressionForWildcard(includes, absolutePath, "files"), + includeDirectoryPattern: getRegularExpressionForWildcard(includes, absolutePath, "directories"), + excludePattern: getRegularExpressionForWildcard(excludes, absolutePath, "exclude"), + basePaths: getBasePaths(path, includes, useCaseSensitiveFileNames) + }; + } + + export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] { + path = normalizePath(path); + currentDirectory = normalizePath(currentDirectory); + + const patterns = getFileMatcherPatterns(path, extensions, excludes, includes, useCaseSensitiveFileNames, currentDirectory); + + const regexFlag = useCaseSensitiveFileNames ? 
"" : "i"; + const includeFileRegex = patterns.includeFilePattern && new RegExp(patterns.includeFilePattern, regexFlag); + const includeDirectoryRegex = patterns.includeDirectoryPattern && new RegExp(patterns.includeDirectoryPattern, regexFlag); + const excludeRegex = patterns.excludePattern && new RegExp(patterns.excludePattern, regexFlag); + const result: string[] = []; - for (const basePath of getBasePaths(path, includes, useCaseSensitiveFileNames)) { + for (const basePath of patterns.basePaths) { visitDirectory(basePath, combinePaths(currentDirectory, basePath)); } return result; diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index de7d24b229617..21a78ee1dbe24 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -73,7 +73,7 @@ namespace ts { readFile(path: string): string; writeFile(path: string, contents: string): void; getDirectories(path: string): string[]; - readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[]; + readDirectory(path: string, extensions?: string[], basePaths?: string[], excludeEx?: string, includeFileEx?: string, includeDirEx?: string): string[]; watchFile?(path: string, callback: FileWatcherCallback): FileWatcher; watchDirectory?(path: string, callback: DirectoryWatcherCallback, recursive?: boolean): FileWatcher; realpath(path: string): string; @@ -558,7 +558,10 @@ namespace ts { getExecutingFilePath: () => ChakraHost.executingFile, getCurrentDirectory: () => ChakraHost.currentDirectory, getDirectories: ChakraHost.getDirectories, - readDirectory: ChakraHost.readDirectory, + readDirectory: (path: string, extensions?: string[], excludes?: string[], includes?: string[]) => { + const pattern = getFileMatcherPatterns(path, extensions, excludes, includes, !!ChakraHost.useCaseSensitiveFileNames, ChakraHost.currentDirectory); + return ChakraHost.readDirectory(path, extensions, pattern.basePaths, pattern.excludePattern, pattern.includeFilePattern, pattern.includeDirectoryPattern); + }, exit: ChakraHost.quit, realpath }; diff --git a/src/services/shims.ts b/src/services/shims.ts index 032a2712fdb76..75448536bad7e 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -80,8 +80,9 @@ namespace ts { * @param exclude A JSON encoded string[] containing the paths to exclude * when enumerating the directory. */ - readDirectory(rootDir: string, extension: string, exclude?: string, include?: string, depth?: number): string; + readDirectory(rootDir: string, extension: string, basePaths?: string, excludeEx?: string, includeFileEx?: string, includeDirEx?: string, depth?: number): string; useCaseSensitiveFileNames?(): boolean; + getCurrentDirectory(): string; trace(s: string): void; } @@ -453,11 +454,15 @@ namespace ts { // Wrap the API changes for 2.0 release. This try/catch // should be removed once TypeScript 2.0 has shipped. 
try { + const pattern = getFileMatcherPatterns(rootDir, extensions, exclude, include, + this.shimHost.useCaseSensitiveFileNames(), this.shimHost.getCurrentDirectory()); return JSON.parse(this.shimHost.readDirectory( rootDir, JSON.stringify(extensions), - JSON.stringify(exclude), - JSON.stringify(include), + JSON.stringify(pattern.basePaths), + pattern.excludePattern, + pattern.includeFilePattern, + pattern.includeDirectoryPattern, depth )); } From 86cde9e22204be835572216b10757dcf82af8050 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Fri, 17 Jun 2016 16:56:23 -0700 Subject: [PATCH 16/18] Updating readDirectory for tsserverProjectSystem unit tests --- .../cases/unittests/tsserverProjectSystem.ts | 41 +++++++++---------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/tests/cases/unittests/tsserverProjectSystem.ts b/tests/cases/unittests/tsserverProjectSystem.ts index cf382d094632c..dcefd491fff9f 100644 --- a/tests/cases/unittests/tsserverProjectSystem.ts +++ b/tests/cases/unittests/tsserverProjectSystem.ts @@ -90,23 +90,6 @@ namespace ts { } } - function readDirectory(folder: FSEntry, ext: string, excludes: Path[], result: string[]): void { - if (!folder || !isFolder(folder) || contains(excludes, folder.path)) { - return; - } - for (const entry of folder.entries) { - if (contains(excludes, entry.path)) { - continue; - } - if (isFolder(entry)) { - readDirectory(entry, ext, excludes, result); - } - else if (fileExtensionIs(entry.path, ext)) { - result.push(entry.fullPath); - } - } - } - function checkNumberOfConfiguredProjects(projectService: server.ProjectService, expected: number) { assert.equal(projectService.configuredProjects.length, expected, `expected ${expected} configured project(s)`); } @@ -188,10 +171,26 @@ namespace ts { } } - readDirectory(path: string, ext: string, excludes: string[]): string[] { - const result: string[] = []; - readDirectory(this.fs.get(this.toPath(path)), ext, map(excludes, e => toPath(e, path, this.getCanonicalFileName)), result); - return result; + readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[] { + const that = this; + return ts.matchFiles(path, extensions, exclude, include, this.useCaseSensitiveFileNames, this.getCurrentDirectory(), (dir) => { + const result: FileSystemEntries = { + directories: [], + files : [] + }; + const dirEntry = that.fs.get(that.toPath(dir)); + if (isFolder(dirEntry)) { + dirEntry.entries.forEach((entry) => { + if (isFolder(entry)) { + result.directories.push(entry.fullPath); + } + else if (isFile(entry)) { + result.files.push(entry.fullPath); + } + }); + } + return result; + }); } watchDirectory(directoryName: string, callback: DirectoryWatcherCallback, recursive: boolean): DirectoryWatcher { From 95072aab824ba8cc591c778efc7c0e622afa2cd2 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Fri, 17 Jun 2016 17:11:13 -0700 Subject: [PATCH 17/18] Responding to PR feedback --- src/compiler/commandLineParser.ts | 8 ++++---- src/compiler/core.ts | 2 +- src/compiler/sys.ts | 10 +++++++++- tests/cases/unittests/cachingInServerLSHost.ts | 2 +- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 2b83690372a33..9cca35977be4e 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -672,7 +672,7 @@ namespace ts { // Skip over any minified JavaScript files (ending in ".min.js") // Skip over dotted files and folders as well - const IgnoreFileNamePattern = 
/(\.min\.js$)|([\\/]\.[\w.])/; + const ignoreFileNamePattern = /(\.min\.js$)|([\\/]\.[\w.])/; /** * Parse the contents of a config file (tsconfig.json). * @param json The contents of the config file to parse @@ -969,7 +969,7 @@ namespace ts { continue; } - if (IgnoreFileNamePattern.test(file)) { + if (ignoreFileNamePattern.test(file)) { continue; } @@ -1055,8 +1055,8 @@ namespace ts { // Remove any subpaths under an existing recursively watched directory. for (const key in wildcardDirectories) { if (hasProperty(wildcardDirectories, key)) { - for (const recursiveKey in recursiveKeys) { - if (containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { + for (const recursiveKey of recursiveKeys) { + if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { delete wildcardDirectories[key]; } } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index 56d8262a3ba1c..c41b4baa9dc2a 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -1211,7 +1211,7 @@ namespace ts { export function isJsxOrTsxExtension(ext: string): boolean { return ext === ".jsx" || ext === ".tsx"; } - + export function changeExtension(path: T, newExtension: string): T { return (removeFileExtension(path) + newExtension); } diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 70fef01b50474..8b977c53111d2 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -381,7 +381,15 @@ namespace ts { continue; } const name = combinePaths(path, entry); - const stat = _fs.statSync(name); + + let stat: any; + try { + stat = _fs.statSync(name); + } + catch (e) { + continue; + } + if (stat.isFile()) { files.push(entry); } diff --git a/tests/cases/unittests/cachingInServerLSHost.ts b/tests/cases/unittests/cachingInServerLSHost.ts index 65f3cb1ed9b97..2608a082d6df6 100644 --- a/tests/cases/unittests/cachingInServerLSHost.ts +++ b/tests/cases/unittests/cachingInServerLSHost.ts @@ -82,7 +82,7 @@ namespace ts { const projectService = new server.ProjectService(serverHost, logger); const rootScriptInfo = projectService.openFile(rootFile, /* openedByClient */true); const project = projectService.createInferredProject(rootScriptInfo); - project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); + project.setProjectOptions({ files: [rootScriptInfo.fileName], compilerOptions: { module: ts.ModuleKind.AMD } }); return { project, rootScriptInfo From f73ed594327eeef48e391f2daebea83e32ca5170 Mon Sep 17 00:00:00 2001 From: Richard Knoll Date: Mon, 20 Jun 2016 11:09:48 -0700 Subject: [PATCH 18/18] Adding more matchFiles test cases --- tests/cases/unittests/matchFiles.ts | 189 ++++++++++++++++++++++++++++ 1 file changed, 189 insertions(+) diff --git a/tests/cases/unittests/matchFiles.ts b/tests/cases/unittests/matchFiles.ts index c2d13cf0393a3..b9c538a9e14dd 100644 --- a/tests/cases/unittests/matchFiles.ts +++ b/tests/cases/unittests/matchFiles.ts @@ -65,6 +65,16 @@ namespace ts { "c:/dev/f.other" ]); + const caseInsensitiveCommonFoldersHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.ts", + "c:/dev/node_modules/a.ts", + "c:/dev/bower_components/a.ts", + "c:/dev/jspm_packages/a.ts" + ]); + describe("matchFiles", () => { describe("with literal file list", () => { it("without exclusions", () => { @@ -297,6 +307,87 @@ namespace ts { assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); 
assert.deepEqual(actual.errors, expected.errors); }); + it("with common package folders and no exclusions", () => { + const json = { + include: [ + "a.ts", + "b.ts", + "node_modules/a.ts", + "bower_components/a.ts", + "jspm_packages/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); + it("with common package folders and exclusions", () => { + const json = { + include: [ + "a.ts", + "b.ts", + "node_modules/a.ts", + "bower_components/a.ts", + "jspm_packages/a.ts" + ], + exclude: [ + "a.ts", + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/bower_components/a.ts", + "c:/dev/jspm_packages/a.ts", + "c:/dev/node_modules/a.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); + it("with common package folders and empty exclude", () => { + const json = { + include: [ + "a.ts", + "b.ts", + "node_modules/a.ts", + "bower_components/a.ts", + "jspm_packages/a.ts" + ], + exclude: [] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/bower_components/a.ts", + "c:/dev/jspm_packages/a.ts", + "c:/dev/node_modules/a.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); }); describe("with wildcard include list", () => { @@ -392,6 +483,32 @@ namespace ts { assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); assert.deepEqual(actual.errors, expected.errors); }); + it("with multiple recursive directories", () => { + const json = { + include: [ + "x/y/**/a.ts", + "x/**/a.ts", + "z/**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts", + "c:/dev/z/a.ts" + ], + wildcardDirectories: { + "c:/dev/x": ts.WatchDirectoryFlags.Recursive, + "c:/dev/z": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); it("case sensitive", () => { const json = { include: [ @@ -486,6 +603,78 @@ namespace ts { assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); assert.deepEqual(actual.errors, expected.errors); }); + it("with common package folders and no exclusions", () => { + const json = { + include: [ + "**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + 
errors: [], + fileNames: [ + "c:/dev/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); + it("with common package folders and exclusions", () => { + const json = { + include: [ + "**/a.ts" + ], + exclude: [ + "a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/bower_components/a.ts", + "c:/dev/jspm_packages/a.ts", + "c:/dev/node_modules/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); + it("with common package folders and empty exclude", () => { + const json = { + include: [ + "**/a.ts" + ], + exclude: [] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/bower_components/a.ts", + "c:/dev/jspm_packages/a.ts", + "c:/dev/node_modules/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath); + assert.deepEqual(actual.fileNames, expected.fileNames); + assert.deepEqual(actual.wildcardDirectories, expected.wildcardDirectories); + assert.deepEqual(actual.errors, expected.errors); + }); it("exclude .js files when allowJs=false", () => { const json = { compilerOptions: {