diff --git a/Jakefile.js b/Jakefile.js index 5a3348a61eb19..744914c315716 100644 --- a/Jakefile.js +++ b/Jakefile.js @@ -120,6 +120,7 @@ var languageServiceLibrarySources = [ var harnessCoreSources = [ "harness.ts", + "virtualFileSystem.ts", "sourceMapRecorder.ts", "harnessLanguageService.ts", "fourslash.ts", @@ -150,7 +151,8 @@ var harnessSources = harnessCoreSources.concat([ "reuseProgramStructure.ts", "cachingInServerLSHost.ts", "moduleResolution.ts", - "tsconfigParsing.ts" + "tsconfigParsing.ts", + "matchFiles.ts" ].map(function (f) { return path.join(unittestsDirectory, f); })).concat([ diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 367fcf48c0d7f..52f9ef05f37b1 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -492,53 +492,50 @@ namespace ts { const { options: optionsFromJsonConfigFile, errors } = convertCompilerOptionsFromJson(json["compilerOptions"], basePath); const options = extend(existingOptions, optionsFromJsonConfigFile); + const { fileNames, wildcardDirectories } = getFileNames(); return { options, - fileNames: getFileNames(), - errors + fileNames, + errors, + wildcardDirectories }; - function getFileNames(): string[] { - let fileNames: string[] = []; + function getFileNames(): ExpandResult { + let fileNames: string[]; if (hasProperty(json, "files")) { - if (json["files"] instanceof Array) { - fileNames = map(json["files"], s => combinePaths(basePath, s)); + if (isArray(json["files"])) { + fileNames = json["files"]; } else { errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "files", "Array")); } } - else { - const filesSeen: Map = {}; - const exclude = json["exclude"] instanceof Array ? map(json["exclude"], normalizeSlashes) : undefined; - const supportedExtensions = getSupportedExtensions(options); - Debug.assert(indexOf(supportedExtensions, ".ts") < indexOf(supportedExtensions, ".d.ts"), "Changed priority of extensions to pick"); - - // Get files of supported extensions in their order of resolution - for (const extension of supportedExtensions) { - const filesInDirWithExtension = host.readDirectory(basePath, extension, exclude); - for (const fileName of filesInDirWithExtension) { - // .ts extension would read the .d.ts extension files too but since .d.ts is lower priority extension, - // lets pick them when its turn comes up - if (extension === ".ts" && fileExtensionIs(fileName, ".d.ts")) { - continue; - } - // If this is one of the output extension (which would be .d.ts and .js if we are allowing compilation of js files) - // do not include this file if we included .ts or .tsx file with same base name as it could be output of the earlier compilation - if (extension === ".d.ts" || (options.allowJs && contains(supportedJavascriptExtensions, extension))) { - const baseName = fileName.substr(0, fileName.length - extension.length); - if (hasProperty(filesSeen, baseName + ".ts") || hasProperty(filesSeen, baseName + ".tsx")) { - continue; - } - } + let includeSpecs: string[]; + if (hasProperty(json, "include")) { + if (isArray(json["include"])) { + includeSpecs = json["include"]; + } + else { + errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "include", "Array")); + } + } - filesSeen[fileName] = true; - fileNames.push(fileName); - } + let excludeSpecs: string[]; + if (hasProperty(json, "exclude")) { + if (isArray(json["exclude"])) { + excludeSpecs = json["exclude"]; + } + else { + 
errors.push(createCompilerDiagnostic(Diagnostics.Compiler_option_0_requires_a_value_of_type_1, "exclude", "Array")); } } - return fileNames; + + if (fileNames === undefined && includeSpecs === undefined) { + includeSpecs = ["**/*"]; + } + + return matchFileNames(fileNames, includeSpecs, excludeSpecs, basePath, options, host, errors); } } @@ -588,4 +585,292 @@ namespace ts { return { options, errors }; } + + /** + * Tests for a path that ends in a recursive directory wildcard. + * Matches **, \**, **\, and \**\, but not a**b. + * + * NOTE: used \ in place of / above to avoid issues with multiline comments. + * + * Breakdown: + * (^|\/) # matches either the beginning of the string or a directory separator. + * \*\* # matches the recursive directory wildcard "**". + * \/?$ # matches an optional trailing directory separator at the end of the string. + */ + const invalidTrailingRecursionPattern = /(^|\/)\*\*\/?$/; + + /** + * Tests for a path with multiple recursive directory wildcards. + * Matches **\** and **\a\**, but not **\a**b. + * + * NOTE: used \ in place of / above to avoid issues with multiline comments. + * + * Breakdown: + * (^|\/) # matches either the beginning of the string or a directory separator. + * \*\*\/ # matches a recursive directory wildcard "**" followed by a directory separator. + * (.*\/)? # optionally matches any number of characters followed by a directory separator. + * \*\* # matches a recursive directory wildcard "**" + * ($|\/) # matches either the end of the string or a directory separator. + */ + const invalidMultipleRecursionPatterns = /(^|\/)\*\*\/(.*\/)?\*\*($|\/)/; + + /** + * Tests for a path containing a wildcard character in a directory component of the path. + * Matches \*\, \?\, and \a*b\, but not \a\ or \a\*. + * + * NOTE: used \ in place of / above to avoid issues with multiline comments. + * + * Breakdown: + * \/ # matches a directory separator. + * [^/]*? # matches any number of characters excluding directory separators (non-greedy). + * [*?] # matches either a wildcard character (* or ?) + * [^/]* # matches any number of characters excluding directory separators (greedy). + * \/ # matches a directory separator. + */ + const watchRecursivePattern = /\/[^/]*?[*?][^/]*\//; + + /** + * Matches the portion of a wildcard path that does not contain wildcards. + * Matches \a of \a\*, or \a\b\c of \a\b\c\?\d. + * + * NOTE: used \ in place of / above to avoid issues with multiline comments. + * + * Breakdown: + * ^ # matches the beginning of the string + * [^*?]* # matches any number of non-wildcard characters + * (?=\/[^/]*[*?]) # lookahead that matches a directory separator followed by + * # a path component that contains at least one wildcard character (* or ?). + */ + const wildcardDirectoryPattern = /^[^*?]*(?=\/[^/]*[*?])/; + + /** + * Expands an array of file specifications. + * + * @param fileNames The literal file names to include. + * @param include The wildcard file specifications to include. + * @param exclude The wildcard file specifications to exclude. + * @param basePath The base path for any relative file specifications. + * @param options Compiler options. + * @param host The host used to resolve files and directories. + * @param errors An array for diagnostic reporting. 
+ */ + function matchFileNames(fileNames: string[], include: string[], exclude: string[], basePath: string, options: CompilerOptions, host: ParseConfigHost, errors: Diagnostic[]): ExpandResult { + basePath = normalizePath(basePath); + basePath = removeTrailingDirectorySeparator(basePath); + + // The exclude spec list is converted into a regular expression, which allows us to quickly + // test whether a file or directory should be excluded before recursively traversing the + // file system. + const keyMapper = host.useCaseSensitiveFileNames ? caseSensitiveKeyMapper : caseInsensitiveKeyMapper; + + // Literal file names (provided via the "files" array in tsconfig.json) are stored in a + // file map with a possibly case insensitive key. We use this map later when when including + // wildcard paths. + const literalFileMap: Map = {}; + + // Wildcard paths (provided via the "includes" array in tsconfig.json) are stored in a + // file map with a possibly case insensitive key. We use this map to store paths matched + // via wildcard, and to handle extension priority. + const wildcardFileMap: Map = {}; + + if (include) { + include = validateSpecs(include, errors, /*allowTrailingRecursion*/ false); + } + + if (exclude) { + exclude = validateSpecs(exclude, errors, /*allowTrailingRecursion*/ true); + } + + // Wildcard directories (provided as part of a wildcard path) are stored in a + // file map that marks whether it was a regular wildcard match (with a `*` or `?` token), + // or a recursive directory. This information is used by filesystem watchers to monitor for + // new entries in these paths. + const wildcardDirectories: Map = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames); + + // Rather than requery this for each file and filespec, we query the supported extensions + // once and store it on the expansion context. + const supportedExtensions = getSupportedExtensions(options); + + // Literal files are always included verbatim. An "include" or "exclude" specification cannot + // remove a literal file. + if (fileNames) { + for (const fileName of fileNames) { + const file = combinePaths(basePath, fileName); + literalFileMap[keyMapper(file)] = file; + } + } + + if (include && include.length > 0) { + for (const file of host.readDirectory(basePath, supportedExtensions, exclude, include)) { + // If we have already included a literal or wildcard path with a + // higher priority extension, we should skip this file. + // + // This handles cases where we may encounter both .ts and + // .d.ts (or .js if "allowJs" is enabled) in the same + // directory when they are compilation outputs. + if (hasFileWithHigherPriorityExtension(file, literalFileMap, wildcardFileMap, supportedExtensions, keyMapper)) { + continue; + } + + // We may have included a wildcard path with a lower priority + // extension due to the user-defined order of entries in the + // "include" array. If there is a lower priority extension in the + // same directory, we should remove it. + removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper); + + const key = keyMapper(file); + if (!hasProperty(literalFileMap, key) && !hasProperty(wildcardFileMap, key)) { + wildcardFileMap[key] = file; + } + } + } + + const literalFiles = reduceProperties(literalFileMap, addFileToOutput, []); + const wildcardFiles = reduceProperties(wildcardFileMap, addFileToOutput, []); + wildcardFiles.sort(host.useCaseSensitiveFileNames ? 
compareStrings : compareStringsCaseInsensitive); + return { + fileNames: literalFiles.concat(wildcardFiles), + wildcardDirectories + }; + } + + function validateSpecs(specs: string[], errors: Diagnostic[], allowTrailingRecursion: boolean) { + const validSpecs: string[] = []; + for (const spec of specs) { + if (!allowTrailingRecursion && invalidTrailingRecursionPattern.test(spec)) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, spec)); + } + else if (invalidMultipleRecursionPatterns.test(spec)) { + errors.push(createCompilerDiagnostic(Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, spec)); + } + else { + validSpecs.push(spec); + } + } + + return validSpecs; + } + + /** + * Gets directories in a set of include patterns that should be watched for changes. + */ + function getWildcardDirectories(include: string[], exclude: string[], path: string, useCaseSensitiveFileNames: boolean) { + // We watch a directory recursively if it contains a wildcard anywhere in a directory segment + // of the pattern: + // + // /a/b/**/d - Watch /a/b recursively to catch changes to any d in any subfolder recursively + // /a/b/*/d - Watch /a/b recursively to catch any d in any immediate subfolder, even if a new subfolder is added + // + // We watch a directory without recursion if it contains a wildcard in the file segment of + // the pattern: + // + // /a/b/* - Watch /a/b directly to catch any new file + // /a/b/a?z - Watch /a/b directly to catch any new file matching a?z + const excludeRegExp = getRegularExpressionForWildcard(exclude, path, "exclude", useCaseSensitiveFileNames); + const wildcardDirectories: Map = {}; + if (include !== undefined) { + const recursiveKeys: string[] = []; + for (const file of include) { + const name = combinePaths(path, file); + if (excludeRegExp && excludeRegExp.test(name)) { + continue; + } + + const match = wildcardDirectoryPattern.exec(name); + if (match) { + const key = useCaseSensitiveFileNames ? match[0] : match[0].toLowerCase(); + const flags = watchRecursivePattern.test(name) ? WatchDirectoryFlags.Recursive : WatchDirectoryFlags.None; + const existingFlags = getProperty(wildcardDirectories, key); + if (existingFlags === undefined || existingFlags < flags) { + wildcardDirectories[key] = flags; + if (flags === WatchDirectoryFlags.Recursive) { + recursiveKeys.push(key); + } + } + } + } + + // Remove any subpaths under an existing recursively watched directory. + for (const key in wildcardDirectories) { + if (hasProperty(wildcardDirectories, key)) { + for (const recursiveKey in recursiveKeys) { + if (containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { + delete wildcardDirectories[key]; + } + } + } + } + } + + return wildcardDirectories; + } + + /** + * Determines whether a literal or wildcard file has already been included that has a higher + * extension priority. + * + * @param file The path to the file. + * @param extensionPriority The priority of the extension. + * @param context The expansion context. 
+ */ + function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { + const extensionPriority = getExtensionPriority(file, extensions); + const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority); + for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) { + const higherPriorityExtension = extensions[i]; + const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension)); + if (hasProperty(literalFiles, higherPriorityPath) || hasProperty(wildcardFiles, higherPriorityPath)) { + return true; + } + } + + return false; + } + + /** + * Removes files included via wildcard expansion with a lower extension priority that have + * already been included. + * + * @param file The path to the file. + * @param extensionPriority The priority of the extension. + * @param context The expansion context. + */ + function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map, extensions: string[], keyMapper: (value: string) => string) { + const extensionPriority = getExtensionPriority(file, extensions); + const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority); + for (let i = nextExtensionPriority; i < extensions.length; i++) { + const lowerPriorityExtension = extensions[i]; + const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension)); + delete wildcardFiles[lowerPriorityPath]; + } + } + + /** + * Adds a file to an array of files. + * + * @param output The output array. + * @param file The file path. + */ + function addFileToOutput(output: string[], file: string) { + output.push(file); + return output; + } + + /** + * Gets a case sensitive key. + * + * @param key The original key. + */ + function caseSensitiveKeyMapper(key: string) { + return key; + } + + /** + * Gets a case insensitive key. + * + * @param key The original key. + */ + function caseInsensitiveKeyMapper(key: string) { + return key.toLowerCase(); + } } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index 1ae130f0f2d9f..d0f27a2046f61 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -25,7 +25,7 @@ namespace ts { contains, remove, forEachValue: forEachValueInMap, - clear + clear, }; function forEachValueInMap(f: (key: Path, value: T) => void) { @@ -115,6 +115,15 @@ namespace ts { return -1; } + export function indexOfAnyCharCode(text: string, charCodes: number[], start?: number): number { + for (let i = start || 0, len = text.length; i < len; i++) { + if (contains(charCodes, text.charCodeAt(i))) { + return i; + } + } + return -1; + } + export function countWhere(array: T[], predicate: (x: T) => boolean): number { let count = 0; if (array) { @@ -494,6 +503,28 @@ namespace ts { return a < b ? Comparison.LessThan : Comparison.GreaterThan; } + export function compareStrings(a: string, b: string, ignoreCase?: boolean): Comparison { + if (a === b) return Comparison.EqualTo; + if (a === undefined) return Comparison.LessThan; + if (b === undefined) return Comparison.GreaterThan; + if (ignoreCase) { + if (String.prototype.localeCompare) { + const result = a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" }); + return result < 0 ? Comparison.LessThan : result > 0 ? Comparison.GreaterThan : Comparison.EqualTo; + } + + a = a.toUpperCase(); + b = b.toUpperCase(); + if (a === b) return Comparison.EqualTo; + } + + return a < b ? 
Comparison.LessThan : Comparison.GreaterThan; + } + + export function compareStringsCaseInsensitive(a: string, b: string) { + return compareStrings(a, b, /*ignoreCase*/ true); + } + function getDiagnosticFileName(diagnostic: Diagnostic): string { return diagnostic.file ? diagnostic.file.fileName : undefined; } @@ -760,12 +791,251 @@ namespace ts { return path1 + directorySeparator + path2; } + /** + * Removes a trailing directory separator from a path. + * @param path The path. + */ + export function removeTrailingDirectorySeparator(path: string) { + if (path.charAt(path.length - 1) === directorySeparator) { + return path.substr(0, path.length - 1); + } + + return path; + } + + /** + * Adds a trailing directory separator to a path, if it does not already have one. + * @param path The path. + */ + export function ensureTrailingDirectorySeparator(path: string) { + if (path.charAt(path.length - 1) !== directorySeparator) { + return path + directorySeparator; + } + + return path; + } + + export function comparePaths(a: string, b: string, currentDirectory: string, ignoreCase?: boolean) { + if (a === b) return Comparison.EqualTo; + if (a === undefined) return Comparison.LessThan; + if (b === undefined) return Comparison.GreaterThan; + a = removeTrailingDirectorySeparator(a); + b = removeTrailingDirectorySeparator(b); + const aComponents = getNormalizedPathComponents(a, currentDirectory); + const bComponents = getNormalizedPathComponents(b, currentDirectory); + const sharedLength = Math.min(aComponents.length, bComponents.length); + for (let i = 0; i < sharedLength; i++) { + const result = compareStrings(aComponents[i], bComponents[i], ignoreCase); + if (result !== Comparison.EqualTo) { + return result; + } + } + + return compareValues(aComponents.length, bComponents.length); + } + + export function containsPath(parent: string, child: string, currentDirectory: string, ignoreCase?: boolean) { + if (parent === undefined || child === undefined) return false; + if (parent === child) return true; + parent = removeTrailingDirectorySeparator(parent); + child = removeTrailingDirectorySeparator(child); + if (parent === child) return true; + const parentComponents = getNormalizedPathComponents(parent, currentDirectory); + const childComponents = getNormalizedPathComponents(child, currentDirectory); + if (childComponents.length < parentComponents.length) { + return false; + } + + for (let i = 0; i < parentComponents.length; i++) { + const result = compareStrings(parentComponents[i], childComponents[i], ignoreCase); + if (result !== Comparison.EqualTo) { + return false; + } + } + + return true; + } + export function fileExtensionIs(path: string, extension: string): boolean { const pathLen = path.length; const extLen = extension.length; return pathLen > extLen && path.substr(pathLen - extLen, extLen) === extension; } + export function fileExtensionIsAny(path: string, extensions: string[]): boolean { + for (const extension of extensions) { + if (fileExtensionIs(path, extension)) { + return true; + } + } + + return false; + } + + + // Reserved characters, forces escaping of any non-word (or digit), non-whitespace character. + // It may be inefficient (we could just match (/[-[\]{}()*+?.,\\^$|#\s]/g), but this is future + // proof. 
+ const reservedCharacterPattern = /[^\w\s\/]/g; + const wildcardCharCodes = [CharacterCodes.asterisk, CharacterCodes.question]; + + export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude", useCaseSensitiveFileNames: boolean) { + if (specs === undefined || specs.length === 0) { + return undefined; + } + + let pattern = ""; + let hasWrittenSubpattern = false; + spec: for (const spec of specs) { + if (!spec) { + continue; + } + + let subpattern = ""; + let hasRecursiveDirectoryWildcard = false; + let hasWrittenComponent = false; + const components = getNormalizedPathComponents(spec, basePath); + if (usage !== "exclude" && components[components.length - 1] === "**") { + continue spec; + } + + // getNormalizedPathComponents includes the separator for the root component. + // We need to remove to create our regex correctly. + components[0] = removeTrailingDirectorySeparator(components[0]); + + let optionalCount = 0; + for (const component of components) { + if (component === "**") { + if (hasRecursiveDirectoryWildcard) { + continue spec; + } + + subpattern += "(/.+?)?"; + hasRecursiveDirectoryWildcard = true; + hasWrittenComponent = true; + } + else { + if (usage === "directories") { + subpattern += "("; + optionalCount++; + } + + if (hasWrittenComponent) { + subpattern += directorySeparator; + } + + subpattern += component.replace(reservedCharacterPattern, replaceWildcardCharacter); + hasWrittenComponent = true; + } + } + + while (optionalCount > 0) { + subpattern += ")?"; + optionalCount--; + } + + if (hasWrittenSubpattern) { + pattern += "|"; + } + + pattern += "(" + subpattern + ")"; + hasWrittenSubpattern = true; + } + + if (!pattern) { + return undefined; + } + + return new RegExp("^(" + pattern + (usage === "exclude" ? ")($|/)" : ")$"), useCaseSensitiveFileNames ? "" : "i"); + } + + function replaceWildcardCharacter(match: string) { + return match === "*" ? "[^/]*" : match === "?" ? 
"[^/]" : "\\" + match; + } + + export interface FileSystemEntries { + files: string[]; + directories: string[]; + } + + export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] { + path = normalizePath(path); + currentDirectory = normalizePath(currentDirectory); + const absolutePath = combinePaths(currentDirectory, path); + const includeFileRegex = getRegularExpressionForWildcard(includes, absolutePath, "files", useCaseSensitiveFileNames); + const includeDirectoryRegex = getRegularExpressionForWildcard(includes, absolutePath, "directories", useCaseSensitiveFileNames); + const excludeRegex = getRegularExpressionForWildcard(excludes, absolutePath, "exclude", useCaseSensitiveFileNames); + const result: string[] = []; + for (const basePath of getBasePaths(path, includes, useCaseSensitiveFileNames)) { + visitDirectory(basePath, combinePaths(currentDirectory, basePath)); + } + return result; + + function visitDirectory(path: string, absolutePath: string) { + const { files, directories } = getFileSystemEntries(path); + + for (const current of files) { + const name = combinePaths(path, current); + const absoluteName = combinePaths(absolutePath, current); + if ((!extensions || fileExtensionIsAny(name, extensions)) && + (!includeFileRegex || includeFileRegex.test(absoluteName)) && + (!excludeRegex || !excludeRegex.test(absoluteName))) { + result.push(name); + } + } + + for (const current of directories) { + const name = combinePaths(path, current); + const absoluteName = combinePaths(absolutePath, current); + if ((!includeDirectoryRegex || includeDirectoryRegex.test(absoluteName)) && + (!excludeRegex || !excludeRegex.test(absoluteName))) { + visitDirectory(name, absoluteName); + } + } + } + } + + /** + * Computes the unique non-wildcard base paths amongst the provided include patterns. + */ + function getBasePaths(path: string, includes: string[], useCaseSensitiveFileNames: boolean) { + // Storage for our results in the form of literal paths (e.g. the paths as written by the user). + const basePaths: string[] = [path]; + if (includes) { + // Storage for literal base paths amongst the include patterns. + const includeBasePaths: string[] = []; + for (const include of includes) { + if (isRootedDiskPath(include)) { + const wildcardOffset = indexOfAnyCharCode(include, wildcardCharCodes); + const includeBasePath = wildcardOffset < 0 + ? removeTrailingDirectorySeparator(getDirectoryPath(include)) + : include.substring(0, include.lastIndexOf(directorySeparator, wildcardOffset)); + + // Append the literal and canonical candidate base paths. + includeBasePaths.push(includeBasePath); + } + } + + // Sort the offsets array using either the literal or canonical path representations. + includeBasePaths.sort(useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive); + + // Iterate over each include base path and include unique base paths that are not a + // subpath of an existing base path + include: for (let i = 0; i < includeBasePaths.length; i++) { + const includeBasePath = includeBasePaths[i]; + for (let j = 0; j < basePaths.length; j++) { + if (containsPath(basePaths[j], includeBasePath, path, !useCaseSensitiveFileNames)) { + continue include; + } + } + + basePaths.push(includeBasePath); + } + } + + return basePaths; + } + /** * List of supported extensions in order of file resolution precedence. 
*/ @@ -788,6 +1058,59 @@ namespace ts { return false; } + /** + * Extension boundaries by priority. Lower numbers indicate higher priorities, and are + * aligned to the offset of the highest priority extension in the + * allSupportedExtensions array. + */ + export const enum ExtensionPriority { + TypeScriptFiles = 0, + DeclarationAndJavaScriptFiles = 2, + Limit = 5, + + Highest = TypeScriptFiles, + Lowest = DeclarationAndJavaScriptFiles, + } + + export function getExtensionPriority(path: string, supportedExtensions: string[]): ExtensionPriority { + for (let i = supportedExtensions.length - 1; i >= 0; i--) { + if (fileExtensionIs(path, supportedExtensions[i])) { + return adjustExtensionPriority(i); + } + } + + // If its not in the list of supported extensions, this is likely a + // TypeScript file with a non-ts extension + return ExtensionPriority.Highest; + } + + /** + * Adjusts an extension priority to be the highest priority within the same range. + */ + export function adjustExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority { + if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) { + return ExtensionPriority.TypeScriptFiles; + } + else if (extensionPriority < ExtensionPriority.Limit) { + return ExtensionPriority.DeclarationAndJavaScriptFiles; + } + else { + return ExtensionPriority.Limit; + } + } + + /** + * Gets the next lowest extension priority for a given priority. + */ + export function getNextLowestExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority { + if (extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles) { + return ExtensionPriority.DeclarationAndJavaScriptFiles; + } + else { + return ExtensionPriority.Limit; + } + } + const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"]; export function removeFileExtension(path: string): string { for (const ext of extensionsToRemove) { @@ -798,6 +1121,10 @@ namespace ts { return path; } + export function changeExtension(path: T, newExtension: string): T { + return (removeFileExtension(path) + newExtension); + } + export interface ObjectAllocator { getNodeConstructor(): new (kind: SyntaxKind, pos?: number, end?: number) => Node; getSourceFileConstructor(): new (kind: SyntaxKind, pos?: number, end?: number) => SourceFile; diff --git a/src/compiler/diagnosticMessages.json b/src/compiler/diagnosticMessages.json index a43f3534df6b5..35334ffdf4d06 100644 --- a/src/compiler/diagnosticMessages.json +++ b/src/compiler/diagnosticMessages.json @@ -2059,6 +2059,14 @@ "category": "Error", "code": 5009 }, + "File specification cannot end in a recursive directory wildcard ('**'): '{0}'.": { + "category": "Error", + "code": 5010 + }, + "File specification cannot contain multiple recursive directory wildcards ('**'): '{0}'.": { + "category": "Error", + "code": 5011 + }, "Cannot read file '{0}': {1}": { "category": "Error", "code": 5012 diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index c99c28bc0dd06..2c23e4016eb30 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -16,7 +16,7 @@ namespace ts { createDirectory(path: string): void; getExecutingFilePath(): string; getCurrentDirectory(): string; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[]; getMemoryUsage?(): number; exit(exitCode?: number): void; } @@ -61,7 +61,7 @@ namespace ts { resolvePath(path: string): string; readFile(path: string): string; 
writeFile(path: string, contents: string): void; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[]; watchFile?(path: string, callback: (path: string, removed?: boolean) => void): FileWatcher; watchDirectory?(path: string, callback: (path: string) => void, recursive?: boolean): FileWatcher; }; @@ -71,6 +71,7 @@ namespace ts { function getWScriptSystem(): System { const fso = new ActiveXObject("Scripting.FileSystemObject"); + const shell = new ActiveXObject("WScript.Shell"); const fileStream = new ActiveXObject("ADODB.Stream"); fileStream.Type = 2 /*text*/; @@ -138,10 +139,6 @@ namespace ts { } } - function getCanonicalPath(path: string): string { - return path.toLowerCase(); - } - function getNames(collection: any): string[] { const result: string[] = []; for (let e = new Enumerator(collection); !e.atEnd(); e.moveNext()) { @@ -150,30 +147,22 @@ namespace ts { return result.sort(); } - function readDirectory(path: string, extension?: string, exclude?: string[]): string[] { - const result: string[] = []; - exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s))); - visitDirectory(path); - return result; - function visitDirectory(path: string) { + function getAccessibleFileSystemEntries(path: string): FileSystemEntries { + try { const folder = fso.GetFolder(path || "."); const files = getNames(folder.files); - for (const current of files) { - const name = combinePaths(path, current); - if ((!extension || fileExtensionIs(name, extension)) && !contains(exclude, getCanonicalPath(name))) { - result.push(name); - } - } - const subfolders = getNames(folder.subfolders); - for (const current of subfolders) { - const name = combinePaths(path, current); - if (!contains(exclude, getCanonicalPath(name))) { - visitDirectory(name); - } - } + const directories = getNames(folder.subfolders); + return { files, directories }; + } + catch (e) { + return { files: [], directories: [] }; } } + function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] { + return matchFiles(path, extensions, excludes, includes, /*useCaseSensitiveFileNames*/ false, shell.CurrentDirectory, getAccessibleFileSystemEntries); + } + return { args, newLine: "\r\n", @@ -201,7 +190,7 @@ namespace ts { return WScript.ScriptFullName; }, getCurrentDirectory() { - return new ActiveXObject("WScript.Shell").CurrentDirectory; + return shell.CurrentDirectory; }, readDirectory, exit(exitCode?: number): void { @@ -364,36 +353,30 @@ namespace ts { } } - function getCanonicalPath(path: string): string { - return useCaseSensitiveFileNames ? 
path.toLowerCase() : path; - } - - function readDirectory(path: string, extension?: string, exclude?: string[]): string[] { - const result: string[] = []; - exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s))); - visitDirectory(path); - return result; - function visitDirectory(path: string) { - const files = _fs.readdirSync(path || ".").sort(); + function getAccessibleFileSystemEntries(path: string): FileSystemEntries { + try { + const entries = _fs.readdirSync(path || ".").sort(); + const files: string[] = []; const directories: string[] = []; - for (const current of files) { - const name = combinePaths(path, current); - if (!contains(exclude, getCanonicalPath(name))) { - const stat = _fs.statSync(name); - if (stat.isFile()) { - if (!extension || fileExtensionIs(name, extension)) { - result.push(name); - } - } - else if (stat.isDirectory()) { - directories.push(name); - } + for (const entry of entries) { + const name = combinePaths(path, entry); + const stat = _fs.statSync(name); + if (stat.isFile()) { + files.push(entry); + } + else if (stat.isDirectory()) { + directories.push(entry); } } - for (const current of directories) { - visitDirectory(current); - } + return { files, directories }; } + catch (e) { + return { files: [], directories: [] }; + } + } + + function readDirectory(path: string, extensions?: string[], excludes?: string[], includes?: string[]): string[] { + return matchFiles(path, extensions, excludes, includes, useCaseSensitiveFileNames, process.cwd(), getAccessibleFileSystemEntries); } return { @@ -511,5 +494,3 @@ namespace ts { } })(); } - - diff --git a/src/compiler/types.ts b/src/compiler/types.ts index 24f70373a8ace..709c95ac0a3b5 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -1586,7 +1586,15 @@ namespace ts { } export interface ParseConfigHost { - readDirectory(rootDir: string, extension: string, exclude: string[]): string[]; + useCaseSensitiveFileNames: boolean; + + readDirectory(rootDir: string, extensions: string[], excludes: string[], includes: string[]): string[]; + + /** + * Gets a value indicating whether the specified path exists and is a file. + * @param path The path to test. 
+ */ + fileExists(path: string): boolean; } export interface WriteFileCallback { @@ -2480,6 +2488,17 @@ namespace ts { options: CompilerOptions; fileNames: string[]; errors: Diagnostic[]; + wildcardDirectories?: Map; + } + + export const enum WatchDirectoryFlags { + None = 0, + Recursive = 1 << 0, + } + + export interface ExpandResult { + fileNames: string[]; + wildcardDirectories: Map; } /* @internal */ diff --git a/src/harness/external/chai.d.ts b/src/harness/external/chai.d.ts index 814de75e7b254..fba0024d254ee 100644 --- a/src/harness/external/chai.d.ts +++ b/src/harness/external/chai.d.ts @@ -167,9 +167,14 @@ declare module chai { module assert { function equal(actual: any, expected: any, message?: string): void; function notEqual(actual: any, expected: any, message?: string): void; + function deepEqual(actual: T, expected: T, message?: string): void; + function notDeepEqual(actual: T, expected: T, message?: string): void; + function lengthOf(object: any[], length: number, message?: string): void; function isTrue(value: any, message?: string): void; function isFalse(value: any, message?: string): void; function isNull(value: any, message?: string): void; function isNotNull(value: any, message?: string): void; + function isUndefined(value: any, message?: string): void; + function isDefined(value: any, message?: string): void; } } \ No newline at end of file diff --git a/src/harness/harness.ts b/src/harness/harness.ts index 7fd819731721a..b1e8bb5cc4ef7 100644 --- a/src/harness/harness.ts +++ b/src/harness/harness.ts @@ -1,7 +1,7 @@ // // Copyright (c) Microsoft Corporation. All rights reserved. -// +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -23,6 +23,7 @@ /// /// /// +/// /* tslint:disable:no-null */ // Block scoped definitions work poorly for global variables, temporarily enable var @@ -281,7 +282,7 @@ namespace Utils { break; case "parserContextFlags": - // Clear the flag that are produced by aggregating child values.. That is ephemeral + // Clear the flag that are produced by aggregating child values.. That is ephemeral // data we don't care about in the dump. We only care what the parser set directly // on the ast. let value = n.parserContextFlags & ts.ParserContextFlags.ParserGeneratedFlags; @@ -355,7 +356,7 @@ namespace Utils { assert.equal(node1.kind, node2.kind, "node1.kind !== node2.kind"); assert.equal(node1.flags, node2.flags, "node1.flags !== node2.flags"); - // call this on both nodes to ensure all propagated flags have been set (and thus can be + // call this on both nodes to ensure all propagated flags have been set (and thus can be // compared). 
assert.equal(ts.containsParseError(node1), ts.containsParseError(node2)); assert.equal(node1.parserContextFlags, node2.parserContextFlags, "node1.parserContextFlags !== node2.parserContextFlags"); @@ -434,7 +435,7 @@ namespace Harness { args(): string[]; getExecutingFilePath(): string; exit(exitCode?: number): void; - readDirectory(path: string, extension?: string, exclude?: string[]): string[]; + readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[]; } export var IO: IO; @@ -472,7 +473,7 @@ namespace Harness { export const directoryExists: typeof IO.directoryExists = fso.FolderExists; export const fileExists: typeof IO.fileExists = fso.FileExists; export const log: typeof IO.log = global.WScript && global.WScript.StdOut.WriteLine; - export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); + export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include); export function createDirectory(path: string) { if (directoryExists(path)) { @@ -541,7 +542,7 @@ namespace Harness { export const fileExists: typeof IO.fileExists = fs.existsSync; export const log: typeof IO.log = s => console.log(s); - export const readDirectory: typeof IO.readDirectory = (path, extension, exclude) => ts.sys.readDirectory(path, extension, exclude); + export const readDirectory: typeof IO.readDirectory = (path, extension, exclude, include) => ts.sys.readDirectory(path, extension, exclude, include); export function createDirectory(path: string) { if (!directoryExists(path)) { @@ -734,8 +735,22 @@ namespace Harness { Http.writeToServerSync(serverRoot + path, "WRITE", contents); } - export function readDirectory(path: string, extension?: string, exclude?: string[]) { - return listFiles(path).filter(f => !extension || ts.fileExtensionIs(f, extension)); + export function readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]) { + const fs = new Utils.VirtualFileSystem(path, useCaseSensitiveFileNames()); + for (const file in listFiles(path)) { + fs.addFile(file); + } + return ts.matchFiles(path, extension, exclude, include, useCaseSensitiveFileNames(), getCurrentDirectory(), path => { + const entry = fs.traversePath(path); + if (entry && entry.isDirectory()) { + const directory = entry; + return { + files: ts.map(directory.getFiles(), f => f.name), + directories: ts.map(directory.getDirectories(), d => d.name) + }; + } + return { files: [], directories: [] }; + }); } } } @@ -762,7 +777,7 @@ namespace Harness { (emittedFile: string, emittedLine: number, emittedColumn: number, sourceFile: string, sourceLine: number, sourceColumn: number, sourceName: string): void; } - // Settings + // Settings export let userSpecifiedRoot = ""; export let lightMode = false; @@ -801,7 +816,7 @@ namespace Harness { fileName: string, sourceText: string, languageVersion: ts.ScriptTarget) { - // We'll only assert invariants outside of light mode. + // We'll only assert invariants outside of light mode. const shouldAssertInvariants = !Harness.lightMode; // Only set the parent nodes if we're asserting invariants. We don't need them otherwise. 
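
Aside: the harness readDirectory above shows the contract the new ts.matchFiles is built around — the matcher itself performs the include/exclude/extension filtering and the directory recursion, and only ever asks the host for the immediate files and subdirectories of a single path. Below is a minimal, self-contained sketch of that callback shape using a hypothetical in-memory tree; the fixture and helper names (tree, getFileSystemEntries, walk) are illustrative only and are not part of this change.

    interface FileSystemEntries { files: string[]; directories: string[]; }

    // Hypothetical fixture: directory path -> its immediate children only.
    const tree: { [dir: string]: FileSystemEntries } = {
        "/src":     { files: ["a.ts", "a.d.ts"], directories: ["lib"] },
        "/src/lib": { files: ["b.ts", "b.js"],   directories: [] }
    };

    // The host callback: answer "what is directly inside this directory?" and nothing more.
    function getFileSystemEntries(path: string): FileSystemEntries {
        return tree[path] || { files: [], directories: [] };
    }

    // Simplified walk in the spirit of matchFiles/visitDirectory: the recursion lives in the
    // matcher, not in the host, so filtering against include/exclude patterns can happen in
    // one place regardless of which host supplied the entries.
    function walk(dir: string, result: string[] = []): string[] {
        const { files, directories } = getFileSystemEntries(dir);
        for (const f of files) result.push(dir + "/" + f);
        for (const d of directories) walk(dir + "/" + d, result);
        return result;
    }

    walk("/src"); // ["/src/a.ts", "/src/a.d.ts", "/src/lib/b.ts", "/src/lib/b.js"]

This is also why the getAccessibleFileSystemEntries helpers added to sys.ts for the node and WScript hosts can stay so small: each returns { files, directories } for one level and leaves traversal and pattern matching to matchFiles.
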
@@ -896,7 +911,7 @@ namespace Harness { libFiles?: string; } - // Additional options not already in ts.optionDeclarations + // Additional options not already in ts.optionDeclarations const harnessOptionDeclarations: ts.CommandLineOption[] = [ { name: "allowNonTsExtensions", type: "boolean" }, { name: "useCaseSensitiveFileNames", type: "boolean" }, @@ -1134,7 +1149,7 @@ namespace Harness { errLines.forEach(e => outputLines.push(e)); // do not count errors from lib.d.ts here, they are computed separately as numLibraryDiagnostics - // if lib.d.ts is explicitly included in input files and there are some errors in it (i.e. because of duplicate identifiers) + // if lib.d.ts is explicitly included in input files and there are some errors in it (i.e. because of duplicate identifiers) // then they will be added twice thus triggering 'total errors' assertion with condition // 'totalErrorsReportedInNonLibraryFiles + numLibraryDiagnostics + numTest262HarnessDiagnostics, diagnostics.length diff --git a/src/harness/harnessLanguageService.ts b/src/harness/harnessLanguageService.ts index fb5b6ce92aa0d..b9a826e78ed24 100644 --- a/src/harness/harnessLanguageService.ts +++ b/src/harness/harnessLanguageService.ts @@ -259,7 +259,14 @@ namespace Harness.LanguageService { readDirectory(rootDir: string, extension: string): string { throw new Error("NYI"); } + readDirectoryNames(path: string): string { + throw new Error("Not implemented."); + } + readFileNames(path: string): string { + throw new Error("Not implemented."); + } fileExists(fileName: string) { return this.getScriptInfo(fileName) !== undefined; } + directoryExists(directoryName: string) { return false; } readFile(fileName: string) { const snapshot = this.nativeHost.getScriptSnapshot(fileName); return snapshot && snapshot.getText(0, snapshot.getLength()); @@ -561,7 +568,7 @@ namespace Harness.LanguageService { return this.host.getCurrentDirectory(); } - readDirectory(path: string, extension?: string): string[] { + readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]): string[] { throw new Error("Not implemented Yet."); } @@ -637,4 +644,3 @@ namespace Harness.LanguageService { getPreProcessedFileInfo(fileName: string, fileContents: string): ts.PreProcessedFileInfo { throw new Error("getPreProcessedFileInfo is not available using the server interface."); } } } - \ No newline at end of file diff --git a/src/harness/loggedIO.ts b/src/harness/loggedIO.ts index 3d51682f745b9..2d6e6481f241b 100644 --- a/src/harness/loggedIO.ts +++ b/src/harness/loggedIO.ts @@ -62,8 +62,9 @@ interface IOLog { }[]; directoriesRead: { path: string, - extension: string, + extension: string[], exclude: string[], + include: string[], result: string[] }[]; } @@ -217,9 +218,9 @@ namespace Playback { memoize(path => findResultByPath(wrapper, replayLog.filesRead, path).contents)); wrapper.readDirectory = recordReplay(wrapper.readDirectory, underlying)( - (path, extension, exclude) => { - const result = (underlying).readDirectory(path, extension, exclude); - const logEntry = { path, extension, exclude, result }; + (path, extension, exclude, include) => { + const result = (underlying).readDirectory(path, extension, exclude, include); + const logEntry = { path, extension, exclude, include, result }; recordLog.directoriesRead.push(logEntry); return result; }, diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index 96fcfedc6b48e..769e72d385127 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -210,7 
+210,12 @@ class ProjectRunner extends RunnerBase { } const configObject = result.config; - const configParseResult = ts.parseJsonConfigFileContent(configObject, { readDirectory }, ts.getDirectoryPath(configFileName), compilerOptions); + const configParseHost: ts.ParseConfigHost = { + useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), + fileExists, + readDirectory, + }; + const configParseResult = ts.parseJsonConfigFileContent(configObject, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions); if (configParseResult.errors.length > 0) { return { moduleKind, @@ -237,7 +242,7 @@ class ProjectRunner extends RunnerBase { mapRoot: testCase.resolveMapRoot && testCase.mapRoot ? Harness.IO.resolvePath(testCase.mapRoot) : testCase.mapRoot, sourceRoot: testCase.resolveSourceRoot && testCase.sourceRoot ? Harness.IO.resolvePath(testCase.sourceRoot) : testCase.sourceRoot, module: moduleKind, - moduleResolution: ts.ModuleResolutionKind.Classic, // currently all tests use classic module resolution kind, this will change in the future + moduleResolution: ts.ModuleResolutionKind.Classic, // currently all tests use classic module resolution kind, this will change in the future }; // Set the values specified using json const optionNameMap: ts.Map = {}; @@ -268,8 +273,8 @@ class ProjectRunner extends RunnerBase { : ts.normalizeSlashes(testCase.projectRoot) + "/" + ts.normalizeSlashes(fileName); } - function readDirectory(rootDir: string, extension: string, exclude: string[]): string[] { - const harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude); + function readDirectory(rootDir: string, extension: string[], exclude: string[], include: string[]): string[] { + const harnessReadDirectoryResult = Harness.IO.readDirectory(getFileNameInTheProjectTest(rootDir), extension, exclude, include); const result: string[] = []; for (let i = 0; i < harnessReadDirectoryResult.length; i++) { result[i] = ts.getRelativePathToDirectoryOrUrl(testCase.projectRoot, harnessReadDirectoryResult[i], diff --git a/src/harness/rwcRunner.ts b/src/harness/rwcRunner.ts index ce570a7d6ad01..791c684c79c16 100644 --- a/src/harness/rwcRunner.ts +++ b/src/harness/rwcRunner.ts @@ -76,7 +76,12 @@ namespace RWC { if (tsconfigFile) { const tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path); const parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content); - const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, Harness.IO, ts.getDirectoryPath(tsconfigFile.path)); + const configParseHost: ts.ParseConfigHost = { + useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(), + fileExists: Harness.IO.fileExists, + readDirectory: Harness.IO.readDirectory, + }; + const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, configParseHost, ts.getDirectoryPath(tsconfigFile.path)); fileNames = configParseResult.fileNames; opts.options = ts.extend(opts.options, configParseResult.options); } diff --git a/src/harness/virtualFileSystem.ts b/src/harness/virtualFileSystem.ts new file mode 100644 index 0000000000000..66bbd715fbc60 --- /dev/null +++ b/src/harness/virtualFileSystem.ts @@ -0,0 +1,160 @@ +/// +namespace Utils { + export class VirtualFileSystemEntry { + fileSystem: VirtualFileSystem; + name: string; + + constructor(fileSystem: VirtualFileSystem, name: string) { + this.fileSystem = fileSystem; + this.name = name; + } + + 
isDirectory() { return false; } + isFile() { return false; } + isFileSystem() { return false; } + } + + export class VirtualFile extends VirtualFileSystemEntry { + content: string; + isFile() { return true; } + } + + export abstract class VirtualFileSystemContainer extends VirtualFileSystemEntry { + abstract getFileSystemEntries(): VirtualFileSystemEntry[]; + + getFileSystemEntry(name: string): VirtualFileSystemEntry { + for (const entry of this.getFileSystemEntries()) { + if (this.fileSystem.sameName(entry.name, name)) { + return entry; + } + } + return undefined; + } + + getDirectories(): VirtualDirectory[] { + return ts.filter(this.getFileSystemEntries(), entry => entry.isDirectory()); + } + + getFiles(): VirtualFile[] { + return ts.filter(this.getFileSystemEntries(), entry => entry.isFile()); + } + + getDirectory(name: string): VirtualDirectory { + const entry = this.getFileSystemEntry(name); + return entry.isDirectory() ? entry : undefined; + } + + getFile(name: string): VirtualFile { + const entry = this.getFileSystemEntry(name); + return entry.isFile() ? entry : undefined; + } + } + + export class VirtualDirectory extends VirtualFileSystemContainer { + private entries: VirtualFileSystemEntry[] = []; + + isDirectory() { return true; } + + getFileSystemEntries() { return this.entries.slice(); } + + addDirectory(name: string): VirtualDirectory { + const entry = this.getFileSystemEntry(name); + if (entry === undefined) { + const directory = new VirtualDirectory(this.fileSystem, name); + this.entries.push(directory); + return directory; + } + else if (entry.isDirectory()) { + return entry; + } + else { + return undefined; + } + } + + addFile(name: string, content?: string): VirtualFile { + const entry = this.getFileSystemEntry(name); + if (entry === undefined) { + const file = new VirtualFile(this.fileSystem, name); + file.content = content; + this.entries.push(file); + return file; + } + else if (entry.isFile()) { + const file = entry; + file.content = content; + return file; + } + else { + return undefined; + } + } + } + + export class VirtualFileSystem extends VirtualFileSystemContainer { + private root: VirtualDirectory; + + currentDirectory: string; + useCaseSensitiveFileNames: boolean; + + constructor(currentDirectory: string, useCaseSensitiveFileNames: boolean) { + super(undefined, ""); + this.fileSystem = this; + this.root = new VirtualDirectory(this, ""); + this.currentDirectory = currentDirectory; + this.useCaseSensitiveFileNames = useCaseSensitiveFileNames; + } + + isFileSystem() { return true; } + + getFileSystemEntries() { return this.root.getFileSystemEntries(); } + + addDirectory(path: string) { + const components = ts.getNormalizedPathComponents(path, this.currentDirectory); + let directory: VirtualDirectory = this.root; + for (const component of components) { + directory = directory.addDirectory(component); + if (directory === undefined) { + break; + } + } + + return directory; + } + + addFile(path: string, content?: string) { + const absolutePath = ts.getNormalizedAbsolutePath(path, this.currentDirectory); + const fileName = ts.getBaseFileName(path); + const directoryPath = ts.getDirectoryPath(absolutePath); + const directory = this.addDirectory(directoryPath); + return directory ? directory.addFile(fileName, content) : undefined; + } + + fileExists(path: string) { + const entry = this.traversePath(path); + return entry !== undefined && entry.isFile(); + } + + sameName(a: string, b: string) { + return this.useCaseSensitiveFileNames ? 
a === b : a.toLowerCase() === b.toLowerCase(); + } + + traversePath(path: string) { + let directory: VirtualDirectory = this.root; + for (const component of ts.getNormalizedPathComponents(path, this.currentDirectory)) { + const entry = directory.getFileSystemEntry(component); + if (entry === undefined) { + return undefined; + } + else if (entry.isDirectory()) { + directory = entry; + } + else { + return entry; + } + } + + return directory; + } + } +} \ No newline at end of file diff --git a/src/server/editorServices.ts b/src/server/editorServices.ts index d78f7d40ef668..9305031089cdf 100644 --- a/src/server/editorServices.ts +++ b/src/server/editorServices.ts @@ -119,7 +119,7 @@ namespace ts.server { if (!resolution) { const existingResolution = currentResolutionsInFile && ts.lookUp(currentResolutionsInFile, moduleName); if (moduleResolutionIsValid(existingResolution)) { - // ok, it is safe to use existing module resolution results + // ok, it is safe to use existing module resolution results resolution = existingResolution; } else { @@ -144,8 +144,8 @@ namespace ts.server { } if (resolution.resolvedModule) { - // TODO: consider checking failedLookupLocations - // TODO: use lastCheckTime to track expiration for module name resolution + // TODO: consider checking failedLookupLocations + // TODO: use lastCheckTime to track expiration for module name resolution return true; } @@ -326,6 +326,7 @@ namespace ts.server { export interface ProjectOptions { // these fields can be present in the project file files?: string[]; + wildcardDirectories?: ts.Map; compilerOptions?: ts.CompilerOptions; } @@ -334,6 +335,7 @@ namespace ts.server { projectFilename: string; projectFileWatcher: FileWatcher; directoryWatcher: FileWatcher; + directoriesWatchedForWildcards: Map; // Used to keep track of what directories are watched for this project directoriesWatchedForTsconfig: string[] = []; program: ts.Program; @@ -482,7 +484,7 @@ namespace ts.server { openFileRootsConfigured: ScriptInfo[] = []; // a path to directory watcher map that detects added tsconfig files directoryWatchersForTsconfig: ts.Map = {}; - // count of how many projects are using the directory watcher. If the + // count of how many projects are using the directory watcher. If the // number becomes 0 for a watcher, then we should close it. directoryWatchersRefCount: ts.Map = {}; hostConfiguration: HostConfiguration; @@ -563,11 +565,11 @@ namespace ts.server { // We check if the project file list has changed. If so, we update the project. if (!arrayIsEqualTo(currentRootFiles && currentRootFiles.sort(), newRootFiles && newRootFiles.sort())) { // For configured projects, the change is made outside the tsconfig file, and - // it is not likely to affect the project for other files opened by the client. We can + // it is not likely to affect the project for other files opened by the client. We can // just update the current project. 
this.updateConfiguredProject(project); - // Call updateProjectStructure to clean up inferred projects we may have + // Call updateProjectStructure to clean up inferred projects we may have // created for the new files this.updateProjectStructure(); } @@ -713,6 +715,8 @@ namespace ts.server { if (project.isConfiguredProject()) { project.projectFileWatcher.close(); project.directoryWatcher.close(); + forEachValue(project.directoriesWatchedForWildcards, watcher => { watcher.close(); }); + delete project.directoriesWatchedForWildcards; this.configuredProjects = copyListRemovingItem(project, this.configuredProjects); } else { @@ -791,8 +795,8 @@ namespace ts.server { * @param info The file that has been closed or newly configured */ closeOpenFile(info: ScriptInfo) { - // Closing file should trigger re-reading the file content from disk. This is - // because the user may chose to discard the buffer content before saving + // Closing file should trigger re-reading the file content from disk. This is + // because the user may chose to discard the buffer content before saving // to the disk, and the server's version of the file can be out of sync. info.svc.reloadFromFile(info.fileName); @@ -890,8 +894,8 @@ namespace ts.server { } /** - * This function is to update the project structure for every projects. - * It is called on the premise that all the configured projects are + * This function is to update the project structure for every projects. + * It is called on the premise that all the configured projects are * up to date. */ updateProjectStructure() { @@ -945,7 +949,7 @@ namespace ts.server { if (rootFile.defaultProject && rootFile.defaultProject.isConfiguredProject()) { // If the root file has already been added into a configured project, - // meaning the original inferred project is gone already. + // meaning the original inferred project is gone already. if (!rootedProject.isConfiguredProject()) { this.removeProject(rootedProject); } @@ -1050,9 +1054,9 @@ namespace ts.server { } /** - * This function tries to search for a tsconfig.json for the given file. If we found it, + * This function tries to search for a tsconfig.json for the given file. If we found it, * we first detect if there is already a configured project created for it: if so, we re-read - * the tsconfig file content and update the project; otherwise we create a new one. + * the tsconfig file content and update the project; otherwise we create a new one. 
*/ openOrUpdateConfiguredProjectForFile(fileName: string) { const searchPath = ts.normalizePath(getDirectoryPath(fileName)); @@ -1189,7 +1193,8 @@ namespace ts.server { else { const projectOptions: ProjectOptions = { files: parsedCommandLine.fileNames, - compilerOptions: parsedCommandLine.options + wildcardDirectories: parsedCommandLine.wildcardDirectories, + compilerOptions: parsedCommandLine.options, }; return { succeeded: true, projectOptions }; } @@ -1215,12 +1220,30 @@ namespace ts.server { } project.finishGraph(); project.projectFileWatcher = this.host.watchFile(configFilename, _ => this.watchedProjectConfigFileChanged(project)); - this.log("Add recursive watcher for: " + ts.getDirectoryPath(configFilename)); + + const configDirectoryPath = ts.getDirectoryPath(configFilename); + + this.log("Add recursive watcher for: " + configDirectoryPath); project.directoryWatcher = this.host.watchDirectory( - ts.getDirectoryPath(configFilename), + configDirectoryPath, path => this.directoryWatchedForSourceFilesChanged(project, path), /*recursive*/ true ); + + project.directoriesWatchedForWildcards = reduceProperties(projectOptions.wildcardDirectories, (watchers, flag, directory) => { + if (comparePaths(configDirectoryPath, directory, ".", !this.host.useCaseSensitiveFileNames) !== Comparison.EqualTo) { + const recursive = (flag & WatchDirectoryFlags.Recursive) !== 0; + this.log(`Add ${ recursive ? "recursive " : ""}watcher for: ${directory}`); + watchers[directory] = this.host.watchDirectory( + directory, + path => this.directoryWatchedForSourceFilesChanged(project, path), + recursive + ); + } + + return watchers; + }, >{}); + return { success: true, project: project }; } } @@ -1254,7 +1277,7 @@ namespace ts.server { info = this.openFile(fileName, /*openedByClient*/ false); } else { - // if the root file was opened by client, it would belong to either + // if the root file was opened by client, it would belong to either // openFileRoots or openFileReferenced. if (info.isOpen) { if (this.openFileRoots.indexOf(info) >= 0) { diff --git a/src/services/shims.ts b/src/services/shims.ts index 6b656ea2738fb..46056eb110fff 100644 --- a/src/services/shims.ts +++ b/src/services/shims.ts @@ -60,7 +60,7 @@ namespace ts { getNewLine?(): string; getProjectVersion?(): string; useCaseSensitiveFileNames?(): boolean; - + getModuleResolutionsForFile?(fileName: string): string; } @@ -72,7 +72,8 @@ namespace ts { * @param exclude A JSON encoded string[] containing the paths to exclude * when enumerating the directory. */ - readDirectory(rootDir: string, extension: string, exclude?: string): string; + readDirectory(rootDir: string, extension: string, exclude?: string, include?: string): string; + useCaseSensitiveFileNames?(): boolean; } /// @@ -272,12 +273,12 @@ namespace ts { private files: string[]; private loggingEnabled = false; private tracingEnabled = false; - + public resolveModuleNames: (moduleName: string[], containingFile: string) => ResolvedModule[]; - + constructor(private shimHost: LanguageServiceShimHost) { // if shimHost is a COM object then property check will become method call with no arguments. - // 'in' does not have this effect. + // 'in' does not have this effect. 
if ("getModuleResolutionsForFile" in this.shimHost) { this.resolveModuleNames = (moduleNames: string[], containingFile: string) => { let resolutionsInFile = >JSON.parse(this.shimHost.getModuleResolutionsForFile(containingFile)); @@ -407,31 +408,49 @@ namespace ts { export class CoreServicesShimHostAdapter implements ParseConfigHost { + public useCaseSensitiveFileNames: boolean; + constructor(private shimHost: CoreServicesShimHost) { + this.useCaseSensitiveFileNames = this.shimHost.useCaseSensitiveFileNames ? this.shimHost.useCaseSensitiveFileNames() : false; } - public readDirectory(rootDir: string, extension: string, exclude: string[]): string[] { - // Wrap the API changes for 1.5 release. This try/catch - // should be removed once TypeScript 1.5 has shipped. + public readDirectory(rootDir: string, extensions: string[], exclude: string[], include: string[]): string[] { + // Wrap the API changes for 1.8 release. This try/catch + // should be removed once TypeScript 1.8 has shipped. // Also consider removing the optional designation for // the exclude param at this time. - var encoded: string; try { - encoded = this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude)); + return JSON.parse(this.shimHost.readDirectory( + rootDir, + JSON.stringify(extensions), + JSON.stringify(exclude), + JSON.stringify(include))); } catch (e) { - encoded = this.shimHost.readDirectory(rootDir, extension); + const results: string[] = []; + for (const extension of extensions) { + for (const file of this.readDirectoryFallback(rootDir, extension, exclude)) + { + if (!contains(results, file)) { + results.push(file); + } + } + } + return results; } - return JSON.parse(encoded); } - + public fileExists(fileName: string): boolean { return this.shimHost.fileExists(fileName); } - + public readFile(fileName: string): string { return this.shimHost.readFile(fileName); } + + private readDirectoryFallback(rootDir: string, extension: string, exclude: string[]) { + return JSON.parse(this.shimHost.readDirectory(rootDir, extension, JSON.stringify(exclude))); + } } function simpleForwardCall(logger: Logger, actionDescription: string, action: () => any, logPerformance: boolean): any { @@ -940,7 +959,7 @@ namespace ts { private forwardJSONCall(actionDescription: string, action: () => any): any { return forwardJSONCall(this.logger, actionDescription, action, this.logPerformance); } - + public resolveModuleName(fileName: string, moduleName: string, compilerOptionsJson: string): string { return this.forwardJSONCall(`resolveModuleName('${fileName}')`, () => { let compilerOptions = JSON.parse(compilerOptionsJson); @@ -949,14 +968,14 @@ namespace ts { resolvedFileName: result.resolvedModule ? 
result.resolvedModule.resolvedFileName: undefined, failedLookupLocations: result.failedLookupLocations }; - }); + }); } public getPreProcessedFileInfo(fileName: string, sourceTextSnapshot: IScriptSnapshot): string { return this.forwardJSONCall( "getPreProcessedFileInfo('" + fileName + "')", () => { - // for now treat files as JavaScript + // for now treat files as JavaScript var result = preProcessFile(sourceTextSnapshot.getText(0, sourceTextSnapshot.getLength()), /* readImportFiles */ true, /* detectJavaScriptImports */ true); var convertResult = { referencedFiles: [], diff --git a/tests/cases/unittests/cachingInServerLSHost.ts b/tests/cases/unittests/cachingInServerLSHost.ts index 88b44a693b940..0d2a9ebf54d40 100644 --- a/tests/cases/unittests/cachingInServerLSHost.ts +++ b/tests/cases/unittests/cachingInServerLSHost.ts @@ -36,7 +36,7 @@ module ts { getCurrentDirectory: (): string => { return ""; }, - readDirectory: (path: string, extension?: string, exclude?: string[]): string[] => { + readDirectory: (path: string, extension?: string[], exclude?: string[], include?: string[]): string[] => { throw new Error("NYI"); }, exit: (exitCode?: number) => { @@ -69,8 +69,8 @@ module ts { let projectService = new server.ProjectService(serverHost, logger); let rootScriptInfo = projectService.openFile(rootFile, /* openedByClient */true); let project = projectService.createInferredProject(rootScriptInfo); - project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); - return { + project.setProjectOptions( {files: [rootScriptInfo.fileName], compilerOptions: {module: ts.ModuleKind.AMD} } ); + return { project, rootScriptInfo }; @@ -87,22 +87,22 @@ module ts { name: "c:/f1.ts", content: `foo()` }; - + let serverHost = createDefaultServerHost({ [root.name]: root, [imported.name]: imported }); let { project, rootScriptInfo } = createProject(root.name, serverHost); // ensure that imported file was found let diags = project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.equal(diags.length, 1); - + let originalFileExists = serverHost.fileExists; { // patch fileExists to make sure that disk is not touched serverHost.fileExists = (fileName): boolean => { - assert.isTrue(false, "fileExists should not be called"); + assert.isTrue(false, "fileExists should not be called"); return false; }; - + let newContent = `import {x} from "f1" var x: string = 1;`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); @@ -123,7 +123,7 @@ module ts { }; let newContent = `import {x} from "f2"`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); - + try { // trigger synchronization to make sure that LSHost will try to find 'f2' module on disk project.compilerService.languageService.getSemanticDiagnostics(imported.name); @@ -132,7 +132,7 @@ module ts { catch(e) { assert.isTrue(e.message.indexOf(`Could not find file: '${imported.name}'.`) === 0); } - + assert.isTrue(fileExistsIsCalled); } { @@ -140,45 +140,45 @@ module ts { serverHost.fileExists = (fileName): boolean => { if (fileName === "lib.d.ts") { return false; - } + } fileExistsCalled = true; assert.isTrue(fileName.indexOf('/f1.') !== -1); return originalFileExists(fileName); }; - + let newContent = `import {x} from "f1"`; rootScriptInfo.editContent(0, rootScriptInfo.content.length, newContent); project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.isTrue(fileExistsCalled); - + // setting compiler options discards module 
resolution cache fileExistsCalled = false; - + let opts = ts.clone(project.projectOptions); opts.compilerOptions = ts.clone(opts.compilerOptions); opts.compilerOptions.target = ts.ScriptTarget.ES5; project.setProjectOptions(opts); - + project.compilerService.languageService.getSemanticDiagnostics(imported.name); assert.isTrue(fileExistsCalled); } }); - + it("loads missing files from disk", () => { let root: File = { name: 'c:/foo.ts', content: `import {x} from "bar"` }; - + let imported: File = { name: 'c:/bar.d.ts', content: `export var y = 1` - }; - + }; + let fileMap: Map<File> = { [root.name]: root }; let serverHost = createDefaultServerHost(fileMap); let originalFileExists = serverHost.fileExists; - + let fileExistsCalledForBar = false; serverHost.fileExists = fileName => { if (fileName === "lib.d.ts") { @@ -187,22 +187,22 @@ module ts { if (!fileExistsCalledForBar) { fileExistsCalledForBar = fileName.indexOf("/bar.") !== -1; } - + return originalFileExists(fileName); }; - + let { project, rootScriptInfo } = createProject(root.name, serverHost); let diags = project.compilerService.languageService.getSemanticDiagnostics(root.name); assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called"); assert.isTrue(diags.length === 1, "one diagnostic expected"); assert.isTrue(typeof diags[0].messageText === "string" && ((<string>diags[0].messageText).indexOf("Cannot find module") === 0), "should be 'cannot find module' message"); - + // assert that import will success once file appear on disk fileMap[imported.name] = imported; fileExistsCalledForBar = false; rootScriptInfo.editContent(0, rootScriptInfo.content.length, `import {y} from "bar"`) - + diags = project.compilerService.languageService.getSemanticDiagnostics(root.name); assert.isTrue(fileExistsCalledForBar, "'fileExists' should be called"); assert.isTrue(diags.length === 0); diff --git a/tests/cases/unittests/matchFiles.ts b/tests/cases/unittests/matchFiles.ts new file mode 100644 index 0000000000000..faccd31b88f40 --- /dev/null +++ b/tests/cases/unittests/matchFiles.ts @@ -0,0 +1,734 @@ +/// +/// +/// + +namespace ts { + class MockParseConfigHost extends Utils.VirtualFileSystem implements ParseConfigHost { + constructor(currentDirectory: string, ignoreCase: boolean, files: string[]) { + super(currentDirectory, ignoreCase); + for (const file of files) { + this.addFile(file); + } + } + + readDirectory(path: string, extensions: string[], excludes: string[], includes: string[]) { + return matchFiles(path, extensions, excludes, includes, this.useCaseSensitiveFileNames, this.currentDirectory, (path: string) => this.getAccessibleFileSystemEntries(path)); + } + + getAccessibleFileSystemEntries(path: string) { + const entry = this.traversePath(path); + if (entry && entry.isDirectory()) { + const directory = <Utils.VirtualDirectory>entry; + return { + files: map(directory.getFiles(), f => f.name), + directories: map(directory.getDirectories(), d => d.name) + }; + } + return { files: [], directories: [] }; + } + } + + const caseInsensitiveBasePath = "c:/dev/"; + const caseInsensitiveHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.ts", + "c:/dev/b.js", + "c:/dev/c.d.ts", + "c:/dev/z/a.ts", + "c:/dev/z/abz.ts", + "c:/dev/z/aba.ts", + "c:/dev/z/b.ts", + "c:/dev/z/bbz.ts", + "c:/dev/z/bba.ts", + "c:/dev/x/a.ts", + "c:/dev/x/aa.ts", + "c:/dev/x/b.ts", + "c:/dev/x/y/a.ts", + "c:/dev/x/y/b.ts", + "c:/dev/js/a.js", + "c:/dev/js/b.js", + "c:/ext/ext.ts" + ]); + + const 
caseSensitiveBasePath = "/dev/"; + const caseSensitiveHost = new MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, [ + "/dev/a.ts", + "/dev/a.d.ts", + "/dev/a.js", + "/dev/b.ts", + "/dev/b.js", + "/dev/A.ts", + "/dev/B.ts", + "/dev/c.d.ts", + "/dev/z/a.ts", + "/dev/z/abz.ts", + "/dev/z/aba.ts", + "/dev/z/b.ts", + "/dev/z/bbz.ts", + "/dev/z/bba.ts", + "/dev/x/a.ts", + "/dev/x/b.ts", + "/dev/x/y/a.ts", + "/dev/x/y/b.ts", + "/dev/js/a.js", + "/dev/js/b.js", + ]); + + const caseInsensitiveMixedExtensionHost = new MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, [ + "c:/dev/a.ts", + "c:/dev/a.d.ts", + "c:/dev/a.js", + "c:/dev/b.tsx", + "c:/dev/b.d.ts", + "c:/dev/b.jsx", + "c:/dev/c.tsx", + "c:/dev/c.js", + "c:/dev/d.js", + "c:/dev/e.jsx", + "c:/dev/f.other" + ]); + + describe("matchFiles", () => { + describe("with literal file list", () => { + it("without exclusions", () => { + const json = { + files: [ + "a.ts", + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("missing files are still present", () => { + const json = { + files: [ + "z.ts", + "x.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/z.ts", + "c:/dev/x.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("are not removed due to excludes", () => { + const json = { + files: [ + "a.ts", + "b.ts" + ], + exclude: [ + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + }); + + describe("with literal include list", () => { + it("without exclusions", () => { + const json = { + include: [ + "a.ts", + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with non .ts file extensions are excluded", () => { + const json = { + include: [ + "a.js", + "b.js" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with missing files are excluded", () => { + const json = { + include: [ + "z.ts", + "x.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with literal excludes", () => { + const json = { + include: [ + "a.ts", + "b.ts" + ], + exclude: [ + "b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + 
"c:/dev/a.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with wildcard excludes", () => { + const json = { + include: [ + "a.ts", + "b.ts", + "z/a.ts", + "z/abz.ts", + "z/aba.ts", + "x/b.ts" + ], + exclude: [ + "*.ts", + "z/??z.ts", + "*/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/z/a.ts", + "c:/dev/z/aba.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with recursive excludes", () => { + const json = { + include: [ + "a.ts", + "b.ts", + "x/a.ts", + "x/b.ts", + "x/y/a.ts", + "x/y/b.ts" + ], + exclude: [ + "**/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with case sensitive exclude", () => { + const json = { + include: [ + "B.ts" + ], + exclude: [ + "**/b.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "/dev/B.ts" + ], + wildcardDirectories: {}, + }; + const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath); + assert.deepEqual(actual, expected); + }); + }); + + describe("with wildcard include list", () => { + it("same named declarations are excluded", () => { + const json = { + include: [ + "*.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("`*` matches only ts files", () => { + const json = { + include: [ + "*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("`?` matches only a single character", () => { + const json = { + include: [ + "x/?.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/x/a.ts", + "c:/dev/x/b.ts" + ], + wildcardDirectories: { + "c:/dev/x": ts.WatchDirectoryFlags.None + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with recursive directory", () => { + const json = { + include: [ + "**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts", + "c:/dev/z/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("case sensitive", () => { + const 
json = { + include: [ + "**/A.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "/dev/A.ts" + ], + wildcardDirectories: { + "/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with missing files are excluded", () => { + const json = { + include: [ + "*/z.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("always include literal files", () => { + const json = { + files: [ + "a.ts" + ], + include: [ + "*/z.ts" + ], + exclude: [ + "**/a.ts" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + }, + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("exclude folders", () => { + const json = { + include: [ + "**/*" + ], + exclude: [ + "z", + "x" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("exclude .js files when allowJs=false", () => { + const json = { + compilerOptions: { + allowJs: false + }, + include: [ + "js/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: false + }, + errors: [], + fileNames: [], + wildcardDirectories: { + "c:/dev/js": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("include .js files when allowJs=true", () => { + const json = { + compilerOptions: { + allowJs: true + }, + include: [ + "js/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: true + }, + errors: [], + fileNames: [ + "c:/dev/js/a.js", + "c:/dev/js/b.js" + ], + wildcardDirectories: { + "c:/dev/js": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("include paths outside of the project", () => { + const json = { + include: [ + "*", + "c:/ext/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.ts", + "c:/dev/c.d.ts", + "c:/ext/ext.ts", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.None, + "c:/ext": ts.WatchDirectoryFlags.None + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with jsx=none, allowJs=false", () => { + const json = { + compilerOptions: { + allowJs: false + } + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: false + }, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + ], + 
wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with jsx=preserve, allowJs=false", () => { + const json = { + compilerOptions: { + jsx: "preserve", + allowJs: false + } + }; + const expected: ts.ParsedCommandLine = { + options: { + jsx: ts.JsxEmit.Preserve, + allowJs: false + }, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with jsx=none, allowJs=true", () => { + const json = { + compilerOptions: { + allowJs: true + } + }; + const expected: ts.ParsedCommandLine = { + options: { + allowJs: true + }, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + "c:/dev/d.js", + "c:/dev/e.jsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("with jsx=preserve, allowJs=true", () => { + const json = { + compilerOptions: { + jsx: "preserve", + allowJs: true + } + }; + const expected: ts.ParsedCommandLine = { + options: { + jsx: ts.JsxEmit.Preserve, + allowJs: true + }, + errors: [], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/b.tsx", + "c:/dev/c.tsx", + "c:/dev/d.js", + "c:/dev/e.jsx", + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + describe("with trailing recursive directory", () => { + it("in includes", () => { + const json = { + include: [ + "**" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**") + ], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("in excludes", () => { + const json = { + include: [ + "**/*" + ], + exclude: [ + "**" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + }); + describe("with multiple recursive directory patterns", () => { + it("in includes", () => { + const json = { + include: [ + "**/x/**/*" + ] + }; + const expected: ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**/*") + ], + fileNames: [], + wildcardDirectories: {} + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + it("in excludes", () => { + const json = { + include: [ + "**/a.ts" + ], + exclude: [ + "**/x/**" + ] + }; + const expected: 
ts.ParsedCommandLine = { + options: {}, + errors: [ + ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**") + ], + fileNames: [ + "c:/dev/a.ts", + "c:/dev/x/a.ts", + "c:/dev/x/y/a.ts", + "c:/dev/z/a.ts" + ], + wildcardDirectories: { + "c:/dev": ts.WatchDirectoryFlags.Recursive + } + }; + const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath); + assert.deepEqual(actual, expected); + }); + }); + }); + }); +} \ No newline at end of file
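
For reference, a minimal usage sketch of the config expansion exercised by the tests above. It is modeled on tests/cases/unittests/matchFiles.ts; the host value, base path, and file specs here are illustrative assumptions, not part of this change.

namespace ts {
    // Sketch only: `host` stands in for any ParseConfigHost, e.g. the
    // MockParseConfigHost defined in matchFiles.ts above.
    declare const host: ParseConfigHost;
    const basePath = "c:/dev/";

    const json = {
        compilerOptions: { allowJs: true },
        files: ["core.ts"],       // literal files: always kept, even if excluded or missing
        include: ["src/**/*"],    // wildcard specs may use "*", "?", and a single "**"
        exclude: ["**/*.spec.ts"] // excludes apply only to wildcard matches
    };

    const parsed = parseJsonConfigFileContent(json, host, basePath);
    // parsed.fileNames           - the literal files plus the expanded wildcard matches
    // parsed.wildcardDirectories - e.g. { "c:/dev/src": WatchDirectoryFlags.Recursive },
    //                              used by the server to set up directory watchers
    // parsed.errors              - e.g. a spec ending in "**" produces a diagnostic
}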