diff --git a/.eslintrc.js b/.eslintrc.js index af1249eae6436e..010c02be11bcf4 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -18,6 +18,7 @@ const hacks = [ 'eslint-plugin-markdown', '@babel/eslint-parser', '@babel/plugin-syntax-class-properties', + '@babel/plugin-syntax-top-level-await', ]; Module._findPath = (request, paths, isMain) => { const r = ModuleFindPath(request, paths, isMain); @@ -41,7 +42,10 @@ module.exports = { parser: '@babel/eslint-parser', parserOptions: { babelOptions: { - plugins: [Module._findPath('@babel/plugin-syntax-class-properties')], + plugins: [ + Module._findPath('@babel/plugin-syntax-class-properties'), + Module._findPath('@babel/plugin-syntax-top-level-await'), + ], }, requireConfigFile: false, sourceType: 'script', @@ -53,6 +57,7 @@ module.exports = { 'doc/api/module.md', 'doc/api/modules.md', 'doc/api/packages.md', + 'doc/api/wasi.md', 'test/es-module/test-esm-type-flag.js', 'test/es-module/test-esm-type-flag-alias.js', '*.mjs', diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bb01e8612c1e6..838185a57b307e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ release. -15.6.0
+15.7.0
+15.6.0
15.5.1
15.5.0
15.4.0
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 29700978fb78bf..c9f8b61cd14430 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,12 +15,10 @@ See [details on our policy on Code of Conduct](./doc/guides/contributing/code-of ## [Issues](./doc/guides/contributing/issues.md) -* [How to Contribute in Issues](./doc/guides/contributing/issues.md#how-to-contribute-in-issues) * [Asking for General Help](./doc/guides/contributing/issues.md#asking-for-general-help) * [Discussing non-technical topics](./doc/guides/contributing/issues.md#discussing-non-technical-topics) * [Submitting a Bug Report](./doc/guides/contributing/issues.md#submitting-a-bug-report) * [Triaging a Bug Report](./doc/guides/contributing/issues.md#triaging-a-bug-report) -* [Resolving a Bug Report](./doc/guides/contributing/issues.md#resolving-a-bug-report) ## [Pull Requests](./doc/guides/contributing/pull-requests.md) diff --git a/Makefile b/Makefile index e6ad96da91474d..aa3eb1567d447e 100644 --- a/Makefile +++ b/Makefile @@ -696,7 +696,7 @@ doc-only: tools/doc/node_modules \ @if [ "$(shell $(node_use_openssl))" != "true" ]; then \ echo "Skipping doc-only (no crypto)"; \ else \ - $(MAKE) out/doc/api/all.html out/doc/api/all.json; \ + $(MAKE) out/doc/api/all.html out/doc/api/all.json out/doc/api/stability; \ fi .PHONY: doc @@ -749,6 +749,10 @@ out/doc/api/all.html: $(apidocs_html) tools/doc/allhtml.js \ out/doc/api/all.json: $(apidocs_json) tools/doc/alljson.js | out/doc/api $(call available-node, tools/doc/alljson.js) +.PHONY: out/doc/api/stability +out/doc/api/stability: out/doc/api/all.json tools/doc/stability.js | out/doc/api + $(call available-node, tools/doc/stability.js) + .PHONY: docopen docopen: out/doc/api/all.html @$(PYTHON) -mwebbrowser file://$(abspath $<) diff --git a/README.md b/README.md index edb005c7cfee64..563c5a54118f07 100644 --- a/README.md +++ b/README.md @@ -325,6 +325,8 @@ For information about the governance of the Node.js project, see **Zeyu Yang** <himself65@outlook.com> (he/him) * [hiroppy](https://github.com/hiroppy) - **Yuta Hiroto** <hello@hiroppy.me> (he/him) +* [iansu](https://github.com/iansu) - +**Ian Sutherland** <ian@iansutherland.ca> * [indutny](https://github.com/indutny) - **Fedor Indutny** <fedor.indutny@gmail.com> * [JacksonTian](https://github.com/JacksonTian) - @@ -359,6 +361,8 @@ For information about the governance of the Node.js project, see **Matteo Collina** <matteo.collina@gmail.com> (he/him) * [mhdawson](https://github.com/mhdawson) - **Michael Dawson** <midawson@redhat.com> (he/him) +* [miladfarca](https://github.com/miladfarca) - +**Milad Fa** <mfarazma@redhat.com> (he/him) * [mildsunrise](https://github.com/mildsunrise) - **Alba Mendez** <me@alba.sh> (she/her) * [misterdjules](https://github.com/misterdjules) - @@ -383,6 +387,8 @@ For information about the governance of the Node.js project, see **Andrey Pechkurov** <apechkurov@gmail.com> (he/him) * [Qard](https://github.com/Qard) - **Stephen Belanger** <admin@stephenbelanger.com> (he/him) +* [RaisinTen](https://github.com/RaisinTen) - +**Darshan Sen** <raisinten@gmail.com> (he/him) * [refack](https://github.com/refack) - **Refael Ackermann (רפאל פלחי)** <refack@gmail.com> (he/him/הוא/אתה) * [rexagod](https://github.com/rexagod) - diff --git 
a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js index f6037d332d0692..fc21ea7c85d14b 100644 --- a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js +++ b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common.js'); -const { URLSearchParams } = require('url'); const querystring = require('querystring'); const searchParams = common.searchParams; diff --git a/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js b/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js index cb2301e94036da..b9c2861719bc0f 100644 --- a/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js +++ b/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common.js'); -const { URLSearchParams } = require('url'); const querystring = require('querystring'); const searchParams = common.searchParams; diff --git a/benchmark/url/url-searchparams-iteration.js b/benchmark/url/url-searchparams-iteration.js index b628908d62c708..ce530c5227fab3 100644 --- a/benchmark/url/url-searchparams-iteration.js +++ b/benchmark/url/url-searchparams-iteration.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common.js'); const assert = require('assert'); -const { URLSearchParams } = require('url'); const bench = common.createBenchmark(main, { loopMethod: ['forEach', 'iterator'], diff --git a/benchmark/url/url-searchparams-read.js b/benchmark/url/url-searchparams-read.js index cdaaa7ad11a8c3..e1cb39fbe71cd8 100644 --- a/benchmark/url/url-searchparams-read.js +++ b/benchmark/url/url-searchparams-read.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common.js'); -const { URLSearchParams } = require('url'); const bench = common.createBenchmark(main, { accessMethod: ['get', 'getAll', 'has'], diff --git a/benchmark/url/url-searchparams-sort.js b/benchmark/url/url-searchparams-sort.js index 5beb98cf2d2e25..a1873fd612f873 100644 --- a/benchmark/url/url-searchparams-sort.js +++ b/benchmark/url/url-searchparams-sort.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common.js'); -const URLSearchParams = require('url').URLSearchParams; const inputs = { wpt: 'wpt', // To work around tests diff --git a/deps/icu-small/source/common/cmemory.h b/deps/icu-small/source/common/cmemory.h index b5e0c2b7a17296..c9156f253cf1c7 100644 --- a/deps/icu-small/source/common/cmemory.h +++ b/deps/icu-small/source/common/cmemory.h @@ -725,9 +725,14 @@ class MemoryPool : public UMemory { } MemoryPool& operator=(MemoryPool&& other) U_NOEXCEPT { - fCount = other.fCount; - fPool = std::move(other.fPool); - other.fCount = 0; + // Since `this` may contain instances that need to be deleted, we can't + // just throw them away and replace them with `other`. The normal way of + // dealing with this in C++ is to swap `this` and `other`, rather than + // simply overwrite: the destruction of `other` can then take care of + // running MemoryPool::~MemoryPool() over the still-to-be-deallocated + // instances. + std::swap(fCount, other.fCount); + std::swap(fPool, other.fPool); return *this; } @@ -796,9 +801,6 @@ class MemoryPool : public UMemory { template class MaybeStackVector : protected MemoryPool { public: - using MemoryPool::MemoryPool; - using MemoryPool::operator=; - template T* emplaceBack(Args&&... 
args) { return this->create(args...); diff --git a/deps/icu-small/source/common/locid.cpp b/deps/icu-small/source/common/locid.cpp index 2804e36bf62cb1..874e4a70556f31 100644 --- a/deps/icu-small/source/common/locid.cpp +++ b/deps/icu-small/source/common/locid.cpp @@ -35,6 +35,7 @@ #include "unicode/bytestream.h" #include "unicode/locid.h" +#include "unicode/localebuilder.h" #include "unicode/strenum.h" #include "unicode/stringpiece.h" #include "unicode/uloc.h" @@ -1028,7 +1029,7 @@ class AliasReplacer { // place the the replaced locale ID in out and return true. // Otherwise return false for no replacement or error. bool replace( - const Locale& locale, CharString& out, UErrorCode status); + const Locale& locale, CharString& out, UErrorCode& status); private: const char* language; @@ -1336,10 +1337,13 @@ AliasReplacer::replaceTerritory(UVector& toBeFreed, UErrorCode& status) // Cannot use nullptr for language because that will construct // the default locale, in that case, use "und" to get the correct // locale. - Locale l(language == nullptr ? "und" : language, nullptr, script); + Locale l = LocaleBuilder() + .setLanguage(language == nullptr ? "und" : language) + .setScript(script) + .build(status); l.addLikelySubtags(status); const char* likelyRegion = l.getCountry(); - CharString* item = nullptr; + LocalPointer item; if (likelyRegion != nullptr && uprv_strlen(likelyRegion) > 0) { size_t len = uprv_strlen(likelyRegion); const char* foundInReplacement = uprv_strstr(replacement, @@ -1351,20 +1355,22 @@ AliasReplacer::replaceTerritory(UVector& toBeFreed, UErrorCode& status) *(foundInReplacement-1) == ' '); U_ASSERT(foundInReplacement[len] == ' ' || foundInReplacement[len] == '\0'); - item = new CharString(foundInReplacement, (int32_t)len, status); + item.adoptInsteadAndCheckErrorCode( + new CharString(foundInReplacement, (int32_t)len, status), status); } } - if (item == nullptr) { - item = new CharString(replacement, - (int32_t)(firstSpace - replacement), status); + if (item.isNull() && U_SUCCESS(status)) { + item.adoptInsteadAndCheckErrorCode( + new CharString(replacement, + (int32_t)(firstSpace - replacement), status), status); } if (U_FAILURE(status)) { return false; } - if (item == nullptr) { + if (item.isNull()) { status = U_MEMORY_ALLOCATION_ERROR; return false; } replacedRegion = item->data(); - toBeFreed.addElement(item, status); + toBeFreed.addElement(item.orphan(), status); } U_ASSERT(!same(region, replacedRegion)); region = replacedRegion; @@ -1453,7 +1459,7 @@ AliasReplacer::outputToString( int32_t variantsStart = out.length(); for (int32_t i = 0; i < variants.size(); i++) { out.append(SEP_CHAR, status) - .append((const char*)((UVector*)variants.elementAt(i)), + .append((const char*)(variants.elementAt(i)), status); } T_CString_toUpperCase(out.data() + variantsStart); @@ -1470,7 +1476,7 @@ AliasReplacer::outputToString( } bool -AliasReplacer::replace(const Locale& locale, CharString& out, UErrorCode status) +AliasReplacer::replace(const Locale& locale, CharString& out, UErrorCode& status) { data = AliasData::singleton(status); if (U_FAILURE(status)) { @@ -2453,9 +2459,13 @@ Locale::setKeywordValue(const char* keywordName, const char* keywordValue, UErro if (U_FAILURE(status)) { return; } + if (status == U_STRING_NOT_TERMINATED_WARNING) { + status = U_ZERO_ERROR; + } int32_t bufferLength = uprv_max((int32_t)(uprv_strlen(fullName) + 1), ULOC_FULLNAME_CAPACITY); int32_t newLength = uloc_setKeywordValue(keywordName, keywordValue, fullName, bufferLength, &status) + 1; + 
U_ASSERT(status != U_STRING_NOT_TERMINATED_WARNING); /* Handle the case the current buffer is not enough to hold the new id */ if (status == U_BUFFER_OVERFLOW_ERROR) { U_ASSERT(newLength > bufferLength); @@ -2472,6 +2482,7 @@ Locale::setKeywordValue(const char* keywordName, const char* keywordValue, UErro fullName = newFullName; status = U_ZERO_ERROR; uloc_setKeywordValue(keywordName, keywordValue, fullName, newLength, &status); + U_ASSERT(status != U_STRING_NOT_TERMINATED_WARNING); } else { U_ASSERT(newLength <= bufferLength); } diff --git a/deps/icu-small/source/common/rbbitblb.cpp b/deps/icu-small/source/common/rbbitblb.cpp index 65b597c4a936b0..cbd8f315c252d8 100644 --- a/deps/icu-small/source/common/rbbitblb.cpp +++ b/deps/icu-small/source/common/rbbitblb.cpp @@ -1402,12 +1402,13 @@ void RBBITableBuilder::exportTable(void *where) { U_ASSERT (sd->fAccepting <= 255); U_ASSERT (sd->fLookAhead <= 255); U_ASSERT (0 <= sd->fTagsIdx && sd->fTagsIdx <= 255); - row->r8.fAccepting = sd->fAccepting; - row->r8.fLookAhead = sd->fLookAhead; - row->r8.fTagsIdx = sd->fTagsIdx; + RBBIStateTableRow8 *r8 = (RBBIStateTableRow8*)row; + r8->fAccepting = sd->fAccepting; + r8->fLookAhead = sd->fLookAhead; + r8->fTagsIdx = sd->fTagsIdx; for (col=0; colfDtran->elementAti(col) <= kMaxStateFor8BitsTable); - row->r8.fNextState[col] = sd->fDtran->elementAti(col); + r8->fNextState[col] = sd->fDtran->elementAti(col); } } else { U_ASSERT (sd->fAccepting <= 0xffff); @@ -1603,12 +1604,13 @@ void RBBITableBuilder::exportSafeTable(void *where) { UnicodeString *rowString = (UnicodeString *)fSafeTable->elementAt(state); RBBIStateTableRow *row = (RBBIStateTableRow *)(table->fTableData + state*table->fRowLen); if (use8BitsForSafeTable()) { - row->r8.fAccepting = 0; - row->r8.fLookAhead = 0; - row->r8.fTagsIdx = 0; + RBBIStateTableRow8 *r8 = (RBBIStateTableRow8*)row; + r8->fAccepting = 0; + r8->fLookAhead = 0; + r8->fTagsIdx = 0; for (col=0; colcharAt(col) <= kMaxStateFor8BitsTable); - row->r8.fNextState[col] = static_cast(rowString->charAt(col)); + r8->fNextState[col] = static_cast(rowString->charAt(col)); } } else { row->r16.fAccepting = 0; diff --git a/deps/icu-small/source/common/uloc.cpp b/deps/icu-small/source/common/uloc.cpp index 522f33dbe243a9..ebfbb506508c1b 100644 --- a/deps/icu-small/source/common/uloc.cpp +++ b/deps/icu-small/source/common/uloc.cpp @@ -877,6 +877,9 @@ uloc_setKeywordValue(const char* keywordName, if(U_FAILURE(*status)) { return -1; } + if (*status == U_STRING_NOT_TERMINATED_WARNING) { + *status = U_ZERO_ERROR; + } if (keywordName == NULL || keywordName[0] == 0 || bufferCapacity <= 1) { *status = U_ILLEGAL_ARGUMENT_ERROR; return 0; @@ -914,6 +917,7 @@ uloc_setKeywordValue(const char* keywordName, startSearchHere = (char*)locale_getKeywordsStart(buffer); if(startSearchHere == NULL || (startSearchHere[1]==0)) { if(keywordValueLen == 0) { /* no keywords = nothing to remove */ + U_ASSERT(*status != U_STRING_NOT_TERMINATED_WARNING); return bufLen; } @@ -933,6 +937,7 @@ uloc_setKeywordValue(const char* keywordName, startSearchHere += keywordNameLen; *startSearchHere++ = '='; uprv_strcpy(startSearchHere, keywordValueBuffer); + U_ASSERT(*status != U_STRING_NOT_TERMINATED_WARNING); return needLen; } /* end shortcut - no @ */ @@ -1047,13 +1052,27 @@ uloc_setKeywordValue(const char* keywordName, if (!handledInputKeyAndValue || U_FAILURE(*status)) { /* if input key/value specified removal of a keyword not present in locale, or * there was an error in CharString.append, leave original locale alone. 
*/ + U_ASSERT(*status != U_STRING_NOT_TERMINATED_WARNING); return bufLen; } // needLen = length of the part before '@' needLen = (int32_t)(startSearchHere - buffer); - return needLen + updatedKeysAndValues.extract( + // Check to see if the new contents fit at startSearchHere; if not, return + // U_BUFFER_OVERFLOW_ERROR without copying updatedKeysAndValues into it. + // We do this because this API function does not behave like most others: + // It promises never to set a U_STRING_NOT_TERMINATED_WARNING. + // When the contents fit but without the terminating NUL, we need to leave + // the buffer contents unchanged and return a buffer overflow error. + int32_t appendLength = updatedKeysAndValues.length(); + if (appendLength >= bufferCapacity - needLen) { + *status = U_BUFFER_OVERFLOW_ERROR; + return needLen + appendLength; + } + needLen += updatedKeysAndValues.extract( startSearchHere, bufferCapacity - needLen, *status); + U_ASSERT(*status != U_STRING_NOT_TERMINATED_WARNING); + return needLen; } /* ### ID parsing implementation **************************************************/ diff --git a/deps/icu-small/source/common/unicode/docmain.h b/deps/icu-small/source/common/unicode/docmain.h index f09d7e1dc29cec..14491494c5ca7a 100644 --- a/deps/icu-small/source/common/unicode/docmain.h +++ b/deps/icu-small/source/common/unicode/docmain.h @@ -143,6 +143,11 @@ * icu::MessageFormat * * + * List Formatting + * ulistformatter.h + * icu::ListFormatter + * + * * Number Formatting
(includes currency and unit formatting) * unumberformatter.h, unum.h * icu::number::NumberFormatter (ICU 60+) or icu::NumberFormat (older versions) diff --git a/deps/icu-small/source/common/unicode/urename.h b/deps/icu-small/source/common/unicode/urename.h index 20232cd209c2d6..fe59fdd893d940 100644 --- a/deps/icu-small/source/common/unicode/urename.h +++ b/deps/icu-small/source/common/unicode/urename.h @@ -1137,6 +1137,7 @@ #define ulocimp_toLanguageTag U_ICU_ENTRY_POINT_RENAME(ulocimp_toLanguageTag) #define ulocimp_toLegacyKey U_ICU_ENTRY_POINT_RENAME(ulocimp_toLegacyKey) #define ulocimp_toLegacyType U_ICU_ENTRY_POINT_RENAME(ulocimp_toLegacyType) +#define ultag_getTKeyStart U_ICU_ENTRY_POINT_RENAME(ultag_getTKeyStart) #define ultag_isExtensionSubtags U_ICU_ENTRY_POINT_RENAME(ultag_isExtensionSubtags) #define ultag_isLanguageSubtag U_ICU_ENTRY_POINT_RENAME(ultag_isLanguageSubtag) #define ultag_isPrivateuseValueSubtags U_ICU_ENTRY_POINT_RENAME(ultag_isPrivateuseValueSubtags) diff --git a/deps/icu-small/source/common/unicode/uvernum.h b/deps/icu-small/source/common/unicode/uvernum.h index a4cbb9e0fe8661..a46481a3fe610c 100644 --- a/deps/icu-small/source/common/unicode/uvernum.h +++ b/deps/icu-small/source/common/unicode/uvernum.h @@ -66,7 +66,7 @@ * This value will change in the subsequent releases of ICU * @stable ICU 2.6 */ -#define U_ICU_VERSION_MINOR_NUM 1 +#define U_ICU_VERSION_MINOR_NUM 2 /** The current ICU patchlevel version as an integer. * This value will change in the subsequent releases of ICU @@ -139,7 +139,7 @@ * This value will change in the subsequent releases of ICU * @stable ICU 2.4 */ -#define U_ICU_VERSION "68.1" +#define U_ICU_VERSION "68.2" /** * The current ICU library major version number as a string, for library name suffixes. @@ -158,7 +158,7 @@ /** Data version in ICU4C. * @internal ICU 4.4 Internal Use Only **/ -#define U_ICU_DATA_VERSION "68.1" +#define U_ICU_DATA_VERSION "68.2" #endif /* U_HIDE_INTERNAL_API */ /*=========================================================================== diff --git a/deps/icu-small/source/common/wintz.cpp b/deps/icu-small/source/common/wintz.cpp index 047f4290f10d0e..6805297a38a161 100644 --- a/deps/icu-small/source/common/wintz.cpp +++ b/deps/icu-small/source/common/wintz.cpp @@ -36,17 +36,58 @@ U_NAMESPACE_BEGIN +// Note these constants and the struct are only used when dealing with the fallback path for RDP sessions. + +// This is the location of the time zones in the registry on Vista+ systems. +// See: https://docs.microsoft.com/windows/win32/api/timezoneapi/ns-timezoneapi-dynamic_time_zone_information +#define WINDOWS_TIMEZONES_REG_KEY_PATH L"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Time Zones" + +// Max length for a registry key is 255. +1 for null. +// See: https://docs.microsoft.com/windows/win32/sysinfo/registry-element-size-limits +#define WINDOWS_MAX_REG_KEY_LENGTH 256 + +#if U_PLATFORM_HAS_WINUWP_API == 0 + +// This is the layout of the TZI binary value in the registry. +// See: https://docs.microsoft.com/windows/win32/api/timezoneapi/ns-timezoneapi-time_zone_information +typedef struct _REG_TZI_FORMAT { + LONG Bias; + LONG StandardBias; + LONG DaylightBias; + SYSTEMTIME StandardDate; + SYSTEMTIME DaylightDate; +} REG_TZI_FORMAT; + +#endif // U_PLATFORM_HAS_WINUWP_API + /** -* Main Windows time zone detection function. -* Returns the Windows time zone converted to an ICU time zone as a heap-allocated buffer, or nullptr upon failure. +* This is the main Windows time zone detection function. 
+* +* It returns the Windows time zone converted to an ICU time zone as a heap-allocated buffer, or nullptr upon failure. +* +* We use the Win32 API GetDynamicTimeZoneInformation (which is available since Vista) to get the current time zone info, +* as this API returns a non-localized time zone name which can then be mapped to an ICU time zone. +* +* However, in some RDP/terminal services situations, this struct isn't always fully complete, and the TimeZoneKeyName +* field of the struct might be NULL. This can happen with some 3rd party RDP clients, and also when using older versions +* of the RDP protocol, which don't send the newer TimeZoneKeyName information and only send the StandardName and DaylightName. +* +* Since these 3rd party clients and older RDP clients only send the pre-Vista time zone information to the server, this means that we +* need to fall back to using the pre-Vista methods to determine the time zone. This unfortunately requires examining the registry directly +* in order to try and determine the current time zone. +* +* Note that this can still fail in some cases if the client and server are using different languages, as the StandardName +* that is sent by the client is localized in the client's language. However, we must compare this to the names that are on the server, which +* are localized in the registry using the server's language. Despite that, this is the best we can do. * -* Note: We use the Win32 API GetDynamicTimeZoneInformation (available since Vista+) to get the current time zone info. -* This API returns a non-localized time zone name, which is mapped to an ICU time zone ID (~ Olsen ID). +* Note: This fallback method won't work for the UWP version though, as we can't use the registry APIs in UWP. +* +* Once we have the current Windows time zone, we can then map it to an ICU time zone ID (~ Olson ID). */ U_CAPI const char* U_EXPORT2 uprv_detectWindowsTimeZone() { - // Obtain the DYNAMIC_TIME_ZONE_INFORMATION info to get the non-localized time zone name. + // We first try to obtain the time zone directly by using the TimeZoneKeyName field of the DYNAMIC_TIME_ZONE_INFORMATION struct. DYNAMIC_TIME_ZONE_INFORMATION dynamicTZI; uprv_memset(&dynamicTZI, 0, sizeof(dynamicTZI)); SYSTEMTIME systemTimeAllZero; @@ -86,22 +127,138 @@ uprv_detectWindowsTimeZone() // Note '-' before 'utcOffsetMin'. The timezone ID's sign convention // is that a timezone ahead of UTC is Etc/GMT- and a timezone // behind UTC is Etc/GMT+. - int ret = snprintf(gmtOffsetTz, UPRV_LENGTHOF(gmtOffsetTz), "Etc/GMT%+d", -utcOffsetMins / 60); + int ret = snprintf(gmtOffsetTz, UPRV_LENGTHOF(gmtOffsetTz), "Etc/GMT%+ld", -utcOffsetMins / 60); if (ret > 0 && ret < UPRV_LENGTHOF(gmtOffsetTz)) { return uprv_strdup(gmtOffsetTz); } } } - // If DST is NOT disabled, but we have an empty TimeZoneKeyName, then it is unclear - // what we should do as this should not happen. + // If DST is NOT disabled, but the TimeZoneKeyName field of the struct is NULL, then we may be dealing with an + // RDP/terminal services session where the 'Time Zone Redirection' feature is enabled. However, either the RDP + // client sent the server incomplete info (some 3rd party RDP clients only send the StandardName and DaylightName, + // but do not send the important TimeZoneKeyName), or the RDP server has not populated the struct correctly. 
+ // + // In this case we unfortunately have no choice but to fall back to using the pre-Vista method of determining the + // time zone, which requires examining the registry directly. + // + // Note that this can still fail if the client and server are using different languages, as the StandardName + // that is sent by the client is *localized* in the client's language. However, we must compare this to the names that are + // on the server, which are *localized* in the registry using the server's language. + // + // One other note is that this fallback method doesn't work for the UWP version, as we can't use the registry APIs. + + // windowsTimeZoneName will point at timezoneSubKeyName if we had to fall back to using the registry, and we found a match. + WCHAR timezoneSubKeyName[WINDOWS_MAX_REG_KEY_LENGTH]; + WCHAR *windowsTimeZoneName = dynamicTZI.TimeZoneKeyName; + if (dynamicTZI.TimeZoneKeyName[0] == 0) { + +// We can't use the registry APIs in the UWP version. +#if U_PLATFORM_HAS_WINUWP_API == 1 + (void)timezoneSubKeyName; // suppress unused variable warnings. return nullptr; +#else + // Open the path to the time zones in the Windows registry. + LONG ret; + HKEY hKeyAllTimeZones = nullptr; + ret = RegOpenKeyExW(HKEY_LOCAL_MACHINE, WINDOWS_TIMEZONES_REG_KEY_PATH, 0, KEY_READ, + reinterpret_cast(&hKeyAllTimeZones)); + + if (ret != ERROR_SUCCESS) { + // If we can't open the key, then we can't do much, so fail. + return nullptr; + } + + // Read the number of subkeys under the time zone registry path. + DWORD numTimeZoneSubKeys; + ret = RegQueryInfoKeyW(hKeyAllTimeZones, nullptr, nullptr, nullptr, &numTimeZoneSubKeys, + nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr); + + if (ret != ERROR_SUCCESS) { + RegCloseKey(hKeyAllTimeZones); + return nullptr; + } + + // Examine each of the subkeys to try and find a match for the localized standard name ("Std"). + // + // Note: The name of the time zone subkey itself is not localized, but the "Std" name is localized. This means + // that we could fail to find a match if the RDP client and RDP server are using different languages, but unfortunately + // there isn't much we can do about it. + HKEY hKeyTimeZoneSubKey = nullptr; + ULONG registryValueType; + WCHAR registryStandardName[WINDOWS_MAX_REG_KEY_LENGTH]; + + for (DWORD i = 0; i < numTimeZoneSubKeys; i++) { + // Note: RegEnumKeyExW wants the size of the buffer in characters. + DWORD size = UPRV_LENGTHOF(timezoneSubKeyName); + ret = RegEnumKeyExW(hKeyAllTimeZones, i, timezoneSubKeyName, &size, nullptr, nullptr, nullptr, nullptr); + + if (ret != ERROR_SUCCESS) { + RegCloseKey(hKeyAllTimeZones); + return nullptr; + } + + ret = RegOpenKeyExW(hKeyAllTimeZones, timezoneSubKeyName, 0, KEY_READ, + reinterpret_cast(&hKeyTimeZoneSubKey)); + + if (ret != ERROR_SUCCESS) { + RegCloseKey(hKeyAllTimeZones); + return nullptr; + } + + // Note: RegQueryValueExW wants the size of the buffer in bytes. + size = sizeof(registryStandardName); + ret = RegQueryValueExW(hKeyTimeZoneSubKey, L"Std", nullptr, &registryValueType, + reinterpret_cast(registryStandardName), &size); + + if (ret != ERROR_SUCCESS || registryValueType != REG_SZ) { + RegCloseKey(hKeyTimeZoneSubKey); + RegCloseKey(hKeyAllTimeZones); + return nullptr; + } + + // Note: wcscmp does an ordinal (byte) comparison. 
+ if (wcscmp(reinterpret_cast(registryStandardName), dynamicTZI.StandardName) == 0) { + // Since we are comparing the *localized* time zone name, it's possible that some languages might use + // the same string for more than one time zone. Thus we need to examine the TZI data in the registry to + // compare the GMT offset (the bias), and the DST transition dates, to ensure it's the same time zone + // as the currently reported one. + REG_TZI_FORMAT registryTziValue; + uprv_memset(&registryTziValue, 0, sizeof(registryTziValue)); + + // Note: RegQueryValueExW wants the size of the buffer in bytes. + DWORD timezoneTziValueSize = sizeof(registryTziValue); + ret = RegQueryValueExW(hKeyTimeZoneSubKey, L"TZI", nullptr, &registryValueType, + reinterpret_cast(&registryTziValue), &timezoneTziValueSize); + + if (ret == ERROR_SUCCESS) { + if ((dynamicTZI.Bias == registryTziValue.Bias) && + (memcmp((const void *)&dynamicTZI.StandardDate, (const void *)&registryTziValue.StandardDate, sizeof(SYSTEMTIME)) == 0) && + (memcmp((const void *)&dynamicTZI.DaylightDate, (const void *)&registryTziValue.DaylightDate, sizeof(SYSTEMTIME)) == 0)) + { + // We found a matching time zone. + windowsTimeZoneName = timezoneSubKeyName; + break; + } + } + } + RegCloseKey(hKeyTimeZoneSubKey); + hKeyTimeZoneSubKey = nullptr; + } + + if (hKeyTimeZoneSubKey != nullptr) { + RegCloseKey(hKeyTimeZoneSubKey); + } + if (hKeyAllTimeZones != nullptr) { + RegCloseKey(hKeyAllTimeZones); + } +#endif // U_PLATFORM_HAS_WINUWP_API } CharString winTZ; UErrorCode status = U_ZERO_ERROR; - winTZ.appendInvariantChars(UnicodeString(TRUE, dynamicTZI.TimeZoneKeyName, -1), status); + winTZ.appendInvariantChars(UnicodeString(TRUE, windowsTimeZoneName, -1), status); // Map Windows Timezone name (non-localized) to ICU timezone ID (~ Olson timezone id). StackUResourceBundle winTZBundle; @@ -123,18 +280,29 @@ uprv_detectWindowsTimeZone() int regionCodeLen = GetGeoInfoW(geoId, GEO_ISO2, regionCodeW, UPRV_LENGTHOF(regionCodeW), 0); const UChar *icuTZ16 = nullptr; - int32_t tzLen; + int32_t tzListLen = 0; if (regionCodeLen != 0) { for (int i = 0; i < UPRV_LENGTHOF(regionCodeW); i++) { regionCode[i] = static_cast(regionCodeW[i]); } - icuTZ16 = ures_getStringByKey(winTZBundle.getAlias(), regionCode, &tzLen, &status); + icuTZ16 = ures_getStringByKey(winTZBundle.getAlias(), regionCode, &tzListLen, &status); } if (regionCodeLen == 0 || U_FAILURE(status)) { // fallback to default "001" (world) status = U_ZERO_ERROR; - icuTZ16 = ures_getStringByKey(winTZBundle.getAlias(), "001", &tzLen, &status); + icuTZ16 = ures_getStringByKey(winTZBundle.getAlias(), "001", &tzListLen, &status); + } + + // Note: We want the first entry in the string returned by ures_getStringByKey. + // However this string can be a space delimited list of timezones: + // Ex: "America/New_York America/Detroit America/Indiana/Petersburg ..." + // We need to stop at the first space, so we pass tzLen (instead of tzListLen) to appendInvariantChars below. 
+ int32_t tzLen = 0; + if (tzListLen > 0) { + while (!(icuTZ16[tzLen] == u'\0' || icuTZ16[tzLen] == u' ')) { + tzLen++; + } } // Note: cloneData returns nullptr if the status is a failure, so this diff --git a/deps/icu-small/source/data/in/icudt68l.dat.bz2 b/deps/icu-small/source/data/in/icudt68l.dat.bz2 index fcee4ecfa17efe..8fd32b7471d648 100644 Binary files a/deps/icu-small/source/data/in/icudt68l.dat.bz2 and b/deps/icu-small/source/data/in/icudt68l.dat.bz2 differ diff --git a/deps/icu-small/source/i18n/dtitvfmt.cpp b/deps/icu-small/source/i18n/dtitvfmt.cpp index a913dc43c808d6..d6ec501af88701 100644 --- a/deps/icu-small/source/i18n/dtitvfmt.cpp +++ b/deps/icu-small/source/i18n/dtitvfmt.cpp @@ -1422,7 +1422,11 @@ DateIntervalFormat::setIntervalPattern(UCalendarDateFields field, if ( field == UCAL_AM_PM ) { fInfo->getIntervalPattern(*bestSkeleton, UCAL_HOUR, pattern,status); if ( !pattern.isEmpty() ) { - setIntervalPattern(field, pattern); + UBool suppressDayPeriodField = fSkeleton.indexOf(CAP_J) != -1; + UnicodeString adjustIntervalPattern; + adjustFieldWidth(*skeleton, *bestSkeleton, pattern, differenceInfo, + suppressDayPeriodField, adjustIntervalPattern); + setIntervalPattern(field, adjustIntervalPattern); } return false; } @@ -1694,27 +1698,23 @@ DateIntervalFormat::adjustFieldWidth(const UnicodeString& inputSkeleton, DateIntervalInfo::parseSkeleton(inputSkeleton, inputSkeletonFieldWidth); DateIntervalInfo::parseSkeleton(bestMatchSkeleton, bestMatchSkeletonFieldWidth); if (suppressDayPeriodField) { - adjustedPtn.findAndReplace(UnicodeString(LOW_A), UnicodeString()); - adjustedPtn.findAndReplace(UnicodeString(" "), UnicodeString(" ")); + findReplaceInPattern(adjustedPtn, UnicodeString(LOW_A), UnicodeString()); + findReplaceInPattern(adjustedPtn, UnicodeString(" "), UnicodeString(" ")); adjustedPtn.trim(); } if ( differenceInfo == 2 ) { if (inputSkeleton.indexOf(LOW_Z) != -1) { - adjustedPtn.findAndReplace(UnicodeString(LOW_V), - UnicodeString(LOW_Z)); - } - if (inputSkeleton.indexOf(CAP_K) != -1) { - adjustedPtn.findAndReplace(UnicodeString(LOW_H), - UnicodeString(CAP_K)); - } - if (inputSkeleton.indexOf(LOW_K) != -1) { - adjustedPtn.findAndReplace(UnicodeString(CAP_H), - UnicodeString(LOW_K)); - } - if (inputSkeleton.indexOf(LOW_B) != -1) { - adjustedPtn.findAndReplace(UnicodeString(LOW_A), - UnicodeString(LOW_B)); - } + findReplaceInPattern(adjustedPtn, UnicodeString(LOW_V), UnicodeString(LOW_Z)); + } + if (inputSkeleton.indexOf(CAP_K) != -1) { + findReplaceInPattern(adjustedPtn, UnicodeString(LOW_H), UnicodeString(CAP_K)); + } + if (inputSkeleton.indexOf(LOW_K) != -1) { + findReplaceInPattern(adjustedPtn, UnicodeString(CAP_H), UnicodeString(LOW_K)); + } + if (inputSkeleton.indexOf(LOW_B) != -1) { + findReplaceInPattern(adjustedPtn, UnicodeString(LOW_A), UnicodeString(LOW_B)); + } } if (adjustedPtn.indexOf(LOW_A) != -1 && bestMatchSkeletonFieldWidth[LOW_A - PATTERN_CHAR_BASE] == 0) { bestMatchSkeletonFieldWidth[LOW_A - PATTERN_CHAR_BASE] = 1; @@ -1792,6 +1792,39 @@ DateIntervalFormat::adjustFieldWidth(const UnicodeString& inputSkeleton, } } +void +DateIntervalFormat::findReplaceInPattern(UnicodeString& targetString, + const UnicodeString& strToReplace, + const UnicodeString& strToReplaceWith) { + int32_t firstQuoteIndex = targetString.indexOf(u'\''); + if (firstQuoteIndex == -1) { + targetString.findAndReplace(strToReplace, strToReplaceWith); + } else { + UnicodeString result; + UnicodeString source = targetString; + + while (firstQuoteIndex >= 0) { + int32_t secondQuoteIndex = 
source.indexOf(u'\'', firstQuoteIndex + 1); + if (secondQuoteIndex == -1) { + secondQuoteIndex = source.length() - 1; + } + + UnicodeString unquotedText(source, 0, firstQuoteIndex); + UnicodeString quotedText(source, firstQuoteIndex, secondQuoteIndex - firstQuoteIndex + 1); + + unquotedText.findAndReplace(strToReplace, strToReplaceWith); + result += unquotedText; + result += quotedText; + + source.remove(0, secondQuoteIndex + 1); + firstQuoteIndex = source.indexOf(u'\''); + } + source.findAndReplace(strToReplace, strToReplaceWith); + result += source; + targetString = result; + } +} + void diff --git a/deps/icu-small/source/i18n/formatted_string_builder.cpp b/deps/icu-small/source/i18n/formatted_string_builder.cpp index 5aabc31cc4391b..b370f14f2ac4ff 100644 --- a/deps/icu-small/source/i18n/formatted_string_builder.cpp +++ b/deps/icu-small/source/i18n/formatted_string_builder.cpp @@ -276,6 +276,11 @@ int32_t FormattedStringBuilder::prepareForInsertHelper(int32_t index, int32_t co char16_t *oldChars = getCharPtr(); Field *oldFields = getFieldPtr(); if (fLength + count > oldCapacity) { + if ((fLength + count) > INT32_MAX / 2) { + // If we continue, then newCapacity will overflow int32_t in the next line. + status = U_INPUT_TOO_LONG_ERROR; + return -1; + } int32_t newCapacity = (fLength + count) * 2; int32_t newZero = newCapacity / 2 - (fLength + count) / 2; @@ -330,12 +335,14 @@ int32_t FormattedStringBuilder::prepareForInsertHelper(int32_t index, int32_t co fZero = newZero; fLength += count; } + U_ASSERT((fZero + index) >= 0); return fZero + index; } int32_t FormattedStringBuilder::remove(int32_t index, int32_t count) { // TODO: Reset the heap here? (If the string after removal can fit on stack?) int32_t position = index + fZero; + U_ASSERT(position >= 0); uprv_memmove2(getCharPtr() + position, getCharPtr() + position + count, sizeof(char16_t) * (fLength - index - count)); diff --git a/deps/icu-small/source/i18n/formattedval_impl.h b/deps/icu-small/source/i18n/formattedval_impl.h index 8005b0abb4d848..1e6eb1e639f809 100644 --- a/deps/icu-small/source/i18n/formattedval_impl.h +++ b/deps/icu-small/source/i18n/formattedval_impl.h @@ -117,6 +117,12 @@ class FormattedValueFieldPositionIteratorImpl : public UMemory, public Formatted }; +// Internal struct that must be exported for MSVC +struct U_I18N_API SpanInfo { + int32_t spanValue; + int32_t length; +}; + // Export an explicit template instantiation of the MaybeStackArray that // is used as a data member of CEBuffer. // // See digitlst.h, pluralaffix.h, datefmt.h, and others for similar examples. // #if U_PF_WINDOWS <= U_PLATFORM && U_PLATFORM <= U_PF_CYGWIN -template class U_I18N_API MaybeStackArray; +template class U_I18N_API MaybeStackArray; #endif /** @@ -162,13 +168,19 @@ class U_I18N_API FormattedValueStringBuilderImpl : public UMemory, public Format return fString; } - void appendSpanIndex(int32_t index); - void prependSpanIndex(int32_t index); + /** + * Adds additional metadata used for span fields. + * + * spanValue: the index of the list item, for example. + * length: the length of the span, used to split adjacent fields. 
+ */ + void appendSpanInfo(int32_t spanValue, int32_t length, UErrorCode& status); + void prependSpanInfo(int32_t spanValue, int32_t length, UErrorCode& status); private: FormattedStringBuilder fString; FormattedStringBuilder::Field fNumericField; - MaybeStackArray spanIndices; + MaybeStackArray spanIndices; bool nextPositionImpl(ConstrainedFieldPosition& cfpos, FormattedStringBuilder::Field numericField, UErrorCode& status) const; static bool isIntOrGroup(FormattedStringBuilder::Field field); diff --git a/deps/icu-small/source/i18n/formattedval_sbimpl.cpp b/deps/icu-small/source/i18n/formattedval_sbimpl.cpp index b2ae4c34c0a66d..84c2d00666c2be 100644 --- a/deps/icu-small/source/i18n/formattedval_sbimpl.cpp +++ b/deps/icu-small/source/i18n/formattedval_sbimpl.cpp @@ -46,19 +46,19 @@ Appendable& FormattedValueStringBuilderImpl::appendTo(Appendable& appendable, UE UBool FormattedValueStringBuilderImpl::nextPosition(ConstrainedFieldPosition& cfpos, UErrorCode& status) const { // NOTE: MSVC sometimes complains when implicitly converting between bool and UBool - return nextPositionImpl(cfpos, fNumericField, status) ? TRUE : FALSE; + return nextPositionImpl(cfpos, fNumericField, status) ? true : false; } UBool FormattedValueStringBuilderImpl::nextFieldPosition(FieldPosition& fp, UErrorCode& status) const { int32_t rawField = fp.getField(); if (rawField == FieldPosition::DONT_CARE) { - return FALSE; + return false; } if (rawField < 0 || rawField >= UNUM_FIELD_COUNT) { status = U_ILLEGAL_ARGUMENT_ERROR; - return FALSE; + return false; } ConstrainedFieldPosition cfpos; @@ -67,7 +67,7 @@ UBool FormattedValueStringBuilderImpl::nextFieldPosition(FieldPosition& fp, UErr if (nextPositionImpl(cfpos, kUndefinedField, status)) { fp.setBeginIndex(cfpos.getStart()); fp.setEndIndex(cfpos.getLimit()); - return TRUE; + return true; } // Special case: fraction should start after integer if fraction is not present @@ -85,7 +85,7 @@ UBool FormattedValueStringBuilderImpl::nextFieldPosition(FieldPosition& fp, UErr fp.setEndIndex(i - fString.fZero); } - return FALSE; + return false; } void FormattedValueStringBuilderImpl::getAllFieldPositions(FieldPositionIteratorHandler& fpih, @@ -103,23 +103,12 @@ static constexpr Field kEndField = Field(0xf, 0xf); bool FormattedValueStringBuilderImpl::nextPositionImpl(ConstrainedFieldPosition& cfpos, Field numericField, UErrorCode& /*status*/) const { int32_t fieldStart = -1; Field currField = kUndefinedField; - UFieldCategory spanCategory = UFIELD_CATEGORY_UNDEFINED; - int32_t spanValue; for (int32_t i = fString.fZero + cfpos.getLimit(); i <= fString.fZero + fString.fLength; i++) { Field _field = (i < fString.fZero + fString.fLength) ? fString.getFieldPtr()[i] : kEndField; // Case 1: currently scanning a field. if (currField != kUndefinedField) { if (currField != _field) { int32_t end = i - fString.fZero; - // Handle span fields; don't trim them - if (spanCategory != UFIELD_CATEGORY_UNDEFINED) { - cfpos.setState( - spanCategory, - spanValue, - fieldStart, - end); - return true; - } // Grouping separators can be whitespace; don't throw them out! 
if (isTrimmable(currField)) { end = trimBack(i - fString.fZero); @@ -182,13 +171,11 @@ bool FormattedValueStringBuilderImpl::nextPositionImpl(ConstrainedFieldPosition& if (elementField == Field(UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD) && cfpos.matchesField(elementField.getCategory(), elementField.getField()) && (cfpos.getLimit() < i - fString.fZero || cfpos.getCategory() != elementField.getCategory())) { - // Re-wind to the beginning of the field and then emit it - int32_t j = i - 1; - for (; j >= fString.fZero && fString.getFieldPtr()[j] == fString.getFieldPtr()[i-1]; j--) {} + int64_t si = cfpos.getInt64IterationContext() - 1; cfpos.setState( elementField.getCategory(), elementField.getField(), - j - fString.fZero + 1, + i - fString.fZero - spanIndices[si].length, i - fString.fZero); return true; } @@ -203,22 +190,28 @@ bool FormattedValueStringBuilderImpl::nextPositionImpl(ConstrainedFieldPosition& } // Case 3: check for field starting at this position // Case 3a: Need to add a SpanField - if (_field == Field(UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD) - // don't return the same field twice in a row: - && (i == fString.fZero - || fString.getFieldPtr()[i-1].getCategory() != UFIELD_CATEGORY_LIST - || fString.getFieldPtr()[i-1].getField() != ULISTFMT_ELEMENT_FIELD)) { + if (_field == Field(UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD)) { int64_t si = cfpos.getInt64IterationContext(); - spanValue = spanIndices[si]; + int32_t spanValue = spanIndices[si].spanValue; + int32_t length = spanIndices[si].length; cfpos.setInt64IterationContext(si + 1); if (cfpos.matchesField(UFIELD_CATEGORY_LIST_SPAN, spanValue)) { - spanCategory = UFIELD_CATEGORY_LIST_SPAN; + UFieldCategory spanCategory = UFIELD_CATEGORY_LIST_SPAN; fieldStart = i - fString.fZero; - currField = _field; + int32_t end = fieldStart + length; + cfpos.setState( + spanCategory, + spanValue, + fieldStart, + end); + return true; + } else { + // Failed to match; jump ahead + i += length - 1; continue; } } - // Case 3b: No SpanField or SpanField did not match + // Case 3b: No SpanField if (cfpos.matchesField(_field.getCategory(), _field.getField())) { fieldStart = i - fString.fZero; currField = _field; @@ -226,24 +219,40 @@ bool FormattedValueStringBuilderImpl::nextPositionImpl(ConstrainedFieldPosition& } U_ASSERT(currField == kUndefinedField); + // Always set the position to the end so that we don't revisit previous sections + cfpos.setState( + cfpos.getCategory(), + cfpos.getField(), + fString.fLength, + fString.fLength); return false; } -void FormattedValueStringBuilderImpl::appendSpanIndex(int32_t position) { - if (spanIndices.getCapacity() <= position) { - spanIndices.resize(position * 2); +void FormattedValueStringBuilderImpl::appendSpanInfo(int32_t spanValue, int32_t length, UErrorCode& status) { + if (U_FAILURE(status)) { return; } + U_ASSERT(spanIndices.getCapacity() >= spanValue); + if (spanIndices.getCapacity() == spanValue) { + if (!spanIndices.resize(spanValue * 2, spanValue)) { + status = U_MEMORY_ALLOCATION_ERROR; + return; + } } - spanIndices[position] = position; + spanIndices[spanValue] = {spanValue, length}; } -void FormattedValueStringBuilderImpl::prependSpanIndex(int32_t position) { - if (spanIndices.getCapacity() <= position) { - spanIndices.resize(position * 2); +void FormattedValueStringBuilderImpl::prependSpanInfo(int32_t spanValue, int32_t length, UErrorCode& status) { + if (U_FAILURE(status)) { return; } + U_ASSERT(spanIndices.getCapacity() >= spanValue); + if (spanIndices.getCapacity() == spanValue) { + if 
(!spanIndices.resize(spanValue * 2, spanValue)) { + status = U_MEMORY_ALLOCATION_ERROR; + return; + } } - for (int32_t i = 0; i < position; i++) { + for (int32_t i = spanValue - 1; i >= 0; i--) { spanIndices[i+1] = spanIndices[i]; } - spanIndices[0] = position; + spanIndices[0] = {spanValue, length}; } bool FormattedValueStringBuilderImpl::isIntOrGroup(Field field) { diff --git a/deps/icu-small/source/i18n/listformatter.cpp b/deps/icu-small/source/i18n/listformatter.cpp index ab04ac9080a5ab..be0d16bc7f52b3 100644 --- a/deps/icu-small/source/i18n/listformatter.cpp +++ b/deps/icu-small/source/i18n/listformatter.cpp @@ -16,6 +16,10 @@ * created by: Umesh P. Nair */ +#include "unicode/utypes.h" + +#if !UCONFIG_NO_FORMATTING + #include "cmemory.h" #include "unicode/fpositer.h" // FieldPositionIterator #include "unicode/listformatter.h" @@ -171,21 +175,21 @@ PatternHandler* createPatternHandler( UErrorCode& status) { if (uprv_strcmp(lang, "es") == 0) { // Spanish - UnicodeString spanishYStr(TRUE, spanishY, -1); + UnicodeString spanishYStr(true, spanishY, -1); bool twoIsY = two == spanishYStr; bool endIsY = end == spanishYStr; if (twoIsY || endIsY) { - UnicodeString replacement(TRUE, spanishE, -1); + UnicodeString replacement(true, spanishE, -1); return new ContextualHandler( shouldChangeToE, twoIsY ? replacement : two, two, endIsY ? replacement : end, end, status); } - UnicodeString spanishOStr(TRUE, spanishO, -1); + UnicodeString spanishOStr(true, spanishO, -1); bool twoIsO = two == spanishOStr; bool endIsO = end == spanishOStr; if (twoIsO || endIsO) { - UnicodeString replacement(TRUE, spanishU, -1); + UnicodeString replacement(true, spanishU, -1); return new ContextualHandler( shouldChangeToU, twoIsO ? replacement : two, two, @@ -193,11 +197,11 @@ PatternHandler* createPatternHandler( } } else if (uprv_strcmp(lang, "he") == 0 || uprv_strcmp(lang, "iw") == 0) { // Hebrew - UnicodeString hebrewVavStr(TRUE, hebrewVav, -1); + UnicodeString hebrewVavStr(true, hebrewVav, -1); bool twoIsVav = two == hebrewVavStr; bool endIsVav = end == hebrewVavStr; if (twoIsVav || endIsVav) { - UnicodeString replacement(TRUE, hebrewVavDash, -1); + UnicodeString replacement(true, hebrewVavDash, -1); return new ContextualHandler( shouldChangeToVavDash, twoIsVav ? 
replacement : two, two, @@ -238,7 +242,6 @@ ListFormatInternal(const ListFormatInternal &other) : }; -#if !UCONFIG_NO_FORMATTING class FormattedListData : public FormattedValueStringBuilderImpl { public: FormattedListData(UErrorCode&) : FormattedValueStringBuilderImpl(kUndefinedField) {} @@ -248,7 +251,6 @@ class FormattedListData : public FormattedValueStringBuilderImpl { FormattedListData::~FormattedListData() = default; UPRV_FORMATTED_VALUE_SUBCLASS_AUTO_IMPL(FormattedList) -#endif static Hashtable* listPatternHash = nullptr; @@ -257,7 +259,7 @@ U_CDECL_BEGIN static UBool U_CALLCONV uprv_listformatter_cleanup() { delete listPatternHash; listPatternHash = nullptr; - return TRUE; + return true; } static void U_CALLCONV @@ -350,7 +352,6 @@ const ListFormatInternal* ListFormatter::getListFormatInternal( return result; } -#if !UCONFIG_NO_FORMATTING static const char* typeWidthToStyleString(UListFormatterType type, UListFormatterWidth width) { switch (type) { case ULISTFMT_TYPE_AND: @@ -394,7 +395,6 @@ static const char* typeWidthToStyleString(UListFormatterType type, UListFormatte return nullptr; } -#endif static const UChar solidus = 0x2F; static const UChar aliasPrefix[] = { 0x6C,0x69,0x73,0x74,0x50,0x61,0x74,0x74,0x65,0x72,0x6E,0x2F }; // "listPattern/" @@ -515,14 +515,9 @@ ListFormatter* ListFormatter::createInstance(UErrorCode& errorCode) { } ListFormatter* ListFormatter::createInstance(const Locale& locale, UErrorCode& errorCode) { -#if !UCONFIG_NO_FORMATTING return createInstance(locale, ULISTFMT_TYPE_AND, ULISTFMT_WIDTH_WIDE, errorCode); -#else - return createInstance(locale, "standard", errorCode); -#endif } -#if !UCONFIG_NO_FORMATTING ListFormatter* ListFormatter::createInstance( const Locale& locale, UListFormatterType type, UListFormatterWidth width, UErrorCode& errorCode) { const char* style = typeWidthToStyleString(type, width); @@ -532,7 +527,6 @@ ListFormatter* ListFormatter::createInstance( } return createInstance(locale, style, errorCode); } -#endif ListFormatter* ListFormatter::createInstance(const Locale& locale, const char *style, UErrorCode& errorCode) { const ListFormatInternal* listFormatInternal = getListFormatInternal(locale, style, errorCode); @@ -573,7 +567,7 @@ class FormattedListBuilder { start, {UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD}, status); - data->appendSpanIndex(0); + data->appendSpanInfo(0, start.length(), status); } } @@ -609,7 +603,7 @@ class FormattedListBuilder { next, {UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD}, status); - data->appendSpanIndex(position); + data->appendSpanInfo(position, next.length(), status); data->getStringRef().append( temp.tempSubString(offsets[1]), {UFIELD_CATEGORY_LIST, ULISTFMT_LITERAL_FIELD}, @@ -628,7 +622,7 @@ class FormattedListBuilder { next, {UFIELD_CATEGORY_LIST, ULISTFMT_ELEMENT_FIELD}, status); - data->prependSpanIndex(position); + data->prependSpanInfo(position, next.length(), status); data->getStringRef().insert( 0, temp.tempSubStringBetween(0, offsets[1]), @@ -660,7 +654,6 @@ UnicodeString& ListFormatter::format( int32_t index, int32_t &offset, UErrorCode& errorCode) const { -#if !UCONFIG_NO_FORMATTING int32_t initialOffset = appendTo.length(); auto result = formatStringsToValue(items, nItems, errorCode); UnicodeStringAppendable appendable(appendTo); @@ -671,11 +664,9 @@ UnicodeString& ListFormatter::format( result.nextPosition(cfpos, errorCode); offset = initialOffset + cfpos.getStart(); } -#endif return appendTo; } -#if !UCONFIG_NO_FORMATTING FormattedList ListFormatter::formatStringsToValue( const 
UnicodeString items[], int32_t nItems, @@ -741,7 +732,8 @@ FormattedList ListFormatter::formatStringsToValue( return FormattedList(result.data.orphan()); } } -#endif U_NAMESPACE_END + +#endif /* #if !UCONFIG_NO_FORMATTING */ diff --git a/deps/icu-small/source/i18n/measunit.cpp b/deps/icu-small/source/i18n/measunit.cpp index dab3abb5e21ff6..ece83177625513 100644 --- a/deps/icu-small/source/i18n/measunit.cpp +++ b/deps/icu-small/source/i18n/measunit.cpp @@ -33,7 +33,8 @@ UOBJECT_DEFINE_RTTI_IMPLEMENTATION(MeasureUnit) // update this code, refer to: // http://site.icu-project.org/design/formatting/measureformat/updating-measure-unit // -// Start generated code for measunit.cpp +// Start generated code +// TODO(ICU-21076): improve how this generated code is produced. // Maps from Type ID to offset in gSubTypes. static const int32_t gOffsets[] = { @@ -54,15 +55,44 @@ static const int32_t gOffsets[] = { 404, 408, 423, - 424, - 430, - 440, - 444, - 448, + 426, + 432, + 442, + 446, 450, - 484 + 452, + 486 }; +// TODO: FIX CODE GENERATION - leaving this here but commented-out to make it +// clear that we no longer want this array. We needed it for only one thing: efficient checking of "currency". +// +// static const int32_t gIndexes[] = { +// 0, +// 2, +// 7, +// 17, +// 25, +// 29, +// 29, +// 40, +// 56, +// 60, +// 69, +// 71, +// 75, +// 83, +// 105, +// 109, +// 124, +// 127, +// 133, +// 143, +// 147, +// 151, +// 153, +// 187 +// }; static const int32_t kCurrencyOffset = 5; // Must be sorted alphabetically. @@ -517,7 +547,9 @@ static const char * const gSubTypes[] = { "solar-mass", "stone", "ton", - "", + "", // TODO(ICU-21076): manual edit of what should have been generated by Java. + "percent", // TODO(ICU-21076): regenerate, deal with duplication. + "permille", // TODO(ICU-21076): regenerate, deal with duplication. "gigawatt", "horsepower", "kilowatt", @@ -580,6 +612,8 @@ static const char * const gSubTypes[] = { "teaspoon" }; +// unitPerUnitToSingleUnit no longer in use! TODO: remove from code-generation code. 
+ // Shortcuts to the base unit in order to make the default constructor fast static const int32_t kBaseTypeIdx = 16; static const int32_t kBaseSubTypeIdx = 0; @@ -2056,7 +2090,7 @@ MeasureUnit MeasureUnit::getTeaspoon() { return MeasureUnit(22, 33); } -// End generated code for measunit.cpp +// End generated code static int32_t binarySearch( const char * const * array, int32_t start, int32_t end, StringPiece key) { @@ -2105,7 +2139,9 @@ MeasureUnit &MeasureUnit::operator=(const MeasureUnit &other) { if (this == &other) { return *this; } - delete fImpl; + if (fImpl != nullptr) { + delete fImpl; + } if (other.fImpl) { ErrorCode localStatus; fImpl = new MeasureUnitImpl(other.fImpl->copy(localStatus)); @@ -2126,7 +2162,9 @@ MeasureUnit &MeasureUnit::operator=(MeasureUnit &&other) noexcept { if (this == &other) { return *this; } - delete fImpl; + if (fImpl != nullptr) { + delete fImpl; + } fImpl = other.fImpl; other.fImpl = nullptr; fTypeId = other.fTypeId; @@ -2139,8 +2177,10 @@ MeasureUnit *MeasureUnit::clone() const { } MeasureUnit::~MeasureUnit() { - delete fImpl; - fImpl = nullptr; + if (fImpl != nullptr) { + delete fImpl; + fImpl = nullptr; + } } const char *MeasureUnit::getType() const { @@ -2237,11 +2277,9 @@ StringEnumeration* MeasureUnit::getAvailableTypes(UErrorCode &errorCode) { } bool MeasureUnit::findBySubType(StringPiece subType, MeasureUnit* output) { - // Sanity checking kCurrencyOffset and final entry in gOffsets - U_ASSERT(uprv_strcmp(gTypes[kCurrencyOffset], "currency") == 0); - U_ASSERT(gOffsets[UPRV_LENGTHOF(gOffsets) - 1] == UPRV_LENGTHOF(gSubTypes)); - for (int32_t t = 0; t < UPRV_LENGTHOF(gOffsets) - 1; t++) { + // Ensure kCurrencyOffset is set correctly + U_ASSERT(uprv_strcmp(gTypes[kCurrencyOffset], "currency") == 0); // Skip currency units if (t == kCurrencyOffset) { continue; @@ -2298,8 +2336,10 @@ void MeasureUnit::initCurrency(StringPiece isoCurrency) { void MeasureUnit::setTo(int32_t typeId, int32_t subTypeId) { fTypeId = typeId; fSubTypeId = subTypeId; - delete fImpl; - fImpl = nullptr; + if (fImpl != nullptr) { + delete fImpl; + fImpl = nullptr; + } } int32_t MeasureUnit::getOffset() const { diff --git a/deps/icu-small/source/i18n/number_skeletons.cpp b/deps/icu-small/source/i18n/number_skeletons.cpp index e6d94d27b2ba5b..028525a589db91 100644 --- a/deps/icu-small/source/i18n/number_skeletons.cpp +++ b/deps/icu-small/source/i18n/number_skeletons.cpp @@ -732,6 +732,7 @@ skeleton::parseStem(const StringSegment& segment, const UCharsTrie& stemTrie, Se case STEM_CURRENCY: CHECK_NULL(seen, unit, status); + CHECK_NULL(seen, perUnit, status); return STATE_CURRENCY_UNIT; case STEM_INTEGER_WIDTH: @@ -1500,32 +1501,33 @@ bool GeneratorHelpers::notation(const MacroProps& macros, UnicodeString& sb, UEr } bool GeneratorHelpers::unit(const MacroProps& macros, UnicodeString& sb, UErrorCode& status) { - if (utils::unitIsCurrency(macros.unit)) { + MeasureUnit unit = macros.unit; + if (!utils::unitIsBaseUnit(macros.perUnit)) { + if (utils::unitIsCurrency(macros.unit) || utils::unitIsCurrency(macros.perUnit)) { + status = U_UNSUPPORTED_ERROR; + return false; + } + unit = unit.product(macros.perUnit.reciprocal(status), status); + } + + if (utils::unitIsCurrency(unit)) { sb.append(u"currency/", -1); - CurrencyUnit currency(macros.unit, status); + CurrencyUnit currency(unit, status); if (U_FAILURE(status)) { return false; } blueprint_helpers::generateCurrencyOption(currency, sb, status); return true; - } else if (utils::unitIsBaseUnit(macros.unit)) { + } else if 
(utils::unitIsBaseUnit(unit)) { // Default value is not shown in normalized form return false; - } else if (utils::unitIsPercent(macros.unit)) { + } else if (utils::unitIsPercent(unit)) { sb.append(u"percent", -1); return true; - } else if (utils::unitIsPermille(macros.unit)) { + } else if (utils::unitIsPermille(unit)) { sb.append(u"permille", -1); return true; } else { - MeasureUnit unit = macros.unit; - if (utils::unitIsCurrency(macros.perUnit)) { - status = U_UNSUPPORTED_ERROR; - return false; - } - if (!utils::unitIsBaseUnit(macros.perUnit)) { - unit = unit.product(macros.perUnit.reciprocal(status), status); - } sb.append(u"unit/", -1); sb.append(unit.getIdentifier()); return true; diff --git a/deps/icu-small/source/i18n/unicode/dtitvfmt.h b/deps/icu-small/source/i18n/unicode/dtitvfmt.h index 3d20d8e9c44990..4a1ab801a04c9d 100644 --- a/deps/icu-small/source/i18n/unicode/dtitvfmt.h +++ b/deps/icu-small/source/i18n/unicode/dtitvfmt.h @@ -1037,6 +1037,17 @@ class U_I18N_API DateIntervalFormat : public Format { UBool suppressDayPeriodField, UnicodeString& adjustedIntervalPattern); + /** + * Does the same thing as UnicodeString::findAndReplace(), except that it won't perform + * the substitution inside quoted literal text. + * @param targetString The string to perform the find-replace operation on. + * @param strToReplace The string to search for and replace in the target string. + * @param strToReplaceWith The string to substitute in wherever `stringToReplace` was found. + */ + static void U_EXPORT2 findReplaceInPattern(UnicodeString& targetString, + const UnicodeString& strToReplace, + const UnicodeString& strToReplaceWith); + /** * Concat a single date pattern with a time interval pattern, * set it into the intervalPatterns, while field is time field. diff --git a/deps/icu-small/source/i18n/unicode/listformatter.h b/deps/icu-small/source/i18n/unicode/listformatter.h index a969a8744dcf58..eddb5dab6701b3 100644 --- a/deps/icu-small/source/i18n/unicode/listformatter.h +++ b/deps/icu-small/source/i18n/unicode/listformatter.h @@ -23,6 +23,8 @@ #if U_SHOW_CPLUSPLUS_API +#if !UCONFIG_NO_FORMATTING + #include "unicode/unistr.h" #include "unicode/locid.h" #include "unicode/formattedvalue.h" @@ -65,7 +67,6 @@ struct ListFormatData : public UMemory { */ -#if !UCONFIG_NO_FORMATTING /** * An immutable class containing the result of a list formatting operation. * @@ -135,7 +136,6 @@ class U_I18N_API FormattedList : public UMemory, public FormattedValue { : fData(nullptr), fErrorCode(errorCode) {} friend class ListFormatter; }; -#endif // !UCONFIG_NO_FORMATTING /** @@ -185,8 +185,6 @@ class U_I18N_API ListFormatter : public UObject{ */ static ListFormatter* createInstance(const Locale& locale, UErrorCode& errorCode); -#ifndef U_HIDE_DRAFT_API -#if !UCONFIG_NO_FORMATTING /** * Creates a ListFormatter for the given locale, list type, and style. * @@ -195,12 +193,10 @@ class U_I18N_API ListFormatter : public UObject{ * @param width The width of formatting to use. * @param errorCode ICU error code, set if no data available for the given locale. * @return A ListFormatter object created from internal data derived from CLDR data. 
- * @draft ICU 67 + * @stable ICU 67 */ static ListFormatter* createInstance( const Locale& locale, UListFormatterType type, UListFormatterWidth width, UErrorCode& errorCode); -#endif /* !UCONFIG_NO_FORMATTING */ -#endif /* U_HIDE_DRAFT_API */ #ifndef U_HIDE_INTERNAL_API /** @@ -239,7 +235,6 @@ class U_I18N_API ListFormatter : public UObject{ UnicodeString& format(const UnicodeString items[], int32_t n_items, UnicodeString& appendTo, UErrorCode& errorCode) const; -#if !UCONFIG_NO_FORMATTING /** * Formats a list of strings to a FormattedList, which exposes field * position information. The FormattedList contains more information than @@ -255,7 +250,6 @@ class U_I18N_API ListFormatter : public UObject{ const UnicodeString items[], int32_t n_items, UErrorCode& errorCode) const; -#endif // !UCONFIG_NO_FORMATTING #ifndef U_HIDE_INTERNAL_API /** @@ -296,6 +290,8 @@ class U_I18N_API ListFormatter : public UObject{ U_NAMESPACE_END +#endif /* #if !UCONFIG_NO_FORMATTING */ + #endif /* U_SHOW_CPLUSPLUS_API */ #endif // __LISTFORMATTER_H__ diff --git a/deps/icu-small/source/i18n/unicode/measunit.h b/deps/icu-small/source/i18n/unicode/measunit.h index b9f732ae99011d..ed8773c7710f3e 100644 --- a/deps/icu-small/source/i18n/unicode/measunit.h +++ b/deps/icu-small/source/i18n/unicode/measunit.h @@ -3519,6 +3519,7 @@ class U_I18N_API MeasureUnit: public UObject { */ static MeasureUnit getTeaspoon(); + // End generated createXXX methods protected: diff --git a/deps/icu-small/source/i18n/unicode/ulistformatter.h b/deps/icu-small/source/i18n/unicode/ulistformatter.h index 3dfa9f2d56171f..28a1e580370006 100644 --- a/deps/icu-small/source/i18n/unicode/ulistformatter.h +++ b/deps/icu-small/source/i18n/unicode/ulistformatter.h @@ -62,17 +62,16 @@ typedef enum UListFormatterField { ULISTFMT_ELEMENT_FIELD } UListFormatterField; -#ifndef U_HIDE_DRAFT_API /** * Type of meaning expressed by the list. * - * @draft ICU 67 + * @stable ICU 67 */ typedef enum UListFormatterType { /** * Conjunction formatting, e.g. "Alice, Bob, Charlie, and Delta". * - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_TYPE_AND, @@ -80,14 +79,14 @@ typedef enum UListFormatterType { * Disjunction (or alternative, or simply one of) formatting, e.g. * "Alice, Bob, Charlie, or Delta". * - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_TYPE_OR, /** * Formatting of a list of values with units, e.g. "5 pounds, 12 ounces". * - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_TYPE_UNITS } UListFormatterType; @@ -95,29 +94,28 @@ typedef enum UListFormatterType { /** * Verbosity level of the list patterns. * - * @draft ICU 67 + * @stable ICU 67 */ typedef enum UListFormatterWidth { /** * Use list formatting with full words (no abbreviations) when possible. * - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_WIDTH_WIDE, /** * Use list formatting of typical length. - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_WIDTH_SHORT, /** * Use list formatting of the shortest possible length. - * @draft ICU 67 + * @stable ICU 67 */ ULISTFMT_WIDTH_NARROW, } UListFormatterWidth; -#endif /* U_HIDE_DRAFT_API */ /** * Open a new UListFormatter object using the rules for a given locale. @@ -141,7 +139,6 @@ U_CAPI UListFormatter* U_EXPORT2 ulistfmt_open(const char* locale, UErrorCode* status); -#ifndef U_HIDE_DRAFT_API /** * Open a new UListFormatter object appropriate for the given locale, list type, * and style. @@ -162,12 +159,11 @@ ulistfmt_open(const char* locale, * @return * A pointer to a UListFormatter object for the specified locale, * or NULL if an error occurred. 
- * @draft ICU 67 + * @stable ICU 67 */ U_CAPI UListFormatter* U_EXPORT2 ulistfmt_openForType(const char* locale, UListFormatterType type, UListFormatterWidth width, UErrorCode* status); -#endif /* U_HIDE_DRAFT_API */ /** * Close a UListFormatter object. Once closed it may no longer be used. diff --git a/deps/npm/.npmignore b/deps/npm/.npmignore index aacaa8f822e7ce..b90057457893ba 100644 --- a/deps/npm/.npmignore +++ b/deps/npm/.npmignore @@ -12,6 +12,7 @@ node_modules/.bin node_modules/npm-registry-mock /npmrc /release/ +/coverage/ # don't need these in the npm package. html/*.png diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS index ff19da4cf80a63..0a9c02f8b7d2b7 100644 --- a/deps/npm/AUTHORS +++ b/deps/npm/AUTHORS @@ -746,3 +746,4 @@ fuhao.xu marsonya <16393876+marsonya@users.noreply.github.com> Jeff Griffiths Michael Garvin +Gar diff --git a/deps/npm/CHANGELOG.md b/deps/npm/CHANGELOG.md index 6688fb0f5bd47b..df3e2ba22f31ad 100644 --- a/deps/npm/CHANGELOG.md +++ b/deps/npm/CHANGELOG.md @@ -1,3 +1,98 @@ +## v7.4.3 (2021-01-21) + +### DOCUMENTATION + +* [`ec1f06d06`](https://github.com/npm/cli/commit/ec1f06d06447a29c74bee063cff103ede7a2111b) + [#2498](https://github.com/npm/cli/issues/2498) + docs(npm): update `npm` docs + ([@darcyclarke](https://github.com/darcyclarke)) + +### DEPENDENCIES +* [`bc23284cd`](https://github.com/npm/cli/commit/bc23284cd5c4cc4532875aff14df94213727a509) + [#2511](https://github.com/npm/cli/issues/2511) + remove coverage files + ([@ruyadorno](https://github.com/ruyadorno)) +* [`fcbc676b8`](https://github.com/npm/cli/commit/fcbc676b88e1b7c8d01a3799683cd388a82c44d6) + `pacote@11.2.3` +* [`ebd3a24ff`](https://github.com/npm/cli/commit/ebd3a24ff8381f2def306136b745d1615fd6139f) + `@npmcli/arborist@2.0.6` + * Preserve git+https auth when provided + +## v7.4.2 (2021-01-15) + +### DEPENDENCIES + +* [`e5ce6bbba`](https://github.com/npm/cli/commit/e5ce6bbbad82b85c8e74a4558503513e4f337476) + * `@npmcli/arborist@2.0.5` + * fix creating missing dirs when using --prefix and --global + * fix omit types of deps in global installs + * fix prioritizing npm-shrinkwrap.json over package-lock.json + * better cache system for packuments + * improves audit performance + +## v7.4.1 (2021-01-14) + +### BUG FIXES + +* [`23df96d33`](https://github.com/npm/cli/commit/23df96d3394ba0b69a37f416d7f0c26bb9354975) + [#2486](https://github.com/npm/cli/issues/2486) + npm link no longer deletes entire project when global prefix is a symlink + ([@nlf](https://github.com/nlf)) + +### DOCUMENTATION + +* [`7dd0dfc59`](https://github.com/npm/cli/commit/7dd0dfc59c861e7d3e30a86a8e6db10872fc6b44) + [#2459](https://github.com/npm/cli/issues/2459) + fix(docs): clean up `npm start` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`307b3bd9f`](https://github.com/npm/cli/commit/307b3bd9f90e96fcc8805a1d5ddec80787a3d3a7) + [#2460](https://github.com/npm/cli/issues/2460) + fix(docs): clean up `npm stop` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`23f01b739`](https://github.com/npm/cli/commit/23f01b739d7a01a7dc3672322e14eb76ff33d712) + [#2462](https://github.com/npm/cli/issues/2462) + fix(docs): clean up `npm test` docs 
+ ([@wraithgar](https://github.com/wraithgar)) +* [`4b43656fc`](https://github.com/npm/cli/commit/4b43656fc608783a29ccf8495dc305459abc5cc7) + [#2463](https://github.com/npm/cli/issues/2463) + fix(docs): clean up `npm prefix` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`1135539ba`](https://github.com/npm/cli/commit/1135539bac9f98bb1a5d5ed05227a8ecd19493d3) + [`a07bb8e69`](https://github.com/npm/cli/commit/a07bb8e692a85b55d51850534c09fa58224c2285) + [`9b55b798e`](https://github.com/npm/cli/commit/9b55b798ed8f2b9be7b3199a1bfc23b1cd89c4cd) + [`cd5eeaaa0`](https://github.com/npm/cli/commit/cd5eeaaa08eabb505b65747a428c3c59159663dc) + [`6df69ce10`](https://github.com/npm/cli/commit/6df69ce107912f8429665eb851825d2acebc8575) + [`dc6b2a8b0`](https://github.com/npm/cli/commit/dc6b2a8b032d118be3566ce0fa7c67c171c8d2cb) + [`a3c127446`](https://github.com/npm/cli/commit/a3c1274460e16d1edbdca6a0cee86ef313fdd961) + [#2464](https://github.com/npm/cli/issues/2464) + fix(docs): clean up `npm uninstall` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`cfdcf32fd`](https://github.com/npm/cli/commit/cfdcf32fd7628501712b8cad4a541c6b8e7b66bc) + [#2474](https://github.com/npm/cli/issues/2474) + fix(docs): clean up `npm unpublish` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`acd5b062a`](https://github.com/npm/cli/commit/acd5b062a811fcd98849df908ce26855823ca671) + [#2475](https://github.com/npm/cli/issues/2475) + fix(docs): update `package-lock.json` docs + ([@isaacs](https://github.com/isaacs)) +* [`b0b0edf6d`](https://github.com/npm/cli/commit/b0b0edf6de1678de7f4a000700c88daa5f7194ef) + [#2482](https://github.com/npm/cli/issues/2482) + fix(docs): clean up `npm token` docs + ([@wraithgar](https://github.com/wraithgar)) +* [`35559201a`](https://github.com/npm/cli/commit/35559201a4a0a5b111ce58d6824e5b4030eb4496) + [#2487](https://github.com/npm/cli/issues/2487) + fix(docs): clean up `npm search` docs + ([@wraithgar](https://github.com/wraithgar)) + +### DEPENDENCIES + +* [`ea8c02169`](https://github.com/npm/cli/commit/ea8c02169cfbf0484d67db7c0e7a6ec8aecb7210) + `@npmcli/arborist@2.0.5` +* [`fb6f2c313`](https://github.com/npm/cli/commit/fb6f2c313d1d9770cc7d02a3900c7945df3cb661) + `pacote@11.2.1` +* [`c549b7657`](https://github.com/npm/cli/commit/c549b76573b1835a63e1e5898e9c16860079d84e) + `make-fetch-happen@8.0.13` + ## v7.4.0 (2021-01-07) ### FEATURES diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md index 2c0a8f58047ca2..7ad950a6ba99e8 100644 --- a/deps/npm/docs/content/commands/npm-audit.md +++ b/deps/npm/docs/content/commands/npm-audit.md @@ -190,5 +190,4 @@ $ npm audit --audit-level=moderate ### See Also * [npm install](/commands/npm-install) -* [package-locks](/configuring-npm/package-locks) * [config](/using-npm/config) diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 0d874f4f27a50f..925ba8de2e5b7e 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ 
b/deps/npm/docs/content/commands/npm-ci.md @@ -68,4 +68,4 @@ cache: ### See Also * [npm install](/commands/npm-install) -* [package-locks](/configuring-npm/package-locks) +* [package-lock.json](/configuring-npm/package-lock-json) diff --git a/deps/npm/docs/content/commands/npm-prefix.md b/deps/npm/docs/content/commands/npm-prefix.md index 6894cb5c9298cc..9c33bb18901ef4 100644 --- a/deps/npm/docs/content/commands/npm-prefix.md +++ b/deps/npm/docs/content/commands/npm-prefix.md @@ -12,13 +12,25 @@ npm prefix [-g] ### Description -Print the local prefix to standard out. This is the closest parent directory +Print the local prefix to standard output. This is the closest parent directory to contain a `package.json` file or `node_modules` directory, unless `-g` is also specified. If `-g` is specified, this will be the value of the global prefix. See [`npm config`](/commands/npm-config) for more detail. +### Example + +```bash +npm prefix +/usr/local/projects/foo +``` + +```bash +npm prefix -g +/usr/local +``` + ### See Also * [npm root](/commands/npm-root) diff --git a/deps/npm/docs/content/commands/npm-search.md b/deps/npm/docs/content/commands/npm-search.md index 991bfe9e131f22..33864d472d4a20 100644 --- a/deps/npm/docs/content/commands/npm-search.md +++ b/deps/npm/docs/content/commands/npm-search.md @@ -16,35 +16,42 @@ aliases: s, se, find Search the registry for packages matching the search terms. `npm search` performs a linear, incremental, lexically-ordered search through package -metadata for all files in the registry. If color is enabled, it will further -highlight the matches in the results. +metadata for all files in the registry. If your terminal has color +support, it will further highlight the matches in the results. This can +be disabled with the config item `color` -Additionally, using the `--searchopts` and `--searchexclude` options paired with -more search terms will respectively include and exclude further patterns. The -main difference between `--searchopts` and the standard search terms is that the -former does not highlight results in the output and can be used for more -fine-grained filtering. Additionally, both of these can be added to `.npmrc` for -default search filtering behavior. +Additionally, using the `--searchopts` and `--searchexclude` options +paired with more search terms will include and exclude further patterns. +The main difference between `--searchopts` and the standard search terms +is that the former does not highlight results in the output and you can +use them more fine-grained filtering. Additionally, you can add both of +these to your config to change default search filtering behavior. Search also allows targeting of maintainers in search results, by prefixing their npm username with `=`. -If a term starts with `/`, then it's interpreted as a regular expression and -supports standard JavaScript RegExp syntax. A trailing `/` will be ignored in -this case. (Note that many regular expression characters must be escaped or -quoted in most shells.) - -### A Note on caching +If a term starts with `/`, then it's interpreted as a regular expression +and supports standard JavaScript RegExp syntax. In this case search will +ignore a trailing `/` . (Note you must escape or quote many regular +expression characters in most shells.) ### Configuration +All of the following can be defined in a `.npmrc` file, or passed as +parameters to the cli prefixed with `--` (e.g. 
`--json`) + #### description * Default: true * Type: Boolean -Used as `--no-description`, disables search matching in package descriptions and -suppresses display of that field in results. +#### color + + * Default: true + * Type: Boolean + +Used as `--no-color`, disables color highlighting of matches in the +results. #### json @@ -66,9 +73,9 @@ Output search results as lines with tab-separated columns. * Type: Boolean Display full package descriptions and other long text across multiple -lines. When disabled (default) search results are truncated to fit -neatly on a single line. Modules with extremely long names will -fall on multiple lines. +lines. When disabled (which is the default) the output will +truncate search results to fit neatly on a single line. Modules with +extremely long names will fall on multiple lines. #### searchopts @@ -84,23 +91,37 @@ Space-separated options that are always passed to search. Space-separated options that limit the results from search. -#### searchstaleness - -* Default: 900 (15 minutes) -* Type: Number - -The age of the cache, in seconds, before another registry request is made. - #### registry * Default: https://registry.npmjs.org/ * Type: url -Search the specified registry for modules. If you have configured npm to point -to a different default registry, such as your internal private module -repository, `npm search` will default to that registry when searching. Pass a -different registry url such as the default above in order to override this -setting. +Search the specified registry for modules. If you have configured npm to +point to a different default registry (such as your internal private +module repository), `npm search` will also default to that registry when +searching. + +### A note on caching + +The npm cli caches search results with the same terms and options +locally in its cache. You can use the following to change how and when +the cli uses this cache. See [`npm cache`](/commands/npm-cache) for more +on how the cache works. + +#### prefer-online + +Forced staleness checks for cached searches, making the cli look for +updates immediately even for fresh search results. + +#### prefer-offline + +Bypasses staleness checks for cached. Missing data will still be +requested from the server. To force full offline mode, use `offline`. + +#### offline + +Forces full offline mode. Any searches not locally cached will result in +an error. ### See Also @@ -108,3 +129,5 @@ setting. * [npm config](/commands/npm-config) * [npmrc](/configuring-npm/npmrc) * [npm view](/commands/npm-view) +* [npm cache](/commands/npm-cache) +* https://npm.im/npm-registry-fetch diff --git a/deps/npm/docs/content/commands/npm-shrinkwrap.md b/deps/npm/docs/content/commands/npm-shrinkwrap.md index 05d5706b9f9f1f..dce50b7843bc3e 100644 --- a/deps/npm/docs/content/commands/npm-shrinkwrap.md +++ b/deps/npm/docs/content/commands/npm-shrinkwrap.md @@ -13,18 +13,18 @@ npm shrinkwrap ### Description This command repurposes `package-lock.json` into a publishable -`npm-shrinkwrap.json` or simply creates a new one. The file created and updated -by this command will then take precedence over any other existing or future -`package-lock.json` files. For a detailed explanation of the design and purpose -of package locks in npm, see [package-locks](/configuring-npm/package-locks). +`npm-shrinkwrap.json` or simply creates a new one. The file created and +updated by this command will then take precedence over any other existing +or future `package-lock.json` files. 
For a detailed explanation of the +design and purpose of package locks in npm, see +[package-lock-json](/configuring-npm/package-lock-json). ### See Also * [npm install](/commands/npm-install) * [npm run-script](/commands/npm-run-script) * [npm scripts](/using-npm/scripts) -* [package.js](/configuring-npm/package-json) -* [package-locks](/configuring-npm/package-locks) +* [package.json](/configuring-npm/package-json) * [package-lock.json](/configuring-npm/package-lock-json) -* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) +* [npm-shrinkwrap.json](/configuring-npm/npm-shrinkwrap-json) * [npm ls](/commands/npm-ls) diff --git a/deps/npm/docs/content/commands/npm-start.md b/deps/npm/docs/content/commands/npm-start.md index 8083bf8b7818e4..4791719b592f66 100644 --- a/deps/npm/docs/content/commands/npm-start.md +++ b/deps/npm/docs/content/commands/npm-start.md @@ -12,13 +12,39 @@ npm start [-- ] ### Description -This runs an arbitrary command specified in the package's `"start"` property of -its `"scripts"` object. If no `"start"` property is specified on the -`"scripts"` object, it will run `node server.js`. +This runs a predefined command specified in the `"start"` property of +a package's `"scripts"` object. + +If the `"scripts"` object does not define a `"start"` property, npm +will run `node server.js`. + +Note that this is different from the default node behavior of running +the file specified in a package's `"main"` attribute when evoking with +`node .` As of [`npm@2.0.0`](https://blog.npmjs.org/post/98131109725/npm-2-0-0), you can use custom arguments when executing scripts. Refer to [`npm run-script`](/commands/npm-run-script) for more details. +### Example + +```json +{ + "scripts": { + "start": "node foo.js" + } +} +``` + +```bash +npm start + +> npm@x.x.x start +> node foo.js + +(foo.js output would be here) + +``` + ### See Also * [npm run-script](/commands/npm-run-script) diff --git a/deps/npm/docs/content/commands/npm-stop.md b/deps/npm/docs/content/commands/npm-stop.md index 17156c97c40430..9e8f9be360fd9f 100644 --- a/deps/npm/docs/content/commands/npm-stop.md +++ b/deps/npm/docs/content/commands/npm-stop.md @@ -12,7 +12,31 @@ npm stop [-- ] ### Description -This runs a package's "stop" script, if one was provided. +This runs a predefined command specified in the "stop" property of a +package's "scripts" object. + +Unlike with [npm start](/commands/npm-start), there is no default script +that will run if the `"stop"` property is not defined. + +### Example + +```json +{ + "scripts": { + "stop": "node bar.js" + } +} +``` + +```bash +npm stop + +> npm@x.x.x stop +> node bar.js + +(bar.js output would be here) + +``` ### See Also diff --git a/deps/npm/docs/content/commands/npm-test.md b/deps/npm/docs/content/commands/npm-test.md index b8f25d520c8217..2cc6a2e38b0f1d 100644 --- a/deps/npm/docs/content/commands/npm-test.md +++ b/deps/npm/docs/content/commands/npm-test.md @@ -14,7 +14,28 @@ aliases: t, tst ### Description -This runs a package's "test" script, if one was provided. +This runs a predefined command specified in the `"test"` property of +a package's `"scripts"` object. 
+ +### Example + +```json +{ + "scripts": { + "test": "node test.js" + } +} +``` + +```bash +npm test +> npm@x.x.x test +> node test.js + +(test.js output would be here) +``` + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-token.md b/deps/npm/docs/content/commands/npm-token.md index 3716a0990299b1..652079453702e7 100644 --- a/deps/npm/docs/content/commands/npm-token.md +++ b/deps/npm/docs/content/commands/npm-token.md @@ -16,8 +16,8 @@ description: Manage your authentication tokens This lets you list, create and revoke authentication tokens. * `npm token list`: - Shows a table of all active authentication tokens. You can request this as - JSON with `--json` or tab-separated values with `--parseable`. + Shows a table of all active authentication tokens. You can request + this as JSON with `--json` or tab-separated values with `--parseable`. ```bash +--------+---------+------------+----------+----------------+ @@ -40,10 +40,17 @@ This lets you list, create and revoke authentication tokens. ``` * `npm token create [--read-only] [--cidr=]`: - Create a new authentication token. It can be `--read-only` or accept a list of - [CIDR](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) ranges to - limit use of this token to. This will prompt you for your password, and, if you have - two-factor authentication enabled, an otp. + Create a new authentication token. It can be `--read-only`, or accept + a list of + [CIDR](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + ranges with which to limit use of this token. This will prompt you for + your password, and, if you have two-factor authentication enabled, an + otp. + + Currently, the cli can not generate automation tokens. Please refer to + the [docs + website](https://docs.npmjs.com/creating-and-viewing-access-tokens) + for more information on generating automation tokens. ```bash +----------------+--------------------------------------+ @@ -58,7 +65,9 @@ This lets you list, create and revoke authentication tokens. ``` * `npm token revoke `: - This removes an authentication token, making it immediately unusable. This can accept - both complete tokens (as you get back from `npm token create` and will - find in your `.npmrc`) and ids as seen in the `npm token list` output. - This will NOT accept the truncated token found in `npm token list` output. + Immediately removes an authentication token from the registry. You + will no longer be able to use it. This can accept both complete + tokens (such as those you get back from `npm token create`, and those + found in your `.npmrc`), and ids as seen in the parseable or json + output of `npm token list`. This will NOT accept the truncated token + found in the normal `npm token list` output. diff --git a/deps/npm/docs/content/commands/npm-uninstall.md b/deps/npm/docs/content/commands/npm-uninstall.md index fe3c871138c19f..258431cbd9f944 100644 --- a/deps/npm/docs/content/commands/npm-uninstall.md +++ b/deps/npm/docs/content/commands/npm-uninstall.md @@ -7,7 +7,7 @@ description: Remove a package ### Synopsis ```bash -npm uninstall [<@scope>/][@]... [-S|--save|-D|--save-dev|-O|--save-optional|--no-save] +npm uninstall [<@scope>/][@]... [-S|--save|--no-save] aliases: remove, rm, r, un, unlink ``` @@ -17,40 +17,43 @@ aliases: remove, rm, r, un, unlink This uninstalls a package, completely removing everything npm installed on its behalf. 
-Example: +It also removes the package from the `dependencies`, `devDependencies`, +`optionalDependencies`, and `peerDependencies` objects in your +`package.json`. -```bash -npm uninstall sax -``` +Futher, if you have an `npm-shrinkwrap.json` or `package-lock.json`, npm +will update those files as well. -In global mode (ie, with `-g` or `--global` appended to the command), -it uninstalls the current package context as a global package. - -`npm uninstall` takes 3 exclusive, optional flags which save or update -the package version in your main package.json: +`--no-save` will tell npm not to remove the package from your +`package.json`, `npm-shrinkwrap.json`, or `package-lock.json` files. -* `-S, --save`: Package will be removed from your `dependencies`. +`--save` or `-S` will tell npm to remove the package from your +`package.json`, `npm-shrinkwrap.json`, and `package-lock.json` files. +This is the default, but you may need to use this if you have for +instance `save=false` in your `npmrc` file -* `-D, --save-dev`: Package will be removed from your `devDependencies`. +In global mode (ie, with `-g` or `--global` appended to the command), +it uninstalls the current package context as a global package. +`--no-save` is ignored in this case. -* `-O, --save-optional`: Package will be removed from your `optionalDependencies`. +Scope is optional and follows the usual rules for [`scope`](/using-npm/scope). -* `--no-save`: Package will not be removed from your `package.json` file. +### Examples -Further, if you have an `npm-shrinkwrap.json` then it will be updated as -well. +```bash +npm uninstall sax +``` -Scope is optional and follows the usual rules for [`scope`](/using-npm/scope). +`sax` will no longer be in your `package.json`, `npm-shrinkwrap.json`, or +`package-lock.json` files. -Examples: ```bash -npm uninstall sax --save -npm uninstall @myorg/privatepackage --save -npm uninstall node-tap --save-dev -npm uninstall dtrace-provider --save-optional npm uninstall lodash --no-save ``` +`lodash` will not be removed from your `package.json`, +`npm-shrinkwrap.json`, or `package-lock.json` files. + ### See Also * [npm prune](/commands/npm-prune) diff --git a/deps/npm/docs/content/commands/npm-unpublish.md b/deps/npm/docs/content/commands/npm-unpublish.md index b39a7c09eb6e4e..e9d6e9045c6f9e 100644 --- a/deps/npm/docs/content/commands/npm-unpublish.md +++ b/deps/npm/docs/content/commands/npm-unpublish.md @@ -6,6 +6,10 @@ description: Remove a package from the registry ### Synopsis +To learn more about how the npm registry treats unpublish, see our unpublish policies + #### Unpublishing a single version of a package ```bash @@ -20,22 +24,26 @@ npm unpublish [<@scope>/] --force ### Warning -Consider using the `deprecate` command instead, if your intent is to encourage users to upgrade, or if you no longer want to maintain a package. +Consider using the [`deprecate`](/commands/npm-deprecate) command instead, +if your intent is to encourage users to upgrade, or if you no longer +want to maintain a package. ### Description -This removes a package version from the registry, deleting its -entry and removing the tarball. - -If no version is specified, or if all versions are removed then -the root package entry is removed from the registry entirely. +This removes a package version from the registry, deleting its entry and +removing the tarball. -Even if a package version is unpublished, that specific name and -version combination can never be reused. 
In order to publish the -package again, a new version number must be used. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed. +The npm registry will return an error if you are not [logged +in](/commands/npm-login). -To learn more about how unpublish is treated on the npm registry, see our unpublish policies. +If you do not specify a version or if you remove all of a package's +versions then the registry will remove the root package entry entirely. +Even if you unpublish a package version, that specific name and version +combination can never be reused. In order to publish the package again, +you must use a new version number. If you unpublish the entire package, +you may not publish any new versions of that package until 24 hours have +passed. ### See Also @@ -44,3 +52,4 @@ To learn more about how unpublish is treated on the npm registry, see our -* archived web: - +When you find issues, please report them: Be sure to follow the template and bug reporting guidelines. @@ -159,13 +152,6 @@ Or suggest formal RFC proposals: * -### Author - -[Isaac Z. Schlueter](http://blog.izs.me/) :: -[isaacs](https://github.com/isaacs/) :: -[@izs](https://twitter.com/izs) :: - - ### See Also * [npm help](/commands/npm-help) * [package.json](/configuring-npm/package-json) diff --git a/deps/npm/docs/content/configuring-npm/install.md b/deps/npm/docs/content/configuring-npm/install.md index 3fc91045c6971f..43fce4868ba973 100644 --- a/deps/npm/docs/content/configuring-npm/install.md +++ b/deps/npm/docs/content/configuring-npm/install.md @@ -6,17 +6,27 @@ description: Download and install node and npm ### Description -To publish and install packages to and from the public npm registry, you must install Node.js and the npm command line interface using either a Node version manager or a Node installer. **We strongly recommend using a Node version manager to install Node.js and npm.** We do not recommend using a Node installer, since the Node installation process installs npm in a directory with local permissions and can cause permissions errors when you run npm packages globally. +To publish and install packages to and from the public npm registry, you +must install Node.js and the npm command line interface using either a Node +version manager or a Node installer. **We strongly recommend using a Node +version manager to install Node.js and npm.** We do not recommend using a +Node installer, since the Node installation process installs npm in a +directory with local permissions and can cause permissions errors when you +run npm packages globally. 
### Overview -- [Checking your version of npm and Node.js](#checking-your-version-of-npm-and-node-js) -- [Using a Node version manager to install Node.js and npm](#using-a-node-version-manager-to-install-node-js-and-npm) -- [Using a Node installer to install Node.js and npm](#using-a-node-installer-to-install-node-js-and-npm) +- [Checking your version of npm and + Node.js](#checking-your-version-of-npm-and-node-js) +- [Using a Node version manager to install Node.js and + npm](#using-a-node-version-manager-to-install-node-js-and-npm) +- [Using a Node installer to install Node.js and + npm](#using-a-node-installer-to-install-node-js-and-npm) ### Checking your version of npm and Node.js -To see if you already have Node.js and npm installed and check the installed version, run the following commands: +To see if you already have Node.js and npm installed and check the +installed version, run the following commands: ``` node -v @@ -25,7 +35,10 @@ npm -v ### Using a Node version manager to install Node.js and npm -Node version managers allow you to install and switch between multiple versions of Node.js and npm on your system so you can test your applications on multiple versions of npm to ensure they work for users on different versions. +Node version managers allow you to install and switch between multiple +versions of Node.js and npm on your system so you can test your +applications on multiple versions of npm to ensure they work for users on +different versions. #### OSX or Linux Node version managers @@ -39,28 +52,36 @@ Node version managers allow you to install and switch between multiple versions ### Using a Node installer to install Node.js and npm -If you are unable to use a Node version manager, you can use a Node installer to install both Node.js and npm on your system. +If you are unable to use a Node version manager, you can use a Node +installer to install both Node.js and npm on your system. * [Node.js installer](https://nodejs.org/en/download/) -* [NodeSource installer](https://github.com/nodesource/distributions). If you use Linux, we recommend that you use a NodeSource installer. +* [NodeSource installer](https://github.com/nodesource/distributions). If + you use Linux, we recommend that you use a NodeSource installer. #### OS X or Windows Node installers -If you're using OS X or Windows, use one of the installers from the [Node.js download page](https://nodejs.org/en/download/). Be sure to install the version labeled **LTS**. Other versions have not yet been tested with npm. +If you're using OS X or Windows, use one of the installers from the +[Node.js download page](https://nodejs.org/en/download/). Be sure to +install the version labeled **LTS**. Other versions have not yet been +tested with npm. #### Linux or other operating systems Node installers -If you're using Linux or another operating system, use one of the following installers: +If you're using Linux or another operating system, use one of the following +installers: -- [NodeSource installer](https://github.com/nodesource/distributions) (recommended) -- One of the installers on the [Node.js download page](https://nodejs.org/en/download/) - -Or see [this page](https://nodejs.org/en/download/package-manager/) to install npm for Linux in the way many Linux developers prefer. 
+- [NodeSource installer](https://github.com/nodesource/distributions) + (recommended) +- One of the installers on the [Node.js download + page](https://nodejs.org/en/download/) +Or see [this page](https://nodejs.org/en/download/package-manager/) to +install npm for Linux in the way many Linux developers prefer. #### Less-common operating systems -For more information on installing Node.js on a variety of operating systems, see [this page][pkg-mgr]. - +For more information on installing Node.js on a variety of operating +systems, see [this page][pkg-mgr]. [pkg-mgr]: https://nodejs.org/en/download/package-manager/ diff --git a/deps/npm/docs/content/configuring-npm/shrinkwrap-json.md b/deps/npm/docs/content/configuring-npm/npm-shrinkwrap-json.md similarity index 61% rename from deps/npm/docs/content/configuring-npm/shrinkwrap-json.md rename to deps/npm/docs/content/configuring-npm/npm-shrinkwrap-json.md index 7fd5baa67fb837..ab0a2410793809 100644 --- a/deps/npm/docs/content/configuring-npm/shrinkwrap-json.md +++ b/deps/npm/docs/content/configuring-npm/npm-shrinkwrap-json.md @@ -1,26 +1,30 @@ --- -title: shrinkwrap.json +title: npm-shrinkwrap.json section: 5 description: A publishable lockfile --- ### Description -`npm-shrinkwrap.json` is a file created by [`npm shrinkwrap`](/commands/npm-shrinkwrap). It is identical to +`npm-shrinkwrap.json` is a file created by [`npm +shrinkwrap`](/commands/npm-shrinkwrap). It is identical to `package-lock.json`, with one major caveat: Unlike `package-lock.json`, `npm-shrinkwrap.json` may be included when publishing a package. The recommended use-case for `npm-shrinkwrap.json` is applications deployed through the publishing process on the registry: for example, daemons and command-line tools intended as global installs or `devDependencies`. It's -strongly discouraged for library authors to publish this file, since that would -prevent end users from having control over transitive dependency updates. +strongly discouraged for library authors to publish this file, since that +would prevent end users from having control over transitive dependency +updates. -Additionally, if both `package-lock.json` and `npm-shrinkwrap.json` are present -in a package root, `package-lock.json` will be ignored in favor of this file. +If both `package-lock.json` and `npm-shrinkwrap.json` are present in a +package root, `npm-shrinkwrap.json` will be preferred over the +`package-lock.json` file. -For full details and description of the `npm-shrinkwrap.json` file format, refer -to the manual page for [package-lock.json](/configuring-npm/package-lock-json). +For full details and description of the `npm-shrinkwrap.json` file format, +refer to the manual page for +[package-lock.json](/configuring-npm/package-lock-json). ### See also diff --git a/deps/npm/docs/content/configuring-npm/npmrc.md b/deps/npm/docs/content/configuring-npm/npmrc.md index f0edfda61c9a84..83310ffa9c7f21 100644 --- a/deps/npm/docs/content/configuring-npm/npmrc.md +++ b/deps/npm/docs/content/configuring-npm/npmrc.md @@ -6,13 +6,14 @@ description: The npm config files ### Description -npm gets its config settings from the command line, environment -variables, and `npmrc` files. +npm gets its config settings from the command line, environment variables, +and `npmrc` files. -The `npm config` command can be used to update and edit the contents -of the user and global npmrc files. +The `npm config` command can be used to update and edit the contents of the +user and global npmrc files. 
-For a list of available configuration options, see [config](/using-npm/config). +For a list of available configuration options, see +[config](/using-npm/config). ### Files @@ -23,20 +24,19 @@ The four relevant files are: * global config file ($PREFIX/etc/npmrc) * npm builtin config file (/path/to/npm/npmrc) -All npm config files are an ini-formatted list of `key = value` -parameters. Environment variables can be replaced using -`${VARIABLE_NAME}`. For example: +All npm config files are an ini-formatted list of `key = value` parameters. +Environment variables can be replaced using `${VARIABLE_NAME}`. For +example: ```bash prefix = ${HOME}/.npm-packages ``` -Each of these files is loaded, and config options are resolved in -priority order. For example, a setting in the userconfig file would -override the setting in the globalconfig file. +Each of these files is loaded, and config options are resolved in priority +order. For example, a setting in the userconfig file would override the +setting in the globalconfig file. -Array values are specified by adding "[]" after the key name. For -example: +Array values are specified by adding "[]" after the key name. For example: ```bash key[] = "first value" @@ -45,7 +45,9 @@ key[] = "second value" #### Comments -Lines in `.npmrc` files are interpreted as comments when they begin with a `;` or `#` character. `.npmrc` files are parsed by [npm/ini](https://github.com/npm/ini), which specifies this comment syntax. +Lines in `.npmrc` files are interpreted as comments when they begin with a +`;` or `#` character. `.npmrc` files are parsed by +[npm/ini](https://github.com/npm/ini), which specifies this comment syntax. For example: @@ -61,24 +63,24 @@ When working locally in a project, a `.npmrc` file in the root of the project (ie, a sibling of `node_modules` and `package.json`) will set config values specific to this project. -Note that this only applies to the root of the project that you're -running npm in. It has no effect when your module is published. For -example, you can't publish a module that forces itself to install -globally, or in a different location. +Note that this only applies to the root of the project that you're running +npm in. It has no effect when your module is published. For example, you +can't publish a module that forces itself to install globally, or in a +different location. Additionally, this file is not read in global mode, such as when running `npm install -g`. #### Per-user config file -`$HOME/.npmrc` (or the `userconfig` param, if set in the environment -or on the command line) +`$HOME/.npmrc` (or the `userconfig` param, if set in the environment or on +the command line) #### Global config file -`$PREFIX/etc/npmrc` (or the `globalconfig` param, if set above): -This file is an ini-file formatted list of `key = value` parameters. -Environment variables can be replaced as above. +`$PREFIX/etc/npmrc` (or the `globalconfig` param, if set above): This file +is an ini-file formatted list of `key = value` parameters. Environment +variables can be replaced as above. #### Built-in config file @@ -86,9 +88,8 @@ Environment variables can be replaced as above. This is an unchangeable "builtin" configuration file that npm keeps consistent across updates. Set fields in here using the `./configure` -script that comes with npm. This is primarily for distribution -maintainers to override default configs in a standard and consistent -manner. +script that comes with npm. 
This is primarily for distribution maintainers +to override default configs in a standard and consistent manner. ### See also diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md index 8b9b4ffbb1074a..caa1e16a32678a 100644 --- a/deps/npm/docs/content/configuring-npm/package-json.md +++ b/deps/npm/docs/content/configuring-npm/package-json.md @@ -6,8 +6,9 @@ description: Specifics of npm's package.json handling ### Description -This document is all you need to know about what's required in your package.json -file. It must be actual JSON, not just a JavaScript object literal. +This document is all you need to know about what's required in your +package.json file. It must be actual JSON, not just a JavaScript object +literal. A lot of the behavior described in this document is affected by the config settings described in [`config`](/using-npm/config). @@ -15,32 +16,36 @@ settings described in [`config`](/using-npm/config). ### name If you plan to publish your package, the *most* important things in your -package.json are the name and version fields as they will be required. The name -and version together form an identifier that is assumed to be completely unique. -Changes to the package should come along with changes to the version. If you don't -plan to publish your package, the name and version fields are optional. +package.json are the name and version fields as they will be required. The +name and version together form an identifier that is assumed to be +completely unique. Changes to the package should come along with changes +to the version. If you don't plan to publish your package, the name and +version fields are optional. The name is what your thing is called. Some rules: -* The name must be less than or equal to 214 characters. This includes the scope for - scoped packages. -* The names of scoped packages can begin with a dot or an underscore. This is not permitted without a scope. +* The name must be less than or equal to 214 characters. This includes the + scope for scoped packages. +* The names of scoped packages can begin with a dot or an underscore. This + is not permitted without a scope. * New packages must not have uppercase letters in the name. -* The name ends up being part of a URL, an argument on the command line, and a - folder name. Therefore, the name can't contain any non-URL-safe characters. +* The name ends up being part of a URL, an argument on the command line, + and a folder name. Therefore, the name can't contain any non-URL-safe + characters. Some tips: * Don't use the same name as a core Node module. -* Don't put "js" or "node" in the name. It's assumed that it's js, since you're - writing a package.json file, and you can specify the engine using the "engines" - field. (See below.) -* The name will probably be passed as an argument to require(), so it should - be something short, but also reasonably descriptive. -* You may want to check the npm registry to see if there's something by that name - already, before you get too attached to it. +* Don't put "js" or "node" in the name. It's assumed that it's js, since + you're writing a package.json file, and you can specify the engine using + the "engines" field. (See below.) +* The name will probably be passed as an argument to require(), so it + should be something short, but also reasonably descriptive. +* You may want to check the npm registry to see if there's something by + that name already, before you get too attached to it. 
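As a minimal sketch of the identifier formed by these two required fields (the package name here is invented purely for illustration):

```json
{
  "name": "tiny-fooer",
  "version": "1.0.0"
}
```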
+ A name can be optionally prefixed by a scope, e.g. `@myorg/mypackage`. See [`scope`](/using-npm/scope) for more detail. @@ -48,14 +53,15 @@ A name can be optionally prefixed by a scope, e.g. `@myorg/mypackage`. See ### version If you plan to publish your package, the *most* important things in your -package.json are the name and version fields as they will be required. The name -and version together form an identifier that is assumed to be completely unique. -Changes to the package should come along with changes to the version. If you don't -plan to publish your package, the name and version fields are optional. +package.json are the name and version fields as they will be required. The +name and version together form an identifier that is assumed to be +completely unique. Changes to the package should come along with changes +to the version. If you don't plan to publish your package, the name and +version fields are optional. Version must be parseable by -[node-semver](https://github.com/npm/node-semver), which is bundled -with npm as a dependency. (`npm install semver` to use it yourself.) +[node-semver](https://github.com/npm/node-semver), which is bundled with +npm as a dependency. (`npm install semver` to use it yourself.) More on version numbers and ranges at [semver](/using-npm/semver). @@ -66,8 +72,8 @@ package, as it's listed in `npm search`. ### keywords -Put keywords in it. It's an array of strings. This helps people -discover your package as it's listed in `npm search`. +Put keywords in it. It's an array of strings. This helps people discover +your package as it's listed in `npm search`. ### homepage @@ -82,71 +88,83 @@ Example: ### bugs The url to your project's issue tracker and / or the email address to which -issues should be reported. These are helpful for people who encounter issues -with your package. +issues should be reported. These are helpful for people who encounter +issues with your package. It should look like this: ```json -{ "url" : "https://github.com/owner/project/issues" -, "email" : "project@hostname.com" +{ + "url" : "https://github.com/owner/project/issues", + "email" : "project@hostname.com" } ``` -You can specify either one or both values. If you want to provide only a url, -you can specify the value for "bugs" as a simple string instead of an object. +You can specify either one or both values. If you want to provide only a +url, you can specify the value for "bugs" as a simple string instead of an +object. If a url is provided, it will be used by the `npm bugs` command. ### license -You should specify a license for your package so that people know how they are -permitted to use it, and any restrictions you're placing on it. +You should specify a license for your package so that people know how they +are permitted to use it, and any restrictions you're placing on it. -If you're using a common license such as BSD-2-Clause or MIT, add a -current SPDX license identifier for the license you're using, like this: +If you're using a common license such as BSD-2-Clause or MIT, add a current +SPDX license identifier for the license you're using, like this: ```json -{ "license" : "BSD-3-Clause" } +{ + "license" : "BSD-3-Clause" +} ``` -You can check [the full list of SPDX license IDs](https://spdx.org/licenses/). -Ideally you should pick one that is +You can check [the full list of SPDX license +IDs](https://spdx.org/licenses/). 
Ideally you should pick one that is [OSI](https://opensource.org/licenses/alphabetical) approved. -If your package is licensed under multiple common licenses, use an [SPDX license -expression syntax version 2.0 string](https://www.npmjs.com/package/spdx), like this: +If your package is licensed under multiple common licenses, use an [SPDX +license expression syntax version 2.0 +string](https://www.npmjs.com/package/spdx), like this: ```json -{ "license" : "(ISC OR GPL-3.0)" } +{ + "license" : "(ISC OR GPL-3.0)" +} ``` If you are using a license that hasn't been assigned an SPDX identifier, or if you are using a custom license, use a string value like this one: ```json -{ "license" : "SEE LICENSE IN " } +{ + "license" : "SEE LICENSE IN " +} ``` Then include a file named `` at the top level of the package. -Some old packages used license objects or a "licenses" property containing an -array of license objects: +Some old packages used license objects or a "licenses" property containing +an array of license objects: ```json // Not valid metadata -{ "license" : - { "type" : "ISC" - , "url" : "https://opensource.org/licenses/ISC" +{ + "license" : { + "type" : "ISC", + "url" : "https://opensource.org/licenses/ISC" } } // Not valid metadata -{ "licenses" : - [ - { "type": "MIT" - , "url": "https://www.opensource.org/licenses/mit-license.php" - } - , { "type": "Apache-2.0" - , "url": "https://opensource.org/licenses/apache2.0.php" +{ + "licenses" : [ + { + "type": "MIT", + "url": "https://www.opensource.org/licenses/mit-license.php" + }, + { + "type": "Apache-2.0", + "url": "https://opensource.org/licenses/apache2.0.php" } ] } @@ -155,35 +173,49 @@ array of license objects: Those styles are now deprecated. Instead, use SPDX expressions, like this: ```json -{ "license": "ISC" } +{ + "license": "ISC" +} +``` -{ "license": "(MIT OR Apache-2.0)" } +```json +{ + "license": "(MIT OR Apache-2.0)" +} ``` Finally, if you do not wish to grant others the right to use a private or unpublished package under any terms: ```json -{ "license": "UNLICENSED" } +{ + "license": "UNLICENSED" +} ``` + Consider also setting `"private": true` to prevent accidental publication. ### people fields: author, contributors -The "author" is one person. "contributors" is an array of people. A "person" -is an object with a "name" field and optionally "url" and "email", like this: +The "author" is one person. "contributors" is an array of people. A +"person" is an object with a "name" field and optionally "url" and "email", +like this: ```json -{ "name" : "Barney Rubble" -, "email" : "b@rubble.com" -, "url" : "http://barnyrubble.tumblr.com/" +{ + "name" : "Barney Rubble", + "email" : "b@rubble.com", + "url" : "http://barnyrubble.tumblr.com/" } ``` -Or you can shorten that all into a single string, and npm will parse it for you: +Or you can shorten that all into a single string, and npm will parse it for +you: ```json -"Barney Rubble (http://barnyrubble.tumblr.com/)" +{ + "author": "Barney Rubble (http://barnyrubble.tumblr.com/)" +} ``` Both email and url are optional either way. @@ -193,58 +225,61 @@ npm also sets a top-level "maintainers" field with your npm user info. 
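A short sketch combining both people fields, assuming the single-string shorthand above also applies to entries of the `contributors` array (all people and addresses here are illustrative):

```json
{
  "author": "Barney Rubble <b@rubble.com> (http://barnyrubble.tumblr.com/)",
  "contributors": [
    {
      "name": "Betty Rubble",
      "email": "betty@rubble.example.com"
    },
    "Fred Flintstone <fred@flintstone.example.com>"
  ]
}
```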
### funding You can specify an object containing an URL that provides up-to-date -information about ways to help fund development of your package, or -a string URL, or an array of these: +information about ways to help fund development of your package, or a +string URL, or an array of these: - "funding": { +```json +{ + "funding": { + "type" : "individual", + "url" : "http://example.com/donate" + }, + + "funding": { + "type" : "patreon", + "url" : "https://www.patreon.com/my-account" + }, + + "funding": "http://example.com/donate", + + "funding": [ + { "type" : "individual", "url" : "http://example.com/donate" - } - - "funding": { + }, + "http://example.com/donateAlso", + { "type" : "patreon", "url" : "https://www.patreon.com/my-account" } - - "funding": "http://example.com/donate" - - "funding": [ - { - "type" : "individual", - "url" : "http://example.com/donate" - }, - "http://example.com/donateAlso", - { - "type" : "patreon", - "url" : "https://www.patreon.com/my-account" - } - ] - + ] +} +``` Users can use the `npm fund` subcommand to list the `funding` URLs of all -dependencies of their project, direct and indirect. A shortcut to visit each -funding url is also available when providing the project name such as: -`npm fund ` (when there are multiple URLs, the first one will be -visited) +dependencies of their project, direct and indirect. A shortcut to visit +each funding url is also available when providing the project name such as: +`npm fund ` (when there are multiple URLs, the first one will +be visited) ### files -The optional `files` field is an array of file patterns that describes -the entries to be included when your package is installed as a -dependency. File patterns follow a similar syntax to `.gitignore`, but -reversed: including a file, directory, or glob pattern (`*`, `**/*`, and such) -will make it so that file is included in the tarball when it's packed. Omitting -the field will make it default to `["*"]`, which means it will include all files. +The optional `files` field is an array of file patterns that describes the +entries to be included when your package is installed as a dependency. File +patterns follow a similar syntax to `.gitignore`, but reversed: including a +file, directory, or glob pattern (`*`, `**/*`, and such) will make it so +that file is included in the tarball when it's packed. Omitting the field +will make it default to `["*"]`, which means it will include all files. -Some special files and directories are also included or excluded regardless of -whether they exist in the `files` array (see below). +Some special files and directories are also included or excluded regardless +of whether they exist in the `files` array (see below). -You can also provide a `.npmignore` file in the root of your package or -in subdirectories, which will keep files from being included. At the -root of your package it will not override the "files" field, but in -subdirectories it will. The `.npmignore` file works just like a -`.gitignore`. If there is a `.gitignore` file, and `.npmignore` is -missing, `.gitignore`'s contents will be used instead. +You can also provide a `.npmignore` file in the root of your package or in +subdirectories, which will keep files from being included. At the root of +your package it will not override the "files" field, but in subdirectories +it will. The `.npmignore` file works just like a `.gitignore`. If there is +a `.gitignore` file, and `.npmignore` is missing, `.gitignore`'s contents +will be used instead. 
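For instance, a package that ships a built `dist` directory plus a few top-level files might use a `files` array like the following sketch (the paths are hypothetical):

```json
{
  "files": [
    "dist",
    "lib/*.js",
    "index.js"
  ]
}
```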
Files included with the "package.json#files" field _cannot_ be excluded through `.npmignore` or `.gitignore`. @@ -276,24 +311,28 @@ Conversely, some files are always ignored: * `node_modules` * `config.gypi` * `*.orig` -* `package-lock.json` (use shrinkwrap instead) +* `package-lock.json` (use + [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) if you wish + it to be published) ### main -The main field is a module ID that is the primary entry point to your program. -That is, if your package is named `foo`, and a user installs it, and then does -`require("foo")`, then your main module's exports object will be returned. +The main field is a module ID that is the primary entry point to your +program. That is, if your package is named `foo`, and a user installs it, +and then does `require("foo")`, then your main module's exports object will +be returned. -This should be a module ID relative to the root of your package folder. +This should be a module relative to the root of your package folder. -For most modules, it makes the most sense to have a main script and often not -much else. +For most modules, it makes the most sense to have a main script and often +not much else. ### browser If your module is meant to be used client-side the browser field should be used instead of the main field. This is helpful to hint users that it might -rely on primitives that aren't available in Node.js modules. (e.g. `window`) +rely on primitives that aren't available in Node.js modules. (e.g. +`window`) ### bin @@ -302,35 +341,45 @@ install into the PATH. npm makes this pretty easy (in fact, it uses this feature to install the "npm" executable.) To use this, supply a `bin` field in your package.json which is a map of -command name to local file name. On install, npm will symlink that file into -`prefix/bin` for global installs, or `./node_modules/.bin/` for local +command name to local file name. On install, npm will symlink that file +into `prefix/bin` for global installs, or `./node_modules/.bin/` for local installs. For example, myapp could have this: ```json -{ "bin" : { "myapp" : "./cli.js" } } +{ + "bin": { + "myapp": "./cli.js" + } +} ``` -So, when you install myapp, it'll create a symlink from the `cli.js` script to -`/usr/local/bin/myapp`. +So, when you install myapp, it'll create a symlink from the `cli.js` script +to `/usr/local/bin/myapp`. -If you have a single executable, and its name should be the name -of the package, then you can just supply it as a string. For example: +If you have a single executable, and its name should be the name of the +package, then you can just supply it as a string. For example: ```json -{ "name": "my-program" -, "version": "1.2.5" -, "bin": "./path/to/program" } +{ + "name": "my-program", + "version": "1.2.5", + "bin": "./path/to/program" +} ``` would be the same as this: ```json -{ "name": "my-program" -, "version": "1.2.5" -, "bin" : { "my-program" : "./path/to/program" } } +{ + "name": "my-program", + "version": "1.2.5", + "bin": { + "my-program": "./path/to/program" + } +} ``` Please make sure that your file(s) referenced in `bin` starts with @@ -339,93 +388,88 @@ executable! ### man -Specify either a single file or an array of filenames to put in place for the -`man` program to find. +Specify either a single file or an array of filenames to put in place for +the `man` program to find. If only a single file is provided, then it's installed such that it is the -result from `man `, regardless of its actual filename. 
For example: +result from `man `, regardless of its actual filename. For +example: ```json -{ "name" : "foo" -, "version" : "1.2.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo.js" -, "man" : "./man/doc.1" +{ + "name": "foo", + "version": "1.2.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo.js", + "man": "./man/doc.1" } ``` -would link the `./man/doc.1` file in such that it is the target for `man foo` +would link the `./man/doc.1` file in such that it is the target for `man +foo` If the filename doesn't start with the package name, then it's prefixed. So, this: ```json -{ "name" : "foo" -, "version" : "1.2.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo.js" -, "man" : [ "./man/foo.1", "./man/bar.1" ] +{ + "name": "foo", + "version": "1.2.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo.js", + "man": [ + "./man/foo.1", + "./man/bar.1" + ] } ``` will create files to do `man foo` and `man foo-bar`. Man files must end with a number, and optionally a `.gz` suffix if they are -compressed. The number dictates which man section the file is installed into. +compressed. The number dictates which man section the file is installed +into. ```json -{ "name" : "foo" -, "version" : "1.2.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo.js" -, "man" : [ "./man/foo.1", "./man/foo.2" ] +{ + "name": "foo", + "version": "1.2.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo.js", + "man": [ + "./man/foo.1", + "./man/foo.2" + ] } ``` + will create entries for `man foo` and `man 2 foo` ### directories -The CommonJS [Packages](http://wiki.commonjs.org/wiki/Packages/1.0) spec details a -few ways that you can indicate the structure of your package using a `directories` -object. If you look at [npm's package.json](https://registry.npmjs.org/npm/latest), -you'll see that it has directories for doc, lib, and man. +The CommonJS [Packages](http://wiki.commonjs.org/wiki/Packages/1.0) spec +details a few ways that you can indicate the structure of your package +using a `directories` object. If you look at [npm's +package.json](https://registry.npmjs.org/npm/latest), you'll see that it +has directories for doc, lib, and man. In the future, this information may be used in other creative ways. -#### directories.lib - -Tell people where the bulk of your library is. Nothing special is done -with the lib folder in any way, but it's useful meta info. - #### directories.bin If you specify a `bin` directory in `directories.bin`, all the files in that folder will be added. -Because of the way the `bin` directive works, specifying both a -`bin` path and setting `directories.bin` is an error. If you want to -specify individual files, use `bin`, and for all the files in an -existing `bin` directory, use `directories.bin`. +Because of the way the `bin` directive works, specifying both a `bin` path +and setting `directories.bin` is an error. If you want to specify +individual files, use `bin`, and for all the files in an existing `bin` +directory, use `directories.bin`. #### directories.man A folder that is full of man pages. Sugar to generate a "man" array by walking the folder. -#### directories.doc - -Put markdown files in here. Eventually, these will be displayed nicely, -maybe, someday. - -#### directories.example - -Put example scripts in here. Someday, it might be exposed in some clever way. - -#### directories.test - -Put your tests in here. 
It is currently not exposed, but it might be in the -future. - ### repository Specify the place where your code lives. This is helpful for people who @@ -435,72 +479,80 @@ command will be able to find you. Do it like this: ```json -"repository": { - "type" : "git", - "url" : "https://github.com/npm/cli.git" -} - -"repository": { - "type" : "svn", - "url" : "https://v8.googlecode.com/svn/trunk/" +{ + "repository": { + "type": "git", + "url": "https://github.com/npm/cli.git" + } } ``` -The URL should be a publicly available (perhaps read-only) url that can be handed -directly to a VCS program without any modification. It should not be a url to an -html project page that you put in your browser. It's for computers. +The URL should be a publicly available (perhaps read-only) url that can be +handed directly to a VCS program without any modification. It should not +be a url to an html project page that you put in your browser. It's for +computers. -For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same -shortcut syntax you use for `npm install`: +For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the +same shortcut syntax you use for `npm install`: ```json -"repository": "npm/npm" +{ + "repository": "npm/npm", -"repository": "github:user/repo" + "repository": "github:user/repo", -"repository": "gist:11081aaa281" + "repository": "gist:11081aaa281", -"repository": "bitbucket:user/repo" + "repository": "bitbucket:user/repo", -"repository": "gitlab:user/repo" + "repository": "gitlab:user/repo" +} ``` -If the `package.json` for your package is not in the root directory (for example -if it is part of a monorepo), you can specify the directory in which it lives: +If the `package.json` for your package is not in the root directory (for +example if it is part of a monorepo), you can specify the directory in +which it lives: ```json -"repository": { - "type" : "git", - "url" : "https://github.com/facebook/react.git", - "directory": "packages/react-dom" +{ + "repository": { + "type": "git", + "url": "https://github.com/facebook/react.git", + "directory": "packages/react-dom" + } } ``` ### scripts -The "scripts" property is a dictionary containing script commands that are run -at various times in the lifecycle of your package. The key is the lifecycle -event, and the value is the command to run at that point. +The "scripts" property is a dictionary containing script commands that are +run at various times in the lifecycle of your package. The key is the +lifecycle event, and the value is the command to run at that point. -See [`scripts`](/using-npm/scripts) to find out more about writing package scripts. +See [`scripts`](/using-npm/scripts) to find out more about writing package +scripts. ### config -A "config" object can be used to set configuration parameters used in package -scripts that persist across upgrades. For instance, if a package had the -following: +A "config" object can be used to set configuration parameters used in +package scripts that persist across upgrades. For instance, if a package +had the following: ```json -{ "name" : "foo" -, "config" : { "port" : "8080" } } +{ + "name": "foo", + "config": { + "port": "8080" + } +} ``` and then had a "start" command that then referenced the `npm_package_config_port` environment variable, then the user could override that by doing `npm config set foo:port 8001`. 
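As a sketch of how the two pieces fit together (the `server.js` file and the `--port` flag are illustrative, not part of the original example), the config value reaches package scripts as an environment variable:

```json
{
  "name": "foo",
  "config": {
    "port": "8080"
  },
  "scripts": {
    "start": "node server.js --port=$npm_package_config_port"
  }
}
```

Running `npm start` would then pass `8080`, or whatever the user stored with `npm config set foo:port ...`, to the script. The `$npm_package_config_port` expansion assumes a POSIX-style shell; the same value is also available to the script itself as `process.env.npm_package_config_port`.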
-See [`config`](/using-npm/config) and [`scripts`](/using-npm/scripts) for more on package -configs. +See [`config`](/using-npm/config) and [`scripts`](/using-npm/scripts) for +more on package configs. ### dependencies @@ -509,17 +561,19 @@ version range. The version range is a string which has one or more space-separated descriptors. Dependencies can also be identified with a tarball or git URL. -**Please do not put test harnesses or transpilers in your -`dependencies` object.** See `devDependencies`, below. +**Please do not put test harnesses or transpilers or other "development" +time tools in your `dependencies` object.** See `devDependencies`, below. -See [semver](/using-npm/semver) for more details about specifying version ranges. +See [semver](/using-npm/semver) for more details about specifying version +ranges. * `version` Must match `version` exactly * `>version` Must be greater than `version` * `>=version` etc * `=1.0.2 <2.1.2" - , "baz" : ">1.0.2 <=2.3.4" - , "boo" : "2.0.1" - , "qux" : "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0" - , "asd" : "http://asdf.com/asdf.tar.gz" - , "til" : "~1.2" - , "elf" : "~1.2.3" - , "two" : "2.x" - , "thr" : "3.3.x" - , "lat" : "latest" - , "dyl" : "file:../dyl" +{ + "dependencies": { + "foo": "1.0.0 - 2.9999.9999", + "bar": ">=1.0.2 <2.1.2", + "baz": ">1.0.2 <=2.3.4", + "boo": "2.0.1", + "qux": "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0", + "asd": "http://asdf.com/asdf.tar.gz", + "til": "~1.2", + "elf": "~1.2.3", + "two": "2.x", + "thr": "3.3.x", + "lat": "latest", + "dyl": "file:../dyl" } } ``` @@ -573,8 +629,8 @@ Git urls are of the form: If `#` is provided, it will be used to clone exactly that commit. If the commit-ish has the format `#semver:`, `` can be any valid semver range or exact version, and npm will look for any tags -or refs matching that range in the remote repository, much as it would for a -registry dependency. If neither `#` or `#semver:` is +or refs matching that range in the remote repository, much as it would for +a registry dependency. If neither `#` or `#semver:` is specified, then `master` is used. Examples: @@ -606,9 +662,9 @@ included. For example: #### Local Paths -As of version 2.0.0 you can provide a path to a local directory that contains a -package. Local paths can be saved using `npm install -S` or -`npm install --save`, using any of these forms: +As of version 2.0.0 you can provide a path to a local directory that +contains a package. Local paths can be saved using `npm install -S` or `npm +install --save`, using any of these forms: ```bash ../foo/bar @@ -629,32 +685,32 @@ in which case they will be normalized to a relative path and added to your } ``` -This feature is helpful for local offline development and creating -tests that require npm installing where you don't want to hit an -external server, but should not be used when publishing packages -to the public registry. +This feature is helpful for local offline development and creating tests +that require npm installing where you don't want to hit an external server, +but should not be used when publishing packages to the public registry. ### devDependencies If someone is planning on downloading and using your module in their -program, then they probably don't want or need to download and build -the external test or documentation framework that you use. +program, then they probably don't want or need to download and build the +external test or documentation framework that you use. 
-In this case, it's best to map these additional items in a `devDependencies` -object. +In this case, it's best to map these additional items in a +`devDependencies` object. -These things will be installed when doing `npm link` or `npm install` -from the root of a package, and can be managed like any other npm -configuration param. See [`config`](/using-npm/config) for more on the topic. +These things will be installed when doing `npm link` or `npm install` from +the root of a package, and can be managed like any other npm configuration +param. See [`config`](/using-npm/config) for more on the topic. For build steps that are not platform-specific, such as compiling -CoffeeScript or other languages to JavaScript, use the `prepare` -script to do this, and make the required package a devDependency. +CoffeeScript or other languages to JavaScript, use the `prepare` script to +do this, and make the required package a devDependency. For example: ```json -{ "name": "ethopia-waza", +{ + "name": "ethopia-waza", "description": "a delightfully fruity coffee varietal", "version": "1.2.3", "devDependencies": { @@ -667,17 +723,18 @@ For example: } ``` -The `prepare` script will be run before publishing, so that users -can consume the functionality without requiring them to compile it -themselves. In dev mode (ie, locally running `npm install`), it'll -run this script as well, so that you can test it easily. +The `prepare` script will be run before publishing, so that users can +consume the functionality without requiring them to compile it themselves. +In dev mode (ie, locally running `npm install`), it'll run this script as +well, so that you can test it easily. ### peerDependencies In some cases, you want to express the compatibility of your package with a host tool or library, while not necessarily doing a `require` of this host. -This is usually referred to as a *plugin*. Notably, your module may be exposing -a specific interface, expected and specified by the host documentation. +This is usually referred to as a *plugin*. Notably, your module may be +exposing a specific interface, expected and specified by the host +documentation. For example: @@ -691,34 +748,38 @@ For example: } ``` -This ensures your package `tea-latte` can be installed *along* with the second -major version of the host package `tea` only. `npm install tea-latte` could -possibly yield the following dependency graph: +This ensures your package `tea-latte` can be installed *along* with the +second major version of the host package `tea` only. `npm install +tea-latte` could possibly yield the following dependency graph: ```bash ├── tea-latte@1.3.5 └── tea@2.2.0 ``` -**NOTE: npm versions 1 and 2 will automatically install `peerDependencies` if -they are not explicitly depended upon higher in the dependency tree. In the -next major version of npm (npm@3), this will no longer be the case. You will -receive a warning that the peerDependency is not installed instead.** The -behavior in npms 1 & 2 was frequently confusing and could easily put you into -dependency hell, a situation that npm is designed to avoid as much as possible. +In npm versions 3 through 6, `peerDependencies` were not automatically +installed, and would raise a warning if an invalid version of the peer +dependency was found in the tree. As of npm v7, peerDependencies _are_ +installed by default. -Trying to install another plugin with a conflicting requirement will cause an -error. 
For this reason, make sure your plugin requirement is as broad as -possible, and not to lock it down to specific patch versions. +Trying to install another plugin with a conflicting requirement may cause +an error if the tree cannot be resolved correctly. For this reason, make +sure your plugin requirement is as broad as possible, and not to lock it +down to specific patch versions. -Assuming the host complies with [semver](https://semver.org/), only changes in -the host package's major version will break your plugin. Thus, if you've worked -with every 1.x version of the host package, use `"^1.0"` or `"1.x"` to express -this. If you depend on features introduced in 1.5.2, use `">= 1.5.2 < 2"`. +Assuming the host complies with [semver](https://semver.org/), only changes +in the host package's major version will break your plugin. Thus, if you've +worked with every 1.x version of the host package, use `"^1.0"` or `"1.x"` +to express this. If you depend on features introduced in 1.5.2, use +`"^1.5.2"`. ### peerDependenciesMeta -When a user installs your package, npm will emit warnings if packages specified in `peerDependencies` are not already installed. The `peerDependenciesMeta` field serves to provide npm more information on how your peer dependencies are to be used. Specifically, it allows peer dependencies to be marked as optional. +When a user installs your package, npm will emit warnings if packages +specified in `peerDependencies` are not already installed. The +`peerDependenciesMeta` field serves to provide npm more information on how +your peer dependencies are to be used. Specifically, it allows peer +dependencies to be marked as optional. For example: @@ -738,7 +799,10 @@ For example: } ``` -Marking a peer dependency as optional ensures npm will not emit a warning if the `soy-milk` package is not installed on the host. This allows you to integrate and interact with a variety of host packages without requiring all of them to be installed. +Marking a peer dependency as optional ensures npm will not emit a warning +if the `soy-milk` package is not installed on the host. This allows you to +integrate and interact with a variety of host packages without requiring +all of them to be installed. ### bundledDependencies @@ -759,26 +823,28 @@ If we define a package.json like this: "name": "awesome-web-framework", "version": "1.0.0", "bundledDependencies": [ - "renderized", "super-streams" + "renderized", + "super-streams" ] } ``` + we can obtain `awesome-web-framework-1.0.0.tgz` file by running `npm pack`. This file contains the dependencies `renderized` and `super-streams` which can be installed in a new project by executing `npm install -awesome-web-framework-1.0.0.tgz`. Note that the package names do not include -any versions, as that information is specified in `dependencies`. +awesome-web-framework-1.0.0.tgz`. Note that the package names do not +include any versions, as that information is specified in `dependencies`. If this is spelled `"bundleDependencies"`, then that is also honored. ### optionalDependencies -If a dependency can be used, but you would like npm to proceed if it cannot be -found or fails to install, then you may put it in the `optionalDependencies` -object. This is a map of package name to version or url, just like the -`dependencies` object. The difference is that build failures do not cause -installation to fail. Running `npm install --no-optional` will prevent these -dependencies from being installed. 
+If a dependency can be used, but you would like npm to proceed if it cannot +be found or fails to install, then you may put it in the +`optionalDependencies` object. This is a map of package name to version or +url, just like the `dependencies` object. The difference is that build +failures do not cause installation to fail. Running `npm install +--no-optional` will prevent these dependencies from being installed. It is still your program's responsibility to handle the lack of the dependency. For example, something like this: @@ -809,32 +875,30 @@ Entries in `optionalDependencies` will override entries of the same name in You can specify the version of node that your stuff works on: ```json -{ "engines" : { "node" : ">=0.10.3 <0.12" } } +{ + "engines": { + "node": ">=0.10.3 <15" + } +} ``` And, like with dependencies, if you don't specify the version (or if you specify "\*" as the version), then any version of node will do. -If you specify an "engines" field, then npm will require that "node" be -somewhere on that list. If "engines" is omitted, then npm will just assume -that it works on node. - -You can also use the "engines" field to specify which versions of npm -are capable of properly installing your program. For example: +You can also use the "engines" field to specify which versions of npm are +capable of properly installing your program. For example: ```json -{ "engines" : { "npm" : "~1.0.20" } } +{ + "engines": { + "npm": "~1.0.20" + } +} ``` -Unless the user has set the `engine-strict` config flag, this -field is advisory only and will only produce warnings when your package is installed as a dependency. - -### engineStrict - -**This feature was removed in npm 3.0.0** - -Prior to npm 3.0.0, this feature was used to treat this package as if the -user had set `engine-strict`. It is no longer used. +Unless the user has set the `engine-strict` config flag, this field is +advisory only and will only produce warnings when your package is installed +as a dependency. ### os @@ -842,14 +906,23 @@ You can specify which operating systems your module will run on: ```json -"os" : [ "darwin", "linux" ] +{ + "os": [ + "darwin", + "linux" + ] +} ``` -You can also block instead of allowing operating systems, -just prepend the blocked os with a '!': +You can also block instead of allowing operating systems, just prepend the +blocked os with a '!': ```json -"os" : [ "!win32" ] +{ + "os": [ + "!win32" + ] +} ``` The host operating system is determined by `process.platform` @@ -863,62 +936,62 @@ If your code only runs on certain cpu architectures, you can specify which ones. ```json -"cpu" : [ "x64", "ia32" ] +{ + "cpu": [ + "x64", + "ia32" + ] +} ``` Like the `os` option, you can also block architectures: ```json -"cpu" : [ "!arm", "!mips" ] +{ + "cpu": [ + "!arm", + "!mips" + ] +} ``` The host architecture is determined by `process.arch` -### preferGlobal - -**DEPRECATED** - -This option used to trigger an npm warning, but it will no longer warn. It is -purely there for informational purposes. It is now recommended that you install -any binaries as local devDependencies wherever possible. - ### private -If you set `"private": true` in your package.json, then npm will refuse -to publish it. +If you set `"private": true` in your package.json, then npm will refuse to +publish it. -This is a way to prevent accidental publication of private repositories. 
If -you would like to ensure that a given package is only ever published to a -specific registry (for example, an internal registry), then use the -`publishConfig` dictionary described below to override the `registry` config -param at publish-time. +This is a way to prevent accidental publication of private repositories. +If you would like to ensure that a given package is only ever published to +a specific registry (for example, an internal registry), then use the +`publishConfig` dictionary described below to override the `registry` +config param at publish-time. ### publishConfig This is a set of config values that will be used at publish-time. It's especially handy if you want to set the tag, registry or access, so that you can ensure that a given package is not tagged with "latest", published -to the global public registry or that a scoped module is private by default. - -Any config values can be overridden, but only "tag", "registry" and "access" -probably matter for the purposes of publishing. +to the global public registry or that a scoped module is private by +default. -See [`config`](/using-npm/config) to see the list of config options that can be -overridden. +See [`config`](/using-npm/config) to see the list of config options that +can be overridden. ### workspaces The optional `workspaces` field is an array of file patterns that describes -locations within the local file system that the install client should look up -to find each [workspace](/using-npm/workspaces) that needs to be symlinked to -the top level `node_modules` folder. +locations within the local file system that the install client should look +up to find each [workspace](/using-npm/workspaces) that needs to be +symlinked to the top level `node_modules` folder. It can describe either the direct paths of the folders to be used as workspaces or it can define globs that will resolve to these same folders. -In the following example, all folders located inside the folder `./packages` -will be treated as workspaces as long as they have valid `package.json` files -inside them: +In the following example, all folders located inside the folder +`./packages` will be treated as workspaces as long as they have valid +`package.json` files inside them: ```json { @@ -937,20 +1010,20 @@ npm will default some values based on package contents. * `"scripts": {"start": "node server.js"}` - If there is a `server.js` file in the root of your package, then npm - will default the `start` command to `node server.js`. + If there is a `server.js` file in the root of your package, then npm will + default the `start` command to `node server.js`. * `"scripts":{"install": "node-gyp rebuild"}` - If there is a `binding.gyp` file in the root of your package and you have not defined an `install` or `preinstall` script, npm will - default the `install` command to compile using node-gyp. + If there is a `binding.gyp` file in the root of your package and you have + not defined an `install` or `preinstall` script, npm will default the + `install` command to compile using node-gyp. * `"contributors": [...]` - If there is an `AUTHORS` file in the root of your package, npm will - treat each line as a `Name (url)` format, where email and url - are optional. Lines which start with a `#` or are blank, will be - ignored. + If there is an `AUTHORS` file in the root of your package, npm will treat + each line as a `Name (url)` format, where email and url are + optional. Lines which start with a `#` or are blank, will be ignored. 
### SEE ALSO diff --git a/deps/npm/docs/content/configuring-npm/package-lock-json.md b/deps/npm/docs/content/configuring-npm/package-lock-json.md index a3083410f7e2a2..4d994bbc8c0a28 100644 --- a/deps/npm/docs/content/configuring-npm/package-lock-json.md +++ b/deps/npm/docs/content/configuring-npm/package-lock-json.md @@ -14,132 +14,223 @@ generate identical trees, regardless of intermediate dependency updates. This file is intended to be committed into source repositories, and serves various purposes: -* Describe a single representation of a dependency tree such that teammates, deployments, and continuous integration are guaranteed to install exactly the same dependencies. +* Describe a single representation of a dependency tree such that + teammates, deployments, and continuous integration are guaranteed to + install exactly the same dependencies. -* Provide a facility for users to "time-travel" to previous states of `node_modules` without having to commit the directory itself. +* Provide a facility for users to "time-travel" to previous states of + `node_modules` without having to commit the directory itself. -* To facilitate greater visibility of tree changes through readable source control diffs. +* Facilitate greater visibility of tree changes through readable source + control diffs. -* And optimize the installation process by allowing npm to skip repeated metadata resolutions for previously-installed packages. +* Optimize the installation process by allowing npm to skip repeated + metadata resolutions for previously-installed packages. -One key detail about `package-lock.json` is that it cannot be published, and it -will be ignored if found in any place other than the toplevel package. It shares -a format with [npm-shrinkwrap.json](/configuring-npm/shrinkwrap-json), which is essentially the same file, but -allows publication. This is not recommended unless deploying a CLI tool or -otherwise using the publication process for producing production packages. +* As of npm v7, lockfiles include enough information to gain a complete + picture of the package tree, reducing the need to read `package.json` + files, and allowing for significant performance improvements. -If both `package-lock.json` and `npm-shrinkwrap.json` are present in the root of -a package, `package-lock.json` will be completely ignored. +### `package-lock.json` vs `npm-shrinkwrap.json` +Both of these files have the same format, and perform similar functions in +the root of a project. -### File Format +The difference is that `package-lock.json` is that it cannot be published, +and it will be ignored if found in any place other than the root project. -#### name +In contrast, [npm-shrinkwrap.json](/configuring-npm/npm-shrinkwrap-json) allows +publication, and defines the dependency tree from the point encountered. +This is not recommended unless deploying a CLI tool or otherwise using the +publication process for producing production packages. -The name of the package this is a package-lock for. This must match what's in -`package.json`. +If both `package-lock.json` and `npm-shrinkwrap.json` are present in the +root of a project, `npm-shrinkwrap.json` will take precedence and +`package-lock.json` will be ignored. -#### version +### Hidden Lockfiles -The version of the package this is a package-lock for. This must match what's in -`package.json`. +In order to avoid processing the `node_modules` folder repeatedly, npm as +of v7 uses a "hidden" lockfile present in +`node_modules/.package-lock.json`. 
This contains information about the +tree, and is used in lieu of reading the entire `node_modules` hierarchy +provided that the following conditions are met: -#### lockfileVersion +- All package folders it references exist in the `node_modules` hierarchy. +- No package folders exist in the `node_modules` hierarchy that are not + listed in the lockfile. +- The modified time of the file is at least as recent as all of the package + folders it references. -An integer version, starting at `1` with the version number of this document -whose semantics were used when generating this `package-lock.json`. +That is, the hidden lockfile will only be relevant if it was created as +part of the most recent update to the package tree. If another CLI mutates +the tree in any way, this will be detected, and the hidden lockfile will be +ignored. -#### packageIntegrity +Note that it _is_ possible to manually change the _contents_ of a package +in such a way that the modified time of the package folder is unaffected. +For example, if you add a file to `node_modules/foo/lib/bar.js`, then the +modified time on `node_modules/foo` will not reflect this change. If you +are manually editing files in `node_modules`, it is generally best to +delete the file at `node_modules/.package-lock.json`. -This is a [subresource -integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) value -created from the `package.json`. No preprocessing of the `package.json` should -be done. Subresource integrity strings can be produced by modules like -[`ssri`](https://www.npmjs.com/package/ssri). +As the hidden lockfile is ignored by older npm versions, it does not +contain the backwards compatibility affordances present in "normal" +lockfiles. That is, it is `lockfileVersion: 3`, rather than +`lockfileVersion: 2`. -#### preserveSymlinks +### Handling Old Lockfiles -Indicates that the install was done with the environment variable -`NODE_PRESERVE_SYMLINKS` enabled. The installer should insist that the value of -this property match that environment variable. +When npm detects a lockfile from npm v6 or before during the package +installation process, it is automatically updated to fetch missing +information from either the `node_modules` tree or (in the case of empty +`node_modules` trees or very old lockfile formats) the npm registry. -#### dependencies +### File Format -A mapping of package name to dependency object. Dependency objects have the -following properties: +#### `name` -##### version +The name of the package this is a package-lock for. This will match what's +in `package.json`. -This is a specifier that uniquely identifies this package and should be -usable in fetching a new copy of it. +#### `version` -* bundled dependencies: Regardless of source, this is a version number that is purely for informational purposes. -* registry sources: This is a version number. (eg, `1.2.3`) -* git sources: This is a git specifier with resolved committish. (eg, `git+https://example.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e`) -* http tarball sources: This is the URL of the tarball. (eg, `https://example.com/example-1.3.0.tgz`) -* local tarball sources: This is the file URL of the tarball. (eg `file:///opt/storage/example-1.3.0.tgz`) -* local link sources: This is the file URL of the link. (eg `file:libs/our-module`) +The version of the package this is a package-lock for. This will match +what's in `package.json`. 
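To make the layout concrete, a minimal v7 lockfile might begin like this (the package name and version are hypothetical, and `lockfileVersion` and `packages` are described in the sections that follow):

```json
{
  "name": "my-app",
  "version": "1.0.0",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "my-app",
      "version": "1.0.0"
    }
  }
}
```

A real lockfile also carries an entry under `packages` for every installed package, plus a legacy `dependencies` section kept for older npm versions.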
-##### integrity +#### `lockfileVersion` -This is a [Standard Subresource -Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) for this -resource. +An integer version, starting at `1` with the version number of this +document whose semantics were used when generating this +`package-lock.json`. -* For bundled dependencies this is not included, regardless of source. -* For registry sources, this is the `integrity` that the registry provided, or if one wasn't provided the SHA1 in `shasum`. -* For git sources this is the specific commit hash we cloned from. -* For remote tarball sources this is an integrity based on a SHA512 of - the file. -* For local tarball sources: This is an integrity field based on the SHA512 of the file. +Note that the file format changed significantly in npm v7 to track +information that would have otherwise required looking in `node_modules` or +the npm registry. Lockfiles generated by npm v7 will contain +`lockfileVersion: 2`. -##### resolved +* No version provided: an "ancient" shrinkwrap file from a version of npm + prior to npm v5. +* `1`: The lockfile version used by npm v5 and v6. +* `2`: The lockfile version used by npm v7, which is backwards compatible + to v1 lockfiles. +* `3`: The lockfile version used by npm v7, _without_ backwards + compatibility affordances. This is used for the hidden lockfile at + `node_modules/.package-lock.json`, and will likely be used in a future + version of npm, once support for npm v6 is no longer relevant. -* For bundled dependencies this is not included, regardless of source. -* For registry sources this is path of the tarball relative to the registry - URL. If the tarball URL isn't on the same server as the registry URL then - this is a complete URL. +npm will always attempt to get whatever data it can out of a lockfile, even +if it is not a version that it was designed to support. -##### bundled +#### `packages` -If true, this is the bundled dependency and will be installed by the parent -module. When installing, this module will be extracted from the parent -module during the extract phase, not installed as a separate dependency. +This is an object that maps package locations to an object containing the +information about that package. -##### dev +The root project is typically listed with a key of `""`, and all other +packages are listed with their relative paths from the root project folder. -If true then this dependency is either a development dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both a development dependency of the top level and a -transitive dependency of a non-development dependency of the top level. +Package descriptors have the following fields: -##### optional +* version: The version found in `package.json` -If true then this dependency is either an optional dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both an optional dependency of the top level and a -transitive dependency of a non-optional dependency of the top level. +* resolved: The place where the package was actually resolved from. In + the case of packages fetched from the registry, this will be a url to a + tarball. In the case of git dependencies, this will be the full git url + with commit sha. In the case of link dependencies, this will be the + location of the link target. -All optional dependencies should be included even if they're uninstallable -on the current platform. 
+* integrity: A `sha512` or `sha1` [Standard Subresource + Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) + string for the artifact that was unpacked in this location. +* link: A flag to indicate that this is a symbolic link. If this is + present, no other fields are specified, since the link target will also + be included in the lockfile. -##### requires +* dev, optional, devOptional: If the package is strictly part of the + `devDependencies` tree, then `dev` will be true. If it is strictly part + of the `optionalDependencies` tree, then `optional` will be set. If it + is both a `dev` dependency _and_ an `optional` dependency of a non-dev + dependency, then `devOptional` will be set. (An `optional` dependency of + a `dev` dependency will have both `dev` and `optional` set.) -This is a mapping of module name to version. This is a list of everything -this module requires, regardless of where it will be installed. The version -should match via normal matching rules a dependency either in our -`dependencies` or in a level higher than us. +* inBundle: A flag to indicate that the package is a bundled dependency. +* hasInstallScript: A flag to indicate that the package has a `preinstall`, + `install`, or `postinstall` script. -##### dependencies +* hasShrinkwrap: A flag to indicate that the package has an + `npm-shrinkwrap.json` file. + +* bin, license, engines, dependencies, optionalDependencies: fields from + `package.json` + +#### dependencies -The dependencies of this dependency, exactly as at the top level. +Legacy data for supporting versions of npm that use `lockfileVersion: 1`. +This is a mapping of package names to dependency objects. Because the +object structure is strictly hierarchical, symbolic link dependencies are +somewhat challenging to represent in some cases. + +npm v7 ignores this section entirely if a `packages` section is present, +but does keep it up to date in order to support switching between npm v6 +and npm v7. + +Dependency objects have the following fields: + +* version: a specifier that varies depending on the nature of the package, + and is usable in fetching a new copy of it. + + * bundled dependencies: Regardless of source, this is a version number + that is purely for informational purposes. + * registry sources: This is a version number. (eg, `1.2.3`) + * git sources: This is a git specifier with resolved committish. (eg, + `git+https://example.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e`) + * http tarball sources: This is the URL of the tarball. (eg, + `https://example.com/example-1.3.0.tgz`) + * local tarball sources: This is the file URL of the tarball. (eg + `file:///opt/storage/example-1.3.0.tgz`) + * local link sources: This is the file URL of the link. (eg + `file:libs/our-module`) + +* integrity: A `sha512` or `sha1` [Standard Subresource + Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) + string for the artifact that was unpacked in this location. For git + dependencies, this is the commit sha. + +* resolved: For registry sources this is path of the tarball relative to + the registry URL. If the tarball URL isn't on the same server as the + registry URL then this is a complete URL. + +* bundled: If true, this is the bundled dependency and will be installed + by the parent module. When installing, this module will be extracted + from the parent module during the extract phase, not installed as a + separate dependency. 
+ +* dev: If true then this dependency is either a development dependency ONLY + of the top level module or a transitive dependency of one. This is false + for dependencies that are both a development dependency of the top level + and a transitive dependency of a non-development dependency of the top + level. + +* optional: If true then this dependency is either an optional dependency + ONLY of the top level module or a transitive dependency of one. This is + false for dependencies that are both an optional dependency of the top + level and a transitive dependency of a non-optional dependency of the top + level. + +* requires: This is a mapping of module name to version. This is a list of + everything this module requires, regardless of where it will be + installed. The version should match via normal matching rules a + dependency either in our `dependencies` or in a level higher than us. + +* dependencies: The dependencies of this dependency, exactly as at the top + level. ### See also * [npm shrinkwrap](/commands/npm-shrinkwrap) -* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) -* [package-locks](/configuring-npm/package-locks) +* [npm-shrinkwrap.json](/configuring-npm/npm-shrinkwrap-json) * [package.json](/configuring-npm/package-json) * [npm install](/commands/npm-install) diff --git a/deps/npm/docs/content/configuring-npm/package-locks.md b/deps/npm/docs/content/configuring-npm/package-locks.md deleted file mode 100644 index a456ef8936645d..00000000000000 --- a/deps/npm/docs/content/configuring-npm/package-locks.md +++ /dev/null @@ -1,178 +0,0 @@ ---- -title: package-locks -section: 5 -description: An explanation of npm lockfiles ---- - -### Description - -Conceptually, the "input" to [`npm install`](/commands/npm-install) is a [package.json](/configuring-npm/package-json), while its -"output" is a fully-formed `node_modules` tree: a representation of the -dependencies you declared. In an ideal world, npm would work like a pure -function: the same `package.json` should produce the exact same `node_modules` -tree, any time. In some cases, this is indeed true. But in many others, npm is -unable to do this. There are multiple reasons for this: - -* different versions of npm (or other package managers) may have been used to install a package, each using slightly different installation algorithms. - -* a new version of a direct semver-range package may have been published since the last time your packages were installed, and thus a newer version will be used. - -* A dependency of one of your dependencies may have published a new version, which will update even if you used pinned dependency specifiers (`1.2.3` instead of `^1.2.3`) - -* The registry you installed from is no longer available, or allows mutation of versions (unlike the primary npm registry), and a different version of a package exists under the same version number now. 
- -As an example, consider package A: - -```json -{ - "name": "A", - "version": "0.1.0", - "dependencies": { - "B": "<0.1.0" - } -} -``` - -package B: - -```json -{ - "name": "B", - "version": "0.0.1", - "dependencies": { - "C": "<0.1.0" - } -} -``` - -and package C: -```json -{ - "name": "C", - "version": "0.0.1" -} -``` - -If these are the only versions of A, B, and C available in the -registry, then a normal `npm install A` will install: - -```json -A@0.1.0 -`-- B@0.0.1 - `-- C@0.0.1 -``` - -However, if B@0.0.2 is published, then a fresh `npm install A` will -install: - -```bash -A@0.1.0 -`-- B@0.0.2 - `-- C@0.0.1 -``` - -assuming the new version did not modify B's dependencies. Of course, -the new version of B could include a new version of C and any number -of new dependencies. If such changes are undesirable, the author of A -could specify a dependency on B@0.0.1. However, if A's author and B's -author are not the same person, there's no way for A's author to say -that he or she does not want to pull in newly published versions of C -when B hasn't changed at all. - -To prevent this potential issue, npm uses [package-lock.json](/configuring-npm/package-lock-json) or, if present, [npm-shrinkwrap.json](/configuring-npm/shrinkwrap-json). These files are called package locks, or lockfiles. - -Whenever you run `npm install`, npm generates or updates your package lock, -which will look something like this: - -```json -{ - "name": "A", - "version": "0.1.0", - ...metadata fields... - "dependencies": { - "B": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/B/-/B-0.0.1.tgz", - "integrity": "sha512-DeAdb33F+" - "dependencies": { - "C": { - "version": "git://github.com/org/C.git#5c380ae319fc4efe9e7f2d9c78b0faa588fd99b4" - } - } - } - } -} -``` - -This file describes an *exact*, and more importantly *reproducible* -`node_modules` tree. Once it's present, any future installation will base its -work off this file, instead of recalculating dependency versions off -[package.json](/configuring-npm/package-json). - -The presence of a package lock changes the installation behavior such that: - -1. The module tree described by the package lock is reproduced. This means -reproducing the structure described in the file, using the specific files -referenced in "resolved" if available, falling back to normal package resolution -using "version" if one isn't. - -2. The tree is walked and any missing dependencies are installed in the usual -fashion. - -If `preshrinkwrap`, `shrinkwrap` or `postshrinkwrap` are in the `scripts` -property of the `package.json`, they will be executed in order. `preshrinkwrap` -and `shrinkwrap` are executed before the shrinkwrap, `postshrinkwrap` is -executed afterwards. These scripts run for both `package-lock.json` and -`npm-shrinkwrap.json`. For example to run some postprocessing on the generated -file: - -```json - "scripts": { - "postshrinkwrap": "json -I -e \"this.myMetadata = $MY_APP_METADATA\"" - } -``` - -#### Using locked packages - -Using a locked package is no different than using any package without a package -lock: any commands that update `node_modules` and/or `package.json`'s -dependencies will automatically sync the existing lockfile. This includes `npm -install`, `npm rm`, `npm update`, etc. To prevent this update from happening, -you can use the `--no-save` option to prevent saving altogether, or -`--no-shrinkwrap` to allow `package.json` to be updated while leaving -`package-lock.json` or `npm-shrinkwrap.json` intact. 
- -It is highly recommended you commit the generated package lock to source -control: this will allow anyone else on your team, your deployments, your -CI/continuous integration, and anyone else who runs `npm install` in your -package source to get the exact same dependency tree that you were developing -on. Additionally, the diffs from these changes are human-readable and will -inform you of any changes npm has made to your `node_modules`, so you can notice -if any transitive dependencies were updated, hoisted, etc. - -#### Resolving lockfile conflicts - -Occasionally, two separate npm install will create package locks that cause -merge conflicts in source control systems. As of `npm@5.7.0`, these conflicts -can be resolved by manually fixing any `package.json` conflicts, and then -running `npm install [--package-lock-only]` again. npm will automatically -resolve any conflicts for you and write a merged package lock that includes all -the dependencies from both branches in a reasonable tree. If -`--package-lock-only` is provided, it will do this without also modifying your -local `node_modules/`. - -To make this process seamless on git, consider installing -[`npm-merge-driver`](https://npm.im/npm-merge-driver), which will teach git how -to do this itself without any user interaction. In short: `$ npx -npm-merge-driver install -g` will let you do this, and even works with -pre-`npm@5.7.0` versions of npm 5, albeit a bit more noisily. Note that if -`package.json` itself conflicts, you will have to resolve that by hand and run -`npm install` manually, even with the merge driver. - -### See Also - -* https://medium.com/@sdboyer/so-you-want-to-write-a-package-manager-4ae9c17d9527 -* [package.json](/configuring-npm/package-json) -* [package-lock.json](/configuring-npm/package-lock-json) -* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) -* [npm shrinkwrap](/commands/npm-shrinkwrap) diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html index 4482f6608999f2..8705b442873c05 100644 --- a/deps/npm/docs/output/commands/npm-audit.html +++ b/deps/npm/docs/output/commands/npm-audit.html @@ -271,7 +271,6 @@

Examples

See Also

diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index 9aa49839f5c095..8361e995b3a161 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -193,7 +193,7 @@

Example

See Also

diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index b3991e6400e862..15d1bbc99f4784 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -159,7 +159,7 @@

Description

the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm’s source tree will show:

-
npm@7.4.0 /path/to/npm
+
npm@7.4.3 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 
diff --git a/deps/npm/docs/output/commands/npm-prefix.html b/deps/npm/docs/output/commands/npm-prefix.html index 7424df7aaef9f7..a330dfd1c373cc 100644 --- a/deps/npm/docs/output/commands/npm-prefix.html +++ b/deps/npm/docs/output/commands/npm-prefix.html @@ -141,18 +141,25 @@

npm-prefix

Table of contents

- +

Synopsis

npm prefix [-g]
 

Description

-

Print the local prefix to standard out. This is the closest parent directory +

Print the local prefix to standard output. This is the closest parent directory to contain a package.json file or node_modules directory, unless -g is also specified.

If -g is specified, this will be the value of the global prefix. See npm config for more detail.

+

Example

+
npm prefix
+/usr/local/projects/foo
+
+
npm prefix -g
+/usr/local
+

See Also

  • npm root
  • diff --git a/deps/npm/docs/output/commands/npm-search.html b/deps/npm/docs/output/commands/npm-search.html index c98a22eca01a33..bad81ffa002be5 100644 --- a/deps/npm/docs/output/commands/npm-search.html +++ b/deps/npm/docs/output/commands/npm-search.html @@ -141,7 +141,7 @@

    npm-search

    Table of contents

    - +

    Synopsis

    @@ -152,29 +152,36 @@

    Table of contents

    Description

    Search the registry for packages matching the search terms. npm search performs a linear, incremental, lexically-ordered search through package -metadata for all files in the registry. If color is enabled, it will further -highlight the matches in the results.

    -

    Additionally, using the --searchopts and --searchexclude options paired with -more search terms will respectively include and exclude further patterns. The -main difference between --searchopts and the standard search terms is that the -former does not highlight results in the output and can be used for more -fine-grained filtering. Additionally, both of these can be added to .npmrc for -default search filtering behavior.

+metadata for all files in the registry. If your terminal has color +support, it will further highlight the matches in the results. This can +be disabled with the config item color.

    +

Additionally, using the --searchopts and --searchexclude options +paired with more search terms will include and exclude further patterns. +The main difference between --searchopts and the standard search terms +is that the former does not highlight results in the output and you can +use them for more fine-grained filtering. Additionally, you can add both of +these to your config to change default search filtering behavior.

    Search also allows targeting of maintainers in search results, by prefixing their npm username with =.

    -

    If a term starts with /, then it’s interpreted as a regular expression and -supports standard JavaScript RegExp syntax. A trailing / will be ignored in -this case. (Note that many regular expression characters must be escaped or -quoted in most shells.)

    -

    A Note on caching

    +

    If a term starts with /, then it’s interpreted as a regular expression +and supports standard JavaScript RegExp syntax. In this case search will +ignore a trailing / . (Note you must escape or quote many regular +expression characters in most shells.)

    Configuration

    +

    All of the following can be defined in a .npmrc file, or passed as +parameters to the cli prefixed with -- (e.g. --json)

    description

    • Default: true
    • Type: Boolean
    -

    Used as --no-description, disables search matching in package descriptions and -suppresses display of that field in results.

    +

    color

    +
      +
    • Default: true
    • +
    • Type: Boolean
    • +
    +

    Used as --no-color, disables color highlighting of matches in the +results.

    json

    • Default: false
    • @@ -193,9 +200,9 @@

      long

    • Type: Boolean

    Display full package descriptions and other long text across multiple -lines. When disabled (default) search results are truncated to fit -neatly on a single line. Modules with extremely long names will -fall on multiple lines.

    +lines. When disabled (which is the default) the output will +truncate search results to fit neatly on a single line. Modules with +extremely long names will fall on multiple lines.

    searchopts

    • Default: “”
    • @@ -208,28 +215,37 @@

      searchexclude

    • Type: String

    Space-separated options that limit the results from search.

    -

    searchstaleness

    -
      -
    • Default: 900 (15 minutes)
    • -
    • Type: Number
    • -
    -

    The age of the cache, in seconds, before another registry request is made.

    registry

    -

    Search the specified registry for modules. If you have configured npm to point -to a different default registry, such as your internal private module -repository, npm search will default to that registry when searching. Pass a -different registry url such as the default above in order to override this -setting.

    +

    Search the specified registry for modules. If you have configured npm to +point to a different default registry (such as your internal private +module repository), npm search will also default to that registry when +searching.

    +

    A note on caching

    +

    The npm cli caches search results with the same terms and options +locally in its cache. You can use the following to change how and when +the cli uses this cache. See npm cache for more +on how the cache works.

    +

    prefer-online

    +

Forces staleness checks for cached searches, making the cli look for +updates immediately even for fresh search results.

    +

    prefer-offline

    +

Bypasses staleness checks for cached searches. Missing data will still be +requested from the server. To force full offline mode, use offline.

    +

    offline

    +

    Forces full offline mode. Any searches not locally cached will result in +an error.

    See Also

    diff --git a/deps/npm/docs/output/commands/npm-shrinkwrap.html b/deps/npm/docs/output/commands/npm-shrinkwrap.html index 54e413ab750a55..63057cbe952b17 100644 --- a/deps/npm/docs/output/commands/npm-shrinkwrap.html +++ b/deps/npm/docs/output/commands/npm-shrinkwrap.html @@ -149,19 +149,19 @@

    Table of contents

Description

This command repurposes package-lock.json into a publishable -npm-shrinkwrap.json or simply creates a new one. The file created and updated -by this command will then take precedence over any other existing or future -package-lock.json files. For a detailed explanation of the design and purpose -of package locks in npm, see package-locks.

+npm-shrinkwrap.json or simply creates a new one. The file created and +updated by this command will then take precedence over any other existing +or future package-lock.json files. For a detailed explanation of the +design and purpose of package locks in npm, see +package-lock-json.

See Also

diff --git a/deps/npm/docs/output/commands/npm-start.html b/deps/npm/docs/output/commands/npm-start.html index 6f392c99a33318..920e55b4504572 100644 --- a/deps/npm/docs/output/commands/npm-start.html +++ b/deps/npm/docs/output/commands/npm-start.html @@ -141,18 +141,37 @@

npm-start

Table of contents

- +

Synopsis

npm start [-- <args>]
 

Description

-

This runs an arbitrary command specified in the package’s "start" property of -its "scripts" object. If no "start" property is specified on the -"scripts" object, it will run node server.js.

+

This runs a predefined command specified in the "start" property of +a package’s "scripts" object.

+

If the "scripts" object does not define a "start" property, npm +will run node server.js.

+

Note that this is different from the default node behavior of running +the file specified in a package’s "main" attribute when invoking with +node .

As of npm@2.0.0, you can use custom arguments when executing scripts. Refer to npm run-script for more details.

+

Example

+
{
+  "scripts": {
+    "start": "node foo.js"
+  }
+}
+
+
npm start
+
+> npm@x.x.x start
+> node foo.js
+
+(foo.js output would be here)
+
+

See Also

  • npm run-script
  • diff --git a/deps/npm/docs/output/commands/npm-stop.html b/deps/npm/docs/output/commands/npm-stop.html index 6ca7ce0e590dca..defee05689eb97 100644 --- a/deps/npm/docs/output/commands/npm-stop.html +++ b/deps/npm/docs/output/commands/npm-stop.html @@ -141,14 +141,32 @@

    npm-stop

    Table of contents

    - +

    Synopsis

    npm stop [-- <args>]
     

    Description

    -

    This runs a package’s “stop” script, if one was provided.

    +

    This runs a predefined command specified in the “stop” property of a +package’s “scripts” object.

    +

    Unlike with npm start, there is no default script +that will run if the "stop" property is not defined.

    +

    Example

    +
    {
    +  "scripts": {
    +    "stop": "node bar.js"
    +  }
    +}
    +
    +
    npm stop
    +
    +> npm@x.x.x stop
    +> node bar.js
    +
    +(bar.js output would be here)
    +
    +

    See Also

    • npm run-script
diff --git a/deps/npm/docs/output/commands/npm-test.html b/deps/npm/docs/output/commands/npm-test.html
index cdb6bb6cfc7a59..3a721ab1fff161 100644
--- a/deps/npm/docs/output/commands/npm-test.html
+++ b/deps/npm/docs/output/commands/npm-test.html
@@ -141,7 +141,7 @@

npm-test

Table of contents

Synopsis

@@ -150,7 +150,21 @@

Table of contents

aliases: t, tst

Description

-This runs a package’s “test” script, if one was provided.

+This runs a predefined command specified in the "test" property of a package’s "scripts" object.

Example

+{
+  "scripts": {
+    "test": "node test.js"
+  }
+}

+npm test
+> npm@x.x.x test
+> node test.js
+
+(test.js output would be here)

See Also

  • npm run-script
diff --git a/deps/npm/docs/output/commands/npm-token.html b/deps/npm/docs/output/commands/npm-token.html
index c704357a4b51d6..de92432475e248 100644
--- a/deps/npm/docs/output/commands/npm-token.html
+++ b/deps/npm/docs/output/commands/npm-token.html
@@ -153,8 +153,8 @@

Description

This lets you list, create and revoke authentication tokens.

  • npm token list:
    Shows a table of all active authentication tokens. You can request this as JSON with --json or tab-separated values with --parseable.

    +--------+---------+------------+----------+----------------+
    | id     | token   | created    | read-only | CIDR whitelist |

@@ -175,11 +175,19 @@

Description

    +--------+---------+------------+----------+----------------+

  • npm token create [--read-only] [--cidr=<cidr-ranges>]:
-Create a new authentication token. It can be --read-only or accept a list of CIDR ranges to limit use of this token to. This will prompt you for your password, and, if you have two-factor authentication enabled, an otp.
+Create a new authentication token. It can be --read-only, or accept a list of CIDR ranges with which to limit use of this token. This will prompt you for your password, and, if you have two-factor authentication enabled, an otp.

+Currently, the cli can not generate automation tokens. Please refer to the docs website for more information on generating automation tokens.

    +----------------+--------------------------------------+
    | token          | a73c9572-f1b9-8983-983d-ba3ac3cc913d |

@@ -193,10 +201,12 @@

Description

  • npm token revoke <token|id>:
-This removes an authentication token, making it immediately unusable. This can accept both complete tokens (as you get back from npm token create and will find in your .npmrc) and ids as seen in the npm token list output. This will NOT accept the truncated token found in npm token list output.
+Immediately removes an authentication token from the registry. You will no longer be able to use it. This can accept both complete tokens (such as those you get back from npm token create, and those found in your .npmrc), and ids as seen in the parseable or json output of npm token list. This will NOT accept the truncated token found in the normal npm token list output.
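A small sketch of consuming the --json form of npm token list mentioned above, assuming you are logged in and that the JSON output parses to an array of token records; field names vary between npm versions, so it simply prints whatever comes back:

  // List active tokens via the machine-readable output described above.
  const { execFileSync } = require('child_process');

  const raw = execFileSync('npm', ['token', 'list', '--json'], { encoding: 'utf8' });
  const tokens = JSON.parse(raw);

  console.log(`${tokens.length} active token(s)`);
  for (const entry of tokens) {
    // Print every field the registry returned for this token entry.
    console.log(Object.entries(entry).map(([key, value]) => `${key}=${value}`).join('  '));
  }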
diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html
index 0ed6f6fb604bed..c2712db4f730e4 100644
--- a/deps/npm/docs/output/commands/npm-uninstall.html
+++ b/deps/npm/docs/output/commands/npm-uninstall.html
@@ -141,48 +141,41 @@

npm-uninstall

Table of contents

Synopsis

-npm uninstall [<@scope>/]<pkg>[@<version>]... [-S|--save|-D|--save-dev|-O|--save-optional|--no-save]
+npm uninstall [<@scope>/]<pkg>[@<version>]... [-S|--save|--no-save]

aliases: remove, rm, r, un, unlink

Description

This uninstalls a package, completely removing everything npm installed on its behalf.

-Example:

-npm uninstall sax

+It also removes the package from the dependencies, devDependencies, optionalDependencies, and peerDependencies objects in your package.json.

+Futher, if you have an npm-shrinkwrap.json or package-lock.json, npm will update those files as well.

+--no-save will tell npm not to remove the package from your package.json, npm-shrinkwrap.json, or package-lock.json files.

+--save or -S will tell npm to remove the package from your package.json, npm-shrinkwrap.json, and package-lock.json files. This is the default, but you may need to use this if you have for instance save=false in your npmrc file

-In global mode (ie, with -g or --global appended to the command), it uninstalls the current package context as a global package.

-npm uninstall takes 3 exclusive, optional flags which save or update the package version in your main package.json:

-  • -S, --save: Package will be removed from your dependencies.
-  • -D, --save-dev: Package will be removed from your devDependencies.
-  • -O, --save-optional: Package will be removed from your optionalDependencies.
-  • --no-save: Package will not be removed from your package.json file.

-Further, if you have an npm-shrinkwrap.json then it will be updated as well.

+In global mode (ie, with -g or --global appended to the command), it uninstalls the current package context as a global package. --no-save is ignored in this case.

Scope is optional and follows the usual rules for scope.

-Examples:

-npm uninstall sax --save
-npm uninstall @myorg/privatepackage --save
-npm uninstall node-tap --save-dev
-npm uninstall dtrace-provider --save-optional
-npm uninstall lodash --no-save

+Examples

+npm uninstall sax

+sax will no longer be in your package.json, npm-shrinkwrap.json, or package-lock.json files.

+npm uninstall lodash --no-save

+lodash will not be removed from your package.json, npm-shrinkwrap.json, or package-lock.json files.

See Also

  • npm prune
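A rough sketch of the side effect described above, assuming a project that currently depends on sax (the same example package used in this page); it uninstalls the package and then confirms the entry is gone from every dependency object in package.json:

  // Uninstall a package, then verify package.json no longer lists it.
  const fs = require('fs');
  const { execFileSync } = require('child_process');

  execFileSync('npm', ['uninstall', 'sax'], { stdio: 'inherit' });

  const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
  const fields = ['dependencies', 'devDependencies', 'optionalDependencies', 'peerDependencies'];
  const stillListed = fields.some((field) => pkg[field] && pkg[field].sax);

  console.log(stillListed ? 'sax is still listed' : 'sax removed from package.json');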
diff --git a/deps/npm/docs/output/commands/npm-unpublish.html b/deps/npm/docs/output/commands/npm-unpublish.html
index 599b051e19ad71..75751d3408cfa1 100644
--- a/deps/npm/docs/output/commands/npm-unpublish.html
+++ b/deps/npm/docs/output/commands/npm-unpublish.html
@@ -145,6 +145,7 @@

Table of contents

Synopsis

+To learn more about how the npm registry treats unpublish, see our unpublish policies

Unpublishing a single version of a package

npm unpublish [<@scope>/]<pkg>@<version>

@@ -152,16 +153,21 @@

Unpublishing an entire package

npm unpublish [<@scope>/]<pkg> --force

Warning

Consider using the deprecate command instead, if your intent is to encourage users to upgrade, or if you no longer want to maintain a package.

Description

-This removes a package version from the registry, deleting its entry and removing the tarball.

-If no version is specified, or if all versions are removed then the root package entry is removed from the registry entirely.

-Even if a package version is unpublished, that specific name and version combination can never be reused. In order to publish the package again, a new version number must be used. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed.

-To learn more about how unpublish is treated on the npm registry, see our unpublish policies.

+This removes a package version from the registry, deleting its entry and removing the tarball.

+The npm registry will return an error if you are not logged in.

+If you do not specify a version or if you remove all of a package’s versions then the registry will remove the root package entry entirely.

+Even if you unpublish a package version, that specific name and version combination can never be reused. In order to publish the package again, you must use a new version number. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed.

See Also

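Since the registry rejects unpublish requests from users who are not logged in (as noted above), a tiny sketch can check login state up front with npm whoami before anything destructive is attempted:

  // Check whether the current user is authenticated against the registry.
  const { execFileSync } = require('child_process');

  try {
    const user = execFileSync('npm', ['whoami'], { encoding: 'utf8' }).trim();
    console.log(`logged in as ${user}; an unpublish request would be authenticated`);
  } catch (err) {
    console.log('not logged in; npm unpublish would fail with a registry error');
  }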
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index de9bf85161d1e0..03d4c92821011c 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -141,14 +141,14 @@

npm

Table of contents

Synopsis

npm <command> [args]

Version

-7.4.0

+7.4.3

Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency

@@ -246,19 +246,10 @@

Configuration

Contributions

Patches welcome!

If you would like to contribute, but don’t know what to work on, read the contributing guidelines and check the issues list.

Bugs

-When you find issues, please report them:

+When you find issues, please report them: https://github.com/npm/cli/issues

Be sure to follow the template and bug reporting guidelines.

Feature Requests

Discuss new feature ideas on our discussion forum:

@@ -269,11 +260,6 @@

Feature Requests

-Author

-Isaac Z. Schlueter :: isaacs :: @izs :: i@izs.me

See Also

  • npm help
diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html
index f5452b64fd0fbe..0950fadc17ec92 100644
--- a/deps/npm/docs/output/configuring-npm/install.html
+++ b/deps/npm/docs/output/configuring-npm/install.html
@@ -145,20 +145,33 @@

Table of contents

Description

To publish and install packages to and from the public npm registry, you must install Node.js and the npm command line interface using either a Node version manager or a Node installer. We strongly recommend using a Node version manager to install Node.js and npm. We do not recommend using a Node installer, since the Node installation process installs npm in a directory with local permissions and can cause permissions errors when you run npm packages globally.

Overview

Checking your version of npm and Node.js

To see if you already have Node.js and npm installed and check the installed version, run the following commands:

node -v
npm -v

Using a Node version manager to install Node.js and npm

Node version managers allow you to install and switch between multiple versions of Node.js and npm on your system so you can test your applications on multiple versions of npm to ensure they work for users on different versions.

OSX or Linux Node version managers

Using a Node installer to install Node.js and npm

If you are unable to use a Node version manager, you can use a Node installer to install both Node.js and npm on your system.

OS X or Windows Node installers

If you’re using OS X or Windows, use one of the installers from the Node.js download page. Be sure to install the version labeled LTS. Other versions have not yet been tested with npm.

Linux or other operating systems Node installers

If you’re using Linux or another operating system, use one of the following installers:

Or see this page to install npm for Linux in the way many Linux developers prefer.

Less-common operating systems

For more information on installing Node.js on a variety of operating systems, see this page.

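A one-file Node.js sketch of the version check described above, done from Node itself instead of typing node -v and npm -v in a shell (it assumes npm is on the PATH):

  // Print the Node.js and npm versions, mirroring `node -v` and `npm -v`.
  const { execFileSync } = require('child_process');

  console.log(`node: ${process.version}`);
  console.log(`npm:  ${execFileSync('npm', ['-v'], { encoding: 'utf8' }).trim()}`);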
diff --git a/deps/npm/docs/output/configuring-npm/shrinkwrap-json.html b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
similarity index 89%
rename from deps/npm/docs/output/configuring-npm/shrinkwrap-json.html
rename to deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
index b4425d3df95fe8..d0de6a1470a2c6 100644
--- a/deps/npm/docs/output/configuring-npm/shrinkwrap-json.html
+++ b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html
@@ -1,5 +1,5 @@
-shrinkwrap.json
+npm-shrinkwrap.json

package-locks

-An explanation of npm lockfiles

Table of contents

Description

-Conceptually, the “input” to npm install is a package.json, while its “output” is a fully-formed node_modules tree: a representation of the dependencies you declared. In an ideal world, npm would work like a pure function: the same package.json should produce the exact same node_modules tree, any time. In some cases, this is indeed true. But in many others, npm is unable to do this. There are multiple reasons for this:

-  • different versions of npm (or other package managers) may have been used to install a package, each using slightly different installation algorithms.
-  • a new version of a direct semver-range package may have been published since the last time your packages were installed, and thus a newer version will be used.
-  • A dependency of one of your dependencies may have published a new version, which will update even if you used pinned dependency specifiers (1.2.3 instead of ^1.2.3)
-  • The registry you installed from is no longer available, or allows mutation of versions (unlike the primary npm registry), and a different version of a package exists under the same version number now.

-As an example, consider package A:

-{
-  "name": "A",
-  "version": "0.1.0",
-  "dependencies": {
-    "B": "<0.1.0"
-  }
-}

-package B:

-{
-  "name": "B",
-  "version": "0.0.1",
-  "dependencies": {
-    "C": "<0.1.0"
-  }
-}

-and package C:

-{
-  "name": "C",
-  "version": "0.0.1"
-}

-If these are the only versions of A, B, and C available in the registry, then a normal npm install A will install:

-A@0.1.0
-`-- B@0.0.1
-    `-- C@0.0.1

-However, if B@0.0.2 is published, then a fresh npm install A will install:

-A@0.1.0
-`-- B@0.0.2
-    `-- C@0.0.1

-assuming the new version did not modify B’s dependencies. Of course, the new version of B could include a new version of C and any number of new dependencies. If such changes are undesirable, the author of A could specify a dependency on B@0.0.1. However, if A’s author and B’s author are not the same person, there’s no way for A’s author to say that he or she does not want to pull in newly published versions of C when B hasn’t changed at all.

-To prevent this potential issue, npm uses package-lock.json or, if present, npm-shrinkwrap.json. These files are called package locks, or lockfiles.

-Whenever you run npm install, npm generates or updates your package lock, which will look something like this:

-{
-  "name": "A",
-  "version": "0.1.0",
-  ...metadata fields...
-  "dependencies": {
-    "B": {
-      "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/B/-/B-0.0.1.tgz",
-      "integrity": "sha512-DeAdb33F+"
-      "dependencies": {
-        "C": {
-          "version": "git://github.com/org/C.git#5c380ae319fc4efe9e7f2d9c78b0faa588fd99b4"
-        }
-      }
-    }
-  }
-}

-This file describes an exact, and more importantly reproducible node_modules tree. Once it’s present, any future installation will base its work off this file, instead of recalculating dependency versions off package.json.

-The presence of a package lock changes the installation behavior such that:

-  1. The module tree described by the package lock is reproduced. This means reproducing the structure described in the file, using the specific files referenced in “resolved” if available, falling back to normal package resolution using “version” if one isn’t.
-  2. The tree is walked and any missing dependencies are installed in the usual fashion.

-If preshrinkwrap, shrinkwrap or postshrinkwrap are in the scripts property of the package.json, they will be executed in order. preshrinkwrap and shrinkwrap are executed before the shrinkwrap, postshrinkwrap is executed afterwards. These scripts run for both package-lock.json and npm-shrinkwrap.json. For example to run some postprocessing on the generated file:

-  "scripts": {
-    "postshrinkwrap": "json -I -e \"this.myMetadata = $MY_APP_METADATA\""
-  }

-Using locked packages

-Using a locked package is no different than using any package without a package lock: any commands that update node_modules and/or package.json’s dependencies will automatically sync the existing lockfile. This includes npm install, npm rm, npm update, etc. To prevent this update from happening, you can use the --no-save option to prevent saving altogether, or --no-shrinkwrap to allow package.json to be updated while leaving package-lock.json or npm-shrinkwrap.json intact.

-It is highly recommended you commit the generated package lock to source control: this will allow anyone else on your team, your deployments, your CI/continuous integration, and anyone else who runs npm install in your package source to get the exact same dependency tree that you were developing on. Additionally, the diffs from these changes are human-readable and will inform you of any changes npm has made to your node_modules, so you can notice if any transitive dependencies were updated, hoisted, etc.

-Resolving lockfile conflicts

-Occasionally, two separate npm install will create package locks that cause merge conflicts in source control systems. As of npm@5.7.0, these conflicts can be resolved by manually fixing any package.json conflicts, and then running npm install [--package-lock-only] again. npm will automatically resolve any conflicts for you and write a merged package lock that includes all the dependencies from both branches in a reasonable tree. If --package-lock-only is provided, it will do this without also modifying your local node_modules/.

-To make this process seamless on git, consider installing npm-merge-driver, which will teach git how to do this itself without any user interaction. In short: $ npx npm-merge-driver install -g will let you do this, and even works with pre-npm@5.7.0 versions of npm 5, albeit a bit more noisily. Note that if package.json itself conflicts, you will have to resolve that by hand and run npm install manually, even with the merge driver.

-See Also
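Building on the lockfile shape shown in the removed document above, a hedged Node.js sketch can list the resolved versions recorded for top-level dependencies; newer lockfile versions also carry a "packages" section, so treat this as a rough reader rather than a full parser:

  // Read whichever package lock is present and list top-level dependency pins.
  const fs = require('fs');

  const lockPath = fs.existsSync('npm-shrinkwrap.json')
    ? 'npm-shrinkwrap.json'
    : 'package-lock.json';
  const lock = JSON.parse(fs.readFileSync(lockPath, 'utf8'));

  for (const [name, info] of Object.entries(lock.dependencies || {})) {
    console.log(`${name}@${info.version} (${info.resolved || 'no resolved url'})`);
  }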
          - - - - \ No newline at end of file diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js index f7e13369c86a25..84f36ada662016 100644 --- a/deps/npm/lib/link.js +++ b/deps/npm/lib/link.js @@ -45,6 +45,9 @@ const link = async args => { // Returns a list of items that can't be fulfilled by // things found in the current arborist inventory const missingArgsFromTree = (tree, args) => { + if (tree.isLink) + return missingArgsFromTree(tree.target, args) + const foundNodes = [] const missing = args.filter(a => { const arg = npa(a) diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js index 362dacad3dc30c..153759d83815e6 100644 --- a/deps/npm/lib/ls.js +++ b/deps/npm/lib/ls.js @@ -99,7 +99,7 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { // special formatting for top-level package name if (node.isRoot) { const hasNoPackageJson = !Object.keys(node.package).length - if (hasNoPackageJson) + if (hasNoPackageJson || global) printable = path else printable += `${long ? EOL : ' '}${path}` diff --git a/deps/npm/lib/search.js b/deps/npm/lib/search.js index 38f5a1d77b3226..a3d806d2f1507e 100644 --- a/deps/npm/lib/search.js +++ b/deps/npm/lib/search.js @@ -12,7 +12,7 @@ const completion = require('./utils/completion/none.js') const usage = usageUtil( 'search', - 'npm search [--long] [search terms ...]' + 'npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...]' ) const cmd = (args, cb) => search(args).then(() => cb()).catch(cb) diff --git a/deps/npm/lib/token.js b/deps/npm/lib/token.js index 98bbd30433cdd6..b737b5ffbca320 100644 --- a/deps/npm/lib/token.js +++ b/deps/npm/lib/token.js @@ -17,7 +17,7 @@ token._validateCIDRList = validateCIDRList const usageUtil = require('./utils/usage.js') token.usage = usageUtil('token', 'npm token list\n' + - 'npm token revoke \n' + + 'npm token revoke \n' + 'npm token create [--read-only] [--cidr=list]') const UsageError = (msg) => diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js index 83a0b009699eb6..15995c0b3cc946 100644 --- a/deps/npm/lib/uninstall.js +++ b/deps/npm/lib/uninstall.js @@ -9,7 +9,7 @@ const completion = require('./utils/completion/installed-shallow.js') const usage = usageUtil( 'uninstall', - 'npm uninstall [<@scope>/][@]... [--save-prod|--save-dev|--save-optional] [--no-save]' + 'npm uninstall [<@scope>/][@]... 
[-S|--save|--no-save]' ) const cmd = (args, cb) => rm(args).then(() => cb()).catch(cb) diff --git a/deps/npm/lib/unpublish.js b/deps/npm/lib/unpublish.js index 75993af9437d0d..528208c39693e0 100644 --- a/deps/npm/lib/unpublish.js +++ b/deps/npm/lib/unpublish.js @@ -35,7 +35,7 @@ const completionFn = async (args) => { const access = await libaccess.lsPackages(username, opts) // do a bit of filtering at this point, so that we don't need // to fetch versions for more than one thing, but also don't - // accidentally a whole project + // accidentally unpublish a whole project let pkgs = Object.keys(access || {}) if (!partialWord || !pkgs.length) return pkgs diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index 03f3c3f2bccb95..1c7268e135bb26 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -205,8 +205,6 @@ $ npm audit \-\-audit\-level=moderate .IP \(bu 2 npm help install .IP \(bu 2 -npm help package\-locks -.IP \(bu 2 npm help config .RE diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index e7092af6f34758..b74c14dc7730bf 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -83,6 +83,6 @@ cache: .IP \(bu 2 npm help install .IP \(bu 2 -npm help package\-locks +npm help package\-lock\.json .RE diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 94df47109a032e..0a90b749529343 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@7\.4\.0 /path/to/npm +npm@7\.4\.3 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index cc1e99d664e124..ce1426703aa0e1 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -10,12 +10,27 @@ npm prefix [\-g] .RE .SS Description .P -Print the local prefix to standard out\. This is the closest parent directory +Print the local prefix to standard output\. This is the closest parent directory to contain a \fBpackage\.json\fP file or \fBnode_modules\fP directory, unless \fB\-g\fP is also specified\. .P If \fB\-g\fP is specified, this will be the value of the global prefix\. See npm help \fBconfig\fP for more detail\. +.SS Example +.P +.RS 2 +.nf +npm prefix +/usr/local/projects/foo +.fi +.RE +.P +.RS 2 +.nf +npm prefix \-g +/usr/local +.fi +.RE .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index 061bdc5458e20c..ab71c948f48bf8 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -14,25 +14,28 @@ aliases: s, se, find .P Search the registry for packages matching the search terms\. \fBnpm search\fP performs a linear, incremental, lexically\-ordered search through package -metadata for all files in the registry\. If color is enabled, it will further -highlight the matches in the results\. +metadata for all files in the registry\. If your terminal has color +support, it will further highlight the matches in the results\. This can +be disabled with the config item \fBcolor\fP .P -Additionally, using the \fB\-\-searchopts\fP and \fB\-\-searchexclude\fP options paired with -more search terms will respectively include and exclude further patterns\. 
The -main difference between \fB\-\-searchopts\fP and the standard search terms is that the -former does not highlight results in the output and can be used for more -fine\-grained filtering\. Additionally, both of these can be added to \fB\|\.npmrc\fP for -default search filtering behavior\. +Additionally, using the \fB\-\-searchopts\fP and \fB\-\-searchexclude\fP options +paired with more search terms will include and exclude further patterns\. +The main difference between \fB\-\-searchopts\fP and the standard search terms +is that the former does not highlight results in the output and you can +use them more fine\-grained filtering\. Additionally, you can add both of +these to your config to change default search filtering behavior\. .P Search also allows targeting of maintainers in search results, by prefixing their npm username with \fB=\fP\|\. .P -If a term starts with \fB/\fP, then it's interpreted as a regular expression and -supports standard JavaScript RegExp syntax\. A trailing \fB/\fP will be ignored in -this case\. (Note that many regular expression characters must be escaped or -quoted in most shells\.) -.SS A Note on caching +If a term starts with \fB/\fP, then it's interpreted as a regular expression +and supports standard JavaScript RegExp syntax\. In this case search will +ignore a trailing \fB/\fP \. (Note you must escape or quote many regular +expression characters in most shells\.) .SS Configuration +.P +All of the following can be defined in a \fB\|\.npmrc\fP file, or passed as +parameters to the cli prefixed with \fB\-\-\fP (e\.g\. \fB\-\-json\fP) .SS description .RS 0 .IP \(bu 2 @@ -40,10 +43,18 @@ Default: true .IP \(bu 2 Type: Boolean +.RE +.SS color +.RS 0 +.IP \(bu 2 +Default: true +.IP \(bu 2 +Type: Boolean + .RE .P -Used as \fB\-\-no\-description\fP, disables search matching in package descriptions and -suppresses display of that field in results\. +Used as \fB\-\-no\-color\fP, disables color highlighting of matches in the +results\. .SS json .RS 0 .IP \(bu 2 @@ -74,9 +85,9 @@ Type: Boolean .RE .P Display full package descriptions and other long text across multiple -lines\. When disabled (default) search results are truncated to fit -neatly on a single line\. Modules with extremely long names will -fall on multiple lines\. +lines\. When disabled (which is the default) the output will +truncate search results to fit neatly on a single line\. Modules with +extremely long names will fall on multiple lines\. .SS searchopts .RS 0 .IP \(bu 2 @@ -97,16 +108,6 @@ Type: String .RE .P Space\-separated options that limit the results from search\. -.SS searchstaleness -.RS 0 -.IP \(bu 2 -Default: 900 (15 minutes) -.IP \(bu 2 -Type: Number - -.RE -.P -The age of the cache, in seconds, before another registry request is made\. .SS registry .RS 0 .IP \(bu 2 @@ -116,11 +117,28 @@ Type: url .RE .P -Search the specified registry for modules\. If you have configured npm to point -to a different default registry, such as your internal private module -repository, \fBnpm search\fP will default to that registry when searching\. Pass a -different registry url such as the default above in order to override this -setting\. +Search the specified registry for modules\. If you have configured npm to +point to a different default registry (such as your internal private +module repository), \fBnpm search\fP will also default to that registry when +searching\. +.SS A note on caching +.P +The npm cli caches search results with the same terms and options +locally in its cache\. 
You can use the following to change how and when +the cli uses this cache\. See npm help \fBcache\fP for more +on how the cache works\. +.SS prefer\-online +.P +Forced staleness checks for cached searches, making the cli look for +updates immediately even for fresh search results\. +.SS prefer\-offline +.P +Bypasses staleness checks for cached\. Missing data will still be +requested from the server\. To force full offline mode, use \fBoffline\fP\|\. +.SS offline +.P +Forces full offline mode\. Any searches not locally cached will result in +an error\. .SS See Also .RS 0 .IP \(bu 2 @@ -131,5 +149,9 @@ npm help config npm help npmrc .IP \(bu 2 npm help view +.IP \(bu 2 +npm help cache +.IP \(bu 2 +https://npm\.im/npm\-registry\-fetch .RE diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 73ef9c3ce307ed..c5151ab9abe2b2 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -11,10 +11,11 @@ npm shrinkwrap .SS Description .P This command repurposes \fBpackage\-lock\.json\fP into a publishable -\fBnpm\-shrinkwrap\.json\fP or simply creates a new one\. The file created and updated -by this command will then take precedence over any other existing or future -\fBpackage\-lock\.json\fP files\. For a detailed explanation of the design and purpose -of package locks in npm, see npm help package\-locks\. +\fBnpm\-shrinkwrap\.json\fP or simply creates a new one\. The file created and +updated by this command will then take precedence over any other existing +or future \fBpackage\-lock\.json\fP files\. For a detailed explanation of the +design and purpose of package locks in npm, see +npm help package\-lock\-json\. .SS See Also .RS 0 .IP \(bu 2 @@ -24,13 +25,11 @@ npm help run\-script .IP \(bu 2 npm help scripts .IP \(bu 2 -npm help package\.js -.IP \(bu 2 -npm help package\-locks +npm help package\.json .IP \(bu 2 npm help package\-lock\.json .IP \(bu 2 -npm help shrinkwrap\.json +npm help npm\-shrinkwrap\.json .IP \(bu 2 npm help ls diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 44bf825b2d6d88..a3d343d84f4bbd 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -10,12 +10,41 @@ npm start [\-\- ] .RE .SS Description .P -This runs an arbitrary command specified in the package's \fB"start"\fP property of -its \fB"scripts"\fP object\. If no \fB"start"\fP property is specified on the -\fB"scripts"\fP object, it will run \fBnode server\.js\fP\|\. +This runs a predefined command specified in the \fB"start"\fP property of +a package's \fB"scripts"\fP object\. +.P +If the \fB"scripts"\fP object does not define a \fB"start"\fP property, npm +will run \fBnode server\.js\fP\|\. +.P +Note that this is different from the default node behavior of running +the file specified in a package's \fB"main"\fP attribute when evoking with +\fBnode \.\fP .P As of \fBnpm@2\.0\.0\fP \fIhttps://blog\.npmjs\.org/post/98131109725/npm\-2\-0\-0\fR, you can use custom arguments when executing scripts\. Refer to npm help \fBrun\-script\fP for more details\. 
+.SS Example +.P +.RS 2 +.nf +{ + "scripts": { + "start": "node foo\.js" + } +} +.fi +.RE +.P +.RS 2 +.nf +npm start + +> npm@x\.x\.x start +> node foo\.js + +(foo\.js output would be here) + +.fi +.RE .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 9ca8142296a454..2d811be0b48f92 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -10,7 +10,34 @@ npm stop [\-\- ] .RE .SS Description .P -This runs a package's "stop" script, if one was provided\. +This runs a predefined command specified in the "stop" property of a +package's "scripts" object\. +.P +Unlike with npm help start, there is no default script +that will run if the \fB"stop"\fP property is not defined\. +.SS Example +.P +.RS 2 +.nf +{ + "scripts": { + "stop": "node bar\.js" + } +} +.fi +.RE +.P +.RS 2 +.nf +npm stop + +> npm@x\.x\.x stop +> node bar\.js + +(bar\.js output would be here) + +.fi +.RE .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index 5844149a7306f9..877e25c8157f5c 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -12,7 +12,29 @@ aliases: t, tst .RE .SS Description .P -This runs a package's "test" script, if one was provided\. +This runs a predefined command specified in the \fB"test"\fP property of +a package's \fB"scripts"\fP object\. +.SS Example +.P +.RS 2 +.nf +{ + "scripts": { + "test": "node test\.js" + } +} +.fi +.RE +.P +.RS 2 +.nf +npm test +> npm@x\.x\.x test +> node test\.js + +(test\.js output would be here) +.fi +.RE .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 86ba1819106808..5edb1cee42eda1 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -16,8 +16,8 @@ This lets you list, create and revoke authentication tokens\. .RS 0 .IP \(bu 2 \fBnpm token list\fP: -Shows a table of all active authentication tokens\. You can request this as -JSON with \fB\-\-json\fP or tab\-separated values with \fB\-\-parseable\fP\|\. +Shows a table of all active authentication tokens\. You can request +this as JSON with \fB\-\-json\fP or tab\-separated values with \fB\-\-parseable\fP\|\. .RE .P @@ -45,10 +45,16 @@ JSON with \fB\-\-json\fP or tab\-separated values with \fB\-\-parseable\fP\|\. .RS 0 .IP \(bu 2 \fBnpm token create [\-\-read\-only] [\-\-cidr=]\fP: -Create a new authentication token\. It can be \fB\-\-read\-only\fP or accept a list of -CIDR \fIhttps://en\.wikipedia\.org/wiki/Classless_Inter\-Domain_Routing\fR ranges to -limit use of this token to\. This will prompt you for your password, and, if you have -two\-factor authentication enabled, an otp\. +Create a new authentication token\. It can be \fB\-\-read\-only\fP, or accept +a list of +CIDR \fIhttps://en\.wikipedia\.org/wiki/Classless_Inter\-Domain_Routing\fR +ranges with which to limit use of this token\. This will prompt you for +your password, and, if you have two\-factor authentication enabled, an +otp\. +Currently, the cli can not generate automation tokens\. Please refer to +the docs +website \fIhttps://docs\.npmjs\.com/creating\-and\-viewing\-access\-tokens\fR +for more information on generating automation tokens\. .RE .P @@ -68,9 +74,11 @@ two\-factor authentication enabled, an otp\. .RS 0 .IP \(bu 2 \fBnpm token revoke \fP: -This removes an authentication token, making it immediately unusable\. 
This can accept -both complete tokens (as you get back from \fBnpm token create\fP and will -find in your \fB\|\.npmrc\fP) and ids as seen in the \fBnpm token list\fP output\. -This will NOT accept the truncated token found in \fBnpm token list\fP output\. +Immediately removes an authentication token from the registry\. You +will no longer be able to use it\. This can accept both complete +tokens (such as those you get back from \fBnpm token create\fP, and those +found in your \fB\|\.npmrc\fP), and ids as seen in the parseable or json +output of \fBnpm token list\fP\|\. This will NOT accept the truncated token +found in the normal \fBnpm token list\fP output\. .RE diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index f341264506be1c..fb5016b3903935 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -5,7 +5,7 @@ .P .RS 2 .nf -npm uninstall [<@scope>/][@]\.\.\. [\-S|\-\-save|\-D|\-\-save\-dev|\-O|\-\-save\-optional|\-\-no\-save] +npm uninstall [<@scope>/][@]\.\.\. [\-S|\-\-save|\-\-no\-save] aliases: remove, rm, r, un, unlink .fi @@ -15,47 +15,45 @@ aliases: remove, rm, r, un, unlink This uninstalls a package, completely removing everything npm installed on its behalf\. .P -Example: +It also removes the package from the \fBdependencies\fP, \fBdevDependencies\fP, +\fBoptionalDependencies\fP, and \fBpeerDependencies\fP objects in your +\fBpackage\.json\fP\|\. .P -.RS 2 -.nf -npm uninstall sax -.fi -.RE +Futher, if you have an \fBnpm\-shrinkwrap\.json\fP or \fBpackage\-lock\.json\fP, npm +will update those files as well\. .P -In global mode (ie, with \fB\-g\fP or \fB\-\-global\fP appended to the command), -it uninstalls the current package context as a global package\. +\fB\-\-no\-save\fP will tell npm not to remove the package from your +\fBpackage\.json\fP, \fBnpm\-shrinkwrap\.json\fP, or \fBpackage\-lock\.json\fP files\. .P -\fBnpm uninstall\fP takes 3 exclusive, optional flags which save or update -the package version in your main package\.json: -.RS 0 -.IP \(bu 2 -\fB\-S, \-\-save\fP: Package will be removed from your \fBdependencies\fP\|\. -.IP \(bu 2 -\fB\-D, \-\-save\-dev\fP: Package will be removed from your \fBdevDependencies\fP\|\. -.IP \(bu 2 -\fB\-O, \-\-save\-optional\fP: Package will be removed from your \fBoptionalDependencies\fP\|\. -.IP \(bu 2 -\fB\-\-no\-save\fP: Package will not be removed from your \fBpackage\.json\fP file\. - -.RE +\fB\-\-save\fP or \fB\-S\fP will tell npm to remove the package from your +\fBpackage\.json\fP, \fBnpm\-shrinkwrap\.json\fP, and \fBpackage\-lock\.json\fP files\. +This is the default, but you may need to use this if you have for +instance \fBsave=false\fP in your \fBnpmrc\fP file .P -Further, if you have an \fBnpm\-shrinkwrap\.json\fP then it will be updated as -well\. +In global mode (ie, with \fB\-g\fP or \fB\-\-global\fP appended to the command), +it uninstalls the current package context as a global package\. +\fB\-\-no\-save\fP is ignored in this case\. .P Scope is optional and follows the usual rules for npm help \fBscope\fP\|\. +.SS Examples .P -Examples: +.RS 2 +.nf +npm uninstall sax +.fi +.RE +.P +\fBsax\fP will no longer be in your \fBpackage\.json\fP, \fBnpm\-shrinkwrap\.json\fP, or +\fBpackage\-lock\.json\fP files\. 
.P .RS 2 .nf -npm uninstall sax \-\-save -npm uninstall @myorg/privatepackage \-\-save -npm uninstall node\-tap \-\-save\-dev -npm uninstall dtrace\-provider \-\-save\-optional npm uninstall lodash \-\-no\-save .fi .RE +.P +\fBlodash\fP will not be removed from your \fBpackage\.json\fP, +\fBnpm\-shrinkwrap\.json\fP, or \fBpackage\-lock\.json\fP files\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index 7c965107871bae..6542ec54643f38 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -2,6 +2,10 @@ .SH "NAME" \fBnpm-unpublish\fR \- Remove a package from the registry .SS Synopsis +.P +To learn more about how the npm registry treats unpublish, see our unpublish policies .SS Unpublishing a single version of a package .P .RS 2 @@ -18,20 +22,25 @@ npm unpublish [<@scope>/] \-\-force .RE .SS Warning .P -Consider using the \fBdeprecate\fP command instead, if your intent is to encourage users to upgrade, or if you no longer want to maintain a package\. +Consider using the npm help \fBdeprecate\fP command instead, +if your intent is to encourage users to upgrade, or if you no longer +want to maintain a package\. .SS Description .P -This removes a package version from the registry, deleting its -entry and removing the tarball\. +This removes a package version from the registry, deleting its entry and +removing the tarball\. .P -If no version is specified, or if all versions are removed then -the root package entry is removed from the registry entirely\. +The npm registry will return an error if you are not npm help logged +in\. .P -Even if a package version is unpublished, that specific name and -version combination can never be reused\. In order to publish the -package again, a new version number must be used\. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed\. +If you do not specify a version or if you remove all of a package's +versions then the registry will remove the root package entry entirely\. .P -To learn more about how unpublish is treated on the npm registry, see our unpublish policies\|\. +Even if you unpublish a package version, that specific name and version +combination can never be reused\. In order to publish the package again, +you must use a new version number\. If you unpublish the entire package, +you may not publish any new versions of that package until 24 hours have +passed\. .SS See Also .RS 0 .IP \(bu 2 @@ -44,5 +53,7 @@ npm help registry npm help adduser .IP \(bu 2 npm help owner +.IP \(bu 2 +npm help login .RE diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 73b67d8a52cdba..a75dc70db92309 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -10,7 +10,7 @@ npm [args] .RE .SS Version .P -7\.4\.0 +7\.4\.3 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts @@ -141,26 +141,11 @@ See npm help \fBconfig\fP for much much more information\. Patches welcome! .P If you would like to contribute, but don't know what to work on, read -the contributing guidelines and check the issues list\. -.RS 0 -.IP \(bu 2 -CONTRIBUTING\.md \fIhttps://github\.com/npm/cli/blob/latest/CONTRIBUTING\.md\fR -.IP \(bu 2 -Bug tracker \fIhttps://github\.com/npm/cli/issues\fR - -.RE +the contributing guidelines \fIhttps://github\.com/npm/cli/blob/latest/CONTRIBUTING\.md\fR +and check the issues list\. 
.SS Bugs .P -When you find issues, please report them: -.RS 0 -.IP \(bu 2 -web: -https://github\.com/npm/npm/issues -.IP \(bu 2 -archived web: -https://npm\.community/c/bugs - -.RE +When you find issues, please report them: https://github\.com/npm/cli/issues .P Be sure to follow the template and bug reporting guidelines\. .SS Feature Requests @@ -178,12 +163,6 @@ Or suggest formal RFC proposals: https://github\.com/npm/rfcs .RE -.SS Author -.P -Isaac Z\. Schlueter \fIhttp://blog\.izs\.me/\fR :: -isaacs \fIhttps://github\.com/isaacs/\fR :: -@izs \fIhttps://twitter\.com/izs\fR :: -i@izs\.me .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index 4aa76d059bf8f4..d01600aa8d769a 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -3,20 +3,30 @@ \fBinstall\fR \- Download and install node and npm .SS Description .P -To publish and install packages to and from the public npm registry, you must install Node\.js and the npm command line interface using either a Node version manager or a Node installer\. \fBWe strongly recommend using a Node version manager to install Node\.js and npm\.\fR We do not recommend using a Node installer, since the Node installation process installs npm in a directory with local permissions and can cause permissions errors when you run npm packages globally\. +To publish and install packages to and from the public npm registry, you +must install Node\.js and the npm command line interface using either a Node +version manager or a Node installer\. \fBWe strongly recommend using a Node +version manager to install Node\.js and npm\.\fR We do not recommend using a +Node installer, since the Node installation process installs npm in a +directory with local permissions and can cause permissions errors when you +run npm packages globally\. .SS Overview .RS 0 .IP \(bu 2 -Checking your version of npm and Node\.js \fI#checking\-your\-version\-of\-npm\-and\-node\-js\fR +Checking your version of npm and +Node\.js \fI#checking\-your\-version\-of\-npm\-and\-node\-js\fR .IP \(bu 2 -Using a Node version manager to install Node\.js and npm \fI#using\-a\-node\-version\-manager\-to\-install\-node\-js\-and\-npm\fR +Using a Node version manager to install Node\.js and +npm \fI#using\-a\-node\-version\-manager\-to\-install\-node\-js\-and\-npm\fR .IP \(bu 2 -Using a Node installer to install Node\.js and npm \fI#using\-a\-node\-installer\-to\-install\-node\-js\-and\-npm\fR +Using a Node installer to install Node\.js and +npm \fI#using\-a\-node\-installer\-to\-install\-node\-js\-and\-npm\fR .RE .SS Checking your version of npm and Node\.js .P -To see if you already have Node\.js and npm installed and check the installed version, run the following commands: +To see if you already have Node\.js and npm installed and check the +installed version, run the following commands: .P .RS 2 .nf @@ -26,7 +36,10 @@ npm \-v .RE .SS Using a Node version manager to install Node\.js and npm .P -Node version managers allow you to install and switch between multiple versions of Node\.js and npm on your system so you can test your applications on multiple versions of npm to ensure they work for users on different versions\. +Node version managers allow you to install and switch between multiple +versions of Node\.js and npm on your system so you can test your +applications on multiple versions of npm to ensure they work for users on +different versions\. 
.SS OSX or Linux Node version managers .RS 0 .IP \(bu 2 @@ -45,29 +58,39 @@ nvm\-windows \fIhttps://github\.com/coreybutler/nvm\-windows\fR .RE .SS Using a Node installer to install Node\.js and npm .P -If you are unable to use a Node version manager, you can use a Node installer to install both Node\.js and npm on your system\. +If you are unable to use a Node version manager, you can use a Node +installer to install both Node\.js and npm on your system\. .RS 0 .IP \(bu 2 Node\.js installer \fIhttps://nodejs\.org/en/download/\fR .IP \(bu 2 -NodeSource installer \fIhttps://github\.com/nodesource/distributions\fR\|\. If you use Linux, we recommend that you use a NodeSource installer\. +NodeSource installer \fIhttps://github\.com/nodesource/distributions\fR\|\. If +you use Linux, we recommend that you use a NodeSource installer\. .RE .SS OS X or Windows Node installers .P -If you're using OS X or Windows, use one of the installers from the Node\.js download page \fIhttps://nodejs\.org/en/download/\fR\|\. Be sure to install the version labeled \fBLTS\fR\|\. Other versions have not yet been tested with npm\. +If you're using OS X or Windows, use one of the installers from the +Node\.js download page \fIhttps://nodejs\.org/en/download/\fR\|\. Be sure to +install the version labeled \fBLTS\fR\|\. Other versions have not yet been +tested with npm\. .SS Linux or other operating systems Node installers .P -If you're using Linux or another operating system, use one of the following installers: +If you're using Linux or another operating system, use one of the following +installers: .RS 0 .IP \(bu 2 -NodeSource installer \fIhttps://github\.com/nodesource/distributions\fR (recommended) +NodeSource installer \fIhttps://github\.com/nodesource/distributions\fR +(recommended) .IP \(bu 2 -One of the installers on the Node\.js download page \fIhttps://nodejs\.org/en/download/\fR +One of the installers on the Node\.js download +page \fIhttps://nodejs\.org/en/download/\fR .RE .P -Or see this page \fIhttps://nodejs\.org/en/download/package\-manager/\fR to install npm for Linux in the way many Linux developers prefer\. +Or see this page \fIhttps://nodejs\.org/en/download/package\-manager/\fR to +install npm for Linux in the way many Linux developers prefer\. .SS Less\-common operating systems .P -For more information on installing Node\.js on a variety of operating systems, see this page \fIhttps://nodejs\.org/en/download/package\-manager/\fR\|\. +For more information on installing Node\.js on a variety of operating +systems, see this page \fIhttps://nodejs\.org/en/download/package\-manager/\fR\|\. diff --git a/deps/npm/man/man5/shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 similarity index 56% rename from deps/npm/man/man5/shrinkwrap-json.5 rename to deps/npm/man/man5/npm-shrinkwrap-json.5 index 606b2179a10a40..7f8012e847099f 100644 --- a/deps/npm/man/man5/shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,23 +1,27 @@ -.TH "SHRINKWRAP\.JSON" "5" "January 2021" "" "" +.TH "NPM\-SHRINKWRAP\.JSON" "5" "January 2021" "" "" .SH "NAME" -\fBshrinkwrap.json\fR \- A publishable lockfile +\fBnpm-shrinkwrap.json\fR \- A publishable lockfile .SS Description .P -\fBnpm\-shrinkwrap\.json\fP is a file created by npm help \fBshrinkwrap\fP\|\. It is identical to +\fBnpm\-shrinkwrap\.json\fP is a file created by npm help \fBnpm +shrinkwrap\fP\|\. 
It is identical to \fBpackage\-lock\.json\fP, with one major caveat: Unlike \fBpackage\-lock\.json\fP, \fBnpm\-shrinkwrap\.json\fP may be included when publishing a package\. .P The recommended use\-case for \fBnpm\-shrinkwrap\.json\fP is applications deployed through the publishing process on the registry: for example, daemons and command\-line tools intended as global installs or \fBdevDependencies\fP\|\. It's -strongly discouraged for library authors to publish this file, since that would -prevent end users from having control over transitive dependency updates\. +strongly discouraged for library authors to publish this file, since that +would prevent end users from having control over transitive dependency +updates\. .P -Additionally, if both \fBpackage\-lock\.json\fP and \fBnpm\-shrinkwrap\.json\fP are present -in a package root, \fBpackage\-lock\.json\fP will be ignored in favor of this file\. +If both \fBpackage\-lock\.json\fP and \fBnpm\-shrinkwrap\.json\fP are present in a +package root, \fBnpm\-shrinkwrap\.json\fP will be preferred over the +\fBpackage\-lock\.json\fP file\. .P -For full details and description of the \fBnpm\-shrinkwrap\.json\fP file format, refer -to the manual page for npm help package\-lock\.json\. +For full details and description of the \fBnpm\-shrinkwrap\.json\fP file format, +refer to the manual page for +npm help package\-lock\.json\. .SS See also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 877c2175a486a8..baf2fe3e5d9078 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -3,13 +3,14 @@ \fBnpmrc\fR \- The npm config files .SS Description .P -npm gets its config settings from the command line, environment -variables, and \fBnpmrc\fP files\. +npm gets its config settings from the command line, environment variables, +and \fBnpmrc\fP files\. .P -The \fBnpm config\fP command can be used to update and edit the contents -of the user and global npmrc files\. +The \fBnpm config\fP command can be used to update and edit the contents of the +user and global npmrc files\. .P -For a list of available configuration options, see npm help config\. +For a list of available configuration options, see +npm help config\. .SS Files .P The four relevant files are: @@ -25,9 +26,9 @@ npm builtin config file (/path/to/npm/npmrc) .RE .P -All npm config files are an ini\-formatted list of \fBkey = value\fP -parameters\. Environment variables can be replaced using -\fB${VARIABLE_NAME}\fP\|\. For example: +All npm config files are an ini\-formatted list of \fBkey = value\fP parameters\. +Environment variables can be replaced using \fB${VARIABLE_NAME}\fP\|\. For +example: .P .RS 2 .nf @@ -35,12 +36,11 @@ prefix = ${HOME}/\.npm\-packages .fi .RE .P -Each of these files is loaded, and config options are resolved in -priority order\. For example, a setting in the userconfig file would -override the setting in the globalconfig file\. +Each of these files is loaded, and config options are resolved in priority +order\. For example, a setting in the userconfig file would override the +setting in the globalconfig file\. .P -Array values are specified by adding "[]" after the key name\. For -example: +Array values are specified by adding "[]" after the key name\. For example: .P .RS 2 .nf @@ -50,7 +50,9 @@ key[] = "second value" .RE .SS Comments .P -Lines in \fB\|\.npmrc\fP files are interpreted as comments when they begin with a \fB;\fP or \fB#\fP character\. 
\fB\|\.npmrc\fP files are parsed by npm/ini \fIhttps://github\.com/npm/ini\fR, which specifies this comment syntax\. +Lines in \fB\|\.npmrc\fP files are interpreted as comments when they begin with a +\fB;\fP or \fB#\fP character\. \fB\|\.npmrc\fP files are parsed by +npm/ini \fIhttps://github\.com/npm/ini\fR, which specifies this comment syntax\. .P For example: .P @@ -67,31 +69,30 @@ When working locally in a project, a \fB\|\.npmrc\fP file in the root of the project (ie, a sibling of \fBnode_modules\fP and \fBpackage\.json\fP) will set config values specific to this project\. .P -Note that this only applies to the root of the project that you're -running npm in\. It has no effect when your module is published\. For -example, you can't publish a module that forces itself to install -globally, or in a different location\. +Note that this only applies to the root of the project that you're running +npm in\. It has no effect when your module is published\. For example, you +can't publish a module that forces itself to install globally, or in a +different location\. .P Additionally, this file is not read in global mode, such as when running \fBnpm install \-g\fP\|\. .SS Per\-user config file .P -\fB$HOME/\.npmrc\fP (or the \fBuserconfig\fP param, if set in the environment -or on the command line) +\fB$HOME/\.npmrc\fP (or the \fBuserconfig\fP param, if set in the environment or on +the command line) .SS Global config file .P -\fB$PREFIX/etc/npmrc\fP (or the \fBglobalconfig\fP param, if set above): -This file is an ini\-file formatted list of \fBkey = value\fP parameters\. -Environment variables can be replaced as above\. +\fB$PREFIX/etc/npmrc\fP (or the \fBglobalconfig\fP param, if set above): This file +is an ini\-file formatted list of \fBkey = value\fP parameters\. Environment +variables can be replaced as above\. .SS Built\-in config file .P \fBpath/to/npm/itself/npmrc\fP .P This is an unchangeable "builtin" configuration file that npm keeps consistent across updates\. Set fields in here using the \fB\|\./configure\fP -script that comes with npm\. This is primarily for distribution -maintainers to override default configs in a standard and consistent -manner\. +script that comes with npm\. This is primarily for distribution maintainers +to override default configs in a standard and consistent manner\. .SS See also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index 17946fd18fb559..79ebabc5eec228 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -3,33 +3,37 @@ \fBpackage.json\fR \- Specifics of npm's package\.json handling .SS Description .P -This document is all you need to know about what's required in your package\.json -file\. It must be actual JSON, not just a JavaScript object literal\. +This document is all you need to know about what's required in your +package\.json file\. It must be actual JSON, not just a JavaScript object +literal\. .P A lot of the behavior described in this document is affected by the config settings described in npm help \fBconfig\fP\|\. .SS name .P If you plan to publish your package, the \fImost\fR important things in your -package\.json are the name and version fields as they will be required\. The name -and version together form an identifier that is assumed to be completely unique\. -Changes to the package should come along with changes to the version\. If you don't -plan to publish your package, the name and version fields are optional\. 
+package\.json are the name and version fields as they will be required\. The +name and version together form an identifier that is assumed to be +completely unique\. Changes to the package should come along with changes +to the version\. If you don't plan to publish your package, the name and +version fields are optional\. .P The name is what your thing is called\. .P Some rules: .RS 0 .IP \(bu 2 -The name must be less than or equal to 214 characters\. This includes the scope for -scoped packages\. +The name must be less than or equal to 214 characters\. This includes the +scope for scoped packages\. .IP \(bu 2 -The names of scoped packages can begin with a dot or an underscore\. This is not permitted without a scope\. +The names of scoped packages can begin with a dot or an underscore\. This +is not permitted without a scope\. .IP \(bu 2 New packages must not have uppercase letters in the name\. .IP \(bu 2 -The name ends up being part of a URL, an argument on the command line, and a -folder name\. Therefore, the name can't contain any non\-URL\-safe characters\. +The name ends up being part of a URL, an argument on the command line, +and a folder name\. Therefore, the name can't contain any non\-URL\-safe +characters\. .RE .P @@ -38,15 +42,16 @@ Some tips: .IP \(bu 2 Don't use the same name as a core Node module\. .IP \(bu 2 -Don't put "js" or "node" in the name\. It's assumed that it's js, since you're -writing a package\.json file, and you can specify the engine using the "engines" -field\. (See below\.) +Don't put "js" or "node" in the name\. It's assumed that it's js, since +you're writing a package\.json file, and you can specify the engine using +the "engines" field\. (See below\.) .IP \(bu 2 -The name will probably be passed as an argument to require(), so it should -be something short, but also reasonably descriptive\. +The name will probably be passed as an argument to require(), so it +should be something short, but also reasonably descriptive\. .IP \(bu 2 -You may want to check the npm registry to see if there's something by that name -already, before you get too attached to it\. https://www\.npmjs\.com/ +You may want to check the npm registry to see if there's something by +that name already, before you get too attached to it\. +https://www\.npmjs\.com/ .RE .P @@ -55,14 +60,15 @@ npm help \fBscope\fP for more detail\. .SS version .P If you plan to publish your package, the \fImost\fR important things in your -package\.json are the name and version fields as they will be required\. The name -and version together form an identifier that is assumed to be completely unique\. -Changes to the package should come along with changes to the version\. If you don't -plan to publish your package, the name and version fields are optional\. +package\.json are the name and version fields as they will be required\. The +name and version together form an identifier that is assumed to be +completely unique\. Changes to the package should come along with changes +to the version\. If you don't plan to publish your package, the name and +version fields are optional\. .P Version must be parseable by -node\-semver \fIhttps://github\.com/npm/node\-semver\fR, which is bundled -with npm as a dependency\. (\fBnpm install semver\fP to use it yourself\.) +node\-semver \fIhttps://github\.com/npm/node\-semver\fR, which is bundled with +npm as a dependency\. (\fBnpm install semver\fP to use it yourself\.) .P More on version numbers and ranges at npm help semver\. 
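.P
For illustration only, a minimal publishable package (the name
\fBmy\-cool\-package\fP here is just a placeholder) needs nothing more than
these two fields, which together form the identifier
\fBmy\-cool\-package@1\.0\.2\fP:
.P
.RS 2
.nf
{
  "name": "my\-cool\-package",
  "version": "1\.0\.2"
}
.fi
.RE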
.SS description @@ -71,8 +77,8 @@ Put a description in it\. It's a string\. This helps people discover your package, as it's listed in \fBnpm search\fP\|\. .SS keywords .P -Put keywords in it\. It's an array of strings\. This helps people -discover your package as it's listed in \fBnpm search\fP\|\. +Put keywords in it\. It's an array of strings\. This helps people discover +your package as it's listed in \fBnpm search\fP\|\. .SS homepage .P The url to the project homepage\. @@ -87,47 +93,54 @@ Example: .SS bugs .P The url to your project's issue tracker and / or the email address to which -issues should be reported\. These are helpful for people who encounter issues -with your package\. +issues should be reported\. These are helpful for people who encounter +issues with your package\. .P It should look like this: .P .RS 2 .nf -{ "url" : "https://github\.com/owner/project/issues" -, "email" : "project@hostname\.com" +{ + "url" : "https://github\.com/owner/project/issues", + "email" : "project@hostname\.com" } .fi .RE .P -You can specify either one or both values\. If you want to provide only a url, -you can specify the value for "bugs" as a simple string instead of an object\. +You can specify either one or both values\. If you want to provide only a +url, you can specify the value for "bugs" as a simple string instead of an +object\. .P If a url is provided, it will be used by the \fBnpm bugs\fP command\. .SS license .P -You should specify a license for your package so that people know how they are -permitted to use it, and any restrictions you're placing on it\. +You should specify a license for your package so that people know how they +are permitted to use it, and any restrictions you're placing on it\. .P -If you're using a common license such as BSD\-2\-Clause or MIT, add a -current SPDX license identifier for the license you're using, like this: +If you're using a common license such as BSD\-2\-Clause or MIT, add a current +SPDX license identifier for the license you're using, like this: .P .RS 2 .nf -{ "license" : "BSD\-3\-Clause" } +{ + "license" : "BSD\-3\-Clause" +} .fi .RE .P -You can check the full list of SPDX license IDs \fIhttps://spdx\.org/licenses/\fR\|\. -Ideally you should pick one that is +You can check the full list of SPDX license +IDs \fIhttps://spdx\.org/licenses/\fR\|\. Ideally you should pick one that is OSI \fIhttps://opensource\.org/licenses/alphabetical\fR approved\. .P -If your package is licensed under multiple common licenses, use an SPDX license -expression syntax version 2\.0 string \fIhttps://www\.npmjs\.com/package/spdx\fR, like this: +If your package is licensed under multiple common licenses, use an SPDX +license expression syntax version 2\.0 +string \fIhttps://www\.npmjs\.com/package/spdx\fR, like this: .P .RS 2 .nf -{ "license" : "(ISC OR GPL\-3\.0)" } +{ + "license" : "(ISC OR GPL\-3\.0)" +} .fi .RE .P @@ -136,32 +149,37 @@ you are using a custom license, use a string value like this one: .P .RS 2 .nf -{ "license" : "SEE LICENSE IN " } +{ + "license" : "SEE LICENSE IN " +} .fi .RE .P Then include a file named \fB\fP at the top level of the package\. 
.P -Some old packages used license objects or a "licenses" property containing an -array of license objects: +Some old packages used license objects or a "licenses" property containing +an array of license objects: .P .RS 2 .nf // Not valid metadata -{ "license" : - { "type" : "ISC" - , "url" : "https://opensource\.org/licenses/ISC" +{ + "license" : { + "type" : "ISC", + "url" : "https://opensource\.org/licenses/ISC" } } // Not valid metadata -{ "licenses" : - [ - { "type": "MIT" - , "url": "https://www\.opensource\.org/licenses/mit\-license\.php" - } - , { "type": "Apache\-2\.0" - , "url": "https://opensource\.org/licenses/apache2\.0\.php" +{ + "licenses" : [ + { + "type": "MIT", + "url": "https://www\.opensource\.org/licenses/mit\-license\.php" + }, + { + "type": "Apache\-2\.0", + "url": "https://opensource\.org/licenses/apache2\.0\.php" } ] } @@ -172,9 +190,17 @@ Those styles are now deprecated\. Instead, use SPDX expressions, like this: .P .RS 2 .nf -{ "license": "ISC" } - -{ "license": "(MIT OR Apache\-2\.0)" } +{ + "license": "ISC" +} +.fi +.RE +.P +.RS 2 +.nf +{ + "license": "(MIT OR Apache\-2\.0)" +} .fi .RE .P @@ -183,30 +209,37 @@ unpublished package under any terms: .P .RS 2 .nf -{ "license": "UNLICENSED" } +{ + "license": "UNLICENSED" +} .fi .RE .P Consider also setting \fB"private": true\fP to prevent accidental publication\. .SS people fields: author, contributors .P -The "author" is one person\. "contributors" is an array of people\. A "person" -is an object with a "name" field and optionally "url" and "email", like this: +The "author" is one person\. "contributors" is an array of people\. A +"person" is an object with a "name" field and optionally "url" and "email", +like this: .P .RS 2 .nf -{ "name" : "Barney Rubble" -, "email" : "b@rubble\.com" -, "url" : "http://barnyrubble\.tumblr\.com/" +{ + "name" : "Barney Rubble", + "email" : "b@rubble\.com", + "url" : "http://barnyrubble\.tumblr\.com/" } .fi .RE .P -Or you can shorten that all into a single string, and npm will parse it for you: +Or you can shorten that all into a single string, and npm will parse it for +you: .P .RS 2 .nf -"Barney Rubble (http://barnyrubble\.tumblr\.com/)" +{ + "author": "Barney Rubble (http://barnyrubble\.tumblr\.com/)" +} .fi .RE .P @@ -216,60 +249,62 @@ npm also sets a top\-level "maintainers" field with your npm user info\. .SS funding .P You can specify an object containing an URL that provides up\-to\-date -information about ways to help fund development of your package, or -a string URL, or an array of these: +information about ways to help fund development of your package, or a +string URL, or an array of these: .P .RS 2 .nf -"funding": { - "type" : "individual", - "url" : "http://example\.com/donate" -} - -"funding": { - "type" : "patreon", - "url" : "https://www\.patreon\.com/my\-account" -} - -"funding": "http://example\.com/donate" - -"funding": [ - { +{ + "funding": { "type" : "individual", "url" : "http://example\.com/donate" }, - "http://example\.com/donateAlso", - { + + "funding": { "type" : "patreon", "url" : "https://www\.patreon\.com/my\-account" - } -] + }, + + "funding": "http://example\.com/donate", + + "funding": [ + { + "type" : "individual", + "url" : "http://example\.com/donate" + }, + "http://example\.com/donateAlso", + { + "type" : "patreon", + "url" : "https://www\.patreon\.com/my\-account" + } + ] +} .fi .RE .P Users can use the \fBnpm fund\fP subcommand to list the \fBfunding\fP URLs of all -dependencies of their project, direct and indirect\. 
A shortcut to visit each -funding url is also available when providing the project name such as: -\fBnpm fund \fP (when there are multiple URLs, the first one will be -visited) +dependencies of their project, direct and indirect\. A shortcut to visit +each funding url is also available when providing the project name such as: +\fBnpm fund \fP (when there are multiple URLs, the first one will +be visited) .SS files .P -The optional \fBfiles\fP field is an array of file patterns that describes -the entries to be included when your package is installed as a -dependency\. File patterns follow a similar syntax to \fB\|\.gitignore\fP, but -reversed: including a file, directory, or glob pattern (\fB*\fP, \fB**/*\fP, and such) -will make it so that file is included in the tarball when it's packed\. Omitting -the field will make it default to \fB["*"]\fP, which means it will include all files\. +The optional \fBfiles\fP field is an array of file patterns that describes the +entries to be included when your package is installed as a dependency\. File +patterns follow a similar syntax to \fB\|\.gitignore\fP, but reversed: including a +file, directory, or glob pattern (\fB*\fP, \fB**/*\fP, and such) will make it so +that file is included in the tarball when it's packed\. Omitting the field +will make it default to \fB["*"]\fP, which means it will include all files\. .P -Some special files and directories are also included or excluded regardless of -whether they exist in the \fBfiles\fP array (see below)\. +Some special files and directories are also included or excluded regardless +of whether they exist in the \fBfiles\fP array (see below)\. .P -You can also provide a \fB\|\.npmignore\fP file in the root of your package or -in subdirectories, which will keep files from being included\. At the -root of your package it will not override the "files" field, but in -subdirectories it will\. The \fB\|\.npmignore\fP file works just like a -\fB\|\.gitignore\fP\|\. If there is a \fB\|\.gitignore\fP file, and \fB\|\.npmignore\fP is -missing, \fB\|\.gitignore\fP\|'s contents will be used instead\. +You can also provide a \fB\|\.npmignore\fP file in the root of your package or in +subdirectories, which will keep files from being included\. At the root of +your package it will not override the "files" field, but in subdirectories +it will\. The \fB\|\.npmignore\fP file works just like a \fB\|\.gitignore\fP\|\. If there is +a \fB\|\.gitignore\fP file, and \fB\|\.npmignore\fP is missing, \fB\|\.gitignore\fP\|'s contents +will be used instead\. .P Files included with the "package\.json#files" field \fIcannot\fR be excluded through \fB\|\.npmignore\fP or \fB\|\.gitignore\fP\|\. @@ -324,24 +359,28 @@ Conversely, some files are always ignored: .IP \(bu 2 \fB*\.orig\fP .IP \(bu 2 -\fBpackage\-lock\.json\fP (use shrinkwrap instead) +\fBpackage\-lock\.json\fP (use +npm help \fBnpm\-shrinkwrap\.json\fP if you wish +it to be published) .RE .SS main .P -The main field is a module ID that is the primary entry point to your program\. -That is, if your package is named \fBfoo\fP, and a user installs it, and then does -\fBrequire("foo")\fP, then your main module's exports object will be returned\. +The main field is a module ID that is the primary entry point to your +program\. That is, if your package is named \fBfoo\fP, and a user installs it, +and then does \fBrequire("foo")\fP, then your main module's exports object will +be returned\. .P -This should be a module ID relative to the root of your package folder\. 
+This should be a module relative to the root of your package folder\. .P -For most modules, it makes the most sense to have a main script and often not -much else\. +For most modules, it makes the most sense to have a main script and often +not much else\. .SS browser .P If your module is meant to be used client\-side the browser field should be used instead of the main field\. This is helpful to hint users that it might -rely on primitives that aren't available in Node\.js modules\. (e\.g\. \fBwindow\fP) +rely on primitives that aren't available in Node\.js modules\. (e\.g\. +\fBwindow\fP) .SS bin .P A lot of packages have one or more executable files that they'd like to @@ -349,29 +388,35 @@ install into the PATH\. npm makes this pretty easy (in fact, it uses this feature to install the "npm" executable\.) .P To use this, supply a \fBbin\fP field in your package\.json which is a map of -command name to local file name\. On install, npm will symlink that file into -\fBprefix/bin\fP for global installs, or \fB\|\./node_modules/\.bin/\fP for local +command name to local file name\. On install, npm will symlink that file +into \fBprefix/bin\fP for global installs, or \fB\|\./node_modules/\.bin/\fP for local installs\. .P For example, myapp could have this: .P .RS 2 .nf -{ "bin" : { "myapp" : "\./cli\.js" } } +{ + "bin": { + "myapp": "\./cli\.js" + } +} .fi .RE .P -So, when you install myapp, it'll create a symlink from the \fBcli\.js\fP script to -\fB/usr/local/bin/myapp\fP\|\. +So, when you install myapp, it'll create a symlink from the \fBcli\.js\fP script +to \fB/usr/local/bin/myapp\fP\|\. .P -If you have a single executable, and its name should be the name -of the package, then you can just supply it as a string\. For example: +If you have a single executable, and its name should be the name of the +package, then you can just supply it as a string\. For example: .P .RS 2 .nf -{ "name": "my\-program" -, "version": "1\.2\.5" -, "bin": "\./path/to/program" } +{ + "name": "my\-program", + "version": "1\.2\.5", + "bin": "\./path/to/program" +} .fi .RE .P @@ -379,9 +424,13 @@ would be the same as this: .P .RS 2 .nf -{ "name": "my\-program" -, "version": "1\.2\.5" -, "bin" : { "my\-program" : "\./path/to/program" } } +{ + "name": "my\-program", + "version": "1\.2\.5", + "bin": { + "my\-program": "\./path/to/program" + } +} .fi .RE .P @@ -390,35 +439,42 @@ Please make sure that your file(s) referenced in \fBbin\fP starts with executable! .SS man .P -Specify either a single file or an array of filenames to put in place for the -\fBman\fP program to find\. +Specify either a single file or an array of filenames to put in place for +the \fBman\fP program to find\. .P If only a single file is provided, then it's installed such that it is the -result from \fBman \fP, regardless of its actual filename\. For example: +result from \fBman \fP, regardless of its actual filename\. For +example: .P .RS 2 .nf -{ "name" : "foo" -, "version" : "1\.2\.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo\.js" -, "man" : "\./man/doc\.1" +{ + "name": "foo", + "version": "1\.2\.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo\.js", + "man": "\./man/doc\.1" } .fi .RE .P -would link the \fB\|\./man/doc\.1\fP file in such that it is the target for \fBman foo\fP +would link the \fB\|\./man/doc\.1\fP file in such that it is the target for \fBman +foo\fP .P If the filename doesn't start with the package name, then it's prefixed\. 
So, this: .P .RS 2 .nf -{ "name" : "foo" -, "version" : "1\.2\.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo\.js" -, "man" : [ "\./man/foo\.1", "\./man/bar\.1" ] +{ + "name": "foo", + "version": "1\.2\.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo\.js", + "man": [ + "\./man/foo\.1", + "\./man/bar\.1" + ] } .fi .RE @@ -426,15 +482,20 @@ So, this: will create files to do \fBman foo\fP and \fBman foo\-bar\fP\|\. .P Man files must end with a number, and optionally a \fB\|\.gz\fP suffix if they are -compressed\. The number dictates which man section the file is installed into\. +compressed\. The number dictates which man section the file is installed +into\. .P .RS 2 .nf -{ "name" : "foo" -, "version" : "1\.2\.3" -, "description" : "A packaged foo fooer for fooing foos" -, "main" : "foo\.js" -, "man" : [ "\./man/foo\.1", "\./man/foo\.2" ] +{ + "name": "foo", + "version": "1\.2\.3", + "description": "A packaged foo fooer for fooing foos", + "main": "foo\.js", + "man": [ + "\./man/foo\.1", + "\./man/foo\.2" + ] } .fi .RE @@ -442,40 +503,26 @@ compressed\. The number dictates which man section the file is installed into\. will create entries for \fBman foo\fP and \fBman 2 foo\fP .SS directories .P -The CommonJS Packages \fIhttp://wiki\.commonjs\.org/wiki/Packages/1\.0\fR spec details a -few ways that you can indicate the structure of your package using a \fBdirectories\fP -object\. If you look at npm's package\.json \fIhttps://registry\.npmjs\.org/npm/latest\fR, -you'll see that it has directories for doc, lib, and man\. +The CommonJS Packages \fIhttp://wiki\.commonjs\.org/wiki/Packages/1\.0\fR spec +details a few ways that you can indicate the structure of your package +using a \fBdirectories\fP object\. If you look at npm's +package\.json \fIhttps://registry\.npmjs\.org/npm/latest\fR, you'll see that it +has directories for doc, lib, and man\. .P In the future, this information may be used in other creative ways\. -.SS directories\.lib -.P -Tell people where the bulk of your library is\. Nothing special is done -with the lib folder in any way, but it's useful meta info\. .SS directories\.bin .P If you specify a \fBbin\fP directory in \fBdirectories\.bin\fP, all the files in that folder will be added\. .P -Because of the way the \fBbin\fP directive works, specifying both a -\fBbin\fP path and setting \fBdirectories\.bin\fP is an error\. If you want to -specify individual files, use \fBbin\fP, and for all the files in an -existing \fBbin\fP directory, use \fBdirectories\.bin\fP\|\. +Because of the way the \fBbin\fP directive works, specifying both a \fBbin\fP path +and setting \fBdirectories\.bin\fP is an error\. If you want to specify +individual files, use \fBbin\fP, and for all the files in an existing \fBbin\fP +directory, use \fBdirectories\.bin\fP\|\. .SS directories\.man .P A folder that is full of man pages\. Sugar to generate a "man" array by walking the folder\. -.SS directories\.doc -.P -Put markdown files in here\. Eventually, these will be displayed nicely, -maybe, someday\. -.SS directories\.example -.P -Put example scripts in here\. Someday, it might be exposed in some clever way\. -.SS directories\.test -.P -Put your tests in here\. It is currently not exposed, but it might be in the -future\. .SS repository .P Specify the place where your code lives\. 
This is helpful for people who @@ -486,68 +533,76 @@ Do it like this: .P .RS 2 .nf -"repository": { - "type" : "git", - "url" : "https://github\.com/npm/cli\.git" -} - -"repository": { - "type" : "svn", - "url" : "https://v8\.googlecode\.com/svn/trunk/" +{ + "repository": { + "type": "git", + "url": "https://github\.com/npm/cli\.git" + } } .fi .RE .P -The URL should be a publicly available (perhaps read\-only) url that can be handed -directly to a VCS program without any modification\. It should not be a url to an -html project page that you put in your browser\. It's for computers\. +The URL should be a publicly available (perhaps read\-only) url that can be +handed directly to a VCS program without any modification\. It should not +be a url to an html project page that you put in your browser\. It's for +computers\. .P -For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same -shortcut syntax you use for \fBnpm install\fP: +For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the +same shortcut syntax you use for \fBnpm install\fP: .P .RS 2 .nf -"repository": "npm/npm" +{ + "repository": "npm/npm", -"repository": "github:user/repo" + "repository": "github:user/repo", -"repository": "gist:11081aaa281" + "repository": "gist:11081aaa281", -"repository": "bitbucket:user/repo" + "repository": "bitbucket:user/repo", -"repository": "gitlab:user/repo" + "repository": "gitlab:user/repo" +} .fi .RE .P -If the \fBpackage\.json\fP for your package is not in the root directory (for example -if it is part of a monorepo), you can specify the directory in which it lives: +If the \fBpackage\.json\fP for your package is not in the root directory (for +example if it is part of a monorepo), you can specify the directory in +which it lives: .P .RS 2 .nf -"repository": { - "type" : "git", - "url" : "https://github\.com/facebook/react\.git", - "directory": "packages/react\-dom" +{ + "repository": { + "type": "git", + "url": "https://github\.com/facebook/react\.git", + "directory": "packages/react\-dom" + } } .fi .RE .SS scripts .P -The "scripts" property is a dictionary containing script commands that are run -at various times in the lifecycle of your package\. The key is the lifecycle -event, and the value is the command to run at that point\. +The "scripts" property is a dictionary containing script commands that are +run at various times in the lifecycle of your package\. The key is the +lifecycle event, and the value is the command to run at that point\. .P -See npm help \fBscripts\fP to find out more about writing package scripts\. +See npm help \fBscripts\fP to find out more about writing package +scripts\. .SS config .P -A "config" object can be used to set configuration parameters used in package -scripts that persist across upgrades\. For instance, if a package had the -following: +A "config" object can be used to set configuration parameters used in +package scripts that persist across upgrades\. For instance, if a package +had the following: .P .RS 2 .nf -{ "name" : "foo" -, "config" : { "port" : "8080" } } +{ + "name": "foo", + "config": { + "port": "8080" + } +} .fi .RE .P @@ -555,8 +610,8 @@ and then had a "start" command that then referenced the \fBnpm_package_config_port\fP environment variable, then the user could override that by doing \fBnpm config set foo:port 8001\fP\|\. .P -See npm help \fBconfig\fP and npm help \fBscripts\fP for more on package -configs\. +See npm help \fBconfig\fP and npm help \fBscripts\fP for +more on package configs\. 
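.P
Putting the pieces of that example together, a minimal sketch might look
like this (the \fBserver\.js\fP script is hypothetical; it would read the port
from the \fBnpm_package_config_port\fP environment variable at run time):
.P
.RS 2
.nf
{
  "name": "foo",
  "config": {
    "port": "8080"
  },
  "scripts": {
    "start": "node server\.js"
  }
}
.fi
.RE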
.SS dependencies .P Dependencies are specified in a simple object that maps a package name to a @@ -564,10 +619,11 @@ version range\. The version range is a string which has one or more space\-separated descriptors\. Dependencies can also be identified with a tarball or git URL\. .P -\fBPlease do not put test harnesses or transpilers in your -\fBdependencies\fP object\.\fR See \fBdevDependencies\fP, below\. +\fBPlease do not put test harnesses or transpilers or other "development" +time tools in your \fBdependencies\fP object\.\fR See \fBdevDependencies\fP, below\. .P -See npm help semver for more details about specifying version ranges\. +See npm help semver for more details about specifying version +ranges\. .RS 0 .IP \(bu 2 \fBversion\fP Must match \fBversion\fP exactly @@ -580,7 +636,8 @@ See npm help semver for more details about specifying version ranges\. .IP \(bu 2 \fB<=version\fP .IP \(bu 2 -\fB~version\fP "Approximately equivalent to version" See npm help semver +\fB~version\fP "Approximately equivalent to version" See +npm help semver .IP \(bu 2 \fB^version\fP "Compatible with version" See npm help semver .IP \(bu 2 @@ -600,7 +657,8 @@ See npm help semver for more details about specifying version ranges\. .IP \(bu 2 \fBuser/repo\fP See 'GitHub URLs' below .IP \(bu 2 -\fBtag\fP A specific version tagged and published as \fBtag\fP See npm help \fBdist\-tag\fP +\fBtag\fP A specific version tagged and published as \fBtag\fP See npm help \fBnpm +dist\-tag\fP .IP \(bu 2 \fBpath/path/path\fP See Local Paths \fI#local\-paths\fR below @@ -610,19 +668,20 @@ For example, these are all valid: .P .RS 2 .nf -{ "dependencies" : - { "foo" : "1\.0\.0 \- 2\.9999\.9999" - , "bar" : ">=1\.0\.2 <2\.1\.2" - , "baz" : ">1\.0\.2 <=2\.3\.4" - , "boo" : "2\.0\.1" - , "qux" : "<1\.0\.0 || >=2\.3\.1 <2\.4\.5 || >=2\.5\.2 <3\.0\.0" - , "asd" : "http://asdf\.com/asdf\.tar\.gz" - , "til" : "~1\.2" - , "elf" : "~1\.2\.3" - , "two" : "2\.x" - , "thr" : "3\.3\.x" - , "lat" : "latest" - , "dyl" : "file:\.\./dyl" +{ + "dependencies": { + "foo": "1\.0\.0 \- 2\.9999\.9999", + "bar": ">=1\.0\.2 <2\.1\.2", + "baz": ">1\.0\.2 <=2\.3\.4", + "boo": "2\.0\.1", + "qux": "<1\.0\.0 || >=2\.3\.1 <2\.4\.5 || >=2\.5\.2 <3\.0\.0", + "asd": "http://asdf\.com/asdf\.tar\.gz", + "til": "~1\.2", + "elf": "~1\.2\.3", + "two": "2\.x", + "thr": "3\.3\.x", + "lat": "latest", + "dyl": "file:\.\./dyl" } } .fi @@ -649,8 +708,8 @@ Git urls are of the form: If \fB#\fP is provided, it will be used to clone exactly that commit\. If the commit\-ish has the format \fB#semver:\fP, \fB\fP can be any valid semver range or exact version, and npm will look for any tags -or refs matching that range in the remote repository, much as it would for a -registry dependency\. If neither \fB#\fP or \fB#semver:\fP is +or refs matching that range in the remote repository, much as it would for +a registry dependency\. If neither \fB#\fP or \fB#semver:\fP is specified, then \fBmaster\fP is used\. .P Examples: @@ -684,9 +743,9 @@ included\. For example: .RE .SS Local Paths .P -As of version 2\.0\.0 you can provide a path to a local directory that contains a -package\. Local paths can be saved using \fBnpm install \-S\fP or -\fBnpm install \-\-save\fP, using any of these forms: +As of version 2\.0\.0 you can provide a path to a local directory that +contains a package\. 
Local paths can be saved using \fBnpm install \-S\fP or \fBnpm +install \-\-save\fP, using any of these forms: .P .RS 2 .nf @@ -711,32 +770,32 @@ in which case they will be normalized to a relative path and added to your .fi .RE .P -This feature is helpful for local offline development and creating -tests that require npm installing where you don't want to hit an -external server, but should not be used when publishing packages -to the public registry\. +This feature is helpful for local offline development and creating tests +that require npm installing where you don't want to hit an external server, +but should not be used when publishing packages to the public registry\. .SS devDependencies .P If someone is planning on downloading and using your module in their -program, then they probably don't want or need to download and build -the external test or documentation framework that you use\. +program, then they probably don't want or need to download and build the +external test or documentation framework that you use\. .P -In this case, it's best to map these additional items in a \fBdevDependencies\fP -object\. +In this case, it's best to map these additional items in a +\fBdevDependencies\fP object\. .P -These things will be installed when doing \fBnpm link\fP or \fBnpm install\fP -from the root of a package, and can be managed like any other npm -configuration param\. See npm help \fBconfig\fP for more on the topic\. +These things will be installed when doing \fBnpm link\fP or \fBnpm install\fP from +the root of a package, and can be managed like any other npm configuration +param\. See npm help \fBconfig\fP for more on the topic\. .P For build steps that are not platform\-specific, such as compiling -CoffeeScript or other languages to JavaScript, use the \fBprepare\fP -script to do this, and make the required package a devDependency\. +CoffeeScript or other languages to JavaScript, use the \fBprepare\fP script to +do this, and make the required package a devDependency\. .P For example: .P .RS 2 .nf -{ "name": "ethopia\-waza", +{ + "name": "ethopia\-waza", "description": "a delightfully fruity coffee varietal", "version": "1\.2\.3", "devDependencies": { @@ -750,16 +809,17 @@ For example: .fi .RE .P -The \fBprepare\fP script will be run before publishing, so that users -can consume the functionality without requiring them to compile it -themselves\. In dev mode (ie, locally running \fBnpm install\fP), it'll -run this script as well, so that you can test it easily\. +The \fBprepare\fP script will be run before publishing, so that users can +consume the functionality without requiring them to compile it themselves\. +In dev mode (ie, locally running \fBnpm install\fP), it'll run this script as +well, so that you can test it easily\. .SS peerDependencies .P In some cases, you want to express the compatibility of your package with a host tool or library, while not necessarily doing a \fBrequire\fP of this host\. -This is usually referred to as a \fIplugin\fR\|\. Notably, your module may be exposing -a specific interface, expected and specified by the host documentation\. +This is usually referred to as a \fIplugin\fR\|\. Notably, your module may be +exposing a specific interface, expected and specified by the host +documentation\. .P For example: .P @@ -775,9 +835,9 @@ For example: .fi .RE .P -This ensures your package \fBtea\-latte\fP can be installed \fIalong\fR with the second -major version of the host package \fBtea\fP only\. 
\fBnpm install tea\-latte\fP could -possibly yield the following dependency graph: +This ensures your package \fBtea\-latte\fP can be installed \fIalong\fR with the +second major version of the host package \fBtea\fP only\. \fBnpm install +tea\-latte\fP could possibly yield the following dependency graph: .P .RS 2 .nf @@ -786,24 +846,28 @@ possibly yield the following dependency graph: .fi .RE .P -\fBNOTE: npm versions 1 and 2 will automatically install \fBpeerDependencies\fP if -they are not explicitly depended upon higher in the dependency tree\. In the -next major version of npm (npm@3), this will no longer be the case\. You will -receive a warning that the peerDependency is not installed instead\.\fR The -behavior in npms 1 & 2 was frequently confusing and could easily put you into -dependency hell, a situation that npm is designed to avoid as much as possible\. -.P -Trying to install another plugin with a conflicting requirement will cause an -error\. For this reason, make sure your plugin requirement is as broad as -possible, and not to lock it down to specific patch versions\. -.P -Assuming the host complies with semver \fIhttps://semver\.org/\fR, only changes in -the host package's major version will break your plugin\. Thus, if you've worked -with every 1\.x version of the host package, use \fB"^1\.0"\fP or \fB"1\.x"\fP to express -this\. If you depend on features introduced in 1\.5\.2, use \fB">= 1\.5\.2 < 2"\fP\|\. +In npm versions 3 through 6, \fBpeerDependencies\fP were not automatically +installed, and would raise a warning if an invalid version of the peer +dependency was found in the tree\. As of npm v7, peerDependencies \fIare\fR +installed by default\. +.P +Trying to install another plugin with a conflicting requirement may cause +an error if the tree cannot be resolved correctly\. For this reason, make +sure your plugin requirement is as broad as possible, and not to lock it +down to specific patch versions\. +.P +Assuming the host complies with semver \fIhttps://semver\.org/\fR, only changes +in the host package's major version will break your plugin\. Thus, if you've +worked with every 1\.x version of the host package, use \fB"^1\.0"\fP or \fB"1\.x"\fP +to express this\. If you depend on features introduced in 1\.5\.2, use +\fB"^1\.5\.2"\fP\|\. .SS peerDependenciesMeta .P -When a user installs your package, npm will emit warnings if packages specified in \fBpeerDependencies\fP are not already installed\. The \fBpeerDependenciesMeta\fP field serves to provide npm more information on how your peer dependencies are to be used\. Specifically, it allows peer dependencies to be marked as optional\. +When a user installs your package, npm will emit warnings if packages +specified in \fBpeerDependencies\fP are not already installed\. The +\fBpeerDependenciesMeta\fP field serves to provide npm more information on how +your peer dependencies are to be used\. Specifically, it allows peer +dependencies to be marked as optional\. .P For example: .P @@ -825,7 +889,10 @@ For example: .fi .RE .P -Marking a peer dependency as optional ensures npm will not emit a warning if the \fBsoy\-milk\fP package is not installed on the host\. This allows you to integrate and interact with a variety of host packages without requiring all of them to be installed\. +Marking a peer dependency as optional ensures npm will not emit a warning +if the \fBsoy\-milk\fP package is not installed on the host\. 
This allows you to +integrate and interact with a variety of host packages without requiring +all of them to be installed\. .SS bundledDependencies .P This defines an array of package names that will be bundled when publishing @@ -846,7 +913,8 @@ If we define a package\.json like this: "name": "awesome\-web\-framework", "version": "1\.0\.0", "bundledDependencies": [ - "renderized", "super\-streams" + "renderized", + "super\-streams" ] } .fi @@ -855,18 +923,18 @@ If we define a package\.json like this: we can obtain \fBawesome\-web\-framework\-1\.0\.0\.tgz\fP file by running \fBnpm pack\fP\|\. This file contains the dependencies \fBrenderized\fP and \fBsuper\-streams\fP which can be installed in a new project by executing \fBnpm install -awesome\-web\-framework\-1\.0\.0\.tgz\fP\|\. Note that the package names do not include -any versions, as that information is specified in \fBdependencies\fP\|\. +awesome\-web\-framework\-1\.0\.0\.tgz\fP\|\. Note that the package names do not +include any versions, as that information is specified in \fBdependencies\fP\|\. .P If this is spelled \fB"bundleDependencies"\fP, then that is also honored\. .SS optionalDependencies .P -If a dependency can be used, but you would like npm to proceed if it cannot be -found or fails to install, then you may put it in the \fBoptionalDependencies\fP -object\. This is a map of package name to version or url, just like the -\fBdependencies\fP object\. The difference is that build failures do not cause -installation to fail\. Running \fBnpm install \-\-no\-optional\fP will prevent these -dependencies from being installed\. +If a dependency can be used, but you would like npm to proceed if it cannot +be found or fails to install, then you may put it in the +\fBoptionalDependencies\fP object\. This is a map of package name to version or +url, just like the \fBdependencies\fP object\. The difference is that build +failures do not cause installation to fail\. Running \fBnpm install +\-\-no\-optional\fP will prevent these dependencies from being installed\. .P It is still your program's responsibility to handle the lack of the dependency\. For example, something like this: @@ -899,34 +967,33 @@ You can specify the version of node that your stuff works on: .P .RS 2 .nf -{ "engines" : { "node" : ">=0\.10\.3 <0\.12" } } +{ + "engines": { + "node": ">=0\.10\.3 <15" + } +} .fi .RE .P And, like with dependencies, if you don't specify the version (or if you specify "*" as the version), then any version of node will do\. .P -If you specify an "engines" field, then npm will require that "node" be -somewhere on that list\. If "engines" is omitted, then npm will just assume -that it works on node\. -.P -You can also use the "engines" field to specify which versions of npm -are capable of properly installing your program\. For example: +You can also use the "engines" field to specify which versions of npm are +capable of properly installing your program\. For example: .P .RS 2 .nf -{ "engines" : { "npm" : "~1\.0\.20" } } +{ + "engines": { + "npm": "~1\.0\.20" + } +} .fi .RE .P -Unless the user has set the \fBengine\-strict\fP config flag, this -field is advisory only and will only produce warnings when your package is installed as a dependency\. -.SS engineStrict -.P -\fBThis feature was removed in npm 3\.0\.0\fR -.P -Prior to npm 3\.0\.0, this feature was used to treat this package as if the -user had set \fBengine\-strict\fP\|\. It is no longer used\. 
+Unless the user has set the \fBengine\-strict\fP config flag, this field is +advisory only and will only produce warnings when your package is installed +as a dependency\. .SS os .P You can specify which operating systems your @@ -934,16 +1001,25 @@ module will run on: .P .RS 2 .nf -"os" : [ "darwin", "linux" ] +{ + "os": [ + "darwin", + "linux" + ] +} .fi .RE .P -You can also block instead of allowing operating systems, -just prepend the blocked os with a '!': +You can also block instead of allowing operating systems, just prepend the +blocked os with a '!': .P .RS 2 .nf -"os" : [ "!win32" ] +{ + "os": [ + "!win32" + ] +} .fi .RE .P @@ -958,7 +1034,12 @@ you can specify which ones\. .P .RS 2 .nf -"cpu" : [ "x64", "ia32" ] +{ + "cpu": [ + "x64", + "ia32" + ] +} .fi .RE .P @@ -966,53 +1047,49 @@ Like the \fBos\fP option, you can also block architectures: .P .RS 2 .nf -"cpu" : [ "!arm", "!mips" ] +{ + "cpu": [ + "!arm", + "!mips" + ] +} .fi .RE .P The host architecture is determined by \fBprocess\.arch\fP -.SS preferGlobal -.P -\fBDEPRECATED\fR -.P -This option used to trigger an npm warning, but it will no longer warn\. It is -purely there for informational purposes\. It is now recommended that you install -any binaries as local devDependencies wherever possible\. .SS private .P -If you set \fB"private": true\fP in your package\.json, then npm will refuse -to publish it\. +If you set \fB"private": true\fP in your package\.json, then npm will refuse to +publish it\. .P -This is a way to prevent accidental publication of private repositories\. If -you would like to ensure that a given package is only ever published to a -specific registry (for example, an internal registry), then use the -\fBpublishConfig\fP dictionary described below to override the \fBregistry\fP config -param at publish\-time\. +This is a way to prevent accidental publication of private repositories\. +If you would like to ensure that a given package is only ever published to +a specific registry (for example, an internal registry), then use the +\fBpublishConfig\fP dictionary described below to override the \fBregistry\fP +config param at publish\-time\. .SS publishConfig .P This is a set of config values that will be used at publish\-time\. It's especially handy if you want to set the tag, registry or access, so that you can ensure that a given package is not tagged with "latest", published -to the global public registry or that a scoped module is private by default\. -.P -Any config values can be overridden, but only "tag", "registry" and "access" -probably matter for the purposes of publishing\. +to the global public registry or that a scoped module is private by +default\. .P -See npm help \fBconfig\fP to see the list of config options that can be -overridden\. +See npm help \fBconfig\fP to see the list of config options that +can be overridden\. .SS workspaces .P The optional \fBworkspaces\fP field is an array of file patterns that describes -locations within the local file system that the install client should look up -to find each npm help workspace that needs to be symlinked to -the top level \fBnode_modules\fP folder\. +locations within the local file system that the install client should look +up to find each npm help workspace that needs to be +symlinked to the top level \fBnode_modules\fP folder\. .P It can describe either the direct paths of the folders to be used as workspaces or it can define globs that will resolve to these same folders\. 
.P -In the following example, all folders located inside the folder \fB\|\./packages\fP -will be treated as workspaces as long as they have valid \fBpackage\.json\fP files -inside them: +In the following example, all folders located inside the folder +\fB\|\./packages\fP will be treated as workspaces as long as they have valid +\fBpackage\.json\fP files inside them: .P .RS 2 .nf @@ -1032,18 +1109,18 @@ npm will default some values based on package contents\. .RS 0 .IP \(bu 2 \fB"scripts": {"start": "node server\.js"}\fP -If there is a \fBserver\.js\fP file in the root of your package, then npm -will default the \fBstart\fP command to \fBnode server\.js\fP\|\. +If there is a \fBserver\.js\fP file in the root of your package, then npm will +default the \fBstart\fP command to \fBnode server\.js\fP\|\. .IP \(bu 2 \fB"scripts":{"install": "node\-gyp rebuild"}\fP -If there is a \fBbinding\.gyp\fP file in the root of your package and you have not defined an \fBinstall\fP or \fBpreinstall\fP script, npm will -default the \fBinstall\fP command to compile using node\-gyp\. +If there is a \fBbinding\.gyp\fP file in the root of your package and you have +not defined an \fBinstall\fP or \fBpreinstall\fP script, npm will default the +\fBinstall\fP command to compile using node\-gyp\. .IP \(bu 2 \fB"contributors": [\.\.\.]\fP -If there is an \fBAUTHORS\fP file in the root of your package, npm will -treat each line as a \fBName (url)\fP format, where email and url -are optional\. Lines which start with a \fB#\fP or are blank, will be -ignored\. +If there is an \fBAUTHORS\fP file in the root of your package, npm will treat +each line as a \fBName (url)\fP format, where email and url are +optional\. Lines which start with a \fB#\fP or are blank, will be ignored\. .RE .SS SEE ALSO diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 4c69eefe99ef0a..453c0105cb0bad 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -12,138 +12,243 @@ This file is intended to be committed into source repositories, and serves various purposes: .RS 0 .IP \(bu 2 -Describe a single representation of a dependency tree such that teammates, deployments, and continuous integration are guaranteed to install exactly the same dependencies\. +Describe a single representation of a dependency tree such that +teammates, deployments, and continuous integration are guaranteed to +install exactly the same dependencies\. .IP \(bu 2 -Provide a facility for users to "time\-travel" to previous states of \fBnode_modules\fP without having to commit the directory itself\. +Provide a facility for users to "time\-travel" to previous states of +\fBnode_modules\fP without having to commit the directory itself\. .IP \(bu 2 -To facilitate greater visibility of tree changes through readable source control diffs\. +Facilitate greater visibility of tree changes through readable source +control diffs\. .IP \(bu 2 -And optimize the installation process by allowing npm to skip repeated metadata resolutions for previously\-installed packages\. +Optimize the installation process by allowing npm to skip repeated +metadata resolutions for previously\-installed packages\. +.IP \(bu 2 +As of npm v7, lockfiles include enough information to gain a complete +picture of the package tree, reducing the need to read \fBpackage\.json\fP +files, and allowing for significant performance improvements\. 
.RE +.SS \fBpackage\-lock\.json\fP vs \fBnpm\-shrinkwrap\.json\fP .P -One key detail about \fBpackage\-lock\.json\fP is that it cannot be published, and it -will be ignored if found in any place other than the toplevel package\. It shares -a format with npm help npm\-shrinkwrap\.json, which is essentially the same file, but -allows publication\. This is not recommended unless deploying a CLI tool or -otherwise using the publication process for producing production packages\. +Both of these files have the same format, and perform similar functions in +the root of a project\. .P -If both \fBpackage\-lock\.json\fP and \fBnpm\-shrinkwrap\.json\fP are present in the root of -a package, \fBpackage\-lock\.json\fP will be completely ignored\. -.SS File Format -.SS name -.P -The name of the package this is a package\-lock for\. This must match what's in -\fBpackage\.json\fP\|\. -.SS version -.P -The version of the package this is a package\-lock for\. This must match what's in -\fBpackage\.json\fP\|\. -.SS lockfileVersion -.P -An integer version, starting at \fB1\fP with the version number of this document -whose semantics were used when generating this \fBpackage\-lock\.json\fP\|\. -.SS packageIntegrity -.P -This is a subresource -integrity \fIhttps://w3c\.github\.io/webappsec/specs/subresourceintegrity/\fR value -created from the \fBpackage\.json\fP\|\. No preprocessing of the \fBpackage\.json\fP should -be done\. Subresource integrity strings can be produced by modules like -\fBssri\fP \fIhttps://www\.npmjs\.com/package/ssri\fR\|\. -.SS preserveSymlinks -.P -Indicates that the install was done with the environment variable -\fBNODE_PRESERVE_SYMLINKS\fP enabled\. The installer should insist that the value of -this property match that environment variable\. -.SS dependencies +The difference is that \fBpackage\-lock\.json\fP is that it cannot be published, +and it will be ignored if found in any place other than the root project\. .P -A mapping of package name to dependency object\. Dependency objects have the -following properties: -.SS version +In contrast, npm help npm\-shrinkwrap\.json allows +publication, and defines the dependency tree from the point encountered\. +This is not recommended unless deploying a CLI tool or otherwise using the +publication process for producing production packages\. .P -This is a specifier that uniquely identifies this package and should be -usable in fetching a new copy of it\. +If both \fBpackage\-lock\.json\fP and \fBnpm\-shrinkwrap\.json\fP are present in the +root of a project, \fBnpm\-shrinkwrap\.json\fP will take precedence and +\fBpackage\-lock\.json\fP will be ignored\. +.SS Hidden Lockfiles +.P +In order to avoid processing the \fBnode_modules\fP folder repeatedly, npm as +of v7 uses a "hidden" lockfile present in +\fBnode_modules/\.package\-lock\.json\fP\|\. This contains information about the +tree, and is used in lieu of reading the entire \fBnode_modules\fP hierarchy +provided that the following conditions are met: .RS 0 .IP \(bu 2 -bundled dependencies: Regardless of source, this is a version number that is purely for informational purposes\. +All package folders it references exist in the \fBnode_modules\fP hierarchy\. .IP \(bu 2 -registry sources: This is a version number\. (eg, \fB1\.2\.3\fP) +No package folders exist in the \fBnode_modules\fP hierarchy that are not +listed in the lockfile\. +.IP \(bu 2 +The modified time of the file is at least as recent as all of the package +folders it references\. 
+ +.RE +.P +That is, the hidden lockfile will only be relevant if it was created as +part of the most recent update to the package tree\. If another CLI mutates +the tree in any way, this will be detected, and the hidden lockfile will be +ignored\. +.P +Note that it \fIis\fR possible to manually change the \fIcontents\fR of a package +in such a way that the modified time of the package folder is unaffected\. +For example, if you add a file to \fBnode_modules/foo/lib/bar\.js\fP, then the +modified time on \fBnode_modules/foo\fP will not reflect this change\. If you +are manually editing files in \fBnode_modules\fP, it is generally best to +delete the file at \fBnode_modules/\.package\-lock\.json\fP\|\. +.P +As the hidden lockfile is ignored by older npm versions, it does not +contain the backwards compatibility affordances present in "normal" +lockfiles\. That is, it is \fBlockfileVersion: 3\fP, rather than +\fBlockfileVersion: 2\fP\|\. +.SS Handling Old Lockfiles +.P +When npm detects a lockfile from npm v6 or before during the package +installation process, it is automatically updated to fetch missing +information from either the \fBnode_modules\fP tree or (in the case of empty +\fBnode_modules\fP trees or very old lockfile formats) the npm registry\. +.SS File Format +.SS \fBname\fP +.P +The name of the package this is a package\-lock for\. This will match what's +in \fBpackage\.json\fP\|\. +.SS \fBversion\fP +.P +The version of the package this is a package\-lock for\. This will match +what's in \fBpackage\.json\fP\|\. +.SS \fBlockfileVersion\fP +.P +An integer version, starting at \fB1\fP with the version number of this +document whose semantics were used when generating this +\fBpackage\-lock\.json\fP\|\. +.P +Note that the file format changed significantly in npm v7 to track +information that would have otherwise required looking in \fBnode_modules\fP or +the npm registry\. Lockfiles generated by npm v7 will contain +\fBlockfileVersion: 2\fP\|\. +.RS 0 .IP \(bu 2 -git sources: This is a git specifier with resolved committish\. (eg, \fBgit+https://example\.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e\fP) +No version provided: an "ancient" shrinkwrap file from a version of npm +prior to npm v5\. .IP \(bu 2 -http tarball sources: This is the URL of the tarball\. (eg, \fBhttps://example\.com/example\-1\.3\.0\.tgz\fP) +\fB1\fP: The lockfile version used by npm v5 and v6\. .IP \(bu 2 -local tarball sources: This is the file URL of the tarball\. (eg \fBfile:///opt/storage/example\-1\.3\.0\.tgz\fP) +\fB2\fP: The lockfile version used by npm v7, which is backwards compatible +to v1 lockfiles\. .IP \(bu 2 -local link sources: This is the file URL of the link\. (eg \fBfile:libs/our\-module\fP) +\fB3\fP: The lockfile version used by npm v7, \fIwithout\fR backwards +compatibility affordances\. This is used for the hidden lockfile at +\fBnode_modules/\.package\-lock\.json\fP, and will likely be used in a future +version of npm, once support for npm v6 is no longer relevant\. .RE -.SS integrity .P -This is a Standard Subresource -Integrity \fIhttps://w3c\.github\.io/webappsec/specs/subresourceintegrity/\fR for this -resource\. +npm will always attempt to get whatever data it can out of a lockfile, even +if it is not a version that it was designed to support\. +.SS \fBpackages\fP +.P +This is an object that maps package locations to an object containing the +information about that package\. 
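.P
For illustration only, a much\-abbreviated \fBpackages\fP map might look like
this (the package \fBfoo\fP, its version, resolved URL, and integrity value
are all hypothetical):
.P
.RS 2
.nf
{
  "packages": {
    "": {
      "name": "my\-project",
      "version": "1\.0\.0"
    },
    "node_modules/foo": {
      "version": "1\.2\.3",
      "resolved": "https://registry\.npmjs\.org/foo/\-/foo\-1\.2\.3\.tgz",
      "integrity": "sha512\-\.\.\."
    }
  }
}
.fi
.RE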
+.P +The root project is typically listed with a key of \fB""\fP, and all other +packages are listed with their relative paths from the root project folder\. +.P +Package descriptors have the following fields: .RS 0 .IP \(bu 2 -For bundled dependencies this is not included, regardless of source\. +version: The version found in \fBpackage\.json\fP +.IP \(bu 2 +resolved: The place where the package was actually resolved from\. In +the case of packages fetched from the registry, this will be a url to a +tarball\. In the case of git dependencies, this will be the full git url +with commit sha\. In the case of link dependencies, this will be the +location of the link target\. +.IP \(bu 2 +integrity: A \fBsha512\fP or \fBsha1\fP Standard Subresource +Integrity \fIhttps://w3c\.github\.io/webappsec/specs/subresourceintegrity/\fR +string for the artifact that was unpacked in this location\. +.IP \(bu 2 +link: A flag to indicate that this is a symbolic link\. If this is +present, no other fields are specified, since the link target will also +be included in the lockfile\. .IP \(bu 2 -For registry sources, this is the \fBintegrity\fP that the registry provided, or if one wasn't provided the SHA1 in \fBshasum\fP\|\. +dev, optional, devOptional: If the package is strictly part of the +\fBdevDependencies\fP tree, then \fBdev\fP will be true\. If it is strictly part +of the \fBoptionalDependencies\fP tree, then \fBoptional\fP will be set\. If it +is both a \fBdev\fP dependency \fIand\fR an \fBoptional\fP dependency of a non\-dev +dependency, then \fBdevOptional\fP will be set\. (An \fBoptional\fP dependency of +a \fBdev\fP dependency will have both \fBdev\fP and \fBoptional\fP set\.) .IP \(bu 2 -For git sources this is the specific commit hash we cloned from\. +inBundle: A flag to indicate that the package is a bundled dependency\. .IP \(bu 2 -For remote tarball sources this is an integrity based on a SHA512 of -the file\. +hasInstallScript: A flag to indicate that the package has a \fBpreinstall\fP, +\fBinstall\fP, or \fBpostinstall\fP script\. .IP \(bu 2 -For local tarball sources: This is an integrity field based on the SHA512 of the file\. +hasShrinkwrap: A flag to indicate that the package has an +\fBnpm\-shrinkwrap\.json\fP file\. +.IP \(bu 2 +bin, license, engines, dependencies, optionalDependencies: fields from +\fBpackage\.json\fP .RE -.SS resolved +.SS dependencies +.P +Legacy data for supporting versions of npm that use \fBlockfileVersion: 1\fP\|\. +This is a mapping of package names to dependency objects\. Because the +object structure is strictly hierarchical, symbolic link dependencies are +somewhat challenging to represent in some cases\. +.P +npm v7 ignores this section entirely if a \fBpackages\fP section is present, +but does keep it up to date in order to support switching between npm v6 +and npm v7\. +.P +Dependency objects have the following fields: .RS 0 .IP \(bu 2 -For bundled dependencies this is not included, regardless of source\. +version: a specifier that varies depending on the nature of the package, +and is usable in fetching a new copy of it\. +.RS +.IP \(bu 2 +bundled dependencies: Regardless of source, this is a version number +that is purely for informational purposes\. .IP \(bu 2 -For registry sources this is path of the tarball relative to the registry -URL\. If the tarball URL isn't on the same server as the registry URL then -this is a complete URL\. +registry sources: This is a version number\. 
(eg, \fB1\.2\.3\fP) +.IP \(bu 2 +git sources: This is a git specifier with resolved committish\. (eg, +\fBgit+https://example\.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e\fP) +.IP \(bu 2 +http tarball sources: This is the URL of the tarball\. (eg, +\fBhttps://example\.com/example\-1\.3\.0\.tgz\fP) +.IP \(bu 2 +local tarball sources: This is the file URL of the tarball\. (eg +\fBfile:///opt/storage/example\-1\.3\.0\.tgz\fP) +.IP \(bu 2 +local link sources: This is the file URL of the link\. (eg +\fBfile:libs/our\-module\fP) + +.RE +.IP \(bu 2 +integrity: A \fBsha512\fP or \fBsha1\fP Standard Subresource +Integrity \fIhttps://w3c\.github\.io/webappsec/specs/subresourceintegrity/\fR +string for the artifact that was unpacked in this location\. For git +dependencies, this is the commit sha\. +.IP \(bu 2 +resolved: For registry sources this is path of the tarball relative to +the registry URL\. If the tarball URL isn't on the same server as the +registry URL then this is a complete URL\. +.IP \(bu 2 +bundled: If true, this is the bundled dependency and will be installed +by the parent module\. When installing, this module will be extracted +from the parent module during the extract phase, not installed as a +separate dependency\. +.IP \(bu 2 +dev: If true then this dependency is either a development dependency ONLY +of the top level module or a transitive dependency of one\. This is false +for dependencies that are both a development dependency of the top level +and a transitive dependency of a non\-development dependency of the top +level\. +.IP \(bu 2 +optional: If true then this dependency is either an optional dependency +ONLY of the top level module or a transitive dependency of one\. This is +false for dependencies that are both an optional dependency of the top +level and a transitive dependency of a non\-optional dependency of the top +level\. +.IP \(bu 2 +requires: This is a mapping of module name to version\. This is a list of +everything this module requires, regardless of where it will be +installed\. The version should match via normal matching rules a +dependency either in our \fBdependencies\fP or in a level higher than us\. +.IP \(bu 2 +dependencies: The dependencies of this dependency, exactly as at the top +level\. .RE -.SS bundled -.P -If true, this is the bundled dependency and will be installed by the parent -module\. When installing, this module will be extracted from the parent -module during the extract phase, not installed as a separate dependency\. -.SS dev -.P -If true then this dependency is either a development dependency ONLY of the -top level module or a transitive dependency of one\. This is false for -dependencies that are both a development dependency of the top level and a -transitive dependency of a non\-development dependency of the top level\. -.SS optional -.P -If true then this dependency is either an optional dependency ONLY of the -top level module or a transitive dependency of one\. This is false for -dependencies that are both an optional dependency of the top level and a -transitive dependency of a non\-optional dependency of the top level\. -.P -All optional dependencies should be included even if they're uninstallable -on the current platform\. -.SS requires -.P -This is a mapping of module name to version\. This is a list of everything -this module requires, regardless of where it will be installed\. The version -should match via normal matching rules a dependency either in our -\fBdependencies\fP or in a level higher than us\. 
-.SS dependencies -.P -The dependencies of this dependency, exactly as at the top level\. .SS See also .RS 0 .IP \(bu 2 npm help shrinkwrap .IP \(bu 2 -npm help shrinkwrap\.json -.IP \(bu 2 -npm help package\-locks +npm help npm\-shrinkwrap\.json .IP \(bu 2 npm help package\.json .IP \(bu 2 diff --git a/deps/npm/man/man5/package-locks.5 b/deps/npm/man/man5/package-locks.5 deleted file mode 100644 index c71959c743fa77..00000000000000 --- a/deps/npm/man/man5/package-locks.5 +++ /dev/null @@ -1,200 +0,0 @@ -.TH "PACKAGE\-LOCKS" "5" "January 2021" "" "" -.SH "NAME" -\fBpackage-locks\fR \- An explanation of npm lockfiles -.SS Description -.P -Conceptually, the "input" to npm help \fBinstall\fP is a npm help package\.json, while its -"output" is a fully\-formed \fBnode_modules\fP tree: a representation of the -dependencies you declared\. In an ideal world, npm would work like a pure -function: the same \fBpackage\.json\fP should produce the exact same \fBnode_modules\fP -tree, any time\. In some cases, this is indeed true\. But in many others, npm is -unable to do this\. There are multiple reasons for this: -.RS 0 -.IP \(bu 2 -different versions of npm (or other package managers) may have been used to install a package, each using slightly different installation algorithms\. -.IP \(bu 2 -a new version of a direct semver\-range package may have been published since the last time your packages were installed, and thus a newer version will be used\. -.IP \(bu 2 -A dependency of one of your dependencies may have published a new version, which will update even if you used pinned dependency specifiers (\fB1\.2\.3\fP instead of \fB^1\.2\.3\fP) -.IP \(bu 2 -The registry you installed from is no longer available, or allows mutation of versions (unlike the primary npm registry), and a different version of a package exists under the same version number now\. - -.RE -.P -As an example, consider package A: -.P -.RS 2 -.nf -{ - "name": "A", - "version": "0\.1\.0", - "dependencies": { - "B": "<0\.1\.0" - } -} -.fi -.RE -.P -package B: -.P -.RS 2 -.nf -{ - "name": "B", - "version": "0\.0\.1", - "dependencies": { - "C": "<0\.1\.0" - } -} -.fi -.RE -.P -and package C: -.P -.RS 2 -.nf -{ - "name": "C", - "version": "0\.0\.1" -} -.fi -.RE -.P -If these are the only versions of A, B, and C available in the -registry, then a normal \fBnpm install A\fP will install: -.P -.RS 2 -.nf -A@0\.1\.0 -`\-\- B@0\.0\.1 - `\-\- C@0\.0\.1 -.fi -.RE -.P -However, if B@0\.0\.2 is published, then a fresh \fBnpm install A\fP will -install: -.P -.RS 2 -.nf -A@0\.1\.0 -`\-\- B@0\.0\.2 - `\-\- C@0\.0\.1 -.fi -.RE -.P -assuming the new version did not modify B's dependencies\. Of course, -the new version of B could include a new version of C and any number -of new dependencies\. If such changes are undesirable, the author of A -could specify a dependency on B@0\.0\.1\|\. However, if A's author and B's -author are not the same person, there's no way for A's author to say -that he or she does not want to pull in newly published versions of C -when B hasn't changed at all\. -.P -To prevent this potential issue, npm uses npm help package\-lock\.json or, if present, npm help npm\-shrinkwrap\.json\. These files are called package locks, or lockfiles\. -.P -Whenever you run \fBnpm install\fP, npm generates or updates your package lock, -which will look something like this: -.P -.RS 2 -.nf -{ - "name": "A", - "version": "0\.1\.0", - \.\.\.metadata fields\.\.\. 
- "dependencies": { - "B": { - "version": "0\.0\.1", - "resolved": "https://registry\.npmjs\.org/B/\-/B\-0\.0\.1\.tgz", - "integrity": "sha512\-DeAdb33F+" - "dependencies": { - "C": { - "version": "git://github\.com/org/C\.git#5c380ae319fc4efe9e7f2d9c78b0faa588fd99b4" - } - } - } - } -} -.fi -.RE -.P -This file describes an \fIexact\fR, and more importantly \fIreproducible\fR -\fBnode_modules\fP tree\. Once it's present, any future installation will base its -work off this file, instead of recalculating dependency versions off -npm help package\.json\. -.P -The presence of a package lock changes the installation behavior such that: -.RS 0 -.IP 1. 3 -The module tree described by the package lock is reproduced\. This means -reproducing the structure described in the file, using the specific files -referenced in "resolved" if available, falling back to normal package resolution -using "version" if one isn't\. -.IP 2. 3 -The tree is walked and any missing dependencies are installed in the usual -fashion\. - -.RE -.P -If \fBpreshrinkwrap\fP, \fBshrinkwrap\fP or \fBpostshrinkwrap\fP are in the \fBscripts\fP -property of the \fBpackage\.json\fP, they will be executed in order\. \fBpreshrinkwrap\fP -and \fBshrinkwrap\fP are executed before the shrinkwrap, \fBpostshrinkwrap\fP is -executed afterwards\. These scripts run for both \fBpackage\-lock\.json\fP and -\fBnpm\-shrinkwrap\.json\fP\|\. For example to run some postprocessing on the generated -file: -.P -.RS 2 -.nf - "scripts": { - "postshrinkwrap": "json \-I \-e \\"this\.myMetadata = $MY_APP_METADATA\\"" - } -.fi -.RE -.SS Using locked packages -.P -Using a locked package is no different than using any package without a package -lock: any commands that update \fBnode_modules\fP and/or \fBpackage\.json\fP\|'s -dependencies will automatically sync the existing lockfile\. This includes \fBnpm -install\fP, \fBnpm rm\fP, \fBnpm update\fP, etc\. To prevent this update from happening, -you can use the \fB\-\-no\-save\fP option to prevent saving altogether, or -\fB\-\-no\-shrinkwrap\fP to allow \fBpackage\.json\fP to be updated while leaving -\fBpackage\-lock\.json\fP or \fBnpm\-shrinkwrap\.json\fP intact\. -.P -It is highly recommended you commit the generated package lock to source -control: this will allow anyone else on your team, your deployments, your -CI/continuous integration, and anyone else who runs \fBnpm install\fP in your -package source to get the exact same dependency tree that you were developing -on\. Additionally, the diffs from these changes are human\-readable and will -inform you of any changes npm has made to your \fBnode_modules\fP, so you can notice -if any transitive dependencies were updated, hoisted, etc\. -.SS Resolving lockfile conflicts -.P -Occasionally, two separate npm install will create package locks that cause -merge conflicts in source control systems\. As of \fBnpm@5\.7\.0\fP, these conflicts -can be resolved by manually fixing any \fBpackage\.json\fP conflicts, and then -running \fBnpm install [\-\-package\-lock\-only]\fP again\. npm will automatically -resolve any conflicts for you and write a merged package lock that includes all -the dependencies from both branches in a reasonable tree\. If -\fB\-\-package\-lock\-only\fP is provided, it will do this without also modifying your -local \fBnode_modules/\fP\|\. -.P -To make this process seamless on git, consider installing -\fBnpm\-merge\-driver\fP \fIhttps://npm\.im/npm\-merge\-driver\fR, which will teach git how -to do this itself without any user interaction\. 
In short: \fB$ npx -npm\-merge\-driver install \-g\fP will let you do this, and even works with -pre\-\fBnpm@5\.7\.0\fP versions of npm 5, albeit a bit more noisily\. Note that if -\fBpackage\.json\fP itself conflicts, you will have to resolve that by hand and run -\fBnpm install\fP manually, even with the merge driver\. -.SS See Also -.RS 0 -.IP \(bu 2 -https://medium\.com/@sdboyer/so\-you\-want\-to\-write\-a\-package\-manager\-4ae9c17d9527 -.IP \(bu 2 -npm help package\.json -.IP \(bu 2 -npm help package\-lock\.json -.IP \(bu 2 -npm help shrinkwrap\.json -.IP \(bu 2 -npm help shrinkwrap - -.RE diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js index 4c7e96da4ecf39..6c46656eb92923 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js @@ -53,6 +53,7 @@ class Arborist extends Base { ...options, path: options.path || '.', cache: options.cache || `${homedir()}/.npm/_cacache`, + packumentCache: new Map(), } this.cache = resolve(this.options.cache) this.path = resolve(this.options.path) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index abf39e5dc1757e..49e76e265b816e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -111,7 +111,7 @@ module.exports = cls => class ActualLoader extends cls { pkg: {}, global, }) - return this[_loadActualActually]({root, ignoreMissing}) + return this[_loadActualActually]({root, ignoreMissing, global}) } // not in global mode, hidden lockfile is allowed, load root pkg too @@ -154,7 +154,7 @@ module.exports = cls => class ActualLoader extends cls { return this[_actualTree] } - async [_loadActualActually] ({ root, ignoreMissing }) { + async [_loadActualActually] ({ root, ignoreMissing, global }) { await this[_loadFSTree](this[_actualTree]) if (!ignoreMissing) await this[_findMissingEdges]() @@ -162,6 +162,17 @@ module.exports = cls => class ActualLoader extends cls { this[_transplant](root) await this[_loadWorkspaces](this[_actualTree]) + if (global) { + // need to depend on the children, or else all of them + // will end up being flagged as extraneous, since the + // global root isn't a "real" project + const tree = this[_actualTree] + const actualRoot = tree.isLink ? 
tree.target : tree + const { dependencies = {} } = actualRoot.package + for (const name of actualRoot.children.keys()) + dependencies[name] = dependencies[name] || '*' + actualRoot.package = { ...actualRoot.package, dependencies } + } // only reset root flags if we're not re-rooting, otherwise leave as-is calcDepFlags(this[_actualTree], !root) return this[_actualTree] diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index 661d879eb19e64..d916b49c22c018 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -136,7 +136,7 @@ module.exports = cls => class Reifier extends cls { async [_validatePath] () { // don't create missing dirs on dry runs - if (this[_packageLockOnly] || this[_dryRun] || this[_global]) + if (this[_packageLockOnly] || this[_dryRun]) return await mkdirp(resolve(this.path)) @@ -830,9 +830,14 @@ module.exports = cls => class Reifier extends cls { const pname = child.package.name const alias = name !== pname updateDepSpec(pkg, name, (alias ? `npm:${pname}@` : '') + range) - } else if (req.hosted) - updateDepSpec(pkg, name, req.hosted.shortcut({ noCommittish: false })) - else + } else if (req.hosted) { + // save the git+https url if it has auth, otherwise shortcut + const h = req.hosted + const opt = { noCommittish: false } + const save = h.https && h.auth ? `git+${h.https(opt)}` + : h.shortcut(opt) + updateDepSpec(pkg, name, save) + } else updateDepSpec(pkg, name, req.saveSpec) } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js index 5d648de5bd87b4..32276482419017 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js @@ -9,6 +9,7 @@ const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { return null try { + const hostedOpt = { noCommittish: false } const { fetchSpec, saveSpec, @@ -20,7 +21,9 @@ const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { const isPath = type === 'file' || type === 'directory' return isPath && !relPaths ? `file:${fetchSpec}` : isPath ? 'file:' + (toPath ? relpath(toPath, fetchSpec) : fetchSpec) - : hosted ? 'git+' + hosted.sshurl({ noCommittish: false }) + : hosted ? `git+${ + hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt) + }` : type === 'git' ? saveSpec // always return something. 'foo' is interpreted as 'foo@' otherwise. : rawSpec === '' && raw.slice(-1) !== '@' ? 
raw diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/diff.js b/deps/npm/node_modules/@npmcli/arborist/lib/diff.js index 1864a3ea10b676..ada67f8161d308 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/diff.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/diff.js @@ -72,6 +72,11 @@ const allChildren = node => { if (!node) return new Map() + // if the node is a global root, and also a link, then what we really + // want is to traverse the target's children + if (node.global && node.isRoot && node.isLink) + return allChildren(node.target) + const kids = new Map() for (const n of [node, ...node.fsChildren]) { for (const kid of n.children.values()) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/edge.js b/deps/npm/node_modules/@npmcli/arborist/lib/edge.js index 0e30f463363700..c5f00faff2999f 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/edge.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/edge.js @@ -1,6 +1,7 @@ // An edge in the dependency graph // Represents a dependency relationship of some kind +const util = require('util') const npa = require('npm-package-arg') const depValid = require('./dep-valid.js') const _from = Symbol('_from') @@ -24,6 +25,21 @@ const types = new Set([ 'workspace', ]) +class ArboristEdge {} +const printableEdge = (edge) => { + const edgeFrom = edge.from && edge.from.location + const edgeTo = edge.to && edge.to.location + + return Object.assign(new ArboristEdge(), { + name: edge.name, + spec: edge.spec, + type: edge.type, + ...(edgeFrom != null ? { from: edgeFrom } : {}), + ...(edgeTo ? { to: edgeTo } : {}), + ...(edge.error ? { error: edge.error } : {}), + }) +} + class Edge { constructor (options) { const { type, name, spec, accept, from } = options @@ -185,6 +201,14 @@ class Edge { get to () { return this[_to] } + + toJSON () { + return printableEdge(this) + } + + [util.inspect.custom] () { + return this.toJSON() + } } Edge.types = [...types] diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/index.js b/deps/npm/node_modules/@npmcli/arborist/lib/index.js index 830a88a5f953ba..fd7d8817258ed6 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/index.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/index.js @@ -2,5 +2,6 @@ module.exports = require('./arborist/index.js') module.exports.Arborist = module.exports module.exports.Node = require('./node.js') module.exports.Link = require('./link.js') +module.exports.Edge = require('./edge.js') // XXX export the other classes, too. shrinkwrap, diff, etc. // they're handy! diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js index 6e243c049d2730..396bcb58a2de96 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js @@ -40,6 +40,7 @@ const treeCheck = require('./tree-check.js') const walkUp = require('walk-up-path') const {resolve, relative, dirname, basename} = require('path') +const util = require('util') const _package = Symbol('_package') const _parent = Symbol('_parent') const _target = Symbol.for('_target') @@ -63,6 +64,71 @@ const _meta = Symbol('_meta') const relpath = require('./relpath.js') const consistentResolve = require('./consistent-resolve.js') +// helper function to output a clearer visualization +// of the current node and its descendents +class ArboristNode {} + +const printableTree = (tree, path = []) => + (path.includes(tree) ? 
{ location: tree.location } : (path.push(tree), Object.assign(new ArboristNode(), { + name: tree.name, + ...(tree.package && tree.package.version + ? { version: tree.package.version } + : {}), + location: tree.location, + path: tree.path, + realpath: tree.realpath, + ...(tree.isLink ? { target: printableTree(tree.target, path) } : {}), + ...(tree.resolved != null ? { resolved: tree.resolved } : {}), + ...(tree.extraneous ? { extraneous: true } : { + ...(tree.dev ? { dev: true } : {}), + ...(tree.optional ? { optional: true } : {}), + ...(tree.devOptional && !tree.dev && !tree.optional + ? { devOptional: true } : {}), + ...(tree.peer ? { peer: true } : {}), + }), + ...(tree.inBundle ? { bundled: true } : {}), + // handle top-level tree error + ...(tree.error + ? { + error: { + code: tree.error.code, + ...(tree.error.path + ? { path: tree.error.path } + : {}), + }, + } : {}), + // handle errors for each node + ...(tree.errors && tree.errors.length + ? { + errors: tree.errors.map(error => ({ + code: error.code, + ...(error.path + ? { path: error.path } + : {}), + })), + } : {}), + ...(tree.edgesIn && tree.edgesIn.size ? { + edgesIn: new Set([...tree.edgesIn] + .sort((a, b) => a.from.location.localeCompare(b.from.location))), + } : {}), + ...(tree.edgesOut && tree.edgesOut.size ? { + edgesOut: new Map([...tree.edgesOut.entries()] + .sort((a, b) => a[0].localeCompare(b[0]))), + } : {}), + ...(tree.fsChildren && tree.fsChildren.size ? { + fsChildren: new Set([...tree.fsChildren] + .sort((a, b) => a.path.localeCompare(b.path)) + .map(tree => printableTree(tree, path))), + } : {}), + ...(tree.target || !tree.children || !tree.children.size + ? {} + : { + children: new Map([...tree.children.entries()] + .sort((a, b) => a[0].localeCompare(b[0])) + .map(([name, tree]) => [name, printableTree(tree, path)])), + }), + }))) + class Node { constructor (options) { // NB: path can be null if it's a link target @@ -1145,6 +1211,14 @@ class Node { const base = scoped ? `${basename(d)}/${basename(rp)}` : basename(rp) return base === name && basename(nm) === 'node_modules' ? dir : false } + + toJSON () { + return printableTree(this) + } + + [util.inspect.custom] () { + return this.toJSON() + } } module.exports = Node diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js index a454320a318e68..9254531e49d4aa 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -200,9 +200,9 @@ class Shrinkwrap { return s[_maybeStat]().then(([sw, lock]) => { s.filename = resolve(s.path, (s.hiddenLockfile ? 'node_modules/.package-lock' - : s.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap' + : s.shrinkwrapOnly || sw ? 'npm-shrinkwrap' : 'package-lock') + '.json') - s.loadedFromDisk = sw || lock + s.loadedFromDisk = !!(sw || lock) s.type = basename(s.filename) return s }) @@ -353,14 +353,14 @@ class Shrinkwrap { // we don't need to load package-lock.json except for top of tree nodes, // only npm-shrinkwrap.json. return this[_maybeRead]().then(([sw, lock, yarn]) => { - const data = lock || sw || '' + const data = sw || lock || '' // use shrinkwrap only for deps, otherwise prefer package-lock // and ignore npm-shrinkwrap if both are present. // TODO: emit a warning here or something if both are present. this.filename = resolve(this.path, (this.hiddenLockfile ? 'node_modules/.package-lock' - : this.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap' + : this.shrinkwrapOnly || sw ? 
'npm-shrinkwrap' : 'package-lock') + '.json') this.type = basename(this.filename) diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 6300a5e867d4cd..fafd1fb0f865f2 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,12 +1,12 @@ { "name": "@npmcli/arborist", - "version": "2.0.3", + "version": "2.0.6", "description": "Manage node_modules trees", "dependencies": { "@npmcli/installed-package-contents": "^1.0.5", "@npmcli/map-workspaces": "^1.0.1", - "@npmcli/metavuln-calculator": "^1.0.0", - "@npmcli/move-file": "^1.0.1", + "@npmcli/metavuln-calculator": "^1.0.1", + "@npmcli/move-file": "^1.1.0", "@npmcli/name-from-folder": "^1.0.1", "@npmcli/node-gyp": "^1.0.1", "@npmcli/run-script": "^1.8.1", @@ -19,7 +19,8 @@ "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.0", "npm-pick-manifest": "^6.1.0", - "pacote": "^11.1.14", + "npm-registry-fetch": "^9.0.0", + "pacote": "^11.2.3", "parse-conflict-json": "^1.1.1", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^1.0.1", diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js b/deps/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js index 95bcc67558ed17..15340f5dc70e87 100644 --- a/deps/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js +++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js @@ -65,7 +65,7 @@ class Advisory { // load up the data from a cache entry and a fetched packument load (cached, packument) { - // basic data integrity gutchecks + // basic data integrity gutcheck if (!cached || typeof cached !== 'object') { throw new TypeError('invalid cached data, expected object') } @@ -148,7 +148,42 @@ class Advisory { } [_calculateRange] () { - const metavuln = this.vulnerableVersions.join(' || ').trim() + // calling semver.simplifyRange with a massive list of versions, and those + // versions all concatenated with `||` is a geometric CPU explosion! + // we can try to be a *little* smarter up front by doing x-y for all + // contiguous version sets in the list + const ranges = [] + this.versions = semver.sort(this.versions) + this.vulnerableVersions = semver.sort(this.vulnerableVersions) + for (let v = 0, vulnVer = 0; v < this.versions.length; v++) { + // figure out the vulnerable subrange + const vr = [this.versions[v]] + while (v < this.versions.length) { + if (this.versions[v] !== this.vulnerableVersions[vulnVer]) { + // we don't test prerelease versions, so just skip past it + if (/-/.test(this.versions[v])) { + v++ + continue + } + break + } + if (vr.length > 1) + vr[1] = this.versions[v] + else + vr.push(this.versions[v]) + v++ + vulnVer++ + } + // it'll either be just the first version, which means no overlap, + // or the start and end versions, which might be the same version + if (vr.length > 1) { + const tail = this.versions[this.versions.length - 1] + ranges.push(vr[1] === tail ? `>=${vr[0]}` + : vr[0] === vr[1] ? vr[0] + : vr.join(' - ')) + } + } + const metavuln = ranges.join(' || ').trim() this.range = !metavuln ? '<0.0.0-0' : semver.simplifyRange(this.versions, metavuln, semverOpt) } @@ -271,25 +306,99 @@ class Advisory { } for (const list of versionSets) { - const headVuln = this.testVersion(list[0]) - const tailVuln = this.testVersion(list[list.length - 1]) + // it's common to have version lists like: + // 1.0.0 + // 1.0.1-alpha.0 + // 1.0.1-alpha.1 + // ... 
+ // 1.0.1-alpha.999 + // 1.0.1 + // 1.0.2-alpha.0 + // ... + // 1.0.2-alpha.99 + // 1.0.2 + // with a huge number of prerelease versions that are not installable + // anyway. + // If mid has a prerelease tag, and list[0] does not, then walk it + // back until we hit a non-prerelease version + // If mid has a prerelease tag, and list[list.length-1] does not, + // then walk it forward until we hit a version without a prerelease tag + // Similarly, if the head/tail is a prerelease, but there is a non-pr + // version in the list, then start there instead. + let h = 0 + const origHeadVuln = this.testVersion(list[h]) + while (h < list.length && /-/.test(String(list[h]))) + h++ + + // don't filter out the whole list! they might all be pr's + if (h === list.length) + h = 0 + else if (origHeadVuln) { + // if the original was vulnerable, assume so are all of these + for (let hh = 0; hh < h; hh++) + this[_markVulnerable](list[hh]) + } + + let t = list.length - 1 + const origTailVuln = this.testVersion(list[t]) + while (t > h && /-/.test(String(list[t]))) + t-- + + // don't filter out the whole list! might all be pr's + if (t === h) + t = list.length - 1 + else if (origTailVuln) { + // if original tail was vulnerable, assume these are as well + for (let tt = list.length - 1; tt > t; tt--) + this[_markVulnerable](list[tt]) + } + + const headVuln = h === 0 ? origHeadVuln + : this.testVersion(list[h]) + + const tailVuln = t === list.length - 1 ? origTailVuln + : this.testVersion(list[t]) + // if head and tail both vulnerable, whole list is thrown out if (headVuln && tailVuln) { - for (const v of list.slice(1, -1)) { - this[_markVulnerable](v) - } + for (let v = h; v < t; v++) + this[_markVulnerable](list[v]) continue } // if length is 2 or 1, then we marked them all already - if (list.length <= 2) + if (t < h + 2) continue const mid = Math.floor(list.length / 2) - // leave out the ends, since we tested those already - versionSets.add(list.slice(0, mid)) - versionSets.add(list.slice(mid)) + const pre = list.slice(0, mid) + const post = list.slice(mid) + + // if the parent list wasn't prereleases, then drop pr tags + // from end of the pre list, and beginning of the post list, + // marking as vulnerable if the midpoint item we picked is. 
+ if (!/-/.test(String(pre[0]))) { + const midVuln = this.testVersion(pre[pre.length - 1]) + while (/-/.test(String(pre[pre.length-1]))) { + const v = pre.pop() + if (midVuln) + this[_markVulnerable](v) + } + } + + if (!/-/.test(String(post[post.length-1]))) { + const midVuln = this.testVersion(post[0]) + while (/-/.test(String(post[0]))) { + const v = post.shift() + if (midVuln) + this[_markVulnerable](v) + } + } + + versionSets.add(pre) + versionSets.add(post) } } } + module.exports = Advisory diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json index ec5eea5a4ceac3..636382170f177c 100644 --- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json +++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "1.0.0", + "version": "1.0.2", "main": "lib/index.js", "files": [ "lib" diff --git a/deps/npm/node_modules/@npmcli/move-file/README.md b/deps/npm/node_modules/@npmcli/move-file/README.md index da682ebd5123a5..8a5a57f0f8d926 100644 --- a/deps/npm/node_modules/@npmcli/move-file/README.md +++ b/deps/npm/node_modules/@npmcli/move-file/README.md @@ -3,7 +3,7 @@ A fork of [move-file](https://github.com/sindresorhus/move-file) with compatibility with all node 10.x versions. -> Move a file +> Move a file (or directory) The built-in [`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) @@ -18,6 +18,7 @@ would have expected `fs.rename()` to be. - Optionally prevent overwriting an existing file. - Creates non-existent destination directories for you. - Support for Node versions that lack built-in recursive `fs.mkdir()` +- Automatically recurses when source is a directory. ## Install @@ -48,13 +49,13 @@ Returns a `Promise` that resolves when the file has been moved. Type: `string` -File you want to move. +File, or directory, you want to move. #### destination Type: `string` -Where you want the file moved. +Where you want the file or directory moved. #### options @@ -65,4 +66,4 @@ Type: `object` Type: `boolean`\ Default: `true` -Overwrite existing destination file. +Overwrite existing destination file(s). 
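The README changes above extend `@npmcli/move-file` from single files to whole directories. A short usage sketch of that documented surface follows (the paths are hypothetical); the implementation diff below shows how the `EXDEV` fallback now recurses and re-creates symlinks.

```js
// Hypothetical usage of @npmcli/move-file as documented above; paths are made up.
const moveFile = require('@npmcli/move-file')

async function main () {
  // Moving a single file works as before; missing destination
  // directories are created automatically.
  await moveFile('/tmp/cache/pkg.tgz', '/var/lib/app/pkg.tgz')

  // Moving a directory now recurses automatically, even across devices:
  // on EXDEV the tree is copied, symlinks are re-created, and the source
  // is removed afterwards. `overwrite` defaults to true.
  await moveFile('/tmp/staging/node_modules', '/opt/app/node_modules', { overwrite: true })

  // A synchronous variant is also exported.
  moveFile.sync('/tmp/staging/etc', '/opt/app/etc')
}

main().catch(err => {
  process.exitCode = 1
  console.error(err)
})
```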
diff --git a/deps/npm/node_modules/@npmcli/move-file/index.js b/deps/npm/node_modules/@npmcli/move-file/index.js index d1567d1f64f730..51f9535d39f4cc 100644 --- a/deps/npm/node_modules/@npmcli/move-file/index.js +++ b/deps/npm/node_modules/@npmcli/move-file/index.js @@ -1,4 +1,5 @@ -const { dirname } = require('path') +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const rimraf_ = require('rimraf') const { promisify } = require('util') const { access: access_, @@ -7,14 +8,31 @@ const { copyFileSync, unlink: unlink_, unlinkSync, + readdir: readdir_, + readdirSync, rename: rename_, renameSync, + stat: stat_, + statSync, + lstat: lstat_, + lstatSync, + symlink: symlink_, + symlinkSync, + readlink: readlink_, + readlinkSync } = require('fs') const access = promisify(access_) const copyFile = promisify(copyFile_) const unlink = promisify(unlink_) +const readdir = promisify(readdir_) const rename = promisify(rename_) +const stat = promisify(stat_) +const lstat = promisify(lstat_) +const symlink = promisify(symlink_) +const readlink = promisify(readlink_) +const rimraf = promisify(rimraf_) +const rimrafSync = rimraf_.sync const mkdirp = require('mkdirp') @@ -36,7 +54,7 @@ const pathExistsSync = path => { } } -module.exports = async (source, destination, options = {}) => { +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { if (!source || !destination) { throw new TypeError('`source` and `destination` file required') } @@ -56,15 +74,38 @@ module.exports = async (source, destination, options = {}) => { await rename(source, destination) } catch (error) { if (error.code === 'EXDEV') { - await copyFile(source, destination) - await unlink(source) + const sourceStat = await lstat(source) + if (sourceStat.isDirectory()) { + const files = await readdir(source) + await Promise.all(files.map((file) => moveFile(join(source, file), join(destination, file), options, false, symlinks))) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await copyFile(source, destination) + } } else { throw error } } + + if (root) { + await Promise.all(symlinks.map(async ({ source, destination }) => { + let target = await readlink(source) + // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination + if (isAbsolute(target)) + target = resolve(destination, relative(source, target)) + // try to determine what the actual file is so we can create the correct type of symlink in windows + let targetStat + try { + targetStat = await stat(resolve(dirname(source), target)) + } catch (err) {} + await symlink(target, destination, targetStat && targetStat.isDirectory() ? 
'junction' : 'file') + })) + await rimraf(source) + } } -module.exports.sync = (source, destination, options = {}) => { +const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => { if (!source || !destination) { throw new TypeError('`source` and `destination` file required') } @@ -84,10 +125,38 @@ module.exports.sync = (source, destination, options = {}) => { renameSync(source, destination) } catch (error) { if (error.code === 'EXDEV') { - copyFileSync(source, destination) - unlinkSync(source) + const sourceStat = lstatSync(source) + if (sourceStat.isDirectory()) { + const files = readdirSync(source) + for (const file of files) { + moveFileSync(join(source, file), join(destination, file), options, false, symlinks) + } + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + copyFileSync(source, destination) + } } else { throw error } } + + if (root) { + for (const { source, destination } of symlinks) { + let target = readlinkSync(source) + // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination + if (isAbsolute(target)) + target = resolve(destination, relative(source, target)) + // try to determine what the actual file is so we can create the correct type of symlink in windows + let targetStat + try { + targetStat = statSync(resolve(dirname(source), target)) + } catch (err) {} + symlinkSync(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file') + } + rimrafSync(source) + } } + +module.exports = moveFile +module.exports.sync = moveFileSync diff --git a/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/LICENSE b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/README.md b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/README.md new file mode 100644 index 00000000000000..423b8cf854ad3e --- /dev/null +++ b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. 
+ +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. + +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). 
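A short usage sketch of the vendored rimraf 2.x API documented in the README above, covering the callback form and the sync variant; the paths and glob pattern are hypothetical.

```js
// Usage sketch of the rimraf 2.x API documented above; paths are made up.
const rimraf = require('rimraf')

// Callback style; the first argument is treated as a glob pattern by default,
// and an options object may pass settings through to the glob module.
rimraf('/tmp/build/**/*.tmp', { glob: { nosort: true, silent: true } }, err => {
  if (err) throw err
  console.log('temp files removed')
})

// Synchronous variant, with globbing disabled so wildcard characters in the
// path are taken literally.
rimraf.sync('/tmp/build/output', { disableGlob: true })
```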
diff --git a/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/bin.js b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/bin.js new file mode 100755 index 00000000000000..0d1e17be701ec3 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/bin.js @@ -0,0 +1,50 @@ +#!/usr/bin/env node + +var rimraf = require('./') + +var help = false +var dashdash = false +var noglob = false +var args = process.argv.slice(2).filter(function(arg) { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else + return !!arg +}) + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + var log = help ? console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + process.exit(help ? 0 : 1) +} else + go(0) + +function go (n) { + if (n >= args.length) + return + var options = {} + if (noglob) + options = { glob: false } + rimraf(args[n], options, function (er) { + if (er) + throw er + go(n+1) + }) +} diff --git a/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/package.json b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/package.json new file mode 100644 index 00000000000000..26e05d85ea2fdf --- /dev/null +++ b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/package.json @@ -0,0 +1,29 @@ +{ + "name": "rimraf", + "version": "2.7.1", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags", + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.1.3" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + } +} diff --git a/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/rimraf.js b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/rimraf.js new file mode 100644 index 00000000000000..a90ad029f3ece1 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/move-file/node_modules/rimraf/rimraf.js @@ -0,0 +1,372 @@ +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = require("assert") +var path = require("path") +var fs = require("fs") +var glob = undefined +try { + glob = require("glob") +} catch (_err) { + // treat glob as optional. 
+} +var _0666 = parseInt('666', 8) + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. 
+ if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. 
+ if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} diff --git a/deps/npm/node_modules/@npmcli/move-file/package.json b/deps/npm/node_modules/@npmcli/move-file/package.json index 476bc76ba73ff5..46b42c9e9aa2b8 100644 --- a/deps/npm/node_modules/@npmcli/move-file/package.json +++ b/deps/npm/node_modules/@npmcli/move-file/package.json @@ -1,12 +1,13 @@ { "name": "@npmcli/move-file", - "version": "1.0.1", + "version": "1.1.0", "files": [ "index.js" ], "description": "move a file (fork of move-file)", "dependencies": { - "mkdirp": "^1.0.4" + "mkdirp": "^1.0.4", + "rimraf": "^2.7.1" }, "devDependencies": { "require-inject": "^1.4.4", diff --git a/deps/npm/node_modules/make-fetch-happen/index.js b/deps/npm/node_modules/make-fetch-happen/index.js index b8d7bd98da5965..54f72049c1d52b 100644 --- a/deps/npm/node_modules/make-fetch-happen/index.js +++ b/deps/npm/node_modules/make-fetch-happen/index.js @@ -285,6 +285,10 @@ function remoteFetch (uri, opts) { size: opts.size, counter: opts.counter, timeout: opts.timeout, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + rejectUnauthorized: opts.strictSSL, } return retry( diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index 4fc6163d5a0278..2d555bcb86f41e 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "8.0.12", + "version": "8.0.13", "description": "Opinionated, caching, retrying fetch client", "main": "index.js", "files": [ diff --git a/deps/npm/node_modules/pacote/README.md b/deps/npm/node_modules/pacote/README.md index 81cd437efcd994..619e0ec44e8f6c 100644 --- a/deps/npm/node_modules/pacote/README.md +++ b/deps/npm/node_modules/pacote/README.md @@ -162,6 +162,11 @@ resolved, and other properties, as they are determined. including information not strictly required for installation (author, description, etc.) 
Defaults to `true` when `before` is set, since the version publish time is part of the extended packument metadata. +* `packumentCache` For registry packuments only, you may provide a `Map` + object which will be used to cache packument requests between pacote + calls. This allows you to easily avoid hitting the registry multiple + times (even just to validate the cache) for a given packument, since it + is unlikely to change in the span of a single command. ## Extracted File Modes diff --git a/deps/npm/node_modules/pacote/lib/fetcher.js b/deps/npm/node_modules/pacote/lib/fetcher.js index 33fbf79c61b60f..c4e5852daf8a87 100644 --- a/deps/npm/node_modules/pacote/lib/fetcher.js +++ b/deps/npm/node_modules/pacote/lib/fetcher.js @@ -47,6 +47,8 @@ class FetcherBase { throw new TypeError('options object is required') this.spec = npa(spec, opts.where) + this.allowGitIgnore = !!opts.allowGitIgnore + // a bit redundant because presumably the caller already knows this, // but it makes it easier to not have to keep track of the requested // spec when we're dispatching thousands of these at once, and normalizing @@ -60,6 +62,7 @@ class FetcherBase { // clone the opts object so that others aren't upset when we mutate it // by adding/modifying the integrity value. this.opts = {...opts} + this.cache = opts.cache || cacheDir() this.resolved = opts.resolved || null @@ -413,7 +416,7 @@ class FetcherBase { const base = basename(entry.path) if (base === '.npmignore') sawIgnores.add(entry.path) - else if (base === '.gitignore') { + else if (base === '.gitignore' && !this.allowGitIgnore) { // rename, but only if there's not already a .npmignore const ni = entry.path.replace(/\.gitignore$/, '.npmignore') if (sawIgnores.has(ni)) diff --git a/deps/npm/node_modules/pacote/lib/git.js b/deps/npm/node_modules/pacote/lib/git.js index 81f7ca2567ce36..406ab5c600221b 100644 --- a/deps/npm/node_modules/pacote/lib/git.js +++ b/deps/npm/node_modules/pacote/lib/git.js @@ -24,13 +24,16 @@ const _cloneRepo = Symbol('_cloneRepo') const _setResolvedWithSha = Symbol('_setResolvedWithSha') const _prepareDir = Symbol('_prepareDir') -// get the repository url. prefer ssh, fall back to git:// +// get the repository url. +// prefer https if there's auth, since ssh will drop that. +// otherwise, prefer ssh if available (more secure). // We have to add the git+ back because npa suppresses it. -const repoUrl = (hosted, opts) => - hosted.sshurl && addGitPlus(hosted.sshurl(opts)) || - hosted.https && addGitPlus(hosted.https(opts)) +const repoUrl = (h, opts) => + h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || + h.https && addGitPlus(h.https(opts)) -const addGitPlus = url => url && `git+${url}` +// add git+ to the url, but only one time. +const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') class GitFetcher extends Fetcher { constructor (spec, opts) { @@ -51,6 +54,11 @@ class GitFetcher extends Fetcher { this.resolvedSha = '' } + // just exposed to make it easier to test all the combinations + static repoUrl (hosted, opts) { + return repoUrl(hosted, opts) + } + get types () { return ['git'] } @@ -69,13 +77,16 @@ class GitFetcher extends Fetcher { } // first try https, since that's faster and passphrase-less for - // public repos. Fall back to SSH to support private repos. - // NB: we always store the SSH url in the 'resolved' field. + // public repos, and supports private repos when auth is provided. 
+ // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it [_resolvedFromHosted] (hosted) { return this[_resolvedFromRepo](hosted.https && hosted.https()) .catch(er => { const ssh = hosted.sshurl && hosted.sshurl() - if (!ssh) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) throw er return this[_resolvedFromRepo](ssh) }) @@ -121,9 +132,11 @@ class GitFetcher extends Fetcher { // either a git url with a hash, or a tarball download URL [_addGitSha] (sha) { if (this.spec.hosted) { - this[_setResolvedWithSha]( - this.spec.hosted.shortcut({ noCommittish: true }) + '#' + sha - ) + const h = this.spec.hosted + const opt = { noCommittish: true } + const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) + + this[_setResolvedWithSha](`${base}#${sha}`) } else { const u = url.format(new url.URL(`#${sha}`, this.spec.rawSpec)) this[_setResolvedWithSha](url.format(u)) @@ -207,6 +220,7 @@ class GitFetcher extends Fetcher { const nameat = this.spec.name ? `${this.spec.name}@` : '' return new RemoteFetcher(h.tarball({ noCommittish: false }), { ...this.opts, + allowGitIgnore: true, pkgid: `git:${nameat}${this.resolved}`, resolved: this.resolved, integrity: null, // it'll always be different, if we have one @@ -231,14 +245,19 @@ class GitFetcher extends Fetcher { }) } + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it [_cloneHosted] (ref, tmp) { const hosted = this.spec.hosted const https = hosted.https() return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) .catch(er => { const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) - /* istanbul ignore if - should be covered by the resolve() call */ - if (!ssh) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) throw er return this[_cloneRepo](ssh, ref, tmp) }) diff --git a/deps/npm/node_modules/pacote/lib/registry.js b/deps/npm/node_modules/pacote/lib/registry.js index b9df036146406d..537610d2990d09 100644 --- a/deps/npm/node_modules/pacote/lib/registry.js +++ b/deps/npm/node_modules/pacote/lib/registry.js @@ -20,6 +20,14 @@ class RegistryFetcher extends Fetcher { constructor (spec, opts) { super(spec, opts) + // you usually don't want to fetch the same packument multiple times in + // the span of a given script or command, no matter how many pacote calls + // are made, so this lets us avoid doing that. It's only relevant for + // registry fetchers, because other types simulate their packument from + // the manifest, which they memoize on this.package, so it's very cheap + // already. + this.packumentCache = this.opts.packumentCache || null + // handle case when npm-package-arg guesses wrong. if (this.spec.type === 'tag' && this.spec.rawSpec === '' && @@ -64,11 +72,17 @@ class RegistryFetcher extends Fetcher { } } - packument () { + async packument () { + // note this might be either an in-flight promise for a request, + // or the actual packument, but we never want to make more than + // one request at a time for the same thing regardless. 
+ if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) + return this.packumentCache.get(this.packumentUrl) + // npm-registry-fetch the packument // set the appropriate header for corgis if fullMetadata isn't set // return the res.json() promise - return fetch(this.packumentUrl, { + const p = fetch(this.packumentUrl, { ...this.opts, headers: this[_headers](), spec: this.spec, @@ -77,8 +91,12 @@ class RegistryFetcher extends Fetcher { }).then(res => res.json().then(packument => { packument._cached = res.headers.has('x-local-cache') packument._contentLength = +res.headers.get('content-length') + if (this.packumentCache) + this.packumentCache.set(this.packumentUrl, packument) return packument })).catch(er => { + if (this.packumentCache) + this.packumentCache.delete(this.packumentUrl) if (er.code === 'E404' && !this.fullMetadata) { // possible that corgis are not supported by this registry this.fullMetadata = true @@ -86,6 +104,9 @@ class RegistryFetcher extends Fetcher { } throw er }) + if (this.packumentCache) + this.packumentCache.set(this.packumentUrl, p) + return p } manifest () { diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json index 085e8f66af175f..b55685a48b2411 100644 --- a/deps/npm/node_modules/pacote/package.json +++ b/deps/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "11.1.14", + "version": "11.2.3", "description": "JavaScript package downloader", "author": "Isaac Z. Schlueter (https://izs.me)", "bin": { @@ -13,7 +13,7 @@ "snap": "tap", "preversion": "npm test", "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags" }, "tap": { "timeout": 300, diff --git a/deps/npm/package.json b/deps/npm/package.json index 9f0aacbda26880..da6175dfe80b8e 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "7.4.0", + "version": "7.4.3", "name": "npm", "description": "a package manager for JavaScript", "keywords": [ @@ -42,7 +42,7 @@ "./package.json": "./package.json" }, "dependencies": { - "@npmcli/arborist": "^2.0.3", + "@npmcli/arborist": "^2.0.6", "@npmcli/ci-detect": "^1.2.0", "@npmcli/config": "^1.2.8", "@npmcli/run-script": "^1.8.1", @@ -74,7 +74,7 @@ "libnpmsearch": "^3.1.0", "libnpmteam": "^2.0.2", "libnpmversion": "^1.0.7", - "make-fetch-happen": "^8.0.12", + "make-fetch-happen": "^8.0.13", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -90,7 +90,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "~4.1.2", "opener": "^1.5.2", - "pacote": "^11.1.14", + "pacote": "^11.2.3", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", @@ -180,7 +180,7 @@ ], "devDependencies": { "cmark-gfm": "^0.8.5", - "eslint": "^7.14.0", + "eslint": "^7.18.0", "eslint-plugin-import": "^2.22.1", "eslint-plugin-node": "^11.1.0", "eslint-plugin-promise": "^4.2.1", diff --git a/deps/npm/tap-snapshots/test-lib-link.js-TAP.test.js b/deps/npm/tap-snapshots/test-lib-link.js-TAP.test.js index de7f483b60de85..ab1d5c6b830fbe 100644 --- a/deps/npm/tap-snapshots/test-lib-link.js-TAP.test.js +++ b/deps/npm/tap-snapshots/test-lib-link.js-TAP.test.js @@ -19,6 +19,11 @@ exports[`test/lib/link.js TAP link pkg already in global space > should create a ` +exports[`test/lib/link.js TAP link pkg already in global space when prefix is a symlink > should create a local symlink to global pkg 1`] = ` 
+{CWD}/test/lib/link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/scoped-linked + +` + exports[`test/lib/link.js TAP link to globalDir when in current working dir of pkg and no args > should create a global link to current pkg 1`] = ` {CWD}/test/lib/link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/test-pkg-link diff --git a/deps/npm/test/lib/link.js b/deps/npm/test/lib/link.js index a478259f7b409a..c39026a49163f6 100644 --- a/deps/npm/test/lib/link.js +++ b/deps/npm/test/lib/link.js @@ -259,6 +259,64 @@ t.test('link pkg already in global space', (t) => { }) }) +t.test('link pkg already in global space when prefix is a symlink', (t) => { + t.plan(3) + + const testdir = t.testdir({ + 'global-prefix': t.fixture('symlink', './real-global-prefix'), + 'real-global-prefix': { + lib: { + node_modules: { + '@myscope': { + linked: t.fixture('symlink', '../../../../scoped-linked'), + }, + }, + }, + }, + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, + 'my-project': { + 'package.json': JSON.stringify({ + name: 'my-project', + version: '1.0.0', + }), + }, + }) + npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') + npm.prefix = resolve(testdir, 'my-project') + + npm.config.find = () => 'default' + + const _cwd = process.cwd() + process.chdir(npm.prefix) + + reifyOutput = async () => { + reifyOutput = undefined + process.chdir(_cwd) + npm.config.find = () => null + + const links = await printLinks({ + path: npm.prefix, + }) + + t.equal( + require(resolve(testdir, 'my-project', 'package.json')).dependencies, + undefined, + 'should not save to package.json upon linking' + ) + + t.matchSnapshot(links, 'should create a local symlink to global pkg') + } + + link(['@myscope/linked'], (err) => { + t.ifError(err, 'should not error out') + }) +}) + t.test('completion', (t) => { const testdir = t.testdir({ 'global-prefix': { diff --git a/deps/npm/test/lib/ls.js b/deps/npm/test/lib/ls.js index 7bbfc5f772b9f6..a9504a2ad055ca 100644 --- a/deps/npm/test/lib/ls.js +++ b/deps/npm/test/lib/ls.js @@ -3795,6 +3795,7 @@ t.test('ls --json', (t) => { t.deepEqual( jsonParse(result), { + name: 'ls-ls-json-global', dependencies: { a: { version: '1.0.0', diff --git a/deps/npm/test/lib/outdated.js b/deps/npm/test/lib/outdated.js index 7a5bd8f0ef870c..da53b6031d6f93 100644 --- a/deps/npm/test/lib/outdated.js +++ b/deps/npm/test/lib/outdated.js @@ -131,6 +131,9 @@ t.test('should display outdated deps', t => { devDependencies: { zeta: '^1.0.0', }, + optionalDependencies: { + lorem: '^1.0.0', + }, peerDependencies: { beta: '^1.0.0', }, diff --git a/deps/v8/src/ast/ast-source-ranges.h b/deps/v8/src/ast/ast-source-ranges.h index 1b42a055dd642d..1e96ec4c27c58b 100644 --- a/deps/v8/src/ast/ast-source-ranges.h +++ b/deps/v8/src/ast/ast-source-ranges.h @@ -47,6 +47,7 @@ struct SourceRange { V(Block) \ V(CaseClause) \ V(Conditional) \ + V(Expression) \ V(FunctionLiteral) \ V(IfStatement) \ V(IterationStatement) \ @@ -281,6 +282,24 @@ class NaryOperationSourceRanges final : public AstNodeSourceRanges { ZoneVector ranges_; }; +class ExpressionSourceRanges final : public AstNodeSourceRanges { + public: + explicit ExpressionSourceRanges(const SourceRange& right_range) + : 
right_range_(right_range) {} + + SourceRange GetRange(SourceRangeKind kind) override { + DCHECK(HasRange(kind)); + return right_range_; + } + + bool HasRange(SourceRangeKind kind) override { + return kind == SourceRangeKind::kRight; + } + + private: + SourceRange right_range_; +}; + class SuspendSourceRanges final : public ContinuationSourceRanges { public: explicit SuspendSourceRanges(int32_t continuation_position) diff --git a/deps/v8/src/interpreter/bytecode-generator.cc b/deps/v8/src/interpreter/bytecode-generator.cc index 87c537879c5f68..74a4f66a5626bd 100644 --- a/deps/v8/src/interpreter/bytecode-generator.cc +++ b/deps/v8/src/interpreter/bytecode-generator.cc @@ -4575,8 +4575,11 @@ void BytecodeGenerator::VisitThrow(Throw* expr) { void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) { if (property->is_optional_chain_link()) { DCHECK_NOT_NULL(optional_chaining_null_labels_); + int right_range = + AllocateBlockCoverageSlotIfEnabled(property, SourceRangeKind::kRight); builder()->LoadAccumulatorWithRegister(obj).JumpIfUndefinedOrNull( optional_chaining_null_labels_->New()); + BuildIncrementBlockCoverageCounterIfEnabled(right_range); } AssignType property_kind = Property::GetAssignType(property); @@ -4902,8 +4905,11 @@ void BytecodeGenerator::VisitCall(Call* expr) { if (expr->is_optional_chain_link()) { DCHECK_NOT_NULL(optional_chaining_null_labels_); + int right_range = + AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kRight); builder()->LoadAccumulatorWithRegister(callee).JumpIfUndefinedOrNull( optional_chaining_null_labels_->New()); + BuildIncrementBlockCoverageCounterIfEnabled(right_range); } // Evaluate all arguments to the function call and store in sequential args @@ -5175,7 +5181,10 @@ void BytecodeGenerator::VisitDelete(UnaryOperation* unary) { OptionalChainNullLabelScope label_scope(this); VisitForAccumulatorValue(property->obj()); if (property->is_optional_chain_link()) { + int right_range = AllocateBlockCoverageSlotIfEnabled( + property, SourceRangeKind::kRight); builder()->JumpIfUndefinedOrNull(label_scope.labels()->New()); + BuildIncrementBlockCoverageCounterIfEnabled(right_range); } Register object = register_allocator()->NewRegister(); builder()->StoreAccumulatorInRegister(object); diff --git a/deps/v8/src/parsing/parser-base.h b/deps/v8/src/parsing/parser-base.h index 608427664961b5..863f645f781a1a 100644 --- a/deps/v8/src/parsing/parser-base.h +++ b/deps/v8/src/parsing/parser-base.h @@ -3289,6 +3289,7 @@ ParserBase::ParseLeftHandSideContinuation(ExpressionT result) { bool optional_chaining = false; bool is_optional = false; + int optional_link_begin; do { switch (peek()) { case Token::QUESTION_PERIOD: { @@ -3296,10 +3297,16 @@ ParserBase::ParseLeftHandSideContinuation(ExpressionT result) { ReportUnexpectedToken(peek()); return impl()->FailureExpression(); } + // Include the ?. in the source range position. 
+ optional_link_begin = scanner()->peek_location().beg_pos; Consume(Token::QUESTION_PERIOD); is_optional = true; optional_chaining = true; - continue; + if (Token::IsPropertyOrCall(peek())) continue; + int pos = position(); + ExpressionT key = ParsePropertyOrPrivatePropertyName(); + result = factory()->NewProperty(result, key, pos, is_optional); + break; } /* Property */ @@ -3379,14 +3386,7 @@ ParserBase::ParseLeftHandSideContinuation(ExpressionT result) { } default: - /* Optional Property */ - if (is_optional) { - DCHECK_EQ(scanner()->current_token(), Token::QUESTION_PERIOD); - int pos = position(); - ExpressionT key = ParsePropertyOrPrivatePropertyName(); - result = factory()->NewProperty(result, key, pos, is_optional); - break; - } + // Template literals in/after an Optional Chain not supported: if (optional_chaining) { impl()->ReportMessageAt(scanner()->peek_location(), MessageTemplate::kOptionalChainingNoTemplate); @@ -3397,8 +3397,12 @@ ParserBase::ParseLeftHandSideContinuation(ExpressionT result) { result = ParseTemplateLiteral(result, position(), true); break; } - is_optional = false; - } while (is_optional || Token::IsPropertyOrCall(peek())); + if (is_optional) { + SourceRange chain_link_range(optional_link_begin, end_position()); + impl()->RecordExpressionSourceRange(result, chain_link_range); + is_optional = false; + } + } while (Token::IsPropertyOrCall(peek())); if (optional_chaining) return factory()->NewOptionalChain(result); return result; } diff --git a/deps/v8/src/parsing/parser.h b/deps/v8/src/parsing/parser.h index 46abe16d4fc9c1..1633de604099eb 100644 --- a/deps/v8/src/parsing/parser.h +++ b/deps/v8/src/parsing/parser.h @@ -997,6 +997,14 @@ class V8_EXPORT_PRIVATE Parser : public NON_EXPORTED_BASE(ParserBase) { node, zone()->New(body_range)); } + // Used to record source ranges of expressions associated with optional chain: + V8_INLINE void RecordExpressionSourceRange(Expression* node, + const SourceRange& right_range) { + if (source_range_map_ == nullptr) return; + source_range_map_->Insert(node, + zone()->New(right_range)); + } + V8_INLINE void RecordSuspendSourceRange(Expression* node, int32_t continuation_position) { if (source_range_map_ == nullptr) return; diff --git a/deps/v8/test/mjsunit/code-coverage-block.js b/deps/v8/test/mjsunit/code-coverage-block.js index 4584f3134a90db..e9d38d71466a9b 100644 --- a/deps/v8/test/mjsunit/code-coverage-block.js +++ b/deps/v8/test/mjsunit/code-coverage-block.js @@ -1177,7 +1177,7 @@ a(true); // 0500 {"start":0,"end":401,"count":2}, {"start":154,"end":254,"count":0}]); - TestCoverage( +TestCoverage( "https://crbug.com/v8/11231 - nullish coalescing", ` const a = true // 0000 @@ -1195,4 +1195,41 @@ const i = c ?? b ?? 
'hello' // 0400 {"start":262,"end":274,"count":0}, {"start":417,"end":427,"count":0}]); +TestCoverage( +"Optional Chaining", +` +const a = undefined || null // 0000 +const b = a?.b // 0050 +const c = a?.['b'] // 0100 +const d = { // 0150 + e: {f: 99, g: () => {return undefined}} // 0200 +} // 0250 +const e = d?.e?.f // 0300 +const f = d?.e?.['f'] // 0350 +const g = d?.e?.f?.g // 0400 +const h = d?.e?.f?.g?.h // 0450 +const i = d?.['d']?.['e']?.['h'] // 0500 +const k = a?.('b') // 0550 +const l = d?.e?.g?.() // 0600 +const m = d?.e?.g?.()?.a?.b // 0650 +delete a?.b // 0700 +const n = d?.[d?.x?.f] // 0750 +if (a?.[d?.x?.f]) { const p = 99 } else {}// 0800 +const p = d?.[d?.x?.f]?.x // 0850 +`, +[{"start":0,"end":899,"count":1}, + {"start":61,"end":64,"count":0}, + {"start":111,"end":118,"count":0}, + {"start":470,"end":473,"count":0}, + {"start":518,"end":532,"count":0}, + {"start":561,"end":568,"count":0}, + {"start":671,"end":677,"count":0}, + {"start":708,"end":711,"count":0}, + {"start":768,"end":771,"count":0}, + {"start":805,"end":816,"count":0}, + {"start":818,"end":834,"count":0}, + {"start":868,"end":871,"count":0}, + {"start":872,"end":875,"count":0}, + {"start":216,"end":240,"count":2}]); + %DebugToggleBlockCoverage(false); diff --git a/doc/api/buffer.md b/doc/api/buffer.md index 765aa2dd088e84..d04650ba09ea12 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -50,6 +50,9 @@ const buf7 = Buffer.from('tést', 'latin1'); ## Buffers and character encodings + +> Stability: 1 - Experimental + +A [`Blob`][] encapsulates immutable, raw data that can be safely shared across +multiple worker threads. + +### `new buffer.Blob([sources[, options]])` + + +* `sources` {string[]|ArrayBuffer[]|TypedArray[]|DataView[]|Blob[]} An array + of string, {ArrayBuffer}, {TypedArray}, {DataView}, or {Blob} objects, or + any mix of such objects, that will be stored within the `Blob`. +* `options` {Object} + * `encoding` {string} The character encoding to use for string sources. + **Default**: `'utf8'`. + * `type` {string} The Blob content-type. The intent is for `type` to convey + the MIME media type of the data, however no validation of the type format + is performed. + +Creates a new `Blob` object containing a concatenation of the given sources. + +{ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into +the 'Blob' and can therefore be safely modified after the 'Blob' is created. + +String sources are also copied into the `Blob`. + +### `blob.arrayBuffer()` + + +* Returns: {Promise} + +Returns a promise that fulfills with an {ArrayBuffer} containing a copy of +the `Blob` data. + +### `blob.size` + + +The total size of the `Blob` in bytes. + +### `blob.slice([start, [end, [type]]])` + + +* `start` {number} The starting index. +* `end` {number} The ending index. +* `type` {string} The content-type for the new `Blob` + +Creates and returns a new `Blob` containing a subset of this `Blob` objects +data. The original `Blob` is not alterered. + +### `blob.text()` + + +* Returns: {Promise} + +Returns a promise that resolves the contents of the `Blob` decoded as a UTF-8 +string. + +### `blob.type` + + +* Type: {string} + +The content-type of the `Blob`. + +### `Blob` objects and `MessageChannel` + +Once a {Blob} object is created, it can be sent via `MessagePort` to multiple +destinations without transfering or immediately copying the data. The data +contained by the `Blob` is copied only when the `arrayBuffer()` or `text()` +methods are called. 
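Before the `MessageChannel` example that follows, a minimal standalone sketch of the experimental `Blob` API documented above; the commented values follow from the byte-oriented semantics described in this section:

```js
const { Blob } = require('buffer');

(async () => {
  const blob = new Blob(['hello ', 'world'], { type: 'text/plain' });

  console.log(blob.size);          // 11 (byte length of the concatenated sources)
  console.log(blob.type);          // 'text/plain'
  console.log(await blob.text());  // 'hello world'

  const part = blob.slice(0, 5);   // new Blob over bytes 0..4; the original is unchanged
  console.log(await part.text());  // 'hello'
})();
```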
+ +```js +const { Blob } = require('buffer'); +const blob = new Blob(['hello there']); +const { setTimeout: delay } = require('timers/promises'); + +const mc1 = new MessageChannel(); +const mc2 = new MessageChannel(); + +mc1.port1.onmessage = async ({ data }) => { + console.log(await data.arrayBuffer()); + mc1.port1.close(); +}; + +mc2.port1.onmessage = async ({ data }) => { + await delay(1000); + console.log(await data.arrayBuffer()); + mc2.port1.close(); +}; + +mc1.port2.postMessage(blob); +mc2.port2.postMessage(blob); + +// The Blob is still usable after posting. +data.text().then(console.log); +``` + ## Class: `Buffer` The `Buffer` class is a global type for dealing with binary data directly. @@ -482,9 +603,10 @@ Returns the byte length of a string when encoded using `encoding`. This is not the same as [`String.prototype.length`][], which does not account for the encoding that is used to convert the string into bytes. -For `'base64'` and `'hex'`, this function assumes valid input. For strings that -contain non-base64/hex-encoded data (e.g. whitespace), the return value might be -greater than the length of a `Buffer` created from the string. +For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input. +For strings that contain non-base64/hex-encoded data (e.g. whitespace), the +return value might be greater than the length of a `Buffer` created from the +string. ```js const str = '\u00bd + \u00bc = \u00be'; @@ -3380,6 +3502,7 @@ introducing security vulnerabilities into an application. [UTF-8]: https://en.wikipedia.org/wiki/UTF-8 [WHATWG Encoding Standard]: https://encoding.spec.whatwg.org/ [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer +[`Blob`]: https://developer.mozilla.org/en-US/docs/Web/API/Blob [`Buffer.alloc()`]: #buffer_static_method_buffer_alloc_size_fill_encoding [`Buffer.allocUnsafe()`]: #buffer_static_method_buffer_allocunsafe_size [`Buffer.allocUnsafeSlow()`]: #buffer_static_method_buffer_allocunsafeslow_size @@ -3418,6 +3541,7 @@ introducing security vulnerabilities into an application. [`buffer.constants.MAX_STRING_LENGTH`]: #buffer_buffer_constants_max_string_length [`buffer.kMaxLength`]: #buffer_buffer_kmaxlength [`util.inspect()`]: util.md#util_util_inspect_object_options +[base64url]: https://tools.ietf.org/html/rfc4648#section-5 [binary strings]: https://developer.mozilla.org/en-US/docs/Web/API/DOMString/Binary [endianness]: https://en.wikipedia.org/wiki/Endianness [iterator]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 797e17bdecb20a..7a8007ba90d147 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -1284,6 +1284,25 @@ passing keys as strings or `Buffer`s due to improved security features. The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to be listed in the `transferList` argument. +### `keyObject.asymmetricKeyDetails` + + +* {Object} + * `modulusLength`: {number} Key size in bits (RSA, DSA). + * `publicExponent`: {bigint} Public exponent (RSA). + * `divisorLength`: {number} Size of `q` in bits (DSA). + * `namedCurve`: {string} Name of the curve (EC). + +This property exists only on asymmetric keys. Depending on the type of the key, +this object contains information about the key. None of the information obtained +through this property can be used to uniquely identify a key or to compromise +the security of the key. 
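A short sketch of reading the property described above, assuming a build that includes this change; with no encoding options, `generateKeyPairSync()` returns `KeyObject` instances directly:

```js
const { generateKeyPairSync } = require('crypto');

const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });

console.log(publicKey.asymmetricKeyType);     // 'rsa'
console.log(publicKey.asymmetricKeyDetails);  // { modulusLength: 2048, publicExponent: 65537n }
console.log(privateKey.asymmetricKeyDetails); // same details on the matching private key
```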
+ +RSA-PSS parameters, DH, or any future key type details might be exposed via this +API using additional attributes. + ### `keyObject.asymmetricKeyType` Encapsulates an X509 certificate and provides read-only access to -it's information. +its information. ```js const { X509Certificate } = require('crypto'); @@ -2490,7 +2509,7 @@ added: v15.0.0 * If `type` is `'hmac'`, the minimum is 1, and the maximum length is 231-1. If the value is not a multiple of 8, the generated key will be truncated to `Math.floor(length / 8)`. - * If `type` is `'aes'`, the length must be one of `128` or `256`. + * If `type` is `'aes'`, the length must be one of `128`, `192`, or `256`. * `callback`: {Function} * `err`: {Error} * `key`: {KeyObject} @@ -2519,7 +2538,7 @@ added: v15.0.0 * If `type` is `'hmac'`, the minimum is 1, and the maximum length is 231-1. If the value is not a multiple of 8, the generated key will be truncated to `Math.floor(length / 8)`. - * If `type` is `'aes'`, the length must be one of `128` or `256`. + * If `type` is `'aes'`, the length must be one of `128`, `192`, or `256`. * Returns: {KeyObject} Synchronously generates a new random secret key of the given `length`. The diff --git a/doc/api/documentation.md b/doc/api/documentation.md index 979d85ef0a3005..d39aa30438aefc 100644 --- a/doc/api/documentation.md +++ b/doc/api/documentation.md @@ -43,6 +43,9 @@ Bugs or behavior changes may surprise users when Experimental API modifications occur. To avoid surprises, use of an Experimental feature may need a command-line flag. Experimental features may also emit a [warning][]. +## Stability overview + + ## JSON output + +* {http.IncomingMessage} + +A reference to the original HTTP `request` object. + ### `response.sendDate` @@ -1957,7 +1957,7 @@ the request body. When this event is emitted and handled, the [`'request'`][] event will not be emitted. -### Event: `'connection'` +#### Event: `'connection'` @@ -3437,6 +3437,15 @@ Removes a header that has been queued for implicit sending. response.removeHeader('Content-Encoding'); ``` +### `response.req` + + +* {http2.Http2ServerRequest} + +A reference to the original HTTP2 `request` object. + #### `response.sendDate` - -* Type: {boolean} `true` if the module is running during the Node.js preload - phase. - ### `module.syncBuiltinESMExports()` + +* Type: {boolean} `true` if the module is running during the Node.js preload + phase. + ### `module.loaded` -* `percentile` {number} A percentile value between 1 and 100. +* `percentile` {number} A percentile value in the range (0, 100]. * Returns: {number} Returns the value at the given percentile. diff --git a/doc/api/tls.md b/doc/api/tls.md index bedf4e28e7bc04..2c8414f2988c9c 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -627,6 +627,9 @@ added: v0.5.3 The `server.addContext()` method adds a secure context that will be used if the client request's SNI name matches the supplied `hostname` (or wildcard). +When there are multiple matching contexts, the most recently added one is +used. + ### `server.address()` + +* `url` {URL} The [WHATWG URL][] object to convert to an options object. +* Returns: {Object} Options object + * `protocol` {string} Protocol to use. + * `hostname` {string} A domain name or IP address of the server to issue the + request to. + * `hash` {string} The fragment portion of the URL. + * `search` {string} The serialized query portion of the URL. + * `pathname` {string} The path portion of the URL. + * `path` {string} Request path. Should include query string if any. + E.G. 
`'/index.html?page=12'`. An exception is thrown when the request path + contains illegal characters. Currently, only spaces are rejected but that + may change in the future. + * `href` {string} The serialized URL. + * `port` {number} Port of remote server. + * `auth` {string} Basic authentication i.e. `'user:password'` to compute an + Authorization header. + +This utility function converts a URL object into an ordinary options object as +expected by the [`http.request()`][] and [`https.request()`][] APIs. + +```js +const { urlToHttpOptions } = require('url'); +const myURL = new URL('https://a:b@測試?abc#foo'); + +console.log(urlToHttpOptions(myUrl)); +/** +{ + protocol: 'https:', + hostname: 'xn--g6w251d', + hash: '#foo', + search: '?abc', + pathname: '/', + path: '/?abc', + href: 'https://a:b@xn--g6w251d/?abc#foo', + auth: 'a:b' +} +*/ +``` + ## Legacy URL API - -* **Version**: -* **Platform**: -* **Subsystem**: - - -``` - -If you believe that you have uncovered a bug in Node.js, please fill out this -form, following the template to the best of your ability. Do not worry if you -cannot answer every detail, just fill in what you can. +presented with a choice of issue templates. If you believe that you have +uncovered a bug in Node.js, please fill out the `Bug Report` template to the +best of your ability. Do not worry if you cannot answer every detail; just fill +in what you can. The two most important pieces of information we need in order to properly evaluate the report is a description of the behavior you are seeing and a simple @@ -105,13 +60,5 @@ activities, such as applying labels and closing/reopening/assigning issues. For more information on the roles and permissions, see ["Permission levels for repositories owned by an organization"](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization). -## Resolving a Bug Report - -In the vast majority of cases, issues are resolved by opening a Pull Request. -The process for opening and reviewing a Pull Request is similar to that of -opening and triaging issues, but carries with it a necessary review and approval -workflow that ensures that the proposed changes meet the minimal quality and -functional guidelines of the Node.js project. - [Node.js help repository]: https://github.com/nodejs/help/issues [Technical Steering Committee (TSC) repository]: https://github.com/nodejs/TSC/issues diff --git a/doc/guides/contributing/pull-requests.md b/doc/guides/contributing/pull-requests.md index 4ca1df219bb941..8ec694cb949962 100644 --- a/doc/guides/contributing/pull-requests.md +++ b/doc/guides/contributing/pull-requests.md @@ -1,10 +1,5 @@ # Pull Requests -There are two fundamental components of the Pull Request process: one concrete -and technical, and one more process oriented. The concrete and technical -component involves the specific details of setting up your local environment -so that you can make the actual changes. This is where we will start. 
- * [Dependencies](#dependencies) * [Setting up your local environment](#setting-up-your-local-environment) * [Step 1: Fork](#step-1-fork) diff --git a/doc/guides/maintaining-icu.md b/doc/guides/maintaining-icu.md index 4b0c413c3f9fcb..f98e129c5a86df 100644 --- a/doc/guides/maintaining-icu.md +++ b/doc/guides/maintaining-icu.md @@ -158,7 +158,7 @@ process.versions.icu; new Intl.DateTimeFormat('es', { month: 'long' }).format(new Date(9E8)); ``` -(This should print your updated ICU version number, and also `January` again.) +(This should print your updated ICU version number, and also `enero` again.) You are ready to check in the updated `deps/icu-small`. This is a big commit, so make this a separate commit from the smaller changes. diff --git a/doc/template.html b/doc/template.html index d7cc96062eaf6a..51f3738e1be15a 100644 --- a/doc/template.html +++ b/doc/template.html @@ -55,10 +55,10 @@

          [doc/template.html hunks (@@ -55,10 +55,10 @@ and @@ -66,8 +66,7 @@): markup around the "Node.js __VERSION__ Documentation" header and the "Table of Contents" / __TOC__ / __CONTENT__ region; the HTML tags in these hunks were lost during extraction.]
      - - + + diff --git a/lib/_http_client.js b/lib/_http_client.js index 6bae982eb1c97b..fbaebacc1559ed 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -44,6 +44,7 @@ const { const net = require('net'); const url = require('url'); const assert = require('internal/assert'); +const { once } = require('internal/util'); const { _checkIsHttpToken: checkIsHttpToken, debug, @@ -57,7 +58,7 @@ const { OutgoingMessage } = require('_http_outgoing'); const Agent = require('_http_agent'); const { Buffer } = require('buffer'); const { defaultTriggerAsyncIdScope } = require('internal/async_hooks'); -const { URL, urlToOptions, searchParamsSymbol } = require('internal/url'); +const { URL, urlToHttpOptions, searchParamsSymbol } = require('internal/url'); const { kOutHeaders, kNeedDrain } = require('internal/http'); const { connResetException, codes } = require('internal/errors'); const { @@ -76,7 +77,7 @@ const { DTRACE_HTTP_CLIENT_RESPONSE } = require('internal/dtrace'); -const { addAbortSignal } = require('stream'); +const { addAbortSignal, finished } = require('stream'); const INVALID_PATH_REGEX = /[^\u0021-\u00ff]/; const kError = Symbol('kError'); @@ -104,7 +105,7 @@ function ClientRequest(input, options, cb) { if (typeof input === 'string') { const urlStr = input; try { - input = urlToOptions(new URL(urlStr)); + input = urlToHttpOptions(new URL(urlStr)); } catch (err) { input = url.parse(urlStr); if (!input.hostname) { @@ -121,7 +122,7 @@ function ClientRequest(input, options, cb) { } else if (input && input[searchParamsSymbol] && input[searchParamsSymbol][searchParamsSymbol]) { // url.URL instance - input = urlToOptions(input); + input = urlToHttpOptions(input); } else { cb = options; options = input; @@ -240,8 +241,6 @@ function ClientRequest(input, options, cb) { this.host = host; this.protocol = protocol; - let called = false; - if (this.agent) { // If there is an agent we should default to Connection:keep-alive, // but only if the Agent will actually reuse the connection! 
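The `once()` helper imported above is used a few hunks below to guard the `options.createConnection` callback; that path is only taken when `createConnection` is supplied and no `agent` option is set. A hedged usage sketch (host and port are placeholders, not part of this diff):

```js
const http = require('http');
const net = require('net');

const req = http.request({
  host: 'localhost',
  port: 8000,
  createConnection(options, oncreate) {
    // Synchronous form: return a connected socket directly...
    return net.connect({ host: options.host, port: options.port });
    // ...or asynchronous form: call oncreate(err, socket) exactly once instead.
    // The once()-wrapped oncreate below tolerates either form (or a throw)
    // without binding more than one socket to the request.
  },
}, (res) => res.resume());

req.on('error', console.error);
req.end();
```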
@@ -305,18 +304,6 @@ function ClientRequest(input, options, cb) { options.headers); } - const oncreate = (err, socket) => { - if (called) - return; - called = true; - if (err) { - process.nextTick(() => this.emit('error', err)); - return; - } - this.onSocket(socket); - this._deferToConnect(null, null, () => this._flush()); - }; - // initiate connection if (this.agent) { this.agent.addRequest(this, options); @@ -325,20 +312,27 @@ function ClientRequest(input, options, cb) { this._last = true; this.shouldKeepAlive = false; if (typeof options.createConnection === 'function') { - const newSocket = options.createConnection(options, oncreate); - if (newSocket && !called) { - called = true; - this.onSocket(newSocket); - } else { - return; + const oncreate = once((err, socket) => { + if (err) { + process.nextTick(() => this.emit('error', err)); + } else { + this.onSocket(socket); + } + }); + + try { + const newSocket = options.createConnection(options, oncreate); + if (newSocket) { + oncreate(null, newSocket); + } + } catch (err) { + oncreate(err); } } else { debug('CLIENT use net.createConnection', options); this.onSocket(net.createConnection(options)); } } - - this._deferToConnect(null, null, () => this._flush()); } ObjectSetPrototypeOf(ClientRequest.prototype, OutgoingMessage.prototype); ObjectSetPrototypeOf(ClientRequest, OutgoingMessage); @@ -376,38 +370,12 @@ ClientRequest.prototype.destroy = function destroy(err) { this.res._dump(); } - // In the event that we don't have a socket, we will pop out of - // the request queue through handling in onSocket. - if (this.socket) { - _destroy(this, this.socket, err); - } else if (err) { - this[kError] = err; - } + this[kError] = err; + this.socket?.destroy(err); return this; }; -function _destroy(req, socket, err) { - // TODO (ronag): Check if socket was used at all (e.g. headersSent) and - // re-use it in that case. `req.socket` just checks whether the socket was - // assigned to the request and *might* have been used. 
- if (socket && (!req.agent || req.socket)) { - socket.destroy(err); - } else { - if (socket) { - socket.emit('free'); - } - if (!req.aborted && !err) { - err = connResetException('socket hang up'); - } - if (err) { - req.emit('error', err); - } - req._closed = true; - req.emit('close'); - } -} - function emitAbortNT(req) { req.emit('abort'); } @@ -836,13 +804,33 @@ ClientRequest.prototype.onSocket = function onSocket(socket, err) { }; function onSocketNT(req, socket, err) { - if (req.destroyed) { - _destroy(req, socket, req[kError]); - } else if (err) { + if (req.destroyed || err) { req.destroyed = true; - _destroy(req, null, err); + + function _destroy(req, err) { + if (!req.aborted && !err) { + err = connResetException('socket hang up'); + } + if (err) { + req.emit('error', err); + } + req._closed = true; + req.emit('close'); + } + + if (!err && req.agent) { + socket?.emit('free'); + } else if (socket) { + finished(socket.destroy(err || req[kError]), (er) => { + _destroy(req, er || err); + }); + return; + } + + _destroy(req, err || req[kError]); } else { tickOnSocket(req, socket); + req._flush(); } } diff --git a/lib/_http_server.js b/lib/_http_server.js index f1372b56dc6c1b..96f486ab608698 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -181,6 +181,7 @@ function ServerResponse(req) { if (req.method === 'HEAD') this._hasBody = false; + this.req = req; this.sendDate = true; this._sent100 = false; this._expect_continue = false; @@ -581,11 +582,7 @@ function socketOnTimeout() { function socketOnClose(socket, state) { debug('server socket close'); - // Mark this parser as reusable - if (socket.parser) { - freeParser(socket.parser, null, socket); - } - + freeParser(socket.parser, null, socket); abortIncoming(state.incoming); } @@ -609,18 +606,15 @@ function socketOnEnd(server, socket, parser, state) { if (ret instanceof Error) { debug('parse error'); + // socketOnError has additional logic and will call socket.destroy(err). FunctionPrototypeCall(socketOnError, socket, ret); - return; - } - - if (!server.httpAllowHalfOpen) { - abortIncoming(state.incoming); - if (socket.writable) socket.end(); + } else if (!server.httpAllowHalfOpen) { + socket.end(); } else if (state.outgoing.length) { state.outgoing[state.outgoing.length - 1]._last = true; } else if (socket._httpMessage) { socket._httpMessage._last = true; - } else if (socket.writable) { + } else { socket.end(); } } @@ -635,6 +629,7 @@ function socketOnData(server, socket, parser, state, d) { function onRequestTimeout(socket) { socket[kRequestTimeout] = undefined; + // socketOnError has additional logic and will call socket.destroy(err). 
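The `this.req = req` assignment added to `ServerResponse` above gives handlers a back-reference from the response to its request (mirrored for the HTTP/2 compat layer later in this diff); a minimal sketch:

```js
const http = require('http');

const server = http.createServer((req, res) => {
  // res.req === req, which helps when only the response object is passed around.
  res.end(`You asked for ${res.req.url}`);
});

server.listen(0);
```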
ReflectApply(socketOnError, socket, [new ERR_HTTP_REQUEST_TIMEOUT()]); } diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index 1447253bf73224..b5fbe4d36b2578 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -1458,7 +1458,8 @@ Server.prototype[EE.captureRejectionSymbol] = function( function SNICallback(servername, callback) { const contexts = this.server._contexts; - for (const elem of contexts) { + for (let i = contexts.length - 1; i >= 0; --i) { + const elem = contexts[i]; if (RegExpPrototypeTest(elem[0], servername)) { callback(null, elem[1]); return; diff --git a/lib/buffer.js b/lib/buffer.js index d3034a46c60b29..591ddd632383fb 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -100,6 +100,7 @@ const { hideStackFrames } = require('internal/errors'); const { + validateArray, validateBuffer, validateInteger, validateString @@ -115,6 +116,10 @@ const { createUnsafeBuffer } = require('internal/buffer'); +const { + Blob, +} = require('internal/blob'); + FastBuffer.prototype.constructor = Buffer; Buffer.prototype = FastBuffer.prototype; addBufferPrototypeMethods(Buffer.prototype); @@ -530,9 +535,7 @@ Buffer.isEncoding = function isEncoding(encoding) { Buffer[kIsEncodingSymbol] = Buffer.isEncoding; Buffer.concat = function concat(list, length) { - if (!ArrayIsArray(list)) { - throw new ERR_INVALID_ARG_TYPE('list', 'Array', list); - } + validateArray(list, 'list'); if (list.length === 0) return new FastBuffer(); @@ -646,6 +649,20 @@ const encodingOps = { encodingsMap.base64, dir) }, + base64url: { + encoding: 'base64url', + encodingVal: encodingsMap.base64url, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => + buf.base64urlWrite(string, offset, len), + slice: (buf, start, end) => buf.base64urlSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64url), + byteOffset, + encodingsMap.base64url, + dir) + }, hex: { encoding: 'hex', encodingVal: encodingsMap.hex, @@ -702,6 +719,11 @@ function getEncodingOps(encoding) { if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex') return encodingOps.hex; break; + case 9: + if (encoding === 'base64url' || + StringPrototypeToLowerCase(encoding) === 'base64url') + return encodingOps.base64url; + break; } } @@ -1191,6 +1213,7 @@ if (internalBinding('config').hasIntl) { } module.exports = { + Blob, Buffer, SlowBuffer, transcode, diff --git a/lib/events.js b/lib/events.js index 8beb501678f647..75baac156aaf86 100644 --- a/lib/events.js +++ b/lib/events.js @@ -736,7 +736,7 @@ async function once(emitter, name, options = {}) { } function abortListener() { eventTargetAgnosticRemoveListener(emitter, name, resolver); - eventTargetAgnosticRemoveListener(emitter, 'error', resolver); + eventTargetAgnosticRemoveListener(emitter, 'error', errorListener); reject(lazyDOMException('The operation was aborted', 'AbortError')); } if (signal != null) { diff --git a/lib/fs.js b/lib/fs.js index a4b38f10899d48..b873593e2307a4 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -37,7 +37,6 @@ const { BigIntPrototypeToString, MathMax, Number, - NumberIsSafeInteger, ObjectCreate, ObjectDefineProperties, ObjectDefineProperty, @@ -75,7 +74,8 @@ const { ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, - ERR_FEATURE_UNAVAILABLE_ON_PLATFORM + ERR_FEATURE_UNAVAILABLE_ON_PLATFORM, + ERR_OUT_OF_RANGE, }, hideStackFrames, uvErrmapGet, @@ -545,9 +545,23 @@ function read(fd, buffer, offset, length, position, callback) { 
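The `'base64url'` encoding wired into `lib/buffer.js` above behaves like `'base64'` but uses the URL-safe alphabet (`-` and `_`) and omits padding; a quick sketch:

```js
const buf = Buffer.from('hello world?');

console.log(buf.toString('base64'));     // 'aGVsbG8gd29ybGQ/'
console.log(buf.toString('base64url'));  // 'aGVsbG8gd29ybGQ_'

// Decoding accepts the URL-safe alphabet.
console.log(Buffer.from('aGVsbG8gd29ybGQ_', 'base64url').toString());  // 'hello world?'
```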
validateOffsetLengthRead(offset, length, buffer.byteLength); - if (!NumberIsSafeInteger(position)) + if (position == null) position = -1; + if (typeof position === 'number') { + validateInteger(position, 'position'); + } else if (typeof position === 'bigint') { + if (!(position >= -(2n ** 63n) && position <= 2n ** 63n - 1n)) { + throw new ERR_OUT_OF_RANGE('position', + `>= ${-(2n ** 63n)} && <= ${2n ** 63n - 1n}`, + position); + } + } else { + throw new ERR_INVALID_ARG_TYPE('position', + ['integer', 'bigint'], + position); + } + function wrapper(err, bytesRead) { // Retain a reference to buffer so that it can't be GC'ed too soon. callback(err, bytesRead || 0, buffer); @@ -597,9 +611,23 @@ function readSync(fd, buffer, offset, length, position) { validateOffsetLengthRead(offset, length, buffer.byteLength); - if (!NumberIsSafeInteger(position)) + if (position == null) position = -1; + if (typeof position === 'number') { + validateInteger(position, 'position'); + } else if (typeof position === 'bigint') { + if (!(position >= -(2n ** 63n) && position <= 2n ** 63n - 1n)) { + throw new ERR_OUT_OF_RANGE('position', + `>= ${-(2n ** 63n)} && <= ${2n ** 63n - 1n}`, + position); + } + } else { + throw new ERR_INVALID_ARG_TYPE('position', + ['integer', 'bigint'], + position); + } + const ctx = {}; const result = binding.read(fd, buffer, offset, length, position, undefined, ctx); diff --git a/lib/https.js b/lib/https.js index a7fcf06a95f273..79ac6c6bd58707 100644 --- a/lib/https.js +++ b/lib/https.js @@ -48,7 +48,7 @@ const { ClientRequest } = require('_http_client'); let debug = require('internal/util/debuglog').debuglog('https', (fn) => { debug = fn; }); -const { URL, urlToOptions, searchParamsSymbol } = require('internal/url'); +const { URL, urlToHttpOptions, searchParamsSymbol } = require('internal/url'); const { IncomingMessage, ServerResponse } = require('http'); const { kIncomingMessage } = require('_http_common'); @@ -303,7 +303,7 @@ function request(...args) { if (typeof args[0] === 'string') { const urlStr = ArrayPrototypeShift(args); try { - options = urlToOptions(new URL(urlStr)); + options = urlToHttpOptions(new URL(urlStr)); } catch (err) { options = url.parse(urlStr); if (!options.hostname) { @@ -320,7 +320,7 @@ function request(...args) { } else if (args[0] && args[0][searchParamsSymbol] && args[0][searchParamsSymbol][searchParamsSymbol]) { // url.URL instance - options = urlToOptions(ArrayPrototypeShift(args)); + options = urlToHttpOptions(ArrayPrototypeShift(args)); } if (args[0] && typeof args[0] !== 'function') { diff --git a/lib/internal/blob.js b/lib/internal/blob.js new file mode 100644 index 00000000000000..e49b3710e0768a --- /dev/null +++ b/lib/internal/blob.js @@ -0,0 +1,238 @@ +'use strict'; + +const { + ArrayFrom, + ObjectSetPrototypeOf, + Promise, + PromiseResolve, + RegExpPrototypeTest, + StringPrototypeToLowerCase, + Symbol, + SymbolIterator, + Uint8Array, +} = primordials; + +const { + createBlob, + FixedSizeBlobCopyJob, +} = internalBinding('buffer'); + +const { + JSTransferable, + kClone, + kDeserialize, +} = require('internal/worker/js_transferable'); + +const { + isAnyArrayBuffer, + isArrayBufferView, +} = require('internal/util/types'); + +const { + customInspectSymbol: kInspect, + emitExperimentalWarning, +} = require('internal/util'); +const { inspect } = require('internal/util/inspect'); + +const { + AbortError, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_BUFFER_TOO_LARGE, + ERR_OUT_OF_RANGE, + } +} = require('internal/errors'); + +const { + validateObject, + 
validateString, + validateUint32, + isUint32, +} = require('internal/validators'); + +const kHandle = Symbol('kHandle'); +const kType = Symbol('kType'); +const kLength = Symbol('kLength'); + +let Buffer; + +function deferred() { + let res, rej; + const promise = new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + return { promise, resolve: res, reject: rej }; +} + +function lazyBuffer() { + if (Buffer === undefined) + Buffer = require('buffer').Buffer; + return Buffer; +} + +function isBlob(object) { + return object?.[kHandle] !== undefined; +} + +function getSource(source, encoding) { + if (isBlob(source)) + return [source.size, source[kHandle]]; + + if (typeof source === 'string') { + source = lazyBuffer().from(source, encoding); + } else if (isAnyArrayBuffer(source)) { + source = new Uint8Array(source); + } else if (!isArrayBufferView(source)) { + throw new ERR_INVALID_ARG_TYPE( + 'source', + [ + 'string', + 'ArrayBuffer', + 'SharedArrayBuffer', + 'Buffer', + 'TypedArray', + 'DataView' + ], + source); + } + + // We copy into a new Uint8Array because the underlying + // BackingStores are going to be detached and owned by + // the Blob. We also don't want to have to worry about + // byte offsets. + source = new Uint8Array(source); + return [source.byteLength, source]; +} + +class InternalBlob extends JSTransferable { + constructor(handle, length, type = '') { + super(); + this[kHandle] = handle; + this[kType] = type; + this[kLength] = length; + } +} + +class Blob extends JSTransferable { + constructor(sources = [], options) { + emitExperimentalWarning('buffer.Blob'); + if (sources === null || + typeof sources[SymbolIterator] !== 'function' || + typeof sources === 'string') { + throw new ERR_INVALID_ARG_TYPE('sources', 'Iterable', sources); + } + if (options !== undefined) + validateObject(options, 'options'); + const { + encoding = 'utf8', + type = '', + } = { ...options }; + + let length = 0; + const sources_ = ArrayFrom(sources, (source) => { + const { 0: len, 1: src } = getSource(source, encoding); + length += len; + return src; + }); + + // This is a MIME media type but we're not actively checking the syntax. + // But, to be fair, neither does Chrome. + validateString(type, 'options.type'); + + if (!isUint32(length)) + throw new ERR_BUFFER_TOO_LARGE(0xFFFFFFFF); + + super(); + this[kHandle] = createBlob(sources_, length); + this[kLength] = length; + this[kType] = RegExpPrototypeTest(/[^\u{0020}-\u{007E}]/u, type) ? + '' : StringPrototypeToLowerCase(type); + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? 
null : options.depth - 1 + }; + + return `Blob ${inspect({ + size: this.size, + type: this.type, + }, opts)}`; + } + + [kClone]() { + const handle = this[kHandle]; + const type = this[kType]; + const length = this[kLength]; + return { + data: { handle, type, length }, + deserializeInfo: 'internal/blob:InternalBlob' + }; + } + + [kDeserialize]({ handle, type, length }) { + this[kHandle] = handle; + this[kType] = type; + this[kLength] = length; + } + + get type() { return this[kType]; } + + get size() { return this[kLength]; } + + slice(start = 0, end = (this[kLength]), type = this[kType]) { + validateUint32(start, 'start'); + if (end < 0) end = this[kLength] + end; + validateUint32(end, 'end'); + validateString(type, 'type'); + if (end < start) + throw new ERR_OUT_OF_RANGE('end', 'greater than start', end); + if (end > this[kLength]) + throw new ERR_OUT_OF_RANGE('end', 'less than or equal to length', end); + return new InternalBlob( + this[kHandle].slice(start, end), + end - start, type); + } + + async arrayBuffer() { + const job = new FixedSizeBlobCopyJob(this[kHandle]); + + const ret = job.run(); + if (ret !== undefined) + return PromiseResolve(ret); + + const { + promise, + resolve, + reject + } = deferred(); + job.ondone = (err, ab) => { + if (err !== undefined) + return reject(new AbortError()); + resolve(ab); + }; + + return promise; + } + + async text() { + const dec = new TextDecoder(); + return dec.decode(await this.arrayBuffer()); + } +} + +InternalBlob.prototype.constructor = Blob; +ObjectSetPrototypeOf( + InternalBlob.prototype, + Blob.prototype); + +module.exports = { + Blob, + InternalBlob, + isBlob, +}; diff --git a/lib/internal/bootstrap/switches/does_own_process_state.js b/lib/internal/bootstrap/switches/does_own_process_state.js index 0d60fb1f4595d1..2924e7f8cc17fa 100644 --- a/lib/internal/bootstrap/switches/does_own_process_state.js +++ b/lib/internal/bootstrap/switches/does_own_process_state.js @@ -24,13 +24,11 @@ if (credentials.implementsPosixCredentials) { const { parseFileMode, + validateArray, validateString } = require('internal/validators'); function wrapPosixCredentialSetters(credentials) { - const { - ArrayIsArray, - } = primordials; const { codes: { ERR_INVALID_ARG_TYPE, @@ -63,9 +61,7 @@ function wrapPosixCredentialSetters(credentials) { } function setgroups(groups) { - if (!ArrayIsArray(groups)) { - throw new ERR_INVALID_ARG_TYPE('groups', 'Array', groups); - } + validateArray(groups, 'groups'); for (let i = 0; i < groups.length; i++) { validateId(groups[i], `groups[${i}]`); } diff --git a/lib/internal/buffer.js b/lib/internal/buffer.js index 3515626041bbad..b20b8c6ae76448 100644 --- a/lib/internal/buffer.js +++ b/lib/internal/buffer.js @@ -18,12 +18,14 @@ const { validateNumber } = require('internal/validators'); const { asciiSlice, base64Slice, + base64urlSlice, latin1Slice, hexSlice, ucs2Slice, utf8Slice, asciiWrite, base64Write, + base64urlWrite, latin1Write, hexWrite, ucs2Write, @@ -1027,12 +1029,14 @@ function addBufferPrototypeMethods(proto) { proto.asciiSlice = asciiSlice; proto.base64Slice = base64Slice; + proto.base64urlSlice = base64urlSlice; proto.latin1Slice = latin1Slice; proto.hexSlice = hexSlice; proto.ucs2Slice = ucs2Slice; proto.utf8Slice = utf8Slice; proto.asciiWrite = asciiWrite; proto.base64Write = base64Write; + proto.base64urlWrite = base64urlWrite; proto.latin1Write = latin1Write; proto.hexWrite = hexWrite; proto.ucs2Write = ucs2Write; diff --git a/lib/internal/child_process.js b/lib/internal/child_process.js index 
8512ae342fde65..7bd9a058ea56de 100644 --- a/lib/internal/child_process.js +++ b/lib/internal/child_process.js @@ -29,7 +29,11 @@ const { ERR_MISSING_ARGS } } = require('internal/errors'); -const { validateString, validateOneOf } = require('internal/validators'); +const { + validateArray, + validateOneOf, + validateString, +} = require('internal/validators'); const EventEmitter = require('events'); const net = require('net'); const dgram = require('dgram'); @@ -377,12 +381,12 @@ ChildProcess.prototype.spawn = function(options) { validateString(options.file, 'options.file'); this.spawnfile = options.file; - if (ArrayIsArray(options.args)) - this.spawnargs = options.args; - else if (options.args === undefined) + if (options.args === undefined) { this.spawnargs = []; - else - throw new ERR_INVALID_ARG_TYPE('options.args', 'Array', options.args); + } else { + validateArray(options.args, 'options.args'); + this.spawnargs = options.args; + } const err = this._handle.spawn(options); diff --git a/lib/internal/crypto/keys.js b/lib/internal/crypto/keys.js index f54393a5e3b4bd..9d6f86d32b0b78 100644 --- a/lib/internal/crypto/keys.js +++ b/lib/internal/crypto/keys.js @@ -5,6 +5,7 @@ const { ObjectDefineProperty, ObjectSetPrototypeOf, Symbol, + Uint8Array, } = primordials; const { @@ -36,6 +37,7 @@ const { kHandle, kKeyObject, getArrayBufferOrView, + bigIntArrayToUnsignedBigInt, } = require('internal/crypto/util'); const { @@ -128,12 +130,39 @@ const [ } const kAsymmetricKeyType = Symbol('kAsymmetricKeyType'); + const kAsymmetricKeyDetails = Symbol('kAsymmetricKeyDetails'); + + function normalizeKeyDetails(details = {}) { + if (details.publicExponent !== undefined) { + return { + ...details, + publicExponent: + bigIntArrayToUnsignedBigInt(new Uint8Array(details.publicExponent)) + }; + } + return details; + } class AsymmetricKeyObject extends KeyObject { get asymmetricKeyType() { return this[kAsymmetricKeyType] || (this[kAsymmetricKeyType] = this[kHandle].getAsymmetricKeyType()); } + + get asymmetricKeyDetails() { + switch (this.asymmetricKeyType) { + case 'rsa': + case 'rsa-pss': + case 'dsa': + case 'ec': + return this[kAsymmetricKeyDetails] || + (this[kAsymmetricKeyDetails] = normalizeKeyDetails( + this[kHandle].keyDetail({}) + )); + default: + return {}; + } + } } class PublicKeyObject extends AsymmetricKeyObject { diff --git a/lib/internal/crypto/random.js b/lib/internal/crypto/random.js index cbc549377b1a3d..3a11a74648ff05 100644 --- a/lib/internal/crypto/random.js +++ b/lib/internal/crypto/random.js @@ -218,20 +218,20 @@ function randomInt(min, max, callback) { `<= ${RAND_MAX}`, range); } - const excess = RAND_MAX % range; - const randLimit = RAND_MAX - excess; + // For (x % range) to produce an unbiased value greater than or equal to 0 and + // less than range, x must be drawn randomly from the set of integers greater + // than or equal to 0 and less than randLimit. + const randLimit = RAND_MAX - (RAND_MAX % range); if (isSync) { // Sync API while (true) { const x = randomBytes(6).readUIntBE(0, 6); - // If x > (maxVal - (maxVal % range)), we will get "modulo bias" - if (x > randLimit) { - // Try again + if (x >= randLimit) { + // Try again. 
continue; } - const n = (x % range) + min; - return n; + return (x % range) + min; } } else { // Async API @@ -239,9 +239,8 @@ function randomInt(min, max, callback) { randomBytes(6, (err, bytes) => { if (err) return callback(err); const x = bytes.readUIntBE(0, 6); - // If x > (maxVal - (maxVal % range)), we will get "modulo bias" - if (x > randLimit) { - // Try again + if (x >= randLimit) { + // Try again. return pickAttempt(); } const n = (x % range) + min; diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index 67cb9841928a2d..9d2fee2b2dd57a 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -3,6 +3,7 @@ const { ArrayPrototypeIncludes, ArrayPrototypePush, + BigInt, FunctionPrototypeBind, Number, Promise, @@ -308,6 +309,17 @@ function bigIntArrayToUnsignedInt(input) { return result; } +function bigIntArrayToUnsignedBigInt(input) { + let result = 0n; + + for (let n = 0; n < input.length; ++n) { + const n_reversed = input.length - n - 1; + result |= BigInt(input[n]) << 8n * BigInt(n_reversed); + } + + return result; +} + function getStringOption(options, key) { let value; if (options && (value = options[key]) != null) @@ -413,6 +425,7 @@ module.exports = { jobPromise, lazyRequire, validateMaxBufferLength, + bigIntArrayToUnsignedBigInt, bigIntArrayToUnsignedInt, getStringOption, getUsagesUnion, diff --git a/lib/internal/dns/utils.js b/lib/internal/dns/utils.js index 27d25c92ad93aa..40f5ba0088e83e 100644 --- a/lib/internal/dns/utils.js +++ b/lib/internal/dns/utils.js @@ -1,7 +1,6 @@ 'use strict'; const { - ArrayIsArray, ArrayPrototypeForEach, ArrayPrototypeJoin, ArrayPrototypeMap, @@ -14,7 +13,7 @@ const { const errors = require('internal/errors'); const { isIP } = require('internal/net'); -const { validateInt32 } = require('internal/validators'); +const { validateArray, validateInt32 } = require('internal/validators'); const { ChannelWrap, strerror, @@ -60,9 +59,7 @@ class Resolver { } setServers(servers) { - if (!ArrayIsArray(servers)) { - throw new ERR_INVALID_ARG_TYPE('servers', 'Array', servers); - } + validateArray(servers, 'servers'); // Cache the original servers because in the event of an error while // setting the servers, c-ares won't have any servers available for diff --git a/lib/internal/histogram.js b/lib/internal/histogram.js index 00149db50236da..f599e4b3edb5f5 100644 --- a/lib/internal/histogram.js +++ b/lib/internal/histogram.js @@ -5,7 +5,7 @@ const { } = require('internal/util'); const { format } = require('util'); -const { SafeMap, Symbol } = primordials; +const { NumberIsNaN, SafeMap, Symbol } = primordials; const { ERR_INVALID_ARG_TYPE, @@ -61,7 +61,7 @@ class Histogram { if (typeof percentile !== 'number') throw new ERR_INVALID_ARG_TYPE('percentile', 'number', percentile); - if (percentile <= 0 || percentile > 100) + if (NumberIsNaN(percentile) || percentile <= 0 || percentile > 100) throw new ERR_INVALID_ARG_VALUE.RangeError('percentile', percentile); return this[kHandle]?.percentile(percentile); diff --git a/lib/internal/http2/compat.js b/lib/internal/http2/compat.js index d116cb9d5ee925..ddc017a144cfbe 100644 --- a/lib/internal/http2/compat.js +++ b/lib/internal/http2/compat.js @@ -12,6 +12,7 @@ const { ReflectApply, ReflectGetPrototypeOf, StringPrototypeIncludes, + SafeArrayIterator, StringPrototypeToLowerCase, StringPrototypeTrim, Symbol, @@ -148,7 +149,8 @@ function onStreamTrailers(trailers, flags, rawTrailers) { const request = this[kRequest]; if (request !== undefined) { ObjectAssign(request[kTrailers], 
trailers); - ArrayPrototypePush(request[kRawTrailers], ...rawTrailers); + ArrayPrototypePush(request[kRawTrailers], + ...new SafeArrayIterator(rawTrailers)); } } @@ -527,6 +529,10 @@ class Http2ServerResponse extends Stream { return this[kStream].headersSent; } + get req() { + return this[kStream][kRequest]; + } + get sendDate() { return this[kState].sendDate; } diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 22ed8086dc9316..a4eab21c135b35 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -7,6 +7,7 @@ const { ArrayIsArray, ArrayPrototypeForEach, ArrayPrototypePush, + ArrayPrototypeUnshift, FunctionPrototypeBind, FunctionPrototypeCall, MathMin, @@ -20,6 +21,7 @@ const { ReflectApply, ReflectGetPrototypeOf, RegExpPrototypeTest, + SafeArrayIterator, SafeMap, SafeSet, StringPrototypeSlice, @@ -187,21 +189,22 @@ let debug = require('internal/util/debuglog').debuglog('http2', (fn) => { // this seems pretty fast, though. function debugStream(id, sessionType, message, ...args) { debug('Http2Stream %s [Http2Session %s]: ' + message, - id, sessionName(sessionType), ...args); + id, sessionName(sessionType), ...new SafeArrayIterator(args)); } function debugStreamObj(stream, message, ...args) { const session = stream[kSession]; const type = session ? session[kType] : undefined; - debugStream(stream[kID], type, message, ...args); + debugStream(stream[kID], type, message, ...new SafeArrayIterator(args)); } function debugSession(sessionType, message, ...args) { - debug('Http2Session %s: ' + message, sessionName(sessionType), ...args); + debug('Http2Session %s: ' + message, sessionName(sessionType), + ...new SafeArrayIterator(args)); } function debugSessionObj(session, message, ...args) { - debugSession(session[kType], message, ...args); + debugSession(session[kType], message, ...new SafeArrayIterator(args)); } const kMaxFrameSize = (2 ** 24) - 1; @@ -317,7 +320,7 @@ const SESSION_FLAGS_DESTROYED = 0x4; // Top level to avoid creating a closure function emit(self, ...args) { - self.emit(...args); + ReflectApply(self.emit, self, args); } // Called when a new block of headers has been received for a given @@ -1020,7 +1023,7 @@ function setupHandle(socket, type, options) { if (type === NGHTTP2_SESSION_SERVER && ArrayIsArray(options.origins)) { - this.origin(...options.origins); + ReflectApply(this.origin, this, options.origins); } process.nextTick(emit, this, 'connect', this, socket); @@ -1495,7 +1498,7 @@ class Http2Session extends EventEmitter { [EventEmitter.captureRejectionSymbol](err, event, ...args) { switch (event) { case 'stream': - const [stream] = args; + const stream = args[0]; stream.destroy(err); break; default: @@ -1663,7 +1666,9 @@ class ClientHttp2Session extends Http2Session { this[kUpdateTimer](); if (headers !== null && headers !== undefined) { - for (const header of ObjectKeys(headers)) { + const keys = ObjectKeys(headers); + for (let i = 0; i < keys.length; i++) { + const header = keys[i]; if (header[0] === ':') { assertValidPseudoHeader(header); } else if (header && !checkIsHttpToken(header)) @@ -3095,7 +3100,7 @@ Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function( case 'stream': // TODO(mcollina): we might want to match this with what we do on // the compat side. 
- const [stream] = args; + const { 0: stream } = args; if (stream.sentHeaders) { stream.destroy(err); } else { @@ -3104,7 +3109,7 @@ Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function( } break; case 'request': - const [, res] = args; + const { 1: res } = args; if (!res.headersSent && !res.finished) { // Don't leak headers. for (const name of res.getHeaderNames()) { @@ -3117,8 +3122,9 @@ Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function( } break; default: + ArrayPrototypeUnshift(args, err, event); ReflectApply(net.Server.prototype[EventEmitter.captureRejectionSymbol], - this, [err, event, ...args]); + this, args); } }; diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index b129e94d9890c0..0c69d547d40b9a 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -233,9 +233,9 @@ ObjectDefineProperty(Module, 'wrapper', { } }); -ObjectDefineProperty(Module.prototype, 'isPreloading', { - get() { return isPreloading; } -}); +const isPreloadingDesc = { get() { return isPreloading; } }; +ObjectDefineProperty(Module.prototype, 'isPreloading', isPreloadingDesc); +ObjectDefineProperty(NativeModule.prototype, 'isPreloading', isPreloadingDesc); function getModuleParent() { return moduleParentCache.get(this); diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index 98683441586784..5a578902cbb01a 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -104,13 +104,8 @@ function getConditionsSet(conditions) { const realpathCache = new SafeMap(); const packageJSONCache = new SafeMap(); /* string -> PackageConfig */ -function tryStatSync(path) { - try { - return statSync(path); - } catch { - return new Stats(); - } -} +const tryStatSync = + (path) => statSync(path, { throwIfNoEntry: false }) ?? new Stats(); function getPackageConfig(path, specifier, base) { const existing = packageJSONCache.get(path); diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 2ab9550213ecca..ec1316d3245a41 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -6,6 +6,12 @@ // so that Node.js's builtin modules do not need to later look these up from // the global proxy, which can be mutated by users. +const { + defineProperty: ReflectDefineProperty, + getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, + ownKeys: ReflectOwnKeys, +} = Reflect; + // TODO(joyeecheung): we can restrict access to these globals in builtin // modules through the JS linter, for example: ban access such as `Object` // (which falls back to a lookup in the global proxy) in favor of @@ -19,17 +25,6 @@ const { bind, call } = Function.prototype; const uncurryThis = bind.bind(call); primordials.uncurryThis = uncurryThis; -function copyProps(src, dest) { - for (const key of Reflect.ownKeys(src)) { - if (!Reflect.getOwnPropertyDescriptor(dest, key)) { - Reflect.defineProperty( - dest, - key, - Reflect.getOwnPropertyDescriptor(src, key)); - } - } -} - function getNewKey(key) { return typeof key === 'symbol' ? 
`Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : @@ -37,12 +32,12 @@ function getNewKey(key) { } function copyAccessor(dest, prefix, key, { enumerable, get, set }) { - Reflect.defineProperty(dest, `${prefix}Get${key}`, { + ReflectDefineProperty(dest, `${prefix}Get${key}`, { value: uncurryThis(get), enumerable }); if (set !== undefined) { - Reflect.defineProperty(dest, `${prefix}Set${key}`, { + ReflectDefineProperty(dest, `${prefix}Set${key}`, { value: uncurryThis(set), enumerable }); @@ -50,128 +45,46 @@ function copyAccessor(dest, prefix, key, { enumerable, get, set }) { } function copyPropsRenamed(src, dest, prefix) { - for (const key of Reflect.ownKeys(src)) { + for (const key of ReflectOwnKeys(src)) { const newKey = getNewKey(key); - const desc = Reflect.getOwnPropertyDescriptor(src, key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); if ('get' in desc) { copyAccessor(dest, prefix, newKey, desc); } else { - Reflect.defineProperty(dest, `${prefix}${newKey}`, desc); + ReflectDefineProperty(dest, `${prefix}${newKey}`, desc); } } } function copyPropsRenamedBound(src, dest, prefix) { - for (const key of Reflect.ownKeys(src)) { + for (const key of ReflectOwnKeys(src)) { const newKey = getNewKey(key); - const desc = Reflect.getOwnPropertyDescriptor(src, key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); if ('get' in desc) { copyAccessor(dest, prefix, newKey, desc); } else { if (typeof desc.value === 'function') { desc.value = desc.value.bind(src); } - Reflect.defineProperty(dest, `${prefix}${newKey}`, desc); + ReflectDefineProperty(dest, `${prefix}${newKey}`, desc); } } } function copyPrototype(src, dest, prefix) { - for (const key of Reflect.ownKeys(src)) { + for (const key of ReflectOwnKeys(src)) { const newKey = getNewKey(key); - const desc = Reflect.getOwnPropertyDescriptor(src, key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); if ('get' in desc) { copyAccessor(dest, prefix, newKey, desc); } else { if (typeof desc.value === 'function') { desc.value = uncurryThis(desc.value); } - Reflect.defineProperty(dest, `${prefix}${newKey}`, desc); - } - } -} - -const createSafeIterator = (factory, next) => { - class SafeIterator { - constructor(iterable) { - this._iterator = factory(iterable); - } - next() { - return next(this._iterator); - } - [Symbol.iterator]() { - return this; - } - } - Object.setPrototypeOf(SafeIterator.prototype, null); - Object.freeze(SafeIterator.prototype); - Object.freeze(SafeIterator); - return SafeIterator; -}; - -function makeSafe(unsafe, safe) { - if (Symbol.iterator in unsafe.prototype) { - const dummy = new unsafe(); - let next; // We can reuse the same `next` method. - - for (const key of Reflect.ownKeys(unsafe.prototype)) { - if (!Reflect.getOwnPropertyDescriptor(safe.prototype, key)) { - const desc = Reflect.getOwnPropertyDescriptor(unsafe.prototype, key); - if ( - typeof desc.value === 'function' && - desc.value.length === 0 && - Symbol.iterator in (desc.value.call(dummy) ?? 
{}) - ) { - const createIterator = uncurryThis(desc.value); - next ??= uncurryThis(createIterator(dummy).next); - const SafeIterator = createSafeIterator(createIterator, next); - desc.value = function() { - return new SafeIterator(this); - }; - } - Reflect.defineProperty(safe.prototype, key, desc); - } + ReflectDefineProperty(dest, `${prefix}${newKey}`, desc); } - } else { - copyProps(unsafe.prototype, safe.prototype); } - copyProps(unsafe, safe); - - Object.setPrototypeOf(safe.prototype, null); - Object.freeze(safe.prototype); - Object.freeze(safe); - return safe; } -primordials.makeSafe = makeSafe; - -// Subclass the constructors because we need to use their prototype -// methods later. -// Defining the `constructor` is necessary here to avoid the default -// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. -primordials.SafeMap = makeSafe( - Map, - class SafeMap extends Map { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakMap = makeSafe( - WeakMap, - class SafeWeakMap extends WeakMap { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeSet = makeSafe( - Set, - class SafeSet extends Set { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakSet = makeSafe( - WeakSet, - class SafeWeakSet extends WeakSet { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); // Create copies of the namespace objects [ @@ -256,6 +169,41 @@ primordials.SafeWeakSet = makeSafe( copyPrototype(original.prototype, primordials, `${name}Prototype`); }); +/* eslint-enable node-core/prefer-primordials */ + +const { + ArrayPrototypeForEach, + FunctionPrototypeCall, + Map, + ObjectFreeze, + ObjectSetPrototypeOf, + Set, + SymbolIterator, + WeakMap, + WeakSet, +} = primordials; + +// Because these functions are used by `makeSafe`, which is exposed +// on the `primordials` object, it's important to use const references +// to the primordials that they use: +const createSafeIterator = (factory, next) => { + class SafeIterator { + constructor(iterable) { + this._iterator = factory(iterable); + } + next() { + return next(this._iterator); + } + [SymbolIterator]() { + return this; + } + } + ObjectSetPrototypeOf(SafeIterator.prototype, null); + ObjectFreeze(SafeIterator.prototype); + ObjectFreeze(SafeIterator); + return SafeIterator; +}; + primordials.SafeArrayIterator = createSafeIterator( primordials.ArrayPrototypeSymbolIterator, primordials.ArrayIteratorPrototypeNext @@ -265,5 +213,80 @@ primordials.SafeStringIterator = createSafeIterator( primordials.StringIteratorPrototypeNext ); -Object.setPrototypeOf(primordials, null); -Object.freeze(primordials); +const copyProps = (src, dest) => { + ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { + if (!ReflectGetOwnPropertyDescriptor(dest, key)) { + ReflectDefineProperty( + dest, + key, + ReflectGetOwnPropertyDescriptor(src, key)); + } + }); +}; + +const makeSafe = (unsafe, safe) => { + if (SymbolIterator in unsafe.prototype) { + const dummy = new unsafe(); + let next; // We can reuse the same `next` method. + + ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { + if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { + const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); + if ( + typeof desc.value === 'function' && + desc.value.length === 0 && + SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? 
{}) + ) { + const createIterator = uncurryThis(desc.value); + next ??= uncurryThis(createIterator(dummy).next); + const SafeIterator = createSafeIterator(createIterator, next); + desc.value = function() { + return new SafeIterator(this); + }; + } + ReflectDefineProperty(safe.prototype, key, desc); + } + }); + } else { + copyProps(unsafe.prototype, safe.prototype); + } + copyProps(unsafe, safe); + + ObjectSetPrototypeOf(safe.prototype, null); + ObjectFreeze(safe.prototype); + ObjectFreeze(safe); + return safe; +}; +primordials.makeSafe = makeSafe; + +// Subclass the constructors because we need to use their prototype +// methods later. +// Defining the `constructor` is necessary here to avoid the default +// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. +primordials.SafeMap = makeSafe( + Map, + class SafeMap extends Map { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakMap = makeSafe( + WeakMap, + class SafeWeakMap extends WeakMap { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeSet = makeSafe( + Set, + class SafeSet extends Set { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakSet = makeSafe( + WeakSet, + class SafeWeakSet extends WeakSet { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); + +ObjectSetPrototypeOf(primordials, null); +ObjectFreeze(primordials); diff --git a/lib/internal/process/per_thread.js b/lib/internal/process/per_thread.js index b3bd2de58ea3ff..4fdf0ba8764455 100644 --- a/lib/internal/process/per_thread.js +++ b/lib/internal/process/per_thread.js @@ -5,7 +5,6 @@ // thread and the worker threads. const { - ArrayIsArray, ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeSplice, @@ -35,6 +34,7 @@ const { } } = require('internal/errors'); const format = require('internal/util/inspect').format; +const { validateArray } = require('internal/validators'); const constants = internalBinding('constants').os.signals; function assert(x, msg) { @@ -55,9 +55,7 @@ function getFastAPIs(binding) { _hrtime.hrtime(); if (time !== undefined) { - if (!ArrayIsArray(time)) { - throw new ERR_INVALID_ARG_TYPE('time', 'Array', time); - } + validateArray(time, 'time'); if (time.length !== 2) { throw new ERR_OUT_OF_RANGE('time', 2, time.length); } diff --git a/lib/internal/repl/utils.js b/lib/internal/repl/utils.js index 594b6a0c4485c7..8fee6d40123c49 100644 --- a/lib/internal/repl/utils.js +++ b/lib/internal/repl/utils.js @@ -245,7 +245,7 @@ function setupPreview(repl, contextSymbol, bufferSymbol, active) { } // Result and the text that was completed. 
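Editor's note: the primordials.js rewrite above moves `createSafeIterator`/`makeSafe` below the namespace copies so that they can themselves rely on saved primordials. A minimal, self-contained sketch of what `SafeArrayIterator` buys (simplified names, not the real module):

```js
// Minimal standalone sketch of the SafeArrayIterator idea; the real
// implementation is the createSafeIterator() shown in the hunk above.
const savedArrayIterator = Array.prototype[Symbol.iterator];
const savedNext = savedArrayIterator.call([]).next;

class MiniSafeArrayIterator {
  constructor(array) { this._iterator = savedArrayIterator.call(array); }
  next() { return savedNext.call(this._iterator); }
  [Symbol.iterator]() { return this; }
}
Object.setPrototypeOf(MiniSafeArrayIterator.prototype, null);
Object.freeze(MiniSafeArrayIterator.prototype);

// Even after user code replaces the array iterator...
Array.prototype[Symbol.iterator] = function* () { yield 'tampered'; };
console.log([...new MiniSafeArrayIterator([1, 2, 3])]);  // [ 1, 2, 3 ]
console.log([...[1, 2, 3]]);                             // [ 'tampered' ]
```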
- const [rawCompletions, completeOn] = data; + const { 0: rawCompletions, 1: completeOn } = data; if (!rawCompletions || rawCompletions.length === 0) { return; diff --git a/lib/internal/source_map/source_map.js b/lib/internal/source_map/source_map.js index 37912a2c9750bc..f49de5d8c4deda 100644 --- a/lib/internal/source_map/source_map.js +++ b/lib/internal/source_map/source_map.js @@ -67,7 +67,12 @@ 'use strict'; const { - ArrayIsArray + ArrayIsArray, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSort, + ObjectPrototypeHasOwnProperty, + StringPrototypeCharAt, } = primordials; const { @@ -94,14 +99,14 @@ class StringCharIterator { * @return {string} */ next() { - return this._string.charAt(this._position++); + return StringPrototypeCharAt(this._string, this._position++); } /** * @return {string} */ peek() { - return this._string.charAt(this._position); + return StringPrototypeCharAt(this._string, this._position); } /** @@ -158,7 +163,7 @@ class SourceMap { } else { this.#parseMap(this.#payload, 0, 0); } - this.#mappings.sort(compareSourceMapEntry); + ArrayPrototypeSort(this.#mappings, compareSourceMapEntry); } /** @@ -211,7 +216,7 @@ class SourceMap { /** * @override */ - #parseMap = (map, lineNumber, columnNumber) => { + #parseMap(map, lineNumber, columnNumber) { let sourceIndex = 0; let sourceLineNumber = 0; let sourceColumnNumber = 0; @@ -222,7 +227,7 @@ class SourceMap { for (let i = 0; i < map.sources.length; ++i) { const url = map.sources[i]; originalToCanonicalURLMap[url] = url; - sources.push(url); + ArrayPrototypePush(sources, url); this.#sources[url] = true; if (map.sourcesContent && map.sourcesContent[i]) @@ -246,7 +251,7 @@ class SourceMap { columnNumber += decodeVLQ(stringCharIterator); if (isSeparator(stringCharIterator.peek())) { - this.#mappings.push([lineNumber, columnNumber]); + ArrayPrototypePush(this.#mappings, [lineNumber, columnNumber]); continue; } @@ -264,8 +269,11 @@ class SourceMap { name = map.names?.[nameIndex]; } - this.#mappings.push([lineNumber, columnNumber, sourceURL, - sourceLineNumber, sourceColumnNumber, name]); + ArrayPrototypePush( + this.#mappings, + [lineNumber, columnNumber, sourceURL, sourceLineNumber, + sourceColumnNumber, name] + ); } }; } @@ -320,8 +328,9 @@ function cloneSourceMapV3(payload) { } payload = { ...payload }; for (const key in payload) { - if (payload.hasOwnProperty(key) && ArrayIsArray(payload[key])) { - payload[key] = payload[key].slice(0); + if (ObjectPrototypeHasOwnProperty(payload, key) && + ArrayIsArray(payload[key])) { + payload[key] = ArrayPrototypeSlice(payload[key]); } } return payload; @@ -334,8 +343,8 @@ function cloneSourceMapV3(payload) { * @return {number} */ function compareSourceMapEntry(entry1, entry2) { - const [lineNumber1, columnNumber1] = entry1; - const [lineNumber2, columnNumber2] = entry2; + const { 0: lineNumber1, 1: columnNumber1 } = entry1; + const { 0: lineNumber2, 1: columnNumber2 } = entry2; if (lineNumber1 !== lineNumber2) { return lineNumber1 - lineNumber2; } diff --git a/lib/internal/source_map/source_map_cache.js b/lib/internal/source_map/source_map_cache.js index b95653ebba84e9..f0d911f78fad8b 100644 --- a/lib/internal/source_map/source_map_cache.js +++ b/lib/internal/source_map/source_map_cache.js @@ -138,7 +138,7 @@ function sourceMapFromFile(mapURL) { // data:[][;base64], see: // https://tools.ietf.org/html/rfc2397#section-2 function sourceMapFromDataUrl(sourceURL, url) { - const [format, data] = StringPrototypeSplit(url, ','); + const { 0: format, 1: data } = 
StringPrototypeSplit(url, ','); const splitFormat = StringPrototypeSplit(format, ';'); const contentType = splitFormat[0]; const base64 = splitFormat[splitFormat.length - 1] === 'base64'; diff --git a/lib/internal/test/binding.js b/lib/internal/test/binding.js index 882ea90093d039..063b9b5c900aad 100644 --- a/lib/internal/test/binding.js +++ b/lib/internal/test/binding.js @@ -4,4 +4,9 @@ process.emitWarning( 'These APIs are for internal testing only. Do not use them.', 'internal/test/binding'); -module.exports = { internalBinding }; +if (module.isPreloading) { + globalThis.internalBinding = internalBinding; + globalThis.primordials = primordials; +} + +module.exports = { internalBinding, primordials }; diff --git a/lib/internal/url.js b/lib/internal/url.js index 900b6b03cd9ec1..3c90ec3d3672a8 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -437,7 +437,7 @@ ObjectDefineProperties(URL.prototype, { ret += '@'; } ret += options.unicode ? - domainToUnicode(this.hostname) : this.hostname; + domainToUnicode(ctx.host) : ctx.host; if (ctx.port !== null) ret += `:${ctx.port}`; } @@ -1295,7 +1295,7 @@ function domainToUnicode(domain) { // Utility function that converts a URL object into an ordinary // options object as expected by the http.request and https.request // APIs. -function urlToOptions(url) { +function urlToHttpOptions(url) { const options = { protocol: url.protocol, hostname: typeof url.hostname === 'string' && @@ -1494,7 +1494,7 @@ module.exports = { URLSearchParams, domainToASCII, domainToUnicode, - urlToOptions, + urlToHttpOptions, formatSymbol: kFormat, searchParamsSymbol: searchParams, encodeStr diff --git a/lib/internal/util.js b/lib/internal/util.js index cd0edfe44ee13f..5d5a2b14a708b4 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -180,6 +180,11 @@ function slowCases(enc) { StringPrototypeToLowerCase(`${enc}`) === 'utf-16le') return 'utf16le'; break; + case 9: + if (enc === 'base64url' || enc === 'BASE64URL' || + StringPrototypeToLowerCase(`${enc}`) === 'base64url') + return 'base64url'; + break; default: if (enc === '') return 'utf8'; } diff --git a/lib/internal/util/inspect.js b/lib/internal/util/inspect.js index b03380030a6be5..c40083a1ad497b 100644 --- a/lib/internal/util/inspect.js +++ b/lib/internal/util/inspect.js @@ -43,6 +43,7 @@ const { ObjectPrototypePropertyIsEnumerable, ObjectSeal, ObjectSetPrototypeOf, + ReflectOwnKeys, RegExp, RegExpPrototypeTest, RegExpPrototypeToString, @@ -309,7 +310,8 @@ function inspect(value, opts) { ctx.showHidden = opts; } else if (opts) { const optKeys = ObjectKeys(opts); - for (const key of optKeys) { + for (let i = 0; i < optKeys.length; ++i) { + const key = optKeys[i]; // TODO(BridgeAR): Find a solution what to do about stylize. Either make // this function public or add a new API with a similar or better // functionality. @@ -614,11 +616,7 @@ function addPrototypeProperties(ctx, main, obj, recurseTimes, output) { ArrayPrototypeForEach(keys, (key) => keySet.add(key)); } // Get all own property names and symbols. - keys = ObjectGetOwnPropertyNames(obj); - const symbols = ObjectGetOwnPropertySymbols(obj); - if (symbols.length !== 0) { - ArrayPrototypePush(keys, ...symbols); - } + keys = ReflectOwnKeys(obj); for (const key of keys) { // Ignore the `constructor` property and keys that exist on layers above. 
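Editor's note: the `slowCases` addition above wires up `'base64url'` as a recognized encoding name on the JS side; the matching native changes appear further down in src/api/encoding.cc, node_buffer.cc and string_decoder.cc. A hedged usage sketch (assumes a build that includes these changes, i.e. Node.js >= 15.7.0):

```js
// base64url uses the URL-safe alphabet ('-' and '_') and omits padding.
const buf = Buffer.from('hello world?', 'utf8');
console.log(buf.toString('base64'));     // 'aGVsbG8gd29ybGQ/'
console.log(buf.toString('base64url'));  // 'aGVsbG8gd29ybGQ_'
console.log(Buffer.from('aGVsbG8gd29ybGQ_', 'base64url').toString('utf8'));
// 'hello world?'
```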
if (key === 'constructor' || @@ -666,7 +664,7 @@ function getKeys(value, showHidden) { if (showHidden) { keys = ObjectGetOwnPropertyNames(value); if (symbols.length !== 0) - keys.push(...symbols); + ArrayPrototypePush(keys, ...symbols); } else { // This might throw if `value` is a Module Namespace Object from an // unevaluated module, but we don't want to perform the actual type @@ -1869,7 +1867,7 @@ function tryStringify(arg) { } function format(...args) { - return formatWithOptionsInternal(undefined, ...args); + return formatWithOptionsInternal(undefined, args); } function formatWithOptions(inspectOptions, ...args) { @@ -1877,10 +1875,10 @@ function formatWithOptions(inspectOptions, ...args) { throw new ERR_INVALID_ARG_TYPE( 'inspectOptions', 'object', inspectOptions); } - return formatWithOptionsInternal(inspectOptions, ...args); + return formatWithOptionsInternal(inspectOptions, args); } -function formatWithOptionsInternal(inspectOptions, ...args) { +function formatWithOptionsInternal(inspectOptions, args) { const first = args[0]; let a = 0; let str = ''; diff --git a/lib/internal/vm/module.js b/lib/internal/vm/module.js index 30ce655bd0d8b9..f847f2404f47e0 100644 --- a/lib/internal/vm/module.js +++ b/lib/internal/vm/module.js @@ -11,6 +11,7 @@ const { ObjectGetPrototypeOf, ObjectSetPrototypeOf, PromiseAll, + ReflectApply, SafeWeakMap, Symbol, SymbolToStringTag, @@ -445,7 +446,7 @@ class SyntheticModule extends Module { function importModuleDynamicallyWrap(importModuleDynamically) { const importModuleDynamicallyWrapper = async (...args) => { - const m = await importModuleDynamically(...args); + const m = await ReflectApply(importModuleDynamically, this, args); if (isModuleNamespaceObject(m)) { return m; } diff --git a/lib/internal/worker.js b/lib/internal/worker.js index cb51c0a1a5efd0..38f3ba66f214e6 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -4,6 +4,7 @@ const { ArrayIsArray, + ArrayPrototypeForEach, ArrayPrototypeMap, ArrayPrototypePush, Float64Array, @@ -14,7 +15,9 @@ const { ObjectEntries, Promise, PromiseResolve, + ReflectApply, RegExpPrototypeTest, + SafeArrayIterator, String, Symbol, SymbolFor, @@ -54,6 +57,7 @@ const { } = workerIo; const { deserializeError } = require('internal/error_serdes'); const { fileURLToPath, isURLInstance, pathToFileURL } = require('internal/url'); +const { validateArray } = require('internal/validators'); const { ownsProcessState, @@ -106,9 +110,7 @@ class Worker extends EventEmitter { } let argv; if (options.argv) { - if (!ArrayIsArray(options.argv)) { - throw new ERR_INVALID_ARG_TYPE('options.argv', 'Array', options.argv); - } + validateArray(options.argv, 'options.argv'); argv = ArrayPrototypeMap(options.argv, String); } @@ -155,8 +157,10 @@ class Worker extends EventEmitter { let env; if (typeof options.env === 'object' && options.env !== null) { env = ObjectCreate(null); - for (const [ key, value ] of ObjectEntries(options.env)) - env[key] = `${value}`; + ArrayPrototypeForEach( + ObjectEntries(options.env), + ({ 0: key, 1: value }) => { env[key] = `${value}`; } + ); } else if (options.env == null) { env = process.env; } else if (options.env !== SHARE_ENV) { @@ -209,12 +213,13 @@ class Worker extends EventEmitter { const transferList = [port2]; // If transferList is provided. 
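Editor's note: in the worker.js hunk above, `options.env` entries are still copied into a null-prototype object with every value stringified; only the iteration style changes (primordial `ArrayPrototypeForEach` plus indexed destructuring). A hedged sketch of the observable behaviour, which the refactor leaves unchanged (self-spawning file pattern is illustrative):

```js
// Worker env values are stringified when copied, so the worker always sees
// strings regardless of what was passed in options.env.
'use strict';
const { Worker, isMainThread } = require('worker_threads');
if (isMainThread) {
  new Worker(__filename, { env: { ANSWER: 42 } });
} else {
  console.log(typeof process.env.ANSWER, process.env.ANSWER);  // 'string' '42'
}
```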
if (options.transferList) - ArrayPrototypePush(transferList, ...options.transferList); + ArrayPrototypePush(transferList, + ...new SafeArrayIterator(options.transferList)); this[kPublicPort] = port1; - for (const event of ['message', 'messageerror']) { + ArrayPrototypeForEach(['message', 'messageerror'], (event) => { this[kPublicPort].on(event, (message) => this.emit(event, message)); - } + }); setupPortReferencing(this[kPublicPort], this, 'message'); this[kPort].postMessage({ argv, @@ -279,8 +284,9 @@ class Worker extends EventEmitter { { const { stream, chunks } = message; const readable = this[kParentSideStdio][stream]; - for (const { chunk, encoding } of chunks) + ArrayPrototypeForEach(chunks, ({ chunk, encoding }) => { readable.push(chunk, encoding); + }); return; } case messageTypes.STDIO_WANTS_MORE_DATA: @@ -314,7 +320,7 @@ class Worker extends EventEmitter { postMessage(...args) { if (this[kPublicPort] === null) return; - this[kPublicPort].postMessage(...args); + ReflectApply(this[kPublicPort].postMessage, this[kPublicPort], args); } terminate(callback) { diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 5cbf018ff8efae..a5b0fdf1f60028 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -82,7 +82,6 @@ const kInsertEntry = Symbol('insert-entry'); const kGetEntries = Symbol('get-entries'); const kIndex = Symbol('index'); const kMarks = Symbol('marks'); -const kCount = Symbol('count'); const observers = {}; const observerableTypes = [ @@ -287,11 +286,6 @@ class PerformanceObserverEntryList { writable: true, enumerable: false, value: {} - }, - [kCount]: { - writable: true, - enumerable: false, - value: 0 } }); L.init(this[kEntries]); @@ -300,11 +294,6 @@ class PerformanceObserverEntryList { [kInsertEntry](entry) { const item = { entry }; L.append(this[kEntries], item); - this[kCount]++; - } - - get length() { - return this[kCount]; } [kGetEntries](name, type) { diff --git a/lib/repl.js b/lib/repl.js index 3368b5997ae01d..b1905195ab4b47 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -46,6 +46,7 @@ const { ArrayPrototypeConcat, ArrayPrototypeFilter, ArrayPrototypeFindIndex, + ArrayPrototypeForEach, ArrayPrototypeIncludes, ArrayPrototypeJoin, ArrayPrototypeMap, @@ -663,7 +664,7 @@ function REPLServer(prompt, let matched = false; errStack = ''; - for (const line of lines) { + ArrayPrototypeForEach(lines, (line) => { if (!matched && RegExpPrototypeTest(/^\[?([A-Z][a-z0-9_]*)*Error/, line)) { errStack += writer.options.breakLength >= line.length ? @@ -673,7 +674,7 @@ function REPLServer(prompt, } else { errStack += line; } - } + }); if (!matched) { const ln = lines.length === 1 ? ' ' : ':\n'; errStack = `Uncaught${ln}${errStack}`; @@ -754,9 +755,7 @@ function REPLServer(prompt, const prioritizedSigintQueue = new SafeSet(); self.on('SIGINT', function onSigInt() { if (prioritizedSigintQueue.size > 0) { - for (const task of prioritizedSigintQueue) { - task(); - } + ArrayPrototypeForEach(prioritizedSigintQueue, (task) => task()); return; } @@ -1010,13 +1009,13 @@ REPLServer.prototype.createContext = function() { }, () => { context = vm.createContext(); }); - for (const name of ObjectGetOwnPropertyNames(global)) { + ArrayPrototypeForEach(ObjectGetOwnPropertyNames(global), (name) => { // Only set properties that do not already exist as a global builtin. 
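Editor's note: the perf_hooks hunk above drops the internal entry counter and, with it, the `length` getter on `PerformanceObserverEntryList`. Counting entries through `getEntries()` is unaffected; a short sketch:

```js
// Count buffered entries via getEntries() rather than the removed length getter.
const { PerformanceObserver, performance } = require('perf_hooks');
const obs = new PerformanceObserver((list) => {
  console.log(list.getEntries().length);  // e.g. 1
  obs.disconnect();
});
obs.observe({ entryTypes: ['mark'] });
performance.mark('start');
```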
if (!globalBuiltins.has(name)) { ObjectDefineProperty(context, name, ObjectGetOwnPropertyDescriptor(global, name)); } - } + }); context.global = context; const _console = new Console(this.output); ObjectDefineProperty(context, 'console', { @@ -1231,7 +1230,7 @@ function complete(line, callback) { paths = ArrayPrototypeConcat(module.paths, CJSModule.globalPaths); } - for (let dir of paths) { + ArrayPrototypeForEach(paths, (dir) => { dir = path.resolve(dir, subdir); const dirents = gracefulReaddir(dir, { withFileTypes: true }) || []; for (const dirent of dirents) { @@ -1259,7 +1258,7 @@ function complete(line, callback) { } } } - } + }); if (group.length) { ArrayPrototypePush(completionGroups, group); } @@ -1269,7 +1268,7 @@ function complete(line, callback) { } } else if (RegExpPrototypeTest(fsAutoCompleteRE, line) && this.allowBlockingCompletions) { - [completionGroups, completeOn] = completeFSFunctions(line); + ({ 0: completionGroups, 1: completeOn } = completeFSFunctions(line)); // Handle variable member lookup. // We support simple chained expressions like the following (no function // calls, etc.). That is for simplicity and also because we *eval* that @@ -1282,7 +1281,7 @@ function complete(line, callback) { // foo.<|> # completions for 'foo' with filter '' } else if (line.length === 0 || RegExpPrototypeTest(/\w|\.|\$/, line[line.length - 1])) { - const [match] = RegExpPrototypeExec(simpleExpressionRE, line) || ['']; + const { 0: match } = RegExpPrototypeExec(simpleExpressionRE, line) || ['']; if (line.length !== 0 && !match) { completionGroupsLoaded(); return; @@ -1352,11 +1351,11 @@ function complete(line, callback) { if (memberGroups.length) { expr += chaining; - for (const group of memberGroups) { + ArrayPrototypeForEach(memberGroups, (group) => { ArrayPrototypePush(completionGroups, ArrayPrototypeMap(group, (member) => `${expr}${member}`)); - } + }); if (filter) { filter = `${expr}${filter}`; } @@ -1375,7 +1374,7 @@ function complete(line, callback) { // Filter, sort (within each group), uniq and merge the completion groups. if (completionGroups.length && filter) { const newCompletionGroups = []; - for (const group of completionGroups) { + ArrayPrototypeForEach(completionGroups, (group) => { const filteredGroup = ArrayPrototypeFilter( group, (str) => StringPrototypeStartsWith(str, filter) @@ -1383,29 +1382,30 @@ function complete(line, callback) { if (filteredGroup.length) { ArrayPrototypePush(newCompletionGroups, filteredGroup); } - } + }); completionGroups = newCompletionGroups; } const completions = []; // Unique completions across all groups. - const uniqueSet = new SafeSet(['']); + const uniqueSet = new SafeSet(); + uniqueSet.add(''); // Completion group 0 is the "closest" (least far up the inheritance // chain) so we put its completions last: to be closest in the REPL. - for (const group of completionGroups) { + ArrayPrototypeForEach(completionGroups, (group) => { ArrayPrototypeSort(group, (a, b) => (b > a ? 1 : -1)); const setSize = uniqueSet.size; - for (const entry of group) { + ArrayPrototypeForEach(group, (entry) => { if (!uniqueSet.has(entry)) { ArrayPrototypeUnshift(completions, entry); uniqueSet.add(entry); } - } + }); // Add a separator between groups. if (uniqueSet.size !== setSize) { ArrayPrototypeUnshift(completions, ''); } - } + }); // Remove obsolete group entry, if present. 
if (completions[0] === '') { @@ -1569,14 +1569,13 @@ function defineDefaultCommands(repl) { const longestNameLength = MathMax( ...ArrayPrototypeMap(names, (name) => name.length) ); - for (let n = 0; n < names.length; n++) { - const name = names[n]; + ArrayPrototypeForEach(names, (name) => { const cmd = this.commands[name]; const spaces = StringPrototypeRepeat(' ', longestNameLength - name.length + 3); const line = `.${name}${cmd.help ? spaces + cmd.help : ''}\n`; this.output.write(line); - } + }); this.output.write('\nPress Ctrl+C to abort current expression, ' + 'Ctrl+D to exit the REPL\n'); this.displayPrompt(); diff --git a/lib/url.js b/lib/url.js index 49f99a8b5fa621..a13988031241d0 100644 --- a/lib/url.js +++ b/lib/url.js @@ -47,9 +47,10 @@ const { URLSearchParams, domainToASCII, domainToUnicode, + fileURLToPath, formatSymbol, pathToFileURL, - fileURLToPath + urlToHttpOptions, } = require('internal/url'); // Original url.parse() API @@ -987,5 +988,6 @@ module.exports = { // Utilities pathToFileURL, - fileURLToPath + fileURLToPath, + urlToHttpOptions, }; diff --git a/lib/vm.js b/lib/vm.js index 33893845084141..79c97f3af3ff02 100644 --- a/lib/vm.js +++ b/lib/vm.js @@ -23,6 +23,7 @@ const { ArrayPrototypeForEach, + ArrayPrototypeUnshift, Symbol, PromiseReject, ReflectApply, @@ -130,17 +131,17 @@ class Script extends ContextifyScript { if (breakOnSigint && process.listenerCount('SIGINT') > 0) { return sigintHandlersWrap(super.runInThisContext, this, args); } - return super.runInThisContext(...args); + return ReflectApply(super.runInThisContext, this, args); } runInContext(contextifiedObject, options) { validateContext(contextifiedObject); const { breakOnSigint, args } = getRunInContextArgs(options); + ArrayPrototypeUnshift(args, contextifiedObject); if (breakOnSigint && process.listenerCount('SIGINT') > 0) { - return sigintHandlersWrap(super.runInContext, this, - [contextifiedObject, ...args]); + return sigintHandlersWrap(super.runInContext, this, args); } - return super.runInContext(contextifiedObject, ...args); + return ReflectApply(super.runInContext, this, args); } runInNewContext(contextObject, options) { @@ -274,9 +275,9 @@ function sigintHandlersWrap(fn, thisArg, argsArray) { } finally { // Add using the public methods so that the `newListener` handler of // process can re-attach the listeners. - for (const listener of sigintListeners) { + ArrayPrototypeForEach(sigintListeners, (listener) => { process.addListener('SIGINT', listener); - } + }); } } diff --git a/lib/zlib.js b/lib/zlib.js index 1d1426635ca99b..38460c1263aa42 100644 --- a/lib/zlib.js +++ b/lib/zlib.js @@ -23,6 +23,7 @@ const { ArrayBuffer, + ArrayPrototypeForEach, ArrayPrototypeMap, ArrayPrototypePush, Error, @@ -803,8 +804,8 @@ function Brotli(opts, mode) { assert(mode === BROTLI_DECODE || mode === BROTLI_ENCODE); TypedArrayPrototypeFill(brotliInitParamsArray, -1); - if (opts && opts.params) { - for (const origKey of ObjectKeys(opts.params)) { + if (opts?.params) { + ArrayPrototypeForEach(ObjectKeys(opts.params), (origKey) => { const key = +origKey; if (NumberIsNaN(key) || key < 0 || key > kMaxBrotliParam || (brotliInitParamsArray[key] | 0) !== -1) { @@ -817,7 +818,7 @@ function Brotli(opts, mode) { 'number', opts.params[origKey]); } brotliInitParamsArray[key] = value; - } + }); } const handle = mode === BROTLI_DECODE ? 
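Editor's note: the lib/url.js hunk above re-exports the renamed internal helper as a public `url.urlToHttpOptions()`. A hedged usage sketch of what it returns:

```js
// urlToHttpOptions() converts a WHATWG URL object into the options shape
// expected by http.request() / https.request().
const { urlToHttpOptions } = require('url');
const opts = urlToHttpOptions(new URL('https://user:pass@example.org:8080/p?q=1'));
console.log(opts.hostname, opts.port, opts.path, opts.auth);
// 'example.org' 8080 '/p?q=1' 'user:pass'
```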
diff --git a/node.gyp b/node.gyp index 9294442e9097ce..7906c81784bad4 100644 --- a/node.gyp +++ b/node.gyp @@ -111,6 +111,7 @@ 'lib/internal/assert/assertion_error.js', 'lib/internal/assert/calltracker.js', 'lib/internal/async_hooks.js', + 'lib/internal/blob.js', 'lib/internal/blocklist.js', 'lib/internal/buffer.js', 'lib/internal/cli_table.js', @@ -609,6 +610,7 @@ 'src/node.cc', 'src/node_api.cc', 'src/node_binding.cc', + 'src/node_blob.cc', 'src/node_buffer.cc', 'src/node_config.cc', 'src/node_constants.cc', @@ -707,6 +709,7 @@ 'src/node_api.h', 'src/node_api_types.h', 'src/node_binding.h', + 'src/node_blob.h', 'src/node_buffer.h', 'src/node_constants.h', 'src/node_context_data.h', diff --git a/src/api/encoding.cc b/src/api/encoding.cc index 6df4a7faf30393..f64aeee15c3b34 100644 --- a/src/api/encoding.cc +++ b/src/api/encoding.cc @@ -68,6 +68,8 @@ enum encoding ParseEncoding(const char* encoding, } else if (encoding[1] == 'a') { if (strncmp(encoding + 2, "se64", 5) == 0) return BASE64; + if (strncmp(encoding + 2, "se64url", 8) == 0) + return BASE64URL; } if (StringEqualNoCase(encoding, "binary")) return LATIN1; // BINARY is a deprecated alias of LATIN1. @@ -75,6 +77,8 @@ enum encoding ParseEncoding(const char* encoding, return BUFFER; if (StringEqualNoCase(encoding, "base64")) return BASE64; + if (StringEqualNoCase(encoding, "base64url")) + return BASE64URL; break; case 'a': diff --git a/src/async_wrap.h b/src/async_wrap.h index 1a82a5bfce846c..90f43f64e521db 100644 --- a/src/async_wrap.h +++ b/src/async_wrap.h @@ -38,6 +38,7 @@ namespace node { V(ELDHISTOGRAM) \ V(FILEHANDLE) \ V(FILEHANDLECLOSEREQ) \ + V(FIXEDSIZEBLOBCOPY) \ V(FSEVENTWRAP) \ V(FSREQCALLBACK) \ V(FSREQPROMISE) \ diff --git a/src/crypto/crypto_rsa.cc b/src/crypto/crypto_rsa.cc index d859551392a382..971b184b0c53f5 100644 --- a/src/crypto/crypto_rsa.cc +++ b/src/crypto/crypto_rsa.cc @@ -367,7 +367,14 @@ Maybe ExportJWKRsaKey( int type = EVP_PKEY_id(pkey.get()); CHECK(type == EVP_PKEY_RSA || type == EVP_PKEY_RSA_PSS); - RSA* rsa = EVP_PKEY_get0_RSA(pkey.get()); + // TODO(tniessen): Remove the "else" branch once we drop support for OpenSSL + // versions older than 1.1.1e via FIPS / dynamic linking. + RSA* rsa; + if (OpenSSL_version_num() >= 0x1010105fL) { + rsa = EVP_PKEY_get0_RSA(pkey.get()); + } else { + rsa = static_cast(EVP_PKEY_get0(pkey.get())); + } CHECK_NOT_NULL(rsa); const BIGNUM* n; @@ -508,7 +515,14 @@ Maybe GetRsaKeyDetail( int type = EVP_PKEY_id(pkey.get()); CHECK(type == EVP_PKEY_RSA || type == EVP_PKEY_RSA_PSS); - RSA* rsa = EVP_PKEY_get0_RSA(pkey.get()); + // TODO(tniessen): Remove the "else" branch once we drop support for OpenSSL + // versions older than 1.1.1e via FIPS / dynamic linking. 
+ RSA* rsa; + if (OpenSSL_version_num() >= 0x1010105fL) { + rsa = EVP_PKEY_get0_RSA(pkey.get()); + } else { + rsa = static_cast(EVP_PKEY_get0(pkey.get())); + } CHECK_NOT_NULL(rsa); RSA_get0_key(rsa, &n, &e, nullptr); diff --git a/src/crypto/crypto_tls.cc b/src/crypto/crypto_tls.cc index f4850e425f46b3..52e57ab9862021 100644 --- a/src/crypto/crypto_tls.cc +++ b/src/crypto/crypto_tls.cc @@ -1339,8 +1339,6 @@ int TLSWrap::SelectSNIContextCallback(SSL* s, int* ad, void* arg) { return SSL_TLSEXT_ERR_OK; } -#ifndef OPENSSL_NO_PSK - int TLSWrap::SetCACerts(SecureContext* sc) { int err = SSL_set1_verify_cert_store( ssl_.get(), SSL_CTX_get_cert_store(sc->ctx_.get())); @@ -1355,6 +1353,8 @@ int TLSWrap::SetCACerts(SecureContext* sc) { return 1; } +#ifndef OPENSSL_NO_PSK + void TLSWrap::SetPskIdentityHint(const FunctionCallbackInfo& args) { TLSWrap* p; ASSIGN_OR_RETURN_UNWRAP(&p, args.Holder()); diff --git a/src/crypto/crypto_x509.cc b/src/crypto/crypto_x509.cc index 9c85f035d2ad82..0ea91bca877564 100644 --- a/src/crypto/crypto_x509.cc +++ b/src/crypto/crypto_x509.cc @@ -56,7 +56,8 @@ Local X509Certificate::GetConstructorTemplate( Local tmpl = env->x509_constructor_template(); if (tmpl.IsEmpty()) { tmpl = FunctionTemplate::New(env->isolate()); - tmpl->InstanceTemplate()->SetInternalFieldCount(1); + tmpl->InstanceTemplate()->SetInternalFieldCount( + BaseObject::kInternalFieldCount); tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); tmpl->SetClassName( FIXED_ONE_BYTE_STRING(env->isolate(), "X509Certificate")); diff --git a/src/debug_utils.cc b/src/debug_utils.cc index a601c5ecf40ea9..aa97bfbe943bab 100644 --- a/src/debug_utils.cc +++ b/src/debug_utils.cc @@ -377,7 +377,7 @@ std::vector NativeSymbolDebuggingContext::GetLoadedLibraries() { [](struct dl_phdr_info* info, size_t size, void* data) { auto list = static_cast*>(data); if (*info->dlpi_name != '\0') { - list->push_back(info->dlpi_name); + list->emplace_back(info->dlpi_name); } return 0; }, @@ -386,7 +386,7 @@ std::vector NativeSymbolDebuggingContext::GetLoadedLibraries() { uint32_t i = 0; for (const char* name = _dyld_get_image_name(i); name != nullptr; name = _dyld_get_image_name(++i)) { - list.push_back(name); + list.emplace_back(name); } #elif _AIX @@ -411,10 +411,10 @@ std::vector NativeSymbolDebuggingContext::GetLoadedLibraries() { strlen(cur_info->ldinfo_filename) + 1; if (*member_name != '\0') { str << cur_info->ldinfo_filename << "(" << member_name << ")"; - list.push_back(str.str()); + list.emplace_back(str.str()); str.str(""); } else { - list.push_back(cur_info->ldinfo_filename); + list.emplace_back(cur_info->ldinfo_filename); } buf += cur_info->ldinfo_next; } while (cur_info->ldinfo_next != 0); @@ -424,7 +424,7 @@ std::vector NativeSymbolDebuggingContext::GetLoadedLibraries() { if (dlinfo(RTLD_SELF, RTLD_DI_LINKMAP, &p) != -1) { for (Link_map* l = p; l != nullptr; l = l->l_next) { - list.push_back(l->l_name); + list.emplace_back(l->l_name); } } @@ -459,7 +459,7 @@ std::vector NativeSymbolDebuggingContext::GetLoadedLibraries() { char* str = new char[size]; WideCharToMultiByte( CP_UTF8, 0, module_name, -1, str, size, nullptr, nullptr); - list.push_back(str); + list.emplace_back(str); } } } diff --git a/src/env.h b/src/env.h index 7724e97ea58bb4..6be3cdb22fc0a9 100644 --- a/src/env.h +++ b/src/env.h @@ -448,6 +448,7 @@ constexpr size_t kFsStatsBufferLength = V(async_wrap_object_ctor_template, v8::FunctionTemplate) \ V(base_object_ctor_template, v8::FunctionTemplate) \ V(binding_data_ctor_template, v8::FunctionTemplate) \ + 
V(blob_constructor_template, v8::FunctionTemplate) \ V(blocklist_instance_template, v8::ObjectTemplate) \ V(compiled_fn_entry_template, v8::ObjectTemplate) \ V(dir_instance_template, v8::ObjectTemplate) \ diff --git a/src/node_blob.cc b/src/node_blob.cc new file mode 100644 index 00000000000000..b147b8764a7668 --- /dev/null +++ b/src/node_blob.cc @@ -0,0 +1,337 @@ +#include "node_blob.h" +#include "async_wrap-inl.h" +#include "base_object-inl.h" +#include "env-inl.h" +#include "memory_tracker-inl.h" +#include "node_errors.h" +#include "node_external_reference.h" +#include "threadpoolwork-inl.h" +#include "v8.h" + +#include + +namespace node { + +using v8::Array; +using v8::ArrayBuffer; +using v8::ArrayBufferView; +using v8::BackingStore; +using v8::Context; +using v8::EscapableHandleScope; +using v8::Function; +using v8::FunctionCallbackInfo; +using v8::FunctionTemplate; +using v8::HandleScope; +using v8::Local; +using v8::MaybeLocal; +using v8::Number; +using v8::Object; +using v8::Uint32; +using v8::Undefined; +using v8::Value; + +void Blob::Initialize(Environment* env, v8::Local target) { + env->SetMethod(target, "createBlob", New); + FixedSizeBlobCopyJob::Initialize(env, target); +} + +Local Blob::GetConstructorTemplate(Environment* env) { + Local tmpl = env->blob_constructor_template(); + if (tmpl.IsEmpty()) { + tmpl = FunctionTemplate::New(env->isolate()); + tmpl->InstanceTemplate()->SetInternalFieldCount( + BaseObject::kInternalFieldCount); + tmpl->Inherit(BaseObject::GetConstructorTemplate(env)); + tmpl->SetClassName( + FIXED_ONE_BYTE_STRING(env->isolate(), "Blob")); + env->SetProtoMethod(tmpl, "toArrayBuffer", ToArrayBuffer); + env->SetProtoMethod(tmpl, "slice", ToSlice); + env->set_blob_constructor_template(tmpl); + } + return tmpl; +} + +bool Blob::HasInstance(Environment* env, v8::Local object) { + return GetConstructorTemplate(env)->HasInstance(object); +} + +BaseObjectPtr Blob::Create( + Environment* env, + const std::vector store, + size_t length) { + + HandleScope scope(env->isolate()); + + Local ctor; + if (!GetConstructorTemplate(env)->GetFunction(env->context()).ToLocal(&ctor)) + return BaseObjectPtr(); + + Local obj; + if (!ctor->NewInstance(env->context()).ToLocal(&obj)) + return BaseObjectPtr(); + + return MakeBaseObject(env, obj, store, length); +} + +void Blob::New(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + CHECK(args[0]->IsArray()); // sources + CHECK(args[1]->IsUint32()); // length + + std::vector entries; + + size_t length = args[1].As()->Value(); + size_t len = 0; + Local ary = args[0].As(); + for (size_t n = 0; n < ary->Length(); n++) { + Local entry; + if (!ary->Get(env->context(), n).ToLocal(&entry)) + return; + CHECK(entry->IsArrayBufferView() || Blob::HasInstance(env, entry)); + if (entry->IsArrayBufferView()) { + Local view = entry.As(); + CHECK_EQ(view->ByteOffset(), 0); + std::shared_ptr store = view->Buffer()->GetBackingStore(); + size_t byte_length = view->ByteLength(); + view->Buffer()->Detach(); // The Blob will own the backing store now. 
+ entries.emplace_back(BlobEntry{std::move(store), byte_length, 0}); + len += byte_length; + } else { + Blob* blob; + ASSIGN_OR_RETURN_UNWRAP(&blob, entry); + auto source = blob->entries(); + entries.insert(entries.end(), source.begin(), source.end()); + len += blob->length(); + } + } + CHECK_EQ(length, len); + + BaseObjectPtr blob = Create(env, entries, length); + if (blob) + args.GetReturnValue().Set(blob->object()); +} + +void Blob::ToArrayBuffer(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + Blob* blob; + ASSIGN_OR_RETURN_UNWRAP(&blob, args.Holder()); + Local ret; + if (blob->GetArrayBuffer(env).ToLocal(&ret)) + args.GetReturnValue().Set(ret); +} + +void Blob::ToSlice(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + Blob* blob; + ASSIGN_OR_RETURN_UNWRAP(&blob, args.Holder()); + CHECK(args[0]->IsUint32()); + CHECK(args[1]->IsUint32()); + size_t start = args[0].As()->Value(); + size_t end = args[1].As()->Value(); + BaseObjectPtr slice = blob->Slice(env, start, end); + if (slice) + args.GetReturnValue().Set(slice->object()); +} + +void Blob::MemoryInfo(MemoryTracker* tracker) const { + tracker->TrackFieldWithSize("store", length_); +} + +MaybeLocal Blob::GetArrayBuffer(Environment* env) { + EscapableHandleScope scope(env->isolate()); + size_t len = length(); + std::shared_ptr store = + ArrayBuffer::NewBackingStore(env->isolate(), len); + if (len > 0) { + unsigned char* dest = static_cast(store->Data()); + size_t total = 0; + for (const auto& entry : entries()) { + unsigned char* src = static_cast(entry.store->Data()); + src += entry.offset; + memcpy(dest, src, entry.length); + dest += entry.length; + total += entry.length; + CHECK_LE(total, len); + } + } + + return scope.Escape(ArrayBuffer::New(env->isolate(), store)); +} + +BaseObjectPtr Blob::Slice(Environment* env, size_t start, size_t end) { + CHECK_LE(start, length()); + CHECK_LE(end, length()); + CHECK_LE(start, end); + + std::vector slices; + size_t total = end - start; + size_t remaining = total; + + if (total == 0) return Create(env, slices, 0); + + for (const auto& entry : entries()) { + if (start + entry.offset > entry.store->ByteLength()) { + start -= entry.length; + continue; + } + + size_t offset = entry.offset + start; + size_t len = std::min(remaining, entry.store->ByteLength() - offset); + slices.emplace_back(BlobEntry{entry.store, len, offset}); + + remaining -= len; + start = 0; + + if (remaining == 0) + break; + } + + return Create(env, slices, total); +} + +Blob::Blob( + Environment* env, + v8::Local obj, + const std::vector& store, + size_t length) + : BaseObject(env, obj), + store_(store), + length_(length) { + MakeWeak(); +} + +BaseObjectPtr +Blob::BlobTransferData::Deserialize( + Environment* env, + Local context, + std::unique_ptr self) { + if (context != env->context()) { + THROW_ERR_MESSAGE_TARGET_CONTEXT_UNAVAILABLE(env); + return {}; + } + return Blob::Create(env, store_, length_); +} + +BaseObject::TransferMode Blob::GetTransferMode() const { + return BaseObject::TransferMode::kCloneable; +} + +std::unique_ptr Blob::CloneForMessaging() const { + return std::make_unique(store_, length_); +} + +FixedSizeBlobCopyJob::FixedSizeBlobCopyJob( + Environment* env, + Local object, + Blob* blob, + FixedSizeBlobCopyJob::Mode mode) + : AsyncWrap(env, object, AsyncWrap::PROVIDER_FIXEDSIZEBLOBCOPY), + ThreadPoolWork(env), + mode_(mode) { + if (mode == FixedSizeBlobCopyJob::Mode::SYNC) MakeWeak(); + source_ = blob->entries(); + length_ = 
blob->length(); +} + +void FixedSizeBlobCopyJob::AfterThreadPoolWork(int status) { + Environment* env = AsyncWrap::env(); + CHECK_EQ(mode_, Mode::ASYNC); + CHECK(status == 0 || status == UV_ECANCELED); + std::unique_ptr ptr(this); + HandleScope handle_scope(env->isolate()); + Context::Scope context_scope(env->context()); + Local args[2]; + + if (status == UV_ECANCELED) { + args[0] = Number::New(env->isolate(), status), + args[1] = Undefined(env->isolate()); + } else { + args[0] = Undefined(env->isolate()); + args[1] = ArrayBuffer::New(env->isolate(), destination_); + } + + ptr->MakeCallback(env->ondone_string(), arraysize(args), args); +} + +void FixedSizeBlobCopyJob::DoThreadPoolWork() { + Environment* env = AsyncWrap::env(); + destination_ = ArrayBuffer::NewBackingStore(env->isolate(), length_); + unsigned char* dest = static_cast(destination_->Data()); + if (length_ > 0) { + size_t total = 0; + for (const auto& entry : source_) { + unsigned char* src = static_cast(entry.store->Data()); + src += entry.offset; + memcpy(dest, src, entry.length); + dest += entry.length; + total += entry.length; + CHECK_LE(total, length_); + } + } +} + +void FixedSizeBlobCopyJob::MemoryInfo(MemoryTracker* tracker) const { + tracker->TrackFieldWithSize("source", length_); + tracker->TrackFieldWithSize( + "destination", + destination_ ? destination_->ByteLength() : 0); +} + +void FixedSizeBlobCopyJob::Initialize(Environment* env, Local target) { + v8::Local job = env->NewFunctionTemplate(New); + job->Inherit(AsyncWrap::GetConstructorTemplate(env)); + job->InstanceTemplate()->SetInternalFieldCount( + AsyncWrap::kInternalFieldCount); + env->SetProtoMethod(job, "run", Run); + env->SetConstructorFunction(target, "FixedSizeBlobCopyJob", job); +} + +void FixedSizeBlobCopyJob::New(const FunctionCallbackInfo& args) { + static constexpr size_t kMaxSyncLength = 4096; + static constexpr size_t kMaxEntryCount = 4; + + Environment* env = Environment::GetCurrent(args); + CHECK(args.IsConstructCall()); + CHECK(args[0]->IsObject()); + CHECK(Blob::HasInstance(env, args[0])); + + Blob* blob; + ASSIGN_OR_RETURN_UNWRAP(&blob, args[0]); + + // This is a fairly arbitrary heuristic. We want to avoid deferring to + // the threadpool if the amount of data being copied is small and there + // aren't that many entries to copy. + FixedSizeBlobCopyJob::Mode mode = + (blob->length() < kMaxSyncLength && + blob->entries().size() < kMaxEntryCount) ? 
+ FixedSizeBlobCopyJob::Mode::SYNC : + FixedSizeBlobCopyJob::Mode::ASYNC; + + new FixedSizeBlobCopyJob(env, args.This(), blob, mode); +} + +void FixedSizeBlobCopyJob::Run(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + FixedSizeBlobCopyJob* job; + ASSIGN_OR_RETURN_UNWRAP(&job, args.Holder()); + if (job->mode() == FixedSizeBlobCopyJob::Mode::ASYNC) + return job->ScheduleWork(); + + job->DoThreadPoolWork(); + args.GetReturnValue().Set( + ArrayBuffer::New(env->isolate(), job->destination_)); +} + +void FixedSizeBlobCopyJob::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(Run); +} + +void Blob::RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(Blob::New); + registry->Register(Blob::ToArrayBuffer); + registry->Register(Blob::ToSlice); +} + +} // namespace node diff --git a/src/node_blob.h b/src/node_blob.h new file mode 100644 index 00000000000000..965f65390bdd41 --- /dev/null +++ b/src/node_blob.h @@ -0,0 +1,137 @@ +#ifndef SRC_NODE_BLOB_H_ +#define SRC_NODE_BLOB_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "async_wrap.h" +#include "base_object.h" +#include "env.h" +#include "memory_tracker.h" +#include "node_internals.h" +#include "node_worker.h" +#include "v8.h" + +#include + +namespace node { + +struct BlobEntry { + std::shared_ptr store; + size_t length; + size_t offset; +}; + +class Blob : public BaseObject { + public: + static void RegisterExternalReferences( + ExternalReferenceRegistry* registry); + static void Initialize(Environment* env, v8::Local target); + + static void New(const v8::FunctionCallbackInfo& args); + static void ToArrayBuffer(const v8::FunctionCallbackInfo& args); + static void ToSlice(const v8::FunctionCallbackInfo& args); + + static v8::Local GetConstructorTemplate( + Environment* env); + + static BaseObjectPtr Create( + Environment* env, + const std::vector store, + size_t length); + + static bool HasInstance(Environment* env, v8::Local object); + + const std::vector entries() const { + return store_; + } + + void MemoryInfo(MemoryTracker* tracker) const override; + SET_MEMORY_INFO_NAME(Blob); + SET_SELF_SIZE(Blob); + + // Copies the contents of the Blob into an ArrayBuffer. 
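Editor's note: node_blob.cc and node_blob.h above provide the native backing for the experimental `Blob` shipped in this release (the JS wrapper, lib/internal/blob.js, appears in the node.gyp hunk and is exposed from the 'buffer' module). A hedged usage sketch from the JS side:

```js
// Experimental in this release; the API shape may still change.
const { Blob } = require('buffer');
const blob = new Blob(['hello', ' ', 'world'], { type: 'text/plain' });
console.log(blob.size, blob.type);                // 11 'text/plain'
blob.arrayBuffer().then((ab) => {
  console.log(Buffer.from(ab).toString('utf8'));  // 'hello world'
});
```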
+ v8::MaybeLocal GetArrayBuffer(Environment* env); + + BaseObjectPtr Slice(Environment* env, size_t start, size_t end); + + inline size_t length() const { return length_; } + + class BlobTransferData : public worker::TransferData { + public: + explicit BlobTransferData( + const std::vector& store, + size_t length) + : store_(store), + length_(length) {} + + BaseObjectPtr Deserialize( + Environment* env, + v8::Local context, + std::unique_ptr self) override; + + SET_MEMORY_INFO_NAME(BlobTransferData) + SET_SELF_SIZE(BlobTransferData) + SET_NO_MEMORY_INFO() + + private: + std::vector store_; + size_t length_ = 0; + }; + + BaseObject::TransferMode GetTransferMode() const override; + std::unique_ptr CloneForMessaging() const override; + + Blob( + Environment* env, + v8::Local obj, + const std::vector& store, + size_t length); + + private: + std::vector store_; + size_t length_ = 0; +}; + +class FixedSizeBlobCopyJob : public AsyncWrap, public ThreadPoolWork { + public: + enum class Mode { + SYNC, + ASYNC + }; + + static void RegisterExternalReferences( + ExternalReferenceRegistry* registry); + static void Initialize(Environment* env, v8::Local target); + static void New(const v8::FunctionCallbackInfo& args); + static void Run(const v8::FunctionCallbackInfo& args); + + bool IsNotIndicativeOfMemoryLeakAtExit() const override { + return true; + } + + void DoThreadPoolWork() override; + void AfterThreadPoolWork(int status) override; + + Mode mode() const { return mode_; } + + void MemoryInfo(MemoryTracker* tracker) const override; + SET_MEMORY_INFO_NAME(FixedSizeBlobCopyJob) + SET_SELF_SIZE(FixedSizeBlobCopyJob) + + private: + FixedSizeBlobCopyJob( + Environment* env, + v8::Local object, + Blob* blob, + Mode mode = Mode::ASYNC); + + Mode mode_; + std::vector source_; + std::shared_ptr destination_; + size_t length_ = 0; +}; + +} // namespace node + +#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS +#endif // SRC_NODE_BLOB_H_ diff --git a/src/node_buffer.cc b/src/node_buffer.cc index 77efa7bae2fea5..485e273f2fbe9e 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -22,6 +22,7 @@ #include "node_buffer.h" #include "allocated_buffer-inl.h" #include "node.h" +#include "node_blob.h" #include "node_errors.h" #include "node_external_reference.h" #include "node_internals.h" @@ -1184,6 +1185,7 @@ void Initialize(Local target, env->SetMethodNoSideEffect(target, "asciiSlice", StringSlice); env->SetMethodNoSideEffect(target, "base64Slice", StringSlice); + env->SetMethodNoSideEffect(target, "base64urlSlice", StringSlice); env->SetMethodNoSideEffect(target, "latin1Slice", StringSlice); env->SetMethodNoSideEffect(target, "hexSlice", StringSlice); env->SetMethodNoSideEffect(target, "ucs2Slice", StringSlice); @@ -1191,12 +1193,15 @@ void Initialize(Local target, env->SetMethod(target, "asciiWrite", StringWrite); env->SetMethod(target, "base64Write", StringWrite); + env->SetMethod(target, "base64urlWrite", StringWrite); env->SetMethod(target, "latin1Write", StringWrite); env->SetMethod(target, "hexWrite", StringWrite); env->SetMethod(target, "ucs2Write", StringWrite); env->SetMethod(target, "utf8Write", StringWrite); env->SetMethod(target, "getZeroFillToggle", GetZeroFillToggle); + + Blob::Initialize(env, target); } } // anonymous namespace @@ -1223,6 +1228,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringSlice); registry->Register(StringSlice); + registry->Register(StringSlice); registry->Register(StringSlice); registry->Register(StringSlice); 
registry->Register(StringSlice); @@ -1230,11 +1236,15 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringWrite); registry->Register(StringWrite); + registry->Register(StringWrite); registry->Register(StringWrite); registry->Register(StringWrite); registry->Register(StringWrite); registry->Register(StringWrite); registry->Register(GetZeroFillToggle); + + Blob::RegisterExternalReferences(registry); + FixedSizeBlobCopyJob::RegisterExternalReferences(registry); } } // namespace Buffer diff --git a/src/node_file.cc b/src/node_file.cc index ac1d6aa74aa0b5..a210aea3368bee 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -51,6 +51,7 @@ namespace node { namespace fs { using v8::Array; +using v8::BigInt; using v8::Boolean; using v8::Context; using v8::EscapableHandleScope; @@ -2038,8 +2039,10 @@ static void Read(const FunctionCallbackInfo& args) { const size_t len = static_cast(args[3].As()->Value()); CHECK(Buffer::IsWithinBounds(off, len, buffer_length)); - CHECK(IsSafeJsInt(args[4])); - const int64_t pos = args[4].As()->Value(); + CHECK(IsSafeJsInt(args[4]) || args[4]->IsBigInt()); + const int64_t pos = args[4]->IsNumber() ? + args[4].As()->Value() : + args[4].As()->Int64Value(); char* buf = buffer_data + off; uv_buf_t uvbuf = uv_buf_init(buf, len); diff --git a/src/node_os.cc b/src/node_os.cc index 2e151ac4f89213..3cde80996f095f 100644 --- a/src/node_os.cc +++ b/src/node_os.cc @@ -112,16 +112,17 @@ static void GetCPUInfo(const FunctionCallbackInfo& args) { // assemble them into objects in JS than to call Object::Set() repeatedly // The array is in the format // [model, speed, (5 entries of cpu_times), model2, speed2, ...] - std::vector> result(count * 7); - for (int i = 0, j = 0; i < count; i++) { + std::vector> result; + result.reserve(count * 7); + for (int i = 0; i < count; i++) { uv_cpu_info_t* ci = cpu_infos + i; - result[j++] = OneByteString(isolate, ci->model); - result[j++] = Number::New(isolate, ci->speed); - result[j++] = Number::New(isolate, ci->cpu_times.user); - result[j++] = Number::New(isolate, ci->cpu_times.nice); - result[j++] = Number::New(isolate, ci->cpu_times.sys); - result[j++] = Number::New(isolate, ci->cpu_times.idle); - result[j++] = Number::New(isolate, ci->cpu_times.irq); + result.emplace_back(OneByteString(isolate, ci->model)); + result.emplace_back(Number::New(isolate, ci->speed)); + result.emplace_back(Number::New(isolate, ci->cpu_times.user)); + result.emplace_back(Number::New(isolate, ci->cpu_times.nice)); + result.emplace_back(Number::New(isolate, ci->cpu_times.sys)); + result.emplace_back(Number::New(isolate, ci->cpu_times.idle)); + result.emplace_back(Number::New(isolate, ci->cpu_times.irq)); } uv_free_cpu_info(cpu_infos, count); @@ -182,7 +183,8 @@ static void GetInterfaceAddresses(const FunctionCallbackInfo& args) { } Local no_scope_id = Integer::New(isolate, -1); - std::vector> result(count * 7); + std::vector> result; + result.reserve(count * 7); for (i = 0; i < count; i++) { const char* const raw_name = interfaces[i].name; @@ -216,18 +218,18 @@ static void GetInterfaceAddresses(const FunctionCallbackInfo& args) { family = env->unknown_string(); } - result[i * 7] = name; - result[i * 7 + 1] = OneByteString(isolate, ip); - result[i * 7 + 2] = OneByteString(isolate, netmask); - result[i * 7 + 3] = family; - result[i * 7 + 4] = FIXED_ONE_BYTE_STRING(isolate, mac); - result[i * 7 + 5] = - interfaces[i].is_internal ? 
True(isolate) : False(isolate); + result.emplace_back(name); + result.emplace_back(OneByteString(isolate, ip)); + result.emplace_back(OneByteString(isolate, netmask)); + result.emplace_back(family); + result.emplace_back(FIXED_ONE_BYTE_STRING(isolate, mac)); + result.emplace_back( + interfaces[i].is_internal ? True(isolate) : False(isolate)); if (interfaces[i].address.address4.sin_family == AF_INET6) { uint32_t scopeid = interfaces[i].address.address6.sin6_scope_id; - result[i * 7 + 6] = Integer::NewFromUnsigned(isolate, scopeid); + result.emplace_back(Integer::NewFromUnsigned(isolate, scopeid)); } else { - result[i * 7 + 6] = no_scope_id; + result.emplace_back(no_scope_id); } } diff --git a/src/node_version.h b/src/node_version.h index e46fb50dda678a..56555ebb0b8867 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 15 -#define NODE_MINOR_VERSION 6 -#define NODE_PATCH_VERSION 1 +#define NODE_MINOR_VERSION 7 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/string_decoder.cc b/src/string_decoder.cc index 0f9e6faaab162e..a915f5744f3b00 100644 --- a/src/string_decoder.cc +++ b/src/string_decoder.cc @@ -70,7 +70,10 @@ MaybeLocal StringDecoder::DecodeData(Isolate* isolate, size_t nread = *nread_ptr; - if (Encoding() == UTF8 || Encoding() == UCS2 || Encoding() == BASE64) { + if (Encoding() == UTF8 || + Encoding() == UCS2 || + Encoding() == BASE64 || + Encoding() == BASE64URL) { // See if we want bytes to finish a character from the previous // chunk; if so, copy the new bytes to the missing bytes buffer // and create a small string from it that is to be prepended to the @@ -198,7 +201,7 @@ MaybeLocal StringDecoder::DecodeData(Isolate* isolate, state_[kBufferedBytes] = 2; state_[kMissingBytes] = 2; } - } else if (Encoding() == BASE64) { + } else if (Encoding() == BASE64 || Encoding() == BASE64URL) { state_[kBufferedBytes] = nread % 3; if (state_[kBufferedBytes] > 0) state_[kMissingBytes] = 3 - BufferedBytes(); @@ -311,6 +314,7 @@ void InitializeStringDecoder(Local target, ADD_TO_ENCODINGS_ARRAY(ASCII, "ascii"); ADD_TO_ENCODINGS_ARRAY(UTF8, "utf8"); ADD_TO_ENCODINGS_ARRAY(BASE64, "base64"); + ADD_TO_ENCODINGS_ARRAY(BASE64URL, "base64url"); ADD_TO_ENCODINGS_ARRAY(UCS2, "utf16le"); ADD_TO_ENCODINGS_ARRAY(HEX, "hex"); ADD_TO_ENCODINGS_ARRAY(BUFFER, "buffer"); diff --git a/test/abort/abort.status b/test/abort/abort.status new file mode 100644 index 00000000000000..e56c24ff433d63 --- /dev/null +++ b/test/abort/abort.status @@ -0,0 +1,11 @@ +prefix abort + +# To mark a test as flaky, list the test name in the appropriate section +# below, without ".js", followed by ": PASS,FLAKY". 
Example: +# sample-test : PASS,FLAKY + +[true] # This section applies to all platforms + +[$system==ibmi] +# https://github.com/nodejs/node/issues/34410 +test-addon-register-signal-handler: PASS,FLAKY diff --git a/test/addons/addon.status b/test/addons/addon.status index 951cddc48465d1..b56e0aa461cc36 100644 --- a/test/addons/addon.status +++ b/test/addons/addon.status @@ -13,3 +13,5 @@ openssl-binding/test: PASS,FLAKY [$system==ibmi] openssl-binding/test: SKIP zlib-binding/test: SKIP +# https://github.com/nodejs/node/issues/34410 +register-signal-handler/test: PASS,FLAKY diff --git a/test/addons/parse-encoding/binding.cc b/test/addons/parse-encoding/binding.cc index cdbd8e44466db8..7fae5bf4bdb61c 100644 --- a/test/addons/parse-encoding/binding.cc +++ b/test/addons/parse-encoding/binding.cc @@ -6,6 +6,7 @@ namespace { #define ENCODING_MAP(V) \ V(ASCII) \ V(BASE64) \ + V(BASE64URL) \ V(BUFFER) \ V(HEX) \ V(LATIN1) \ diff --git a/test/addons/parse-encoding/test.js b/test/addons/parse-encoding/test.js index 1456115a926f3e..da52f2dc4248af 100644 --- a/test/addons/parse-encoding/test.js +++ b/test/addons/parse-encoding/test.js @@ -8,6 +8,7 @@ assert.strictEqual(parseEncoding(''), 'UNKNOWN'); assert.strictEqual(parseEncoding('ascii'), 'ASCII'); assert.strictEqual(parseEncoding('base64'), 'BASE64'); +assert.strictEqual(parseEncoding('base64url'), 'BASE64URL'); assert.strictEqual(parseEncoding('binary'), 'LATIN1'); assert.strictEqual(parseEncoding('buffer'), 'BUFFER'); assert.strictEqual(parseEncoding('hex'), 'HEX'); diff --git a/test/cctest/test_environment.cc b/test/cctest/test_environment.cc index 862dfd5780868a..e9c81dc97ddbf1 100644 --- a/test/cctest/test_environment.cc +++ b/test/cctest/test_environment.cc @@ -1,6 +1,7 @@ #include "node_buffer.h" #include "node_internals.h" #include "libplatform/libplatform.h" +#include "util.h" #include #include "gtest/gtest.h" @@ -10,6 +11,7 @@ using node::AtExit; using node::RunAtExit; +using node::USE; static bool called_cb_1 = false; static bool called_cb_2 = false; @@ -74,7 +76,7 @@ class RedirectStdErr { fflush(stderr); fgetpos(stderr, &pos_); fd_ = dup(fileno(stderr)); - freopen(filename_, "w", stderr); + USE(freopen(filename_, "w", stderr)); } ~RedirectStdErr() { diff --git a/test/common/tls.js b/test/common/tls.js index e7cacde7456707..5094755c240afd 100644 --- a/test/common/tls.js +++ b/test/common/tls.js @@ -15,9 +15,9 @@ class TestTLSSocket extends net.Socket { this.handshake_list = []; // AES128-GCM-SHA256 this.ciphers = Buffer.from('000002009c0', 'hex'); - this.pre_master_secret = + this.pre_primary_secret = Buffer.concat([this.version, crypto.randomBytes(46)]); - this.master_secret = null; + this.primary_secret = null; this.write_seq = 0; this.client_random = crypto.randomBytes(32); @@ -26,12 +26,12 @@ class TestTLSSocket extends net.Socket { }); this.on('server_random', (server_random) => { - this.master_secret = PRF12('sha256', this.pre_master_secret, - 'master secret', - Buffer.concat([this.client_random, - server_random]), - 48); - const key_block = PRF12('sha256', this.master_secret, + this.primary_secret = PRF12('sha256', this.pre_primary_secret, + 'primary secret', + Buffer.concat([this.client_random, + server_random]), + 48); + const key_block = PRF12('sha256', this.primary_secret, 'key expansion', Buffer.concat([server_random, this.client_random]), @@ -51,14 +51,14 @@ class TestTLSSocket extends net.Socket { } createClientKeyExchange() { - const encrypted_pre_master_secret = 
crypto.publicEncrypt({ + const encrypted_pre_primary_secret = crypto.publicEncrypt({ key: this.server_cert, padding: crypto.constants.RSA_PKCS1_PADDING - }, this.pre_master_secret); + }, this.pre_primary_secret); const length = Buffer.alloc(2); - length.writeUIntBE(encrypted_pre_master_secret.length, 0, 2); + length.writeUIntBE(encrypted_pre_primary_secret.length, 0, 2); const msg = addHandshakeHeader(0x10, Buffer.concat([ - length, encrypted_pre_master_secret])); + length, encrypted_pre_primary_secret])); this.emit('handshake', msg); return addRecordHeader(0x16, msg); } @@ -67,7 +67,7 @@ class TestTLSSocket extends net.Socket { const shasum = crypto.createHash('sha256'); shasum.update(Buffer.concat(this.handshake_list)); const message_hash = shasum.digest(); - const r = PRF12('sha256', this.master_secret, + const r = PRF12('sha256', this.primary_secret, 'client finished', message_hash, 12); const msg = addHandshakeHeader(0x14, r); this.emit('handshake', msg); diff --git a/test/es-module/test-esm-dynamic-import.js b/test/es-module/test-esm-dynamic-import.js index 6f8757da1b914e..6e64f86423c66b 100644 --- a/test/es-module/test-esm-dynamic-import.js +++ b/test/es-module/test-esm-dynamic-import.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); -const { URL } = require('url'); const relativePath = '../fixtures/es-modules/test-esm-ok.mjs'; const absolutePath = require.resolve('../fixtures/es-modules/test-esm-ok.mjs'); diff --git a/test/es-module/test-esm-loader-modulemap.js b/test/es-module/test-esm-loader-modulemap.js index a4d56a2c2fda1c..2d74cd385be52b 100644 --- a/test/es-module/test-esm-loader-modulemap.js +++ b/test/es-module/test-esm-loader-modulemap.js @@ -7,7 +7,6 @@ require('../common'); const assert = require('assert'); -const { URL } = require('url'); const { Loader } = require('internal/modules/esm/loader'); const ModuleMap = require('internal/modules/esm/module_map'); const ModuleJob = require('internal/modules/esm/module_job'); diff --git a/test/fixtures/wpt/FileAPI/BlobURL/support/file_test2.txt b/test/fixtures/wpt/FileAPI/BlobURL/support/file_test2.txt new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html b/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html new file mode 100644 index 00000000000000..07fb27ef8af10b --- /dev/null +++ b/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html @@ -0,0 +1,62 @@ + + + + + Blob and File reference URL Test(2) + + + + + + +
+ Test steps: download the file; select the file in the file inputbox; delete the file; click the 'start' button.
      + + + + diff --git a/test/fixtures/wpt/FileAPI/FileReader/progress_event_bubbles_cancelable.html b/test/fixtures/wpt/FileAPI/FileReader/progress_event_bubbles_cancelable.html new file mode 100644 index 00000000000000..6a03243f934081 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReader/progress_event_bubbles_cancelable.html @@ -0,0 +1,33 @@ + + +File API Test: Progress Event - bubbles, cancelable + + + + +
      + + diff --git a/test/fixtures/wpt/FileAPI/FileReader/support/file_test1.txt b/test/fixtures/wpt/FileAPI/FileReader/support/file_test1.txt new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/test/fixtures/wpt/FileAPI/FileReader/test_errors-manual.html b/test/fixtures/wpt/FileAPI/FileReader/test_errors-manual.html new file mode 100644 index 00000000000000..b8c3f84d2bf23a --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReader/test_errors-manual.html @@ -0,0 +1,72 @@ + + + + + FileReader Errors Test + + + + + + +
+ Test steps: download the file; select the file in the file inputbox; delete the file; click the 'start' button.
      + + + + diff --git a/test/fixtures/wpt/FileAPI/FileReader/test_notreadableerrors-manual.html b/test/fixtures/wpt/FileAPI/FileReader/test_notreadableerrors-manual.html new file mode 100644 index 00000000000000..46d73598a0f91a --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReader/test_notreadableerrors-manual.html @@ -0,0 +1,42 @@ + + +FileReader NotReadableError Test + + + + +
+ Test steps: download the file; select the file in the file inputbox; delete the file's readable permission; click the 'start' button.
      + + + diff --git a/test/fixtures/wpt/FileAPI/FileReader/test_securityerrors-manual.html b/test/fixtures/wpt/FileAPI/FileReader/test_securityerrors-manual.html new file mode 100644 index 00000000000000..add93ed69d139a --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReader/test_securityerrors-manual.html @@ -0,0 +1,40 @@ + + +FileReader SecurityError Test + + + + +
+ Test steps: select a system sensitive file (e.g. files in /usr/bin, password files, and other native operating system executables) in the file inputbox; click the 'start' button.
      + + diff --git a/test/fixtures/wpt/FileAPI/FileReader/workers.html b/test/fixtures/wpt/FileAPI/FileReader/workers.html new file mode 100644 index 00000000000000..8e114eeaf86ff5 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReader/workers.html @@ -0,0 +1,27 @@ + + + + + diff --git a/test/fixtures/wpt/FileAPI/FileReaderSync.worker.js b/test/fixtures/wpt/FileAPI/FileReaderSync.worker.js new file mode 100644 index 00000000000000..3d7a0222f31266 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/FileReaderSync.worker.js @@ -0,0 +1,56 @@ +importScripts("/resources/testharness.js"); + +var blob, empty_blob, readerSync; +setup(() => { + readerSync = new FileReaderSync(); + blob = new Blob(["test"]); + empty_blob = new Blob(); +}); + +test(() => { + assert_true(readerSync instanceof FileReaderSync); +}, "Interface"); + +test(() => { + var text = readerSync.readAsText(blob); + assert_equals(text, "test"); +}, "readAsText"); + +test(() => { + var text = readerSync.readAsText(empty_blob); + assert_equals(text, ""); +}, "readAsText with empty blob"); + +test(() => { + var data = readerSync.readAsDataURL(blob); + assert_equals(data.indexOf("data:"), 0); +}, "readAsDataURL"); + +test(() => { + var data = readerSync.readAsDataURL(empty_blob); + assert_equals(data.indexOf("data:"), 0); +}, "readAsDataURL with empty blob"); + +test(() => { + var data = readerSync.readAsBinaryString(blob); + assert_equals(data, "test"); +}, "readAsBinaryString"); + +test(() => { + var data = readerSync.readAsBinaryString(empty_blob); + assert_equals(data, ""); +}, "readAsBinaryString with empty blob"); + +test(() => { + var data = readerSync.readAsArrayBuffer(blob); + assert_true(data instanceof ArrayBuffer); + assert_equals(data.byteLength, "test".length); +}, "readAsArrayBuffer"); + +test(() => { + var data = readerSync.readAsArrayBuffer(empty_blob); + assert_true(data instanceof ArrayBuffer); + assert_equals(data.byteLength, 0); +}, "readAsArrayBuffer with empty blob"); + +done(); diff --git a/test/fixtures/wpt/FileAPI/META.yml b/test/fixtures/wpt/FileAPI/META.yml new file mode 100644 index 00000000000000..506a59fec1eb33 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/META.yml @@ -0,0 +1,6 @@ +spec: https://w3c.github.io/FileAPI/ +suggested_reviewers: + - inexorabletash + - zqzhang + - jdm + - mkruisselbrink diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-array-buffer.any.js b/test/fixtures/wpt/FileAPI/blob/Blob-array-buffer.any.js new file mode 100644 index 00000000000000..2310646e5fdeab --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-array-buffer.any.js @@ -0,0 +1,45 @@ +// META: title=Blob Array Buffer +// META: script=../support/Blob.js +'use strict'; + +promise_test(async () => { + const input_arr = new TextEncoder().encode("PASS"); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer()") + +promise_test(async () => { + const input_arr = new TextEncoder().encode(""); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer() empty Blob data") + +promise_test(async () => { + const input_arr = new TextEncoder().encode("\u08B8\u000a"); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_equals_typed_array(new 
Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer() non-ascii input") + +promise_test(async () => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + const blob = new Blob([typed_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_equals_typed_array(new Uint8Array(array_buffer), typed_arr); +}, "Blob.arrayBuffer() non-unicode input") + +promise_test(async () => { + const input_arr = new TextEncoder().encode("PASS"); + const blob = new Blob([input_arr]); + const array_buffer_results = await Promise.all([blob.arrayBuffer(), + blob.arrayBuffer(), blob.arrayBuffer()]); + for (let array_buffer of array_buffer_results) { + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); + } +}, "Blob.arrayBuffer() concurrent reads") diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-constructor-endings.html b/test/fixtures/wpt/FileAPI/blob/Blob-constructor-endings.html new file mode 100644 index 00000000000000..04edd2a303b135 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-constructor-endings.html @@ -0,0 +1,104 @@ + + +Blob constructor: endings option + + + + diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-constructor.html b/test/fixtures/wpt/FileAPI/blob/Blob-constructor.html new file mode 100644 index 00000000000000..62a649aed66418 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-constructor.html @@ -0,0 +1,501 @@ + + +Blob constructor + + + + + + + +
      + diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-in-worker.worker.js b/test/fixtures/wpt/FileAPI/blob/Blob-in-worker.worker.js new file mode 100644 index 00000000000000..a67060e7b85eff --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-in-worker.worker.js @@ -0,0 +1,14 @@ +importScripts("/resources/testharness.js"); + +async_test(function() { + var data = "TEST"; + var blob = new Blob([data], {type: "text/plain"}); + var reader = new FileReader(); + reader.onload = this.step_func_done(function() { + assert_equals(reader.result, data); + }); + reader.onerror = this.unreached_func("Unexpected error event"); + reader.readAsText(blob); +}, "Create Blob in Worker"); + +done(); diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-slice-overflow.html b/test/fixtures/wpt/FileAPI/blob/Blob-slice-overflow.html new file mode 100644 index 00000000000000..74cd83a34f7116 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-slice-overflow.html @@ -0,0 +1,42 @@ + + +Blob slice overflow + + + + +
      + + diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-slice.html b/test/fixtures/wpt/FileAPI/blob/Blob-slice.html new file mode 100644 index 00000000000000..03fe6ca5343bd1 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-slice.html @@ -0,0 +1,238 @@ + + +Blob slice + + + + + +
      + diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-stream.any.js b/test/fixtures/wpt/FileAPI/blob/Blob-stream.any.js new file mode 100644 index 00000000000000..792b6639c35a26 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-stream.any.js @@ -0,0 +1,72 @@ +// META: title=Blob Stream +// META: script=../support/Blob.js +// META: script=../../streams/resources/test-utils.js +'use strict'; + +// Helper function that triggers garbage collection while reading a chunk +// if perform_gc is true. +async function read_and_gc(reader, perform_gc) { + const read_promise = reader.read(); + if (perform_gc) + garbageCollect(); + return read_promise; +} + +// Takes in a ReadableStream and reads from it until it is done, returning +// an array that contains the results of each read operation. If perform_gc +// is true, garbage collection is triggered while reading every chunk. +async function read_all_chunks(stream, perform_gc = false) { + assert_true(stream instanceof ReadableStream); + assert_true('getReader' in stream); + const reader = stream.getReader(); + + assert_true('read' in reader); + let read_value = await read_and_gc(reader, perform_gc); + + let out = []; + let i = 0; + while (!read_value.done) { + for (let val of read_value.value) { + out[i++] = val; + } + read_value = await read_and_gc(reader, perform_gc); + } + return out; +} + +promise_test(async () => { + const blob = new Blob(["PASS"]); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + for (let [index, value] of chunks.entries()) { + assert_equals(value, "PASS".charCodeAt(index)); + } +}, "Blob.stream()") + +promise_test(async () => { + const blob = new Blob(); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + assert_array_equals(chunks, []); +}, "Blob.stream() empty Blob") + +promise_test(async () => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + const blob = new Blob([typed_arr]); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + assert_array_equals(chunks, input_arr); +}, "Blob.stream() non-unicode input") + +promise_test(async() => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + let blob = new Blob([typed_arr]); + const stream = blob.stream(); + blob = null; + garbageCollect(); + const chunks = await read_all_chunks(stream, /*perform_gc=*/true); + assert_array_equals(chunks, input_arr); +}, "Blob.stream() garbage collection of blob shouldn't break stream" + + "consumption") diff --git a/test/fixtures/wpt/FileAPI/blob/Blob-text.any.js b/test/fixtures/wpt/FileAPI/blob/Blob-text.any.js new file mode 100644 index 00000000000000..d04fa97cffe6a3 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/blob/Blob-text.any.js @@ -0,0 +1,64 @@ +// META: title=Blob Text +// META: script=../support/Blob.js +'use strict'; + +promise_test(async () => { + const blob = new Blob(["PASS"]); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text()") + +promise_test(async () => { + const blob = new Blob(); + const text = await blob.text(); + assert_equals(text, ""); +}, "Blob.text() empty blob data") + +promise_test(async () => { + const blob = new Blob(["P", "A", "SS"]); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text() multi-element array in constructor") + +promise_test(async () => { + const non_unicode = "\u0061\u030A"; + const input_arr = new TextEncoder().encode(non_unicode); + const blob = new 
Blob([input_arr]); + const text = await blob.text(); + assert_equals(text, non_unicode); +}, "Blob.text() non-unicode") + +promise_test(async () => { + const blob = new Blob(["PASS"], { type: "text/plain;charset=utf-16le" }); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text() different charset param in type option") + +promise_test(async () => { + const non_unicode = "\u0061\u030A"; + const input_arr = new TextEncoder().encode(non_unicode); + const blob = new Blob([input_arr], { type: "text/plain;charset=utf-16le" }); + const text = await blob.text(); + assert_equals(text, non_unicode); +}, "Blob.text() different charset param with non-ascii input") + +promise_test(async () => { + const input_arr = new Uint8Array([192, 193, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255]); + const blob = new Blob([input_arr]); + const text = await blob.text(); + assert_equals(text, "\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd" + + "\ufffd\ufffd\ufffd\ufffd"); +}, "Blob.text() invalid utf-8 input") + +promise_test(async () => { + const input_arr = new Uint8Array([192, 193, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255]); + const blob = new Blob([input_arr]); + const text_results = await Promise.all([blob.text(), blob.text(), + blob.text()]); + for (let text of text_results) { + assert_equals(text, "\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd" + + "\ufffd\ufffd\ufffd\ufffd"); + } +}, "Blob.text() concurrent reads") diff --git a/test/fixtures/wpt/FileAPI/file/File-constructor-endings.html b/test/fixtures/wpt/FileAPI/file/File-constructor-endings.html new file mode 100644 index 00000000000000..1282b6c5ac2c79 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/File-constructor-endings.html @@ -0,0 +1,104 @@ + + +File constructor: endings option + + + + diff --git a/test/fixtures/wpt/FileAPI/file/File-constructor.html b/test/fixtures/wpt/FileAPI/file/File-constructor.html new file mode 100644 index 00000000000000..3477e4ada16e92 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/File-constructor.html @@ -0,0 +1,159 @@ + + +File constructor + + + +
      + diff --git a/test/fixtures/wpt/FileAPI/file/Worker-read-file-constructor.worker.js b/test/fixtures/wpt/FileAPI/file/Worker-read-file-constructor.worker.js new file mode 100644 index 00000000000000..4e003b3c958a94 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/Worker-read-file-constructor.worker.js @@ -0,0 +1,15 @@ +importScripts("/resources/testharness.js"); + +async_test(function() { + var file = new File(["bits"], "dummy", { 'type': 'text/plain', lastModified: 42 }); + var reader = new FileReader(); + reader.onload = this.step_func_done(function() { + assert_equals(file.name, "dummy", "file name"); + assert_equals(reader.result, "bits", "file content"); + assert_equals(file.lastModified, 42, "file lastModified"); + }); + reader.onerror = this.unreached_func("Unexpected error event"); + reader.readAsText(file); +}, "FileReader in Worker"); + +done(); diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-controls.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-form-controls.tentative.html new file mode 100644 index 00000000000000..d11f4a860931b4 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-controls.tentative.html @@ -0,0 +1,117 @@ + + +Upload files named using controls (tentative) + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-iso-2022-jp.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-form-iso-2022-jp.tentative.html new file mode 100644 index 00000000000000..659af3bde85852 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-iso-2022-jp.tentative.html @@ -0,0 +1,72 @@ + + + +Upload files in ISO-2022-JP form (tentative) + + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-punctuation.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-form-punctuation.tentative.html new file mode 100644 index 00000000000000..5c2d6d0bf1fe01 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-punctuation.tentative.html @@ -0,0 +1,230 @@ + + +Upload files named using punctuation (tentative) + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-utf-8.html b/test/fixtures/wpt/FileAPI/file/send-file-form-utf-8.html new file mode 100644 index 00000000000000..1be44f4f4db09e --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-utf-8.html @@ -0,0 +1,62 @@ + + +Upload files in UTF-8 form + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-windows-1252.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-form-windows-1252.tentative.html new file mode 100644 index 00000000000000..a2c37186b3e023 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-windows-1252.tentative.html @@ -0,0 +1,69 @@ + + +Upload files in Windows-1252 form (tentative) + + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form-x-user-defined.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-form-x-user-defined.tentative.html new file mode 100644 index 00000000000000..503b08a51706f7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form-x-user-defined.tentative.html @@ -0,0 +1,70 @@ + + +Upload files in x-user-defined form (tentative) + + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-form.html b/test/fixtures/wpt/FileAPI/file/send-file-form.html new file mode 100644 index 00000000000000..baa8d4286c5789 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-form.html @@ -0,0 +1,25 @@ + + +Upload ASCII-named file in UTF-8 form + + + + + + + + diff --git 
a/test/fixtures/wpt/FileAPI/file/send-file-formdata-controls.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-formdata-controls.tentative.html new file mode 100644 index 00000000000000..4259741b63ef31 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-formdata-controls.tentative.html @@ -0,0 +1,93 @@ + + +FormData: Upload files named using controls (tentative) + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-formdata-punctuation.tentative.html b/test/fixtures/wpt/FileAPI/file/send-file-formdata-punctuation.tentative.html new file mode 100644 index 00000000000000..d8e84e9d978094 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-formdata-punctuation.tentative.html @@ -0,0 +1,168 @@ + + +FormData: Upload files named using punctuation (tentative) + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-formdata-utf-8.html b/test/fixtures/wpt/FileAPI/file/send-file-formdata-utf-8.html new file mode 100644 index 00000000000000..7a7f6cefe776b9 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-formdata-utf-8.html @@ -0,0 +1,53 @@ + + +FormData: Upload files in UTF-8 fetch() + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/file/send-file-formdata.html b/test/fixtures/wpt/FileAPI/file/send-file-formdata.html new file mode 100644 index 00000000000000..77e048e54741c0 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/file/send-file-formdata.html @@ -0,0 +1,28 @@ + + +FormData: Upload ASCII-named file in UTF-8 form + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/fileReader.html b/test/fixtures/wpt/FileAPI/fileReader.html new file mode 100644 index 00000000000000..b767e22d4a66eb --- /dev/null +++ b/test/fixtures/wpt/FileAPI/fileReader.html @@ -0,0 +1,67 @@ + + + + FileReader States + + + + + + +
      + + + diff --git a/test/fixtures/wpt/FileAPI/filelist-section/filelist.html b/test/fixtures/wpt/FileAPI/filelist-section/filelist.html new file mode 100644 index 00000000000000..b97dcde19f647c --- /dev/null +++ b/test/fixtures/wpt/FileAPI/filelist-section/filelist.html @@ -0,0 +1,57 @@ + + + + + FileAPI Test: filelist + + + + + + + + + +
      + +
      +
      + + + + + diff --git a/test/fixtures/wpt/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html b/test/fixtures/wpt/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html new file mode 100644 index 00000000000000..2efaa059fa4897 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html @@ -0,0 +1,64 @@ + + + + + FileAPI Test: filelist_multiple_selected_files + + + + + + + + + +
+ Test steps: download upload.txt and upload.zip to local; select the two local files (upload.txt, upload.zip) to run the test.
      + + + + diff --git a/test/fixtures/wpt/FileAPI/filelist-section/filelist_selected_file-manual.html b/test/fixtures/wpt/FileAPI/filelist-section/filelist_selected_file-manual.html new file mode 100644 index 00000000000000..966aadda615589 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/filelist-section/filelist_selected_file-manual.html @@ -0,0 +1,64 @@ + + + + + FileAPI Test: filelist_selected_file + + + + + + + + + +
+ Test steps: download upload.txt to local; select the local upload.txt file to run the test.
      + + + + diff --git a/test/fixtures/wpt/FileAPI/filelist-section/support/upload.txt b/test/fixtures/wpt/FileAPI/filelist-section/support/upload.txt new file mode 100644 index 00000000000000..f45965b711f127 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/filelist-section/support/upload.txt @@ -0,0 +1 @@ +Hello, this is test file for file upload. diff --git a/test/fixtures/wpt/FileAPI/filelist-section/support/upload.zip b/test/fixtures/wpt/FileAPI/filelist-section/support/upload.zip new file mode 100644 index 00000000000000..41bfebe5eed561 Binary files /dev/null and b/test/fixtures/wpt/FileAPI/filelist-section/support/upload.zip differ diff --git a/test/fixtures/wpt/FileAPI/historical.https.html b/test/fixtures/wpt/FileAPI/historical.https.html new file mode 100644 index 00000000000000..4f841f17639459 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/historical.https.html @@ -0,0 +1,65 @@ + + + + + Historical features + + + + + +
      + + + diff --git a/test/fixtures/wpt/FileAPI/idlharness-manual.html b/test/fixtures/wpt/FileAPI/idlharness-manual.html new file mode 100644 index 00000000000000..c1d8b0c7149d75 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/idlharness-manual.html @@ -0,0 +1,45 @@ + + + + + File API manual IDL tests + + + + + + + + +

+ File API manual IDL tests: either download upload.txt and select it below or select an arbitrary local file.
      + + + + diff --git a/test/fixtures/wpt/FileAPI/idlharness.html b/test/fixtures/wpt/FileAPI/idlharness.html new file mode 100644 index 00000000000000..5e0a43f80df3f8 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/idlharness.html @@ -0,0 +1,40 @@ + + + + + File API automated IDL tests + + + + + + + + +

      + + + + + diff --git a/test/fixtures/wpt/FileAPI/idlharness.worker.js b/test/fixtures/wpt/FileAPI/idlharness.worker.js new file mode 100644 index 00000000000000..786b7e4199fb45 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/idlharness.worker.js @@ -0,0 +1,20 @@ +importScripts("/resources/testharness.js"); +importScripts("/resources/WebIDLParser.js", "/resources/idlharness.js"); + +'use strict'; + +// https://w3c.github.io/FileAPI/ + +idl_test( + ['FileAPI'], + ['dom', 'html', 'url'], + idl_array => { + idl_array.add_objects({ + Blob: ['new Blob(["TEST"])'], + File: ['new File(["myFileBits"], "myFileName")'], + FileReader: ['new FileReader()'], + FileReaderSync: ['new FileReaderSync()'] + }); + } +); +done(); diff --git a/test/fixtures/wpt/FileAPI/progress-manual.html b/test/fixtures/wpt/FileAPI/progress-manual.html new file mode 100644 index 00000000000000..b2e03b3eb27387 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/progress-manual.html @@ -0,0 +1,49 @@ + + +Process Events for FileReader + + + + +Please choose one file through this input below.
      + +
      + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.html b/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.html new file mode 100644 index 00000000000000..d65ae9db18a1ff --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.html @@ -0,0 +1,91 @@ + + +FileAPI Test: Blob Determining Encoding + + + + + +
      + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-event-handler-attributes.html b/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-event-handler-attributes.html new file mode 100644 index 00000000000000..86657b5711aff1 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-event-handler-attributes.html @@ -0,0 +1,23 @@ + + +FileReader event handler attributes + + +
      + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-multiple-reads.html b/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-multiple-reads.html new file mode 100644 index 00000000000000..e7279fe4bd445e --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/FileReader-multiple-reads.html @@ -0,0 +1,89 @@ + +FileReader: starting new reads while one is in progress + + + + +
      + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_abort.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_abort.html new file mode 100644 index 00000000000000..940a775d35bf42 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_abort.html @@ -0,0 +1,53 @@ + + + + + FileAPI Test: filereader_abort + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_error.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_error.html new file mode 100644 index 00000000000000..cf4524825b80ca --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_error.html @@ -0,0 +1,35 @@ + + + + + FileAPI Test: filereader_error + + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_events.any.js b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_events.any.js new file mode 100644 index 00000000000000..ac692907d119f7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_events.any.js @@ -0,0 +1,19 @@ +promise_test(async t => { + var reader = new FileReader(); + var eventWatcher = new EventWatcher(t, reader, ['loadstart', 'progress', 'abort', 'error', 'load', 'loadend']); + reader.readAsText(new Blob([])); + await eventWatcher.wait_for('loadstart'); + // No progress event for an empty blob, as no data is loaded. + await eventWatcher.wait_for('load'); + await eventWatcher.wait_for('loadend'); +}, 'events are dispatched in the correct order for an empty blob'); + +promise_test(async t => { + var reader = new FileReader(); + var eventWatcher = new EventWatcher(t, reader, ['loadstart', 'progress', 'abort', 'error', 'load', 'loadend']); + reader.readAsText(new Blob(['a'])); + await eventWatcher.wait_for('loadstart'); + await eventWatcher.wait_for('progress'); + await eventWatcher.wait_for('load'); + await eventWatcher.wait_for('loadend'); +}, 'events are dispatched in the correct order for a non-empty blob'); diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file-manual.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file-manual.html new file mode 100644 index 00000000000000..702ca9afd7b067 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file-manual.html @@ -0,0 +1,69 @@ + + + + + FileAPI Test: filereader_file + + + + + + + +
+ Test step: download blue-100x100.png to local; select the local file (blue-100x100.png) to run the test.
      + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file_img-manual.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file_img-manual.html new file mode 100644 index 00000000000000..fca42c7fceba48 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_file_img-manual.html @@ -0,0 +1,47 @@ + + + + + FileAPI Test: filereader_file_img + + + + + + + +
+ Test step: download blue-100x100.png to local; select the local file (blue-100x100.png) to run the test.
      + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsArrayBuffer.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsArrayBuffer.html new file mode 100644 index 00000000000000..31001a51a0727f --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsArrayBuffer.html @@ -0,0 +1,38 @@ + + + + + FileAPI Test: filereader_readAsArrayBuffer + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsBinaryString.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsBinaryString.html new file mode 100644 index 00000000000000..b550e4d0a96dc7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsBinaryString.html @@ -0,0 +1,32 @@ + + +FileAPI Test: filereader_readAsBinaryString + + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsDataURL.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsDataURL.html new file mode 100644 index 00000000000000..5bc39499a229d1 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsDataURL.html @@ -0,0 +1,51 @@ + + +FileAPI Test: FileReader.readAsDataURL + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsText.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsText.html new file mode 100644 index 00000000000000..7d639d0111473b --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readAsText.html @@ -0,0 +1,51 @@ + + + + + FileAPI Test: filereader_readAsText + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readystate.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readystate.html new file mode 100644 index 00000000000000..1586b8995059f7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_readystate.html @@ -0,0 +1,34 @@ + + + + + FileAPI Test: filereader_readystate + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/filereader_result.html b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_result.html new file mode 100644 index 00000000000000..b80322ed424f83 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/reading-data-section/filereader_result.html @@ -0,0 +1,97 @@ + + + + + FileAPI Test: filereader_result + + + + + + +
      + + + + diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/support/blue-100x100.png b/test/fixtures/wpt/FileAPI/reading-data-section/support/blue-100x100.png new file mode 100644 index 00000000000000..b662fe18ec4797 Binary files /dev/null and b/test/fixtures/wpt/FileAPI/reading-data-section/support/blue-100x100.png differ diff --git a/test/fixtures/wpt/FileAPI/support/Blob.js b/test/fixtures/wpt/FileAPI/support/Blob.js new file mode 100644 index 00000000000000..04069acd3ccbe7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/Blob.js @@ -0,0 +1,70 @@ +'use strict' + +function test_blob(fn, expectations) { + var expected = expectations.expected, + type = expectations.type, + desc = expectations.desc; + + var t = async_test(desc); + t.step(function() { + var blob = fn(); + assert_true(blob instanceof Blob); + assert_false(blob instanceof File); + assert_equals(blob.type, type); + assert_equals(blob.size, expected.length); + + var fr = new FileReader(); + fr.onload = t.step_func_done(function(event) { + assert_equals(this.result, expected); + }, fr); + fr.onerror = t.step_func(function(e) { + assert_unreached("got error event on FileReader"); + }); + fr.readAsText(blob, "UTF-8"); + }); +} + +function test_blob_binary(fn, expectations) { + var expected = expectations.expected, + type = expectations.type, + desc = expectations.desc; + + var t = async_test(desc); + t.step(function() { + var blob = fn(); + assert_true(blob instanceof Blob); + assert_false(blob instanceof File); + assert_equals(blob.type, type); + assert_equals(blob.size, expected.length); + + var fr = new FileReader(); + fr.onload = t.step_func_done(function(event) { + assert_true(this.result instanceof ArrayBuffer, + "Result should be an ArrayBuffer"); + assert_array_equals(new Uint8Array(this.result), expected); + }, fr); + fr.onerror = t.step_func(function(e) { + assert_unreached("got error event on FileReader"); + }); + fr.readAsArrayBuffer(blob); + }); +} + +// Assert that two TypedArray objects have the same byte values +self.assert_equals_typed_array = (array1, array2) => { + const [view1, view2] = [array1, array2].map((array) => { + assert_true(array.buffer instanceof ArrayBuffer, + 'Expect input ArrayBuffers to contain field `buffer`'); + return new DataView(array.buffer, array.byteOffset, array.byteLength); + }); + + assert_equals(view1.byteLength, view2.byteLength, + 'Expect both arrays to be of the same byte length'); + + const byteLength = view1.byteLength; + + for (let i = 0; i < byteLength; ++i) { + assert_equals(view1.getUint8(i), view2.getUint8(i), + `Expect byte at buffer position ${i} to be equal`); + } +} diff --git a/test/fixtures/wpt/FileAPI/support/document-domain-setter.sub.html b/test/fixtures/wpt/FileAPI/support/document-domain-setter.sub.html new file mode 100644 index 00000000000000..61aebdf326679c --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/document-domain-setter.sub.html @@ -0,0 +1,7 @@ + +Relevant/current/blob source page used as a test helper + + diff --git a/test/fixtures/wpt/FileAPI/support/historical-serviceworker.js b/test/fixtures/wpt/FileAPI/support/historical-serviceworker.js new file mode 100644 index 00000000000000..8bd89a23adb70f --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/historical-serviceworker.js @@ -0,0 +1,5 @@ +importScripts('/resources/testharness.js'); + +test(() => { + assert_false('FileReaderSync' in self); +}, '"FileReaderSync" should not be supported in service workers'); diff --git a/test/fixtures/wpt/FileAPI/support/incumbent.sub.html 
b/test/fixtures/wpt/FileAPI/support/incumbent.sub.html new file mode 100644 index 00000000000000..63a81cd3281c46 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/incumbent.sub.html @@ -0,0 +1,22 @@ + +Incumbent page used as a test helper + + + + + + diff --git a/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js new file mode 100644 index 00000000000000..d6adf21ec33795 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js @@ -0,0 +1,282 @@ +'use strict'; + +// See /FileAPI/file/resources/echo-content-escaped.py +function escapeString(string) { + return string.replace(/\\/g, "\\\\").replace( + /[^\x20-\x7E]/g, + (x) => { + let hex = x.charCodeAt(0).toString(16); + if (hex.length < 2) hex = "0" + hex; + return `\\x${hex}`; + }, + ).replace(/\\x0d\\x0a/g, "\r\n"); +} + +// Rationale for this particular test character sequence, which is +// used in filenames and also in file contents: +// +// - ABC~ ensures the string starts with something we can read to +// ensure it is from the correct source; ~ is used because even +// some 1-byte otherwise-ASCII-like parts of ISO-2022-JP +// interpret it differently. +// - ‾¥ are inside a single-byte range of ISO-2022-JP and help +// diagnose problems due to filesystem encoding or locale +// - ≈ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - ¤ is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale; it is also the "simplest" case +// needing substitution in ISO-2022-JP +// - ・ is inside a single-byte range of ISO-2022-JP in some variants +// and helps diagnose problems due to filesystem encoding or locale; +// on the web it is distinct when decoding but unified when encoding +// - ・ is inside a double-byte range of ISO-2022-JP and helps +// diagnose problems due to filesystem encoding or locale +// - • is inside Windows-1252 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. control codes +// - ∙ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - · is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale and also ensures HTML named +// character references (e.g. ·) are not used +// - ☼ is inside IBM437 shadowing C0 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. control codes +// - ★ is inside ISO-2022-JP on a non-Kanji page and makes correct +// output easier to spot +// - 星 is inside ISO-2022-JP on a Kanji page and makes correct +// output easier to spot +// - 🌟 is outside the BMP and makes incorrect surrogate pair +// substitution detectable and ensures substitutions work +// correctly immediately after Kanji 2-byte ISO-2022-JP +// - 星 repeated here ensures the correct codec state is used +// after a non-BMP substitution +// - ★ repeated here also makes correct output easier to spot +// - ☼ is inside IBM437 shadowing C0 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. control codes and also ensures +// substitutions work correctly immediately after non-Kanji +// 2-byte ISO-2022-JP +// - · is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale and also ensures HTML named +// character references (e.g. 
·) are not used +// - ∙ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - • is inside Windows-1252 and again helps diagnose problems +// due to filesystem encoding or locale +// - ・ is inside a double-byte range of ISO-2022-JP and helps +// diagnose problems due to filesystem encoding or locale +// - ・ is inside a single-byte range of ISO-2022-JP in some variants +// and helps diagnose problems due to filesystem encoding or locale; +// on the web it is distinct when decoding but unified when encoding +// - ¤ is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale; again it is a "simple" +// substitution case +// - ≈ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - ¥‾ are inside a single-byte range of ISO-2022-JP and help +// diagnose problems due to filesystem encoding or locale +// - ~XYZ ensures earlier errors don't lead to misencoding of +// simple ASCII +// +// Overall the near-symmetry makes common I18N mistakes like +// off-by-1-after-non-BMP easier to spot. All the characters +// are also allowed in Windows Unicode filenames. +const kTestChars = 'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ'; + +// The kTestFallback* strings represent the expected byte sequence from +// encoding kTestChars with the given encoding with "html" replacement +// mode, isomorphic-decoded. That means, characters that can't be +// encoded in that encoding get HTML-escaped, but no further +// `escapeString`-like escapes are needed. +const kTestFallbackUtf8 = ( + "ABC~\xE2\x80\xBE\xC2\xA5\xE2\x89\x88\xC2\xA4\xEF\xBD\xA5\xE3\x83\xBB\xE2" + + "\x80\xA2\xE2\x88\x99\xC2\xB7\xE2\x98\xBC\xE2\x98\x85\xE6\x98\x9F\xF0\x9F" + + "\x8C\x9F\xE6\x98\x9F\xE2\x98\x85\xE2\x98\xBC\xC2\xB7\xE2\x88\x99\xE2\x80" + + "\xA2\xE3\x83\xBB\xEF\xBD\xA5\xC2\xA4\xE2\x89\x88\xC2\xA5\xE2\x80\xBE~XYZ" +); + +const kTestFallbackIso2022jp = ( + ("ABC~\x1B(J~\\≈¤\x1B$B!&!&\x1B(B•∙·☼\x1B$B!z@1\x1B(B🌟" + + "\x1B$B@1!z\x1B(B☼·∙•\x1B$B!&!&\x1B(B¤≈\x1B(J\\~\x1B(B~XYZ") + .replace(/[^\0-\x7F]/gu, (x) => `&#${x.codePointAt(0)};`) +); + +const kTestFallbackWindows1252 = ( + "ABC~‾\xA5≈\xA4・・\x95∙\xB7☼★星🌟星★☼\xB7∙\x95・・\xA4≈\xA5‾~XYZ".replace( + /[^\0-\xFF]/gu, + (x) => `&#${x.codePointAt(0)};`, + ) +); + +const kTestFallbackXUserDefined = kTestChars.replace( + /[^\0-\x7F]/gu, + (x) => `&#${x.codePointAt(0)};`, +); + +// formPostFileUploadTest - verifies multipart upload structure and +// numeric character reference replacement for filenames, field names, +// and field values using form submission. +// +// Uses /FileAPI/file/resources/echo-content-escaped.py to echo the +// upload POST with controls and non-ASCII bytes escaped. This is done +// because navigations whose response body contains [\0\b\v] may get +// treated as a download, which is not what we want. Use the +// `escapeString` function to replicate that kind of escape (note that +// it takes an isomorphic-decoded string, not a byte sequence). +// +// Fields in the parameter object: +// +// - fileNameSource: purely explanatory and gives a clue about which +// character encoding is the source for the non-7-bit-ASCII parts of +// the fileBaseName, or Unicode if no smaller-than-Unicode source +// contains all the characters. Used in the test name. +// - fileBaseName: the not-necessarily-just-7-bit-ASCII file basename +// used for the constructed test file. Used in the test name. +// - formEncoding: the acceptCharset of the form used to submit the +// test file. Used in the test name. 
+// - expectedEncodedBaseName: the expected formEncoding-encoded +// version of fileBaseName, isomorphic-decoded. That means, characters +// that can't be encoded in that encoding get HTML-escaped, but no +// further `escapeString`-like escapes are needed. +const formPostFileUploadTest = ({ + fileNameSource, + fileBaseName, + formEncoding, + expectedEncodedBaseName, +}) => { + promise_test(async testCase => { + + if (document.readyState !== 'complete') { + await new Promise(resolve => addEventListener('load', resolve)); + } + + const formTargetFrame = Object.assign(document.createElement('iframe'), { + name: 'formtargetframe', + }); + document.body.append(formTargetFrame); + testCase.add_cleanup(() => { + document.body.removeChild(formTargetFrame); + }); + + const form = Object.assign(document.createElement('form'), { + acceptCharset: formEncoding, + action: '/FileAPI/file/resources/echo-content-escaped.py', + method: 'POST', + enctype: 'multipart/form-data', + target: formTargetFrame.name, + }); + document.body.append(form); + testCase.add_cleanup(() => { + document.body.removeChild(form); + }); + + // Used to verify that the browser agrees with the test about + // which form charset is used. + form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: '_charset_', + })); + + // Used to verify that the browser agrees with the test about + // field value replacement and encoding independently of file system + // idiosyncracies. + form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: 'filename', + value: fileBaseName, + })); + + // Same, but with name and value reversed to ensure field names + // get the same treatment. + form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: fileBaseName, + value: 'filename', + })); + + const fileInput = Object.assign(document.createElement('input'), { + type: 'file', + name: 'file', + }); + form.append(fileInput); + + // Removes c:\fakepath\ or other pseudofolder and returns just the + // final component of filePath; allows both / and \ as segment + // delimiters. + const baseNameOfFilePath = filePath => filePath.split(/[\/\\]/).pop(); + await new Promise(resolve => { + const dataTransfer = new DataTransfer; + dataTransfer.items.add( + new File([kTestChars], fileBaseName, {type: 'text/plain'})); + fileInput.files = dataTransfer.files; + // For historical reasons .value will be prefixed with + // c:\fakepath\, but the basename should match the file name + // exposed through the newer .files[0].name API. This check + // verifies that assumption. 
+ assert_equals( + baseNameOfFilePath(fileInput.files[0].name), + baseNameOfFilePath(fileInput.value), + `The basename of the field's value should match its files[0].name`); + form.submit(); + formTargetFrame.onload = resolve; + }); + + const formDataText = formTargetFrame.contentDocument.body.textContent; + const formDataLines = formDataText.split('\n'); + if (formDataLines.length && !formDataLines[formDataLines.length - 1]) { + --formDataLines.length; + } + assert_greater_than( + formDataLines.length, + 2, + `${fileBaseName}: multipart form data must have at least 3 lines: ${ + JSON.stringify(formDataText) + }`); + const boundary = formDataLines[0]; + assert_equals( + formDataLines[formDataLines.length - 1], + boundary + '--', + `${fileBaseName}: multipart form data must end with ${boundary}--: ${ + JSON.stringify(formDataText) + }`); + + const asValue = expectedEncodedBaseName.replace(/\r\n?|\n/g, "\r\n"); + const asName = asValue.replace(/[\r\n"]/g, encodeURIComponent); + const asFilename = expectedEncodedBaseName.replace(/[\r\n"]/g, encodeURIComponent); + + // The response body from echo-content-escaped.py has controls and non-ASCII + // bytes escaped, so any caller-provided field that might contain such bytes + // must be passed to `escapeString`, after any other expected + // transformations. + const expectedText = [ + boundary, + 'Content-Disposition: form-data; name="_charset_"', + '', + formEncoding, + boundary, + 'Content-Disposition: form-data; name="filename"', + '', + // Unlike for names and filenames, multipart/form-data values don't escape + // \r\n linebreaks, and when they're read from an iframe they become \n. + escapeString(asValue).replace(/\r\n/g, "\n"), + boundary, + `Content-Disposition: form-data; name="${escapeString(asName)}"`, + '', + 'filename', + boundary, + `Content-Disposition: form-data; name="file"; ` + + `filename="${escapeString(asFilename)}"`, + 'Content-Type: text/plain', + '', + escapeString(kTestFallbackUtf8), + boundary + '--', + ].join('\n'); + + assert_true( + formDataText.startsWith(expectedText), + `Unexpected multipart-shaped form data received:\n${ + formDataText + }\nExpected:\n${expectedText}`); + }, `Upload ${fileBaseName} (${fileNameSource}) in ${formEncoding} form`); +}; diff --git a/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js new file mode 100644 index 00000000000000..53572ef36c8d1b --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js @@ -0,0 +1,97 @@ +"use strict"; + +const kTestChars = "ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ"; + +// formDataPostFileUploadTest - verifies multipart upload structure and +// numeric character reference replacement for filenames, field names, +// and field values using FormData and fetch(). +// +// Uses /fetch/api/resources/echo-content.py to echo the upload +// POST (unlike in send-file-form-helper.js, here we expect all +// multipart/form-data request bodies to be UTF-8, so we don't need to +// escape controls and non-ASCII bytes). +// +// Fields in the parameter object: +// +// - fileNameSource: purely explanatory and gives a clue about which +// character encoding is the source for the non-7-bit-ASCII parts of +// the fileBaseName, or Unicode if no smaller-than-Unicode source +// contains all the characters. Used in the test name. +// - fileBaseName: the not-necessarily-just-7-bit-ASCII file basename +// used for the constructed test file. Used in the test name. 
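For reference, a plausible invocation of this helper from one of the send-file-formdata pages above; the fileNameSource and fileBaseName values here are illustrative, not taken from the patch:

  formDataPostFileUploadTest({
    fileNameSource: 'ASCII',
    fileBaseName: 'file-for-upload-in-form.txt',
  });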
+const formDataPostFileUploadTest = ({ + fileNameSource, + fileBaseName, +}) => { + promise_test(async (testCase) => { + const formData = new FormData(); + let file = new Blob([kTestChars], { type: "text/plain" }); + try { + // Switch to File in browsers that allow this + file = new File([file], fileBaseName, { type: file.type }); + } catch (ignoredException) { + } + + // Used to verify that the browser agrees with the test about + // field value replacement and encoding independently of file system + // idiosyncracies. + formData.append("filename", fileBaseName); + + // Same, but with name and value reversed to ensure field names + // get the same treatment. + formData.append(fileBaseName, "filename"); + + formData.append("file", file, fileBaseName); + + const formDataText = await (await fetch( + `/fetch/api/resources/echo-content.py`, + { + method: "POST", + body: formData, + }, + )).text(); + const formDataLines = formDataText.split("\r\n"); + if (formDataLines.length && !formDataLines[formDataLines.length - 1]) { + --formDataLines.length; + } + assert_greater_than( + formDataLines.length, + 2, + `${fileBaseName}: multipart form data must have at least 3 lines: ${ + JSON.stringify(formDataText) + }`, + ); + const boundary = formDataLines[0]; + assert_equals( + formDataLines[formDataLines.length - 1], + boundary + "--", + `${fileBaseName}: multipart form data must end with ${boundary}--: ${ + JSON.stringify(formDataText) + }`, + ); + + const asName = fileBaseName.replace(/[\r\n"]/g, encodeURIComponent); + const expectedText = [ + boundary, + 'Content-Disposition: form-data; name="filename"', + "", + fileBaseName, + boundary, + `Content-Disposition: form-data; name="${asName}"`, + "", + "filename", + boundary, + `Content-Disposition: form-data; name="file"; ` + + `filename="${asName}"`, + "Content-Type: text/plain", + "", + kTestChars, + boundary + "--", + ].join("\r\n"); + + assert_true( + formDataText.startsWith(expectedText), + `Unexpected multipart-shaped form data received:\n${formDataText}\nExpected:\n${expectedText}`, + ); + }, `Upload ${fileBaseName} (${fileNameSource}) in fetch with FormData`); +}; diff --git a/test/fixtures/wpt/FileAPI/support/upload.txt b/test/fixtures/wpt/FileAPI/support/upload.txt new file mode 100644 index 00000000000000..5ab2f8a4323aba --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/upload.txt @@ -0,0 +1 @@ +Hello \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/support/url-origin.html b/test/fixtures/wpt/FileAPI/support/url-origin.html new file mode 100644 index 00000000000000..63755113915f9f --- /dev/null +++ b/test/fixtures/wpt/FileAPI/support/url-origin.html @@ -0,0 +1,6 @@ + + diff --git a/test/fixtures/wpt/FileAPI/unicode.html b/test/fixtures/wpt/FileAPI/unicode.html new file mode 100644 index 00000000000000..ce3e3579d7c2c7 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/unicode.html @@ -0,0 +1,46 @@ + + +Blob/Unicode interaction: normalization and encoding + + + diff --git a/test/fixtures/wpt/FileAPI/url/cross-global-revoke.sub.html b/test/fixtures/wpt/FileAPI/url/cross-global-revoke.sub.html new file mode 100644 index 00000000000000..21b8c5bb1986d5 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/cross-global-revoke.sub.html @@ -0,0 +1,61 @@ + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/url/multi-global-origin-serialization.sub.html b/test/fixtures/wpt/FileAPI/url/multi-global-origin-serialization.sub.html new file mode 100644 index 00000000000000..0052b26fa62130 --- /dev/null +++ 
b/test/fixtures/wpt/FileAPI/url/multi-global-origin-serialization.sub.html @@ -0,0 +1,26 @@ + + +Blob URL serialization (specifically the origin) in multi-global situations + + + + + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/url/resources/create-helper.html b/test/fixtures/wpt/FileAPI/url/resources/create-helper.html new file mode 100644 index 00000000000000..fa6cf4e671e835 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/resources/create-helper.html @@ -0,0 +1,7 @@ + + \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/url/resources/create-helper.js b/test/fixtures/wpt/FileAPI/url/resources/create-helper.js new file mode 100644 index 00000000000000..e6344f700ced60 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/resources/create-helper.js @@ -0,0 +1,4 @@ +self.addEventListener('message', e => { + let url = URL.createObjectURL(e.data.blob); + self.postMessage({url: url}); +}); diff --git a/test/fixtures/wpt/FileAPI/url/resources/fetch-tests.js b/test/fixtures/wpt/FileAPI/url/resources/fetch-tests.js new file mode 100644 index 00000000000000..a81ea1e7b1de35 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/resources/fetch-tests.js @@ -0,0 +1,71 @@ +// This method generates a number of tests verifying fetching of blob URLs, +// allowing the same tests to be used both with fetch() and XMLHttpRequest. +// +// |fetch_method| is only used in test names, and should describe the +// (javascript) method being used by the other two arguments (i.e. 'fetch' or 'XHR'). +// +// |fetch_should_succeed| is a callback that is called with the Test and a URL. +// Fetching the URL is expected to succeed. The callback should return a promise +// resolved with whatever contents were fetched. +// +// |fetch_should_fail| similarly is a callback that is called with the Test, a URL +// to fetch, and optionally a method to use to do the fetch. If no method is +// specified the callback should use the 'GET' method. Fetching of these URLs is +// expected to fail, and the callback should return a promise that resolves iff +// fetching did indeed fail. 
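// As a concrete reference, url-with-fetch.any.js further down in this change
// supplies the two callbacks as shown below before calling
// fetch_tests('fetch', fetch_should_succeed, fetch_should_fail):
//
//   function fetch_should_succeed(test, request) {
//     return fetch(request).then(response => response.text());
//   }
//
//   function fetch_should_fail(test, url, method = 'GET') {
//     return promise_rejects_js(test, TypeError, fetch(url, {method: method}));
//   }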
+function fetch_tests(fetch_method, fetch_should_succeed, fetch_should_fail) { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + }, 'Blob URLs can be used in ' + fetch_method); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_succeed(t, url + '#fragment').then(text => { + assert_equals(text, blob_contents); + }); + }, fetch_method + ' with a fragment should succeed'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + URL.revokeObjectURL(url); + + return fetch_should_fail(t, url); + }, fetch_method + ' of a revoked URL should fail'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + URL.revokeObjectURL(url + '#fragment'); + + return fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + }, 'Only exact matches should revoke URLs, using ' + fetch_method); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_fail(t, url + '?querystring'); + }, 'Appending a query string should cause ' + fetch_method + ' to fail'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_fail(t, url + '/path'); + }, 'Appending a path should cause ' + fetch_method + ' to fail'); + + for (const method of ['HEAD', 'POST', 'DELETE', 'OPTIONS', 'PUT', 'CUSTOM']) { + const url = URL.createObjectURL(blob); + + promise_test(t => { + return fetch_should_fail(t, url, method); + }, fetch_method + ' with method "' + method + '" should fail'); + } +} \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.html b/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.html new file mode 100644 index 00000000000000..adf5a014a668d6 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.html @@ -0,0 +1,7 @@ + + \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.js b/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.js new file mode 100644 index 00000000000000..c3e05b64b1a6c8 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/resources/revoke-helper.js @@ -0,0 +1,9 @@ +self.addEventListener('message', e => { + URL.revokeObjectURL(e.data.url); + // Registering a new object URL will make absolutely sure that the revocation + // has propagated. Without this at least in chrome it is possible for the + // below postMessage to arrive at its destination before the revocation has + // been fully processed. 
+ URL.createObjectURL(new Blob([])); + self.postMessage('revoked'); +}); diff --git a/test/fixtures/wpt/FileAPI/url/sandboxed-iframe.html b/test/fixtures/wpt/FileAPI/url/sandboxed-iframe.html new file mode 100644 index 00000000000000..a52939a3eb297c --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/sandboxed-iframe.html @@ -0,0 +1,32 @@ + + +FileAPI Test: Verify behavior of Blob URL in unique origins + + + + + + + diff --git a/test/fixtures/wpt/FileAPI/url/unicode-origin.sub.html b/test/fixtures/wpt/FileAPI/url/unicode-origin.sub.html new file mode 100644 index 00000000000000..2c4921c0344998 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/unicode-origin.sub.html @@ -0,0 +1,23 @@ + + +FileAPI Test: Verify origin of Blob URL + + + + diff --git a/test/fixtures/wpt/FileAPI/url/url-charset.window.js b/test/fixtures/wpt/FileAPI/url/url-charset.window.js new file mode 100644 index 00000000000000..777709b64a50e5 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-charset.window.js @@ -0,0 +1,34 @@ +async_test(t => { + // This could be detected as ISO-2022-JP, in which case there would be no + // bbb` + ], + {type: 'text/html;charset=utf-8'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + t.add_cleanup(() => { + win.close(); + }); + + win.onload = t.step_func_done(() => { + assert_equals(win.document.charset, 'UTF-8'); + }); +}, 'Blob charset should override any auto-detected charset.'); + +async_test(t => { + const blob = new Blob( + [`\n`], + {type: 'text/html;charset=utf-8'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + t.add_cleanup(() => { + win.close(); + }); + + win.onload = t.step_func_done(() => { + assert_equals(win.document.charset, 'UTF-8'); + }); +}, 'Blob charset should override .'); diff --git a/test/fixtures/wpt/FileAPI/url/url-format.any.js b/test/fixtures/wpt/FileAPI/url/url-format.any.js new file mode 100644 index 00000000000000..33732fa61fc3dd --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-format.any.js @@ -0,0 +1,64 @@ +// META: timeout=long +const blob = new Blob(['test']); +const file = new File(['test'], 'name'); + +test(() => { + const url_count = 5000; + let list = []; + + for (let i = 0; i < url_count; ++i) + list.push(URL.createObjectURL(blob)); + + list.sort(); + + for (let i = 1; i < list.length; ++i) + assert_not_equals(list[i], list[i-1], 'generated Blob URLs should be unique'); +}, 'Generated Blob URLs are unique'); + +test(() => { + const url = URL.createObjectURL(blob); + assert_equals(typeof url, 'string'); + assert_true(url.startsWith('blob:')); +}, 'Blob URL starts with "blob:"'); + +test(() => { + const url = URL.createObjectURL(file); + assert_equals(typeof url, 'string'); + assert_true(url.startsWith('blob:')); +}, 'Blob URL starts with "blob:" for Files'); + +test(() => { + const url = URL.createObjectURL(blob); + assert_equals(new URL(url).origin, location.origin); + if (location.origin !== 'null') { + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Origin of Blob URL matches our origin'); + +test(() => { + const url = URL.createObjectURL(blob); + const url_record = new URL(url); + assert_equals(url_record.protocol, 'blob:'); + assert_equals(url_record.origin, location.origin); + assert_equals(url_record.host, '', 'host should be an empty string'); + assert_equals(url_record.port, '', 'port should be an empty string'); + const uuid_path_re = /\/[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; + 
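  // (The pathname of a blob: URL is expected to end in a UUID; the regular
  // expression above matches the 8-4-4-4-12 hex groups, including a valid
  // version digit [1-5] and variant nibble [89ab], at the end of the path.)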
assert_true(uuid_path_re.test(url_record.pathname), 'Path must end with a valid UUID'); + if (location.origin !== 'null') { + const nested_url = new URL(url_record.pathname); + assert_equals(nested_url.origin, location.origin); + assert_equals(nested_url.pathname.search(uuid_path_re), 0, 'Path must be a valid UUID'); + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Blob URL parses correctly'); + +test(() => { + const url = URL.createObjectURL(file); + assert_equals(new URL(url).origin, location.origin); + if (location.origin !== 'null') { + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Origin of Blob URL matches our origin for Files'); diff --git a/test/fixtures/wpt/FileAPI/url/url-in-tags-revoke.window.js b/test/fixtures/wpt/FileAPI/url/url-in-tags-revoke.window.js new file mode 100644 index 00000000000000..1cdad79f7e34e0 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-in-tags-revoke.window.js @@ -0,0 +1,115 @@ +// META: timeout=long +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + URL.revokeObjectURL(url); + + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); +}, 'Fetching a blob URL immediately before revoking it works in an iframe.'); + +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', '/common/blank.html'); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func(() => { + frame.contentWindow.location = url; + URL.revokeObjectURL(url); + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); + }); +}, 'Fetching a blob URL immediately before revoking it works in an iframe navigation.'); + +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + URL.revokeObjectURL(url); + add_completion_callback(() => { win.close(); }); + + win.onload = t.step_func_done(() => { + assert_equals(win.test_result, run_result); + }); +}, 'Opening a blob URL in a new window immediately before revoking it works.'); + +function receive_message_on_channel(t, channel_name) { + const channel = new BroadcastChannel(channel_name); + return new Promise(resolve => { + channel.addEventListener('message', t.step_func(e => { + resolve(e.data); + })); + }); +} + +function window_contents_for_channel(channel_name) { + return '\n' + + ''; +} + +async_test(t => { + const channel_name = 'noopener-window-test'; + const blob = new Blob([window_contents_for_channel(channel_name)], {type: 'text/html'}); + receive_message_on_channel(t, channel_name).then(t.step_func_done(t => { + assert_equals(t, 'foobar'); + })); + const url = URL.createObjectURL(blob); + const win = window.open(); + win.opener = null; + 
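  // (Nulling out the opener before assigning the location is what makes this
  // the "noopener about:blank window" case named in the test description; the
  // blob URL is navigated to and then immediately revoked.)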
win.location = url; + URL.revokeObjectURL(url); +}, 'Opening a blob URL in a noopener about:blank window immediately before revoking it works.'); + +async_test(t => { + const run_result = 'test_script_OK'; + const blob_contents = 'window.script_test_result = "' + run_result + '";'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + + const e = document.createElement('script'); + e.setAttribute('src', url); + e.onload = t.step_func_done(() => { + assert_equals(window.script_test_result, run_result); + }); + + document.body.appendChild(e); + URL.revokeObjectURL(url); +}, 'Fetching a blob URL immediately before revoking it works in '; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); +}, 'Blob URLs can be used in iframes, and are treated same origin'); + +async_test(t => { + const blob_contents = '\n\n' + + '\n' + + '\n' + + '
      \n' + + '
      '; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url + '#block2'); + document.body.appendChild(frame); + frame.contentWindow.onscroll = t.step_func_done(() => { + assert_equals(frame.contentWindow.scrollY, 5000); + }); +}, 'Blob URL fragment is implemented.'); diff --git a/test/fixtures/wpt/FileAPI/url/url-lifetime.html b/test/fixtures/wpt/FileAPI/url/url-lifetime.html new file mode 100644 index 00000000000000..ad5d667193a3d0 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-lifetime.html @@ -0,0 +1,56 @@ + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/FileAPI/url/url-reload.window.js b/test/fixtures/wpt/FileAPI/url/url-reload.window.js new file mode 100644 index 00000000000000..d333b3a74aa82c --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-reload.window.js @@ -0,0 +1,36 @@ +function blob_url_reload_test(t, revoke_before_reload) { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func(() => { + if (revoke_before_reload) + URL.revokeObjectURL(url); + assert_equals(frame.contentWindow.test_result, run_result); + frame.contentWindow.test_result = null; + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); + // Slight delay before reloading to ensure revoke actually has had a chance + // to be processed. + t.step_timeout(() => { + frame.contentWindow.location.reload(); + }, 250); + }); +} + +async_test(t => { + blob_url_reload_test(t, false); +}, 'Reloading a blob URL succeeds.'); + + +async_test(t => { + blob_url_reload_test(t, true); +}, 'Reloading a blob URL succeeds even if the URL was revoked.'); diff --git a/test/fixtures/wpt/FileAPI/url/url-with-fetch.any.js b/test/fixtures/wpt/FileAPI/url/url-with-fetch.any.js new file mode 100644 index 00000000000000..9bd8d383df4e1e --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-with-fetch.any.js @@ -0,0 +1,53 @@ +// META: script=resources/fetch-tests.js + +function fetch_should_succeed(test, request) { + return fetch(request).then(response => response.text()); +} + +function fetch_should_fail(test, url, method = 'GET') { + return promise_rejects_js(test, TypeError, fetch(url, {method: method})); +} + +fetch_tests('fetch', fetch_should_succeed, fetch_should_fail); + +promise_test(t => { + const blob_contents = 'test blob contents'; + const blob_type = 'image/png'; + const blob = new Blob([blob_contents], {type: blob_type}); + const url = URL.createObjectURL(blob); + + return fetch(url).then(response => { + assert_equals(response.headers.get('Content-Type'), blob_type); + }); +}, 'fetch should return Content-Type from Blob'); + +promise_test(t => { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + const request = new Request(url); + + // Revoke the object URL. Request should take a reference to the blob as + // soon as it receives it in open(), so the request succeeds even though we + // revoke the URL before calling fetch(). 
+ URL.revokeObjectURL(url); + + return fetch_should_succeed(t, request).then(text => { + assert_equals(text, blob_contents); + }); +}, 'Revoke blob URL after creating Request, will fetch'); + +promise_test(function(t) { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + + const result = fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + + // Revoke the object URL. fetch should have already resolved the blob URL. + URL.revokeObjectURL(url); + + return result; +}, 'Revoke blob URL after calling fetch, fetch should succeed'); diff --git a/test/fixtures/wpt/FileAPI/url/url-with-xhr.any.js b/test/fixtures/wpt/FileAPI/url/url-with-xhr.any.js new file mode 100644 index 00000000000000..29d83080ab5845 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url-with-xhr.any.js @@ -0,0 +1,68 @@ +// META: script=resources/fetch-tests.js + +function xhr_should_succeed(test, url) { + return new Promise((resolve, reject) => { + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + xhr.onload = test.step_func(() => { + assert_equals(xhr.status, 200); + assert_equals(xhr.statusText, 'OK'); + resolve(xhr.response); + }); + xhr.onerror = () => reject('Got unexpected error event'); + xhr.send(); + }); +} + +function xhr_should_fail(test, url, method = 'GET') { + const xhr = new XMLHttpRequest(); + xhr.open(method, url); + const result1 = new Promise((resolve, reject) => { + xhr.onload = () => reject('Got unexpected load event'); + xhr.onerror = resolve; + }); + const result2 = new Promise(resolve => { + xhr.onreadystatechange = test.step_func(() => { + if (xhr.readyState !== xhr.DONE) return; + assert_equals(xhr.status, 0); + resolve(); + }); + }); + xhr.send(); + return Promise.all([result1, result2]); +} + +fetch_tests('XHR', xhr_should_succeed, xhr_should_fail); + +async_test(t => { + const blob_contents = 'test blob contents'; + const blob_type = 'image/png'; + const blob = new Blob([blob_contents], {type: blob_type}); + const url = URL.createObjectURL(blob); + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + xhr.onloadend = t.step_func_done(() => { + assert_equals(xhr.getResponseHeader('Content-Type'), blob_type); + }); + xhr.send(); +}, 'XHR should return Content-Type from Blob'); + +async_test(t => { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + + // Revoke the object URL. XHR should take a reference to the blob as soon as + // it receives it in open(), so the request succeeds even though we revoke the + // URL before calling send(). + URL.revokeObjectURL(url); + + xhr.onload = t.step_func_done(() => { + assert_equals(xhr.response, blob_contents); + }); + xhr.onerror = t.unreached_func('Got unexpected error event'); + + xhr.send(); +}, 'Revoke blob URL after open(), will fetch'); diff --git a/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file-manual.html b/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file-manual.html new file mode 100644 index 00000000000000..7ae32512e07c76 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file-manual.html @@ -0,0 +1,45 @@ + + +FileAPI Test: Creating Blob URL with File + + + + + + +
Test steps:
1. Download blue96x96.png to local.
2.
3. Select the local file (blue96x96.png) to run the test.
4.
      + + + diff --git a/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file_img-manual.html b/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file_img-manual.html new file mode 100644 index 00000000000000..534c1de9968da8 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url_createobjecturl_file_img-manual.html @@ -0,0 +1,28 @@ + + +FileAPI Test: Creating Blob URL with File as image source + + + +
Test steps:
1. Download blue96x96.png to local.
2.
3. Select the local file (blue96x96.png) to run the test.
4.

Pass/fail criteria:
Test passes if there is a filled blue square.
      + + + diff --git a/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img-ref.html b/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img-ref.html new file mode 100644 index 00000000000000..7d7390442d3631 --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img-ref.html @@ -0,0 +1,12 @@ + + +FileAPI Reference File + + + +

Test passes if there is a filled blue square.
      + diff --git a/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img.html b/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img.html new file mode 100644 index 00000000000000..468dcb086d770a --- /dev/null +++ b/test/fixtures/wpt/FileAPI/url/url_xmlhttprequest_img.html @@ -0,0 +1,27 @@ + + + +FileAPI Test: Creating Blob URL via XMLHttpRequest as image source + + + + +

Test passes if there is a filled blue square.
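Taken together, the blob URL fixtures above pin down the matching rules for fetching and revoking blob: URLs. The following condensed sketch restates the behaviour they assert; it is written for a browser context (Node.js only vendors these files as WPT fixtures here), and the variable names are illustrative:

    // Inside an async function, in a browser context:
    const url = URL.createObjectURL(new Blob(['test blob contents']));
    await fetch(url + '#fragment');          // succeeds: fragments are ignored
    URL.revokeObjectURL(url + '#fragment');  // no-op: only an exact match revokes
    await fetch(url);                        // still succeeds
    try { await fetch(url + '?query'); } catch {}  // appending a query always fails
    URL.revokeObjectURL(url);
    try { await fetch(url); } catch {}       // fails: the URL has been revoked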
      + + + + diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index fc609bfca90b5a..4a5d73c742acea 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -20,6 +20,7 @@ Last update: - hr-time: https://github.com/web-platform-tests/wpt/tree/a5d1774ecf/hr-time - common: https://github.com/web-platform-tests/wpt/tree/4dacb6e2ff/common - dom/abort: https://github.com/web-platform-tests/wpt/tree/7caa3de747/dom/abort +- FileAPI: https://github.com/web-platform-tests/wpt/tree/d9d921b8f9/FileAPI [Web Platform Tests]: https://github.com/web-platform-tests/wpt [`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/master/docs/git-node.md#git-node-wpt diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 3cbb5f8f99bbdb..20fed894db94b6 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -38,5 +38,9 @@ "dom/abort": { "commit": "7caa3de7471cf19b78ee9efa313c7341a462b5e3", "path": "dom/abort" + }, + "FileAPI": { + "commit": "d9d921b8f9235e0d2ec92672040c0ccfc8262e21", + "path": "FileAPI" } } \ No newline at end of file diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index ff714c7cabfd04..f5bb33cfa64915 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -36,9 +36,6 @@ test-async-hooks-http-parser-destroy: PASS,FLAKY # https://github.com/nodejs/node/pull/31178 test-crypto-dh-stateless: SKIP test-crypto-keygen: SKIP -# https://github.com/nodejs/node/issues/36847 -test-cluster-bind-privileged-port: PASS,FLAKY -test-cluster-shared-handle-bind-privileged-port: PASS,FLAKY [$system==solaris] # Also applies to SmartOS @@ -72,3 +69,7 @@ test-net-write-after-end-nt: SKIP test-tls-env-extra-ca: SKIP # https://github.com/nodejs/node/pull/34209 test-dgram-error-message-address: SKIP +# https://github.com/nodejs/node/issues/36929 +test-crypto-secure-heap: PASS,FLAKY +# https://github.com/nodejs/node/issues/36925 +test-fs-read-type: PASS,FLAKY diff --git a/test/parallel/test-blob.js b/test/parallel/test-blob.js new file mode 100644 index 00000000000000..795d4bb6fbdff5 --- /dev/null +++ b/test/parallel/test-blob.js @@ -0,0 +1,186 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { Blob } = require('buffer'); + +{ + const b = new Blob(); + assert.strictEqual(b.size, 0); + assert.strictEqual(b.type, ''); +} + +assert.throws(() => new Blob(false), { + code: 'ERR_INVALID_ARG_TYPE' +}); + +assert.throws(() => new Blob('hello'), { + code: 'ERR_INVALID_ARG_TYPE' +}); + +assert.throws(() => new Blob({}), { + code: 'ERR_INVALID_ARG_TYPE' +}); + +assert.throws(() => new Blob(['test', 1]), { + code: 'ERR_INVALID_ARG_TYPE' +}); + +{ + const b = new Blob([]); + assert(b); + assert.strictEqual(b.size, 0); + assert.strictEqual(b.type, ''); + + b.arrayBuffer().then(common.mustCall((ab) => { + assert.deepStrictEqual(ab, new ArrayBuffer(0)); + })); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, ''); + })); + const c = b.slice(); + assert.strictEqual(c.size, 0); +} + +{ + assert.throws(() => new Blob([], { type: 1 }), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => new Blob([], { type: false }), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => new Blob([], { type: 
{} }), { + code: 'ERR_INVALID_ARG_TYPE' + }); +} + +{ + const b = new Blob(['616263'], { encoding: 'hex', type: 'foo' }); + assert.strictEqual(b.size, 3); + assert.strictEqual(b.type, 'foo'); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'abc'); + })); +} + +{ + const b = new Blob([Buffer.from('abc')]); + assert.strictEqual(b.size, 3); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'abc'); + })); +} + +{ + const b = new Blob([new ArrayBuffer(3)]); + assert.strictEqual(b.size, 3); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, '\0\0\0'); + })); +} + +{ + const b = new Blob([new Uint8Array(3)]); + assert.strictEqual(b.size, 3); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, '\0\0\0'); + })); +} + +{ + const b = new Blob([new Blob(['abc'])]); + assert.strictEqual(b.size, 3); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'abc'); + })); +} + +{ + const b = new Blob(['hello', Buffer.from('world')]); + assert.strictEqual(b.size, 10); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'helloworld'); + })); +} + +{ + const b = new Blob( + [ + 'h', + 'e', + 'l', + 'lo', + Buffer.from('world') + ]); + assert.strictEqual(b.size, 10); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'helloworld'); + })); +} + +{ + const b = new Blob(['hello', Buffer.from('world')]); + assert.strictEqual(b.size, 10); + assert.strictEqual(b.type, ''); + + const c = b.slice(1, -1, 'foo'); + assert.strictEqual(c.type, 'foo'); + c.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'elloworl'); + })); + + const d = c.slice(1, -1); + d.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'llowor'); + })); + + const e = d.slice(1, -1); + e.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'lowo'); + })); + + const f = e.slice(1, -1); + f.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'ow'); + })); + + const g = f.slice(1, -1); + assert.strictEqual(g.type, 'foo'); + g.text().then(common.mustCall((text) => { + assert.strictEqual(text, ''); + })); + + assert.strictEqual(b.size, 10); + assert.strictEqual(b.type, ''); + + assert.throws(() => b.slice(-1, 1), { + code: 'ERR_OUT_OF_RANGE' + }); + assert.throws(() => b.slice(1, 100), { + code: 'ERR_OUT_OF_RANGE' + }); + + assert.throws(() => b.slice(1, 2, false), { + code: 'ERR_INVALID_ARG_TYPE' + }); +} + +{ + const b = new Blob([Buffer.from('hello'), Buffer.from('world')]); + const mc = new MessageChannel(); + mc.port1.onmessage = common.mustCall(({ data }) => { + data.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'helloworld'); + })); + mc.port1.close(); + }); + mc.port2.postMessage(b); + b.text().then(common.mustCall((text) => { + assert.strictEqual(text, 'helloworld'); + })); +} + +{ + const b = new Blob(['hello'], { type: '\x01' }); + assert.strictEqual(b.type, ''); +} diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index 0887b8a48362f6..105bfb10866499 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -101,6 +101,7 @@ const expectedModules = new Set([ 'NativeModule internal/vm/module', 'NativeModule internal/worker/io', 'NativeModule internal/worker/js_transferable', + 'NativeModule internal/blob', 'NativeModule path', 'NativeModule stream', 'NativeModule timers', diff --git a/test/parallel/test-buffer-alloc.js 
b/test/parallel/test-buffer-alloc.js index c72e286f3f0e2c..1368136abe8f22 100644 --- a/test/parallel/test-buffer-alloc.js +++ b/test/parallel/test-buffer-alloc.js @@ -293,14 +293,40 @@ Buffer.alloc(1).write('', 1, 0); // Test toString('base64') // assert.strictEqual((Buffer.from('Man')).toString('base64'), 'TWFu'); +assert.strictEqual((Buffer.from('Woman')).toString('base64'), 'V29tYW4='); + +// +// Test toString('base64url') +// +assert.strictEqual((Buffer.from('Man')).toString('base64url'), 'TWFu'); +assert.strictEqual((Buffer.from('Woman')).toString('base64url'), 'V29tYW4'); { - // Test that regular and URL-safe base64 both work + // Test that regular and URL-safe base64 both work both ways const expected = [0xff, 0xff, 0xbe, 0xff, 0xef, 0xbf, 0xfb, 0xef, 0xff]; assert.deepStrictEqual(Buffer.from('//++/++/++//', 'base64'), Buffer.from(expected)); assert.deepStrictEqual(Buffer.from('__--_--_--__', 'base64'), Buffer.from(expected)); + assert.deepStrictEqual(Buffer.from('//++/++/++//', 'base64url'), + Buffer.from(expected)); + assert.deepStrictEqual(Buffer.from('__--_--_--__', 'base64url'), + Buffer.from(expected)); +} + +const base64flavors = ['base64', 'base64url']; + +{ + // Test that regular and URL-safe base64 both work both ways with padding + const expected = [0xff, 0xff, 0xbe, 0xff, 0xef, 0xbf, 0xfb, 0xef, 0xff, 0xfb]; + assert.deepStrictEqual(Buffer.from('//++/++/++//+w==', 'base64'), + Buffer.from(expected)); + assert.deepStrictEqual(Buffer.from('//++/++/++//+w==', 'base64'), + Buffer.from(expected)); + assert.deepStrictEqual(Buffer.from('//++/++/++//+w==', 'base64url'), + Buffer.from(expected)); + assert.deepStrictEqual(Buffer.from('//++/++/++//+w==', 'base64url'), + Buffer.from(expected)); } { @@ -317,137 +343,182 @@ assert.strictEqual((Buffer.from('Man')).toString('base64'), 'TWFu'); 'dWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZ' + 'GdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm' + '5hbCBwbGVhc3VyZS4='; - assert.strictEqual((Buffer.from(quote)).toString('base64'), expected); - - let b = Buffer.allocUnsafe(1024); - let bytesWritten = b.write(expected, 0, 'base64'); - assert.strictEqual(quote.length, bytesWritten); - assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); - - // Check that the base64 decoder ignores whitespace - const expectedWhite = `${expected.slice(0, 60)} \n` + - `${expected.slice(60, 120)} \n` + - `${expected.slice(120, 180)} \n` + - `${expected.slice(180, 240)} \n` + - `${expected.slice(240, 300)}\n` + - `${expected.slice(300, 360)}\n`; - b = Buffer.allocUnsafe(1024); - bytesWritten = b.write(expectedWhite, 0, 'base64'); - assert.strictEqual(quote.length, bytesWritten); - assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); - - // Check that the base64 decoder on the constructor works - // even in the presence of whitespace. 
- b = Buffer.from(expectedWhite, 'base64'); - assert.strictEqual(quote.length, b.length); - assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); - - // Check that the base64 decoder ignores illegal chars - const expectedIllegal = expected.slice(0, 60) + ' \x80' + - expected.slice(60, 120) + ' \xff' + - expected.slice(120, 180) + ' \x00' + - expected.slice(180, 240) + ' \x98' + - expected.slice(240, 300) + '\x03' + - expected.slice(300, 360); - b = Buffer.from(expectedIllegal, 'base64'); - assert.strictEqual(quote.length, b.length); - assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); + assert.strictEqual(Buffer.from(quote).toString('base64'), expected); + assert.strictEqual( + Buffer.from(quote).toString('base64url'), + expected.replaceAll('+', '-').replaceAll('/', '_').replaceAll('=', '') + ); + + base64flavors.forEach((encoding) => { + let b = Buffer.allocUnsafe(1024); + let bytesWritten = b.write(expected, 0, encoding); + assert.strictEqual(quote.length, bytesWritten); + assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); + + // Check that the base64 decoder ignores whitespace + const expectedWhite = `${expected.slice(0, 60)} \n` + + `${expected.slice(60, 120)} \n` + + `${expected.slice(120, 180)} \n` + + `${expected.slice(180, 240)} \n` + + `${expected.slice(240, 300)}\n` + + `${expected.slice(300, 360)}\n`; + b = Buffer.allocUnsafe(1024); + bytesWritten = b.write(expectedWhite, 0, encoding); + assert.strictEqual(quote.length, bytesWritten); + assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); + + // Check that the base64 decoder on the constructor works + // even in the presence of whitespace. + b = Buffer.from(expectedWhite, encoding); + assert.strictEqual(quote.length, b.length); + assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); + + // Check that the base64 decoder ignores illegal chars + const expectedIllegal = expected.slice(0, 60) + ' \x80' + + expected.slice(60, 120) + ' \xff' + + expected.slice(120, 180) + ' \x00' + + expected.slice(180, 240) + ' \x98' + + expected.slice(240, 300) + '\x03' + + expected.slice(300, 360); + b = Buffer.from(expectedIllegal, encoding); + assert.strictEqual(quote.length, b.length); + assert.strictEqual(quote, b.toString('ascii', 0, quote.length)); + }); } -assert.strictEqual(Buffer.from('', 'base64').toString(), ''); -assert.strictEqual(Buffer.from('K', 'base64').toString(), ''); - -// multiple-of-4 with padding -assert.strictEqual(Buffer.from('Kg==', 'base64').toString(), '*'); -assert.strictEqual(Buffer.from('Kio=', 'base64').toString(), '*'.repeat(2)); -assert.strictEqual(Buffer.from('Kioq', 'base64').toString(), '*'.repeat(3)); -assert.strictEqual(Buffer.from('KioqKg==', 'base64').toString(), '*'.repeat(4)); -assert.strictEqual(Buffer.from('KioqKio=', 'base64').toString(), '*'.repeat(5)); -assert.strictEqual(Buffer.from('KioqKioq', 'base64').toString(), '*'.repeat(6)); -assert.strictEqual(Buffer.from('KioqKioqKg==', 'base64').toString(), - '*'.repeat(7)); -assert.strictEqual(Buffer.from('KioqKioqKio=', 'base64').toString(), - '*'.repeat(8)); -assert.strictEqual(Buffer.from('KioqKioqKioq', 'base64').toString(), - '*'.repeat(9)); -assert.strictEqual(Buffer.from('KioqKioqKioqKg==', 'base64').toString(), - '*'.repeat(10)); -assert.strictEqual(Buffer.from('KioqKioqKioqKio=', 'base64').toString(), - '*'.repeat(11)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioq', 'base64').toString(), - '*'.repeat(12)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKg==', 'base64').toString(), 
- '*'.repeat(13)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKio=', 'base64').toString(), - '*'.repeat(14)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioq', 'base64').toString(), - '*'.repeat(15)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKg==', 'base64').toString(), - '*'.repeat(16)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKio=', 'base64').toString(), - '*'.repeat(17)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioq', 'base64').toString(), - '*'.repeat(18)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKg==', - 'base64').toString(), - '*'.repeat(19)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKio=', - 'base64').toString(), - '*'.repeat(20)); - -// No padding, not a multiple of 4 -assert.strictEqual(Buffer.from('Kg', 'base64').toString(), '*'); -assert.strictEqual(Buffer.from('Kio', 'base64').toString(), '*'.repeat(2)); -assert.strictEqual(Buffer.from('KioqKg', 'base64').toString(), '*'.repeat(4)); -assert.strictEqual(Buffer.from('KioqKio', 'base64').toString(), '*'.repeat(5)); -assert.strictEqual(Buffer.from('KioqKioqKg', 'base64').toString(), - '*'.repeat(7)); -assert.strictEqual(Buffer.from('KioqKioqKio', 'base64').toString(), - '*'.repeat(8)); -assert.strictEqual(Buffer.from('KioqKioqKioqKg', 'base64').toString(), - '*'.repeat(10)); -assert.strictEqual(Buffer.from('KioqKioqKioqKio', 'base64').toString(), - '*'.repeat(11)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKg', 'base64').toString(), - '*'.repeat(13)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKio', 'base64').toString(), - '*'.repeat(14)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKg', 'base64').toString(), - '*'.repeat(16)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKio', 'base64').toString(), - '*'.repeat(17)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKg', - 'base64').toString(), - '*'.repeat(19)); -assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKio', - 'base64').toString(), - '*'.repeat(20)); +base64flavors.forEach((encoding) => { + assert.strictEqual(Buffer.from('', encoding).toString(), ''); + assert.strictEqual(Buffer.from('K', encoding).toString(), ''); + + // multiple-of-4 with padding + assert.strictEqual(Buffer.from('Kg==', encoding).toString(), '*'); + assert.strictEqual(Buffer.from('Kio=', encoding).toString(), '*'.repeat(2)); + assert.strictEqual(Buffer.from('Kioq', encoding).toString(), '*'.repeat(3)); + assert.strictEqual( + Buffer.from('KioqKg==', encoding).toString(), '*'.repeat(4)); + assert.strictEqual( + Buffer.from('KioqKio=', encoding).toString(), '*'.repeat(5)); + assert.strictEqual( + Buffer.from('KioqKioq', encoding).toString(), '*'.repeat(6)); + assert.strictEqual(Buffer.from('KioqKioqKg==', encoding).toString(), + '*'.repeat(7)); + assert.strictEqual(Buffer.from('KioqKioqKio=', encoding).toString(), + '*'.repeat(8)); + assert.strictEqual(Buffer.from('KioqKioqKioq', encoding).toString(), + '*'.repeat(9)); + assert.strictEqual(Buffer.from('KioqKioqKioqKg==', encoding).toString(), + '*'.repeat(10)); + assert.strictEqual(Buffer.from('KioqKioqKioqKio=', encoding).toString(), + '*'.repeat(11)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioq', encoding).toString(), + '*'.repeat(12)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKg==', encoding).toString(), + '*'.repeat(13)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKio=', encoding).toString(), + '*'.repeat(14)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioq', encoding).toString(), + 
'*'.repeat(15)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKg==', encoding).toString(), + '*'.repeat(16)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKio=', encoding).toString(), + '*'.repeat(17)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKioq', encoding).toString(), + '*'.repeat(18)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKg==', + encoding).toString(), + '*'.repeat(19)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKioqKio=', + encoding).toString(), + '*'.repeat(20)); + + // No padding, not a multiple of 4 + assert.strictEqual(Buffer.from('Kg', encoding).toString(), '*'); + assert.strictEqual(Buffer.from('Kio', encoding).toString(), '*'.repeat(2)); + assert.strictEqual(Buffer.from('KioqKg', encoding).toString(), '*'.repeat(4)); + assert.strictEqual( + Buffer.from('KioqKio', encoding).toString(), '*'.repeat(5)); + assert.strictEqual(Buffer.from('KioqKioqKg', encoding).toString(), + '*'.repeat(7)); + assert.strictEqual(Buffer.from('KioqKioqKio', encoding).toString(), + '*'.repeat(8)); + assert.strictEqual(Buffer.from('KioqKioqKioqKg', encoding).toString(), + '*'.repeat(10)); + assert.strictEqual(Buffer.from('KioqKioqKioqKio', encoding).toString(), + '*'.repeat(11)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKg', encoding).toString(), + '*'.repeat(13)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKio', encoding).toString(), + '*'.repeat(14)); + assert.strictEqual(Buffer.from('KioqKioqKioqKioqKioqKg', encoding).toString(), + '*'.repeat(16)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKio', encoding).toString(), + '*'.repeat(17)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKioqKg', encoding).toString(), + '*'.repeat(19)); + assert.strictEqual( + Buffer.from('KioqKioqKioqKioqKioqKioqKio', encoding).toString(), + '*'.repeat(20)); +}); // Handle padding graciously, multiple-of-4 or not assert.strictEqual( Buffer.from('72INjkR5fchcxk9+VgdGPFJDxUBFR5/rMFsghgxADiw==', 'base64').length, 32 ); +assert.strictEqual( + Buffer.from('72INjkR5fchcxk9-VgdGPFJDxUBFR5_rMFsghgxADiw==', 'base64url') + .length, + 32 +); assert.strictEqual( Buffer.from('72INjkR5fchcxk9+VgdGPFJDxUBFR5/rMFsghgxADiw=', 'base64').length, 32 ); +assert.strictEqual( + Buffer.from('72INjkR5fchcxk9-VgdGPFJDxUBFR5_rMFsghgxADiw=', 'base64url') + .length, + 32 +); assert.strictEqual( Buffer.from('72INjkR5fchcxk9+VgdGPFJDxUBFR5/rMFsghgxADiw', 'base64').length, 32 ); +assert.strictEqual( + Buffer.from('72INjkR5fchcxk9-VgdGPFJDxUBFR5_rMFsghgxADiw', 'base64url') + .length, + 32 +); assert.strictEqual( Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg==', 'base64').length, 31 ); +assert.strictEqual( + Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg==', 'base64url') + .length, + 31 +); assert.strictEqual( Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg=', 'base64').length, 31 ); +assert.strictEqual( + Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg=', 'base64url') + .length, + 31 +); assert.strictEqual( Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg', 'base64').length, 31 ); +assert.strictEqual( + Buffer.from('w69jACy6BgZmaFvv96HG6MYksWytuZu3T1FvGnulPg', 'base64url').length, + 31 +); { // This string encodes single '.' character in UTF-16 @@ -459,6 +530,16 @@ assert.strictEqual( assert.strictEqual(dot.toString('base64'), '//4uAA=='); } +{ +// This string encodes single '.' 
character in UTF-16 + const dot = Buffer.from('//4uAA', 'base64url'); + assert.strictEqual(dot[0], 0xff); + assert.strictEqual(dot[1], 0xfe); + assert.strictEqual(dot[2], 0x2e); + assert.strictEqual(dot[3], 0x00); + assert.strictEqual(dot.toString('base64url'), '__4uAA'); +} + { // Writing base64 at a position > 0 should not mangle the result. // @@ -474,6 +555,21 @@ assert.strictEqual( 'Madness?! This is node.js!'); } +{ + // Writing base64url at a position > 0 should not mangle the result. + // + // https://github.com/joyent/node/issues/402 + const segments = ['TWFkbmVzcz8h', 'IFRoaXM', 'IGlz', 'IG5vZGUuanMh']; + const b = Buffer.allocUnsafe(64); + let pos = 0; + + for (let i = 0; i < segments.length; ++i) { + pos += b.write(segments[i], pos, 'base64url'); + } + assert.strictEqual(b.toString('latin1', 0, pos), + 'Madness?! This is node.js!'); +} + // Regression test for https://github.com/nodejs/node/issues/3496. assert.strictEqual(Buffer.from('=bad'.repeat(1e4), 'base64').length, 0); diff --git a/test/parallel/test-buffer-bytelength.js b/test/parallel/test-buffer-bytelength.js index b5264ba092ce1e..2355e4e0a0b442 100644 --- a/test/parallel/test-buffer-bytelength.js +++ b/test/parallel/test-buffer-bytelength.js @@ -91,9 +91,19 @@ assert.strictEqual(Buffer.byteLength('aGkk', 'base64'), 3); assert.strictEqual( Buffer.byteLength('bHNrZGZsa3NqZmtsc2xrZmFqc2RsZmtqcw==', 'base64'), 25 ); +// base64url +assert.strictEqual(Buffer.byteLength('aGVsbG8gd29ybGQ', 'base64url'), 11); +assert.strictEqual(Buffer.byteLength('aGVsbG8gd29ybGQ', 'BASE64URL'), 11); +assert.strictEqual(Buffer.byteLength('bm9kZS5qcyByb2NrcyE', 'base64url'), 14); +assert.strictEqual(Buffer.byteLength('aGkk', 'base64url'), 3); +assert.strictEqual( + Buffer.byteLength('bHNrZGZsa3NqZmtsc2xrZmFqc2RsZmtqcw', 'base64url'), 25 +); // special padding assert.strictEqual(Buffer.byteLength('aaa=', 'base64'), 2); assert.strictEqual(Buffer.byteLength('aaaa==', 'base64'), 3); +assert.strictEqual(Buffer.byteLength('aaa=', 'base64url'), 2); +assert.strictEqual(Buffer.byteLength('aaaa==', 'base64url'), 3); assert.strictEqual(Buffer.byteLength('Il était tué'), 14); assert.strictEqual(Buffer.byteLength('Il était tué', 'utf8'), 14); diff --git a/test/parallel/test-buffer-fill.js b/test/parallel/test-buffer-fill.js index 6e24b3511e6e1a..6e7b24989a53bb 100644 --- a/test/parallel/test-buffer-fill.js +++ b/test/parallel/test-buffer-fill.js @@ -131,20 +131,36 @@ assert.throws(() => { }); // BASE64 -testBufs('YWJj', 'ucs2'); -testBufs('yKJhYQ==', 'ucs2'); -testBufs('Yci0Ysi1Y8i2', 'ucs2'); -testBufs('YWJj', 4, 'ucs2'); -testBufs('YWJj', SIZE, 'ucs2'); -testBufs('yKJhYQ==', 2, 'ucs2'); -testBufs('yKJhYQ==', 8, 'ucs2'); -testBufs('Yci0Ysi1Y8i2', 4, 'ucs2'); -testBufs('Yci0Ysi1Y8i2', 12, 'ucs2'); -testBufs('YWJj', 4, 1, 'ucs2'); -testBufs('YWJj', 5, 1, 'ucs2'); -testBufs('yKJhYQ==', 8, 1, 'ucs2'); -testBufs('Yci0Ysi1Y8i2', 4, 1, 'ucs2'); -testBufs('Yci0Ysi1Y8i2', 12, 1, 'ucs2'); +testBufs('YWJj', 'base64'); +testBufs('yKJhYQ==', 'base64'); +testBufs('Yci0Ysi1Y8i2', 'base64'); +testBufs('YWJj', 4, 'base64'); +testBufs('YWJj', SIZE, 'base64'); +testBufs('yKJhYQ==', 2, 'base64'); +testBufs('yKJhYQ==', 8, 'base64'); +testBufs('Yci0Ysi1Y8i2', 4, 'base64'); +testBufs('Yci0Ysi1Y8i2', 12, 'base64'); +testBufs('YWJj', 4, 1, 'base64'); +testBufs('YWJj', 5, 1, 'base64'); +testBufs('yKJhYQ==', 8, 1, 'base64'); +testBufs('Yci0Ysi1Y8i2', 4, 1, 'base64'); +testBufs('Yci0Ysi1Y8i2', 12, 1, 'base64'); + +// BASE64URL +testBufs('YWJj', 
'base64url'); +testBufs('yKJhYQ', 'base64url'); +testBufs('Yci0Ysi1Y8i2', 'base64url'); +testBufs('YWJj', 4, 'base64url'); +testBufs('YWJj', SIZE, 'base64url'); +testBufs('yKJhYQ', 2, 'base64url'); +testBufs('yKJhYQ', 8, 'base64url'); +testBufs('Yci0Ysi1Y8i2', 4, 'base64url'); +testBufs('Yci0Ysi1Y8i2', 12, 'base64url'); +testBufs('YWJj', 4, 1, 'base64url'); +testBufs('YWJj', 5, 1, 'base64url'); +testBufs('yKJhYQ', 8, 1, 'base64url'); +testBufs('Yci0Ysi1Y8i2', 4, 1, 'base64url'); +testBufs('Yci0Ysi1Y8i2', 12, 1, 'base64url'); // Buffer function deepStrictEqualValues(buf, arr) { diff --git a/test/parallel/test-buffer-isencoding.js b/test/parallel/test-buffer-isencoding.js index e67d1c078397a3..1a9e17dc48714d 100644 --- a/test/parallel/test-buffer-isencoding.js +++ b/test/parallel/test-buffer-isencoding.js @@ -11,6 +11,7 @@ const assert = require('assert'); 'latin1', 'binary', 'base64', + 'base64url', 'ucs2', 'ucs-2', 'utf16le', diff --git a/test/parallel/test-buffer-write.js b/test/parallel/test-buffer-write.js index 842b12c04c04db..3a4a3a8b696612 100644 --- a/test/parallel/test-buffer-write.js +++ b/test/parallel/test-buffer-write.js @@ -23,6 +23,7 @@ const resultMap = new Map([ ['binary', Buffer.from([102, 111, 111, 0, 0, 0, 0, 0, 0])], ['utf16le', Buffer.from([102, 0, 111, 0, 111, 0, 0, 0, 0])], ['base64', Buffer.from([102, 111, 111, 0, 0, 0, 0, 0, 0])], + ['base64url', Buffer.from([102, 111, 111, 0, 0, 0, 0, 0, 0])], ['hex', Buffer.from([102, 111, 111, 0, 0, 0, 0, 0, 0])] ]); @@ -44,7 +45,7 @@ encodings }); // base64 -['base64', 'BASE64'].forEach((encoding) => { +['base64', 'BASE64', 'base64url', 'BASE64URL'].forEach((encoding) => { const buf = Buffer.alloc(9); const len = Buffer.byteLength('Zm9v', encoding); diff --git a/test/parallel/test-crypto-key-objects.js b/test/parallel/test-crypto-key-objects.js index fdb63afa7659f3..34c2cef4c7677c 100644 --- a/test/parallel/test-crypto-key-objects.js +++ b/test/parallel/test-crypto-key-objects.js @@ -70,6 +70,7 @@ const privateDsa = fixtures.readKey('dsa_private_encrypted_1025.pem', assert.strictEqual(key.type, 'secret'); assert.strictEqual(key.symmetricKeySize, 32); assert.strictEqual(key.asymmetricKeyType, undefined); + assert.strictEqual(key.asymmetricKeyDetails, undefined); const exportedKey = key.export(); assert(keybuf.equals(exportedKey)); diff --git a/test/parallel/test-crypto-keygen.js b/test/parallel/test-crypto-keygen.js index f59a26a422f96d..c27d5fe166220e 100644 --- a/test/parallel/test-crypto-keygen.js +++ b/test/parallel/test-crypto-keygen.js @@ -114,6 +114,31 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); testSignVerify(publicKey, privateKey); } +{ + // Test sync key generation with key objects with a non-standard + // publicExpononent + const { publicKey, privateKey } = generateKeyPairSync('rsa', { + publicExponent: 3, + modulusLength: 512 + }); + + assert.strictEqual(typeof publicKey, 'object'); + assert.strictEqual(publicKey.type, 'public'); + assert.strictEqual(publicKey.asymmetricKeyType, 'rsa'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 3n + }); + + assert.strictEqual(typeof privateKey, 'object'); + assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(privateKey.asymmetricKeyType, 'rsa'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 3n + }); +} + { // Test sync key generation with key objects. 
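  // (As asserted below, the returned KeyObjects now also expose an
  // asymmetricKeyDetails object reporting modulusLength and publicExponent.)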
const { publicKey, privateKey } = generateKeyPairSync('rsa', { @@ -123,10 +148,18 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); assert.strictEqual(typeof publicKey, 'object'); assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(publicKey.asymmetricKeyType, 'rsa'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n + }); assert.strictEqual(typeof privateKey, 'object'); assert.strictEqual(privateKey.type, 'private'); assert.strictEqual(privateKey.asymmetricKeyType, 'rsa'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n + }); } { @@ -268,9 +301,17 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); }, common.mustSucceed((publicKey, privateKey) => { assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n + }); assert.strictEqual(privateKey.type, 'private'); assert.strictEqual(privateKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n + }); // Unlike RSA, RSA-PSS does not allow encryption. assert.throws(() => { @@ -342,6 +383,28 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); })); } +{ + // Test async DSA key object generation. + generateKeyPair('dsa', { + modulusLength: 512, + divisorLength: 256 + }, common.mustSucceed((publicKey, privateKey) => { + assert.strictEqual(publicKey.type, 'public'); + assert.strictEqual(publicKey.asymmetricKeyType, 'dsa'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + divisorLength: 256 + }); + + assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(privateKey.asymmetricKeyType, 'dsa'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + divisorLength: 256 + }); + })); +} + { // Test async elliptic curve key generation, e.g. for ECDSA, with a SEC1 // private key. @@ -925,16 +988,24 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); // It should recognize both NIST and standard curve names. 
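  // ('P-256' below is the NIST alias; asymmetricKeyDetails is expected to
  // report the standard OpenSSL name 'prime256v1' for the same curve.)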
generateKeyPair('ec', { namedCurve: 'P-256', - publicKeyEncoding: { type: 'spki', format: 'pem' }, - privateKeyEncoding: { type: 'pkcs8', format: 'pem' } }, common.mustSucceed((publicKey, privateKey) => { + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + namedCurve: 'prime256v1' + }); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + namedCurve: 'prime256v1' + }); })); generateKeyPair('ec', { namedCurve: 'secp256k1', - publicKeyEncoding: { type: 'spki', format: 'pem' }, - privateKeyEncoding: { type: 'pkcs8', format: 'pem' } }, common.mustSucceed((publicKey, privateKey) => { + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + namedCurve: 'secp256k1' + }); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + namedCurve: 'secp256k1' + }); })); } @@ -945,9 +1016,11 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); generateKeyPair(keyType, common.mustSucceed((publicKey, privateKey) => { assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(publicKey.asymmetricKeyType, keyType); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, {}); assert.strictEqual(privateKey.type, 'private'); assert.strictEqual(privateKey.asymmetricKeyType, keyType); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, {}); })); }); } diff --git a/test/parallel/test-crypto-x509.js b/test/parallel/test-crypto-x509.js index b97d53be799515..3d2031c52131b3 100644 --- a/test/parallel/test-crypto-x509.js +++ b/test/parallel/test-crypto-x509.js @@ -110,7 +110,7 @@ const der = Buffer.from( assert(x509.publicKey); assert.strictEqual(x509.publicKey.type, 'public'); - assert.strictEqual(x509.toString(), cert.toString()); + assert.strictEqual(x509.toString().replaceAll('\r\n', '\n'), cert.toString()); assert.strictEqual(x509.toJSON(), x509.toString()); assert(x509.checkPrivateKey(privateKey)); diff --git a/test/parallel/test-events-once.js b/test/parallel/test-events-once.js index 7c37f576c29fd3..14b8ea5815a61a 100644 --- a/test/parallel/test-events-once.js +++ b/test/parallel/test-events-once.js @@ -169,6 +169,19 @@ async function abortSignalAfterEvent() { await once(ee, 'foo', { signal: ac.signal }); } +async function abortSignalRemoveListener() { + const ee = new EventEmitter(); + const ac = new AbortController(); + + try { + process.nextTick(() => ac.abort()); + await once(ee, 'test', { signal: ac.signal }); + } catch { + strictEqual(ee.listeners('test').length, 0); + strictEqual(ee.listeners('error').length, 0); + } +} + async function eventTargetAbortSignalBefore() { const et = new EventTarget(); const ac = new AbortController(); @@ -218,6 +231,7 @@ Promise.all([ abortSignalBefore(), abortSignalAfter(), abortSignalAfterEvent(), + abortSignalRemoveListener(), eventTargetAbortSignalBefore(), eventTargetAbortSignalAfter(), eventTargetAbortSignalAfterEvent(), diff --git a/test/parallel/test-fs-exists.js b/test/parallel/test-fs-exists.js index cd2d9a712f3f58..75b7adfc3ee3d0 100644 --- a/test/parallel/test-fs-exists.js +++ b/test/parallel/test-fs-exists.js @@ -23,7 +23,6 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const { URL } = require('url'); const f = __filename; assert.throws(() => fs.exists(f), { code: 'ERR_INVALID_CALLBACK' }); diff --git a/test/parallel/test-fs-null-bytes.js b/test/parallel/test-fs-null-bytes.js index beaea00969b630..d4548c02c07009 100644 --- a/test/parallel/test-fs-null-bytes.js +++ b/test/parallel/test-fs-null-bytes.js @@ -23,7 +23,6 @@ const common = 
require('../common'); const assert = require('assert'); const fs = require('fs'); -const URL = require('url').URL; function check(async, sync) { const argsSync = Array.prototype.slice.call(arguments, 2); diff --git a/test/parallel/test-fs-read-type.js b/test/parallel/test-fs-read-type.js index 0f9bdbab588661..5635fe0a662c64 100644 --- a/test/parallel/test-fs-read-type.js +++ b/test/parallel/test-fs-read-type.js @@ -76,6 +76,51 @@ assert.throws(() => { 'It must be >= 0. Received -1' }); +[true, () => {}, {}, ''].forEach((value) => { + assert.throws(() => { + fs.read(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + value, + common.mustNotCall()); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }); +}); + +[0.5, 2 ** 53, 2n ** 63n].forEach((value) => { + assert.throws(() => { + fs.read(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + value, + common.mustNotCall()); + }, { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError' + }); +}); + +fs.read(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + 0n, + common.mustSucceed()); + +fs.read(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + 2n ** 53n - 1n, + common.mustCall((err) => { + if (err) { + assert.strictEqual(err.code, 'EFBIG'); + } + })); assert.throws( () => fs.readSync(fd, expected.length, 0, 'utf-8'), @@ -151,3 +196,48 @@ assert.throws(() => { message: 'The value of "length" is out of range. ' + 'It must be <= 4. Received 5' }); + +[true, () => {}, {}, ''].forEach((value) => { + assert.throws(() => { + fs.readSync(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + value); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }); +}); + +[0.5, 2 ** 53, 2n ** 63n].forEach((value) => { + assert.throws(() => { + fs.readSync(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + value); + }, { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError' + }); +}); + +fs.readSync(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + 0n); + +try { + fs.readSync(fd, + Buffer.allocUnsafe(expected.length), + 0, + expected.length, + 2n ** 53n - 1n); +} catch (err) { + // On systems where max file size is below 2^53-1, we'd expect a EFBIG error. + // This is not using `assert.throws` because the above call should not raise + // any error on systems that allows file of that size. 
+ if (err.code !== 'EFBIG') throw err; +} diff --git a/test/parallel/test-fs-whatwg-url.js b/test/parallel/test-fs-whatwg-url.js index a506e5eb942a54..829cfa92fafebd 100644 --- a/test/parallel/test-fs-whatwg-url.js +++ b/test/parallel/test-fs-whatwg-url.js @@ -6,7 +6,6 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); const os = require('os'); -const URL = require('url').URL; function pathToFileURL(p) { if (!path.isAbsolute(p)) diff --git a/test/parallel/test-http-client-get-url.js b/test/parallel/test-http-client-get-url.js index a72eea56538c4d..3b091a72eda493 100644 --- a/test/parallel/test-http-client-get-url.js +++ b/test/parallel/test-http-client-get-url.js @@ -24,7 +24,6 @@ const common = require('../common'); const assert = require('assert'); const http = require('http'); const url = require('url'); -const URL = url.URL; const testPath = '/foo?bar'; const server = http.createServer(common.mustCall((req, res) => { diff --git a/test/parallel/test-http-server.js b/test/parallel/test-http-server.js index f591cd59fc63c2..dcefc761728d36 100644 --- a/test/parallel/test-http-server.js +++ b/test/parallel/test-http-server.js @@ -49,6 +49,8 @@ const server = http.createServer(function(req, res) { res.id = request_number; req.id = request_number++; + assert.strictEqual(res.req, req); + if (req.id === 0) { assert.strictEqual(req.method, 'GET'); assert.strictEqual(url.parse(req.url).pathname, '/hello'); diff --git a/test/parallel/test-http2-altsvc.js b/test/parallel/test-http2-altsvc.js index 3a1a1cf62991b7..39a3ca97b78bf3 100644 --- a/test/parallel/test-http2-altsvc.js +++ b/test/parallel/test-http2-altsvc.js @@ -6,7 +6,6 @@ if (!common.hasCrypto) const assert = require('assert'); const http2 = require('http2'); -const { URL } = require('url'); const Countdown = require('../common/countdown'); const server = http2.createServer(); diff --git a/test/parallel/test-http2-compat-serverresponse.js b/test/parallel/test-http2-compat-serverresponse.js new file mode 100644 index 00000000000000..379677a68f81e1 --- /dev/null +++ b/test/parallel/test-http2-compat-serverresponse.js @@ -0,0 +1,40 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const h2 = require('http2'); + +// Http2ServerResponse should expose convenience properties + +const server = h2.createServer(); +server.listen(0, common.mustCall(function() { + const port = server.address().port; + server.once('request', common.mustCall(function(request, response) { + assert.strictEqual(response.req, request); + + response.on('finish', common.mustCall(function() { + process.nextTick(() => { + server.close(); + }); + })); + response.end(); + })); + + const url = `http://localhost:${port}`; + const client = h2.connect(url, common.mustCall(function() { + const headers = { + ':path': '/foobar', + ':method': 'GET', + ':scheme': 'http', + ':authority': `localhost:${port}` + }; + const request = client.request(headers); + request.on('end', common.mustCall(function() { + client.close(); + })); + request.end(); + request.resume(); + })); +})); diff --git a/test/parallel/test-http2-connect-method.js b/test/parallel/test-http2-connect-method.js index d5b4c4bd274950..4ada9f47553528 100644 --- a/test/parallel/test-http2-connect-method.js +++ b/test/parallel/test-http2-connect-method.js @@ -6,7 +6,6 @@ if (!common.hasCrypto) const assert = require('assert'); const net = require('net'); const http2 = require('http2'); 
-const { URL } = require('url'); const { HTTP2_HEADER_METHOD, diff --git a/test/parallel/test-http2-create-client-connect.js b/test/parallel/test-http2-create-client-connect.js index 8a4fc9a1d0e075..5723fcecd6f069 100644 --- a/test/parallel/test-http2-create-client-connect.js +++ b/test/parallel/test-http2-create-client-connect.js @@ -9,7 +9,6 @@ if (!common.hasCrypto) const fixtures = require('../common/fixtures'); const h2 = require('http2'); const url = require('url'); -const URL = url.URL; { const server = h2.createServer(); diff --git a/test/parallel/test-http2-response-splitting.js b/test/parallel/test-http2-response-splitting.js index 9613eca9636ae4..a94b9ca4f72b35 100644 --- a/test/parallel/test-http2-response-splitting.js +++ b/test/parallel/test-http2-response-splitting.js @@ -9,7 +9,6 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); const http2 = require('http2'); -const { URL } = require('url'); // Response splitting example, credit: Amit Klein, Safebreach const str = '/welcome?lang=bar%c4%8d%c4%8aContent­Length:%200%c4%8d%c4%8a%c' + diff --git a/test/parallel/test-https-client-get-url.js b/test/parallel/test-https-client-get-url.js index 76328775e80187..fb91a4f1e7cb8a 100644 --- a/test/parallel/test-https-client-get-url.js +++ b/test/parallel/test-https-client-get-url.js @@ -32,8 +32,6 @@ const assert = require('assert'); const https = require('https'); const url = require('url'); -const URL = url.URL; - const options = { key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') diff --git a/test/parallel/test-internal-util-normalizeencoding.js b/test/parallel/test-internal-util-normalizeencoding.js index d7a0259ac9c9fd..af567cbcf5cab3 100644 --- a/test/parallel/test-internal-util-normalizeencoding.js +++ b/test/parallel/test-internal-util-normalizeencoding.js @@ -33,6 +33,9 @@ const tests = [ ['base64', 'base64'], ['BASE64', 'base64'], ['Base64', 'base64'], + ['base64url', 'base64url'], + ['BASE64url', 'base64url'], + ['Base64url', 'base64url'], ['hex', 'hex'], ['HEX', 'hex'], ['ASCII', 'ascii'], diff --git a/test/parallel/test-npm-install.js b/test/parallel/test-npm-install.js index 1eec5f57ad6db9..17deacdbc14d2a 100644 --- a/test/parallel/test-npm-install.js +++ b/test/parallel/test-npm-install.js @@ -42,6 +42,8 @@ const env = { ...process.env, PATH: path.dirname(process.execPath), NPM_CONFIG_PREFIX: path.join(npmSandbox, 'npm-prefix'), NPM_CONFIG_TMP: path.join(npmSandbox, 'npm-tmp'), + NPM_CONFIG_AUDIT: false, + NPM_CONFIG_UPDATE_NOTIFIER: false, HOME: homeDir, }; diff --git a/test/parallel/test-repl-history-navigation.js b/test/parallel/test-repl-history-navigation.js index fa40ac624000f4..4dd9c350229b19 100644 --- a/test/parallel/test-repl-history-navigation.js +++ b/test/parallel/test-repl-history-navigation.js @@ -504,7 +504,56 @@ const tests = [ prompt, ], clean: true - } + }, + { + env: { NODE_REPL_HISTORY: defaultHistoryPath }, + test: (function*() { + // Deleting Array iterator should not break history feature. + // + // Using a generator function instead of an object to allow the test to + // keep iterating even when Array.prototype[Symbol.iterator] has been + // deleted. 
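The comment above is the point of the whole test case: history navigation in the REPL must keep working even after user code deletes the shared Array iterator. A standalone sketch of the breakage being guarded against, outside the REPL and assuming only a plain Node.js script:

'use strict';
// Capture the real iterator first, the same way the test does.
const realArrayIterator = Array.prototype[Symbol.iterator];

delete Array.prototype[Symbol.iterator];

try {
  // for-of and array spread now fail because both go through
  // Array.prototype[Symbol.iterator].
  for (const x of [3, 2, 1]);
} catch (err) {
  console.log(err instanceof TypeError); // true: "[3,2,1] is not iterable"
}

// Restoring the cached original repairs ordinary iteration again.
Array.prototype[Symbol.iterator] = realArrayIterator;
console.log([...[1, 2, 3]].length); // 3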
+ yield 'const ArrayIteratorPrototype ='; + yield ' Object.getPrototypeOf(Array.prototype[Symbol.iterator]());'; + yield ENTER; + yield 'const {next} = ArrayIteratorPrototype;'; + yield ENTER; + yield 'const realArrayIterator = Array.prototype[Symbol.iterator];'; + yield ENTER; + yield 'delete Array.prototype[Symbol.iterator];'; + yield ENTER; + yield 'delete ArrayIteratorPrototype.next;'; + yield ENTER; + yield UP; + yield UP; + yield DOWN; + yield DOWN; + yield 'fu'; + yield 'n'; + yield RIGHT; + yield BACKSPACE; + yield LEFT; + yield LEFT; + yield 'A'; + yield BACKSPACE; + yield GO_TO_END; + yield BACKSPACE; + yield WORD_LEFT; + yield WORD_RIGHT; + yield ESCAPE; + yield ENTER; + yield 'Array.proto'; + yield RIGHT; + yield '.pu'; + yield ENTER; + yield 'ArrayIteratorPrototype.next = next;'; + yield ENTER; + yield 'Array.prototype[Symbol.iterator] = realArrayIterator;'; + yield ENTER; + })(), + expected: [], + clean: false + }, ]; const numtests = tests.length; diff --git a/test/parallel/test-repl-unsafe-array-iteration.js b/test/parallel/test-repl-unsafe-array-iteration.js new file mode 100644 index 00000000000000..3fc65f54cf1f37 --- /dev/null +++ b/test/parallel/test-repl-unsafe-array-iteration.js @@ -0,0 +1,68 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { spawn } = require('child_process'); + +const replProcess = spawn(process.argv0, ['--interactive'], { + stdio: ['pipe', 'pipe', 'inherit'], + windowsHide: true, +}); + +replProcess.on('error', common.mustNotCall()); + +const replReadyState = (async function* () { + let ready; + const SPACE = ' '.charCodeAt(); + const BRACKET = '>'.charCodeAt(); + const DOT = '.'.charCodeAt(); + replProcess.stdout.on('data', (data) => { + ready = data[data.length - 1] === SPACE && ( + data[data.length - 2] === BRACKET || ( + data[data.length - 2] === DOT && + data[data.length - 3] === DOT && + data[data.length - 4] === DOT + )); + }); + + const processCrashed = new Promise((resolve, reject) => + replProcess.on('exit', reject) + ); + while (true) { + await Promise.race([new Promise(setImmediate), processCrashed]); + if (ready) { + ready = false; + yield; + } + } +})(); +async function writeLn(data, expectedOutput) { + await replReadyState.next(); + if (expectedOutput) { + replProcess.stdout.once('data', common.mustCall((data) => + assert.match(data.toString('utf8'), expectedOutput) + )); + } + await new Promise((resolve, reject) => replProcess.stdin.write( + `${data}\n`, + (err) => (err ? 
reject(err) : resolve()) + )); +} + +async function main() { + await writeLn( + 'const ArrayIteratorPrototype =' + + ' Object.getPrototypeOf(Array.prototype[Symbol.iterator]());' + ); + await writeLn('delete Array.prototype[Symbol.iterator];'); + await writeLn('delete ArrayIteratorPrototype.next;'); + + await writeLn( + 'for(const x of [3, 2, 1]);', + /Uncaught TypeError: \[3,2,1\] is not iterable/ + ); + await writeLn('.exit'); + + assert(!replProcess.connected); +} + +main().then(common.mustCall()); diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js index 5b495a41e08545..d9d0405f4d5a45 100644 --- a/test/parallel/test-stream-buffer-list.js +++ b/test/parallel/test-stream-buffer-list.js @@ -48,3 +48,37 @@ const shifted = list.shift(); testIterator(list, 0); assert.strictEqual(shifted, buf); assert.deepStrictEqual(list, new BufferList()); + +{ + const list = new BufferList(); + list.push('foo'); + list.push('bar'); + list.push('foo'); + list.push('bar'); + assert.strictEqual(list.consume(6, true), 'foobar'); + assert.strictEqual(list.consume(6, true), 'foobar'); +} + +{ + const list = new BufferList(); + list.push('foo'); + list.push('bar'); + assert.strictEqual(list.consume(5, true), 'fooba'); +} + +{ + const list = new BufferList(); + list.push(buf); + list.push(buf); + list.push(buf); + list.push(buf); + assert.strictEqual(list.consume(6).toString(), 'foofoo'); + assert.strictEqual(list.consume(6).toString(), 'foofoo'); +} + +{ + const list = new BufferList(); + list.push(buf); + list.push(buf); + assert.strictEqual(list.consume(5).toString(), 'foofo'); +} diff --git a/test/parallel/test-string-decoder-end.js b/test/parallel/test-string-decoder-end.js index ee0a47e3572f6d..5a3c5cc720789d 100644 --- a/test/parallel/test-string-decoder-end.js +++ b/test/parallel/test-string-decoder-end.js @@ -27,7 +27,7 @@ require('../common'); const assert = require('assert'); const SD = require('string_decoder').StringDecoder; -const encodings = ['base64', 'hex', 'utf8', 'utf16le', 'ucs2']; +const encodings = ['base64', 'base64url', 'hex', 'utf8', 'utf16le', 'ucs2']; const bufs = [ '☃💩', 'asdf' ].map((b) => Buffer.from(b)); @@ -79,6 +79,13 @@ testEnd('base64', Buffer.of(0x61, 0x61), Buffer.of(0x61), 'YWE=YQ=='); testEnd('base64', Buffer.of(0x61, 0x61, 0x61), Buffer.of(), 'YWFh'); testEnd('base64', Buffer.of(0x61, 0x61, 0x61), Buffer.of(0x61), 'YWFhYQ=='); +testEnd('base64url', Buffer.of(0x61), Buffer.of(), 'YQ'); +testEnd('base64url', Buffer.of(0x61), Buffer.of(0x61), 'YQYQ'); +testEnd('base64url', Buffer.of(0x61, 0x61), Buffer.of(), 'YWE'); +testEnd('base64url', Buffer.of(0x61, 0x61), Buffer.of(0x61), 'YWEYQ'); +testEnd('base64url', Buffer.of(0x61, 0x61, 0x61), Buffer.of(), 'YWFh'); +testEnd('base64url', Buffer.of(0x61, 0x61, 0x61), Buffer.of(0x61), 'YWFhYQ'); + function testEncoding(encoding) { bufs.forEach((buf) => { testBuf(encoding, buf); diff --git a/test/parallel/test-string-decoder-fuzz.js b/test/parallel/test-string-decoder-fuzz.js index d8d01881591161..ac6f74e94981a8 100644 --- a/test/parallel/test-string-decoder-fuzz.js +++ b/test/parallel/test-string-decoder-fuzz.js @@ -20,7 +20,7 @@ function randBuf(maxLen) { } const encodings = [ - 'utf16le', 'utf8', 'ascii', 'hex', 'base64', 'latin1' + 'utf16le', 'utf8', 'ascii', 'hex', 'base64', 'latin1', 'base64url' ]; function runSingleFuzzTest() { diff --git a/test/parallel/test-tls-secure-context-usage-order.js b/test/parallel/test-tls-secure-context-usage-order.js new file mode 100644 index 
00000000000000..c79a3eac775822 --- /dev/null +++ b/test/parallel/test-tls-secure-context-usage-order.js @@ -0,0 +1,99 @@ +'use strict'; +const common = require('../common'); +const fixtures = require('../common/fixtures'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +// This test ensures that when a TLS connection is established, the server +// selects the most recently added SecureContext that matches the servername. + +const assert = require('assert'); +const tls = require('tls'); + +function loadPEM(n) { + return fixtures.readKey(`${n}.pem`); +} + +const serverOptions = { + key: loadPEM('agent2-key'), + cert: loadPEM('agent2-cert'), + requestCert: true, + rejectUnauthorized: false, +}; + +const badSecureContext = { + key: loadPEM('agent1-key'), + cert: loadPEM('agent1-cert'), + ca: [ loadPEM('ca2-cert') ] +}; + +const goodSecureContext = { + key: loadPEM('agent1-key'), + cert: loadPEM('agent1-cert'), + ca: [ loadPEM('ca1-cert') ] +}; + +const server = tls.createServer(serverOptions, (c) => { + // The 'a' and 'b' subdomains are used to distinguish between client + // connections. + // Connection to subdomain 'a' is made when the 'bad' secure context is + // the only one in use. + if ('a.example.com' === c.servername) { + assert.strictEqual(c.authorized, false); + } + // Connection to subdomain 'b' is made after the 'good' context has been + // added. + if ('b.example.com' === c.servername) { + assert.strictEqual(c.authorized, true); + } +}); + +// 1. Add the 'bad' secure context. A connection using this context will not be +// authorized. +server.addContext('*.example.com', badSecureContext); + +server.listen(0, () => { + const options = { + port: server.address().port, + key: loadPEM('agent1-key'), + cert: loadPEM('agent1-cert'), + ca: [loadPEM('ca1-cert')], + servername: 'a.example.com', + rejectUnauthorized: false, + }; + + // 2. Make a connection using servername 'a.example.com'. Since a 'bad' + // secure context is used, this connection should not be authorized. + const client = tls.connect(options, () => { + client.end(); + }); + + client.on('close', common.mustCall(() => { + // 3. Add a 'good' secure context. + server.addContext('*.example.com', goodSecureContext); + + options.servername = 'b.example.com'; + // 4. Make a connection using servername 'b.example.com'. This connection + // should be authorized because the 'good' secure context is the most + // recently added matching context. + + const other = tls.connect(options, () => { + other.end(); + }); + + other.on('close', common.mustCall(() => { + // 5. Make another connection using servername 'b.example.com' to ensure + // that the array of secure contexts is not reversed in place with each + // SNICallback call, as someone might be tempted to refactor this piece of + // code by using Array.prototype.reverse() method. 
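Steps 3 to 5 above depend on one lookup rule: when more than one SecureContext matches the SNI servername, the server must use the most recently added one, and it must do so without mutating the stored list. A simplified standalone model of that selection order (an assumption for illustration, not Node's actual SNICallback implementation):

'use strict';
// Contexts are kept oldest to newest, mirroring repeated addContext() calls.
const contexts = [];

function addContext(pattern, context) {
  contexts.push({ pattern, context });
}

function selectContext(servername) {
  // Walk backwards so the most recently added matching pattern wins,
  // without reversing or otherwise mutating the array.
  for (let i = contexts.length - 1; i >= 0; i--) {
    const { pattern, context } = contexts[i];
    const re = new RegExp(
      `^${pattern.replace(/\./g, '\\.').replace(/\*/g, '[^.]+')}$`);
    if (re.test(servername)) return context;
  }
  return undefined;
}

addContext('*.example.com', { name: 'bad' });
addContext('*.example.com', { name: 'good' });

console.log(selectContext('b.example.com').name); // 'good'
console.log(contexts.length);                     // still 2, order unchanged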
+ const onemore = tls.connect(options, () => { + onemore.end(); + }); + + onemore.on('close', common.mustCall(() => { + server.close(); + })); + })); + })); +}); diff --git a/test/parallel/test-url-format-whatwg.js b/test/parallel/test-url-format-whatwg.js index ab3d18bdc3aa25..9c86a7ae2c6910 100644 --- a/test/parallel/test-url-format-whatwg.js +++ b/test/parallel/test-url-format-whatwg.js @@ -6,7 +6,6 @@ if (!common.hasIntl) const assert = require('assert'); const url = require('url'); -const URL = url.URL; const myURL = new URL('http://xn--lck1c3crb1723bpq4a.com/a?a=b#c'); diff --git a/test/parallel/test-url-urltooptions.js b/test/parallel/test-url-urltooptions.js new file mode 100644 index 00000000000000..cc4838eeecb00f --- /dev/null +++ b/test/parallel/test-url-urltooptions.js @@ -0,0 +1,37 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const { urlToHttpOptions } = require('url'); + +// Test urlToHttpOptions +const urlObj = new URL('http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test'); +const opts = urlToHttpOptions(urlObj); +assert.strictEqual(opts instanceof URL, false); +assert.strictEqual(opts.protocol, 'http:'); +assert.strictEqual(opts.auth, 'user:pass'); +assert.strictEqual(opts.hostname, 'foo.bar.com'); +assert.strictEqual(opts.port, 21); +assert.strictEqual(opts.path, '/aaa/zzz?l=24'); +assert.strictEqual(opts.pathname, '/aaa/zzz'); +assert.strictEqual(opts.search, '?l=24'); +assert.strictEqual(opts.hash, '#test'); + +const { hostname } = urlToHttpOptions(new URL('http://[::1]:21')); +assert.strictEqual(hostname, '::1'); + +// If a WHATWG URL object is copied, it is possible that the resulting copy +// contains the Symbols that Node uses for brand checking, but not the data +// properties, which are getters. Verify that urlToHttpOptions() can handle +// such a case. 
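The copied-object case above exists because spreading a WHATWG URL copies only its own enumerable properties, which may include the internal symbols Node uses for brand checking, while the named properties such as hostname are getters on URL.prototype. A short standalone illustration of why such a copy is not a usable URL (uses only the global URL class):

'use strict';
const urlObj = new URL('http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test');
const copied = { ...urlObj };

// The accessor-backed data does not survive the spread.
console.log(urlObj.hostname);       // 'foo.bar.com'
console.log(copied.hostname);       // undefined
console.log(copied instanceof URL); // false

// No string-keyed own properties are copied either; the URL state is
// symbol-keyed internally (exact keys vary by Node.js version).
console.log(Object.keys(copied));   // []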
+const copiedUrlObj = { ...urlObj }; +const copiedOpts = urlToHttpOptions(copiedUrlObj); +assert.strictEqual(copiedOpts instanceof URL, false); +assert.strictEqual(copiedOpts.protocol, undefined); +assert.strictEqual(copiedOpts.auth, undefined); +assert.strictEqual(copiedOpts.hostname, undefined); +assert.strictEqual(copiedOpts.port, NaN); +assert.strictEqual(copiedOpts.path, ''); +assert.strictEqual(copiedOpts.pathname, undefined); +assert.strictEqual(copiedOpts.search, undefined); +assert.strictEqual(copiedOpts.hash, undefined); +assert.strictEqual(copiedOpts.href, undefined); diff --git a/test/parallel/test-vm-module-errors.js b/test/parallel/test-vm-module-errors.js index 942e2f370dfff8..25b43c3e4df9d0 100644 --- a/test/parallel/test-vm-module-errors.js +++ b/test/parallel/test-vm-module-errors.js @@ -6,7 +6,7 @@ const common = require('../common'); const assert = require('assert'); -const { SourceTextModule, createContext } = require('vm'); +const { SourceTextModule, createContext, Module } = require('vm'); async function createEmptyLinkedModule() { const m = new SourceTextModule(''); @@ -205,6 +205,17 @@ async function checkInvalidOptionForEvaluate() { "Received type string ('a-string')", code: 'ERR_INVALID_ARG_TYPE' }); + + { + ['link', 'evaluate'].forEach(async (method) => { + await assert.rejects(async () => { + await Module.prototype[method](); + }, { + code: 'ERR_VM_MODULE_NOT_MODULE', + message: /Provided module is not an instance of Module/ + }); + }); + } } function checkInvalidCachedData() { @@ -223,6 +234,19 @@ function checkInvalidCachedData() { }); } +function checkGettersErrors() { + const getters = ['identifier', 'context', 'namespace', 'status', 'error']; + getters.forEach((getter) => { + assert.throws(() => { + // eslint-disable-next-line no-unused-expressions + Module.prototype[getter]; + }, { + code: 'ERR_VM_MODULE_NOT_MODULE', + message: /Provided module is not an instance of Module/ + }); + }); +} + const finished = common.mustCall(); (async function main() { @@ -232,5 +256,6 @@ const finished = common.mustCall(); await checkExecution(); await checkInvalidOptionForEvaluate(); checkInvalidCachedData(); + checkGettersErrors(); finished(); })().then(common.mustCall()); diff --git a/test/parallel/test-vm-module-link.js b/test/parallel/test-vm-module-link.js index 5b5eb6b08ffc31..39520bcf8e6891 100644 --- a/test/parallel/test-vm-module-link.js +++ b/test/parallel/test-vm-module-link.js @@ -5,7 +5,6 @@ const common = require('../common'); const assert = require('assert'); -const { URL } = require('url'); const { SourceTextModule } = require('vm'); diff --git a/test/parallel/test-vm-module-synthetic.js b/test/parallel/test-vm-module-synthetic.js index 660c0c64ed8d5d..9d1c07ead5c4cd 100644 --- a/test/parallel/test-vm-module-synthetic.js +++ b/test/parallel/test-vm-module-synthetic.js @@ -65,4 +65,14 @@ const assert = require('assert'); code: 'ERR_VM_MODULE_STATUS', }); } + + { + assert.throws(() => { + SyntheticModule.prototype.setExport.call({}, 'foo'); + }, { + code: 'ERR_VM_MODULE_NOT_MODULE', + message: /Provided module is not an instance of Module/ + }); + } + })().then(common.mustCall()); diff --git a/test/parallel/test-webcrypto-rsa-pss-params.js b/test/parallel/test-webcrypto-rsa-pss-params.js new file mode 100644 index 00000000000000..964eaf32e890fd --- /dev/null +++ b/test/parallel/test-webcrypto-rsa-pss-params.js @@ -0,0 +1,40 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const { + 
createPrivateKey, + createPublicKey, + webcrypto: { + subtle + } +} = require('crypto'); + +const fixtures = require('../common/fixtures'); + +{ + const rsaPssKeyWithoutParams = fixtures.readKey('rsa_pss_private_2048.pem'); + + const pkcs8 = createPrivateKey(rsaPssKeyWithoutParams).export({ + type: 'pkcs8', + format: 'der' + }); + const spki = createPublicKey(rsaPssKeyWithoutParams).export({ + type: 'spki', + format: 'der' + }); + + const hashes = ['SHA-1', 'SHA-256', 'SHA-384', 'SHA-512']; + + const tasks = []; + for (const hash of hashes) { + const algorithm = { name: 'RSA-PSS', hash }; + tasks.push(subtle.importKey('pkcs8', pkcs8, algorithm, true, ['sign'])); + tasks.push(subtle.importKey('spki', spki, algorithm, true, ['verify'])); + } + + Promise.all(tasks).then(common.mustCall()); +} diff --git a/test/parallel/test-webcrypto-sign-verify-rsa.js b/test/parallel/test-webcrypto-sign-verify-rsa.js index 35bd363c5558a6..46ee119f781586 100644 --- a/test/parallel/test-webcrypto-sign-verify-rsa.js +++ b/test/parallel/test-webcrypto-sign-verify-rsa.js @@ -23,7 +23,7 @@ async function testVerify({ noVerifyPublicKey, privateKey, hmacKey, - rsaKeys + ecdsaKeys ] = await Promise.all([ subtle.importKey( 'spki', @@ -85,7 +85,7 @@ async function testVerify({ }); await assert.rejects( - subtle.verify(algorithm, rsaKeys.publicKey, signature, plaintext), { + subtle.verify(algorithm, ecdsaKeys.publicKey, signature, plaintext), { message: /Unable to use this key to verify/ }); @@ -138,7 +138,7 @@ async function testSign({ noSignPrivateKey, privateKey, hmacKey, - rsaKeys, + ecdsaKeys ] = await Promise.all([ subtle.importKey( 'spki', @@ -205,7 +205,7 @@ async function testSign({ }); await assert.rejects( - subtle.sign(algorithm, rsaKeys.privateKey, plaintext), { + subtle.sign(algorithm, ecdsaKeys.privateKey, plaintext), { message: /Unable to use this key to sign/ }); } diff --git a/test/parallel/test-webcrypto-wrap-unwrap.js b/test/parallel/test-webcrypto-wrap-unwrap.js index cadeeca8ec3ee5..42784c4a660812 100644 --- a/test/parallel/test-webcrypto-wrap-unwrap.js +++ b/test/parallel/test-webcrypto-wrap-unwrap.js @@ -243,7 +243,7 @@ async function testWrap(wrappingKey, unwrappingKey, key, wrap, format) { assert.deepStrictEqual(exported, exportedAgain); } -async function testWrapping(name, keys, ecdhPeerKey) { +async function testWrapping(name, keys) { const variations = []; const { @@ -264,13 +264,9 @@ async function testWrapping(name, keys, ecdhPeerKey) { (async function() { await generateWrappingKeys(); const keys = await generateKeysToWrap(); - const ecdhPeerKey = await subtle.generateKey({ - name: 'ECDH', - namedCurve: 'P-384' - }, true, ['deriveBits']); const variations = []; Object.keys(kWrappingData).forEach((name) => { - return testWrapping(name, keys, ecdhPeerKey); + return testWrapping(name, keys); }); await Promise.all(variations); })().then(common.mustCall()); diff --git a/test/parallel/test-whatwg-url-constructor.js b/test/parallel/test-whatwg-url-constructor.js index d5b12a6021b772..3dc1c5986027e7 100644 --- a/test/parallel/test-whatwg-url-constructor.js +++ b/test/parallel/test-whatwg-url-constructor.js @@ -6,7 +6,6 @@ if (!common.hasIntl) { } const fixtures = require('../common/fixtures'); -const { URL, URLSearchParams } = require('url'); const { test, assert_equals, assert_true, assert_throws } = require('../common/wpt').harness; diff --git a/test/parallel/test-whatwg-url-custom-global.js b/test/parallel/test-whatwg-url-custom-global.js index c79723f0490e07..b99dfd8f3e7d94 100644 --- 
a/test/parallel/test-whatwg-url-custom-global.js +++ b/test/parallel/test-whatwg-url-custom-global.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const { URL, URLSearchParams } = require('url'); assert.deepStrictEqual( Object.getOwnPropertyDescriptor(global, 'URL'), diff --git a/test/parallel/test-whatwg-url-custom-inspect.js b/test/parallel/test-whatwg-url-custom-inspect.js index 318b8b66d5672a..ad77f5725d30ed 100644 --- a/test/parallel/test-whatwg-url-custom-inspect.js +++ b/test/parallel/test-whatwg-url-custom-inspect.js @@ -9,7 +9,6 @@ if (!common.hasIntl) { } const util = require('util'); -const URL = require('url').URL; const assert = require('assert'); const url = new URL('https://username:password@host.name:8080/path/name/?que=ry#hash'); diff --git a/test/parallel/test-whatwg-url-custom-parsing.js b/test/parallel/test-whatwg-url-custom-parsing.js index 2799a9caef3efe..6d9a9a816eff83 100644 --- a/test/parallel/test-whatwg-url-custom-parsing.js +++ b/test/parallel/test-whatwg-url-custom-parsing.js @@ -8,7 +8,6 @@ if (!common.hasIntl) { common.skip('missing Intl'); } -const URL = require('url').URL; const assert = require('assert'); const fixtures = require('../common/fixtures'); diff --git a/test/parallel/test-whatwg-url-custom-properties.js b/test/parallel/test-whatwg-url-custom-properties.js index 3a9ae43f1a4d74..cdbd2f6bdd4424 100644 --- a/test/parallel/test-whatwg-url-custom-properties.js +++ b/test/parallel/test-whatwg-url-custom-properties.js @@ -4,9 +4,7 @@ // Tests below are not from WPT. require('../common'); -const URL = require('url').URL; const assert = require('assert'); -const urlToOptions = require('internal/url').urlToOptions; const url = new URL('http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test'); const oldParams = url.searchParams; // For test of [SameObject] @@ -131,41 +129,6 @@ assert.strictEqual(url.toString(), assert.strictEqual((delete url.searchParams), true); assert.strictEqual(url.searchParams, oldParams); -// Test urlToOptions -{ - const urlObj = new URL('http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test'); - const opts = urlToOptions(urlObj); - assert.strictEqual(opts instanceof URL, false); - assert.strictEqual(opts.protocol, 'http:'); - assert.strictEqual(opts.auth, 'user:pass'); - assert.strictEqual(opts.hostname, 'foo.bar.com'); - assert.strictEqual(opts.port, 21); - assert.strictEqual(opts.path, '/aaa/zzz?l=24'); - assert.strictEqual(opts.pathname, '/aaa/zzz'); - assert.strictEqual(opts.search, '?l=24'); - assert.strictEqual(opts.hash, '#test'); - - const { hostname } = urlToOptions(new URL('http://[::1]:21')); - assert.strictEqual(hostname, '::1'); - - // If a WHATWG URL object is copied, it is possible that the resulting copy - // contains the Symbols that Node uses for brand checking, but not the data - // properties, which are getters. Verify that urlToOptions() can handle such - // a case. 
- const copiedUrlObj = { ...urlObj }; - const copiedOpts = urlToOptions(copiedUrlObj); - assert.strictEqual(copiedOpts instanceof URL, false); - assert.strictEqual(copiedOpts.protocol, undefined); - assert.strictEqual(copiedOpts.auth, undefined); - assert.strictEqual(copiedOpts.hostname, undefined); - assert.strictEqual(copiedOpts.port, NaN); - assert.strictEqual(copiedOpts.path, ''); - assert.strictEqual(copiedOpts.pathname, undefined); - assert.strictEqual(copiedOpts.search, undefined); - assert.strictEqual(copiedOpts.hash, undefined); - assert.strictEqual(copiedOpts.href, undefined); -} - // Test special origins [ { expected: 'https://whatwg.org', diff --git a/test/parallel/test-whatwg-url-custom-searchparams-append.js b/test/parallel/test-whatwg-url-custom-searchparams-append.js index cf93950ebd514c..5d2976a23cad53 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-append.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-append.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-constructor.js b/test/parallel/test-whatwg-url-custom-searchparams-constructor.js index ab065814179d3f..878caed43ff0ab 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-constructor.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-constructor.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; function makeIterableFunc(array) { return Object.assign(() => {}, { diff --git a/test/parallel/test-whatwg-url-custom-searchparams-delete.js b/test/parallel/test-whatwg-url-custom-searchparams-delete.js index 5c3088b0363ff1..e84f10e3f93df8 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-delete.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-delete.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const { URL, URLSearchParams } = require('url'); { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-entries.js b/test/parallel/test-whatwg-url-custom-searchparams-entries.js index b70717ff2b78c1..6e5dabb1a768c4 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-entries.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-entries.js @@ -2,7 +2,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; // Tests below are not from WPT. 
const params = new URLSearchParams('a=b&c=d'); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-foreach.js b/test/parallel/test-whatwg-url-custom-searchparams-foreach.js index b796cff9bc1b5c..0c035161dbea97 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-foreach.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-foreach.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const { URLSearchParams } = require('url'); { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-get.js b/test/parallel/test-whatwg-url-custom-searchparams-get.js index 1088fcc43d439a..4ce16805ceceb9 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-get.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-get.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-getall.js b/test/parallel/test-whatwg-url-custom-searchparams-getall.js index 8d229a25979d6c..d3f271fcc5dc7b 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-getall.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-getall.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-has.js b/test/parallel/test-whatwg-url-custom-searchparams-has.js index 4a76dda6d3dc48..1963e40057ef72 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-has.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-has.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-inspect.js b/test/parallel/test-whatwg-url-custom-searchparams-inspect.js index 6cc22caea62436..c03890938d9cfe 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-inspect.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-inspect.js @@ -5,7 +5,6 @@ require('../common'); const assert = require('assert'); const util = require('util'); -const URLSearchParams = require('url').URLSearchParams; const sp = new URLSearchParams('?a=a&b=b&b=c'); assert.strictEqual(util.inspect(sp), diff --git a/test/parallel/test-whatwg-url-custom-searchparams-keys.js b/test/parallel/test-whatwg-url-custom-searchparams-keys.js index 5a222c7428eac7..b65e71c9a24153 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-keys.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-keys.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; const params = new URLSearchParams('a=b&c=d'); const keys = params.keys(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-set.js b/test/parallel/test-whatwg-url-custom-searchparams-set.js index 39462bf4880065..106e94d6a249a9 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-set.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-set.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-sort.js 
b/test/parallel/test-whatwg-url-custom-searchparams-sort.js index 49c3b065f957c6..e0b0c5c1ed12f6 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-sort.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-sort.js @@ -3,7 +3,6 @@ // Tests below are not from WPT. require('../common'); -const { URL, URLSearchParams } = require('url'); const { test, assert_array_equals } = require('../common/wpt').harness; // TODO(joyeecheung): upstream this to WPT, if possible - even diff --git a/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js b/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js index 35307fa914975a..e46865e8b014eb 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; { const params = new URLSearchParams(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams-values.js b/test/parallel/test-whatwg-url-custom-searchparams-values.js index eedad691fa351c..9c4bb05d0e587d 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams-values.js +++ b/test/parallel/test-whatwg-url-custom-searchparams-values.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URLSearchParams = require('url').URLSearchParams; const params = new URLSearchParams('a=b&c=d'); const values = params.values(); diff --git a/test/parallel/test-whatwg-url-custom-searchparams.js b/test/parallel/test-whatwg-url-custom-searchparams.js index 39c8d87b6a60bf..272435b001a366 100644 --- a/test/parallel/test-whatwg-url-custom-searchparams.js +++ b/test/parallel/test-whatwg-url-custom-searchparams.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const { URL, URLSearchParams } = require('url'); const fixtures = require('../common/fixtures'); const serialized = 'a=a&a=1&a=true&a=undefined&a=null&a=%EF%BF%BD' + diff --git a/test/parallel/test-whatwg-url-custom-setters.js b/test/parallel/test-whatwg-url-custom-setters.js index e10ebb9fe66968..b98bf5d8d3b393 100644 --- a/test/parallel/test-whatwg-url-custom-setters.js +++ b/test/parallel/test-whatwg-url-custom-setters.js @@ -9,7 +9,6 @@ if (!common.hasIntl) { } const assert = require('assert'); -const URL = require('url').URL; const { test, assert_equals } = require('../common/wpt').harness; const fixtures = require('../common/fixtures'); diff --git a/test/parallel/test-whatwg-url-custom-tostringtag.js b/test/parallel/test-whatwg-url-custom-tostringtag.js index 784a3ebc7728e5..54e5850a8f07b9 100644 --- a/test/parallel/test-whatwg-url-custom-tostringtag.js +++ b/test/parallel/test-whatwg-url-custom-tostringtag.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); -const URL = require('url').URL; const toString = Object.prototype.toString; diff --git a/test/parallel/test-whatwg-url-origin.js b/test/parallel/test-whatwg-url-origin.js index 05bd1229177e15..532ff06bb1152f 100644 --- a/test/parallel/test-whatwg-url-origin.js +++ b/test/parallel/test-whatwg-url-origin.js @@ -6,7 +6,6 @@ if (!common.hasIntl) { } const fixtures = require('../common/fixtures'); -const URL = require('url').URL; const { test, assert_equals } = require('../common/wpt').harness; const request = { diff --git a/test/parallel/test-whatwg-url-override-hostname.js b/test/parallel/test-whatwg-url-override-hostname.js new file mode 100644 index 00000000000000..61e3412c6b7b53 
--- /dev/null +++ b/test/parallel/test-whatwg-url-override-hostname.js @@ -0,0 +1,20 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); + +{ + const url = new (class extends URL { get hostname() { return 'bar.com'; } })('http://foo.com/'); + assert.strictEqual(url.href, 'http://foo.com/'); + assert.strictEqual(url.toString(), 'http://foo.com/'); + assert.strictEqual(url.toJSON(), 'http://foo.com/'); + assert.strictEqual(url.hash, ''); + assert.strictEqual(url.host, 'foo.com'); + assert.strictEqual(url.hostname, 'bar.com'); + assert.strictEqual(url.origin, 'http://foo.com'); + assert.strictEqual(url.password, ''); + assert.strictEqual(url.protocol, 'http:'); + assert.strictEqual(url.username, ''); + assert.strictEqual(url.search, ''); + assert.strictEqual(url.searchParams.toString(), ''); +} diff --git a/test/parallel/test-whatwg-url-setters.js b/test/parallel/test-whatwg-url-setters.js index 6634fdd4b844e5..8742ab8ed372de 100644 --- a/test/parallel/test-whatwg-url-setters.js +++ b/test/parallel/test-whatwg-url-setters.js @@ -6,7 +6,6 @@ if (!common.hasIntl) { common.skip('missing Intl'); } -const URL = require('url').URL; const { test, assert_equals } = require('../common/wpt').harness; const fixtures = require('../common/fixtures'); diff --git a/test/parallel/test-whatwg-url-toascii.js b/test/parallel/test-whatwg-url-toascii.js index c5f0f24d6031d1..e5180bfb344127 100644 --- a/test/parallel/test-whatwg-url-toascii.js +++ b/test/parallel/test-whatwg-url-toascii.js @@ -6,7 +6,6 @@ if (!common.hasIntl) { } const fixtures = require('../common/fixtures'); -const { URL } = require('url'); const { test, assert_equals, assert_throws } = require('../common/wpt').harness; const request = { diff --git a/test/parallel/test-worker-unsupported-things.js b/test/parallel/test-worker-unsupported-things.js index a9f434eeaf55bb..18c1617c3cde5e 100644 --- a/test/parallel/test-worker-unsupported-things.js +++ b/test/parallel/test-worker-unsupported-things.js @@ -24,6 +24,14 @@ if (!process.env.HAS_STARTED_WORKER) { assert.strictEqual(process.debugPort, before); } + { + const mask = 0o600; + assert.throws(() => { process.umask(mask); }, { + code: 'ERR_WORKER_UNSUPPORTED_OPERATION', + message: 'Setting process.umask() is not supported in workers' + }); + } + const stubs = ['abort', 'chdir', 'send', 'disconnect']; if (!common.isWindows) { @@ -35,13 +43,19 @@ if (!process.env.HAS_STARTED_WORKER) { assert.strictEqual(process[fn].disabled, true); assert.throws(() => { process[fn](); - }, { code: 'ERR_WORKER_UNSUPPORTED_OPERATION' }); + }, { + code: 'ERR_WORKER_UNSUPPORTED_OPERATION', + message: `process.${fn}() is not supported in workers` + }); }); ['channel', 'connected'].forEach((fn) => { assert.throws(() => { process[fn]; // eslint-disable-line no-unused-expressions - }, { code: 'ERR_WORKER_UNSUPPORTED_OPERATION' }); + }, { + code: 'ERR_WORKER_UNSUPPORTED_OPERATION', + message: `process.${fn} is not supported in workers` + }); }); assert.strictEqual('_startProfilerIdleNotifier' in process, false); diff --git a/test/sequential/test-async-wrap-getasyncid.js b/test/sequential/test-async-wrap-getasyncid.js index e37e613b747dc2..e9b2e2d2cca247 100644 --- a/test/sequential/test-async-wrap-getasyncid.js +++ b/test/sequential/test-async-wrap-getasyncid.js @@ -69,6 +69,7 @@ const { getSystemErrorName } = require('util'); delete providers.ELDHISTOGRAM; delete providers.SIGINTWATCHDOG; delete providers.WORKERHEAPSNAPSHOT; + delete providers.FIXEDSIZEBLOBCOPY; const objKeys = 
Object.keys(providers); if (objKeys.length > 0) diff --git a/test/sequential/test-inspector-port-zero.js b/test/sequential/test-inspector-port-zero.js index 59027b5e30769d..1683394a1dd4a3 100644 --- a/test/sequential/test-inspector-port-zero.js +++ b/test/sequential/test-inspector-port-zero.js @@ -4,7 +4,6 @@ const { mustCall, skipIfInspectorDisabled } = require('../common'); skipIfInspectorDisabled(); const assert = require('assert'); -const { URL } = require('url'); const { spawn } = require('child_process'); function test(arg, port = '') { diff --git a/test/sequential/test-performance-eventloopdelay.js b/test/sequential/test-performance-eventloopdelay.js index 5021ee98073d0e..b23e0977428347 100644 --- a/test/sequential/test-performance-eventloopdelay.js +++ b/test/sequential/test-performance-eventloopdelay.js @@ -86,7 +86,7 @@ const { sleep } = require('internal/util'); } ); }); - [-1, 0, 101].forEach((i) => { + [-1, 0, 101, NaN].forEach((i) => { assert.throws( () => histogram.percentile(i), { diff --git a/test/wpt/status/FileAPI/blob.json b/test/wpt/status/FileAPI/blob.json new file mode 100644 index 00000000000000..1b463a1f0a1b7f --- /dev/null +++ b/test/wpt/status/FileAPI/blob.json @@ -0,0 +1,8 @@ +{ + "Blob-stream.any.js": { + "skip": "Depends on Web Streams API" + }, + "Blob-in-worker.worker.js": { + "skip": "Depends on Web Workers API" + } +} diff --git a/test/wpt/test-blob.js b/test/wpt/test-blob.js new file mode 100644 index 00000000000000..92e18bc0ef2f22 --- /dev/null +++ b/test/wpt/test-blob.js @@ -0,0 +1,13 @@ +'use strict'; + +require('../common'); +const { WPTRunner } = require('../common/wpt'); + +const runner = new WPTRunner('FileAPI/blob'); + +runner.setInitScript(` + const { Blob } = require('buffer'); + global.Blob = Blob; +`); + +runner.runJsTests(); diff --git a/tools/doc/allhtml.js b/tools/doc/allhtml.js index c038b1f57a9bb8..5a24cee4292656 100644 --- a/tools/doc/allhtml.js +++ b/tools/doc/allhtml.js @@ -31,10 +31,10 @@ for (const link of toc.match(/<a.*?>/g)) { const data = fs.readFileSync(source + '/' + href, 'utf8'); // Split the doc. - const match = /(<\/ul>\s*)?<\/div>\s*<div id="apicontent">/.exec(data); + const match = /(<\/ul>\s*)?<\/\w+>\s*<\w+ id="apicontent">/.exec(data); contents += data.slice(0, match.index) - .replace(/[\s\S]*?<div id="toc">\s*<h2>.*?<\/h2>\s*(<ul>\s*)?/, ''); + .replace(/[\s\S]*?id="toc"[^>]*>\s*<\w+>.*?<\/\w+>\s*(<ul>
          \s*)?/, ''); apicontent += data.slice(match.index + match[0].length) .replace(/[\s\S]*/, '') @@ -59,13 +59,13 @@ let all = toc.replace(/index\.html/g, 'all.html') all = all.replace(/.*?\| /, '<title>'); // Insert the combined table of contents. -const tocStart = /<div id="toc">\s*<h2>.*?<\/h2>\s*/.exec(all); +const tocStart = /<\w+ id="toc"[^>]*>\s*<\w+>.*?<\/\w+>\s*/.exec(all); all = all.slice(0, tocStart.index + tocStart[0].length) + '<ul>\n' + contents + '</ul>\n' + all.slice(tocStart.index + tocStart[0].length); // Replace apicontent with the concatenated set of apicontents from each source. -const apiStart = /<div id="apicontent">\s*/.exec(all); +const apiStart = /<\w+ id="apicontent">\s*/.exec(all); const apiEnd = all.lastIndexOf('<!-- API END -->'); all = all.slice(0, apiStart.index + apiStart[0].length) + apicontent + diff --git a/tools/doc/alljson.js b/tools/doc/alljson.js index 7e027f764e7efd..0d697dde01d091 100644 --- a/tools/doc/alljson.js +++ b/tools/doc/alljson.js @@ -43,6 +43,9 @@ for (const link of toc.match(/<a.*?>/g)) { for (const property in data) { if (results.hasOwnProperty(property)) { + data[property].forEach((mod) => { + mod.source = data.source; + }); results[property].push(...data[property]); } } diff --git a/tools/doc/stability.js b/tools/doc/stability.js new file mode 100644 index 00000000000000..ca4158ebc9a77e --- /dev/null +++ b/tools/doc/stability.js @@ -0,0 +1,111 @@ +'use strict'; + +// Build stability table to documentation.html/json/md by generated all.json + +const fs = require('fs'); +const path = require('path'); +const unified = require('unified'); +const raw = require('rehype-raw'); +const markdown = require('remark-parse'); +const htmlStringify = require('rehype-stringify'); +const gfm = require('remark-gfm'); +const remark2rehype = require('remark-rehype'); +const visit = require('unist-util-visit'); + +const source = `${__dirname}/../../out/doc/api`; +const data = require(path.join(source, 'all.json')); +const mark = '<!-- STABILITY_OVERVIEW_SLOT -->'; + +const output = { + json: path.join(source, 'stability.json'), + docHTML: path.join(source, 'documentation.html'), + docJSON: path.join(source, 'documentation.json'), + docMarkdown: path.join(source, 'documentation.md'), +}; + +function collectStability(data) { + const stability = []; + + for (const mod of data.modules) { + if (mod.displayName && mod.stability >= 0) { + const link = mod.source.replace('doc/api/', '').replace('.md', '.html'); + // const link = re.exec(toc)[1] + stability.push({ + api: mod.name, + link: link, + stability: mod.stability, + stabilityText: `(${mod.stability}) ${mod.stabilityText}`, + }); + } + } + + stability.sort((a, b) => a.api.localeCompare(b.api)); + return stability; +} + +function createMarkdownTable(data) { + const md = ['| API | Stability |', '| --- | --------- |']; + + for (const mod of data) { + md.push(`| [${mod.api}](${mod.link}) | ${mod.stabilityText} |`); + } + + return md.join('\n'); +} + +function createHTML(md) { + const file = unified() + .use(markdown) + .use(gfm) + .use(remark2rehype, { allowDangerousHtml: true }) + .use(raw) + .use(htmlStringify) + .use(processStability) + .processSync(md); + + return file.contents.trim(); +} + +function processStability() { + return (tree) => { + visit(tree, { type: 'element', tagName: 'tr' }, (node) => { + const [api, stability] = node.children; + if (api.tagName !== 'td') { + return; + } + + api.properties.class = 'module_stability'; + + const level = stability.children[0].value[1]; + 
stability.properties.class = `api_stability api_stability_${level}`; + }); + }; +} + +function updateStabilityMark(file, value, mark) { + const fd = fs.openSync(file, 'r+'); + const content = fs.readFileSync(fd); + + // Find the position of the `mark`. + const index = content.indexOf(mark); + + // Overwrite the mark with `value` parameter. + const offset = fs.writeSync(fd, value, index, 'utf-8'); + + // Re-write the end of the file after `value`. + fs.writeSync(fd, content, index + mark.length, undefined, index + offset); + fs.closeSync(fd); +} + +const stability = collectStability(data); + +// add markdown +const markdownTable = createMarkdownTable(stability); +updateStabilityMark(output.docMarkdown, markdownTable, mark); + +// add html table +const html = createHTML(markdownTable); +updateStabilityMark(output.docHTML, html, mark); + +// add json output +updateStabilityMark(output.docJSON, JSON.stringify(html), JSON.stringify(mark)); diff --git a/tools/doc/type-parser.js b/tools/doc/type-parser.js index 624ff0974d9cb5..5a91b1556c596d 100644 --- a/tools/doc/type-parser.js +++ b/tools/doc/type-parser.js @@ -40,6 +40,8 @@ const customTypesMap = { 'WebAssembly.Instance': `${jsDocPrefix}Reference/Global_Objects/WebAssembly/Instance`, + 'Blob': 'buffer.html#buffer_class_blob', + 'BroadcastChannel': 'worker_threads.html#worker_threads_class_broadcastchannel_' + 'extends_eventtarget', diff --git a/tools/icu/current_ver.dep b/tools/icu/current_ver.dep index b4caab129071e1..1c7f45879621c0 100644 --- a/tools/icu/current_ver.dep +++ b/tools/icu/current_ver.dep @@ -1,6 +1,6 @@ [ { - "url": "https://github.com/unicode-org/icu/releases/download/release-68-1/icu4c-68_1-src.tgz", - "md5": "6a99b541ea01f271257b121a4433c7c0" + "url": "https://github.com/unicode-org/icu/releases/download/release-68-2/icu4c-68_2-src.tgz", + "md5": "c21cbdfe31a1e325afe765a16f907d20" } ] diff --git a/tools/icu/icu-generic.gyp b/tools/icu/icu-generic.gyp index 680f8528df1384..36339144926096 100644 --- a/tools/icu/icu-generic.gyp +++ b/tools/icu/icu-generic.gyp @@ -107,80 +107,6 @@ 'sources': [ '<@(icu_src_i18n)' ], - ## if your compiler can dead-strip, these exclusions will - ## make ZERO difference to binary size. - ## Made ICU-specific for future-proofing. 
- 'conditions': [ - [ 'icu_ver_major == 55', { 'sources!': [ - # alphabetic index - '<(icu_path)/source/i18n/alphaindex.cpp', - # BOCSU - # misc - '<(icu_path)/source/i18n/regexcmp.cpp', - '<(icu_path)/source/i18n/regexcmp.h', - '<(icu_path)/source/i18n/regexcst.h', - '<(icu_path)/source/i18n/regeximp.cpp', - '<(icu_path)/source/i18n/regeximp.h', - '<(icu_path)/source/i18n/regexst.cpp', - '<(icu_path)/source/i18n/regexst.h', - '<(icu_path)/source/i18n/regextxt.cpp', - '<(icu_path)/source/i18n/regextxt.h', - '<(icu_path)/source/i18n/region.cpp', - '<(icu_path)/source/i18n/region_impl.h', - '<(icu_path)/source/i18n/reldatefmt.cpp', - '<(icu_path)/source/i18n/reldatefmt.h' - '<(icu_path)/source/i18n/scientificformathelper.cpp', - '<(icu_path)/source/i18n/tmunit.cpp', - '<(icu_path)/source/i18n/tmutamt.cpp', - '<(icu_path)/source/i18n/tmutfmt.cpp', - '<(icu_path)/source/i18n/uregex.cpp', - '<(icu_path)/source/i18n/uregexc.cpp', - '<(icu_path)/source/i18n/uregion.cpp', - '<(icu_path)/source/i18n/uspoof.cpp', - '<(icu_path)/source/i18n/uspoof_build.cpp', - '<(icu_path)/source/i18n/uspoof_conf.cpp', - '<(icu_path)/source/i18n/uspoof_conf.h', - '<(icu_path)/source/i18n/uspoof_impl.cpp', - '<(icu_path)/source/i18n/uspoof_impl.h', - '<(icu_path)/source/i18n/uspoof_wsconf.cpp', - '<(icu_path)/source/i18n/uspoof_wsconf.h', - ]}], - [ 'icu_ver_major == 57', { 'sources!': [ - - # alphabetic index - '<(icu_path)/source/i18n/alphaindex.cpp', - # BOCSU - # misc - '<(icu_path)/source/i18n/regexcmp.cpp', - '<(icu_path)/source/i18n/regexcmp.h', - '<(icu_path)/source/i18n/regexcst.h', - '<(icu_path)/source/i18n/regeximp.cpp', - '<(icu_path)/source/i18n/regeximp.h', - '<(icu_path)/source/i18n/regexst.cpp', - '<(icu_path)/source/i18n/regexst.h', - '<(icu_path)/source/i18n/regextxt.cpp', - '<(icu_path)/source/i18n/regextxt.h', - '<(icu_path)/source/i18n/region.cpp', - '<(icu_path)/source/i18n/region_impl.h', - '<(icu_path)/source/i18n/reldatefmt.cpp', - '<(icu_path)/source/i18n/reldatefmt.h' - '<(icu_path)/source/i18n/scientificformathelper.cpp', - '<(icu_path)/source/i18n/tmunit.cpp', - '<(icu_path)/source/i18n/tmutamt.cpp', - '<(icu_path)/source/i18n/tmutfmt.cpp', - '<(icu_path)/source/i18n/uregex.cpp', - '<(icu_path)/source/i18n/uregexc.cpp', - '<(icu_path)/source/i18n/uregion.cpp', - '<(icu_path)/source/i18n/uspoof.cpp', - '<(icu_path)/source/i18n/uspoof_build.cpp', - '<(icu_path)/source/i18n/uspoof_conf.cpp', - '<(icu_path)/source/i18n/uspoof_conf.h', - '<(icu_path)/source/i18n/uspoof_impl.cpp', - '<(icu_path)/source/i18n/uspoof_impl.h', - '<(icu_path)/source/i18n/uspoof_wsconf.cpp', - '<(icu_path)/source/i18n/uspoof_wsconf.h', - ]}], - ], 'include_dirs': [ '<(icu_path)/source/i18n', ], @@ -410,35 +336,6 @@ ## make ZERO difference to binary size. ## Made ICU-specific for future-proofing. 'conditions': [ - [ 'icu_ver_major == 55', { 'sources!': [ - - # bidi- not needed (yet!) - '<(icu_path)/source/common/ubidi.c', - '<(icu_path)/source/common/ubidiimp.h', - '<(icu_path)/source/common/ubidiln.c', - '<(icu_path)/source/common/ubidiwrt.c', - #'<(icu_path)/source/common/ubidi_props.c', - #'<(icu_path)/source/common/ubidi_props.h', - #'<(icu_path)/source/common/ubidi_props_data.h', - # and the callers - '<(icu_path)/source/common/ushape.cpp', - ]}], - [ 'icu_ver_major == 57', { 'sources!': [ - # work around http://bugs.icu-project.org/trac/ticket/12451 - # (benign afterwards) - '<(icu_path)/source/common/cstr.cpp', - - # bidi- not needed (yet!) 
- '<(icu_path)/source/common/ubidi.c', - '<(icu_path)/source/common/ubidiimp.h', - '<(icu_path)/source/common/ubidiln.c', - '<(icu_path)/source/common/ubidiwrt.c', - #'<(icu_path)/source/common/ubidi_props.c', - #'<(icu_path)/source/common/ubidi_props.h', - #'<(icu_path)/source/common/ubidi_props_data.h', - # and the callers - '<(icu_path)/source/common/ushape.cpp', - ]}], [ 'OS == "solaris"', { 'defines': [ '_XOPEN_SOURCE_EXTENDED=0', ]}], diff --git a/tools/icu/patches/64/source/common/putil.cpp b/tools/icu/patches/64/source/common/putil.cpp deleted file mode 100644 index 59cf232afee6a8..00000000000000 --- a/tools/icu/patches/64/source/common/putil.cpp +++ /dev/null @@ -1,2415 +0,0 @@ -// © 2016 and later: Unicode, Inc. and others. -// License & terms of use: http://www.unicode.org/copyright.html -/* -****************************************************************************** -* -* Copyright (C) 1997-2016, International Business Machines -* Corporation and others. All Rights Reserved. -* -****************************************************************************** -* -* FILE NAME : putil.c (previously putil.cpp and ptypes.cpp) -* -* Date Name Description -* 04/14/97 aliu Creation. -* 04/24/97 aliu Added getDefaultDataDirectory() and -* getDefaultLocaleID(). -* 04/28/97 aliu Rewritten to assume Unix and apply general methods -* for assumed case. Non-UNIX platforms must be -* special-cased. Rewrote numeric methods dealing -* with NaN and Infinity to be platform independent -* over all IEEE 754 platforms. -* 05/13/97 aliu Restored sign of timezone -* (semantics are hours West of GMT) -* 06/16/98 erm Added IEEE_754 stuff, cleaned up isInfinite, isNan, -* nextDouble.. -* 07/22/98 stephen Added remainder, max, min, trunc -* 08/13/98 stephen Added isNegativeInfinity, isPositiveInfinity -* 08/24/98 stephen Added longBitsFromDouble -* 09/08/98 stephen Minor changes for Mac Port -* 03/02/99 stephen Removed openFile(). Added AS400 support. -* Fixed EBCDIC tables -* 04/15/99 stephen Converted to C. -* 06/28/99 stephen Removed mutex locking in u_isBigEndian(). -* 08/04/99 jeffrey R. Added OS/2 changes -* 11/15/99 helena Integrated S/390 IEEE support. -* 04/26/01 Barry N. OS/400 support for uprv_getDefaultLocaleID -* 08/15/01 Steven H. OS/400 support for uprv_getDefaultCodepage -* 01/03/08 Steven L. Fake Time Support -****************************************************************************** -*/ - -// Defines _XOPEN_SOURCE for access to POSIX functions. -// Must be before any other #includes. -#include "uposixdefs.h" - -// First, the platform type. Need this for U_PLATFORM. -#include "unicode/platform.h" - -#if U_PLATFORM == U_PF_MINGW && defined __STRICT_ANSI__ -/* tzset isn't defined in strict ANSI on MinGW. */ -#undef __STRICT_ANSI__ -#endif - -/* - * Cygwin with GCC requires inclusion of time.h after the above disabling strict asci mode statement. - */ -#include <time.h> - -#if !U_PLATFORM_USES_ONLY_WIN32_API -#include <sys/time.h> -#endif - -/* include the rest of the ICU headers */ -#include "unicode/putil.h" -#include "unicode/ustring.h" -#include "putilimp.h" -#include "uassert.h" -#include "umutex.h" -#include "cmemory.h" -#include "cstring.h" -#include "locmap.h" -#include "ucln_cmn.h" -#include "charstr.h" - -/* Include standard headers. 
*/ -#include <stdio.h> -#include <stdlib.h> -#include <string.h> -#include <math.h> -#include <locale.h> -#include <float.h> - -#ifndef U_COMMON_IMPLEMENTATION -#error U_COMMON_IMPLEMENTATION not set - must be set for all ICU source files in common/ - see http://userguide.icu-project.org/howtouseicu -#endif - - -/* include system headers */ -#if U_PLATFORM_USES_ONLY_WIN32_API - /* - * TODO: U_PLATFORM_USES_ONLY_WIN32_API includes MinGW. - * Should Cygwin be included as well (U_PLATFORM_HAS_WIN32_API) - * to use native APIs as much as possible? - */ -#ifndef WIN32_LEAN_AND_MEAN -# define WIN32_LEAN_AND_MEAN -#endif -# define VC_EXTRALEAN -# define NOUSER -# define NOSERVICE -# define NOIME -# define NOMCX -# include <windows.h> -# include "unicode/uloc.h" -# include "wintz.h" -#elif U_PLATFORM == U_PF_OS400 -# include <float.h> -# include <qusec.h> /* error code structure */ -# include <qusrjobi.h> -# include <qliept.h> /* EPT_CALL macro - this include must be after all other "QSYSINCs" */ -# include <mih/testptr.h> /* For uprv_maximumPtr */ -#elif U_PLATFORM == U_PF_OS390 -# include "unicode/ucnv.h" /* Needed for UCNV_SWAP_LFNL_OPTION_STRING */ -#elif U_PLATFORM_IS_DARWIN_BASED || U_PLATFORM_IS_LINUX_BASED || U_PLATFORM == U_PF_BSD || U_PLATFORM == U_PF_SOLARIS -# include <limits.h> -# include <unistd.h> -# if U_PLATFORM == U_PF_SOLARIS -# ifndef _XPG4_2 -# define _XPG4_2 -# endif -# endif -#elif U_PLATFORM == U_PF_QNX -# include <sys/neutrino.h> -#endif - -/* - * Only include langinfo.h if we have a way to get the codeset. If we later - * depend on more feature, we can test on U_HAVE_NL_LANGINFO. - * - */ - -#if U_HAVE_NL_LANGINFO_CODESET -#include <langinfo.h> -#endif - -/** - * Simple things (presence of functions, etc) should just go in configure.in and be added to - * icucfg.h via autoheader. - */ -#if U_PLATFORM_IMPLEMENTS_POSIX -# if U_PLATFORM == U_PF_OS400 -# define HAVE_DLFCN_H 0 -# define HAVE_DLOPEN 0 -# else -# ifndef HAVE_DLFCN_H -# define HAVE_DLFCN_H 1 -# endif -# ifndef HAVE_DLOPEN -# define HAVE_DLOPEN 1 -# endif -# endif -# ifndef HAVE_GETTIMEOFDAY -# define HAVE_GETTIMEOFDAY 1 -# endif -#else -# define HAVE_DLFCN_H 0 -# define HAVE_DLOPEN 0 -# define HAVE_GETTIMEOFDAY 0 -#endif - -U_NAMESPACE_USE - -/* Define the extension for data files, again... */ -#define DATA_TYPE "dat" - -/* Leave this copyright notice here! */ -static const char copyright[] = U_COPYRIGHT_STRING; - -/* floating point implementations ------------------------------------------- */ - -/* We return QNAN rather than SNAN*/ -#define SIGN 0x80000000U - -/* Make it easy to define certain types of constants */ -typedef union { - int64_t i64; /* This must be defined first in order to allow the initialization to work. This is a C89 feature. */ - double d64; -} BitPatternConversion; -static const BitPatternConversion gNan = { (int64_t) INT64_C(0x7FF8000000000000) }; -static const BitPatternConversion gInf = { (int64_t) INT64_C(0x7FF0000000000000) }; - -/*--------------------------------------------------------------------------- - Platform utilities - Our general strategy is to assume we're on a POSIX platform. Platforms which - are non-POSIX must declare themselves so. The default POSIX implementation - will sometimes work for non-POSIX platforms as well (e.g., the NaN-related - functions). 
- ---------------------------------------------------------------------------*/ - -#if U_PLATFORM_USES_ONLY_WIN32_API || U_PLATFORM == U_PF_OS400 -# undef U_POSIX_LOCALE -#else -# define U_POSIX_LOCALE 1 -#endif - -/* - WARNING! u_topNBytesOfDouble and u_bottomNBytesOfDouble - can't be properly optimized by the gcc compiler sometimes (i.e. gcc 3.2). -*/ -#if !IEEE_754 -static char* -u_topNBytesOfDouble(double* d, int n) -{ -#if U_IS_BIG_ENDIAN - return (char*)d; -#else - return (char*)(d + 1) - n; -#endif -} - -static char* -u_bottomNBytesOfDouble(double* d, int n) -{ -#if U_IS_BIG_ENDIAN - return (char*)(d + 1) - n; -#else - return (char*)d; -#endif -} -#endif /* !IEEE_754 */ - -#if IEEE_754 -static UBool -u_signBit(double d) { - uint8_t hiByte; -#if U_IS_BIG_ENDIAN - hiByte = *(uint8_t *)&d; -#else - hiByte = *(((uint8_t *)&d) + sizeof(double) - 1); -#endif - return (hiByte & 0x80) != 0; -} -#endif - - - -#if defined (U_DEBUG_FAKETIME) -/* Override the clock to test things without having to move the system clock. - * Assumes POSIX gettimeofday() will function - */ -UDate fakeClock_t0 = 0; /** Time to start the clock from **/ -UDate fakeClock_dt = 0; /** Offset (fake time - real time) **/ -UBool fakeClock_set = FALSE; /** True if fake clock has spun up **/ - -static UDate getUTCtime_real() { - struct timeval posixTime; - gettimeofday(&posixTime, NULL); - return (UDate)(((int64_t)posixTime.tv_sec * U_MILLIS_PER_SECOND) + (posixTime.tv_usec/1000)); -} - -static UDate getUTCtime_fake() { - static UMutex fakeClockMutex = U_MUTEX_INTIALIZER; - umtx_lock(&fakeClockMutex); - if(!fakeClock_set) { - UDate real = getUTCtime_real(); - const char *fake_start = getenv("U_FAKETIME_START"); - if((fake_start!=NULL) && (fake_start[0]!=0)) { - sscanf(fake_start,"%lf",&fakeClock_t0); - fakeClock_dt = fakeClock_t0 - real; - fprintf(stderr,"U_DEBUG_FAKETIME was set at compile time, so the ICU clock will start at a preset value\n" - "env variable U_FAKETIME_START=%.0f (%s) for an offset of %.0f ms from the current time %.0f\n", - fakeClock_t0, fake_start, fakeClock_dt, real); - } else { - fakeClock_dt = 0; - fprintf(stderr,"U_DEBUG_FAKETIME was set at compile time, but U_FAKETIME_START was not set.\n" - "Set U_FAKETIME_START to the number of milliseconds since 1/1/1970 to set the ICU clock.\n"); - } - fakeClock_set = TRUE; - } - umtx_unlock(&fakeClockMutex); - - return getUTCtime_real() + fakeClock_dt; -} -#endif - -#if U_PLATFORM_USES_ONLY_WIN32_API -typedef union { - int64_t int64; - FILETIME fileTime; -} FileTimeConversion; /* This is like a ULARGE_INTEGER */ - -/* Number of 100 nanoseconds from 1/1/1601 to 1/1/1970 */ -#define EPOCH_BIAS INT64_C(116444736000000000) -#define HECTONANOSECOND_PER_MILLISECOND 10000 - -#endif - -/*--------------------------------------------------------------------------- - Universal Implementations - These are designed to work on all platforms. Try these, and if they - don't work on your platform, then special case your platform with new - implementations. 
----------------------------------------------------------------------------*/ - -U_CAPI UDate U_EXPORT2 -uprv_getUTCtime() -{ -#if defined(U_DEBUG_FAKETIME) - return getUTCtime_fake(); /* Hook for overriding the clock */ -#else - return uprv_getRawUTCtime(); -#endif -} - -/* Return UTC (GMT) time measured in milliseconds since 0:00 on 1/1/70.*/ -U_CAPI UDate U_EXPORT2 -uprv_getRawUTCtime() -{ -#if U_PLATFORM_USES_ONLY_WIN32_API - - FileTimeConversion winTime; - GetSystemTimeAsFileTime(&winTime.fileTime); - return (UDate)((winTime.int64 - EPOCH_BIAS) / HECTONANOSECOND_PER_MILLISECOND); -#else - -#if HAVE_GETTIMEOFDAY - struct timeval posixTime; - gettimeofday(&posixTime, NULL); - return (UDate)(((int64_t)posixTime.tv_sec * U_MILLIS_PER_SECOND) + (posixTime.tv_usec/1000)); -#else - time_t epochtime; - time(&epochtime); - return (UDate)epochtime * U_MILLIS_PER_SECOND; -#endif - -#endif -} - -/*----------------------------------------------------------------------------- - IEEE 754 - These methods detect and return NaN and infinity values for doubles - conforming to IEEE 754. Platforms which support this standard include X86, - Mac 680x0, Mac PowerPC, AIX RS/6000, and most others. - If this doesn't work on your platform, you have non-IEEE floating-point, and - will need to code your own versions. A naive implementation is to return 0.0 - for getNaN and getInfinity, and false for isNaN and isInfinite. - ---------------------------------------------------------------------------*/ - -U_CAPI UBool U_EXPORT2 -uprv_isNaN(double number) -{ -#if IEEE_754 - BitPatternConversion convertedNumber; - convertedNumber.d64 = number; - /* Infinity is 0x7FF0000000000000U. Anything greater than that is a NaN */ - return (UBool)((convertedNumber.i64 & U_INT64_MAX) > gInf.i64); - -#elif U_PLATFORM == U_PF_OS390 - uint32_t highBits = *(uint32_t*)u_topNBytesOfDouble(&number, - sizeof(uint32_t)); - uint32_t lowBits = *(uint32_t*)u_bottomNBytesOfDouble(&number, - sizeof(uint32_t)); - - return ((highBits & 0x7F080000L) == 0x7F080000L) && - (lowBits == 0x00000000L); - -#else - /* If your platform doesn't support IEEE 754 but *does* have an NaN value,*/ - /* you'll need to replace this default implementation with what's correct*/ - /* for your platform.*/ - return number != number; -#endif -} - -U_CAPI UBool U_EXPORT2 -uprv_isInfinite(double number) -{ -#if IEEE_754 - BitPatternConversion convertedNumber; - convertedNumber.d64 = number; - /* Infinity is exactly 0x7FF0000000000000U. 
*/ - return (UBool)((convertedNumber.i64 & U_INT64_MAX) == gInf.i64); -#elif U_PLATFORM == U_PF_OS390 - uint32_t highBits = *(uint32_t*)u_topNBytesOfDouble(&number, - sizeof(uint32_t)); - uint32_t lowBits = *(uint32_t*)u_bottomNBytesOfDouble(&number, - sizeof(uint32_t)); - - return ((highBits & ~SIGN) == 0x70FF0000L) && (lowBits == 0x00000000L); - -#else - /* If your platform doesn't support IEEE 754 but *does* have an infinity*/ - /* value, you'll need to replace this default implementation with what's*/ - /* correct for your platform.*/ - return number == (2.0 * number); -#endif -} - -U_CAPI UBool U_EXPORT2 -uprv_isPositiveInfinity(double number) -{ -#if IEEE_754 || U_PLATFORM == U_PF_OS390 - return (UBool)(number > 0 && uprv_isInfinite(number)); -#else - return uprv_isInfinite(number); -#endif -} - -U_CAPI UBool U_EXPORT2 -uprv_isNegativeInfinity(double number) -{ -#if IEEE_754 || U_PLATFORM == U_PF_OS390 - return (UBool)(number < 0 && uprv_isInfinite(number)); - -#else - uint32_t highBits = *(uint32_t*)u_topNBytesOfDouble(&number, - sizeof(uint32_t)); - return((highBits & SIGN) && uprv_isInfinite(number)); - -#endif -} - -U_CAPI double U_EXPORT2 -uprv_getNaN() -{ -#if IEEE_754 || U_PLATFORM == U_PF_OS390 - return gNan.d64; -#else - /* If your platform doesn't support IEEE 754 but *does* have an NaN value,*/ - /* you'll need to replace this default implementation with what's correct*/ - /* for your platform.*/ - return 0.0; -#endif -} - -U_CAPI double U_EXPORT2 -uprv_getInfinity() -{ -#if IEEE_754 || U_PLATFORM == U_PF_OS390 - return gInf.d64; -#else - /* If your platform doesn't support IEEE 754 but *does* have an infinity*/ - /* value, you'll need to replace this default implementation with what's*/ - /* correct for your platform.*/ - return 0.0; -#endif -} - -U_CAPI double U_EXPORT2 -uprv_floor(double x) -{ - return floor(x); -} - -U_CAPI double U_EXPORT2 -uprv_ceil(double x) -{ - return ceil(x); -} - -U_CAPI double U_EXPORT2 -uprv_round(double x) -{ - return uprv_floor(x + 0.5); -} - -U_CAPI double U_EXPORT2 -uprv_fabs(double x) -{ - return fabs(x); -} - -U_CAPI double U_EXPORT2 -uprv_modf(double x, double* y) -{ - return modf(x, y); -} - -U_CAPI double U_EXPORT2 -uprv_fmod(double x, double y) -{ - return fmod(x, y); -} - -U_CAPI double U_EXPORT2 -uprv_pow(double x, double y) -{ - /* This is declared as "double pow(double x, double y)" */ - return pow(x, y); -} - -U_CAPI double U_EXPORT2 -uprv_pow10(int32_t x) -{ - return pow(10.0, (double)x); -} - -U_CAPI double U_EXPORT2 -uprv_fmax(double x, double y) -{ -#if IEEE_754 - /* first handle NaN*/ - if(uprv_isNaN(x) || uprv_isNaN(y)) - return uprv_getNaN(); - - /* check for -0 and 0*/ - if(x == 0.0 && y == 0.0 && u_signBit(x)) - return y; - -#endif - - /* this should work for all flt point w/o NaN and Inf special cases */ - return (x > y ? x : y); -} - -U_CAPI double U_EXPORT2 -uprv_fmin(double x, double y) -{ -#if IEEE_754 - /* first handle NaN*/ - if(uprv_isNaN(x) || uprv_isNaN(y)) - return uprv_getNaN(); - - /* check for -0 and 0*/ - if(x == 0.0 && y == 0.0 && u_signBit(y)) - return y; - -#endif - - /* this should work for all flt point w/o NaN and Inf special cases */ - return (x > y ? y : x); -} - -U_CAPI UBool U_EXPORT2 -uprv_add32_overflow(int32_t a, int32_t b, int32_t* res) { - // NOTE: Some compilers (GCC, Clang) have primitives available, like __builtin_add_overflow. - // This function could be optimized by calling one of those primitives. 
- auto a64 = static_cast<int64_t>(a); - auto b64 = static_cast<int64_t>(b); - int64_t res64 = a64 + b64; - *res = static_cast<int32_t>(res64); - return res64 != *res; -} - -U_CAPI UBool U_EXPORT2 -uprv_mul32_overflow(int32_t a, int32_t b, int32_t* res) { - // NOTE: Some compilers (GCC, Clang) have primitives available, like __builtin_mul_overflow. - // This function could be optimized by calling one of those primitives. - auto a64 = static_cast<int64_t>(a); - auto b64 = static_cast<int64_t>(b); - int64_t res64 = a64 * b64; - *res = static_cast<int32_t>(res64); - return res64 != *res; -} - -/** - * Truncates the given double. - * trunc(3.3) = 3.0, trunc (-3.3) = -3.0 - * This is different than calling floor() or ceil(): - * floor(3.3) = 3, floor(-3.3) = -4 - * ceil(3.3) = 4, ceil(-3.3) = -3 - */ -U_CAPI double U_EXPORT2 -uprv_trunc(double d) -{ -#if IEEE_754 - /* handle error cases*/ - if(uprv_isNaN(d)) - return uprv_getNaN(); - if(uprv_isInfinite(d)) - return uprv_getInfinity(); - - if(u_signBit(d)) /* Signbit() picks up -0.0; d<0 does not. */ - return ceil(d); - else - return floor(d); - -#else - return d >= 0 ? floor(d) : ceil(d); - -#endif -} - -/** - * Return the largest positive number that can be represented by an integer - * type of arbitrary bit length. - */ -U_CAPI double U_EXPORT2 -uprv_maxMantissa(void) -{ - return pow(2.0, DBL_MANT_DIG + 1.0) - 1.0; -} - -U_CAPI double U_EXPORT2 -uprv_log(double d) -{ - return log(d); -} - -U_CAPI void * U_EXPORT2 -uprv_maximumPtr(void * base) -{ -#if U_PLATFORM == U_PF_OS400 - /* - * With the provided function we should never be out of range of a given segment - * (a traditional/typical segment that is). Our segments have 5 bytes for the - * id and 3 bytes for the offset. The key is that the casting takes care of - * only retrieving the offset portion minus x1000. Hence, the smallest offset - * seen in a program is x001000 and when casted to an int would be 0. - * That's why we can only add 0xffefff. Otherwise, we would exceed the segment. - * - * Currently, 16MB is the current addressing limitation on i5/OS if the activation is - * non-TERASPACE. If it is TERASPACE it is 2GB - 4k(header information). - * This function determines the activation based on the pointer that is passed in and - * calculates the appropriate maximum available size for - * each pointer type (TERASPACE and non-TERASPACE) - * - * Unlike other operating systems, the pointer model isn't determined at - * compile time on i5/OS. - */ - if ((base != NULL) && (_TESTPTR(base, _C_TERASPACE_CHECK))) { - /* if it is a TERASPACE pointer the max is 2GB - 4k */ - return ((void *)(((char *)base)-((uint32_t)(base))+((uint32_t)0x7fffefff))); - } - /* otherwise 16MB since NULL ptr is not checkable or the ptr is not TERASPACE */ - return ((void *)(((char *)base)-((uint32_t)(base))+((uint32_t)0xffefff))); - -#else - return U_MAX_PTR(base); -#endif -} - -/*--------------------------------------------------------------------------- - Platform-specific Implementations - Try these, and if they don't work on your platform, then special case your - platform with new implementations. 
- ---------------------------------------------------------------------------*/ - -/* Generic time zone layer -------------------------------------------------- */ - -/* Time zone utilities */ -U_CAPI void U_EXPORT2 -uprv_tzset() -{ -#if defined(U_TZSET) - U_TZSET(); -#else - /* no initialization*/ -#endif -} - -U_CAPI int32_t U_EXPORT2 -uprv_timezone() -{ -#ifdef U_TIMEZONE - return U_TIMEZONE; -#else - time_t t, t1, t2; - struct tm tmrec; - int32_t tdiff = 0; - - time(&t); - uprv_memcpy( &tmrec, localtime(&t), sizeof(tmrec) ); -#if U_PLATFORM != U_PF_IPHONE - UBool dst_checked = (tmrec.tm_isdst != 0); /* daylight savings time is checked*/ -#endif - t1 = mktime(&tmrec); /* local time in seconds*/ - uprv_memcpy( &tmrec, gmtime(&t), sizeof(tmrec) ); - t2 = mktime(&tmrec); /* GMT (or UTC) in seconds*/ - tdiff = t2 - t1; - -#if U_PLATFORM != U_PF_IPHONE - /* imitate NT behaviour, which returns same timezone offset to GMT for - winter and summer. - This does not work on all platforms. For instance, on glibc on Linux - and on Mac OS 10.5, tdiff calculated above remains the same - regardless of whether DST is in effect or not. iOS is another - platform where this does not work. Linux + glibc and Mac OS 10.5 - have U_TIMEZONE defined so that this code is not reached. - */ - if (dst_checked) - tdiff += 3600; -#endif - return tdiff; -#endif -} - -/* Note that U_TZNAME does *not* have to be tzname, but if it is, - some platforms need to have it declared here. */ - -#if defined(U_TZNAME) && (U_PLATFORM == U_PF_IRIX || U_PLATFORM_IS_DARWIN_BASED) -/* RS6000 and others reject char **tzname. */ -extern U_IMPORT char *U_TZNAME[]; -#endif - -#if !UCONFIG_NO_FILE_IO && ((U_PLATFORM_IS_DARWIN_BASED && (U_PLATFORM != U_PF_IPHONE || defined(U_TIMEZONE))) || U_PLATFORM_IS_LINUX_BASED || U_PLATFORM == U_PF_BSD || U_PLATFORM == U_PF_SOLARIS) -/* These platforms are likely to use Olson timezone IDs. */ -/* common targets of the symbolic link at TZDEFAULT are: - * "/usr/share/zoneinfo/<olsonID>" default, older Linux distros, macOS to 10.12 - * "../usr/share/zoneinfo/<olsonID>" newer Linux distros: Red Hat Enterprise Linux 7, Ubuntu 16, SuSe Linux 12 - * "/usr/share/lib/zoneinfo/<olsonID>" Solaris - * "../usr/share/lib/zoneinfo/<olsonID>" Solaris - * "/var/db/timezone/zoneinfo/<olsonID>" macOS 10.13 - * To avoid checking lots of paths, just check that the target path - * before the <olsonID> ends with "/zoneinfo/", and the <olsonID> is valid. - */ - -#define CHECK_LOCALTIME_LINK 1 -#if U_PLATFORM_IS_DARWIN_BASED -#include <tzfile.h> -#define TZZONEINFO (TZDIR "/") -#elif U_PLATFORM == U_PF_SOLARIS -#define TZDEFAULT "/etc/localtime" -#define TZZONEINFO "/usr/share/lib/zoneinfo/" -#define TZ_ENV_CHECK "localtime" -#else -#define TZDEFAULT "/etc/localtime" -#define TZZONEINFO "/usr/share/zoneinfo/" -#endif -#define TZZONEINFOTAIL "/zoneinfo/" -#if U_HAVE_DIRENT_H -#define TZFILE_SKIP "posixrules" /* tz file to skip when searching. */ -/* Some Linux distributions have 'localtime' in /usr/share/zoneinfo - symlinked to /etc/localtime, which makes searchForTZFile return - 'localtime' when it's the first match. 
*/ -#define TZFILE_SKIP2 "localtime" -#define SEARCH_TZFILE -#include <dirent.h> /* Needed to search through system timezone files */ -#endif -static char gTimeZoneBuffer[PATH_MAX]; -static char *gTimeZoneBufferPtr = NULL; -#endif - -#if !U_PLATFORM_USES_ONLY_WIN32_API -#define isNonDigit(ch) (ch < '0' || '9' < ch) -static UBool isValidOlsonID(const char *id) { - int32_t idx = 0; - - /* Determine if this is something like Iceland (Olson ID) - or AST4ADT (non-Olson ID) */ - while (id[idx] && isNonDigit(id[idx]) && id[idx] != ',') { - idx++; - } - - /* If we went through the whole string, then it might be okay. - The timezone is sometimes set to "CST-7CDT", "CST6CDT5,J129,J131/19:30", - "GRNLNDST3GRNLNDDT" or similar, so we cannot use it. - The rest of the time it could be an Olson ID. George */ - return (UBool)(id[idx] == 0 - || uprv_strcmp(id, "PST8PDT") == 0 - || uprv_strcmp(id, "MST7MDT") == 0 - || uprv_strcmp(id, "CST6CDT") == 0 - || uprv_strcmp(id, "EST5EDT") == 0); -} - -/* On some Unix-like OS, 'posix' subdirectory in - /usr/share/zoneinfo replicates the top-level contents. 'right' - subdirectory has the same set of files, but individual files - are different from those in the top-level directory or 'posix' - because 'right' has files for TAI (Int'l Atomic Time) while 'posix' - has files for UTC. - When the first match for /etc/localtime is in either of them - (usually in posix because 'right' has different file contents), - or TZ environment variable points to one of them, createTimeZone - fails because, say, 'posix/America/New_York' is not an Olson - timezone id ('America/New_York' is). So, we have to skip - 'posix/' and 'right/' at the beginning. */ -static void skipZoneIDPrefix(const char** id) { - if (uprv_strncmp(*id, "posix/", 6) == 0 - || uprv_strncmp(*id, "right/", 6) == 0) - { - *id += 6; - } -} -#endif - -#if defined(U_TZNAME) && !U_PLATFORM_USES_ONLY_WIN32_API - -#define CONVERT_HOURS_TO_SECONDS(offset) (int32_t)(offset*3600) -typedef struct OffsetZoneMapping { - int32_t offsetSeconds; - int32_t daylightType; /* 0=U_DAYLIGHT_NONE, 1=daylight in June-U_DAYLIGHT_JUNE, 2=daylight in December=U_DAYLIGHT_DECEMBER*/ - const char *stdID; - const char *dstID; - const char *olsonID; -} OffsetZoneMapping; - -enum { U_DAYLIGHT_NONE=0,U_DAYLIGHT_JUNE=1,U_DAYLIGHT_DECEMBER=2 }; - -/* -This list tries to disambiguate a set of abbreviated timezone IDs and offsets -and maps it to an Olson ID. -Before adding anything to this list, take a look at -icu/source/tools/tzcode/tz.alias -Sometimes no daylight savings (0) is important to define due to aliases. -This list can be tested with icu/source/test/compat/tzone.pl -More values could be added to daylightType to increase precision. 
-*/ -static const struct OffsetZoneMapping OFFSET_ZONE_MAPPINGS[] = { - {-45900, 2, "CHAST", "CHADT", "Pacific/Chatham"}, - {-43200, 1, "PETT", "PETST", "Asia/Kamchatka"}, - {-43200, 2, "NZST", "NZDT", "Pacific/Auckland"}, - {-43200, 1, "ANAT", "ANAST", "Asia/Anadyr"}, - {-39600, 1, "MAGT", "MAGST", "Asia/Magadan"}, - {-37800, 2, "LHST", "LHST", "Australia/Lord_Howe"}, - {-36000, 2, "EST", "EST", "Australia/Sydney"}, - {-36000, 1, "SAKT", "SAKST", "Asia/Sakhalin"}, - {-36000, 1, "VLAT", "VLAST", "Asia/Vladivostok"}, - {-34200, 2, "CST", "CST", "Australia/South"}, - {-32400, 1, "YAKT", "YAKST", "Asia/Yakutsk"}, - {-32400, 1, "CHOT", "CHOST", "Asia/Choibalsan"}, - {-31500, 2, "CWST", "CWST", "Australia/Eucla"}, - {-28800, 1, "IRKT", "IRKST", "Asia/Irkutsk"}, - {-28800, 1, "ULAT", "ULAST", "Asia/Ulaanbaatar"}, - {-28800, 2, "WST", "WST", "Australia/West"}, - {-25200, 1, "HOVT", "HOVST", "Asia/Hovd"}, - {-25200, 1, "KRAT", "KRAST", "Asia/Krasnoyarsk"}, - {-21600, 1, "NOVT", "NOVST", "Asia/Novosibirsk"}, - {-21600, 1, "OMST", "OMSST", "Asia/Omsk"}, - {-18000, 1, "YEKT", "YEKST", "Asia/Yekaterinburg"}, - {-14400, 1, "SAMT", "SAMST", "Europe/Samara"}, - {-14400, 1, "AMT", "AMST", "Asia/Yerevan"}, - {-14400, 1, "AZT", "AZST", "Asia/Baku"}, - {-10800, 1, "AST", "ADT", "Asia/Baghdad"}, - {-10800, 1, "MSK", "MSD", "Europe/Moscow"}, - {-10800, 1, "VOLT", "VOLST", "Europe/Volgograd"}, - {-7200, 0, "EET", "CEST", "Africa/Tripoli"}, - {-7200, 1, "EET", "EEST", "Europe/Athens"}, /* Conflicts with Africa/Cairo */ - {-7200, 1, "IST", "IDT", "Asia/Jerusalem"}, - {-3600, 0, "CET", "WEST", "Africa/Algiers"}, - {-3600, 2, "WAT", "WAST", "Africa/Windhoek"}, - {0, 1, "GMT", "IST", "Europe/Dublin"}, - {0, 1, "GMT", "BST", "Europe/London"}, - {0, 0, "WET", "WEST", "Africa/Casablanca"}, - {0, 0, "WET", "WET", "Africa/El_Aaiun"}, - {3600, 1, "AZOT", "AZOST", "Atlantic/Azores"}, - {3600, 1, "EGT", "EGST", "America/Scoresbysund"}, - {10800, 1, "PMST", "PMDT", "America/Miquelon"}, - {10800, 2, "UYT", "UYST", "America/Montevideo"}, - {10800, 1, "WGT", "WGST", "America/Godthab"}, - {10800, 2, "BRT", "BRST", "Brazil/East"}, - {12600, 1, "NST", "NDT", "America/St_Johns"}, - {14400, 1, "AST", "ADT", "Canada/Atlantic"}, - {14400, 2, "AMT", "AMST", "America/Cuiaba"}, - {14400, 2, "CLT", "CLST", "Chile/Continental"}, - {14400, 2, "FKT", "FKST", "Atlantic/Stanley"}, - {14400, 2, "PYT", "PYST", "America/Asuncion"}, - {18000, 1, "CST", "CDT", "America/Havana"}, - {18000, 1, "EST", "EDT", "US/Eastern"}, /* Conflicts with America/Grand_Turk */ - {21600, 2, "EAST", "EASST", "Chile/EasterIsland"}, - {21600, 0, "CST", "MDT", "Canada/Saskatchewan"}, - {21600, 0, "CST", "CDT", "America/Guatemala"}, - {21600, 1, "CST", "CDT", "US/Central"}, /* Conflicts with Mexico/General */ - {25200, 1, "MST", "MDT", "US/Mountain"}, /* Conflicts with Mexico/BajaSur */ - {28800, 0, "PST", "PST", "Pacific/Pitcairn"}, - {28800, 1, "PST", "PDT", "US/Pacific"}, /* Conflicts with Mexico/BajaNorte */ - {32400, 1, "AKST", "AKDT", "US/Alaska"}, - {36000, 1, "HAST", "HADT", "US/Aleutian"} -}; - -/*#define DEBUG_TZNAME*/ - -static const char* remapShortTimeZone(const char *stdID, const char *dstID, int32_t daylightType, int32_t offset) -{ - int32_t idx; -#ifdef DEBUG_TZNAME - fprintf(stderr, "TZ=%s std=%s dst=%s daylight=%d offset=%d\n", getenv("TZ"), stdID, dstID, daylightType, offset); -#endif - for (idx = 0; idx < UPRV_LENGTHOF(OFFSET_ZONE_MAPPINGS); idx++) - { - if (offset == OFFSET_ZONE_MAPPINGS[idx].offsetSeconds - && daylightType == 
OFFSET_ZONE_MAPPINGS[idx].daylightType - && strcmp(OFFSET_ZONE_MAPPINGS[idx].stdID, stdID) == 0 - && strcmp(OFFSET_ZONE_MAPPINGS[idx].dstID, dstID) == 0) - { - return OFFSET_ZONE_MAPPINGS[idx].olsonID; - } - } - return NULL; -} -#endif - -#ifdef SEARCH_TZFILE -#define MAX_READ_SIZE 512 - -typedef struct DefaultTZInfo { - char* defaultTZBuffer; - int64_t defaultTZFileSize; - FILE* defaultTZFilePtr; - UBool defaultTZstatus; - int32_t defaultTZPosition; -} DefaultTZInfo; - -/* - * This method compares the two files given to see if they are a match. - * It is currently use to compare two TZ files. - */ -static UBool compareBinaryFiles(const char* defaultTZFileName, const char* TZFileName, DefaultTZInfo* tzInfo) { - FILE* file; - int64_t sizeFile; - int64_t sizeFileLeft; - int32_t sizeFileRead; - int32_t sizeFileToRead; - char bufferFile[MAX_READ_SIZE]; - UBool result = TRUE; - - if (tzInfo->defaultTZFilePtr == NULL) { - tzInfo->defaultTZFilePtr = fopen(defaultTZFileName, "r"); - } - file = fopen(TZFileName, "r"); - - tzInfo->defaultTZPosition = 0; /* reset position to begin search */ - - if (file != NULL && tzInfo->defaultTZFilePtr != NULL) { - /* First check that the file size are equal. */ - if (tzInfo->defaultTZFileSize == 0) { - fseek(tzInfo->defaultTZFilePtr, 0, SEEK_END); - tzInfo->defaultTZFileSize = ftell(tzInfo->defaultTZFilePtr); - } - fseek(file, 0, SEEK_END); - sizeFile = ftell(file); - sizeFileLeft = sizeFile; - - if (sizeFile != tzInfo->defaultTZFileSize) { - result = FALSE; - } else { - /* Store the data from the files in seperate buffers and - * compare each byte to determine equality. - */ - if (tzInfo->defaultTZBuffer == NULL) { - rewind(tzInfo->defaultTZFilePtr); - tzInfo->defaultTZBuffer = (char*)uprv_malloc(sizeof(char) * tzInfo->defaultTZFileSize); - sizeFileRead = fread(tzInfo->defaultTZBuffer, 1, tzInfo->defaultTZFileSize, tzInfo->defaultTZFilePtr); - } - rewind(file); - while(sizeFileLeft > 0) { - uprv_memset(bufferFile, 0, MAX_READ_SIZE); - sizeFileToRead = sizeFileLeft < MAX_READ_SIZE ? sizeFileLeft : MAX_READ_SIZE; - - sizeFileRead = fread(bufferFile, 1, sizeFileToRead, file); - if (memcmp(tzInfo->defaultTZBuffer + tzInfo->defaultTZPosition, bufferFile, sizeFileRead) != 0) { - result = FALSE; - break; - } - sizeFileLeft -= sizeFileRead; - tzInfo->defaultTZPosition += sizeFileRead; - } - } - } else { - result = FALSE; - } - - if (file != NULL) { - fclose(file); - } - - return result; -} - - -/* dirent also lists two entries: "." and ".." that we can safely ignore. */ -#define SKIP1 "." -#define SKIP2 ".." -static UBool U_CALLCONV putil_cleanup(void); -static CharString *gSearchTZFileResult = NULL; - -/* - * This method recursively traverses the directory given for a matching TZ file and returns the first match. - * This function is not thread safe - it uses a global, gSearchTZFileResult, to hold its results. - */ -static char* searchForTZFile(const char* path, DefaultTZInfo* tzInfo) { - DIR* dirp = NULL; - struct dirent* dirEntry = NULL; - char* result = NULL; - UErrorCode status = U_ZERO_ERROR; - - /* Save the current path */ - CharString curpath(path, -1, status); - if (U_FAILURE(status)) { - goto cleanupAndReturn; - } - - dirp = opendir(path); - if (dirp == NULL) { - goto cleanupAndReturn; - } - - if (gSearchTZFileResult == NULL) { - gSearchTZFileResult = new CharString; - if (gSearchTZFileResult == NULL) { - goto cleanupAndReturn; - } - ucln_common_registerCleanup(UCLN_COMMON_PUTIL, putil_cleanup); - } - - /* Check each entry in the directory. 
*/ - while((dirEntry = readdir(dirp)) != NULL) { - const char* dirName = dirEntry->d_name; - if (uprv_strcmp(dirName, SKIP1) != 0 && uprv_strcmp(dirName, SKIP2) != 0 - && uprv_strcmp(TZFILE_SKIP, dirName) != 0 && uprv_strcmp(TZFILE_SKIP2, dirName) != 0) { - /* Create a newpath with the new entry to test each entry in the directory. */ - CharString newpath(curpath, status); - newpath.append(dirName, -1, status); - if (U_FAILURE(status)) { - break; - } - - DIR* subDirp = NULL; - if ((subDirp = opendir(newpath.data())) != NULL) { - /* If this new path is a directory, make a recursive call with the newpath. */ - closedir(subDirp); - newpath.append('/', status); - if (U_FAILURE(status)) { - break; - } - result = searchForTZFile(newpath.data(), tzInfo); - /* - Have to get out here. Otherwise, we'd keep looking - and return the first match in the top-level directory - if there's a match in the top-level. If not, this function - would return NULL and set gTimeZoneBufferPtr to NULL in initDefault(). - It worked without this in most cases because we have a fallback of calling - localtime_r to figure out the default timezone. - */ - if (result != NULL) - break; - } else { - if(compareBinaryFiles(TZDEFAULT, newpath.data(), tzInfo)) { - int32_t amountToSkip = sizeof(TZZONEINFO) - 1; - if (amountToSkip > newpath.length()) { - amountToSkip = newpath.length(); - } - const char* zoneid = newpath.data() + amountToSkip; - skipZoneIDPrefix(&zoneid); - gSearchTZFileResult->clear(); - gSearchTZFileResult->append(zoneid, -1, status); - if (U_FAILURE(status)) { - break; - } - result = gSearchTZFileResult->data(); - /* Get out after the first one found. */ - break; - } - } - } - } - - cleanupAndReturn: - if (dirp) { - closedir(dirp); - } - return result; -} -#endif - -U_CAPI void U_EXPORT2 -uprv_tzname_clear_cache() -{ -#if defined(CHECK_LOCALTIME_LINK) && !defined(DEBUG_SKIP_LOCALTIME_LINK) - gTimeZoneBufferPtr = NULL; -#endif -} - -U_CAPI const char* U_EXPORT2 -uprv_tzname(int n) -{ - (void)n; // Avoid unreferenced parameter warning. - const char *tzid = NULL; -#if U_PLATFORM_USES_ONLY_WIN32_API - tzid = uprv_detectWindowsTimeZone(); - - if (tzid != NULL) { - return tzid; - } - -#ifndef U_TZNAME - // The return value is free'd in timezone.cpp on Windows because - // the other code path returns a pointer to a heap location. - // If we don't have a name already, then tzname wouldn't be any - // better, so just fall back. - return uprv_strdup(""); -#endif // !U_TZNAME - -#else - -/*#if U_PLATFORM_IS_DARWIN_BASED - int ret; - - tzid = getenv("TZFILE"); - if (tzid != NULL) { - return tzid; - } -#endif*/ - -/* This code can be temporarily disabled to test tzname resolution later on. */ -#ifndef DEBUG_TZNAME - tzid = getenv("TZ"); - if (tzid != NULL && isValidOlsonID(tzid) -#if U_PLATFORM == U_PF_SOLARIS - /* When TZ equals localtime on Solaris, check the /etc/localtime file. */ - && uprv_strcmp(tzid, TZ_ENV_CHECK) != 0 -#endif - ) { - /* The colon forces tzset() to treat the remainder as zoneinfo path */ - if (tzid[0] == ':') { - tzid++; - } - /* This might be a good Olson ID. */ - skipZoneIDPrefix(&tzid); - return tzid; - } - /* else U_TZNAME will give a better result. */ -#endif - -#if defined(CHECK_LOCALTIME_LINK) && !defined(DEBUG_SKIP_LOCALTIME_LINK) - /* Caller must handle threading issues */ - if (gTimeZoneBufferPtr == NULL) { - /* - This is a trick to look at the name of the link to get the Olson ID - because the tzfile contents is underspecified. 
- This isn't guaranteed to work because it may not be a symlink. - */ - int32_t ret = (int32_t)readlink(TZDEFAULT, gTimeZoneBuffer, sizeof(gTimeZoneBuffer)-1); - if (0 < ret) { - int32_t tzZoneInfoTailLen = uprv_strlen(TZZONEINFOTAIL); - gTimeZoneBuffer[ret] = 0; - char * tzZoneInfoTailPtr = uprv_strstr(gTimeZoneBuffer, TZZONEINFOTAIL); - - if (tzZoneInfoTailPtr != NULL - && isValidOlsonID(tzZoneInfoTailPtr + tzZoneInfoTailLen)) - { - return (gTimeZoneBufferPtr = tzZoneInfoTailPtr + tzZoneInfoTailLen); - } - } else { -#if defined(SEARCH_TZFILE) - DefaultTZInfo* tzInfo = (DefaultTZInfo*)uprv_malloc(sizeof(DefaultTZInfo)); - if (tzInfo != NULL) { - tzInfo->defaultTZBuffer = NULL; - tzInfo->defaultTZFileSize = 0; - tzInfo->defaultTZFilePtr = NULL; - tzInfo->defaultTZstatus = FALSE; - tzInfo->defaultTZPosition = 0; - - gTimeZoneBufferPtr = searchForTZFile(TZZONEINFO, tzInfo); - - /* Free previously allocated memory */ - if (tzInfo->defaultTZBuffer != NULL) { - uprv_free(tzInfo->defaultTZBuffer); - } - if (tzInfo->defaultTZFilePtr != NULL) { - fclose(tzInfo->defaultTZFilePtr); - } - uprv_free(tzInfo); - } - - if (gTimeZoneBufferPtr != NULL && isValidOlsonID(gTimeZoneBufferPtr)) { - return gTimeZoneBufferPtr; - } -#endif - } - } - else { - return gTimeZoneBufferPtr; - } -#endif -#endif - -#ifdef U_TZNAME -#if U_PLATFORM_USES_ONLY_WIN32_API - /* The return value is free'd in timezone.cpp on Windows because - * the other code path returns a pointer to a heap location. */ - return uprv_strdup(U_TZNAME[n]); -#else - /* - U_TZNAME is usually a non-unique abbreviation, which isn't normally usable. - So we remap the abbreviation to an olson ID. - - Since Windows exposes a little more timezone information, - we normally don't use this code on Windows because - uprv_detectWindowsTimeZone should have already given the correct answer. - */ - { - struct tm juneSol, decemberSol; - int daylightType; - static const time_t juneSolstice=1182478260; /*2007-06-21 18:11 UT*/ - static const time_t decemberSolstice=1198332540; /*2007-12-22 06:09 UT*/ - - /* This probing will tell us when daylight savings occurs. 
*/ - localtime_r(&juneSolstice, &juneSol); - localtime_r(&decemberSolstice, &decemberSol); - if(decemberSol.tm_isdst > 0) { - daylightType = U_DAYLIGHT_DECEMBER; - } else if(juneSol.tm_isdst > 0) { - daylightType = U_DAYLIGHT_JUNE; - } else { - daylightType = U_DAYLIGHT_NONE; - } - tzid = remapShortTimeZone(U_TZNAME[0], U_TZNAME[1], daylightType, uprv_timezone()); - if (tzid != NULL) { - return tzid; - } - } - return U_TZNAME[n]; -#endif -#else - return ""; -#endif -} - -/* Get and set the ICU data directory --------------------------------------- */ - -static icu::UInitOnce gDataDirInitOnce = U_INITONCE_INITIALIZER; -static char *gDataDirectory = NULL; - -UInitOnce gTimeZoneFilesInitOnce = U_INITONCE_INITIALIZER; -static CharString *gTimeZoneFilesDirectory = NULL; - -#if U_POSIX_LOCALE || U_PLATFORM_USES_ONLY_WIN32_API - static const char *gCorrectedPOSIXLocale = NULL; /* Sometimes heap allocated */ - static bool gCorrectedPOSIXLocaleHeapAllocated = false; -#endif - -static UBool U_CALLCONV putil_cleanup(void) -{ - if (gDataDirectory && *gDataDirectory) { - uprv_free(gDataDirectory); - } - gDataDirectory = NULL; - gDataDirInitOnce.reset(); - - delete gTimeZoneFilesDirectory; - gTimeZoneFilesDirectory = NULL; - gTimeZoneFilesInitOnce.reset(); - -#ifdef SEARCH_TZFILE - delete gSearchTZFileResult; - gSearchTZFileResult = NULL; -#endif - -#if U_POSIX_LOCALE || U_PLATFORM_USES_ONLY_WIN32_API - if (gCorrectedPOSIXLocale && gCorrectedPOSIXLocaleHeapAllocated) { - uprv_free(const_cast<char *>(gCorrectedPOSIXLocale)); - gCorrectedPOSIXLocale = NULL; - gCorrectedPOSIXLocaleHeapAllocated = false; - } -#endif - return TRUE; -} - -/* - * Set the data directory. - * Make a copy of the passed string, and set the global data dir to point to it. - */ -U_CAPI void U_EXPORT2 -u_setDataDirectory(const char *directory) { - char *newDataDir; - int32_t length; - - if(directory==NULL || *directory==0) { - /* A small optimization to prevent the malloc and copy when the - shared library is used, and this is a way to make sure that NULL - is never returned. - */ - newDataDir = (char *)""; - } - else { - length=(int32_t)uprv_strlen(directory); - newDataDir = (char *)uprv_malloc(length + 2); - /* Exit out if newDataDir could not be created. 
*/ - if (newDataDir == NULL) { - return; - } - uprv_strcpy(newDataDir, directory); - -#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) - { - char *p; - while((p = uprv_strchr(newDataDir, U_FILE_ALT_SEP_CHAR)) != NULL) { - *p = U_FILE_SEP_CHAR; - } - } -#endif - } - - if (gDataDirectory && *gDataDirectory) { - uprv_free(gDataDirectory); - } - gDataDirectory = newDataDir; - ucln_common_registerCleanup(UCLN_COMMON_PUTIL, putil_cleanup); -} - -U_CAPI UBool U_EXPORT2 -uprv_pathIsAbsolute(const char *path) -{ - if(!path || !*path) { - return FALSE; - } - - if(*path == U_FILE_SEP_CHAR) { - return TRUE; - } - -#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) - if(*path == U_FILE_ALT_SEP_CHAR) { - return TRUE; - } -#endif - -#if U_PLATFORM_USES_ONLY_WIN32_API - if( (((path[0] >= 'A') && (path[0] <= 'Z')) || - ((path[0] >= 'a') && (path[0] <= 'z'))) && - path[1] == ':' ) { - return TRUE; - } -#endif - - return FALSE; -} - -/* Backup setting of ICU_DATA_DIR_PREFIX_ENV_VAR - (needed for some Darwin ICU build environments) */ -#if U_PLATFORM_IS_DARWIN_BASED && TARGET_OS_SIMULATOR -# if !defined(ICU_DATA_DIR_PREFIX_ENV_VAR) -# define ICU_DATA_DIR_PREFIX_ENV_VAR "IPHONE_SIMULATOR_ROOT" -# endif -#endif - -#if U_PLATFORM_HAS_WINUWP_API != 0 -// Helper function to get the ICU Data Directory under the Windows directory location. -static BOOL U_CALLCONV getIcuDataDirectoryUnderWindowsDirectory(char* directoryBuffer, UINT bufferLength) -{ -#if defined(ICU_DATA_DIR_WINDOWS) - wchar_t windowsPath[MAX_PATH]; - char windowsPathUtf8[MAX_PATH]; - - UINT length = GetSystemWindowsDirectoryW(windowsPath, UPRV_LENGTHOF(windowsPath)); - if ((length > 0) && (length < (UPRV_LENGTHOF(windowsPath) - 1))) { - // Convert UTF-16 to a UTF-8 string. - UErrorCode status = U_ZERO_ERROR; - int32_t windowsPathUtf8Len = 0; - u_strToUTF8(windowsPathUtf8, static_cast<int32_t>(UPRV_LENGTHOF(windowsPathUtf8)), - &windowsPathUtf8Len, reinterpret_cast<const UChar*>(windowsPath), -1, &status); - - if (U_SUCCESS(status) && (status != U_STRING_NOT_TERMINATED_WARNING) && - (windowsPathUtf8Len < (UPRV_LENGTHOF(windowsPathUtf8) - 1))) { - // Ensure it always has a separator, so we can append the ICU data path. - if (windowsPathUtf8[windowsPathUtf8Len - 1] != U_FILE_SEP_CHAR) { - windowsPathUtf8[windowsPathUtf8Len++] = U_FILE_SEP_CHAR; - windowsPathUtf8[windowsPathUtf8Len] = '\0'; - } - // Check if the concatenated string will fit. - if ((windowsPathUtf8Len + UPRV_LENGTHOF(ICU_DATA_DIR_WINDOWS)) < bufferLength) { - uprv_strcpy(directoryBuffer, windowsPathUtf8); - uprv_strcat(directoryBuffer, ICU_DATA_DIR_WINDOWS); - return TRUE; - } - } - } -#endif - - return FALSE; -} -#endif - -static void U_CALLCONV dataDirectoryInitFn() { - /* If we already have the directory, then return immediately. Will happen if user called - * u_setDataDirectory(). - */ - if (gDataDirectory) { - return; - } - - const char *path = NULL; -#if defined(ICU_DATA_DIR_PREFIX_ENV_VAR) - char datadir_path_buffer[PATH_MAX]; -#endif - - /* - When ICU_NO_USER_DATA_OVERRIDE is defined, users aren't allowed to - override ICU's data with the ICU_DATA environment variable. This prevents - problems where multiple custom copies of ICU's specific version of data - are installed on a system. Either the application must define the data - directory with u_setDataDirectory, define ICU_DATA_DIR when compiling - ICU, set the data with udata_setCommonData or trust that all of the - required data is contained in ICU's data library that contains - the entry point defined by U_ICUDATA_ENTRY_POINT. 
- - There may also be some platforms where environment variables - are not allowed. - */ -# if !defined(ICU_NO_USER_DATA_OVERRIDE) && !UCONFIG_NO_FILE_IO - /* First try to get the environment variable */ -# if U_PLATFORM_HAS_WINUWP_API == 0 // Windows UWP does not support getenv - path=getenv("ICU_DATA"); -# endif -# endif - - /* ICU_DATA_DIR may be set as a compile option. - * U_ICU_DATA_DEFAULT_DIR is provided and is set by ICU at compile time - * and is used only when data is built in archive mode eliminating the need - * for ICU_DATA_DIR to be set. U_ICU_DATA_DEFAULT_DIR is set to the installation - * directory of the data dat file. Users should use ICU_DATA_DIR if they want to - * set their own path. - */ -#if defined(ICU_DATA_DIR) || defined(U_ICU_DATA_DEFAULT_DIR) - if(path==NULL || *path==0) { -# if defined(ICU_DATA_DIR_PREFIX_ENV_VAR) - const char *prefix = getenv(ICU_DATA_DIR_PREFIX_ENV_VAR); -# endif -# ifdef ICU_DATA_DIR - path=ICU_DATA_DIR; -# else - path=U_ICU_DATA_DEFAULT_DIR; -# endif -# if defined(ICU_DATA_DIR_PREFIX_ENV_VAR) - if (prefix != NULL) { - snprintf(datadir_path_buffer, PATH_MAX, "%s%s", prefix, path); - path=datadir_path_buffer; - } -# endif - } -#endif - -#if U_PLATFORM_HAS_WINUWP_API != 0 && defined(ICU_DATA_DIR_WINDOWS) - char datadir_path_buffer[MAX_PATH]; - if (getIcuDataDirectoryUnderWindowsDirectory(datadir_path_buffer, UPRV_LENGTHOF(datadir_path_buffer))) { - path = datadir_path_buffer; - } -#endif - - if(path==NULL) { - /* It looks really bad, set it to something. */ - path = ""; - } - - u_setDataDirectory(path); - return; -} - -U_CAPI const char * U_EXPORT2 -u_getDataDirectory(void) { - umtx_initOnce(gDataDirInitOnce, &dataDirectoryInitFn); - return gDataDirectory; -} - -static void setTimeZoneFilesDir(const char *path, UErrorCode &status) { - if (U_FAILURE(status)) { - return; - } - gTimeZoneFilesDirectory->clear(); - gTimeZoneFilesDirectory->append(path, status); -#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) - char *p = gTimeZoneFilesDirectory->data(); - while ((p = uprv_strchr(p, U_FILE_ALT_SEP_CHAR)) != NULL) { - *p = U_FILE_SEP_CHAR; - } -#endif -} - -#define TO_STRING(x) TO_STRING_2(x) -#define TO_STRING_2(x) #x - -static void U_CALLCONV TimeZoneDataDirInitFn(UErrorCode &status) { - U_ASSERT(gTimeZoneFilesDirectory == NULL); - ucln_common_registerCleanup(UCLN_COMMON_PUTIL, putil_cleanup); - gTimeZoneFilesDirectory = new CharString(); - if (gTimeZoneFilesDirectory == NULL) { - status = U_MEMORY_ALLOCATION_ERROR; - return; - } - - const char *dir = ""; - -#if U_PLATFORM_HAS_WINUWP_API != 0 - // The UWP version does not support the environment variable setting, but can possibly pick them up from the Windows directory. - char datadir_path_buffer[MAX_PATH]; - if (getIcuDataDirectoryUnderWindowsDirectory(datadir_path_buffer, UPRV_LENGTHOF(datadir_path_buffer))) { - dir = datadir_path_buffer; - } -#else - dir = getenv("ICU_TIMEZONE_FILES_DIR"); -#endif // U_PLATFORM_HAS_WINUWP_API - -#if defined(U_TIMEZONE_FILES_DIR) - if (dir == NULL) { - // Build time configuration setting. - dir = TO_STRING(U_TIMEZONE_FILES_DIR); - } -#endif - - if (dir == NULL) { - dir = ""; - } - - setTimeZoneFilesDir(dir, status); -} - - -U_CAPI const char * U_EXPORT2 -u_getTimeZoneFilesDirectory(UErrorCode *status) { - umtx_initOnce(gTimeZoneFilesInitOnce, &TimeZoneDataDirInitFn, *status); - return U_SUCCESS(*status) ? 
gTimeZoneFilesDirectory->data() : ""; -} - -U_CAPI void U_EXPORT2 -u_setTimeZoneFilesDirectory(const char *path, UErrorCode *status) { - umtx_initOnce(gTimeZoneFilesInitOnce, &TimeZoneDataDirInitFn, *status); - setTimeZoneFilesDir(path, *status); - - // Note: this function does some extra churn, first setting based on the - // environment, then immediately replacing with the value passed in. - // The logic is simpler that way, and performance shouldn't be an issue. -} - - -#if U_POSIX_LOCALE -/* A helper function used by uprv_getPOSIXIDForDefaultLocale and - * uprv_getPOSIXIDForDefaultCodepage. Returns the posix locale id for - * LC_CTYPE and LC_MESSAGES. It doesn't support other locale categories. - */ -static const char *uprv_getPOSIXIDForCategory(int category) -{ - const char* posixID = NULL; - if (category == LC_MESSAGES || category == LC_CTYPE) { - /* - * On Solaris two different calls to setlocale can result in - * different values. Only get this value once. - * - * We must check this first because an application can set this. - * - * LC_ALL can't be used because it's platform dependent. The LANG - * environment variable seems to affect LC_CTYPE variable by default. - * Here is what setlocale(LC_ALL, NULL) can return. - * HPUX can return 'C C C C C C C' - * Solaris can return /en_US/C/C/C/C/C on the second try. - * Linux can return LC_CTYPE=C;LC_NUMERIC=C;... - * - * The default codepage detection also needs to use LC_CTYPE. - * - * Do not call setlocale(LC_*, "")! Using an empty string instead - * of NULL, will modify the libc behavior. - */ - posixID = setlocale(category, NULL); - if ((posixID == 0) - || (uprv_strcmp("C", posixID) == 0) - || (uprv_strcmp("POSIX", posixID) == 0)) - { - /* Maybe we got some garbage. Try something more reasonable */ - posixID = getenv("LC_ALL"); - /* Solaris speaks POSIX - See IEEE Std 1003.1-2008 - * This is needed to properly handle empty env. variables - */ -#if U_PLATFORM == U_PF_SOLARIS - if ((posixID == 0) || (posixID[0] == '\0')) { - posixID = getenv(category == LC_MESSAGES ? "LC_MESSAGES" : "LC_CTYPE"); - if ((posixID == 0) || (posixID[0] == '\0')) { -#else - if (posixID == 0) { - posixID = getenv(category == LC_MESSAGES ? "LC_MESSAGES" : "LC_CTYPE"); - if (posixID == 0) { -#endif - posixID = getenv("LANG"); - } - } - } - } - if ((posixID==0) - || (uprv_strcmp("C", posixID) == 0) - || (uprv_strcmp("POSIX", posixID) == 0)) - { - /* Nothing worked. Give it a nice POSIX default value. */ - posixID = "en_US_POSIX"; - // Note: this test will not catch 'C.UTF-8', - // that will be handled in uprv_getDefaultLocaleID(). - // Leave this mapping here for the uprv_getPOSIXIDForDefaultCodepage() - // caller which expects to see "en_US_POSIX" in many branches. - } - return posixID; -} - -/* Return just the POSIX id for the default locale, whatever happens to be in - * it. It gets the value from LC_MESSAGES and indirectly from LC_ALL and LANG. - */ -static const char *uprv_getPOSIXIDForDefaultLocale(void) -{ - static const char* posixID = NULL; - if (posixID == 0) { - posixID = uprv_getPOSIXIDForCategory(LC_MESSAGES); - } - return posixID; -} - -#if !U_CHARSET_IS_UTF8 -/* Return just the POSIX id for the default codepage, whatever happens to be in - * it. It gets the value from LC_CTYPE and indirectly from LC_ALL and LANG. 
- */ -static const char *uprv_getPOSIXIDForDefaultCodepage(void) -{ - static const char* posixID = NULL; - if (posixID == 0) { - posixID = uprv_getPOSIXIDForCategory(LC_CTYPE); - } - return posixID; -} -#endif -#endif - -/* NOTE: The caller should handle thread safety */ -U_CAPI const char* U_EXPORT2 -uprv_getDefaultLocaleID() -{ -#if U_POSIX_LOCALE -/* - Note that: (a '!' means the ID is improper somehow) - LC_ALL ----> default_loc codepage --------------------------------------------------------- - ab.CD ab CD - ab@CD ab__CD - - ab@CD.EF ab__CD EF - - ab_CD.EF@GH ab_CD_GH EF - -Some 'improper' ways to do the same as above: - ! ab_CD@GH.EF ab_CD_GH EF - ! ab_CD.EF@GH.IJ ab_CD_GH EF - ! ab_CD@ZZ.EF@GH.IJ ab_CD_GH EF - - _CD@GH _CD_GH - - _CD.EF@GH _CD_GH EF - -The variant cannot have dots in it. -The 'rightmost' variant (@xxx) wins. -The leftmost codepage (.xxx) wins. -*/ - const char* posixID = uprv_getPOSIXIDForDefaultLocale(); - - /* Format: (no spaces) - ll [ _CC ] [ . MM ] [ @ VV] - - l = lang, C = ctry, M = charmap, V = variant - */ - - if (gCorrectedPOSIXLocale != nullptr) { - return gCorrectedPOSIXLocale; - } - - // Copy the ID into owned memory. - // Over-allocate in case we replace "C" with "en_US_POSIX" (+10), + null termination - char *correctedPOSIXLocale = static_cast<char *>(uprv_malloc(uprv_strlen(posixID) + 10 + 1)); - if (correctedPOSIXLocale == nullptr) { - return nullptr; - } - uprv_strcpy(correctedPOSIXLocale, posixID); - - char *limit; - if ((limit = uprv_strchr(correctedPOSIXLocale, '.')) != nullptr) { - *limit = 0; - } - if ((limit = uprv_strchr(correctedPOSIXLocale, '@')) != nullptr) { - *limit = 0; - } - - if ((uprv_strcmp("C", correctedPOSIXLocale) == 0) // no @ variant - || (uprv_strcmp("POSIX", correctedPOSIXLocale) == 0)) { - // Raw input was C.* or POSIX.*, Give it a nice POSIX default value. - // (The "C"/"POSIX" case is handled in uprv_getPOSIXIDForCategory()) - uprv_strcpy(correctedPOSIXLocale, "en_US_POSIX"); - } - - /* Note that we scan the *uncorrected* ID. */ - const char *p; - if ((p = uprv_strrchr(posixID, '@')) != nullptr) { - p++; - - /* Take care of any special cases here.. */ - if (!uprv_strcmp(p, "nynorsk")) { - p = "NY"; - /* Don't worry about no__NY. In practice, it won't appear. */ - } - - if (uprv_strchr(correctedPOSIXLocale,'_') == nullptr) { - uprv_strcat(correctedPOSIXLocale, "__"); /* aa@b -> aa__b (note this can make the new locale 1 char longer) */ - } - else { - uprv_strcat(correctedPOSIXLocale, "_"); /* aa_CC@b -> aa_CC_b */ - } - - const char *q; - if ((q = uprv_strchr(p, '.')) != nullptr) { - /* How big will the resulting string be? */ - int32_t len = (int32_t)(uprv_strlen(correctedPOSIXLocale) + (q-p)); - uprv_strncat(correctedPOSIXLocale, p, q-p); // do not include charset - correctedPOSIXLocale[len] = 0; - } - else { - /* Anything following the @ sign */ - uprv_strcat(correctedPOSIXLocale, p); - } - - /* Should there be a map from 'no@nynorsk' -> no_NO_NY here? - * How about 'russian' -> 'ru'? - * Many of the other locales using ISO codes will be handled by the - * canonicalization functions in uloc_getDefault. - */ - } - - if (gCorrectedPOSIXLocale == nullptr) { - gCorrectedPOSIXLocale = correctedPOSIXLocale; - gCorrectedPOSIXLocaleHeapAllocated = true; - ucln_common_registerCleanup(UCLN_COMMON_PUTIL, putil_cleanup); - correctedPOSIXLocale = nullptr; - } - posixID = gCorrectedPOSIXLocale; - - if (correctedPOSIXLocale != nullptr) { /* Was already set - clean up. 
*/ - uprv_free(correctedPOSIXLocale); - } - - return posixID; - -#elif U_PLATFORM_USES_ONLY_WIN32_API -#define POSIX_LOCALE_CAPACITY 64 - UErrorCode status = U_ZERO_ERROR; - char *correctedPOSIXLocale = nullptr; - - // If we have already figured this out just use the cached value - if (gCorrectedPOSIXLocale != nullptr) { - return gCorrectedPOSIXLocale; - } - - // No cached value, need to determine the current value - static WCHAR windowsLocale[LOCALE_NAME_MAX_LENGTH] = {}; - int length = GetLocaleInfoEx(LOCALE_NAME_USER_DEFAULT, LOCALE_SNAME, windowsLocale, LOCALE_NAME_MAX_LENGTH); - - // Now we should have a Windows locale name that needs converted to the POSIX style. - if (length > 0) // If length is 0, then the GetLocaleInfoEx failed. - { - // First we need to go from UTF-16 to char (and also convert from _ to - while we're at it.) - char modifiedWindowsLocale[LOCALE_NAME_MAX_LENGTH] = {}; - - int32_t i; - for (i = 0; i < UPRV_LENGTHOF(modifiedWindowsLocale); i++) - { - if (windowsLocale[i] == '_') - { - modifiedWindowsLocale[i] = '-'; - } - else - { - modifiedWindowsLocale[i] = static_cast<char>(windowsLocale[i]); - } - - if (modifiedWindowsLocale[i] == '\0') - { - break; - } - } - - if (i >= UPRV_LENGTHOF(modifiedWindowsLocale)) - { - // Ran out of room, can't really happen, maybe we'll be lucky about a matching - // locale when tags are dropped - modifiedWindowsLocale[UPRV_LENGTHOF(modifiedWindowsLocale) - 1] = '\0'; - } - - // Now normalize the resulting name - correctedPOSIXLocale = static_cast<char *>(uprv_malloc(POSIX_LOCALE_CAPACITY + 1)); - /* TODO: Should we just exit on memory allocation failure? */ - if (correctedPOSIXLocale) - { - int32_t posixLen = uloc_canonicalize(modifiedWindowsLocale, correctedPOSIXLocale, POSIX_LOCALE_CAPACITY, &status); - if (U_SUCCESS(status)) - { - *(correctedPOSIXLocale + posixLen) = 0; - gCorrectedPOSIXLocale = correctedPOSIXLocale; - gCorrectedPOSIXLocaleHeapAllocated = true; - ucln_common_registerCleanup(UCLN_COMMON_PUTIL, putil_cleanup); - } - else - { - uprv_free(correctedPOSIXLocale); - } - } - } - - // If unable to find a locale we can agree upon, use en-US by default - if (gCorrectedPOSIXLocale == nullptr) { - gCorrectedPOSIXLocale = "en_US"; - } - return gCorrectedPOSIXLocale; - -#elif U_PLATFORM == U_PF_OS400 - /* locales are process scoped and are by definition thread safe */ - static char correctedLocale[64]; - const char *localeID = getenv("LC_ALL"); - char *p; - - if (localeID == NULL) - localeID = getenv("LANG"); - if (localeID == NULL) - localeID = setlocale(LC_ALL, NULL); - /* Make sure we have something... */ - if (localeID == NULL) - return "en_US_POSIX"; - - /* Extract the locale name from the path. */ - if((p = uprv_strrchr(localeID, '/')) != NULL) - { - /* Increment p to start of locale name. */ - p++; - localeID = p; - } - - /* Copy to work location. */ - uprv_strcpy(correctedLocale, localeID); - - /* Strip off the '.locale' extension. */ - if((p = uprv_strchr(correctedLocale, '.')) != NULL) { - *p = 0; - } - - /* Upper case the locale name. */ - T_CString_toUpperCase(correctedLocale); - - /* See if we are using the POSIX locale. Any of the - * following are equivalent and use the same QLGPGCMA - * (POSIX) locale. 
- * QLGPGCMA2 means UCS2 - * QLGPGCMA_4 means UTF-32 - * QLGPGCMA_8 means UTF-8 - */ - if ((uprv_strcmp("C", correctedLocale) == 0) || - (uprv_strcmp("POSIX", correctedLocale) == 0) || - (uprv_strncmp("QLGPGCMA", correctedLocale, 8) == 0)) - { - uprv_strcpy(correctedLocale, "en_US_POSIX"); - } - else - { - int16_t LocaleLen; - - /* Lower case the lang portion. */ - for(p = correctedLocale; *p != 0 && *p != '_'; p++) - { - *p = uprv_tolower(*p); - } - - /* Adjust for Euro. After '_E' add 'URO'. */ - LocaleLen = uprv_strlen(correctedLocale); - if (correctedLocale[LocaleLen - 2] == '_' && - correctedLocale[LocaleLen - 1] == 'E') - { - uprv_strcat(correctedLocale, "URO"); - } - - /* If using Lotus-based locale then convert to - * equivalent non Lotus. - */ - else if (correctedLocale[LocaleLen - 2] == '_' && - correctedLocale[LocaleLen - 1] == 'L') - { - correctedLocale[LocaleLen - 2] = 0; - } - - /* There are separate simplified and traditional - * locales called zh_HK_S and zh_HK_T. - */ - else if (uprv_strncmp(correctedLocale, "zh_HK", 5) == 0) - { - uprv_strcpy(correctedLocale, "zh_HK"); - } - - /* A special zh_CN_GBK locale... - */ - else if (uprv_strcmp(correctedLocale, "zh_CN_GBK") == 0) - { - uprv_strcpy(correctedLocale, "zh_CN"); - } - - } - - return correctedLocale; -#endif - -} - -#if !U_CHARSET_IS_UTF8 -#if U_POSIX_LOCALE -/* -Due to various platform differences, one platform may specify a charset, -when they really mean a different charset. Remap the names so that they are -compatible with ICU. Only conflicting/ambiguous aliases should be resolved -here. Before adding anything to this function, please consider adding unique -names to the ICU alias table in the data directory. -*/ -static const char* -remapPlatformDependentCodepage(const char *locale, const char *name) { - if (locale != NULL && *locale == 0) { - /* Make sure that an empty locale is handled the same way. */ - locale = NULL; - } - if (name == NULL) { - return NULL; - } -#if U_PLATFORM == U_PF_AIX - if (uprv_strcmp(name, "IBM-943") == 0) { - /* Use the ASCII compatible ibm-943 */ - name = "Shift-JIS"; - } - else if (uprv_strcmp(name, "IBM-1252") == 0) { - /* Use the windows-1252 that contains the Euro */ - name = "IBM-5348"; - } -#elif U_PLATFORM == U_PF_SOLARIS - if (locale != NULL && uprv_strcmp(name, "EUC") == 0) { - /* Solaris underspecifies the "EUC" name. */ - if (uprv_strcmp(locale, "zh_CN") == 0) { - name = "EUC-CN"; - } - else if (uprv_strcmp(locale, "zh_TW") == 0) { - name = "EUC-TW"; - } - else if (uprv_strcmp(locale, "ko_KR") == 0) { - name = "EUC-KR"; - } - } - else if (uprv_strcmp(name, "eucJP") == 0) { - /* - ibm-954 is the best match. - ibm-33722 is the default for eucJP (similar to Windows). - */ - name = "eucjis"; - } - else if (uprv_strcmp(name, "646") == 0) { - /* - * The default codepage given by Solaris is 646 but the C library routines treat it as if it was - * ISO-8859-1 instead of US-ASCII(646). - */ - name = "ISO-8859-1"; - } -#elif U_PLATFORM_IS_DARWIN_BASED - if (locale == NULL && *name == 0) { - /* - No locale was specified, and an empty name was passed in. - This usually indicates that nl_langinfo didn't return valid information. - Mac OS X uses UTF-8 by default (especially the locale data and console). - */ - name = "UTF-8"; - } - else if (uprv_strcmp(name, "CP949") == 0) { - /* Remap CP949 to a similar codepage to avoid issues with backslash and won symbol. 
*/ - name = "EUC-KR"; - } - else if (locale != NULL && uprv_strcmp(locale, "en_US_POSIX") != 0 && uprv_strcmp(name, "US-ASCII") == 0) { - /* - * For non C/POSIX locale, default the code page to UTF-8 instead of US-ASCII. - */ - name = "UTF-8"; - } -#elif U_PLATFORM == U_PF_BSD - if (uprv_strcmp(name, "CP949") == 0) { - /* Remap CP949 to a similar codepage to avoid issues with backslash and won symbol. */ - name = "EUC-KR"; - } -#elif U_PLATFORM == U_PF_HPUX - if (locale != NULL && uprv_strcmp(locale, "zh_HK") == 0 && uprv_strcmp(name, "big5") == 0) { - /* HP decided to extend big5 as hkbig5 even though it's not compatible :-( */ - /* zh_TW.big5 is not the same charset as zh_HK.big5! */ - name = "hkbig5"; - } - else if (uprv_strcmp(name, "eucJP") == 0) { - /* - ibm-1350 is the best match, but unavailable. - ibm-954 is mostly a superset of ibm-1350. - ibm-33722 is the default for eucJP (similar to Windows). - */ - name = "eucjis"; - } -#elif U_PLATFORM == U_PF_LINUX - if (locale != NULL && uprv_strcmp(name, "euc") == 0) { - /* Linux underspecifies the "EUC" name. */ - if (uprv_strcmp(locale, "korean") == 0) { - name = "EUC-KR"; - } - else if (uprv_strcmp(locale, "japanese") == 0) { - /* See comment below about eucJP */ - name = "eucjis"; - } - } - else if (uprv_strcmp(name, "eucjp") == 0) { - /* - ibm-1350 is the best match, but unavailable. - ibm-954 is mostly a superset of ibm-1350. - ibm-33722 is the default for eucJP (similar to Windows). - */ - name = "eucjis"; - } - else if (locale != NULL && uprv_strcmp(locale, "en_US_POSIX") != 0 && - (uprv_strcmp(name, "ANSI_X3.4-1968") == 0 || uprv_strcmp(name, "US-ASCII") == 0)) { - /* - * For non C/POSIX locale, default the code page to UTF-8 instead of US-ASCII. - */ - name = "UTF-8"; - } - /* - * Linux returns ANSI_X3.4-1968 for C/POSIX, but the call site takes care of - * it by falling back to 'US-ASCII' when NULL is returned from this - * function. So, we don't have to worry about it here. 
- */ -#endif - /* return NULL when "" is passed in */ - if (*name == 0) { - name = NULL; - } - return name; -} - -static const char* -getCodepageFromPOSIXID(const char *localeName, char * buffer, int32_t buffCapacity) -{ - char localeBuf[100]; - const char *name = NULL; - char *variant = NULL; - - if (localeName != NULL && (name = (uprv_strchr(localeName, '.'))) != NULL) { - size_t localeCapacity = uprv_min(sizeof(localeBuf), (name-localeName)+1); - uprv_strncpy(localeBuf, localeName, localeCapacity); - localeBuf[localeCapacity-1] = 0; /* ensure NULL termination */ - name = uprv_strncpy(buffer, name+1, buffCapacity); - buffer[buffCapacity-1] = 0; /* ensure NULL termination */ - if ((variant = const_cast<char *>(uprv_strchr(name, '@'))) != NULL) { - *variant = 0; - } - name = remapPlatformDependentCodepage(localeBuf, name); - } - return name; -} -#endif - -static const char* -int_getDefaultCodepage() -{ -#if U_PLATFORM == U_PF_OS400 - uint32_t ccsid = 37; /* Default to ibm-37 */ - static char codepage[64]; - Qwc_JOBI0400_t jobinfo; - Qus_EC_t error = { sizeof(Qus_EC_t) }; /* SPI error code */ - - EPT_CALL(QUSRJOBI)(&jobinfo, sizeof(jobinfo), "JOBI0400", - "* ", " ", &error); - - if (error.Bytes_Available == 0) { - if (jobinfo.Coded_Char_Set_ID != 0xFFFF) { - ccsid = (uint32_t)jobinfo.Coded_Char_Set_ID; - } - else if (jobinfo.Default_Coded_Char_Set_Id != 0xFFFF) { - ccsid = (uint32_t)jobinfo.Default_Coded_Char_Set_Id; - } - /* else use the default */ - } - sprintf(codepage,"ibm-%d", ccsid); - return codepage; - -#elif U_PLATFORM == U_PF_OS390 - static char codepage[64]; - - strncpy(codepage, nl_langinfo(CODESET),63-strlen(UCNV_SWAP_LFNL_OPTION_STRING)); - strcat(codepage,UCNV_SWAP_LFNL_OPTION_STRING); - codepage[63] = 0; /* NULL terminate */ - - return codepage; - -#elif U_PLATFORM_USES_ONLY_WIN32_API - static char codepage[64]; - DWORD codepageNumber = 0; - -#if U_PLATFORM_HAS_WINUWP_API > 0 - // UWP doesn't have a direct API to get the default ACP as Microsoft would rather - // have folks use Unicode than a "system" code page, however this is the same - // codepage as the system default locale codepage. (FWIW, the system locale is - // ONLY used for codepage, it should never be used for anything else) - GetLocaleInfoEx(LOCALE_NAME_SYSTEM_DEFAULT, LOCALE_IDEFAULTANSICODEPAGE | LOCALE_RETURN_NUMBER, - (LPWSTR)&codepageNumber, sizeof(codepageNumber) / sizeof(WCHAR)); -#else - // Win32 apps can call GetACP - codepageNumber = GetACP(); -#endif - // Special case for UTF-8 - if (codepageNumber == 65001) - { - return "UTF-8"; - } - // Windows codepages can look like windows-1252, so format the found number - // the numbers are eclectic, however all valid system code pages, besides UTF-8 - // are between 3 and 19999 - if (codepageNumber > 0 && codepageNumber < 20000) - { - sprintf(codepage, "windows-%ld", codepageNumber); - return codepage; - } - // If the codepage number call failed then return UTF-8 - return "UTF-8"; - -#elif U_POSIX_LOCALE - static char codesetName[100]; - const char *localeName = NULL; - const char *name = NULL; - - localeName = uprv_getPOSIXIDForDefaultCodepage(); - uprv_memset(codesetName, 0, sizeof(codesetName)); - /* On Solaris nl_langinfo returns C locale values unless setlocale - * was called earlier. - */ -#if (U_HAVE_NL_LANGINFO_CODESET && U_PLATFORM != U_PF_SOLARIS) - /* When available, check nl_langinfo first because it usually gives more - useful names. It depends on LC_CTYPE. - nl_langinfo may use the same buffer as setlocale. 
*/ - { - const char *codeset = nl_langinfo(U_NL_LANGINFO_CODESET); -#if U_PLATFORM_IS_DARWIN_BASED || U_PLATFORM_IS_LINUX_BASED - /* - * On Linux and MacOSX, ensure that default codepage for non C/POSIX locale is UTF-8 - * instead of ASCII. - */ - if (uprv_strcmp(localeName, "en_US_POSIX") != 0) { - codeset = remapPlatformDependentCodepage(localeName, codeset); - } else -#endif - { - codeset = remapPlatformDependentCodepage(NULL, codeset); - } - - if (codeset != NULL) { - uprv_strncpy(codesetName, codeset, sizeof(codesetName)); - codesetName[sizeof(codesetName)-1] = 0; - return codesetName; - } - } -#endif - - /* Use setlocale in a nice way, and then check some environment variables. - Maybe the application used setlocale already. - */ - uprv_memset(codesetName, 0, sizeof(codesetName)); - name = getCodepageFromPOSIXID(localeName, codesetName, sizeof(codesetName)); - if (name) { - /* if we can find the codeset name from setlocale, return that. */ - return name; - } - - if (*codesetName == 0) - { - /* Everything failed. Return US ASCII (ISO 646). */ - (void)uprv_strcpy(codesetName, "US-ASCII"); - } - return codesetName; -#else - return "US-ASCII"; -#endif -} - - -U_CAPI const char* U_EXPORT2 -uprv_getDefaultCodepage() -{ - static char const *name = NULL; - umtx_lock(NULL); - if (name == NULL) { - name = int_getDefaultCodepage(); - } - umtx_unlock(NULL); - return name; -} -#endif /* !U_CHARSET_IS_UTF8 */ - - -/* end of platform-specific implementation -------------- */ - -/* version handling --------------------------------------------------------- */ - -U_CAPI void U_EXPORT2 -u_versionFromString(UVersionInfo versionArray, const char *versionString) { - char *end; - uint16_t part=0; - - if(versionArray==NULL) { - return; - } - - if(versionString!=NULL) { - for(;;) { - versionArray[part]=(uint8_t)uprv_strtoul(versionString, &end, 10); - if(end==versionString || ++part==U_MAX_VERSION_LENGTH || *end!=U_VERSION_DELIMITER) { - break; - } - versionString=end+1; - } - } - - while(part<U_MAX_VERSION_LENGTH) { - versionArray[part++]=0; - } -} - -U_CAPI void U_EXPORT2 -u_versionFromUString(UVersionInfo versionArray, const UChar *versionString) { - if(versionArray!=NULL && versionString!=NULL) { - char versionChars[U_MAX_VERSION_STRING_LENGTH+1]; - int32_t len = u_strlen(versionString); - if(len>U_MAX_VERSION_STRING_LENGTH) { - len = U_MAX_VERSION_STRING_LENGTH; - } - u_UCharsToChars(versionString, versionChars, len); - versionChars[len]=0; - u_versionFromString(versionArray, versionChars); - } -} - -U_CAPI void U_EXPORT2 -u_versionToString(const UVersionInfo versionArray, char *versionString) { - uint16_t count, part; - uint8_t field; - - if(versionString==NULL) { - return; - } - - if(versionArray==NULL) { - versionString[0]=0; - return; - } - - /* count how many fields need to be written */ - for(count=4; count>0 && versionArray[count-1]==0; --count) { - } - - if(count <= 1) { - count = 2; - } - - /* write the first part */ - /* write the decimal field value */ - field=versionArray[0]; - if(field>=100) { - *versionString++=(char)('0'+field/100); - field%=100; - } - if(field>=10) { - *versionString++=(char)('0'+field/10); - field%=10; - } - *versionString++=(char)('0'+field); - - /* write the following parts */ - for(part=1; part<count; ++part) { - /* write a dot first */ - *versionString++=U_VERSION_DELIMITER; - - /* write the decimal field value */ - field=versionArray[part]; - if(field>=100) { - *versionString++=(char)('0'+field/100); - field%=100; - } - if(field>=10) { - 
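uprv_getDefaultCodepage() above computes the codepage name once under a lock and then hands out the cached pointer. In standard C++ the same compute-once pattern can be expressed with std::call_once; the sketch below is an illustrative analogue, not the ICU implementation:

```cpp
#include <clocale>
#include <cstdio>
#include <cstring>
#include <mutex>

static const char* g_codepage = nullptr;
static std::once_flag g_once;

// Compute the name once, then always return the cached pointer (thread-safe).
static const char* default_codepage() {
    std::call_once(g_once, [] {
        // Stand-in for int_getDefaultCodepage(): ask the C runtime, fall back to US-ASCII.
        const char* loc = std::setlocale(LC_CTYPE, "");
        g_codepage = (loc != nullptr && std::strstr(loc, "UTF-8") != nullptr) ? "UTF-8"
                                                                              : "US-ASCII";
    });
    return g_codepage;
}

int main() {
    std::printf("default codepage: %s\n", default_codepage());
    return 0;
}
```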
*versionString++=(char)('0'+field/10); - field%=10; - } - *versionString++=(char)('0'+field); - } - - /* NUL-terminate */ - *versionString=0; -} - -U_CAPI void U_EXPORT2 -u_getVersion(UVersionInfo versionArray) { - (void)copyright; // Suppress unused variable warning from clang. - u_versionFromString(versionArray, U_ICU_VERSION); -} - -/** - * icucfg.h dependent code - */ - -#if U_ENABLE_DYLOAD && HAVE_DLOPEN && !U_PLATFORM_USES_ONLY_WIN32_API - -#if HAVE_DLFCN_H -#ifdef __MVS__ -#ifndef __SUSV3 -#define __SUSV3 1 -#endif -#endif -#include <dlfcn.h> -#endif /* HAVE_DLFCN_H */ - -U_INTERNAL void * U_EXPORT2 -uprv_dl_open(const char *libName, UErrorCode *status) { - void *ret = NULL; - if(U_FAILURE(*status)) return ret; - ret = dlopen(libName, RTLD_NOW|RTLD_GLOBAL); - if(ret==NULL) { -#ifdef U_TRACE_DYLOAD - printf("dlerror on dlopen(%s): %s\n", libName, dlerror()); -#endif - *status = U_MISSING_RESOURCE_ERROR; - } - return ret; -} - -U_INTERNAL void U_EXPORT2 -uprv_dl_close(void *lib, UErrorCode *status) { - if(U_FAILURE(*status)) return; - dlclose(lib); -} - -U_INTERNAL UVoidFunction* U_EXPORT2 -uprv_dlsym_func(void *lib, const char* sym, UErrorCode *status) { - union { - UVoidFunction *fp; - void *vp; - } uret; - uret.fp = NULL; - if(U_FAILURE(*status)) return uret.fp; - uret.vp = dlsym(lib, sym); - if(uret.vp == NULL) { -#ifdef U_TRACE_DYLOAD - printf("dlerror on dlsym(%p,%s): %s\n", lib,sym, dlerror()); -#endif - *status = U_MISSING_RESOURCE_ERROR; - } - return uret.fp; -} - -#elif U_ENABLE_DYLOAD && U_PLATFORM_USES_ONLY_WIN32_API && !U_PLATFORM_HAS_WINUWP_API - -/* Windows API implementation. */ -// Note: UWP does not expose/allow these APIs, so the UWP version gets the null implementation. */ - -U_INTERNAL void * U_EXPORT2 -uprv_dl_open(const char *libName, UErrorCode *status) { - HMODULE lib = NULL; - - if(U_FAILURE(*status)) return NULL; - - lib = LoadLibraryA(libName); - - if(lib==NULL) { - *status = U_MISSING_RESOURCE_ERROR; - } - - return (void*)lib; -} - -U_INTERNAL void U_EXPORT2 -uprv_dl_close(void *lib, UErrorCode *status) { - HMODULE handle = (HMODULE)lib; - if(U_FAILURE(*status)) return; - - FreeLibrary(handle); - - return; -} - -U_INTERNAL UVoidFunction* U_EXPORT2 -uprv_dlsym_func(void *lib, const char* sym, UErrorCode *status) { - HMODULE handle = (HMODULE)lib; - UVoidFunction* addr = NULL; - - if(U_FAILURE(*status) || lib==NULL) return NULL; - - addr = (UVoidFunction*)GetProcAddress(handle, sym); - - if(addr==NULL) { - DWORD lastError = GetLastError(); - if(lastError == ERROR_PROC_NOT_FOUND) { - *status = U_MISSING_RESOURCE_ERROR; - } else { - *status = U_UNSUPPORTED_ERROR; /* other unknown error. */ - } - } - - return addr; -} - -#else - -/* No dynamic loading, null (nonexistent) implementation. 
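The version helpers above (u_versionFromString, u_versionToString, u_getVersion) are public ICU API from unicode/uversion.h. A small usage sketch, assuming the program is compiled and linked against ICU4C:

```cpp
#include <cstdio>
#include <unicode/uversion.h>

int main() {
    UVersionInfo v;
    char s[U_MAX_VERSION_STRING_LENGTH + 1];

    u_versionFromString(v, "64.2");   // missing trailing fields are zero-filled
    u_versionToString(v, s);          // at least two fields are always written back
    std::printf("parsed: %d.%d.%d.%d -> \"%s\"\n", v[0], v[1], v[2], v[3], s);

    u_getVersion(v);                  // version of the ICU library itself
    u_versionToString(v, s);
    std::printf("ICU: %s\n", s);
    return 0;
}
```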
*/ - -U_INTERNAL void * U_EXPORT2 -uprv_dl_open(const char *libName, UErrorCode *status) { - (void)libName; - if(U_FAILURE(*status)) return NULL; - *status = U_UNSUPPORTED_ERROR; - return NULL; -} - -U_INTERNAL void U_EXPORT2 -uprv_dl_close(void *lib, UErrorCode *status) { - (void)lib; - if(U_FAILURE(*status)) return; - *status = U_UNSUPPORTED_ERROR; - return; -} - -U_INTERNAL UVoidFunction* U_EXPORT2 -uprv_dlsym_func(void *lib, const char* sym, UErrorCode *status) { - (void)lib; - (void)sym; - if(U_SUCCESS(*status)) { - *status = U_UNSUPPORTED_ERROR; - } - return (UVoidFunction*)NULL; -} - -#endif - -/* - * Hey, Emacs, please set the following: - * - * Local Variables: - * indent-tabs-mode: nil - * End: - * - */ diff --git a/tools/icu/patches/64/source/i18n/dtptngen.cpp b/tools/icu/patches/64/source/i18n/dtptngen.cpp deleted file mode 100644 index eb8bcfb971f427..00000000000000 --- a/tools/icu/patches/64/source/i18n/dtptngen.cpp +++ /dev/null @@ -1,2778 +0,0 @@ -// © 2016 and later: Unicode, Inc. and others. -// License & terms of use: http://www.unicode.org/copyright.html -/* -******************************************************************************* -* Copyright (C) 2007-2016, International Business Machines Corporation and -* others. All Rights Reserved. -******************************************************************************* -* -* File DTPTNGEN.CPP -* -******************************************************************************* -*/ - -#include "unicode/utypes.h" -#if !UCONFIG_NO_FORMATTING - -#include "unicode/datefmt.h" -#include "unicode/decimfmt.h" -#include "unicode/dtfmtsym.h" -#include "unicode/dtptngen.h" -#include "unicode/localpointer.h" -#include "unicode/simpleformatter.h" -#include "unicode/smpdtfmt.h" -#include "unicode/udat.h" -#include "unicode/udatpg.h" -#include "unicode/uniset.h" -#include "unicode/uloc.h" -#include "unicode/ures.h" -#include "unicode/ustring.h" -#include "unicode/rep.h" -#include "cpputils.h" -#include "mutex.h" -#include "umutex.h" -#include "cmemory.h" -#include "cstring.h" -#include "locbased.h" -#include "hash.h" -#include "uhash.h" -#include "uresimp.h" -#include "dtptngen_impl.h" -#include "ucln_in.h" -#include "charstr.h" -#include "uassert.h" - -#if U_CHARSET_FAMILY==U_EBCDIC_FAMILY -/** - * If we are on EBCDIC, use an iterator which will - * traverse the bundles in ASCII order. 
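The dlfcn-based wrappers above use a union to convert the void* returned by dlsym() into a function pointer without tripping strict conversion warnings. A standalone sketch of that trick (POSIX dlfcn is assumed; the library and symbol names below are placeholders):

```cpp
#include <dlfcn.h>
#include <cstdio>

typedef void (*VoidFn)(void);

// Resolve a symbol from a shared library; the union mirrors uprv_dlsym_func above.
static VoidFn load_symbol(const char* lib, const char* sym) {
    void* handle = dlopen(lib, RTLD_NOW | RTLD_GLOBAL);
    if (handle == nullptr) {
        std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return nullptr;
    }
    union { VoidFn fp; void* vp; } u;
    u.vp = dlsym(handle, sym);
    if (u.vp == nullptr) {
        std::fprintf(stderr, "dlsym failed: %s\n", dlerror());
    }
    // Real code would keep the handle and dlclose() it later; leaked here for brevity.
    return u.fp;
}

int main() {
    // Placeholder names: resolve cos() from the math library, then call it
    // through a correctly-typed pointer.
    VoidFn fn = load_symbol("libm.so.6", "cos");
    if (fn != nullptr) {
        double (*cosfn)(double) = reinterpret_cast<double (*)(double)>(fn);
        std::printf("cos(0) = %f\n", cosfn(0.0));
    }
    return 0;
}
```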
- */ -#define U_USE_ASCII_BUNDLE_ITERATOR -#define U_SORT_ASCII_BUNDLE_ITERATOR -#endif - -#if defined(U_USE_ASCII_BUNDLE_ITERATOR) - -#include "unicode/ustring.h" -#include "uarrsort.h" - -struct UResAEntry { - UChar *key; - UResourceBundle *item; -}; - -struct UResourceBundleAIterator { - UResourceBundle *bund; - UResAEntry *entries; - int32_t num; - int32_t cursor; -}; - -/* Must be C linkage to pass function pointer to the sort function */ - -U_CDECL_BEGIN - -static int32_t U_CALLCONV -ures_a_codepointSort(const void *context, const void *left, const void *right) { - //CompareContext *cmp=(CompareContext *)context; - return u_strcmp(((const UResAEntry *)left)->key, - ((const UResAEntry *)right)->key); -} - -U_CDECL_END - -static void ures_a_open(UResourceBundleAIterator *aiter, UResourceBundle *bund, UErrorCode *status) { - if(U_FAILURE(*status)) { - return; - } - aiter->bund = bund; - aiter->num = ures_getSize(aiter->bund); - aiter->cursor = 0; -#if !defined(U_SORT_ASCII_BUNDLE_ITERATOR) - aiter->entries = nullptr; -#else - aiter->entries = (UResAEntry*)uprv_malloc(sizeof(UResAEntry)*aiter->num); - for(int i=0;i<aiter->num;i++) { - aiter->entries[i].item = ures_getByIndex(aiter->bund, i, nullptr, status); - const char *akey = ures_getKey(aiter->entries[i].item); - int32_t len = uprv_strlen(akey)+1; - aiter->entries[i].key = (UChar*)uprv_malloc(len*sizeof(UChar)); - u_charsToUChars(akey, aiter->entries[i].key, len); - } - uprv_sortArray(aiter->entries, aiter->num, sizeof(UResAEntry), ures_a_codepointSort, nullptr, TRUE, status); -#endif -} - -static void ures_a_close(UResourceBundleAIterator *aiter) { -#if defined(U_SORT_ASCII_BUNDLE_ITERATOR) - for(int i=0;i<aiter->num;i++) { - uprv_free(aiter->entries[i].key); - ures_close(aiter->entries[i].item); - } -#endif -} - -static const UChar *ures_a_getNextString(UResourceBundleAIterator *aiter, int32_t *len, const char **key, UErrorCode *err) { -#if !defined(U_SORT_ASCII_BUNDLE_ITERATOR) - return ures_getNextString(aiter->bund, len, key, err); -#else - if(U_FAILURE(*err)) return nullptr; - UResourceBundle *item = aiter->entries[aiter->cursor].item; - const UChar* ret = ures_getString(item, len, err); - *key = ures_getKey(item); - aiter->cursor++; - return ret; -#endif -} - - -#endif - - -U_NAMESPACE_BEGIN - -// ***************************************************************************** -// class DateTimePatternGenerator -// ***************************************************************************** -static const UChar Canonical_Items[] = { - // GyQMwWEDFdaHmsSv - CAP_G, LOW_Y, CAP_Q, CAP_M, LOW_W, CAP_W, CAP_E, - CAP_D, CAP_F, LOW_D, LOW_A, // The UDATPG_x_FIELD constants and these fields have a different order than in ICU4J - CAP_H, LOW_M, LOW_S, CAP_S, LOW_V, 0 -}; - -static const dtTypeElem dtTypes[] = { - // patternChar, field, type, minLen, weight - {CAP_G, UDATPG_ERA_FIELD, DT_SHORT, 1, 3,}, - {CAP_G, UDATPG_ERA_FIELD, DT_LONG, 4, 0}, - {CAP_G, UDATPG_ERA_FIELD, DT_NARROW, 5, 0}, - - {LOW_Y, UDATPG_YEAR_FIELD, DT_NUMERIC, 1, 20}, - {CAP_Y, UDATPG_YEAR_FIELD, DT_NUMERIC + DT_DELTA, 1, 20}, - {LOW_U, UDATPG_YEAR_FIELD, DT_NUMERIC + 2*DT_DELTA, 1, 20}, - {LOW_R, UDATPG_YEAR_FIELD, DT_NUMERIC + 3*DT_DELTA, 1, 20}, - {CAP_U, UDATPG_YEAR_FIELD, DT_SHORT, 1, 3}, - {CAP_U, UDATPG_YEAR_FIELD, DT_LONG, 4, 0}, - {CAP_U, UDATPG_YEAR_FIELD, DT_NARROW, 5, 0}, - - {CAP_Q, UDATPG_QUARTER_FIELD, DT_NUMERIC, 1, 2}, - {CAP_Q, UDATPG_QUARTER_FIELD, DT_SHORT, 3, 0}, - {CAP_Q, UDATPG_QUARTER_FIELD, DT_LONG, 4, 0}, - {CAP_Q, UDATPG_QUARTER_FIELD, 
DT_NARROW, 5, 0}, - {LOW_Q, UDATPG_QUARTER_FIELD, DT_NUMERIC + DT_DELTA, 1, 2}, - {LOW_Q, UDATPG_QUARTER_FIELD, DT_SHORT - DT_DELTA, 3, 0}, - {LOW_Q, UDATPG_QUARTER_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {LOW_Q, UDATPG_QUARTER_FIELD, DT_NARROW - DT_DELTA, 5, 0}, - - {CAP_M, UDATPG_MONTH_FIELD, DT_NUMERIC, 1, 2}, - {CAP_M, UDATPG_MONTH_FIELD, DT_SHORT, 3, 0}, - {CAP_M, UDATPG_MONTH_FIELD, DT_LONG, 4, 0}, - {CAP_M, UDATPG_MONTH_FIELD, DT_NARROW, 5, 0}, - {CAP_L, UDATPG_MONTH_FIELD, DT_NUMERIC + DT_DELTA, 1, 2}, - {CAP_L, UDATPG_MONTH_FIELD, DT_SHORT - DT_DELTA, 3, 0}, - {CAP_L, UDATPG_MONTH_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {CAP_L, UDATPG_MONTH_FIELD, DT_NARROW - DT_DELTA, 5, 0}, - {LOW_L, UDATPG_MONTH_FIELD, DT_NUMERIC + DT_DELTA, 1, 1}, - - {LOW_W, UDATPG_WEEK_OF_YEAR_FIELD, DT_NUMERIC, 1, 2}, - - {CAP_W, UDATPG_WEEK_OF_MONTH_FIELD, DT_NUMERIC, 1, 0}, - - {CAP_E, UDATPG_WEEKDAY_FIELD, DT_SHORT, 1, 3}, - {CAP_E, UDATPG_WEEKDAY_FIELD, DT_LONG, 4, 0}, - {CAP_E, UDATPG_WEEKDAY_FIELD, DT_NARROW, 5, 0}, - {CAP_E, UDATPG_WEEKDAY_FIELD, DT_SHORTER, 6, 0}, - {LOW_C, UDATPG_WEEKDAY_FIELD, DT_NUMERIC + 2*DT_DELTA, 1, 2}, - {LOW_C, UDATPG_WEEKDAY_FIELD, DT_SHORT - 2*DT_DELTA, 3, 0}, - {LOW_C, UDATPG_WEEKDAY_FIELD, DT_LONG - 2*DT_DELTA, 4, 0}, - {LOW_C, UDATPG_WEEKDAY_FIELD, DT_NARROW - 2*DT_DELTA, 5, 0}, - {LOW_C, UDATPG_WEEKDAY_FIELD, DT_SHORTER - 2*DT_DELTA, 6, 0}, - {LOW_E, UDATPG_WEEKDAY_FIELD, DT_NUMERIC + DT_DELTA, 1, 2}, // LOW_E is currently not used in CLDR data, should not be canonical - {LOW_E, UDATPG_WEEKDAY_FIELD, DT_SHORT - DT_DELTA, 3, 0}, - {LOW_E, UDATPG_WEEKDAY_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {LOW_E, UDATPG_WEEKDAY_FIELD, DT_NARROW - DT_DELTA, 5, 0}, - {LOW_E, UDATPG_WEEKDAY_FIELD, DT_SHORTER - DT_DELTA, 6, 0}, - - {LOW_D, UDATPG_DAY_FIELD, DT_NUMERIC, 1, 2}, - {LOW_G, UDATPG_DAY_FIELD, DT_NUMERIC + DT_DELTA, 1, 20}, // really internal use, so we don't care - - {CAP_D, UDATPG_DAY_OF_YEAR_FIELD, DT_NUMERIC, 1, 3}, - - {CAP_F, UDATPG_DAY_OF_WEEK_IN_MONTH_FIELD, DT_NUMERIC, 1, 0}, - - {LOW_A, UDATPG_DAYPERIOD_FIELD, DT_SHORT, 1, 3}, - {LOW_A, UDATPG_DAYPERIOD_FIELD, DT_LONG, 4, 0}, - {LOW_A, UDATPG_DAYPERIOD_FIELD, DT_NARROW, 5, 0}, - {LOW_B, UDATPG_DAYPERIOD_FIELD, DT_SHORT - DT_DELTA, 1, 3}, - {LOW_B, UDATPG_DAYPERIOD_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {LOW_B, UDATPG_DAYPERIOD_FIELD, DT_NARROW - DT_DELTA, 5, 0}, - // b needs to be closer to a than to B, so we make this 3*DT_DELTA - {CAP_B, UDATPG_DAYPERIOD_FIELD, DT_SHORT - 3*DT_DELTA, 1, 3}, - {CAP_B, UDATPG_DAYPERIOD_FIELD, DT_LONG - 3*DT_DELTA, 4, 0}, - {CAP_B, UDATPG_DAYPERIOD_FIELD, DT_NARROW - 3*DT_DELTA, 5, 0}, - - {CAP_H, UDATPG_HOUR_FIELD, DT_NUMERIC + 10*DT_DELTA, 1, 2}, // 24 hour - {LOW_K, UDATPG_HOUR_FIELD, DT_NUMERIC + 11*DT_DELTA, 1, 2}, // 24 hour - {LOW_H, UDATPG_HOUR_FIELD, DT_NUMERIC, 1, 2}, // 12 hour - {CAP_K, UDATPG_HOUR_FIELD, DT_NUMERIC + DT_DELTA, 1, 2}, // 12 hour - // The C code has had versions of the following 3, keep & update. Should not need these, but... - // Without these, certain tests using e.g. staticGetSkeleton fail because j/J in patterns - // get skipped instead of mapped to the right hour chars, for example in - // DateFormatTest::TestPatternFromSkeleton - // IntlTestDateTimePatternGeneratorAPI:: testStaticGetSkeleton - // DateIntervalFormatTest::testTicket11985 - // Need to investigate better handling of jJC replacement e.g. in staticGetSkeleton. 
- {CAP_J, UDATPG_HOUR_FIELD, DT_NUMERIC + 5*DT_DELTA, 1, 2}, // 12/24 hour no AM/PM - {LOW_J, UDATPG_HOUR_FIELD, DT_NUMERIC + 6*DT_DELTA, 1, 6}, // 12/24 hour - {CAP_C, UDATPG_HOUR_FIELD, DT_NUMERIC + 7*DT_DELTA, 1, 6}, // 12/24 hour with preferred dayPeriods for 12 - - {LOW_M, UDATPG_MINUTE_FIELD, DT_NUMERIC, 1, 2}, - - {LOW_S, UDATPG_SECOND_FIELD, DT_NUMERIC, 1, 2}, - {CAP_A, UDATPG_SECOND_FIELD, DT_NUMERIC + DT_DELTA, 1, 1000}, - - {CAP_S, UDATPG_FRACTIONAL_SECOND_FIELD, DT_NUMERIC, 1, 1000}, - - {LOW_V, UDATPG_ZONE_FIELD, DT_SHORT - 2*DT_DELTA, 1, 0}, - {LOW_V, UDATPG_ZONE_FIELD, DT_LONG - 2*DT_DELTA, 4, 0}, - {LOW_Z, UDATPG_ZONE_FIELD, DT_SHORT, 1, 3}, - {LOW_Z, UDATPG_ZONE_FIELD, DT_LONG, 4, 0}, - {CAP_Z, UDATPG_ZONE_FIELD, DT_NARROW - DT_DELTA, 1, 3}, - {CAP_Z, UDATPG_ZONE_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {CAP_Z, UDATPG_ZONE_FIELD, DT_SHORT - DT_DELTA, 5, 0}, - {CAP_O, UDATPG_ZONE_FIELD, DT_SHORT - DT_DELTA, 1, 0}, - {CAP_O, UDATPG_ZONE_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {CAP_V, UDATPG_ZONE_FIELD, DT_SHORT - DT_DELTA, 1, 0}, - {CAP_V, UDATPG_ZONE_FIELD, DT_LONG - DT_DELTA, 2, 0}, - {CAP_V, UDATPG_ZONE_FIELD, DT_LONG-1 - DT_DELTA, 3, 0}, - {CAP_V, UDATPG_ZONE_FIELD, DT_LONG-2 - DT_DELTA, 4, 0}, - {CAP_X, UDATPG_ZONE_FIELD, DT_NARROW - DT_DELTA, 1, 0}, - {CAP_X, UDATPG_ZONE_FIELD, DT_SHORT - DT_DELTA, 2, 0}, - {CAP_X, UDATPG_ZONE_FIELD, DT_LONG - DT_DELTA, 4, 0}, - {LOW_X, UDATPG_ZONE_FIELD, DT_NARROW - DT_DELTA, 1, 0}, - {LOW_X, UDATPG_ZONE_FIELD, DT_SHORT - DT_DELTA, 2, 0}, - {LOW_X, UDATPG_ZONE_FIELD, DT_LONG - DT_DELTA, 4, 0}, - - {0, UDATPG_FIELD_COUNT, 0, 0, 0} , // last row of dtTypes[] - }; - -static const char* const CLDR_FIELD_APPEND[] = { - "Era", "Year", "Quarter", "Month", "Week", "*", "Day-Of-Week", - "*", "*", "Day", "*", // The UDATPG_x_FIELD constants and these fields have a different order than in ICU4J - "Hour", "Minute", "Second", "*", "Timezone" -}; - -static const char* const CLDR_FIELD_NAME[UDATPG_FIELD_COUNT] = { - "era", "year", "quarter", "month", "week", "weekOfMonth", "weekday", - "dayOfYear", "weekdayOfMonth", "day", "dayperiod", // The UDATPG_x_FIELD constants and these fields have a different order than in ICU4J - "hour", "minute", "second", "*", "zone" -}; - -static const char* const CLDR_FIELD_WIDTH[] = { // [UDATPG_WIDTH_COUNT] - "", "-short", "-narrow" -}; - -// TODO(ticket:13619): remove when definition uncommented in dtptngen.h. 
-static const int32_t UDATPG_WIDTH_COUNT = UDATPG_NARROW + 1; -static constexpr UDateTimePGDisplayWidth UDATPG_WIDTH_APPENDITEM = UDATPG_WIDE; -static constexpr int32_t UDATPG_FIELD_KEY_MAX = 24; // max length of CLDR field tag (type + width) - -// For appendItems -static const UChar UDATPG_ItemFormat[]= {0x7B, 0x30, 0x7D, 0x20, 0x251C, 0x7B, 0x32, 0x7D, 0x3A, - 0x20, 0x7B, 0x31, 0x7D, 0x2524, 0}; // {0} \u251C{2}: {1}\u2524 - -//static const UChar repeatedPatterns[6]={CAP_G, CAP_E, LOW_Z, LOW_V, CAP_Q, 0}; // "GEzvQ" - -static const char DT_DateTimePatternsTag[]="DateTimePatterns"; -static const char DT_DateTimeCalendarTag[]="calendar"; -static const char DT_DateTimeGregorianTag[]="gregorian"; -static const char DT_DateTimeAppendItemsTag[]="appendItems"; -static const char DT_DateTimeFieldsTag[]="fields"; -static const char DT_DateTimeAvailableFormatsTag[]="availableFormats"; -//static const UnicodeString repeatedPattern=UnicodeString(repeatedPatterns); - -UOBJECT_DEFINE_RTTI_IMPLEMENTATION(DateTimePatternGenerator) -UOBJECT_DEFINE_RTTI_IMPLEMENTATION(DTSkeletonEnumeration) -UOBJECT_DEFINE_RTTI_IMPLEMENTATION(DTRedundantEnumeration) - -DateTimePatternGenerator* U_EXPORT2 -DateTimePatternGenerator::createInstance(UErrorCode& status) { - return createInstance(Locale::getDefault(), status); -} - -DateTimePatternGenerator* U_EXPORT2 -DateTimePatternGenerator::createInstance(const Locale& locale, UErrorCode& status) { - if (U_FAILURE(status)) { - return nullptr; - } - LocalPointer<DateTimePatternGenerator> result( - new DateTimePatternGenerator(locale, status), status); - return U_SUCCESS(status) ? result.orphan() : nullptr; -} - -DateTimePatternGenerator* U_EXPORT2 -DateTimePatternGenerator::createEmptyInstance(UErrorCode& status) { - if (U_FAILURE(status)) { - return nullptr; - } - LocalPointer<DateTimePatternGenerator> result( - new DateTimePatternGenerator(status), status); - return U_SUCCESS(status) ? 
result.orphan() : nullptr; -} - -DateTimePatternGenerator::DateTimePatternGenerator(UErrorCode &status) : - skipMatcher(nullptr), - fAvailableFormatKeyHash(nullptr), - internalErrorCode(U_ZERO_ERROR) -{ - fp = new FormatParser(); - dtMatcher = new DateTimeMatcher(); - distanceInfo = new DistanceInfo(); - patternMap = new PatternMap(); - if (fp == nullptr || dtMatcher == nullptr || distanceInfo == nullptr || patternMap == nullptr) { - internalErrorCode = status = U_MEMORY_ALLOCATION_ERROR; - } -} - -DateTimePatternGenerator::DateTimePatternGenerator(const Locale& locale, UErrorCode &status) : - skipMatcher(nullptr), - fAvailableFormatKeyHash(nullptr), - internalErrorCode(U_ZERO_ERROR) -{ - fp = new FormatParser(); - dtMatcher = new DateTimeMatcher(); - distanceInfo = new DistanceInfo(); - patternMap = new PatternMap(); - if (fp == nullptr || dtMatcher == nullptr || distanceInfo == nullptr || patternMap == nullptr) { - internalErrorCode = status = U_MEMORY_ALLOCATION_ERROR; - } - else { - initData(locale, status); - } -} - -DateTimePatternGenerator::DateTimePatternGenerator(const DateTimePatternGenerator& other) : - UObject(), - skipMatcher(nullptr), - fAvailableFormatKeyHash(nullptr), - internalErrorCode(U_ZERO_ERROR) -{ - fp = new FormatParser(); - dtMatcher = new DateTimeMatcher(); - distanceInfo = new DistanceInfo(); - patternMap = new PatternMap(); - if (fp == nullptr || dtMatcher == nullptr || distanceInfo == nullptr || patternMap == nullptr) { - internalErrorCode = U_MEMORY_ALLOCATION_ERROR; - } - *this=other; -} - -DateTimePatternGenerator& -DateTimePatternGenerator::operator=(const DateTimePatternGenerator& other) { - // reflexive case - if (&other == this) { - return *this; - } - internalErrorCode = other.internalErrorCode; - pLocale = other.pLocale; - fDefaultHourFormatChar = other.fDefaultHourFormatChar; - *fp = *(other.fp); - dtMatcher->copyFrom(other.dtMatcher->skeleton); - *distanceInfo = *(other.distanceInfo); - dateTimeFormat = other.dateTimeFormat; - decimal = other.decimal; - // NUL-terminate for the C API. - dateTimeFormat.getTerminatedBuffer(); - decimal.getTerminatedBuffer(); - delete skipMatcher; - if ( other.skipMatcher == nullptr ) { - skipMatcher = nullptr; - } - else { - skipMatcher = new DateTimeMatcher(*other.skipMatcher); - if (skipMatcher == nullptr) - { - internalErrorCode = U_MEMORY_ALLOCATION_ERROR; - return *this; - } - } - for (int32_t i=0; i< UDATPG_FIELD_COUNT; ++i ) { - appendItemFormats[i] = other.appendItemFormats[i]; - appendItemFormats[i].getTerminatedBuffer(); // NUL-terminate for the C API. - for (int32_t j=0; j< UDATPG_WIDTH_COUNT; ++j ) { - fieldDisplayNames[i][j] = other.fieldDisplayNames[i][j]; - fieldDisplayNames[i][j].getTerminatedBuffer(); // NUL-terminate for the C API. 
- } - } - patternMap->copyFrom(*other.patternMap, internalErrorCode); - copyHashtable(other.fAvailableFormatKeyHash, internalErrorCode); - return *this; -} - - -UBool -DateTimePatternGenerator::operator==(const DateTimePatternGenerator& other) const { - if (this == &other) { - return TRUE; - } - if ((pLocale==other.pLocale) && (patternMap->equals(*other.patternMap)) && - (dateTimeFormat==other.dateTimeFormat) && (decimal==other.decimal)) { - for ( int32_t i=0 ; i<UDATPG_FIELD_COUNT; ++i ) { - if (appendItemFormats[i] != other.appendItemFormats[i]) { - return FALSE; - } - for (int32_t j=0; j< UDATPG_WIDTH_COUNT; ++j ) { - if (fieldDisplayNames[i][j] != other.fieldDisplayNames[i][j]) { - return FALSE; - } - } - } - return TRUE; - } - else { - return FALSE; - } -} - -UBool -DateTimePatternGenerator::operator!=(const DateTimePatternGenerator& other) const { - return !operator==(other); -} - -DateTimePatternGenerator::~DateTimePatternGenerator() { - if (fAvailableFormatKeyHash!=nullptr) { - delete fAvailableFormatKeyHash; - } - - if (fp != nullptr) delete fp; - if (dtMatcher != nullptr) delete dtMatcher; - if (distanceInfo != nullptr) delete distanceInfo; - if (patternMap != nullptr) delete patternMap; - if (skipMatcher != nullptr) delete skipMatcher; -} - -namespace { - -UInitOnce initOnce = U_INITONCE_INITIALIZER; -UHashtable *localeToAllowedHourFormatsMap = nullptr; - -// Value deleter for hashmap. -U_CFUNC void U_CALLCONV deleteAllowedHourFormats(void *ptr) { - uprv_free(ptr); -} - -// Close hashmap at cleanup. -U_CFUNC UBool U_CALLCONV allowedHourFormatsCleanup() { - uhash_close(localeToAllowedHourFormatsMap); - return TRUE; -} - -enum AllowedHourFormat{ - ALLOWED_HOUR_FORMAT_UNKNOWN = -1, - ALLOWED_HOUR_FORMAT_h, - ALLOWED_HOUR_FORMAT_H, - ALLOWED_HOUR_FORMAT_K, // Added ICU-20383, used by JP - ALLOWED_HOUR_FORMAT_k, // Added ICU-20383, not currently used - ALLOWED_HOUR_FORMAT_hb, - ALLOWED_HOUR_FORMAT_hB, - ALLOWED_HOUR_FORMAT_Kb, // Added ICU-20383, not currently used - ALLOWED_HOUR_FORMAT_KB, // Added ICU-20383, not currently used - // ICU-20383 The following are unlikely and not currently used - ALLOWED_HOUR_FORMAT_Hb, - ALLOWED_HOUR_FORMAT_HB -}; - -} // namespace - -void -DateTimePatternGenerator::initData(const Locale& locale, UErrorCode &status) { - //const char *baseLangName = locale.getBaseName(); // unused - - skipMatcher = nullptr; - fAvailableFormatKeyHash=nullptr; - addCanonicalItems(status); - addICUPatterns(locale, status); - addCLDRData(locale, status); - setDateTimeFromCalendar(locale, status); - setDecimalSymbols(locale, status); - umtx_initOnce(initOnce, loadAllowedHourFormatsData, status); - getAllowedHourFormats(locale, status); - // If any of the above methods failed then the object is in an invalid state. - internalErrorCode = status; -} // DateTimePatternGenerator::initData - -namespace { - -struct AllowedHourFormatsSink : public ResourceSink { - // Initialize sub-sinks. 
- AllowedHourFormatsSink() {} - virtual ~AllowedHourFormatsSink(); - - virtual void put(const char *key, ResourceValue &value, UBool /*noFallback*/, - UErrorCode &errorCode) { - ResourceTable timeData = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - for (int32_t i = 0; timeData.getKeyAndValue(i, key, value); ++i) { - const char *regionOrLocale = key; - ResourceTable formatList = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - // below we construct a list[] that has an entry for the "preferred" value at [0], - // followed by 1 or more entries for the "allowed" values, terminated with an - // entry for ALLOWED_HOUR_FORMAT_UNKNOWN (not included in length below) - LocalMemory<int32_t> list; - int32_t length = 0; - int32_t preferredFormat = ALLOWED_HOUR_FORMAT_UNKNOWN; - for (int32_t j = 0; formatList.getKeyAndValue(j, key, value); ++j) { - if (uprv_strcmp(key, "allowed") == 0) { - if (value.getType() == URES_STRING) { - length = 2; // 1 preferred to add later, 1 allowed to add now - if (list.allocateInsteadAndReset(length + 1) == nullptr) { - errorCode = U_MEMORY_ALLOCATION_ERROR; - return; - } - list[1] = getHourFormatFromUnicodeString(value.getUnicodeString(errorCode)); - } - else { - ResourceArray allowedFormats = value.getArray(errorCode); - length = allowedFormats.getSize() + 1; // 1 preferred, getSize allowed - if (list.allocateInsteadAndReset(length + 1) == nullptr) { - errorCode = U_MEMORY_ALLOCATION_ERROR; - return; - } - for (int32_t k = 1; k < length; ++k) { - allowedFormats.getValue(k-1, value); - list[k] = getHourFormatFromUnicodeString(value.getUnicodeString(errorCode)); - } - } - } else if (uprv_strcmp(key, "preferred") == 0) { - preferredFormat = getHourFormatFromUnicodeString(value.getUnicodeString(errorCode)); - } - } - if (length > 1) { - list[0] = (preferredFormat!=ALLOWED_HOUR_FORMAT_UNKNOWN)? preferredFormat: list[1]; - } else { - // fallback handling for missing data - length = 2; // 1 preferred, 1 allowed - if (list.allocateInsteadAndReset(length + 1) == nullptr) { - errorCode = U_MEMORY_ALLOCATION_ERROR; - return; - } - list[0] = (preferredFormat!=ALLOWED_HOUR_FORMAT_UNKNOWN)? preferredFormat: ALLOWED_HOUR_FORMAT_H; - list[1] = list[0]; - } - list[length] = ALLOWED_HOUR_FORMAT_UNKNOWN; - // At this point list[] will have at least two non-ALLOWED_HOUR_FORMAT_UNKNOWN entries, - // followed by ALLOWED_HOUR_FORMAT_UNKNOWN. 
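The sink above stores each value as a small array: the preferred hour format in slot 0, the allowed formats after it, and an ALLOWED_HOUR_FORMAT_UNKNOWN sentinel at the end. A simplified sketch of that layout with stand-in enum values (not ICU's types):

```cpp
#include <cstdio>
#include <initializer_list>
#include <vector>

enum HourFormat { FMT_UNKNOWN = -1, FMT_h, FMT_H, FMT_hb, FMT_hB };

// Slot 0 = preferred (or the first allowed entry), then the allowed entries,
// then a sentinel that is not counted in the logical length.
// Assumes at least one allowed entry is supplied.
static std::vector<int> make_hour_format_list(int preferred, std::initializer_list<int> allowed) {
    std::vector<int> list;
    list.push_back(preferred != FMT_UNKNOWN ? preferred : *allowed.begin());
    list.insert(list.end(), allowed.begin(), allowed.end());
    list.push_back(FMT_UNKNOWN);
    return list;
}

int main() {
    std::vector<int> list = make_hour_format_list(FMT_h, {FMT_h, FMT_hb, FMT_hB});
    std::printf("preferred=%d allowed:", list[0]);
    for (size_t i = 1; list[i] != FMT_UNKNOWN; ++i) {   // walk until the sentinel
        std::printf(" %d", list[i]);
    }
    std::printf("\n");
    return 0;
}
```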
- uhash_put(localeToAllowedHourFormatsMap, const_cast<char *>(regionOrLocale), list.orphan(), &errorCode); - if (U_FAILURE(errorCode)) { return; } - } - } - - AllowedHourFormat getHourFormatFromUnicodeString(const UnicodeString &s) { - if (s.length() == 1) { - if (s[0] == LOW_H) { return ALLOWED_HOUR_FORMAT_h; } - if (s[0] == CAP_H) { return ALLOWED_HOUR_FORMAT_H; } - if (s[0] == CAP_K) { return ALLOWED_HOUR_FORMAT_K; } - if (s[0] == LOW_K) { return ALLOWED_HOUR_FORMAT_k; } - } else if (s.length() == 2) { - if (s[0] == LOW_H && s[1] == LOW_B) { return ALLOWED_HOUR_FORMAT_hb; } - if (s[0] == LOW_H && s[1] == CAP_B) { return ALLOWED_HOUR_FORMAT_hB; } - if (s[0] == CAP_K && s[1] == LOW_B) { return ALLOWED_HOUR_FORMAT_Kb; } - if (s[0] == CAP_K && s[1] == CAP_B) { return ALLOWED_HOUR_FORMAT_KB; } - if (s[0] == CAP_H && s[1] == LOW_B) { return ALLOWED_HOUR_FORMAT_Hb; } - if (s[0] == CAP_H && s[1] == CAP_B) { return ALLOWED_HOUR_FORMAT_HB; } - } - - return ALLOWED_HOUR_FORMAT_UNKNOWN; - } -}; - -} // namespace - -AllowedHourFormatsSink::~AllowedHourFormatsSink() {} - -U_CFUNC void U_CALLCONV DateTimePatternGenerator::loadAllowedHourFormatsData(UErrorCode &status) { - if (U_FAILURE(status)) { return; } - localeToAllowedHourFormatsMap = uhash_open( - uhash_hashChars, uhash_compareChars, nullptr, &status); - if (U_FAILURE(status)) { return; } - - uhash_setValueDeleter(localeToAllowedHourFormatsMap, deleteAllowedHourFormats); - ucln_i18n_registerCleanup(UCLN_I18N_ALLOWED_HOUR_FORMATS, allowedHourFormatsCleanup); - - LocalUResourceBundlePointer rb(ures_openDirect(nullptr, "supplementalData", &status)); - if (U_FAILURE(status)) { return; } - - AllowedHourFormatsSink sink; - // TODO: Currently in the enumeration each table allocates a new array. - // Try to reduce the number of memory allocations. Consider storing a - // UVector32 with the concatenation of all of the sub-arrays, put the start index - // into the hashmap, store 6 single-value sub-arrays right at the beginning of the - // vector (at index enum*2) for easy data sharing, copy sub-arrays into runtime - // object. Remember to clean up the vector, too. - ures_getAllItemsWithFallback(rb.getAlias(), "timeData", sink, status); -} - -void DateTimePatternGenerator::getAllowedHourFormats(const Locale &locale, UErrorCode &status) { - if (U_FAILURE(status)) { return; } - Locale maxLocale(locale); - maxLocale.addLikelySubtags(status); - if (U_FAILURE(status)) { - return; - } - - const char *country = maxLocale.getCountry(); - if (*country == '\0') { country = "001"; } - const char *language = maxLocale.getLanguage(); - - CharString langCountry; - langCountry.append(language, static_cast<int32_t>(uprv_strlen(language)), status); - langCountry.append('_', status); - langCountry.append(country, static_cast<int32_t>(uprv_strlen(country)), status); - - int32_t *allowedFormats; - allowedFormats = (int32_t *)uhash_get(localeToAllowedHourFormatsMap, langCountry.data()); - if (allowedFormats == nullptr) { - allowedFormats = (int32_t *)uhash_get(localeToAllowedHourFormatsMap, const_cast<char *>(country)); - } - - if (allowedFormats != nullptr) { // Lookup is successful - // Here allowedFormats points to a list consisting of key for preferredFormat, - // followed by one or more keys for allowedFormats, then followed by ALLOWED_HOUR_FORMAT_UNKNOWN. 
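The lookup above maximizes the locale, then tries the "<language>_<region>" key, then the bare region, and finally a hard default. A sketch of that fallback chain over a made-up table (the values are illustrative, not CLDR data):

```cpp
#include <cstdio>
#include <map>
#include <string>

// Most specific key first, then the region alone, then the world default "001",
// then a hard-coded 'H' as a last resort.
static char default_hour_char(const std::string& language, const std::string& region) {
    static const std::map<std::string, char> table = {
        {"en_US", 'h'}, {"US", 'h'}, {"JP", 'K'}, {"001", 'H'},
    };
    auto it = table.find(language + "_" + region);
    if (it == table.end()) it = table.find(region);
    if (it == table.end()) it = table.find("001");
    return it != table.end() ? it->second : 'H';
}

int main() {
    std::printf("%c %c %c\n", default_hour_char("en", "US"),
                default_hour_char("ja", "JP"), default_hour_char("de", "DE"));
    return 0;
}
```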
- switch (allowedFormats[0]) { - case ALLOWED_HOUR_FORMAT_h: fDefaultHourFormatChar = LOW_H; break; - case ALLOWED_HOUR_FORMAT_H: fDefaultHourFormatChar = CAP_H; break; - case ALLOWED_HOUR_FORMAT_K: fDefaultHourFormatChar = CAP_K; break; - case ALLOWED_HOUR_FORMAT_k: fDefaultHourFormatChar = LOW_K; break; - default: fDefaultHourFormatChar = CAP_H; break; - } - for (int32_t i = 0; i < UPRV_LENGTHOF(fAllowedHourFormats); ++i) { - fAllowedHourFormats[i] = allowedFormats[i + 1]; - if (fAllowedHourFormats[i] == ALLOWED_HOUR_FORMAT_UNKNOWN) { - break; - } - } - } else { // Lookup failed, twice - fDefaultHourFormatChar = CAP_H; - fAllowedHourFormats[0] = ALLOWED_HOUR_FORMAT_H; - fAllowedHourFormats[1] = ALLOWED_HOUR_FORMAT_UNKNOWN; - } -} - -UnicodeString -DateTimePatternGenerator::getSkeleton(const UnicodeString& pattern, UErrorCode& -/*status*/) { - FormatParser fp2; - DateTimeMatcher matcher; - PtnSkeleton localSkeleton; - matcher.set(pattern, &fp2, localSkeleton); - return localSkeleton.getSkeleton(); -} - -UnicodeString -DateTimePatternGenerator::staticGetSkeleton( - const UnicodeString& pattern, UErrorCode& /*status*/) { - FormatParser fp; - DateTimeMatcher matcher; - PtnSkeleton localSkeleton; - matcher.set(pattern, &fp, localSkeleton); - return localSkeleton.getSkeleton(); -} - -UnicodeString -DateTimePatternGenerator::getBaseSkeleton(const UnicodeString& pattern, UErrorCode& /*status*/) { - FormatParser fp2; - DateTimeMatcher matcher; - PtnSkeleton localSkeleton; - matcher.set(pattern, &fp2, localSkeleton); - return localSkeleton.getBaseSkeleton(); -} - -UnicodeString -DateTimePatternGenerator::staticGetBaseSkeleton( - const UnicodeString& pattern, UErrorCode& /*status*/) { - FormatParser fp; - DateTimeMatcher matcher; - PtnSkeleton localSkeleton; - matcher.set(pattern, &fp, localSkeleton); - return localSkeleton.getBaseSkeleton(); -} - -void -DateTimePatternGenerator::addICUPatterns(const Locale& locale, UErrorCode& status) { - if (U_FAILURE(status)) { return; } - UnicodeString dfPattern; - UnicodeString conflictingString; - DateFormat* df; - - // Load with ICU patterns - for (int32_t i=DateFormat::kFull; i<=DateFormat::kShort; i++) { - DateFormat::EStyle style = (DateFormat::EStyle)i; - df = DateFormat::createDateInstance(style, locale); - SimpleDateFormat* sdf; - if (df != nullptr && (sdf = dynamic_cast<SimpleDateFormat*>(df)) != nullptr) { - sdf->toPattern(dfPattern); - addPattern(dfPattern, FALSE, conflictingString, status); - } - // TODO Maybe we should return an error when the date format isn't simple. - delete df; - if (U_FAILURE(status)) { return; } - - df = DateFormat::createTimeInstance(style, locale); - if (df != nullptr && (sdf = dynamic_cast<SimpleDateFormat*>(df)) != nullptr) { - sdf->toPattern(dfPattern); - addPattern(dfPattern, FALSE, conflictingString, status); - - // TODO: C++ and Java are inconsistent (see #12568). - // C++ uses MEDIUM, but Java uses SHORT. - if ( i==DateFormat::kShort && !dfPattern.isEmpty() ) { - consumeShortTimePattern(dfPattern, status); - } - } - // TODO Maybe we should return an error when the date format isn't simple. 
- delete df; - if (U_FAILURE(status)) { return; } - } -} - -void -DateTimePatternGenerator::hackTimes(const UnicodeString& hackPattern, UErrorCode& status) { - UnicodeString conflictingString; - - fp->set(hackPattern); - UnicodeString mmss; - UBool gotMm=FALSE; - for (int32_t i=0; i<fp->itemNumber; ++i) { - UnicodeString field = fp->items[i]; - if ( fp->isQuoteLiteral(field) ) { - if ( gotMm ) { - UnicodeString quoteLiteral; - fp->getQuoteLiteral(quoteLiteral, &i); - mmss += quoteLiteral; - } - } - else { - if (fp->isPatternSeparator(field) && gotMm) { - mmss+=field; - } - else { - UChar ch=field.charAt(0); - if (ch==LOW_M) { - gotMm=TRUE; - mmss+=field; - } - else { - if (ch==LOW_S) { - if (!gotMm) { - break; - } - mmss+= field; - addPattern(mmss, FALSE, conflictingString, status); - break; - } - else { - if (gotMm || ch==LOW_Z || ch==CAP_Z || ch==LOW_V || ch==CAP_V) { - break; - } - } - } - } - } - } -} - -#define ULOC_LOCALE_IDENTIFIER_CAPACITY (ULOC_FULLNAME_CAPACITY + 1 + ULOC_KEYWORD_AND_VALUES_CAPACITY) - -void -DateTimePatternGenerator::getCalendarTypeToUse(const Locale& locale, CharString& destination, UErrorCode& err) { - destination.clear().append(DT_DateTimeGregorianTag, -1, err); // initial default - if ( U_SUCCESS(err) ) { - UErrorCode localStatus = U_ZERO_ERROR; - char localeWithCalendarKey[ULOC_LOCALE_IDENTIFIER_CAPACITY]; - // obtain a locale that always has the calendar key value that should be used - ures_getFunctionalEquivalent( - localeWithCalendarKey, - ULOC_LOCALE_IDENTIFIER_CAPACITY, - nullptr, - "calendar", - "calendar", - locale.getName(), - nullptr, - FALSE, - &localStatus); - localeWithCalendarKey[ULOC_LOCALE_IDENTIFIER_CAPACITY-1] = 0; // ensure null termination - // now get the calendar key value from that locale - char calendarType[ULOC_KEYWORDS_CAPACITY]; - int32_t calendarTypeLen = uloc_getKeywordValue( - localeWithCalendarKey, - "calendar", - calendarType, - ULOC_KEYWORDS_CAPACITY, - &localStatus); - // If the input locale was invalid, don't fail with missing resource error, instead - // continue with default of Gregorian. - if (U_FAILURE(localStatus) && localStatus != U_MISSING_RESOURCE_ERROR) { - err = localStatus; - return; - } - if (calendarTypeLen < ULOC_KEYWORDS_CAPACITY) { - destination.clear().append(calendarType, -1, err); - if (U_FAILURE(err)) { return; } - } - } -} - -void -DateTimePatternGenerator::consumeShortTimePattern(const UnicodeString& shortTimePattern, - UErrorCode& status) { - if (U_FAILURE(status)) { return; } - // ICU-20383 No longer set fDefaultHourFormatChar to the hour format character from - // this pattern; instead it is set from localeToAllowedHourFormatsMap which now - // includes entries for both preferred and allowed formats. - - // HACK for hh:ss - hackTimes(shortTimePattern, status); -} - -struct DateTimePatternGenerator::AppendItemFormatsSink : public ResourceSink { - - // Destination for data, modified via setters. 
- DateTimePatternGenerator& dtpg; - - AppendItemFormatsSink(DateTimePatternGenerator& _dtpg) : dtpg(_dtpg) {} - virtual ~AppendItemFormatsSink(); - - virtual void put(const char *key, ResourceValue &value, UBool /*noFallback*/, - UErrorCode &errorCode) { - ResourceTable itemsTable = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - for (int32_t i = 0; itemsTable.getKeyAndValue(i, key, value); ++i) { - UDateTimePatternField field = dtpg.getAppendFormatNumber(key); - if (field == UDATPG_FIELD_COUNT) { continue; } - const UnicodeString& valueStr = value.getUnicodeString(errorCode); - if (dtpg.getAppendItemFormat(field).isEmpty() && !valueStr.isEmpty()) { - dtpg.setAppendItemFormat(field, valueStr); - } - } - } - - void fillInMissing() { - UnicodeString defaultItemFormat(TRUE, UDATPG_ItemFormat, UPRV_LENGTHOF(UDATPG_ItemFormat)-1); // Read-only alias. - for (int32_t i = 0; i < UDATPG_FIELD_COUNT; i++) { - UDateTimePatternField field = (UDateTimePatternField)i; - if (dtpg.getAppendItemFormat(field).isEmpty()) { - dtpg.setAppendItemFormat(field, defaultItemFormat); - } - } - } -}; - -struct DateTimePatternGenerator::AppendItemNamesSink : public ResourceSink { - - // Destination for data, modified via setters. - DateTimePatternGenerator& dtpg; - - AppendItemNamesSink(DateTimePatternGenerator& _dtpg) : dtpg(_dtpg) {} - virtual ~AppendItemNamesSink(); - - virtual void put(const char *key, ResourceValue &value, UBool /*noFallback*/, - UErrorCode &errorCode) { - ResourceTable itemsTable = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - for (int32_t i = 0; itemsTable.getKeyAndValue(i, key, value); ++i) { - UDateTimePGDisplayWidth width; - UDateTimePatternField field = dtpg.getFieldAndWidthIndices(key, &width); - if (field == UDATPG_FIELD_COUNT) { continue; } - ResourceTable detailsTable = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - for (int32_t j = 0; detailsTable.getKeyAndValue(j, key, value); ++j) { - if (uprv_strcmp(key, "dn") != 0) { continue; } - const UnicodeString& valueStr = value.getUnicodeString(errorCode); - if (dtpg.getFieldDisplayName(field,width).isEmpty() && !valueStr.isEmpty()) { - dtpg.setFieldDisplayName(field,width,valueStr); - } - break; - } - } - } - - void fillInMissing() { - for (int32_t i = 0; i < UDATPG_FIELD_COUNT; i++) { - UnicodeString& valueStr = dtpg.getMutableFieldDisplayName((UDateTimePatternField)i, UDATPG_WIDE); - if (valueStr.isEmpty()) { - valueStr = CAP_F; - U_ASSERT(i < 20); - if (i < 10) { - // F0, F1, ..., F9 - valueStr += (UChar)(i+0x30); - } else { - // F10, F11, ... - valueStr += (UChar)0x31; - valueStr += (UChar)(i-10 + 0x30); - } - // NUL-terminate for the C API. - valueStr.getTerminatedBuffer(); - } - for (int32_t j = 1; j < UDATPG_WIDTH_COUNT; j++) { - UnicodeString& valueStr2 = dtpg.getMutableFieldDisplayName((UDateTimePatternField)i, (UDateTimePGDisplayWidth)j); - if (valueStr2.isEmpty()) { - valueStr2 = dtpg.getFieldDisplayName((UDateTimePatternField)i, (UDateTimePGDisplayWidth)(j-1)); - } - } - } - } -}; - -struct DateTimePatternGenerator::AvailableFormatsSink : public ResourceSink { - - // Destination for data, modified via setters. - DateTimePatternGenerator& dtpg; - - // Temporary variable, required for calling addPatternWithSkeleton. 
- UnicodeString conflictingPattern; - - AvailableFormatsSink(DateTimePatternGenerator& _dtpg) : dtpg(_dtpg) {} - virtual ~AvailableFormatsSink(); - - virtual void put(const char *key, ResourceValue &value, UBool isRoot, - UErrorCode &errorCode) { - ResourceTable itemsTable = value.getTable(errorCode); - if (U_FAILURE(errorCode)) { return; } - for (int32_t i = 0; itemsTable.getKeyAndValue(i, key, value); ++i) { - const UnicodeString formatKey(key, -1, US_INV); - if (!dtpg.isAvailableFormatSet(formatKey) ) { - dtpg.setAvailableFormat(formatKey, errorCode); - // Add pattern with its associated skeleton. Override any duplicate - // derived from std patterns, but not a previous availableFormats entry: - const UnicodeString& formatValue = value.getUnicodeString(errorCode); - conflictingPattern.remove(); - dtpg.addPatternWithSkeleton(formatValue, &formatKey, !isRoot, conflictingPattern, errorCode); - } - } - } -}; - -// Virtual destructors must be defined out of line. -DateTimePatternGenerator::AppendItemFormatsSink::~AppendItemFormatsSink() {} -DateTimePatternGenerator::AppendItemNamesSink::~AppendItemNamesSink() {} -DateTimePatternGenerator::AvailableFormatsSink::~AvailableFormatsSink() {} - -void -DateTimePatternGenerator::addCLDRData(const Locale& locale, UErrorCode& errorCode) { - if (U_FAILURE(errorCode)) { return; } - UnicodeString rbPattern, value, field; - CharString path; - - LocalUResourceBundlePointer rb(ures_open(nullptr, locale.getName(), &errorCode)); - if (U_FAILURE(errorCode)) { return; } - - CharString calendarTypeToUse; // to be filled in with the type to use, if all goes well - getCalendarTypeToUse(locale, calendarTypeToUse, errorCode); - if (U_FAILURE(errorCode)) { return; } - - // Local err to ignore resource not found exceptions - UErrorCode err = U_ZERO_ERROR; - - // Load append item formats. - AppendItemFormatsSink appendItemFormatsSink(*this); - path.clear() - .append(DT_DateTimeCalendarTag, errorCode) - .append('/', errorCode) - .append(calendarTypeToUse, errorCode) - .append('/', errorCode) - .append(DT_DateTimeAppendItemsTag, errorCode); // i.e., calendar/xxx/appendItems - if (U_FAILURE(errorCode)) { return; } - ures_getAllItemsWithFallback(rb.getAlias(), path.data(), appendItemFormatsSink, err); - appendItemFormatsSink.fillInMissing(); - - // Load CLDR item names. - err = U_ZERO_ERROR; - AppendItemNamesSink appendItemNamesSink(*this); - ures_getAllItemsWithFallback(rb.getAlias(), DT_DateTimeFieldsTag, appendItemNamesSink, err); - appendItemNamesSink.fillInMissing(); - - // Load the available formats from CLDR. 
- err = U_ZERO_ERROR; - initHashtable(errorCode); - if (U_FAILURE(errorCode)) { return; } - AvailableFormatsSink availableFormatsSink(*this); - path.clear() - .append(DT_DateTimeCalendarTag, errorCode) - .append('/', errorCode) - .append(calendarTypeToUse, errorCode) - .append('/', errorCode) - .append(DT_DateTimeAvailableFormatsTag, errorCode); // i.e., calendar/xxx/availableFormats - if (U_FAILURE(errorCode)) { return; } - ures_getAllItemsWithFallback(rb.getAlias(), path.data(), availableFormatsSink, err); -} - -void -DateTimePatternGenerator::initHashtable(UErrorCode& err) { - if (U_FAILURE(err)) { return; } - if (fAvailableFormatKeyHash!=nullptr) { - return; - } - LocalPointer<Hashtable> hash(new Hashtable(FALSE, err), err); - if (U_SUCCESS(err)) { - fAvailableFormatKeyHash = hash.orphan(); - } -} - -void -DateTimePatternGenerator::setAppendItemFormat(UDateTimePatternField field, const UnicodeString& value) { - appendItemFormats[field] = value; - // NUL-terminate for the C API. - appendItemFormats[field].getTerminatedBuffer(); -} - -const UnicodeString& -DateTimePatternGenerator::getAppendItemFormat(UDateTimePatternField field) const { - return appendItemFormats[field]; -} - -void -DateTimePatternGenerator::setAppendItemName(UDateTimePatternField field, const UnicodeString& value) { - setFieldDisplayName(field, UDATPG_WIDTH_APPENDITEM, value); -} - -const UnicodeString& -DateTimePatternGenerator::getAppendItemName(UDateTimePatternField field) const { - return fieldDisplayNames[field][UDATPG_WIDTH_APPENDITEM]; -} - -void -DateTimePatternGenerator::setFieldDisplayName(UDateTimePatternField field, UDateTimePGDisplayWidth width, const UnicodeString& value) { - fieldDisplayNames[field][width] = value; - // NUL-terminate for the C API. - fieldDisplayNames[field][width].getTerminatedBuffer(); -} - -UnicodeString -DateTimePatternGenerator::getFieldDisplayName(UDateTimePatternField field, UDateTimePGDisplayWidth width) const { - return fieldDisplayNames[field][width]; -} - -UnicodeString& -DateTimePatternGenerator::getMutableFieldDisplayName(UDateTimePatternField field, UDateTimePGDisplayWidth width) { - return fieldDisplayNames[field][width]; -} - -void -DateTimePatternGenerator::getAppendName(UDateTimePatternField field, UnicodeString& value) { - value = SINGLE_QUOTE; - value += fieldDisplayNames[field][UDATPG_WIDTH_APPENDITEM]; - value += SINGLE_QUOTE; -} - -UnicodeString -DateTimePatternGenerator::getBestPattern(const UnicodeString& patternForm, UErrorCode& status) { - return getBestPattern(patternForm, UDATPG_MATCH_NO_OPTIONS, status); -} - -UnicodeString -DateTimePatternGenerator::getBestPattern(const UnicodeString& patternForm, UDateTimePatternMatchOptions options, UErrorCode& status) { - if (U_FAILURE(status)) { - return UnicodeString(); - } - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return UnicodeString(); - } - const UnicodeString *bestPattern = nullptr; - UnicodeString dtFormat; - UnicodeString resultPattern; - int32_t flags = kDTPGNoFlags; - - int32_t dateMask=(1<<UDATPG_DAYPERIOD_FIELD) - 1; - int32_t timeMask=(1<<UDATPG_FIELD_COUNT) - 1 - dateMask; - - // Replace hour metacharacters 'j', 'C' and 'J', set flags as necessary - UnicodeString patternFormMapped = mapSkeletonMetacharacters(patternForm, &flags, status); - if (U_FAILURE(status)) { - return UnicodeString(); - } - - resultPattern.remove(); - dtMatcher->set(patternFormMapped, fp); - const PtnSkeleton* specifiedSkeleton = nullptr; - bestPattern=getBestRaw(*dtMatcher, -1, distanceInfo, status, 
&specifiedSkeleton); - if (U_FAILURE(status)) { - return UnicodeString(); - } - - if ( distanceInfo->missingFieldMask==0 && distanceInfo->extraFieldMask==0 ) { - resultPattern = adjustFieldTypes(*bestPattern, specifiedSkeleton, flags, options); - - return resultPattern; - } - int32_t neededFields = dtMatcher->getFieldMask(); - UnicodeString datePattern=getBestAppending(neededFields & dateMask, flags, status, options); - UnicodeString timePattern=getBestAppending(neededFields & timeMask, flags, status, options); - if (U_FAILURE(status)) { - return UnicodeString(); - } - if (datePattern.length()==0) { - if (timePattern.length()==0) { - resultPattern.remove(); - } - else { - return timePattern; - } - } - if (timePattern.length()==0) { - return datePattern; - } - resultPattern.remove(); - status = U_ZERO_ERROR; - dtFormat=getDateTimeFormat(); - SimpleFormatter(dtFormat, 2, 2, status).format(timePattern, datePattern, resultPattern, status); - return resultPattern; -} - -/* - * Map a skeleton that may have metacharacters jJC to one without, by replacing - * the metacharacters with locale-appropriate fields of h/H/k/K and of a/b/B - * (depends on fDefaultHourFormatChar and fAllowedHourFormats being set, which in - * turn depends on initData having been run). This method also updates the flags - * as necessary. Returns the updated skeleton. - */ -UnicodeString -DateTimePatternGenerator::mapSkeletonMetacharacters(const UnicodeString& patternForm, int32_t* flags, UErrorCode& status) { - UnicodeString patternFormMapped; - patternFormMapped.remove(); - UBool inQuoted = FALSE; - int32_t patPos, patLen = patternForm.length(); - for (patPos = 0; patPos < patLen; patPos++) { - UChar patChr = patternForm.charAt(patPos); - if (patChr == SINGLE_QUOTE) { - inQuoted = !inQuoted; - } else if (!inQuoted) { - // Handle special mappings for 'j' and 'C' in which fields lengths - // 1,3,5 => hour field length 1 - // 2,4,6 => hour field length 2 - // 1,2 => abbreviated dayPeriod (field length 1..3) - // 3,4 => long dayPeriod (field length 4) - // 5,6 => narrow dayPeriod (field length 5) - if (patChr == LOW_J || patChr == CAP_C) { - int32_t extraLen = 0; // 1 less than total field length - while (patPos+1 < patLen && patternForm.charAt(patPos+1)==patChr) { - extraLen++; - patPos++; - } - int32_t hourLen = 1 + (extraLen & 1); - int32_t dayPeriodLen = (extraLen < 2)? 
1: 3 + (extraLen >> 1); - UChar hourChar = LOW_H; - UChar dayPeriodChar = LOW_A; - if (patChr == LOW_J) { - hourChar = fDefaultHourFormatChar; - } else { - AllowedHourFormat bestAllowed; - if (fAllowedHourFormats[0] != ALLOWED_HOUR_FORMAT_UNKNOWN) { - bestAllowed = (AllowedHourFormat)fAllowedHourFormats[0]; - } else { - status = U_INVALID_FORMAT_ERROR; - return UnicodeString(); - } - if (bestAllowed == ALLOWED_HOUR_FORMAT_H || bestAllowed == ALLOWED_HOUR_FORMAT_HB || bestAllowed == ALLOWED_HOUR_FORMAT_Hb) { - hourChar = CAP_H; - } else if (bestAllowed == ALLOWED_HOUR_FORMAT_K || bestAllowed == ALLOWED_HOUR_FORMAT_KB || bestAllowed == ALLOWED_HOUR_FORMAT_Kb) { - hourChar = CAP_K; - } else if (bestAllowed == ALLOWED_HOUR_FORMAT_k) { - hourChar = LOW_K; - } - // in #13183 just add b/B to skeleton, no longer need to set special flags - if (bestAllowed == ALLOWED_HOUR_FORMAT_HB || bestAllowed == ALLOWED_HOUR_FORMAT_hB || bestAllowed == ALLOWED_HOUR_FORMAT_KB) { - dayPeriodChar = CAP_B; - } else if (bestAllowed == ALLOWED_HOUR_FORMAT_Hb || bestAllowed == ALLOWED_HOUR_FORMAT_hb || bestAllowed == ALLOWED_HOUR_FORMAT_Kb) { - dayPeriodChar = LOW_B; - } - } - if (hourChar==CAP_H || hourChar==LOW_K) { - dayPeriodLen = 0; - } - while (dayPeriodLen-- > 0) { - patternFormMapped.append(dayPeriodChar); - } - while (hourLen-- > 0) { - patternFormMapped.append(hourChar); - } - } else if (patChr == CAP_J) { - // Get pattern for skeleton with H, then replace H or k - // with fDefaultHourFormatChar (if different) - patternFormMapped.append(CAP_H); - *flags |= kDTPGSkeletonUsesCapJ; - } else { - patternFormMapped.append(patChr); - } - } - } - return patternFormMapped; -} - -UnicodeString -DateTimePatternGenerator::replaceFieldTypes(const UnicodeString& pattern, - const UnicodeString& skeleton, - UErrorCode& status) { - return replaceFieldTypes(pattern, skeleton, UDATPG_MATCH_NO_OPTIONS, status); -} - -UnicodeString -DateTimePatternGenerator::replaceFieldTypes(const UnicodeString& pattern, - const UnicodeString& skeleton, - UDateTimePatternMatchOptions options, - UErrorCode& status) { - if (U_FAILURE(status)) { - return UnicodeString(); - } - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return UnicodeString(); - } - dtMatcher->set(skeleton, fp); - UnicodeString result = adjustFieldTypes(pattern, nullptr, kDTPGNoFlags, options); - return result; -} - -void -DateTimePatternGenerator::setDecimal(const UnicodeString& newDecimal) { - this->decimal = newDecimal; - // NUL-terminate for the C API. - this->decimal.getTerminatedBuffer(); -} - -const UnicodeString& -DateTimePatternGenerator::getDecimal() const { - return decimal; -} - -void -DateTimePatternGenerator::addCanonicalItems(UErrorCode& status) { - if (U_FAILURE(status)) { return; } - UnicodeString conflictingPattern; - - for (int32_t i=0; i<UDATPG_FIELD_COUNT; i++) { - if (Canonical_Items[i] > 0) { - addPattern(UnicodeString(Canonical_Items[i]), FALSE, conflictingPattern, status); - } - if (U_FAILURE(status)) { return; } - } -} - -void -DateTimePatternGenerator::setDateTimeFormat(const UnicodeString& dtFormat) { - dateTimeFormat = dtFormat; - // NUL-terminate for the C API. 
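mapSkeletonMetacharacters() above expands a run of 'j' (or 'C') into locale-appropriate hour and day-period fields, with the run length selecting the field widths. A sketch of just that length arithmetic; the hour and day-period characters are parameters here because in ICU they come from the allowed-hour-formats data:

```cpp
#include <cstdio>
#include <string>

// Run lengths 1,3,5 give a 1-char hour field, 2,4,6 give a 2-char one; runs of 3-4
// select the long day period and 5-6 the narrow one; 24-hour cycles drop it entirely.
static std::string expand_j_run(int runLen, char hourChar, char dayPeriodChar) {
    int extraLen = runLen - 1;                         // 0 for a single 'j'
    int hourLen = 1 + (extraLen & 1);
    int dayPeriodLen = (extraLen < 2) ? 1 : 3 + (extraLen >> 1);
    if (hourChar == 'H' || hourChar == 'k') {
        dayPeriodLen = 0;                              // no am/pm field for 24-hour cycles
    }
    return std::string(dayPeriodLen, dayPeriodChar) + std::string(hourLen, hourChar);
}

int main() {
    std::printf("%s\n", expand_j_run(1, 'h', 'a').c_str());  // "ah"
    std::printf("%s\n", expand_j_run(4, 'h', 'a').c_str());  // "aaaahh"
    std::printf("%s\n", expand_j_run(2, 'H', 'a').c_str());  // "HH"
    return 0;
}
```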
- dateTimeFormat.getTerminatedBuffer(); -} - -const UnicodeString& -DateTimePatternGenerator::getDateTimeFormat() const { - return dateTimeFormat; -} - -void -DateTimePatternGenerator::setDateTimeFromCalendar(const Locale& locale, UErrorCode& status) { - if (U_FAILURE(status)) { return; } - - const UChar *resStr; - int32_t resStrLen = 0; - - LocalPointer<Calendar> fCalendar(Calendar::createInstance(locale, status), status); - if (U_FAILURE(status)) { return; } - - LocalUResourceBundlePointer calData(ures_open(nullptr, locale.getBaseName(), &status)); - if (U_FAILURE(status)) { return; } - ures_getByKey(calData.getAlias(), DT_DateTimeCalendarTag, calData.getAlias(), &status); - if (U_FAILURE(status)) { return; } - - LocalUResourceBundlePointer dateTimePatterns; - if (fCalendar->getType() != nullptr && *fCalendar->getType() != '\0' - && uprv_strcmp(fCalendar->getType(), DT_DateTimeGregorianTag) != 0) { - dateTimePatterns.adoptInstead(ures_getByKeyWithFallback(calData.getAlias(), fCalendar->getType(), - nullptr, &status)); - ures_getByKeyWithFallback(dateTimePatterns.getAlias(), DT_DateTimePatternsTag, - dateTimePatterns.getAlias(), &status); - } - - if (dateTimePatterns.isNull() || status == U_MISSING_RESOURCE_ERROR) { - status = U_ZERO_ERROR; - dateTimePatterns.adoptInstead(ures_getByKeyWithFallback(calData.getAlias(), DT_DateTimeGregorianTag, - dateTimePatterns.orphan(), &status)); - ures_getByKeyWithFallback(dateTimePatterns.getAlias(), DT_DateTimePatternsTag, - dateTimePatterns.getAlias(), &status); - } - if (U_FAILURE(status)) { return; } - - if (ures_getSize(dateTimePatterns.getAlias()) <= DateFormat::kDateTime) - { - status = U_INVALID_FORMAT_ERROR; - return; - } - resStr = ures_getStringByIndex(dateTimePatterns.getAlias(), (int32_t)DateFormat::kDateTime, &resStrLen, &status); - setDateTimeFormat(UnicodeString(TRUE, resStr, resStrLen)); -} - -void -DateTimePatternGenerator::setDecimalSymbols(const Locale& locale, UErrorCode& status) { - DecimalFormatSymbols dfs = DecimalFormatSymbols(locale, status); - if(U_SUCCESS(status)) { - decimal = dfs.getSymbol(DecimalFormatSymbols::kDecimalSeparatorSymbol); - // NUL-terminate for the C API. - decimal.getTerminatedBuffer(); - } -} - -UDateTimePatternConflict -DateTimePatternGenerator::addPattern( - const UnicodeString& pattern, - UBool override, - UnicodeString &conflictingPattern, - UErrorCode& status) -{ - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return UDATPG_NO_CONFLICT; - } - - return addPatternWithSkeleton(pattern, nullptr, override, conflictingPattern, status); -} - -// For DateTimePatternGenerator::addPatternWithSkeleton - -// If skeletonToUse is specified, then an availableFormats entry is being added. In this case: -// 1. We pass that skeleton to matcher.set instead of having it derive a skeleton from the pattern. -// 2. If the new entry's skeleton or basePattern does match an existing entry but that entry also had a skeleton specified -// (i.e. it was also from availableFormats), then the new entry does not override it regardless of the value of the override -// parameter. This prevents later availableFormats entries from a parent locale overriding earlier ones from the actual -// specified locale. However, availableFormats entries *should* override entries with matching skeleton whose skeleton was -// derived (i.e. entries derived from the standard date/time patters for the specified locale). -// 3. 
When adding the pattern (patternMap->add), we set a new boolean to indicate that the added entry had a -// specified skeleton (which sets a new field in the PtnElem in the PatternMap). -UDateTimePatternConflict -DateTimePatternGenerator::addPatternWithSkeleton( - const UnicodeString& pattern, - const UnicodeString* skeletonToUse, - UBool override, - UnicodeString& conflictingPattern, - UErrorCode& status) -{ - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return UDATPG_NO_CONFLICT; - } - - UnicodeString basePattern; - PtnSkeleton skeleton; - UDateTimePatternConflict conflictingStatus = UDATPG_NO_CONFLICT; - - DateTimeMatcher matcher; - if ( skeletonToUse == nullptr ) { - matcher.set(pattern, fp, skeleton); - matcher.getBasePattern(basePattern); - } else { - matcher.set(*skeletonToUse, fp, skeleton); // no longer trims skeleton fields to max len 3, per #7930 - matcher.getBasePattern(basePattern); // or perhaps instead: basePattern = *skeletonToUse; - } - // We only care about base conflicts - and replacing the pattern associated with a base - if: - // 1. the conflicting previous base pattern did *not* have an explicit skeleton; in that case the previous - // base + pattern combination was derived from either (a) a canonical item, (b) a standard format, or - // (c) a pattern specified programmatically with a previous call to addPattern (which would only happen - // if we are getting here from a subsequent call to addPattern). - // 2. a skeleton is specified for the current pattern, but override=false; in that case we are checking - // availableFormats items from root, which should not override any previous entry with the same base. - UBool entryHadSpecifiedSkeleton; - const UnicodeString *duplicatePattern = patternMap->getPatternFromBasePattern(basePattern, entryHadSpecifiedSkeleton); - if (duplicatePattern != nullptr && (!entryHadSpecifiedSkeleton || (skeletonToUse != nullptr && !override))) { - conflictingStatus = UDATPG_BASE_CONFLICT; - conflictingPattern = *duplicatePattern; - if (!override) { - return conflictingStatus; - } - } - // The only time we get here with override=true and skeletonToUse!=null is when adding availableFormats - // items from CLDR data. In that case, we don't want an item from a parent locale to replace an item with - // same skeleton from the specified locale, so skip the current item if skeletonWasSpecified is true for - // the previously-specified conflicting item. 
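The override rules above can be observed through the public addPattern() API: adding two patterns that reduce to the same skeleton reports a conflict, and override=TRUE lets the later one replace the earlier. A usage sketch, assuming an ICU4C build; the exact conflict code depends on the patterns chosen:

```cpp
#include <cstdio>
#include <unicode/dtptngen.h>
#include <unicode/unistr.h>

using icu::DateTimePatternGenerator;
using icu::UnicodeString;

int main() {
    UErrorCode status = U_ZERO_ERROR;
    DateTimePatternGenerator* gen = DateTimePatternGenerator::createEmptyInstance(status);
    if (U_FAILURE(status)) return 1;

    UnicodeString conflicting;
    gen->addPattern(UnicodeString("d MMM"), FALSE, conflicting, status);
    // Same skeleton as the first pattern, so a conflict is reported; override
    // decides whether the new pattern replaces the stored one.
    UDateTimePatternConflict c =
        gen->addPattern(UnicodeString("MMM d"), TRUE, conflicting, status);
    std::printf("conflict code: %d (0 means no conflict)\n", (int)c);
    delete gen;
    return 0;
}
```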
- const PtnSkeleton* entrySpecifiedSkeleton = nullptr; - duplicatePattern = patternMap->getPatternFromSkeleton(skeleton, &entrySpecifiedSkeleton); - if (duplicatePattern != nullptr ) { - conflictingStatus = UDATPG_CONFLICT; - conflictingPattern = *duplicatePattern; - if (!override || (skeletonToUse != nullptr && entrySpecifiedSkeleton != nullptr)) { - return conflictingStatus; - } - } - patternMap->add(basePattern, skeleton, pattern, skeletonToUse != nullptr, status); - if(U_FAILURE(status)) { - return conflictingStatus; - } - - return UDATPG_NO_CONFLICT; -} - - -UDateTimePatternField -DateTimePatternGenerator::getAppendFormatNumber(const char* field) const { - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i ) { - if (uprv_strcmp(CLDR_FIELD_APPEND[i], field)==0) { - return (UDateTimePatternField)i; - } - } - return UDATPG_FIELD_COUNT; -} - -UDateTimePatternField -DateTimePatternGenerator::getFieldAndWidthIndices(const char* key, UDateTimePGDisplayWidth* widthP) const { - char cldrFieldKey[UDATPG_FIELD_KEY_MAX + 1]; - uprv_strncpy(cldrFieldKey, key, UDATPG_FIELD_KEY_MAX); - cldrFieldKey[UDATPG_FIELD_KEY_MAX]=0; // ensure termination - *widthP = UDATPG_WIDE; - char* hyphenPtr = uprv_strchr(cldrFieldKey, '-'); - if (hyphenPtr) { - for (int32_t i=UDATPG_WIDTH_COUNT-1; i>0; --i) { - if (uprv_strcmp(CLDR_FIELD_WIDTH[i], hyphenPtr)==0) { - *widthP=(UDateTimePGDisplayWidth)i; - break; - } - } - *hyphenPtr = 0; // now delete width portion of key - } - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i ) { - if (uprv_strcmp(CLDR_FIELD_NAME[i],cldrFieldKey)==0) { - return (UDateTimePatternField)i; - } - } - return UDATPG_FIELD_COUNT; -} - -const UnicodeString* -DateTimePatternGenerator::getBestRaw(DateTimeMatcher& source, - int32_t includeMask, - DistanceInfo* missingFields, - UErrorCode &status, - const PtnSkeleton** specifiedSkeletonPtr) { - int32_t bestDistance = 0x7fffffff; - DistanceInfo tempInfo; - const UnicodeString *bestPattern=nullptr; - const PtnSkeleton* specifiedSkeleton=nullptr; - - PatternMapIterator it(status); - if (U_FAILURE(status)) { return nullptr; } - - for (it.set(*patternMap); it.hasNext(); ) { - DateTimeMatcher trial = it.next(); - if (trial.equals(skipMatcher)) { - continue; - } - int32_t distance=source.getDistance(trial, includeMask, tempInfo); - if (distance<bestDistance) { - bestDistance=distance; - bestPattern=patternMap->getPatternFromSkeleton(*trial.getSkeletonPtr(), &specifiedSkeleton); - missingFields->setTo(tempInfo); - if (distance==0) { - break; - } - } - } - - // If the best raw match had a specified skeleton and that skeleton was requested by the caller, - // then return it too. This generally happens when the caller needs to pass that skeleton - // through to adjustFieldTypes so the latter can do a better job. 
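
// ---------------------------------------------------------------------------
// [Illustrative sketch; not part of the patch hunks above or below.]
// The distance-based matching that getBestRaw() performs is what callers reach
// through the public getBestPattern() entry point. A hypothetical usage sketch
// (the function name and the "yMMMd" skeleton are examples only):
#include <unicode/dtptngen.h>
#include <unicode/localpointer.h>
#include <unicode/locid.h>
#include <unicode/unistr.h>
#include <unicode/utypes.h>

static icu::UnicodeString demoBestPattern() {
    UErrorCode status = U_ZERO_ERROR;
    icu::LocalPointer<icu::DateTimePatternGenerator> gen(
        icu::DateTimePatternGenerator::createInstance(icu::Locale::getUS(), status));
    if (U_FAILURE(status)) { return icu::UnicodeString(); }
    // The requested skeleton is matched against the locale's stored patterns;
    // the closest candidate (and, when present, its specified skeleton) is then
    // handed to adjustFieldTypes(), yielding something like "MMM d, y" here.
    return gen->getBestPattern(icu::UnicodeString("yMMMd"), status);
}
// ---------------------------------------------------------------------------
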
- if (bestPattern && specifiedSkeletonPtr) { - *specifiedSkeletonPtr = specifiedSkeleton; - } - return bestPattern; -} - -UnicodeString -DateTimePatternGenerator::adjustFieldTypes(const UnicodeString& pattern, - const PtnSkeleton* specifiedSkeleton, - int32_t flags, - UDateTimePatternMatchOptions options) { - UnicodeString newPattern; - fp->set(pattern); - for (int32_t i=0; i < fp->itemNumber; i++) { - UnicodeString field = fp->items[i]; - if ( fp->isQuoteLiteral(field) ) { - - UnicodeString quoteLiteral; - fp->getQuoteLiteral(quoteLiteral, &i); - newPattern += quoteLiteral; - } - else { - if (fp->isPatternSeparator(field)) { - newPattern+=field; - continue; - } - int32_t canonicalIndex = fp->getCanonicalIndex(field); - if (canonicalIndex < 0) { - newPattern+=field; - continue; // don't adjust - } - const dtTypeElem *row = &dtTypes[canonicalIndex]; - int32_t typeValue = row->field; - - // handle day periods - with #13183, no longer need special handling here, integrated with normal types - - if ((flags & kDTPGFixFractionalSeconds) != 0 && typeValue == UDATPG_SECOND_FIELD) { - field += decimal; - dtMatcher->skeleton.original.appendFieldTo(UDATPG_FRACTIONAL_SECOND_FIELD, field); - } else if (dtMatcher->skeleton.type[typeValue]!=0) { - // Here: - // - "reqField" is the field from the originally requested skeleton, with length - // "reqFieldLen". - // - "field" is the field from the found pattern. - // - // The adjusted field should consist of characters from the originally requested - // skeleton, except in the case of UDATPG_HOUR_FIELD or UDATPG_MONTH_FIELD or - // UDATPG_WEEKDAY_FIELD or UDATPG_YEAR_FIELD, in which case it should consist - // of characters from the found pattern. - // - // The length of the adjusted field (adjFieldLen) should match that in the originally - // requested skeleton, except that in the following cases the length of the adjusted field - // should match that in the found pattern (i.e. the length of this pattern field should - // not be adjusted): - // 1. typeValue is UDATPG_HOUR_FIELD/MINUTE/SECOND and the corresponding bit in options is - // not set (ticket #7180). Note, we may want to implement a similar change for other - // numeric fields (MM, dd, etc.) so the default behavior is to get locale preference for - // field length, but options bits can be used to override this. - // 2. There is a specified skeleton for the found pattern and one of the following is true: - // a) The length of the field in the skeleton (skelFieldLen) is equal to reqFieldLen. - // b) The pattern field is numeric and the skeleton field is not, or vice versa. 
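
// ---------------------------------------------------------------------------
// [Illustrative sketch; not part of the patch hunks above or below.]
// The field-length rules above are what the UDATPG_MATCH_*_FIELD_LENGTH bits
// control from the caller's side. A hypothetical usage sketch (function name
// and skeleton chosen for illustration only):
#include <unicode/dtptngen.h>
#include <unicode/localpointer.h>
#include <unicode/locid.h>
#include <unicode/udatpg.h>
#include <unicode/unistr.h>
#include <unicode/utypes.h>

static void demoMatchOptions() {
    UErrorCode status = U_ZERO_ERROR;
    icu::LocalPointer<icu::DateTimePatternGenerator> gen(
        icu::DateTimePatternGenerator::createInstance(icu::Locale::getUS(), status));
    if (U_FAILURE(status)) { return; }

    // Default behaviour (rule 1 above): the hour field keeps the length used
    // by the matched locale pattern, whatever length the skeleton requested.
    icu::UnicodeString byLocale = gen->getBestPattern(
        icu::UnicodeString("HHmm"), UDATPG_MATCH_NO_OPTIONS, status);
    // With UDATPG_MATCH_HOUR_FIELD_LENGTH set, the length requested in the
    // skeleton ("HH") is applied to the hour field instead.
    icu::UnicodeString byRequest = gen->getBestPattern(
        icu::UnicodeString("HHmm"), UDATPG_MATCH_HOUR_FIELD_LENGTH, status);
    (void)byLocale; (void)byRequest;
}
// ---------------------------------------------------------------------------
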
- - UChar reqFieldChar = dtMatcher->skeleton.original.getFieldChar(typeValue); - int32_t reqFieldLen = dtMatcher->skeleton.original.getFieldLength(typeValue); - if (reqFieldChar == CAP_E && reqFieldLen < 3) - reqFieldLen = 3; // 1-3 for E are equivalent to 3 for c,e - int32_t adjFieldLen = reqFieldLen; - if ( (typeValue==UDATPG_HOUR_FIELD && (options & UDATPG_MATCH_HOUR_FIELD_LENGTH)==0) || - (typeValue==UDATPG_MINUTE_FIELD && (options & UDATPG_MATCH_MINUTE_FIELD_LENGTH)==0) || - (typeValue==UDATPG_SECOND_FIELD && (options & UDATPG_MATCH_SECOND_FIELD_LENGTH)==0) ) { - adjFieldLen = field.length(); - } else if (specifiedSkeleton) { - int32_t skelFieldLen = specifiedSkeleton->original.getFieldLength(typeValue); - UBool patFieldIsNumeric = (row->type > 0); - UBool skelFieldIsNumeric = (specifiedSkeleton->type[typeValue] > 0); - if (skelFieldLen == reqFieldLen || (patFieldIsNumeric && !skelFieldIsNumeric) || (skelFieldIsNumeric && !patFieldIsNumeric)) { - // don't adjust the field length in the found pattern - adjFieldLen = field.length(); - } - } - UChar c = (typeValue!= UDATPG_HOUR_FIELD - && typeValue!= UDATPG_MONTH_FIELD - && typeValue!= UDATPG_WEEKDAY_FIELD - && (typeValue!= UDATPG_YEAR_FIELD || reqFieldChar==CAP_Y)) - ? reqFieldChar - : field.charAt(0); - if (typeValue == UDATPG_HOUR_FIELD && (flags & kDTPGSkeletonUsesCapJ) != 0) { - c = fDefaultHourFormatChar; - } - field.remove(); - for (int32_t j=adjFieldLen; j>0; --j) { - field += c; - } - } - newPattern+=field; - } - } - return newPattern; -} - -UnicodeString -DateTimePatternGenerator::getBestAppending(int32_t missingFields, int32_t flags, UErrorCode &status, UDateTimePatternMatchOptions options) { - if (U_FAILURE(status)) { - return UnicodeString(); - } - UnicodeString resultPattern, tempPattern; - const UnicodeString* tempPatternPtr; - int32_t lastMissingFieldMask=0; - if (missingFields!=0) { - resultPattern=UnicodeString(); - const PtnSkeleton* specifiedSkeleton=nullptr; - tempPatternPtr = getBestRaw(*dtMatcher, missingFields, distanceInfo, status, &specifiedSkeleton); - if (U_FAILURE(status)) { - return UnicodeString(); - } - tempPattern = *tempPatternPtr; - resultPattern = adjustFieldTypes(tempPattern, specifiedSkeleton, flags, options); - if ( distanceInfo->missingFieldMask==0 ) { - return resultPattern; - } - while (distanceInfo->missingFieldMask!=0) { // precondition: EVERY single field must work! 
- if ( lastMissingFieldMask == distanceInfo->missingFieldMask ) { - break; // cannot find the proper missing field - } - if (((distanceInfo->missingFieldMask & UDATPG_SECOND_AND_FRACTIONAL_MASK)==UDATPG_FRACTIONAL_MASK) && - ((missingFields & UDATPG_SECOND_AND_FRACTIONAL_MASK) == UDATPG_SECOND_AND_FRACTIONAL_MASK)) { - resultPattern = adjustFieldTypes(resultPattern, specifiedSkeleton, flags | kDTPGFixFractionalSeconds, options); - distanceInfo->missingFieldMask &= ~UDATPG_FRACTIONAL_MASK; - continue; - } - int32_t startingMask = distanceInfo->missingFieldMask; - tempPatternPtr = getBestRaw(*dtMatcher, distanceInfo->missingFieldMask, distanceInfo, status, &specifiedSkeleton); - if (U_FAILURE(status)) { - return UnicodeString(); - } - tempPattern = *tempPatternPtr; - tempPattern = adjustFieldTypes(tempPattern, specifiedSkeleton, flags, options); - int32_t foundMask=startingMask& ~distanceInfo->missingFieldMask; - int32_t topField=getTopBitNumber(foundMask); - - if (appendItemFormats[topField].length() != 0) { - UnicodeString appendName; - getAppendName((UDateTimePatternField)topField, appendName); - const UnicodeString *values[3] = { - &resultPattern, - &tempPattern, - &appendName - }; - SimpleFormatter(appendItemFormats[topField], 2, 3, status). - formatAndReplace(values, 3, resultPattern, nullptr, 0, status); - } - lastMissingFieldMask = distanceInfo->missingFieldMask; - } - } - return resultPattern; -} - -int32_t -DateTimePatternGenerator::getTopBitNumber(int32_t foundMask) const { - if ( foundMask==0 ) { - return 0; - } - int32_t i=0; - while (foundMask!=0) { - foundMask >>=1; - ++i; - } - if (i-1 >UDATPG_ZONE_FIELD) { - return UDATPG_ZONE_FIELD; - } - else - return i-1; -} - -void -DateTimePatternGenerator::setAvailableFormat(const UnicodeString &key, UErrorCode& err) -{ - fAvailableFormatKeyHash->puti(key, 1, err); -} - -UBool -DateTimePatternGenerator::isAvailableFormatSet(const UnicodeString &key) const { - return (UBool)(fAvailableFormatKeyHash->geti(key) == 1); -} - -void -DateTimePatternGenerator::copyHashtable(Hashtable *other, UErrorCode &status) { - if (other == nullptr || U_FAILURE(status)) { - return; - } - if (fAvailableFormatKeyHash != nullptr) { - delete fAvailableFormatKeyHash; - fAvailableFormatKeyHash = nullptr; - } - initHashtable(status); - if(U_FAILURE(status)){ - return; - } - int32_t pos = UHASH_FIRST; - const UHashElement* elem = nullptr; - // walk through the hash table and create a deep clone - while((elem = other->nextElement(pos))!= nullptr){ - const UHashTok otherKeyTok = elem->key; - UnicodeString* otherKey = (UnicodeString*)otherKeyTok.pointer; - fAvailableFormatKeyHash->puti(*otherKey, 1, status); - if(U_FAILURE(status)){ - return; - } - } -} - -StringEnumeration* -DateTimePatternGenerator::getSkeletons(UErrorCode& status) const { - if (U_FAILURE(status)) { - return nullptr; - } - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return nullptr; - } - LocalPointer<StringEnumeration> skeletonEnumerator( - new DTSkeletonEnumeration(*patternMap, DT_SKELETON, status), status); - - return U_SUCCESS(status) ? 
skeletonEnumerator.orphan() : nullptr; -} - -const UnicodeString& -DateTimePatternGenerator::getPatternForSkeleton(const UnicodeString& skeleton) const { - PtnElem *curElem; - - if (skeleton.length() ==0) { - return emptyString; - } - curElem = patternMap->getHeader(skeleton.charAt(0)); - while ( curElem != nullptr ) { - if ( curElem->skeleton->getSkeleton()==skeleton ) { - return curElem->pattern; - } - curElem = curElem->next.getAlias(); - } - return emptyString; -} - -StringEnumeration* -DateTimePatternGenerator::getBaseSkeletons(UErrorCode& status) const { - if (U_FAILURE(status)) { - return nullptr; - } - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return nullptr; - } - LocalPointer<StringEnumeration> baseSkeletonEnumerator( - new DTSkeletonEnumeration(*patternMap, DT_BASESKELETON, status), status); - - return U_SUCCESS(status) ? baseSkeletonEnumerator.orphan() : nullptr; -} - -StringEnumeration* -DateTimePatternGenerator::getRedundants(UErrorCode& status) { - if (U_FAILURE(status)) { return nullptr; } - if (U_FAILURE(internalErrorCode)) { - status = internalErrorCode; - return nullptr; - } - LocalPointer<StringEnumeration> output(new DTRedundantEnumeration(), status); - if (U_FAILURE(status)) { return nullptr; } - const UnicodeString *pattern; - PatternMapIterator it(status); - if (U_FAILURE(status)) { return nullptr; } - - for (it.set(*patternMap); it.hasNext(); ) { - DateTimeMatcher current = it.next(); - pattern = patternMap->getPatternFromSkeleton(*(it.getSkeleton())); - if ( isCanonicalItem(*pattern) ) { - continue; - } - if ( skipMatcher == nullptr ) { - skipMatcher = new DateTimeMatcher(current); - if (skipMatcher == nullptr) { - status = U_MEMORY_ALLOCATION_ERROR; - return nullptr; - } - } - else { - *skipMatcher = current; - } - UnicodeString trial = getBestPattern(current.getPattern(), status); - if (U_FAILURE(status)) { return nullptr; } - if (trial == *pattern) { - ((DTRedundantEnumeration *)output.getAlias())->add(*pattern, status); - if (U_FAILURE(status)) { return nullptr; } - } - if (current.equals(skipMatcher)) { - continue; - } - } - return output.orphan(); -} - -UBool -DateTimePatternGenerator::isCanonicalItem(const UnicodeString& item) const { - if ( item.length() != 1 ) { - return FALSE; - } - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i) { - if (item.charAt(0)==Canonical_Items[i]) { - return TRUE; - } - } - return FALSE; -} - - -DateTimePatternGenerator* -DateTimePatternGenerator::clone() const { - return new DateTimePatternGenerator(*this); -} - -PatternMap::PatternMap() { - for (int32_t i=0; i < MAX_PATTERN_ENTRIES; ++i ) { - boot[i] = nullptr; - } - isDupAllowed = TRUE; -} - -void -PatternMap::copyFrom(const PatternMap& other, UErrorCode& status) { - if (U_FAILURE(status)) { - return; - } - this->isDupAllowed = other.isDupAllowed; - for (int32_t bootIndex = 0; bootIndex < MAX_PATTERN_ENTRIES; ++bootIndex) { - PtnElem *curElem, *otherElem, *prevElem=nullptr; - otherElem = other.boot[bootIndex]; - while (otherElem != nullptr) { - LocalPointer<PtnElem> newElem(new PtnElem(otherElem->basePattern, otherElem->pattern), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeleton.adoptInsteadAndCheckErrorCode(new PtnSkeleton(*(otherElem->skeleton)), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeletonWasSpecified = otherElem->skeletonWasSpecified; - - // Release ownership from the LocalPointer of the PtnElem object. 
- // The PtnElem will now be owned by either the boot (for the first entry in the linked-list) - // or owned by the previous PtnElem object in the linked-list. - curElem = newElem.orphan(); - - if (this->boot[bootIndex] == nullptr) { - this->boot[bootIndex] = curElem; - } else { - if (prevElem != nullptr) { - prevElem->next.adoptInstead(curElem); - } else { - UPRV_UNREACHABLE; - } - } - prevElem = curElem; - otherElem = otherElem->next.getAlias(); - } - - } -} - -PtnElem* -PatternMap::getHeader(UChar baseChar) const { - PtnElem* curElem; - - if ( (baseChar >= CAP_A) && (baseChar <= CAP_Z) ) { - curElem = boot[baseChar-CAP_A]; - } - else { - if ( (baseChar >=LOW_A) && (baseChar <= LOW_Z) ) { - curElem = boot[26+baseChar-LOW_A]; - } - else { - return nullptr; - } - } - return curElem; -} - -PatternMap::~PatternMap() { - for (int32_t i=0; i < MAX_PATTERN_ENTRIES; ++i ) { - if (boot[i] != nullptr ) { - delete boot[i]; - boot[i] = nullptr; - } - } -} // PatternMap destructor - -void -PatternMap::add(const UnicodeString& basePattern, - const PtnSkeleton& skeleton, - const UnicodeString& value,// mapped pattern value - UBool skeletonWasSpecified, - UErrorCode &status) { - UChar baseChar = basePattern.charAt(0); - PtnElem *curElem, *baseElem; - status = U_ZERO_ERROR; - - // the baseChar must be A-Z or a-z - if ((baseChar >= CAP_A) && (baseChar <= CAP_Z)) { - baseElem = boot[baseChar-CAP_A]; - } - else { - if ((baseChar >=LOW_A) && (baseChar <= LOW_Z)) { - baseElem = boot[26+baseChar-LOW_A]; - } - else { - status = U_ILLEGAL_CHARACTER; - return; - } - } - - if (baseElem == nullptr) { - LocalPointer<PtnElem> newElem(new PtnElem(basePattern, value), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeleton.adoptInsteadAndCheckErrorCode(new PtnSkeleton(skeleton), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeletonWasSpecified = skeletonWasSpecified; - if (baseChar >= LOW_A) { - boot[26 + (baseChar - LOW_A)] = newElem.orphan(); // the boot array now owns the PtnElem. - } - else { - boot[baseChar - CAP_A] = newElem.orphan(); // the boot array now owns the PtnElem. - } - } - if ( baseElem != nullptr ) { - curElem = getDuplicateElem(basePattern, skeleton, baseElem); - - if (curElem == nullptr) { - // add new element to the list. - curElem = baseElem; - while( curElem -> next != nullptr ) - { - curElem = curElem->next.getAlias(); - } - - LocalPointer<PtnElem> newElem(new PtnElem(basePattern, value), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeleton.adoptInsteadAndCheckErrorCode(new PtnSkeleton(skeleton), status); - if (U_FAILURE(status)) { - return; // out of memory - } - newElem->skeletonWasSpecified = skeletonWasSpecified; - curElem->next.adoptInstead(newElem.orphan()); - curElem = curElem->next.getAlias(); - } - else { - // Pattern exists in the list already. - if ( !isDupAllowed ) { - return; - } - // Overwrite the value. - curElem->pattern = value; - // It was a bug that we were not doing the following previously, - // though that bug hid other problems by making things partly work. - curElem->skeletonWasSpecified = skeletonWasSpecified; - } - } -} // PatternMap::add - -// Find the pattern from the given basePattern string. 
-const UnicodeString * -PatternMap::getPatternFromBasePattern(const UnicodeString& basePattern, UBool& skeletonWasSpecified) const { // key to search for - PtnElem *curElem; - - if ((curElem=getHeader(basePattern.charAt(0)))==nullptr) { - return nullptr; // no match - } - - do { - if ( basePattern.compare(curElem->basePattern)==0 ) { - skeletonWasSpecified = curElem->skeletonWasSpecified; - return &(curElem->pattern); - } - curElem = curElem->next.getAlias(); - } while (curElem != nullptr); - - return nullptr; -} // PatternMap::getFromBasePattern - - -// Find the pattern from the given skeleton. -// At least when this is called from getBestRaw & addPattern (in which case specifiedSkeletonPtr is non-NULL), -// the comparison should be based on skeleton.original (which is unique and tied to the distance measurement in bestRaw) -// and not skeleton.baseOriginal (which is not unique); otherwise we may pick a different skeleton than the one with the -// optimum distance value in getBestRaw. When this is called from public getRedundants (specifiedSkeletonPtr is NULL), -// for now it will continue to compare based on baseOriginal so as not to change the behavior unnecessarily. -const UnicodeString * -PatternMap::getPatternFromSkeleton(const PtnSkeleton& skeleton, const PtnSkeleton** specifiedSkeletonPtr) const { // key to search for - PtnElem *curElem; - - if (specifiedSkeletonPtr) { - *specifiedSkeletonPtr = nullptr; - } - - // find boot entry - UChar baseChar = skeleton.getFirstChar(); - if ((curElem=getHeader(baseChar))==nullptr) { - return nullptr; // no match - } - - do { - UBool equal; - if (specifiedSkeletonPtr != nullptr) { // called from DateTimePatternGenerator::getBestRaw or addPattern, use original - equal = curElem->skeleton->original == skeleton.original; - } else { // called from DateTimePatternGenerator::getRedundants, use baseOriginal - equal = curElem->skeleton->baseOriginal == skeleton.baseOriginal; - } - if (equal) { - if (specifiedSkeletonPtr && curElem->skeletonWasSpecified) { - *specifiedSkeletonPtr = curElem->skeleton.getAlias(); - } - return &(curElem->pattern); - } - curElem = curElem->next.getAlias(); - } while (curElem != nullptr); - - return nullptr; -} - -UBool -PatternMap::equals(const PatternMap& other) const { - if ( this==&other ) { - return TRUE; - } - for (int32_t bootIndex = 0; bootIndex < MAX_PATTERN_ENTRIES; ++bootIndex) { - if (boot[bootIndex] == other.boot[bootIndex]) { - continue; - } - if ((boot[bootIndex] == nullptr) || (other.boot[bootIndex] == nullptr)) { - return FALSE; - } - PtnElem *otherElem = other.boot[bootIndex]; - PtnElem *myElem = boot[bootIndex]; - while ((otherElem != nullptr) || (myElem != nullptr)) { - if ( myElem == otherElem ) { - break; - } - if ((otherElem == nullptr) || (myElem == nullptr)) { - return FALSE; - } - if ( (myElem->basePattern != otherElem->basePattern) || - (myElem->pattern != otherElem->pattern) ) { - return FALSE; - } - if ((myElem->skeleton.getAlias() != otherElem->skeleton.getAlias()) && - !myElem->skeleton->equals(*(otherElem->skeleton))) { - return FALSE; - } - myElem = myElem->next.getAlias(); - otherElem = otherElem->next.getAlias(); - } - } - return TRUE; -} - -// find any key existing in the mapping table already. -// return TRUE if there is an existing key, otherwise return FALSE. 
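
// ---------------------------------------------------------------------------
// [Illustrative sketch; not part of the patch hunks above or below.]
// The skeleton-keyed lookups implemented by PatternMap back the public
// getSkeletons()/getPatternForSkeleton() API shown earlier. A hypothetical
// usage sketch (the function name is illustrative only):
#include <unicode/dtptngen.h>
#include <unicode/localpointer.h>
#include <unicode/locid.h>
#include <unicode/strenum.h>
#include <unicode/unistr.h>
#include <unicode/utypes.h>

static void demoEnumerateSkeletons() {
    UErrorCode status = U_ZERO_ERROR;
    icu::LocalPointer<icu::DateTimePatternGenerator> gen(
        icu::DateTimePatternGenerator::createInstance(icu::Locale::getUS(), status));
    if (U_FAILURE(status)) { return; }

    icu::LocalPointer<icu::StringEnumeration> skeletons(gen->getSkeletons(status));
    if (U_FAILURE(status)) { return; }

    const icu::UnicodeString* skeleton;
    while ((skeleton = skeletons->snext(status)) != nullptr && U_SUCCESS(status)) {
        // Each enumerated skeleton maps back to the concrete pattern stored in
        // the PatternMap; an empty string is returned for unknown skeletons.
        const icu::UnicodeString& pattern = gen->getPatternForSkeleton(*skeleton);
        (void)pattern;
    }
}
// ---------------------------------------------------------------------------
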
-PtnElem* -PatternMap::getDuplicateElem( - const UnicodeString &basePattern, - const PtnSkeleton &skeleton, - PtnElem *baseElem) { - PtnElem *curElem; - - if ( baseElem == nullptr ) { - return nullptr; - } - else { - curElem = baseElem; - } - do { - if ( basePattern.compare(curElem->basePattern)==0 ) { - UBool isEqual = TRUE; - for (int32_t i = 0; i < UDATPG_FIELD_COUNT; ++i) { - if (curElem->skeleton->type[i] != skeleton.type[i] ) { - isEqual = FALSE; - break; - } - } - if (isEqual) { - return curElem; - } - } - curElem = curElem->next.getAlias(); - } while( curElem != nullptr ); - - // end of the list - return nullptr; - -} // PatternMap::getDuplicateElem - -DateTimeMatcher::DateTimeMatcher(void) { -} - -DateTimeMatcher::~DateTimeMatcher() {} - -DateTimeMatcher::DateTimeMatcher(const DateTimeMatcher& other) { - copyFrom(other.skeleton); -} - - -void -DateTimeMatcher::set(const UnicodeString& pattern, FormatParser* fp) { - PtnSkeleton localSkeleton; - return set(pattern, fp, localSkeleton); -} - -void -DateTimeMatcher::set(const UnicodeString& pattern, FormatParser* fp, PtnSkeleton& skeletonResult) { - int32_t i; - for (i=0; i<UDATPG_FIELD_COUNT; ++i) { - skeletonResult.type[i] = NONE; - } - skeletonResult.original.clear(); - skeletonResult.baseOriginal.clear(); - skeletonResult.addedDefaultDayPeriod = FALSE; - - fp->set(pattern); - for (i=0; i < fp->itemNumber; i++) { - const UnicodeString& value = fp->items[i]; - // don't skip 'a' anymore, dayPeriod handled specially below - - if ( fp->isQuoteLiteral(value) ) { - UnicodeString quoteLiteral; - fp->getQuoteLiteral(quoteLiteral, &i); - continue; - } - int32_t canonicalIndex = fp->getCanonicalIndex(value); - if (canonicalIndex < 0) { - continue; - } - const dtTypeElem *row = &dtTypes[canonicalIndex]; - int32_t field = row->field; - skeletonResult.original.populate(field, value); - UChar repeatChar = row->patternChar; - int32_t repeatCount = row->minLen; - skeletonResult.baseOriginal.populate(field, repeatChar, repeatCount); - int16_t subField = row->type; - if (row->type > 0) { - U_ASSERT(value.length() < INT16_MAX); - subField += static_cast<int16_t>(value.length()); - } - skeletonResult.type[field] = subField; - } - // #13183, handle special behavior for day period characters (a, b, B) - if (!skeletonResult.original.isFieldEmpty(UDATPG_HOUR_FIELD)) { - if (skeletonResult.original.getFieldChar(UDATPG_HOUR_FIELD)==LOW_H || skeletonResult.original.getFieldChar(UDATPG_HOUR_FIELD)==CAP_K) { - // We have a skeleton with 12-hour-cycle format - if (skeletonResult.original.isFieldEmpty(UDATPG_DAYPERIOD_FIELD)) { - // But we do not have a day period in the skeleton; add the default DAYPERIOD (currently "a") - for (i = 0; dtTypes[i].patternChar != 0; i++) { - if ( dtTypes[i].field == UDATPG_DAYPERIOD_FIELD ) { - // first entry for UDATPG_DAYPERIOD_FIELD - skeletonResult.original.populate(UDATPG_DAYPERIOD_FIELD, dtTypes[i].patternChar, dtTypes[i].minLen); - skeletonResult.baseOriginal.populate(UDATPG_DAYPERIOD_FIELD, dtTypes[i].patternChar, dtTypes[i].minLen); - skeletonResult.type[UDATPG_DAYPERIOD_FIELD] = dtTypes[i].type; - skeletonResult.addedDefaultDayPeriod = TRUE; - break; - } - } - } - } else { - // Skeleton has 24-hour-cycle hour format and has dayPeriod, delete dayPeriod (i.e. 
ignore it) - skeletonResult.original.clearField(UDATPG_DAYPERIOD_FIELD); - skeletonResult.baseOriginal.clearField(UDATPG_DAYPERIOD_FIELD); - skeletonResult.type[UDATPG_DAYPERIOD_FIELD] = NONE; - } - } - copyFrom(skeletonResult); -} - -void -DateTimeMatcher::getBasePattern(UnicodeString &result ) { - result.remove(); // Reset the result first. - skeleton.baseOriginal.appendTo(result); -} - -UnicodeString -DateTimeMatcher::getPattern() { - UnicodeString result; - return skeleton.original.appendTo(result); -} - -int32_t -DateTimeMatcher::getDistance(const DateTimeMatcher& other, int32_t includeMask, DistanceInfo& distanceInfo) const { - int32_t result = 0; - distanceInfo.clear(); - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i ) { - int32_t myType = (includeMask&(1<<i))==0 ? 0 : skeleton.type[i]; - int32_t otherType = other.skeleton.type[i]; - if (myType==otherType) { - continue; - } - if (myType==0) {// and other is not - result += EXTRA_FIELD; - distanceInfo.addExtra(i); - } - else { - if (otherType==0) { - result += MISSING_FIELD; - distanceInfo.addMissing(i); - } - else { - result += abs(myType - otherType); - } - } - - } - return result; -} - -void -DateTimeMatcher::copyFrom(const PtnSkeleton& newSkeleton) { - skeleton.copyFrom(newSkeleton); -} - -void -DateTimeMatcher::copyFrom() { - // same as clear - skeleton.clear(); -} - -UBool -DateTimeMatcher::equals(const DateTimeMatcher* other) const { - if (other==nullptr) { return FALSE; } - return skeleton.original == other->skeleton.original; -} - -int32_t -DateTimeMatcher::getFieldMask() const { - int32_t result = 0; - - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i) { - if (skeleton.type[i]!=0) { - result |= (1<<i); - } - } - return result; -} - -PtnSkeleton* -DateTimeMatcher::getSkeletonPtr() { - return &skeleton; -} - -FormatParser::FormatParser () { - status = START; - itemNumber = 0; -} - - -FormatParser::~FormatParser () { -} - - -// Find the next token with the starting position and length -// Note: the startPos may -FormatParser::TokenStatus -FormatParser::setTokens(const UnicodeString& pattern, int32_t startPos, int32_t *len) { - int32_t curLoc = startPos; - if ( curLoc >= pattern.length()) { - return DONE; - } - // check the current char is between A-Z or a-z - do { - UChar c=pattern.charAt(curLoc); - if ( (c>=CAP_A && c<=CAP_Z) || (c>=LOW_A && c<=LOW_Z) ) { - curLoc++; - } - else { - startPos = curLoc; - *len=1; - return ADD_TOKEN; - } - - if ( pattern.charAt(curLoc)!= pattern.charAt(startPos) ) { - break; // not the same token - } - } while(curLoc <= pattern.length()); - *len = curLoc-startPos; - return ADD_TOKEN; -} - -void -FormatParser::set(const UnicodeString& pattern) { - int32_t startPos = 0; - TokenStatus result = START; - int32_t len = 0; - itemNumber = 0; - - do { - result = setTokens( pattern, startPos, &len ); - if ( result == ADD_TOKEN ) - { - items[itemNumber++] = UnicodeString(pattern, startPos, len ); - startPos += len; - } - else { - break; - } - } while (result==ADD_TOKEN && itemNumber < MAX_DT_TOKEN); -} - -int32_t -FormatParser::getCanonicalIndex(const UnicodeString& s, UBool strict) { - int32_t len = s.length(); - if (len == 0) { - return -1; - } - UChar ch = s.charAt(0); - - // Verify that all are the same character. 
- for (int32_t l = 1; l < len; l++) { - if (ch != s.charAt(l)) { - return -1; - } - } - int32_t i = 0; - int32_t bestRow = -1; - while (dtTypes[i].patternChar != 0x0000) { - if ( dtTypes[i].patternChar != ch ) { - ++i; - continue; - } - bestRow = i; - if (dtTypes[i].patternChar != dtTypes[i+1].patternChar) { - return i; - } - if (dtTypes[i+1].minLen <= len) { - ++i; - continue; - } - return i; - } - return strict ? -1 : bestRow; -} - -UBool -FormatParser::isQuoteLiteral(const UnicodeString& s) { - return (UBool)(s.charAt(0) == SINGLE_QUOTE); -} - -// This function assumes the current itemIndex points to the quote literal. -// Please call isQuoteLiteral prior to this function. -void -FormatParser::getQuoteLiteral(UnicodeString& quote, int32_t *itemIndex) { - int32_t i = *itemIndex; - - quote.remove(); - if (items[i].charAt(0)==SINGLE_QUOTE) { - quote += items[i]; - ++i; - } - while ( i < itemNumber ) { - if ( items[i].charAt(0)==SINGLE_QUOTE ) { - if ( (i+1<itemNumber) && (items[i+1].charAt(0)==SINGLE_QUOTE)) { - // two single quotes e.g. 'o''clock' - quote += items[i++]; - quote += items[i++]; - continue; - } - else { - quote += items[i]; - break; - } - } - else { - quote += items[i]; - } - ++i; - } - *itemIndex=i; -} - -UBool -FormatParser::isPatternSeparator(const UnicodeString& field) const { - for (int32_t i=0; i<field.length(); ++i ) { - UChar c= field.charAt(i); - if ( (c==SINGLE_QUOTE) || (c==BACKSLASH) || (c==SPACE) || (c==COLON) || - (c==QUOTATION_MARK) || (c==COMMA) || (c==HYPHEN) ||(items[i].charAt(0)==DOT) ) { - continue; - } - else { - return FALSE; - } - } - return TRUE; -} - -DistanceInfo::~DistanceInfo() {} - -void -DistanceInfo::setTo(const DistanceInfo& other) { - missingFieldMask = other.missingFieldMask; - extraFieldMask= other.extraFieldMask; -} - -PatternMapIterator::PatternMapIterator(UErrorCode& status) : - bootIndex(0), nodePtr(nullptr), matcher(nullptr), patternMap(nullptr) -{ - if (U_FAILURE(status)) { return; } - matcher.adoptInsteadAndCheckErrorCode(new DateTimeMatcher(), status); -} - -PatternMapIterator::~PatternMapIterator() { -} - -void -PatternMapIterator::set(PatternMap& newPatternMap) { - this->patternMap=&newPatternMap; -} - -PtnSkeleton* -PatternMapIterator::getSkeleton() const { - if ( nodePtr == nullptr ) { - return nullptr; - } - else { - return nodePtr->skeleton.getAlias(); - } -} - -UBool -PatternMapIterator::hasNext() const { - int32_t headIndex = bootIndex; - PtnElem *curPtr = nodePtr; - - if (patternMap==nullptr) { - return FALSE; - } - while ( headIndex < MAX_PATTERN_ENTRIES ) { - if ( curPtr != nullptr ) { - if ( curPtr->next != nullptr ) { - return TRUE; - } - else { - headIndex++; - curPtr=nullptr; - continue; - } - } - else { - if ( patternMap->boot[headIndex] != nullptr ) { - return TRUE; - } - else { - headIndex++; - continue; - } - } - } - return FALSE; -} - -DateTimeMatcher& -PatternMapIterator::next() { - while ( bootIndex < MAX_PATTERN_ENTRIES ) { - if ( nodePtr != nullptr ) { - if ( nodePtr->next != nullptr ) { - nodePtr = nodePtr->next.getAlias(); - break; - } - else { - bootIndex++; - nodePtr=nullptr; - continue; - } - } - else { - if ( patternMap->boot[bootIndex] != nullptr ) { - nodePtr = patternMap->boot[bootIndex]; - break; - } - else { - bootIndex++; - continue; - } - } - } - if (nodePtr!=nullptr) { - matcher->copyFrom(*nodePtr->skeleton); - } - else { - matcher->copyFrom(); - } - return *matcher; -} - - -SkeletonFields::SkeletonFields() { - // Set initial values to zero - clear(); -} - -void SkeletonFields::clear() { - 
uprv_memset(chars, 0, sizeof(chars)); - uprv_memset(lengths, 0, sizeof(lengths)); -} - -void SkeletonFields::copyFrom(const SkeletonFields& other) { - uprv_memcpy(chars, other.chars, sizeof(chars)); - uprv_memcpy(lengths, other.lengths, sizeof(lengths)); -} - -void SkeletonFields::clearField(int32_t field) { - chars[field] = 0; - lengths[field] = 0; -} - -UChar SkeletonFields::getFieldChar(int32_t field) const { - return chars[field]; -} - -int32_t SkeletonFields::getFieldLength(int32_t field) const { - return lengths[field]; -} - -void SkeletonFields::populate(int32_t field, const UnicodeString& value) { - populate(field, value.charAt(0), value.length()); -} - -void SkeletonFields::populate(int32_t field, UChar ch, int32_t length) { - chars[field] = (int8_t) ch; - lengths[field] = (int8_t) length; -} - -UBool SkeletonFields::isFieldEmpty(int32_t field) const { - return lengths[field] == 0; -} - -UnicodeString& SkeletonFields::appendTo(UnicodeString& string) const { - for (int32_t i = 0; i < UDATPG_FIELD_COUNT; ++i) { - appendFieldTo(i, string); - } - return string; -} - -UnicodeString& SkeletonFields::appendFieldTo(int32_t field, UnicodeString& string) const { - UChar ch(chars[field]); - int32_t length = (int32_t) lengths[field]; - - for (int32_t i=0; i<length; i++) { - string += ch; - } - return string; -} - -UChar SkeletonFields::getFirstChar() const { - for (int32_t i = 0; i < UDATPG_FIELD_COUNT; ++i) { - if (lengths[i] != 0) { - return chars[i]; - } - } - return '\0'; -} - - -PtnSkeleton::PtnSkeleton() { -} - -PtnSkeleton::PtnSkeleton(const PtnSkeleton& other) { - copyFrom(other); -} - -void PtnSkeleton::copyFrom(const PtnSkeleton& other) { - uprv_memcpy(type, other.type, sizeof(type)); - original.copyFrom(other.original); - baseOriginal.copyFrom(other.baseOriginal); -} - -void PtnSkeleton::clear() { - uprv_memset(type, 0, sizeof(type)); - original.clear(); - baseOriginal.clear(); -} - -UBool -PtnSkeleton::equals(const PtnSkeleton& other) const { - return (original == other.original) - && (baseOriginal == other.baseOriginal) - && (uprv_memcmp(type, other.type, sizeof(type)) == 0); -} - -UnicodeString -PtnSkeleton::getSkeleton() const { - UnicodeString result; - result = original.appendTo(result); - int32_t pos; - if (addedDefaultDayPeriod && (pos = result.indexOf(LOW_A)) >= 0) { - // for backward compatibility: if DateTimeMatcher.set added a single 'a' that - // was not in the provided skeleton, remove it here before returning skeleton. - result.remove(pos, 1); - } - return result; -} - -UnicodeString -PtnSkeleton::getBaseSkeleton() const { - UnicodeString result; - result = baseOriginal.appendTo(result); - int32_t pos; - if (addedDefaultDayPeriod && (pos = result.indexOf(LOW_A)) >= 0) { - // for backward compatibility: if DateTimeMatcher.set added a single 'a' that - // was not in the provided skeleton, remove it here before returning skeleton. 
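
// ---------------------------------------------------------------------------
// [Illustrative sketch; not part of the patch hunks above or below.]
// The addedDefaultDayPeriod bookkeeping above is the reason a bare 12-hour
// skeleton does not grow an extra day-period character when reported back to
// the caller. Assuming the static skeleton helpers available in recent ICU
// releases (staticGetSkeleton/staticGetBaseSkeleton), a hypothetical sketch:
#include <unicode/dtptngen.h>
#include <unicode/unistr.h>
#include <unicode/utypes.h>

static void demoSkeletonRoundTrip() {
    UErrorCode status = U_ZERO_ERROR;
    // "h:mm a" spells out its day period, so 'a' stays in the skeleton. The
    // default 'a' that DateTimeMatcher::set() adds internally for 12-hour
    // skeletons that lack one is stripped again by getSkeleton()/
    // getBaseSkeleton(), as the comment above notes.
    icu::UnicodeString skeleton = icu::DateTimePatternGenerator::staticGetSkeleton(
        icu::UnicodeString("h:mm a"), status);
    icu::UnicodeString baseSkeleton = icu::DateTimePatternGenerator::staticGetBaseSkeleton(
        icu::UnicodeString("h:mm a"), status);
    (void)skeleton; (void)baseSkeleton;
}
// ---------------------------------------------------------------------------
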
- result.remove(pos, 1); - } - return result; -} - -UChar -PtnSkeleton::getFirstChar() const { - return baseOriginal.getFirstChar(); -} - -PtnSkeleton::~PtnSkeleton() { -} - -PtnElem::PtnElem(const UnicodeString &basePat, const UnicodeString &pat) : - basePattern(basePat), skeleton(nullptr), pattern(pat), next(nullptr) -{ -} - -PtnElem::~PtnElem() { -} - -DTSkeletonEnumeration::DTSkeletonEnumeration(PatternMap& patternMap, dtStrEnum type, UErrorCode& status) : fSkeletons(nullptr) { - PtnElem *curElem; - PtnSkeleton *curSkeleton; - UnicodeString s; - int32_t bootIndex; - - pos=0; - fSkeletons.adoptInsteadAndCheckErrorCode(new UVector(status), status); - if (U_FAILURE(status)) { - return; - } - - for (bootIndex=0; bootIndex<MAX_PATTERN_ENTRIES; ++bootIndex ) { - curElem = patternMap.boot[bootIndex]; - while (curElem!=nullptr) { - switch(type) { - case DT_BASESKELETON: - s=curElem->basePattern; - break; - case DT_PATTERN: - s=curElem->pattern; - break; - case DT_SKELETON: - curSkeleton=curElem->skeleton.getAlias(); - s=curSkeleton->getSkeleton(); - break; - } - if ( !isCanonicalItem(s) ) { - LocalPointer<UnicodeString> newElem(new UnicodeString(s), status); - if (U_FAILURE(status)) { - return; - } - fSkeletons->addElement(newElem.getAlias(), status); - if (U_FAILURE(status)) { - fSkeletons.adoptInstead(nullptr); - return; - } - newElem.orphan(); // fSkeletons vector now owns the UnicodeString. - } - curElem = curElem->next.getAlias(); - } - } - if ((bootIndex==MAX_PATTERN_ENTRIES) && (curElem!=nullptr) ) { - status = U_BUFFER_OVERFLOW_ERROR; - } -} - -const UnicodeString* -DTSkeletonEnumeration::snext(UErrorCode& status) { - if (U_SUCCESS(status) && fSkeletons.isValid() && pos < fSkeletons->size()) { - return (const UnicodeString*)fSkeletons->elementAt(pos++); - } - return nullptr; -} - -void -DTSkeletonEnumeration::reset(UErrorCode& /*status*/) { - pos=0; -} - -int32_t -DTSkeletonEnumeration::count(UErrorCode& /*status*/) const { - return (fSkeletons.isNull()) ? 0 : fSkeletons->size(); -} - -UBool -DTSkeletonEnumeration::isCanonicalItem(const UnicodeString& item) { - if ( item.length() != 1 ) { - return FALSE; - } - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i) { - if (item.charAt(0)==Canonical_Items[i]) { - return TRUE; - } - } - return FALSE; -} - -DTSkeletonEnumeration::~DTSkeletonEnumeration() { - UnicodeString *s; - if (fSkeletons.isValid()) { - for (int32_t i = 0; i < fSkeletons->size(); ++i) { - if ((s = (UnicodeString *)fSkeletons->elementAt(i)) != nullptr) { - delete s; - } - } - } -} - -DTRedundantEnumeration::DTRedundantEnumeration() : pos(0), fPatterns(nullptr) { -} - -void -DTRedundantEnumeration::add(const UnicodeString& pattern, UErrorCode& status) { - if (U_FAILURE(status)) { return; } - if (fPatterns.isNull()) { - fPatterns.adoptInsteadAndCheckErrorCode(new UVector(status), status); - if (U_FAILURE(status)) { - return; - } - } - LocalPointer<UnicodeString> newElem(new UnicodeString(pattern), status); - if (U_FAILURE(status)) { - return; - } - fPatterns->addElement(newElem.getAlias(), status); - if (U_FAILURE(status)) { - fPatterns.adoptInstead(nullptr); - return; - } - newElem.orphan(); // fPatterns now owns the string. 
-} - -const UnicodeString* -DTRedundantEnumeration::snext(UErrorCode& status) { - if (U_SUCCESS(status) && fPatterns.isValid() && pos < fPatterns->size()) { - return (const UnicodeString*)fPatterns->elementAt(pos++); - } - return nullptr; -} - -void -DTRedundantEnumeration::reset(UErrorCode& /*status*/) { - pos=0; -} - -int32_t -DTRedundantEnumeration::count(UErrorCode& /*status*/) const { - return (fPatterns.isNull()) ? 0 : fPatterns->size(); -} - -UBool -DTRedundantEnumeration::isCanonicalItem(const UnicodeString& item) const { - if ( item.length() != 1 ) { - return FALSE; - } - for (int32_t i=0; i<UDATPG_FIELD_COUNT; ++i) { - if (item.charAt(0)==Canonical_Items[i]) { - return TRUE; - } - } - return FALSE; -} - -DTRedundantEnumeration::~DTRedundantEnumeration() { - UnicodeString *s; - if (fPatterns.isValid()) { - for (int32_t i = 0; i < fPatterns->size(); ++i) { - if ((s = (UnicodeString *)fPatterns->elementAt(i)) != nullptr) { - delete s; - } - } - } -} - -U_NAMESPACE_END - - -#endif /* #if !UCONFIG_NO_FORMATTING */ - -//eof diff --git a/tools/node_modules/@babel/core/lib/config/files/configuration.js b/tools/node_modules/@babel/core/lib/config/files/configuration.js index 999c3269cd2ef8..4835fb31904706 100644 --- a/tools/node_modules/@babel/core/lib/config/files/configuration.js +++ b/tools/node_modules/@babel/core/lib/config/files/configuration.js @@ -62,8 +62,6 @@ var _patternToRegex = _interopRequireDefault(require("../pattern-to-regex")); var fs = _interopRequireWildcard(require("../../gensync-utils/fs")); -var _resolve = _interopRequireDefault(require("../../gensync-utils/resolve")); - function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; } function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } @@ -147,8 +145,17 @@ function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) } function* loadConfig(name, dirname, envName, caller) { - const filepath = yield* (0, _resolve.default)(name, { - basedir: dirname + const filepath = (parseFloat(process.versions.node) >= 8.9 ? 
require.resolve : (r, { + paths: [b] + }, M = require("module")) => { + let f = M._findPath(r, M._nodeModulePaths(b).concat(b)); + + if (f) return f; + f = new Error(`Cannot resolve module '${r}'`); + f.code = "MODULE_NOT_FOUND"; + throw f; + })(name, { + paths: [dirname] }); const conf = yield* readConfig(filepath, envName, caller); diff --git a/tools/node_modules/@babel/core/lib/config/files/plugins.js b/tools/node_modules/@babel/core/lib/config/files/plugins.js index 6b9cb715c88568..eddce5f1715316 100644 --- a/tools/node_modules/@babel/core/lib/config/files/plugins.js +++ b/tools/node_modules/@babel/core/lib/config/files/plugins.js @@ -18,16 +18,6 @@ function _debug() { return data; } -function _resolve() { - const data = _interopRequireDefault(require("resolve")); - - _resolve = function () { - return data; - }; - - return data; -} - function _path() { const data = _interopRequireDefault(require("path")); @@ -98,8 +88,17 @@ function resolveStandardizedName(type, name, dirname = process.cwd()) { const standardizedName = standardizeName(type, name); try { - return _resolve().default.sync(standardizedName, { - basedir: dirname + return (parseFloat(process.versions.node) >= 8.9 ? require.resolve : (r, { + paths: [b] + }, M = require("module")) => { + let f = M._findPath(r, M._nodeModulePaths(b).concat(b)); + + if (f) return f; + f = new Error(`Cannot resolve module '${r}'`); + f.code = "MODULE_NOT_FOUND"; + throw f; + })(standardizedName, { + paths: [dirname] }); } catch (e) { if (e.code !== "MODULE_NOT_FOUND") throw e; @@ -108,10 +107,18 @@ function resolveStandardizedName(type, name, dirname = process.cwd()) { let resolvedOriginal = false; try { - _resolve().default.sync(name, { - basedir: dirname + (parseFloat(process.versions.node) >= 8.9 ? require.resolve : (r, { + paths: [b] + }, M = require("module")) => { + let f = M._findPath(r, M._nodeModulePaths(b).concat(b)); + + if (f) return f; + f = new Error(`Cannot resolve module '${r}'`); + f.code = "MODULE_NOT_FOUND"; + throw f; + })(name, { + paths: [dirname] }); - resolvedOriginal = true; } catch (_unused) {} @@ -123,10 +130,18 @@ function resolveStandardizedName(type, name, dirname = process.cwd()) { let resolvedBabel = false; try { - _resolve().default.sync(standardizeName(type, "@babel/" + name), { - basedir: dirname + (parseFloat(process.versions.node) >= 8.9 ? require.resolve : (r, { + paths: [b] + }, M = require("module")) => { + let f = M._findPath(r, M._nodeModulePaths(b).concat(b)); + + if (f) return f; + f = new Error(`Cannot resolve module '${r}'`); + f.code = "MODULE_NOT_FOUND"; + throw f; + })(standardizeName(type, "@babel/" + name), { + paths: [dirname] }); - resolvedBabel = true; } catch (_unused2) {} @@ -138,10 +153,18 @@ function resolveStandardizedName(type, name, dirname = process.cwd()) { const oppositeType = type === "preset" ? "plugin" : "preset"; try { - _resolve().default.sync(standardizeName(oppositeType, name), { - basedir: dirname + (parseFloat(process.versions.node) >= 8.9 ? 
require.resolve : (r, { + paths: [b] + }, M = require("module")) => { + let f = M._findPath(r, M._nodeModulePaths(b).concat(b)); + + if (f) return f; + f = new Error(`Cannot resolve module '${r}'`); + f.code = "MODULE_NOT_FOUND"; + throw f; + })(standardizeName(oppositeType, name), { + paths: [dirname] }); - resolvedOppositeType = true; } catch (_unused3) {} diff --git a/tools/node_modules/@babel/core/lib/gensync-utils/resolve.js b/tools/node_modules/@babel/core/lib/gensync-utils/resolve.js deleted file mode 100644 index 2ca39d7612d472..00000000000000 --- a/tools/node_modules/@babel/core/lib/gensync-utils/resolve.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.default = void 0; - -function _resolve() { - const data = _interopRequireDefault(require("resolve")); - - _resolve = function () { - return data; - }; - - return data; -} - -function _gensync() { - const data = _interopRequireDefault(require("gensync")); - - _gensync = function () { - return data; - }; - - return data; -} - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -var _default = (0, _gensync().default)({ - sync: _resolve().default.sync, - errback: _resolve().default -}); - -exports.default = _default; \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/code-frame/README.md b/tools/node_modules/@babel/core/node_modules/@babel/code-frame/README.md index 185f93d2471999..08cacb0477fb94 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/code-frame/README.md +++ b/tools/node_modules/@babel/core/node_modules/@babel/code-frame/README.md @@ -2,7 +2,7 @@ > Generate errors that contain a code frame that point to source locations. -See our website [@babel/code-frame](https://babeljs.io/docs/en/next/babel-code-frame.html) for more information. +See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information. 
## Install diff --git a/tools/node_modules/@babel/core/node_modules/@babel/code-frame/package.json b/tools/node_modules/@babel/core/node_modules/@babel/code-frame/package.json index d2e0ccffde9971..07a28a6bda4ec3 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/code-frame/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/code-frame/package.json @@ -1,6 +1,6 @@ { "name": "@babel/code-frame", - "version": "7.10.4", + "version": "7.12.11", "description": "Generate errors that contain a code frame that point to source locations.", "author": "Sebastian McKenzie <sebmck@gmail.com>", "homepage": "https://babeljs.io/", @@ -18,8 +18,8 @@ "@babel/highlight": "^7.10.4" }, "devDependencies": { + "@types/chalk": "^2.0.0", "chalk": "^2.0.0", "strip-ansi": "^4.0.0" - }, - "gitHead": "7fd40d86a0d03ff0e9c3ea16b29689945433d4df" -} + } +} \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/generators/types.js b/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/generators/types.js index 603a5935b5061c..ef3054b83273e2 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/generators/types.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/generators/types.js @@ -218,13 +218,9 @@ function StringLiteral(node) { return; } - const opts = this.format.jsescOption; - - if (this.format.jsonCompatibleStrings) { - opts.json = true; - } - - const val = (0, _jsesc.default)(node.value, opts); + const val = (0, _jsesc.default)(node.value, Object.assign(this.format.jsescOption, this.format.jsonCompatibleStrings && { + json: true + })); return this.token(val); } @@ -232,22 +228,22 @@ function BigIntLiteral(node) { const raw = this.getPossibleRaw(node); if (!this.format.minified && raw != null) { - this.token(raw); + this.word(raw); return; } - this.token(node.value + "n"); + this.word(node.value + "n"); } function DecimalLiteral(node) { const raw = this.getPossibleRaw(node); if (!this.format.minified && raw != null) { - this.token(raw); + this.word(raw); return; } - this.token(node.value + "m"); + this.word(node.value + "m"); } function PipelineTopicExpression(node) { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/index.js index 115ab16a10a898..b3fcd73b364610 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/generator/lib/index.js @@ -38,7 +38,6 @@ function normalizeOptions(code, opts) { compact: opts.compact, minified: opts.minified, concise: opts.concise, - jsonCompatibleStrings: opts.jsonCompatibleStrings, indent: { adjustMultilineComment: true, style: " ", @@ -51,6 +50,9 @@ function normalizeOptions(code, opts) { }, opts.jsescOption), recordAndTupleSyntaxType: opts.recordAndTupleSyntaxType }; + { + format.jsonCompatibleStrings = opts.jsonCompatibleStrings; + } if (format.minified) { format.compact = true; diff --git a/tools/node_modules/@babel/core/node_modules/@babel/generator/package.json b/tools/node_modules/@babel/core/node_modules/@babel/generator/package.json index b76c14e291015c..96f3acfa11306c 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/generator/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/generator/package.json @@ -1,6 +1,6 @@ { "name": "@babel/generator", - "version": "7.12.5", + "version": "7.12.11", "description": "Turns 
an AST into code.", "author": "Sebastian McKenzie <sebmck@gmail.com>", "homepage": "https://babeljs.io/", @@ -18,12 +18,12 @@ "lib" ], "dependencies": { - "@babel/types": "^7.12.5", + "@babel/types": "^7.12.11", "jsesc": "^2.5.1", "source-map": "^0.5.0" }, "devDependencies": { - "@babel/helper-fixtures": "7.10.5", - "@babel/parser": "7.12.5" + "@babel/helper-fixtures": "7.12.10", + "@babel/parser": "7.12.11" } } \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/README.md b/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/README.md index a8a6809ace3d6e..36a65931b20ebb 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/README.md +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/README.md @@ -2,7 +2,7 @@ > Helper function to change the property 'name' of every function -See our website [@babel/helper-function-name](https://babeljs.io/docs/en/next/babel-helper-function-name.html) for more information. +See our website [@babel/helper-function-name](https://babeljs.io/docs/en/babel-helper-function-name) for more information. ## Install diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/package.json index 98c7f62ce193d4..42b3e100e1ced0 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-function-name/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-function-name", - "version": "7.10.4", + "version": "7.12.11", "description": "Helper function to change the property 'name' of every function", "repository": { "type": "git", @@ -13,9 +13,8 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/helper-get-function-arity": "^7.10.4", - "@babel/template": "^7.10.4", - "@babel/types": "^7.10.4" - }, - "gitHead": "7fd40d86a0d03ff0e9c3ea16b29689945433d4df" -} + "@babel/helper-get-function-arity": "^7.12.10", + "@babel/template": "^7.12.7", + "@babel/types": "^7.12.11" + } +} \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/README.md b/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/README.md index 1de8084fb133bc..8fa48c13e71816 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/README.md +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/README.md @@ -2,7 +2,7 @@ > Helper function to get function arity -See our website [@babel/helper-get-function-arity](https://babeljs.io/docs/en/next/babel-helper-get-function-arity.html) for more information. +See our website [@babel/helper-get-function-arity](https://babeljs.io/docs/en/babel-helper-get-function-arity) for more information. 
## Install diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/package.json index 6b53c0b74b3b71..736839ec5ff305 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-get-function-arity/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-get-function-arity", - "version": "7.10.4", + "version": "7.12.10", "description": "Helper function to get function arity", "repository": { "type": "git", @@ -13,7 +13,6 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/types": "^7.10.4" - }, - "gitHead": "7fd40d86a0d03ff0e9c3ea16b29689945433d4df" -} + "@babel/types": "^7.12.10" + } +} \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/lib/index.js index 0d5b34e5d6fded..0751eb3ca5527b 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/lib/index.js @@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = _default; +exports.default = optimiseCallExpression; var t = _interopRequireWildcard(require("@babel/types")); @@ -11,7 +11,7 @@ function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? 
Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } -function _default(callee, thisNode, args, optional) { +function optimiseCallExpression(callee, thisNode, args, optional) { if (args.length === 1 && t.isSpreadElement(args[0]) && t.isIdentifier(args[0].argument, { name: "arguments" })) { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/package.json index e71c0e887520da..3973be801216bd 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-optimise-call-expression/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-optimise-call-expression", - "version": "7.12.7", + "version": "7.12.10", "description": "Helper function to optimise call expression", "repository": { "type": "git", @@ -13,10 +13,10 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/types": "^7.12.7" + "@babel/types": "^7.12.10" }, "devDependencies": { - "@babel/generator": "7.12.5", - "@babel/parser": "7.12.7" + "@babel/generator": "7.12.10", + "@babel/parser": "7.12.10" } } \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/lib/index.js index d434da7028c5ae..f08da165b1afec 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/lib/index.js @@ -40,7 +40,7 @@ function skipAllButComputedKey(path) { } const environmentVisitor = { - [`${t.StaticBlock ? "StaticBlock|" : ""}ClassPrivateProperty|TypeAnnotation`](path) { + [`${t.staticBlock ? 
"StaticBlock|" : ""}ClassPrivateProperty|TypeAnnotation`](path) { path.skip(); }, diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/package.json index e2c2392524fef8..bc8d05da90c053 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-replace-supers/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-replace-supers", - "version": "7.12.5", + "version": "7.12.11", "description": "Helper function to replace supers", "repository": { "type": "git", @@ -13,9 +13,9 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.12.1", - "@babel/helper-optimise-call-expression": "^7.10.4", - "@babel/traverse": "^7.12.5", - "@babel/types": "^7.12.5" + "@babel/helper-member-expression-to-functions": "^7.12.7", + "@babel/helper-optimise-call-expression": "^7.12.10", + "@babel/traverse": "^7.12.10", + "@babel/types": "^7.12.11" } } \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/README.md b/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/README.md index d241fee0f6dc44..a6f54046044463 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/README.md +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/README.md @@ -2,7 +2,7 @@ > -See our website [@babel/helper-split-export-declaration](https://babeljs.io/docs/en/next/babel-helper-split-export-declaration.html) for more information. +See our website [@babel/helper-split-export-declaration](https://babeljs.io/docs/en/babel-helper-split-export-declaration) for more information. ## Install diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/package.json index 3cd12554e9aa5a..5913ccfef09e91 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-split-export-declaration/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-split-export-declaration", - "version": "7.11.0", + "version": "7.12.11", "description": "", "repository": { "type": "git", @@ -13,6 +13,6 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/types": "^7.11.0" + "@babel/types": "^7.12.11" } -} +} \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/README.md b/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/README.md index ab2dad173149e8..6733576a8ce76b 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/README.md +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/README.md @@ -2,7 +2,7 @@ > Validate identifier/keywords name -See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/en/next/babel-helper-validator-identifier.html) for more information. +See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/en/babel-helper-validator-identifier) for more information. 
## Install diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/package.json index 79112981bddbb9..464dbfa3aace49 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-validator-identifier/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-validator-identifier", - "version": "7.10.4", + "version": "7.12.11", "description": "Validate identifier/keywords name", "repository": { "type": "git", @@ -16,6 +16,5 @@ "devDependencies": { "charcodes": "^0.2.0", "unicode-13.0.0": "^0.8.0" - }, - "gitHead": "7fd40d86a0d03ff0e9c3ea16b29689945433d4df" -} + } +} \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js index a37d8f6a4c85fc..2f888fabfe390a 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js @@ -608,7 +608,7 @@ const ErrorMessages = Object.freeze({ DuplicateStaticBlock: "Duplicate static block in the same class", ElementAfterRest: "Rest element must be last element", EscapedCharNotAnIdentifier: "Invalid Unicode escape", - ExportBindingIsString: "A string literal cannot be used as an exported binding without `from`.\n- Did you mean `export { %0 as '%1' } from 'some-module'`?", + ExportBindingIsString: "A string literal cannot be used as an exported binding without `from`.\n- Did you mean `export { '%0' as '%1' } from 'some-module'`?", ExportDefaultFromAsIdentifier: "'from' is not allowed as an identifier after 'export default'", ForInOfLoopInitializer: "%0 loop variable declaration may not have an initializer", GeneratorInSingleStatementContext: "Generators can only be declared at the top level or inside a block", @@ -758,10 +758,6 @@ class ParserError extends CommentsParser { } -function isSimpleProperty(node) { - return node != null && node.type === "Property" && node.kind === "init" && node.method === false; -} - var estree = (superClass => class extends superClass { estreeParseRegExpLiteral({ pattern, @@ -782,7 +778,14 @@ var estree = (superClass => class extends superClass { } estreeParseBigIntLiteral(value) { - const bigInt = typeof BigInt !== "undefined" ? 
BigInt(value) : null; + let bigInt; + + try { + bigInt = BigInt(value); + } catch (_unused) { + bigInt = null; + } + const node = this.estreeParseLiteral(bigInt); node.bigint = String(node.value || value); return node; @@ -803,7 +806,7 @@ var estree = (superClass => class extends superClass { const directiveLiteral = directive.value; const stmt = this.startNodeAt(directive.start, directive.loc.start); const expression = this.startNodeAt(directiveLiteral.start, directiveLiteral.loc.start); - expression.value = directiveLiteral.value; + expression.value = directiveLiteral.extra.expressionValue; expression.raw = directiveLiteral.extra.raw; stmt.expression = this.finishNodeAt(expression, "Literal", directiveLiteral.end, directiveLiteral.loc.end); stmt.directive = directiveLiteral.extra.raw.slice(1, -1); @@ -816,7 +819,7 @@ var estree = (superClass => class extends superClass { } checkDeclaration(node) { - if (isSimpleProperty(node)) { + if (node != null && this.isObjectProperty(node)) { this.checkDeclaration(node.value); } else { super.checkDeclaration(node); @@ -827,27 +830,6 @@ var estree = (superClass => class extends superClass { return method.value.params; } - checkLVal(expr, contextDescription, ...args) { - switch (expr.type) { - case "ObjectPattern": - expr.properties.forEach(prop => { - this.checkLVal(prop.type === "Property" ? prop.value : prop, "object destructuring pattern", ...args); - }); - break; - - default: - super.checkLVal(expr, contextDescription, ...args); - } - } - - checkProto(prop, isRecord, protoRef, refExpressionErrors) { - if (prop.method) { - return; - } - - super.checkProto(prop, isRecord, protoRef, refExpressionErrors); - } - isValidDirective(stmt) { var _stmt$expression$extr; @@ -857,12 +839,12 @@ var estree = (superClass => class extends superClass { stmtToDirective(stmt) { const directive = super.stmtToDirective(stmt); const value = stmt.expression.value; - directive.value.value = value; + this.addExtra(directive.value, "expressionValue", value); return directive; } - parseBlockBody(node, allowDirectives, topLevel, end) { - super.parseBlockBody(node, allowDirectives, topLevel, end); + parseBlockBody(node, ...args) { + super.parseBlockBody(node, ...args); const directiveStatements = node.directives.map(d => this.directiveToStmt(d)); node.body = directiveStatements.concat(node.body); delete node.directives; @@ -955,8 +937,8 @@ var estree = (superClass => class extends superClass { } toAssignable(node, isLHS = false) { - if (isSimpleProperty(node)) { - this.toAssignable(node.value); + if (node != null && this.isObjectProperty(node)) { + this.toAssignable(node.value, isLHS); return node; } @@ -965,9 +947,9 @@ var estree = (superClass => class extends superClass { toAssignableObjectExpressionProp(prop, ...args) { if (prop.kind === "get" || prop.kind === "set") { - throw this.raise(prop.key.start, ErrorMessages.PatternHasAccessor); + this.raise(prop.key.start, ErrorMessages.PatternHasAccessor); } else if (prop.method) { - throw this.raise(prop.key.start, ErrorMessages.PatternHasMethod); + this.raise(prop.key.start, ErrorMessages.PatternHasMethod); } else { super.toAssignableObjectExpressionProp(prop, ...args); } @@ -1035,6 +1017,26 @@ var estree = (superClass => class extends superClass { return node; } + hasPropertyAsPrivateName(node) { + if (node.type === "ChainExpression") { + node = node.expression; + } + + return super.hasPropertyAsPrivateName(node); + } + + isOptionalChain(node) { + return node.type === "ChainExpression"; + } + + isObjectProperty(node) { + 
return node.type === "Property" && node.kind === "init" && !node.method; + } + + isObjectMethod(node) { + return node.method || node.kind === "get" || node.kind === "set"; + } + }); class TokContext { @@ -4323,6 +4325,7 @@ const JsxErrors = Object.freeze({ AttributeIsEmpty: "JSX attributes must only be assigned a non-empty expression", MissingClosingTagFragment: "Expected corresponding JSX closing tag for <>", MissingClosingTagElement: "Expected corresponding JSX closing tag for <%0>", + UnexpectedSequenceExpression: "Sequence expressions cannot be directly nested inside JSX. Did you mean to wrap it in parentheses (...)?", UnsupportedJsxValue: "JSX value should be either an expression or a quoted JSX text", UnterminatedJsxContent: "Unterminated JSX contents", UnwrappedAdjacentJSXElements: "Adjacent JSX elements must be wrapped in an enclosing tag. Did you want a JSX fragment <>...</>?" @@ -4607,7 +4610,8 @@ var jsx = (superClass => class extends superClass { if (this.match(types.braceR)) { node.expression = this.jsxParseEmptyExpression(); } else { - node.expression = this.parseExpression(); + const expression = this.parseExpression(); + node.expression = expression; } this.expect(types.braceR); @@ -7723,7 +7727,7 @@ class State { this.context = [types$1.braceStatement]; this.exprAllowed = true; this.containsEsc = false; - this.octalPositions = []; + this.strictErrors = new Map(); this.exportedIdentifiers = []; this.tokensLength = 0; } @@ -7855,15 +7859,11 @@ class Tokenizer extends ParserError { setStrict(strict) { this.state.strict = strict; - if (!this.match(types.num) && !this.match(types.string)) return; - this.state.pos = this.state.start; - while (this.state.pos < this.state.lineStart) { - this.state.lineStart = this.input.lastIndexOf("\n", this.state.lineStart - 2) + 1; - --this.state.curLine; + if (strict) { + this.state.strictErrors.forEach((message, pos) => this.raise(pos, message)); + this.state.strictErrors.clear(); } - - this.nextToken(); } curContext() { @@ -7873,7 +7873,6 @@ class Tokenizer extends ParserError { nextToken() { const curContext = this.curContext(); if (!(curContext == null ? 
void 0 : curContext.preserveSpace)) this.skipSpace(); - this.state.octalPositions = []; this.state.start = this.state.pos; this.state.startLoc = this.state.curPosition(); @@ -8600,10 +8599,9 @@ class Tokenizer extends ParserError { if (hasLeadingZero) { const integer = this.input.slice(start, this.state.pos); + this.recordStrictModeErrors(start, ErrorMessages.StrictOctalLiteral); - if (this.state.strict) { - this.raise(start, ErrorMessages.StrictOctalLiteral); - } else { + if (!this.state.strict) { const underscorePos = integer.indexOf("_"); if (underscorePos > 0) { @@ -8802,6 +8800,14 @@ class Tokenizer extends ParserError { } } + recordStrictModeErrors(pos, message) { + if (this.state.strict && !this.state.strictErrors.has(pos)) { + this.raise(pos, message); + } else { + this.state.strictErrors.set(pos, message); + } + } + readEscapedChar(inTemplate) { const throwOnInvalid = !inTemplate; const ch = this.input.charCodeAt(++this.state.pos); @@ -8855,8 +8861,8 @@ class Tokenizer extends ParserError { case 57: if (inTemplate) { return null; - } else if (this.state.strict) { - this.raise(this.state.pos - 1, ErrorMessages.StrictNumericEscape); + } else { + this.recordStrictModeErrors(this.state.pos - 1, ErrorMessages.StrictNumericEscape); } default: @@ -8877,10 +8883,8 @@ class Tokenizer extends ParserError { if (octalStr !== "0" || next === 56 || next === 57) { if (inTemplate) { return null; - } else if (this.state.strict) { - this.raise(codePos, ErrorMessages.StrictNumericEscape); } else { - this.state.octalPositions.push(codePos); + this.recordStrictModeErrors(codePos, ErrorMessages.StrictNumericEscape); } } @@ -9194,6 +9198,30 @@ class UtilParser extends Tokenizer { return this.match(types.name) || !!this.state.type.keyword || this.match(types.string) || this.match(types.num) || this.match(types.bigint) || this.match(types.decimal); } + isPrivateName(node) { + return node.type === "PrivateName"; + } + + getPrivateNameSV(node) { + return node.id.name; + } + + hasPropertyAsPrivateName(node) { + return (node.type === "MemberExpression" || node.type === "OptionalMemberExpression") && this.isPrivateName(node.property); + } + + isOptionalChain(node) { + return node.type === "OptionalMemberExpression" || node.type === "OptionalCallExpression"; + } + + isObjectProperty(node) { + return node.type === "ObjectProperty"; + } + + isObjectMethod(node) { + return node.type === "ObjectMethod"; + } + } class ExpressionErrors { constructor() { @@ -9575,7 +9603,7 @@ class LValParser extends NodeUtils { case "ObjectPattern": for (let _i2 = 0, _expr$properties = expr.properties; _i2 < _expr$properties.length; _i2++) { let prop = _expr$properties[_i2]; - if (prop.type === "ObjectProperty") prop = prop.value;else if (prop.type === "ObjectMethod") continue; + if (this.isObjectProperty(prop)) prop = prop.value;else if (this.isObjectMethod(prop)) continue; this.checkLVal(prop, "object destructuring pattern", bindingType, checkClashes, disallowLetBinding); } @@ -9777,7 +9805,7 @@ function newExpressionScope() { class ExpressionParser extends LValParser { checkProto(prop, isRecord, protoRef, refExpressionErrors) { - if (prop.type === "SpreadElement" || prop.type === "ObjectMethod" || prop.computed || prop.shorthand) { + if (prop.type === "SpreadElement" || this.isObjectMethod(prop) || prop.computed || prop.shorthand) { return; } @@ -10078,7 +10106,7 @@ class ExpressionParser extends LValParser { if (arg.type === "Identifier") { this.raise(node.start, ErrorMessages.StrictDelete); - } else if ((arg.type === 
"MemberExpression" || arg.type === "OptionalMemberExpression") && arg.property.type === "PrivateName") { + } else if (this.hasPropertyAsPrivateName(arg)) { this.raise(node.start, ErrorMessages.DeletePrivateField); } } @@ -10153,13 +10181,12 @@ class ExpressionParser extends LValParser { let optional = false; if (this.match(types.questionDot)) { - state.optionalChainMember = optional = true; - if (noCalls && this.lookaheadCharCode() === 40) { state.stop = true; return base; } + state.optionalChainMember = optional = true; this.next(); } @@ -10180,12 +10207,12 @@ class ExpressionParser extends LValParser { node.computed = computed; const property = computed ? this.parseExpression() : this.parseMaybePrivateName(true); - if (property.type === "PrivateName") { + if (this.isPrivateName(property)) { if (node.object.type === "Super") { this.raise(startPos, ErrorMessages.SuperPrivateField); } - this.classScope.usePrivateName(property.id.name, property.start); + this.classScope.usePrivateName(this.getPrivateNameSV(property), property.start); } node.property = property; @@ -10816,7 +10843,7 @@ class ExpressionParser extends LValParser { if (node.callee.type === "Import") { this.raise(node.callee.start, ErrorMessages.ImportCallNotNewExpression); - } else if (node.callee.type === "OptionalMemberExpression" || node.callee.type === "OptionalCallExpression") { + } else if (this.isOptionalChain(node.callee)) { this.raise(this.state.lastTokEnd, ErrorMessages.OptionalChainingNoNew); } else if (this.eat(types.questionDot)) { this.raise(this.state.start, ErrorMessages.OptionalChainingNoNew); @@ -10907,7 +10934,7 @@ class ExpressionParser extends LValParser { this.checkProto(prop, isRecord, propHash, refExpressionErrors); } - if (isRecord && prop.type !== "ObjectProperty" && prop.type !== "SpreadElement") { + if (isRecord && !this.isObjectProperty(prop) && prop.type !== "SpreadElement") { this.raise(prop.start, ErrorMessages.InvalidRecordProperty); } @@ -11102,7 +11129,7 @@ class ExpressionParser extends LValParser { this.state.inPropertyName = true; prop.key = this.match(types.num) || this.match(types.string) || this.match(types.bigint) || this.match(types.decimal) ? 
this.parseExprAtom() : this.parseMaybePrivateName(isPrivateNameAllowed); - if (prop.key.type !== "PrivateName") { + if (!this.isPrivateName(prop.key)) { prop.computed = false; } @@ -11364,11 +11391,10 @@ class ExpressionParser extends LValParser { } isAwaitAllowed() { - if (this.scope.inFunction) return this.prodParam.hasAwait; - if (this.options.allowAwaitOutsideFunction) return true; + if (this.prodParam.hasAwait) return true; - if (this.hasPlugin("topLevelAwait")) { - return this.inModule && this.prodParam.hasAwait; + if (this.options.allowAwaitOutsideFunction && !this.scope.inFunction) { + return true; } return false; @@ -12193,6 +12219,11 @@ class StatementParser extends ExpressionParser { parseBlock(allowDirectives = false, createNewLexicalScope = true, afterBlockParse) { const node = this.startNode(); + + if (allowDirectives) { + this.state.strictErrors.clear(); + } + this.expect(types.braceL); if (createNewLexicalScope) { @@ -12219,41 +12250,33 @@ class StatementParser extends ExpressionParser { } parseBlockOrModuleBlockBody(body, directives, topLevel, end, afterBlockParse) { - const octalPositions = []; const oldStrict = this.state.strict; let hasStrictModeDirective = false; let parsedNonDirective = false; while (!this.match(end)) { - if (!parsedNonDirective && this.state.octalPositions.length) { - octalPositions.push(...this.state.octalPositions); - } - const stmt = this.parseStatement(null, topLevel); - if (directives && !parsedNonDirective && this.isValidDirective(stmt)) { - const directive = this.stmtToDirective(stmt); - directives.push(directive); + if (directives && !parsedNonDirective) { + if (this.isValidDirective(stmt)) { + const directive = this.stmtToDirective(stmt); + directives.push(directive); + + if (!hasStrictModeDirective && directive.value.value === "use strict") { + hasStrictModeDirective = true; + this.setStrict(true); + } - if (!hasStrictModeDirective && directive.value.value === "use strict") { - hasStrictModeDirective = true; - this.setStrict(true); + continue; } - continue; + parsedNonDirective = true; + this.state.strictErrors.clear(); } - parsedNonDirective = true; body.push(stmt); } - if (this.state.strict && octalPositions.length) { - for (let _i3 = 0; _i3 < octalPositions.length; _i3++) { - const pos = octalPositions[_i3]; - this.raise(pos, ErrorMessages.StrictOctalLiteral); - } - } - if (afterBlockParse) { afterBlockParse.call(this, hasStrictModeDirective); } @@ -12522,7 +12545,7 @@ class StatementParser extends ExpressionParser { method.kind = "method"; this.parseClassElementName(method); - if (method.key.type === "PrivateName") { + if (this.isPrivateName(method.key)) { this.pushClassPrivateMethod(classBody, privateMethod, true, false); return; } @@ -12537,7 +12560,7 @@ class StatementParser extends ExpressionParser { const containsEsc = this.state.containsEsc; const key = this.parseClassElementName(member); - const isPrivate = key.type === "PrivateName"; + const isPrivate = this.isPrivateName(key); const isSimple = key.type === "Identifier"; const maybeQuestionTokenStart = this.state.start; this.parsePostMemberNameModifiers(publicMember); @@ -12582,7 +12605,7 @@ class StatementParser extends ExpressionParser { this.parseClassElementName(method); this.parsePostMemberNameModifiers(publicMember); - if (method.key.type === "PrivateName") { + if (this.isPrivateName(method.key)) { this.pushClassPrivateMethod(classBody, privateMethod, isGenerator, true); } else { if (this.isNonstaticConstructor(publicMethod)) { @@ -12595,7 +12618,7 @@ class StatementParser 
extends ExpressionParser { method.kind = key.name; this.parseClassElementName(publicMethod); - if (method.key.type === "PrivateName") { + if (this.isPrivateName(method.key)) { this.pushClassPrivateMethod(classBody, privateMethod, false, false); } else { if (this.isNonstaticConstructor(publicMethod)) { @@ -12624,7 +12647,7 @@ class StatementParser extends ExpressionParser { this.raise(key.start, ErrorMessages.StaticPrototype); } - if (key.type === "PrivateName" && key.id.name === "constructor") { + if (this.isPrivateName(key) && this.getPrivateNameSV(key) === "constructor") { this.raise(key.start, ErrorMessages.ConstructorClassPrivateField); } @@ -12671,7 +12694,7 @@ class StatementParser extends ExpressionParser { this.expectPlugin("classPrivateProperties", prop.key.start); const node = this.parseClassPrivateProperty(prop); classBody.body.push(node); - this.classScope.declarePrivateName(node.key.id.name, CLASS_ELEMENT_OTHER, node.key.start); + this.classScope.declarePrivateName(this.getPrivateNameSV(node.key), CLASS_ELEMENT_OTHER, node.key.start); } pushClassMethod(classBody, method, isGenerator, isAsync, isConstructor, allowsDirectSuper) { @@ -12683,7 +12706,7 @@ class StatementParser extends ExpressionParser { const node = this.parseMethod(method, isGenerator, isAsync, false, false, "ClassPrivateMethod", true); classBody.body.push(node); const kind = node.kind === "get" ? node.static ? CLASS_ELEMENT_STATIC_GETTER : CLASS_ELEMENT_INSTANCE_GETTER : node.kind === "set" ? node.static ? CLASS_ELEMENT_STATIC_SETTER : CLASS_ELEMENT_INSTANCE_SETTER : CLASS_ELEMENT_OTHER; - this.classScope.declarePrivateName(node.key.id.name, kind, node.key.start); + this.classScope.declarePrivateName(this.getPrivateNameSV(node.key), kind, node.key.start); } parsePostMemberNameModifiers(methodOrProp) {} @@ -12955,8 +12978,8 @@ class StatementParser extends ExpressionParser { } } } else if (node.specifiers && node.specifiers.length) { - for (let _i4 = 0, _node$specifiers = node.specifiers; _i4 < _node$specifiers.length; _i4++) { - const specifier = _node$specifiers[_i4]; + for (let _i3 = 0, _node$specifiers = node.specifiers; _i3 < _node$specifiers.length; _i3++) { + const specifier = _node$specifiers[_i3]; const { exported } = specifier; @@ -12969,7 +12992,7 @@ class StatementParser extends ExpressionParser { } = specifier; if (local.type === "StringLiteral") { - this.raise(specifier.start, ErrorMessages.ExportBindingIsString, local.extra.raw, exportedName); + this.raise(specifier.start, ErrorMessages.ExportBindingIsString, local.value, exportedName); } else { this.checkReservedWord(local.name, local.start, true, false); this.scope.checkLocalExport(local); @@ -12982,8 +13005,8 @@ class StatementParser extends ExpressionParser { if (!id) throw new Error("Assertion failure"); this.checkDuplicateExports(node, id.name); } else if (node.declaration.type === "VariableDeclaration") { - for (let _i5 = 0, _node$declaration$dec = node.declaration.declarations; _i5 < _node$declaration$dec.length; _i5++) { - const declaration = _node$declaration$dec[_i5]; + for (let _i4 = 0, _node$declaration$dec = node.declaration.declarations; _i4 < _node$declaration$dec.length; _i4++) { + const declaration = _node$declaration$dec[_i4]; this.checkDeclaration(declaration.id); } } @@ -13001,13 +13024,13 @@ class StatementParser extends ExpressionParser { if (node.type === "Identifier") { this.checkDuplicateExports(node, node.name); } else if (node.type === "ObjectPattern") { - for (let _i6 = 0, _node$properties = node.properties; _i6 < 
_node$properties.length; _i6++) { - const prop = _node$properties[_i6]; + for (let _i5 = 0, _node$properties = node.properties; _i5 < _node$properties.length; _i5++) { + const prop = _node$properties[_i5]; this.checkDeclaration(prop); } } else if (node.type === "ArrayPattern") { - for (let _i7 = 0, _node$elements = node.elements; _i7 < _node$elements.length; _i7++) { - const elem = _node$elements[_i7]; + for (let _i6 = 0, _node$elements = node.elements; _i6 < _node$elements.length; _i6++) { + const elem = _node$elements[_i6]; if (elem) { this.checkDeclaration(elem); diff --git a/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json b/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json index d26cd44fb7852b..0632cf7c13535f 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json @@ -1,6 +1,6 @@ { "name": "@babel/parser", - "version": "7.12.7", + "version": "7.12.11", "description": "A JavaScript parser", "author": "Sebastian McKenzie <sebmck@gmail.com>", "homepage": "https://babeljs.io/", @@ -32,9 +32,9 @@ "node": ">=6.0.0" }, "devDependencies": { - "@babel/code-frame": "7.10.4", - "@babel/helper-fixtures": "7.10.5", - "@babel/helper-validator-identifier": "7.10.4", + "@babel/code-frame": "7.12.11", + "@babel/helper-fixtures": "7.12.10", + "@babel/helper-validator-identifier": "7.12.11", "charcodes": "^0.2.0" }, "bin": "./bin/babel-parser.js" diff --git a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/path/modification.js b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/path/modification.js index e8bee5944c14de..cc1e2d0e519de6 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/path/modification.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/path/modification.js @@ -186,7 +186,7 @@ function unshiftContainer(listKey, nodes) { container: this.node[listKey], listKey, key: 0 - }); + }).setContext(this.context); return path._containerInsertBefore(nodes); } @@ -203,7 +203,7 @@ function pushContainer(listKey, nodes) { container: container, listKey, key: container.length - }); + }).setContext(this.context); return path.replaceWithMultiple(nodes); } diff --git a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/scope/lib/renamer.js b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/scope/lib/renamer.js index 6d1bb1847f437e..2f82343bc6f565 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/scope/lib/renamer.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/scope/lib/renamer.js @@ -104,7 +104,15 @@ class Renamer { } } - scope.traverse(block || scope.block, renameVisitor, this); + const blockToTraverse = block || scope.block; + + if ((blockToTraverse == null ? 
void 0 : blockToTraverse.type) === "SwitchStatement") { + blockToTraverse.cases.forEach(c => { + scope.traverse(c, renameVisitor, this); + }); + } else { + scope.traverse(blockToTraverse, renameVisitor, this); + } if (!block) { scope.removeOwnBinding(oldName); diff --git a/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json b/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json index 3a875cf867494a..7a9969f86498b9 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json @@ -1,6 +1,6 @@ { "name": "@babel/traverse", - "version": "7.12.9", + "version": "7.12.12", "description": "The Babel Traverse module maintains the overall tree state, and is responsible for replacing, removing, and adding nodes", "author": "Sebastian McKenzie <sebmck@gmail.com>", "homepage": "https://babeljs.io/", @@ -15,12 +15,12 @@ }, "main": "lib/index.js", "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.5", - "@babel/helper-function-name": "^7.10.4", - "@babel/helper-split-export-declaration": "^7.11.0", - "@babel/parser": "^7.12.7", - "@babel/types": "^7.12.7", + "@babel/code-frame": "^7.12.11", + "@babel/generator": "^7.12.11", + "@babel/helper-function-name": "^7.12.11", + "@babel/helper-split-export-declaration": "^7.12.11", + "@babel/parser": "^7.12.11", + "@babel/types": "^7.12.12", "debug": "^4.1.0", "globals": "^11.1.0", "lodash": "^4.17.19" diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/asserts/generated/index.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/asserts/generated/index.js index 659c4196f3e3fc..dd9c71a68d0fde 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/asserts/generated/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/asserts/generated/index.js @@ -305,1151 +305,1151 @@ function assert(type, node, opts) { } } -function assertArrayExpression(node, opts = {}) { +function assertArrayExpression(node, opts) { assert("ArrayExpression", node, opts); } -function assertAssignmentExpression(node, opts = {}) { +function assertAssignmentExpression(node, opts) { assert("AssignmentExpression", node, opts); } -function assertBinaryExpression(node, opts = {}) { +function assertBinaryExpression(node, opts) { assert("BinaryExpression", node, opts); } -function assertInterpreterDirective(node, opts = {}) { +function assertInterpreterDirective(node, opts) { assert("InterpreterDirective", node, opts); } -function assertDirective(node, opts = {}) { +function assertDirective(node, opts) { assert("Directive", node, opts); } -function assertDirectiveLiteral(node, opts = {}) { +function assertDirectiveLiteral(node, opts) { assert("DirectiveLiteral", node, opts); } -function assertBlockStatement(node, opts = {}) { +function assertBlockStatement(node, opts) { assert("BlockStatement", node, opts); } -function assertBreakStatement(node, opts = {}) { +function assertBreakStatement(node, opts) { assert("BreakStatement", node, opts); } -function assertCallExpression(node, opts = {}) { +function assertCallExpression(node, opts) { assert("CallExpression", node, opts); } -function assertCatchClause(node, opts = {}) { +function assertCatchClause(node, opts) { assert("CatchClause", node, opts); } -function assertConditionalExpression(node, opts = {}) { +function assertConditionalExpression(node, opts) { assert("ConditionalExpression", node, opts); } 
-function assertContinueStatement(node, opts = {}) { +function assertContinueStatement(node, opts) { assert("ContinueStatement", node, opts); } -function assertDebuggerStatement(node, opts = {}) { +function assertDebuggerStatement(node, opts) { assert("DebuggerStatement", node, opts); } -function assertDoWhileStatement(node, opts = {}) { +function assertDoWhileStatement(node, opts) { assert("DoWhileStatement", node, opts); } -function assertEmptyStatement(node, opts = {}) { +function assertEmptyStatement(node, opts) { assert("EmptyStatement", node, opts); } -function assertExpressionStatement(node, opts = {}) { +function assertExpressionStatement(node, opts) { assert("ExpressionStatement", node, opts); } -function assertFile(node, opts = {}) { +function assertFile(node, opts) { assert("File", node, opts); } -function assertForInStatement(node, opts = {}) { +function assertForInStatement(node, opts) { assert("ForInStatement", node, opts); } -function assertForStatement(node, opts = {}) { +function assertForStatement(node, opts) { assert("ForStatement", node, opts); } -function assertFunctionDeclaration(node, opts = {}) { +function assertFunctionDeclaration(node, opts) { assert("FunctionDeclaration", node, opts); } -function assertFunctionExpression(node, opts = {}) { +function assertFunctionExpression(node, opts) { assert("FunctionExpression", node, opts); } -function assertIdentifier(node, opts = {}) { +function assertIdentifier(node, opts) { assert("Identifier", node, opts); } -function assertIfStatement(node, opts = {}) { +function assertIfStatement(node, opts) { assert("IfStatement", node, opts); } -function assertLabeledStatement(node, opts = {}) { +function assertLabeledStatement(node, opts) { assert("LabeledStatement", node, opts); } -function assertStringLiteral(node, opts = {}) { +function assertStringLiteral(node, opts) { assert("StringLiteral", node, opts); } -function assertNumericLiteral(node, opts = {}) { +function assertNumericLiteral(node, opts) { assert("NumericLiteral", node, opts); } -function assertNullLiteral(node, opts = {}) { +function assertNullLiteral(node, opts) { assert("NullLiteral", node, opts); } -function assertBooleanLiteral(node, opts = {}) { +function assertBooleanLiteral(node, opts) { assert("BooleanLiteral", node, opts); } -function assertRegExpLiteral(node, opts = {}) { +function assertRegExpLiteral(node, opts) { assert("RegExpLiteral", node, opts); } -function assertLogicalExpression(node, opts = {}) { +function assertLogicalExpression(node, opts) { assert("LogicalExpression", node, opts); } -function assertMemberExpression(node, opts = {}) { +function assertMemberExpression(node, opts) { assert("MemberExpression", node, opts); } -function assertNewExpression(node, opts = {}) { +function assertNewExpression(node, opts) { assert("NewExpression", node, opts); } -function assertProgram(node, opts = {}) { +function assertProgram(node, opts) { assert("Program", node, opts); } -function assertObjectExpression(node, opts = {}) { +function assertObjectExpression(node, opts) { assert("ObjectExpression", node, opts); } -function assertObjectMethod(node, opts = {}) { +function assertObjectMethod(node, opts) { assert("ObjectMethod", node, opts); } -function assertObjectProperty(node, opts = {}) { +function assertObjectProperty(node, opts) { assert("ObjectProperty", node, opts); } -function assertRestElement(node, opts = {}) { +function assertRestElement(node, opts) { assert("RestElement", node, opts); } -function assertReturnStatement(node, opts = {}) { +function 
assertReturnStatement(node, opts) { assert("ReturnStatement", node, opts); } -function assertSequenceExpression(node, opts = {}) { +function assertSequenceExpression(node, opts) { assert("SequenceExpression", node, opts); } -function assertParenthesizedExpression(node, opts = {}) { +function assertParenthesizedExpression(node, opts) { assert("ParenthesizedExpression", node, opts); } -function assertSwitchCase(node, opts = {}) { +function assertSwitchCase(node, opts) { assert("SwitchCase", node, opts); } -function assertSwitchStatement(node, opts = {}) { +function assertSwitchStatement(node, opts) { assert("SwitchStatement", node, opts); } -function assertThisExpression(node, opts = {}) { +function assertThisExpression(node, opts) { assert("ThisExpression", node, opts); } -function assertThrowStatement(node, opts = {}) { +function assertThrowStatement(node, opts) { assert("ThrowStatement", node, opts); } -function assertTryStatement(node, opts = {}) { +function assertTryStatement(node, opts) { assert("TryStatement", node, opts); } -function assertUnaryExpression(node, opts = {}) { +function assertUnaryExpression(node, opts) { assert("UnaryExpression", node, opts); } -function assertUpdateExpression(node, opts = {}) { +function assertUpdateExpression(node, opts) { assert("UpdateExpression", node, opts); } -function assertVariableDeclaration(node, opts = {}) { +function assertVariableDeclaration(node, opts) { assert("VariableDeclaration", node, opts); } -function assertVariableDeclarator(node, opts = {}) { +function assertVariableDeclarator(node, opts) { assert("VariableDeclarator", node, opts); } -function assertWhileStatement(node, opts = {}) { +function assertWhileStatement(node, opts) { assert("WhileStatement", node, opts); } -function assertWithStatement(node, opts = {}) { +function assertWithStatement(node, opts) { assert("WithStatement", node, opts); } -function assertAssignmentPattern(node, opts = {}) { +function assertAssignmentPattern(node, opts) { assert("AssignmentPattern", node, opts); } -function assertArrayPattern(node, opts = {}) { +function assertArrayPattern(node, opts) { assert("ArrayPattern", node, opts); } -function assertArrowFunctionExpression(node, opts = {}) { +function assertArrowFunctionExpression(node, opts) { assert("ArrowFunctionExpression", node, opts); } -function assertClassBody(node, opts = {}) { +function assertClassBody(node, opts) { assert("ClassBody", node, opts); } -function assertClassExpression(node, opts = {}) { +function assertClassExpression(node, opts) { assert("ClassExpression", node, opts); } -function assertClassDeclaration(node, opts = {}) { +function assertClassDeclaration(node, opts) { assert("ClassDeclaration", node, opts); } -function assertExportAllDeclaration(node, opts = {}) { +function assertExportAllDeclaration(node, opts) { assert("ExportAllDeclaration", node, opts); } -function assertExportDefaultDeclaration(node, opts = {}) { +function assertExportDefaultDeclaration(node, opts) { assert("ExportDefaultDeclaration", node, opts); } -function assertExportNamedDeclaration(node, opts = {}) { +function assertExportNamedDeclaration(node, opts) { assert("ExportNamedDeclaration", node, opts); } -function assertExportSpecifier(node, opts = {}) { +function assertExportSpecifier(node, opts) { assert("ExportSpecifier", node, opts); } -function assertForOfStatement(node, opts = {}) { +function assertForOfStatement(node, opts) { assert("ForOfStatement", node, opts); } -function assertImportDeclaration(node, opts = {}) { +function 
assertImportDeclaration(node, opts) { assert("ImportDeclaration", node, opts); } -function assertImportDefaultSpecifier(node, opts = {}) { +function assertImportDefaultSpecifier(node, opts) { assert("ImportDefaultSpecifier", node, opts); } -function assertImportNamespaceSpecifier(node, opts = {}) { +function assertImportNamespaceSpecifier(node, opts) { assert("ImportNamespaceSpecifier", node, opts); } -function assertImportSpecifier(node, opts = {}) { +function assertImportSpecifier(node, opts) { assert("ImportSpecifier", node, opts); } -function assertMetaProperty(node, opts = {}) { +function assertMetaProperty(node, opts) { assert("MetaProperty", node, opts); } -function assertClassMethod(node, opts = {}) { +function assertClassMethod(node, opts) { assert("ClassMethod", node, opts); } -function assertObjectPattern(node, opts = {}) { +function assertObjectPattern(node, opts) { assert("ObjectPattern", node, opts); } -function assertSpreadElement(node, opts = {}) { +function assertSpreadElement(node, opts) { assert("SpreadElement", node, opts); } -function assertSuper(node, opts = {}) { +function assertSuper(node, opts) { assert("Super", node, opts); } -function assertTaggedTemplateExpression(node, opts = {}) { +function assertTaggedTemplateExpression(node, opts) { assert("TaggedTemplateExpression", node, opts); } -function assertTemplateElement(node, opts = {}) { +function assertTemplateElement(node, opts) { assert("TemplateElement", node, opts); } -function assertTemplateLiteral(node, opts = {}) { +function assertTemplateLiteral(node, opts) { assert("TemplateLiteral", node, opts); } -function assertYieldExpression(node, opts = {}) { +function assertYieldExpression(node, opts) { assert("YieldExpression", node, opts); } -function assertAwaitExpression(node, opts = {}) { +function assertAwaitExpression(node, opts) { assert("AwaitExpression", node, opts); } -function assertImport(node, opts = {}) { +function assertImport(node, opts) { assert("Import", node, opts); } -function assertBigIntLiteral(node, opts = {}) { +function assertBigIntLiteral(node, opts) { assert("BigIntLiteral", node, opts); } -function assertExportNamespaceSpecifier(node, opts = {}) { +function assertExportNamespaceSpecifier(node, opts) { assert("ExportNamespaceSpecifier", node, opts); } -function assertOptionalMemberExpression(node, opts = {}) { +function assertOptionalMemberExpression(node, opts) { assert("OptionalMemberExpression", node, opts); } -function assertOptionalCallExpression(node, opts = {}) { +function assertOptionalCallExpression(node, opts) { assert("OptionalCallExpression", node, opts); } -function assertAnyTypeAnnotation(node, opts = {}) { +function assertAnyTypeAnnotation(node, opts) { assert("AnyTypeAnnotation", node, opts); } -function assertArrayTypeAnnotation(node, opts = {}) { +function assertArrayTypeAnnotation(node, opts) { assert("ArrayTypeAnnotation", node, opts); } -function assertBooleanTypeAnnotation(node, opts = {}) { +function assertBooleanTypeAnnotation(node, opts) { assert("BooleanTypeAnnotation", node, opts); } -function assertBooleanLiteralTypeAnnotation(node, opts = {}) { +function assertBooleanLiteralTypeAnnotation(node, opts) { assert("BooleanLiteralTypeAnnotation", node, opts); } -function assertNullLiteralTypeAnnotation(node, opts = {}) { +function assertNullLiteralTypeAnnotation(node, opts) { assert("NullLiteralTypeAnnotation", node, opts); } -function assertClassImplements(node, opts = {}) { +function assertClassImplements(node, opts) { assert("ClassImplements", node, opts); } 
-function assertDeclareClass(node, opts = {}) { +function assertDeclareClass(node, opts) { assert("DeclareClass", node, opts); } -function assertDeclareFunction(node, opts = {}) { +function assertDeclareFunction(node, opts) { assert("DeclareFunction", node, opts); } -function assertDeclareInterface(node, opts = {}) { +function assertDeclareInterface(node, opts) { assert("DeclareInterface", node, opts); } -function assertDeclareModule(node, opts = {}) { +function assertDeclareModule(node, opts) { assert("DeclareModule", node, opts); } -function assertDeclareModuleExports(node, opts = {}) { +function assertDeclareModuleExports(node, opts) { assert("DeclareModuleExports", node, opts); } -function assertDeclareTypeAlias(node, opts = {}) { +function assertDeclareTypeAlias(node, opts) { assert("DeclareTypeAlias", node, opts); } -function assertDeclareOpaqueType(node, opts = {}) { +function assertDeclareOpaqueType(node, opts) { assert("DeclareOpaqueType", node, opts); } -function assertDeclareVariable(node, opts = {}) { +function assertDeclareVariable(node, opts) { assert("DeclareVariable", node, opts); } -function assertDeclareExportDeclaration(node, opts = {}) { +function assertDeclareExportDeclaration(node, opts) { assert("DeclareExportDeclaration", node, opts); } -function assertDeclareExportAllDeclaration(node, opts = {}) { +function assertDeclareExportAllDeclaration(node, opts) { assert("DeclareExportAllDeclaration", node, opts); } -function assertDeclaredPredicate(node, opts = {}) { +function assertDeclaredPredicate(node, opts) { assert("DeclaredPredicate", node, opts); } -function assertExistsTypeAnnotation(node, opts = {}) { +function assertExistsTypeAnnotation(node, opts) { assert("ExistsTypeAnnotation", node, opts); } -function assertFunctionTypeAnnotation(node, opts = {}) { +function assertFunctionTypeAnnotation(node, opts) { assert("FunctionTypeAnnotation", node, opts); } -function assertFunctionTypeParam(node, opts = {}) { +function assertFunctionTypeParam(node, opts) { assert("FunctionTypeParam", node, opts); } -function assertGenericTypeAnnotation(node, opts = {}) { +function assertGenericTypeAnnotation(node, opts) { assert("GenericTypeAnnotation", node, opts); } -function assertInferredPredicate(node, opts = {}) { +function assertInferredPredicate(node, opts) { assert("InferredPredicate", node, opts); } -function assertInterfaceExtends(node, opts = {}) { +function assertInterfaceExtends(node, opts) { assert("InterfaceExtends", node, opts); } -function assertInterfaceDeclaration(node, opts = {}) { +function assertInterfaceDeclaration(node, opts) { assert("InterfaceDeclaration", node, opts); } -function assertInterfaceTypeAnnotation(node, opts = {}) { +function assertInterfaceTypeAnnotation(node, opts) { assert("InterfaceTypeAnnotation", node, opts); } -function assertIntersectionTypeAnnotation(node, opts = {}) { +function assertIntersectionTypeAnnotation(node, opts) { assert("IntersectionTypeAnnotation", node, opts); } -function assertMixedTypeAnnotation(node, opts = {}) { +function assertMixedTypeAnnotation(node, opts) { assert("MixedTypeAnnotation", node, opts); } -function assertEmptyTypeAnnotation(node, opts = {}) { +function assertEmptyTypeAnnotation(node, opts) { assert("EmptyTypeAnnotation", node, opts); } -function assertNullableTypeAnnotation(node, opts = {}) { +function assertNullableTypeAnnotation(node, opts) { assert("NullableTypeAnnotation", node, opts); } -function assertNumberLiteralTypeAnnotation(node, opts = {}) { +function assertNumberLiteralTypeAnnotation(node, 
opts) { assert("NumberLiteralTypeAnnotation", node, opts); } -function assertNumberTypeAnnotation(node, opts = {}) { +function assertNumberTypeAnnotation(node, opts) { assert("NumberTypeAnnotation", node, opts); } -function assertObjectTypeAnnotation(node, opts = {}) { +function assertObjectTypeAnnotation(node, opts) { assert("ObjectTypeAnnotation", node, opts); } -function assertObjectTypeInternalSlot(node, opts = {}) { +function assertObjectTypeInternalSlot(node, opts) { assert("ObjectTypeInternalSlot", node, opts); } -function assertObjectTypeCallProperty(node, opts = {}) { +function assertObjectTypeCallProperty(node, opts) { assert("ObjectTypeCallProperty", node, opts); } -function assertObjectTypeIndexer(node, opts = {}) { +function assertObjectTypeIndexer(node, opts) { assert("ObjectTypeIndexer", node, opts); } -function assertObjectTypeProperty(node, opts = {}) { +function assertObjectTypeProperty(node, opts) { assert("ObjectTypeProperty", node, opts); } -function assertObjectTypeSpreadProperty(node, opts = {}) { +function assertObjectTypeSpreadProperty(node, opts) { assert("ObjectTypeSpreadProperty", node, opts); } -function assertOpaqueType(node, opts = {}) { +function assertOpaqueType(node, opts) { assert("OpaqueType", node, opts); } -function assertQualifiedTypeIdentifier(node, opts = {}) { +function assertQualifiedTypeIdentifier(node, opts) { assert("QualifiedTypeIdentifier", node, opts); } -function assertStringLiteralTypeAnnotation(node, opts = {}) { +function assertStringLiteralTypeAnnotation(node, opts) { assert("StringLiteralTypeAnnotation", node, opts); } -function assertStringTypeAnnotation(node, opts = {}) { +function assertStringTypeAnnotation(node, opts) { assert("StringTypeAnnotation", node, opts); } -function assertSymbolTypeAnnotation(node, opts = {}) { +function assertSymbolTypeAnnotation(node, opts) { assert("SymbolTypeAnnotation", node, opts); } -function assertThisTypeAnnotation(node, opts = {}) { +function assertThisTypeAnnotation(node, opts) { assert("ThisTypeAnnotation", node, opts); } -function assertTupleTypeAnnotation(node, opts = {}) { +function assertTupleTypeAnnotation(node, opts) { assert("TupleTypeAnnotation", node, opts); } -function assertTypeofTypeAnnotation(node, opts = {}) { +function assertTypeofTypeAnnotation(node, opts) { assert("TypeofTypeAnnotation", node, opts); } -function assertTypeAlias(node, opts = {}) { +function assertTypeAlias(node, opts) { assert("TypeAlias", node, opts); } -function assertTypeAnnotation(node, opts = {}) { +function assertTypeAnnotation(node, opts) { assert("TypeAnnotation", node, opts); } -function assertTypeCastExpression(node, opts = {}) { +function assertTypeCastExpression(node, opts) { assert("TypeCastExpression", node, opts); } -function assertTypeParameter(node, opts = {}) { +function assertTypeParameter(node, opts) { assert("TypeParameter", node, opts); } -function assertTypeParameterDeclaration(node, opts = {}) { +function assertTypeParameterDeclaration(node, opts) { assert("TypeParameterDeclaration", node, opts); } -function assertTypeParameterInstantiation(node, opts = {}) { +function assertTypeParameterInstantiation(node, opts) { assert("TypeParameterInstantiation", node, opts); } -function assertUnionTypeAnnotation(node, opts = {}) { +function assertUnionTypeAnnotation(node, opts) { assert("UnionTypeAnnotation", node, opts); } -function assertVariance(node, opts = {}) { +function assertVariance(node, opts) { assert("Variance", node, opts); } -function assertVoidTypeAnnotation(node, opts = {}) { 
+function assertVoidTypeAnnotation(node, opts) { assert("VoidTypeAnnotation", node, opts); } -function assertEnumDeclaration(node, opts = {}) { +function assertEnumDeclaration(node, opts) { assert("EnumDeclaration", node, opts); } -function assertEnumBooleanBody(node, opts = {}) { +function assertEnumBooleanBody(node, opts) { assert("EnumBooleanBody", node, opts); } -function assertEnumNumberBody(node, opts = {}) { +function assertEnumNumberBody(node, opts) { assert("EnumNumberBody", node, opts); } -function assertEnumStringBody(node, opts = {}) { +function assertEnumStringBody(node, opts) { assert("EnumStringBody", node, opts); } -function assertEnumSymbolBody(node, opts = {}) { +function assertEnumSymbolBody(node, opts) { assert("EnumSymbolBody", node, opts); } -function assertEnumBooleanMember(node, opts = {}) { +function assertEnumBooleanMember(node, opts) { assert("EnumBooleanMember", node, opts); } -function assertEnumNumberMember(node, opts = {}) { +function assertEnumNumberMember(node, opts) { assert("EnumNumberMember", node, opts); } -function assertEnumStringMember(node, opts = {}) { +function assertEnumStringMember(node, opts) { assert("EnumStringMember", node, opts); } -function assertEnumDefaultedMember(node, opts = {}) { +function assertEnumDefaultedMember(node, opts) { assert("EnumDefaultedMember", node, opts); } -function assertJSXAttribute(node, opts = {}) { +function assertJSXAttribute(node, opts) { assert("JSXAttribute", node, opts); } -function assertJSXClosingElement(node, opts = {}) { +function assertJSXClosingElement(node, opts) { assert("JSXClosingElement", node, opts); } -function assertJSXElement(node, opts = {}) { +function assertJSXElement(node, opts) { assert("JSXElement", node, opts); } -function assertJSXEmptyExpression(node, opts = {}) { +function assertJSXEmptyExpression(node, opts) { assert("JSXEmptyExpression", node, opts); } -function assertJSXExpressionContainer(node, opts = {}) { +function assertJSXExpressionContainer(node, opts) { assert("JSXExpressionContainer", node, opts); } -function assertJSXSpreadChild(node, opts = {}) { +function assertJSXSpreadChild(node, opts) { assert("JSXSpreadChild", node, opts); } -function assertJSXIdentifier(node, opts = {}) { +function assertJSXIdentifier(node, opts) { assert("JSXIdentifier", node, opts); } -function assertJSXMemberExpression(node, opts = {}) { +function assertJSXMemberExpression(node, opts) { assert("JSXMemberExpression", node, opts); } -function assertJSXNamespacedName(node, opts = {}) { +function assertJSXNamespacedName(node, opts) { assert("JSXNamespacedName", node, opts); } -function assertJSXOpeningElement(node, opts = {}) { +function assertJSXOpeningElement(node, opts) { assert("JSXOpeningElement", node, opts); } -function assertJSXSpreadAttribute(node, opts = {}) { +function assertJSXSpreadAttribute(node, opts) { assert("JSXSpreadAttribute", node, opts); } -function assertJSXText(node, opts = {}) { +function assertJSXText(node, opts) { assert("JSXText", node, opts); } -function assertJSXFragment(node, opts = {}) { +function assertJSXFragment(node, opts) { assert("JSXFragment", node, opts); } -function assertJSXOpeningFragment(node, opts = {}) { +function assertJSXOpeningFragment(node, opts) { assert("JSXOpeningFragment", node, opts); } -function assertJSXClosingFragment(node, opts = {}) { +function assertJSXClosingFragment(node, opts) { assert("JSXClosingFragment", node, opts); } -function assertNoop(node, opts = {}) { +function assertNoop(node, opts) { assert("Noop", node, opts); } -function 
assertPlaceholder(node, opts = {}) { +function assertPlaceholder(node, opts) { assert("Placeholder", node, opts); } -function assertV8IntrinsicIdentifier(node, opts = {}) { +function assertV8IntrinsicIdentifier(node, opts) { assert("V8IntrinsicIdentifier", node, opts); } -function assertArgumentPlaceholder(node, opts = {}) { +function assertArgumentPlaceholder(node, opts) { assert("ArgumentPlaceholder", node, opts); } -function assertBindExpression(node, opts = {}) { +function assertBindExpression(node, opts) { assert("BindExpression", node, opts); } -function assertClassProperty(node, opts = {}) { +function assertClassProperty(node, opts) { assert("ClassProperty", node, opts); } -function assertPipelineTopicExpression(node, opts = {}) { +function assertPipelineTopicExpression(node, opts) { assert("PipelineTopicExpression", node, opts); } -function assertPipelineBareFunction(node, opts = {}) { +function assertPipelineBareFunction(node, opts) { assert("PipelineBareFunction", node, opts); } -function assertPipelinePrimaryTopicReference(node, opts = {}) { +function assertPipelinePrimaryTopicReference(node, opts) { assert("PipelinePrimaryTopicReference", node, opts); } -function assertClassPrivateProperty(node, opts = {}) { +function assertClassPrivateProperty(node, opts) { assert("ClassPrivateProperty", node, opts); } -function assertClassPrivateMethod(node, opts = {}) { +function assertClassPrivateMethod(node, opts) { assert("ClassPrivateMethod", node, opts); } -function assertImportAttribute(node, opts = {}) { +function assertImportAttribute(node, opts) { assert("ImportAttribute", node, opts); } -function assertDecorator(node, opts = {}) { +function assertDecorator(node, opts) { assert("Decorator", node, opts); } -function assertDoExpression(node, opts = {}) { +function assertDoExpression(node, opts) { assert("DoExpression", node, opts); } -function assertExportDefaultSpecifier(node, opts = {}) { +function assertExportDefaultSpecifier(node, opts) { assert("ExportDefaultSpecifier", node, opts); } -function assertPrivateName(node, opts = {}) { +function assertPrivateName(node, opts) { assert("PrivateName", node, opts); } -function assertRecordExpression(node, opts = {}) { +function assertRecordExpression(node, opts) { assert("RecordExpression", node, opts); } -function assertTupleExpression(node, opts = {}) { +function assertTupleExpression(node, opts) { assert("TupleExpression", node, opts); } -function assertDecimalLiteral(node, opts = {}) { +function assertDecimalLiteral(node, opts) { assert("DecimalLiteral", node, opts); } -function assertStaticBlock(node, opts = {}) { +function assertStaticBlock(node, opts) { assert("StaticBlock", node, opts); } -function assertTSParameterProperty(node, opts = {}) { +function assertTSParameterProperty(node, opts) { assert("TSParameterProperty", node, opts); } -function assertTSDeclareFunction(node, opts = {}) { +function assertTSDeclareFunction(node, opts) { assert("TSDeclareFunction", node, opts); } -function assertTSDeclareMethod(node, opts = {}) { +function assertTSDeclareMethod(node, opts) { assert("TSDeclareMethod", node, opts); } -function assertTSQualifiedName(node, opts = {}) { +function assertTSQualifiedName(node, opts) { assert("TSQualifiedName", node, opts); } -function assertTSCallSignatureDeclaration(node, opts = {}) { +function assertTSCallSignatureDeclaration(node, opts) { assert("TSCallSignatureDeclaration", node, opts); } -function assertTSConstructSignatureDeclaration(node, opts = {}) { +function 
assertTSConstructSignatureDeclaration(node, opts) { assert("TSConstructSignatureDeclaration", node, opts); } -function assertTSPropertySignature(node, opts = {}) { +function assertTSPropertySignature(node, opts) { assert("TSPropertySignature", node, opts); } -function assertTSMethodSignature(node, opts = {}) { +function assertTSMethodSignature(node, opts) { assert("TSMethodSignature", node, opts); } -function assertTSIndexSignature(node, opts = {}) { +function assertTSIndexSignature(node, opts) { assert("TSIndexSignature", node, opts); } -function assertTSAnyKeyword(node, opts = {}) { +function assertTSAnyKeyword(node, opts) { assert("TSAnyKeyword", node, opts); } -function assertTSBooleanKeyword(node, opts = {}) { +function assertTSBooleanKeyword(node, opts) { assert("TSBooleanKeyword", node, opts); } -function assertTSBigIntKeyword(node, opts = {}) { +function assertTSBigIntKeyword(node, opts) { assert("TSBigIntKeyword", node, opts); } -function assertTSIntrinsicKeyword(node, opts = {}) { +function assertTSIntrinsicKeyword(node, opts) { assert("TSIntrinsicKeyword", node, opts); } -function assertTSNeverKeyword(node, opts = {}) { +function assertTSNeverKeyword(node, opts) { assert("TSNeverKeyword", node, opts); } -function assertTSNullKeyword(node, opts = {}) { +function assertTSNullKeyword(node, opts) { assert("TSNullKeyword", node, opts); } -function assertTSNumberKeyword(node, opts = {}) { +function assertTSNumberKeyword(node, opts) { assert("TSNumberKeyword", node, opts); } -function assertTSObjectKeyword(node, opts = {}) { +function assertTSObjectKeyword(node, opts) { assert("TSObjectKeyword", node, opts); } -function assertTSStringKeyword(node, opts = {}) { +function assertTSStringKeyword(node, opts) { assert("TSStringKeyword", node, opts); } -function assertTSSymbolKeyword(node, opts = {}) { +function assertTSSymbolKeyword(node, opts) { assert("TSSymbolKeyword", node, opts); } -function assertTSUndefinedKeyword(node, opts = {}) { +function assertTSUndefinedKeyword(node, opts) { assert("TSUndefinedKeyword", node, opts); } -function assertTSUnknownKeyword(node, opts = {}) { +function assertTSUnknownKeyword(node, opts) { assert("TSUnknownKeyword", node, opts); } -function assertTSVoidKeyword(node, opts = {}) { +function assertTSVoidKeyword(node, opts) { assert("TSVoidKeyword", node, opts); } -function assertTSThisType(node, opts = {}) { +function assertTSThisType(node, opts) { assert("TSThisType", node, opts); } -function assertTSFunctionType(node, opts = {}) { +function assertTSFunctionType(node, opts) { assert("TSFunctionType", node, opts); } -function assertTSConstructorType(node, opts = {}) { +function assertTSConstructorType(node, opts) { assert("TSConstructorType", node, opts); } -function assertTSTypeReference(node, opts = {}) { +function assertTSTypeReference(node, opts) { assert("TSTypeReference", node, opts); } -function assertTSTypePredicate(node, opts = {}) { +function assertTSTypePredicate(node, opts) { assert("TSTypePredicate", node, opts); } -function assertTSTypeQuery(node, opts = {}) { +function assertTSTypeQuery(node, opts) { assert("TSTypeQuery", node, opts); } -function assertTSTypeLiteral(node, opts = {}) { +function assertTSTypeLiteral(node, opts) { assert("TSTypeLiteral", node, opts); } -function assertTSArrayType(node, opts = {}) { +function assertTSArrayType(node, opts) { assert("TSArrayType", node, opts); } -function assertTSTupleType(node, opts = {}) { +function assertTSTupleType(node, opts) { assert("TSTupleType", node, opts); } -function 
assertTSOptionalType(node, opts = {}) { +function assertTSOptionalType(node, opts) { assert("TSOptionalType", node, opts); } -function assertTSRestType(node, opts = {}) { +function assertTSRestType(node, opts) { assert("TSRestType", node, opts); } -function assertTSNamedTupleMember(node, opts = {}) { +function assertTSNamedTupleMember(node, opts) { assert("TSNamedTupleMember", node, opts); } -function assertTSUnionType(node, opts = {}) { +function assertTSUnionType(node, opts) { assert("TSUnionType", node, opts); } -function assertTSIntersectionType(node, opts = {}) { +function assertTSIntersectionType(node, opts) { assert("TSIntersectionType", node, opts); } -function assertTSConditionalType(node, opts = {}) { +function assertTSConditionalType(node, opts) { assert("TSConditionalType", node, opts); } -function assertTSInferType(node, opts = {}) { +function assertTSInferType(node, opts) { assert("TSInferType", node, opts); } -function assertTSParenthesizedType(node, opts = {}) { +function assertTSParenthesizedType(node, opts) { assert("TSParenthesizedType", node, opts); } -function assertTSTypeOperator(node, opts = {}) { +function assertTSTypeOperator(node, opts) { assert("TSTypeOperator", node, opts); } -function assertTSIndexedAccessType(node, opts = {}) { +function assertTSIndexedAccessType(node, opts) { assert("TSIndexedAccessType", node, opts); } -function assertTSMappedType(node, opts = {}) { +function assertTSMappedType(node, opts) { assert("TSMappedType", node, opts); } -function assertTSLiteralType(node, opts = {}) { +function assertTSLiteralType(node, opts) { assert("TSLiteralType", node, opts); } -function assertTSExpressionWithTypeArguments(node, opts = {}) { +function assertTSExpressionWithTypeArguments(node, opts) { assert("TSExpressionWithTypeArguments", node, opts); } -function assertTSInterfaceDeclaration(node, opts = {}) { +function assertTSInterfaceDeclaration(node, opts) { assert("TSInterfaceDeclaration", node, opts); } -function assertTSInterfaceBody(node, opts = {}) { +function assertTSInterfaceBody(node, opts) { assert("TSInterfaceBody", node, opts); } -function assertTSTypeAliasDeclaration(node, opts = {}) { +function assertTSTypeAliasDeclaration(node, opts) { assert("TSTypeAliasDeclaration", node, opts); } -function assertTSAsExpression(node, opts = {}) { +function assertTSAsExpression(node, opts) { assert("TSAsExpression", node, opts); } -function assertTSTypeAssertion(node, opts = {}) { +function assertTSTypeAssertion(node, opts) { assert("TSTypeAssertion", node, opts); } -function assertTSEnumDeclaration(node, opts = {}) { +function assertTSEnumDeclaration(node, opts) { assert("TSEnumDeclaration", node, opts); } -function assertTSEnumMember(node, opts = {}) { +function assertTSEnumMember(node, opts) { assert("TSEnumMember", node, opts); } -function assertTSModuleDeclaration(node, opts = {}) { +function assertTSModuleDeclaration(node, opts) { assert("TSModuleDeclaration", node, opts); } -function assertTSModuleBlock(node, opts = {}) { +function assertTSModuleBlock(node, opts) { assert("TSModuleBlock", node, opts); } -function assertTSImportType(node, opts = {}) { +function assertTSImportType(node, opts) { assert("TSImportType", node, opts); } -function assertTSImportEqualsDeclaration(node, opts = {}) { +function assertTSImportEqualsDeclaration(node, opts) { assert("TSImportEqualsDeclaration", node, opts); } -function assertTSExternalModuleReference(node, opts = {}) { +function assertTSExternalModuleReference(node, opts) { assert("TSExternalModuleReference", node, 
opts); } -function assertTSNonNullExpression(node, opts = {}) { +function assertTSNonNullExpression(node, opts) { assert("TSNonNullExpression", node, opts); } -function assertTSExportAssignment(node, opts = {}) { +function assertTSExportAssignment(node, opts) { assert("TSExportAssignment", node, opts); } -function assertTSNamespaceExportDeclaration(node, opts = {}) { +function assertTSNamespaceExportDeclaration(node, opts) { assert("TSNamespaceExportDeclaration", node, opts); } -function assertTSTypeAnnotation(node, opts = {}) { +function assertTSTypeAnnotation(node, opts) { assert("TSTypeAnnotation", node, opts); } -function assertTSTypeParameterInstantiation(node, opts = {}) { +function assertTSTypeParameterInstantiation(node, opts) { assert("TSTypeParameterInstantiation", node, opts); } -function assertTSTypeParameterDeclaration(node, opts = {}) { +function assertTSTypeParameterDeclaration(node, opts) { assert("TSTypeParameterDeclaration", node, opts); } -function assertTSTypeParameter(node, opts = {}) { +function assertTSTypeParameter(node, opts) { assert("TSTypeParameter", node, opts); } -function assertExpression(node, opts = {}) { +function assertExpression(node, opts) { assert("Expression", node, opts); } -function assertBinary(node, opts = {}) { +function assertBinary(node, opts) { assert("Binary", node, opts); } -function assertScopable(node, opts = {}) { +function assertScopable(node, opts) { assert("Scopable", node, opts); } -function assertBlockParent(node, opts = {}) { +function assertBlockParent(node, opts) { assert("BlockParent", node, opts); } -function assertBlock(node, opts = {}) { +function assertBlock(node, opts) { assert("Block", node, opts); } -function assertStatement(node, opts = {}) { +function assertStatement(node, opts) { assert("Statement", node, opts); } -function assertTerminatorless(node, opts = {}) { +function assertTerminatorless(node, opts) { assert("Terminatorless", node, opts); } -function assertCompletionStatement(node, opts = {}) { +function assertCompletionStatement(node, opts) { assert("CompletionStatement", node, opts); } -function assertConditional(node, opts = {}) { +function assertConditional(node, opts) { assert("Conditional", node, opts); } -function assertLoop(node, opts = {}) { +function assertLoop(node, opts) { assert("Loop", node, opts); } -function assertWhile(node, opts = {}) { +function assertWhile(node, opts) { assert("While", node, opts); } -function assertExpressionWrapper(node, opts = {}) { +function assertExpressionWrapper(node, opts) { assert("ExpressionWrapper", node, opts); } -function assertFor(node, opts = {}) { +function assertFor(node, opts) { assert("For", node, opts); } -function assertForXStatement(node, opts = {}) { +function assertForXStatement(node, opts) { assert("ForXStatement", node, opts); } -function assertFunction(node, opts = {}) { +function assertFunction(node, opts) { assert("Function", node, opts); } -function assertFunctionParent(node, opts = {}) { +function assertFunctionParent(node, opts) { assert("FunctionParent", node, opts); } -function assertPureish(node, opts = {}) { +function assertPureish(node, opts) { assert("Pureish", node, opts); } -function assertDeclaration(node, opts = {}) { +function assertDeclaration(node, opts) { assert("Declaration", node, opts); } -function assertPatternLike(node, opts = {}) { +function assertPatternLike(node, opts) { assert("PatternLike", node, opts); } -function assertLVal(node, opts = {}) { +function assertLVal(node, opts) { assert("LVal", node, opts); } -function 
assertTSEntityName(node, opts = {}) { +function assertTSEntityName(node, opts) { assert("TSEntityName", node, opts); } -function assertLiteral(node, opts = {}) { +function assertLiteral(node, opts) { assert("Literal", node, opts); } -function assertImmutable(node, opts = {}) { +function assertImmutable(node, opts) { assert("Immutable", node, opts); } -function assertUserWhitespacable(node, opts = {}) { +function assertUserWhitespacable(node, opts) { assert("UserWhitespacable", node, opts); } -function assertMethod(node, opts = {}) { +function assertMethod(node, opts) { assert("Method", node, opts); } -function assertObjectMember(node, opts = {}) { +function assertObjectMember(node, opts) { assert("ObjectMember", node, opts); } -function assertProperty(node, opts = {}) { +function assertProperty(node, opts) { assert("Property", node, opts); } -function assertUnaryLike(node, opts = {}) { +function assertUnaryLike(node, opts) { assert("UnaryLike", node, opts); } -function assertPattern(node, opts = {}) { +function assertPattern(node, opts) { assert("Pattern", node, opts); } -function assertClass(node, opts = {}) { +function assertClass(node, opts) { assert("Class", node, opts); } -function assertModuleDeclaration(node, opts = {}) { +function assertModuleDeclaration(node, opts) { assert("ModuleDeclaration", node, opts); } -function assertExportDeclaration(node, opts = {}) { +function assertExportDeclaration(node, opts) { assert("ExportDeclaration", node, opts); } -function assertModuleSpecifier(node, opts = {}) { +function assertModuleSpecifier(node, opts) { assert("ModuleSpecifier", node, opts); } -function assertFlow(node, opts = {}) { +function assertFlow(node, opts) { assert("Flow", node, opts); } -function assertFlowType(node, opts = {}) { +function assertFlowType(node, opts) { assert("FlowType", node, opts); } -function assertFlowBaseAnnotation(node, opts = {}) { +function assertFlowBaseAnnotation(node, opts) { assert("FlowBaseAnnotation", node, opts); } -function assertFlowDeclaration(node, opts = {}) { +function assertFlowDeclaration(node, opts) { assert("FlowDeclaration", node, opts); } -function assertFlowPredicate(node, opts = {}) { +function assertFlowPredicate(node, opts) { assert("FlowPredicate", node, opts); } -function assertEnumBody(node, opts = {}) { +function assertEnumBody(node, opts) { assert("EnumBody", node, opts); } -function assertEnumMember(node, opts = {}) { +function assertEnumMember(node, opts) { assert("EnumMember", node, opts); } -function assertJSX(node, opts = {}) { +function assertJSX(node, opts) { assert("JSX", node, opts); } -function assertPrivate(node, opts = {}) { +function assertPrivate(node, opts) { assert("Private", node, opts); } -function assertTSTypeElement(node, opts = {}) { +function assertTSTypeElement(node, opts) { assert("TSTypeElement", node, opts); } -function assertTSType(node, opts = {}) { +function assertTSType(node, opts) { assert("TSType", node, opts); } -function assertTSBaseType(node, opts = {}) { +function assertTSBaseType(node, opts) { assert("TSBaseType", node, opts); } diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/ast-types/generated/index.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/ast-types/generated/index.js new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/index.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/index.js index 
18152119ac791c..a4ed3056921cc2 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/index.js @@ -3,1223 +3,1223 @@ Object.defineProperty(exports, "__esModule", { value: true }); -exports.ArrayExpression = exports.arrayExpression = arrayExpression; -exports.AssignmentExpression = exports.assignmentExpression = assignmentExpression; -exports.BinaryExpression = exports.binaryExpression = binaryExpression; -exports.InterpreterDirective = exports.interpreterDirective = interpreterDirective; -exports.Directive = exports.directive = directive; -exports.DirectiveLiteral = exports.directiveLiteral = directiveLiteral; -exports.BlockStatement = exports.blockStatement = blockStatement; -exports.BreakStatement = exports.breakStatement = breakStatement; -exports.CallExpression = exports.callExpression = callExpression; -exports.CatchClause = exports.catchClause = catchClause; -exports.ConditionalExpression = exports.conditionalExpression = conditionalExpression; -exports.ContinueStatement = exports.continueStatement = continueStatement; -exports.DebuggerStatement = exports.debuggerStatement = debuggerStatement; -exports.DoWhileStatement = exports.doWhileStatement = doWhileStatement; -exports.EmptyStatement = exports.emptyStatement = emptyStatement; -exports.ExpressionStatement = exports.expressionStatement = expressionStatement; -exports.File = exports.file = file; -exports.ForInStatement = exports.forInStatement = forInStatement; -exports.ForStatement = exports.forStatement = forStatement; -exports.FunctionDeclaration = exports.functionDeclaration = functionDeclaration; -exports.FunctionExpression = exports.functionExpression = functionExpression; -exports.Identifier = exports.identifier = identifier; -exports.IfStatement = exports.ifStatement = ifStatement; -exports.LabeledStatement = exports.labeledStatement = labeledStatement; -exports.StringLiteral = exports.stringLiteral = stringLiteral; -exports.NumericLiteral = exports.numericLiteral = numericLiteral; -exports.NullLiteral = exports.nullLiteral = nullLiteral; -exports.BooleanLiteral = exports.booleanLiteral = booleanLiteral; -exports.RegExpLiteral = exports.regExpLiteral = regExpLiteral; -exports.LogicalExpression = exports.logicalExpression = logicalExpression; -exports.MemberExpression = exports.memberExpression = memberExpression; -exports.NewExpression = exports.newExpression = newExpression; -exports.Program = exports.program = program; -exports.ObjectExpression = exports.objectExpression = objectExpression; -exports.ObjectMethod = exports.objectMethod = objectMethod; -exports.ObjectProperty = exports.objectProperty = objectProperty; -exports.RestElement = exports.restElement = restElement; -exports.ReturnStatement = exports.returnStatement = returnStatement; -exports.SequenceExpression = exports.sequenceExpression = sequenceExpression; -exports.ParenthesizedExpression = exports.parenthesizedExpression = parenthesizedExpression; -exports.SwitchCase = exports.switchCase = switchCase; -exports.SwitchStatement = exports.switchStatement = switchStatement; -exports.ThisExpression = exports.thisExpression = thisExpression; -exports.ThrowStatement = exports.throwStatement = throwStatement; -exports.TryStatement = exports.tryStatement = tryStatement; -exports.UnaryExpression = exports.unaryExpression = unaryExpression; -exports.UpdateExpression = exports.updateExpression = updateExpression; 
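// Editor's note (illustrative sketch, not part of the patch): in the
// asserts/generated/index.js hunk further above, the only change to each generated
// assert helper is that the `opts = {}` default parameter is dropped, so an omitted
// `opts` now reaches the shared `assert` helper as `undefined`. The stand-in
// `assert` below is hypothetical and only approximates what the real helper checks.
const assert = (type, node, opts) => {
  if (!node || node.type !== type) throw new Error(`Expected ${type}`);
  for (const key of Object.keys(opts || {})) {   // tolerate `undefined` opts
    if (node[key] !== opts[key]) throw new Error(`Mismatched ${key}`);
  }
};
// before: function assertTSRestType(node, opts = {}) { assert("TSRestType", node, opts); }
// after (as in the hunk above):
function assertTSRestType(node, opts) {
  assert("TSRestType", node, opts);
}
assertTSRestType({ type: "TSRestType" });   // opts omitted: now forwarded as undefined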
-exports.VariableDeclaration = exports.variableDeclaration = variableDeclaration; -exports.VariableDeclarator = exports.variableDeclarator = variableDeclarator; -exports.WhileStatement = exports.whileStatement = whileStatement; -exports.WithStatement = exports.withStatement = withStatement; -exports.AssignmentPattern = exports.assignmentPattern = assignmentPattern; -exports.ArrayPattern = exports.arrayPattern = arrayPattern; -exports.ArrowFunctionExpression = exports.arrowFunctionExpression = arrowFunctionExpression; -exports.ClassBody = exports.classBody = classBody; -exports.ClassExpression = exports.classExpression = classExpression; -exports.ClassDeclaration = exports.classDeclaration = classDeclaration; -exports.ExportAllDeclaration = exports.exportAllDeclaration = exportAllDeclaration; -exports.ExportDefaultDeclaration = exports.exportDefaultDeclaration = exportDefaultDeclaration; -exports.ExportNamedDeclaration = exports.exportNamedDeclaration = exportNamedDeclaration; -exports.ExportSpecifier = exports.exportSpecifier = exportSpecifier; -exports.ForOfStatement = exports.forOfStatement = forOfStatement; -exports.ImportDeclaration = exports.importDeclaration = importDeclaration; -exports.ImportDefaultSpecifier = exports.importDefaultSpecifier = importDefaultSpecifier; -exports.ImportNamespaceSpecifier = exports.importNamespaceSpecifier = importNamespaceSpecifier; -exports.ImportSpecifier = exports.importSpecifier = importSpecifier; -exports.MetaProperty = exports.metaProperty = metaProperty; -exports.ClassMethod = exports.classMethod = classMethod; -exports.ObjectPattern = exports.objectPattern = objectPattern; -exports.SpreadElement = exports.spreadElement = spreadElement; -exports.super = exports.Super = _super; -exports.TaggedTemplateExpression = exports.taggedTemplateExpression = taggedTemplateExpression; -exports.TemplateElement = exports.templateElement = templateElement; -exports.TemplateLiteral = exports.templateLiteral = templateLiteral; -exports.YieldExpression = exports.yieldExpression = yieldExpression; -exports.AwaitExpression = exports.awaitExpression = awaitExpression; -exports.import = exports.Import = _import; -exports.BigIntLiteral = exports.bigIntLiteral = bigIntLiteral; -exports.ExportNamespaceSpecifier = exports.exportNamespaceSpecifier = exportNamespaceSpecifier; -exports.OptionalMemberExpression = exports.optionalMemberExpression = optionalMemberExpression; -exports.OptionalCallExpression = exports.optionalCallExpression = optionalCallExpression; -exports.AnyTypeAnnotation = exports.anyTypeAnnotation = anyTypeAnnotation; -exports.ArrayTypeAnnotation = exports.arrayTypeAnnotation = arrayTypeAnnotation; -exports.BooleanTypeAnnotation = exports.booleanTypeAnnotation = booleanTypeAnnotation; -exports.BooleanLiteralTypeAnnotation = exports.booleanLiteralTypeAnnotation = booleanLiteralTypeAnnotation; -exports.NullLiteralTypeAnnotation = exports.nullLiteralTypeAnnotation = nullLiteralTypeAnnotation; -exports.ClassImplements = exports.classImplements = classImplements; -exports.DeclareClass = exports.declareClass = declareClass; -exports.DeclareFunction = exports.declareFunction = declareFunction; -exports.DeclareInterface = exports.declareInterface = declareInterface; -exports.DeclareModule = exports.declareModule = declareModule; -exports.DeclareModuleExports = exports.declareModuleExports = declareModuleExports; -exports.DeclareTypeAlias = exports.declareTypeAlias = declareTypeAlias; -exports.DeclareOpaqueType = exports.declareOpaqueType = declareOpaqueType; 
-exports.DeclareVariable = exports.declareVariable = declareVariable; -exports.DeclareExportDeclaration = exports.declareExportDeclaration = declareExportDeclaration; -exports.DeclareExportAllDeclaration = exports.declareExportAllDeclaration = declareExportAllDeclaration; -exports.DeclaredPredicate = exports.declaredPredicate = declaredPredicate; -exports.ExistsTypeAnnotation = exports.existsTypeAnnotation = existsTypeAnnotation; -exports.FunctionTypeAnnotation = exports.functionTypeAnnotation = functionTypeAnnotation; -exports.FunctionTypeParam = exports.functionTypeParam = functionTypeParam; -exports.GenericTypeAnnotation = exports.genericTypeAnnotation = genericTypeAnnotation; -exports.InferredPredicate = exports.inferredPredicate = inferredPredicate; -exports.InterfaceExtends = exports.interfaceExtends = interfaceExtends; -exports.InterfaceDeclaration = exports.interfaceDeclaration = interfaceDeclaration; -exports.InterfaceTypeAnnotation = exports.interfaceTypeAnnotation = interfaceTypeAnnotation; -exports.IntersectionTypeAnnotation = exports.intersectionTypeAnnotation = intersectionTypeAnnotation; -exports.MixedTypeAnnotation = exports.mixedTypeAnnotation = mixedTypeAnnotation; -exports.EmptyTypeAnnotation = exports.emptyTypeAnnotation = emptyTypeAnnotation; -exports.NullableTypeAnnotation = exports.nullableTypeAnnotation = nullableTypeAnnotation; -exports.NumberLiteralTypeAnnotation = exports.numberLiteralTypeAnnotation = numberLiteralTypeAnnotation; -exports.NumberTypeAnnotation = exports.numberTypeAnnotation = numberTypeAnnotation; -exports.ObjectTypeAnnotation = exports.objectTypeAnnotation = objectTypeAnnotation; -exports.ObjectTypeInternalSlot = exports.objectTypeInternalSlot = objectTypeInternalSlot; -exports.ObjectTypeCallProperty = exports.objectTypeCallProperty = objectTypeCallProperty; -exports.ObjectTypeIndexer = exports.objectTypeIndexer = objectTypeIndexer; -exports.ObjectTypeProperty = exports.objectTypeProperty = objectTypeProperty; -exports.ObjectTypeSpreadProperty = exports.objectTypeSpreadProperty = objectTypeSpreadProperty; -exports.OpaqueType = exports.opaqueType = opaqueType; -exports.QualifiedTypeIdentifier = exports.qualifiedTypeIdentifier = qualifiedTypeIdentifier; -exports.StringLiteralTypeAnnotation = exports.stringLiteralTypeAnnotation = stringLiteralTypeAnnotation; -exports.StringTypeAnnotation = exports.stringTypeAnnotation = stringTypeAnnotation; -exports.SymbolTypeAnnotation = exports.symbolTypeAnnotation = symbolTypeAnnotation; -exports.ThisTypeAnnotation = exports.thisTypeAnnotation = thisTypeAnnotation; -exports.TupleTypeAnnotation = exports.tupleTypeAnnotation = tupleTypeAnnotation; -exports.TypeofTypeAnnotation = exports.typeofTypeAnnotation = typeofTypeAnnotation; -exports.TypeAlias = exports.typeAlias = typeAlias; -exports.TypeAnnotation = exports.typeAnnotation = typeAnnotation; -exports.TypeCastExpression = exports.typeCastExpression = typeCastExpression; -exports.TypeParameter = exports.typeParameter = typeParameter; -exports.TypeParameterDeclaration = exports.typeParameterDeclaration = typeParameterDeclaration; -exports.TypeParameterInstantiation = exports.typeParameterInstantiation = typeParameterInstantiation; -exports.UnionTypeAnnotation = exports.unionTypeAnnotation = unionTypeAnnotation; -exports.Variance = exports.variance = variance; -exports.VoidTypeAnnotation = exports.voidTypeAnnotation = voidTypeAnnotation; -exports.EnumDeclaration = exports.enumDeclaration = enumDeclaration; -exports.EnumBooleanBody = exports.enumBooleanBody = 
enumBooleanBody; -exports.EnumNumberBody = exports.enumNumberBody = enumNumberBody; -exports.EnumStringBody = exports.enumStringBody = enumStringBody; -exports.EnumSymbolBody = exports.enumSymbolBody = enumSymbolBody; -exports.EnumBooleanMember = exports.enumBooleanMember = enumBooleanMember; -exports.EnumNumberMember = exports.enumNumberMember = enumNumberMember; -exports.EnumStringMember = exports.enumStringMember = enumStringMember; -exports.EnumDefaultedMember = exports.enumDefaultedMember = enumDefaultedMember; -exports.jSXAttribute = exports.JSXAttribute = exports.jsxAttribute = jsxAttribute; -exports.jSXClosingElement = exports.JSXClosingElement = exports.jsxClosingElement = jsxClosingElement; -exports.jSXElement = exports.JSXElement = exports.jsxElement = jsxElement; -exports.jSXEmptyExpression = exports.JSXEmptyExpression = exports.jsxEmptyExpression = jsxEmptyExpression; -exports.jSXExpressionContainer = exports.JSXExpressionContainer = exports.jsxExpressionContainer = jsxExpressionContainer; -exports.jSXSpreadChild = exports.JSXSpreadChild = exports.jsxSpreadChild = jsxSpreadChild; -exports.jSXIdentifier = exports.JSXIdentifier = exports.jsxIdentifier = jsxIdentifier; -exports.jSXMemberExpression = exports.JSXMemberExpression = exports.jsxMemberExpression = jsxMemberExpression; -exports.jSXNamespacedName = exports.JSXNamespacedName = exports.jsxNamespacedName = jsxNamespacedName; -exports.jSXOpeningElement = exports.JSXOpeningElement = exports.jsxOpeningElement = jsxOpeningElement; -exports.jSXSpreadAttribute = exports.JSXSpreadAttribute = exports.jsxSpreadAttribute = jsxSpreadAttribute; -exports.jSXText = exports.JSXText = exports.jsxText = jsxText; -exports.jSXFragment = exports.JSXFragment = exports.jsxFragment = jsxFragment; -exports.jSXOpeningFragment = exports.JSXOpeningFragment = exports.jsxOpeningFragment = jsxOpeningFragment; -exports.jSXClosingFragment = exports.JSXClosingFragment = exports.jsxClosingFragment = jsxClosingFragment; -exports.Noop = exports.noop = noop; -exports.Placeholder = exports.placeholder = placeholder; -exports.V8IntrinsicIdentifier = exports.v8IntrinsicIdentifier = v8IntrinsicIdentifier; -exports.ArgumentPlaceholder = exports.argumentPlaceholder = argumentPlaceholder; -exports.BindExpression = exports.bindExpression = bindExpression; -exports.ClassProperty = exports.classProperty = classProperty; -exports.PipelineTopicExpression = exports.pipelineTopicExpression = pipelineTopicExpression; -exports.PipelineBareFunction = exports.pipelineBareFunction = pipelineBareFunction; -exports.PipelinePrimaryTopicReference = exports.pipelinePrimaryTopicReference = pipelinePrimaryTopicReference; -exports.ClassPrivateProperty = exports.classPrivateProperty = classPrivateProperty; -exports.ClassPrivateMethod = exports.classPrivateMethod = classPrivateMethod; -exports.ImportAttribute = exports.importAttribute = importAttribute; -exports.Decorator = exports.decorator = decorator; -exports.DoExpression = exports.doExpression = doExpression; -exports.ExportDefaultSpecifier = exports.exportDefaultSpecifier = exportDefaultSpecifier; -exports.PrivateName = exports.privateName = privateName; -exports.RecordExpression = exports.recordExpression = recordExpression; -exports.TupleExpression = exports.tupleExpression = tupleExpression; -exports.DecimalLiteral = exports.decimalLiteral = decimalLiteral; -exports.StaticBlock = exports.staticBlock = staticBlock; -exports.tSParameterProperty = exports.TSParameterProperty = exports.tsParameterProperty = tsParameterProperty; 
-exports.tSDeclareFunction = exports.TSDeclareFunction = exports.tsDeclareFunction = tsDeclareFunction; -exports.tSDeclareMethod = exports.TSDeclareMethod = exports.tsDeclareMethod = tsDeclareMethod; -exports.tSQualifiedName = exports.TSQualifiedName = exports.tsQualifiedName = tsQualifiedName; -exports.tSCallSignatureDeclaration = exports.TSCallSignatureDeclaration = exports.tsCallSignatureDeclaration = tsCallSignatureDeclaration; -exports.tSConstructSignatureDeclaration = exports.TSConstructSignatureDeclaration = exports.tsConstructSignatureDeclaration = tsConstructSignatureDeclaration; -exports.tSPropertySignature = exports.TSPropertySignature = exports.tsPropertySignature = tsPropertySignature; -exports.tSMethodSignature = exports.TSMethodSignature = exports.tsMethodSignature = tsMethodSignature; -exports.tSIndexSignature = exports.TSIndexSignature = exports.tsIndexSignature = tsIndexSignature; -exports.tSAnyKeyword = exports.TSAnyKeyword = exports.tsAnyKeyword = tsAnyKeyword; -exports.tSBooleanKeyword = exports.TSBooleanKeyword = exports.tsBooleanKeyword = tsBooleanKeyword; -exports.tSBigIntKeyword = exports.TSBigIntKeyword = exports.tsBigIntKeyword = tsBigIntKeyword; -exports.tSIntrinsicKeyword = exports.TSIntrinsicKeyword = exports.tsIntrinsicKeyword = tsIntrinsicKeyword; -exports.tSNeverKeyword = exports.TSNeverKeyword = exports.tsNeverKeyword = tsNeverKeyword; -exports.tSNullKeyword = exports.TSNullKeyword = exports.tsNullKeyword = tsNullKeyword; -exports.tSNumberKeyword = exports.TSNumberKeyword = exports.tsNumberKeyword = tsNumberKeyword; -exports.tSObjectKeyword = exports.TSObjectKeyword = exports.tsObjectKeyword = tsObjectKeyword; -exports.tSStringKeyword = exports.TSStringKeyword = exports.tsStringKeyword = tsStringKeyword; -exports.tSSymbolKeyword = exports.TSSymbolKeyword = exports.tsSymbolKeyword = tsSymbolKeyword; -exports.tSUndefinedKeyword = exports.TSUndefinedKeyword = exports.tsUndefinedKeyword = tsUndefinedKeyword; -exports.tSUnknownKeyword = exports.TSUnknownKeyword = exports.tsUnknownKeyword = tsUnknownKeyword; -exports.tSVoidKeyword = exports.TSVoidKeyword = exports.tsVoidKeyword = tsVoidKeyword; -exports.tSThisType = exports.TSThisType = exports.tsThisType = tsThisType; -exports.tSFunctionType = exports.TSFunctionType = exports.tsFunctionType = tsFunctionType; -exports.tSConstructorType = exports.TSConstructorType = exports.tsConstructorType = tsConstructorType; -exports.tSTypeReference = exports.TSTypeReference = exports.tsTypeReference = tsTypeReference; -exports.tSTypePredicate = exports.TSTypePredicate = exports.tsTypePredicate = tsTypePredicate; -exports.tSTypeQuery = exports.TSTypeQuery = exports.tsTypeQuery = tsTypeQuery; -exports.tSTypeLiteral = exports.TSTypeLiteral = exports.tsTypeLiteral = tsTypeLiteral; -exports.tSArrayType = exports.TSArrayType = exports.tsArrayType = tsArrayType; -exports.tSTupleType = exports.TSTupleType = exports.tsTupleType = tsTupleType; -exports.tSOptionalType = exports.TSOptionalType = exports.tsOptionalType = tsOptionalType; -exports.tSRestType = exports.TSRestType = exports.tsRestType = tsRestType; -exports.tSNamedTupleMember = exports.TSNamedTupleMember = exports.tsNamedTupleMember = tsNamedTupleMember; -exports.tSUnionType = exports.TSUnionType = exports.tsUnionType = tsUnionType; -exports.tSIntersectionType = exports.TSIntersectionType = exports.tsIntersectionType = tsIntersectionType; -exports.tSConditionalType = exports.TSConditionalType = exports.tsConditionalType = tsConditionalType; -exports.tSInferType = 
exports.TSInferType = exports.tsInferType = tsInferType; -exports.tSParenthesizedType = exports.TSParenthesizedType = exports.tsParenthesizedType = tsParenthesizedType; -exports.tSTypeOperator = exports.TSTypeOperator = exports.tsTypeOperator = tsTypeOperator; -exports.tSIndexedAccessType = exports.TSIndexedAccessType = exports.tsIndexedAccessType = tsIndexedAccessType; -exports.tSMappedType = exports.TSMappedType = exports.tsMappedType = tsMappedType; -exports.tSLiteralType = exports.TSLiteralType = exports.tsLiteralType = tsLiteralType; -exports.tSExpressionWithTypeArguments = exports.TSExpressionWithTypeArguments = exports.tsExpressionWithTypeArguments = tsExpressionWithTypeArguments; -exports.tSInterfaceDeclaration = exports.TSInterfaceDeclaration = exports.tsInterfaceDeclaration = tsInterfaceDeclaration; -exports.tSInterfaceBody = exports.TSInterfaceBody = exports.tsInterfaceBody = tsInterfaceBody; -exports.tSTypeAliasDeclaration = exports.TSTypeAliasDeclaration = exports.tsTypeAliasDeclaration = tsTypeAliasDeclaration; -exports.tSAsExpression = exports.TSAsExpression = exports.tsAsExpression = tsAsExpression; -exports.tSTypeAssertion = exports.TSTypeAssertion = exports.tsTypeAssertion = tsTypeAssertion; -exports.tSEnumDeclaration = exports.TSEnumDeclaration = exports.tsEnumDeclaration = tsEnumDeclaration; -exports.tSEnumMember = exports.TSEnumMember = exports.tsEnumMember = tsEnumMember; -exports.tSModuleDeclaration = exports.TSModuleDeclaration = exports.tsModuleDeclaration = tsModuleDeclaration; -exports.tSModuleBlock = exports.TSModuleBlock = exports.tsModuleBlock = tsModuleBlock; -exports.tSImportType = exports.TSImportType = exports.tsImportType = tsImportType; -exports.tSImportEqualsDeclaration = exports.TSImportEqualsDeclaration = exports.tsImportEqualsDeclaration = tsImportEqualsDeclaration; -exports.tSExternalModuleReference = exports.TSExternalModuleReference = exports.tsExternalModuleReference = tsExternalModuleReference; -exports.tSNonNullExpression = exports.TSNonNullExpression = exports.tsNonNullExpression = tsNonNullExpression; -exports.tSExportAssignment = exports.TSExportAssignment = exports.tsExportAssignment = tsExportAssignment; -exports.tSNamespaceExportDeclaration = exports.TSNamespaceExportDeclaration = exports.tsNamespaceExportDeclaration = tsNamespaceExportDeclaration; -exports.tSTypeAnnotation = exports.TSTypeAnnotation = exports.tsTypeAnnotation = tsTypeAnnotation; -exports.tSTypeParameterInstantiation = exports.TSTypeParameterInstantiation = exports.tsTypeParameterInstantiation = tsTypeParameterInstantiation; -exports.tSTypeParameterDeclaration = exports.TSTypeParameterDeclaration = exports.tsTypeParameterDeclaration = tsTypeParameterDeclaration; -exports.tSTypeParameter = exports.TSTypeParameter = exports.tsTypeParameter = tsTypeParameter; -exports.numberLiteral = exports.NumberLiteral = NumberLiteral; -exports.regexLiteral = exports.RegexLiteral = RegexLiteral; -exports.restProperty = exports.RestProperty = RestProperty; -exports.spreadProperty = exports.SpreadProperty = SpreadProperty; +exports.arrayExpression = arrayExpression; +exports.assignmentExpression = assignmentExpression; +exports.binaryExpression = binaryExpression; +exports.interpreterDirective = interpreterDirective; +exports.directive = directive; +exports.directiveLiteral = directiveLiteral; +exports.blockStatement = blockStatement; +exports.breakStatement = breakStatement; +exports.callExpression = callExpression; +exports.catchClause = catchClause; +exports.conditionalExpression = 
conditionalExpression; +exports.continueStatement = continueStatement; +exports.debuggerStatement = debuggerStatement; +exports.doWhileStatement = doWhileStatement; +exports.emptyStatement = emptyStatement; +exports.expressionStatement = expressionStatement; +exports.file = file; +exports.forInStatement = forInStatement; +exports.forStatement = forStatement; +exports.functionDeclaration = functionDeclaration; +exports.functionExpression = functionExpression; +exports.identifier = identifier; +exports.ifStatement = ifStatement; +exports.labeledStatement = labeledStatement; +exports.stringLiteral = stringLiteral; +exports.numericLiteral = numericLiteral; +exports.nullLiteral = nullLiteral; +exports.booleanLiteral = booleanLiteral; +exports.regExpLiteral = regExpLiteral; +exports.logicalExpression = logicalExpression; +exports.memberExpression = memberExpression; +exports.newExpression = newExpression; +exports.program = program; +exports.objectExpression = objectExpression; +exports.objectMethod = objectMethod; +exports.objectProperty = objectProperty; +exports.restElement = restElement; +exports.returnStatement = returnStatement; +exports.sequenceExpression = sequenceExpression; +exports.parenthesizedExpression = parenthesizedExpression; +exports.switchCase = switchCase; +exports.switchStatement = switchStatement; +exports.thisExpression = thisExpression; +exports.throwStatement = throwStatement; +exports.tryStatement = tryStatement; +exports.unaryExpression = unaryExpression; +exports.updateExpression = updateExpression; +exports.variableDeclaration = variableDeclaration; +exports.variableDeclarator = variableDeclarator; +exports.whileStatement = whileStatement; +exports.withStatement = withStatement; +exports.assignmentPattern = assignmentPattern; +exports.arrayPattern = arrayPattern; +exports.arrowFunctionExpression = arrowFunctionExpression; +exports.classBody = classBody; +exports.classExpression = classExpression; +exports.classDeclaration = classDeclaration; +exports.exportAllDeclaration = exportAllDeclaration; +exports.exportDefaultDeclaration = exportDefaultDeclaration; +exports.exportNamedDeclaration = exportNamedDeclaration; +exports.exportSpecifier = exportSpecifier; +exports.forOfStatement = forOfStatement; +exports.importDeclaration = importDeclaration; +exports.importDefaultSpecifier = importDefaultSpecifier; +exports.importNamespaceSpecifier = importNamespaceSpecifier; +exports.importSpecifier = importSpecifier; +exports.metaProperty = metaProperty; +exports.classMethod = classMethod; +exports.objectPattern = objectPattern; +exports.spreadElement = spreadElement; +exports.super = _super; +exports.taggedTemplateExpression = taggedTemplateExpression; +exports.templateElement = templateElement; +exports.templateLiteral = templateLiteral; +exports.yieldExpression = yieldExpression; +exports.awaitExpression = awaitExpression; +exports.import = _import; +exports.bigIntLiteral = bigIntLiteral; +exports.exportNamespaceSpecifier = exportNamespaceSpecifier; +exports.optionalMemberExpression = optionalMemberExpression; +exports.optionalCallExpression = optionalCallExpression; +exports.anyTypeAnnotation = anyTypeAnnotation; +exports.arrayTypeAnnotation = arrayTypeAnnotation; +exports.booleanTypeAnnotation = booleanTypeAnnotation; +exports.booleanLiteralTypeAnnotation = booleanLiteralTypeAnnotation; +exports.nullLiteralTypeAnnotation = nullLiteralTypeAnnotation; +exports.classImplements = classImplements; +exports.declareClass = declareClass; +exports.declareFunction = declareFunction; 
+exports.declareInterface = declareInterface; +exports.declareModule = declareModule; +exports.declareModuleExports = declareModuleExports; +exports.declareTypeAlias = declareTypeAlias; +exports.declareOpaqueType = declareOpaqueType; +exports.declareVariable = declareVariable; +exports.declareExportDeclaration = declareExportDeclaration; +exports.declareExportAllDeclaration = declareExportAllDeclaration; +exports.declaredPredicate = declaredPredicate; +exports.existsTypeAnnotation = existsTypeAnnotation; +exports.functionTypeAnnotation = functionTypeAnnotation; +exports.functionTypeParam = functionTypeParam; +exports.genericTypeAnnotation = genericTypeAnnotation; +exports.inferredPredicate = inferredPredicate; +exports.interfaceExtends = interfaceExtends; +exports.interfaceDeclaration = interfaceDeclaration; +exports.interfaceTypeAnnotation = interfaceTypeAnnotation; +exports.intersectionTypeAnnotation = intersectionTypeAnnotation; +exports.mixedTypeAnnotation = mixedTypeAnnotation; +exports.emptyTypeAnnotation = emptyTypeAnnotation; +exports.nullableTypeAnnotation = nullableTypeAnnotation; +exports.numberLiteralTypeAnnotation = numberLiteralTypeAnnotation; +exports.numberTypeAnnotation = numberTypeAnnotation; +exports.objectTypeAnnotation = objectTypeAnnotation; +exports.objectTypeInternalSlot = objectTypeInternalSlot; +exports.objectTypeCallProperty = objectTypeCallProperty; +exports.objectTypeIndexer = objectTypeIndexer; +exports.objectTypeProperty = objectTypeProperty; +exports.objectTypeSpreadProperty = objectTypeSpreadProperty; +exports.opaqueType = opaqueType; +exports.qualifiedTypeIdentifier = qualifiedTypeIdentifier; +exports.stringLiteralTypeAnnotation = stringLiteralTypeAnnotation; +exports.stringTypeAnnotation = stringTypeAnnotation; +exports.symbolTypeAnnotation = symbolTypeAnnotation; +exports.thisTypeAnnotation = thisTypeAnnotation; +exports.tupleTypeAnnotation = tupleTypeAnnotation; +exports.typeofTypeAnnotation = typeofTypeAnnotation; +exports.typeAlias = typeAlias; +exports.typeAnnotation = typeAnnotation; +exports.typeCastExpression = typeCastExpression; +exports.typeParameter = typeParameter; +exports.typeParameterDeclaration = typeParameterDeclaration; +exports.typeParameterInstantiation = typeParameterInstantiation; +exports.unionTypeAnnotation = unionTypeAnnotation; +exports.variance = variance; +exports.voidTypeAnnotation = voidTypeAnnotation; +exports.enumDeclaration = enumDeclaration; +exports.enumBooleanBody = enumBooleanBody; +exports.enumNumberBody = enumNumberBody; +exports.enumStringBody = enumStringBody; +exports.enumSymbolBody = enumSymbolBody; +exports.enumBooleanMember = enumBooleanMember; +exports.enumNumberMember = enumNumberMember; +exports.enumStringMember = enumStringMember; +exports.enumDefaultedMember = enumDefaultedMember; +exports.jSXAttribute = exports.jsxAttribute = jsxAttribute; +exports.jSXClosingElement = exports.jsxClosingElement = jsxClosingElement; +exports.jSXElement = exports.jsxElement = jsxElement; +exports.jSXEmptyExpression = exports.jsxEmptyExpression = jsxEmptyExpression; +exports.jSXExpressionContainer = exports.jsxExpressionContainer = jsxExpressionContainer; +exports.jSXSpreadChild = exports.jsxSpreadChild = jsxSpreadChild; +exports.jSXIdentifier = exports.jsxIdentifier = jsxIdentifier; +exports.jSXMemberExpression = exports.jsxMemberExpression = jsxMemberExpression; +exports.jSXNamespacedName = exports.jsxNamespacedName = jsxNamespacedName; +exports.jSXOpeningElement = exports.jsxOpeningElement = jsxOpeningElement; 
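// Editor's note (illustrative, not part of the patch): the export rewrite in this
// hunk drops the PascalCase aliases that the previous build attached beside each
// camelCase builder (`exports.ArrayExpression = exports.arrayExpression = ...`);
// only the camelCase names, plus the legacy `jSX*` / `tS*` spellings, are still
// exported from this generated file. Whether the PascalCase names remain reachable
// through the @babel/types package root is outside this hunk. A hedged sketch of
// the observable difference for code that deep-requires this file (assumes the
// package is installed; the deep-import path is an assumption):
const generatedBuilders = require("@babel/types/lib/builders/generated");
console.log(typeof generatedBuilders.arrayExpression);  // "function" before and after
console.log(typeof generatedBuilders.ArrayExpression);  // "function" before, "undefined" after this change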
+exports.jSXSpreadAttribute = exports.jsxSpreadAttribute = jsxSpreadAttribute; +exports.jSXText = exports.jsxText = jsxText; +exports.jSXFragment = exports.jsxFragment = jsxFragment; +exports.jSXOpeningFragment = exports.jsxOpeningFragment = jsxOpeningFragment; +exports.jSXClosingFragment = exports.jsxClosingFragment = jsxClosingFragment; +exports.noop = noop; +exports.placeholder = placeholder; +exports.v8IntrinsicIdentifier = v8IntrinsicIdentifier; +exports.argumentPlaceholder = argumentPlaceholder; +exports.bindExpression = bindExpression; +exports.classProperty = classProperty; +exports.pipelineTopicExpression = pipelineTopicExpression; +exports.pipelineBareFunction = pipelineBareFunction; +exports.pipelinePrimaryTopicReference = pipelinePrimaryTopicReference; +exports.classPrivateProperty = classPrivateProperty; +exports.classPrivateMethod = classPrivateMethod; +exports.importAttribute = importAttribute; +exports.decorator = decorator; +exports.doExpression = doExpression; +exports.exportDefaultSpecifier = exportDefaultSpecifier; +exports.privateName = privateName; +exports.recordExpression = recordExpression; +exports.tupleExpression = tupleExpression; +exports.decimalLiteral = decimalLiteral; +exports.staticBlock = staticBlock; +exports.tSParameterProperty = exports.tsParameterProperty = tsParameterProperty; +exports.tSDeclareFunction = exports.tsDeclareFunction = tsDeclareFunction; +exports.tSDeclareMethod = exports.tsDeclareMethod = tsDeclareMethod; +exports.tSQualifiedName = exports.tsQualifiedName = tsQualifiedName; +exports.tSCallSignatureDeclaration = exports.tsCallSignatureDeclaration = tsCallSignatureDeclaration; +exports.tSConstructSignatureDeclaration = exports.tsConstructSignatureDeclaration = tsConstructSignatureDeclaration; +exports.tSPropertySignature = exports.tsPropertySignature = tsPropertySignature; +exports.tSMethodSignature = exports.tsMethodSignature = tsMethodSignature; +exports.tSIndexSignature = exports.tsIndexSignature = tsIndexSignature; +exports.tSAnyKeyword = exports.tsAnyKeyword = tsAnyKeyword; +exports.tSBooleanKeyword = exports.tsBooleanKeyword = tsBooleanKeyword; +exports.tSBigIntKeyword = exports.tsBigIntKeyword = tsBigIntKeyword; +exports.tSIntrinsicKeyword = exports.tsIntrinsicKeyword = tsIntrinsicKeyword; +exports.tSNeverKeyword = exports.tsNeverKeyword = tsNeverKeyword; +exports.tSNullKeyword = exports.tsNullKeyword = tsNullKeyword; +exports.tSNumberKeyword = exports.tsNumberKeyword = tsNumberKeyword; +exports.tSObjectKeyword = exports.tsObjectKeyword = tsObjectKeyword; +exports.tSStringKeyword = exports.tsStringKeyword = tsStringKeyword; +exports.tSSymbolKeyword = exports.tsSymbolKeyword = tsSymbolKeyword; +exports.tSUndefinedKeyword = exports.tsUndefinedKeyword = tsUndefinedKeyword; +exports.tSUnknownKeyword = exports.tsUnknownKeyword = tsUnknownKeyword; +exports.tSVoidKeyword = exports.tsVoidKeyword = tsVoidKeyword; +exports.tSThisType = exports.tsThisType = tsThisType; +exports.tSFunctionType = exports.tsFunctionType = tsFunctionType; +exports.tSConstructorType = exports.tsConstructorType = tsConstructorType; +exports.tSTypeReference = exports.tsTypeReference = tsTypeReference; +exports.tSTypePredicate = exports.tsTypePredicate = tsTypePredicate; +exports.tSTypeQuery = exports.tsTypeQuery = tsTypeQuery; +exports.tSTypeLiteral = exports.tsTypeLiteral = tsTypeLiteral; +exports.tSArrayType = exports.tsArrayType = tsArrayType; +exports.tSTupleType = exports.tsTupleType = tsTupleType; +exports.tSOptionalType = exports.tsOptionalType = 
tsOptionalType; +exports.tSRestType = exports.tsRestType = tsRestType; +exports.tSNamedTupleMember = exports.tsNamedTupleMember = tsNamedTupleMember; +exports.tSUnionType = exports.tsUnionType = tsUnionType; +exports.tSIntersectionType = exports.tsIntersectionType = tsIntersectionType; +exports.tSConditionalType = exports.tsConditionalType = tsConditionalType; +exports.tSInferType = exports.tsInferType = tsInferType; +exports.tSParenthesizedType = exports.tsParenthesizedType = tsParenthesizedType; +exports.tSTypeOperator = exports.tsTypeOperator = tsTypeOperator; +exports.tSIndexedAccessType = exports.tsIndexedAccessType = tsIndexedAccessType; +exports.tSMappedType = exports.tsMappedType = tsMappedType; +exports.tSLiteralType = exports.tsLiteralType = tsLiteralType; +exports.tSExpressionWithTypeArguments = exports.tsExpressionWithTypeArguments = tsExpressionWithTypeArguments; +exports.tSInterfaceDeclaration = exports.tsInterfaceDeclaration = tsInterfaceDeclaration; +exports.tSInterfaceBody = exports.tsInterfaceBody = tsInterfaceBody; +exports.tSTypeAliasDeclaration = exports.tsTypeAliasDeclaration = tsTypeAliasDeclaration; +exports.tSAsExpression = exports.tsAsExpression = tsAsExpression; +exports.tSTypeAssertion = exports.tsTypeAssertion = tsTypeAssertion; +exports.tSEnumDeclaration = exports.tsEnumDeclaration = tsEnumDeclaration; +exports.tSEnumMember = exports.tsEnumMember = tsEnumMember; +exports.tSModuleDeclaration = exports.tsModuleDeclaration = tsModuleDeclaration; +exports.tSModuleBlock = exports.tsModuleBlock = tsModuleBlock; +exports.tSImportType = exports.tsImportType = tsImportType; +exports.tSImportEqualsDeclaration = exports.tsImportEqualsDeclaration = tsImportEqualsDeclaration; +exports.tSExternalModuleReference = exports.tsExternalModuleReference = tsExternalModuleReference; +exports.tSNonNullExpression = exports.tsNonNullExpression = tsNonNullExpression; +exports.tSExportAssignment = exports.tsExportAssignment = tsExportAssignment; +exports.tSNamespaceExportDeclaration = exports.tsNamespaceExportDeclaration = tsNamespaceExportDeclaration; +exports.tSTypeAnnotation = exports.tsTypeAnnotation = tsTypeAnnotation; +exports.tSTypeParameterInstantiation = exports.tsTypeParameterInstantiation = tsTypeParameterInstantiation; +exports.tSTypeParameterDeclaration = exports.tsTypeParameterDeclaration = tsTypeParameterDeclaration; +exports.tSTypeParameter = exports.tsTypeParameter = tsTypeParameter; +exports.numberLiteral = NumberLiteral; +exports.regexLiteral = RegexLiteral; +exports.restProperty = RestProperty; +exports.spreadProperty = SpreadProperty; var _builder = _interopRequireDefault(require("../builder")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } -function arrayExpression(...args) { - return (0, _builder.default)("ArrayExpression", ...args); +function arrayExpression(elements) { + return (0, _builder.default)("ArrayExpression", ...arguments); } -function assignmentExpression(...args) { - return (0, _builder.default)("AssignmentExpression", ...args); +function assignmentExpression(operator, left, right) { + return (0, _builder.default)("AssignmentExpression", ...arguments); } -function binaryExpression(...args) { - return (0, _builder.default)("BinaryExpression", ...args); +function binaryExpression(operator, left, right) { + return (0, _builder.default)("BinaryExpression", ...arguments); } -function interpreterDirective(...args) { - return (0, _builder.default)("InterpreterDirective", ...args); +function interpreterDirective(value) { + return (0, _builder.default)("InterpreterDirective", ...arguments); } -function directive(...args) { - return (0, _builder.default)("Directive", ...args); +function directive(value) { + return (0, _builder.default)("Directive", ...arguments); } -function directiveLiteral(...args) { - return (0, _builder.default)("DirectiveLiteral", ...args); +function directiveLiteral(value) { + return (0, _builder.default)("DirectiveLiteral", ...arguments); } -function blockStatement(...args) { - return (0, _builder.default)("BlockStatement", ...args); +function blockStatement(body, directives) { + return (0, _builder.default)("BlockStatement", ...arguments); } -function breakStatement(...args) { - return (0, _builder.default)("BreakStatement", ...args); +function breakStatement(label) { + return (0, _builder.default)("BreakStatement", ...arguments); } -function callExpression(...args) { - return (0, _builder.default)("CallExpression", ...args); +function callExpression(callee, _arguments) { + return (0, _builder.default)("CallExpression", ...arguments); } -function catchClause(...args) { - return (0, _builder.default)("CatchClause", ...args); +function catchClause(param, body) { + return (0, _builder.default)("CatchClause", ...arguments); } -function conditionalExpression(...args) { - return (0, _builder.default)("ConditionalExpression", ...args); +function conditionalExpression(test, consequent, alternate) { + return (0, _builder.default)("ConditionalExpression", ...arguments); } -function continueStatement(...args) { - return (0, _builder.default)("ContinueStatement", ...args); +function continueStatement(label) { + return (0, _builder.default)("ContinueStatement", ...arguments); } -function debuggerStatement(...args) { - return (0, _builder.default)("DebuggerStatement", ...args); +function debuggerStatement() { + return (0, _builder.default)("DebuggerStatement", ...arguments); } -function doWhileStatement(...args) { - return (0, _builder.default)("DoWhileStatement", ...args); +function doWhileStatement(test, body) { + return (0, _builder.default)("DoWhileStatement", ...arguments); } -function emptyStatement(...args) { - return (0, _builder.default)("EmptyStatement", ...args); +function emptyStatement() { + return (0, _builder.default)("EmptyStatement", ...arguments); } -function expressionStatement(...args) { - return (0, _builder.default)("ExpressionStatement", ...args); +function expressionStatement(expression) { + return (0, _builder.default)("ExpressionStatement", ...arguments); } -function file(...args) { - return (0, _builder.default)("File", ...args); +function file(program, comments, tokens) { + return (0, _builder.default)("File", ...arguments); } -function 
forInStatement(...args) { - return (0, _builder.default)("ForInStatement", ...args); +function forInStatement(left, right, body) { + return (0, _builder.default)("ForInStatement", ...arguments); } -function forStatement(...args) { - return (0, _builder.default)("ForStatement", ...args); +function forStatement(init, test, update, body) { + return (0, _builder.default)("ForStatement", ...arguments); } -function functionDeclaration(...args) { - return (0, _builder.default)("FunctionDeclaration", ...args); +function functionDeclaration(id, params, body, generator, async) { + return (0, _builder.default)("FunctionDeclaration", ...arguments); } -function functionExpression(...args) { - return (0, _builder.default)("FunctionExpression", ...args); +function functionExpression(id, params, body, generator, async) { + return (0, _builder.default)("FunctionExpression", ...arguments); } -function identifier(...args) { - return (0, _builder.default)("Identifier", ...args); +function identifier(name) { + return (0, _builder.default)("Identifier", ...arguments); } -function ifStatement(...args) { - return (0, _builder.default)("IfStatement", ...args); +function ifStatement(test, consequent, alternate) { + return (0, _builder.default)("IfStatement", ...arguments); } -function labeledStatement(...args) { - return (0, _builder.default)("LabeledStatement", ...args); +function labeledStatement(label, body) { + return (0, _builder.default)("LabeledStatement", ...arguments); } -function stringLiteral(...args) { - return (0, _builder.default)("StringLiteral", ...args); +function stringLiteral(value) { + return (0, _builder.default)("StringLiteral", ...arguments); } -function numericLiteral(...args) { - return (0, _builder.default)("NumericLiteral", ...args); +function numericLiteral(value) { + return (0, _builder.default)("NumericLiteral", ...arguments); } -function nullLiteral(...args) { - return (0, _builder.default)("NullLiteral", ...args); +function nullLiteral() { + return (0, _builder.default)("NullLiteral", ...arguments); } -function booleanLiteral(...args) { - return (0, _builder.default)("BooleanLiteral", ...args); +function booleanLiteral(value) { + return (0, _builder.default)("BooleanLiteral", ...arguments); } -function regExpLiteral(...args) { - return (0, _builder.default)("RegExpLiteral", ...args); +function regExpLiteral(pattern, flags) { + return (0, _builder.default)("RegExpLiteral", ...arguments); } -function logicalExpression(...args) { - return (0, _builder.default)("LogicalExpression", ...args); +function logicalExpression(operator, left, right) { + return (0, _builder.default)("LogicalExpression", ...arguments); } -function memberExpression(...args) { - return (0, _builder.default)("MemberExpression", ...args); +function memberExpression(object, property, computed, optional) { + return (0, _builder.default)("MemberExpression", ...arguments); } -function newExpression(...args) { - return (0, _builder.default)("NewExpression", ...args); +function newExpression(callee, _arguments) { + return (0, _builder.default)("NewExpression", ...arguments); } -function program(...args) { - return (0, _builder.default)("Program", ...args); +function program(body, directives, sourceType, interpreter) { + return (0, _builder.default)("Program", ...arguments); } -function objectExpression(...args) { - return (0, _builder.default)("ObjectExpression", ...args); +function objectExpression(properties) { + return (0, _builder.default)("ObjectExpression", ...arguments); } -function objectMethod(...args) { - return 
(0, _builder.default)("ObjectMethod", ...args); +function objectMethod(kind, key, params, body, computed, generator, async) { + return (0, _builder.default)("ObjectMethod", ...arguments); } -function objectProperty(...args) { - return (0, _builder.default)("ObjectProperty", ...args); +function objectProperty(key, value, computed, shorthand, decorators) { + return (0, _builder.default)("ObjectProperty", ...arguments); } -function restElement(...args) { - return (0, _builder.default)("RestElement", ...args); +function restElement(argument) { + return (0, _builder.default)("RestElement", ...arguments); } -function returnStatement(...args) { - return (0, _builder.default)("ReturnStatement", ...args); +function returnStatement(argument) { + return (0, _builder.default)("ReturnStatement", ...arguments); } -function sequenceExpression(...args) { - return (0, _builder.default)("SequenceExpression", ...args); +function sequenceExpression(expressions) { + return (0, _builder.default)("SequenceExpression", ...arguments); } -function parenthesizedExpression(...args) { - return (0, _builder.default)("ParenthesizedExpression", ...args); +function parenthesizedExpression(expression) { + return (0, _builder.default)("ParenthesizedExpression", ...arguments); } -function switchCase(...args) { - return (0, _builder.default)("SwitchCase", ...args); +function switchCase(test, consequent) { + return (0, _builder.default)("SwitchCase", ...arguments); } -function switchStatement(...args) { - return (0, _builder.default)("SwitchStatement", ...args); +function switchStatement(discriminant, cases) { + return (0, _builder.default)("SwitchStatement", ...arguments); } -function thisExpression(...args) { - return (0, _builder.default)("ThisExpression", ...args); +function thisExpression() { + return (0, _builder.default)("ThisExpression", ...arguments); } -function throwStatement(...args) { - return (0, _builder.default)("ThrowStatement", ...args); +function throwStatement(argument) { + return (0, _builder.default)("ThrowStatement", ...arguments); } -function tryStatement(...args) { - return (0, _builder.default)("TryStatement", ...args); +function tryStatement(block, handler, finalizer) { + return (0, _builder.default)("TryStatement", ...arguments); } -function unaryExpression(...args) { - return (0, _builder.default)("UnaryExpression", ...args); +function unaryExpression(operator, argument, prefix) { + return (0, _builder.default)("UnaryExpression", ...arguments); } -function updateExpression(...args) { - return (0, _builder.default)("UpdateExpression", ...args); +function updateExpression(operator, argument, prefix) { + return (0, _builder.default)("UpdateExpression", ...arguments); } -function variableDeclaration(...args) { - return (0, _builder.default)("VariableDeclaration", ...args); +function variableDeclaration(kind, declarations) { + return (0, _builder.default)("VariableDeclaration", ...arguments); } -function variableDeclarator(...args) { - return (0, _builder.default)("VariableDeclarator", ...args); +function variableDeclarator(id, init) { + return (0, _builder.default)("VariableDeclarator", ...arguments); } -function whileStatement(...args) { - return (0, _builder.default)("WhileStatement", ...args); +function whileStatement(test, body) { + return (0, _builder.default)("WhileStatement", ...arguments); } -function withStatement(...args) { - return (0, _builder.default)("WithStatement", ...args); +function withStatement(object, body) { + return (0, _builder.default)("WithStatement", ...arguments); } 
-function assignmentPattern(...args) { - return (0, _builder.default)("AssignmentPattern", ...args); +function assignmentPattern(left, right) { + return (0, _builder.default)("AssignmentPattern", ...arguments); } -function arrayPattern(...args) { - return (0, _builder.default)("ArrayPattern", ...args); +function arrayPattern(elements) { + return (0, _builder.default)("ArrayPattern", ...arguments); } -function arrowFunctionExpression(...args) { - return (0, _builder.default)("ArrowFunctionExpression", ...args); +function arrowFunctionExpression(params, body, async) { + return (0, _builder.default)("ArrowFunctionExpression", ...arguments); } -function classBody(...args) { - return (0, _builder.default)("ClassBody", ...args); +function classBody(body) { + return (0, _builder.default)("ClassBody", ...arguments); } -function classExpression(...args) { - return (0, _builder.default)("ClassExpression", ...args); +function classExpression(id, superClass, body, decorators) { + return (0, _builder.default)("ClassExpression", ...arguments); } -function classDeclaration(...args) { - return (0, _builder.default)("ClassDeclaration", ...args); +function classDeclaration(id, superClass, body, decorators) { + return (0, _builder.default)("ClassDeclaration", ...arguments); } -function exportAllDeclaration(...args) { - return (0, _builder.default)("ExportAllDeclaration", ...args); +function exportAllDeclaration(source) { + return (0, _builder.default)("ExportAllDeclaration", ...arguments); } -function exportDefaultDeclaration(...args) { - return (0, _builder.default)("ExportDefaultDeclaration", ...args); +function exportDefaultDeclaration(declaration) { + return (0, _builder.default)("ExportDefaultDeclaration", ...arguments); } -function exportNamedDeclaration(...args) { - return (0, _builder.default)("ExportNamedDeclaration", ...args); +function exportNamedDeclaration(declaration, specifiers, source) { + return (0, _builder.default)("ExportNamedDeclaration", ...arguments); } -function exportSpecifier(...args) { - return (0, _builder.default)("ExportSpecifier", ...args); +function exportSpecifier(local, exported) { + return (0, _builder.default)("ExportSpecifier", ...arguments); } -function forOfStatement(...args) { - return (0, _builder.default)("ForOfStatement", ...args); +function forOfStatement(left, right, body, _await) { + return (0, _builder.default)("ForOfStatement", ...arguments); } -function importDeclaration(...args) { - return (0, _builder.default)("ImportDeclaration", ...args); +function importDeclaration(specifiers, source) { + return (0, _builder.default)("ImportDeclaration", ...arguments); } -function importDefaultSpecifier(...args) { - return (0, _builder.default)("ImportDefaultSpecifier", ...args); +function importDefaultSpecifier(local) { + return (0, _builder.default)("ImportDefaultSpecifier", ...arguments); } -function importNamespaceSpecifier(...args) { - return (0, _builder.default)("ImportNamespaceSpecifier", ...args); +function importNamespaceSpecifier(local) { + return (0, _builder.default)("ImportNamespaceSpecifier", ...arguments); } -function importSpecifier(...args) { - return (0, _builder.default)("ImportSpecifier", ...args); +function importSpecifier(local, imported) { + return (0, _builder.default)("ImportSpecifier", ...arguments); } -function metaProperty(...args) { - return (0, _builder.default)("MetaProperty", ...args); +function metaProperty(meta, property) { + return (0, _builder.default)("MetaProperty", ...arguments); } -function classMethod(...args) { - return (0, 
_builder.default)("ClassMethod", ...args); +function classMethod(kind, key, params, body, computed, _static, generator, async) { + return (0, _builder.default)("ClassMethod", ...arguments); } -function objectPattern(...args) { - return (0, _builder.default)("ObjectPattern", ...args); +function objectPattern(properties) { + return (0, _builder.default)("ObjectPattern", ...arguments); } -function spreadElement(...args) { - return (0, _builder.default)("SpreadElement", ...args); +function spreadElement(argument) { + return (0, _builder.default)("SpreadElement", ...arguments); } -function _super(...args) { - return (0, _builder.default)("Super", ...args); +function _super() { + return (0, _builder.default)("Super", ...arguments); } -function taggedTemplateExpression(...args) { - return (0, _builder.default)("TaggedTemplateExpression", ...args); +function taggedTemplateExpression(tag, quasi) { + return (0, _builder.default)("TaggedTemplateExpression", ...arguments); } -function templateElement(...args) { - return (0, _builder.default)("TemplateElement", ...args); +function templateElement(value, tail) { + return (0, _builder.default)("TemplateElement", ...arguments); } -function templateLiteral(...args) { - return (0, _builder.default)("TemplateLiteral", ...args); +function templateLiteral(quasis, expressions) { + return (0, _builder.default)("TemplateLiteral", ...arguments); } -function yieldExpression(...args) { - return (0, _builder.default)("YieldExpression", ...args); +function yieldExpression(argument, delegate) { + return (0, _builder.default)("YieldExpression", ...arguments); } -function awaitExpression(...args) { - return (0, _builder.default)("AwaitExpression", ...args); +function awaitExpression(argument) { + return (0, _builder.default)("AwaitExpression", ...arguments); } -function _import(...args) { - return (0, _builder.default)("Import", ...args); +function _import() { + return (0, _builder.default)("Import", ...arguments); } -function bigIntLiteral(...args) { - return (0, _builder.default)("BigIntLiteral", ...args); +function bigIntLiteral(value) { + return (0, _builder.default)("BigIntLiteral", ...arguments); } -function exportNamespaceSpecifier(...args) { - return (0, _builder.default)("ExportNamespaceSpecifier", ...args); +function exportNamespaceSpecifier(exported) { + return (0, _builder.default)("ExportNamespaceSpecifier", ...arguments); } -function optionalMemberExpression(...args) { - return (0, _builder.default)("OptionalMemberExpression", ...args); +function optionalMemberExpression(object, property, computed, optional) { + return (0, _builder.default)("OptionalMemberExpression", ...arguments); } -function optionalCallExpression(...args) { - return (0, _builder.default)("OptionalCallExpression", ...args); +function optionalCallExpression(callee, _arguments, optional) { + return (0, _builder.default)("OptionalCallExpression", ...arguments); } -function anyTypeAnnotation(...args) { - return (0, _builder.default)("AnyTypeAnnotation", ...args); +function anyTypeAnnotation() { + return (0, _builder.default)("AnyTypeAnnotation", ...arguments); } -function arrayTypeAnnotation(...args) { - return (0, _builder.default)("ArrayTypeAnnotation", ...args); +function arrayTypeAnnotation(elementType) { + return (0, _builder.default)("ArrayTypeAnnotation", ...arguments); } -function booleanTypeAnnotation(...args) { - return (0, _builder.default)("BooleanTypeAnnotation", ...args); +function booleanTypeAnnotation() { + return (0, _builder.default)("BooleanTypeAnnotation", ...arguments); 
} -function booleanLiteralTypeAnnotation(...args) { - return (0, _builder.default)("BooleanLiteralTypeAnnotation", ...args); +function booleanLiteralTypeAnnotation(value) { + return (0, _builder.default)("BooleanLiteralTypeAnnotation", ...arguments); } -function nullLiteralTypeAnnotation(...args) { - return (0, _builder.default)("NullLiteralTypeAnnotation", ...args); +function nullLiteralTypeAnnotation() { + return (0, _builder.default)("NullLiteralTypeAnnotation", ...arguments); } -function classImplements(...args) { - return (0, _builder.default)("ClassImplements", ...args); +function classImplements(id, typeParameters) { + return (0, _builder.default)("ClassImplements", ...arguments); } -function declareClass(...args) { - return (0, _builder.default)("DeclareClass", ...args); +function declareClass(id, typeParameters, _extends, body) { + return (0, _builder.default)("DeclareClass", ...arguments); } -function declareFunction(...args) { - return (0, _builder.default)("DeclareFunction", ...args); +function declareFunction(id) { + return (0, _builder.default)("DeclareFunction", ...arguments); } -function declareInterface(...args) { - return (0, _builder.default)("DeclareInterface", ...args); +function declareInterface(id, typeParameters, _extends, body) { + return (0, _builder.default)("DeclareInterface", ...arguments); } -function declareModule(...args) { - return (0, _builder.default)("DeclareModule", ...args); +function declareModule(id, body, kind) { + return (0, _builder.default)("DeclareModule", ...arguments); } -function declareModuleExports(...args) { - return (0, _builder.default)("DeclareModuleExports", ...args); +function declareModuleExports(typeAnnotation) { + return (0, _builder.default)("DeclareModuleExports", ...arguments); } -function declareTypeAlias(...args) { - return (0, _builder.default)("DeclareTypeAlias", ...args); +function declareTypeAlias(id, typeParameters, right) { + return (0, _builder.default)("DeclareTypeAlias", ...arguments); } -function declareOpaqueType(...args) { - return (0, _builder.default)("DeclareOpaqueType", ...args); +function declareOpaqueType(id, typeParameters, supertype) { + return (0, _builder.default)("DeclareOpaqueType", ...arguments); } -function declareVariable(...args) { - return (0, _builder.default)("DeclareVariable", ...args); +function declareVariable(id) { + return (0, _builder.default)("DeclareVariable", ...arguments); } -function declareExportDeclaration(...args) { - return (0, _builder.default)("DeclareExportDeclaration", ...args); +function declareExportDeclaration(declaration, specifiers, source) { + return (0, _builder.default)("DeclareExportDeclaration", ...arguments); } -function declareExportAllDeclaration(...args) { - return (0, _builder.default)("DeclareExportAllDeclaration", ...args); +function declareExportAllDeclaration(source) { + return (0, _builder.default)("DeclareExportAllDeclaration", ...arguments); } -function declaredPredicate(...args) { - return (0, _builder.default)("DeclaredPredicate", ...args); +function declaredPredicate(value) { + return (0, _builder.default)("DeclaredPredicate", ...arguments); } -function existsTypeAnnotation(...args) { - return (0, _builder.default)("ExistsTypeAnnotation", ...args); +function existsTypeAnnotation() { + return (0, _builder.default)("ExistsTypeAnnotation", ...arguments); } -function functionTypeAnnotation(...args) { - return (0, _builder.default)("FunctionTypeAnnotation", ...args); +function functionTypeAnnotation(typeParameters, params, rest, returnType) { + return (0, 
_builder.default)("FunctionTypeAnnotation", ...arguments); } -function functionTypeParam(...args) { - return (0, _builder.default)("FunctionTypeParam", ...args); +function functionTypeParam(name, typeAnnotation) { + return (0, _builder.default)("FunctionTypeParam", ...arguments); } -function genericTypeAnnotation(...args) { - return (0, _builder.default)("GenericTypeAnnotation", ...args); +function genericTypeAnnotation(id, typeParameters) { + return (0, _builder.default)("GenericTypeAnnotation", ...arguments); } -function inferredPredicate(...args) { - return (0, _builder.default)("InferredPredicate", ...args); +function inferredPredicate() { + return (0, _builder.default)("InferredPredicate", ...arguments); } -function interfaceExtends(...args) { - return (0, _builder.default)("InterfaceExtends", ...args); +function interfaceExtends(id, typeParameters) { + return (0, _builder.default)("InterfaceExtends", ...arguments); } -function interfaceDeclaration(...args) { - return (0, _builder.default)("InterfaceDeclaration", ...args); +function interfaceDeclaration(id, typeParameters, _extends, body) { + return (0, _builder.default)("InterfaceDeclaration", ...arguments); } -function interfaceTypeAnnotation(...args) { - return (0, _builder.default)("InterfaceTypeAnnotation", ...args); +function interfaceTypeAnnotation(_extends, body) { + return (0, _builder.default)("InterfaceTypeAnnotation", ...arguments); } -function intersectionTypeAnnotation(...args) { - return (0, _builder.default)("IntersectionTypeAnnotation", ...args); +function intersectionTypeAnnotation(types) { + return (0, _builder.default)("IntersectionTypeAnnotation", ...arguments); } -function mixedTypeAnnotation(...args) { - return (0, _builder.default)("MixedTypeAnnotation", ...args); +function mixedTypeAnnotation() { + return (0, _builder.default)("MixedTypeAnnotation", ...arguments); } -function emptyTypeAnnotation(...args) { - return (0, _builder.default)("EmptyTypeAnnotation", ...args); +function emptyTypeAnnotation() { + return (0, _builder.default)("EmptyTypeAnnotation", ...arguments); } -function nullableTypeAnnotation(...args) { - return (0, _builder.default)("NullableTypeAnnotation", ...args); +function nullableTypeAnnotation(typeAnnotation) { + return (0, _builder.default)("NullableTypeAnnotation", ...arguments); } -function numberLiteralTypeAnnotation(...args) { - return (0, _builder.default)("NumberLiteralTypeAnnotation", ...args); +function numberLiteralTypeAnnotation(value) { + return (0, _builder.default)("NumberLiteralTypeAnnotation", ...arguments); } -function numberTypeAnnotation(...args) { - return (0, _builder.default)("NumberTypeAnnotation", ...args); +function numberTypeAnnotation() { + return (0, _builder.default)("NumberTypeAnnotation", ...arguments); } -function objectTypeAnnotation(...args) { - return (0, _builder.default)("ObjectTypeAnnotation", ...args); +function objectTypeAnnotation(properties, indexers, callProperties, internalSlots, exact) { + return (0, _builder.default)("ObjectTypeAnnotation", ...arguments); } -function objectTypeInternalSlot(...args) { - return (0, _builder.default)("ObjectTypeInternalSlot", ...args); +function objectTypeInternalSlot(id, value, optional, _static, method) { + return (0, _builder.default)("ObjectTypeInternalSlot", ...arguments); } -function objectTypeCallProperty(...args) { - return (0, _builder.default)("ObjectTypeCallProperty", ...args); +function objectTypeCallProperty(value) { + return (0, _builder.default)("ObjectTypeCallProperty", ...arguments); } -function 
objectTypeIndexer(...args) { - return (0, _builder.default)("ObjectTypeIndexer", ...args); +function objectTypeIndexer(id, key, value, variance) { + return (0, _builder.default)("ObjectTypeIndexer", ...arguments); } -function objectTypeProperty(...args) { - return (0, _builder.default)("ObjectTypeProperty", ...args); +function objectTypeProperty(key, value, variance) { + return (0, _builder.default)("ObjectTypeProperty", ...arguments); } -function objectTypeSpreadProperty(...args) { - return (0, _builder.default)("ObjectTypeSpreadProperty", ...args); +function objectTypeSpreadProperty(argument) { + return (0, _builder.default)("ObjectTypeSpreadProperty", ...arguments); } -function opaqueType(...args) { - return (0, _builder.default)("OpaqueType", ...args); +function opaqueType(id, typeParameters, supertype, impltype) { + return (0, _builder.default)("OpaqueType", ...arguments); } -function qualifiedTypeIdentifier(...args) { - return (0, _builder.default)("QualifiedTypeIdentifier", ...args); +function qualifiedTypeIdentifier(id, qualification) { + return (0, _builder.default)("QualifiedTypeIdentifier", ...arguments); } -function stringLiteralTypeAnnotation(...args) { - return (0, _builder.default)("StringLiteralTypeAnnotation", ...args); +function stringLiteralTypeAnnotation(value) { + return (0, _builder.default)("StringLiteralTypeAnnotation", ...arguments); } -function stringTypeAnnotation(...args) { - return (0, _builder.default)("StringTypeAnnotation", ...args); +function stringTypeAnnotation() { + return (0, _builder.default)("StringTypeAnnotation", ...arguments); } -function symbolTypeAnnotation(...args) { - return (0, _builder.default)("SymbolTypeAnnotation", ...args); +function symbolTypeAnnotation() { + return (0, _builder.default)("SymbolTypeAnnotation", ...arguments); } -function thisTypeAnnotation(...args) { - return (0, _builder.default)("ThisTypeAnnotation", ...args); +function thisTypeAnnotation() { + return (0, _builder.default)("ThisTypeAnnotation", ...arguments); } -function tupleTypeAnnotation(...args) { - return (0, _builder.default)("TupleTypeAnnotation", ...args); +function tupleTypeAnnotation(types) { + return (0, _builder.default)("TupleTypeAnnotation", ...arguments); } -function typeofTypeAnnotation(...args) { - return (0, _builder.default)("TypeofTypeAnnotation", ...args); +function typeofTypeAnnotation(argument) { + return (0, _builder.default)("TypeofTypeAnnotation", ...arguments); } -function typeAlias(...args) { - return (0, _builder.default)("TypeAlias", ...args); +function typeAlias(id, typeParameters, right) { + return (0, _builder.default)("TypeAlias", ...arguments); } -function typeAnnotation(...args) { - return (0, _builder.default)("TypeAnnotation", ...args); +function typeAnnotation(typeAnnotation) { + return (0, _builder.default)("TypeAnnotation", ...arguments); } -function typeCastExpression(...args) { - return (0, _builder.default)("TypeCastExpression", ...args); +function typeCastExpression(expression, typeAnnotation) { + return (0, _builder.default)("TypeCastExpression", ...arguments); } -function typeParameter(...args) { - return (0, _builder.default)("TypeParameter", ...args); +function typeParameter(bound, _default, variance) { + return (0, _builder.default)("TypeParameter", ...arguments); } -function typeParameterDeclaration(...args) { - return (0, _builder.default)("TypeParameterDeclaration", ...args); +function typeParameterDeclaration(params) { + return (0, _builder.default)("TypeParameterDeclaration", ...arguments); } -function 
typeParameterInstantiation(...args) { - return (0, _builder.default)("TypeParameterInstantiation", ...args); +function typeParameterInstantiation(params) { + return (0, _builder.default)("TypeParameterInstantiation", ...arguments); } -function unionTypeAnnotation(...args) { - return (0, _builder.default)("UnionTypeAnnotation", ...args); +function unionTypeAnnotation(types) { + return (0, _builder.default)("UnionTypeAnnotation", ...arguments); } -function variance(...args) { - return (0, _builder.default)("Variance", ...args); +function variance(kind) { + return (0, _builder.default)("Variance", ...arguments); } -function voidTypeAnnotation(...args) { - return (0, _builder.default)("VoidTypeAnnotation", ...args); +function voidTypeAnnotation() { + return (0, _builder.default)("VoidTypeAnnotation", ...arguments); } -function enumDeclaration(...args) { - return (0, _builder.default)("EnumDeclaration", ...args); +function enumDeclaration(id, body) { + return (0, _builder.default)("EnumDeclaration", ...arguments); } -function enumBooleanBody(...args) { - return (0, _builder.default)("EnumBooleanBody", ...args); +function enumBooleanBody(members) { + return (0, _builder.default)("EnumBooleanBody", ...arguments); } -function enumNumberBody(...args) { - return (0, _builder.default)("EnumNumberBody", ...args); +function enumNumberBody(members) { + return (0, _builder.default)("EnumNumberBody", ...arguments); } -function enumStringBody(...args) { - return (0, _builder.default)("EnumStringBody", ...args); +function enumStringBody(members) { + return (0, _builder.default)("EnumStringBody", ...arguments); } -function enumSymbolBody(...args) { - return (0, _builder.default)("EnumSymbolBody", ...args); +function enumSymbolBody(members) { + return (0, _builder.default)("EnumSymbolBody", ...arguments); } -function enumBooleanMember(...args) { - return (0, _builder.default)("EnumBooleanMember", ...args); +function enumBooleanMember(id) { + return (0, _builder.default)("EnumBooleanMember", ...arguments); } -function enumNumberMember(...args) { - return (0, _builder.default)("EnumNumberMember", ...args); +function enumNumberMember(id, init) { + return (0, _builder.default)("EnumNumberMember", ...arguments); } -function enumStringMember(...args) { - return (0, _builder.default)("EnumStringMember", ...args); +function enumStringMember(id, init) { + return (0, _builder.default)("EnumStringMember", ...arguments); } -function enumDefaultedMember(...args) { - return (0, _builder.default)("EnumDefaultedMember", ...args); +function enumDefaultedMember(id) { + return (0, _builder.default)("EnumDefaultedMember", ...arguments); } -function jsxAttribute(...args) { - return (0, _builder.default)("JSXAttribute", ...args); +function jsxAttribute(name, value) { + return (0, _builder.default)("JSXAttribute", ...arguments); } -function jsxClosingElement(...args) { - return (0, _builder.default)("JSXClosingElement", ...args); +function jsxClosingElement(name) { + return (0, _builder.default)("JSXClosingElement", ...arguments); } -function jsxElement(...args) { - return (0, _builder.default)("JSXElement", ...args); +function jsxElement(openingElement, closingElement, children, selfClosing) { + return (0, _builder.default)("JSXElement", ...arguments); } -function jsxEmptyExpression(...args) { - return (0, _builder.default)("JSXEmptyExpression", ...args); +function jsxEmptyExpression() { + return (0, _builder.default)("JSXEmptyExpression", ...arguments); } -function jsxExpressionContainer(...args) { - return (0, 
_builder.default)("JSXExpressionContainer", ...args); +function jsxExpressionContainer(expression) { + return (0, _builder.default)("JSXExpressionContainer", ...arguments); } -function jsxSpreadChild(...args) { - return (0, _builder.default)("JSXSpreadChild", ...args); +function jsxSpreadChild(expression) { + return (0, _builder.default)("JSXSpreadChild", ...arguments); } -function jsxIdentifier(...args) { - return (0, _builder.default)("JSXIdentifier", ...args); +function jsxIdentifier(name) { + return (0, _builder.default)("JSXIdentifier", ...arguments); } -function jsxMemberExpression(...args) { - return (0, _builder.default)("JSXMemberExpression", ...args); +function jsxMemberExpression(object, property) { + return (0, _builder.default)("JSXMemberExpression", ...arguments); } -function jsxNamespacedName(...args) { - return (0, _builder.default)("JSXNamespacedName", ...args); +function jsxNamespacedName(namespace, name) { + return (0, _builder.default)("JSXNamespacedName", ...arguments); } -function jsxOpeningElement(...args) { - return (0, _builder.default)("JSXOpeningElement", ...args); +function jsxOpeningElement(name, attributes, selfClosing) { + return (0, _builder.default)("JSXOpeningElement", ...arguments); } -function jsxSpreadAttribute(...args) { - return (0, _builder.default)("JSXSpreadAttribute", ...args); +function jsxSpreadAttribute(argument) { + return (0, _builder.default)("JSXSpreadAttribute", ...arguments); } -function jsxText(...args) { - return (0, _builder.default)("JSXText", ...args); +function jsxText(value) { + return (0, _builder.default)("JSXText", ...arguments); } -function jsxFragment(...args) { - return (0, _builder.default)("JSXFragment", ...args); +function jsxFragment(openingFragment, closingFragment, children) { + return (0, _builder.default)("JSXFragment", ...arguments); } -function jsxOpeningFragment(...args) { - return (0, _builder.default)("JSXOpeningFragment", ...args); +function jsxOpeningFragment() { + return (0, _builder.default)("JSXOpeningFragment", ...arguments); } -function jsxClosingFragment(...args) { - return (0, _builder.default)("JSXClosingFragment", ...args); +function jsxClosingFragment() { + return (0, _builder.default)("JSXClosingFragment", ...arguments); } -function noop(...args) { - return (0, _builder.default)("Noop", ...args); +function noop() { + return (0, _builder.default)("Noop", ...arguments); } -function placeholder(...args) { - return (0, _builder.default)("Placeholder", ...args); +function placeholder(expectedNode, name) { + return (0, _builder.default)("Placeholder", ...arguments); } -function v8IntrinsicIdentifier(...args) { - return (0, _builder.default)("V8IntrinsicIdentifier", ...args); +function v8IntrinsicIdentifier(name) { + return (0, _builder.default)("V8IntrinsicIdentifier", ...arguments); } -function argumentPlaceholder(...args) { - return (0, _builder.default)("ArgumentPlaceholder", ...args); +function argumentPlaceholder() { + return (0, _builder.default)("ArgumentPlaceholder", ...arguments); } -function bindExpression(...args) { - return (0, _builder.default)("BindExpression", ...args); +function bindExpression(object, callee) { + return (0, _builder.default)("BindExpression", ...arguments); } -function classProperty(...args) { - return (0, _builder.default)("ClassProperty", ...args); +function classProperty(key, value, typeAnnotation, decorators, computed, _static) { + return (0, _builder.default)("ClassProperty", ...arguments); } -function pipelineTopicExpression(...args) { - return (0, 
_builder.default)("PipelineTopicExpression", ...args); +function pipelineTopicExpression(expression) { + return (0, _builder.default)("PipelineTopicExpression", ...arguments); } -function pipelineBareFunction(...args) { - return (0, _builder.default)("PipelineBareFunction", ...args); +function pipelineBareFunction(callee) { + return (0, _builder.default)("PipelineBareFunction", ...arguments); } -function pipelinePrimaryTopicReference(...args) { - return (0, _builder.default)("PipelinePrimaryTopicReference", ...args); +function pipelinePrimaryTopicReference() { + return (0, _builder.default)("PipelinePrimaryTopicReference", ...arguments); } -function classPrivateProperty(...args) { - return (0, _builder.default)("ClassPrivateProperty", ...args); +function classPrivateProperty(key, value, decorators, _static) { + return (0, _builder.default)("ClassPrivateProperty", ...arguments); } -function classPrivateMethod(...args) { - return (0, _builder.default)("ClassPrivateMethod", ...args); +function classPrivateMethod(kind, key, params, body, _static) { + return (0, _builder.default)("ClassPrivateMethod", ...arguments); } -function importAttribute(...args) { - return (0, _builder.default)("ImportAttribute", ...args); +function importAttribute(key, value) { + return (0, _builder.default)("ImportAttribute", ...arguments); } -function decorator(...args) { - return (0, _builder.default)("Decorator", ...args); +function decorator(expression) { + return (0, _builder.default)("Decorator", ...arguments); } -function doExpression(...args) { - return (0, _builder.default)("DoExpression", ...args); +function doExpression(body) { + return (0, _builder.default)("DoExpression", ...arguments); } -function exportDefaultSpecifier(...args) { - return (0, _builder.default)("ExportDefaultSpecifier", ...args); +function exportDefaultSpecifier(exported) { + return (0, _builder.default)("ExportDefaultSpecifier", ...arguments); } -function privateName(...args) { - return (0, _builder.default)("PrivateName", ...args); +function privateName(id) { + return (0, _builder.default)("PrivateName", ...arguments); } -function recordExpression(...args) { - return (0, _builder.default)("RecordExpression", ...args); +function recordExpression(properties) { + return (0, _builder.default)("RecordExpression", ...arguments); } -function tupleExpression(...args) { - return (0, _builder.default)("TupleExpression", ...args); +function tupleExpression(elements) { + return (0, _builder.default)("TupleExpression", ...arguments); } -function decimalLiteral(...args) { - return (0, _builder.default)("DecimalLiteral", ...args); +function decimalLiteral(value) { + return (0, _builder.default)("DecimalLiteral", ...arguments); } -function staticBlock(...args) { - return (0, _builder.default)("StaticBlock", ...args); +function staticBlock(body) { + return (0, _builder.default)("StaticBlock", ...arguments); } -function tsParameterProperty(...args) { - return (0, _builder.default)("TSParameterProperty", ...args); +function tsParameterProperty(parameter) { + return (0, _builder.default)("TSParameterProperty", ...arguments); } -function tsDeclareFunction(...args) { - return (0, _builder.default)("TSDeclareFunction", ...args); +function tsDeclareFunction(id, typeParameters, params, returnType) { + return (0, _builder.default)("TSDeclareFunction", ...arguments); } -function tsDeclareMethod(...args) { - return (0, _builder.default)("TSDeclareMethod", ...args); +function tsDeclareMethod(decorators, key, typeParameters, params, returnType) { + return (0, 
_builder.default)("TSDeclareMethod", ...arguments); } -function tsQualifiedName(...args) { - return (0, _builder.default)("TSQualifiedName", ...args); +function tsQualifiedName(left, right) { + return (0, _builder.default)("TSQualifiedName", ...arguments); } -function tsCallSignatureDeclaration(...args) { - return (0, _builder.default)("TSCallSignatureDeclaration", ...args); +function tsCallSignatureDeclaration(typeParameters, parameters, typeAnnotation) { + return (0, _builder.default)("TSCallSignatureDeclaration", ...arguments); } -function tsConstructSignatureDeclaration(...args) { - return (0, _builder.default)("TSConstructSignatureDeclaration", ...args); +function tsConstructSignatureDeclaration(typeParameters, parameters, typeAnnotation) { + return (0, _builder.default)("TSConstructSignatureDeclaration", ...arguments); } -function tsPropertySignature(...args) { - return (0, _builder.default)("TSPropertySignature", ...args); +function tsPropertySignature(key, typeAnnotation, initializer) { + return (0, _builder.default)("TSPropertySignature", ...arguments); } -function tsMethodSignature(...args) { - return (0, _builder.default)("TSMethodSignature", ...args); +function tsMethodSignature(key, typeParameters, parameters, typeAnnotation) { + return (0, _builder.default)("TSMethodSignature", ...arguments); } -function tsIndexSignature(...args) { - return (0, _builder.default)("TSIndexSignature", ...args); +function tsIndexSignature(parameters, typeAnnotation) { + return (0, _builder.default)("TSIndexSignature", ...arguments); } -function tsAnyKeyword(...args) { - return (0, _builder.default)("TSAnyKeyword", ...args); +function tsAnyKeyword() { + return (0, _builder.default)("TSAnyKeyword", ...arguments); } -function tsBooleanKeyword(...args) { - return (0, _builder.default)("TSBooleanKeyword", ...args); +function tsBooleanKeyword() { + return (0, _builder.default)("TSBooleanKeyword", ...arguments); } -function tsBigIntKeyword(...args) { - return (0, _builder.default)("TSBigIntKeyword", ...args); +function tsBigIntKeyword() { + return (0, _builder.default)("TSBigIntKeyword", ...arguments); } -function tsIntrinsicKeyword(...args) { - return (0, _builder.default)("TSIntrinsicKeyword", ...args); +function tsIntrinsicKeyword() { + return (0, _builder.default)("TSIntrinsicKeyword", ...arguments); } -function tsNeverKeyword(...args) { - return (0, _builder.default)("TSNeverKeyword", ...args); +function tsNeverKeyword() { + return (0, _builder.default)("TSNeverKeyword", ...arguments); } -function tsNullKeyword(...args) { - return (0, _builder.default)("TSNullKeyword", ...args); +function tsNullKeyword() { + return (0, _builder.default)("TSNullKeyword", ...arguments); } -function tsNumberKeyword(...args) { - return (0, _builder.default)("TSNumberKeyword", ...args); +function tsNumberKeyword() { + return (0, _builder.default)("TSNumberKeyword", ...arguments); } -function tsObjectKeyword(...args) { - return (0, _builder.default)("TSObjectKeyword", ...args); +function tsObjectKeyword() { + return (0, _builder.default)("TSObjectKeyword", ...arguments); } -function tsStringKeyword(...args) { - return (0, _builder.default)("TSStringKeyword", ...args); +function tsStringKeyword() { + return (0, _builder.default)("TSStringKeyword", ...arguments); } -function tsSymbolKeyword(...args) { - return (0, _builder.default)("TSSymbolKeyword", ...args); +function tsSymbolKeyword() { + return (0, _builder.default)("TSSymbolKeyword", ...arguments); } -function tsUndefinedKeyword(...args) { - return (0, 
_builder.default)("TSUndefinedKeyword", ...args); +function tsUndefinedKeyword() { + return (0, _builder.default)("TSUndefinedKeyword", ...arguments); } -function tsUnknownKeyword(...args) { - return (0, _builder.default)("TSUnknownKeyword", ...args); +function tsUnknownKeyword() { + return (0, _builder.default)("TSUnknownKeyword", ...arguments); } -function tsVoidKeyword(...args) { - return (0, _builder.default)("TSVoidKeyword", ...args); +function tsVoidKeyword() { + return (0, _builder.default)("TSVoidKeyword", ...arguments); } -function tsThisType(...args) { - return (0, _builder.default)("TSThisType", ...args); +function tsThisType() { + return (0, _builder.default)("TSThisType", ...arguments); } -function tsFunctionType(...args) { - return (0, _builder.default)("TSFunctionType", ...args); +function tsFunctionType(typeParameters, parameters, typeAnnotation) { + return (0, _builder.default)("TSFunctionType", ...arguments); } -function tsConstructorType(...args) { - return (0, _builder.default)("TSConstructorType", ...args); +function tsConstructorType(typeParameters, parameters, typeAnnotation) { + return (0, _builder.default)("TSConstructorType", ...arguments); } -function tsTypeReference(...args) { - return (0, _builder.default)("TSTypeReference", ...args); +function tsTypeReference(typeName, typeParameters) { + return (0, _builder.default)("TSTypeReference", ...arguments); } -function tsTypePredicate(...args) { - return (0, _builder.default)("TSTypePredicate", ...args); +function tsTypePredicate(parameterName, typeAnnotation, asserts) { + return (0, _builder.default)("TSTypePredicate", ...arguments); } -function tsTypeQuery(...args) { - return (0, _builder.default)("TSTypeQuery", ...args); +function tsTypeQuery(exprName) { + return (0, _builder.default)("TSTypeQuery", ...arguments); } -function tsTypeLiteral(...args) { - return (0, _builder.default)("TSTypeLiteral", ...args); +function tsTypeLiteral(members) { + return (0, _builder.default)("TSTypeLiteral", ...arguments); } -function tsArrayType(...args) { - return (0, _builder.default)("TSArrayType", ...args); +function tsArrayType(elementType) { + return (0, _builder.default)("TSArrayType", ...arguments); } -function tsTupleType(...args) { - return (0, _builder.default)("TSTupleType", ...args); +function tsTupleType(elementTypes) { + return (0, _builder.default)("TSTupleType", ...arguments); } -function tsOptionalType(...args) { - return (0, _builder.default)("TSOptionalType", ...args); +function tsOptionalType(typeAnnotation) { + return (0, _builder.default)("TSOptionalType", ...arguments); } -function tsRestType(...args) { - return (0, _builder.default)("TSRestType", ...args); +function tsRestType(typeAnnotation) { + return (0, _builder.default)("TSRestType", ...arguments); } -function tsNamedTupleMember(...args) { - return (0, _builder.default)("TSNamedTupleMember", ...args); +function tsNamedTupleMember(label, elementType, optional) { + return (0, _builder.default)("TSNamedTupleMember", ...arguments); } -function tsUnionType(...args) { - return (0, _builder.default)("TSUnionType", ...args); +function tsUnionType(types) { + return (0, _builder.default)("TSUnionType", ...arguments); } -function tsIntersectionType(...args) { - return (0, _builder.default)("TSIntersectionType", ...args); +function tsIntersectionType(types) { + return (0, _builder.default)("TSIntersectionType", ...arguments); } -function tsConditionalType(...args) { - return (0, _builder.default)("TSConditionalType", ...args); +function tsConditionalType(checkType, 
extendsType, trueType, falseType) { + return (0, _builder.default)("TSConditionalType", ...arguments); } -function tsInferType(...args) { - return (0, _builder.default)("TSInferType", ...args); +function tsInferType(typeParameter) { + return (0, _builder.default)("TSInferType", ...arguments); } -function tsParenthesizedType(...args) { - return (0, _builder.default)("TSParenthesizedType", ...args); +function tsParenthesizedType(typeAnnotation) { + return (0, _builder.default)("TSParenthesizedType", ...arguments); } -function tsTypeOperator(...args) { - return (0, _builder.default)("TSTypeOperator", ...args); +function tsTypeOperator(typeAnnotation) { + return (0, _builder.default)("TSTypeOperator", ...arguments); } -function tsIndexedAccessType(...args) { - return (0, _builder.default)("TSIndexedAccessType", ...args); +function tsIndexedAccessType(objectType, indexType) { + return (0, _builder.default)("TSIndexedAccessType", ...arguments); } -function tsMappedType(...args) { - return (0, _builder.default)("TSMappedType", ...args); +function tsMappedType(typeParameter, typeAnnotation, nameType) { + return (0, _builder.default)("TSMappedType", ...arguments); } -function tsLiteralType(...args) { - return (0, _builder.default)("TSLiteralType", ...args); +function tsLiteralType(literal) { + return (0, _builder.default)("TSLiteralType", ...arguments); } -function tsExpressionWithTypeArguments(...args) { - return (0, _builder.default)("TSExpressionWithTypeArguments", ...args); +function tsExpressionWithTypeArguments(expression, typeParameters) { + return (0, _builder.default)("TSExpressionWithTypeArguments", ...arguments); } -function tsInterfaceDeclaration(...args) { - return (0, _builder.default)("TSInterfaceDeclaration", ...args); +function tsInterfaceDeclaration(id, typeParameters, _extends, body) { + return (0, _builder.default)("TSInterfaceDeclaration", ...arguments); } -function tsInterfaceBody(...args) { - return (0, _builder.default)("TSInterfaceBody", ...args); +function tsInterfaceBody(body) { + return (0, _builder.default)("TSInterfaceBody", ...arguments); } -function tsTypeAliasDeclaration(...args) { - return (0, _builder.default)("TSTypeAliasDeclaration", ...args); +function tsTypeAliasDeclaration(id, typeParameters, typeAnnotation) { + return (0, _builder.default)("TSTypeAliasDeclaration", ...arguments); } -function tsAsExpression(...args) { - return (0, _builder.default)("TSAsExpression", ...args); +function tsAsExpression(expression, typeAnnotation) { + return (0, _builder.default)("TSAsExpression", ...arguments); } -function tsTypeAssertion(...args) { - return (0, _builder.default)("TSTypeAssertion", ...args); +function tsTypeAssertion(typeAnnotation, expression) { + return (0, _builder.default)("TSTypeAssertion", ...arguments); } -function tsEnumDeclaration(...args) { - return (0, _builder.default)("TSEnumDeclaration", ...args); +function tsEnumDeclaration(id, members) { + return (0, _builder.default)("TSEnumDeclaration", ...arguments); } -function tsEnumMember(...args) { - return (0, _builder.default)("TSEnumMember", ...args); +function tsEnumMember(id, initializer) { + return (0, _builder.default)("TSEnumMember", ...arguments); } -function tsModuleDeclaration(...args) { - return (0, _builder.default)("TSModuleDeclaration", ...args); +function tsModuleDeclaration(id, body) { + return (0, _builder.default)("TSModuleDeclaration", ...arguments); } -function tsModuleBlock(...args) { - return (0, _builder.default)("TSModuleBlock", ...args); +function tsModuleBlock(body) { + return 
(0, _builder.default)("TSModuleBlock", ...arguments); } -function tsImportType(...args) { - return (0, _builder.default)("TSImportType", ...args); +function tsImportType(argument, qualifier, typeParameters) { + return (0, _builder.default)("TSImportType", ...arguments); } -function tsImportEqualsDeclaration(...args) { - return (0, _builder.default)("TSImportEqualsDeclaration", ...args); +function tsImportEqualsDeclaration(id, moduleReference) { + return (0, _builder.default)("TSImportEqualsDeclaration", ...arguments); } -function tsExternalModuleReference(...args) { - return (0, _builder.default)("TSExternalModuleReference", ...args); +function tsExternalModuleReference(expression) { + return (0, _builder.default)("TSExternalModuleReference", ...arguments); } -function tsNonNullExpression(...args) { - return (0, _builder.default)("TSNonNullExpression", ...args); +function tsNonNullExpression(expression) { + return (0, _builder.default)("TSNonNullExpression", ...arguments); } -function tsExportAssignment(...args) { - return (0, _builder.default)("TSExportAssignment", ...args); +function tsExportAssignment(expression) { + return (0, _builder.default)("TSExportAssignment", ...arguments); } -function tsNamespaceExportDeclaration(...args) { - return (0, _builder.default)("TSNamespaceExportDeclaration", ...args); +function tsNamespaceExportDeclaration(id) { + return (0, _builder.default)("TSNamespaceExportDeclaration", ...arguments); } -function tsTypeAnnotation(...args) { - return (0, _builder.default)("TSTypeAnnotation", ...args); +function tsTypeAnnotation(typeAnnotation) { + return (0, _builder.default)("TSTypeAnnotation", ...arguments); } -function tsTypeParameterInstantiation(...args) { - return (0, _builder.default)("TSTypeParameterInstantiation", ...args); +function tsTypeParameterInstantiation(params) { + return (0, _builder.default)("TSTypeParameterInstantiation", ...arguments); } -function tsTypeParameterDeclaration(...args) { - return (0, _builder.default)("TSTypeParameterDeclaration", ...args); +function tsTypeParameterDeclaration(params) { + return (0, _builder.default)("TSTypeParameterDeclaration", ...arguments); } -function tsTypeParameter(...args) { - return (0, _builder.default)("TSTypeParameter", ...args); +function tsTypeParameter(constraint, _default, name) { + return (0, _builder.default)("TSTypeParameter", ...arguments); } function NumberLiteral(...args) { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/uppercase.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/uppercase.js new file mode 100644 index 00000000000000..1ce7732836ae4a --- /dev/null +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/generated/uppercase.js @@ -0,0 +1,1483 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "ArrayExpression", { + enumerable: true, + get: function () { + return _index.arrayExpression; + } +}); +Object.defineProperty(exports, "AssignmentExpression", { + enumerable: true, + get: function () { + return _index.assignmentExpression; + } +}); +Object.defineProperty(exports, "BinaryExpression", { + enumerable: true, + get: function () { + return _index.binaryExpression; + } +}); +Object.defineProperty(exports, "InterpreterDirective", { + enumerable: true, + get: function () { + return _index.interpreterDirective; + } +}); +Object.defineProperty(exports, "Directive", { + enumerable: true, + get: function () { + return 
_index.directive; + } +}); +Object.defineProperty(exports, "DirectiveLiteral", { + enumerable: true, + get: function () { + return _index.directiveLiteral; + } +}); +Object.defineProperty(exports, "BlockStatement", { + enumerable: true, + get: function () { + return _index.blockStatement; + } +}); +Object.defineProperty(exports, "BreakStatement", { + enumerable: true, + get: function () { + return _index.breakStatement; + } +}); +Object.defineProperty(exports, "CallExpression", { + enumerable: true, + get: function () { + return _index.callExpression; + } +}); +Object.defineProperty(exports, "CatchClause", { + enumerable: true, + get: function () { + return _index.catchClause; + } +}); +Object.defineProperty(exports, "ConditionalExpression", { + enumerable: true, + get: function () { + return _index.conditionalExpression; + } +}); +Object.defineProperty(exports, "ContinueStatement", { + enumerable: true, + get: function () { + return _index.continueStatement; + } +}); +Object.defineProperty(exports, "DebuggerStatement", { + enumerable: true, + get: function () { + return _index.debuggerStatement; + } +}); +Object.defineProperty(exports, "DoWhileStatement", { + enumerable: true, + get: function () { + return _index.doWhileStatement; + } +}); +Object.defineProperty(exports, "EmptyStatement", { + enumerable: true, + get: function () { + return _index.emptyStatement; + } +}); +Object.defineProperty(exports, "ExpressionStatement", { + enumerable: true, + get: function () { + return _index.expressionStatement; + } +}); +Object.defineProperty(exports, "File", { + enumerable: true, + get: function () { + return _index.file; + } +}); +Object.defineProperty(exports, "ForInStatement", { + enumerable: true, + get: function () { + return _index.forInStatement; + } +}); +Object.defineProperty(exports, "ForStatement", { + enumerable: true, + get: function () { + return _index.forStatement; + } +}); +Object.defineProperty(exports, "FunctionDeclaration", { + enumerable: true, + get: function () { + return _index.functionDeclaration; + } +}); +Object.defineProperty(exports, "FunctionExpression", { + enumerable: true, + get: function () { + return _index.functionExpression; + } +}); +Object.defineProperty(exports, "Identifier", { + enumerable: true, + get: function () { + return _index.identifier; + } +}); +Object.defineProperty(exports, "IfStatement", { + enumerable: true, + get: function () { + return _index.ifStatement; + } +}); +Object.defineProperty(exports, "LabeledStatement", { + enumerable: true, + get: function () { + return _index.labeledStatement; + } +}); +Object.defineProperty(exports, "StringLiteral", { + enumerable: true, + get: function () { + return _index.stringLiteral; + } +}); +Object.defineProperty(exports, "NumericLiteral", { + enumerable: true, + get: function () { + return _index.numericLiteral; + } +}); +Object.defineProperty(exports, "NullLiteral", { + enumerable: true, + get: function () { + return _index.nullLiteral; + } +}); +Object.defineProperty(exports, "BooleanLiteral", { + enumerable: true, + get: function () { + return _index.booleanLiteral; + } +}); +Object.defineProperty(exports, "RegExpLiteral", { + enumerable: true, + get: function () { + return _index.regExpLiteral; + } +}); +Object.defineProperty(exports, "LogicalExpression", { + enumerable: true, + get: function () { + return _index.logicalExpression; + } +}); +Object.defineProperty(exports, "MemberExpression", { + enumerable: true, + get: function () { + return _index.memberExpression; + } +}); 
+Object.defineProperty(exports, "NewExpression", { + enumerable: true, + get: function () { + return _index.newExpression; + } +}); +Object.defineProperty(exports, "Program", { + enumerable: true, + get: function () { + return _index.program; + } +}); +Object.defineProperty(exports, "ObjectExpression", { + enumerable: true, + get: function () { + return _index.objectExpression; + } +}); +Object.defineProperty(exports, "ObjectMethod", { + enumerable: true, + get: function () { + return _index.objectMethod; + } +}); +Object.defineProperty(exports, "ObjectProperty", { + enumerable: true, + get: function () { + return _index.objectProperty; + } +}); +Object.defineProperty(exports, "RestElement", { + enumerable: true, + get: function () { + return _index.restElement; + } +}); +Object.defineProperty(exports, "ReturnStatement", { + enumerable: true, + get: function () { + return _index.returnStatement; + } +}); +Object.defineProperty(exports, "SequenceExpression", { + enumerable: true, + get: function () { + return _index.sequenceExpression; + } +}); +Object.defineProperty(exports, "ParenthesizedExpression", { + enumerable: true, + get: function () { + return _index.parenthesizedExpression; + } +}); +Object.defineProperty(exports, "SwitchCase", { + enumerable: true, + get: function () { + return _index.switchCase; + } +}); +Object.defineProperty(exports, "SwitchStatement", { + enumerable: true, + get: function () { + return _index.switchStatement; + } +}); +Object.defineProperty(exports, "ThisExpression", { + enumerable: true, + get: function () { + return _index.thisExpression; + } +}); +Object.defineProperty(exports, "ThrowStatement", { + enumerable: true, + get: function () { + return _index.throwStatement; + } +}); +Object.defineProperty(exports, "TryStatement", { + enumerable: true, + get: function () { + return _index.tryStatement; + } +}); +Object.defineProperty(exports, "UnaryExpression", { + enumerable: true, + get: function () { + return _index.unaryExpression; + } +}); +Object.defineProperty(exports, "UpdateExpression", { + enumerable: true, + get: function () { + return _index.updateExpression; + } +}); +Object.defineProperty(exports, "VariableDeclaration", { + enumerable: true, + get: function () { + return _index.variableDeclaration; + } +}); +Object.defineProperty(exports, "VariableDeclarator", { + enumerable: true, + get: function () { + return _index.variableDeclarator; + } +}); +Object.defineProperty(exports, "WhileStatement", { + enumerable: true, + get: function () { + return _index.whileStatement; + } +}); +Object.defineProperty(exports, "WithStatement", { + enumerable: true, + get: function () { + return _index.withStatement; + } +}); +Object.defineProperty(exports, "AssignmentPattern", { + enumerable: true, + get: function () { + return _index.assignmentPattern; + } +}); +Object.defineProperty(exports, "ArrayPattern", { + enumerable: true, + get: function () { + return _index.arrayPattern; + } +}); +Object.defineProperty(exports, "ArrowFunctionExpression", { + enumerable: true, + get: function () { + return _index.arrowFunctionExpression; + } +}); +Object.defineProperty(exports, "ClassBody", { + enumerable: true, + get: function () { + return _index.classBody; + } +}); +Object.defineProperty(exports, "ClassExpression", { + enumerable: true, + get: function () { + return _index.classExpression; + } +}); +Object.defineProperty(exports, "ClassDeclaration", { + enumerable: true, + get: function () { + return _index.classDeclaration; + } +}); +Object.defineProperty(exports, 
"ExportAllDeclaration", { + enumerable: true, + get: function () { + return _index.exportAllDeclaration; + } +}); +Object.defineProperty(exports, "ExportDefaultDeclaration", { + enumerable: true, + get: function () { + return _index.exportDefaultDeclaration; + } +}); +Object.defineProperty(exports, "ExportNamedDeclaration", { + enumerable: true, + get: function () { + return _index.exportNamedDeclaration; + } +}); +Object.defineProperty(exports, "ExportSpecifier", { + enumerable: true, + get: function () { + return _index.exportSpecifier; + } +}); +Object.defineProperty(exports, "ForOfStatement", { + enumerable: true, + get: function () { + return _index.forOfStatement; + } +}); +Object.defineProperty(exports, "ImportDeclaration", { + enumerable: true, + get: function () { + return _index.importDeclaration; + } +}); +Object.defineProperty(exports, "ImportDefaultSpecifier", { + enumerable: true, + get: function () { + return _index.importDefaultSpecifier; + } +}); +Object.defineProperty(exports, "ImportNamespaceSpecifier", { + enumerable: true, + get: function () { + return _index.importNamespaceSpecifier; + } +}); +Object.defineProperty(exports, "ImportSpecifier", { + enumerable: true, + get: function () { + return _index.importSpecifier; + } +}); +Object.defineProperty(exports, "MetaProperty", { + enumerable: true, + get: function () { + return _index.metaProperty; + } +}); +Object.defineProperty(exports, "ClassMethod", { + enumerable: true, + get: function () { + return _index.classMethod; + } +}); +Object.defineProperty(exports, "ObjectPattern", { + enumerable: true, + get: function () { + return _index.objectPattern; + } +}); +Object.defineProperty(exports, "SpreadElement", { + enumerable: true, + get: function () { + return _index.spreadElement; + } +}); +Object.defineProperty(exports, "Super", { + enumerable: true, + get: function () { + return _index.super; + } +}); +Object.defineProperty(exports, "TaggedTemplateExpression", { + enumerable: true, + get: function () { + return _index.taggedTemplateExpression; + } +}); +Object.defineProperty(exports, "TemplateElement", { + enumerable: true, + get: function () { + return _index.templateElement; + } +}); +Object.defineProperty(exports, "TemplateLiteral", { + enumerable: true, + get: function () { + return _index.templateLiteral; + } +}); +Object.defineProperty(exports, "YieldExpression", { + enumerable: true, + get: function () { + return _index.yieldExpression; + } +}); +Object.defineProperty(exports, "AwaitExpression", { + enumerable: true, + get: function () { + return _index.awaitExpression; + } +}); +Object.defineProperty(exports, "Import", { + enumerable: true, + get: function () { + return _index.import; + } +}); +Object.defineProperty(exports, "BigIntLiteral", { + enumerable: true, + get: function () { + return _index.bigIntLiteral; + } +}); +Object.defineProperty(exports, "ExportNamespaceSpecifier", { + enumerable: true, + get: function () { + return _index.exportNamespaceSpecifier; + } +}); +Object.defineProperty(exports, "OptionalMemberExpression", { + enumerable: true, + get: function () { + return _index.optionalMemberExpression; + } +}); +Object.defineProperty(exports, "OptionalCallExpression", { + enumerable: true, + get: function () { + return _index.optionalCallExpression; + } +}); +Object.defineProperty(exports, "AnyTypeAnnotation", { + enumerable: true, + get: function () { + return _index.anyTypeAnnotation; + } +}); +Object.defineProperty(exports, "ArrayTypeAnnotation", { + enumerable: true, + get: function () { + 
return _index.arrayTypeAnnotation; + } +}); +Object.defineProperty(exports, "BooleanTypeAnnotation", { + enumerable: true, + get: function () { + return _index.booleanTypeAnnotation; + } +}); +Object.defineProperty(exports, "BooleanLiteralTypeAnnotation", { + enumerable: true, + get: function () { + return _index.booleanLiteralTypeAnnotation; + } +}); +Object.defineProperty(exports, "NullLiteralTypeAnnotation", { + enumerable: true, + get: function () { + return _index.nullLiteralTypeAnnotation; + } +}); +Object.defineProperty(exports, "ClassImplements", { + enumerable: true, + get: function () { + return _index.classImplements; + } +}); +Object.defineProperty(exports, "DeclareClass", { + enumerable: true, + get: function () { + return _index.declareClass; + } +}); +Object.defineProperty(exports, "DeclareFunction", { + enumerable: true, + get: function () { + return _index.declareFunction; + } +}); +Object.defineProperty(exports, "DeclareInterface", { + enumerable: true, + get: function () { + return _index.declareInterface; + } +}); +Object.defineProperty(exports, "DeclareModule", { + enumerable: true, + get: function () { + return _index.declareModule; + } +}); +Object.defineProperty(exports, "DeclareModuleExports", { + enumerable: true, + get: function () { + return _index.declareModuleExports; + } +}); +Object.defineProperty(exports, "DeclareTypeAlias", { + enumerable: true, + get: function () { + return _index.declareTypeAlias; + } +}); +Object.defineProperty(exports, "DeclareOpaqueType", { + enumerable: true, + get: function () { + return _index.declareOpaqueType; + } +}); +Object.defineProperty(exports, "DeclareVariable", { + enumerable: true, + get: function () { + return _index.declareVariable; + } +}); +Object.defineProperty(exports, "DeclareExportDeclaration", { + enumerable: true, + get: function () { + return _index.declareExportDeclaration; + } +}); +Object.defineProperty(exports, "DeclareExportAllDeclaration", { + enumerable: true, + get: function () { + return _index.declareExportAllDeclaration; + } +}); +Object.defineProperty(exports, "DeclaredPredicate", { + enumerable: true, + get: function () { + return _index.declaredPredicate; + } +}); +Object.defineProperty(exports, "ExistsTypeAnnotation", { + enumerable: true, + get: function () { + return _index.existsTypeAnnotation; + } +}); +Object.defineProperty(exports, "FunctionTypeAnnotation", { + enumerable: true, + get: function () { + return _index.functionTypeAnnotation; + } +}); +Object.defineProperty(exports, "FunctionTypeParam", { + enumerable: true, + get: function () { + return _index.functionTypeParam; + } +}); +Object.defineProperty(exports, "GenericTypeAnnotation", { + enumerable: true, + get: function () { + return _index.genericTypeAnnotation; + } +}); +Object.defineProperty(exports, "InferredPredicate", { + enumerable: true, + get: function () { + return _index.inferredPredicate; + } +}); +Object.defineProperty(exports, "InterfaceExtends", { + enumerable: true, + get: function () { + return _index.interfaceExtends; + } +}); +Object.defineProperty(exports, "InterfaceDeclaration", { + enumerable: true, + get: function () { + return _index.interfaceDeclaration; + } +}); +Object.defineProperty(exports, "InterfaceTypeAnnotation", { + enumerable: true, + get: function () { + return _index.interfaceTypeAnnotation; + } +}); +Object.defineProperty(exports, "IntersectionTypeAnnotation", { + enumerable: true, + get: function () { + return _index.intersectionTypeAnnotation; + } +}); +Object.defineProperty(exports, 
"MixedTypeAnnotation", { + enumerable: true, + get: function () { + return _index.mixedTypeAnnotation; + } +}); +Object.defineProperty(exports, "EmptyTypeAnnotation", { + enumerable: true, + get: function () { + return _index.emptyTypeAnnotation; + } +}); +Object.defineProperty(exports, "NullableTypeAnnotation", { + enumerable: true, + get: function () { + return _index.nullableTypeAnnotation; + } +}); +Object.defineProperty(exports, "NumberLiteralTypeAnnotation", { + enumerable: true, + get: function () { + return _index.numberLiteralTypeAnnotation; + } +}); +Object.defineProperty(exports, "NumberTypeAnnotation", { + enumerable: true, + get: function () { + return _index.numberTypeAnnotation; + } +}); +Object.defineProperty(exports, "ObjectTypeAnnotation", { + enumerable: true, + get: function () { + return _index.objectTypeAnnotation; + } +}); +Object.defineProperty(exports, "ObjectTypeInternalSlot", { + enumerable: true, + get: function () { + return _index.objectTypeInternalSlot; + } +}); +Object.defineProperty(exports, "ObjectTypeCallProperty", { + enumerable: true, + get: function () { + return _index.objectTypeCallProperty; + } +}); +Object.defineProperty(exports, "ObjectTypeIndexer", { + enumerable: true, + get: function () { + return _index.objectTypeIndexer; + } +}); +Object.defineProperty(exports, "ObjectTypeProperty", { + enumerable: true, + get: function () { + return _index.objectTypeProperty; + } +}); +Object.defineProperty(exports, "ObjectTypeSpreadProperty", { + enumerable: true, + get: function () { + return _index.objectTypeSpreadProperty; + } +}); +Object.defineProperty(exports, "OpaqueType", { + enumerable: true, + get: function () { + return _index.opaqueType; + } +}); +Object.defineProperty(exports, "QualifiedTypeIdentifier", { + enumerable: true, + get: function () { + return _index.qualifiedTypeIdentifier; + } +}); +Object.defineProperty(exports, "StringLiteralTypeAnnotation", { + enumerable: true, + get: function () { + return _index.stringLiteralTypeAnnotation; + } +}); +Object.defineProperty(exports, "StringTypeAnnotation", { + enumerable: true, + get: function () { + return _index.stringTypeAnnotation; + } +}); +Object.defineProperty(exports, "SymbolTypeAnnotation", { + enumerable: true, + get: function () { + return _index.symbolTypeAnnotation; + } +}); +Object.defineProperty(exports, "ThisTypeAnnotation", { + enumerable: true, + get: function () { + return _index.thisTypeAnnotation; + } +}); +Object.defineProperty(exports, "TupleTypeAnnotation", { + enumerable: true, + get: function () { + return _index.tupleTypeAnnotation; + } +}); +Object.defineProperty(exports, "TypeofTypeAnnotation", { + enumerable: true, + get: function () { + return _index.typeofTypeAnnotation; + } +}); +Object.defineProperty(exports, "TypeAlias", { + enumerable: true, + get: function () { + return _index.typeAlias; + } +}); +Object.defineProperty(exports, "TypeAnnotation", { + enumerable: true, + get: function () { + return _index.typeAnnotation; + } +}); +Object.defineProperty(exports, "TypeCastExpression", { + enumerable: true, + get: function () { + return _index.typeCastExpression; + } +}); +Object.defineProperty(exports, "TypeParameter", { + enumerable: true, + get: function () { + return _index.typeParameter; + } +}); +Object.defineProperty(exports, "TypeParameterDeclaration", { + enumerable: true, + get: function () { + return _index.typeParameterDeclaration; + } +}); +Object.defineProperty(exports, "TypeParameterInstantiation", { + enumerable: true, + get: function () { + 
return _index.typeParameterInstantiation; + } +}); +Object.defineProperty(exports, "UnionTypeAnnotation", { + enumerable: true, + get: function () { + return _index.unionTypeAnnotation; + } +}); +Object.defineProperty(exports, "Variance", { + enumerable: true, + get: function () { + return _index.variance; + } +}); +Object.defineProperty(exports, "VoidTypeAnnotation", { + enumerable: true, + get: function () { + return _index.voidTypeAnnotation; + } +}); +Object.defineProperty(exports, "EnumDeclaration", { + enumerable: true, + get: function () { + return _index.enumDeclaration; + } +}); +Object.defineProperty(exports, "EnumBooleanBody", { + enumerable: true, + get: function () { + return _index.enumBooleanBody; + } +}); +Object.defineProperty(exports, "EnumNumberBody", { + enumerable: true, + get: function () { + return _index.enumNumberBody; + } +}); +Object.defineProperty(exports, "EnumStringBody", { + enumerable: true, + get: function () { + return _index.enumStringBody; + } +}); +Object.defineProperty(exports, "EnumSymbolBody", { + enumerable: true, + get: function () { + return _index.enumSymbolBody; + } +}); +Object.defineProperty(exports, "EnumBooleanMember", { + enumerable: true, + get: function () { + return _index.enumBooleanMember; + } +}); +Object.defineProperty(exports, "EnumNumberMember", { + enumerable: true, + get: function () { + return _index.enumNumberMember; + } +}); +Object.defineProperty(exports, "EnumStringMember", { + enumerable: true, + get: function () { + return _index.enumStringMember; + } +}); +Object.defineProperty(exports, "EnumDefaultedMember", { + enumerable: true, + get: function () { + return _index.enumDefaultedMember; + } +}); +Object.defineProperty(exports, "JSXAttribute", { + enumerable: true, + get: function () { + return _index.jsxAttribute; + } +}); +Object.defineProperty(exports, "JSXClosingElement", { + enumerable: true, + get: function () { + return _index.jsxClosingElement; + } +}); +Object.defineProperty(exports, "JSXElement", { + enumerable: true, + get: function () { + return _index.jsxElement; + } +}); +Object.defineProperty(exports, "JSXEmptyExpression", { + enumerable: true, + get: function () { + return _index.jsxEmptyExpression; + } +}); +Object.defineProperty(exports, "JSXExpressionContainer", { + enumerable: true, + get: function () { + return _index.jsxExpressionContainer; + } +}); +Object.defineProperty(exports, "JSXSpreadChild", { + enumerable: true, + get: function () { + return _index.jsxSpreadChild; + } +}); +Object.defineProperty(exports, "JSXIdentifier", { + enumerable: true, + get: function () { + return _index.jsxIdentifier; + } +}); +Object.defineProperty(exports, "JSXMemberExpression", { + enumerable: true, + get: function () { + return _index.jsxMemberExpression; + } +}); +Object.defineProperty(exports, "JSXNamespacedName", { + enumerable: true, + get: function () { + return _index.jsxNamespacedName; + } +}); +Object.defineProperty(exports, "JSXOpeningElement", { + enumerable: true, + get: function () { + return _index.jsxOpeningElement; + } +}); +Object.defineProperty(exports, "JSXSpreadAttribute", { + enumerable: true, + get: function () { + return _index.jsxSpreadAttribute; + } +}); +Object.defineProperty(exports, "JSXText", { + enumerable: true, + get: function () { + return _index.jsxText; + } +}); +Object.defineProperty(exports, "JSXFragment", { + enumerable: true, + get: function () { + return _index.jsxFragment; + } +}); +Object.defineProperty(exports, "JSXOpeningFragment", { + enumerable: true, + get: function 
() { + return _index.jsxOpeningFragment; + } +}); +Object.defineProperty(exports, "JSXClosingFragment", { + enumerable: true, + get: function () { + return _index.jsxClosingFragment; + } +}); +Object.defineProperty(exports, "Noop", { + enumerable: true, + get: function () { + return _index.noop; + } +}); +Object.defineProperty(exports, "Placeholder", { + enumerable: true, + get: function () { + return _index.placeholder; + } +}); +Object.defineProperty(exports, "V8IntrinsicIdentifier", { + enumerable: true, + get: function () { + return _index.v8IntrinsicIdentifier; + } +}); +Object.defineProperty(exports, "ArgumentPlaceholder", { + enumerable: true, + get: function () { + return _index.argumentPlaceholder; + } +}); +Object.defineProperty(exports, "BindExpression", { + enumerable: true, + get: function () { + return _index.bindExpression; + } +}); +Object.defineProperty(exports, "ClassProperty", { + enumerable: true, + get: function () { + return _index.classProperty; + } +}); +Object.defineProperty(exports, "PipelineTopicExpression", { + enumerable: true, + get: function () { + return _index.pipelineTopicExpression; + } +}); +Object.defineProperty(exports, "PipelineBareFunction", { + enumerable: true, + get: function () { + return _index.pipelineBareFunction; + } +}); +Object.defineProperty(exports, "PipelinePrimaryTopicReference", { + enumerable: true, + get: function () { + return _index.pipelinePrimaryTopicReference; + } +}); +Object.defineProperty(exports, "ClassPrivateProperty", { + enumerable: true, + get: function () { + return _index.classPrivateProperty; + } +}); +Object.defineProperty(exports, "ClassPrivateMethod", { + enumerable: true, + get: function () { + return _index.classPrivateMethod; + } +}); +Object.defineProperty(exports, "ImportAttribute", { + enumerable: true, + get: function () { + return _index.importAttribute; + } +}); +Object.defineProperty(exports, "Decorator", { + enumerable: true, + get: function () { + return _index.decorator; + } +}); +Object.defineProperty(exports, "DoExpression", { + enumerable: true, + get: function () { + return _index.doExpression; + } +}); +Object.defineProperty(exports, "ExportDefaultSpecifier", { + enumerable: true, + get: function () { + return _index.exportDefaultSpecifier; + } +}); +Object.defineProperty(exports, "PrivateName", { + enumerable: true, + get: function () { + return _index.privateName; + } +}); +Object.defineProperty(exports, "RecordExpression", { + enumerable: true, + get: function () { + return _index.recordExpression; + } +}); +Object.defineProperty(exports, "TupleExpression", { + enumerable: true, + get: function () { + return _index.tupleExpression; + } +}); +Object.defineProperty(exports, "DecimalLiteral", { + enumerable: true, + get: function () { + return _index.decimalLiteral; + } +}); +Object.defineProperty(exports, "StaticBlock", { + enumerable: true, + get: function () { + return _index.staticBlock; + } +}); +Object.defineProperty(exports, "TSParameterProperty", { + enumerable: true, + get: function () { + return _index.tsParameterProperty; + } +}); +Object.defineProperty(exports, "TSDeclareFunction", { + enumerable: true, + get: function () { + return _index.tsDeclareFunction; + } +}); +Object.defineProperty(exports, "TSDeclareMethod", { + enumerable: true, + get: function () { + return _index.tsDeclareMethod; + } +}); +Object.defineProperty(exports, "TSQualifiedName", { + enumerable: true, + get: function () { + return _index.tsQualifiedName; + } +}); +Object.defineProperty(exports, 
"TSCallSignatureDeclaration", { + enumerable: true, + get: function () { + return _index.tsCallSignatureDeclaration; + } +}); +Object.defineProperty(exports, "TSConstructSignatureDeclaration", { + enumerable: true, + get: function () { + return _index.tsConstructSignatureDeclaration; + } +}); +Object.defineProperty(exports, "TSPropertySignature", { + enumerable: true, + get: function () { + return _index.tsPropertySignature; + } +}); +Object.defineProperty(exports, "TSMethodSignature", { + enumerable: true, + get: function () { + return _index.tsMethodSignature; + } +}); +Object.defineProperty(exports, "TSIndexSignature", { + enumerable: true, + get: function () { + return _index.tsIndexSignature; + } +}); +Object.defineProperty(exports, "TSAnyKeyword", { + enumerable: true, + get: function () { + return _index.tsAnyKeyword; + } +}); +Object.defineProperty(exports, "TSBooleanKeyword", { + enumerable: true, + get: function () { + return _index.tsBooleanKeyword; + } +}); +Object.defineProperty(exports, "TSBigIntKeyword", { + enumerable: true, + get: function () { + return _index.tsBigIntKeyword; + } +}); +Object.defineProperty(exports, "TSIntrinsicKeyword", { + enumerable: true, + get: function () { + return _index.tsIntrinsicKeyword; + } +}); +Object.defineProperty(exports, "TSNeverKeyword", { + enumerable: true, + get: function () { + return _index.tsNeverKeyword; + } +}); +Object.defineProperty(exports, "TSNullKeyword", { + enumerable: true, + get: function () { + return _index.tsNullKeyword; + } +}); +Object.defineProperty(exports, "TSNumberKeyword", { + enumerable: true, + get: function () { + return _index.tsNumberKeyword; + } +}); +Object.defineProperty(exports, "TSObjectKeyword", { + enumerable: true, + get: function () { + return _index.tsObjectKeyword; + } +}); +Object.defineProperty(exports, "TSStringKeyword", { + enumerable: true, + get: function () { + return _index.tsStringKeyword; + } +}); +Object.defineProperty(exports, "TSSymbolKeyword", { + enumerable: true, + get: function () { + return _index.tsSymbolKeyword; + } +}); +Object.defineProperty(exports, "TSUndefinedKeyword", { + enumerable: true, + get: function () { + return _index.tsUndefinedKeyword; + } +}); +Object.defineProperty(exports, "TSUnknownKeyword", { + enumerable: true, + get: function () { + return _index.tsUnknownKeyword; + } +}); +Object.defineProperty(exports, "TSVoidKeyword", { + enumerable: true, + get: function () { + return _index.tsVoidKeyword; + } +}); +Object.defineProperty(exports, "TSThisType", { + enumerable: true, + get: function () { + return _index.tsThisType; + } +}); +Object.defineProperty(exports, "TSFunctionType", { + enumerable: true, + get: function () { + return _index.tsFunctionType; + } +}); +Object.defineProperty(exports, "TSConstructorType", { + enumerable: true, + get: function () { + return _index.tsConstructorType; + } +}); +Object.defineProperty(exports, "TSTypeReference", { + enumerable: true, + get: function () { + return _index.tsTypeReference; + } +}); +Object.defineProperty(exports, "TSTypePredicate", { + enumerable: true, + get: function () { + return _index.tsTypePredicate; + } +}); +Object.defineProperty(exports, "TSTypeQuery", { + enumerable: true, + get: function () { + return _index.tsTypeQuery; + } +}); +Object.defineProperty(exports, "TSTypeLiteral", { + enumerable: true, + get: function () { + return _index.tsTypeLiteral; + } +}); +Object.defineProperty(exports, "TSArrayType", { + enumerable: true, + get: function () { + return _index.tsArrayType; + } +}); 
+Object.defineProperty(exports, "TSTupleType", { + enumerable: true, + get: function () { + return _index.tsTupleType; + } +}); +Object.defineProperty(exports, "TSOptionalType", { + enumerable: true, + get: function () { + return _index.tsOptionalType; + } +}); +Object.defineProperty(exports, "TSRestType", { + enumerable: true, + get: function () { + return _index.tsRestType; + } +}); +Object.defineProperty(exports, "TSNamedTupleMember", { + enumerable: true, + get: function () { + return _index.tsNamedTupleMember; + } +}); +Object.defineProperty(exports, "TSUnionType", { + enumerable: true, + get: function () { + return _index.tsUnionType; + } +}); +Object.defineProperty(exports, "TSIntersectionType", { + enumerable: true, + get: function () { + return _index.tsIntersectionType; + } +}); +Object.defineProperty(exports, "TSConditionalType", { + enumerable: true, + get: function () { + return _index.tsConditionalType; + } +}); +Object.defineProperty(exports, "TSInferType", { + enumerable: true, + get: function () { + return _index.tsInferType; + } +}); +Object.defineProperty(exports, "TSParenthesizedType", { + enumerable: true, + get: function () { + return _index.tsParenthesizedType; + } +}); +Object.defineProperty(exports, "TSTypeOperator", { + enumerable: true, + get: function () { + return _index.tsTypeOperator; + } +}); +Object.defineProperty(exports, "TSIndexedAccessType", { + enumerable: true, + get: function () { + return _index.tsIndexedAccessType; + } +}); +Object.defineProperty(exports, "TSMappedType", { + enumerable: true, + get: function () { + return _index.tsMappedType; + } +}); +Object.defineProperty(exports, "TSLiteralType", { + enumerable: true, + get: function () { + return _index.tsLiteralType; + } +}); +Object.defineProperty(exports, "TSExpressionWithTypeArguments", { + enumerable: true, + get: function () { + return _index.tsExpressionWithTypeArguments; + } +}); +Object.defineProperty(exports, "TSInterfaceDeclaration", { + enumerable: true, + get: function () { + return _index.tsInterfaceDeclaration; + } +}); +Object.defineProperty(exports, "TSInterfaceBody", { + enumerable: true, + get: function () { + return _index.tsInterfaceBody; + } +}); +Object.defineProperty(exports, "TSTypeAliasDeclaration", { + enumerable: true, + get: function () { + return _index.tsTypeAliasDeclaration; + } +}); +Object.defineProperty(exports, "TSAsExpression", { + enumerable: true, + get: function () { + return _index.tsAsExpression; + } +}); +Object.defineProperty(exports, "TSTypeAssertion", { + enumerable: true, + get: function () { + return _index.tsTypeAssertion; + } +}); +Object.defineProperty(exports, "TSEnumDeclaration", { + enumerable: true, + get: function () { + return _index.tsEnumDeclaration; + } +}); +Object.defineProperty(exports, "TSEnumMember", { + enumerable: true, + get: function () { + return _index.tsEnumMember; + } +}); +Object.defineProperty(exports, "TSModuleDeclaration", { + enumerable: true, + get: function () { + return _index.tsModuleDeclaration; + } +}); +Object.defineProperty(exports, "TSModuleBlock", { + enumerable: true, + get: function () { + return _index.tsModuleBlock; + } +}); +Object.defineProperty(exports, "TSImportType", { + enumerable: true, + get: function () { + return _index.tsImportType; + } +}); +Object.defineProperty(exports, "TSImportEqualsDeclaration", { + enumerable: true, + get: function () { + return _index.tsImportEqualsDeclaration; + } +}); +Object.defineProperty(exports, "TSExternalModuleReference", { + enumerable: true, + get: function 
() {
+    return _index.tsExternalModuleReference;
+  }
+});
+Object.defineProperty(exports, "TSNonNullExpression", {
+  enumerable: true,
+  get: function () {
+    return _index.tsNonNullExpression;
+  }
+});
+Object.defineProperty(exports, "TSExportAssignment", {
+  enumerable: true,
+  get: function () {
+    return _index.tsExportAssignment;
+  }
+});
+Object.defineProperty(exports, "TSNamespaceExportDeclaration", {
+  enumerable: true,
+  get: function () {
+    return _index.tsNamespaceExportDeclaration;
+  }
+});
+Object.defineProperty(exports, "TSTypeAnnotation", {
+  enumerable: true,
+  get: function () {
+    return _index.tsTypeAnnotation;
+  }
+});
+Object.defineProperty(exports, "TSTypeParameterInstantiation", {
+  enumerable: true,
+  get: function () {
+    return _index.tsTypeParameterInstantiation;
+  }
+});
+Object.defineProperty(exports, "TSTypeParameterDeclaration", {
+  enumerable: true,
+  get: function () {
+    return _index.tsTypeParameterDeclaration;
+  }
+});
+Object.defineProperty(exports, "TSTypeParameter", {
+  enumerable: true,
+  get: function () {
+    return _index.tsTypeParameter;
+  }
+});
+Object.defineProperty(exports, "NumberLiteral", {
+  enumerable: true,
+  get: function () {
+    return _index.numberLiteral;
+  }
+});
+Object.defineProperty(exports, "RegexLiteral", {
+  enumerable: true,
+  get: function () {
+    return _index.regexLiteral;
+  }
+});
+Object.defineProperty(exports, "RestProperty", {
+  enumerable: true,
+  get: function () {
+    return _index.restProperty;
+  }
+});
+Object.defineProperty(exports, "SpreadProperty", {
+  enumerable: true,
+  get: function () {
+    return _index.spreadProperty;
+  }
+});
+
+var _index = require("./index");
\ No newline at end of file
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/typescript/createTSUnionType.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/typescript/createTSUnionType.js
index 7dbe885f2c42b3..9f1b8c9bff4abd 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/typescript/createTSUnionType.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/builders/typescript/createTSUnionType.js
@@ -12,7 +12,7 @@ var _removeTypeDuplicates = _interopRequireDefault(require("../../modifications/
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 
 function createTSUnionType(typeAnnotations) {
-  const types = typeAnnotations.map(type => type.typeAnnotations);
+  const types = typeAnnotations.map(type => type.typeAnnotation);
   const flattened = (0, _removeTypeDuplicates.default)(types);
 
   if (flattened.length === 1) {
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/clone/cloneNode.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/clone/cloneNode.js
index 44032e3f9d896f..01b08e6a5cb079 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/clone/cloneNode.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/clone/cloneNode.js
@@ -7,6 +7,8 @@ exports.default = cloneNode;
 
 var _definitions = require("../definitions");
 
+var _generated = require("../validators/generated");
+
 const has = Function.call.bind(Object.prototype.hasOwnProperty);
 
 function cloneIfNode(obj, deep, withoutLoc) {
@@ -31,10 +33,10 @@ function cloneNode(node, deep = true, withoutLoc = false) {
     type
   } = node;
   const newNode = {
-    type
+    type: node.type
   };
 
-  if (type === "Identifier") {
+  if ((0, _generated.isIdentifier)(node)) {
     newNode.name = node.name;
 
     if (has(node, "optional") && typeof node.optional === "boolean") {
@@ -50,7 +52,7 @@ function cloneNode(node, deep = true, withoutLoc = false) {
   for (const field of Object.keys(_definitions.NODE_FIELDS[type])) {
     if (has(node, field)) {
       if (deep) {
-        newNode[field] = type === "File" && field === "comments" ? maybeCloneComments(node.comments, deep, withoutLoc) : cloneIfNodeOrArray(node[field], true, withoutLoc);
+        newNode[field] = (0, _generated.isFile)(node) && field === "comments" ? maybeCloneComments(node.comments, deep, withoutLoc) : cloneIfNodeOrArray(node[field], true, withoutLoc);
       } else {
         newNode[field] = node[field];
       }
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/Scope.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/Scope.js
new file mode 100644
index 00000000000000..e69de29bb2d1d6
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toExpression.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toExpression.js
index 6e58b0de4d6e60..2d944f0e98d825 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toExpression.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toExpression.js
@@ -3,10 +3,13 @@
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
-exports.default = toExpression;
+exports.default = void 0;
 
 var _generated = require("../validators/generated");
 
+var _default = toExpression;
+exports.default = _default;
+
 function toExpression(node) {
   if ((0, _generated.isExpressionStatement)(node)) {
     node = node.expression;
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toStatement.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toStatement.js
index 69b22ae09cc6ce..da020a61a017c8 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toStatement.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/toStatement.js
@@ -3,12 +3,15 @@
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
-exports.default = toStatement;
+exports.default = void 0;
 
 var _generated = require("../validators/generated");
 
 var _generated2 = require("../builders/generated");
 
+var _default = toStatement;
+exports.default = _default;
+
 function toStatement(node, ignore) {
   if ((0, _generated.isStatement)(node)) {
     return node;
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/valueToNode.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/valueToNode.js
index d1118e164b0aa4..95c3061ba5c3fd 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/valueToNode.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/converters/valueToNode.js
@@ -3,7 +3,7 @@
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
-exports.default = valueToNode;
+exports.default = void 0;
 
 var _isPlainObject = _interopRequireDefault(require("lodash/isPlainObject"));
 
@@ -15,6 +15,9 @@ var _generated = require("../builders/generated");
 
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 
+var _default = valueToNode;
+exports.default = _default;
+
 function valueToNode(value) {
   if (value === undefined) {
     return (0, _generated.identifier)("undefined");
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/core.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/core.js
index d6cb97eac285fb..daa22190b34461 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/core.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/core.js
@@ -409,7 +409,7 @@ exports.patternLikeCommon = patternLikeCommon;
       })) return;
     }
 
-    if (((0, _helperValidatorIdentifier.isKeyword)(node.name) || (0, _helperValidatorIdentifier.isReservedWord)(node.name)) && node.name !== "this") {
+    if (((0, _helperValidatorIdentifier.isKeyword)(node.name) || (0, _helperValidatorIdentifier.isReservedWord)(node.name, false)) && node.name !== "this") {
       throw new TypeError(`"${node.name}" is not a valid identifier`);
     }
   }
@@ -1052,6 +1052,7 @@ exports.patternLikeCommon = patternLikeCommon;
     source: {
      validate: (0, _utils.assertNodeType)("StringLiteral")
    },
+    exportKind: (0, _utils.validateOptional)((0, _utils.assertOneOf)("type", "value")),
     assertions: {
       optional: true,
       validate: (0, _utils.chain)((0, _utils.assertValueType)("array"), (0, _utils.assertNodeType)("ImportAttribute"))
@@ -1471,7 +1472,7 @@ exports.classMethodOrDeclareMethodCommon = classMethodOrDeclareMethodCommon;
       validate: (0, _utils.assertNodeType)("Expression")
     },
     arguments: {
-      validate: (0, _utils.chain)((0, _utils.assertValueType)("array"), (0, _utils.assertEach)((0, _utils.assertNodeType)("Expression", "SpreadElement", "JSXNamespacedName")))
+      validate: (0, _utils.chain)((0, _utils.assertValueType)("array"), (0, _utils.assertEach)((0, _utils.assertNodeType)("Expression", "SpreadElement", "JSXNamespacedName", "ArgumentPlaceholder")))
     },
     optional: {
       validate: !process.env.BABEL_TYPES_8_BREAKING ? (0, _utils.assertValueType)("boolean") : (0, _utils.chain)((0, _utils.assertValueType)("boolean"), (0, _utils.assertOptionalChainStart)())
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/experimental.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/experimental.js
index 1a67e2446ef4e3..5ed7ba92c9364b 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/experimental.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/experimental.js
@@ -96,6 +96,10 @@ function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj;
       validate: (0, _utils.assertNodeType)("Expression"),
       optional: true
     },
+    typeAnnotation: {
+      validate: (0, _utils.assertNodeType)("TypeAnnotation", "TSTypeAnnotation", "Noop"),
+      optional: true
+    },
     decorators: {
       validate: (0, _utils.chain)((0, _utils.assertValueType)("array"), (0, _utils.assertEach)((0, _utils.assertNodeType)("Decorator"))),
       optional: true
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/flow.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/flow.js
index a6d74d7aa4c9a9..15341a7b01228a 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/flow.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/flow.js
@@ -266,7 +266,8 @@ defineInterfaceishType("InterfaceDeclaration");
     static: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
     proto: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
     optional: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
-    variance: (0, _utils.validateOptionalType)("Variance")
+    variance: (0, _utils.validateOptionalType)("Variance"),
+    method: (0, _utils.validate)((0, _utils.assertValueType)("boolean"))
   }
 });
 (0, _utils.default)("ObjectTypeSpreadProperty", {
@@ -401,7 +402,7 @@ defineInterfaceishType("InterfaceDeclaration");
   aliases: ["EnumBody"],
   visitor: ["members"],
   fields: {
-    explicit: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
+    explicitType: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
     members: (0, _utils.validateArrayOfType)("EnumBooleanMember")
   }
 });
@@ -409,7 +410,7 @@ defineInterfaceishType("InterfaceDeclaration");
   aliases: ["EnumBody"],
   visitor: ["members"],
   fields: {
-    explicit: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
+    explicitType: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
     members: (0, _utils.validateArrayOfType)("EnumNumberMember")
   }
 });
@@ -417,7 +418,7 @@ defineInterfaceishType("InterfaceDeclaration");
   aliases: ["EnumBody"],
   visitor: ["members"],
   fields: {
-    explicit: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
+    explicitType: (0, _utils.validate)((0, _utils.assertValueType)("boolean")),
     members: (0, _utils.validateArrayOfType)(["EnumStringMember", "EnumDefaultedMember"])
   }
 });
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/utils.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/utils.js
index 447142642b7d5b..9059ca7f0611b9 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/utils.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/definitions/utils.js
@@ -224,11 +224,11 @@ function assertOptionalChainStart() {
 }
 
 function chain(...fns) {
-  function validate(...args) {
+  const validate = function (...args) {
     for (const fn of fns) {
       fn(...args);
     }
-  }
+  };
 
   validate.chainOf = fns;
   return validate;
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js
index 2801bc6882885a..5650d39806a3fa 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js
@@ -442,6 +442,20 @@ Object.keys(_generated2).forEach(function (key) {
   });
 });
 
+var _uppercase = require("./builders/generated/uppercase");
+
+Object.keys(_uppercase).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
+  if (key in exports && exports[key] === _uppercase[key]) return;
+  Object.defineProperty(exports, key, {
+    enumerable: true,
+    get: function () {
+      return _uppercase[key];
+    }
+  });
+});
+
 var _cloneNode = _interopRequireDefault(require("./clone/cloneNode"));
 
 var _clone = _interopRequireDefault(require("./clone/clone"));
@@ -544,7 +558,19 @@ var _getBindingIdentifiers = _interopRequireDefault(require("./retrievers/getBin
 
 var _getOuterBindingIdentifiers = _interopRequireDefault(require("./retrievers/getOuterBindingIdentifiers"));
 
-var _traverse = _interopRequireDefault(require("./traverse/traverse"));
+var _traverse = _interopRequireWildcard(require("./traverse/traverse"));
+
+Object.keys(_traverse).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
+  if (key in exports && exports[key] === _traverse[key]) return;
+  Object.defineProperty(exports, key, {
+    enumerable: true,
+    get: function () {
+      return _traverse[key];
+    }
+  });
+});
 
 var _traverseFast = _interopRequireDefault(require("./traverse/traverseFast"));
 
@@ -600,6 +626,24 @@ Object.keys(_generated4).forEach(function (key) {
   });
 });
 
+var _generated5 = require("./ast-types/generated");
+
+Object.keys(_generated5).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
+  if (key in exports && exports[key] === _generated5[key]) return;
+  Object.defineProperty(exports, key, {
+    enumerable: true,
+    get: function () {
+      return _generated5[key];
+    }
+  });
+});
+
+function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
+
+function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+
 function _interopRequireDefault(obj) { return obj && obj.__esModule ?
obj : { default: obj }; } const react = { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js.flow b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js.flow index aaebaa6ef52efb..7377915351301e 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js.flow +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/index.js.flow @@ -430,6 +430,7 @@ declare class BabelNodeExportAllDeclaration extends BabelNode { type: "ExportAllDeclaration"; source: BabelNodeStringLiteral; assertions?: BabelNodeImportAttribute; + exportKind?: "type" | "value"; } declare class BabelNodeExportDefaultDeclaration extends BabelNode { @@ -579,7 +580,7 @@ declare class BabelNodeOptionalMemberExpression extends BabelNode { declare class BabelNodeOptionalCallExpression extends BabelNode { type: "OptionalCallExpression"; callee: BabelNodeExpression; - arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName>; + arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>; optional: boolean; typeArguments?: BabelNodeTypeParameterInstantiation; typeParameters?: BabelNodeTSTypeParameterInstantiation; @@ -796,6 +797,7 @@ declare class BabelNodeObjectTypeProperty extends BabelNode { value: BabelNodeFlowType; variance?: BabelNodeVariance; kind: "init" | "get" | "set"; + method: boolean; optional: boolean; proto: boolean; } @@ -904,19 +906,19 @@ declare class BabelNodeEnumDeclaration extends BabelNode { declare class BabelNodeEnumBooleanBody extends BabelNode { type: "EnumBooleanBody"; members: Array<BabelNodeEnumBooleanMember>; - explicit: boolean; + explicitType: boolean; } declare class BabelNodeEnumNumberBody extends BabelNode { type: "EnumNumberBody"; members: Array<BabelNodeEnumNumberMember>; - explicit: boolean; + explicitType: boolean; } declare class BabelNodeEnumStringBody extends BabelNode { type: "EnumStringBody"; members: Array<BabelNodeEnumStringMember | BabelNodeEnumDefaultedMember>; - explicit: boolean; + explicitType: boolean; } declare class BabelNodeEnumSymbolBody extends BabelNode { @@ -1089,6 +1091,7 @@ declare class BabelNodeClassPrivateProperty extends BabelNode { key: BabelNodePrivateName; value?: BabelNodeExpression; decorators?: Array<BabelNodeDecorator>; + typeAnnotation?: BabelNodeTypeAnnotation | BabelNodeTSTypeAnnotation | BabelNodeNoop; } declare class BabelNodeClassPrivateMethod extends BabelNode { @@ -1579,867 +1582,871 @@ type BabelNodeTSType = BabelNodeTSAnyKeyword | BabelNodeTSBooleanKeyword | Babel type BabelNodeTSBaseType = BabelNodeTSAnyKeyword | BabelNodeTSBooleanKeyword | BabelNodeTSBigIntKeyword | BabelNodeTSIntrinsicKeyword | BabelNodeTSNeverKeyword | BabelNodeTSNullKeyword | BabelNodeTSNumberKeyword | BabelNodeTSObjectKeyword | BabelNodeTSStringKeyword | BabelNodeTSSymbolKeyword | BabelNodeTSUndefinedKeyword | BabelNodeTSUnknownKeyword | BabelNodeTSVoidKeyword | BabelNodeTSThisType | BabelNodeTSLiteralType; declare module "@babel/types" { - declare function arrayExpression(elements?: Array<null | BabelNodeExpression | BabelNodeSpreadElement>): BabelNodeArrayExpression; - declare function assignmentExpression(operator: string, left: BabelNodeLVal, right: BabelNodeExpression): BabelNodeAssignmentExpression; - declare function binaryExpression(operator: "+" | "-" | "/" | "%" | "*" | "**" | "&" | "|" | ">>" | ">>>" | "<<" | "^" | "==" | "===" | "!=" | "!==" | "in" | "instanceof" | ">" | "<" | ">=" | "<=", 
left: BabelNodeExpression | BabelNodePrivateName, right: BabelNodeExpression): BabelNodeBinaryExpression; - declare function interpreterDirective(value: string): BabelNodeInterpreterDirective; - declare function directive(value: BabelNodeDirectiveLiteral): BabelNodeDirective; - declare function directiveLiteral(value: string): BabelNodeDirectiveLiteral; - declare function blockStatement(body: Array<BabelNodeStatement>, directives?: Array<BabelNodeDirective>): BabelNodeBlockStatement; - declare function breakStatement(label?: BabelNodeIdentifier): BabelNodeBreakStatement; - declare function callExpression(callee: BabelNodeExpression | BabelNodeV8IntrinsicIdentifier, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>): BabelNodeCallExpression; - declare function catchClause(param?: BabelNodeIdentifier | BabelNodeArrayPattern | BabelNodeObjectPattern, body: BabelNodeBlockStatement): BabelNodeCatchClause; - declare function conditionalExpression(test: BabelNodeExpression, consequent: BabelNodeExpression, alternate: BabelNodeExpression): BabelNodeConditionalExpression; - declare function continueStatement(label?: BabelNodeIdentifier): BabelNodeContinueStatement; - declare function debuggerStatement(): BabelNodeDebuggerStatement; - declare function doWhileStatement(test: BabelNodeExpression, body: BabelNodeStatement): BabelNodeDoWhileStatement; - declare function emptyStatement(): BabelNodeEmptyStatement; - declare function expressionStatement(expression: BabelNodeExpression): BabelNodeExpressionStatement; - declare function file(program: BabelNodeProgram, comments?: Array<BabelNodeCommentBlock | BabelNodeCommentLine>, tokens?: Array<any>): BabelNodeFile; - declare function forInStatement(left: BabelNodeVariableDeclaration | BabelNodeLVal, right: BabelNodeExpression, body: BabelNodeStatement): BabelNodeForInStatement; - declare function forStatement(init?: BabelNodeVariableDeclaration | BabelNodeExpression, test?: BabelNodeExpression, update?: BabelNodeExpression, body: BabelNodeStatement): BabelNodeForStatement; - declare function functionDeclaration(id?: BabelNodeIdentifier, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, generator?: boolean, async?: boolean): BabelNodeFunctionDeclaration; - declare function functionExpression(id?: BabelNodeIdentifier, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, generator?: boolean, async?: boolean): BabelNodeFunctionExpression; - declare function identifier(name: string): BabelNodeIdentifier; - declare function ifStatement(test: BabelNodeExpression, consequent: BabelNodeStatement, alternate?: BabelNodeStatement): BabelNodeIfStatement; - declare function labeledStatement(label: BabelNodeIdentifier, body: BabelNodeStatement): BabelNodeLabeledStatement; - declare function stringLiteral(value: string): BabelNodeStringLiteral; - declare function numericLiteral(value: number): BabelNodeNumericLiteral; - declare function nullLiteral(): BabelNodeNullLiteral; - declare function booleanLiteral(value: boolean): BabelNodeBooleanLiteral; - declare function regExpLiteral(pattern: string, flags?: string): BabelNodeRegExpLiteral; - declare function logicalExpression(operator: "||" | "&&" | "??", left: BabelNodeExpression, right: BabelNodeExpression): BabelNodeLogicalExpression; - declare function 
memberExpression(object: BabelNodeExpression, property: BabelNodeExpression | BabelNodeIdentifier | BabelNodePrivateName, computed?: boolean, optional?: true | false): BabelNodeMemberExpression; - declare function newExpression(callee: BabelNodeExpression | BabelNodeV8IntrinsicIdentifier, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>): BabelNodeNewExpression; - declare function program(body: Array<BabelNodeStatement>, directives?: Array<BabelNodeDirective>, sourceType?: "script" | "module", interpreter?: BabelNodeInterpreterDirective): BabelNodeProgram; - declare function objectExpression(properties: Array<BabelNodeObjectMethod | BabelNodeObjectProperty | BabelNodeSpreadElement>): BabelNodeObjectExpression; - declare function objectMethod(kind?: "method" | "get" | "set", key: BabelNodeExpression | BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, computed?: boolean, generator?: boolean, async?: boolean): BabelNodeObjectMethod; - declare function objectProperty(key: BabelNodeExpression | BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral, value: BabelNodeExpression | BabelNodePatternLike, computed?: boolean, shorthand?: boolean, decorators?: Array<BabelNodeDecorator>): BabelNodeObjectProperty; - declare function restElement(argument: BabelNodeLVal): BabelNodeRestElement; - declare function returnStatement(argument?: BabelNodeExpression): BabelNodeReturnStatement; - declare function sequenceExpression(expressions: Array<BabelNodeExpression>): BabelNodeSequenceExpression; - declare function parenthesizedExpression(expression: BabelNodeExpression): BabelNodeParenthesizedExpression; - declare function switchCase(test?: BabelNodeExpression, consequent: Array<BabelNodeStatement>): BabelNodeSwitchCase; - declare function switchStatement(discriminant: BabelNodeExpression, cases: Array<BabelNodeSwitchCase>): BabelNodeSwitchStatement; - declare function thisExpression(): BabelNodeThisExpression; - declare function throwStatement(argument: BabelNodeExpression): BabelNodeThrowStatement; - declare function tryStatement(block: BabelNodeBlockStatement, handler?: BabelNodeCatchClause, finalizer?: BabelNodeBlockStatement): BabelNodeTryStatement; - declare function unaryExpression(operator: "void" | "throw" | "delete" | "!" 
| "+" | "-" | "~" | "typeof", argument: BabelNodeExpression, prefix?: boolean): BabelNodeUnaryExpression; - declare function updateExpression(operator: "++" | "--", argument: BabelNodeExpression, prefix?: boolean): BabelNodeUpdateExpression; - declare function variableDeclaration(kind: "var" | "let" | "const", declarations: Array<BabelNodeVariableDeclarator>): BabelNodeVariableDeclaration; - declare function variableDeclarator(id: BabelNodeLVal, init?: BabelNodeExpression): BabelNodeVariableDeclarator; - declare function whileStatement(test: BabelNodeExpression, body: BabelNodeStatement): BabelNodeWhileStatement; - declare function withStatement(object: BabelNodeExpression, body: BabelNodeStatement): BabelNodeWithStatement; - declare function assignmentPattern(left: BabelNodeIdentifier | BabelNodeObjectPattern | BabelNodeArrayPattern | BabelNodeMemberExpression, right: BabelNodeExpression): BabelNodeAssignmentPattern; - declare function arrayPattern(elements: Array<null | BabelNodePatternLike>): BabelNodeArrayPattern; - declare function arrowFunctionExpression(params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement | BabelNodeExpression, async?: boolean): BabelNodeArrowFunctionExpression; - declare function classBody(body: Array<BabelNodeClassMethod | BabelNodeClassPrivateMethod | BabelNodeClassProperty | BabelNodeClassPrivateProperty | BabelNodeTSDeclareMethod | BabelNodeTSIndexSignature>): BabelNodeClassBody; - declare function classExpression(id?: BabelNodeIdentifier, superClass?: BabelNodeExpression, body: BabelNodeClassBody, decorators?: Array<BabelNodeDecorator>): BabelNodeClassExpression; - declare function classDeclaration(id: BabelNodeIdentifier, superClass?: BabelNodeExpression, body: BabelNodeClassBody, decorators?: Array<BabelNodeDecorator>): BabelNodeClassDeclaration; - declare function exportAllDeclaration(source: BabelNodeStringLiteral): BabelNodeExportAllDeclaration; - declare function exportDefaultDeclaration(declaration: BabelNodeFunctionDeclaration | BabelNodeTSDeclareFunction | BabelNodeClassDeclaration | BabelNodeExpression): BabelNodeExportDefaultDeclaration; - declare function exportNamedDeclaration(declaration?: BabelNodeDeclaration, specifiers?: Array<BabelNodeExportSpecifier | BabelNodeExportDefaultSpecifier | BabelNodeExportNamespaceSpecifier>, source?: BabelNodeStringLiteral): BabelNodeExportNamedDeclaration; - declare function exportSpecifier(local: BabelNodeIdentifier, exported: BabelNodeIdentifier | BabelNodeStringLiteral): BabelNodeExportSpecifier; - declare function forOfStatement(left: BabelNodeVariableDeclaration | BabelNodeLVal, right: BabelNodeExpression, body: BabelNodeStatement, _await?: boolean): BabelNodeForOfStatement; - declare function importDeclaration(specifiers: Array<BabelNodeImportSpecifier | BabelNodeImportDefaultSpecifier | BabelNodeImportNamespaceSpecifier>, source: BabelNodeStringLiteral): BabelNodeImportDeclaration; - declare function importDefaultSpecifier(local: BabelNodeIdentifier): BabelNodeImportDefaultSpecifier; - declare function importNamespaceSpecifier(local: BabelNodeIdentifier): BabelNodeImportNamespaceSpecifier; - declare function importSpecifier(local: BabelNodeIdentifier, imported: BabelNodeIdentifier | BabelNodeStringLiteral): BabelNodeImportSpecifier; - declare function metaProperty(meta: BabelNodeIdentifier, property: BabelNodeIdentifier): BabelNodeMetaProperty; - declare function classMethod(kind?: "get" | "set" | "method" | 
"constructor", key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, computed?: boolean, _static?: boolean, generator?: boolean, async?: boolean): BabelNodeClassMethod; - declare function objectPattern(properties: Array<BabelNodeRestElement | BabelNodeObjectProperty>): BabelNodeObjectPattern; - declare function spreadElement(argument: BabelNodeExpression): BabelNodeSpreadElement; + declare export function arrayExpression(elements?: Array<null | BabelNodeExpression | BabelNodeSpreadElement>): BabelNodeArrayExpression; + declare export function assignmentExpression(operator: string, left: BabelNodeLVal, right: BabelNodeExpression): BabelNodeAssignmentExpression; + declare export function binaryExpression(operator: "+" | "-" | "/" | "%" | "*" | "**" | "&" | "|" | ">>" | ">>>" | "<<" | "^" | "==" | "===" | "!=" | "!==" | "in" | "instanceof" | ">" | "<" | ">=" | "<=", left: BabelNodeExpression | BabelNodePrivateName, right: BabelNodeExpression): BabelNodeBinaryExpression; + declare export function interpreterDirective(value: string): BabelNodeInterpreterDirective; + declare export function directive(value: BabelNodeDirectiveLiteral): BabelNodeDirective; + declare export function directiveLiteral(value: string): BabelNodeDirectiveLiteral; + declare export function blockStatement(body: Array<BabelNodeStatement>, directives?: Array<BabelNodeDirective>): BabelNodeBlockStatement; + declare export function breakStatement(label?: BabelNodeIdentifier): BabelNodeBreakStatement; + declare export function callExpression(callee: BabelNodeExpression | BabelNodeV8IntrinsicIdentifier, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>): BabelNodeCallExpression; + declare export function catchClause(param?: BabelNodeIdentifier | BabelNodeArrayPattern | BabelNodeObjectPattern, body: BabelNodeBlockStatement): BabelNodeCatchClause; + declare export function conditionalExpression(test: BabelNodeExpression, consequent: BabelNodeExpression, alternate: BabelNodeExpression): BabelNodeConditionalExpression; + declare export function continueStatement(label?: BabelNodeIdentifier): BabelNodeContinueStatement; + declare export function debuggerStatement(): BabelNodeDebuggerStatement; + declare export function doWhileStatement(test: BabelNodeExpression, body: BabelNodeStatement): BabelNodeDoWhileStatement; + declare export function emptyStatement(): BabelNodeEmptyStatement; + declare export function expressionStatement(expression: BabelNodeExpression): BabelNodeExpressionStatement; + declare export function file(program: BabelNodeProgram, comments?: Array<BabelNodeCommentBlock | BabelNodeCommentLine>, tokens?: Array<any>): BabelNodeFile; + declare export function forInStatement(left: BabelNodeVariableDeclaration | BabelNodeLVal, right: BabelNodeExpression, body: BabelNodeStatement): BabelNodeForInStatement; + declare export function forStatement(init?: BabelNodeVariableDeclaration | BabelNodeExpression, test?: BabelNodeExpression, update?: BabelNodeExpression, body: BabelNodeStatement): BabelNodeForStatement; + declare export function functionDeclaration(id?: BabelNodeIdentifier, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, generator?: boolean, async?: boolean): 
BabelNodeFunctionDeclaration; + declare export function functionExpression(id?: BabelNodeIdentifier, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, generator?: boolean, async?: boolean): BabelNodeFunctionExpression; + declare export function identifier(name: string): BabelNodeIdentifier; + declare export function ifStatement(test: BabelNodeExpression, consequent: BabelNodeStatement, alternate?: BabelNodeStatement): BabelNodeIfStatement; + declare export function labeledStatement(label: BabelNodeIdentifier, body: BabelNodeStatement): BabelNodeLabeledStatement; + declare export function stringLiteral(value: string): BabelNodeStringLiteral; + declare export function numericLiteral(value: number): BabelNodeNumericLiteral; + declare export function nullLiteral(): BabelNodeNullLiteral; + declare export function booleanLiteral(value: boolean): BabelNodeBooleanLiteral; + declare export function regExpLiteral(pattern: string, flags?: string): BabelNodeRegExpLiteral; + declare export function logicalExpression(operator: "||" | "&&" | "??", left: BabelNodeExpression, right: BabelNodeExpression): BabelNodeLogicalExpression; + declare export function memberExpression(object: BabelNodeExpression, property: BabelNodeExpression | BabelNodeIdentifier | BabelNodePrivateName, computed?: boolean, optional?: true | false): BabelNodeMemberExpression; + declare export function newExpression(callee: BabelNodeExpression | BabelNodeV8IntrinsicIdentifier, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>): BabelNodeNewExpression; + declare export function program(body: Array<BabelNodeStatement>, directives?: Array<BabelNodeDirective>, sourceType?: "script" | "module", interpreter?: BabelNodeInterpreterDirective): BabelNodeProgram; + declare export function objectExpression(properties: Array<BabelNodeObjectMethod | BabelNodeObjectProperty | BabelNodeSpreadElement>): BabelNodeObjectExpression; + declare export function objectMethod(kind?: "method" | "get" | "set", key: BabelNodeExpression | BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, computed?: boolean, generator?: boolean, async?: boolean): BabelNodeObjectMethod; + declare export function objectProperty(key: BabelNodeExpression | BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral, value: BabelNodeExpression | BabelNodePatternLike, computed?: boolean, shorthand?: boolean, decorators?: Array<BabelNodeDecorator>): BabelNodeObjectProperty; + declare export function restElement(argument: BabelNodeLVal): BabelNodeRestElement; + declare export function returnStatement(argument?: BabelNodeExpression): BabelNodeReturnStatement; + declare export function sequenceExpression(expressions: Array<BabelNodeExpression>): BabelNodeSequenceExpression; + declare export function parenthesizedExpression(expression: BabelNodeExpression): BabelNodeParenthesizedExpression; + declare export function switchCase(test?: BabelNodeExpression, consequent: Array<BabelNodeStatement>): BabelNodeSwitchCase; + declare export function switchStatement(discriminant: BabelNodeExpression, cases: Array<BabelNodeSwitchCase>): BabelNodeSwitchStatement; + declare export function thisExpression(): BabelNodeThisExpression; + declare export function 
throwStatement(argument: BabelNodeExpression): BabelNodeThrowStatement; + declare export function tryStatement(block: BabelNodeBlockStatement, handler?: BabelNodeCatchClause, finalizer?: BabelNodeBlockStatement): BabelNodeTryStatement; + declare export function unaryExpression(operator: "void" | "throw" | "delete" | "!" | "+" | "-" | "~" | "typeof", argument: BabelNodeExpression, prefix?: boolean): BabelNodeUnaryExpression; + declare export function updateExpression(operator: "++" | "--", argument: BabelNodeExpression, prefix?: boolean): BabelNodeUpdateExpression; + declare export function variableDeclaration(kind: "var" | "let" | "const", declarations: Array<BabelNodeVariableDeclarator>): BabelNodeVariableDeclaration; + declare export function variableDeclarator(id: BabelNodeLVal, init?: BabelNodeExpression): BabelNodeVariableDeclarator; + declare export function whileStatement(test: BabelNodeExpression, body: BabelNodeStatement): BabelNodeWhileStatement; + declare export function withStatement(object: BabelNodeExpression, body: BabelNodeStatement): BabelNodeWithStatement; + declare export function assignmentPattern(left: BabelNodeIdentifier | BabelNodeObjectPattern | BabelNodeArrayPattern | BabelNodeMemberExpression, right: BabelNodeExpression): BabelNodeAssignmentPattern; + declare export function arrayPattern(elements: Array<null | BabelNodePatternLike>): BabelNodeArrayPattern; + declare export function arrowFunctionExpression(params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement | BabelNodeExpression, async?: boolean): BabelNodeArrowFunctionExpression; + declare export function classBody(body: Array<BabelNodeClassMethod | BabelNodeClassPrivateMethod | BabelNodeClassProperty | BabelNodeClassPrivateProperty | BabelNodeTSDeclareMethod | BabelNodeTSIndexSignature>): BabelNodeClassBody; + declare export function classExpression(id?: BabelNodeIdentifier, superClass?: BabelNodeExpression, body: BabelNodeClassBody, decorators?: Array<BabelNodeDecorator>): BabelNodeClassExpression; + declare export function classDeclaration(id: BabelNodeIdentifier, superClass?: BabelNodeExpression, body: BabelNodeClassBody, decorators?: Array<BabelNodeDecorator>): BabelNodeClassDeclaration; + declare export function exportAllDeclaration(source: BabelNodeStringLiteral): BabelNodeExportAllDeclaration; + declare export function exportDefaultDeclaration(declaration: BabelNodeFunctionDeclaration | BabelNodeTSDeclareFunction | BabelNodeClassDeclaration | BabelNodeExpression): BabelNodeExportDefaultDeclaration; + declare export function exportNamedDeclaration(declaration?: BabelNodeDeclaration, specifiers?: Array<BabelNodeExportSpecifier | BabelNodeExportDefaultSpecifier | BabelNodeExportNamespaceSpecifier>, source?: BabelNodeStringLiteral): BabelNodeExportNamedDeclaration; + declare export function exportSpecifier(local: BabelNodeIdentifier, exported: BabelNodeIdentifier | BabelNodeStringLiteral): BabelNodeExportSpecifier; + declare export function forOfStatement(left: BabelNodeVariableDeclaration | BabelNodeLVal, right: BabelNodeExpression, body: BabelNodeStatement, _await?: boolean): BabelNodeForOfStatement; + declare export function importDeclaration(specifiers: Array<BabelNodeImportSpecifier | BabelNodeImportDefaultSpecifier | BabelNodeImportNamespaceSpecifier>, source: BabelNodeStringLiteral): BabelNodeImportDeclaration; + declare export function importDefaultSpecifier(local: BabelNodeIdentifier): 
BabelNodeImportDefaultSpecifier; + declare export function importNamespaceSpecifier(local: BabelNodeIdentifier): BabelNodeImportNamespaceSpecifier; + declare export function importSpecifier(local: BabelNodeIdentifier, imported: BabelNodeIdentifier | BabelNodeStringLiteral): BabelNodeImportSpecifier; + declare export function metaProperty(meta: BabelNodeIdentifier, property: BabelNodeIdentifier): BabelNodeMetaProperty; + declare export function classMethod(kind?: "get" | "set" | "method" | "constructor", key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, computed?: boolean, _static?: boolean, generator?: boolean, async?: boolean): BabelNodeClassMethod; + declare export function objectPattern(properties: Array<BabelNodeRestElement | BabelNodeObjectProperty>): BabelNodeObjectPattern; + declare export function spreadElement(argument: BabelNodeExpression): BabelNodeSpreadElement; declare function _super(): BabelNodeSuper; declare export { _super as super } - declare function taggedTemplateExpression(tag: BabelNodeExpression, quasi: BabelNodeTemplateLiteral): BabelNodeTaggedTemplateExpression; - declare function templateElement(value: { raw: string, cooked?: string }, tail?: boolean): BabelNodeTemplateElement; - declare function templateLiteral(quasis: Array<BabelNodeTemplateElement>, expressions: Array<BabelNodeExpression | BabelNodeTSType>): BabelNodeTemplateLiteral; - declare function yieldExpression(argument?: BabelNodeExpression, delegate?: boolean): BabelNodeYieldExpression; - declare function awaitExpression(argument: BabelNodeExpression): BabelNodeAwaitExpression; + declare export function taggedTemplateExpression(tag: BabelNodeExpression, quasi: BabelNodeTemplateLiteral): BabelNodeTaggedTemplateExpression; + declare export function templateElement(value: { raw: string, cooked?: string }, tail?: boolean): BabelNodeTemplateElement; + declare export function templateLiteral(quasis: Array<BabelNodeTemplateElement>, expressions: Array<BabelNodeExpression | BabelNodeTSType>): BabelNodeTemplateLiteral; + declare export function yieldExpression(argument?: BabelNodeExpression, delegate?: boolean): BabelNodeYieldExpression; + declare export function awaitExpression(argument: BabelNodeExpression): BabelNodeAwaitExpression; declare function _import(): BabelNodeImport; declare export { _import as import } - declare function bigIntLiteral(value: string): BabelNodeBigIntLiteral; - declare function exportNamespaceSpecifier(exported: BabelNodeIdentifier): BabelNodeExportNamespaceSpecifier; - declare function optionalMemberExpression(object: BabelNodeExpression, property: BabelNodeExpression | BabelNodeIdentifier, computed?: boolean, optional: boolean): BabelNodeOptionalMemberExpression; - declare function optionalCallExpression(callee: BabelNodeExpression, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName>, optional: boolean): BabelNodeOptionalCallExpression; - declare function anyTypeAnnotation(): BabelNodeAnyTypeAnnotation; - declare function arrayTypeAnnotation(elementType: BabelNodeFlowType): BabelNodeArrayTypeAnnotation; - declare function booleanTypeAnnotation(): BabelNodeBooleanTypeAnnotation; - declare function booleanLiteralTypeAnnotation(value: boolean): BabelNodeBooleanLiteralTypeAnnotation; - declare function nullLiteralTypeAnnotation(): 
BabelNodeNullLiteralTypeAnnotation; - declare function classImplements(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeClassImplements; - declare function declareClass(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeDeclareClass; - declare function declareFunction(id: BabelNodeIdentifier): BabelNodeDeclareFunction; - declare function declareInterface(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeDeclareInterface; - declare function declareModule(id: BabelNodeIdentifier | BabelNodeStringLiteral, body: BabelNodeBlockStatement, kind?: "CommonJS" | "ES"): BabelNodeDeclareModule; - declare function declareModuleExports(typeAnnotation: BabelNodeTypeAnnotation): BabelNodeDeclareModuleExports; - declare function declareTypeAlias(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, right: BabelNodeFlowType): BabelNodeDeclareTypeAlias; - declare function declareOpaqueType(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, supertype?: BabelNodeFlowType): BabelNodeDeclareOpaqueType; - declare function declareVariable(id: BabelNodeIdentifier): BabelNodeDeclareVariable; - declare function declareExportDeclaration(declaration?: BabelNodeFlow, specifiers?: Array<BabelNodeExportSpecifier | BabelNodeExportNamespaceSpecifier>, source?: BabelNodeStringLiteral): BabelNodeDeclareExportDeclaration; - declare function declareExportAllDeclaration(source: BabelNodeStringLiteral): BabelNodeDeclareExportAllDeclaration; - declare function declaredPredicate(value: BabelNodeFlow): BabelNodeDeclaredPredicate; - declare function existsTypeAnnotation(): BabelNodeExistsTypeAnnotation; - declare function functionTypeAnnotation(typeParameters?: BabelNodeTypeParameterDeclaration, params: Array<BabelNodeFunctionTypeParam>, rest?: BabelNodeFunctionTypeParam, returnType: BabelNodeFlowType): BabelNodeFunctionTypeAnnotation; - declare function functionTypeParam(name?: BabelNodeIdentifier, typeAnnotation: BabelNodeFlowType): BabelNodeFunctionTypeParam; - declare function genericTypeAnnotation(id: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeGenericTypeAnnotation; - declare function inferredPredicate(): BabelNodeInferredPredicate; - declare function interfaceExtends(id: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeInterfaceExtends; - declare function interfaceDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeInterfaceDeclaration; - declare function interfaceTypeAnnotation(_extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeInterfaceTypeAnnotation; - declare function intersectionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeIntersectionTypeAnnotation; - declare function mixedTypeAnnotation(): BabelNodeMixedTypeAnnotation; - declare function emptyTypeAnnotation(): BabelNodeEmptyTypeAnnotation; - declare function nullableTypeAnnotation(typeAnnotation: BabelNodeFlowType): BabelNodeNullableTypeAnnotation; - declare function numberLiteralTypeAnnotation(value: number): 
BabelNodeNumberLiteralTypeAnnotation; - declare function numberTypeAnnotation(): BabelNodeNumberTypeAnnotation; - declare function objectTypeAnnotation(properties: Array<BabelNodeObjectTypeProperty | BabelNodeObjectTypeSpreadProperty>, indexers?: Array<BabelNodeObjectTypeIndexer>, callProperties?: Array<BabelNodeObjectTypeCallProperty>, internalSlots?: Array<BabelNodeObjectTypeInternalSlot>, exact?: boolean): BabelNodeObjectTypeAnnotation; - declare function objectTypeInternalSlot(id: BabelNodeIdentifier, value: BabelNodeFlowType, optional: boolean, _static: boolean, method: boolean): BabelNodeObjectTypeInternalSlot; - declare function objectTypeCallProperty(value: BabelNodeFlowType): BabelNodeObjectTypeCallProperty; - declare function objectTypeIndexer(id?: BabelNodeIdentifier, key: BabelNodeFlowType, value: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeObjectTypeIndexer; - declare function objectTypeProperty(key: BabelNodeIdentifier | BabelNodeStringLiteral, value: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeObjectTypeProperty; - declare function objectTypeSpreadProperty(argument: BabelNodeFlowType): BabelNodeObjectTypeSpreadProperty; - declare function opaqueType(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, supertype?: BabelNodeFlowType, impltype: BabelNodeFlowType): BabelNodeOpaqueType; - declare function qualifiedTypeIdentifier(id: BabelNodeIdentifier, qualification: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier): BabelNodeQualifiedTypeIdentifier; - declare function stringLiteralTypeAnnotation(value: string): BabelNodeStringLiteralTypeAnnotation; - declare function stringTypeAnnotation(): BabelNodeStringTypeAnnotation; - declare function symbolTypeAnnotation(): BabelNodeSymbolTypeAnnotation; - declare function thisTypeAnnotation(): BabelNodeThisTypeAnnotation; - declare function tupleTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeTupleTypeAnnotation; - declare function typeofTypeAnnotation(argument: BabelNodeFlowType): BabelNodeTypeofTypeAnnotation; - declare function typeAlias(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, right: BabelNodeFlowType): BabelNodeTypeAlias; - declare function typeAnnotation(typeAnnotation: BabelNodeFlowType): BabelNodeTypeAnnotation; - declare function typeCastExpression(expression: BabelNodeExpression, typeAnnotation: BabelNodeTypeAnnotation): BabelNodeTypeCastExpression; - declare function typeParameter(bound?: BabelNodeTypeAnnotation, _default?: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeTypeParameter; - declare function typeParameterDeclaration(params: Array<BabelNodeTypeParameter>): BabelNodeTypeParameterDeclaration; - declare function typeParameterInstantiation(params: Array<BabelNodeFlowType>): BabelNodeTypeParameterInstantiation; - declare function unionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation; - declare function variance(kind: "minus" | "plus"): BabelNodeVariance; - declare function voidTypeAnnotation(): BabelNodeVoidTypeAnnotation; - declare function enumDeclaration(id: BabelNodeIdentifier, body: BabelNodeEnumBooleanBody | BabelNodeEnumNumberBody | BabelNodeEnumStringBody | BabelNodeEnumSymbolBody): BabelNodeEnumDeclaration; - declare function enumBooleanBody(members: Array<BabelNodeEnumBooleanMember>): BabelNodeEnumBooleanBody; - declare function enumNumberBody(members: Array<BabelNodeEnumNumberMember>): BabelNodeEnumNumberBody; - declare function enumStringBody(members: 
Array<BabelNodeEnumStringMember | BabelNodeEnumDefaultedMember>): BabelNodeEnumStringBody; - declare function enumSymbolBody(members: Array<BabelNodeEnumDefaultedMember>): BabelNodeEnumSymbolBody; - declare function enumBooleanMember(id: BabelNodeIdentifier): BabelNodeEnumBooleanMember; - declare function enumNumberMember(id: BabelNodeIdentifier, init: BabelNodeNumericLiteral): BabelNodeEnumNumberMember; - declare function enumStringMember(id: BabelNodeIdentifier, init: BabelNodeStringLiteral): BabelNodeEnumStringMember; - declare function enumDefaultedMember(id: BabelNodeIdentifier): BabelNodeEnumDefaultedMember; - declare function jsxAttribute(name: BabelNodeJSXIdentifier | BabelNodeJSXNamespacedName, value?: BabelNodeJSXElement | BabelNodeJSXFragment | BabelNodeStringLiteral | BabelNodeJSXExpressionContainer): BabelNodeJSXAttribute; - declare function jsxClosingElement(name: BabelNodeJSXIdentifier | BabelNodeJSXMemberExpression | BabelNodeJSXNamespacedName): BabelNodeJSXClosingElement; - declare function jsxElement(openingElement: BabelNodeJSXOpeningElement, closingElement?: BabelNodeJSXClosingElement, children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment>, selfClosing?: boolean): BabelNodeJSXElement; - declare function jsxEmptyExpression(): BabelNodeJSXEmptyExpression; - declare function jsxExpressionContainer(expression: BabelNodeExpression | BabelNodeJSXEmptyExpression): BabelNodeJSXExpressionContainer; - declare function jsxSpreadChild(expression: BabelNodeExpression): BabelNodeJSXSpreadChild; - declare function jsxIdentifier(name: string): BabelNodeJSXIdentifier; - declare function jsxMemberExpression(object: BabelNodeJSXMemberExpression | BabelNodeJSXIdentifier, property: BabelNodeJSXIdentifier): BabelNodeJSXMemberExpression; - declare function jsxNamespacedName(namespace: BabelNodeJSXIdentifier, name: BabelNodeJSXIdentifier): BabelNodeJSXNamespacedName; - declare function jsxOpeningElement(name: BabelNodeJSXIdentifier | BabelNodeJSXMemberExpression | BabelNodeJSXNamespacedName, attributes: Array<BabelNodeJSXAttribute | BabelNodeJSXSpreadAttribute>, selfClosing?: boolean): BabelNodeJSXOpeningElement; - declare function jsxSpreadAttribute(argument: BabelNodeExpression): BabelNodeJSXSpreadAttribute; - declare function jsxText(value: string): BabelNodeJSXText; - declare function jsxFragment(openingFragment: BabelNodeJSXOpeningFragment, closingFragment: BabelNodeJSXClosingFragment, children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment>): BabelNodeJSXFragment; - declare function jsxOpeningFragment(): BabelNodeJSXOpeningFragment; - declare function jsxClosingFragment(): BabelNodeJSXClosingFragment; - declare function noop(): BabelNodeNoop; - declare function placeholder(expectedNode: "Identifier" | "StringLiteral" | "Expression" | "Statement" | "Declaration" | "BlockStatement" | "ClassBody" | "Pattern", name: BabelNodeIdentifier): BabelNodePlaceholder; - declare function v8IntrinsicIdentifier(name: string): BabelNodeV8IntrinsicIdentifier; - declare function argumentPlaceholder(): BabelNodeArgumentPlaceholder; - declare function bindExpression(object: BabelNodeExpression, callee: BabelNodeExpression): BabelNodeBindExpression; - declare function classProperty(key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, value?: BabelNodeExpression, typeAnnotation?: 
BabelNodeTypeAnnotation | BabelNodeTSTypeAnnotation | BabelNodeNoop, decorators?: Array<BabelNodeDecorator>, computed?: boolean, _static?: boolean): BabelNodeClassProperty; - declare function pipelineTopicExpression(expression: BabelNodeExpression): BabelNodePipelineTopicExpression; - declare function pipelineBareFunction(callee: BabelNodeExpression): BabelNodePipelineBareFunction; - declare function pipelinePrimaryTopicReference(): BabelNodePipelinePrimaryTopicReference; - declare function classPrivateProperty(key: BabelNodePrivateName, value?: BabelNodeExpression, decorators?: Array<BabelNodeDecorator>, _static: any): BabelNodeClassPrivateProperty; - declare function classPrivateMethod(kind?: "get" | "set" | "method" | "constructor", key: BabelNodePrivateName, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, _static?: boolean): BabelNodeClassPrivateMethod; - declare function importAttribute(key: BabelNodeIdentifier | BabelNodeStringLiteral, value: BabelNodeStringLiteral): BabelNodeImportAttribute; - declare function decorator(expression: BabelNodeExpression): BabelNodeDecorator; - declare function doExpression(body: BabelNodeBlockStatement): BabelNodeDoExpression; - declare function exportDefaultSpecifier(exported: BabelNodeIdentifier): BabelNodeExportDefaultSpecifier; - declare function privateName(id: BabelNodeIdentifier): BabelNodePrivateName; - declare function recordExpression(properties: Array<BabelNodeObjectProperty | BabelNodeSpreadElement>): BabelNodeRecordExpression; - declare function tupleExpression(elements?: Array<BabelNodeExpression | BabelNodeSpreadElement>): BabelNodeTupleExpression; - declare function decimalLiteral(value: string): BabelNodeDecimalLiteral; - declare function staticBlock(body: Array<BabelNodeStatement>): BabelNodeStaticBlock; - declare function tsParameterProperty(parameter: BabelNodeIdentifier | BabelNodeAssignmentPattern): BabelNodeTSParameterProperty; - declare function tsDeclareFunction(id?: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration | BabelNodeNoop, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, returnType?: BabelNodeTSTypeAnnotation | BabelNodeNoop): BabelNodeTSDeclareFunction; - declare function tsDeclareMethod(decorators?: Array<BabelNodeDecorator>, key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, typeParameters?: BabelNodeTSTypeParameterDeclaration | BabelNodeNoop, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, returnType?: BabelNodeTSTypeAnnotation | BabelNodeNoop): BabelNodeTSDeclareMethod; - declare function tsQualifiedName(left: BabelNodeTSEntityName, right: BabelNodeIdentifier): BabelNodeTSQualifiedName; - declare function tsCallSignatureDeclaration(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSCallSignatureDeclaration; - declare function tsConstructSignatureDeclaration(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSConstructSignatureDeclaration; - declare function tsPropertySignature(key: BabelNodeExpression, typeAnnotation?: BabelNodeTSTypeAnnotation, initializer?: BabelNodeExpression): 
BabelNodeTSPropertySignature; - declare function tsMethodSignature(key: BabelNodeExpression, typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSMethodSignature; - declare function tsIndexSignature(parameters: Array<BabelNodeIdentifier>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSIndexSignature; - declare function tsAnyKeyword(): BabelNodeTSAnyKeyword; - declare function tsBooleanKeyword(): BabelNodeTSBooleanKeyword; - declare function tsBigIntKeyword(): BabelNodeTSBigIntKeyword; - declare function tsIntrinsicKeyword(): BabelNodeTSIntrinsicKeyword; - declare function tsNeverKeyword(): BabelNodeTSNeverKeyword; - declare function tsNullKeyword(): BabelNodeTSNullKeyword; - declare function tsNumberKeyword(): BabelNodeTSNumberKeyword; - declare function tsObjectKeyword(): BabelNodeTSObjectKeyword; - declare function tsStringKeyword(): BabelNodeTSStringKeyword; - declare function tsSymbolKeyword(): BabelNodeTSSymbolKeyword; - declare function tsUndefinedKeyword(): BabelNodeTSUndefinedKeyword; - declare function tsUnknownKeyword(): BabelNodeTSUnknownKeyword; - declare function tsVoidKeyword(): BabelNodeTSVoidKeyword; - declare function tsThisType(): BabelNodeTSThisType; - declare function tsFunctionType(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSFunctionType; - declare function tsConstructorType(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSConstructorType; - declare function tsTypeReference(typeName: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSTypeReference; - declare function tsTypePredicate(parameterName: BabelNodeIdentifier | BabelNodeTSThisType, typeAnnotation?: BabelNodeTSTypeAnnotation, asserts?: boolean): BabelNodeTSTypePredicate; - declare function tsTypeQuery(exprName: BabelNodeTSEntityName | BabelNodeTSImportType): BabelNodeTSTypeQuery; - declare function tsTypeLiteral(members: Array<BabelNodeTSTypeElement>): BabelNodeTSTypeLiteral; - declare function tsArrayType(elementType: BabelNodeTSType): BabelNodeTSArrayType; - declare function tsTupleType(elementTypes: Array<BabelNodeTSType | BabelNodeTSNamedTupleMember>): BabelNodeTSTupleType; - declare function tsOptionalType(typeAnnotation: BabelNodeTSType): BabelNodeTSOptionalType; - declare function tsRestType(typeAnnotation: BabelNodeTSType): BabelNodeTSRestType; - declare function tsNamedTupleMember(label: BabelNodeIdentifier, elementType: BabelNodeTSType, optional?: boolean): BabelNodeTSNamedTupleMember; - declare function tsUnionType(types: Array<BabelNodeTSType>): BabelNodeTSUnionType; - declare function tsIntersectionType(types: Array<BabelNodeTSType>): BabelNodeTSIntersectionType; - declare function tsConditionalType(checkType: BabelNodeTSType, extendsType: BabelNodeTSType, trueType: BabelNodeTSType, falseType: BabelNodeTSType): BabelNodeTSConditionalType; - declare function tsInferType(typeParameter: BabelNodeTSTypeParameter): BabelNodeTSInferType; - declare function tsParenthesizedType(typeAnnotation: BabelNodeTSType): BabelNodeTSParenthesizedType; - declare function tsTypeOperator(typeAnnotation: BabelNodeTSType): BabelNodeTSTypeOperator; - declare function tsIndexedAccessType(objectType: 
BabelNodeTSType, indexType: BabelNodeTSType): BabelNodeTSIndexedAccessType; - declare function tsMappedType(typeParameter: BabelNodeTSTypeParameter, typeAnnotation?: BabelNodeTSType, nameType?: BabelNodeTSType): BabelNodeTSMappedType; - declare function tsLiteralType(literal: BabelNodeNumericLiteral | BabelNodeStringLiteral | BabelNodeBooleanLiteral | BabelNodeBigIntLiteral): BabelNodeTSLiteralType; - declare function tsExpressionWithTypeArguments(expression: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSExpressionWithTypeArguments; - declare function tsInterfaceDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration, _extends?: Array<BabelNodeTSExpressionWithTypeArguments>, body: BabelNodeTSInterfaceBody): BabelNodeTSInterfaceDeclaration; - declare function tsInterfaceBody(body: Array<BabelNodeTSTypeElement>): BabelNodeTSInterfaceBody; - declare function tsTypeAliasDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration, typeAnnotation: BabelNodeTSType): BabelNodeTSTypeAliasDeclaration; - declare function tsAsExpression(expression: BabelNodeExpression, typeAnnotation: BabelNodeTSType): BabelNodeTSAsExpression; - declare function tsTypeAssertion(typeAnnotation: BabelNodeTSType, expression: BabelNodeExpression): BabelNodeTSTypeAssertion; - declare function tsEnumDeclaration(id: BabelNodeIdentifier, members: Array<BabelNodeTSEnumMember>): BabelNodeTSEnumDeclaration; - declare function tsEnumMember(id: BabelNodeIdentifier | BabelNodeStringLiteral, initializer?: BabelNodeExpression): BabelNodeTSEnumMember; - declare function tsModuleDeclaration(id: BabelNodeIdentifier | BabelNodeStringLiteral, body: BabelNodeTSModuleBlock | BabelNodeTSModuleDeclaration): BabelNodeTSModuleDeclaration; - declare function tsModuleBlock(body: Array<BabelNodeStatement>): BabelNodeTSModuleBlock; - declare function tsImportType(argument: BabelNodeStringLiteral, qualifier?: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSImportType; - declare function tsImportEqualsDeclaration(id: BabelNodeIdentifier, moduleReference: BabelNodeTSEntityName | BabelNodeTSExternalModuleReference): BabelNodeTSImportEqualsDeclaration; - declare function tsExternalModuleReference(expression: BabelNodeStringLiteral): BabelNodeTSExternalModuleReference; - declare function tsNonNullExpression(expression: BabelNodeExpression): BabelNodeTSNonNullExpression; - declare function tsExportAssignment(expression: BabelNodeExpression): BabelNodeTSExportAssignment; - declare function tsNamespaceExportDeclaration(id: BabelNodeIdentifier): BabelNodeTSNamespaceExportDeclaration; - declare function tsTypeAnnotation(typeAnnotation: BabelNodeTSType): BabelNodeTSTypeAnnotation; - declare function tsTypeParameterInstantiation(params: Array<BabelNodeTSType>): BabelNodeTSTypeParameterInstantiation; - declare function tsTypeParameterDeclaration(params: Array<BabelNodeTSTypeParameter>): BabelNodeTSTypeParameterDeclaration; - declare function tsTypeParameter(constraint?: BabelNodeTSType, _default?: BabelNodeTSType, name: string): BabelNodeTSTypeParameter; - declare function isArrayExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayExpression) - declare function assertArrayExpression(node: ?Object, opts?: ?Object): void - declare function isAssignmentExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAssignmentExpression) - declare function 
assertAssignmentExpression(node: ?Object, opts?: ?Object): void - declare function isBinaryExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBinaryExpression) - declare function assertBinaryExpression(node: ?Object, opts?: ?Object): void - declare function isInterpreterDirective(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterpreterDirective) - declare function assertInterpreterDirective(node: ?Object, opts?: ?Object): void - declare function isDirective(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDirective) - declare function assertDirective(node: ?Object, opts?: ?Object): void - declare function isDirectiveLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDirectiveLiteral) - declare function assertDirectiveLiteral(node: ?Object, opts?: ?Object): void - declare function isBlockStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBlockStatement) - declare function assertBlockStatement(node: ?Object, opts?: ?Object): void - declare function isBreakStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBreakStatement) - declare function assertBreakStatement(node: ?Object, opts?: ?Object): void - declare function isCallExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeCallExpression) - declare function assertCallExpression(node: ?Object, opts?: ?Object): void - declare function isCatchClause(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeCatchClause) - declare function assertCatchClause(node: ?Object, opts?: ?Object): void - declare function isConditionalExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeConditionalExpression) - declare function assertConditionalExpression(node: ?Object, opts?: ?Object): void - declare function isContinueStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeContinueStatement) - declare function assertContinueStatement(node: ?Object, opts?: ?Object): void - declare function isDebuggerStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDebuggerStatement) - declare function assertDebuggerStatement(node: ?Object, opts?: ?Object): void - declare function isDoWhileStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDoWhileStatement) - declare function assertDoWhileStatement(node: ?Object, opts?: ?Object): void - declare function isEmptyStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEmptyStatement) - declare function assertEmptyStatement(node: ?Object, opts?: ?Object): void - declare function isExpressionStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExpressionStatement) - declare function assertExpressionStatement(node: ?Object, opts?: ?Object): void - declare function isFile(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFile) - declare function assertFile(node: ?Object, opts?: ?Object): void - declare function isForInStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForInStatement) - declare function assertForInStatement(node: ?Object, opts?: ?Object): void - declare function isForStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForStatement) - declare function assertForStatement(node: ?Object, opts?: ?Object): void - declare 
function isFunctionDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionDeclaration) - declare function assertFunctionDeclaration(node: ?Object, opts?: ?Object): void - declare function isFunctionExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionExpression) - declare function assertFunctionExpression(node: ?Object, opts?: ?Object): void - declare function isIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIdentifier) - declare function assertIdentifier(node: ?Object, opts?: ?Object): void - declare function isIfStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIfStatement) - declare function assertIfStatement(node: ?Object, opts?: ?Object): void - declare function isLabeledStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeLabeledStatement) - declare function assertLabeledStatement(node: ?Object, opts?: ?Object): void - declare function isStringLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringLiteral) - declare function assertStringLiteral(node: ?Object, opts?: ?Object): void - declare function isNumericLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumericLiteral) - declare function assertNumericLiteral(node: ?Object, opts?: ?Object): void - declare function isNullLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullLiteral) - declare function assertNullLiteral(node: ?Object, opts?: ?Object): void - declare function isBooleanLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanLiteral) - declare function assertBooleanLiteral(node: ?Object, opts?: ?Object): void - declare function isRegExpLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRegExpLiteral) - declare function assertRegExpLiteral(node: ?Object, opts?: ?Object): void - declare function isLogicalExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeLogicalExpression) - declare function assertLogicalExpression(node: ?Object, opts?: ?Object): void - declare function isMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMemberExpression) - declare function assertMemberExpression(node: ?Object, opts?: ?Object): void - declare function isNewExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNewExpression) - declare function assertNewExpression(node: ?Object, opts?: ?Object): void - declare function isProgram(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeProgram) - declare function assertProgram(node: ?Object, opts?: ?Object): void - declare function isObjectExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectExpression) - declare function assertObjectExpression(node: ?Object, opts?: ?Object): void - declare function isObjectMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectMethod) - declare function assertObjectMethod(node: ?Object, opts?: ?Object): void - declare function isObjectProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectProperty) - declare function assertObjectProperty(node: ?Object, opts?: ?Object): void - declare function isRestElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRestElement) - 
declare function assertRestElement(node: ?Object, opts?: ?Object): void - declare function isReturnStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeReturnStatement) - declare function assertReturnStatement(node: ?Object, opts?: ?Object): void - declare function isSequenceExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSequenceExpression) - declare function assertSequenceExpression(node: ?Object, opts?: ?Object): void - declare function isParenthesizedExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeParenthesizedExpression) - declare function assertParenthesizedExpression(node: ?Object, opts?: ?Object): void - declare function isSwitchCase(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSwitchCase) - declare function assertSwitchCase(node: ?Object, opts?: ?Object): void - declare function isSwitchStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSwitchStatement) - declare function assertSwitchStatement(node: ?Object, opts?: ?Object): void - declare function isThisExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThisExpression) - declare function assertThisExpression(node: ?Object, opts?: ?Object): void - declare function isThrowStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThrowStatement) - declare function assertThrowStatement(node: ?Object, opts?: ?Object): void - declare function isTryStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTryStatement) - declare function assertTryStatement(node: ?Object, opts?: ?Object): void - declare function isUnaryExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeUnaryExpression) - declare function assertUnaryExpression(node: ?Object, opts?: ?Object): void - declare function isUpdateExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeUpdateExpression) - declare function assertUpdateExpression(node: ?Object, opts?: ?Object): void - declare function isVariableDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariableDeclaration) - declare function assertVariableDeclaration(node: ?Object, opts?: ?Object): void - declare function isVariableDeclarator(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariableDeclarator) - declare function assertVariableDeclarator(node: ?Object, opts?: ?Object): void - declare function isWhileStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeWhileStatement) - declare function assertWhileStatement(node: ?Object, opts?: ?Object): void - declare function isWithStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeWithStatement) - declare function assertWithStatement(node: ?Object, opts?: ?Object): void - declare function isAssignmentPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAssignmentPattern) - declare function assertAssignmentPattern(node: ?Object, opts?: ?Object): void - declare function isArrayPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayPattern) - declare function assertArrayPattern(node: ?Object, opts?: ?Object): void - declare function isArrowFunctionExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrowFunctionExpression) - declare function 
assertArrowFunctionExpression(node: ?Object, opts?: ?Object): void - declare function isClassBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassBody) - declare function assertClassBody(node: ?Object, opts?: ?Object): void - declare function isClassExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassExpression) - declare function assertClassExpression(node: ?Object, opts?: ?Object): void - declare function isClassDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassDeclaration) - declare function assertClassDeclaration(node: ?Object, opts?: ?Object): void - declare function isExportAllDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportAllDeclaration) - declare function assertExportAllDeclaration(node: ?Object, opts?: ?Object): void - declare function isExportDefaultDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportDefaultDeclaration) - declare function assertExportDefaultDeclaration(node: ?Object, opts?: ?Object): void - declare function isExportNamedDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportNamedDeclaration) - declare function assertExportNamedDeclaration(node: ?Object, opts?: ?Object): void - declare function isExportSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportSpecifier) - declare function assertExportSpecifier(node: ?Object, opts?: ?Object): void - declare function isForOfStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForOfStatement) - declare function assertForOfStatement(node: ?Object, opts?: ?Object): void - declare function isImportDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportDeclaration) - declare function assertImportDeclaration(node: ?Object, opts?: ?Object): void - declare function isImportDefaultSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportDefaultSpecifier) - declare function assertImportDefaultSpecifier(node: ?Object, opts?: ?Object): void - declare function isImportNamespaceSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportNamespaceSpecifier) - declare function assertImportNamespaceSpecifier(node: ?Object, opts?: ?Object): void - declare function isImportSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportSpecifier) - declare function assertImportSpecifier(node: ?Object, opts?: ?Object): void - declare function isMetaProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMetaProperty) - declare function assertMetaProperty(node: ?Object, opts?: ?Object): void - declare function isClassMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassMethod) - declare function assertClassMethod(node: ?Object, opts?: ?Object): void - declare function isObjectPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectPattern) - declare function assertObjectPattern(node: ?Object, opts?: ?Object): void - declare function isSpreadElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSpreadElement) - declare function assertSpreadElement(node: ?Object, opts?: ?Object): void - declare function isSuper(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSuper) - declare 
function assertSuper(node: ?Object, opts?: ?Object): void - declare function isTaggedTemplateExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTaggedTemplateExpression) - declare function assertTaggedTemplateExpression(node: ?Object, opts?: ?Object): void - declare function isTemplateElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTemplateElement) - declare function assertTemplateElement(node: ?Object, opts?: ?Object): void - declare function isTemplateLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTemplateLiteral) - declare function assertTemplateLiteral(node: ?Object, opts?: ?Object): void - declare function isYieldExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeYieldExpression) - declare function assertYieldExpression(node: ?Object, opts?: ?Object): void - declare function isAwaitExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAwaitExpression) - declare function assertAwaitExpression(node: ?Object, opts?: ?Object): void - declare function isImport(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImport) - declare function assertImport(node: ?Object, opts?: ?Object): void - declare function isBigIntLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBigIntLiteral) - declare function assertBigIntLiteral(node: ?Object, opts?: ?Object): void - declare function isExportNamespaceSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportNamespaceSpecifier) - declare function assertExportNamespaceSpecifier(node: ?Object, opts?: ?Object): void - declare function isOptionalMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOptionalMemberExpression) - declare function assertOptionalMemberExpression(node: ?Object, opts?: ?Object): void - declare function isOptionalCallExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOptionalCallExpression) - declare function assertOptionalCallExpression(node: ?Object, opts?: ?Object): void - declare function isAnyTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAnyTypeAnnotation) - declare function assertAnyTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isArrayTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayTypeAnnotation) - declare function assertArrayTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isBooleanTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanTypeAnnotation) - declare function assertBooleanTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isBooleanLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanLiteralTypeAnnotation) - declare function assertBooleanLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isNullLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullLiteralTypeAnnotation) - declare function assertNullLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isClassImplements(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassImplements) - declare function assertClassImplements(node: ?Object, opts?: ?Object): void - declare function 
isDeclareClass(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareClass) - declare function assertDeclareClass(node: ?Object, opts?: ?Object): void - declare function isDeclareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareFunction) - declare function assertDeclareFunction(node: ?Object, opts?: ?Object): void - declare function isDeclareInterface(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareInterface) - declare function assertDeclareInterface(node: ?Object, opts?: ?Object): void - declare function isDeclareModule(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareModule) - declare function assertDeclareModule(node: ?Object, opts?: ?Object): void - declare function isDeclareModuleExports(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareModuleExports) - declare function assertDeclareModuleExports(node: ?Object, opts?: ?Object): void - declare function isDeclareTypeAlias(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareTypeAlias) - declare function assertDeclareTypeAlias(node: ?Object, opts?: ?Object): void - declare function isDeclareOpaqueType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareOpaqueType) - declare function assertDeclareOpaqueType(node: ?Object, opts?: ?Object): void - declare function isDeclareVariable(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareVariable) - declare function assertDeclareVariable(node: ?Object, opts?: ?Object): void - declare function isDeclareExportDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareExportDeclaration) - declare function assertDeclareExportDeclaration(node: ?Object, opts?: ?Object): void - declare function isDeclareExportAllDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareExportAllDeclaration) - declare function assertDeclareExportAllDeclaration(node: ?Object, opts?: ?Object): void - declare function isDeclaredPredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclaredPredicate) - declare function assertDeclaredPredicate(node: ?Object, opts?: ?Object): void - declare function isExistsTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExistsTypeAnnotation) - declare function assertExistsTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isFunctionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionTypeAnnotation) - declare function assertFunctionTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isFunctionTypeParam(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionTypeParam) - declare function assertFunctionTypeParam(node: ?Object, opts?: ?Object): void - declare function isGenericTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeGenericTypeAnnotation) - declare function assertGenericTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isInferredPredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInferredPredicate) - declare function assertInferredPredicate(node: ?Object, opts?: ?Object): void - declare function isInterfaceExtends(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceExtends) 
- declare function assertInterfaceExtends(node: ?Object, opts?: ?Object): void - declare function isInterfaceDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceDeclaration) - declare function assertInterfaceDeclaration(node: ?Object, opts?: ?Object): void - declare function isInterfaceTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceTypeAnnotation) - declare function assertInterfaceTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isIntersectionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIntersectionTypeAnnotation) - declare function assertIntersectionTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isMixedTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMixedTypeAnnotation) - declare function assertMixedTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isEmptyTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEmptyTypeAnnotation) - declare function assertEmptyTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isNullableTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullableTypeAnnotation) - declare function assertNullableTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isNumberLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumberLiteralTypeAnnotation) - declare function assertNumberLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isNumberTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumberTypeAnnotation) - declare function assertNumberTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isObjectTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeAnnotation) - declare function assertObjectTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isObjectTypeInternalSlot(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeInternalSlot) - declare function assertObjectTypeInternalSlot(node: ?Object, opts?: ?Object): void - declare function isObjectTypeCallProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeCallProperty) - declare function assertObjectTypeCallProperty(node: ?Object, opts?: ?Object): void - declare function isObjectTypeIndexer(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeIndexer) - declare function assertObjectTypeIndexer(node: ?Object, opts?: ?Object): void - declare function isObjectTypeProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeProperty) - declare function assertObjectTypeProperty(node: ?Object, opts?: ?Object): void - declare function isObjectTypeSpreadProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeSpreadProperty) - declare function assertObjectTypeSpreadProperty(node: ?Object, opts?: ?Object): void - declare function isOpaqueType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOpaqueType) - declare function assertOpaqueType(node: ?Object, opts?: ?Object): void - declare function isQualifiedTypeIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof 
BabelNodeQualifiedTypeIdentifier) - declare function assertQualifiedTypeIdentifier(node: ?Object, opts?: ?Object): void - declare function isStringLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringLiteralTypeAnnotation) - declare function assertStringLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isStringTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringTypeAnnotation) - declare function assertStringTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isSymbolTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSymbolTypeAnnotation) - declare function assertSymbolTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isThisTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThisTypeAnnotation) - declare function assertThisTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isTupleTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTupleTypeAnnotation) - declare function assertTupleTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isTypeofTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeofTypeAnnotation) - declare function assertTypeofTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isTypeAlias(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeAlias) - declare function assertTypeAlias(node: ?Object, opts?: ?Object): void - declare function isTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeAnnotation) - declare function assertTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isTypeCastExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeCastExpression) - declare function assertTypeCastExpression(node: ?Object, opts?: ?Object): void - declare function isTypeParameter(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameter) - declare function assertTypeParameter(node: ?Object, opts?: ?Object): void - declare function isTypeParameterDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameterDeclaration) - declare function assertTypeParameterDeclaration(node: ?Object, opts?: ?Object): void - declare function isTypeParameterInstantiation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameterInstantiation) - declare function assertTypeParameterInstantiation(node: ?Object, opts?: ?Object): void - declare function isUnionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeUnionTypeAnnotation) - declare function assertUnionTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isVariance(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariance) - declare function assertVariance(node: ?Object, opts?: ?Object): void - declare function isVoidTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVoidTypeAnnotation) - declare function assertVoidTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isEnumDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumDeclaration) - declare function assertEnumDeclaration(node: ?Object, opts?: ?Object): 
void - declare function isEnumBooleanBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumBooleanBody) - declare function assertEnumBooleanBody(node: ?Object, opts?: ?Object): void - declare function isEnumNumberBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumNumberBody) - declare function assertEnumNumberBody(node: ?Object, opts?: ?Object): void - declare function isEnumStringBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumStringBody) - declare function assertEnumStringBody(node: ?Object, opts?: ?Object): void - declare function isEnumSymbolBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumSymbolBody) - declare function assertEnumSymbolBody(node: ?Object, opts?: ?Object): void - declare function isEnumBooleanMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumBooleanMember) - declare function assertEnumBooleanMember(node: ?Object, opts?: ?Object): void - declare function isEnumNumberMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumNumberMember) - declare function assertEnumNumberMember(node: ?Object, opts?: ?Object): void - declare function isEnumStringMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumStringMember) - declare function assertEnumStringMember(node: ?Object, opts?: ?Object): void - declare function isEnumDefaultedMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumDefaultedMember) - declare function assertEnumDefaultedMember(node: ?Object, opts?: ?Object): void - declare function isJSXAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXAttribute) - declare function assertJSXAttribute(node: ?Object, opts?: ?Object): void - declare function isJSXClosingElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXClosingElement) - declare function assertJSXClosingElement(node: ?Object, opts?: ?Object): void - declare function isJSXElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXElement) - declare function assertJSXElement(node: ?Object, opts?: ?Object): void - declare function isJSXEmptyExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXEmptyExpression) - declare function assertJSXEmptyExpression(node: ?Object, opts?: ?Object): void - declare function isJSXExpressionContainer(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXExpressionContainer) - declare function assertJSXExpressionContainer(node: ?Object, opts?: ?Object): void - declare function isJSXSpreadChild(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXSpreadChild) - declare function assertJSXSpreadChild(node: ?Object, opts?: ?Object): void - declare function isJSXIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXIdentifier) - declare function assertJSXIdentifier(node: ?Object, opts?: ?Object): void - declare function isJSXMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXMemberExpression) - declare function assertJSXMemberExpression(node: ?Object, opts?: ?Object): void - declare function isJSXNamespacedName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXNamespacedName) - declare function assertJSXNamespacedName(node: ?Object, opts?: ?Object): void - declare 
function isJSXOpeningElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXOpeningElement) - declare function assertJSXOpeningElement(node: ?Object, opts?: ?Object): void - declare function isJSXSpreadAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXSpreadAttribute) - declare function assertJSXSpreadAttribute(node: ?Object, opts?: ?Object): void - declare function isJSXText(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXText) - declare function assertJSXText(node: ?Object, opts?: ?Object): void - declare function isJSXFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXFragment) - declare function assertJSXFragment(node: ?Object, opts?: ?Object): void - declare function isJSXOpeningFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXOpeningFragment) - declare function assertJSXOpeningFragment(node: ?Object, opts?: ?Object): void - declare function isJSXClosingFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXClosingFragment) - declare function assertJSXClosingFragment(node: ?Object, opts?: ?Object): void - declare function isNoop(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNoop) - declare function assertNoop(node: ?Object, opts?: ?Object): void - declare function isPlaceholder(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePlaceholder) - declare function assertPlaceholder(node: ?Object, opts?: ?Object): void - declare function isV8IntrinsicIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeV8IntrinsicIdentifier) - declare function assertV8IntrinsicIdentifier(node: ?Object, opts?: ?Object): void - declare function isArgumentPlaceholder(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArgumentPlaceholder) - declare function assertArgumentPlaceholder(node: ?Object, opts?: ?Object): void - declare function isBindExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBindExpression) - declare function assertBindExpression(node: ?Object, opts?: ?Object): void - declare function isClassProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassProperty) - declare function assertClassProperty(node: ?Object, opts?: ?Object): void - declare function isPipelineTopicExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelineTopicExpression) - declare function assertPipelineTopicExpression(node: ?Object, opts?: ?Object): void - declare function isPipelineBareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelineBareFunction) - declare function assertPipelineBareFunction(node: ?Object, opts?: ?Object): void - declare function isPipelinePrimaryTopicReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelinePrimaryTopicReference) - declare function assertPipelinePrimaryTopicReference(node: ?Object, opts?: ?Object): void - declare function isClassPrivateProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassPrivateProperty) - declare function assertClassPrivateProperty(node: ?Object, opts?: ?Object): void - declare function isClassPrivateMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassPrivateMethod) - declare function assertClassPrivateMethod(node: ?Object, opts?: 
?Object): void - declare function isImportAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportAttribute) - declare function assertImportAttribute(node: ?Object, opts?: ?Object): void - declare function isDecorator(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDecorator) - declare function assertDecorator(node: ?Object, opts?: ?Object): void - declare function isDoExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDoExpression) - declare function assertDoExpression(node: ?Object, opts?: ?Object): void - declare function isExportDefaultSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportDefaultSpecifier) - declare function assertExportDefaultSpecifier(node: ?Object, opts?: ?Object): void - declare function isPrivateName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePrivateName) - declare function assertPrivateName(node: ?Object, opts?: ?Object): void - declare function isRecordExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRecordExpression) - declare function assertRecordExpression(node: ?Object, opts?: ?Object): void - declare function isTupleExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTupleExpression) - declare function assertTupleExpression(node: ?Object, opts?: ?Object): void - declare function isDecimalLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDecimalLiteral) - declare function assertDecimalLiteral(node: ?Object, opts?: ?Object): void - declare function isStaticBlock(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStaticBlock) - declare function assertStaticBlock(node: ?Object, opts?: ?Object): void - declare function isTSParameterProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSParameterProperty) - declare function assertTSParameterProperty(node: ?Object, opts?: ?Object): void - declare function isTSDeclareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSDeclareFunction) - declare function assertTSDeclareFunction(node: ?Object, opts?: ?Object): void - declare function isTSDeclareMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSDeclareMethod) - declare function assertTSDeclareMethod(node: ?Object, opts?: ?Object): void - declare function isTSQualifiedName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSQualifiedName) - declare function assertTSQualifiedName(node: ?Object, opts?: ?Object): void - declare function isTSCallSignatureDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSCallSignatureDeclaration) - declare function assertTSCallSignatureDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSConstructSignatureDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConstructSignatureDeclaration) - declare function assertTSConstructSignatureDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSPropertySignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSPropertySignature) - declare function assertTSPropertySignature(node: ?Object, opts?: ?Object): void - declare function isTSMethodSignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSMethodSignature) - declare function 
assertTSMethodSignature(node: ?Object, opts?: ?Object): void - declare function isTSIndexSignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIndexSignature) - declare function assertTSIndexSignature(node: ?Object, opts?: ?Object): void - declare function isTSAnyKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSAnyKeyword) - declare function assertTSAnyKeyword(node: ?Object, opts?: ?Object): void - declare function isTSBooleanKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSBooleanKeyword) - declare function assertTSBooleanKeyword(node: ?Object, opts?: ?Object): void - declare function isTSBigIntKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSBigIntKeyword) - declare function assertTSBigIntKeyword(node: ?Object, opts?: ?Object): void - declare function isTSIntrinsicKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIntrinsicKeyword) - declare function assertTSIntrinsicKeyword(node: ?Object, opts?: ?Object): void - declare function isTSNeverKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNeverKeyword) - declare function assertTSNeverKeyword(node: ?Object, opts?: ?Object): void - declare function isTSNullKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNullKeyword) - declare function assertTSNullKeyword(node: ?Object, opts?: ?Object): void - declare function isTSNumberKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNumberKeyword) - declare function assertTSNumberKeyword(node: ?Object, opts?: ?Object): void - declare function isTSObjectKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSObjectKeyword) - declare function assertTSObjectKeyword(node: ?Object, opts?: ?Object): void - declare function isTSStringKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSStringKeyword) - declare function assertTSStringKeyword(node: ?Object, opts?: ?Object): void - declare function isTSSymbolKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSSymbolKeyword) - declare function assertTSSymbolKeyword(node: ?Object, opts?: ?Object): void - declare function isTSUndefinedKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUndefinedKeyword) - declare function assertTSUndefinedKeyword(node: ?Object, opts?: ?Object): void - declare function isTSUnknownKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUnknownKeyword) - declare function assertTSUnknownKeyword(node: ?Object, opts?: ?Object): void - declare function isTSVoidKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSVoidKeyword) - declare function assertTSVoidKeyword(node: ?Object, opts?: ?Object): void - declare function isTSThisType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSThisType) - declare function assertTSThisType(node: ?Object, opts?: ?Object): void - declare function isTSFunctionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSFunctionType) - declare function assertTSFunctionType(node: ?Object, opts?: ?Object): void - declare function isTSConstructorType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConstructorType) - declare function assertTSConstructorType(node: ?Object, opts?: ?Object): void 
- declare function isTSTypeReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeReference) - declare function assertTSTypeReference(node: ?Object, opts?: ?Object): void - declare function isTSTypePredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypePredicate) - declare function assertTSTypePredicate(node: ?Object, opts?: ?Object): void - declare function isTSTypeQuery(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeQuery) - declare function assertTSTypeQuery(node: ?Object, opts?: ?Object): void - declare function isTSTypeLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeLiteral) - declare function assertTSTypeLiteral(node: ?Object, opts?: ?Object): void - declare function isTSArrayType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSArrayType) - declare function assertTSArrayType(node: ?Object, opts?: ?Object): void - declare function isTSTupleType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTupleType) - declare function assertTSTupleType(node: ?Object, opts?: ?Object): void - declare function isTSOptionalType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSOptionalType) - declare function assertTSOptionalType(node: ?Object, opts?: ?Object): void - declare function isTSRestType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSRestType) - declare function assertTSRestType(node: ?Object, opts?: ?Object): void - declare function isTSNamedTupleMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNamedTupleMember) - declare function assertTSNamedTupleMember(node: ?Object, opts?: ?Object): void - declare function isTSUnionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUnionType) - declare function assertTSUnionType(node: ?Object, opts?: ?Object): void - declare function isTSIntersectionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIntersectionType) - declare function assertTSIntersectionType(node: ?Object, opts?: ?Object): void - declare function isTSConditionalType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConditionalType) - declare function assertTSConditionalType(node: ?Object, opts?: ?Object): void - declare function isTSInferType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInferType) - declare function assertTSInferType(node: ?Object, opts?: ?Object): void - declare function isTSParenthesizedType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSParenthesizedType) - declare function assertTSParenthesizedType(node: ?Object, opts?: ?Object): void - declare function isTSTypeOperator(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeOperator) - declare function assertTSTypeOperator(node: ?Object, opts?: ?Object): void - declare function isTSIndexedAccessType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIndexedAccessType) - declare function assertTSIndexedAccessType(node: ?Object, opts?: ?Object): void - declare function isTSMappedType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSMappedType) - declare function assertTSMappedType(node: ?Object, opts?: ?Object): void - declare function isTSLiteralType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof 
BabelNodeTSLiteralType) - declare function assertTSLiteralType(node: ?Object, opts?: ?Object): void - declare function isTSExpressionWithTypeArguments(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExpressionWithTypeArguments) - declare function assertTSExpressionWithTypeArguments(node: ?Object, opts?: ?Object): void - declare function isTSInterfaceDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInterfaceDeclaration) - declare function assertTSInterfaceDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSInterfaceBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInterfaceBody) - declare function assertTSInterfaceBody(node: ?Object, opts?: ?Object): void - declare function isTSTypeAliasDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAliasDeclaration) - declare function assertTSTypeAliasDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSAsExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSAsExpression) - declare function assertTSAsExpression(node: ?Object, opts?: ?Object): void - declare function isTSTypeAssertion(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAssertion) - declare function assertTSTypeAssertion(node: ?Object, opts?: ?Object): void - declare function isTSEnumDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSEnumDeclaration) - declare function assertTSEnumDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSEnumMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSEnumMember) - declare function assertTSEnumMember(node: ?Object, opts?: ?Object): void - declare function isTSModuleDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSModuleDeclaration) - declare function assertTSModuleDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSModuleBlock(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSModuleBlock) - declare function assertTSModuleBlock(node: ?Object, opts?: ?Object): void - declare function isTSImportType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSImportType) - declare function assertTSImportType(node: ?Object, opts?: ?Object): void - declare function isTSImportEqualsDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSImportEqualsDeclaration) - declare function assertTSImportEqualsDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSExternalModuleReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExternalModuleReference) - declare function assertTSExternalModuleReference(node: ?Object, opts?: ?Object): void - declare function isTSNonNullExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNonNullExpression) - declare function assertTSNonNullExpression(node: ?Object, opts?: ?Object): void - declare function isTSExportAssignment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExportAssignment) - declare function assertTSExportAssignment(node: ?Object, opts?: ?Object): void - declare function isTSNamespaceExportDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNamespaceExportDeclaration) - declare function 
assertTSNamespaceExportDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAnnotation) - declare function assertTSTypeAnnotation(node: ?Object, opts?: ?Object): void - declare function isTSTypeParameterInstantiation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameterInstantiation) - declare function assertTSTypeParameterInstantiation(node: ?Object, opts?: ?Object): void - declare function isTSTypeParameterDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameterDeclaration) - declare function assertTSTypeParameterDeclaration(node: ?Object, opts?: ?Object): void - declare function isTSTypeParameter(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameter) - declare function assertTSTypeParameter(node: ?Object, opts?: ?Object): void - declare function isExpression(node: ?Object, opts?: ?Object): boolean - declare function assertExpression(node: ?Object, opts?: ?Object): void - declare function isBinary(node: ?Object, opts?: ?Object): boolean - declare function assertBinary(node: ?Object, opts?: ?Object): void - declare function isScopable(node: ?Object, opts?: ?Object): boolean - declare function assertScopable(node: ?Object, opts?: ?Object): void - declare function isBlockParent(node: ?Object, opts?: ?Object): boolean - declare function assertBlockParent(node: ?Object, opts?: ?Object): void - declare function isBlock(node: ?Object, opts?: ?Object): boolean - declare function assertBlock(node: ?Object, opts?: ?Object): void - declare function isStatement(node: ?Object, opts?: ?Object): boolean - declare function assertStatement(node: ?Object, opts?: ?Object): void - declare function isTerminatorless(node: ?Object, opts?: ?Object): boolean - declare function assertTerminatorless(node: ?Object, opts?: ?Object): void - declare function isCompletionStatement(node: ?Object, opts?: ?Object): boolean - declare function assertCompletionStatement(node: ?Object, opts?: ?Object): void - declare function isConditional(node: ?Object, opts?: ?Object): boolean - declare function assertConditional(node: ?Object, opts?: ?Object): void - declare function isLoop(node: ?Object, opts?: ?Object): boolean - declare function assertLoop(node: ?Object, opts?: ?Object): void - declare function isWhile(node: ?Object, opts?: ?Object): boolean - declare function assertWhile(node: ?Object, opts?: ?Object): void - declare function isExpressionWrapper(node: ?Object, opts?: ?Object): boolean - declare function assertExpressionWrapper(node: ?Object, opts?: ?Object): void - declare function isFor(node: ?Object, opts?: ?Object): boolean - declare function assertFor(node: ?Object, opts?: ?Object): void - declare function isForXStatement(node: ?Object, opts?: ?Object): boolean - declare function assertForXStatement(node: ?Object, opts?: ?Object): void - declare function isFunction(node: ?Object, opts?: ?Object): boolean - declare function assertFunction(node: ?Object, opts?: ?Object): void - declare function isFunctionParent(node: ?Object, opts?: ?Object): boolean - declare function assertFunctionParent(node: ?Object, opts?: ?Object): void - declare function isPureish(node: ?Object, opts?: ?Object): boolean - declare function assertPureish(node: ?Object, opts?: ?Object): void - declare function isDeclaration(node: ?Object, opts?: ?Object): boolean - declare function assertDeclaration(node: ?Object, 
opts?: ?Object): void - declare function isPatternLike(node: ?Object, opts?: ?Object): boolean - declare function assertPatternLike(node: ?Object, opts?: ?Object): void - declare function isLVal(node: ?Object, opts?: ?Object): boolean - declare function assertLVal(node: ?Object, opts?: ?Object): void - declare function isTSEntityName(node: ?Object, opts?: ?Object): boolean - declare function assertTSEntityName(node: ?Object, opts?: ?Object): void - declare function isLiteral(node: ?Object, opts?: ?Object): boolean - declare function assertLiteral(node: ?Object, opts?: ?Object): void - declare function isImmutable(node: ?Object, opts?: ?Object): boolean - declare function assertImmutable(node: ?Object, opts?: ?Object): void - declare function isUserWhitespacable(node: ?Object, opts?: ?Object): boolean - declare function assertUserWhitespacable(node: ?Object, opts?: ?Object): void - declare function isMethod(node: ?Object, opts?: ?Object): boolean - declare function assertMethod(node: ?Object, opts?: ?Object): void - declare function isObjectMember(node: ?Object, opts?: ?Object): boolean - declare function assertObjectMember(node: ?Object, opts?: ?Object): void - declare function isProperty(node: ?Object, opts?: ?Object): boolean - declare function assertProperty(node: ?Object, opts?: ?Object): void - declare function isUnaryLike(node: ?Object, opts?: ?Object): boolean - declare function assertUnaryLike(node: ?Object, opts?: ?Object): void - declare function isPattern(node: ?Object, opts?: ?Object): boolean - declare function assertPattern(node: ?Object, opts?: ?Object): void - declare function isClass(node: ?Object, opts?: ?Object): boolean - declare function assertClass(node: ?Object, opts?: ?Object): void - declare function isModuleDeclaration(node: ?Object, opts?: ?Object): boolean - declare function assertModuleDeclaration(node: ?Object, opts?: ?Object): void - declare function isExportDeclaration(node: ?Object, opts?: ?Object): boolean - declare function assertExportDeclaration(node: ?Object, opts?: ?Object): void - declare function isModuleSpecifier(node: ?Object, opts?: ?Object): boolean - declare function assertModuleSpecifier(node: ?Object, opts?: ?Object): void - declare function isFlow(node: ?Object, opts?: ?Object): boolean - declare function assertFlow(node: ?Object, opts?: ?Object): void - declare function isFlowType(node: ?Object, opts?: ?Object): boolean - declare function assertFlowType(node: ?Object, opts?: ?Object): void - declare function isFlowBaseAnnotation(node: ?Object, opts?: ?Object): boolean - declare function assertFlowBaseAnnotation(node: ?Object, opts?: ?Object): void - declare function isFlowDeclaration(node: ?Object, opts?: ?Object): boolean - declare function assertFlowDeclaration(node: ?Object, opts?: ?Object): void - declare function isFlowPredicate(node: ?Object, opts?: ?Object): boolean - declare function assertFlowPredicate(node: ?Object, opts?: ?Object): void - declare function isEnumBody(node: ?Object, opts?: ?Object): boolean - declare function assertEnumBody(node: ?Object, opts?: ?Object): void - declare function isEnumMember(node: ?Object, opts?: ?Object): boolean - declare function assertEnumMember(node: ?Object, opts?: ?Object): void - declare function isJSX(node: ?Object, opts?: ?Object): boolean - declare function assertJSX(node: ?Object, opts?: ?Object): void - declare function isPrivate(node: ?Object, opts?: ?Object): boolean - declare function assertPrivate(node: ?Object, opts?: ?Object): void - declare function isTSTypeElement(node: 
?Object, opts?: ?Object): boolean - declare function assertTSTypeElement(node: ?Object, opts?: ?Object): void - declare function isTSType(node: ?Object, opts?: ?Object): boolean - declare function assertTSType(node: ?Object, opts?: ?Object): void - declare function isTSBaseType(node: ?Object, opts?: ?Object): boolean - declare function assertTSBaseType(node: ?Object, opts?: ?Object): void - declare function isNumberLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumericLiteral) - declare function assertNumberLiteral(node: ?Object, opts?: ?Object): void - declare function isRegexLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRegExpLiteral) - declare function assertRegexLiteral(node: ?Object, opts?: ?Object): void - declare function isRestProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRestElement) - declare function assertRestProperty(node: ?Object, opts?: ?Object): void - declare function isSpreadProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSpreadElement) - declare function assertSpreadProperty(node: ?Object, opts?: ?Object): void - declare function assertNode(obj: any): void - declare function createTypeAnnotationBasedOnTypeof(type: 'string' | 'number' | 'undefined' | 'boolean' | 'function' | 'object' | 'symbol'): BabelNodeTypeAnnotation - declare function createUnionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation - declare function createFlowUnionType(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation - declare function buildChildren(node: { children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment | BabelNodeJSXEmptyExpression> }): Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment> - declare function clone<T>(n: T): T; - declare function cloneDeep<T>(n: T): T; - declare function cloneDeepWithoutLoc<T>(n: T): T; - declare function cloneNode<T>(n: T, deep?: boolean, withoutLoc?: boolean): T; - declare function cloneWithoutLoc<T>(n: T): T; + declare export function bigIntLiteral(value: string): BabelNodeBigIntLiteral; + declare export function exportNamespaceSpecifier(exported: BabelNodeIdentifier): BabelNodeExportNamespaceSpecifier; + declare export function optionalMemberExpression(object: BabelNodeExpression, property: BabelNodeExpression | BabelNodeIdentifier, computed?: boolean, optional: boolean): BabelNodeOptionalMemberExpression; + declare export function optionalCallExpression(callee: BabelNodeExpression, _arguments: Array<BabelNodeExpression | BabelNodeSpreadElement | BabelNodeJSXNamespacedName | BabelNodeArgumentPlaceholder>, optional: boolean): BabelNodeOptionalCallExpression; + declare export function anyTypeAnnotation(): BabelNodeAnyTypeAnnotation; + declare export function arrayTypeAnnotation(elementType: BabelNodeFlowType): BabelNodeArrayTypeAnnotation; + declare export function booleanTypeAnnotation(): BabelNodeBooleanTypeAnnotation; + declare export function booleanLiteralTypeAnnotation(value: boolean): BabelNodeBooleanLiteralTypeAnnotation; + declare export function nullLiteralTypeAnnotation(): BabelNodeNullLiteralTypeAnnotation; + declare export function classImplements(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeClassImplements; + declare export function declareClass(id: 
BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeDeclareClass; + declare export function declareFunction(id: BabelNodeIdentifier): BabelNodeDeclareFunction; + declare export function declareInterface(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeDeclareInterface; + declare export function declareModule(id: BabelNodeIdentifier | BabelNodeStringLiteral, body: BabelNodeBlockStatement, kind?: "CommonJS" | "ES"): BabelNodeDeclareModule; + declare export function declareModuleExports(typeAnnotation: BabelNodeTypeAnnotation): BabelNodeDeclareModuleExports; + declare export function declareTypeAlias(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, right: BabelNodeFlowType): BabelNodeDeclareTypeAlias; + declare export function declareOpaqueType(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, supertype?: BabelNodeFlowType): BabelNodeDeclareOpaqueType; + declare export function declareVariable(id: BabelNodeIdentifier): BabelNodeDeclareVariable; + declare export function declareExportDeclaration(declaration?: BabelNodeFlow, specifiers?: Array<BabelNodeExportSpecifier | BabelNodeExportNamespaceSpecifier>, source?: BabelNodeStringLiteral): BabelNodeDeclareExportDeclaration; + declare export function declareExportAllDeclaration(source: BabelNodeStringLiteral): BabelNodeDeclareExportAllDeclaration; + declare export function declaredPredicate(value: BabelNodeFlow): BabelNodeDeclaredPredicate; + declare export function existsTypeAnnotation(): BabelNodeExistsTypeAnnotation; + declare export function functionTypeAnnotation(typeParameters?: BabelNodeTypeParameterDeclaration, params: Array<BabelNodeFunctionTypeParam>, rest?: BabelNodeFunctionTypeParam, returnType: BabelNodeFlowType): BabelNodeFunctionTypeAnnotation; + declare export function functionTypeParam(name?: BabelNodeIdentifier, typeAnnotation: BabelNodeFlowType): BabelNodeFunctionTypeParam; + declare export function genericTypeAnnotation(id: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeGenericTypeAnnotation; + declare export function inferredPredicate(): BabelNodeInferredPredicate; + declare export function interfaceExtends(id: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier, typeParameters?: BabelNodeTypeParameterInstantiation): BabelNodeInterfaceExtends; + declare export function interfaceDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, _extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeInterfaceDeclaration; + declare export function interfaceTypeAnnotation(_extends?: Array<BabelNodeInterfaceExtends>, body: BabelNodeObjectTypeAnnotation): BabelNodeInterfaceTypeAnnotation; + declare export function intersectionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeIntersectionTypeAnnotation; + declare export function mixedTypeAnnotation(): BabelNodeMixedTypeAnnotation; + declare export function emptyTypeAnnotation(): BabelNodeEmptyTypeAnnotation; + declare export function nullableTypeAnnotation(typeAnnotation: BabelNodeFlowType): BabelNodeNullableTypeAnnotation; + declare export function numberLiteralTypeAnnotation(value: number): BabelNodeNumberLiteralTypeAnnotation; + declare export function 
numberTypeAnnotation(): BabelNodeNumberTypeAnnotation; + declare export function objectTypeAnnotation(properties: Array<BabelNodeObjectTypeProperty | BabelNodeObjectTypeSpreadProperty>, indexers?: Array<BabelNodeObjectTypeIndexer>, callProperties?: Array<BabelNodeObjectTypeCallProperty>, internalSlots?: Array<BabelNodeObjectTypeInternalSlot>, exact?: boolean): BabelNodeObjectTypeAnnotation; + declare export function objectTypeInternalSlot(id: BabelNodeIdentifier, value: BabelNodeFlowType, optional: boolean, _static: boolean, method: boolean): BabelNodeObjectTypeInternalSlot; + declare export function objectTypeCallProperty(value: BabelNodeFlowType): BabelNodeObjectTypeCallProperty; + declare export function objectTypeIndexer(id?: BabelNodeIdentifier, key: BabelNodeFlowType, value: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeObjectTypeIndexer; + declare export function objectTypeProperty(key: BabelNodeIdentifier | BabelNodeStringLiteral, value: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeObjectTypeProperty; + declare export function objectTypeSpreadProperty(argument: BabelNodeFlowType): BabelNodeObjectTypeSpreadProperty; + declare export function opaqueType(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, supertype?: BabelNodeFlowType, impltype: BabelNodeFlowType): BabelNodeOpaqueType; + declare export function qualifiedTypeIdentifier(id: BabelNodeIdentifier, qualification: BabelNodeIdentifier | BabelNodeQualifiedTypeIdentifier): BabelNodeQualifiedTypeIdentifier; + declare export function stringLiteralTypeAnnotation(value: string): BabelNodeStringLiteralTypeAnnotation; + declare export function stringTypeAnnotation(): BabelNodeStringTypeAnnotation; + declare export function symbolTypeAnnotation(): BabelNodeSymbolTypeAnnotation; + declare export function thisTypeAnnotation(): BabelNodeThisTypeAnnotation; + declare export function tupleTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeTupleTypeAnnotation; + declare export function typeofTypeAnnotation(argument: BabelNodeFlowType): BabelNodeTypeofTypeAnnotation; + declare export function typeAlias(id: BabelNodeIdentifier, typeParameters?: BabelNodeTypeParameterDeclaration, right: BabelNodeFlowType): BabelNodeTypeAlias; + declare export function typeAnnotation(typeAnnotation: BabelNodeFlowType): BabelNodeTypeAnnotation; + declare export function typeCastExpression(expression: BabelNodeExpression, typeAnnotation: BabelNodeTypeAnnotation): BabelNodeTypeCastExpression; + declare export function typeParameter(bound?: BabelNodeTypeAnnotation, _default?: BabelNodeFlowType, variance?: BabelNodeVariance): BabelNodeTypeParameter; + declare export function typeParameterDeclaration(params: Array<BabelNodeTypeParameter>): BabelNodeTypeParameterDeclaration; + declare export function typeParameterInstantiation(params: Array<BabelNodeFlowType>): BabelNodeTypeParameterInstantiation; + declare export function unionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation; + declare export function variance(kind: "minus" | "plus"): BabelNodeVariance; + declare export function voidTypeAnnotation(): BabelNodeVoidTypeAnnotation; + declare export function enumDeclaration(id: BabelNodeIdentifier, body: BabelNodeEnumBooleanBody | BabelNodeEnumNumberBody | BabelNodeEnumStringBody | BabelNodeEnumSymbolBody): BabelNodeEnumDeclaration; + declare export function enumBooleanBody(members: Array<BabelNodeEnumBooleanMember>): BabelNodeEnumBooleanBody; + declare export function 
enumNumberBody(members: Array<BabelNodeEnumNumberMember>): BabelNodeEnumNumberBody; + declare export function enumStringBody(members: Array<BabelNodeEnumStringMember | BabelNodeEnumDefaultedMember>): BabelNodeEnumStringBody; + declare export function enumSymbolBody(members: Array<BabelNodeEnumDefaultedMember>): BabelNodeEnumSymbolBody; + declare export function enumBooleanMember(id: BabelNodeIdentifier): BabelNodeEnumBooleanMember; + declare export function enumNumberMember(id: BabelNodeIdentifier, init: BabelNodeNumericLiteral): BabelNodeEnumNumberMember; + declare export function enumStringMember(id: BabelNodeIdentifier, init: BabelNodeStringLiteral): BabelNodeEnumStringMember; + declare export function enumDefaultedMember(id: BabelNodeIdentifier): BabelNodeEnumDefaultedMember; + declare export function jsxAttribute(name: BabelNodeJSXIdentifier | BabelNodeJSXNamespacedName, value?: BabelNodeJSXElement | BabelNodeJSXFragment | BabelNodeStringLiteral | BabelNodeJSXExpressionContainer): BabelNodeJSXAttribute; + declare export function jsxClosingElement(name: BabelNodeJSXIdentifier | BabelNodeJSXMemberExpression | BabelNodeJSXNamespacedName): BabelNodeJSXClosingElement; + declare export function jsxElement(openingElement: BabelNodeJSXOpeningElement, closingElement?: BabelNodeJSXClosingElement, children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment>, selfClosing?: boolean): BabelNodeJSXElement; + declare export function jsxEmptyExpression(): BabelNodeJSXEmptyExpression; + declare export function jsxExpressionContainer(expression: BabelNodeExpression | BabelNodeJSXEmptyExpression): BabelNodeJSXExpressionContainer; + declare export function jsxSpreadChild(expression: BabelNodeExpression): BabelNodeJSXSpreadChild; + declare export function jsxIdentifier(name: string): BabelNodeJSXIdentifier; + declare export function jsxMemberExpression(object: BabelNodeJSXMemberExpression | BabelNodeJSXIdentifier, property: BabelNodeJSXIdentifier): BabelNodeJSXMemberExpression; + declare export function jsxNamespacedName(namespace: BabelNodeJSXIdentifier, name: BabelNodeJSXIdentifier): BabelNodeJSXNamespacedName; + declare export function jsxOpeningElement(name: BabelNodeJSXIdentifier | BabelNodeJSXMemberExpression | BabelNodeJSXNamespacedName, attributes: Array<BabelNodeJSXAttribute | BabelNodeJSXSpreadAttribute>, selfClosing?: boolean): BabelNodeJSXOpeningElement; + declare export function jsxSpreadAttribute(argument: BabelNodeExpression): BabelNodeJSXSpreadAttribute; + declare export function jsxText(value: string): BabelNodeJSXText; + declare export function jsxFragment(openingFragment: BabelNodeJSXOpeningFragment, closingFragment: BabelNodeJSXClosingFragment, children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment>): BabelNodeJSXFragment; + declare export function jsxOpeningFragment(): BabelNodeJSXOpeningFragment; + declare export function jsxClosingFragment(): BabelNodeJSXClosingFragment; + declare export function noop(): BabelNodeNoop; + declare export function placeholder(expectedNode: "Identifier" | "StringLiteral" | "Expression" | "Statement" | "Declaration" | "BlockStatement" | "ClassBody" | "Pattern", name: BabelNodeIdentifier): BabelNodePlaceholder; + declare export function v8IntrinsicIdentifier(name: string): BabelNodeV8IntrinsicIdentifier; + declare export function argumentPlaceholder(): BabelNodeArgumentPlaceholder; + declare export 
function bindExpression(object: BabelNodeExpression, callee: BabelNodeExpression): BabelNodeBindExpression; + declare export function classProperty(key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, value?: BabelNodeExpression, typeAnnotation?: BabelNodeTypeAnnotation | BabelNodeTSTypeAnnotation | BabelNodeNoop, decorators?: Array<BabelNodeDecorator>, computed?: boolean, _static?: boolean): BabelNodeClassProperty; + declare export function pipelineTopicExpression(expression: BabelNodeExpression): BabelNodePipelineTopicExpression; + declare export function pipelineBareFunction(callee: BabelNodeExpression): BabelNodePipelineBareFunction; + declare export function pipelinePrimaryTopicReference(): BabelNodePipelinePrimaryTopicReference; + declare export function classPrivateProperty(key: BabelNodePrivateName, value?: BabelNodeExpression, decorators?: Array<BabelNodeDecorator>, _static: any): BabelNodeClassPrivateProperty; + declare export function classPrivateMethod(kind?: "get" | "set" | "method" | "constructor", key: BabelNodePrivateName, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, body: BabelNodeBlockStatement, _static?: boolean): BabelNodeClassPrivateMethod; + declare export function importAttribute(key: BabelNodeIdentifier | BabelNodeStringLiteral, value: BabelNodeStringLiteral): BabelNodeImportAttribute; + declare export function decorator(expression: BabelNodeExpression): BabelNodeDecorator; + declare export function doExpression(body: BabelNodeBlockStatement): BabelNodeDoExpression; + declare export function exportDefaultSpecifier(exported: BabelNodeIdentifier): BabelNodeExportDefaultSpecifier; + declare export function privateName(id: BabelNodeIdentifier): BabelNodePrivateName; + declare export function recordExpression(properties: Array<BabelNodeObjectProperty | BabelNodeSpreadElement>): BabelNodeRecordExpression; + declare export function tupleExpression(elements?: Array<BabelNodeExpression | BabelNodeSpreadElement>): BabelNodeTupleExpression; + declare export function decimalLiteral(value: string): BabelNodeDecimalLiteral; + declare export function staticBlock(body: Array<BabelNodeStatement>): BabelNodeStaticBlock; + declare export function tsParameterProperty(parameter: BabelNodeIdentifier | BabelNodeAssignmentPattern): BabelNodeTSParameterProperty; + declare export function tsDeclareFunction(id?: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration | BabelNodeNoop, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, returnType?: BabelNodeTSTypeAnnotation | BabelNodeNoop): BabelNodeTSDeclareFunction; + declare export function tsDeclareMethod(decorators?: Array<BabelNodeDecorator>, key: BabelNodeIdentifier | BabelNodeStringLiteral | BabelNodeNumericLiteral | BabelNodeExpression, typeParameters?: BabelNodeTSTypeParameterDeclaration | BabelNodeNoop, params: Array<BabelNodeIdentifier | BabelNodePattern | BabelNodeRestElement | BabelNodeTSParameterProperty>, returnType?: BabelNodeTSTypeAnnotation | BabelNodeNoop): BabelNodeTSDeclareMethod; + declare export function tsQualifiedName(left: BabelNodeTSEntityName, right: BabelNodeIdentifier): BabelNodeTSQualifiedName; + declare export function tsCallSignatureDeclaration(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): 
BabelNodeTSCallSignatureDeclaration; + declare export function tsConstructSignatureDeclaration(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSConstructSignatureDeclaration; + declare export function tsPropertySignature(key: BabelNodeExpression, typeAnnotation?: BabelNodeTSTypeAnnotation, initializer?: BabelNodeExpression): BabelNodeTSPropertySignature; + declare export function tsMethodSignature(key: BabelNodeExpression, typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSMethodSignature; + declare export function tsIndexSignature(parameters: Array<BabelNodeIdentifier>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSIndexSignature; + declare export function tsAnyKeyword(): BabelNodeTSAnyKeyword; + declare export function tsBooleanKeyword(): BabelNodeTSBooleanKeyword; + declare export function tsBigIntKeyword(): BabelNodeTSBigIntKeyword; + declare export function tsIntrinsicKeyword(): BabelNodeTSIntrinsicKeyword; + declare export function tsNeverKeyword(): BabelNodeTSNeverKeyword; + declare export function tsNullKeyword(): BabelNodeTSNullKeyword; + declare export function tsNumberKeyword(): BabelNodeTSNumberKeyword; + declare export function tsObjectKeyword(): BabelNodeTSObjectKeyword; + declare export function tsStringKeyword(): BabelNodeTSStringKeyword; + declare export function tsSymbolKeyword(): BabelNodeTSSymbolKeyword; + declare export function tsUndefinedKeyword(): BabelNodeTSUndefinedKeyword; + declare export function tsUnknownKeyword(): BabelNodeTSUnknownKeyword; + declare export function tsVoidKeyword(): BabelNodeTSVoidKeyword; + declare export function tsThisType(): BabelNodeTSThisType; + declare export function tsFunctionType(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSFunctionType; + declare export function tsConstructorType(typeParameters?: BabelNodeTSTypeParameterDeclaration, parameters: Array<BabelNodeIdentifier | BabelNodeRestElement>, typeAnnotation?: BabelNodeTSTypeAnnotation): BabelNodeTSConstructorType; + declare export function tsTypeReference(typeName: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSTypeReference; + declare export function tsTypePredicate(parameterName: BabelNodeIdentifier | BabelNodeTSThisType, typeAnnotation?: BabelNodeTSTypeAnnotation, asserts?: boolean): BabelNodeTSTypePredicate; + declare export function tsTypeQuery(exprName: BabelNodeTSEntityName | BabelNodeTSImportType): BabelNodeTSTypeQuery; + declare export function tsTypeLiteral(members: Array<BabelNodeTSTypeElement>): BabelNodeTSTypeLiteral; + declare export function tsArrayType(elementType: BabelNodeTSType): BabelNodeTSArrayType; + declare export function tsTupleType(elementTypes: Array<BabelNodeTSType | BabelNodeTSNamedTupleMember>): BabelNodeTSTupleType; + declare export function tsOptionalType(typeAnnotation: BabelNodeTSType): BabelNodeTSOptionalType; + declare export function tsRestType(typeAnnotation: BabelNodeTSType): BabelNodeTSRestType; + declare export function tsNamedTupleMember(label: BabelNodeIdentifier, elementType: BabelNodeTSType, optional?: boolean): BabelNodeTSNamedTupleMember; + declare export function tsUnionType(types: Array<BabelNodeTSType>): 
BabelNodeTSUnionType; + declare export function tsIntersectionType(types: Array<BabelNodeTSType>): BabelNodeTSIntersectionType; + declare export function tsConditionalType(checkType: BabelNodeTSType, extendsType: BabelNodeTSType, trueType: BabelNodeTSType, falseType: BabelNodeTSType): BabelNodeTSConditionalType; + declare export function tsInferType(typeParameter: BabelNodeTSTypeParameter): BabelNodeTSInferType; + declare export function tsParenthesizedType(typeAnnotation: BabelNodeTSType): BabelNodeTSParenthesizedType; + declare export function tsTypeOperator(typeAnnotation: BabelNodeTSType): BabelNodeTSTypeOperator; + declare export function tsIndexedAccessType(objectType: BabelNodeTSType, indexType: BabelNodeTSType): BabelNodeTSIndexedAccessType; + declare export function tsMappedType(typeParameter: BabelNodeTSTypeParameter, typeAnnotation?: BabelNodeTSType, nameType?: BabelNodeTSType): BabelNodeTSMappedType; + declare export function tsLiteralType(literal: BabelNodeNumericLiteral | BabelNodeStringLiteral | BabelNodeBooleanLiteral | BabelNodeBigIntLiteral): BabelNodeTSLiteralType; + declare export function tsExpressionWithTypeArguments(expression: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSExpressionWithTypeArguments; + declare export function tsInterfaceDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration, _extends?: Array<BabelNodeTSExpressionWithTypeArguments>, body: BabelNodeTSInterfaceBody): BabelNodeTSInterfaceDeclaration; + declare export function tsInterfaceBody(body: Array<BabelNodeTSTypeElement>): BabelNodeTSInterfaceBody; + declare export function tsTypeAliasDeclaration(id: BabelNodeIdentifier, typeParameters?: BabelNodeTSTypeParameterDeclaration, typeAnnotation: BabelNodeTSType): BabelNodeTSTypeAliasDeclaration; + declare export function tsAsExpression(expression: BabelNodeExpression, typeAnnotation: BabelNodeTSType): BabelNodeTSAsExpression; + declare export function tsTypeAssertion(typeAnnotation: BabelNodeTSType, expression: BabelNodeExpression): BabelNodeTSTypeAssertion; + declare export function tsEnumDeclaration(id: BabelNodeIdentifier, members: Array<BabelNodeTSEnumMember>): BabelNodeTSEnumDeclaration; + declare export function tsEnumMember(id: BabelNodeIdentifier | BabelNodeStringLiteral, initializer?: BabelNodeExpression): BabelNodeTSEnumMember; + declare export function tsModuleDeclaration(id: BabelNodeIdentifier | BabelNodeStringLiteral, body: BabelNodeTSModuleBlock | BabelNodeTSModuleDeclaration): BabelNodeTSModuleDeclaration; + declare export function tsModuleBlock(body: Array<BabelNodeStatement>): BabelNodeTSModuleBlock; + declare export function tsImportType(argument: BabelNodeStringLiteral, qualifier?: BabelNodeTSEntityName, typeParameters?: BabelNodeTSTypeParameterInstantiation): BabelNodeTSImportType; + declare export function tsImportEqualsDeclaration(id: BabelNodeIdentifier, moduleReference: BabelNodeTSEntityName | BabelNodeTSExternalModuleReference): BabelNodeTSImportEqualsDeclaration; + declare export function tsExternalModuleReference(expression: BabelNodeStringLiteral): BabelNodeTSExternalModuleReference; + declare export function tsNonNullExpression(expression: BabelNodeExpression): BabelNodeTSNonNullExpression; + declare export function tsExportAssignment(expression: BabelNodeExpression): BabelNodeTSExportAssignment; + declare export function tsNamespaceExportDeclaration(id: BabelNodeIdentifier): BabelNodeTSNamespaceExportDeclaration; + declare export function 
tsTypeAnnotation(typeAnnotation: BabelNodeTSType): BabelNodeTSTypeAnnotation; + declare export function tsTypeParameterInstantiation(params: Array<BabelNodeTSType>): BabelNodeTSTypeParameterInstantiation; + declare export function tsTypeParameterDeclaration(params: Array<BabelNodeTSTypeParameter>): BabelNodeTSTypeParameterDeclaration; + declare export function tsTypeParameter(constraint?: BabelNodeTSType, _default?: BabelNodeTSType, name: string): BabelNodeTSTypeParameter; + declare export function isArrayExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayExpression) + declare export function assertArrayExpression(node: ?Object, opts?: ?Object): void + declare export function isAssignmentExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAssignmentExpression) + declare export function assertAssignmentExpression(node: ?Object, opts?: ?Object): void + declare export function isBinaryExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBinaryExpression) + declare export function assertBinaryExpression(node: ?Object, opts?: ?Object): void + declare export function isInterpreterDirective(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterpreterDirective) + declare export function assertInterpreterDirective(node: ?Object, opts?: ?Object): void + declare export function isDirective(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDirective) + declare export function assertDirective(node: ?Object, opts?: ?Object): void + declare export function isDirectiveLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDirectiveLiteral) + declare export function assertDirectiveLiteral(node: ?Object, opts?: ?Object): void + declare export function isBlockStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBlockStatement) + declare export function assertBlockStatement(node: ?Object, opts?: ?Object): void + declare export function isBreakStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBreakStatement) + declare export function assertBreakStatement(node: ?Object, opts?: ?Object): void + declare export function isCallExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeCallExpression) + declare export function assertCallExpression(node: ?Object, opts?: ?Object): void + declare export function isCatchClause(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeCatchClause) + declare export function assertCatchClause(node: ?Object, opts?: ?Object): void + declare export function isConditionalExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeConditionalExpression) + declare export function assertConditionalExpression(node: ?Object, opts?: ?Object): void + declare export function isContinueStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeContinueStatement) + declare export function assertContinueStatement(node: ?Object, opts?: ?Object): void + declare export function isDebuggerStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDebuggerStatement) + declare export function assertDebuggerStatement(node: ?Object, opts?: ?Object): void + declare export function isDoWhileStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDoWhileStatement) + declare export function 
assertDoWhileStatement(node: ?Object, opts?: ?Object): void + declare export function isEmptyStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEmptyStatement) + declare export function assertEmptyStatement(node: ?Object, opts?: ?Object): void + declare export function isExpressionStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExpressionStatement) + declare export function assertExpressionStatement(node: ?Object, opts?: ?Object): void + declare export function isFile(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFile) + declare export function assertFile(node: ?Object, opts?: ?Object): void + declare export function isForInStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForInStatement) + declare export function assertForInStatement(node: ?Object, opts?: ?Object): void + declare export function isForStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForStatement) + declare export function assertForStatement(node: ?Object, opts?: ?Object): void + declare export function isFunctionDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionDeclaration) + declare export function assertFunctionDeclaration(node: ?Object, opts?: ?Object): void + declare export function isFunctionExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionExpression) + declare export function assertFunctionExpression(node: ?Object, opts?: ?Object): void + declare export function isIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIdentifier) + declare export function assertIdentifier(node: ?Object, opts?: ?Object): void + declare export function isIfStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIfStatement) + declare export function assertIfStatement(node: ?Object, opts?: ?Object): void + declare export function isLabeledStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeLabeledStatement) + declare export function assertLabeledStatement(node: ?Object, opts?: ?Object): void + declare export function isStringLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringLiteral) + declare export function assertStringLiteral(node: ?Object, opts?: ?Object): void + declare export function isNumericLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumericLiteral) + declare export function assertNumericLiteral(node: ?Object, opts?: ?Object): void + declare export function isNullLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullLiteral) + declare export function assertNullLiteral(node: ?Object, opts?: ?Object): void + declare export function isBooleanLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanLiteral) + declare export function assertBooleanLiteral(node: ?Object, opts?: ?Object): void + declare export function isRegExpLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRegExpLiteral) + declare export function assertRegExpLiteral(node: ?Object, opts?: ?Object): void + declare export function isLogicalExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeLogicalExpression) + declare export function assertLogicalExpression(node: ?Object, opts?: ?Object): void + declare export function 
isMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMemberExpression) + declare export function assertMemberExpression(node: ?Object, opts?: ?Object): void + declare export function isNewExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNewExpression) + declare export function assertNewExpression(node: ?Object, opts?: ?Object): void + declare export function isProgram(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeProgram) + declare export function assertProgram(node: ?Object, opts?: ?Object): void + declare export function isObjectExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectExpression) + declare export function assertObjectExpression(node: ?Object, opts?: ?Object): void + declare export function isObjectMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectMethod) + declare export function assertObjectMethod(node: ?Object, opts?: ?Object): void + declare export function isObjectProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectProperty) + declare export function assertObjectProperty(node: ?Object, opts?: ?Object): void + declare export function isRestElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRestElement) + declare export function assertRestElement(node: ?Object, opts?: ?Object): void + declare export function isReturnStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeReturnStatement) + declare export function assertReturnStatement(node: ?Object, opts?: ?Object): void + declare export function isSequenceExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSequenceExpression) + declare export function assertSequenceExpression(node: ?Object, opts?: ?Object): void + declare export function isParenthesizedExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeParenthesizedExpression) + declare export function assertParenthesizedExpression(node: ?Object, opts?: ?Object): void + declare export function isSwitchCase(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSwitchCase) + declare export function assertSwitchCase(node: ?Object, opts?: ?Object): void + declare export function isSwitchStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSwitchStatement) + declare export function assertSwitchStatement(node: ?Object, opts?: ?Object): void + declare export function isThisExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThisExpression) + declare export function assertThisExpression(node: ?Object, opts?: ?Object): void + declare export function isThrowStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThrowStatement) + declare export function assertThrowStatement(node: ?Object, opts?: ?Object): void + declare export function isTryStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTryStatement) + declare export function assertTryStatement(node: ?Object, opts?: ?Object): void + declare export function isUnaryExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeUnaryExpression) + declare export function assertUnaryExpression(node: ?Object, opts?: ?Object): void + declare export function isUpdateExpression(node: ?Object, opts?: ?Object): boolean %checks (node 
instanceof BabelNodeUpdateExpression) + declare export function assertUpdateExpression(node: ?Object, opts?: ?Object): void + declare export function isVariableDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariableDeclaration) + declare export function assertVariableDeclaration(node: ?Object, opts?: ?Object): void + declare export function isVariableDeclarator(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariableDeclarator) + declare export function assertVariableDeclarator(node: ?Object, opts?: ?Object): void + declare export function isWhileStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeWhileStatement) + declare export function assertWhileStatement(node: ?Object, opts?: ?Object): void + declare export function isWithStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeWithStatement) + declare export function assertWithStatement(node: ?Object, opts?: ?Object): void + declare export function isAssignmentPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAssignmentPattern) + declare export function assertAssignmentPattern(node: ?Object, opts?: ?Object): void + declare export function isArrayPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayPattern) + declare export function assertArrayPattern(node: ?Object, opts?: ?Object): void + declare export function isArrowFunctionExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrowFunctionExpression) + declare export function assertArrowFunctionExpression(node: ?Object, opts?: ?Object): void + declare export function isClassBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassBody) + declare export function assertClassBody(node: ?Object, opts?: ?Object): void + declare export function isClassExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassExpression) + declare export function assertClassExpression(node: ?Object, opts?: ?Object): void + declare export function isClassDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassDeclaration) + declare export function assertClassDeclaration(node: ?Object, opts?: ?Object): void + declare export function isExportAllDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportAllDeclaration) + declare export function assertExportAllDeclaration(node: ?Object, opts?: ?Object): void + declare export function isExportDefaultDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportDefaultDeclaration) + declare export function assertExportDefaultDeclaration(node: ?Object, opts?: ?Object): void + declare export function isExportNamedDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportNamedDeclaration) + declare export function assertExportNamedDeclaration(node: ?Object, opts?: ?Object): void + declare export function isExportSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportSpecifier) + declare export function assertExportSpecifier(node: ?Object, opts?: ?Object): void + declare export function isForOfStatement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeForOfStatement) + declare export function assertForOfStatement(node: ?Object, opts?: ?Object): void + declare export function isImportDeclaration(node: 
?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportDeclaration) + declare export function assertImportDeclaration(node: ?Object, opts?: ?Object): void + declare export function isImportDefaultSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportDefaultSpecifier) + declare export function assertImportDefaultSpecifier(node: ?Object, opts?: ?Object): void + declare export function isImportNamespaceSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportNamespaceSpecifier) + declare export function assertImportNamespaceSpecifier(node: ?Object, opts?: ?Object): void + declare export function isImportSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportSpecifier) + declare export function assertImportSpecifier(node: ?Object, opts?: ?Object): void + declare export function isMetaProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMetaProperty) + declare export function assertMetaProperty(node: ?Object, opts?: ?Object): void + declare export function isClassMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassMethod) + declare export function assertClassMethod(node: ?Object, opts?: ?Object): void + declare export function isObjectPattern(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectPattern) + declare export function assertObjectPattern(node: ?Object, opts?: ?Object): void + declare export function isSpreadElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSpreadElement) + declare export function assertSpreadElement(node: ?Object, opts?: ?Object): void + declare export function isSuper(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSuper) + declare export function assertSuper(node: ?Object, opts?: ?Object): void + declare export function isTaggedTemplateExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTaggedTemplateExpression) + declare export function assertTaggedTemplateExpression(node: ?Object, opts?: ?Object): void + declare export function isTemplateElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTemplateElement) + declare export function assertTemplateElement(node: ?Object, opts?: ?Object): void + declare export function isTemplateLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTemplateLiteral) + declare export function assertTemplateLiteral(node: ?Object, opts?: ?Object): void + declare export function isYieldExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeYieldExpression) + declare export function assertYieldExpression(node: ?Object, opts?: ?Object): void + declare export function isAwaitExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAwaitExpression) + declare export function assertAwaitExpression(node: ?Object, opts?: ?Object): void + declare export function isImport(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImport) + declare export function assertImport(node: ?Object, opts?: ?Object): void + declare export function isBigIntLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBigIntLiteral) + declare export function assertBigIntLiteral(node: ?Object, opts?: ?Object): void + declare export function isExportNamespaceSpecifier(node: ?Object, opts?: ?Object): boolean %checks 
(node instanceof BabelNodeExportNamespaceSpecifier) + declare export function assertExportNamespaceSpecifier(node: ?Object, opts?: ?Object): void + declare export function isOptionalMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOptionalMemberExpression) + declare export function assertOptionalMemberExpression(node: ?Object, opts?: ?Object): void + declare export function isOptionalCallExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOptionalCallExpression) + declare export function assertOptionalCallExpression(node: ?Object, opts?: ?Object): void + declare export function isAnyTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeAnyTypeAnnotation) + declare export function assertAnyTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isArrayTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArrayTypeAnnotation) + declare export function assertArrayTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isBooleanTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanTypeAnnotation) + declare export function assertBooleanTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isBooleanLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBooleanLiteralTypeAnnotation) + declare export function assertBooleanLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isNullLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullLiteralTypeAnnotation) + declare export function assertNullLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isClassImplements(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassImplements) + declare export function assertClassImplements(node: ?Object, opts?: ?Object): void + declare export function isDeclareClass(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareClass) + declare export function assertDeclareClass(node: ?Object, opts?: ?Object): void + declare export function isDeclareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareFunction) + declare export function assertDeclareFunction(node: ?Object, opts?: ?Object): void + declare export function isDeclareInterface(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareInterface) + declare export function assertDeclareInterface(node: ?Object, opts?: ?Object): void + declare export function isDeclareModule(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareModule) + declare export function assertDeclareModule(node: ?Object, opts?: ?Object): void + declare export function isDeclareModuleExports(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareModuleExports) + declare export function assertDeclareModuleExports(node: ?Object, opts?: ?Object): void + declare export function isDeclareTypeAlias(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareTypeAlias) + declare export function assertDeclareTypeAlias(node: ?Object, opts?: ?Object): void + declare export function isDeclareOpaqueType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareOpaqueType) + declare export function 
assertDeclareOpaqueType(node: ?Object, opts?: ?Object): void + declare export function isDeclareVariable(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareVariable) + declare export function assertDeclareVariable(node: ?Object, opts?: ?Object): void + declare export function isDeclareExportDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareExportDeclaration) + declare export function assertDeclareExportDeclaration(node: ?Object, opts?: ?Object): void + declare export function isDeclareExportAllDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclareExportAllDeclaration) + declare export function assertDeclareExportAllDeclaration(node: ?Object, opts?: ?Object): void + declare export function isDeclaredPredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDeclaredPredicate) + declare export function assertDeclaredPredicate(node: ?Object, opts?: ?Object): void + declare export function isExistsTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExistsTypeAnnotation) + declare export function assertExistsTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isFunctionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionTypeAnnotation) + declare export function assertFunctionTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isFunctionTypeParam(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeFunctionTypeParam) + declare export function assertFunctionTypeParam(node: ?Object, opts?: ?Object): void + declare export function isGenericTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeGenericTypeAnnotation) + declare export function assertGenericTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isInferredPredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInferredPredicate) + declare export function assertInferredPredicate(node: ?Object, opts?: ?Object): void + declare export function isInterfaceExtends(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceExtends) + declare export function assertInterfaceExtends(node: ?Object, opts?: ?Object): void + declare export function isInterfaceDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceDeclaration) + declare export function assertInterfaceDeclaration(node: ?Object, opts?: ?Object): void + declare export function isInterfaceTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeInterfaceTypeAnnotation) + declare export function assertInterfaceTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isIntersectionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeIntersectionTypeAnnotation) + declare export function assertIntersectionTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isMixedTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeMixedTypeAnnotation) + declare export function assertMixedTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isEmptyTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEmptyTypeAnnotation) + declare export function 
assertEmptyTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isNullableTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNullableTypeAnnotation) + declare export function assertNullableTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isNumberLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumberLiteralTypeAnnotation) + declare export function assertNumberLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isNumberTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumberTypeAnnotation) + declare export function assertNumberTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeAnnotation) + declare export function assertObjectTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeInternalSlot(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeInternalSlot) + declare export function assertObjectTypeInternalSlot(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeCallProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeCallProperty) + declare export function assertObjectTypeCallProperty(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeIndexer(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeIndexer) + declare export function assertObjectTypeIndexer(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeProperty) + declare export function assertObjectTypeProperty(node: ?Object, opts?: ?Object): void + declare export function isObjectTypeSpreadProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeObjectTypeSpreadProperty) + declare export function assertObjectTypeSpreadProperty(node: ?Object, opts?: ?Object): void + declare export function isOpaqueType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeOpaqueType) + declare export function assertOpaqueType(node: ?Object, opts?: ?Object): void + declare export function isQualifiedTypeIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeQualifiedTypeIdentifier) + declare export function assertQualifiedTypeIdentifier(node: ?Object, opts?: ?Object): void + declare export function isStringLiteralTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringLiteralTypeAnnotation) + declare export function assertStringLiteralTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isStringTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStringTypeAnnotation) + declare export function assertStringTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isSymbolTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSymbolTypeAnnotation) + declare export function assertSymbolTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isThisTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeThisTypeAnnotation) + declare export 
function assertThisTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isTupleTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTupleTypeAnnotation) + declare export function assertTupleTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isTypeofTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeofTypeAnnotation) + declare export function assertTypeofTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isTypeAlias(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeAlias) + declare export function assertTypeAlias(node: ?Object, opts?: ?Object): void + declare export function isTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeAnnotation) + declare export function assertTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isTypeCastExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeCastExpression) + declare export function assertTypeCastExpression(node: ?Object, opts?: ?Object): void + declare export function isTypeParameter(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameter) + declare export function assertTypeParameter(node: ?Object, opts?: ?Object): void + declare export function isTypeParameterDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameterDeclaration) + declare export function assertTypeParameterDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTypeParameterInstantiation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTypeParameterInstantiation) + declare export function assertTypeParameterInstantiation(node: ?Object, opts?: ?Object): void + declare export function isUnionTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeUnionTypeAnnotation) + declare export function assertUnionTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isVariance(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVariance) + declare export function assertVariance(node: ?Object, opts?: ?Object): void + declare export function isVoidTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeVoidTypeAnnotation) + declare export function assertVoidTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isEnumDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumDeclaration) + declare export function assertEnumDeclaration(node: ?Object, opts?: ?Object): void + declare export function isEnumBooleanBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumBooleanBody) + declare export function assertEnumBooleanBody(node: ?Object, opts?: ?Object): void + declare export function isEnumNumberBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumNumberBody) + declare export function assertEnumNumberBody(node: ?Object, opts?: ?Object): void + declare export function isEnumStringBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumStringBody) + declare export function assertEnumStringBody(node: ?Object, opts?: ?Object): void + declare export function isEnumSymbolBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof 
BabelNodeEnumSymbolBody) + declare export function assertEnumSymbolBody(node: ?Object, opts?: ?Object): void + declare export function isEnumBooleanMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumBooleanMember) + declare export function assertEnumBooleanMember(node: ?Object, opts?: ?Object): void + declare export function isEnumNumberMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumNumberMember) + declare export function assertEnumNumberMember(node: ?Object, opts?: ?Object): void + declare export function isEnumStringMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumStringMember) + declare export function assertEnumStringMember(node: ?Object, opts?: ?Object): void + declare export function isEnumDefaultedMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeEnumDefaultedMember) + declare export function assertEnumDefaultedMember(node: ?Object, opts?: ?Object): void + declare export function isJSXAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXAttribute) + declare export function assertJSXAttribute(node: ?Object, opts?: ?Object): void + declare export function isJSXClosingElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXClosingElement) + declare export function assertJSXClosingElement(node: ?Object, opts?: ?Object): void + declare export function isJSXElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXElement) + declare export function assertJSXElement(node: ?Object, opts?: ?Object): void + declare export function isJSXEmptyExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXEmptyExpression) + declare export function assertJSXEmptyExpression(node: ?Object, opts?: ?Object): void + declare export function isJSXExpressionContainer(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXExpressionContainer) + declare export function assertJSXExpressionContainer(node: ?Object, opts?: ?Object): void + declare export function isJSXSpreadChild(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXSpreadChild) + declare export function assertJSXSpreadChild(node: ?Object, opts?: ?Object): void + declare export function isJSXIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXIdentifier) + declare export function assertJSXIdentifier(node: ?Object, opts?: ?Object): void + declare export function isJSXMemberExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXMemberExpression) + declare export function assertJSXMemberExpression(node: ?Object, opts?: ?Object): void + declare export function isJSXNamespacedName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXNamespacedName) + declare export function assertJSXNamespacedName(node: ?Object, opts?: ?Object): void + declare export function isJSXOpeningElement(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXOpeningElement) + declare export function assertJSXOpeningElement(node: ?Object, opts?: ?Object): void + declare export function isJSXSpreadAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXSpreadAttribute) + declare export function assertJSXSpreadAttribute(node: ?Object, opts?: ?Object): void + declare export function isJSXText(node: ?Object, opts?: ?Object): boolean %checks 
(node instanceof BabelNodeJSXText) + declare export function assertJSXText(node: ?Object, opts?: ?Object): void + declare export function isJSXFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXFragment) + declare export function assertJSXFragment(node: ?Object, opts?: ?Object): void + declare export function isJSXOpeningFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXOpeningFragment) + declare export function assertJSXOpeningFragment(node: ?Object, opts?: ?Object): void + declare export function isJSXClosingFragment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeJSXClosingFragment) + declare export function assertJSXClosingFragment(node: ?Object, opts?: ?Object): void + declare export function isNoop(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNoop) + declare export function assertNoop(node: ?Object, opts?: ?Object): void + declare export function isPlaceholder(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePlaceholder) + declare export function assertPlaceholder(node: ?Object, opts?: ?Object): void + declare export function isV8IntrinsicIdentifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeV8IntrinsicIdentifier) + declare export function assertV8IntrinsicIdentifier(node: ?Object, opts?: ?Object): void + declare export function isArgumentPlaceholder(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeArgumentPlaceholder) + declare export function assertArgumentPlaceholder(node: ?Object, opts?: ?Object): void + declare export function isBindExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeBindExpression) + declare export function assertBindExpression(node: ?Object, opts?: ?Object): void + declare export function isClassProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassProperty) + declare export function assertClassProperty(node: ?Object, opts?: ?Object): void + declare export function isPipelineTopicExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelineTopicExpression) + declare export function assertPipelineTopicExpression(node: ?Object, opts?: ?Object): void + declare export function isPipelineBareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelineBareFunction) + declare export function assertPipelineBareFunction(node: ?Object, opts?: ?Object): void + declare export function isPipelinePrimaryTopicReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePipelinePrimaryTopicReference) + declare export function assertPipelinePrimaryTopicReference(node: ?Object, opts?: ?Object): void + declare export function isClassPrivateProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassPrivateProperty) + declare export function assertClassPrivateProperty(node: ?Object, opts?: ?Object): void + declare export function isClassPrivateMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeClassPrivateMethod) + declare export function assertClassPrivateMethod(node: ?Object, opts?: ?Object): void + declare export function isImportAttribute(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeImportAttribute) + declare export function assertImportAttribute(node: ?Object, opts?: ?Object): void + declare export function isDecorator(node: ?Object, 
opts?: ?Object): boolean %checks (node instanceof BabelNodeDecorator) + declare export function assertDecorator(node: ?Object, opts?: ?Object): void + declare export function isDoExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDoExpression) + declare export function assertDoExpression(node: ?Object, opts?: ?Object): void + declare export function isExportDefaultSpecifier(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeExportDefaultSpecifier) + declare export function assertExportDefaultSpecifier(node: ?Object, opts?: ?Object): void + declare export function isPrivateName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodePrivateName) + declare export function assertPrivateName(node: ?Object, opts?: ?Object): void + declare export function isRecordExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRecordExpression) + declare export function assertRecordExpression(node: ?Object, opts?: ?Object): void + declare export function isTupleExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTupleExpression) + declare export function assertTupleExpression(node: ?Object, opts?: ?Object): void + declare export function isDecimalLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeDecimalLiteral) + declare export function assertDecimalLiteral(node: ?Object, opts?: ?Object): void + declare export function isStaticBlock(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeStaticBlock) + declare export function assertStaticBlock(node: ?Object, opts?: ?Object): void + declare export function isTSParameterProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSParameterProperty) + declare export function assertTSParameterProperty(node: ?Object, opts?: ?Object): void + declare export function isTSDeclareFunction(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSDeclareFunction) + declare export function assertTSDeclareFunction(node: ?Object, opts?: ?Object): void + declare export function isTSDeclareMethod(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSDeclareMethod) + declare export function assertTSDeclareMethod(node: ?Object, opts?: ?Object): void + declare export function isTSQualifiedName(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSQualifiedName) + declare export function assertTSQualifiedName(node: ?Object, opts?: ?Object): void + declare export function isTSCallSignatureDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSCallSignatureDeclaration) + declare export function assertTSCallSignatureDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSConstructSignatureDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConstructSignatureDeclaration) + declare export function assertTSConstructSignatureDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSPropertySignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSPropertySignature) + declare export function assertTSPropertySignature(node: ?Object, opts?: ?Object): void + declare export function isTSMethodSignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSMethodSignature) + declare export function assertTSMethodSignature(node: ?Object, opts?: ?Object): void 
+ declare export function isTSIndexSignature(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIndexSignature) + declare export function assertTSIndexSignature(node: ?Object, opts?: ?Object): void + declare export function isTSAnyKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSAnyKeyword) + declare export function assertTSAnyKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSBooleanKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSBooleanKeyword) + declare export function assertTSBooleanKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSBigIntKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSBigIntKeyword) + declare export function assertTSBigIntKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSIntrinsicKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIntrinsicKeyword) + declare export function assertTSIntrinsicKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSNeverKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNeverKeyword) + declare export function assertTSNeverKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSNullKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNullKeyword) + declare export function assertTSNullKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSNumberKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNumberKeyword) + declare export function assertTSNumberKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSObjectKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSObjectKeyword) + declare export function assertTSObjectKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSStringKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSStringKeyword) + declare export function assertTSStringKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSSymbolKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSSymbolKeyword) + declare export function assertTSSymbolKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSUndefinedKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUndefinedKeyword) + declare export function assertTSUndefinedKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSUnknownKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUnknownKeyword) + declare export function assertTSUnknownKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSVoidKeyword(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSVoidKeyword) + declare export function assertTSVoidKeyword(node: ?Object, opts?: ?Object): void + declare export function isTSThisType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSThisType) + declare export function assertTSThisType(node: ?Object, opts?: ?Object): void + declare export function isTSFunctionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSFunctionType) + declare export function assertTSFunctionType(node: ?Object, opts?: ?Object): void + declare export function 
isTSConstructorType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConstructorType) + declare export function assertTSConstructorType(node: ?Object, opts?: ?Object): void + declare export function isTSTypeReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeReference) + declare export function assertTSTypeReference(node: ?Object, opts?: ?Object): void + declare export function isTSTypePredicate(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypePredicate) + declare export function assertTSTypePredicate(node: ?Object, opts?: ?Object): void + declare export function isTSTypeQuery(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeQuery) + declare export function assertTSTypeQuery(node: ?Object, opts?: ?Object): void + declare export function isTSTypeLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeLiteral) + declare export function assertTSTypeLiteral(node: ?Object, opts?: ?Object): void + declare export function isTSArrayType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSArrayType) + declare export function assertTSArrayType(node: ?Object, opts?: ?Object): void + declare export function isTSTupleType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTupleType) + declare export function assertTSTupleType(node: ?Object, opts?: ?Object): void + declare export function isTSOptionalType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSOptionalType) + declare export function assertTSOptionalType(node: ?Object, opts?: ?Object): void + declare export function isTSRestType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSRestType) + declare export function assertTSRestType(node: ?Object, opts?: ?Object): void + declare export function isTSNamedTupleMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNamedTupleMember) + declare export function assertTSNamedTupleMember(node: ?Object, opts?: ?Object): void + declare export function isTSUnionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSUnionType) + declare export function assertTSUnionType(node: ?Object, opts?: ?Object): void + declare export function isTSIntersectionType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSIntersectionType) + declare export function assertTSIntersectionType(node: ?Object, opts?: ?Object): void + declare export function isTSConditionalType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSConditionalType) + declare export function assertTSConditionalType(node: ?Object, opts?: ?Object): void + declare export function isTSInferType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInferType) + declare export function assertTSInferType(node: ?Object, opts?: ?Object): void + declare export function isTSParenthesizedType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSParenthesizedType) + declare export function assertTSParenthesizedType(node: ?Object, opts?: ?Object): void + declare export function isTSTypeOperator(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeOperator) + declare export function assertTSTypeOperator(node: ?Object, opts?: ?Object): void + declare export function isTSIndexedAccessType(node: ?Object, opts?: ?Object): boolean %checks (node 
instanceof BabelNodeTSIndexedAccessType) + declare export function assertTSIndexedAccessType(node: ?Object, opts?: ?Object): void + declare export function isTSMappedType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSMappedType) + declare export function assertTSMappedType(node: ?Object, opts?: ?Object): void + declare export function isTSLiteralType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSLiteralType) + declare export function assertTSLiteralType(node: ?Object, opts?: ?Object): void + declare export function isTSExpressionWithTypeArguments(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExpressionWithTypeArguments) + declare export function assertTSExpressionWithTypeArguments(node: ?Object, opts?: ?Object): void + declare export function isTSInterfaceDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInterfaceDeclaration) + declare export function assertTSInterfaceDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSInterfaceBody(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSInterfaceBody) + declare export function assertTSInterfaceBody(node: ?Object, opts?: ?Object): void + declare export function isTSTypeAliasDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAliasDeclaration) + declare export function assertTSTypeAliasDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSAsExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSAsExpression) + declare export function assertTSAsExpression(node: ?Object, opts?: ?Object): void + declare export function isTSTypeAssertion(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAssertion) + declare export function assertTSTypeAssertion(node: ?Object, opts?: ?Object): void + declare export function isTSEnumDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSEnumDeclaration) + declare export function assertTSEnumDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSEnumMember(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSEnumMember) + declare export function assertTSEnumMember(node: ?Object, opts?: ?Object): void + declare export function isTSModuleDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSModuleDeclaration) + declare export function assertTSModuleDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSModuleBlock(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSModuleBlock) + declare export function assertTSModuleBlock(node: ?Object, opts?: ?Object): void + declare export function isTSImportType(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSImportType) + declare export function assertTSImportType(node: ?Object, opts?: ?Object): void + declare export function isTSImportEqualsDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSImportEqualsDeclaration) + declare export function assertTSImportEqualsDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSExternalModuleReference(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExternalModuleReference) + declare export function assertTSExternalModuleReference(node: ?Object, opts?: ?Object): void + 
declare export function isTSNonNullExpression(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNonNullExpression) + declare export function assertTSNonNullExpression(node: ?Object, opts?: ?Object): void + declare export function isTSExportAssignment(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSExportAssignment) + declare export function assertTSExportAssignment(node: ?Object, opts?: ?Object): void + declare export function isTSNamespaceExportDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSNamespaceExportDeclaration) + declare export function assertTSNamespaceExportDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSTypeAnnotation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeAnnotation) + declare export function assertTSTypeAnnotation(node: ?Object, opts?: ?Object): void + declare export function isTSTypeParameterInstantiation(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameterInstantiation) + declare export function assertTSTypeParameterInstantiation(node: ?Object, opts?: ?Object): void + declare export function isTSTypeParameterDeclaration(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameterDeclaration) + declare export function assertTSTypeParameterDeclaration(node: ?Object, opts?: ?Object): void + declare export function isTSTypeParameter(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeTSTypeParameter) + declare export function assertTSTypeParameter(node: ?Object, opts?: ?Object): void + declare export function isExpression(node: ?Object, opts?: ?Object): boolean + declare export function assertExpression(node: ?Object, opts?: ?Object): void + declare export function isBinary(node: ?Object, opts?: ?Object): boolean + declare export function assertBinary(node: ?Object, opts?: ?Object): void + declare export function isScopable(node: ?Object, opts?: ?Object): boolean + declare export function assertScopable(node: ?Object, opts?: ?Object): void + declare export function isBlockParent(node: ?Object, opts?: ?Object): boolean + declare export function assertBlockParent(node: ?Object, opts?: ?Object): void + declare export function isBlock(node: ?Object, opts?: ?Object): boolean + declare export function assertBlock(node: ?Object, opts?: ?Object): void + declare export function isStatement(node: ?Object, opts?: ?Object): boolean + declare export function assertStatement(node: ?Object, opts?: ?Object): void + declare export function isTerminatorless(node: ?Object, opts?: ?Object): boolean + declare export function assertTerminatorless(node: ?Object, opts?: ?Object): void + declare export function isCompletionStatement(node: ?Object, opts?: ?Object): boolean + declare export function assertCompletionStatement(node: ?Object, opts?: ?Object): void + declare export function isConditional(node: ?Object, opts?: ?Object): boolean + declare export function assertConditional(node: ?Object, opts?: ?Object): void + declare export function isLoop(node: ?Object, opts?: ?Object): boolean + declare export function assertLoop(node: ?Object, opts?: ?Object): void + declare export function isWhile(node: ?Object, opts?: ?Object): boolean + declare export function assertWhile(node: ?Object, opts?: ?Object): void + declare export function isExpressionWrapper(node: ?Object, opts?: ?Object): boolean + declare export function assertExpressionWrapper(node: 
?Object, opts?: ?Object): void + declare export function isFor(node: ?Object, opts?: ?Object): boolean + declare export function assertFor(node: ?Object, opts?: ?Object): void + declare export function isForXStatement(node: ?Object, opts?: ?Object): boolean + declare export function assertForXStatement(node: ?Object, opts?: ?Object): void + declare export function isFunction(node: ?Object, opts?: ?Object): boolean + declare export function assertFunction(node: ?Object, opts?: ?Object): void + declare export function isFunctionParent(node: ?Object, opts?: ?Object): boolean + declare export function assertFunctionParent(node: ?Object, opts?: ?Object): void + declare export function isPureish(node: ?Object, opts?: ?Object): boolean + declare export function assertPureish(node: ?Object, opts?: ?Object): void + declare export function isDeclaration(node: ?Object, opts?: ?Object): boolean + declare export function assertDeclaration(node: ?Object, opts?: ?Object): void + declare export function isPatternLike(node: ?Object, opts?: ?Object): boolean + declare export function assertPatternLike(node: ?Object, opts?: ?Object): void + declare export function isLVal(node: ?Object, opts?: ?Object): boolean + declare export function assertLVal(node: ?Object, opts?: ?Object): void + declare export function isTSEntityName(node: ?Object, opts?: ?Object): boolean + declare export function assertTSEntityName(node: ?Object, opts?: ?Object): void + declare export function isLiteral(node: ?Object, opts?: ?Object): boolean + declare export function assertLiteral(node: ?Object, opts?: ?Object): void + declare export function isImmutable(node: ?Object, opts?: ?Object): boolean + declare export function assertImmutable(node: ?Object, opts?: ?Object): void + declare export function isUserWhitespacable(node: ?Object, opts?: ?Object): boolean + declare export function assertUserWhitespacable(node: ?Object, opts?: ?Object): void + declare export function isMethod(node: ?Object, opts?: ?Object): boolean + declare export function assertMethod(node: ?Object, opts?: ?Object): void + declare export function isObjectMember(node: ?Object, opts?: ?Object): boolean + declare export function assertObjectMember(node: ?Object, opts?: ?Object): void + declare export function isProperty(node: ?Object, opts?: ?Object): boolean + declare export function assertProperty(node: ?Object, opts?: ?Object): void + declare export function isUnaryLike(node: ?Object, opts?: ?Object): boolean + declare export function assertUnaryLike(node: ?Object, opts?: ?Object): void + declare export function isPattern(node: ?Object, opts?: ?Object): boolean + declare export function assertPattern(node: ?Object, opts?: ?Object): void + declare export function isClass(node: ?Object, opts?: ?Object): boolean + declare export function assertClass(node: ?Object, opts?: ?Object): void + declare export function isModuleDeclaration(node: ?Object, opts?: ?Object): boolean + declare export function assertModuleDeclaration(node: ?Object, opts?: ?Object): void + declare export function isExportDeclaration(node: ?Object, opts?: ?Object): boolean + declare export function assertExportDeclaration(node: ?Object, opts?: ?Object): void + declare export function isModuleSpecifier(node: ?Object, opts?: ?Object): boolean + declare export function assertModuleSpecifier(node: ?Object, opts?: ?Object): void + declare export function isFlow(node: ?Object, opts?: ?Object): boolean + declare export function assertFlow(node: ?Object, opts?: ?Object): void + declare export function 
isFlowType(node: ?Object, opts?: ?Object): boolean + declare export function assertFlowType(node: ?Object, opts?: ?Object): void + declare export function isFlowBaseAnnotation(node: ?Object, opts?: ?Object): boolean + declare export function assertFlowBaseAnnotation(node: ?Object, opts?: ?Object): void + declare export function isFlowDeclaration(node: ?Object, opts?: ?Object): boolean + declare export function assertFlowDeclaration(node: ?Object, opts?: ?Object): void + declare export function isFlowPredicate(node: ?Object, opts?: ?Object): boolean + declare export function assertFlowPredicate(node: ?Object, opts?: ?Object): void + declare export function isEnumBody(node: ?Object, opts?: ?Object): boolean + declare export function assertEnumBody(node: ?Object, opts?: ?Object): void + declare export function isEnumMember(node: ?Object, opts?: ?Object): boolean + declare export function assertEnumMember(node: ?Object, opts?: ?Object): void + declare export function isJSX(node: ?Object, opts?: ?Object): boolean + declare export function assertJSX(node: ?Object, opts?: ?Object): void + declare export function isPrivate(node: ?Object, opts?: ?Object): boolean + declare export function assertPrivate(node: ?Object, opts?: ?Object): void + declare export function isTSTypeElement(node: ?Object, opts?: ?Object): boolean + declare export function assertTSTypeElement(node: ?Object, opts?: ?Object): void + declare export function isTSType(node: ?Object, opts?: ?Object): boolean + declare export function assertTSType(node: ?Object, opts?: ?Object): void + declare export function isTSBaseType(node: ?Object, opts?: ?Object): boolean + declare export function assertTSBaseType(node: ?Object, opts?: ?Object): void + declare export function isNumberLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeNumericLiteral) + declare export function assertNumberLiteral(node: ?Object, opts?: ?Object): void + declare export function isRegexLiteral(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRegExpLiteral) + declare export function assertRegexLiteral(node: ?Object, opts?: ?Object): void + declare export function isRestProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeRestElement) + declare export function assertRestProperty(node: ?Object, opts?: ?Object): void + declare export function isSpreadProperty(node: ?Object, opts?: ?Object): boolean %checks (node instanceof BabelNodeSpreadElement) + declare export function assertSpreadProperty(node: ?Object, opts?: ?Object): void + declare export var VISITOR_KEYS: { [type: string]: string[] } + declare export function assertNode(obj: any): void + declare export function createTypeAnnotationBasedOnTypeof(type: 'string' | 'number' | 'undefined' | 'boolean' | 'function' | 'object' | 'symbol'): BabelNodeTypeAnnotation + declare export function createUnionTypeAnnotation(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation + declare export function createFlowUnionType(types: Array<BabelNodeFlowType>): BabelNodeUnionTypeAnnotation + declare export function buildChildren(node: { children: Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment | BabelNodeJSXEmptyExpression> }): Array<BabelNodeJSXText | BabelNodeJSXExpressionContainer | BabelNodeJSXSpreadChild | BabelNodeJSXElement | BabelNodeJSXFragment> + declare export function clone<T>(n: T): T; + declare export function cloneDeep<T>(n: T): T; + declare export 
function cloneDeepWithoutLoc<T>(n: T): T; + declare export function cloneNode<T>(n: T, deep?: boolean, withoutLoc?: boolean): T; + declare export function cloneWithoutLoc<T>(n: T): T; declare type CommentTypeShorthand = 'leading' | 'inner' | 'trailing' - declare function addComment<T: Node>(node: T, type: CommentTypeShorthand, content: string, line?: boolean): T - declare function addComments<T: Node>(node: T, type: CommentTypeShorthand, comments: Array<Comment>): T - declare function inheritInnerComments(node: Node, parent: Node): void - declare function inheritLeadingComments(node: Node, parent: Node): void - declare function inheritsComments<T: Node>(node: T, parent: Node): void - declare function inheritTrailingComments(node: Node, parent: Node): void - declare function removeComments<T: Node>(node: T): T - declare function ensureBlock(node: BabelNode, key: string): BabelNodeBlockStatement - declare function toBindingIdentifierName(name?: ?string): string - declare function toBlock(node: BabelNodeStatement | BabelNodeExpression, parent?: BabelNodeFunction | null): BabelNodeBlockStatement - declare function toComputedKey(node: BabelNodeMethod | BabelNodeProperty, key?: BabelNodeExpression | BabelNodeIdentifier): BabelNodeExpression - declare function toExpression(node: BabelNodeExpressionStatement | BabelNodeExpression | BabelNodeClass | BabelNodeFunction): BabelNodeExpression - declare function toIdentifier(name?: ?string): string - declare function toKeyAlias(node: BabelNodeMethod | BabelNodeProperty, key?: BabelNode): string - declare function toStatement(node: BabelNodeStatement | BabelNodeClass | BabelNodeFunction | BabelNodeAssignmentExpression, ignore?: boolean): BabelNodeStatement | void - declare function valueToNode(value: any): BabelNodeExpression - declare function removeTypeDuplicates(types: Array<BabelNodeFlowType>): Array<BabelNodeFlowType> - declare function appendToMemberExpression(member: BabelNodeMemberExpression, append: BabelNode, computed?: boolean): BabelNodeMemberExpression - declare function inherits<T: Node>(child: T, parent: BabelNode | null | void): T - declare function prependToMemberExpression(member: BabelNodeMemberExpression, prepend: BabelNodeExpression): BabelNodeMemberExpression - declare function removeProperties<T>(n: T, opts: ?{}): void; - declare function removePropertiesDeep<T>(n: T, opts: ?{}): T; - declare function getBindingIdentifiers(node: BabelNode, duplicates: boolean, outerOnly?: boolean): { [key: string]: BabelNodeIdentifier | Array<BabelNodeIdentifier> } - declare function getOuterBindingIdentifiers(node: Node, duplicates: boolean): { [key: string]: BabelNodeIdentifier | Array<BabelNodeIdentifier> } + declare export function addComment<T: BabelNode>(node: T, type: CommentTypeShorthand, content: string, line?: boolean): T + declare export function addComments<T: BabelNode>(node: T, type: CommentTypeShorthand, comments: Array<Comment>): T + declare export function inheritInnerComments(node: BabelNode, parent: BabelNode): void + declare export function inheritLeadingComments(node: BabelNode, parent: BabelNode): void + declare export function inheritsComments<T: BabelNode>(node: T, parent: BabelNode): void + declare export function inheritTrailingComments(node: BabelNode, parent: BabelNode): void + declare export function removeComments<T: BabelNode>(node: T): T + declare export function ensureBlock(node: BabelNode, key: string): BabelNodeBlockStatement + declare export function toBindingIdentifierName(name?: ?string): string + declare 
export function toBlock(node: BabelNodeStatement | BabelNodeExpression, parent?: BabelNodeFunction | null): BabelNodeBlockStatement + declare export function toComputedKey(node: BabelNodeMethod | BabelNodeProperty, key?: BabelNodeExpression | BabelNodeIdentifier): BabelNodeExpression + declare export function toExpression(node: BabelNodeExpressionStatement | BabelNodeExpression | BabelNodeClass | BabelNodeFunction): BabelNodeExpression + declare export function toIdentifier(name?: ?string): string + declare export function toKeyAlias(node: BabelNodeMethod | BabelNodeProperty, key?: BabelNode): string + declare export function toStatement(node: BabelNodeStatement | BabelNodeClass | BabelNodeFunction | BabelNodeAssignmentExpression, ignore?: boolean): BabelNodeStatement | void + declare export function valueToNode(value: any): BabelNodeExpression + declare export function removeTypeDuplicates(types: Array<BabelNodeFlowType>): Array<BabelNodeFlowType> + declare export function appendToMemberExpression(member: BabelNodeMemberExpression, append: BabelNode, computed?: boolean): BabelNodeMemberExpression + declare export function inherits<T: BabelNode>(child: T, parent: BabelNode | null | void): T + declare export function prependToMemberExpression(member: BabelNodeMemberExpression, prepend: BabelNodeExpression): BabelNodeMemberExpression + declare export function removeProperties<T>(n: T, opts: ?{}): void; + declare export function removePropertiesDeep<T>(n: T, opts: ?{}): T; + declare export var getBindingIdentifiers: { + (node: BabelNode, duplicates?: boolean, outerOnly?: boolean): { [key: string]: BabelNodeIdentifier | Array<BabelNodeIdentifier> }, + keys: { [type: string]: string[] } + } + declare export function getOuterBindingIdentifiers(node: BabelNode, duplicates?: boolean): { [key: string]: BabelNodeIdentifier | Array<BabelNodeIdentifier> } declare type TraversalAncestors = Array<{ node: BabelNode, key: string, @@ -2450,26 +2457,26 @@ declare module "@babel/types" { enter?: TraversalHandler<T>, exit?: TraversalHandler<T>, }; - declare function traverse<T>(n: BabelNode, TraversalHandler<T> | TraversalHandlers<T>, state?: T): void; - declare function traverseFast<T>(n: Node, h: TraversalHandler<T>, state?: T): void; - declare function shallowEqual(actual: Object, expected: Object): boolean - declare function buildMatchMemberExpression(match: string, allowPartial?: boolean): (?BabelNode) => boolean - declare function is(type: string, n: BabelNode, opts: Object): boolean; - declare function isBinding(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean - declare function isBlockScoped(node: BabelNode): boolean - declare function isImmutable(node: BabelNode): boolean - declare function isLet(node: BabelNode): boolean - declare function isNode(node: ?Object): boolean - declare function isNodesEquivalent(a: any, b: any): boolean - declare function isPlaceholderType(placeholderType: string, targetType: string): boolean - declare function isReferenced(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean - declare function isScope(node: BabelNode, parent: BabelNode): boolean - declare function isSpecifierDefault(specifier: BabelNodeModuleSpecifier): boolean - declare function isType(nodetype: ?string, targetType: string): boolean - declare function isValidES3Identifier(name: string): boolean - declare function isValidES3Identifier(name: string): boolean - declare function isValidIdentifier(name: string): boolean - declare function isVar(node: BabelNode): boolean 
- declare function matchesPattern(node: ?BabelNode, match: string | Array<string>, allowPartial?: boolean): boolean
- declare function validate(n: BabelNode, key: string, value: mixed): void;
+ declare export function traverse<T>(n: BabelNode, TraversalHandler<T> | TraversalHandlers<T>, state?: T): void;
+ declare export function traverseFast<T>(n: BabelNode, h: TraversalHandler<T>, state?: T): void;
+ declare export function shallowEqual(actual: Object, expected: Object): boolean
+ declare export function buildMatchMemberExpression(match: string, allowPartial?: boolean): (?BabelNode) => boolean
+ declare export function is(type: string, n: BabelNode, opts: Object): boolean;
+ declare export function isBinding(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean
+ declare export function isBlockScoped(node: BabelNode): boolean
+ declare export function isImmutable(node: BabelNode): boolean
+ declare export function isLet(node: BabelNode): boolean
+ declare export function isNode(node: ?Object): boolean
+ declare export function isNodesEquivalent(a: any, b: any): boolean
+ declare export function isPlaceholderType(placeholderType: string, targetType: string): boolean
+ declare export function isReferenced(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean
+ declare export function isScope(node: BabelNode, parent: BabelNode): boolean
+ declare export function isSpecifierDefault(specifier: BabelNodeModuleSpecifier): boolean
+ declare export function isType(nodetype: ?string, targetType: string): boolean
+ declare export function isValidES3Identifier(name: string): boolean
+ declare export function isValidES3Identifier(name: string): boolean
+ declare export function isValidIdentifier(name: string): boolean
+ declare export function isVar(node: BabelNode): boolean
+ declare export function matchesPattern(node: ?BabelNode, match: string | Array<string>, allowPartial?: boolean): boolean
+ declare export function validate(n: BabelNode, key: string, value: mixed): void;
 }
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/flow/removeTypeDuplicates.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/flow/removeTypeDuplicates.js
index 580268399599ed..af2d6f05ff3689 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/flow/removeTypeDuplicates.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/flow/removeTypeDuplicates.js
@@ -7,6 +7,10 @@ exports.default = removeTypeDuplicates;
 
 var _generated = require("../../validators/generated");
 
+function getQualifiedName(node) {
+  return (0, _generated.isIdentifier)(node) ? node.name : `${node.id.name}.${getQualifiedName(node.qualification)}`;
+}
+
 function removeTypeDuplicates(nodes) {
   const generics = {};
   const bases = {};
@@ -40,7 +44,7 @@ function removeTypeDuplicates(nodes) {
     }
 
     if ((0, _generated.isGenericTypeAnnotation)(node)) {
-      const name = node.id.name;
+      const name = getQualifiedName(node.id);
 
       if (generics[name]) {
         let existing = generics[name];
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/typescript/removeTypeDuplicates.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/typescript/removeTypeDuplicates.js
index 5eca7e1f488bf3..52f3dfa6bfc401 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/typescript/removeTypeDuplicates.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/modifications/typescript/removeTypeDuplicates.js
@@ -21,7 +21,7 @@ function removeTypeDuplicates(nodes) {
       continue;
     }
 
-    if ((0, _generated.isTSAnyKeyword)(node.type)) {
+    if ((0, _generated.isTSAnyKeyword)(node)) {
       return [node];
     }
 
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getBindingIdentifiers.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getBindingIdentifiers.js
index 189f4b8eb3be51..e0f321edafd984 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getBindingIdentifiers.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getBindingIdentifiers.js
@@ -28,7 +28,7 @@ function getBindingIdentifiers(node, duplicates, outerOnly) {
       continue;
     }
 
-    if ((0, _generated.isExportDeclaration)(id)) {
+    if ((0, _generated.isExportDeclaration)(id) && !(0, _generated.isExportAllDeclaration)(id)) {
       if ((0, _generated.isDeclaration)(id.declaration)) {
         search.push(id.declaration);
       }
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getOuterBindingIdentifiers.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getOuterBindingIdentifiers.js
index 8e1e3cb200d864..369d38fa74505e 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getOuterBindingIdentifiers.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/retrievers/getOuterBindingIdentifiers.js
@@ -3,12 +3,15 @@
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
-exports.default = getOuterBindingIdentifiers;
+exports.default = void 0;
 
 var _getBindingIdentifiers = _interopRequireDefault(require("./getBindingIdentifiers"));
 
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
 
+var _default = getOuterBindingIdentifiers;
+exports.default = _default;
+
 function getOuterBindingIdentifiers(node, duplicates) {
   return (0, _getBindingIdentifiers.default)(node, duplicates, true);
 }
\ No newline at end of file
diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/generated/index.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/generated/index.js
index 7978b63fa28492..3a4935b969867b 100644
--- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/generated/index.js
+++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/generated/index.js
@@ -3933,7 +3933,7 @@ function isExpression(node, opts) {
   if (!node) return false;
   const nodeType = node.type;
 
-  if (nodeType === "Expression" || "ArrayExpression" === nodeType || "AssignmentExpression" === nodeType || "BinaryExpression" === nodeType || "CallExpression" === nodeType || "ConditionalExpression" === nodeType || "FunctionExpression" === nodeType || "Identifier" === nodeType || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "RegExpLiteral" === nodeType || "LogicalExpression" === nodeType || "MemberExpression" === nodeType || "NewExpression" === nodeType || "ObjectExpression" === nodeType || "SequenceExpression" === nodeType || "ParenthesizedExpression" === nodeType || "ThisExpression" === nodeType || "UnaryExpression" === nodeType || "UpdateExpression" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassExpression" === nodeType || "MetaProperty" === nodeType || "Super" === nodeType || "TaggedTemplateExpression" === nodeType || "TemplateLiteral" === nodeType || "YieldExpression" === nodeType || "AwaitExpression" === nodeType || "Import" === nodeType || "BigIntLiteral" === nodeType || "OptionalMemberExpression" === nodeType || "OptionalCallExpression" === nodeType || "TypeCastExpression" === nodeType || "JSXElement" === nodeType || "JSXFragment" === nodeType || "BindExpression" === nodeType || "PipelinePrimaryTopicReference" === nodeType || "DoExpression" === nodeType || "RecordExpression" === nodeType || "TupleExpression" === nodeType || "DecimalLiteral" === nodeType || "TSAsExpression" === nodeType || "TSTypeAssertion" === nodeType || "TSNonNullExpression" === nodeType || nodeType === "Placeholder" && ("Expression" === node.expectedNode || "Identifier" === node.expectedNode || "StringLiteral" === node.expectedNode)) {
+  if ("ArrayExpression" === nodeType || "AssignmentExpression" === nodeType || "BinaryExpression" === nodeType || "CallExpression" === nodeType || "ConditionalExpression" === nodeType || "FunctionExpression" === nodeType || "Identifier" === nodeType || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "RegExpLiteral" === nodeType || "LogicalExpression" === nodeType || "MemberExpression" === nodeType || "NewExpression" === nodeType || "ObjectExpression" === nodeType || "SequenceExpression" === nodeType || "ParenthesizedExpression" === nodeType || "ThisExpression" === nodeType || "UnaryExpression" === nodeType || "UpdateExpression" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassExpression" === nodeType || "MetaProperty" === nodeType || "Super" === nodeType || "TaggedTemplateExpression" === nodeType || "TemplateLiteral" === nodeType || "YieldExpression" === nodeType || "AwaitExpression" === nodeType ||
"Import" === nodeType || "BigIntLiteral" === nodeType || "OptionalMemberExpression" === nodeType || "OptionalCallExpression" === nodeType || "TypeCastExpression" === nodeType || "JSXElement" === nodeType || "JSXFragment" === nodeType || "BindExpression" === nodeType || "PipelinePrimaryTopicReference" === nodeType || "DoExpression" === nodeType || "RecordExpression" === nodeType || "TupleExpression" === nodeType || "DecimalLiteral" === nodeType || "TSAsExpression" === nodeType || "TSTypeAssertion" === nodeType || "TSNonNullExpression" === nodeType || nodeType === "Placeholder" && ("Expression" === node.expectedNode || "Identifier" === node.expectedNode || "StringLiteral" === node.expectedNode)) { if (typeof opts === "undefined") { return true; } else { @@ -3948,7 +3948,7 @@ function isBinary(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Binary" || "BinaryExpression" === nodeType || "LogicalExpression" === nodeType) { + if ("BinaryExpression" === nodeType || "LogicalExpression" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -3963,7 +3963,7 @@ function isScopable(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Scopable" || "BlockStatement" === nodeType || "CatchClause" === nodeType || "DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "Program" === nodeType || "ObjectMethod" === nodeType || "SwitchStatement" === nodeType || "WhileStatement" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassExpression" === nodeType || "ClassDeclaration" === nodeType || "ForOfStatement" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType || "StaticBlock" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { + if ("BlockStatement" === nodeType || "CatchClause" === nodeType || "DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "Program" === nodeType || "ObjectMethod" === nodeType || "SwitchStatement" === nodeType || "WhileStatement" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassExpression" === nodeType || "ClassDeclaration" === nodeType || "ForOfStatement" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType || "StaticBlock" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -3978,7 +3978,7 @@ function isBlockParent(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "BlockParent" || "BlockStatement" === nodeType || "CatchClause" === nodeType || "DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "Program" === nodeType || "ObjectMethod" === nodeType || "SwitchStatement" === nodeType || "WhileStatement" === nodeType || "ArrowFunctionExpression" === nodeType || "ForOfStatement" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType || "StaticBlock" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { + if ("BlockStatement" === nodeType || 
"CatchClause" === nodeType || "DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "Program" === nodeType || "ObjectMethod" === nodeType || "SwitchStatement" === nodeType || "WhileStatement" === nodeType || "ArrowFunctionExpression" === nodeType || "ForOfStatement" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType || "StaticBlock" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -3993,7 +3993,7 @@ function isBlock(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Block" || "BlockStatement" === nodeType || "Program" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { + if ("BlockStatement" === nodeType || "Program" === nodeType || "TSModuleBlock" === nodeType || nodeType === "Placeholder" && "BlockStatement" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4008,7 +4008,7 @@ function isStatement(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Statement" || "BlockStatement" === nodeType || "BreakStatement" === nodeType || "ContinueStatement" === nodeType || "DebuggerStatement" === nodeType || "DoWhileStatement" === nodeType || "EmptyStatement" === nodeType || "ExpressionStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "IfStatement" === nodeType || "LabeledStatement" === nodeType || "ReturnStatement" === nodeType || "SwitchStatement" === nodeType || "ThrowStatement" === nodeType || "TryStatement" === nodeType || "VariableDeclaration" === nodeType || "WhileStatement" === nodeType || "WithStatement" === nodeType || "ClassDeclaration" === nodeType || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ForOfStatement" === nodeType || "ImportDeclaration" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType || "EnumDeclaration" === nodeType || "TSDeclareFunction" === nodeType || "TSInterfaceDeclaration" === nodeType || "TSTypeAliasDeclaration" === nodeType || "TSEnumDeclaration" === nodeType || "TSModuleDeclaration" === nodeType || "TSImportEqualsDeclaration" === nodeType || "TSExportAssignment" === nodeType || "TSNamespaceExportDeclaration" === nodeType || nodeType === "Placeholder" && ("Statement" === node.expectedNode || "Declaration" === node.expectedNode || "BlockStatement" === node.expectedNode)) { + if ("BlockStatement" === nodeType || "BreakStatement" === nodeType || "ContinueStatement" === nodeType || "DebuggerStatement" === nodeType || "DoWhileStatement" === nodeType || "EmptyStatement" === nodeType || "ExpressionStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "FunctionDeclaration" === nodeType || "IfStatement" === nodeType || 
"LabeledStatement" === nodeType || "ReturnStatement" === nodeType || "SwitchStatement" === nodeType || "ThrowStatement" === nodeType || "TryStatement" === nodeType || "VariableDeclaration" === nodeType || "WhileStatement" === nodeType || "WithStatement" === nodeType || "ClassDeclaration" === nodeType || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ForOfStatement" === nodeType || "ImportDeclaration" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType || "EnumDeclaration" === nodeType || "TSDeclareFunction" === nodeType || "TSInterfaceDeclaration" === nodeType || "TSTypeAliasDeclaration" === nodeType || "TSEnumDeclaration" === nodeType || "TSModuleDeclaration" === nodeType || "TSImportEqualsDeclaration" === nodeType || "TSExportAssignment" === nodeType || "TSNamespaceExportDeclaration" === nodeType || nodeType === "Placeholder" && ("Statement" === node.expectedNode || "Declaration" === node.expectedNode || "BlockStatement" === node.expectedNode)) { if (typeof opts === "undefined") { return true; } else { @@ -4023,7 +4023,7 @@ function isTerminatorless(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Terminatorless" || "BreakStatement" === nodeType || "ContinueStatement" === nodeType || "ReturnStatement" === nodeType || "ThrowStatement" === nodeType || "YieldExpression" === nodeType || "AwaitExpression" === nodeType) { + if ("BreakStatement" === nodeType || "ContinueStatement" === nodeType || "ReturnStatement" === nodeType || "ThrowStatement" === nodeType || "YieldExpression" === nodeType || "AwaitExpression" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4038,7 +4038,7 @@ function isCompletionStatement(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "CompletionStatement" || "BreakStatement" === nodeType || "ContinueStatement" === nodeType || "ReturnStatement" === nodeType || "ThrowStatement" === nodeType) { + if ("BreakStatement" === nodeType || "ContinueStatement" === nodeType || "ReturnStatement" === nodeType || "ThrowStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4053,7 +4053,7 @@ function isConditional(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Conditional" || "ConditionalExpression" === nodeType || "IfStatement" === nodeType) { + if ("ConditionalExpression" === nodeType || "IfStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4068,7 +4068,7 @@ function isLoop(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Loop" || "DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "WhileStatement" === nodeType || "ForOfStatement" === nodeType) { + if ("DoWhileStatement" === nodeType || "ForInStatement" === nodeType || "ForStatement" === nodeType || "WhileStatement" === nodeType || "ForOfStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4083,7 +4083,7 @@ function 
isWhile(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "While" || "DoWhileStatement" === nodeType || "WhileStatement" === nodeType) { + if ("DoWhileStatement" === nodeType || "WhileStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4098,7 +4098,7 @@ function isExpressionWrapper(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "ExpressionWrapper" || "ExpressionStatement" === nodeType || "ParenthesizedExpression" === nodeType || "TypeCastExpression" === nodeType) { + if ("ExpressionStatement" === nodeType || "ParenthesizedExpression" === nodeType || "TypeCastExpression" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4113,7 +4113,7 @@ function isFor(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "For" || "ForInStatement" === nodeType || "ForStatement" === nodeType || "ForOfStatement" === nodeType) { + if ("ForInStatement" === nodeType || "ForStatement" === nodeType || "ForOfStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4128,7 +4128,7 @@ function isForXStatement(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "ForXStatement" || "ForInStatement" === nodeType || "ForOfStatement" === nodeType) { + if ("ForInStatement" === nodeType || "ForOfStatement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4143,7 +4143,7 @@ function isFunction(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Function" || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "ObjectMethod" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { + if ("FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "ObjectMethod" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4158,7 +4158,7 @@ function isFunctionParent(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "FunctionParent" || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "ObjectMethod" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { + if ("FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "ObjectMethod" === nodeType || "ArrowFunctionExpression" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4173,7 +4173,7 @@ function isPureish(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Pureish" || "FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "RegExpLiteral" === nodeType || "ArrowFunctionExpression" === nodeType || "BigIntLiteral" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { + if ("FunctionDeclaration" === nodeType || "FunctionExpression" === nodeType || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || 
"RegExpLiteral" === nodeType || "ArrowFunctionExpression" === nodeType || "BigIntLiteral" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4188,7 +4188,7 @@ function isDeclaration(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Declaration" || "FunctionDeclaration" === nodeType || "VariableDeclaration" === nodeType || "ClassDeclaration" === nodeType || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ImportDeclaration" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType || "EnumDeclaration" === nodeType || "TSDeclareFunction" === nodeType || "TSInterfaceDeclaration" === nodeType || "TSTypeAliasDeclaration" === nodeType || "TSEnumDeclaration" === nodeType || "TSModuleDeclaration" === nodeType || nodeType === "Placeholder" && "Declaration" === node.expectedNode) { + if ("FunctionDeclaration" === nodeType || "VariableDeclaration" === nodeType || "ClassDeclaration" === nodeType || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ImportDeclaration" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType || "EnumDeclaration" === nodeType || "TSDeclareFunction" === nodeType || "TSInterfaceDeclaration" === nodeType || "TSTypeAliasDeclaration" === nodeType || "TSEnumDeclaration" === nodeType || "TSModuleDeclaration" === nodeType || nodeType === "Placeholder" && "Declaration" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4203,7 +4203,7 @@ function isPatternLike(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "PatternLike" || "Identifier" === nodeType || "RestElement" === nodeType || "AssignmentPattern" === nodeType || "ArrayPattern" === nodeType || "ObjectPattern" === nodeType || nodeType === "Placeholder" && ("Pattern" === node.expectedNode || "Identifier" === node.expectedNode)) { + if ("Identifier" === nodeType || "RestElement" === nodeType || "AssignmentPattern" === nodeType || "ArrayPattern" === nodeType || "ObjectPattern" === nodeType || nodeType === "Placeholder" && ("Pattern" === node.expectedNode || "Identifier" === node.expectedNode)) { if (typeof opts === "undefined") { return true; } else { @@ -4218,7 +4218,7 @@ function isLVal(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "LVal" || "Identifier" === nodeType || "MemberExpression" === nodeType || "RestElement" === nodeType || "AssignmentPattern" === nodeType || "ArrayPattern" === 
nodeType || "ObjectPattern" === nodeType || "TSParameterProperty" === nodeType || nodeType === "Placeholder" && ("Pattern" === node.expectedNode || "Identifier" === node.expectedNode)) { + if ("Identifier" === nodeType || "MemberExpression" === nodeType || "RestElement" === nodeType || "AssignmentPattern" === nodeType || "ArrayPattern" === nodeType || "ObjectPattern" === nodeType || "TSParameterProperty" === nodeType || nodeType === "Placeholder" && ("Pattern" === node.expectedNode || "Identifier" === node.expectedNode)) { if (typeof opts === "undefined") { return true; } else { @@ -4233,7 +4233,7 @@ function isTSEntityName(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "TSEntityName" || "Identifier" === nodeType || "TSQualifiedName" === nodeType || nodeType === "Placeholder" && "Identifier" === node.expectedNode) { + if ("Identifier" === nodeType || "TSQualifiedName" === nodeType || nodeType === "Placeholder" && "Identifier" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4248,7 +4248,7 @@ function isLiteral(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Literal" || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "RegExpLiteral" === nodeType || "TemplateLiteral" === nodeType || "BigIntLiteral" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { + if ("StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "RegExpLiteral" === nodeType || "TemplateLiteral" === nodeType || "BigIntLiteral" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4263,7 +4263,7 @@ function isImmutable(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Immutable" || "StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "BigIntLiteral" === nodeType || "JSXAttribute" === nodeType || "JSXClosingElement" === nodeType || "JSXElement" === nodeType || "JSXExpressionContainer" === nodeType || "JSXSpreadChild" === nodeType || "JSXOpeningElement" === nodeType || "JSXText" === nodeType || "JSXFragment" === nodeType || "JSXOpeningFragment" === nodeType || "JSXClosingFragment" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { + if ("StringLiteral" === nodeType || "NumericLiteral" === nodeType || "NullLiteral" === nodeType || "BooleanLiteral" === nodeType || "BigIntLiteral" === nodeType || "JSXAttribute" === nodeType || "JSXClosingElement" === nodeType || "JSXElement" === nodeType || "JSXExpressionContainer" === nodeType || "JSXSpreadChild" === nodeType || "JSXOpeningElement" === nodeType || "JSXText" === nodeType || "JSXFragment" === nodeType || "JSXOpeningFragment" === nodeType || "JSXClosingFragment" === nodeType || "DecimalLiteral" === nodeType || nodeType === "Placeholder" && "StringLiteral" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4278,7 +4278,7 @@ function isUserWhitespacable(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "UserWhitespacable" || "ObjectMethod" === nodeType || "ObjectProperty" === nodeType || 
"ObjectTypeInternalSlot" === nodeType || "ObjectTypeCallProperty" === nodeType || "ObjectTypeIndexer" === nodeType || "ObjectTypeProperty" === nodeType || "ObjectTypeSpreadProperty" === nodeType) { + if ("ObjectMethod" === nodeType || "ObjectProperty" === nodeType || "ObjectTypeInternalSlot" === nodeType || "ObjectTypeCallProperty" === nodeType || "ObjectTypeIndexer" === nodeType || "ObjectTypeProperty" === nodeType || "ObjectTypeSpreadProperty" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4293,7 +4293,7 @@ function isMethod(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Method" || "ObjectMethod" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { + if ("ObjectMethod" === nodeType || "ClassMethod" === nodeType || "ClassPrivateMethod" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4308,7 +4308,7 @@ function isObjectMember(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "ObjectMember" || "ObjectMethod" === nodeType || "ObjectProperty" === nodeType) { + if ("ObjectMethod" === nodeType || "ObjectProperty" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4323,7 +4323,7 @@ function isProperty(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Property" || "ObjectProperty" === nodeType || "ClassProperty" === nodeType || "ClassPrivateProperty" === nodeType) { + if ("ObjectProperty" === nodeType || "ClassProperty" === nodeType || "ClassPrivateProperty" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4338,7 +4338,7 @@ function isUnaryLike(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "UnaryLike" || "UnaryExpression" === nodeType || "SpreadElement" === nodeType) { + if ("UnaryExpression" === nodeType || "SpreadElement" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4353,7 +4353,7 @@ function isPattern(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Pattern" || "AssignmentPattern" === nodeType || "ArrayPattern" === nodeType || "ObjectPattern" === nodeType || nodeType === "Placeholder" && "Pattern" === node.expectedNode) { + if ("AssignmentPattern" === nodeType || "ArrayPattern" === nodeType || "ObjectPattern" === nodeType || nodeType === "Placeholder" && "Pattern" === node.expectedNode) { if (typeof opts === "undefined") { return true; } else { @@ -4368,7 +4368,7 @@ function isClass(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Class" || "ClassExpression" === nodeType || "ClassDeclaration" === nodeType) { + if ("ClassExpression" === nodeType || "ClassDeclaration" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4383,7 +4383,7 @@ function isModuleDeclaration(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "ModuleDeclaration" || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ImportDeclaration" === nodeType) { + if ("ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType || "ImportDeclaration" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4398,7 +4398,7 @@ function isExportDeclaration(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType 
=== "ExportDeclaration" || "ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType) { + if ("ExportAllDeclaration" === nodeType || "ExportDefaultDeclaration" === nodeType || "ExportNamedDeclaration" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4413,7 +4413,7 @@ function isModuleSpecifier(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "ModuleSpecifier" || "ExportSpecifier" === nodeType || "ImportDefaultSpecifier" === nodeType || "ImportNamespaceSpecifier" === nodeType || "ImportSpecifier" === nodeType || "ExportNamespaceSpecifier" === nodeType || "ExportDefaultSpecifier" === nodeType) { + if ("ExportSpecifier" === nodeType || "ImportDefaultSpecifier" === nodeType || "ImportNamespaceSpecifier" === nodeType || "ImportSpecifier" === nodeType || "ExportNamespaceSpecifier" === nodeType || "ExportDefaultSpecifier" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4428,7 +4428,7 @@ function isFlow(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Flow" || "AnyTypeAnnotation" === nodeType || "ArrayTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "BooleanLiteralTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "ClassImplements" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "DeclaredPredicate" === nodeType || "ExistsTypeAnnotation" === nodeType || "FunctionTypeAnnotation" === nodeType || "FunctionTypeParam" === nodeType || "GenericTypeAnnotation" === nodeType || "InferredPredicate" === nodeType || "InterfaceExtends" === nodeType || "InterfaceDeclaration" === nodeType || "InterfaceTypeAnnotation" === nodeType || "IntersectionTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NullableTypeAnnotation" === nodeType || "NumberLiteralTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "ObjectTypeAnnotation" === nodeType || "ObjectTypeInternalSlot" === nodeType || "ObjectTypeCallProperty" === nodeType || "ObjectTypeIndexer" === nodeType || "ObjectTypeProperty" === nodeType || "ObjectTypeSpreadProperty" === nodeType || "OpaqueType" === nodeType || "QualifiedTypeIdentifier" === nodeType || "StringLiteralTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "TupleTypeAnnotation" === nodeType || "TypeofTypeAnnotation" === nodeType || "TypeAlias" === nodeType || "TypeAnnotation" === nodeType || "TypeCastExpression" === nodeType || "TypeParameter" === nodeType || "TypeParameterDeclaration" === nodeType || "TypeParameterInstantiation" === nodeType || "UnionTypeAnnotation" === nodeType || "Variance" === nodeType || "VoidTypeAnnotation" === nodeType) { + if ("AnyTypeAnnotation" === nodeType || "ArrayTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "BooleanLiteralTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "ClassImplements" === nodeType || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === 
nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "DeclaredPredicate" === nodeType || "ExistsTypeAnnotation" === nodeType || "FunctionTypeAnnotation" === nodeType || "FunctionTypeParam" === nodeType || "GenericTypeAnnotation" === nodeType || "InferredPredicate" === nodeType || "InterfaceExtends" === nodeType || "InterfaceDeclaration" === nodeType || "InterfaceTypeAnnotation" === nodeType || "IntersectionTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NullableTypeAnnotation" === nodeType || "NumberLiteralTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "ObjectTypeAnnotation" === nodeType || "ObjectTypeInternalSlot" === nodeType || "ObjectTypeCallProperty" === nodeType || "ObjectTypeIndexer" === nodeType || "ObjectTypeProperty" === nodeType || "ObjectTypeSpreadProperty" === nodeType || "OpaqueType" === nodeType || "QualifiedTypeIdentifier" === nodeType || "StringLiteralTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "TupleTypeAnnotation" === nodeType || "TypeofTypeAnnotation" === nodeType || "TypeAlias" === nodeType || "TypeAnnotation" === nodeType || "TypeCastExpression" === nodeType || "TypeParameter" === nodeType || "TypeParameterDeclaration" === nodeType || "TypeParameterInstantiation" === nodeType || "UnionTypeAnnotation" === nodeType || "Variance" === nodeType || "VoidTypeAnnotation" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4443,7 +4443,7 @@ function isFlowType(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "FlowType" || "AnyTypeAnnotation" === nodeType || "ArrayTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "BooleanLiteralTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "ExistsTypeAnnotation" === nodeType || "FunctionTypeAnnotation" === nodeType || "GenericTypeAnnotation" === nodeType || "InterfaceTypeAnnotation" === nodeType || "IntersectionTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NullableTypeAnnotation" === nodeType || "NumberLiteralTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "ObjectTypeAnnotation" === nodeType || "StringLiteralTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "TupleTypeAnnotation" === nodeType || "TypeofTypeAnnotation" === nodeType || "UnionTypeAnnotation" === nodeType || "VoidTypeAnnotation" === nodeType) { + if ("AnyTypeAnnotation" === nodeType || "ArrayTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "BooleanLiteralTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "ExistsTypeAnnotation" === nodeType || "FunctionTypeAnnotation" === nodeType || "GenericTypeAnnotation" === nodeType || "InterfaceTypeAnnotation" === nodeType || "IntersectionTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NullableTypeAnnotation" === nodeType || "NumberLiteralTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "ObjectTypeAnnotation" === nodeType 
|| "StringLiteralTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "TupleTypeAnnotation" === nodeType || "TypeofTypeAnnotation" === nodeType || "UnionTypeAnnotation" === nodeType || "VoidTypeAnnotation" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4458,7 +4458,7 @@ function isFlowBaseAnnotation(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "FlowBaseAnnotation" || "AnyTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "VoidTypeAnnotation" === nodeType) { + if ("AnyTypeAnnotation" === nodeType || "BooleanTypeAnnotation" === nodeType || "NullLiteralTypeAnnotation" === nodeType || "MixedTypeAnnotation" === nodeType || "EmptyTypeAnnotation" === nodeType || "NumberTypeAnnotation" === nodeType || "StringTypeAnnotation" === nodeType || "SymbolTypeAnnotation" === nodeType || "ThisTypeAnnotation" === nodeType || "VoidTypeAnnotation" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4473,7 +4473,7 @@ function isFlowDeclaration(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "FlowDeclaration" || "DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType) { + if ("DeclareClass" === nodeType || "DeclareFunction" === nodeType || "DeclareInterface" === nodeType || "DeclareModule" === nodeType || "DeclareModuleExports" === nodeType || "DeclareTypeAlias" === nodeType || "DeclareOpaqueType" === nodeType || "DeclareVariable" === nodeType || "DeclareExportDeclaration" === nodeType || "DeclareExportAllDeclaration" === nodeType || "InterfaceDeclaration" === nodeType || "OpaqueType" === nodeType || "TypeAlias" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4488,7 +4488,7 @@ function isFlowPredicate(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "FlowPredicate" || "DeclaredPredicate" === nodeType || "InferredPredicate" === nodeType) { + if ("DeclaredPredicate" === nodeType || "InferredPredicate" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4503,7 +4503,7 @@ function isEnumBody(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "EnumBody" || "EnumBooleanBody" === nodeType || "EnumNumberBody" === nodeType || "EnumStringBody" === nodeType || "EnumSymbolBody" === nodeType) { + if ("EnumBooleanBody" === nodeType || "EnumNumberBody" === nodeType || "EnumStringBody" === nodeType || "EnumSymbolBody" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4518,7 +4518,7 @@ function isEnumMember(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "EnumMember" || "EnumBooleanMember" === nodeType || "EnumNumberMember" === 
nodeType || "EnumStringMember" === nodeType || "EnumDefaultedMember" === nodeType) { + if ("EnumBooleanMember" === nodeType || "EnumNumberMember" === nodeType || "EnumStringMember" === nodeType || "EnumDefaultedMember" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4533,7 +4533,7 @@ function isJSX(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "JSX" || "JSXAttribute" === nodeType || "JSXClosingElement" === nodeType || "JSXElement" === nodeType || "JSXEmptyExpression" === nodeType || "JSXExpressionContainer" === nodeType || "JSXSpreadChild" === nodeType || "JSXIdentifier" === nodeType || "JSXMemberExpression" === nodeType || "JSXNamespacedName" === nodeType || "JSXOpeningElement" === nodeType || "JSXSpreadAttribute" === nodeType || "JSXText" === nodeType || "JSXFragment" === nodeType || "JSXOpeningFragment" === nodeType || "JSXClosingFragment" === nodeType) { + if ("JSXAttribute" === nodeType || "JSXClosingElement" === nodeType || "JSXElement" === nodeType || "JSXEmptyExpression" === nodeType || "JSXExpressionContainer" === nodeType || "JSXSpreadChild" === nodeType || "JSXIdentifier" === nodeType || "JSXMemberExpression" === nodeType || "JSXNamespacedName" === nodeType || "JSXOpeningElement" === nodeType || "JSXSpreadAttribute" === nodeType || "JSXText" === nodeType || "JSXFragment" === nodeType || "JSXOpeningFragment" === nodeType || "JSXClosingFragment" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4548,7 +4548,7 @@ function isPrivate(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "Private" || "ClassPrivateProperty" === nodeType || "ClassPrivateMethod" === nodeType || "PrivateName" === nodeType) { + if ("ClassPrivateProperty" === nodeType || "ClassPrivateMethod" === nodeType || "PrivateName" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4563,7 +4563,7 @@ function isTSTypeElement(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "TSTypeElement" || "TSCallSignatureDeclaration" === nodeType || "TSConstructSignatureDeclaration" === nodeType || "TSPropertySignature" === nodeType || "TSMethodSignature" === nodeType || "TSIndexSignature" === nodeType) { + if ("TSCallSignatureDeclaration" === nodeType || "TSConstructSignatureDeclaration" === nodeType || "TSPropertySignature" === nodeType || "TSMethodSignature" === nodeType || "TSIndexSignature" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4578,7 +4578,7 @@ function isTSType(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "TSType" || "TSAnyKeyword" === nodeType || "TSBooleanKeyword" === nodeType || "TSBigIntKeyword" === nodeType || "TSIntrinsicKeyword" === nodeType || "TSNeverKeyword" === nodeType || "TSNullKeyword" === nodeType || "TSNumberKeyword" === nodeType || "TSObjectKeyword" === nodeType || "TSStringKeyword" === nodeType || "TSSymbolKeyword" === nodeType || "TSUndefinedKeyword" === nodeType || "TSUnknownKeyword" === nodeType || "TSVoidKeyword" === nodeType || "TSThisType" === nodeType || "TSFunctionType" === nodeType || "TSConstructorType" === nodeType || "TSTypeReference" === nodeType || "TSTypePredicate" === nodeType || "TSTypeQuery" === nodeType || "TSTypeLiteral" === nodeType || "TSArrayType" === nodeType || "TSTupleType" === nodeType || "TSOptionalType" === nodeType || "TSRestType" === nodeType || "TSUnionType" === nodeType || "TSIntersectionType" === nodeType || 
"TSConditionalType" === nodeType || "TSInferType" === nodeType || "TSParenthesizedType" === nodeType || "TSTypeOperator" === nodeType || "TSIndexedAccessType" === nodeType || "TSMappedType" === nodeType || "TSLiteralType" === nodeType || "TSExpressionWithTypeArguments" === nodeType || "TSImportType" === nodeType) { + if ("TSAnyKeyword" === nodeType || "TSBooleanKeyword" === nodeType || "TSBigIntKeyword" === nodeType || "TSIntrinsicKeyword" === nodeType || "TSNeverKeyword" === nodeType || "TSNullKeyword" === nodeType || "TSNumberKeyword" === nodeType || "TSObjectKeyword" === nodeType || "TSStringKeyword" === nodeType || "TSSymbolKeyword" === nodeType || "TSUndefinedKeyword" === nodeType || "TSUnknownKeyword" === nodeType || "TSVoidKeyword" === nodeType || "TSThisType" === nodeType || "TSFunctionType" === nodeType || "TSConstructorType" === nodeType || "TSTypeReference" === nodeType || "TSTypePredicate" === nodeType || "TSTypeQuery" === nodeType || "TSTypeLiteral" === nodeType || "TSArrayType" === nodeType || "TSTupleType" === nodeType || "TSOptionalType" === nodeType || "TSRestType" === nodeType || "TSUnionType" === nodeType || "TSIntersectionType" === nodeType || "TSConditionalType" === nodeType || "TSInferType" === nodeType || "TSParenthesizedType" === nodeType || "TSTypeOperator" === nodeType || "TSIndexedAccessType" === nodeType || "TSMappedType" === nodeType || "TSLiteralType" === nodeType || "TSExpressionWithTypeArguments" === nodeType || "TSImportType" === nodeType) { if (typeof opts === "undefined") { return true; } else { @@ -4593,7 +4593,7 @@ function isTSBaseType(node, opts) { if (!node) return false; const nodeType = node.type; - if (nodeType === "TSBaseType" || "TSAnyKeyword" === nodeType || "TSBooleanKeyword" === nodeType || "TSBigIntKeyword" === nodeType || "TSIntrinsicKeyword" === nodeType || "TSNeverKeyword" === nodeType || "TSNullKeyword" === nodeType || "TSNumberKeyword" === nodeType || "TSObjectKeyword" === nodeType || "TSStringKeyword" === nodeType || "TSSymbolKeyword" === nodeType || "TSUndefinedKeyword" === nodeType || "TSUnknownKeyword" === nodeType || "TSVoidKeyword" === nodeType || "TSThisType" === nodeType || "TSLiteralType" === nodeType) { + if ("TSAnyKeyword" === nodeType || "TSBooleanKeyword" === nodeType || "TSBigIntKeyword" === nodeType || "TSIntrinsicKeyword" === nodeType || "TSNeverKeyword" === nodeType || "TSNullKeyword" === nodeType || "TSNumberKeyword" === nodeType || "TSObjectKeyword" === nodeType || "TSStringKeyword" === nodeType || "TSSymbolKeyword" === nodeType || "TSUndefinedKeyword" === nodeType || "TSUnknownKeyword" === nodeType || "TSVoidKeyword" === nodeType || "TSThisType" === nodeType || "TSLiteralType" === nodeType) { if (typeof opts === "undefined") { return true; } else { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isReferenced.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isReferenced.js index 8fd1662348ce13..45b3a367809b70 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isReferenced.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isReferenced.js @@ -22,13 +22,6 @@ function isReferenced(node, parent, grandparent) { case "ArrowFunctionExpression": return parent.body === node; - case "ExportSpecifier": - if (parent.source) { - return false; - } - - return parent.local === node; - case "PrivateName": return false; @@ -83,6 +76,13 @@ function isReferenced(node, parent, grandparent) { case 
"ExportDefaultSpecifier": return false; + case "ExportSpecifier": + if (grandparent == null ? void 0 : grandparent.source) { + return false; + } + + return parent.local === node; + case "ImportDefaultSpecifier": case "ImportNamespaceSpecifier": case "ImportSpecifier": diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isValidIdentifier.js b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isValidIdentifier.js index c1adb9afe0d25e..3fa6f98000b6ba 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isValidIdentifier.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/lib/validators/isValidIdentifier.js @@ -11,9 +11,7 @@ function isValidIdentifier(name, reserved = true) { if (typeof name !== "string") return false; if (reserved) { - if ((0, _helperValidatorIdentifier.isKeyword)(name) || (0, _helperValidatorIdentifier.isStrictReservedWord)(name)) { - return false; - } else if (name === "await") { + if ((0, _helperValidatorIdentifier.isKeyword)(name) || (0, _helperValidatorIdentifier.isStrictReservedWord)(name, true)) { return false; } } diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/package.json b/tools/node_modules/@babel/core/node_modules/@babel/types/package.json index 7a3a503ea30e5b..c4807b60d81bdc 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/package.json @@ -1,6 +1,6 @@ { "name": "@babel/types", - "version": "7.12.7", + "version": "7.12.12", "description": "Babel Types is a Lodash-esque utility library for AST nodes", "author": "Sebastian McKenzie <sebmck@gmail.com>", "homepage": "https://babeljs.io/", @@ -14,22 +14,23 @@ "directory": "packages/babel-types" }, "main": "lib/index.js", - "types": "lib/index.d.ts", + "types": "lib/index-legacy.d.ts", "typesVersions": { ">=3.7": { - "lib/index.d.ts": [ - "lib/index-ts3.7.d.ts" + "lib/index-legacy.d.ts": [ + "lib/index.d.ts" ] } }, "dependencies": { - "@babel/helper-validator-identifier": "^7.10.4", + "@babel/helper-validator-identifier": "^7.12.11", "lodash": "^4.17.19", "to-fast-properties": "^2.0.0" }, "devDependencies": { - "@babel/generator": "7.12.5", - "@babel/parser": "7.12.7", + "@babel/generator": "7.12.11", + "@babel/parser": "7.12.11", + "@types/lodash": "^4.14.162", "chalk": "^4.1.0" } } \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generateTypeHelpers.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generateTypeHelpers.js deleted file mode 100644 index bf0b036c189e74..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generateTypeHelpers.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -const path = require("path"); -const chalk = require("chalk"); -const generateBuilders = require("./generators/generateBuilders"); -const generateValidators = require("./generators/generateValidators"); -const generateAsserts = require("./generators/generateAsserts"); -const generateConstants = require("./generators/generateConstants"); -const format = require("../../../scripts/utils/formatCode"); -const writeFile = require("../../../scripts/utils/writeFileAndMkDir"); - -const baseDir = path.join(__dirname, "../src"); - -console.log("Generating @babel/types dynamic functions"); - -const buildersFile = path.join(baseDir, "builders/generated/index.js"); -writeFile(buildersFile, format(generateBuilders(), 
buildersFile)); -console.log(` ${chalk.green("✔")} Generated builders`); - -const validatorsFile = path.join(baseDir, "validators/generated/index.js"); -writeFile(validatorsFile, format(generateValidators(), validatorsFile)); -console.log(` ${chalk.green("✔")} Generated validators`); - -const assertsFile = path.join(baseDir, "asserts/generated/index.js"); -writeFile(assertsFile, format(generateAsserts(), assertsFile)); -console.log(` ${chalk.green("✔")} Generated asserts`); - -const constantsFile = path.join(baseDir, "constants/generated/index.js"); -writeFile(constantsFile, format(generateConstants(), constantsFile)); -console.log(` ${chalk.green("✔")} Generated constants`); diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateAsserts.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/asserts.js similarity index 61% rename from tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateAsserts.js rename to tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/asserts.js index 26bdb8dccbed58..a517efb31a0d6c 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateAsserts.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/asserts.js @@ -2,23 +2,30 @@ const definitions = require("../../lib/definitions"); function addAssertHelper(type) { - return `export function assert${type}(node: Object, opts?: Object = {}): void { + const result = + definitions.NODE_FIELDS[type] || definitions.FLIPPED_ALIAS_KEYS[type] + ? `node is t.${type}` + : "boolean"; + + return `export function assert${type}(node: object | null | undefined, opts?: object | null): asserts ${ + result === "boolean" ? "node" : result + } { assert("${type}", node, opts) } `; } module.exports = function generateAsserts() { - let output = `// @flow -/* + let output = `/* * This file is auto-generated! Do not modify it directly. * To re-generate run 'make build' */ import is from "../../validators/is"; +import type * as t from "../.."; -function assert(type: string, node: Object, opts?: Object): void { +function assert(type: string, node: any, opts?: any): void { if (!is(type, node, opts)) { throw new Error( - \`Expected type "\${type}" with option \${JSON.stringify((opts: any))}, \` + + \`Expected type "\${type}" with option \${JSON.stringify(opts)}, \` + \`but instead got "\${node.type}".\`, ); } @@ -34,7 +41,7 @@ function assert(type: string, node: Object, opts?: Object): void { Object.keys(definitions.DEPRECATED_KEYS).forEach(type => { const newType = definitions.DEPRECATED_KEYS[type]; - output += `export function assert${type}(node: Object, opts: Object): void { + output += `export function assert${type}(node: any, opts: any): void { console.trace("The node type ${type} has been renamed to ${newType}"); assert("${type}", node, opts); }\n`; diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/ast-types.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/ast-types.js new file mode 100644 index 00000000000000..98122665def8e9 --- /dev/null +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/ast-types.js @@ -0,0 +1,140 @@ +"use strict"; + +const t = require("../../"); +const stringifyValidator = require("../utils/stringifyValidator"); + +module.exports = function generateAstTypes() { + let code = `// NOTE: This file is autogenerated. Do not modify. 
+// See packages/babel-types/scripts/generators/ast-types.js for script used. + +interface BaseComment { + value: string; + start: number; + end: number; + loc: SourceLocation; + type: "CommentBlock" | "CommentLine"; +} + +export interface CommentBlock extends BaseComment { + type: "CommentBlock"; +} + +export interface CommentLine extends BaseComment { + type: "CommentLine"; +} + +export type Comment = CommentBlock | CommentLine; + +export interface SourceLocation { + start: { + line: number; + column: number; + }; + + end: { + line: number; + column: number; + }; +} + +interface BaseNode { + leadingComments: ReadonlyArray<Comment> | null; + innerComments: ReadonlyArray<Comment> | null; + trailingComments: ReadonlyArray<Comment> | null; + start: number | null; + end: number | null; + loc: SourceLocation | null; + type: Node["type"]; + extra?: Record<string, unknown>; +} + +export type CommentTypeShorthand = "leading" | "inner" | "trailing"; + +export type Node = ${t.TYPES.sort().join(" | ")};\n\n`; + + const deprecatedAlias = {}; + for (const type in t.DEPRECATED_KEYS) { + deprecatedAlias[t.DEPRECATED_KEYS[type]] = type; + } + for (const type in t.NODE_FIELDS) { + const fields = t.NODE_FIELDS[type]; + const fieldNames = sortFieldNames(Object.keys(t.NODE_FIELDS[type]), type); + const struct = []; + + fieldNames.forEach(fieldName => { + const field = fields[fieldName]; + // Future / annoying TODO: + // MemberExpression.property, ObjectProperty.key and ObjectMethod.key need special cases; either: + // - convert the declaration to chain() like ClassProperty.key and ClassMethod.key, + // - declare an alias type for valid keys, detect the case and reuse it here, + // - declare a disjoint union with, for example, ObjectPropertyBase, + // ObjectPropertyLiteralKey and ObjectPropertyComputedKey, and declare ObjectProperty + // as "ObjectPropertyBase & (ObjectPropertyLiteralKey | ObjectPropertyComputedKey)" + let typeAnnotation = stringifyValidator(field.validate, ""); + + if (isNullable(field) && !hasDefault(field)) { + typeAnnotation += " | null"; + } + + const alphaNumeric = /^\w+$/; + const optional = field.optional ? "?" : ""; + + if (t.isValidIdentifier(fieldName) || alphaNumeric.test(fieldName)) { + struct.push(`${fieldName}${optional}: ${typeAnnotation};`); + } else { + struct.push(`"${fieldName}"${optional}: ${typeAnnotation};`); + } + }); + + code += `export interface ${type} extends BaseNode { + type: "${type}"; + ${struct.join("\n ").trim()} +}\n\n`; + + if (deprecatedAlias[type]) { + code += `/** + * @deprecated Use \`${type}\` + */ +export interface ${deprecatedAlias[type]} extends BaseNode { + type: "${deprecatedAlias[type]}"; + ${struct.join("\n ").trim()} +}\n\n +`; + } + } + + for (const type in t.FLIPPED_ALIAS_KEYS) { + const types = t.FLIPPED_ALIAS_KEYS[type]; + code += `export type ${type} = ${types + .map(type => `${type}`) + .join(" | ")};\n`; + } + code += "\n"; + + code += "export interface Aliases {\n"; + for (const type in t.FLIPPED_ALIAS_KEYS) { + code += ` ${type}: ${type};\n`; + } + code += "}\n\n"; + + return code; +}; + +function hasDefault(field) { + return field.default != null; +} + +function isNullable(field) { + return field.optional || hasDefault(field); +} + +function sortFieldNames(fields, type) { + return fields.sort((fieldA, fieldB) => { + const indexA = t.BUILDER_KEYS[type].indexOf(fieldA); + const indexB = t.BUILDER_KEYS[type].indexOf(fieldB); + if (indexA === indexB) return fieldA < fieldB ? 
-1 : 1; + if (indexA === -1) return 1; + if (indexB === -1) return -1; + return indexA - indexB; + }); +} diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/builders.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/builders.js new file mode 100644 index 00000000000000..6a528fe0c33bbd --- /dev/null +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/builders.js @@ -0,0 +1,165 @@ +"use strict"; +const definitions = require("../../lib/definitions"); +const formatBuilderName = require("../utils/formatBuilderName"); +const lowerFirst = require("../utils/lowerFirst"); + +const t = require("../../"); +const stringifyValidator = require("../utils/stringifyValidator"); + +function areAllRemainingFieldsNullable(fieldName, fieldNames, fields) { + const index = fieldNames.indexOf(fieldName); + return fieldNames.slice(index).every(_ => isNullable(fields[_])); +} + +function hasDefault(field) { + return field.default != null; +} + +function isNullable(field) { + return field.optional || hasDefault(field); +} + +function sortFieldNames(fields, type) { + return fields.sort((fieldA, fieldB) => { + const indexA = t.BUILDER_KEYS[type].indexOf(fieldA); + const indexB = t.BUILDER_KEYS[type].indexOf(fieldB); + if (indexA === indexB) return fieldA < fieldB ? -1 : 1; + if (indexA === -1) return 1; + if (indexB === -1) return -1; + return indexA - indexB; + }); +} + +function generateBuilderArgs(type) { + const fields = t.NODE_FIELDS[type]; + const fieldNames = sortFieldNames(Object.keys(t.NODE_FIELDS[type]), type); + const builderNames = t.BUILDER_KEYS[type]; + + const args = []; + + fieldNames.forEach(fieldName => { + const field = fields[fieldName]; + // Future / annoying TODO: + // MemberExpression.property, ObjectProperty.key and ObjectMethod.key need special cases; either: + // - convert the declaration to chain() like ClassProperty.key and ClassMethod.key, + // - declare an alias type for valid keys, detect the case and reuse it here, + // - declare a disjoint union with, for example, ObjectPropertyBase, + // ObjectPropertyLiteralKey and ObjectPropertyComputedKey, and declare ObjectProperty + // as "ObjectPropertyBase & (ObjectPropertyLiteralKey | ObjectPropertyComputedKey)" + let typeAnnotation = stringifyValidator(field.validate, "t."); + + if (isNullable(field) && !hasDefault(field)) { + typeAnnotation += " | null"; + } + + if (builderNames.includes(fieldName)) { + const bindingIdentifierName = t.toBindingIdentifierName(fieldName); + if (areAllRemainingFieldsNullable(fieldName, builderNames, fields)) { + args.push( + `${bindingIdentifierName}${ + isNullable(field) ? "?:" : ":" + } ${typeAnnotation}` + ); + } else { + args.push( + `${bindingIdentifierName}: ${typeAnnotation}${ + isNullable(field) ? " | undefined" : "" + }` + ); + } + } + }); + + return args; +} + +module.exports = function generateBuilders(kind) { + return kind === "uppercase.js" + ? generateUppercaseBuilders() + : generateLowercaseBuilders(); +}; + +function generateLowercaseBuilders() { + let output = `/* + * This file is auto-generated! Do not modify it directly. 
+ * To re-generate run 'make build' + */ +import builder from "../builder"; +import type * as t from "../.."; + +/* eslint-disable @typescript-eslint/no-unused-vars */ + +`; + + const reservedNames = new Set(["super", "import"]); + Object.keys(definitions.BUILDER_KEYS).forEach(type => { + const defArgs = generateBuilderArgs(type); + const formatedBuilderName = formatBuilderName(type); + const formatedBuilderNameLocal = reservedNames.has(formatedBuilderName) + ? `_${formatedBuilderName}` + : formatedBuilderName; + output += `${ + formatedBuilderNameLocal === formatedBuilderName ? "export " : "" + }function ${formatedBuilderNameLocal}(${defArgs.join( + ", " + )}): t.${type} { return builder("${type}", ...arguments); }\n`; + if (formatedBuilderNameLocal !== formatedBuilderName) { + output += `export { ${formatedBuilderNameLocal} as ${formatedBuilderName} };\n`; + } + + // This is needed for backwards compatibility. + // It should be removed in the next major version. + // JSXIdentifier -> jSXIdentifier + if (/^[A-Z]{2}/.test(type)) { + output += `export { ${formatedBuilderNameLocal} as ${lowerFirst( + type + )} }\n`; + } + }); + + Object.keys(definitions.DEPRECATED_KEYS).forEach(type => { + const newType = definitions.DEPRECATED_KEYS[type]; + const formatedBuilderName = formatBuilderName(type); + output += `/** @deprecated */ +function ${type}(...args: Array<any>): any { + console.trace("The node type ${type} has been renamed to ${newType}"); + return builder("${type}", ...args); +} +export { ${type} as ${formatedBuilderName} };\n`; + // This is needed for backwards compatibility. + // It should be removed in the next major version. + // JSXIdentifier -> jSXIdentifier + if (/^[A-Z]{2}/.test(type)) { + output += `export { ${type} as ${lowerFirst(type)} }\n`; + } + }); + + return output; +} + +function generateUppercaseBuilders() { + let output = `/* + * This file is auto-generated! Do not modify it directly. + * To re-generate run 'make build' + */ + +/** + * This file is written in JavaScript and not TypeScript because uppercase builders + * conflict with AST types. TypeScript reads the uppercase.d.ts file instead. + */ + + export {\n`; + + Object.keys(definitions.BUILDER_KEYS).forEach(type => { + const formatedBuilderName = formatBuilderName(type); + output += ` ${formatedBuilderName} as ${type},\n`; + }); + + Object.keys(definitions.DEPRECATED_KEYS).forEach(type => { + const formatedBuilderName = formatBuilderName(type); + output += ` ${formatedBuilderName} as ${type},\n`; + }); + + output += ` } from './index';\n`; + return output; +} diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateConstants.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/constants.js similarity index 94% rename from tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateConstants.js rename to tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/constants.js index 1e4d2cabaec4da..8e8b61c50bf0bb 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateConstants.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/constants.js @@ -2,8 +2,7 @@ const definitions = require("../../lib/definitions"); module.exports = function generateConstants() { - let output = `// @flow -/* + let output = `/* * This file is auto-generated! Do not modify it directly. 
* To re-generate run 'make build' */ diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/flow.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/flow.js index 2a91703353db3a..8a0a7b2635511f 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/flow.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/flow.js @@ -98,7 +98,7 @@ for (const type in t.NODE_FIELDS) { // Flow chokes on super() and import() :/ if (type !== "Super" && type !== "Import") { lines.push( - `declare function ${toFunctionName(type)}(${args.join( + `declare export function ${toFunctionName(type)}(${args.join( ", " )}): ${NODE_PREFIX}${type};` ); @@ -117,85 +117,90 @@ for (const typeName of t.TYPES) { const isDeprecated = !!t.DEPRECATED_KEYS[typeName]; const realName = isDeprecated ? t.DEPRECATED_KEYS[typeName] : typeName; - let decl = `declare function is${typeName}(node: ?Object, opts?: ?Object): boolean`; + let decl = `declare export function is${typeName}(node: ?Object, opts?: ?Object): boolean`; if (t.NODE_FIELDS[realName]) { decl += ` %checks (node instanceof ${NODE_PREFIX}${realName})`; } lines.push(decl); lines.push( - `declare function assert${typeName}(node: ?Object, opts?: ?Object): void` + `declare export function assert${typeName}(node: ?Object, opts?: ?Object): void` ); } lines.push( + `declare export var VISITOR_KEYS: { [type: string]: string[] }`, + // assert/ - `declare function assertNode(obj: any): void`, + `declare export function assertNode(obj: any): void`, // builders/ // eslint-disable-next-line max-len - `declare function createTypeAnnotationBasedOnTypeof(type: 'string' | 'number' | 'undefined' | 'boolean' | 'function' | 'object' | 'symbol'): ${NODE_PREFIX}TypeAnnotation`, + `declare export function createTypeAnnotationBasedOnTypeof(type: 'string' | 'number' | 'undefined' | 'boolean' | 'function' | 'object' | 'symbol'): ${NODE_PREFIX}TypeAnnotation`, // eslint-disable-next-line max-len - `declare function createUnionTypeAnnotation(types: Array<${NODE_PREFIX}FlowType>): ${NODE_PREFIX}UnionTypeAnnotation`, + `declare export function createUnionTypeAnnotation(types: Array<${NODE_PREFIX}FlowType>): ${NODE_PREFIX}UnionTypeAnnotation`, // eslint-disable-next-line max-len - `declare function createFlowUnionType(types: Array<${NODE_PREFIX}FlowType>): ${NODE_PREFIX}UnionTypeAnnotation`, + `declare export function createFlowUnionType(types: Array<${NODE_PREFIX}FlowType>): ${NODE_PREFIX}UnionTypeAnnotation`, // this smells like "internal API" // eslint-disable-next-line max-len - `declare function buildChildren(node: { children: Array<${NODE_PREFIX}JSXText | ${NODE_PREFIX}JSXExpressionContainer | ${NODE_PREFIX}JSXSpreadChild | ${NODE_PREFIX}JSXElement | ${NODE_PREFIX}JSXFragment | ${NODE_PREFIX}JSXEmptyExpression> }): Array<${NODE_PREFIX}JSXText | ${NODE_PREFIX}JSXExpressionContainer | ${NODE_PREFIX}JSXSpreadChild | ${NODE_PREFIX}JSXElement | ${NODE_PREFIX}JSXFragment>`, + `declare export function buildChildren(node: { children: Array<${NODE_PREFIX}JSXText | ${NODE_PREFIX}JSXExpressionContainer | ${NODE_PREFIX}JSXSpreadChild | ${NODE_PREFIX}JSXElement | ${NODE_PREFIX}JSXFragment | ${NODE_PREFIX}JSXEmptyExpression> }): Array<${NODE_PREFIX}JSXText | ${NODE_PREFIX}JSXExpressionContainer | ${NODE_PREFIX}JSXSpreadChild | ${NODE_PREFIX}JSXElement | ${NODE_PREFIX}JSXFragment>`, // clone/ - `declare function clone<T>(n: T): T;`, - `declare function cloneDeep<T>(n: T): T;`, - `declare 
function cloneDeepWithoutLoc<T>(n: T): T;`, - `declare function cloneNode<T>(n: T, deep?: boolean, withoutLoc?: boolean): T;`, - `declare function cloneWithoutLoc<T>(n: T): T;`, + `declare export function clone<T>(n: T): T;`, + `declare export function cloneDeep<T>(n: T): T;`, + `declare export function cloneDeepWithoutLoc<T>(n: T): T;`, + `declare export function cloneNode<T>(n: T, deep?: boolean, withoutLoc?: boolean): T;`, + `declare export function cloneWithoutLoc<T>(n: T): T;`, // comments/ `declare type CommentTypeShorthand = 'leading' | 'inner' | 'trailing'`, // eslint-disable-next-line max-len - `declare function addComment<T: Node>(node: T, type: CommentTypeShorthand, content: string, line?: boolean): T`, + `declare export function addComment<T: BabelNode>(node: T, type: CommentTypeShorthand, content: string, line?: boolean): T`, // eslint-disable-next-line max-len - `declare function addComments<T: Node>(node: T, type: CommentTypeShorthand, comments: Array<Comment>): T`, - `declare function inheritInnerComments(node: Node, parent: Node): void`, - `declare function inheritLeadingComments(node: Node, parent: Node): void`, - `declare function inheritsComments<T: Node>(node: T, parent: Node): void`, - `declare function inheritTrailingComments(node: Node, parent: Node): void`, - `declare function removeComments<T: Node>(node: T): T`, + `declare export function addComments<T: BabelNode>(node: T, type: CommentTypeShorthand, comments: Array<Comment>): T`, + `declare export function inheritInnerComments(node: BabelNode, parent: BabelNode): void`, + `declare export function inheritLeadingComments(node: BabelNode, parent: BabelNode): void`, + `declare export function inheritsComments<T: BabelNode>(node: T, parent: BabelNode): void`, + `declare export function inheritTrailingComments(node: BabelNode, parent: BabelNode): void`, + `declare export function removeComments<T: BabelNode>(node: T): T`, // converters/ - `declare function ensureBlock(node: ${NODE_PREFIX}, key: string): ${NODE_PREFIX}BlockStatement`, - `declare function toBindingIdentifierName(name?: ?string): string`, + `declare export function ensureBlock(node: ${NODE_PREFIX}, key: string): ${NODE_PREFIX}BlockStatement`, + `declare export function toBindingIdentifierName(name?: ?string): string`, // eslint-disable-next-line max-len - `declare function toBlock(node: ${NODE_PREFIX}Statement | ${NODE_PREFIX}Expression, parent?: ${NODE_PREFIX}Function | null): ${NODE_PREFIX}BlockStatement`, + `declare export function toBlock(node: ${NODE_PREFIX}Statement | ${NODE_PREFIX}Expression, parent?: ${NODE_PREFIX}Function | null): ${NODE_PREFIX}BlockStatement`, // eslint-disable-next-line max-len - `declare function toComputedKey(node: ${NODE_PREFIX}Method | ${NODE_PREFIX}Property, key?: ${NODE_PREFIX}Expression | ${NODE_PREFIX}Identifier): ${NODE_PREFIX}Expression`, + `declare export function toComputedKey(node: ${NODE_PREFIX}Method | ${NODE_PREFIX}Property, key?: ${NODE_PREFIX}Expression | ${NODE_PREFIX}Identifier): ${NODE_PREFIX}Expression`, // eslint-disable-next-line max-len - `declare function toExpression(node: ${NODE_PREFIX}ExpressionStatement | ${NODE_PREFIX}Expression | ${NODE_PREFIX}Class | ${NODE_PREFIX}Function): ${NODE_PREFIX}Expression`, - `declare function toIdentifier(name?: ?string): string`, + `declare export function toExpression(node: ${NODE_PREFIX}ExpressionStatement | ${NODE_PREFIX}Expression | ${NODE_PREFIX}Class | ${NODE_PREFIX}Function): ${NODE_PREFIX}Expression`, + `declare export function toIdentifier(name?: ?string): 
string`, // eslint-disable-next-line max-len - `declare function toKeyAlias(node: ${NODE_PREFIX}Method | ${NODE_PREFIX}Property, key?: ${NODE_PREFIX}): string`, + `declare export function toKeyAlias(node: ${NODE_PREFIX}Method | ${NODE_PREFIX}Property, key?: ${NODE_PREFIX}): string`, // toSequenceExpression relies on types that aren't declared in flow // eslint-disable-next-line max-len - `declare function toStatement(node: ${NODE_PREFIX}Statement | ${NODE_PREFIX}Class | ${NODE_PREFIX}Function | ${NODE_PREFIX}AssignmentExpression, ignore?: boolean): ${NODE_PREFIX}Statement | void`, - `declare function valueToNode(value: any): ${NODE_PREFIX}Expression`, + `declare export function toStatement(node: ${NODE_PREFIX}Statement | ${NODE_PREFIX}Class | ${NODE_PREFIX}Function | ${NODE_PREFIX}AssignmentExpression, ignore?: boolean): ${NODE_PREFIX}Statement | void`, + `declare export function valueToNode(value: any): ${NODE_PREFIX}Expression`, // modifications/ // eslint-disable-next-line max-len - `declare function removeTypeDuplicates(types: Array<${NODE_PREFIX}FlowType>): Array<${NODE_PREFIX}FlowType>`, + `declare export function removeTypeDuplicates(types: Array<${NODE_PREFIX}FlowType>): Array<${NODE_PREFIX}FlowType>`, // eslint-disable-next-line max-len - `declare function appendToMemberExpression(member: ${NODE_PREFIX}MemberExpression, append: ${NODE_PREFIX}, computed?: boolean): ${NODE_PREFIX}MemberExpression`, + `declare export function appendToMemberExpression(member: ${NODE_PREFIX}MemberExpression, append: ${NODE_PREFIX}, computed?: boolean): ${NODE_PREFIX}MemberExpression`, // eslint-disable-next-line max-len - `declare function inherits<T: Node>(child: T, parent: ${NODE_PREFIX} | null | void): T`, + `declare export function inherits<T: BabelNode>(child: T, parent: ${NODE_PREFIX} | null | void): T`, // eslint-disable-next-line max-len - `declare function prependToMemberExpression(member: ${NODE_PREFIX}MemberExpression, prepend: ${NODE_PREFIX}Expression): ${NODE_PREFIX}MemberExpression`, - `declare function removeProperties<T>(n: T, opts: ?{}): void;`, - `declare function removePropertiesDeep<T>(n: T, opts: ?{}): T;`, + `declare export function prependToMemberExpression(member: ${NODE_PREFIX}MemberExpression, prepend: ${NODE_PREFIX}Expression): ${NODE_PREFIX}MemberExpression`, + `declare export function removeProperties<T>(n: T, opts: ?{}): void;`, + `declare export function removePropertiesDeep<T>(n: T, opts: ?{}): T;`, // retrievers/ // eslint-disable-next-line max-len - `declare function getBindingIdentifiers(node: ${NODE_PREFIX}, duplicates: boolean, outerOnly?: boolean): { [key: string]: ${NODE_PREFIX}Identifier | Array<${NODE_PREFIX}Identifier> }`, + `declare export var getBindingIdentifiers: { + (node: ${NODE_PREFIX}, duplicates?: boolean, outerOnly?: boolean): { [key: string]: ${NODE_PREFIX}Identifier | Array<${NODE_PREFIX}Identifier> }, + keys: { [type: string]: string[] } + }`, // eslint-disable-next-line max-len - `declare function getOuterBindingIdentifiers(node: Node, duplicates: boolean): { [key: string]: ${NODE_PREFIX}Identifier | Array<${NODE_PREFIX}Identifier> }`, + `declare export function getOuterBindingIdentifiers(node: BabelNode, duplicates?: boolean): { [key: string]: ${NODE_PREFIX}Identifier | Array<${NODE_PREFIX}Identifier> }`, // traverse/ `declare type TraversalAncestors = Array<{ @@ -209,36 +214,36 @@ lines.push( exit?: TraversalHandler<T>, };`.replace(/(^|\n) {2}/g, "$1"), // eslint-disable-next-line - `declare function traverse<T>(n: BabelNode, TraversalHandler<T> 
| TraversalHandlers<T>, state?: T): void;`, - `declare function traverseFast<T>(n: Node, h: TraversalHandler<T>, state?: T): void;`, + `declare export function traverse<T>(n: BabelNode, TraversalHandler<T> | TraversalHandlers<T>, state?: T): void;`, + `declare export function traverseFast<T>(n: BabelNode, h: TraversalHandler<T>, state?: T): void;`, // utils/ // cleanJSXElementLiteralChild is not exported // inherit is not exported - `declare function shallowEqual(actual: Object, expected: Object): boolean`, + `declare export function shallowEqual(actual: Object, expected: Object): boolean`, // validators/ // eslint-disable-next-line max-len - `declare function buildMatchMemberExpression(match: string, allowPartial?: boolean): (?BabelNode) => boolean`, - `declare function is(type: string, n: BabelNode, opts: Object): boolean;`, - `declare function isBinding(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean`, - `declare function isBlockScoped(node: BabelNode): boolean`, - `declare function isImmutable(node: BabelNode): boolean`, - `declare function isLet(node: BabelNode): boolean`, - `declare function isNode(node: ?Object): boolean`, - `declare function isNodesEquivalent(a: any, b: any): boolean`, - `declare function isPlaceholderType(placeholderType: string, targetType: string): boolean`, - `declare function isReferenced(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean`, - `declare function isScope(node: BabelNode, parent: BabelNode): boolean`, - `declare function isSpecifierDefault(specifier: BabelNodeModuleSpecifier): boolean`, - `declare function isType(nodetype: ?string, targetType: string): boolean`, - `declare function isValidES3Identifier(name: string): boolean`, - `declare function isValidES3Identifier(name: string): boolean`, - `declare function isValidIdentifier(name: string): boolean`, - `declare function isVar(node: BabelNode): boolean`, + `declare export function buildMatchMemberExpression(match: string, allowPartial?: boolean): (?BabelNode) => boolean`, + `declare export function is(type: string, n: BabelNode, opts: Object): boolean;`, + `declare export function isBinding(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean`, + `declare export function isBlockScoped(node: BabelNode): boolean`, + `declare export function isImmutable(node: BabelNode): boolean`, + `declare export function isLet(node: BabelNode): boolean`, + `declare export function isNode(node: ?Object): boolean`, + `declare export function isNodesEquivalent(a: any, b: any): boolean`, + `declare export function isPlaceholderType(placeholderType: string, targetType: string): boolean`, + `declare export function isReferenced(node: BabelNode, parent: BabelNode, grandparent?: BabelNode): boolean`, + `declare export function isScope(node: BabelNode, parent: BabelNode): boolean`, + `declare export function isSpecifierDefault(specifier: BabelNodeModuleSpecifier): boolean`, + `declare export function isType(nodetype: ?string, targetType: string): boolean`, + `declare export function isValidES3Identifier(name: string): boolean`, + `declare export function isValidES3Identifier(name: string): boolean`, + `declare export function isValidIdentifier(name: string): boolean`, + `declare export function isVar(node: BabelNode): boolean`, // eslint-disable-next-line max-len - `declare function matchesPattern(node: ?BabelNode, match: string | Array<string>, allowPartial?: boolean): boolean`, - `declare function validate(n: BabelNode, key: string, value: mixed): void;` + 
`declare export function matchesPattern(node: ?BabelNode, match: string | Array<string>, allowPartial?: boolean): boolean`, + `declare export function validate(n: BabelNode, key: string, value: mixed): void;` ); for (const type in t.FLIPPED_ALIAS_KEYS) { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateBuilders.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateBuilders.js deleted file mode 100644 index 5ca9f74643aabb..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateBuilders.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -const definitions = require("../../lib/definitions"); -const formatBuilderName = require("../utils/formatBuilderName"); -const lowerFirst = require("../utils/lowerFirst"); - -module.exports = function generateBuilders() { - let output = `// @flow -/* - * This file is auto-generated! Do not modify it directly. - * To re-generate run 'make build' - */ -import builder from "../builder";\n\n`; - - const reservedNames = new Set(["super", "import"]); - Object.keys(definitions.BUILDER_KEYS).forEach(type => { - const formatedBuilderName = formatBuilderName(type); - const formatedBuilderNameLocal = reservedNames.has(formatedBuilderName) - ? `_${formatedBuilderName}` - : formatedBuilderName; - output += `${ - formatedBuilderNameLocal === formatedBuilderName ? "export " : "" - }function ${formatedBuilderNameLocal}(...args: Array<any>): Object { return builder("${type}", ...args); }\n`; - // This is needed for backwards compatibility. - // arrayExpression -> ArrayExpression - output += `export { ${formatedBuilderNameLocal} as ${type} };\n`; - if (formatedBuilderNameLocal !== formatedBuilderName) { - output += `export { ${formatedBuilderNameLocal} as ${formatedBuilderName} };\n`; - } - - // This is needed for backwards compatibility. - // It should be removed in the next major version. - // JSXIdentifier -> jSXIdentifier - if (/^[A-Z]{2}/.test(type)) { - output += `export { ${formatedBuilderNameLocal} as ${lowerFirst( - type - )} }\n`; - } - }); - - Object.keys(definitions.DEPRECATED_KEYS).forEach(type => { - const newType = definitions.DEPRECATED_KEYS[type]; - output += `export function ${type}(...args: Array<any>): Object { - console.trace("The node type ${type} has been renamed to ${newType}"); - return builder("${type}", ...args); -} -export { ${type} as ${formatBuilderName(type)} };\n`; - - // This is needed for backwards compatibility. - // It should be removed in the next major version. 
- // JSXIdentifier -> jSXIdentifier - if (/^[A-Z]{2}/.test(type)) { - output += `export { ${type} as ${lowerFirst(type)} }\n`; - } - }); - - return output; -}; diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript-legacy.js similarity index 96% rename from tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript.js rename to tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript-legacy.js index 210124d829dff4..a77040681b91e6 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/typescript-legacy.js @@ -1,17 +1,11 @@ "use strict"; -const t = require("../../"); +const t = require("../../lib"); const stringifyValidator = require("../utils/stringifyValidator"); const toFunctionName = require("../utils/toFunctionName"); -// For backward compat, we cannot use TS 3.7 syntax in published packages -const ts3_7 = process.argv.includes("--ts3.7"); - -// TypeScript 3.7: https://github.com/microsoft/TypeScript/pull/32695 will allow assert declarations -const asserts = ts3_7 ? assertion => `asserts ${assertion}` : () => `void`; - let code = `// NOTE: This file is autogenerated. Do not modify. -// See packages/babel-types/scripts/generators/typescript.js for script used. +// See packages/babel-types/scripts/generators/typescript-legacy.js for script used. interface BaseComment { value: string; @@ -146,15 +140,13 @@ for (const typeName of t.TYPES) { lines.push(`/** @deprecated Use \`assert${realName}\` */`); } lines.push( - `export function assert${typeName}(node: object | null | undefined, opts?: object | null): ${asserts( - result === "boolean" ? 
"node" : result - )};` + `export function assert${typeName}(node: object | null | undefined, opts?: object | null): void;` ); } lines.push( // assert/ - `export function assertNode(obj: any): ${asserts("obj is Node")}`, + `export function assertNode(obj: any): void`, // builders/ // eslint-disable-next-line max-len @@ -320,9 +312,7 @@ lines.push( // eslint-disable-next-line max-len `export function matchesPattern(node: Node | null | undefined, match: string | ReadonlyArray<string>, allowPartial?: boolean): node is MemberExpression`, // eslint-disable-next-line max-len - `export function validate<T extends Node, K extends keyof T>(n: Node | null | undefined, key: K, value: T[K]): ${asserts( - "n is T" - )}`, + `export function validate<T extends Node, K extends keyof T>(n: Node | null | undefined, key: K, value: T[K]): void;`, `export function validate(n: Node, key: string, value: any): void;` ); diff --git a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateValidators.js b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/validators.js similarity index 72% rename from tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateValidators.js rename to tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/validators.js index 1455f99e5b6713..c63d447bcdd244 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/generateValidators.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/types/scripts/generators/validators.js @@ -13,7 +13,7 @@ function addIsHelper(type, aliasKeys, deprecated) { const targetType = JSON.stringify(type); let aliasSource = ""; if (aliasKeys) { - aliasSource = " || " + joinComparisons(aliasKeys, "nodeType"); + aliasSource = joinComparisons(aliasKeys, "nodeType"); } let placeholderSource = ""; @@ -30,16 +30,26 @@ function addIsHelper(type, aliasKeys, deprecated) { if (placeholderTypes.length > 0) { placeholderSource = ' || nodeType === "Placeholder" && (' + - joinComparisons(placeholderTypes, "node.expectedNode") + + joinComparisons( + placeholderTypes, + "(node as t.Placeholder).expectedNode" + ) + ")"; } - return `export function is${type}(node: ?Object, opts?: Object): boolean { + const result = + definitions.NODE_FIELDS[type] || definitions.FLIPPED_ALIAS_KEYS[type] + ? `node is t.${type}` + : "boolean"; + + return `export function is${type}(node: object | null | undefined, opts?: object | null): ${result} { ${deprecated || ""} if (!node) return false; - const nodeType = node.type; - if (nodeType === ${targetType}${aliasSource}${placeholderSource}) { + const nodeType = (node as t.Node).type; + if (${ + aliasSource ? aliasSource : `nodeType === ${targetType}` + }${placeholderSource}) { if (typeof opts === "undefined") { return true; } else { @@ -53,12 +63,12 @@ function addIsHelper(type, aliasKeys, deprecated) { } module.exports = function generateValidators() { - let output = `// @flow -/* + let output = `/* * This file is auto-generated! Do not modify it directly. 
* To re-generate run 'make build' */ -import shallowEqual from "../../utils/shallowEqual";\n\n`; +import shallowEqual from "../../utils/shallowEqual"; +import type * as t from "../..";\n\n`; Object.keys(definitions.VISITOR_KEYS).forEach(type => { output += addIsHelper(type); diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/.jscs.json b/tools/node_modules/@babel/core/node_modules/function-bind/.jscs.json deleted file mode 100644 index 8c4479480be70d..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/.jscs.json +++ /dev/null @@ -1,176 +0,0 @@ -{ - "es3": true, - - "additionalRules": [], - - "requireSemicolons": true, - - "disallowMultipleSpaces": true, - - "disallowIdentifierNames": [], - - "requireCurlyBraces": { - "allExcept": [], - "keywords": ["if", "else", "for", "while", "do", "try", "catch"] - }, - - "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], - - "disallowSpaceAfterKeywords": [], - - "disallowSpaceBeforeComma": true, - "disallowSpaceAfterComma": false, - "disallowSpaceBeforeSemicolon": true, - - "disallowNodeTypes": [ - "DebuggerStatement", - "ForInStatement", - "LabeledStatement", - "SwitchCase", - "SwitchStatement", - "WithStatement" - ], - - "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, - - "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, - "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, - "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, - "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, - - "requireSpaceBetweenArguments": true, - - "disallowSpacesInsideParentheses": true, - - "disallowSpacesInsideArrayBrackets": true, - - "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, - - "disallowSpaceAfterObjectKeys": true, - - "requireCommaBeforeLineBreak": true, - - "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], - "requireSpaceAfterPrefixUnaryOperators": [], - - "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], - "requireSpaceBeforePostfixUnaryOperators": [], - - "disallowSpaceBeforeBinaryOperators": [], - "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - - "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], - "disallowSpaceAfterBinaryOperators": [], - - "disallowImplicitTypeConversion": ["binary", "string"], - - "disallowKeywords": ["with", "eval"], - - "requireKeywordsOnNewLine": [], - "disallowKeywordsOnNewLine": ["else"], - - "requireLineFeedAtFileEnd": true, - - "disallowTrailingWhitespace": true, - - "disallowTrailingComma": true, - - "excludeFiles": ["node_modules/**", "vendor/**"], - - "disallowMultipleLineStrings": true, - - "requireDotNotation": { "allExcept": ["keywords"] }, - - "requireParenthesesAroundIIFE": true, - - "validateLineBreaks": "LF", - - "validateQuoteMarks": { - "escape": true, - "mark": "'" - }, - - "disallowOperatorBeforeLineBreak": [], - - "requireSpaceBeforeKeywords": [ - "do", - "for", - "if", - "else", - "switch", - "case", - "try", - "catch", - "finally", - "while", - "with", - "return" - ], - - "validateAlignedFunctionParameters": { - "lineBreakAfterOpeningBraces": true, - "lineBreakBeforeClosingBraces": true - }, - - "requirePaddingNewLinesBeforeExport": true, - - 
"validateNewlineAfterArrayElements": { - "maximum": 8 - }, - - "requirePaddingNewLinesAfterUseStrict": true, - - "disallowArrowFunctions": true, - - "disallowMultiLineTernary": true, - - "validateOrderInObjectKeys": "asc-insensitive", - - "disallowIdenticalDestructuringNames": true, - - "disallowNestedTernaries": { "maxLevel": 1 }, - - "requireSpaceAfterComma": { "allExcept": ["trailing"] }, - "requireAlignedMultilineParams": false, - - "requireSpacesInGenerator": { - "afterStar": true - }, - - "disallowSpacesInGenerator": { - "beforeStar": true - }, - - "disallowVar": false, - - "requireArrayDestructuring": false, - - "requireEnhancedObjectLiterals": false, - - "requireObjectDestructuring": false, - - "requireEarlyReturn": false, - - "requireCapitalizedConstructorsNew": { - "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] - }, - - "requireImportAlphabetized": false, - - "requireSpaceBeforeObjectValues": true, - "requireSpaceBeforeDestructuredValues": true, - - "disallowSpacesInsideTemplateStringPlaceholders": true, - - "disallowArrayDestructuringReturn": false, - - "requireNewlineBeforeSingleStatementsInIf": false, - - "disallowUnusedVariables": true, - - "requireSpacesInsideImportedObjectBraces": true, - - "requireUseStrict": true -} - diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/LICENSE b/tools/node_modules/@babel/core/node_modules/function-bind/LICENSE deleted file mode 100644 index 62d6d237ff179b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 Raynos. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/README.md b/tools/node_modules/@babel/core/node_modules/function-bind/README.md deleted file mode 100644 index 81862a02cb940c..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# function-bind - -<!-- - [![build status][travis-svg]][travis-url] - [![NPM version][npm-badge-svg]][npm-url] - [![Coverage Status][5]][6] - [![gemnasium Dependency Status][7]][8] - [![Dependency status][deps-svg]][deps-url] - [![Dev Dependency status][dev-deps-svg]][dev-deps-url] ---> - -<!-- [![browser support][11]][12] --> - -Implementation of function.prototype.bind - -## Example - -I mainly do this for unit tests I run on phantomjs. 
-PhantomJS does not have Function.prototype.bind :( - -```js -Function.prototype.bind = require("function-bind") -``` - -## Installation - -`npm install function-bind` - -## Contributors - - - Raynos - -## MIT Licenced - - [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg - [travis-url]: https://travis-ci.org/Raynos/function-bind - [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg - [npm-url]: https://npmjs.org/package/function-bind - [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png - [6]: https://coveralls.io/r/Raynos/function-bind - [7]: https://gemnasium.com/Raynos/function-bind.png - [8]: https://gemnasium.com/Raynos/function-bind - [deps-svg]: https://david-dm.org/Raynos/function-bind.svg - [deps-url]: https://david-dm.org/Raynos/function-bind - [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg - [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies - [11]: https://ci.testling.com/Raynos/function-bind.png - [12]: https://ci.testling.com/Raynos/function-bind diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/implementation.js b/tools/node_modules/@babel/core/node_modules/function-bind/implementation.js deleted file mode 100644 index cc4daec1b080a1..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/implementation.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; - -/* eslint no-invalid-this: 1 */ - -var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; -var slice = Array.prototype.slice; -var toStr = Object.prototype.toString; -var funcType = '[object Function]'; - -module.exports = function bind(that) { - var target = this; - if (typeof target !== 'function' || toStr.call(target) !== funcType) { - throw new TypeError(ERROR_MESSAGE + target); - } - var args = slice.call(arguments, 1); - - var bound; - var binder = function () { - if (this instanceof bound) { - var result = target.apply( - this, - args.concat(slice.call(arguments)) - ); - if (Object(result) === result) { - return result; - } - return this; - } else { - return target.apply( - that, - args.concat(slice.call(arguments)) - ); - } - }; - - var boundLength = Math.max(0, target.length - args.length); - var boundArgs = []; - for (var i = 0; i < boundLength; i++) { - boundArgs.push('$' + i); - } - - bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); - - if (target.prototype) { - var Empty = function Empty() {}; - Empty.prototype = target.prototype; - bound.prototype = new Empty(); - Empty.prototype = null; - } - - return bound; -}; diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/index.js b/tools/node_modules/@babel/core/node_modules/function-bind/index.js deleted file mode 100644 index 3bb6b9609889f8..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/index.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; - -var implementation = require('./implementation'); - -module.exports = Function.prototype.bind || implementation; diff --git a/tools/node_modules/@babel/core/node_modules/function-bind/package.json b/tools/node_modules/@babel/core/node_modules/function-bind/package.json deleted file mode 100644 index 20a1727cbf8711..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/function-bind/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "function-bind", - "version": "1.1.1", - "description": "Implementation of Function.prototype.bind", - 
"keywords": [ - "function", - "bind", - "shim", - "es5" - ], - "author": "Raynos <raynos2@gmail.com>", - "repository": "git://github.com/Raynos/function-bind.git", - "main": "index", - "homepage": "https://github.com/Raynos/function-bind", - "contributors": [ - { - "name": "Raynos" - }, - { - "name": "Jordan Harband", - "url": "https://github.com/ljharb" - } - ], - "bugs": { - "url": "https://github.com/Raynos/function-bind/issues", - "email": "raynos2@gmail.com" - }, - "dependencies": {}, - "devDependencies": { - "@ljharb/eslint-config": "^12.2.1", - "covert": "^1.1.0", - "eslint": "^4.5.0", - "jscs": "^3.0.7", - "tape": "^4.8.0" - }, - "license": "MIT", - "scripts": { - "pretest": "npm run lint", - "test": "npm run tests-only", - "posttest": "npm run coverage -- --quiet", - "tests-only": "node test", - "coverage": "covert test/*.js", - "lint": "npm run jscs && npm run eslint", - "jscs": "jscs *.js */*.js", - "eslint": "eslint *.js */*.js" - }, - "testling": { - "files": "test/index.js", - "browsers": [ - "ie/8..latest", - "firefox/16..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/tools/node_modules/@babel/core/node_modules/has/LICENSE-MIT b/tools/node_modules/@babel/core/node_modules/has/LICENSE-MIT deleted file mode 100644 index ae7014d385df3d..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/has/LICENSE-MIT +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2013 Thiago de Arruda - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/tools/node_modules/@babel/core/node_modules/has/README.md b/tools/node_modules/@babel/core/node_modules/has/README.md deleted file mode 100644 index 635e3a4baab00b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/has/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# has - -> Object.prototype.hasOwnProperty.call shortcut - -## Installation - -```sh -npm install --save has -``` - -## Usage - -```js -var has = require('has'); - -has({}, 'hasOwnProperty'); // false -has(Object.prototype, 'hasOwnProperty'); // true -``` diff --git a/tools/node_modules/@babel/core/node_modules/has/package.json b/tools/node_modules/@babel/core/node_modules/has/package.json deleted file mode 100644 index 7c4592f16de071..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/has/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "has", - "description": "Object.prototype.hasOwnProperty.call shortcut", - "version": "1.0.3", - "homepage": "https://github.com/tarruda/has", - "author": { - "name": "Thiago de Arruda", - "email": "tpadilha84@gmail.com" - }, - "contributors": [ - { - "name": "Jordan Harband", - "email": "ljharb@gmail.com", - "url": "http://ljharb.codes" - } - ], - "repository": { - "type": "git", - "url": "git://github.com/tarruda/has.git" - }, - "bugs": { - "url": "https://github.com/tarruda/has/issues" - }, - "license": "MIT", - "licenses": [ - { - "type": "MIT", - "url": "https://github.com/tarruda/has/blob/master/LICENSE-MIT" - } - ], - "main": "./src", - "dependencies": { - "function-bind": "^1.1.1" - }, - "devDependencies": { - "@ljharb/eslint-config": "^12.2.1", - "eslint": "^4.19.1", - "tape": "^4.9.0" - }, - "engines": { - "node": ">= 0.4.0" - }, - "scripts": { - "lint": "eslint .", - "pretest": "npm run lint", - "test": "tape test" - } -} diff --git a/tools/node_modules/@babel/core/node_modules/has/src/index.js b/tools/node_modules/@babel/core/node_modules/has/src/index.js deleted file mode 100644 index dd92dd9094edb0..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/has/src/index.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; - -var bind = require('function-bind'); - -module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); diff --git a/tools/node_modules/@babel/core/node_modules/is-core-module/LICENSE b/tools/node_modules/@babel/core/node_modules/is-core-module/LICENSE deleted file mode 100644 index 2e502872a74234..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/is-core-module/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Dave Justice - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/is-core-module/README.md b/tools/node_modules/@babel/core/node_modules/is-core-module/README.md deleted file mode 100644 index 479d6d24c0f041..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/is-core-module/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# is-core-module <sup>[![Version Badge][2]][1]</sup> - -[![Build Status][3]][4] -[![dependency status][5]][6] -[![dev dependency status][7]][8] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][11]][1] - -Is this specifier a node.js core module? Optionally provide a node version to check; defaults to the current node version. - -## Example - -```js -var isCore = require('is-core-module'); -var assert = require('assert'); -assert(isCore('fs')); -assert(!isCore('butts')); -``` - -## Tests -Clone the repo, `npm install`, and run `npm test` - -[1]: https://npmjs.org/package/is-core-module -[2]: https://versionbadg.es/inspect-js/is-core-module.svg -[3]: https://travis-ci.com/inspect-js/is-core-module.svg -[4]: https://travis-ci.com/inspect-js/is-core-module -[5]: https://david-dm.org/inspect-js/is-core-module.svg -[6]: https://david-dm.org/inspect-js/is-core-module -[7]: https://david-dm.org/inspect-js/is-core-module/dev-status.svg -[8]: https://david-dm.org/inspect-js/is-core-module#info=devDependencies -[11]: https://nodei.co/npm/is-core-module.png?downloads=true&stars=true -[license-image]: https://img.shields.io/npm/l/is-core-module.svg -[license-url]: LICENSE -[downloads-image]: https://img.shields.io/npm/dm/is-core-module.svg -[downloads-url]: https://npm-stat.com/charts.html?package=is-core-module diff --git a/tools/node_modules/@babel/core/node_modules/is-core-module/core.json b/tools/node_modules/@babel/core/node_modules/is-core-module/core.json deleted file mode 100644 index 0238b61a4c71e4..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/is-core-module/core.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "assert": true, - "assert/strict": ">= 15", - "async_hooks": ">= 8", - "buffer_ieee754": "< 0.9.7", - "buffer": true, - "child_process": true, - "cluster": true, - "console": true, - "constants": true, - "crypto": true, - "_debug_agent": ">= 1 && < 8", - "_debugger": "< 8", - "dgram": true, - "diagnostics_channel": ">= 15.1", - "dns": true, - "dns/promises": ">= 15", - "domain": ">= 0.7.12", - "events": true, - "freelist": "< 6", - "fs": true, - "fs/promises": [">= 10 && < 10.1", ">= 14"], - "_http_agent": ">= 0.11.1", - "_http_client": ">= 0.11.1", - "_http_common": ">= 0.11.1", - "_http_incoming": ">= 0.11.1", - "_http_outgoing": ">= 0.11.1", - "_http_server": ">= 0.11.1", - "http": true, - "http2": ">= 8.8", - "https": true, - "inspector": ">= 8.0.0", - "_linklist": "< 8", - "module": true, - "net": true, - "node-inspect/lib/_inspect": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_client": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_repl": ">= 7.6.0 && < 12", - "os": true, - "path": true, - "path/posix": ">= 15.3", - "path/win32": ">= 15.3", - "perf_hooks": ">= 8.5", - "process": ">= 1", - "punycode": true, - "querystring": true, - "readline": true, - "repl": true, - "smalloc": ">= 0.11.5 && 
< 3", - "_stream_duplex": ">= 0.9.4", - "_stream_transform": ">= 0.9.4", - "_stream_wrap": ">= 1.4.1", - "_stream_passthrough": ">= 0.9.4", - "_stream_readable": ">= 0.9.4", - "_stream_writable": ">= 0.9.4", - "stream": true, - "stream/promises": ">= 15", - "string_decoder": true, - "sys": [">= 0.6 && < 0.7", ">= 0.8"], - "timers": true, - "timers/promises": ">= 15", - "_tls_common": ">= 0.11.13", - "_tls_legacy": ">= 0.11.3 && < 10", - "_tls_wrap": ">= 0.11.3", - "tls": true, - "trace_events": ">= 10", - "tty": true, - "url": true, - "util": true, - "util/types": ">= 15.3", - "v8/tools/arguments": ">= 10 && < 12", - "v8/tools/codemap": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/consarray": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/csvparser": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/logreader": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/profile_view": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/splaytree": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8": ">= 1", - "vm": true, - "wasi": ">= 13.4 && < 13.5", - "worker_threads": ">= 11.7", - "zlib": true -} diff --git a/tools/node_modules/@babel/core/node_modules/is-core-module/index.js b/tools/node_modules/@babel/core/node_modules/is-core-module/index.js deleted file mode 100644 index f5a69cf765f56b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/is-core-module/index.js +++ /dev/null @@ -1,69 +0,0 @@ -'use strict'; - -var has = require('has'); - -function specifierIncluded(current, specifier) { - var nodeParts = current.split('.'); - var parts = specifier.split(' '); - var op = parts.length > 1 ? parts[0] : '='; - var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.'); - - for (var i = 0; i < 3; ++i) { - var cur = parseInt(nodeParts[i] || 0, 10); - var ver = parseInt(versionParts[i] || 0, 10); - if (cur === ver) { - continue; // eslint-disable-line no-restricted-syntax, no-continue - } - if (op === '<') { - return cur < ver; - } - if (op === '>=') { - return cur >= ver; - } - return false; - } - return op === '>='; -} - -function matchesRange(current, range) { - var specifiers = range.split(/ ?&& ?/); - if (specifiers.length === 0) { - return false; - } - for (var i = 0; i < specifiers.length; ++i) { - if (!specifierIncluded(current, specifiers[i])) { - return false; - } - } - return true; -} - -function versionIncluded(nodeVersion, specifierValue) { - if (typeof specifierValue === 'boolean') { - return specifierValue; - } - - var current = typeof nodeVersion === 'undefined' - ? process.versions && process.versions.node && process.versions.node - : nodeVersion; - - if (typeof current !== 'string') { - throw new TypeError(typeof nodeVersion === 'undefined' ? 
'Unable to determine current node version' : 'If provided, a valid node version is required'); - } - - if (specifierValue && typeof specifierValue === 'object') { - for (var i = 0; i < specifierValue.length; ++i) { - if (matchesRange(current, specifierValue[i])) { - return true; - } - } - return false; - } - return matchesRange(current, specifierValue); -} - -var data = require('./core.json'); - -module.exports = function isCore(x, nodeVersion) { - return has(data, x) && versionIncluded(nodeVersion, data[x]); -}; diff --git a/tools/node_modules/@babel/core/node_modules/is-core-module/package.json b/tools/node_modules/@babel/core/node_modules/is-core-module/package.json deleted file mode 100644 index 21341cc431a505..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/is-core-module/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "name": "is-core-module", - "version": "2.2.0", - "description": "Is this specifier a node.js core module?", - "main": "index.js", - "exports": { - ".": [ - { - "default": "./index.js" - }, - "./index.js" - ], - "./package.json": "./package.json" - }, - "scripts": { - "prepublish": "safe-publish-latest", - "lint": "eslint .", - "pretest": "npm run lint", - "tests-only": "tape 'test/**/*.js'", - "test": "nyc npm run tests-only", - "posttest": "aud --production", - "version": "auto-changelog && git add CHANGELOG.md", - "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/inspect-js/is-core-module.git" - }, - "keywords": [ - "core", - "modules", - "module", - "npm", - "node", - "dependencies" - ], - "author": "Jordan Harband <ljharb@gmail.com>", - "funding": { - "url": "https://github.com/sponsors/ljharb" - }, - "license": "MIT", - "bugs": { - "url": "https://github.com/inspect-js/is-core-module/issues" - }, - "homepage": "https://github.com/inspect-js/is-core-module", - "dependencies": { - "has": "^1.0.3" - }, - "devDependencies": { - "@ljharb/eslint-config": "^17.3.0", - "aud": "^1.1.3", - "auto-changelog": "^2.2.1", - "eslint": "^7.14.0", - "nyc": "^10.3.2", - "safe-publish-latest": "^1.1.4", - "tape": "^5.0.1" - }, - "auto-changelog": { - "output": "CHANGELOG.md", - "template": "keepachangelog", - "unreleased": false, - "commitLimit": false, - "backfillLimit": false, - "hideCredit": true - } -} diff --git a/tools/node_modules/@babel/core/node_modules/path-parse/LICENSE b/tools/node_modules/@babel/core/node_modules/path-parse/LICENSE deleted file mode 100644 index 810f3dbea83b53..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/path-parse/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Javier Blanco - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/tools/node_modules/@babel/core/node_modules/path-parse/README.md b/tools/node_modules/@babel/core/node_modules/path-parse/README.md deleted file mode 100644 index 05097f86aef364..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/path-parse/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# path-parse [![Build Status](https://travis-ci.org/jbgutierrez/path-parse.svg?branch=master)](https://travis-ci.org/jbgutierrez/path-parse) - -> Node.js [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) [ponyfill](https://ponyfill.com). - -## Install - -``` -$ npm install --save path-parse -``` - -## Usage - -```js -var pathParse = require('path-parse'); - -pathParse('/home/user/dir/file.txt'); -//=> { -// root : "/", -// dir : "/home/user/dir", -// base : "file.txt", -// ext : ".txt", -// name : "file" -// } -``` - -## API - -See [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) docs. - -### pathParse(path) - -### pathParse.posix(path) - -The Posix specific version. - -### pathParse.win32(path) - -The Windows specific version. - -## License - -MIT © [Javier Blanco](http://jbgutierrez.info) diff --git a/tools/node_modules/@babel/core/node_modules/path-parse/index.js b/tools/node_modules/@babel/core/node_modules/path-parse/index.js deleted file mode 100644 index 3b7601fe494eed..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/path-parse/index.js +++ /dev/null @@ -1,93 +0,0 @@ -'use strict'; - -var isWindows = process.platform === 'win32'; - -// Regex to split a windows path into three parts: [*, device, slash, -// tail] windows-only -var splitDeviceRe = - /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - -// Regex to split the tail part of the above into [*, dir, basename, ext] -var splitTailRe = - /^([\s\S]*?)((?:\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))(?:[\\\/]*)$/; - -var win32 = {}; - -// Function to split a filename into [root, dir, basename, ext] -function win32SplitPath(filename) { - // Separate device+slash from tail - var result = splitDeviceRe.exec(filename), - device = (result[1] || '') + (result[2] || ''), - tail = result[3] || ''; - // Split the tail into dir, basename and extension - var result2 = splitTailRe.exec(tail), - dir = result2[1], - basename = result2[2], - ext = result2[3]; - return [device, dir, basename, ext]; -} - -win32.parse = function(pathString) { - if (typeof pathString !== 'string') { - throw new TypeError( - "Parameter 'pathString' must be a string, not " + typeof pathString - ); - } - var allParts = win32SplitPath(pathString); - if (!allParts || allParts.length !== 4) { - throw new TypeError("Invalid path '" + pathString + "'"); - } - return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), - base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) - }; -}; - - - -// Split a filename into [root, dir, basename, ext], unix version -// 'root' is just a slash, or nothing. 
-var splitPathRe = - /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; -var posix = {}; - - -function posixSplitPath(filename) { - return splitPathRe.exec(filename).slice(1); -} - - -posix.parse = function(pathString) { - if (typeof pathString !== 'string') { - throw new TypeError( - "Parameter 'pathString' must be a string, not " + typeof pathString - ); - } - var allParts = posixSplitPath(pathString); - if (!allParts || allParts.length !== 4) { - throw new TypeError("Invalid path '" + pathString + "'"); - } - allParts[1] = allParts[1] || ''; - allParts[2] = allParts[2] || ''; - allParts[3] = allParts[3] || ''; - - return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), - base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) - }; -}; - - -if (isWindows) - module.exports = win32.parse; -else /* posix */ - module.exports = posix.parse; - -module.exports.posix = posix.parse; -module.exports.win32 = win32.parse; diff --git a/tools/node_modules/@babel/core/node_modules/path-parse/package.json b/tools/node_modules/@babel/core/node_modules/path-parse/package.json deleted file mode 100644 index 21332bb14f8b7f..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/path-parse/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "path-parse", - "version": "1.0.6", - "description": "Node.js path.parse() ponyfill", - "main": "index.js", - "scripts": { - "test": "node test.js" - }, - "repository": { - "type": "git", - "url": "https://github.com/jbgutierrez/path-parse.git" - }, - "keywords": [ - "path", - "paths", - "file", - "dir", - "parse", - "built-in", - "util", - "utils", - "core", - "ponyfill", - "polyfill", - "shim" - ], - "author": "Javier Blanco <http://jbgutierrez.info>", - "license": "MIT", - "bugs": { - "url": "https://github.com/jbgutierrez/path-parse/issues" - }, - "homepage": "https://github.com/jbgutierrez/path-parse#readme" -} diff --git a/tools/node_modules/@babel/core/node_modules/resolve/LICENSE b/tools/node_modules/@babel/core/node_modules/resolve/LICENSE deleted file mode 100644 index ff4fce28af33a4..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2012 James Halliday - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/tools/node_modules/@babel/core/node_modules/resolve/index.js b/tools/node_modules/@babel/core/node_modules/resolve/index.js deleted file mode 100644 index 125d8146423596..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/index.js +++ /dev/null @@ -1,6 +0,0 @@ -var async = require('./lib/async'); -async.core = require('./lib/core'); -async.isCore = require('./lib/is-core'); -async.sync = require('./lib/sync'); - -module.exports = async; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/async.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/async.js deleted file mode 100644 index 29285079451b15..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/async.js +++ /dev/null @@ -1,299 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var caller = require('./caller'); -var nodeModulesPaths = require('./node-modules-paths'); -var normalizeOptions = require('./normalize-options'); -var isCore = require('is-core-module'); - -var realpathFS = fs.realpath && typeof fs.realpath.native === 'function' ? fs.realpath.native : fs.realpath; - -var defaultIsFile = function isFile(file, cb) { - fs.stat(file, function (err, stat) { - if (!err) { - return cb(null, stat.isFile() || stat.isFIFO()); - } - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); - return cb(err); - }); -}; - -var defaultIsDir = function isDirectory(dir, cb) { - fs.stat(dir, function (err, stat) { - if (!err) { - return cb(null, stat.isDirectory()); - } - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); - return cb(err); - }); -}; - -var defaultRealpath = function realpath(x, cb) { - realpathFS(x, function (realpathErr, realPath) { - if (realpathErr && realpathErr.code !== 'ENOENT') cb(realpathErr); - else cb(null, realpathErr ? 
x : realPath); - }); -}; - -var maybeRealpath = function maybeRealpath(realpath, x, opts, cb) { - if (opts && opts.preserveSymlinks === false) { - realpath(x, cb); - } else { - cb(null, x); - } -}; - -var getPackageCandidates = function getPackageCandidates(x, start, opts) { - var dirs = nodeModulesPaths(start, opts, x); - for (var i = 0; i < dirs.length; i++) { - dirs[i] = path.join(dirs[i], x); - } - return dirs; -}; - -module.exports = function resolve(x, options, callback) { - var cb = callback; - var opts = options; - if (typeof options === 'function') { - cb = opts; - opts = {}; - } - if (typeof x !== 'string') { - var err = new TypeError('Path must be a string.'); - return process.nextTick(function () { - cb(err); - }); - } - - opts = normalizeOptions(x, opts); - - var isFile = opts.isFile || defaultIsFile; - var isDirectory = opts.isDirectory || defaultIsDir; - var readFile = opts.readFile || fs.readFile; - var realpath = opts.realpath || defaultRealpath; - var packageIterator = opts.packageIterator; - - var extensions = opts.extensions || ['.js']; - var includeCoreModules = opts.includeCoreModules !== false; - var basedir = opts.basedir || path.dirname(caller()); - var parent = opts.filename || basedir; - - opts.paths = opts.paths || []; - - // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory - var absoluteStart = path.resolve(basedir); - - maybeRealpath( - realpath, - absoluteStart, - opts, - function (err, realStart) { - if (err) cb(err); - else init(realStart); - } - ); - - var res; - function init(basedir) { - if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { - res = path.resolve(basedir, x); - if (x === '.' || x === '..' || x.slice(-1) === '/') res += '/'; - if ((/\/$/).test(x) && res === basedir) { - loadAsDirectory(res, opts.package, onfile); - } else loadAsFile(res, opts.package, onfile); - } else if (includeCoreModules && isCore(x)) { - return cb(null, x); - } else loadNodeModules(x, basedir, function (err, n, pkg) { - if (err) cb(err); - else if (n) { - return maybeRealpath(realpath, n, opts, function (err, realN) { - if (err) { - cb(err); - } else { - cb(null, realN, pkg); - } - }); - } else { - var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); - moduleError.code = 'MODULE_NOT_FOUND'; - cb(moduleError); - } - }); - } - - function onfile(err, m, pkg) { - if (err) cb(err); - else if (m) cb(null, m, pkg); - else loadAsDirectory(res, function (err, d, pkg) { - if (err) cb(err); - else if (d) { - maybeRealpath(realpath, d, opts, function (err, realD) { - if (err) { - cb(err); - } else { - cb(null, realD, pkg); - } - }); - } else { - var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); - moduleError.code = 'MODULE_NOT_FOUND'; - cb(moduleError); - } - }); - } - - function loadAsFile(x, thePackage, callback) { - var loadAsFilePackage = thePackage; - var cb = callback; - if (typeof loadAsFilePackage === 'function') { - cb = loadAsFilePackage; - loadAsFilePackage = undefined; - } - - var exts = [''].concat(extensions); - load(exts, x, loadAsFilePackage); - - function load(exts, x, loadPackage) { - if (exts.length === 0) return cb(null, undefined, loadPackage); - var file = x + exts[0]; - - var pkg = loadPackage; - if (pkg) onpkg(null, pkg); - else loadpkg(path.dirname(file), onpkg); - - function onpkg(err, pkg_, dir) { - pkg = pkg_; - if (err) return cb(err); - if (dir && pkg && opts.pathFilter) { - var rfile = path.relative(dir, file); - 
var rel = rfile.slice(0, rfile.length - exts[0].length); - var r = opts.pathFilter(pkg, x, rel); - if (r) return load( - [''].concat(extensions.slice()), - path.resolve(dir, r), - pkg - ); - } - isFile(file, onex); - } - function onex(err, ex) { - if (err) return cb(err); - if (ex) return cb(null, file, pkg); - load(exts.slice(1), x, pkg); - } - } - } - - function loadpkg(dir, cb) { - if (dir === '' || dir === '/') return cb(null); - if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { - return cb(null); - } - if ((/[/\\]node_modules[/\\]*$/).test(dir)) return cb(null); - - maybeRealpath(realpath, dir, opts, function (unwrapErr, pkgdir) { - if (unwrapErr) return loadpkg(path.dirname(dir), cb); - var pkgfile = path.join(pkgdir, 'package.json'); - isFile(pkgfile, function (err, ex) { - // on err, ex is false - if (!ex) return loadpkg(path.dirname(dir), cb); - - readFile(pkgfile, function (err, body) { - if (err) cb(err); - try { var pkg = JSON.parse(body); } catch (jsonErr) {} - - if (pkg && opts.packageFilter) { - pkg = opts.packageFilter(pkg, pkgfile); - } - cb(null, pkg, dir); - }); - }); - }); - } - - function loadAsDirectory(x, loadAsDirectoryPackage, callback) { - var cb = callback; - var fpkg = loadAsDirectoryPackage; - if (typeof fpkg === 'function') { - cb = fpkg; - fpkg = opts.package; - } - - maybeRealpath(realpath, x, opts, function (unwrapErr, pkgdir) { - if (unwrapErr) return cb(unwrapErr); - var pkgfile = path.join(pkgdir, 'package.json'); - isFile(pkgfile, function (err, ex) { - if (err) return cb(err); - if (!ex) return loadAsFile(path.join(x, 'index'), fpkg, cb); - - readFile(pkgfile, function (err, body) { - if (err) return cb(err); - try { - var pkg = JSON.parse(body); - } catch (jsonErr) {} - - if (pkg && opts.packageFilter) { - pkg = opts.packageFilter(pkg, pkgfile); - } - - if (pkg && pkg.main) { - if (typeof pkg.main !== 'string') { - var mainError = new TypeError('package “' + pkg.name + '” `main` must be a string'); - mainError.code = 'INVALID_PACKAGE_MAIN'; - return cb(mainError); - } - if (pkg.main === '.' || pkg.main === './') { - pkg.main = 'index'; - } - loadAsFile(path.resolve(x, pkg.main), pkg, function (err, m, pkg) { - if (err) return cb(err); - if (m) return cb(null, m, pkg); - if (!pkg) return loadAsFile(path.join(x, 'index'), pkg, cb); - - var dir = path.resolve(x, pkg.main); - loadAsDirectory(dir, pkg, function (err, n, pkg) { - if (err) return cb(err); - if (n) return cb(null, n, pkg); - loadAsFile(path.join(x, 'index'), pkg, cb); - }); - }); - return; - } - - loadAsFile(path.join(x, '/index'), pkg, cb); - }); - }); - }); - } - - function processDirs(cb, dirs) { - if (dirs.length === 0) return cb(null, undefined); - var dir = dirs[0]; - - isDirectory(path.dirname(dir), isdir); - - function isdir(err, isdir) { - if (err) return cb(err); - if (!isdir) return processDirs(cb, dirs.slice(1)); - loadAsFile(dir, opts.package, onfile); - } - - function onfile(err, m, pkg) { - if (err) return cb(err); - if (m) return cb(null, m, pkg); - loadAsDirectory(dir, opts.package, ondir); - } - - function ondir(err, n, pkg) { - if (err) return cb(err); - if (n) return cb(null, n, pkg); - processDirs(cb, dirs.slice(1)); - } - } - function loadNodeModules(x, start, cb) { - var thunk = function () { return getPackageCandidates(x, start, opts); }; - processDirs( - cb, - packageIterator ? 
packageIterator(x, start, thunk, opts) : thunk() - ); - } -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/caller.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/caller.js deleted file mode 100644 index b14a2804ae828a..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/caller.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = function () { - // see https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi - var origPrepareStackTrace = Error.prepareStackTrace; - Error.prepareStackTrace = function (_, stack) { return stack; }; - var stack = (new Error()).stack; - Error.prepareStackTrace = origPrepareStackTrace; - return stack[2].getFileName(); -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/core.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/core.js deleted file mode 100644 index c417d23c5a8ff7..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/core.js +++ /dev/null @@ -1,53 +0,0 @@ -var current = (process.versions && process.versions.node && process.versions.node.split('.')) || []; - -function specifierIncluded(specifier) { - var parts = specifier.split(' '); - var op = parts.length > 1 ? parts[0] : '='; - var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.'); - - for (var i = 0; i < 3; ++i) { - var cur = parseInt(current[i] || 0, 10); - var ver = parseInt(versionParts[i] || 0, 10); - if (cur === ver) { - continue; // eslint-disable-line no-restricted-syntax, no-continue - } - if (op === '<') { - return cur < ver; - } else if (op === '>=') { - return cur >= ver; - } else { - return false; - } - } - return op === '>='; -} - -function matchesRange(range) { - var specifiers = range.split(/ ?&& ?/); - if (specifiers.length === 0) { return false; } - for (var i = 0; i < specifiers.length; ++i) { - if (!specifierIncluded(specifiers[i])) { return false; } - } - return true; -} - -function versionIncluded(specifierValue) { - if (typeof specifierValue === 'boolean') { return specifierValue; } - if (specifierValue && typeof specifierValue === 'object') { - for (var i = 0; i < specifierValue.length; ++i) { - if (matchesRange(specifierValue[i])) { return true; } - } - return false; - } - return matchesRange(specifierValue); -} - -var data = require('./core.json'); - -var core = {}; -for (var mod in data) { // eslint-disable-line no-restricted-syntax - if (Object.prototype.hasOwnProperty.call(data, mod)) { - core[mod] = versionIncluded(data[mod]); - } -} -module.exports = core; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/core.json b/tools/node_modules/@babel/core/node_modules/resolve/lib/core.json deleted file mode 100644 index 226198f89b5ef0..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/core.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "assert": true, - "assert/strict": ">= 15", - "async_hooks": ">= 8", - "buffer_ieee754": "< 0.9.7", - "buffer": true, - "child_process": true, - "cluster": true, - "console": true, - "constants": true, - "crypto": true, - "_debug_agent": ">= 1 && < 8", - "_debugger": "< 8", - "dgram": true, - "diagnostics_channel": ">= 15.1", - "dns": true, - "dns/promises": ">= 15", - "domain": ">= 0.7.12", - "events": true, - "freelist": "< 6", - "fs": true, - "fs/promises": [">= 10 && < 10.1", ">= 14"], - "_http_agent": ">= 0.11.1", - "_http_client": ">= 0.11.1", - "_http_common": ">= 0.11.1", - "_http_incoming": ">= 0.11.1", - "_http_outgoing": ">= 0.11.1", - "_http_server": ">= 0.11.1", 
- "http": true, - "http2": ">= 8.8", - "https": true, - "inspector": ">= 8.0.0", - "_linklist": "< 8", - "module": true, - "net": true, - "node-inspect/lib/_inspect": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_client": ">= 7.6.0 && < 12", - "node-inspect/lib/internal/inspect_repl": ">= 7.6.0 && < 12", - "os": true, - "path": true, - "perf_hooks": ">= 8.5", - "process": ">= 1", - "punycode": true, - "querystring": true, - "readline": true, - "repl": true, - "smalloc": ">= 0.11.5 && < 3", - "_stream_duplex": ">= 0.9.4", - "_stream_transform": ">= 0.9.4", - "_stream_wrap": ">= 1.4.1", - "_stream_passthrough": ">= 0.9.4", - "_stream_readable": ">= 0.9.4", - "_stream_writable": ">= 0.9.4", - "stream": true, - "stream/promises": ">= 15", - "string_decoder": true, - "sys": [">= 0.6 && < 0.7", ">= 0.8"], - "timers": true, - "timers/promises": ">= 15", - "_tls_common": ">= 0.11.13", - "_tls_legacy": ">= 0.11.3 && < 10", - "_tls_wrap": ">= 0.11.3", - "tls": true, - "trace_events": ">= 10", - "tty": true, - "url": true, - "util": true, - "v8/tools/arguments": ">= 10 && < 12", - "v8/tools/codemap": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/consarray": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/csvparser": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/logreader": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/profile_view": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8/tools/splaytree": [">= 4.4.0 && < 5", ">= 5.2.0 && < 12"], - "v8": ">= 1", - "vm": true, - "wasi": ">= 13.4 && < 13.5", - "worker_threads": ">= 11.7", - "zlib": true -} diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/is-core.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/is-core.js deleted file mode 100644 index 537f5c782ffe55..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/is-core.js +++ /dev/null @@ -1,5 +0,0 @@ -var isCoreModule = require('is-core-module'); - -module.exports = function isCore(x) { - return isCoreModule(x); -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/node-modules-paths.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/node-modules-paths.js deleted file mode 100644 index 2b43813a7a561b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/node-modules-paths.js +++ /dev/null @@ -1,42 +0,0 @@ -var path = require('path'); -var parse = path.parse || require('path-parse'); - -var getNodeModulesDirs = function getNodeModulesDirs(absoluteStart, modules) { - var prefix = '/'; - if ((/^([A-Za-z]:)/).test(absoluteStart)) { - prefix = ''; - } else if ((/^\\\\/).test(absoluteStart)) { - prefix = '\\\\'; - } - - var paths = [absoluteStart]; - var parsed = parse(absoluteStart); - while (parsed.dir !== paths[paths.length - 1]) { - paths.push(parsed.dir); - parsed = parse(parsed.dir); - } - - return paths.reduce(function (dirs, aPath) { - return dirs.concat(modules.map(function (moduleDir) { - return path.resolve(prefix, aPath, moduleDir); - })); - }, []); -}; - -module.exports = function nodeModulesPaths(start, opts, request) { - var modules = opts && opts.moduleDirectory - ? [].concat(opts.moduleDirectory) - : ['node_modules']; - - if (opts && typeof opts.paths === 'function') { - return opts.paths( - request, - start, - function () { return getNodeModulesDirs(start, modules); }, - opts - ); - } - - var dirs = getNodeModulesDirs(start, modules); - return opts && opts.paths ? 
dirs.concat(opts.paths) : dirs; -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/normalize-options.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/normalize-options.js deleted file mode 100644 index 4b56904eaea72b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/normalize-options.js +++ /dev/null @@ -1,10 +0,0 @@ -module.exports = function (x, opts) { - /** - * This file is purposefully a passthrough. It's expected that third-party - * environments will override it at runtime in order to inject special logic - * into `resolve` (by manipulating the options). One such example is the PnP - * code path in Yarn. - */ - - return opts || {}; -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/lib/sync.js b/tools/node_modules/@babel/core/node_modules/resolve/lib/sync.js deleted file mode 100644 index d5308c926e498b..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/lib/sync.js +++ /dev/null @@ -1,192 +0,0 @@ -var isCore = require('is-core-module'); -var fs = require('fs'); -var path = require('path'); -var caller = require('./caller'); -var nodeModulesPaths = require('./node-modules-paths'); -var normalizeOptions = require('./normalize-options'); - -var realpathFS = fs.realpathSync && typeof fs.realpathSync.native === 'function' ? fs.realpathSync.native : fs.realpathSync; - -var defaultIsFile = function isFile(file) { - try { - var stat = fs.statSync(file); - } catch (e) { - if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; - throw e; - } - return stat.isFile() || stat.isFIFO(); -}; - -var defaultIsDir = function isDirectory(dir) { - try { - var stat = fs.statSync(dir); - } catch (e) { - if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; - throw e; - } - return stat.isDirectory(); -}; - -var defaultRealpathSync = function realpathSync(x) { - try { - return realpathFS(x); - } catch (realpathErr) { - if (realpathErr.code !== 'ENOENT') { - throw realpathErr; - } - } - return x; -}; - -var maybeRealpathSync = function maybeRealpathSync(realpathSync, x, opts) { - if (opts && opts.preserveSymlinks === false) { - return realpathSync(x); - } - return x; -}; - -var getPackageCandidates = function getPackageCandidates(x, start, opts) { - var dirs = nodeModulesPaths(start, opts, x); - for (var i = 0; i < dirs.length; i++) { - dirs[i] = path.join(dirs[i], x); - } - return dirs; -}; - -module.exports = function resolveSync(x, options) { - if (typeof x !== 'string') { - throw new TypeError('Path must be a string.'); - } - var opts = normalizeOptions(x, options); - - var isFile = opts.isFile || defaultIsFile; - var readFileSync = opts.readFileSync || fs.readFileSync; - var isDirectory = opts.isDirectory || defaultIsDir; - var realpathSync = opts.realpathSync || defaultRealpathSync; - var packageIterator = opts.packageIterator; - - var extensions = opts.extensions || ['.js']; - var includeCoreModules = opts.includeCoreModules !== false; - var basedir = opts.basedir || path.dirname(caller()); - var parent = opts.filename || basedir; - - opts.paths = opts.paths || []; - - // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory - var absoluteStart = maybeRealpathSync(realpathSync, path.resolve(basedir), opts); - - if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { - var res = path.resolve(absoluteStart, x); - if (x === '.' || x === '..' 
|| x.slice(-1) === '/') res += '/'; - var m = loadAsFileSync(res) || loadAsDirectorySync(res); - if (m) return maybeRealpathSync(realpathSync, m, opts); - } else if (includeCoreModules && isCore(x)) { - return x; - } else { - var n = loadNodeModulesSync(x, absoluteStart); - if (n) return maybeRealpathSync(realpathSync, n, opts); - } - - var err = new Error("Cannot find module '" + x + "' from '" + parent + "'"); - err.code = 'MODULE_NOT_FOUND'; - throw err; - - function loadAsFileSync(x) { - var pkg = loadpkg(path.dirname(x)); - - if (pkg && pkg.dir && pkg.pkg && opts.pathFilter) { - var rfile = path.relative(pkg.dir, x); - var r = opts.pathFilter(pkg.pkg, x, rfile); - if (r) { - x = path.resolve(pkg.dir, r); // eslint-disable-line no-param-reassign - } - } - - if (isFile(x)) { - return x; - } - - for (var i = 0; i < extensions.length; i++) { - var file = x + extensions[i]; - if (isFile(file)) { - return file; - } - } - } - - function loadpkg(dir) { - if (dir === '' || dir === '/') return; - if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { - return; - } - if ((/[/\\]node_modules[/\\]*$/).test(dir)) return; - - var pkgfile = path.join(maybeRealpathSync(realpathSync, dir, opts), 'package.json'); - - if (!isFile(pkgfile)) { - return loadpkg(path.dirname(dir)); - } - - var body = readFileSync(pkgfile); - - try { - var pkg = JSON.parse(body); - } catch (jsonErr) {} - - if (pkg && opts.packageFilter) { - // v2 will pass pkgfile - pkg = opts.packageFilter(pkg, /*pkgfile,*/ dir); // eslint-disable-line spaced-comment - } - - return { pkg: pkg, dir: dir }; - } - - function loadAsDirectorySync(x) { - var pkgfile = path.join(maybeRealpathSync(realpathSync, x, opts), '/package.json'); - if (isFile(pkgfile)) { - try { - var body = readFileSync(pkgfile, 'UTF8'); - var pkg = JSON.parse(body); - } catch (e) {} - - if (pkg && opts.packageFilter) { - // v2 will pass pkgfile - pkg = opts.packageFilter(pkg, /*pkgfile,*/ x); // eslint-disable-line spaced-comment - } - - if (pkg && pkg.main) { - if (typeof pkg.main !== 'string') { - var mainError = new TypeError('package “' + pkg.name + '” `main` must be a string'); - mainError.code = 'INVALID_PACKAGE_MAIN'; - throw mainError; - } - if (pkg.main === '.' || pkg.main === './') { - pkg.main = 'index'; - } - try { - var m = loadAsFileSync(path.resolve(x, pkg.main)); - if (m) return m; - var n = loadAsDirectorySync(path.resolve(x, pkg.main)); - if (n) return n; - } catch (e) {} - } - } - - return loadAsFileSync(path.join(x, '/index')); - } - - function loadNodeModulesSync(x, start) { - var thunk = function () { return getPackageCandidates(x, start, opts); }; - var dirs = packageIterator ? 
packageIterator(x, start, thunk, opts) : thunk(); - - for (var i = 0; i < dirs.length; i++) { - var dir = dirs[i]; - if (isDirectory(path.dirname(dir))) { - var m = loadAsFileSync(dir); - if (m) return m; - var n = loadAsDirectorySync(dir); - if (n) return n; - } - } - } -}; diff --git a/tools/node_modules/@babel/core/node_modules/resolve/package.json b/tools/node_modules/@babel/core/node_modules/resolve/package.json deleted file mode 100644 index dfcfc497b34eaf..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "resolve", - "description": "resolve like require.resolve() on behalf of files asynchronously and synchronously", - "version": "1.19.0", - "repository": { - "type": "git", - "url": "git://github.com/browserify/resolve.git" - }, - "main": "index.js", - "keywords": [ - "resolve", - "require", - "node", - "module" - ], - "scripts": { - "prepublish": "safe-publish-latest && cp node_modules/is-core-module/core.json ./lib/", - "prelint": "eclint check '**/*'", - "lint": "eslint --ext=js,mjs .", - "pretests-only": "cd ./test/resolver/nested_symlinks && node mylib/sync && node mylib/async", - "tests-only": "tape test/*.js", - "pretest": "npm run lint", - "test": "npm run --silent tests-only", - "posttest": "npm run test:multirepo && aud --production", - "test:multirepo": "cd ./test/resolver/multirepo && npm install && npm test" - }, - "devDependencies": { - "@ljharb/eslint-config": "^17.2.0", - "array.prototype.map": "^1.0.2", - "aud": "^1.1.3", - "eclint": "^2.8.1", - "eslint": "^7.13.0", - "object-keys": "^1.1.1", - "safe-publish-latest": "^1.1.4", - "tap": "0.4.13", - "tape": "^5.0.1" - }, - "license": "MIT", - "author": { - "name": "James Halliday", - "email": "mail@substack.net", - "url": "http://substack.net" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - }, - "dependencies": { - "is-core-module": "^2.1.0", - "path-parse": "^1.0.6" - } -} diff --git a/tools/node_modules/@babel/core/node_modules/resolve/readme.markdown b/tools/node_modules/@babel/core/node_modules/resolve/readme.markdown deleted file mode 100644 index f742c38dd48df8..00000000000000 --- a/tools/node_modules/@babel/core/node_modules/resolve/readme.markdown +++ /dev/null @@ -1,250 +0,0 @@ -# resolve - -implements the [node `require.resolve()` -algorithm](https://nodejs.org/api/modules.html#modules_all_together) -such that you can `require.resolve()` on behalf of a file asynchronously and -synchronously - -[![build status](https://secure.travis-ci.org/browserify/resolve.png)](http://travis-ci.org/browserify/resolve) - -# example - -asynchronously resolve: - -```js -var resolve = require('resolve'); -resolve('tap', { basedir: __dirname }, function (err, res) { - if (err) console.error(err); - else console.log(res); -}); -``` - -``` -$ node example/async.js -/home/substack/projects/node-resolve/node_modules/tap/lib/main.js -``` - -synchronously resolve: - -```js -var resolve = require('resolve'); -var res = resolve.sync('tap', { basedir: __dirname }); -console.log(res); -``` - -``` -$ node example/sync.js -/home/substack/projects/node-resolve/node_modules/tap/lib/main.js -``` - -# methods - -```js -var resolve = require('resolve'); -``` - -For both the synchronous and asynchronous methods, errors may have any of the following `err.code` values: - -- `MODULE_NOT_FOUND`: the given path string (`id`) could not be resolved to a module -- `INVALID_BASEDIR`: the specified `opts.basedir` 
doesn't exist, or is not a directory -- `INVALID_PACKAGE_MAIN`: a `package.json` was encountered with an invalid `main` property (eg. not a string) - -## resolve(id, opts={}, cb) - -Asynchronously resolve the module path string `id` into `cb(err, res [, pkg])`, where `pkg` (if defined) is the data from `package.json`. - -options are: - -* opts.basedir - directory to begin resolving from - -* opts.package - `package.json` data applicable to the module being loaded - -* opts.extensions - array of file extensions to search in order - -* opts.includeCoreModules - set to `false` to exclude node core modules (e.g. `fs`) from the search - -* opts.readFile - how to read files asynchronously - -* opts.isFile - function to asynchronously test whether a file exists - -* opts.isDirectory - function to asynchronously test whether a directory exists - -* opts.realpath - function to asynchronously resolve a potential symlink to its real path - -* `opts.packageFilter(pkg, pkgfile, dir)` - transform the parsed package.json contents before looking at the "main" field - * pkg - package data - * pkgfile - path to package.json - * dir - directory for package.json - -* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package - * pkg - package data - * path - the path being resolved - * relativePath - the path relative from the package.json location - * returns - a relative path that will be joined from the package.json location - -* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) - - For advanced users, `paths` can also be a `opts.paths(request, start, opts)` function - * request - the import specifier being resolved - * start - lookup path - * getNodeModulesDirs - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution - * opts - the resolution options - -* `opts.packageIterator(request, start, opts)` - return the list of candidate paths where the packages sources may be found (probably don't use this) - * request - the import specifier being resolved - * start - lookup path - * getPackageCandidates - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution - * opts - the resolution options - -* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` - -* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. -This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. -**Note:** this property is currently `true` by default but it will be changed to -`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. 
- -default `opts` values: - -```js -{ - paths: [], - basedir: __dirname, - extensions: ['.js'], - includeCoreModules: true, - readFile: fs.readFile, - isFile: function isFile(file, cb) { - fs.stat(file, function (err, stat) { - if (!err) { - return cb(null, stat.isFile() || stat.isFIFO()); - } - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); - return cb(err); - }); - }, - isDirectory: function isDirectory(dir, cb) { - fs.stat(dir, function (err, stat) { - if (!err) { - return cb(null, stat.isDirectory()); - } - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); - return cb(err); - }); - }, - realpath: function realpath(file, cb) { - var realpath = typeof fs.realpath.native === 'function' ? fs.realpath.native : fs.realpath; - realpath(file, function (realPathErr, realPath) { - if (realPathErr && realPathErr.code !== 'ENOENT') cb(realPathErr); - else cb(null, realPathErr ? file : realPath); - }); - }, - moduleDirectory: 'node_modules', - preserveSymlinks: true -} -``` - -## resolve.sync(id, opts) - -Synchronously resolve the module path string `id`, returning the result and -throwing an error when `id` can't be resolved. - -options are: - -* opts.basedir - directory to begin resolving from - -* opts.extensions - array of file extensions to search in order - -* opts.includeCoreModules - set to `false` to exclude node core modules (e.g. `fs`) from the search - -* opts.readFile - how to read files synchronously - -* opts.isFile - function to synchronously test whether a file exists - -* opts.isDirectory - function to synchronously test whether a directory exists - -* opts.realpathSync - function to synchronously resolve a potential symlink to its real path - -* `opts.packageFilter(pkg, dir)` - transform the parsed package.json contents before looking at the "main" field - * pkg - package data - * dir - directory for package.json (Note: the second argument will change to "pkgfile" in v2) - -* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package - * pkg - package data - * path - the path being resolved - * relativePath - the path relative from the package.json location - * returns - a relative path that will be joined from the package.json location - -* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) - - For advanced users, `paths` can also be a `opts.paths(request, start, opts)` function - * request - the import specifier being resolved - * start - lookup path - * getNodeModulesDirs - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution - * opts - the resolution options - -* `opts.packageIterator(request, start, opts)` - return the list of candidate paths where the packages sources may be found (probably don't use this) - * request - the import specifier being resolved - * start - lookup path - * getPackageCandidates - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution - * opts - the resolution options - -* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` - -* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. -This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. 
-**Note:** this property is currently `true` by default but it will be changed to -`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. - -default `opts` values: - -```js -{ - paths: [], - basedir: __dirname, - extensions: ['.js'], - includeCoreModules: true, - readFileSync: fs.readFileSync, - isFile: function isFile(file) { - try { - var stat = fs.statSync(file); - } catch (e) { - if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; - throw e; - } - return stat.isFile() || stat.isFIFO(); - }, - isDirectory: function isDirectory(dir) { - try { - var stat = fs.statSync(dir); - } catch (e) { - if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; - throw e; - } - return stat.isDirectory(); - }, - realpathSync: function realpathSync(file) { - try { - var realpath = typeof fs.realpathSync.native === 'function' ? fs.realpathSync.native : fs.realpathSync; - return realpath(file); - } catch (realPathErr) { - if (realPathErr.code !== 'ENOENT') { - throw realPathErr; - } - } - return file; - }, - moduleDirectory: 'node_modules', - preserveSymlinks: true -} -``` - -# install - -With [npm](https://npmjs.org) do: - -```sh -npm install resolve -``` - -# license - -MIT diff --git a/tools/node_modules/@babel/core/package.json b/tools/node_modules/@babel/core/package.json index 5f93e494ea7963..93af09de9c2362 100644 --- a/tools/node_modules/@babel/core/package.json +++ b/tools/node_modules/@babel/core/package.json @@ -1,6 +1,6 @@ { "name": "@babel/core", - "version": "7.12.9", + "version": "7.12.10", "description": "Babel compiler core.", "main": "lib/index.js", "author": "Sebastian McKenzie <sebmck@gmail.com>", @@ -44,23 +44,22 @@ }, "dependencies": { "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.5", + "@babel/generator": "^7.12.10", "@babel/helper-module-transforms": "^7.12.1", "@babel/helpers": "^7.12.5", - "@babel/parser": "^7.12.7", + "@babel/parser": "^7.12.10", "@babel/template": "^7.12.7", - "@babel/traverse": "^7.12.9", - "@babel/types": "^7.12.7", + "@babel/traverse": "^7.12.10", + "@babel/types": "^7.12.10", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", "json5": "^2.1.2", "lodash": "^4.17.19", - "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" }, "devDependencies": { - "@babel/helper-transform-fixture-test-runner": "7.12.1" + "@babel/helper-transform-fixture-test-runner": "7.12.10" } } \ No newline at end of file diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/LICENSE b/tools/node_modules/@babel/plugin-syntax-top-level-await/LICENSE new file mode 100644 index 00000000000000..f31575ec773bb1 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/README.md b/tools/node_modules/@babel/plugin-syntax-top-level-await/README.md new file mode 100644 index 00000000000000..476cb27d6bbb77 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/README.md @@ -0,0 +1,19 @@ +# @babel/plugin-syntax-top-level-await + +> Allow parsing of top-level await in modules + +See our website [@babel/plugin-syntax-top-level-await](https://babeljs.io/docs/en/next/babel-plugin-syntax-top-level-await.html) for more information. + +## Install + +Using npm: + +```sh +npm install --save-dev @babel/plugin-syntax-top-level-await +``` + +or using yarn: + +```sh +yarn add @babel/plugin-syntax-top-level-await --dev +``` diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/lib/index.js b/tools/node_modules/@babel/plugin-syntax-top-level-await/lib/index.js new file mode 100644 index 00000000000000..a57cab715c0b45 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/lib/index.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _helperPluginUtils = require("@babel/helper-plugin-utils"); + +var _default = (0, _helperPluginUtils.declare)(api => { + api.assertVersion(7); + return { + name: "syntax-top-level-await", + + manipulateOptions(opts, parserOpts) { + parserOpts.plugins.push("topLevelAwait"); + } + + }; +}); + +exports.default = _default; \ No newline at end of file diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/LICENSE b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/LICENSE new file mode 100644 index 00000000000000..f31575ec773bb1 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/README.md b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/README.md new file mode 100644 index 00000000000000..4e6303e08962a2 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/README.md @@ -0,0 +1,19 @@ +# @babel/helper-plugin-utils + +> General utilities for plugins to use + +See our website [@babel/helper-plugin-utils](https://babeljs.io/docs/en/next/babel-helper-plugin-utils.html) for more information. + +## Install + +Using npm: + +```sh +npm install --save-dev @babel/helper-plugin-utils +``` + +or using yarn: + +```sh +yarn add @babel/helper-plugin-utils --dev +``` diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/lib/index.js b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/lib/index.js new file mode 100644 index 00000000000000..adb0656fa0e768 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/lib/index.js @@ -0,0 +1,77 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.declare = declare; + +function declare(builder) { + return (api, options, dirname) => { + if (!api.assertVersion) { + api = Object.assign(copyApiObject(api), { + assertVersion(range) { + throwVersionError(range, api.version); + } + + }); + } + + return builder(api, options || {}, dirname); + }; +} + +function copyApiObject(api) { + let proto = null; + + if (typeof api.version === "string" && /^7\./.test(api.version)) { + proto = Object.getPrototypeOf(api); + + if (proto && (!has(proto, "version") || !has(proto, "transform") || !has(proto, "template") || !has(proto, "types"))) { + proto = null; + } + } + + return Object.assign({}, proto, api); +} + +function has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +function throwVersionError(range, version) { + if (typeof range === "number") { + if (!Number.isInteger(range)) { + throw new Error("Expected string or integer value."); + } + + range = `^${range}.0.0-0`; + } + + if (typeof range !== "string") { + throw new Error("Expected string or integer value."); + } + + const limit = Error.stackTraceLimit; + + if (typeof limit === "number" && limit < 25) { + Error.stackTraceLimit = 25; + } + + let err; + + if (version.slice(0, 2) === "7.") { + err = new Error(`Requires Babel "^7.0.0-beta.41", but was loaded with "${version}". ` + `You'll need to update your @babel/core version.`); + } else { + err = new Error(`Requires Babel "${range}", but was loaded with "${version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. 
Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`); + } + + if (typeof limit === "number") { + Error.stackTraceLimit = limit; + } + + throw Object.assign(err, { + code: "BABEL_VERSION_UNSUPPORTED", + version, + range + }); +} \ No newline at end of file diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/package.json b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/package.json new file mode 100644 index 00000000000000..282d93f719fe54 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/node_modules/@babel/helper-plugin-utils/package.json @@ -0,0 +1,18 @@ +{ + "name": "@babel/helper-plugin-utils", + "version": "7.10.4", + "description": "General utilities for plugins to use", + "author": "Logan Smyth <loganfsmyth@gmail.com>", + "homepage": "https://babeljs.io/", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-helper-plugin-utils" + }, + "main": "lib/index.js", + "gitHead": "7fd40d86a0d03ff0e9c3ea16b29689945433d4df" +} diff --git a/tools/node_modules/@babel/plugin-syntax-top-level-await/package.json b/tools/node_modules/@babel/plugin-syntax-top-level-await/package.json new file mode 100644 index 00000000000000..33b3238ea86d98 --- /dev/null +++ b/tools/node_modules/@babel/plugin-syntax-top-level-await/package.json @@ -0,0 +1,27 @@ +{ + "name": "@babel/plugin-syntax-top-level-await", + "version": "7.12.1", + "description": "Allow parsing of top-level await in modules", + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-plugin-syntax-top-level-await" + }, + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "main": "lib/index.js", + "keywords": [ + "babel-plugin" + ], + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + }, + "devDependencies": { + "@babel/core": "^7.12.1" + } +} \ No newline at end of file diff --git a/tools/node_modules/eslint/README.md b/tools/node_modules/eslint/README.md index f80edf7191fa68..73a30990e50974 100644 --- a/tools/node_modules/eslint/README.md +++ b/tools/node_modules/eslint/README.md @@ -158,6 +158,7 @@ ESLint follows [semantic versioning](https://semver.org). However, due to the na * A bug fix in a rule that results in ESLint reporting more linting errors. * A new rule is created. * A new option to an existing rule that does not result in ESLint reporting more linting errors by default. + * A new addition to an existing rule to support a newly-added language feature (within the last 12 months) that will result in ESLint reporting more linting errors by default. * An existing rule is deprecated. * A new CLI capability is created. * New capabilities to the public API are added (new classes, new methods, new arguments to existing methods, etc.). 
@@ -262,7 +263,7 @@ The following companies, organizations, and individuals support ESLint's ongoing <p><a href="https://automattic.com"><img src="https://images.opencollective.com/photomatt/ff91f0b/logo.png" alt="Automattic" height="undefined"></a></p><h3>Gold Sponsors</h3> <p><a href="https://google.com/chrome"><img src="https://images.opencollective.com/chrome/dc55bd4/logo.png" alt="Chrome's Web Framework & Tools Performance Fund" height="96"></a> <a href="https://www.shopify.com"><img src="https://images.opencollective.com/shopify/e780cd4/logo.png" alt="Shopify" height="96"></a> <a href="https://www.salesforce.com"><img src="https://images.opencollective.com/salesforce/ca8f997/logo.png" alt="Salesforce" height="96"></a> <a href="https://www.airbnb.com/"><img src="https://images.opencollective.com/airbnb/d327d66/logo.png" alt="Airbnb" height="96"></a> <a href="https://aka.ms/microsoftfossfund"><img src="https://avatars1.githubusercontent.com/u/67931232?u=7fddc652a464d7151b97e8f108392af7d54fa3e8&v=4" alt="Microsoft FOSS Fund Sponsorships" height="96"></a></p><h3>Silver Sponsors</h3> <p><a href="https://liftoff.io/"><img src="https://images.opencollective.com/liftoff/5c4fa84/logo.png" alt="Liftoff" height="64"></a> <a href="https://www.ampproject.org/"><img src="https://images.opencollective.com/amp/c8a3b25/logo.png" alt="AMP Project" height="64"></a></p><h3>Bronze Sponsors</h3> -<p><a href="https://thestandarddaily.com/"><img src="https://images.opencollective.com/eric-watson/db4e598/avatar.png" alt="The Standard Daily" height="32"></a> <a href="https://writersperhour.com"><img src="https://images.opencollective.com/writersperhour/5787d4b/logo.png" alt="Writers Per Hour" height="32"></a> <a href="https://www.betacalendars.com/printable-calendar"><img src="https://images.opencollective.com/betacalendars/9334b33/logo.png" alt="2021 calendar" height="32"></a> <a href="https://buy.fineproxy.org/eng/"><img src="https://images.opencollective.com/buy-fineproxy-org/b282e39/logo.png" alt="Buy.Fineproxy.Org" height="32"></a> <a href="https://www.veikkaajat.com"><img src="https://images.opencollective.com/veikkaajat/3777f94/logo.png" alt="Veikkaajat.com" height="32"></a> <a href="https://www.crosswordsolver.org/anagram-solver/"><img src="https://images.opencollective.com/anagram-solver/2666271/logo.png" alt="Anagram Solver" height="32"></a> <a href="null"><img src="https://images.opencollective.com/bugsnag-stability-monitoring/c2cef36/logo.png" alt="Bugsnag Stability Monitoring" height="32"></a> <a href="https://mixpanel.com"><img src="https://images.opencollective.com/mixpanel/cd682f7/logo.png" alt="Mixpanel" height="32"></a> <a href="https://www.vpsserver.com"><img src="https://images.opencollective.com/vpsservercom/logo.png" alt="VPS Server" height="32"></a> <a href="https://icons8.com"><img src="https://images.opencollective.com/icons8/7fa1641/logo.png" alt="Icons8: free icons, photos, illustrations, and music" height="32"></a> <a href="https://discordapp.com"><img src="https://images.opencollective.com/discordapp/7e3d9a9/logo.png" alt="Discord" height="32"></a> <a href="https://themeisle.com"><img src="https://images.opencollective.com/themeisle/d5592fe/logo.png" alt="ThemeIsle" height="32"></a> <a href="https://www.firesticktricks.com"><img src="https://images.opencollective.com/fire-stick-tricks/b8fbe2c/logo.png" alt="Fire Stick Tricks" height="32"></a></p> +<p><a href="https://streamat.se"><img src="https://images.opencollective.com/streamat/46890db/logo.png" alt="Streamat" height="32"></a> 
<a href="https://thestandarddaily.com/"><img src="https://images.opencollective.com/eric-watson/db4e598/avatar.png" alt="The Standard Daily" height="32"></a> <a href="https://writersperhour.com"><img src="https://images.opencollective.com/writersperhour/5787d4b/logo.png" alt="Writers Per Hour" height="32"></a> <a href="https://www.betacalendars.com/february-calendar.html"><img src="https://images.opencollective.com/betacalendars/9334b33/logo.png" alt="February 2021 calendar" height="32"></a> <a href="https://buy.fineproxy.org/eng/"><img src="https://images.opencollective.com/buy-fineproxy-org/b282e39/logo.png" alt="Buy.Fineproxy.Org" height="32"></a> <a href="https://www.crosswordsolver.org/anagram-solver/"><img src="https://images.opencollective.com/anagram-solver/2666271/logo.png" alt="Anagram Solver" height="32"></a> <a href="null"><img src="https://images.opencollective.com/bugsnag-stability-monitoring/c2cef36/logo.png" alt="Bugsnag Stability Monitoring" height="32"></a> <a href="https://mixpanel.com"><img src="https://images.opencollective.com/mixpanel/cd682f7/logo.png" alt="Mixpanel" height="32"></a> <a href="https://www.vpsserver.com"><img src="https://images.opencollective.com/vpsservercom/logo.png" alt="VPS Server" height="32"></a> <a href="https://icons8.com"><img src="https://images.opencollective.com/icons8/7fa1641/logo.png" alt="Icons8: free icons, photos, illustrations, and music" height="32"></a> <a href="https://discordapp.com"><img src="https://images.opencollective.com/discordapp/7e3d9a9/logo.png" alt="Discord" height="32"></a> <a href="https://themeisle.com"><img src="https://images.opencollective.com/themeisle/d5592fe/logo.png" alt="ThemeIsle" height="32"></a> <a href="https://www.firesticktricks.com"><img src="https://images.opencollective.com/fire-stick-tricks/b8fbe2c/logo.png" alt="Fire Stick Tricks" height="32"></a></p> <!--sponsorsend--> ## <a name="technology-sponsors"></a>Technology Sponsors diff --git a/tools/node_modules/eslint/lib/init/config-initializer.js b/tools/node_modules/eslint/lib/init/config-initializer.js index f7d4cc7a171fe9..6f62e7db87e7f7 100644 --- a/tools/node_modules/eslint/lib/init/config-initializer.js +++ b/tools/node_modules/eslint/lib/init/config-initializer.js @@ -565,7 +565,8 @@ function promptUser() { { type: "toggle", name: "installESLint", - message(answers) { + message() { + const { answers } = this.state; const verb = semver.ltr(answers.localESLintVersion, answers.requiredESLintVersionRange) ? 
"upgrade" : "downgrade"; diff --git a/tools/node_modules/eslint/lib/rules/complexity.js b/tools/node_modules/eslint/lib/rules/complexity.js index 7fc8bf9bc2ea8c..5d62c6ff44b8f1 100644 --- a/tools/node_modules/eslint/lib/rules/complexity.js +++ b/tools/node_modules/eslint/lib/rules/complexity.js @@ -153,7 +153,13 @@ module.exports = { IfStatement: increaseComplexity, SwitchCase: increaseSwitchComplexity, WhileStatement: increaseComplexity, - DoWhileStatement: increaseComplexity + DoWhileStatement: increaseComplexity, + + AssignmentExpression(node) { + if (astUtils.isLogicalAssignmentOperator(node.operator)) { + increaseComplexity(); + } + } }; } diff --git a/tools/node_modules/eslint/lib/rules/no-extra-parens.js b/tools/node_modules/eslint/lib/rules/no-extra-parens.js index 8d358d23ad3b50..19c6fced79d4ff 100644 --- a/tools/node_modules/eslint/lib/rules/no-extra-parens.js +++ b/tools/node_modules/eslint/lib/rules/no-extra-parens.js @@ -895,6 +895,22 @@ module.exports = { } if (node.init) { + + if (node.init.type !== "VariableDeclaration") { + const firstToken = sourceCode.getFirstToken(node.init, astUtils.isNotOpeningParenToken); + + if ( + firstToken.value === "let" && + astUtils.isOpeningBracketToken( + sourceCode.getTokenAfter(firstToken, astUtils.isNotClosingParenToken) + ) + ) { + + // ForStatement#init expression cannot start with `let[`. + tokensToIgnore.add(firstToken); + } + } + startNewReportsBuffering(); if (hasExcessParens(node.init)) { diff --git a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js index 6494a041cb8dd0..c7ff6a09a93881 100644 --- a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js +++ b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/lib/config-array-factory.js @@ -281,14 +281,15 @@ function loadESLintIgnoreFile(filePath) { * Creates an error to notify about a missing config to extend from. * @param {string} configName The name of the missing config. * @param {string} importerName The name of the config that imported the missing config + * @param {string} messageTemplate The text template to source error strings from. 
* @returns {Error} The error object to throw * @private */ -function configMissingError(configName, importerName) { +function configInvalidError(configName, importerName, messageTemplate) { return Object.assign( new Error(`Failed to load config "${configName}" to extend from.`), { - messageTemplate: "extend-config-missing", + messageTemplate, messageData: { configName, importerName } } ); @@ -809,7 +810,7 @@ class ConfigArrayFactory { }); } - throw configMissingError(extendName, ctx.name); + throw configInvalidError(extendName, ctx.name, "extend-config-missing"); } /** @@ -821,6 +822,11 @@ class ConfigArrayFactory { */ _loadExtendedPluginConfig(extendName, ctx) { const slashIndex = extendName.lastIndexOf("/"); + + if (slashIndex === -1) { + throw configInvalidError(extendName, ctx.filePath, "plugin-invalid"); + } + const pluginName = extendName.slice("plugin:".length, slashIndex); const configName = extendName.slice(slashIndex + 1); @@ -841,7 +847,7 @@ class ConfigArrayFactory { }); } - throw plugin.error || configMissingError(extendName, ctx.filePath); + throw plugin.error || configInvalidError(extendName, ctx.filePath, "extend-config-missing"); } /** @@ -874,7 +880,7 @@ class ConfigArrayFactory { } catch (error) { /* istanbul ignore else */ if (error && error.code === "MODULE_NOT_FOUND") { - throw configMissingError(extendName, ctx.filePath); + throw configInvalidError(extendName, ctx.filePath, "extend-config-missing"); } throw error; } diff --git a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json index e33d83420dc0b7..20f43070c13d3e 100644 --- a/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json +++ b/tools/node_modules/eslint/node_modules/@eslint/eslintrc/package.json @@ -14,7 +14,7 @@ "ignore": "^4.0.6", "import-fresh": "^3.2.1", "js-yaml": "^3.13.1", - "lodash": "^4.17.19", + "lodash": "^4.17.20", "minimatch": "^3.0.4", "strip-json-comments": "^3.1.1" }, @@ -65,5 +65,5 @@ "publish-release": "eslint-publish-release", "test": "mocha -R progress -c 'tests/lib/**/*.js'" }, - "version": "0.2.2" + "version": "0.3.0" } \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/table/README.md b/tools/node_modules/eslint/node_modules/table/README.md index b3942042c18cef..22a79962a1a2ee 100644 --- a/tools/node_modules/eslint/node_modules/table/README.md +++ b/tools/node_modules/eslint/node_modules/table/README.md @@ -1,7 +1,6 @@ <a name="table"></a> # Table -[![GitSpo Mentions](https://gitspo.com/badges/mentions/gajus/table?style=flat-square)](https://gitspo.com/mentions/gajus/table) [![Travis build status](http://img.shields.io/travis/gajus/table/master.svg?style=flat-square)](https://travis-ci.org/gajus/table) [![Coveralls](https://img.shields.io/coveralls/gajus/table.svg?style=flat-square)](https://coveralls.io/github/gajus/table) [![NPM version](http://img.shields.io/npm/v/table.svg?style=flat-square)](https://www.npmjs.org/package/table) diff --git a/tools/node_modules/eslint/node_modules/table/dist/schemas/config.json b/tools/node_modules/eslint/node_modules/table/dist/schemas/config.json index 0918dcc7648812..10fc74ab93cfc1 100644 --- a/tools/node_modules/eslint/node_modules/table/dist/schemas/config.json +++ b/tools/node_modules/eslint/node_modules/table/dist/schemas/config.json @@ -14,6 +14,9 @@ }, "drawHorizontalLine": { "typeof": "function" + }, + "singleLine": { + "typeof": "boolean" } }, "additionalProperties": false diff --git 
a/tools/node_modules/eslint/node_modules/table/dist/validators.js b/tools/node_modules/eslint/node_modules/table/dist/validators.js index c03925ab5d323d..e17b964fe9be3c 100644 --- a/tools/node_modules/eslint/node_modules/table/dist/validators.js +++ b/tools/node_modules/eslint/node_modules/table/dist/validators.js @@ -16,6 +16,9 @@ const schema13 = { }, "drawHorizontalLine": { "typeof": "function" + }, + "singleLine": { + "typeof": "boolean" } }, "additionalProperties": false @@ -797,7 +800,7 @@ function validate43(data, { let errors = 0; if (data && typeof data == "object" && !Array.isArray(data)) { for (const key0 in data) { - if (!((((key0 === "border") || (key0 === "columns")) || (key0 === "columnDefault")) || (key0 === "drawHorizontalLine"))) { + if (!(((((key0 === "border") || (key0 === "columns")) || (key0 === "columnDefault")) || (key0 === "drawHorizontalLine")) || (key0 === "singleLine"))) { const err0 = { keyword: "additionalProperties", dataPath, @@ -865,8 +868,25 @@ function validate43(data, { errors++; } } + if (data.singleLine !== undefined) { + if (typeof data.singleLine != "boolean") { + const err2 = { + keyword: "typeof", + dataPath: dataPath + "/singleLine", + schemaPath: "#/properties/singleLine/typeof", + params: {}, + message: "should pass \"typeof\" keyword validation" + }; + if (vErrors === null) { + vErrors = [err2]; + } else { + vErrors.push(err2); + } + errors++; + } + } } else { - const err2 = { + const err3 = { keyword: "type", dataPath, schemaPath: "#/type", @@ -876,9 +896,9 @@ function validate43(data, { message: "should be object" }; if (vErrors === null) { - vErrors = [err2]; + vErrors = [err3]; } else { - vErrors.push(err2); + vErrors.push(err3); } errors++; } diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md index ee3c1ab7da9997..65368f5fe1f9c8 100644 --- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md +++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/README.md @@ -74,6 +74,7 @@ Please review [Contributing guidelines](./CONTRIBUTING.md) and [Code components] - [Frequently Asked Questions](./docs/faq.md) - [Using in browser](#using-in-browser) - [Content Security Policy](./docs/security.md#content-security-policy) +- [Using in ES5 environment](#using-in-es5-environment) - [Command line interface](#command-line-interface) - [API reference](./docs/api.md) - [Methods](./docs/api.md#ajv-constructor-and-methods) @@ -307,6 +308,19 @@ The browser bundle is available on [cdnjs](https://cdnjs.com/libraries/ajv). **Please note**: some frameworks, e.g. Dojo, may redefine global require in a way that is not compatible with CommonJS module format. In this case Ajv bundle has to be loaded before the framework and then you can use global `ajv` (see issue [#234](https://github.com/ajv-validator/ajv/issues/234)). +## Using in ES5 environment + +You need to: + +- recompile Typescript to ES5 target - it is set to 2018 in the bundled compiled code. +- generate ES5 validation code: + +```javascript +const ajv = new Ajv({code: {es5: true}}) +``` + +See [Advanced options](https://github.com/ajv-validator/ajv/blob/master/docs/api.md#advanced-options). + ## Command line interface CLI is available as a separate npm package [ajv-cli](https://github.com/ajv-validator/ajv-cli). 
It supports: diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/iterate.js b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/iterate.js index 6ffdd11c523edc..fb97c64def071e 100644 --- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/iterate.js +++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/dist/compile/validate/iterate.js @@ -4,7 +4,6 @@ exports.schemaKeywords = void 0; const applicability_1 = require("./applicability"); const dataType_1 = require("./dataType"); const defaults_1 = require("./defaults"); -const dataType_2 = require("./dataType"); const keyword_1 = require("./keyword"); const util_1 = require("../util"); const _1 = require("."); @@ -31,7 +30,7 @@ function schemaKeywords(it, types, typeErrors, errsCount) { iterateKeywords(it, group); if (types.length === 1 && types[0] === group.type && typeErrors) { gen.else(); - dataType_2.reportTypeError(it); + dataType_1.reportTypeError(it); } gen.endIf(); } diff --git a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json index 977045fa8c2dbd..032498f33b03b8 100644 --- a/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json +++ b/tools/node_modules/eslint/node_modules/table/node_modules/ajv/package.json @@ -117,5 +117,5 @@ }, "tonicExampleFilename": ".tonic_example.js", "types": "dist/ajv.d.ts", - "version": "7.0.2" + "version": "7.0.3" } \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/table/package.json b/tools/node_modules/eslint/node_modules/table/package.json index 3934d31215e424..892f772a2f312a 100644 --- a/tools/node_modules/eslint/node_modules/table/package.json +++ b/tools/node_modules/eslint/node_modules/table/package.json @@ -87,5 +87,5 @@ "lint": "npm run build && eslint ./src ./test && flow", "test": "mocha --require @babel/register" }, - "version": "6.0.6" + "version": "6.0.7" } \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/uri-js/README.md b/tools/node_modules/eslint/node_modules/uri-js/README.md index 3dbe4054f2577a..43e648bbad5c85 100755 --- a/tools/node_modules/eslint/node_modules/uri-js/README.md +++ b/tools/node_modules/eslint/node_modules/uri-js/README.md @@ -97,6 +97,8 @@ URI.js supports inserting custom [scheme](http://en.wikipedia.org/wiki/URI_schem * http \[[RFC 2616](http://www.ietf.org/rfc/rfc2616.txt)\] * https \[[RFC 2818](http://www.ietf.org/rfc/rfc2818.txt)\] +* ws \[[RFC 6455](http://www.ietf.org/rfc/rfc6455.txt)\] +* wss \[[RFC 6455](http://www.ietf.org/rfc/rfc6455.txt)\] * mailto \[[RFC 6068](http://www.ietf.org/rfc/rfc6068.txt)\] * urn \[[RFC 2141](http://www.ietf.org/rfc/rfc2141.txt)\] * urn:uuid \[[RFC 4122](http://www.ietf.org/rfc/rfc4122.txt)\] @@ -156,7 +158,7 @@ URI.js supports inserting custom [scheme](http://en.wikipedia.org/wiki/URI_schem //returns: //{ // scheme : "urn", - // nid : "example", + // nid : "uuid", // uuid : "f81d4fae-7dec-11d0-a765-00a0c91e6bf6", //} diff --git a/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.js b/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.js index 47f42f8aa16696..0706116fef7e30 100755 --- a/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.js +++ b/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.js @@ -1,4 +1,4 @@ -/** @license URI.js v4.4.0 (c) 2011 Gary Court. 
License: http://github.com/garycourt/uri-js */ +/** @license URI.js v4.4.1 (c) 2011 Gary Court. License: http://github.com/garycourt/uri-js */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : typeof define === 'function' && define.amd ? define(['exports'], factory) : diff --git a/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.min.js b/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.min.js index 09edffb7ccbb63..fcd845862d917f 100755 --- a/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.min.js +++ b/tools/node_modules/eslint/node_modules/uri-js/dist/es5/uri.all.min.js @@ -1,3 +1,3 @@ -/** @license URI.js v4.4.0 (c) 2011 Gary Court. License: http://github.com/garycourt/uri-js */ +/** @license URI.js v4.4.1 (c) 2011 Gary Court. License: http://github.com/garycourt/uri-js */ !function(e,r){"object"==typeof exports&&"undefined"!=typeof module?r(exports):"function"==typeof define&&define.amd?define(["exports"],r):r(e.URI=e.URI||{})}(this,function(e){"use strict";function r(){for(var e=arguments.length,r=Array(e),n=0;n<e;n++)r[n]=arguments[n];if(r.length>1){r[0]=r[0].slice(0,-1);for(var t=r.length-1,o=1;o<t;++o)r[o]=r[o].slice(1,-1);return r[t]=r[t].slice(1),r.join("")}return r[0]}function n(e){return"(?:"+e+")"}function t(e){return e===undefined?"undefined":null===e?"null":Object.prototype.toString.call(e).split(" ").pop().split("]").shift().toLowerCase()}function o(e){return e.toUpperCase()}function a(e){return e!==undefined&&null!==e?e instanceof Array?e:"number"!=typeof e.length||e.split||e.setInterval||e.call?[e]:Array.prototype.slice.call(e):[]}function i(e,r){var n=e;if(r)for(var t in r)n[t]=r[t];return n}function u(e){var t=r("[0-9]","[A-Fa-f]"),o=n(n("%[EFef]"+t+"%"+t+t+"%"+t+t)+"|"+n("%[89A-Fa-f]"+t+"%"+t+t)+"|"+n("%"+t+t)),a="[\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\=]",i=r("[\\:\\/\\?\\#\\[\\]\\@]",a),u=e?"[\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]":"[]",s=e?"[\\uE000-\\uF8FF]":"[]",f=r("[A-Za-z]","[0-9]","[\\-\\.\\_\\~]",u),c=n(n("25[0-5]")+"|"+n("2[0-4][0-9]")+"|"+n("1[0-9][0-9]")+"|"+n("0?[1-9][0-9]")+"|0?0?[0-9]"),p=n(c+"\\."+c+"\\."+c+"\\."+c),h=n(t+"{1,4}"),d=n(n(h+"\\:"+h)+"|"+p),l=n(n(h+"\\:")+"{6}"+d),m=n("\\:\\:"+n(h+"\\:")+"{5}"+d),g=n(n(h)+"?\\:\\:"+n(h+"\\:")+"{4}"+d),v=n(n(n(h+"\\:")+"{0,1}"+h)+"?\\:\\:"+n(h+"\\:")+"{3}"+d),E=n(n(n(h+"\\:")+"{0,2}"+h)+"?\\:\\:"+n(h+"\\:")+"{2}"+d),C=n(n(n(h+"\\:")+"{0,3}"+h)+"?\\:\\:"+h+"\\:"+d),y=n(n(n(h+"\\:")+"{0,4}"+h)+"?\\:\\:"+d),S=n(n(n(h+"\\:")+"{0,5}"+h)+"?\\:\\:"+h),A=n(n(n(h+"\\:")+"{0,6}"+h)+"?\\:\\:"),D=n([l,m,g,v,E,C,y,S,A].join("|")),w=n(n(f+"|"+o)+"+");return{NOT_SCHEME:new RegExp(r("[^]","[A-Za-z]","[0-9]","[\\+\\-\\.]"),"g"),NOT_USERINFO:new RegExp(r("[^\\%\\:]",f,a),"g"),NOT_HOST:new RegExp(r("[^\\%\\[\\]\\:]",f,a),"g"),NOT_PATH:new RegExp(r("[^\\%\\/\\:\\@]",f,a),"g"),NOT_PATH_NOSCHEME:new RegExp(r("[^\\%\\/\\@]",f,a),"g"),NOT_QUERY:new RegExp(r("[^\\%]",f,a,"[\\:\\@\\/\\?]",s),"g"),NOT_FRAGMENT:new RegExp(r("[^\\%]",f,a,"[\\:\\@\\/\\?]"),"g"),ESCAPE:new RegExp(r("[^]",f,a),"g"),UNRESERVED:new RegExp(f,"g"),OTHER_CHARS:new RegExp(r("[^\\%]",f,i),"g"),PCT_ENCODED:new RegExp(o,"g"),IPV4ADDRESS:new RegExp("^("+p+")$"),IPV6ADDRESS:new RegExp("^\\[?("+D+")"+n(n("\\%25|\\%(?!"+t+"{2})")+"("+w+")")+"?\\]?$")}}function s(e){throw new RangeError(H[e])}function f(e,r){for(var n=[],t=e.length;t--;)n[t]=r(e[t]);return 
n}function c(e,r){var n=e.split("@"),t="";return n.length>1&&(t=n[0]+"@",e=n[1]),e=e.replace(j,"."),t+f(e.split("."),r).join(".")}function p(e){for(var r=[],n=0,t=e.length;n<t;){var o=e.charCodeAt(n++);if(o>=55296&&o<=56319&&n<t){var a=e.charCodeAt(n++);56320==(64512&a)?r.push(((1023&o)<<10)+(1023&a)+65536):(r.push(o),n--)}else r.push(o)}return r}function h(e){var r=e.charCodeAt(0);return r<16?"%0"+r.toString(16).toUpperCase():r<128?"%"+r.toString(16).toUpperCase():r<2048?"%"+(r>>6|192).toString(16).toUpperCase()+"%"+(63&r|128).toString(16).toUpperCase():"%"+(r>>12|224).toString(16).toUpperCase()+"%"+(r>>6&63|128).toString(16).toUpperCase()+"%"+(63&r|128).toString(16).toUpperCase()}function d(e){for(var r="",n=0,t=e.length;n<t;){var o=parseInt(e.substr(n+1,2),16);if(o<128)r+=String.fromCharCode(o),n+=3;else if(o>=194&&o<224){if(t-n>=6){var a=parseInt(e.substr(n+4,2),16);r+=String.fromCharCode((31&o)<<6|63&a)}else r+=e.substr(n,6);n+=6}else if(o>=224){if(t-n>=9){var i=parseInt(e.substr(n+4,2),16),u=parseInt(e.substr(n+7,2),16);r+=String.fromCharCode((15&o)<<12|(63&i)<<6|63&u)}else r+=e.substr(n,9);n+=9}else r+=e.substr(n,3),n+=3}return r}function l(e,r){function n(e){var n=d(e);return n.match(r.UNRESERVED)?n:e}return e.scheme&&(e.scheme=String(e.scheme).replace(r.PCT_ENCODED,n).toLowerCase().replace(r.NOT_SCHEME,"")),e.userinfo!==undefined&&(e.userinfo=String(e.userinfo).replace(r.PCT_ENCODED,n).replace(r.NOT_USERINFO,h).replace(r.PCT_ENCODED,o)),e.host!==undefined&&(e.host=String(e.host).replace(r.PCT_ENCODED,n).toLowerCase().replace(r.NOT_HOST,h).replace(r.PCT_ENCODED,o)),e.path!==undefined&&(e.path=String(e.path).replace(r.PCT_ENCODED,n).replace(e.scheme?r.NOT_PATH:r.NOT_PATH_NOSCHEME,h).replace(r.PCT_ENCODED,o)),e.query!==undefined&&(e.query=String(e.query).replace(r.PCT_ENCODED,n).replace(r.NOT_QUERY,h).replace(r.PCT_ENCODED,o)),e.fragment!==undefined&&(e.fragment=String(e.fragment).replace(r.PCT_ENCODED,n).replace(r.NOT_FRAGMENT,h).replace(r.PCT_ENCODED,o)),e}function m(e){return e.replace(/^0*(.*)/,"$1")||"0"}function g(e,r){var n=e.match(r.IPV4ADDRESS)||[],t=T(n,2),o=t[1];return o?o.split(".").map(m).join("."):e}function v(e,r){var n=e.match(r.IPV6ADDRESS)||[],t=T(n,3),o=t[1],a=t[2];if(o){for(var i=o.toLowerCase().split("::").reverse(),u=T(i,2),s=u[0],f=u[1],c=f?f.split(":").map(m):[],p=s.split(":").map(m),h=r.IPV4ADDRESS.test(p[p.length-1]),d=h?7:8,l=p.length-d,v=Array(d),E=0;E<d;++E)v[E]=c[E]||p[l+E]||"";h&&(v[d-1]=g(v[d-1],r));var C=v.reduce(function(e,r,n){if(!r||"0"===r){var t=e[e.length-1];t&&t.index+t.length===n?t.length++:e.push({index:n,length:1})}return e},[]),y=C.sort(function(e,r){return r.length-e.length})[0],S=void 0;if(y&&y.length>1){var A=v.slice(0,y.index),D=v.slice(y.index+y.length);S=A.join(":")+"::"+D.join(":")}else S=v.join(":");return a&&(S+="%"+a),S}return e}function E(e){var r=arguments.length>1&&arguments[1]!==undefined?arguments[1]:{},n={},t=!1!==r.iri?R:F;"suffix"===r.reference&&(e=(r.scheme?r.scheme+":":"")+"//"+e);var 
o=e.match(K);if(o){W?(n.scheme=o[1],n.userinfo=o[3],n.host=o[4],n.port=parseInt(o[5],10),n.path=o[6]||"",n.query=o[7],n.fragment=o[8],isNaN(n.port)&&(n.port=o[5])):(n.scheme=o[1]||undefined,n.userinfo=-1!==e.indexOf("@")?o[3]:undefined,n.host=-1!==e.indexOf("//")?o[4]:undefined,n.port=parseInt(o[5],10),n.path=o[6]||"",n.query=-1!==e.indexOf("?")?o[7]:undefined,n.fragment=-1!==e.indexOf("#")?o[8]:undefined,isNaN(n.port)&&(n.port=e.match(/\/\/(?:.|\n)*\:(?:\/|\?|\#|$)/)?o[4]:undefined)),n.host&&(n.host=v(g(n.host,t),t)),n.scheme!==undefined||n.userinfo!==undefined||n.host!==undefined||n.port!==undefined||n.path||n.query!==undefined?n.scheme===undefined?n.reference="relative":n.fragment===undefined?n.reference="absolute":n.reference="uri":n.reference="same-document",r.reference&&"suffix"!==r.reference&&r.reference!==n.reference&&(n.error=n.error||"URI is not a "+r.reference+" reference.");var a=J[(r.scheme||n.scheme||"").toLowerCase()];if(r.unicodeSupport||a&&a.unicodeSupport)l(n,t);else{if(n.host&&(r.domainHost||a&&a.domainHost))try{n.host=B.toASCII(n.host.replace(t.PCT_ENCODED,d).toLowerCase())}catch(i){n.error=n.error||"Host's domain name can not be converted to ASCII via punycode: "+i}l(n,F)}a&&a.parse&&a.parse(n,r)}else n.error=n.error||"URI can not be parsed.";return n}function C(e,r){var n=!1!==r.iri?R:F,t=[];return e.userinfo!==undefined&&(t.push(e.userinfo),t.push("@")),e.host!==undefined&&t.push(v(g(String(e.host),n),n).replace(n.IPV6ADDRESS,function(e,r,n){return"["+r+(n?"%25"+n:"")+"]"})),"number"!=typeof e.port&&"string"!=typeof e.port||(t.push(":"),t.push(String(e.port))),t.length?t.join(""):undefined}function y(e){for(var r=[];e.length;)if(e.match(X))e=e.replace(X,"");else if(e.match(ee))e=e.replace(ee,"/");else if(e.match(re))e=e.replace(re,"/"),r.pop();else if("."===e||".."===e)e="";else{var n=e.match(ne);if(!n)throw new Error("Unexpected dot segment condition");var t=n[0];e=e.slice(t.length),r.push(t)}return r.join("")}function S(e){var r=arguments.length>1&&arguments[1]!==undefined?arguments[1]:{},n=r.iri?R:F,t=[],o=J[(r.scheme||e.scheme||"").toLowerCase()];if(o&&o.serialize&&o.serialize(e,r),e.host)if(n.IPV6ADDRESS.test(e.host));else if(r.domainHost||o&&o.domainHost)try{e.host=r.iri?B.toUnicode(e.host):B.toASCII(e.host.replace(n.PCT_ENCODED,d).toLowerCase())}catch(u){e.error=e.error||"Host's domain name can not be converted to "+(r.iri?"Unicode":"ASCII")+" via punycode: "+u}l(e,n),"suffix"!==r.reference&&e.scheme&&(t.push(e.scheme),t.push(":"));var a=C(e,r);if(a!==undefined&&("suffix"!==r.reference&&t.push("//"),t.push(a),e.path&&"/"!==e.path.charAt(0)&&t.push("/")),e.path!==undefined){var i=e.path;r.absolutePath||o&&o.absolutePath||(i=y(i)),a===undefined&&(i=i.replace(/^\/\//,"/%2F")),t.push(i)}return e.query!==undefined&&(t.push("?"),t.push(e.query)),e.fragment!==undefined&&(t.push("#"),t.push(e.fragment)),t.join("")}function A(e,r){var n=arguments.length>2&&arguments[2]!==undefined?arguments[2]:{},t=arguments[3],o={};return 
t||(e=E(S(e,n),n),r=E(S(r,n),n)),n=n||{},!n.tolerant&&r.scheme?(o.scheme=r.scheme,o.userinfo=r.userinfo,o.host=r.host,o.port=r.port,o.path=y(r.path||""),o.query=r.query):(r.userinfo!==undefined||r.host!==undefined||r.port!==undefined?(o.userinfo=r.userinfo,o.host=r.host,o.port=r.port,o.path=y(r.path||""),o.query=r.query):(r.path?("/"===r.path.charAt(0)?o.path=y(r.path):(e.userinfo===undefined&&e.host===undefined&&e.port===undefined||e.path?e.path?o.path=e.path.slice(0,e.path.lastIndexOf("/")+1)+r.path:o.path=r.path:o.path="/"+r.path,o.path=y(o.path)),o.query=r.query):(o.path=e.path,r.query!==undefined?o.query=r.query:o.query=e.query),o.userinfo=e.userinfo,o.host=e.host,o.port=e.port),o.scheme=e.scheme),o.fragment=r.fragment,o}function D(e,r,n){var t=i({scheme:"null"},n);return S(A(E(e,t),E(r,t),t,!0),t)}function w(e,r){return"string"==typeof e?e=S(E(e,r),r):"object"===t(e)&&(e=E(S(e,r),r)),e}function b(e,r,n){return"string"==typeof e?e=S(E(e,n),n):"object"===t(e)&&(e=S(e,n)),"string"==typeof r?r=S(E(r,n),n):"object"===t(r)&&(r=S(r,n)),e===r}function x(e,r){return e&&e.toString().replace(r&&r.iri?R.ESCAPE:F.ESCAPE,h)}function O(e,r){return e&&e.toString().replace(r&&r.iri?R.PCT_ENCODED:F.PCT_ENCODED,d)}function N(e){return"boolean"==typeof e.secure?e.secure:"wss"===String(e.scheme).toLowerCase()}function I(e){var r=d(e);return r.match(he)?r:e}var F=u(!1),R=u(!0),T=function(){function e(e,r){var n=[],t=!0,o=!1,a=undefined;try{for(var i,u=e[Symbol.iterator]();!(t=(i=u.next()).done)&&(n.push(i.value),!r||n.length!==r);t=!0);}catch(s){o=!0,a=s}finally{try{!t&&u["return"]&&u["return"]()}finally{if(o)throw a}}return n}return function(r,n){if(Array.isArray(r))return r;if(Symbol.iterator in Object(r))return e(r,n);throw new TypeError("Invalid attempt to destructure non-iterable instance")}}(),_=function(e){if(Array.isArray(e)){for(var r=0,n=Array(e.length);r<e.length;r++)n[r]=e[r];return n}return Array.from(e)},P=2147483647,q=/^xn--/,U=/[^\0-\x7E]/,j=/[\x2E\u3002\uFF0E\uFF61]/g,H={overflow:"Overflow: input needs wider integers to process","not-basic":"Illegal input >= 0x80 (not a basic code point)","invalid-input":"Invalid input"},z=Math.floor,L=String.fromCharCode,$=function(e){return String.fromCodePoint.apply(String,_(e))},M=function(e){return e-48<10?e-22:e-65<26?e-65:e-97<26?e-97:36},V=function(e,r){return e+22+75*(e<26)-((0!=r)<<5)},k=function(e,r,n){var t=0;for(e=n?z(e/700):e>>1,e+=z(e/r);e>455;t+=36)e=z(e/35);return z(t+36*e/(e+38))},Z=function(e){var r=[],n=e.length,t=0,o=128,a=72,i=e.lastIndexOf("-");i<0&&(i=0);for(var u=0;u<i;++u)e.charCodeAt(u)>=128&&s("not-basic"),r.push(e.charCodeAt(u));for(var f=i>0?i+1:0;f<n;){for(var c=t,p=1,h=36;;h+=36){f>=n&&s("invalid-input");var d=M(e.charCodeAt(f++));(d>=36||d>z((P-t)/p))&&s("overflow"),t+=d*p;var l=h<=a?1:h>=a+26?26:h-a;if(d<l)break;var m=36-l;p>z(P/m)&&s("overflow"),p*=m}var g=r.length+1;a=k(t-c,g,0==c),z(t/g)>P-o&&s("overflow"),o+=z(t/g),t%=g,r.splice(t++,0,o)}return String.fromCodePoint.apply(String,r)},G=function(e){var r=[];e=p(e);var n=e.length,t=128,o=0,a=72,i=!0,u=!1,f=undefined;try{for(var c,h=e[Symbol.iterator]();!(i=(c=h.next()).done);i=!0){var d=c.value;d<128&&r.push(L(d))}}catch(U){u=!0,f=U}finally{try{!i&&h["return"]&&h["return"]()}finally{if(u)throw f}}var l=r.length,m=l;for(l&&r.push("-");m<n;){var g=P,v=!0,E=!1,C=undefined;try{for(var y,S=e[Symbol.iterator]();!(v=(y=S.next()).done);v=!0){var A=y.value;A>=t&&A<g&&(g=A)}}catch(U){E=!0,C=U}finally{try{!v&&S["return"]&&S["return"]()}finally{if(E)throw C}}var 
D=m+1;g-t>z((P-o)/D)&&s("overflow"),o+=(g-t)*D,t=g;var w=!0,b=!1,x=undefined;try{for(var O,N=e[Symbol.iterator]();!(w=(O=N.next()).done);w=!0){var I=O.value;if(I<t&&++o>P&&s("overflow"),I==t){for(var F=o,R=36;;R+=36){var T=R<=a?1:R>=a+26?26:R-a;if(F<T)break;var _=F-T,q=36-T;r.push(L(V(T+_%q,0))),F=z(_/q)}r.push(L(V(F,0))),a=k(o,D,m==l),o=0,++m}}}catch(U){b=!0,x=U}finally{try{!w&&N["return"]&&N["return"]()}finally{if(b)throw x}}++o,++t}return r.join("")},Q=function(e){return c(e,function(e){return q.test(e)?Z(e.slice(4).toLowerCase()):e})},Y=function(e){return c(e,function(e){return U.test(e)?"xn--"+G(e):e})},B={version:"2.1.0",ucs2:{decode:p,encode:$},decode:Z,encode:G,toASCII:Y,toUnicode:Q},J={},K=/^(?:([^:\/?#]+):)?(?:\/\/((?:([^\/?#@]*)@)?(\[[^\/?#\]]+\]|[^\/?#:]*)(?:\:(\d*))?))?([^?#]*)(?:\?([^#]*))?(?:#((?:.|\n|\r)*))?/i,W="".match(/(){0}/)[1]===undefined,X=/^\.\.?\//,ee=/^\/\.(\/|$)/,re=/^\/\.\.(\/|$)/,ne=/^\/?(?:.|\n)*?(?=\/|$)/,te={scheme:"http",domainHost:!0,parse:function(e,r){return e.host||(e.error=e.error||"HTTP URIs must have a host."),e},serialize:function(e,r){var n="https"===String(e.scheme).toLowerCase();return e.port!==(n?443:80)&&""!==e.port||(e.port=undefined),e.path||(e.path="/"),e}},oe={scheme:"https",domainHost:te.domainHost,parse:te.parse,serialize:te.serialize},ae={scheme:"ws",domainHost:!0,parse:function(e,r){var n=e;return n.secure=N(n),n.resourceName=(n.path||"/")+(n.query?"?"+n.query:""),n.path=undefined,n.query=undefined,n},serialize:function(e,r){if(e.port!==(N(e)?443:80)&&""!==e.port||(e.port=undefined),"boolean"==typeof e.secure&&(e.scheme=e.secure?"wss":"ws",e.secure=undefined),e.resourceName){var n=e.resourceName.split("?"),t=T(n,2),o=t[0],a=t[1];e.path=o&&"/"!==o?o:undefined,e.query=a,e.resourceName=undefined}return e.fragment=undefined,e}},ie={scheme:"wss",domainHost:ae.domainHost,parse:ae.parse,serialize:ae.serialize},ue={},se="[A-Za-z0-9\\-\\.\\_\\~\\xA0-\\u200D\\u2010-\\u2029\\u202F-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]",fe="[0-9A-Fa-f]",ce=n(n("%[EFef][0-9A-Fa-f]%"+fe+fe+"%"+fe+fe)+"|"+n("%[89A-Fa-f][0-9A-Fa-f]%"+fe+fe)+"|"+n("%"+fe+fe)),pe=r("[\\!\\$\\%\\'\\(\\)\\*\\+\\,\\-\\.0-9\\<\\>A-Z\\x5E-\\x7E]",'[\\"\\\\]'),he=new RegExp(se,"g"),de=new RegExp(ce,"g"),le=new RegExp(r("[^]","[A-Za-z0-9\\!\\$\\%\\'\\*\\+\\-\\^\\_\\`\\{\\|\\}\\~]","[\\.]",'[\\"]',pe),"g"),me=new RegExp(r("[^]",se,"[\\!\\$\\'\\(\\)\\*\\+\\,\\;\\:\\@]"),"g"),ge=me,ve={scheme:"mailto",parse:function(e,r){var n=e,t=n.to=n.path?n.path.split(","):[];if(n.path=undefined,n.query){for(var o=!1,a={},i=n.query.split("&"),u=0,s=i.length;u<s;++u){var f=i[u].split("=");switch(f[0]){case"to":for(var c=f[1].split(","),p=0,h=c.length;p<h;++p)t.push(c[p]);break;case"subject":n.subject=O(f[1],r);break;case"body":n.body=O(f[1],r);break;default:o=!0,a[O(f[0],r)]=O(f[1],r)}}o&&(n.headers=a)}n.query=undefined;for(var d=0,l=t.length;d<l;++d){var m=t[d].split("@");if(m[0]=O(m[0]),r.unicodeSupport)m[1]=O(m[1],r).toLowerCase();else try{m[1]=B.toASCII(O(m[1],r).toLowerCase())}catch(g){n.error=n.error||"Email address's domain name can not be converted to ASCII via punycode: "+g}t[d]=m.join("@")}return n},serialize:function(e,r){var n=e,t=a(e.to);if(t){for(var i=0,u=t.length;i<u;++i){var s=String(t[i]),f=s.lastIndexOf("@"),c=s.slice(0,f).replace(de,I).replace(de,o).replace(le,h),p=s.slice(f+1);try{p=r.iri?B.toUnicode(p):B.toASCII(O(p,r).toLowerCase())}catch(g){n.error=n.error||"Email address's domain name can not be converted to "+(r.iri?"Unicode":"ASCII")+" via punycode: 
"+g}t[i]=c+"@"+p}n.path=t.join(",")}var d=e.headers=e.headers||{};e.subject&&(d.subject=e.subject),e.body&&(d.body=e.body);var l=[];for(var m in d)d[m]!==ue[m]&&l.push(m.replace(de,I).replace(de,o).replace(me,h)+"="+d[m].replace(de,I).replace(de,o).replace(ge,h));return l.length&&(n.query=l.join("&")),n}},Ee=/^([^\:]+)\:(.*)/,Ce={scheme:"urn",parse:function(e,r){var n=e.path&&e.path.match(Ee),t=e;if(n){var o=r.scheme||t.scheme||"urn",a=n[1].toLowerCase(),i=n[2],u=o+":"+(r.nid||a),s=J[u];t.nid=a,t.nss=i,t.path=undefined,s&&(t=s.parse(t,r))}else t.error=t.error||"URN can not be parsed.";return t},serialize:function(e,r){var n=r.scheme||e.scheme||"urn",t=e.nid,o=n+":"+(r.nid||t),a=J[o];a&&(e=a.serialize(e,r));var i=e,u=e.nss;return i.path=(t||r.nid)+":"+u,i}},ye=/^[0-9A-Fa-f]{8}(?:\-[0-9A-Fa-f]{4}){3}\-[0-9A-Fa-f]{12}$/,Se={scheme:"urn:uuid",parse:function(e,r){var n=e;return n.uuid=n.nss,n.nss=undefined,r.tolerant||n.uuid&&n.uuid.match(ye)||(n.error=n.error||"UUID is not valid."),n},serialize:function(e,r){var n=e;return n.nss=(e.uuid||"").toLowerCase(),n}};J[te.scheme]=te,J[oe.scheme]=oe,J[ae.scheme]=ae,J[ie.scheme]=ie,J[ve.scheme]=ve,J[Ce.scheme]=Ce,J[Se.scheme]=Se,e.SCHEMES=J,e.pctEncChar=h,e.pctDecChars=d,e.parse=E,e.removeDotSegments=y,e.serialize=S,e.resolveComponents=A,e.resolve=D,e.normalize=w,e.equal=b,e.escapeComponent=x,e.unescapeComponent=O,Object.defineProperty(e,"__esModule",{value:!0})}); //# sourceMappingURL=uri.all.min.js.map \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/uri-js/package.json b/tools/node_modules/eslint/node_modules/uri-js/package.json index 5d55c3598416c5..b97f667a234ea6 100755 --- a/tools/node_modules/eslint/node_modules/uri-js/package.json +++ b/tools/node_modules/eslint/node_modules/uri-js/package.json @@ -16,7 +16,7 @@ "babel-cli": "^6.26.0", "babel-plugin-external-helpers": "^6.22.0", "babel-preset-latest": "^6.24.1", - "mocha": "^3.2.0", + "mocha": "^8.2.1", "mocha-qunit-ui": "^0.1.3", "rollup": "^0.41.6", "rollup-plugin-babel": "^2.7.1", @@ -78,5 +78,5 @@ "test": "mocha -u mocha-qunit-ui dist/es5/uri.all.js tests/tests.js" }, "types": "dist/es5/uri.all.d.ts", - "version": "4.4.0" + "version": "4.4.1" } \ No newline at end of file diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json index 80e0105bdcffaf..dda9992ee81177 100644 --- a/tools/node_modules/eslint/package.json +++ b/tools/node_modules/eslint/package.json @@ -12,7 +12,7 @@ "bundleDependencies": false, "dependencies": { "@babel/code-frame": "^7.0.0", - "@eslint/eslintrc": "^0.2.2", + "@eslint/eslintrc": "^0.3.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -37,7 +37,7 @@ "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", - "lodash": "^4.17.19", + "lodash": "^4.17.20", "minimatch": "^3.0.4", "natural-compare": "^1.4.0", "optionator": "^0.9.1", @@ -154,5 +154,5 @@ "test:cli": "mocha", "webpack": "node Makefile.js webpack" }, - "version": "7.17.0" + "version": "7.18.0" } \ No newline at end of file diff --git a/tools/update-babel-eslint.sh b/tools/update-babel-eslint.sh index d6de6d25e1d212..b64b8f25d32f7c 100755 --- a/tools/update-babel-eslint.sh +++ b/tools/update-babel-eslint.sh @@ -13,7 +13,7 @@ mkdir babel-eslint-tmp cd babel-eslint-tmp || exit npm init --yes -npm install --global-style --no-bin-links --production --no-package-lock @babel/core @babel/eslint-parser@latest @babel/plugin-syntax-class-properties@latest +npm install --global-style --no-bin-links 
--production --no-package-lock @babel/core @babel/eslint-parser@latest @babel/plugin-syntax-class-properties@latest @babel/plugin-syntax-top-level-await@latest # Use dmn to remove some unneeded files. npx dmn@2.2.2 -f clean
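The final hunk above teaches tools/update-babel-eslint.sh to pull in @babel/plugin-syntax-top-level-await alongside the other parser plugins, so the vendored parser setup can accept modules that await at the top level. A minimal sketch of the kind of source this is meant to cover (hypothetical file, shown only for illustration):

```javascript
// example.mjs — top-level await is only valid inside an ES module
import { readFile } from 'fs/promises';

// No wrapping async function: the syntax plugin is what lets the parser
// accept this `await` at module scope.
const pkg = JSON.parse(
  await readFile(new URL('./package.json', import.meta.url), 'utf8')
);
console.log(pkg.name);
```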
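The table@6.0.7 validator changes earlier in this diff add `singleLine` to the accepted top-level options and require it to be a boolean. A hedged usage sketch (the option name and type come from the schema diff; the exact rendering behaviour is not reproduced here):

```javascript
const { table } = require('table');

const data = [
  ['package', 'version'],
  ['eslint', '7.18.0'],
  ['table', '6.0.7'],
];

// Per the regenerated validator, `singleLine` must be a boolean;
// anything else fails with a "typeof" keyword validation error.
console.log(table(data, { singleLine: true }));
```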
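The ajv README hunk above documents passing `code: {es5: true}` to target ES5 environments. A small sketch of how that option fits into a compile/validate round trip (the schema is illustrative only; depending on the ajv build, plain `require('ajv')` may work in place of `.default`):

```javascript
// ajv v7 is compiled from TypeScript, so the constructor is commonly
// reached through the `default` export under CommonJS.
const Ajv = require('ajv').default;

// Ask ajv to emit ES5-compatible validation code rather than its
// default ES2018 output, as described in the README section above.
const ajv = new Ajv({ code: { es5: true } });

const validate = ajv.compile({
  type: 'object',
  properties: { singleLine: { type: 'boolean' } },
  additionalProperties: false,
});

console.log(validate({ singleLine: true }));   // true
console.log(validate({ singleLine: 'yes' }));  // false
```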
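uri-js 4.4.1 above lists ws and wss among the supported schemes and corrects the urn:uuid README example (the nid is "uuid", not "example"). A hedged sketch of both behaviours, with the returned objects abbreviated:

```javascript
const URI = require('uri-js');

// ws/wss URIs go through the WebSocket scheme handler: path and query
// collapse into `resourceName`, and `secure` reflects the wss scheme.
console.log(URI.parse('wss://example.com/chat?room=1'));
// → { scheme: 'wss', host: 'example.com', resourceName: '/chat?room=1', secure: true, ... }

// The corrected README example: parsing a urn:uuid URI yields nid "uuid".
console.log(URI.parse('urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6'));
// → { scheme: 'urn', nid: 'uuid', uuid: 'f81d4fae-7dec-11d0-a765-00a0c91e6bf6' }
```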