Fixing linter
nscarcella committed Feb 22, 2024
1 parent 898bad6 commit 03b3857
Showing 20 changed files with 238 additions and 239 deletions.
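Every check failure annotated in the diff below reports the same diagnostic, Insert `,`. The commit removes trailing commas from multi-line argument lists, while the formatter run by the `test` workflow (presumably Prettier with its default trailing-comma setting; the exact tool is not shown on this page) still expects them. A minimal, self-contained sketch of the two styles, using a hypothetical function rather than code from this diff:

// Hypothetical example; `add` is not part of this repository.
function add(first, second) {
  return first + second;
}

// Style the formatter expects: a trailing comma after the last argument
// when a call spans multiple lines.
const expected = add(
  1,
  2,
);

// Style this commit introduces; the formatter flags the missing comma
// with "Insert `,`".
const flagged = add(
  1,
  2
);

console.log(expected, flagged); // 3 3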
10 changes: 5 additions & 5 deletions deploy/createTypesPackages.js
@@ -86,7 +86,7 @@ const go = async () => {
pkg.files.forEach((fileRef) => {
fs.copyFileSync(
new URL(fileRef.from, import.meta.url),
new URL(fileRef.to, packagePath),
new URL(fileRef.to, packagePath)

Check failure on line 89 in deploy/createTypesPackages.js (GitHub Actions / test): Insert `,`
);
});

@@ -120,7 +120,7 @@ async function updatePackageJSON(pkg, packagePath) {
let version = "0.0.1";
try {
const npmResponse = await fetch(
`https://registry.npmjs.org/${packageJSON.name}`,
`https://registry.npmjs.org/${packageJSON.name}`

Check failure on line 123 in deploy/createTypesPackages.js (GitHub Actions / test): Insert `,`
);
/** @type {*} */
const npmPackage = await npmResponse.json();
@@ -145,7 +145,7 @@ async function updatePackageJSON(pkg, packagePath) {
pkgJSONPath,
await format(JSON.stringify(packageJSON), {
filepath: fileURLToPath(pkgJSONPath),
}),
})

Check failure on line 148 in deploy/createTypesPackages.js (GitHub Actions / test): Insert `,`
);

return packageJSON;
@@ -167,7 +167,7 @@ function copyREADME(pkg, pkgJSON, writePath) {
.replace("{{version}}", pkgJSON.version)
.replace(
"{{release_href}}",
`https://github.com/microsoft/TypeScript-DOM-lib-generator/releases/tag/${htmlEncodedTag}`,
`https://github.com/microsoft/TypeScript-DOM-lib-generator/releases/tag/${htmlEncodedTag}`

Check failure on line 170 in deploy/createTypesPackages.js (GitHub Actions / test): Insert `,`
);

fs.writeFileSync(writePath, readme);
@@ -200,7 +200,7 @@ export function postProcessDTSFiles(pkg, packagePath) {
iterateThroughFiles((content) => {
return content.replace(
"abort(reason?: any): AbortSignal;",
"// abort(reason?: any): AbortSignal; - To be re-added in the future",
"// abort(reason?: any): AbortSignal; - To be re-added in the future"

Check failure on line 203 in deploy/createTypesPackages.js (GitHub Actions / test): Insert `,`
);
});

10 changes: 5 additions & 5 deletions deploy/deployChangedPackages.js
@@ -61,7 +61,7 @@ for (const dirName of fs.readdirSync(generatedDir)) {

try {
const oldFile = await getFileFromUnpkg(
`${pkgJSON.name}@${olderVersion}/${filemap.to}`,
`${pkgJSON.name}@${olderVersion}/${filemap.to}`

Check failure on line 64 in deploy/deployChangedPackages.js (GitHub Actions / test): Insert `,`
);
console.log(` - ${file}`);
if (oldFile !== generatedDTSContent)
@@ -100,13 +100,13 @@ Assuming that this means we need to upload this package.`);

await createRelease(
`${pkgJSON.name}@${pkgJSON.version}`,
releaseNotes.join("\n\n"),
releaseNotes.join("\n\n")

Check failure on line 103 in deploy/deployChangedPackages.js (GitHub Actions / test): Insert `,`
);
}
} else {
console.log(
"Wanting to run: 'npm publish --access public' in " +
fileURLToPath(packageDir),
fileURLToPath(packageDir)

Check failure on line 109 in deploy/deployChangedPackages.js (GitHub Actions / test): Insert `,`
);
}

@@ -148,7 +148,7 @@ async function createRelease(tag, body) {
});
} catch (error) {
console.error(
"Creating the GitHub release failed, this is likely due to re-running the deploy.",
"Creating the GitHub release failed, this is likely due to re-running the deploy."

Check failure on line 151 in deploy/deployChangedPackages.js (GitHub Actions / test): Insert `,`
);
}
}
@@ -157,7 +157,7 @@ function verify() {
const authToken = process.env.GITHUB_TOKEN || process.env.GITHUB_API_TOKEN;
if (!authToken) {
throw new Error(
"There isn't an ENV var set up for creating a GitHub release, expected GITHUB_TOKEN.",
"There isn't an ENV var set up for creating a GitHub release, expected GITHUB_TOKEN."

Check failure on line 160 in deploy/deployChangedPackages.js (GitHub Actions / test): Insert `,`
);
}
}
8 changes: 4 additions & 4 deletions deploy/migrate.js
@@ -14,19 +14,19 @@ const tscWD = maybeTSWorkingDir.find((wd) => existsSync(wd));

if (!tscWD)
throw new Error(
"Could not find a TypeScript clone to put the generated files in.",
"Could not find a TypeScript clone to put the generated files in."
);

const generatedFiles = readdirSync("generated");
const filesToSend = generatedFiles.filter(
(file) => file.includes("dom.") || file.includes("webworker."),
(file) => file.includes("dom.") || file.includes("webworker.")
);

const generatedDir = new URL("../generated/", import.meta.url);
postProcessDTSFiles(
/** @type {any} */
({ files: filesToSend.map((f) => ({ to: f })) }),
generatedDir,
generatedDir
);

filesToSend.forEach((file) => {
@@ -38,5 +38,5 @@ filesToSend.forEach((file) => {
console.log(
`Moved ${filesToSend
.map((f) => f.replace(".generated", ""))
.join(", ")} to '${tscWD}/src/lib'.`,
.join(", ")} to '${tscWD}/src/lib'.`
);
2 changes: 1 addition & 1 deletion deploy/versionChangelog.js
@@ -9,7 +9,7 @@ import { basename } from "path";
const [name, before, to] = process.argv.slice(2);
if (!name || !before || !to) {
throw new Error(
"Expected three arguments: package name, version before, version to",
"Expected three arguments: package name, version before, version to"
);
}

34 changes: 17 additions & 17 deletions src/build.ts
@@ -41,30 +41,30 @@ interface EmitOptions {
async function emitFlavor(
webidl: Browser.WebIdl,
forceKnownTypes: Set<string>,
options: EmitOptions,
options: EmitOptions
) {
const exposed = getExposedTypes(webidl, options.global, forceKnownTypes);
mergeNamesakes(exposed);

const result = emitWebIdl(exposed, options.global[0], "");
await fs.writeFile(
new URL(`${options.name}.generated.d.ts`, options.outputFolder),
result,
result
);

const iterators = emitWebIdl(exposed, options.global[0], "sync");
await fs.writeFile(
new URL(`${options.name}.iterable.generated.d.ts`, options.outputFolder),
iterators,
iterators
);

const asyncIterators = emitWebIdl(exposed, options.global[0], "async");
await fs.writeFile(
new URL(
`${options.name}.asynciterable.generated.d.ts`,
options.outputFolder,
options.outputFolder
),
asyncIterators,
asyncIterators
);
}

@@ -118,7 +118,7 @@ async function emitDom() {

const transferables = widlStandardTypes.flatMap((st) => {
return Object.values(st.browser.interfaces?.interface ?? {}).filter(
(i) => i.transferable,
(i) => i.transferable
);
});

@@ -152,12 +152,12 @@ async function emitDom() {

function mergeApiDescriptions(
idl: Browser.WebIdl,
descriptions: Record<string, string>,
descriptions: Record<string, string>
) {
const namespaces = arrayToMap(
idl.namespaces!,
(i) => i.name,
(i) => i,
(i) => i
);
for (const [key, value] of Object.entries(descriptions)) {
const target = idl.interfaces!.interface[key] || namespaces[key];
@@ -170,12 +170,12 @@ async function emitDom() {

function mergeDeprecatedMessage(
idl: Browser.WebIdl,
descriptions: Record<string, string>,
descriptions: Record<string, string>
) {
const namespaces = arrayToMap(
idl.namespaces!,
(i) => i.name,
(i) => i,
(i) => i
);
for (const [key, value] of Object.entries(descriptions)) {
const target = idl.interfaces!.interface[key] || namespaces[key];
@@ -301,7 +301,7 @@ async function emitDom() {

function prune(
obj: Browser.WebIdl,
template: Partial<Browser.WebIdl>,
template: Partial<Browser.WebIdl>
): Browser.WebIdl {
return filterByNull(obj, template);

@@ -312,13 +312,13 @@ async function emitDom() {
if (!obj[k]) {
console.warn(
`removedTypes.json has a redundant field ${k} in ${JSON.stringify(
template,
).slice(0, 100)}`,
template
).slice(0, 100)}`
);
} else if (Array.isArray(template[k])) {
if (!Array.isArray(obj[k])) {
throw new Error(
`Removal template ${k} is an array but the original field is not`,
`Removal template ${k} is an array but the original field is not`
);
}
// template should include strings
@@ -328,18 +328,18 @@ async function emitDom() {
});
if (filtered[k].length !== obj[k].length - template[k].length) {
const differences = template[k].filter(
(t: any) => !obj[k].includes(t),
(t: any) => !obj[k].includes(t)
);
console.warn(
`removedTypes.json has redundant array items: ${differences}`,
`removedTypes.json has redundant array items: ${differences}`
);
}
} else if (template[k] !== null) {
filtered[k] = filterByNull(obj[k], template[k]);
} else {
if (obj[k].exposed === "") {
console.warn(
`removedTypes.json removes ${k} that has already been disabled by BCD.`,
`removedTypes.json removes ${k} that has already been disabled by BCD.`
);
}
delete filtered[k];
4 changes: 2 additions & 2 deletions src/build/bcd.ts
@@ -10,7 +10,7 @@ import { hasStableImplementation } from "./bcd/stable.js";

function hasMultipleImplementations(support: SupportBlock, prefix?: string) {
const hasStableImpl = (
browser: SimpleSupportStatement | SimpleSupportStatement[] | undefined,
browser: SimpleSupportStatement | SimpleSupportStatement[] | undefined
) => hasStableImplementation(browser, prefix);
let count = 0;
if (hasStableImpl(support.chrome) || hasStableImpl(support.chrome_android)) {
@@ -32,7 +32,7 @@ function isSuitable(
key: string,
compat?: CompatStatement,
parentKey?: string,
prefix?: string,
prefix?: string
) {
const forceAlive = parentKey
? forceKeepAlive[parentKey]?.includes(key)
16 changes: 8 additions & 8 deletions src/build/bcd/mapper.ts
@@ -35,7 +35,7 @@ function mergeCompatStatements(data?: Identifier): CompatStatement | undefined {
const base = Object.fromEntries(
Object.keys(statements[0].support).map((key) => {
return [key, [] as SimpleSupportStatement[]];
}),
})
);

for (const statement of statements) {
@@ -56,13 +56,13 @@ function mergeCompatStatements(data?: Identifier): CompatStatement | undefined {
function mapInterfaceLike(
name: string,
i: Browser.Interface,
mapper: (data: DataToMap) => any,
mapper: (data: DataToMap) => any
) {
const data = i.mixin
? api.__mixins[name]
: i.legacyNamespace
? api[i.legacyNamespace][name]
: api[name];
? api[i.legacyNamespace][name]
: api[name];
const intCompat = data?.__compat;
const mapped = mapper({ key: name, compat: intCompat, mixin: !!i.mixin });
if (!data) {
@@ -87,7 +87,7 @@ function mapInterfaceLike(
const properties = filterMapRecord(
i.properties?.property,
recordMapper,
i.namespace,
i.namespace
);

if (i.iterator) {
@@ -96,7 +96,7 @@ function mapInterfaceLike(
// for iterable methods such as values(). Use that as a fallback.
// See also: https://github.com/mdn/browser-compat-data/issues/6367
const iteratorCompat = mergeCompatStatements(
data[iteratorKey] ?? data["values"],
data[iteratorKey] ?? data["values"]
);
const iteratorMapped = mapper({
key: iteratorKey,
@@ -122,15 +122,15 @@ function mapInterfaceLike(

export function mapToBcdCompat(
webidl: Browser.WebIdl,
mapper: (data: DataToMap) => any,
mapper: (data: DataToMap) => any
): Browser.WebIdl | undefined {
const map = (name: string, i: Browser.Interface) =>
mapInterfaceLike(name, i, mapper);

const interfaces = filterMapRecord(webidl.interfaces?.interface, map);
const mixins = filterMapRecord(webidl.mixins?.mixin, map);
const namespaces = mapDefined(webidl.namespaces, (n) =>
mapInterfaceLike(n.name, n, mapper),
mapInterfaceLike(n.name, n, mapper)
);
if (
!isEmptyRecord(interfaces) ||
2 changes: 1 addition & 1 deletion src/build/bcd/stable.ts
@@ -2,7 +2,7 @@ import { SimpleSupportStatement } from "@mdn/browser-compat-data/types";

export function hasStableImplementation(
browser: SimpleSupportStatement | SimpleSupportStatement[] | undefined,
prefix?: string,
prefix?: string
): boolean {
if (!browser) {
return false;
(Diffs for the remaining 12 changed files did not load on this page.)