Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

View file

@ -0,0 +1 @@
{"files":{"Cargo.toml":"4cc8f8f284f6ae18dfc9a710fedbc571c78aae94cf8532bea1fea2d472d2fecb","LICENSE":"d89275951ba79e85476a6737b9d279e20dd1f5e09fdf66f7b8b6e7da68d7053e","README.md":"effb6c361861582a3acc02c33fd0eddd4c21dd88f07d66efb5a6f25153c76d77","deno.json":"deb508590cf9c3758b67d2947b55699f2eef02e259301e82e0db96a669739c90","rust-toolchain.toml":"8de938af815305f09269e9b3675017a512c59f10ef7fefb7044eb7a64fb87e25","scripts/01_setup.ts":"db8f02197daaa2b2cf7326f6411d2ef8ee95941ff7ad6298325d9c492d866841","scripts/02_build.ts":"526c0cfbf1a6206669fcf3002a32f1a372fbcd31a695334f1cd5b2fd45b8eda1","scripts/03_test.ts":"27f129b0fe779c30997e50da14b7b51f2e3ca46ab7c169c8f0ea08a6d03cb7c0","scripts/04_confirm.ts":"ea2126085e60f978f371220942359e2e00972ae0131a4f615349009b0a854e8e","scripts/README.md":"0925b96582b4c16a7fe074172d6dc617e4f76efcd2b5e5de13fa3463b9264621","scripts/deps.ts":"7906d2c64f9844748beda17bb6959721580d797c00dd1e4f8e52fe56b3dcccff","scripts/repos.ts":"af9b98ab7957de3fa927fc22e6d652056616dc220d7bdde35d8fb1035f25619f","scripts/reset.ts":"18c5c3f73a02a1798c28427d1f5698d19256c5bcd539e6c183ba4fe3fb580e52","src/cjs_parse.rs":"d97a2fa2d66447c1ca53f0379a69d6a87fe9e927a0e90ea703818025b85049c9","src/comments.rs":"3ebe167d2a7daa74328721488c42c78c9b9e4c018a05eb7b472384a6733eac80","src/lexing.rs":"c0d2b086e1b7d6a4a6de8274b8602ec3161d7b2420733b56cab21a900410010b","src/lib.rs":"98602ab2e9eb1eaabd8d159f798832ae660ee1101cd6c2776596fa2d818ca690","src/parsed_source.rs":"4058dc1f00ff58ca35ddeeb52fa16ababbe7fdef3b6d8438797946aa09c7baca","src/parsing.rs":"d2e81914d3ca177eb1c030ff619851a6c134bf232a306b7fd5d22ef4c30e6f01","src/scopes.rs":"42a2e2cb454d78232effb6c595a2710f6ee04187c68c54bfd2628d57ba67a14e","src/text_changes.rs":"6f92f0424733ecbd9b419a76c85902042173186007d012d606aeedb68b45ed0f","src/transpiling/mod.rs":"442f0b3ac754930d8cb3a38e36dbab429dd61ddef1f0f315eda8558d5c520b6e","src/transpiling/transforms.rs":"983bdd8f52ce8a7c42868cb7f3c5607692625061338c22cfadc8d38332d9a7da","src/types.rs"
:"c470c1883c4f17ee20c11416223cfef2b1e4dea35ce0c83b14104096c714e16a"},"package":"8a8adb6aeb787db71d015d8e9f63f6e004eeb09c86babb4ded00878be18619b1"}

242
third-party/vendor/deno_ast/Cargo.toml vendored Normal file
View file

@ -0,0 +1,242 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
name = "deno_ast"
version = "0.29.5"
authors = ["the Deno authors"]
description = "Source text parsing, lexing, and AST related functionality for Deno"
homepage = "https://deno.land/"
documentation = "https://docs.rs/deno_ast"
readme = "README.md"
license = "MIT"
repository = "https://github.com/denoland/deno_ast"
[package.metadata.docs.rs]
all-features = true
[dependencies.anyhow]
version = "1.0.64"
optional = true
[dependencies.base64]
version = "0.13.1"
optional = true
[dependencies.deno_media_type]
version = "0.1.2"
default-features = false
[dependencies.dprint-swc-ext]
version = "0.12.0"
[dependencies.serde]
version = "1.0.144"
features = ["derive"]
[dependencies.swc_atoms]
version = "=0.5.9"
[dependencies.swc_bundler]
version = "=0.219.5"
optional = true
[dependencies.swc_common]
version = "=0.32.0"
[dependencies.swc_config]
version = "=0.1.7"
optional = true
[dependencies.swc_config_macro]
version = "=0.1.2"
optional = true
[dependencies.swc_ecma_ast]
version = "=0.109.0"
features = ["serde-impl"]
[dependencies.swc_ecma_codegen]
version = "=0.144.1"
optional = true
[dependencies.swc_ecma_codegen_macros]
version = "=0.7.3"
optional = true
[dependencies.swc_ecma_dep_graph]
version = "=0.111.1"
optional = true
[dependencies.swc_ecma_loader]
version = "=0.44.2"
optional = true
[dependencies.swc_ecma_parser]
version = "=0.139.0"
[dependencies.swc_ecma_transforms_base]
version = "=0.132.2"
optional = true
[dependencies.swc_ecma_transforms_classes]
version = "=0.121.2"
optional = true
[dependencies.swc_ecma_transforms_compat]
version = "=0.158.3"
optional = true
[dependencies.swc_ecma_transforms_macros]
version = "=0.5.3"
optional = true
[dependencies.swc_ecma_transforms_optimization]
version = "=0.192.3"
optional = true
[dependencies.swc_ecma_transforms_proposal]
version = "=0.166.3"
optional = true
[dependencies.swc_ecma_transforms_react]
version = "=0.178.3"
optional = true
[dependencies.swc_ecma_transforms_typescript]
version = "=0.182.3"
optional = true
[dependencies.swc_ecma_utils]
version = "=0.122.0"
optional = true
[dependencies.swc_ecma_visit]
version = "=0.95.0"
optional = true
[dependencies.swc_eq_ignore_macros]
version = "=0.1.2"
[dependencies.swc_graph_analyzer]
version = "=0.21.0"
optional = true
[dependencies.swc_macros_common]
version = "=0.3.8"
optional = true
[dependencies.swc_trace_macro]
version = "=0.1.3"
optional = true
[dependencies.swc_visit]
version = "=0.5.7"
optional = true
[dependencies.swc_visit_macros]
version = "=0.5.8"
optional = true
[dependencies.text_lines]
version = "0.6.0"
features = ["serialization"]
[dependencies.url]
version = "2.3.1"
features = ["serde"]
optional = true
[dev-dependencies.pretty_assertions]
version = "1.3.0"
[dev-dependencies.serde_json]
version = "1.0.87"
features = ["preserve_order"]
[features]
bundler = [
"swc_bundler",
"swc_ecma_transforms_optimization",
"swc_graph_analyzer",
]
cjs = [
"utils",
"visit",
]
codegen = [
"swc_ecma_codegen",
"swc_ecma_codegen_macros",
"swc_macros_common",
]
compat = [
"transforms",
"swc_ecma_transforms_compat",
"swc_trace_macro",
"swc_config",
"swc_config_macro",
]
dep_graph = ["swc_ecma_dep_graph"]
module_specifier = [
"deno_media_type/module_specifier",
"url",
]
proposal = [
"transforms",
"swc_ecma_transforms_proposal",
"swc_ecma_transforms_classes",
"swc_ecma_transforms_macros",
"swc_macros_common",
]
react = [
"transforms",
"swc_ecma_transforms_react",
"swc_ecma_transforms_macros",
"swc_config",
"swc_config_macro",
"swc_macros_common",
]
scopes = [
"view",
"utils",
"visit",
]
sourcemap = ["dprint-swc-ext/sourcemap"]
transforms = [
"swc_ecma_loader",
"swc_ecma_transforms_base",
]
transpiling = [
"anyhow",
"base64",
"codegen",
"module_specifier",
"proposal",
"react",
"sourcemap",
"transforms",
"typescript",
"utils",
"visit",
]
typescript = [
"transforms",
"swc_ecma_transforms_typescript",
]
utils = ["swc_ecma_utils"]
view = ["dprint-swc-ext/view"]
visit = [
"swc_ecma_visit",
"swc_visit",
"swc_visit_macros",
"swc_macros_common",
]

21
third-party/vendor/deno_ast/LICENSE vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018-2023 the Deno authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

34
third-party/vendor/deno_ast/README.md vendored Normal file
View file

@ -0,0 +1,34 @@
# deno_ast
[![](https://img.shields.io/crates/v/deno_ast.svg)](https://crates.io/crates/deno_ast)
[![Discord Chat](https://img.shields.io/discord/684898665143206084?logo=discord&style=social)](https://discord.gg/deno)
Source text parsing, lexing, and AST related functionality for
[Deno](https://deno.land).
```rust
use deno_ast::parse_module;
use deno_ast::MediaType;
use deno_ast::ParseParams;
use deno_ast::SourceTextInfo;
let source_text = "class MyClass {}";
let text_info = SourceTextInfo::new(source_text.into());
let parsed_source = parse_module(ParseParams {
specifier: "file:///my_file.ts".to_string(),
media_type: MediaType::TypeScript,
text_info,
capture_tokens: true,
maybe_syntax: None,
scope_analysis: false,
}).expect("should parse");
// returns the comments
parsed_source.comments();
// returns the tokens if captured
parsed_source.tokens();
// returns the module (AST)
parsed_source.module();
// returns the `SourceTextInfo`
parsed_source.text_info();
```

14
third-party/vendor/deno_ast/deno.json vendored Normal file
View file

@ -0,0 +1,14 @@
{
"lock": false,
"fmt": {
"include": [
"./README.md",
"./scripts"
]
},
"lint": {
"include": [
"./scripts"
]
}
}

View file

@ -0,0 +1,4 @@
[toolchain]
channel = "1.70.0"
components = ["clippy", "rustfmt"]
profile = "minimal"

View file

@ -0,0 +1,25 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { Repos } from "./repos.ts";
import { $ } from "./deps.ts";

// Step 1 of the upgrade workflow: bring every downstream repo (everything
// except deno_ast itself) up to date with upstream main, then rewrite their
// manifests to reference the local sibling checkouts of each other.
const repos = await Repos.load();

// Ensure repos are latest main
for (const repo of repos.nonDenoAstRepos()) {
  $.logStep("Setting up", `${repo.name}...`);
  // Refuse to touch a repo with uncommitted work — switching branches and
  // pulling below could clobber it.
  if (await repo.hasLocalChanges()) {
    throw new Error(
      `Repo ${repo.name} had local changes. Please resolve this.`,
    );
  }
  $.logGroup();
  $.logStep("Switching to main...");
  await repo.command("git switch main");
  $.logStep("Pulling upstream main...");
  await repo.command("git pull upstream main");
  $.logGroupEnd();
}

// Update the repos to refer to local versions of each other
await repos.toLocalSource();

View file

@ -0,0 +1,11 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { Repos } from "./repos.ts";
import { $ } from "./deps.ts";

// Step 2 of the upgrade workflow: build every crate with all features
// enabled so that compile breakage from the version bump surfaces now.
const repos = await Repos.load();
const crates = repos.getCrates();
// Builds run sequentially so log output stays grouped per crate.
for (let i = 0; i < crates.length; i++) {
  const crate = crates[i];
  $.logStep(`Building ${crate.name}...`);
  await crate.build({ allFeatures: true });
}

View file

@ -0,0 +1,15 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { $ } from "./deps.ts";
import { Repos } from "./repos.ts";

// Step 3 of the upgrade workflow: run each crate's test suite. Once the
// user confirms the first crate, all later crates run without prompting.
const repos = await Repos.load();
let hadConfirmed = false;
for (const crate of repos.getCrates()) {
  // Short-circuit: only prompt while the user hasn't confirmed yet.
  const shouldRun = hadConfirmed ||
    confirm(`Do you want to run tests for ${crate.name}?`);
  if (!shouldRun) {
    continue;
  }
  hadConfirmed = true;
  $.logStep("Running tests", `for ${crate.name}...`);
  await crate.test({ allFeatures: true });
}

View file

@ -0,0 +1,83 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { $, Repo } from "./deps.ts";
import { Repos } from "./repos.ts";

// Step 4 of the upgrade workflow: for each repo with local changes, move
// its dependencies back from local paths to published registry versions,
// then optionally branch, commit, and push. Downstream repos are handled
// first; the `deno` repo is handled last, after its dependencies exist.
const repos = await Repos.load();
const denoRepo = repos.get("deno");
const deno_ast = repos.getCrate("deno_ast");
const nonDenoRepos = repos.getRepos().filter((c) => c.name !== "deno");

// create a branch, commit, push for the non-deno repos
for (const repo of nonDenoRepos) {
  // Skip repos untouched by the earlier local-source rewrite.
  if (!await repo.hasLocalChanges()) {
    continue;
  }
  const currentBranch = await repo.gitCurrentBranch();
  $.logStep("Analyzing", repo.name);
  $.logLight("Branch:", currentBranch);
  if (
    confirm(
      `Bump deps? (Note: do this after the dependency crates have PUBLISHED)`,
    )
  ) {
    await bumpDeps(repo);
    // Verify the bumped versions actually resolve before committing.
    for (const crate of repo.crates) {
      await crate.cargoCheck();
    }
    // Only create a release branch when still on main — a re-run that is
    // already on a release branch keeps using it.
    if (
      currentBranch === "main" &&
      confirm(`Branch for ${repo.name}?`)
    ) {
      await repo.gitBranch("deno_ast_" + deno_ast.version);
    }
    if (
      await repo.hasLocalChanges() &&
      confirm(`Commit and push for ${repo.name}?`)
    ) {
      await repo.gitAdd();
      await repo.gitCommit(`feat: upgrade deno_ast to ${deno_ast.version}`);
      await repo.gitPush();
    }
  }
}

// now branch, commit, and push for the deno repo
$.logStep("Analyzing Deno");
const currentBranch = await denoRepo.gitCurrentBranch();
$.logLight("Branch:", currentBranch);
if (confirm(`Bump deps for deno?`)) {
  await bumpDeps(denoRepo);
  for (const crate of denoRepo.crates) {
    await crate.cargoCheck();
  }
  if (
    currentBranch === "main" &&
    confirm(`Branch for deno?`)
  ) {
    await denoRepo.gitBranch("deno_ast_" + deno_ast.version);
  }
  if (
    await denoRepo.hasLocalChanges() && confirm(`Commit and push for deno?`)
  ) {
    await denoRepo.gitAdd();
    await denoRepo.gitCommit(
      `chore: upgrade to deno_ast ${deno_ast.version}`,
    );
    await denoRepo.gitPush();
  }
}
/**
 * For every crate in `repo`, reverts each local-path dependency back to a
 * registry dependency pinned at that dependency's latest published version.
 *
 * Throws when a dependency has no published version to pin to.
 */
async function bumpDeps(repo: Repo) {
  for (const crate of repo.crates) {
    for (const depCrate of repos.getCrateLocalSourceCrates(crate)) {
      await crate.revertLocalSource(depCrate);
      const version = await depCrate.getLatestVersion();
      if (version == null) {
        // Fix: report the dependency whose version lookup failed
        // (previously named the dependent crate, which was misleading).
        throw new Error(`Did not find version for ${depCrate.name}`);
      }
      await crate.setDependencyVersion(depCrate.name, version);
    }
  }
}

View file

@ -0,0 +1,29 @@
# Scripts
These scripts provide a way to help upgrade, test, and open PRs in downstream
crates on an swc version bump.
## Setup
1. Ensure all repos are cloned into sibling directories:
- `./deno`
- `./deno_ast`
- `./deno_doc`
- `./deno_emit`
- `./deno_graph`
- `./deno_lint`
- `./dprint-plugin-typescript`
2. Ensure all repos have an `upstream` remote defined as the original repo.
## Overview
- `01_setup.ts` - Ensures all downstream crates are on the latest main, then
points them at local copies of each other.
- `02_build.ts` - Builds each crate. If you encounter any build errors, fix them
and keep running this until everything builds.
- `03_test.ts` - Tests each crate. If you encounter test failures, fix them and
keep running this until all the tests pass.
- `04_confirm.ts` - Updates the dependency versions, creates a branch, commits,
and pushes a branch for every selected repo.

View file

@ -0,0 +1,3 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
export * from "https://raw.githubusercontent.com/denoland/automation/0.19.2/mod.ts";

View file

@ -0,0 +1,149 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { $, Crate, Repo } from "./deps.ts";

// Directory that contains all the sibling repo checkouts: four levels up
// from this file's location.
export const rootDir = $.path.resolve(
  $.path.join($.path.fromFileUrl(import.meta.url), "../../../../"),
);

// The repos managed by these scripts, listed roughly with dependencies
// before dependents (`deno` itself last).
const repoNames = [
  "deno_ast",
  "deno_graph",
  "deno_lint",
  "dprint-plugin-typescript",
  "deno_doc",
  "eszip",
  "deno_emit",
  "deno",
];
/**
 * The collection of sibling repo checkouts that take part in a deno_ast
 * upgrade, with helpers for looking up repos/crates and for switching their
 * Cargo dependencies between local-path and registry versions.
 */
export class Repos {
  #repos: readonly Repo[];

  private constructor(repos: readonly Repo[]) {
    this.#repos = repos;
  }

  /**
   * Creates an empty collection without touching the file system.
   *
   * Fix: this factory previously ended in a bare `return;`, which yielded
   * `undefined` instead of a `Repos` instance.
   */
  static createWithoutLoading() {
    return new Repos([]);
  }

  /** Loads every repo listed in `repoNames` from disk. */
  static async load({ skipLoadingCrates = false } = {}) {
    if (!skipLoadingCrates) {
      $.logStep("Loading repos...");
    }
    const repos = [];
    for (const repoName of repoNames) {
      $.logStep("Loading", repoName);
      repos.push(await loadRepo(repoName));
    }
    return new Repos(repos);

    function loadRepo(name: string) {
      return Repo.load({
        name,
        path: $.path.join(rootDir, name),
        skipLoadingCrates,
      }).catch((err) => {
        // Surface which repo failed to load before rethrowing.
        console.error(`Error loading: ${name}`);
        throw err;
      });
    }
  }

  /** Gets a defensive copy of all the repos. */
  getRepos() {
    return [...this.#repos];
  }

  /**
   * Gets the crates to build/test: only the `deno` crate from the deno
   * repo, and every crate from the other repos except the wasm wrappers.
   */
  getCrates() {
    const crates = [];
    for (const repo of this.#repos) {
      if (repo.name === "deno") {
        crates.push(repo.getCrate("deno"));
      } else {
        crates.push(
          ...repo.crates.filter((c) =>
            c.name !== "eszip_wasm" && c.name !== "deno_emit_wasm"
          ),
        );
      }
    }
    return crates;
  }

  /** Every repo except deno_ast itself. */
  nonDenoAstRepos() {
    return this.#repos.filter((c) => c.name !== "deno_ast");
  }

  /** Gets a repo by name, throwing when it doesn't exist. */
  get(name: string) {
    const repo = this.#repos.find((c) => c.name === name);
    if (repo == null) {
      throw new Error(`Could not find repo with name ${name}.`);
    }
    return repo;
  }

  /** Searches all the repos for a crate by name, throwing when not found. */
  getCrate(name: string) {
    for (const repo of this.#repos) {
      for (const crate of repo.crates) {
        if (crate.name === name) {
          return crate;
        }
      }
    }
    throw new Error(`Could not find crate: ${name}`);
  }

  /** Points every dependent crate at the local copy of its dependency. */
  async toLocalSource() {
    for (
      const [workingCrate, otherCrate] of this.#getLocalSourceRelationships()
    ) {
      await workingCrate.toLocalSource(otherCrate);
    }
  }

  /** Reverts `toLocalSource`, restoring registry dependencies. */
  async revertLocalSource() {
    for (
      const [workingCrate, depCrate] of this.#getLocalSourceRelationships()
    ) {
      await workingCrate.revertLocalSource(depCrate);
    }
  }

  /** Gets the local-source dependencies of the provided crate. */
  getCrateLocalSourceCrates(crate: Crate) {
    return this.#getLocalSourceRelationships()
      .filter(([workingCrate]) => workingCrate === crate)
      .map(([_workingCrate, depCrate]) => depCrate);
  }

  /** The (dependent, dependency) pairs swapped to local source. */
  #getLocalSourceRelationships() {
    const deno_ast = this.getCrate("deno_ast");
    const deno_graph = this.getCrate("deno_graph");
    const deno_doc = this.getCrate("deno_doc");
    const deno_lint = this.getCrate("deno_lint");
    const dprint_plugin_typescript = this.getCrate("dprint-plugin-typescript");
    const deno_cli = this.getCrate("deno");
    const eszip = this.getCrate("eszip");
    const deno_emit = this.getCrate("deno_emit");
    return [
      [deno_graph, deno_ast],
      [deno_doc, deno_ast],
      [deno_doc, deno_graph],
      [eszip, deno_ast],
      [eszip, deno_graph],
      [deno_lint, deno_ast],
      [dprint_plugin_typescript, deno_ast],
      [deno_emit, deno_graph],
      [deno_emit, deno_ast],
      [deno_cli, deno_ast],
      [deno_cli, deno_graph],
      [deno_cli, deno_doc],
      [deno_cli, deno_lint],
      [deno_cli, eszip],
      [deno_cli, dprint_plugin_typescript],
      [deno_cli, deno_emit],
    ] as [Crate, Crate][];
  }
}

View file

@ -0,0 +1,9 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { Repos } from "./repos.ts";

// Destructive helper: hard-resets every downstream repo (deno_ast itself is
// excluded) after user confirmation. Crate metadata isn't needed here, so
// skip loading it for speed.
const repos = await Repos.load({ skipLoadingCrates: true });
if (confirm("Are you sure you want to git reset --hard all the repos?")) {
  // The resets touch independent directories, so run them concurrently.
  await Promise.all(repos.nonDenoAstRepos().map((c) => c.gitResetHard()));
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,242 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Need to enable this for this file in order to
// implement swc's `Comments` trait
#![allow(clippy::disallowed_types)]
use crate::swc::common::comments::Comment;
use crate::swc::common::comments::Comments as SwcComments;
use crate::swc::common::comments::SingleThreadedComments;
use crate::swc::common::comments::SingleThreadedCommentsMapInner;
use crate::swc::common::BytePos as SwcBytePos;
use crate::SourcePos;
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
/// Owned leading/trailing comment maps taken out of an swc
/// `SingleThreadedComments`, shared immutably behind an `Arc`.
#[derive(Debug)]
struct MultiThreadedCommentsInner {
  // Comments keyed by the position preceding a node.
  leading: SingleThreadedCommentsMapInner,
  // Comments keyed by the position following a node.
  trailing: SingleThreadedCommentsMapInner,
}

/// An implementation of swc's `Comments` that implements `Sync`
/// to support being used in multi-threaded code. This implementation
/// is immutable and should you need mutability you may create a copy
/// by converting it to an swc `SingleThreadedComments`.
#[derive(Clone, Debug)]
pub struct MultiThreadedComments {
  inner: Arc<MultiThreadedCommentsInner>,
}
impl MultiThreadedComments {
  /// Creates a new `MultiThreadedComments` from an swc `SingleThreadedComments`.
  ///
  /// Panics if other `Rc` clones of the underlying comment maps are still
  /// alive — the maps must be uniquely owned to be moved out of the `Rc`s.
  pub fn from_single_threaded(comments: SingleThreadedComments) -> Self {
    let (leading, trailing) = comments.take_all();
    let leading = Rc::try_unwrap(leading).unwrap().into_inner();
    let trailing = Rc::try_unwrap(trailing).unwrap().into_inner();
    MultiThreadedComments {
      inner: Arc::new(MultiThreadedCommentsInner { leading, trailing }),
    }
  }

  /// Gets a clone of the underlying data as `SingleThreadedComments`.
  ///
  /// This may be useful for getting a mutable data structure for use
  /// when transpiling.
  pub fn as_single_threaded(&self) -> SingleThreadedComments {
    let inner = &self.inner;
    // `to_owned` deep-clones the maps; this instance stays untouched.
    let leading = Rc::new(RefCell::new(inner.leading.to_owned()));
    let trailing = Rc::new(RefCell::new(inner.trailing.to_owned()));
    SingleThreadedComments::from_leading_and_trailing(leading, trailing)
  }

  /// Gets a reference to the leading comment map.
  pub fn leading_map(&self) -> &SingleThreadedCommentsMapInner {
    &self.inner.leading
  }

  /// Gets a reference to the trailing comment map.
  pub fn trailing_map(&self) -> &SingleThreadedCommentsMapInner {
    &self.inner.trailing
  }

  /// Gets a vector of all the comments sorted by position.
  pub fn get_vec(&self) -> Vec<Comment> {
    let mut comments = self
      .inner
      .leading
      .values()
      .chain(self.inner.trailing.values())
      .flatten()
      .cloned()
      .collect::<Vec<_>>();
    // Leading and trailing comments are interleaved by start position.
    comments.sort_by_key(|comment| comment.span.lo);
    comments
  }

  /// Returns true when one or more leading comments are keyed at `pos`.
  pub fn has_leading(&self, pos: SourcePos) -> bool {
    self.inner.leading.contains_key(&pos.as_byte_pos())
  }

  /// Gets the leading comments keyed at `pos`, if any.
  pub fn get_leading(&self, pos: SourcePos) -> Option<&Vec<Comment>> {
    self.inner.leading.get(&pos.as_byte_pos())
  }

  /// Returns true when one or more trailing comments are keyed at `pos`.
  pub fn has_trailing(&self, pos: SourcePos) -> bool {
    self.inner.trailing.contains_key(&pos.as_byte_pos())
  }

  /// Gets the trailing comments keyed at `pos`, if any.
  pub fn get_trailing(&self, pos: SourcePos) -> Option<&Vec<Comment>> {
    self.inner.trailing.get(&pos.as_byte_pos())
  }

  /// Adapts this to a boxed swc `Comments` trait object. The write methods
  /// of the returned object panic (this implementation is read-only).
  pub fn as_swc_comments(&self) -> Box<dyn SwcComments> {
    Box::new(SwcMultiThreadedComments(self.clone()))
  }
}
// Don't want to expose this API easily, so someone should
// use the `.as_swc_comments()` above to access it.
struct SwcMultiThreadedComments(MultiThreadedComments);

impl SwcComments for SwcMultiThreadedComments {
  fn has_leading(&self, pos: SwcBytePos) -> bool {
    // It's ok to convert these byte positions to source
    // positions because we received them from swc and
    // didn't create them on their own.
    self.0.has_leading(SourcePos::unsafely_from_byte_pos(pos))
  }

  fn get_leading(&self, pos: SwcBytePos) -> Option<Vec<Comment>> {
    self
      .0
      .get_leading(SourcePos::unsafely_from_byte_pos(pos))
      .cloned()
  }

  fn has_trailing(&self, pos: SwcBytePos) -> bool {
    self.0.has_trailing(SourcePos::unsafely_from_byte_pos(pos))
  }

  fn get_trailing(&self, pos: SwcBytePos) -> Option<Vec<Comment>> {
    self
      .0
      .get_trailing(SourcePos::unsafely_from_byte_pos(pos))
      .cloned()
  }

  // The remaining trait methods would mutate the comment store, which this
  // immutable implementation cannot support — they all panic.

  fn add_leading(&self, _pos: SwcBytePos, _cmt: Comment) {
    panic_readonly();
  }

  fn add_leading_comments(&self, _pos: SwcBytePos, _comments: Vec<Comment>) {
    panic_readonly();
  }

  fn move_leading(&self, _from: SwcBytePos, _to: SwcBytePos) {
    panic_readonly();
  }

  fn take_leading(&self, _pos: SwcBytePos) -> Option<Vec<Comment>> {
    panic_readonly();
  }

  fn add_trailing(&self, _pos: SwcBytePos, _cmt: Comment) {
    panic_readonly();
  }

  fn add_trailing_comments(&self, _pos: SwcBytePos, _comments: Vec<Comment>) {
    panic_readonly();
  }

  fn move_trailing(&self, _from: SwcBytePos, _to: SwcBytePos) {
    panic_readonly();
  }

  fn take_trailing(&self, _pos: SwcBytePos) -> Option<Vec<Comment>> {
    panic_readonly();
  }

  fn add_pure_comment(&self, _pos: SwcBytePos) {
    panic_readonly();
  }
}
/// Diverges: the multi-threaded comments are read-only, so every mutating
/// method of the swc `Comments` trait delegates here.
fn panic_readonly() -> ! {
  panic!("MultiThreadedComments do not support write operations")
}
#[cfg(test)]
mod test {
  use crate::parse_module;
  use crate::swc::common::comments::SingleThreadedComments;
  use crate::MediaType;
  use crate::MultiThreadedComments;
  use crate::ParseParams;
  use crate::SourceTextInfo;
  use crate::StartSourcePos;

  #[test]
  fn general_use() {
    // "// 1\nt;/* 2 */" — a leading line comment keyed at offset 5 (the
    // `t`) and a trailing block comment keyed at offset 7 (after `t;`).
    let (comments, start_pos) = get_single_threaded_comments("// 1\nt;/* 2 */");
    let comments = MultiThreadedComments::from_single_threaded(comments);
    // maps
    assert_eq!(comments.leading_map().len(), 1);
    assert_eq!(
      comments
        .leading_map()
        .get(&(start_pos + 5).as_byte_pos())
        .unwrap()[0]
        .text,
      " 1"
    );
    assert_eq!(comments.trailing_map().len(), 1);
    assert_eq!(
      comments
        .trailing_map()
        .get(&(start_pos + 7).as_byte_pos())
        .unwrap()[0]
        .text,
      " 2 "
    );
    // comment vector
    let comments_vec = comments.get_vec();
    assert_eq!(comments_vec.len(), 2);
    assert_eq!(comments_vec[0].text, " 1");
    assert_eq!(comments_vec[1].text, " 2 ");
    // comments trait
    assert!(comments.has_leading(start_pos + 5));
    assert!(!comments.has_leading(start_pos + 7));
    assert_eq!(comments.get_leading(start_pos + 5).unwrap()[0].text, " 1");
    assert!(comments.get_leading(start_pos + 7).is_none());
    assert!(!comments.has_trailing(start_pos + 5));
    assert!(comments.has_trailing(start_pos + 7));
    assert!(comments.get_trailing(start_pos + 5).is_none());
    assert_eq!(comments.get_trailing(start_pos + 7).unwrap()[0].text, " 2 ");
  }

  // Parses the text as TypeScript and returns its single-threaded comments
  // together with the start position of the source.
  fn get_single_threaded_comments(
    text: &str,
  ) -> (SingleThreadedComments, StartSourcePos) {
    let module = parse_module(ParseParams {
      specifier: "file.ts".to_string(),
      text_info: SourceTextInfo::from_string(text.to_string()),
      media_type: MediaType::TypeScript,
      capture_tokens: false,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .expect("expects a module");
    (
      module.comments().as_single_threaded(),
      module.text_info().range().start,
    )
  }
}

View file

@ -0,0 +1,108 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::rc::Rc;
use crate::get_syntax;
use crate::swc::atoms::Atom;
use crate::swc::common::comments::Comment;
use crate::swc::common::comments::CommentKind;
use crate::swc::common::comments::SingleThreadedComments;
use crate::swc::common::input::StringInput;
use crate::swc::parser::lexer::Lexer;
use crate::swc::parser::token::Token;
use crate::MediaType;
use crate::SourceRangedForSpanned;
use crate::StartSourcePos;
use crate::ES_VERSION;
/// Either a language token produced by the lexer or a comment.
#[derive(Debug, Clone)]
pub enum TokenOrComment {
  // A lexed token.
  Token(Token),
  // A comment with its kind (line or block) and raw text.
  Comment { kind: CommentKind, text: Atom },
}

/// A single item produced by `lex`: a token or comment plus where it
/// appears in the source.
#[derive(Debug, Clone)]
pub struct LexedItem {
  /// Range of the token or comment.
  pub range: std::ops::Range<usize>,
  /// Token or comment.
  pub inner: TokenOrComment,
}
/// Given the source text and media type, tokenizes the provided
/// text to a collection of tokens and comments, returned in source
/// order (tokens and comments interleaved by start offset).
pub fn lex(source: &str, media_type: MediaType) -> Vec<LexedItem> {
  let comments = SingleThreadedComments::default();
  let start = StartSourcePos::START_SOURCE_POS;
  let input = StringInput::new(
    source,
    start.as_byte_pos(),
    (start + source.len()).as_byte_pos(),
  );
  let lexer =
    Lexer::new(get_syntax(media_type), ES_VERSION, input, Some(&comments));

  // Collect the tokens first; the lexer deposits comments into `comments`
  // as a side effect of iteration.
  let mut items = Vec::new();
  for token in lexer {
    items.push(LexedItem {
      range: token.range().as_byte_range(start),
      inner: TokenOrComment::Token(token.token),
    });
  }
  for comment in flatten_comments(comments) {
    items.push(LexedItem {
      range: comment.range().as_byte_range(start),
      inner: TokenOrComment::Comment {
        kind: comment.kind,
        text: comment.text,
      },
    });
  }
  // Stable sort interleaves tokens and comments by start position.
  items.sort_by_key(|item| item.range.start);
  items
}
/// Consumes the comment store and yields every comment it holds,
/// leading and trailing alike.
fn flatten_comments(
  comments: SingleThreadedComments,
) -> impl Iterator<Item = Comment> {
  let (leading_rc, trailing_rc) = comments.take_all();
  // We are the sole owner at this point, so unwrapping the Rcs cannot fail.
  let mut merged = Rc::try_unwrap(leading_rc).unwrap().into_inner();
  merged.extend(Rc::try_unwrap(trailing_rc).unwrap().into_inner());
  merged.into_iter().flat_map(|(_pos, comments)| comments)
}
#[cfg(test)]
mod test {
  use super::*;
  use crate::MediaType;

  #[test]
  fn tokenize_with_comments() {
    let items = lex(
      "const /* 1 */ t: number /* 2 */ = 5; // 3",
      MediaType::TypeScript,
    );
    // 10 items total: the tokens plus the three comments, interleaved.
    assert_eq!(items.len(), 10);
    // only bother testing a few
    assert!(matches!(items[1].inner, TokenOrComment::Comment { .. }));
    assert!(matches!(
      items[3].inner,
      TokenOrComment::Token(Token::Colon)
    ));
    assert!(matches!(items[9].inner, TokenOrComment::Comment { .. }));
  }

  #[test]
  fn handle_bom() {
    // A leading byte order mark is skipped: the single lexed item starts
    // just past the BOM's utf-8 length.
    const BOM_CHAR: char = '\u{FEFF}';
    let items = lex(&format!("{}1", BOM_CHAR), MediaType::JavaScript);
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].range.start, BOM_CHAR.len_utf8());
  }
}

77
third-party/vendor/deno_ast/src/lib.rs vendored Normal file
View file

@ -0,0 +1,77 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
#![deny(clippy::disallowed_methods)]
#![deny(clippy::disallowed_types)]
#[cfg(feature = "cjs")]
mod cjs_parse;
mod comments;
mod lexing;
mod parsed_source;
mod parsing;
#[cfg(feature = "scopes")]
mod scopes;
mod text_changes;
#[cfg(feature = "transpiling")]
mod transpiling;
mod types;
#[cfg(feature = "view")]
pub use dprint_swc_ext::view;
pub use dprint_swc_ext::common::*;
#[cfg(feature = "cjs")]
pub use cjs_parse::*;
pub use comments::*;
pub use deno_media_type::*;
pub use lexing::*;
pub use parsed_source::*;
pub use parsing::*;
#[cfg(feature = "scopes")]
pub use scopes::*;
pub use text_changes::*;
#[cfg(feature = "transpiling")]
pub use transpiling::*;
pub use types::*;
#[cfg(feature = "module_specifier")]
pub type ModuleSpecifier = url::Url;
/// Re-exports of the pinned swc crates used by this crate so that
/// consumers can use the exact same swc versions without declaring
/// their own (possibly mismatched) `swc_*` dependencies. Each optional
/// piece is gated behind the matching cargo feature.
pub mod swc {
  pub use dprint_swc_ext::swc::atoms;
  pub use dprint_swc_ext::swc::common;
  #[cfg(feature = "bundler")]
  pub use swc_bundler as bundler;
  pub use swc_ecma_ast as ast;
  #[cfg(feature = "codegen")]
  pub use swc_ecma_codegen as codegen;
  #[cfg(feature = "dep_graph")]
  pub use swc_ecma_dep_graph as dep_graph;
  pub use swc_ecma_parser as parser;
  // Transform passes collected under one module, themselves feature-gated.
  #[cfg(feature = "transforms")]
  pub mod transforms {
    pub use self::fixer::fixer;
    pub use self::hygiene::hygiene;
    pub use swc_ecma_transforms_base::assumptions::Assumptions;
    pub use swc_ecma_transforms_base::fixer;
    pub use swc_ecma_transforms_base::helpers;
    pub use swc_ecma_transforms_base::hygiene;
    pub use swc_ecma_transforms_base::pass;
    pub use swc_ecma_transforms_base::perf;
    pub use swc_ecma_transforms_base::resolver;
    #[cfg(feature = "compat")]
    pub use swc_ecma_transforms_compat as compat;
    #[cfg(feature = "proposal")]
    pub use swc_ecma_transforms_proposal as proposal;
    #[cfg(feature = "react")]
    pub use swc_ecma_transforms_react as react;
    #[cfg(feature = "typescript")]
    pub use swc_ecma_transforms_typescript as typescript;
  }
  #[cfg(feature = "utils")]
  pub use swc_ecma_utils as utils;
  #[cfg(feature = "visit")]
  pub use swc_ecma_visit as visit;
}

View file

@ -0,0 +1,243 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::fmt;
use std::sync::Arc;
use crate::comments::MultiThreadedComments;
use crate::swc::ast::Module;
use crate::swc::ast::Program;
use crate::swc::ast::Script;
use crate::swc::common::comments::Comment;
use crate::swc::common::SyntaxContext;
use crate::swc::parser::token::TokenAndSpan;
use crate::Diagnostic;
use crate::MediaType;
use crate::SourceRangedForSpanned;
use crate::SourceTextInfo;
/// The two syntax contexts captured when parsing with scope analysis
/// (see `ParsedSource::unresolved_context` / `top_level_context`).
pub(crate) struct SyntaxContexts {
  pub unresolved: SyntaxContext,
  pub top_level: SyntaxContext,
}

/// Shared, immutable payload of a `ParsedSource`; kept behind an `Arc`
/// so that cloning a `ParsedSource` is cheap.
struct ParsedSourceInner {
  specifier: String,
  media_type: MediaType,
  text_info: SourceTextInfo,
  comments: MultiThreadedComments,
  program: Arc<Program>,
  // Only present when tokens were captured during parsing.
  tokens: Option<Arc<Vec<TokenAndSpan>>>,
  // Only present when parsed with scope analysis.
  syntax_contexts: Option<SyntaxContexts>,
  diagnostics: Vec<Diagnostic>,
}

/// A parsed source containing an AST, comments, and possibly tokens.
///
/// Note: This struct is cheap to clone.
#[derive(Clone)]
pub struct ParsedSource {
  inner: Arc<ParsedSourceInner>,
}
impl ParsedSource {
  /// Internal constructor used by the parsing functions.
  #[allow(clippy::too_many_arguments)]
  pub(crate) fn new(
    specifier: String,
    media_type: MediaType,
    text_info: SourceTextInfo,
    comments: MultiThreadedComments,
    program: Arc<Program>,
    tokens: Option<Arc<Vec<TokenAndSpan>>>,
    syntax_contexts: Option<SyntaxContexts>,
    diagnostics: Vec<Diagnostic>,
  ) -> Self {
    ParsedSource {
      inner: Arc::new(ParsedSourceInner {
        specifier,
        media_type,
        text_info,
        comments,
        program,
        tokens,
        syntax_contexts,
        diagnostics,
      }),
    }
  }

  /// Gets the module specifier of the module.
  pub fn specifier(&self) -> &str {
    &self.inner.specifier
  }

  /// Gets the media type of the module.
  pub fn media_type(&self) -> MediaType {
    self.inner.media_type
  }

  /// Gets the text content of the module.
  pub fn text_info(&self) -> &SourceTextInfo {
    &self.inner.text_info
  }

  /// Gets the parsed program.
  ///
  /// Cheap: this only clones the inner `Arc`, not the AST itself.
  pub fn program(&self) -> Arc<Program> {
    self.inner.program.clone()
  }

  /// Gets the parsed program as a reference.
  pub fn program_ref(&self) -> &Program {
    &self.inner.program
  }

  /// Gets the parsed module.
  ///
  /// This will panic if the source is not a module.
  pub fn module(&self) -> &Module {
    match self.program_ref() {
      Program::Module(module) => module,
      Program::Script(_) => panic!("Cannot get a module when the source was a script. Use `.program()` instead."),
    }
  }

  /// Gets the parsed script.
  ///
  /// This will panic if the source is not a script.
  pub fn script(&self) -> &Script {
    match self.program_ref() {
      Program::Script(script) => script,
      Program::Module(_) => panic!("Cannot get a script when the source was a module. Use `.program()` instead."),
    }
  }

  /// Gets the comments found in the source file.
  pub fn comments(&self) -> &MultiThreadedComments {
    &self.inner.comments
  }

  /// Get the source's leading comments, where triple slash directives might
  /// be located.
  pub fn get_leading_comments(&self) -> Vec<Comment> {
    self
      .inner
      .comments
      .get_leading(self.inner.program.range().start)
      .cloned()
      .unwrap_or_default()
  }

  /// Gets the tokens found in the source file.
  ///
  /// This will panic if tokens were not captured during parsing.
  pub fn tokens(&self) -> &[TokenAndSpan] {
    self
      .inner
      .tokens
      .as_ref()
      .expect("Tokens not found because they were not captured during parsing.")
  }

  /// Gets the top level context used when parsing with scope analysis.
  ///
  /// This will panic if the source was not parsed with scope analysis.
  pub fn top_level_context(&self) -> SyntaxContext {
    self.syntax_contexts().top_level
  }

  /// Gets the unresolved context used when parsing with scope analysis.
  ///
  /// This will panic if the source was not parsed with scope analysis.
  pub fn unresolved_context(&self) -> SyntaxContext {
    self.syntax_contexts().unresolved
  }

  // Shared accessor backing the two context getters above.
  fn syntax_contexts(&self) -> &SyntaxContexts {
    self.inner.syntax_contexts.as_ref().expect("Could not get syntax context because the source was not parsed with scope analysis.")
  }

  /// Gets extra non-fatal diagnostics found while parsing.
  pub fn diagnostics(&self) -> &Vec<Diagnostic> {
    &self.inner.diagnostics
  }

  /// Gets if this source is a module.
  pub fn is_module(&self) -> bool {
    matches!(self.program_ref(), Program::Module(_))
  }

  /// Gets if this source is a script.
  pub fn is_script(&self) -> bool {
    matches!(self.program_ref(), Program::Script(_))
  }
}
impl fmt::Debug for ParsedSource {
  /// Compact debug representation showing only the comments and program.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // Use the actual type name. The previous label, "ParsedModule", was a
    // stale leftover and mislabeled this type in debug output.
    f.debug_struct("ParsedSource")
      .field("comments", &self.inner.comments)
      .field("program", &self.inner.program)
      .finish()
  }
}
#[cfg(feature = "view")]
impl ParsedSource {
  /// Gets a dprint-swc-ext view of the module.
  ///
  /// This provides a closure to examine an "ast view" of the swc AST
  /// which has more helper methods and allows for going up the ancestors
  /// of a node.
  ///
  /// Read more: https://github.com/dprint/dprint-swc-ext
  pub fn with_view<'a, T>(
    &self,
    with_view: impl FnOnce(crate::view::Program<'a>) -> T,
  ) -> T {
    let program = match self.program_ref() {
      Program::Module(module) => crate::view::ProgramRef::Module(module),
      Program::Script(script) => crate::view::ProgramRef::Script(script),
    };
    let tokens = self.inner.tokens.as_ref().map(|t| t as &[TokenAndSpan]);
    let comments = crate::view::Comments {
      leading: self.comments().leading_map(),
      trailing: self.comments().trailing_map(),
    };
    let program_info = crate::view::ProgramInfo {
      program,
      text_info: Some(self.text_info()),
      tokens,
      comments: Some(comments),
    };
    crate::view::with_ast_view(program_info, with_view)
  }
}
#[cfg(test)]
mod test {
  #[cfg(feature = "view")]
  #[test]
  fn should_parse_program() {
    use super::*;
    use crate::parse_program;
    use crate::view::NodeTrait;
    use crate::ParseParams;

    let parsed = parse_program(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("// 1\n1 + 1\n// 2".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .expect("should parse");

    // The closure's return value should be passed through by `with_view`.
    let result = parsed.with_view(|program| {
      assert_eq!(program.children().len(), 1);
      assert_eq!(program.children()[0].text(), "1 + 1");
      2
    });
    assert_eq!(result, 2);
  }
}

View file

@ -0,0 +1,570 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use crate::comments::MultiThreadedComments;
use crate::swc::ast::EsVersion;
use crate::swc::ast::Module;
use crate::swc::ast::Program;
use crate::swc::ast::Script;
use crate::swc::common::comments::SingleThreadedComments;
use crate::swc::common::input::StringInput;
use crate::swc::parser::error::Error as SwcError;
use crate::swc::parser::lexer::Lexer;
use crate::swc::parser::token::TokenAndSpan;
use crate::swc::parser::EsConfig;
use crate::swc::parser::Syntax;
use crate::swc::parser::TsConfig;
use crate::Diagnostic;
use crate::MediaType;
use crate::ParsedSource;
use crate::SourceTextInfo;
/// Ecmascript version used for lexing and parsing.
///
/// Passed to the swc lexer in `parse_string_input`.
pub const ES_VERSION: EsVersion = EsVersion::Es2021;
/// Parameters for parsing.
pub struct ParseParams {
  /// Specifier of the source text.
  pub specifier: String,
  /// Source text stored in a `SourceTextInfo`.
  pub text_info: SourceTextInfo,
  /// Media type of the source text.
  pub media_type: MediaType,
  /// Whether to capture tokens or not.
  ///
  /// `ParsedSource::tokens()` panics unless this was `true`.
  pub capture_tokens: bool,
  /// Whether to apply swc's scope analysis.
  ///
  /// Requires the "transforms" feature; parsing panics otherwise.
  pub scope_analysis: bool,
  /// Syntax to use when parsing.
  ///
  /// `deno_ast` will get a default `Syntax` to use based on the
  /// media type, but you may use this to provide a custom `Syntax`.
  pub maybe_syntax: Option<Syntax>,
}
/// Parses the provided information attempting to figure out if the provided
/// text is for a script or a module.
pub fn parse_program(params: ParseParams) -> Result<ParsedSource, Diagnostic> {
  // Identity post-processing—store the program exactly as parsed.
  parse(params, ParseMode::Program, |program| program)
}
/// Parses the provided information as a program with the option of providing some
/// post-processing to the result.
///
/// The program returned from the `post_process` closure is what ends up
/// stored in the resulting `ParsedSource`.
///
/// # Example
///
/// ```
/// deno_ast::parse_program_with_post_process(
///  deno_ast::ParseParams {
///    specifier: "file:///my_file.ts".to_string(),
///    media_type: deno_ast::MediaType::TypeScript,
///    text_info: deno_ast::SourceTextInfo::from_string("".to_string()),
///    capture_tokens: true,
///    maybe_syntax: None,
///    scope_analysis: false,
///  },
///  |program| {
///    // do something with the program here before it gets stored
///    program
///  },
/// );
/// ```
pub fn parse_program_with_post_process(
  params: ParseParams,
  post_process: impl FnOnce(Program) -> Program,
) -> Result<ParsedSource, Diagnostic> {
  parse(params, ParseMode::Program, post_process)
}
/// Parses the provided information to a module.
pub fn parse_module(params: ParseParams) -> Result<ParsedSource, Diagnostic> {
  // Identity post-processing—store the module exactly as parsed.
  parse(params, ParseMode::Module, |program| program)
}
/// Parses a module with post processing (see docs on `parse_program_with_post_process`).
pub fn parse_module_with_post_process(
  params: ParseParams,
  post_process: impl FnOnce(Module) -> Module,
) -> Result<ParsedSource, Diagnostic> {
  parse(params, ParseMode::Module, |program| {
    // `ParseMode::Module` guarantees the parser produced a module.
    if let Program::Module(module) = program {
      Program::Module(post_process(module))
    } else {
      unreachable!()
    }
  })
}
/// Parses the provided information to a script.
pub fn parse_script(params: ParseParams) -> Result<ParsedSource, Diagnostic> {
  // Identity post-processing—store the script exactly as parsed.
  parse(params, ParseMode::Script, |program| program)
}
/// Parses a script with post processing (see docs on `parse_program_with_post_process`).
pub fn parse_script_with_post_process(
  params: ParseParams,
  post_process: impl FnOnce(Script) -> Script,
) -> Result<ParsedSource, Diagnostic> {
  parse(params, ParseMode::Script, |program| {
    // `ParseMode::Script` guarantees the parser produced a script.
    if let Program::Script(script) = program {
      Program::Script(post_process(script))
    } else {
      unreachable!()
    }
  })
}
/// Which swc parse entry point to use.
enum ParseMode {
  /// Let swc determine whether the text is a module or a script.
  Program,
  /// Parse as an ES module.
  Module,
  /// Parse as a script.
  Script,
}
/// Shared implementation behind all the `parse_*` entry points.
///
/// Parses according to `parse_mode`, runs `post_process` on the result,
/// optionally applies swc's resolver for scope analysis, and bundles
/// everything into a `ParsedSource`.
fn parse(
  params: ParseParams,
  parse_mode: ParseMode,
  post_process: impl FnOnce(Program) -> Program,
) -> Result<ParsedSource, Diagnostic> {
  let source = params.text_info;
  let specifier = params.specifier;
  let input = source.as_string_input();
  // `MediaType` is `Copy`; bind it once and reuse it below (previously the
  // final `ParsedSource::new` call re-read `params.media_type` with a
  // redundant `.to_owned()`).
  let media_type = params.media_type;
  let syntax = params
    .maybe_syntax
    .unwrap_or_else(|| get_syntax(media_type));
  // A hard parse error aborts; recoverable errors become diagnostics below.
  let (comments, program, tokens, errors) =
    parse_string_input(input, syntax, params.capture_tokens, parse_mode)
      .map_err(|err| {
        Diagnostic::from_swc_error(err, &specifier, source.clone())
      })?;
  let diagnostics = errors
    .into_iter()
    .map(|err| Diagnostic::from_swc_error(err, &specifier, source.clone()))
    .collect();
  let program = post_process(program);

  let (program, syntax_contexts) = if params.scope_analysis {
    // Scope analysis needs swc's resolver, which lives behind the
    // "transforms" feature; without it this is a hard error.
    #[cfg(feature = "transforms")]
    {
      use crate::swc::common::Globals;
      use crate::swc::common::Mark;
      use crate::swc::common::SyntaxContext;
      use crate::swc::transforms::resolver;
      use crate::swc::visit::FoldWith;

      let globals = Globals::new();
      crate::swc::common::GLOBALS.set(&globals, || {
        let unresolved_mark = Mark::new();
        let top_level_mark = Mark::new();
        let program = program.fold_with(&mut resolver(
          unresolved_mark,
          top_level_mark,
          true,
        ));
        (
          program,
          Some(crate::SyntaxContexts {
            unresolved: SyntaxContext::empty().apply_mark(unresolved_mark),
            top_level: SyntaxContext::empty().apply_mark(top_level_mark),
          }),
        )
      })
    }
    #[cfg(not(feature = "transforms"))]
    panic!("Cannot parse with scope analysis. Please enable the 'transforms' feature.")
  } else {
    (program, None)
  };

  Ok(ParsedSource::new(
    specifier,
    media_type,
    source,
    MultiThreadedComments::from_single_threaded(comments),
    Arc::new(program),
    tokens.map(Arc::new),
    syntax_contexts,
    diagnostics,
  ))
}
/// Lexes and parses `input`, returning the comments, program, optionally
/// captured tokens, and any recoverable errors.
#[allow(clippy::type_complexity)]
fn parse_string_input(
  input: StringInput,
  syntax: Syntax,
  capture_tokens: bool,
  parse_mode: ParseMode,
) -> Result<
  (
    SingleThreadedComments,
    Program,
    Option<Vec<TokenAndSpan>>,
    Vec<SwcError>,
  ),
  SwcError,
> {
  let comments = SingleThreadedComments::default();
  let lexer = Lexer::new(syntax, ES_VERSION, input, Some(&comments));

  // The two branches below cannot share the parse code: wrapping the lexer
  // in `Capturing` changes the parser's concrete type.
  if capture_tokens {
    let capturing_lexer = crate::swc::parser::Capturing::new(lexer);
    let mut parser = crate::swc::parser::Parser::new_from(capturing_lexer);
    let program = match parse_mode {
      ParseMode::Module => Program::Module(parser.parse_module()?),
      ParseMode::Script => Program::Script(parser.parse_script()?),
      ParseMode::Program => parser.parse_program()?,
    };
    let tokens = parser.input().take();
    let errors = parser.take_errors();
    Ok((comments, program, Some(tokens), errors))
  } else {
    let mut parser = crate::swc::parser::Parser::new_from(lexer);
    let program = match parse_mode {
      ParseMode::Module => Program::Module(parser.parse_module()?),
      ParseMode::Script => Program::Script(parser.parse_script()?),
      ParseMode::Program => parser.parse_program()?,
    };
    let errors = parser.take_errors();
    Ok((comments, program, None, errors))
  }
}
/// Gets the default `Syntax` used by `deno_ast` for the provided media type.
pub fn get_syntax(media_type: MediaType) -> Syntax {
match media_type {
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx => {
Syntax::Typescript(TsConfig {
decorators: true,
// should be true for mts and cts:
// https://babeljs.io/docs/babel-preset-typescript#disallowambiguousjsxlike
disallow_ambiguous_jsx_like: matches!(
media_type,
MediaType::Mts | MediaType::Cts
),
dts: matches!(
media_type,
MediaType::Dts | MediaType::Dmts | MediaType::Dcts
),
tsx: media_type == MediaType::Tsx,
no_early_errors: false,
})
}
MediaType::JavaScript
| MediaType::Mjs
| MediaType::Cjs
| MediaType::Jsx
| MediaType::Json
| MediaType::Wasm
| MediaType::TsBuildInfo
| MediaType::SourceMap
| MediaType::Unknown => Syntax::Es(EsConfig {
allow_return_outside_function: true,
allow_super_outside_method: true,
auto_accessors: true,
decorators: false,
decorators_before_export: false,
export_default_from: true,
fn_bind: false,
import_attributes: true,
jsx: media_type == MediaType::Jsx,
explicit_resource_management: true,
}),
}
}
#[cfg(test)]
mod test {
  use crate::LineAndColumnDisplay;

  use super::*;

  #[test]
  fn should_parse_program() {
    let program = parse_program(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("// 1\n1 + 1\n// 2".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .unwrap();
    assert_eq!(program.specifier(), "my_file.js");
    assert_eq!(program.text_info().text_str(), "// 1\n1 + 1\n// 2");
    assert_eq!(program.media_type(), MediaType::JavaScript);
    assert!(matches!(
      program.script().body[0],
      crate::swc::ast::Stmt::Expr(..)
    ));
    // Only the comment before the program start counts as "leading".
    assert_eq!(program.get_leading_comments().len(), 1);
    assert_eq!(program.get_leading_comments()[0].text, " 1");
    assert_eq!(program.tokens().len(), 3);
    assert_eq!(program.comments().get_vec().len(), 2);
  }

  #[test]
  fn should_parse_module() {
    let program = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("// 1\n1 + 1\n// 2".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .unwrap();
    assert!(matches!(
      program.module().body[0],
      crate::swc::ast::ModuleItem::Stmt(..)
    ));
  }

  // `#field in obj` brand checks should parse in plain JavaScript.
  #[cfg(feature = "view")]
  #[test]
  fn should_parse_brand_checks_in_js() {
    use crate::view::ClassDecl;
    use crate::view::ClassMethod;
    use crate::view::NodeTrait;

    let program = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string(
        "class T { method() { #test in this; } }".to_string(),
      ),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .unwrap();
    program.with_view(|program| {
      let class_decl = program.children()[0].expect::<ClassDecl>();
      let class_method = class_decl.class.body[0].expect::<ClassMethod>();
      let method_stmt = class_method.function.body.unwrap().stmts[0];
      assert_eq!(method_stmt.text(), "#test in this;");
    });
  }

  #[test]
  #[should_panic(
    expected = "Tokens not found because they were not captured during parsing."
  )]
  fn should_panic_when_getting_tokens_and_tokens_not_captured() {
    let program = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("// 1\n1 + 1\n// 2".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: false,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .unwrap();
    program.tokens();
  }

  #[test]
  fn should_handle_parse_error() {
    let diagnostic = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("t u".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .err()
    .unwrap();
    assert_eq!(diagnostic.specifier, "my_file.js".to_string());
    // Display positions are 1-indexed.
    assert_eq!(
      diagnostic.display_position(),
      LineAndColumnDisplay {
        line_number: 1,
        column_number: 3,
      }
    );
    assert_eq!(diagnostic.message(), "Expected ';', '}' or <eof>");
  }

  #[test]
  #[should_panic(
    expected = "Could not get syntax context because the source was not parsed with scope analysis."
  )]
  fn should_panic_when_getting_top_level_context_and_scope_analysis_false() {
    get_scope_analysis_false_parsed_source().top_level_context();
  }

  #[test]
  #[should_panic(
    expected = "Could not get syntax context because the source was not parsed with scope analysis."
  )]
  fn should_panic_when_getting_unresolved_context_and_scope_analysis_false() {
    get_scope_analysis_false_parsed_source().unresolved_context();
  }

  // Helper: a parsed source that deliberately skipped scope analysis.
  fn get_scope_analysis_false_parsed_source() -> ParsedSource {
    parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string("// 1\n1 + 1\n// 2".to_string()),
      media_type: MediaType::JavaScript,
      capture_tokens: false,
      maybe_syntax: None,
      scope_analysis: false,
    })
    .unwrap()
  }

  #[cfg(all(feature = "view", feature = "transforms"))]
  #[test]
  fn should_do_scope_analysis() {
    let parsed_source = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string(
        "export function test() { const test = 2; test; } test()".to_string(),
      ),
      media_type: MediaType::JavaScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: true,
    })
    .unwrap();

    parsed_source.with_view(|view| {
      use crate::view::*;

      let func_decl = view.children()[0]
        .expect::<ExportDecl>()
        .decl
        .expect::<FnDecl>();
      let func_decl_inner_expr = func_decl.function.body.unwrap().stmts[1]
        .expect::<ExprStmt>()
        .expr
        .expect::<Ident>();
      let call_expr = view.children()[1]
        .expect::<ExprStmt>()
        .expr
        .expect::<CallExpr>();
      let call_expr_id = call_expr.callee.expect::<Ident>();

      // these should be the same identifier
      assert_eq!(func_decl.ident.to_id(), call_expr_id.to_id());
      // but these shouldn't be
      assert_ne!(func_decl.ident.to_id(), func_decl_inner_expr.to_id());
    });
  }

  #[cfg(all(feature = "view", feature = "transforms"))]
  #[test]
  fn should_scope_analyze_typescript() {
    let parsed_source = parse_module(ParseParams {
      specifier: "my_file.ts".to_string(),
      text_info: SourceTextInfo::from_string(
        r#"import type { Foo } from "./foo.ts";
function _bar(...Foo: Foo) {
  console.log(Foo);
}"#
          .to_string(),
      ),
      media_type: MediaType::TypeScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: true,
    })
    .unwrap();

    parsed_source.with_view(|view| {
      use crate::view::*;

      let named_import_ident =
        view.children()[0].expect::<ImportDecl>().specifiers[0]
          .expect::<ImportNamedSpecifier>()
          .local;
      let bar_func = view.children()[1].expect::<FnDecl>();
      let bar_param_rest_pat =
        bar_func.function.params[0].pat.expect::<RestPat>();
      let bar_param_ident = bar_param_rest_pat.arg.expect::<BindingIdent>().id;
      let bar_param_type_ident = bar_param_rest_pat
        .type_ann
        .unwrap()
        .type_ann
        .expect::<TsTypeRef>()
        .type_name
        .expect::<Ident>();
      let console_log_arg_ident = bar_func.function.body.unwrap().stmts[0]
        .expect::<ExprStmt>()
        .expr
        .expect::<CallExpr>()
        .args[0]
        .expr
        .expect::<Ident>();

      // The value binding (`...Foo` parameter) and the type binding
      // (`import type { Foo }`) must resolve to distinct ids.
      assert_eq!(console_log_arg_ident.to_id(), bar_param_ident.to_id());
      assert_ne!(console_log_arg_ident.to_id(), named_import_ident.to_id());
      assert_ne!(console_log_arg_ident.to_id(), bar_param_type_ident.to_id());
      assert_eq!(named_import_ident.to_id(), bar_param_type_ident.to_id());
      assert_ne!(named_import_ident.to_id(), bar_param_ident.to_id());
    });
  }

  #[test]
  fn should_error_on_syntax_diagnostic() {
    let diagnostic = parse_ts_module("test;\nas#;").err().unwrap();
    assert_eq!(diagnostic.message(), concat!("Expected ';', '}' or <eof>"));
  }

  // Regression test: diagnostic construction must not slice mid-codepoint
  // when the failing line contains multi-byte characters.
  #[test]
  fn should_error_without_issue_when_there_exists_multi_byte_char_on_line_with_syntax_error(
  ) {
    let diagnostic = parse_ts_module(concat!(
      "test;\n",
      r#"console.log("x", `duration ${d} not in range - ${min} ≥ ${d} && ${max} ≥ ${d}`),;"#,
    )).err().unwrap();
    assert_eq!(diagnostic.message(), "Expression expected",);
  }

  #[test]
  fn should_diagnostic_for_no_equals_sign_in_var_decl() {
    let diagnostic =
      parse_for_diagnostic("const Methods {\nf: (x, y) => x + y,\n};");
    assert_eq!(
      diagnostic.message(),
      "'const' declarations must be initialized"
    );
  }

  #[test]
  fn should_diganotic_when_var_stmts_sep_by_comma() {
    let diagnostic = parse_for_diagnostic("let a = 0, let b = 1;");
    assert_eq!(
      diagnostic.message(),
      "`let` cannot be used as an identifier in strict mode"
    );
  }

  #[test]
  fn should_diagnostic_for_exected_expr_type_alias() {
    let diagnostic =
      parse_for_diagnostic("type T =\n | unknown\n { } & unknown;");
    assert_eq!(diagnostic.message(), "Expression expected");
  }

  // Helper: parses successfully but returns the first non-fatal diagnostic.
  fn parse_for_diagnostic(text: &str) -> Diagnostic {
    let result = parse_ts_module(text).unwrap();
    result.diagnostics().first().unwrap().to_owned()
  }

  fn parse_ts_module(text: &str) -> Result<ParsedSource, Diagnostic> {
    parse_module(ParseParams {
      specifier: "my_file.ts".to_string(),
      text_info: SourceTextInfo::from_string(text.to_string()),
      media_type: MediaType::TypeScript,
      capture_tokens: false,
      maybe_syntax: None,
      scope_analysis: false,
    })
  }
}

View file

@ -0,0 +1,440 @@
// Copyright 2020-2022 the Deno authors. All rights reserved. MIT license.
use crate::swc::ast::Id;
use crate::swc::ast::{
ArrowExpr, BlockStmt, BlockStmtOrExpr, CatchClause, ClassDecl, ClassExpr,
DoWhileStmt, Expr, FnDecl, FnExpr, ForInStmt, ForOfStmt, ForStmt, Function,
Ident, ImportDefaultSpecifier, ImportNamedSpecifier, ImportStarAsSpecifier,
Param, Pat, SwitchStmt, TsInterfaceDecl, TsTypeAliasDecl, VarDecl,
VarDeclKind, WhileStmt, WithStmt,
};
use crate::swc::atoms::JsWord;
use crate::swc::utils::find_pat_ids;
use crate::swc::visit::Visit;
use crate::swc::visit::VisitWith;
use crate::view;
use std::collections::HashMap;
/// The bindings found by analyzing a program (see `Scope::analyze`).
#[derive(Debug)]
pub struct Scope {
  // Every declared binding, keyed by its swc id (symbol + syntax context).
  vars: HashMap<Id, Var>,
  // All ids that share a symbol name; one name may be declared in
  // multiple scopes.
  symbols: HashMap<JsWord, Vec<Id>>,
}
impl Scope {
  /// Walks the program and records every binding it declares.
  pub fn analyze(program: view::Program) -> Self {
    let mut scope = Self {
      vars: Default::default(),
      symbols: Default::default(),
    };
    let mut path = Vec::new();
    {
      let mut analyzer = Analyzer {
        scope: &mut scope,
        path: &mut path,
      };
      match program {
        view::Program::Module(module) => {
          module.inner.visit_with(&mut analyzer)
        }
        view::Program::Script(script) => {
          script.inner.visit_with(&mut analyzer)
        }
      }
    }
    scope
  }

  /// Get all declarations with a symbol.
  pub fn ids_with_symbol(&self, sym: &JsWord) -> Option<&Vec<Id>> {
    self.symbols.get(sym)
  }

  pub fn var(&self, id: &Id) -> Option<&Var> {
    self.vars.get(id)
  }

  pub fn var_by_ident(&self, ident: &view::Ident) -> Option<&Var> {
    self.var(&ident.inner.to_id())
  }

  /// An id with no recorded declaration is treated as a global.
  pub fn is_global(&self, id: &Id) -> bool {
    self.var(id).is_none()
  }
}
/// A single recorded binding: where it lives and how it was declared.
#[derive(Debug)]
pub struct Var {
  // The chain of scope kinds enclosing the declaration (empty = root).
  path: Vec<ScopeKind>,
  kind: BindingKind,
}
impl Var {
  /// Empty path means root scope.
  #[allow(dead_code)]
  pub fn path(&self) -> &[ScopeKind] {
    &self.path
  }

  /// The kind of declaration that introduced this binding.
  pub fn kind(&self) -> BindingKind {
    self.kind
  }
}
/// How a binding was introduced (see the `Analyzer` visitor).
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum BindingKind {
  /// `var` declaration.
  Var,
  /// `const` declaration.
  Const,
  /// `let` declaration.
  Let,
  /// Function declaration or named function expression.
  Function,
  /// Function, method, or arrow parameter.
  Param,
  /// Class declaration or named class expression.
  Class,
  /// `catch (e)` clause binding.
  CatchClause,
  /// This means that the binding comes from `ImportStarAsSpecifier`, like
  /// `import * as foo from "foo.ts";`
  /// `foo` effectively represents a namespace.
  NamespaceImport,
  /// Represents `ImportDefaultSpecifier` or `ImportNamedSpecifier`.
  /// e.g.
  /// - import foo from "foo.ts";
  /// - import { foo } from "foo.ts";
  ValueImport,
  /// TypeScript type alias or interface declaration.
  Type,
}
impl BindingKind {
pub fn is_import(&self) -> bool {
matches!(
*self,
BindingKind::ValueImport | BindingKind::NamespaceImport
)
}
}
/// A kind of enclosing scope recorded in a `Var`'s path.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum ScopeKind {
  // Module,
  Arrow,
  Function,
  Block,
  Loop,
  Class,
  Switch,
  With,
  Catch,
}
/// Visitor that records every binding it encounters into `scope`,
/// tagging each with the scope `path` current at the declaration site.
struct Analyzer<'a> {
  scope: &'a mut Scope,
  path: &'a mut Vec<ScopeKind>,
}
impl Analyzer<'_> {
  /// Records a single declaration under the current scope path.
  fn declare_id(&mut self, kind: BindingKind, id: Id) {
    let var = Var {
      kind,
      path: self.path.clone(),
    };
    self.scope.vars.insert(id.clone(), var);
    self
      .scope
      .symbols
      .entry(id.0.clone())
      .or_default()
      .push(id);
  }

  fn declare(&mut self, kind: BindingKind, ident: &Ident) {
    self.declare_id(kind, ident.to_id());
  }

  /// Records every identifier bound by a (possibly destructuring) pattern.
  fn declare_pat(&mut self, kind: BindingKind, pat: &Pat) {
    let ids: Vec<Id> = find_pat_ids(pat);
    for id in ids {
      self.declare_id(kind, id);
    }
  }

  /// Visits `node` with `kind` pushed onto the scope path.
  fn visit_with_path<T>(&mut self, kind: ScopeKind, node: &T)
  where
    T: 'static + for<'any> VisitWith<Analyzer<'any>>,
  {
    self.path.push(kind);
    node.visit_with(self);
    self.path.pop();
  }

  /// Runs `op` with `kind` pushed onto the scope path.
  fn with<F>(&mut self, kind: ScopeKind, op: F)
  where
    F: FnOnce(&mut Analyzer),
  {
    self.path.push(kind);
    op(self);
    self.path.pop();
  }
}
impl Visit for Analyzer<'_> {
  fn visit_arrow_expr(&mut self, n: &ArrowExpr) {
    self.with(ScopeKind::Arrow, |a| {
      // Parameters of `ArrowExpr` are of type `Vec<Pat>`, not `Vec<Param>`,
      // which means `visit_param` does _not_ handle parameters of `ArrowExpr`.
      // We need to handle them manually here.
      for param in &n.params {
        a.declare_pat(BindingKind::Param, param);
      }
      n.visit_children_with(a);
    });
  }

  /// Overridden not to add ScopeKind::Block
  fn visit_block_stmt_or_expr(&mut self, n: &BlockStmtOrExpr) {
    match n {
      BlockStmtOrExpr::BlockStmt(s) => s.stmts.visit_with(self),
      BlockStmtOrExpr::Expr(e) => e.visit_with(self),
    }
  }

  fn visit_var_decl(&mut self, n: &VarDecl) {
    n.decls.iter().for_each(|v| {
      // Visit the initializer first so declarations inside it are recorded
      // before the declarator's own binding.
      v.init.visit_with(self);

      // If the class name and the variable name are the same like `let Foo = class Foo {}`,
      // this binding should be treated as `BindingKind::Class`.
      if let Some(expr) = &v.init {
        if let Expr::Class(ClassExpr {
          ident: Some(class_name),
          ..
        }) = &**expr
        {
          if let Pat::Ident(var_name) = &v.name {
            if var_name.id.sym == class_name.sym {
              self.declare(BindingKind::Class, class_name);
              return;
            }
          }
        }
      }

      self.declare_pat(
        match n.kind {
          VarDeclKind::Var => BindingKind::Var,
          VarDeclKind::Let => BindingKind::Let,
          VarDeclKind::Const => BindingKind::Const,
        },
        &v.name,
      );
    });
  }

  /// Overridden not to add ScopeKind::Block
  fn visit_function(&mut self, n: &Function) {
    n.decorators.visit_with(self);
    n.params.visit_with(self);
    // Don't add ScopeKind::Block
    match &n.body {
      Some(s) => s.stmts.visit_with(self),
      None => {}
    }
  }

  fn visit_fn_decl(&mut self, n: &FnDecl) {
    // The function name is declared in the enclosing scope; the body gets
    // its own Function scope.
    self.declare(BindingKind::Function, &n.ident);
    self.visit_with_path(ScopeKind::Function, &n.function);
  }

  fn visit_fn_expr(&mut self, n: &FnExpr) {
    // Record the optional name of a named function expression.
    if let Some(ident) = &n.ident {
      self.declare(BindingKind::Function, ident);
    }
    self.visit_with_path(ScopeKind::Function, &n.function);
  }

  fn visit_class_decl(&mut self, n: &ClassDecl) {
    self.declare(BindingKind::Class, &n.ident);
    self.visit_with_path(ScopeKind::Class, &n.class);
  }

  fn visit_class_expr(&mut self, n: &ClassExpr) {
    // Record the optional name of a named class expression.
    if let Some(class_name) = n.ident.as_ref() {
      self.declare(BindingKind::Class, class_name);
    }
    self.visit_with_path(ScopeKind::Class, &n.class);
  }

  fn visit_block_stmt(&mut self, n: &BlockStmt) {
    self.visit_with_path(ScopeKind::Block, &n.stmts)
  }

  fn visit_catch_clause(&mut self, n: &CatchClause) {
    // The catch parameter is declared in the enclosing scope's path; the
    // catch body gets its own Catch scope.
    if let Some(pat) = &n.param {
      self.declare_pat(BindingKind::CatchClause, pat);
    }
    self.visit_with_path(ScopeKind::Catch, &n.body)
  }

  fn visit_param(&mut self, n: &Param) {
    self.declare_pat(BindingKind::Param, &n.pat);
  }

  fn visit_import_named_specifier(&mut self, n: &ImportNamedSpecifier) {
    self.declare(BindingKind::ValueImport, &n.local);
  }

  fn visit_import_default_specifier(&mut self, n: &ImportDefaultSpecifier) {
    self.declare(BindingKind::ValueImport, &n.local);
  }

  fn visit_import_star_as_specifier(&mut self, n: &ImportStarAsSpecifier) {
    self.declare(BindingKind::NamespaceImport, &n.local);
  }

  fn visit_with_stmt(&mut self, n: &WithStmt) {
    // The `with` object expression is visited in the enclosing scope; only
    // the body gets ScopeKind::With.
    n.obj.visit_with(self);
    self.with(ScopeKind::With, |a| n.body.visit_children_with(a))
  }

  // For all loop forms below, the loop-head expressions are visited in the
  // enclosing scope; only the body gets ScopeKind::Loop.
  fn visit_for_stmt(&mut self, n: &ForStmt) {
    n.init.visit_with(self);
    n.update.visit_with(self);
    n.test.visit_with(self);
    self.visit_with_path(ScopeKind::Loop, &n.body);
  }

  fn visit_for_of_stmt(&mut self, n: &ForOfStmt) {
    n.left.visit_with(self);
    n.right.visit_with(self);
    self.visit_with_path(ScopeKind::Loop, &n.body);
  }

  fn visit_for_in_stmt(&mut self, n: &ForInStmt) {
    n.left.visit_with(self);
    n.right.visit_with(self);
    self.visit_with_path(ScopeKind::Loop, &n.body);
  }

  fn visit_do_while_stmt(&mut self, n: &DoWhileStmt) {
    n.test.visit_with(self);
    self.visit_with_path(ScopeKind::Loop, &n.body);
  }

  fn visit_while_stmt(&mut self, n: &WhileStmt) {
    n.test.visit_with(self);
    self.visit_with_path(ScopeKind::Loop, &n.body);
  }

  fn visit_switch_stmt(&mut self, n: &SwitchStmt) {
    n.discriminant.visit_with(self);
    self.visit_with_path(ScopeKind::Switch, &n.cases);
  }

  // TypeScript-only declarations produce Type bindings.
  fn visit_ts_type_alias_decl(&mut self, n: &TsTypeAliasDecl) {
    self.declare(BindingKind::Type, &n.id);
  }

  fn visit_ts_interface_decl(&mut self, n: &TsInterfaceDecl) {
    self.declare(BindingKind::Type, &n.id);
  }
}
#[cfg(test)]
mod tests {
  use super::{BindingKind, Scope, ScopeKind, Var};
  use crate::parse_module;
  use crate::swc::ast::Id;
  use crate::MediaType;
  use crate::ParseParams;
  use crate::SourceTextInfo;

  // Parses `source_code` with scope analysis enabled and hands the
  // resulting `Scope` to `test`.
  fn test_scope(source_code: &str, test: impl Fn(Scope)) {
    let parsed_source = parse_module(ParseParams {
      specifier: "my_file.js".to_string(),
      text_info: SourceTextInfo::from_string(source_code.to_string()),
      media_type: MediaType::TypeScript,
      capture_tokens: true,
      maybe_syntax: None,
      scope_analysis: true,
    })
    .unwrap();
    parsed_source.with_view(|view| {
      let scope = Scope::analyze(view);
      test(scope);
    });
  }

  // Resolves the single id declared for symbol `s`, panicking when the
  // symbol is missing or ambiguous.
  fn id(scope: &Scope, s: &str) -> Id {
    let ids = scope.ids_with_symbol(&s.into());
    if ids.is_none() {
      panic!("No identifier named {}", s);
    }
    let ids = ids.unwrap();
    if ids.len() > 1 {
      panic!("Multiple identifers named {} found", s);
    }
    ids.first().unwrap().clone()
  }

  fn var<'a>(scope: &'a Scope, symbol: &str) -> &'a Var {
    scope.var(&id(scope, symbol)).unwrap()
  }

  #[test]
  fn scopes() {
    let source_code = r#"
    const a = "a";
    const unused = "unused";
    function asdf(b: number, c: string): number {
      console.log(a, b);
      {
        const c = 1;
        let d = 2;
      }
      return 1;
    }
    class Foo {
      #fizz = "fizz";
      bar() {
      }
    }
    try {
      // some code that might throw
      throw new Error("asdf");
    } catch (e) {
      const msg = "asdf " + e.message;
    }
    "#;
    test_scope(source_code, |scope| {
      assert_eq!(var(&scope, "a").kind(), BindingKind::Const);
      assert_eq!(var(&scope, "a").path(), &[]);
      assert_eq!(var(&scope, "b").kind(), BindingKind::Param);
      // `c` is declared twice: once as a parameter and once in the block.
      assert_eq!(scope.ids_with_symbol(&"c".into()).unwrap().len(), 2);
      assert_eq!(
        var(&scope, "d").path(),
        &[ScopeKind::Function, ScopeKind::Block]
      );
      assert_eq!(var(&scope, "Foo").kind(), BindingKind::Class);
      assert_eq!(var(&scope, "Foo").path(), &[]);
      // The catch parameter is recorded at the enclosing (root) path.
      assert_eq!(var(&scope, "e").kind(), BindingKind::CatchClause);
      assert_eq!(var(&scope, "e").path(), &[]);
    });
  }
}

View file

@ -0,0 +1,202 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::cmp::Ordering;
use std::ops::Range;
/// A single text edit: substitute `new_text` for the bytes at `range`.
#[derive(Clone, Debug)]
pub struct TextChange {
  /// Range start to end byte index.
  pub range: Range<usize>,
  /// New text to insert or replace at the provided range.
  pub new_text: String,
}
impl TextChange {
  /// Creates a change that replaces byte range `start..end` with `new_text`.
  pub fn new(start: usize, end: usize, new_text: String) -> Self {
    let range = start..end;
    Self { range, new_text }
  }
}
/// Applies the text changes to the given source text.
pub fn apply_text_changes(
source: &str,
mut changes: Vec<TextChange>,
) -> String {
changes.sort_by(|a, b| match a.range.start.cmp(&b.range.start) {
Ordering::Equal => a.range.end.cmp(&b.range.end),
ordering => ordering,
});
let mut last_index = 0;
let mut final_text = String::new();
for (i, change) in changes.iter().enumerate() {
if change.range.start > change.range.end {
panic!(
"Text change had start index {} greater than end index {}.\n\n{:?}",
change.range.start,
change.range.end,
&changes[0..i + 1],
)
}
if change.range.start < last_index {
panic!("Text changes were overlapping. Past index was {}, but new change had index {}.\n\n{:?}", last_index, change.range.start, &changes[0..i + 1]);
} else if change.range.start > last_index && last_index < source.len() {
final_text.push_str(
&source[last_index..std::cmp::min(source.len(), change.range.start)],
);
}
final_text.push_str(&change.new_text);
last_index = change.range.end;
}
if last_index < source.len() {
final_text.push_str(&source[last_index..]);
}
final_text
}
#[cfg(test)]
mod test {
  use super::*;

  #[test]
  fn applies_text_changes() {
    // replacing text
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![
          TextChange::new(9, 10, "z".to_string()),
          TextChange::new(4, 6, "y".to_string()),
          TextChange::new(1, 2, "x".to_string()),
        ]
      ),
      "0x23y678z".to_string(),
    );
    // replacing beside (adjacent, non-overlapping ranges)
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![
          TextChange::new(0, 5, "a".to_string()),
          TextChange::new(5, 7, "b".to_string()),
          TextChange::new(7, 10, "c".to_string()),
        ]
      ),
      "abc".to_string(),
    );
    // full replace
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(0, 10, "x".to_string()),]
      ),
      "x".to_string(),
    );
    // 1 over (end index past the end of the source is tolerated)
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(0, 11, "x".to_string()),]
      ),
      "x".to_string(),
    );
    // insert (zero-width range)
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(5, 5, "x".to_string()),]
      ),
      "01234x56789".to_string(),
    );
    // prepend
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(0, 0, "x".to_string()),]
      ),
      "x0123456789".to_string(),
    );
    // append
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(10, 10, "x".to_string()),]
      ),
      "0123456789x".to_string(),
    );
    // append over (insert index past the end of the source)
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![TextChange::new(11, 11, "x".to_string()),]
      ),
      "0123456789x".to_string(),
    );
    // multiple at start (equal-position inserts keep their input order)
    assert_eq!(
      apply_text_changes(
        "0123456789",
        vec![
          TextChange::new(0, 7, "a".to_string()),
          TextChange::new(0, 0, "b".to_string()),
          TextChange::new(0, 0, "c".to_string()),
          TextChange::new(7, 10, "d".to_string()),
        ]
      ),
      "bcad".to_string(),
    );
  }

  #[test]
  #[should_panic(
    expected = "Text changes were overlapping. Past index was 10, but new change had index 5."
  )]
  fn panics_text_change_within() {
    apply_text_changes(
      "0123456789",
      vec![
        TextChange::new(3, 10, "x".to_string()),
        TextChange::new(5, 7, "x".to_string()),
      ],
    );
  }

  #[test]
  #[should_panic(
    expected = "Text changes were overlapping. Past index was 4, but new change had index 3."
  )]
  fn panics_text_change_overlap() {
    apply_text_changes(
      "0123456789",
      vec![
        TextChange::new(2, 4, "x".to_string()),
        TextChange::new(3, 5, "x".to_string()),
      ],
    );
  }

  #[test]
  #[should_panic(
    expected = "Text change had start index 2 greater than end index 1."
  )]
  fn panics_start_greater_end() {
    apply_text_changes(
      "0123456789",
      vec![TextChange::new(2, 1, "x".to_string())],
    );
  }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,525 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use crate::swc::ast as swc_ast;
use crate::swc::common::DUMMY_SP;
use crate::swc::visit::noop_fold_type;
use crate::swc::visit::Fold;
/// Transforms import declarations to variable declarations
/// with a dynamic import. This is used to provide import
/// declaration support in script contexts such as the Deno REPL.
///
/// Stateless marker type; the actual rewriting happens in its
/// [`Fold`] implementation.
pub struct ImportDeclsToVarDeclsFolder;
impl Fold for ImportDeclsToVarDeclsFolder {
  noop_fold_type!(); // skip typescript specific nodes

  /// Rewrites a top-level `import` declaration into either a bare
  /// `await import(...)` expression statement (side-effect-only import)
  /// or a `const` destructuring declaration; every other module item is
  /// returned unchanged.
  fn fold_module_item(
    &mut self,
    module_item: swc_ast::ModuleItem,
  ) -> swc_ast::ModuleItem {
    use crate::swc::ast::*;

    match module_item {
      ModuleItem::ModuleDecl(ModuleDecl::Import(import_decl)) => {
        // Handle type only imports
        if import_decl.type_only {
          // should have no side effects
          return create_empty_stmt();
        }

        // The initializer (ex. `await import('./mod.ts')`)
        let initializer =
          create_await_import_expr(&import_decl.src.value, import_decl.with);

        // Handle imports for the side effects
        // ex. `import "module.ts"` -> `await import("module.ts");`
        if import_decl.specifiers.is_empty() {
          return ModuleItem::Stmt(Stmt::Expr(ExprStmt {
            span: DUMMY_SP,
            expr: initializer,
          }));
        }

        // Collect the specifiers and create the variable statement
        // (default imports become a `"default"` key, named imports become
        // key/value or shorthand properties; namespace imports are handled
        // separately below).
        let named_import_props = import_decl
          .specifiers
          .iter()
          .filter_map(|specifier| match specifier {
            ImportSpecifier::Default(specifier) => Some(create_key_value(
              "default".to_string(),
              specifier.local.clone(),
            )),
            ImportSpecifier::Named(specifier) => {
              Some(match specifier.imported.as_ref() {
                Some(name) => create_key_value(
                  match name {
                    ModuleExportName::Ident(ident) => ident.sym.to_string(),
                    ModuleExportName::Str(str) => str.value.to_string(),
                  },
                  specifier.local.clone(),
                ),
                None => create_assignment(specifier.local.clone()),
              })
            }
            ImportSpecifier::Namespace(_) => None,
          })
          .collect::<Vec<_>>();
        let namespace_import_name =
          import_decl
            .specifiers
            .iter()
            .find_map(|specifier| match specifier {
              ImportSpecifier::Namespace(specifier) => Some(BindingIdent {
                id: specifier.local.clone(),
                type_ann: None,
              }),
              _ => None,
            });

        // NOTE: when both named and namespace specifiers are present, the
        // initializer is cloned into two declarators, so the emitted code
        // awaits `import(...)` twice (one call per declarator).
        ModuleItem::Stmt(Stmt::Decl(Decl::Var(Box::new(VarDecl {
          span: DUMMY_SP,
          kind: VarDeclKind::Const,
          declare: false,
          decls: {
            let mut decls = Vec::new();
            if !named_import_props.is_empty() {
              decls.push(VarDeclarator {
                span: DUMMY_SP,
                name: Pat::Object(ObjectPat {
                  span: DUMMY_SP,
                  optional: false,
                  props: named_import_props,
                  type_ann: None,
                }),
                definite: false,
                init: Some(initializer.clone()),
              });
            }
            if let Some(namespace_import) = namespace_import_name {
              decls.push(VarDeclarator {
                span: DUMMY_SP,
                name: Pat::Ident(namespace_import),
                definite: false,
                init: Some(initializer),
              });
            }
            decls
          },
        }))))
      }
      _ => module_item,
    }
  }
}
/// Strips export declarations and exports on named exports so the
/// code can be used in script contexts. This is useful for example
/// in the Deno REPL.
///
/// Stateless marker type; see its [`Fold`] implementation for the
/// per-export-kind rewrite rules.
pub struct StripExportsFolder;
impl Fold for StripExportsFolder {
  noop_fold_type!(); // skip typescript specific nodes

  /// Removes the `export` surface from a module item while keeping any
  /// side effects: re-exports become `await import(...)` statements,
  /// exported declarations keep their declaration, and export-only
  /// constructs collapse to an empty statement.
  fn fold_module_item(
    &mut self,
    module_item: swc_ast::ModuleItem,
  ) -> swc_ast::ModuleItem {
    use crate::swc::ast::*;

    match module_item {
      // `export * from "..."` -> `await import("...")` (keeps side effects)
      ModuleItem::ModuleDecl(ModuleDecl::ExportAll(export_all)) => {
        ModuleItem::Stmt(Stmt::Expr(ExprStmt {
          span: DUMMY_SP,
          expr: create_await_import_expr(
            &export_all.src.value,
            export_all.with,
          ),
        }))
      }
      // `export { a } from "..."` -> `await import("...")`;
      // a local `export { a }` has no side effects and is dropped
      ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(export_named)) => {
        if let Some(src) = export_named.src {
          ModuleItem::Stmt(Stmt::Expr(ExprStmt {
            span: DUMMY_SP,
            expr: create_await_import_expr(&src.value, export_named.with),
          }))
        } else {
          create_empty_stmt()
        }
      }
      ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultExpr(default_expr)) => {
        // transform a default export expression to its expression
        ModuleItem::Stmt(Stmt::Expr(ExprStmt {
          span: DUMMY_SP,
          expr: default_expr.expr,
        }))
      }
      ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(export_decl)) => {
        // strip the export keyword on an exported declaration
        ModuleItem::Stmt(Stmt::Decl(export_decl.decl))
      }
      ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultDecl(default_decl)) => {
        // only keep named default exports
        match default_decl.decl {
          DefaultDecl::Fn(FnExpr {
            ident: Some(ident),
            function,
          }) => ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
            declare: false,
            ident,
            function,
          }))),
          DefaultDecl::Class(ClassExpr {
            ident: Some(ident),
            class,
          }) => ModuleItem::Stmt(Stmt::Decl(Decl::Class(ClassDecl {
            declare: false,
            ident,
            class,
          }))),
          _ => create_empty_stmt(),
        }
      }
      _ => module_item,
    }
  }
}
fn create_empty_stmt() -> swc_ast::ModuleItem {
use swc_ast::*;
ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
}
/// Builds a plain (non-optional) identifier node with a dummy span
/// from the given name.
fn create_ident(name: String) -> swc_ast::Ident {
  swc_ast::Ident {
    span: DUMMY_SP,
    sym: name.into(),
    optional: false,
  }
}
fn create_key_value(
key: String,
value: swc_ast::Ident,
) -> swc_ast::ObjectPatProp {
swc_ast::ObjectPatProp::KeyValue(swc_ast::KeyValuePatProp {
// use a string literal because it will work in more scenarios than an identifier
key: swc_ast::PropName::Str(swc_ast::Str {
span: DUMMY_SP,
value: key.into(),
raw: None,
}),
value: Box::new(swc_ast::Pat::Ident(swc_ast::BindingIdent {
id: value,
type_ann: None,
})),
})
}
/// Builds an `await import("<specifier>")` expression, optionally with a
/// second `{ assert: { ... } }` argument when import assertions exist.
fn create_await_import_expr(
  module_specifier: &str,
  maybe_asserts: Option<Box<swc_ast::ObjectLit>>,
) -> Box<swc_ast::Expr> {
  use swc_ast::*;
  // First argument: the module specifier string literal.
  let specifier_arg = ExprOrSpread {
    spread: None,
    expr: Box::new(Expr::Lit(Lit::Str(Str {
      span: DUMMY_SP,
      raw: None,
      value: module_specifier.into(),
    }))),
  };
  // Optional second argument: `{ assert: { ... } }`.
  let assert_arg = maybe_asserts.map(|asserts| ExprOrSpread {
    spread: None,
    expr: Box::new(Expr::Object(ObjectLit {
      span: DUMMY_SP,
      props: vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(
        KeyValueProp {
          key: PropName::Ident(create_ident("assert".to_string())),
          value: Box::new(Expr::Object(*asserts)),
        },
      )))],
    })),
  });
  let args = std::iter::once(specifier_arg).chain(assert_arg).collect();
  // `import(...)` call — `import` is modeled as a plain identifier callee.
  let import_call = Expr::Call(CallExpr {
    span: DUMMY_SP,
    callee: Callee::Expr(Box::new(Expr::Ident(Ident {
      span: DUMMY_SP,
      sym: "import".into(),
      optional: false,
    }))),
    args,
    type_args: None,
  });
  Box::new(Expr::Await(AwaitExpr {
    span: DUMMY_SP,
    arg: Box::new(import_call),
  }))
}
/// Builds a shorthand object-pattern property (ex. the `a` in `{ a }`),
/// binding the destructured value under the identifier's own name.
fn create_assignment(key: swc_ast::Ident) -> swc_ast::ObjectPatProp {
  swc_ast::ObjectPatProp::Assign(swc_ast::AssignPatProp {
    span: DUMMY_SP,
    key,
    value: None,
  })
}
#[cfg(test)]
mod test {
  // These tests exercise the folders end-to-end: parse TypeScript,
  // apply the fold, codegen the result, and compare the emitted text.
  use crate::swc::ast::Module;
  use crate::swc::codegen::text_writer::JsWriter;
  use crate::swc::codegen::Node;
  use crate::swc::common::FileName;
  use crate::swc::common::SourceMap;
  use crate::swc::parser::Parser;
  use crate::swc::parser::StringInput;
  use crate::swc::parser::Syntax;
  use crate::swc::parser::TsConfig;
  use crate::swc::visit::Fold;
  use crate::swc::visit::FoldWith;
  use crate::ModuleSpecifier;
  use crate::ES_VERSION;
  use pretty_assertions::assert_eq;
  use std::rc::Rc;

  use super::*;

  #[test]
  fn test_downlevel_imports_type_only() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import type { test } from "./mod.ts";"#,
      ";",
    );
  }

  #[test]
  fn test_downlevel_imports_specifier_only() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import "./mod.ts";"#,
      r#"await import("./mod.ts");"#,
    );

    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import {} from "./mod.ts";"#,
      r#"await import("./mod.ts");"#,
    );
  }

  #[test]
  fn test_downlevel_imports_default() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import mod from "./mod.ts";"#,
      r#"const { "default": mod } = await import("./mod.ts");"#,
    );
  }

  #[test]
  fn test_downlevel_imports_named() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import { A } from "./mod.ts";"#,
      r#"const { A } = await import("./mod.ts");"#,
    );

    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import { A, B, C } from "./mod.ts";"#,
      r#"const { A, B, C } = await import("./mod.ts");"#,
    );

    // aliased named imports destructure via string-literal keys
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import { A as LocalA, B, C as LocalC } from "./mod.ts";"#,
      r#"const { "A": LocalA, B, "C": LocalC } = await import("./mod.ts");"#,
    );
  }

  #[test]
  fn test_downlevel_imports_namespace() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import * as mod from "./mod.ts";"#,
      r#"const mod = await import("./mod.ts");"#,
    );
  }

  #[test]
  fn test_downlevel_imports_mixed() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import myDefault, { A, B as LocalB } from "./mod.ts";"#,
      r#"const { "default": myDefault, A, "B": LocalB } = await import("./mod.ts");"#,
    );

    // namespace + default produces two declarators and thus two awaits
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import myDefault, * as mod from "./mod.ts";"#,
      r#"const { "default": myDefault } = await import("./mod.ts"), mod = await import("./mod.ts");"#,
    );
  }

  #[test]
  fn test_downlevel_imports_assertions() {
    test_transform(
      ImportDeclsToVarDeclsFolder,
      r#"import data from "./mod.json" assert { type: "json" };"#,
      "const { \"default\": data } = await import(\"./mod.json\", {\n assert: {\n type: \"json\"\n }\n});",
    );
  }

  #[test]
  fn test_strip_exports_export_all() {
    test_transform(
      StripExportsFolder,
      r#"export * from "./test.ts";"#,
      r#"await import("./test.ts");"#,
    );
  }

  #[test]
  fn test_strip_exports_export_named() {
    test_transform(
      StripExportsFolder,
      r#"export { test } from "./test.ts";"#,
      r#"await import("./test.ts");"#,
    );

    test_transform(StripExportsFolder, r#"export { test };"#, ";");
  }

  #[test]
  fn test_strip_exports_assertions() {
    test_transform(
      StripExportsFolder,
      r#"export { default as data } from "./mod.json" assert { type: "json" };"#,
      "await import(\"./mod.json\", {\n assert: {\n type: \"json\"\n }\n});",
    );
  }

  #[test]
  fn test_strip_exports_export_all_assertions() {
    // even though this doesn't really make sense for someone to do
    test_transform(
      StripExportsFolder,
      r#"export * from "./mod.json" assert { type: "json" };"#,
      "await import(\"./mod.json\", {\n assert: {\n type: \"json\"\n }\n});",
    );
  }

  #[test]
  fn test_strip_exports_export_default_expr() {
    test_transform(StripExportsFolder, "export default 5;", "5;");
  }

  #[test]
  fn test_strip_exports_export_default_decl_name() {
    test_transform(
      StripExportsFolder,
      "export default class Test {}",
      "class Test {\n}",
    );

    test_transform(
      StripExportsFolder,
      "export default function test() {}",
      "function test() {}",
    );
  }

  #[test]
  fn test_strip_exports_export_default_decl_no_name() {
    test_transform(StripExportsFolder, "export default class {}", ";");

    test_transform(StripExportsFolder, "export default function() {}", ";");
  }

  #[test]
  fn test_strip_exports_export_named_decls() {
    test_transform(
      StripExportsFolder,
      "export class Test {}",
      "class Test {\n}",
    );

    test_transform(
      StripExportsFolder,
      "export function test() {}",
      "function test() {}",
    );

    test_transform(StripExportsFolder, "export enum Test {}", "enum Test {\n}");

    test_transform(
      StripExportsFolder,
      "export namespace Test {}",
      "module Test {\n}",
    );
  }

  #[test]
  fn test_strip_exports_not_in_namespace() {
    // only top-level exports are stripped; nested ones are untouched
    test_transform(
      StripExportsFolder,
      "namespace Test { export class Test {} }",
      "module Test {\n export class Test {\n }\n}",
    );
  }

  // Parses `src`, applies `transform`, prints the result, and asserts it
  // equals `expected_output` plus a trailing newline.
  #[track_caller]
  fn test_transform(
    mut transform: impl Fold,
    src: &str,
    expected_output: &str,
  ) {
    let (source_map, module) = parse(src);
    let output = print(source_map, module.fold_with(&mut transform));
    assert_eq!(output, format!("{}\n", expected_output));
  }

  // Parses `src` as TypeScript into an swc `Module`.
  fn parse(src: &str) -> (Rc<SourceMap>, Module) {
    let source_map = Rc::new(SourceMap::default());
    let source_file = source_map.new_source_file(
      FileName::Url(ModuleSpecifier::parse("file:///test.ts").unwrap()),
      src.to_string(),
    );
    let input = StringInput::from(&*source_file);
    let syntax = Syntax::Typescript(TsConfig {
      ..Default::default()
    });
    let mut parser = Parser::new(syntax, input, None);
    (source_map, parser.parse_module().unwrap())
  }

  // Codegens `module` back to source text.
  fn print(source_map: Rc<SourceMap>, module: Module) -> String {
    let mut buf = vec![];
    {
      let mut writer =
        Box::new(JsWriter::new(source_map.clone(), "\n", &mut buf, None));
      writer.set_indent_str("  "); // two spaces
      let config = crate::swc::codegen::Config {
        minify: false,
        ascii_only: false,
        omit_last_semi: false,
        target: ES_VERSION,
      };
      let mut emitter = crate::swc::codegen::Emitter {
        cfg: config,
        comments: None,
        cm: source_map,
        wr: writer,
      };
      module.emit_with(&mut emitter).unwrap();
    }
    String::from_utf8(buf).unwrap()
  }
}

352
third-party/vendor/deno_ast/src/types.rs vendored Normal file
View file

@ -0,0 +1,352 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::fmt;
use crate::swc::parser::error::SyntaxError;
use crate::LineAndColumnDisplay;
use crate::SourceRange;
use crate::SourceRangedForSpanned;
use crate::SourceTextInfo;
/// Parsing diagnostic.
#[derive(Debug, Clone)]
pub struct Diagnostic {
  /// Specifier of the source the diagnostic occurred in.
  pub specifier: String,
  /// Range of the diagnostic.
  pub range: SourceRange,
  /// Swc syntax error
  pub kind: SyntaxError,
  // Private: full source text info, used by `Display` to render a code
  // snippet; deliberately excluded from `PartialEq` (see impl below).
  source: SourceTextInfo,
}
impl PartialEq for Diagnostic {
  /// Equality ignores the stored source text, comparing only the
  /// specifier, range, and error kind.
  fn eq(&self, other: &Self) -> bool {
    (&self.specifier, &self.range, &self.kind)
      == (&other.specifier, &other.range, &other.kind)
  }
}
impl Diagnostic {
  /// Message text of the diagnostic.
  ///
  /// Delegates to swc's `SyntaxError::msg`.
  pub fn message(&self) -> Cow<str> {
    self.kind.msg()
  }

  /// 1-indexed display position the diagnostic occurred at.
  pub fn display_position(&self) -> LineAndColumnDisplay {
    self.source.line_and_column_display(self.range.start)
  }
}
impl Diagnostic {
  /// Converts an swc parser error into a [`Diagnostic`], capturing the
  /// source text so `Display` can render a snippet of the offending code.
  pub fn from_swc_error(
    err: crate::swc::parser::error::Error,
    specifier: &str,
    source: SourceTextInfo,
  ) -> Diagnostic {
    Diagnostic {
      // note: `range` must be read first — `into_kind` below consumes `err`
      range: err.range(),
      specifier: specifier.to_string(),
      kind: err.into_kind(),
      source,
    }
  }
}

impl std::error::Error for Diagnostic {}
impl fmt::Display for Diagnostic {
  /// Formats as `<message> at <specifier>:<line>:<column>` followed by a
  /// blank line and an indented snippet of the source with the error
  /// range underlined (see `get_range_text_highlight`).
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let display_position = self.display_position();
    write!(
      f,
      "{} at {}:{}:{}\n\n{}",
      self.message(),
      self.specifier,
      display_position.line_number,
      display_position.column_number,
      // todo(dsherret): remove this catch unwind once we've
      // tested this out a lot
      std::panic::catch_unwind(|| {
        get_range_text_highlight(&self.source, self.range)
          .lines()
          // indent two spaces
          .map(|l| {
            if l.trim().is_empty() {
              String::new()
            } else {
              format!("  {}", l)
            }
          })
          .collect::<Vec<_>>()
          .join("\n")
      })
      // if snippet rendering panics, degrade to a bug-report message
      // instead of poisoning the whole Display call
      .unwrap_or_else(|err| {
        format!("Bug. Please report this issue: {:?}", err)
      }),
    )
  }
}
/// Error wrapping one or more parsing [`Diagnostic`]s.
#[derive(Debug)]
pub struct DiagnosticsError(pub Vec<Diagnostic>);

impl std::error::Error for DiagnosticsError {}
impl fmt::Display for DiagnosticsError {
  /// Writes each contained diagnostic, separated by a blank line.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let mut is_first = true;
    for diagnostic in &self.0 {
      if !is_first {
        write!(f, "\n\n")?;
      }
      is_first = false;
      write!(f, "{}", diagnostic)?
    }
    Ok(())
  }
}
/// Code in this function was adapted from:
/// https://github.com/dprint/dprint/blob/a026a1350d27a61ea18207cb31897b18eaab51a1/crates/core/src/formatting/utils/string_utils.rs#L62
///
/// Renders the source lines covered by `byte_range` with a `~` underline
/// beneath the erroring characters. Long lines are truncated around the
/// error with `...`, and when more than three lines are covered only the
/// first two and last are shown with a `...` line between.
fn get_range_text_highlight(
  source: &SourceTextInfo,
  byte_range: SourceRange,
) -> String {
  // Converts a byte position into a char-based column on its line.
  fn get_column_index_of_pos(text: &str, pos: usize) -> usize {
    let line_start_byte_pos = get_line_start_byte_pos(text, pos);
    text[line_start_byte_pos..pos].chars().count()
  }

  // Finds the byte position just after the previous '\n' (or 0).
  fn get_line_start_byte_pos(text: &str, pos: usize) -> usize {
    let text_bytes = text.as_bytes();
    for i in (0..pos).rev() {
      if text_bytes.get(i) == Some(&(b'\n')) {
        return i + 1;
      }
    }
    0
  }

  // Slices out the full lines containing the range and rebases the
  // error range to byte offsets within that slice.
  fn get_text_and_error_range(
    source: &SourceTextInfo,
    byte_range: SourceRange,
  ) -> (&str, (usize, usize)) {
    let first_line_start =
      source.line_start(source.line_index(byte_range.start));
    let last_line_end = source.line_end(source.line_index(byte_range.end));
    let error_start = byte_range.start - first_line_start;
    let error_end = error_start + (byte_range.end - byte_range.start);
    let sub_text =
      source.range_text(&SourceRange::new(first_line_start, last_line_end));
    (sub_text, (error_start, error_end))
  }

  let (sub_text, (error_start, error_end)) =
    get_text_and_error_range(source, byte_range);

  let mut result = String::new();
  // don't use .lines() here because it will trim any empty
  // lines, which might for some reason be part of the range
  let lines = sub_text.split('\n').collect::<Vec<_>>();
  let line_count = lines.len();
  for (i, mut line) in lines.into_iter().enumerate() {
    if line.ends_with('\r') {
      line = &line[..line.len() - 1]; // trim the \r
    }
    let is_last_line = i == line_count - 1;
    // don't show all the lines if there are more than 3 lines
    if i > 2 && !is_last_line {
      continue;
    }
    if i > 0 {
      result.push('\n');
    }
    if i == 2 && !is_last_line {
      result.push_str("...");
      continue;
    }

    // underline starts at the error column on the first line, column 0 after
    let mut error_start_char_index = if i == 0 {
      get_column_index_of_pos(sub_text, error_start)
    } else {
      0
    };
    // underline runs to the error column on the last line, otherwise to EOL
    let mut error_end_char_index = if is_last_line {
      get_column_index_of_pos(sub_text, error_end)
    } else {
      line.chars().count()
    };
    let line_char_count = line.chars().count();
    if line_char_count > 90 {
      // truncate long lines to an 80-char window around the error,
      // keeping the underline indices in sync with the inserted "..."
      let start_char_index = if error_start_char_index > 60 {
        std::cmp::min(error_start_char_index - 20, line_char_count - 80)
      } else {
        0
      };
      error_start_char_index -= start_char_index;
      error_end_char_index -= start_char_index;
      let code_text = line
        .chars()
        .skip(start_char_index)
        .take(80)
        .collect::<String>();
      let mut line_text = String::new();
      if start_char_index > 0 {
        line_text.push_str("...");
        error_start_char_index += 3;
        error_end_char_index += 3;
      }
      line_text.push_str(&code_text);
      if line_char_count > start_char_index + code_text.chars().count() {
        error_end_char_index =
          std::cmp::min(error_end_char_index, line_text.chars().count());
        line_text.push_str("...");
      }
      result.push_str(&line_text);
    } else {
      result.push_str(line);
    }
    result.push('\n');
    result.push_str(&" ".repeat(error_start_char_index));
    result.push_str(&"~".repeat(std::cmp::max(
      1, // this means it's the end of the line, so display a single ~
      error_end_char_index - error_start_char_index,
    )));
  }
  result
}
#[cfg(test)]
mod test {
  // Tests for the snippet/underline renderer used by `Diagnostic`'s
  // Display impl: line truncation, elision of middle lines, and
  // underline placement.
  use dprint_swc_ext::common::SourceRange;
  use dprint_swc_ext::common::SourceTextInfo;
  use pretty_assertions::assert_eq;

  use super::get_range_text_highlight;

  #[test]
  fn range_highlight_all_text() {
    let text = SourceTextInfo::from_string(
      concat!(
        "Line 0 - Testing this out with a long line testing0 testing1 testing2 testing3 testing4 testing5 testing6\n",
        "Line 1\n",
        "Line 2\n",
        "Line 3\n",
        "Line 4"
      ).to_string(),
    );
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_start(0), text.line_end(4))
      ),
      concat!(
        "Line 0 - Testing this out with a long line testing0 testing1 testing2 testing3 t...\n",
        "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n",
        "Line 1\n",
        "~~~~~~\n",
        "...\n",
        "Line 4\n",
        "~~~~~~",
      ),
    );
  }

  #[test]
  fn range_highlight_all_text_last_line_long() {
    let text = SourceTextInfo::from_string(
      concat!(
        "Line 0\n",
        "Line 1\n",
        "Line 2\n",
        "Line 3\n",
        "Line 4 - Testing this out with a long line testing0 testing1 testing2 testing3 testing4 testing5 testing6\n",
      ).to_string(),
    );
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_start(0), text.line_end(4))
      ),
      concat!(
        "Line 0\n",
        "~~~~~~\n",
        "Line 1\n",
        "~~~~~~\n",
        "...\n",
        "Line 4 - Testing this out with a long line testing0 testing1 testing2 testing3 t...\n",
        "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",
      ),
    );
  }

  #[test]
  fn range_highlight_range_start_long_line() {
    // error at the start of a long line: truncate the tail
    let text = SourceTextInfo::from_string(
      "Testing this out with a long line testing0 testing1 testing2 testing3 testing4 testing5 testing6 testing7".to_string(),
    );
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_start(0), text.line_start(0) + 1)
      ),
      concat!(
        "Testing this out with a long line testing0 testing1 testing2 testing3 testing4 t...\n",
        "~",
      ),
    );
  }

  #[test]
  fn range_highlight_range_end_long_line() {
    // error at the end of a long line: truncate the head
    let text = SourceTextInfo::from_string(
      "Testing this out with a long line testing0 testing1 testing2 testing3 testing4 testing5 testing6 testing7".to_string(),
    );
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_end(0) - 1, text.line_end(0))
      ),
      concat!(
        "...ong line testing0 testing1 testing2 testing3 testing4 testing5 testing6 testing7\n",
        " ~",
      ),
    );
  }

  #[test]
  fn range_highlight_whitespace_start_line() {
    let text = SourceTextInfo::from_string(" testing\r\ntest".to_string());
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_end(0) - 1, text.line_end(1))
      ),
      concat!(" testing\n", " ~\n", "test\n", "~~~~",),
    );
  }

  #[test]
  fn range_end_of_line() {
    // empty range at end of line still renders a single `~`
    let text =
      SourceTextInfo::from_string(" testingtestingtestingtesting".to_string());
    assert_eq!(
      get_range_text_highlight(
        &text,
        SourceRange::new(text.line_end(0), text.line_end(0))
      ),
      concat!(
        " testingtestingtestingtesting\n",
        " ~",
      ),
    );
  }
}