Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

View file

@ -0,0 +1 @@
{"files":{"Cargo.lock":"f9c16fa20217f613b33e93aacbbcfad6452913839340d5ac364855a3782443bb","Cargo.toml":"c51b114133ea1d055f254834e6effcca620195b8b8a43dde641d9bdffb9b9bc0","LICENSE":"7516e1cf340213f60d96bca77bb012882dbf80e7cca5922c914174f605d9ef71","README.md":"3623763077a9070c98f14755c90b92fd5fcbc3ed4561515696bd6b7687441cdc","examples/read.rs":"70d8934a65e48627be843ede3ed88f2d7956f1ba8f7d441447b41cd76758124a","examples/rewrite.rs":"ff8a482dbf9c701093c2b6b234d36503dc445e130403e9ec0125d7d80361950c","examples/split_ram_bundle.rs":"3ab257700fe74a80f6b646b7e41cae30c93b413c2b5fdda45688a4c4bde30f73","src/builder.rs":"92b6eb9d395d6d8e85ad306d8292fcc1e8bb5703e7b62ea1647d8f03b0d8df87","src/decoder.rs":"e674f0f53d47005d91345eaf71e549f20ed2f13a1f7130d0d964982300d03dae","src/detector.rs":"668a509ccb5670f237186530ead5d21aaca0c13196ace5b44b92a80ccac26b11","src/encoder.rs":"e7f1c088ac25995375a374cb408f1e7d93b6810bb954767b09db97d1a8394db1","src/errors.rs":"8fedeb7f325e1375c6a5fd54aebc7ac4ec624fb6e7b1e13bffddbe3cd10c54d1","src/hermes.rs":"520ab85b67c8d094fd1ee32b02cbbb9473d5b588c9501a71114ae1b504c69641","src/js_identifiers.rs":"a753682f8e380c5a22a3a8454ca0084e8f38cb3cedfe51dbcc20e3a207017879","src/jsontypes.rs":"7fd589a4bbbb6c5c2d3b0d1857c7c7ef5c342bbf99232a27e50749bf6216ce78","src/lib.rs":"9a4bff717eb205b65629c4c4302b6dbcf8b855044bcb5a72355647430947e21c","src/macros.rs":"fc4a5b5162c1a7862e544045bfae244222bd18bc6e342bb052f36a580a756861","src/ram_bundle.rs":"27146b39d88c3f2290a2f5184f46c1f40c43f4f8e3dcdc7c4b4f938ac776b08f","src/sourceview.rs":"9d98a27a24423301d4e677aab58ac410b657f787cccf1f381f1a20130f603516","src/types.rs":"a90b58a50f49444ac16cf50e35f8990ee07dbaa2a814ab54403d637d1ff358ad","src/utils.rs":"aaf6d2a3648f864020894510f5b8382c743ea1396387f76b35c39c36599f9a8f","src/vlq.rs":"0c007909edec333f6c16309855cb65273eb5fff510573f6ca9fb4a2d71e5c6ff"},"package":"10da010a590ed2fa9ca8467b00ce7e9c5a8017742c0c09c45450efc172208c4b"}

633
third-party/vendor/sourcemap/Cargo.lock generated vendored Normal file
View file

@ -0,0 +1,633 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bit-set"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "data-encoding"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]]
name = "debugid"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d"
dependencies = [
"serde",
"uuid",
]
[[package]]
name = "errno"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e"
dependencies = [
"libc",
"windows-sys",
]
[[package]]
name = "fastrand"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
[[package]]
name = "getrandom"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "idna"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "if_chain"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed"
[[package]]
name = "itoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libm"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "linux-raw-sys"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
[[package]]
name = "magic_string"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8033ce8c43f7ccb207e4699f30eed50d7526379ee08fab47159f80b7934e18"
dependencies = [
"base64",
"regex",
"serde",
"serde_json",
"vlq",
]
[[package]]
name = "memchr"
version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "num-traits"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
"libm",
]
[[package]]
name = "percent-encoding"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "ppv-lite86"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "proptest"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf"
dependencies = [
"bit-set",
"bit-vec",
"bitflags 2.4.1",
"lazy_static",
"num-traits",
"rand",
"rand_chacha",
"rand_xorshift",
"regex-syntax",
"rusty-fork",
"tempfile",
"unarray",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_xorshift"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
dependencies = [
"rand_core",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "regex"
version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
dependencies = [
"semver",
]
[[package]]
name = "rustix"
version = "0.38.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
dependencies = [
"bitflags 2.4.1",
"errno",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "rusty-fork"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
dependencies = [
"fnv",
"quick-error",
"tempfile",
"wait-timeout",
]
[[package]]
name = "ryu"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
[[package]]
name = "scroll"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fda28d4b4830b807a8b43f7b0e6b5df875311b3e7621d84577188c175b6ec1ec"
dependencies = [
"scroll_derive",
]
[[package]]
name = "scroll_derive"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaaae8f38bb311444cfb7f1979af0bc9240d95795f75f9ceddf6a59b79ceffa0"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "semver"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
dependencies = [
"semver-parser",
]
[[package]]
name = "semver-parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "serde_json"
version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "sourcemap"
version = "7.0.1"
dependencies = [
"data-encoding",
"debugid",
"if_chain",
"magic_string",
"proptest",
"rustc_version",
"scroll",
"serde",
"serde_json",
"unicode-id",
"url",
]
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "tempfile"
version = "3.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
dependencies = [
"cfg-if",
"fastrand",
"redox_syscall",
"rustix",
"windows-sys",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "unarray"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
[[package]]
name = "unicode-bidi"
version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-id"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1b6def86329695390197b82c1e244a54a131ceb66c996f2088a3876e2ae083f"
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-normalization"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
dependencies = [
"tinyvec",
]
[[package]]
name = "url"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
[[package]]
name = "uuid"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc"
[[package]]
name = "vlq"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65dd7eed29412da847b0f78bcec0ac98588165988a8cfe41d4ea1d429f8ccfff"
[[package]]
name = "wait-timeout"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
dependencies = [
"libc",
]
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

82
third-party/vendor/sourcemap/Cargo.toml vendored Normal file
View file

@ -0,0 +1,82 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "sourcemap"
version = "7.0.1"
authors = ["Sentry <hello@sentry.io>"]
include = [
"/src/**/*.rs",
"/examples/*.rs",
"/*.toml",
"/LICENSE",
"/README.md",
]
autoexamples = true
description = "Basic sourcemap handling for Rust"
homepage = "https://github.com/getsentry/rust-sourcemap"
documentation = "https://getsentry.github.io/rust-sourcemap/"
readme = "README.md"
keywords = [
"javascript",
"sourcemap",
"sourcemaps",
]
license = "BSD-3-Clause"
repository = "https://github.com/getsentry/rust-sourcemap"
[package.metadata.docs.rs]
all-features = true
[[example]]
name = "split_ram_bundle"
required-features = ["ram_bundle"]
[dependencies.data-encoding]
version = "2.3.3"
[dependencies.debugid]
version = "0.8.0"
features = ["serde"]
[dependencies.if_chain]
version = "1.0.0"
[dependencies.scroll]
version = "0.10.1"
features = ["derive"]
optional = true
[dependencies.serde]
version = "1.0.104"
features = ["derive"]
[dependencies.serde_json]
version = "1.0.48"
[dependencies.unicode-id]
version = "0.3"
[dependencies.url]
version = "2.1.1"
[dev-dependencies.magic_string]
version = "0.3.4"
[dev-dependencies.proptest]
version = "1.2.0"
[build-dependencies.rustc_version]
version = "0.2.3"
[features]
ram_bundle = ["scroll"]

31
third-party/vendor/sourcemap/LICENSE vendored Normal file
View file

@ -0,0 +1,31 @@
Copyright (c) 2016 by Armin Ronacher.
Some rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* The names of the contributors may not be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

51
third-party/vendor/sourcemap/README.md vendored Normal file
View file

@ -0,0 +1,51 @@
# sourcemap
This library implements basic processing of JavaScript sourcemaps.
## Installation
The crate is called sourcemap and you can depend on it via cargo:
```toml
[dependencies]
sourcemap = "*"
```
If you want to use the git version:
```toml
[dependencies.sourcemap]
git = "https://github.com/getsentry/rust-sourcemap.git"
```
## Basic Operation
This crate can load JavaScript sourcemaps from JSON files. It uses
`serde` for parsing of the JSON data. Due to the nature of sourcemaps
the entirety of the file must be loaded into memory which can be quite
memory intensive.
Usage:
```rust
use sourcemap::SourceMap;
let input: &[_] = b"{
\"version\":3,
\"sources\":[\"coolstuff.js\"],
\"names\":[\"x\",\"alert\"],
\"mappings\":\"AAAA,GAAIA,GAAI,EACR,IAAIA,GAAK,EAAG,CACVC,MAAM\"
}";
let sm = SourceMap::from_reader(input).unwrap();
let token = sm.lookup_token(0, 0).unwrap(); // line-number and column
println!("token: {}", token);
```
## Features
Functionality of the crate can be turned on and off by feature flags. This is the
current list of feature flags:
* `ram_bundle`: turns on RAM bundle support
License: BSD-3-Clause

View file

@ -0,0 +1,38 @@
use std::env;
use std::fs;
use std::io::Read;
use sourcemap::{decode, DecodedMap, RewriteOptions, SourceMap};
/// Decodes a sourcemap from the reader, flattening indexed maps into a
/// regular `SourceMap` so callers always get a single flat map back.
///
/// Panics on decode failure or an unexpected map format (this is example
/// code, so errors are not propagated).
fn load_from_reader<R: Read>(mut rdr: R) -> SourceMap {
    let decoded = decode(&mut rdr).unwrap();
    match decoded {
        DecodedMap::Regular(sm) => sm,
        DecodedMap::Index(idx) => {
            let options = RewriteOptions {
                load_local_source_contents: true,
                ..Default::default()
            };
            idx.flatten_and_rewrite(&options).unwrap()
        }
        _ => panic!("unexpected sourcemap format"),
    }
}
/// Entry point: `read FILE [LINE [COLUMN]]`.
///
/// Loads the sourcemap from FILE, looks up the token at the given
/// position (both default to 0 when omitted), and prints it.
fn main() {
    let args: Vec<_> = env::args().collect();
    let mut file = fs::File::open(&args[1]).unwrap();
    let sm = load_from_reader(&mut file);
    // Positional arguments 2 and 3 are line and column; absent arguments
    // default to zero.
    let pos = |idx: usize| -> u32 {
        if args.len() > idx {
            args[idx].parse::<u32>().unwrap()
        } else {
            0
        }
    };
    let line = pos(2);
    let column = pos(3);
    let token = sm.lookup_token(line, column).unwrap(); // line-number and column
    println!("token: {token}");
}

View file

@ -0,0 +1,49 @@
use std::env;
use std::fs;
use std::io::Read;
use std::path::Path;
use sourcemap::{decode, DecodedMap, RewriteOptions, SourceMap};
/// Compares each source referenced by the map against the file on disk
/// (when one exists at that path) and prints the names of sources whose
/// embedded contents differ from the local file.
fn test(sm: &SourceMap) {
    for (src_id, source) in sm.sources().enumerate() {
        let path = Path::new(source);
        if path.is_file() {
            let mut f = fs::File::open(path).unwrap();
            let mut contents = String::new();
            // Skip files that cannot be read as UTF-8 text rather than
            // failing the whole comparison.  (Idiom fix: `.is_err()`
            // instead of `.ok().is_none()`.)
            if f.read_to_string(&mut contents).is_err() {
                continue;
            }
            if Some(contents.as_str()) != sm.get_source_contents(src_id as u32) {
                println!(" !!! {source}");
            }
        }
    }
}
/// Reads and decodes a sourcemap, turning indexed maps into a flattened
/// regular map so the rest of the program only deals with `SourceMap`.
///
/// Panics on decode failure or an unexpected map format.
fn load_from_reader<R: Read>(mut rdr: R) -> SourceMap {
    match decode(&mut rdr).unwrap() {
        DecodedMap::Regular(regular) => regular,
        DecodedMap::Index(index) => {
            let opts = RewriteOptions {
                load_local_source_contents: true,
                ..Default::default()
            };
            index.flatten_and_rewrite(&opts).unwrap()
        }
        _ => panic!("unexpected sourcemap format"),
    }
}
/// Round-trip check: load a sourcemap, verify its sources against disk,
/// serialize it back to JSON, reload it, and verify again.
fn main() {
    let args: Vec<_> = env::args().collect();
    let mut file = fs::File::open(&args[1]).unwrap();
    let sm = load_from_reader(&mut file);
    println!("before dump");
    test(&sm);
    println!("after dump");
    // Serialize to an in-memory buffer and decode it right back.
    let mut serialized = Vec::<u8>::new();
    sm.to_writer(&mut serialized).unwrap();
    let reloaded = load_from_reader(serialized.as_slice());
    test(&reloaded);
}

View file

@ -0,0 +1,61 @@
use std::env;
use std::fs;
use std::fs::File;
use std::path::Path;
use sourcemap::ram_bundle::{split_ram_bundle, RamBundle, RamBundleType};
use sourcemap::SourceMapIndex;
/// Help text printed (followed by exit code 1) when fewer than three
/// positional arguments are supplied.
const USAGE: &str = "
Usage:
./split_ram_bundle RAM_BUNDLE SOURCEMAP OUT_DIRECTORY
This example app splits the given RAM bundle and the sourcemap into a set of
source files and their sourcemaps.
Both indexed and file RAM bundles are supported.
";
/// Entry point: `split_ram_bundle RAM_BUNDLE SOURCEMAP OUT_DIRECTORY`.
///
/// Splits the given RAM bundle and its composite sourcemap into one
/// source file plus one sourcemap per module, written into the output
/// directory (which must already exist).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<_> = env::args().collect();
    if args.len() < 4 {
        println!("{USAGE}");
        std::process::exit(1);
    }
    let bundle_path = Path::new(&args[1]);
    // Try the indexed RAM bundle format first, then fall back to the
    // file-based ("unbundle") format.
    let ram_bundle = RamBundle::parse_indexed_from_path(bundle_path)
        .or_else(|_| RamBundle::parse_unbundle_from_path(bundle_path))?;
    match ram_bundle.bundle_type() {
        RamBundleType::Indexed => println!("Indexed RAM Bundle detected"),
        RamBundleType::Unbundle => println!("File RAM Bundle detected"),
    }
    let sourcemap_file = File::open(&args[2])?;
    let ism = SourceMapIndex::from_reader(sourcemap_file).unwrap();
    let output_directory = Path::new(&args[3]);
    if !output_directory.exists() {
        panic!("Directory {} does not exist!", output_directory.display());
    }
    // Fixed typo in user-facing message: "Ouput" -> "Output".
    println!(
        "Output directory: {}",
        output_directory.canonicalize()?.display()
    );
    let ram_bundle_iter = split_ram_bundle(&ram_bundle, &ism).unwrap();
    for result in ram_bundle_iter {
        let (name, sv, sm) = result.unwrap();
        println!("Writing down source: {name}");
        fs::write(output_directory.join(name.clone()), sv.source())?;
        let sourcemap_name = format!("{name}.map");
        println!("Writing down sourcemap: {sourcemap_name}");
        let out_sm = File::create(output_directory.join(sourcemap_name))?;
        sm.to_writer(out_sm)?;
    }
    println!("Done.");
    Ok(())
}

View file

@ -0,0 +1,300 @@
#![cfg_attr(not(any(unix, windows, target_os = "redox")), allow(unused_imports))]
use std::collections::HashMap;
use std::convert::AsRef;
use std::env;
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use debugid::DebugId;
use url::Url;
use crate::errors::Result;
use crate::types::{RawToken, SourceMap, Token};
/// Helper for sourcemap generation
///
/// This helper exists because generating and modifying `SourceMap`
/// objects is generally not very comfortable. As a general aid this
/// type can help.
pub struct SourceMapBuilder {
    /// Optional `file` value for the resulting sourcemap.
    file: Option<String>,
    /// Maps a name to its index in `names`, for deduplication.
    name_map: HashMap<String, u32>,
    /// All registered names, in insertion order.
    names: Vec<String>,
    /// All mappings added so far.
    tokens: Vec<RawToken>,
    /// Maps a source path to its index in `sources`, for deduplication.
    source_map: HashMap<String, u32>,
    /// Optional `sourceRoot` value for the resulting sourcemap.
    source_root: Option<String>,
    /// All registered sources, in insertion order.
    sources: Vec<String>,
    /// Embedded contents for each source, if available (parallel to
    /// `sources`, grown lazily).
    source_contents: Vec<Option<String>>,
    /// For each source, the ID supplied via `add_source_with_id`
    /// (`!0` for sources added through `add_source`).
    sources_mapping: Vec<u32>,
    /// Optional debug id to embed in the sourcemap.
    debug_id: Option<DebugId>,
}
/// Resolves `reference` against `base` and converts the result into a
/// local filesystem path.
///
/// Returns `None` when the reference cannot be joined, is not a
/// `file://` URL, or cannot be converted to a path.
#[cfg(any(unix, windows, target_os = "redox"))]
fn resolve_local_reference(base: &Url, reference: &str) -> Option<PathBuf> {
    let url = base.join(reference).ok()?;
    if url.scheme() != "file" {
        return None;
    }
    url.to_file_path().ok()
}
impl SourceMapBuilder {
/// Creates a new source map builder and sets the file.
pub fn new(file: Option<&str>) -> SourceMapBuilder {
SourceMapBuilder {
file: file.map(str::to_owned),
name_map: HashMap::new(),
names: vec![],
tokens: vec![],
source_map: HashMap::new(),
source_root: None,
sources: vec![],
source_contents: vec![],
sources_mapping: vec![],
debug_id: None,
}
}
    /// Sets the debug id for the sourcemap (optional)
    ///
    /// Passing `None` clears any previously set debug id.
    pub fn set_debug_id(&mut self, debug_id: Option<DebugId>) {
        self.debug_id = debug_id;
    }
    /// Sets the file for the sourcemap (optional)
    ///
    /// Accepts anything convertible into a `String`; `None` clears the
    /// currently set file.
    pub fn set_file<T: Into<String>>(&mut self, value: Option<T>) {
        self.file = value.map(Into::into);
    }
    /// Returns the currently set file.
    ///
    /// `None` when no file has been set.
    pub fn get_file(&self) -> Option<&str> {
        self.file.as_deref()
    }
    /// Sets a new value for the source_root.
    ///
    /// Accepts anything convertible into a `String`; `None` clears the
    /// currently set source root.
    pub fn set_source_root<T: Into<String>>(&mut self, value: Option<T>) {
        self.source_root = value.map(Into::into);
    }
    /// Returns the embedded source_root in case there is one.
    pub fn get_source_root(&self) -> Option<&str> {
        self.source_root.as_deref()
    }
    /// Registers a new source with the builder and returns the source ID.
    ///
    /// Duplicate sources are deduplicated: re-registering an existing
    /// source returns its original ID.  The `!0` passed here is the
    /// tombstone "old id", used for sources that do not originate from a
    /// pre-existing map.
    pub fn add_source(&mut self, src: &str) -> u32 {
        self.add_source_with_id(src, !0)
    }
fn add_source_with_id(&mut self, src: &str, old_id: u32) -> u32 {
let count = self.sources.len() as u32;
let id = *self.source_map.entry(src.into()).or_insert(count);
if id == count {
self.sources.push(src.into());
self.sources_mapping.push(old_id);
}
id
}
    /// Changes the source name for an already set source.
    ///
    /// Panics if `src_id` is the `!0` tombstone value or refers to an
    /// index that has not been registered.
    pub fn set_source(&mut self, src_id: u32, src: &str) {
        assert!(src_id != !0, "Cannot set sources for tombstone source id");
        self.sources[src_id as usize] = src.to_string();
    }
/// Looks up a source name for an ID.
pub fn get_source(&self, src_id: u32) -> Option<&str> {
self.sources.get(src_id as usize).map(|x| &x[..])
}
/// Sets the source contents for an already existing source.
pub fn set_source_contents(&mut self, src_id: u32, contents: Option<&str>) {
assert!(src_id != !0, "Cannot set sources for tombstone source id");
if self.sources.len() > self.source_contents.len() {
self.source_contents.resize(self.sources.len(), None);
}
self.source_contents[src_id as usize] = contents.map(str::to_owned);
}
/// Returns the current source contents for a source.
pub fn get_source_contents(&self, src_id: u32) -> Option<&str> {
self.source_contents
.get(src_id as usize)
.and_then(|x| x.as_ref().map(|x| &x[..]))
}
    /// Checks if a given source ID has source contents available.
    ///
    /// Shorthand for `get_source_contents(src_id).is_some()`.
    pub fn has_source_contents(&self, src_id: u32) -> bool {
        self.get_source_contents(src_id).is_some()
    }
    /// Loads source contents from locally accessible files if referenced
    /// accordingly. Returns the number of loaded source contents
    ///
    /// NOTE(review): the returned count is the number of *candidate*
    /// files queued for reading, not the number of successful loads —
    /// files that fail to open or read are skipped but still counted.
    /// Confirm whether callers rely on this before changing it.
    #[cfg(any(unix, windows, target_os = "redox"))]
    pub fn load_local_source_contents(&mut self, base_path: Option<&Path>) -> Result<usize> {
        // Anchor relative source references at `base_path`, resolved
        // against the current working directory.
        let mut abs_path = env::current_dir()?;
        if let Some(path) = base_path {
            abs_path.push(path);
        }
        let base_url = Url::from_directory_path(&abs_path).unwrap();
        // Phase 1: collect (id, path) pairs while `self.source_map` is
        // borrowed immutably.  The actual reads happen in a second pass
        // because storing contents needs `&mut self`.
        let mut to_read = vec![];
        for (source, &src_id) in self.source_map.iter() {
            if self.has_source_contents(src_id) {
                continue;
            }
            if let Some(path) = resolve_local_reference(&base_url, source) {
                to_read.push((src_id, path));
            }
        }
        // Phase 2: best-effort reads; unreadable files are silently skipped.
        let rv = to_read.len();
        for (src_id, path) in to_read {
            if let Ok(mut f) = fs::File::open(path) {
                let mut contents = String::new();
                if f.read_to_string(&mut contents).is_ok() {
                    self.set_source_contents(src_id, Some(&contents));
                }
            }
        }
        Ok(rv)
    }
/// Registers a name with the builder and returns the name ID.
///
/// Adding the same name twice returns the previously assigned ID.
pub fn add_name(&mut self, name: &str) -> u32 {
    let next_id = self.names.len() as u32;
    let id = *self.name_map.entry(name.into()).or_insert(next_id);
    // Only a brand-new entry received `next_id`; append the name then.
    if id == next_id {
        self.names.push(name.into());
    }
    id
}
/// Adds a new mapping to the builder.
///
/// Interns `source`/`name` (if given) and appends a raw token. The
/// source is registered with the tombstone original-source id (`!0`).
pub fn add(
&mut self,
dst_line: u32,
dst_col: u32,
src_line: u32,
src_col: u32,
source: Option<&str>,
name: Option<&str>,
) -> RawToken {
self.add_with_id(dst_line, dst_col, src_line, src_col, source, !0, name)
}
#[allow(clippy::too_many_arguments)]
// Interns the optional source/name and records the resulting token.
// `!0` is the tombstone id used for "no source" / "no name".
fn add_with_id(
    &mut self,
    dst_line: u32,
    dst_col: u32,
    src_line: u32,
    src_col: u32,
    source: Option<&str>,
    source_id: u32,
    name: Option<&str>,
) -> RawToken {
    let src_id = source.map_or(!0, |source| self.add_source_with_id(source, source_id));
    let name_id = name.map_or(!0, |name| self.add_name(name));
    let token = RawToken {
        dst_line,
        dst_col,
        src_line,
        src_col,
        src_id,
        name_id,
    };
    self.tokens.push(token);
    token
}
/// Adds a new mapping to the builder.
///
/// Unlike `add` this takes already interned source/name ids directly;
/// `None` becomes the tombstone id (`!0`).
pub fn add_raw(
    &mut self,
    dst_line: u32,
    dst_col: u32,
    src_line: u32,
    src_col: u32,
    source: Option<u32>,
    name: Option<u32>,
) -> RawToken {
    let token = RawToken {
        dst_line,
        dst_col,
        src_line,
        src_col,
        src_id: source.unwrap_or(!0),
        name_id: name.unwrap_or(!0),
    };
    self.tokens.push(token);
    token
}
/// Shortcut for adding a new mapping based of an already existing token,
/// optionally removing the name.
pub fn add_token(&mut self, token: &Token<'_>, with_name: bool) -> RawToken {
    // Drop the name entirely unless the caller asked to keep it.
    let name = with_name.then(|| token.get_name()).flatten();
    self.add_with_id(
        token.get_dst_line(),
        token.get_dst_col(),
        token.get_src_line(),
        token.get_src_col(),
        token.get_source(),
        token.get_src_id(),
        name,
    )
}
/// Strips common prefixes from the sources in the builder
///
/// A trailing `/` is implied on every prefix so matches happen on whole
/// path segments; at most one prefix is stripped per source.
pub fn strip_prefixes<S: AsRef<str>>(&mut self, prefixes: &[S]) {
    // Normalize the prefixes once up front instead of re-allocating a
    // fresh `String` for every (source, prefix) pair.
    let normalized: Vec<String> = prefixes
        .iter()
        .map(|prefix| {
            let prefix = prefix.as_ref();
            if prefix.ends_with('/') {
                prefix.to_string()
            } else {
                format!("{prefix}/")
            }
        })
        .collect();
    for source in self.sources.iter_mut() {
        for prefix in &normalized {
            if let Some(stripped) = source.strip_prefix(prefix.as_str()) {
                *source = stripped.to_string();
                break;
            }
        }
    }
}
// Hands out the accumulated source-index mapping, leaving an empty vec
// behind. Presumably consumed after `rewrite_with_mapping`-style
// rewrites (see hermes.rs) — TODO confirm against callers.
pub(crate) fn take_mapping(&mut self) -> Vec<u32> {
std::mem::take(&mut self.sources_mapping)
}
/// Converts the builder into a sourcemap.
pub fn into_sourcemap(self) -> SourceMap {
    // Only emit source contents when at least one entry was recorded.
    let contents = match self.source_contents.is_empty() {
        true => None,
        false => Some(self.source_contents),
    };
    let mut sm = SourceMap::new(self.file, self.tokens, self.names, self.sources, contents);
    sm.set_source_root(self.source_root);
    sm.set_debug_id(self.debug_id);
    sm
}
}

View file

@ -0,0 +1,313 @@
use std::io;
use std::io::{BufReader, Read};
use serde_json::Value;
use crate::errors::{Error, Result};
use crate::hermes::decode_hermes;
use crate::jsontypes::RawSourceMap;
use crate::types::{DecodedMap, RawToken, SourceMap, SourceMapIndex, SourceMapSection};
use crate::vlq::parse_vlq_segment_into;
// Prefix of an embedded base64 data-URL sourcemap.
const DATA_PREAMBLE: &str = "data:application/json;base64,";
// State of the junk-header skipping state machine below.
#[derive(PartialEq, Eq)]
enum HeaderState {
// No byte inspected yet.
Undecided,
// Inside a junk guard line (e.g. `)]}'...`).
Junk,
// Saw `\r`; the next byte must be `\n`.
AwaitingNewline,
// Header fully consumed; reads pass through untouched.
PastHeader,
}
/// A reader that transparently skips an optional junk header (such as
/// the XSSI guard `)]}'`) in front of the JSON payload.
pub struct StripHeaderReader<R: Read> {
r: R,
header_state: HeaderState,
}
impl<R: Read> StripHeaderReader<R> {
/// Wraps a reader; no bytes are consumed until the first `read`.
pub fn new(reader: R) -> StripHeaderReader<R> {
StripHeaderReader {
r: reader,
header_state: HeaderState::Undecided,
}
}
}
/// Returns true for bytes that may start a junk guard line (such as the
/// XSSI prefix `)]}'`) in front of the real JSON document.
fn is_junk_json(byte: u8) -> bool {
    matches!(byte, b')' | b']' | b'}' | b'\'')
}
impl<R: Read> Read for StripHeaderReader<R> {
#[inline(always)]
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
// Fast path once the header has been consumed: delegate directly.
if self.header_state == HeaderState::PastHeader {
return self.r.read(buf);
}
self.strip_head_read(buf)
}
}
impl<R: Read> StripHeaderReader<R> {
// Reads through the state machine until the junk header (if any) has
// been skipped, then copies the remaining payload bytes into `buf`.
fn strip_head_read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
// Scratch buffer: bytes are inspected here first and only copied
// into `buf` once the payload start is known.
let mut backing = vec![0; buf.len()];
let local_buf: &mut [u8] = &mut backing;
loop {
let read = self.r.read(local_buf)?;
if read == 0 {
return Ok(0);
}
for (offset, &byte) in local_buf[0..read].iter().enumerate() {
self.header_state = match self.header_state {
HeaderState::Undecided => {
// Very first byte: if it is not junk there is no header
// at all, so forward the whole chunk unmodified.
if is_junk_json(byte) {
HeaderState::Junk
} else {
buf[..read].copy_from_slice(&local_buf[..read]);
self.header_state = HeaderState::PastHeader;
return Ok(read);
}
}
HeaderState::Junk => {
// Consume junk until the end of the line.
if byte == b'\r' {
HeaderState::AwaitingNewline
} else if byte == b'\n' {
HeaderState::PastHeader
} else {
HeaderState::Junk
}
}
HeaderState::AwaitingNewline => {
// A lone `\r` not followed by `\n` is malformed input.
if byte == b'\n' {
HeaderState::PastHeader
} else {
fail!(io::Error::new(
io::ErrorKind::InvalidData,
"expected newline"
));
}
}
HeaderState::PastHeader => {
// Header skipped: forward the rest of this chunk.
let rem = read - offset;
buf[..rem].copy_from_slice(&local_buf[offset..read]);
return Ok(rem);
}
};
}
}
}
}
/// Returns the given slice with an optional leading junk header (such as
/// the XSSI guard `)]}'`) removed.
///
/// When a guard is present, the returned slice starts at the terminating
/// newline. A `\r` not followed by `\n` yields an `InvalidData` error.
pub fn strip_junk_header(slice: &[u8]) -> io::Result<&[u8]> {
    let junk = |b: u8| matches!(b, b')' | b']' | b'}' | b'\'');
    // No junk at the very start means there is no header to strip.
    if slice.first().map_or(true, |&b| !junk(b)) {
        return Ok(slice);
    }
    let mut awaiting_newline = false;
    for (idx, &byte) in slice.iter().enumerate() {
        if awaiting_newline && byte != b'\n' {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "expected newline",
            ));
        }
        if junk(byte) {
            continue;
        }
        match byte {
            b'\r' => awaiting_newline = true,
            b'\n' => return Ok(&slice[idx..]),
            _ => {}
        }
    }
    // The whole slice was header; nothing left.
    Ok(&slice[slice.len()..])
}
/// Decodes a plain (non-indexed) sourcemap from already parsed JSON.
///
/// Walks the `mappings` string and turns the VLQ deltas into absolute
/// `RawToken`s. Source and name references are validated against the
/// `sources`/`names` arrays and fail with dedicated errors when out of
/// range.
pub fn decode_regular(rsm: RawSourceMap) -> Result<SourceMap> {
// All these fields are delta-encoded across segments; the variables
// hold the running absolute values.
let mut dst_col;
let mut src_id = 0;
let mut src_line = 0;
let mut src_col = 0;
let mut name_id = 0;
let names = rsm.names.unwrap_or_default();
let sources = rsm.sources.unwrap_or_default();
let mappings = rsm.mappings.unwrap_or_default();
// Rough token-count upper bound: one per separator, plus slack.
let allocation_size = mappings.matches(&[',', ';'][..]).count() + 10;
let mut tokens = Vec::with_capacity(allocation_size);
// Scratch buffer reused for the decoded VLQ values of each segment.
let mut nums = Vec::with_capacity(6);
for (dst_line, line) in mappings.split(';').enumerate() {
if line.is_empty() {
continue;
}
// The generated column resets at every generated line.
dst_col = 0;
for segment in line.split(',') {
if segment.is_empty() {
continue;
}
nums.clear();
parse_vlq_segment_into(segment, &mut nums)?;
dst_col = (i64::from(dst_col) + nums[0]) as u32;
// `!0` is the tombstone for "no source" / "no name".
let mut src = !0;
let mut name = !0;
if nums.len() > 1 {
// Valid segments carry 1, 4 or 5 fields.
if nums.len() != 4 && nums.len() != 5 {
fail!(Error::BadSegmentSize(nums.len() as u32));
}
src_id = (i64::from(src_id) + nums[1]) as u32;
if src_id >= sources.len() as u32 {
fail!(Error::BadSourceReference(src_id));
}
src = src_id;
src_line = (i64::from(src_line) + nums[2]) as u32;
src_col = (i64::from(src_col) + nums[3]) as u32;
if nums.len() > 4 {
name_id = (i64::from(name_id) + nums[4]) as u32;
if name_id >= names.len() as u32 {
fail!(Error::BadNameReference(name_id));
}
name = name_id;
}
}
tokens.push(RawToken {
dst_line: dst_line as u32,
dst_col,
src_line,
src_col,
src_id: src,
name_id: name,
});
}
}
// Null entries in `sources` become empty strings.
let sources = sources.into_iter().map(Option::unwrap_or_default).collect();
// apparently we can encounter some non string types in real world
// sourcemaps :(
let names = names
.into_iter()
.map(|val| match val {
Value::String(s) => s,
Value::Number(num) => num.to_string(),
_ => "".into(),
})
.collect::<Vec<String>>();
// file sometimes is not a string for unexplicable reasons
let file = rsm.file.map(|val| match val {
Value::String(s) => s,
_ => "<invalid>".into(),
});
let mut sm = SourceMap::new(file, tokens, names, sources, rsm.sources_content);
sm.set_source_root(rsm.source_root);
sm.set_debug_id(rsm.debug_id);
Ok(sm)
}
/// Decodes an indexed ("sections") sourcemap from already parsed JSON.
fn decode_index(rsm: RawSourceMap) -> Result<SourceMapIndex> {
let mut sections = vec![];
for mut raw_section in rsm.sections.unwrap_or_default() {
sections.push(SourceMapSection::new(
(raw_section.offset.line, raw_section.offset.column),
raw_section.url,
// Embedded maps are decoded recursively; each may itself be a
// regular, hermes or nested index map.
match raw_section.map.take() {
Some(map) => Some(decode_common(*map)?),
None => None,
},
));
}
// Sections must be ordered by generated offset for later lookups.
sections.sort_by_key(SourceMapSection::get_offset);
// file sometimes is not a string for unexplicable reasons
let file = rsm.file.map(|val| match val {
Value::String(s) => s,
_ => "<invalid>".into(),
});
Ok(SourceMapIndex::new_ram_bundle_compatible(
file,
sections,
rsm.x_facebook_offsets,
rsm.x_metro_module_paths,
))
}
/// Dispatches decoding based on which discriminating fields are present:
/// `sections` marks an index map, `x_facebook_sources` a hermes map,
/// everything else decodes as a regular map.
fn decode_common(rsm: RawSourceMap) -> Result<DecodedMap> {
    match (rsm.sections.is_some(), rsm.x_facebook_sources.is_some()) {
        (true, _) => Ok(DecodedMap::Index(decode_index(rsm)?)),
        (false, true) => Ok(DecodedMap::Hermes(decode_hermes(rsm)?)),
        (false, false) => Ok(DecodedMap::Regular(decode_regular(rsm)?)),
    }
}
/// Decodes a sourcemap or sourcemap index from a reader
///
/// This accepts both plain sourcemaps and sourcemap indexes, unlike the
/// specialized constructors on the individual types.
pub fn decode<R: Read>(rdr: R) -> Result<DecodedMap> {
    let mut stripped = StripHeaderReader::new(rdr);
    let mut buffered = BufReader::new(&mut stripped);
    let rsm: RawSourceMap = serde_json::from_reader(&mut buffered)?;
    decode_common(rsm)
}
/// Decodes a sourcemap or sourcemap index from a byte slice
///
/// This accepts both plain sourcemaps and sourcemap indexes, unlike the
/// specialized constructors on the individual types.
pub fn decode_slice(slice: &[u8]) -> Result<DecodedMap> {
    let json = strip_junk_header(slice)?;
    let rsm: RawSourceMap = serde_json::from_slice(json)?;
    decode_common(rsm)
}
/// Loads a sourcemap from a data URL
pub fn decode_data_url(url: &str) -> Result<DecodedMap> {
if !url.starts_with(DATA_PREAMBLE) {
fail!(Error::InvalidDataUrl);
}
let data_b64 = &url[DATA_PREAMBLE.len()..];
let data = data_encoding::BASE64
.decode(data_b64.as_bytes())
.map_err(|_| Error::InvalidDataUrl)?;
decode_slice(&data[..])
}
#[test]
fn test_strip_header() {
use std::io::BufRead;
// The junk guard (everything up to and including `\r\n`) must be
// dropped; the payload line comes through intact.
let input: &[_] = b")]}garbage\r\n[1, 2, 3]";
let mut reader = io::BufReader::new(StripHeaderReader::new(input));
let mut text = String::new();
reader.read_line(&mut text).ok();
assert_eq!(text, "[1, 2, 3]");
}
#[test]
fn test_bad_newline() {
    use std::io::BufRead;
    // A `\r` not followed by `\n` inside the junk header is an error.
    let input: &[_] = b")]}'\r[1, 2, 3]";
    let mut reader = io::BufReader::new(StripHeaderReader::new(input));
    let mut text = String::new();
    let err = reader
        .read_line(&mut text)
        .expect_err("reading past a bare \\r must fail");
    assert_eq!(err.kind(), io::ErrorKind::InvalidData);
}

View file

@ -0,0 +1,127 @@
use std::io::{BufRead, BufReader, Read};
use std::str;
use crate::decoder::{decode_data_url, strip_junk_header, StripHeaderReader};
use crate::errors::Result;
use crate::jsontypes::MinimalRawSourceMap;
use crate::types::DecodedMap;
use url::Url;
/// Represents a reference to a sourcemap
#[derive(PartialEq, Eq, Debug)]
pub enum SourceMapRef {
/// A regular URL reference (`//# sourceMappingURL=`)
Ref(String),
/// A legacy URL reference (`//@ sourceMappingURL=`)
LegacyRef(String),
}
// Joins a (possibly relative) reference URL against the minified
// file's URL.
fn resolve_url(ref_url: &str, minified_url: &Url) -> Option<Url> {
minified_url.join(ref_url).ok()
}
impl SourceMapRef {
    /// Return the URL of the reference
    pub fn get_url(&self) -> &str {
        let (SourceMapRef::Ref(url) | SourceMapRef::LegacyRef(url)) = self;
        url
    }

    /// Resolves the reference.
    ///
    /// The given minified URL needs to be the URL of the minified file. The
    /// result is the fully resolved URL of where the source map can be
    /// located. Data URLs cannot be resolved and yield `None`.
    pub fn resolve(&self, minified_url: &str) -> Option<String> {
        let url = self.get_url();
        if url.starts_with("data:") {
            return None;
        }
        let base = Url::parse(minified_url).ok()?;
        resolve_url(url, &base).map(|resolved| resolved.to_string())
    }

    /// Resolves the reference against a local file path
    ///
    /// This is similar to `resolve` but operates on file paths.
    #[cfg(any(unix, windows, target_os = "redox"))]
    pub fn resolve_path(&self, minified_path: &std::path::Path) -> Option<std::path::PathBuf> {
        let url = self.get_url();
        if url.starts_with("data:") {
            return None;
        }
        let base = Url::from_file_path(minified_path).ok()?;
        resolve_url(url, &base)?.to_file_path().ok()
    }

    /// Load an embedded sourcemap if there is a data URL.
    pub fn get_embedded_sourcemap(&self) -> Result<Option<DecodedMap>> {
        let url = self.get_url();
        match url.starts_with("data:") {
            true => decode_data_url(url).map(Some),
            false => Ok(None),
        }
    }
}
/// Locates a sourcemap reference
///
/// Given a reader to a JavaScript file this tries to find the correct
/// sourcemap reference comment and return it. Returns `Ok(None)` when
/// no reference comment is present.
pub fn locate_sourcemap_reference<R: Read>(rdr: R) -> Result<Option<SourceMapRef>> {
    for line in BufReader::new(rdr).lines() {
        let line = line?;
        // `//#` is the current convention; `//@` is the legacy form.
        // `strip_prefix` replaces the previous magic `[21..]` byte slice
        // and the redundant UTF-8 re-validation of an existing `&str`.
        if let Some(rest) = line.strip_prefix("//# sourceMappingURL=") {
            return Ok(Some(SourceMapRef::Ref(rest.trim().to_owned())));
        }
        if let Some(rest) = line.strip_prefix("//@ sourceMappingURL=") {
            return Ok(Some(SourceMapRef::LegacyRef(rest.trim().to_owned())));
        }
    }
    Ok(None)
}
/// Locates a sourcemap reference in a slice
///
/// This is an alternative to `locate_sourcemap_reference` that operates
/// on slices.
pub fn locate_sourcemap_reference_slice(slice: &[u8]) -> Result<Option<SourceMapRef>> {
// `&[u8]` implements `Read`, so the reader variant does the work.
locate_sourcemap_reference(slice)
}
// Heuristic check whether minimally parsed JSON looks like a sourcemap.
// Note the operator precedence: `&&` binds tighter than `||`, so this
// accepts either a flat map (version/file, plus one of the source-ish
// fields, plus mappings) or anything carrying `sections` (an index map).
fn is_sourcemap_common(rsm: MinimalRawSourceMap) -> bool {
(rsm.version.is_some() || rsm.file.is_some())
&& ((rsm.sources.is_some()
|| rsm.source_root.is_some()
|| rsm.sources_content.is_some()
|| rsm.names.is_some())
&& rsm.mappings.is_some())
|| rsm.sections.is_some()
}
// Parses just enough of the stream (ignoring field values) to run the
// sourcemap heuristic.
fn is_sourcemap_impl<R: Read>(rdr: R) -> Result<bool> {
    let mut stripped = StripHeaderReader::new(rdr);
    let mut buffered = BufReader::new(&mut stripped);
    let minimal: MinimalRawSourceMap = serde_json::from_reader(&mut buffered)?;
    Ok(is_sourcemap_common(minimal))
}

// Slice variant of the above; strips the junk header first.
fn is_sourcemap_slice_impl(slice: &[u8]) -> Result<bool> {
    let minimal: MinimalRawSourceMap = serde_json::from_slice(strip_junk_header(slice)?)?;
    Ok(is_sourcemap_common(minimal))
}
/// Checks if a valid sourcemap can be read from the given reader
///
/// Any I/O or parse error is treated as "not a sourcemap".
pub fn is_sourcemap<R: Read>(rdr: R) -> bool {
    matches!(is_sourcemap_impl(rdr), Ok(true))
}

/// Checks if the given byte slice contains a sourcemap
///
/// Any parse error is treated as "not a sourcemap".
pub fn is_sourcemap_slice(slice: &[u8]) -> bool {
    matches!(is_sourcemap_slice_impl(slice), Ok(true))
}

View file

@ -0,0 +1,141 @@
use std::io::Write;
use serde_json::Value;
use crate::errors::Result;
use crate::jsontypes::{RawSection, RawSectionOffset, RawSourceMap};
use crate::types::{DecodedMap, SourceMap, SourceMapIndex};
use crate::vlq::encode_vlq;
/// Types that can be lowered into the raw JSON representation of a
/// sourcemap for serialization.
pub trait Encodable {
fn as_raw_sourcemap(&self) -> RawSourceMap;
}
/// Serializes any encodable map as JSON into the given writer.
pub fn encode<M: Encodable, W: Write>(sm: &M, mut w: W) -> Result<()> {
let ty = sm.as_raw_sourcemap();
serde_json::to_writer(&mut w, &ty)?;
Ok(())
}
// Appends the VLQ encoding of the delta `a - b` to `out`.
fn encode_vlq_diff(out: &mut String, a: u32, b: u32) {
encode_vlq(out, i64::from(a) - i64::from(b))
}
/// Serializes the tokens of a sourcemap into the delta/VLQ encoded
/// `mappings` string.
fn serialize_mappings(sm: &SourceMap) -> String {
let mut rv = String::new();
// dst == minified == generated
// Every field is encoded as a delta against the previous token.
let mut prev_dst_line = 0;
let mut prev_dst_col = 0;
let mut prev_src_line = 0;
let mut prev_src_col = 0;
let mut prev_name_id = 0;
let mut prev_src_id = 0;
for (idx, token) in sm.tokens().enumerate() {
let idx = idx as u32;
if token.get_dst_line() != prev_dst_line {
// New generated line(s): emit one `;` per line and reset the
// column delta base.
prev_dst_col = 0;
while token.get_dst_line() != prev_dst_line {
rv.push(';');
prev_dst_line += 1;
}
} else if idx > 0 {
// Skip exact duplicates of the preceding token.
if Some(&token) == sm.get_token(idx - 1).as_ref() {
continue;
}
rv.push(',');
}
encode_vlq_diff(&mut rv, token.get_dst_col(), prev_dst_col);
prev_dst_col = token.get_dst_col();
if token.has_source() {
encode_vlq_diff(&mut rv, token.get_src_id(), prev_src_id);
prev_src_id = token.get_src_id();
encode_vlq_diff(&mut rv, token.get_src_line(), prev_src_line);
prev_src_line = token.get_src_line();
encode_vlq_diff(&mut rv, token.get_src_col(), prev_src_col);
prev_src_col = token.get_src_col();
if token.has_name() {
encode_vlq_diff(&mut rv, token.get_name_id(), prev_name_id);
prev_name_id = token.get_name_id();
}
}
}
rv
}
impl Encodable for SourceMap {
    fn as_raw_sourcemap(&self) -> RawSourceMap {
        // Collect the contents first; `sourcesContent` is only emitted
        // when at least one entry is actually populated.
        let contents: Vec<Option<String>> = self
            .source_contents()
            .map(|contents| contents.map(str::to_owned))
            .collect();
        let have_contents = contents.iter().any(Option::is_some);
        RawSourceMap {
            version: Some(3),
            file: self.get_file().map(|x| Value::String(x.to_string())),
            sources: Some(self.sources.iter().map(|x| Some(x.to_string())).collect()),
            source_root: self.get_source_root().map(str::to_owned),
            sources_content: if have_contents { Some(contents) } else { None },
            sections: None,
            names: Some(self.names().map(|x| Value::String(x.to_string())).collect()),
            mappings: Some(serialize_mappings(self)),
            x_facebook_offsets: None,
            x_metro_module_paths: None,
            x_facebook_sources: None,
            debug_id: self.get_debug_id(),
        }
    }
}
impl Encodable for SourceMapIndex {
fn as_raw_sourcemap(&self) -> RawSourceMap {
// An index map only carries `sections`; the flat-map fields stay
// empty.
RawSourceMap {
version: Some(3),
file: self.get_file().map(|x| Value::String(x.to_string())),
sources: None,
source_root: None,
sources_content: None,
sections: Some(
self.sections()
.map(|section| RawSection {
offset: RawSectionOffset {
line: section.get_offset_line(),
column: section.get_offset_col(),
},
url: section.get_url().map(str::to_owned),
// Embedded maps are lowered recursively.
map: section
.get_sourcemap()
.map(|sm| Box::new(sm.as_raw_sourcemap())),
})
.collect(),
),
names: None,
mappings: None,
x_facebook_offsets: None,
x_metro_module_paths: None,
x_facebook_sources: None,
debug_id: None,
}
}
}
impl Encodable for DecodedMap {
    // Delegates to whichever concrete map variant is held.
    fn as_raw_sourcemap(&self) -> RawSourceMap {
        match self {
            DecodedMap::Regular(sm) => sm.as_raw_sourcemap(),
            DecodedMap::Index(smi) => smi.as_raw_sourcemap(),
            DecodedMap::Hermes(smh) => smh.as_raw_sourcemap(),
        }
    }
}

View file

@ -0,0 +1,119 @@
use std::error;
use std::fmt;
use std::io;
use std::str;
use std::string;
/// Represents results from this library
pub type Result<T> = std::result::Result<T, Error>;
/// Represents different failure cases
///
/// Lower-level errors (io/utf8/json/scroll) are wrapped transparently;
/// the remaining variants describe malformed sourcemap data.
#[derive(Debug)]
pub enum Error {
/// a std::io error
Io(io::Error),
#[cfg(feature = "ram_bundle")]
/// a scroll error
Scroll(scroll::Error),
/// a std::str::Utf8Error
Utf8(str::Utf8Error),
/// a JSON parsing related failure
BadJson(serde_json::Error),
/// a VLQ string was malformed and data was left over
VlqLeftover,
/// a VLQ string was empty and no values could be decoded.
VlqNoValues,
/// Overflow in Vlq handling
VlqOverflow,
/// a mapping segment had an unsupported size
BadSegmentSize(u32),
/// a reference to a non existing source was encountered
BadSourceReference(u32),
/// a reference to a non existing name was encountered
BadNameReference(u32),
/// Indicates that an incompatible sourcemap format was encountered
IncompatibleSourceMap,
/// Indicates an invalid data URL
InvalidDataUrl,
/// Flatten failed
CannotFlatten(String),
/// The magic of a RAM bundle did not match
InvalidRamBundleMagic,
/// The RAM bundle index was malformed
InvalidRamBundleIndex,
/// A RAM bundle entry was invalid
InvalidRamBundleEntry,
/// Tried to operate on a non RAM bundle file
NotARamBundle,
}
// Conversions that let `?` lift lower-level errors into `Error`.
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Io(err)
}
}
#[cfg(feature = "ram_bundle")]
impl From<scroll::Error> for Error {
fn from(err: scroll::Error) -> Self {
Error::Scroll(err)
}
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Error {
// A `FromUtf8Error` is a `Utf8Error` plus the owned bytes; route
// it through the `str::Utf8Error` conversion below.
From::from(err.utf8_error())
}
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Error {
Error::Utf8(err)
}
}
impl From<serde_json::Error> for Error {
fn from(err: serde_json::Error) -> Error {
Error::BadJson(err)
}
}
impl error::Error for Error {
    /// Returns the underlying lower-level error, if any.
    ///
    /// Implemented as `source` (the modern API) rather than the
    /// deprecated `cause`; the default `cause` delegates here, so
    /// existing callers keep working.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        match *self {
            Error::Io(ref err) => Some(err),
            #[cfg(feature = "ram_bundle")]
            Error::Scroll(ref err) => Some(err),
            Error::Utf8(ref err) => Some(err),
            Error::BadJson(ref err) => Some(err),
            _ => None,
        }
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Wrapped errors display transparently (or with a short prefix);
// all other variants carry fixed messages.
match *self {
Error::Io(ref msg) => write!(f, "{msg}"),
Error::Utf8(ref msg) => write!(f, "{msg}"),
Error::BadJson(ref err) => write!(f, "bad json: {err}"),
#[cfg(feature = "ram_bundle")]
Error::Scroll(ref err) => write!(f, "parse error: {err}"),
Error::VlqLeftover => write!(f, "leftover cur/shift in vlq decode"),
Error::VlqNoValues => write!(f, "vlq decode did not produce any values"),
Error::VlqOverflow => write!(f, "vlq decode caused an overflow"),
Error::BadSegmentSize(size) => write!(f, "got {size} segments, expected 4 or 5"),
Error::BadSourceReference(id) => write!(f, "bad reference to source #{id}"),
Error::BadNameReference(id) => write!(f, "bad reference to name #{id}"),
Error::IncompatibleSourceMap => write!(f, "encountered incompatible sourcemap format"),
Error::InvalidDataUrl => write!(f, "the provided data URL is invalid"),
Error::CannotFlatten(ref msg) => {
write!(f, "cannot flatten the indexed sourcemap: {msg}")
}
Error::InvalidRamBundleMagic => write!(f, "invalid magic number for ram bundle"),
Error::InvalidRamBundleIndex => write!(f, "invalid module index in ram bundle"),
Error::InvalidRamBundleEntry => write!(f, "invalid ram bundle module entry"),
Error::NotARamBundle => write!(f, "not a ram bundle"),
}
}
}

View file

@ -0,0 +1,215 @@
use crate::decoder::{decode, decode_regular, decode_slice};
use crate::encoder::{encode, Encodable};
use crate::errors::{Error, Result};
use crate::jsontypes::{FacebookScopeMapping, FacebookSources, RawSourceMap};
use crate::types::{DecodedMap, RewriteOptions, SourceMap};
use crate::utils::greatest_lower_bound;
use crate::vlq::parse_vlq_segment_into;
use crate::Token;
use std::io::{Read, Write};
use std::ops::{Deref, DerefMut};
/// These are starting locations of scopes.
/// The `name_index` represents the index into the `HermesFunctionMap.names` vec,
/// which represents the function names/scopes.
/// These are starting locations of scopes.
/// The `name_index` represents the index into the `HermesFunctionMap.names` vec,
/// which represents the function names/scopes.
#[derive(Debug, Clone)]
pub struct HermesScopeOffset {
line: u32,
column: u32,
name_index: u32,
}
/// Scope metadata of a single source: its function names plus the
/// decoded scope start offsets.
#[derive(Debug, Clone)]
pub struct HermesFunctionMap {
names: Vec<String>,
mappings: Vec<HermesScopeOffset>,
}
/// Represents a `react-native`-style SourceMap, which has additional scope
/// information embedded.
#[derive(Debug, Clone)]
pub struct SourceMapHermes {
pub(crate) sm: SourceMap,
// There should be one `HermesFunctionMap` per each `sources` entry in the main SourceMap.
function_maps: Vec<Option<HermesFunctionMap>>,
// XXX: right now, I am too lazy to actually serialize the above `function_maps`
// back into json types, so just keep the original json. Might be a bit inefficient, but meh.
raw_facebook_sources: FacebookSources,
}
// The Deref impls expose the full `SourceMap` API on `SourceMapHermes`.
impl Deref for SourceMapHermes {
type Target = SourceMap;
fn deref(&self) -> &Self::Target {
&self.sm
}
}
impl DerefMut for SourceMapHermes {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.sm
}
}
impl Encodable for SourceMapHermes {
fn as_raw_sourcemap(&self) -> RawSourceMap {
// TODO: need to serialize the `HermesFunctionMap` mappings
let mut rsm = self.sm.as_raw_sourcemap();
rsm.x_facebook_sources = self.raw_facebook_sources.clone();
rsm
}
}
impl SourceMapHermes {
/// Creates a sourcemap from a reader over a JSON stream in UTF-8
/// format.
///
/// See [`SourceMap::from_reader`](struct.SourceMap.html#method.from_reader)
pub fn from_reader<R: Read>(rdr: R) -> Result<Self> {
match decode(rdr)? {
DecodedMap::Hermes(sm) => Ok(sm),
_ => Err(Error::IncompatibleSourceMap),
}
}
/// Creates a sourcemap from a reader over a JSON byte slice in UTF-8
/// format.
///
/// See [`SourceMap::from_slice`](struct.SourceMap.html#method.from_slice)
pub fn from_slice(slice: &[u8]) -> Result<Self> {
match decode_slice(slice)? {
DecodedMap::Hermes(sm) => Ok(sm),
_ => Err(Error::IncompatibleSourceMap),
}
}
/// Writes a sourcemap into a writer.
///
/// See [`SourceMap::to_writer`](struct.SourceMap.html#method.to_writer)
pub fn to_writer<W: Write>(&self, w: W) -> Result<()> {
encode(self, w)
}
/// Given a bytecode offset, this will find the enclosing scopes function
/// name.
pub fn get_original_function_name(&self, bytecode_offset: u32) -> Option<&str> {
// Looks up generated line 0 with the bytecode offset as column.
let token = self.sm.lookup_token(0, bytecode_offset)?;
self.get_scope_for_token(token)
}
/// Resolves the name of the enclosing function for the given [`Token`].
pub fn get_scope_for_token(&self, token: Token) -> Option<&str> {
let function_map = self
.function_maps
.get(token.get_src_id() as usize)?
.as_ref()?;
// Find the closest mapping, just like here:
// https://github.com/facebook/metro/blob/63b523eb20e7bdf62018aeaf195bb5a3a1a67f36/packages/metro-symbolicate/src/SourceMetadataMapConsumer.js#L204-L231
let mapping = greatest_lower_bound(&function_map.mappings, &token.get_src(), |o| {
(o.line, o.column)
})?;
function_map
.names
.get(mapping.name_index as usize)
.map(|n| n.as_str())
}
/// This rewrites the sourcemap according to the provided rewrite
/// options.
///
/// See [`SourceMap::rewrite`](struct.SourceMap.html#method.rewrite)
pub fn rewrite(self, options: &RewriteOptions<'_>) -> Result<Self> {
let Self {
sm,
mut function_maps,
mut raw_facebook_sources,
} = self;
let (sm, mapping) = sm.rewrite_with_mapping(options)?;
// Reorder the per-source scope metadata to follow the rewritten
// source indices. NOTE(review): the `>=` guard and direct indexing
// assume every index in `mapping` is in range — confirm upstream.
if function_maps.len() >= mapping.len() {
function_maps = mapping
.iter()
.map(|idx| function_maps[*idx as usize].take())
.collect();
raw_facebook_sources = raw_facebook_sources.map(|mut sources| {
mapping
.into_iter()
.map(|idx| sources[idx as usize].take())
.collect()
});
}
Ok(Self {
sm,
function_maps,
raw_facebook_sources,
})
}
}
/// Decodes a hermes/metro sourcemap: the regular map plus the
/// `x_facebook_sources` scope metadata.
pub fn decode_hermes(mut rsm: RawSourceMap) -> Result<SourceMapHermes> {
let x_facebook_sources = rsm
.x_facebook_sources
.take()
.ok_or(Error::IncompatibleSourceMap)?;
// This is basically the logic from here:
// https://github.com/facebook/metro/blob/63b523eb20e7bdf62018aeaf195bb5a3a1a67f36/packages/metro-symbolicate/src/SourceMetadataMapConsumer.js#L182-L202
// Scratch buffer reused for each decoded VLQ segment.
let mut nums = Vec::with_capacity(4);
let function_maps = x_facebook_sources
.iter()
.map(|v| {
// Only the first metadata entry (the function map) is used.
let FacebookScopeMapping {
names,
mappings: raw_mappings,
} = v.as_ref()?.iter().next()?;
let mut mappings = vec![];
// Line and name deltas carry across lines; the column resets
// at every line.
let mut line = 1;
let mut name_index = 0;
for line_mapping in raw_mappings.split(';') {
if line_mapping.is_empty() {
continue;
}
let mut column = 0;
for mapping in line_mapping.split(',') {
if mapping.is_empty() {
continue;
}
nums.clear();
parse_vlq_segment_into(mapping, &mut nums).ok()?;
let mut nums = nums.iter().copied();
column = (i64::from(column) + nums.next()?) as u32;
// Missing trailing fields mean "unchanged" (delta 0).
name_index = (i64::from(name_index) + nums.next().unwrap_or(0)) as u32;
line = (i64::from(line) + nums.next().unwrap_or(0)) as u32;
mappings.push(HermesScopeOffset {
column,
line,
name_index,
});
}
}
Some(HermesFunctionMap {
names: names.clone(),
mappings,
})
})
.collect();
let sm = decode_regular(rsm)?;
Ok(SourceMapHermes {
sm,
function_maps,
raw_facebook_sources: Some(x_facebook_sources),
})
}

View file

@ -0,0 +1,84 @@
use unicode_id::UnicodeID;
/// Returns true if `c` is a valid character for an identifier start.
fn is_valid_start(c: char) -> bool {
    match c {
        '$' | '_' => true,
        c if c.is_ascii() => c.is_ascii_alphabetic(),
        c => UnicodeID::is_id_start(c),
    }
}
/// Returns true if `c` is a valid character for an identifier part after start.
fn is_valid_continue(c: char) -> bool {
    // As specified by the ECMA-262 spec, U+200C (ZERO WIDTH NON-JOINER) and
    // U+200D (ZERO WIDTH JOINER) are format-control characters used to make
    // necessary distinctions when forming words or phrases in certain
    // languages. They are however not considered by UnicodeID to be
    // universally valid identifier characters.
    match c {
        '$' | '_' | '\u{200c}' | '\u{200d}' => true,
        c if c.is_ascii() => c.is_ascii_alphanumeric(),
        c => UnicodeID::is_id_continue(c),
    }
}
/// Strips a leading JavaScript identifier off `s`.
///
/// Returns the longest prefix of `s` forming a valid identifier, or
/// `None` when `s` is empty or does not start with a valid identifier
/// start character.
fn strip_identifier(s: &str) -> Option<&str> {
    let mut iter = s.char_indices();
    // The first character must be a valid identifier start.
    let (_, first) = iter.next()?;
    if !is_valid_start(first) {
        return None;
    }
    // Track the end as a byte offset *past* the last valid character.
    // The previous implementation sliced with `..=<char start index>`,
    // which panicked on multi-byte characters by cutting a code point
    // in half (e.g. a non-ASCII identifier start followed by a space).
    let mut end = first.len_utf8();
    for (idx, c) in iter {
        if !is_valid_continue(c) {
            break;
        }
        end = idx + c.len_utf8();
    }
    Some(&s[..end])
}
pub fn is_valid_javascript_identifier(s: &str) -> bool {
// check stripping does not reduce the length of the token
strip_identifier(s).map_or(0, |t| t.len()) == s.len()
}
/// Finds the first valid identifier in the JS Source string given, provided
/// the string begins with the identifier or whitespace.
pub fn get_javascript_token(source_line: &str) -> Option<&str> {
match source_line.split_whitespace().next() {
Some(s) => strip_identifier(s),
None => None,
}
}
#[test]
fn test_is_valid_javascript_identifier() {
// assert_eq!(is_valid_javascript_identifier("foo 123"));
assert!(is_valid_javascript_identifier("foo_$123"));
assert!(!is_valid_javascript_identifier(" foo"));
assert!(!is_valid_javascript_identifier("foo "));
assert!(!is_valid_javascript_identifier("[123]"));
assert!(!is_valid_javascript_identifier("foo.bar"));
// Should these pass?
// assert!(is_valid_javascript_identifier("foo [bar]"));
// assert!(is_valid_javascript_identifier("foo[bar]"));
// `get_javascript_token` takes the first whitespace-separated chunk
// and strips it down to its leading identifier.
assert_eq!(get_javascript_token("foo "), Some("foo"));
assert_eq!(get_javascript_token("f _hi"), Some("f"));
assert_eq!(get_javascript_token("foo.bar"), Some("foo"));
assert_eq!(get_javascript_token("[foo,bar]"), None);
}

View file

@ -0,0 +1,69 @@
use debugid::DebugId;
use serde::de::IgnoredAny;
use serde::{Deserialize, Serialize};
use serde_json::Value;
// Offset of an index-map section: generated line/column where the
// embedded map starts applying.
#[derive(Serialize, Deserialize)]
pub struct RawSectionOffset {
pub line: u32,
pub column: u32,
}
// One section of an indexed sourcemap: either an external `url` or an
// embedded `map`.
#[derive(Serialize, Deserialize)]
pub struct RawSection {
pub offset: RawSectionOffset,
pub url: Option<String>,
pub map: Option<Box<RawSourceMap>>,
}
// Metro/Hermes scope metadata: function names plus VLQ-encoded scope
// mappings (decoded in `hermes.rs`).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct FacebookScopeMapping {
pub names: Vec<String>,
pub mappings: String,
}
// Each element here is matching the `sources` of the outer SourceMap.
// It has a list of metadata, the first one of which is a *function map*,
// containing scope information as a nested source map.
// See the decoder in `hermes.rs` for details.
pub type FacebookSources = Option<Vec<Option<Vec<FacebookScopeMapping>>>>;
// The raw JSON shape of a sourcemap. Regular, indexed and hermes
// variants all share this struct; fields unused by a variant stay
// `None` and are skipped on serialization.
#[derive(Serialize, Deserialize)]
pub struct RawSourceMap {
pub version: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub file: Option<Value>,
pub sources: Option<Vec<Option<String>>>,
#[serde(rename = "sourceRoot", skip_serializing_if = "Option::is_none")]
pub source_root: Option<String>,
#[serde(rename = "sourcesContent", skip_serializing_if = "Option::is_none")]
pub sources_content: Option<Vec<Option<String>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub sections: Option<Vec<RawSection>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub names: Option<Vec<Value>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub mappings: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub x_facebook_offsets: Option<Vec<Option<u32>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub x_metro_module_paths: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub x_facebook_sources: FacebookSources,
#[serde(skip_serializing_if = "Option::is_none")]
pub debug_id: Option<DebugId>,
}
// A "just enough" mirror of `RawSourceMap` used by the detector: only
// field *presence* matters, so all values are `IgnoredAny`.
#[derive(Deserialize)]
pub struct MinimalRawSourceMap {
pub version: Option<u32>,
pub file: Option<IgnoredAny>,
pub sources: Option<IgnoredAny>,
#[serde(rename = "sourceRoot")]
pub source_root: Option<IgnoredAny>,
#[serde(rename = "sourcesContent")]
pub sources_content: Option<IgnoredAny>,
pub sections: Option<IgnoredAny>,
pub names: Option<IgnoredAny>,
pub mappings: Option<IgnoredAny>,
}

80
third-party/vendor/sourcemap/src/lib.rs vendored Normal file
View file

@ -0,0 +1,80 @@
//! This library implements basic processing of JavaScript sourcemaps.
//!
//! # Installation
//!
//! The crate is called sourcemap and you can depend on it via cargo:
//!
//! ```toml
//! [dependencies]
//! sourcemap = "*"
//! ```
//!
//! If you want to use the git version:
//!
//! ```toml
//! [dependencies.sourcemap]
//! git = "https://github.com/getsentry/rust-sourcemap.git"
//! ```
//!
//! # Basic Operation
//!
//! This crate can load JavaScript sourcemaps from JSON files. It uses
//! `serde` for parsing of the JSON data. Due to the nature of sourcemaps
//! the entirety of the file must be loaded into memory which can be quite
//! memory intensive.
//!
//! Usage:
//!
//! ```rust
//! use sourcemap::SourceMap;
//! let input: &[_] = b"{
//! \"version\":3,
//! \"sources\":[\"coolstuff.js\"],
//! \"names\":[\"x\",\"alert\"],
//! \"mappings\":\"AAAA,GAAIA,GAAI,EACR,IAAIA,GAAK,EAAG,CACVC,MAAM\"
//! }";
//! let sm = SourceMap::from_reader(input).unwrap();
//! let token = sm.lookup_token(0, 0).unwrap(); // line-number and column
//! println!("token: {}", token);
//! ```
//!
//! # Features
//!
//! Functionality of the crate can be turned on and off by feature flags. This is the
//! current list of feature flags:
//!
//! * `ram_bundle`: turns on RAM bundle support
//!
// NOTE(review): as an *outer* attribute this lint applies only to the
// `macros` module below; crate-wide doc linting would require the inner form
// `#![warn(missing_docs)]` at the very top of the file — confirm intent.
#[warn(missing_docs)]
mod macros;

// Public API surface: re-exports of the crate's main types and functions.
pub use crate::builder::SourceMapBuilder;
pub use crate::decoder::{decode, decode_data_url, decode_slice};
pub use crate::detector::{
    is_sourcemap, is_sourcemap_slice, locate_sourcemap_reference, locate_sourcemap_reference_slice,
    SourceMapRef,
};
pub use crate::errors::{Error, Result};
pub use crate::hermes::SourceMapHermes;
pub use crate::sourceview::SourceView;
pub use crate::types::{
    DecodedMap, IndexIter, NameIter, RawToken, RewriteOptions, SourceContentsIter, SourceIter,
    SourceMap, SourceMapIndex, SourceMapSection, SourceMapSectionIter, Token, TokenIter,
};
pub use crate::utils::make_relative_path;

// Internal implementation modules.
mod builder;
mod decoder;
mod detector;
mod encoder;
mod errors;
mod hermes;
mod js_identifiers;
mod jsontypes;
mod sourceview;
mod types;
mod utils;

#[cfg(feature = "ram_bundle")]
pub mod ram_bundle;
pub mod vlq;

View file

@ -0,0 +1,7 @@
#![macro_use]
// Shorthand used across the crate to early-return an error: the given
// expression is converted into the function's error type via `From`,
// mirroring what the `?` operator does for `Err` values.
macro_rules! fail {
    ($expr:expr) => {
        return Err(::std::convert::From::from($expr));
    };
}

View file

@ -0,0 +1,614 @@
//! RAM bundle operations
use scroll::Pread;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fs;
use std::fs::File;
use std::io::Read;
use std::ops::Range;
use std::path::Path;
use crate::builder::SourceMapBuilder;
use crate::errors::{Error, Result};
use crate::sourceview::SourceView;
use crate::types::{SourceMap, SourceMapIndex};
/// Magic number for RAM bundles
pub const RAM_BUNDLE_MAGIC: u32 = 0xFB0B_D1E5;
// Name of the sibling directory that holds one file per module in the
// file ("unbundle") RAM bundle layout.
const JS_MODULES_DIR_NAME: &str = "js-modules";
/// Represents a RAM bundle header
///
/// Read straight from the start of an indexed bundle via `scroll`
/// (little-endian, packed C layout).
#[derive(Debug, Pread, Clone, Copy)]
#[repr(C, packed)]
pub struct RamBundleHeader {
    // Must equal `RAM_BUNDLE_MAGIC` for a valid indexed bundle.
    magic: u32,
    // Number of entries in the module table that follows the header.
    module_count: u32,
    // Size in bytes of the startup code section.
    startup_code_size: u32,
}

impl RamBundleHeader {
    /// Checks if the magic matches.
    pub fn is_valid_magic(&self) -> bool {
        self.magic == RAM_BUNDLE_MAGIC
    }
}
// One slot of the indexed bundle's module table.
#[derive(Debug, Pread, Clone, Copy)]
#[repr(C, packed)]
struct ModuleEntry {
    // Byte offset of the module payload, relative to where the startup
    // code begins (see `IndexedRamBundle::get_module`).
    offset: u32,
    // Payload length in bytes, including a trailing NUL byte.
    length: u32,
}

impl ModuleEntry {
    // An all-zero entry marks a module ID with no module stored for it.
    pub fn is_empty(self) -> bool {
        self.offset == 0 && self.length == 0
    }
}
/// Represents an indexed RAM bundle module
///
/// This type is used on iOS by default.
#[derive(Debug)]
pub struct RamBundleModule<'a> {
id: usize,
data: &'a [u8],
}
impl<'a> RamBundleModule<'a> {
/// Returns the integer ID of the module.
pub fn id(&self) -> usize {
self.id
}
/// Returns a slice to the data in the module.
pub fn data(&self) -> &'a [u8] {
self.data
}
/// Returns a source view of the data.
///
/// This operation fails if the source code is not valid UTF-8.
pub fn source_view(&self) -> Result<SourceView<'a>> {
match std::str::from_utf8(self.data) {
Ok(s) => Ok(SourceView::new(s)),
Err(e) => Err(Error::Utf8(e)),
}
}
}
/// An iterator over modules in a RAM bundle
pub struct RamBundleModuleIter<'a> {
    range: Range<usize>,
    ram_bundle: &'a RamBundle<'a>,
}

impl<'a> Iterator for RamBundleModuleIter<'a> {
    type Item = Result<RamBundleModule<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        let ram_bundle = self.ram_bundle;
        // `get_module` yields `Result<Option<_>>`; transposing flips it to
        // `Option<Result<_>>` so `find_map` both skips the `Ok(None)` holes
        // and stops at the first real module or error.
        self.range
            .by_ref()
            .find_map(|id| ram_bundle.get_module(id).transpose())
    }
}
/// The type of ram bundle.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub enum RamBundleType {
    /// An indexed RAM bundle: header, module table and payloads in one file.
    Indexed,
    /// A file ("unbundle") RAM bundle: a startup file plus one file per
    /// module in a sibling `js-modules/` directory.
    Unbundle,
}

// Internal representation backing `RamBundle`.
#[derive(Debug, Clone)]
enum RamBundleImpl<'a> {
    /// Indexed RAM bundle
    Indexed(IndexedRamBundle<'a>),
    /// File (unbundle) RAM bundle
    Unbundle(UnbundleRamBundle),
}
/// The main RAM bundle interface
#[derive(Debug, Clone)]
pub struct RamBundle<'a> {
repr: RamBundleImpl<'a>,
}
impl<'a> RamBundle<'a> {
/// Parses an indexed RAM bundle from the given slice
pub fn parse_indexed_from_slice(bytes: &'a [u8]) -> Result<Self> {
Ok(RamBundle {
repr: RamBundleImpl::Indexed(IndexedRamBundle::parse(Cow::Borrowed(bytes))?),
})
}
/// Parses an indexed RAM bundle from the given vector
pub fn parse_indexed_from_vec(bytes: Vec<u8>) -> Result<Self> {
Ok(RamBundle {
repr: RamBundleImpl::Indexed(IndexedRamBundle::parse(Cow::Owned(bytes))?),
})
}
/// Creates a new indexed RAM bundle from the file path
pub fn parse_indexed_from_path(path: &Path) -> Result<Self> {
RamBundle::parse_indexed_from_vec(fs::read(path)?)
}
/// Creates a file (unbundle) RAM bundle from the path
///
/// The provided path should point to a javascript file, that serves
/// as an entry point (startup code) for the app. The modules are stored in js-modules/
/// directory, next to the entry point. The js-modules/ directory must ONLY contain
/// files with integer names and the ".js" file suffix, along with the UNBUNDLE magic file.
pub fn parse_unbundle_from_path(bundle_path: &Path) -> Result<Self> {
Ok(RamBundle {
repr: RamBundleImpl::Unbundle(UnbundleRamBundle::parse(bundle_path)?),
})
}
/// Returns the type of the RAM bundle.
pub fn bundle_type(&self) -> RamBundleType {
match self.repr {
RamBundleImpl::Indexed(..) => RamBundleType::Indexed,
RamBundleImpl::Unbundle(..) => RamBundleType::Unbundle,
}
}
/// Looks up a module by ID in the bundle
pub fn get_module(&self, id: usize) -> Result<Option<RamBundleModule>> {
match self.repr {
RamBundleImpl::Indexed(ref indexed) => indexed.get_module(id),
RamBundleImpl::Unbundle(ref file) => file.get_module(id),
}
}
/// Returns the number of modules in the bundle
pub fn module_count(&self) -> usize {
match self.repr {
RamBundleImpl::Indexed(ref indexed) => indexed.module_count(),
RamBundleImpl::Unbundle(ref file) => file.module_count(),
}
}
/// Returns the startup code
pub fn startup_code(&self) -> Result<&[u8]> {
match self.repr {
RamBundleImpl::Indexed(ref indexed) => indexed.startup_code(),
RamBundleImpl::Unbundle(ref file) => file.startup_code(),
}
}
/// Returns an iterator over all modules in the bundle
pub fn iter_modules(&self) -> RamBundleModuleIter {
RamBundleModuleIter {
range: 0..self.module_count(),
ram_bundle: self,
}
}
}
/// Filename must be made of ascii-only digits and the .js extension
/// Anything else errors with `Error::InvalidRamBundleIndex`
fn js_filename_to_index_strict(filename: &str) -> Result<usize> {
match filename.strip_suffix(".js") {
Some(basename) => basename
.parse::<usize>()
.or(Err(Error::InvalidRamBundleIndex)),
None => Err(Error::InvalidRamBundleIndex),
}
}
/// Represents a file RAM bundle
///
/// This RAM bundle type is mostly used on Android.
#[derive(Debug, Clone)]
struct UnbundleRamBundle {
    // Contents of the startup-code (entry-point) file.
    startup_code: Vec<u8>,
    // Highest seen module ID + 1; IDs may be sparse, so this can exceed
    // the number of entries in `modules`.
    module_count: usize,
    // Module payloads keyed by their integer ID, loaded eagerly at parse.
    modules: BTreeMap<usize, Vec<u8>>,
}
impl UnbundleRamBundle {
    /// Parses a file ("unbundle") RAM bundle rooted at the given startup file.
    ///
    /// `bundle_path` must be the startup-code file; modules are read eagerly
    /// from the sibling `js-modules/` directory, where every module lives in
    /// a file named `<id>.js`. Any other file name (except the `UNBUNDLE`
    /// magic file) fails with `Error::InvalidRamBundleIndex`.
    pub fn parse(bundle_path: &Path) -> Result<Self> {
        if !is_unbundle_path(bundle_path) {
            return Err(Error::NotARamBundle);
        }
        let bundle_dir = match bundle_path.parent() {
            Some(dir) => dir,
            None => return Err(Error::NotARamBundle),
        };
        let startup_code = fs::read(bundle_path)?;
        let mut max_module_id = 0;
        let mut modules: BTreeMap<usize, Vec<u8>> = Default::default();
        let js_modules_dir = bundle_dir.join(JS_MODULES_DIR_NAME);
        for entry in js_modules_dir.read_dir()? {
            let entry = entry?;
            // Subdirectories (and anything else that is not a plain file)
            // are ignored.
            if !entry.file_type()?.is_file() {
                continue;
            }
            let path = entry.path();
            // `file_name()` cannot be `None` here: directory entries always
            // have a final path component.
            let filename_os = path.file_name().unwrap();
            let filename: &str = &filename_os.to_string_lossy();
            if filename == "UNBUNDLE" {
                continue;
            }
            let module_id = js_filename_to_index_strict(filename)?;
            if module_id > max_module_id {
                max_module_id = module_id;
            }
            modules.insert(module_id, fs::read(path)?);
        }
        // The count is derived from the highest ID so `get_module` answers
        // every ID below it (missing IDs yield `None`).
        // NOTE(review): with no module files at all this still reports a
        // count of 1, since `max_module_id` starts at 0 — confirm intended.
        Ok(UnbundleRamBundle {
            startup_code,
            modules,
            module_count: max_module_id + 1,
        })
    }

    /// Returns the number of modules in the bundle
    pub fn module_count(&self) -> usize {
        self.module_count
    }

    /// Returns the startup code
    pub fn startup_code(&self) -> Result<&[u8]> {
        Ok(&self.startup_code)
    }

    /// Looks up a module by ID in the bundle
    pub fn get_module(&self, id: usize) -> Result<Option<RamBundleModule>> {
        match self.modules.get(&id) {
            Some(data) => Ok(Some(RamBundleModule { id, data })),
            None => Ok(None),
        }
    }
}
/// Represents an indexed RAM bundle
///
/// Provides access to a react-native metro
/// [RAM bundle](https://facebook.github.io/metro/docs/en/bundling).
#[derive(Debug, Clone)]
struct IndexedRamBundle<'a> {
    // The whole bundle file; borrowed or owned depending on the parse entry
    // point used.
    bytes: Cow<'a, [u8]>,
    // Number of entries in the module table (from the header).
    module_count: usize,
    // Size of the startup code section in bytes (from the header).
    startup_code_size: usize,
    // Byte offset of the startup code: header + module table.
    startup_code_offset: usize,
}
impl<'a> IndexedRamBundle<'a> {
    /// Parses a RAM bundle from a given slice of bytes.
    ///
    /// File layout: header, then `module_count` module-table entries, then
    /// the startup code, then the module payloads.
    pub fn parse(bytes: Cow<'a, [u8]>) -> Result<Self> {
        let header = bytes.pread_with::<RamBundleHeader>(0, scroll::LE)?;
        if !header.is_valid_magic() {
            return Err(Error::InvalidRamBundleMagic);
        }
        let module_count = header.module_count as usize;
        // The startup code begins right after the fixed-size header and the
        // module entry table.
        let startup_code_offset = std::mem::size_of::<RamBundleHeader>()
            + module_count * std::mem::size_of::<ModuleEntry>();
        Ok(IndexedRamBundle {
            bytes,
            module_count,
            startup_code_size: header.startup_code_size as usize,
            startup_code_offset,
        })
    }

    /// Returns the number of modules in the bundle
    pub fn module_count(&self) -> usize {
        self.module_count
    }

    /// Returns the startup code
    pub fn startup_code(&self) -> Result<&[u8]> {
        self.bytes
            .pread_with(self.startup_code_offset, self.startup_code_size)
            .map_err(Error::Scroll)
    }

    /// Looks up a module by ID in the bundle
    ///
    /// Returns `Ok(None)` for an ID whose table entry is all zeros, and an
    /// error for out-of-range IDs or malformed entries.
    pub fn get_module(&self, id: usize) -> Result<Option<RamBundleModule>> {
        if id >= self.module_count {
            return Err(Error::InvalidRamBundleIndex);
        }
        let entry_offset =
            std::mem::size_of::<RamBundleHeader>() + id * std::mem::size_of::<ModuleEntry>();
        let module_entry = self
            .bytes
            .pread_with::<ModuleEntry>(entry_offset, scroll::LE)?;
        // An all-zero entry means "no module with this ID".
        if module_entry.is_empty() {
            return Ok(None);
        }
        // Entry offsets are relative to where the startup code begins.
        let module_global_offset = self.startup_code_offset + module_entry.offset as usize;
        // A non-empty entry must account for at least the trailing NUL.
        if module_entry.length == 0 {
            return Err(Error::InvalidRamBundleEntry);
        }
        // Strip the trailing NULL byte
        let module_length = (module_entry.length - 1) as usize;
        let data = self.bytes.pread_with(module_global_offset, module_length)?;
        Ok(Some(RamBundleModule { id, data }))
    }
}
/// An iterator over deconstructed RAM bundle sources and sourcemaps
pub struct SplitRamBundleModuleIter<'a> {
    // Modules of the bundle being split.
    ram_bundle_iter: RamBundleModuleIter<'a>,
    // The indexed sourcemap flattened into a single map.
    sm: SourceMap,
    // Per-module starting line in the flattened map (`x_facebook_offsets`);
    // `None` means the module has no mapping and is skipped.
    offsets: Vec<Option<u32>>,
}
impl<'a> SplitRamBundleModuleIter<'a> {
    // Recovers `(filename, minified source, sourcemap)` for one module by
    // carving the module's token range out of the flattened sourcemap and
    // rebasing token lines to start at 0. Returns `Ok(None)` for modules
    // that have no offset entry value (nothing mapped).
    fn split_module(
        &self,
        module: RamBundleModule<'a>,
    ) -> Result<Option<(String, SourceView<'a>, SourceMap)>> {
        let module_offset = self
            .offsets
            .get(module.id())
            .ok_or(Error::InvalidRamBundleIndex)?;
        let starting_line = match *module_offset {
            Some(offset) => offset,
            None => return Ok(None),
        };
        let mut token_iter = self.sm.tokens();
        // Position the iterator at the module's first line in the flat map.
        if !token_iter.seek(starting_line, 0) {
            return Err(Error::InvalidRamBundleEntry);
        }
        let source: SourceView<'a> = module.source_view()?;
        let line_count = source.line_count() as u32;
        let ending_line = starting_line + line_count;
        // Length of the module's final line, counted in UTF-16 units to
        // match JavaScript-style token columns.
        let last_line_len = source
            .get_line(line_count - 1)
            .map_or(0, |line| line.chars().map(char::len_utf16).sum())
            as u32;
        let filename = format!("{}.js", module.id);
        let mut builder = SourceMapBuilder::new(Some(&filename));
        for token in token_iter {
            let dst_line = token.get_dst_line();
            let dst_col = token.get_dst_col();
            // NOTE(review): the second condition fires on *any* line whose
            // column reaches `last_line_len`, not just on the module's final
            // line — confirm earlier lines can never carry wider tokens.
            if dst_line >= ending_line || dst_col >= last_line_len {
                break;
            }
            // Rebase the token so the module's own map starts at line 0.
            let raw = builder.add(
                dst_line - starting_line,
                dst_col,
                token.get_src_line(),
                token.get_src_col(),
                token.get_source(),
                token.get_name(),
            );
            // Carry over the original source contents the first time each
            // source file is referenced.
            if token.get_source().is_some() && !builder.has_source_contents(raw.src_id) {
                builder.set_source_contents(
                    raw.src_id,
                    self.sm.get_source_contents(token.get_src_id()),
                );
            }
        }
        let sourcemap = builder.into_sourcemap();
        Ok(Some((filename, source, sourcemap)))
    }
}
impl<'a> Iterator for SplitRamBundleModuleIter<'a> {
    type Item = Result<(String, SourceView<'a>, SourceMap)>;

    fn next(&mut self) -> Option<Self::Item> {
        // Keep pulling modules until one actually splits into an output
        // tuple; failures from either the underlying iterator or the split
        // step are reported uniformly as `InvalidRamBundleEntry`.
        loop {
            let module = match self.ram_bundle_iter.next()? {
                Ok(module) => module,
                Err(_) => return Some(Err(Error::InvalidRamBundleEntry)),
            };
            match self.split_module(module) {
                Ok(Some(entry)) => return Some(Ok(entry)),
                Ok(None) => continue,
                Err(_) => return Some(Err(Error::InvalidRamBundleEntry)),
            }
        }
    }
}
/// Deconstructs a RAM bundle into a sequence of sources and their sourcemaps
///
/// With the help of the RAM bundle's indexed sourcemap, the bundle is split into modules,
/// where each module is represented by its minified source and the corresponding sourcemap that
/// we recover from the initial indexed sourcemap.
pub fn split_ram_bundle<'a>(
ram_bundle: &'a RamBundle,
smi: &SourceMapIndex,
) -> Result<SplitRamBundleModuleIter<'a>> {
Ok(SplitRamBundleModuleIter {
ram_bundle_iter: ram_bundle.iter_modules(),
sm: smi.flatten()?,
offsets: smi
.x_facebook_offsets()
.map(|v| v.to_vec())
.ok_or(Error::NotARamBundle)?,
})
}
/// Checks if the given byte slice contains an indexed RAM bundle
pub fn is_ram_bundle_slice(slice: &[u8]) -> bool {
    // A slice too short to hold a header simply isn't a RAM bundle.
    match slice.pread_with::<RamBundleHeader>(0, scroll::LE) {
        Ok(header) => header.is_valid_magic(),
        Err(_) => false,
    }
}
/// Returns "true" if the given path points to the startup file of a file RAM bundle
///
/// The method checks the directory structure and the magic number in UNBUNDLE file.
pub fn is_unbundle_path(bundle_path: &Path) -> bool {
    if !bundle_path.is_file() {
        return false;
    }
    let parent = match bundle_path.parent() {
        Some(parent) => parent,
        None => return false,
    };
    // The magic file lives at `<parent>/js-modules/UNBUNDLE`.
    let magic_path = parent.join(JS_MODULES_DIR_NAME).join("UNBUNDLE");
    if !magic_path.is_file() {
        return false;
    }
    // The file must start with the RAM bundle magic in little-endian order.
    let mut magic = [0u8; 4];
    match File::open(magic_path) {
        Ok(mut file) => {
            file.read_exact(&mut magic).is_ok() && magic == RAM_BUNDLE_MAGIC.to_le_bytes()
        }
        Err(_) => false,
    }
}
// Fixture-based check of indexed-bundle parsing: header fields, module
// iteration order, payload sizes (minus the trailing NUL) and empty slots.
#[test]
fn test_indexed_ram_bundle_parse() -> std::result::Result<(), Box<dyn std::error::Error>> {
    let mut bundle_file =
        File::open("./tests/fixtures/ram_bundle/indexed_bundle_1/basic.jsbundle")?;
    let mut bundle_data = Vec::new();
    bundle_file.read_to_end(&mut bundle_data)?;
    assert!(is_ram_bundle_slice(&bundle_data));
    let ram_bundle = RamBundle::parse_indexed_from_slice(&bundle_data)?;
    let indexed_ram_bundle = match ram_bundle.repr.clone() {
        RamBundleImpl::Indexed(bundle) => bundle,
        _ => {
            panic!("Invalid RamBundleImpl type");
        }
    };
    // Header checks
    assert_eq!(indexed_ram_bundle.startup_code_size, 0x7192);
    assert_eq!(indexed_ram_bundle.startup_code_offset, 0x34);
    assert_eq!(ram_bundle.module_count(), 5);
    // Check first modules
    let mut module_iter = ram_bundle.iter_modules();
    let module_0 = module_iter.next().unwrap()?;
    let module_0_data = module_0.data();
    assert_eq!(module_0.id(), 0);
    // Lengths exclude the trailing NUL byte stripped by `get_module`.
    assert_eq!(module_0_data.len(), 0xa8 - 1);
    assert_eq!(
        &module_0_data[0..60],
        "__d(function(g,r,i,a,m,e,d){\"use strict\";const o=r(d[0]),s=r".as_bytes()
    );
    let module_3 = module_iter.next().unwrap()?;
    let module_3_data = module_3.data();
    assert_eq!(module_3.id(), 3);
    assert_eq!(module_3_data.len(), 0x6b - 1);
    assert_eq!(
        &module_3_data[0..60],
        "__d(function(g,r,i,a,m,e,d){\"use strict\";console.log('inside".as_bytes()
    );
    // IDs 1 and 2 are empty table entries in this fixture.
    let module_1 = ram_bundle.get_module(1)?;
    assert!(module_1.is_none());
    Ok(())
}
// Fixture-based check of splitting an indexed bundle with its indexed
// sourcemap into per-module (name, source, sourcemap) triples.
#[test]
fn test_indexed_ram_bundle_split() -> std::result::Result<(), Box<dyn std::error::Error>> {
    let ram_bundle = RamBundle::parse_indexed_from_path(Path::new(
        "./tests/fixtures/ram_bundle/indexed_bundle_1/basic.jsbundle",
    ))?;
    let sourcemap_file =
        File::open("./tests/fixtures/ram_bundle/indexed_bundle_1/basic.jsbundle.map")?;
    let ism = SourceMapIndex::from_reader(sourcemap_file)?;
    assert!(ism.is_for_ram_bundle());
    let x_facebook_offsets = ism.x_facebook_offsets().unwrap();
    assert_eq!(x_facebook_offsets.len(), 5);
    let x_metro_module_paths = ism.x_metro_module_paths().unwrap();
    assert_eq!(x_metro_module_paths.len(), 7);
    // Modules 0, 3, 4
    assert_eq!(split_ram_bundle(&ram_bundle, &ism)?.count(), 3);
    let mut ram_bundle_iter = split_ram_bundle(&ram_bundle, &ism)?;
    let (name, sourceview, sourcemap) = ram_bundle_iter.next().unwrap()?;
    assert_eq!(name, "0.js");
    assert_eq!(
        &sourceview.source()[0..60],
        "__d(function(g,r,i,a,m,e,d){\"use strict\";const o=r(d[0]),s=r"
    );
    assert_eq!(
        &sourcemap.get_source_contents(0).unwrap()[0..60],
        "const f = require(\"./other\");\nconst isWindows = require(\"is-"
    );
    Ok(())
}
// Fixture-based check of the file ("unbundle") layout: path detection,
// module count, startup code and per-file module lookup.
#[test]
fn test_file_ram_bundle_parse() -> std::result::Result<(), Box<dyn std::error::Error>> {
    let valid_bundle_path = Path::new("./tests/fixtures/ram_bundle/file_bundle_1/basic.bundle");
    assert!(is_unbundle_path(valid_bundle_path));
    assert!(!is_unbundle_path(Path::new("./tmp/invalid/bundle/path")));
    let ram_bundle = RamBundle::parse_unbundle_from_path(valid_bundle_path)?;
    match ram_bundle.repr {
        RamBundleImpl::Unbundle(_) => (),
        _ => {
            panic!("Invalid RamBundleImpl type");
        }
    };
    assert_eq!(ram_bundle.module_count(), 4);
    let startup_code = ram_bundle.startup_code()?;
    assert_eq!(
        startup_code[0..60].to_vec(),
        b"var __DEV__=false,__BUNDLE_START_TIME__=this.nativePerforman".to_vec()
    );
    let module_0 = ram_bundle.get_module(0)?.unwrap();
    let module_0_data = module_0.data();
    assert_eq!(
        module_0_data[0..60].to_vec(),
        b"__d(function(g,r,i,a,m,e,d){'use strict';var t=Date.now();r(".to_vec()
    );
    // ID 1 has no `1.js` file in this fixture.
    let module_1 = ram_bundle.get_module(1)?;
    assert!(module_1.is_none());
    Ok(())
}

View file

@ -0,0 +1,371 @@
use std::borrow::Cow;
use std::cell::RefCell;
use std::fmt;
use std::slice;
use std::str;
use if_chain::if_chain;
use crate::detector::{locate_sourcemap_reference_slice, SourceMapRef};
use crate::errors::Result;
use crate::js_identifiers::{get_javascript_token, is_valid_javascript_identifier};
use crate::types::{idx_from_token, sourcemap_from_token, Token};
/// An iterator that iterates over tokens in reverse.
pub struct RevTokenIter<'view, 'viewbase, 'map>
where
    'viewbase: 'view,
{
    sv: &'view SourceView<'viewbase>,
    // The next token to yield; `None` terminates the iterator.
    token: Option<Token<'map>>,
    // Cache from the previous step: (line text, dst line number, last char
    // offset, last byte offset). Lets repeated tokens on the same line walk
    // backwards from the previous position instead of rescanning the line.
    source_line: Option<(&'view str, usize, usize, usize)>,
}
impl<'view, 'viewbase, 'map> Iterator for RevTokenIter<'view, 'viewbase, 'map>
where
    'viewbase: 'view,
{
    type Item = (Token<'map>, Option<&'view str>);

    // Yields the current token together with the JavaScript identifier (if
    // any) found at its position in the minified source, then steps to the
    // token with the previous index.
    fn next(&mut self) -> Option<(Token<'map>, Option<&'view str>)> {
        let token = match self.token.take() {
            None => {
                return None;
            }
            Some(token) => token,
        };
        // Queue up the predecessor token (if any) for the next iteration.
        let idx = idx_from_token(&token);
        if idx > 0 {
            let sm = sourcemap_from_token(&token);
            self.token = sm.get_token(idx - 1);
        }
        // if we are going to the same line as we did last iteration, we don't have to scan
        // up to it again. For normal sourcemaps this should mean we only ever go to the
        // line once. (`!0` == usize::MAX is the "offset unknown" sentinel.)
        let (source_line, last_char_offset, last_byte_offset) = if_chain! {
            if let Some((source_line, dst_line, last_char_offset,
                last_byte_offset)) = self.source_line;
            if dst_line == token.get_dst_line() as usize;
            then {
                (source_line, last_char_offset, last_byte_offset)
            } else {
                if let Some(source_line) = self.sv.get_line(token.get_dst_line()) {
                    (source_line, !0, !0)
                } else {
                    // if we can't find the line, return an empty one
                    ("", !0, !0)
                }
            }
        };
        // find the byte offset where our token starts
        let byte_offset = if last_byte_offset == !0 {
            // No cached position: walk forward from the line start, counting
            // UTF-16 columns (JavaScript columns) while advancing UTF-8 bytes.
            let mut off = 0;
            let mut idx = 0;
            for c in source_line.chars() {
                if idx >= token.get_dst_col() as usize {
                    break;
                }
                off += c.len_utf8();
                idx += c.len_utf16();
            }
            off
        } else {
            // Cached position available: walk backwards from the previous
            // token's byte offset instead of rescanning the whole line.
            let chars_to_move = last_char_offset - token.get_dst_col() as usize;
            let mut new_offset = last_byte_offset;
            let mut idx = 0;
            for c in source_line
                .get(..last_byte_offset)
                .unwrap_or("")
                .chars()
                .rev()
            {
                if idx >= chars_to_move {
                    break;
                }
                new_offset -= c.len_utf8();
                idx += c.len_utf16();
            }
            new_offset
        };
        // remember where we were
        self.source_line = Some((
            source_line,
            token.get_dst_line() as usize,
            token.get_dst_col() as usize,
            byte_offset,
        ));
        // in case we run out of bounds here we reset the cache
        if byte_offset >= source_line.len() {
            self.source_line = None;
            Some((token, None))
        } else {
            Some((
                token,
                source_line
                    .get(byte_offset..)
                    .and_then(get_javascript_token),
            ))
        }
    }
}
/// Iterator over the lines of a `SourceView`, in order.
pub struct Lines<'a> {
    sv: &'a SourceView<'a>,
    idx: u32,
}

impl<'a> Iterator for Lines<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<&'a str> {
        let line = self.sv.get_line(self.idx)?;
        self.idx += 1;
        Some(line)
    }
}
/// Provides efficient access to minified sources.
///
/// This type is used to implement fairly efficient source mapping
/// operations.
pub struct SourceView<'a> {
    source: Cow<'a, str>,
    // Byte position in `source` up to which lines have been scanned; once it
    // exceeds `source.len()` the whole source has been processed.
    processed_until: RefCell<usize>,
    // Lazily-built cache of (pointer, length) pairs, one per line, pointing
    // directly into `source`'s buffer (see `make_str`). Valid because
    // `source` is never mutated after construction.
    lines: RefCell<Vec<(*const u8, usize)>>,
}
impl<'a> Clone for SourceView<'a> {
    fn clone(&self) -> SourceView<'a> {
        // The line cache is deliberately reset: its raw pointers reference
        // this instance's buffer, and cloning an owned `Cow` allocates a new
        // one, which would leave the copied pointers dangling.
        SourceView {
            source: self.source.clone(),
            processed_until: RefCell::new(0),
            lines: RefCell::new(vec![]),
        }
    }
}
impl<'a> fmt::Debug for SourceView<'a> {
    // Only the source text is shown; the internal caches are elided.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SourceView")
            .field("source", &self.source())
            .finish()
    }
}
/// Reassembles a `&str` from a raw (pointer, length) pair cached in
/// `SourceView::lines`.
///
/// # Safety
///
/// The pair must point into a live buffer of valid UTF-8 at least `len`
/// bytes long (here: the `source` field of the owning `SourceView`), and the
/// chosen lifetime `'a` must not outlive that buffer.
unsafe fn make_str<'a>(tup: (*const u8, usize)) -> &'a str {
    let (data, len) = tup;
    str::from_utf8_unchecked(slice::from_raw_parts(data, len))
}
impl<'a> SourceView<'a> {
    /// Creates an optimized view of a given source.
    pub fn new(source: &'a str) -> SourceView<'a> {
        SourceView {
            source: Cow::Borrowed(source),
            processed_until: RefCell::new(0),
            lines: RefCell::new(vec![]),
        }
    }

    /// Creates an optimized view from a given source string
    pub fn from_string(source: String) -> SourceView<'static> {
        SourceView {
            source: Cow::Owned(source),
            processed_until: RefCell::new(0),
            lines: RefCell::new(vec![]),
        }
    }

    /// Returns a requested minified line.
    ///
    /// Lines are discovered lazily: the source is scanned forward only as
    /// far as needed to answer the request, and every line found on the way
    /// is cached as a (pointer, length) pair into the source buffer.
    pub fn get_line(&self, idx: u32) -> Option<&str> {
        let idx = idx as usize;
        {
            let lines = self.lines.borrow();
            if idx < lines.len() {
                // SAFETY: cached pairs point into `self.source`, which is
                // never mutated after construction and outlives `&self`.
                return Some(unsafe { make_str(lines[idx]) });
            }
        }
        // fetched everything
        if *self.processed_until.borrow() > self.source.len() {
            return None;
        }
        let mut processed_until = self.processed_until.borrow_mut();
        let mut lines = self.lines.borrow_mut();
        let mut done = false;
        while !done {
            let rest = &self.source.as_bytes()[*processed_until..];
            let rv = if let Some(mut idx) = rest.iter().position(|&x| x == b'\n' || x == b'\r') {
                let rv = &rest[..idx];
                // Treat "\r\n" as a single line break.
                if rest[idx] == b'\r' && rest.get(idx + 1) == Some(&b'\n') {
                    idx += 1;
                }
                *processed_until += idx + 1;
                rv
            } else {
                // Last line: push `processed_until` past the source length so
                // the early-out above knows scanning is complete.
                *processed_until += rest.len() + 1;
                done = true;
                rest
            };
            lines.push((rv.as_ptr(), rv.len()));
            if let Some(&line) = lines.get(idx) {
                // SAFETY: same invariant as above — the pair points into
                // `self.source`.
                return Some(unsafe { make_str(line) });
            }
        }
        None
    }

    /// Returns a line slice.
    ///
    /// Note that columns are indexed as JavaScript WTF-16 columns.
    pub fn get_line_slice(&self, line: u32, col: u32, span: u32) -> Option<&str> {
        self.get_line(line).and_then(|line| {
            // Convert the UTF-16 column `col` into a UTF-8 byte offset.
            let mut off = 0;
            let mut idx = 0;
            let mut char_iter = line.chars().peekable();
            while let Some(&c) = char_iter.peek() {
                if idx >= col as usize {
                    break;
                }
                char_iter.next();
                off += c.len_utf8();
                idx += c.len_utf16();
            }
            // Advance another `span` UTF-16 units for the end offset.
            let mut off_end = off;
            for c in char_iter {
                if idx >= (col + span) as usize {
                    break;
                }
                off_end += c.len_utf8();
                idx += c.len_utf16();
            }
            // The requested span runs past the end of the line.
            if idx < ((col + span) as usize) {
                None
            } else {
                line.get(off..off_end)
            }
        })
    }

    /// Returns an iterator over all lines.
    pub fn lines(&'a self) -> Lines<'a> {
        Lines { sv: self, idx: 0 }
    }

    /// Returns the source.
    pub fn source(&self) -> &str {
        &self.source
    }

    // Builds a reverse token iterator starting at `token`, used by
    // `get_original_function_name` to scan backwards through the map.
    fn rev_token_iter<'this, 'map>(
        &'this self,
        token: Token<'map>,
    ) -> RevTokenIter<'this, 'a, 'map> {
        RevTokenIter {
            sv: self,
            token: Some(token),
            source_line: None,
        }
    }

    /// Given a token and minified function name this attempts to resolve the
    /// name to an original function name.
    ///
    /// This invokes some guesswork and requires access to the original minified
    /// source. This will not yield proper results for anonymous functions or
    /// functions that do not have clear function names. (For instance it's
    /// recommended that dotted function names are not passed to this
    /// function).
    pub fn get_original_function_name<'map>(
        &self,
        token: Token<'map>,
        minified_name: &str,
    ) -> Option<&'map str> {
        if !is_valid_javascript_identifier(minified_name) {
            return None;
        }
        // Scan at most 128 tokens backwards, looking for the minified name
        // immediately preceded by the `function` keyword.
        let mut iter = self.rev_token_iter(token).take(128).peekable();
        while let Some((token, original_identifier)) = iter.next() {
            if_chain! {
                if original_identifier == Some(minified_name);
                if let Some(item) = iter.peek();
                if item.1 == Some("function");
                then {
                    return token.get_name();
                }
            }
        }
        None
    }

    /// Returns the number of lines.
    pub fn line_count(&self) -> usize {
        // Requesting line `!0` (u32::MAX) forces the lazy scanner in
        // `get_line` to process the entire source, completing the cache.
        self.get_line(!0);
        self.lines.borrow().len()
    }

    /// Returns the source map reference in the source view.
    pub fn sourcemap_reference(&self) -> Result<Option<SourceMapRef>> {
        locate_sourcemap_reference_slice(self.source.as_bytes())
    }
}
// Exercises lazy line caching (including out-of-order access), "\r\n"
// handling, WTF-16 column slicing around an astral-plane character, and the
// trailing-newline empty-line case.
#[test]
#[allow(clippy::cognitive_complexity)]
fn test_minified_source_view() {
    let view = SourceView::new("a\nb\nc");
    assert_eq!(view.get_line(0), Some("a"));
    assert_eq!(view.get_line(0), Some("a"));
    assert_eq!(view.get_line(2), Some("c"));
    assert_eq!(view.get_line(1), Some("b"));
    assert_eq!(view.get_line(3), None);
    assert_eq!(view.line_count(), 3);
    let view = SourceView::new("a\r\nb\r\nc");
    assert_eq!(view.get_line(0), Some("a"));
    assert_eq!(view.get_line(0), Some("a"));
    assert_eq!(view.get_line(2), Some("c"));
    assert_eq!(view.get_line(1), Some("b"));
    assert_eq!(view.get_line(3), None);
    assert_eq!(view.line_count(), 3);
    // The OK-hand emoji occupies two UTF-16 units, so spans that end inside
    // it are rounded to the whole character.
    let view = SourceView::new("abc👌def\nblah");
    assert_eq!(view.get_line_slice(0, 0, 3), Some("abc"));
    assert_eq!(view.get_line_slice(0, 3, 1), Some("👌"));
    assert_eq!(view.get_line_slice(0, 3, 2), Some("👌"));
    assert_eq!(view.get_line_slice(0, 3, 3), Some("👌d"));
    assert_eq!(view.get_line_slice(0, 0, 4), Some("abc👌"));
    assert_eq!(view.get_line_slice(0, 0, 5), Some("abc👌"));
    assert_eq!(view.get_line_slice(0, 0, 6), Some("abc👌d"));
    assert_eq!(view.get_line_slice(1, 0, 4), Some("blah"));
    assert_eq!(view.get_line_slice(1, 0, 5), None);
    assert_eq!(view.get_line_slice(1, 0, 12), None);
    let view = SourceView::new("a\nb\nc\n");
    assert_eq!(view.get_line(0), Some("a"));
    assert_eq!(view.get_line(1), Some("b"));
    assert_eq!(view.get_line(2), Some("c"));
    assert_eq!(view.get_line(3), Some(""));
    assert_eq!(view.get_line(4), None);
}

1581
third-party/vendor/sourcemap/src/types.rs vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,227 @@
use std::borrow::Cow;
use std::iter::repeat;
/// Splits a path into components, where every component except possibly the
/// first *keeps* its leading separator (e.g. "/foo/bar" -> ["", "/foo", "/bar"]).
fn split_path(path: &str) -> Vec<&str> {
    let mut segments = Vec::new();
    let mut start = 0;
    // Separators are ASCII, so scanning bytes keeps char boundaries intact.
    for (pos, &byte) in path.as_bytes().iter().enumerate() {
        if byte == b'/' || byte == b'\\' {
            segments.push(&path[start..pos]);
            start = pos;
        }
    }
    if start < path.len() {
        segments.push(&path[start..]);
    }
    segments
}
/// Returns `true` for Unix absolute paths (leading `/`) and Windows
/// drive-absolute paths such as `C:\dir` or `c:/dir`.
fn is_abs_path(s: &str) -> bool {
    if s.starts_with('/') {
        return true;
    }
    // Windows drive form: ASCII letter, colon, then a slash of either kind,
    // with at least one byte after it (hence the `> 3`).
    let bytes = s.as_bytes();
    bytes.len() > 3
        && bytes[1] == b':'
        && (bytes[2] == b'/' || bytes[2] == b'\\')
        && bytes[0].is_ascii_alphabetic()
}
/// Returns the longest common prefix (as a sub-slice of the first item) of a
/// list of path-segment sequences, or `None` if the items share no prefix.
///
/// The slice must be sorted so the shortest sequence comes first: only
/// `items[0]` is used as the reference the other sequences are compared to.
fn find_common_prefix_of_sorted_vec<'a>(items: &'a [Cow<'a, [&'a str]>]) -> Option<&'a [&'a str]> {
    let shortest = items.first()?;
    // Number of leading segments shared by every sequence seen so far.
    let mut common_len = shortest.len();
    for seq in items {
        let matched = shortest
            .iter()
            .zip(seq.iter())
            .take_while(|&(a, b)| a == b)
            .count();
        common_len = common_len.min(matched);
        // As soon as any sequence shares nothing, no common prefix exists.
        // (The previous implementation reset its tracker to `None` here and
        // could then erroneously "recover" a bogus prefix from a later
        // sequence via its first-iteration branch.)
        if common_len == 0 {
            return None;
        }
    }
    Some(&shortest[..common_len])
}
/// Finds the common absolute-path prefix of the given paths, ignoring any
/// relative paths; the bare root ("/") does not count as a useful prefix.
pub fn find_common_prefix<'a, I: Iterator<Item = &'a str>>(iter: I) -> Option<String> {
    let mut items: Vec<Cow<'_, [&str]>> = iter
        .filter(|path| is_abs_path(path))
        .map(|path| Cow::Owned(split_path(path)))
        .collect();
    // The prefix helper requires the shortest sequence first.
    items.sort_by_key(|segments| segments.len());
    let joined = find_common_prefix_of_sorted_vec(&items)?.join("");
    if joined.is_empty() || joined == "/" {
        None
    } else {
        Some(joined)
    }
}
/// Helper function to calculate the path from a base file to a target file.
///
/// This is intended to calculate the path from a minified JavaScript file
/// to a sourcemap if they are both on the same server.
///
/// Example:
///
/// ```
/// # use sourcemap::make_relative_path;
/// assert_eq!(&make_relative_path(
///     "/foo/bar/baz.js", "/foo/baz.map"), "../baz.map");
/// ```
pub fn make_relative_path(base: &str, target: &str) -> String {
    let target_segments: Vec<_> = target
        .split(&['/', '\\'][..])
        .filter(|segment| !segment.is_empty())
        .collect();
    let mut base_segments: Vec<_> = base
        .split(&['/', '\\'][..])
        .filter(|segment| !segment.is_empty())
        .collect();
    // Drop the file-name component of the base; only its directory matters.
    base_segments.pop();
    // The prefix helper requires the shorter sequence first.
    let mut sorted = vec![
        Cow::Borrowed(target_segments.as_slice()),
        Cow::Borrowed(base_segments.as_slice()),
    ];
    sorted.sort_by_key(|segments| segments.len());
    let shared = find_common_prefix_of_sorted_vec(&sorted).map_or(0, |prefix| prefix.len());
    // One "../" per base directory below the shared prefix, then the
    // remaining target components.
    // NOTE(review): the remaining target components are concatenated without
    // a separator (`join("")`), so a target more than one level below the
    // common prefix yields e.g. "ab.map" instead of "a/b.map" — confirm
    // whether multi-component remainders can reach this helper.
    let mut pieces: Vec<&str> = vec!["../"; base_segments.len() - shared];
    pieces.extend_from_slice(&target_segments[shared..]);
    if pieces.is_empty() {
        ".".into()
    } else {
        pieces.join("")
    }
}
/// Finds the greatest element of `slice` whose mapped key is `<= key`.
///
/// `slice` must be sorted (non-decreasing) under `map`. When several elements
/// share the looked-up key, the *first* element of that run is returned.
pub fn greatest_lower_bound<'a, T, K: Ord, F: Fn(&'a T) -> K>(
    slice: &'a [T],
    key: &K,
    map: F,
) -> Option<&'a T> {
    match slice.binary_search_by_key(key, &map) {
        // No exact match: the element just before the insertion point is the
        // greatest lower bound (none exists if we'd insert at the front).
        Err(insertion_point) => slice.get(insertion_point.checked_sub(1)?),
        Ok(found) => {
            // Exact match: binary search may land anywhere inside a run of
            // equal keys, so walk left to the run's first element. Runs are
            // generally tiny, which keeps this linear walk cheap.
            let mut first = found;
            while first > 0 && map(&slice[first - 1]) == *key {
                first -= 1;
            }
            slice.get(first)
        }
    }
}
// Covers Unix roots, Windows drive paths (either slash) and relative paths.
#[test]
fn test_is_abs_path() {
    assert!(is_abs_path("C:\\foo.txt"));
    assert!(is_abs_path("d:/foo.txt"));
    assert!(!is_abs_path("foo.txt"));
    assert!(is_abs_path("/foo.txt"));
    assert!(is_abs_path("/"));
}
// Components after the first keep their leading separator.
#[test]
fn test_split_path() {
    assert_eq!(split_path("/foo/bar/baz"), &["", "/foo", "/bar", "/baz"]);
}
// Relative paths are ignored; a prefix of only "/" counts as no prefix.
#[test]
fn test_find_common_prefix() {
    let rv = find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah"].into_iter());
    assert_eq!(rv, Some("/foo/bar/baz".into()));
    let rv = find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah", "/meh"].into_iter());
    assert_eq!(rv, None);
    let rv = find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah", "/foo"].into_iter());
    assert_eq!(rv, Some("/foo".into()));
    let rv = find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah", "foo"].into_iter());
    assert_eq!(rv, Some("/foo/bar/baz".into()));
    let rv =
        find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah", "/blah", "foo"].into_iter());
    assert_eq!(rv, None);
    // (Duplicate of the case above, kept as-is.)
    let rv =
        find_common_prefix(vec!["/foo/bar/baz", "/foo/bar/baz/blah", "/blah", "foo"].into_iter());
    assert_eq!(rv, None);
}
// Same directory, parent hop, and fully relative inputs.
#[test]
fn test_make_relative_path() {
    assert_eq!(
        &make_relative_path("/foo/bar/baz.js", "/foo/bar/baz.map"),
        "baz.map"
    );
    assert_eq!(
        &make_relative_path("/foo/bar/.", "/foo/bar/baz.map"),
        "baz.map"
    );
    assert_eq!(
        &make_relative_path("/foo/bar/baz.js", "/foo/baz.map"),
        "../baz.map"
    );
    assert_eq!(&make_relative_path("foo.txt", "foo.js"), "foo.js");
    assert_eq!(&make_relative_path("blah/foo.txt", "foo.js"), "../foo.js");
}
// For runs of equal keys the *first* element of the run must be returned,
// regardless of where binary search lands inside the run.
#[test]
fn test_greatest_lower_bound() {
    let cmp = |&(i, _id)| i;
    let haystack = vec![(1, 1)];
    assert_eq!(greatest_lower_bound(&haystack, &1, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &2, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &0, cmp), None);
    let haystack = vec![(1, 1), (1, 2)];
    assert_eq!(greatest_lower_bound(&haystack, &1, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &2, cmp), Some(&(1, 2)));
    assert_eq!(greatest_lower_bound(&haystack, &0, cmp), None);
    let haystack = vec![(1, 1), (1, 2), (1, 3)];
    assert_eq!(greatest_lower_bound(&haystack, &1, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &2, cmp), Some(&(1, 3)));
    assert_eq!(greatest_lower_bound(&haystack, &0, cmp), None);
    let haystack = vec![(1, 1), (1, 2), (1, 3), (1, 4)];
    assert_eq!(greatest_lower_bound(&haystack, &1, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &2, cmp), Some(&(1, 4)));
    assert_eq!(greatest_lower_bound(&haystack, &0, cmp), None);
    let haystack = vec![(1, 1), (1, 2), (1, 3), (1, 4), (1, 5)];
    assert_eq!(greatest_lower_bound(&haystack, &1, cmp), Some(&(1, 1)));
    assert_eq!(greatest_lower_bound(&haystack, &2, cmp), Some(&(1, 5)));
    assert_eq!(greatest_lower_bound(&haystack, &0, cmp), None);
}

355
third-party/vendor/sourcemap/src/vlq.rs vendored Normal file
View file

@ -0,0 +1,355 @@
//! Implements utilities for dealing with the sourcemap vlq encoding.
use crate::errors::{Error, Result};
/// Standard base64 alphabet (RFC 4648) used for VLQ digit encoding.
const B64_CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
/// Reverse lookup table for the standard base64 alphabet: maps a byte to its
/// 6-bit base64 value, or -1 for bytes that are not base64 digits.
///
/// Fix: the vendored table had a garbled entry `-1 - 1` (i.e. -2) at index
/// 0x7F (DEL); the canonical table uses -1 for every non-alphabet byte.
const B64: [i8; 256] = [
    // 0x00..=0x2A: control characters and punctuation — not base64
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    // '+' (0x2B) = 62, '/' (0x2F) = 63
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
    // '0'..='9' (0x30..=0x39) = 52..=61
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
    // 'A'..='O' (0x41..=0x4F) = 0..=14
    -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
    // 'P'..='Z' (0x50..=0x5A) = 15..=25
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
    // 'a'..='o' (0x61..=0x6F) = 26..=40
    -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
    // 'p'..='z' (0x70..=0x7A) = 41..=51
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1,
    // 0x80..=0xFF: non-ASCII bytes — not base64
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
];
/// Parses a VLQ segment into a freshly allocated vector of decoded values.
pub fn parse_vlq_segment(segment: &str) -> Result<Vec<i64>> {
    let mut values = Vec::new();
    parse_vlq_segment_into(segment, &mut values)?;
    Ok(values)
}
/// Parses a VLQ segment into a pre-allocated `Vec` instead of returning a new allocation.
///
/// Each base64 character contributes 5 payload bits, least-significant
/// first; bit 5 set on a digit means another digit follows. When a value is
/// complete, its lowest bit is the sign flag and the remaining bits are the
/// magnitude.
///
/// Errors:
/// * `Error::VlqOverflow` — a value required a shift of 64 bits or more.
/// * `Error::VlqLeftover` — the segment ended in the middle of a value.
/// * `Error::VlqNoValues` — the segment decoded to no values at all.
///
/// NOTE(review): bytes outside the base64 alphabet map to -1 in `B64` and
/// are not rejected here — they flow through as continuation digits.
/// Presumably callers only pass well-formed segments; TODO confirm.
pub(crate) fn parse_vlq_segment_into(segment: &str, rv: &mut Vec<i64>) -> Result<()> {
    let mut cur = 0; // value being accumulated across digits
    let mut shift = 0; // bit position for the next 5-bit chunk
    for c in segment.bytes() {
        let enc = i64::from(B64[c as usize]);
        let val = enc & 0b11111; // low 5 bits are the payload
        let cont = enc >> 5; // nonzero => another digit follows
        // checked_shl fails once shift reaches 64, catching runaway segments.
        cur += val.checked_shl(shift).ok_or(Error::VlqOverflow)?;
        shift += 5;
        if cont == 0 {
            // Value complete: lowest bit holds the sign.
            let sign = cur & 1;
            cur >>= 1;
            if sign != 0 {
                cur = -cur;
            }
            rv.push(cur);
            cur = 0;
            shift = 0;
        }
    }
    if cur != 0 || shift != 0 {
        Err(Error::VlqLeftover)
    } else if rv.is_empty() {
        Err(Error::VlqNoValues)
    } else {
        Ok(())
    }
}
/// Encodes a slice of values as a base64 VLQ segment string.
pub fn generate_vlq_segment(nums: &[i64]) -> Result<String> {
    let mut encoded = String::new();
    nums.iter().for_each(|&value| encode_vlq(&mut encoded, value));
    Ok(encoded)
}
/// Appends the base64 VLQ encoding of `num` to `out`.
///
/// The sign is folded into the least-significant bit, then the value is
/// emitted as 5-bit digits (least significant first) with bit 5 flagging
/// that more digits follow.
///
/// NOTE(review): `-num` overflows for `i64::MIN`; presumably callers never
/// pass it — TODO confirm.
pub(crate) fn encode_vlq(out: &mut String, num: i64) {
    let mut value = if num < 0 { ((-num) << 1) + 1 } else { num << 1 };
    loop {
        let rest = value >> 5;
        // Low 5 bits of this digit; set bit 5 when more digits follow.
        let digit = (value & 0b11111) | if rest > 0 { 1 << 5 } else { 0 };
        out.push(B64_CHARS[digit as usize] as char);
        if rest == 0 {
            return;
        }
        value = rest;
    }
}
#[test]
fn test_vlq_decode() {
    assert_eq!(parse_vlq_segment("AAAA").unwrap(), vec![0, 0, 0, 0]);
    assert_eq!(parse_vlq_segment("GAAIA").unwrap(), vec![3, 0, 0, 4, 0]);
}
#[test]
fn test_vlq_encode() {
    assert_eq!(generate_vlq_segment(&[0, 0, 0, 0]).unwrap(), "AAAA");
    assert_eq!(generate_vlq_segment(&[3, 0, 0, 4, 0]).unwrap(), "GAAIA");
}
#[test]
fn test_overflow() {
    // 14 digits x 5 bits each pushes the shift past 64 bits, so decoding
    // must fail with VlqOverflow. (Also fixes the "Unexpeted" typo in the
    // panic message.)
    match parse_vlq_segment("00000000000000") {
        Err(Error::VlqOverflow) => {}
        e => {
            panic!("Unexpected result: {:?}", e);
        }
    }
}