Vendor things
This commit is contained in:
parent
5deceec006
commit
977e3c17e5
19434 changed files with 10682014 additions and 0 deletions
1
third-party/vendor/data-url/.cargo-checksum.json
vendored
Normal file
1
third-party/vendor/data-url/.cargo-checksum.json
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"files":{"Cargo.toml":"4fed6a74fde6f148669afe2beb144c87d3ba4562de11f8e86a2d4626dd54166b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"a8f13b4b8701f2d546f7372d4b68a267da408f7ebecbafe28a1c2e8b92a774a1","src/forgiving_base64.rs":"dd2ce59aa0f209bdc9bdf82c67b0ea69e4576cb100850ae4c4e3b8f03ef31814","src/lib.rs":"6a0ccc483e4293ce6ef075352ebb718abc7b252ee7157584b20ad5b21e4261b7","src/make_base64_decode_table.py":"b7ee1b1aaa5579b6e14caae0ef7b84695a04c77048978384ffe031fc8d29d2b1","src/mime.rs":"de276878412f2a852281531454af97353d33ea3c3a64f9eef865562c3015172e","tests/base64.json":"0cdb75b4fcc46e4a6caa0e3b0005157a0d0be03e00bcf7b7054a045b1267568d","tests/data-urls.json":"b3be3d6e1d0eb7f1b5b20d7410d30cb76216d41c9fe3d8364446abecff88bba5","tests/generated-mime-types.json":"20924495060ac9633f10d57a326b95a5987863f27c5d4765b13b744304b33fed","tests/mime-types.json":"2151572862191efd4d5b46e12a586242ce1470b32b0fb62b2c9f4f6ea8c613aa","tests/wpt.rs":"22768dc0e24c735e02974fea8a53ad82f74e171843c94c47d4d7d4abfa4b8ccc"},"package":"5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"}
|
||||
48
third-party/vendor/data-url/Cargo.toml
vendored
Normal file
48
third-party/vendor/data-url/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
rust-version = "1.51"
|
||||
name = "data-url"
|
||||
version = "0.3.1"
|
||||
authors = ["Simon Sapin <simon.sapin@exyr.org>"]
|
||||
autotests = false
|
||||
description = "Processing of data: URL according to WHATWG’s Fetch Standard"
|
||||
readme = "README.md"
|
||||
categories = ["no_std"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/servo/rust-url"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
|
||||
[[test]]
|
||||
name = "wpt"
|
||||
harness = false
|
||||
|
||||
[dev-dependencies.serde]
|
||||
version = "1.0"
|
||||
features = ["derive"]
|
||||
|
||||
[dev-dependencies.serde_json]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.tester]
|
||||
version = "0.9"
|
||||
|
||||
[features]
|
||||
alloc = []
|
||||
default = ["std"]
|
||||
std = ["alloc"]
|
||||
201
third-party/vendor/data-url/LICENSE-APACHE
vendored
Normal file
201
third-party/vendor/data-url/LICENSE-APACHE
vendored
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
25
third-party/vendor/data-url/LICENSE-MIT
vendored
Normal file
25
third-party/vendor/data-url/LICENSE-MIT
vendored
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2013-2022 The rust-url developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
21
third-party/vendor/data-url/README.md
vendored
Normal file
21
third-party/vendor/data-url/README.md
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# data-url
|
||||
|
||||
[](https://crates.io/crates/data-url)
|
||||
[](https://docs.rs/data-url/)
|
||||
|
||||
Processing of `data:` URLs in Rust according to the Fetch Standard:
|
||||
<https://fetch.spec.whatwg.org/#data-urls>
|
||||
but starting from a string rather than a parsed URL to avoid extra copies.
|
||||
|
||||
```rust
|
||||
use data_url::{DataUrl, mime};
|
||||
|
||||
let url = DataUrl::process("data:,Hello%20World!").unwrap();
|
||||
let (body, fragment) = url.decode_to_vec().unwrap();
|
||||
|
||||
assert_eq!(url.mime_type().type_, "text");
|
||||
assert_eq!(url.mime_type().subtype, "plain");
|
||||
assert_eq!(url.mime_type().get_parameter("charset"), Some("US-ASCII"));
|
||||
assert_eq!(body, b"Hello World!");
|
||||
assert!(fragment.is_none());
|
||||
```
|
||||
201
third-party/vendor/data-url/src/forgiving_base64.rs
vendored
Normal file
201
third-party/vendor/data-url/src/forgiving_base64.rs
vendored
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
//! <https://infra.spec.whatwg.org/#forgiving-base64-decode>
|
||||
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct InvalidBase64(InvalidBase64Details);
|
||||
|
||||
impl fmt::Display for InvalidBase64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self.0 {
|
||||
InvalidBase64Details::UnexpectedSymbol(code_point) => {
|
||||
write!(f, "symbol with codepoint {} not expected", code_point)
|
||||
}
|
||||
InvalidBase64Details::AlphabetSymbolAfterPadding => {
|
||||
write!(f, "alphabet symbol present after padding")
|
||||
}
|
||||
InvalidBase64Details::LoneAlphabetSymbol => write!(f, "lone alphabet symbol present"),
|
||||
InvalidBase64Details::Padding => write!(f, "incorrect padding"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for InvalidBase64 {}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum InvalidBase64Details {
|
||||
UnexpectedSymbol(u8),
|
||||
AlphabetSymbolAfterPadding,
|
||||
LoneAlphabetSymbol,
|
||||
Padding,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DecodeError<E> {
|
||||
InvalidBase64(InvalidBase64),
|
||||
WriteError(E),
|
||||
}
|
||||
|
||||
impl<E: fmt::Display> fmt::Display for DecodeError<E> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::InvalidBase64(inner) => write!(f, "base64 not valid: {}", inner),
|
||||
Self::WriteError(err) => write!(f, "write error: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl<E: std::error::Error> std::error::Error for DecodeError<E> {}
|
||||
|
||||
impl<E> From<InvalidBase64Details> for DecodeError<E> {
|
||||
fn from(e: InvalidBase64Details) -> Self {
|
||||
DecodeError::InvalidBase64(InvalidBase64(e))
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) enum Impossible {}
|
||||
|
||||
impl From<DecodeError<Impossible>> for InvalidBase64 {
|
||||
fn from(e: DecodeError<Impossible>) -> Self {
|
||||
match e {
|
||||
DecodeError::InvalidBase64(e) => e,
|
||||
DecodeError::WriteError(e) => match e {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `input` is assumed to be in an ASCII-compatible encoding
|
||||
pub fn decode_to_vec(input: &[u8]) -> Result<Vec<u8>, InvalidBase64> {
|
||||
let mut v = Vec::new();
|
||||
{
|
||||
let mut decoder = Decoder::new(|bytes| {
|
||||
v.extend_from_slice(bytes);
|
||||
Ok(())
|
||||
});
|
||||
decoder.feed(input)?;
|
||||
decoder.finish()?;
|
||||
}
|
||||
Ok(v)
|
||||
}
|
||||
|
||||
/// <https://infra.spec.whatwg.org/#forgiving-base64-decode>
|
||||
pub struct Decoder<F, E>
|
||||
where
|
||||
F: FnMut(&[u8]) -> Result<(), E>,
|
||||
{
|
||||
write_bytes: F,
|
||||
bit_buffer: u32,
|
||||
buffer_bit_length: u8,
|
||||
padding_symbols: u8,
|
||||
}
|
||||
|
||||
impl<F, E> Decoder<F, E>
|
||||
where
|
||||
F: FnMut(&[u8]) -> Result<(), E>,
|
||||
{
|
||||
pub fn new(write_bytes: F) -> Self {
|
||||
Self {
|
||||
write_bytes,
|
||||
bit_buffer: 0,
|
||||
buffer_bit_length: 0,
|
||||
padding_symbols: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Feed to the decoder partial input in an ASCII-compatible encoding
|
||||
pub fn feed(&mut self, input: &[u8]) -> Result<(), DecodeError<E>> {
|
||||
for &byte in input.iter() {
|
||||
let value = BASE64_DECODE_TABLE[byte as usize];
|
||||
if value < 0 {
|
||||
// A character that’s not part of the alphabet
|
||||
|
||||
// Remove ASCII whitespace
|
||||
if matches!(byte, b' ' | b'\t' | b'\n' | b'\r' | b'\x0C') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if byte == b'=' {
|
||||
self.padding_symbols = self.padding_symbols.saturating_add(1);
|
||||
continue;
|
||||
}
|
||||
|
||||
return Err(InvalidBase64Details::UnexpectedSymbol(byte).into());
|
||||
}
|
||||
if self.padding_symbols > 0 {
|
||||
return Err(InvalidBase64Details::AlphabetSymbolAfterPadding.into());
|
||||
}
|
||||
self.bit_buffer <<= 6;
|
||||
self.bit_buffer |= value as u32;
|
||||
// 18 before incrementing means we’ve just reached 24
|
||||
if self.buffer_bit_length < 18 {
|
||||
self.buffer_bit_length += 6;
|
||||
} else {
|
||||
// We’ve accumulated four times 6 bits, which equals three times 8 bits.
|
||||
let byte_buffer = [
|
||||
(self.bit_buffer >> 16) as u8,
|
||||
(self.bit_buffer >> 8) as u8,
|
||||
self.bit_buffer as u8,
|
||||
];
|
||||
(self.write_bytes)(&byte_buffer).map_err(DecodeError::WriteError)?;
|
||||
self.buffer_bit_length = 0;
|
||||
// No need to reset bit_buffer,
|
||||
// since next time we’re only gonna read relevant bits.
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Call this to signal the end of the input
|
||||
pub fn finish(mut self) -> Result<(), DecodeError<E>> {
|
||||
match (self.buffer_bit_length, self.padding_symbols) {
|
||||
(0, 0) => {
|
||||
// A multiple of four of alphabet symbols, and nothing else.
|
||||
}
|
||||
(12, 2) | (12, 0) => {
|
||||
// A multiple of four of alphabet symbols, followed by two more symbols,
|
||||
// optionally followed by two padding characters (which make a total multiple of four).
|
||||
let byte_buffer = [(self.bit_buffer >> 4) as u8];
|
||||
(self.write_bytes)(&byte_buffer).map_err(DecodeError::WriteError)?;
|
||||
}
|
||||
(18, 1) | (18, 0) => {
|
||||
// A multiple of four of alphabet symbols, followed by three more symbols,
|
||||
// optionally followed by one padding character (which make a total multiple of four).
|
||||
let byte_buffer = [(self.bit_buffer >> 10) as u8, (self.bit_buffer >> 2) as u8];
|
||||
(self.write_bytes)(&byte_buffer).map_err(DecodeError::WriteError)?;
|
||||
}
|
||||
(6, _) => return Err(InvalidBase64Details::LoneAlphabetSymbol.into()),
|
||||
_ => return Err(InvalidBase64Details::Padding.into()),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Generated by `make_base64_decode_table.py` based on "Table 1: The Base 64 Alphabet"
|
||||
/// at <https://tools.ietf.org/html/rfc4648#section-4>
|
||||
///
|
||||
/// Array indices are the byte value of symbols.
|
||||
/// Array values are their positions in the base64 alphabet,
|
||||
/// or -1 for symbols not in the alphabet.
|
||||
/// The position contributes 6 bits to the decoded bytes.
|
||||
#[rustfmt::skip]
|
||||
const BASE64_DECODE_TABLE: [i8; 256] = [
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
|
||||
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
|
||||
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
|
||||
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
|
||||
-1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
|
||||
41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
|
||||
];
|
||||
347
third-party/vendor/data-url/src/lib.rs
vendored
Normal file
347
third-party/vendor/data-url/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,347 @@
|
|||
//! Processing of `data:` URLs according to the Fetch Standard:
|
||||
//! <https://fetch.spec.whatwg.org/#data-urls>
|
||||
//! but starting from a string rather than a parsed URL to avoid extra copies.
|
||||
//!
|
||||
//! ```rust
|
||||
//! use data_url::{DataUrl, mime};
|
||||
//!
|
||||
//! let url = DataUrl::process("data:,Hello%20World!").unwrap();
|
||||
//! let (body, fragment) = url.decode_to_vec().unwrap();
|
||||
//!
|
||||
//! assert_eq!(url.mime_type().type_, "text");
|
||||
//! assert_eq!(url.mime_type().subtype, "plain");
|
||||
//! assert_eq!(url.mime_type().get_parameter("charset"), Some("US-ASCII"));
|
||||
//! assert_eq!(body, b"Hello World!");
|
||||
//! assert!(fragment.is_none());
|
||||
//! ```
|
||||
#![no_std]
|
||||
|
||||
// For forwards compatibility
|
||||
#[cfg(feature = "std")]
|
||||
extern crate std;
|
||||
|
||||
#[macro_use]
|
||||
extern crate alloc;
|
||||
|
||||
#[cfg(not(feature = "alloc"))]
|
||||
compile_error!("the `alloc` feature must be enabled");
|
||||
|
||||
use alloc::{string::String, vec::Vec};
|
||||
use core::fmt;
|
||||
|
||||
macro_rules! require {
|
||||
($condition: expr) => {
|
||||
if !$condition {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub mod forgiving_base64;
|
||||
pub mod mime;
|
||||
|
||||
pub struct DataUrl<'a> {
|
||||
mime_type: mime::Mime,
|
||||
base64: bool,
|
||||
encoded_body_plus_fragment: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DataUrlError {
|
||||
NotADataUrl,
|
||||
NoComma,
|
||||
}
|
||||
|
||||
impl fmt::Display for DataUrlError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::NotADataUrl => write!(f, "not a valid data url"),
|
||||
Self::NoComma => write!(
|
||||
f,
|
||||
"data url is missing comma delimiting attributes and body"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for DataUrlError {}
|
||||
|
||||
impl<'a> DataUrl<'a> {
|
||||
/// <https://fetch.spec.whatwg.org/#data-url-processor>
|
||||
/// but starting from a string rather than a parsed `Url`, to avoid extra string copies.
|
||||
pub fn process(input: &'a str) -> Result<Self, DataUrlError> {
|
||||
use crate::DataUrlError::*;
|
||||
|
||||
let after_colon = pretend_parse_data_url(input).ok_or(NotADataUrl)?;
|
||||
|
||||
let (from_colon_to_comma, encoded_body_plus_fragment) =
|
||||
find_comma_before_fragment(after_colon).ok_or(NoComma)?;
|
||||
|
||||
let (mime_type, base64) = parse_header(from_colon_to_comma);
|
||||
|
||||
Ok(DataUrl {
|
||||
mime_type,
|
||||
base64,
|
||||
encoded_body_plus_fragment,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn mime_type(&self) -> &mime::Mime {
|
||||
&self.mime_type
|
||||
}
|
||||
|
||||
/// Streaming-decode the data URL’s body to `write_body_bytes`,
|
||||
/// and return the URL’s fragment identifier if it has one.
|
||||
pub fn decode<F, E>(
|
||||
&self,
|
||||
write_body_bytes: F,
|
||||
) -> Result<Option<FragmentIdentifier<'a>>, forgiving_base64::DecodeError<E>>
|
||||
where
|
||||
F: FnMut(&[u8]) -> Result<(), E>,
|
||||
{
|
||||
if self.base64 {
|
||||
decode_with_base64(self.encoded_body_plus_fragment, write_body_bytes)
|
||||
} else {
|
||||
decode_without_base64(self.encoded_body_plus_fragment, write_body_bytes)
|
||||
.map_err(forgiving_base64::DecodeError::WriteError)
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the decoded body, and the URL’s fragment identifier if it has one.
|
||||
pub fn decode_to_vec(
|
||||
&self,
|
||||
) -> Result<(Vec<u8>, Option<FragmentIdentifier<'a>>), forgiving_base64::InvalidBase64> {
|
||||
let mut body = Vec::new();
|
||||
let fragment = self.decode(|bytes| {
|
||||
body.extend_from_slice(bytes);
|
||||
Ok(())
|
||||
})?;
|
||||
Ok((body, fragment))
|
||||
}
|
||||
}
|
||||
|
||||
/// The URL’s fragment identifier (after `#`)
|
||||
pub struct FragmentIdentifier<'a>(&'a str);
|
||||
|
||||
impl<'a> FragmentIdentifier<'a> {
|
||||
/// Like in a parsed URL
|
||||
pub fn to_percent_encoded(&self) -> String {
|
||||
let mut string = String::new();
|
||||
for byte in self.0.bytes() {
|
||||
match byte {
|
||||
// Ignore ASCII tabs or newlines like the URL parser would
|
||||
b'\t' | b'\n' | b'\r' => continue,
|
||||
// https://url.spec.whatwg.org/#fragment-percent-encode-set
|
||||
b'\0'..=b' ' | b'"' | b'<' | b'>' | b'`' | b'\x7F'..=b'\xFF' => {
|
||||
percent_encode(byte, &mut string)
|
||||
}
|
||||
// Printable ASCII
|
||||
_ => string.push(byte as char),
|
||||
}
|
||||
}
|
||||
string
|
||||
}
|
||||
}
|
||||
|
||||
/// Similar to <https://url.spec.whatwg.org/#concept-basic-url-parser>
|
||||
/// followed by <https://url.spec.whatwg.org/#concept-url-serializer>
|
||||
///
|
||||
/// * `None`: not a data URL.
|
||||
///
|
||||
/// * `Some(s)`: sort of the result of serialization, except:
|
||||
///
|
||||
/// - `data:` prefix removed
|
||||
/// - The fragment is included
|
||||
/// - Other components are **not** UTF-8 percent-encoded
|
||||
/// - ASCII tabs and newlines in the middle are **not** removed
|
||||
fn pretend_parse_data_url(input: &str) -> Option<&str> {
|
||||
// Trim C0 control or space
|
||||
let left_trimmed = input.trim_start_matches(|ch| ch <= ' ');
|
||||
|
||||
let mut bytes = left_trimmed.bytes();
|
||||
{
|
||||
// Ignore ASCII tabs or newlines like the URL parser would
|
||||
let mut iter = bytes
|
||||
.by_ref()
|
||||
.filter(|&byte| !matches!(byte, b'\t' | b'\n' | b'\r'));
|
||||
require!(iter.next()?.to_ascii_lowercase() == b'd');
|
||||
require!(iter.next()?.to_ascii_lowercase() == b'a');
|
||||
require!(iter.next()?.to_ascii_lowercase() == b't');
|
||||
require!(iter.next()?.to_ascii_lowercase() == b'a');
|
||||
require!(iter.next()? == b':');
|
||||
}
|
||||
let bytes_consumed = left_trimmed.len() - bytes.len();
|
||||
let after_colon = &left_trimmed[bytes_consumed..];
|
||||
|
||||
// Trim C0 control or space
|
||||
Some(after_colon.trim_end_matches(|ch| ch <= ' '))
|
||||
}
|
||||
|
||||
fn find_comma_before_fragment(after_colon: &str) -> Option<(&str, &str)> {
|
||||
for (i, byte) in after_colon.bytes().enumerate() {
|
||||
if byte == b',' {
|
||||
return Some((&after_colon[..i], &after_colon[i + 1..]));
|
||||
}
|
||||
if byte == b'#' {
|
||||
break;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Parses the portion of a data: URL between `data:` and the comma into a
/// MIME type, and reports whether a trailing `;base64` marker was present.
///
/// Bytes that the URL parser would have percent-encoded are percent-encoded
/// here before MIME parsing; if the result still does not parse as a MIME
/// type, falls back to `text/plain;charset=US-ASCII`.
fn parse_header(from_colon_to_comma: &str) -> (mime::Mime, bool) {
    // "Strip leading and trailing ASCII whitespace"
    // \t, \n, and \r would have been filtered by the URL parser
    // \f percent-encoded by the URL parser
    // space is the only remaining ASCII whitespace
    let trimmed = from_colon_to_comma.trim_matches(|c| matches!(c, ' ' | '\t' | '\n' | '\r'));

    // `Some(_)` here means the ";base64" suffix was present and removed
    let without_base64_suffix = remove_base64_suffix(trimmed);
    let base64 = without_base64_suffix.is_some();
    let mime_type = without_base64_suffix.unwrap_or(trimmed);

    let mut string = String::new();
    // A header starting with ';' has an omitted type; default it to text/plain
    if mime_type.starts_with(';') {
        string.push_str("text/plain")
    }
    let mut in_query = false;
    for byte in mime_type.bytes() {
        match byte {
            // Ignore ASCII tabs or newlines like the URL parser would
            b'\t' | b'\n' | b'\r' => continue,

            // https://url.spec.whatwg.org/#c0-control-percent-encode-set
            b'\0'..=b'\x1F' | b'\x7F'..=b'\xFF' => percent_encode(byte, &mut string),

            // Bytes other than the C0 percent-encode set that are percent-encoded
            // by the URL parser in the query state.
            // '#' is also in that list but cannot occur here
            // since it indicates the start of the URL’s fragment.
            b' ' | b'"' | b'<' | b'>' if in_query => percent_encode(byte, &mut string),

            b'?' => {
                in_query = true;
                string.push('?')
            }

            // Printable ASCII
            _ => string.push(byte as char),
        }
    }

    // FIXME: does Mime::from_str match the MIME Sniffing Standard’s parsing algorithm?
    // <https://mimesniff.spec.whatwg.org/#parse-a-mime-type>
    let mime_type = string.parse().unwrap_or_else(|_| mime::Mime {
        type_: String::from("text"),
        subtype: String::from("plain"),
        parameters: vec![(String::from("charset"), String::from("US-ASCII"))],
    });

    (mime_type, base64)
}
|
||||
|
||||
/// None: no base64 suffix
///
/// Some(…): the input with its trailing `;base64` marker removed. The marker
/// is matched ASCII case-insensitively, spaces are allowed between `;` and
/// `base64`, and ASCII tab/newlines are ignored throughout.
#[allow(clippy::skip_while_next)]
fn remove_base64_suffix(s: &str) -> Option<&str> {
    let mut bytes = s.bytes();
    {
        // Ignore ASCII tabs or newlines like the URL parser would
        let iter = bytes
            .by_ref()
            .filter(|&byte| !matches!(byte, b'\t' | b'\n' | b'\r'));

        // Search from the end
        let mut iter = iter.rev();

        // Match "base64" in reverse order ("46esab"), case-insensitively
        require!(iter.next()? == b'4');
        require!(iter.next()? == b'6');
        require!(iter.next()?.to_ascii_lowercase() == b'e');
        require!(iter.next()?.to_ascii_lowercase() == b's');
        require!(iter.next()?.to_ascii_lowercase() == b'a');
        require!(iter.next()?.to_ascii_lowercase() == b'b');
        // Skip any spaces before "base64", then require the ';' separator
        require!(iter.skip_while(|&byte| byte == b' ').next()? == b';');
    }
    // The reverse iteration consumed the suffix from the back, so the bytes
    // left in `bytes` are exactly those preceding the ";base64" marker.
    Some(&s[..bytes.len()])
}
|
||||
|
||||
/// Appends the percent-encoding of `byte` (e.g. `%0C`) to `string`,
/// using uppercase hexadecimal digits.
fn percent_encode(byte: u8, string: &mut String) {
    const DIGITS: &[u8; 16] = b"0123456789ABCDEF";
    let high = DIGITS[usize::from(byte >> 4)];
    let low = DIGITS[usize::from(byte & 0x0f)];
    string.push('%');
    string.push(char::from(high));
    string.push(char::from(low));
}
|
||||
|
||||
/// This is <https://url.spec.whatwg.org/#string-percent-decode> while also:
///
/// * Ignoring ASCII tab or newlines
/// * Stopping at the first '#' (which indicates the start of the fragment)
///
/// Anything that would have been UTF-8 percent-encoded by the URL parser
/// would be percent-decoded here.
/// We skip that round-trip and pass it through unchanged.
///
/// `write_bytes` receives successive chunks of the decoded body.
/// Returns the fragment identifier (the text after the first `#`), if any;
/// `E` is whatever error type `write_bytes` can produce.
fn decode_without_base64<F, E>(
    encoded_body_plus_fragment: &str,
    mut write_bytes: F,
) -> Result<Option<FragmentIdentifier<'_>>, E>
where
    F: FnMut(&[u8]) -> Result<(), E>,
{
    let bytes = encoded_body_plus_fragment.as_bytes();
    // Start of the current run of "plain" bytes not yet written out
    let mut slice_start = 0;
    for (i, &byte) in bytes.iter().enumerate() {
        // We only need to look for 5 different "special" byte values.
        // For everything else we make slices as large as possible, borrowing the input,
        // in order to make fewer write_all() calls.
        if matches!(byte, b'%' | b'#' | b'\t' | b'\n' | b'\r') {
            // Write everything (if anything) "non-special" we’ve accumulated
            // before this special byte
            if i > slice_start {
                write_bytes(&bytes[slice_start..i])?;
            }
            // Then deal with the special byte.
            match byte {
                b'%' => {
                    // Look ahead for two hex digits (low digit first here,
                    // but both must be present for a valid escape)
                    let l = bytes.get(i + 2).and_then(|&b| (b as char).to_digit(16));
                    let h = bytes.get(i + 1).and_then(|&b| (b as char).to_digit(16));
                    if let (Some(h), Some(l)) = (h, l) {
                        // '%' followed by two ASCII hex digits
                        let one_byte = h as u8 * 0x10 + l as u8;
                        write_bytes(&[one_byte])?;
                        slice_start = i + 3;
                    } else {
                        // Do nothing. Leave slice_start unchanged.
                        // The % sign will be part of the next slice.
                    }
                }

                b'#' => {
                    // Everything after '#' is the fragment; stop decoding here
                    let fragment_start = i + 1;
                    let fragment = &encoded_body_plus_fragment[fragment_start..];
                    return Ok(Some(FragmentIdentifier(fragment)));
                }

                // Ignore over '\t' | '\n' | '\r'
                _ => slice_start = i + 1,
            }
        }
    }
    // Flush the final run of plain bytes; no fragment was found
    write_bytes(&bytes[slice_start..])?;
    Ok(None)
}
|
||||
|
||||
/// `decode_without_base64()` composed with
/// <https://infra.spec.whatwg.org/#isomorphic-decode> composed with
/// <https://infra.spec.whatwg.org/#forgiving-base64-decode>.
///
/// Percent-decoded bytes are streamed into a forgiving-base64 decoder,
/// which in turn streams fully decoded bytes into `write_bytes`.
fn decode_with_base64<F, E>(
    encoded_body_plus_fragment: &str,
    write_bytes: F,
) -> Result<Option<FragmentIdentifier<'_>>, forgiving_base64::DecodeError<E>>
where
    F: FnMut(&[u8]) -> Result<(), E>,
{
    let mut decoder = forgiving_base64::Decoder::new(write_bytes);
    let fragment = decode_without_base64(encoded_body_plus_fragment, |bytes| decoder.feed(bytes))?;
    // Complete the streaming decode; errors here surface as DecodeError
    decoder.finish()?;
    Ok(fragment)
}
|
||||
19
third-party/vendor/data-url/src/make_base64_decode_table.py
vendored
Normal file
19
third-party/vendor/data-url/src/make_base64_decode_table.py
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
"""
|
||||
Generate the BASE64_DECODE_TABLE constant. See its doc-comment.
|
||||
"""
|
||||
|
||||
import string
|
||||
|
||||
# https://tools.ietf.org/html/rfc4648#section-4
|
||||
alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits + "+/"
|
||||
assert len(alphabet) == 64
|
||||
|
||||
reverse_table = [-1] * 256
|
||||
for i, symbol in enumerate(alphabet):
|
||||
reverse_table[ord(symbol)] = i
|
||||
|
||||
print("[")
|
||||
per_line = 16
|
||||
for line in range(0, 256, per_line):
|
||||
print(" " + "".join(" %2s," % value for value in reverse_table[line:][:per_line]))
|
||||
print("]")
|
||||
207
third-party/vendor/data-url/src/mime.rs
vendored
Normal file
207
third-party/vendor/data-url/src/mime.rs
vendored
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
use alloc::{borrow::ToOwned, string::String, vec::Vec};
|
||||
use core::fmt::{self, Write};
|
||||
use core::str::FromStr;
|
||||
|
||||
/// <https://mimesniff.spec.whatwg.org/#mime-type-representation>
#[derive(Debug, PartialEq, Eq)]
pub struct Mime {
    /// Top-level type, e.g. `"text"` in `text/plain`
    /// (lowercased when produced by the parser)
    pub type_: String,
    /// Subtype, e.g. `"plain"` in `text/plain`
    /// (lowercased when produced by the parser)
    pub subtype: String,
    /// (name, value)
    pub parameters: Vec<(String, String)>,
}
|
||||
|
||||
impl Mime {
|
||||
pub fn get_parameter<P>(&self, name: &P) -> Option<&str>
|
||||
where
|
||||
P: ?Sized + PartialEq<str>,
|
||||
{
|
||||
self.parameters
|
||||
.iter()
|
||||
.find(|&(n, _)| name == &**n)
|
||||
.map(|(_, v)| &**v)
|
||||
}
|
||||
}
|
||||
|
||||
/// Error returned when a string cannot be parsed as a MIME type.
#[derive(Debug)]
pub struct MimeParsingError(());

impl fmt::Display for MimeParsingError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("invalid mime type")
    }
}

#[cfg(feature = "std")]
impl std::error::Error for MimeParsingError {}
|
||||
|
||||
/// <https://mimesniff.spec.whatwg.org/#parsing-a-mime-type>
impl FromStr for Mime {
    type Err = MimeParsingError;

    // Delegates to the module-level `parse`, converting its Option to Result.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        parse(s).ok_or(MimeParsingError(()))
    }
}
|
||||
|
||||
/// Parses `s` as a MIME type per
/// <https://mimesniff.spec.whatwg.org/#parsing-a-mime-type>.
///
/// Returns `None` when the type or subtype is empty, contains non-token
/// code points, or the `/` separator is missing.
fn parse(s: &str) -> Option<Mime> {
    let trimmed = s.trim_matches(http_whitespace);

    // type "/" subtype — both must be non-empty HTTP token strings
    let (type_, rest) = split2(trimmed, '/');
    require!(only_http_token_code_points(type_) && !type_.is_empty());

    let (subtype, rest) = split2(rest?, ';');
    let subtype = subtype.trim_end_matches(http_whitespace);
    require!(only_http_token_code_points(subtype) && !subtype.is_empty());

    // Everything after the first ';' (if any) is the parameter list
    let mut parameters = Vec::new();
    if let Some(rest) = rest {
        parse_parameters(rest, &mut parameters)
    }

    Some(Mime {
        // type and subtype are normalized to lowercase;
        // parameter names are lowercased in parse_parameters
        type_: type_.to_ascii_lowercase(),
        subtype: subtype.to_ascii_lowercase(),
        parameters,
    })
}
|
||||
|
||||
/// Splits `s` at the first occurrence of `separator`.
///
/// Returns the text before it, plus `Some(rest)` (which may contain further
/// separators), or `(s, None)` when `separator` does not occur.
fn split2(s: &str, separator: char) -> (&str, Option<&str>) {
    match s.find(separator) {
        Some(position) => (&s[..position], Some(&s[position + separator.len_utf8()..])),
        None => (s, None),
    }
}
|
||||
|
||||
/// Parses the `;name=value` parameter list of a MIME type into `parameters`.
///
/// Invalid parameters (empty/non-token/duplicate names, empty or invalid
/// values) are skipped rather than failing the whole parse. Quoted-string
/// values are unescaped; a semicolon inside a quoted value is re-joined from
/// the `split(';')` pieces it was split across.
fn parse_parameters(s: &str, parameters: &mut Vec<(String, String)>) {
    let mut semicolon_separated = s.split(';');

    while let Some(piece) = semicolon_separated.next() {
        let piece = piece.trim_start_matches(http_whitespace);
        let (name, value) = split2(piece, '=');
        // We can not early return on an invalid name here, because the value
        // parsing later may consume more semicolon seperated pieces.
        let name_valid =
            !name.is_empty() && only_http_token_code_points(name) && !contains(parameters, name);
        if let Some(value) = value {
            let value = if let Some(stripped) = value.strip_prefix('"') {
                // Quoted-string value: unescape '\' escapes until the closing '"'
                let max_len = stripped.len().saturating_sub(1); // without end quote
                let mut unescaped_value = String::with_capacity(max_len);
                let mut chars = stripped.chars();
                'until_closing_quote: loop {
                    while let Some(c) = chars.next() {
                        match c {
                            '"' => break 'until_closing_quote,
                            // '\' escapes the next char; if the piece ends right
                            // after it, the escaped char was the ';' consumed by
                            // split(), so continue into the next piece
                            '\\' => unescaped_value.push(chars.next().unwrap_or_else(|| {
                                semicolon_separated
                                    .next()
                                    .map(|piece| {
                                        // A semicolon inside a quoted value is not a separator
                                        // for the next parameter, but part of the value.
                                        chars = piece.chars();
                                        ';'
                                    })
                                    .unwrap_or('\\')
                            })),
                            _ => unescaped_value.push(c),
                        }
                    }
                    if let Some(piece) = semicolon_separated.next() {
                        // A semicolon inside a quoted value is not a separator
                        // for the next parameter, but part of the value.
                        unescaped_value.push(';');
                        chars = piece.chars()
                    } else {
                        // Input ended before a closing quote; treat as closed
                        break;
                    }
                }
                if !name_valid || !valid_value(value) {
                    continue;
                }
                unescaped_value
            } else {
                // Unquoted value: trim trailing whitespace and validate
                let value = value.trim_end_matches(http_whitespace);
                if value.is_empty() {
                    continue;
                }
                if !name_valid || !valid_value(value) {
                    continue;
                }
                value.to_owned()
            };
            // Parameter names are case-insensitive; store them lowercased
            parameters.push((name.to_ascii_lowercase(), value))
        }
    }
}
|
||||
|
||||
/// Returns true when `parameters` already has an entry named `name`
/// (used to skip duplicate parameter names during parsing).
fn contains(parameters: &[(String, String)], name: &str) -> bool {
    for (existing_name, _) in parameters {
        if existing_name == name {
            return true;
        }
    }
    false
}
|
||||
|
||||
/// Returns true when every char of `s` is an HTTP quoted-string token
/// code point: <https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point>
fn valid_value(s: &str) -> bool {
    for c in s.chars() {
        let allowed =
            c == '\t' || (' '..='~').contains(&c) || ('\u{80}'..='\u{FF}').contains(&c);
        if !allowed {
            return false;
        }
    }
    true
}
|
||||
|
||||
/// <https://mimesniff.spec.whatwg.org/#serializing-a-mime-type>
impl fmt::Display for Mime {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // type "/" subtype, then each parameter as ";name=value"
        f.write_str(&self.type_)?;
        f.write_str("/")?;
        f.write_str(&self.subtype)?;
        for (name, value) in &self.parameters {
            f.write_str(";")?;
            f.write_str(name)?;
            f.write_str("=")?;
            if only_http_token_code_points(value) && !value.is_empty() {
                // A non-empty token value can be written bare
                f.write_str(value)?
            } else {
                // Otherwise serialize as a quoted-string,
                // backslash-escaping '"' and '\'
                f.write_str("\"")?;
                for c in value.chars() {
                    if c == '"' || c == '\\' {
                        f.write_str("\\")?
                    }
                    f.write_char(c)?
                }
                f.write_str("\"")?
            }
        }
        Ok(())
    }
}
|
||||
|
||||
/// Returns true for HTTP whitespace: space, tab, LF, or CR.
fn http_whitespace(c: char) -> bool {
    c == ' ' || c == '\t' || c == '\n' || c == '\r'
}
|
||||
|
||||
fn only_http_token_code_points(s: &str) -> bool {
|
||||
s.bytes().all(|byte| IS_HTTP_TOKEN[byte as usize])
|
||||
}
|
||||
|
||||
// Turns a comma-separated list of 0/1 integer literals into a `[bool; N]`
// array expression, so the lookup table below can be written compactly.
macro_rules! byte_map {
    ($($flag:expr,)*) => ([
        $($flag != 0,)*
    ])
}
|
||||
|
||||
// Copied from https://github.com/hyperium/mime/blob/v0.3.5/src/parse.rs#L293
// `IS_HTTP_TOKEN[b]` is true iff byte `b` is an HTTP token code point:
// ASCII alphanumerics plus ! # $ % & ' * + - . ^ _ ` | ~
// (16 entries per row; rows are 0x00-0x0F, 0x10-0x1F, … up to 0xF0-0xFF).
#[rustfmt::skip]
static IS_HTTP_TOKEN: [bool; 256] = byte_map![
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
    0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
|
||||
82
third-party/vendor/data-url/tests/base64.json
vendored
Normal file
82
third-party/vendor/data-url/tests/base64.json
vendored
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
[
|
||||
["", []],
|
||||
["abcd", [105, 183, 29]],
|
||||
[" abcd", [105, 183, 29]],
|
||||
["abcd ", [105, 183, 29]],
|
||||
[" abcd===", null],
|
||||
["abcd=== ", null],
|
||||
["abcd ===", null],
|
||||
["a", null],
|
||||
["ab", [105]],
|
||||
["abc", [105, 183]],
|
||||
["abcde", null],
|
||||
["𐀀", null],
|
||||
["=", null],
|
||||
["==", null],
|
||||
["===", null],
|
||||
["====", null],
|
||||
["=====", null],
|
||||
["a=", null],
|
||||
["a==", null],
|
||||
["a===", null],
|
||||
["a====", null],
|
||||
["a=====", null],
|
||||
["ab=", null],
|
||||
["ab==", [105]],
|
||||
["ab===", null],
|
||||
["ab====", null],
|
||||
["ab=====", null],
|
||||
["abc=", [105, 183]],
|
||||
["abc==", null],
|
||||
["abc===", null],
|
||||
["abc====", null],
|
||||
["abc=====", null],
|
||||
["abcd=", null],
|
||||
["abcd==", null],
|
||||
["abcd===", null],
|
||||
["abcd====", null],
|
||||
["abcd=====", null],
|
||||
["abcde=", null],
|
||||
["abcde==", null],
|
||||
["abcde===", null],
|
||||
["abcde====", null],
|
||||
["abcde=====", null],
|
||||
["=a", null],
|
||||
["=a=", null],
|
||||
["a=b", null],
|
||||
["a=b=", null],
|
||||
["ab=c", null],
|
||||
["ab=c=", null],
|
||||
["abc=d", null],
|
||||
["abc=d=", null],
|
||||
["ab\u000Bcd", null],
|
||||
["ab\u3000cd", null],
|
||||
["ab\u3001cd", null],
|
||||
["ab\tcd", [105, 183, 29]],
|
||||
["ab\ncd", [105, 183, 29]],
|
||||
["ab\fcd", [105, 183, 29]],
|
||||
["ab\rcd", [105, 183, 29]],
|
||||
["ab cd", [105, 183, 29]],
|
||||
["ab\u00a0cd", null],
|
||||
["ab\t\n\f\r cd", [105, 183, 29]],
|
||||
[" \t\n\f\r ab\t\n\f\r cd\t\n\f\r ", [105, 183, 29]],
|
||||
["ab\t\n\f\r =\t\n\f\r =\t\n\f\r ", [105]],
|
||||
["A", null],
|
||||
["/A", [252]],
|
||||
["//A", [255, 240]],
|
||||
["///A", [255, 255, 192]],
|
||||
["////A", null],
|
||||
["/", null],
|
||||
["A/", [3]],
|
||||
["AA/", [0, 15]],
|
||||
["AAAA/", null],
|
||||
["AAA/", [0, 0, 63]],
|
||||
["\u0000nonsense", null],
|
||||
["abcd\u0000nonsense", null],
|
||||
["YQ", [97]],
|
||||
["YR", [97]],
|
||||
["~~", null],
|
||||
["..", null],
|
||||
["--", null],
|
||||
["__", null]
|
||||
]
|
||||
214
third-party/vendor/data-url/tests/data-urls.json
vendored
Normal file
214
third-party/vendor/data-url/tests/data-urls.json
vendored
Normal file
|
|
@ -0,0 +1,214 @@
|
|||
[
|
||||
["data://test/,X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data://test:test/,X",
|
||||
null],
|
||||
["data:,X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:",
|
||||
null],
|
||||
["data:text/html",
|
||||
null],
|
||||
["data:text/html ;charset=x ",
|
||||
null],
|
||||
["data:,",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[]],
|
||||
["data:,X#X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:text/plain,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:text/plain ,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:text/plain%20,X",
|
||||
"text/plain%20",
|
||||
[88]],
|
||||
["data:text/plain\f,X",
|
||||
"text/plain%0c",
|
||||
[88]],
|
||||
["data:text/plain%0C,X",
|
||||
"text/plain%0c",
|
||||
[88]],
|
||||
["data:text/plain;,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:;x=x;charset=x,X",
|
||||
"text/plain;x=x;charset=x",
|
||||
[88]],
|
||||
["data:;x=x,X",
|
||||
"text/plain;x=x",
|
||||
[88]],
|
||||
["data:text/plain;charset=windows-1252,%C2%B1",
|
||||
"text/plain;charset=windows-1252",
|
||||
[194, 177]],
|
||||
["data:text/plain;Charset=UTF-8,%C2%B1",
|
||||
"text/plain;charset=UTF-8",
|
||||
[194, 177]],
|
||||
["data:text/plain;charset=windows-1252,áñçə💩",
|
||||
"text/plain;charset=windows-1252",
|
||||
[195, 161, 195, 177, 195, 167, 201, 153, 240, 159, 146, 169]],
|
||||
["data:text/plain;charset=UTF-8,áñçə💩",
|
||||
"text/plain;charset=UTF-8",
|
||||
[195, 161, 195, 177, 195, 167, 201, 153, 240, 159, 146, 169]],
|
||||
["data:image/gif,%C2%B1",
|
||||
"image/gif",
|
||||
[194, 177]],
|
||||
["data:IMAGE/gif,%C2%B1",
|
||||
"image/gif",
|
||||
[194, 177]],
|
||||
["data:IMAGE/gif;hi=x,%C2%B1",
|
||||
"image/gif;hi=x",
|
||||
[194, 177]],
|
||||
["data:IMAGE/gif;CHARSET=x,%C2%B1",
|
||||
"image/gif;charset=x",
|
||||
[194, 177]],
|
||||
["data: ,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:%20,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:\f,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:%1F,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:\u0000,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:%00,%FF",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[255]],
|
||||
["data:text/html ,X",
|
||||
"text/html",
|
||||
[88]],
|
||||
["data:text / html,X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:†,X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:†/†,X",
|
||||
"%e2%80%a0/%e2%80%a0",
|
||||
[88]],
|
||||
["data:X,X",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:image/png,X X",
|
||||
"image/png",
|
||||
[88, 32, 88]],
|
||||
["data:application/javascript,X X",
|
||||
"application/javascript",
|
||||
[88, 32, 88]],
|
||||
["data:application/xml,X X",
|
||||
"application/xml",
|
||||
[88, 32, 88]],
|
||||
["data:text/javascript,X X",
|
||||
"text/javascript",
|
||||
[88, 32, 88]],
|
||||
["data:text/plain,X X",
|
||||
"text/plain",
|
||||
[88, 32, 88]],
|
||||
["data:unknown/unknown,X X",
|
||||
"unknown/unknown",
|
||||
[88, 32, 88]],
|
||||
["data:text/plain;a=\",\",X",
|
||||
"text/plain;a=\"\"",
|
||||
[34, 44, 88]],
|
||||
["data:text/plain;a=%2C,X",
|
||||
"text/plain;a=%2C",
|
||||
[88]],
|
||||
["data:;base64;base64,WA",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:x/x;base64;base64,WA",
|
||||
"x/x",
|
||||
[88]],
|
||||
["data:x/x;base64;charset=x,WA",
|
||||
"x/x;charset=x",
|
||||
[87, 65]],
|
||||
["data:x/x;base64;charset=x;base64,WA",
|
||||
"x/x;charset=x",
|
||||
[88]],
|
||||
["data:x/x;base64;base64x,WA",
|
||||
"x/x",
|
||||
[87, 65]],
|
||||
["data:;base64,W%20A",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:;base64,W%0CA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:x;base64x,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[87, 65]],
|
||||
["data:x;base64;x,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[87, 65]],
|
||||
["data:x;base64=x,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[87, 65]],
|
||||
["data:; base64,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:; base64,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data: ;charset=x ; base64,WA",
|
||||
"text/plain;charset=x",
|
||||
[88]],
|
||||
["data:;base64;,WA",
|
||||
"text/plain",
|
||||
[87, 65]],
|
||||
["data:;base64 ,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:;base64 ,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:;base 64,WA",
|
||||
"text/plain",
|
||||
[87, 65]],
|
||||
["data:;BASe64,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[88]],
|
||||
["data:;%62ase64,WA",
|
||||
"text/plain",
|
||||
[87, 65]],
|
||||
["data:%3Bbase64,WA",
|
||||
"text/plain;charset=US-ASCII",
|
||||
[87, 65]],
|
||||
["data:;charset=x,X",
|
||||
"text/plain;charset=x",
|
||||
[88]],
|
||||
["data:; charset=x,X",
|
||||
"text/plain;charset=x",
|
||||
[88]],
|
||||
["data:;charset =x,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:;charset= x,X",
|
||||
"text/plain;charset=\" x\"",
|
||||
[88]],
|
||||
["data:;charset=,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:;charset,X",
|
||||
"text/plain",
|
||||
[88]],
|
||||
["data:;charset=\"x\",X",
|
||||
"text/plain;charset=x",
|
||||
[88]],
|
||||
["data:;CHARSET=\"X\",X",
|
||||
"text/plain;charset=X",
|
||||
[88]]
|
||||
]
|
||||
3526
third-party/vendor/data-url/tests/generated-mime-types.json
vendored
Normal file
3526
third-party/vendor/data-url/tests/generated-mime-types.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
397
third-party/vendor/data-url/tests/mime-types.json
vendored
Normal file
397
third-party/vendor/data-url/tests/mime-types.json
vendored
Normal file
|
|
@ -0,0 +1,397 @@
|
|||
[
|
||||
"Basics",
|
||||
{
|
||||
"input": "text/html;charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "TEXT/HTML;CHARSET=GBK",
|
||||
"output": "text/html;charset=GBK",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
"Legacy comment syntax",
|
||||
{
|
||||
"input": "text/html;charset=gbk(",
|
||||
"output": "text/html;charset=\"gbk(\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;x=(;charset=gbk",
|
||||
"output": "text/html;x=\"(\";charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
"Duplicate parameter",
|
||||
{
|
||||
"input": "text/html;charset=gbk;charset=windows-1255",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=();charset=GBK",
|
||||
"output": "text/html;charset=\"()\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"Spaces",
|
||||
{
|
||||
"input": "text/html;charset =gbk",
|
||||
"output": "text/html",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html ;charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html; charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset= gbk",
|
||||
"output": "text/html;charset=\" gbk\"",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset= \"gbk\"",
|
||||
"output": "text/html;charset=\" \\\"gbk\\\"\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"0x0B and 0x0C",
|
||||
{
|
||||
"input": "text/html;charset=\u000Bgbk",
|
||||
"output": "text/html",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\u000Cgbk",
|
||||
"output": "text/html",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;\u000Bcharset=gbk",
|
||||
"output": "text/html",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;\u000Ccharset=gbk",
|
||||
"output": "text/html",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"Single quotes are a token, not a delimiter",
|
||||
{
|
||||
"input": "text/html;charset='gbk'",
|
||||
"output": "text/html;charset='gbk'",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset='gbk",
|
||||
"output": "text/html;charset='gbk",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=gbk'",
|
||||
"output": "text/html;charset=gbk'",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=';charset=GBK",
|
||||
"output": "text/html;charset='",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"Invalid parameters",
|
||||
{
|
||||
"input": "text/html;test;charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;test=;charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;';charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;\";charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html ; ; charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;;;;charset=gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset= \"\u007F;charset=GBK",
|
||||
"output": "text/html;charset=GBK",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"\u007F;charset=foo\";charset=GBK",
|
||||
"output": "text/html;charset=GBK",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
"Double quotes",
|
||||
{
|
||||
"input": "text/html;charset=\"gbk\"",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"gbk",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=gbk\"",
|
||||
"output": "text/html;charset=\"gbk\\\"\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\" gbk\"",
|
||||
"output": "text/html;charset=\" gbk\"",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"gbk \"",
|
||||
"output": "text/html;charset=\"gbk \"",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"\\ gbk\"",
|
||||
"output": "text/html;charset=\" gbk\"",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"\\g\\b\\k\"",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"gbk\"x",
|
||||
"output": "text/html;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\"\";charset=GBK",
|
||||
"output": "text/html;charset=\"\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
{
|
||||
"input": "text/html;charset=\";charset=GBK",
|
||||
"output": "text/html;charset=\";charset=GBK\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"Unexpected code points",
|
||||
{
|
||||
"input": "text/html;charset={gbk}",
|
||||
"output": "text/html;charset=\"{gbk}\"",
|
||||
"navigable": true,
|
||||
"encoding": null
|
||||
},
|
||||
"Parameter name longer than 127",
|
||||
{
|
||||
"input": "text/html;0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789=x;charset=gbk",
|
||||
"output": "text/html;0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789=x;charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
"type/subtype longer than 127",
|
||||
{
|
||||
"input": "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789",
|
||||
"output": "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789"
|
||||
},
|
||||
"Invalid names",
|
||||
{
|
||||
"input": "text/html;a]=bar;b[=bar;c=bar",
|
||||
"output": "text/html;c=bar"
|
||||
},
|
||||
"Semicolons in value",
|
||||
{
|
||||
"input": "text/html;valid=\";\";foo=bar",
|
||||
"output": "text/html;valid=\";\";foo=bar"
|
||||
},
|
||||
{
|
||||
"input": "text/html;in]valid=\";asd=foo\";foo=bar",
|
||||
"output": "text/html;foo=bar"
|
||||
},
|
||||
"Valid",
|
||||
{
|
||||
"input": "!#$%&'*+-.^_`|~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz/!#$%&'*+-.^_`|~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz;!#$%&'*+-.^_`|~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz=!#$%&'*+-.^_`|~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
||||
"output": "!#$%&'*+-.^_`|~0123456789abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz/!#$%&'*+-.^_`|~0123456789abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz;!#$%&'*+-.^_`|~0123456789abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz=!#$%&'*+-.^_`|~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
},
|
||||
{
|
||||
"input": "x/x;x=\"\t !\\\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008A\u008B\u008C\u008D\u008E\u008F\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009A\u009B\u009C\u009D\u009E\u009F\u00A0\u00A1\u00A2\u00A3\u00A4\u00A5\u00A6\u00A7\u00A8\u00A9\u00AA\u00AB\u00AC\u00AD\u00AE\u00AF\u00B0\u00B1\u00B2\u00B3\u00B4\u00B5\u00B6\u00B7\u00B8\u00B9\u00BA\u00BB\u00BC\u00BD\u00BE\u00BF\u00C0\u00C1\u00C2\u00C3\u00C4\u00C5\u00C6\u00C7\u00C8\u00C9\u00CA\u00CB\u00CC\u00CD\u00CE\u00CF\u00D0\u00D1\u00D2\u00D3\u00D4\u00D5\u00D6\u00D7\u00D8\u00D9\u00DA\u00DB\u00DC\u00DD\u00DE\u00DF\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6\u00E7\u00E8\u00E9\u00EA\u00EB\u00EC\u00ED\u00EE\u00EF\u00F0\u00F1\u00F2\u00F3\u00F4\u00F5\u00F6\u00F7\u00F8\u00F9\u00FA\u00FB\u00FC\u00FD\u00FE\u00FF\"",
|
||||
"output": "x/x;x=\"\t !\\\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008A\u008B\u008C\u008D\u008E\u008F\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009A\u009B\u009C\u009D\u009E\u009F\u00A0\u00A1\u00A2\u00A3\u00A4\u00A5\u00A6\u00A7\u00A8\u00A9\u00AA\u00AB\u00AC\u00AD\u00AE\u00AF\u00B0\u00B1\u00B2\u00B3\u00B4\u00B5\u00B6\u00B7\u00B8\u00B9\u00BA\u00BB\u00BC\u00BD\u00BE\u00BF\u00C0\u00C1\u00C2\u00C3\u00C4\u00C5\u00C6\u00C7\u00C8\u00C9\u00CA\u00CB\u00CC\u00CD\u00CE\u00CF\u00D0\u00D1\u00D2\u00D3\u00D4\u00D5\u00D6\u00D7\u00D8\u00D9\u00DA\u00DB\u00DC\u00DD\u00DE\u00DF\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6\u00E7\u00E8\u00E9\u00EA\u00EB\u00EC\u00ED\u00EE\u00EF\u00F0\u00F1\u00F2\u00F3\u00F4\u00F5\u00F6\u00F7\u00F8\u00F9\u00FA\u00FB\u00FC\u00FD\u00FE\u00FF\""
|
||||
},
|
||||
"End-of-file handling",
|
||||
{
|
||||
"input": "x/x;test",
|
||||
"output": "x/x"
|
||||
},
|
||||
{
|
||||
"input": "x/x;test=\"\\",
|
||||
"output": "x/x;test=\"\\\\\""
|
||||
},
|
||||
"Whitespace (not handled by generated-mime-types.json or above)",
|
||||
{
|
||||
"input": "x/x;x= ",
|
||||
"output": "x/x"
|
||||
},
|
||||
{
|
||||
"input": "x/x;x=\t",
|
||||
"output": "x/x"
|
||||
},
|
||||
{
|
||||
"input": "x/x\n\r\t ;x=x",
|
||||
"output": "x/x;x=x"
|
||||
},
|
||||
{
|
||||
"input": "\n\r\t x/x;x=x\n\r\t ",
|
||||
"output": "x/x;x=x"
|
||||
},
|
||||
{
|
||||
"input": "x/x;\n\r\t x=x\n\r\t ;x=y",
|
||||
"output": "x/x;x=x"
|
||||
},
|
||||
"Latin1",
|
||||
{
|
||||
"input": "text/html;test=\u00FF;charset=gbk",
|
||||
"output": "text/html;test=\"\u00FF\";charset=gbk",
|
||||
"navigable": true,
|
||||
"encoding": "GBK"
|
||||
},
|
||||
">Latin1",
|
||||
{
|
||||
"input": "x/x;test=\uFFFD;x=x",
|
||||
"output": "x/x;x=x"
|
||||
},
|
||||
"Failure",
|
||||
{
|
||||
"input": "\u000Bx/x",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "\u000Cx/x",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "x/x\u000B",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "x/x\u000C",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "\t",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "/",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "bogus",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "bogus/",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "bogus/ ",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "bogus/bogus/;",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "</>",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "(/)",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "ÿ/ÿ",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "text/html(;doesnot=matter",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "{/}",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "\u0100/\u0100",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "text /html",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "text/ html",
|
||||
"output": null
|
||||
},
|
||||
{
|
||||
"input": "\"text/html\"",
|
||||
"output": null
|
||||
}
|
||||
]
|
||||
167
third-party/vendor/data-url/tests/wpt.rs
vendored
Normal file
167
third-party/vendor/data-url/tests/wpt.rs
vendored
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
use tester as test;
|
||||
|
||||
#[macro_use]
|
||||
extern crate serde;
|
||||
|
||||
fn run_data_url(
|
||||
input: String,
|
||||
expected_mime: Option<String>,
|
||||
expected_body: Option<Vec<u8>>,
|
||||
expected_panic: bool,
|
||||
) {
|
||||
let priorhook = std::panic::take_hook();
|
||||
std::panic::set_hook(Box::new(move |p| {
|
||||
if !expected_panic {
|
||||
priorhook(p);
|
||||
}
|
||||
}));
|
||||
let url = data_url::DataUrl::process(&input);
|
||||
if let Some(expected_mime) = expected_mime {
|
||||
let url = url.unwrap();
|
||||
let (body, _) = url.decode_to_vec().unwrap();
|
||||
if expected_mime.is_empty() {
|
||||
assert_eq!(url.mime_type().to_string(), "text/plain;charset=US-ASCII")
|
||||
} else {
|
||||
assert_eq!(url.mime_type().to_string(), expected_mime)
|
||||
}
|
||||
if let Some(expected_body) = expected_body {
|
||||
assert_eq!(body, expected_body)
|
||||
}
|
||||
} else if let Ok(url) = url {
|
||||
assert!(url.decode_to_vec().is_err(), "{:?}", url.mime_type())
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_data_url<F>(add_test: &mut F)
|
||||
where
|
||||
F: FnMut(String, bool, test::TestFn),
|
||||
{
|
||||
let known_failures = ["data://test:test/,X"];
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum TestCase {
|
||||
Two(String, Option<String>),
|
||||
Three(String, Option<String>, Vec<u8>),
|
||||
}
|
||||
|
||||
let v: Vec<TestCase> = serde_json::from_str(include_str!("data-urls.json")).unwrap();
|
||||
for test in v {
|
||||
let (input, expected_mime, expected_body) = match test {
|
||||
TestCase::Two(i, m) => (i, m, None),
|
||||
TestCase::Three(i, m, b) => (i, m, Some(b)),
|
||||
};
|
||||
let should_panic = known_failures.contains(&&*input);
|
||||
add_test(
|
||||
format!("data: URL {:?}", input),
|
||||
should_panic,
|
||||
test::TestFn::DynTestFn(Box::new(move || {
|
||||
run_data_url(input, expected_mime, expected_body, should_panic)
|
||||
})),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn run_base64(input: String, expected: Option<Vec<u8>>) {
|
||||
let result = data_url::forgiving_base64::decode_to_vec(input.as_bytes());
|
||||
match (result, expected) {
|
||||
(Ok(bytes), Some(expected)) => assert_eq!(bytes, expected),
|
||||
(Ok(bytes), None) => panic!("Expected error, got {:?}", bytes),
|
||||
(Err(e), Some(expected)) => panic!("Expected {:?}, got error {:?}", expected, e),
|
||||
(Err(_), None) => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_base64<F>(add_test: &mut F)
|
||||
where
|
||||
F: FnMut(String, bool, test::TestFn),
|
||||
{
|
||||
let known_failures = [];
|
||||
|
||||
let v: Vec<(String, Option<Vec<u8>>)> =
|
||||
serde_json::from_str(include_str!("base64.json")).unwrap();
|
||||
for (input, expected) in v {
|
||||
let should_panic = known_failures.contains(&&*input);
|
||||
add_test(
|
||||
format!("base64 {:?}", input),
|
||||
should_panic,
|
||||
test::TestFn::DynTestFn(Box::new(move || run_base64(input, expected))),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn run_mime(input: String, expected: Option<String>) {
|
||||
let result = input.parse::<data_url::mime::Mime>();
|
||||
match (result, expected) {
|
||||
(Ok(mime), Some(expected)) => assert_eq!(mime.to_string(), expected),
|
||||
(Ok(mime), None) => panic!("Expected error, got {:?}", mime),
|
||||
(Err(e), Some(expected)) => panic!("Expected {:?}, got error {:?}", expected, e),
|
||||
(Err(_), None) => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_mime<F>(add_test: &mut F)
|
||||
where
|
||||
F: FnMut(String, bool, test::TestFn),
|
||||
{
|
||||
let known_failures = [];
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum Entry {
|
||||
Comment(String),
|
||||
TestCase {
|
||||
input: String,
|
||||
output: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
let v: Vec<Entry> = serde_json::from_str(include_str!("mime-types.json")).unwrap();
|
||||
let v2: Vec<Entry> = serde_json::from_str(include_str!("generated-mime-types.json")).unwrap();
|
||||
let entries = v.into_iter().chain(v2);
|
||||
|
||||
let mut last_comment = None;
|
||||
for entry in entries {
|
||||
let (input, expected) = match entry {
|
||||
Entry::TestCase { input, output } => (input, output),
|
||||
Entry::Comment(s) => {
|
||||
last_comment = Some(s);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let should_panic = known_failures.contains(&&*input);
|
||||
add_test(
|
||||
if let Some(ref s) = last_comment {
|
||||
format!("MIME type {:?} {:?}", s, input)
|
||||
} else {
|
||||
format!("MIME type {:?}", input)
|
||||
},
|
||||
should_panic,
|
||||
test::TestFn::DynTestFn(Box::new(move || run_mime(input, expected))),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let mut tests = Vec::new();
|
||||
{
|
||||
let mut add_one = |name: String, should_panic: bool, run: test::TestFn| {
|
||||
let desc = test::TestDesc {
|
||||
name: test::DynTestName(name),
|
||||
ignore: false,
|
||||
should_panic: match should_panic {
|
||||
true => test::ShouldPanic::Yes,
|
||||
false => test::ShouldPanic::No,
|
||||
},
|
||||
allow_fail: false,
|
||||
test_type: test::TestType::Unknown,
|
||||
};
|
||||
tests.push(test::TestDescAndFn { desc, testfn: run })
|
||||
};
|
||||
collect_data_url(&mut add_one);
|
||||
collect_base64(&mut add_one);
|
||||
collect_mime(&mut add_one);
|
||||
}
|
||||
test::test_main(&std::env::args().collect::<Vec<_>>(), tests, None)
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue