Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

View file

@ -0,0 +1 @@
{"files":{"Cargo.toml":"8fb633b01ea17947fd1f76e0a447b027ddf48c019671f09e39c1338ab715605d","README.md":"3c7136fc446143eecf9668e6daf7f096eb5eb3a3e312cc674571719cb4c83bcc","src/cases/camelcase/mod.rs":"8e65fca78ea88acb32c0f214cafde39b849aef253253c3681e316f2559b26977","src/cases/case/mod.rs":"16323191c983d316debd50af11f94f4c525bb70d4d1a02db06a9aed67d4ba2a9","src/cases/classcase/mod.rs":"5b6b74530a2a693bf1ac89342f1b25f58f39336b1ee3242547c3d6ef468a878f","src/cases/kebabcase/mod.rs":"b317ebd42f22daab4b23bb4b83ce85f053d7088680d3a32eecbd13bd5331587a","src/cases/mod.rs":"e272853bcc1c5f6eb02594038febb9dcebb6eca8eac744d6e503db5082e585c6","src/cases/pascalcase/mod.rs":"a44feed6d8877fd8a31160076befe826960aa001d859587aef2dddc1aedc397b","src/cases/screamingsnakecase/mod.rs":"21582eb1ec2170d379bf3536c6ffb39b8bdc096efe2d493674458ee27b86e985","src/cases/sentencecase/mod.rs":"eb21d7d5bf0b23e1325d429dfdc149081d233a8b950c1fdfe04b4bebcc2c0ddb","src/cases/snakecase/mod.rs":"369739e37e700c028022f308aa78504873c10a5e88768f05249c1c8481b30c9d","src/cases/tablecase/mod.rs":"a6a50a397059d775a517d5dce6ba612b107919e209a9eb56871a5c1d42314664","src/cases/titlecase/mod.rs":"3f0dac5e5b434da9234d6c389f67bb2d3c8f138dc521fa29dbe3791f8eaf5341","src/cases/traincase/mod.rs":"4e2493d6594d3c505de293c69390b3f672c0fd4d35603ae1a1aae48166bc18c2","src/lib.rs":"6c5cf60f5c2f8778a3ad7638f37064527b8a86f164117d867b8b6532e2cc655e","src/numbers/deordinalize/mod.rs":"a3930b0315d20d2d86747bc4ae653a0fb7f7d80de497b0aaa9873aadd1459d18","src/numbers/mod.rs":"fed4e090f8b64a34ae64ddcb68d899cfa4dd8e8422a060be01a70dbdb71b85e0","src/numbers/ordinalize/mod.rs":"ce0d88977efaa50792e7311c0e0a73a3115928f9f7be77f914824c3d80eab66c","src/string/constants/mod.rs":"38de3d5060a5d224d28d184eab8af02203c65d74c1d380720c3260ea205f3e05","src/string/deconstantize/mod.rs":"c79f2170dc41bd6abb89a6e74fbdd87bf011f62cfe1f34d8886fda0724ade6fa","src/string/demodulize/mod.rs":"bbcb5314473e4ca02feee4903e31a332caaa912ed2cbca0f49c2fe411a826215","src/string/mod.r
s":"570f7ea4dd646f2d633ddd67079db922cc2cadf916719fa19c2f59b4d522ee89","src/string/pluralize/mod.rs":"5f07fab8b5f4e7af546f1e907426724714b9b27af1ecb59a91e57dccd0833a6e","src/string/singularize/mod.rs":"9c2d833cbcdc1489013642de22578d51f558a31e8d2fea4536a27f8fa1114169","src/suffix/foreignkey/mod.rs":"e7ad9a9a0a21fcb53becb36306a15eedf67958e2da18ae928ae592177e70e7a3","src/suffix/mod.rs":"f6f99ce6fc8794d5411d91533b67be5d4a2bc5994317d32f405b2fa3c5ec660d","tests/lib.rs":"e1cfcea8a146291396ff72b0a2e84c2b9ddaa0103717442c4921c165a2ab470d"},"package":"fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"}

42
third-party/vendor/Inflector/Cargo.toml vendored Normal file
View file

@ -0,0 +1,42 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "Inflector"
version = "0.11.4"
authors = ["Josh Teeter<joshteeter@gmail.com>"]
include = ["**/*.rs", "Cargo.toml", "README.md"]
description = "Adds String based inflections for Rust. Snake, kebab, camel, sentence, class, title and table cases as well as ordinalize, deordinalize, demodulize, foreign key, and pluralize/singularize are supported as both traits and pure functions acting on String types.\n"
homepage = "https://github.com/whatisinternet/inflector"
documentation = "https://docs.rs/Inflector"
readme = "README.md"
keywords = ["pluralize", "Inflector", "camel", "snake", "inflection"]
categories = ["text-processing", "value-formatting"]
license = "BSD-2-Clause"
repository = "https://github.com/whatisinternet/inflector"
[lib]
name = "inflector"
[dependencies.lazy_static]
version = "1.2.0"
optional = true
[dependencies.regex]
version = "1.1"
optional = true
[features]
default = ["heavyweight"]
heavyweight = ["regex", "lazy_static"]
unstable = []
[badges.travis-ci]
repository = "whatisinternet/Inflector"

136
third-party/vendor/Inflector/README.md vendored Normal file
View file

@ -0,0 +1,136 @@
# Rust Inflector
[![Build Status](https://travis-ci.org/whatisinternet/Inflector.svg?branch=master)](https://travis-ci.org/whatisinternet/Inflector) [![Crates.io](https://img.shields.io/crates/v/Inflector.svg)](https://crates.io/crates/inflector)[![Crate downloads](https://img.shields.io/crates/d/Inflector.svg)](https://crates.io/crates/inflector)
Adds String based inflections for Rust. Snake, kebab, train, camel,
sentence, class, and title cases as well as ordinalize,
deordinalize, demodulize, deconstantize, foreign key, table case, and pluralize/singularize are supported as both traits and pure functions
acting on &str and String types.
-----
## Documentation:
Documentation can be found here in this README or via the Rust docs linked below.
[Rust docs with examples](https://docs.rs/Inflector)
-----
## Installation:
### As a [crate](http://crates.io)
```toml
[dependencies]
Inflector = "*"
```
### Compile yourself:
1. Install [Rust and cargo](http://doc.crates.io/)
2. git clone https://github.com/whatisinternet/Inflector
3. Library: cd inflector && cargo build --release --lib
4. You can find the library in target/release
## Usage / Example:
```rust
...
// to use methods like String.to_lower_case();
extern crate inflector;
use inflector::Inflector;
...
fn main() {
...
let camel_case_string: String = "some_string".to_camel_case();
...
}
```
Or
```rust
...
// to use methods like to_snake_case(&str);
extern crate inflector;
// use inflector::cases::classcase::to_class_case;
// use inflector::cases::classcase::is_class_case;
// use inflector::cases::camelcase::to_camel_case;
// use inflector::cases::camelcase::is_camel_case;
// use inflector::cases::pascalcase::to_pascal_case;
// use inflector::cases::pascalcase::is_pascal_case;
// use inflector::cases::screamingsnakecase::to_screamingsnake_case;
// use inflector::cases::screamingsnakecase::is_screamingsnake_case;
// use inflector::cases::snakecase::to_snake_case;
// use inflector::cases::snakecase::is_snake_case;
// use inflector::cases::kebabcase::to_kebab_case;
// use inflector::cases::kebabcase::is_kebab_case;
// use inflector::cases::traincase::to_train_case;
// use inflector::cases::traincase::is_train_case;
// use inflector::cases::sentencecase::to_sentence_case;
// use inflector::cases::sentencecase::is_sentence_case;
// use inflector::cases::titlecase::to_title_case;
// use inflector::cases::titlecase::is_title_case;
// use inflector::cases::tablecase::to_table_case;
// use inflector::cases::tablecase::is_table_case;
// use inflector::numbers::ordinalize::ordinalize;
// use inflector::numbers::deordinalize::deordinalize;
// use inflector::suffix::foreignkey::to_foreign_key;
// use inflector::suffix::foreignkey::is_foreign_key;
// use inflector::string::demodulize::demodulize;
// use inflector::string::deconstantize::deconstantize;
// use inflector::string::pluralize::to_plural;
// use inflector::string::singularize::to_singular;
...
fn main() {
...
let camel_case_string: String = to_camel_case("some_string");
...
}
```
## Advanced installation and usage:
If the project doesn't require singularize, pluralize, class, table, demodulize,
or deconstantize, then in your `Cargo.toml` you may wish to specify:
```toml
[dependencies.Inflector]
version = "*"
default-features = false
```
Or
```toml
Inflector = {version="*", default-features=false}
```
To test this crate locally with features off try:
```shell
cargo test --no-default-features
```
## [Contributing](CONTRIBUTING.md)
This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.

View file

@ -0,0 +1,370 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to camelCase `String`
///
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "FooBar3";
/// let expected_string: String = "fooBar3".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::camelcase::to_camel_case;
/// let mock_string: &str = "Foo-Bar";
/// let expected_string: String = "fooBar".to_string();
/// let asserted_string: String = to_camel_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_camel_case(non_camelized_string: &str) -> String {
    // camelCase preset: no separator injection, first word starts lowercase
    // (new_word/first_word both false, so the very first char is lowercased).
    let options = CamelOptions {
        new_word: false,
        last_char: ' ',
        first_word: false,
        injectable_char: ' ',
        has_seperator: false,
        inverted: false,
    };
    // `non_camelized_string` is already a `&str`; the previous `&` produced a
    // needless `&&str` that only compiled via deref coercion
    // (clippy::needless_borrow).
    to_case_camel_like(non_camelized_string, options)
}
/// Determines if a `&str` is camelCase — returns `bool`
///
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "Foo";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "foo";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "fooBarIsAReallyReally3LongString";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::camelcase::is_camel_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_camel_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
pub fn is_camel_case(test_string: &str) -> bool {
    // Round-trip check: a string is camelCase iff converting it is a no-op.
    // The old `&test_string.clone()` only copied the `&str` reference and
    // produced a `&&str` (clippy::clone_on_copy / needless_borrow).
    to_camel_case(test_string) == test_string
}
// Nightly-only micro-benchmarks; compiled only when both the "unstable"
// feature is enabled and the crate is built for test.
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;
    // Sentence-case input.
    #[bench]
    fn bench_camel0(b: &mut Bencher) {
        b.iter(|| {
            let test_string = "Foo bar";
            super::to_camel_case(test_string)
        });
    }
    // snake_case input.
    #[bench]
    fn bench_camel1(b: &mut Bencher) {
        b.iter(|| {
            let test_string = "foo_bar";
            super::to_camel_case(test_string)
        });
    }
    // Already-camelCase input (conversion is a no-op).
    #[bench]
    fn bench_camel2(b: &mut Bencher) {
        b.iter(|| {
            let test_string = "fooBar";
            super::to_camel_case(test_string)
        });
    }
    // Detection predicate (full round-trip internally).
    #[bench]
    fn bench_is_camel(b: &mut Bencher) {
        b.iter(|| {
            let test_string: &str = "Foo bar";
            super::is_camel_case(test_string)
        });
    }
}
#[cfg(test)]
mod tests {
    use ::to_camel_case;
    use ::is_camel_case;
    // Conversions: every supported source case must normalize to "fooBar",
    // and non-alphanumeric noise must be stripped.
    #[test]
    fn from_camel_case() {
        let convertable_string: String = "fooBar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_pascal_case() {
        let convertable_string: String = "FooBar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_kebab_case() {
        let convertable_string: String = "foo-bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_sentence_case() {
        let convertable_string: String = "Foo bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_title_case() {
        let convertable_string: String = "Foo Bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_train_case() {
        let convertable_string: String = "Foo-Bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_screaming_snake_case() {
        let convertable_string: String = "FOO_BAR".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_snake_case() {
        let convertable_string: String = "foo_bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn from_case_with_loads_of_space() {
        let convertable_string: String = "foo bar".to_owned();
        let expected: String = "fooBar".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn a_name_with_a_dot() {
        let convertable_string: String = "Robert C. Martin".to_owned();
        let expected: String = "robertCMartin".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn random_text_with_bad_chars() {
        let convertable_string: String = "Random text with *(bad) chars".to_owned();
        let expected: String = "randomTextWithBadChars".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn trailing_bad_chars() {
        let convertable_string: String = "trailing bad_chars*(()())".to_owned();
        let expected: String = "trailingBadChars".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn leading_bad_chars() {
        let convertable_string: String = "-!#$%leading bad chars".to_owned();
        let expected: String = "leadingBadChars".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn wrapped_in_bad_chars() {
        let convertable_string: String = "-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))".to_owned();
        let expected: String = "wrappedInBadChars".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    #[test]
    fn has_a_sign() {
        let convertable_string: String = "has a + sign".to_owned();
        let expected: String = "hasASign".to_owned();
        assert_eq!(to_camel_case(&convertable_string), expected)
    }
    // Detection: is_camel_case accepts only strings already in camelCase.
    #[test]
    fn is_correct_from_camel_case() {
        let convertable_string: String = "fooBar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), true)
    }
    #[test]
    fn is_correct_from_pascal_case() {
        let convertable_string: String = "FooBar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_kebab_case() {
        let convertable_string: String = "foo-bar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_sentence_case() {
        let convertable_string: String = "Foo bar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_title_case() {
        let convertable_string: String = "Foo Bar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_train_case() {
        let convertable_string: String = "Foo-Bar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        let convertable_string: String = "FOO_BAR".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
    #[test]
    fn is_correct_from_snake_case() {
        let convertable_string: String = "foo_bar".to_owned();
        assert_eq!(is_camel_case(&convertable_string), false)
    }
}

View file

@ -0,0 +1,303 @@
#![deny(warnings)]
#[allow(unknown_lints)]
#[allow(unused_imports)]
use std::ascii::*;
/// Configuration for `to_case_camel_like`; each camel-like target case
/// (camelCase, PascalCase, Train-Case, sentence case, ...) supplies its own
/// preset. (The "seperator" misspelling is part of the public API.)
pub struct CamelOptions {
    /// Initial "start a new word" state — when true, the first real
    /// character is treated as a word boundary.
    pub new_word: bool,
    /// Seed for the previous-character tracker (' ' means "none seen yet").
    pub last_char: char,
    /// Initial value of the first-word tracker; see `append_on_new_word`
    /// for how it affects separator injection and word-start casing.
    pub first_word: bool,
    /// Character injected between words when `has_seperator` is true.
    pub injectable_char: char,
    /// Whether to inject `injectable_char` between words.
    pub has_seperator: bool,
    /// When true, word starts after the first word are lowercased instead
    /// of uppercased.
    pub inverted: bool,
}
/// Shared worker for the snake-like cases (snake, kebab, SCREAMING_SNAKE,
/// Train, ...). Runs of non-alphanumeric characters collapse into the first
/// char of `replace_with` (falling back to '_'), and a separator is inserted
/// before an interior uppercase char; `case` selects output casing:
/// "lower" for lowercase, anything else for uppercase.
pub fn to_case_snake_like(convertable_string: &str, replace_with: &str, case: &str) -> String {
    // true before the first real char and again right after a separator, so
    // leading separators are dropped and runs collapse to a single one.
    let mut first_character: bool = true;
    // Worst case every char gains a separator, hence capacity * 2.
    let mut result: String = String::with_capacity(convertable_string.len() * 2);
    // NOTE(review): char_indices yields BYTE offsets, but
    // next_or_previous_char_is_lowercase (reached via requires_seperator)
    // indexes with chars().nth(), i.e. CHAR positions — these disagree on
    // multi-byte (non-ASCII) input; confirm whether that is intended.
    for char_with_index in trim_right(convertable_string).char_indices() {
        if char_is_seperator(&char_with_index.1) {
            if !first_character {
                first_character = true;
                result.push(replace_with.chars().nth(0).unwrap_or('_'));
            }
        } else if requires_seperator(char_with_index, first_character, &convertable_string) {
            // Interior uppercase (the 'B' in "fooBar") starts a new word.
            first_character = false;
            result = snake_like_with_seperator(result, replace_with, &char_with_index.1, case)
        } else {
            first_character = false;
            result = snake_like_no_seperator(result, &char_with_index.1, case)
        }
    }
    result
}
/// Shared worker for the camel-like cases (camelCase, PascalCase,
/// Train-Case, sentence case, ...); `camel_options` selects the variant.
pub fn to_case_camel_like(convertable_string: &str, camel_options: CamelOptions) -> String {
    // When set, the next real character starts a new word.
    let mut new_word: bool = camel_options.new_word;
    let mut first_word: bool = camel_options.first_word;
    let mut last_char: char = camel_options.last_char;
    // false until the first alphanumeric char, so leading junk is skipped.
    let mut found_real_char: bool = false;
    // Worst case each char gains an injected separator, hence capacity * 2.
    let mut result: String = String::with_capacity(convertable_string.len() * 2);
    for character in trim_right(convertable_string).chars() {
        if char_is_seperator(&character) && found_real_char {
            // Collapse separator runs into a single word boundary.
            new_word = true;
        } else if !found_real_char && is_not_alphanumeric(character) {
            // Still inside the leading-junk prefix; drop the char entirely.
            continue;
        } else if character.is_numeric() {
            // Digits are copied verbatim and force the next letter to start
            // a new word ("fooBar3" keeps the 3, next letter is uppercased).
            found_real_char = true;
            new_word = true;
            result.push(character);
        } else if last_char_lower_current_is_upper_or_new_word(new_word, last_char, character) {
            // Word boundary: delegate casing/separator choice to the options.
            found_real_char = true;
            new_word = false;
            result = append_on_new_word(result, first_word, character, &camel_options);
            first_word = false;
        } else {
            found_real_char = true;
            last_char = character;
            result.push(character.to_ascii_lowercase());
        }
    }
    result
}
#[inline]
fn append_on_new_word(mut result: String, first_word: bool, character: char, camel_options: &CamelOptions) -> String {
    // A separator goes in front of every word except the first one.
    if not_first_word_and_has_seperator(first_word, camel_options.has_seperator) {
        result.push(camel_options.injectable_char);
    }
    // Inverted cases lowercase word starts after the first word; everything
    // else uppercases them.
    let word_start = if first_word_or_not_inverted(first_word, camel_options.inverted) {
        character.to_ascii_uppercase()
    } else {
        character.to_ascii_lowercase()
    };
    result.push(word_start);
    result
}
// True exactly when a separator should be injected: the target case uses one
// and we are past the first word.
fn not_first_word_and_has_seperator(first_word: bool, has_seperator: bool) -> bool {
    match (first_word, has_seperator) {
        (false, true) => true,
        _ => false,
    }
}
// Word starts are uppercased unless the case is inverted; even an inverted
// case keeps the uppercase on its very first word.
fn first_word_or_not_inverted(first_word: bool, inverted: bool) -> bool {
    if inverted {
        first_word
    } else {
        true
    }
}
// A word starts when the flag is already set, or at a lower-to-upper
// transition (the ' ' sentinel, used before any char was seen, never counts
// as a lowercase predecessor).
fn last_char_lower_current_is_upper_or_new_word(new_word: bool, last_char: char, character: char) -> bool {
    if new_word {
        return true;
    }
    last_char != ' ' && last_char.is_lowercase() && character.is_uppercase()
}
// Any non-alphanumeric character acts as a word separator.
fn char_is_seperator(character: &char) -> bool {
    is_not_alphanumeric(*character)
}
// Strips trailing non-alphanumeric junk ("abc----^" -> "abc") before any
// conversion; leading junk is skipped by the converters themselves.
fn trim_right(convertable_string: &str) -> &str {
    convertable_string.trim_end_matches(is_not_alphanumeric)
}
// Unicode-aware: true for anything that is neither a letter nor a digit.
fn is_not_alphanumeric(character: char) -> bool {
    !char::is_alphanumeric(character)
}
#[inline]
fn requires_seperator(char_with_index: (usize, char), first_character: bool, convertable_string: &str) -> bool {
    // The first character of a word never needs a separator in front of it.
    if first_character {
        return false;
    }
    let (index, character) = char_with_index;
    // Separate before an uppercase char that borders a lowercase neighbour.
    char_is_uppercase(character) && next_or_previous_char_is_lowercase(convertable_string, index)
}
#[inline]
fn snake_like_no_seperator(mut accumlator: String, current_char: &char, case: &str) -> String {
    // "lower" selects lowercase output; any other value means uppercase.
    let cased = if case == "lower" {
        current_char.to_ascii_lowercase()
    } else {
        current_char.to_ascii_uppercase()
    };
    accumlator.push(cased);
    accumlator
}
#[inline]
fn snake_like_with_seperator(mut accumlator: String, replace_with: &str, current_char: &char, case: &str) -> String {
    // Separator first: the first char of `replace_with`, '_' when empty.
    accumlator.push(replace_with.chars().next().unwrap_or('_'));
    // Then the character in the requested casing ("lower" vs anything else).
    if case == "lower" {
        accumlator.push(current_char.to_ascii_lowercase());
    } else {
        accumlator.push(current_char.to_ascii_uppercase());
    }
    accumlator
}
/// True when the char one position after or one position before
/// `char_with_index` is lowercase; out-of-range neighbours default to 'A'
/// (i.e. "not lowercase").
fn next_or_previous_char_is_lowercase(convertable_string: &str, char_with_index: usize) -> bool {
    let next_is_lower = convertable_string
        .chars()
        .nth(char_with_index + 1)
        .unwrap_or('A')
        .is_lowercase();
    // `char_with_index - 1` underflowed (panicking in debug builds) when the
    // index was 0; checked_sub treats "no previous char" like the other
    // out-of-range case and falls back to the non-lowercase default.
    let previous_is_lower = char_with_index
        .checked_sub(1)
        .and_then(|i| convertable_string.chars().nth(i))
        .unwrap_or('A')
        .is_lowercase();
    next_is_lower || previous_is_lower
}
// "Uppercase" here means "not an ASCII lowercase letter" — digits and
// symbols count as uppercase, exactly like the original
// `c == c.to_ascii_uppercase()` comparison (which only changes a-z).
fn char_is_uppercase(test_char: char) -> bool {
    !test_char.is_ascii_lowercase()
}
// Unit tests for the private case helpers above.
#[test]
fn test_trim_bad_chars() {
    assert_eq!("abc", trim_right("abc----^"))
}
#[test]
fn test_trim_bad_chars_when_none_are_bad() {
    assert_eq!("abc", trim_right("abc"))
}
#[test]
fn test_is_not_alphanumeric_on_is_alphanumeric() {
    assert!(!is_not_alphanumeric('a'))
}
#[test]
fn test_is_not_alphanumeric_on_is_not_alphanumeric() {
    assert!(is_not_alphanumeric('_'))
}
#[test]
fn test_char_is_uppercase_when_it_is() {
    assert_eq!(char_is_uppercase('A'), true)
}
#[test]
fn test_char_is_uppercase_when_it_is_not() {
    assert_eq!(char_is_uppercase('a'), false)
}
// Index 3 is 't' in "TestWWW": previous char 's' is lowercase.
#[test]
fn test_next_or_previous_char_is_lowercase_true() {
    assert_eq!(next_or_previous_char_is_lowercase("TestWWW", 3), true)
}
#[test]
fn test_next_or_previous_char_is_lowercase_false() {
    assert_eq!(next_or_previous_char_is_lowercase("TestWWW", 5), false)
}
#[test]
fn snake_like_with_seperator_lowers() {
    assert_eq!(snake_like_with_seperator("".to_owned(), "^", &'c', "lower"), "^c".to_string())
}
#[test]
fn snake_like_with_seperator_upper() {
    assert_eq!(snake_like_with_seperator("".to_owned(), "^", &'c', "upper"), "^C".to_string())
}
#[test]
fn snake_like_no_seperator_lower() {
    assert_eq!(snake_like_no_seperator("".to_owned(), &'C', "lower"), "c".to_string())
}
#[test]
fn snake_like_no_seperator_upper() {
    assert_eq!(snake_like_no_seperator("".to_owned(), &'c', "upper"), "C".to_string())
}
#[test]
fn requires_seperator_upper_not_first_wrap_is_safe_current_upper() {
    assert_eq!(requires_seperator((2, 'C'), false, "test"), true)
}
#[test]
fn requires_seperator_upper_not_first_wrap_is_safe_current_lower() {
    assert_eq!(requires_seperator((2, 'c'), false, "test"), false)
}
// first_character = true short-circuits before any neighbour lookup.
#[test]
fn requires_seperator_upper_first_wrap_is_safe_current_upper() {
    assert_eq!(requires_seperator((0, 'T'), true, "Test"), false)
}
#[test]
fn requires_seperator_upper_first_wrap_is_safe_current_lower() {
    assert_eq!(requires_seperator((0, 't'), true, "Test"), false)
}
#[test]
fn requires_seperator_upper_first_wrap_is_safe_current_lower_next_is_too() {
    assert_eq!(requires_seperator((0, 't'), true, "test"), false)
}
#[test]
fn test_char_is_seperator_dash() {
    assert_eq!(char_is_seperator(&'-'), true)
}
#[test]
fn test_char_is_seperator_underscore() {
    assert_eq!(char_is_seperator(&'_'), true)
}
#[test]
fn test_char_is_seperator_space() {
    assert_eq!(char_is_seperator(&' '), true)
}
#[test]
fn test_char_is_seperator_when_not() {
    assert_eq!(char_is_seperator(&'A'), false)
}
#[test]
fn test_last_char_lower_current_is_upper_or_new_word_with_new_word() {
    assert_eq!(last_char_lower_current_is_upper_or_new_word(true, ' ', '-'), true)
}
#[test]
fn test_last_char_lower_current_is_upper_or_new_word_last_char_space() {
    assert_eq!(last_char_lower_current_is_upper_or_new_word(false, ' ', '-'), false)
}
#[test]
fn test_last_char_lower_current_is_upper_or_new_word_last_char_lower_current_upper() {
    assert_eq!(last_char_lower_current_is_upper_or_new_word(false, 'a', 'A'), true)
}
#[test]
fn test_last_char_lower_current_is_upper_or_new_word_last_char_upper_current_upper() {
    assert_eq!(last_char_lower_current_is_upper_or_new_word(false, 'A', 'A'), false)
}
#[test]
fn test_last_char_lower_current_is_upper_or_new_word_last_char_upper_current_lower() {
    assert_eq!(last_char_lower_current_is_upper_or_new_word(false, 'A', 'a'), false)
}
#[test]
fn test_first_word_or_not_inverted_with_first_word() {
    assert_eq!(first_word_or_not_inverted(true, false), true)
}
#[test]
fn test_first_word_or_not_inverted_not_first_word_not_inverted() {
    assert_eq!(first_word_or_not_inverted(false, false), true)
}
#[test]
fn test_first_word_or_not_inverted_not_first_word_is_inverted() {
    assert_eq!(first_word_or_not_inverted(false, true), false)
}
#[test]
fn test_not_first_word_and_has_seperator_is_first_and_not_seperator() {
    assert_eq!(not_first_word_and_has_seperator(true, false), false)
}
#[test]
fn test_not_first_word_and_has_seperator_not_first_and_not_seperator() {
    assert_eq!(not_first_word_and_has_seperator(false, false), false)
}
#[test]
fn test_not_first_word_and_has_seperator_not_first_and_has_seperator() {
    assert_eq!(not_first_word_and_has_seperator(false, true), true)
}

View file

@ -0,0 +1,393 @@
#![deny(warnings)]
use cases::case::*;
#[cfg(feature = "heavyweight")]
use string::singularize::to_singular;
#[cfg(feature = "heavyweight")]
/// Converts a `&str` to `ClassCase` `String`
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "FooBars";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "foo_bars";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::to_class_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_class_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_class_case(non_class_case_string: &str) -> String {
    // PascalCase preset: the first real character starts a new (uppercased)
    // word, no separators injected.
    let options = CamelOptions {
        new_word: true,
        last_char: ' ',
        first_word: false,
        injectable_char: ' ',
        has_seperator: false,
        inverted: false,
    };
    let class_plural = to_case_camel_like(non_class_case_string, options);
    // rfind gives the byte index of the LAST uppercase char; splitting there
    // isolates the final word so only it is singularized
    // ("FooBars" -> "Foo" + to_singular("Bars")).
    let split: (&str, &str) =
        class_plural.split_at(class_plural.rfind(char::is_uppercase).unwrap_or(0));
    format!("{}{}", split.0, to_singular(split.1))
}
#[cfg(feature = "heavyweight")]
/// Determines if a `&str` is `ClassCase` `bool`
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "Foo";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "foo";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongStrings";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::classcase::is_class_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_class_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
pub fn is_class_case(test_string: &str) -> bool {
    // A string is ClassCase exactly when converting it is a no-op.
    // `test_string` is already a `&str`; the previous
    // `&test_string.clone()` was a no-op copy plus an extra reference.
    to_class_case(test_string) == test_string
}
#[cfg(all(feature = "unstable", test))]
#[cfg(feature = "heavyweight")]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;

    // Conversion starting from sentence-style input.
    #[bench]
    fn bench_class_case(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::to_class_case(input));
    }

    // Round-trip detection check.
    #[bench]
    fn bench_is_class(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::is_class_case(input));
    }

    // Conversion starting from snake_case input.
    #[bench]
    fn bench_class_from_snake(b: &mut Bencher) {
        let input = "foo_bar";
        b.iter(|| super::to_class_case(input));
    }
}
#[cfg(test)]
#[cfg(feature = "heavyweight")]
mod tests {
    use ::to_class_case;
    use ::is_class_case;

    // Shared assertion: `input` must convert to `expected`.
    fn converts(input: &str, expected: &str) {
        assert_eq!(to_class_case(input), expected)
    }

    // Shared assertion: detection of `input` must match `expected`.
    fn detects(input: &str, expected: bool) {
        assert_eq!(is_class_case(input), expected)
    }

    #[test]
    fn from_camel_case() {
        converts("fooBar", "FooBar")
    }
    #[test]
    fn from_pascal_case() {
        converts("FooBar", "FooBar")
    }
    #[test]
    fn from_kebab_case() {
        converts("foo-bar", "FooBar")
    }
    #[test]
    fn from_sentence_case() {
        converts("Foo bar", "FooBar")
    }
    #[test]
    fn from_title_case() {
        converts("Foo Bar", "FooBar")
    }
    #[test]
    fn from_train_case() {
        converts("Foo-Bar", "FooBar")
    }
    #[test]
    fn from_screaming_class_case() {
        converts("FOO_BAR", "FooBar")
    }
    #[test]
    fn from_snake_case() {
        converts("foo_bar", "FooBar")
    }
    #[test]
    fn from_table_case() {
        // ClassCase also singularizes the trailing word.
        converts("foo_bars", "FooBar")
    }
    #[test]
    fn from_case_with_loads_of_space() {
        converts("foo bar", "FooBar")
    }
    #[test]
    fn a_name_with_a_dot() {
        converts("Robert C. Martin", "RobertCMartin")
    }
    #[test]
    fn random_text_with_bad_chars() {
        converts("Random text with *(bad) chars", "RandomTextWithBadChar")
    }
    #[test]
    fn trailing_bad_chars() {
        converts("trailing bad_chars*(()())", "TrailingBadChar")
    }
    #[test]
    fn leading_bad_chars() {
        converts("-!#$%leading bad chars", "LeadingBadChar")
    }
    #[test]
    fn wrapped_in_bad_chars() {
        converts("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))", "WrappedInBadChar")
    }
    #[test]
    fn has_a_sign() {
        converts("has a + sign", "HasASign")
    }
    #[test]
    fn is_correct_from_class_case() {
        detects("fooBar", false)
    }
    #[test]
    fn is_correct_from_pascal_case() {
        detects("FooBar", true)
    }
    #[test]
    fn is_correct_from_kebab_case() {
        detects("foo-bar", false)
    }
    #[test]
    fn is_correct_from_sentence_case() {
        detects("Foo bar", false)
    }
    #[test]
    fn is_correct_from_title_case() {
        detects("Foo Bar", false)
    }
    #[test]
    fn is_correct_from_train_case() {
        detects("Foo-Bar", false)
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        detects("FOO_BAR", false)
    }
    #[test]
    fn is_correct_from_snake_case() {
        detects("foo_bar", false)
    }
    #[test]
    fn is_correct_from_table_case() {
        detects("FooBar", true)
    }
}

View file

@ -0,0 +1,262 @@
#![deny(warnings)]
use cases::case::*;
/// Determines if a `&str` is `kebab-case`
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::is_kebab_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_kebab_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
pub fn is_kebab_case(test_string: &str) -> bool {
    // A string is kebab-case exactly when converting it is a no-op.
    // `test_string` is already a `&str`; the previous
    // `test_string.clone()` was a no-op copy of the reference.
    test_string == to_kebab_case(test_string)
}
/// Converts a `&str` to `kebab-case` `String`
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::kebabcase::to_kebab_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "foo-bar".to_string();
/// let asserted_string: String = to_kebab_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_kebab_case(non_kebab_case_string: &str) -> String {
    // kebab-case is the snake-like family with a hyphen separator
    // and lower-cased words.
    let separator = "-";
    to_case_snake_like(non_kebab_case_string, separator, "lower")
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;

    // Conversion starting from sentence-style input.
    #[bench]
    fn bench_kebab(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::to_kebab_case(input));
    }

    // Round-trip detection check.
    #[bench]
    fn bench_is_kebab(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::is_kebab_case(input));
    }

    // Conversion starting from snake_case input.
    #[bench]
    fn bench_kebab_from_snake(b: &mut Bencher) {
        let input = "test_test_test";
        b.iter(|| super::to_kebab_case(input));
    }
}
#[cfg(test)]
mod tests {
    use ::to_kebab_case;
    use ::is_kebab_case;

    // Shared assertion: `input` must convert to `expected`.
    fn converts(input: &str, expected: &str) {
        assert_eq!(to_kebab_case(input), expected)
    }

    // Shared assertion: detection of `input` must match `expected`.
    fn detects(input: &str, expected: bool) {
        assert_eq!(is_kebab_case(input), expected)
    }

    #[test]
    fn from_camel_case() {
        converts("fooBar", "foo-bar")
    }
    #[test]
    fn from_pascal_case() {
        converts("FooBar", "foo-bar")
    }
    #[test]
    fn from_kebab_case() {
        converts("foo-bar", "foo-bar")
    }
    #[test]
    fn from_sentence_case() {
        converts("Foo bar", "foo-bar")
    }
    #[test]
    fn from_title_case() {
        converts("Foo Bar", "foo-bar")
    }
    #[test]
    fn from_train_case() {
        converts("Foo-Bar", "foo-bar")
    }
    #[test]
    fn from_screaming_snake_case() {
        converts("FOO_BAR", "foo-bar")
    }
    #[test]
    fn from_snake_case() {
        converts("foo_bar", "foo-bar")
    }
    #[test]
    fn is_correct_from_camel_case() {
        detects("fooBar", false)
    }
    #[test]
    fn is_correct_from_pascal_case() {
        detects("FooBar", false)
    }
    #[test]
    fn is_correct_from_kebab_case() {
        detects("foo-bar", true)
    }
    #[test]
    fn is_correct_from_sentence_case() {
        detects("Foo bar", false)
    }
    #[test]
    fn is_correct_from_title_case() {
        detects("Foo Bar", false)
    }
    #[test]
    fn is_correct_from_train_case() {
        detects("Foo-Bar", false)
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        detects("FOO_BAR", false)
    }
    #[test]
    fn is_correct_from_snake_case() {
        detects("foo_bar", false)
    }
}

View file

@ -0,0 +1,52 @@
// Private shared machinery (CamelOptions, to_case_camel_like,
// to_case_snake_like) used by every case module below.
mod case;
/// Provides conversion to and detection of class case strings.
///
/// This version singularizes strings.
///
/// Example string `ClassCase`
pub mod classcase;
/// Provides conversion to and detection of camel case strings.
///
/// Example string `camelCase`
pub mod camelcase;
/// Provides conversion to and detection of snake case strings.
///
/// Example string `snake_case`
pub mod snakecase;
/// Provides conversion to and detection of screaming snake case strings.
///
/// Example string `SCREAMING_SNAKE_CASE`
pub mod screamingsnakecase;
/// Provides conversion to and detection of kebab case strings.
///
/// Example string `kebab-case`
pub mod kebabcase;
/// Provides conversion to and detection of train case strings.
///
/// Example string `Train-Case`
pub mod traincase;
/// Provides conversion to and detection of sentence case strings.
///
/// Example string `Sentence case`
pub mod sentencecase;
/// Provides conversion to and detection of title case strings.
///
/// Example string `Title Case`
pub mod titlecase;
/// Provides conversion to and detection of table case strings.
///
/// Example string `table_cases`
pub mod tablecase;
/// Provides conversion to and detection of pascal case strings.
///
/// Example string `PascalCase`
pub mod pascalcase;

View file

@ -0,0 +1,360 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to `PascalCase` `String`
///
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "FooBar".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::pascalcase::to_pascal_case;
/// let mock_string: &str = "FooBar3";
/// let expected_string: String = "FooBar3".to_string();
/// let asserted_string: String = to_pascal_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_pascal_case(non_pascalized_string: &str) -> String {
    // PascalCase capitalizes every word (including the first) and
    // injects no separator between words.
    let pascal_options = CamelOptions {
        new_word: true,
        first_word: false,
        last_char: ' ',
        injectable_char: ' ',
        has_seperator: false,
        inverted: false,
    };
    to_case_camel_like(non_pascalized_string, pascal_options)
}
/// Determines if a `&str` is `PascalCase` `bool`
///
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "Foo";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "foo";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "FooBarIsAReallyReally3LongString";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == true);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
///
///
/// ```
/// ```
/// use inflector::cases::pascalcase::is_pascal_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_pascal_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
pub fn is_pascal_case(test_string: &str) -> bool {
    // A string is PascalCase exactly when converting it is a no-op.
    // `test_string` is already a `&str`; the previous
    // `test_string.clone()` was a no-op copy of the reference.
    to_pascal_case(test_string) == test_string
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;

    // Conversion starting from sentence-style input.
    #[bench]
    fn bench_pascal0(b: &mut Bencher) {
        b.iter(|| super::to_pascal_case("Foo bar"));
    }

    // Conversion starting from snake_case input.
    #[bench]
    fn bench_pascal1(b: &mut Bencher) {
        b.iter(|| super::to_pascal_case("foo_bar"));
    }

    // Conversion starting from camelCase input.
    #[bench]
    fn bench_pascal2(b: &mut Bencher) {
        b.iter(|| super::to_pascal_case("fooBar"));
    }

    // Round-trip detection check.
    #[bench]
    fn bench_is_pascal(b: &mut Bencher) {
        b.iter(|| super::is_pascal_case("Foo bar"));
    }
}
#[cfg(test)]
mod tests {
    use ::to_pascal_case;
    use ::is_pascal_case;

    // Shared assertion: `input` must convert to `expected`.
    fn converts(input: &str, expected: &str) {
        assert_eq!(to_pascal_case(input), expected)
    }

    // Shared assertion: detection of `input` must match `expected`.
    fn detects(input: &str, expected: bool) {
        assert_eq!(is_pascal_case(input), expected)
    }

    #[test]
    fn from_camel_case() {
        converts("fooBar", "FooBar")
    }
    #[test]
    fn from_pascal_case() {
        converts("FooBar", "FooBar")
    }
    #[test]
    fn from_kebab_case() {
        converts("foo-bar", "FooBar")
    }
    #[test]
    fn from_sentence_case() {
        converts("Foo bar", "FooBar")
    }
    #[test]
    fn from_title_case() {
        converts("Foo Bar", "FooBar")
    }
    #[test]
    fn from_train_case() {
        converts("Foo-Bar", "FooBar")
    }
    #[test]
    fn from_screaming_snake_case() {
        converts("FOO_BAR", "FooBar")
    }
    #[test]
    fn from_snake_case() {
        converts("foo_bar", "FooBar")
    }
    #[test]
    fn from_case_with_loads_of_space() {
        converts("foo bar", "FooBar")
    }
    #[test]
    fn a_name_with_a_dot() {
        converts("Robert C. Martin", "RobertCMartin")
    }
    #[test]
    fn random_text_with_bad_chars() {
        converts("Random text with *(bad) chars", "RandomTextWithBadChars")
    }
    #[test]
    fn trailing_bad_chars() {
        converts("trailing bad_chars*(()())", "TrailingBadChars")
    }
    #[test]
    fn leading_bad_chars() {
        converts("-!#$%leading bad chars", "LeadingBadChars")
    }
    #[test]
    fn wrapped_in_bad_chars() {
        converts("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))", "WrappedInBadChars")
    }
    #[test]
    fn has_a_sign() {
        converts("has a + sign", "HasASign")
    }
    #[test]
    fn is_correct_from_camel_case() {
        detects("fooBar", false)
    }
    #[test]
    fn is_correct_from_pascal_case() {
        detects("FooBar", true)
    }
    #[test]
    fn is_correct_from_kebab_case() {
        detects("foo-bar", false)
    }
    #[test]
    fn is_correct_from_sentence_case() {
        detects("Foo bar", false)
    }
    #[test]
    fn is_correct_from_title_case() {
        detects("Foo Bar", false)
    }
    #[test]
    fn is_correct_from_train_case() {
        detects("Foo-Bar", false)
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        detects("FOO_BAR", false)
    }
    #[test]
    fn is_correct_from_snake_case() {
        detects("foo_bar", false)
    }
}

View file

@ -0,0 +1,253 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to `SCREAMING_SNAKE_CASE` `String`
///
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "HTTP Foo bar";
/// let expected_string: String = "HTTP_FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "FOO_BAR".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::to_screaming_snake_case;
/// let mock_string: &str = "fooBar3";
/// let expected_string: String = "FOO_BAR_3".to_string();
/// let asserted_string: String = to_screaming_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_screaming_snake_case(non_snake_case_string: &str) -> String {
    // SCREAMING_SNAKE_CASE is the snake-like family with an underscore
    // separator and upper-cased words.
    let separator = "_";
    to_case_snake_like(non_snake_case_string, separator, "upper")
}
/// Determines if a `&str` is `SCREAMING_SNAKE_CASE`
///
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "FOO_BAR1_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
/// ```
/// use inflector::cases::screamingsnakecase::is_screaming_snake_case;
/// let mock_string: &str = "FOO_BAR_1_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_screaming_snake_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
pub fn is_screaming_snake_case(test_string: &str) -> bool {
    // A string is SCREAMING_SNAKE_CASE exactly when converting it is a
    // no-op. `test_string` is already a `&str`; the previous
    // `test_string.clone()` was a no-op copy of the reference.
    test_string == to_screaming_snake_case(test_string)
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;

    // Conversion starting from sentence-style input.
    #[bench]
    fn bench_screaming_snake(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::to_screaming_snake_case(input));
    }

    // Round-trip detection check.
    #[bench]
    fn bench_is_screaming_snake(b: &mut Bencher) {
        let input = "Foo bar";
        b.iter(|| super::is_screaming_snake_case(input));
    }
}
#[cfg(test)]
mod tests {
    use ::to_screaming_snake_case;
    use ::is_screaming_snake_case;

    // Shared assertion: `input` must convert to `expected`.
    fn converts(input: &str, expected: &str) {
        assert_eq!(to_screaming_snake_case(input), expected)
    }

    // Shared assertion: detection of `input` must match `expected`.
    fn detects(input: &str, expected: bool) {
        assert_eq!(is_screaming_snake_case(input), expected)
    }

    #[test]
    fn from_camel_case() {
        converts("fooBar", "FOO_BAR")
    }
    #[test]
    fn from_pascal_case() {
        converts("FooBar", "FOO_BAR")
    }
    #[test]
    fn from_kebab_case() {
        converts("foo-bar", "FOO_BAR")
    }
    #[test]
    fn from_sentence_case() {
        converts("Foo bar", "FOO_BAR")
    }
    #[test]
    fn from_title_case() {
        converts("Foo Bar", "FOO_BAR")
    }
    #[test]
    fn from_train_case() {
        converts("Foo-Bar", "FOO_BAR")
    }
    #[test]
    fn from_screaming_screaming_snake_case() {
        converts("FOO_BAR", "FOO_BAR")
    }
    #[test]
    fn from_snake_case() {
        converts("foo_bar", "FOO_BAR")
    }
    #[test]
    fn is_correct_from_camel_case() {
        detects("fooBar", false)
    }
    #[test]
    fn is_correct_from_pascal_case() {
        detects("FooBar", false)
    }
    #[test]
    fn is_correct_from_kebab_case() {
        detects("foo-bar", false)
    }
    #[test]
    fn is_correct_from_sentence_case() {
        detects("Foo bar", false)
    }
    #[test]
    fn is_correct_from_title_case() {
        detects("Foo Bar", false)
    }
    #[test]
    fn is_correct_from_train_case() {
        detects("Foo-Bar", false)
    }
    #[test]
    fn is_correct_from_screaming_screaming_snake_case() {
        detects("FOO_BAR", true)
    }
    #[test]
    fn is_correct_from_snake_case() {
        detects("foo_bar", false)
    }
}

View file

@ -0,0 +1,313 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to `Sentence case` `String`
///
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::to_sentence_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "Foo bar".to_string();
/// let asserted_string: String = to_sentence_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_sentence_case(non_sentence_case_string: &str) -> String {
    // Sentence case keeps a capital only on the first word; `inverted: true`
    // lower-cases every subsequent word, and words are rejoined with a
    // single space.
    let sentence_options = CamelOptions {
        first_word: true,
        new_word: true,
        last_char: ' ',
        injectable_char: ' ',
        has_seperator: true,
        inverted: true,
    };
    to_case_camel_like(non_sentence_case_string, sentence_options)
}
/// Determines of a `&str` is `Sentence case`
///
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "Foo";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "foo";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::sentencecase::is_sentence_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_sentence_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
pub fn is_sentence_case(test_string: &str) -> bool {
    // Fixed-point check: a string is already Sentence case iff converting it
    // leaves it unchanged. The original called `.clone()` on the `&str`,
    // which merely copies the reference (clippy: clone_on_copy) — pass the
    // slice straight through instead.
    test_string == to_sentence_case(test_string)
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    // Benchmarks rely on the nightly-only `test` crate, hence the extra
    // `unstable` feature gate above.
    extern crate test;
    use self::test::Bencher;

    // Mixed-case input exercises the word-boundary detection path.
    #[bench]
    fn bench_sentence(b: &mut Bencher) {
        b.iter(|| super::to_sentence_case("Foo BAR"));
    }

    // Input already in Sentence case: measures the fixed-point check.
    #[bench]
    fn bench_is_sentence(b: &mut Bencher) {
        b.iter(|| super::is_sentence_case("Foo bar"));
    }

    // Separator-driven conversion from snake_case.
    #[bench]
    fn bench_sentence_from_snake(b: &mut Bencher) {
        b.iter(|| super::to_sentence_case("foo_bar"));
    }
}
#[cfg(test)]
mod tests {
    use ::to_sentence_case;
    use ::is_sentence_case;

    // --- to_sentence_case: every supported input style should normalise to
    // --- "Foo bar" (first word capitalised, the rest lower-cased).

    #[test]
    fn from_camel_case() {
        assert_eq!(to_sentence_case("fooBar"), "Foo bar");
    }

    #[test]
    fn from_pascal_case() {
        assert_eq!(to_sentence_case("FooBar"), "Foo bar");
    }

    #[test]
    fn from_kebab_case() {
        assert_eq!(to_sentence_case("foo-bar"), "Foo bar");
    }

    #[test]
    fn from_sentence_case() {
        assert_eq!(to_sentence_case("Foo bar"), "Foo bar");
    }

    #[test]
    fn from_title_case() {
        assert_eq!(to_sentence_case("Foo Bar"), "Foo bar");
    }

    #[test]
    fn from_train_case() {
        assert_eq!(to_sentence_case("Foo-Bar"), "Foo bar");
    }

    #[test]
    fn from_screaming_snake_case() {
        assert_eq!(to_sentence_case("FOO_BAR"), "Foo bar");
    }

    #[test]
    fn from_snake_case() {
        assert_eq!(to_sentence_case("foo_bar"), "Foo bar");
    }

    #[test]
    fn from_case_with_loads_of_space() {
        assert_eq!(to_sentence_case("foo bar"), "Foo bar");
    }

    // Punctuation and symbols act as separators and are dropped.

    #[test]
    fn a_name_with_a_dot() {
        assert_eq!(to_sentence_case("Robert C. Martin"), "Robert c martin");
    }

    #[test]
    fn random_text_with_bad_chars() {
        assert_eq!(
            to_sentence_case("Random text with *(bad) chars"),
            "Random text with bad chars"
        );
    }

    #[test]
    fn trailing_bad_chars() {
        assert_eq!(
            to_sentence_case("trailing bad_chars*(()())"),
            "Trailing bad chars"
        );
    }

    #[test]
    fn leading_bad_chars() {
        assert_eq!(
            to_sentence_case("-!#$%leading bad chars"),
            "Leading bad chars"
        );
    }

    #[test]
    fn wrapped_in_bad_chars() {
        assert_eq!(
            to_sentence_case("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))"),
            "Wrapped in bad chars"
        );
    }

    #[test]
    fn has_a_sign() {
        assert_eq!(to_sentence_case("has a + sign"), "Has a sign");
    }

    // --- is_sentence_case: only a string already in Sentence case reports
    // --- true.

    #[test]
    fn is_correct_from_camel_case() {
        assert!(!is_sentence_case("fooBar"));
    }

    #[test]
    fn is_correct_from_pascal_case() {
        assert!(!is_sentence_case("FooBar"));
    }

    #[test]
    fn is_correct_from_kebab_case() {
        assert!(!is_sentence_case("foo-bar"));
    }

    #[test]
    fn is_correct_from_sentence_case() {
        assert!(is_sentence_case("Foo bar"));
    }

    #[test]
    fn is_correct_from_title_case() {
        assert!(!is_sentence_case("Foo Bar"));
    }

    #[test]
    fn is_correct_from_train_case() {
        assert!(!is_sentence_case("Foo-Bar"));
    }

    #[test]
    fn is_correct_from_screaming_snake_case() {
        assert!(!is_sentence_case("FOO_BAR"));
    }

    #[test]
    fn is_correct_from_snake_case() {
        assert!(!is_sentence_case("foo_bar"));
    }
}

View file

@ -0,0 +1,334 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to `snake_case` `String`
///
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "HTTP Foo bar";
/// let expected_string: String = "http_foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "HTTPFooBar";
/// let expected_string: String = "http_foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "foo_bar".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::snakecase::to_snake_case;
/// let mock_string: &str = "fooBar3";
/// let expected_string: String = "foo_bar_3".to_string();
/// let asserted_string: String = to_snake_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_snake_case(non_snake_case_string: &str) -> String {
    // Words are joined with "_" and every character is forced to lower case
    // by the shared snake-like converter.
    let separator: &str = "_";
    to_case_snake_like(non_snake_case_string, separator, "lower")
}
/// Determines of a `&str` is `snake_case`
///
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "FOO_BAR_IS_A_REALLY_REALLY_LONG_STRING";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "foo_bar1_string_that_is_really_really_long";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::snakecase::is_snake_case;
/// let mock_string: &str = "foo_bar_1_string_that_is_really_really_long";
/// let asserted_bool: bool = is_snake_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
pub fn is_snake_case(test_string: &str) -> bool {
    // Fixed-point check: a string is snake_case iff converting it leaves it
    // unchanged. The original called `.clone()` on the `&str`, which merely
    // copies the reference (clippy: clone_on_copy) — pass the slice
    // straight through instead.
    test_string == to_snake_case(test_string)
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    // Benchmarks rely on the nightly-only `test` crate, hence the extra
    // `unstable` feature gate above.
    extern crate test;
    use self::test::Bencher;

    // Space-separated Title-ish input.
    #[bench]
    fn bench_snake_from_title(b: &mut Bencher) {
        b.iter(|| super::to_snake_case("Foo bar"));
    }

    // camelCase input exercises boundary detection on case changes.
    #[bench]
    fn bench_snake_from_camel(b: &mut Bencher) {
        b.iter(|| super::to_snake_case("fooBar"));
    }

    // Already snake_case (longer input): conversion is a fixed point.
    #[bench]
    fn bench_snake_from_snake(b: &mut Bencher) {
        b.iter(|| super::to_snake_case("foo_bar_bar_bar"));
    }

    #[bench]
    fn bench_is_snake(b: &mut Bencher) {
        b.iter(|| super::is_snake_case("Foo bar"));
    }
}
#[cfg(test)]
mod tests {
    use ::to_snake_case;
    use ::is_snake_case;

    // --- to_snake_case: every supported input style should normalise to
    // --- "foo_bar".

    #[test]
    fn from_camel_case() {
        assert_eq!(to_snake_case("fooBar"), "foo_bar");
    }

    #[test]
    fn from_pascal_case() {
        assert_eq!(to_snake_case("FooBar"), "foo_bar");
    }

    #[test]
    fn from_kebab_case() {
        assert_eq!(to_snake_case("foo-bar"), "foo_bar");
    }

    #[test]
    fn from_sentence_case() {
        assert_eq!(to_snake_case("Foo bar"), "foo_bar");
    }

    #[test]
    fn from_title_case() {
        assert_eq!(to_snake_case("Foo Bar"), "foo_bar");
    }

    #[test]
    fn from_train_case() {
        assert_eq!(to_snake_case("Foo-Bar"), "foo_bar");
    }

    #[test]
    fn from_screaming_snake_case() {
        assert_eq!(to_snake_case("FOO_BAR"), "foo_bar");
    }

    #[test]
    fn from_snake_case() {
        assert_eq!(to_snake_case("foo_bar"), "foo_bar");
    }

    #[test]
    fn from_case_with_loads_of_space() {
        assert_eq!(to_snake_case("foo bar"), "foo_bar");
    }

    // Punctuation and symbols act as separators and are dropped.

    #[test]
    fn a_name_with_a_dot() {
        assert_eq!(to_snake_case("Robert C. Martin"), "robert_c_martin");
    }

    #[test]
    fn random_text_with_bad_chars() {
        assert_eq!(
            to_snake_case("Random text with *(bad) chars"),
            "random_text_with_bad_chars"
        );
    }

    #[test]
    fn trailing_bad_chars() {
        assert_eq!(to_snake_case("trailing bad_chars*(()())"), "trailing_bad_chars");
    }

    #[test]
    fn leading_bad_chars() {
        assert_eq!(to_snake_case("-!#$%leading bad chars"), "leading_bad_chars");
    }

    #[test]
    fn wrapped_in_bad_chars() {
        assert_eq!(
            to_snake_case("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))"),
            "wrapped_in_bad_chars"
        );
    }

    #[test]
    fn has_a_sign() {
        assert_eq!(to_snake_case("has a + sign"), "has_a_sign");
    }

    // --- is_snake_case: only a string already in snake_case reports true.

    #[test]
    fn is_correct_from_camel_case() {
        assert!(!is_snake_case("fooBar"));
    }

    #[test]
    fn is_correct_from_pascal_case() {
        assert!(!is_snake_case("FooBar"));
    }

    #[test]
    fn is_correct_from_kebab_case() {
        assert!(!is_snake_case("foo-bar"));
    }

    #[test]
    fn is_correct_from_sentence_case() {
        assert!(!is_snake_case("Foo bar"));
    }

    #[test]
    fn is_correct_from_title_case() {
        assert!(!is_snake_case("Foo Bar"));
    }

    #[test]
    fn is_correct_from_train_case() {
        assert!(!is_snake_case("Foo-Bar"));
    }

    #[test]
    fn is_correct_from_screaming_snake_case() {
        assert!(!is_snake_case("FOO_BAR"));
    }

    #[test]
    fn is_correct_from_snake_case() {
        assert!(is_snake_case("foo_bar"));
    }
}

View file

@ -0,0 +1,271 @@
#![deny(warnings)]
#[cfg(feature = "heavyweight")]
use string::pluralize::to_plural;
#[cfg(feature = "heavyweight")]
use cases::case::*;
#[cfg(feature = "heavyweight")]
/// Converts a `&str` to `table-case` `String`
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
///
/// ```
/// use inflector::cases::tablecase::to_table_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "foo_bars".to_string();
/// let asserted_string: String = to_table_case(mock_string);
/// assert!(asserted_string == expected_string);
/// ```
pub fn to_table_case(non_table_case_string: &str) -> String {
    // Normalise to snake_case first, then pluralise only the final word.
    let snake_cased: String = to_case_snake_like(non_table_case_string, "_", "lower");
    // Byte index of the last "_"; 0 when the string is a single word, in
    // which case the whole string is pluralised.
    let tail_start: usize = snake_cased.rfind('_').unwrap_or(0);
    let (head, last_word) = snake_cased.split_at(tail_start);
    format!("{}{}", head, to_plural(last_word))
}
#[cfg(feature = "heavyweight")]
/// Determines if a `&str` is `table-case`
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "foo_bar_strings";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == true);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
///
/// ```
/// use inflector::cases::tablecase::is_table_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_table_case(mock_string);
/// assert!(asserted_bool == false);
/// ```
pub fn is_table_case(test_string: &str) -> bool {
    // Fixed-point check: a string is table case iff converting it leaves it
    // unchanged. The original wrote `&test_string.clone()`: cloning a `&str`
    // only copies the reference (clippy: clone_on_copy) and the extra borrow
    // produced a needless `&&str` — pass the slice straight through instead.
    to_table_case(test_string) == test_string
}
#[cfg(all(feature = "unstable", test))]
#[cfg(feature = "heavyweight")]
mod benchmarks {
    // Benchmarks rely on the nightly-only `test` crate (`unstable` gate);
    // table case also needs the pluralisation code behind "heavyweight".
    extern crate test;
    use self::test::Bencher;

    #[bench]
    fn bench_table_case(b: &mut Bencher) {
        b.iter(|| super::to_table_case("Foo bar"));
    }

    #[bench]
    fn bench_is_table_case(b: &mut Bencher) {
        b.iter(|| super::is_table_case("Foo bar"));
    }
}
#[cfg(test)]
#[cfg(feature = "heavyweight")]
mod tests {
    use ::to_table_case;
    use ::is_table_case;

    // --- to_table_case: every supported input style should normalise to
    // --- the pluralised snake_case form "foo_bars".

    #[test]
    fn from_camel_case() {
        assert_eq!(to_table_case("fooBar"), "foo_bars");
    }

    #[test]
    fn from_pascal_case() {
        assert_eq!(to_table_case("FooBar"), "foo_bars");
    }

    #[test]
    fn from_kebab_case() {
        assert_eq!(to_table_case("foo-bar"), "foo_bars");
    }

    #[test]
    fn from_sentence_case() {
        assert_eq!(to_table_case("Foo bar"), "foo_bars");
    }

    #[test]
    fn from_title_case() {
        assert_eq!(to_table_case("Foo Bar"), "foo_bars");
    }

    #[test]
    fn from_train_case() {
        assert_eq!(to_table_case("Foo-Bar"), "foo_bars");
    }

    #[test]
    fn from_screaming_snake_case() {
        assert_eq!(to_table_case("FOO_BAR"), "foo_bars");
    }

    #[test]
    fn from_snake_case() {
        assert_eq!(to_table_case("foo_bar"), "foo_bars");
    }

    #[test]
    fn from_table_case() {
        // Already table case: conversion is a fixed point.
        assert_eq!(to_table_case("foo_bars"), "foo_bars");
    }

    // --- is_table_case: only a string already in table case reports true.

    #[test]
    fn is_correct_from_camel_case() {
        assert!(!is_table_case("fooBar"));
    }

    #[test]
    fn is_correct_from_pascal_case() {
        assert!(!is_table_case("FooBar"));
    }

    #[test]
    fn is_correct_from_kebab_case() {
        assert!(!is_table_case("foo-bar"));
    }

    #[test]
    fn is_correct_from_sentence_case() {
        assert!(!is_table_case("Foo bar"));
    }

    #[test]
    fn is_correct_from_title_case() {
        assert!(!is_table_case("Foo Bar"));
    }

    #[test]
    fn is_correct_from_train_case() {
        assert!(!is_table_case("Foo-Bar"));
    }

    #[test]
    fn is_correct_from_screaming_snake_case() {
        assert!(!is_table_case("FOO_BAR"));
    }

    #[test]
    fn is_correct_from_snake_case() {
        assert!(!is_table_case("foo_bar"));
    }

    #[test]
    fn is_correct_from_table_case() {
        assert!(is_table_case("foo_bars"));
    }
}

View file

@ -0,0 +1,308 @@
#![deny(warnings)]
use cases::case::*;
/// Converts a `&str` to `Title Case` `String`
///
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::cases::titlecase::to_title_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "Foo Bar".to_string();
/// let asserted_string: String = to_title_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_title_case(non_title_case_string: &str) -> String {
    // Title Case capitalises every word; `inverted: false` keeps a capital
    // at each word boundary, and words are rejoined with a single space.
    let title_options = CamelOptions {
        first_word: true,
        new_word: true,
        last_char: ' ',
        injectable_char: ' ',
        has_seperator: true,
        inverted: false,
    };
    to_case_camel_like(non_title_case_string, title_options)
}
/// Determines if a `&str` is `Title Case`
///
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "FOO_BAR_STRING_THAT_IS_REALLY_REALLY_LONG";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "foo";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::cases::titlecase::is_title_case;
/// let mock_string: &str = "Foo Bar String That Is Really Really Long";
/// let asserted_bool: bool = is_title_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
pub fn is_title_case(test_string: &str) -> bool {
    // A string is already Title Case exactly when converting it is a no-op.
    // (`&str` is `Copy`; the original `.clone()` only copied the reference,
    // so it is dropped here — clippy's `clone_on_copy`.)
    test_string == to_title_case(test_string)
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;
    use super::{is_title_case, to_title_case};

    // Mixed-case input exercising the word-splitting path.
    #[bench]
    fn bench_title(bencher: &mut Bencher) {
        bencher.iter(|| to_title_case("Foo BAR"));
    }

    // Predicate cost on a near-miss (sentence case) input.
    #[bench]
    fn bench_is_title(bencher: &mut Bencher) {
        bencher.iter(|| is_title_case("Foo bar"));
    }

    // Conversion starting from snake_case.
    #[bench]
    fn bench_title_from_snake(bencher: &mut Bencher) {
        bencher.iter(|| to_title_case("foo_bar"));
    }
}
#[cfg(test)]
mod tests {
    use ::to_title_case;
    use ::is_title_case;

    // Conversion tests: every source casing should normalise to `Title Case`.
    #[test]
    fn from_camel_case() {
        assert_eq!(to_title_case("fooBar"), "Foo Bar");
    }
    #[test]
    fn from_pascal_case() {
        assert_eq!(to_title_case("FooBar"), "Foo Bar");
    }
    #[test]
    fn from_kebab_case() {
        assert_eq!(to_title_case("foo-bar"), "Foo Bar");
    }
    #[test]
    fn from_sentence_case() {
        assert_eq!(to_title_case("Foo bar"), "Foo Bar");
    }
    #[test]
    fn from_title_case() {
        assert_eq!(to_title_case("Foo Bar"), "Foo Bar");
    }
    #[test]
    fn from_train_case() {
        assert_eq!(to_title_case("Foo-Bar"), "Foo Bar");
    }
    #[test]
    fn from_screaming_snake_case() {
        assert_eq!(to_title_case("FOO_BAR"), "Foo Bar");
    }
    #[test]
    fn from_snake_case() {
        assert_eq!(to_title_case("foo_bar"), "Foo Bar");
    }
    #[test]
    fn from_case_with_loads_of_space() {
        assert_eq!(to_title_case("foo bar"), "Foo Bar");
    }
    #[test]
    fn a_name_with_a_dot() {
        assert_eq!(to_title_case("Robert C. Martin"), "Robert C Martin");
    }
    #[test]
    fn random_text_with_bad_chars() {
        assert_eq!(
            to_title_case("Random text with *(bad) chars"),
            "Random Text With Bad Chars"
        );
    }
    #[test]
    fn trailing_bad_chars() {
        assert_eq!(to_title_case("trailing bad_chars*(()())"), "Trailing Bad Chars");
    }
    #[test]
    fn leading_bad_chars() {
        assert_eq!(to_title_case("-!#$%leading bad chars"), "Leading Bad Chars");
    }
    #[test]
    fn wrapped_in_bad_chars() {
        assert_eq!(
            to_title_case("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))"),
            "Wrapped In Bad Chars"
        );
    }
    #[test]
    fn has_a_sign() {
        assert_eq!(to_title_case("has a + sign"), "Has A Sign");
    }

    // Predicate tests: only genuine `Title Case` input should pass.
    #[test]
    fn is_correct_from_camel_case() {
        assert!(!is_title_case("fooBar"));
    }
    #[test]
    fn is_correct_from_pascal_case() {
        assert!(!is_title_case("FooBar"));
    }
    #[test]
    fn is_correct_from_kebab_case() {
        assert!(!is_title_case("foo-bar"));
    }
    #[test]
    fn is_correct_from_sentence_case() {
        assert!(!is_title_case("Foo bar"));
    }
    #[test]
    fn is_correct_from_title_case() {
        assert!(is_title_case("Foo Bar"));
    }
    #[test]
    fn is_correct_from_train_case() {
        assert!(!is_title_case("Foo-Bar"));
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        assert!(!is_title_case("FOO_BAR"));
    }
    #[test]
    fn is_correct_from_snake_case() {
        assert!(!is_title_case("foo_bar"));
    }
}

View file

@ -0,0 +1,320 @@
#![deny(warnings)]
use cases::case::*;
/// Determines if a `&str` is `Train-Case`
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "Foo-Bar-String-That-Is-Really-Really-Long";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::is_train_case;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_train_case(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
pub fn is_train_case(test_string: &str) -> bool {
    // A string is already Train-Case exactly when converting it is a no-op.
    // (`&str` is `Copy`; the original `.clone()` only copied the reference,
    // so it is dropped here — clippy's `clone_on_copy`.)
    test_string == to_train_case(test_string)
}
/// Converts a `&str` to `Train-Case` `String`
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "foo-bar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "FOO_BAR";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
/// ```
/// use inflector::cases::traincase::to_train_case;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "Foo-Bar".to_string();
/// let asserted_string: String = to_train_case(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_train_case(non_train_case_string: &str) -> String {
    // Train-Case: every word capitalized, words joined by hyphens.
    // Delegates to the shared camel-like converter with '-' as the
    // injected word separator.
    to_case_camel_like(non_train_case_string, CamelOptions {
        new_word: true,
        last_char: ' ',
        first_word: true,
        injectable_char: '-',
        has_seperator: true,
        inverted: false,
    })
}
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;
    use super::{is_train_case, to_train_case};

    // Conversion cost on sentence-case input.
    #[bench]
    fn bench_train(bencher: &mut Bencher) {
        bencher.iter(|| to_train_case("Foo bar"));
    }

    // Predicate cost on a near-miss input.
    #[bench]
    fn bench_is_train(bencher: &mut Bencher) {
        bencher.iter(|| is_train_case("Foo bar"));
    }

    // Conversion starting from a longer snake_case string.
    #[bench]
    fn bench_train_from_snake(bencher: &mut Bencher) {
        bencher.iter(|| to_train_case("test_test_test"));
    }
}
#[cfg(test)]
mod tests {
    use ::to_train_case;
    use ::is_train_case;

    // Conversion tests: every source casing should normalise to `Train-Case`.
    #[test]
    fn from_camel_case() {
        assert_eq!(to_train_case("fooBar"), "Foo-Bar");
    }
    #[test]
    fn from_pascal_case() {
        assert_eq!(to_train_case("FooBar"), "Foo-Bar");
    }
    #[test]
    fn from_kebab_case() {
        assert_eq!(to_train_case("foo-bar"), "Foo-Bar");
    }
    #[test]
    fn from_sentence_case() {
        assert_eq!(to_train_case("Foo bar"), "Foo-Bar");
    }
    #[test]
    fn from_title_case() {
        assert_eq!(to_train_case("Foo Bar"), "Foo-Bar");
    }
    #[test]
    fn from_train_case() {
        assert_eq!(to_train_case("Foo-Bar"), "Foo-Bar");
    }
    #[test]
    fn from_screaming_snake_case() {
        assert_eq!(to_train_case("FOO_BAR"), "Foo-Bar");
    }
    #[test]
    fn from_snake_case() {
        assert_eq!(to_train_case("foo_bar"), "Foo-Bar");
    }
    #[test]
    fn from_case_with_loads_of_space() {
        assert_eq!(to_train_case("foo bar"), "Foo-Bar");
    }
    #[test]
    fn a_name_with_a_dot() {
        assert_eq!(to_train_case("Robert C. Martin"), "Robert-C-Martin");
    }
    #[test]
    fn random_text_with_bad_chars() {
        assert_eq!(
            to_train_case("Random text with *(bad) chars"),
            "Random-Text-With-Bad-Chars"
        );
    }
    #[test]
    fn trailing_bad_chars() {
        assert_eq!(to_train_case("trailing bad_chars*(()())"), "Trailing-Bad-Chars");
    }
    #[test]
    fn leading_bad_chars() {
        assert_eq!(to_train_case("-!#$%leading bad chars"), "Leading-Bad-Chars");
    }
    #[test]
    fn wrapped_in_bad_chars() {
        assert_eq!(
            to_train_case("-!#$%wrapped in bad chars&*^*&(&*^&(<><?>><?><>))"),
            "Wrapped-In-Bad-Chars"
        );
    }
    #[test]
    fn has_a_sign() {
        assert_eq!(to_train_case("has a + sign"), "Has-A-Sign");
    }

    // Predicate tests: only genuine `Train-Case` input should pass.
    #[test]
    fn is_correct_from_camel_case() {
        assert!(!is_train_case("fooBar"));
    }
    #[test]
    fn is_correct_from_pascal_case() {
        assert!(!is_train_case("FooBar"));
    }
    #[test]
    fn is_correct_from_kebab_case() {
        assert!(!is_train_case("foo-bar"));
    }
    #[test]
    fn is_correct_from_sentence_case() {
        assert!(!is_train_case("Foo bar"));
    }
    #[test]
    fn is_correct_from_title_case() {
        assert!(!is_train_case("Foo Bar"));
    }
    #[test]
    fn is_correct_from_train_case() {
        assert!(is_train_case("Foo-Bar"));
    }
    #[test]
    fn is_correct_from_screaming_snake_case() {
        assert!(!is_train_case("FOO_BAR"));
    }
    #[test]
    fn is_correct_from_snake_case() {
        assert!(!is_train_case("foo_bar"));
    }
}

332
third-party/vendor/Inflector/src/lib.rs vendored Normal file
View file

@ -0,0 +1,332 @@
#![deny(warnings, unused_variables, missing_docs, unsafe_code, unused_extern_crates)]
#![cfg_attr(feature = "unstable", feature(test))]
//! Adds String based inflections for Rust. Snake, kebab, train, camel,
//! sentence, class, and title cases as well as ordinalize,
//! deordinalize, demodulize, deconstantize, and foreign key are supported as
//! both traits and pure functions acting on String types.
//! ```rust
//! use inflector::Inflector;
//! let camel_case_string: String = "some_string".to_camel_case();
//! let is_camel_cased: bool= camel_case_string.is_camel_case();
//! assert!(is_camel_cased == true);
//! ```
#[cfg(feature = "heavyweight")]
extern crate regex;
#[cfg(feature = "heavyweight")]
#[macro_use] extern crate lazy_static;
/// Provides case inflections
/// - Camel case
/// - Class case
/// - Kebab case
/// - Train case
/// - Screaming snake case
/// - Table case
/// - Sentence case
/// - Snake case
/// - Pascal case
pub mod cases;
/// Provides number inflections
/// - Ordinalize
/// - Deordinalize
pub mod numbers;
/// Provides suffix inflections
/// - Foreign key
pub mod suffix;
/// Provides string inflections
/// - Deconstantize
/// - Demodulize
/// - Pluralize
/// - Singularize
#[cfg(feature = "heavyweight")]
pub mod string;
#[cfg(feature = "heavyweight")]
use cases::classcase::to_class_case;
#[cfg(feature = "heavyweight")]
use cases::classcase::is_class_case;
use cases::camelcase::to_camel_case;
use cases::camelcase::is_camel_case;
use cases::pascalcase::to_pascal_case;
use cases::pascalcase::is_pascal_case;
use cases::snakecase::to_snake_case;
use cases::snakecase::is_snake_case;
use cases::screamingsnakecase::to_screaming_snake_case;
use cases::screamingsnakecase::is_screaming_snake_case;
use cases::kebabcase::to_kebab_case;
use cases::kebabcase::is_kebab_case;
use cases::traincase::to_train_case;
use cases::traincase::is_train_case;
use cases::sentencecase::to_sentence_case;
use cases::sentencecase::is_sentence_case;
use cases::titlecase::to_title_case;
use cases::titlecase::is_title_case;
#[cfg(feature = "heavyweight")]
use cases::tablecase::to_table_case;
#[cfg(feature = "heavyweight")]
use cases::tablecase::is_table_case;
use numbers::ordinalize::ordinalize;
use numbers::deordinalize::deordinalize;
use suffix::foreignkey::to_foreign_key;
use suffix::foreignkey::is_foreign_key;
#[cfg(feature = "heavyweight")]
use string::demodulize::demodulize;
#[cfg(feature = "heavyweight")]
use string::deconstantize::deconstantize;
#[cfg(feature = "heavyweight")]
use string::pluralize::to_plural;
#[cfg(feature = "heavyweight")]
use string::singularize::to_singular;
/// String inflection methods, implemented below for `String` and `str`.
///
/// Each method forwards to the free function of the same name elsewhere in
/// this crate (see the `cases`, `numbers`, and `suffix` modules). Methods
/// behind `#[cfg(feature = "heavyweight")]` exist only when that feature
/// is enabled.
#[allow(missing_docs)]
pub trait Inflector {
    // Case conversions, each paired with an `is_*` predicate.
    fn to_camel_case(&self) -> String;
    fn is_camel_case(&self) -> bool;
    fn to_pascal_case(&self) -> String;
    fn is_pascal_case(&self) -> bool;
    fn to_snake_case(&self) -> String;
    fn is_snake_case(&self) -> bool;
    fn to_screaming_snake_case(&self) -> String;
    fn is_screaming_snake_case(&self) -> bool;
    fn to_kebab_case(&self) -> String;
    fn is_kebab_case(&self) -> bool;
    fn to_train_case(&self) -> String;
    fn is_train_case(&self) -> bool;
    fn to_sentence_case(&self) -> String;
    fn is_sentence_case(&self) -> bool;
    fn to_title_case(&self) -> String;
    fn is_title_case(&self) -> bool;
    // Number and suffix inflections.
    fn ordinalize(&self) -> String;
    fn deordinalize(&self) -> String;
    fn to_foreign_key(&self) -> String;
    fn is_foreign_key(&self) -> bool;
    // The remaining methods require the "heavyweight" feature (regex-based).
    #[cfg(feature = "heavyweight")]
    fn demodulize(&self) -> String;
    #[cfg(feature = "heavyweight")]
    fn deconstantize(&self) -> String;
    #[cfg(feature = "heavyweight")]
    fn to_class_case(&self) -> String;
    #[cfg(feature = "heavyweight")]
    fn is_class_case(&self) -> bool;
    #[cfg(feature = "heavyweight")]
    fn to_table_case(&self) -> String;
    #[cfg(feature = "heavyweight")]
    fn is_table_case(&self) -> bool;
    #[cfg(feature = "heavyweight")]
    fn to_plural(&self) -> String;
    #[cfg(feature = "heavyweight")]
    fn to_singular(&self) -> String;
}
/// Ordinalization for primitive numeric types; the value is converted to a
/// `String` and passed to `numbers::ordinalize::ordinalize`.
#[allow(missing_docs)]
pub trait InflectorNumbers {
    fn ordinalize(&self) -> String;
}
/// Expands to inlined trait-method bodies that forward `self` to the free
/// function of the same name, e.g. `fn to_camel_case(&self) -> String`
/// becomes a call to `to_camel_case(self)`.
macro_rules! define_implementations {
    ( $slf:ident; $($imp_trait:ident => $typ:ident), *) => {
        $(
            #[inline]
            fn $imp_trait(&$slf) -> $typ {
                $imp_trait($slf)
            }
        )*
    }
}
/// Like `define_implementations!`, but converts `self` to a `String` first
/// so numeric receivers can be passed to the string-based free functions.
macro_rules! define_number_implementations {
    ( $slf:ident; $($imp_trait:ident => $typ:ident), *) => {
        $(
            #[inline]
            fn $imp_trait(&$slf) -> $typ {
                $imp_trait(&$slf.to_string())
            }
        )*
    }
}
/// Same forwarding shape as `define_implementations!`, but each generated
/// method is compiled only when the "heavyweight" feature is enabled.
macro_rules! define_gated_implementations {
    ( $slf:ident; $($imp_trait:ident => $typ:ident), *) => {
        $(
            #[inline]
            #[cfg(feature = "heavyweight")]
            fn $imp_trait(&$slf) -> $typ {
                $imp_trait($slf)
            }
        )*
    }
}
/// Implements trait `$trt` for each listed type, expanding the full set of
/// inflection methods: the first list via `define_implementations!` (always
/// available) and the second via `define_gated_implementations!` (behind
/// the "heavyweight" feature).
macro_rules! implement_string_for {
    ( $trt:ident; $($typ:ident), *) => {
        $(
            impl $trt for $typ {
                define_implementations![self;
                    to_camel_case => String,
                    is_camel_case => bool,
                    to_pascal_case => String,
                    is_pascal_case => bool,
                    to_screaming_snake_case => String,
                    is_screaming_snake_case => bool,
                    to_snake_case => String,
                    is_snake_case => bool,
                    to_kebab_case => String,
                    is_kebab_case => bool,
                    to_train_case => String,
                    is_train_case => bool,
                    to_sentence_case => String,
                    is_sentence_case => bool,
                    to_title_case => String,
                    is_title_case => bool,
                    to_foreign_key => String,
                    is_foreign_key => bool,
                    ordinalize => String,
                    deordinalize => String
                ];
                define_gated_implementations![self;
                    to_class_case => String,
                    is_class_case => bool,
                    to_table_case => String,
                    is_table_case => bool,
                    to_plural => String,
                    to_singular => String,
                    demodulize => String,
                    deconstantize => String
                ];
            }
        )*
    }
}
/// Implements trait `$trt` for each listed numeric type, exposing only
/// `ordinalize` (via `define_number_implementations!`, which stringifies
/// the receiver first).
macro_rules! implement_number_for {
    ( $trt:ident; $($typ:ident), *) => {
        $(
            impl $trt for $typ {
                define_number_implementations![self;
                    ordinalize => String
                ];
            }
        )*
    }
}
// `Inflector` is implemented for both owned (`String`) and borrowed (`str`)
// strings.
implement_string_for![
    Inflector;
    String, str
];
// `InflectorNumbers` provides `ordinalize` for the primitive numeric types;
// each value is converted to a `String` before inflection.
implement_number_for![
    InflectorNumbers;
    i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64
];
#[cfg(all(feature = "unstable", test))]
mod benchmarks {
    extern crate test;
    use self::test::Bencher;
    use ::Inflector;
    // Expands to one `#[bench]` function per `name => method => receiver`
    // entry, timing the trait-method call on that receiver.
    macro_rules! benchmarks {
        ( $($test_name:ident => $imp_trait:ident => $to_cast:expr), *) => {
            $(
                #[bench]
                fn $test_name(b: &mut Bencher) {
                    b.iter(|| {
                        $to_cast.$imp_trait()
                    });
                }
            )*
        }
    }
    // Methods available in every build, benchmarked on both `&str` and
    // `String` receivers.
    benchmarks![
        benchmark_str_to_camel => to_camel_case => "foo_bar",
        benchmark_str_is_camel => is_camel_case => "fooBar",
        benchmark_str_to_screaming_snake => to_screaming_snake_case => "fooBar",
        benchmark_str_is_screaming_snake => is_screaming_snake_case => "FOO_BAR",
        benchmark_str_to_snake => to_snake_case => "fooBar",
        benchmark_str_is_snake => is_snake_case => "foo_bar",
        benchmark_str_to_kebab => to_kebab_case => "fooBar",
        benchmark_str_is_kebab => is_kebab_case => "foo-bar",
        benchmark_str_to_train => to_train_case => "fooBar",
        benchmark_str_is_train => is_train_case => "Foo-Bar",
        benchmark_str_to_sentence => to_sentence_case => "fooBar",
        benchmark_str_is_sentence => is_sentence_case => "Foo bar",
        benchmark_str_to_title => to_title_case => "fooBar",
        benchmark_str_is_title => is_title_case => "Foo Bar",
        benchmark_str_ordinalize => ordinalize => "1",
        benchmark_str_deordinalize => deordinalize => "1st",
        benchmark_str_to_foreign_key => to_foreign_key => "Foo::Bar",
        benchmark_str_is_foreign_key => is_foreign_key => "bar_id",
        benchmark_string_to_camel => to_camel_case => "foo_bar".to_string(),
        benchmark_string_is_camel => is_camel_case => "fooBar".to_string(),
        benchmark_string_to_screaming_snake => to_screaming_snake_case => "fooBar".to_string(),
        benchmark_string_is_screaming_snake => is_screaming_snake_case => "FOO_BAR".to_string(),
        benchmark_string_to_snake => to_snake_case => "fooBar".to_string(),
        benchmark_string_is_snake => is_snake_case => "foo_bar".to_string(),
        benchmark_string_to_kebab => to_kebab_case => "fooBar".to_string(),
        benchmark_string_is_kebab => is_kebab_case => "foo-bar".to_string(),
        benchmark_string_to_train => to_train_case => "fooBar".to_string(),
        benchmark_string_is_train => is_train_case => "Foo-Bar".to_string(),
        benchmark_string_to_sentence => to_sentence_case => "fooBar".to_string(),
        benchmark_string_is_sentence => is_sentence_case => "Foo bar".to_string(),
        benchmark_string_to_title => to_title_case => "fooBar".to_string(),
        benchmark_string_is_title => is_title_case => "Foo Bar".to_string(),
        benchmark_string_ordinalize => ordinalize => "1".to_string(),
        benchmark_string_deordinalize => deordinalize => "1st".to_string(),
        benchmark_string_to_foreign_key => to_foreign_key => "Foo::Bar".to_string(),
        benchmark_string_is_foreign_key => is_foreign_key => "bar_id".to_string()
    ];
    // Methods gated behind the "heavyweight" feature.
    #[cfg(feature = "heavyweight")]
    benchmarks![
        benchmark_str_to_class => to_class_case => "foo",
        benchmark_str_is_class => is_class_case => "Foo",
        benchmark_str_to_table => to_table_case => "fooBar",
        benchmark_str_is_table => is_table_case => "foo_bars",
        benchmark_str_pluralize => to_plural => "crate",
        benchmark_str_singular => to_singular => "crates",
        benchmark_string_to_class => to_class_case => "foo".to_string(),
        benchmark_string_is_class => is_class_case => "Foo".to_string(),
        benchmark_string_to_table => to_table_case => "fooBar".to_string(),
        benchmark_string_is_table => is_table_case => "foo_bars".to_string(),
        benchmark_string_pluralize => to_plural => "crate".to_string(),
        benchmark_string_singular => to_singular => "crates".to_string(),
        benchmark_string_demodulize => demodulize => "Foo::Bar".to_string(),
        benchmark_string_deconstantize => deconstantize => "Foo::Bar".to_string(),
        benchmark_str_demodulize => demodulize => "Foo::Bar",
        benchmark_str_deconstantize => deconstantize => "Foo::Bar"
    ];
}

View file

@ -0,0 +1,117 @@
/// Deordinalizes a `&str`
///
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "0.1";
/// let expected_string: String = "0.1".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "-1st";
/// let expected_string: String = "-1".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "0th";
/// let expected_string: String = "0".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "1st";
/// let expected_string: String = "1".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "2nd";
/// let expected_string: String = "2".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "3rd";
/// let expected_string: String = "3".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "9th";
/// let expected_string: String = "9".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12th";
/// let expected_string: String = "12".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12000th";
/// let expected_string: String = "12000".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12001th";
/// let expected_string: String = "12001".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12002nd";
/// let expected_string: String = "12002".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12003rd";
/// let expected_string: String = "12003".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::deordinalize::deordinalize;
/// let mock_string: &str = "12004th";
/// let expected_string: String = "12004".to_owned();
/// let asserted_string: String = deordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn deordinalize(non_ordinalized_string: &str) -> String {
    // Strings containing a decimal point (e.g. "0.1") are returned untouched.
    if non_ordinalized_string.contains('.') {
        return non_ordinalized_string.to_owned();
    }
    // Strip each ordinal suffix in turn; `trim_end_matches` removes every
    // repeated trailing occurrence of the given suffix.
    ["st", "nd", "rd", "th"]
        .iter()
        .copied()
        .fold(non_ordinalized_string, |stripped, suffix| {
            stripped.trim_end_matches(suffix)
        })
        .to_owned()
}

View file

@ -0,0 +1,9 @@
#![deny(warnings)]
/// Provides ordinalization of a string.
///
/// Example string "1" becomes "1st"
pub mod ordinalize;
/// Provides deordinalization of a string.
///
/// Example string "1st" becomes "1"
pub mod deordinalize;

View file

@ -0,0 +1,147 @@
/// Ordinalizes a `&str`
///
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "a";
/// let expected_string: String = "a".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "0.1";
/// let expected_string: String = "0.1".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "-1";
/// let expected_string: String = "-1st".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "0";
/// let expected_string: String = "0th".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "1";
/// let expected_string: String = "1st".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "2";
/// let expected_string: String = "2nd".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "3";
/// let expected_string: String = "3rd".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "9";
/// let expected_string: String = "9th".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12";
/// let expected_string: String = "12th".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12000";
/// let expected_string: String = "12000th".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12001";
/// let expected_string: String = "12001st".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12002";
/// let expected_string: String = "12002nd".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12003";
/// let expected_string: String = "12003rd".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::numbers::ordinalize::ordinalize;
/// let mock_string: &str = "12004";
/// let expected_string: String = "12004th".to_owned();
/// let asserted_string: String = ordinalize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// Non-numeric endings, decimal strings, and the empty string are
/// returned unchanged.
pub fn ordinalize(non_ordinalized_string: &str) -> String {
    let chars: Vec<char> = non_ordinalized_string.chars().collect();
    // Guard the empty string: the original indexed `chars[chars.len() - 1]`
    // and panicked on "". (`&str` is `Copy`, so the original `.clone()` on
    // it was also a no-op and has been dropped.)
    let last_number: char = match chars.last() {
        Some(&last) => last,
        None => return non_ordinalized_string.to_owned(),
    };
    // Strings not ending in a digit cannot take an ordinal suffix.
    if is_ordinalizable(last_number) {
        return non_ordinalized_string.to_owned();
    }
    // Decimals are never ordinalized. Checked before the teen rule so that
    // "0.11" stays "0.11" instead of becoming "0.11th" (the original ran
    // the teen check first and appended "th" to such strings).
    if string_contains_decimal(non_ordinalized_string) {
        return non_ordinalized_string.to_owned();
    }
    // Teens take "th": 11th, 12th, 13th (and 111th, 112th, ...).
    if chars.len() > 1 && second_last_number_is_one(&chars) {
        return format!("{}th", non_ordinalized_string);
    }
    match last_number {
        '1' => format!("{}st", non_ordinalized_string),
        '2' => format!("{}nd", non_ordinalized_string),
        '3' => format!("{}rd", non_ordinalized_string),
        _ => format!("{}th", non_ordinalized_string),
    }
}
/// True when `last_number` is not a digit, i.e. the string cannot take an
/// ordinal suffix.
fn is_ordinalizable(last_number: char) -> bool {
    !last_number.is_numeric()
}
/// True when the second-to-last character is '1' (the "teen" rule).
/// Callers must ensure `chars.len() >= 2`.
fn second_last_number_is_one(chars: &[char]) -> bool {
    chars[chars.len() - 2] == '1'
}
/// True when the string contains a decimal point.
fn string_contains_decimal(non_ordinalized_string: &str) -> bool {
    non_ordinalized_string.contains('.')
}

View file

@ -0,0 +1,225 @@
// English mass/uncountable nouns with no distinct plural form; both
// `to_plural` and `to_singular` return these words unchanged.
// NOTE(review): the name misspells "UNACCOUNTABLE"/"UNCOUNTABLE", but it is
// referenced elsewhere in the crate, so it is kept for compatibility.
pub const UNACCONTABLE_WORDS: [&'static str; 202] = ["accommodation",
"adulthood",
"advertising",
"advice",
"aggression",
"aid",
"air",
"aircraft",
"alcohol",
"anger",
"applause",
"arithmetic",
"assistance",
"athletics",
"bacon",
"baggage",
"beef",
"biology",
"blood",
"botany",
"bread",
"butter",
"carbon",
"cardboard",
"cash",
"chalk",
"chaos",
"chess",
"crossroads",
"countryside",
"dancing",
"deer",
"dignity",
"dirt",
"dust",
"economics",
"education",
"electricity",
"engineering",
"enjoyment",
"envy",
"equipment",
"ethics",
"evidence",
"evolution",
"fame",
"fiction",
"flour",
"flu",
"food",
"fuel",
"fun",
"furniture",
"gallows",
"garbage",
"garlic",
"genetics",
"gold",
"golf",
"gossip",
"grammar",
"gratitude",
"grief",
"guilt",
"gymnastics",
"happiness",
"hardware",
"harm",
"hate",
"hatred",
"health",
"heat",
"help",
"homework",
"honesty",
"honey",
"hospitality",
"housework",
"humour",
"hunger",
"hydrogen",
"ice",
"importance",
"inflation",
"information",
"innocence",
"iron",
"irony",
"jam",
"jewelry",
"judo",
"karate",
"knowledge",
"lack",
"laughter",
"lava",
"leather",
"leisure",
"lightning",
"linguine",
"linguini",
"linguistics",
"literature",
"litter",
"livestock",
"logic",
"loneliness",
"luck",
"luggage",
"macaroni",
"machinery",
"magic",
"management",
"mankind",
"marble",
"mathematics",
"mayonnaise",
"measles",
"methane",
"milk",
"money",
"mud",
"music",
"mumps",
"nature",
"news",
"nitrogen",
"nonsense",
"nurture",
"nutrition",
"obedience",
"obesity",
"oxygen",
"pasta",
"patience",
"physics",
"poetry",
"pollution",
"poverty",
"pride",
"psychology",
"publicity",
"punctuation",
"quartz",
"racism",
"relaxation",
"reliability",
"research",
"respect",
"revenge",
"rice",
"rubbish",
"rum",
"safety",
"scenery",
"seafood",
"seaside",
"series",
"shame",
"sheep",
"shopping",
"sleep",
"smoke",
"smoking",
"snow",
"soap",
"software",
"soil",
"spaghetti",
"species",
"steam",
"stuff",
"stupidity",
"sunshine",
"symmetry",
"tennis",
"thirst",
"thunder",
"timber",
"traffic",
"transportation",
"trust",
"underwear",
"unemployment",
"unity",
"validity",
"veal",
"vegetation",
"vegetarianism",
"vengeance",
"violence",
"vitality",
"warmth",
"wealth",
"weather",
"welfare",
"wheat",
"wildlife",
"wisdom",
"yoga",
"zinc",
"zoology"];

View file

@ -0,0 +1,50 @@
#[cfg(feature = "heavyweight")]
use cases::classcase::to_class_case;
#[cfg(feature = "heavyweight")]
/// Deconstantizes a `&str`
///
/// ```
/// use inflector::string::deconstantize::deconstantize;
/// let mock_string: &str = "Bar";
/// let expected_string: String = "".to_owned();
/// let asserted_string: String = deconstantize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::deconstantize::deconstantize;
/// let mock_string: &str = "::Bar";
/// let expected_string: String = "".to_owned();
/// let asserted_string: String = deconstantize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::deconstantize::deconstantize;
/// let mock_string: &str = "Foo::Bar";
/// let expected_string: String = "Foo".to_owned();
/// let asserted_string: String = deconstantize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::deconstantize::deconstantize;
/// let mock_string: &str = "Test::Foo::Bar";
/// let expected_string: String = "Foo".to_owned();
/// let asserted_string: String = deconstantize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn deconstantize(non_deconstantized_string: &str) -> String {
    // Without a `::` separator there is no containing constant to report.
    if !non_deconstantized_string.contains("::") {
        return "".to_owned();
    }
    let segments: Vec<&str> = non_deconstantized_string.split("::").collect();
    // The parent is the segment immediately before the final one, class-cased.
    match segments.len() {
        0 | 1 => "".to_owned(),
        len => to_class_case(segments[len - 2]),
    }
}

View file

@ -0,0 +1,46 @@
#[cfg(feature = "heavyweight")]
use cases::classcase::to_class_case;
#[cfg(feature = "heavyweight")]
/// Demodulize a `&str`
///
/// ```
/// use inflector::string::demodulize::demodulize;
/// let mock_string: &str = "Bar";
/// let expected_string: String = "Bar".to_owned();
/// let asserted_string: String = demodulize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::demodulize::demodulize;
/// let mock_string: &str = "::Bar";
/// let expected_string: String = "Bar".to_owned();
/// let asserted_string: String = demodulize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::demodulize::demodulize;
/// let mock_string: &str = "Foo::Bar";
/// let expected_string: String = "Bar".to_owned();
/// let asserted_string: String = demodulize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::demodulize::demodulize;
/// let mock_string: &str = "Test::Foo::Bar";
/// let expected_string: String = "Bar".to_owned();
/// let asserted_string: String = demodulize(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn demodulize(non_demodulize_string: &str) -> String {
    // A string without a module path is returned unchanged (not class-cased).
    if !non_demodulize_string.contains("::") {
        return non_demodulize_string.to_owned();
    }
    // `rsplit` yields the final `::`-separated segment first.
    let last_segment = non_demodulize_string.rsplit("::").next().unwrap_or("");
    to_class_case(last_segment)
}

View file

@ -0,0 +1,23 @@
#![deny(warnings)]
/// Provides demodulization of a string.
///
/// Example string `Foo::Bar` becomes `Bar`
#[cfg(feature = "heavyweight")]
pub mod demodulize;
/// Provides deconstantization of a string.
///
/// Example string `Foo::Bar` becomes `Foo`
#[cfg(feature = "heavyweight")]
pub mod deconstantize;
/// Provides conversion to plural strings.
///
/// Example string `FooBar` -> `FooBars`
#[cfg(feature = "heavyweight")]
pub mod pluralize;
/// Provides conversion to singular strings.
///
/// Example string `FooBars` -> `FooBar`
#[cfg(feature = "heavyweight")]
pub mod singularize;
// Shared word lists (e.g. uncountable nouns) used by the modules above.
mod constants;

View file

@ -0,0 +1,194 @@
#![deny(warnings)]
use regex::Regex;
use string::constants::UNACCONTABLE_WORDS;
// Pushes one `(compiled regex, replacement suffix)` pair onto the rule
// vector `$r`. Panics at initialization time if the pattern is invalid.
macro_rules! add_rule{
    ($r:ident, $rule:expr => $replace:expr) => {
        $r.push((Regex::new($rule).unwrap(), $replace));
    }
}
// Expands a `pattern => replacement` list into repeated `add_rule!` calls
// against the same rule vector `$r`.
macro_rules! rules{
    ($r:ident; $($rule:expr => $replace:expr), *) => {
        $(
            add_rule!{$r, $rule => $replace}
        )*
    }
}
lazy_static!{
    // Pluralization rules as `(pattern, suffix appended to capture group 1)`.
    // `to_plural` walks this list in REVERSE order, so rules later in the
    // list take precedence over the earlier, more generic ones.
    static ref RULES: Vec<(Regex, &'static str)> = {
        let mut r = Vec::with_capacity(24);
        rules![r;
            r"(\w*)s$" => "s",
            r"(\w*([^aeiou]ese))$" => "",
            r"(\w*(ax|test))is$" => "es",
            r"(\w*(alias|[^aou]us|tlas|gas|ris))$" => "es",
            r"(\w*(e[mn]u))s?$" => "s",
            r"(\w*([^l]ias|[aeiou]las|[emjzr]as|[iu]am))$" => "",
            r"(\w*(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat))(?:us|i)$" => "i",
            r"(\w*(alumn|alg|vertebr))(?:a|ae)$" => "ae",
            r"(\w*(seraph|cherub))(?:im)?$" => "im",
            r"(\w*(her|at|gr))o$" => "oes",
            r"(\w*(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|automat|quor))(?:a|um)$" => "a",
            r"(\w*(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat))(?:a|on)$" => "a",
            r"(\w*)sis$" => "ses",
            r"(\w*(kni|wi|li))fe$" => "ves",
            r"(\w*(ar|l|ea|eo|oa|hoo))f$" => "ves",
            r"(\w*([^aeiouy]|qu))y$" => "ies",
            r"(\w*([^ch][ieo][ln]))ey$" => "ies",
            r"(\w*(x|ch|ss|sh|zz)es)$" => "",
            r"(\w*(x|ch|ss|sh|zz))$" => "es",
            r"(\w*(matr|cod|mur|sil|vert|ind|append))(?:ix|ex)$" => "ices",
            r"(\w*(m|l)(?:ice|ouse))$" => "ice",
            r"(\w*(pe)(?:rson|ople))$" => "ople",
            r"(\w*(child))(?:ren)?$" => "ren",
            r"(\w*eaux)$" => ""
        ];
        r
    };
}
// Early-returns `$plural` from the ENCLOSING function when `$s` exactly
// matches one of the listed irregular forms; otherwise falls through.
macro_rules! special_cases{
    ($s:ident, $($singular: expr => $plural:expr), *) => {
        match &$s[..] {
            $(
                $singular => {
                    return $plural.to_owned();
                },
            )*
            _ => ()
        }
    }
}
/// Converts a `&str` to pluralized `String`
///
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "foo_bars".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "ox";
/// let expected_string: String = "oxen".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "crate";
/// let expected_string: String = "crates".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "boxes";
/// let expected_string: String = "boxes".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "vengeance";
/// let expected_string: String = "vengeance".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "yoga";
/// let expected_string: String = "yoga".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
/// ```
/// use inflector::string::pluralize::to_plural;
/// let mock_string: &str = "geometry";
/// let expected_string: String = "geometries".to_owned();
/// let asserted_string: String = to_plural(mock_string);
/// assert_eq!(asserted_string, expected_string);
///
/// ```
///
pub fn to_plural(non_plural_string: &str) -> String {
    // Mass/uncountable nouns have no separate plural form.
    if UNACCONTABLE_WORDS.contains(&non_plural_string.as_ref()) {
        return non_plural_string.to_owned();
    }
    // Irregular plurals that no suffix rule can produce (macro early-returns).
    special_cases![non_plural_string,
        "ox" => "oxen",
        "man" => "men",
        "woman" => "women",
        "die" => "dice",
        "yes" => "yeses",
        "foot" => "feet",
        "eave" => "eaves",
        "goose" => "geese",
        "tooth" => "teeth",
        "quiz" => "quizzes"
    ];
    // Later rules are more specific, so scan the table back-to-front and
    // append the rule's suffix to the first match's primary capture group.
    for &(ref rule, replace) in RULES.iter().rev() {
        let rewritten = rule
            .captures(non_plural_string)
            .and_then(|caps| caps.get(1).map(|m| format!("{}{}", m.as_str(), replace)));
        if let Some(plural) = rewritten {
            return plural;
        }
    }
    // Fallback: the naive "append s" pluralization.
    format!("{}s", non_plural_string)
}
#[cfg(test)]
mod tests {
    // Forces the expansion below to be parsed as a complete item.
    macro_rules! as_item {
        ($i:item) => { $i };
    }
    // Generates one `#[test]` fn per `singular => plural` pair, asserting
    // `to_plural(stringify!(singular)) == stringify!(plural)`.
    macro_rules! make_tests{
        ($($singular:ident => $plural:ident); *) =>{
            $(
                as_item! {
                    #[test]
                    fn $singular(){
                        assert_eq!(
                            stringify!($plural),
                            super::to_plural(stringify!($singular))
                        );
                    }
                }
            )*
        }
    }
    // Hand-written: `box` is a reserved keyword, so `make_tests!` cannot
    // generate a test fn named `box`.
    #[test]
    fn boxes() {
        assert_eq!("boxes", super::to_plural("box"));
    }
    make_tests!{
        geometry => geometries;
        ox => oxen;
        woman => women;
        test => tests;
        axis => axes;
        knife => knives;
        agendum => agenda;
        elf => elves;
        zoology => zoology
    }
}

View file

@ -0,0 +1,189 @@
use regex::Regex;
use string::constants::UNACCONTABLE_WORDS;
// Early-returns `$plural` (here: the singular form) from the ENCLOSING
// function when `$s` exactly matches one of the listed irregular forms;
// otherwise falls through.
macro_rules! special_cases{
    ($s:ident, $($singular: expr => $plural:expr), *) => {
        match &$s[..] {
            $(
                $singular => {
                    return $plural.to_owned();
                },
            )*
            _ => ()
        }
    }
}
/// Converts a `&str` to singularized `String`
///
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "foo_bars";
/// let expected_string: String = "foo_bar".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "oxen";
/// let expected_string: String = "ox".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "crates";
/// let expected_string: String = "crate".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "oxen";
/// let expected_string: String = "ox".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "boxes";
/// let expected_string: String = "box".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "vengeance";
/// let expected_string: String = "vengeance".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::string::singularize::to_singular;
/// let mock_string: &str = "yoga";
/// let expected_string: String = "yoga".to_owned();
/// let asserted_string: String = to_singular(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
///
pub fn to_singular(non_singular_string: &str) -> String {
    // Mass/uncountable nouns have no separate singular form.
    if UNACCONTABLE_WORDS.contains(&non_singular_string.as_ref()) {
        non_singular_string.to_owned()
    } else {
        // Irregular singulars that no suffix rule can produce (early-returns).
        special_cases![non_singular_string,
            "oxen" => "ox",
            "boxes" => "box",
            "men" => "man",
            "women" => "woman",
            "dice" => "die",
            "yeses" => "yes",
            "feet" => "foot",
            "eaves" => "eave",
            "geese" => "goose",
            "teeth" => "tooth",
            "quizzes" => "quiz"
        ];
        // Later rules are more specific, so scan the table back-to-front.
        for &(ref rule, replace) in RULES.iter().rev() {
            if let Some(captures) = rule.captures(&non_singular_string) {
                if let Some(c) = captures.get(1) {
                    // Some replacements (e.g. "$2") reference further capture
                    // groups, so expand the template against this match.
                    let mut buf = String::new();
                    captures.expand(&format!("{}{}", c.as_str(), replace), &mut buf);
                    return buf;
                }
            }
        }
        // No rule matched: return the input unchanged.
        // (was `format!("{}", s)` — a needless Display round-trip)
        non_singular_string.to_owned()
    }
}
// Pushes one `(compiled regex, replacement)` pair onto the rule vector `$r`.
// Panics at initialization time if the pattern is invalid.
macro_rules! add_rule{
    ($r:ident, $rule:expr => $replace:expr) => {
        $r.push((Regex::new($rule).unwrap(), $replace));
    }
}
// Expands a `pattern => replacement` list into repeated `add_rule!` calls
// against the same rule vector `$r`.
macro_rules! rules{
    ($r:ident; $($rule:expr => $replace:expr), *) => {
        $(
            add_rule!{$r, $rule => $replace}
        )*
    }
}
lazy_static!{
    // Singularization rules as `(pattern, replacement)`. Replacements may
    // contain `$N` capture-group references, which `to_singular` expands via
    // `Captures::expand`. The list is walked in REVERSE order, so later,
    // more specific rules take precedence over the earlier generic ones.
    static ref RULES: Vec<(Regex, &'static str)> = {
        let mut r = Vec::with_capacity(27);
        rules![r;
            r"(\w*)s$" => "",
            r"(\w*)(ss)$" => "$2",
            r"(n)ews$" => "ews",
            r"(\w*)(o)es$" => "",
            r"(\w*)([ti])a$" => "um",
            r"((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)(sis|ses)$" => "sis",
            r"(^analy)(sis|ses)$" => "sis",
            r"(\w*)([^f])ves$" => "fe",
            r"(\w*)(hive)s$" => "",
            r"(\w*)(tive)s$" => "",
            r"(\w*)([lr])ves$" => "f",
            r"(\w*([^aeiouy]|qu))ies$" => "y",
            r"(s)eries$" => "eries",
            r"(m)ovies$" => "ovie",
            r"(\w*)(x|ch|ss|sh)es$" => "$2",
            r"(m|l)ice$" => "ouse",
            r"(bus)(es)?$" => "",
            r"(shoe)s$" => "",
            r"(cris|test)(is|es)$" => "is",
            r"^(a)x[ie]s$" => "xis",
            r"(octop|vir)(us|i)$" => "us",
            r"(alias|status)(es)?$" => "",
            r"^(ox)en" => "",
            r"(vert|ind)ices$" => "ex",
            r"(matr)ices$" => "ix",
            r"(quiz)zes$" => "",
            r"(database)s$" => ""
        ];
        r
    };
}
// Consonant + "ies" endings singularize back to "y".
#[test]
fn singularize_ies_suffix() {
    assert_eq!("reply", to_singular("replies"));
    assert_eq!("lady", to_singular("ladies"));
    assert_eq!("soliloquy", to_singular("soliloquies"));
}
// "ss"/"es" endings: words already ending in "ss" stay unchanged, while
// "sses"/"ches"/"shes" drop the trailing "es".
#[test]
fn singularize_ss_suffix() {
    assert_eq!("glass", to_singular("glass"));
    assert_eq!("access", to_singular("access"));
    assert_eq!("glass", to_singular("glasses"));
    assert_eq!("witch", to_singular("witches"));
    assert_eq!("dish", to_singular("dishes"));
}
// Verifies the irregular "oxen" => "ox" special case.
#[test]
fn singularize_string_if_a_regex_will_match() {
    // `assert_eq!` reports both values on failure, unlike `assert!(a == b)`.
    assert_eq!("ox".to_owned(), to_singular("oxen"));
}
// NOTE(review): the name is historical — `to_singular` returns the input
// unchanged (not an `Option`) when no rule matches.
#[test]
fn singularize_string_returns_none_option_if_no_match() {
    // `assert_eq!` reports both values on failure, unlike `assert!(a == b)`.
    assert_eq!("bacon".to_owned(), to_singular("bacon"));
}

View file

@ -0,0 +1,139 @@
use cases::snakecase::to_snake_case;
/// Converts a `&str` to a `foreign_key`
///
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "foo_bar";
/// let expected_string: String = "foo_bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "Foo bar";
/// let expected_string: String = "foo_bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "Foo Bar";
/// let expected_string: String = "foo_bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "Foo::Bar";
/// let expected_string: String = "bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "Test::Foo::Bar";
/// let expected_string: String = "bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "FooBar";
/// let expected_string: String = "foo_bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "fooBar";
/// let expected_string: String = "foo_bar_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::to_foreign_key;
/// let mock_string: &str = "fooBar3";
/// let expected_string: String = "foo_bar_3_id".to_owned();
/// let asserted_string: String = to_foreign_key(mock_string);
/// assert!(asserted_string == expected_string);
///
/// ```
pub fn to_foreign_key(non_foreign_key_string: &str) -> String {
if non_foreign_key_string.contains("::") {
let split_string: Vec<&str> = non_foreign_key_string.split("::").collect();
safe_convert(split_string[split_string.len() - 1])
} else {
safe_convert(non_foreign_key_string)
}
}
/// Snake-cases `safe_string` and guarantees exactly one trailing `_id`.
fn safe_convert(safe_string: &str) -> String {
    let mut snake_cased = to_snake_case(safe_string);
    if !snake_cased.ends_with("_id") {
        snake_cased.push_str("_id");
    }
    snake_cased
}
/// Determines if a `&str` is a `foreign_key`
///
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "Foo bar string that is really really long";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "foo-bar-string-that-is-really-really-long";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "FooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "Foo Bar Is A Really Really Long String";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "fooBarIsAReallyReallyLongString";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == false);
///
/// ```
/// ```
/// use inflector::suffix::foreignkey::is_foreign_key;
/// let mock_string: &str = "foo_bar_string_that_is_really_really_long_id";
/// let asserted_bool: bool = is_foreign_key(mock_string);
/// assert!(asserted_bool == true);
///
/// ```
pub fn is_foreign_key(test_string: &str) -> bool {
    // Round-trip check: a string already in foreign-key form is unchanged by
    // the conversion. (The previous `.clone()` on the `&str` was a no-op —
    // it copied the reference, not the string — and has been removed.)
    to_foreign_key(test_string) == test_string
}

View file

@ -0,0 +1,5 @@
#![deny(warnings)]
/// Provides foreign-key conversion for strings.
///
/// Example string `foo` becomes `foo_id`
pub mod foreignkey;

View file

@ -0,0 +1,162 @@
#![deny(warnings)]
extern crate inflector;
use inflector::Inflector;
use inflector::InflectorNumbers;
// Generates one `#[test]` per case, calling method `$imp_trait` on the
// `&str` expression `$to_cast` and asserting the result equals `$casted`.
macro_rules! str_tests {
    ( $($test_name:ident => $imp_trait:ident => $to_cast:expr => $casted:expr), *) => {
        $(
            #[test]
            fn $test_name() {
                assert_eq!($to_cast.$imp_trait(), $casted)
            }
        )*
    }
}
// Like `str_tests!`, but converts `$to_cast` to an owned `String` first to
// exercise the `String` impls of the trait methods.
macro_rules! string_tests {
    ( $($test_name:ident => $imp_trait:ident => $to_cast:expr => $casted:expr), *) => {
        $(
            #[test]
            fn $test_name() {
                assert_eq!($to_cast.to_string().$imp_trait(), $casted)
            }
        )*
    }
}
// Generates one `#[test]` per case, first binding `$to_cast` to the concrete
// numeric type `$typ` so the intended `InflectorNumbers` impl is exercised.
macro_rules! number_tests {
    ( $($test_name:ident => $imp_trait:ident => $typ:ident => $to_cast:expr => $casted:expr), *) => {
        $(
            #[test]
            fn $test_name() {
                let to_cast: $typ = $to_cast;
                assert_eq!(to_cast.$imp_trait(), $casted)
            }
        )*
    }
}
// Like `str_tests!`, but each generated test is compiled only when the
// "heavyweight" feature is enabled.
macro_rules! gated_str_tests {
    ( $($test_name:ident => $imp_trait:ident => $to_cast:expr => $casted:expr), *) => {
        $(
            #[test]
            #[cfg(feature = "heavyweight")]
            fn $test_name() {
                assert_eq!($to_cast.$imp_trait(), $casted)
            }
        )*
    }
}
// Like `string_tests!`, but each generated test is compiled only when the
// "heavyweight" feature is enabled.
macro_rules! gated_string_tests {
    ( $($test_name:ident => $imp_trait:ident => $to_cast:expr => $casted:expr), *) => {
        $(
            #[test]
            #[cfg(feature = "heavyweight")]
            fn $test_name() {
                assert_eq!($to_cast.to_string().$imp_trait(), $casted)
            }
        )*
    }
}
// `&str` receiver: case conversions, ordinalization and foreign-key helpers.
str_tests![
    str_to_camel => to_camel_case => "foo_bar" => "fooBar".to_string(),
    str_is_camel => is_camel_case => "fooBar" => true,
    str_is_not_camel => is_camel_case => "foo_bar" => false,
    str_to_screaming_snake => to_screaming_snake_case => "fooBar" => "FOO_BAR".to_string(),
    str_is_screaming_snake => is_screaming_snake_case => "FOO_BAR" => true,
    str_is_not_screaming_snake => is_screaming_snake_case => "foo_bar" => false,
    str_to_snake => to_snake_case => "fooBar" => "foo_bar".to_string(),
    str_is_snake => is_snake_case => "foo_bar" => true,
    str_is_not_snake => is_snake_case => "fooBar" => false,
    str_to_kebab => to_kebab_case => "fooBar" => "foo-bar".to_string(),
    str_is_kebab => is_kebab_case => "foo-bar" => true,
    str_is_not_kebab => is_kebab_case => "fooBar" => false,
    str_to_train => to_train_case => "fooBar" => "Foo-Bar".to_string(),
    str_is_train => is_train_case => "Foo-Bar" => true,
    str_is_not_train => is_train_case => "FOO-Bar" => false,
    str_to_sentence => to_sentence_case => "fooBar" => "Foo bar".to_string(),
    str_is_sentence => is_sentence_case => "Foo bar" => true,
    str_is_not_sentence => is_sentence_case => "foo_bar" => false,
    str_to_title => to_title_case => "fooBar" => "Foo Bar".to_string(),
    str_is_title => is_title_case => "Foo Bar" => true,
    str_is_not_title => is_title_case => "Foo_Bar" => false,
    str_ordinalize => ordinalize => "1" => "1st".to_string(),
    str_deordinalize => deordinalize => "1st" => "1".to_string(),
    str_to_foreign_key => to_foreign_key => "Foo::Bar" => "bar_id".to_string(),
    str_is_foreign_key => is_foreign_key => "bar_id" => true,
    str_is_not_foreign_key => is_foreign_key => "bar" => false
];
// `&str` receiver: class-case, table-case and pluralization helpers that
// require the "heavyweight" feature.
gated_str_tests![
    str_to_class_case => to_class_case => "foo" => "Foo".to_string(),
    str_is_class_case => is_class_case => "Foo" => true,
    str_is_not_class_case => is_class_case => "foo" => false,
    str_to_table => to_table_case => "fooBar" => "foo_bars".to_string(),
    str_is_table => is_table_case => "foo_bars" => true,
    str_is_not_table => is_table_case => "fooBars" => false,
    str_pluralize => to_plural => "crate" => "crates".to_string(),
    str_singular => to_singular => "crates" => "crate".to_string(),
    str_demodulize => demodulize => "Foo::Bar" => "Bar".to_string(),
    str_deconstantize => deconstantize => "Foo::Bar" => "Foo".to_string()
];
// Same coverage as `str_tests!` above, but via the `String` impls.
string_tests![
    string_to_camel => to_camel_case => "foo_bar".to_string() => "fooBar".to_string(),
    string_is_camel => is_camel_case => "fooBar".to_string() => true,
    string_is_not_camel => is_camel_case => "foo_bar".to_string() => false,
    string_to_screaming_snake => to_screaming_snake_case => "fooBar".to_string() => "FOO_BAR".to_string(),
    string_is_screaming_snake => is_screaming_snake_case => "FOO_BAR".to_string() => true,
    string_is_not_screaming_snake => is_screaming_snake_case => "foo_bar".to_string() => false,
    string_to_snake => to_snake_case => "fooBar".to_string() => "foo_bar".to_string(),
    string_is_snake => is_snake_case => "foo_bar".to_string() => true,
    string_is_not_snake => is_snake_case => "fooBar".to_string() => false,
    string_to_kebab => to_kebab_case => "fooBar".to_string() => "foo-bar".to_string(),
    string_is_kebab => is_kebab_case => "foo-bar".to_string() => true,
    string_is_not_kebab => is_kebab_case => "fooBar".to_string() => false,
    string_to_train => to_train_case => "fooBar".to_string() => "Foo-Bar".to_string(),
    string_is_train => is_train_case => "Foo-Bar".to_string() => true,
    string_is_not_train => is_train_case => "foo-Bar".to_string() => false,
    string_to_sentence => to_sentence_case => "fooBar".to_string() => "Foo bar".to_string(),
    string_is_sentence => is_sentence_case => "Foo bar".to_string() => true,
    string_is_not_sentence => is_sentence_case => "fooBar".to_string() => false,
    string_to_title => to_title_case => "fooBar".to_string() => "Foo Bar".to_string(),
    string_is_title => is_title_case => "Foo Bar".to_string() => true,
    string_is_not_title => is_title_case => "fooBar".to_string() => false,
    string_ordinalize => ordinalize => "1".to_string() => "1st".to_string(),
    string_deordinalize => deordinalize => "1st".to_string() => "1".to_string(),
    string_to_foreign_key => to_foreign_key => "Foo::Bar".to_string() => "bar_id".to_string(),
    string_is_foreign_key => is_foreign_key => "bar_id".to_string() => true,
    string_is_not_foreign_key => is_foreign_key => "bar".to_string() => false
];
// Same coverage as `gated_str_tests!` above, but via the `String` impls.
gated_string_tests![
    string_to_class_case => to_class_case => "foo".to_string() => "Foo".to_string(),
    string_is_class_case => is_class_case => "Foo".to_string() => true,
    string_is_not_class_case => is_class_case => "ooBar".to_string() => false,
    string_to_table => to_table_case => "fooBar".to_string() => "foo_bars".to_string(),
    string_is_table => is_table_case => "foo_bars".to_string() => true,
    string_is_not_table => is_table_case => "fooBar".to_string() => false,
    string_pluralize => to_plural => "crate".to_string() => "crates".to_string(),
    string_singular => to_singular => "crates".to_string() => "crate".to_string(),
    string_demodulize => demodulize => "Foo::Bar".to_string() => "Bar".to_string(),
    string_deconstantize => deconstantize => "Foo::Bar".to_string() => "Foo".to_string()
];
// `ordinalize` on every supported integer and float primitive type.
number_tests![
    i8_ordinalize => ordinalize => i8 => 1 => "1st".to_string(),
    i16_ordinalize => ordinalize => i16 => 1 => "1st".to_string(),
    i32_ordinalize => ordinalize => i32 => 1 => "1st".to_string(),
    i64_ordinalize => ordinalize => i64 => 1 => "1st".to_string(),
    u8_ordinalize => ordinalize => u8 => 1 => "1st".to_string(),
    u16_ordinalize => ordinalize => u16 => 1 => "1st".to_string(),
    u32_ordinalize => ordinalize => u32 => 1 => "1st".to_string(),
    u64_ordinalize => ordinalize => u64 => 1 => "1st".to_string(),
    isize_ordinalize => ordinalize => isize => 1 => "1st".to_string(),
    usize_ordinalize => ordinalize => usize => 1 => "1st".to_string(),
    f32_ordinalize => ordinalize => f32 => 1.0 => "1st".to_string(),
    f64_ordinalize => ordinalize => f64 => 1.0 => "1st".to_string()
];

View file

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"4991589f97c0b45b6a65669ac834c1e36b63b6fc27aa3cdd8efad8ab018c94c7","Cargo.toml":"ec6f15394241acb2d0118504e9797130c0324d4ceff46218ce31f1a2ed856850","LICENSE":"a6cba85bc92e0cff7a450b1d873c0eaa2e9fc96bf472df0247a26bec77bf3ff9","README.md":"da6bee2fc99fc8f96283e3382d85330569b59b97960558aed28a665c2bac7ba1","src/codepoint_ids.rs":"dad69e55584e9b3a9c28d13c6b9fe50e0190e3bd9966662a6532e361d2efb7a4","src/err.rs":"cf565d2c68afc5261b46d707d760636d9ac9c8a55b2653024544ccbb2c035572","src/font.rs":"a586cb6d71e99b8ffd0f6a80be2278841cfc12f65b30cfaa6c342a2131e46d74","src/font_arc.rs":"791d0fd55cf13624577a77d63fc398f890f143b97df34763e8daed900d4d6344","src/glyph.rs":"02017141b7f5a6982494f6a708e6f843d4be8da784b9322ba585e4416fcc0b9e","src/lib.rs":"b5f6926f64887d85b74ef5e9f4a797d67777df19688d7ccd582fc0d848d76efd","src/nostd_float.rs":"2bcf37dbc84b917a118cf3675161090f022bba7662e07dbfe7f0db4bfa2a1452","src/outlined.rs":"5c9ca46a8af0cef450d209b3a0d56ca3128ece7280b4c93599ef9ee421c64872","src/scale.rs":"c712fb8388b9f86ea20f50f86a5d9b83448905816fdf908f3d4586d78f206c94","src/ttfp.rs":"3a9fe6ba373db0a0b93b41986f4a45d0eb5a92bf59c61d6cd20b009562beda63","src/ttfp/outliner.rs":"e4ef3695155e5da8ab7260eeac5ac3e9e5f2d64d588a5ee625571e0eed63154c","src/ttfp/variable.rs":"ee1293438364f60fbc40829dbec33d2d0a884e3e8173cb68aa282c9f0a9e822e","src/variable.rs":"48d3b514f22b279e4b5ca82776167b954856f4419759446c98a3d32d17ce3b20"},"package":"80179d7dd5d7e8c285d67c4a1e652972a92de7475beddfb92028c76463b13225"}

View file

@ -0,0 +1,95 @@
# v0.2.23
* Update _ttf-parser_ to `0.20`.
# 0.2.22
* Add `v2::GlyphImage` and `Font::glyph_raster_image2` to expose width and height info.
* Deprecate `Font::glyph_raster_image` & `GlyphImage`.
* Improve `OutlinedGlyph::draw` documentation.
# 0.2.21
* Update _ttf-parser_ to `0.19`.
* Add `GlyphImageFormat` variants `BitmapMono`, `BitmapMonoPacked`, `BitmapGray2`, `BitmapGray2Packed`,
`BitmapGray4`, `BitmapGray4Packed`, `BitmapGray8`, `BitmapPremulBgra32`.
* `Font::h_advance_unscaled`, `h_side_bearing_unscaled`, `v_advance_unscaled`, `v_side_bearing_unscaled`
and related `ScaleFont` methods now return `0.0` if the font does not define that value.
Previously calls would panic when fonts lacked support.
* Use edition 2021.
# 0.2.20
* Add `FontVec::as_slice`, `FontVec::into_vec`.
# 0.2.19
* Update _ttf-parser_ to `0.18`.
# 0.2.18
* Update _ttf-parser_ to `0.17`.
# 0.2.17
* Add `VariableFont` trait implemented by `FontRef` & `FontVec`.
Provides `variations` & `set_variation` functions.
* Add default enabled feature `variable-fonts`.
# 0.2.16
* Add `Font::pt_to_px_scale` to ease converting point size to `PxScale`.
* Add `PxScale::round`.
# 0.2.15
* Fix some font outlines by always trying to "close" them at the end. Fixes _Cantarell-VF.otf_ outlining.
# 0.2.14
* Update _ttf-parser_ to `0.15`.
# 0.2.13
* Update _ttf-parser_ to `0.14`.
# 0.2.12
* Update _owned-ttf-parser_ to `0.13.2`.
* Pre-parse cmap & kern subtables on all `Font` variants at initialization. This provides
much faster `glyph_id` & `kern` method performance, results in 25-30% faster layout
benchmark performance.
# 0.2.11
* `Font::outline` will return `None` for rare invalid/empty glyph bounds instead of panicking.
* Add `Font::glyph_raster_image` for color emoji fonts.
# 0.2.10
* Update _ttf-parser_ to `0.12`.
# 0.2.9
* Update _ttf-parser_ to `0.11`.
# 0.2.8
* Add fallback bounding box calculation for malformed font glyphs with zero sized boxes.
* Update _ttf-parser_ to `0.10`.
# 0.2.7
* Update _ttf-parser_ to `0.9`.
# 0.2.6
* Add `Font::codepoint_ids` method for iterating over `(GlyphId, char)` pairs.
* Clarify documentation.
# 0.2.5
* Add `Font::units_per_em` + documentation on unscaled font units.
* Update _ttf-parser_ to `0.8`.
# 0.2.4
* Update _ttf-parser_ to `0.7` adding CID font support.
# 0.2.3
* Add `v_advance` & `v_side_bearing` methods to `ScaleFont` + `_unscaled` variants to `Font`.
# 0.2.2
* Add `Font::glyph_bounds` method, similar to glyph_brush's `glyph_bounds` but for a single glyph.
* Rename `OutlinedGlyph::bounds` to `OutlinedGlyph::px_bounds` for clarity.
# 0.2.1
* Update _ttf-parser_ to `0.6`.
# 0.2
* Add `_unscaled` suffix to `Font` trait methods that deal with unscaled metrics.
This helps distinguish `ScaleFont`'s scaled metrics and can avoid unintended behaviour.
* Rename "libm-math" -> "libm" for consistency with _ab_glyph_rasterizer_.
# 0.1
* Implement fast glyph layout, outline & drawing primitives.

56
third-party/vendor/ab_glyph/Cargo.toml vendored Normal file
View file

@ -0,0 +1,56 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
name = "ab_glyph"
version = "0.2.23"
authors = ["Alex Butler <alexheretic@gmail.com>"]
description = "API for loading, scaling, positioning and rasterizing OpenType font glyphs."
readme = "README.md"
keywords = [
"text",
"ttf",
"truetype",
"otf",
"opentype",
]
license = "Apache-2.0"
repository = "https://github.com/alexheretic/ab-glyph"
[dependencies.ab_glyph_rasterizer]
version = "0.1.2"
default-features = false
[dependencies.libm]
version = "0.2.1"
optional = true
[dependencies.owned_ttf_parser]
version = "0.20"
default-features = false
[dev-dependencies]
[features]
default = [
"std",
"variable-fonts",
]
libm = [
"dep:libm",
"ab_glyph_rasterizer/libm",
]
std = [
"owned_ttf_parser/default",
"ab_glyph_rasterizer/default",
]
variable-fonts = ["owned_ttf_parser/variable-fonts"]

176
third-party/vendor/ab_glyph/LICENSE vendored Normal file
View file

@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

42
third-party/vendor/ab_glyph/README.md vendored Normal file
View file

@ -0,0 +1,42 @@
ab_glyph
[![crates.io](https://img.shields.io/crates/v/ab_glyph.svg)](https://crates.io/crates/ab_glyph)
[![Documentation](https://docs.rs/ab_glyph/badge.svg)](https://docs.rs/ab_glyph)
========
Fast API for loading, scaling, positioning and rasterizing OpenType font glyphs.
```rust
use ab_glyph::{FontRef, Font, Glyph, point};
let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
// Get a glyph for 'q' with a scale & position.
let q_glyph: Glyph = font.glyph_id('q').with_scale_and_position(24.0, point(100.0, 0.0));
// Draw it.
if let Some(q) = font.outline_glyph(q_glyph) {
q.draw(|x, y, c| { /* draw pixel `(x, y)` with coverage: `c` */ });
}
```
## no_std
no_std environments are supported using `alloc` & [`libm`](https://github.com/rust-lang/libm).
```toml
ab_glyph = { default-features = false, features = ["libm"] }
```
## Comparison with [`rusttype`](https://gitlab.redox-os.org/redox-os/rusttype)
ab_glyph is a rewrite of rusttype made after I added .otf support for the latter and saw some performance issues
with the rusttype API.
ab_glyph is a more focussed API concentrating on high performance for both .ttf & .otf fonts.
When laying out glyphs into a paragraph, ab_glyph is faster than rusttype using .ttf fonts &
**much** faster for .otf fonts.
```
group ab-glyph rusttype 0.9
----- -------- ------------
layout_a_sentence (exo2-ttf) 1.00 11.1±0.08µs 1.56 17.3±0.14µs
layout_a_sentence (exo2-otf) 1.00 11.1±0.12µs 8.85 98.1±1.17µs
```
_Note: Numbers from May-2020 benchmarks, ab-glyph performance is also expected to have improved since then_.

View file

@ -0,0 +1,24 @@
use crate::GlyphId;
use alloc::boxed::Box;
use core::{fmt, iter};
/// Iterator of distinct `(GlyphId, char)` pairs in a font.
///
/// Returned by `Font::codepoint_ids`. Wraps a boxed dynamic iterator so the
/// concrete iterator type stays private to the crate.
pub struct CodepointIdIter<'a> {
    // Type-erased source of the pairs; only constructed inside this crate.
    pub(crate) inner: Box<dyn Iterator<Item = (GlyphId, char)> + 'a>,
}
impl<'a> Iterator for CodepointIdIter<'a> {
type Item = (GlyphId, char);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
}
// Marker impl: promises `next` keeps returning `None` once exhausted.
// NOTE(review): the boxed `dyn Iterator` itself does not guarantee fusedness;
// this relies on the crate-internal iterator behaving as fused — confirm.
impl iter::FusedIterator for CodepointIdIter<'_> {}
impl fmt::Debug for CodepointIdIter<'_> {
    /// The inner iterator is opaque, so debug output is just the type name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("CodepointIdIter")
    }
}

14
third-party/vendor/ab_glyph/src/err.rs vendored Normal file
View file

@ -0,0 +1,14 @@
use core::fmt;
/// Error indicating the given bytes could not be parsed as valid font data.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct InvalidFont;

impl fmt::Display for InvalidFont {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("InvalidFont")
    }
}

// `std::error::Error` has no required methods; gated because it lives in `std`.
#[cfg(feature = "std")]
impl std::error::Error for InvalidFont {}

318
third-party/vendor/ab_glyph/src/font.rs vendored Normal file
View file

@ -0,0 +1,318 @@
use crate::{
point, v2, Glyph, GlyphId, Outline, OutlinedGlyph, PxScale, PxScaleFont, Rect, ScaleFont,
};
/// Functionality required from font data.
///
/// See also [`FontArc`](struct.FontArc.html), [`FontRef`](struct.FontRef.html)
/// and [`FontVec`](struct.FontVec.html).
///
/// ## Units
///
/// Units of unscaled accessors are "font units", which is an arbitrary unit
/// defined by the font. See [`Font::units_per_em`].
///
/// ab_glyph uses a non-standard scale [`PxScale`] which is the pixel height
/// of the text. See [`Font::pt_to_px_scale`] to convert standard point sizes.
pub trait Font {
    /// Get the size of the font unit
    ///
    /// This returns "font units per em", where 1em is a base unit of font scale
    /// (typically the width of a capital 'M').
    ///
    /// Returns `None` in case the font unit size exceeds the expected range.
    /// See [`Face::units_per_em`](https://docs.rs/ttf-parser/latest/ttf_parser/struct.Face.html#method.units_per_em).
    ///
    /// May be used to calculate [`PxScale`] from pt size, see [`Font::pt_to_px_scale`].
    fn units_per_em(&self) -> Option<f32>;

    /// Converts pt units into [`PxScale`].
    ///
    /// Note: To handle a screen scale factor multiply it to the `pt_size` argument.
    ///
    /// Returns `None` in case the [`Font::units_per_em`] unit size exceeds the expected range.
    ///
    /// ## Point size (pt)
    ///
    /// Font sizes are typically specified in "points". According to the modern
    /// standard, 1pt = 1/72in. The "point size" of a font is the number of points
    /// per em.
    ///
    /// The DPI (dots-per-inch) of a screen depends on the screen in question;
    /// 96 DPI is often considered the "standard". For high-DPI displays the
    /// DPI may be specified directly or one may multiply 96 by a scale-factor.
    ///
    /// Thus, for example, a 10pt font on a 96 pixels-per-inch display has
    /// 10 / 72 * 96 = 13.333... pixels-per-em. If we divide this number by
    /// `units_per_em` we then get a scaling factor: pixels-per-font-unit.
    ///
    /// Note however that since [`PxScale`] values are relative to the text height,
    /// one further step is needed: multiply by [`Font::height_unscaled`].
    fn pt_to_px_scale(&self, pt_size: f32) -> Option<PxScale> {
        // 1pt = 1/72in at an assumed 96 DPI, so px-per-em = pt * 96/72.
        let px_per_em = pt_size * (96.0 / 72.0);
        let units_per_em = self.units_per_em()?;
        let height = self.height_unscaled();
        // PxScale is relative to text height, hence the extra `height` factor
        // on top of the px-per-font-unit ratio (see doc above).
        Some(PxScale::from(px_per_em * height / units_per_em))
    }

    /// Unscaled glyph ascent.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn ascent_unscaled(&self) -> f32;

    /// Unscaled glyph descent.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn descent_unscaled(&self) -> f32;

    /// Unscaled height `ascent - descent`.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    #[inline]
    fn height_unscaled(&self) -> f32 {
        // `descent_unscaled` is typically negative (see `as_scaled` example),
        // so this subtraction yields the full ascent-to-descent span.
        self.ascent_unscaled() - self.descent_unscaled()
    }

    /// Unscaled line gap.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn line_gap_unscaled(&self) -> f32;

    /// Lookup a `GlyphId` matching a given `char`.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn glyph_id(&self, c: char) -> GlyphId;

    /// Unscaled horizontal advance for a given glyph id.
    ///
    /// Returns `0.0` if the font does not define this value.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn h_advance_unscaled(&self, id: GlyphId) -> f32;

    /// Unscaled horizontal side bearing for a given glyph id.
    ///
    /// Returns `0.0` if the font does not define this value.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn h_side_bearing_unscaled(&self, id: GlyphId) -> f32;

    /// Unscaled vertical advance for a given glyph id.
    ///
    /// Returns `0.0` if the font does not define this value.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn v_advance_unscaled(&self, id: GlyphId) -> f32;

    /// Unscaled vertical side bearing for a given glyph id.
    ///
    /// Returns `0.0` if the font does not define this value.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn v_side_bearing_unscaled(&self, id: GlyphId) -> f32;

    /// Returns additional unscaled kerning to apply for a particular pair of glyph ids.
    ///
    /// Scaling can be done with [as_scaled](trait.Font.html#method.as_scaled).
    fn kern_unscaled(&self, first: GlyphId, second: GlyphId) -> f32;

    /// Compute unscaled glyph outline curves & bounding box.
    fn outline(&self, id: GlyphId) -> Option<Outline>;

    /// The number of glyphs present in this font. Glyph identifiers for this
    /// font will always be in the range `0..self.glyph_count()`
    fn glyph_count(&self) -> usize;

    /// Returns an iterator of all distinct `(GlyphId, char)` pairs. Not ordered.
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::{Font, FontRef, GlyphId};
    /// # use std::collections::HashMap;
    /// # fn main() -> Result<(), ab_glyph::InvalidFont> {
    /// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
    ///
    /// // Iterate over pairs, each id will appear at most once.
    /// let mut codepoint_ids = font.codepoint_ids();
    /// assert_eq!(codepoint_ids.next(), Some((GlyphId(408), '\r')));
    /// assert_eq!(codepoint_ids.next(), Some((GlyphId(1), ' ')));
    /// assert_eq!(codepoint_ids.next(), Some((GlyphId(75), '!')));
    ///
    /// // Build a lookup map for all ids
    /// let map: HashMap<_, _> = font.codepoint_ids().collect();
    /// assert_eq!(map.get(&GlyphId(75)), Some(&'!'));
    /// # assert_eq!(map.len(), 908);
    /// # Ok(()) }
    /// ```
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_>;

    /// Returns a pre-rendered image of the glyph.
    ///
    /// This is normally only present when an outline is not sufficient to describe the glyph, such
    /// as emojis (particularly color ones). The `pixel_size` parameter is in pixels per em, and will be
    /// used to select between multiple possible images (if present); the returned image will
    /// likely not match this value, requiring you to scale it to match the target resolution.
    /// To get the largest image use `u16::MAX`.
    #[allow(deprecated)]
    #[deprecated(
        since = "0.2.22",
        note = "Deprecated in favor of `glyph_raster_image2`"
    )]
    fn glyph_raster_image(&self, id: GlyphId, pixel_size: u16) -> Option<crate::GlyphImage> {
        // Adapt the v2 result to the legacy struct shape for old callers.
        self.glyph_raster_image2(id, pixel_size)
            .map(|i| crate::GlyphImage {
                origin: i.origin,
                scale: i.pixels_per_em.into(),
                data: i.data,
                format: i.format,
            })
    }

    /// Returns a pre-rendered image of the glyph.
    ///
    /// This is normally only present when an outline is not sufficient to describe the glyph, such
    /// as emojis (particularly color ones). The `pixel_size` parameter is in pixels per em, and will be
    /// used to select between multiple possible images (if present); the returned image will
    /// likely not match this value, requiring you to scale it to match the target resolution.
    /// To get the largest image use `u16::MAX`.
    fn glyph_raster_image2(&self, id: GlyphId, pixel_size: u16) -> Option<v2::GlyphImage>;

    /// Returns the layout bounds of this glyph. These are different to the outline `px_bounds()`.
    ///
    /// Horizontally: Glyph position +/- h_advance/h_side_bearing.
    /// Vertically: Glyph position +/- ascent/descent.
    #[inline]
    fn glyph_bounds(&self, glyph: &Glyph) -> Rect
    where
        Self: Sized,
    {
        let sf = self.as_scaled(glyph.scale);
        let pos = glyph.position;
        // `descent()` is typically negative (see the `as_scaled` example),
        // so `pos.y - sf.descent()` lies below the baseline.
        Rect {
            min: point(pos.x - sf.h_side_bearing(glyph.id), pos.y - sf.ascent()),
            max: point(pos.x + sf.h_advance(glyph.id), pos.y - sf.descent()),
        }
    }

    /// Compute glyph outline ready for drawing.
    #[inline]
    fn outline_glyph(&self, glyph: Glyph) -> Option<OutlinedGlyph>
    where
        Self: Sized,
    {
        // `None` when the font defines no outline for this glyph id.
        let outline = self.outline(glyph.id)?;
        // Bundle the unscaled outline with this glyph's px scale factor.
        let scale_factor = self.as_scaled(glyph.scale).scale_factor();
        Some(OutlinedGlyph::new(glyph, outline, scale_factor))
    }

    /// Construct a [`PxScaleFontRef`](struct.PxScaleFontRef.html) by associating with the
    /// given pixel `scale`.
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::{Font, FontRef, PxScale, ScaleFont};
    /// # fn main() -> Result<(), ab_glyph::InvalidFont> {
    /// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
    ///
    /// assert_eq!(font.descent_unscaled(), -201.0);
    ///
    /// assert_eq!(font.as_scaled(24.0).descent(), -4.02);
    /// assert_eq!(font.as_scaled(50.0).descent(), -8.375);
    /// # Ok(()) }
    /// ```
    #[inline]
    fn as_scaled<S: Into<PxScale>>(&self, scale: S) -> PxScaleFont<&'_ Self>
    where
        Self: Sized,
    {
        PxScaleFont {
            font: self,
            scale: scale.into(),
        }
    }

    /// Move into a [`PxScaleFont`](struct.PxScaleFont.html) associated with the
    /// given pixel `scale`.
    #[inline]
    fn into_scaled<S: Into<PxScale>>(self, scale: S) -> PxScaleFont<Self>
    where
        Self: core::marker::Sized,
    {
        PxScaleFont {
            font: self,
            scale: scale.into(),
        }
    }
}
/// A shared reference to a `Font` is itself usable as a `Font`,
/// forwarding every method to the referenced implementation.
impl<F: Font> Font for &F {
    #[inline]
    fn units_per_em(&self) -> Option<f32> {
        F::units_per_em(self)
    }

    #[inline]
    fn ascent_unscaled(&self) -> f32 {
        F::ascent_unscaled(self)
    }

    #[inline]
    fn descent_unscaled(&self) -> f32 {
        F::descent_unscaled(self)
    }

    #[inline]
    fn line_gap_unscaled(&self) -> f32 {
        F::line_gap_unscaled(self)
    }

    #[inline]
    fn glyph_id(&self, c: char) -> GlyphId {
        F::glyph_id(self, c)
    }

    #[inline]
    fn h_advance_unscaled(&self, id: GlyphId) -> f32 {
        F::h_advance_unscaled(self, id)
    }

    #[inline]
    fn h_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
        F::h_side_bearing_unscaled(self, id)
    }

    #[inline]
    fn v_advance_unscaled(&self, id: GlyphId) -> f32 {
        F::v_advance_unscaled(self, id)
    }

    #[inline]
    fn v_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
        F::v_side_bearing_unscaled(self, id)
    }

    #[inline]
    fn kern_unscaled(&self, first: GlyphId, second: GlyphId) -> f32 {
        F::kern_unscaled(self, first, second)
    }

    #[inline]
    fn outline(&self, glyph: GlyphId) -> Option<Outline> {
        F::outline(self, glyph)
    }

    #[inline]
    fn glyph_count(&self) -> usize {
        F::glyph_count(self)
    }

    #[inline]
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_> {
        F::codepoint_ids(self)
    }

    #[inline]
    fn glyph_raster_image2(&self, id: GlyphId, size: u16) -> Option<v2::GlyphImage> {
        F::glyph_raster_image2(self, id, size)
    }
}

View file

@ -0,0 +1,162 @@
use crate::{v2, Font, FontRef, FontVec, GlyphId, InvalidFont, Outline};
use alloc::sync::Arc;
use core::fmt;
/// `Font` implementor that wraps another concrete `Font + 'static` type storing in an `Arc`.
///
/// Provides convenient type erasure & cheap clones (particularly for `FontVec`):
/// `Clone` only bumps the `Arc` reference count, the font data is shared.
///
/// # Example
/// ```
/// use ab_glyph::{Font, FontArc};
///
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// let font = FontArc::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
///
/// assert_eq!(font.glyph_id('s'), ab_glyph::GlyphId(56));
/// # Ok(()) }
/// ```
#[derive(Clone)]
pub struct FontArc(Arc<dyn Font + Send + Sync + 'static>);
impl FontArc {
/// # Example
/// ```
/// # use ab_glyph::*;
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// # let font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
/// # let font_vec = FontVec::try_from_vec(font_data)?;
/// let font_arc = FontArc::new(font_vec);
/// # Ok(()) }
/// ```
#[inline]
pub fn new<F: Font + Send + Sync + 'static>(font: F) -> Self {
Self(Arc::new(font))
}
/// Creates an `FontArc` from owned data.
///
/// # Example
/// ```
/// # use ab_glyph::*;
/// # fn main() -> Result<(), InvalidFont> {
/// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
/// let font = FontArc::try_from_vec(owned_font_data)?;
/// # Ok(()) }
/// ```
#[inline]
pub fn try_from_vec(data: Vec<u8>) -> Result<Self, InvalidFont> {
Ok(FontVec::try_from_vec(data)?.into())
}
/// Creates an `FontArc` from a byte-slice.
///
/// # Example
/// ```
/// # use ab_glyph::*;
/// # fn main() -> Result<(), InvalidFont> {
/// let font = FontArc::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
/// # Ok(()) }
/// ```
#[inline]
pub fn try_from_slice(data: &'static [u8]) -> Result<Self, InvalidFont> {
Ok(FontRef::try_from_slice(data)?.into())
}
}
impl fmt::Debug for FontArc {
    /// The wrapped font is type-erased, so debug output is just the type name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FontArc")
    }
}
/// Delegate every `Font` method to the `Arc`-boxed inner font.
impl Font for FontArc {
    #[inline]
    fn units_per_em(&self) -> Option<f32> {
        self.0.as_ref().units_per_em()
    }

    #[inline]
    fn ascent_unscaled(&self) -> f32 {
        self.0.as_ref().ascent_unscaled()
    }

    #[inline]
    fn descent_unscaled(&self) -> f32 {
        self.0.as_ref().descent_unscaled()
    }

    #[inline]
    fn line_gap_unscaled(&self) -> f32 {
        self.0.as_ref().line_gap_unscaled()
    }

    #[inline]
    fn glyph_id(&self, c: char) -> GlyphId {
        self.0.as_ref().glyph_id(c)
    }

    #[inline]
    fn h_advance_unscaled(&self, id: GlyphId) -> f32 {
        self.0.as_ref().h_advance_unscaled(id)
    }

    #[inline]
    fn h_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
        self.0.as_ref().h_side_bearing_unscaled(id)
    }

    #[inline]
    fn v_advance_unscaled(&self, id: GlyphId) -> f32 {
        self.0.as_ref().v_advance_unscaled(id)
    }

    #[inline]
    fn v_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
        self.0.as_ref().v_side_bearing_unscaled(id)
    }

    #[inline]
    fn kern_unscaled(&self, first: GlyphId, second: GlyphId) -> f32 {
        self.0.as_ref().kern_unscaled(first, second)
    }

    #[inline]
    fn outline(&self, glyph: GlyphId) -> Option<Outline> {
        self.0.as_ref().outline(glyph)
    }

    #[inline]
    fn glyph_count(&self) -> usize {
        self.0.as_ref().glyph_count()
    }

    #[inline]
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_> {
        self.0.as_ref().codepoint_ids()
    }

    #[inline]
    fn glyph_raster_image2(&self, id: GlyphId, size: u16) -> Option<v2::GlyphImage> {
        self.0.as_ref().glyph_raster_image2(id, size)
    }
}
impl From<FontVec> for FontArc {
#[inline]
fn from(font: FontVec) -> Self {
Self::new(font)
}
}
impl From<FontRef<'static>> for FontArc {
#[inline]
fn from(font: FontRef<'static>) -> Self {
Self::new(font)
}
}
impl From<Arc<dyn Font + Send + Sync + 'static>> for FontArc {
#[inline]
fn from(font: Arc<dyn Font + Send + Sync + 'static>) -> Self {
Self(font)
}
}

171
third-party/vendor/ab_glyph/src/glyph.rs vendored Normal file
View file

@ -0,0 +1,171 @@
use crate::{Point, PxScale};
/// Glyph id.
///
/// # Example
/// ```
/// use ab_glyph::{Font, FontRef, GlyphId};
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
///
/// let q_id: GlyphId = font.glyph_id('q');
/// # Ok(()) }
/// ```
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GlyphId(pub u16);

impl GlyphId {
    /// Construct a `Glyph` with given scale & position.
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf")).unwrap();
    /// let glyph = font.glyph_id('z').with_scale_and_position(24.0, point(100.0, 0.0));
    /// ```
    #[inline]
    pub fn with_scale_and_position<S, P>(self, scale: S, position: P) -> Glyph
    where
        S: Into<PxScale>,
        P: Into<Point>,
    {
        Glyph {
            id: self,
            scale: scale.into(),
            position: position.into(),
        }
    }

    /// Construct a `Glyph` with given scale and position `point(0.0, 0.0)`.
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf")).unwrap();
    /// let glyph = font.glyph_id('w').with_scale(48.0);
    /// ```
    #[inline]
    pub fn with_scale<S: Into<PxScale>>(self, scale: S) -> Glyph {
        self.with_scale_and_position(scale, Point::default())
    }
}
/// A glyph with pixel scale & position.
///
/// Built via [`GlyphId::with_scale_and_position`] / [`GlyphId::with_scale`].
#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub struct Glyph {
    /// Glyph id.
    pub id: GlyphId,
    /// Pixel scale of this glyph.
    pub scale: PxScale,
    /// Position of this glyph.
    pub position: Point,
}
/// Old version of [`v2::GlyphImage`].
///
/// Kept only for backward compatibility; `Font::glyph_raster_image` adapts
/// the v2 result into this shape.
#[deprecated(since = "0.2.22", note = "Deprecated in favor of `v2::GlyphImage`")]
#[derive(Debug, Clone)]
pub struct GlyphImage<'a> {
    /// Offset of the image from the normal origin (top at the baseline plus
    /// ascent), measured in pixels at the image's current scale.
    pub origin: Point,
    /// Current scale of the image in pixels per em.
    pub scale: f32,
    /// Raw image data, not a bitmap in the case of [`GlyphImageFormat::Png`] format.
    pub data: &'a [u8],
    /// Format of the raw data.
    pub format: GlyphImageFormat,
}
/// Newer image types, superseding the deprecated top-level [`GlyphImage`](super::GlyphImage).
pub mod v2 {
    use crate::{GlyphImageFormat, Point};

    /// A pre-rendered image of a glyph, usually used for emojis or other glyphs
    /// that can't be represented only using an outline.
    #[non_exhaustive]
    #[derive(Debug, Clone)]
    pub struct GlyphImage<'a> {
        /// Offset of the image from the normal origin (top at the baseline plus
        /// ascent), measured in pixels at the image's current scale.
        pub origin: Point,
        /// Image width.
        ///
        /// It doesn't guarantee that this value is the same as set in the `data` in the case of
        /// [`GlyphImageFormat::Png`] format.
        pub width: u16,
        /// Image height.
        ///
        /// It doesn't guarantee that this value is the same as set in the `data` in the case of
        /// [`GlyphImageFormat::Png`] format.
        pub height: u16,
        /// Pixels per em of the selected strike.
        pub pixels_per_em: u16,
        /// Raw image data, see [`format`](GlyphImageFormat).
        pub data: &'a [u8],
        /// Format of the raw [`data`](Self::data).
        pub format: GlyphImageFormat,
    }
}
/// Valid formats for a [`GlyphImage`].
// Possible future formats: SVG, JPEG, TIFF
#[non_exhaustive]
#[derive(Debug, Clone)]
pub enum GlyphImageFormat {
    /// PNG-encoded image data (not a raw bitmap).
    Png,
    /// A monochrome bitmap.
    ///
    /// The most significant bit of the first byte corresponds to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. The data for each row is padded to a byte
    /// boundary, so the next row begins with the most significant bit of a new byte. 1 corresponds
    /// to black, and 0 to white.
    BitmapMono,
    /// A packed monochrome bitmap.
    ///
    /// The most significant bit of the first byte corresponds to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. Data is tightly packed with no padding. 1
    /// corresponds to black, and 0 to white.
    BitmapMonoPacked,
    /// A grayscale bitmap with 2 bits per pixel.
    ///
    /// The most significant bits of the first byte correspond to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. The data for each row is padded to a byte
    /// boundary, so the next row begins with the most significant bit of a new byte.
    BitmapGray2,
    /// A packed grayscale bitmap with 2 bits per pixel.
    ///
    /// The most significant bits of the first byte correspond to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. Data is tightly packed with no padding.
    BitmapGray2Packed,
    /// A grayscale bitmap with 4 bits per pixel.
    ///
    /// The most significant bits of the first byte correspond to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. The data for each row is padded to a byte
    /// boundary, so the next row begins with the most significant bit of a new byte.
    BitmapGray4,
    /// A packed grayscale bitmap with 4 bits per pixel.
    ///
    /// The most significant bits of the first byte correspond to the top-left pixel, proceeding
    /// through succeeding bits moving left to right. Data is tightly packed with no padding.
    BitmapGray4Packed,
    /// A grayscale bitmap with 8 bits per pixel.
    ///
    /// The first byte corresponds to the top-left pixel, proceeding through succeeding bytes
    /// moving left to right.
    BitmapGray8,
    /// A color bitmap with 32 bits per pixel.
    ///
    /// The first group of four bytes corresponds to the top-left pixel, proceeding through
    /// succeeding pixels moving left to right. Each byte corresponds to a color channel and the
    /// channels within a pixel are in blue, green, red, alpha order. Color values are
    /// pre-multiplied by the alpha. For example, the color "full-green with half translucency"
    /// is encoded as `\x00\x80\x00\x80`, and not `\x00\xFF\x00\x80`.
    BitmapPremulBgra32,
}

54
third-party/vendor/ab_glyph/src/lib.rs vendored Normal file
View file

@ -0,0 +1,54 @@
//! API for loading, scaling, positioning and rasterizing OpenType font glyphs.
//!
//! # Example
//! ```
//! use ab_glyph::{point, Font, FontRef, Glyph};
//!
//! # fn main() -> Result<(), ab_glyph::InvalidFont> {
//! let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
//!
//! // Get a glyph for 'q' with a scale & position.
//! let q_glyph: Glyph = font
//! .glyph_id('q')
//! .with_scale_and_position(24.0, point(100.0, 0.0));
//!
//! // Draw it.
//! if let Some(q) = font.outline_glyph(q_glyph) {
//! q.draw(|x, y, c| { /* draw pixel `(x, y)` with coverage: `c` */ });
//! }
//! # Ok(()) }
//! ```
#![warn(missing_debug_implementations)]
#![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;
mod codepoint_ids;
mod err;
mod font;
#[cfg(feature = "std")]
mod font_arc;
mod glyph;
#[cfg(all(feature = "libm", not(feature = "std")))]
mod nostd_float;
mod outlined;
mod scale;
mod ttfp;
#[cfg(feature = "variable-fonts")]
mod variable;
#[cfg(feature = "std")]
pub use crate::font_arc::*;
#[allow(deprecated)]
pub use crate::{
codepoint_ids::*,
err::*,
font::*,
glyph::*,
outlined::*,
scale::*,
ttfp::{FontRef, FontVec},
};
pub use ab_glyph_rasterizer::{point, Point};
#[cfg(feature = "variable-fonts")]
pub use variable::*;

View file

@ -0,0 +1,41 @@
/// Basic required float operations.
///
/// Mirrors the `f32` inherent methods that are only available with `std`,
/// so the crate can build with `no_std` by delegating to `libm` (see the
/// impl below).
pub(crate) trait FloatExt {
    /// Largest integer value less than or equal to `self`.
    fn floor(self) -> Self;
    /// Smallest integer value greater than or equal to `self`.
    fn ceil(self) -> Self;
    /// Square root of `self`.
    fn sqrt(self) -> Self;
    /// Nearest integer to `self`.
    fn round(self) -> Self;
    /// Absolute value of `self`.
    fn abs(self) -> Self;
    /// Integer part of `self`.
    fn trunc(self) -> Self;
    /// Fractional part of `self`.
    fn fract(self) -> Self;
}
// `no_std` float math: delegate each operation to the equivalent `libm`
// single-precision function.
impl FloatExt for f32 {
    #[inline]
    fn floor(self) -> Self {
        libm::floorf(self)
    }
    #[inline]
    fn ceil(self) -> Self {
        libm::ceilf(self)
    }
    #[inline]
    fn sqrt(self) -> Self {
        libm::sqrtf(self)
    }
    #[inline]
    fn round(self) -> Self {
        libm::roundf(self)
    }
    #[inline]
    fn abs(self) -> Self {
        libm::fabsf(self)
    }
    #[inline]
    fn trunc(self) -> Self {
        libm::truncf(self)
    }
    #[inline]
    fn fract(self) -> Self {
        // libm has no fractf; derive it from trunc, matching std's definition.
        self - self.trunc()
    }
}

View file

@ -0,0 +1,176 @@
#[cfg(all(feature = "libm", not(feature = "std")))]
use crate::nostd_float::FloatExt;
use crate::{point, Glyph, Point, PxScaleFactor};
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
/// A "raw" collection of outline curves for a glyph, unscaled & unpositioned.
#[derive(Clone, Debug)]
pub struct Outline {
    /// Unscaled bounding box.
    pub bounds: Rect,
    /// Unscaled & unpositioned outline curves.
    pub curves: Vec<OutlineCurve>,
}
impl Outline {
    /// Convert unscaled bounds into pixel bounds at a given scale & position.
    pub fn px_bounds(&self, scale_factor: PxScaleFactor, position: Point) -> Rect {
        let h = scale_factor.horizontal;
        // Vertical factor is negated: unscaled y grows in the opposite
        // direction to pixel rows.
        let v = -scale_factor.vertical;
        let Rect { min, max } = self.bounds;

        // Split the position into whole-pixel & subpixel parts and include the
        // subpixel fraction inside the floor/ceil rounding. This eliminates
        // rounding error differences between identical subpixel positions.
        let (x_whole, x_sub) = (position.x.trunc(), position.x.fract());
        let (y_whole, y_sub) = (position.y.trunc(), position.y.fract());

        let px_min = point(
            (min.x * h + x_sub).floor() + x_whole,
            (min.y * v + y_sub).floor() + y_whole,
        );
        let px_max = point(
            (max.x * h + x_sub).ceil() + x_whole,
            (max.y * v + y_sub).ceil() + y_whole,
        );

        Rect {
            min: px_min,
            max: px_max,
        }
    }
}
/// A glyph that has been outlined at a scale & position.
#[derive(Clone, Debug)]
pub struct OutlinedGlyph {
    // Source glyph (id, scale & position).
    glyph: Glyph,
    // Pixel scale bounds, pre-computed at construction (see `new`).
    px_bounds: Rect,
    // Scale factor used to produce the outline.
    scale_factor: PxScaleFactor,
    // Raw outline curves, unscaled & unpositioned.
    outline: Outline,
}
impl OutlinedGlyph {
/// Constructs an `OutlinedGlyph` from the source `Glyph`, pixel bounds
/// & relatively positioned outline curves.
#[inline]
pub fn new(glyph: Glyph, outline: Outline, scale_factor: PxScaleFactor) -> Self {
// work this out now as it'll usually be used more than once
let px_bounds = outline.px_bounds(scale_factor, glyph.position);
Self {
glyph,
px_bounds,
scale_factor,
outline,
}
}
/// Glyph info.
#[inline]
pub fn glyph(&self) -> &Glyph {
&self.glyph
}
#[deprecated = "Renamed to `px_bounds`"]
#[doc(hidden)]
pub fn bounds(&self) -> Rect {
self.px_bounds()
}
/// Conservative whole number pixel bounding box for this glyph.
#[inline]
pub fn px_bounds(&self) -> Rect {
self.px_bounds
}
/// Draw this glyph outline using a pixel & coverage handling function.
///
/// The callback will be called for each `(x, y)` pixel coordinate inside the bounds
/// with a coverage value indicating how much the glyph covered that pixel.
///
/// A coverage value of `0.0` means the pixel is totally uncoverred by the glyph.
/// A value of `1.0` or greater means fully coverred.
pub fn draw<O: FnMut(u32, u32, f32)>(&self, o: O) {
use ab_glyph_rasterizer::Rasterizer;
let h_factor = self.scale_factor.horizontal;
let v_factor = -self.scale_factor.vertical;
let offset = self.glyph.position - self.px_bounds.min;
let (w, h) = (
self.px_bounds.width() as usize,
self.px_bounds.height() as usize,
);
let scale_up = |&Point { x, y }| point(x * h_factor, y * v_factor);
self.outline
.curves
.iter()
.fold(Rasterizer::new(w, h), |mut rasterizer, curve| match curve {
OutlineCurve::Line(p0, p1) => {
// eprintln!("r.draw_line({:?}, {:?});",
// scale_up(p0) + offset, scale_up(p1) + offset);
rasterizer.draw_line(scale_up(p0) + offset, scale_up(p1) + offset);
rasterizer
}
OutlineCurve::Quad(p0, p1, p2) => {
// eprintln!("r.draw_quad({:?}, {:?}, {:?});",
// scale_up(p0) + offset, scale_up(p1) + offset, scale_up(p2) + offset);
rasterizer.draw_quad(
scale_up(p0) + offset,
scale_up(p1) + offset,
scale_up(p2) + offset,
);
rasterizer
}
OutlineCurve::Cubic(p0, p1, p2, p3) => {
// eprintln!("r.draw_cubic({:?}, {:?}, {:?}, {:?});",
// scale_up(p0) + offset, scale_up(p1) + offset, scale_up(p2) + offset, scale_up(p3) + offset);
rasterizer.draw_cubic(
scale_up(p0) + offset,
scale_up(p1) + offset,
scale_up(p2) + offset,
scale_up(p3) + offset,
);
rasterizer
}
})
.for_each_pixel_2d(o);
}
}
impl AsRef<Glyph> for OutlinedGlyph {
    /// Borrows the source glyph.
    #[inline]
    fn as_ref(&self) -> &Glyph {
        &self.glyph
    }
}
/// Glyph outline primitives.
///
/// Points are in unscaled font units.
#[derive(Clone, Debug)]
pub enum OutlineCurve {
    /// Straight line from `.0` to `.1`.
    Line(Point, Point),
    /// Quadratic Bézier curve from `.0` to `.2` using `.1` as the control.
    Quad(Point, Point, Point),
    /// Cubic Bézier curve from `.0` to `.3` using `.1` as the control at the beginning of the
    /// curve and `.2` at the end of the curve.
    Cubic(Point, Point, Point, Point),
}
/// A rectangle, with top-left corner at `min`, and bottom-right corner at `max`.
#[derive(Copy, Clone, Debug, Default, PartialEq, PartialOrd)]
pub struct Rect {
    /// Top-left corner.
    pub min: Point,
    /// Bottom-right corner.
    pub max: Point,
}
impl Rect {
    /// Rect width: `max.x - min.x`.
    #[inline]
    pub fn width(&self) -> f32 {
        self.max.x - self.min.x
    }

    /// Rect height: `max.y - min.y`.
    #[inline]
    pub fn height(&self) -> f32 {
        self.max.y - self.min.y
    }
}

270
third-party/vendor/ab_glyph/src/scale.rs vendored Normal file
View file

@ -0,0 +1,270 @@
#[cfg(all(feature = "libm", not(feature = "std")))]
use crate::nostd_float::FloatExt;
use crate::{Font, Glyph, GlyphId, OutlinedGlyph, Rect};
/// Pixel scale.
///
/// This is the pixel-height of text.
///
/// Usually one uses `x == y`, but one may use a different ratio to stretch a
/// font horizontally or vertically.
///
/// To convert pt size into pixel-scale see [`Font::pt_to_px_scale`].
///
/// A uniform scale may be built with `PxScale::from(f32)` (see the `From`
/// impl below).
///
/// # Example
/// ```
/// use ab_glyph::PxScale;
///
/// let uniform_scale_24px = PxScale::from(24.0);
/// ```
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
pub struct PxScale {
    /// Horizontal scale in pixels.
    pub x: f32,
    /// Vertical scale in pixels.
    ///
    /// By definition, this is the pixel-height of a font.
    pub y: f32,
}
impl PxScale {
    /// Returns a `PxScale` with both x & y scale values set to the nearest integer.
    #[inline]
    pub fn round(self) -> Self {
        let Self { x, y } = self;
        Self {
            x: x.round(),
            y: y.round(),
        }
    }
}
impl From<f32> for PxScale {
    /// Uniform scaling where x & y are the same.
    #[inline]
    fn from(s: f32) -> Self {
        PxScale { x: s, y: s }
    }
}
/// 2D scale factors for use with unscaled metrics.
///
/// See [`ScaleFont::scale_factor`].
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
pub struct PxScaleFactor {
    /// Multiplier for unscaled horizontal (x-axis) values.
    pub horizontal: f32,
    /// Multiplier for unscaled vertical (y-axis) values.
    pub vertical: f32,
}
/// A [`Font`](trait.Font.html) with an associated pixel scale. This can be used to provide
/// pixel scale values for glyph advances, heights etc.
///
/// # Example
/// ```
/// use ab_glyph::{Font, FontRef, PxScale, ScaleFont};
///
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
///
/// // Associate the font with a scale of 45px
/// let scaled_font = font.as_scaled(PxScale::from(45.0));
///
/// assert_eq!(scaled_font.height(), 45.0);
/// assert_eq!(scaled_font.h_advance(scaled_font.glyph_id('b')), 21.225);
///
/// // Replace associated scale with another
/// let scaled_font = scaled_font.with_scale(180.0);
///
/// assert_eq!(scaled_font.height(), 180.0);
/// assert_eq!(scaled_font.h_advance(scaled_font.glyph_id('b')), 84.9);
/// # Ok(()) }
/// ```
pub trait ScaleFont<F: Font> {
    /// Returns the pixel scale associated with this font.
    fn scale(&self) -> PxScale;

    /// Returns a font reference.
    fn font(&self) -> &F;

    /// Scale factor for unscaled font horizontal values.
    #[inline]
    fn h_scale_factor(&self) -> f32 {
        self.scale().x / self.font().height_unscaled()
    }

    /// Scale factor for unscaled font vertical values.
    #[inline]
    fn v_scale_factor(&self) -> f32 {
        self.scale().y / self.font().height_unscaled()
    }

    /// Horizontal & vertical scale factors combined as a [`PxScaleFactor`].
    #[inline]
    fn scale_factor(&self) -> PxScaleFactor {
        PxScaleFactor {
            horizontal: self.h_scale_factor(),
            vertical: self.v_scale_factor(),
        }
    }

    /// Pixel scaled glyph ascent.
    #[inline]
    fn ascent(&self) -> f32 {
        self.v_scale_factor() * self.font().ascent_unscaled()
    }

    /// Pixel scaled glyph descent.
    #[inline]
    fn descent(&self) -> f32 {
        self.v_scale_factor() * self.font().descent_unscaled()
    }

    /// Pixel scaled height `ascent - descent`.
    ///
    /// By definition of [`PxScale`], this is `self.scale().y`.
    #[inline]
    fn height(&self) -> f32 {
        self.scale().y
    }

    /// Pixel scaled line gap.
    #[inline]
    fn line_gap(&self) -> f32 {
        self.v_scale_factor() * self.font().line_gap_unscaled()
    }

    /// Lookup a `GlyphId` matching a given `char`.
    #[inline]
    fn glyph_id(&self, c: char) -> GlyphId {
        self.font().glyph_id(c)
    }

    /// Construct a [`Glyph`](struct.Glyph.html) with the font's pixel scale at
    /// position `point(0.0, 0.0)`.
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf")).unwrap();
    /// let scaled_font = font.as_scaled(50.0);
    ///
    /// let a1 = scaled_font.scaled_glyph('a');
    /// let a2 = font.glyph_id('a').with_scale(50.0); // equivalent
    ///
    /// # assert_eq!(a1.id, a2.id);
    /// assert_eq!(a1.scale, PxScale::from(50.0));
    /// assert_eq!(a1.position, point(0.0, 0.0));
    /// ```
    #[inline]
    fn scaled_glyph(&self, c: char) -> Glyph {
        self.font().glyph_id(c).with_scale(self.scale())
    }

    /// Pixel scaled horizontal advance for a given glyph.
    #[inline]
    fn h_advance(&self, id: GlyphId) -> f32 {
        self.h_scale_factor() * self.font().h_advance_unscaled(id)
    }

    /// Pixel scaled horizontal side bearing for a given glyph.
    #[inline]
    fn h_side_bearing(&self, id: GlyphId) -> f32 {
        self.h_scale_factor() * self.font().h_side_bearing_unscaled(id)
    }

    /// Pixel scaled vertical advance for a given glyph.
    #[inline]
    fn v_advance(&self, id: GlyphId) -> f32 {
        self.v_scale_factor() * self.font().v_advance_unscaled(id)
    }

    /// Pixel scaled vertical side bearing for a given glyph.
    #[inline]
    fn v_side_bearing(&self, id: GlyphId) -> f32 {
        self.v_scale_factor() * self.font().v_side_bearing_unscaled(id)
    }

    /// Returns additional pixel scaled kerning to apply for a particular pair of glyphs.
    #[inline]
    fn kern(&self, first: GlyphId, second: GlyphId) -> f32 {
        self.h_scale_factor() * self.font().kern_unscaled(first, second)
    }

    /// Returns the layout bounds of this glyph. These are different to the outline `px_bounds()`.
    ///
    /// Horizontally: Glyph position +/- h_advance/h_side_bearing.
    /// Vertically: Glyph position +/- ascent/descent.
    ///
    /// Note this method does not make use of the associated scale, as `Glyph`
    /// already includes one of its own.
    #[inline]
    fn glyph_bounds(&self, glyph: &Glyph) -> Rect {
        self.font().glyph_bounds(glyph)
    }

    /// The number of glyphs present in this font. Glyph identifiers for this
    /// font will always be in the range `0..self.glyph_count()`
    #[inline]
    fn glyph_count(&self) -> usize {
        self.font().glyph_count()
    }

    /// Returns an iterator of all distinct `(GlyphId, char)` pairs. Not ordered.
    ///
    /// Same as [`Font::codepoint_ids`](trait.Font.html#tymethod.codepoint_ids).
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_>;

    /// Compute glyph outline ready for drawing.
    ///
    /// Note this method does not make use of the associated scale, as `Glyph`
    /// already includes one of its own.
    #[inline]
    fn outline_glyph(&self, glyph: Glyph) -> Option<OutlinedGlyph> {
        self.font().outline_glyph(glyph)
    }
}
// Blanket impl so `&S` can be used wherever a `ScaleFont` is required.
// Each method delegates to the referenced implementation via `(*self)`.
impl<F: Font, SF: ScaleFont<F>> ScaleFont<F> for &SF {
    #[inline]
    fn scale(&self) -> PxScale {
        (*self).scale()
    }
    #[inline]
    fn font(&self) -> &F {
        (*self).font()
    }
    #[inline]
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_> {
        (*self).codepoint_ids()
    }
}
/// A [`Font`](trait.Font.html) and an associated pixel scale.
#[derive(Clone, Copy, Debug)]
pub struct PxScaleFont<F> {
    /// The underlying font.
    pub font: F,
    /// The pixel scale applied to scaled metrics.
    pub scale: PxScale,
}
impl<F> PxScaleFont<F> {
    /// Returns this scaled font with the associated scale replaced by `scale`.
    #[inline]
    pub fn with_scale<S: Into<PxScale>>(self, scale: S) -> Self {
        Self {
            scale: scale.into(),
            ..self
        }
    }
}
// The canonical `ScaleFont`: scaled metrics come from the stored `scale`
// applied to the stored `font`.
impl<F: Font> ScaleFont<F> for PxScaleFont<F> {
    #[inline]
    fn scale(&self) -> PxScale {
        self.scale
    }
    #[inline]
    fn font(&self) -> &F {
        &self.font
    }
    #[inline]
    fn codepoint_ids(&self) -> crate::CodepointIdIter<'_> {
        self.font.codepoint_ids()
    }
}

349
third-party/vendor/ab_glyph/src/ttfp.rs vendored Normal file
View file

@ -0,0 +1,349 @@
//! ttf-parser crate specific code. ttf-parser types should not be leaked publicly.
mod outliner;
#[cfg(feature = "variable-fonts")]
mod variable;
use crate::{point, v2, Font, GlyphId, GlyphImageFormat, InvalidFont, Outline, Rect};
use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::fmt;
use owned_ttf_parser::{self as ttfp, AsFaceRef};
// Convert a crate-level `GlyphId` into the equivalent ttf-parser id.
// Both are thin wrappers around the same raw index.
impl From<GlyphId> for ttfp::GlyphId {
    #[inline]
    fn from(id: GlyphId) -> Self {
        Self(id.0)
    }
}
/// Font data handle stored as a `&[u8]` + parsed data.
/// See [`Font`](trait.Font.html) for more methods.
///
/// Also see the owned version [`FontVec`](struct.FontVec.html).
///
/// # Example
/// ```
/// use ab_glyph::{Font, FontRef};
///
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
///
/// assert_eq!(font.glyph_id('s'), ab_glyph::GlyphId(56));
/// # Ok(()) }
/// ```
// NOTE(review): `PreParsedSubtables` appears to pre-parse cmap/kern subtables
// for faster `glyph_id`/`kern_unscaled` lookups (see the `Font` impl notes).
#[derive(Clone)]
pub struct FontRef<'font>(ttfp::PreParsedSubtables<'font, ttfp::Face<'font>>);
impl fmt::Debug for FontRef<'_> {
    /// Formats as the plain string `FontRef`, avoiding a dump of font data.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FontRef")
    }
}
impl<'font> FontRef<'font> {
    /// Creates a `FontRef` from a byte-slice.
    ///
    /// For font collections see
    /// [`FontRef::try_from_slice_and_index`](#method.try_from_slice_and_index).
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Exo2-Light.otf"))?;
    /// # Ok(()) }
    /// ```
    #[inline]
    pub fn try_from_slice(data: &'font [u8]) -> Result<Self, InvalidFont> {
        Self::try_from_slice_and_index(data, 0)
    }

    /// Creates a `FontRef` from a byte-slice.
    ///
    /// You can set index for font collections. For simple fonts use `0` or
    /// [`FontRef::try_from_slice`](#method.try_from_slice).
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// let font =
    ///     FontRef::try_from_slice_and_index(include_bytes!("../../dev/fonts/Exo2-Light.otf"), 0)?;
    /// # Ok(()) }
    /// ```
    #[inline]
    pub fn try_from_slice_and_index(data: &'font [u8], index: u32) -> Result<Self, InvalidFont> {
        // Any ttf-parser failure is surfaced as the crate's opaque `InvalidFont`.
        Ok(Self(ttfp::PreParsedSubtables::from(
            ttfp::Face::parse(data, index).map_err(|_| InvalidFont)?,
        )))
    }
}
/// Font data handle stored in a `Vec<u8>` + parsed data.
/// See [`Font`](trait.Font.html) for more methods.
///
/// Also see [`FontRef`](struct.FontRef.html).
///
/// # Example
/// ```
/// use ab_glyph::{Font, FontVec};
///
/// # fn main() -> Result<(), ab_glyph::InvalidFont> {
/// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
/// let font = FontVec::try_from_vec_and_index(owned_font_data, 0)?;
///
/// assert_eq!(font.glyph_id('s'), ab_glyph::GlyphId(56));
/// # Ok(()) }
/// ```
// Owned counterpart of `FontRef`: the face owns its backing byte vec.
pub struct FontVec(ttfp::PreParsedSubtables<'static, ttfp::OwnedFace>);
impl fmt::Debug for FontVec {
    /// Formats as the plain string `FontVec`, avoiding a dump of font data.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FontVec")
    }
}
impl FontVec {
    /// Creates a `FontVec` from owned data.
    ///
    /// For font collections see
    /// [`FontVec::try_from_vec_and_index`](#method.try_from_vec_and_index).
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
    /// let font = FontVec::try_from_vec(owned_font_data)?;
    /// # Ok(()) }
    /// ```
    #[inline]
    pub fn try_from_vec(data: Vec<u8>) -> Result<Self, InvalidFont> {
        Self::try_from_vec_and_index(data, 0)
    }

    /// Creates a `FontVec` from owned data.
    ///
    /// You can set index for font collections. For simple fonts use `0` or
    /// [`FontVec::try_from_vec`](#method.try_from_vec).
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
    /// let font = FontVec::try_from_vec_and_index(owned_font_data, 0)?;
    /// # Ok(()) }
    /// ```
    #[inline]
    pub fn try_from_vec_and_index(data: Vec<u8>, index: u32) -> Result<Self, InvalidFont> {
        // Any ttf-parser failure is surfaced as the crate's opaque `InvalidFont`.
        Ok(Self(ttfp::PreParsedSubtables::from(
            ttfp::OwnedFace::from_vec(data, index).map_err(|_| InvalidFont)?,
        )))
    }

    /// Extracts a slice containing the data passed into e.g. [`FontVec::try_from_vec`].
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
    /// let font_data_clone = owned_font_data.clone();
    /// let font = FontVec::try_from_vec(owned_font_data)?;
    /// assert_eq!(font.as_slice(), font_data_clone);
    /// # Ok(()) }
    /// ```
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        self.0.face.as_slice()
    }

    /// Unwraps the data passed into e.g. [`FontVec::try_from_vec`].
    ///
    /// # Example
    /// ```
    /// # use ab_glyph::*;
    /// # fn main() -> Result<(), InvalidFont> {
    /// # let owned_font_data = include_bytes!("../../dev/fonts/Exo2-Light.otf").to_vec();
    /// let font_data_clone = owned_font_data.clone();
    /// let font = FontVec::try_from_vec(owned_font_data)?;
    /// assert_eq!(font.into_vec(), font_data_clone);
    /// # Ok(()) }
    /// ```
    pub fn into_vec(self) -> Vec<u8> {
        self.0.face.into_vec()
    }
}
/// Implement `Font` for `Self(AsFontRef)` types.
///
/// Used for both `FontRef` & `FontVec`, which share the same
/// `PreParsedSubtables` inner layout.
macro_rules! impl_font {
    ($font:ty) => {
        impl Font for $font {
            #[inline]
            fn units_per_em(&self) -> Option<f32> {
                // TODO unwrap signature when making next breaking change
                Some(self.0.as_face_ref().units_per_em().into())
            }

            #[inline]
            fn ascent_unscaled(&self) -> f32 {
                self.0.as_face_ref().ascender().into()
            }

            #[inline]
            fn descent_unscaled(&self) -> f32 {
                self.0.as_face_ref().descender().into()
            }

            #[inline]
            fn line_gap_unscaled(&self) -> f32 {
                self.0.as_face_ref().line_gap().into()
            }

            #[inline]
            fn glyph_id(&self, c: char) -> GlyphId {
                // Note: Using `PreParsedSubtables` method for better performance.
                // Unmapped chars fall back to glyph index 0.
                let index = self.0.glyph_index(c).map(|id| id.0).unwrap_or(0);
                GlyphId(index)
            }

            #[inline]
            fn h_advance_unscaled(&self, id: GlyphId) -> f32 {
                self.0
                    .as_face_ref()
                    .glyph_hor_advance(id.into())
                    .unwrap_or_default()
                    .into()
            }

            #[inline]
            fn h_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
                self.0
                    .as_face_ref()
                    .glyph_hor_side_bearing(id.into())
                    .unwrap_or_default()
                    .into()
            }

            #[inline]
            fn v_advance_unscaled(&self, id: GlyphId) -> f32 {
                self.0
                    .as_face_ref()
                    .glyph_ver_advance(id.into())
                    .unwrap_or_default()
                    .into()
            }

            #[inline]
            fn v_side_bearing_unscaled(&self, id: GlyphId) -> f32 {
                self.0
                    .as_face_ref()
                    .glyph_ver_side_bearing(id.into())
                    .unwrap_or_default()
                    .into()
            }

            #[inline]
            fn kern_unscaled(&self, first: GlyphId, second: GlyphId) -> f32 {
                // Note: Using `PreParsedSubtables` method for better performance.
                self.0
                    .glyphs_hor_kerning(first.into(), second.into())
                    .map(f32::from)
                    .unwrap_or_default()
            }

            fn outline(&self, id: GlyphId) -> Option<Outline> {
                let mut outliner = outliner::OutlineCurveBuilder::default();
                let ttfp::Rect {
                    x_min,
                    x_max,
                    y_min,
                    y_max,
                } = self
                    .0
                    .as_face_ref()
                    .outline_glyph(id.into(), &mut outliner)
                    // invalid bounds are treated as having no outline
                    .filter(|b| b.x_min < b.x_max && b.y_min < b.y_max)?;
                let curves = outliner.take_outline();
                // Note: min stores (x_min, y_max) & max stores (x_max, y_min) —
                // the y components are deliberately swapped so `Rect::min` is the
                // top-left in the crate's coordinate convention.
                let bounds = Rect {
                    min: point(x_min.into(), y_max.into()),
                    max: point(x_max.into(), y_min.into()),
                };
                Some(Outline { bounds, curves })
            }

            #[inline]
            fn glyph_count(&self) -> usize {
                self.0.as_face_ref().number_of_glyphs() as _
            }

            fn codepoint_ids(&self) -> crate::CodepointIdIter<'_> {
                let face_ref = self.0.as_face_ref();

                // Track glyph indices already yielded so each `GlyphId` appears
                // at most once across all unicode cmap subtables.
                #[cfg(feature = "std")]
                let mut used_indices =
                    std::collections::HashSet::with_capacity(face_ref.number_of_glyphs() as _);
                #[cfg(not(feature = "std"))]
                let mut used_indices = alloc::collections::BTreeSet::new();

                let inner = Box::new(
                    face_ref
                        .tables()
                        .cmap
                        .iter()
                        .flat_map(|c| c.subtables)
                        .filter(|s| s.is_unicode())
                        .flat_map(move |subtable| {
                            let mut pairs = Vec::new();
                            subtable.codepoints(|c| {
                                // Skip codepoints that aren't valid `char`s and
                                // the index-0 "missing glyph".
                                if let Ok(ch) = char::try_from(c) {
                                    if let Some(idx) = subtable.glyph_index(c).filter(|i| i.0 > 0) {
                                        if used_indices.insert(idx.0) {
                                            pairs.push((GlyphId(idx.0), ch));
                                        }
                                    }
                                }
                            });
                            pairs
                        }),
                );
                crate::CodepointIdIter { inner }
            }

            fn glyph_raster_image2(&self, id: GlyphId, size: u16) -> Option<v2::GlyphImage> {
                use GlyphImageFormat::*;
                let img = self.0.as_face_ref().glyph_raster_image(id.into(), size)?;
                Some(v2::GlyphImage {
                    origin: point(img.x.into(), img.y.into()),
                    width: img.width,
                    height: img.height,
                    pixels_per_em: img.pixels_per_em,
                    data: img.data,
                    // Map ttf-parser's format enum onto the crate's own.
                    format: match img.format {
                        ttfp::RasterImageFormat::PNG => Png,
                        ttfp::RasterImageFormat::BitmapMono => BitmapMono,
                        ttfp::RasterImageFormat::BitmapMonoPacked => BitmapMonoPacked,
                        ttfp::RasterImageFormat::BitmapGray2 => BitmapGray2,
                        ttfp::RasterImageFormat::BitmapGray2Packed => BitmapGray2Packed,
                        ttfp::RasterImageFormat::BitmapGray4 => BitmapGray4,
                        ttfp::RasterImageFormat::BitmapGray4Packed => BitmapGray4Packed,
                        ttfp::RasterImageFormat::BitmapGray8 => BitmapGray8,
                        ttfp::RasterImageFormat::BitmapPremulBgra32 => BitmapPremulBgra32,
                    },
                })
            }
        }
    };
}
impl_font!(FontRef<'_>);
impl_font!(FontVec);

View file

@ -0,0 +1,65 @@
use crate::{point, OutlineCurve, Point};
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
/// Accumulates glyph outline curves fed in by ttf-parser's outlining callbacks.
#[derive(Debug, Default)]
pub(crate) struct OutlineCurveBuilder {
    // End point of the most recently pushed curve / latest `move_to` target.
    last: Point,
    // Start point of the currently open contour, if any; consumed by `close`.
    last_move: Option<Point>,
    // Curves collected so far.
    outline: Vec<OutlineCurve>,
}
impl OutlineCurveBuilder {
    /// Finishes building & returns the accumulated outline curves.
    #[inline]
    pub(crate) fn take_outline(mut self) -> Vec<OutlineCurve> {
        // some font glyphs implicitly close, e.g. Cantarell-VF.otf
        owned_ttf_parser::OutlineBuilder::close(&mut self);
        self.outline
    }
}
impl owned_ttf_parser::OutlineBuilder for OutlineCurveBuilder {
    /// Starts a new contour at `(x, y)`.
    #[inline]
    fn move_to(&mut self, x: f32, y: f32) {
        let start = point(x, y);
        self.last = start;
        self.last_move = Some(start);
    }

    /// Appends a straight line from the previous point to `(x1, y1)`.
    #[inline]
    fn line_to(&mut self, x1: f32, y1: f32) {
        let to = point(x1, y1);
        self.outline.push(OutlineCurve::Line(self.last, to));
        self.last = to;
    }

    /// Appends a quadratic bézier from the previous point to `(x2, y2)`.
    #[inline]
    fn quad_to(&mut self, x1: f32, y1: f32, x2: f32, y2: f32) {
        let ctrl = point(x1, y1);
        let to = point(x2, y2);
        self.outline.push(OutlineCurve::Quad(self.last, ctrl, to));
        self.last = to;
    }

    /// Appends a cubic bézier from the previous point to `(x3, y3)`.
    #[inline]
    fn curve_to(&mut self, x1: f32, y1: f32, x2: f32, y2: f32, x3: f32, y3: f32) {
        let ctrl_a = point(x1, y1);
        let ctrl_b = point(x2, y2);
        let to = point(x3, y3);
        self.outline
            .push(OutlineCurve::Cubic(self.last, ctrl_a, ctrl_b, to));
        self.last = to;
    }

    /// Closes the open contour, if any, with a line back to its start point.
    #[inline]
    fn close(&mut self) {
        if let Some(start) = self.last_move.take() {
            self.outline.push(OutlineCurve::Line(self.last, start));
        }
    }
}

View file

@ -0,0 +1,63 @@
use crate::{FontRef, FontVec, VariableFont, VariationAxis};
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use owned_ttf_parser::{self as ttfp, AsFaceRef, FaceMut};
impl VariableFont for FontRef<'_> {
    fn set_variation(&mut self, axis: &[u8; 4], value: f32) -> bool {
        let tag = ttfp::Tag::from_bytes(axis);
        // TODO remove existence check in next breaking version
        // Check the face actually has this axis first, so the returned bool
        // matches the trait's documented contract.
        let exists = self
            .0
            .as_face_ref()
            .variation_axes()
            .into_iter()
            .any(|axis| axis.tag == tag);
        if exists {
            self.0.set_variation(tag, value);
        }
        exists
    }

    fn variations(&self) -> Vec<VariationAxis> {
        variations(self.0.as_face_ref())
    }
}
impl VariableFont for FontVec {
    fn set_variation(&mut self, axis: &[u8; 4], value: f32) -> bool {
        // Success is signalled by `FaceMut::set_variation` returning `Some`.
        self.0
            .set_variation(ttfp::Tag::from_bytes(axis), value)
            .is_some()
    }

    fn variations(&self) -> Vec<VariationAxis> {
        variations(self.0.as_face_ref())
    }
}
/// Collects the face's variation axes into crate-level [`VariationAxis`] values.
fn variations(face: &ttfp::Face<'_>) -> Vec<VariationAxis> {
    face.variation_axes()
        .into_iter()
        .map(|axis| {
            // Resolve the axis' human-readable name from the name table;
            // only available with the `std` feature.
            #[cfg(feature = "std")]
            let name = face.names().into_iter().find_map(|n| {
                if n.name_id == axis.name_id {
                    n.to_string()
                } else {
                    None
                }
            });
            #[cfg(not(feature = "std"))]
            let name = None;

            VariationAxis {
                tag: axis.tag.to_bytes(),
                name,
                min_value: axis.min_value,
                default_value: axis.def_value,
                max_value: axis.max_value,
                hidden: axis.hidden,
            }
        })
        .collect()
}

View file

@ -0,0 +1,67 @@
#[cfg(not(feature = "std"))]
use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
/// Logic for variable fonts.
///
/// Requires feature `variable-fonts` (enabled by default).
pub trait VariableFont {
    /// Sets a variation axis coordinate value by its tag.
    ///
    /// Returns false if there is no such axis tag.
    ///
    /// # Example
    /// ```
    /// use ab_glyph::{FontRef, VariableFont};
    ///
    /// # fn main() -> Result<(), ab_glyph::InvalidFont> {
    /// let mut font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Cantarell-VF.otf"))?;
    ///
    /// // set weight to 600
    /// assert!(font.set_variation(b"wght", 600.0));
    ///
    /// // no such variation tag "foob" so return false
    /// assert!(!font.set_variation(b"foob", 200.0));
    /// # Ok(()) }
    /// ```
    fn set_variation(&mut self, tag: &[u8; 4], value: f32) -> bool;

    /// Returns variation axes.
    ///
    /// # Example
    /// ```
    /// use ab_glyph::{FontRef, VariableFont};
    ///
    /// # fn main() -> Result<(), ab_glyph::InvalidFont> {
    /// let font = FontRef::try_from_slice(include_bytes!("../../dev/fonts/Cantarell-VF.otf"))?;
    /// let var = &font.variations()[0];
    /// # eprintln!("{var:#?}");
    ///
    /// assert_eq!(var.tag, *b"wght");
    /// assert_eq!(var.name.as_deref(), Some("Weight"));
    /// assert!((var.min_value - 100.0).abs() < f32::EPSILON);
    /// assert!((var.default_value - 400.0).abs() < f32::EPSILON);
    /// assert!((var.max_value - 800.0).abs() < f32::EPSILON);
    /// assert!(!var.hidden);
    /// # Ok(()) }
    /// ```
    fn variations(&self) -> Vec<VariationAxis>;
}
/// A single variation axis of a variable font, e.g. weight `wght`.
#[non_exhaustive]
#[derive(Debug, Clone)]
pub struct VariationAxis {
    /// Tag identifying the design variation for the axis.
    pub tag: [u8; 4],
    /// Unicode name.
    pub name: Option<String>,
    /// The minimum coordinate value for the axis.
    pub min_value: f32,
    /// The default coordinate value for the axis.
    pub default_value: f32,
    /// The maximum coordinate value for the axis.
    pub max_value: f32,
    /// Whether the axis should be exposed directly in user interfaces.
    pub hidden: bool,
}

View file

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"f40c8fae65d889f504d619983bbab8cbc370995049ba3077acc2ba01f8116e89","Cargo.toml":"4fbf6705c2a5a23335605339c159208d1a376189693e645a6b0a30f9885be328","LICENSE":"15c517f38838988aa9f990a1e6bdfc0f20fd1aa1dcb710bb994669d098d73e70","README.md":"59162e54bce67b4181f793866a73e4906b3cb4b45f3487f045aca2ce06611a80","src/geometry.rs":"8d970a944af7199ac6a42ace5d1ca661f7764d10a2af0eb09f7b356547f80cf8","src/lib.rs":"32f718b6be690d4d22fa60bf2d2f3b73f645e293a12f0e7c969c7ff2ac2f0a54","src/nostd_float.rs":"425e4f7a3c20213d561a376a09cb75a37ba3989b42e1700a3b15f642ccb99918","src/raster.rs":"9cb90f50a5a915e17f3ea46efd4cd1cf748ec94dc5307878643dd315baa79663","tests/issues.rs":"dff1f0f9992a49a71b3ac4e298033fe9687194a7948bdf29b110daa1ccc99790"},"package":"c71b1793ee61086797f5c80b6efa2b8ffa6d5dd703f118545808a7f2e27f7046"}

View file

@ -0,0 +1,29 @@
# 0.1.8
* Do SIMD runtime detection only once on the first `Rasterizer::new` instead of on each.
# 0.1.7
* Fix x86, x86_64 no_std builds, require `std` feature for runtime detected SIMD.
# 0.1.6
* Add runtime detected AVX2 or SSE4.2 line drawing. Improves performance on compatible x86_64 CPUs.
# 0.1.5
* Remove cap of `1.0` for coverage values returned by `for_each_pixel`; `>= 1.0` now means fully covered.
  This allows a minor reduction in operations / performance boost.
# 0.1.4
* Add `Rasterizer::reset`, `Rasterizer::clear` methods to allow allocation reuse.
# 0.1.3
* Fix index oob panic scenario.
# 0.1.2
* For `Point` implement `Sub`, `Add`, `SubAssign`, `AddAssign`, `PartialEq`, `PartialOrd`, `From<(x, y)>`,
`From<[x, y]>` for easier use downstream.
* Switch `Point` `Debug` implementation to output `point(1.2, 3.4)` smaller representation referring to the `point` fn.
# 0.1.1
* Add explicit compile error when building no_std without the "libm" feature.
# 0.1
* Implement zero dependency coverage rasterization for lines, quadratic & cubic beziers.

View file

@ -0,0 +1,36 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "ab_glyph_rasterizer"
version = "0.1.8"
authors = ["Alex Butler <alexheretic@gmail.com>"]
description = "Coverage rasterization for lines, quadratic & cubic beziers"
readme = "README.md"
keywords = [
"text",
"ttf",
"otf",
"font",
]
license = "Apache-2.0"
repository = "https://github.com/alexheretic/ab-glyph"
[dependencies.libm]
version = "0.2.1"
optional = true
[dev-dependencies]
[features]
default = ["std"]
std = []

View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2020 Alex Butler
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,51 @@
ab_glyph_rasterizer
[![crates.io](https://img.shields.io/crates/v/ab_glyph_rasterizer.svg)](https://crates.io/crates/ab_glyph_rasterizer)
[![Documentation](https://docs.rs/ab_glyph_rasterizer/badge.svg)](https://docs.rs/ab_glyph_rasterizer)
===================
Coverage rasterization for lines, quadratic & cubic beziers.
Useful for drawing .otf font glyphs.
Inspired by [font-rs](https://github.com/raphlinus/font-rs) &
[stb_truetype](https://github.com/nothings/stb/blob/master/stb_truetype.h).
## Example
```rust
let mut rasterizer = ab_glyph_rasterizer::Rasterizer::new(106, 183);
// draw a 300px 'ę' character
rasterizer.draw_cubic(point(103.0, 163.5), point(86.25, 169.25), point(77.0, 165.0), point(82.25, 151.5));
rasterizer.draw_cubic(point(82.25, 151.5), point(86.75, 139.75), point(94.0, 130.75), point(102.0, 122.0));
rasterizer.draw_line(point(102.0, 122.0), point(100.25, 111.25));
rasterizer.draw_cubic(point(100.25, 111.25), point(89.0, 112.75), point(72.75, 114.25), point(58.5, 114.25));
rasterizer.draw_cubic(point(58.5, 114.25), point(30.75, 114.25), point(18.5, 105.25), point(16.75, 72.25));
rasterizer.draw_line(point(16.75, 72.25), point(77.0, 72.25));
rasterizer.draw_cubic(point(77.0, 72.25), point(97.0, 72.25), point(105.25, 60.25), point(104.75, 38.5));
rasterizer.draw_cubic(point(104.75, 38.5), point(104.5, 13.5), point(89.0, 0.75), point(54.25, 0.75));
rasterizer.draw_cubic(point(54.25, 0.75), point(16.0, 0.75), point(0.0, 16.75), point(0.0, 64.0));
rasterizer.draw_cubic(point(0.0, 64.0), point(0.0, 110.5), point(16.0, 128.0), point(56.5, 128.0));
rasterizer.draw_cubic(point(56.5, 128.0), point(66.0, 128.0), point(79.5, 127.0), point(90.0, 125.0));
rasterizer.draw_cubic(point(90.0, 125.0), point(78.75, 135.25), point(73.25, 144.5), point(70.75, 152.0));
rasterizer.draw_cubic(point(70.75, 152.0), point(64.5, 169.0), point(75.5, 183.0), point(105.0, 170.5));
rasterizer.draw_line(point(105.0, 170.5), point(103.0, 163.5));
rasterizer.draw_cubic(point(55.0, 14.5), point(78.5, 14.5), point(88.5, 21.75), point(88.75, 38.75));
rasterizer.draw_cubic(point(88.75, 38.75), point(89.0, 50.75), point(85.75, 59.75), point(73.5, 59.75));
rasterizer.draw_line(point(73.5, 59.75), point(16.5, 59.75));
rasterizer.draw_cubic(point(16.5, 59.75), point(17.25, 25.5), point(27.0, 14.5), point(55.0, 14.5));
rasterizer.draw_line(point(55.0, 14.5), point(55.0, 14.5));
// iterate over the resultant pixel alphas, e.g. save pixel to a buffer
rasterizer.for_each_pixel(|index, alpha| {
// ...
});
```
Rendering the resultant pixel alphas as 8-bit grey produces:
![reference_otf_tailed_e](https://user-images.githubusercontent.com/2331607/78987793-ee95f480-7b26-11ea-91fb-e9f359d766f8.png)
## no_std
no_std environments are supported using `alloc` & [`libm`](https://github.com/rust-lang/libm).
```toml
ab_glyph_rasterizer = { default-features = false, features = ["libm"] }
```

View file

@ -0,0 +1,148 @@
#[cfg(all(feature = "libm", not(feature = "std")))]
use crate::nostd_float::FloatExt;
/// An (x, y) coordinate.
///
/// # Example
/// ```
/// use ab_glyph_rasterizer::{point, Point};
/// let p: Point = point(0.1, 23.2);
/// ```
#[derive(Clone, Copy, Default, PartialEq, PartialOrd)]
pub struct Point {
    // Horizontal coordinate.
    pub x: f32,
    // Vertical coordinate.
    pub y: f32,
}
impl core::fmt::Debug for Point {
    /// Formats as `point(x, y)`, mirroring the `point` constructor fn.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let Self { x, y } = self;
        write!(f, "point({:?}, {:?})", x, y)
    }
}
impl Point {
    /// Euclidean distance from `self` to `other`.
    #[inline]
    pub(crate) fn distance_to(self, other: Point) -> f32 {
        let dx = other.x - self.x;
        let dy = other.y - self.y;
        (dx * dx + dy * dy).sqrt()
    }
}
/// [`Point`](struct.Point.html) constructor.
///
/// # Example
/// ```
/// # use ab_glyph_rasterizer::{point, Point};
/// let p = point(0.1, 23.2);
/// ```
#[inline]
pub fn point(x: f32, y: f32) -> Point {
    Point { y, x }
}
/// Linear interpolation between points.
#[inline]
pub(crate) fn lerp(t: f32, p0: Point, p1: Point) -> Point {
point(p0.x + t * (p1.x - p0.x), p0.y + t * (p1.y - p0.y))
}
impl core::ops::Sub for Point {
type Output = Point;
/// Subtract rhs.x from x, rhs.y from y.
///
/// ```
/// # use ab_glyph_rasterizer::*;
/// let p1 = point(1.0, 2.0) - point(2.0, 1.5);
///
/// assert!((p1.x - -1.0).abs() <= core::f32::EPSILON);
/// assert!((p1.y - 0.5).abs() <= core::f32::EPSILON);
/// ```
#[inline]
fn sub(self, rhs: Point) -> Point {
point(self.x - rhs.x, self.y - rhs.y)
}
}
impl core::ops::Add for Point {
type Output = Point;
/// Add rhs.x to x, rhs.y to y.
///
/// ```
/// # use ab_glyph_rasterizer::*;
/// let p1 = point(1.0, 2.0) + point(2.0, 1.5);
///
/// assert!((p1.x - 3.0).abs() <= core::f32::EPSILON);
/// assert!((p1.y - 3.5).abs() <= core::f32::EPSILON);
/// ```
#[inline]
fn add(self, rhs: Point) -> Point {
point(self.x + rhs.x, self.y + rhs.y)
}
}
impl core::ops::AddAssign for Point {
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// let mut p1 = point(1.0, 2.0);
    /// p1 += point(2.0, 1.5);
    ///
    /// assert!((p1.x - 3.0).abs() <= core::f32::EPSILON);
    /// assert!((p1.y - 3.5).abs() <= core::f32::EPSILON);
    /// ```
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        let Self { x, y } = rhs;
        self.x += x;
        self.y += y;
    }
}
impl core::ops::SubAssign for Point {
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// let mut p1 = point(1.0, 2.0);
    /// p1 -= point(2.0, 1.5);
    ///
    /// assert!((p1.x - -1.0).abs() <= core::f32::EPSILON);
    /// assert!((p1.y - 0.5).abs() <= core::f32::EPSILON);
    /// ```
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        let Self { x, y } = rhs;
        self.x -= x;
        self.y -= y;
    }
}
impl<F: Into<f32>> From<(F, F)> for Point {
/// ```
/// # use ab_glyph_rasterizer::*;
/// let p: Point = (23_f32, 34.5_f32).into();
/// let p2: Point = (5u8, 44u8).into();
/// ```
#[inline]
fn from((x, y): (F, F)) -> Self {
point(x.into(), y.into())
}
}
impl<F: Into<f32>> From<[F; 2]> for Point {
/// ```
/// # use ab_glyph_rasterizer::*;
/// let p: Point = [23_f32, 34.5].into();
/// let p2: Point = [5u8, 44].into();
/// ```
#[inline]
fn from([x, y]: [F; 2]) -> Self {
point(x.into(), y.into())
}
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn distance_to() {
        // 3-4-5 right triangle.
        let d = point(0.0, 0.0).distance_to(point(3.0, 4.0));
        assert!((d - 5.0).abs() <= core::f32::EPSILON);
    }
}

View file

@ -0,0 +1,36 @@
//! Coverage rasterization for lines, quadratic & cubic beziers.
//! Useful for drawing .otf font glyphs.
//!
//! ```
//! use ab_glyph_rasterizer::Rasterizer;
//! # let (width, height) = (1, 1);
//! let mut rasterizer = Rasterizer::new(width, height);
//!
//! // draw outlines
//! # let [l0, l1, q0, q1, q2, c0, c1, c2, c3] = [ab_glyph_rasterizer::point(0.0, 0.0); 9];
//! rasterizer.draw_line(l0, l1);
//! rasterizer.draw_quad(q0, q1, q2);
//! rasterizer.draw_cubic(c0, c1, c2, c3);
//!
//! // iterate over the resultant pixel alphas, e.g. save pixel to a buffer
//! rasterizer.for_each_pixel(|index, alpha| {
//! // ...
//! });
//! ```
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
#[cfg(all(feature = "libm", not(feature = "std")))]
mod nostd_float;
#[cfg(not(any(feature = "libm", feature = "std")))]
compile_error!("You need to activate either the `std` or `libm` feature.");
mod geometry;
mod raster;
pub use geometry::{point, Point};
pub use raster::Rasterizer;

View file

@ -0,0 +1,31 @@
/// Basic required float operations.
///
/// Mirrors the subset of `std` float methods this crate uses so that
/// `no_std` builds can route them through `libm` instead.
pub(crate) trait FloatExt {
    /// Largest integer-valued float `<= self`.
    fn floor(self) -> Self;
    /// Smallest integer-valued float `>= self`.
    fn ceil(self) -> Self;
    /// Square root.
    fn sqrt(self) -> Self;
    /// Nearest integer-valued float.
    fn round(self) -> Self;
    /// Absolute value.
    fn abs(self) -> Self;
}
// Route every operation through the matching `libm` single-precision routine.
impl FloatExt for f32 {
    #[inline]
    fn abs(self) -> Self {
        libm::fabsf(self)
    }
    #[inline]
    fn ceil(self) -> Self {
        libm::ceilf(self)
    }
    #[inline]
    fn floor(self) -> Self {
        libm::floorf(self)
    }
    #[inline]
    fn round(self) -> Self {
        libm::roundf(self)
    }
    #[inline]
    fn sqrt(self) -> Self {
        libm::sqrtf(self)
    }
}

View file

@ -0,0 +1,338 @@
// Forked/repurposed from `font-rs` code: https://github.com/raphlinus/font-rs
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Modifications copyright (C) 2020 Alex Butler
//
// Cubic bezier drawing adapted from stb_truetype: https://github.com/nothings/stb
#[cfg(all(feature = "libm", not(feature = "std")))]
use crate::nostd_float::FloatExt;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use crate::geometry::{lerp, Point};
// Function-pointer type for a line drawing routine. `unsafe` because the
// SIMD variants carry `#[target_feature]` and must only be installed after
// runtime CPU feature detection (see `optimal_draw_line_fn`).
type DrawLineFn = unsafe fn(&mut Rasterizer, Point, Point);
/// Coverage rasterizer for lines, quadratic & cubic beziers.
pub struct Rasterizer {
    // Grid width in pixels.
    width: usize,
    // Grid height in pixels.
    height: usize,
    // Per-cell signed coverage *deltas* (sized `width * height + 4`);
    // prefix-summed into alpha values by `for_each_pixel`.
    a: Vec<f32>,
    // Line drawing impl selected once at construction (scalar or SIMD).
    draw_line_fn: DrawLineFn,
}
impl Rasterizer {
    /// Allocates a new rasterizer that can draw onto a `width` x `height` alpha grid.
    ///
    /// ```
    /// use ab_glyph_rasterizer::Rasterizer;
    /// let mut rasterizer = Rasterizer::new(14, 38);
    /// ```
    pub fn new(width: usize, height: usize) -> Self {
        Self {
            width,
            height,
            // `+ 4` slack cells — presumably to absorb the few-cells-past-row
            // writes `draw_line_scalar` can make at the right edge; TODO confirm.
            a: vec![0.0; width * height + 4],
            draw_line_fn: optimal_draw_line_fn(),
        }
    }
    /// Resets the rasterizer to an empty `width` x `height` alpha grid. This method behaves as if
    /// the Rasterizer were re-created, with the advantage of not allocating if the total number of
    /// pixels of the grid does not increase.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::Rasterizer;
    /// # let mut rasterizer = Rasterizer::new(14, 38);
    /// rasterizer.reset(12, 24);
    /// assert_eq!(rasterizer.dimensions(), (12, 24));
    /// ```
    pub fn reset(&mut self, width: usize, height: usize) {
        self.width = width;
        self.height = height;
        // truncate-then-resize zeroes every cell while reusing the existing
        // allocation whenever the new grid is no larger than the old one.
        self.a.truncate(0);
        self.a.resize(width * height + 4, 0.0);
    }
    /// Clears the rasterizer. This method behaves as if the Rasterizer were re-created with the same
    /// dimensions, but does not perform an allocation.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::Rasterizer;
    /// # let mut rasterizer = Rasterizer::new(14, 38);
    /// rasterizer.clear();
    /// ```
    pub fn clear(&mut self) {
        // Zero the coverage buffer in place; dimensions & capacity untouched.
        for px in &mut self.a {
            *px = 0.0;
        }
    }
    /// Returns the dimensions the rasterizer was built to draw to.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// let rasterizer = Rasterizer::new(9, 8);
    /// assert_eq!((9, 8), rasterizer.dimensions());
    /// ```
    pub fn dimensions(&self) -> (usize, usize) {
        (self.width, self.height)
    }
    /// Adds a straight line from `p0` to `p1` to the outline.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// # let mut rasterizer = Rasterizer::new(9, 8);
    /// rasterizer.draw_line(point(0.0, 0.48), point(1.22, 0.48));
    /// ```
    pub fn draw_line(&mut self, p0: Point, p1: Point) {
        // SAFETY: `draw_line_fn` was chosen by `optimal_draw_line_fn`, which
        // only installs a `#[target_feature]` variant after runtime detection
        // confirmed the CPU supports it.
        unsafe { (self.draw_line_fn)(self, p0, p1) }
    }
    #[inline(always)] // must inline for simd versions
    fn draw_line_scalar(&mut self, p0: Point, p1: Point) {
        // Horizontal lines contribute no signed vertical coverage; skip.
        if (p0.y - p1.y).abs() <= core::f32::EPSILON {
            return;
        }
        // Orient the segment top-to-bottom; `dir` keeps the original winding
        // sign so opposite-direction edges cancel in the accumulator.
        let (dir, p0, p1) = if p0.y < p1.y {
            (1.0, p0, p1)
        } else {
            (-1.0, p1, p0)
        };
        // Horizontal advance per unit of vertical advance.
        let dxdy = (p1.x - p0.x) / (p1.y - p0.y);
        let mut x = p0.x;
        let y0 = p0.y as usize; // note: implicit max of 0 because usize
        if p0.y < 0.0 {
            // Clip to the top edge: advance x to where the line crosses y = 0.
            x -= p0.y * dxdy;
        }
        // For each scanline the segment crosses, distribute the signed
        // coverage delta `d` over the horizontally touched cells.
        for y in y0..self.height.min(p1.y.ceil() as usize) {
            let linestart = y * self.width;
            // Vertical extent of the segment within this scanline (<= 1.0).
            let dy = ((y + 1) as f32).min(p1.y) - (y as f32).max(p0.y);
            let xnext = x + dxdy * dy;
            let d = dy * dir;
            // Left/right bounds of the horizontal span on this scanline.
            let (x0, x1) = if x < xnext { (x, xnext) } else { (xnext, x) };
            let x0floor = x0.floor();
            let x0i = x0floor as i32;
            let x1ceil = x1.ceil();
            let x1i = x1ceil as i32;
            if x1i <= x0i + 1 {
                // Span touches at most two cells: split `d` about the mean x.
                let xmf = 0.5 * (x + xnext) - x0floor;
                let linestart_x0i = linestart as isize + x0i as isize;
                if linestart_x0i < 0 {
                    continue; // oob index
                }
                self.a[linestart_x0i as usize] += d - d * xmf;
                self.a[linestart_x0i as usize + 1] += d * xmf;
            } else {
                // Span crosses several cells: per-cell trapezoidal coverage,
                // with partial areas `a0`/`am` for the first/last cells and a
                // constant slope contribution `s` for interior cells.
                let s = (x1 - x0).recip();
                let x0f = x0 - x0floor;
                let a0 = 0.5 * s * (1.0 - x0f) * (1.0 - x0f);
                let x1f = x1 - x1ceil + 1.0;
                let am = 0.5 * s * x1f * x1f;
                let linestart_x0i = linestart as isize + x0i as isize;
                if linestart_x0i < 0 {
                    continue; // oob index
                }
                self.a[linestart_x0i as usize] += d * a0;
                if x1i == x0i + 2 {
                    // Exactly three cells: middle cell takes the remainder.
                    self.a[linestart_x0i as usize + 1] += d * (1.0 - a0 - am);
                } else {
                    let a1 = s * (1.5 - x0f);
                    self.a[linestart_x0i as usize + 1] += d * (a1 - a0);
                    for xi in x0i + 2..x1i - 1 {
                        self.a[linestart + xi as usize] += d * s;
                    }
                    // Remainder for the cell just before the last one.
                    let a2 = a1 + (x1i - x0i - 3) as f32 * s;
                    self.a[linestart + (x1i - 1) as usize] += d * (1.0 - a2 - am);
                }
                self.a[linestart + x1i as usize] += d * am;
            }
            x = xnext;
        }
    }
    /// Adds a quadratic Bézier curve from `p0` to `p2` to the outline using `p1` as the control.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// # let mut rasterizer = Rasterizer::new(14, 38);
    /// rasterizer.draw_quad(point(6.2, 34.5), point(7.2, 34.5), point(9.2, 34.0));
    /// ```
    pub fn draw_quad(&mut self, p0: Point, p1: Point, p2: Point) {
        // Second difference of the control polygon: measures how far the
        // curve deviates from the straight chord p0 -> p2.
        let devx = p0.x - 2.0 * p1.x + p2.x;
        let devy = p0.y - 2.0 * p1.y + p2.y;
        let devsq = devx * devx + devy * devy;
        if devsq < 0.333 {
            // Flat enough: a single line segment suffices.
            self.draw_line(p0, p2);
            return;
        }
        // Flatten into `n` segments; `n` grows with the curve's deviation
        // (~ fourth root of devsq, scaled by `tol`).
        let tol = 3.0;
        let n = 1 + (tol * devsq).sqrt().sqrt().floor() as usize;
        let mut p = p0;
        let nrecip = (n as f32).recip();
        let mut t = 0.0;
        for _i in 0..n - 1 {
            t += nrecip;
            // Evaluate the quadratic at `t` via repeated lerp (de Casteljau).
            let pn = lerp(t, lerp(t, p0, p1), lerp(t, p1, p2));
            self.draw_line(p, pn);
            p = pn;
        }
        // Final segment lands exactly on the endpoint.
        self.draw_line(p, p2);
    }
    /// Adds a cubic Bézier curve from `p0` to `p3` to the outline using `p1` as the control
    /// at the beginning of the curve and `p2` at the end of the curve.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// # let mut rasterizer = Rasterizer::new(12, 20);
    /// rasterizer.draw_cubic(
    ///     point(10.3, 16.4),
    ///     point(8.6, 16.9),
    ///     point(7.7, 16.5),
    ///     point(8.2, 15.2),
    /// );
    /// ```
    pub fn draw_cubic(&mut self, p0: Point, p1: Point, p2: Point, p3: Point) {
        self.tesselate_cubic(p0, p1, p2, p3, 0);
    }
    // stb_truetype style cubic approximation by lines.
    //
    // Recursively splits the curve at t = 0.5 until it is flat enough (or the
    // recursion limit is hit), then draws the remaining piece as a line.
    fn tesselate_cubic(&mut self, p0: Point, p1: Point, p2: Point, p3: Point, n: u8) {
        // ...I'm not sure either ¯\_(ツ)_/¯
        const OBJSPACE_FLATNESS: f32 = 0.35;
        const OBJSPACE_FLATNESS_SQUARED: f32 = OBJSPACE_FLATNESS * OBJSPACE_FLATNESS;
        const MAX_RECURSION_DEPTH: u8 = 16;
        // Flatness heuristic: control-polygon length vs chord length — their
        // squared difference shrinks as the curve approaches a straight line.
        let longlen = p0.distance_to(p1) + p1.distance_to(p2) + p2.distance_to(p3);
        let shortlen = p0.distance_to(p3);
        let flatness_squared = longlen * longlen - shortlen * shortlen;
        if n < MAX_RECURSION_DEPTH && flatness_squared > OBJSPACE_FLATNESS_SQUARED {
            // de Casteljau split at t = 0.5 into two half-curves.
            let p01 = lerp(0.5, p0, p1);
            let p12 = lerp(0.5, p1, p2);
            let p23 = lerp(0.5, p2, p3);
            let pa = lerp(0.5, p01, p12);
            let pb = lerp(0.5, p12, p23);
            let mp = lerp(0.5, pa, pb);
            self.tesselate_cubic(p0, p01, pa, mp, n + 1);
            self.tesselate_cubic(mp, pb, p23, p3, n + 1);
        } else {
            self.draw_line(p0, p3);
        }
    }
    /// Run a callback for each pixel `index` & `alpha`, with indices in `0..width * height`.
    ///
    /// An `alpha` coverage value of `0.0` means the pixel is not covered at all by the glyph,
    /// whereas a value of `1.0` (or greater) means the pixel is totally covered.
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// # let (width, height) = (1, 1);
    /// # let mut rasterizer = Rasterizer::new(width, height);
    /// let mut pixels = vec![0u8; width * height];
    /// rasterizer.for_each_pixel(|index, alpha| {
    ///     pixels[index] = (alpha * 255.0) as u8;
    /// });
    /// ```
    pub fn for_each_pixel<O: FnMut(usize, f32)>(&self, mut px_fn: O) {
        // The buffer holds per-cell coverage *deltas*; a running prefix sum
        // over the whole grid reconstructs each pixel's accumulated alpha.
        let mut acc = 0.0;
        self.a[..self.width * self.height]
            .iter()
            .enumerate()
            .for_each(|(idx, c)| {
                acc += c;
                // abs() folds negative winding into positive coverage.
                px_fn(idx, acc.abs());
            });
    }
    /// Run a callback for each pixel x position, y position & alpha.
    ///
    /// Convenience wrapper for [`Rasterizer::for_each_pixel`].
    ///
    /// ```
    /// # use ab_glyph_rasterizer::*;
    /// # let mut rasterizer = Rasterizer::new(1, 1);
    /// # struct Img;
    /// # impl Img { fn set_pixel(&self, x: u32, y: u32, a: u8) {} }
    /// # let image = Img;
    /// rasterizer.for_each_pixel_2d(|x, y, alpha| {
    ///     image.set_pixel(x, y, (alpha * 255.0) as u8);
    /// });
    /// ```
    pub fn for_each_pixel_2d<O: FnMut(u32, u32, f32)>(&self, mut px_fn: O) {
        // Convert the linear index into (x, y) = (idx % width, idx / width).
        let width32 = self.width as u32;
        self.for_each_pixel(|idx, alpha| px_fn(idx as u32 % width32, idx as u32 / width32, alpha));
    }
}
/// ```
/// let rasterizer = ab_glyph_rasterizer::Rasterizer::new(3, 4);
/// assert_eq!(
///     &format!("{:?}", rasterizer),
///     "Rasterizer { width: 3, height: 4 }"
/// );
/// ```
impl core::fmt::Debug for Rasterizer {
    // Note: uses `debug_struct` (rather than a manual `write!`) so that both
    // `{:?}` and the pretty `{:#?}` alternate form are supported.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut dbg = f.debug_struct("Rasterizer");
        dbg.field("width", &self.width);
        dbg.field("height", &self.height);
        dbg.finish()
    }
}
#[cfg(all(feature = "std", any(target_arch = "x86", target_arch = "x86_64")))]
#[target_feature(enable = "avx2")]
// Compiles the (force-inlined) scalar implementation with AVX2 codegen
// enabled. Callers must ensure AVX2 support before invoking (see
// `optimal_draw_line_fn`), hence the `unsafe` contract.
unsafe fn draw_line_avx2(rast: &mut Rasterizer, p0: Point, p1: Point) {
    rast.draw_line_scalar(p0, p1)
}
#[cfg(all(feature = "std", any(target_arch = "x86", target_arch = "x86_64")))]
#[target_feature(enable = "sse4.2")]
// Compiles the (force-inlined) scalar implementation with SSE4.2 codegen
// enabled. Callers must ensure SSE4.2 support before invoking (see
// `optimal_draw_line_fn`), hence the `unsafe` contract.
unsafe fn draw_line_sse4_2(rast: &mut Rasterizer, p0: Point, p1: Point) {
    rast.draw_line_scalar(p0, p1)
}
/// Return most optimal `DrawLineFn` impl.
///
/// With feature `std` on x86/x86_64 will use one-time runtime detection
/// to pick the best SIMD impl. Otherwise uses a scalar version.
fn optimal_draw_line_fn() -> DrawLineFn {
    unsafe {
        // safe as write synchronised by Once::call_once or no-write
        //
        // The only write happens inside `call_once`; later reads observe it
        // via `Once`'s synchronization. On non-x86 / no_std builds the static
        // is never written and always holds the scalar fallback.
        // NOTE(review): direct `static mut` access trips the `static_mut_refs`
        // lint on newer toolchains — an AtomicPtr/OnceLock would be the
        // modern equivalent; confirm against the crate's MSRV before changing.
        static mut DRAW_LINE_FN: DrawLineFn = Rasterizer::draw_line_scalar;
        #[cfg(all(feature = "std", any(target_arch = "x86", target_arch = "x86_64")))]
        {
            static INIT: std::sync::Once = std::sync::Once::new();
            INIT.call_once(|| {
                // runtime detect optimal simd impls
                if is_x86_feature_detected!("avx2") {
                    DRAW_LINE_FN = draw_line_avx2
                } else if is_x86_feature_detected!("sse4.2") {
                    DRAW_LINE_FN = draw_line_sse4_2
                }
            });
        }
        DRAW_LINE_FN
    }
}

View file

@ -0,0 +1,10 @@
use ab_glyph_rasterizer::*;
/// Index oob panic rasterizing "Gauntl" using Bitter-Regular.otf
#[test]
fn rusttype_156_index_panic() {
    let mut r = Rasterizer::new(6, 16);
    let outline = [
        point(5.54, 14.299999),
        point(3.7399998, 13.799999),
        point(3.7399998, 0.0),
        point(0.0, 0.10000038),
    ];
    // Draw each consecutive segment; the test passes if no panic occurs.
    for seg in outline.windows(2) {
        r.draw_line(seg[0], seg[1]);
    }
}

View file

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"ef9fa958318e442f1da7d204494cefec75c144aa6d5d5c93b0a5d6fcdf4ef6c6","Cargo.lock":"20b23c454fc3127f08a1bcd2864bbf029793759e6411fba24d44d8f4b7831ad0","Cargo.toml":"d0f15fde73d42bdf00e93f960dff908447225bede9364cb1659e44740a536c04","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"e99d88d232bf57d70f0fb87f6b496d44b6653f99f8a63d250a54c61ea4bcde40","README.md":"76d28502bd2e83f6a9e3576bd45e9a7fe5308448c4b5384b0d249515b5f67a5c","bench.plot.r":"6a5d7a4d36ed6b3d9919be703a479bef47698bf947818b483ff03951df2d4e01","benchmark.sh":"b35f89b1ca2c1dc0476cdd07f0284b72d41920d1c7b6054072f50ffba296d78d","coverage.sh":"4677e81922d08a82e83068a911717a247c66af12e559f37b78b6be3337ac9f07","examples/addr2line.rs":"3c5eb5a6726634df6cf53e4d67ee9f90c9ac09838303947f45c3bea1e84548b5","rustfmt.toml":"01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b","src/builtin_split_dwarf_loader.rs":"dc6979de81b35f82e97275e6be27ec61f3c4225ea10574a9e031813e00185174","src/function.rs":"68f047e0c78afe18ad165db255c8254ee74c35cd6df0cc07e400252981f661ed","src/lazy.rs":"0bf23f7098f1902f181e43c2ffa82a3f86df2c0dbcb9bc0ebce6a0168dd8b060","src/lib.rs":"9d6531f71fd138d31cc7596db9ab234198d0895a21ea9cb116434c19ec78b660","tests/correctness.rs":"4081f8019535305e3aa254c6a4e1436272dd873f9717c687ca0e66ea8d5871ed","tests/output_equivalence.rs":"b2cd7c59fa55808a2e66e9fe7f160d846867e3ecefe22c22a818f822c3c41f23","tests/parse.rs":"c2f7362e4679c1b4803b12ec6e8dca6da96aed7273fd210a857524a4182c30e7"},"package":"8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"}

View file

@ -0,0 +1,336 @@
# `addr2line` Change Log
--------------------------------------------------------------------------------
## 0.21.0 (2023/08/12)
### Breaking changes
* Updated `gimli`, `object`, and `fallible-iterator` dependencies.
### Changed
* The minimum supported rust version is 1.65.0.
* Store boxed slices instead of `Vec` objects in `Context`.
[#278](https://github.com/gimli-rs/addr2line/pull/278)
--------------------------------------------------------------------------------
## 0.20.0 (2023/04/15)
### Breaking changes
* The minimum supported rust version is 1.58.0.
* Changed `Context::find_frames` to return `LookupResult`.
Use `LookupResult::skip_all_loads` to obtain the result without loading split DWARF.
[#260](https://github.com/gimli-rs/addr2line/pull/260)
* Replaced `Context::find_dwarf_unit` with `Context::find_dwarf_and_unit`.
[#260](https://github.com/gimli-rs/addr2line/pull/260)
* Updated `object` dependency.
### Changed
* Fix handling of file index 0 for DWARF 5.
[#264](https://github.com/gimli-rs/addr2line/pull/264)
### Added
* Added types and methods to support loading split DWARF:
`LookupResult`, `SplitDwarfLoad`, `SplitDwarfLoader`, `Context::preload_units`.
[#260](https://github.com/gimli-rs/addr2line/pull/260)
[#262](https://github.com/gimli-rs/addr2line/pull/262)
[#263](https://github.com/gimli-rs/addr2line/pull/263)
--------------------------------------------------------------------------------
## 0.19.0 (2022/11/24)
### Breaking changes
* Updated `gimli` and `object` dependencies.
--------------------------------------------------------------------------------
## 0.18.0 (2022/07/16)
### Breaking changes
* Updated `object` dependency.
### Changed
* Fixed handling of relative path for `DW_AT_comp_dir`.
[#239](https://github.com/gimli-rs/addr2line/pull/239)
* Fixed handling of `DW_FORM_addrx` for DWARF 5 support.
[#243](https://github.com/gimli-rs/addr2line/pull/243)
* Fixed handling of units that are missing range information.
[#249](https://github.com/gimli-rs/addr2line/pull/249)
--------------------------------------------------------------------------------
## 0.17.0 (2021/10/24)
### Breaking changes
* Updated `gimli` and `object` dependencies.
### Changed
* Use `skip_attributes` to improve performance.
[#236](https://github.com/gimli-rs/addr2line/pull/236)
--------------------------------------------------------------------------------
## 0.16.0 (2021/07/26)
### Breaking changes
* Updated `gimli` and `object` dependencies.
--------------------------------------------------------------------------------
## 0.15.2 (2021/06/04)
### Fixed
* Allow `Context` to be `Send`.
[#219](https://github.com/gimli-rs/addr2line/pull/219)
--------------------------------------------------------------------------------
## 0.15.1 (2021/05/02)
### Fixed
* Don't ignore aranges with address 0.
[#217](https://github.com/gimli-rs/addr2line/pull/217)
--------------------------------------------------------------------------------
## 0.15.0 (2021/05/02)
### Breaking changes
* Updated `gimli` and `object` dependencies.
[#215](https://github.com/gimli-rs/addr2line/pull/215)
* Added `debug_aranges` parameter to `Context::from_sections`.
[#200](https://github.com/gimli-rs/addr2line/pull/200)
### Added
* Added `.debug_aranges` support.
[#200](https://github.com/gimli-rs/addr2line/pull/200)
* Added supplementary object file support.
[#208](https://github.com/gimli-rs/addr2line/pull/208)
### Fixed
* Fixed handling of Windows paths in locations.
[#209](https://github.com/gimli-rs/addr2line/pull/209)
* examples/addr2line: Flush stdout after each response.
[#210](https://github.com/gimli-rs/addr2line/pull/210)
* examples/addr2line: Avoid copying every section.
[#213](https://github.com/gimli-rs/addr2line/pull/213)
--------------------------------------------------------------------------------
## 0.14.1 (2020/12/31)
### Fixed
* Fix location lookup for skeleton units.
[#201](https://github.com/gimli-rs/addr2line/pull/201)
### Added
* Added `Context::find_location_range`.
[#196](https://github.com/gimli-rs/addr2line/pull/196)
[#199](https://github.com/gimli-rs/addr2line/pull/199)
--------------------------------------------------------------------------------
## 0.14.0 (2020/10/27)
### Breaking changes
* Updated `gimli` and `object` dependencies.
### Fixed
* Handle units that only have line information.
[#188](https://github.com/gimli-rs/addr2line/pull/188)
* Handle DWARF units with version <= 4 and no `DW_AT_name`.
[#191](https://github.com/gimli-rs/addr2line/pull/191)
* Fix handling of `DW_FORM_ref_addr`.
[#193](https://github.com/gimli-rs/addr2line/pull/193)
--------------------------------------------------------------------------------
## 0.13.0 (2020/07/07)
### Breaking changes
* Updated `gimli` and `object` dependencies.
* Added `rustc-dep-of-std` feature.
[#166](https://github.com/gimli-rs/addr2line/pull/166)
### Changed
* Improve performance by parsing function contents lazily.
[#178](https://github.com/gimli-rs/addr2line/pull/178)
* Don't skip `.debug_info` and `.debug_line` entries with a zero address.
[#182](https://github.com/gimli-rs/addr2line/pull/182)
--------------------------------------------------------------------------------
## 0.12.2 (2020/06/21)
### Fixed
* Avoid linear search for `DW_FORM_ref_addr`.
[#175](https://github.com/gimli-rs/addr2line/pull/175)
--------------------------------------------------------------------------------
## 0.12.1 (2020/05/19)
### Fixed
* Handle units with overlapping address ranges.
[#163](https://github.com/gimli-rs/addr2line/pull/163)
* Don't assert for functions with overlapping address ranges.
[#168](https://github.com/gimli-rs/addr2line/pull/168)
--------------------------------------------------------------------------------
## 0.12.0 (2020/05/12)
### Breaking changes
* Updated `gimli` and `object` dependencies.
* Added more optional features: `smallvec` and `fallible-iterator`.
[#160](https://github.com/gimli-rs/addr2line/pull/160)
### Added
* Added `Context::dwarf` and `Context::find_dwarf_unit`.
[#159](https://github.com/gimli-rs/addr2line/pull/159)
### Changed
* Removed `lazycell` dependency.
[#160](https://github.com/gimli-rs/addr2line/pull/160)
--------------------------------------------------------------------------------
## 0.11.0 (2020/01/11)
### Breaking changes
* Updated `gimli` and `object` dependencies.
* [#130](https://github.com/gimli-rs/addr2line/pull/130)
Changed `Location::file` from `Option<String>` to `Option<&str>`.
This required adding lifetime parameters to `Location` and other structs that
contain it.
* [#152](https://github.com/gimli-rs/addr2line/pull/152)
  Changed `Location::line` and `Location::column` from `Option<u64>` to `Option<u32>`.
* [#156](https://github.com/gimli-rs/addr2line/pull/156)
Deleted `alloc` feature, and fixed `no-std` builds with stable rust.
Removed default `Reader` parameter for `Context`, and added `ObjectContext` instead.
### Added
* [#134](https://github.com/gimli-rs/addr2line/pull/134)
Added `Context::from_dwarf`.
### Changed
* [#133](https://github.com/gimli-rs/addr2line/pull/133)
Fixed handling of units that can't be parsed.
* [#155](https://github.com/gimli-rs/addr2line/pull/155)
Fixed `addr2line` output to match binutils.
* [#130](https://github.com/gimli-rs/addr2line/pull/130)
Improved `.debug_line` parsing performance.
* [#148](https://github.com/gimli-rs/addr2line/pull/148)
[#150](https://github.com/gimli-rs/addr2line/pull/150)
[#151](https://github.com/gimli-rs/addr2line/pull/151)
[#152](https://github.com/gimli-rs/addr2line/pull/152)
Improved `.debug_info` parsing performance.
* [#137](https://github.com/gimli-rs/addr2line/pull/137)
[#138](https://github.com/gimli-rs/addr2line/pull/138)
[#139](https://github.com/gimli-rs/addr2line/pull/139)
[#140](https://github.com/gimli-rs/addr2line/pull/140)
[#146](https://github.com/gimli-rs/addr2line/pull/146)
Improved benchmarks.
--------------------------------------------------------------------------------
## 0.10.0 (2019/07/07)
### Breaking changes
* [#127](https://github.com/gimli-rs/addr2line/pull/127)
Update `gimli`.
--------------------------------------------------------------------------------
## 0.9.0 (2019/05/02)
### Breaking changes
* [#121](https://github.com/gimli-rs/addr2line/pull/121)
Update `gimli`, `object`, and `fallible-iterator` dependencies.
### Added
* [#121](https://github.com/gimli-rs/addr2line/pull/121)
Reexport `gimli`, `object`, and `fallible-iterator`.
--------------------------------------------------------------------------------
## 0.8.0 (2019/02/06)
### Breaking changes
* [#107](https://github.com/gimli-rs/addr2line/pull/107)
Update `object` dependency to 0.11. This is part of the public API.
### Added
* [#101](https://github.com/gimli-rs/addr2line/pull/101)
Add `object` feature (enabled by default). Disable this feature to remove
the `object` dependency and `Context::new` API.
* [#102](https://github.com/gimli-rs/addr2line/pull/102)
Add `std` (enabled by default) and `alloc` features.
### Changed
* [#108](https://github.com/gimli-rs/addr2line/issues/108)
`demangle` no longer outputs the hash for rust symbols.
* [#109](https://github.com/gimli-rs/addr2line/issues/109)
Set default `R` for `Context<R>`.

704
third-party/vendor/addr2line/Cargo.lock generated vendored Normal file
View file

@ -0,0 +1,704 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
dependencies = [
"gimli 0.27.2",
]
[[package]]
name = "addr2line"
version = "0.21.0"
dependencies = [
"backtrace",
"clap",
"compiler_builtins",
"cpp_demangle",
"fallible-iterator",
"findshlibs",
"gimli 0.28.0",
"libtest-mimic",
"memmap2",
"object 0.32.0",
"rustc-demangle",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
"smallvec",
"typed-arena",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "anstream"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is-terminal",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd"
[[package]]
name = "anstyle-parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c"
dependencies = [
"anstyle",
"windows-sys",
]
[[package]]
name = "backtrace"
version = "0.3.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"
dependencies = [
"addr2line 0.19.0",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object 0.30.3",
"rustc-demangle",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "cc"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd"
dependencies = [
"clap_builder",
"clap_derive",
"once_cell",
]
[[package]]
name = "clap_builder"
version = "4.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
"terminal_size",
]
[[package]]
name = "clap_derive"
version = "4.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.15",
]
[[package]]
name = "clap_lex"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
[[package]]
name = "colorchoice"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "compiler_builtins"
version = "0.1.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "571298a3cce7e2afbd3d61abb91a18667d5ab25993ec577a88ee8ac45f00cc3a"
[[package]]
name = "cpp_demangle"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c76f98bdfc7f66172e6c7065f981ebb576ffc903fe4c0561d9f0c2509226dc6"
dependencies = [
"cfg-if",
]
[[package]]
name = "crc32fast"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
]
[[package]]
name = "errno"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f"
dependencies = [
"errno-dragonfly",
"libc",
"windows-sys",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "findshlibs"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64"
dependencies = [
"cc",
"lazy_static",
"libc",
"winapi",
]
[[package]]
name = "flate2"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
dependencies = [
"crc32fast",
"miniz_oxide",
]
[[package]]
name = "gimli"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4"
[[package]]
name = "gimli"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
dependencies = [
"compiler_builtins",
"fallible-iterator",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
"stable_deref_trait",
]
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
[[package]]
name = "io-lifetimes"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
dependencies = [
"hermit-abi 0.3.2",
"libc",
"windows-sys",
]
[[package]]
name = "is-terminal"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi 0.3.2",
"rustix 0.38.8",
"windows-sys",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "libtest-mimic"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d8de370f98a6cb8a4606618e53e802f93b094ddec0f96988eaec2c27e6e9ce7"
dependencies = [
"clap",
"termcolor",
"threadpool",
]
[[package]]
name = "linux-raw-sys"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "linux-raw-sys"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memmap2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
dependencies = [
"libc",
]
[[package]]
name = "miniz_oxide"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
name = "num_cpus"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
"hermit-abi 0.2.6",
"libc",
]
[[package]]
name = "object"
version = "0.30.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439"
dependencies = [
"memchr",
]
[[package]]
name = "object"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
dependencies = [
"flate2",
"memchr",
"ruzstd",
]
[[package]]
name = "once_cell"
version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "proc-macro2"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rustc-demangle"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b"
[[package]]
name = "rustc-std-workspace-alloc"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff66d57013a5686e1917ed6a025d54dd591fcda71a41fe07edf4d16726aefa86"
[[package]]
name = "rustc-std-workspace-core"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1956f5517128a2b6f23ab2dadf1a976f4f5b27962e7724c2bf3d45e539ec098c"
[[package]]
name = "rustix"
version = "0.37.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06"
dependencies = [
"bitflags 1.3.2",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys 0.3.8",
"windows-sys",
]
[[package]]
name = "rustix"
version = "0.38.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f"
dependencies = [
"bitflags 2.4.0",
"errno",
"libc",
"linux-raw-sys 0.4.5",
"windows-sys",
]
[[package]]
name = "ruzstd"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3ffab8f9715a0d455df4bbb9d21e91135aab3cd3ca187af0cd0c3c3f868fdc"
dependencies = [
"byteorder",
"thiserror-core",
"twox-hash",
]
[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "termcolor"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
dependencies = [
"winapi-util",
]
[[package]]
name = "terminal_size"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237"
dependencies = [
"rustix 0.37.23",
"windows-sys",
]
[[package]]
name = "thiserror-core"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d97345f6437bb2004cd58819d8a9ef8e36cdd7661c2abc4bbde0a7c40d9f497"
dependencies = [
"thiserror-core-impl",
]
[[package]]
name = "thiserror-core-impl"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10ac1c5050e43014d16b2f94d0d2ce79e65ffdd8b38d8048f9c8f6a8a6da62ac"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "threadpool"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
[[package]]
name = "twox-hash"
version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if",
"static_assertions",
]
[[package]]
name = "typed-arena"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "unicode-ident"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
[[package]]
name = "utf8parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

147
third-party/vendor/addr2line/Cargo.toml vendored Normal file
View file

@ -0,0 +1,147 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
rust-version = "1.65"
name = "addr2line"
version = "0.21.0"
exclude = [
"/benches/*",
"/fixtures/*",
".github",
]
description = "A cross-platform symbolication library written in Rust, using `gimli`"
documentation = "https://docs.rs/addr2line"
readme = "./README.md"
keywords = [
"DWARF",
"debug",
"elf",
"symbolicate",
"atos",
]
categories = ["development-tools::debugging"]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/gimli-rs/addr2line"
[profile.bench]
codegen-units = 1
debug = true
[profile.release]
debug = true
[[example]]
name = "addr2line"
required-features = ["default"]
[[test]]
name = "output_equivalence"
harness = false
required-features = ["default"]
[[test]]
name = "correctness"
required-features = ["default"]
[[test]]
name = "parse"
required-features = ["std-object"]
[dependencies.alloc]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-alloc"
[dependencies.compiler_builtins]
version = "0.1.2"
optional = true
[dependencies.core]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-core"
[dependencies.cpp_demangle]
version = "0.4"
features = ["alloc"]
optional = true
default-features = false
[dependencies.fallible-iterator]
version = "0.3.0"
optional = true
default-features = false
[dependencies.gimli]
version = "0.28.0"
features = ["read"]
default-features = false
[dependencies.memmap2]
version = "0.5.5"
optional = true
[dependencies.object]
version = "0.32.0"
features = ["read"]
optional = true
default-features = false
[dependencies.rustc-demangle]
version = "0.1"
optional = true
[dependencies.smallvec]
version = "1"
optional = true
default-features = false
[dev-dependencies.backtrace]
version = "0.3.13"
[dev-dependencies.clap]
version = "4.3.21"
features = ["wrap_help"]
[dev-dependencies.findshlibs]
version = "0.10"
[dev-dependencies.libtest-mimic]
version = "0.6.1"
[dev-dependencies.typed-arena]
version = "2"
[features]
default = [
"rustc-demangle",
"cpp_demangle",
"std-object",
"fallible-iterator",
"smallvec",
"memmap2",
]
rustc-dep-of-std = [
"core",
"alloc",
"compiler_builtins",
"gimli/rustc-dep-of-std",
]
std = ["gimli/std"]
std-object = [
"std",
"object",
"object/std",
"object/compression",
"gimli/endian-reader",
]

View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,25 @@
Copyright (c) 2016-2018 The gimli Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

48
third-party/vendor/addr2line/README.md vendored Normal file
View file

@ -0,0 +1,48 @@
# addr2line
[![](https://img.shields.io/crates/v/addr2line.svg)](https://crates.io/crates/addr2line)
[![](https://img.shields.io/docsrs/addr2line.svg)](https://docs.rs/addr2line)
[![Coverage Status](https://coveralls.io/repos/github/gimli-rs/addr2line/badge.svg?branch=master)](https://coveralls.io/github/gimli-rs/addr2line?branch=master)
A cross-platform library for retrieving per-address debug information
from files with DWARF debug information.
`addr2line` uses [`gimli`](https://github.com/gimli-rs/gimli) to parse
the debug information, and exposes an interface for finding
the source file, line number, and wrapping function for instruction
addresses within the target program. These lookups can either be
performed programmatically through `Context::find_location` and
`Context::find_frames`, or via the included example binary,
`addr2line` (named and modelled after the equivalent utility from
[GNU binutils](https://sourceware.org/binutils/docs/binutils/addr2line.html)).
# Quickstart
- Add the [`addr2line` crate](https://crates.io/crates/addr2line) to your `Cargo.toml`
- Load the file and parse it with [`addr2line::object::read::File::parse`](https://docs.rs/object/*/object/read/struct.File.html#method.parse)
- Pass the parsed file to [`addr2line::Context::new` ](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.new)
- Use [`addr2line::Context::find_location`](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.find_location)
or [`addr2line::Context::find_frames`](https://docs.rs/addr2line/*/addr2line/struct.Context.html#method.find_frames)
to look up debug information for an address
# Performance
`addr2line` optimizes for speed over memory by caching parsed information.
The DWARF information is parsed lazily where possible.
The library aims to perform similarly to equivalent existing tools such
as `addr2line` from binutils, `eu-addr2line` from elfutils, and
`llvm-symbolizer` from the llvm project, and in the past some benchmarking
was done that indicates a comparable performance.
## License
Licensed under either of
* Apache License, Version 2.0 ([`LICENSE-APACHE`](./LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([`LICENSE-MIT`](./LICENSE-MIT) or https://opensource.org/licenses/MIT)
at your option.
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.

View file

@ -0,0 +1,23 @@
v <- read.table(file("stdin"))
t <- data.frame(prog=v[,1], funcs=(v[,2]=="func"), time=v[,3], mem=v[,4], stringsAsFactors=FALSE)
t$prog <- as.character(t$prog)
t$prog[t$prog == "master"] <- "gimli-rs/addr2line"
t$funcs[t$funcs == TRUE] <- "With functions"
t$funcs[t$funcs == FALSE] <- "File/line only"
t$mem = t$mem / 1024.0
library(ggplot2)
p <- ggplot(data=t, aes(x=prog, y=time, fill=prog))
p <- p + geom_bar(stat = "identity")
p <- p + facet_wrap(~ funcs)
p <- p + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank())
p <- p + ylab("time (s)") + ggtitle("addr2line runtime")
ggsave('time.png',plot=p,width=10,height=6)
p <- ggplot(data=t, aes(x=prog, y=mem, fill=prog))
p <- p + geom_bar(stat = "identity")
p <- p + facet_wrap(~ funcs)
p <- p + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank())
p <- p + ylab("memory (kB)") + ggtitle("addr2line memory usage")
ggsave('memory.png',plot=p,width=10,height=6)

112
third-party/vendor/addr2line/benchmark.sh vendored Executable file
View file

@ -0,0 +1,112 @@
#!/bin/bash
if [[ $# -le 1 ]]; then
echo "Usage: $0 <executable> [<addresses>] REFS..."
exit 1
fi
target="$1"
shift
addresses=""
if [[ -e "$1" ]]; then
addresses="$1"
shift
fi
# path to "us"
# readlink -f, but more portable:
dirname=$(perl -e 'use Cwd "abs_path";print abs_path(shift)' "$(dirname "$0")")
# https://stackoverflow.com/a/2358432/472927
{
# compile all refs
pushd "$dirname" > /dev/null
# if the user has some local changes, preserve them
nstashed=$(git stash list | wc -l)
echo "==> Stashing any local modifications"
git stash --keep-index > /dev/null
popstash() {
# https://stackoverflow.com/q/24520791/472927
if [[ "$(git stash list | wc -l)" -ne "$nstashed" ]]; then
echo "==> Restoring stashed state"
git stash pop > /dev/null
fi
}
# if the user has added stuff to the index, abort
if ! git diff-index --quiet HEAD --; then
echo "Refusing to overwrite outstanding git changes"
popstash
exit 2
fi
current=$(git symbolic-ref --short HEAD)
for ref in "$@"; do
echo "==> Compiling $ref"
git checkout -q "$ref"
commit=$(git rev-parse HEAD)
fn="target/release/addr2line-$commit"
if [[ ! -e "$fn" ]]; then
cargo build --release --example addr2line
cp target/release/examples/addr2line "$fn"
fi
if [[ "$ref" != "$commit" ]]; then
ln -sfn "addr2line-$commit" target/release/addr2line-"$ref"
fi
done
git checkout -q "$current"
popstash
popd > /dev/null
# get us some addresses to look up
if [[ -z "$addresses" ]]; then
echo "==> Looking for benchmarking addresses (this may take a while)"
addresses=$(mktemp tmp.XXXXXXXXXX)
objdump -C -x --disassemble -l "$target" \
| grep -P '0[048]:' \
| awk '{print $1}' \
| sed 's/:$//' \
> "$addresses"
echo " -> Addresses stored in $addresses; you should re-use it next time"
fi
run() {
func="$1"
name="$2"
cmd="$3"
args="$4"
printf "%s\t%s\t" "$name" "$func"
if [[ "$cmd" =~ llvm-symbolizer ]]; then
/usr/bin/time -f '%e\t%M' "$cmd" $args -obj="$target" < "$addresses" 2>&1 >/dev/null
else
/usr/bin/time -f '%e\t%M' "$cmd" $args -e "$target" < "$addresses" 2>&1 >/dev/null
fi
}
# run without functions
log1=$(mktemp tmp.XXXXXXXXXX)
echo "==> Benchmarking"
run nofunc binutils addr2line >> "$log1"
#run nofunc elfutils eu-addr2line >> "$log1"
run nofunc llvm-sym llvm-symbolizer -functions=none >> "$log1"
for ref in "$@"; do
run nofunc "$ref" "$dirname/target/release/addr2line-$ref" >> "$log1"
done
cat "$log1" | column -t
# run with functions
log2=$(mktemp tmp.XXXXXXXXXX)
echo "==> Benchmarking with -f"
run func binutils addr2line "-f -i" >> "$log2"
#run func elfutils eu-addr2line "-f -i" >> "$log2"
run func llvm-sym llvm-symbolizer "-functions=linkage -demangle=0" >> "$log2"
for ref in "$@"; do
run func "$ref" "$dirname/target/release/addr2line-$ref" "-f -i" >> "$log2"
done
cat "$log2" | column -t
cat "$log2" >> "$log1"; rm "$log2"
echo "==> Plotting"
Rscript --no-readline --no-restore --no-save "$dirname/bench.plot.r" < "$log1"
echo "==> Cleaning up"
rm "$log1"
exit 0
}

View file

@ -0,0 +1,5 @@
#!/bin/sh
# Run tarpaulin and pycobertura to generate coverage.html.
cargo tarpaulin --skip-clean --out Xml
pycobertura show --format html --output coverage.html cobertura.xml

View file

@ -0,0 +1,317 @@
use std::borrow::Cow;
use std::fs::File;
use std::io::{BufRead, Lines, StdinLock, Write};
use std::path::{Path, PathBuf};
use clap::{Arg, ArgAction, Command};
use fallible_iterator::FallibleIterator;
use object::{Object, ObjectSection, SymbolMap, SymbolMapName};
use typed_arena::Arena;
use addr2line::{Context, Location};
fn parse_uint_from_hex_string(string: &str) -> Option<u64> {
if string.len() > 2 && string.starts_with("0x") {
u64::from_str_radix(&string[2..], 16).ok()
} else {
u64::from_str_radix(string, 16).ok()
}
}
enum Addrs<'a> {
Args(clap::parser::ValuesRef<'a, String>),
Stdin(Lines<StdinLock<'a>>),
}
impl<'a> Iterator for Addrs<'a> {
type Item = Option<u64>;
fn next(&mut self) -> Option<Option<u64>> {
let text = match *self {
Addrs::Args(ref mut vals) => vals.next().map(Cow::from),
Addrs::Stdin(ref mut lines) => lines.next().map(Result::unwrap).map(Cow::from),
};
text.as_ref()
.map(Cow::as_ref)
.map(parse_uint_from_hex_string)
}
}
fn print_loc(loc: Option<&Location<'_>>, basenames: bool, llvm: bool) {
if let Some(loc) = loc {
if let Some(ref file) = loc.file.as_ref() {
let path = if basenames {
Path::new(Path::new(file).file_name().unwrap())
} else {
Path::new(file)
};
print!("{}:", path.display());
} else {
print!("??:");
}
if llvm {
print!("{}:{}", loc.line.unwrap_or(0), loc.column.unwrap_or(0));
} else if let Some(line) = loc.line {
print!("{}", line);
} else {
print!("?");
}
println!();
} else if llvm {
println!("??:0:0");
} else {
println!("??:0");
}
}
fn print_function(name: Option<&str>, language: Option<gimli::DwLang>, demangle: bool) {
if let Some(name) = name {
if demangle {
print!("{}", addr2line::demangle_auto(Cow::from(name), language));
} else {
print!("{}", name);
}
} else {
print!("??");
}
}
fn load_file_section<'input, 'arena, Endian: gimli::Endianity>(
id: gimli::SectionId,
file: &object::File<'input>,
endian: Endian,
arena_data: &'arena Arena<Cow<'input, [u8]>>,
) -> Result<gimli::EndianSlice<'arena, Endian>, ()> {
// TODO: Unify with dwarfdump.rs in gimli.
let name = id.name();
match file.section_by_name(name) {
Some(section) => match section.uncompressed_data().unwrap() {
Cow::Borrowed(b) => Ok(gimli::EndianSlice::new(b, endian)),
Cow::Owned(b) => Ok(gimli::EndianSlice::new(arena_data.alloc(b.into()), endian)),
},
None => Ok(gimli::EndianSlice::new(&[][..], endian)),
}
}
fn find_name_from_symbols<'a>(
symbols: &'a SymbolMap<SymbolMapName<'_>>,
probe: u64,
) -> Option<&'a str> {
symbols.get(probe).map(|x| x.name())
}
struct Options<'a> {
do_functions: bool,
do_inlines: bool,
pretty: bool,
print_addrs: bool,
basenames: bool,
demangle: bool,
llvm: bool,
exe: &'a PathBuf,
sup: Option<&'a PathBuf>,
}
fn main() {
let matches = Command::new("addr2line")
.version(env!("CARGO_PKG_VERSION"))
.about("A fast addr2line Rust port")
.max_term_width(100)
.args(&[
Arg::new("exe")
.short('e')
.long("exe")
.value_name("filename")
.value_parser(clap::value_parser!(PathBuf))
.help(
"Specify the name of the executable for which addresses should be translated.",
)
.required(true),
Arg::new("sup")
.long("sup")
.value_name("filename")
.value_parser(clap::value_parser!(PathBuf))
.help("Path to supplementary object file."),
Arg::new("functions")
.short('f')
.long("functions")
.action(ArgAction::SetTrue)
.help("Display function names as well as file and line number information."),
Arg::new("pretty").short('p').long("pretty-print")
.action(ArgAction::SetTrue)
.help(
"Make the output more human friendly: each location are printed on one line.",
),
Arg::new("inlines").short('i').long("inlines")
.action(ArgAction::SetTrue)
.help(
"If the address belongs to a function that was inlined, the source information for \
all enclosing scopes back to the first non-inlined function will also be printed.",
),
Arg::new("addresses").short('a').long("addresses")
.action(ArgAction::SetTrue)
.help(
"Display the address before the function name, file and line number information.",
),
Arg::new("basenames")
.short('s')
.long("basenames")
.action(ArgAction::SetTrue)
.help("Display only the base of each file name."),
Arg::new("demangle").short('C').long("demangle")
.action(ArgAction::SetTrue)
.help(
"Demangle function names. \
Specifying a specific demangling style (like GNU addr2line) is not supported. \
(TODO)"
),
Arg::new("llvm")
.long("llvm")
.action(ArgAction::SetTrue)
.help("Display output in the same format as llvm-symbolizer."),
Arg::new("addrs")
.action(ArgAction::Append)
.help("Addresses to use instead of reading from stdin."),
])
.get_matches();
let arena_data = Arena::new();
let opts = Options {
do_functions: matches.get_flag("functions"),
do_inlines: matches.get_flag("inlines"),
pretty: matches.get_flag("pretty"),
print_addrs: matches.get_flag("addresses"),
basenames: matches.get_flag("basenames"),
demangle: matches.get_flag("demangle"),
llvm: matches.get_flag("llvm"),
exe: matches.get_one::<PathBuf>("exe").unwrap(),
sup: matches.get_one::<PathBuf>("sup"),
};
let file = File::open(opts.exe).unwrap();
let map = unsafe { memmap2::Mmap::map(&file).unwrap() };
let object = &object::File::parse(&*map).unwrap();
let endian = if object.is_little_endian() {
gimli::RunTimeEndian::Little
} else {
gimli::RunTimeEndian::Big
};
let mut load_section = |id: gimli::SectionId| -> Result<_, _> {
load_file_section(id, object, endian, &arena_data)
};
let sup_map;
let sup_object = if let Some(sup_path) = opts.sup {
let sup_file = File::open(sup_path).unwrap();
sup_map = unsafe { memmap2::Mmap::map(&sup_file).unwrap() };
Some(object::File::parse(&*sup_map).unwrap())
} else {
None
};
let symbols = object.symbol_map();
let mut dwarf = gimli::Dwarf::load(&mut load_section).unwrap();
if let Some(ref sup_object) = sup_object {
let mut load_sup_section = |id: gimli::SectionId| -> Result<_, _> {
load_file_section(id, sup_object, endian, &arena_data)
};
dwarf.load_sup(&mut load_sup_section).unwrap();
}
let mut split_dwarf_loader = addr2line::builtin_split_dwarf_loader::SplitDwarfLoader::new(
|data, endian| {
gimli::EndianSlice::new(arena_data.alloc(Cow::Owned(data.into_owned())), endian)
},
Some(opts.exe.clone()),
);
let ctx = Context::from_dwarf(dwarf).unwrap();
let stdin = std::io::stdin();
let addrs = matches
.get_many::<String>("addrs")
.map(Addrs::Args)
.unwrap_or_else(|| Addrs::Stdin(stdin.lock().lines()));
for probe in addrs {
if opts.print_addrs {
let addr = probe.unwrap_or(0);
if opts.llvm {
print!("0x{:x}", addr);
} else {
print!("0x{:016x}", addr);
}
if opts.pretty {
print!(": ");
} else {
println!();
}
}
if opts.do_functions || opts.do_inlines {
let mut printed_anything = false;
if let Some(probe) = probe {
let frames = ctx.find_frames(probe);
let frames = split_dwarf_loader.run(frames).unwrap();
let mut frames = frames.enumerate();
while let Some((i, frame)) = frames.next().unwrap() {
if opts.pretty && i != 0 {
print!(" (inlined by) ");
}
if opts.do_functions {
if let Some(func) = frame.function {
print_function(
func.raw_name().ok().as_ref().map(AsRef::as_ref),
func.language,
opts.demangle,
);
} else {
let name = find_name_from_symbols(&symbols, probe);
print_function(name, None, opts.demangle);
}
if opts.pretty {
print!(" at ");
} else {
println!();
}
}
print_loc(frame.location.as_ref(), opts.basenames, opts.llvm);
printed_anything = true;
if !opts.do_inlines {
break;
}
}
}
if !printed_anything {
if opts.do_functions {
let name = probe.and_then(|probe| find_name_from_symbols(&symbols, probe));
print_function(name, None, opts.demangle);
if opts.pretty {
print!(" at ");
} else {
println!();
}
}
print_loc(None, opts.basenames, opts.llvm);
}
} else {
let loc = probe.and_then(|probe| ctx.find_location(probe).unwrap());
print_loc(loc.as_ref(), opts.basenames, opts.llvm);
}
if opts.llvm {
println!();
}
std::io::stdout().flush().unwrap();
}
}

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,164 @@
use alloc::borrow::Cow;
use alloc::sync::Arc;
use std::fs::File;
use std::path::PathBuf;
use object::Object;
use crate::{LookupContinuation, LookupResult};
#[cfg(unix)]
fn convert_path<R: gimli::Reader<Endian = gimli::RunTimeEndian>>(
r: &R,
) -> Result<PathBuf, gimli::Error> {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
let bytes = r.to_slice()?;
let s = OsStr::from_bytes(&bytes);
Ok(PathBuf::from(s))
}
#[cfg(not(unix))]
fn convert_path<R: gimli::Reader<Endian = gimli::RunTimeEndian>>(
r: &R,
) -> Result<PathBuf, gimli::Error> {
let bytes = r.to_slice()?;
let s = std::str::from_utf8(&bytes).map_err(|_| gimli::Error::BadUtf8)?;
Ok(PathBuf::from(s))
}
fn load_section<'data: 'file, 'file, O, R, F>(
id: gimli::SectionId,
file: &'file O,
endian: R::Endian,
loader: &mut F,
) -> Result<R, gimli::Error>
where
O: object::Object<'data, 'file>,
R: gimli::Reader<Endian = gimli::RunTimeEndian>,
F: FnMut(Cow<'data, [u8]>, R::Endian) -> R,
{
use object::ObjectSection;
let data = id
.dwo_name()
.and_then(|dwo_name| {
file.section_by_name(dwo_name)
.and_then(|section| section.uncompressed_data().ok())
})
.unwrap_or(Cow::Borrowed(&[]));
Ok(loader(data, endian))
}
/// A simple builtin split DWARF loader.
pub struct SplitDwarfLoader<R, F>
where
R: gimli::Reader<Endian = gimli::RunTimeEndian>,
F: FnMut(Cow<'_, [u8]>, R::Endian) -> R,
{
loader: F,
dwarf_package: Option<gimli::DwarfPackage<R>>,
}
impl<R, F> SplitDwarfLoader<R, F>
where
R: gimli::Reader<Endian = gimli::RunTimeEndian>,
F: FnMut(Cow<'_, [u8]>, R::Endian) -> R,
{
fn load_dwarf_package(loader: &mut F, path: Option<PathBuf>) -> Option<gimli::DwarfPackage<R>> {
let mut path = path.map(Ok).unwrap_or_else(std::env::current_exe).ok()?;
let dwp_extension = path
.extension()
.map(|previous_extension| {
let mut previous_extension = previous_extension.to_os_string();
previous_extension.push(".dwp");
previous_extension
})
.unwrap_or_else(|| "dwp".into());
path.set_extension(dwp_extension);
let file = File::open(&path).ok()?;
let map = unsafe { memmap2::Mmap::map(&file).ok()? };
let dwp = object::File::parse(&*map).ok()?;
let endian = if dwp.is_little_endian() {
gimli::RunTimeEndian::Little
} else {
gimli::RunTimeEndian::Big
};
let empty = loader(Cow::Borrowed(&[]), endian);
gimli::DwarfPackage::load(
|section_id| load_section(section_id, &dwp, endian, loader),
empty,
)
.ok()
}
/// Create a new split DWARF loader.
pub fn new(mut loader: F, path: Option<PathBuf>) -> SplitDwarfLoader<R, F> {
let dwarf_package = SplitDwarfLoader::load_dwarf_package(&mut loader, path);
SplitDwarfLoader {
loader,
dwarf_package,
}
}
/// Run the provided `LookupResult` to completion, loading any necessary
/// split DWARF along the way.
pub fn run<L>(&mut self, mut l: LookupResult<L>) -> L::Output
where
L: LookupContinuation<Buf = R>,
{
loop {
let (load, continuation) = match l {
LookupResult::Output(output) => break output,
LookupResult::Load { load, continuation } => (load, continuation),
};
let mut r: Option<Arc<gimli::Dwarf<_>>> = None;
if let Some(dwp) = self.dwarf_package.as_ref() {
if let Ok(Some(cu)) = dwp.find_cu(load.dwo_id, &load.parent) {
r = Some(Arc::new(cu));
}
}
if r.is_none() {
let mut path = PathBuf::new();
if let Some(p) = load.comp_dir.as_ref() {
if let Ok(p) = convert_path(p) {
path.push(p);
}
}
if let Some(p) = load.path.as_ref() {
if let Ok(p) = convert_path(p) {
path.push(p);
}
}
if let Ok(file) = File::open(&path) {
if let Ok(map) = unsafe { memmap2::Mmap::map(&file) } {
if let Ok(file) = object::File::parse(&*map) {
let endian = if file.is_little_endian() {
gimli::RunTimeEndian::Little
} else {
gimli::RunTimeEndian::Big
};
r = gimli::Dwarf::load(|id| {
load_section(id, &file, endian, &mut self.loader)
})
.ok()
.map(|mut dwo_dwarf| {
dwo_dwarf.make_dwo(&load.parent);
Arc::new(dwo_dwarf)
});
}
}
}
}
l = continuation.resume(r);
}
}
}

View file

@ -0,0 +1,555 @@
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::cmp::Ordering;
use core::iter;
use crate::lazy::LazyCell;
use crate::maybe_small;
use crate::{Context, DebugFile, Error, RangeAttributes};
pub(crate) struct Functions<R: gimli::Reader> {
/// List of all `DW_TAG_subprogram` details in the unit.
pub(crate) functions: Box<
[(
gimli::UnitOffset<R::Offset>,
LazyCell<Result<Function<R>, Error>>,
)],
>,
/// List of `DW_TAG_subprogram` address ranges in the unit.
pub(crate) addresses: Box<[FunctionAddress]>,
}
/// A single address range for a function.
///
/// It is possible for a function to have multiple address ranges; this
/// is handled by having multiple `FunctionAddress` entries with the same
/// `function` field.
pub(crate) struct FunctionAddress {
range: gimli::Range,
/// An index into `Functions::functions`.
pub(crate) function: usize,
}
pub(crate) struct Function<R: gimli::Reader> {
pub(crate) dw_die_offset: gimli::UnitOffset<R::Offset>,
pub(crate) name: Option<R>,
/// List of all `DW_TAG_inlined_subroutine` details in this function.
inlined_functions: Box<[InlinedFunction<R>]>,
/// List of `DW_TAG_inlined_subroutine` address ranges in this function.
inlined_addresses: Box<[InlinedFunctionAddress]>,
}
pub(crate) struct InlinedFunctionAddress {
range: gimli::Range,
call_depth: usize,
/// An index into `Function::inlined_functions`.
function: usize,
}
pub(crate) struct InlinedFunction<R: gimli::Reader> {
pub(crate) dw_die_offset: gimli::UnitOffset<R::Offset>,
pub(crate) name: Option<R>,
pub(crate) call_file: Option<u64>,
pub(crate) call_line: u32,
pub(crate) call_column: u32,
}
impl<R: gimli::Reader> Functions<R> {
    /// Scan the unit for `DW_TAG_subprogram` entries.
    ///
    /// Only the DIE offsets and address ranges are collected here; full
    /// details (names, inlined subroutines) are parsed lazily via
    /// `Function::parse` when a `LazyCell` entry is first borrowed.
    pub(crate) fn parse(
        unit: &gimli::Unit<R>,
        sections: &gimli::Dwarf<R>,
    ) -> Result<Functions<R>, Error> {
        let mut functions = Vec::new();
        let mut addresses = Vec::new();
        let mut entries = unit.entries_raw(None)?;
        while !entries.is_empty() {
            let dw_die_offset = entries.next_offset();
            if let Some(abbrev) = entries.read_abbreviation()? {
                if abbrev.tag() == gimli::DW_TAG_subprogram {
                    // Collect only the attributes describing the subprogram's
                    // address ranges; all other attributes are skipped.
                    let mut ranges = RangeAttributes::default();
                    for spec in abbrev.attributes() {
                        match entries.read_attribute(*spec) {
                            Ok(ref attr) => {
                                match attr.name() {
                                    gimli::DW_AT_low_pc => match attr.value() {
                                        gimli::AttributeValue::Addr(val) => {
                                            ranges.low_pc = Some(val)
                                        }
                                        gimli::AttributeValue::DebugAddrIndex(index) => {
                                            ranges.low_pc = Some(sections.address(unit, index)?);
                                        }
                                        _ => {}
                                    },
                                    gimli::DW_AT_high_pc => match attr.value() {
                                        gimli::AttributeValue::Addr(val) => {
                                            ranges.high_pc = Some(val)
                                        }
                                        gimli::AttributeValue::DebugAddrIndex(index) => {
                                            ranges.high_pc = Some(sections.address(unit, index)?);
                                        }
                                        // A `Udata` high_pc is an offset from
                                        // low_pc rather than an address.
                                        gimli::AttributeValue::Udata(val) => {
                                            ranges.size = Some(val)
                                        }
                                        _ => {}
                                    },
                                    gimli::DW_AT_ranges => {
                                        ranges.ranges_offset =
                                            sections.attr_ranges_offset(unit, attr.value())?;
                                    }
                                    _ => {}
                                };
                            }
                            Err(e) => return Err(e),
                        }
                    }
                    let function_index = functions.len();
                    // `for_each_range` returns whether any range was found;
                    // only record functions that have at least one.
                    if ranges.for_each_range(sections, unit, |range| {
                        addresses.push(FunctionAddress {
                            range,
                            function: function_index,
                        });
                    })? {
                        functions.push((dw_die_offset, LazyCell::new()));
                    }
                } else {
                    entries.skip_attributes(abbrev.attributes())?;
                }
            }
        }
        // The binary search requires the addresses to be sorted.
        //
        // It also requires them to be non-overlapping. In practice, overlapping
        // function ranges are unlikely, so we don't try to handle that yet.
        //
        // It's possible for multiple functions to have the same address range if the
        // compiler can detect and remove functions with identical code. In that case
        // we'll nondeterministically return one of them.
        addresses.sort_by_key(|x| x.range.begin);
        Ok(Functions {
            functions: functions.into_boxed_slice(),
            addresses: addresses.into_boxed_slice(),
        })
    }

    /// Binary search for the index of the `FunctionAddress` whose range
    /// contains `probe`; `None` if no recorded range covers the address.
    pub(crate) fn find_address(&self, probe: u64) -> Option<usize> {
        self.addresses
            .binary_search_by(|address| {
                if probe < address.range.begin {
                    Ordering::Greater
                } else if probe >= address.range.end {
                    Ordering::Less
                } else {
                    Ordering::Equal
                }
            })
            .ok()
    }

    /// Eagerly parse the details (including inlined subroutines) of every
    /// function in the unit, returning the first parse error encountered.
    pub(crate) fn parse_inlined_functions(
        &self,
        file: DebugFile,
        unit: &gimli::Unit<R>,
        ctx: &Context<R>,
        sections: &gimli::Dwarf<R>,
    ) -> Result<(), Error> {
        for function in &*self.functions {
            function
                .1
                .borrow_with(|| Function::parse(function.0, file, unit, ctx, sections))
                .as_ref()
                .map_err(Error::clone)?;
        }
        Ok(())
    }
}
impl<R: gimli::Reader> Function<R> {
    /// Parse the details of the `DW_TAG_subprogram` at `dw_die_offset`:
    /// its name and its tree of inlined subroutines.
    pub(crate) fn parse(
        dw_die_offset: gimli::UnitOffset<R::Offset>,
        file: DebugFile,
        unit: &gimli::Unit<R>,
        ctx: &Context<R>,
        sections: &gimli::Dwarf<R>,
    ) -> Result<Self, Error> {
        let mut entries = unit.entries_raw(Some(dw_die_offset))?;
        let depth = entries.next_depth();
        let abbrev = entries.read_abbreviation()?.unwrap();
        debug_assert_eq!(abbrev.tag(), gimli::DW_TAG_subprogram);

        // Name resolution order: a linkage name wins over anything seen
        // earlier; `DW_AT_name` and followed references only apply when no
        // name has been found yet.
        let mut name = None;
        for spec in abbrev.attributes() {
            match entries.read_attribute(*spec) {
                Ok(ref attr) => {
                    match attr.name() {
                        gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
                            if let Ok(val) = sections.attr_string(unit, attr.value()) {
                                name = Some(val);
                            }
                        }
                        gimli::DW_AT_name => {
                            if name.is_none() {
                                name = sections.attr_string(unit, attr.value()).ok();
                            }
                        }
                        gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
                            if name.is_none() {
                                // Follow the DIE reference, limited to 16 hops.
                                name = name_attr(attr.value(), file, unit, ctx, sections, 16)?;
                            }
                        }
                        _ => {}
                    };
                }
                Err(e) => return Err(e),
            }
        }

        let mut inlined_functions = Vec::new();
        let mut inlined_addresses = Vec::new();
        Function::parse_children(
            &mut entries,
            depth,
            file,
            unit,
            ctx,
            sections,
            &mut inlined_functions,
            &mut inlined_addresses,
            0,
        )?;

        // Sort ranges in "breadth-first traversal order", i.e. first by call_depth
        // and then by range.begin. This allows finding the range containing an
        // address at a certain depth using binary search.
        // Note: Using DFS order, i.e. ordering by range.begin first and then by
        // call_depth, would not work! Consider the two examples
        // "[0..10 at depth 0], [0..2 at depth 1], [6..8 at depth 1]" and
        // "[0..5 at depth 0], [0..2 at depth 1], [5..10 at depth 0], [6..8 at depth 1]".
        // In this example, if you want to look up address 7 at depth 0, and you
        // encounter [0..2 at depth 1], are you before or after the target range?
        // You don't know.
        inlined_addresses.sort_by(|r1, r2| {
            if r1.call_depth < r2.call_depth {
                Ordering::Less
            } else if r1.call_depth > r2.call_depth {
                Ordering::Greater
            } else if r1.range.begin < r2.range.begin {
                Ordering::Less
            } else if r1.range.begin > r2.range.begin {
                Ordering::Greater
            } else {
                Ordering::Equal
            }
        });

        Ok(Function {
            dw_die_offset,
            name,
            inlined_functions: inlined_functions.into_boxed_slice(),
            inlined_addresses: inlined_addresses.into_boxed_slice(),
        })
    }

    /// Parse all children of the DIE at `depth`, recording inlined
    /// subroutines at call depth `inlined_depth` and skipping over nested
    /// subprograms (those are parsed separately by `Functions::parse`).
    fn parse_children(
        entries: &mut gimli::EntriesRaw<'_, '_, R>,
        depth: isize,
        file: DebugFile,
        unit: &gimli::Unit<R>,
        ctx: &Context<R>,
        sections: &gimli::Dwarf<R>,
        inlined_functions: &mut Vec<InlinedFunction<R>>,
        inlined_addresses: &mut Vec<InlinedFunctionAddress>,
        inlined_depth: usize,
    ) -> Result<(), Error> {
        loop {
            let dw_die_offset = entries.next_offset();
            let next_depth = entries.next_depth();
            if next_depth <= depth {
                // Left the subtree rooted at `depth`: all children consumed.
                return Ok(());
            }
            if let Some(abbrev) = entries.read_abbreviation()? {
                match abbrev.tag() {
                    gimli::DW_TAG_subprogram => {
                        Function::skip(entries, abbrev, next_depth)?;
                    }
                    gimli::DW_TAG_inlined_subroutine => {
                        InlinedFunction::parse(
                            dw_die_offset,
                            entries,
                            abbrev,
                            next_depth,
                            file,
                            unit,
                            ctx,
                            sections,
                            inlined_functions,
                            inlined_addresses,
                            inlined_depth,
                        )?;
                    }
                    _ => {
                        entries.skip_attributes(abbrev.attributes())?;
                    }
                }
            }
        }
    }

    /// Skip the DIE whose abbreviation was just read, together with all of
    /// its children (every entry deeper than `depth`).
    fn skip(
        entries: &mut gimli::EntriesRaw<'_, '_, R>,
        abbrev: &gimli::Abbreviation,
        depth: isize,
    ) -> Result<(), Error> {
        // TODO: use DW_AT_sibling
        entries.skip_attributes(abbrev.attributes())?;
        while entries.next_depth() > depth {
            if let Some(abbrev) = entries.read_abbreviation()? {
                entries.skip_attributes(abbrev.attributes())?;
            }
        }
        Ok(())
    }

    /// Build the list of inlined functions that contain `probe`.
    ///
    /// The returned iterator yields the innermost inlined frame first
    /// (functions are collected outermost-first, then reversed).
    pub(crate) fn find_inlined_functions(
        &self,
        probe: u64,
    ) -> iter::Rev<maybe_small::IntoIter<&InlinedFunction<R>>> {
        // `inlined_functions` is ordered from outside to inside.
        let mut inlined_functions = maybe_small::Vec::new();
        let mut inlined_addresses = &self.inlined_addresses[..];
        loop {
            let current_depth = inlined_functions.len();
            // Look up (probe, current_depth) in inline_ranges.
            // `inlined_addresses` is sorted in "breadth-first traversal order", i.e.
            // by `call_depth` first, and then by `range.begin`. See the comment at
            // the sort call for more information about why.
            let search = inlined_addresses.binary_search_by(|range| {
                if range.call_depth > current_depth {
                    Ordering::Greater
                } else if range.call_depth < current_depth {
                    Ordering::Less
                } else if range.range.begin > probe {
                    Ordering::Greater
                } else if range.range.end <= probe {
                    Ordering::Less
                } else {
                    Ordering::Equal
                }
            });
            if let Ok(index) = search {
                let function_index = inlined_addresses[index].function;
                inlined_functions.push(&self.inlined_functions[function_index]);
                // Deeper matches can only appear after `index` in the sorted
                // slice, so narrow the search window for the next iteration.
                inlined_addresses = &inlined_addresses[index + 1..];
            } else {
                break;
            }
        }
        inlined_functions.into_iter().rev()
    }
}
impl<R: gimli::Reader> InlinedFunction<R> {
    /// Parse a `DW_TAG_inlined_subroutine` DIE (whose abbreviation has
    /// already been read) and, via `parse_children`, any subroutines inlined
    /// into it at depth `inlined_depth + 1`.
    ///
    /// Appends the parsed details to `inlined_functions` and its address
    /// ranges (tagged with `inlined_depth`) to `inlined_addresses`.
    fn parse(
        dw_die_offset: gimli::UnitOffset<R::Offset>,
        entries: &mut gimli::EntriesRaw<'_, '_, R>,
        abbrev: &gimli::Abbreviation,
        depth: isize,
        file: DebugFile,
        unit: &gimli::Unit<R>,
        ctx: &Context<R>,
        sections: &gimli::Dwarf<R>,
        inlined_functions: &mut Vec<InlinedFunction<R>>,
        inlined_addresses: &mut Vec<InlinedFunctionAddress>,
        inlined_depth: usize,
    ) -> Result<(), Error> {
        let mut ranges = RangeAttributes::default();
        let mut name = None;
        let mut call_file = None;
        let mut call_line = 0;
        let mut call_column = 0;
        for spec in abbrev.attributes() {
            match entries.read_attribute(*spec) {
                Ok(ref attr) => match attr.name() {
                    gimli::DW_AT_low_pc => match attr.value() {
                        gimli::AttributeValue::Addr(val) => ranges.low_pc = Some(val),
                        gimli::AttributeValue::DebugAddrIndex(index) => {
                            ranges.low_pc = Some(sections.address(unit, index)?);
                        }
                        _ => {}
                    },
                    gimli::DW_AT_high_pc => match attr.value() {
                        gimli::AttributeValue::Addr(val) => ranges.high_pc = Some(val),
                        gimli::AttributeValue::DebugAddrIndex(index) => {
                            ranges.high_pc = Some(sections.address(unit, index)?);
                        }
                        // A `Udata` high_pc is an offset from low_pc.
                        gimli::AttributeValue::Udata(val) => ranges.size = Some(val),
                        _ => {}
                    },
                    gimli::DW_AT_ranges => {
                        ranges.ranges_offset = sections.attr_ranges_offset(unit, attr.value())?;
                    }
                    gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
                        if let Ok(val) = sections.attr_string(unit, attr.value()) {
                            name = Some(val);
                        }
                    }
                    gimli::DW_AT_name => {
                        if name.is_none() {
                            name = sections.attr_string(unit, attr.value()).ok();
                        }
                    }
                    gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
                        if name.is_none() {
                            // Follow the DIE reference, limited to 16 hops.
                            name = name_attr(attr.value(), file, unit, ctx, sections, 16)?;
                        }
                    }
                    gimli::DW_AT_call_file => {
                        // There is a spec issue [1] with how DW_AT_call_file is specified in DWARF 5.
                        // Before, a file index of 0 would indicate no source file, however in
                        // DWARF 5 this could be a valid index into the file table.
                        //
                        // Implementations such as LLVM generate a file index of 0 when DWARF 5 is
                        // used.
                        //
                        // Thus, if we see a version of 5 or later, treat a file index of 0 as a
                        // valid index into the file table.
                        // [1]: http://wiki.dwarfstd.org/index.php?title=DWARF5_Line_Table_File_Numbers
                        if let gimli::AttributeValue::FileIndex(fi) = attr.value() {
                            if fi > 0 || unit.header.version() >= 5 {
                                call_file = Some(fi);
                            }
                        }
                    }
                    gimli::DW_AT_call_line => {
                        call_line = attr.udata_value().unwrap_or(0) as u32;
                    }
                    gimli::DW_AT_call_column => {
                        call_column = attr.udata_value().unwrap_or(0) as u32;
                    }
                    _ => {}
                },
                Err(e) => return Err(e),
            }
        }

        let function_index = inlined_functions.len();
        inlined_functions.push(InlinedFunction {
            dw_die_offset,
            name,
            call_file,
            call_line,
            call_column,
        });
        ranges.for_each_range(sections, unit, |range| {
            inlined_addresses.push(InlinedFunctionAddress {
                range,
                call_depth: inlined_depth,
                function: function_index,
            });
        })?;

        // Recurse into children, one call level deeper.
        Function::parse_children(
            entries,
            depth,
            file,
            unit,
            ctx,
            sections,
            inlined_functions,
            inlined_addresses,
            inlined_depth + 1,
        )
    }
}
/// Resolve a reference attribute value (e.g. from `DW_AT_abstract_origin` or
/// `DW_AT_specification`) to the name of the DIE it points at.
///
/// `recursion_limit` bounds how many reference hops may be followed; once it
/// reaches zero the lookup gives up and returns `Ok(None)`.
fn name_attr<R>(
    attr: gimli::AttributeValue<R>,
    mut file: DebugFile,
    unit: &gimli::Unit<R>,
    ctx: &Context<R>,
    sections: &gimli::Dwarf<R>,
    recursion_limit: usize,
) -> Result<Option<R>, Error>
where
    R: gimli::Reader,
{
    if recursion_limit == 0 {
        return Ok(None);
    }
    match attr {
        // A reference within the same unit.
        gimli::AttributeValue::UnitRef(offset) => {
            return name_entry(file, unit, offset, ctx, sections, recursion_limit);
        }
        // A reference to a DIE in another unit of `.debug_info`.
        gimli::AttributeValue::DebugInfoRef(dr) => {
            let (unit, offset) = ctx.find_unit(dr, file)?;
            return name_entry(file, unit, offset, ctx, sections, recursion_limit);
        }
        // A reference into the supplementary object file, if one is loaded.
        gimli::AttributeValue::DebugInfoRefSup(dr) => {
            if let Some(sup_sections) = sections.sup.as_ref() {
                file = DebugFile::Supplementary;
                let (unit, offset) = ctx.find_unit(dr, file)?;
                return name_entry(file, unit, offset, ctx, sup_sections, recursion_limit);
            }
        }
        _ => {}
    }
    Ok(None)
}
fn name_entry<R>(
file: DebugFile,
unit: &gimli::Unit<R>,
offset: gimli::UnitOffset<R::Offset>,
ctx: &Context<R>,
sections: &gimli::Dwarf<R>,
recursion_limit: usize,
) -> Result<Option<R>, Error>
where
R: gimli::Reader,
{
let mut entries = unit.entries_raw(Some(offset))?;
let abbrev = if let Some(abbrev) = entries.read_abbreviation()? {
abbrev
} else {
return Err(gimli::Error::NoEntryAtGivenOffset);
};
let mut name = None;
let mut next = None;
for spec in abbrev.attributes() {
match entries.read_attribute(*spec) {
Ok(ref attr) => match attr.name() {
gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
if let Ok(val) = sections.attr_string(unit, attr.value()) {
return Ok(Some(val));
}
}
gimli::DW_AT_name => {
if let Ok(val) = sections.attr_string(unit, attr.value()) {
name = Some(val);
}
}
gimli::DW_AT_abstract_origin | gimli::DW_AT_specification => {
next = Some(attr.value());
}
_ => {}
},
Err(e) => return Err(e),
}
}
if name.is_some() {
return Ok(name);
}
if let Some(next) = next {
return name_attr(next, file, unit, ctx, sections, recursion_limit - 1);
}
Ok(None)
}

View file

@ -0,0 +1,31 @@
use core::cell::UnsafeCell;
/// A cell that is initialized at most once, on first access through
/// `borrow_with`.
///
/// Not `Sync` (it contains an `UnsafeCell`), so all access is confined to a
/// single thread.
pub struct LazyCell<T> {
    contents: UnsafeCell<Option<T>>,
}

impl<T> LazyCell<T> {
    /// Creates an empty, uninitialized cell.
    pub fn new() -> LazyCell<T> {
        LazyCell {
            contents: UnsafeCell::new(None),
        }
    }

    /// Returns a reference to the value if it has been initialized.
    pub fn borrow(&self) -> Option<&T> {
        // SAFETY: once the slot becomes `Some` it is never overwritten
        // (see `borrow_with`), and the type is !Sync, so this shared
        // reference cannot alias an active mutation.
        unsafe { (*self.contents.get()).as_ref() }
    }

    /// Returns a reference to the value, initializing it with `closure` if
    /// the cell is still empty.
    pub fn borrow_with(&self, closure: impl FnOnce() -> T) -> &T {
        let slot = self.contents.get();
        // SAFETY: same invariant as `borrow` — a `Some` slot is never
        // mutated again, and there is no concurrent access.
        match unsafe { &*slot } {
            Some(value) => value,
            None => {
                // `closure` may re-enter `borrow_with` and fill the cell
                // itself. `get_or_insert` only writes if the slot is still
                // `None`, so a recursively stored value is never clobbered.
                let computed = closure();
                // SAFETY: mutation happens only while the slot is `None`,
                // i.e. before any `&T` into it has been handed out.
                unsafe { (*slot).get_or_insert(computed) }
            }
        }
    }
}

1729
third-party/vendor/addr2line/src/lib.rs vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,126 @@
use addr2line::Context;
use fallible_iterator::FallibleIterator;
use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
use object::Object;
use std::borrow::Cow;
use std::fs::File;
use std::sync::Arc;
/// Locate the debug info for the currently running test binary.
///
/// Normally the executable's own mapping is returned. When the executable is
/// Mach-O (has a UUID), the DWARF lives in a separate `.dSYM` bundle next to
/// it; scan sibling directories for the bundle whose UUID matches.
fn find_debuginfo() -> memmap2::Mmap {
    let path = std::env::current_exe().unwrap();
    let map = unsafe { memmap2::Mmap::map(&File::open(&path).unwrap()).unwrap() };
    let file = &object::File::parse(&*map).unwrap();
    if let Ok(uuid) = file.mach_uuid() {
        let exe_dir = path.parent().unwrap();
        for candidate in exe_dir.read_dir().unwrap() {
            let bundle = candidate.unwrap().path();
            if !bundle.to_str().unwrap().ends_with(".dSYM") {
                continue;
            }
            let dwarf_dir = bundle.join("Contents/Resources/DWARF");
            for candidate in dwarf_dir.read_dir().unwrap() {
                let dsym_path = candidate.unwrap().path();
                let dsym_map =
                    unsafe { memmap2::Mmap::map(&File::open(&dsym_path).unwrap()).unwrap() };
                let dsym_file = &object::File::parse(&*dsym_map).unwrap();
                if dsym_file.mach_uuid().unwrap() == uuid {
                    return dsym_map;
                }
            }
        }
    }
    map
}
/// Symbolize the addresses of known functions in this test binary and check
/// that the names recovered from the debug info match.
#[test]
fn correctness() {
    let map = find_debuginfo();
    let file = &object::File::parse(&*map).unwrap();
    let module_base = file.relative_address_base();
    let endian = if file.is_little_endian() {
        gimli::RunTimeEndian::Little
    } else {
        gimli::RunTimeEndian::Big
    };

    // Load one DWARF section from `file` as a reference-counted slice;
    // a missing section is loaded as an empty one.
    fn load_section<'data: 'file, 'file, O, Endian>(
        id: gimli::SectionId,
        file: &'file O,
        endian: Endian,
    ) -> Result<gimli::EndianArcSlice<Endian>, gimli::Error>
    where
        O: object::Object<'data, 'file>,
        Endian: gimli::Endianity,
    {
        use object::ObjectSection;
        let data = file
            .section_by_name(id.name())
            .and_then(|section| section.uncompressed_data().ok())
            .unwrap_or(Cow::Borrowed(&[]));
        Ok(gimli::EndianArcSlice::new(Arc::from(&*data), endian))
    }

    let dwarf = gimli::Dwarf::load(|id| load_section(id, file, endian)).unwrap();
    let ctx = Context::from_dwarf(dwarf).unwrap();
    let mut split_dwarf_loader = addr2line::builtin_split_dwarf_loader::SplitDwarfLoader::new(
        |data, endian| gimli::EndianArcSlice::new(Arc::from(&*data), endian),
        None,
    );

    // Bias converts runtime instruction pointers back to the file-relative
    // addresses recorded in the debug info.
    let mut bias = None;
    TargetSharedLibrary::each(|lib| {
        bias = Some((lib.virtual_memory_bias().0 as u64).wrapping_sub(module_base));
        IterationControl::Break
    });

    #[allow(unused_mut)]
    let mut test = |sym: u64, expected_prefix: &str| {
        let ip = sym.wrapping_sub(bias.unwrap());
        let frames = ctx.find_frames(ip);
        let frames = split_dwarf_loader.run(frames).unwrap();
        let frame = frames.last().unwrap().unwrap();
        let name = frame.function.as_ref().unwrap().demangle().unwrap();
        // Old rust versions generate DWARF with wrong linkage name,
        // so only check the start.
        if !name.starts_with(expected_prefix) {
            panic!("incorrect name '{}', expected {:?}", name, expected_prefix);
        }
    };
    test(test_function as u64, "correctness::test_function");
    test(
        small::test_function as u64,
        "correctness::small::test_function",
    );
    test(auxiliary::foo as u64, "auxiliary::foo");
}
mod small {
    // A function inside a nested module; the `correctness` test expects its
    // symbolized name to include the full module path.
    pub fn test_function() {
        println!("y");
    }
}
// A top-level function whose address the `correctness` test symbolizes.
fn test_function() {
    println!("x");
}
/// Probing addresses near zero must not produce an excessive number of
/// frames (guards against runaway/bogus inline chains for addresses that
/// belong to no function).
#[test]
fn zero_function() {
    let map = find_debuginfo();
    let file = &object::File::parse(&*map).unwrap();
    let ctx = Context::new(file).unwrap();
    for probe in 0..10 {
        assert!(
            ctx.find_frames(probe)
                .skip_all_loads()
                .unwrap()
                .count()
                .unwrap()
                < 10
        );
    }
}

View file

@ -0,0 +1,135 @@
use std::env;
use std::ffi::OsStr;
use std::path::Path;
use std::process::Command;
use backtrace::Backtrace;
use findshlibs::{IterationControl, SharedLibrary, TargetSharedLibrary};
use libtest_mimic::{Arguments, Failed, Trial};
/// Capture the first few return addresses of a backtrace through `foo` and
/// `bar`, formatted as hex strings rebased by the module's load bias so they
/// match the addresses recorded in the executable's debug info.
#[inline(never)]
fn make_trace() -> Vec<String> {
    fn foo() -> Backtrace {
        bar()
    }
    #[inline(never)]
    fn bar() -> Backtrace {
        baz()
    }
    #[inline(always)]
    fn baz() -> Backtrace {
        Backtrace::new_unresolved()
    }

    // Load bias of the main executable: subtracted from each captured
    // instruction pointer to get a file-relative address.
    let mut base_addr = None;
    TargetSharedLibrary::each(|lib| {
        base_addr = Some(lib.virtual_memory_bias().0 as isize);
        IterationControl::Break
    });
    let addrfix = -base_addr.unwrap();

    let trace = foo();
    trace
        .frames()
        .iter()
        .take(5)
        .map(|x| format!("{:p}", (x.ip() as *const u8).wrapping_offset(addrfix)))
        .collect()
}
/// Run an addr2line-style binary at `exe` against executable `me`, looking up
/// the single address `trace`, and return its stdout as a string.
///
/// Panics if the child exits unsuccessfully or prints non-UTF-8 output.
fn run_cmd<P: AsRef<OsStr>>(exe: P, me: &Path, flags: Option<&str>, trace: &str) -> String {
    let mut cmd = Command::new(exe);
    cmd.env("LC_ALL", "C") // GNU addr2line is localized, we aren't
        .env("RUST_BACKTRACE", "1"); // if a child crashes, we want to know why
    if let Some(flags) = flags {
        cmd.arg(flags);
    }
    let output = cmd.arg("--exe").arg(me).arg(trace).output().unwrap();
    assert!(output.status.success());
    String::from_utf8(output.stdout).unwrap()
}
/// Compare this crate's example `addr2line` binary against the system GNU
/// `addr2line` for every address in a freshly captured backtrace, using the
/// given flag string (e.g. `-fi`), and fail on any output difference.
fn run_test(flags: Option<&str>) -> Result<(), Failed> {
    let me = env::current_exe().unwrap();
    // The example binary lives in `<target dir>/examples/addr2line`,
    // one level up from the `deps` directory this test runs in.
    let mut exe = me.clone();
    assert!(exe.pop());
    if exe.file_name().unwrap().to_str().unwrap() == "deps" {
        assert!(exe.pop());
    }
    exe.push("examples");
    exe.push("addr2line");
    assert!(exe.is_file());
    let trace = make_trace();
    // HACK: GNU addr2line has a bug where looking up multiple addresses can cause the second
    // lookup to fail. Workaround by doing one address at a time.
    for addr in &trace {
        let theirs = run_cmd("addr2line", &me, flags, addr);
        let ours = run_cmd(&exe, &me, flags, addr);
        // HACK: GNU addr2line does not tidy up paths properly, causing double slashes to be printed.
        // We consider our behavior to be correct, so we fix their output to match ours.
        let theirs = theirs.replace("//", "/");
        assert!(
            theirs == ours,
            "Output not equivalent:
$ addr2line {0} --exe {1} {2}
{4}
$ {3} {0} --exe {1} {2}
{5}
",
            flags.unwrap_or(""),
            me.display(),
            trace.join(" "),
            exe.display(),
            theirs,
            ours
        );
    }
    Ok(())
}
static FLAGS: &str = "aipsf";
/// Build one trial per subset of `FLAGS` (including the empty subset, which
/// runs with no flags at all).
fn make_tests() -> Vec<Trial> {
    let mut trials = Vec::new();
    for bits in 0..(1 << FLAGS.len()) {
        // Bit `i` of `bits` selects the i-th flag character.
        let param = if bits == 0 {
            None
        } else {
            let mut flags = String::from("-");
            for (i, flag) in FLAGS.chars().enumerate() {
                if (bits & (1 << i)) != 0 {
                    flags.push(flag);
                }
            }
            Some(flags)
        };
        trials.push(Trial::test(
            format!("addr2line {}", param.as_deref().unwrap_or("")),
            move || run_test(param.as_deref()),
        ));
    }
    trials
}
fn main() {
    // Comparing against the system GNU addr2line only makes sense on Linux;
    // elsewhere this harness is a no-op.
    if cfg!(target_os = "linux") {
        let args = Arguments::from_args();
        libtest_mimic::run(&args, make_tests()).exit();
    }
}

View file

@ -0,0 +1,114 @@
use std::borrow::Cow;
use std::env;
use std::fs::File;
use std::path::{self, PathBuf};
use object::Object;
/// Path to the pre-built release fixture binary.
///
/// `ADDR2LINE_FIXTURE_PATH` overrides everything; otherwise the default is
/// `$CARGO_MANIFEST_DIR/fixtures/addr2line-release` (relative to the current
/// directory when the manifest dir is unset).
fn release_fixture_path() -> PathBuf {
    if let Ok(p) = env::var("ADDR2LINE_FIXTURE_PATH") {
        return p.into();
    }
    let base = env::var("CARGO_MANIFEST_DIR")
        .map(PathBuf::from)
        .unwrap_or_default();
    base.join("fixtures").join("addr2line-release")
}
/// Open and memory-map `target`, parse it as an object file, and pass the
/// parsed file to `f`. Panics on any I/O or parse failure.
fn with_file<F: FnOnce(&object::File<'_>)>(target: &path::Path, f: F) {
    let handle = File::open(target).unwrap();
    let map = unsafe { memmap2::Mmap::map(&handle).unwrap() };
    let parsed = object::File::parse(&*map).unwrap();
    f(&parsed)
}
/// Load every DWARF section of `object` as a borrowed byte slice;
/// missing sections are loaded as empty slices.
fn dwarf_load<'a>(object: &object::File<'a>) -> gimli::Dwarf<Cow<'a, [u8]>> {
    let load_section = |id: gimli::SectionId| -> Result<Cow<'a, [u8]>, gimli::Error> {
        use object::ObjectSection;
        let data = object
            .section_by_name(id.name())
            .and_then(|section| section.data().ok())
            .unwrap_or(&[][..]);
        Ok(Cow::Borrowed(data))
    };
    gimli::Dwarf::load(&load_section).unwrap()
}
/// Re-borrow loaded DWARF sections as `EndianSlice` readers over the `Cow`
/// buffers, interpreting the data as little-endian.
fn dwarf_borrow<'a>(
    dwarf: &'a gimli::Dwarf<Cow<'_, [u8]>>,
) -> gimli::Dwarf<gimli::EndianSlice<'a, gimli::LittleEndian>> {
    // The explicit higher-ranked `&dyn Fn` type is what `Dwarf::borrow`
    // needs to apply the conversion to each section uniformly.
    let borrow_section: &dyn for<'b> Fn(
        &'b Cow<'_, [u8]>,
    ) -> gimli::EndianSlice<'b, gimli::LittleEndian> =
        &|section| gimli::EndianSlice::new(section, gimli::LittleEndian);
    dwarf.borrow(&borrow_section)
}
/// Building an `ObjectContext` over the release fixture must succeed.
#[test]
fn parse_base_rc() {
    with_file(&release_fixture_path(), |file| {
        addr2line::ObjectContext::new(file).unwrap();
    });
}
/// Building a slice-backed `Context` over the release fixture must succeed.
#[test]
fn parse_base_slice() {
    with_file(&release_fixture_path(), |file| {
        let owned = dwarf_load(file);
        let borrowed = dwarf_borrow(&owned);
        addr2line::Context::from_dwarf(borrowed).unwrap();
    });
}
/// Parsing all line tables of the release fixture must succeed (Rc readers).
#[test]
fn parse_lines_rc() {
    with_file(&release_fixture_path(), |file| {
        let ctx = addr2line::ObjectContext::new(file).unwrap();
        ctx.parse_lines().unwrap();
    });
}
/// Parsing all line tables of the release fixture must succeed (slice readers).
#[test]
fn parse_lines_slice() {
    with_file(&release_fixture_path(), |file| {
        let owned = dwarf_load(file);
        let borrowed = dwarf_borrow(&owned);
        let ctx = addr2line::Context::from_dwarf(borrowed).unwrap();
        ctx.parse_lines().unwrap();
    });
}
/// Parsing all function info of the release fixture must succeed (Rc readers).
#[test]
fn parse_functions_rc() {
    with_file(&release_fixture_path(), |file| {
        let ctx = addr2line::ObjectContext::new(file).unwrap();
        ctx.parse_functions().unwrap();
    });
}
/// Parsing all function info of the release fixture must succeed (slice readers).
#[test]
fn parse_functions_slice() {
    with_file(&release_fixture_path(), |file| {
        let owned = dwarf_load(file);
        let borrowed = dwarf_borrow(&owned);
        let ctx = addr2line::Context::from_dwarf(borrowed).unwrap();
        ctx.parse_functions().unwrap();
    });
}

View file

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"737088e45fdf27fe2cfedce163332d8ce08c58fd86ca287de2de34c0fbaf63e7","Cargo.toml":"f410869f0f1a5697f65a8a77be03da7aeecc0be26e7cf3a1feb1acaa4f518770","LICENSE-0BSD":"861399f8c21c042b110517e76dc6b63a2b334276c8cf17412fc3c8908ca8dc17","LICENSE-APACHE":"8ada45cd9f843acf64e4722ae262c622a2b3b3007c7310ef36ac1061a30f6adb","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"308c50cdb42b9573743068158339570b45ca3f895015ca3b87ba983edb0a21e6","RELEASE_PROCESS.md":"a86cd10fc70f167f8d00e9e4ce0c6b4ebdfa1865058390dffd1e0ad4d3e68d9d","benches/bench.rs":"c07ce370e3680c602e415f8d1ec4e543ea2163ab22a09b6b82d93e8a30adca82","src/algo.rs":"b664b131f724a809591394a10b9023f40ab5963e32a83fa3163c2668e59c8b66","src/lib.rs":"b55ba9c629b30360d08168b2ca0c96275432856a539737a105a6d6ae6bf7e88f"},"package":"f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"}

63
third-party/vendor/adler/CHANGELOG.md vendored Normal file
View file

@ -0,0 +1,63 @@
# Changelog
## Unreleased
No changes.
## [1.0.2 - 2021-02-26](https://github.com/jonas-schievink/adler/releases/tag/v1.0.2)
- Fix doctest on big-endian systems ([#9]).
[#9]: https://github.com/jonas-schievink/adler/pull/9
## [1.0.1 - 2020-11-08](https://github.com/jonas-schievink/adler/releases/tag/v1.0.1)
### Fixes
- Fix documentation on docs.rs.
## [1.0.0 - 2020-11-08](https://github.com/jonas-schievink/adler/releases/tag/v1.0.0)
### Fixes
- Fix `cargo test --no-default-features` ([#5]).
### Improvements
- Extended and clarified documentation.
- Added more rustdoc examples.
- Extended CI to test the crate with `--no-default-features`.
### Breaking Changes
- `adler32_reader` now takes its generic argument by value instead of as a `&mut`.
- Renamed `adler32_reader` to `adler32`.
## [0.2.3 - 2020-07-11](https://github.com/jonas-schievink/adler/releases/tag/v0.2.3)
- Process 4 bytes at a time, improving performance by up to 50% ([#2]).
## [0.2.2 - 2020-06-27](https://github.com/jonas-schievink/adler/releases/tag/v0.2.2)
- Bump MSRV to 1.31.0.
## [0.2.1 - 2020-06-27](https://github.com/jonas-schievink/adler/releases/tag/v0.2.1)
- Add a few `#[inline]` annotations to small functions.
- Fix CI badge.
- Allow integration into libstd.
## [0.2.0 - 2020-06-27](https://github.com/jonas-schievink/adler/releases/tag/v0.2.0)
- Support `#![no_std]` when using `default-features = false`.
- Improve performance by around 7x.
- Support Rust 1.8.0.
- Improve API naming.
## [0.1.0 - 2020-06-26](https://github.com/jonas-schievink/adler/releases/tag/v0.1.0)
Initial release.
[#2]: https://github.com/jonas-schievink/adler/pull/2
[#5]: https://github.com/jonas-schievink/adler/pull/5

64
third-party/vendor/adler/Cargo.toml vendored Normal file
View file

@ -0,0 +1,64 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "adler"
version = "1.0.2"
authors = ["Jonas Schievink <jonasschievink@gmail.com>"]
description = "A simple clean-room implementation of the Adler-32 checksum"
documentation = "https://docs.rs/adler/"
readme = "README.md"
keywords = ["checksum", "integrity", "hash", "adler32", "zlib"]
categories = ["algorithms"]
license = "0BSD OR MIT OR Apache-2.0"
repository = "https://github.com/jonas-schievink/adler.git"
[package.metadata.docs.rs]
rustdoc-args = ["--cfg=docsrs"]
[package.metadata.release]
no-dev-version = true
pre-release-commit-message = "Release {{version}}"
tag-message = "{{version}}"
[[package.metadata.release.pre-release-replacements]]
file = "CHANGELOG.md"
replace = "## Unreleased\n\nNo changes.\n\n## [{{version}} - {{date}}](https://github.com/jonas-schievink/adler/releases/tag/v{{version}})\n"
search = "## Unreleased\n"
[[package.metadata.release.pre-release-replacements]]
file = "README.md"
replace = "adler = \"{{version}}\""
search = "adler = \"[a-z0-9\\\\.-]+\""
[[package.metadata.release.pre-release-replacements]]
file = "src/lib.rs"
replace = "https://docs.rs/adler/{{version}}"
search = "https://docs.rs/adler/[a-z0-9\\.-]+"
[[bench]]
name = "bench"
harness = false
[dependencies.compiler_builtins]
version = "0.1.2"
optional = true
[dependencies.core]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-core"
[dev-dependencies.criterion]
version = "0.3.2"
[features]
default = ["std"]
rustc-dep-of-std = ["core", "compiler_builtins"]
std = []

12
third-party/vendor/adler/LICENSE-0BSD vendored Normal file
View file

@ -0,0 +1,12 @@
Copyright (C) Jonas Schievink <jonasschievink@gmail.com>
Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

201
third-party/vendor/adler/LICENSE-APACHE vendored Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/LICENSE-2.0
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

23
third-party/vendor/adler/LICENSE-MIT vendored Normal file
View file

@ -0,0 +1,23 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

39
third-party/vendor/adler/README.md vendored Normal file
View file

@ -0,0 +1,39 @@
# Adler-32 checksums for Rust
[![crates.io](https://img.shields.io/crates/v/adler.svg)](https://crates.io/crates/adler)
[![docs.rs](https://docs.rs/adler/badge.svg)](https://docs.rs/adler/)
![CI](https://github.com/jonas-schievink/adler/workflows/CI/badge.svg)
This crate provides a simple implementation of the Adler-32 checksum, used in
the zlib compression format.
Please refer to the [changelog](CHANGELOG.md) to see what changed in the last
releases.
## Features
- Permissively licensed (0BSD) clean-room implementation.
- Zero dependencies.
- Zero `unsafe`.
- Decent performance (3-4 GB/s).
- Supports `#![no_std]` (with `default-features = false`).
## Usage
Add an entry to your `Cargo.toml`:
```toml
[dependencies]
adler = "1.0.2"
```
Check the [API Documentation](https://docs.rs/adler/) for how to use the
crate's functionality.
## Rust version support
Currently, this crate supports all Rust versions starting at Rust 1.31.0.
Bumping the Minimum Supported Rust Version (MSRV) is *not* considered a breaking
change, but will not be done without good reasons. The latest 3 stable Rust
versions will always be supported no matter what.

View file

@ -0,0 +1,13 @@
# What to do to publish a new release
1. Ensure all notable changes are in the changelog under "Unreleased".
2. Execute `cargo release <level>` to bump version(s), tag and publish
everything. External subcommand, must be installed with `cargo install
cargo-release`.
`<level>` can be one of `major|minor|patch`. If this is the first release
(`0.1.0`), use `minor`, since the version starts out as `0.0.0`.
3. Go to the GitHub releases, edit the just-pushed tag. Copy the release notes
from the changelog.

View file

@ -0,0 +1,109 @@
extern crate adler;
extern crate criterion;
use adler::{adler32_slice, Adler32};
use criterion::{criterion_group, criterion_main, Criterion, Throughput};
fn simple(c: &mut Criterion) {
{
const SIZE: usize = 100;
let mut group = c.benchmark_group("simple-100b");
group.throughput(Throughput::Bytes(SIZE as u64));
group.bench_function("zeroes-100", |bencher| {
bencher.iter(|| {
adler32_slice(&[0; SIZE]);
});
});
group.bench_function("ones-100", |bencher| {
bencher.iter(|| {
adler32_slice(&[0xff; SIZE]);
});
});
}
{
const SIZE: usize = 1024;
let mut group = c.benchmark_group("simple-1k");
group.throughput(Throughput::Bytes(SIZE as u64));
group.bench_function("zeroes-1k", |bencher| {
bencher.iter(|| {
adler32_slice(&[0; SIZE]);
});
});
group.bench_function("ones-1k", |bencher| {
bencher.iter(|| {
adler32_slice(&[0xff; SIZE]);
});
});
}
{
const SIZE: usize = 1024 * 1024;
let mut group = c.benchmark_group("simple-1m");
group.throughput(Throughput::Bytes(SIZE as u64));
group.bench_function("zeroes-1m", |bencher| {
bencher.iter(|| {
adler32_slice(&[0; SIZE]);
});
});
group.bench_function("ones-1m", |bencher| {
bencher.iter(|| {
adler32_slice(&[0xff; SIZE]);
});
});
}
}
/// Benchmarks incremental hashing of a 16 MiB buffer fed to `Adler32` in
/// chunks of various sizes, to show how chunking granularity affects speed.
fn chunked(c: &mut Criterion) {
    const SIZE: usize = 16 * 1024 * 1024;
    let data = vec![0xAB; SIZE];
    let mut group = c.benchmark_group("chunked-16m");
    group.throughput(Throughput::Bytes(SIZE as u64));

    // (benchmark name, chunk length in bytes); 5552 matches the algorithm's
    // internal reduction interval, the rest are round power-of-two sizes.
    let cases: [(&str, usize); 4] = [
        ("5552", 5552),
        ("8k", 8 * 1024),
        ("64k", 64 * 1024),
        ("1m", 1024 * 1024),
    ];
    for &(name, chunk_len) in cases.iter() {
        group.bench_function(name, |bencher| {
            bencher.iter(|| {
                let mut hasher = Adler32::new();
                for chunk in data.chunks(chunk_len) {
                    hasher.write_slice(chunk);
                }
                hasher.checksum()
            });
        });
    }
}
// Register both benchmark suites; `criterion_main!` generates the binary's
// `main` (the bench target sets `harness = false` in Cargo.toml).
criterion_group!(benches, simple, chunked);
criterion_main!(benches);

146
third-party/vendor/adler/src/algo.rs vendored Normal file
View file

@ -0,0 +1,146 @@
use crate::Adler32;
use std::ops::{AddAssign, MulAssign, RemAssign};
impl Adler32 {
    /// Feeds `bytes` into the running checksum state (`self.a` / `self.b`).
    pub(crate) fn compute(&mut self, bytes: &[u8]) {
        // The basic algorithm is, for every byte:
        //   a = (a + byte) % MOD
        //   b = (b + a) % MOD
        // where MOD = 65521.
        //
        // For efficiency, we can defer the `% MOD` operations as long as neither a nor b overflows:
        // - Between calls to `write`, we ensure that a and b are always in range 0..MOD.
        // - We use 32-bit arithmetic in this function.
        // - Therefore, a and b must not increase by more than 2^32-MOD without performing a `% MOD`
        //   operation.
        //
        // According to Wikipedia, b is calculated as follows for non-incremental checksumming:
        //   b = n×D1 + (n-1)×D2 + (n-2)×D3 + ... + Dn + n*1 (mod 65521)
        // Where n is the number of bytes and Di is the i-th Byte. We need to change this to account
        // for the previous values of a and b, as well as treat every input Byte as being 255:
        //   b_inc = n×255 + (n-1)×255 + ... + 255 + n*65520
        // Or in other words:
        //   b_inc = n*65520 + n(n+1)/2*255
        // The max chunk size is thus the largest value of n so that b_inc <= 2^32-65521.
        //   2^32-65521 = n*65520 + n(n+1)/2*255
        // Plugging this into an equation solver since I can't math gives n = 5552.18..., so 5552.
        //
        // On top of the optimization outlined above, the algorithm can also be parallelized with a
        // bit more work:
        //
        // Note that b is a linear combination of a vector of input bytes (D1, ..., Dn).
        //
        // If we fix some value k<N and rewrite indices 1, ..., N as
        //
        //   1_1, 1_2, ..., 1_k, 2_1, ..., 2_k, ..., (N/k)_k,
        //
        // then we can express a and b in terms of sums of smaller sequences kb and ka:
        //
        //   ka(j) := D1_j + D2_j + ... + D(N/k)_j where j <= k
        //   kb(j) := (N/k)*D1_j + (N/k-1)*D2_j + ... + D(N/k)_j where j <= k
        //
        //   a = ka(1) + ka(2) + ... + ka(k) + 1
        //   b = k*(kb(1) + kb(2) + ... + kb(k)) - 1*ka(2) - ... - (k-1)*ka(k) + N
        //
        // We use this insight to unroll the main loop and process k=4 bytes at a time.
        // The resulting code is highly amenable to SIMD acceleration, although the immediate speedups
        // stem from increased pipeline parallelism rather than auto-vectorization.
        //
        // This technique is described in-depth (here:)[https://software.intel.com/content/www/us/\
        // en/develop/articles/fast-computation-of-fletcher-checksums.html]
        const MOD: u32 = 65521;
        // 5552 bytes per lane times 4 lanes between deferred reductions.
        const CHUNK_SIZE: usize = 5552 * 4;

        let mut a = u32::from(self.a);
        let mut b = u32::from(self.b);
        let mut a_vec = U32X4([0; 4]);
        let mut b_vec = a_vec;

        // Split off the tail that doesn't fill a whole 4-byte group.
        let (bytes, remainder) = bytes.split_at(bytes.len() - bytes.len() % 4);

        // iterate over 4 bytes at a time
        let chunk_iter = bytes.chunks_exact(CHUNK_SIZE);
        let remainder_chunk = chunk_iter.remainder();
        for chunk in chunk_iter {
            for byte_vec in chunk.chunks_exact(4) {
                let val = U32X4::from(byte_vec);
                a_vec += val;
                b_vec += a_vec;
            }
            // Each of the CHUNK_SIZE bytes also contributes one copy of the
            // `a` value carried into this chunk to `b`.
            b += CHUNK_SIZE as u32 * a;
            a_vec %= MOD;
            b_vec %= MOD;
            b %= MOD;
        }

        // special-case the final chunk because it may be shorter than the rest
        for byte_vec in remainder_chunk.chunks_exact(4) {
            let val = U32X4::from(byte_vec);
            a_vec += val;
            b_vec += a_vec;
        }
        b += remainder_chunk.len() as u32 * a;
        a_vec %= MOD;
        b_vec %= MOD;
        b %= MOD;

        // combine the sub-sum results into the main sum
        b_vec *= 4;
        // Subtract the lane corrections; `MOD - x` keeps the intermediate
        // values non-negative in unsigned arithmetic (equivalent mod MOD).
        b_vec.0[1] += MOD - a_vec.0[1];
        b_vec.0[2] += (MOD - a_vec.0[2]) * 2;
        b_vec.0[3] += (MOD - a_vec.0[3]) * 3;
        for &av in a_vec.0.iter() {
            a += av;
        }
        for &bv in b_vec.0.iter() {
            b += bv;
        }

        // iterate over the remaining few bytes in serial
        for &byte in remainder.iter() {
            a += u32::from(byte);
            b += a;
        }

        self.a = (a % MOD) as u16;
        self.b = (b % MOD) as u16;
    }
}
/// A tiny 4-lane vector of `u32` sub-sums, used to unroll the hot loop.
#[derive(Copy, Clone)]
struct U32X4([u32; 4]);

impl U32X4 {
    /// Widens the first four bytes of `bytes` into one `u32` per lane.
    /// Panics if `bytes` holds fewer than four elements.
    fn from(bytes: &[u8]) -> Self {
        let mut lanes = [0u32; 4];
        for i in 0..4 {
            lanes[i] = u32::from(bytes[i]);
        }
        U32X4(lanes)
    }
}

/// Lane-wise `+=`.
impl AddAssign<Self> for U32X4 {
    fn add_assign(&mut self, other: Self) {
        for i in 0..4 {
            self.0[i] += other.0[i];
        }
    }
}

/// Lane-wise `%=` by a scalar.
impl RemAssign<u32> for U32X4 {
    fn rem_assign(&mut self, quotient: u32) {
        for lane in self.0.iter_mut() {
            *lane %= quotient;
        }
    }
}

/// Lane-wise `*=` by a scalar.
impl MulAssign<u32> for U32X4 {
    fn mul_assign(&mut self, rhs: u32) {
        for lane in self.0.iter_mut() {
            *lane *= rhs;
        }
    }
}

287
third-party/vendor/adler/src/lib.rs vendored Normal file
View file

@ -0,0 +1,287 @@
//! Adler-32 checksum implementation.
//!
//! This implementation features:
//!
//! - Permissively licensed (0BSD) clean-room implementation.
//! - Zero dependencies.
//! - Zero `unsafe`.
//! - Decent performance (3-4 GB/s).
//! - `#![no_std]` support (with `default-features = false`).
#![doc(html_root_url = "https://docs.rs/adler/1.0.2")]
// Deny a few warnings in doctests, since rustdoc `allow`s many warnings by default
#![doc(test(attr(deny(unused_imports, unused_must_use))))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![warn(missing_debug_implementations)]
#![forbid(unsafe_code)]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
extern crate core as std;
mod algo;
use std::hash::Hasher;
#[cfg(feature = "std")]
use std::io::{self, BufRead};
/// Adler-32 checksum calculator.
///
/// An instance of this type is equivalent to an Adler-32 checksum: It can be created in the default
/// state via [`new`] (or the provided `Default` impl), or from a precalculated checksum via
/// [`from_checksum`], and the currently stored checksum can be fetched via [`checksum`].
///
/// This type also implements `Hasher`, which makes it easy to calculate Adler-32 checksums of any
/// type that implements or derives `Hash`. This also allows using Adler-32 in a `HashMap`, although
/// that is not recommended (while every checksum is a hash function, they are not necessarily a
/// good one).
///
/// # Examples
///
/// Basic, piecewise checksum calculation:
///
/// ```
/// use adler::Adler32;
///
/// let mut adler = Adler32::new();
///
/// adler.write_slice(&[0, 1, 2]);
/// adler.write_slice(&[3, 4, 5]);
///
/// assert_eq!(adler.checksum(), 0x00290010);
/// ```
///
/// Using `Hash` to process structures:
///
/// ```
/// use std::hash::Hash;
/// use adler::Adler32;
///
/// #[derive(Hash)]
/// struct Data {
/// byte: u8,
/// word: u16,
/// big: u64,
/// }
///
/// let mut adler = Adler32::new();
///
/// let data = Data { byte: 0x1F, word: 0xABCD, big: !0 };
/// data.hash(&mut adler);
///
/// // hash value depends on architecture endianness
/// if cfg!(target_endian = "little") {
/// assert_eq!(adler.checksum(), 0x33410990);
/// }
/// if cfg!(target_endian = "big") {
/// assert_eq!(adler.checksum(), 0x331F0990);
/// }
///
/// ```
///
/// [`new`]: #method.new
/// [`from_checksum`]: #method.from_checksum
/// [`checksum`]: #method.checksum
#[derive(Debug, Copy, Clone)]
pub struct Adler32 {
    // Low 16 bits of the checksum: 1 + the sum of all input bytes, mod 65521.
    a: u16,
    // High 16 bits: the running sum of all intermediate `a` values, mod 65521.
    b: u16,
}
impl Adler32 {
    /// Creates a new Adler-32 instance with default state.
    #[inline]
    pub fn new() -> Self {
        Default::default()
    }

    /// Creates an `Adler32` instance from a precomputed Adler-32 checksum.
    ///
    /// This allows resuming checksum calculation without having to keep the `Adler32` instance
    /// around.
    ///
    /// # Example
    ///
    /// ```
    /// # use adler::Adler32;
    /// let parts = [
    ///     "rust",
    ///     "acean",
    /// ];
    /// let whole = adler::adler32_slice(b"rustacean");
    ///
    /// let mut sum = Adler32::new();
    /// sum.write_slice(parts[0].as_bytes());
    /// let partial = sum.checksum();
    ///
    /// // ...later
    ///
    /// let mut sum = Adler32::from_checksum(partial);
    /// sum.write_slice(parts[1].as_bytes());
    /// assert_eq!(sum.checksum(), whole);
    /// ```
    #[inline]
    pub fn from_checksum(sum: u32) -> Self {
        // The low half of the checksum is `a`, the high half is `b`.
        let a = (sum & 0xFFFF) as u16;
        let b = (sum >> 16) as u16;
        Adler32 { a, b }
    }

    /// Returns the calculated checksum at this point in time.
    #[inline]
    pub fn checksum(&self) -> u32 {
        u32::from(self.a) | (u32::from(self.b) << 16)
    }

    /// Adds `bytes` to the checksum calculation.
    ///
    /// If efficiency matters, this should be called with Byte slices that contain at least a few
    /// thousand Bytes.
    pub fn write_slice(&mut self, bytes: &[u8]) {
        self.compute(bytes);
    }
}
impl Default for Adler32 {
#[inline]
fn default() -> Self {
Adler32 { a: 1, b: 0 }
}
}
impl Hasher for Adler32 {
    #[inline]
    fn finish(&self) -> u64 {
        // Widen the 32-bit checksum into the 64-bit `Hasher` result.
        self.checksum().into()
    }

    fn write(&mut self, bytes: &[u8]) {
        self.write_slice(bytes);
    }
}
/// Calculates the Adler-32 checksum of a byte slice.
///
/// This is a convenience function around the [`Adler32`] type.
///
/// [`Adler32`]: struct.Adler32.html
pub fn adler32_slice(data: &[u8]) -> u32 {
    let mut hasher = Adler32::new();
    hasher.write_slice(data);
    hasher.checksum()
}
/// Calculates the Adler-32 checksum of a `BufRead`'s contents.
///
/// The passed `BufRead` implementor will be read until it reaches EOF (or until it reports an
/// error).
///
/// If you only have a `Read` implementor, you can wrap it in `std::io::BufReader` before calling
/// this function.
///
/// # Errors
///
/// Any error returned by the reader are bubbled up by this function.
///
/// # Examples
///
/// ```no_run
/// # fn run() -> Result<(), Box<dyn std::error::Error>> {
/// use adler::adler32;
///
/// use std::fs::File;
/// use std::io::BufReader;
///
/// let file = File::open("input.txt")?;
/// let mut file = BufReader::new(file);
///
/// adler32(&mut file)?;
/// # Ok(()) }
/// # fn main() { run().unwrap() }
/// ```
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn adler32<R: BufRead>(mut reader: R) -> io::Result<u32> {
    let mut hasher = Adler32::new();
    loop {
        // Hash whatever the reader currently has buffered; an empty buffer
        // after `fill_buf` means EOF.
        let consumed = {
            let buf = reader.fill_buf()?;
            if buf.is_empty() {
                break;
            }
            hasher.write_slice(buf);
            buf.len()
        };
        reader.consume(consumed);
    }
    Ok(hasher.checksum())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Known checksums of all-zero inputs of increasing lengths (the checksum
    // of the empty input is 1 because `a` starts at 1).
    #[test]
    fn zeroes() {
        assert_eq!(adler32_slice(&[]), 1);
        assert_eq!(adler32_slice(&[0]), 1 | 1 << 16);
        assert_eq!(adler32_slice(&[0, 0]), 1 | 2 << 16);
        assert_eq!(adler32_slice(&[0; 100]), 0x00640001);
        assert_eq!(adler32_slice(&[0; 1024]), 0x04000001);
        assert_eq!(adler32_slice(&[0; 1024 * 1024]), 0x00f00001);
    }

    // All-0xff inputs large enough to exercise the chunked/unrolled paths.
    #[test]
    fn ones() {
        assert_eq!(adler32_slice(&[0xff; 1024]), 0x79a6fc2e);
        assert_eq!(adler32_slice(&[0xff; 1024 * 1024]), 0x8e88ef11);
    }

    #[test]
    fn mixed() {
        assert_eq!(adler32_slice(&[1]), 2 | 2 << 16);
        assert_eq!(adler32_slice(&[40]), 41 | 41 << 16);
        assert_eq!(adler32_slice(&[0xA5; 1024 * 1024]), 0xd5009ab1);
    }

    /// Example calculation from https://en.wikipedia.org/wiki/Adler-32.
    #[test]
    fn wiki() {
        assert_eq!(adler32_slice(b"Wikipedia"), 0x11E60398);
    }

    // Verifies that splitting the input across multiple `write_slice` calls
    // and resuming via `from_checksum` both match the one-shot result.
    #[test]
    fn resume() {
        let mut adler = Adler32::new();
        adler.write_slice(&[0xff; 1024]);
        let partial = adler.checksum();
        assert_eq!(partial, 0x79a6fc2e); // from above
        adler.write_slice(&[0xff; 1024 * 1024 - 1024]);
        assert_eq!(adler.checksum(), 0x8e88ef11); // from above

        // Make sure that we can resume computing from the partial checksum via `from_checksum`.
        let mut adler = Adler32::from_checksum(partial);
        adler.write_slice(&[0xff; 1024 * 1024 - 1024]);
        assert_eq!(adler.checksum(), 0x8e88ef11); // from above
    }

    #[cfg(feature = "std")]
    #[test]
    fn bufread() {
        use std::io::BufReader;

        // Helper: checksum `data` through the `BufRead`-based entry point and
        // compare against the known value.
        fn test(data: &[u8], checksum: u32) {
            // `BufReader` uses an 8 KB buffer, so this will test buffer refilling.
            let mut buf = BufReader::new(data);
            let real_sum = adler32(&mut buf).unwrap();
            assert_eq!(checksum, real_sum);
        }

        test(&[], 1);
        test(&[0; 1024], 0x04000001);
        test(&[0; 1024 * 1024], 0x00f00001);
        test(&[0xA5; 1024 * 1024], 0xd5009ab1);
    }
}

View file

@ -0,0 +1 @@
{"files":{"Cargo.toml":"ddcbd9309cebf3ffd26f87e09bb8f971793535955ebfd9a7196eba31a53471f8","FAQ.md":"9eb41898523ee209a0a937f9bcb78afe45ad55ca0556f8a4d4063558098f6d1e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0444c6991eead6822f7b9102e654448d51624431119546492e8b231db42c48bb","README.md":"d7f74d616a751bcca23d5d3b58a6daf556356a526c5f0b6aa0504715d176549a","build.rs":"23cbf4cf1b742e2c4da8bc58d06d1d021479dec80cec6a0bc3704c7172e2864a","rustfmt.toml":"e090969e99df9360705680cc0097cfaddae10c22dc2e01470592cf3b9787fd36","src/aes_hash.rs":"013602aec42150e59ba9ed6135525a624a4b42c1b1328b9857ec238aa12c3178","src/convert.rs":"54e49f93d51665366923d4d815cfd67790d3c769e84ab4386ba97f928d17d1bd","src/fallback_hash.rs":"a82451f6458a6e7a7e7da82a3c982e9bb825a2092ab79c41459d8011775fb0b1","src/hash_map.rs":"5ee97baa64fa528ba9c01bd018332c4974846c4813c6f8c30cee9f3546598f1c","src/hash_quality_test.rs":"1a560a181a804791bc6ad797df5352cdd87123fed7f19f659de0c2d883248bed","src/hash_set.rs":"360e55d066b44624f06e49efa140c03fda635fb17a59622cc29a83830bd1f263","src/lib.rs":"e2f4e7bfcf2807c73e3b8d3b1bd83c6789313b6b55edd59e15e04146e55e01b6","src/operations.rs":"38ed2b48a13d826c48ede5f304c9c2572c0c8f64ac8ac5a1ed4e112e536f3a97","src/random_state.rs":"03f40a654cfca2e00a2dabd21c85368ee50b8b6289efe98ea1745b25c721b9c6","src/specialize.rs":"56354db8a0f7e6ee1340a08f2ab6f79a0ff439fd61badac5e7e59fe4f4a653ba","tests/bench.rs":"7a425f564201560f9a8fb6c77f91f29bb88ec815b10bd27d15740c922a4f928e","tests/map_tests.rs":"e56b6f700e3b1176210e4b266d7a42b3263e966e5e565d53b1bc27af7a87168e","tests/nopanic.rs":"0d28a46248d77283941db1d9fd154c68b965c81a0e3db1fe4a43e06fc448da8f"},"package":"e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"}

167
third-party/vendor/ahash/Cargo.toml vendored Normal file
View file

@ -0,0 +1,167 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
rust-version = "1.60.0"
name = "ahash"
version = "0.8.11"
authors = ["Tom Kaitchuck <Tom.Kaitchuck@gmail.com>"]
build = "./build.rs"
exclude = [
"/smhasher",
"/benchmark_tools",
]
description = "A non-cryptographic hash function using AES-NI for high performance"
documentation = "https://docs.rs/ahash"
readme = "README.md"
keywords = [
"hash",
"hasher",
"hashmap",
"aes",
"no-std",
]
categories = [
"algorithms",
"data-structures",
"no-std",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/tkaitchuck/ahash"
[package.metadata.docs.rs]
features = ["std"]
rustc-args = [
"-C",
"target-feature=+aes",
]
rustdoc-args = [
"-C",
"target-feature=+aes",
]
[profile.bench]
opt-level = 3
lto = "fat"
codegen-units = 1
debug = 0
debug-assertions = false
[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1
debug = 0
debug-assertions = false
[profile.test]
opt-level = 2
lto = "fat"
[lib]
name = "ahash"
path = "src/lib.rs"
test = true
doctest = true
bench = true
doc = true
[[bench]]
name = "ahash"
path = "tests/bench.rs"
harness = false
[[bench]]
name = "map"
path = "tests/map_tests.rs"
harness = false
[dependencies.atomic-polyfill]
version = "1.0.1"
optional = true
[dependencies.cfg-if]
version = "1.0"
[dependencies.const-random]
version = "0.1.17"
optional = true
[dependencies.getrandom]
version = "0.2.7"
optional = true
[dependencies.serde]
version = "1.0.117"
optional = true
[dependencies.zerocopy]
version = "0.7.31"
features = ["simd"]
default-features = false
[dev-dependencies.criterion]
version = "0.3.2"
features = ["html_reports"]
[dev-dependencies.fnv]
version = "1.0.5"
[dev-dependencies.fxhash]
version = "0.2.1"
[dev-dependencies.hashbrown]
version = "0.14.3"
[dev-dependencies.hex]
version = "0.4.2"
[dev-dependencies.no-panic]
version = "0.1.10"
[dev-dependencies.pcg-mwc]
version = "0.2.1"
[dev-dependencies.rand]
version = "0.8.5"
[dev-dependencies.seahash]
version = "4.0"
[dev-dependencies.serde_json]
version = "1.0.59"
[dev-dependencies.smallvec]
version = "1.13.1"
[build-dependencies.version_check]
version = "0.9.4"
[features]
atomic-polyfill = [
"dep:atomic-polyfill",
"once_cell/atomic-polyfill",
]
compile-time-rng = ["const-random"]
default = [
"std",
"runtime-rng",
]
nightly-arm-aes = []
no-rng = []
runtime-rng = ["getrandom"]
std = []
[target."cfg(not(all(target_arch = \"arm\", target_os = \"none\")))".dependencies.once_cell]
version = "1.18.0"
features = ["alloc"]
default-features = false

118
third-party/vendor/ahash/FAQ.md vendored Normal file
View file

@ -0,0 +1,118 @@
## How does aHash prevent DOS attacks
AHash is designed to [prevent an adversary that does not know the key from being able to create hash collisions or partial collisions.](https://github.com/tkaitchuck/aHash/wiki/How-aHash-is-resists-DOS-attacks)
If you are a cryptographer and would like to help review aHash's algorithm, please post a comment [here](https://github.com/tkaitchuck/aHash/issues/11).
In short, this is achieved by ensuring that:
* aHash is designed to [resist differential crypto analysis](https://github.com/tkaitchuck/aHash/wiki/How-aHash-is-resists-DOS-attacks#differential-analysis). Meaning it should not be possible to devise a scheme to "cancel" out a modification of the internal state from a block of input via some corresponding change in a subsequent block of input.
* This is achieved by not performing any "premixing" - This reversible mixing gave previous hashes such as murmurhash confidence in their quality, but could be undone by a deliberate attack.
* Before it is used each chunk of input is "masked" such as by xoring it with an unpredictable value.
* aHash obeys the '[strict avalanche criterion](https://en.wikipedia.org/wiki/Avalanche_effect#Strict_avalanche_criterion)':
Each bit of input has the potential to flip every bit of the output.
* Similarly, each bit in the key can affect every bit in the output.
* Input bits never affect just one, or a very few, bits in intermediate state. This is specifically designed to prevent the sort of
[differential attacks launched by the sipHash authors](https://emboss.github.io/blog/2012/12/14/breaking-murmur-hash-flooding-dos-reloaded/) which cancel previous inputs.
* The `finish` call at the end of the hash is designed to not expose individual bits of the internal state.
* For example in the main algorithm 256bits of state and 256bits of keys are reduced to 64 total bits using 3 rounds of AES encryption.
Reversing this is more than non-trivial. Most of the information is by definition gone, and any given bit of the internal state is fully diffused across the output.
* In both aHash and its fallback the internal state is divided into two halves which are updated by two unrelated techniques using the same input. - This means that if there is a way to attack one of them it likely won't be able to attack both of them at the same time.
* It is deliberately difficult to 'chain' collisions. (This has been the major technique used to weaponize attacks on other hash functions)
More details are available on [the wiki](https://github.com/tkaitchuck/aHash/wiki/How-aHash-is-resists-DOS-attacks).
## Why not use a cryptographic hash in a hashmap.
Cryptographic hashes are designed to make it nearly impossible to find two items that collide when the attacker has full control
over the input. This has several implications:
* They are very difficult to construct, and have to go to a lot of effort to ensure that collisions are not possible.
* They have no notion of a 'key'. Rather, they are fully deterministic and provide exactly one hash for a given input.
For a HashMap the requirements are different.
* Speed is very important, especially for short inputs. Often the key for a HashMap is a single `u32` or similar, and to be effective
the bucket that it should be hashed to needs to be computed in just a few CPU cycles.
* A hashmap does not need to provide a hard and fast guarantee that no two inputs will ever collide. Hence, hashCodes are not 256bits
but are just 64 or 32 bits in length. Often the first thing done with the hashcode is to truncate it further to compute which among a few buckets should be used for a key.
* Here collisions are expected, and are cheap to deal with, provided there is no systematic way to generate huge numbers of values that all
go to the same bucket.
* This also means that unlike a cryptographic hash partial collisions matter. It doesn't do a hashmap any good to produce a unique 256bit hash if
the lower 12 bits are all the same. This means that even a provably irreversible hash would not offer protection from a DOS attack in a hashmap
because an attacker can easily just brute force the bottom N bits.
From a cryptography point of view, a hashmap needs something closer to a block cypher.
Where the input can be quickly mixed in a way that cannot be reversed without knowing a key.
## Why isn't aHash cryptographically secure
It is not designed to be.
Attempting to use aHash as a secure hash will likely fail to hold up for several reasons:
1. aHash relies on random keys which are assumed to not be observable by an attacker. For a cryptographic hash all inputs can be seen and controlled by the attacker.
2. aHash has not yet gone through peer review, which is a pre-requisite for security critical algorithms.
3. Because aHash uses reduced rounds of AES as opposed to the standard of 10. Things like the SQUARE attack apply to part of the internal state.
(These are mitigated by other means to prevent producing collisions, but would be a problem in other contexts).
4. Like any cypher based hash, it will show certain statistical deviations from truly random output when comparing a (VERY) large number of hashes.
(By definition cyphers have fewer collisions than truly random data.)
There are efforts to build a secure hash function that uses AES-NI for acceleration, but aHash is not one of them.
## How is aHash so fast
AHash uses a number of tricks.
One trick is taking advantage of specialization. If aHash is compiled on nightly it will take
advantage of specialized hash implementations for strings, slices, and primitives.
Another is taking advantage of hardware instructions.
When it is available aHash uses AES rounds using the AES-NI instruction. AES-NI is very fast (on an intel i7-6700 it
is as fast as a 64 bit multiplication.) and handles 16 bytes of input at a time, while being a very strong permutation.
This is obviously much faster than most standard approaches to hashing, and does a better job of scrambling data than most non-secure hashes.
On an intel i7-6700 compiled on nightly Rust with flags `-C opt-level=3 -C target-cpu=native -C codegen-units=1`:
| Input | SipHash 1-3 time | FnvHash time|FxHash time| aHash time| aHash Fallback* |
|----------------|-----------|-----------|-----------|-----------|---------------|
| u8 | 9.3271 ns | 0.808 ns | **0.594 ns** | 0.7704 ns | 0.7664 ns |
| u16 | 9.5139 ns | 0.803 ns | **0.594 ns** | 0.7653 ns | 0.7704 ns |
| u32 | 9.1196 ns | 1.4424 ns | **0.594 ns** | 0.7637 ns | 0.7712 ns |
| u64 | 10.854 ns | 3.0484 ns | **0.628 ns** | 0.7788 ns | 0.7888 ns |
| u128 | 12.465 ns | 7.0728 ns | 0.799 ns | **0.6174 ns** | 0.6250 ns |
| 1 byte string | 11.745 ns | 2.4743 ns | 2.4000 ns | **1.4921 ns** | 1.5861 ns |
| 3 byte string | 12.066 ns | 3.5221 ns | 2.9253 ns | **1.4745 ns** | 1.8518 ns |
| 4 byte string | 11.634 ns | 4.0770 ns | 1.8818 ns | **1.5206 ns** | 1.8924 ns |
| 7 byte string | 14.762 ns | 5.9780 ns | 3.2282 ns | **1.5207 ns** | 1.8933 ns |
| 8 byte string | 13.442 ns | 4.0535 ns | 2.9422 ns | **1.6262 ns** | 1.8929 ns |
| 15 byte string | 16.880 ns | 8.3434 ns | 4.6070 ns | **1.6265 ns** | 1.7965 ns |
| 16 byte string | 15.155 ns | 7.5796 ns | 3.2619 ns | **1.6262 ns** | 1.8011 ns |
| 24 byte string | 16.521 ns | 12.492 ns | 3.5424 ns | **1.6266 ns** | 2.8311 ns |
| 68 byte string | 24.598 ns | 50.715 ns | 5.8312 ns | **4.8282 ns** | 5.4824 ns |
| 132 byte string| 39.224 ns | 119.96 ns | 11.777 ns | **6.5087 ns** | 9.1459 ns |
|1024 byte string| 254.00 ns | 1087.3 ns | 156.41 ns | **25.402 ns** | 54.566 ns |
* Fallback refers to the algorithm aHash would use if AES instructions are unavailable.
For reference, a hash that does nothing (does not even read the input data) takes **0.520 ns**. So that represents the fastest
possible time.
As you can see above aHash like `FxHash` provides a large speedup over `SipHash-1-3` which is already nearly twice as fast as `SipHash-2-4`.
Rust's HashMap by default uses `SipHash-1-3` because faster hash functions such as `FxHash` are predictable and vulnerable to denial of
service attacks. While `aHash` has both very strong scrambling and very high performance.
AHash performs well when dealing with large inputs because aHash reads 8 or 16 bytes at a time. (depending on availability of AES-NI)
Because of this, and its optimized logic, `aHash` is able to outperform `FxHash` with strings.
It also provides especially good performance dealing with unaligned input.
(Notice the big performance gaps between 3 vs 4, 7 vs 8 and 15 vs 16 in `FxHash` above)
### Which CPUs can use the hardware acceleration
Hardware AES instructions are built into Intel processors built after 2010 and AMD processors after 2012.
It is also available on [many other CPUs](https://en.wikipedia.org/wiki/AES_instruction_set), which should eventually
be able to use aHash's hardware acceleration. However, X86 and X86-64 are currently the only supported architectures,
as they are the only architectures for which Rust provides an intrinsic.
aHash also uses `sse2` and `sse3` instructions. X86 processors that have `aesni` also have these instruction sets.

201
third-party/vendor/ahash/LICENSE-APACHE vendored Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third-party/vendor/ahash/LICENSE-MIT vendored Normal file
View file

@ -0,0 +1,25 @@
Copyright (c) 2018 Tom Kaitchuck
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

109
third-party/vendor/ahash/README.md vendored Normal file
View file

@ -0,0 +1,109 @@
# aHash ![Build Status](https://img.shields.io/github/actions/workflow/status/tkaitchuck/aHash/rust.yml?branch=master) ![Licence](https://img.shields.io/crates/l/ahash) ![Downloads](https://img.shields.io/crates/d/ahash)
AHash is the [fastest](https://github.com/tkaitchuck/aHash/blob/master/compare/readme.md#Speed),
[DOS resistant hash](https://github.com/tkaitchuck/aHash/wiki/How-aHash-is-resists-DOS-attacks) currently available in Rust.
AHash is intended *exclusively* for use in in-memory hashmaps.
AHash's output is of [high quality](https://github.com/tkaitchuck/aHash/blob/master/compare/readme.md#Quality) but aHash is **not** a cryptographically secure hash.
## Design
Because AHash is a keyed hash, each map will produce completely different hashes, which cannot be predicted without knowing the keys.
[This prevents DOS attacks where an attacker sends a large number of items whose hashes collide that get used as keys in a hashmap.](https://github.com/tkaitchuck/aHash/wiki/How-aHash-is-resists-DOS-attacks)
This also avoids [accidentally quadratic behavior by reading from one map and writing to another.](https://accidentallyquadratic.tumblr.com/post/153545455987/rust-hash-iteration-reinsertion)
## Goals and Non-Goals
AHash does *not* have a fixed standard for its output. This allows it to improve over time. For example,
if any faster algorithm is found, aHash will be updated to incorporate the technique.
Similarly, should any flaw in aHash's DOS resistance be found, aHash will be changed to correct the flaw.
Because it does not have a fixed standard, different computers or computers on different versions of the code will observe different hash values.
As such, aHash is not recommended for use other than in-memory maps. Specifically, aHash is not intended for network use or in applications which persist hashed values.
(In these cases `HighwayHash` would be a better choice)
Additionally, aHash is not intended to be cryptographically secure and should not be used as a MAC, or anywhere which requires a cryptographically secure hash.
(In these cases `SHA-3` would be a better choice)
## Usage
AHash is a drop in replacement for the default implementation of the `Hasher` trait. To construct a `HashMap` using aHash
as its hasher do the following:
```rust
use ahash::{AHasher, RandomState};
use std::collections::HashMap;
let mut map: HashMap<i32, i32, RandomState> = HashMap::default();
map.insert(12, 34);
```
For convenience, wrappers called `AHashMap` and `AHashSet` are also provided.
These do the same thing with slightly less typing.
```rust
use ahash::AHashMap;
let mut map: AHashMap<i32, i32> = AHashMap::new();
map.insert(12, 34);
map.insert(56, 78);
```
## Flags
The aHash package has the following flags:
* `std`: This enables features which require the standard library. (On by default) This includes providing the utility classes `AHashMap` and `AHashSet`.
* `serde`: Enables `serde` support for the utility classes `AHashMap` and `AHashSet`.
* `runtime-rng`: Obtains the seeds for Hashers from the operating system's source of randomness. (On by default)
This is done using the [getrandom](https://github.com/rust-random/getrandom) crate.
* `compile-time-rng`: For OS targets without access to a random number generator, `compile-time-rng` provides an alternative.
If `getrandom` is unavailable and `compile-time-rng` is enabled, aHash will generate random numbers at compile time and embed them in the binary.
* `nightly-arm-aes`: To use AES instructions on 32-bit ARM, which requires nightly. This is not needed on AArch64.
This allows for DOS resistance even if there is no random number generator available at runtime (assuming the compiled binary is not public).
This makes the binary non-deterministic. (If non-determinism is a problem see [constrandom's documentation](https://github.com/tkaitchuck/constrandom#deterministic-builds))
If both `runtime-rng` and `compile-time-rng` are enabled the `runtime-rng` will take precedence and `compile-time-rng` will do nothing.
If neither flag is set, seeds can be supplied by the application. [Multiple apis](https://docs.rs/ahash/latest/ahash/random_state/struct.RandomState.html)
are available to do this.
## Comparison with other hashers
A full comparison with other hashing algorithms can be found [here](https://github.com/tkaitchuck/aHash/blob/master/compare/readme.md)
![Hasher performance](https://docs.google.com/spreadsheets/d/e/2PACX-1vSK7Li2nS-Bur9arAYF9IfT37MP-ohAe1v19lZu5fd9MajI1fSveLAQZyEie4Ea9k5-SWHTff7nL2DW/pubchart?oid=1323618938&format=image)
For a more representative performance comparison which includes the overhead of using a HashMap, see [HashBrown's benchmarks](https://github.com/rust-lang/hashbrown#performance)
as HashBrown now uses aHash as its hasher by default.
## Hash quality
AHash passes the full [SMHasher test suite](https://github.com/rurban/smhasher).
The code to reproduce the result, and the full output [are checked into the repo](https://github.com/tkaitchuck/aHash/tree/master/smhasher).
## Additional FAQ
A separate FAQ document is maintained [here](https://github.com/tkaitchuck/aHash/blob/master/FAQ.md).
If you have questions not covered there, open an issue [here](https://github.com/tkaitchuck/aHash/issues).
## License
Licensed under either of:
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
## Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
additional terms or conditions.

20
third-party/vendor/ahash/build.rs vendored Normal file
View file

@ -0,0 +1,20 @@
#![deny(warnings)]
use std::env;
/// Build script: probes the toolchain and target, then emits `--cfg` flags
/// that the crate's source gates on.
fn main() {
    println!("cargo:rerun-if-changed=build.rs");
    // Nightly compilers that support the `specialize` feature get the
    // specialized hash implementations enabled.
    if version_check::supports_feature("specialize") == Some(true) {
        println!("cargo:rustc-cfg=feature=\"specialize\"");
    }
    // Architectures with fast 64x64 -> 128-bit multiplication can use the
    // folded-multiply mixing path.
    let arch = env::var("CARGO_CFG_TARGET_ARCH").expect("CARGO_CFG_TARGET_ARCH was not set");
    let folded_multiply_arches = ["x86_64", "aarch64", "mips64", "powerpc64", "riscv64gc", "s390x"];
    if folded_multiply_arches
        .iter()
        .any(|candidate| arch.eq_ignore_ascii_case(candidate))
    {
        println!("cargo:rustc-cfg=feature=\"folded_multiply\"");
    }
}

1
third-party/vendor/ahash/rustfmt.toml vendored Normal file
View file

@ -0,0 +1 @@
max_width = 120

433
third-party/vendor/ahash/src/aes_hash.rs vendored Normal file
View file

@ -0,0 +1,433 @@
use crate::convert::*;
use crate::operations::*;
use crate::random_state::PI;
use crate::RandomState;
use core::hash::Hasher;
/// A `Hasher` for hashing an arbitrary stream of bytes.
///
/// Instances of [`AHasher`] represent state that is updated while hashing data.
///
/// Each method updates the internal state based on the new data provided. Once
/// all of the data has been provided, the resulting hash can be obtained by calling
/// `finish()`
///
/// [Clone] is also provided in case you wish to calculate hashes for two different items that
/// start with the same data.
///
#[derive(Debug, Clone)]
pub struct AHasher {
    // Accumulator mixed via AES rounds (see `hash_in`).
    enc: u128,
    // Accumulator mixed via shuffle-and-add; updated by a technique unrelated
    // to `enc` so an attack on one mixing method is unlikely to break both.
    sum: u128,
    // key1 ^ key2; mixed into the bulk-hashing lanes in `write` and into the
    // final AES rounds in `finish`.
    key: u128,
}
impl AHasher {
    /// Creates a new hasher keyed to the provided keys.
    ///
    /// Normally hashers are created via `AHasher::default()` for fixed keys or `RandomState::new()` for randomly
    /// generated keys and `RandomState::with_seeds(a,b)` for seeds that are set and can be reused. All of these work at
    /// map creation time (and hence don't have any overhead on a per-item basis).
    ///
    /// This method directly creates the hasher instance and performs no transformation on the provided seeds. This may
    /// be useful where a HashBuilder is not desired, such as for testing purposes.
    ///
    /// # Example
    ///
    /// ```
    /// use std::hash::Hasher;
    /// use ahash::AHasher;
    ///
    /// let mut hasher = AHasher::new_with_keys(1234, 5678);
    ///
    /// hasher.write_u32(1989);
    /// hasher.write_u8(11);
    /// hasher.write_u8(9);
    /// hasher.write(b"Huh?");
    ///
    /// println!("Hash is {:x}!", hasher.finish());
    /// ```
    #[inline]
    pub(crate) fn new_with_keys(key1: u128, key2: u128) -> Self {
        // XOR the caller's keys with (digits of) PI so that even weak or
        // zero-valued user keys still yield a well-mixed initial state.
        let pi: [u128; 2] = PI.convert();
        let key1 = key1 ^ pi[0];
        let key2 = key2 ^ pi[1];
        Self {
            enc: key1,
            sum: key2,
            key: key1 ^ key2,
        }
    }
    // Like `new_with_keys` but without the PI masking, so tests can construct
    // a hasher with an exactly-known internal state.
    #[allow(unused)] // False positive
    pub(crate) fn test_with_keys(key1: u128, key2: u128) -> Self {
        Self {
            enc: key1,
            sum: key2,
            key: key1 ^ key2,
        }
    }
    // Builds a hasher directly from the four 64-bit keys stored in a
    // `RandomState` (no further transformation of the key material).
    #[inline]
    pub(crate) fn from_random_state(rand_state: &RandomState) -> Self {
        let key1 = [rand_state.k0, rand_state.k1].convert();
        let key2 = [rand_state.k2, rand_state.k3].convert();
        Self {
            enc: key1,
            sum: key2,
            key: key1 ^ key2,
        }
    }
    // Core mixing step: folds one 128-bit value into both accumulators using
    // two unrelated techniques (an AES round and shuffle-and-add).
    #[inline(always)]
    fn hash_in(&mut self, new_value: u128) {
        self.enc = aesdec(self.enc, new_value);
        self.sum = shuffle_and_add(self.sum, new_value);
    }
    // Folds two 128-bit values into the state, one after the other.
    #[inline(always)]
    fn hash_in_2(&mut self, v1: u128, v2: u128) {
        self.enc = aesdec(self.enc, v1);
        self.sum = shuffle_and_add(self.sum, v1);
        self.enc = aesdec(self.enc, v2);
        self.sum = shuffle_and_add(self.sum, v2);
    }
    // Cheaper finalization used by the specialized hashers; unlike `finish`
    // it does not mix `self.key` back in.
    #[inline]
    #[cfg(feature = "specialize")]
    fn short_finish(&self) -> u64 {
        let combined = aesenc(self.sum, self.enc);
        let result: [u64; 2] = aesdec(combined, combined).convert();
        result[0]
    }
}
/// Provides [Hasher] methods to hash all of the primitive types.
///
/// [Hasher]: core::hash::Hasher
impl Hasher for AHasher {
    // All integer writes funnel through `write_u64`/`write_u128` so every
    // primitive gets the same 128-bit mixing step.
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u128(&mut self, i: u128) {
        self.hash_in(i);
    }
    #[inline]
    #[cfg(any(
        target_pointer_width = "64",
        target_pointer_width = "32",
        target_pointer_width = "16"
    ))]
    fn write_usize(&mut self, i: usize) {
        self.write_u64(i as u64);
    }
    #[inline]
    #[cfg(target_pointer_width = "128")]
    fn write_usize(&mut self, i: usize) {
        self.write_u128(i as u128);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.write_u128(i as u128);
    }
    // Hashes a byte slice. Mixes in the length first, then dispatches on the
    // input size; short inputs take a single mixing step, long inputs are
    // consumed in 64-byte chunks across several parallel lanes.
    #[inline]
    #[allow(clippy::collapsible_if)]
    fn write(&mut self, input: &[u8]) {
        let mut data = input;
        let length = data.len();
        // Mix the length into `enc` so inputs that differ only in length (and
        // the overlapping reads below) still hash differently.
        add_in_length(&mut self.enc, length as u64);
        //A 'binary search' on sizes reduces the number of comparisons.
        if data.len() <= 8 {
            // len 0-8: `read_small` packs the short input into 128 bits.
            let value = read_small(data);
            self.hash_in(value.convert());
        } else {
            if data.len() > 32 {
                if data.len() > 64 {
                    // len > 64: four AES lanes (`current`) plus two
                    // shuffle-and-add lanes (`sum`) are seeded from the key and
                    // the *last* 64 bytes. Mixing the tail up front means any
                    // remainder left after the 64-byte chunk loop below is
                    // already covered (via overlapping reads).
                    let tail = data.read_last_u128x4();
                    let mut current: [u128; 4] = [self.key; 4];
                    current[0] = aesenc(current[0], tail[0]);
                    current[1] = aesdec(current[1], tail[1]);
                    current[2] = aesenc(current[2], tail[2]);
                    current[3] = aesdec(current[3], tail[3]);
                    let mut sum: [u128; 2] = [self.key, !self.key];
                    sum[0] = add_by_64s(sum[0].convert(), tail[0].convert()).convert();
                    sum[1] = add_by_64s(sum[1].convert(), tail[1].convert()).convert();
                    sum[0] = shuffle_and_add(sum[0], tail[2]);
                    sum[1] = shuffle_and_add(sum[1], tail[3]);
                    // Consume full 64-byte chunks from the front.
                    while data.len() > 64 {
                        let (blocks, rest) = data.read_u128x4();
                        current[0] = aesdec(current[0], blocks[0]);
                        current[1] = aesdec(current[1], blocks[1]);
                        current[2] = aesdec(current[2], blocks[2]);
                        current[3] = aesdec(current[3], blocks[3]);
                        sum[0] = shuffle_and_add(sum[0], blocks[0]);
                        sum[1] = shuffle_and_add(sum[1], blocks[1]);
                        sum[0] = shuffle_and_add(sum[0], blocks[2]);
                        sum[1] = shuffle_and_add(sum[1], blocks[3]);
                        data = rest;
                    }
                    // Fold all six lanes back into the main state.
                    self.hash_in_2(current[0], current[1]);
                    self.hash_in_2(current[2], current[3]);
                    self.hash_in_2(sum[0], sum[1]);
                } else {
                    //len 33-64
                    // First and last 32 bytes; the (possibly overlapping)
                    // reads cover the whole input.
                    let (head, _) = data.read_u128x2();
                    let tail = data.read_last_u128x2();
                    self.hash_in_2(head[0], head[1]);
                    self.hash_in_2(tail[0], tail[1]);
                }
            } else {
                if data.len() > 16 {
                    //len 17-32
                    self.hash_in_2(data.read_u128().0, data.read_last_u128());
                } else {
                    //len 9-16
                    // Overlapping first/last 8-byte reads cover the whole input.
                    let value: [u64; 2] = [data.read_u64().0, data.read_last_u64()];
                    self.hash_in(value.convert());
                }
            }
        }
    }
    // Combines the two accumulators, then applies AES rounds keyed with
    // `self.key` so no individual bit of internal state is exposed in the
    // output.
    #[inline]
    fn finish(&self) -> u64 {
        let combined = aesenc(self.sum, self.enc);
        let result: [u64; 2] = aesdec(aesdec(combined, self.key), combined).convert();
        result[0]
    }
}
#[cfg(feature = "specialize")]
pub(crate) struct AHasherU64 {
    // Running hash state, updated by folded multiplies.
    pub(crate) buffer: u64,
    // Per-instance key material folded in only at finish time.
    pub(crate) pad: u64,
}
/// A specialized hasher for only primitives under 64 bits.
#[cfg(feature = "specialize")]
impl Hasher for AHasherU64 {
    #[inline]
    fn finish(&self) -> u64 {
        folded_multiply(self.buffer, self.pad)
    }
    // This wrapper is only selected for sub-64-bit primitives, so the
    // byte-slice / 128-bit entry points are unreachable by construction.
    #[inline]
    fn write(&mut self, _bytes: &[u8]) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        // Single folded multiply mixes the input into the running state.
        self.buffer = folded_multiply(i ^ self.buffer, MULTIPLE);
    }
    #[inline]
    fn write_u128(&mut self, _i: u128) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
    #[inline]
    fn write_usize(&mut self, _i: usize) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
}
// Newtype over the full AHasher, selected when the hashed type is a
// fixed-size primitive wider than 64 bits.
#[cfg(feature = "specialize")]
pub(crate) struct AHasherFixed(pub AHasher);
/// Specialized hasher for fixed-size primitives larger than 64 bits.
/// Every write forwards to the wrapped [`AHasher`]; the only difference from
/// the plain hasher is the cheaper `short_finish` finalization.
#[cfg(feature = "specialize")]
impl Hasher for AHasherFixed {
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        self.0.write(bytes)
    }
    // Narrow integers are widened and forwarded straight to the inner hasher.
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.0.write_u64(i);
    }
    #[inline]
    fn write_u128(&mut self, i: u128) {
        self.0.write_u128(i);
    }
    #[inline]
    fn write_usize(&mut self, i: usize) {
        self.0.write_usize(i);
    }
    #[inline]
    fn finish(&self) -> u64 {
        self.0.short_finish()
    }
}
// Newtype over the full AHasher, selected when the hashed type is a string.
#[cfg(feature = "specialize")]
pub(crate) struct AHasherStr(pub AHasher);
/// A specialized hasher for strings
/// Note that the other types don't panic because the hash impl for String tacks on an unneeded call. (As does vec)
#[cfg(feature = "specialize")]
impl Hasher for AHasherStr {
    #[inline]
    fn finish(&self) -> u64 {
        // Finalization already happened inside `write`; just read out lane 0.
        let result: [u64; 2] = self.0.enc.convert();
        result[0]
    }
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.len() > 8 {
            self.0.write(bytes);
            self.0.enc = aesenc(self.0.sum, self.0.enc);
            self.0.enc = aesdec(aesdec(self.0.enc, self.0.key), self.0.enc);
        } else {
            // Short-string fast path: mix in the length and the zero-padded
            // bytes directly, then apply the same finalization as above.
            add_in_length(&mut self.0.enc, bytes.len() as u64);
            let value = read_small(bytes).convert();
            self.0.sum = shuffle_and_add(self.0.sum, value);
            self.0.enc = aesenc(self.0.sum, self.0.enc);
            self.0.enc = aesdec(aesdec(self.0.enc, self.0.key), self.0.enc);
        }
    }
    // Integer writes are deliberate no-ops: per the doc comment above, the
    // std Hash impl for String/Vec makes one extra call that must not
    // perturb the already-finalized state.
    #[inline]
    fn write_u8(&mut self, _i: u8) {}
    #[inline]
    fn write_u16(&mut self, _i: u16) {}
    #[inline]
    fn write_u32(&mut self, _i: u32) {}
    #[inline]
    fn write_u64(&mut self, _i: u64) {}
    #[inline]
    fn write_u128(&mut self, _i: u128) {}
    #[inline]
    fn write_usize(&mut self, _i: usize) {}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::convert::Convert;
    use crate::operations::aesenc;
    use crate::RandomState;
    use std::hash::{BuildHasher, Hasher};

    /// Writing additional data must change the resulting hash.
    #[test]
    fn test_sanity() {
        let mut hasher = RandomState::with_seeds(1, 2, 3, 4).build_hasher();
        hasher.write_u64(0);
        let h1 = hasher.finish();
        hasher.write(&[1, 0, 0, 0, 0, 0, 0, 0]);
        let h2 = hasher.finish();
        assert_ne!(h1, h2);
    }

    /// `AHasher` must be usable through `BuildHasherDefault`.
    #[cfg(feature = "compile-time-rng")]
    #[test]
    fn test_builder() {
        use std::collections::HashMap;
        use std::hash::BuildHasherDefault;
        let mut map = HashMap::<u32, u64, BuildHasherDefault<AHasher>>::default();
        map.insert(1, 3);
    }

    /// With compile-time RNG, default-constructed hashers are keyed (non-zero,
    /// non-degenerate state) and deterministic across instances.
    #[cfg(feature = "compile-time-rng")]
    #[test]
    fn test_default() {
        let hasher_a = AHasher::default();
        let a_enc: [u64; 2] = hasher_a.enc.convert();
        let a_sum: [u64; 2] = hasher_a.sum.convert();
        assert_ne!(0, a_enc[0]);
        assert_ne!(0, a_enc[1]);
        assert_ne!(0, a_sum[0]);
        assert_ne!(0, a_sum[1]);
        assert_ne!(a_enc[0], a_enc[1]);
        assert_ne!(a_sum[0], a_sum[1]);
        assert_ne!(a_enc[0], a_sum[0]);
        assert_ne!(a_enc[1], a_sum[1]);
        let hasher_b = AHasher::default();
        let b_enc: [u64; 2] = hasher_b.enc.convert();
        let b_sum: [u64; 2] = hasher_b.sum.convert();
        assert_eq!(a_enc[0], b_enc[0]);
        assert_eq!(a_enc[1], b_enc[1]);
        assert_eq!(a_sum[0], b_sum[0]);
        assert_eq!(a_sum[1], b_sum[1]);
    }

    /// Two rounds of AES mixing over inputs that differ in a single 64-bit
    /// lane must yield different digests.
    #[test]
    fn test_hash() {
        let mut result: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d];
        let value: [u64; 2] = [1 << 32, 0xFEDCBA9876543210];
        result = aesenc(value.convert(), result.convert()).convert();
        result = aesenc(result.convert(), result.convert()).convert();
        let mut result2: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d];
        let value2: [u64; 2] = [1, 0xFEDCBA9876543210];
        result2 = aesenc(value2.convert(), result2.convert()).convert();
        // Fixed copy-paste slip: the second round previously mixed `result2`
        // with `result`, making the two pipelines asymmetric. Each pipeline
        // now squares its own state, mirroring the `result` round above.
        result2 = aesenc(result2.convert(), result2.convert()).convert();
        let result: [u8; 16] = result.convert();
        let result2: [u8; 16] = result2.convert();
        assert_ne!(hex::encode(result), hex::encode(result2));
    }

    /// `as_array!` + `Convert` reinterpret 8 ASCII bytes as a u64.
    #[test]
    fn test_conversion() {
        let input: &[u8] = "dddddddd".as_bytes();
        let bytes: u64 = as_array!(input, 8).convert();
        assert_eq!(bytes, 0x6464646464646464);
    }
}

162
third-party/vendor/ahash/src/convert.rs vendored Normal file
View file

@ -0,0 +1,162 @@
/// Bit-for-bit reinterpretation of `self` as an equal-sized type `To`.
pub(crate) trait Convert<To> {
    fn convert(self) -> To;
}
// Generates a symmetric pair of `Convert` impls ($a -> $b and $b -> $a),
// implemented as safe transmutes via zerocopy. Both types must have
// identical size (zerocopy's transmute! enforces this at compile time).
macro_rules! convert {
    ($a:ty, $b:ty) => {
        impl Convert<$b> for $a {
            #[inline(always)]
            fn convert(self) -> $b {
                zerocopy::transmute!(self)
            }
        }
        impl Convert<$a> for $b {
            #[inline(always)]
            fn convert(self) -> $a {
                zerocopy::transmute!(self)
            }
        }
    };
}
// Bidirectional reinterpretations between equal-sized integer/float array
// shapes used throughout the hashers (64-, 32-, 16- and 8-byte blocks).
convert!([u128; 4], [u64; 8]);
convert!([u128; 4], [u32; 16]);
convert!([u128; 4], [u16; 32]);
convert!([u128; 4], [u8; 64]);
convert!([u128; 2], [u64; 4]);
convert!([u128; 2], [u32; 8]);
convert!([u128; 2], [u16; 16]);
convert!([u128; 2], [u8; 32]);
convert!(u128, [u64; 2]);
convert!(u128, [u32; 4]);
convert!(u128, [u16; 8]);
convert!(u128, [u8; 16]);
convert!([u64; 8], [u32; 16]);
convert!([u64; 8], [u16; 32]);
convert!([u64; 8], [u8; 64]);
convert!([u64; 4], [u32; 8]);
convert!([u64; 4], [u16; 16]);
convert!([u64; 4], [u8; 32]);
convert!([u64; 2], [u32; 4]);
convert!([u64; 2], [u16; 8]);
convert!([u64; 2], [u8; 16]);
convert!([u32; 4], [u16; 8]);
convert!([u32; 4], [u8; 16]);
convert!([u16; 8], [u8; 16]);
convert!(u64, [u32; 2]);
convert!(u64, [u16; 4]);
convert!(u64, [u8; 8]);
convert!([u32; 2], [u16; 4]);
convert!([u32; 2], [u8; 8]);
convert!(u32, [u16; 2]);
convert!(u32, [u8; 4]);
convert!([u16; 2], [u8; 4]);
convert!(u16, [u8; 2]);
convert!([[u64; 4]; 2], [u8; 64]);
convert!([f64; 2], [u8; 16]);
convert!([f32; 4], [u8; 16]);
convert!(f64, [u8; 8]);
convert!([f32; 2], [u8; 8]);
convert!(f32, [u8; 4]);
// Converts a `&[T]` into a `&[T; $len]` reference, panicking if the slice's
// length is not exactly `$len`. The inner fn pins the array length so type
// inference can't pick a different size.
macro_rules! as_array {
    ($input:expr, $len:expr) => {{
        {
            #[inline(always)]
            fn as_array<T>(slice: &[T]) -> &[T; $len] {
                core::convert::TryFrom::try_from(slice).unwrap()
            }
            as_array($input)
        }
    }};
}
/// Helpers for reading fixed-width native-endian values off a byte slice.
/// `read_*` consumes from the front, returning the value and the unread rest;
/// `read_last_*` reads from the tail without consuming.
/// All methods panic if the slice is shorter than the value being read.
pub(crate) trait ReadFromSlice {
    fn read_u16(&self) -> (u16, &[u8]);
    fn read_u32(&self) -> (u32, &[u8]);
    fn read_u64(&self) -> (u64, &[u8]);
    fn read_u128(&self) -> (u128, &[u8]);
    fn read_u128x2(&self) -> ([u128; 2], &[u8]);
    fn read_u128x4(&self) -> ([u128; 4], &[u8]);
    fn read_last_u16(&self) -> u16;
    fn read_last_u32(&self) -> u32;
    fn read_last_u64(&self) -> u64;
    fn read_last_u128(&self) -> u128;
    fn read_last_u128x2(&self) -> [u128; 2];
    fn read_last_u128x4(&self) -> [u128; 4];
}
/// Byte-slice implementation: front reads split the slice in two; tail reads
/// take a suffix subslice. Both panic (via `split_at`/slicing) on slices
/// shorter than the requested width.
impl ReadFromSlice for [u8] {
    #[inline(always)]
    fn read_u16(&self) -> (u16, &[u8]) {
        let (head, tail) = self.split_at(2);
        (as_array!(head, 2).convert(), tail)
    }
    #[inline(always)]
    fn read_u32(&self) -> (u32, &[u8]) {
        let (head, tail) = self.split_at(4);
        (as_array!(head, 4).convert(), tail)
    }
    #[inline(always)]
    fn read_u64(&self) -> (u64, &[u8]) {
        let (head, tail) = self.split_at(8);
        (as_array!(head, 8).convert(), tail)
    }
    #[inline(always)]
    fn read_u128(&self) -> (u128, &[u8]) {
        let (head, tail) = self.split_at(16);
        (as_array!(head, 16).convert(), tail)
    }
    #[inline(always)]
    fn read_u128x2(&self) -> ([u128; 2], &[u8]) {
        let (head, tail) = self.split_at(32);
        (as_array!(head, 32).convert(), tail)
    }
    #[inline(always)]
    fn read_u128x4(&self) -> ([u128; 4], &[u8]) {
        let (head, tail) = self.split_at(64);
        (as_array!(head, 64).convert(), tail)
    }
    #[inline(always)]
    fn read_last_u16(&self) -> u16 {
        let suffix = &self[self.len() - 2..];
        as_array!(suffix, 2).convert()
    }
    #[inline(always)]
    fn read_last_u32(&self) -> u32 {
        let suffix = &self[self.len() - 4..];
        as_array!(suffix, 4).convert()
    }
    #[inline(always)]
    fn read_last_u64(&self) -> u64 {
        let suffix = &self[self.len() - 8..];
        as_array!(suffix, 8).convert()
    }
    #[inline(always)]
    fn read_last_u128(&self) -> u128 {
        let suffix = &self[self.len() - 16..];
        as_array!(suffix, 16).convert()
    }
    #[inline(always)]
    fn read_last_u128x2(&self) -> [u128; 2] {
        let suffix = &self[self.len() - 32..];
        as_array!(suffix, 32).convert()
    }
    #[inline(always)]
    fn read_last_u128x4(&self) -> [u128; 4] {
        let suffix = &self[self.len() - 64..];
        as_array!(suffix, 64).convert()
    }
}

View file

@ -0,0 +1,367 @@
use crate::convert::*;
use crate::operations::folded_multiply;
use crate::operations::read_small;
use crate::operations::MULTIPLE;
use crate::random_state::PI;
use crate::RandomState;
use core::hash::Hasher;
// Rotation amount used by `large_update` to keep successive buffer updates
// from commuting. (The trailing `//17` presumably records an earlier or
// alternative candidate value — unverified.)
const ROT: u32 = 23; //17
/// A `Hasher` for hashing an arbitrary stream of bytes.
///
/// Instances of [`AHasher`] represent state that is updated while hashing data.
///
/// Each method updates the internal state based on the new data provided. Once
/// all of the data has been provided, the resulting hash can be obtained by calling
/// `finish()`
///
/// [Clone] is also provided in case you wish to calculate hashes for two different items that
/// start with the same data.
///
#[derive(Debug, Clone)]
pub struct AHasher {
    // Accumulated hash state (mixed via folded multiplies).
    buffer: u64,
    // Key material combined with the buffer at finalization.
    pad: u64,
    // Keys used to mask both halves of 128-bit blocks in `large_update`.
    extra_keys: [u64; 2],
}
impl AHasher {
    /// Creates a new hasher keyed to the provided key.
    #[inline]
    #[allow(dead_code)] // Is not called if non-fallback hash is used.
    pub(crate) fn new_with_keys(key1: u128, key2: u128) -> AHasher {
        // XOR the caller's keys with digits of PI so that weak keys (e.g.
        // all zeros) still produce a well-mixed initial state.
        let pi: [u128; 2] = PI.convert();
        let key1: [u64; 2] = (key1 ^ pi[0]).convert();
        let key2: [u64; 2] = (key2 ^ pi[1]).convert();
        AHasher {
            buffer: key1[0],
            pad: key1[1],
            extra_keys: key2,
        }
    }
    // Test-only constructor that skips the PI mixing above so tests can set
    // the exact internal state.
    #[allow(unused)] // False positive
    pub(crate) fn test_with_keys(key1: u128, key2: u128) -> Self {
        let key1: [u64; 2] = key1.convert();
        let key2: [u64; 2] = key2.convert();
        Self {
            buffer: key1[0],
            pad: key1[1],
            extra_keys: key2,
        }
    }
    // Seeds the hasher directly from a RandomState's four pre-mixed keys.
    #[inline]
    #[allow(dead_code)] // Is not called if non-fallback hash is used.
    pub(crate) fn from_random_state(rand_state: &RandomState) -> AHasher {
        AHasher {
            buffer: rand_state.k1,
            pad: rand_state.k0,
            extra_keys: [rand_state.k2, rand_state.k3],
        }
    }
    /// This update function has the goal of updating the buffer with a single multiply
    /// FxHash does this but is vulnerable to attack. To avoid this input needs to be masked to with an
    /// unpredictable value. Other hashes such as murmurhash have taken this approach but were found vulnerable
    /// to attack. The attack was based on the idea of reversing the pre-mixing (Which is necessarily
    /// reversible otherwise bits would be lost) then placing a difference in the highest bit before the
    /// multiply used to mix the data. Because a multiply can never affect the bits to the right of it, a
    /// subsequent update that also differed in this bit could result in a predictable collision.
    ///
    /// This version avoids this vulnerability while still only using a single multiply. It takes advantage
    /// of the fact that when a 64 bit multiply is performed the upper 64 bits are usually computed and thrown
    /// away. Instead it creates two 128 bit values where the upper 64 bits are zeros and multiplies them.
    /// (The compiler is smart enough to turn this into a 64 bit multiplication in the assembly)
    /// Then the upper bits are xored with the lower bits to produce a single 64 bit result.
    ///
    /// To understand why this is a good scrambling function it helps to understand multiply-with-carry PRNGs:
    /// https://en.wikipedia.org/wiki/Multiply-with-carry_pseudorandom_number_generator
    /// If the multiple is chosen well, this creates a long period, decent quality PRNG.
    /// Notice that this function is equivalent to this except the `buffer`/`state` is being xored with each
    /// new block of data. In the event that data is all zeros, it is exactly equivalent to a MWC PRNG.
    ///
    /// This is impervious to attack because every bit buffer at the end is dependent on every bit in
    /// `new_data ^ buffer`. For example suppose two inputs differed in only the 5th bit. Then when the
    /// multiplication is performed the `result` will differ in bits 5-69. More specifically it will differ by
    /// 2^5 * MULTIPLE. However in the next step bits 65-128 are turned into a separate 64 bit value. So the
    /// differing bits will be in the lower 6 bits of this value. The two intermediate values that differ in
    /// bits 5-63 and in bits 0-5 respectively get added together. Producing an output that differs in every
    /// bit. The addition carries in the multiplication and at the end additionally mean that the even if an
    /// attacker somehow knew part of (but not all) the contents of the buffer before hand,
    /// they would not be able to predict any of the bits in the buffer at the end.
    #[inline(always)]
    fn update(&mut self, new_data: u64) {
        self.buffer = folded_multiply(new_data ^ self.buffer, MULTIPLE);
    }
    /// Similar to the above this function performs an update using a "folded multiply".
    /// However it takes in 128 bits of data instead of 64. Both halves must be masked.
    ///
    /// This makes it impossible for an attacker to place a single bit difference between
    /// two blocks so as to cancel each other.
    ///
    /// However this is not sufficient. to prevent (a,b) from hashing the same as (b,a) the buffer itself must
    /// be updated between calls in a way that does not commute. To achieve this XOR and Rotate are used.
    /// Add followed by xor is not the same as xor followed by add, and rotate ensures that the same out bits
    /// can't be changed by the same set of input bits. To cancel this sequence with subsequent input would require
    /// knowing the keys.
    #[inline(always)]
    fn large_update(&mut self, new_data: u128) {
        let block: [u64; 2] = new_data.convert();
        let combined = folded_multiply(block[0] ^ self.extra_keys[0], block[1] ^ self.extra_keys[1]);
        self.buffer = (self.buffer.wrapping_add(self.pad) ^ combined).rotate_left(ROT);
    }
    // Cheaper finalization used by the specialized wrappers: a single folded
    // multiply of buffer with pad, without the data-dependent rotation that
    // `finish` applies.
    #[inline]
    #[cfg(feature = "specialize")]
    fn short_finish(&self) -> u64 {
        folded_multiply(self.buffer, self.pad)
    }
}
/// Provides [Hasher] methods to hash all of the primitive types.
///
/// [Hasher]: core::hash::Hasher
impl Hasher for AHasher {
    // Integers up to 64 bits all funnel through the single-multiply `update`.
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.update(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.update(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.update(i as u64);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.update(i as u64);
    }
    #[inline]
    fn write_u128(&mut self, i: u128) {
        self.large_update(i);
    }
    #[inline]
    #[cfg(any(
        target_pointer_width = "64",
        target_pointer_width = "32",
        target_pointer_width = "16"
    ))]
    fn write_usize(&mut self, i: usize) {
        self.write_u64(i as u64);
    }
    #[inline]
    #[cfg(target_pointer_width = "128")]
    fn write_usize(&mut self, i: usize) {
        self.write_u128(i as u128);
    }
    #[inline]
    #[allow(clippy::collapsible_if)]
    fn write(&mut self, input: &[u8]) {
        let mut data = input;
        let length = data.len() as u64;
        //Needs to be an add rather than an xor because otherwise it could be canceled with carefully formed input.
        self.buffer = self.buffer.wrapping_add(length).wrapping_mul(MULTIPLE);
        //A 'binary search' on sizes reduces the number of comparisons.
        if data.len() > 8 {
            if data.len() > 16 {
                // Seed with the (possibly overlapping) final 16 bytes, then
                // consume 16-byte blocks from the front.
                let tail = data.read_last_u128();
                self.large_update(tail);
                while data.len() > 16 {
                    let (block, rest) = data.read_u128();
                    self.large_update(block);
                    data = rest;
                }
            } else {
                // len 9-16: two overlapping 8-byte reads cover all bytes.
                self.large_update([data.read_u64().0, data.read_last_u64()].convert());
            }
        } else {
            // len 0-8: `read_small` zero-pads to a pair of u64s.
            let value = read_small(data);
            self.large_update(value.convert());
        }
    }
    #[inline]
    fn finish(&self) -> u64 {
        // Data-dependent rotation makes the final output bits harder for an
        // attacker to steer.
        let rot = (self.buffer & 63) as u32;
        folded_multiply(self.buffer, self.pad).rotate_left(rot)
    }
}
#[cfg(feature = "specialize")]
pub(crate) struct AHasherU64 {
    // Running hash state, updated by folded multiplies.
    pub(crate) buffer: u64,
    // Per-instance key material folded in only at finish time.
    pub(crate) pad: u64,
}
/// A specialized hasher for only primitives under 64 bits.
#[cfg(feature = "specialize")]
impl Hasher for AHasherU64 {
    #[inline]
    fn finish(&self) -> u64 {
        folded_multiply(self.buffer, self.pad)
        //self.buffer
    }
    // This wrapper is only selected for sub-64-bit primitives, so the
    // byte-slice / 128-bit entry points are unreachable by construction.
    #[inline]
    fn write(&mut self, _bytes: &[u8]) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.write_u64(i as u64);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        // Single folded multiply mixes the input into the running state.
        self.buffer = folded_multiply(i ^ self.buffer, MULTIPLE);
    }
    #[inline]
    fn write_u128(&mut self, _i: u128) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
    #[inline]
    fn write_usize(&mut self, _i: usize) {
        unreachable!("Specialized hasher was called with a different type of object")
    }
}
// Newtype over the full AHasher, selected when the hashed type is a
// fixed-size primitive wider than 64 bits.
#[cfg(feature = "specialize")]
pub(crate) struct AHasherFixed(pub AHasher);
/// Specialized hasher for fixed-size primitives larger than 64 bits.
/// Every write forwards to the wrapped [`AHasher`]; the only difference from
/// the plain hasher is the cheaper `short_finish` finalization.
#[cfg(feature = "specialize")]
impl Hasher for AHasherFixed {
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        self.0.write(bytes)
    }
    // Narrow integers are widened and forwarded straight to the inner hasher.
    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.0.write_u64(i as u64);
    }
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.0.write_u64(i);
    }
    #[inline]
    fn write_u128(&mut self, i: u128) {
        self.0.write_u128(i);
    }
    #[inline]
    fn write_usize(&mut self, i: usize) {
        self.0.write_usize(i);
    }
    #[inline]
    fn finish(&self) -> u64 {
        self.0.short_finish()
    }
}
// Newtype over the full AHasher, selected when the hashed type is a string.
#[cfg(feature = "specialize")]
pub(crate) struct AHasherStr(pub AHasher);
/// A specialized hasher for a single string
/// Note that the other types don't panic because the hash impl for String tacks on an unneeded call. (As does vec)
#[cfg(feature = "specialize")]
impl Hasher for AHasherStr {
    #[inline]
    fn finish(&self) -> u64 {
        self.0.finish()
    }
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.len() > 8 {
            self.0.write(bytes)
        } else {
            // Short-string fast path: one folded multiply over the zero-padded
            // bytes, with the length folded into `pad`.
            let value = read_small(bytes);
            self.0.buffer = folded_multiply(value[0] ^ self.0.buffer, value[1] ^ self.0.extra_keys[1]);
            self.0.pad = self.0.pad.wrapping_add(bytes.len() as u64);
        }
    }
    // Integer writes are deliberate no-ops: per the doc comment above, the
    // std Hash impl for String/Vec makes one extra call that must not
    // perturb the state.
    #[inline]
    fn write_u8(&mut self, _i: u8) {}
    #[inline]
    fn write_u16(&mut self, _i: u16) {}
    #[inline]
    fn write_u32(&mut self, _i: u32) {}
    #[inline]
    fn write_u64(&mut self, _i: u64) {}
    #[inline]
    fn write_u128(&mut self, _i: u128) {}
    #[inline]
    fn write_usize(&mut self, _i: usize) {}
}
#[cfg(test)]
mod tests {
    use crate::fallback_hash::*;
    // Two inputs differing in a single bit position (bit 32 vs bit 0) must
    // produce different buffer states after one `update`.
    #[test]
    fn test_hash() {
        let mut hasher = AHasher::new_with_keys(0, 0);
        let value: u64 = 1 << 32;
        hasher.update(value);
        let result = hasher.buffer;
        let mut hasher = AHasher::new_with_keys(0, 0);
        let value2: u64 = 1;
        hasher.update(value2);
        let result2 = hasher.buffer;
        let result: [u8; 8] = result.convert();
        let result2: [u8; 8] = result2.convert();
        assert_ne!(hex::encode(result), hex::encode(result2));
    }
    // `as_array!` + `Convert` reinterpret 8 ASCII 'd' bytes as a u64.
    #[test]
    fn test_conversion() {
        let input: &[u8] = "dddddddd".as_bytes();
        let bytes: u64 = as_array!(input, 8).convert();
        assert_eq!(bytes, 0x6464646464646464);
    }
}

501
third-party/vendor/ahash/src/hash_map.rs vendored Normal file
View file

@ -0,0 +1,501 @@
use std::borrow::Borrow;
use std::collections::hash_map::{IntoKeys, IntoValues};
use std::collections::{hash_map, HashMap};
use std::fmt::{self, Debug};
use std::hash::{BuildHasher, Hash};
use std::iter::FromIterator;
use std::ops::{Deref, DerefMut, Index};
use std::panic::UnwindSafe;
#[cfg(feature = "serde")]
use serde::{
de::{Deserialize, Deserializer},
ser::{Serialize, Serializer},
};
use crate::RandomState;
/// A [`HashMap`](std::collections::HashMap) using [`RandomState`](crate::RandomState) to hash the items.
/// (Requires the `std` feature to be enabled.)
// Newtype wrapper; the rest of the `HashMap` API is surfaced through the
// `Deref`/`DerefMut` impls further down this file.
#[derive(Clone)]
pub struct AHashMap<K, V, S = crate::RandomState>(HashMap<K, V, S>);
impl<K, V> From<HashMap<K, V, crate::RandomState>> for AHashMap<K, V> {
fn from(item: HashMap<K, V, crate::RandomState>) -> Self {
AHashMap(item)
}
}
// Array-literal construction, mirroring `HashMap::from([...])`.
impl<K, V, const N: usize> From<[(K, V); N]> for AHashMap<K, V>
where
    K: Eq + Hash,
{
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashMap;
    ///
    /// let map1 = AHashMap::from([(1, 2), (3, 4)]);
    /// let map2: AHashMap<_, _> = [(1, 2), (3, 4)].into();
    /// assert_eq!(map1, map2);
    /// ```
    fn from(arr: [(K, V); N]) -> Self {
        Self::from_iter(arr)
    }
}
/// Unwraps into the inner `HashMap`. Implemented as `From` rather than a
/// manual `Into` (per the `clippy::from_over_into` convention); callers using
/// `.into()` still work through the std blanket `Into` impl.
impl<K, V> From<AHashMap<K, V>> for HashMap<K, V, crate::RandomState> {
    fn from(map: AHashMap<K, V>) -> Self {
        map.0
    }
}
impl<K, V> AHashMap<K, V, RandomState> {
    /// This creates a hashmap using [RandomState::new] which obtains its keys from [RandomSource].
    /// See the documentation in [RandomSource] for notes about key strength.
    pub fn new() -> Self {
        AHashMap(HashMap::with_hasher(RandomState::new()))
    }
    /// This creates a hashmap with the specified capacity using [RandomState::new].
    /// See the documentation in [RandomSource] for notes about key strength.
    pub fn with_capacity(capacity: usize) -> Self {
        AHashMap(HashMap::with_capacity_and_hasher(capacity, RandomState::new()))
    }
}
impl<K, V, S> AHashMap<K, V, S>
where
    S: BuildHasher,
{
    /// Creates an empty map which will use the given hash builder to hash keys.
    pub fn with_hasher(hash_builder: S) -> Self {
        AHashMap(HashMap::with_hasher(hash_builder))
    }
    /// Creates an empty map with at least the specified capacity, using the
    /// given hash builder to hash keys.
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        AHashMap(HashMap::with_capacity_and_hasher(capacity, hash_builder))
    }
}
// Inherent pass-through methods mirroring `std::collections::HashMap`
// one-to-one. The doc examples below are inherited from std and therefore
// demonstrate usage on a plain `HashMap`; they apply to `AHashMap` unchanged.
impl<K, V, S> AHashMap<K, V, S>
where
    K: Hash + Eq,
    S: BuildHasher,
{
    /// Returns a reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get(&1), Some(&"a"));
    /// assert_eq!(map.get(&2), None);
    /// ```
    #[inline]
    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.0.get(k)
    }
    /// Returns the key-value pair corresponding to the supplied key.
    ///
    /// The supplied key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    /// assert_eq!(map.get_key_value(&2), None);
    /// ```
    #[inline]
    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.0.get_key_value(k)
    }
    /// Returns a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// if let Some(x) = map.get_mut(&1) {
    ///     *x = "b";
    /// }
    /// assert_eq!(map[&1], "b");
    /// ```
    #[inline]
    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.0.get_mut(k)
    }
    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, [`None`] is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though; this matters for
    /// types that can be `==` without being identical. See the [module-level
    /// documentation] for more.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// assert_eq!(map.insert(37, "a"), None);
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.insert(37, "b");
    /// assert_eq!(map.insert(37, "c"), Some("b"));
    /// assert_eq!(map[&37], "c");
    /// ```
    #[inline]
    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
        self.0.insert(k, v)
    }
    /// Creates a consuming iterator visiting all the keys in arbitrary order.
    /// The map cannot be used after calling this.
    /// The iterator element type is `K`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let map = HashMap::from([
    ///     ("a", 1),
    ///     ("b", 2),
    ///     ("c", 3),
    /// ]);
    ///
    /// let mut vec: Vec<&str> = map.into_keys().collect();
    /// // The `IntoKeys` iterator produces keys in arbitrary order, so the
    /// // keys must be sorted to test them against a sorted array.
    /// vec.sort_unstable();
    /// assert_eq!(vec, ["a", "b", "c"]);
    /// ```
    ///
    /// # Performance
    ///
    /// In the current implementation, iterating over keys takes O(capacity) time
    /// instead of O(len) because it internally visits empty buckets too.
    #[inline]
    pub fn into_keys(self) -> IntoKeys<K, V> {
        self.0.into_keys()
    }
    /// Creates a consuming iterator visiting all the values in arbitrary order.
    /// The map cannot be used after calling this.
    /// The iterator element type is `V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let map = HashMap::from([
    ///     ("a", 1),
    ///     ("b", 2),
    ///     ("c", 3),
    /// ]);
    ///
    /// let mut vec: Vec<i32> = map.into_values().collect();
    /// // The `IntoValues` iterator produces values in arbitrary order, so
    /// // the values must be sorted to test them against a sorted array.
    /// vec.sort_unstable();
    /// assert_eq!(vec, [1, 2, 3]);
    /// ```
    ///
    /// # Performance
    ///
    /// In the current implementation, iterating over values takes O(capacity) time
    /// instead of O(len) because it internally visits empty buckets too.
    #[inline]
    pub fn into_values(self) -> IntoValues<K, V> {
        self.0.into_values()
    }
    /// Removes a key from the map, returning the value at the key if the key
    /// was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove(&1), Some("a"));
    /// assert_eq!(map.remove(&1), None);
    /// ```
    #[inline]
    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.0.remove(k)
    }
}
// Deref to the inner `HashMap` so the remainder of its API is available
// without re-wrapping every method on the newtype.
impl<K, V, S> Deref for AHashMap<K, V, S> {
    type Target = HashMap<K, V, S>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<K, V, S> DerefMut for AHashMap<K, V, S> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
// Manual impl requiring only K and V to be UnwindSafe.
// NOTE(review): the hasher type `S` is left unconstrained here — confirm this
// is intentional (the auto impl would also require `S: UnwindSafe`).
impl<K, V, S> UnwindSafe for AHashMap<K, V, S>
where
    K: UnwindSafe,
    V: UnwindSafe,
{
}
// Equality delegates to the inner map's comparison.
impl<K, V, S> PartialEq for AHashMap<K, V, S>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
{
    fn eq(&self, other: &AHashMap<K, V, S>) -> bool {
        self.0.eq(&other.0)
    }
}
impl<K, V, S> Eq for AHashMap<K, V, S>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
{
}
// `map[&key]` indexing, delegating to `HashMap::index`.
impl<K, Q: ?Sized, V, S> Index<&Q> for AHashMap<K, V, S>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[inline]
    fn index(&self, key: &Q) -> &V {
        self.0.index(key)
    }
}
// Formats exactly like the inner `HashMap`; the wrapper is invisible in output.
impl<K, V, S> Debug for AHashMap<K, V, S>
where
    K: Debug,
    V: Debug,
    S: BuildHasher,
{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(fmt)
    }
}
impl<K, V> FromIterator<(K, V)> for AHashMap<K, V, RandomState>
where
    K: Eq + Hash,
{
    /// This creates a hashmap from the provided iterator using [RandomState::new].
    /// See the documentation in [RandomSource] for notes about key strength.
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut inner = HashMap::with_hasher(RandomState::new());
        inner.extend(iter);
        AHashMap(inner)
    }
}
// Iteration in all three ownership modes delegates to the inner map.
impl<'a, K, V, S> IntoIterator for &'a AHashMap<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = hash_map::Iter<'a, K, V>;
    fn into_iter(self) -> Self::IntoIter {
        (&self.0).iter()
    }
}
impl<'a, K, V, S> IntoIterator for &'a mut AHashMap<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = hash_map::IterMut<'a, K, V>;
    fn into_iter(self) -> Self::IntoIter {
        (&mut self.0).iter_mut()
    }
}
impl<K, V, S> IntoIterator for AHashMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = hash_map::IntoIter<K, V>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
// Extending delegates to the inner map (owned pairs, or copied borrowed pairs).
impl<K, V, S> Extend<(K, V)> for AHashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    #[inline]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        self.0.extend(iter)
    }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for AHashMap<K, V, S>
where
    K: Eq + Hash + Copy + 'a,
    V: Copy + 'a,
    S: BuildHasher,
{
    #[inline]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        self.0.extend(iter)
    }
}
/// NOTE: For safety this trait impl is only available if either of the flags `runtime-rng` (on by default) or
/// `compile-time-rng` are enabled. This is to prevent weakly keyed maps from being accidentally created. Instead one of
/// constructors for [RandomState] must be used.
#[cfg(any(feature = "compile-time-rng", feature = "runtime-rng", feature = "no-rng"))]
impl<K, V> Default for AHashMap<K, V, RandomState> {
    #[inline]
    fn default() -> AHashMap<K, V, RandomState> {
        AHashMap(HashMap::default())
    }
}
// Serializes as a plain map by delegating through `Deref` to the inner
// `HashMap`'s serde implementation.
#[cfg(feature = "serde")]
impl<K, V> Serialize for AHashMap<K, V>
where
    K: Serialize + Eq + Hash,
    V: Serialize,
{
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        self.deref().serialize(serializer)
    }
}
#[cfg(feature = "serde")]
impl<'de, K, V> Deserialize<'de> for AHashMap<K, V>
where
    K: Deserialize<'de> + Eq + Hash,
    V: Deserialize<'de>,
{
    // Plain deserialization: build an inner HashMap and wrap it.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let hash_map = HashMap::deserialize(deserializer);
        hash_map.map(|hash_map| Self(hash_map))
    }
    // In-place deserialization: reuses the existing map's storage instead of
    // allocating a fresh one.
    fn deserialize_in_place<D: Deserializer<'de>>(deserializer: D, place: &mut Self) -> Result<(), D::Error> {
        use serde::de::{MapAccess, Visitor};
        struct MapInPlaceVisitor<'a, K: 'a, V: 'a>(&'a mut AHashMap<K, V>);
        impl<'a, 'de, K, V> Visitor<'de> for MapInPlaceVisitor<'a, K, V>
        where
            K: Deserialize<'de> + Eq + Hash,
            V: Deserialize<'de>,
        {
            type Value = ();
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a map")
            }
            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: MapAccess<'de>,
            {
                self.0.clear();
                // Cap the pre-reservation at 4096 entries — presumably a guard
                // against huge attacker-supplied size hints; confirm upstream.
                self.0.reserve(map.size_hint().unwrap_or(0).min(4096));
                while let Some((key, value)) = map.next_entry()? {
                    self.0.insert(key, value);
                }
                Ok(())
            }
        }
        deserializer.deserialize_map(MapInPlaceVisitor(place))
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Inserting a value read back out of the same map must not trip borrow
    // rules (get's borrow ends before insert).
    #[test]
    fn test_borrow() {
        let mut map: AHashMap<String, String> = AHashMap::new();
        map.insert("foo".to_string(), "Bar".to_string());
        map.insert("Bar".to_string(), map.get("foo").unwrap().to_owned());
    }
    // Round-trips through serde_json via both `deserialize` and
    // `deserialize_in_place`.
    #[cfg(feature = "serde")]
    #[test]
    fn test_serde() {
        let mut map = AHashMap::new();
        map.insert("for".to_string(), 0);
        map.insert("bar".to_string(), 1);
        let mut serialization = serde_json::to_string(&map).unwrap();
        let mut deserialization: AHashMap<String, u64> = serde_json::from_str(&serialization).unwrap();
        assert_eq!(deserialization, map);
        map.insert("baz".to_string(), 2);
        serialization = serde_json::to_string(&map).unwrap();
        let mut deserializer = serde_json::Deserializer::from_str(&serialization);
        AHashMap::deserialize_in_place(&mut deserializer, &mut deserialization).unwrap();
        assert_eq!(deserialization, map);
    }
}

View file

@ -0,0 +1,534 @@
use core::hash::{Hash, Hasher};
use std::collections::HashMap;
/// Asserts that hash outputs `a` and `b` look unrelated: at most `tolerance`
/// identical bytes, at most `tolerance * 3` identical nibbles, a bit
/// difference near half of 64, and no rotation of `b` that lines up with `a`.
fn assert_sufficiently_different(a: u64, b: u64, tolerance: i32) {
    let (same_byte_count, same_nibble_count) = count_same_bytes_and_nibbles(a, b);
    assert!(same_byte_count <= tolerance, "{:x} vs {:x}: {:}", a, b, same_byte_count);
    assert!(
        same_nibble_count <= tolerance * 3,
        "{:x} vs {:x}: {:}",
        a,
        b,
        same_nibble_count
    );
    // Roughly half of the 64 bits should differ between unrelated outputs.
    let flipped_bits = (a ^ b).count_ones();
    assert!(flipped_bits > 12 && flipped_bits < 52, "{:x} and {:x}: {:}", a, b, flipped_bits);
    // The same must hold for every rotation of b, so no shifted correlation exists.
    for rotation in 0..64 {
        let rotated = b.rotate_left(rotation);
        let rotated_flips = (a ^ rotated).count_ones();
        assert!(
            rotated_flips > 10 && rotated_flips < 54,
            "{:x} and {:x}: {:}",
            a,
            rotated,
            rotated_flips
        );
    }
}
/// Counts how many of the 8 byte positions, and how many of the 16 nibble
/// positions, hold identical values in `a` and `b`.
fn count_same_bytes_and_nibbles(a: u64, b: u64) -> (i32, i32) {
    let mut bytes_equal = 0;
    let mut nibbles_equal = 0;
    for index in 0..8 {
        let byte_a = (a >> (8 * index)) as u8;
        let byte_b = (b >> (8 * index)) as u8;
        if byte_a == byte_b {
            bytes_equal += 1;
        }
        // High and low nibbles are compared independently.
        nibbles_equal += (byte_a & 0xF0u8 == byte_b & 0xF0u8) as i32;
        nibbles_equal += (byte_a & 0x0Fu8 == byte_b & 0x0Fu8) as i32;
    }
    (bytes_equal, nibbles_equal)
}
/// Recursively appends to `combinations` every non-empty sequence (with
/// repetition) of at most `depth` elements drawn from `options`, in
/// depth-first order.
fn gen_combinations(options: &[u32; 11], depth: u32, so_far: Vec<u32>, combinations: &mut Vec<Vec<u32>>) {
    if depth == 0 {
        return;
    }
    for &option in options.iter() {
        let mut extended = so_far.clone();
        extended.push(option);
        combinations.push(extended.clone());
        gen_combinations(options, depth - 1, extended, combinations);
    }
}
/// Hashes every combination (length 1..=7, with repetition) of the given
/// 32-bit patterns and asserts no two distinct byte sequences share a hash.
fn test_no_full_collisions<T: Hasher>(gen_hash: impl Fn() -> T) {
    let options: [u32; 11] = [
        0x00000000, 0x10000000, 0x20000000, 0x40000000, 0x80000000, 0xF0000000, 1, 2, 4, 8, 15,
    ];
    let mut combinations = Vec::new();
    gen_combinations(&options, 7, Vec::new(), &mut combinations);
    let mut map: HashMap<u64, Vec<u8>> = HashMap::new();
    for combination in combinations {
        use zerocopy::AsBytes;
        // Reinterpret the u32 sequence as its raw little-endian bytes for hashing.
        let array = combination.as_slice().as_bytes().to_vec();
        let mut hasher = gen_hash();
        hasher.write(&array);
        let hash = hasher.finish();
        if let Some(value) = map.get(&hash) {
            // A repeated hash is only acceptable for the identical input.
            assert_eq!(
                value, &array,
                "Found a collision between {:x?} and {:x?}. Hash: {:x?}",
                value, &array, &hash
            );
        } else {
            map.insert(hash, array);
        }
    }
    // Total number of combinations of length 1..=7 over 11 options.
    assert_eq!(21435887, map.len()); //11^7 + 11^6 ...
}
fn test_keys_change_output<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {
let mut a = constructor(1, 1);
let mut b = constructor(1, 2);
let mut c = constructor(2, 1);
let mut d = constructor(2, 2);
"test".hash(&mut a);
"test".hash(&mut b);
"test".hash(&mut c);
"test".hash(&mut d);
assert_sufficiently_different(a.finish(), b.finish(), 1);
assert_sufficiently_different(a.finish(), c.finish(), 1);
assert_sufficiently_different(a.finish(), d.finish(), 1);
assert_sufficiently_different(b.finish(), c.finish(), 1);
assert_sufficiently_different(b.finish(), d.finish(), 1);
assert_sufficiently_different(c.finish(), d.finish(), 1);
}
/// Checks that, for each byte position of a u128 input, some value of that
/// byte flips every bit of the output relative to hashing zero.
fn test_input_affect_every_byte<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {
    let base = hash_with(&0, constructor(0, 0));
    for byte_index in 0..16 {
        // All 256 values of this byte position, with every other byte zero.
        let alternatives: Vec<u64> = (0..256)
            .map(|v| hash_with(&((v as u128) << (byte_index * 8)), constructor(0, 0)))
            .collect();
        assert_each_byte_differs(byte_index, base, alternatives);
    }
}
///Ensures that for every bit in the output there is some value for each byte in the key that flips it.
fn test_keys_affect_every_byte<H: Hash, T: Hasher>(item: H, constructor: impl Fn(u128, u128) -> T) {
    let base = hash_with(&item, constructor(0, 0));
    for byte_index in 0..16 {
        let mut from_first_key = vec![];
        let mut from_second_key = vec![];
        for value in 0..256 {
            // One byte of one key set to `value`, everything else zero.
            let key = (value as u128) << (byte_index * 8);
            from_first_key.push(hash_with(&item, constructor(key, 0)));
            from_second_key.push(hash_with(&item, constructor(0, key)));
        }
        assert_each_byte_differs(byte_index, base, from_first_key);
        assert_each_byte_differs(byte_index, base, from_second_key);
    }
}
/// Asserts that, across all `alternatives`, every bit of `base` was flipped
/// by at least one alternative (the OR of all XOR deltas is all-ones).
fn assert_each_byte_differs(num: u64, base: u64, alternatives: Vec<u64>) {
    let changed_bits = alternatives.iter().fold(0_u64, |acc, alt| acc | (base ^ alt));
    assert_eq!(
        core::u64::MAX,
        changed_bits,
        "Bits changed: {:x} on num: {:?}. base {:x}",
        changed_bits,
        num,
        base
    );
}
/// Verifies that calling `finish` twice without writing in between yields
/// the same value both times.
fn test_finish_is_consistent<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {
    let mut hasher = constructor(1, 2);
    "Foo".hash(&mut hasher);
    let first = hasher.finish();
    let second = hasher.finish();
    assert_eq!(first, second);
}
/// For each of the 128 key bits, flipping that single bit in either key must
/// substantially change the hash of several fixed-length string inputs.
fn test_single_key_bit_flip<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {
    for bit in 0..128 {
        for input in &["1234", "12345678", "1234567812345678"] {
            let mut base = constructor(0, 0);
            let mut second_flipped = constructor(0, 1 << bit);
            let mut first_flipped = constructor(1 << bit, 0);
            input.hash(&mut base);
            input.hash(&mut second_flipped);
            input.hash(&mut first_flipped);
            assert_sufficiently_different(base.finish(), second_flipped.finish(), 2);
            assert_sufficiently_different(base.finish(), first_flipped.finish(), 2);
            assert_sufficiently_different(second_flipped.finish(), first_flipped.finish(), 2);
        }
    }
}
/// Checks that setting any single byte of a 256-byte zero buffer to 255
/// changes the hash relative to the all-zero buffer.
fn test_all_bytes_matter<T: Hasher>(hasher: impl Fn() -> T) {
    let mut buffer = vec![0; 256];
    let baseline = hash(&buffer, &hasher);
    for position in 0..buffer.len() {
        // Flip one byte, hash, and restore it for the next iteration.
        buffer[position] = 255;
        let changed = hash(&buffer, &hasher);
        buffer[position] = 0;
        assert_ne!(baseline, changed, "Position {} did not affect output", position);
    }
}
/// For every pair of distinct single-bit u64 values (a, b), hashes the four
/// two-element arrays [a,a], [a,b], [b,a], [b,b] and asserts each result is
/// substantially different from the all-zero baseline and from the others.
fn test_no_pair_collisions<T: Hasher>(hasher: impl Fn() -> T) {
    let base = [0_u64, 0_u64];
    let base_hash = hash(&base, &hasher);
    for bitpos1 in 0..64 {
        let a = 1_u64 << bitpos1;
        for bitpos2 in 0..bitpos1 {
            let b = 1_u64 << bitpos2;
            let aa = hash(&[a, a], &hasher);
            let ab = hash(&[a, b], &hasher);
            let ba = hash(&[b, a], &hasher);
            let bb = hash(&[b, b], &hasher);
            // Each pattern must differ from the baseline...
            assert_sufficiently_different(base_hash, aa, 3);
            assert_sufficiently_different(base_hash, ab, 3);
            assert_sufficiently_different(base_hash, ba, 3);
            assert_sufficiently_different(base_hash, bb, 3);
            // ...and from every other pattern (order of elements matters).
            assert_sufficiently_different(aa, ab, 3);
            assert_sufficiently_different(ab, ba, 3);
            assert_sufficiently_different(ba, bb, 3);
            assert_sufficiently_different(aa, ba, 3);
            assert_sufficiently_different(ab, bb, 3);
            assert_sufficiently_different(aa, bb, 3);
        }
    }
}
/// Hashes `b` with a fresh hasher obtained from `hash_builder` and returns
/// the finished 64-bit value.
fn hash<H: Hash, T: Hasher>(b: &H, hash_builder: &dyn Fn() -> T) -> u64 {
    let mut state = hash_builder();
    b.hash(&mut state);
    state.finish()
}
/// Feeds `b` into the provided hasher and returns the finished value.
fn hash_with<H: Hash, T: Hasher>(b: &H, mut hasher: T) -> u64 {
    b.hash(&mut hasher);
    hasher.finish()
}
/// Checks the avalanche property for single-bit input flips: hashing zero vs
/// a value with exactly one bit set must produce outputs differing in
/// roughly half their bits, for 32-, 64- and 128-bit inputs.
fn test_single_bit_flip<T: Hasher>(hasher: impl Fn() -> T) {
    let size = 32;
    let compare_value = hash(&0u32, &hasher);
    for pos in 0..size {
        let test_value = hash(&(1u32 << pos), &hasher);
        assert_sufficiently_different(compare_value, test_value, 2);
    }
    let size = 64;
    let compare_value = hash(&0u64, &hasher);
    for pos in 0..size {
        let test_value = hash(&(1u64 << pos), &hasher);
        assert_sufficiently_different(compare_value, test_value, 2);
    }
    let size = 128;
    let compare_value = hash(&0u128, &hasher);
    for pos in 0..size {
        let test_value = hash(&(1u128 << pos), &hasher);
        // Fixed: removed a leftover `dbg!(compare_value, test_value)` that
        // spammed stderr for all 128 positions on every run.
        assert_sufficiently_different(compare_value, test_value, 2);
    }
}
/// Checks that appending runs (1..=128 bytes) of any single padding character
/// to a base string produces hashes that share almost nothing with the base
/// hash — no trivial padding/extension collisions.
fn test_padding_doesnot_collide<T: Hasher>(hasher: impl Fn() -> T) {
    for c in 0..128u8 {
        for string in ["", "\0", "\x01", "1234", "12345678", "1234567812345678"].iter() {
            let mut short = hasher();
            string.hash(&mut short);
            let value = short.finish();
            let mut padded = string.to_string();
            // Grow the padding one character at a time; `num` counts appended bytes.
            for num in 1..=128 {
                let mut long = hasher();
                padded.push(c as char);
                padded.hash(&mut long);
                let (same_bytes, same_nibbles) = count_same_bytes_and_nibbles(value, long.finish());
                assert!(
                    same_bytes <= 3,
                    "{} bytes of {} -> {:x} vs {:x}",
                    num,
                    c,
                    value,
                    long.finish()
                );
                assert!(
                    same_nibbles <= 8,
                    "{} bytes of {} -> {:x} vs {:x}",
                    num,
                    c,
                    value,
                    long.finish()
                );
                let flipped_bits = (value ^ long.finish()).count_ones();
                assert!(flipped_bits > 10);
            }
            // Second pass: drop the first character of the base string before
            // padding, so the padded string is shifted relative to the base.
            if string.len() > 0 {
                let mut padded = string[1..].to_string();
                padded.push(c as char);
                for num in 2..=128 {
                    let mut long = hasher();
                    padded.push(c as char);
                    padded.hash(&mut long);
                    let (same_bytes, same_nibbles) = count_same_bytes_and_nibbles(value, long.finish());
                    assert!(
                        same_bytes <= 3,
                        "string {:?} + {} bytes of {} -> {:x} vs {:x}",
                        string,
                        num,
                        c,
                        value,
                        long.finish()
                    );
                    assert!(
                        same_nibbles <= 8,
                        "string {:?} + {} bytes of {} -> {:x} vs {:x}",
                        string,
                        num,
                        c,
                        value,
                        long.finish()
                    );
                    let flipped_bits = (value ^ long.finish()).count_ones();
                    assert!(flipped_bits > 10);
                }
            }
        }
    }
}
/// Verifies resistance to trivial length extension: for many keys, an 8-byte
/// all-zero message and a 9-byte message beginning with 1 must not collide.
fn test_length_extension<T: Hasher>(hasher: impl Fn(u128, u128) -> T) {
    for key in 0..256 {
        let short_input = [0_u8, 0, 0, 0, 0, 0, 0, 0];
        let extended_input = [1_u8, 0, 0, 0, 0, 0, 0, 0, 0];
        let short_hash = hash_with(&short_input, hasher(key, key));
        let extended_hash = hash_with(&extended_input, hasher(key, key));
        assert_ne!(short_hash, extended_hash);
    }
}
/// Hashes every 256-byte buffer containing exactly two non-zero bytes (drawn
/// from fixed value sets) and asserts that all such inputs hash uniquely.
fn test_sparse<T: Hasher>(hasher: impl Fn() -> T) {
    use smallvec::SmallVec;
    let mut buf = [0u8; 256];
    let mut hashes = HashMap::new();
    for idx_1 in 0..255_u8 {
        for idx_2 in idx_1 + 1..=255_u8 {
            for value_1 in [1, 2, 4, 8, 16, 32, 64, 128] {
                for value_2 in [
                    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 15, 16, 17, 18, 20, 24, 31, 32, 33, 48, 64, 96, 127, 128, 129,
                    192, 254, 255,
                ] {
                    buf[idx_1 as usize] = value_1;
                    buf[idx_2 as usize] = value_2;
                    let hash_value = hash_with(&buf, &mut hasher());
                    // Group inputs by hash; a unique hash keeps a single entry.
                    let keys = hashes.entry(hash_value).or_insert(SmallVec::<[[u8; 4]; 1]>::new());
                    keys.push([idx_1, value_1, idx_2, value_2]);
                    // Restore the buffer to all zeros for the next combination.
                    buf[idx_1 as usize] = 0;
                    buf[idx_2 as usize] = 0;
                }
            }
        }
    }
    // Keep only hashes produced by more than one input — i.e. collisions.
    hashes.retain(|_key, value| value.len() != 1);
    assert_eq!(0, hashes.len(), "Collision with: {:?}", hashes);
}
#[cfg(test)]
mod fallback_tests {
    //! Runs the hash-quality suite against the fallback (non-AES) hasher.
    use crate::fallback_hash::*;
    use crate::hash_quality_test::*;
    #[test]
    fn fallback_single_bit_flip() {
        test_single_bit_flip(|| AHasher::new_with_keys(0, 0))
    }
    #[test]
    fn fallback_single_key_bit_flip() {
        test_single_key_bit_flip(AHasher::new_with_keys)
    }
    #[test]
    fn fallback_all_bytes_matter() {
        test_all_bytes_matter(|| AHasher::new_with_keys(0, 0));
    }
    #[test]
    fn fallback_test_no_pair_collisions() {
        test_no_pair_collisions(|| AHasher::new_with_keys(0, 0));
    }
    #[test]
    fn fallback_test_no_full_collisions() {
        test_no_full_collisions(|| AHasher::new_with_keys(0, 0));
    }
    #[test]
    fn fallback_keys_change_output() {
        test_keys_change_output(AHasher::new_with_keys);
    }
    #[test]
    fn fallback_input_affect_every_byte() {
        test_input_affect_every_byte(AHasher::new_with_keys);
    }
    #[test]
    fn fallback_keys_affect_every_byte() {
        //For fallback second key is not used in every hash.
        // Keys are therefore mixed (a ^ b, a) so both keys influence the result.
        #[cfg(all(not(feature = "specialize"), feature = "folded_multiply"))]
        test_keys_affect_every_byte(0, |a, b| AHasher::new_with_keys(a ^ b, a));
        test_keys_affect_every_byte("", |a, b| AHasher::new_with_keys(a ^ b, a));
        test_keys_affect_every_byte((0, 0), |a, b| AHasher::new_with_keys(a ^ b, a));
    }
    #[test]
    fn fallback_finish_is_consistant() {
        // NOTE(review): this uses `test_with_keys` while the other fallback
        // tests use `new_with_keys` — confirm this is intentional.
        test_finish_is_consistent(AHasher::test_with_keys)
    }
    #[test]
    fn fallback_padding_doesnot_collide() {
        test_padding_doesnot_collide(|| AHasher::new_with_keys(0, 0));
        test_padding_doesnot_collide(|| AHasher::new_with_keys(0, 2));
        test_padding_doesnot_collide(|| AHasher::new_with_keys(2, 0));
        test_padding_doesnot_collide(|| AHasher::new_with_keys(2, 2));
    }
    #[test]
    fn fallback_length_extension() {
        test_length_extension(|a, b| AHasher::new_with_keys(a, b));
    }
    #[test]
    fn test_no_sparse_collisions() {
        test_sparse(|| AHasher::new_with_keys(0, 0));
        test_sparse(|| AHasher::new_with_keys(1, 2));
    }
}
///Basic sanity tests of the crypto properties of aHash.
#[cfg(any(
    all(any(target_arch = "x86", target_arch = "x86_64"), target_feature = "aes", not(miri)),
    all(target_arch = "aarch64", target_feature = "aes", not(miri)),
    all(feature = "nightly-arm-aes", target_arch = "arm", target_feature = "aes", not(miri)),
))]
#[cfg(test)]
mod aes_tests {
    use crate::aes_hash::*;
    use crate::hash_quality_test::*;
    use std::hash::{Hash, Hasher};
    // Deliberately weak keys: each is a degenerate input for one AES round.
    //This encrypts to 0.
    const BAD_KEY2: u128 = 0x6363_6363_6363_6363_6363_6363_6363_6363;
    //This decrypts to 0.
    const BAD_KEY: u128 = 0x5252_5252_5252_5252_5252_5252_5252_5252;
    #[test]
    fn test_single_bit_in_byte() {
        let mut hasher1 = AHasher::test_with_keys(0, 0);
        8_u32.hash(&mut hasher1);
        let mut hasher2 = AHasher::test_with_keys(0, 0);
        0_u32.hash(&mut hasher2);
        assert_sufficiently_different(hasher1.finish(), hasher2.finish(), 1);
    }
    #[test]
    fn aes_single_bit_flip() {
        test_single_bit_flip(|| AHasher::test_with_keys(BAD_KEY, BAD_KEY));
        test_single_bit_flip(|| AHasher::test_with_keys(BAD_KEY2, BAD_KEY2));
    }
    #[test]
    fn aes_single_key_bit_flip() {
        test_single_key_bit_flip(AHasher::test_with_keys)
    }
    #[test]
    fn aes_all_bytes_matter() {
        test_all_bytes_matter(|| AHasher::test_with_keys(BAD_KEY, BAD_KEY));
        test_all_bytes_matter(|| AHasher::test_with_keys(BAD_KEY2, BAD_KEY2));
    }
    #[test]
    fn aes_test_no_pair_collisions() {
        test_no_pair_collisions(|| AHasher::test_with_keys(BAD_KEY, BAD_KEY));
        test_no_pair_collisions(|| AHasher::test_with_keys(BAD_KEY2, BAD_KEY2));
    }
    // NOTE(review): "ase" looks like a typo for "aes" in this test name.
    #[test]
    fn ase_test_no_full_collisions() {
        test_no_full_collisions(|| AHasher::test_with_keys(12345, 67890));
    }
    #[test]
    fn aes_keys_change_output() {
        test_keys_change_output(AHasher::test_with_keys);
    }
    #[test]
    fn aes_input_affect_every_byte() {
        test_input_affect_every_byte(AHasher::test_with_keys);
    }
    #[test]
    fn aes_keys_affect_every_byte() {
        #[cfg(not(feature = "specialize"))]
        test_keys_affect_every_byte(0, AHasher::test_with_keys);
        test_keys_affect_every_byte("", AHasher::test_with_keys);
        test_keys_affect_every_byte((0, 0), AHasher::test_with_keys);
    }
    #[test]
    fn aes_finish_is_consistant() {
        test_finish_is_consistent(AHasher::test_with_keys)
    }
    #[test]
    fn aes_padding_doesnot_collide() {
        test_padding_doesnot_collide(|| AHasher::test_with_keys(BAD_KEY, BAD_KEY));
        test_padding_doesnot_collide(|| AHasher::test_with_keys(BAD_KEY2, BAD_KEY2));
    }
    #[test]
    fn aes_length_extension() {
        test_length_extension(|a, b| AHasher::test_with_keys(a, b));
    }
    #[test]
    fn aes_no_sparse_collisions() {
        test_sparse(|| AHasher::test_with_keys(0, 0));
        test_sparse(|| AHasher::test_with_keys(1, 2));
    }
}

352
third-party/vendor/ahash/src/hash_set.rs vendored Normal file
View file

@ -0,0 +1,352 @@
use crate::RandomState;
use std::collections::{hash_set, HashSet};
use std::fmt::{self, Debug};
use std::hash::{BuildHasher, Hash};
use std::iter::FromIterator;
use std::ops::{BitAnd, BitOr, BitXor, Deref, DerefMut, Sub};
#[cfg(feature = "serde")]
use serde::{
de::{Deserialize, Deserializer},
ser::{Serialize, Serializer},
};
/// A [`HashSet`](std::collections::HashSet) using [`RandomState`](crate::RandomState) to hash the items.
/// (Requires the `std` feature to be enabled.)
// Newtype wrapper: the full `HashSet` API is exposed through `Deref`/`DerefMut`.
#[derive(Clone)]
pub struct AHashSet<T, S = RandomState>(HashSet<T, S>);
/// Wraps an existing `HashSet` that already uses [RandomState]; no rehashing occurs.
impl<T> From<HashSet<T, RandomState>> for AHashSet<T> {
    fn from(item: HashSet<T, RandomState>) -> Self {
        AHashSet(item)
    }
}
impl<T, const N: usize> From<[T; N]> for AHashSet<T>
where
    T: Eq + Hash,
{
    /// Builds a set from a fixed-size array; duplicate elements are collapsed.
    ///
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashSet;
    ///
    /// let set1 = AHashSet::from([1, 2, 3, 4]);
    /// let set2: AHashSet<_> = [1, 2, 3, 4].into();
    /// assert_eq!(set1, set2);
    /// ```
    fn from(arr: [T; N]) -> Self {
        Self::from_iter(arr)
    }
}
/// Unwraps the inner [`HashSet`].
///
/// Implemented as `From` rather than a hand-written `Into`: the standard
/// blanket impl still provides `Into<HashSet<T, RandomState>>` for all
/// existing callers, while following the convention of implementing `From`.
impl<T> From<AHashSet<T>> for HashSet<T, RandomState> {
    fn from(item: AHashSet<T>) -> Self {
        item.0
    }
}
impl<T> AHashSet<T, RandomState> {
    /// This creates a hashset using [RandomState::new].
    /// See the documentation in [RandomSource] for notes about key strength.
    pub fn new() -> Self {
        AHashSet(HashSet::with_hasher(RandomState::new()))
    }
    /// This creates a hashset with the specified capacity using [RandomState::new].
    /// See the documentation in [RandomSource] for notes about key strength.
    pub fn with_capacity(capacity: usize) -> Self {
        AHashSet(HashSet::with_capacity_and_hasher(capacity, RandomState::new()))
    }
}
impl<T, S> AHashSet<T, S>
where
    S: BuildHasher,
{
    /// Creates an empty set that hashes items with the given `BuildHasher`.
    pub fn with_hasher(hash_builder: S) -> Self {
        AHashSet(HashSet::with_hasher(hash_builder))
    }
    /// Creates an empty set with room for at least `capacity` items,
    /// hashing with `hash_builder`.
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        AHashSet(HashSet::with_capacity_and_hasher(capacity, hash_builder))
    }
}
/// Exposes the read-only `HashSet` API directly on `AHashSet`.
impl<T, S> Deref for AHashSet<T, S> {
    type Target = HashSet<T, S>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Exposes the mutating `HashSet` API directly on `AHashSet`.
impl<T, S> DerefMut for AHashSet<T, S> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Two sets compare equal when their wrapped `HashSet`s contain the same items.
impl<T, S> PartialEq for AHashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
    fn eq(&self, other: &AHashSet<T, S>) -> bool {
        self.0.eq(&other.0)
    }
}
/// Set equality is a full equivalence relation, mirroring `HashSet`.
impl<T, S> Eq for AHashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
}
impl<T, S> BitOr<&AHashSet<T, S>> for &AHashSet<T, S>
where
    T: Eq + Hash + Clone,
    S: BuildHasher + Default,
{
    type Output = AHashSet<T, S>;
    /// Returns the union of `self` and `rhs` as a new `AHashSet<T, S>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashSet;
    ///
    /// let a: AHashSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: AHashSet<_> = vec![3, 4, 5].into_iter().collect();
    ///
    /// let set = &a | &b;
    ///
    /// let mut i = 0;
    /// let expected = [1, 2, 3, 4, 5];
    /// for x in &set {
    ///     assert!(expected.contains(x));
    ///     i += 1;
    /// }
    /// assert_eq!(i, expected.len());
    /// ```
    fn bitor(self, rhs: &AHashSet<T, S>) -> AHashSet<T, S> {
        // Delegates to `HashSet::bitor`; the result's hasher comes from `S::default()`.
        AHashSet(self.0.bitor(&rhs.0))
    }
}
impl<T, S> BitAnd<&AHashSet<T, S>> for &AHashSet<T, S>
where
    T: Eq + Hash + Clone,
    S: BuildHasher + Default,
{
    type Output = AHashSet<T, S>;
    /// Returns the intersection of `self` and `rhs` as a new `AHashSet<T, S>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashSet;
    ///
    /// let a: AHashSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: AHashSet<_> = vec![2, 3, 4].into_iter().collect();
    ///
    /// let set = &a & &b;
    ///
    /// let mut i = 0;
    /// let expected = [2, 3];
    /// for x in &set {
    ///     assert!(expected.contains(x));
    ///     i += 1;
    /// }
    /// assert_eq!(i, expected.len());
    /// ```
    fn bitand(self, rhs: &AHashSet<T, S>) -> AHashSet<T, S> {
        // Delegates to `HashSet::bitand`; the result's hasher comes from `S::default()`.
        AHashSet(self.0.bitand(&rhs.0))
    }
}
impl<T, S> BitXor<&AHashSet<T, S>> for &AHashSet<T, S>
where
    T: Eq + Hash + Clone,
    S: BuildHasher + Default,
{
    type Output = AHashSet<T, S>;
    /// Returns the symmetric difference of `self` and `rhs` as a new `AHashSet<T, S>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashSet;
    ///
    /// let a: AHashSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: AHashSet<_> = vec![3, 4, 5].into_iter().collect();
    ///
    /// let set = &a ^ &b;
    ///
    /// let mut i = 0;
    /// let expected = [1, 2, 4, 5];
    /// for x in &set {
    ///     assert!(expected.contains(x));
    ///     i += 1;
    /// }
    /// assert_eq!(i, expected.len());
    /// ```
    fn bitxor(self, rhs: &AHashSet<T, S>) -> AHashSet<T, S> {
        // Delegates to `HashSet::bitxor`; the result's hasher comes from `S::default()`.
        AHashSet(self.0.bitxor(&rhs.0))
    }
}
impl<T, S> Sub<&AHashSet<T, S>> for &AHashSet<T, S>
where
    T: Eq + Hash + Clone,
    S: BuildHasher + Default,
{
    type Output = AHashSet<T, S>;
    /// Returns the difference of `self` and `rhs` as a new `AHashSet<T, S>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use ahash::AHashSet;
    ///
    /// let a: AHashSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: AHashSet<_> = vec![3, 4, 5].into_iter().collect();
    ///
    /// let set = &a - &b;
    ///
    /// let mut i = 0;
    /// let expected = [1, 2];
    /// for x in &set {
    ///     assert!(expected.contains(x));
    ///     i += 1;
    /// }
    /// assert_eq!(i, expected.len());
    /// ```
    fn sub(self, rhs: &AHashSet<T, S>) -> AHashSet<T, S> {
        // Delegates to `HashSet::sub`; the result's hasher comes from `S::default()`.
        AHashSet(self.0.sub(&rhs.0))
    }
}
/// Formats exactly like the underlying `HashSet` (e.g. `{1, 2, 3}`).
impl<T, S> Debug for AHashSet<T, S>
where
    T: Debug,
    S: BuildHasher,
{
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(fmt)
    }
}
impl<T> FromIterator<T> for AHashSet<T, RandomState>
where
    T: Eq + Hash,
{
    /// This creates a hashset from the provided iterator using [RandomState::new].
    /// See the documentation in [RandomSource] for notes about key strength.
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> AHashSet<T> {
        let mut inner = HashSet::with_hasher(RandomState::new());
        inner.extend(iter);
        AHashSet(inner)
    }
}
/// Iterates over the items by shared reference, mirroring `&HashSet`.
impl<'a, T, S> IntoIterator for &'a AHashSet<T, S> {
    type Item = &'a T;
    type IntoIter = hash_set::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
/// Consumes the set, yielding its items by value.
impl<T, S> IntoIterator for AHashSet<T, S> {
    type Item = T;
    type IntoIter = hash_set::IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
/// Inserts every item yielded by the iterator, delegating to `HashSet::extend`.
impl<T, S> Extend<T> for AHashSet<T, S>
where
    T: Eq + Hash,
    S: BuildHasher,
{
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        self.0.extend(iter)
    }
}
/// Inserts copies of borrowed items; requires `T: Copy`, matching the std impl.
impl<'a, T, S> Extend<&'a T> for AHashSet<T, S>
where
    T: 'a + Eq + Hash + Copy,
    S: BuildHasher,
{
    #[inline]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.0.extend(iter)
    }
}
/// NOTE: For safety this trait impl is only available if either of the flags `runtime-rng` (on by default) or
/// `compile-time-rng` are enabled. This is to prevent weakly keyed maps from being accidentally created. Instead one of
/// constructors for [RandomState] must be used.
#[cfg(any(feature = "compile-time-rng", feature = "runtime-rng", feature = "no-rng"))]
impl<T> Default for AHashSet<T, RandomState> {
    /// Creates an empty `AHashSet<T, S>` with the `Default` value for the hasher.
    #[inline]
    fn default() -> AHashSet<T, RandomState> {
        AHashSet(HashSet::default())
    }
}
#[cfg(feature = "serde")]
impl<T> Serialize for AHashSet<T>
where
    T: Serialize + Eq + Hash,
{
    /// Serializes as a plain sequence by delegating to the inner `HashSet` through `Deref`.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        self.deref().serialize(serializer)
    }
}
#[cfg(feature = "serde")]
impl<'de, T> Deserialize<'de> for AHashSet<T>
where
    T: Deserialize<'de> + Eq + Hash,
{
    /// Deserializes a set and wraps the result in `AHashSet`.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let hash_set = HashSet::deserialize(deserializer);
        hash_set.map(|hash_set| Self(hash_set))
    }
    /// Deserializes into the existing set; `&mut AHashSet` coerces to
    /// `&mut HashSet` via the `DerefMut` impl above, so this delegates to
    /// `HashSet`'s in-place deserialization.
    fn deserialize_in_place<D: Deserializer<'de>>(deserializer: D, place: &mut Self) -> Result<(), D::Error> {
        HashSet::deserialize_in_place(deserializer, place)
    }
}
#[cfg(all(test, feature = "serde"))]
mod test {
    use super::*;
    /// Round-trips a set through serde_json, then verifies
    /// `deserialize_in_place` refills an already-populated set.
    #[test]
    fn test_serde() {
        let mut set = AHashSet::new();
        set.insert("for".to_string());
        set.insert("bar".to_string());
        let mut serialization = serde_json::to_string(&set).unwrap();
        let mut deserialization: AHashSet<String> = serde_json::from_str(&serialization).unwrap();
        assert_eq!(deserialization, set);
        set.insert("baz".to_string());
        serialization = serde_json::to_string(&set).unwrap();
        let mut deserializer = serde_json::Deserializer::from_str(&serialization);
        AHashSet::deserialize_in_place(&mut deserializer, &mut deserialization).unwrap();
        assert_eq!(deserialization, set);
    }
}

Some files were not shown because too many files have changed in this diff Show more