hexsha
stringlengths 40
40
| size
int64 2
1.05M
| content
stringlengths 2
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
f4b5f3e4c08ed3a3af741936fdc3718582efb24d | 27,657 | // Rust Elements Library
// Written by
// The Elements developers
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Addresses
//!
use std::error;
use std::fmt;
use std::str::FromStr;
// AsciiExt is needed until Rust 1.26 but not for newer versions
#[allow(unused_imports, deprecated)]
use std::ascii::AsciiExt;
use bitcoin::bech32::{self, u5, FromBase32, ToBase32};
use bitcoin::blockdata::{opcodes, script};
use bitcoin::util::base58;
use bitcoin::PublicKey;
use bitcoin::hashes::{hash160, Hash};
use bitcoin::secp256k1;
#[cfg(feature = "serde")]
use serde;
use blech32;
/// Encoding error
///
/// Covers every failure mode of parsing or encoding an Elements address,
/// in all three supported encodings (base58check, bech32, blech32).
#[derive(Debug, PartialEq)]
pub enum AddressError {
    /// Base58 encoding error
    Base58(base58::Error),
    /// Bech32 encoding error
    Bech32(bech32::Error),
    /// Blech32 encoding error
    ///
    /// Note: blech32 reuses the `bech32::Error` type; only the variant
    /// distinguishes which encoding produced the error.
    Blech32(bech32::Error),
    /// Was unable to parse the address.
    InvalidAddress(String),
    /// Script version must be 0 to 16 inclusive
    InvalidWitnessVersion,
    /// Unsupported witness version
    UnsupportedWitnessVersion(u8),
    /// An invalid blinding pubkey was encountered.
    InvalidBlindingPubKey(secp256k1::Error),
    /// Given the program version, the length is invalid
    ///
    /// Version 0 scripts must be either 20 or 32 bytes
    InvalidWitnessProgramLength,
}
impl fmt::Display for AddressError {
    /// Human-readable error message.
    ///
    /// Spells out the message for each variant explicitly instead of going
    /// through the deprecated `Error::description` helper; the rendered
    /// strings are identical to the previous `description`-based output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            AddressError::Base58(ref e) => fmt::Display::fmt(e, f),
            AddressError::Bech32(ref e) => write!(f, "bech32 error: {}", e),
            AddressError::Blech32(ref e) => write!(f, "blech32 error: {}", e),
            AddressError::InvalidAddress(ref a) => {
                write!(f, "was unable to parse the address: {}", a)
            }
            AddressError::UnsupportedWitnessVersion(ref wver) => {
                write!(f, "unsupported witness version: {}", wver)
            }
            AddressError::InvalidBlindingPubKey(ref e) => {
                write!(f, "an invalid blinding pubkey was encountered: {}", e)
            }
            AddressError::InvalidWitnessVersion => f.write_str("invalid witness script version"),
            AddressError::InvalidWitnessProgramLength => {
                f.write_str("program length incompatible with version")
            }
        }
    }
}
impl error::Error for AddressError {
    // Kept as `cause` (rather than the newer `source`) for compatibility
    // with the older toolchains this crate still supports (cf. the
    // AsciiExt import above targeting Rust 1.26).
    fn cause(&self) -> Option<&error::Error> {
        match *self {
            // Only the wrapper variants carry an underlying error.
            AddressError::Base58(ref e) => Some(e),
            AddressError::Bech32(ref e) => Some(e),
            AddressError::Blech32(ref e) => Some(e),
            AddressError::InvalidBlindingPubKey(ref e) => Some(e),
            _ => None,
        }
    }
    // `description` is deprecated in newer Rust in favor of `Display`,
    // but is still implemented here for older-toolchain support.
    fn description(&self) -> &str {
        match *self {
            AddressError::Base58(ref e) => e.description(),
            AddressError::Bech32(ref e) => e.description(),
            AddressError::Blech32(ref e) => e.description(),
            AddressError::InvalidAddress(..) => "was unable to parse the address",
            AddressError::UnsupportedWitnessVersion(..) => "unsupported witness version",
            AddressError::InvalidBlindingPubKey(..) => "an invalid blinding pubkey was encountered",
            AddressError::InvalidWitnessProgramLength => "program length incompatible with version",
            AddressError::InvalidWitnessVersion => "invalid witness script version",
        }
    }
}
#[doc(hidden)]
impl From<base58::Error> for AddressError {
    // Enables the `?` operator on base58 results inside address parsing.
    fn from(e: base58::Error) -> AddressError {
        AddressError::Base58(e)
    }
}
/// The parameters to derive addresses.
///
/// Predefined sets for the built-in networks are available as
/// [AddressParams::LIQUID] and [AddressParams::ELEMENTS].
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct AddressParams {
    /// The base58 prefix for p2pkh addresses.
    pub p2pkh_prefix: u8,
    /// The base58 prefix for p2sh addresses.
    pub p2sh_prefix: u8,
    /// The base58 prefix for blinded addresses.
    pub blinded_prefix: u8,
    /// The bech32 HRP for unblinded segwit addresses.
    pub bech_hrp: &'static str,
    /// The bech32 HRP for blinded segwit addresses.
    pub blech_hrp: &'static str,
}
impl AddressParams {
    // NOTE(review): `FromStr for Address` tries LIQUID before ELEMENTS, so
    // the prefix/HRP sets of the two networks must stay disjoint for
    // parsing to remain unambiguous.

    /// The Liquid network address parameters.
    pub const LIQUID: AddressParams = AddressParams {
        p2pkh_prefix: 57,
        p2sh_prefix: 39,
        blinded_prefix: 12,
        bech_hrp: "ex",
        blech_hrp: "lq",
    };

    /// The default Elements network address parameters.
    pub const ELEMENTS: AddressParams = AddressParams {
        p2pkh_prefix: 235,
        p2sh_prefix: 75,
        blinded_prefix: 4,
        bech_hrp: "ert",
        blech_hrp: "el",
    };
}
/// The method used to produce an address
///
/// This is the non-confidential part of an address; blinding is tracked
/// separately on [Address::blinding_pubkey].
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Payload {
    /// pay-to-pkhash address
    PubkeyHash(hash160::Hash),
    /// P2SH address
    ScriptHash(hash160::Hash),
    /// Segwit address
    WitnessProgram {
        /// The segwit version.
        version: u5,
        /// The segwit program.
        program: Vec<u8>,
    },
}
/// An Elements address.
///
/// The `params` field determines the network prefixes/HRPs used by both
/// [fmt::Display] and the parsing routines.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Address {
    /// the network
    pub params: &'static AddressParams,
    /// the traditional non-confidential payload
    pub payload: Payload,
    /// the blinding pubkey
    ///
    /// `Some` makes this a confidential (blinded) address.
    pub blinding_pubkey: Option<secp256k1::PublicKey>,
}
impl Address {
    /// Inspect if the address is a blinded address.
    pub fn is_blinded(&self) -> bool {
        self.blinding_pubkey.is_some()
    }

    /// Creates a pay to (compressed) public key hash address from a public key
    /// This is the preferred non-witness type address
    #[inline]
    pub fn p2pkh(
        pk: &PublicKey,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        // The payload is hash160 of the serialized public key.
        let mut hash_engine = hash160::Hash::engine();
        pk.write_into(&mut hash_engine);
        Address {
            params: params,
            payload: Payload::PubkeyHash(hash160::Hash::from_engine(hash_engine)),
            blinding_pubkey: blinder,
        }
    }

    /// Creates a pay to script hash P2SH address from a script
    /// This address type was introduced with BIP16 and is the popular type to implement multi-sig these days.
    #[inline]
    pub fn p2sh(
        script: &script::Script,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        Address {
            params: params,
            payload: Payload::ScriptHash(hash160::Hash::hash(&script[..])),
            blinding_pubkey: blinder,
        }
    }

    /// Create a witness pay to public key address from a public key
    /// This is the native segwit address type for an output redeemable with a single signature
    pub fn p2wpkh(
        pk: &PublicKey,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        let mut hash_engine = hash160::Hash::engine();
        pk.write_into(&mut hash_engine);
        Address {
            params: params,
            payload: Payload::WitnessProgram {
                // Witness version 0; program is the 20-byte pubkey hash.
                version: u5::try_from_u8(0).expect("0<32"),
                program: hash160::Hash::from_engine(hash_engine)[..].to_vec(),
            },
            blinding_pubkey: blinder,
        }
    }

    /// Create a pay to script address that embeds a witness pay to public key
    /// This is a segwit address type that looks familiar (as p2sh) to legacy clients
    pub fn p2shwpkh(
        pk: &PublicKey,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        let mut hash_engine = hash160::Hash::engine();
        pk.write_into(&mut hash_engine);
        // Build the v0 p2wpkh script (OP_0 <20-byte hash>) and wrap it in p2sh.
        let builder = script::Builder::new()
            .push_int(0)
            .push_slice(&hash160::Hash::from_engine(hash_engine)[..]);
        Address {
            params: params,
            payload: Payload::ScriptHash(hash160::Hash::hash(builder.into_script().as_bytes())),
            blinding_pubkey: blinder,
        }
    }

    /// Create a witness pay to script hash address
    pub fn p2wsh(
        script: &script::Script,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        use bitcoin::hashes::sha256;
        Address {
            params: params,
            payload: Payload::WitnessProgram {
                // Witness version 0; program is the 32-byte sha256 of the script.
                version: u5::try_from_u8(0).expect("0<32"),
                program: sha256::Hash::hash(&script[..])[..].to_vec(),
            },
            blinding_pubkey: blinder,
        }
    }

    /// Create a pay to script address that embeds a witness pay to script hash address
    /// This is a segwit address type that looks familiar (as p2sh) to legacy clients
    pub fn p2shwsh(
        script: &script::Script,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Address {
        use bitcoin::hashes::sha256;
        // Build the v0 p2wsh script (OP_0 <32-byte sha256>) and wrap it in p2sh.
        let ws = script::Builder::new()
            .push_int(0)
            .push_slice(&sha256::Hash::hash(&script[..])[..])
            .into_script();
        Address {
            params: params,
            payload: Payload::ScriptHash(hash160::Hash::hash(&ws[..])),
            blinding_pubkey: blinder,
        }
    }

    /// Get an [Address] from an output script.
    ///
    /// Returns `None` when the script matches none of the four recognized
    /// templates (p2pkh, p2sh, v0 p2wpkh, v0 p2wsh).
    pub fn from_script(
        script: &script::Script,
        blinder: Option<secp256k1::PublicKey>,
        params: &'static AddressParams,
    ) -> Option<Address> {
        Some(Address {
            payload: if script.is_p2pkh() {
                // OP_DUP OP_HASH160 <push 20>: the hash bytes sit at 3..23
                // (cf. script_pubkey below, which builds this template).
                Payload::PubkeyHash(Hash::from_slice(&script.as_bytes()[3..23]).unwrap())
            } else if script.is_p2sh() {
                // OP_HASH160 <push 20> OP_EQUAL: hash bytes at 2..22.
                Payload::ScriptHash(Hash::from_slice(&script.as_bytes()[2..22]).unwrap())
            } else if script.is_v0_p2wpkh() {
                // OP_0 <push 20>: program bytes at 2..22.
                Payload::WitnessProgram {
                    version: u5::try_from_u8(0).expect("0<32"),
                    program: script.as_bytes()[2..22].to_vec(),
                }
            } else if script.is_v0_p2wsh() {
                // OP_0 <push 32>: program bytes at 2..34.
                Payload::WitnessProgram {
                    version: u5::try_from_u8(0).expect("0<32"),
                    program: script.as_bytes()[2..34].to_vec(),
                }
            } else {
                return None;
            },
            blinding_pubkey: blinder,
            params: params,
        })
    }

    /// Generates a script pubkey spending to this address
    pub fn script_pubkey(&self) -> script::Script {
        match self.payload {
            Payload::PubkeyHash(ref hash) => script::Builder::new()
                .push_opcode(opcodes::all::OP_DUP)
                .push_opcode(opcodes::all::OP_HASH160)
                .push_slice(&hash[..])
                .push_opcode(opcodes::all::OP_EQUALVERIFY)
                .push_opcode(opcodes::all::OP_CHECKSIG),
            Payload::ScriptHash(ref hash) => script::Builder::new()
                .push_opcode(opcodes::all::OP_HASH160)
                .push_slice(&hash[..])
                .push_opcode(opcodes::all::OP_EQUAL),
            Payload::WitnessProgram {
                version: witver,
                program: ref witprog,
                // push_int maps small integers onto the corresponding
                // OP_n opcodes, as required for segwit outputs.
            } => script::Builder::new().push_int(witver.to_u8() as i64).push_slice(&witprog),
        }
        .into_script()
    }

    /// Decode a bech32 (unblinded) or blech32 (blinded) address.
    ///
    /// The 5-bit payload is `<witness version> <program...>`. For blinded
    /// addresses the converted 8-bit data starts with the 33-byte blinding
    /// pubkey, followed by the actual witness program.
    fn from_bech32(
        s: &str,
        blinded: bool,
        params: &'static AddressParams,
    ) -> Result<Address, AddressError> {
        let payload = if !blinded {
            bech32::decode(s).map_err(AddressError::Bech32)?.1
        } else {
            blech32::decode(s).map_err(AddressError::Blech32)?.1
        };
        if payload.len() == 0 {
            return Err(AddressError::InvalidAddress(s.to_owned()));
        }
        // Get the script version and program (converted from 5-bit to 8-bit)
        let (version, data) = {
            let (v, p5) = payload.split_at(1);
            let data_res = Vec::from_base32(p5);
            if let Err(e) = data_res {
                // Report the error under the encoding that was actually used.
                return Err(match blinded {
                    true => AddressError::Blech32(e),
                    false => AddressError::Bech32(e),
                });
            }
            (v[0], data_res.unwrap())
        };
        if version.to_u8() > 16 {
            return Err(AddressError::InvalidWitnessVersion);
        }
        // Segwit version specific checks.
        if version.to_u8() != 0 {
            return Err(AddressError::UnsupportedWitnessVersion(version.to_u8()));
        }
        // v0 programs are 20 (p2wpkh) or 32 (p2wsh) bytes; blinded variants
        // carry an extra 33-byte blinding pubkey up front (53 or 65 total).
        if !blinded && version.to_u8() == 0 && data.len() != 20 && data.len() != 32 {
            return Err(AddressError::InvalidWitnessProgramLength);
        }
        if blinded && version.to_u8() == 0 && data.len() != 53 && data.len() != 65 {
            return Err(AddressError::InvalidWitnessProgramLength);
        }
        let (blinding_pubkey, program) = match blinded {
            true => (
                Some(
                    secp256k1::PublicKey::from_slice(&data[..33])
                        .map_err(AddressError::InvalidBlindingPubKey)?,
                ),
                data[33..].to_vec(),
            ),
            false => (None, data),
        };
        Ok(Address {
            params: params,
            payload: Payload::WitnessProgram {
                version: version,
                program: program,
            },
            blinding_pubkey: blinding_pubkey,
        })
    }

    // data.len() should be >= 1 when this method is called
    fn from_base58(data: &[u8], params: &'static AddressParams) -> Result<Address, AddressError> {
        // When unblinded, the structure is:
        // <1: regular prefix> <20: hash160>
        // When blinded, the structure is:
        // <1: blinding prefix> <1: regular prefix> <33: blinding pubkey> <20: hash160>
        let (blinded, prefix) = match data[0] == params.blinded_prefix {
            true => {
                if data.len() != 55 {
                    return Err(base58::Error::InvalidLength(data.len()))?;
                }
                (true, data[1])
            }
            false => {
                if data.len() != 21 {
                    return Err(base58::Error::InvalidLength(data.len()))?;
                }
                (false, data[0])
            }
        };
        let (blinding_pubkey, payload_data) = match blinded {
            true => (
                Some(
                    secp256k1::PublicKey::from_slice(&data[2..35])
                        .map_err(AddressError::InvalidBlindingPubKey)?,
                ),
                &data[35..],
            ),
            false => (None, &data[1..]),
        };
        // The regular prefix selects between p2pkh and p2sh payloads; the
        // length checks above guarantee the 20-byte slice, so `from_slice`
        // cannot fail here.
        let payload = if prefix == params.p2pkh_prefix {
            Payload::PubkeyHash(hash160::Hash::from_slice(payload_data).unwrap())
        } else if prefix == params.p2sh_prefix {
            Payload::ScriptHash(hash160::Hash::from_slice(payload_data).unwrap())
        } else {
            return Err(base58::Error::InvalidVersion(vec![prefix]))?;
        };
        Ok(Address {
            params: params,
            payload: payload,
            blinding_pubkey: blinding_pubkey,
        })
    }

    /// Parse the address using the given parameters.
    /// When using the built-in parameters, you can use [FromStr].
    pub fn parse_with_params(
        s: &str,
        params: &'static AddressParams,
    ) -> Result<Address, AddressError> {
        // Bech32.
        let prefix = find_prefix(s);
        let b32_ex = match_prefix(prefix, params.bech_hrp);
        let b32_bl = match_prefix(prefix, params.blech_hrp);
        if b32_ex || b32_bl {
            return Address::from_bech32(s, b32_bl, params);
        }
        // Base58.
        // Reject oversized input before decoding; 11/15 roughly converts a
        // base58 character count into the decoded byte count for the error.
        if s.len() > 150 {
            return Err(base58::Error::InvalidLength(s.len() * 11 / 15))?;
        }
        let data = base58::from_check(s)?;
        Address::from_base58(&data, params)
    }
}
impl fmt::Display for Address {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self.payload {
            Payload::PubkeyHash(ref hash) => {
                if let Some(ref blinder) = self.blinding_pubkey {
                    // Blinded base58check layout:
                    // <blinding prefix> <p2pkh prefix> <33: blinding pubkey> <20: hash160>
                    let mut prefixed = [0; 55]; // 1 + 1 + 33 + 20
                    prefixed[0] = self.params.blinded_prefix;
                    prefixed[1] = self.params.p2pkh_prefix;
                    prefixed[2..35].copy_from_slice(&blinder.serialize());
                    prefixed[35..].copy_from_slice(&hash[..]);
                    base58::check_encode_slice_to_fmt(fmt, &prefixed[..])
                } else {
                    // Unblinded: <p2pkh prefix> <20: hash160>
                    let mut prefixed = [0; 21];
                    prefixed[0] = self.params.p2pkh_prefix;
                    prefixed[1..].copy_from_slice(&hash[..]);
                    base58::check_encode_slice_to_fmt(fmt, &prefixed[..])
                }
            }
            Payload::ScriptHash(ref hash) => {
                if let Some(ref blinder) = self.blinding_pubkey {
                    // Same layout as the p2pkh case, but with the p2sh prefix.
                    let mut prefixed = [0; 55]; // 1 + 1 + 33 + 20
                    prefixed[0] = self.params.blinded_prefix;
                    prefixed[1] = self.params.p2sh_prefix;
                    prefixed[2..35].copy_from_slice(&blinder.serialize());
                    prefixed[35..].copy_from_slice(&hash[..]);
                    base58::check_encode_slice_to_fmt(fmt, &prefixed[..])
                } else {
                    let mut prefixed = [0; 21];
                    prefixed[0] = self.params.p2sh_prefix;
                    prefixed[1..].copy_from_slice(&hash[..]);
                    base58::check_encode_slice_to_fmt(fmt, &prefixed[..])
                }
            }
            Payload::WitnessProgram {
                version: witver,
                program: ref witprog,
            } => {
                // Blinded segwit addresses use the blech32 HRP, plain ones bech32.
                let hrp = match self.blinding_pubkey.is_some() {
                    true => self.params.blech_hrp,
                    false => self.params.bech_hrp,
                };
                if self.is_blinded() {
                    // 5-bit payload: <version> || base32(<blinding pubkey> || <program>)
                    let mut data = Vec::with_capacity(53);
                    if let Some(ref blinder) = self.blinding_pubkey {
                        data.extend_from_slice(&blinder.serialize());
                    }
                    data.extend_from_slice(&witprog);
                    let mut b32_data = vec![witver];
                    b32_data.extend_from_slice(&data.to_base32());
                    blech32::encode_to_fmt(fmt, &hrp, &b32_data)
                } else {
                    // Stream directly into the formatter to avoid building
                    // an intermediate string.
                    let mut bech32_writer = bech32::Bech32Writer::new(hrp, fmt)?;
                    bech32::WriteBase32::write_u5(&mut bech32_writer, witver)?;
                    bech32::ToBase32::write_base32(&witprog, &mut bech32_writer)
                }
            }
        }
    }
}
impl fmt::Debug for Address {
    // Debug intentionally mirrors Display: the encoded address string is
    // the most useful diagnostic representation.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}", self)
    }
}
/// Extract the bech32 prefix (the human-readable part).
///
/// Returns the same slice when no prefix is found.
fn find_prefix(bech32: &str) -> &str {
    // Split at the last occurrence of the separator character '1'; bech32
    // allows '1' inside the human-readable part, so the last one is the
    // separator. Using a char pattern avoids the substring-search overhead
    // of `rfind("1")`.
    match bech32.rfind('1') {
        None => bech32,
        Some(sep) => bech32.split_at(sep).0,
    }
}
/// Checks if both prefixes match, regardless of the case of the first one.
///
/// The first prefix can be mixed case, but the second one is expected in
/// lower case.
fn match_prefix(prefix_mixed: &str, prefix_lower: &str) -> bool {
    // Lengths must agree; then compare character-wise, lowercasing only
    // the mixed-case side (ASCII only, matching bech32's charset rules).
    prefix_lower.len() == prefix_mixed.len()
        && prefix_lower
            .chars()
            .zip(prefix_mixed.chars())
            .all(|(lower, mixed)| lower == mixed.to_ascii_lowercase())
}
impl FromStr for Address {
type Err = AddressError;
fn from_str(s: &str) -> Result<Address, AddressError> {
// shorthands
let liq = &AddressParams::LIQUID;
let ele = &AddressParams::ELEMENTS;
// Bech32.
let prefix = find_prefix(s);
if match_prefix(prefix, liq.bech_hrp) {
return Address::from_bech32(s, false, liq);
}
if match_prefix(prefix, liq.blech_hrp) {
return Address::from_bech32(s, true, liq);
}
if match_prefix(prefix, ele.bech_hrp) {
return Address::from_bech32(s, false, ele);
}
if match_prefix(prefix, ele.blech_hrp) {
return Address::from_bech32(s, true, ele);
}
// Base58.
if s.len() > 150 {
return Err(base58::Error::InvalidLength(s.len() * 11 / 15))?;
}
let data = base58::from_check(s)?;
if data.len() < 1 {
return Err(base58::Error::InvalidLength(data.len()))?;
}
let p = data[0];
if p == liq.p2pkh_prefix || p == liq.p2sh_prefix || p == liq.blinded_prefix {
return Address::from_base58(&data, liq);
}
if p == ele.p2pkh_prefix || p == ele.p2sh_prefix || p == ele.blinded_prefix {
return Address::from_base58(&data, ele);
}
Err(AddressError::InvalidAddress(s.to_owned()))
}
}
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for Address {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use std::fmt::Formatter;

        // Visitor that parses the string form via `Address::from_str`.
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = Address;
            fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
                formatter.write_str("a Bitcoin address")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Address::from_str(v).map_err(E::custom)
            }
            // Borrowed and owned string inputs both delegate to `visit_str`.
            fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                self.visit_str(v)
            }
            fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                self.visit_str(&v)
            }
        }
        deserializer.deserialize_str(Visitor)
    }
}
#[cfg(feature = "serde")]
impl serde::Serialize for Address {
    /// Serializes the address as its string (Display) representation.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // `collect_str` streams the `Display` output into the serializer
        // without materializing an intermediate `String`.
        serializer.collect_str(self)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use bitcoin::util::key;
    use bitcoin::Script;
    use bitcoin::secp256k1::{PublicKey, Secp256k1};
    #[cfg(feature = "serde")]
    use serde_json;

    /// Asserts that an address survives string, script and (when enabled)
    /// serde round-trips unchanged.
    fn roundtrips(addr: &Address) {
        assert_eq!(
            Address::from_str(&addr.to_string()).ok().as_ref(),
            Some(addr),
            "string round-trip failed for {}",
            addr,
        );
        assert_eq!(
            Address::from_script(&addr.script_pubkey(), addr.blinding_pubkey, addr.params).as_ref(),
            Some(addr),
            "script round-trip failed for {}",
            addr,
        );
        #[cfg(feature = "serde")]
        assert_eq!(
            serde_json::from_value::<Address>(serde_json::to_value(&addr).unwrap()).ok().as_ref(),
            Some(addr)
        );
    }

    /// Round-trips every constructor, blinded and unblinded, on both
    /// built-in parameter sets.
    #[test]
    fn exhaustive() {
        let blinder_hex = "0218845781f631c48f1c9709e23092067d06837f30aa0cd0544ac887fe91ddd166";
        let blinder = PublicKey::from_str(blinder_hex).unwrap();
        let sk_wif = "cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy";
        let sk = key::PrivateKey::from_wif(sk_wif).unwrap();
        let pk = sk.public_key(&Secp256k1::new());
        let script: Script = vec![1u8, 2, 42, 255, 196].into();
        let vectors = [
            /* #00 */ Address::p2pkh(&pk, None, &AddressParams::LIQUID),
            /* #01 */ Address::p2pkh(&pk, None, &AddressParams::ELEMENTS),
            /* #02 */ Address::p2pkh(&pk, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #03 */ Address::p2pkh(&pk, Some(blinder.clone()), &AddressParams::ELEMENTS),
            /* #04 */ Address::p2sh(&script, None, &AddressParams::LIQUID),
            /* #05 */ Address::p2sh(&script, None, &AddressParams::ELEMENTS),
            /* #06 */ Address::p2sh(&script, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #07 */
            Address::p2sh(&script, Some(blinder.clone()), &AddressParams::ELEMENTS),
            /* #08 */ Address::p2wpkh(&pk, None, &AddressParams::LIQUID),
            /* #09 */ Address::p2wpkh(&pk, None, &AddressParams::ELEMENTS),
            /* #10 */ Address::p2wpkh(&pk, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #11 */ Address::p2wpkh(&pk, Some(blinder.clone()), &AddressParams::ELEMENTS),
            /* #12 */ Address::p2shwpkh(&pk, None, &AddressParams::LIQUID),
            /* #13 */ Address::p2shwpkh(&pk, None, &AddressParams::ELEMENTS),
            /* #14 */ Address::p2shwpkh(&pk, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #15 */
            Address::p2shwpkh(&pk, Some(blinder.clone()), &AddressParams::ELEMENTS),
            /* #16 */ Address::p2wsh(&script, None, &AddressParams::LIQUID),
            /* #17 */ Address::p2wsh(&script, None, &AddressParams::ELEMENTS),
            /* #18 */ Address::p2wsh(&script, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #19 */
            Address::p2wsh(&script, Some(blinder.clone()), &AddressParams::ELEMENTS),
            /* #20 */ Address::p2shwsh(&script, None, &AddressParams::LIQUID),
            /* #21 */ Address::p2shwsh(&script, None, &AddressParams::ELEMENTS),
            /* #22 */
            Address::p2shwsh(&script, Some(blinder.clone()), &AddressParams::LIQUID),
            /* #23 */
            Address::p2shwsh(&script, Some(blinder.clone()), &AddressParams::ELEMENTS),
        ];
        for addr in &vectors {
            roundtrips(addr);
        }
    }

    /// Parses fixed, externally-generated addresses and checks network,
    /// blinding detection, and re-encoding.
    #[test]
    fn test_actuals() {
        // vectors: (address, blinded?, params)
        let addresses = [
            // Elements
            ("2dxmEBXc2qMYcLSKiDBxdEePY3Ytixmnh4E", false, AddressParams::ELEMENTS),
            ("CTEo6VKG8xbe7HnfVW9mQoWTgtgeRSPktwTLbELzGw5tV8Ngzu53EBiasFMQKVbWmKWWTAdN5AUf4M6Y", true, AddressParams::ELEMENTS),
            ("ert1qwhh2n5qypypm0eufahm2pvj8raj9zq5c27cysu", false, AddressParams::ELEMENTS),
            ("el1qq0umk3pez693jrrlxz9ndlkuwne93gdu9g83mhhzuyf46e3mdzfpva0w48gqgzgrklncnm0k5zeyw8my2ypfsmxh4xcjh2rse", true, AddressParams::ELEMENTS),
            // Liquid
            ("GqiQRsPEyJLAsEBFB5R34KHuqxDNkG3zur", false, AddressParams::LIQUID),
            ("VJLDwMVWXg8RKq4mRe3YFNTAEykVN6V8x5MRUKKoC3nfRnbpnZeiG3jygMC6A4Gw967GY5EotJ4Rau2F", true, AddressParams::LIQUID),
            ("ex1q7gkeyjut0mrxc3j0kjlt7rmcnvsh0gt45d3fud", false, AddressParams::LIQUID),
            ("lq1qqf8er278e6nyvuwtgf39e6ewvdcnjupn9a86rzpx655y5lhkt0walu3djf9cklkxd3ryld97hu8h3xepw7sh2rlu7q45dcew5", true, AddressParams::LIQUID),
        ];
        for &(a, blinded, ref params) in &addresses {
            let result = a.parse();
            assert!(result.is_ok(), "vector: {}, err: \"{}\"", a, result.unwrap_err());
            let addr: Address = result.unwrap();
            assert_eq!(a, &addr.to_string(), "vector: {}", a);
            assert_eq!(blinded, addr.is_blinded());
            assert_eq!(params, addr.params);
            roundtrips(&addr);
        }
    }
}
| 36.200262 | 149 | 0.557472 |
f8fca8a2b1cf00cdf643b72ca04eff9199070724 | 1,358 | use failure::Fail;
use serde::Serialize;
use ara_error::{ApiError, BoxedError};
use ara_model::core::{User, UserCredential};
use ara_model::db::tx;
use crate::shared::{sha256_hex, PlainContext};
/// Activates the user account matching the given activation token.
///
/// The token is hashed (SHA-256, hex) before lookup, so only the hash is
/// ever compared against stored credentials. The whole operation runs
/// inside a database transaction via `tx`.
///
/// Fails with `InvalidToken` when no credential matches the token, and
/// with `AlreadyActivated` when the credential was already activated.
pub fn activate(context: &dyn PlainContext, token: &str) -> Result<(), ActivationError> {
    tx(context.db(), |conn| {
        let token_hash = sha256_hex(token.as_bytes());
        let (user, uc) = UserCredential::find_by_activation_key(conn, &token_hash)?
            .ok_or_else(|| ActivationErrorKind::InvalidToken)?; // token or user does not exist
        if uc.activated {
            Err(ActivationErrorKind::AlreadyActivated)?;
        }
        User::activate(conn, user.id)?;
        // NOTE(review): token cleanup is disabled — confirm whether used
        // activation keys should be deleted here.
        //ut.delete(conn).context(ActivationErrorKind::Internal)?;
        Ok(())
    })
}
/// Error kinds produced by account activation.
///
/// The `ApiError` derive maps each variant to an HTTP status via the
/// `#[api_error(http(...))]` attribute.
#[derive(Debug, Serialize, Fail, ApiError)]
pub enum ActivationErrorKind {
    /// The activation token does not match any stored credential.
    #[fail(display = "Invalid activation token")]
    #[api_error(http(400))]
    InvalidToken,
    /// NOTE(review): not produced by `activate` in this file — confirm
    /// which caller raises it.
    #[fail(display = "Account is currently locked")]
    #[api_error(http(400))]
    AccountLocked,
    /// NOTE(review): overlaps with `AlreadyActivated`; consider unifying
    /// the two variants.
    #[fail(display = "Account is already active")]
    #[api_error(http(400))]
    AlreadyActive,
    /// The credential has already been activated.
    #[fail(display = "Account is already activated")]
    #[api_error(http(400))]
    AlreadyActivated,
    /// Catch-all for internal failures (mapped from `Error`, HTTP 500).
    #[fail(display = "{}", _0)]
    #[api_error(map_from(Error), http(500))]
    Internal(BoxedError),
}
| 30.863636 | 95 | 0.648012 |
dd9740a41e142addd3367ec87d267d274f1cf424 | 14,168 | //!
//! Serialize a Rust data structure into a `JsValue`
//!
use errors::Error;
use errors::ErrorKind;
use errors::Result as LibResult;
use neon::prelude::*;
use serde::ser::{self, Serialize};
use std::marker::PhantomData;
use num;
/// Numeric cast helper: converts `n` into `OutT`, failing with a
/// `CastError` when the value cannot be represented in the target type.
fn as_num<T: num::cast::NumCast, OutT: num::cast::NumCast>(n: T) -> LibResult<OutT> {
    // `num::cast` returns `None` when the conversion is not possible.
    if let Some(converted) = num::cast::<T, OutT>(n) {
        Ok(converted)
    } else {
        bail!(ErrorKind::CastError)
    }
}
/// Converts a value of type `V` to a `JsValue`
///
/// # Errors
///
/// * `NumberCastError` trying to serialize a `u64` can fail if it overflows in a cast to `f64`
/// * `StringTooLong` if the string exceeds v8's max string size
///
#[inline]
pub fn to_value<'j, C, V>(cx: &mut C, value: &V) -> NeonResult<Handle<'j, JsValue>>
where
    C: Context<'j>,
    V: Serialize + ?Sized,
{
    let serializer = Serializer {
        cx,
        ph: PhantomData,
    };
    match value.serialize(serializer) {
        Ok(serialized_value) => Ok(serialized_value),
        // Translate the library error into a JS exception on the context.
        Err(err) => err.to_neon(cx),
    }
}
#[doc(hidden)]
/// Root serde serializer producing Neon JS handles.
pub struct Serializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    // Neon execution context used to allocate JS values.
    cx: &'a mut C,
    // Ties the serializer to the JS handle lifetime `'j`.
    ph: PhantomData<&'j ()>,
}
#[doc(hidden)]
/// Serializes sequences/tuples by appending into a JS array.
pub struct ArraySerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    // Target array; elements are appended at its current length.
    array: Handle<'j, JsArray>,
}
#[doc(hidden)]
/// Serializes a tuple enum variant as `{ "Variant": [elements...] }`.
pub struct TupleVariantSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    // Outer `{ variant: ... }` wrapper object. (sic: "outter")
    outter_object: Handle<'j, JsObject>,
    // Serializes the inner element array.
    inner: ArraySerializer<'a, 'j, C>,
}
#[doc(hidden)]
/// Serializes maps into a JS object.
pub struct MapSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    object: Handle<'j, JsObject>,
    // Scratch object holding the current key between serde's separate
    // `serialize_key` and `serialize_value` calls.
    key_holder: Handle<'j, JsObject>,
}
#[doc(hidden)]
/// Serializes structs into a JS object, one property per field.
pub struct StructSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    object: Handle<'j, JsObject>,
}
#[doc(hidden)]
/// Serializes a struct enum variant as `{ "Variant": { fields... } }`.
pub struct StructVariantSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    // Outer `{ variant: ... }` wrapper object.
    outer_object: Handle<'j, JsObject>,
    // Serializes the inner field object.
    inner: StructSerializer<'a, 'j, C>,
}
#[doc(hidden)]
impl<'a, 'j, C> ser::Serializer for Serializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    type SerializeSeq = ArraySerializer<'a, 'j, C>;
    type SerializeTuple = ArraySerializer<'a, 'j, C>;
    type SerializeTupleStruct = ArraySerializer<'a, 'j, C>;
    type SerializeTupleVariant = TupleVariantSerializer<'a, 'j, C>;
    type SerializeMap = MapSerializer<'a, 'j, C>;
    type SerializeStruct = StructSerializer<'a, 'j, C>;
    type SerializeStructVariant = StructVariantSerializer<'a, 'j, C>;

    #[inline]
    fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
        Ok(JsBoolean::new(self.cx, v).upcast())
    }

    // All integer types become JS numbers (f64). `as_num` fails with a
    // cast error when the conversion to f64 is not possible; note that a
    // successful cast of a wide integer (i64/u64/i128/u128) may still be
    // lossy, since f64 has 53 bits of mantissa.
    #[inline]
    fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_i128(self, v: i128) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }
    #[inline]
    fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, v).upcast())
    }

    // A char becomes a 1..4-byte UTF-8 JS string.
    fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
        let mut b = [0; 4];
        let result = v.encode_utf8(&mut b);
        let js_str = JsString::try_new(self.cx, result)
            .map_err(|_| ErrorKind::StringTooLongForChar(4))?;
        Ok(js_str.upcast())
    }

    #[inline]
    fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
        let len = v.len();
        // Fails when the string exceeds the JS engine's max string size.
        let js_str = JsString::try_new(self.cx, v).map_err(|_| ErrorKind::StringTooLong(len))?;
        Ok(js_str.upcast())
    }

    // Byte slices become a Node Buffer of the same length.
    #[inline]
    fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok, Self::Error> {
        let mut buff = JsBuffer::new(self.cx, as_num::<_, u32>(v.len())?)?;
        self.cx.borrow_mut(&mut buff, |buff| buff.as_mut_slice().clone_from_slice(v));
        Ok(buff.upcast())
    }

    // `None`, `()` and unit structs all map to JS `null`.
    #[inline]
    fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }
    #[inline]
    fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }
    #[inline]
    fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }
    #[inline]
    fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }

    // A unit variant is represented by its name as a string.
    #[inline]
    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
    ) -> Result<Self::Ok, Self::Error> {
        self.serialize_str(variant)
    }

    // Newtype structs are transparent: serialize the wrapped value.
    #[inline]
    fn serialize_newtype_struct<T: ?Sized>(
        self,
        _name: &'static str,
        value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }

    // A newtype variant becomes `{ "Variant": value }`.
    #[inline]
    fn serialize_newtype_variant<T: ?Sized>(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        let obj = JsObject::new(&mut *self.cx);
        let value_js = to_value(self.cx, value)?;
        obj.set(self.cx, variant, value_js)?;
        Ok(obj.upcast())
    }

    // Sequences, tuples and tuple structs all serialize into a JS array.
    #[inline]
    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }
    #[inline]
    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }
    #[inline]
    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }
    #[inline]
    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        TupleVariantSerializer::new(self.cx, variant)
    }
    // Maps, structs and struct variants all serialize into a JS object.
    #[inline]
    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        Ok(MapSerializer::new(self.cx))
    }
    #[inline]
    fn serialize_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        Ok(StructSerializer::new(self.cx))
    }
    #[inline]
    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        StructVariantSerializer::new(self.cx, variant)
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    // Starts from an empty JS array; elements are appended as they come.
    #[inline]
    fn new(cx: &'a mut C) -> Self {
        let array = JsArray::new(cx, 0);
        ArraySerializer { cx, array }
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeSeq for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    // Appends each serialized element at the array's current length.
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let value = to_value(self.cx, value)?;
        let arr: Handle<'j, JsArray> = self.array;
        let len = arr.len(self.cx);
        arr.set(self.cx, len, value)?;
        Ok(())
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.array.upcast())
    }
}
// Tuples reuse the sequence machinery: same array, same append logic.
impl<'a, 'j, C> ser::SerializeTuple for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    #[inline]
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }
    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        ser::SerializeSeq::end(self)
    }
}
#[doc(hidden)]
// Tuple structs also delegate to the sequence implementation.
impl<'a, 'j, C> ser::SerializeTupleStruct for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    #[inline]
    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }
    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        ser::SerializeSeq::end(self)
    }
}
#[doc(hidden)]
impl<'a, 'j, C> TupleVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    // A tuple variant `E::V(a, b)` is represented as `{ "V": [a, b] }`:
    // the inner array is stored under the variant key on the outer object
    // up front, then filled element by element.
    fn new(cx: &'a mut C, key: &'static str) -> LibResult<Self> {
        let inner_array = JsArray::new(cx, 0);
        let outter_object = JsObject::new(cx);
        outter_object.set(cx, key, inner_array)?;
        Ok(TupleVariantSerializer {
            outter_object,
            inner: ArraySerializer {
                cx,
                array: inner_array,
            },
        })
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeTupleVariant for TupleVariantSerializer<'a, 'j, C>
where
C: Context<'j>,
{
type Ok = Handle<'j, JsValue>;
type Error = Error;
#[inline]
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: Serialize,
{
use serde::ser::SerializeSeq;
self.inner.serialize_element(value)
}
#[inline]
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.outter_object.upcast())
}
}
#[doc(hidden)]
impl<'a, 'j, C> MapSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    // `object` accumulates the map entries; `key_holder` is a scratch JS object
    // used to park the most recent key between the separate `serialize_key` /
    // `serialize_value` calls serde makes.
    fn new(cx: &'a mut C) -> Self {
        let object = JsObject::new(cx);
        let key_holder = JsObject::new(cx);
        MapSerializer {
            cx,
            object,
            key_holder,
        }
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeMap for MapSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    // serde drives maps as alternating key/value calls; stash the converted key
    // on `key_holder` until the matching value arrives.
    fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let key = to_value(self.cx, key)?;
        self.key_holder.set(self.cx, "key", key)?;
        Ok(())
    }
    fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        // Retrieve the key stashed by the preceding `serialize_key` call.
        let key: Handle<'j, JsValue> = self.key_holder.get(&mut *self.cx, "key")?;
        let value_obj = to_value(self.cx, value)?;
        self.object.set(self.cx, key, value_obj)?;
        Ok(())
    }
    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.object.upcast())
    }
}
#[doc(hidden)]
impl<'a, 'j, C> StructSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    #[inline]
    fn new(cx: &'a mut C) -> Self {
        // Structs serialize as plain JS objects keyed by field name.
        let object = JsObject::new(cx);
        StructSerializer { cx, object }
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeStruct for StructSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    #[inline]
    // Converts the field value and assigns it to `object[key]`.
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let value = to_value(self.cx, value)?;
        self.object.set(self.cx, key, value)?;
        Ok(())
    }
    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.object.upcast())
    }
}
#[doc(hidden)]
impl<'a, 'j, C> StructVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    /// Builds a serializer for a struct enum variant: the variant's fields are
    /// written onto an inner object, which is stored under `key` (the variant
    /// name) on a wrapper object that becomes the final result.
    fn new(cx: &'a mut C, key: &'static str) -> LibResult<Self> {
        let fields = JsObject::new(cx);
        let wrapper = JsObject::new(cx);
        wrapper.set(cx, key, fields)?;
        Ok(StructVariantSerializer {
            outer_object: wrapper,
            inner: StructSerializer { cx, object: fields },
        })
    }
}
#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeStructVariant for StructVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;
    #[inline]
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        use serde::ser::SerializeStruct;
        // Fields land on the inner object (which is stored under the variant name).
        self.inner.serialize_field(key, value)
    }
    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        // The result is the `{ "VariantName": { ...fields } }` wrapper object.
        Ok(self.outer_object.upcast())
    }
}
| 24.469775 | 95 | 0.5583 |
de41cf1b8959f1564aead3194c943e73886e285e | 2,233 | const RI_REG_MODE: u32 = 0x0470_0000;
const RI_REG_CONFIG: u32 = 0x0470_0004;
const RI_REG_CURRENT_LOAD: u32 = 0x0470_0008;
const RI_REG_SELECT: u32 = 0x0470_000C;
const RI_REG_REFRESH: u32 = 0x0470_0010;
const RI_REG_LATENCY: u32 = 0x0470_0014;
const RI_REG_RERROR: u32 = 0x0470_0018;
const RI_REG_WERROR: u32 = 0x0470_001C;
pub struct RI {
mode: u32,
config: u32,
current_load: u32,
select: u32,
refresh: u32,
latency: u32,
rerror: u32,
werror: u32
}
impl RI {
pub fn new() -> RI {
RI {
mode: 0,
config: 0,
current_load: 0,
select: 0,
refresh: 0,
latency: 0,
rerror: 0,
werror: 0
}
}
/* Reads from the RI's registers. */
pub fn rreg(&self, reg: u32) -> u32 {
match reg {
RI_REG_MODE => {
self.mode
}, RI_REG_CONFIG => {
self.config
}, RI_REG_CURRENT_LOAD => {
self.current_load
}, RI_REG_SELECT => {
self.select
}, RI_REG_REFRESH => {
self.refresh
}, RI_REG_LATENCY => {
self.latency
}, RI_REG_RERROR => {
self.rerror
}, RI_REG_WERROR => {
self.werror
}, _ => panic!("Read from unrecognized RI register address: {:#x}", reg)
}
}
/* Writes to the RI's registers. */
pub fn wreg(&mut self, reg: u32, value: u32) {
match reg {
RI_REG_MODE => {
self.mode = value
}, RI_REG_CONFIG => {
self.config = value
}, RI_REG_CURRENT_LOAD => {
self.current_load = value
}, RI_REG_SELECT => {
self.select = value
}, RI_REG_REFRESH => {
self.refresh = value
}, RI_REG_LATENCY => {
self.latency = value
}, RI_REG_RERROR => {
self.rerror = value
}, RI_REG_WERROR => {
self.werror = value
}, _ => panic!("Write to unrecognized RI register address: {:#x}", reg)
}
}
}
| 27.567901 | 84 | 0.479624 |
14d6508ebc6d5f386295a5d371c50fb1768c94df | 2,321 | use intern::intern;
use grammar::repr::*;
use lr1::lookahead::{Token, TokenSet};
use lr1::lookahead::Token::EOF;
use lr1::tls::Lr1Tls;
use test_util::{normalized_grammar};
use super::FirstSets;
/// Wraps a string as a grammar nonterminal symbol (interned).
pub fn nt(t: &str) -> Symbol {
    Symbol::Nonterminal(NonterminalString(intern(t)))
}
/// Wraps a string as a quoted terminal symbol (interned).
pub fn term(t: &str) -> Symbol {
    Symbol::Terminal(TerminalString::quoted(intern(t)))
}
/// Shorthand for a quoted-terminal lookahead token.
fn la(t: &str) -> Token {
    Token::Terminal(TerminalString::quoted(intern(t)))
}
/// Computes FIRST0(symbols) and flattens the token set into a Vec so the
/// tests below can compare it against literal vectors.
fn first0(first: &FirstSets,
          symbols: &[Symbol])
          -> Vec<Token>
{
    let v = first.first0(symbols);
    v.iter().collect()
}
/// Computes FIRST1(symbols) under the single-token `lookahead` context and
/// flattens the result into a Vec for assertions.
fn first1(first: &FirstSets,
          symbols: &[Symbol],
          lookahead: Token)
          -> Vec<Token>
{
    let v = first.first1(symbols, TokenSet::from(lookahead));
    v.iter().collect()
}
#[test]
fn basic_first1() {
    // Grammar: A -> B "C"; B is nullable ("D" or epsilon); X is unreachable.
    let grammar = normalized_grammar(r#"
grammar;
A = B "C";
B: Option<u32> = {
"D" => Some(1),
=> None
};
X = "E"; // intentionally unreachable
"#);
    let _lr1_tls = Lr1Tls::install(grammar.terminals.clone());
    let first_sets = FirstSets::new(&grammar);
    // A starts with B: either "D" (B present) or, since B is nullable, "C".
    assert_eq!(
        first1(&first_sets, &[nt("A")], EOF),
        vec![la("C"), la("D")]);
    // B alone is nullable, so the lookahead (EOF) shows through.
    assert_eq!(
        first1(&first_sets, &[nt("B")], EOF),
        vec![la("D"), EOF]);
    // A trailing terminal masks the lookahead when B derives epsilon.
    assert_eq!(
        first1(&first_sets, &[nt("B"), term("E")], EOF),
        vec![la("D"), la("E")]);
    assert_eq!(
        first1(&first_sets, &[nt("B"), nt("X")], EOF),
        vec![la("D"), la("E")]);
}
#[test]
fn basic_first0() {
    // Same grammar as `basic_first1`, exercised without lookahead context.
    let grammar = normalized_grammar(r#"
grammar;
A = B "C";
B: Option<u32> = {
"D" => Some(1),
=> None
};
X = "E"; // intentionally unreachable
"#);
    let _lr1_tls = Lr1Tls::install(grammar.terminals.clone());
    let first_sets = FirstSets::new(&grammar);
    assert_eq!(
        first0(&first_sets, &[nt("A")]),
        vec![la("C"), la("D")]);
    // With no lookahead, a nullable-only suffix is reported as EOF.
    assert_eq!(
        first0(&first_sets, &[nt("B")]),
        vec![la("D"), EOF]);
    assert_eq!(
        first0(&first_sets, &[nt("B"), term("E")]),
        vec![la("D"), la("E")]);
    assert_eq!(
        first0(&first_sets, &[nt("B"), nt("X")]),
        vec![la("D"), la("E")]);
    assert_eq!(
        first0(&first_sets, &[nt("X")]),
        vec![la("E")]);
}
| 22.533981 | 62 | 0.525636 |
3304a7b614604acf7ac75266beb0c6a1598fabfa | 8,058 | use protobuf::RepeatedField;
use reqwest::Response;
/// Thin HTTP client used to talk to the local GameLift agent.
pub struct HttpClient {
    // Base URI of the local agent (set to http://localhost:5758/ by `Default`).
    uri: reqwest::Url,
    // Reusable reqwest client preconfigured with the required default headers.
    http_client: reqwest::Client,
}
impl Default for HttpClient {
    /// Builds a client targeting the local GameLift agent with the headers every
    /// request needs: `Accept: application/json` and the current process id in
    /// the `gamelift-server-pid` header.
    fn default() -> Self {
        use reqwest::header;
        let mut headers = header::HeaderMap::new();
        headers.insert(header::ACCEPT, header::HeaderValue::from_static("application/json"));
        headers.insert(
            header::HeaderName::from_static("gamelift-server-pid"),
            header::HeaderValue::from_str(std::process::id().to_string().as_str())
                .expect("Cannot parse a gamelift-server-pid header value"),
        );
        Self {
            // The agent listens on a fixed local port.
            uri: reqwest::Url::parse("http://localhost:5758/")
                .expect("Cannot parse GameLift Server URI"),
            http_client: reqwest::ClientBuilder::new()
                .default_headers(headers)
                .build()
                .expect("Cannot build HTTP client"),
        }
    }
}
impl HttpClient {
async fn send<T>(&self, message: T) -> Result<Response, crate::error::GameLiftErrorType>
where
T: protobuf::Message,
{
let message_as_bytes = message.write_to_bytes().unwrap();
let message_header =
get_message_type(&message).expect("Cannot extract the message header").to_string();
log::debug!("Message name: {}", message_header);
self.http_client
.post(self.uri.clone())
.header("gamelift-target", message_header)
.body(message_as_bytes)
.send()
.await
.map_err(|error| {
if error.status().is_some() && error.status().unwrap().is_server_error() {
crate::error::GameLiftErrorType::InternalServiceError
} else {
crate::error::GameLiftErrorType::BadRequest
}
})
}
pub async fn process_ready(
&self,
port: i32,
log_paths_to_upload: Vec<String>,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::ProcessReady {
port,
logPathsToUpload: RepeatedField::from_vec(log_paths_to_upload),
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn process_ending(&self) -> Result<(), crate::error::GameLiftErrorType> {
self.send(crate::protos::generated_with_pure::sdk::ProcessEnding::default())
.await
.map(|_| ())
}
pub async fn report_health(
&self,
health_status: bool,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::ReportHealth {
healthStatus: health_status,
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn activate_game_session(
&self,
game_session_id: crate::entity::GameSessionId,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::GameSessionActivate {
gameSessionId: game_session_id,
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn terminate_game_session(
&self,
game_session_id: crate::entity::GameSessionId,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::GameSessionTerminate {
gameSessionId: game_session_id,
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn update_player_session_creation_policy(
&self,
game_session_id: crate::entity::GameSessionId,
player_session_policy: crate::entity::PlayerSessionCreationPolicy,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::UpdatePlayerSessionCreationPolicy {
gameSessionId: game_session_id,
newPlayerSessionCreationPolicy: player_session_policy.to_string(),
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn accept_player_session(
&self,
player_session_id: crate::entity::PlayerSessionId,
game_session_id: crate::entity::GameSessionId,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::AcceptPlayerSession {
playerSessionId: player_session_id,
gameSessionId: game_session_id,
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn remove_player_session(
&self,
player_session_id: crate::entity::PlayerSessionId,
game_session_id: crate::entity::GameSessionId,
) -> Result<(), crate::error::GameLiftErrorType> {
let message = crate::protos::generated_with_pure::sdk::RemovePlayerSession {
playerSessionId: player_session_id,
gameSessionId: game_session_id,
..Default::default()
};
self.send(message).await.map(|_| ())
}
pub async fn describe_player_sessions(
&self,
request: crate::entity::DescribePlayerSessionsRequest,
) -> Result<crate::entity::DescribePlayerSessionsResult, crate::error::GameLiftErrorType> {
let response = self.send(crate::mapper::describe_player_sessions_mapper(request)).await;
match response {
Ok(response) => {
let proto_response: crate::protos::generated_with_pure::sdk::DescribePlayerSessionsResponse =
serde_json::from_str(response.text().await.unwrap().as_str()).unwrap();
Ok(crate::mapper::describe_player_session_request_mapper(proto_response))
}
Err(error) => Err(error),
}
}
pub async fn backfill_matchmaking(
&self,
request: crate::entity::StartMatchBackfillRequest,
) -> Result<crate::entity::StartMatchBackfillResult, crate::error::GameLiftErrorType> {
let response = self.send(crate::mapper::start_match_backfill_request_mapper(request)).await;
match response {
Ok(response) => {
let p: crate::protos::generated_with_pure::sdk::BackfillMatchmakingResponse =
serde_json::from_str(response.text().await.unwrap().as_str()).unwrap();
Ok(crate::mapper::start_matchmaking_result_mapper(p))
}
Err(error) => Err(error),
}
}
pub async fn stop_matchmaking(
&self,
request: crate::entity::StopMatchBackfillRequest,
) -> Result<(), crate::error::GameLiftErrorType> {
self.send(crate::mapper::stop_matchmaking_request_mapper(request)).await.map(|_| ())
}
pub async fn get_instance_certificate(
&self,
) -> Result<crate::entity::GetInstanceCertificateResult, crate::error::GameLiftErrorType> {
let response = self
.send(crate::protos::generated_with_pure::sdk::GetInstanceCertificate::default())
.await;
match response {
Ok(response) => {
let p: crate::protos::generated_with_pure::sdk::GetInstanceCertificateResponse =
serde_json::from_str(response.text().await.unwrap().as_str()).unwrap();
Ok(crate::mapper::get_instance_certificate_result_mapper(p))
}
Err(error) => Err(error),
}
}
}
/// Returns the unqualified type name of `T` — the text after the last `::` —
/// which is used as the `gamelift-target` header value.
///
/// The previous implementation used `full_name.rfind(':')?` and therefore
/// returned `None` for any type whose `type_name` contains no colon (e.g.
/// primitives); `rsplit` always yields at least one item, so this now returns
/// `Some` for every type while preserving the result for qualified names.
fn get_message_type<T>(_: &T) -> Option<&str> {
    let full_name = std::any::type_name::<T>();
    full_name.rsplit("::").next()
}
#[cfg(test)]
mod tests {
    use crate::http_client::get_message_type;
    // The `gamelift-target` header must carry the unqualified protobuf
    // message name, with the module path stripped.
    #[test]
    fn get_message_type_test() {
        let process_ready = crate::protos::generated_with_pure::sdk::ProcessReady::default();
        assert_eq!(get_message_type(&process_ready), Some("ProcessReady"));
    }
}
| 35.813333 | 109 | 0.605361 |
389e4b072a8a9e21b775a398cb068b1763caee34 | 1,393 | use super::UInt32Value;
use quick_xml::events::BytesStart;
use quick_xml::Reader;
use quick_xml::Writer;
use reader::driver::*;
use std::io::Cursor;
use writer::driver::*;
/// In-memory model of the `<workbookView>` element of `xl/workbook.xml`
/// (workbook window placement and the active sheet tab).
#[derive(Clone, Default, Debug)]
pub struct WorkbookView {
    // Zero-based index of the sheet tab that is active when the workbook opens.
    active_tab: UInt32Value,
}
impl WorkbookView {
    /// Returns the zero-based index of the active sheet tab.
    pub fn get_active_tab(&self) -> &u32 {
        &self.active_tab.get_value()
    }
    /// Sets the active sheet tab index; returns `self` for chaining.
    pub fn set_active_tab(&mut self, value: u32) -> &mut Self {
        self.active_tab.set_value(value);
        self
    }
    /// Reads the `activeTab` attribute from a `<workbookView>` start tag, if present.
    pub(crate) fn set_attributes<R: std::io::BufRead>(
        &mut self,
        _reader: &mut Reader<R>,
        e: &BytesStart,
    ) {
        if let Some(v) = get_attribute(e, b"activeTab") {
            self.active_tab.set_value_string(v);
        }
    }
    /// Writes the `<workbookView .../>` element. The window-geometry attributes
    /// are fixed values; `activeTab` is emitted only when it has been set.
    pub(crate) fn write_to(&self, writer: &mut Writer<Cursor<Vec<u8>>>) {
        let mut attributes: Vec<(&str, &str)> = vec![
            ("xWindow", "240"),
            ("yWindow", "105"),
            ("windowWidth", "14805"),
            ("windowHeight", "8010"),
        ];
        if self.active_tab.has_value() {
            attributes.push(("activeTab", &self.active_tab.get_value_string()));
        }
        write_start_tag(writer, "workbookView", attributes, true);
    }
}
| 27.313725 | 80 | 0.577172 |
aba9940ba774338f35bd9c9ade063d2be4a2b96b | 7,416 | #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate tera;
#[macro_use]
extern crate log;
extern crate pretty_env_logger;
extern crate simple_error;
pub mod short;
pub mod shortdb;
use short::Short;
use futures::{future, Future};
use std::env;
use std::path::Path;
use std::fs;
use std::error::Error;
use simple_error::SimpleError;
use std::sync::Arc;
use std::collections::HashMap;
use hyper::{
client::HttpConnector, rt, service::service_fn, Body, Client, Request,
Response, Server, Method, StatusCode
};
use tera::{Context, Tera};
// Boxed error type shared by all handlers.
type GenericError = Box<dyn std::error::Error + Send + Sync>;
// The boxed response future every handler ultimately resolves to.
type ResponseFuture = Box<dyn Future<Item = Response<Body>, Error = GenericError> + Send>;
// Fallible handler result; errors are turned into a 500 by `respond_handle_error`.
type ResponseError = Result<ResponseFuture, Box<dyn Error>>;
lazy_static! {
    // Tera templates are compiled once on first use and cached for the process.
    pub static ref TERA: Tera = compile_templates!("templates/**/*");
}
// URL prefix under which static assets are served...
static STATIC_ASSET_PATH: &str = "/static/assets";
// ...and the directory (relative to the working dir) they are read from.
static STATIC_ASSET_FILESYSTEM_ROOT: &str = "static/assets";
/// `GET /`: renders the link-creation form.
fn get_new() -> ResponseFuture {
    let ctx = Context::new();
    // `render` already returns an owned `String`; the previous `.to_string()`
    // made a redundant copy. NOTE(review): `unwrap` panics if the template is
    // missing or invalid — treated as a deployment error.
    let body = Body::from(TERA.render("index.html", &ctx).unwrap());
    Box::new(future::ok(
        Response::builder()
            .body(body)
            .unwrap(),
    ))
}
/// Extracts the query-string parameter `arg` from the request URL.
///
/// Returns `Err` both when the URL has no query string at all (the previous
/// version panicked via `query().unwrap()` on such URLs) and when the
/// parameter is missing.
fn get_argument_from_url(req: Request<Body>, arg: &str) -> Result<String, SimpleError> {
    let query = req
        .uri()
        .query()
        .ok_or_else(|| SimpleError::new("Argument Not Found"))?;
    let args = url::form_urlencoded::parse(query.as_bytes())
        .into_owned()
        .collect::<HashMap<String, String>>();
    args.get(arg)
        .cloned()
        .ok_or_else(|| SimpleError::new("Argument Not Found"))
}
/// `GET /complete?token=...`: renders the confirmation page for a new link.
fn get_complete(req: Request<Body>) -> ResponseError {
    let token = get_argument_from_url(req, "token")?;
    let mut ctx = Context::new();
    ctx.insert("token", &token);
    // `render` already returns an owned `String`; the previous `.to_string()`
    // made a redundant copy.
    let body = Body::from(TERA.render("complete.html", &ctx)?);
    Ok(Box::new(future::ok(
        Response::builder()
            .body(body)
            .unwrap(),
    )))
}
/// `GET /new?target=...`: creates a short link for `target` and redirects to
/// the completion page carrying the new token.
fn post_new(req: Request<Body>, redis_client: &Arc<redis::Client>) -> ResponseError {
    let mut con = redis_client.get_connection()?;
    let target = get_argument_from_url(req, "target")?;
    let short = Short::new(target)?;
    // Keep a copy of the token: `short` is moved into the store below.
    let token = short.token.clone();
    shortdb::add_short(short, &mut con)?;
    Ok(Box::new(future::ok(
        Response::builder()
            .status(StatusCode::MOVED_PERMANENTLY)
            .header("Location", format!("/complete?token={}", token))
            .body(Body::from(""))
            .unwrap(),
    )))
}
/// Handle a request that does't match other requests (and therefore should be a redirect request).
///
/// The request path (minus the leading '/') is treated as a short-link token;
/// a 301 redirect to the stored target is issued.
fn get_redirect(req: Request<Body>, redis_client: &Arc<redis::Client>) -> ResponseError {
    let mut con = redis_client.get_connection()?;
    // Strip the leading '/' to obtain the token.
    let short = shortdb::get_short(&req.uri().path()[1..], &mut con)?;
    Ok(Box::new(future::ok(
        Response::builder()
            .status(StatusCode::MOVED_PERMANENTLY)
            // Pass the target through directly; the previous
            // `format!("{}", ...)` was a useless re-format of the same string.
            .header("Location", short.target)
            .body(Body::from(""))
            .unwrap(),
    )))
}
// Builds a plain 500 response carrying the error's message.
fn render_error_page(error: Box<dyn Error>) -> ResponseFuture {
    Box::new(future::ok(
        Response::builder()
            .status(500)
            .body(Body::from(
                format!("Internal Server Error: {}", error)
            ))
            .unwrap(),
    ))
}
// Unwraps a handler result, logging failures and turning them into a 500 page.
fn respond_handle_error(result: ResponseError) -> ResponseFuture {
    result.unwrap_or_else(|error| {
        error!("{}", error);
        render_error_page(error)
    })
}
fn get_static(req: Request<Body>) -> ResponseError {
let relative_asset_path = &req.uri().path()[STATIC_ASSET_PATH.len()..];
trace!("Loading asset located at relative path: {}", relative_asset_path);
let asset_filesystem_path: String = format!(
"{}/{}",
STATIC_ASSET_FILESYSTEM_ROOT,
relative_asset_path
);
trace!("Computed non-canonicalized filesystem path: {}", asset_filesystem_path);
let asset_canonicalized_path = fs::canonicalize(asset_filesystem_path)?;
trace!("Canonicalized filesystem path: {}", asset_canonicalized_path.to_str().unwrap());
let pwd = env::current_dir()?;
let absolute_begins_with_path = format!("{}/{}", pwd.to_str().unwrap(), STATIC_ASSET_FILESYSTEM_ROOT);
if !asset_canonicalized_path
.to_str()
.unwrap()
.starts_with(&absolute_begins_with_path)
{
// Looks like someone tried path traversal.
// just return a 404 and forget about it.
return Ok(Box::new(future::ok(
Response::builder()
.status(404)
.body(Body::from(
format!("404 - Not Found")
))
.unwrap(),
)));
}
if(!Path::new(asset_canonicalized_path.to_str().unwrap()).exists()) {
return Ok(Box::new(future::ok(
Response::builder()
.status(404)
.body(Body::from(
format!("404 - Not Found")
))
.unwrap(),
)));
}
let content = fs::read_to_string(asset_canonicalized_path)?;
return Ok(
Box::new(
future::ok(
Response::builder()
.body(Body::from(content))
.unwrap(),
)
)
);
}
fn router(req: Request<Body>, _client: &Client<HttpConnector>, redis_client: &Arc<redis::Client>) -> ResponseFuture {
match (req.method(), req.uri().path()) {
(&Method::GET, "/") => {
get_new()
}
(&Method::GET, "/new") => {
respond_handle_error(post_new(req, redis_client))
}
(&Method::GET, "/complete") => {
respond_handle_error(get_complete(req))
}
_ => {
// I'd like to find a better way to handle this,
// it feels wrong (or at least too indented) in the catchall
// match arm.
if(req.uri().path().starts_with(STATIC_ASSET_PATH)) {
respond_handle_error(get_static(req))
} else {
respond_handle_error(get_redirect(req, redis_client))
}
}
}
}
/// Entry point: reads configuration from the environment, builds the shared
/// Redis client, and runs the Hyper server forever.
///
/// Required environment variables:
/// * `REDIS_CONNECTION_STRING` — e.g. `redis://127.0.0.1/`
/// * `LISTEN_ADDRESS` — socket address to bind, e.g. `0.0.0.0:3000`
fn main() {
    pretty_env_logger::init();
    rt::run(future::lazy(move || {
        // create a Client for all Services
        let client = Client::new();
        // `expect` instead of bare `unwrap` so a misconfigured deployment
        // panics with a message naming the missing/invalid variable.
        let connection_string: &str = &env::var("REDIS_CONNECTION_STRING")
            .expect("REDIS_CONNECTION_STRING must be set");
        let addr = env::var("LISTEN_ADDRESS")
            .expect("LISTEN_ADDRESS must be set")
            .parse()
            .expect("LISTEN_ADDRESS must be a valid socket address");
        let redis_client = Arc::new(
            redis::Client::open(connection_string)
                .expect("Cannot open Redis client for REDIS_CONNECTION_STRING"),
        );
        // define a service containing the router function
        let new_service = move || {
            // Move a clone of Client into the service_fn
            let client = client.clone();
            let redis_client = redis_client.clone();
            service_fn(move |req| router(req, &client, &redis_client))
        };
        // Define the server - this is what the future_lazy() we're building will resolve to
        let server = Server::bind(&addr)
            .serve(new_service)
            .map_err(|e| eprintln!("Server error: {}", e));
        println!("Listening on http://{}", addr);
        server
    }));
}
| 29.903226 | 117 | 0.570388 |
28b9aad71a80c3e032939b6c58bd5412c945c7f3 | 2,042 | pub trait Queue<T> {
fn with_capacity(capacity: usize) -> Self;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn push_back(&mut self, val: T);
fn pop_front(&mut self) -> Option<T>;
}
/// A custom implementation of a circular queue which is
/// extremely quick and lightweight.
/// However, the downside is you need to know an upper bound on the number of elements
/// that will be inside the queue at any given time for this queue to work.
pub struct FixedCapacityQueue<T: Clone> {
ar: Box<[Option<T>]>,
front: usize,
back: usize,
capacity: usize,
}
impl<T: Clone> FixedCapacityQueue<T> {
/// Initialize a queue where a maximum of `max_sz` elements can be
/// in the queue at any given time
pub fn with_capacity(capacity: usize) -> Self {
Self {
front: 0,
back: 0,
capacity,
ar: vec![None; capacity].into_boxed_slice(),
}
}
pub fn peek(&self) -> Option<&T> {
self.ar.get(self.front).and_then(|x| x.as_ref())
}
}
impl<T: Clone> Queue<T> for FixedCapacityQueue<T> {
fn len(&self) -> usize {
self.back - self.front
}
fn with_capacity(capacity: usize) -> Self {
Self::with_capacity(capacity)
}
fn push_back(&mut self, val: T) {
assert!(self.back < self.capacity, "Queue too small!");
self.ar[self.back] = Some(val);
self.back += 1;
}
fn pop_front(&mut self) -> Option<T> {
if self.is_empty() {
None
} else {
let res = self.ar[self.front].take();
self.front += 1;
res
}
}
}
impl<T: Clone> Queue<T> for std::collections::VecDeque<T> {
fn len(&self) -> usize {
self.len()
}
fn with_capacity(capacity: usize) -> Self {
Self::with_capacity(capacity)
}
fn push_back(&mut self, val: T) {
self.push_back(val);
}
fn pop_front(&mut self) -> Option<T> {
self.pop_front()
}
}
| 26.868421 | 86 | 0.563173 |
90cc49c35c54d47cfbdaf4e82ab1badbf1e17ad7 | 352 | use std::io::{Result, Error, ErrorKind};
use std::fs::File;
use std::path::PathBuf;
/// Wraps `message` in an `std::io::Error` of kind `Other`.
pub fn make_io_error(message: String) -> Error {
    Error::new(ErrorKind::Other, message)
}
/// Opens the file at `path`, decorating any failure with the offending path.
///
/// Takes `&Path` instead of the former `&PathBuf`: callers holding a
/// `&PathBuf` keep working via deref coercion, while borrowed paths no longer
/// force an owned `PathBuf`.
pub fn open_file(path: &Path) -> Result<File> {
    File::open(path)
        .map_err(|why| make_io_error(format!("Failed to open file at path [{:?}]: {}", path, why)))
}
67f58be0089df541678f559636c878dae47e09b9 | 2,212 | /// Scheduler in-memory store
pub mod store;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue;
use std::time::Duration;
use url::Url;
/// The representation of a Scheduled job
/// The representation of a Scheduled job
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ScheduledJob {
    /// The unique ScheduledJob ID which uniquely identifies the job..
    pub id: String,
    /// The HTTP endpoint to POST `ScheduledJob`'s payload when triggered by the CRON schedule.
    pub endpoint: Url,
    /// The CRON expression for the execution schedule.
    pub cron: String,
    /// the raw payload to send to an endpoint.
    pub payload: Box<RawValue>,
    /// When set and enqueuing the `ScheduledJob` fails due to a unique constraint this determines
    /// the backoff + retry period in seconds to try again. No retry if this is not set and will
    /// trigger on its regular cadence.
    // Serialized as whole seconds via the custom module below, and omitted
    // entirely when `None`.
    // NOTE(review): there is no `#[serde(default)]` here; deserializing input
    // that omits this key may fail even though serialization skips it when
    // `None` — confirm producers always emit the field or add a default.
    #[serde(
        with = "option_duration_u64_serde",
        skip_serializing_if = "Option::is_none"
    )]
    pub retry_already_running: Option<Duration>,
    /// This determines that upon recovery or restart of this scheduler if we should check that the
    /// `ScheduledJob` should have run since the last time it was successfully triggered.
    #[serde(default)]
    pub recovery_check: bool,
    /// This determines the last time the ScheduledJob was successfully triggered.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_run: Option<DateTime<Utc>>,
}
mod option_duration_u64_serde {
use serde::{self, Deserialize, Deserializer, Serializer};
use std::time::Duration;
pub fn serialize<S>(d: &Option<Duration>, s: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if let Some(d) = d {
s.serialize_u64(d.as_secs())
} else {
unreachable!()
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
where
D: Deserializer<'de>,
{
let seconds: Option<u64> = Option::deserialize(deserializer)?;
match seconds {
Some(seconds) => Ok(Some(Duration::from_secs(seconds))),
None => Ok(None),
}
}
}
| 31.6 | 99 | 0.653255 |
2f0df59b2b710910dfec7f0b0a2828699901524b | 2,792 | //! Serial
use core::{fmt, marker::PhantomData};
use crate::{
gpio::*,
hal::{prelude::*, serial},
sysctl::{self, Clocks},
time::Bps,
};
use nb::{self, block};
use void::Void;
pub use tm4c129x::{UART0, UART1, UART2, UART3, UART4, UART5, UART6, UART7};
pub use tm4c_hal::{serial::*, uart_hal_macro, uart_pin_macro};
/// Serial abstraction
/// Serial abstraction
pub struct Serial<UART, TX, RX, RTS, CTS> {
    // Owns the UART peripheral and every pin handed to it, so the pins cannot
    // be reused elsewhere while the UART is configured.
    uart: UART,
    tx_pin: TX,
    rx_pin: RX,
    rts_pin: RTS,
    cts_pin: CTS,
    // How newlines are translated on transmit (see `NewlineMode`).
    nl_mode: NewlineMode,
}
/// Serial receiver
pub struct Rx<UART, RX, CTS> {
    // The receive half keeps only a phantom of the UART type so it stays tied
    // to a specific UART instance without owning the register block.
    _uart: PhantomData<UART>,
    pin: RX,
    flow_pin: CTS,
}
/// Serial transmitter
pub struct Tx<UART, TX, RTS> {
    // The transmit half owns the UART register block (it is the writing side).
    uart: UART,
    pin: TX,
    flow_pin: RTS,
    nl_mode: NewlineMode,
}
// Pin-to-alternate-function mappings for each UART instance of the TM4C129x.
// The commented-out entries are modem-control signals (DCD/DSR/DTR/RI) that
// this HAL does not currently expose.
uart_pin_macro!(UART0,
    cts: [(gpioh::PH1, AF1), (gpiom::PM4, AF1), (gpiob::PB4, AF1)],
    // dcd: [(gpioh::PH2, AF1), (gpiom::PM5, AF1), (gpiop::PP3, AF2)],
    // dsr: [(gpioh::PH3, AF1), (gpiom::PM6, AF1), (gpiop::PP4, AF2)],
    // dtr: [(gpiop::PP2, AF1)],
    // ri: [(gpiok::PK7, AF1), (gpiom::PM7, AF1)],
    rts: [(gpioh::PH0, AF1), (gpiob::PB5, AF1)],
    rx: [(gpioa::PA0, AF1)],
    tx: [(gpioa::PA1, AF1)],
);
uart_pin_macro!(UART1,
    cts: [(gpion::PN1, AF1), (gpiop::PP3, AF1)],
    // dcd: [(gpioe::PE2, AF1), (gpion::PN2, AF1)],
    // dsr: [(gpioe::PE1, AF1), (gpion::PN3, AF1)],
    // dtr: [(gpioe::PE3, AF1), (gpion::PN4, AF1)],
    // ri: [(gpioe::PE4, AF1), (gpion::PN5, AF1)],
    rts: [(gpioe::PE0, AF1), (gpion::PN0, AF1)],
    rx: [(gpiob::PB0, AF1), (gpioq::PQ4, AF1)],
    tx: [(gpiob::PB1, AF1)],
);
uart_pin_macro!(UART2,
    cts: [(gpiod::PD7, AF1), (gpion::PN3, AF2)],
    rts: [(gpiod::PD6, AF1), (gpion::PN2, AF2)],
    rx: [(gpioa::PA6, AF1), (gpiod::PD4, AF1)],
    tx: [(gpioa::PA7, AF1), (gpiod::PD5, AF1)],
);
uart_pin_macro!(UART3,
    cts: [(gpiop::PP5, AF1), (gpion::PN5, AF2)],
    rts: [(gpiop::PP4, AF1), (gpion::PN4, AF2)],
    rx: [(gpioa::PA4, AF1), (gpioj::PJ0, AF1)],
    tx: [(gpioa::PA5, AF1), (gpioj::PJ1, AF1)],
);
uart_pin_macro!(UART4,
    cts: [(gpiok::PK3, AF1)],
    rts: [(gpiok::PK2, AF1)],
    rx: [(gpioa::PA2, AF1), (gpiok::PK0, AF1)],
    tx: [(gpioa::PA3, AF1), (gpiok::PK1, AF1)],
);
// UART5-7 have no hardware flow-control pins on this part.
uart_pin_macro!(UART5,
    cts: [],
    rts: [],
    rx: [(gpioc::PC6, AF1)],
    tx: [(gpioc::PC7, AF1)],
);
uart_pin_macro!(UART6,
    cts: [],
    rts: [],
    rx: [(gpiop::PP0, AF1)],
    tx: [(gpiop::PP1, AF1)],
);
uart_pin_macro!(UART7,
    cts: [],
    rts: [],
    rx: [(gpioc::PC4, AF1)],
    tx: [(gpioc::PC5, AF1)],
);
// Generates the Serial/Rx/Tx constructor and I/O trait impls for each UART.
uart_hal_macro! {
    UART0: (Uart0, uart0),
    UART1: (Uart1, uart1),
    UART2: (Uart2, uart2),
    UART3: (Uart3, uart3),
    UART4: (Uart4, uart4),
    UART5: (Uart5, uart5),
    UART6: (Uart6, uart6),
    UART7: (Uart7, uart7),
}
086fbc9d3ddddcfba357846e45e897f806118814 | 75,625 | #![feature(box_patterns)]
#![feature(in_band_lifetimes)]
#![feature(iter_zip)]
#![feature(rustc_private)]
#![feature(control_flow_enum)]
#![recursion_limit = "512"]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::must_use_candidate)]
// warn on the same lints as `clippy_lints`
#![warn(trivial_casts, trivial_numeric_casts)]
// warn on lints, that are included in `rust-lang/rust`s bootstrap
#![warn(rust_2018_idioms, unused_lifetimes)]
// warn on rustc internal lints
#![warn(rustc::internal)]
// FIXME: switch to something more ergonomic here, once available.
// (Currently there is no way to opt into sysroot crates without `extern crate`.)
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_attr;
extern crate rustc_data_structures;
extern crate rustc_errors;
extern crate rustc_hir;
extern crate rustc_infer;
extern crate rustc_lexer;
extern crate rustc_lint;
extern crate rustc_middle;
extern crate rustc_session;
extern crate rustc_span;
extern crate rustc_target;
extern crate rustc_trait_selection;
extern crate rustc_typeck;
#[macro_use]
pub mod sym_helper;
#[allow(clippy::module_name_repetitions)]
pub mod ast_utils;
pub mod attrs;
pub mod comparisons;
pub mod consts;
pub mod diagnostics;
pub mod eager_or_lazy;
pub mod higher;
mod hir_utils;
pub mod msrvs;
pub mod numeric_literal;
pub mod paths;
pub mod ptr;
pub mod qualify_min_const_fn;
pub mod source;
pub mod str_utils;
pub mod sugg;
pub mod ty;
pub mod usage;
pub mod visitors;
pub use self::attrs::*;
pub use self::hir_utils::{both, count_eq, eq_expr_value, over, SpanlessEq, SpanlessHash};
use std::collections::hash_map::Entry;
use std::hash::BuildHasherDefault;
use if_chain::if_chain;
use rustc_ast::ast::{self, Attribute, LitKind};
use rustc_data_structures::unhash::UnhashMap;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::hir_id::{HirIdMap, HirIdSet};
use rustc_hir::intravisit::{self, walk_expr, ErasedMap, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_hir::LangItem::{OptionNone, ResultErr, ResultOk};
use rustc_hir::{
def, Arm, BindingAnnotation, Block, Body, Constness, Destination, Expr, ExprKind, FnDecl, ForeignItem, GenericArgs,
HirId, Impl, ImplItem, ImplItemKind, IsAsync, Item, ItemKind, LangItem, Local, MatchSource, Mutability, Node,
Param, Pat, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, TraitItem, TraitItemKind, TraitRef, TyKind,
UnOp,
};
use rustc_lint::{LateContext, Level, Lint, LintContext};
use rustc_middle::hir::exports::Export;
use rustc_middle::hir::map::Map;
use rustc_middle::hir::place::PlaceBase;
use rustc_middle::ty as rustc_ty;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::binding::BindingMode;
use rustc_middle::ty::{layout::IntegerExt, BorrowKind, DefIdTree, Ty, TyCtxt, TypeAndMut, TypeFoldable, UpvarCapture};
use rustc_semver::RustcVersion;
use rustc_session::Session;
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::source_map::original_sp;
use rustc_span::sym;
use rustc_span::symbol::{kw, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::Integer;
use crate::consts::{constant, Constant};
use crate::ty::{can_partially_move_ty, is_copy, is_recursively_primitive_type};
/// Parses an MSRV string into a `RustcVersion`.
///
/// On a parse failure, emits a span error — but only when *both* a session and
/// a span are available — and returns `None`.
pub fn parse_msrv(msrv: &str, sess: Option<&Session>, span: Option<Span>) -> Option<RustcVersion> {
    match RustcVersion::parse(msrv) {
        Ok(version) => Some(version),
        Err(_) => {
            if let (Some(sess), Some(span)) = (sess, span) {
                sess.span_err(span, &format!("`{}` is not a valid Rust version", msrv));
            }
            None
        },
    }
}
/// Returns `true` when the configured MSRV permits the lint's required
/// version; an unset MSRV (`None`) always permits it.
pub fn meets_msrv(msrv: Option<&RustcVersion>, lint_msrv: &RustcVersion) -> bool {
    match msrv {
        Some(msrv) => msrv.meets(*lint_msrv),
        None => true,
    }
}
/// Generates an `enter_lint_attrs` implementation that reads an inner
/// `#![clippy::msrv = "..."]` attribute and stores the parsed version in the
/// lint pass's `msrv` field. Invoke with the lint context kind
/// (`LateContext` or `EarlyContext`); the `@LateContext` arm threads an extra
/// `()` call because `cx.sess` is a method there rather than a field.
#[macro_export]
macro_rules! extract_msrv_attr {
    (LateContext) => {
        extract_msrv_attr!(@LateContext, ());
    };
    (EarlyContext) => {
        extract_msrv_attr!(@EarlyContext);
    };
    (@$context:ident$(, $call:tt)?) => {
        fn enter_lint_attrs(&mut self, cx: &rustc_lint::$context<'tcx>, attrs: &'tcx [rustc_ast::ast::Attribute]) {
            use $crate::get_unique_inner_attr;
            match get_unique_inner_attr(cx.sess$($call)?, attrs, "msrv") {
                Some(msrv_attr) => {
                    // An `msrv` attribute must carry a string value; anything
                    // else is reported as a bad clippy attribute.
                    if let Some(msrv) = msrv_attr.value_str() {
                        self.msrv = $crate::parse_msrv(
                            &msrv.to_string(),
                            Some(cx.sess$($call)?),
                            Some(msrv_attr.span),
                        );
                    } else {
                        cx.sess$($call)?.span_err(msrv_attr.span, "bad clippy attribute");
                    }
                },
                _ => (),
            }
        }
    };
}
/// Returns `true` if the two spans come from differing expansions (i.e., one is
/// from a macro and one isn't).
#[must_use]
pub fn differing_macro_contexts(lhs: Span, rhs: Span) -> bool {
    lhs.ctxt() != rhs.ctxt()
}
/// If the given expression is a local binding, find the initializer expression.
/// If that initializer expression is another local binding, find its initializer again.
/// This process repeats as long as possible (but usually no more than once). Initializer
/// expressions with adjustments are ignored. If this is not desired, use [`find_binding_init`]
/// instead.
///
/// Examples:
/// ```ignore
/// let abc = 1;
/// //        ^ output
/// let def = abc;
/// dbg!(def)
/// //   ^^^ input
///
/// // or...
/// let abc = 1;
/// let def = abc + 2;
/// //        ^^^^^^^ output
/// dbg!(def)
/// //   ^^^ input
/// ```
pub fn expr_or_init<'a, 'b, 'tcx: 'b>(cx: &LateContext<'tcx>, mut expr: &'a Expr<'b>) -> &'a Expr<'b> {
    loop {
        let next = path_to_local(expr)
            .and_then(|id| find_binding_init(cx, id))
            // An adjusted initializer may have a different type than the binding; stop there.
            .filter(|init| cx.typeck_results().expr_adjustments(init).is_empty());
        match next {
            Some(init) => expr = init,
            None => return expr,
        }
    }
}
/// Finds the initializer expression for a local binding. Returns `None` if the binding is mutable.
/// By only considering immutable bindings, we guarantee that the returned expression represents the
/// value of the binding wherever it is referenced.
///
/// Example: For `let x = 1`, if the `HirId` of `x` is provided, the `Expr` `1` is returned.
/// Note: If you have an expression that references a binding `x`, use `path_to_local` to get the
/// canonical binding `HirId`.
pub fn find_binding_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> {
let hir = cx.tcx.hir();
if_chain! {
if let Some(Node::Binding(pat)) = hir.find(hir_id);
if matches!(pat.kind, PatKind::Binding(BindingAnnotation::Unannotated, ..));
let parent = hir.get_parent_node(hir_id);
if let Some(Node::Local(local)) = hir.find(parent);
then {
return local.init;
}
}
None
}
/// Returns `true` if the given `NodeId` is inside a constant context
///
/// # Example
///
/// ```rust,ignore
/// if in_constant(cx, expr.hir_id) {
///     // Do something
/// }
/// ```
pub fn in_constant(cx: &LateContext<'_>, id: HirId) -> bool {
    let parent_id = cx.tcx.hir().get_parent_item(id);
    match cx.tcx.hir().get(parent_id) {
        // Bodies of `const`s, `static`s, associated consts, and anonymous
        // constants are always constant contexts.
        Node::Item(&Item {
            kind: ItemKind::Const(..) | ItemKind::Static(..),
            ..
        })
        | Node::TraitItem(&TraitItem {
            kind: TraitItemKind::Const(..),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Const(..),
            ..
        })
        | Node::AnonConst(_) => true,
        // Function bodies are constant contexts only for `const fn`.
        Node::Item(&Item {
            kind: ItemKind::Fn(ref sig, ..),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Fn(ref sig, _),
            ..
        }) => sig.header.constness == Constness::Const,
        _ => false,
    }
}
/// Checks if a `QPath` resolves to a constructor of a `LangItem`.
/// For example, use this to check whether a function call or a pattern is `Some(..)`.
pub fn is_lang_ctor(cx: &LateContext<'_>, qpath: &QPath<'_>, lang_item: LangItem) -> bool {
    let path = match qpath {
        QPath::Resolved(_, path) => path,
        _ => return false,
    };
    if let Res::Def(DefKind::Ctor(..), ctor_id) = path.res {
        // The constructor's parent is the variant/struct the lang item names.
        cx.tcx
            .lang_items()
            .require(lang_item)
            .map_or(false, |item_id| cx.tcx.parent(ctor_id) == Some(item_id))
    } else {
        false
    }
}
/// Returns `true` if this `span` was expanded by any macro.
#[must_use]
pub fn in_macro(span: Span) -> bool {
    if !span.from_expansion() {
        return false;
    }
    // Desugarings (`?`, `for`, `await`, ...) are expansions too, but not macros.
    !matches!(span.ctxt().outer_expn_data().kind, ExpnKind::Desugaring(..))
}
/// Checks if an expression is the unit value: either an empty tuple `()` or an
/// empty block `{}` with no trailing expression.
pub fn is_unit_expr(expr: &Expr<'_>) -> bool {
    match expr.kind {
        ExprKind::Tup([]) => true,
        ExprKind::Block(block, _) => block.stmts.is_empty() && block.expr.is_none(),
        _ => false,
    }
}
/// Checks if given pattern is a wildcard (`_`)
pub fn is_wild(pat: &Pat<'_>) -> bool {
    match pat.kind {
        PatKind::Wild => true,
        _ => false,
    }
}
/// Checks if the first type parameter is a lang item.
/// Returns the parameter's `hir::Ty` when it resolves to `item`.
pub fn is_ty_param_lang_item(cx: &LateContext<'_>, qpath: &QPath<'tcx>, item: LangItem) -> Option<&'tcx hir::Ty<'tcx>> {
    let ty = get_qpath_generic_tys(qpath).next()?;
    let ty_path = match &ty.kind {
        TyKind::Path(p) => p,
        _ => return None,
    };
    let id = cx.qpath_res(ty_path, ty.hir_id).opt_def_id()?;
    if cx.tcx.lang_items().require(item).map_or(false, |lang_id| lang_id == id) {
        Some(ty)
    } else {
        None
    }
}
/// Checks if the first type parameter is a diagnostic item.
/// Returns the parameter's `hir::Ty` when it resolves to `item`.
pub fn is_ty_param_diagnostic_item(
    cx: &LateContext<'_>,
    qpath: &QPath<'tcx>,
    item: Symbol,
) -> Option<&'tcx hir::Ty<'tcx>> {
    let ty = get_qpath_generic_tys(qpath).next()?;
    let ty_path = match &ty.kind {
        TyKind::Path(p) => p,
        _ => return None,
    };
    let id = cx.qpath_res(ty_path, ty.hir_id).opt_def_id()?;
    cx.tcx.is_diagnostic_item(item, id).then(|| ty)
}
/// Checks if the method call given in `expr` belongs to the given trait.
/// This is a deprecated function, consider using [`is_trait_method`].
///
/// Panics if `expr` has no type-dependent def-id (i.e. is not a method call).
pub fn match_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, path: &[&str]) -> bool {
    let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
    match cx.tcx.trait_of_item(def_id) {
        Some(trt_id) => match_def_path(cx, trt_id, path),
        None => false,
    }
}
/// Checks if a method is defined in an impl of a diagnostic item
pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
    cx.tcx
        .impl_of_method(def_id)
        // Inherent impls only: look at the self type's ADT definition.
        .and_then(|impl_did| cx.tcx.type_of(impl_did).ty_adt_def())
        .map_or(false, |adt| cx.tcx.is_diagnostic_item(diag_item, adt.did))
}
/// Checks if a method is in a diagnostic item trait
pub fn is_diag_trait_item(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
    match cx.tcx.trait_of_item(def_id) {
        Some(trait_did) => cx.tcx.is_diagnostic_item(diag_item, trait_did),
        None => false,
    }
}
/// Checks if the method call given in `expr` belongs to the given trait.
pub fn is_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
    if let Some(did) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
        is_diag_trait_item(cx, did, diag_item)
    } else {
        false
    }
}
/// Checks if the given expression is a path referring an item on the trait
/// that is marked with the given diagnostic item.
///
/// For checking method call expressions instead of path expressions, use
/// [`is_trait_method`].
///
/// For example, this can be used to find if an expression like `u64::default`
/// refers to an item of the trait `Default`, which is associated with the
/// `diag_item` of `sym::Default`.
pub fn is_trait_item(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
    match expr.kind {
        hir::ExprKind::Path(ref qpath) => cx
            .qpath_res(qpath, expr.hir_id)
            .opt_def_id()
            .map_or(false, |def_id| is_diag_trait_item(cx, def_id, diag_item)),
        _ => false,
    }
}
/// Returns the final segment of a `QPath`.
///
/// Panics on an empty resolved path or a lang-item path (which has no segments).
pub fn last_path_segment<'tcx>(path: &QPath<'tcx>) -> &'tcx PathSegment<'tcx> {
    match *path {
        QPath::Resolved(_, path) => match path.segments.last() {
            Some(seg) => seg,
            None => panic!("A path must have at least one segment"),
        },
        QPath::TypeRelative(_, seg) => seg,
        QPath::LangItem(..) => panic!("last_path_segment: lang item has no path segments"),
    }
}
/// Returns the generic arguments attached to the last segment of a `QPath`, if any.
pub fn get_qpath_generics(path: &QPath<'tcx>) -> Option<&'tcx GenericArgs<'tcx>> {
    match path {
        QPath::Resolved(_, p) => match p.segments.last() {
            Some(s) => s.args,
            None => None,
        },
        QPath::TypeRelative(_, s) => s.args,
        QPath::LangItem(..) => None,
    }
}
/// Iterates over the *type* generic arguments of a `QPath`, skipping lifetimes
/// and const arguments.
pub fn get_qpath_generic_tys(path: &QPath<'tcx>) -> impl Iterator<Item = &'tcx hir::Ty<'tcx>> {
    let args = match get_qpath_generics(path) {
        Some(generics) => generics.args,
        None => &[],
    };
    args.iter().filter_map(|arg| match arg {
        hir::GenericArg::Type(ty) => Some(ty),
        _ => None,
    })
}
/// Returns the first segment of a `QPath`, if it has one.
pub fn single_segment_path<'tcx>(path: &QPath<'tcx>) -> Option<&'tcx PathSegment<'tcx>> {
    match *path {
        QPath::Resolved(_, path) => path.segments.first(),
        QPath::TypeRelative(_, seg) => Some(seg),
        QPath::LangItem(..) => None,
    }
}
/// THIS METHOD IS DEPRECATED and will eventually be removed since it does not match against the
/// entire path or resolved `DefId`. Prefer using `match_def_path`. Consider getting a `DefId` from
/// `QPath::Resolved.1.res.opt_def_id()`.
///
/// Matches a `QPath` against a slice of segment string literals.
///
/// There is also `match_path` if you are dealing with a `rustc_hir::Path` instead of a
/// `rustc_hir::QPath`.
///
/// # Examples
/// ```rust,ignore
/// match_qpath(path, &["std", "rt", "begin_unwind"])
/// ```
pub fn match_qpath(path: &QPath<'_>, segments: &[&str]) -> bool {
    match *path {
        QPath::Resolved(_, path) => match_path(path, segments),
        QPath::TypeRelative(ty, segment) => {
            // The base type must itself be a path; recurse into it with all
            // but the final expected segment.
            let inner_path = match ty.kind {
                TyKind::Path(ref inner_path) => inner_path,
                _ => return false,
            };
            match segments.split_last() {
                Some((end, prefix)) => match_qpath(inner_path, prefix) && segment.ident.name.as_str() == *end,
                None => false,
            }
        },
        QPath::LangItem(..) => false,
    }
}
/// If the expression is a path, resolve it. Otherwise, return `Res::Err`.
pub fn expr_path_res(cx: &LateContext<'_>, expr: &Expr<'_>) -> Res {
    match &expr.kind {
        ExprKind::Path(p) => cx.qpath_res(p, expr.hir_id),
        _ => Res::Err,
    }
}
/// Resolves the path to a `DefId` and checks if it matches the given path.
pub fn is_qpath_def_path(cx: &LateContext<'_>, path: &QPath<'_>, hir_id: HirId, segments: &[&str]) -> bool {
    match cx.qpath_res(path, hir_id).opt_def_id() {
        Some(id) => match_def_path(cx, id, segments),
        None => false,
    }
}
/// If the expression is a path, resolves it to a `DefId` and checks if it matches the given path.
///
/// Please use `is_expr_diagnostic_item` if the target is a diagnostic item.
pub fn is_expr_path_def_path(cx: &LateContext<'_>, expr: &Expr<'_>, segments: &[&str]) -> bool {
    match expr_path_res(cx, expr).opt_def_id() {
        Some(id) => match_def_path(cx, id, segments),
        None => false,
    }
}
/// If the expression is a path, resolves it to a `DefId` and checks if it matches the given
/// diagnostic item.
pub fn is_expr_diagnostic_item(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
    match expr_path_res(cx, expr).opt_def_id() {
        Some(id) => cx.tcx.is_diagnostic_item(diag_item, id),
        None => false,
    }
}
/// THIS METHOD IS DEPRECATED and will eventually be removed since it does not match against the
/// entire path or resolved `DefId`. Prefer using `match_def_path`. Consider getting a `DefId` from
/// `QPath::Resolved.1.res.opt_def_id()`.
///
/// Matches a `Path` against a slice of segment string literals.
///
/// There is also `match_qpath` if you are dealing with a `rustc_hir::QPath` instead of a
/// `rustc_hir::Path`.
///
/// # Examples
///
/// ```rust,ignore
/// if match_path(&trait_ref.path, &paths::HASH) {
///     // This is the `std::hash::Hash` trait.
/// }
///
/// if match_path(ty_path, &["rustc", "lint", "Lint"]) {
///     // This is a `rustc_middle::lint::Lint`.
/// }
/// ```
pub fn match_path(path: &Path<'_>, segments: &[&str]) -> bool {
    // Compare from the end so a suffix of the actual path may be given.
    let mut actual = path.segments.iter().rev();
    for expected in segments.iter().rev() {
        match actual.next() {
            Some(seg) => {
                if seg.ident.name.as_str() != *expected {
                    return false;
                }
            },
            // The actual path ran out first: only its suffix was compared,
            // which matches the permissive `zip` semantics of the original.
            None => return true,
        }
    }
    true
}
/// If the expression is a path to a local, returns the canonical `HirId` of the local.
pub fn path_to_local(expr: &Expr<'_>) -> Option<HirId> {
    match expr.kind {
        ExprKind::Path(QPath::Resolved(None, path)) => match path.res {
            Res::Local(id) => Some(id),
            _ => None,
        },
        _ => None,
    }
}
/// Returns true if the expression is a path to a local with the specified `HirId`.
/// Use this function to see if an expression matches a function argument or a match binding.
pub fn path_to_local_id(expr: &Expr<'_>, id: HirId) -> bool {
    path_to_local(expr).map_or(false, |local_id| local_id == id)
}
/// Gets the definition associated to a path.
///
/// The path is given as string segments starting with a crate name, e.g.
/// `["core", "option", "Option"]`. A single segment is resolved as a
/// primitive type name.
pub fn path_to_res(cx: &LateContext<'_>, path: &[&str]) -> Res {
    // Short-circuits to `Res::Err` when an intermediate lookup fails.
    macro_rules! try_res {
        ($e:expr) => {
            match $e {
                Some(e) => e,
                None => return Res::Err,
            }
        };
    }
    // Finds a direct child item of `def_id` with the given name.
    fn item_child_by_name<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, name: &str) -> Option<&'tcx Export> {
        tcx.item_children(def_id)
            .iter()
            .find(|item| item.ident.name.as_str() == name)
    }
    let (krate, first, path) = match *path {
        [krate, first, ref path @ ..] => (krate, first, path),
        // A lone segment can only name a primitive type (`str`, `u32`, ...).
        [primitive] => {
            return PrimTy::from_name(Symbol::intern(primitive)).map_or(Res::Err, Res::PrimTy);
        },
        _ => return Res::Err,
    };
    let tcx = cx.tcx;
    let crates = tcx.crates(());
    // Resolve the crate by name, then the first item inside it.
    let krate = try_res!(crates.iter().find(|&&num| tcx.crate_name(num).as_str() == krate));
    let first = try_res!(item_child_by_name(tcx, krate.as_def_id(), first));
    let last = path
        .iter()
        .copied()
        // `get_def_path` seems to generate these empty segments for extern blocks.
        // We can just ignore them.
        .filter(|segment| !segment.is_empty())
        // for each segment, find the child item
        .try_fold(first, |item, segment| {
            let def_id = item.res.def_id();
            if let Some(item) = item_child_by_name(tcx, def_id, segment) {
                Some(item)
            } else if matches!(item.res, Res::Def(DefKind::Enum | DefKind::Struct, _)) {
                // it is not a child item so check inherent impl items
                tcx.inherent_impls(def_id)
                    .iter()
                    .find_map(|&impl_def_id| item_child_by_name(tcx, impl_def_id, segment))
            } else {
                None
            }
        });
    try_res!(last).res.expect_non_local()
}
/// Convenience function to get the `DefId` of a trait by path.
/// It could be a trait or trait alias.
pub fn get_trait_def_id(cx: &LateContext<'_>, path: &[&str]) -> Option<DefId> {
    if let Res::Def(DefKind::Trait | DefKind::TraitAlias, trait_id) = path_to_res(cx, path) {
        Some(trait_id)
    } else {
        None
    }
}
/// Gets the `hir::TraitRef` of the trait the given method is implemented for.
///
/// Use this if you want to find the `TraitRef` of the `Add` trait in this example:
///
/// ```rust
/// struct Point(isize, isize);
///
/// impl std::ops::Add for Point {
/// type Output = Self;
///
/// fn add(self, other: Self) -> Self {
/// Point(0, 0)
/// }
/// }
/// ```
pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx TraitRef<'tcx>> {
// Get the implemented trait for the current function
let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
if_chain! {
if parent_impl != hir::CRATE_HIR_ID;
if let hir::Node::Item(item) = cx.tcx.hir().get(parent_impl);
if let hir::ItemKind::Impl(impl_) = &item.kind;
then { return impl_.of_trait.as_ref(); }
}
None
}
/// This method will return tuple of projection stack and root of the expression,
/// used in `can_mut_borrow_both`.
///
/// For example, if `e` represents the `v[0].a.b[x]`
/// this method will return a tuple, composed of a `Vec`
/// containing the `Expr`s for `v[0], v[0].a, v[0].a.b, v[0].a.b[x]`
/// and an `Expr` for root of them, `v`
fn projection_stack<'a, 'hir>(mut e: &'a Expr<'hir>) -> (Vec<&'a Expr<'hir>>, &'a Expr<'hir>) {
    let mut result = vec![];
    // Peel index/field projections inward until the root place is reached.
    while let ExprKind::Index(inner, _) | ExprKind::Field(inner, _) = e.kind {
        result.push(e);
        e = inner;
    }
    // Collected outermost-first; callers want innermost-first.
    result.reverse();
    (result, e)
}
/// Checks if two expressions can be mutably borrowed simultaneously
/// and they aren't dependent on borrowing same thing twice
pub fn can_mut_borrow_both(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>) -> bool {
    let (s1, r1) = projection_stack(e1);
    let (s2, r2) = projection_stack(e2);
    // Different root places never alias, so both can be borrowed mutably.
    if !eq_expr_value(cx, r1, r2) {
        return true;
    }
    for (x1, x2) in s1.iter().zip(s2.iter()) {
        match (&x1.kind, &x2.kind) {
            (ExprKind::Field(_, i1), ExprKind::Field(_, i2)) => {
                // Distinct fields of the same place are disjoint: OK to
                // borrow both mutably.
                if i1 != i2 {
                    return true;
                }
            },
            (ExprKind::Index(_, i1), ExprKind::Index(_, i2)) => {
                // Index expressions can't be statically proven disjoint, so
                // differing indices must be treated as potentially aliasing.
                // Note the asymmetry with the `Field` arm: this returns `false`.
                if !eq_expr_value(cx, i1, i2) {
                    return false;
                }
            },
            // Mixed projection kinds: give up conservatively.
            _ => return false,
        }
    }
    // Identical (or prefix-identical) paths denote the same place.
    false
}
/// Returns true if the `def_id` associated with the `path` is recognized as a "default-equivalent"
/// constructor from the std library
fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<'_>) -> bool {
    // Collection types whose `new()` is checked against `Default::default()`.
    let std_types_symbols = [
        sym::String,
        sym::Vec,
        sym::VecDeque,
        sym::LinkedList,
        sym::HashMap,
        sym::BTreeMap,
        sym::HashSet,
        sym::BTreeSet,
        sym::BinaryHeap,
    ];
    let method = match path {
        QPath::TypeRelative(_, method) => method,
        _ => return false,
    };
    if method.ident.name != sym::new {
        return false;
    }
    cx.tcx
        .impl_of_method(def_id)
        .and_then(|impl_did| cx.tcx.type_of(impl_did).ty_adt_def())
        .map_or(false, |adt| {
            std_types_symbols
                .iter()
                .any(|&symbol| cx.tcx.is_diagnostic_item(symbol, adt.did))
        })
}
/// Returns true if the expr is equal to `Default::default()` of it's type when evaluated.
/// It doesn't cover all cases, for example indirect function calls (some of std
/// functions are supported) but it is the best we have.
pub fn is_default_equivalent(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
    match &e.kind {
        // `false`, integer `0`, and `""` literals.
        ExprKind::Lit(lit) => match lit.node {
            LitKind::Bool(false) | LitKind::Int(0, _) => true,
            LitKind::Str(s, _) => s.is_empty(),
            _ => false,
        },
        // Tuples/arrays are default-equivalent when every element is.
        ExprKind::Tup(items) | ExprKind::Array(items) => items.iter().all(|x| is_default_equivalent(cx, x)),
        // `[x; N]` when `x` is default-equivalent and the literal length is
        // at most 32 (arrays only implement `Default` up to that length here).
        ExprKind::Repeat(x, y) => if_chain! {
            if let ExprKind::Lit(ref const_lit) = cx.tcx.hir().body(y.body).value.kind;
            if let LitKind::Int(v, _) = const_lit.node;
            if v <= 32 && is_default_equivalent(cx, x);
            then {
                true
            }
            else {
                false
            }
        },
        // A call to `Default::default()` itself, or to a std constructor like
        // `Vec::new()` that is known to produce the default value.
        ExprKind::Call(repl_func, _) => if_chain! {
            if let ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
            if let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id();
            if is_diag_trait_item(cx, repl_def_id, sym::Default)
                || is_default_equivalent_ctor(cx, repl_def_id, repl_func_qpath);
            then {
                true
            }
            else {
                false
            }
        },
        // `None` is the default of `Option`.
        ExprKind::Path(qpath) => is_lang_ctor(cx, qpath, OptionNone),
        // `&[]` — a reference to an empty array.
        ExprKind::AddrOf(rustc_hir::BorrowKind::Ref, _, expr) => matches!(expr.kind, ExprKind::Array([])),
        _ => false,
    }
}
/// Checks if the top level expression can be moved into a closure as is.
/// Currently checks for:
/// * Break/Continue outside the given loop HIR ids.
/// * Yield/Return statements.
/// * Inline assembly.
/// * Usages of a field of a local where the type of the local can be partially moved.
///
/// For example, given the following function:
///
/// ```
/// fn f<'a>(iter: &mut impl Iterator<Item = (usize, &'a mut String)>) {
///     for item in iter {
///         let s = item.1;
///         if item.0 > 10 {
///             continue;
///         } else {
///             s.clear();
///         }
///     }
/// }
/// ```
///
/// When called on the expression `item.0` this will return false unless the local `item` is in the
/// `ignore_locals` set. The type `(usize, &mut String)` can have the second element moved, so it
/// isn't always safe to move into a closure when only a single field is needed.
///
/// When called on the `continue` expression this will return false unless the outer loop expression
/// is in the `loop_ids` set.
///
/// Note that this check is not recursive, so passing the `if` expression will always return true
/// even though sub-expressions might return false.
pub fn can_move_expr_to_closure_no_visit(
    cx: &LateContext<'tcx>,
    expr: &'tcx Expr<'_>,
    loop_ids: &[HirId],
    ignore_locals: &HirIdSet,
) -> bool {
    match expr.kind {
        // A break/continue targeting a loop that is itself part of the moved
        // expression stays inside the closure, so it is fine.
        ExprKind::Break(Destination { target_id: Ok(id), .. }, _)
        | ExprKind::Continue(Destination { target_id: Ok(id), .. })
            if loop_ids.contains(&id) =>
        {
            true
        },
        // Control flow that would escape the closure body cannot be moved.
        ExprKind::Break(..)
        | ExprKind::Continue(_)
        | ExprKind::Ret(_)
        | ExprKind::Yield(..)
        | ExprKind::InlineAsm(_)
        | ExprKind::LlvmInlineAsm(_) => false,
        // Accessing a field of a local value can only be done if the type isn't
        // partially moved.
        ExprKind::Field(
            &Expr {
                hir_id,
                kind:
                    ExprKind::Path(QPath::Resolved(
                        _,
                        Path {
                            res: Res::Local(local_id),
                            ..
                        },
                    )),
                ..
            },
            _,
        ) if !ignore_locals.contains(local_id) && can_partially_move_ty(cx, cx.typeck_results().node_type(hir_id)) => {
            // TODO: check if the local has been partially moved. Assume it has for now.
            false
        },
        _ => true,
    }
}
/// How a local is captured by a closure
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CaptureKind {
    /// Captured by value (moved or copied into the closure).
    Value,
    /// Captured by reference with the given mutability.
    Ref(Mutability),
}
impl CaptureKind {
    /// Returns `true` for an immutable-reference capture.
    pub fn is_imm_ref(self) -> bool {
        matches!(self, Self::Ref(Mutability::Not))
    }
}
impl std::ops::BitOr for CaptureKind {
    type Output = Self;
    /// Combines two capture kinds, keeping the stronger requirement:
    /// by-value dominates any borrow, and a mutable borrow dominates a shared one.
    fn bitor(self, rhs: Self) -> Self::Output {
        match (self, rhs) {
            (CaptureKind::Value, _) | (_, CaptureKind::Value) => CaptureKind::Value,
            (CaptureKind::Ref(Mutability::Mut), _) | (_, CaptureKind::Ref(Mutability::Mut)) => {
                CaptureKind::Ref(Mutability::Mut)
            },
            // Only `Ref(Not) | Ref(Not)` remains.
            _ => CaptureKind::Ref(Mutability::Not),
        }
    }
}
impl std::ops::BitOrAssign for CaptureKind {
    // Delegates to `BitOr`: keeps the stronger of the two capture kinds.
    fn bitor_assign(&mut self, rhs: Self) {
        *self = *self | rhs;
    }
}
/// Given an expression referencing a local, determines how it would be captured in a closure.
/// Note as this will walk up to parent expressions until the capture can be determined it should
/// only be used while making a closure somewhere a value is consumed. e.g. a block, match arm, or
/// function argument (other than a receiver).
pub fn capture_local_usage(cx: &LateContext<'tcx>, e: &Expr<'_>) -> CaptureKind {
    // Determines the weakest capture that satisfies every binding in `pat`.
    fn pat_capture_kind(cx: &LateContext<'_>, pat: &Pat<'_>) -> CaptureKind {
        let mut capture = CaptureKind::Ref(Mutability::Not);
        pat.each_binding_or_first(&mut |_, id, span, _| match cx
            .typeck_results()
            .extract_binding_mode(cx.sess(), id, span)
            .unwrap()
        {
            // Binding a non-`Copy` value by value forces a by-value capture.
            BindingMode::BindByValue(_) if !is_copy(cx, cx.typeck_results().node_type(id)) => {
                capture = CaptureKind::Value;
            },
            BindingMode::BindByReference(Mutability::Mut) if capture != CaptureKind::Value => {
                capture = CaptureKind::Ref(Mutability::Mut);
            },
            _ => (),
        });
        capture
    }
    debug_assert!(matches!(
        e.kind,
        ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(_), .. }))
    ));
    let mut child_id = e.hir_id;
    let mut capture = CaptureKind::Value;
    let mut capture_expr_ty = e;
    for (parent_id, parent) in cx.tcx.hir().parent_iter(e.hir_id) {
        // If the child expression starts with a deref/borrow adjustment, the
        // capture kind is fixed by the final adjusted type.
        if let [
            Adjustment {
                kind: Adjust::Deref(_) | Adjust::Borrow(AutoBorrow::Ref(..)),
                target,
            },
            ref adjust @ ..,
        ] = *cx
            .typeck_results()
            .adjustments()
            .get(child_id)
            .map_or(&[][..], |x| &**x)
        {
            if let rustc_ty::RawPtr(TypeAndMut { mutbl: mutability, .. }) | rustc_ty::Ref(_, _, mutability) =
                *adjust.last().map_or(target, |a| a.target).kind()
            {
                return CaptureKind::Ref(mutability);
            }
        }
        match parent {
            Node::Expr(e) => match e.kind {
                ExprKind::AddrOf(_, mutability, _) => return CaptureKind::Ref(mutability),
                ExprKind::Index(..) | ExprKind::Unary(UnOp::Deref, _) => capture = CaptureKind::Ref(Mutability::Not),
                // Assigning to (or through) the place requires a mutable borrow.
                // BUG FIX: the second alternative previously duplicated `Assign`
                // (unreachable, and binding the RHS); it must be `AssignOp`,
                // whose assignee is its *second* field, so that `x += 1`
                // forces a mutable capture.
                ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) if lhs.hir_id == child_id => {
                    return CaptureKind::Ref(Mutability::Mut);
                },
                // A field projection narrows what actually needs capturing;
                // remember the outermost projected expression for the `Copy` check.
                ExprKind::Field(..) => {
                    if capture == CaptureKind::Value {
                        capture_expr_ty = e;
                    }
                },
                ExprKind::Let(pat, ..) => {
                    let mutability = match pat_capture_kind(cx, pat) {
                        CaptureKind::Value => Mutability::Not,
                        CaptureKind::Ref(m) => m,
                    };
                    return CaptureKind::Ref(mutability);
                },
                // A match borrows the scrutinee as strongly as its strongest arm.
                ExprKind::Match(_, arms, _) => {
                    let mut mutability = Mutability::Not;
                    for capture in arms.iter().map(|arm| pat_capture_kind(cx, arm.pat)) {
                        match capture {
                            CaptureKind::Value => break,
                            CaptureKind::Ref(Mutability::Mut) => mutability = Mutability::Mut,
                            CaptureKind::Ref(Mutability::Not) => (),
                        }
                    }
                    return CaptureKind::Ref(mutability);
                },
                _ => break,
            },
            Node::Local(l) => match pat_capture_kind(cx, l.pat) {
                CaptureKind::Value => break,
                capture @ CaptureKind::Ref(_) => return capture,
            },
            _ => break,
        }
        child_id = parent_id;
    }
    if capture == CaptureKind::Value && is_copy(cx, cx.typeck_results().expr_ty(capture_expr_ty)) {
        // Copy types are never automatically captured by value.
        CaptureKind::Ref(Mutability::Not)
    } else {
        capture
    }
}
/// Checks if the expression can be moved into a closure as is. This will return a list of captures
/// if so, otherwise, `None`.
pub fn can_move_expr_to_closure(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<HirIdMap<CaptureKind>> {
    struct V<'cx, 'tcx> {
        cx: &'cx LateContext<'tcx>,
        /// Stack of potential break targets contained in the expression.
        loops: Vec<HirId>,
        /// Local variables created in the expression. These don't need to be captured.
        locals: HirIdSet,
        /// Whether this expression can be turned into a closure.
        allow_closure: bool,
        /// Locals which need to be captured, and whether they need to be by value, reference, or
        /// mutable reference.
        captures: HirIdMap<CaptureKind>,
    }
    impl Visitor<'tcx> for V<'_, 'tcx> {
        type Map = ErasedMap<'tcx>;
        fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
            NestedVisitorMap::None
        }
        fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
            if !self.allow_closure {
                // Already known to be unmovable; stop walking.
                return;
            }
            match e.kind {
                // A use of a local defined outside the expression: record how it
                // must be captured, merging with any previously recorded use.
                ExprKind::Path(QPath::Resolved(None, &Path { res: Res::Local(l), .. })) => {
                    if !self.locals.contains(&l) {
                        let cap = capture_local_usage(self.cx, e);
                        self.captures.entry(l).and_modify(|e| *e |= cap).or_insert(cap);
                    }
                },
                // A nested closure's own captures must be propagated outward.
                ExprKind::Closure(..) => {
                    let closure_id = self.cx.tcx.hir().local_def_id(e.hir_id).to_def_id();
                    for capture in self.cx.typeck_results().closure_min_captures_flattened(closure_id) {
                        let local_id = match capture.place.base {
                            PlaceBase::Local(id) => id,
                            PlaceBase::Upvar(var) => var.var_path.hir_id,
                            _ => continue,
                        };
                        if !self.locals.contains(&local_id) {
                            let capture = match capture.info.capture_kind {
                                UpvarCapture::ByValue(_) => CaptureKind::Value,
                                UpvarCapture::ByRef(borrow) => match borrow.kind {
                                    BorrowKind::ImmBorrow => CaptureKind::Ref(Mutability::Not),
                                    // Unique-immutable borrows require `&mut` capture too.
                                    BorrowKind::UniqueImmBorrow | BorrowKind::MutBorrow => {
                                        CaptureKind::Ref(Mutability::Mut)
                                    },
                                },
                            };
                            self.captures
                                .entry(local_id)
                                .and_modify(|e| *e |= capture)
                                .or_insert(capture);
                        }
                    }
                },
                // Loops inside the expression become valid break/continue targets.
                ExprKind::Loop(b, ..) => {
                    self.loops.push(e.hir_id);
                    self.visit_block(b);
                    self.loops.pop();
                },
                _ => {
                    self.allow_closure &= can_move_expr_to_closure_no_visit(self.cx, e, &self.loops, &self.locals);
                    walk_expr(self, e);
                },
            }
        }
        fn visit_pat(&mut self, p: &'tcx Pat<'tcx>) {
            // Bindings introduced inside the expression never need capturing.
            p.each_binding_or_first(&mut |_, id, _, _| {
                self.locals.insert(id);
            });
        }
    }
    let mut v = V {
        cx,
        allow_closure: true,
        loops: Vec::new(),
        locals: HirIdSet::default(),
        captures: HirIdMap::default(),
    };
    v.visit_expr(expr);
    v.allow_closure.then(|| v.captures)
}
/// Returns the method names and argument list of nested method call expressions that make up
/// `expr`. method/span lists are sorted with the most recent call first.
pub fn method_calls<'tcx>(
    expr: &'tcx Expr<'tcx>,
    max_depth: usize,
) -> (Vec<Symbol>, Vec<&'tcx [Expr<'tcx>]>, Vec<Span>) {
    let mut method_names = Vec::with_capacity(max_depth);
    let mut arg_lists = Vec::with_capacity(max_depth);
    let mut spans = Vec::with_capacity(max_depth);
    let mut current = expr;
    for _ in 0..max_depth {
        // BUG FIX: `&current.kind` had been mangled into the HTML entity
        // residue `¤t.kind`, which does not compile.
        if let ExprKind::MethodCall(path, span, args, _) = &current.kind {
            // Stop at the first call whose arguments come from a macro
            // expansion; their spans would be misleading to report.
            if args.iter().any(|e| e.span.from_expansion()) {
                break;
            }
            method_names.push(path.ident.name);
            arg_lists.push(&**args);
            spans.push(*span);
            // The receiver is the first element of `args`; walk into it.
            current = &args[0];
        } else {
            break;
        }
    }
    (method_names, arg_lists, spans)
}
/// Matches an `Expr` against a chain of methods, and return the matched `Expr`s.
///
/// For example, if `expr` represents the `.baz()` in `foo.bar().baz()`,
/// `method_chain_args(expr, &["bar", "baz"])` will return a `Vec`
/// containing the `Expr`s for
/// `.bar()` and `.baz()`
pub fn method_chain_args<'a>(expr: &'a Expr<'_>, methods: &[&str]) -> Option<Vec<&'a [Expr<'a>]>> {
    let mut current = expr;
    let mut matched = Vec::with_capacity(methods.len());
    // Method chains are stored outermost call first, so walk `methods` backwards.
    for method_name in methods.iter().rev() {
        match current.kind {
            ExprKind::MethodCall(path, _, args, _) if path.ident.name.as_str() == *method_name => {
                // Bail out when any argument comes from a macro expansion.
                if args.iter().any(|e| e.span.from_expansion()) {
                    return None;
                }
                matched.push(args);
                // The receiver is the first argument; continue with it.
                current = &args[0];
            },
            _ => return None,
        }
    }
    // Restore the same order as `methods`.
    matched.reverse();
    Some(matched)
}
/// Returns `true` if the provided `def_id` is an entrypoint to a program.
pub fn is_entrypoint_fn(cx: &LateContext<'_>, def_id: DefId) -> bool {
    match cx.tcx.entry_fn(()) {
        Some((entry_fn_def_id, _)) => def_id == entry_fn_def_id,
        None => false,
    }
}
/// Returns `true` if the expression is in the program's `#[panic_handler]`.
pub fn is_in_panic_handler(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
    let parent_item = cx.tcx.hir().get_parent_item(e.hir_id);
    let item_def_id = cx.tcx.hir().local_def_id(parent_item).to_def_id();
    cx.tcx.lang_items().panic_impl() == Some(item_def_id)
}
/// Gets the name of the item the expression is in, if available.
pub fn get_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Symbol> {
    let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id);
    match cx.tcx.hir().find(parent_id)? {
        Node::Item(Item { ident, .. })
        | Node::TraitItem(TraitItem { ident, .. })
        | Node::ImplItem(ImplItem { ident, .. }) => Some(ident.name),
        _ => None,
    }
}
/// Visitor that reports whether a given name occurs anywhere in the visited HIR.
pub struct ContainsName {
    // The name to search for.
    pub name: Symbol,
    // Set to `true` once `name` has been seen.
    pub result: bool,
}
impl<'tcx> Visitor<'tcx> for ContainsName {
    type Map = Map<'tcx>;
    // Record a hit when a visited name matches the one searched for.
    fn visit_name(&mut self, _: Span, name: Symbol) {
        if self.name == name {
            self.result = true;
        }
    }
    // Nested bodies are not visited; only the given tree is searched.
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
/// Checks if an `Expr` contains a certain name.
pub fn contains_name(name: Symbol, expr: &Expr<'_>) -> bool {
    let mut visitor = ContainsName { name, result: false };
    visitor.visit_expr(expr);
    visitor.result
}
/// Returns `true` if `expr` contains a return expression
pub fn contains_return(expr: &hir::Expr<'_>) -> bool {
    // Visitor that stops descending as soon as a `return` is found.
    struct RetFinder {
        found: bool,
    }
    impl<'tcx> hir::intravisit::Visitor<'tcx> for RetFinder {
        type Map = Map<'tcx>;
        fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
            if self.found {
                return;
            }
            match expr.kind {
                hir::ExprKind::Ret(..) => self.found = true,
                _ => hir::intravisit::walk_expr(self, expr),
            }
        }
        fn nested_visit_map(&mut self) -> hir::intravisit::NestedVisitorMap<Self::Map> {
            hir::intravisit::NestedVisitorMap::None
        }
    }
    let mut finder = RetFinder { found: false };
    finder.visit_expr(expr);
    finder.found
}
/// Visitor collecting the spans of expressions expanded from any of the given macros.
struct FindMacroCalls<'a, 'b> {
    // Macro names to look for.
    names: &'a [&'b str],
    // Spans of expressions that came from one of those macros.
    result: Vec<Span>,
}
impl<'a, 'b, 'tcx> Visitor<'tcx> for FindMacroCalls<'a, 'b> {
    type Map = Map<'tcx>;
    fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
        // Record the span when the expression was expanded from one of the macros.
        if self.names.iter().any(|fun| is_expn_of(expr.span, fun).is_some()) {
            self.result.push(expr.span);
        }
        // and check sub-expressions
        intravisit::walk_expr(self, expr);
    }
    // Nested bodies are not visited; only the given expression tree is searched.
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
/// Finds calls of the specified macros in a function body.
pub fn find_macro_calls(names: &[&str], body: &Body<'_>) -> Vec<Span> {
    let mut visitor = FindMacroCalls {
        names,
        result: Vec::new(),
    };
    visitor.visit_expr(&body.value);
    visitor.result
}
/// Extends the span to the beginning of the spans line, incl. whitespaces.
///
/// ```rust,ignore
/// let x = ();
/// //          ^^
/// // will be converted to
/// let x = ();
/// // ^^^^^^^^^^^^^^
/// ```
fn line_span<T: LintContext>(cx: &T, span: Span) -> Span {
    // Strip macro-expansion context first so we operate on a real source location.
    let span = original_sp(span, DUMMY_SP);
    let source_map_and_line = cx.sess().source_map().lookup_line(span.lo()).unwrap();
    let line_no = source_map_and_line.line;
    // Byte position at which line `line_no` starts in the source file.
    let line_start = source_map_and_line.sf.lines[line_no];
    span.with_lo(line_start)
}
/// Gets the parent node, if any.
pub fn get_parent_node(tcx: TyCtxt<'_>, id: HirId) -> Option<Node<'_>> {
    let mut parents = tcx.hir().parent_iter(id);
    parents.next().map(|(_, node)| node)
}
/// Gets the parent expression, if any –- this is useful to constrain a lint.
pub fn get_parent_expr<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
    // Thin wrapper over the `HirId`-based lookup.
    get_parent_expr_for_hir(cx, e.hir_id)
}
/// This retrieves the parent for the given `HirId` if it's an expression. This is useful for
/// constraint lints
pub fn get_parent_expr_for_hir<'tcx>(cx: &LateContext<'tcx>, hir_id: hir::HirId) -> Option<&'tcx Expr<'tcx>> {
    if let Some(Node::Expr(parent)) = get_parent_node(cx.tcx, hir_id) {
        Some(parent)
    } else {
        None
    }
}
/// Gets the enclosing block for the given `HirId`: either the enclosing scope's
/// block node directly, or the top-level block of the enclosing function's body.
pub fn get_enclosing_block<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Block<'tcx>> {
    let map = &cx.tcx.hir();
    let enclosing_node = map
        .get_enclosing_scope(hir_id)
        .and_then(|enclosing_id| map.find(enclosing_id));
    enclosing_node.and_then(|node| match node {
        Node::Block(block) => Some(block),
        // For a function item, use its body expression's block.
        Node::Item(&Item {
            kind: ItemKind::Fn(_, _, eid),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Fn(_, eid),
            ..
        }) => match cx.tcx.hir().body(eid).value.kind {
            ExprKind::Block(block, _) => Some(block),
            _ => None,
        },
        _ => None,
    })
}
/// Gets the loop or closure enclosing the given expression, if any.
pub fn get_enclosing_loop_or_closure(tcx: TyCtxt<'tcx>, expr: &Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
    for (_, node) in tcx.hir().parent_iter(expr.hir_id) {
        match node {
            Node::Expr(e) if matches!(e.kind, ExprKind::Loop(..) | ExprKind::Closure(..)) => return Some(e),
            // Keep climbing through intermediate expression-like nodes.
            Node::Expr(_) | Node::Stmt(_) | Node::Block(_) | Node::Local(_) | Node::Arm(_) => {},
            // Anything else (e.g. an item boundary) ends the search.
            _ => return None,
        }
    }
    None
}
/// Gets the parent node if it's an impl block.
pub fn get_parent_as_impl(tcx: TyCtxt<'_>, id: HirId) -> Option<&Impl<'_>> {
match tcx.hir().parent_iter(id).next() {
Some((
_,
Node::Item(Item {
kind: ItemKind::Impl(imp),
..
}),
)) => Some(imp),
_ => None,
}
}
/// Checks if the given expression is the else clause of either an `if` or `if let` expression.
pub fn is_else_clause(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
let mut iter = tcx.hir().parent_iter(expr.hir_id);
match iter.next() {
Some((
_,
Node::Expr(Expr {
kind: ExprKind::If(_, _, Some(else_expr)),
..
}),
)) => else_expr.hir_id == expr.hir_id,
_ => false,
}
}
/// Checks whether the given expression is a constant integer of the given value.
/// unlike `is_integer_literal`, this version does const folding
pub fn is_integer_const(cx: &LateContext<'_>, e: &Expr<'_>, value: u128) -> bool {
    // Fast path: a plain literal needs no const evaluation.
    if is_integer_literal(e, value) {
        return true;
    }
    let enclosing_body = cx.tcx.hir().local_def_id(cx.tcx.hir().enclosing_body_owner(e.hir_id));
    if let Some((Constant::Int(v), _)) = constant(cx, cx.tcx.typeck(enclosing_body), e) {
        return value == v;
    }
    false
}
/// Checks whether the given expression is a constant literal of the given value.
pub fn is_integer_literal(expr: &Expr<'_>, value: u128) -> bool {
    // FIXME: use constant folding
    if let ExprKind::Lit(ref spanned) = expr.kind {
        if let LitKind::Int(v, _) = spanned.node {
            return v == value;
        }
    }
    false
}
/// Returns `true` if the given `Expr` has been coerced before.
///
/// Examples of coercions can be found in the Nomicon at
/// <https://doc.rust-lang.org/nomicon/coercions.html>.
///
/// See `rustc_middle::ty::adjustment::Adjustment` and `rustc_typeck::check::coercion` for more
/// information on adjustments and coercions.
pub fn is_adjusted(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
    cx.typeck_results().adjustments().get(e.hir_id).is_some()
}
/// Returns the pre-expansion span if this comes from an expansion of the
/// macro `name`.
/// See also [`is_direct_expn_of`].
#[must_use]
pub fn is_expn_of(mut span: Span, name: &str) -> Option<Span> {
    // Walk the expansion chain outwards until `name` is found or the chain ends.
    loop {
        if span.from_expansion() {
            let data = span.ctxt().outer_expn_data();
            let new_span = data.call_site;
            if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind {
                if mac_name.as_str() == name {
                    return Some(new_span);
                }
            }
            span = new_span;
        } else {
            return None;
        }
    }
}
/// Returns the pre-expansion span if the span directly comes from an expansion
/// of the macro `name`.
/// The difference with [`is_expn_of`] is that in
/// ```rust
/// # macro_rules! foo { ($e:tt) => { $e } }; macro_rules! bar { ($e:expr) => { $e } }
/// foo!(bar!(42));
/// ```
/// `42` is considered expanded from `foo!` and `bar!` by `is_expn_of` but only
/// from `bar!` by `is_direct_expn_of`.
#[must_use]
pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
    // Like `is_expn_of`, but only inspects the outermost expansion layer.
    if span.from_expansion() {
        let data = span.ctxt().outer_expn_data();
        let new_span = data.call_site;
        if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind {
            if mac_name.as_str() == name {
                return Some(new_span);
            }
        }
    }
    None
}
/// Convenience function to get the return type of a function.
pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId) -> Ty<'tcx> {
    let fn_def_id = cx.tcx.hir().local_def_id(fn_item);
    let ret_ty = cx.tcx.fn_sig(fn_def_id).output();
    // Late-bound regions cannot be named here, so erase them.
    cx.tcx.erase_late_bound_regions(ret_ty)
}
/// Checks if an expression is constructing a tuple-like enum variant or struct
pub fn is_ctor_or_promotable_const_function(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    if let ExprKind::Call(fun, _) = expr.kind {
        if let ExprKind::Path(ref qp) = fun.kind {
            let res = cx.qpath_res(qp, fun.hir_id);
            return match res {
                def::Res::Def(DefKind::Variant | DefKind::Ctor(..), ..) => true,
                def::Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id),
                _ => false,
            };
        }
    }
    false
}
/// Returns `true` if a pattern is refutable.
// TODO: should be implemented using rustc/mir_build/thir machinery
pub fn is_refutable(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool {
    // A path/struct/tuple-struct pattern is refutable iff it names an enum variant.
    fn is_enum_variant(cx: &LateContext<'_>, qpath: &QPath<'_>, id: HirId) -> bool {
        matches!(
            cx.qpath_res(qpath, id),
            def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(def::CtorOf::Variant, _), _)
        )
    }
    // A set of sub-patterns is refutable if any one of them is.
    fn are_refutable<'a, I: IntoIterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>, i: I) -> bool {
        i.into_iter().any(|pat| is_refutable(cx, pat))
    }
    match pat.kind {
        PatKind::Wild => false,
        PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
        PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
        PatKind::Lit(..) | PatKind::Range(..) => true,
        PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
        PatKind::Or(pats) => {
            // TODO: should be the honest check, that pats is exhaustive set
            are_refutable(cx, pats)
        },
        PatKind::Tuple(pats, _) => are_refutable(cx, pats),
        PatKind::Struct(ref qpath, fields, _) => {
            is_enum_variant(cx, qpath, pat.hir_id) || are_refutable(cx, fields.iter().map(|field| &*field.pat))
        },
        PatKind::TupleStruct(ref qpath, pats, _) => is_enum_variant(cx, qpath, pat.hir_id) || are_refutable(cx, pats),
        PatKind::Slice(head, middle, tail) => {
            match &cx.typeck_results().node_type(pat.hir_id).kind() {
                rustc_ty::Slice(..) => {
                    // [..] is the only irrefutable slice pattern.
                    !head.is_empty() || middle.is_none() || !tail.is_empty()
                },
                rustc_ty::Array(..) => are_refutable(cx, head.iter().chain(middle).chain(tail.iter())),
                _ => {
                    // unreachable!()
                    true
                },
            }
        },
    }
}
/// If the pattern is an `or` pattern, call the function once for each sub pattern. Otherwise, call
/// the function once on the given pattern.
pub fn recurse_or_patterns<'tcx, F: FnMut(&'tcx Pat<'tcx>)>(pat: &'tcx Pat<'tcx>, mut f: F) {
    if let PatKind::Or(pats) = pat.kind {
        pats.iter().for_each(f);
    } else {
        f(pat);
    }
}
/// Checks for the `#[automatically_derived]` attribute all `#[derive]`d
/// implementations have.
pub fn is_automatically_derived(attrs: &[ast::Attribute]) -> bool {
    attrs.iter().any(|attr| attr.has_name(sym::automatically_derived))
}
/// Remove blocks around an expression.
///
/// Ie. `x`, `{ x }` and `{{{{ x }}}}` all give `x`. `{ x; y }` and `{}` return
/// themselves.
pub fn remove_blocks<'tcx>(mut expr: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
while let ExprKind::Block(block, ..) = expr.kind {
match (block.stmts.is_empty(), block.expr.as_ref()) {
(true, Some(e)) => expr = e,
_ => break,
}
}
expr
}
/// Checks whether the given parameter binding is the `self` parameter.
pub fn is_self(slf: &Param<'_>) -> bool {
    if let PatKind::Binding(.., name, _) = slf.pat.kind {
        name.name == kw::SelfLower
    } else {
        false
    }
}
/// Checks whether the given type is the `Self` type.
pub fn is_self_ty(slf: &hir::Ty<'_>) -> bool {
    if let TyKind::Path(QPath::Resolved(None, path)) = slf.kind {
        if let Res::SelfTy(..) = path.res {
            return true;
        }
    }
    false
}
/// Iterates over the body parameters that correspond to the inputs of `decl`.
// NOTE(review): indexes `body.params` by `decl.inputs.len()`, so it assumes the
// body has at least as many params as the declaration has inputs — TODO confirm.
pub fn iter_input_pats<'tcx>(decl: &FnDecl<'_>, body: &'tcx Body<'_>) -> impl Iterator<Item = &'tcx Param<'tcx>> {
    (0..decl.inputs.len()).map(move |i| &body.params[i])
}
/// Checks if a given expression is a match expression expanded from the `?`
/// operator or the `try` macro.
pub fn is_try<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
    // Matches the `Ok(x) => x` arm produced by the desugaring.
    fn is_ok(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
        if_chain! {
            if let PatKind::TupleStruct(ref path, pat, None) = arm.pat.kind;
            if is_lang_ctor(cx, path, ResultOk);
            if let PatKind::Binding(_, hir_id, _, None) = pat[0].kind;
            if path_to_local_id(arm.body, hir_id);
            then {
                return true;
            }
        }
        false
    }
    // Matches any `Err(..)` arm.
    fn is_err(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
        if let PatKind::TupleStruct(ref path, _, _) = arm.pat.kind {
            is_lang_ctor(cx, path, ResultErr)
        } else {
            false
        }
    }
    if let ExprKind::Match(_, arms, ref source) = expr.kind {
        // desugared from a `?` operator
        if *source == MatchSource::TryDesugar {
            return Some(expr);
        }
        // Otherwise accept a hand-written two-arm Ok/Err match in either order.
        if_chain! {
            if arms.len() == 2;
            if arms[0].guard.is_none();
            if arms[1].guard.is_none();
            if (is_ok(cx, &arms[0]) && is_err(cx, &arms[1])) ||
                (is_ok(cx, &arms[1]) && is_err(cx, &arms[0]));
            then {
                return Some(expr);
            }
        }
    }
    None
}
/// Returns `true` if the lint is allowed in the current context
///
/// Useful for skipping long running code when it's unnecessary
pub fn is_lint_allowed(cx: &LateContext<'_>, lint: &'static Lint, id: HirId) -> bool {
    cx.tcx.lint_level_at_node(lint, id).0 == Level::Allow
}
/// Removes all `&`-reference layers from a pattern, returning the innermost pattern.
pub fn strip_pat_refs<'hir>(mut pat: &'hir Pat<'hir>) -> &'hir Pat<'hir> {
    while let PatKind::Ref(subpat, _) = pat.kind {
        pat = subpat;
    }
    pat
}
/// Returns the width in bits of the given signed integer type on the target.
pub fn int_bits(tcx: TyCtxt<'_>, ity: rustc_ty::IntTy) -> u64 {
    Integer::from_int_ty(&tcx, ity).size().bits()
}
#[allow(clippy::cast_possible_wrap)]
/// Turn a constant int byte representation into an i128
pub fn sext(tcx: TyCtxt<'_>, u: u128, ity: rustc_ty::IntTy) -> i128 {
    // Shift left then arithmetic-shift right so the sign bit of the narrow
    // type is propagated through the upper bits.
    let amt = 128 - int_bits(tcx, ity);
    ((u as i128) << amt) >> amt
}
#[allow(clippy::cast_sign_loss)]
/// clip unused bytes
pub fn unsext(tcx: TyCtxt<'_>, u: i128, ity: rustc_ty::IntTy) -> u128 {
    // Logical shift pair zeroes out the bits above the type's width.
    let amt = 128 - int_bits(tcx, ity);
    ((u as u128) << amt) >> amt
}
/// clip unused bytes
pub fn clip(tcx: TyCtxt<'_>, u: u128, ity: rustc_ty::UintTy) -> u128 {
    let bits = Integer::from_uint_ty(&tcx, ity).size().bits();
    let amt = 128 - bits;
    (u << amt) >> amt
}
/// Returns `true` if any enclosing item, walking up to the crate root, carries
/// the `#[automatically_derived]` attribute.
pub fn any_parent_is_automatically_derived(tcx: TyCtxt<'_>, node: HirId) -> bool {
    let map = &tcx.hir();
    let mut prev_enclosing_node = None;
    let mut enclosing_node = node;
    // `get_parent_item` eventually returns the same id at the top, so stop
    // once the parent no longer changes.
    while Some(enclosing_node) != prev_enclosing_node {
        if is_automatically_derived(map.attrs(enclosing_node)) {
            return true;
        }
        prev_enclosing_node = Some(enclosing_node);
        enclosing_node = map.get_parent_item(enclosing_node);
    }
    false
}
/// Matches a function call with the given path and returns the arguments.
///
/// Usage:
///
/// ```rust,ignore
/// if let Some(args) = match_function_call(cx, cmp_max_call, &paths::CMP_MAX);
/// ```
pub fn match_function_call<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &'tcx Expr<'_>,
    path: &[&str],
) -> Option<&'tcx [Expr<'tcx>]> {
    if_chain! {
        if let ExprKind::Call(fun, args) = expr.kind;
        if let ExprKind::Path(ref qpath) = fun.kind;
        if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
        if match_def_path(cx, fun_def_id, path);
        then {
            return Some(args)
        }
    };
    None
}
/// Checks if the given `DefId` matches any of the paths. Returns the index of matching path, if
/// any.
///
/// Please use `match_any_diagnostic_items` if the targets are all diagnostic items.
pub fn match_any_def_paths(cx: &LateContext<'_>, did: DefId, paths: &[&[&str]]) -> Option<usize> {
    let search_path = cx.get_def_path(did);
    paths
        .iter()
        .position(|p| p.iter().map(|x| Symbol::intern(x)).eq(search_path.iter().copied()))
}
/// Checks if the given `DefId` matches any of provided diagnostic items. Returns the index of
/// matching path, if any.
pub fn match_any_diagnostic_items(cx: &LateContext<'_>, def_id: DefId, diag_items: &[Symbol]) -> Option<usize> {
    diag_items
        .iter()
        .position(|item| cx.tcx.is_diagnostic_item(*item, def_id))
}
/// Checks if the given `DefId` matches the path.
pub fn match_def_path<'tcx>(cx: &LateContext<'tcx>, did: DefId, syms: &[&str]) -> bool {
    // We should probably move to Symbols in Clippy as well rather than interning every time.
    let path = cx.get_def_path(did);
    syms.iter().map(|x| Symbol::intern(x)).eq(path.iter().copied())
}
/// If `expr` is a single-argument call to one of the known panic entry points,
/// returns that argument.
pub fn match_panic_call(cx: &LateContext<'_>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
    if let ExprKind::Call(func, [arg]) = expr.kind {
        expr_path_res(cx, func)
            .opt_def_id()
            .map_or(false, |id| match_panic_def_id(cx, id))
            .then(|| arg)
    } else {
        None
    }
}
/// Checks whether `did` is one of the known panic entry-point functions.
pub fn match_panic_def_id(cx: &LateContext<'_>, did: DefId) -> bool {
    match_any_def_paths(
        cx,
        did,
        &[
            &paths::BEGIN_PANIC,
            &paths::PANIC_ANY,
            &paths::PANICKING_PANIC,
            &paths::PANICKING_PANIC_FMT,
            &paths::PANICKING_PANIC_STR,
        ],
    )
    .is_some()
}
/// Returns the list of condition expressions and the list of blocks in a
/// sequence of `if/else`.
/// E.g., this returns `([a, b], [c, d, e])` for the expression
/// `if a { c } else if b { d } else { e }`.
pub fn if_sequence<'tcx>(mut expr: &'tcx Expr<'tcx>) -> (Vec<&'tcx Expr<'tcx>>, Vec<&'tcx Block<'tcx>>) {
    let mut conds = Vec::new();
    let mut blocks: Vec<&Block<'_>> = Vec::new();
    while let Some(higher::IfOrIfLet { cond, then, r#else }) = higher::IfOrIfLet::hir(expr) {
        conds.push(&*cond);
        if let ExprKind::Block(block, _) = then.kind {
            blocks.push(block);
        } else {
            panic!("ExprKind::If node is not an ExprKind::Block");
        }
        // Continue down the `else if` chain, if present.
        if let Some(else_expr) = r#else {
            expr = else_expr;
        } else {
            break;
        }
    }
    // final `else {..}`
    if !blocks.is_empty() {
        if let ExprKind::Block(block, _) = expr.kind {
            blocks.push(block);
        }
    }
    (conds, blocks)
}
/// Checks if the given function kind is an async function.
pub fn is_async_fn(kind: FnKind<'_>) -> bool {
    matches!(kind, FnKind::ItemFn(_, _, header, _) if header.asyncness == IsAsync::Async)
}
/// Peels away all the compiler generated code surrounding the body of an async function,
pub fn get_async_fn_body(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> {
    // An async fn body desugars to a call that wraps a generator closure; the
    // user's code is the `DropTemps` expression inside that closure's block.
    if let ExprKind::Call(
        _,
        &[
            Expr {
                kind: ExprKind::Closure(_, _, body, _, _),
                ..
            },
        ],
    ) = body.value.kind
    {
        if let ExprKind::Block(
            Block {
                stmts: [],
                expr:
                    Some(Expr {
                        kind: ExprKind::DropTemps(expr),
                        ..
                    }),
                ..
            },
            _,
        ) = tcx.hir().body(body).value.kind
        {
            return Some(expr);
        }
    };
    None
}
// Finds the `#[must_use]` attribute, if any
pub fn must_use_attr(attrs: &[Attribute]) -> Option<&Attribute> {
    attrs.iter().find(|a| a.has_name(sym::must_use))
}
// check if expr is calling method or function with #[must_use] attribute
pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    // Resolve the callee's `DefId` for both plain calls and method calls.
    let did = match expr.kind {
        ExprKind::Call(path, _) => if_chain! {
            if let ExprKind::Path(ref qpath) = path.kind;
            if let def::Res::Def(_, did) = cx.qpath_res(qpath, path.hir_id);
            then {
                Some(did)
            } else {
                None
            }
        },
        ExprKind::MethodCall(_, _, _, _) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
        _ => None,
    };
    did.map_or(false, |did| must_use_attr(cx.tcx.get_attrs(did)).is_some())
}
/// Checks if an expression represents the identity function
/// Only examines closures and `std::convert::identity`
pub fn is_expr_identity_function(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    /// Checks if a function's body represents the identity function. Looks for bodies of the form:
    /// * `|x| x`
    /// * `|x| return x`
    /// * `|x| { return x }`
    /// * `|x| { return x; }`
    fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool {
        // The closure must take exactly one parameter bound to a plain name.
        let id = if_chain! {
            if let [param] = func.params;
            if let PatKind::Binding(_, id, _, _) = param.pat.kind;
            then {
                id
            } else {
                return false;
            }
        };
        // Strip block/return wrappers until we reach the returned expression.
        let mut expr = &func.value;
        loop {
            match expr.kind {
                #[rustfmt::skip]
                ExprKind::Block(&Block { stmts: [], expr: Some(e), .. }, _, )
                | ExprKind::Ret(Some(e)) => expr = e,
                #[rustfmt::skip]
                ExprKind::Block(&Block { stmts: [stmt], expr: None, .. }, _) => {
                    if_chain! {
                        if let StmtKind::Semi(e) | StmtKind::Expr(e) = stmt.kind;
                        if let ExprKind::Ret(Some(ret_val)) = e.kind;
                        then {
                            expr = ret_val;
                        } else {
                            return false;
                        }
                    }
                },
                // Identity requires the parameter returned unchanged, with no coercions.
                _ => return path_to_local_id(expr, id) && cx.typeck_results().expr_adjustments(expr).is_empty(),
            }
        }
    }
    match expr.kind {
        ExprKind::Closure(_, _, body_id, _, _) => is_body_identity_function(cx, cx.tcx.hir().body(body_id)),
        ExprKind::Path(ref path) => is_qpath_def_path(cx, path, expr.hir_id, &paths::CONVERT_IDENTITY),
        _ => false,
    }
}
/// Gets the node where an expression is either used, or it's type is unified with another branch.
pub fn get_expr_use_or_unification_node(tcx: TyCtxt<'tcx>, expr: &Expr<'_>) -> Option<Node<'tcx>> {
    let mut child_id = expr.hir_id;
    let mut iter = tcx.hir().parent_iter(child_id);
    // Climb past transparent wrappers (blocks, match arms, drop-temps) until
    // we reach a node that actually consumes the value.
    loop {
        match iter.next() {
            None => break None,
            Some((id, Node::Block(_))) => child_id = id,
            Some((id, Node::Arm(arm))) if arm.body.hir_id == child_id => child_id = id,
            Some((_, Node::Expr(expr))) => match expr.kind {
                ExprKind::Match(_, [arm], _) if arm.hir_id == child_id => child_id = expr.hir_id,
                ExprKind::Block(..) | ExprKind::DropTemps(_) => child_id = expr.hir_id,
                // An `if` without `else` discards the then-branch value.
                ExprKind::If(_, then_expr, None) if then_expr.hir_id == child_id => break None,
                _ => break Some(Node::Expr(expr)),
            },
            Some((_, node)) => break Some(node),
        }
    }
}
/// Checks if the result of an expression is used, or it's type is unified with another branch.
pub fn is_expr_used_or_unified(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
    !matches!(
        get_expr_use_or_unification_node(tcx, expr),
        None | Some(Node::Stmt(Stmt {
            kind: StmtKind::Expr(_)
                | StmtKind::Semi(_)
                | StmtKind::Local(Local {
                    pat: Pat {
                        kind: PatKind::Wild,
                        ..
                    },
                    ..
                }),
            ..
        }))
    )
}
/// Checks if the expression is the final expression returned from a block.
pub fn is_expr_final_block_expr(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
    matches!(get_parent_node(tcx, expr.hir_id), Some(Node::Block(..)))
}
/// Checks whether the current crate has the `#![no_std]` attribute.
pub fn is_no_std_crate(cx: &LateContext<'_>) -> bool {
    cx.tcx.hir().attrs(hir::CRATE_HIR_ID).iter().any(|attr| {
        if let ast::AttrKind::Normal(ref attr, _) = attr.kind {
            attr.path == sym::no_std
        } else {
            false
        }
    })
}
/// Check if parent of a hir node is a trait implementation block.
/// For example, `f` in
/// ```rust,ignore
/// impl Trait for S {
///     fn f() {}
/// }
/// ```
pub fn is_trait_impl_item(cx: &LateContext<'_>, hir_id: HirId) -> bool {
    if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
        matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
    } else {
        false
    }
}
/// Check if it's even possible to satisfy the `where` clause for the item.
///
/// `trivial_bounds` feature allows functions with unsatisfiable bounds, for example:
///
/// ```ignore
/// fn foo() where i32: Iterator {
///     for _ in 2i32 {}
/// }
/// ```
pub fn fn_has_unsatisfiable_preds(cx: &LateContext<'_>, did: DefId) -> bool {
    use rustc_trait_selection::traits;
    // Only global (parameter-free) predicates can be checked for impossibility.
    let predicates = cx
        .tcx
        .predicates_of(did)
        .predicates
        .iter()
        .filter_map(|(p, _)| if p.is_global(cx.tcx) { Some(*p) } else { None });
    traits::impossible_predicates(
        cx.tcx,
        traits::elaborate_predicates(cx.tcx, predicates)
            .map(|o| o.predicate)
            .collect::<Vec<_>>(),
    )
}
/// Returns the `DefId` of the callee if the given expression is a function or method call.
pub fn fn_def_id(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<DefId> {
    match &expr.kind {
        ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
        ExprKind::Call(
            Expr {
                kind: ExprKind::Path(qpath),
                hir_id: path_hir_id,
                ..
            },
            ..,
        ) => cx.typeck_results().qpath_res(qpath, *path_hir_id).opt_def_id(),
        _ => None,
    }
}
/// Returns Option<String> where String is a textual representation of the type encapsulated in the
/// slice iff the given expression is a slice of primitives (as defined in the
/// `is_recursively_primitive_type` function) and None otherwise.
pub fn is_slice_of_primitives(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
    let expr_type = cx.typeck_results().expr_ty_adjusted(expr);
    let expr_kind = expr_type.kind();
    let is_primitive = match expr_kind {
        rustc_ty::Slice(element_type) => is_recursively_primitive_type(element_type),
        rustc_ty::Ref(_, inner_ty, _) if matches!(inner_ty.kind(), &rustc_ty::Slice(_)) => {
            if let rustc_ty::Slice(element_type) = inner_ty.kind() {
                is_recursively_primitive_type(element_type)
            } else {
                unreachable!()
            }
        },
        _ => false,
    };
    if is_primitive {
        // if we have wrappers like Array, Slice or Tuple, print these
        // and get the type enclosed in the slice ref
        match expr_type.peel_refs().walk(cx.tcx).nth(1).unwrap().expect_ty().kind() {
            rustc_ty::Slice(..) => return Some("slice".into()),
            rustc_ty::Array(..) => return Some("array".into()),
            rustc_ty::Tuple(..) => return Some("tuple".into()),
            _ => {
                // is_recursively_primitive_type() should have taken care
                // of the rest and we can rely on the type that is found
                let refs_peeled = expr_type.peel_refs();
                return Some(refs_peeled.walk(cx.tcx).last().unwrap().to_string());
            },
        }
    }
    None
}
/// returns list of all pairs (a, b) from `exprs` such that `eq(a, b)`
/// `hash` must be consistent with `eq`: equal values must hash equally.
pub fn search_same<T, Hash, Eq>(exprs: &[T], hash: Hash, eq: Eq) -> Vec<(&T, &T)>
where
    Hash: Fn(&T) -> u64,
    Eq: Fn(&T, &T) -> bool,
{
    // Fast paths for the common 0/1/2-element cases.
    match exprs {
        [a, b] if eq(a, b) => return vec![(a, b)],
        _ if exprs.len() <= 2 => return vec![],
        _ => {},
    }
    let mut match_expr_list: Vec<(&T, &T)> = Vec::new();
    // Bucket expressions by their precomputed hash; only expressions in the
    // same bucket need the (expensive) `eq` comparison.
    let mut map: UnhashMap<u64, Vec<&_>> =
        UnhashMap::with_capacity_and_hasher(exprs.len(), BuildHasherDefault::default());
    for expr in exprs {
        match map.entry(hash(expr)) {
            Entry::Occupied(mut o) => {
                for o in o.get() {
                    if eq(o, expr) {
                        match_expr_list.push((o, expr));
                    }
                }
                o.get_mut().push(expr);
            },
            Entry::Vacant(v) => {
                v.insert(vec![expr]);
            },
        }
    }
    match_expr_list
}
/// Peels off all references on the pattern. Returns the underlying pattern and the number of
/// references removed.
pub fn peel_hir_pat_refs(pat: &'a Pat<'a>) -> (&'a Pat<'a>, usize) {
    // Iterative form: strip one `&` layer per loop turn, counting as we go.
    let mut current = pat;
    let mut count = 0;
    while let PatKind::Ref(inner, _) = current.kind {
        current = inner;
        count += 1;
    }
    (current, count)
}
/// Peels off expressions while the given closure returns `Some`.
pub fn peel_hir_expr_while<'tcx>(
    mut expr: &'tcx Expr<'tcx>,
    mut f: impl FnMut(&'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>>,
) -> &'tcx Expr<'tcx> {
    while let Some(e) = f(expr) {
        expr = e;
    }
    expr
}
/// Peels off up to the given number of references on the expression. Returns the underlying
/// expression and the number of references removed.
pub fn peel_n_hir_expr_refs(expr: &'a Expr<'a>, count: usize) -> (&'a Expr<'a>, usize) {
    let mut remaining = count;
    let e = peel_hir_expr_while(expr, |e| match e.kind {
        ExprKind::AddrOf(ast::BorrowKind::Ref, _, e) if remaining != 0 => {
            remaining -= 1;
            Some(e)
        },
        _ => None,
    });
    (e, count - remaining)
}
/// Peels off all references on the expression. Returns the underlying expression and the number of
/// references removed.
pub fn peel_hir_expr_refs(expr: &'a Expr<'a>) -> (&'a Expr<'a>, usize) {
    let mut count = 0;
    let e = peel_hir_expr_while(expr, |e| match e.kind {
        ExprKind::AddrOf(ast::BorrowKind::Ref, _, e) => {
            count += 1;
            Some(e)
        },
        _ => None,
    });
    (e, count)
}
/// Removes `AddrOf` operators (`&`) or deref operators (`*`), but only if a reference type is
/// dereferenced. An overloaded deref such as `Vec` to slice would not be removed.
pub fn peel_ref_operators<'hir>(cx: &LateContext<'_>, mut expr: &'hir Expr<'hir>) -> &'hir Expr<'hir> {
    loop {
        match expr.kind {
            ExprKind::AddrOf(_, _, e) => expr = e,
            // Only strip `*` when the operand really is a reference type.
            ExprKind::Unary(UnOp::Deref, e) if cx.typeck_results().expr_ty(e).is_ref() => expr = e,
            _ => break,
        }
    }
    expr
}
/// Runs `cargo metadata` and yields the parsed metadata on success.
/// On failure it emits `$lint` at `DUMMY_SP` and `return`s from the *calling*
/// function, so it is only usable inside functions returning `()`.
#[macro_export]
macro_rules! unwrap_cargo_metadata {
    ($cx: ident, $lint: ident, $deps: expr) => {{
        let mut command = cargo_metadata::MetadataCommand::new();
        if !$deps {
            command.no_deps();
        }
        match command.exec() {
            Ok(metadata) => metadata,
            Err(err) => {
                span_lint($cx, $lint, DUMMY_SP, &format!("could not read cargo metadata: {}", err));
                return;
            },
        }
    }};
}
/// Checks whether the HIR type resolves to a definition carrying a `#[cfg]`
/// or `#[cfg_attr]` attribute.
pub fn is_hir_ty_cfg_dependant(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool {
    if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
        if let Res::Def(_, def_id) = path.res {
            return cx.tcx.has_attr(def_id, sym::cfg) || cx.tcx.has_attr(def_id, sym::cfg_attr);
        }
    }
    false
}
/// Item visitor that records whether any `const` item whose name is in
/// `names` is a `#[rustc_test_marker]` test-descriptor struct.
struct VisitConstTestStruct<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Candidate function names to match against test descriptor consts.
    names: Vec<Symbol>,
    // Set to `true` once a matching test marker const is found.
    found: bool,
}
impl<'hir> ItemLikeVisitor<'hir> for VisitConstTestStruct<'hir> {
    fn visit_item(&mut self, item: &Item<'_>) {
        if let ItemKind::Const(ty, _body) = item.kind {
            if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
                // We could also check for the type name `test::TestDescAndFn`
                // and the `#[rustc_test_marker]` attribute?
                if let Res::Def(DefKind::Struct, _) = path.res {
                    let has_test_marker = self
                        .tcx
                        .hir()
                        .attrs(item.hir_id())
                        .iter()
                        .any(|a| a.has_name(sym::rustc_test_marker));
                    if has_test_marker && self.names.contains(&item.ident.name) {
                        self.found = true;
                    }
                }
            }
        }
    }
    fn visit_trait_item(&mut self, _: &TraitItem<'_>) {}
    fn visit_impl_item(&mut self, _: &ImplItem<'_>) {}
    fn visit_foreign_item(&mut self, _: &ForeignItem<'_>) {}
}
/// Checks if the function containing the given `HirId` is a `#[test]` function
///
/// Note: If you use this function, please add a `#[test]` case in `tests/ui_test`.
pub fn is_in_test_function(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
    let names: Vec<_> = tcx
        .hir()
        .parent_iter(id)
        // Since you can nest functions we need to collect all until we leave
        // function scope
        .filter_map(|(_id, node)| {
            if let Node::Item(item) = node {
                if let ItemKind::Fn(_, _, _) = item.kind {
                    return Some(item.ident.name);
                }
            }
            None
        })
        .collect();
    // Match the collected function names against the test-descriptor consts
    // that the test harness generates in the same module.
    let parent_mod = tcx.parent_module(id);
    let mut vis = VisitConstTestStruct {
        tcx,
        names,
        found: false,
    };
    tcx.hir().visit_item_likes_in_module(parent_mod, &mut vis);
    vis.found
}
/// Checks whether item either has `test` attribute applied, or
/// is a module with `test` in its name.
///
/// Note: If you use this function, please add a `#[test]` case in `tests/ui_test`.
pub fn is_test_module_or_function(tcx: TyCtxt<'_>, item: &Item<'_>) -> bool {
    // Note precedence: `a || (b && c)` — either inside a test function, or a
    // module whose name contains a `test`/`tests` segment.
    is_in_test_function(tcx, item.hir_id())
        || matches!(item.kind, ItemKind::Mod(..))
            && item.ident.name.as_str().split('_').any(|a| a == "test" || a == "tests")
}
/// Generates lookup tables pairing binary-operator lang items with their
/// compound-assignment counterparts, plus the `binop_traits` mapping function.
macro_rules! op_utils {
    ($($name:ident $assign:ident)*) => {
        /// Binary operation traits like `LangItem::Add`
        pub static BINOP_TRAITS: &[LangItem] = &[$(LangItem::$name,)*];
        /// Operator-Assign traits like `LangItem::AddAssign`
        pub static OP_ASSIGN_TRAITS: &[LangItem] = &[$(LangItem::$assign,)*];
        /// Converts `BinOpKind::Add` to `(LangItem::Add, LangItem::AddAssign)`, for example
        pub fn binop_traits(kind: hir::BinOpKind) -> Option<(LangItem, LangItem)> {
            match kind {
                $(hir::BinOpKind::$name => Some((LangItem::$name, LangItem::$assign)),)*
                _ => None,
            }
        }
    };
}
// One entry per overloadable arithmetic/bitwise/shift operator.
op_utils! {
    Add    AddAssign
    Sub    SubAssign
    Mul    MulAssign
    Div    DivAssign
    Rem    RemAssign
    BitXor BitXorAssign
    BitAnd BitAndAssign
    BitOr  BitOrAssign
    Shl    ShlAssign
    Shr    ShrAssign
}
| 34.866298 | 120 | 0.561626 |
14953b872b1a60b2f3dedd22da1e0ffb535388f3 | 4,394 | // The From trait is used for value-to-value conversions.
// If From is implemented correctly for a type, the Into trait should work conversely.
// You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html
#[derive(Debug)]
struct Person {
    name: String,
    age: usize,
}
// We implement the Default trait to use it as a fallback
// when the provided string is not convertible into a Person object
impl Default for Person {
    fn default() -> Person {
        Person {
            name: String::from("John"),
            age: 30,
        }
    }
}
/// Parses a `"name,age"` string into a `Person`.
///
/// Falls back to `Person::default()` when:
/// * the input is empty,
/// * there is no comma (so no age field at all),
/// * the name part is empty,
/// * the age part does not parse as a `usize` — this also covers trailing
///   commas or extra fields (e.g. `"Mike,32,"` or `"Mike,32,man"`), because
///   everything after the *first* comma is treated as the age field.
impl From<&str> for Person {
    fn from(s: &str) -> Person {
        // Split into at most two pieces: the name and "everything else".
        let mut parts = s.splitn(2, ',');
        match (parts.next(), parts.next()) {
            (Some(name), Some(age)) if !name.is_empty() => match age.parse::<usize>() {
                Ok(age) => Person {
                    name: name.to_string(),
                    age,
                },
                // Age wasn't a clean number: fall back to the default person.
                Err(_) => Person::default(),
            },
            // Empty input, missing comma, or empty name.
            _ => Person::default(),
        }
    }
}
fn main() {
    // `From` gives us both `Person::from(..)` and `.into()` for free,
    // so exercise each spelling once.
    let p1: Person = "Mark,20".into();
    let p2 = Person::from("Gerald,70");
    println!("{:?}", p1);
    println!("{:?}", p2);
}
// Unit tests covering the happy path, the default fallback, and every
// malformed-input case for the `From<&str>` conversion above.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_default() {
        // Test that the default person is 30 year old John
        let dp = Person::default();
        assert_eq!(dp.name, "John");
        assert_eq!(dp.age, 30);
    }
    #[test]
    fn test_bad_convert() {
        // Test that John is returned when bad string is provided
        let p = Person::from("");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_good_convert() {
        // Test that "Mark,20" works
        let p = Person::from("Mark,20");
        assert_eq!(p.name, "Mark");
        assert_eq!(p.age, 20);
    }
    #[test]
    fn test_bad_age() {
        // Test that "Mark,twenty" will return the default person due to an error in parsing age
        let p = Person::from("Mark,twenty");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_comma_and_age() {
        let p: Person = Person::from("Mark");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_age() {
        let p: Person = Person::from("Mark,");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name() {
        let p: Person = Person::from(",1");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name_and_age() {
        let p: Person = Person::from(",");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name_and_invalid_age() {
        let p: Person = Person::from(",one");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_trailing_comma() {
        // Everything after the first comma is the age field, so "32," fails to parse.
        let p: Person = Person::from("Mike,32,");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_trailing_comma_and_some_string() {
        let p: Person = Person::from("Mike,32,man");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
}
| 29.099338 | 96 | 0.561447 |
ff09d0bec75600c504e28403d36bfa24a280f3db | 9,307 |
use rocket::response::Flash;
use rocket::request::FlashMessage;
use rocket::response::content::Html;
use blog::*;
use titlecase::titlecase;
use super::{BLOG_URL, USER_LOGIN_URL, ADMIN_LOGIN_URL};
/// HTML message shown to non-admin visitors who attempt to post an article.
pub const UNAUTHORIZED_POST_MESSAGE: &'static str = "You are not authorized to post articles. Please login as an administrator.<br><a href=\"admin\">Admin Login</a>";
// Opening/closing markup wrapped around generic page bodies.
const GENERIC_PAGE_START: &'static str = "<div class=\"v-content\">\n\t\t\t\t\t\t";
const GENERIC_PAGE_END: &'static str = "\n\t\t\t\t\t</div>";
// Indentation unit used when assembling nested HTML output.
const TABS: &'static str = "\t\t\t\t\t\t\t";
pub fn process_flash(flash_opt: Option<FlashMessage>) -> Option<String> {
let fmsg: Option<String>;
if let Some(flash) = flash_opt {
if flash.name() == "error" {
fmsg = Some(alert_danger( flash.msg() ));
} else if flash.name() == "warning" {
fmsg = Some(alert_warning( flash.msg() ));
} else if flash.name() == "success" {
fmsg = Some(alert_success( flash.msg() ));
} else {
fmsg = Some(alert_info( flash.msg() ));
}
} else {
fmsg = None;
}
fmsg
}
/// Renders the admin navbar dropdown with `username` as the dropdown label.
pub fn admin_nav_username(username: &str) -> String {
    // Static template with a single `{user}` placeholder substituted below.
    const TEMPLATE: &str = r##"
    <li class="v-nav-item nav-item dropdown">
        <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
        {user}
        </a>
        <div class="dropdown-menu" aria-labelledby="navbarDropdown">
        <a class="dropdown-item" href="/insert">New Article</a>
        <!-- <a class="dropdown-item" href="#">Something else here</a> -->
        <div class="dropdown-divider"></div>
        <a class="dropdown-item" href="/logout">Logout</a>
        </div>
    </li>
    "##;
    TEMPLATE.replace("{user}", username)
}
/// Static variant of `admin_nav_username`.
///
/// NOTE(review): this returns the template verbatim, so the literal text
/// `{user}` is rendered in the dropdown instead of an actual username —
/// presumably callers should prefer `admin_nav_username`; confirm whether
/// this function is still needed or should substitute a real name.
pub fn admin_nav() -> &'static str {
    r##"
    <li class="v-nav-item nav-item dropdown">
        <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
        {user}
        </a>
        <div class="dropdown-menu" aria-labelledby="navbarDropdown">
        <a class="dropdown-item" href="/insert">New Article</a>
        <!-- <a class="dropdown-item" href="#">Something else here</a> -->
        <div class="dropdown-divider"></div>
        <a class="dropdown-item" href="/logout">Logout</a>
        </div>
    </li>
    "##
}
/// Renders the navbar item linking to the admin login page.
pub fn admin_nav_login() -> &'static str {
    const LOGIN_ITEM: &'static str =
        r##"<li class="v-nav-item nav-item"><a class="nav-link" href="/admin">Login</a></li>"##;
    LOGIN_ITEM
}
/// Shared renderer for the Bootstrap alert boxes below.
/// `level` is the Bootstrap contextual class suffix (`danger`, `success`, ...).
fn alert_box(level: &str, msg: &str) -> String {
    format!(r##"
    <div class="v-centered-msg alert alert-{level}" role="alert">
        {why}
    </div>
    "##, level=level, why=msg)
}
/// Renders `msg` inside a Bootstrap "danger" (red) alert box.
pub fn alert_danger(msg: &str) -> String {
    alert_box("danger", msg)
}
/// Renders `msg` inside a Bootstrap "success" (green) alert box.
pub fn alert_success(msg: &str) -> String {
    alert_box("success", msg)
}
/// Renders `msg` inside a Bootstrap "info" (blue) alert box.
pub fn alert_info(msg: &str) -> String {
    alert_box("info", msg)
}
/// Renders `msg` inside a Bootstrap "warning" (yellow) alert box.
pub fn alert_warning(msg: &str) -> String {
    alert_box("warning", msg)
}
/// Renders `msg` inside a Bootstrap "primary" alert box.
pub fn alert_primary(msg: &str) -> String {
    alert_box("primary", msg)
}
/// Renders the login form, posting credentials to `url`.
///
/// Fix: the password validation message read "requierd" — corrected to
/// "required". (The username label still says "Email Address" while the
/// field is a username — presumably intentional; left as-is.)
pub fn login_form(url: &str) -> String {
    format!(r##"
    <form id="needs-validation" action="{url}" name="login_form" method="post" novalidate>
    <div class="form-group" id="userGroup">
    <label for="usernameField">Email Address</label>
    <div class="col-md-9 mb-3">
    <input type="text" name="username" value="" class="form-control" id="usernameField" aria-describedby="idHelp" placeholder="Username" required>
    <div class="invalid-feedback">
    Please specify a username
    </div>
    </div>
    <!-- <small id="idHelp" class="form-text text-muted">Your email address will not be shared with anyone else.</small> -->
    </div>
    <div class="form-group" id="passGroup">
    <label for="passwordField">Password</label>
    <div class="col-md-9 mb-3">
    <input type="password" name="password" class="form-control" id="passwordField" placeholder="Password" required>
    <div class="invalid-feedback">
    A password is required.
    </div>
    <input type="password" id="passwordHidden" class="hidden-pass form-control">
    </div>
    </div>
    <div class="v-submit">
    <button type="submit" class="btn btn-primary" id="submit-button-id">Login</button>
    </div>
    <!-- <button type="submit" class="btn btn-faded" id="submit-button-id">Login</button> -->
    <!-- <button type="submit" class="btn btn-dark" id="submit-button-id">Login</button> -->
    <!-- <button type="submit" class="btn btn-success" id="submit-button-id">Login</button> -->
    </form>
    "##, url=url)
}
// http://localhost:8000/admin
/// Render the login form after a failed attempt: a danger alert explaining
/// `why` is shown above the form and the username field is pre-filled with
/// the previously entered `user`.
/// Fix: the password feedback message read "requierd" — corrected to
/// "required".
pub fn login_form_fail(url: &str, user: &str, why: &str) -> String {
    format!(r##"
{alert}
<form id="needs-validation" action="{url}" name="login_form" method="post" novalidate>
  <div class="form-group" id="userGroup">
    <label for="usernameField">Email Address</label>
    <div class="col-md-9 mb-3">
    <input type="text" name="username" value="{user}" class="form-control" id="usernameField" aria-describedby="idHelp" placeholder="Username" required>
    <div class="invalid-feedback">
        Please specify a username
    </div>
    </div>
    <!-- <small id="idHelp" class="form-text text-muted">Your email address will not be shared with anyone else.</small> -->
  </div>
  <div class="form-group" id="passGroup">
    <label for="passwordField">Password</label>
    <div class="col-md-9 mb-3">
    <input type="password" name="password" class="form-control" id="passwordField" placeholder="Password" required>
    <div class="invalid-feedback">
        A password is required.
    </div>
    <input type="password" id="passwordHidden" class="hidden-pass form-control">
    </div>
  </div>
  <div class="v-submit">
    <button type="submit" class="btn btn-primary" id="submit-button-id">Login</button>
  </div>
  <!-- <button type="submit" class="btn btn-faded" id="submit-button-id">Login</button> -->
  <!-- <button type="submit" class="btn btn-dark" id="submit-button-id">Login</button> -->
  <!-- <button type="submit" class="btn btn-success" id="submit-button-id">Login</button> -->
</form>
"##, url=url, user=user, alert=alert_danger(&format!("Login failed: {}", why)))
}
/// Render `tags` as a space-prefixed list of `<a>` links to the blog's
/// per-tag index page (`{BLOG_URL}tag?tag={tag}`).
///
/// Takes a slice instead of `&Vec<String>` (idiomatic; existing `&vec`
/// callers still compile via deref coercion) and builds the string with an
/// iterator chain instead of a push loop.
pub fn link_tags(tags: &[String]) -> String {
    tags.iter()
        .map(|t| format!(" <a href=\"{url}tag?tag={tag}\">{tag}</a>", url = BLOG_URL, tag = t))
        .collect()
}
| 48.222798 | 185 | 0.447083 |
89e37561c8770e3afd5fc5b5da2876c2bf657efd | 2,049 | pub mod auth;
pub mod header;
pub mod typed;
pub mod untyped;
pub use header::Header;
pub use untyped::*;
/// Simple NewType around `Vec<Header>` that gives many helpful methods when dealing with headers
/// in [super::Request], [super::Response] and [super::SipMessage].
// Invariant: plain insertion order is preserved; `unique_push` is the only
// method that removes existing entries.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct Headers(Vec<Header>);
impl Headers {
    /// Append `header` to the end of the list.
    pub fn push(&mut self, header: Header) {
        self.0.push(header);
    }

    /// Append `header`, first dropping every stored header of the same enum
    /// variant so at most one instance of that variant remains.
    pub fn unique_push(&mut self, header: Header) {
        let variant = std::mem::discriminant(&header);
        self.0
            .retain(|existing| std::mem::discriminant(existing) != variant);
        self.push(header);
    }

    /// Iterate over the headers by shared reference.
    pub fn iter(&self) -> impl Iterator<Item = &Header> {
        self.0.iter()
    }

    /// Append every header in `headers`.
    pub fn extend(&mut self, headers: Vec<Header>) {
        self.0.extend(headers);
    }

    /// Iterate over the headers by mutable reference.
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut Header> {
        self.0.iter_mut()
    }

    /// `true` when no headers are stored.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Keep only the headers for which `predicate` returns `true`.
    pub fn retain<F>(&mut self, predicate: F)
    where
        F: FnMut(&Header) -> bool,
    {
        self.0.retain(predicate);
    }
}
// Consuming iteration simply delegates to the inner `Vec`.
impl IntoIterator for Headers {
    type IntoIter = ::std::vec::IntoIter<Self::Item>;
    type Item = Header;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
// A single header becomes a one-element list.
impl std::convert::From<Header> for Headers {
    fn from(header: Header) -> Self {
        Self(vec![header])
    }
}
// Wrap an existing header list without copying.
impl std::convert::From<Vec<Header>> for Headers {
    fn from(headers: Vec<Header>) -> Self {
        Self(headers)
    }
}
// Unwrap back into the inner `Vec`.
impl std::convert::From<Headers> for Vec<Header> {
    fn from(from: Headers) -> Self {
        from.0
    }
}
impl std::fmt::Display for Headers {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.is_empty() {
write!(f, "")
} else {
write!(
f,
"{}\r\n",
self.iter()
.map(|s| s.to_string())
.collect::<Vec<_>>()
.join("\r\n")
)
}
}
}
| 22.516484 | 97 | 0.527574 |
eb711f68a13048fda45c24baf81d65b314c55ad4 | 86 | fn foo() {
S {};
S { x, y: 32, };
S { x, y: 32, ..Default::default() };
}
| 14.333333 | 41 | 0.348837 |
71a44a01a69aa8bded2625f70cdddbb49450d9ab | 762 | // traits2.rs
//
// Your task is to implement the trait
// `AppendBar' for a vector of strings.
//
// To implement this trait, consider for
// a moment what it means to 'append "Bar"'
// to a vector of strings.
//
// No boiler plate code this time,
// you can do this!
/// Consumes a collection and returns it with the string "Bar" appended.
trait AppendBar {
    fn append_bar(self) -> Self;
}

impl AppendBar for Vec<String> {
    fn append_bar(mut self) -> Self {
        // `push` returns `()`; the previous `&self.push(..)` took a
        // useless borrow of that unit value and discarded it.
        self.push(String::from("Bar"));
        self
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Pops come back in LIFO order: "Bar" was appended last, "Foo" was the seed.
    #[test]
    fn is_vec_pop_eq_bar() {
        let mut foo = vec![String::from("Foo")].append_bar();
        assert_eq!(foo.pop().unwrap(), String::from("Bar"));
        assert_eq!(foo.pop().unwrap(), String::from("Foo"));
    }
}
| 21.166667 | 61 | 0.598425 |
e6c4aded1e00fedcf0ea88a430b3dbf4b82e8cf0 | 239 | // macros1.rs
// Make me compile! Execute `rustlings hint macros1` for hints :)
// run using ``rustlings run macros1``
// Declarative macro with a single zero-argument rule. `macro_rules!`
// definitions are only visible after their point of definition, so the
// macro must precede `main` for `my_macro!()` to resolve.
macro_rules! my_macro {
    () => {
        println!("Check out my macro!");
    };
}
fn main() {
    my_macro!();
}
| 15.933333 | 65 | 0.58159 |
21ce08ab69c73cb5bf5244bffa361d39ca5e04f2 | 116 | // run-pass
#![allow(dead_code)]
#![feature(const_let)]
// An enum discriminant may be any block expression usable in a const
// context; the `let` binding inside the block is what required the
// (then-unstable) `const_let` feature enabled above.
enum Foo {
    Bar = { let x = 1; 3 }
}
pub fn main() {}
| 10.545455 | 26 | 0.534483 |
d689e6dda9b0c09b3f02356bfc8d4672b1f1b44b | 2,690 | // Copyright 2017-2021 Lukas Pustina <[email protected]>
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::convert::TryFrom;
use std::net::IpAddr;
use std::str::FromStr;
use serde::Deserialize;
use tracing::trace;
use crate::nameserver::NameServerConfig;
use crate::services::server_lists::{OpenNic, ServerListDownloader};
use crate::services::{Error, Result};
use crate::utils::deserialize::des_f32_from_string;
static BASE_URI: &str = &"https://api.opennic.org/geoip/?json";
/// One nameserver entry as returned by the OpenNIC GeoIP API.
#[derive(Deserialize)]
pub struct NameServer {
    /// DNS host name of the server.
    pub host: String,
    /// IP address as a string; parsed into an `IpAddr` during conversion.
    pub ip: String,
    /// Reliability percentage. The API serializes it as a string under the
    /// field name `stat`, hence the rename and custom deserializer.
    #[serde(deserialize_with = "des_f32_from_string", rename = "stat")]
    pub reliability: f32,
}
/// Cf. https://wiki.opennic.org/api/geoip
///
/// Fetch up to `spec.number` nameservers matching the reliability,
/// IP-version and anonymity criteria in `spec`, returning them as UDP
/// `NameServerConfig`s. Entries whose IP fails to parse are silently
/// dropped rather than failing the whole download.
pub async fn download(downloader: ServerListDownloader, spec: &OpenNic) -> Result<Vec<NameServerConfig>> {
    trace!("Downloading servers from OpenNic");
    // Query parameters understood by the GeoIP endpoint: result count,
    // minimum reliability percentage, IP version, and anonymity flag.
    let params = [
        ("res", &spec.number.to_string()),
        ("pct", &spec.reliability.to_string()),
        ("ipv", &spec.ipv.to_string()),
        ("anon", &spec.anon.to_string()),
    ];
    let res = downloader
        .http_client
        .get(BASE_URI)
        .query(&params)
        .timeout(downloader.opts.timeout)
        .send()
        .await
        .map_err(|e| Error::HttpClientError {
            why: "call failed",
            source: e,
        })?;
    if !res.status().is_success() {
        return Err(Error::HttpClientErrorMessage {
            why: "unexpected status code",
            details: format!("status code: {}", res.status()),
        });
    }
    let body = res.text().await.map_err(|e| Error::HttpClientError {
        why: "reading body failed",
        source: e,
    })?;
    let servers = serde_json::from_str::<Vec<NameServer>>(&body).map_err(Error::from)?;
    // `try_from` may fail per entry (unparsable IP); keep only the Ok ones.
    let nameserver_configs: Vec<NameServerConfig> = servers
        .into_iter()
        .map(TryFrom::try_from)
        .map(Result::ok)
        .flatten()
        .collect();
    Ok(nameserver_configs)
}
impl TryFrom<NameServer> for NameServerConfig {
type Error = Error;
fn try_from(ns: NameServer) -> std::result::Result<Self, Self::Error> {
let ip_addr = IpAddr::from_str(&ns.ip).map_err(|_| Error::ParserError {
what: ns.ip,
to: "IpAddr",
why: "is not a valid IP address".to_string(),
})?;
Ok(NameServerConfig::udp_with_name((ip_addr, 53), "opennic".to_string()))
}
}
| 30.568182 | 106 | 0.625651 |
d5c2033b516a6bc18719dbb5243688bac625455e | 5,384 | // Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration for a Habitat JobSrv Worker
use std::net::{IpAddr, Ipv4Addr};
use std::path::PathBuf;
use github_api_client::config::GitHubCfg;
use hab_core::config::ConfigFile;
use hab_core::url;
use error::Error;
/// One entry per Job Server the worker should connect to.
pub type JobSrvCfg = Vec<JobSrvAddr>;
#[derive(Clone, Debug, Deserialize)]
#[serde(default)]
pub struct Config {
    /// Enable automatic publishing for all builds by default
    pub auto_publish: bool,
    /// Filepath where persistent application data is stored
    pub data_path: PathBuf,
    /// Filepath to where the builder encryption keys can be found
    pub key_dir: PathBuf,
    /// Path to worker event logs
    pub log_path: PathBuf,
    /// Default channel name for Publish post-processor to use to determine which channel to
    /// publish artifacts to
    pub bldr_channel: String,
    /// Default URL for Publish post-processor to use to determine which Builder to use
    /// for retrieving signing keys and publishing artifacts
    pub bldr_url: String,
    /// List of Job Servers to connect to
    pub jobsrv: JobSrvCfg,
    /// Comma-separated list of feature flags to enable (e.g. "FOO,BAR").
    pub features_enabled: String,
    /// Github application id to use for private repo access
    pub github: GitHubCfg,
    /// Whether builds run inside the airlock sandbox.
    pub airlock_enabled: bool,
    /// Whether or not to recreate network namespace if one already exists
    pub recreate_ns_dir: bool,
    /// Network interface handed to the build namespace, if any.
    pub network_interface: Option<String>,
    /// Gateway address for the build namespace, if any.
    pub network_gateway: Option<IpAddr>,
}
impl Config {
    /// For every configured job server, build the ZMQ endpoint triple
    /// `(heartbeat, queue, log)` as `tcp://host:port` URLs.
    pub fn jobsrv_addrs(&self) -> Vec<(String, String, String)> {
        self.jobsrv
            .iter()
            .map(|srv| {
                (
                    format!("tcp://{}:{}", srv.host, srv.heartbeat),
                    format!("tcp://{}:{}", srv.host, srv.port),
                    format!("tcp://{}:{}", srv.host, srv.log_port),
                )
            })
            .collect()
    }

    /// Directory under `data_path` holding the airlock network namespace.
    pub fn ns_dir_path(&self) -> PathBuf {
        self.data_path.join("network").join("airlock-ns")
    }
}
impl Default for Config {
    // Defaults mirror a local development setup: data/logs in /tmp, keys in
    // the supervised service directory, one job server on localhost.
    fn default() -> Self {
        Config {
            auto_publish: true,
            data_path: PathBuf::from("/tmp"),
            log_path: PathBuf::from("/tmp"),
            key_dir: PathBuf::from("/hab/svc/builder-worker/files"),
            bldr_channel: String::from("unstable"),
            bldr_url: url::default_bldr_url(),
            jobsrv: vec![JobSrvAddr::default()],
            features_enabled: "".to_string(),
            github: GitHubCfg::default(),
            airlock_enabled: true,
            recreate_ns_dir: false,
            network_interface: None,
            network_gateway: None,
        }
    }
}
// Enables loading `Config` from a TOML file via `ConfigFile::from_raw`/`from_file`.
impl ConfigFile for Config {
    type Error = Error;
}
/// Address/port bundle for a single Job Server connection.
#[derive(Clone, Debug, Deserialize)]
#[serde(default)]
pub struct JobSrvAddr {
    pub host: IpAddr,
    pub port: u16,
    pub heartbeat: u16,
    pub log_port: u16,
}
impl Default for JobSrvAddr {
    // Localhost with the conventional queue/heartbeat/log port triple.
    fn default() -> Self {
        JobSrvAddr {
            host: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
            port: 5566,
            heartbeat: 5567,
            log_port: 5568,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Parses a TOML document exercising both a fully- and a partially-
    // specified [[jobsrv]] entry; unspecified fields must fall back to
    // `JobSrvAddr::default()` (checked via jobsrv[1].heartbeat == 5567).
    #[test]
    fn config_from_file() {
        let content = r#"
        data_path = "/path/to/data"
        log_path = "/path/to/logs"
        key_dir = "/path/to/key"
        features_enabled = "FOO,BAR"
        recreate_ns_dir = true
        network_interface = "eth1"
        network_gateway = "192.168.10.1"
        [[jobsrv]]
        host = "1:1:1:1:1:1:1:1"
        port = 9000
        heartbeat = 9001
        log_port = 9021
        [[jobsrv]]
        host = "2.2.2.2"
        port = 9000
        "#;
        let config = Config::from_raw(&content).unwrap();
        assert_eq!(&format!("{}", config.data_path.display()), "/path/to/data");
        assert_eq!(&format!("{}", config.log_path.display()), "/path/to/logs");
        assert_eq!(&format!("{}", config.key_dir.display()), "/path/to/key");
        assert_eq!(&format!("{}", config.jobsrv[0].host), "1:1:1:1:1:1:1:1");
        assert_eq!(config.jobsrv[0].port, 9000);
        assert_eq!(config.jobsrv[0].heartbeat, 9001);
        assert_eq!(config.jobsrv[0].log_port, 9021);
        assert_eq!(&format!("{}", config.jobsrv[1].host), "2.2.2.2");
        assert_eq!(config.jobsrv[1].port, 9000);
        assert_eq!(config.jobsrv[1].heartbeat, 5567);
        assert_eq!(&config.features_enabled, "FOO,BAR");
        assert_eq!(config.network_interface, Some(String::from("eth1")));
        assert_eq!(config.airlock_enabled, true);
        assert_eq!(config.recreate_ns_dir, true);
        assert_eq!(
            config.network_gateway,
            Some(IpAddr::V4(Ipv4Addr::new(192, 168, 10, 1)))
        );
    }
}
| 32.630303 | 92 | 0.615713 |
4809225ac7f212d8266c73fd8658b01f82b39524 | 1,259 | #[test]
fn kill_window() {
use crate::{KillWindow, TargetWindow};
use std::borrow::Cow;
// Kill the current window or the window at target-window, removing it from any sessions
// to which it is linked
//
// # Manual
// tmux ^1.7:
// ```text
// tmux kill-window [-a] [-t target-window]
// (alias: killw)
// ```
//
// tmux ^0.8:
// ```text
// tmux kill-window [-t target-window]
// (alias: killw)
// ```
let target_window = TargetWindow::Raw("1").to_string();
let mut kill_pane = KillWindow::new();
#[cfg(feature = "tmux_1_7")]
kill_pane.parent_sighup();
#[cfg(feature = "tmux_0_8")]
kill_pane.target_window(&target_window);
#[cfg(not(feature = "cmd_alias"))]
let cmd = "kill-window";
#[cfg(feature = "cmd_alias")]
let cmd = "killw";
let mut s = Vec::new();
#[cfg(feature = "tmux_1_7")]
s.push("-a");
#[cfg(feature = "tmux_0_8")]
s.extend_from_slice(&["-t", "1"]);
let s = s.into_iter().map(|a| a.into()).collect();
assert_eq!(kill_pane.0.bin, Cow::Borrowed("tmux"));
assert_eq!(kill_pane.0.bin_args, None);
assert_eq!(kill_pane.0.cmd, Some(Cow::Borrowed(cmd)));
assert_eq!(kill_pane.0.cmd_args, Some(s));
}
| 27.977778 | 92 | 0.575854 |
2362bf13236949939f2fd02c065c77b1b82ba214 | 5,216 | use blake2::Blake2b;
use bls12_381_plus::Scalar;
use ff::Field;
use hkdf::HkdfExtract;
use rand_chacha::ChaChaRng;
use rand_core::{CryptoRng, RngCore, SeedableRng};
use serde::{Deserialize, Serialize};
use short_group_signatures_core::lib::*;
use zeroize::Zeroize;
/// The secret key contains a field element for each
/// message that is signed and two extra.
/// See section 4.2 in
/// <https://eprint.iacr.org/2015/525.pdf> and
/// <https://eprint.iacr.org/2017/1197.pdf>
///
/// `w` corresponds to m' in the paper to achieve
/// EUF-CMA security level.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct SecretKey {
    // m' in the papers cited above; provides EUF-CMA security.
    pub(crate) w: Scalar,
    pub(crate) x: Scalar,
    // One y_i per signable message; backing vector capacity is 128.
    #[serde(with = "VecSerializer")]
    pub(crate) y: Vec<Scalar, U128>,
}
// Wipe all scalar material in place.
impl Zeroize for SecretKey {
    fn zeroize(&mut self) {
        self.w.zeroize();
        self.x.zeroize();
        for y in self.y.iter_mut() {
            y.zeroize();
        }
    }
}
// Zeroize on drop so key bytes do not linger in freed memory.
impl Drop for SecretKey {
    fn drop(&mut self) {
        self.zeroize();
    }
}
// All-zero scalars: an intentionally invalid placeholder key
// (see `is_valid`/`is_invalid`).
impl Default for SecretKey {
    fn default() -> Self {
        Self {
            w: Scalar::zero(),
            x: Scalar::zero(),
            y: Vec::new(),
        }
    }
}
impl SecretKey {
    /// Serialized size of one scalar in bytes (big-endian on the wire).
    const SCALAR_SIZE: usize = 32;

    /// Compute a secret key deterministically from a hash of `data`.
    ///
    /// `data` is HKDF-extracted (Blake2b, fixed salt), expanded with the
    /// message `count` as domain-separation info into a 32-byte seed for a
    /// ChaCha RNG, from which the scalars are drawn.
    /// Returns `None` when `count` is 0 or greater than 128.
    pub fn hash<B: AsRef<[u8]>>(count: usize, data: B) -> Option<Self> {
        const SALT: &'static [u8] = b"PS-SIG-KEYGEN-SALT-";
        let info = (count as u32).to_be_bytes();
        let mut extractor = HkdfExtract::<Blake2b>::new(Some(SALT));
        extractor.input_ikm(data.as_ref());
        extractor.input_ikm(&[0u8]);
        let mut okm = [0u8; 32];
        let (_, h) = extractor.finalize();
        let _ = h.expand(&info[..], &mut okm);
        let rng = ChaChaRng::from_seed(okm);
        generate_secret_key(count, rng)
    }

    /// Compute a secret key from a CS-PRNG.
    /// Returns `None` when `count` is 0 or greater than 128.
    pub fn random(count: usize, rng: impl RngCore + CryptoRng) -> Option<Self> {
        generate_secret_key(count, rng)
    }

    /// Store the secret key as a sequence of bytes.
    /// Each scalar is compressed to big-endian format.
    /// Needs (N + 2) * 32 space otherwise it will panic,
    /// where N is the number of messages that can be signed.
    pub fn to_bytes(&self, buffer: &mut [u8]) {
        // Scalars serialize little-endian internally; reverse for big-endian.
        fn to_be_bytes(s: Scalar) -> [u8; 32] {
            let mut t = s.to_bytes();
            t.reverse();
            t
        }
        let mut offset = 0;
        let mut end = Self::SCALAR_SIZE;
        buffer[offset..end].copy_from_slice(&to_be_bytes(self.w)[..]);
        offset = end;
        end += Self::SCALAR_SIZE;
        buffer[offset..end].copy_from_slice(&to_be_bytes(self.x)[..]);
        offset = end;
        end += Self::SCALAR_SIZE;
        for y in &self.y {
            buffer[offset..end].copy_from_slice(&to_be_bytes(*y)[..]);
            offset = end;
            end += Self::SCALAR_SIZE;
        }
    }

    /// Convert a byte sequence into the secret key.
    /// Expected size is (N + 2) * 32 bytes,
    /// where N is the number of messages that can be signed.
    pub fn from_bytes<B: AsRef<[u8]>>(bytes: B) -> Option<Self> {
        // Length for w, x, and 1 y
        const MIN_SIZE: usize = SecretKey::SCALAR_SIZE * 3;
        let buffer = bytes.as_ref();
        if buffer.len() % Self::SCALAR_SIZE != 0 {
            return None;
        }
        if buffer.len() < MIN_SIZE {
            return None;
        }
        // NOTE(review): `Scalar::from_bytes` yields a CtOption; `unwrap`
        // panics on a non-canonical scalar encoding — confirm callers only
        // feed output of `to_bytes`.
        fn from_be_bytes(d: &[u8]) -> Scalar {
            use core::convert::TryFrom;
            let mut t = <[u8; SecretKey::SCALAR_SIZE]>::try_from(d).expect("invalid length");
            t.reverse();
            Scalar::from_bytes(&t).unwrap()
        }
        let y_cnt = (buffer.len() / Self::SCALAR_SIZE) - 2;
        let mut offset = 0;
        let mut end = Self::SCALAR_SIZE;
        let w = from_be_bytes(&buffer[offset..end]);
        offset = end;
        end += Self::SCALAR_SIZE;
        let x = from_be_bytes(&buffer[offset..end]);
        offset = end;
        end += Self::SCALAR_SIZE;
        let mut y = Vec::new();
        for _ in 0..y_cnt {
            if let Err(_) = y.push(from_be_bytes(&buffer[offset..end])) {
                return None;
            }
            // BUG FIX: the read window must advance for every scalar.
            // Previously `offset`/`end` stayed fixed, so every y_i was
            // decoded from the first y's bytes and `from_bytes(to_bytes(k))`
            // was wrong for keys with more than one message.
            offset = end;
            end += Self::SCALAR_SIZE;
        }
        Some(Self { w, x, y })
    }

    /// Check if this secret key is valid (no scalar is zero).
    pub fn is_valid(&self) -> bool {
        let mut res = !self.w.is_zero();
        res &= !self.x.is_zero();
        for y in &self.y {
            res &= !y.is_zero();
        }
        res
    }

    /// Check if this secret key is invalid (any scalar is zero).
    pub fn is_invalid(&self) -> bool {
        let mut res = self.w.is_zero();
        res |= self.x.is_zero();
        for y in &self.y {
            res |= y.is_zero();
        }
        res
    }
}
fn generate_secret_key(count: usize, mut rng: impl RngCore + CryptoRng) -> Option<SecretKey> {
if count == 0 || count > 128 {
return None;
}
let w = Scalar::random(&mut rng);
let x = Scalar::random(&mut rng);
let mut y = Vec::new();
for _ in 0..count {
if let Err(_) = y.push(Scalar::random(&mut rng)) {
return None;
}
}
Some(SecretKey { w, x, y })
}
| 28.043011 | 94 | 0.546587 |
1cf36dab395fd3f1091014239be44fa40264d3b5 | 2,456 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the CompilerCalls interface to the compiler works.
// ignore-cross-compile
#![feature(rustc_private, path)]
#![feature(core)]
extern crate getopts;
extern crate rustc;
extern crate rustc_driver;
extern crate syntax;
use rustc::session::Session;
use rustc::session::config::{self, Input};
use rustc_driver::{driver, CompilerCalls, Compilation};
use syntax::diagnostics;
use std::path::PathBuf;
// Records which callbacks fired by multiplying `count` by a distinct prime
// in each one; the final assertion (count == 30 = 1*2*5*3) therefore proves
// that exactly early_callback, some_input and late_callback ran, once each.
struct TestCalls {
    count: u32
}
impl<'a> CompilerCalls<'a> for TestCalls {
    // Prime factor 2: runs right after argument parsing.
    fn early_callback(&mut self,
                      _: &getopts::Matches,
                      _: &diagnostics::registry::Registry)
                      -> Compilation {
        self.count *= 2;
        Compilation::Continue
    }
    // Prime factor 3: runs after the session is built; stops compilation so
    // build_controller is never reached.
    fn late_callback(&mut self,
                     _: &getopts::Matches,
                     _: &Session,
                     _: &Input,
                     _: &Option<PathBuf>,
                     _: &Option<PathBuf>)
                     -> Compilation {
        self.count *= 3;
        Compilation::Stop
    }
    // Prime factor 5: runs because an input file was supplied on the
    // command line.
    fn some_input(&mut self, input: Input, input_path: Option<PathBuf>)
                  -> (Input, Option<PathBuf>) {
        self.count *= 5;
        (input, input_path)
    }
    // Must not run: an input *was* provided.
    fn no_input(&mut self,
                _: &getopts::Matches,
                _: &config::Options,
                _: &Option<PathBuf>,
                _: &Option<PathBuf>,
                _: &diagnostics::registry::Registry)
                -> Option<(Input, Option<PathBuf>)> {
        panic!("This shouldn't happen");
    }
    // Must not run: late_callback returned Compilation::Stop first.
    fn build_controller(&mut self, _: &Session) -> driver::CompileController<'a> {
        panic!("This shouldn't be called");
    }
}
fn main() {
    let mut tc = TestCalls { count: 1 };
    // we should never get use this filename, but lets make sure they are valid args.
    let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
    rustc_driver::run_compiler(&args, &mut tc);
    assert!(tc.count == 30);
}
| 29.590361 | 85 | 0.588762 |
38d39667774867199abf98bf5bb476e50da52eaa | 6,457 | #[macro_use]
mod sys_common;
use sys_common::{io::tmpdir, symlink_supported};
#[test]
fn cap_smoke_test() {
    // Layout: red.txt at the root, dir/green.txt, dir/inner/blue.txt.
    // Every open below checks that a file resolves *only* through its real
    // path; error text/codes differ per platform, hence the paired
    // windows / non-windows assertions.
    let tmpdir = tmpdir();
    check!(tmpdir.create_dir_all("dir/inner"));
    check!(tmpdir.write("red.txt", b"hello world\n"));
    check!(tmpdir.write("dir/green.txt", b"goodmight moon\n"));
    check!(tmpdir.write("dir/inner/blue.txt", b"hey mars\n"));
    let inner = check!(tmpdir.open_dir("dir/inner"));
    check!(tmpdir.open("red.txt"));
    #[cfg(not(windows))]
    error!(tmpdir.open("blue.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("blue.txt"), 2);
    #[cfg(not(windows))]
    error!(tmpdir.open("green.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("green.txt"), 2);
    check!(tmpdir.open("./red.txt"));
    #[cfg(not(windows))]
    error!(tmpdir.open("./blue.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("./blue.txt"), 2);
    #[cfg(not(windows))]
    error!(tmpdir.open("./green.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("./green.txt"), 2);
    #[cfg(not(windows))]
    error!(tmpdir.open("dir/red.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("dir/red.txt"), 2);
    check!(tmpdir.open("dir/green.txt"));
    #[cfg(not(windows))]
    error!(tmpdir.open("dir/blue.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("dir/blue.txt"), 2);
    #[cfg(not(windows))]
    error!(tmpdir.open("dir/inner/red.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("dir/inner/red.txt"), 2);
    #[cfg(not(windows))]
    error!(tmpdir.open("dir/inner/green.txt"), "No such file");
    #[cfg(windows)]
    error!(tmpdir.open("dir/inner/green.txt"), 2);
    check!(tmpdir.open("dir/inner/blue.txt"));
    // `..` components are fine as long as they stay inside the capability root.
    check!(tmpdir.open("dir/../red.txt"));
    check!(tmpdir.open("dir/inner/../../red.txt"));
    check!(tmpdir.open("dir/inner/../inner/../../red.txt"));
    #[cfg(not(windows))]
    error!(inner.open("red.txt"), "No such file");
    #[cfg(windows)]
    error!(inner.open("red.txt"), 2);
    #[cfg(not(windows))]
    error!(inner.open("green.txt"), "No such file");
    #[cfg(windows)]
    error!(inner.open("green.txt"), 2);
    // From `inner`, any path that would climb above dir/inner must be
    // rejected as a sandbox escape — even when it points back inside.
    error_contains!(
        inner.open("../inner/blue.txt"),
        "a path led outside of the filesystem"
    );
    error_contains!(
        inner.open("../inner/red.txt"),
        "a path led outside of the filesystem"
    );
    #[cfg(not(windows))]
    error!(inner.open_dir(""), "No such file");
    #[cfg(windows)]
    error!(inner.open_dir(""), 2);
    error_contains!(inner.open_dir("/"), "a path led outside of the filesystem");
    error_contains!(
        inner.open_dir("/etc/services"),
        "a path led outside of the filesystem"
    );
    check!(inner.open_dir("."));
    check!(inner.open_dir("./"));
    check!(inner.open_dir("./."));
    error_contains!(inner.open_dir(".."), "a path led outside of the filesystem");
    error_contains!(
        inner.open_dir("../"),
        "a path led outside of the filesystem"
    );
    error_contains!(
        inner.open_dir("../."),
        "a path led outside of the filesystem"
    );
    error_contains!(
        inner.open_dir("./.."),
        "a path led outside of the filesystem"
    );
}
#[test]
fn symlinks() {
    #[cfg(windows)]
    use cap_fs_ext::DirExt;
    // Symlink creation needs elevated rights on some Windows setups; bail
    // out quietly where unsupported.
    if !symlink_supported() {
        return;
    }
    let tmpdir = tmpdir();
    check!(tmpdir.create_dir_all("dir/inner"));
    check!(tmpdir.write("red.txt", b"hello world\n"));
    check!(tmpdir.write("dir/green.txt", b"goodmight moon\n"));
    check!(tmpdir.write("dir/inner/blue.txt", b"hey mars\n"));
    let inner = check!(tmpdir.open_dir("dir/inner"));
    // "link" -> "dir"; paths through the link must resolve like "dir" itself.
    check!(tmpdir.symlink("dir", "link"));
    #[cfg(not(windows))]
    check!(tmpdir.symlink("does_not_exist", "badlink"));
    check!(tmpdir.open("link/../red.txt"));
    check!(tmpdir.open("link/green.txt"));
    check!(tmpdir.open("link/inner/blue.txt"));
    #[cfg(not(windows))]
    {
        error_contains!(tmpdir.open("link/red.txt"), "No such file");
        error_contains!(tmpdir.open("link/../green.txt"), "No such file");
    }
    #[cfg(windows)]
    {
        error_contains!(
            tmpdir.open("link/red.txt"),
            "The system cannot find the file specified."
        );
        error_contains!(
            tmpdir.open("link/../green.txt"),
            "The system cannot find the file specified."
        );
    }
    // Redundant ./ and // components and up-and-back traversals normalize
    // correctly through the link.
    check!(tmpdir.open("./dir/.././/link/..///./red.txt"));
    check!(tmpdir.open("link/inner/../inner/../../red.txt"));
    error_contains!(
        inner.open("../inner/../inner/../../link/other.txt"),
        "a path led outside of the filesystem"
    );
    #[cfg(not(windows))]
    {
        error_contains!(
            tmpdir.open("./dir/.././/link/..///./not.txt"),
            "No such file"
        );
        error_contains!(tmpdir.open("link/other.txt"), "No such file");
        error_contains!(tmpdir.open("badlink/../red.txt"), "No such file");
    }
    #[cfg(windows)]
    {
        error_contains!(
            tmpdir.open("./dir/.././/link/..///./not.txt"),
            "The system cannot find the file specified."
        );
        error_contains!(
            tmpdir.open("link/other.txt"),
            "The system cannot find the file specified."
        );
    }
}
#[test]
#[cfg(not(windows))]
fn symlink_loop() {
    // NOTE(review): this import is gated on windows while the whole test is
    // compiled only on non-windows, so the `use` below is never active.
    #[cfg(windows)]
    use cap_fs_ext::DirExt;
    let tmpdir = tmpdir();
    // A self-referential link must fail to resolve rather than loop forever.
    check!(tmpdir.symlink("link", "link"));
    // TODO: Check the error message
    error_contains!(tmpdir.open("link"), "");
}
#[test]
fn symlink_loop_from_rename() {
    #[cfg(windows)]
    use cap_fs_ext::DirExt;
    if !symlink_supported() {
        return;
    }
    let tmpdir = tmpdir();
    // Start with a healthy "link" -> "file".
    check!(tmpdir.create("file"));
    check!(tmpdir.symlink("file", "link"));
    check!(tmpdir.open("link"));
    // Dangling: target renamed away.
    check!(tmpdir.rename("file", &tmpdir, "renamed"));
    error_contains!(tmpdir.open("link"), "");
    // "file" is now the link itself, pointing at "file": a self-loop.
    check!(tmpdir.rename("link", &tmpdir, "file"));
    error_contains!(tmpdir.open("file"), "");
    // Renaming the loop back to "link" keeps it dangling.
    check!(tmpdir.rename("file", &tmpdir, "link"));
    error_contains!(tmpdir.open("link"), "");
    // Restoring the original target heals the link.
    check!(tmpdir.rename("renamed", &tmpdir, "file"));
    check!(tmpdir.open("link"));
}
// `/proc/self/fd` only exists on Linux, so gate on the target OS.
// Fix: the previous `#[cfg(linux)]` tests a nonexistent cfg key (the valid
// predicate is `target_os = "linux"`), so the function was compiled out on
// every platform — which also hid the missing `File` import used below.
#[cfg(target_os = "linux")]
#[test]
fn proc_self_fd() {
    use std::fs::File;
    let fd = check!(File::open("/proc/self/fd"));
    let dir = cap_std::fs::Dir::from_std_file(fd);
    // fd "0" (stdin) must not be reachable through the capability Dir.
    error!(dir.open("0"), "No such file");
}
| 28.570796 | 82 | 0.56946 |
5ba5462c798a2476969040752f98b3fcc8722541 | 4,584 | #![allow(clippy::integer_arithmetic)]
use clap::{crate_description, crate_name, value_t, App, Arg};
use panoptes_ledger::entry::{self, create_ticks, init_poh, EntrySlice, VerifyRecyclers};
use panoptes_measure::measure::Measure;
use panoptes_perf::perf_libs;
use panoptes_sdk::hash::hash;
/// Benchmark PoH entry verification across the available backends
/// (generic CPU, AVX2/AVX-512 SIMD, and CUDA when compiled in), printing
/// CSV rows `num_entries,backend,us_per_iteration`.
fn main() {
    panoptes_logger::setup();
    let matches = App::new(crate_name!())
        .about(crate_description!())
        .version(panoptes_version::version!())
        .arg(
            Arg::with_name("max_num_entries")
                .long("max-num-entries")
                .takes_value(true)
                .value_name("SIZE")
                .help("Number of entries."),
        )
        .arg(
            Arg::with_name("start_num_entries")
                .long("start-num-entries")
                .takes_value(true)
                .value_name("SIZE")
                // Fix: help text previously said "Packets per chunk",
                // copy-pasted from an unrelated option.
                .help("Initial number of entries to verify."),
        )
        .arg(
            Arg::with_name("hashes_per_tick")
                .long("hashes-per-tick")
                .takes_value(true)
                .value_name("SIZE")
                .help("hashes per tick"),
        )
        .arg(
            Arg::with_name("num_transactions_per_entry")
                .long("num-transactions-per-entry")
                .takes_value(true)
                .value_name("NUM")
                .help("Skip transaction sanity execution"),
        )
        .arg(
            Arg::with_name("iterations")
                .long("iterations")
                .takes_value(true)
                .help("Number of iterations"),
        )
        .arg(
            Arg::with_name("num_threads")
                .long("num-threads")
                .takes_value(true)
                .help("Number of threads"),
        )
        .arg(
            Arg::with_name("cuda")
                .long("cuda")
                .takes_value(false)
                .help("Use cuda"),
        )
        .get_matches();
    let max_num_entries = value_t!(matches, "max_num_entries", u64).unwrap_or(64);
    let start_num_entries = value_t!(matches, "start_num_entries", u64).unwrap_or(max_num_entries);
    let iterations = value_t!(matches, "iterations", usize).unwrap_or(10);
    let hashes_per_tick = value_t!(matches, "hashes_per_tick", u64).unwrap_or(10_000);
    let start_hash = hash(&[1, 2, 3, 4]);
    // Generate the largest tick chain once; smaller runs verify a prefix.
    let ticks = create_ticks(max_num_entries, hashes_per_tick, start_hash);
    let mut num_entries = start_num_entries as usize;
    if matches.is_present("cuda") {
        perf_libs::init_cuda();
    }
    init_poh();
    // Double the entry count each round up to the configured maximum.
    while num_entries <= max_num_entries as usize {
        let mut time = Measure::start("time");
        for _ in 0..iterations {
            assert!(ticks[..num_entries]
                .verify_cpu_generic(&start_hash)
                .finish_verify());
        }
        time.stop();
        println!(
            "{},cpu_generic,{}",
            num_entries,
            time.as_us() / iterations as u64
        );
        // SIMD lanes: 8 for AVX2, 16 for AVX-512; only when the CPU and the
        // entry API support it.
        if is_x86_feature_detected!("avx2") && entry::api().is_some() {
            let mut time = Measure::start("time");
            for _ in 0..iterations {
                assert!(ticks[..num_entries]
                    .verify_cpu_x86_simd(&start_hash, 8)
                    .finish_verify());
            }
            time.stop();
            println!(
                "{},cpu_simd_avx2,{}",
                num_entries,
                time.as_us() / iterations as u64
            );
        }
        if is_x86_feature_detected!("avx512f") && entry::api().is_some() {
            let mut time = Measure::start("time");
            for _ in 0..iterations {
                assert!(ticks[..num_entries]
                    .verify_cpu_x86_simd(&start_hash, 16)
                    .finish_verify());
            }
            time.stop();
            println!(
                "{},cpu_simd_avx512,{}",
                num_entries,
                time.as_us() / iterations as u64
            );
        }
        if perf_libs::api().is_some() {
            let mut time = Measure::start("time");
            let recyclers = VerifyRecyclers::default();
            for _ in 0..iterations {
                assert!(ticks[..num_entries]
                    .start_verify(&start_hash, recyclers.clone())
                    .finish_verify());
            }
            time.stop();
            println!(
                "{},gpu_cuda,{}",
                num_entries,
                time.as_us() / iterations as u64
            );
        }
        println!();
        num_entries *= 2;
    }
}
| 33.459854 | 99 | 0.497382 |
bf46edcfab8b172ffc4affd0eff34f98625e62af | 1,990 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we correctly infer variance for region parameters in
// various self-contained types.
#![feature(rustc_attrs)]
// Regions that just appear in normal spots are contravariant:
// Each struct below is annotated with #[rustc_variance]; compiletest checks
// the compiler-reported variance string against the `//~ ERROR [...]`
// annotation on the item line (+ covariant, - contravariant, o invariant,
// * bivariant). Do not move the `//~` markers relative to their items.
#[rustc_variance]
struct Test2<'a, 'b, 'c> { //~ ERROR [-, -, -]
    x: &'a isize,
    y: &'b [isize],
    c: &'c str
}
// Those same annotations in function arguments become covariant:
#[rustc_variance]
struct Test3<'a, 'b, 'c> { //~ ERROR [+, +, +]
    x: extern "Rust" fn(&'a isize),
    y: extern "Rust" fn(&'b [isize]),
    c: extern "Rust" fn(&'c str),
}
// Mutability induces invariance:
#[rustc_variance]
struct Test4<'a, 'b:'a> { //~ ERROR [-, o]
    x: &'a mut &'b isize,
}
// Mutability induces invariance, even when in a
// contravariant context:
#[rustc_variance]
struct Test5<'a, 'b:'a> { //~ ERROR [+, o]
    x: extern "Rust" fn(&'a mut &'b isize),
}
// Invariance is a trap from which NO ONE CAN ESCAPE.
// In other words, even though the `&'b isize` occurs in
// an argument list (which is contravariant), that
// argument list occurs in an invariant context.
#[rustc_variance]
struct Test6<'a, 'b:'a> { //~ ERROR [-, o]
    x: &'a mut extern "Rust" fn(&'b isize),
}
// No uses at all is bivariant:
#[rustc_variance]
struct Test7<'a> { //~ ERROR [*]
    //~^ ERROR parameter `'a` is never used
    x: isize
}
// Try enums too.
#[rustc_variance]
enum Test8<'a, 'b, 'c:'b> { //~ ERROR [+, -, o]
    Test8A(extern "Rust" fn(&'a isize)),
    Test8B(&'b [isize]),
    Test8C(&'b mut &'c str),
}
fn main() {}
| 25.844156 | 68 | 0.639196 |
1da05dcbc084c009c187ccfed440c1203b20bdf3 | 2,418 | use regex::{Error, Regex, RegexBuilder};
/// Failure modes when parsing or compiling a sed-style substitution
/// expression (`s/pattern/replace/flags`).
#[derive(Debug, PartialEq)]
pub enum ErrorKind {
    /// Leading command letter was not `s`.
    UnknownCommand(char),
    /// A flag other than `g`/`i` appeared in the trailing segment.
    UnknownFlag(char),
    /// More than three delimited segments were found.
    TooManySegments,
    /// Fewer than two delimited segments were found.
    NotEnoughSegments,
    /// The pattern failed to compile as a regex.
    RegexError(Error),
}
/// Parsed components of `s/pattern/replace/flags`.
#[derive(Debug, PartialEq)]
pub struct RegexData {
    pattern_str: String,
    pub replace_str: String,
    pub flag_global: bool,
    flag_case_insensitive: bool,
}
impl RegexData {
    /// Compile `pattern_str` into a `Regex`, honoring the parsed
    /// case-insensitivity flag; compile failures map to `RegexError`.
    /// (The `g` flag affects replacement, not compilation.)
    pub fn build_regex(&self) -> Result<Regex, ErrorKind> {
        RegexBuilder::new(&self.pattern_str)
            .case_insensitive(self.flag_case_insensitive)
            .build()
            .map_err(ErrorKind::RegexError)
    }
}
/// Parse a sed-style substitution expression (`s/pattern/replace/flags`)
/// into a [`RegexData`].
///
/// The character after `s` is the delimiter; a backslash escapes the next
/// character. Supported flags: `g` (global) and `i` (case-insensitive).
///
/// Fixes: malformed inputs that previously caused out-of-bounds panics now
/// return errors — an empty string (`expr[0]`), a lone `"s"` (`expr[1]`),
/// and a trailing backslash (`expr[i + 1]`, now kept as a literal `\`).
pub fn split_regex(expr: &str) -> Result<RegexData, ErrorKind> {
    let expr = expr.chars().collect::<Vec<_>>();
    // Need at least the command letter and a delimiter.
    if expr.is_empty() {
        return Err(ErrorKind::NotEnoughSegments);
    }
    if expr[0] != 's' {
        return Err(ErrorKind::UnknownCommand(expr[0]));
    }
    if expr.len() < 2 {
        return Err(ErrorKind::NotEnoughSegments);
    }
    let delimiter = expr[1];
    let mut segments = vec![];
    let mut segment = vec![];
    let mut i = 2;
    while i < expr.len() {
        let c = expr[i];
        if c == '\\' && i + 1 < expr.len() {
            // Escape: copy the next character verbatim and skip over it.
            segment.push(expr[i + 1]);
            i += 1;
        } else if c == delimiter {
            segments.push(segment.iter().collect::<String>());
            segment.clear();
        } else {
            // Ordinary character — including a trailing backslash with
            // nothing after it, which is kept literally.
            segment.push(c);
        }
        i += 1;
    }
    if !segment.is_empty() {
        segments.push(segment.iter().collect::<String>());
    }
    // Exactly pattern + replacement, optionally followed by flags.
    if segments.len() < 2 {
        return Err(ErrorKind::NotEnoughSegments);
    } else if segments.len() > 3 {
        return Err(ErrorKind::TooManySegments);
    }
    let mut ret = RegexData {
        pattern_str: segments[0].to_owned(),
        replace_str: segments[1].to_owned(),
        flag_global: false,
        flag_case_insensitive: false,
    };
    if segments.len() == 3 {
        for c in segments[2].chars() {
            match c {
                'i' => ret.flag_case_insensitive = true,
                'g' => ret.flag_global = true,
                _ => return Err(ErrorKind::UnknownFlag(c)),
            }
        }
    }
    Ok(ret)
}
#[cfg(test)]
mod tests {
    use crate::sedregex::*;
    // Happy path: `s/123/456` (no trailing delimiter, no flags) yields the
    // pattern/replacement pair with both flags off.
    #[test]
    fn test() {
        assert_eq!(
            Ok(RegexData {
                pattern_str: "123".to_string(),
                replace_str: "456".to_string(),
                flag_global: false,
                flag_case_insensitive: false,
            }),
            split_regex("s/123/456")
        );
    }
}
| 24.18 | 64 | 0.53019 |
e4e1259df8bab428a64bedd95d0863f182de6a46 | 10,577 | //! The `genesis_config` module is a library for generating the chain's genesis config.
#![cfg(feature = "full")]
use crate::{
account::Account,
clock::{UnixTimestamp, DEFAULT_TICKS_PER_SLOT},
epoch_schedule::EpochSchedule,
fee_calculator::FeeRateGovernor,
hash::{hash, Hash},
inflation::Inflation,
native_token::lamports_to_sol,
poh_config::PohConfig,
pubkey::Pubkey,
rent::Rent,
shred_version::compute_shred_version,
signature::{Keypair, Signer},
system_program,
timing::years_as_slots,
};
use bincode::{deserialize, serialize};
use chrono::{TimeZone, Utc};
use memmap2::Mmap;
use std::{
collections::BTreeMap,
fmt,
fs::{File, OpenOptions},
io::Write,
path::{Path, PathBuf},
str::FromStr,
time::{SystemTime, UNIX_EPOCH},
};
// deprecated default that is no longer used
pub const UNUSED_DEFAULT: u64 = 1024;
// The order can't align with release lifecycle only to remain ABI-compatible:
// the variant order is historical and must not be rearranged.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, AbiEnumVisitor, AbiExample)]
pub enum ClusterType {
    Testnet,
    MainnetBeta,
    Devnet,
    Development,
}

impl ClusterType {
    /// All names accepted by the `FromStr` impl below.
    pub const STRINGS: [&'static str; 4] = ["development", "devnet", "testnet", "mainnet-beta"];
}
impl FromStr for ClusterType {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"development" => Ok(ClusterType::Development),
"devnet" => Ok(ClusterType::Devnet),
"testnet" => Ok(ClusterType::Testnet),
"mainnet-beta" => Ok(ClusterType::MainnetBeta),
_ => Err(format!("{} is unrecognized for cluster type", s)),
}
}
}
// NOTE: any change to this struct's field set or layout changes the frozen
// ABI digest below.
#[frozen_abi(digest = "Bj6E2ZCpEUuNFM7HREcL5Dg3CPsbndNuCR1aVmBBFFU4")]
#[derive(Serialize, Deserialize, Debug, Clone, AbiExample)]
pub struct GenesisConfig {
    /// when the network (bootstrap validator) was started relative to the UNIX Epoch
    pub creation_time: UnixTimestamp,
    /// initial accounts
    pub accounts: BTreeMap<Pubkey, Account>,
    /// built-in programs
    pub native_instruction_processors: Vec<(String, Pubkey)>,
    /// accounts for network rewards, these do not count towards capitalization
    pub rewards_pools: BTreeMap<Pubkey, Account>,
    /// number of PoH ticks per slot
    pub ticks_per_slot: u64,
    /// deprecated; kept at `UNUSED_DEFAULT` for layout compatibility
    pub unused: u64,
    /// network speed configuration
    pub poh_config: PohConfig,
    /// this field exists only to ensure that the binary layout of GenesisConfig remains compatible
    /// with the Panoptes v0.23 release line
    pub __backwards_compat_with_v0_23: u64,
    /// transaction fee config
    pub fee_rate_governor: FeeRateGovernor,
    /// rent config
    pub rent: Rent,
    /// inflation config
    pub inflation: Inflation,
    /// how slots map to epochs
    pub epoch_schedule: EpochSchedule,
    /// network runlevel
    pub cluster_type: ClusterType,
}
// useful for basic tests
/// Build a development `GenesisConfig` with a single faucet account holding
/// `lamports`, returning the config together with the faucet keypair.
pub fn create_genesis_config(lamports: u64) -> (GenesisConfig, Keypair) {
    let faucet_keypair = Keypair::new();
    (
        GenesisConfig::new(
            &[(
                faucet_keypair.pubkey(),
                Account::new(lamports, 0, &system_program::id()),
            )],
            &[],
        ),
        faucet_keypair,
    )
}

impl Default for GenesisConfig {
    fn default() -> Self {
        Self {
            // stamp creation with the current wall-clock time
            creation_time: SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_secs() as UnixTimestamp,
            accounts: BTreeMap::default(),
            native_instruction_processors: Vec::default(),
            rewards_pools: BTreeMap::default(),
            ticks_per_slot: DEFAULT_TICKS_PER_SLOT,
            unused: UNUSED_DEFAULT,
            poh_config: PohConfig::default(),
            inflation: Inflation::default(),
            __backwards_compat_with_v0_23: 0,
            fee_rate_governor: FeeRateGovernor::default(),
            rent: Rent::default(),
            epoch_schedule: EpochSchedule::default(),
            cluster_type: ClusterType::Development,
        }
    }
}
impl GenesisConfig {
    /// Build a config from initial accounts and built-in programs; every
    /// other field takes its `Default` value.
    pub fn new(
        accounts: &[(Pubkey, Account)],
        native_instruction_processors: &[(String, Pubkey)],
    ) -> Self {
        Self {
            accounts: accounts
                .iter()
                .cloned()
                .collect::<BTreeMap<Pubkey, Account>>(),
            native_instruction_processors: native_instruction_processors.to_vec(),
            ..GenesisConfig::default()
        }
    }

    /// Hash of the bincode serialization of the whole config.
    pub fn hash(&self) -> Hash {
        let serialized = serialize(&self).unwrap();
        hash(&serialized)
    }

    pub fn disable_cap_altering_features_for_preciseness(&mut self) {
        self.accounts
            .remove(&crate::feature_set::simple_capitalization::id());
    }

    // location of the serialized config inside a ledger directory
    fn genesis_filename(ledger_path: &Path) -> PathBuf {
        Path::new(ledger_path).join("genesis.bin")
    }

    /// Read and deserialize `genesis.bin` from `ledger_path`, memory-mapping
    /// the file. All failures are reported as `io::Error` with context.
    pub fn load(ledger_path: &Path) -> Result<Self, std::io::Error> {
        let filename = Self::genesis_filename(&ledger_path);
        let file = OpenOptions::new()
            .read(true)
            .open(&filename)
            .map_err(|err| {
                std::io::Error::new(
                    std::io::ErrorKind::Other,
                    format!("Unable to open {:?}: {:?}", filename, err),
                )
            })?;
        //UNPANO: Required to create a Mmap
        let mem = unsafe { Mmap::map(&file) }.map_err(|err| {
            std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("Unable to map {:?}: {:?}", filename, err),
            )
        })?;
        let genesis_config = deserialize(&mem).map_err(|err| {
            std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("Unable to deserialize {:?}: {:?}", filename, err),
            )
        })?;
        Ok(genesis_config)
    }

    /// Serialize and write `genesis.bin` into `ledger_path`, creating the
    /// directory if needed.
    pub fn write(&self, ledger_path: &Path) -> Result<(), std::io::Error> {
        let serialized = serialize(&self).map_err(|err| {
            std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("Unable to serialize: {:?}", err),
            )
        })?;
        std::fs::create_dir_all(&ledger_path)?;
        let mut file = File::create(Self::genesis_filename(&ledger_path))?;
        file.write_all(&serialized)
    }

    /// Insert (or replace) an account keyed by pubkey.
    pub fn add_account(&mut self, pubkey: Pubkey, account: Account) {
        self.accounts.insert(pubkey, account);
    }

    pub fn add_native_instruction_processor(&mut self, name: String, program_id: Pubkey) {
        self.native_instruction_processors.push((name, program_id));
    }

    pub fn hashes_per_tick(&self) -> Option<u64> {
        self.poh_config.hashes_per_tick
    }

    pub fn ticks_per_slot(&self) -> u64 {
        self.ticks_per_slot
    }

    /// Slot duration in nanoseconds, widened to u128 to avoid overflow.
    pub fn ns_per_slot(&self) -> u128 {
        self.poh_config.target_tick_duration.as_nanos() * self.ticks_per_slot() as u128
    }

    pub fn slots_per_year(&self) -> f64 {
        years_as_slots(
            1.0,
            &self.poh_config.target_tick_duration,
            self.ticks_per_slot(),
        )
    }
}
impl fmt::Display for GenesisConfig {
    // Human-readable summary of the config. NOTE: the capitalization closure
    // below `panic!`s if any account has zero lamports.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "\
             Creation time: {}\n\
             Cluster type: {:?}\n\
             Genesis hash: {}\n\
             Shred version: {}\n\
             Ticks per slot: {:?}\n\
             Hashes per tick: {:?}\n\
             Slots per epoch: {}\n\
             Warmup epochs: {}abled\n\
             {:?}\n\
             {:?}\n\
             {:?}\n\
             Capitalization: {} PANO in {} accounts\n\
             Native instruction processors: {:#?}\n\
             Rewards pool: {:#?}\n\
             ",
            Utc.timestamp(self.creation_time, 0).to_rfc3339(),
            self.cluster_type,
            self.hash(),
            compute_shred_version(&self.hash(), None),
            self.ticks_per_slot,
            self.poh_config.hashes_per_tick,
            self.epoch_schedule.slots_per_epoch,
            // "en"/"dis" completes the "{}abled" slot in the format string
            if self.epoch_schedule.warmup {
                "en"
            } else {
                "dis"
            },
            self.inflation,
            self.rent,
            self.fee_rate_governor,
            lamports_to_sol(
                self.accounts
                    .iter()
                    .map(|(pubkey, account)| {
                        if account.lamports == 0 {
                            panic!("{:?}", (pubkey, account));
                        }
                        account.lamports
                    })
                    .sum::<u64>()
            ),
            self.accounts.len(),
            self.native_instruction_processors,
            self.rewards_pools,
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::signature::{Keypair, Signer};
    use std::path::PathBuf;

    // Unique per-call temp path under $FARF_DIR (or ./farf), keyed by a fresh
    // keypair so parallel tests do not collide.
    fn make_tmp_path(name: &str) -> PathBuf {
        let out_dir = std::env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string());
        let keypair = Keypair::new();
        let path = [
            out_dir,
            "tmp".to_string(),
            format!("{}-{}", name, keypair.pubkey()),
        ]
        .iter()
        .collect();
        // whack any possible collision
        let _ignored = std::fs::remove_dir_all(&path);
        // whack any possible collision
        let _ignored = std::fs::remove_file(&path);
        path
    }

    /// Round-trip: build a config, write it to disk, load it back, and verify
    /// the hash is preserved.
    #[test]
    fn test_genesis_config() {
        let faucet_keypair = Keypair::new();
        let mut config = GenesisConfig::default();
        config.add_account(
            faucet_keypair.pubkey(),
            Account::new(10_000, 0, &Pubkey::default()),
        );
        config.add_account(
            solana_sdk::pubkey::new_rand(),
            Account::new(1, 0, &Pubkey::default()),
        );
        config.add_native_instruction_processor("hi".to_string(), solana_sdk::pubkey::new_rand());
        assert_eq!(config.accounts.len(), 2);
        assert!(config
            .accounts
            .iter()
            .any(|(pubkey, account)| *pubkey == faucet_keypair.pubkey()
                && account.lamports == 10_000));
        let path = &make_tmp_path("genesis_config");
        config.write(&path).expect("write");
        let loaded_config = GenesisConfig::load(&path).expect("load");
        assert_eq!(config.hash(), loaded_config.hash());
        let _ignored = std::fs::remove_file(&path);
    }
}
| 31.108824 | 99 | 0.556774 |
235e4f00ede540de156792597d66a2f87be12082 | 496 | pub mod aur_use {
use reqwest;
use serde::Deserialize;
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AurResponse {
pub version: i16,
pub r#type: String,
pub resultcount: i32,
}
pub async fn does_pkg_exist(pkg_name: &str) -> Result<AurResponse, reqwest::Error> {
let resp: AurResponse = reqwest::get(pkg_name)
.await?
.json::<AurResponse>()
.await?;
Ok(resp)
}
}
| 22.545455 | 88 | 0.570565 |
4880d13f39a6b539b317402c3313d3ee8d857d0e | 6,504 | use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::eager_or_lazy::is_lazyness_candidate;
use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_macro_callsite};
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item, match_type};
use clippy_utils::{contains_return, get_trait_def_id, last_path_segment, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::source_map::Span;
use rustc_span::symbol::sym;
use std::borrow::Cow;
use super::OR_FUN_CALL;
/// Checks for the `OR_FUN_CALL` lint.
#[allow(clippy::too_many_lines)]
pub(super) fn check<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &hir::Expr<'_>,
    method_span: Span,
    name: &str,
    args: &'tcx [hir::Expr<'_>],
) {
    /// Checks for `unwrap_or(T::new())` or `unwrap_or(T::default())`.
    /// Returns `true` if a lint was emitted (so the general case is skipped).
    fn check_unwrap_or_default(
        cx: &LateContext<'_>,
        name: &str,
        fun: &hir::Expr<'_>,
        self_expr: &hir::Expr<'_>,
        arg: &hir::Expr<'_>,
        or_has_args: bool,
        span: Span,
    ) -> bool {
        if_chain! {
            if !or_has_args;
            if name == "unwrap_or";
            if let hir::ExprKind::Path(ref qpath) = fun.kind;
            let path = &*last_path_segment(qpath).ident.as_str();
            if ["default", "new"].contains(&path);
            let arg_ty = cx.typeck_results().expr_ty(arg);
            if let Some(default_trait_id) = get_trait_def_id(cx, &paths::DEFAULT_TRAIT);
            if implements_trait(cx, arg_ty, default_trait_id, &[]);
            then {
                // suggest `unwrap_or_default()` in place of the whole call
                let mut applicability = Applicability::MachineApplicable;
                span_lint_and_sugg(
                    cx,
                    OR_FUN_CALL,
                    span,
                    &format!("use of `{}` followed by a call to `{}`", name, path),
                    "try this",
                    format!(
                        "{}.unwrap_or_default()",
                        snippet_with_applicability(cx, self_expr.span, "..", &mut applicability)
                    ),
                    applicability,
                );
                true
            } else {
                false
            }
        }
    }

    /// Checks for `*or(foo())`.
    #[allow(clippy::too_many_arguments)]
    fn check_general_case<'tcx>(
        cx: &LateContext<'tcx>,
        name: &str,
        method_span: Span,
        self_expr: &hir::Expr<'_>,
        arg: &'tcx hir::Expr<'_>,
        span: Span,
        // None if lambda is required
        fun_span: Option<Span>,
    ) {
        // (path, fn_has_argument, methods, suffix)
        static KNOW_TYPES: [(&[&str], bool, &[&str], &str); 4] = [
            (&paths::BTREEMAP_ENTRY, false, &["or_insert"], "with"),
            (&paths::HASHMAP_ENTRY, false, &["or_insert"], "with"),
            (&paths::OPTION, false, &["map_or", "ok_or", "or", "unwrap_or"], "else"),
            (&paths::RESULT, true, &["or", "unwrap_or"], "else"),
        ];
        // `.len()` on slices/arrays/Vec is cheap, so the eager form is fine —
        // bail out without linting.
        if let hir::ExprKind::MethodCall(ref path, _, ref args, _) = &arg.kind {
            if path.ident.as_str() == "len" {
                let ty = cx.typeck_results().expr_ty(&args[0]).peel_refs();
                match ty.kind() {
                    ty::Slice(_) | ty::Array(_, _) => return,
                    _ => (),
                }
                if is_type_diagnostic_item(cx, ty, sym::vec_type) {
                    return;
                }
            }
        }
        if_chain! {
            if KNOW_TYPES.iter().any(|k| k.2.contains(&name));
            if is_lazyness_candidate(cx, arg);
            if !contains_return(&arg);
            let self_ty = cx.typeck_results().expr_ty(self_expr);
            if let Some(&(_, fn_has_arguments, poss, suffix)) =
                KNOW_TYPES.iter().find(|&&i| match_type(cx, self_ty, i.0));
            if poss.contains(&name);
            then {
                let macro_expanded_snipped;
                // Build the replacement argument: either the bare function
                // path or a `|…| expr` closure when a lambda is required.
                let sugg: Cow<'_, str> = {
                    let (snippet_span, use_lambda) = match (fn_has_arguments, fun_span) {
                        (false, Some(fun_span)) => (fun_span, false),
                        _ => (arg.span, true),
                    };
                    let snippet = {
                        let not_macro_argument_snippet = snippet_with_macro_callsite(cx, snippet_span, "..");
                        if not_macro_argument_snippet == "vec![]" {
                            macro_expanded_snipped = snippet(cx, snippet_span, "..");
                            match macro_expanded_snipped.strip_prefix("$crate::vec::") {
                                Some(stripped) => Cow::from(stripped),
                                None => macro_expanded_snipped
                            }
                        }
                        else {
                            not_macro_argument_snippet
                        }
                    };
                    if use_lambda {
                        let l_arg = if fn_has_arguments { "_" } else { "" };
                        format!("|{}| {}", l_arg, snippet).into()
                    } else {
                        snippet
                    }
                };
                let span_replace_word = method_span.with_hi(span.hi());
                span_lint_and_sugg(
                    cx,
                    OR_FUN_CALL,
                    span_replace_word,
                    &format!("use of `{}` followed by a function call", name),
                    "try this",
                    format!("{}_{}({})", name, suffix, sugg),
                    Applicability::HasPlaceholders,
                );
            }
        }
    }

    // args = [receiver, or-argument]; dispatch on the shape of the argument.
    if args.len() == 2 {
        match args[1].kind {
            hir::ExprKind::Call(ref fun, ref or_args) => {
                let or_has_args = !or_args.is_empty();
                if !check_unwrap_or_default(cx, name, fun, &args[0], &args[1], or_has_args, expr.span) {
                    let fun_span = if or_has_args { None } else { Some(fun.span) };
                    check_general_case(cx, name, method_span, &args[0], &args[1], expr.span, fun_span);
                }
            },
            hir::ExprKind::Index(..) | hir::ExprKind::MethodCall(..) => {
                check_general_case(cx, name, method_span, &args[0], &args[1], expr.span, None);
            },
            _ => {},
        }
    }
}
| 37.37931 | 109 | 0.481704 |
1a75b10f85c19a7f25f54118060a6e20fedae0de | 489 | //! Business Identifier Code (BIC, or SWIFT BIC or SWIFT code) as defined by ISO 9362.
use crate::iso3166::CountryCode;
use crate::iso9362::{BranchCode, InstitutionCode, LocationCode};
use serde::{Deserialize, Serialize};
/// [`BIC`] is an ISO 9362 BIC code.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct BIC {
    /// Institution (bank) code.
    pub institution: InstitutionCode,
    /// ISO 3166 country code.
    pub country: CountryCode,
    /// Location code.
    pub location: LocationCode,
    /// Optional branch code; `None` when no branch is specified.
    pub branch: Option<BranchCode>,
}
5d10dfdb9b0f4ea9a2bde121329b72e3ad830f2e | 2,239 | use indexmap::map::IndexMap;
use serde::{Deserialize, Serialize};
use crate::auth::KrakenAuth;
// Structs/Enums
use super::{EndpointInfo, KrakenInput, MethodType};
// Traits
use super::{Input, MutateInput, Output, UpdateInput};
/// Request builder for the Cancel Open Order endpoint
pub struct KICancelOrder {
    // accumulated form parameters sent to the endpoint
    params: IndexMap<String, String>,
}

impl KICancelOrder {
    /// Constructor returning a [KrakenInput] builder for the cancel open order endpoint.
    /// txid is the transaction ID of the order that is to be cancelled
    pub fn build(txid: String) -> KICancelOrder {
        let cancelorder = KICancelOrder {
            params: IndexMap::new(),
        };
        cancelorder.with_txid(txid)
    }

    /// Update the transaction ID of the order to cancel. Useful for templating or iterating over a
    /// list of transaction IDs without allocation
    pub fn with_txid(self, txid: String) -> Self {
        self.update_input("txid", txid)
    }

    // every private API call carries a fresh nonce; applied just before finish
    fn with_nonce(self) -> Self {
        self.update_input("nonce", KrakenAuth::nonce())
    }
}

impl MutateInput for KICancelOrder {
    fn list_mut(&mut self) -> &mut IndexMap<String, String> {
        &mut self.params
    }
}

impl UpdateInput for KICancelOrder {}

impl Input for KICancelOrder {
    // Consume the builder into a ready-to-send KrakenInput.
    fn finish(self) -> KrakenInput {
        KrakenInput {
            info: EndpointInfo {
                methodtype: MethodType::Private,
                endpoint: String::from("CancelOrder"),
            },
            params: Some(self.with_nonce().params),
        }
    }

    // Like `finish`, but also hands back a builder with the same params so
    // callers can reuse/modify it for further requests.
    fn finish_clone(self) -> (KrakenInput, Self) {
        let newself = self.with_nonce();
        (
            KrakenInput {
                info: EndpointInfo {
                    methodtype: MethodType::Private,
                    endpoint: String::from("CancelOrder"),
                },
                params: Some(newself.params.clone()),
            },
            newself,
        )
    }
}

/// Response from the Cancel Open Orders endpoint
#[derive(Deserialize, Serialize, Debug)]
pub struct KOCancelOrder {
    /// number of orders canceled
    pub count: u32,
    /// if set, order(s) is/are pending cancellation
    pub pending: u32,
}

impl Output for KOCancelOrder {}
5dcf1824ef0b7e3d387d0507f8f987a0ccf011df | 3,025 | use clippy_utils::diagnostics::span_lint;
use clippy_utils::is_hir_ty_cfg_dependant;
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, GenericArg};
use rustc_lint::LateContext;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::sym;
use super::CAST_PTR_ALIGNMENT;
// Entry point: handles both `expr as *T` casts and `ptr.cast::<T>()` calls.
pub(super) fn check(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
    if let ExprKind::Cast(cast_expr, cast_to) = expr.kind {
        // cfg-dependent target types can't be checked reliably
        if is_hir_ty_cfg_dependant(cx, cast_to) {
            return;
        }
        let (cast_from, cast_to) = (
            cx.typeck_results().expr_ty(cast_expr),
            cx.typeck_results().expr_ty(expr),
        );
        lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
    } else if let ExprKind::MethodCall(method_path, _, args, _) = expr.kind {
        if_chain! {
            if method_path.ident.name == sym!(cast);
            if let Some(generic_args) = method_path.args;
            if let [GenericArg::Type(cast_to)] = generic_args.args;
            // There probably is no obvious reason to do this, just to be consistent with `as` cases.
            if !is_hir_ty_cfg_dependant(cx, cast_to);
            then {
                let (cast_from, cast_to) =
                    (cx.typeck_results().expr_ty(&args[0]), cx.typeck_results().expr_ty(expr));
                lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
            }
        }
    }
}
// Emits the lint when a raw-pointer cast increases the required alignment.
fn lint_cast_ptr_alignment<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, cast_from: Ty<'tcx>, cast_to: Ty<'tcx>) {
    if_chain! {
        if let ty::RawPtr(from_ptr_ty) = &cast_from.kind();
        if let ty::RawPtr(to_ptr_ty) = &cast_to.kind();
        if let Ok(from_layout) = cx.layout_of(from_ptr_ty.ty);
        if let Ok(to_layout) = cx.layout_of(to_ptr_ty.ty);
        // only a *stricter* target alignment is a problem
        if from_layout.align.abi < to_layout.align.abi;
        // with c_void, we inherently need to trust the user
        if !is_c_void(cx, from_ptr_ty.ty);
        // when casting from a ZST, we don't know enough to properly lint
        if !from_layout.is_zst();
        then {
            span_lint(
                cx,
                CAST_PTR_ALIGNMENT,
                expr.span,
                &format!(
                    "casting from `{}` to a more-strictly-aligned pointer (`{}`) ({} < {} bytes)",
                    cast_from,
                    cast_to,
                    from_layout.align.abi.bytes(),
                    to_layout.align.abi.bytes(),
                ),
            );
        }
    }
}
/// Check if the given type is either `core::ffi::c_void` or
/// one of the platform specific `libc::<platform>::c_void` of libc.
fn is_c_void(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
    if let ty::Adt(adt, _) = ty.kind() {
        let names = cx.get_def_path(adt.did);
        if names.is_empty() {
            return false;
        }
        // any `libc::**::c_void`, or exactly `core::…::c_void`
        // (note: `||` binds looser than `&&`, so the second arm requires both)
        if names[0] == sym::libc || names[0] == sym::core && *names.last().unwrap() == sym!(c_void) {
            return true;
        }
    }
    false
}
9b87b5260a2c242cea1a121c904952cb82282fe2 | 7,778 | // Copyright 2018 Guillaume Pinot (@TeXitoi) <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use structopt::StructOpt;
use std::ffi::{OsStr, OsString};
use std::num::ParseIntError;
use std::path::PathBuf;
#[derive(StructOpt, PartialEq, Debug)]
struct PathOpt {
    // required option, converted straight from the OS string
    #[structopt(short = "p", long = "path", parse(from_os_str))]
    path: PathBuf,
    // falls back to "../" when not given
    #[structopt(short = "d", default_value = "../", parse(from_os_str))]
    default_path: PathBuf,
    // may be repeated; values collected in order
    #[structopt(short = "v", parse(from_os_str))]
    vector_path: Vec<PathBuf>,
    #[structopt(short = "o", parse(from_os_str))]
    option_path_1: Option<PathBuf>,
    #[structopt(short = "q", parse(from_os_str))]
    option_path_2: Option<PathBuf>,
}

/// `from_os_str` populates required, defaulted, repeated and optional
/// `PathBuf` fields.
#[test]
fn test_path_opt_simple() {
    assert_eq!(
        PathOpt {
            path: PathBuf::from("/usr/bin"),
            default_path: PathBuf::from("../"),
            vector_path: vec![
                PathBuf::from("/a/b/c"),
                PathBuf::from("/d/e/f"),
                PathBuf::from("/g/h/i"),
            ],
            option_path_1: None,
            option_path_2: Some(PathBuf::from("j.zip")),
        },
        PathOpt::from_clap(&PathOpt::clap().get_matches_from(&[
            "test", "-p", "/usr/bin", "-v", "/a/b/c", "-v", "/d/e/f", "-v", "/g/h/i", "-q",
            "j.zip",
        ]))
    );
}
/// Parse a hexadecimal string (no `0x` prefix) into a `u64`.
fn parse_hex(input: &str) -> Result<u64, ParseIntError> {
    const RADIX: u32 = 16;
    u64::from_str_radix(input, RADIX)
}
#[derive(StructOpt, PartialEq, Debug)]
struct HexOpt {
    // parsed via the custom hex parser above
    #[structopt(short = "n", parse(try_from_str = parse_hex))]
    number: u64,
}
/// Valid hex strings parse to the expected values; an invalid digit surfaces
/// the `ParseIntError` message through clap's error.
#[test]
#[allow(clippy::unreadable_literal)]
fn test_parse_hex() {
    assert_eq!(
        HexOpt { number: 5 },
        HexOpt::from_clap(&HexOpt::clap().get_matches_from(&["test", "-n", "5"]))
    );
    assert_eq!(
        HexOpt { number: 0xabcdef },
        HexOpt::from_clap(&HexOpt::clap().get_matches_from(&["test", "-n", "abcdef"]))
    );
    let err = HexOpt::clap()
        .get_matches_from_safe(&["test", "-n", "gg"])
        .unwrap_err();
    // Use an explicit format string: `assert!(cond, err)` relied on the
    // pre-2021 implicit-Display panic form and no longer compiles on
    // edition 2021.
    assert!(
        err.message.contains("invalid digit found in string"),
        "{}",
        err
    );
}
/// Constant-result parsers exercising every `parse(...)` flavor below.
/// Each ignores its input and returns a fixed letter.
fn custom_parser_1(_input: &str) -> &'static str {
    "A"
}

/// Fallible `&str` parser; always succeeds with "B".
fn custom_parser_2(_input: &str) -> Result<&'static str, u32> {
    Ok("B")
}

/// OS-string parser; always yields "C".
fn custom_parser_3(_input: &OsStr) -> &'static str {
    "C"
}

/// Fallible OS-string parser; always succeeds with "D".
fn custom_parser_4(_input: &OsStr) -> Result<&'static str, OsString> {
    Ok("D")
}
#[derive(StructOpt, PartialEq, Debug)]
struct NoOpOpt {
    // one field per custom-parser flavor:
    // from_str, try_from_str, from_os_str, try_from_os_str
    #[structopt(short = "a", parse(from_str = custom_parser_1))]
    a: &'static str,
    #[structopt(short = "b", parse(try_from_str = custom_parser_2))]
    b: &'static str,
    #[structopt(short = "c", parse(from_os_str = custom_parser_3))]
    c: &'static str,
    #[structopt(short = "d", parse(try_from_os_str = custom_parser_4))]
    d: &'static str,
}

/// Each parser flavor runs and its constant output lands in the right field.
#[test]
fn test_every_custom_parser() {
    assert_eq!(
        NoOpOpt {
            a: "A",
            b: "B",
            c: "C",
            d: "D"
        },
        NoOpOpt::from_clap(
            &NoOpOpt::clap().get_matches_from(&["test", "-a=?", "-b=?", "-c=?", "-d=?"])
        )
    );
}
// Note: can't use `Vec<u8>` directly, as structopt would instead look for
// conversion function from `&str` to `u8`.
type Bytes = Vec<u8>;

#[derive(StructOpt, PartialEq, Debug)]
struct DefaultedOpt {
    // `parse(...)` without an explicit function uses the default conversion
    // for each flavor
    #[structopt(short = "b", parse(from_str))]
    bytes: Bytes,
    #[structopt(short = "i", parse(try_from_str))]
    integer: u64,
    #[structopt(short = "p", parse(from_os_str))]
    path: PathBuf,
}

/// Default parser functions handle UTF-8 byte strings, integers and paths.
#[test]
fn test_parser_with_default_value() {
    assert_eq!(
        DefaultedOpt {
            bytes: b"E\xc2\xb2=p\xc2\xb2c\xc2\xb2+m\xc2\xb2c\xe2\x81\xb4".to_vec(),
            integer: 9000,
            path: PathBuf::from("src/lib.rs"),
        },
        DefaultedOpt::from_clap(&DefaultedOpt::clap().get_matches_from(&[
            "test",
            "-b",
            "E²=p²c²+m²c⁴",
            "-i",
            "9000",
            "-p",
            "src/lib.rs",
        ]))
    );
}
/// Newtype holding an occurrence count narrowed to a byte.
#[derive(PartialEq, Debug)]
struct Foo(u8);

/// Convert a raw occurrence count into `Foo`, truncating to 8 bits.
fn foo(count: u64) -> Foo {
    Foo(count as u8)
}
#[derive(StructOpt, PartialEq, Debug)]
struct Occurrences {
    // `from_occurrences` counts how many times the flag appears
    #[structopt(short = "s", long = "signed", parse(from_occurrences))]
    signed: i32,
    #[structopt(short = "l", parse(from_occurrences))]
    little_signed: i8,
    #[structopt(short = "u", parse(from_occurrences))]
    unsigned: usize,
    #[structopt(short = "r", parse(from_occurrences))]
    little_unsigned: u8,
    // custom conversion (`foo`) from the raw u64 occurrence count
    #[structopt(short = "c", long = "custom", parse(from_occurrences = foo))]
    custom: Foo,
}

/// Occurrence counting works across integer widths and custom converters.
#[test]
fn test_parser_occurrences() {
    assert_eq!(
        Occurrences {
            signed: 3,
            little_signed: 1,
            unsigned: 0,
            little_unsigned: 4,
            custom: Foo(5),
        },
        Occurrences::from_clap(&Occurrences::clap().get_matches_from(&[
            "test", "-s", "--signed", "--signed", "-l", "-rrrr", "-cccc", "--custom",
        ]))
    );
}
/// A custom `bool` parser works for required, defaulted, optional and
/// repeated fields.
#[test]
fn test_custom_bool() {
    // strict parser: only the literal strings "true"/"false" are accepted
    fn parse_bool(s: &str) -> Result<bool, String> {
        match s {
            "true" => Ok(true),
            "false" => Ok(false),
            _ => Err(format!("invalid bool {}", s)),
        }
    }
    #[derive(StructOpt, PartialEq, Debug)]
    struct Opt {
        #[structopt(short = "d", parse(try_from_str = parse_bool))]
        debug: bool,
        #[structopt(
            short = "v",
            default_value = "false",
            parse(try_from_str = parse_bool)
        )]
        verbose: bool,
        #[structopt(short = "t", parse(try_from_str = parse_bool))]
        tribool: Option<bool>,
        #[structopt(short = "b", parse(try_from_str = parse_bool))]
        bitset: Vec<bool>,
    }
    // `-d` is required and must carry a parsable value
    assert!(Opt::clap().get_matches_from_safe(&["test"]).is_err());
    assert!(Opt::clap().get_matches_from_safe(&["test", "-d"]).is_err());
    assert!(Opt::clap()
        .get_matches_from_safe(&["test", "-dfoo"])
        .is_err());
    assert_eq!(
        Opt {
            debug: false,
            verbose: false,
            tribool: None,
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dfalse"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: false,
            tribool: None,
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dtrue"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: false,
            tribool: None,
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dtrue", "-vfalse"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: true,
            tribool: None,
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dtrue", "-vtrue"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: false,
            tribool: Some(false),
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dtrue", "-tfalse"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: false,
            tribool: Some(true),
            bitset: vec![],
        },
        Opt::from_iter(&["test", "-dtrue", "-ttrue"])
    );
    assert_eq!(
        Opt {
            debug: true,
            verbose: false,
            tribool: None,
            bitset: vec![false, true, false, false],
        },
        Opt::from_iter(&["test", "-dtrue", "-bfalse", "-btrue", "-bfalse", "-bfalse"])
    );
}
| 26.546075 | 91 | 0.524814 |
9137cac76edc683ea50c378bcb006555b219d92c | 651 | use super::{Field, InformationElement};
use std::str;
use std::str::Utf8Error;
/// SSID information element (element ID 0).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Ssid {
    // raw element payload; not guaranteed to be valid UTF-8
    bytes: Vec<u8>,
}

impl Ssid {
    /// Wrap the raw SSID bytes without validation.
    pub fn new(bytes: Vec<u8>) -> Ssid {
        Ssid { bytes }
    }

    /// View the SSID as UTF-8 text; errors if the bytes are not valid UTF-8.
    pub fn as_str(&self) -> Result<&str, Utf8Error> {
        str::from_utf8(&self.bytes)
    }
}

impl InformationElement for Ssid {
    const NAME: &'static str = "SSID";
    const ID: u8 = 0;

    fn bytes(&self) -> &[u8] {
        &self.bytes
    }

    // Non-UTF-8 SSIDs are displayed as an empty string rather than failing.
    fn information_fields(&self) -> Vec<Field> {
        vec![Field::new("SSID", self.as_str().unwrap_or_default())]
    }
}

impl_display_for_ie!(Ssid);
bb064c6ef616fcf15cb771d3cd37d8a908cb72f2 | 4,018 | use azure_core::errors::AzureError;
use azure_core::AddAsHeader;
use http::request::Builder;
use http::HeaderMap;
use std::borrow::Cow;
use std::collections::HashMap;
use std::convert::TryFrom;
/// Key/value properties carried in the `x-ms-properties` request header.
/// `Cow` keys and values let callers pass either borrowed or owned strings.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Properties<'a, 'b>(HashMap<Cow<'a, str>, Cow<'b, str>>);

// header name used on both requests and responses
const HEADER: &str = "x-ms-properties";

impl<'a, 'b> Default for Properties<'a, 'b> {
    fn default() -> Self {
        Self::new()
    }
}
impl<'a, 'b> Properties<'a, 'b> {
    /// Create an empty property set.
    pub fn new() -> Self {
        Self(HashMap::new())
    }

    /// Insert (or replace) a key/value pair, returning the value previously
    /// stored under the key, if any.
    pub fn insert<K: Into<Cow<'a, str>>, V: Into<Cow<'b, str>>>(
        &mut self,
        k: K,
        v: V,
    ) -> Option<Cow<'b, str>> {
        let (key, value) = (k.into(), v.into());
        self.0.insert(key, value)
    }

    /// Borrow the underlying key/value map.
    pub fn hash_map(&self) -> &HashMap<Cow<'a, str>, Cow<'b, str>> {
        &self.0
    }
}
impl<'a, 'b> AddAsHeader for Properties<'a, 'b> {
    fn add_as_header(&self, builder: Builder) -> Builder {
        // the header is a comma separated list of key=base64(value) see
        // [https://docs.microsoft.com/en-us/rest/api/storageservices/datalakestoragegen2/filesystem/create#request-headers](https://docs.microsoft.com/en-us/rest/api/storageservices/datalakestoragegen2/filesystem/create#request-headers)
        //
        // Join the entries with an explicit separator instead of trimming a
        // trailing comma: the previous `&s[..s.len() - 1]` slicing underflowed
        // and panicked when the property map was empty.
        let header_value = self
            .0
            .iter()
            .map(|(k, v)| format!("{}={}", k.as_ref(), base64::encode(v.as_ref())))
            .collect::<Vec<_>>()
            .join(",");
        builder.header(HEADER, &header_value)
    }
}
impl TryFrom<&HeaderMap> for Properties<'static, 'static> {
    type Error = AzureError;

    /// Parse the `x-ms-properties` response header back into a `Properties`
    /// map (inverse of `add_as_header`).
    fn try_from(headers: &HeaderMap) -> Result<Self, Self::Error> {
        let mut properties = Self::new();
        // this is probably too complicated. Should we split
        // it in more maneageable code blocks?
        // The logic is this:
        // 1. Look for the header. If not found return error
        // 2. Split the header value by comma
        // 3. For each comma separated value:
        // 4. Split by equals. If we do not have at least 2 entries, return error.
        // 5. For each pair:
        // 6. Base64 decode the second entry (value). If error, return error.
        // 7. Insert the key value pair in the returned struct.
        headers
            .get(HEADER)
            .ok_or_else(|| AzureError::HeaderNotFound(HEADER.to_owned()))? // HEADER must exists or we return Err
            .to_str()?
            .split(',') // The list is a CSV so we split by comma
            .map(|key_value_pair| {
                let mut key_and_value = key_value_pair.split('='); // Each entry is key and value separated by =
                // we must have a key and a value (so two entries)
                let key = key_and_value
                    .next()
                    .ok_or_else(|| AzureError::GenericErrorWithText("missing key".to_owned()))?;
                let value = key_and_value
                    .next()
                    .ok_or_else(|| AzureError::GenericErrorWithText("missing value".to_owned()))?;
                // we do not check if there are more entries. We just ignore them.
                Ok((key, value))
            })
            .collect::<Result<Vec<(&str, &str)>, AzureError>>()? // if we have an error, return error
            .into_iter()
            .map(|(key, value)| {
                let value = std::str::from_utf8(&base64::decode(value)?)?.to_owned(); // the value is base64 encoded se we decode it
                Ok((key, value))
            })
            .collect::<Result<Vec<(&str, String)>, AzureError>>()? // if we have an error, return error
            .into_iter()
            .for_each(|(key, value)| {
                properties.insert(key.to_owned(), value); // finally store the key and value into the properties
            });
        Ok(properties)
    }
}
d9ed7d10a35890b398150e1d31fff52679749bed | 600 | use vizia::*;
// Example showing inline styling of views
fn main() {
    // Build a 500x500 window titled "Style" with two fixed-size colored labels.
    Application::new(WindowDescription::new().with_title("Style"), |cx| {
        VStack::new(cx, |cx| {
            Label::new(cx, "Label 1")
                .width(Pixels(100.0))
                .height(Pixels(30.0))
                .background_color(Color::blue());
            Label::new(cx, "Label 2")
                .width(Pixels(200.0))
                .height(Pixels(50.0))
                .background_color(Color::green());
        })
        .width(Pixels(500.0))
        .height(Pixels(500.0));
    })
    .run();
}
e5fd7e11881375696a71f6e5805c08088a7e8e71 | 18,870 | /*---------------------------------------------------------------------------------------------
* Copyright © 2016-present Earth Computing Corporation. All rights reserved.
* Licensed under the MIT License. See LICENSE.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
use multi_mut::HashMapMultiMut;
use std::{fmt, fmt::Write,
collections::{HashMap, HashSet},
iter::FromIterator,
//sync::mpsc::channel,
thread, thread::{JoinHandle}};
use crossbeam::crossbeam_channel::unbounded as channel;
use crate::blueprint::{Blueprint, Cell, };
use crate::config::{CONFIG, CellQty, LinkQty};
use crate::dal::{add_to_trace, fork_trace_header, get_cell_replay_lines, update_trace_header};
use crate::link::{Link, DuplexLinkPortChannel, LinkFromPorts, LinkToPorts };
use crate::nalcell::{NalCell};
use crate::name::{CellID, LinkID};
use crate::port::{PortSeed, CommonPortLike};
use crate::replay::{process_trace_record, TraceFormat};
use crate::simulated_border_port::{SimulatedBorderPortFactory, SimulatedBorderPort, DuplexPortNocChannel};
use crate::simulated_interior_port::{SimulatedInteriorPortFactory, SimulatedInteriorPort, DuplexPortLinkChannel,
LinkFromPort, LinkToPort, PortFromLink, PortToLink};
use crate::utility::{CellNo, CellConfig, PortNo, Edge, S, TraceHeaderParams, TraceType};
/// The two channel halves a link uses to talk to one port
/// (send to the port, receive from the port).
#[derive(Clone, Debug)]
pub struct DuplexLinkEndChannel {
    link_to_port: LinkToPort,
    link_from_port: LinkFromPort,
}
impl DuplexLinkEndChannel {
    pub fn get_link_to_port(&self) -> &LinkToPort { &self.link_to_port }
    pub fn get_link_from_port(&self) -> &LinkFromPort { &self.link_from_port }
}

/// Channel pairs for both endpoints (left/rite) of a link.
#[derive(Clone, Debug)]
pub struct DuplexLinkEndChannels {
    left: DuplexLinkEndChannel,
    rite: DuplexLinkEndChannel,
}
impl DuplexLinkEndChannels {
    pub fn new(left: DuplexLinkEndChannel, rite: DuplexLinkEndChannel) -> DuplexLinkEndChannels {
        DuplexLinkEndChannels { left, rite }
    }
}
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct CellInteriorConnection {
cell_no: CellNo,
port_no: PortNo,
}
impl CellInteriorConnection {
pub fn new(cell_no: CellNo, port_no: PortNo) -> CellInteriorConnection {
CellInteriorConnection { cell_no, port_no }
}
}
impl fmt::Display for CellInteriorConnection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(cell: {}, port: {})", *self.cell_no, *self.port_no)
}
}
/// A fully-resolved edge: the (cell, port) attachment point on each end of a link.
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct EdgeConnection {
    left: CellInteriorConnection,
    rite: CellInteriorConnection,
}
impl EdgeConnection {
    /// Pairs the two attachment points of an edge.
    pub fn new(left: CellInteriorConnection, rite: CellInteriorConnection) -> Self {
        Self { left, rite }
    }
}
impl fmt::Display for EdgeConnection {
    // Renders as "<left><->​<rite>" using each side's own Display impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}<->{}", self.left, self.rite)
    }
}
/// Shorthand for the concrete `NalCell` instantiation used throughout the simulator.
type NalCellType = NalCell::<SimulatedInteriorPortFactory, SimulatedInteriorPort, SimulatedBorderPortFactory, SimulatedBorderPort>;
/// A simulated rack: the set of cells (keyed by cell number) plus the links
/// joining them (keyed by the edge each link realizes).
#[derive(Clone, Debug, Default)]
pub struct Rack {
    cells: HashMap<CellNo, NalCellType>,
    links: HashMap<EdgeConnection, Link>,
}
impl Rack {
pub fn new() -> Rack { Default::default() }
    /// Builds the simulated rack described by `blueprint`.
    ///
    /// Phases:
    /// 1. For every edge, claim one free interior port on each end and allocate
    ///    the crossbeam channel pairs that connect those ports to the future link.
    /// 2. Instantiate the border and interior `NalCell`s; the two port factories
    ///    hand the pre-allocated channels to the simulated ports.
    /// 3. Create one `Link` per edge and spawn a thread that listens on it.
    ///
    /// `duplex_port_noc_channel_cell_port_map` carries the pre-built channels that
    /// connect border ports to the NOC. Returns the `JoinHandle`s of the spawned
    /// link-listener threads.
    pub fn initialize(&mut self, blueprint: &Blueprint, duplex_port_noc_channel_cell_port_map: HashMap::<CellNo, HashMap::<PortNo, DuplexPortNocChannel>>) -> Result<Vec<JoinHandle<()>>, Error> {
        let _f = "initialize";
        let num_cells = blueprint.get_ncells();
        let edge_list = blueprint.get_edge_list();
        let mut edge_connection_list = Vec::<EdgeConnection>::new();
        // Sanity checks: at least one cell, and at least ncells - 1 edges
        // (the minimum needed to connect all cells).
        if *num_cells < 1 { return Err(RackError::Cells{ num_cells, func_name: _f }.into()); }
        if edge_list.len() < *num_cells - 1 { return Err(RackError::Edges { nlinks: LinkQty(edge_list.len()), func_name: _f }.into() ); }
        let mut link_handles = Vec::new();
        // Port-side and link-side halves of the port<->link channels, per cell/port.
        let mut duplex_port_link_channel_cell_port_map = HashMap::<CellNo, HashMap::<PortNo, DuplexPortLinkChannel>>::new();
        let mut duplex_link_port_channel_cell_port_map = HashMap::<CellNo, HashMap::<PortNo, DuplexLinkPortChannel>>::new();
        let mut dest_cell_port_map = HashMap::<CellNo, HashMap::<PortNo, CellNo>>::new(); // This isn't needed yet, but may be
        let mut duplex_link_end_channel_map = HashMap::<CellInteriorConnection, DuplexLinkEndChannel>::new();
        // Phase 1: allocate channels for both ends of every edge.
        for edge in edge_list {
            // Claims the first interior port of `cell_no` that has no channels yet,
            // records the channel halves in the maps above, and returns the port number.
            let mut connect_port = |cell_no, dest_cell_no, side_name| {
                // NOTE(review): `expect(&format!(..))` builds the message eagerly even
                // on success (clippy: expect_fun_call); left unchanged here.
                let cell = blueprint.get_cell(cell_no).expect(&format!("Rack: blueprint.get_cell(cell_no for cell {} must work", cell_no));
                let interior_ports = cell.get_interior_ports();
                for interior_port_no in interior_ports {
                    // Port 0 is not a valid interior port.
                    if **interior_port_no == 0 {
                        return Err(RackError::InteriorPort { func_name: _f, cell_no: cell_no }.into())
                    }
                    // A port is free if no channel has been recorded for it yet.
                    if (!duplex_port_link_channel_cell_port_map.contains_key(&cell_no)) ||
                        (!duplex_port_link_channel_cell_port_map[&cell_no].contains_key(&interior_port_no)) {
                        // One unbounded crossbeam channel per direction.
                        let (link_to_port, port_from_link): (LinkToPort, PortFromLink) = channel();
                        let (port_to_link, link_from_port): (PortToLink, LinkFromPort) = channel();
                        duplex_port_link_channel_cell_port_map
                            .entry(cell_no)
                            .or_insert(HashMap::new())
                            .insert(*interior_port_no, DuplexPortLinkChannel::new(port_from_link, port_to_link));
                        duplex_link_port_channel_cell_port_map
                            .entry(cell_no)
                            .or_insert(HashMap::new())
                            .insert(*interior_port_no, DuplexLinkPortChannel::new(link_from_port, link_to_port));
                        dest_cell_port_map
                            .entry(cell_no)
                            .or_insert(HashMap::new())
                            .insert(*interior_port_no, dest_cell_no);
                        return Ok(interior_port_no);
                    }
                }
                // Every interior port of this cell is already wired.
                return Err(RackError::NoPortAvailable { edge: *edge, side_name: side_name, func_name: _f, comment: "no port available for edge", });
            };
            let left_port_no = connect_port(edge.0, edge.1, "left")?;
            let rite_port_no = connect_port(edge.1, edge.0, "rite")?;
            let edge_connection: EdgeConnection = EdgeConnection {
                left: CellInteriorConnection {
                    cell_no: edge.0,
                    port_no: *left_port_no,
                },
                rite: CellInteriorConnection {
                    cell_no: edge.1,
                    port_no: *rite_port_no,
                },
            };
            edge_connection_list.push(edge_connection);
            // Remember the link-side channel ends for each side of the edge; the
            // Link built in phase 3 consumes them.
            let left_duplex_link_port_channel_port_map = &duplex_link_port_channel_cell_port_map[&edge.0];
            let left_duplex_link_port_channel = &left_duplex_link_port_channel_port_map[&left_port_no];
            duplex_link_end_channel_map.insert(
                edge_connection.left,
                DuplexLinkEndChannel {
                    link_to_port: left_duplex_link_port_channel.get_link_to_port().clone(),
                    link_from_port: left_duplex_link_port_channel.get_link_from_port().clone(),
                },
            );
            let rite_duplex_link_port_channel_port_map = &duplex_link_port_channel_cell_port_map[&edge.1];
            let rite_duplex_link_port_channel = &rite_duplex_link_port_channel_port_map[&rite_port_no];
            duplex_link_end_channel_map.insert(
                edge_connection.rite,
                DuplexLinkEndChannel {
                    link_to_port: rite_duplex_link_port_channel.get_link_to_port().clone(),
                    link_from_port: rite_duplex_link_port_channel.get_link_from_port().clone(),
                },
            );
        }
        // Name -> cell-number lookup used by both port factories.
        let mut cell_no_map = HashMap::<String, CellNo>::new();
        for border_cell in blueprint.get_border_cells() {
            cell_no_map.insert(border_cell.get_name(), border_cell.get_cell_no());
        }
        for interior_cell in blueprint.get_interior_cells() {
            cell_no_map.insert(interior_cell.get_name(), interior_cell.get_cell_no());
        }
        let simulated_border_port_factory = SimulatedBorderPortFactory::new(
            PortSeed::new(),
            cell_no_map.clone(),
            blueprint.clone(),
            duplex_port_noc_channel_cell_port_map,
        );
        let simulated_interior_port_factory = SimulatedInteriorPortFactory::new(
            PortSeed::new(),
            cell_no_map.clone(),
            blueprint.clone(),
            duplex_port_link_channel_cell_port_map,
        );
        // Phase 2a: create border cells (they also get the border-port factory).
        for border_cell in blueprint.get_border_cells() {
            let cell_no = border_cell.get_cell_no();
            let border_ports = border_cell.get_border_ports();
            let (nal_cell, _join_handle) = match NalCell::new(
                &border_cell.get_name(),
                border_cell.get_num_phys_ports(),
                &HashSet::from_iter(border_ports.clone()),
                CellConfig::Large,
                simulated_interior_port_factory.clone(),
                Some(simulated_border_port_factory.clone()),
            ) {
                Ok(t) => t,
                Err(e) => {
                    println!("Rack: {} error from nalcell {}", _f, e);
                    return Err(RackError::Chain { func_name: _f, comment: S("Border cell") }.into() );
                }
            };
            {
                if CONFIG.trace_options.all || CONFIG.trace_options.dc || CONFIG.trace_options.visualize { // Needed for visualization
                    let trace_params = &TraceHeaderParams { module: file!(), line_no: line!(), function: _f, format: "border_cell_start" };
                    let cell_id = nal_cell.get_id();
                    let trace = json!({ "cell_id": cell_id, "cell_number": cell_no,
                        "border_ports": border_ports, "location": CONFIG.geometry.get(*cell_no)});
                    add_to_trace(TraceType::Trace, trace_params, &trace, _f);
                }
            }
            self.cells.insert(cell_no, nal_cell);
        }
        // Phase 2b: create interior cells (no border ports, no border factory).
        for interior_cell in blueprint.get_interior_cells() {
            let cell_no = interior_cell.get_cell_no();
            let (nal_cell, _join_handle) = match NalCell::new(
                &interior_cell.get_name(),
                interior_cell.get_num_phys_ports(),
                &HashSet::new(),
                CellConfig::Large,
                simulated_interior_port_factory.clone(),
                None,
            )
            {
                Ok(t) => t,
                Err(e) => {
                    println!("Rack: {} error from nalcell {}", _f, e);
                    return Err(RackError::Chain { func_name: _f, comment: S("Interior cell") }.into());
                }
            };
            {
                if CONFIG.trace_options.all || CONFIG.trace_options.dc || CONFIG.trace_options.visualize { // Needed for visualization
                    let trace_params = &TraceHeaderParams { module: file!(), line_no: line!(), function: _f, format: "interior_cell_start" };
                    let cell_id = nal_cell.get_id();
                    let trace = json!({ "cell_id": cell_id, "cell_number": cell_no, "location": CONFIG.geometry.get(*cell_no as usize) });
                    add_to_trace(TraceType::Trace, trace_params, &trace, _f);
                }
            }
            self.cells.insert(cell_no, nal_cell);
        }
        println!("Created all simulated cells\n\nConnections");
        // Phase 3: build a Link per edge and spawn its listener thread.
        for edge_connection in edge_connection_list {
            // NOTE(review): `get_pair_mut` is not a std HashMap method — presumably an
            // extension trait returning mutable borrows of two distinct entries; confirm.
            let (left_cell, rite_cell) = self.cells
                .get_pair_mut(&edge_connection.left.cell_no, &edge_connection.rite.cell_no)
                .expect("Rack: problem with edge connection");
            let left_cell_id: CellID = left_cell.get_id(); // For Trace
            let left_port_no = &edge_connection.left.port_no;
            let left_port = left_cell.listen_link_and_pe(&left_port_no)?;
            let rite_port_no = &edge_connection.rite.port_no;
            let rite_cell_id: CellID = rite_cell.get_id(); // For Trace
            let rite_port = rite_cell.listen_link_and_pe(&rite_port_no)?;
            // The link gets the link->port senders recorded in phase 1.
            let link = Link::new(
                left_port.get_id(),
                rite_port.get_id(),
                LinkToPorts::new(
                    duplex_link_end_channel_map[&edge_connection.left].get_link_to_port().clone(),
                    duplex_link_end_channel_map[&edge_connection.rite].get_link_to_port().clone(),
                )
            )?;
            println!("{}", edge_connection);
            {
                if CONFIG.trace_options.all || CONFIG.trace_options.dc {
                    let trace_params = &TraceHeaderParams { module: file!(), line_no: line!(), function: _f, format: "connect_link" };
                    let trace = json!({ "left_cell": left_cell_id, "rite_cell": rite_cell_id, "left_port": left_port_no, "rite_port": rite_port_no, "link_id": link.get_id() });
                    add_to_trace(TraceType::Trace, trace_params, &trace, _f);
                }
            }
            // Each link listens on its own thread, using the port->link receivers.
            let mut link_clone = link.clone();
            let child_trace_header = fork_trace_header();
            let thread_name = format!("Link {} thread", link.get_id());
            let link_from_left = duplex_link_end_channel_map[&edge_connection.left].link_from_port.clone();
            let link_from_rite = duplex_link_end_channel_map[&edge_connection.rite].link_from_port.clone();
            let join_handle = thread::Builder::new().name(thread_name).spawn( move || {
                update_trace_header(child_trace_header);
                let _ = link_clone.listen(LinkFromPorts::new(
                    link_from_left,
                    link_from_rite,
                ));
            })?;
            //let mut handle_pair = link.start_threads(link_to_left, link_from_left, link_to_rite, link_from_rite)?;
            link_handles.append(&mut vec![join_handle]);
            self.links.insert(edge_connection, link);
        }
        println!("\nRack {}: Assigned ports; created and listening on simulated links", _f);
        Ok(link_handles)
    }
pub fn construct(blueprint: &Blueprint, duplex_port_noc_channel_cell_port_map: HashMap::<CellNo, HashMap::<PortNo, DuplexPortNocChannel>>) -> Result<(Rack, Vec<JoinHandle<()>>), Error> {
let _f = "construct";
let mut rack = Rack::new();
let join_handles = rack.initialize(blueprint, duplex_port_noc_channel_cell_port_map).context(RackError::Chain { func_name: _f, comment: S("initialize")})?;
Ok((rack, join_handles))
}
pub fn get_cells(&self) -> &HashMap<CellNo, NalCell::<SimulatedInteriorPortFactory, SimulatedInteriorPort, SimulatedBorderPortFactory, SimulatedBorderPort>> { &self.cells }
pub fn get_links_mut(&mut self) -> &mut HashMap<EdgeConnection, Link> { &mut self.links }
pub fn get_links(&self) -> &HashMap<EdgeConnection, Link> { &self.links }
pub fn get_cell_ids(&self) -> HashMap<CellNo, CellID> {
self.cells.iter().map(|cell_no_and_cell| (*cell_no_and_cell.0, cell_no_and_cell.1.get_id())).collect::<HashMap<CellNo, _>>()
}
pub fn get_link_ids(&self) -> HashMap<EdgeConnection, LinkID> {
self.links.iter().map(|edge_connection_and_link| (*edge_connection_and_link.0, edge_connection_and_link.1.get_id())).collect::<HashMap<EdgeConnection, _>>()
}
pub fn select_noc_border_cell(&mut self) -> Result<(CellNo, NalCell::<SimulatedInteriorPortFactory, SimulatedInteriorPort, SimulatedBorderPortFactory, SimulatedBorderPort>), Error> {
let _f = "select_noc_border_cell";
return if CONFIG.replay {
let mut trace_lines = get_cell_replay_lines("Rack").context(RackError::Chain { func_name: _f, comment: S("Rack") })?;
let record = trace_lines.next().transpose()?.expect(&format!("First record for rack must be there"));
let trace_format = process_trace_record(record)?;
match trace_format {
TraceFormat::BorderCell(cell_no,) => {
let cell = self.cells.get_mut(&cell_no)
.ok_or::<Error>(RackError::Boundary { func_name: _f }.into())?;
Ok((cell_no, (*cell).clone()))
},
_ => {
unimplemented!()
}
}
} else {
self.cells
.iter()
.find(|(_, nalcell)| nalcell.is_border())
.map(|(cell_no, cell)| (*cell_no, (*cell).clone()))
.ok_or::<Error>(RackError::Boundary { func_name: _f }.into())
}
}
}
impl fmt::Display for Rack {
    // Lists every link, then up to the first 30 cells (indexed by CellNo 0..len,
    // which assumes cell numbers are contiguous from 0).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Build the whole report into a String first, then emit it in one write.
        let mut s = format!("\nLinks\n");
        for (_edge_connection, link) in &self.links {
            write!(s, "  {}\n", link)?;
        }
        s = s + "\nCells";
        for i in 0..self.cells.len() {
            if i < 30 { write!(s, "\n{}\n", self.cells[&CellNo(i)])?; }
        }
        write!(f, "{}", s)
    }
}
// Errors
use failure::{Error, ResultExt};
/// Errors produced while building or operating a `Rack`.
#[derive(Debug, Fail)]
pub enum RackError {
    /// Wraps a lower-level failure with the function it occurred in.
    #[fail(display = "RackError::Chain {} {}", func_name, comment)]
    Chain { func_name: &'static str, comment: String },
    /// No border (boundary) cell could be found.
    #[fail(display = "RackError::Boundary {}: No boundary cells found", func_name)]
    Boundary { func_name: &'static str },
    /// The blueprint declared fewer than one cell.
    #[fail(display = "RackError::Cells {}: The number of cells {:?} must be at least 1", func_name, num_cells)]
    Cells { num_cells: CellQty, func_name: &'static str },
    /// Not enough edges to connect every cell (need at least ncells - 1).
    #[fail(display = "RackError::Edges {}: {:?} is not enough links to connect all cells", func_name, nlinks)]
    Edges { nlinks: LinkQty, func_name: &'static str },
    /// Port 0 appeared as an interior port, which is invalid.
    #[fail(display = "RackError::InteriorPort {} {}", func_name, cell_no)]
    InteriorPort { func_name: &'static str, cell_no: CellNo },
    /// The edge references a nonexistent cell or port.
    #[fail(display = "RackError::Wire {}: {:?} is not a valid edge at {}", func_name, edge, comment)]
    Wire { edge: Edge, func_name: &'static str, comment: &'static str },
    /// Every interior port of a cell was already wired.
    // Fixed: the display arguments were listed as (side_name, edge, comment),
    // printing the side name where the edge belongs and vice versa.
    #[fail(display = "RackError::NoPortAvailable {}: {:?} No port available for {} side of edge at {}", func_name, edge, side_name, comment)]
    NoPortAvailable { edge: Edge, side_name: &'static str, func_name: &'static str, comment: &'static str },
}
| 53.760684 | 194 | 0.602279 |
fce3e747301492d32c5a7cbfd2c482674318b420 | 20,974 | use self::{
ctx::Ctx,
storage::{Storage, *},
};
use crate::{
marks::Marks,
util::{can_end_conditionally, idents_used_by, now},
};
use fxhash::{FxHashMap, FxHashSet};
use std::time::Instant;
use swc_atoms::JsWord;
use swc_common::{SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::{ident::IdentLike, Id};
use swc_ecma_visit::{noop_visit_type, Node, Visit, VisitWith};
mod ctx;
pub(crate) mod storage;
/// Runs usage analysis over `n` and collects the result into [`ProgramData`].
///
/// Convenience wrapper around [`analyze_with_storage`] using the default
/// storage type.
pub(crate) fn analyze<N>(n: &N, marks: Option<Marks>) -> ProgramData
where
    N: VisitWith<UsageAnalyzer>,
{
    analyze_with_storage::<ProgramData, _>(n, marks)
}
/// TODO: Track assignments to variables via `arguments`.
/// TODO: Scope-local. (Including block)
///
/// Walks `n` with a [`UsageAnalyzer`] and returns the filled storage `S`.
///
/// If `marks` is [None], markers are ignored.
pub(crate) fn analyze_with_storage<S, N>(n: &N, marks: Option<Marks>) -> S
where
    S: Storage,
    N: VisitWith<UsageAnalyzer<S>>,
{
    // `now()` returns None when timing is disabled; only then is the
    // elapsed-time log below skipped.
    let start_time = now();
    let mut v = UsageAnalyzer {
        data: Default::default(),
        marks,
        scope: Default::default(),
        ctx: Default::default(),
    };
    n.visit_with(&Invalid { span: DUMMY_SP }, &mut v);
    // Fold the scope flags accumulated at the top level into the result storage.
    let top_scope = v.scope;
    v.data.top_scope().merge(top_scope, false);
    if let Some(start_time) = start_time {
        let end_time = Instant::now();
        log::debug!("Scope analysis took {:?}", end_time - start_time);
    }
    v.data
}
/// Everything the analyzer learned about one variable binding.
#[derive(Debug, Default)]
pub(crate) struct VarUsageInfo {
    /// `true` if inlining this binding is forbidden (e.g. a `noinline` mark
    /// was seen on a call involving it).
    pub inline_prevented: bool,
    /// The number of reference to this identifier.
    pub ref_count: usize,
    /// `true` if a variable is conditionally initialized.
    pub cond_init: bool,
    /// `false` if it's only used.
    pub declared: bool,
    /// How many times the binding was declared.
    pub declared_count: usize,
    /// `true` if the enclosing function defines this variable as a parameter.
    pub declared_as_fn_param: bool,
    /// `true` if this is the name of a function expression.
    pub declared_as_fn_expr: bool,
    /// Number of direct assignments to the variable.
    pub assign_count: usize,
    /// Number of times the variable may have been mutated through a call.
    pub mutation_by_call_count: usize,
    /// Number of usages (reads) recorded.
    pub usage_count: usize,
    /// The variable itself is modified.
    pub reassigned: bool,
    /// The variable itself or a property of it is modified.
    pub mutated: bool,
    /// A property of the variable was read (`x.foo`).
    pub has_property_access: bool,
    /// Names of properties accessed on the variable.
    pub accessed_props: FxHashSet<JsWord>,
    /// `true` if the binding is exported from the module.
    pub exported: bool,
    /// True if used **above** the declaration. (Not eval order).
    pub used_above_decl: bool,
    /// `true` if it's declared by function parameters or variables declared in
    /// a closest function and used only within it and not used by child
    /// functions.
    pub is_fn_local: bool,
    // Set when a nested function uses the binding; clears `is_fn_local`.
    used_by_nested_fn: bool,
    /// `true` if the binding is used inside a loop body.
    pub used_in_loop: bool,
    /// `var`/`let`/`const` if declared by a variable declaration.
    pub var_kind: Option<VarDeclKind>,
    /// `true` if the declaration had an initializer.
    pub var_initialized: bool,
    /// `true` if declared as a `catch (e)` parameter.
    pub declared_as_catch_param: bool,
    /// TODO: Implement this.
    ///
    /// Indicates a variable or function is overridden without being used.
    pub overriden_without_used: bool,
    /// Member access on this binding cannot have side effects (initializer
    /// was an array or literal).
    pub no_side_effect_for_member_access: bool,
    /// `true` if the binding appears in callee position (`x(..)`).
    pub used_as_callee: bool,
    /// In `c = b`, `b` infects `c`.
    infects: Vec<Id>,
}
impl VarUsageInfo {
    /// `true` when the variable is never directly reassigned but may be
    /// mutated exactly once through a call taking it as an argument.
    pub fn is_mutated_only_by_one_call(&self) -> bool {
        self.assign_count == 0 && self.mutation_by_call_count == 1
    }
}
/// The kind of lexical scope a child analyzer represents.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ScopeKind {
    /// A function scope.
    Fn,
    /// A block scope.
    Block,
}
/// Flags recorded per scope during analysis.
#[derive(Debug, Default, Clone)]
pub(crate) struct ScopeData {
    /// A `with` statement occurs in this scope.
    pub has_with_stmt: bool,
    /// A direct call to `eval(..)` occurs in this scope.
    pub has_eval_call: bool,
}
/// Analyzed info of a whole program we are working on.
#[derive(Debug, Default)]
pub(crate) struct ProgramData {
    /// Per-binding usage information, keyed by identifier + hygiene.
    pub vars: FxHashMap<Id, VarUsageInfo>,
    /// Scope flags for the top level of the program.
    pub top: ScopeData,
    /// Scope flags for every nested scope, keyed by its syntax context.
    pub scopes: FxHashMap<SyntaxContext, ScopeData>,
}
/// This assumes there are no two variables with the same name and the same
/// span hygiene.
#[derive(Debug)]
pub(crate) struct UsageAnalyzer<S = ProgramData>
where
    S: Storage,
{
    // Accumulated per-binding usage data.
    data: S,
    // Optional markers (e.g. noinline/standalone); ignored when None.
    marks: Option<Marks>,
    // Flags for the scope currently being visited.
    scope: S::ScopeData,
    // Visit-time context (loop/cond/assignment position, etc.).
    ctx: Ctx,
}
impl<S> UsageAnalyzer<S>
where
    S: Storage,
{
    /// Runs `op` with a fresh child analyzer for the scope identified by
    /// `child_ctxt`, then merges the child's data back into `self.data`
    /// according to `kind` (function vs block scope).
    fn with_child<F, Ret>(&mut self, child_ctxt: SyntaxContext, kind: ScopeKind, op: F) -> Ret
    where
        F: FnOnce(&mut UsageAnalyzer<S>) -> Ret,
    {
        let mut child = UsageAnalyzer {
            data: Default::default(),
            marks: self.marks,
            ctx: self.ctx,
            scope: Default::default(),
        };
        let ret = op(&mut child);
        {
            // Fold the child's accumulated scope flags into its scope entry.
            let child_scope = child.data.scope(child_ctxt);
            child_scope.merge(child.scope, false);
        }
        self.data.merge(kind, child.data);
        ret
    }
    /// Records a read (or a write, when `is_assign`) of `i` under the current context.
    fn report_usage(&mut self, i: &Ident, is_assign: bool) {
        self.data.report_usage(self.ctx, i, is_assign)
    }
    /// Records a declaration of `i` and returns its mutable usage entry so the
    /// caller can set additional flags (e.g. declared-as-param).
    fn declare_decl(
        &mut self,
        i: &Ident,
        has_init: bool,
        kind: Option<VarDeclKind>,
        _is_fn_decl: bool,
    ) -> &mut S::VarData {
        self.scope.add_declared_symbol(i);
        self.data.declare_decl(self.ctx, i, has_init, kind)
    }
}
// The visitor itself: each `visit_*` adjusts `self.ctx` (loop/cond/assignment
// position, etc.) before descending, so that identifier sightings are recorded
// with the right flags. Scope-creating nodes go through `with_child`.
impl<S> Visit for UsageAnalyzer<S>
where
    S: Storage,
{
    noop_visit_type!();
    fn visit_await_expr(&mut self, n: &AwaitExpr, _: &dyn Node) {
        let ctx = Ctx {
            in_await_arg: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_arrow_expr(&mut self, n: &ArrowExpr, _: &dyn Node) {
        // An arrow is a function scope; params are declared inside it.
        self.with_child(n.span.ctxt, ScopeKind::Fn, |child| {
            {
                let ctx = Ctx {
                    in_pat_of_param: true,
                    ..child.ctx
                };
                n.params.visit_with(n, &mut *child.with_ctx(ctx));
            }
            match &n.body {
                BlockStmtOrExpr::BlockStmt(body) => {
                    // We use visit_children_with instead of visit_with to bypass block scope
                    // handler.
                    body.visit_children_with(child);
                }
                BlockStmtOrExpr::Expr(body) => {
                    body.visit_with(n, child);
                }
            }
        })
    }
    fn visit_assign_expr(&mut self, n: &AssignExpr, _: &dyn Node) {
        // Left side is an exact reassignment target; right side is a plain read.
        let ctx = Ctx {
            in_assign_lhs: true,
            is_exact_reassignment: true,
            ..self.ctx
        };
        n.left.visit_with(n, &mut *self.with_ctx(ctx));
        let ctx = Ctx {
            in_assign_lhs: false,
            is_exact_reassignment: false,
            ..self.ctx
        };
        n.right.visit_with(n, &mut *self.with_ctx(ctx));
    }
    fn visit_block_stmt(&mut self, n: &BlockStmt, _: &dyn Node) {
        self.with_child(n.span.ctxt, ScopeKind::Block, |child| {
            n.visit_children_with(child);
        })
    }
    fn visit_call_expr(&mut self, n: &CallExpr, _: &dyn Node) {
        // A `noinline` mark on the call poisons inlining for everything inside it.
        let inline_prevented = self.ctx.inline_prevented
            || self
                .marks
                .map(|marks| n.span.has_mark(marks.noinline))
                .unwrap_or_default();
        {
            let ctx = Ctx {
                inline_prevented,
                ..self.ctx
            };
            n.callee.visit_with(n, &mut *self.with_ctx(ctx));
        }
        match &n.callee {
            ExprOrSuper::Super(_) => {}
            ExprOrSuper::Expr(callee) => match &**callee {
                Expr::Ident(callee) => {
                    self.data
                        .var_or_default(callee.to_id())
                        .mark_used_as_callee();
                }
                _ => {}
            },
        }
        {
            // Arguments are exact call arguments (may be mutated by the callee).
            let ctx = Ctx {
                inline_prevented,
                in_call_arg: true,
                is_exact_arg: true,
                is_exact_reassignment: false,
                ..self.ctx
            };
            n.args.visit_with(n, &mut *self.with_ctx(ctx));
        }
        // A direct `eval(..)` call makes the enclosing scope dynamic.
        match &n.callee {
            ExprOrSuper::Expr(callee) => match &**callee {
                Expr::Ident(Ident { sym, .. }) if *sym == *"eval" => {
                    self.scope.mark_eval_called();
                }
                _ => {}
            },
            _ => {}
        }
    }
    fn visit_catch_clause(&mut self, n: &CatchClause, _: &dyn Node) {
        // Everything in a catch clause executes conditionally.
        {
            let ctx = Ctx {
                in_cond: true,
                in_catch_param: true,
                ..self.ctx
            };
            n.param.visit_with(n, &mut *self.with_ctx(ctx));
        }
        {
            let ctx = Ctx {
                in_cond: true,
                ..self.ctx
            };
            n.body.visit_with(n, &mut *self.with_ctx(ctx));
        }
    }
    fn visit_class(&mut self, n: &Class, _: &dyn Node) {
        n.decorators.visit_with(n, self);
        {
            // Super classes are never inlined.
            let ctx = Ctx {
                inline_prevented: true,
                ..self.ctx
            };
            n.super_class.visit_with(n, &mut *self.with_ctx(ctx));
        }
        n.body.visit_with(n, self);
    }
    fn visit_class_decl(&mut self, n: &ClassDecl, _: &dyn Node) {
        self.declare_decl(&n.ident, true, None, false);
        n.visit_children_with(self);
    }
    fn visit_do_while_stmt(&mut self, n: &DoWhileStmt, _: &dyn Node) {
        let ctx = Ctx {
            in_loop: true,
            in_cond: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_export_named_specifier(&mut self, n: &ExportNamedSpecifier, _: &dyn Node) {
        self.report_usage(&n.orig, false)
    }
    fn visit_expr(&mut self, e: &Expr, _: &dyn Node) {
        e.visit_children_with(self);
        // Bare identifier expression: record a usage; it counts as a write when
        // we are in an update expression or on the left of an assignment.
        match e {
            Expr::Ident(i) => {
                self.report_usage(i, self.ctx.in_update_arg || self.ctx.in_assign_lhs);
            }
            _ => {}
        }
    }
    fn visit_fn_decl(&mut self, n: &FnDecl, _: &dyn Node) {
        self.declare_decl(&n.ident, true, None, true);
        n.visit_children_with(self);
    }
    fn visit_fn_expr(&mut self, n: &FnExpr, _: &dyn Node) {
        n.visit_children_with(self);
        if let Some(id) = &n.ident {
            self.data
                .var_or_default(id.to_id())
                .mark_declared_as_fn_expr();
        }
    }
    fn visit_for_in_stmt(&mut self, n: &ForInStmt, _: &dyn Node) {
        n.right.visit_with(n, self);
        self.with_child(n.span.ctxt, ScopeKind::Block, |child| {
            // The loop pattern is reassigned on every iteration.
            let ctx = Ctx {
                in_left_of_for_loop: true,
                is_exact_reassignment: true,
                ..child.ctx
            };
            n.left.visit_with(n, &mut *child.with_ctx(ctx));
            n.right.visit_with(n, child);
            let ctx = Ctx {
                in_loop: true,
                in_cond: true,
                ..child.ctx
            };
            n.body.visit_with(n, &mut *child.with_ctx(ctx));
        });
    }
    fn visit_for_of_stmt(&mut self, n: &ForOfStmt, _: &dyn Node) {
        n.right.visit_with(n, self);
        self.with_child(n.span.ctxt, ScopeKind::Block, |child| {
            let ctx = Ctx {
                in_left_of_for_loop: true,
                is_exact_reassignment: true,
                ..child.ctx
            };
            n.left.visit_with(n, &mut *child.with_ctx(ctx));
            let ctx = Ctx {
                in_loop: true,
                in_cond: true,
                ..child.ctx
            };
            n.body.visit_with(n, &mut *child.with_ctx(ctx))
        });
    }
    fn visit_for_stmt(&mut self, n: &ForStmt, _: &dyn Node) {
        // Only the init runs unconditionally; test/update/body are loop+cond.
        n.init.visit_with(n, self);
        let ctx = Ctx {
            in_loop: true,
            in_cond: true,
            ..self.ctx
        };
        n.test.visit_with(n, &mut *self.with_ctx(ctx));
        n.update.visit_with(n, &mut *self.with_ctx(ctx));
        n.body.visit_with(n, &mut *self.with_ctx(ctx));
    }
    fn visit_function(&mut self, n: &Function, _: &dyn Node) {
        n.decorators.visit_with(n, self);
        let is_standalone = self
            .marks
            .map(|marks| n.span.has_mark(marks.standalone))
            .unwrap_or_default();
        // We don't dig into standalone function, as it does not share any variable with
        // outer scope.
        if self.ctx.skip_standalone && is_standalone {
            return;
        }
        let ctx = Ctx {
            skip_standalone: self.ctx.skip_standalone || is_standalone,
            ..self.ctx
        };
        self.with_ctx(ctx)
            .with_child(n.span.ctxt, ScopeKind::Fn, |child| {
                n.params.visit_with(n, child);
                match &n.body {
                    Some(body) => {
                        // We use visit_children_with instead of visit_with to bypass block scope
                        // handler.
                        body.visit_children_with(child);
                    }
                    None => {}
                }
            })
    }
    fn visit_if_stmt(&mut self, n: &IfStmt, _: &dyn Node) {
        // The test always runs; both branches are conditional.
        let ctx = Ctx {
            in_cond: true,
            ..self.ctx
        };
        n.test.visit_with(n, self);
        n.cons.visit_with(n, &mut *self.with_ctx(ctx));
        n.alt.visit_with(n, &mut *self.with_ctx(ctx));
    }
    fn visit_import_default_specifier(&mut self, n: &ImportDefaultSpecifier, _: &dyn Node) {
        self.declare_decl(&n.local, true, None, false);
    }
    fn visit_import_named_specifier(&mut self, n: &ImportNamedSpecifier, _: &dyn Node) {
        self.declare_decl(&n.local, true, None, false);
    }
    fn visit_import_star_as_specifier(&mut self, n: &ImportStarAsSpecifier, _: &dyn Node) {
        self.declare_decl(&n.local, true, None, false);
    }
    fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
        // Neither the object nor a computed key is an exact arg/reassignment.
        {
            let ctx = Ctx {
                is_exact_arg: false,
                is_exact_reassignment: false,
                ..self.ctx
            };
            e.obj
                .visit_with(&Invalid { span: DUMMY_SP }, &mut *self.with_ctx(ctx));
        }
        if e.computed {
            let ctx = Ctx {
                is_exact_arg: false,
                is_exact_reassignment: false,
                ..self.ctx
            };
            e.prop
                .visit_with(&Invalid { span: DUMMY_SP }, &mut *self.with_ctx(ctx));
        }
        // `ident.prop` records a property access (and the property name when static).
        match &e.obj {
            ExprOrSuper::Super(_) => {}
            ExprOrSuper::Expr(obj) => match &**obj {
                Expr::Ident(obj) => {
                    let v = self.data.var_or_default(obj.to_id());
                    v.mark_has_property_access();
                    if !e.computed {
                        match &*e.prop {
                            Expr::Ident(prop) => {
                                v.add_accessed_property(prop.sym.clone());
                            }
                            _ => {}
                        }
                    }
                }
                _ => {}
            },
        }
    }
    fn visit_module(&mut self, n: &Module, _: &dyn Node) {
        let ctx = Ctx {
            skip_standalone: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx))
    }
    fn visit_named_export(&mut self, n: &NamedExport, _: &dyn Node) {
        // Re-exports from another module don't use local bindings.
        if n.src.is_some() {
            return;
        }
        n.visit_children_with(self);
    }
    fn visit_new_expr(&mut self, n: &NewExpr, _: &dyn Node) {
        {
            n.callee.visit_with(n, self);
            let ctx = Ctx {
                in_call_arg: true,
                is_exact_arg: true,
                ..self.ctx
            };
            n.args.visit_with(n, &mut *self.with_ctx(ctx));
        }
    }
    fn visit_param(&mut self, n: &Param, _: &dyn Node) {
        // Decorators are ordinary expressions; only the pattern declares params.
        let ctx = Ctx {
            in_pat_of_param: false,
            ..self.ctx
        };
        n.decorators.visit_with(n, &mut *self.with_ctx(ctx));
        let ctx = Ctx {
            in_pat_of_param: true,
            var_decl_kind_of_pat: None,
            ..self.ctx
        };
        n.pat.visit_with(n, &mut *self.with_ctx(ctx));
    }
    fn visit_pat(&mut self, n: &Pat, _: &dyn Node) {
        n.visit_children_with(self);
        let Ctx {
            in_left_of_for_loop,
            in_pat_of_param,
            ..
        } = self.ctx;
        match n {
            Pat::Ident(i) => {
                // Declaration context (var decl / param / catch) declares the
                // binding; any other pattern position is a write to it.
                if self.ctx.in_pat_of_var_decl
                    || self.ctx.in_pat_of_param
                    || self.ctx.in_catch_param
                {
                    let v = self.declare_decl(
                        &i.id,
                        self.ctx.in_pat_of_var_decl_with_init,
                        self.ctx.var_decl_kind_of_pat,
                        false,
                    );
                    if in_pat_of_param {
                        v.mark_declared_as_fn_param();
                    }
                    if in_left_of_for_loop {
                        v.mark_reassigned();
                        v.mark_mutated();
                    }
                } else {
                    self.report_usage(&i.id, true);
                }
            }
            _ => {}
        }
    }
    fn visit_prop(&mut self, n: &Prop, _: &dyn Node) {
        let ctx = Ctx {
            in_update_arg: false,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
        // Shorthand `{ a }` reads the binding `a`.
        match n {
            Prop::Shorthand(i) => {
                self.report_usage(i, false);
            }
            _ => {}
        }
    }
    fn visit_setter_prop(&mut self, n: &SetterProp, _: &dyn Node) {
        // A setter body is its own function scope with one parameter.
        self.with_child(n.span.ctxt, ScopeKind::Fn, |a| {
            n.key.visit_with(n, a);
            {
                let ctx = Ctx {
                    in_pat_of_param: true,
                    ..a.ctx
                };
                n.param.visit_with(n, &mut *a.with_ctx(ctx));
            }
            n.body.visit_with(n, a);
        });
    }
    fn visit_stmt(&mut self, n: &Stmt, _: &dyn Node) {
        let ctx = Ctx {
            in_update_arg: false,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_stmts(&mut self, stmts: &[Stmt], _: &dyn Node) {
        // Once a statement can end conditionally (return/throw/break in a branch),
        // every following statement is treated as conditional.
        let mut had_cond = false;
        for stmt in stmts {
            let ctx = Ctx {
                in_cond: self.ctx.in_cond || had_cond,
                ..self.ctx
            };
            stmt.visit_with(&Invalid { span: DUMMY_SP }, &mut *self.with_ctx(ctx));
            had_cond |= can_end_conditionally(stmt);
        }
    }
    fn visit_switch_case(&mut self, n: &SwitchCase, _: &dyn Node) {
        n.test.visit_with(n, self);
        {
            let ctx = Ctx {
                in_cond: true,
                ..self.ctx
            };
            n.cons.visit_with(n, &mut *self.with_ctx(ctx));
        }
    }
    fn visit_try_stmt(&mut self, n: &TryStmt, _: &dyn Node) {
        let ctx = Ctx {
            in_cond: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_update_expr(&mut self, n: &UpdateExpr, _: &dyn Node) {
        // `x++` both reads and writes its operand.
        let ctx = Ctx {
            in_update_arg: true,
            is_exact_reassignment: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_var_decl(&mut self, n: &VarDecl, _: &dyn Node) {
        let ctx = Ctx {
            var_decl_kind_of_pat: Some(n.kind),
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
        // Record mutual "infection": in `let v = init`, every identifier used by
        // `init` infects `v` and vice versa.
        for decl in &n.decls {
            match (&decl.name, decl.init.as_deref()) {
                (Pat::Ident(var), Some(init)) => {
                    let used_idents = idents_used_by(init);
                    for id in used_idents {
                        self.data
                            .var_or_default(id.clone())
                            .add_infects(var.to_id());
                        self.data.var_or_default(var.to_id()).add_infects(id);
                    }
                }
                _ => {}
            }
        }
    }
    fn visit_var_declarator(&mut self, e: &VarDeclarator, _: &dyn Node) {
        let ctx = Ctx {
            in_pat_of_var_decl: true,
            in_pat_of_var_decl_with_init: e.init.is_some(),
            // Array/literal initializers make member access side-effect free.
            in_var_decl_with_no_side_effect_for_member_access: match e.init.as_deref() {
                Some(Expr::Array(..) | Expr::Lit(..)) => true,
                _ => false,
            },
            ..self.ctx
        };
        e.name.visit_with(e, &mut *self.with_ctx(ctx));
        e.init.visit_with(e, self);
    }
    fn visit_while_stmt(&mut self, n: &WhileStmt, _: &dyn Node) {
        let ctx = Ctx {
            in_loop: true,
            in_cond: true,
            ..self.ctx
        };
        n.visit_children_with(&mut *self.with_ctx(ctx));
    }
    fn visit_with_stmt(&mut self, n: &WithStmt, _: &dyn Node) {
        // `with` makes the scope dynamic; record it.
        self.scope.mark_with_stmt();
        n.visit_children_with(self);
    }
}
| 27.561104 | 97 | 0.501573 |
fb8a234fd14096b764b24ce0eb1d952190d068dc | 16,616 | /*!
The `Page` class deals with operations done on pages, like editing.
*/
#![deny(missing_docs)]
use crate::media_wiki_error::MediaWikiError;
use crate::api::Api;
use crate::title::Title;
use serde_json::Value;
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
/// Represents a page.
///
/// A thin handle around a [`Title`]; all network operations take an `Api`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Page {
    // The page's title; identifies the page on the wiki.
    title: Title,
}
impl Page {
/// Creates a new `Page` from a `Title`.
pub fn new(title: Title) -> Self {
Page { title }
}
    /// Accesses the `Title` of this `Page` (borrowed; clone if ownership is needed).
    pub fn title(&self) -> &Title {
        &self.title
    }
    /// Fetches the current text of this `Page`. If there is one slot in
    /// the current revision, it is fetched; if there are multiple slots,
    /// the "main" slot is fetched, or an error is returned if there is
    /// no "main" slot.
    ///
    /// # Errors
    /// If the page is missing, will return a `PageError::Missing`.
    ///
    /// [`Api::get_query_api_json`]: ../api/struct.Api.html#method.get_query_api_json
    pub async fn text(&self, api: &Api) -> Result<String, PageError> {
        let title = self
            .title
            .full_pretty(api)
            .ok_or_else(|| PageError::BadTitle(self.title.clone()))?;
        // action=query/prop=revisions with rvslots=* returns all content slots
        // of the latest revision; formatversion=2 gives pages as an array.
        let params = [
            ("action", "query"),
            ("prop", "revisions"),
            ("titles", &title),
            ("rvslots", "*"),
            ("rvprop", "content"),
            ("formatversion", "2"),
        ]
        .iter()
        .map(|&(k, v)| (k.to_string(), v.to_string()))
        .collect();
        let result = api
            .get_query_api_json(&params)
            .await
            .map_err(PageError::MediaWiki)?;
        let page = &result["query"]["pages"][0];
        if page["missing"].as_bool() == Some(true) {
            Err(PageError::Missing(self.title.clone()))
        } else if let Some(slots) = page["revisions"][0]["slots"].as_object() {
            // Prefer the "main" slot; fall back to the only slot if exactly one exists.
            if let Some(the_slot) = {
                slots["main"].as_object().or_else(|| {
                    if slots.len() == 1 {
                        slots.values().next().unwrap().as_object() // unwrap OK, length is 1
                    } else {
                        None
                    }
                })
            } {
                match the_slot["content"].as_str() {
                    Some(string) => Ok(string.to_string()),
                    None => Err(PageError::BadResponse(result)),
                }
            } else {
                Err(PageError::BadResponse(result))
            }
        } else {
            Err(PageError::BadResponse(result))
        }
    }
/// Replaces the contents of this `Page` with the given text, using the given
/// edit summary.
///
/// # Errors
/// May return a `PageError` or any error from [`Api::post_query_api_json`].
///
/// [`Api::post_query_api_json`]: ../api/struct.Api.html#method.post_query_api_json
pub async fn edit_text(
&self,
api: &mut Api,
text: impl Into<String>,
summary: impl Into<String>,
) -> Result<(), Box<dyn Error>> {
let title = self
.title
.full_pretty(api)
.ok_or_else(|| PageError::BadTitle(self.title.clone()))?;
let bot = if api.user().is_bot() { "true" } else { "false" };
let mut params: HashMap<String, String> = [
("action", "edit"),
("title", &title),
("text", &text.into()),
("summary", &summary.into()),
("bot", bot),
("formatversion", "2"),
("token", &api.get_edit_token().await?),
]
.iter()
.map(|&(k, v)| (k.to_string(), v.to_string()))
.collect();
if !api.user().user_name().is_empty() {
params.insert("assert".to_string(), "user".to_string());
}
let result = api.post_query_api_json(¶ms).await?;
match result["edit"]["result"].as_str() {
Some("Success") => Ok(()),
_ => Err(Box::new(PageError::EditError(result))),
}
}
    /// Performs an "action=query" API action and returns the result.
    ///
    /// Adds `additional_params` on top of `action=query&titles=<this page>`.
    async fn action_query(
        &self,
        api: &Api,
        additional_params: &[(&str, &str)],
    ) -> Result<Value, PageError> {
        let title = self
            .title
            .full_pretty(api)
            .ok_or_else(|| PageError::BadTitle(self.title.clone()))?;
        let mut params = api.params_into(&[("action", "query"), ("titles", &title)]);
        for (k, v) in additional_params {
            params.insert(k.to_string(), v.to_string());
        }
        // `get_query_api_json_all` follows API continuations; transport errors
        // are wrapped as `PageError::RequestError`.
        api.get_query_api_json_all(&params).await.map_err(|e|PageError::RequestError(Box::new(e)))
    }
// From an API result in the form of query/pages, extract a sub-object for each page (should be only one)
fn extract_page_properties_from_api_results(
&self,
result: Value,
subkey: &str,
) -> Result<Vec<Value>, Box<dyn Error>> {
match result["query"]["pages"].is_null() {
true => Err(Box::new(PageError::Missing(self.title.clone()))),
false => match result["query"]["pages"].as_object() {
Some(obj) => Ok(obj
.iter()
.flat_map(|(_pageid, v_page)| match v_page[subkey].as_array() {
Some(arr) => arr.to_owned(),
None => vec![],
})
.collect()),
None => Err(Box::new(PageError::UnexpectedResultFormat(format!(
"{:?}",
&result["query"]["pages"]
)))),
},
}
}
fn json_result_into_titles(&self, arr: Vec<Value>, api: &Api) -> Vec<Title> {
arr.iter()
.filter_map(|v| match v["title"].as_str() {
Some(title) => Some(Title::new_from_full(title, api)),
None => None,
})
.collect()
}
    /// Returns the categories of a page, as a JSON Value Vec.
    ///
    /// Each entry carries the category title plus the `hidden`, `sortkey`
    /// and `timestamp` properties requested via `clprop`.
    pub async fn categories(&self, api: &Api) -> Result<Vec<Value>, Box<dyn Error>> {
        let result = self
            .action_query(
                api,
                &[
                    ("prop", "categories"),
                    ("cllimit", "max"),
                    ("clprop", "hidden|sortkey|timestamp"),
                ],
            )
            .await?;
        self.extract_page_properties_from_api_results(result, "categories")
    }
    /// Returns the interwiki links of a page, as a JSON Value Vec.
    /// (The previous doc comment said "categories" — a copy-paste slip.)
    pub async fn interwiki_links(&self, api: &Api) -> Result<Vec<Value>, Box<dyn Error>> {
        let result = self
            .action_query(api, &[("prop", "iwlinks"), ("iwlimit", "max")])
            .await?;
        self.extract_page_properties_from_api_results(result, "iwlinks")
    }
    /// Returns the templates transcluded on a page, as a Title Vec.
    /// All namespaces are included (`tlnamespace=*`).
    pub async fn templates(&self, api: &Api) -> Result<Vec<Title>, Box<dyn Error>> {
        let result = self
            .action_query(
                api,
                &[
                    ("prop", "templates"),
                    ("tllimit", "max"),
                    ("tlnamespace", "*"),
                ],
            )
            .await?;
        let result = self.extract_page_properties_from_api_results(result, "templates")?;
        Ok(self.json_result_into_titles(result, api))
    }
    /// Returns the wiki-internal links on a page, as a Title Vec.
    /// All namespaces are included (`plnamespace=*`).
    pub async fn links(&self, api: &Api) -> Result<Vec<Title>, Box<dyn Error>> {
        let result = self
            .action_query(
                api,
                &[("prop", "links"), ("pllimit", "max"), ("plnamespace", "*")],
            )
            .await?;
        let result = self.extract_page_properties_from_api_results(result, "links")?;
        Ok(self.json_result_into_titles(result, api))
    }
    /// Returns the pages that link *to* this page, as a Title Vec.
    ///
    /// `direct_links` includes non-redirect links; `redirects` includes
    /// links via redirects. Both flags `false` yields an empty `lhshow`
    /// filter (API default: show both kinds).
    pub async fn links_here(
        &self,
        api: &Api,
        direct_links: bool,
        redirects: bool,
    ) -> Result<Vec<Title>, Box<dyn Error>> {
        // Map the two flags onto the API's pipe-separated `lhshow` values.
        let lhshow = match (direct_links, redirects) {
            (true, true) => "!redirect|redirect",
            (true, false) => "!redirect",
            (false, true) => "redirect",
            (false, false) => "",
        };
        let result = self
            .action_query(
                api,
                &[
                    ("prop", "linkshere"),
                    ("lhlimit", "max"),
                    ("lhnamespace", "*"),
                    ("lhshow", lhshow),
                ],
            )
            .await?;
        let result = self.extract_page_properties_from_api_results(result, "linkshere")?;
        Ok(self.json_result_into_titles(result, api))
    }
    /// Returns the images used on a page, as a Title Vec.
    pub async fn images(&self, api: &Api) -> Result<Vec<Title>, Box<dyn Error>> {
        let result = self
            .action_query(api, &[("prop", "images"), ("imlimit", "max")])
            .await?;
        let result = self.extract_page_properties_from_api_results(result, "images")?;
        Ok(self.json_result_into_titles(result, api))
    }
/// Returns the coordinates of a page, as a JSON Value Vec
pub async fn coordinates(&self, api: &Api) -> Result<Vec<Value>, Box<dyn Error>> {
self.extract_page_properties_from_api_results(
self.action_query(
api,
&[
("prop", "coordinates"),
("cllimit", "max"),
("coprop", "country|dim|globe|name|region|type"),
("coprimary", "all"),
],
)
.await?,
"coordinates",
)
}
/// Returns the coordinates of a page, including distance from a point, as a JSON Value Vec
pub async fn coordinates_distance(
&self,
api: &Api,
lat: f64,
lon: f64,
) -> Result<Vec<Value>, Box<dyn Error>> {
self.extract_page_properties_from_api_results(
self.action_query(
api,
&[
("prop", "coordinates"),
("cllimit", "max"),
("coprop", "country|dim|globe|name|region|type"),
("coprimary", "all"),
("codistancefrompoint", format!("{}|{}", lat, lon).as_str()),
],
)
.await?,
"coordinates",
)
}
/// Returns the external links of a page, as a String Vec
pub async fn external_links(&self, api: &Api) -> Result<Vec<String>, Box<dyn Error>> {
let result = self
.action_query(api, &[("prop", "extlinks"), ("ellimit", "max")])
.await?;
Ok(self
.extract_page_properties_from_api_results(result, "extlinks")?
.iter()
.filter_map(|v| v["*"].as_str())
.map(|v| v.to_string())
.collect())
}
/*
TODO for action=query:
extracts
fileusage
globalusage
imageinfo
images
info
langlinks
linkshere
pageimages
pageprops
pageterms
pageviews
redirects
revisions
transcludedin
wbentityusage
*/
}
/// Errors that can go wrong while performing operations on a `Page`.
#[derive(Debug)]
#[non_exhaustive]
pub enum PageError {
    /// Couldn't obtain the title for this page for use in an API request.
    BadTitle(Title),
    /// Couldn't understand the API response (provided).
    BadResponse(Value),
    /// The requested page does not exist on the wiki.
    Missing(Title),
    /// Edit failed; the raw API response is provided.
    EditError(Value),
    /// Error while performing the API request.
    RequestError(Box<dyn Error>),
    /// Unexpected data structure (e.g. array instead of object) in an API
    /// JSON result.
    UnexpectedResultFormat(String),
    /// Wrapper around a lower-level `MediaWikiError`.
    MediaWiki(MediaWikiError),
}
impl fmt::Display for PageError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
PageError::BadTitle(title) => write!(f, "invalid title for this Page: {:?}", title),
PageError::BadResponse(response) => write!(
f,
"bad API response while fetching revision content: {:?}",
response
),
PageError::Missing(title) => write!(f, "page missing: {:?}", title),
PageError::EditError(response) => write!(f, "edit resulted in error: {:?}", response),
PageError::RequestError(error) => write!(f, "request error: {}", error),
PageError::UnexpectedResultFormat(error) => write!(f, "result format error: {}", error),
PageError::MediaWiki(error) => write!(f, "result format error: {}", error),
}
}
}
impl Error for PageError {}
/*
impl From<MediaWikiError> for PageError {
fn from(e: MediaWikiError) -> Self {
match e {
MediaWikiError::Reqwest(e) => PageError::RequestError(Box::new(e)),
MediaWikiError::ReqwestHeader(e) => PageError::RequestError(Box::new(e)),
}
}
}
*/
#[cfg(test)]
mod tests {
    //! Integration tests against the live Wikidata API.
    //! They require network access and may break if wiki content changes.
    use super::*;
    use crate::api::*;
    // Fresh API handle against wikidata.org for each test.
    async fn wd_api() -> Api {
        Api::new("https://www.wikidata.org/w/api.php")
            .await
            .unwrap()
    }
    #[tokio::test]
    async fn page_text_main_page_nonempty() {
        let page = Page::new(Title::new("Main Page", 4));
        let text = page.text(&wd_api().await).await.unwrap();
        assert!(!text.is_empty());
    }
    #[tokio::test]
    async fn page_text_nonexistent() {
        let title = Title::new("This page does not exist", 0);
        let page = Page::new(title.clone());
        // A missing page must surface as PageError::Missing with the title.
        match page.text(&wd_api().await).await {
            Err(PageError::Missing(t)) => assert!(t == title),
            x => panic!("expected missing error, found {:?}", x),
        }
    }
    #[tokio::test]
    async fn page_categories() {
        let page = Page::new(Title::new("Community portal", 4));
        let result = page.categories(&wd_api().await).await.unwrap();
        assert!(result.len() > 1);
    }
    #[tokio::test]
    async fn page_templates() {
        let page = Page::new(Title::new("Community portal", 4));
        let result = page.templates(&wd_api().await).await.unwrap();
        assert!(result.len() > 5);
        assert!(result.contains(&Title::new("Protected", 10)))
    }
    #[tokio::test]
    async fn page_coordinates() {
        let page = Page::new(Title::new("Q64", 0)); // Berlin
        let result = page.coordinates(&wd_api().await).await.unwrap();
        assert!(!result.is_empty());
        // Distance to Cologne, expected ~475.7 km (value in meters).
        let result = page
            .coordinates_distance(&wd_api().await, 50.94222222, 6.95777778)
            .await
            .unwrap();
        result
            .iter()
            .filter(|v| v["primary"].as_str() == Some(""))
            .for_each(|v| {
                assert!(v["dist"].as_f64().unwrap() > 475700.0);
                assert!(v["dist"].as_f64().unwrap() < 475701.0);
            });
    }
    #[tokio::test]
    async fn page_external_links() {
        let page = Page::new(Title::new("Q64", 0));
        let result = page.external_links(&wd_api().await).await.unwrap();
        assert!(result.contains(&"https://www.berlin.de/".to_string()));
    }
    #[tokio::test]
    async fn page_links() {
        let page = Page::new(Title::new("Community portal", 4));
        let result = page.links(&wd_api().await).await.unwrap();
        assert!(result.contains(&Title::new("Bot requests", 4)))
    }
    #[tokio::test]
    async fn page_images() {
        let page = Page::new(Title::new("Q64", 0));
        let result = page.images(&wd_api().await).await.unwrap();
        assert!(result.contains(&Title::new("Cityscape Berlin.jpg", 6)))
    }
    #[tokio::test]
    async fn page_links_here() {
        let page = Page::new(Title::new("Q1481", 0));
        let result = page.links_here(&wd_api().await, true, false).await.unwrap();
        assert!(result.contains(&Title::new("Q7894", 0)))
    }
    #[tokio::test]
    async fn page_interwiki_links() {
        let page = Page::new(Title::new("Wikidata list", 10));
        let result = page.interwiki_links(&wd_api().await).await.unwrap();
        // println!("{:?}", &result);
        assert!(result.contains(&json!({"prefix":"mw","*":"Wikidata_query_service/User_Manual"})));
    }
}
| 33.5 | 109 | 0.519921 |
67db7b913a3daee2ac186c6cce96d449637db178 | 3,358 | use std::fs::File;
use std::io::BufReader;
use std::io::BufRead;
use std::collections::VecDeque;
/// One vertex of the actor/movie graph, plus BFS bookkeeping.
struct Node {
    // Actor or movie name (the two kinds share one node type).
    name: String,
    // Indices of adjacent nodes in `Graph::nodes`.
    enodes: Vec<usize>,
    // BFS "visited" flag.
    checked: bool,
    // Index of the BFS predecessor; `usize::max_value()` means "none".
    parent: usize,
}
/// Adjacency-list graph; nodes are addressed by their index in `nodes`.
struct Graph {
    nodes: Vec<Node>,
}
impl Graph {
    /// Returns the index of the node named `name`, or `usize::max_value()`
    /// as a "not found" sentinel.
    fn search_node(&mut self, name: &str) -> usize {
        match self.nodes.iter().position(|node| node.name == name) {
            Some(i) => i,
            None => <usize>::max_value(),
        }
    }

    /// Builds an undirected actor/movie graph from a CSV file whose lines
    /// have the form `actor;movie`. Every actor and movie becomes a node
    /// and each line links the two in both directions.
    ///
    /// Panics if the file cannot be opened or read (acceptable for this
    /// demo binary).
    fn init(path: &str) -> Graph {
        println!("start init");
        let mut graph = Graph { nodes: vec![] };
        let f = File::open(path).unwrap();
        let file = BufReader::new(&f);
        for line in file.lines() {
            let l = line.unwrap();
            let s: Vec<&str> = l.split(';').collect();
            // Look up (or create) the actor node.
            let mut aind = graph.search_node(s[0]);
            if aind == <usize>::max_value() {
                graph.nodes.push(Node {
                    name: s[0].to_string(),
                    enodes: vec![],
                    checked: false,
                    parent: <usize>::max_value(),
                });
                aind = graph.nodes.len() - 1;
            }
            // Look up (or create) the movie node.
            let mut mind = graph.search_node(s[1]);
            if mind == <usize>::max_value() {
                graph.nodes.push(Node {
                    name: s[1].to_string(),
                    enodes: vec![],
                    checked: false,
                    parent: <usize>::max_value(),
                });
                mind = graph.nodes.len() - 1;
            }
            // Link both ways. (The original had an unreachable `if aind ==
            // max_value()` branch here: `aind` is always valid by now.)
            graph.nodes[mind].enodes.push(aind);
            graph.nodes[aind].enodes.push(mind);
        }
        println!("end init");
        graph
    }

    /// Breadth-first search from `start` to `end`, then prints the path by
    /// walking the recorded `parent` links back from `end`.
    fn search(&mut self, start: &str, end: &str) {
        println!();
        println!("search for connection between '{}' and '{}' ", start, end);
        println!();
        let mut queue: VecDeque<usize> = VecDeque::new();
        let istart = self.search_node(start);
        let mut icurrent = istart;
        let iend = self.search_node(end);
        self.nodes[icurrent].checked = true;
        queue.push_back(icurrent);
        while let Some(front) = queue.pop_front() {
            icurrent = front;
            if icurrent == iend {
                break;
            }
            // Visit every neighbour. The original iterated `0..len - 1`,
            // which skipped the last edge of every node and panicked
            // (usize underflow) on nodes without edges.
            for i in 0..self.nodes[icurrent].enodes.len() {
                let inode = self.nodes[icurrent].enodes[i];
                if !self.nodes[inode].checked {
                    self.nodes[inode].checked = true;
                    self.nodes[inode].parent = icurrent;
                    queue.push_back(inode);
                }
            }
        }
        // Walk the parent chain back to the start, labelling every other
        // hop as the connecting movie.
        let mut mora = 0;
        loop {
            println!("{}", self.nodes[icurrent].name);
            if icurrent == istart {
                break;
            }
            if mora % 2 == 0 {
                print!(" Movie: ")
            }
            mora += 1;
            icurrent = self.nodes[icurrent].parent;
        }
    }
}
/// Loads the demo actor/movie CSV and prints the connection path between
/// the two actors (six-degrees-of-separation style).
fn main() {
    let mut graph = Graph::init("data/movies-demo.csv");
    graph.search("Martin Sheen", "Elarica Gallacher");
}
| 25.633588 | 128 | 0.471114 |
fe15715dce17ccaebef8a41a64c72bf87a2095a1 | 823 | /*
* Hetzner Cloud API
*
* Copied from the official API documentation for the Public Hetzner Cloud.
*
* The version of the OpenAPI document: 0.5.0
*
* Generated by: https://openapi-generator.tech
*/
/// RetryIssuanceOrRenewalResponse : Response to POST https://api.hetzner.cloud/v1/certificates/{id}/actions/retry
#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct RetryIssuanceOrRenewalResponse {
    /// The action tracking the retried issuance/renewal; boxed to keep the
    /// struct itself small.
    #[serde(rename = "action")]
    pub action: Box<crate::models::Action>,
}
impl RetryIssuanceOrRenewalResponse {
/// Response to POST https://api.hetzner.cloud/v1/certificates/{id}/actions/retry
pub fn new(action: crate::models::Action) -> RetryIssuanceOrRenewalResponse {
RetryIssuanceOrRenewalResponse {
action: Box::new(action),
}
}
}
| 26.548387 | 114 | 0.703524 |
62d9a33c83d7af78d0280a0118e950844b59c99b | 1,834 | // Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use prelude::v1::*;
use libc::c_int;
pub type Key = pthread_key_t;
/// Creates a new thread-local storage key. `dtor`, if given, is invoked by
/// the OS on each thread's stored value at thread exit (per POSIX
/// `pthread_key_create` semantics). Asserts that key creation succeeds.
#[inline]
pub unsafe fn create(dtor: Option<unsafe extern fn(*mut u8)>) -> Key {
    let mut key = 0;
    assert_eq!(pthread_key_create(&mut key, dtor), 0);
    return key;
}
/// Stores `value` in the calling thread's slot for `key`.
#[inline]
pub unsafe fn set(key: Key, value: *mut u8) {
    let r = pthread_setspecific(key, value);
    debug_assert_eq!(r, 0);
}
/// Returns the calling thread's value for `key` (null if never set on this
/// thread, per POSIX `pthread_getspecific`).
#[inline]
pub unsafe fn get(key: Key) -> *mut u8 {
    pthread_getspecific(key)
}
/// Deletes `key`. Note that `pthread_key_delete` does *not* run the
/// registered destructors for values still stored under the key.
#[inline]
pub unsafe fn destroy(key: Key) {
    let r = pthread_key_delete(key);
    debug_assert_eq!(r, 0);
}
// Platform-specific definition of `pthread_key_t`, mirroring each libc's
// actual typedef.
#[cfg(any(target_os = "macos",
          target_os = "ios"))]
type pthread_key_t = ::libc::c_ulong;
#[cfg(any(target_os = "freebsd",
          target_os = "dragonfly",
          target_os = "openbsd"))]
type pthread_key_t = ::libc::c_int;
// Fallback (Linux and everything else): unsigned int.
#[cfg(not(any(target_os = "macos",
              target_os = "ios",
              target_os = "freebsd",
              target_os = "dragonfly",
              target_os = "openbsd")))]
type pthread_key_t = ::libc::c_uint;
// Raw POSIX TLS bindings used by the safe-ish wrappers above.
extern {
    fn pthread_key_create(key: *mut pthread_key_t,
                          dtor: Option<unsafe extern fn(*mut u8)>) -> c_int;
    fn pthread_key_delete(key: pthread_key_t) -> c_int;
    fn pthread_getspecific(key: pthread_key_t) -> *mut u8;
    fn pthread_setspecific(key: pthread_key_t, value: *mut u8) -> c_int;
}
| 29.111111 | 76 | 0.647764 |
723c6ad10a10fc3215085ce6be65ea63fb8f6812 | 3,066 | #[doc = "Register `HBN_PIR_VTH` reader"]
pub struct R(crate::R<HBN_PIR_VTH_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<HBN_PIR_VTH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<HBN_PIR_VTH_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<HBN_PIR_VTH_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `HBN_PIR_VTH` writer"]
pub struct W(crate::W<HBN_PIR_VTH_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<HBN_PIR_VTH_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<HBN_PIR_VTH_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<HBN_PIR_VTH_SPEC>) -> Self {
W(writer)
}
}
// Field `pir_vth` occupies bits 0..=13 (mask 0x3fff) of the register.
#[doc = "Field `pir_vth` reader - "]
pub struct PIR_VTH_R(crate::FieldReader<u16, u16>);
impl PIR_VTH_R {
    pub(crate) fn new(bits: u16) -> Self {
        PIR_VTH_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for PIR_VTH_R {
    type Target = crate::FieldReader<u16, u16>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `pir_vth` writer - "]
pub struct PIR_VTH_W<'a> {
    w: &'a mut W,
}
impl<'a> PIR_VTH_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear the 14-bit field, then OR in the (masked) new value.
        self.w.bits = (self.w.bits & !0x3fff) | (value as u32 & 0x3fff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:13"]
    #[inline(always)]
    pub fn pir_vth(&self) -> PIR_VTH_R {
        PIR_VTH_R::new((self.bits & 0x3fff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:13"]
    #[inline(always)]
    pub fn pir_vth(&mut self) -> PIR_VTH_W {
        PIR_VTH_W { w: self }
    }
    #[doc = r"Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "HBN_PIR_VTH.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [hbn_pir_vth](index.html) module"]
pub struct HBN_PIR_VTH_SPEC;
// The register is 32 bits wide, readable and writable, and resets to 0.
impl crate::RegisterSpec for HBN_PIR_VTH_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [hbn_pir_vth::R](R) reader structure"]
impl crate::Readable for HBN_PIR_VTH_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [hbn_pir_vth::W](W) writer structure"]
impl crate::Writable for HBN_PIR_VTH_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets HBN_PIR_VTH to value 0"]
impl crate::Resettable for HBN_PIR_VTH_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 29.76699 | 404 | 0.608284 |
67cc694ae12c7087121c76cacbf2a749273923a5 | 1,045 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that specialization works even if only the upstream crate enables it
// aux-build:cross_crate.rs
extern crate cross_crate;
use cross_crate::*;
// Each assert checks that the most specific applicable `foo` impl from the
// `cross_crate` crate wins, even though specialization is only enabled there.
fn main() {
    assert!(0u8.foo() == "generic Clone");
    assert!(vec![0u8].foo() == "generic Vec");
    assert!(vec![0i32].foo() == "Vec<i32>");
    assert!(0i32.foo() == "i32");
    assert!(String::new().foo() == "String");
    assert!(((), 0).foo() == "generic pair");
    assert!(((), ()).foo() == "generic uniform pair");
    assert!((0u8, 0u32).foo() == "(u8, u32)");
    assert!((0u8, 0u8).foo() == "(u8, u8)");
}
| 34.833333 | 76 | 0.643062 |
bfcc0391c83e35708e7ce662e2722de0819077e3 | 784 | use std::iter::FromIterator;
use super::Treemap;
impl FromIterator<u64> for Treemap {
    /// Convenience method for creating treemap from an iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::{u32, u64};
    /// use croaring::Treemap;
    ///
    /// let treemap: Treemap = (1..3).chain(u64::from(u32::MAX)+1..u64::from(u32::MAX)+10).collect();
    ///
    /// assert!(!treemap.is_empty());
    /// assert!(treemap.contains(1));
    /// assert!(treemap.contains(2));
    /// assert!(treemap.contains(u64::from(u32::MAX)+1));
    /// assert!(treemap.contains(u64::from(u32::MAX)+5));
    /// assert_eq!(treemap.cardinality(), 11);
    /// ```
    fn from_iter<I: IntoIterator<Item = u64>>(iter: I) -> Self {
        // Materialize the values once, then hand the slice to `Treemap::of`.
        let values: Vec<u64> = iter.into_iter().collect();
        Treemap::of(&values)
    }
}
| 29.037037 | 101 | 0.563776 |
f7e5dab5332db02e33bfd1eaf47c85bab67d0380 | 597 | use bakkesmod::prelude::*;
use bakkesmod::wrappers::unreal::*;
use bakkesmod::{game, console};
// Plugin entry point: registers a console notifier that prints the ball's
// current location when `set_ball_location` is invoked.
#[plugin_init]
pub fn on_load() {
    console::register_notifier("set_ball_location", Box::new(move |_: Vec<String>| {
        // Both the game event and the ball may be absent (e.g. in menus),
        // so each lookup is checked before use.
        let game = match game::get_game_event_as_server() {
            Some(g) => g,
            None => {
                log_console!("game is null!");
                return;
            }
        };
        match game.get_ball() {
            Some(ball) => log_console!("{}", ball.get_location()),
            None => log_console!("ball is null")
        };
    }));
}
9b8925be4bbba13250634e3dff1b0dc29a799175 | 5,503 | // Copyright © 2016-2017 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use rabble::{self, Pid, CorrelationId, Envelope};
use msg::Msg;
use super::utils::QuorumTracker;
use vr::vr_msg::{self, VrMsg, RecoveryResponse, ClientOp};
use vr::VrCtx;
use vr::vr_fsm::{Transition, VrState, State};
use vr::states::Backup;
use api::Backend;
// Snapshot of the primary's state received during recovery; the recovering
// replica adopts these fields once it has a quorum of responses.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RecoveryPrimary {
    pub pid: Pid,
    pub view: u64,
    pub op: u64,
    pub commit_num: u64,
    pub state: Backend,
    pub log_start: u64,
    pub log_tail: Vec<ClientOp>
}
/// The recovery state of the VR Protocol where a replica is recovering data from a quorum of
/// replicas.
state!(Recovery {
    ctx: VrCtx,
    // Random value echoed back in responses to match them to this attempt.
    nonce: u64,
    // Primary from the latest view we've heard from
    primary: Option<RecoveryPrimary>,
    responses: QuorumTracker<()>
});
impl Transition for Recovery {
    // Drives the recovery protocol: re-broadcasts the recovery request on
    // timeout ticks, and folds in RecoveryResponse messages until a quorum
    // (including the primary of the latest view) is reached.
    fn handle(mut self,
              msg: VrMsg,
              from: Pid,
              _: CorrelationId,
              output: &mut Vec<Envelope<Msg>>) -> VrState
    {
        match msg {
            VrMsg::Tick => {
                // If the current round timed out, reset tracking and retry
                // with a fresh broadcast (same nonce).
                if self.responses.is_expired() {
                    let cid = CorrelationId::pid(self.ctx.pid.clone());
                    self.responses = QuorumTracker::new(self.ctx.quorum, self.ctx.idle_timeout_ms);
                    self.primary = None;
                    self.ctx.broadcast(self.recovery_msg(), cid, output);
                }
                self.into()
            },
            VrMsg::RecoveryResponse(msg) => {
                self.update_recovery_state(from, msg, output);
                self.commit_recovery(output)
            },
            // All other protocol messages are ignored while recovering.
            _ => self.into()
        }
    }
}
impl Recovery {
    /// Creates a new recovery state for `ctx`, tagged with `nonce` so that
    /// stale responses from older attempts can be discarded.
    pub fn new(ctx: VrCtx, nonce: u64) -> Recovery {
        let quorum = ctx.quorum;
        Recovery {
            ctx: ctx,
            nonce: nonce,
            primary: None,
            // Expire immediately so recovery is started on the next tick
            responses: QuorumTracker::new(quorum, 0)
        }
    }
    // A usable quorum needs a super-quorum of responses AND a response from
    // the primary of the current (latest heard) view.
    fn has_quorum(&self) -> bool {
        let current_view = self.ctx.view;
        self.responses.has_super_quorum() &&
            self.primary.as_ref().map_or(false, |p| p.view == current_view)
    }
    // Once a quorum is reached, adopt the primary's log/state and transition
    // to Backup; otherwise stay in Recovery.
    fn commit_recovery(mut self, output: &mut Vec<Envelope<Msg>>) -> VrState {
        if self.has_quorum() {
            let commit_num = {
                let primary = self.primary.take().unwrap();
                self.ctx.op = primary.op;
                self.ctx.backend = primary.state;
                self.ctx.log_start = primary.log_start;
                self.ctx.log = primary.log_tail;
                // Don't attempt to commit operations that are already part of the backend state
                // They don't exist in the log anyway.
                self.ctx.commit_num = primary.log_start;
                primary.commit_num
            };
            let mut backup = Backup::new(self.ctx);
            backup.set_primary(output);
            // This isn't in the VR protocol, but we send a PrepareOk here so that
            // the primary can update it's min_accept table in case it committed operations while
            // this replica was down.
            let cid = CorrelationId::pid(backup.ctx.pid.clone());
            backup.send_prepare_ok(cid, output);
            return backup.commit(commit_num, output);
        }
        self.into()
    }
    // Validates and records one RecoveryResponse, updating epoch/view state
    // and (for primary responses) the state snapshot to adopt.
    fn update_recovery_state(&mut self,
                             from: Pid,
                             msg: RecoveryResponse,
                             output: &mut Vec<Envelope<Msg>>)
    {
        // Ignore responses to a different recovery attempt.
        if msg.nonce != self.nonce {
            return;
        }
        // Ignore stale-epoch responses.
        if msg.epoch < self.ctx.epoch {
            return;
        }
        // If we get a response from a replica in a later epoch, we learn the config from the
        // message and try again with the new group. If this replica isn't a member of the new group
        // it shuts down.
        if msg.epoch > self.ctx.epoch {
            let cid = CorrelationId::pid(self.ctx.pid.clone());
            self.ctx.epoch = msg.epoch;
            self.ctx.view = msg.view;
            self.ctx.old_config = msg.old_config.unwrap();
            self.ctx.new_config = msg.new_config.unwrap();
            self.ctx.quorum = self.ctx.new_config.replicas.len() as u64 / 2 + 1;
            self.primary = None;
            self.responses = QuorumTracker::new(self.ctx.quorum, self.ctx.idle_timeout_ms);
            self.ctx.broadcast(self.recovery_msg(), cid, output);
            return;
        }
        // Track the newest view we've heard of.
        if msg.view > self.ctx.view {
            self.ctx.view = msg.view;
        }
        // Only the primary fills in `op` (and the other snapshot fields).
        let response_from_primary = msg.op.is_some();
        if response_from_primary && msg.view == self.ctx.view {
            self.ctx.global_min_accept = msg.global_min_accept;
            self.primary = Some(RecoveryPrimary {
                pid: from.clone(),
                view: msg.view,
                op: msg.op.unwrap(),
                commit_num: msg.commit_num.unwrap(),
                state: msg.state.unwrap(),
                log_start: msg.log_start.unwrap(),
                log_tail: msg.log_tail.unwrap()
            });
        }
        self.responses.insert(from, ())
    }
    // The broadcast recovery request carrying this attempt's nonce.
    fn recovery_msg(&self) -> rabble::Msg<Msg> {
        vr_msg::Recovery {
            epoch: self.ctx.epoch,
            nonce: self.nonce,
        }.into()
    }
}
| 34.829114 | 100 | 0.550427 |
dd7b6e34a1cf26e70f9fab004555e70264913525 | 1,791 | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
use super::dispatch_json::{JsonOp, Value};
use crate::op_error::OpError;
use crate::ops::json_op;
use crate::state::State;
use crate::worker::WorkerEvent;
use deno_core::*;
use futures::channel::mpsc;
use futures::sink::SinkExt;
use std::convert::From;
/// Wraps a worker-op dispatcher so it captures the `WorkerEvent` sender,
/// producing a closure with the plain JSON-op signature expected by
/// `json_op`.
pub fn web_worker_op<D>(
  sender: mpsc::Sender<WorkerEvent>,
  dispatcher: D,
) -> impl Fn(Value, Option<ZeroCopyBuf>) -> Result<JsonOp, OpError>
where
  D: Fn(
    &mpsc::Sender<WorkerEvent>,
    Value,
    Option<ZeroCopyBuf>,
  ) -> Result<JsonOp, OpError>,
{
  move |args: Value,
        zero_copy: Option<ZeroCopyBuf>|
        -> Result<JsonOp, OpError> { dispatcher(&sender, args, zero_copy) }
}
/// Registers the web-worker ops on the isolate, each bound to a clone of
/// the host-event channel sender.
pub fn init(i: &mut Isolate, s: &State, sender: &mpsc::Sender<WorkerEvent>) {
  i.register_op(
    "op_worker_post_message",
    s.core_op(json_op(web_worker_op(
      sender.clone(),
      op_worker_post_message,
    ))),
  );
  i.register_op(
    "op_worker_close",
    s.core_op(json_op(web_worker_op(sender.clone(), op_worker_close))),
  );
}
/// Post message to host as guest worker
fn op_worker_post_message(
  sender: &mpsc::Sender<WorkerEvent>,
  _args: Value,
  data: Option<ZeroCopyBuf>,
) -> Result<JsonOp, OpError> {
  // NOTE(review): panics if the caller supplies no zero-copy buffer —
  // presumably the JS side always sends one; verify against the op's JS
  // binding.
  let d = Vec::from(data.unwrap().as_ref()).into_boxed_slice();
  let mut sender = sender.clone();
  // Block until the message is delivered to the host channel.
  let fut = sender.send(WorkerEvent::Message(d));
  futures::executor::block_on(fut).expect("Failed to post message to host");
  Ok(JsonOp::Sync(json!({})))
}
/// Notify host that guest worker closes
fn op_worker_close(
  sender: &mpsc::Sender<WorkerEvent>,
  _args: Value,
  _data: Option<ZeroCopyBuf>,
) -> Result<JsonOp, OpError> {
  // Closing the channel is the close signal; the host observes the stream end.
  let mut sender = sender.clone();
  sender.close_channel();
  Ok(JsonOp::Sync(json!({})))
}
| 27.553846 | 77 | 0.683417 |
d6ce2f3d8fa4a1152529f6f798dd5e9ff9525ad5 | 22,337 | use Cursive;
use Printer;
use With;
use align::{Align, HAlign, VAlign};
use direction::Direction;
use event::{Callback, Event, EventResult, Key, MouseButton, MouseEvent};
use menu::MenuTree;
use std::borrow::Borrow;
use std::cell::Cell;
use std::cmp::min;
use std::rc::Rc;
use theme::ColorStyle;
use unicode_width::UnicodeWidthStr;
use vec::Vec2;
use view::{Position, ScrollBase, View};
use views::MenuPopup;
/// View to select an item among a list.
///
/// It contains a list of values of type T, with associated labels.
///
/// # Examples
///
/// ```no_run
/// # extern crate cursive;
/// # use cursive::Cursive;
/// # use cursive::views::{SelectView, Dialog, TextView};
/// # use cursive::align::HAlign;
/// # fn main() {
/// let mut time_select = SelectView::new().h_align(HAlign::Center);
/// time_select.add_item("Short", 1);
/// time_select.add_item("Medium", 5);
/// time_select.add_item("Long", 10);
///
/// time_select.set_on_submit(|s, time| {
/// s.pop_layer();
/// let text = format!("You will wait for {} minutes...", time);
/// s.add_layer(Dialog::around(TextView::new(text))
/// .button("Quit", |s| s.quit()));
/// });
///
/// let mut siv = Cursive::new();
/// siv.add_layer(Dialog::around(time_select)
/// .title("How long is your wait?"));
/// # }
///
/// ```
pub struct SelectView<T = String> {
    items: Vec<Item<T>>,
    // `false` disables selection/interaction entirely.
    enabled: bool,
    // the focus needs to be manipulable from callbacks
    focus: Rc<Cell<usize>>,
    scrollbase: ScrollBase,
    // This is a custom callback to include a &T.
    // It will be called whenever "Enter" is pressed.
    on_submit: Option<Rc<Fn(&mut Cursive, &T)>>,
    // This callback is called when the selection is changed.
    on_select: Option<Rc<Fn(&mut Cursive, &T)>>,
    align: Align,
    // `true` if we show a one-line view, with popup on selection.
    popup: bool,
    // We need the last offset to place the popup window
    // We "cache" it during the draw, so we need interior mutability.
    last_offset: Cell<Vec2>,
    last_size: Vec2,
}
impl<T: 'static> Default for SelectView<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: 'static> SelectView<T> {
    /// Creates a new empty SelectView.
    pub fn new() -> Self {
        SelectView {
            items: Vec::new(),
            enabled: true,
            focus: Rc::new(Cell::new(0)),
            scrollbase: ScrollBase::new(),
            on_select: None,
            on_submit: None,
            align: Align::top_left(),
            popup: false,
            last_offset: Cell::new(Vec2::zero()),
            last_size: Vec2::zero(),
        }
    }
    /// Turns `self` into a popup select view.
    ///
    /// Chainable variant.
    pub fn popup(self) -> Self {
        self.with(|s| s.set_popup(true))
    }
    /// Turns `self` into a popup select view.
    pub fn set_popup(&mut self, popup: bool) {
        self.popup = popup;
    }
    /// Disables this view.
    ///
    /// A disabled view cannot be selected.
    pub fn disable(&mut self) {
        self.enabled = false;
    }
    /// Disables this view.
    ///
    /// Chainable variant.
    pub fn disabled(self) -> Self {
        self.with(Self::disable)
    }
    /// Re-enables this view.
    pub fn enable(&mut self) {
        self.enabled = true;
    }
    /// Enable or disable this view.
    pub fn set_enabled(&mut self, enabled: bool) {
        self.enabled = enabled;
    }
    /// Returns `true` if this view is enabled.
    pub fn is_enabled(&self) -> bool {
        self.enabled
    }
    /// Sets a callback to be used when an item is selected.
    pub fn set_on_select<F>(&mut self, cb: F)
    where
        F: Fn(&mut Cursive, &T) + 'static,
    {
        self.on_select = Some(Rc::new(cb));
    }
    /// Sets a callback to be used when an item is selected.
    ///
    /// Chainable variant.
    pub fn on_select<F>(self, cb: F) -> Self
    where
        F: Fn(&mut Cursive, &T) + 'static,
    {
        self.with(|s| s.set_on_select(cb))
    }
    /// Sets a callback to be used when `<Enter>` is pressed.
    ///
    /// The item currently selected will be given to the callback.
    ///
    /// Here, `V` can be `T` itself, or a type that can be borrowed from `T`.
    pub fn set_on_submit<F, R, V: ?Sized>(&mut self, cb: F)
    where
        F: 'static + Fn(&mut Cursive, &V) -> R,
        T: Borrow<V>,
    {
        // The adapter closure discards the callback's return value `R`.
        self.on_submit = Some(Rc::new(move |s, t| {
            cb(s, t.borrow());
        }));
    }
    /// Sets a callback to be used when `<Enter>` is pressed.
    ///
    /// The item currently selected will be given to the callback.
    ///
    /// Chainable variant.
    pub fn on_submit<F, V: ?Sized>(self, cb: F) -> Self
    where
        F: Fn(&mut Cursive, &V) + 'static,
        T: Borrow<V>,
    {
        self.with(|s| s.set_on_submit(cb))
    }
    /// Sets the alignment for this view.
    pub fn align(mut self, align: Align) -> Self {
        self.align = align;
        self
    }
    /// Sets the vertical alignment for this view.
    /// (If the view is given too much space vertically.)
    pub fn v_align(mut self, v: VAlign) -> Self {
        self.align.v = v;
        self
    }
    /// Sets the horizontal alignment for this view.
    pub fn h_align(mut self, h: HAlign) -> Self {
        self.align.h = h;
        self
    }
    /// Returns the value of the currently selected item.
    ///
    /// Panics if the list is empty.
    pub fn selection(&self) -> Rc<T> {
        Rc::clone(&self.items[self.focus()].value)
    }
    /// Removes all items from this view and resets the focus to 0.
    pub fn clear(&mut self) {
        self.items.clear();
        self.focus.set(0);
    }
    /// Adds a item to the list, with given label and value.
    pub fn add_item<S: Into<String>>(&mut self, label: S, value: T) {
        self.items.push(Item::new(label.into(), value));
    }
    /// Gets an item at given idx or None.
    ///
    /// ```
    /// use cursive::Cursive;
    /// use cursive::views::{SelectView, TextView};
    /// let select = SelectView::new()
    ///     .item("Short", 1);
    /// assert_eq!(select.get_item(0), Some(("Short", &1)));
    /// ```
    pub fn get_item(&self, i: usize) -> Option<(&str, &T)> {
        self.items
            .get(i)
            .map(|item| (item.label.as_ref(), &*item.value))
    }
/// Gets a mut item at given idx or None.
pub fn get_item_mut(&mut self, i: usize) -> Option<(&mut String, &mut T)> {
if i >= self.items.len() {
None
} else {
let item = &mut self.items[i];
if let Some(t) = Rc::get_mut(&mut item.value) {
let label = &mut item.label;
Some((label, t))
} else {
None
}
}
}
    /// Removes an item from the list.
    pub fn remove_item(&mut self, id: usize) {
        self.items.remove(id);
        // Shift the focus left when it sat at or after the removed index.
        let focus = self.focus();
        if focus >= id && focus > 0 {
            self.focus.set(focus - 1);
        }
    }
    /// Chainable variant of add_item
    pub fn item<S: Into<String>>(self, label: S, value: T) -> Self {
        self.with(|s| s.add_item(label, value))
    }
    /// Adds all items from from an iterator.
    pub fn add_all<S, I>(&mut self, iter: I)
    where
        S: Into<String>,
        I: IntoIterator<Item = (S, T)>,
    {
        for (s, t) in iter {
            self.add_item(s, t);
        }
    }
    /// Adds all items from from an iterator.
    ///
    /// Chainable variant.
    pub fn with_all<S, I>(self, iter: I) -> Self
    where
        S: Into<String>,
        I: IntoIterator<Item = (S, T)>,
    {
        self.with(|s| s.add_all(iter))
    }
fn draw_item(&self, printer: &Printer, i: usize) {
let l = self.items[i].label.width();
let x = self.align.h.get_offset(l, printer.size.x);
printer.print_hline((0, 0), x, " ");
printer.print((x, 0), &self.items[i].label);
if l < printer.size.x {
assert!((l + x) <= printer.size.x);
printer.print_hline((x + l, 0), printer.size.x - (l + x), " ");
}
}
/// Returns the id of the item currently selected.
///
/// Returns `None` if the list is empty.
pub fn selected_id(&self) -> Option<usize> {
if self.items.is_empty() {
None
} else {
Some(self.focus())
}
}
/// Returns the number of items in this list.
pub fn len(&self) -> usize {
self.items.len()
}
/// Returns `true` if this list has no item.
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
fn focus(&self) -> usize {
self.focus.get()
}
    /// Moves the selection to the given position.
    pub fn set_selection(&mut self, i: usize) {
        // TODO: Check if `i > self.len()` ?
        self.focus.set(i);
        // Scroll so the newly selected row is visible.
        self.scrollbase.scroll_to(i);
    }
/// Sets the selection to the given position.
///
/// Chainable variant.
pub fn selected(self, i: usize) -> Self {
self.with(|s| s.set_selection(i))
}
/// Moves the selection up by the given number of rows.
pub fn select_up(&mut self, n: usize) {
self.focus_up(n);
let focus = self.focus();
self.scrollbase.scroll_to(focus);
}
/// Moves the selection down by the given number of rows.
pub fn select_down(&mut self, n: usize) {
self.focus_down(n);
let focus = self.focus();
self.scrollbase.scroll_to(focus);
}
// Low-level focus change. Does not fix scrollbase.
fn focus_up(&mut self, n: usize) {
let focus = self.focus().saturating_sub(n);
self.focus.set(focus);
}
// Low-level focus change. Does not fix scrollbase.
fn focus_down(&mut self, n: usize) {
let focus = min(self.focus() + n, self.items.len().saturating_sub(1));
self.focus.set(focus);
}
    // Builds the "submit" result from `on_submit` and the current
    // selection. Callers must ensure `on_submit` is `Some` before
    // calling this (hence the `unwrap`).
    fn submit(&mut self) -> EventResult {
        let cb = self.on_submit.clone().unwrap();
        let v = self.selection();
        // We return a Callback Rc<|s| cb(s, &*v)>
        EventResult::Consumed(Some(Callback::from_fn(move |s| cb(s, &v))))
    }
    // Event handling for the inline (non-popup) mode: keyboard moves the
    // focus, the mouse can scroll/drag/click, and typing a character
    // jumps to the next matching label. Consumed events return the
    // `on_select` callback (if any); `submit` is triggered by Enter or
    // a click on the already-focused row.
    fn on_event_regular(&mut self, event: Event) -> EventResult {
        // Most focus changes must re-sync the scroll view afterwards;
        // raw scrolling/dragging events opt out by clearing this flag.
        let mut fix_scroll = true;
        match event {
            Event::Key(Key::Up) if self.focus() > 0 => self.focus_up(1),
            Event::Key(Key::Down) if self.focus() + 1 < self.items.len() => {
                self.focus_down(1)
            }
            Event::Key(Key::PageUp) => self.focus_up(10),
            Event::Key(Key::PageDown) => self.focus_down(10),
            Event::Key(Key::Home) => self.focus.set(0),
            Event::Key(Key::End) => {
                self.focus.set(self.items.len().saturating_sub(1))
            }
            // Mouse wheel scrolls the view without moving the focus.
            Event::Mouse {
                event: MouseEvent::WheelDown,
                ..
            } if self.scrollbase.can_scroll_down() =>
            {
                fix_scroll = false;
                self.scrollbase.scroll_down(5);
            }
            Event::Mouse {
                event: MouseEvent::WheelUp,
                ..
            } if self.scrollbase.can_scroll_up() =>
            {
                fix_scroll = false;
                self.scrollbase.scroll_up(5);
            }
            // Left press on the scrollbar starts a drag (the guard does
            // the hit-test via `start_drag`).
            Event::Mouse {
                event: MouseEvent::Press(MouseButton::Left),
                position,
                offset,
            } if position
                .checked_sub(offset)
                .map(|position| {
                    self.scrollbase.start_drag(position, self.last_size.x)
                })
                .unwrap_or(false) =>
            {
                fix_scroll = false;
            }
            Event::Mouse {
                event: MouseEvent::Hold(MouseButton::Left),
                position,
                offset,
            } => {
                // If the mouse is dragged, we always consume the event.
                fix_scroll = false;
                let position = position.saturating_sub(offset);
                self.scrollbase.drag(position);
            }
            // Any other press inside the clickable area moves the focus
            // to the clicked row.
            Event::Mouse {
                event: MouseEvent::Press(_),
                position,
                offset,
            } => if let Some(position) = position.checked_sub(offset) {
                // Reserve 2 columns on the right when a scrollbar is shown.
                let scrollbar_size = if self.scrollbase.scrollable() {
                    (2, 0)
                } else {
                    (0, 0)
                };
                let clickable_size =
                    self.last_size.saturating_sub(scrollbar_size);
                if position < clickable_size {
                    fix_scroll = false;
                    self.focus.set(position.y + self.scrollbase.start_line);
                }
            },
            // Releasing the left button ends any scrollbar drag; a release
            // on the focused row counts as a submit.
            Event::Mouse {
                event: MouseEvent::Release(MouseButton::Left),
                position,
                offset,
            } => {
                fix_scroll = false;
                self.scrollbase.release_grab();
                if self.on_submit.is_some() {
                    if let Some(position) = position.checked_sub(offset) {
                        let scrollbar_size = if self.scrollbase.scrollable() {
                            (2, 0)
                        } else {
                            (0, 0)
                        };
                        let clickable_size =
                            self.last_size.saturating_sub(scrollbar_size);
                        if position < clickable_size
                            && (position.y + self.scrollbase.start_line)
                                == self.focus()
                        {
                            return self.submit();
                        }
                    }
                }
            }
            Event::Key(Key::Enter) if self.on_submit.is_some() => {
                return self.submit();
            }
            Event::Char(c) => {
                // Starting from the current focus,
                // find the first item that match the char.
                // Cycle back to the beginning of
                // the list when we reach the end.
                // This is achieved by chaining twice the iterator
                let iter = self.items.iter().chain(self.items.iter());
                if let Some((i, _)) = iter.enumerate()
                    .skip(self.focus() + 1)
                    .find(|&(_, item)| item.label.starts_with(c))
                {
                    // Apply modulo in case we have a hit
                    // from the chained iterator
                    self.focus.set(i % self.items.len());
                } else {
                    return EventResult::Ignored;
                }
            }
            _ => return EventResult::Ignored,
        }
        if fix_scroll {
            let focus = self.focus();
            self.scrollbase.scroll_to(focus);
        }
        // The event was consumed: report the selection change (if a
        // callback is registered).
        EventResult::Consumed(self.on_select.clone().map(|cb| {
            let v = self.selection();
            Callback::from_fn(move |s| cb(s, &v))
        }))
    }
    // Builds a popup menu mirroring `self.items` and returns a callback
    // that shows it, positioned so the popup's focused line overlays the
    // inline label.
    fn open_popup(&mut self) -> EventResult {
        // Build a shallow menu tree to mimick the items array.
        // TODO: cache it?
        let mut tree = MenuTree::new();
        for (i, item) in self.items.iter().enumerate() {
            // Each leaf sets the focus to its own index and forwards to
            // `on_submit` (when registered) with the item's value.
            let focus = Rc::clone(&self.focus);
            let on_submit = self.on_submit.as_ref().cloned();
            let value = Rc::clone(&item.value);
            tree.add_leaf(item.label.clone(), move |s| {
                focus.set(i);
                if let Some(ref on_submit) = on_submit {
                    on_submit(s, &value);
                }
            });
        }
        // Let's keep the tree around,
        // the callback will want to use it.
        let tree = Rc::new(tree);
        let focus = self.focus();
        // This is the offset for the label text.
        // We'll want to show the popup so that the text matches.
        // It'll be soo cool.
        let item_length = self.items[focus].label.len();
        let text_offset = (self.last_size.x.saturating_sub(item_length)) / 2;
        // The total offset for the window is:
        // * the last absolute offset at which we drew this view
        // * shifted to the right of the text offset
        // * shifted to the top of the focus (so the line matches)
        // * shifted top-left of the border+padding of the popup
        let offset = self.last_offset.get();
        let offset = offset + (text_offset, 0);
        let offset = offset.saturating_sub((0, focus));
        let offset = offset.saturating_sub((2, 1));
        // And now, we can return the callback that will create the popup.
        EventResult::with_cb(move |s| {
            // The callback will want to work with a fresh Rc
            let tree = Rc::clone(&tree);
            // We'll relativise the absolute position,
            // So that we are locked to the parent view.
            // A nice effect is that window resizes will keep both
            // layers together.
            let current_offset = s.screen().offset();
            let offset = offset.signed() - current_offset;
            // And finally, put the view in view!
            s.screen_mut().add_layer_at(
                Position::parent(offset),
                MenuPopup::new(tree).focus(focus),
            );
        })
    }
    // A popup view only does one thing: open the popup on Enter.
    // (A left-click release inside the view does the same.)
    fn on_event_popup(&mut self, event: Event) -> EventResult {
        match event {
            // TODO: add Left/Right support for quick-switch?
            Event::Key(Key::Enter) => self.open_popup(),
            // Only react when the release lands inside this view's area.
            Event::Mouse {
                event: MouseEvent::Release(MouseButton::Left),
                position,
                offset,
            } if position.fits_in_rect(offset, self.last_size) =>
            {
                self.open_popup()
            }
            _ => EventResult::Ignored,
        }
    }
}
impl SelectView<String> {
    /// Convenient method to use the label as value.
    pub fn add_item_str<S: Into<String>>(&mut self, label: S) {
        let label = label.into();
        let value = label.clone();
        self.add_item(label, value);
    }
    /// Chainable variant of `add_item_str`.
    pub fn item_str<S: Into<String>>(mut self, label: S) -> Self {
        self.add_item_str(label);
        self
    }
    /// Adds all strings from an iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// # use cursive::views::SelectView;
    /// let mut select_view = SelectView::new();
    /// select_view.add_all_str(vec!["a", "b", "c"]);
    /// ```
    pub fn add_all_str<S, I>(&mut self, iter: I)
    where
        S: Into<String>,
        I: IntoIterator<Item = S>,
    {
        iter.into_iter().for_each(|s| self.add_item_str(s));
    }
    /// Adds all strings from an iterator.
    ///
    /// Chainable variant.
    pub fn with_all_str<S, I>(mut self, iter: I) -> Self
    where
        S: Into<String>,
        I: IntoIterator<Item = S>,
    {
        self.add_all_str(iter);
        self
    }
}
impl<T: 'static> View for SelectView<T> {
    fn draw(&self, printer: &Printer) {
        // Remember where we were drawn; `open_popup` uses this to place
        // the popup over the inline label.
        self.last_offset.set(printer.offset);
        if self.popup {
            // Popup mode: a single `< label >` line.
            let style = if !self.enabled {
                ColorStyle::secondary()
            } else if !printer.focused {
                ColorStyle::primary()
            } else {
                ColorStyle::highlight()
            };
            // `x` is the last printable column; bail out on zero width.
            let x = match printer.size.x.checked_sub(1) {
                Some(x) => x,
                None => return,
            };
            printer.with_color(style, |printer| {
                // Prepare the entire background
                printer.print_hline((1, 0), x, " ");
                // Draw the borders
                printer.print((0, 0), "<");
                printer.print((x, 0), ">");
                let label = &self.items[self.focus()].label;
                // And center the text?
                let offset = HAlign::Center.get_offset(label.len(), x + 1);
                printer.print((offset, 0), label);
            });
        } else {
            // List mode: draw every visible row through the scroll view,
            // highlighting the focused one.
            let h = self.items.len();
            let offset = self.align.v.get_offset(h, printer.size.y);
            let printer =
                &printer.sub_printer(Vec2::new(0, offset), printer.size, true);
            self.scrollbase.draw(printer, |printer, i| {
                printer.with_selection(i == self.focus(), |printer| {
                    // Disabled views draw non-focused rows dimmed.
                    if i != self.focus() && !self.enabled {
                        printer
                            .with_color(ColorStyle::secondary(), |printer| {
                                self.draw_item(printer, i)
                            });
                    } else {
                        self.draw_item(printer, i);
                    }
                });
            });
        }
    }
    fn required_size(&mut self, req: Vec2) -> Vec2 {
        // Items here are not compressible.
        // So no matter what the horizontal requirements are,
        // we'll still return our longest item.
        let w = self.items
            .iter()
            .map(|item| item.label.width())
            .max()
            .unwrap_or(1);
        if self.popup {
            // Popup mode is one line: label plus the `<`/`>` borders.
            Vec2::new(w + 2, 1)
        } else {
            let h = self.items.len();
            let scrolling = req.y < h;
            // Add 2 spaces for the scrollbar if we need
            let w = if scrolling { w + 2 } else { w };
            Vec2::new(w, h)
        }
    }
    fn on_event(&mut self, event: Event) -> EventResult {
        // Dispatch to the mode-specific handler.
        if self.popup {
            self.on_event_popup(event)
        } else {
            self.on_event_regular(event)
        }
    }
    fn take_focus(&mut self, _: Direction) -> bool {
        // Only focusable when enabled and non-empty.
        self.enabled && !self.items.is_empty()
    }
    fn layout(&mut self, size: Vec2) {
        self.last_size = size;
        if !self.popup {
            // Sync the scroll view with the new viewport/content heights.
            self.scrollbase.set_heights(size.y, self.items.len());
        }
    }
}
/// A single entry of the select view: a display label paired with a
/// (possibly shared) value.
struct Item<T> {
    // Text shown to the user for this entry.
    label: String,
    // Associated value; `Rc` so the popup/submit callbacks can share it.
    value: Rc<T>,
}
impl<T> Item<T> {
    /// Wraps `value` in an `Rc` and pairs it with its display label.
    fn new(label: String, value: T) -> Self {
        let value = Rc::new(value);
        Item { label, value }
    }
}
| 31.110028 | 79 | 0.505036 |
1af4a8ad723860c577a7b988d06eb3b139148172 | 666 | // This file is part of olympus-xmp. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT. No part of olympus-xmp, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2022 The developers of olympus-xmp. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT.
// The Unicode scalar value U+8FFFD as a `char` constant; the identifier
// mirrors the code point, hence the non-standard lowercase-prefixed name.
#[allow(missing_docs)]
pub(super) const x8FFFD: char = '\u{8FFFD}';
| 95.142857 | 390 | 0.791291 |
759b21218c353e70787f853a6c09725bf7132b10 | 16,147 | // Inspired by Paul Woolcock's cargo-fmt (https://github.com/pwoolcoc/cargo-fmt/).
#![deny(warnings)]
#![allow(clippy::match_like_matches_macro)]
use std::cmp::Ordering;
use std::collections::{BTreeMap, BTreeSet};
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io::{self, Write};
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str;
use structopt::StructOpt;
#[path = "test/mod.rs"]
#[cfg(test)]
mod cargo_fmt_tests;
// NOTE: structopt renders the `///` doc comments on the fields below as
// the `--help` text — they are user-facing output, so edit with care.
#[derive(StructOpt, Debug)]
#[structopt(
    bin_name = "cargo fmt",
    about = "This utility formats all bin and lib files of \
             the current crate using rustfmt."
)]
pub struct Opts {
    /// No output printed to stdout
    #[structopt(short = "q", long = "quiet")]
    quiet: bool,
    /// Use verbose output
    #[structopt(short = "v", long = "verbose")]
    verbose: bool,
    /// Print rustfmt version and exit
    #[structopt(long = "version")]
    version: bool,
    /// Specify package to format
    #[structopt(short = "p", long = "package", value_name = "package")]
    packages: Vec<String>,
    /// Specify path to Cargo.toml
    #[structopt(long = "manifest-path", value_name = "manifest-path")]
    manifest_path: Option<String>,
    /// Specify message-format: short|json|human
    #[structopt(long = "message-format", value_name = "message-format")]
    message_format: Option<String>,
    /// Options passed to rustfmt
    // 'raw = true' to make `--` explicit.
    #[structopt(name = "rustfmt_options", raw(true))]
    rustfmt_options: Vec<String>,
    /// Format all packages, and also their local path-based dependencies
    #[structopt(long = "all")]
    format_all: bool,
    /// Run rustfmt in check mode
    #[structopt(long = "check")]
    check: bool,
}
fn main() {
    // Run, flush anything still buffered on stdout, then exit with the
    // computed status code.
    let code = execute();
    std::io::stdout().flush().unwrap();
    std::process::exit(code);
}
// Process exit codes, following the usual Unix convention.
const SUCCESS: i32 = 0;
const FAILURE: i32 = 1;
// Parses the CLI, translates cargo-fmt flags into rustfmt arguments, and
// runs the selected formatting strategy. Returns the process exit code.
fn execute() -> i32 {
    // Drop extra `fmt` argument provided by `cargo`.
    let mut found_fmt = false;
    let args = env::args().filter(|x| {
        if found_fmt {
            true
        } else {
            // Skip the first literal "fmt" argument only.
            found_fmt = x == "fmt";
            x != "fmt"
        }
    });
    let opts = Opts::from_iter(args);
    let verbosity = match (opts.verbose, opts.quiet) {
        (false, false) => Verbosity::Normal,
        (false, true) => Verbosity::Quiet,
        (true, false) => Verbosity::Verbose,
        (true, true) => {
            print_usage_to_stderr("quiet mode and verbose mode are not compatible");
            return FAILURE;
        }
    };
    // `--version` and rustfmt's own informational flags are forwarded
    // directly to rustfmt without formatting anything.
    if opts.version {
        return handle_command_status(get_rustfmt_info(&[String::from("--version")]));
    }
    if opts.rustfmt_options.iter().any(|s| {
        ["--print-config", "-h", "--help", "-V", "--version"].contains(&s.as_str())
            || s.starts_with("--help=")
            || s.starts_with("--print-config=")
    }) {
        return handle_command_status(get_rustfmt_info(&opts.rustfmt_options));
    }
    let strategy = CargoFmtStrategy::from_opts(&opts);
    let mut rustfmt_args = opts.rustfmt_options;
    if opts.check {
        // Append `--check` unless the user already passed it after `--`.
        let check_flag = "--check";
        if !rustfmt_args.iter().any(|o| o == check_flag) {
            rustfmt_args.push(check_flag.to_owned());
        }
    }
    if let Some(message_format) = opts.message_format {
        if let Err(msg) = convert_message_format_to_rustfmt_args(&message_format, &mut rustfmt_args)
        {
            print_usage_to_stderr(&msg);
            return FAILURE;
        }
    }
    if let Some(specified_manifest_path) = opts.manifest_path {
        if !specified_manifest_path.ends_with("Cargo.toml") {
            print_usage_to_stderr("the manifest-path must be a path to a Cargo.toml file");
            return FAILURE;
        }
        let manifest_path = PathBuf::from(specified_manifest_path);
        handle_command_status(format_crate(
            verbosity,
            &strategy,
            rustfmt_args,
            Some(&manifest_path),
        ))
    } else {
        handle_command_status(format_crate(verbosity, &strategy, rustfmt_args, None))
    }
}
/// Builds a `Command` for the rustfmt binary, honoring a `RUSTFMT`
/// environment-variable override and falling back to `rustfmt` on `PATH`.
fn rustfmt_command() -> Command {
    match env::var_os("RUSTFMT") {
        Some(explicit) => Command::new(explicit),
        None => Command::new("rustfmt"),
    }
}
/// Translates `--message-format <fmt>` into the equivalent rustfmt flags,
/// appending them to `rustfmt_args`.
///
/// * `short` — ensure file names are listed (`-l`).
/// * `json`  — emit JSON; incompatible with an explicit `--emit` or `--check`.
/// * `human` — rustfmt's default; nothing to add.
///
/// # Errors
/// Returns a user-facing message for an unknown format or an
/// incompatible flag combination.
fn convert_message_format_to_rustfmt_args(
    message_format: &str,
    rustfmt_args: &mut Vec<String>,
) -> Result<(), String> {
    // Scan the already-collected rustfmt flags once per condition;
    // iterator `any` replaces the original hand-rolled flag loop.
    let contains_emit_mode = rustfmt_args.iter().any(|arg| arg.starts_with("--emit"));
    let contains_check = rustfmt_args.iter().any(|arg| arg == "--check");
    let contains_list_files = rustfmt_args
        .iter()
        .any(|arg| arg == "-l" || arg == "--files-with-diff");
    match message_format {
        "short" => {
            if !contains_list_files {
                rustfmt_args.push(String::from("-l"));
            }
            Ok(())
        }
        "json" => {
            if contains_emit_mode {
                return Err(String::from(
                    "cannot include --emit arg when --message-format is set to json",
                ));
            }
            if contains_check {
                return Err(String::from(
                    "cannot include --check arg when --message-format is set to json",
                ));
            }
            rustfmt_args.push(String::from("--emit"));
            rustfmt_args.push(String::from("json"));
            Ok(())
        }
        "human" => Ok(()),
        // Last arm is the expression's value: no `return` needed.
        _ => Err(format!(
            "invalid --message-format value: {}. Allowed values are: short|json|human",
            message_format
        )),
    }
}
// Prints `reason` followed by the clap-generated usage text to stderr.
fn print_usage_to_stderr(reason: &str) {
    eprintln!("{}", reason);
    Opts::clap()
        .after_help("")
        .write_help(&mut io::stderr())
        .expect("failed to write to stderr");
}
// How chatty the tool should be; see `run_rustfmt` for how each level
// affects target listing and rustfmt's stdout.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Verbosity {
    // Print each target and the full rustfmt invocations.
    Verbose,
    // Default output.
    Normal,
    // Suppress rustfmt's stdout.
    Quiet,
}
// Turns a command result into an exit code, printing usage on failure.
fn handle_command_status(status: Result<i32, io::Error>) -> i32 {
    status.unwrap_or_else(|e| {
        print_usage_to_stderr(&e.to_string());
        FAILURE
    })
}
// Spawns rustfmt with `args` (e.g. `--version`), forwarding its stdout,
// and returns the child's exit code (SUCCESS when unavailable).
fn get_rustfmt_info(args: &[String]) -> Result<i32, io::Error> {
    let mut child = rustfmt_command()
        .stdout(std::process::Stdio::inherit())
        .args(args)
        .spawn()
        .map_err(|e| match e.kind() {
            // Give a friendlier message when the binary is missing.
            io::ErrorKind::NotFound => io::Error::new(
                io::ErrorKind::Other,
                "Could not run rustfmt, please make sure it is in your PATH.",
            ),
            _ => e,
        })?;
    let status = child.wait()?;
    if status.success() {
        return Ok(SUCCESS);
    }
    Ok(status.code().unwrap_or(SUCCESS))
}
// Resolves the target set for `strategy`, then runs rustfmt over it.
fn format_crate(
    verbosity: Verbosity,
    strategy: &CargoFmtStrategy,
    rustfmt_args: Vec<String>,
    manifest_path: Option<&Path>,
) -> Result<i32, io::Error> {
    let targets = get_targets(strategy, manifest_path)?;
    // Currently only bin and lib files get formatted.
    run_rustfmt(&targets, &rustfmt_args, verbosity)
}
/// Target uses a `path` field for equality and hashing.
#[derive(Debug)]
pub struct Target {
    /// A path to the main source file of the target.
    // Canonicalized in `from_target` when possible.
    path: PathBuf,
    /// A kind of target (e.g., lib, bin, example, ...).
    kind: String,
    /// Rust edition for this target.
    edition: String,
}
impl Target {
    /// Builds a `Target` from cargo metadata, canonicalizing the source
    /// path when possible (falling back to the raw path on error).
    pub fn from_target(target: &cargo_metadata::Target) -> Self {
        let raw = PathBuf::from(&target.src_path);
        Target {
            path: fs::canonicalize(&raw).unwrap_or(raw),
            kind: target.kind[0].clone(),
            edition: target.edition.clone(),
        }
    }
}
// Equality, ordering and hashing all key on `path` alone, so they stay
// mutually consistent (required for `BTreeSet`/`HashSet` correctness).
impl PartialEq for Target {
    fn eq(&self, other: &Target) -> bool {
        self.path == other.path
    }
}
impl PartialOrd for Target {
    fn partial_cmp(&self, other: &Target) -> Option<Ordering> {
        // Canonical form: delegate to `Ord` so the two orderings can
        // never diverge (clippy::non_canonical_partial_ord_impl).
        Some(self.cmp(other))
    }
}
impl Ord for Target {
    fn cmp(&self, other: &Target) -> Ordering {
        self.path.cmp(&other.path)
    }
}
impl Eq for Target {}
impl Hash for Target {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Must agree with `PartialEq`: hash exactly the equality field.
        self.path.hash(state);
    }
}
#[derive(Debug, PartialEq, Eq)]
pub enum CargoFmtStrategy {
    /// Format every package, and also their local path-based dependencies.
    All,
    /// Format packages that are specified by the command line argument.
    Some(Vec<String>),
    /// Format the root packages only.
    Root,
}
impl CargoFmtStrategy {
    /// Derives the formatting strategy from the CLI flags:
    /// `--all` wins, then explicit `-p` packages, else the root package.
    pub fn from_opts(opts: &Opts) -> CargoFmtStrategy {
        if opts.format_all {
            CargoFmtStrategy::All
        } else if opts.packages.is_empty() {
            CargoFmtStrategy::Root
        } else {
            CargoFmtStrategy::Some(opts.packages.clone())
        }
    }
}
/// Based on the specified `CargoFmtStrategy`, returns a set of main source files.
fn get_targets(
    strategy: &CargoFmtStrategy,
    manifest_path: Option<&Path>,
) -> Result<BTreeSet<Target>, io::Error> {
    let mut targets = BTreeSet::new();
    match strategy {
        CargoFmtStrategy::Root => get_targets_root_only(manifest_path, &mut targets)?,
        CargoFmtStrategy::All => {
            get_targets_recursive(manifest_path, &mut targets, &mut BTreeSet::new())?
        }
        CargoFmtStrategy::Some(hitlist) => {
            get_targets_with_hitlist(manifest_path, hitlist, &mut targets)?
        }
    }
    if targets.is_empty() {
        // An empty set means the strategy matched nothing; report it
        // rather than silently formatting nothing.
        return Err(io::Error::new(
            io::ErrorKind::Other,
            "Failed to find targets".to_owned(),
        ));
    }
    Ok(targets)
}
// Collects the targets of the "current" package only: the workspace root
// when invoked from it (or pointed at it), otherwise the single package
// whose Cargo.toml matches the given/derived manifest path.
fn get_targets_root_only(
    manifest_path: Option<&Path>,
    targets: &mut BTreeSet<Target>,
) -> Result<(), io::Error> {
    let metadata = get_cargo_metadata(manifest_path)?;
    let workspace_root_path = PathBuf::from(&metadata.workspace_root).canonicalize()?;
    // Work out (a) whether we're operating on the workspace root and
    // (b) which Cargo.toml identifies the current package.
    let (in_workspace_root, current_dir_manifest) = if let Some(target_manifest) = manifest_path {
        (
            workspace_root_path == target_manifest,
            target_manifest.canonicalize()?,
        )
    } else {
        let current_dir = env::current_dir()?.canonicalize()?;
        (
            workspace_root_path == current_dir,
            current_dir.join("Cargo.toml"),
        )
    };
    let package_targets = match metadata.packages.len() {
        // Single-package workspace: take it unconditionally.
        1 => metadata.packages.into_iter().next().unwrap().targets,
        // Multi-package workspace: keep everything when at the root,
        // otherwise only the package owning `current_dir_manifest`.
        _ => metadata
            .packages
            .into_iter()
            .filter(|p| {
                in_workspace_root
                    || PathBuf::from(&p.manifest_path)
                        .canonicalize()
                        .unwrap_or_default()
                        == current_dir_manifest
            })
            .map(|p| p.targets)
            .flatten()
            .collect(),
    };
    for target in package_targets {
        targets.insert(Target::from_target(&target));
    }
    Ok(())
}
// Collects targets for every workspace package, then recurses into
// local path-based dependencies outside the workspace. `visited` keeps
// dependency names already expanded, preventing cycles and rework.
fn get_targets_recursive(
    manifest_path: Option<&Path>,
    targets: &mut BTreeSet<Target>,
    visited: &mut BTreeSet<String>,
) -> Result<(), io::Error> {
    let metadata = get_cargo_metadata(manifest_path)?;
    for package in &metadata.packages {
        add_targets(&package.targets, targets);
        // Look for local dependencies using information available since cargo v1.51
        // It's theoretically possible someone could use a newer version of rustfmt with
        // a much older version of `cargo`, but we don't try to explicitly support that scenario.
        // If someone reports an issue with path-based deps not being formatted, be sure to
        // confirm their version of `cargo` (not `cargo-fmt`) is >= v1.51
        // https://github.com/rust-lang/cargo/pull/8994
        for dependency in &package.dependencies {
            // Skip registry deps (no path) and anything already expanded.
            if dependency.path.is_none() || visited.contains(&dependency.name) {
                continue;
            }
            let manifest_path = PathBuf::from(dependency.path.as_ref().unwrap()).join("Cargo.toml");
            // Only recurse into manifests that exist and are NOT already
            // members of this workspace (those were handled above).
            if manifest_path.exists()
                && !metadata
                    .packages
                    .iter()
                    .any(|p| p.manifest_path.eq(&manifest_path))
            {
                visited.insert(dependency.name.to_owned());
                get_targets_recursive(Some(&manifest_path), targets, visited)?;
            }
        }
    }
    Ok(())
}
// Collects targets for exactly the packages named in `hitlist`; errors
// (naming one offender) if any requested package isn't in the workspace.
fn get_targets_with_hitlist(
    manifest_path: Option<&Path>,
    hitlist: &[String],
    targets: &mut BTreeSet<Target>,
) -> Result<(), io::Error> {
    let metadata = get_cargo_metadata(manifest_path)?;
    // Names still to be matched; whatever survives the scan is missing.
    let mut missing: BTreeSet<&String> = hitlist.iter().collect();
    for package in metadata.packages {
        if missing.remove(&package.name) {
            for target in package.targets {
                targets.insert(Target::from_target(&target));
            }
        }
    }
    match missing.iter().next() {
        None => Ok(()),
        Some(package) => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("package `{}` is not a member of the workspace", package),
        )),
    }
}
// Converts each metadata target and inserts it into the set.
fn add_targets(target_paths: &[cargo_metadata::Target], targets: &mut BTreeSet<Target>) {
    targets.extend(target_paths.iter().map(Target::from_target));
}
// Groups targets by Rust edition, runs one rustfmt invocation per group,
// and returns the first non-success exit code (or SUCCESS).
fn run_rustfmt(
    targets: &BTreeSet<Target>,
    fmt_args: &[String],
    verbosity: Verbosity,
) -> Result<i32, io::Error> {
    // edition -> list of file paths sharing it; rustfmt takes a single
    // `--edition` flag, hence one child process per edition.
    let by_edition = targets
        .iter()
        .inspect(|t| {
            if verbosity == Verbosity::Verbose {
                println!("[{} ({})] {:?}", t.kind, t.edition, t.path)
            }
        })
        .fold(BTreeMap::new(), |mut h, t| {
            h.entry(&t.edition).or_insert_with(Vec::new).push(&t.path);
            h
        });
    let mut status = vec![];
    for (edition, files) in by_edition {
        // Quiet mode swallows rustfmt's stdout entirely.
        let stdout = if verbosity == Verbosity::Quiet {
            std::process::Stdio::null()
        } else {
            std::process::Stdio::inherit()
        };
        if verbosity == Verbosity::Verbose {
            // Echo the exact command line being run.
            print!("rustfmt");
            print!(" --edition {}", edition);
            fmt_args.iter().for_each(|f| print!(" {}", f));
            files.iter().for_each(|f| print!(" {}", f.display()));
            println!();
        }
        let mut command = rustfmt_command()
            .stdout(stdout)
            .args(files)
            .args(&["--edition", edition])
            .args(fmt_args)
            .spawn()
            .map_err(|e| match e.kind() {
                io::ErrorKind::NotFound => io::Error::new(
                    io::ErrorKind::Other,
                    "Could not run rustfmt, please make sure it is in your PATH.",
                ),
                _ => e,
            })?;
        status.push(command.wait()?);
    }
    // First failing exit code wins; all-success maps to SUCCESS.
    Ok(status
        .iter()
        .filter_map(|s| if s.success() { None } else { s.code() })
        .next()
        .unwrap_or(SUCCESS))
}
// Runs `cargo metadata --no-deps`, first with `--offline`; if that fails
// (e.g. the cargo in use doesn't support it, or offline data is
// insufficient), retries once without it.
fn get_cargo_metadata(manifest_path: Option<&Path>) -> Result<cargo_metadata::Metadata, io::Error> {
    let mut cmd = cargo_metadata::MetadataCommand::new();
    cmd.no_deps();
    if let Some(manifest_path) = manifest_path {
        cmd.manifest_path(manifest_path);
    }
    cmd.other_options(vec![String::from("--offline")]);
    match cmd.exec() {
        Ok(metadata) => Ok(metadata),
        Err(_) => {
            // Fallback: drop `--offline` and try again, surfacing this
            // second attempt's error if it also fails.
            cmd.other_options(vec![]);
            match cmd.exec() {
                Ok(metadata) => Ok(metadata),
                Err(error) => Err(io::Error::new(io::ErrorKind::Other, error.to_string())),
            }
        }
    }
}
| 29.519196 | 100 | 0.565182 |
71127e88ec32c0f231822ffeb1f4bf50f6783b82 | 1,180 | // This file is part of Substrate.
// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// tag::description[]
//! Generic implementations of Extrinsic/Header/Block.
// end::description[]
mod block;
mod checked_extrinsic;
mod digest;
mod era;
mod header;
#[cfg(test)]
mod tests;
mod unchecked_extrinsic;
pub use self::{
block::{Block, BlockId, SignedBlock},
checked_extrinsic::CheckedExtrinsic,
digest::{ChangesTrieSignal, Digest, DigestItem, DigestItemRef, OpaqueDigestItemId},
era::{Era, Phase},
header::Header,
unchecked_extrinsic::{SignedPayload, UncheckedExtrinsic},
};
| 30.25641 | 84 | 0.750847 |
2316d905a326fdd74ca7bb928779c4ebd43d5a5a | 7,302 | //! Collect statistics about what is being painted.
use crate::*;
/// Size of the elements in a vector/array.
#[derive(Clone, Copy, PartialEq)]
enum ElementSize {
    // No elements recorded yet.
    Unknown,
    // All recorded elements share this byte size.
    Homogeneous(usize),
    // Elements of differing sizes were mixed in (see `Add` below).
    Heterogenous,
}
impl Default for ElementSize {
    // A fresh `AllocInfo` hasn't seen any elements yet.
    fn default() -> Self {
        Self::Unknown
    }
}
/// Aggregate information about a bunch of allocations.
#[derive(Clone, Copy, Default, PartialEq)]
pub struct AllocInfo {
    // Byte size of the elements, if they were all the same.
    element_size: ElementSize,
    // Number of separate allocations folded in.
    num_allocs: usize,
    // Total element count across all allocations.
    num_elements: usize,
    // Total byte count across all allocations.
    num_bytes: usize,
}
impl<T> From<&[T]> for AllocInfo {
    // Convenience alias for `from_slice`.
    fn from(slice: &[T]) -> Self {
        Self::from_slice(slice)
    }
}
impl std::ops::Add for AllocInfo {
    type Output = AllocInfo;
    // Merges two aggregates: counts add up; element sizes stay
    // homogeneous only when both sides agree (Unknown is neutral,
    // Heterogenous is absorbing).
    fn add(self, rhs: AllocInfo) -> AllocInfo {
        use ElementSize::{Heterogenous, Homogeneous, Unknown};
        let element_size = match (self.element_size, rhs.element_size) {
            (Heterogenous, _) | (_, Heterogenous) => Heterogenous,
            (Unknown, other) | (other, Unknown) => other,
            (Homogeneous(lhs), Homogeneous(rhs)) if lhs == rhs => Homogeneous(lhs),
            _ => Heterogenous,
        };
        AllocInfo {
            element_size,
            num_allocs: self.num_allocs + rhs.num_allocs,
            num_elements: self.num_elements + rhs.num_elements,
            num_bytes: self.num_bytes + rhs.num_bytes,
        }
    }
}
impl std::ops::AddAssign for AllocInfo {
    // In-place variant of `Add` (the type is `Copy`, so this is cheap).
    fn add_assign(&mut self, rhs: AllocInfo) {
        *self = *self + rhs;
    }
}
impl std::iter::Sum for AllocInfo {
    /// Sums allocation stats by folding with `Add`, starting from the
    /// empty (default) info.
    fn sum<I>(iter: I) -> Self
    where
        I: Iterator<Item = Self>,
    {
        // `fold` replaces the original manual accumulate-in-a-loop.
        iter.fold(Self::default(), |acc, value| acc + value)
    }
}
impl AllocInfo {
    // pub fn from_shape(shape: &Shape) -> Self {
    //     match shape {
    //         Shape::Noop
    //         Shape::Vec(shapes) => Self::from_shapes(shapes)
    //         | Shape::Circle { .. }
    //         | Shape::LineSegment { .. }
    //         | Shape::Rect { .. } => Self::default(),
    //         Shape::Path { points, .. } => Self::from_slice(points),
    //         Shape::Text { galley, .. } => Self::from_galley(galley),
    //         Shape::Mesh(mesh) => Self::from_mesh(mesh),
    //     }
    // }
    /// Stats for a laid-out text galley: its byte buffer, its row list,
    /// and each row's own allocations, all summed together.
    pub fn from_galley(galley: &Galley) -> Self {
        Self::from_slice(galley.text().as_bytes())
            + Self::from_slice(&galley.rows)
            + galley.rows.iter().map(Self::from_galley_row).sum()
    }
    // One galley row: its mesh plus its glyph list.
    fn from_galley_row(row: &crate::text::Row) -> Self {
        Self::from_mesh(&row.visuals.mesh) + Self::from_slice(&row.glyphs)
    }
    /// Stats for a mesh: its index and vertex buffers.
    pub fn from_mesh(mesh: &Mesh) -> Self {
        Self::from_slice(&mesh.indices) + Self::from_slice(&mesh.vertices)
    }
    /// Stats for one slice: one allocation of `len` homogeneous elements.
    pub fn from_slice<T>(slice: &[T]) -> Self {
        use std::mem::size_of;
        let element_size = size_of::<T>();
        Self {
            element_size: ElementSize::Homogeneous(element_size),
            num_allocs: 1,
            num_elements: slice.len(),
            num_bytes: slice.len() * element_size,
        }
    }
    // Panics if element sizes were mixed — callers must only ask for an
    // element count when it is meaningful.
    pub fn num_elements(&self) -> usize {
        assert!(self.element_size != ElementSize::Heterogenous);
        self.num_elements
    }
    pub fn num_allocs(&self) -> usize {
        self.num_allocs
    }
    pub fn num_bytes(&self) -> usize {
        self.num_bytes
    }
    /// Human-readable total size, e.g. `"1.23 MB"`.
    pub fn megabytes(&self) -> String {
        megabytes(self.num_bytes())
    }
    /// One aligned summary line for the debug UI, labelled with `what`.
    /// The element count is omitted for heterogenous aggregates.
    pub fn format(&self, what: &str) -> String {
        if self.num_allocs() == 0 {
            format!("{:6} {:14}", 0, what)
        } else if self.num_allocs() == 1 {
            format!(
                "{:6} {:14} {} 1 allocation",
                self.num_elements,
                what,
                self.megabytes()
            )
        } else if self.element_size != ElementSize::Heterogenous {
            format!(
                "{:6} {:14} {} {:3} allocations",
                self.num_elements(),
                what,
                self.megabytes(),
                self.num_allocs()
            )
        } else {
            format!(
                "{:6} {:14} {} {:3} allocations",
                "",
                what,
                self.megabytes(),
                self.num_allocs()
            )
        }
    }
}
/// Collected allocation statistics for shapes and meshes.
#[derive(Clone, Copy, Default)]
pub struct PaintStats {
    // All shapes, including those nested inside `Shape::Vec`.
    pub shapes: AllocInfo,
    pub shape_text: AllocInfo,
    pub shape_path: AllocInfo,
    pub shape_mesh: AllocInfo,
    pub shape_vec: AllocInfo,
    // Count of `Shape::Callback` entries (no allocation info to gather).
    pub num_callbacks: usize,
    pub text_shape_vertices: AllocInfo,
    pub text_shape_indices: AllocInfo,
    /// Number of separate clip rectangles
    pub clipped_primitives: AllocInfo,
    pub vertices: AllocInfo,
    pub indices: AllocInfo,
}
impl PaintStats {
    /// Walks a frame's clipped shapes, accumulating per-kind allocation
    /// stats (recursing into `Shape::Vec`).
    pub fn from_shapes(shapes: &[ClippedShape]) -> Self {
        let mut stats = Self::default();
        stats.shape_path.element_size = ElementSize::Heterogenous; // nicer display later
        stats.shape_vec.element_size = ElementSize::Heterogenous; // nicer display later
        stats.shapes = AllocInfo::from_slice(shapes);
        for ClippedShape(_, shape) in shapes {
            stats.add(shape);
        }
        stats
    }
    // Accumulates one shape into the per-kind buckets.
    fn add(&mut self, shape: &Shape) {
        match shape {
            Shape::Vec(shapes) => {
                // self += PaintStats::from_shapes(&shapes); // TODO
                self.shapes += AllocInfo::from_slice(shapes);
                self.shape_vec += AllocInfo::from_slice(shapes);
                // Recurse into the nested shapes.
                for shape in shapes {
                    self.add(shape);
                }
            }
            // These variants own no heap allocations worth counting.
            Shape::Noop
            | Shape::Circle { .. }
            | Shape::LineSegment { .. }
            | Shape::Rect { .. }
            | Shape::CubicBezier(_)
            | Shape::QuadraticBezier(_) => {}
            Shape::Path(path_shape) => {
                self.shape_path += AllocInfo::from_slice(&path_shape.points);
            }
            Shape::Text(text_shape) => {
                self.shape_text += AllocInfo::from_galley(&text_shape.galley);
                // Also tally the per-row tessellated mesh buffers.
                for row in &text_shape.galley.rows {
                    self.text_shape_indices += AllocInfo::from_slice(&row.visuals.mesh.indices);
                    self.text_shape_vertices += AllocInfo::from_slice(&row.visuals.mesh.vertices);
                }
            }
            Shape::Mesh(mesh) => {
                self.shape_mesh += AllocInfo::from_mesh(mesh);
            }
            Shape::Callback(_) => {
                self.num_callbacks += 1;
            }
        }
    }
    /// Adds stats for the tessellated output (post-clipping primitives).
    /// Builder-style: consumes and returns `self`.
    pub fn with_clipped_primitives(
        mut self,
        clipped_primitives: &[crate::ClippedPrimitive],
    ) -> Self {
        self.clipped_primitives += AllocInfo::from_slice(clipped_primitives);
        for clipped_primitive in clipped_primitives {
            if let Primitive::Mesh(mesh) = &clipped_primitive.primitive {
                self.vertices += AllocInfo::from_slice(&mesh.vertices);
                self.indices += AllocInfo::from_slice(&mesh.indices);
            }
        }
        self
    }
}
/// Formats a byte count as a megabyte string, e.g. `"1.23 MB"`.
fn megabytes(size: usize) -> String {
    let mb = size as f64 / 1e6;
    format!("{:.2} MB", mb)
}
| 30.049383 | 98 | 0.536154 |
3ad78f088f9c9f5fe26e92187f40e0cee21218d7 | 1,272 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty
// Don't panic on blocks without results
// There are several tests in this run-pass that raised
// when this bug was opened. The cases where the compiler
// panics before the fix have a comment.
#![feature(std_misc)]
use std::thunk::Thunk;
// Struct whose unit field gets initialized from a block expression in
// `main` — part of the regression surface for blocks without results.
struct S {x:()}
// Regression case: `match` arms whose blocks end in a statement
// (`{ let _a = a(); }`). The exact syntactic shape IS the test — the
// original bug made the compiler panic on such result-less blocks — so
// do not "clean it up".
fn test(slot: &mut Option<Thunk<(),Thunk>>) -> () {
    let a = slot.take();
    let _a = match a {
        // `{let .. a(); }` would break
        Some(a) => { let _a = a(); },
        None => (),
    };
}
// Regression case: a diverging `panic!(...)` as the `else` branch's
// tail expression — another shape that used to crash the compiler.
fn not(b: bool) -> bool {
    if b {
        !b
    } else {
        // `panic!(...)` would break
        panic!("Break the compiler");
    }
}
// Drives the regression cases: an empty block expression, a struct
// field initialized from a statement-only block, and the diverging
// branch in `not`. Each commented construct once panicked the compiler.
pub fn main() {
    // {} would break
    let _r = {};
    let mut slot = None;
    // `{ test(...); }` would break
    let _s : S = S{ x: { test(&mut slot); } };
    let _b = not(true);
}
| 24.941176 | 68 | 0.60456 |
eb3a2f6d110c846f6b86fb5f998031c8c1edd734 | 6,420 | use std::convert::{TryFrom, TryInto};
use thiserror::Error;
tonic::include_proto!("io.linkerd.proxy.net");
/// Indicates an IP address could not be decoded.
///
/// Produced by the `TryFrom` conversions below when the protobuf
/// message is missing or malformed.
#[derive(Clone, Debug, Error)]
#[error("invalid IP address")]
pub struct InvalidIpAddress;
/// Indicates an IP network could not be decoded.
#[derive(Clone, Debug, Error)]
pub enum InvalidIpNetwork {
    // The address part failed to decode.
    #[error("invalid IP address")]
    Ip(#[from] InvalidIpAddress),
    // The prefix length was out of range for the address family.
    #[error("invalid network prefix length")]
    PrefixLen(#[from] ipnet::PrefixLenError),
}
// === impl IpAddress ===
#[cfg(feature = "net")]
impl TryFrom<IpAddress> for std::net::IpAddr {
    type Error = InvalidIpAddress;
    fn try_from(ip: IpAddress) -> Result<Self, Self::Error> {
        use ip_address::Ip;
        // A missing oneof field is as invalid as a malformed one.
        let ip = ip.ip.ok_or(InvalidIpAddress)?;
        match ip {
            Ip::Ipv4(octets) => Ok(std::net::Ipv4Addr::from(octets).into()),
            Ip::Ipv6(v6) => std::net::Ipv6Addr::try_from(v6)
                .map(Into::into)
                .map_err(|_| InvalidIpAddress),
        }
    }
}
#[cfg(feature = "net")]
impl<T> From<T> for IpAddress
where
ip_address::Ip: From<T>,
{
#[inline]
fn from(ip: T) -> Self {
Self {
ip: Some(ip.into()),
}
}
}
impl From<std::net::IpAddr> for IpAddress {
fn from(ip: std::net::IpAddr) -> Self {
match ip {
std::net::IpAddr::V4(v4) => Self {
ip: Some(v4.into()),
},
std::net::IpAddr::V6(v6) => Self {
ip: Some(v6.into()),
},
}
}
}
impl From<[u8; 4]> for ip_address::Ip {
    /// Packs the four network-order octets into the big-endian `u32`
    /// carried by the protobuf `Ipv4` variant.
    fn from(octets: [u8; 4]) -> Self {
        // Equivalent to the previous manual shift-and-or chain
        // (octet 0 in the most significant byte).
        ip_address::Ip::Ipv4(u32::from_be_bytes(octets))
    }
}
// === impl IpNetwork ===
impl TryFrom<IpNetwork> for ipnet::IpNet {
    type Error = InvalidIpNetwork;
    /// Decodes a protobuf `IpNetwork` into an `ipnet::IpNet`, validating
    /// both the address payload and the prefix length.
    fn try_from(net: IpNetwork) -> Result<Self, Self::Error> {
        let ip: std::net::IpAddr = net
            .ip
            .ok_or(InvalidIpNetwork::Ip(InvalidIpAddress))?
            .try_into()
            .map_err(InvalidIpNetwork::Ip)?;
        // The wire field is u32; anything that does not fit in a u8 cannot
        // be a valid prefix length for either address family.
        let prefix_len = u8::try_from(net.prefix_len)
            .map_err(|_| InvalidIpNetwork::PrefixLen(ipnet::PrefixLenError))?;
        match ip {
            std::net::IpAddr::V4(addr) => ipnet::Ipv4Net::new(addr, prefix_len)
                .map(Into::into)
                .map_err(InvalidIpNetwork::PrefixLen),
            std::net::IpAddr::V6(addr) => ipnet::Ipv6Net::new(addr, prefix_len)
                .map(Into::into)
                .map_err(InvalidIpNetwork::PrefixLen),
        }
    }
}
impl<T> From<(T, u8)> for IpNetwork
where
IpAddress: From<T>,
{
#[inline]
fn from((ip, prefix_len): (T, u8)) -> Self {
Self {
ip: Some(ip.into()),
prefix_len: prefix_len.into(),
}
}
}
impl From<ipnet::IpNet> for IpNetwork {
fn from(net: ipnet::IpNet) -> Self {
IpNetwork {
ip: Some(net.addr().into()),
prefix_len: net.prefix_len().into(),
}
}
}
// === impl ip_address::Ip ===
impl From<std::net::Ipv4Addr> for ip_address::Ip {
#[inline]
fn from(v4: std::net::Ipv4Addr) -> Self {
Self::from(v4.octets())
}
}
impl<T> From<T> for ip_address::Ip
where
IPv6: From<T>,
{
#[inline]
fn from(t: T) -> Self {
ip_address::Ip::Ipv6(IPv6::from(t))
}
}
// === impl IPv6 ===
impl From<[u8; 16]> for IPv6 {
fn from(octets: [u8; 16]) -> Self {
let first = (u64::from(octets[0]) << 56)
+ (u64::from(octets[1]) << 48)
+ (u64::from(octets[2]) << 40)
+ (u64::from(octets[3]) << 32)
+ (u64::from(octets[4]) << 24)
+ (u64::from(octets[5]) << 16)
+ (u64::from(octets[6]) << 8)
+ u64::from(octets[7]);
let last = (u64::from(octets[8]) << 56)
+ (u64::from(octets[9]) << 48)
+ (u64::from(octets[10]) << 40)
+ (u64::from(octets[11]) << 32)
+ (u64::from(octets[12]) << 24)
+ (u64::from(octets[13]) << 16)
+ (u64::from(octets[14]) << 8)
+ u64::from(octets[15]);
Self { first, last }
}
}
impl From<std::net::Ipv6Addr> for IPv6 {
#[inline]
fn from(v6: std::net::Ipv6Addr) -> Self {
Self::from(v6.octets())
}
}
impl From<IPv6> for std::net::Ipv6Addr {
    /// Splits the two big-endian `u64` halves of the protobuf message back
    /// into the eight 16-bit segments of an IPv6 address; each `as u16`
    /// cast keeps the low 16 bits of the shifted value.
    fn from(ip: IPv6) -> std::net::Ipv6Addr {
        std::net::Ipv6Addr::new(
            (ip.first >> 48) as u16,
            (ip.first >> 32) as u16,
            (ip.first >> 16) as u16,
            (ip.first) as u16,
            (ip.last >> 48) as u16,
            (ip.last >> 32) as u16,
            (ip.last >> 16) as u16,
            (ip.last) as u16,
        )
    }
}
// === impl TcpAddress ===
impl From<std::net::SocketAddr> for TcpAddress {
fn from(sa: std::net::SocketAddr) -> TcpAddress {
TcpAddress {
ip: Some(sa.ip().into()),
port: u32::from(sa.port()),
}
}
}
impl TryFrom<TcpAddress> for std::net::SocketAddr {
    type Error = InvalidIpAddress;
    /// Converts a `TcpAddress` message into a socket address.
    ///
    /// Fails with `InvalidIpAddress` when the message carries no IP or the
    /// IP payload cannot be decoded; the port keeps only its low 16 bits,
    /// matching the wire encoding.
    fn try_from(tcp: TcpAddress) -> Result<std::net::SocketAddr, Self::Error> {
        let ip = tcp.ip.ok_or(InvalidIpAddress)?;
        let ip = std::net::IpAddr::try_from(ip)?;
        Ok(std::net::SocketAddr::from((ip, tcp.port as u16)))
    }
}
#[cfg(feature = "arbitrary")]
mod arbitary {
use super::*;
use quickcheck::*;
impl Arbitrary for IpAddress {
fn arbitrary(g: &mut Gen) -> Self {
IpAddress {
ip: Arbitrary::arbitrary(g),
}
}
}
impl Arbitrary for ip_address::Ip {
fn arbitrary(g: &mut Gen) -> Self {
if bool::arbitrary(g) {
ip_address::Ip::Ipv4(Arbitrary::arbitrary(g))
} else {
ip_address::Ip::Ipv6(IPv6::arbitrary(g))
}
}
}
impl Arbitrary for IPv6 {
fn arbitrary(g: &mut Gen) -> Self {
IPv6 {
first: Arbitrary::arbitrary(g),
last: Arbitrary::arbitrary(g),
}
}
}
}
| 26.097561 | 82 | 0.499844 |
1e450bae4c03c717364a73295d0cddef5ea3e976 | 4,940 | // External imports
use num::{rational::Ratio, BigUint};
// Workspace imports
use zksync_types::{Token, TokenId, TokenLike, TokenPrice};
use zksync_utils::{big_decimal_to_ratio, ratio_to_big_decimal};
// Local imports
use crate::tests::db_test;
use crate::{
tokens::{TokensSchema, STORED_USD_PRICE_PRECISION},
QueryResult, StorageProcessor,
};
/// Verifies the token save & load mechanism.
#[db_test]
async fn tokens_storage(mut storage: StorageProcessor<'_>) -> QueryResult<()> {
// There should be only Ethereum main token by default.
assert_eq!(storage.tokens_schema().get_count().await?, 1);
let tokens = TokensSchema(&mut storage)
.load_tokens()
.await
.expect("Load tokens query failed");
assert_eq!(tokens.len(), 1);
let eth_token = Token {
id: 0,
address: "0000000000000000000000000000000000000000".parse().unwrap(),
symbol: "ETH".into(),
decimals: 18,
};
assert_eq!(tokens[&0], eth_token);
// Add two tokens.
let token_a = Token {
id: 1,
address: "0000000000000000000000000000000000000001".parse().unwrap(),
symbol: "ABC".into(),
decimals: 9,
};
let token_b = Token {
id: 2,
address: "0000000000000000000000000000000000000002".parse().unwrap(),
symbol: "DEF".into(),
decimals: 6,
};
TokensSchema(&mut storage)
.store_token(token_a.clone())
.await
.expect("Store tokens query failed");
TokensSchema(&mut storage)
.store_token(token_b.clone())
.await
.expect("Store tokens query failed");
// The count is updated.
assert_eq!(storage.tokens_schema().get_count().await?, 3);
// Load tokens again.
let tokens = TokensSchema(&mut storage)
.load_tokens()
.await
.expect("Load tokens query failed");
assert_eq!(tokens.len(), 3);
assert_eq!(tokens[ð_token.id], eth_token);
assert_eq!(tokens[&token_a.id], token_a);
assert_eq!(tokens[&token_b.id], token_b);
let token_b_by_id = TokensSchema(&mut storage)
.get_token(TokenLike::Id(token_b.id))
.await
.expect("get token query failed")
.expect("token by id not found");
assert_eq!(token_b, token_b_by_id);
let token_b_by_address = TokensSchema(&mut storage)
.get_token(TokenLike::Address(token_b.address))
.await
.expect("get token query failed")
.expect("token by address not found");
assert_eq!(token_b, token_b_by_address);
let token_b_by_symbol = TokensSchema(&mut storage)
.get_token(TokenLike::Symbol(token_b.symbol.clone()))
.await
.expect("get token query failed")
.expect("token by symbol not found");
assert_eq!(token_b, token_b_by_symbol);
// Now check that storing the token that already exists is the same as updating it.
let token_c = Token {
id: 2,
address: "0000000000000000000000000000000000000008".parse().unwrap(),
symbol: "BAT".into(),
decimals: 6,
};
TokensSchema(&mut storage)
.store_token(token_c.clone())
.await
.expect("Store tokens query failed");
// Load updated token.
let token_c_by_id = TokensSchema(&mut storage)
.get_token(TokenLike::Id(token_c.id))
.await
.expect("get token query failed")
.expect("token by id not found");
assert_eq!(token_c, token_c_by_id);
Ok(())
}
/// Checks the store/load routine for `ticker_price` table.
#[db_test]
async fn test_ticker_price(mut storage: StorageProcessor<'_>) -> QueryResult<()> {
    const TOKEN_ID: TokenId = 0;
    // No entry exists yet.
    let loaded = storage
        .tokens_schema()
        .get_historical_ticker_price(TOKEN_ID)
        .await?;
    assert!(loaded.is_none());
    // Store new price.
    // `usd_price` is not a finite decimal, so we expect it to be rounded
    // up to `STORED_USD_PRICE_PRECISION` digits.
    let price = TokenPrice {
        // 4/9 = 0.444… repeats forever, forcing the truncation path.
        usd_price: Ratio::new(BigUint::from(4u32), BigUint::from(9u32)),
        last_updated: chrono::Utc::now(),
    };
    storage
        .tokens_schema()
        .update_historical_ticker_price(TOKEN_ID, price.clone())
        .await?;
    // Load it again.
    let loaded = storage
        .tokens_schema()
        .get_historical_ticker_price(TOKEN_ID)
        .await?
        .expect("couldn't load token price");
    // During the load the price was converted back to ratio.
    // Round-trip the expected value through the same decimal precision the
    // storage layer uses, so the comparison tolerates the truncation.
    let expected_stored_decimal =
        ratio_to_big_decimal(&price.usd_price, STORED_USD_PRICE_PRECISION);
    let expected_price = big_decimal_to_ratio(&expected_stored_decimal).unwrap();
    assert_eq!(loaded.usd_price, expected_price);
    // Comparing this fields directly might fail, use timestamps.
    assert_eq!(
        loaded.last_updated.timestamp(),
        price.last_updated.timestamp()
    );
    Ok(())
}
| 32.287582 | 87 | 0.640486 |
9c6430c0039a3e832b30205ef70e233da7236b0e | 3,482 | //! Derive `Write`.
use crate::internal::get_root_path;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, quote_spanned, ToTokens};
use syn::{
parse::{Parse, ParseStream, Result as ParseResult},
parse_quote,
spanned::Spanned,
Data, DeriveInput, Fields, GenericParam, Generics, Ident, Index, Path,
};
pub struct DeriveWrite {
ident: Ident,
generics: Generics,
data: Data,
root_path: Path,
}
impl Parse for DeriveWrite {
    /// Parses the `derive` input, resolving the crate root path from the
    /// item's attributes and adding a `#root::Write` bound to every generic
    /// type parameter so the generated impl compiles for generic containers.
    fn parse(input: ParseStream) -> ParseResult<Self> {
        let DeriveInput {
            attrs,
            ident,
            mut generics,
            data,
            ..
        } = input.parse()?;
        let root_path = get_root_path(&attrs);
        // Every type parameter must itself be writable for the derived
        // field-by-field `write` calls to type-check.
        for param in &mut generics.params {
            if let GenericParam::Type(ref mut type_param) = *param {
                type_param.bounds.push(parse_quote!(#root_path::Write));
            }
        }
        Ok(Self {
            ident,
            generics,
            data,
            root_path,
        })
    }
}
impl ToTokens for DeriveWrite {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let root = &self.root_path;
let name = &self.ident;
let (impl_generics, ty_generics, where_clause) =
self.generics.split_for_impl();
let call_site = ::proc_macro2::Span::call_site();
let var = quote!(self);
let writes = match &self.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let recurse = fields.named.iter().map(|f| {
let name = &f.ident;
let access = quote_spanned!(call_site => #var.#name);
quote_spanned! { f.span() =>
#root::Write::write(&#access, bytes, pos)?;
}
});
quote! {
#(#recurse)*
Ok(())
}
}
Fields::Unnamed(ref fields) => {
let recurse =
fields.unnamed.iter().enumerate().map(|(i, f)| {
let index = Index {
index: i as u32,
span: call_site,
};
let access =
quote_spanned!(call_site => #var.#index);
quote_spanned! { f.span() =>
#root::Write::write(&#access, bytes, pos)?;
}
});
quote! {
#(#recurse)*
Ok(())
}
}
Fields::Unit => {
quote! {
Ok(())
}
}
},
Data::Enum(_) | Data::Union(_) => unimplemented!(),
};
let expanded = quote! {
#[automatically_derived]
#[allow(unused_qualifications)]
impl #impl_generics #root::Write for #name #ty_generics #where_clause {
#[inline]
fn write(&self, bytes: &mut [u8], pos: &mut usize) -> Result<(), #root::WriteError> {
#writes
}
}
};
expanded.to_tokens(tokens);
}
}
| 32.542056 | 101 | 0.42332 |
620487b86cc52737d6fb9074c1de85e39e7d0859 | 2,334 | use std::marker::PhantomData;
use bitvec::prelude::{BitVec, Lsb0};
use super::super::PrimitiveFixedWidthEncode;
use super::BlockBuilder;
/// Encodes fixed-width data into a block, with null element support.
///
/// The layout is fixed-width data and a u8 bitmap, concatenated together.
pub struct PlainPrimitiveNullableBlockBuilder<T: PrimitiveFixedWidthEncode> {
data: Vec<u8>,
bitmap: BitVec<Lsb0, u8>,
target_size: usize,
_phantom: PhantomData<T>,
}
impl<T: PrimitiveFixedWidthEncode> PlainPrimitiveNullableBlockBuilder<T> {
    /// Creates an empty builder targeting blocks of roughly `target_size`
    /// bytes; both the data buffer and the validity bitmap are
    /// pre-allocated to that capacity.
    pub fn new(target_size: usize) -> Self {
        Self {
            data: Vec::with_capacity(target_size),
            bitmap: BitVec::<Lsb0, u8>::with_capacity(target_size),
            target_size,
            _phantom: PhantomData,
        }
    }
}
impl<T: PrimitiveFixedWidthEncode> BlockBuilder<T::ArrayType>
    for PlainPrimitiveNullableBlockBuilder<T>
{
    /// Appends one value; `None` is encoded as the type's default value
    /// plus a `false` bit in the validity bitmap.
    fn append(&mut self, item: Option<&T>) {
        if let Some(item) = item {
            item.encode(&mut self.data);
            self.bitmap.push(true);
        } else {
            // NOTE(review): `DEAFULT_VALUE` looks like a typo for
            // `DEFAULT_VALUE` in the `PrimitiveFixedWidthEncode` trait —
            // fix belongs at the trait definition.
            T::DEAFULT_VALUE.encode(&mut self.data);
            self.bitmap.push(false);
        }
    }
    /// Estimated serialized size: data bytes plus the bitmap rounded up
    /// to whole bytes.
    fn estimated_size(&self) -> usize {
        let bitmap_byte_len = (self.bitmap.len() + 7) / 8;
        self.data.len() + bitmap_byte_len
    }
    /// A block is full once one more fixed-width value (plus its validity
    /// bit) would overflow `target_size`; an empty block never finishes.
    fn should_finish(&self, _next_item: &Option<&T>) -> bool {
        !self.data.is_empty() && self.estimated_size() + 1 + T::WIDTH > self.target_size
    }
    /// Serializes the block: encoded values followed by the raw bitmap bytes.
    fn finish(self) -> Vec<u8> {
        let mut data = self.data;
        data.extend(self.bitmap.as_raw_slice().iter());
        data
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_build_i32() {
        // Target size 128 bytes: four i32 appends stay well under the limit.
        let mut builder = PlainPrimitiveNullableBlockBuilder::<i32>::new(128);
        builder.append(Some(&1));
        builder.append(None);
        builder.append(Some(&3));
        builder.append(Some(&4));
        // 4 values * 4 bytes + 1 bitmap byte = 17.
        assert_eq!(builder.estimated_size(), 17);
        assert!(!builder.should_finish(&Some(&5)));
        let data = builder.finish();
        // bitmap should be 1011 and Lsb0, so u8 will be 0b1101 = 13
        let expected_data: Vec<u8> = vec![1, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 13];
        assert_eq!(data, expected_data);
    }
}
| 29.544304 | 94 | 0.604113 |
d96db9436f2c69b5dd39f1ef1291b159df9b9a85 | 144 | pub(crate) mod list;
mod login;
pub(crate) mod register;
pub(crate) mod token;
pub(crate) use login::login;
pub(crate) use register::register;
| 18 | 34 | 0.736111 |
d6ff89473ccb4ddab4937ef382364d2e5abf201d | 5,544 | mod utils;
use wasm_bindgen::prelude::*;
use web_sys::console;
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
// JS doesn't have a chars type which means:
// - The _c argument is the first char of a JS string.
// - The char returned will be a JS string.
#[wasm_bindgen]
pub fn char_example(_c: char) -> char {
'🚀'
}
#[wasm_bindgen]
pub fn string_example(s: String) -> String {
format!("Hello {}", s)
}
// str cannot be used as a return type.
// This is because we can't return borrowed references with the wasm_bindgen macro.
#[wasm_bindgen]
pub fn str_example(s: &str) -> String {
format!("Hello {}", s)
}
#[wasm_bindgen]
pub fn number_example(n: i32) -> i32 { // assume the same for u32, usize, etc.
n+100
}
#[wasm_bindgen]
pub fn bool_example(_b: bool) -> bool {
true
}
// `Box<[JsValue]>` are the representation for a JS array object.
// When it comes to Js Arrays:
// - They are iterable.
// - Can contain multiple types by being of type JsValue (strictly typed arrays exist for numbers).
// - Don't really support N-dimensional arrays and are expensive to work with.
#[wasm_bindgen]
pub fn mixed_array_example(array: Box<[JsValue]>) -> Box<[JsValue]> {
for value in array.iter() {
// compute things...
}
vec![
"Hello".into(),
512.into(),
JsValue::NULL,
JsValue::UNDEFINED,
61.20.into(),
]
.into_boxed_slice()
}
// Typed arrays are only available for number types.
// For example, the function below will return a JS Int32Array type.
#[wasm_bindgen]
pub fn typed_array_example(_array: Box<[i32]>) -> Box<[i32]> {
vec![1, 2, 3, 4, 5, 6, 7].into_boxed_slice()
}
// When it comes to Option:
// - Some returns the value inside.
// - None returns a JS undefined.
#[wasm_bindgen(catch)]
pub fn option_example() -> Option<i32> {
None
}
// When it comes to Result
// - Result<T, JsValue> is the only supported signature. T must be convertible to a JsValue.
// - #[wasm_bindgen(catch)] must be used when returning a result.
// - Err will be equivalent to a JS thrown error.
// - Ok will return the value inside.
#[wasm_bindgen]
pub fn result_example() -> Result<i32, JsValue> {
// With the wasm prelude imported, we can convert most common types by calling .into()
Err("Look Pa, I'm throwing a JS error!".into())
}
// When it comes to Enums:
// - They are C styled.
// - JS represents them through an object with a number for each variant.
#[wasm_bindgen]
pub enum ExampleEnum {
    Yes,
    No,
}
// Returns true exactly when the caller picked `Yes`.
#[wasm_bindgen]
pub fn verify_enum_choice(choice: ExampleEnum) -> bool {
    matches!(choice, ExampleEnum::Yes)
}
// When it comes to Structs:
// - Cannot contain lifetimes or type parameters.
// - Each field value must impl the Copy trait.
#[wasm_bindgen]
pub struct ExampleStruct {
pub value: i32,
}
// For struct impl, we have the option for struct methods and type-level functions.
// JS handles structs by creating a JS object with a pointer (i.o.w. we can use references!).
#[wasm_bindgen]
impl ExampleStruct {
pub fn new(value: i32) -> ExampleStruct {
ExampleStruct { value }
}
pub fn read_method(&self) -> i32 {
self.value
}
pub fn write_method(&mut self, value: i32) {
self.value = value;
}
pub fn transfer_ownership(self) -> ExampleStruct {
self
}
}
// Binding JS involves a bit of boilerplate because we have to specify each name
// and signature to bind.
#[wasm_bindgen]
extern "C" {
// Bindings must be named as their JS equivalent
fn alert(s: &str);
// A different name can be specified as long as the original name is passed to the macro.
#[wasm_bindgen(js_name = prompt)]
fn ask(s: &str) -> String;
// Methods can be from any js namespace.
#[wasm_bindgen(js_namespace = console)]
fn log(s: &str);
// Using a different name allows us to specify various signatures.
#[wasm_bindgen(js_namespace = console, js_name = log)]
fn log_num(n: i32);
//* JS Class example *\\
// The process is a little verbose because create a binding for
// each part of the class we want (class name, constructor, methods, setters, getters).
type Coordinate;
#[wasm_bindgen(constructor)]
fn new(x: i32, y: i32) -> Coordinate;
// methods must match the naming in the class declaration.
#[wasm_bindgen(method)]
fn printValues(this: &Coordinate) -> String;
// getters are named as the property we want.
#[wasm_bindgen(getter, method)]
fn x(this: &Coordinate) -> i32;
// setters are named the same as getters but with a `set_` prefix.
#[wasm_bindgen(setter, method)]
fn set_x(this: &Coordinate, x: i32);
}
#[wasm_bindgen]
pub fn manual_bindings_example() {
alert("Hey buddy!");
log(&ask("Tell me about your day!"));
let coordinates = Coordinate::new(-4, 15);
log_num(coordinates.x()); // prints -4
coordinates.set_x(coordinates.x() * 2);
log(&coordinates.printValues()); // prints (-8, 15)
}
#[wasm_bindgen]
pub fn print_things() {
// console has multiple log_x functions that represent how many items are being printed.
// log_x takes in a reference to a JsValue so we need to convert the values we want to print.
console::log_1(&"Printing from Rust!!".into());
console::log_2(&"Numbers: ".into(), &1234.into());
} | 28.725389 | 99 | 0.664322 |
1836c5415d29f52d36759cde8909640fc6283618 | 12,542 | use crate::whole_stream_command::{whole_stream_command, Command};
use indexmap::IndexMap;
use nu_errors::ShellError;
use nu_parser::ParserScope;
use nu_protocol::{hir::Block, Signature, Value};
use nu_source::Spanned;
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Scope {
frames: Arc<parking_lot::Mutex<Vec<ScopeFrame>>>,
}
impl Default for Scope {
fn default() -> Self {
Self::new()
}
}
impl Scope {
pub fn new() -> Scope {
Scope {
frames: Arc::new(parking_lot::Mutex::new(vec![ScopeFrame::new()])),
}
}
pub fn get_command(&self, name: &str) -> Option<Command> {
for frame in self.frames.lock().iter().rev() {
if let Some(command) = frame.get_command(name) {
return Some(command);
}
}
None
}
pub fn get_aliases(&self) -> IndexMap<String, Vec<Spanned<String>>> {
let mut output = IndexMap::new();
for frame in self.frames.lock().iter().rev() {
for v in frame.aliases.iter() {
if !output.contains_key(v.0) {
output.insert(v.0.clone(), v.1.clone());
}
}
}
output
}
pub fn get_commands(&self) -> IndexMap<String, Signature> {
let mut output = IndexMap::new();
for frame in self.frames.lock().iter().rev() {
for (name, command) in frame.commands.iter() {
if !output.contains_key(name) {
output.insert(name.clone(), command.signature());
}
}
}
output
}
pub fn get_aliases_with_name(&self, name: &str) -> Option<Vec<Vec<Spanned<String>>>> {
let aliases: Vec<_> = self
.frames
.lock()
.iter()
.rev()
.filter_map(|frame| frame.aliases.get(name).cloned())
.collect();
if aliases.is_empty() {
None
} else {
Some(aliases)
}
}
pub fn get_custom_commands_with_name(&self, name: &str) -> Option<Vec<Arc<Block>>> {
let custom_commands: Vec<_> = self
.frames
.lock()
.iter()
.rev()
.filter_map(|frame| frame.custom_commands.get(name).cloned())
.collect();
if custom_commands.is_empty() {
None
} else {
Some(custom_commands)
}
}
pub fn add_command(&self, name: String, command: Command) {
// Note: this is assumed to always be true, as there is always a global top frame
if let Some(frame) = self.frames.lock().last_mut() {
frame.add_command(name, command)
}
}
pub fn get_alias_names(&self) -> Vec<String> {
let mut names = vec![];
for frame in self.frames.lock().iter() {
let mut frame_command_names = frame.get_alias_names();
names.append(&mut frame_command_names);
}
names.dedup();
names.sort();
names
}
pub fn get_command_names(&self) -> Vec<String> {
let mut names = vec![];
for frame in self.frames.lock().iter() {
let mut frame_command_names = frame.get_command_names();
frame_command_names.extend(frame.get_alias_names());
frame_command_names.extend(frame.get_custom_command_names());
names.append(&mut frame_command_names);
}
names.sort();
names.dedup();
names
}
pub fn len(&self) -> usize {
self.frames.lock().len()
}
pub fn is_empty(&self) -> bool {
self.frames.lock().is_empty()
}
fn has_cmd_helper(&self, name: &str, f: fn(&ScopeFrame, &str) -> bool) -> bool {
self.frames.lock().iter().any(|frame| f(frame, name))
}
pub fn has_command(&self, name: &str) -> bool {
self.has_cmd_helper(name, ScopeFrame::has_command)
}
pub fn has_custom_command(&self, name: &str) -> bool {
self.has_cmd_helper(name, ScopeFrame::has_custom_command)
}
pub fn has_alias(&self, name: &str) -> bool {
self.has_cmd_helper(name, ScopeFrame::has_alias)
}
pub fn expect_command(&self, name: &str) -> Result<Command, ShellError> {
if let Some(c) = self.get_command(name) {
Ok(c)
} else {
Err(ShellError::untagged_runtime_error(format!(
"Missing command '{}'",
name
)))
}
}
pub fn get_vars(&self) -> IndexMap<String, Value> {
//FIXME: should this be an iterator?
let mut output = IndexMap::new();
for frame in self.frames.lock().iter().rev() {
for v in frame.vars.iter() {
if !output.contains_key(v.0) {
output.insert(v.0.clone(), v.1.clone());
}
}
}
output
}
pub fn get_env_vars(&self) -> IndexMap<String, String> {
//FIXME: should this be an iterator?
let mut output = IndexMap::new();
for frame in self.frames.lock().iter().rev() {
for v in frame.env.iter() {
if !output.contains_key(v.0) {
output.insert(v.0.clone(), v.1.clone());
}
}
}
output
}
pub fn get_env(&self, name: &str) -> Option<String> {
for frame in self.frames.lock().iter().rev() {
if let Some(v) = frame.env.get(name) {
return Some(v.clone());
}
}
None
}
pub fn get_var(&self, name: &str) -> Option<Value> {
for frame in self.frames.lock().iter().rev() {
if let Some(v) = frame.vars.get(name) {
return Some(v.clone());
}
}
None
}
pub fn add_var(&self, name: impl Into<String>, value: Value) {
if let Some(frame) = self.frames.lock().last_mut() {
frame.vars.insert(name.into(), value);
}
}
pub fn add_vars(&self, vars: &IndexMap<String, Value>) {
if let Some(frame) = self.frames.lock().last_mut() {
frame
.vars
.extend(vars.iter().map(|(s, v)| (s.clone(), v.clone())))
}
}
pub fn add_env_var(&self, name: impl Into<String>, value: String) {
if let Some(frame) = self.frames.lock().last_mut() {
frame.env.insert(name.into(), value);
}
}
pub fn add_env(&self, env_vars: IndexMap<String, String>) {
if let Some(frame) = self.frames.lock().last_mut() {
frame.env.extend(env_vars)
}
}
pub fn add_env_to_base(&self, env_vars: IndexMap<String, String>) {
if let Some(frame) = self.frames.lock().first_mut() {
frame.env.extend(env_vars)
}
}
pub fn add_env_var_to_base(&self, name: impl Into<String>, value: String) {
if let Some(frame) = self.frames.lock().first_mut() {
frame.env.insert(name.into(), value);
}
}
pub fn set_exit_scripts(&self, scripts: Vec<String>) {
if let Some(frame) = self.frames.lock().last_mut() {
frame.exitscripts = scripts
}
}
pub fn enter_scope_with_tag(&self, tag: String) {
self.frames.lock().push(ScopeFrame::with_tag(tag));
}
//Removes the scopeframe with tag.
pub fn exit_scope_with_tag(&self, tag: &str) {
let mut frames = self.frames.lock();
let tag = Some(tag);
if let Some(i) = frames.iter().rposition(|f| f.tag.as_deref() == tag) {
frames.remove(i);
}
}
pub fn get_exitscripts_of_frame_with_tag(&self, tag: &str) -> Option<Vec<String>> {
let frames = self.frames.lock();
let tag = Some(tag);
frames.iter().find_map(|f| {
if f.tag.as_deref() == tag {
Some(f.exitscripts.clone())
} else {
None
}
})
}
pub fn get_frame_with_tag(&self, tag: &str) -> Option<ScopeFrame> {
let frames = self.frames.lock();
let tag = Some(tag);
frames.iter().rev().find_map(|f| {
if f.tag.as_deref() == tag {
Some(f.clone())
} else {
None
}
})
}
pub fn update_frame_with_tag(&self, frame: ScopeFrame, tag: &str) -> Result<(), ShellError> {
let mut frames = self.frames.lock();
let tag = Some(tag);
for f in frames.iter_mut().rev() {
if f.tag.as_deref() == tag {
*f = frame;
return Ok(());
}
}
// Frame not found, return err
Err(ShellError::untagged_runtime_error(format!(
"Can't update frame with tag {:?}. No such frame present!",
tag
)))
}
}
impl ParserScope for Scope {
fn get_signature(&self, name: &str) -> Option<nu_protocol::Signature> {
self.get_command(name).map(|x| x.signature())
}
fn has_signature(&self, name: &str) -> bool {
self.get_command(name).is_some()
}
fn add_definition(&self, block: Arc<Block>) {
if let Some(frame) = self.frames.lock().last_mut() {
let name = block.params.name.clone();
frame.custom_commands.insert(name.clone(), block.clone());
frame.commands.insert(name, whole_stream_command(block));
}
}
fn get_definitions(&self) -> Vec<Arc<Block>> {
let mut blocks = vec![];
if let Some(frame) = self.frames.lock().last() {
for (_, custom_command) in &frame.custom_commands {
blocks.push(custom_command.clone());
}
}
blocks
}
fn get_alias(&self, name: &str) -> Option<Vec<Spanned<String>>> {
for frame in self.frames.lock().iter().rev() {
if let Some(x) = frame.aliases.get(name) {
return Some(x.clone());
}
}
None
}
fn add_alias(&self, name: &str, replacement: Vec<Spanned<String>>) {
// Note: this is assumed to always be true, as there is always a global top frame
if let Some(frame) = self.frames.lock().last_mut() {
frame.aliases.insert(name.to_string(), replacement);
}
}
fn enter_scope(&self) {
self.frames.lock().push(ScopeFrame::new());
}
fn exit_scope(&self) {
self.frames.lock().pop();
}
}
/// An evaluation scope. Scopes map variable names to Values and aid in evaluating blocks and expressions.
#[derive(Debug, Clone)]
pub struct ScopeFrame {
pub vars: IndexMap<String, Value>,
pub env: IndexMap<String, String>,
pub commands: IndexMap<String, Command>,
pub custom_commands: IndexMap<String, Arc<Block>>,
pub aliases: IndexMap<String, Vec<Spanned<String>>>,
///Optional tag to better identify this scope frame later
pub tag: Option<String>,
pub exitscripts: Vec<String>,
}
impl Default for ScopeFrame {
fn default() -> Self {
ScopeFrame::new()
}
}
impl ScopeFrame {
pub fn has_command(&self, name: &str) -> bool {
self.commands.contains_key(name)
}
pub fn has_custom_command(&self, name: &str) -> bool {
self.custom_commands.contains_key(name)
}
pub fn has_alias(&self, name: &str) -> bool {
self.aliases.contains_key(name)
}
pub fn get_alias_names(&self) -> Vec<String> {
self.aliases.keys().map(|x| x.to_string()).collect()
}
pub fn get_command_names(&self) -> Vec<String> {
self.commands.keys().map(|x| x.to_string()).collect()
}
pub fn get_custom_command_names(&self) -> Vec<String> {
self.custom_commands.keys().map(|x| x.to_string()).collect()
}
pub fn add_command(&mut self, name: String, command: Command) {
self.commands.insert(name, command);
}
pub fn get_command(&self, name: &str) -> Option<Command> {
self.commands.get(name).cloned()
}
pub fn new() -> ScopeFrame {
ScopeFrame {
vars: IndexMap::new(),
env: IndexMap::new(),
commands: IndexMap::new(),
custom_commands: IndexMap::new(),
aliases: IndexMap::new(),
tag: None,
exitscripts: Vec::new(),
}
}
pub fn with_tag(tag: String) -> ScopeFrame {
let mut scope = ScopeFrame::new();
scope.tag = Some(tag);
scope
}
}
| 28.634703 | 106 | 0.536517 |
0386f3369833a73403bce53dc79c561ecfe2bba0 | 85,114 | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
#![warn(missing_docs)]
use std::cmp::Ordering;
use std::collections::HashSet;
use std::fmt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use mz_lowertest::MzReflect;
use mz_ore::collections::CollectionExt;
use mz_ore::id_gen::IdGen;
use mz_ore::stack::{maybe_grow, CheckedRecursion, RecursionGuard, RecursionLimitError};
use mz_repr::adt::numeric::NumericMaxScale;
use mz_repr::{ColumnName, ColumnType, Datum, Diff, RelationType, Row, ScalarType};
use self::func::{AggregateFunc, TableFunc};
use crate::explain::ViewExplanation;
use crate::{
func as scalar_func, DummyHumanizer, EvalError, ExprHumanizer, GlobalId, Id, LocalId,
MirScalarExpr, UnaryFunc, VariadicFunc,
};
pub mod canonicalize;
pub mod func;
pub mod join_input_mapper;
/// A recursion limit to be used for stack-safe traversals of [`MirRelationExpr`] trees.
///
/// The recursion limit must be large enough to accomodate for the linear representation
/// of some pathological but frequently occurring query fragments.
///
/// For example, in MIR we could have long chains of
/// - (1) `Let` bindings,
/// - (2) `CallBinary` calls with associative functions such as `OR` and `+`
///
/// Until we fix those, we need to stick with the larger recursion limit.
pub const RECURSION_LIMIT: usize = 2048;
/// An abstract syntax tree which defines a collection.
///
/// The AST is meant reflect the capabilities of the `differential_dataflow::Collection` type,
/// written generically enough to avoid run-time compilation work.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash, MzReflect)]
pub enum MirRelationExpr {
    /// A constant relation containing specified rows.
    ///
    /// The runtime memory footprint of this operator is zero.
    Constant {
        /// Rows of the constant collection and their multiplicities.
        rows: Result<Vec<(Row, Diff)>, EvalError>,
        /// Schema of the collection.
        typ: RelationType,
    },
    /// Get an existing dataflow.
    ///
    /// The runtime memory footprint of this operator is zero.
    Get {
        /// The identifier for the collection to load.
        #[mzreflect(ignore)]
        id: Id,
        /// Schema of the collection.
        typ: RelationType,
    },
    /// Introduce a temporary dataflow.
    ///
    /// The runtime memory footprint of this operator is zero.
    Let {
        /// The identifier to be used in `Get` variants to retrieve `value`.
        #[mzreflect(ignore)]
        id: LocalId,
        /// The collection to be bound to `id`.
        value: Box<MirRelationExpr>,
        /// The result of the `Let`, evaluated with `id` bound to `value`.
        body: Box<MirRelationExpr>,
    },
    /// Project out some columns from a dataflow
    ///
    /// The runtime memory footprint of this operator is zero.
    Project {
        /// The source collection.
        input: Box<MirRelationExpr>,
        /// Indices of columns to retain.
        outputs: Vec<usize>,
    },
    /// Append new columns to a dataflow
    ///
    /// The runtime memory footprint of this operator is zero.
    Map {
        /// The source collection.
        input: Box<MirRelationExpr>,
        /// Expressions which determine values to append to each row.
        /// An expression may refer to columns in `input` or
        /// expressions defined earlier in the vector.
        scalars: Vec<MirScalarExpr>,
    },
    /// Like Map, but yields zero-or-more output rows per input row
    ///
    /// The runtime memory footprint of this operator is zero.
    FlatMap {
        /// The source collection
        input: Box<MirRelationExpr>,
        /// The table func to apply
        func: TableFunc,
        /// The arguments to the table func
        exprs: Vec<MirScalarExpr>,
    },
    /// Keep rows from a dataflow where all the predicates are true
    ///
    /// The runtime memory footprint of this operator is zero.
    Filter {
        /// The source collection.
        input: Box<MirRelationExpr>,
        /// Predicates, each of which must be true.
        predicates: Vec<MirScalarExpr>,
    },
    /// Join several collections, where some columns must be equal.
    ///
    /// For further details consult the documentation for [`MirRelationExpr::join`].
    ///
    /// The runtime memory footprint of this operator can be proportional to
    /// the sizes of all inputs and the size of all joins of prefixes.
    /// This may be reduced due to arrangements available at rendering time.
    Join {
        /// A sequence of input relations.
        inputs: Vec<MirRelationExpr>,
        /// A sequence of equivalence classes of expressions on the cross product of inputs.
        ///
        /// Each equivalence class is a list of scalar expressions, where for each class the
        /// intended interpretation is that all evaluated expressions should be equal.
        ///
        /// Each scalar expression is to be evaluated over the cross-product of all records
        /// from all inputs. In many cases this may just be column selection from specific
        /// inputs, but more general cases exist (e.g. complex functions of multiple columns
        /// from multiple inputs, or just constant literals).
        equivalences: Vec<Vec<MirScalarExpr>>,
        /// Join implementation information.
        #[serde(default)]
        implementation: JoinImplementation,
    },
    /// Group a dataflow by some columns and aggregate over each group
    ///
    /// The runtime memory footprint of this operator is at most proportional to the
    /// number of distinct records in the input and output. The actual requirements
    /// can be less: the number of distinct inputs to each aggregate, summed across
    /// each aggregate, plus the output size. For more details consult the code that
    /// builds the associated dataflow.
    Reduce {
        /// The source collection.
        input: Box<MirRelationExpr>,
        /// Column indices used to form groups.
        group_key: Vec<MirScalarExpr>,
        /// Expressions which determine values to append to each row, after the group keys.
        aggregates: Vec<AggregateExpr>,
        /// True iff the input is known to monotonically increase (only addition of records).
        #[serde(default)]
        monotonic: bool,
        /// User hint: expected number of values per group key. Used to optimize physical rendering.
        #[serde(default)]
        expected_group_size: Option<usize>,
    },
    /// Groups and orders within each group, limiting output.
    ///
    /// The runtime memory footprint of this operator is proportional to its input and output.
    TopK {
        /// The source collection.
        input: Box<MirRelationExpr>,
        /// Column indices used to form groups.
        group_key: Vec<usize>,
        /// Column indices used to order rows within groups.
        order_key: Vec<ColumnOrder>,
        /// Number of records to retain
        #[serde(default)]
        limit: Option<usize>,
        /// Number of records to skip
        #[serde(default)]
        offset: usize,
        /// True iff the input is known to monotonically increase (only addition of records).
        #[serde(default)]
        monotonic: bool,
    },
    /// Return a dataflow where the row counts are negated
    ///
    /// The runtime memory footprint of this operator is zero.
    Negate {
        /// The source collection.
        input: Box<MirRelationExpr>,
    },
    /// Keep rows from a dataflow where the row counts are positive
    ///
    /// The runtime memory footprint of this operator is proportional to its input and output.
    Threshold {
        /// The source collection.
        input: Box<MirRelationExpr>,
    },
    /// Adds the frequencies of elements in contained sets.
    ///
    /// The runtime memory footprint of this operator is zero.
    Union {
        /// A source collection.
        base: Box<MirRelationExpr>,
        /// Source collections to union.
        inputs: Vec<MirRelationExpr>,
    },
    /// Technically a no-op. Used to render an index. Will be used to optimize queries
    /// at a finer grain
    ///
    /// The runtime memory footprint of this operator is proportional to its input.
    ArrangeBy {
        /// The source collection
        input: Box<MirRelationExpr>,
        /// Columns to arrange `input` by, in order of decreasing primacy
        keys: Vec<Vec<MirScalarExpr>>,
    },
    /// Declares that `keys` are primary keys for `input`.
    /// Should be used *very* sparingly, and only if there's no plausible
    /// way to derive the key information from the underlying expression.
    /// The result of declaring a key that isn't actually a key for the underlying expression is undefined.
    ///
    /// There is no operator rendered for this IR node; thus, its runtime memory footprint is zero.
    DeclareKeys {
        /// The source collection
        input: Box<MirRelationExpr>,
        /// The set of columns in the source collection that form a key.
        keys: Vec<Vec<usize>>,
    },
}
impl MirRelationExpr {
    /// Reports the schema of the relation.
    ///
    /// This method determines the type through recursive traversal of the
    /// relation expression, drawing from the types of base collections.
    /// As such, this is not an especially cheap method, and should be used
    /// judiciously.
    ///
    /// The relation type is computed incrementally with a recursive post-order
    /// traversal, that accumulates the input types for the relations yet to be
    /// visited in `type_stack`.
    pub fn typ(&self) -> RelationType {
        let mut type_stack = Vec::new();
        self.visit_pre_post(
            &mut |e: &MirRelationExpr| -> Option<Vec<&MirRelationExpr>> {
                if let MirRelationExpr::Let { body, .. } = &e {
                    // Do not traverse the value sub-graph, since it's not relevant for
                    // determining the relation type of Let operators.
                    Some(vec![&*body])
                } else {
                    None
                }
            },
            &mut |e: &MirRelationExpr| {
                if let MirRelationExpr::Let { .. } = &e {
                    let body_typ = type_stack.pop().unwrap();
                    // Insert a dummy relation type for the value, since `typ_with_input_types`
                    // won't look at it, but expects the relation type of the body to be second.
                    // (`num_inputs` counts both the value and the body of a `Let`.)
                    type_stack.push(RelationType::empty());
                    type_stack.push(body_typ);
                }
                let num_inputs = e.num_inputs();
                let relation_type =
                    e.typ_with_input_types(&type_stack[type_stack.len() - num_inputs..]);
                // Replace the input types on the stack with the derived type.
                type_stack.truncate(type_stack.len() - num_inputs);
                type_stack.push(relation_type);
            },
        );
        // After the traversal, exactly the root's type remains.
        assert_eq!(type_stack.len(), 1);
        type_stack.pop().unwrap()
    }
    /// Reports the schema of the relation given the schema of the input relations.
    ///
    /// `input_types` is required to contain the schemas for the input relations of
    /// the current relation in the same order as they are visited by the
    /// `try_visit_children` method, even though not all may be used for computing
    /// the schema of the current relation. For example, `Let` expects two input
    /// types, one for the value relation and one for the body, in that order, but
    /// only the one for the body is used to determine the type of the `Let` relation.
    ///
    /// It is meant to be used during post-order traversals to compute relation
    /// schemas incrementally.
    pub fn typ_with_input_types(&self, input_types: &[RelationType]) -> RelationType {
        assert_eq!(self.num_inputs(), input_types.len());
        match self {
            MirRelationExpr::Constant { rows, typ } => {
                if let Ok(rows) = rows {
                    let n_cols = typ.arity();
                    // If the `i`th entry is `Some`, then we have not yet observed non-uniqueness in the `i`th column.
                    let mut unique_values_per_col = vec![Some(HashSet::<Datum>::default()); n_cols];
                    for (row, diff) in rows {
                        for (i, (datum, column_typ)) in
                            row.iter().zip(typ.column_types.iter()).enumerate()
                        {
                            // If the record will be observed, we should validate its type.
                            if datum != Datum::Dummy {
                                assert!(
                                    datum.is_instance_of(column_typ),
                                    "Expected datum of type {:?}, got value {:?}",
                                    column_typ,
                                    datum
                                );
                                if let Some(unique_vals) = &mut unique_values_per_col[i] {
                                    // A multiplicity other than 1 or a repeated value
                                    // disqualifies the column as a single-column key.
                                    let is_dupe = *diff != 1 || !unique_vals.insert(datum);
                                    if is_dupe {
                                        unique_values_per_col[i] = None;
                                    }
                                }
                            }
                        }
                    }
                    // A constant with at most one row (of multiplicity one) has the empty key.
                    if rows.len() == 0 || (rows.len() == 1 && rows[0].1 == 1) {
                        RelationType::new(typ.column_types.clone()).with_key(vec![])
                    } else {
                        // XXX - Multi-column keys are not detected.
                        typ.clone().with_keys(
                            unique_values_per_col
                                .into_iter()
                                .enumerate()
                                .filter(|(_idx, unique_vals)| unique_vals.is_some())
                                .map(|(idx, _)| vec![idx])
                                .collect(),
                        )
                    }
                } else {
                    // An errored constant retains its declared schema.
                    typ.clone()
                }
            }
            MirRelationExpr::Get { typ, .. } => typ.clone(),
            MirRelationExpr::Let { .. } => input_types.last().unwrap().clone(),
            MirRelationExpr::Project { input: _, outputs } => {
                let input_typ = &input_types[0];
                let mut output_typ = RelationType::new(
                    outputs
                        .iter()
                        .map(|&i| input_typ.column_types[i].clone())
                        .collect(),
                );
                // An input key survives the projection only if every one of its
                // columns is retained; remap it to output column positions.
                for keys in input_typ.keys.iter() {
                    if keys.iter().all(|k| outputs.contains(k)) {
                        output_typ = output_typ.with_key(
                            keys.iter()
                                .map(|c| outputs.iter().position(|o| o == c).unwrap())
                                .collect(),
                        );
                    }
                }
                output_typ
            }
            MirRelationExpr::Map { scalars, .. } => {
                let mut typ = input_types[0].clone();
                let arity = typ.column_types.len();
                let mut remappings = Vec::new();
                for (column, scalar) in scalars.iter().enumerate() {
                    typ.column_types.push(scalar.typ(&typ));
                    // assess whether the scalar preserves uniqueness,
                    // and could participate in a key!
                    fn uniqueness(expr: &MirScalarExpr) -> Option<usize> {
                        match expr {
                            MirScalarExpr::CallUnary { func, expr } => {
                                if func.preserves_uniqueness() {
                                    uniqueness(expr)
                                } else {
                                    None
                                }
                            }
                            MirScalarExpr::Column(c) => Some(*c),
                            _ => None,
                        }
                    }
                    if let Some(c) = uniqueness(scalar) {
                        remappings.push((c, column + arity));
                    }
                }
                // Any column in `remappings` could be replaced in a key
                // by the corresponding c. This could lead to combinatorial
                // explosion using our current representation, so we won't
                // do that. Instead, we'll handle the case of one remapping.
                if remappings.len() == 1 {
                    let (old, new) = remappings.pop().unwrap();
                    let mut new_keys = Vec::new();
                    for key in typ.keys.iter() {
                        if key.contains(&old) {
                            let mut new_key: Vec<usize> =
                                key.iter().cloned().filter(|k| k != &old).collect();
                            new_key.push(new);
                            new_key.sort_unstable();
                            new_keys.push(new_key);
                        }
                    }
                    for new_key in new_keys {
                        typ = typ.with_key(new_key);
                    }
                }
                typ
            }
            MirRelationExpr::FlatMap { func, .. } => {
                let mut input_typ = input_types[0].clone();
                input_typ
                    .column_types
                    .extend(func.output_type().column_types);
                // FlatMap can add duplicate rows, so input keys are no longer valid
                let typ = RelationType::new(input_typ.column_types);
                typ
            }
            MirRelationExpr::Filter { predicates, .. } => {
                // A filter inherits the keys of its input unless the filters
                // have reduced the input to a single row, in which case the
                // keys of the input are `()`.
                let mut input_typ = input_types[0].clone();
                let cols_equal_to_literal = predicates
                    .iter()
                    .filter_map(|p| {
                        if let MirScalarExpr::CallBinary {
                            func: crate::BinaryFunc::Eq,
                            expr1,
                            expr2,
                        } = p
                        {
                            if let MirScalarExpr::Column(c) = &**expr1 {
                                if expr2.is_literal_ok() {
                                    return Some(c);
                                }
                            }
                        }
                        None
                    })
                    .collect::<Vec<_>>();
                // Columns pinned to a literal can be dropped from keys: the
                // remaining columns still uniquely identify each row.
                for key_set in &mut input_typ.keys {
                    key_set.retain(|k| !cols_equal_to_literal.contains(&k));
                }
                if !input_typ.keys.is_empty() {
                    // If `[0 1]` is an input key and there `#0 = #1` exists as a
                    // predicate, we should present both `[0]` and `[1]` as keys
                    // of the output. Also, if there is a key involving X column
                    // and an equality between X and another column Y, a variant
                    // of that key with Y instead of X should be presented as
                    // a key of the output.
                    // First, we build an iterator over the equivalences
                    let classes = predicates.iter().filter_map(|p| {
                        if let MirScalarExpr::CallBinary {
                            func: crate::BinaryFunc::Eq,
                            expr1,
                            expr2,
                        } = p
                        {
                            if let Some(c1) = expr1.as_column() {
                                if let Some(c2) = expr2.as_column() {
                                    return Some((c1, c2));
                                }
                            }
                        }
                        None
                    });
                    // Keep doing replacements until the number of keys settles
                    let mut prev_keys: HashSet<_> = input_typ.keys.drain(..).collect();
                    let mut prev_keys_size = 0;
                    while prev_keys_size != prev_keys.len() {
                        prev_keys_size = prev_keys.len();
                        for (c1, c2) in classes.clone() {
                            let mut new_keys = HashSet::new();
                            for key in prev_keys.into_iter() {
                                let contains_c1 = key.contains(&c1);
                                let contains_c2 = key.contains(&c2);
                                if contains_c1 && contains_c2 {
                                    // Either column is redundant given the other.
                                    new_keys.insert(
                                        key.iter().filter(|c| **c != c1).cloned().collect_vec(),
                                    );
                                    new_keys.insert(
                                        key.iter().filter(|c| **c != c2).cloned().collect_vec(),
                                    );
                                } else {
                                    if contains_c1 {
                                        new_keys.insert(
                                            key.iter()
                                                .map(|c| if *c == c1 { c2 } else { *c })
                                                .sorted()
                                                .collect_vec(),
                                        );
                                    } else if contains_c2 {
                                        new_keys.insert(
                                            key.iter()
                                                .map(|c| if *c == c2 { c1 } else { *c })
                                                .sorted()
                                                .collect_vec(),
                                        );
                                    }
                                    new_keys.insert(key);
                                }
                            }
                            prev_keys = new_keys;
                        }
                    }
                    input_typ.keys = prev_keys.into_iter().sorted().collect_vec();
                }
                // Augment non-nullability of columns, by observing either
                // 1. Predicates that explicitly test for null values, and
                // 2. Columns that if null would make a predicate be null.
                let mut nonnull_required_columns = HashSet::new();
                for predicate in predicates {
                    // Add any columns that being null would force the predicate to be null.
                    // Should that happen, the row would be discarded.
                    predicate.non_null_requirements(&mut nonnull_required_columns);
                    // Test for explicit checks that a column is non-null.
                    if let MirScalarExpr::CallUnary {
                        func: UnaryFunc::Not(scalar_func::Not),
                        expr,
                    } = predicate
                    {
                        if let MirScalarExpr::CallUnary {
                            func: UnaryFunc::IsNull(scalar_func::IsNull),
                            expr,
                        } = &**expr
                        {
                            if let MirScalarExpr::Column(c) = &**expr {
                                input_typ.column_types[*c].nullable = false;
                            }
                        }
                    }
                }
                // Set as nonnull any columns where null values would cause
                // any predicate to evaluate to null.
                for column in nonnull_required_columns.into_iter() {
                    input_typ.column_types[column].nullable = false;
                }
                input_typ
            }
            MirRelationExpr::Join { equivalences, .. } => {
                // Iterating and cloning types inside the flat_map() avoids allocating Vec<>,
                // as clones are directly added to column_types Vec<>.
                let column_types = input_types
                    .iter()
                    .flat_map(|i| i.column_types.iter().cloned())
                    .collect::<Vec<_>>();
                let mut typ = RelationType::new(column_types);
                // It is important the `new_from_input_types` constructor is
                // used. Otherwise, Materialize may potentially end up in an
                // infinite loop.
                let input_mapper =
                    join_input_mapper::JoinInputMapper::new_from_input_types(input_types);
                let global_keys = input_mapper.global_keys(
                    &input_types
                        .iter()
                        .map(|t| t.keys.clone())
                        .collect::<Vec<_>>(),
                    equivalences,
                );
                for keys in global_keys {
                    typ = typ.with_key(keys.clone());
                }
                typ
            }
            MirRelationExpr::Reduce {
                group_key,
                aggregates,
                ..
            } => {
                let input_typ = &input_types[0];
                let mut column_types = group_key
                    .iter()
                    .map(|e| e.typ(input_typ))
                    .collect::<Vec<_>>();
                for agg in aggregates {
                    column_types.push(agg.typ(input_typ));
                }
                let mut result = RelationType::new(column_types);
                // The group key should form a key, but we might already have
                // keys that are subsets of the group key, and should retain
                // those instead, if so.
                let mut keys = Vec::new();
                for key in input_typ.keys.iter() {
                    if key
                        .iter()
                        .all(|k| group_key.contains(&MirScalarExpr::Column(*k)))
                    {
                        keys.push(
                            key.iter()
                                .map(|i| {
                                    group_key
                                        .iter()
                                        .position(|k| k == &MirScalarExpr::Column(*i))
                                        .unwrap()
                                })
                                .collect::<Vec<_>>(),
                        );
                    }
                }
                if keys.is_empty() {
                    keys.push((0..group_key.len()).collect());
                }
                for key in keys {
                    result = result.with_key(key);
                }
                result
            }
            MirRelationExpr::TopK {
                group_key, limit, ..
            } => {
                // If `limit` is `Some(1)` then the group key will become
                // a unique key, as there will be only one record with that key.
                let mut typ = input_types[0].clone();
                if limit == &Some(1) {
                    typ = typ.with_key(group_key.clone())
                }
                typ
            }
            MirRelationExpr::Negate { input: _ } => {
                // Although negate may have distinct records for each key,
                // the multiplicity is -1 rather than 1. This breaks many
                // of the optimization uses of "keys".
                let mut typ = input_types[0].clone();
                typ.keys.clear();
                typ
            }
            MirRelationExpr::Threshold { .. } => input_types[0].clone(),
            MirRelationExpr::Union { base, inputs } => {
                let mut base_cols = input_types[0].column_types.clone();
                for input_type in input_types.iter().skip(1) {
                    for (base_col, col) in
                        base_cols.iter_mut().zip_eq(input_type.column_types.iter())
                    {
                        *base_col = base_col
                            .union(&col)
                            .map_err(|e| format!("{}\nIn {:#?}", e, self))
                            .unwrap();
                    }
                }
                // Generally, unions do not have any unique keys, because
                // each input might duplicate some. However, there is at
                // least one idiomatic structure that does preserve keys,
                // which results from SQL aggregations that must populate
                // absent records with default values. In that pattern,
                // the union of one GET with its negation, which has first
                // been subjected to a projection and map, we can remove
                // their influence on the key structure.
                //
                // If there are A, B, each with a unique `key` such that
                // we are looking at
                //
                //     A.proj(set_containing_key) + (B - A.proj(key)).map(stuff)
                //
                // Then we can report `key` as a unique key.
                //
                // TODO: make unique key structure an optimization analysis
                // rather than part of the type information.
                // TODO: perhaps ensure that (above) A.proj(key) is a
                // subset of B, as otherwise there are negative records
                // and who knows what is true (not expected, but again
                // who knows what the query plan might look like).
                let (base_projection, base_with_project_stripped) =
                    if let MirRelationExpr::Project { input, outputs } = &**base {
                        (outputs.clone(), &**input)
                    } else {
                        // An input without a project is equivalent to an input
                        // with the project being all columns in the input in order.
                        ((0..base_cols.len()).collect::<Vec<_>>(), &**base)
                    };
                let mut keys = Vec::new();
                if let MirRelationExpr::Get {
                    id: first_id,
                    typ: _,
                } = base_with_project_stripped
                {
                    if inputs.len() == 1 {
                        if let MirRelationExpr::Map { input, .. } = &inputs[0] {
                            if let MirRelationExpr::Union { base, inputs } = &**input {
                                if inputs.len() == 1 {
                                    if let MirRelationExpr::Project { input, outputs } = &**base {
                                        if let MirRelationExpr::Negate { input } = &**input {
                                            if let MirRelationExpr::Get {
                                                id: second_id,
                                                typ: _,
                                            } = &**input
                                            {
                                                if first_id == second_id {
                                                    keys.extend(
                                                        inputs[0].typ().keys.drain(..).filter(
                                                            |key| {
                                                                key.iter().all(|c| {
                                                                    outputs.get(*c) == Some(c)
                                                                        && base_projection.get(*c)
                                                                            == Some(c)
                                                                })
                                                            },
                                                        ),
                                                    );
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                RelationType::new(base_cols).with_keys(keys)
                // Important: do not inherit keys of either input, as not unique.
            }
            MirRelationExpr::ArrangeBy { .. } => input_types[0].clone(),
            MirRelationExpr::DeclareKeys { keys, .. } => {
                input_types[0].clone().with_keys(keys.clone())
            }
        }
    }
/// The number of columns in the relation.
///
/// This number is determined from the type, which is determined recursively
/// at non-trivial cost.
pub fn arity(&self) -> usize {
match self {
MirRelationExpr::Constant { rows: _, typ } => typ.arity(),
MirRelationExpr::Get { typ, .. } => typ.arity(),
MirRelationExpr::Let { body, .. } => body.arity(),
MirRelationExpr::Project { input: _, outputs } => outputs.len(),
MirRelationExpr::Map { input, scalars } => input.arity() + scalars.len(),
MirRelationExpr::FlatMap { input, func, .. } => {
input.arity() + func.output_type().column_types.len()
}
MirRelationExpr::Filter { input, .. } => input.arity(),
MirRelationExpr::Join { inputs, .. } => inputs.iter().map(|i| i.arity()).sum(),
MirRelationExpr::Reduce {
input: _,
group_key,
aggregates,
..
} => group_key.len() + aggregates.len(),
MirRelationExpr::TopK { input, .. } => input.arity(),
MirRelationExpr::Negate { input } => input.arity(),
MirRelationExpr::Threshold { input } => input.arity(),
MirRelationExpr::Union { base, inputs: _ } => base.arity(),
MirRelationExpr::ArrangeBy { input, .. } => input.arity(),
MirRelationExpr::DeclareKeys { input, .. } => input.arity(),
}
}
/// The number of child relations this relation has.
pub fn num_inputs(&self) -> usize {
let mut count = 0;
self.visit_children(|_| count += 1);
count
}
/// Constructs a constant collection from specific rows and schema, where
/// each row will have a multiplicity of one.
pub fn constant(rows: Vec<Vec<Datum>>, typ: RelationType) -> Self {
let rows = rows.into_iter().map(|row| (row, 1)).collect();
MirRelationExpr::constant_diff(rows, typ)
}
/// Constructs a constant collection from specific rows and schema, where
/// each row can have an arbitrary multiplicity.
pub fn constant_diff(rows: Vec<(Vec<Datum>, Diff)>, typ: RelationType) -> Self {
for (row, _diff) in &rows {
for (datum, column_typ) in row.iter().zip(typ.column_types.iter()) {
assert!(
datum.is_instance_of(column_typ),
"Expected datum of type {:?}, got value {:?}",
column_typ,
datum
);
}
}
let rows = Ok(rows
.into_iter()
.map(move |(row, diff)| (Row::pack_slice(&row), diff))
.collect());
MirRelationExpr::Constant { rows, typ }
}
/// Constructs the expression for getting a global collection
pub fn global_get(id: GlobalId, typ: RelationType) -> Self {
MirRelationExpr::Get {
id: Id::Global(id),
typ,
}
}
/// Retains only the columns specified by `output`.
pub fn project(self, outputs: Vec<usize>) -> Self {
MirRelationExpr::Project {
input: Box::new(self),
outputs,
}
}
/// Append to each row the results of applying elements of `scalar`.
pub fn map(self, scalars: Vec<MirScalarExpr>) -> Self {
MirRelationExpr::Map {
input: Box::new(self),
scalars,
}
}
/// Like `map`, but yields zero-or-more output rows per input row
pub fn flat_map(self, func: TableFunc, exprs: Vec<MirScalarExpr>) -> Self {
MirRelationExpr::FlatMap {
input: Box::new(self),
func,
exprs,
}
}
/// Retain only the rows satisifying each of several predicates.
pub fn filter<I>(self, predicates: I) -> Self
where
I: IntoIterator<Item = MirScalarExpr>,
{
MirRelationExpr::Filter {
input: Box::new(self),
predicates: predicates.into_iter().collect(),
}
}
/// Form the Cartesian outer-product of rows in both inputs.
pub fn product(self, right: Self) -> Self {
MirRelationExpr::join(vec![self, right], vec![])
}
/// Performs a relational equijoin among the input collections.
///
/// The sequence `inputs` each describe different input collections, and the sequence `variables` describes
/// equality constraints that some of their columns must satisfy. Each element in `variable` describes a set
/// of pairs `(input_index, column_index)` where every value described by that set must be equal.
///
/// For example, the pair `(input, column)` indexes into `inputs[input][column]`, extracting the `input`th
/// input collection and for each row examining its `column`th column.
///
/// # Example
///
/// ```rust
/// use mz_repr::{Datum, ColumnType, RelationType, ScalarType};
/// use mz_expr::MirRelationExpr;
///
/// // A common schema for each input.
/// let schema = RelationType::new(vec![
/// ScalarType::Int32.nullable(false),
/// ScalarType::Int32.nullable(false),
/// ]);
///
/// // the specific data are not important here.
/// let data = vec![Datum::Int32(0), Datum::Int32(1)];
///
/// // Three collections that could have been different.
/// let input0 = MirRelationExpr::constant(vec![data.clone()], schema.clone());
/// let input1 = MirRelationExpr::constant(vec![data.clone()], schema.clone());
/// let input2 = MirRelationExpr::constant(vec![data.clone()], schema.clone());
///
/// // Join the three relations looking for triangles, like so.
/// //
/// // Output(A,B,C) := Input0(A,B), Input1(B,C), Input2(A,C)
/// let joined = MirRelationExpr::join(
/// vec![input0, input1, input2],
/// vec![
/// vec![(0,0), (2,0)], // fields A of inputs 0 and 2.
/// vec![(0,1), (1,0)], // fields B of inputs 0 and 1.
/// vec![(1,1), (2,1)], // fields C of inputs 1 and 2.
/// ],
/// );
///
/// // Technically the above produces `Output(A,B,B,C,A,C)` because the columns are concatenated.
/// // A projection resolves this and produces the correct output.
/// let result = joined.project(vec![0, 1, 3]);
/// ```
pub fn join(inputs: Vec<MirRelationExpr>, variables: Vec<Vec<(usize, usize)>>) -> Self {
let input_mapper = join_input_mapper::JoinInputMapper::new(&inputs);
let equivalences = variables
.into_iter()
.map(|vs| {
vs.into_iter()
.map(|(r, c)| input_mapper.map_expr_to_global(MirScalarExpr::Column(c), r))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Self::join_scalars(inputs, equivalences)
}
/// Constructs a join operator from inputs and required-equal scalar expressions.
pub fn join_scalars(
inputs: Vec<MirRelationExpr>,
equivalences: Vec<Vec<MirScalarExpr>>,
) -> Self {
MirRelationExpr::Join {
inputs,
equivalences,
implementation: JoinImplementation::Unimplemented,
}
}
/// Perform a key-wise reduction / aggregation.
///
/// The `group_key` argument indicates columns in the input collection that should
/// be grouped, and `aggregates` lists aggregation functions each of which produces
/// one output column in addition to the keys.
pub fn reduce(
self,
group_key: Vec<usize>,
aggregates: Vec<AggregateExpr>,
expected_group_size: Option<usize>,
) -> Self {
MirRelationExpr::Reduce {
input: Box::new(self),
group_key: group_key.into_iter().map(MirScalarExpr::Column).collect(),
aggregates,
monotonic: false,
expected_group_size,
}
}
/// Perform a key-wise reduction order by and limit.
///
/// The `group_key` argument indicates columns in the input collection that should
/// be grouped, the `order_key` argument indicates columns that should be further
/// used to order records within groups, and the `limit` argument constrains the
/// total number of records that should be produced in each group.
pub fn top_k(
self,
group_key: Vec<usize>,
order_key: Vec<ColumnOrder>,
limit: Option<usize>,
offset: usize,
) -> Self {
MirRelationExpr::TopK {
input: Box::new(self),
group_key,
order_key,
limit,
offset,
monotonic: false,
}
}
/// Negates the occurrences of each row.
pub fn negate(self) -> Self {
MirRelationExpr::Negate {
input: Box::new(self),
}
}
/// Removes all but the first occurrence of each row.
pub fn distinct(self) -> Self {
let arity = self.arity();
self.distinct_by((0..arity).collect())
}
/// Removes all but the first occurrence of each key. Columns not included
/// in the `group_key` are discarded.
pub fn distinct_by(self, group_key: Vec<usize>) -> Self {
self.reduce(group_key, vec![], None)
}
/// Discards rows with a negative frequency.
pub fn threshold(self) -> Self {
MirRelationExpr::Threshold {
input: Box::new(self),
}
}
/// Unions together any number inputs.
///
/// If `inputs` is empty, then an empty relation of type `typ` is
/// constructed.
pub fn union_many(mut inputs: Vec<Self>, typ: RelationType) -> Self {
if inputs.len() == 0 {
MirRelationExpr::Constant {
rows: Ok(vec![]),
typ,
}
} else if inputs.len() == 1 {
inputs.into_element()
} else {
MirRelationExpr::Union {
base: Box::new(inputs.remove(0)),
inputs,
}
}
}
/// Produces one collection where each row is present with the sum of its frequencies in each input.
pub fn union(self, other: Self) -> Self {
MirRelationExpr::Union {
base: Box::new(self),
inputs: vec![other],
}
}
/// Arranges the collection by the specified columns
pub fn arrange_by(self, keys: &[Vec<MirScalarExpr>]) -> Self {
MirRelationExpr::ArrangeBy {
input: Box::new(self),
keys: keys.to_owned(),
}
}
/// Indicates if this is a constant empty collection.
///
/// A false value does not mean the collection is known to be non-empty,
/// only that we cannot currently determine that it is statically empty.
pub fn is_empty(&self) -> bool {
if let MirRelationExpr::Constant { rows: Ok(rows), .. } = self {
rows.is_empty()
} else {
false
}
}
/// Returns the distinct global identifiers on which this expression
/// depends.
///
/// See [`MirRelationExpr::global_uses_into`] to reuse an existing vector.
pub fn global_uses(&self) -> Vec<GlobalId> {
let mut out = vec![];
self.global_uses_into(&mut out);
out.sort();
out.dedup();
out
}
/// Appends global identifiers on which this expression depends to `out`.
///
/// Unlike [`MirRelationExpr::global_uses`], this method does not deduplicate
/// the global identifiers.
pub fn global_uses_into(&self, out: &mut Vec<GlobalId>) {
if let MirRelationExpr::Get {
id: Id::Global(id), ..
} = self
{
out.push(*id);
}
self.visit_children(|expr| expr.global_uses_into(out))
}
/// Pretty-print this MirRelationExpr to a string.
///
/// This method allows an additional `ExprHumanizer` which can annotate
/// identifiers with human-meaningful names for the identifiers.
pub fn pretty_humanized(&self, id_humanizer: &impl ExprHumanizer) -> String {
ViewExplanation::new(self, id_humanizer).to_string()
}
/// Pretty-print this MirRelationExpr to a string.
pub fn pretty(&self) -> String {
ViewExplanation::new(self, &DummyHumanizer).to_string()
}
    /// Print this MirRelationExpr to a JSON-formatted string.
    ///
    /// # Panics
    ///
    /// Panics if serialization fails (not expected, as the type derives
    /// `Serialize`).
    pub fn json(&self) -> String {
        serde_json::to_string(self).unwrap()
    }
/// Pretty-print this MirRelationExpr to a string with type information.
pub fn pretty_typed(&self) -> String {
let mut explanation = ViewExplanation::new(self, &DummyHumanizer);
explanation.explain_types();
explanation.to_string()
}
/// Take ownership of `self`, leaving an empty `MirRelationExpr::Constant` with the correct type.
pub fn take_safely(&mut self) -> MirRelationExpr {
let typ = self.typ();
std::mem::replace(
self,
MirRelationExpr::Constant {
rows: Ok(vec![]),
typ,
},
)
}
/// Take ownership of `self`, leaving an empty `MirRelationExpr::Constant` with an **incorrect** type.
///
/// This should only be used if `self` is about to be dropped or otherwise overwritten.
pub fn take_dangerous(&mut self) -> MirRelationExpr {
let empty = MirRelationExpr::Constant {
rows: Ok(vec![]),
typ: RelationType::new(Vec::new()),
};
std::mem::replace(self, empty)
}
/// Replaces `self` with some logic applied to `self`.
pub fn replace_using<F>(&mut self, logic: F)
where
F: FnOnce(MirRelationExpr) -> MirRelationExpr,
{
let empty = MirRelationExpr::Constant {
rows: Ok(vec![]),
typ: RelationType::new(Vec::new()),
};
let expr = std::mem::replace(self, empty);
*self = logic(expr);
}
/// Store `self` in a `Let` and pass the corresponding `Get` to `body`
pub fn let_in<Body>(self, id_gen: &mut IdGen, body: Body) -> super::MirRelationExpr
where
Body: FnOnce(&mut IdGen, MirRelationExpr) -> super::MirRelationExpr,
{
if let MirRelationExpr::Get { .. } = self {
// already done
body(id_gen, self)
} else {
let id = LocalId::new(id_gen.allocate_id());
let get = MirRelationExpr::Get {
id: Id::Local(id),
typ: self.typ(),
};
let body = (body)(id_gen, get);
MirRelationExpr::Let {
id,
value: Box::new(self),
body: Box::new(body),
}
}
}
    /// Return every row in `self` that does not have a matching row in the first columns of `keys_and_values`, using `default` to fill in the remaining columns
    /// (If `default` is a row of nulls, this is the 'outer' part of LEFT OUTER JOIN)
    ///
    /// # Panics
    ///
    /// Panics if the arity of `keys_and_values` is not the arity of `self`
    /// plus the length of `default`.
    pub fn anti_lookup(
        self,
        id_gen: &mut IdGen,
        keys_and_values: MirRelationExpr,
        default: Vec<(Datum, ColumnType)>,
    ) -> MirRelationExpr {
        assert_eq!(keys_and_values.arity() - self.arity(), default.len());
        self.let_in(id_gen, |_id_gen, get_keys| {
            MirRelationExpr::join(
                vec![
                    // all the missing keys (with count 1)
                    // (keys present in `keys_and_values` cancel against their
                    //  negation, leaving only keys that appear in `self` alone)
                    keys_and_values
                        .distinct_by((0..get_keys.arity()).collect())
                        .negate()
                        .union(get_keys.clone().distinct()),
                    // join with keys to get the correct counts
                    get_keys.clone(),
                ],
                (0..get_keys.arity())
                    .map(|i| vec![(0, i), (1, i)])
                    .collect(),
            )
            // get rid of the extra copies of columns from keys
            .project((0..get_keys.arity()).collect())
            // This join is logically equivalent to
            // `.map(<default_expr>)`, but using a join allows for
            // potential predicate pushdown and elision in the
            // optimizer.
            .product(MirRelationExpr::constant(
                vec![default.iter().map(|(datum, _)| *datum).collect()],
                RelationType::new(default.iter().map(|(_, typ)| typ.clone()).collect()),
            ))
        })
    }
/// Return:
/// * every row in keys_and_values
/// * every row in `self` that does not have a matching row in the first columns of `keys_and_values`, using `default` to fill in the remaining columns
/// (This is LEFT OUTER JOIN if:
/// 1) `default` is a row of null
/// 2) matching rows in `keys_and_values` and `self` have the same multiplicity.)
pub fn lookup(
self,
id_gen: &mut IdGen,
keys_and_values: MirRelationExpr,
default: Vec<(Datum<'static>, ColumnType)>,
) -> MirRelationExpr {
keys_and_values.let_in(id_gen, |id_gen, get_keys_and_values| {
get_keys_and_values.clone().union(self.anti_lookup(
id_gen,
get_keys_and_values,
default,
))
})
}
/// Passes the collection through unchanged, but informs the optimizer that `keys` are primary keys.
pub fn declare_keys(self, keys: Vec<Vec<usize>>) -> Self {
Self::DeclareKeys {
input: Box::new(self),
keys,
}
}
/// Applies a fallible immutable `f` to each child of type `MirRelationExpr`.
pub fn try_visit_children<'a, F, E>(&'a self, f: F) -> Result<(), E>
where
F: FnMut(&'a MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_children(self, f)
}
/// Applies a fallible mutable `f` to each child of type `MirRelationExpr`.
pub fn try_visit_mut_children<'a, F, E>(&'a mut self, f: F) -> Result<(), E>
where
F: FnMut(&'a mut MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_mut_children(self, f)
}
/// Applies an infallible immutable `f` to each child of type `MirRelationExpr`.
pub fn visit_children<'a, F>(&'a self, f: F)
where
F: FnMut(&'a MirRelationExpr),
{
MirRelationExprVisitor::new().visit_children(self, f)
}
/// Applies an infallible mutable `f` to each child of type `MirRelationExpr`.
pub fn visit_mut_children<'a, F>(&'a mut self, f: F)
where
F: FnMut(&'a mut MirRelationExpr),
{
MirRelationExprVisitor::new().visit_mut_children(self, f)
}
/// Post-order immutable fallible `MirRelationExpr` visitor.
pub fn try_visit_post<'a, F, E>(&'a self, f: &mut F) -> Result<(), E>
where
F: FnMut(&'a MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_post(self, f)
}
/// Post-order mutable fallible `MirRelationExpr` visitor.
pub fn try_visit_mut_post<F, E>(&mut self, f: &mut F) -> Result<(), E>
where
F: FnMut(&mut MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_mut_post(self, f)
}
/// Post-order immutable infallible `MirRelationExpr` visitor.
pub fn visit_post<'a, F>(&'a self, f: &mut F)
where
F: FnMut(&'a MirRelationExpr),
{
MirRelationExprVisitor::new().visit_post(self, f)
}
/// Post-order mutable infallible `MirRelationExpr` visitor.
pub fn visit_mut_post<F>(&mut self, f: &mut F)
where
F: FnMut(&mut MirRelationExpr),
{
MirRelationExprVisitor::new().visit_mut_post(self, f)
}
/// Pre-order immutable fallible `MirRelationExpr` visitor.
pub fn try_visit_pre<F, E>(&self, f: &mut F) -> Result<(), E>
where
F: FnMut(&MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_pre(self, f)
}
/// Pre-order mutable fallible `MirRelationExpr` visitor.
pub fn try_visit_mut_pre<F, E>(&mut self, f: &mut F) -> Result<(), E>
where
F: FnMut(&mut MirRelationExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_mut_pre(self, f)
}
/// Pre-order immutable infallible `MirRelationExpr` visitor.
pub fn visit_pre<F>(&self, f: &mut F)
where
F: FnMut(&MirRelationExpr),
{
MirRelationExprVisitor::new().visit_pre(self, f)
}
/// Pre-order mutable infallible `MirRelationExpr` visitor.
pub fn visit_mut_pre<F>(&mut self, f: &mut F)
where
F: FnMut(&mut MirRelationExpr),
{
MirRelationExprVisitor::new().visit_mut_pre(self, f)
}
/// A generalization of [`Self::visit_pre`] and [`Self::visit_post`].
///
/// The function `pre` runs on a `MirRelationExpr` before it runs on any of the
/// child `MirRelationExpr`s. The function `post` runs on child `MirRelationExpr`s
/// first before the parent.
///
/// Optionally, `pre` can return which child `MirRelationExpr`s, if any, should be
/// visited (default is to visit all children).
pub fn visit_pre_post<F1, F2>(&self, pre: &mut F1, post: &mut F2)
where
F1: FnMut(&MirRelationExpr) -> Option<Vec<&MirRelationExpr>>,
F2: FnMut(&MirRelationExpr),
{
MirRelationExprVisitor::new().visit_pre_post(self, pre, post)
}
/// Fallible visitor for the [`MirScalarExpr`]s directly owned by this relation expression.
///
/// The `f` visitor should not recursively descend into owned [`MirRelationExpr`]s.
pub fn try_visit_scalars_mut1<F, E>(&mut self, f: &mut F) -> Result<(), E>
where
F: FnMut(&mut MirScalarExpr) -> Result<(), E>,
{
MirRelationExprVisitor::new().try_visit_scalar_children_mut(self, f)
}
/// Fallible mutable visitor for the [`MirScalarExpr`]s in the [`MirRelationExpr`] subtree rooted at `self`.
///
/// Note that this does not recurse into [`MirRelationExpr`] subtrees within [`MirScalarExpr`] nodes.
pub fn try_visit_scalars_mut<F, E>(&mut self, f: &mut F) -> Result<(), E>
where
F: FnMut(&mut MirScalarExpr) -> Result<(), E>,
E: From<RecursionLimitError>,
{
MirRelationExprVisitor::new().try_visit_scalars_mut(self, f)
}
/// Infallible mutable visitor for the [`MirScalarExpr`]s in the [`MirRelationExpr`] subtree rooted at at `self`.
///
/// Note that this does not recurse into [`MirRelationExpr`] subtrees within [`MirScalarExpr`] nodes.
pub fn visit_scalars_mut<F>(&mut self, f: &mut F)
where
F: FnMut(&mut MirScalarExpr),
{
MirRelationExprVisitor::new().visit_scalars_mut(self, f)
}
}
/// Recursive-traversal helper for [`MirRelationExpr`] trees.
#[derive(Debug)]
struct MirRelationExprVisitor {
    /// Bounds traversal depth for the fallible visitors (see the
    /// `CheckedRecursion` impl below).
    recursion_guard: RecursionGuard,
}
/// Contains visitor implementations.
///
/// [child, pre, post] x [fallible, infallible] x [immutable, mutable]
impl MirRelationExprVisitor {
    /// Constructs a new MirRelationExprVisitor using a [`RecursionGuard`] with [`RECURSION_LIMIT`].
    fn new() -> MirRelationExprVisitor {
        MirRelationExprVisitor {
            // Fresh guard per visitor, all sharing the crate-wide limit.
            recursion_guard: RecursionGuard::with_limit(RECURSION_LIMIT),
        }
    }
    /// Applies a fallible immutable `f` to each `expr` child of type `MirRelationExpr`.
    fn try_visit_children<'a, F, E>(&self, expr: &'a MirRelationExpr, mut f: F) -> Result<(), E>
    where
        F: FnMut(&'a MirRelationExpr) -> Result<(), E>,
    {
        // Constant and Get are the only leaf variants; every other variant
        // owns one or more child relations.
        match expr {
            MirRelationExpr::Constant { .. } | MirRelationExpr::Get { .. } => (),
            MirRelationExpr::Let { value, body, .. } => {
                f(value)?;
                f(body)?;
            }
            MirRelationExpr::Project { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Map { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::FlatMap { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Filter { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Join { inputs, .. } => {
                for input in inputs {
                    f(input)?;
                }
            }
            MirRelationExpr::Reduce { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::TopK { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Negate { input } => f(input)?,
            MirRelationExpr::Threshold { input } => f(input)?,
            MirRelationExpr::Union { base, inputs } => {
                f(base)?;
                for input in inputs {
                    f(input)?;
                }
            }
            MirRelationExpr::ArrangeBy { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::DeclareKeys { input, .. } => {
                f(input)?;
            }
        }
        Ok(())
    }
    /// Applies a fallible mutable `f` to each `expr` child of type `MirRelationExpr`.
    fn try_visit_mut_children<'a, F, E>(
        &self,
        expr: &'a mut MirRelationExpr,
        mut f: F,
    ) -> Result<(), E>
    where
        F: FnMut(&'a mut MirRelationExpr) -> Result<(), E>,
    {
        // Mirrors `try_visit_children`; keep the two match statements in sync.
        match expr {
            MirRelationExpr::Constant { .. } | MirRelationExpr::Get { .. } => (),
            MirRelationExpr::Let { value, body, .. } => {
                f(value)?;
                f(body)?;
            }
            MirRelationExpr::Project { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Map { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::FlatMap { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Filter { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Join { inputs, .. } => {
                for input in inputs {
                    f(input)?;
                }
            }
            MirRelationExpr::Reduce { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::TopK { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::Negate { input } => f(input)?,
            MirRelationExpr::Threshold { input } => f(input)?,
            MirRelationExpr::Union { base, inputs } => {
                f(base)?;
                for input in inputs {
                    f(input)?;
                }
            }
            MirRelationExpr::ArrangeBy { input, .. } => {
                f(input)?;
            }
            MirRelationExpr::DeclareKeys { input, .. } => {
                f(input)?;
            }
        }
        Ok(())
    }
    /// Applies an infallible immutable `f` to each `expr` child of type `MirRelationExpr`.
    fn visit_children<'a, F>(&self, expr: &'a MirRelationExpr, mut f: F)
    where
        F: FnMut(&'a MirRelationExpr),
    {
        // Infallible twin of `try_visit_children`; keep in sync.
        match expr {
            MirRelationExpr::Constant { .. } | MirRelationExpr::Get { .. } => (),
            MirRelationExpr::Let { value, body, .. } => {
                f(value);
                f(body);
            }
            MirRelationExpr::Project { input, .. } => {
                f(input);
            }
            MirRelationExpr::Map { input, .. } => {
                f(input);
            }
            MirRelationExpr::FlatMap { input, .. } => {
                f(input);
            }
            MirRelationExpr::Filter { input, .. } => {
                f(input);
            }
            MirRelationExpr::Join { inputs, .. } => {
                for input in inputs {
                    f(input);
                }
            }
            MirRelationExpr::Reduce { input, .. } => {
                f(input);
            }
            MirRelationExpr::TopK { input, .. } => {
                f(input);
            }
            MirRelationExpr::Negate { input } => f(input),
            MirRelationExpr::Threshold { input } => f(input),
            MirRelationExpr::Union { base, inputs } => {
                f(base);
                for input in inputs {
                    f(input);
                }
            }
            MirRelationExpr::ArrangeBy { input, .. } => {
                f(input);
            }
            MirRelationExpr::DeclareKeys { input, .. } => {
                f(input);
            }
        }
    }
    /// Applies an infallible mutable `f` to each `expr` child of type `MirRelationExpr`.
    fn visit_mut_children<'a, F>(&self, expr: &'a mut MirRelationExpr, mut f: F)
    where
        F: FnMut(&'a mut MirRelationExpr),
    {
        // Infallible twin of `try_visit_mut_children`; keep in sync.
        match expr {
            MirRelationExpr::Constant { .. } | MirRelationExpr::Get { .. } => (),
            MirRelationExpr::Let { value, body, .. } => {
                f(value);
                f(body);
            }
            MirRelationExpr::Project { input, .. } => {
                f(input);
            }
            MirRelationExpr::Map { input, .. } => {
                f(input);
            }
            MirRelationExpr::FlatMap { input, .. } => {
                f(input);
            }
            MirRelationExpr::Filter { input, .. } => {
                f(input);
            }
            MirRelationExpr::Join { inputs, .. } => {
                for input in inputs {
                    f(input);
                }
            }
            MirRelationExpr::Reduce { input, .. } => {
                f(input);
            }
            MirRelationExpr::TopK { input, .. } => {
                f(input);
            }
            MirRelationExpr::Negate { input } => f(input),
            MirRelationExpr::Threshold { input } => f(input),
            MirRelationExpr::Union { base, inputs } => {
                f(base);
                for input in inputs {
                    f(input);
                }
            }
            MirRelationExpr::ArrangeBy { input, .. } => {
                f(input);
            }
            MirRelationExpr::DeclareKeys { input, .. } => {
                f(input);
            }
        }
    }
    /// Post-order immutable fallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn try_visit_post<'a, F, E>(&self, expr: &'a MirRelationExpr, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&'a MirRelationExpr) -> Result<(), E>,
        E: From<RecursionLimitError>,
    {
        // `checked_recur` bounds the recursion depth; exceeding the limit
        // produces a `RecursionLimitError` that is converted into `E`.
        self.checked_recur(move |_| {
            self.try_visit_children(expr, |e| self.try_visit_post(e, f))?;
            f(expr)
        })
    }
    /// Post-order mutable fallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn try_visit_mut_post<F, E>(&self, expr: &mut MirRelationExpr, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&mut MirRelationExpr) -> Result<(), E>,
        E: From<RecursionLimitError>,
    {
        self.checked_recur(move |_| {
            self.try_visit_mut_children(expr, |e| self.try_visit_mut_post(e, f))?;
            f(expr)
        })
    }
    /// Post-order immutable infallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn visit_post<'a, F>(&self, expr: &'a MirRelationExpr, f: &mut F)
    where
        F: FnMut(&'a MirRelationExpr),
    {
        // Infallible visitors cannot surface a recursion-limit error, so they
        // use `maybe_grow` — presumably growing the stack on demand instead of
        // enforcing a depth limit. TODO(review): confirm `maybe_grow` semantics.
        maybe_grow(|| {
            self.visit_children(expr, |e| self.visit_post(e, f));
            f(expr)
        })
    }
    /// Post-order mutable infallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn visit_mut_post<F>(&self, expr: &mut MirRelationExpr, f: &mut F)
    where
        F: FnMut(&mut MirRelationExpr),
    {
        maybe_grow(|| {
            self.visit_mut_children(expr, |e| self.visit_mut_post(e, f));
            f(expr)
        })
    }
    /// Pre-order immutable fallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn try_visit_pre<F, E>(&self, expr: &MirRelationExpr, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&MirRelationExpr) -> Result<(), E>,
        E: From<RecursionLimitError>,
    {
        self.checked_recur(move |_| {
            f(expr)?;
            self.try_visit_children(expr, |e| self.try_visit_pre(e, f))
        })
    }
    /// Pre-order mutable fallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn try_visit_mut_pre<F, E>(&self, expr: &mut MirRelationExpr, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&mut MirRelationExpr) -> Result<(), E>,
        E: From<RecursionLimitError>,
    {
        self.checked_recur(move |_| {
            f(expr)?;
            self.try_visit_mut_children(expr, |e| self.try_visit_mut_pre(e, f))
        })
    }
    /// Pre-order immutable infallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn visit_pre<F>(&self, expr: &MirRelationExpr, f: &mut F)
    where
        F: FnMut(&MirRelationExpr),
    {
        maybe_grow(|| {
            f(expr);
            self.visit_children(expr, |e| self.visit_pre(e, f))
        })
    }
    /// Pre-order mutable infallible `MirRelationExpr` visitor for `expr`.
    #[inline]
    fn visit_mut_pre<F>(&self, expr: &mut MirRelationExpr, f: &mut F)
    where
        F: FnMut(&mut MirRelationExpr),
    {
        maybe_grow(|| {
            f(expr);
            self.visit_mut_children(expr, |e| self.visit_mut_pre(e, f))
        })
    }
    /// A generalization of [`Self::visit_pre`] and [`Self::visit_post`].
    ///
    /// The function `pre` runs on a `MirRelationExpr` before it runs on any of the
    /// child `MirRelationExpr`s. The function `post` runs on child `MirRelationExpr`s
    /// first before the parent.
    ///
    /// Optionally, `pre` can return which child `MirRelationExpr`s, if any, should be
    /// visited (default is to visit all children).
    #[inline]
    fn visit_pre_post<F1, F2>(&self, expr: &MirRelationExpr, pre: &mut F1, post: &mut F2)
    where
        F1: FnMut(&MirRelationExpr) -> Option<Vec<&MirRelationExpr>>,
        F2: FnMut(&MirRelationExpr),
    {
        maybe_grow(|| {
            // `Some(to_visit)` restricts the traversal to the listed children;
            // `None` visits every child.
            if let Some(to_visit) = pre(expr) {
                for e in to_visit {
                    self.visit_pre_post(e, pre, post);
                }
            } else {
                self.visit_children(expr, |e| self.visit_pre_post(e, pre, post));
            }
            post(expr);
        })
    }
    /// Fallible visitor for the [`MirScalarExpr`]s directly owned by this relation expression.
    ///
    /// The `f` visitor should not recursively descend into owned [`MirRelationExpr`]s.
    #[inline]
    fn try_visit_scalar_children_mut<F, E>(
        &self,
        expr: &mut MirRelationExpr,
        f: &mut F,
    ) -> Result<(), E>
    where
        F: FnMut(&mut MirScalarExpr) -> Result<(), E>,
    {
        // Match written out explicitly to reduce the possibility of adding a
        // new field with a `MirScalarExpr` within and forgetting to account for it
        // here.
        match expr {
            MirRelationExpr::Map { scalars, input: _ } => {
                for s in scalars {
                    f(s)?;
                }
                Ok(())
            }
            MirRelationExpr::Filter {
                predicates,
                input: _,
            } => {
                for p in predicates {
                    f(p)?;
                }
                Ok(())
            }
            MirRelationExpr::FlatMap {
                exprs,
                input: _,
                func: _,
            } => {
                for expr in exprs {
                    f(expr)?;
                }
                Ok(())
            }
            MirRelationExpr::Join {
                equivalences,
                inputs: _,
                implementation: _,
            } => {
                // Each equivalence class is a list of scalar expressions.
                for equivalence in equivalences {
                    for expr in equivalence {
                        f(expr)?;
                    }
                }
                Ok(())
            }
            MirRelationExpr::ArrangeBy { input: _, keys } => {
                for key in keys {
                    for s in key {
                        f(s)?;
                    }
                }
                Ok(())
            }
            MirRelationExpr::Reduce {
                group_key,
                aggregates,
                ..
            } => {
                for s in group_key {
                    f(s)?;
                }
                for agg in aggregates {
                    f(&mut agg.expr)?;
                }
                Ok(())
            }
            // All remaining variants own no `MirScalarExpr`s directly.
            MirRelationExpr::Constant { rows: _, typ: _ }
            | MirRelationExpr::Get { id: _, typ: _ }
            | MirRelationExpr::Let {
                id: _,
                value: _,
                body: _,
            }
            | MirRelationExpr::Project {
                input: _,
                outputs: _,
            }
            | MirRelationExpr::TopK {
                input: _,
                group_key: _,
                order_key: _,
                limit: _,
                offset: _,
                monotonic: _,
            }
            | MirRelationExpr::Negate { input: _ }
            | MirRelationExpr::Threshold { input: _ }
            | MirRelationExpr::DeclareKeys { input: _, keys: _ }
            | MirRelationExpr::Union { base: _, inputs: _ } => Ok(()),
        }
    }
    /// Fallible mutable visitor for all [`MirScalarExpr`]s in the [`MirRelationExpr`] subtree rooted at `expr`.
    ///
    /// Note that this does not recurse into [`MirRelationExpr`] subtrees wrapped in [`MirScalarExpr`] nodes.
    #[inline]
    fn try_visit_scalars_mut<F, E>(&self, expr: &mut MirRelationExpr, f: &mut F) -> Result<(), E>
    where
        F: FnMut(&mut MirScalarExpr) -> Result<(), E>,
        E: From<RecursionLimitError>,
    {
        self.try_visit_mut_post(expr, &mut |e| self.try_visit_scalar_children_mut(e, f))
    }
    /// Infallible mutable visitor for the [`MirScalarExpr`]s in the [`MirRelationExpr`] subtree rooted at `expr`.
    ///
    /// Note that this does not recurse into [`MirRelationExpr`] subtrees within [`MirScalarExpr`] nodes.
    #[inline]
    fn visit_scalars_mut<F>(&self, expr: &mut MirRelationExpr, f: &mut F)
    where
        F: FnMut(&mut MirScalarExpr),
    {
        // Adapt the infallible `f` to the fallible machinery with an
        // uninhabitable-in-practice error type; the only possible error is the
        // recursion limit, which is treated as a bug here.
        self.try_visit_scalars_mut(expr, &mut |s| {
            f(s);
            Ok::<_, RecursionLimitError>(())
        })
        .expect("Unexpected error in `visit_scalars_mut` call")
    }
}
/// Add checked recursion support for [`MirRelationExprVisitor`].
impl CheckedRecursion for MirRelationExprVisitor {
    /// Exposes the visitor's guard so `checked_recur` can track traversal depth.
    fn recursion_guard(&self) -> &RecursionGuard {
        &self.recursion_guard
    }
}
/// Specification for an ordering by a column.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, MzReflect)]
pub struct ColumnOrder {
    /// The column index.
    pub column: usize,
    /// Whether to sort in descending order.
    /// Deserializes to `false` (ascending) when the field is absent.
    #[serde(default)]
    pub desc: bool,
}
impl fmt::Display for ColumnOrder {
    /// Renders as `#<column> asc` or `#<column> desc`, e.g. `#2 desc`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let direction = if self.desc { "desc" } else { "asc" };
        write!(f, "#{} {}", self.column, direction)
    }
}
/// Describes an aggregation expression.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash, MzReflect)]
pub struct AggregateExpr {
    /// Names the aggregation function.
    pub func: AggregateFunc,
    /// An expression which extracts from each row the input to `func`.
    pub expr: MirScalarExpr,
    /// Should the aggregation be applied only to distinct results in each group.
    /// Deserializes to `false` when the field is absent.
    #[serde(default)]
    pub distinct: bool,
}
impl AggregateExpr {
    /// Computes the type of this `AggregateExpr`.
    pub fn typ(&self, relation_type: &RelationType) -> ColumnType {
        self.func.output_type(self.expr.typ(relation_type))
    }
    /// Returns whether the expression has a constant result.
    pub fn is_constant(&self) -> bool {
        match self.func {
            // Min/max-style aggregates pass a literal input through unchanged,
            // so a literal input means a constant result.
            AggregateFunc::MaxInt16
            | AggregateFunc::MaxInt32
            | AggregateFunc::MaxInt64
            | AggregateFunc::MaxFloat32
            | AggregateFunc::MaxFloat64
            | AggregateFunc::MaxBool
            | AggregateFunc::MaxString
            | AggregateFunc::MaxDate
            | AggregateFunc::MaxTimestamp
            | AggregateFunc::MaxTimestampTz
            | AggregateFunc::MinInt16
            | AggregateFunc::MinInt32
            | AggregateFunc::MinInt64
            | AggregateFunc::MinFloat32
            | AggregateFunc::MinFloat64
            | AggregateFunc::MinBool
            | AggregateFunc::MinString
            | AggregateFunc::MinDate
            | AggregateFunc::MinTimestamp
            | AggregateFunc::MinTimestampTz
            | AggregateFunc::Any
            | AggregateFunc::All
            | AggregateFunc::Dummy => self.expr.is_literal(),
            // Counting a literal null always yields zero.
            AggregateFunc::Count => self.expr.is_literal_null(),
            // Other aggregates are only constant if they propagate an error literal.
            _ => self.expr.is_literal_err(),
        }
    }
    /// Extracts unique input from aggregate type
    ///
    /// NOTE(review): this appears to rewrite the aggregate as a scalar
    /// expression over a single input row per group (e.g. `Count` becomes
    /// 0 or 1 depending on nullness); `input_type` is only consulted by the
    /// `RowNumber` arm. Confirm the single-row-per-group assumption with callers.
    pub fn on_unique(&self, input_type: &RelationType) -> MirScalarExpr {
        match self.func {
            // Count is one if non-null, and zero if null.
            AggregateFunc::Count => self
                .expr
                .clone()
                .call_unary(UnaryFunc::IsNull(crate::func::IsNull))
                .if_then_else(
                    MirScalarExpr::literal_ok(Datum::Int64(0), ScalarType::Int64),
                    MirScalarExpr::literal_ok(Datum::Int64(1), ScalarType::Int64),
                ),
            // SumInt16 takes Int16s as input, but outputs Int64s.
            AggregateFunc::SumInt16 => self
                .expr
                .clone()
                .call_unary(UnaryFunc::CastInt16ToInt64(scalar_func::CastInt16ToInt64)),
            // SumInt32 takes Int32s as input, but outputs Int64s.
            AggregateFunc::SumInt32 => self
                .expr
                .clone()
                .call_unary(UnaryFunc::CastInt32ToInt64(scalar_func::CastInt32ToInt64)),
            // SumInt64 takes Int64s as input, but outputs numerics.
            AggregateFunc::SumInt64 => self.expr.clone().call_unary(UnaryFunc::CastInt64ToNumeric(
                scalar_func::CastInt64ToNumeric(Some(NumericMaxScale::ZERO)),
            )),
            // JsonbAgg takes _anything_ as input, but must output a Jsonb array.
            AggregateFunc::JsonbAgg { .. } => MirScalarExpr::CallVariadic {
                func: VariadicFunc::JsonbBuildArray,
                exprs: vec![self.expr.clone().call_unary(UnaryFunc::RecordGet(0))],
            },
            // JsonbAgg takes _anything_ as input, but must output a Jsonb object.
            AggregateFunc::JsonbObjectAgg { .. } => {
                let record = self.expr.clone().call_unary(UnaryFunc::RecordGet(0));
                MirScalarExpr::CallVariadic {
                    func: VariadicFunc::JsonbBuildObject,
                    // Fields 0 and 1 of the record are the key and the value.
                    exprs: (0..2)
                        .map(|i| record.clone().call_unary(UnaryFunc::RecordGet(i)))
                        .collect(),
                }
            }
            // StringAgg takes nested records of strings and outputs a string
            AggregateFunc::StringAgg { .. } => self
                .expr
                .clone()
                .call_unary(UnaryFunc::RecordGet(0))
                .call_unary(UnaryFunc::RecordGet(0)),
            // ListConcat and ArrayConcat take a single level of records and output a list containing exactly 1 element
            AggregateFunc::ListConcat { .. } | AggregateFunc::ArrayConcat { .. } => {
                self.expr.clone().call_unary(UnaryFunc::RecordGet(0))
            }
            // RowNumber takes a list of records and outputs a list containing exactly 1 element
            AggregateFunc::RowNumber { .. } => {
                let list = self
                    .expr
                    .clone()
                    // extract the list within the record
                    .call_unary(UnaryFunc::RecordGet(0));
                // extract the expression within the list
                let record = MirScalarExpr::CallVariadic {
                    func: VariadicFunc::ListIndex,
                    exprs: vec![
                        list,
                        // Lists are 1-indexed; the single row is at position 1.
                        MirScalarExpr::literal_ok(Datum::Int64(1), ScalarType::Int64),
                    ],
                };
                MirScalarExpr::CallVariadic {
                    func: VariadicFunc::ListCreate {
                        elem_type: self
                            .typ(input_type)
                            .scalar_type
                            .unwrap_list_element_type()
                            .clone(),
                    },
                    exprs: vec![MirScalarExpr::CallVariadic {
                        func: VariadicFunc::RecordCreate {
                            field_names: vec![
                                ColumnName::from("?row_number?"),
                                ColumnName::from("?record?"),
                            ],
                        },
                        exprs: vec![
                            // A single row always has row number 1.
                            MirScalarExpr::literal_ok(Datum::Int64(1), ScalarType::Int64),
                            record,
                        ],
                    }],
                }
            }
            // All other variants should return the argument to the aggregation.
            AggregateFunc::MaxNumeric
            | AggregateFunc::MaxInt16
            | AggregateFunc::MaxInt32
            | AggregateFunc::MaxInt64
            | AggregateFunc::MaxFloat32
            | AggregateFunc::MaxFloat64
            | AggregateFunc::MaxBool
            | AggregateFunc::MaxString
            | AggregateFunc::MaxDate
            | AggregateFunc::MaxTimestamp
            | AggregateFunc::MaxTimestampTz
            | AggregateFunc::MinNumeric
            | AggregateFunc::MinInt16
            | AggregateFunc::MinInt32
            | AggregateFunc::MinInt64
            | AggregateFunc::MinFloat32
            | AggregateFunc::MinFloat64
            | AggregateFunc::MinBool
            | AggregateFunc::MinString
            | AggregateFunc::MinDate
            | AggregateFunc::MinTimestamp
            | AggregateFunc::MinTimestampTz
            | AggregateFunc::SumFloat32
            | AggregateFunc::SumFloat64
            | AggregateFunc::SumNumeric
            | AggregateFunc::Any
            | AggregateFunc::All
            | AggregateFunc::Dummy => self.expr.clone(),
        }
    }
}
impl fmt::Display for AggregateExpr {
    /// Renders as `func(expr)` or `func(distinct expr)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let distinct = if self.distinct { "distinct " } else { "" };
        write!(f, "{}({}{})", self.func, distinct, self.expr)
    }
}
/// Describe a join implementation in dataflow.
///
/// `Default` for this type is [`JoinImplementation::Unimplemented`].
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash, MzReflect)]
pub enum JoinImplementation {
    /// Perform a sequence of binary differential dataflow joins.
    ///
    /// The first argument indicates 1) the index of the starting collection
    /// and 2) if it should be arranged, the keys to arrange it by.
    /// The sequence that follows lists other relation indexes, and the key for
    /// the arrangement we should use when joining it in.
    ///
    /// Each collection index should occur exactly once, either in the first
    /// position or somewhere in the list.
    Differential(
        (usize, Option<Vec<MirScalarExpr>>),
        Vec<(usize, Vec<MirScalarExpr>)>,
    ),
    /// Perform independent delta query dataflows for each input.
    ///
    /// The argument is a sequence of plans, for the input collections in order.
    /// Each plan starts from the corresponding index, and then in sequence joins
    /// against collections identified by index and with the specified arrangement key.
    DeltaQuery(Vec<Vec<(usize, Vec<MirScalarExpr>)>>),
    /// No implementation yet selected.
    Unimplemented,
}
impl Default for JoinImplementation {
fn default() -> Self {
JoinImplementation::Unimplemented
}
}
/// Instructions for finishing the result of a query.
///
/// The primary reason for the existence of this structure and attendant code
/// is that SQL's ORDER BY requires sorting rows (as already implied by the
/// keywords), whereas much of the rest of SQL is defined in terms of unordered
/// multisets. But as it turns out, the same idea can be used to optimize
/// trivial peeks.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RowSetFinishing {
    /// Order rows by the given columns.
    pub order_by: Vec<ColumnOrder>,
    /// Include only as many rows (after offset).
    pub limit: Option<usize>,
    /// Omit as many rows.
    /// Applied before `limit`; see `finish`.
    pub offset: usize,
    /// Include only given columns.
    /// Indices into the row's columns, applied last.
    pub project: Vec<usize>,
}
impl RowSetFinishing {
    /// True if the finishing does nothing to any result set.
    pub fn is_trivial(&self, arity: usize) -> bool {
        self.limit.is_none()
            && self.order_by.is_empty()
            && self.offset == 0
            && self.project.iter().copied().eq(0..arity)
    }
    /// Applies finishing actions to a row set: top-`offset + limit` selection,
    /// dropping the first `offset` rows, sorting by `order_by`, and projecting
    /// the `project` columns.
    pub fn finish(&self, rows: &mut Vec<Row>) {
        let mut left_datum_vec = mz_repr::DatumVec::new();
        let mut right_datum_vec = mz_repr::DatumVec::new();
        // Comparator implementing `order_by`, falling back to the rows' own
        // ordering so the comparison is total and deterministic.
        let mut sort_by = |left: &Row, right: &Row| {
            let left_datums = left_datum_vec.borrow_with(left);
            let right_datums = right_datum_vec.borrow_with(right);
            compare_columns(&self.order_by, &left_datums, &right_datums, || {
                left.cmp(&right)
            })
        };
        let offset = self.offset;
        if offset > rows.len() {
            // Every row is skipped. `clear` (rather than replacing with a new
            // `Vec`) retains the allocation for potential reuse by the caller.
            rows.clear();
            return;
        }
        if let Some(limit) = self.limit {
            let offset_plus_limit = offset + limit;
            if rows.len() > offset_plus_limit {
                // Partially order so the `offset_plus_limit` least rows are at
                // the front, then discard the rest.
                pdqselect::select_by(rows, offset_plus_limit, &mut sort_by);
                rows.truncate(offset_plus_limit);
            }
        }
        if offset > 0 {
            // Move the `offset` least rows to the front and drop them.
            pdqselect::select_by(rows, offset, &mut sort_by);
            rows.drain(..offset);
        }
        rows.sort_by(&mut sort_by);
        let mut row_packer = Row::default();
        let mut datum_vec = mz_repr::DatumVec::new();
        // Re-pack each surviving row with only the projected columns.
        for row in rows.iter_mut() {
            *row = {
                let datums = datum_vec.borrow_with(&row);
                row_packer.extend(self.project.iter().map(|i| &datums[*i]));
                row_packer.finish_and_reuse()
            };
        }
    }
}
/// Compare `left` and `right` using `order`. If that doesn't produce a strict ordering, call `tiebreaker`.
pub fn compare_columns<F>(
    order: &[ColumnOrder],
    left: &[Datum],
    right: &[Datum],
    tiebreaker: F,
) -> Ordering
where
    F: Fn() -> Ordering,
{
    for key in order {
        let lval = &left[key.column];
        let rval = &right[key.column];
        // Flip the operand order for descending keys.
        let ordering = if key.desc {
            rval.cmp(lval)
        } else {
            lval.cmp(rval)
        };
        if ordering != Ordering::Equal {
            return ordering;
        }
    }
    tiebreaker()
}
| 39.025218 | 160 | 0.507355 |
5d206dec984e6fb3bd4688c676032b8b0d559c5d | 1,921 | //! Semihosting operations
// TODO document
#![allow(missing_docs)]
/// SYS_CLOCK: centiseconds since execution started.
pub const CLOCK: usize = 0x10;
/// SYS_CLOSE: close a file on the host.
pub const CLOSE: usize = 0x02;
/// SYS_ELAPSED: number of elapsed target ticks since execution started.
pub const ELAPSED: usize = 0x30;
/// SYS_ERRNO: value of the C library `errno` variable on the host.
pub const ERRNO: usize = 0x13;
/// SYS_FLEN: length of a file on the host.
pub const FLEN: usize = 0x0c;
/// SYS_GET_CMDLINE: command line used to call the executable.
pub const GET_CMDLINE: usize = 0x15;
/// SYS_HEAPINFO: system heap and stack parameters.
pub const HEAPINFO: usize = 0x16;
/// SYS_ISERROR: whether a return code is an error status.
pub const ISERROR: usize = 0x08;
/// SYS_ISTTY: whether a file handle is an interactive device.
pub const ISTTY: usize = 0x09;
/// SYS_OPEN: open a file on the host (see [`open`] for mode values).
pub const OPEN: usize = 0x01;
/// SYS_READ: read from a file on the host.
pub const READ: usize = 0x06;
/// SYS_READC: read a byte from the debug channel.
pub const READC: usize = 0x07;
/// SYS_REMOVE: delete a file on the host.
pub const REMOVE: usize = 0x0e;
/// SYS_RENAME: rename a file on the host.
pub const RENAME: usize = 0x0f;
/// SYS_SEEK: seek to a position in a file on the host.
pub const SEEK: usize = 0x0a;
/// SYS_SYSTEM: pass a command to the host command-line interpreter.
pub const SYSTEM: usize = 0x12;
/// SYS_TICKFREQ: tick frequency of the target.
pub const TICKFREQ: usize = 0x31;
/// SYS_TIME: host time in seconds since the Unix epoch.
pub const TIME: usize = 0x11;
/// SYS_TMPNAM: temporary file name on the host.
pub const TMPNAM: usize = 0x0d;
/// SYS_WRITE0: write a NUL-terminated string to the debug channel.
pub const WRITE0: usize = 0x04;
/// SYS_WRITE: write to a file on the host.
pub const WRITE: usize = 0x05;
/// SYS_WRITEC: write a byte to the debug channel.
pub const WRITEC: usize = 0x03;
/// EnterSVC: switch the processor to Supervisor mode.
pub const ENTER_SVC: usize = 0x17;
/// ReportException: report an exception (e.g. ApplicationExit) to the debugger.
pub const REPORT_EXCEPTION: usize = 0x18;
/// Values for the mode parameter of the OPEN syscall.
///
/// These are indices into the ISO C `fopen` mode table defined by the
/// semihosting interface, not the mode strings themselves.
pub mod open {
    /// Mode corresponding to fopen "r" mode.
    pub const R: usize = 0;
    /// Mode corresponding to fopen "rb" mode.
    pub const R_BINARY: usize = 1;
    /// Mode corresponding to fopen "r+" mode.
    pub const RW: usize = 2;
    /// Mode corresponding to fopen "r+b" mode.
    pub const RW_BINARY: usize = 3;
    /// Mode corresponding to fopen "w" mode.
    pub const W_TRUNC: usize = 4;
    /// Mode corresponding to fopen "wb" mode.
    pub const W_TRUNC_BINARY: usize = 5;
    /// Mode corresponding to fopen "w+" mode.
    pub const RW_TRUNC: usize = 6;
    /// Mode corresponding to fopen "w+b" mode.
    pub const RW_TRUNC_BINARY: usize = 7;
    /// Mode corresponding to fopen "a" mode.
    pub const W_APPEND: usize = 8;
    /// Mode corresponding to fopen "ab" mode.
    pub const W_APPEND_BINARY: usize = 9;
    /// Mode corresponding to fopen "a+" mode.
    pub const RW_APPEND: usize = 10;
    /// Mode corresponding to fopen "a+b" mode.
    pub const RW_APPEND_BINARY: usize = 11;
}
| 33.12069 | 54 | 0.675169 |
b986f6aabcc228225fef1f7d965720c3bc58e613 | 14,381 | use crate::distribution::{self, poisson, Discrete, Univariate};
use crate::function::{beta, gamma};
use crate::statistics::*;
use crate::{Result, StatsError};
use rand::distributions::Distribution;
use rand::Rng;
use std::f64;
/// Implements the
/// [NegativeBinomial](http://en.wikipedia.org/wiki/Negative_binomial_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::{NegativeBinomial, Discrete};
/// use statrs::statistics::Mean;
/// use statrs::prec::{almost_eq};
///
/// let r = NegativeBinomial::new(4.0, 0.5).unwrap();
/// assert_eq!(r.mean(), 4.0);
/// assert!(almost_eq(r.pmf(0), 0.0625, 1e-8));
/// assert!(almost_eq(r.pmf(3), 0.15625, 1e-8));
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct NegativeBinomial {
    /// Number of successes; real-valued (>= 0), validated in `new`.
    r: f64,
    /// Probability of success per trial, in `[0, 1]`; validated in `new`.
    p: f64,
}
impl NegativeBinomial {
    /// Constructs a new negative binomial distribution
    /// with a given `p` probability of the number of successes `r`
    ///
    /// # Errors
    ///
    /// Returns an error if `p` is `NaN`, less than `0.0`,
    /// greater than `1.0`, or if `r` is `NaN` or less than `0`
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::NegativeBinomial;
    ///
    /// let mut result = NegativeBinomial::new(4.0, 0.5);
    /// assert!(result.is_ok());
    ///
    /// result = NegativeBinomial::new(-0.5, 5.0);
    /// assert!(result.is_err());
    /// ```
    pub fn new(r: f64, p: f64) -> Result<NegativeBinomial> {
        // `contains` is false for NaN, so this also rejects a NaN `p`.
        let p_valid = (0.0..=1.0).contains(&p);
        let r_valid = !r.is_nan() && r >= 0.0;
        if p_valid && r_valid {
            Ok(NegativeBinomial { p, r })
        } else {
            Err(StatsError::BadParams)
        }
    }
    /// Returns the probability of success `p` of
    /// the negative binomial distribution.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::NegativeBinomial;
    ///
    /// let r = NegativeBinomial::new(5.0, 0.5).unwrap();
    /// assert_eq!(r.p(), 0.5);
    /// ```
    pub fn p(&self) -> f64 {
        self.p
    }
    /// Returns the number `r` of success of this negative
    /// binomial distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::NegativeBinomial;
    ///
    /// let r = NegativeBinomial::new(5.0, 0.5).unwrap();
    /// assert_eq!(r.r(), 5.0);
    /// ```
    pub fn r(&self) -> f64 {
        self.r
    }
}
impl Distribution<u64> for NegativeBinomial {
    /// Draws a sample via the gamma–Poisson mixture representation: a rate
    /// `lambda` is drawn from Gamma(r, (1-p)/p) and the count from
    /// Poisson(lambda).
    fn sample<R: Rng + ?Sized>(&self, r: &mut R) -> u64 {
        let lambda = distribution::gamma::sample_unchecked(r, self.r, (1.0 - self.p) / self.p);
        poisson::sample_unchecked(r, lambda).floor() as u64
    }
}
impl Univariate<u64, f64> for NegativeBinomial {
    /// Calculates the cumulative distribution function for the
    /// negative binomial distribution at `x`
    ///
    /// Note that due to extending the distribution to the reals
    /// (allowing positive real values for `r`), while still technically
    /// a discrete distribution the CDF behaves more like that of a
    /// continuous distribution rather than a discrete distribution
    /// (i.e. a smooth graph rather than a step-ladder)
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 1 - I_(1 - p)(x + 1, r)
    /// ```
    ///
    /// where `I_(x)(a, b)` is the regularized incomplete beta function
    fn cdf(&self, x: f64) -> f64 {
        if x < 0.0 {
            return 0.0;
        }
        if x.is_infinite() {
            return 1.0;
        }
        1.0 - beta::beta_reg(x + 1.0, self.r, 1.0 - self.p)
    }
}
impl Min<u64> for NegativeBinomial {
    /// Returns the minimum value in the domain of the
    /// negative binomial distribution representable by a 64-bit
    /// integer
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 0
    /// ```
    fn min(&self) -> u64 {
        u64::min_value()
    }
}
impl Max<u64> for NegativeBinomial {
    /// Returns the maximum value in the domain of the
    /// negative binomial distribution representable by a 64-bit
    /// integer
    ///
    /// # Formula
    ///
    /// ```ignore
    /// u64::MAX
    /// ```
    fn max(&self) -> u64 {
        u64::max_value()
    }
}
impl Mean<f64> for NegativeBinomial {
    /// Returns the mean of the negative binomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// r * (1-p) / p
    /// ```
    fn mean(&self) -> f64 {
        let q = 1.0 - self.p;
        self.r * q / self.p
    }
}
impl Variance<f64> for NegativeBinomial {
    /// Returns the variance of the negative binomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// r * (1-p) / p^2
    /// ```
    fn variance(&self) -> f64 {
        let q = 1.0 - self.p;
        self.r * q / (self.p * self.p)
    }
    /// Returns the standard deviation of the negative binomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// sqrt(r * (1-p))/p
    /// ```
    fn std_dev(&self) -> f64 {
        let q = 1.0 - self.p;
        (self.r * q).sqrt() / self.p
    }
}
impl Skewness<f64> for NegativeBinomial {
    /// Returns the skewness of the negative binomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (2-p) / sqrt(r * (1-p))
    /// ```
    fn skewness(&self) -> f64 {
        let denom = (self.r * (1.0 - self.p)).sqrt();
        (2.0 - self.p) / denom
    }
}
impl Mode<f64> for NegativeBinomial {
    /// Returns the mode for the negative binomial distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// if r > 1 then
    /// floor((r - 1) * (1-p / p))
    /// else
    /// 0
    /// ```
    fn mode(&self) -> f64 {
        if self.r > 1.0 {
            ((self.r - 1.0) * (1.0 - self.p) / self.p).floor()
        } else {
            0.0
        }
    }
}
impl Discrete<u64, f64> for NegativeBinomial {
    /// Calculates the probability mass function for the negative binomial
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (x + r - 1 choose x) * (1 - p)^x * p^r
    /// ```
    fn pmf(&self, x: u64) -> f64 {
        // Computed via the log-pmf for numerical stability.
        self.ln_pmf(x).exp()
    }
    /// Calculates the log probability mass function for the negative binomial
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln((x + r - 1 choose x) * (1 - p)^x * p^r)
    /// ```
    fn ln_pmf(&self, x: u64) -> f64 {
        let k = x as f64;
        // ln C(x + r - 1, x) is expressed through the log-gamma function,
        // which also extends the coefficient to non-integer `r`.
        gamma::ln_gamma(self.r + k) - gamma::ln_gamma(self.r) - gamma::ln_gamma(k + 1.0)
            + (self.r * self.p.ln())
            + (k * (1.0 - self.p).ln())
    }
}
#[rustfmt::skip]
#[cfg(test)]
mod test {
    use std::fmt::Debug;
    use std::f64;
    use crate::statistics::*;
    use crate::distribution::{Univariate, Discrete, NegativeBinomial};
    // use crate::distribution::internal::*;
    // Constructs a distribution, asserting that the parameters are accepted.
    fn try_create(r: f64, p: f64) -> NegativeBinomial {
        let r = NegativeBinomial::new(r, p);
        assert!(r.is_ok());
        r.unwrap()
    }
    // Round-trips (r, p) through the constructor and accessors.
    fn create_case(r: f64, p: f64) {
        let dist = try_create(r, p);
        assert_eq!(p, dist.p());
        assert_eq!(r, dist.r());
    }
    // Asserts that construction fails for invalid parameters.
    fn bad_create_case(r: f64, p: f64) {
        let r = NegativeBinomial::new(r, p);
        assert!(r.is_err());
    }
    // Evaluates `eval` against a freshly created distribution.
    fn get_value<T, F>(r: f64, p: f64, eval: F) -> T
        where T: PartialEq + Debug,
              F: Fn(NegativeBinomial) -> T
    {
        let r = try_create(r, p);
        eval(r)
    }
    // Exact-equality check of `eval` against `expected`.
    fn test_case<T, F>(r: f64, p: f64, expected: T, eval: F)
        where T: PartialEq + Debug,
              F: Fn(NegativeBinomial) -> T
    {
        let x = get_value(r, p, eval);
        assert_eq!(expected, x);
    }
    // Like `test_case`, but treats NaN as matching NaN (NaN != NaN under ==).
    fn test_case_or_nan<F>(r: f64, p: f64, expected: f64, eval: F)
        where F: Fn(NegativeBinomial) -> f64
    {
        let x = get_value(r, p, eval);
        if expected.is_nan() {
            assert!(x.is_nan())
        }
        else {
            assert_eq!(expected, x);
        }
    }
    // Approximate-equality check within tolerance `acc`.
    fn test_almost<F>(r: f64, p: f64, expected: f64, acc: f64, eval: F)
        where F: Fn(NegativeBinomial) -> f64
    {
        let x = get_value(r, p, eval);
        assert_almost_eq!(expected, x, acc);
    }
    #[test]
    fn test_create() {
        create_case(0.0, 0.0);
        create_case(0.3, 0.4);
        create_case(1.0, 0.3);
    }
    #[test]
    fn test_bad_create() {
        bad_create_case(f64::NAN, 1.0);
        bad_create_case(0.0, f64::NAN);
        bad_create_case(-1.0, 1.0);
        bad_create_case(2.0, 2.0);
    }
    #[test]
    fn test_mean() {
        test_case(4.0, 0.0, f64::INFINITY, |x| x.mean());
        test_almost(3.0, 0.3, 7.0, 1e-15 , |x| x.mean());
        test_case(2.0, 1.0, 0.0, |x| x.mean());
    }
    #[test]
    fn test_variance() {
        test_case(4.0, 0.0, f64::INFINITY, |x| x.variance());
        test_almost(3.0, 0.3, 23.333333333333, 1e-12, |x| x.variance());
        test_case(2.0, 1.0, 0.0, |x| x.variance());
    }
    #[test]
    fn test_std_dev() {
        test_case(4.0, 0.0, f64::INFINITY, |x| x.std_dev());
        test_almost(3.0, 0.3, 4.830458915, 1e-9, |x| x.std_dev());
        test_case(2.0, 1.0, 0.0, |x| x.std_dev());
    }
    #[test]
    fn test_skewness() {
        test_case(0.0, 0.0, f64::INFINITY, |x| x.skewness());
        test_almost(0.1, 0.3, 6.425396041, 1e-09, |x| x.skewness());
        test_case(1.0, 1.0, f64::INFINITY, |x| x.skewness());
    }
    #[test]
    fn test_mode() {
        test_case(0.0, 0.0, 0.0, |x| x.mode());
        test_case(0.3, 0.0, 0.0, |x| x.mode());
        test_case(1.0, 1.0, 0.0, |x| x.mode());
        test_case(10.0, 0.01, 891.0, |x| x.mode());
    }
    #[test]
    fn test_min_max() {
        test_case(1.0, 0.5, 0, |x| x.min());
        test_case(1.0, 0.3, std::u64::MAX, |x| x.max());
    }
    #[test]
    fn test_pmf() {
        test_almost(4.0, 0.5, 0.0625, 1e-8, |x| x.pmf(0));
        test_almost(4.0, 0.5, 0.15625, 1e-8, |x| x.pmf(3));
        test_case(1.0, 0.0, 0.0, |x| x.pmf(0));
        test_case(1.0, 0.0, 0.0, |x| x.pmf(1));
        test_almost(3.0, 0.2, 0.008, 1e-15, |x| x.pmf(0));
        test_almost(3.0, 0.2, 0.0192, 1e-15, |x| x.pmf(1));
        test_almost(3.0, 0.2, 0.04096, 1e-15, |x| x.pmf(3));
        test_almost(10.0, 0.2, 1.024e-07, 1e-07, |x| x.pmf(0));
        test_almost(10.0, 0.2, 8.192e-07, 1e-07, |x| x.pmf(1));
        test_almost(10.0, 0.2, 0.001015706852, 1e-07, |x| x.pmf(10));
        test_almost(1.0, 0.3, 0.3, 1e-15,  |x| x.pmf(0));
        test_almost(1.0, 0.3, 0.21, 1e-15, |x| x.pmf(1));
        test_almost(3.0, 0.3, 0.027, 1e-15, |x| x.pmf(0));
        test_case(0.3, 1.0, 0.0, |x| x.pmf(1));
        test_case(0.3, 1.0, 0.0, |x| x.pmf(3));
        // p == 1 yields ln(0) * 0 terms, producing NaN at x == 0.
        test_case_or_nan(0.3, 1.0, f64::NAN, |x| x.pmf(0));
        test_case(0.3, 1.0, 0.0, |x| x.pmf(1));
        test_case(0.3, 1.0, 0.0, |x| x.pmf(10));
        test_case_or_nan(1.0, 1.0, f64::NAN, |x| x.pmf(0));
        test_case(1.0, 1.0, 0.0, |x| x.pmf(1));
        test_case_or_nan(3.0, 1.0, f64::NAN, |x| x.pmf(0));
        test_case(3.0, 1.0, 0.0, |x| x.pmf(1));
        test_case(3.0, 1.0, 0.0, |x| x.pmf(3));
        test_case_or_nan(10.0, 1.0, f64::NAN, |x| x.pmf(0));
        test_case(10.0, 1.0, 0.0, |x| x.pmf(1));
        test_case(10.0, 1.0, 0.0, |x| x.pmf(10));
    }
    #[test]
    fn test_ln_pmf() {
        test_case(1.0, 0.0, f64::NEG_INFINITY, |x| x.ln_pmf(0));
        test_case(1.0, 0.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_almost(3.0, 0.2, -4.828313737, 1e-08, |x| x.ln_pmf(0));
        test_almost(3.0, 0.2, -3.952845, 1e-08, |x| x.ln_pmf(1));
        test_almost(3.0, 0.2, -3.195159298, 1e-08, |x| x.ln_pmf(3));
        test_almost(10.0, 0.2, -16.09437912, 1e-08, |x| x.ln_pmf(0));
        test_almost(10.0, 0.2, -14.01493758, 1e-08, |x| x.ln_pmf(1));
        test_almost(10.0, 0.2, -6.892170503, 1e-08, |x| x.ln_pmf(10));
        test_almost(1.0, 0.3, -1.203972804, 1e-08,  |x| x.ln_pmf(0));
        test_almost(1.0, 0.3, -1.560647748, 1e-08, |x| x.ln_pmf(1));
        test_almost(3.0, 0.3, -3.611918413, 1e-08, |x| x.ln_pmf(0));
        test_case(0.3, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_case(0.3, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(3));
        test_case_or_nan(0.3, 1.0, f64::NAN, |x| x.ln_pmf(0));
        test_case(0.3, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_case(0.3, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(10));
        test_case_or_nan(1.0, 1.0, f64::NAN, |x| x.ln_pmf(0));
        test_case(1.0, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_case_or_nan(3.0, 1.0, f64::NAN, |x| x.ln_pmf(0));
        test_case(3.0, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_case(3.0, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(3));
        test_case_or_nan(10.0, 1.0, f64::NAN, |x| x.ln_pmf(0));
        test_case(10.0, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(1));
        test_case(10.0, 1.0, f64::NEG_INFINITY, |x| x.ln_pmf(10));
    }
    #[test]
    fn test_cdf() {
        test_case(1.0, 0.0, 0.0, |x| x.cdf(0.2));
        test_almost(3.0, 0.2, 0.01090199062, 1e-08, |x| x.cdf(0.2));
        test_almost(10.0, 0.2, 1.718008933e-07, 1e-08, |x| x.cdf(0.2));
        test_almost(1.0, 0.3, 0.3481950594, 1e-08, |x| x.cdf(0.2));
        test_almost(3.0, 0.3, 0.03611085389, 1e-08, |x| x.cdf(0.2));
        test_almost(1.0, 0.3, 0.3, 1e-08, |x| x.cdf(0.0));
        test_almost(1.0, 0.3, 0.3481950594, 1e-08, |x| x.cdf(0.2));
        test_almost(1.0, 0.3, 0.51, 1e-08, |x| x.cdf(1.0));
        test_almost(1.0, 0.3, 0.83193, 1e-08, |x| x.cdf(4.0));
        test_almost(1.0, 0.3, 0.9802267326, 1e-08, |x| x.cdf(10.0));
        test_case(1.0, 1.0, 1.0, |x| x.cdf(0.0));
        test_case(1.0, 1.0, 1.0, |x| x.cdf(1.0));
        test_almost(10.0, 0.75, 0.05631351471, 1e-08, |x| x.cdf(0.0));
        test_almost(10.0, 0.75, 0.1970973015, 1e-08, |x| x.cdf(1.0));
        test_almost(10.0, 0.75, 0.9960578583, 1e-08, |x| x.cdf(10.0));
    }
    #[test]
    fn test_cdf_lower_bound() {
        test_case(3.0, 0.5, 0.0, |x| x.cdf(-1.0));
    }
    #[test]
    fn test_cdf_upper_bound() {
        test_case(3.0, 0.5, 1.0, |x| x.cdf(100.0));
    }
    // TODO: figure out the best way to re-implement this test. We currently
    // do not have a good way to characterize a discrete distribution with a
    // CDF that is continuous
    //
    // #[test]
    // fn test_discrete() {
    //     test::check_discrete_distribution(&try_create(5.0, 0.3), 35);
    //     test::check_discrete_distribution(&try_create(10.0, 0.7), 21);
    // }
}
| 30.663113 | 95 | 0.518462 |
d771be077ae3a403aeb830a216ea5a2d12f7f1fb | 25,259 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use dep_graph::DepGraph;
use infer::{InferCtxt, InferOk};
use ty::{self, Ty, TypeFoldable, ToPolyTraitRef, TyCtxt, ToPredicate};
use ty::error::ExpectedFound;
use rustc_data_structures::obligation_forest::{ObligationForest, Error};
use rustc_data_structures::obligation_forest::{ForestObligation, ObligationProcessor};
use std::marker::PhantomData;
use syntax::ast;
use util::nodemap::{FxHashSet, NodeMap};
use hir::def_id::DefId;
use super::CodeAmbiguity;
use super::CodeProjectionError;
use super::CodeSelectionError;
use super::{FulfillmentError, FulfillmentErrorCode};
use super::{ObligationCause, PredicateObligation, Obligation};
use super::project;
use super::select::SelectionContext;
use super::Unimplemented;
impl<'tcx> ForestObligation for PendingPredicateObligation<'tcx> {
    type Predicate = ty::Predicate<'tcx>;
    /// The obligation forest indexes pending work by its predicate.
    fn as_predicate(&self) -> &Self::Predicate {
        &self.obligation.predicate
    }
}
/// A cache of trait predicates that have already been proven to hold,
/// shared across fulfillment contexts so that duplicate work is skipped
/// (see `check_duplicate` / `add_if_global` below).
pub struct GlobalFulfilledPredicates<'tcx> {
    // Only *global* (free-of-inference-variable) trait predicates are cached.
    set: FxHashSet<ty::PolyTraitPredicate<'tcx>>,
    // Used to record dep-graph reads when a cached result is reused.
    dep_graph: DepGraph,
}
/// The fulfillment context is used to drive trait resolution. It
/// consists of a list of obligations that must be (eventually)
/// satisfied. The job is to track which are satisfied, which yielded
/// errors, and which are still pending. At any point, users can call
/// `select_where_possible`, and the fulfilment context will try to do
/// selection, retaining only those obligations that remain
/// ambiguous. This may be helpful in pushing type inference
/// along. Once all type inference constraints have been generated, the
/// method `select_all_or_error` can be used to report any remaining
/// ambiguous cases as errors.
pub struct FulfillmentContext<'tcx> {
    // A list of all obligations that have been registered with this
    // fulfillment context. Stored as a forest so that dependencies
    // between obligations are tracked and completed subtrees pruned.
    predicates: ObligationForest<PendingPredicateObligation<'tcx>>,
    // A set of constraints that regionck must validate. Each
    // constraint has the form `T:'a`, meaning "some type `T` must
    // outlive the lifetime 'a". These constraints derive from
    // instantiated type parameters. So if you had a struct defined
    // like
    //
    //     struct Foo<T:'static> { ... }
    //
    // then in some expression `let x = Foo { ... }` it will
    // instantiate the type parameter `T` with a fresh type `$0`. At
    // the same time, it will record a region obligation of
    // `$0:'static`. This will get checked later by regionck. (We
    // can't generally check these things right away because we have
    // to wait until types are resolved.)
    //
    // These are stored in a map keyed to the id of the innermost
    // enclosing fn body / static initializer expression. This is
    // because the location where the obligation was incurred can be
    // relevant with respect to which sublifetime assumptions are in
    // place. The reason that we store under the fn-id, and not
    // something more fine-grained, is so that it is easier for
    // regionck to be sure that it has found *all* the region
    // obligations (otherwise, it's easy to fail to walk to a
    // particular node-id).
    region_obligations: NodeMap<Vec<RegionObligation<'tcx>>>,
}
/// A region constraint of the form `sup_type: sub_region`, recorded for
/// later validation by regionck (see `FulfillmentContext::region_obligations`).
#[derive(Clone)]
pub struct RegionObligation<'tcx> {
    /// The region that `sup_type` must outlive.
    pub sub_region: &'tcx ty::Region,
    /// The type constrained to outlive `sub_region`.
    pub sup_type: Ty<'tcx>,
    /// Why the obligation arose; its `body_id` keys the map it is stored in.
    pub cause: ObligationCause<'tcx>,
}
/// An obligation in the forest that has not yet been proven or refuted.
#[derive(Clone, Debug)]
pub struct PendingPredicateObligation<'tcx> {
    pub obligation: PredicateObligation<'tcx>,
    /// Inference variables this obligation last stalled on; it is not
    /// re-processed until at least one of them has been resolved
    /// (see `process_predicate`).
    pub stalled_on: Vec<Ty<'tcx>>,
}
impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> {
    /// Creates a new fulfillment context.
    pub fn new() -> FulfillmentContext<'tcx> {
        FulfillmentContext {
            predicates: ObligationForest::new(),
            region_obligations: NodeMap(),
        }
    }
    /// "Normalize" a projection type `<SomeType as SomeTrait>::X` by
    /// creating a fresh type variable `$0` as well as a projection
    /// predicate `<SomeType as SomeTrait>::X == $0`. When the
    /// inference engine runs, it will attempt to find an impl of
    /// `SomeTrait` or a where clause that lets us unify `$0` with
    /// something concrete. If this fails, we'll unify `$0` with
    /// `projection_ty` again.
    pub fn normalize_projection_type(&mut self,
                                     infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                     projection_ty: ty::ProjectionTy<'tcx>,
                                     cause: ObligationCause<'tcx>)
                                     -> Ty<'tcx>
    {
        debug!("normalize_projection_type(projection_ty={:?})",
               projection_ty);
        assert!(!projection_ty.has_escaping_regions());
        // FIXME(#20304) -- cache
        let mut selcx = SelectionContext::new(infcx);
        let normalized = project::normalize_projection_type(&mut selcx, projection_ty, cause, 0);
        // Normalization may itself produce obligations; register them too.
        for obligation in normalized.obligations {
            self.register_predicate_obligation(infcx, obligation);
        }
        debug!("normalize_projection_type: result={:?}", normalized.value);
        normalized.value
    }
    /// Registers the obligation that `ty` implements the trait `def_id`
    /// (with no additional trait type parameters).
    pub fn register_bound(&mut self,
                          infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                          ty: Ty<'tcx>,
                          def_id: DefId,
                          cause: ObligationCause<'tcx>)
    {
        let trait_ref = ty::TraitRef {
            def_id: def_id,
            substs: infcx.tcx.mk_substs_trait(ty, &[]),
        };
        self.register_predicate_obligation(infcx, Obligation {
            cause: cause,
            recursion_depth: 0,
            predicate: trait_ref.to_predicate()
        });
    }
    /// Records the region constraint `t_a: r_b` for later validation
    /// by regionck.
    pub fn register_region_obligation(&mut self,
                                      t_a: Ty<'tcx>,
                                      r_b: &'tcx ty::Region,
                                      cause: ObligationCause<'tcx>)
    {
        register_region_obligation(t_a, r_b, cause, &mut self.region_obligations);
    }
    /// Adds `obligation` to the set of predicates to be solved, unless
    /// it is already known to hold globally.
    pub fn register_predicate_obligation(&mut self,
                                         infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                         obligation: PredicateObligation<'tcx>)
    {
        // this helps to reduce duplicate errors, as well as making
        // debug output much nicer to read and so on.
        let obligation = infcx.resolve_type_vars_if_possible(&obligation);
        debug!("register_predicate_obligation(obligation={:?})", obligation);
        assert!(!infcx.is_in_snapshot());
        if infcx.tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) {
            debug!("register_predicate_obligation: duplicate");
            return
        }
        self.predicates.register_obligation(PendingPredicateObligation {
            obligation: obligation,
            stalled_on: vec![]
        });
    }
    /// Returns the region obligations accumulated for the body `body_id`
    /// (empty slice if none were recorded).
    pub fn region_obligations(&self,
                              body_id: ast::NodeId)
                              -> &[RegionObligation<'tcx>]
    {
        match self.region_obligations.get(&body_id) {
            None => Default::default(),
            Some(vec) => vec,
        }
    }
    /// Drives selection to a fixed point, then reports any obligations
    /// that are still ambiguous as errors.
    pub fn select_all_or_error(&mut self,
                               infcx: &InferCtxt<'a, 'gcx, 'tcx>)
                               -> Result<(),Vec<FulfillmentError<'tcx>>>
    {
        self.select_where_possible(infcx)?;
        let errors: Vec<_> =
            self.predicates.to_errors(CodeAmbiguity)
                           .into_iter()
                           .map(|e| to_fulfillment_error(e))
                           .collect();
        if errors.is_empty() {
            Ok(())
        } else {
            Err(errors)
        }
    }
    /// Attempts selection, leaving ambiguous obligations pending for a
    /// later call once more type information is available.
    pub fn select_where_possible(&mut self,
                                 infcx: &InferCtxt<'a, 'gcx, 'tcx>)
                                 -> Result<(),Vec<FulfillmentError<'tcx>>>
    {
        let mut selcx = SelectionContext::new(infcx);
        self.select(&mut selcx)
    }
    /// Returns the obligations that are not yet proven or refuted.
    pub fn pending_obligations(&self) -> Vec<PendingPredicateObligation<'tcx>> {
        self.predicates.pending_obligations()
    }
    /// Attempts to select obligations using `selcx`, looping until a
    /// pass makes no further progress ("stalls").
    fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
              -> Result<(),Vec<FulfillmentError<'tcx>>> {
        debug!("select(obligation-forest-size={})", self.predicates.len());
        let mut errors = Vec::new();
        loop {
            debug!("select: starting another iteration");
            // Process pending obligations.
            let outcome = self.predicates.process_obligations(&mut FulfillProcessor {
                selcx: selcx,
                region_obligations: &mut self.region_obligations,
            });
            debug!("select: outcome={:?}", outcome);
            // these are obligations that were proven to be true.
            // Record global successes in the shared cache.
            for pending_obligation in outcome.completed {
                let predicate = &pending_obligation.obligation.predicate;
                selcx.tcx().fulfilled_predicates.borrow_mut()
                           .add_if_global(selcx.tcx(), predicate);
            }
            errors.extend(
                outcome.errors.into_iter()
                              .map(|e| to_fulfillment_error(e)));
            // If nothing new was added, no need to keep looping.
            if outcome.stalled {
                break;
            }
        }
        debug!("select({} predicates remaining, {} errors) done",
               self.predicates.len(), errors.len());
        if errors.is_empty() {
            Ok(())
        } else {
            Err(errors)
        }
    }
}
/// Adapter handed to `ObligationForest::process_obligations`: bundles the
/// selection context with the region-obligation accumulator it writes into.
struct FulfillProcessor<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> {
    selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
    region_obligations: &'a mut NodeMap<Vec<RegionObligation<'tcx>>>,
}
impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, 'tcx> {
    type Obligation = PendingPredicateObligation<'tcx>;
    type Error = FulfillmentErrorCode<'tcx>;
    /// Processes one pending obligation, wrapping any sub-obligations it
    /// spawns as fresh (un-stalled) pending obligations.
    fn process_obligation(&mut self,
                          obligation: &mut Self::Obligation)
                          -> Result<Option<Vec<Self::Obligation>>, Self::Error>
    {
        process_predicate(self.selcx,
                          obligation,
                          self.region_obligations)
            .map(|os| os.map(|os| os.into_iter().map(|o| PendingPredicateObligation {
                obligation: o,
                stalled_on: vec![]
            }).collect()))
    }
    /// Called when the forest detects a cycle; a cycle is acceptable only
    /// when every participant is coinductive, otherwise it is reported as
    /// an overflow error.
    fn process_backedge<'c, I>(&mut self, cycle: I,
                               _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>)
        where I: Clone + Iterator<Item=&'c PendingPredicateObligation<'tcx>>,
    {
        if coinductive_match(self.selcx, cycle.clone()) {
            debug!("process_child_obligations: coinductive match");
        } else {
            let cycle : Vec<_> = cycle.map(|c| c.obligation.clone()).collect();
            self.selcx.infcx().report_overflow_error_cycle(&cycle);
        }
    }
}
/// Collects every inference type variable appearing anywhere inside the
/// input types of the trait ref `t`.
fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
                                       t: ty::PolyTraitRef<'tcx>) -> Vec<Ty<'tcx>>
{
    let mut vars = Vec::new();
    // Skipping the binder is ok because this check does not care about regions.
    for input_ty in t.skip_binder().input_types() {
        let resolved = selcx.infcx().resolve_type_vars_if_possible(&input_ty);
        if !resolved.has_infer_types() {
            continue;
        }
        for subty in resolved.walk() {
            if let ty::TyInfer(_) = subty.sty {
                vars.push(subty);
            }
        }
    }
    vars
}
/// Processes a predicate obligation and returns either:
/// - `Ok(Some(v))` if the predicate is true, presuming that `v` are also true
/// - `Ok(None)` if we don't have enough info to be sure
/// - `Err` if the predicate does not hold
fn process_predicate<'a, 'gcx, 'tcx>(
    selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
    pending_obligation: &mut PendingPredicateObligation<'tcx>,
    region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
    -> Result<Option<Vec<PredicateObligation<'tcx>>>,
              FulfillmentErrorCode<'tcx>>
{
    // if we were stalled on some unresolved variables, first check
    // whether any of them have been resolved; if not, don't bother
    // doing more work yet
    if !pending_obligation.stalled_on.is_empty() {
        if pending_obligation.stalled_on.iter().all(|&ty| {
            let resolved_ty = selcx.infcx().shallow_resolve(&ty);
            resolved_ty == ty // nothing changed here
        }) {
            debug!("process_predicate: pending obligation {:?} still stalled on {:?}",
                   selcx.infcx().resolve_type_vars_if_possible(&pending_obligation.obligation),
                   pending_obligation.stalled_on);
            return Ok(None);
        }
        // Something changed: clear the stall list and re-process below.
        pending_obligation.stalled_on = vec![];
    }
    let obligation = &mut pending_obligation.obligation;
    if obligation.predicate.has_infer_types() {
        obligation.predicate = selcx.infcx().resolve_type_vars_if_possible(&obligation.predicate);
    }
    match obligation.predicate {
        ty::Predicate::Trait(ref data) => {
            // Skip work for globally-cached successes.
            if selcx.tcx().fulfilled_predicates.borrow().check_duplicate_trait(data) {
                return Ok(Some(vec![]));
            }
            let trait_obligation = obligation.with(data.clone());
            match selcx.select(&trait_obligation) {
                Ok(Some(vtable)) => {
                    debug!("selecting trait `{:?}` at depth {} yielded Ok(Some)",
                           data, obligation.recursion_depth);
                    Ok(Some(vtable.nested_obligations()))
                }
                Ok(None) => {
                    debug!("selecting trait `{:?}` at depth {} yielded Ok(None)",
                           data, obligation.recursion_depth);
                    // This is a bit subtle: for the most part, the
                    // only reason we can fail to make progress on
                    // trait selection is because we don't have enough
                    // information about the types in the trait. One
                    // exception is that we sometimes haven't decided
                    // what kind of closure a closure is. *But*, in
                    // that case, it turns out, the type of the
                    // closure will also change, because the closure
                    // also includes references to its upvars as part
                    // of its type, and those types are resolved at
                    // the same time.
                    //
                    // FIXME(#32286) logic seems false if no upvars
                    pending_obligation.stalled_on =
                        trait_ref_type_vars(selcx, data.to_poly_trait_ref());
                    debug!("process_predicate: pending obligation {:?} now stalled on {:?}",
                           selcx.infcx().resolve_type_vars_if_possible(obligation),
                           pending_obligation.stalled_on);
                    Ok(None)
                }
                Err(selection_err) => {
                    info!("selecting trait `{:?}` at depth {} yielded Err",
                          data, obligation.recursion_depth);
                    Err(CodeSelectionError(selection_err))
                }
            }
        }
        // `A == B`: delegate to the inference engine's equality machinery.
        ty::Predicate::Equate(ref binder) => {
            match selcx.infcx().equality_predicate(&obligation.cause, binder) {
                Ok(InferOk { obligations, value: () }) => {
                    Ok(Some(obligations))
                },
                Err(_) => Err(CodeSelectionError(Unimplemented)),
            }
        }
        // `'a: 'b`: recorded directly with the inference context.
        ty::Predicate::RegionOutlives(ref binder) => {
            match selcx.infcx().region_outlives_predicate(&obligation.cause, binder) {
                Ok(()) => Ok(Some(Vec::new())),
                Err(_) => Err(CodeSelectionError(Unimplemented)),
            }
        }
        ty::Predicate::TypeOutlives(ref binder) => {
            // Check if there are higher-ranked regions.
            match selcx.tcx().no_late_bound_regions(binder) {
                // If there are, inspect the underlying type further.
                None => {
                    // Convert from `Binder<OutlivesPredicate<Ty, Region>>` to `Binder<Ty>`.
                    let binder = binder.map_bound_ref(|pred| pred.0);
                    // Check if the type has any bound regions.
                    match selcx.tcx().no_late_bound_regions(&binder) {
                        // If so, this obligation is an error (for now). Eventually we should be
                        // able to support additional cases here, like `for<'a> &'a str: 'a`.
                        None => {
                            Err(CodeSelectionError(Unimplemented))
                        }
                        // Otherwise, we have something of the form
                        // `for<'a> T: 'a where 'a not in T`, which we can treat as `T: 'static`.
                        Some(t_a) => {
                            let r_static = selcx.tcx().mk_region(ty::ReStatic);
                            register_region_obligation(t_a, r_static,
                                                       obligation.cause.clone(),
                                                       region_obligations);
                            Ok(Some(vec![]))
                        }
                    }
                }
                // If there aren't, register the obligation.
                Some(ty::OutlivesPredicate(t_a, r_b)) => {
                    register_region_obligation(t_a, r_b,
                                               obligation.cause.clone(),
                                               region_obligations);
                    Ok(Some(vec![]))
                }
            }
        }
        ty::Predicate::Projection(ref data) => {
            let project_obligation = obligation.with(data.clone());
            match project::poly_project_and_unify_type(selcx, &project_obligation) {
                // Ambiguous: stall on the trait ref's inference variables.
                Ok(None) => {
                    pending_obligation.stalled_on =
                        trait_ref_type_vars(selcx, data.to_poly_trait_ref());
                    Ok(None)
                }
                Ok(v) => Ok(v),
                Err(e) => Err(CodeProjectionError(e))
            }
        }
        // Object safety can be decided immediately from the trait definition.
        ty::Predicate::ObjectSafe(trait_def_id) => {
            if !selcx.tcx().is_object_safe(trait_def_id) {
                Err(CodeSelectionError(Unimplemented))
            } else {
                Ok(Some(Vec::new()))
            }
        }
        ty::Predicate::ClosureKind(closure_def_id, kind) => {
            match selcx.infcx().closure_kind(closure_def_id) {
                Some(closure_kind) => {
                    if closure_kind.extends(kind) {
                        Ok(Some(vec![]))
                    } else {
                        Err(CodeSelectionError(Unimplemented))
                    }
                }
                // Closure kind not yet inferred: try again later.
                None => {
                    Ok(None)
                }
            }
        }
        ty::Predicate::WellFormed(ty) => {
            match ty::wf::obligations(selcx.infcx(), obligation.cause.body_id,
                                      ty, obligation.cause.span) {
                // `None` here means the type is still too unresolved to
                // enumerate its well-formedness obligations; stall on it.
                None => {
                    pending_obligation.stalled_on = vec![ty];
                    Ok(None)
                }
                s => Ok(s)
            }
        }
        ty::Predicate::Subtype(ref subtype) => {
            match selcx.infcx().subtype_predicate(&obligation.cause, subtype) {
                None => {
                    // none means that both are unresolved
                    pending_obligation.stalled_on = vec![subtype.skip_binder().a,
                                                         subtype.skip_binder().b];
                    Ok(None)
                }
                Some(Ok(ok)) => {
                    Ok(Some(ok.obligations))
                }
                Some(Err(err)) => {
                    let expected_found = ExpectedFound::new(subtype.skip_binder().a_is_expected,
                                                            subtype.skip_binder().a,
                                                            subtype.skip_binder().b);
                    Err(FulfillmentErrorCode::CodeSubtypeError(expected_found, err))
                }
            }
        }
    }
}
/// For defaulted traits, we use a co-inductive strategy to solve, so
/// that recursion is ok. Returns true only when *every* obligation in
/// the detected cycle is itself coinductive (a defaulted trait) — a
/// single non-coinductive participant makes the whole cycle invalid.
fn coinductive_match<'a,'c,'gcx,'tcx,I>(selcx: &mut SelectionContext<'a,'gcx,'tcx>,
                                        mut cycle: I) -> bool
    where I: Iterator<Item=&'c PendingPredicateObligation<'tcx>>,
          'tcx: 'c
{
    cycle.all(|bt_obligation| {
        let result = coinductive_obligation(selcx, &bt_obligation.obligation);
        debug!("coinductive_match: bt_obligation={:?} coinductive={}",
               bt_obligation, result);
        result
    })
}
/// An obligation is coinductive exactly when it is a trait predicate
/// whose trait has a default impl.
fn coinductive_obligation<'a,'gcx,'tcx>(selcx: &SelectionContext<'a,'gcx,'tcx>,
                                        obligation: &PredicateObligation<'tcx>)
                                        -> bool {
    if let ty::Predicate::Trait(ref data) = obligation.predicate {
        selcx.tcx().trait_has_default_impl(data.def_id())
    } else {
        false
    }
}
/// Records the region constraint `t_a: r_b`, keyed by the body id of
/// its cause, for later validation by regionck.
fn register_region_obligation<'tcx>(t_a: Ty<'tcx>,
                                    r_b: &'tcx ty::Region,
                                    cause: ObligationCause<'tcx>,
                                    region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
{
    let obligation = RegionObligation {
        sup_type: t_a,
        sub_region: r_b,
        cause: cause,
    };
    debug!("register_region_obligation({:?}, cause={:?})",
           obligation, obligation.cause);
    let body_id = obligation.cause.body_id;
    region_obligations.entry(body_id)
                      .or_insert_with(Vec::new)
                      .push(obligation);
}
impl<'a, 'gcx, 'tcx> GlobalFulfilledPredicates<'gcx> {
    /// Creates an empty cache backed by the given dep-graph.
    pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'gcx> {
        GlobalFulfilledPredicates {
            set: FxHashSet(),
            dep_graph: dep_graph,
        }
    }
    /// Returns true if `key` is a trait predicate already proven globally.
    /// Non-trait predicates are never cached.
    pub fn check_duplicate(&self, key: &ty::Predicate<'tcx>) -> bool {
        if let ty::Predicate::Trait(ref data) = *key {
            self.check_duplicate_trait(data)
        } else {
            false
        }
    }
    /// Returns true if the trait predicate `data` was already proven
    /// globally, recording the corresponding dep-graph read.
    pub fn check_duplicate_trait(&self, data: &ty::PolyTraitPredicate<'tcx>) -> bool {
        // For the global predicate registry, when we find a match, it
        // may have been computed by some other task, so we want to
        // add a read from the node corresponding to the predicate
        // processing to make sure we get the transitive dependencies.
        if self.set.contains(data) {
            debug_assert!(data.is_global());
            self.dep_graph.read(data.dep_node());
            debug!("check_duplicate: global predicate `{:?}` already proved elsewhere", data);
            true
        } else {
            false
        }
    }
    /// Caches `key` if it is a global trait predicate; other predicate
    /// kinds (and non-global predicates) are ignored.
    fn add_if_global(&mut self, tcx: TyCtxt<'a, 'gcx, 'tcx>, key: &ty::Predicate<'tcx>) {
        if let ty::Predicate::Trait(ref data) = *key {
            // We only add things to the global predicate registry
            // after the current task has proved them, and hence
            // already has the required read edges, so we don't need
            // to add any more edges here.
            if data.is_global() {
                if let Some(data) = tcx.lift_to_global(data) {
                    if self.set.insert(data.clone()) {
                        debug!("add_if_global: global predicate `{:?}` added", data);
                    }
                }
            }
        }
    }
}
/// Converts a forest error into a `FulfillmentError`, keeping only the
/// root obligation of the backtrace (the one that actually failed).
fn to_fulfillment_error<'tcx>(
    error: Error<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>>)
    -> FulfillmentError<'tcx>
{
    let mut backtrace = error.backtrace.into_iter();
    let obligation = backtrace.next().unwrap().obligation;
    FulfillmentError::new(obligation, error.error)
}
| 39.966772 | 98 | 0.548755 |
d6a0d24bef360dea669754ea951c436952b821c2 | 10,871 | // Take a look at the license at the top of the repository in the LICENSE file.
use glib::subclass::prelude::*;
use glib::translate::*;
use glib::{Cast, Error};
use crate::Cancellable;
use crate::InputStream;
use crate::OutputStream;
use crate::OutputStreamSpliceFlags;
use std::ptr;
/// Trait implemented by `GOutputStream` subclasses. Every method has a
/// default implementation that chains up to the parent class, so
/// implementors only override what they need.
pub trait OutputStreamImpl: ObjectImpl + OutputStreamImplExt + Send {
    /// Writes `buffer` to the stream, returning the number of bytes written.
    fn write(
        &self,
        stream: &Self::Type,
        buffer: &[u8],
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error> {
        self.parent_write(stream, buffer, cancellable)
    }
    /// Closes the stream.
    fn close(&self, stream: &Self::Type, cancellable: Option<&Cancellable>) -> Result<(), Error> {
        self.parent_close(stream, cancellable)
    }
    /// Flushes any buffered data to the underlying sink.
    fn flush(&self, stream: &Self::Type, cancellable: Option<&Cancellable>) -> Result<(), Error> {
        self.parent_flush(stream, cancellable)
    }
    /// Copies `input_stream` into this stream, returning the number of
    /// bytes spliced.
    fn splice(
        &self,
        stream: &Self::Type,
        input_stream: &InputStream,
        flags: OutputStreamSpliceFlags,
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error> {
        self.parent_splice(stream, input_stream, flags, cancellable)
    }
}
/// Chain-up helpers: each `parent_*` method invokes the parent class's
/// corresponding virtual function (see the blanket impl below).
pub trait OutputStreamImplExt: ObjectSubclass {
    /// Chains up to the parent class's `write_fn` vfunc.
    fn parent_write(
        &self,
        stream: &Self::Type,
        buffer: &[u8],
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error>;
    /// Chains up to the parent class's `close_fn` vfunc.
    fn parent_close(
        &self,
        stream: &Self::Type,
        cancellable: Option<&Cancellable>,
    ) -> Result<(), Error>;
    /// Chains up to the parent class's `flush` vfunc.
    fn parent_flush(
        &self,
        stream: &Self::Type,
        cancellable: Option<&Cancellable>,
    ) -> Result<(), Error>;
    /// Chains up to the parent class's `splice` vfunc.
    fn parent_splice(
        &self,
        stream: &Self::Type,
        input_stream: &InputStream,
        flags: OutputStreamSpliceFlags,
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error>;
}
impl<T: OutputStreamImpl> OutputStreamImplExt for T {
    fn parent_write(
        &self,
        stream: &Self::Type,
        buffer: &[u8],
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error> {
        unsafe {
            // SAFETY-ish: the parent class pointer comes from T::type_data(),
            // so the cast to GOutputStreamClass matches the declared parent
            // type — relies on the glib subclassing machinery.
            let data = T::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GOutputStreamClass;
            // `write_fn` is mandatory here; missing it is a programmer error.
            let f = (*parent_class)
                .write_fn
                .expect("No parent class implementation for \"write\"");
            let mut err = ptr::null_mut();
            let res = f(
                stream.unsafe_cast_ref::<OutputStream>().to_glib_none().0,
                // The C vfunc takes a non-const pointer but does not write
                // through it, hence the mut_override.
                mut_override(buffer.as_ptr()),
                buffer.len(),
                cancellable.to_glib_none().0,
                &mut err,
            );
            if res == -1 {
                Err(from_glib_full(err))
            } else {
                // A successful write never reports more bytes than requested.
                assert!(res >= 0);
                let res = res as usize;
                assert!(res <= buffer.len());
                Ok(res)
            }
        }
    }
    fn parent_close(
        &self,
        stream: &Self::Type,
        cancellable: Option<&Cancellable>,
    ) -> Result<(), Error> {
        unsafe {
            let data = T::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GOutputStreamClass;
            let mut err = ptr::null_mut();
            // `close_fn` is optional: an absent vfunc counts as success.
            if let Some(f) = (*parent_class).close_fn {
                if from_glib(f(
                    stream.unsafe_cast_ref::<OutputStream>().to_glib_none().0,
                    cancellable.to_glib_none().0,
                    &mut err,
                )) {
                    Ok(())
                } else {
                    Err(from_glib_full(err))
                }
            } else {
                Ok(())
            }
        }
    }
    fn parent_flush(
        &self,
        stream: &Self::Type,
        cancellable: Option<&Cancellable>,
    ) -> Result<(), Error> {
        unsafe {
            let data = T::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GOutputStreamClass;
            let mut err = ptr::null_mut();
            // `flush` is optional: an absent vfunc counts as success.
            if let Some(f) = (*parent_class).flush {
                if from_glib(f(
                    stream.unsafe_cast_ref::<OutputStream>().to_glib_none().0,
                    cancellable.to_glib_none().0,
                    &mut err,
                )) {
                    Ok(())
                } else {
                    Err(from_glib_full(err))
                }
            } else {
                Ok(())
            }
        }
    }
    fn parent_splice(
        &self,
        stream: &Self::Type,
        input_stream: &InputStream,
        flags: OutputStreamSpliceFlags,
        cancellable: Option<&Cancellable>,
    ) -> Result<usize, Error> {
        unsafe {
            let data = T::type_data();
            let parent_class = data.as_ref().parent_class() as *mut ffi::GOutputStreamClass;
            let mut err = ptr::null_mut();
            // Unlike close/flush, `splice` must be provided by the parent.
            let f = (*parent_class)
                .splice
                .expect("No parent class implementation for \"splice\"");
            let res = f(
                stream.unsafe_cast_ref::<OutputStream>().to_glib_none().0,
                input_stream.to_glib_none().0,
                flags.into_glib(),
                cancellable.to_glib_none().0,
                &mut err,
            );
            if res == -1 {
                Err(from_glib_full(err))
            } else {
                assert!(res >= 0);
                let res = res as usize;
                Ok(res)
            }
        }
    }
}
unsafe impl<T: OutputStreamImpl> IsSubclassable<T> for OutputStream {
    /// Installs the C trampolines below into the class vtable so GIO
    /// dispatches the vfuncs to the Rust `OutputStreamImpl` methods.
    fn class_init(class: &mut ::glib::Class<Self>) {
        Self::parent_class_init::<T>(class);
        let klass = class.as_mut();
        klass.write_fn = Some(stream_write::<T>);
        klass.close_fn = Some(stream_close::<T>);
        klass.flush = Some(stream_flush::<T>);
        klass.splice = Some(stream_splice::<T>);
    }
}
// C trampoline for the `write_fn` vfunc: forwards to the Rust impl and
// translates the Result back into the C convention (-1 + GError on failure).
// NOTE(review): assumes GIO always passes a valid stream pointer and a
// readable buffer of `count` bytes — per the GOutputStream vfunc contract.
unsafe extern "C" fn stream_write<T: OutputStreamImpl>(
    ptr: *mut ffi::GOutputStream,
    buffer: *mut u8,
    count: usize,
    cancellable: *mut ffi::GCancellable,
    err: *mut *mut glib::ffi::GError,
) -> isize {
    use std::isize;
    use std::slice;
    // The C return type is isize, so larger requests cannot be reported.
    assert!(count <= isize::MAX as usize);
    let instance = &*(ptr as *mut T::Instance);
    let imp = instance.impl_();
    let wrap: Borrowed<OutputStream> = from_glib_borrow(ptr);
    match imp.write(
        wrap.unsafe_cast_ref(),
        slice::from_raw_parts(buffer as *const u8, count),
        Option::<Cancellable>::from_glib_borrow(cancellable)
            .as_ref()
            .as_ref(),
    ) {
        Ok(res) => {
            // The impl must not claim to have written more than requested.
            assert!(res <= isize::MAX as usize);
            assert!(res <= count);
            res as isize
        }
        Err(e) => {
            if !err.is_null() {
                *err = e.into_raw();
            }
            -1
        }
    }
}
// C trampoline for the `close_fn` vfunc: forwards to the Rust impl and
// maps Ok/Err to TRUE/FALSE (+ GError) as the C API expects.
unsafe extern "C" fn stream_close<T: OutputStreamImpl>(
    ptr: *mut ffi::GOutputStream,
    cancellable: *mut ffi::GCancellable,
    err: *mut *mut glib::ffi::GError,
) -> glib::ffi::gboolean {
    let instance = &*(ptr as *mut T::Instance);
    let imp = instance.impl_();
    let wrap: Borrowed<OutputStream> = from_glib_borrow(ptr);
    match imp.close(
        wrap.unsafe_cast_ref(),
        Option::<Cancellable>::from_glib_borrow(cancellable)
            .as_ref()
            .as_ref(),
    ) {
        Ok(_) => glib::ffi::GTRUE,
        Err(e) => {
            if !err.is_null() {
                *err = e.into_raw();
            }
            glib::ffi::GFALSE
        }
    }
}
// C trampoline for the `flush` vfunc: forwards to the Rust impl and
// maps Ok/Err to TRUE/FALSE (+ GError) as the C API expects.
unsafe extern "C" fn stream_flush<T: OutputStreamImpl>(
    ptr: *mut ffi::GOutputStream,
    cancellable: *mut ffi::GCancellable,
    err: *mut *mut glib::ffi::GError,
) -> glib::ffi::gboolean {
    let instance = &*(ptr as *mut T::Instance);
    let imp = instance.impl_();
    let wrap: Borrowed<OutputStream> = from_glib_borrow(ptr);
    match imp.flush(
        wrap.unsafe_cast_ref(),
        Option::<Cancellable>::from_glib_borrow(cancellable)
            .as_ref()
            .as_ref(),
    ) {
        Ok(_) => glib::ffi::GTRUE,
        Err(e) => {
            if !err.is_null() {
                *err = e.into_raw();
            }
            glib::ffi::GFALSE
        }
    }
}
unsafe extern "C" fn stream_splice<T: OutputStreamImpl>(
ptr: *mut ffi::GOutputStream,
input_stream: *mut ffi::GInputStream,
flags: ffi::GOutputStreamSpliceFlags,
cancellable: *mut ffi::GCancellable,
err: *mut *mut glib::ffi::GError,
) -> isize {
let instance = &*(ptr as *mut T::Instance);
let imp = instance.impl_();
let wrap: Borrowed<OutputStream> = from_glib_borrow(ptr);
match imp.splice(
wrap.unsafe_cast_ref(),
&from_glib_borrow(input_stream),
from_glib(flags),
Option::<Cancellable>::from_glib_borrow(cancellable)
.as_ref()
.as_ref(),
) {
Ok(res) => {
use std::isize;
assert!(res <= isize::MAX as usize);
res as isize
}
Err(e) => {
if !err.is_null() {
*err = e.into_raw();
}
-1
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::prelude::*;
    use std::cell::RefCell;
    mod imp {
        use super::*;
        // Minimal `OutputStream` subclass: accumulates the sum of all bytes
        // ever written, to prove the `write` virtual method is dispatched.
        #[derive(Default)]
        pub struct SimpleOutputStream {
            pub sum: RefCell<usize>,
        }
        #[glib::object_subclass]
        impl ObjectSubclass for SimpleOutputStream {
            const NAME: &'static str = "SimpleOutputStream";
            type Type = super::SimpleOutputStream;
            type ParentType = OutputStream;
        }
        impl ObjectImpl for SimpleOutputStream {}
        impl OutputStreamImpl for SimpleOutputStream {
            fn write(
                &self,
                _stream: &Self::Type,
                buffer: &[u8],
                _cancellable: Option<&Cancellable>,
            ) -> Result<usize, Error> {
                // Add every byte's value to the running sum; report the whole
                // buffer as consumed.
                let mut sum = self.sum.borrow_mut();
                for b in buffer {
                    *sum += *b as usize;
                }
                Ok(buffer.len())
            }
        }
    }
    glib::wrapper! {
        pub struct SimpleOutputStream(ObjectSubclass<imp::SimpleOutputStream>)
        @extends OutputStream;
    }
    // Writing [1..5] through the public API must route into the subclass's
    // `write` (sum becomes 1+2+3+4+5 = 15) and report 5 bytes written.
    #[test]
    fn test_simple_stream() {
        let stream = glib::Object::new::<SimpleOutputStream>(&[]).unwrap();
        assert_eq!(
            *imp::SimpleOutputStream::from_instance(&stream).sum.borrow(),
            0
        );
        assert_eq!(
            stream.write(&[1, 2, 3, 4, 5], crate::Cancellable::NONE),
            Ok(5)
        );
        assert_eq!(
            *imp::SimpleOutputStream::from_instance(&stream).sum.borrow(),
            15
        );
    }
}
| 28.458115 | 98 | 0.511912 |
71945983d3c39e335847624056197c0e9c7aa6d1 | 13,337 | use form_urlencoded::Serializer;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{
convert::{Into, TryFrom, TryInto},
vec::IntoIter,
};
use crate::app::models::*;
/// JSON body listing Spotify track URIs (e.g. for playlist mutation calls).
#[derive(Serialize)]
pub struct Uris {
    pub uris: Vec<String>,
}
/// Kinds of entity a Spotify search request can ask for.
pub enum SearchType {
    Artist,
    Album,
}
impl SearchType {
    /// Wire name of this search type, as used in the `type=` query field.
    fn into_string(self) -> &'static str {
        match self {
            SearchType::Album => "album",
            SearchType::Artist => "artist",
        }
    }
}
/// Parameters for one paginated Spotify search request.
pub struct SearchQuery {
    /// Free-text query; normalized before serialization.
    pub query: String,
    /// Entity kinds to search for (joined with commas on the wire).
    pub types: Vec<SearchType>,
    pub limit: usize,
    pub offset: usize,
}
impl SearchQuery {
pub fn into_query_string(self) -> String {
let mut types = self
.types
.into_iter()
.fold(String::new(), |acc, t| acc + t.into_string() + ",");
types.pop();
let re = Regex::new(r"(\W|\s)+").unwrap();
let query = re.replace_all(&self.query[..], " ");
let serialized = Serializer::new(String::new())
.append_pair("q", query.as_ref())
.append_pair("offset", &self.offset.to_string()[..])
.append_pair("limit", &self.limit.to_string()[..])
.append_pair("market", "from_token")
.finish();
format!("type={}&{}", types, serialized)
}
}
/// One page of a paginated Spotify API response.
///
/// All fields except `total` are optional in the wire format; accessors on the
/// `impl` supply defaults.
#[derive(Deserialize, Debug, Clone)]
pub struct Page<T> {
    items: Option<Vec<T>>,
    offset: Option<usize>,
    limit: Option<usize>,
    total: usize,
}
impl<T> Page<T> {
    /// Builds a fully-populated single page out of an in-memory item list
    /// (offset 0, limit and total equal to the list length).
    fn new(items: Vec<T>) -> Self {
        let l = items.len();
        Self {
            total: l,
            items: Some(items),
            offset: Some(0),
            limit: Some(l),
        }
    }
    /// Maps every item through `mapper`, preserving the pagination metadata.
    fn map<Mapper, U>(self, mapper: Mapper) -> Page<U>
    where
        Mapper: Fn(T) -> U,
    {
        let Page {
            items,
            offset,
            limit,
            total,
        } = self;
        Page {
            items: items.map(|item| item.into_iter().map(mapper).collect()),
            offset,
            limit,
            total,
        }
    }
    /// Page size: the declared limit, else the item count, else 50
    /// (presumably Spotify's default page size — TODO confirm).
    pub fn limit(&self) -> usize {
        self.limit
            .or_else(|| Some(self.items.as_ref()?.len()))
            .unwrap_or(50)
    }
    /// Total number of items across all pages.
    pub fn total(&self) -> usize {
        self.total
    }
    /// Offset of this page within the full result set (0 when absent).
    pub fn offset(&self) -> usize {
        self.offset.unwrap_or(0)
    }
}
impl<T> IntoIterator for Page<T> {
    type Item = T;
    type IntoIter = IntoIter<Self::Item>;
    /// Iterates over this page's items; a page with `items: None` yields
    /// nothing.
    fn into_iter(self) -> Self::IntoIter {
        // `unwrap_or_default()` is the idiomatic form of
        // `unwrap_or_else(Vec::new)` (clippy: unwrap_or_else_default).
        self.items.unwrap_or_default().into_iter()
    }
}
impl<T> Default for Page<T> {
    /// The empty page: no items, zero total, zero-sized window at offset 0.
    fn default() -> Self {
        Self {
            items: None,
            total: 0,
            offset: Some(0),
            limit: Some(0),
        }
    }
}
/// Trait for API entities carrying a set of cover images.
trait WithImages {
    /// All images attached to the entity.
    fn images(&self) -> &[Image];
    /// Returns the image minimizing `criterion`.
    ///
    /// Single `min_by` pass (O(n), no intermediate Vec) instead of the
    /// previous collect-sort-first (O(n log n) plus an allocation).
    /// `criterion` is still evaluated once per image, and ties still resolve
    /// to the first minimal element, matching the old stable sort.
    ///
    /// # Panics
    /// Panics if two criterion values are incomparable (e.g. NaN), exactly as
    /// the previous `partial_cmp(..).unwrap()` did.
    fn best_image<T: PartialOrd, F: Fn(&Image) -> T>(&self, criterion: F) -> Option<&Image> {
        self.images()
            .iter()
            .map(|image| (criterion(image), image))
            .min_by(|a, b| (a.0).partial_cmp(&b.0).unwrap())
            .map(|(_, image)| image)
    }
    /// Image whose width is closest to `width`; unknown widths count as 0.
    fn best_image_for_width(&self, width: i32) -> Option<&Image> {
        self.best_image(|i| (width - i.width.unwrap_or(0) as i32).abs())
    }
}
/// A playlist as returned by the Spotify Web API.
#[derive(Deserialize, Debug, Clone)]
pub struct Playlist {
    pub id: String,
    pub name: String,
    pub images: Vec<Image>,
    /// Track listing page embedded in the playlist payload.
    pub tracks: Page<PlaylistTrack>,
    pub owner: PlaylistOwner,
}
/// The user who owns a playlist.
#[derive(Deserialize, Debug, Clone)]
pub struct PlaylistOwner {
    pub id: String,
    pub display_name: String,
}
impl WithImages for Playlist {
    /// Playlist cover images.
    fn images(&self) -> &[Image] {
        self.images.as_slice()
    }
}
/// One entry of a playlist's track page.
#[derive(Deserialize, Debug, Clone)]
pub struct PlaylistTrack {
    /// True for user-local files; such entries are filtered out downstream.
    pub is_local: bool,
    /// `None` when the API returns a null track for this slot.
    pub track: Option<FailibleTrackItem>,
}
/// A track from the "saved tracks" (library) endpoint.
#[derive(Deserialize, Debug, Clone)]
pub struct SavedTrack {
    pub added_at: String,
    pub track: TrackItem,
}
/// An album from the "saved albums" (library) endpoint.
#[derive(Deserialize, Debug, Clone)]
pub struct SavedAlbum {
    pub album: Album,
}
/// Full album payload: core album fields plus release metadata, flattened
/// from the same JSON object.
#[derive(Deserialize, Debug, Clone)]
pub struct FullAlbum {
    #[serde(flatten)]
    pub album: Album,
    #[serde(flatten)]
    pub album_info: AlbumInfo,
}
/// Core album fields shared by several endpoints.
#[derive(Deserialize, Debug, Clone)]
pub struct Album {
    pub id: String,
    /// Track page; absent on compact album payloads.
    pub tracks: Option<Page<AlbumTrackItem>>,
    pub artists: Vec<Artist>,
    pub name: String,
    pub images: Vec<Image>,
}
/// Release metadata present only on full album payloads.
#[derive(Deserialize, Debug, Clone)]
pub struct AlbumInfo {
    pub label: String,
    pub release_date: String,
    pub copyrights: Vec<Copyright>,
}
/// One copyright line of an album.
#[derive(Deserialize, Debug, Clone)]
pub struct Copyright {
    pub text: String,
    /// Copyright kind character ('C' or 'P' per the API — TODO confirm).
    #[serde(alias = "type")]
    pub type_: char,
}
impl WithImages for Album {
    /// Album cover images.
    fn images(&self) -> &[Image] {
        self.images.as_slice()
    }
}
/// A cover image with optional pixel dimensions.
#[derive(Deserialize, Debug, Clone)]
pub struct Image {
    pub url: String,
    pub height: Option<u32>,
    pub width: Option<u32>,
}
/// An artist; images are present only on full artist payloads.
#[derive(Deserialize, Debug, Clone)]
pub struct Artist {
    pub id: String,
    pub name: String,
    pub images: Option<Vec<Image>>,
}
impl WithImages for Artist {
    /// Artist images, or an empty slice when the payload had none.
    fn images(&self) -> &[Image] {
        // `as_deref` collapses the manual `if let`/empty-slice fallback into
        // one expression with identical behavior.
        self.images.as_deref().unwrap_or(&[])
    }
}
/// A Spotify user profile.
#[derive(Deserialize, Debug, Clone)]
pub struct User {
    pub id: String,
    pub display_name: String,
}
/// Response of the artist "top tracks" endpoint.
#[derive(Deserialize, Debug, Clone)]
pub struct TopTracks {
    pub tracks: Vec<TrackItem>,
}
/// A track as it appears inside an album's track page (no album field).
#[derive(Deserialize, Debug, Clone)]
pub struct AlbumTrackItem {
    pub id: String,
    /// 1-based position on the album; absent on some payloads.
    pub track_number: Option<usize>,
    pub uri: String,
    pub name: String,
    pub duration_ms: i64,
    pub artists: Vec<Artist>,
}
/// A standalone track: album-track fields flattened plus the owning album.
#[derive(Deserialize, Debug, Clone)]
pub struct TrackItem {
    #[serde(flatten)]
    pub track: AlbumTrackItem,
    pub album: Album,
}
/// Catch-all for track payloads that fail to match `TrackItem`.
#[derive(Deserialize, Debug, Clone)]
pub struct BadTrackItem {}
/// Untagged union: a well-formed track, or anything else (matched last).
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
pub enum FailibleTrackItem {
    Ok(TrackItem),
    Failing(BadTrackItem),
}
impl FailibleTrackItem {
    /// Returns the parsed track, or `None` when deserialization fell back to
    /// the catch-all variant.
    fn get(self) -> Option<TrackItem> {
        if let Self::Ok(track) = self {
            Some(track)
        } else {
            None
        }
    }
}
/// Raw search response: one optional page per requested `SearchType`.
#[derive(Deserialize, Debug, Clone)]
pub struct RawSearchResults {
    pub albums: Option<Page<Album>>,
    pub artists: Option<Page<Artist>>,
}
impl From<Artist> for ArtistSummary {
    /// Reduces a full artist payload to a list-row summary, picking the image
    /// closest to 200px wide as the photo.
    fn from(artist: Artist) -> Self {
        let photo = artist.best_image_for_width(200).map(|i| &i.url).cloned();
        let Artist { id, name, .. } = artist;
        Self { id, name, photo }
    }
}
impl TryFrom<PlaylistTrack> for TrackItem {
    type Error = ();
    /// Converts a playlist entry into a track, rejecting null entries,
    /// undeserializable tracks and user-local tracks.
    fn try_from(PlaylistTrack { is_local, track }: PlaylistTrack) -> Result<Self, Self::Error> {
        track
            .ok_or(())?
            .get()
            .filter(|_| !is_local)
            .map(|mut track| {
                // Clear the album-relative track number; presumably the
                // playlist position is used downstream instead — TODO confirm
                // against the SongBatch conversion.
                std::mem::take(&mut track.track.track_number);
                track
            })
            .ok_or(())
    }
}
impl From<SavedTrack> for TrackItem {
    /// Unwraps a library entry, clearing the album-relative track number
    /// (matching the playlist-track conversion above).
    fn from(track: SavedTrack) -> Self {
        let mut track = track.track;
        std::mem::take(&mut track.track.track_number);
        track
    }
}
impl From<TopTracks> for Vec<SongDescription> {
    /// Wraps the top-tracks list in a synthetic single page and converts it.
    fn from(top_tracks: TopTracks) -> Self {
        Page::new(top_tracks.tracks).into()
    }
}
impl<T> From<Page<T>> for Vec<SongDescription>
where
    T: TryInto<TrackItem>,
{
    /// Convenience: page-to-batch conversion, keeping only the songs.
    fn from(page: Page<T>) -> Self {
        SongBatch::from(page).songs
    }
}
impl From<(Page<AlbumTrackItem>, &Album)> for SongBatch {
    /// Converts an album's track page by attaching a clone of the album to
    /// each track, then delegating to the generic page conversion.
    fn from(page_and_album: (Page<AlbumTrackItem>, &Album)) -> Self {
        let (page, album) = page_and_album;
        Self::from(page.map(|track| TrackItem {
            track,
            album: album.clone(),
        }))
    }
}
impl<T> From<Page<T>> for SongBatch
where
    T: TryInto<TrackItem>,
{
    /// Converts one API page into an app-level `SongBatch`, silently dropping
    /// items that fail the `TryInto<TrackItem>` conversion (e.g. local or
    /// malformed playlist entries).
    fn from(page: Page<T>) -> Self {
        // Pagination window, captured before the page is consumed.
        let batch = Batch {
            offset: page.offset(),
            batch_size: page.limit(),
            total: page.total(),
        };
        let songs = page
            .into_iter()
            .enumerate()
            .filter_map(|(i, t)| {
                let TrackItem { track, album } = t.try_into().ok()?;
                let AlbumTrackItem {
                    artists,
                    id,
                    uri,
                    name,
                    duration_ms,
                    track_number,
                } = track;
                // Missing track numbers are synthesized from the absolute
                // position in the result set (1-based).
                let track_number = track_number.unwrap_or_else(|| batch.offset + i + 1) as u32;
                let artists = artists
                    .into_iter()
                    .map(|a| ArtistRef {
                        id: a.id,
                        name: a.name,
                    })
                    .collect::<Vec<ArtistRef>>();
                // Artwork: the album image closest to 200px wide.
                let art = album.best_image_for_width(200).map(|i| &i.url).cloned();
                let Album {
                    id: album_id,
                    name: album_name,
                    ..
                } = album;
                let album_ref = AlbumRef {
                    id: album_id,
                    name: album_name,
                };
                Some(SongDescription {
                    id,
                    track_number,
                    uri,
                    title: name,
                    artists,
                    album: album_ref,
                    duration: duration_ms as u32,
                    art,
                })
            })
            .collect();
        SongBatch { songs, batch }
    }
}
impl TryFrom<Album> for SongBatch {
    type Error = ();
    /// Extracts the album's track page and converts it into a `SongBatch`;
    /// fails with `()` when the album payload carries no track listing.
    fn try_from(mut album: Album) -> Result<Self, Self::Error> {
        // `Option::take` is the idiomatic form of
        // `mem::replace(&mut opt, None)` (clippy: mem_replace_option_with_none).
        let tracks = album.tracks.take().ok_or(())?;
        Ok((tracks, &album).into())
    }
}
impl From<FullAlbum> for AlbumFullDescription {
fn from(full_album: FullAlbum) -> Self {
let description = full_album.album.into();
let release_details = full_album.album_info.into();
Self {
description,
release_details,
}
}
}
impl From<Album> for AlbumDescription {
    /// Converts an album payload into the app-level description, seeding the
    /// song list from the album's embedded track page (empty when absent).
    fn from(album: Album) -> Self {
        let artists = album
            .artists
            .iter()
            .map(|a| ArtistRef {
                id: a.id.clone(),
                name: a.name.clone(),
            })
            .collect::<Vec<ArtistRef>>();
        let songs = SongList::new_from_initial_batch(
            album
                .clone()
                .try_into()
                .unwrap_or_else(|_| SongBatch::empty()),
        ); //FIXME: clones the whole album (tracks included) just to build the song list
        let art = album.best_image_for_width(200).map(|i| i.url.clone());
        Self {
            id: album.id,
            title: album.name,
            artists,
            art,
            songs,
            is_liked: false,
        }
    }
}
impl From<AlbumInfo> for AlbumReleaseDetails {
    /// Flattens the release metadata, joining all copyright lines into one
    /// display string of the form `[C] text,\n [P] text`.
    fn from(
        AlbumInfo {
            label,
            release_date,
            copyrights,
        }: AlbumInfo,
    ) -> Self {
        let copyright_text = copyrights
            .iter()
            .map(|c| format!("[{}] {}", c.type_, c.text))
            .collect::<Vec<String>>()
            .join(",\n ");
        Self {
            label,
            release_date,
            copyright_text,
        }
    }
}
impl From<Playlist> for PlaylistDescription {
    /// Converts a playlist payload into the app-level description, seeding
    /// the song list from the embedded track page.
    fn from(playlist: Playlist) -> Self {
        // Artwork must be picked before the playlist is destructured.
        let art = playlist.best_image_for_width(200).map(|i| i.url.clone());
        let Playlist {
            id,
            name,
            tracks,
            owner,
            ..
        } = playlist;
        let PlaylistOwner {
            id: owner_id,
            display_name,
        } = owner;
        let song_batch = tracks.into();
        PlaylistDescription {
            id,
            title: name,
            art,
            songs: SongList::new_from_initial_batch(song_batch),
            owner: UserRef {
                id: owner_id,
                display_name,
            },
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A null track slot must be rejected by the TryFrom conversion.
    #[test]
    fn test_playlist_track_null() {
        let track = r#"{"is_local": false, "track": null}"#;
        let deserialized: PlaylistTrack = serde_json::from_str(track).unwrap();
        let track_item: Option<TrackItem> = deserialized.try_into().ok();
        assert!(track_item.is_none());
    }
    // A user-local track must be rejected even if its payload parses.
    #[test]
    fn test_playlist_track_local() {
        let track = r#"{"is_local": true, "track": {"name": ""}}"#;
        let deserialized: PlaylistTrack = serde_json::from_str(track).unwrap();
        let track_item: Option<TrackItem> = deserialized.try_into().ok();
        assert!(track_item.is_none());
    }
    // A well-formed remote track must convert successfully.
    #[test]
    fn test_playlist_track_ok() {
        let track = r#"{"is_local":false,"track":{"album":{"artists":[{"external_urls":{"spotify":""},"href":"","id":"","name":"","type":"artist","uri":""}],"id":"","images":[{"height":64,"url":"","width":64}],"name":""},"artists":[{"id":"","name":""}],"duration_ms":1,"id":"","name":"","uri":""}}"#;
        let deserialized: PlaylistTrack = serde_json::from_str(track).unwrap();
        let track_item: Option<TrackItem> = deserialized.try_into().ok();
        assert!(track_item.is_some());
    }
}
| 24.382084 | 300 | 0.521032 |
fca55bd749ff3ddaece4f1c491fb96395e9b0987 | 2,226 | use crate::model::model::{FieldType, Field, DataType, DataValue, DataError, IncompatibleError};
use crate::image::ImageView;
use image::{ImageFormat, ImageError};
use imageproc::drawing::Canvas;
use base64::DecodeError;
// Field-type tag for image fields. Value is all-zero bits; presumably the
// first entry of a bit-packed type registry — TODO confirm against FieldType.
pub const IMAGE_TYPE: FieldType = FieldType(0b_000_000_000);
// DataType implementation that round-trips an `ImageView` through a
// base64 PNG data URL.
pub(crate) struct ImageDataType;
impl DataType for ImageDataType {
    // Serializes the image as a `data:image/png;base64,...` URL plus its
    // pixel dimensions.
    fn read(&self, image: &ImageView, _: &Field) -> Result<DataValue, DataError> {
        Ok(DataValue::Image {
            width: image.width,
            height: image.height,
            data_url: format!("data:image/png;base64,{}", image.get_base64())
        })
    }
    // Decodes a `DataValue::Image` back into the target view. The value must
    // match the target's dimensions exactly; any other `DataValue` variant is
    // rejected as incompatible.
    fn write(&self, image: &mut ImageView, _field: &Field, value: DataValue) -> Result<(), DataError> {
        match value {
            DataValue::Image { width, height, data_url } => {
                if width != image.width || height != image.height {
                    log::error!("target size is {}x{}, but value size is {}x{}", image.width, image.height, width, height);
                    Result::Err(DataError::Incompatible(IncompatibleError::InvalidSize))
                } else {
                    // NOTE(review): `unwrap` panics if the data URL lacks the
                    // PNG prefix — consider returning CannotParseValue instead.
                    let bytes = base64::decode(data_url.strip_prefix("data:image/png;base64,").unwrap())?;
                    let temp_image = image::load_from_memory_with_format(&bytes, ImageFormat::Png)?;
                    // Copy pixel-by-pixel into the existing view.
                    for x in 0..width {
                        for y in 0..height {
                            image.set_pixel(x, y, temp_image.get_pixel(x, y))?;
                        }
                    }
                    Result::Ok(())
                }
            }
            _ => Result::Err(DataError::Incompatible(IncompatibleError::InvalidDataType))
        }
    }
}
// set_pixel reports failures as `()`; surface them as size incompatibilities.
impl From<()> for DataError {
    fn from(_: ()) -> Self {
        DataError::Incompatible(IncompatibleError::InvalidSize)
    }
}
// Base64 decoding failures become parse errors (enables `?` above).
impl From<DecodeError> for DataError {
    fn from(e: DecodeError) -> Self {
        DataError::Incompatible(IncompatibleError::CannotParseValue(e.to_string()))
    }
}
// PNG decoding failures are logged and become parse errors.
impl From<ImageError> for DataError {
    fn from(ie: ImageError) -> Self {
        log::error!("Error {}", ie);
        DataError::Incompatible(IncompatibleError::CannotParseValue(ie.to_string()))
    }
}
506d7a2321045d52c82e0f3d4db8ff3b7fbb98aa | 64,670 | //! Manage xml character escapes
use memchr;
use std::borrow::Cow;
use std::collections::HashMap;
use std::ops::Range;
/// Errors raised while escaping or unescaping XML text.
#[derive(Debug)]
pub enum EscapeError {
    /// Entity with Null character
    EntityWithNull(::std::ops::Range<usize>),
    /// Unrecognized escape symbol
    UnrecognizedSymbol(
        ::std::ops::Range<usize>,
        ::std::result::Result<String, ::std::string::FromUtf8Error>,
    ),
    /// Cannot find `;` after `&`
    UnterminatedEntity(::std::ops::Range<usize>),
    /// Cannot convert Hexa to utf8
    TooLongHexadecimal,
    /// Character is not a valid hexadecimal value
    InvalidHexadecimal(char),
    /// Cannot convert decimal to hexa
    TooLongDecimal,
    /// Character is not a valid decimal value
    InvalidDecimal(char),
    /// Not a valid unicode codepoint
    InvalidCodepoint(u32),
}
impl std::fmt::Display for EscapeError {
    // Human-readable message per variant; byte ranges index into the
    // original input that was being unescaped.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            EscapeError::EntityWithNull(e) => write!(
                f,
                "Error while escaping character at range {:?}: Null character entity not allowed",
                e
            ),
            EscapeError::UnrecognizedSymbol(rge, res) => write!(
                f,
                "Error while escaping character at range {:?}: Unrecognized escape symbol: {:?}",
                rge, res
            ),
            EscapeError::UnterminatedEntity(e) => write!(
                f,
                "Error while escaping character at range {:?}: Cannot find ';' after '&'",
                e
            ),
            EscapeError::TooLongHexadecimal => write!(f, "Cannot convert hexadecimal to utf8"),
            EscapeError::InvalidHexadecimal(e) => {
                write!(f, "'{}' is not a valid hexadecimal character", e)
            }
            EscapeError::TooLongDecimal => write!(f, "Cannot convert decimal to utf8"),
            EscapeError::InvalidDecimal(e) => write!(f, "'{}' is not a valid decimal character", e),
            EscapeError::InvalidCodepoint(n) => write!(f, "'{}' is not a valid codepoint", n),
        }
    }
}
impl std::error::Error for EscapeError {}
/// Escapes a `&[u8]` and replaces all xml special characters (<, >, &, ', ")
/// with their corresponding xml escaped value.
///
/// Returns `Cow::Borrowed(raw)` when nothing needed escaping, so the common
/// case performs no allocation.
pub fn escape(raw: &[u8]) -> Cow<[u8]> {
    // Bytes that must be replaced by a predefined XML entity.
    fn to_escape(b: u8) -> bool {
        match b {
            b'<' | b'>' | b'\'' | b'&' | b'"' => true,
            _ => false,
        }
    }
    let mut escaped: Option<Vec<u8>> = None;
    let mut bytes = raw.iter();
    let mut pos = 0;
    while let Some(i) = bytes.position(|&b| to_escape(b)) {
        // Allocate the output lazily, only once the first special character
        // is found (replaces the is_none()/insert/expect dance).
        let escaped = escaped.get_or_insert_with(|| Vec::with_capacity(raw.len()));
        let new_pos = pos + i;
        // Copy the clean run preceding the special character verbatim.
        escaped.extend_from_slice(&raw[pos..new_pos]);
        // Entity replacements restored here: the previous text had them
        // entity-decoded (e.g. `b"<"`), which is wrong and, for the quote
        // arm, not even valid Rust.
        match raw[new_pos] {
            b'<' => escaped.extend_from_slice(b"&lt;"),
            b'>' => escaped.extend_from_slice(b"&gt;"),
            b'\'' => escaped.extend_from_slice(b"&apos;"),
            b'&' => escaped.extend_from_slice(b"&amp;"),
            b'"' => escaped.extend_from_slice(b"&quot;"),
            _ => unreachable!("Only '<', '>','\', '&' and '\"' are escaped"),
        }
        pos = new_pos + 1;
    }
    match escaped {
        Some(mut escaped) => {
            // Append the trailing clean run, if any.
            if let Some(tail) = raw.get(pos..) {
                escaped.extend_from_slice(tail);
            }
            Cow::Owned(escaped)
        }
        None => Cow::Borrowed(raw),
    }
}
/// Unescape a `&[u8]` and replaces all xml escaped characters ('&...;') into their corresponding
/// value, using only the predefined XML entities.
pub fn unescape(raw: &[u8]) -> Result<Cow<[u8]>, EscapeError> {
    do_unescape(raw, None)
}
/// Unescape a `&[u8]` and replaces all xml escaped characters ('&...;') into their corresponding
/// value, using a dictionary of custom entities.
///
/// # Pre-condition
///
/// The keys and values of `custom_entities`, if any, must be valid UTF-8.
pub fn unescape_with<'a>(
    raw: &'a [u8],
    custom_entities: &HashMap<Vec<u8>, Vec<u8>>,
) -> Result<Cow<'a, [u8]>, EscapeError> {
    do_unescape(raw, Some(custom_entities))
}
/// Unescape a `&[u8]` and replaces all xml escaped characters ('&...;') into their corresponding
/// value, using an optional dictionary of custom entities.
///
/// # Pre-condition
///
/// The keys and values of `custom_entities`, if any, must be valid UTF-8.
pub fn do_unescape<'a>(
    raw: &'a [u8],
    custom_entities: Option<&HashMap<Vec<u8>, Vec<u8>>>,
) -> Result<Cow<'a, [u8]>, EscapeError> {
    // Output buffer; allocated lazily so escape-free input stays borrowed.
    let mut unescaped = None;
    // End (exclusive) of the last entity consumed so far.
    let mut last_end = 0;
    // Scan both delimiters in one pass; '&' opens an entity, ';' closes it.
    let mut iter = memchr::memchr2_iter(b'&', b';', raw);
    while let Some(start) = iter.by_ref().find(|p| raw[*p] == b'&') {
        match iter.next() {
            // The very next delimiter must be the terminating ';' — another
            // '&' first means the entity was never closed.
            Some(end) if raw[end] == b';' => {
                // append valid data
                if unescaped.is_none() {
                    unescaped = Some(Vec::with_capacity(raw.len()));
                }
                let unescaped = unescaped.as_mut().expect("initialized");
                unescaped.extend_from_slice(&raw[last_end..start]);
                // search for character correctness
                let pat = &raw[start + 1..end];
                // Resolution order: predefined entities, then numeric
                // character references (&#...;), then custom entities.
                if let Some(s) = named_entity(pat) {
                    unescaped.extend_from_slice(s.as_bytes());
                } else if pat.starts_with(b"#") {
                    push_utf8(unescaped, parse_number(&pat[1..], start..end)?);
                } else if let Some(value) = custom_entities.and_then(|hm| hm.get(pat)) {
                    unescaped.extend_from_slice(&value);
                } else {
                    return Err(EscapeError::UnrecognizedSymbol(
                        start + 1..end,
                        String::from_utf8(pat.to_vec()),
                    ));
                }
                last_end = end + 1;
            }
            _ => return Err(EscapeError::UnterminatedEntity(start..raw.len())),
        }
    }
    if let Some(mut unescaped) = unescaped {
        // Copy the tail after the last entity.
        if let Some(raw) = raw.get(last_end..) {
            unescaped.extend_from_slice(raw);
        }
        Ok(Cow::Owned(unescaped))
    } else {
        // No entity was found: return the input unchanged, borrowed.
        Ok(Cow::Borrowed(raw))
    }
}
/// Resolves one of the five predefined XML entities (`lt`, `gt`, `amp`,
/// `apos`, `quot`) to its literal character; any other name yields `None`.
#[cfg(not(feature = "escape-html"))]
const fn named_entity(name: &[u8]) -> Option<&str> {
    match name {
        b"lt" => Some("<"),
        b"gt" => Some(">"),
        b"amp" => Some("&"),
        b"apos" => Some("'"),
        b"quot" => Some("\""),
        _ => None,
    }
}
#[cfg(feature = "escape-html")]
const fn named_entity(name: &[u8]) -> Option<&str> {
// imported from https://dev.w3.org/html5/html-author/charref
let s = match name {
b"Tab" => "\u{09}",
b"NewLine" => "\u{0A}",
b"excl" => "\u{21}",
b"quot" | b"QUOT" => "\u{22}",
b"num" => "\u{23}",
b"dollar" => "\u{24}",
b"percnt" => "\u{25}",
b"amp" | b"AMP" => "\u{26}",
b"apos" => "\u{27}",
b"lpar" => "\u{28}",
b"rpar" => "\u{29}",
b"ast" | b"midast" => "\u{2A}",
b"plus" => "\u{2B}",
b"comma" => "\u{2C}",
b"period" => "\u{2E}",
b"sol" => "\u{2F}",
b"colon" => "\u{3A}",
b"semi" => "\u{3B}",
b"lt" | b"LT" => "\u{3C}",
b"equals" => "\u{3D}",
b"gt" | b"GT" => "\u{3E}",
b"quest" => "\u{3F}",
b"commat" => "\u{40}",
b"lsqb" | b"lbrack" => "\u{5B}",
b"bsol" => "\u{5C}",
b"rsqb" | b"rbrack" => "\u{5D}",
b"Hat" => "\u{5E}",
b"lowbar" => "\u{5F}",
b"grave" | b"DiacriticalGrave" => "\u{60}",
b"lcub" | b"lbrace" => "\u{7B}",
b"verbar" | b"vert" | b"VerticalLine" => "\u{7C}",
b"rcub" | b"rbrace" => "\u{7D}",
b"nbsp" | b"NonBreakingSpace" => "\u{A0}",
b"iexcl" => "\u{A1}",
b"cent" => "\u{A2}",
b"pound" => "\u{A3}",
b"curren" => "\u{A4}",
b"yen" => "\u{A5}",
b"brvbar" => "\u{A6}",
b"sect" => "\u{A7}",
b"Dot" | b"die" | b"DoubleDot" | b"uml" => "\u{A8}",
b"copy" | b"COPY" => "\u{A9}",
b"ordf" => "\u{AA}",
b"laquo" => "\u{AB}",
b"not" => "\u{AC}",
b"shy" => "\u{AD}",
b"reg" | b"circledR" | b"REG" => "\u{AE}",
b"macr" | b"OverBar" | b"strns" => "\u{AF}",
b"deg" => "\u{B0}",
b"plusmn" | b"pm" | b"PlusMinus" => "\u{B1}",
b"sup2" => "\u{B2}",
b"sup3" => "\u{B3}",
b"acute" | b"DiacriticalAcute" => "\u{B4}",
b"micro" => "\u{B5}",
b"para" => "\u{B6}",
b"middot" | b"centerdot" | b"CenterDot" => "\u{B7}",
b"cedil" | b"Cedilla" => "\u{B8}",
b"sup1" => "\u{B9}",
b"ordm" => "\u{BA}",
b"raquo" => "\u{BB}",
b"frac14" => "\u{BC}",
b"frac12" | b"half" => "\u{BD}",
b"frac34" => "\u{BE}",
b"iquest" => "\u{BF}",
b"Agrave" => "\u{C0}",
b"Aacute" => "\u{C1}",
b"Acirc" => "\u{C2}",
b"Atilde" => "\u{C3}",
b"Auml" => "\u{C4}",
b"Aring" => "\u{C5}",
b"AElig" => "\u{C6}",
b"Ccedil" => "\u{C7}",
b"Egrave" => "\u{C8}",
b"Eacute" => "\u{C9}",
b"Ecirc" => "\u{CA}",
b"Euml" => "\u{CB}",
b"Igrave" => "\u{CC}",
b"Iacute" => "\u{CD}",
b"Icirc" => "\u{CE}",
b"Iuml" => "\u{CF}",
b"ETH" => "\u{D0}",
b"Ntilde" => "\u{D1}",
b"Ograve" => "\u{D2}",
b"Oacute" => "\u{D3}",
b"Ocirc" => "\u{D4}",
b"Otilde" => "\u{D5}",
b"Ouml" => "\u{D6}",
b"times" => "\u{D7}",
b"Oslash" => "\u{D8}",
b"Ugrave" => "\u{D9}",
b"Uacute" => "\u{DA}",
b"Ucirc" => "\u{DB}",
b"Uuml" => "\u{DC}",
b"Yacute" => "\u{DD}",
b"THORN" => "\u{DE}",
b"szlig" => "\u{DF}",
b"agrave" => "\u{E0}",
b"aacute" => "\u{E1}",
b"acirc" => "\u{E2}",
b"atilde" => "\u{E3}",
b"auml" => "\u{E4}",
b"aring" => "\u{E5}",
b"aelig" => "\u{E6}",
b"ccedil" => "\u{E7}",
b"egrave" => "\u{E8}",
b"eacute" => "\u{E9}",
b"ecirc" => "\u{EA}",
b"euml" => "\u{EB}",
b"igrave" => "\u{EC}",
b"iacute" => "\u{ED}",
b"icirc" => "\u{EE}",
b"iuml" => "\u{EF}",
b"eth" => "\u{F0}",
b"ntilde" => "\u{F1}",
b"ograve" => "\u{F2}",
b"oacute" => "\u{F3}",
b"ocirc" => "\u{F4}",
b"otilde" => "\u{F5}",
b"ouml" => "\u{F6}",
b"divide" | b"div" => "\u{F7}",
b"oslash" => "\u{F8}",
b"ugrave" => "\u{F9}",
b"uacute" => "\u{FA}",
b"ucirc" => "\u{FB}",
b"uuml" => "\u{FC}",
b"yacute" => "\u{FD}",
b"thorn" => "\u{FE}",
b"yuml" => "\u{FF}",
b"Amacr" => "\u{10}",
b"amacr" => "\u{10}",
b"Abreve" => "\u{10}",
b"abreve" => "\u{10}",
b"Aogon" => "\u{10}",
b"aogon" => "\u{10}",
b"Cacute" => "\u{10}",
b"cacute" => "\u{10}",
b"Ccirc" => "\u{10}",
b"ccirc" => "\u{10}",
b"Cdot" => "\u{10}",
b"cdot" => "\u{10}",
b"Ccaron" => "\u{10}",
b"ccaron" => "\u{10}",
b"Dcaron" => "\u{10}",
b"dcaron" => "\u{10}",
b"Dstrok" => "\u{11}",
b"dstrok" => "\u{11}",
b"Emacr" => "\u{11}",
b"emacr" => "\u{11}",
b"Edot" => "\u{11}",
b"edot" => "\u{11}",
b"Eogon" => "\u{11}",
b"eogon" => "\u{11}",
b"Ecaron" => "\u{11}",
b"ecaron" => "\u{11}",
b"Gcirc" => "\u{11}",
b"gcirc" => "\u{11}",
b"Gbreve" => "\u{11}",
b"gbreve" => "\u{11}",
b"Gdot" => "\u{12}",
b"gdot" => "\u{12}",
b"Gcedil" => "\u{12}",
b"Hcirc" => "\u{12}",
b"hcirc" => "\u{12}",
b"Hstrok" => "\u{12}",
b"hstrok" => "\u{12}",
b"Itilde" => "\u{12}",
b"itilde" => "\u{12}",
b"Imacr" => "\u{12}",
b"imacr" => "\u{12}",
b"Iogon" => "\u{12}",
b"iogon" => "\u{12}",
b"Idot" => "\u{13}",
b"imath" | b"inodot" => "\u{13}",
b"IJlig" => "\u{13}",
b"ijlig" => "\u{13}",
b"Jcirc" => "\u{13}",
b"jcirc" => "\u{13}",
b"Kcedil" => "\u{13}",
b"kcedil" => "\u{13}",
b"kgreen" => "\u{13}",
b"Lacute" => "\u{13}",
b"lacute" => "\u{13}",
b"Lcedil" => "\u{13}",
b"lcedil" => "\u{13}",
b"Lcaron" => "\u{13}",
b"lcaron" => "\u{13}",
b"Lmidot" => "\u{13}",
b"lmidot" => "\u{14}",
b"Lstrok" => "\u{14}",
b"lstrok" => "\u{14}",
b"Nacute" => "\u{14}",
b"nacute" => "\u{14}",
b"Ncedil" => "\u{14}",
b"ncedil" => "\u{14}",
b"Ncaron" => "\u{14}",
b"ncaron" => "\u{14}",
b"napos" => "\u{14}",
b"ENG" => "\u{14}",
b"eng" => "\u{14}",
b"Omacr" => "\u{14}",
b"omacr" => "\u{14}",
b"Odblac" => "\u{15}",
b"odblac" => "\u{15}",
b"OElig" => "\u{15}",
b"oelig" => "\u{15}",
b"Racute" => "\u{15}",
b"racute" => "\u{15}",
b"Rcedil" => "\u{15}",
b"rcedil" => "\u{15}",
b"Rcaron" => "\u{15}",
b"rcaron" => "\u{15}",
b"Sacute" => "\u{15}",
b"sacute" => "\u{15}",
b"Scirc" => "\u{15}",
b"scirc" => "\u{15}",
b"Scedil" => "\u{15}",
b"scedil" => "\u{15}",
b"Scaron" => "\u{16}",
b"scaron" => "\u{16}",
b"Tcedil" => "\u{16}",
b"tcedil" => "\u{16}",
b"Tcaron" => "\u{16}",
b"tcaron" => "\u{16}",
b"Tstrok" => "\u{16}",
b"tstrok" => "\u{16}",
b"Utilde" => "\u{16}",
b"utilde" => "\u{16}",
b"Umacr" => "\u{16}",
b"umacr" => "\u{16}",
b"Ubreve" => "\u{16}",
b"ubreve" => "\u{16}",
b"Uring" => "\u{16}",
b"uring" => "\u{16}",
b"Udblac" => "\u{17}",
b"udblac" => "\u{17}",
b"Uogon" => "\u{17}",
b"uogon" => "\u{17}",
b"Wcirc" => "\u{17}",
b"wcirc" => "\u{17}",
b"Ycirc" => "\u{17}",
b"ycirc" => "\u{17}",
b"Yuml" => "\u{17}",
b"Zacute" => "\u{17}",
b"zacute" => "\u{17}",
b"Zdot" => "\u{17}",
b"zdot" => "\u{17}",
b"Zcaron" => "\u{17}",
b"zcaron" => "\u{17}",
b"fnof" => "\u{19}",
b"imped" => "\u{1B}",
b"gacute" => "\u{1F}",
b"jmath" => "\u{23}",
b"circ" => "\u{2C}",
b"caron" | b"Hacek" => "\u{2C}",
b"breve" | b"Breve" => "\u{2D}",
b"dot" | b"DiacriticalDot" => "\u{2D}",
b"ring" => "\u{2D}",
b"ogon" => "\u{2D}",
b"tilde" | b"DiacriticalTilde" => "\u{2D}",
b"dblac" | b"DiacriticalDoubleAcute" => "\u{2D}",
b"DownBreve" => "\u{31}",
b"UnderBar" => "\u{33}",
b"Alpha" => "\u{39}",
b"Beta" => "\u{39}",
b"Gamma" => "\u{39}",
b"Delta" => "\u{39}",
b"Epsilon" => "\u{39}",
b"Zeta" => "\u{39}",
b"Eta" => "\u{39}",
b"Theta" => "\u{39}",
b"Iota" => "\u{39}",
b"Kappa" => "\u{39}",
b"Lambda" => "\u{39}",
b"Mu" => "\u{39}",
b"Nu" => "\u{39}",
b"Xi" => "\u{39}",
b"Omicron" => "\u{39}",
b"Pi" => "\u{3A}",
b"Rho" => "\u{3A}",
b"Sigma" => "\u{3A}",
b"Tau" => "\u{3A}",
b"Upsilon" => "\u{3A}",
b"Phi" => "\u{3A}",
b"Chi" => "\u{3A}",
b"Psi" => "\u{3A}",
b"Omega" => "\u{3A}",
b"alpha" => "\u{3B}",
b"beta" => "\u{3B}",
b"gamma" => "\u{3B}",
b"delta" => "\u{3B}",
b"epsiv" | b"varepsilon" | b"epsilon" => "\u{3B}",
b"zeta" => "\u{3B}",
b"eta" => "\u{3B}",
b"theta" => "\u{3B}",
b"iota" => "\u{3B}",
b"kappa" => "\u{3B}",
b"lambda" => "\u{3B}",
b"mu" => "\u{3B}",
b"nu" => "\u{3B}",
b"xi" => "\u{3B}",
b"omicron" => "\u{3B}",
b"pi" => "\u{3C}",
b"rho" => "\u{3C}",
b"sigmav" | b"varsigma" | b"sigmaf" => "\u{3C}",
b"sigma" => "\u{3C}",
b"tau" => "\u{3C}",
b"upsi" | b"upsilon" => "\u{3C}",
b"phi" | b"phiv" | b"varphi" => "\u{3C}",
b"chi" => "\u{3C}",
b"psi" => "\u{3C}",
b"omega" => "\u{3C}",
b"thetav" | b"vartheta" | b"thetasym" => "\u{3D}",
b"Upsi" | b"upsih" => "\u{3D}",
b"straightphi" => "\u{3D}",
b"piv" | b"varpi" => "\u{3D}",
b"Gammad" => "\u{3D}",
b"gammad" | b"digamma" => "\u{3D}",
b"kappav" | b"varkappa" => "\u{3F}",
b"rhov" | b"varrho" => "\u{3F}",
b"epsi" | b"straightepsilon" => "\u{3F}",
b"bepsi" | b"backepsilon" => "\u{3F}",
b"IOcy" => "\u{40}",
b"DJcy" => "\u{40}",
b"GJcy" => "\u{40}",
b"Jukcy" => "\u{40}",
b"DScy" => "\u{40}",
b"Iukcy" => "\u{40}",
b"YIcy" => "\u{40}",
b"Jsercy" => "\u{40}",
b"LJcy" => "\u{40}",
b"NJcy" => "\u{40}",
b"TSHcy" => "\u{40}",
b"KJcy" => "\u{40}",
b"Ubrcy" => "\u{40}",
b"DZcy" => "\u{40}",
b"Acy" => "\u{41}",
b"Bcy" => "\u{41}",
b"Vcy" => "\u{41}",
b"Gcy" => "\u{41}",
b"Dcy" => "\u{41}",
b"IEcy" => "\u{41}",
b"ZHcy" => "\u{41}",
b"Zcy" => "\u{41}",
b"Icy" => "\u{41}",
b"Jcy" => "\u{41}",
b"Kcy" => "\u{41}",
b"Lcy" => "\u{41}",
b"Mcy" => "\u{41}",
b"Ncy" => "\u{41}",
b"Ocy" => "\u{41}",
b"Pcy" => "\u{41}",
b"Rcy" => "\u{42}",
b"Scy" => "\u{42}",
b"Tcy" => "\u{42}",
b"Ucy" => "\u{42}",
b"Fcy" => "\u{42}",
b"KHcy" => "\u{42}",
b"TScy" => "\u{42}",
b"CHcy" => "\u{42}",
b"SHcy" => "\u{42}",
b"SHCHcy" => "\u{42}",
b"HARDcy" => "\u{42}",
b"Ycy" => "\u{42}",
b"SOFTcy" => "\u{42}",
b"Ecy" => "\u{42}",
b"YUcy" => "\u{42}",
b"YAcy" => "\u{42}",
b"acy" => "\u{43}",
b"bcy" => "\u{43}",
b"vcy" => "\u{43}",
b"gcy" => "\u{43}",
b"dcy" => "\u{43}",
b"iecy" => "\u{43}",
b"zhcy" => "\u{43}",
b"zcy" => "\u{43}",
b"icy" => "\u{43}",
b"jcy" => "\u{43}",
b"kcy" => "\u{43}",
b"lcy" => "\u{43}",
b"mcy" => "\u{43}",
b"ncy" => "\u{43}",
b"ocy" => "\u{43}",
b"pcy" => "\u{43}",
b"rcy" => "\u{44}",
b"scy" => "\u{44}",
b"tcy" => "\u{44}",
b"ucy" => "\u{44}",
b"fcy" => "\u{44}",
b"khcy" => "\u{44}",
b"tscy" => "\u{44}",
b"chcy" => "\u{44}",
b"shcy" => "\u{44}",
b"shchcy" => "\u{44}",
b"hardcy" => "\u{44}",
b"ycy" => "\u{44}",
b"softcy" => "\u{44}",
b"ecy" => "\u{44}",
b"yucy" => "\u{44}",
b"yacy" => "\u{44}",
b"iocy" => "\u{45}",
b"djcy" => "\u{45}",
b"gjcy" => "\u{45}",
b"jukcy" => "\u{45}",
b"dscy" => "\u{45}",
b"iukcy" => "\u{45}",
b"yicy" => "\u{45}",
b"jsercy" => "\u{45}",
b"ljcy" => "\u{45}",
b"njcy" => "\u{45}",
b"tshcy" => "\u{45}",
b"kjcy" => "\u{45}",
b"ubrcy" => "\u{45}",
b"dzcy" => "\u{45}",
b"ensp" => "\u{2002}",
b"emsp" => "\u{2003}",
b"emsp13" => "\u{2004}",
b"emsp14" => "\u{2005}",
b"numsp" => "\u{2007}",
b"puncsp" => "\u{2008}",
b"thinsp" | b"ThinSpace" => "\u{2009}",
b"hairsp" | b"VeryThinSpace" => "\u{200A}",
b"ZeroWidthSpace"
| b"NegativeVeryThinSpace"
| b"NegativeThinSpace"
| b"NegativeMediumSpace"
| b"NegativeThickSpace" => "\u{200B}",
b"zwnj" => "\u{200C}",
b"zwj" => "\u{200D}",
b"lrm" => "\u{200E}",
b"rlm" => "\u{200F}",
b"hyphen" | b"dash" => "\u{2010}",
b"ndash" => "\u{2013}",
b"mdash" => "\u{2014}",
b"horbar" => "\u{2015}",
b"Verbar" | b"Vert" => "\u{2016}",
b"lsquo" | b"OpenCurlyQuote" => "\u{2018}",
b"rsquo" | b"rsquor" | b"CloseCurlyQuote" => "\u{2019}",
b"lsquor" | b"sbquo" => "\u{201A}",
b"ldquo" | b"OpenCurlyDoubleQuote" => "\u{201C}",
b"rdquo" | b"rdquor" | b"CloseCurlyDoubleQuote" => "\u{201D}",
b"ldquor" | b"bdquo" => "\u{201E}",
b"dagger" => "\u{2020}",
b"Dagger" | b"ddagger" => "\u{2021}",
b"bull" | b"bullet" => "\u{2022}",
b"nldr" => "\u{2025}",
b"hellip" | b"mldr" => "\u{2026}",
b"permil" => "\u{2030}",
b"pertenk" => "\u{2031}",
b"prime" => "\u{2032}",
b"Prime" => "\u{2033}",
b"tprime" => "\u{2034}",
b"bprime" | b"backprime" => "\u{2035}",
b"lsaquo" => "\u{2039}",
b"rsaquo" => "\u{203A}",
b"oline" => "\u{203E}",
b"caret" => "\u{2041}",
b"hybull" => "\u{2043}",
b"frasl" => "\u{2044}",
b"bsemi" => "\u{204F}",
b"qprime" => "\u{2057}",
b"MediumSpace" => "\u{205F}",
b"NoBreak" => "\u{2060}",
b"ApplyFunction" | b"af" => "\u{2061}",
b"InvisibleTimes" | b"it" => "\u{2062}",
b"InvisibleComma" | b"ic" => "\u{2063}",
b"euro" => "\u{20AC}",
b"tdot" | b"TripleDot" => "\u{20DB}",
b"DotDot" => "\u{20DC}",
b"Copf" | b"complexes" => "\u{2102}",
b"incare" => "\u{2105}",
b"gscr" => "\u{210A}",
b"hamilt" | b"HilbertSpace" | b"Hscr" => "\u{210B}",
b"Hfr" | b"Poincareplane" => "\u{210C}",
b"quaternions" | b"Hopf" => "\u{210D}",
b"planckh" => "\u{210E}",
b"planck" | b"hbar" | b"plankv" | b"hslash" => "\u{210F}",
b"Iscr" | b"imagline" => "\u{2110}",
b"image" | b"Im" | b"imagpart" | b"Ifr" => "\u{2111}",
b"Lscr" | b"lagran" | b"Laplacetrf" => "\u{2112}",
b"ell" => "\u{2113}",
b"Nopf" | b"naturals" => "\u{2115}",
b"numero" => "\u{2116}",
b"copysr" => "\u{2117}",
b"weierp" | b"wp" => "\u{2118}",
b"Popf" | b"primes" => "\u{2119}",
b"rationals" | b"Qopf" => "\u{211A}",
b"Rscr" | b"realine" => "\u{211B}",
b"real" | b"Re" | b"realpart" | b"Rfr" => "\u{211C}",
b"reals" | b"Ropf" => "\u{211D}",
b"rx" => "\u{211E}",
b"trade" | b"TRADE" => "\u{2122}",
b"integers" | b"Zopf" => "\u{2124}",
b"ohm" => "\u{2126}",
b"mho" => "\u{2127}",
b"Zfr" | b"zeetrf" => "\u{2128}",
b"iiota" => "\u{2129}",
b"angst" => "\u{212B}",
b"bernou" | b"Bernoullis" | b"Bscr" => "\u{212C}",
b"Cfr" | b"Cayleys" => "\u{212D}",
b"escr" => "\u{212F}",
b"Escr" | b"expectation" => "\u{2130}",
b"Fscr" | b"Fouriertrf" => "\u{2131}",
b"phmmat" | b"Mellintrf" | b"Mscr" => "\u{2133}",
b"order" | b"orderof" | b"oscr" => "\u{2134}",
b"alefsym" | b"aleph" => "\u{2135}",
b"beth" => "\u{2136}",
b"gimel" => "\u{2137}",
b"daleth" => "\u{2138}",
b"CapitalDifferentialD" | b"DD" => "\u{2145}",
b"DifferentialD" | b"dd" => "\u{2146}",
b"ExponentialE" | b"exponentiale" | b"ee" => "\u{2147}",
b"ImaginaryI" | b"ii" => "\u{2148}",
b"frac13" => "\u{2153}",
b"frac23" => "\u{2154}",
b"frac15" => "\u{2155}",
b"frac25" => "\u{2156}",
b"frac35" => "\u{2157}",
b"frac45" => "\u{2158}",
b"frac16" => "\u{2159}",
b"frac56" => "\u{215A}",
b"frac18" => "\u{215B}",
b"frac38" => "\u{215C}",
b"frac58" => "\u{215D}",
b"frac78" => "\u{215E}",
b"larr" | b"leftarrow" | b"LeftArrow" | b"slarr" | b"ShortLeftArrow" => "\u{2190}",
b"uarr" | b"uparrow" | b"UpArrow" | b"ShortUpArrow" => "\u{2191}",
b"rarr" | b"rightarrow" | b"RightArrow" | b"srarr" | b"ShortRightArrow" => "\u{2192}",
b"darr" | b"downarrow" | b"DownArrow" | b"ShortDownArrow" => "\u{2193}",
b"harr" | b"leftrightarrow" | b"LeftRightArrow" => "\u{2194}",
b"varr" | b"updownarrow" | b"UpDownArrow" => "\u{2195}",
b"nwarr" | b"UpperLeftArrow" | b"nwarrow" => "\u{2196}",
b"nearr" | b"UpperRightArrow" | b"nearrow" => "\u{2197}",
b"searr" | b"searrow" | b"LowerRightArrow" => "\u{2198}",
b"swarr" | b"swarrow" | b"LowerLeftArrow" => "\u{2199}",
b"nlarr" | b"nleftarrow" => "\u{219A}",
b"nrarr" | b"nrightarrow" => "\u{219B}",
b"rarrw" | b"rightsquigarrow" => "\u{219D}",
b"Larr" | b"twoheadleftarrow" => "\u{219E}",
b"Uarr" => "\u{219F}",
b"Rarr" | b"twoheadrightarrow" => "\u{21A0}",
b"Darr" => "\u{21A1}",
b"larrtl" | b"leftarrowtail" => "\u{21A2}",
b"rarrtl" | b"rightarrowtail" => "\u{21A3}",
b"LeftTeeArrow" | b"mapstoleft" => "\u{21A4}",
b"UpTeeArrow" | b"mapstoup" => "\u{21A5}",
b"map" | b"RightTeeArrow" | b"mapsto" => "\u{21A6}",
b"DownTeeArrow" | b"mapstodown" => "\u{21A7}",
b"larrhk" | b"hookleftarrow" => "\u{21A9}",
b"rarrhk" | b"hookrightarrow" => "\u{21AA}",
b"larrlp" | b"looparrowleft" => "\u{21AB}",
b"rarrlp" | b"looparrowright" => "\u{21AC}",
b"harrw" | b"leftrightsquigarrow" => "\u{21AD}",
b"nharr" | b"nleftrightarrow" => "\u{21AE}",
b"lsh" | b"Lsh" => "\u{21B0}",
b"rsh" | b"Rsh" => "\u{21B1}",
b"ldsh" => "\u{21B2}",
b"rdsh" => "\u{21B3}",
b"crarr" => "\u{21B5}",
b"cularr" | b"curvearrowleft" => "\u{21B6}",
b"curarr" | b"curvearrowright" => "\u{21B7}",
b"olarr" | b"circlearrowleft" => "\u{21BA}",
b"orarr" | b"circlearrowright" => "\u{21BB}",
b"lharu" | b"LeftVector" | b"leftharpoonup" => "\u{21BC}",
b"lhard" | b"leftharpoondown" | b"DownLeftVector" => "\u{21BD}",
b"uharr" | b"upharpoonright" | b"RightUpVector" => "\u{21BE}",
b"uharl" | b"upharpoonleft" | b"LeftUpVector" => "\u{21BF}",
b"rharu" | b"RightVector" | b"rightharpoonup" => "\u{21C0}",
b"rhard" | b"rightharpoondown" | b"DownRightVector" => "\u{21C1}",
b"dharr" | b"RightDownVector" | b"downharpoonright" => "\u{21C2}",
b"dharl" | b"LeftDownVector" | b"downharpoonleft" => "\u{21C3}",
b"rlarr" | b"rightleftarrows" | b"RightArrowLeftArrow" => "\u{21C4}",
b"udarr" | b"UpArrowDownArrow" => "\u{21C5}",
b"lrarr" | b"leftrightarrows" | b"LeftArrowRightArrow" => "\u{21C6}",
b"llarr" | b"leftleftarrows" => "\u{21C7}",
b"uuarr" | b"upuparrows" => "\u{21C8}",
b"rrarr" | b"rightrightarrows" => "\u{21C9}",
b"ddarr" | b"downdownarrows" => "\u{21CA}",
b"lrhar" | b"ReverseEquilibrium" | b"leftrightharpoons" => "\u{21CB}",
b"rlhar" | b"rightleftharpoons" | b"Equilibrium" => "\u{21CC}",
b"nlArr" | b"nLeftarrow" => "\u{21CD}",
b"nhArr" | b"nLeftrightarrow" => "\u{21CE}",
b"nrArr" | b"nRightarrow" => "\u{21CF}",
b"lArr" | b"Leftarrow" | b"DoubleLeftArrow" => "\u{21D0}",
b"uArr" | b"Uparrow" | b"DoubleUpArrow" => "\u{21D1}",
b"rArr" | b"Rightarrow" | b"Implies" | b"DoubleRightArrow" => "\u{21D2}",
b"dArr" | b"Downarrow" | b"DoubleDownArrow" => "\u{21D3}",
b"hArr" | b"Leftrightarrow" | b"DoubleLeftRightArrow" | b"iff" => "\u{21D4}",
b"vArr" | b"Updownarrow" | b"DoubleUpDownArrow" => "\u{21D5}",
b"nwArr" => "\u{21D6}",
b"neArr" => "\u{21D7}",
b"seArr" => "\u{21D8}",
b"swArr" => "\u{21D9}",
b"lAarr" | b"Lleftarrow" => "\u{21DA}",
b"rAarr" | b"Rrightarrow" => "\u{21DB}",
b"zigrarr" => "\u{21DD}",
b"larrb" | b"LeftArrowBar" => "\u{21E4}",
b"rarrb" | b"RightArrowBar" => "\u{21E5}",
b"duarr" | b"DownArrowUpArrow" => "\u{21F5}",
b"loarr" => "\u{21FD}",
b"roarr" => "\u{21FE}",
b"hoarr" => "\u{21FF}",
b"forall" | b"ForAll" => "\u{2200}",
b"comp" | b"complement" => "\u{2201}",
b"part" | b"PartialD" => "\u{2202}",
b"exist" | b"Exists" => "\u{2203}",
b"nexist" | b"NotExists" | b"nexists" => "\u{2204}",
b"empty" | b"emptyset" | b"emptyv" | b"varnothing" => "\u{2205}",
b"nabla" | b"Del" => "\u{2207}",
b"isin" | b"isinv" | b"Element" | b"in" => "\u{2208}",
b"notin" | b"NotElement" | b"notinva" => "\u{2209}",
b"niv" | b"ReverseElement" | b"ni" | b"SuchThat" => "\u{220B}",
b"notni" | b"notniva" | b"NotReverseElement" => "\u{220C}",
b"prod" | b"Product" => "\u{220F}",
b"coprod" | b"Coproduct" => "\u{2210}",
b"sum" | b"Sum" => "\u{2211}",
b"minus" => "\u{2212}",
b"mnplus" | b"mp" | b"MinusPlus" => "\u{2213}",
b"plusdo" | b"dotplus" => "\u{2214}",
b"setmn" | b"setminus" | b"Backslash" | b"ssetmn" | b"smallsetminus" => "\u{2216}",
b"lowast" => "\u{2217}",
b"compfn" | b"SmallCircle" => "\u{2218}",
b"radic" | b"Sqrt" => "\u{221A}",
b"prop" | b"propto" | b"Proportional" | b"vprop" | b"varpropto" => "\u{221D}",
b"infin" => "\u{221E}",
b"angrt" => "\u{221F}",
b"ang" | b"angle" => "\u{2220}",
b"angmsd" | b"measuredangle" => "\u{2221}",
b"angsph" => "\u{2222}",
b"mid" | b"VerticalBar" | b"smid" | b"shortmid" => "\u{2223}",
b"nmid" | b"NotVerticalBar" | b"nsmid" | b"nshortmid" => "\u{2224}",
b"par" | b"parallel" | b"DoubleVerticalBar" | b"spar" | b"shortparallel" => "\u{2225}",
b"npar"
| b"nparallel"
| b"NotDoubleVerticalBar"
| b"nspar"
| b"nshortparallel" => "\u{2226}",
b"and" | b"wedge" => "\u{2227}",
b"or" | b"vee" => "\u{2228}",
b"cap" => "\u{2229}",
b"cup" => "\u{222A}",
b"int" | b"Integral" => "\u{222B}",
b"Int" => "\u{222C}",
b"tint" | b"iiint" => "\u{222D}",
b"conint" | b"oint" | b"ContourIntegral" => "\u{222E}",
b"Conint" | b"DoubleContourIntegral" => "\u{222F}",
b"Cconint" => "\u{2230}",
b"cwint" => "\u{2231}",
b"cwconint" | b"ClockwiseContourIntegral" => "\u{2232}",
b"awconint" | b"CounterClockwiseContourIntegral" => "\u{2233}",
b"there4" | b"therefore" | b"Therefore" => "\u{2234}",
b"becaus" | b"because" | b"Because" => "\u{2235}",
b"ratio" => "\u{2236}",
b"Colon" | b"Proportion" => "\u{2237}",
b"minusd" | b"dotminus" => "\u{2238}",
b"mDDot" => "\u{223A}",
b"homtht" => "\u{223B}",
b"sim" | b"Tilde" | b"thksim" | b"thicksim" => "\u{223C}",
b"bsim" | b"backsim" => "\u{223D}",
b"ac" | b"mstpos" => "\u{223E}",
b"acd" => "\u{223F}",
b"wreath" | b"VerticalTilde" | b"wr" => "\u{2240}",
b"nsim" | b"NotTilde" => "\u{2241}",
b"esim" | b"EqualTilde" | b"eqsim" => "\u{2242}",
b"sime" | b"TildeEqual" | b"simeq" => "\u{2243}",
b"nsime" | b"nsimeq" | b"NotTildeEqual" => "\u{2244}",
b"cong" | b"TildeFullEqual" => "\u{2245}",
b"simne" => "\u{2246}",
b"ncong" | b"NotTildeFullEqual" => "\u{2247}",
b"asymp" | b"ap" | b"TildeTilde" | b"approx" | b"thkap" | b"thickapprox" => "\u{2248}",
b"nap" | b"NotTildeTilde" | b"napprox" => "\u{2249}",
b"ape" | b"approxeq" => "\u{224A}",
b"apid" => "\u{224B}",
b"bcong" | b"backcong" => "\u{224C}",
b"asympeq" | b"CupCap" => "\u{224D}",
b"bump" | b"HumpDownHump" | b"Bumpeq" => "\u{224E}",
b"bumpe" | b"HumpEqual" | b"bumpeq" => "\u{224F}",
b"esdot" | b"DotEqual" | b"doteq" => "\u{2250}",
b"eDot" | b"doteqdot" => "\u{2251}",
b"efDot" | b"fallingdotseq" => "\u{2252}",
b"erDot" | b"risingdotseq" => "\u{2253}",
b"colone" | b"coloneq" | b"Assign" => "\u{2254}",
b"ecolon" | b"eqcolon" => "\u{2255}",
b"ecir" | b"eqcirc" => "\u{2256}",
b"cire" | b"circeq" => "\u{2257}",
b"wedgeq" => "\u{2259}",
b"veeeq" => "\u{225A}",
b"trie" | b"triangleq" => "\u{225C}",
b"equest" | b"questeq" => "\u{225F}",
b"ne" | b"NotEqual" => "\u{2260}",
b"equiv" | b"Congruent" => "\u{2261}",
b"nequiv" | b"NotCongruent" => "\u{2262}",
b"le" | b"leq" => "\u{2264}",
b"ge" | b"GreaterEqual" | b"geq" => "\u{2265}",
b"lE" | b"LessFullEqual" | b"leqq" => "\u{2266}",
b"gE" | b"GreaterFullEqual" | b"geqq" => "\u{2267}",
b"lnE" | b"lneqq" => "\u{2268}",
b"gnE" | b"gneqq" => "\u{2269}",
b"Lt" | b"NestedLessLess" | b"ll" => "\u{226A}",
b"Gt" | b"NestedGreaterGreater" | b"gg" => "\u{226B}",
b"twixt" | b"between" => "\u{226C}",
b"NotCupCap" => "\u{226D}",
b"nlt" | b"NotLess" | b"nless" => "\u{226E}",
b"ngt" | b"NotGreater" | b"ngtr" => "\u{226F}",
b"nle" | b"NotLessEqual" | b"nleq" => "\u{2270}",
b"nge" | b"NotGreaterEqual" | b"ngeq" => "\u{2271}",
b"lsim" | b"LessTilde" | b"lesssim" => "\u{2272}",
b"gsim" | b"gtrsim" | b"GreaterTilde" => "\u{2273}",
b"nlsim" | b"NotLessTilde" => "\u{2274}",
b"ngsim" | b"NotGreaterTilde" => "\u{2275}",
b"lg" | b"lessgtr" | b"LessGreater" => "\u{2276}",
b"gl" | b"gtrless" | b"GreaterLess" => "\u{2277}",
b"ntlg" | b"NotLessGreater" => "\u{2278}",
b"ntgl" | b"NotGreaterLess" => "\u{2279}",
b"pr" | b"Precedes" | b"prec" => "\u{227A}",
b"sc" | b"Succeeds" | b"succ" => "\u{227B}",
b"prcue" | b"PrecedesSlantEqual" | b"preccurlyeq" => "\u{227C}",
b"sccue" | b"SucceedsSlantEqual" | b"succcurlyeq" => "\u{227D}",
b"prsim" | b"precsim" | b"PrecedesTilde" => "\u{227E}",
b"scsim" | b"succsim" | b"SucceedsTilde" => "\u{227F}",
b"npr" | b"nprec" | b"NotPrecedes" => "\u{2280}",
b"nsc" | b"nsucc" | b"NotSucceeds" => "\u{2281}",
b"sub" | b"subset" => "\u{2282}",
b"sup" | b"supset" | b"Superset" => "\u{2283}",
b"nsub" => "\u{2284}",
b"nsup" => "\u{2285}",
b"sube" | b"SubsetEqual" | b"subseteq" => "\u{2286}",
b"supe" | b"supseteq" | b"SupersetEqual" => "\u{2287}",
b"nsube" | b"nsubseteq" | b"NotSubsetEqual" => "\u{2288}",
b"nsupe" | b"nsupseteq" | b"NotSupersetEqual" => "\u{2289}",
b"subne" | b"subsetneq" => "\u{228A}",
b"supne" | b"supsetneq" => "\u{228B}",
b"cupdot" => "\u{228D}",
b"uplus" | b"UnionPlus" => "\u{228E}",
b"sqsub" | b"SquareSubset" | b"sqsubset" => "\u{228F}",
b"sqsup" | b"SquareSuperset" | b"sqsupset" => "\u{2290}",
b"sqsube" | b"SquareSubsetEqual" | b"sqsubseteq" => "\u{2291}",
b"sqsupe" | b"SquareSupersetEqual" | b"sqsupseteq" => "\u{2292}",
b"sqcap" | b"SquareIntersection" => "\u{2293}",
b"sqcup" | b"SquareUnion" => "\u{2294}",
b"oplus" | b"CirclePlus" => "\u{2295}",
b"ominus" | b"CircleMinus" => "\u{2296}",
b"otimes" | b"CircleTimes" => "\u{2297}",
b"osol" => "\u{2298}",
b"odot" | b"CircleDot" => "\u{2299}",
b"ocir" | b"circledcirc" => "\u{229A}",
b"oast" | b"circledast" => "\u{229B}",
b"odash" | b"circleddash" => "\u{229D}",
b"plusb" | b"boxplus" => "\u{229E}",
b"minusb" | b"boxminus" => "\u{229F}",
b"timesb" | b"boxtimes" => "\u{22A0}",
b"sdotb" | b"dotsquare" => "\u{22A1}",
b"vdash" | b"RightTee" => "\u{22A2}",
b"dashv" | b"LeftTee" => "\u{22A3}",
b"top" | b"DownTee" => "\u{22A4}",
b"bottom" | b"bot" | b"perp" | b"UpTee" => "\u{22A5}",
b"models" => "\u{22A7}",
b"vDash" | b"DoubleRightTee" => "\u{22A8}",
b"Vdash" => "\u{22A9}",
b"Vvdash" => "\u{22AA}",
b"VDash" => "\u{22AB}",
b"nvdash" => "\u{22AC}",
b"nvDash" => "\u{22AD}",
b"nVdash" => "\u{22AE}",
b"nVDash" => "\u{22AF}",
b"prurel" => "\u{22B0}",
b"vltri" | b"vartriangleleft" | b"LeftTriangle" => "\u{22B2}",
b"vrtri" | b"vartriangleright" | b"RightTriangle" => "\u{22B3}",
b"ltrie" | b"trianglelefteq" | b"LeftTriangleEqual" => "\u{22B4}",
b"rtrie" | b"trianglerighteq" | b"RightTriangleEqual" => "\u{22B5}",
b"origof" => "\u{22B6}",
b"imof" => "\u{22B7}",
b"mumap" | b"multimap" => "\u{22B8}",
b"hercon" => "\u{22B9}",
b"intcal" | b"intercal" => "\u{22BA}",
b"veebar" => "\u{22BB}",
b"barvee" => "\u{22BD}",
b"angrtvb" => "\u{22BE}",
b"lrtri" => "\u{22BF}",
b"xwedge" | b"Wedge" | b"bigwedge" => "\u{22C0}",
b"xvee" | b"Vee" | b"bigvee" => "\u{22C1}",
b"xcap" | b"Intersection" | b"bigcap" => "\u{22C2}",
b"xcup" | b"Union" | b"bigcup" => "\u{22C3}",
b"diam" | b"diamond" | b"Diamond" => "\u{22C4}",
b"sdot" => "\u{22C5}",
b"sstarf" | b"Star" => "\u{22C6}",
b"divonx" | b"divideontimes" => "\u{22C7}",
b"bowtie" => "\u{22C8}",
b"ltimes" => "\u{22C9}",
b"rtimes" => "\u{22CA}",
b"lthree" | b"leftthreetimes" => "\u{22CB}",
b"rthree" | b"rightthreetimes" => "\u{22CC}",
b"bsime" | b"backsimeq" => "\u{22CD}",
b"cuvee" | b"curlyvee" => "\u{22CE}",
b"cuwed" | b"curlywedge" => "\u{22CF}",
b"Sub" | b"Subset" => "\u{22D0}",
b"Sup" | b"Supset" => "\u{22D1}",
b"Cap" => "\u{22D2}",
b"Cup" => "\u{22D3}",
b"fork" | b"pitchfork" => "\u{22D4}",
b"epar" => "\u{22D5}",
b"ltdot" | b"lessdot" => "\u{22D6}",
b"gtdot" | b"gtrdot" => "\u{22D7}",
b"Ll" => "\u{22D8}",
b"Gg" | b"ggg" => "\u{22D9}",
b"leg" | b"LessEqualGreater" | b"lesseqgtr" => "\u{22DA}",
b"gel" | b"gtreqless" | b"GreaterEqualLess" => "\u{22DB}",
b"cuepr" | b"curlyeqprec" => "\u{22DE}",
b"cuesc" | b"curlyeqsucc" => "\u{22DF}",
b"nprcue" | b"NotPrecedesSlantEqual" => "\u{22E0}",
b"nsccue" | b"NotSucceedsSlantEqual" => "\u{22E1}",
b"nsqsube" | b"NotSquareSubsetEqual" => "\u{22E2}",
b"nsqsupe" | b"NotSquareSupersetEqual" => "\u{22E3}",
b"lnsim" => "\u{22E6}",
b"gnsim" => "\u{22E7}",
b"prnsim" | b"precnsim" => "\u{22E8}",
b"scnsim" | b"succnsim" => "\u{22E9}",
b"nltri" | b"ntriangleleft" | b"NotLeftTriangle" => "\u{22EA}",
b"nrtri" | b"ntriangleright" | b"NotRightTriangle" => "\u{22EB}",
b"nltrie" | b"ntrianglelefteq" | b"NotLeftTriangleEqual" => "\u{22EC}",
b"nrtrie" | b"ntrianglerighteq" | b"NotRightTriangleEqual" => "\u{22ED}",
b"vellip" => "\u{22EE}",
b"ctdot" => "\u{22EF}",
b"utdot" => "\u{22F0}",
b"dtdot" => "\u{22F1}",
b"disin" => "\u{22F2}",
b"isinsv" => "\u{22F3}",
b"isins" => "\u{22F4}",
b"isindot" => "\u{22F5}",
b"notinvc" => "\u{22F6}",
b"notinvb" => "\u{22F7}",
b"isinE" => "\u{22F9}",
b"nisd" => "\u{22FA}",
b"xnis" => "\u{22FB}",
b"nis" => "\u{22FC}",
b"notnivc" => "\u{22FD}",
b"notnivb" => "\u{22FE}",
b"barwed" | b"barwedge" => "\u{2305}",
b"Barwed" | b"doublebarwedge" => "\u{2306}",
b"lceil" | b"LeftCeiling" => "\u{2308}",
b"rceil" | b"RightCeiling" => "\u{2309}",
b"lfloor" | b"LeftFloor" => "\u{230A}",
b"rfloor" | b"RightFloor" => "\u{230B}",
b"drcrop" => "\u{230C}",
b"dlcrop" => "\u{230D}",
b"urcrop" => "\u{230E}",
b"ulcrop" => "\u{230F}",
b"bnot" => "\u{2310}",
b"profline" => "\u{2312}",
b"profsurf" => "\u{2313}",
b"telrec" => "\u{2315}",
b"target" => "\u{2316}",
b"ulcorn" | b"ulcorner" => "\u{231C}",
b"urcorn" | b"urcorner" => "\u{231D}",
b"dlcorn" | b"llcorner" => "\u{231E}",
b"drcorn" | b"lrcorner" => "\u{231F}",
b"frown" | b"sfrown" => "\u{2322}",
b"smile" | b"ssmile" => "\u{2323}",
b"cylcty" => "\u{232D}",
b"profalar" => "\u{232E}",
b"topbot" => "\u{2336}",
b"ovbar" => "\u{233D}",
b"solbar" => "\u{233F}",
b"angzarr" => "\u{237C}",
b"lmoust" | b"lmoustache" => "\u{23B0}",
b"rmoust" | b"rmoustache" => "\u{23B1}",
b"tbrk" | b"OverBracket" => "\u{23B4}",
b"bbrk" | b"UnderBracket" => "\u{23B5}",
b"bbrktbrk" => "\u{23B6}",
b"OverParenthesis" => "\u{23DC}",
b"UnderParenthesis" => "\u{23DD}",
b"OverBrace" => "\u{23DE}",
b"UnderBrace" => "\u{23DF}",
b"trpezium" => "\u{23E2}",
b"elinters" => "\u{23E7}",
b"blank" => "\u{2423}",
b"oS" | b"circledS" => "\u{24C8}",
b"boxh" | b"HorizontalLine" => "\u{2500}",
b"boxv" => "\u{2502}",
b"boxdr" => "\u{250C}",
b"boxdl" => "\u{2510}",
b"boxur" => "\u{2514}",
b"boxul" => "\u{2518}",
b"boxvr" => "\u{251C}",
b"boxvl" => "\u{2524}",
b"boxhd" => "\u{252C}",
b"boxhu" => "\u{2534}",
b"boxvh" => "\u{253C}",
b"boxH" => "\u{2550}",
b"boxV" => "\u{2551}",
b"boxdR" => "\u{2552}",
b"boxDr" => "\u{2553}",
b"boxDR" => "\u{2554}",
b"boxdL" => "\u{2555}",
b"boxDl" => "\u{2556}",
b"boxDL" => "\u{2557}",
b"boxuR" => "\u{2558}",
b"boxUr" => "\u{2559}",
b"boxUR" => "\u{255A}",
b"boxuL" => "\u{255B}",
b"boxUl" => "\u{255C}",
b"boxUL" => "\u{255D}",
b"boxvR" => "\u{255E}",
b"boxVr" => "\u{255F}",
b"boxVR" => "\u{2560}",
b"boxvL" => "\u{2561}",
b"boxVl" => "\u{2562}",
b"boxVL" => "\u{2563}",
b"boxHd" => "\u{2564}",
b"boxhD" => "\u{2565}",
b"boxHD" => "\u{2566}",
b"boxHu" => "\u{2567}",
b"boxhU" => "\u{2568}",
b"boxHU" => "\u{2569}",
b"boxvH" => "\u{256A}",
b"boxVh" => "\u{256B}",
b"boxVH" => "\u{256C}",
b"uhblk" => "\u{2580}",
b"lhblk" => "\u{2584}",
b"block" => "\u{2588}",
b"blk14" => "\u{2591}",
b"blk12" => "\u{2592}",
b"blk34" => "\u{2593}",
b"squ" | b"square" | b"Square" => "\u{25A1}",
b"squf" | b"squarf" | b"blacksquare" | b"FilledVerySmallSquare" => "\u{25AA}",
b"EmptyVerySmallSquare" => "\u{25AB}",
b"rect" => "\u{25AD}",
b"marker" => "\u{25AE}",
b"fltns" => "\u{25B1}",
b"xutri" | b"bigtriangleup" => "\u{25B3}",
b"utrif" | b"blacktriangle" => "\u{25B4}",
b"utri" | b"triangle" => "\u{25B5}",
b"rtrif" | b"blacktriangleright" => "\u{25B8}",
b"rtri" | b"triangleright" => "\u{25B9}",
b"xdtri" | b"bigtriangledown" => "\u{25BD}",
b"dtrif" | b"blacktriangledown" => "\u{25BE}",
b"dtri" | b"triangledown" => "\u{25BF}",
b"ltrif" | b"blacktriangleleft" => "\u{25C2}",
b"ltri" | b"triangleleft" => "\u{25C3}",
b"loz" | b"lozenge" => "\u{25CA}",
b"cir" => "\u{25CB}",
b"tridot" => "\u{25EC}",
b"xcirc" | b"bigcirc" => "\u{25EF}",
b"ultri" => "\u{25F8}",
b"urtri" => "\u{25F9}",
b"lltri" => "\u{25FA}",
b"EmptySmallSquare" => "\u{25FB}",
b"FilledSmallSquare" => "\u{25FC}",
b"starf" | b"bigstar" => "\u{2605}",
b"star" => "\u{2606}",
b"phone" => "\u{260E}",
b"female" => "\u{2640}",
b"male" => "\u{2642}",
b"spades" | b"spadesuit" => "\u{2660}",
b"clubs" | b"clubsuit" => "\u{2663}",
b"hearts" | b"heartsuit" => "\u{2665}",
b"diams" | b"diamondsuit" => "\u{2666}",
b"sung" => "\u{266A}",
b"flat" => "\u{266D}",
b"natur" | b"natural" => "\u{266E}",
b"sharp" => "\u{266F}",
b"check" | b"checkmark" => "\u{2713}",
b"cross" => "\u{2717}",
b"malt" | b"maltese" => "\u{2720}",
b"sext" => "\u{2736}",
b"VerticalSeparator" => "\u{2758}",
b"lbbrk" => "\u{2772}",
b"rbbrk" => "\u{2773}",
b"lobrk" | b"LeftDoubleBracket" => "\u{27E6}",
b"robrk" | b"RightDoubleBracket" => "\u{27E7}",
b"lang" | b"LeftAngleBracket" | b"langle" => "\u{27E8}",
b"rang" | b"RightAngleBracket" | b"rangle" => "\u{27E9}",
b"Lang" => "\u{27EA}",
b"Rang" => "\u{27EB}",
b"loang" => "\u{27EC}",
b"roang" => "\u{27ED}",
b"xlarr" | b"longleftarrow" | b"LongLeftArrow" => "\u{27F5}",
b"xrarr" | b"longrightarrow" | b"LongRightArrow" => "\u{27F6}",
b"xharr" | b"longleftrightarrow" | b"LongLeftRightArrow" => "\u{27F7}",
b"xlArr" | b"Longleftarrow" | b"DoubleLongLeftArrow" => "\u{27F8}",
b"xrArr" | b"Longrightarrow" | b"DoubleLongRightArrow" => "\u{27F9}",
b"xhArr" | b"Longleftrightarrow" | b"DoubleLongLeftRightArrow" => "\u{27FA}",
b"xmap" | b"longmapsto" => "\u{27FC}",
b"dzigrarr" => "\u{27FF}",
b"nvlArr" => "\u{2902}",
b"nvrArr" => "\u{2903}",
b"nvHarr" => "\u{2904}",
b"Map" => "\u{2905}",
b"lbarr" => "\u{290C}",
b"rbarr" | b"bkarow" => "\u{290D}",
b"lBarr" => "\u{290E}",
b"rBarr" | b"dbkarow" => "\u{290F}",
b"RBarr" | b"drbkarow" => "\u{2910}",
b"DDotrahd" => "\u{2911}",
b"UpArrowBar" => "\u{2912}",
b"DownArrowBar" => "\u{2913}",
b"Rarrtl" => "\u{2916}",
b"latail" => "\u{2919}",
b"ratail" => "\u{291A}",
b"lAtail" => "\u{291B}",
b"rAtail" => "\u{291C}",
b"larrfs" => "\u{291D}",
b"rarrfs" => "\u{291E}",
b"larrbfs" => "\u{291F}",
b"rarrbfs" => "\u{2920}",
b"nwarhk" => "\u{2923}",
b"nearhk" => "\u{2924}",
b"searhk" | b"hksearow" => "\u{2925}",
b"swarhk" | b"hkswarow" => "\u{2926}",
b"nwnear" => "\u{2927}",
b"nesear" | b"toea" => "\u{2928}",
b"seswar" | b"tosa" => "\u{2929}",
b"swnwar" => "\u{292A}",
b"rarrc" => "\u{2933}",
b"cudarrr" => "\u{2935}",
b"ldca" => "\u{2936}",
b"rdca" => "\u{2937}",
b"cudarrl" => "\u{2938}",
b"larrpl" => "\u{2939}",
b"curarrm" => "\u{293C}",
b"cularrp" => "\u{293D}",
b"rarrpl" => "\u{2945}",
b"harrcir" => "\u{2948}",
b"Uarrocir" => "\u{2949}",
b"lurdshar" => "\u{294A}",
b"ldrushar" => "\u{294B}",
b"LeftRightVector" => "\u{294E}",
b"RightUpDownVector" => "\u{294F}",
b"DownLeftRightVector" => "\u{2950}",
b"LeftUpDownVector" => "\u{2951}",
b"LeftVectorBar" => "\u{2952}",
b"RightVectorBar" => "\u{2953}",
b"RightUpVectorBar" => "\u{2954}",
b"RightDownVectorBar" => "\u{2955}",
b"DownLeftVectorBar" => "\u{2956}",
b"DownRightVectorBar" => "\u{2957}",
b"LeftUpVectorBar" => "\u{2958}",
b"LeftDownVectorBar" => "\u{2959}",
b"LeftTeeVector" => "\u{295A}",
b"RightTeeVector" => "\u{295B}",
b"RightUpTeeVector" => "\u{295C}",
b"RightDownTeeVector" => "\u{295D}",
b"DownLeftTeeVector" => "\u{295E}",
b"DownRightTeeVector" => "\u{295F}",
b"LeftUpTeeVector" => "\u{2960}",
b"LeftDownTeeVector" => "\u{2961}",
b"lHar" => "\u{2962}",
b"uHar" => "\u{2963}",
b"rHar" => "\u{2964}",
b"dHar" => "\u{2965}",
b"luruhar" => "\u{2966}",
b"ldrdhar" => "\u{2967}",
b"ruluhar" => "\u{2968}",
b"rdldhar" => "\u{2969}",
b"lharul" => "\u{296A}",
b"llhard" => "\u{296B}",
b"rharul" => "\u{296C}",
b"lrhard" => "\u{296D}",
b"udhar" | b"UpEquilibrium" => "\u{296E}",
b"duhar" | b"ReverseUpEquilibrium" => "\u{296F}",
b"RoundImplies" => "\u{2970}",
b"erarr" => "\u{2971}",
b"simrarr" => "\u{2972}",
b"larrsim" => "\u{2973}",
b"rarrsim" => "\u{2974}",
b"rarrap" => "\u{2975}",
b"ltlarr" => "\u{2976}",
b"gtrarr" => "\u{2978}",
b"subrarr" => "\u{2979}",
b"suplarr" => "\u{297B}",
b"lfisht" => "\u{297C}",
b"rfisht" => "\u{297D}",
b"ufisht" => "\u{297E}",
b"dfisht" => "\u{297F}",
b"lopar" => "\u{2985}",
b"ropar" => "\u{2986}",
b"lbrke" => "\u{298B}",
b"rbrke" => "\u{298C}",
b"lbrkslu" => "\u{298D}",
b"rbrksld" => "\u{298E}",
b"lbrksld" => "\u{298F}",
b"rbrkslu" => "\u{2990}",
b"langd" => "\u{2991}",
b"rangd" => "\u{2992}",
b"lparlt" => "\u{2993}",
b"rpargt" => "\u{2994}",
b"gtlPar" => "\u{2995}",
b"ltrPar" => "\u{2996}",
b"vzigzag" => "\u{299A}",
b"vangrt" => "\u{299C}",
b"angrtvbd" => "\u{299D}",
b"ange" => "\u{29A4}",
b"range" => "\u{29A5}",
b"dwangle" => "\u{29A6}",
b"uwangle" => "\u{29A7}",
b"angmsdaa" => "\u{29A8}",
b"angmsdab" => "\u{29A9}",
b"angmsdac" => "\u{29AA}",
b"angmsdad" => "\u{29AB}",
b"angmsdae" => "\u{29AC}",
b"angmsdaf" => "\u{29AD}",
b"angmsdag" => "\u{29AE}",
b"angmsdah" => "\u{29AF}",
b"bemptyv" => "\u{29B0}",
b"demptyv" => "\u{29B1}",
b"cemptyv" => "\u{29B2}",
b"raemptyv" => "\u{29B3}",
b"laemptyv" => "\u{29B4}",
b"ohbar" => "\u{29B5}",
b"omid" => "\u{29B6}",
b"opar" => "\u{29B7}",
b"operp" => "\u{29B9}",
b"olcross" => "\u{29BB}",
b"odsold" => "\u{29BC}",
b"olcir" => "\u{29BE}",
b"ofcir" => "\u{29BF}",
b"olt" => "\u{29C0}",
b"ogt" => "\u{29C1}",
b"cirscir" => "\u{29C2}",
b"cirE" => "\u{29C3}",
b"solb" => "\u{29C4}",
b"bsolb" => "\u{29C5}",
b"boxbox" => "\u{29C9}",
b"trisb" => "\u{29CD}",
b"rtriltri" => "\u{29CE}",
b"LeftTriangleBar" => "\u{29CF}",
b"RightTriangleBar" => "\u{29D0}",
b"race" => "\u{29DA}",
b"iinfin" => "\u{29DC}",
b"infintie" => "\u{29DD}",
b"nvinfin" => "\u{29DE}",
b"eparsl" => "\u{29E3}",
b"smeparsl" => "\u{29E4}",
b"eqvparsl" => "\u{29E5}",
b"lozf" | b"blacklozenge" => "\u{29EB}",
b"RuleDelayed" => "\u{29F4}",
b"dsol" => "\u{29F6}",
b"xodot" | b"bigodot" => "\u{2A00}",
b"xoplus" | b"bigoplus" => "\u{2A01}",
b"xotime" | b"bigotimes" => "\u{2A02}",
b"xuplus" | b"biguplus" => "\u{2A04}",
b"xsqcup" | b"bigsqcup" => "\u{2A06}",
b"qint" | b"iiiint" => "\u{2A0C}",
b"fpartint" => "\u{2A0D}",
b"cirfnint" => "\u{2A10}",
b"awint" => "\u{2A11}",
b"rppolint" => "\u{2A12}",
b"scpolint" => "\u{2A13}",
b"npolint" => "\u{2A14}",
b"pointint" => "\u{2A15}",
b"quatint" => "\u{2A16}",
b"intlarhk" => "\u{2A17}",
b"pluscir" => "\u{2A22}",
b"plusacir" => "\u{2A23}",
b"simplus" => "\u{2A24}",
b"plusdu" => "\u{2A25}",
b"plussim" => "\u{2A26}",
b"plustwo" => "\u{2A27}",
b"mcomma" => "\u{2A29}",
b"minusdu" => "\u{2A2A}",
b"loplus" => "\u{2A2D}",
b"roplus" => "\u{2A2E}",
b"Cross" => "\u{2A2F}",
b"timesd" => "\u{2A30}",
b"timesbar" => "\u{2A31}",
b"smashp" => "\u{2A33}",
b"lotimes" => "\u{2A34}",
b"rotimes" => "\u{2A35}",
b"otimesas" => "\u{2A36}",
b"Otimes" => "\u{2A37}",
b"odiv" => "\u{2A38}",
b"triplus" => "\u{2A39}",
b"triminus" => "\u{2A3A}",
b"tritime" => "\u{2A3B}",
b"iprod" | b"intprod" => "\u{2A3C}",
b"amalg" => "\u{2A3F}",
b"capdot" => "\u{2A40}",
b"ncup" => "\u{2A42}",
b"ncap" => "\u{2A43}",
b"capand" => "\u{2A44}",
b"cupor" => "\u{2A45}",
b"cupcap" => "\u{2A46}",
b"capcup" => "\u{2A47}",
b"cupbrcap" => "\u{2A48}",
b"capbrcup" => "\u{2A49}",
b"cupcup" => "\u{2A4A}",
b"capcap" => "\u{2A4B}",
b"ccups" => "\u{2A4C}",
b"ccaps" => "\u{2A4D}",
b"ccupssm" => "\u{2A50}",
b"And" => "\u{2A53}",
b"Or" => "\u{2A54}",
b"andand" => "\u{2A55}",
b"oror" => "\u{2A56}",
b"orslope" => "\u{2A57}",
b"andslope" => "\u{2A58}",
b"andv" => "\u{2A5A}",
b"orv" => "\u{2A5B}",
b"andd" => "\u{2A5C}",
b"ord" => "\u{2A5D}",
b"wedbar" => "\u{2A5F}",
b"sdote" => "\u{2A66}",
b"simdot" => "\u{2A6A}",
b"congdot" => "\u{2A6D}",
b"easter" => "\u{2A6E}",
b"apacir" => "\u{2A6F}",
b"apE" => "\u{2A70}",
b"eplus" => "\u{2A71}",
b"pluse" => "\u{2A72}",
b"Esim" => "\u{2A73}",
b"Colone" => "\u{2A74}",
b"Equal" => "\u{2A75}",
b"eDDot" | b"ddotseq" => "\u{2A77}",
b"equivDD" => "\u{2A78}",
b"ltcir" => "\u{2A79}",
b"gtcir" => "\u{2A7A}",
b"ltquest" => "\u{2A7B}",
b"gtquest" => "\u{2A7C}",
b"les" | b"LessSlantEqual" | b"leqslant" => "\u{2A7D}",
b"ges" | b"GreaterSlantEqual" | b"geqslant" => "\u{2A7E}",
b"lesdot" => "\u{2A7F}",
b"gesdot" => "\u{2A80}",
b"lesdoto" => "\u{2A81}",
b"gesdoto" => "\u{2A82}",
b"lesdotor" => "\u{2A83}",
b"gesdotol" => "\u{2A84}",
b"lap" | b"lessapprox" => "\u{2A85}",
b"gap" | b"gtrapprox" => "\u{2A86}",
b"lne" | b"lneq" => "\u{2A87}",
b"gne" | b"gneq" => "\u{2A88}",
b"lnap" | b"lnapprox" => "\u{2A89}",
b"gnap" | b"gnapprox" => "\u{2A8A}",
b"lEg" | b"lesseqqgtr" => "\u{2A8B}",
b"gEl" | b"gtreqqless" => "\u{2A8C}",
b"lsime" => "\u{2A8D}",
b"gsime" => "\u{2A8E}",
b"lsimg" => "\u{2A8F}",
b"gsiml" => "\u{2A90}",
b"lgE" => "\u{2A91}",
b"glE" => "\u{2A92}",
b"lesges" => "\u{2A93}",
b"gesles" => "\u{2A94}",
b"els" | b"eqslantless" => "\u{2A95}",
b"egs" | b"eqslantgtr" => "\u{2A96}",
b"elsdot" => "\u{2A97}",
b"egsdot" => "\u{2A98}",
b"el" => "\u{2A99}",
b"eg" => "\u{2A9A}",
b"siml" => "\u{2A9D}",
b"simg" => "\u{2A9E}",
b"simlE" => "\u{2A9F}",
b"simgE" => "\u{2AA0}",
b"LessLess" => "\u{2AA1}",
b"GreaterGreater" => "\u{2AA2}",
b"glj" => "\u{2AA4}",
b"gla" => "\u{2AA5}",
b"ltcc" => "\u{2AA6}",
b"gtcc" => "\u{2AA7}",
b"lescc" => "\u{2AA8}",
b"gescc" => "\u{2AA9}",
b"smt" => "\u{2AAA}",
b"lat" => "\u{2AAB}",
b"smte" => "\u{2AAC}",
b"late" => "\u{2AAD}",
b"bumpE" => "\u{2AAE}",
b"pre" | b"preceq" | b"PrecedesEqual" => "\u{2AAF}",
b"sce" | b"succeq" | b"SucceedsEqual" => "\u{2AB0}",
b"prE" => "\u{2AB3}",
b"scE" => "\u{2AB4}",
b"prnE" | b"precneqq" => "\u{2AB5}",
b"scnE" | b"succneqq" => "\u{2AB6}",
b"prap" | b"precapprox" => "\u{2AB7}",
b"scap" | b"succapprox" => "\u{2AB8}",
b"prnap" | b"precnapprox" => "\u{2AB9}",
b"scnap" | b"succnapprox" => "\u{2ABA}",
b"Pr" => "\u{2ABB}",
b"Sc" => "\u{2ABC}",
b"subdot" => "\u{2ABD}",
b"supdot" => "\u{2ABE}",
b"subplus" => "\u{2ABF}",
b"supplus" => "\u{2AC0}",
b"submult" => "\u{2AC1}",
b"supmult" => "\u{2AC2}",
b"subedot" => "\u{2AC3}",
b"supedot" => "\u{2AC4}",
b"subE" | b"subseteqq" => "\u{2AC5}",
b"supE" | b"supseteqq" => "\u{2AC6}",
b"subsim" => "\u{2AC7}",
b"supsim" => "\u{2AC8}",
b"subnE" | b"subsetneqq" => "\u{2ACB}",
b"supnE" | b"supsetneqq" => "\u{2ACC}",
b"csub" => "\u{2ACF}",
b"csup" => "\u{2AD0}",
b"csube" => "\u{2AD1}",
b"csupe" => "\u{2AD2}",
b"subsup" => "\u{2AD3}",
b"supsub" => "\u{2AD4}",
b"subsub" => "\u{2AD5}",
b"supsup" => "\u{2AD6}",
b"suphsub" => "\u{2AD7}",
b"supdsub" => "\u{2AD8}",
b"forkv" => "\u{2AD9}",
b"topfork" => "\u{2ADA}",
b"mlcp" => "\u{2ADB}",
b"Dashv" | b"DoubleLeftTee" => "\u{2AE4}",
b"Vdashl" => "\u{2AE6}",
b"Barv" => "\u{2AE7}",
b"vBar" => "\u{2AE8}",
b"vBarv" => "\u{2AE9}",
b"Vbar" => "\u{2AEB}",
b"Not" => "\u{2AEC}",
b"bNot" => "\u{2AED}",
b"rnmid" => "\u{2AEE}",
b"cirmid" => "\u{2AEF}",
b"midcir" => "\u{2AF0}",
b"topcir" => "\u{2AF1}",
b"nhpar" => "\u{2AF2}",
b"parsim" => "\u{2AF3}",
b"parsl" => "\u{2AFD}",
b"fflig" => "\u{FB00}",
b"filig" => "\u{FB01}",
b"fllig" => "\u{FB02}",
b"ffilig" => "\u{FB03}",
b"ffllig" => "\u{FB04}",
b"Ascr" => "\u{1D49}",
b"Cscr" => "\u{1D49}",
b"Dscr" => "\u{1D49}",
b"Gscr" => "\u{1D4A}",
b"Jscr" => "\u{1D4A}",
b"Kscr" => "\u{1D4A}",
b"Nscr" => "\u{1D4A}",
b"Oscr" => "\u{1D4A}",
b"Pscr" => "\u{1D4A}",
b"Qscr" => "\u{1D4A}",
b"Sscr" => "\u{1D4A}",
b"Tscr" => "\u{1D4A}",
b"Uscr" => "\u{1D4B}",
b"Vscr" => "\u{1D4B}",
b"Wscr" => "\u{1D4B}",
b"Xscr" => "\u{1D4B}",
b"Yscr" => "\u{1D4B}",
b"Zscr" => "\u{1D4B}",
b"ascr" => "\u{1D4B}",
b"bscr" => "\u{1D4B}",
b"cscr" => "\u{1D4B}",
b"dscr" => "\u{1D4B}",
b"fscr" => "\u{1D4B}",
b"hscr" => "\u{1D4B}",
b"iscr" => "\u{1D4B}",
b"jscr" => "\u{1D4B}",
b"kscr" => "\u{1D4C}",
b"lscr" => "\u{1D4C}",
b"mscr" => "\u{1D4C}",
b"nscr" => "\u{1D4C}",
b"pscr" => "\u{1D4C}",
b"qscr" => "\u{1D4C}",
b"rscr" => "\u{1D4C}",
b"sscr" => "\u{1D4C}",
b"tscr" => "\u{1D4C}",
b"uscr" => "\u{1D4C}",
b"vscr" => "\u{1D4C}",
b"wscr" => "\u{1D4C}",
b"xscr" => "\u{1D4C}",
b"yscr" => "\u{1D4C}",
b"zscr" => "\u{1D4C}",
b"Afr" => "\u{1D50}",
b"Bfr" => "\u{1D50}",
b"Dfr" => "\u{1D50}",
b"Efr" => "\u{1D50}",
b"Ffr" => "\u{1D50}",
b"Gfr" => "\u{1D50}",
b"Jfr" => "\u{1D50}",
b"Kfr" => "\u{1D50}",
b"Lfr" => "\u{1D50}",
b"Mfr" => "\u{1D51}",
b"Nfr" => "\u{1D51}",
b"Ofr" => "\u{1D51}",
b"Pfr" => "\u{1D51}",
b"Qfr" => "\u{1D51}",
b"Sfr" => "\u{1D51}",
b"Tfr" => "\u{1D51}",
b"Ufr" => "\u{1D51}",
b"Vfr" => "\u{1D51}",
b"Wfr" => "\u{1D51}",
b"Xfr" => "\u{1D51}",
b"Yfr" => "\u{1D51}",
b"afr" => "\u{1D51}",
b"bfr" => "\u{1D51}",
b"cfr" => "\u{1D52}",
b"dfr" => "\u{1D52}",
b"efr" => "\u{1D52}",
b"ffr" => "\u{1D52}",
b"gfr" => "\u{1D52}",
b"hfr" => "\u{1D52}",
b"ifr" => "\u{1D52}",
b"jfr" => "\u{1D52}",
b"kfr" => "\u{1D52}",
b"lfr" => "\u{1D52}",
b"mfr" => "\u{1D52}",
b"nfr" => "\u{1D52}",
b"ofr" => "\u{1D52}",
b"pfr" => "\u{1D52}",
b"qfr" => "\u{1D52}",
b"rfr" => "\u{1D52}",
b"sfr" => "\u{1D53}",
b"tfr" => "\u{1D53}",
b"ufr" => "\u{1D53}",
b"vfr" => "\u{1D53}",
b"wfr" => "\u{1D53}",
b"xfr" => "\u{1D53}",
b"yfr" => "\u{1D53}",
b"zfr" => "\u{1D53}",
b"Aopf" => "\u{1D53}",
b"Bopf" => "\u{1D53}",
b"Dopf" => "\u{1D53}",
b"Eopf" => "\u{1D53}",
b"Fopf" => "\u{1D53}",
b"Gopf" => "\u{1D53}",
b"Iopf" => "\u{1D54}",
b"Jopf" => "\u{1D54}",
b"Kopf" => "\u{1D54}",
b"Lopf" => "\u{1D54}",
b"Mopf" => "\u{1D54}",
b"Oopf" => "\u{1D54}",
b"Sopf" => "\u{1D54}",
b"Topf" => "\u{1D54}",
b"Uopf" => "\u{1D54}",
b"Vopf" => "\u{1D54}",
b"Wopf" => "\u{1D54}",
b"Xopf" => "\u{1D54}",
b"Yopf" => "\u{1D55}",
b"aopf" => "\u{1D55}",
b"bopf" => "\u{1D55}",
b"copf" => "\u{1D55}",
b"dopf" => "\u{1D55}",
b"eopf" => "\u{1D55}",
b"fopf" => "\u{1D55}",
b"gopf" => "\u{1D55}",
b"hopf" => "\u{1D55}",
b"iopf" => "\u{1D55}",
b"jopf" => "\u{1D55}",
b"kopf" => "\u{1D55}",
b"lopf" => "\u{1D55}",
b"mopf" => "\u{1D55}",
b"nopf" => "\u{1D55}",
b"oopf" => "\u{1D56}",
b"popf" => "\u{1D56}",
b"qopf" => "\u{1D56}",
b"ropf" => "\u{1D56}",
b"sopf" => "\u{1D56}",
b"topf" => "\u{1D56}",
b"uopf" => "\u{1D56}",
b"vopf" => "\u{1D56}",
b"wopf" => "\u{1D56}",
b"xopf" => "\u{1D56}",
b"yopf" => "\u{1D56}",
b"zopf" => "\u{1D56}",
_ => return None
};
Some(s)
}
/// Appends the UTF-8 encoding of `code` to the byte buffer `out`.
fn push_utf8(out: &mut Vec<u8>, code: char) {
    // A `char` encodes to at most 4 bytes of UTF-8; stack buffer avoids allocation.
    let mut buf = [0u8; 4];
    let encoded = code.encode_utf8(&mut buf);
    out.extend_from_slice(encoded.as_bytes());
}
/// Parses a numeric character reference body (the part after `&#`) into a `char`.
///
/// A leading `x` selects hexadecimal (`&#xNN;`); otherwise the digits are
/// decimal (`&#NN;`). A NUL codepoint or an invalid scalar value (e.g. a
/// surrogate) is rejected.
fn parse_number(bytes: &[u8], range: Range<usize>) -> Result<char, EscapeError> {
    let code = if bytes.starts_with(b"x") {
        parse_hexadecimal(&bytes[1..])?
    } else {
        parse_decimal(bytes)?
    };
    // XML forbids the NUL character even when spelled as a reference.
    if code == 0 {
        return Err(EscapeError::EntityWithNull(range));
    }
    // `from_u32` returns `None` for surrogates and values above U+10FFFF.
    std::char::from_u32(code).ok_or(EscapeError::InvalidCodepoint(code))
}
/// Parses an ASCII hexadecimal digit string into a `u32` codepoint value.
fn parse_hexadecimal(bytes: &[u8]) -> Result<u32, EscapeError> {
    // Maximum codepoint is 0x10FFFF, i.e. at most 6 hex digits; this bound
    // also guarantees the accumulator below cannot overflow a u32.
    if bytes.len() > 6 {
        return Err(EscapeError::TooLongHexadecimal);
    }
    bytes.iter().try_fold(0u32, |acc, &b| {
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => b - b'a' + 10,
            b'A'..=b'F' => b - b'A' + 10,
            other => return Err(EscapeError::InvalidHexadecimal(other as char)),
        };
        Ok((acc << 4) + u32::from(digit))
    })
}
/// Parses an ASCII decimal digit string into its `u32` value.
fn parse_decimal(bytes: &[u8]) -> Result<u32, EscapeError> {
    // maximum code is 0x10FFFF = 1114111 => 7 characters
    if bytes.len() > 7 {
        return Err(EscapeError::TooLongDecimal);
    }
    bytes.iter().try_fold(0_u32, |acc, &byte| match byte {
        // Cannot overflow: at most 7 digits => value <= 9_999_999.
        b'0'..=b'9' => Ok(acc * 10 + u32::from(byte - b'0')),
        _ => Err(EscapeError::InvalidDecimal(byte as char)),
    })
}
#[test]
fn test_unescape() {
assert_eq!(&*unescape(b"test").unwrap(), b"test");
assert_eq!(&*unescape(b"<test>").unwrap(), b"<test>");
assert_eq!(&*unescape(b"0").unwrap(), b"0");
assert_eq!(&*unescape(b"0").unwrap(), b"0");
assert!(unescape(b"&foo;").is_err());
}
#[test]
fn test_unescape_with() {
let custom_entities = vec![(b"foo".to_vec(), b"BAR".to_vec())]
.into_iter()
.collect();
assert_eq!(&*unescape_with(b"test", &custom_entities).unwrap(), b"test");
assert_eq!(
&*unescape_with(b"<test>", &custom_entities).unwrap(),
b"<test>"
);
assert_eq!(&*unescape_with(b"0", &custom_entities).unwrap(), b"0");
assert_eq!(&*unescape_with(b"0", &custom_entities).unwrap(), b"0");
assert_eq!(&*unescape_with(b"&foo;", &custom_entities).unwrap(), b"BAR");
assert!(unescape_with(b"&fop;", &custom_entities).is_err());
}
#[test]
fn test_escape() {
assert_eq!(&*escape(b"test"), b"test");
assert_eq!(&*escape(b"<test>"), b"<test>");
assert_eq!(&*escape(b"\"a\"bc"), b""a"bc");
assert_eq!(&*escape(b"\"a\"b&c"), b""a"b&c");
assert_eq!(
&*escape(b"prefix_\"a\"b&<>c"),
"prefix_"a"b&<>c".as_bytes()
);
}
| 37.102697 | 100 | 0.416004 |
d57077dc7526e57e068d42ea9c529023a593179f | 1,196 | use shaderc::{OptimizationLevel, ShaderKind};
use std::fs;
/// Batch GLSL-to-SPIR-V compiler driven by file extension.
pub struct Compiler;

impl Compiler {
    /// Recursively walks `directory`, compiling every `.vert` and `.frag`
    /// file it finds. Panics on any filesystem or compilation failure.
    pub fn compile_shaders(directory: &str) {
        for dir_entry in fs::read_dir(directory).unwrap() {
            let entry_path = dir_entry.unwrap().path();
            let entry_name = entry_path.as_path().to_str().unwrap();
            if entry_path.is_dir() {
                Self::compile_shaders(entry_name);
                continue;
            }
            if entry_name.ends_with(".frag") {
                Self::compile_shader(entry_name, ShaderKind::Fragment);
            } else if entry_name.ends_with(".vert") {
                Self::compile_shader(entry_name, ShaderKind::Vertex);
            }
        }
    }

    /// Compiles a single GLSL file (entry point `main`) at performance
    /// optimization level and writes the binary to `<filename>.spirv`.
    pub fn compile_shader(filename: &str, kind: ShaderKind) {
        let source = fs::read_to_string(filename).unwrap();
        let mut compiler = shaderc::Compiler::new().unwrap();
        let mut options = shaderc::CompileOptions::new().unwrap();
        options.set_optimization_level(OptimizationLevel::Performance);
        let artefact = compiler
            .compile_into_spirv(&source, kind, filename, "main", Some(&options))
            .unwrap();
        fs::write(format!("{}.spirv", filename), artefact.as_binary_u8()).unwrap();
    }
}
| 34.171429 | 109 | 0.600334 |
bb9d6335831aaf39c548cc199584a5cb40a4c18b | 960 | use rcc::traits::Rcc;
use rcc::traits::Hsi;
use bitbanding::range::Range;
use bitbanding::traits::PeripheralBitbanding;
use parts::stm32::f4:Stm32F439 as Stm32F439Trait;
pub struct Stm32F439 {}
// Marker trait for this part.
// NOTE(review): this local trait has the same name as the `Stm32F439Trait`
// alias imported above (which also has a malformed path, `f4:` instead of
// `f4::`); the local definition shadows the import — confirm which is intended.
pub trait Stm32F439Trait {}
impl Stm32F439Trait for Stm32F439 {}
impl Rcc for Stm32F439 {
    // Base address of the RCC register block.
    const RCC: u32 = 0x4002_3800;
    // Reset values for RCC_CR / RCC_CFGR — verify against the reference manual (RM0090).
    const RCC_CR_RESET_VALUE: u32 = 0x0000_0083;
    const RCC_CFGR_RESET_VALUE: u32 = 0x0000_0000;
}
impl RccCrHsi for Stm32F439 {
    // HSI oscillator enable / ready bits in RCC_CR.
    const RCC_HSION_BIT: u8 = 0x00;
    const RCC_HSIRDY_BIT: u8 = 0x01;
    // NOTE(review): mask 0x0000_00FC with offset 2 spans bits 2..=7, but on
    // STM32F4 HSITRIM is documented at bits 7:3; likewise HSICAL's mask
    // 0x0000_FF00 (bits 15:8) is internally inconsistent with offset 0x07 —
    // confirm these against RM0090.
    const RCC_HSITRIM_MASK: u32 = 0x0000_00FC;
    const RCC_HSITRIM_OFFS: u8 = 0x02;
    const RCC_HSICAL_MASK: u32 = 0x0000_FF00;
    const RCC_HSICAL_OFFS: u8 = 0x07;
}
impl PeripheralBitbanding for Stm32F439 {
    // NOTE(review): 0x2000_0000..0x2200_0000 is the SRAM bit-band *region* on
    // Cortex-M4, and 0x4000_0000..0x4200_0000 is the peripheral region (not
    // its alias) — the field names look swapped relative to the values;
    // verify the intended semantics of `Range` here.
    const PERIPHERAL_BITBAND: Range = Range {
        start: 0x2000_0000,
        end: 0x2200_0000,
    };
    const PERIPHERAL_ALIAS: Range = Range {
        start: 0x4000_0000,
        end: 0x4200_0000,
    };
}
| 23.414634 | 50 | 0.702083 |
1cb82c7195b9fa791ee32655f727e3c04378b377 | 1,627 | #[doc = "Register `WLENGTHH` reader"]
// svd2rust-style newtype wrapping the generic register reader for WLENGTHH.
pub struct R(crate::R<WLENGTHH_SPEC>);
// Deref lets callers use the generic reader's methods (e.g. `bits()`) directly.
impl core::ops::Deref for R {
    type Target = crate::R<WLENGTHH_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Conversion from the generic reader produced by the register access machinery.
impl From<crate::R<WLENGTHH_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<WLENGTHH_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Field `WLENGTHH` reader - SETUP data, byte 7, MSB of wLength"]
pub struct WLENGTHH_R(crate::FieldReader<u8, u8>);
impl WLENGTHH_R {
    // Wraps the raw 8-bit field value in the generic `FieldReader`.
    pub(crate) fn new(bits: u8) -> Self {
        WLENGTHH_R(crate::FieldReader::new(bits))
    }
}
// Deref exposes the generic field-reader API on the field type.
impl core::ops::Deref for WLENGTHH_R {
    type Target = crate::FieldReader<u8, u8>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl R {
    #[doc = "Bits 0:7 - SETUP data, byte 7, MSB of wLength"]
    #[inline(always)]
    pub fn wlengthh(&self) -> WLENGTHH_R {
        // Only the low 8 bits of the 32-bit register word hold the field.
        WLENGTHH_R::new((self.bits & 0xff) as u8)
    }
}
#[doc = "SETUP data, byte 7, MSB of wLength\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [wlengthh](index.html) module"]
pub struct WLENGTHH_SPEC;
impl crate::RegisterSpec for WLENGTHH_SPEC {
    // The register is accessed as a full 32-bit word even though only bits 0:7 are used.
    type Ux = u32;
}
#[doc = "`read()` method returns [wlengthh::R](R) reader structure"]
impl crate::Readable for WLENGTHH_SPEC {
    type Reader = R;
}
#[doc = "`reset()` method sets WLENGTHH to value 0"]
impl crate::Resettable for WLENGTHH_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 30.698113 | 245 | 0.628765 |
71d7467fe94960b09eeb9417838b4b8ff06c9ebf | 11,239 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
file_format::{CodeOffset, FunctionDefinitionIndex, TableIndex},
IndexKind,
};
use move_core_types::{
language_storage::ModuleId,
vm_status::{self, StatusCode, StatusType, VMStatus},
};
use std::fmt;
pub type VMResult<T> = ::std::result::Result<T, VMError>;
pub type BinaryLoaderResult<T> = ::std::result::Result<T, PartialVMError>;
pub type PartialVMResult<T> = ::std::result::Result<T, PartialVMError>;
/// Where an error originated: outside any code unit, in the transaction
/// script, or in a published module.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum Location {
    Undefined,
    Script,
    Module(ModuleId),
}
/// A VM error that has been bound to a `Location` (cf. `PartialVMError`).
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct VMError {
    // Primary machine-readable status code.
    major_status: StatusCode,
    // Optional refinement (e.g. the abort code when `major_status` is ABORTED).
    sub_status: Option<u64>,
    // Optional human-readable description.
    message: Option<String>,
    // Script/module (or undefined) the error was raised in.
    location: Location,
    // Table indices implicated in the failure, if any.
    indices: Vec<(IndexKind, TableIndex)>,
    // (function, bytecode offset) pairs pinpointing failure sites.
    offsets: Vec<(FunctionDefinitionIndex, CodeOffset)>,
}
impl VMError {
    /// Converts this internal error into the external `VMStatus` representation.
    ///
    /// Mapping: EXECUTED -> `Executed`; ABORTED with a code and a script/module
    /// location -> `MoveAbort`; other execution-type statuses with a known
    /// location and exactly one offset -> `ExecutionFailure`; everything else
    /// collapses to `VMStatus::Error(major_status)`.
    pub fn into_vm_status(self) -> VMStatus {
        let VMError {
            major_status,
            sub_status,
            location,
            mut offsets,
            ..
        } = self;
        match (major_status, sub_status, location) {
            (StatusCode::EXECUTED, sub_status, _) => {
                // Success never carries a sub status.
                debug_assert!(sub_status.is_none());
                VMStatus::Executed
            }
            (StatusCode::ABORTED, Some(code), Location::Script) => {
                VMStatus::MoveAbort(vm_status::AbortLocation::Script, code)
            }
            (StatusCode::ABORTED, Some(code), Location::Module(id)) => {
                VMStatus::MoveAbort(vm_status::AbortLocation::Module(id), code)
            }
            (StatusCode::ABORTED, sub_status, location) => {
                // An abort without a code or with an undefined location is a
                // bug in error construction; degrade gracefully in release.
                debug_assert!(
                    false,
                    "Expected a code and module/script location with ABORTED, but got {:?} and {}",
                    sub_status, location
                );
                VMStatus::Error(StatusCode::ABORTED)
            }
            // TODO Errors for OUT_OF_GAS do not always have index set
            (major_status, sub_status, location)
                if major_status.status_type() == StatusType::Execution =>
            {
                // Execution failures are expected to pinpoint exactly one
                // (function, code offset) pair.
                debug_assert!(
                    offsets.len() == 1,
                    "Unexpected offsets. major_status: {:?}\
                    sub_status: {:?}\
                    location: {:?}\
                    offsets: {:#?}",
                    major_status,
                    sub_status,
                    location,
                    offsets
                );
                let abort_location = match location {
                    Location::Script => vm_status::AbortLocation::Script,
                    Location::Module(id) => vm_status::AbortLocation::Module(id),
                    Location::Undefined => {
                        return VMStatus::Error(major_status);
                    }
                };
                // Without an offset we cannot build an ExecutionFailure.
                let (function, code_offset) = match offsets.pop() {
                    None => {
                        return VMStatus::Error(major_status);
                    }
                    Some((fdef_idx, code_offset)) => (fdef_idx.0, code_offset),
                };
                VMStatus::ExecutionFailure {
                    status_code: major_status,
                    location: abort_location,
                    function,
                    code_offset,
                }
            }
            (major_status, _, _) => VMStatus::Error(major_status),
        }
    }
    /// The primary status code.
    pub fn major_status(&self) -> StatusCode {
        self.major_status
    }
    /// The optional sub status (e.g. abort code).
    pub fn sub_status(&self) -> Option<u64> {
        self.sub_status
    }
    /// The optional human-readable message.
    pub fn message(&self) -> Option<&String> {
        self.message.as_ref()
    }
    /// The location the error was raised in.
    pub fn location(&self) -> &Location {
        &self.location
    }
    /// Table indices implicated in the failure.
    pub fn indices(&self) -> &Vec<(IndexKind, TableIndex)> {
        &self.indices
    }
    /// (function, bytecode offset) pairs pinpointing failure sites.
    pub fn offsets(&self) -> &Vec<(FunctionDefinitionIndex, CodeOffset)> {
        &self.offsets
    }
    /// The broad category of the major status.
    pub fn status_type(&self) -> StatusType {
        self.major_status.status_type()
    }
    /// Consumes the error and returns all of its fields.
    pub fn all_data(
        self,
    ) -> (
        StatusCode,
        Option<u64>,
        Option<String>,
        Location,
        Vec<(IndexKind, TableIndex)>,
        Vec<(FunctionDefinitionIndex, CodeOffset)>,
    ) {
        let VMError {
            major_status,
            sub_status,
            message,
            location,
            indices,
            offsets,
        } = self;
        (
            major_status,
            sub_status,
            message,
            location,
            indices,
            offsets,
        )
    }
}
/// A `VMError` that has not yet been bound to a `Location`; see `finish`.
#[derive(Debug, Clone)]
pub struct PartialVMError {
    major_status: StatusCode,
    sub_status: Option<u64>,
    message: Option<String>,
    indices: Vec<(IndexKind, TableIndex)>,
    offsets: Vec<(FunctionDefinitionIndex, CodeOffset)>,
}
impl PartialVMError {
    /// Attaches a `Location`, upgrading this partial error to a full `VMError`.
    pub fn finish(self, location: Location) -> VMError {
        VMError {
            major_status: self.major_status,
            sub_status: self.sub_status,
            location,
            message: self.message,
            indices: self.indices,
            offsets: self.offsets,
        }
    }
    /// Creates a bare error carrying only a major status code.
    pub fn new(major_status: StatusCode) -> Self {
        Self {
            major_status,
            sub_status: None,
            message: None,
            indices: Vec::new(),
            offsets: Vec::new(),
        }
    }
    /// The primary status code.
    pub fn major_status(&self) -> StatusCode {
        self.major_status
    }
    /// Sets the sub status; must not already be set.
    pub fn with_sub_status(mut self, sub_status: u64) -> Self {
        debug_assert!(self.sub_status.is_none());
        self.sub_status = Some(sub_status);
        self
    }
    /// Sets the human-readable message; must not already be set.
    pub fn with_message(mut self, message: String) -> Self {
        debug_assert!(self.message.is_none());
        self.message = Some(message);
        self
    }
    /// Records one implicated table index.
    pub fn at_index(mut self, kind: IndexKind, index: TableIndex) -> Self {
        self.indices.push((kind, index));
        self
    }
    /// Records several implicated table indices.
    pub fn at_indices(mut self, additional_indices: Vec<(IndexKind, TableIndex)>) -> Self {
        self.indices.extend(additional_indices);
        self
    }
    /// Records one (function, bytecode offset) failure site.
    pub fn at_code_offset(mut self, function: FunctionDefinitionIndex, offset: CodeOffset) -> Self {
        self.offsets.push((function, offset));
        self
    }
    /// Records several (function, bytecode offset) failure sites.
    pub fn at_code_offsets(
        mut self,
        additional_offsets: Vec<(FunctionDefinitionIndex, CodeOffset)>,
    ) -> Self {
        self.offsets.extend(additional_offsets);
        self
    }
    /// Appends `additional_message` to the message field, inserting `separator`
    /// first when a non-empty message is already present.
    pub fn append_message_with_separator(
        mut self,
        separator: char,
        additional_message: String,
    ) -> Self {
        self.message = Some(match self.message {
            None => additional_message,
            Some(mut existing) => {
                if !existing.is_empty() {
                    existing.push(separator);
                }
                existing.push_str(&additional_message);
                existing
            }
        });
        self
    }
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Location::Undefined => write!(f, "UNDEFINED"),
Location::Script => write!(f, "Script"),
Location::Module(id) => write!(f, "Module {:?}", id),
}
}
}
impl fmt::Display for PartialVMError {
    /// Renders the status followed by any sub status, message, indices and offsets.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Write incrementally instead of accumulating with `format!`;
        // the emitted text is identical.
        write!(f, "PartialVMError with status {:#?}", self.major_status)?;
        if let Some(sub_status) = self.sub_status {
            write!(f, " with sub status {}", sub_status)?;
        }
        if let Some(msg) = &self.message {
            write!(f, " and message {}", msg)?;
        }
        for (kind, index) in &self.indices {
            write!(f, " at index {} for {}", index, kind)?;
        }
        for (fdef, code_offset) in &self.offsets {
            write!(
                f,
                " at code offset {} in function definition {}",
                code_offset, fdef
            )?;
        }
        Ok(())
    }
}
impl fmt::Display for VMError {
    /// Renders the status, location, and any sub status, message, indices and offsets.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Write incrementally instead of accumulating with `format!`;
        // the emitted text is identical.
        write!(f, "VMError with status {:#?}", self.major_status)?;
        if let Some(sub_status) = self.sub_status {
            write!(f, " with sub status {}", sub_status)?;
        }
        write!(f, " at location {}", self.location)?;
        if let Some(msg) = &self.message {
            write!(f, " and message {}", msg)?;
        }
        for (kind, index) in &self.indices {
            write!(f, " at index {} for {}", index, kind)?;
        }
        for (fdef, code_offset) in &self.offsets {
            write!(
                f,
                " at code offset {} in function definition {}",
                code_offset, fdef
            )?;
        }
        Ok(())
    }
}
////////////////////////////////////////////////////////////////////////////
/// Conversion functions from internal VM statuses into external VM statuses
////////////////////////////////////////////////////////////////////////////
// Clippy `from_over_into`: implement `From` rather than `Into` — the standard
// blanket impl then provides `Into<VMStatus> for VMError` for free, so every
// existing `err.into()` call site keeps compiling unchanged.
impl From<VMError> for VMStatus {
    /// Converts the internal error into the external `VMStatus` representation.
    fn from(err: VMError) -> Self {
        err.into_vm_status()
    }
}
/// Collapses a `VMResult` into a `VMStatus`: success becomes `Executed`,
/// an error is converted via `into_vm_status`.
pub fn vm_status_of_result<T>(result: VMResult<T>) -> VMStatus {
    result.map_or_else(|err| err.into_vm_status(), |_| VMStatus::Executed)
}
/// Builds a `PartialVMError` for a jump/offset that falls outside its pool,
/// recording both a descriptive message and the (function, offset) site.
pub fn offset_out_of_bounds(
    status: StatusCode,
    kind: IndexKind,
    target_offset: usize,
    target_pool_len: usize,
    cur_function: FunctionDefinitionIndex,
    cur_bytecode_offset: CodeOffset,
) -> PartialVMError {
    PartialVMError::new(status)
        .with_message(format!(
            "Index {} out of bounds for {} at bytecode offset {} in function {} while indexing {}",
            target_offset, target_pool_len, cur_bytecode_offset, cur_function, kind
        ))
        .at_code_offset(cur_function, cur_bytecode_offset)
}
/// Builds a `PartialVMError` for a table index out of bounds, recording both a
/// descriptive message and the structured (kind, index) pair.
pub fn bounds_error(
    status: StatusCode,
    kind: IndexKind,
    idx: TableIndex,
    len: usize,
) -> PartialVMError {
    let message = format!(
        "Index {} out of bounds for {} while indexing {}",
        idx, len, kind
    );
    PartialVMError::new(status)
        .with_message(message)
        .at_index(kind, idx)
}
/// Builds a `PartialVMError` for a bytecode-verification failure at the given
/// table index; no human-readable message is attached.
pub fn verification_error(status: StatusCode, kind: IndexKind, idx: TableIndex) -> PartialVMError {
    PartialVMError::new(status).at_index(kind, idx)
}
| 29.11658 | 99 | 0.525581 |
56ca9ba59a627ec8f2622c2267c6b34869161add | 379 | use uinput::{Error, Device};
use uinput::event::Keyboard::All;
use uinput;
/// Creates the virtual keyboard device, stringifying the underlying uinput
/// error so callers do not need the crate's error type.
pub fn create() -> Result<Device, String> {
    match create_device() {
        Ok(device) => Ok(device),
        Err(e) => Err(format!("{}", e)),
    }
}
/// Opens `/dev/uinput` and registers a virtual keyboard exposing every key event.
fn create_device() -> Result<Device, Error> {
    let keyboard = uinput::open("/dev/uinput")?
        .name("razer-naga-virtual-keyboard")?
        .event(All)?
        .create()?;
    Ok(keyboard)
}
| 21.055556 | 45 | 0.583113 |
ab59ab6348e97ea7efa27c1e1e727c33cb933187 | 14,588 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use std::cmp::Ordering;
use crate::codec::collation::Collator;
use crate::{match_template_collator, match_template_evaltype};
use crate::{Collation, EvalType, FieldTypeAccessor};
use match_template::match_template;
use tipb::FieldType;
use super::*;
/// A scalar value container, a.k.a. datum, for all concrete eval types.
///
/// In many cases, for example, at the framework level, the concrete eval type is unknown at compile
/// time. So we use this enum container to represent types dynamically. It is similar to trait
/// object `Box<T>` where `T` is a concrete eval type but faster.
///
/// Like `VectorValue`, the inner concrete value is immutable.
///
/// Compared to `VectorValue`, it only contains a single concrete value.
/// Compared to `Datum`, it is a newer encapsulation that naturally wraps `Option<..>`.
///
/// TODO: Once we removed the `Option<..>` wrapper, it will be much like `Datum`. At that time,
/// we only need to preserve one of them.
#[derive(Clone, Debug, PartialEq)]
pub enum ScalarValue {
    // One variant per concrete eval type; `None` encodes SQL NULL.
    Int(Option<super::Int>),
    Real(Option<super::Real>),
    Decimal(Option<super::Decimal>),
    Bytes(Option<super::Bytes>),
    DateTime(Option<super::DateTime>),
    Duration(Option<super::Duration>),
    Json(Option<super::Json>),
    Enum(Option<super::Enum>),
    Set(Option<super::Set>),
}
impl ScalarValue {
    /// Returns the runtime `EvalType` tag of this value.
    #[inline]
    pub fn eval_type(&self) -> EvalType {
        match_template_evaltype! {
            TT, match self {
                ScalarValue::TT(_) => EvalType::TT,
            }
        }
    }
    /// Borrows this value as a `ScalarValueRef` without copying payloads.
    #[inline]
    pub fn as_scalar_value_ref(&self) -> ScalarValueRef<'_> {
        match self {
            ScalarValue::Int(x) => ScalarValueRef::Int(x.as_ref()),
            ScalarValue::Duration(x) => ScalarValueRef::Duration(x.as_ref()),
            ScalarValue::DateTime(x) => ScalarValueRef::DateTime(x.as_ref()),
            ScalarValue::Real(x) => ScalarValueRef::Real(x.as_ref()),
            ScalarValue::Decimal(x) => ScalarValueRef::Decimal(x.as_ref()),
            ScalarValue::Bytes(x) => ScalarValueRef::Bytes(x.as_ref().map(|x| x.as_slice())),
            ScalarValue::Json(x) => ScalarValueRef::Json(x.as_ref().map(|x| x.as_ref())),
            ScalarValue::Enum(x) => ScalarValueRef::Enum(x.as_ref().map(|x| x.as_ref())),
            ScalarValue::Set(x) => ScalarValueRef::Set(x.as_ref().map(|x| x.as_ref())),
        }
    }
    /// Whether the wrapped payload is SQL NULL.
    #[inline]
    pub fn is_none(&self) -> bool {
        match_template_evaltype! {
            TT, match self {
                ScalarValue::TT(v) => v.is_none(),
            }
        }
    }
    /// Whether the wrapped payload is non-NULL.
    #[inline]
    pub fn is_some(&self) -> bool {
        match_template_evaltype! {
            TT, match self {
                ScalarValue::TT(v) => v.is_some(),
            }
        }
    }
}
impl AsMySQLBool for ScalarValue {
    /// Evaluates this value's MySQL boolean truthiness by delegating to the
    /// wrapped `Option`'s `AsMySQLBool` implementation.
    #[inline]
    fn as_mysql_bool(&self, context: &mut EvalContext) -> Result<bool> {
        match_template_evaltype! {
            TT, match self {
                ScalarValue::TT(v) => v.as_ref().as_mysql_bool(context),
            }
        }
    }
}
// Generates `From<T>`, `From<Option<T>>` and `From<ScalarValue> for Option<T>`
// conversions for variants whose name equals their payload type.
macro_rules! impl_from {
    ($ty:tt) => {
        impl From<Option<$ty>> for ScalarValue {
            #[inline]
            fn from(s: Option<$ty>) -> ScalarValue {
                ScalarValue::$ty(s)
            }
        }
        impl From<$ty> for ScalarValue {
            #[inline]
            fn from(s: $ty) -> ScalarValue {
                ScalarValue::$ty(Some(s))
            }
        }
        impl From<ScalarValue> for Option<$ty> {
            #[inline]
            fn from(s: ScalarValue) -> Option<$ty> {
                match s {
                    ScalarValue::$ty(v) => v,
                    // Extracting the wrong variant is a programming error.
                    _ => panic!(
                        "Cannot cast {} scalar value into {}",
                        s.eval_type(),
                        stringify!($ty),
                    ),
                }
            }
        }
    };
}
// Instantiate the conversions for each directly-wrapped eval type.
// (f64 / JsonRef / BytesRef get hand-written impls below.)
impl_from! { Int }
impl_from! { Real }
impl_from! { Decimal }
impl_from! { Bytes }
impl_from! { DateTime }
impl_from! { Duration }
impl_from! { Json }
impl From<Option<f64>> for ScalarValue {
    /// Wraps an optional raw float; values rejected by `Real::new` (NaN)
    /// collapse to SQL NULL.
    #[inline]
    fn from(s: Option<f64>) -> ScalarValue {
        let real = match s {
            Some(raw) => Real::new(raw).ok(),
            None => None,
        };
        ScalarValue::Real(real)
    }
}
impl<'a> From<Option<JsonRef<'a>>> for ScalarValue {
    /// Deep-copies the borrowed JSON into an owned `ScalarValue::Json`.
    #[inline]
    fn from(s: Option<JsonRef<'a>>) -> ScalarValue {
        match s {
            Some(json) => ScalarValue::Json(Some(json.to_owned())),
            None => ScalarValue::Json(None),
        }
    }
}
impl<'a> From<Option<BytesRef<'a>>> for ScalarValue {
    /// Copies the borrowed byte slice into an owned `ScalarValue::Bytes`.
    #[inline]
    fn from(s: Option<BytesRef<'a>>) -> ScalarValue {
        match s {
            Some(bytes) => ScalarValue::Bytes(Some(bytes.to_vec())),
            None => ScalarValue::Bytes(None),
        }
    }
}
impl From<f64> for ScalarValue {
    /// Wraps a raw float, delegating to the `Option<f64>` conversion
    /// (so NaN becomes SQL NULL).
    #[inline]
    fn from(s: f64) -> ScalarValue {
        ScalarValue::from(Some(s))
    }
}
impl From<ScalarValue> for Option<f64> {
    /// Extracts the raw float from a `Real` value; panics on any other variant.
    #[inline]
    fn from(s: ScalarValue) -> Option<f64> {
        match s {
            ScalarValue::Real(v) => v.map(|real| real.into_inner()),
            other => panic!("Cannot cast {} scalar value into f64", other.eval_type()),
        }
    }
}
/// A scalar value reference container. Can be created from `ScalarValue` or `VectorValue`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ScalarValueRef<'a> {
    // Borrowed counterparts of `ScalarValue`'s variants; `None` is SQL NULL.
    Int(Option<&'a super::Int>),
    Real(Option<&'a super::Real>),
    Decimal(Option<&'a super::Decimal>),
    Bytes(Option<BytesRef<'a>>),
    DateTime(Option<&'a super::DateTime>),
    Duration(Option<&'a super::Duration>),
    Json(Option<JsonRef<'a>>),
    Enum(Option<EnumRef<'a>>),
    Set(Option<SetRef<'a>>),
}
impl<'a> ScalarValueRef<'a> {
    /// Deep-copies the referenced payload into an owned `ScalarValue`.
    #[inline]
    #[allow(clippy::clone_on_copy)]
    pub fn to_owned(self) -> ScalarValue {
        match self {
            ScalarValueRef::Int(x) => ScalarValue::Int(x.cloned()),
            ScalarValueRef::Duration(x) => ScalarValue::Duration(x.cloned()),
            ScalarValueRef::DateTime(x) => ScalarValue::DateTime(x.cloned()),
            ScalarValueRef::Real(x) => ScalarValue::Real(x.cloned()),
            ScalarValueRef::Decimal(x) => ScalarValue::Decimal(x.cloned()),
            ScalarValueRef::Bytes(x) => ScalarValue::Bytes(x.map(|x| x.to_vec())),
            ScalarValueRef::Json(x) => ScalarValue::Json(x.map(|x| x.to_owned())),
            ScalarValueRef::Enum(x) => ScalarValue::Enum(x.map(|x| x.to_owned())),
            ScalarValueRef::Set(x) => ScalarValue::Set(x.map(|x| x.to_owned())),
        }
    }
    /// Returns the runtime `EvalType` tag of the referenced value.
    #[inline]
    pub fn eval_type(&self) -> EvalType {
        match_template_evaltype! {
            TT, match self {
                ScalarValueRef::TT(_) => EvalType::TT,
            }
        }
    }
    /// Encodes into binary format.
    pub fn encode(
        &self,
        field_type: &FieldType,
        ctx: &mut EvalContext,
        output: &mut Vec<u8>,
    ) -> Result<()> {
        use crate::codec::datum_codec::EvaluableDatumEncoder;
        // Every variant writes either a NULL datum or its typed datum form.
        match self {
            ScalarValueRef::Int(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        // Always encode to INT / UINT instead of VAR INT to be efficient.
                        let is_unsigned = field_type.is_unsigned();
                        output.write_evaluable_datum_int(**val, is_unsigned)?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::Real(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        output.write_evaluable_datum_real(val.into_inner())?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::Decimal(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        output.write_evaluable_datum_decimal(val)?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::Bytes(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        output.write_evaluable_datum_bytes(val)?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::DateTime(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        output.write_evaluable_datum_date_time(**val, ctx)?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::Duration(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        output.write_evaluable_datum_duration(**val)?;
                    }
                }
                Ok(())
            }
            ScalarValueRef::Json(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(ref val) => {
                        output.write_evaluable_datum_json(*val)?;
                    }
                }
                Ok(())
            }
            // TODO: we should implement enum/set encode
            ScalarValueRef::Enum(_) => unimplemented!(),
            ScalarValueRef::Set(_) => unimplemented!(),
        }
    }
    /// Like `encode`, but bytes are first transformed into the collation's
    /// sort key so the encoded form compares correctly; other types encode as-is.
    pub fn encode_sort_key(
        &self,
        field_type: &FieldType,
        ctx: &mut EvalContext,
        output: &mut Vec<u8>,
    ) -> Result<()> {
        use crate::codec::datum_codec::EvaluableDatumEncoder;
        match self {
            ScalarValueRef::Bytes(val) => {
                match val {
                    None => {
                        output.write_evaluable_datum_null()?;
                    }
                    Some(val) => {
                        // Dispatch on the column collation to build the sort key.
                        let sort_key = match_template_collator! {
                            TT, match field_type.collation().map_err(crate::codec::Error::from)? {
                                Collation::TT => TT::sort_key(val)?
                            }
                        };
                        output.write_evaluable_datum_bytes(&sort_key)?;
                    }
                }
                Ok(())
            }
            _ => self.encode(field_type, ctx, output),
        }
    }
    /// Compares two refs of the same eval type for sorting purposes.
    ///
    /// Ints honour the column's unsignedness, bytes honour its collation;
    /// panics when the two refs hold different eval types.
    #[inline]
    pub fn cmp_sort_key(
        &self,
        other: &ScalarValueRef<'_>,
        field_type: &FieldType,
    ) -> crate::codec::Result<Ordering> {
        Ok(match_template! {
            TT = [Real, Decimal, DateTime, Duration, Json, Enum],
            match (self, other) {
                (ScalarValueRef::TT(v1), ScalarValueRef::TT(v2)) => v1.cmp(v2),
                (ScalarValueRef::Int(v1), ScalarValueRef::Int(v2)) => compare_int(&v1.cloned(), &v2.cloned(), field_type),
                (ScalarValueRef::Bytes(None), ScalarValueRef::Bytes(None)) => Ordering::Equal,
                (ScalarValueRef::Bytes(Some(_)), ScalarValueRef::Bytes(None)) => Ordering::Greater,
                (ScalarValueRef::Bytes(None), ScalarValueRef::Bytes(Some(_))) => Ordering::Less,
                (ScalarValueRef::Bytes(Some(v1)), ScalarValueRef::Bytes(Some(v2))) => {
                    match_template_collator! {
                        TT, match field_type.collation()? {
                            Collation::TT => TT::sort_compare(v1, v2)?
                        }
                    }
                }
                _ => panic!("Cannot compare two ScalarValueRef in different type"),
            }
        })
    }
}
/// Compares two optional ints with the signedness dictated by `field_type`.
///
/// Unsigned columns reinterpret the stored i64 bits as u64 before comparing;
/// `None` (NULL) sorts lowest either way via `Option`'s ordering.
#[inline]
fn compare_int(
    lhs: &Option<super::Int>,
    rhs: &Option<super::Int>,
    field_type: &FieldType,
) -> Ordering {
    if !field_type.is_unsigned() {
        return lhs.cmp(rhs);
    }
    let unsigned_lhs = lhs.map(|v| v as u64);
    let unsigned_rhs = rhs.map(|v| v as u64);
    unsigned_lhs.cmp(&unsigned_rhs)
}
// Generates a borrowing `as_xxx()` accessor on both `ScalarValue` and
// `ScalarValueRef` for an `Evaluable` payload type.
macro_rules! impl_as_ref {
    ($ty:tt, $name:ident) => {
        impl ScalarValue {
            #[inline]
            pub fn $name(&self) -> Option<&$ty> {
                Evaluable::borrow_scalar_value(self)
            }
        }
        impl<'a> ScalarValueRef<'a> {
            #[inline]
            pub fn $name(&'a self) -> Option<&'a $ty> {
                Evaluable::borrow_scalar_value_ref(*self)
            }
        }
    };
}
// Instantiate the accessors for the `Evaluable` (by-reference) types;
// JSON and bytes get hand-written accessors below via `EvaluableRef`.
impl_as_ref! { Int, as_int }
impl_as_ref! { Real, as_real }
impl_as_ref! { Decimal, as_decimal }
impl_as_ref! { DateTime, as_date_time }
impl_as_ref! { Duration, as_duration }
impl ScalarValue {
    /// Borrows the wrapped JSON value, if any.
    #[inline]
    pub fn as_json(&self) -> Option<JsonRef<'_>> {
        EvaluableRef::borrow_scalar_value(self)
    }
}
impl<'a> ScalarValueRef<'a> {
    /// Borrows the referenced JSON value, if any.
    #[inline]
    pub fn as_json(&'a self) -> Option<JsonRef<'a>> {
        EvaluableRef::borrow_scalar_value_ref(*self)
    }
}
impl ScalarValue {
    /// Borrows the wrapped byte payload, if any.
    #[inline]
    pub fn as_bytes(&self) -> Option<BytesRef<'_>> {
        EvaluableRef::borrow_scalar_value(self)
    }
}
impl<'a> ScalarValueRef<'a> {
    /// Borrows the referenced byte payload, if any.
    #[inline]
    pub fn as_bytes(&'a self) -> Option<BytesRef<'a>> {
        EvaluableRef::borrow_scalar_value_ref(*self)
    }
}
impl<'a> Ord for ScalarValueRef<'a> {
    /// Total order over refs of the same eval type; panics when the eval
    /// types differ (no order is defined across types).
    fn cmp(&self, other: &Self) -> Ordering {
        match self.partial_cmp(other) {
            Some(ordering) => ordering,
            None => panic!("Cannot compare two ScalarValueRef in different type"),
        }
    }
}
impl<'a> PartialOrd for ScalarValueRef<'a> {
    /// Orders two refs of the same eval type; returns `None` when the types differ.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match_template_evaltype! {
            TT, match (self, other) {
                // v1 and v2 are `Option<T>`. However, in MySQL NULL values are considered lower
                // than any non-NULL value, so using `Option::PartialOrd` directly is fine.
                (ScalarValueRef::TT(v1), ScalarValueRef::TT(v2)) => Some(v1.cmp(v2)),
                _ => None,
            }
        }
    }
}
// Cross-type equality between owned and borrowed scalar values: both
// directions compare through the borrowed representation.
impl<'a> PartialEq<ScalarValue> for ScalarValueRef<'a> {
    fn eq(&self, other: &ScalarValue) -> bool {
        other.as_scalar_value_ref() == *self
    }
}
impl<'a> PartialEq<ScalarValueRef<'a>> for ScalarValue {
    fn eq(&self, other: &ScalarValueRef<'_>) -> bool {
        self.as_scalar_value_ref() == *other
    }
}
| 31.782135 | 122 | 0.503427 |
fe498e0a3247d8be98e90c31178f97f3937e6029 | 1,693 | /*
* Copyright 2022 nzelot<[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use tokio::task;
use crate::error::{Result, SoundbaseError};
use crate::db::{DbPool};
mod aow_rock_antenne;
mod tow_rock_antenne;
/// Kicks off the RockAntenne "album of the week" scrape on a background task;
/// failures are only logged, never propagated.
pub fn fetch_albums_of_week(db: &DbPool) {
    let pool = db.clone();
    tokio::task::spawn(async move {
        match aow_rock_antenne::fetch_new_rockantenne_album_of_week(pool).await {
            Ok(_) => (),
            Err(e) => println!("AOW Fetch for Rock Antenne raised an Error! => {:?}", e),
        }
    });
}
/// Kicks off the RockAntenne Top-20 chart scrape on a background task;
/// failures are only logged, never propagated.
pub fn fetch_charts(db: &DbPool) {
    let pool = db.clone();
    tokio::task::spawn(async move {
        match tow_rock_antenne::fetch_new_rockantenne_top20_of_week(pool).await {
            Ok(_) => (),
            Err(e) => println!("Charts Fetch for Rock Antenne raised an Error! => {:?}", e),
        }
    });
}
fn get_selector(selector: &'static str) -> Result<scraper::Selector> {
let sel = scraper::Selector::parse(selector);
match sel {
Ok(s) => Ok(s),
Err(e) => {
Err(SoundbaseError {
http_code: http::StatusCode::INTERNAL_SERVER_ERROR,
msg: format!("{:?}", e),
})
}
}
} | 31.943396 | 91 | 0.642646 |
ef28aeaa953d4211da2786e9cb14cfb6d45c936a | 4,667 | //! Processing of received requests
use crate::net::request::Request;
use super::resolver::DoHResolver;
use log::*;
use threadpool::{Builder as ThreadPoolBuilder, ThreadPool};
use num_cpus;
use crossbeam_channel::{Receiver as XBeamReceiver, RecvTimeoutError as XBeamRecvTimeoutError};
use srvzio;
use std::{thread, time::Duration};
const PROCESSOR_SERVICE_NAME: &'static str = "Processor";
const PROCESSOR_RECEIVER_THREAD_NAME: &'static str = "processor_receiver_thread";
const PROCESSOR_RESOLVER_THREAD_NAME: &'static str = "processor_resolver_thread";
const PROCESSOR_THREADS_COUNT_CPU_MULTIPLIER: usize = 4;
const PROCESSOR_RECEIVER_TIMEOUT_SEC: u64 = 10;
/// Processor is a service that receives and responds to DNS requests
///
/// It has internal threading to handle being "started" and "stopped": stopped at construction.
/// The `Request` are received via the `Receiver` (see `crossbeam::channel::Receiver`), and then
/// resolved via the `DoHResolver`.
///
/// The service is agnostic to what kind of DNS-over-HTTPS resolution is configured: it just
/// uses the provided `DoHResolver`.
pub struct Processor {
    // Inbound DNS requests to process.
    receiver: XBeamReceiver<Request>,
    // Pluggable DNS-over-HTTPS resolution strategy; cloned per request.
    resolver: Box<DoHResolver + Send>,
    // Starting/started/stopping/stopped flag shared with the receiver thread.
    status: srvzio::ServiceStatusFlag,
    // Handle of the background receiver thread; `None` until `start()` is called.
    receiver_thread: Option<thread::JoinHandle<()>>,
}
impl Processor {
    /// Builds a `Processor` in the stopped state; call `start()` to begin
    /// consuming requests.
    ///
    /// # Parameters
    /// * `receiver`: a `crossbeam::channel::Receiver` that delivers `Request` data
    /// * `resolver`: a struct that implements the `DoHResolver`, wrapped in a `Box`
    pub fn new(receiver: XBeamReceiver<Request>, resolver: Box<DoHResolver + Send>) -> Processor {
        Processor {
            status: srvzio::ServiceStatusFlag::default(),
            receiver_thread: None,
            receiver,
            resolver,
        }
    }
}
impl srvzio::Service for Processor {
    fn name(&self) -> &'static str {
        PROCESSOR_SERVICE_NAME
    }
    /// Spawns the receiver thread: it pulls requests from the channel and
    /// dispatches each to a worker pool until `stop()` is observed.
    fn start(&mut self) {
        self.status.starting();
        // Clones handed to the background thread.
        let receiver = self.receiver.clone();
        let resolver = self.resolver.clone();
        let status = self.status.clone();
        // Launch a 'request receiving' thread
        self.receiver_thread = Some(thread::Builder::new()
            .name(PROCESSOR_RECEIVER_THREAD_NAME.into())
            .spawn(move || {
                let pool = crate_thread_pool();
                status.started();
                // Receive 'requests' for processing, but interrupt at regular intervals to check if Processor was stopped
                loop {
                    match receiver.recv_timeout(Duration::from_secs(PROCESSOR_RECEIVER_TIMEOUT_SEC)) {
                        Ok(req) => {
                            {
                                let q = req.dns_query();
                                let s = req.source();
                                debug!("Received: id={} type={:?} source={} queries={:?}", q.id(), q.message_type(), s, q.queries());
                            }
                            // Each request gets its own resolver clone and pool slot.
                            let resolver = resolver.clone();
                            pool.execute(move || resolve_and_respond(req, resolver));
                        },
                        Err(XBeamRecvTimeoutError::Timeout) => {
                            // Timeout is the periodic chance to notice a stop request.
                            if status.is_stopping() {
                                trace!("{} is done running: stop processing requests", PROCESSOR_SERVICE_NAME);
                                break;
                            }
                        },
                        Err(err) => {
                            error!("Unexpected error when processing requests: {}", err);
                        }
                    }
                }
                // Drain in-flight work before declaring the service stopped.
                debug!("Wait for any pending processing...");
                pool.join();
                debug!("... done processing");
                status.stopped();
            })
            .expect(format!("Unable to spawn thread: {}", PROCESSOR_RECEIVER_THREAD_NAME).as_ref())
        );
    }
    // NOTE(review): busy-wait; consider yielding/sleeping between checks.
    fn await_started(&mut self) {
        while !self.status.is_started() {}
    }
    /// Signals the receiver thread to stop; it exits at its next poll timeout.
    fn stop(&mut self) {
        trace!("{} should now stop...", PROCESSOR_SERVICE_NAME);
        self.status.stopping();
    }
    fn await_stopped(&mut self) {
        while !self.status.is_stopped() {}
        // Wait for receiver thread to stop (if it's actually set)
        if self.receiver_thread.is_some() {
            self.receiver_thread
                .take()
                .unwrap()
                .join()
                .expect(format!("Panicked upon termination: {}", PROCESSOR_RECEIVER_THREAD_NAME).as_ref());
        }
    }
}
/// Builds the worker pool used to resolve requests, sized as a multiple of
/// the CPU count.
// NOTE(review): name presumably meant `create_thread_pool`.
fn crate_thread_pool() -> ThreadPool {
    let worker_count = num_cpus::get() * PROCESSOR_THREADS_COUNT_CPU_MULTIPLIER;
    ThreadPoolBuilder::new()
        .thread_name(PROCESSOR_RESOLVER_THREAD_NAME.into())
        .num_threads(worker_count)
        .build()
}
fn resolve_and_respond(req: Request, resolver: Box<DoHResolver>) -> () {
match resolver.resolve(req.dns_query()) {
Ok(res_msg) => {
debug!("Responding: id={} type={:?} answers={:?}", res_msg.id(), res_msg.message_type(), res_msg.answers());
req.respond(res_msg);
},
Err(err) => {
error!("Unable to resolve request: {}", err);
}
}
} | 30.907285 | 117 | 0.63574 |
5699ef376929a440bb26524b26635ae1d4684fb2 | 1,219 | //! The RFC 2389 Feature (`FEAT`) command
use crate::server::commands::Cmd;
use crate::server::error::FTPError;
use crate::server::reply::{Reply, ReplyCode};
use crate::server::CommandArgs;
use crate::storage;
/// Handler for the `FEAT` command: advertises the server's extensions.
pub struct Feat;

impl<S, U> Cmd<S, U> for Feat
where
    U: Send + Sync + 'static,
    S: 'static + storage::StorageBackend<U> + Sync + Send,
    S::File: tokio_io::AsyncRead + Send,
    S::Metadata: storage::Metadata,
{
    /// Builds the multi-line FEAT reply listing supported extensions.
    fn execute(&self, args: &CommandArgs<S, U>) -> Result<Reply, FTPError> {
        // Add the features. According to the spec (RFC 2389) each feature line
        // must be indented by a space — "UTF8" was previously missing it.
        let mut feat_text = vec![" SIZE", " MDTM", " UTF8"];
        // TLS-related features are only advertised when TLS is configured.
        if args.tls_configured {
            feat_text.push(" AUTH TLS");
            feat_text.push(" PBSZ");
            feat_text.push(" PROT");
        }
        if args.storage_features & storage::FEATURE_RESTART > 0 {
            feat_text.push(" REST STREAM");
        }
        // Show them in alphabetical order.
        feat_text.sort();
        feat_text.insert(0, "Extensions supported:");
        feat_text.push("END");
        let reply = Reply::new_multiline(ReplyCode::SystemStatus, feat_text);
        Ok(reply)
    }
}
| 30.475 | 77 | 0.602953 |
225d4c2b4d68e506425c7479baf0b62dcf59c2b9 | 137 | FieldFoo(<u32 as ::configure_me::parse_arg::ParseArg>::Error),
FieldBar(<String as ::configure_me::parse_arg::ParseArg>::Error),
| 45.666667 | 69 | 0.708029 |
bbb2cd11024177404c258353ef57360b49728f7d | 873 | //! Test saving "default" and specific quality jpeg.
#![cfg(all(feature = "jpeg", feature = "tiff"))]
extern crate image;
use image::{ImageOutputFormat, ImageFormat};
use std::io::Cursor;
#[test]
fn jqeg_qualitys() {
    // Encode the same image at default, low (10) and high (99) JPEG quality
    // and compare the resulting sizes.
    let img = image::open("tests/images/tiff/testsuite/mandrill.tiff").unwrap();

    let mut baseline = vec![];
    img.write_to(&mut Cursor::new(&mut baseline), ImageFormat::Jpeg).unwrap();
    // Every JPEG stream starts with the SOI marker 0xFF 0xD8.
    assert_eq!(&[255, 216], &baseline[..2]);

    let mut low_quality = vec![];
    img.write_to(&mut Cursor::new(&mut low_quality), ImageOutputFormat::Jpeg(10))
        .unwrap();
    assert_eq!(&[255, 216], &low_quality[..2]);
    assert!(low_quality.len() < baseline.len());

    let mut high_quality = vec![];
    img.write_to(&mut Cursor::new(&mut high_quality), ImageOutputFormat::Jpeg(99))
        .unwrap();
    assert_eq!(&[255, 216], &high_quality[..2]);
    assert!(high_quality.len() > baseline.len());
}
| 29.1 | 80 | 0.618557 |
mod client;
mod output;
use {
crate::{
client::*,
output::{CliStakePool, CliStakePoolDetails, CliStakePoolStakeAccountInfo, CliStakePools},
},
clap::{
crate_description, crate_name, crate_version, value_t, value_t_or_exit, App, AppSettings,
Arg, ArgGroup, ArgMatches, SubCommand,
},
solana_clap_utils::{
input_parsers::{keypair_of, pubkey_of},
input_validators::{
is_amount, is_keypair_or_ask_keyword, is_parsable, is_pubkey, is_url,
is_valid_percentage, is_valid_pubkey, is_valid_signer,
},
keypair::{signer_from_path_with_config, SignerFromPathConfig},
},
solana_cli_output::OutputFormat,
solana_client::rpc_client::RpcClient,
solana_program::{
borsh::{get_instance_packed_len, get_packed_len},
instruction::Instruction,
program_pack::Pack,
pubkey::Pubkey,
stake,
},
solana_remote_wallet::remote_wallet::RemoteWalletManager,
solana_sdk::{
commitment_config::CommitmentConfig,
native_token::{self, Sol},
signature::{Keypair, Signer},
signers::Signers,
system_instruction,
transaction::Transaction,
},
spl_associated_token_account::{create_associated_token_account, get_associated_token_address},
spl_stake_pool::state::ValidatorStakeInfo,
spl_stake_pool::{
self, find_stake_program_address, find_transient_stake_program_address,
find_withdraw_authority_program_address,
instruction::{FundingType, PreferredValidatorType},
state::{Fee, FeeType, StakePool, ValidatorList},
MINIMUM_ACTIVE_STAKE,
},
std::cmp::Ordering,
std::{process::exit, sync::Arc},
};
/// Shared CLI context: RPC connection, signing keys, and output options
/// threaded through every subcommand.
pub(crate) struct Config {
    rpc_client: RpcClient,
    // Emit extra diagnostic output when set.
    verbose: bool,
    output_format: OutputFormat,
    // Signer allowed to change pool-level settings (fees, authorities).
    manager: Box<dyn Signer>,
    // Signer allowed to manage the validator set and rebalance stake.
    staker: Box<dyn Signer>,
    // Optional deposit/withdraw (funding) authority for restricted pools.
    funding_authority: Option<Box<dyn Signer>>,
    // Default owner of pool-token accounts created by the CLI.
    token_owner: Box<dyn Signer>,
    // Pays transaction fees and rent for newly created accounts.
    fee_payer: Box<dyn Signer>,
    // Simulate transactions instead of submitting them.
    dry_run: bool,
    // Skip the implicit pool `update` most commands run first.
    no_update: bool,
}
// Boxed dynamic error type returned by all CLI commands.
type Error = Box<dyn std::error::Error>;
// Result alias shared by every command handler.
type CommandResult = Result<(), Error>;
// Serialized size in bytes of a stake account's `StakeState`.
const STAKE_STATE_LEN: usize = 200;
/// Sort a `Vec` of signers by pubkey and deduplicate it in place, so the
/// same keypair is never passed to `Transaction::sign` twice.
macro_rules! unique_signers {
    ($vec:ident) => {
        $vec.sort_by_key(|l| l.pubkey());
        $vec.dedup();
    };
}
/// Verify that the configured fee payer holds at least `required_balance`
/// lamports, returning a descriptive error otherwise.
fn check_fee_payer_balance(config: &Config, required_balance: u64) -> Result<(), Error> {
    let fee_payer = config.fee_payer.pubkey();
    let available = config.rpc_client.get_balance(&fee_payer)?;
    if available >= required_balance {
        return Ok(());
    }
    Err(format!(
        "Fee payer, {}, has insufficient balance: {} required, {} available",
        fee_payer,
        Sol(required_balance),
        Sol(available)
    )
    .into())
}
/// Load a signer from CLI arguments, falling back to `keypair_path` when
/// the `keypair_name` argument was not provided.
///
/// Exits the process with status 1 when the signer cannot be loaded.
fn get_signer(
    matches: &ArgMatches<'_>,
    keypair_name: &str,
    keypair_path: &str,
    wallet_manager: &mut Option<Arc<RemoteWalletManager>>,
    signer_from_path_config: SignerFromPathConfig,
) -> Box<dyn Signer> {
    let path = matches.value_of(keypair_name).unwrap_or(keypair_path);
    match signer_from_path_with_config(
        matches,
        path,
        keypair_name,
        wallet_manager,
        &signer_from_path_config,
    ) {
        Ok(signer) => signer,
        Err(e) => {
            eprintln!("error: {}", e);
            exit(1);
        }
    }
}
/// Submit `transaction` without waiting for confirmation.
///
/// In dry-run mode the transaction is only simulated and the result printed.
fn send_transaction_no_wait(
    config: &Config,
    transaction: Transaction,
) -> solana_client::client_error::Result<()> {
    if config.dry_run {
        let simulation = config.rpc_client.simulate_transaction(&transaction)?;
        println!("Simulate result: {:?}", simulation);
        return Ok(());
    }
    let signature = config.rpc_client.send_transaction(&transaction)?;
    println!("Signature: {}", signature);
    Ok(())
}
/// Submit `transaction` and block until it is confirmed.
///
/// In dry-run mode the transaction is only simulated and the result printed.
fn send_transaction(
    config: &Config,
    transaction: Transaction,
) -> solana_client::client_error::Result<()> {
    if config.dry_run {
        let simulation = config.rpc_client.simulate_transaction(&transaction)?;
        println!("Simulate result: {:?}", simulation);
        return Ok(());
    }
    let signature = config
        .rpc_client
        .send_and_confirm_transaction_with_spinner(&transaction)?;
    println!("Signature: {}", signature);
    Ok(())
}
/// Build and sign a transaction from `instructions`, verifying that the
/// fee payer can cover the network fee before returning it.
fn checked_transaction_with_signers<T: Signers>(
    config: &Config,
    instructions: &[Instruction],
    signers: &T,
) -> Result<Transaction, Error> {
    let (blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    let tx = Transaction::new_signed_with_payer(
        instructions,
        Some(&config.fee_payer.pubkey()),
        signers,
        blockhash,
    );
    let fee = fee_calculator.calculate_fee(tx.message());
    check_fee_payer_balance(config, fee)?;
    Ok(tx)
}
/// Generate a keypair for a brand-new, uninitialized stake account and
/// append the system instruction that allocates it with `lamports`.
///
/// Returns the keypair so the caller can add it to the signer set.
fn new_stake_account(
    fee_payer: &Pubkey,
    instructions: &mut Vec<Instruction>,
    lamports: u64,
) -> Keypair {
    // Caller supplied no stake account, so create one on the fly.
    let keypair = Keypair::new();
    let pubkey = keypair.pubkey();
    println!(
        "Creating account to receive stake {}",
        pubkey
    );
    let create_account_ix = system_instruction::create_account(
        fee_payer,
        &pubkey,
        lamports,
        STAKE_STATE_LEN as u64,
        &stake::program::id(),
    );
    instructions.push(create_account_ix);
    keypair
}
/// Create a brand-new stake pool.
///
/// Sends two transactions in sequence:
/// 1. setup: allocate + initialize the reserve stake account and the pool
///    token mint (and, if needed, the manager's fee token account);
/// 2. initialize: allocate the validator list and stake pool accounts and
///    run the on-chain `initialize` instruction.
///
/// Any account keypair argument left as `None` is generated fresh. The fee
/// payer must cover rent for all created accounts plus both transaction
/// fees, which is checked up front.
#[allow(clippy::too_many_arguments)]
fn command_create_pool(
    config: &Config,
    deposit_authority: Option<Keypair>,
    epoch_fee: Fee,
    stake_withdrawal_fee: Fee,
    stake_deposit_fee: Fee,
    stake_referral_fee: u8,
    max_validators: u32,
    stake_pool_keypair: Option<Keypair>,
    validator_list_keypair: Option<Keypair>,
    mint_keypair: Option<Keypair>,
    reserve_keypair: Option<Keypair>,
) -> CommandResult {
    let reserve_keypair = reserve_keypair.unwrap_or_else(Keypair::new);
    println!("Creating reserve stake {}", reserve_keypair.pubkey());
    let mint_keypair = mint_keypair.unwrap_or_else(Keypair::new);
    println!("Creating mint {}", mint_keypair.pubkey());
    let stake_pool_keypair = stake_pool_keypair.unwrap_or_else(Keypair::new);
    let validator_list_keypair = validator_list_keypair.unwrap_or_else(Keypair::new);
    // One extra lamport above the rent-exempt minimum — presumably so the
    // reserve never sits exactly at the minimum; confirm against program docs.
    let reserve_stake_balance = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(STAKE_STATE_LEN)?
        + 1;
    let mint_account_balance = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(spl_token::state::Mint::LEN)?;
    let pool_fee_account_balance = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(spl_token::state::Account::LEN)?;
    let stake_pool_account_lamports = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(get_packed_len::<StakePool>())?;
    // Validator list size depends on `max_validators`, so measure a
    // freshly constructed (empty) list.
    let empty_validator_list = ValidatorList::new(max_validators);
    let validator_list_size = get_instance_packed_len(&empty_validator_list)?;
    let validator_list_balance = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(validator_list_size)?;
    let mut total_rent_free_balances = reserve_stake_balance
        + mint_account_balance
        + pool_fee_account_balance
        + stake_pool_account_lamports
        + validator_list_balance;
    let default_decimals = spl_token::native_mint::DECIMALS;
    // Calculate withdraw authority used for minting pool tokens
    let (withdraw_authority, _) = find_withdraw_authority_program_address(
        &spl_stake_pool::id(),
        &stake_pool_keypair.pubkey(),
    );
    if config.verbose {
        println!("Stake pool withdraw authority {}", withdraw_authority);
    }
    let mut instructions = vec![
        // Account for the stake pool reserve
        system_instruction::create_account(
            &config.fee_payer.pubkey(),
            &reserve_keypair.pubkey(),
            reserve_stake_balance,
            STAKE_STATE_LEN as u64,
            &stake::program::id(),
        ),
        // The pool's withdraw authority PDA is both staker and withdrawer
        // of the reserve, so the program fully controls it.
        stake::instruction::initialize(
            &reserve_keypair.pubkey(),
            &stake::state::Authorized {
                staker: withdraw_authority,
                withdrawer: withdraw_authority,
            },
            &stake::state::Lockup::default(),
        ),
        // Account for the stake pool mint
        system_instruction::create_account(
            &config.fee_payer.pubkey(),
            &mint_keypair.pubkey(),
            mint_account_balance,
            spl_token::state::Mint::LEN as u64,
            &spl_token::id(),
        ),
        // Initialize pool token mint account
        spl_token::instruction::initialize_mint(
            &spl_token::id(),
            &mint_keypair.pubkey(),
            &withdraw_authority,
            None,
            default_decimals,
        )?,
    ];
    // Fee account owned by the manager; created only if it does not exist.
    let pool_fee_account = add_associated_token_account(
        config,
        &mint_keypair.pubkey(),
        &config.manager.pubkey(),
        &mut instructions,
        &mut total_rent_free_balances,
    );
    println!("Creating pool fee collection account {}", pool_fee_account);
    let mut setup_transaction =
        Transaction::new_with_payer(&instructions, Some(&config.fee_payer.pubkey()));
    let mut initialize_transaction = Transaction::new_with_payer(
        &[
            // Validator stake account list storage
            system_instruction::create_account(
                &config.fee_payer.pubkey(),
                &validator_list_keypair.pubkey(),
                validator_list_balance,
                validator_list_size as u64,
                &spl_stake_pool::id(),
            ),
            // Account for the stake pool
            system_instruction::create_account(
                &config.fee_payer.pubkey(),
                &stake_pool_keypair.pubkey(),
                stake_pool_account_lamports,
                get_packed_len::<StakePool>() as u64,
                &spl_stake_pool::id(),
            ),
            // Initialize stake pool
            spl_stake_pool::instruction::initialize(
                &spl_stake_pool::id(),
                &stake_pool_keypair.pubkey(),
                &config.manager.pubkey(),
                &config.staker.pubkey(),
                &validator_list_keypair.pubkey(),
                &reserve_keypair.pubkey(),
                &mint_keypair.pubkey(),
                &pool_fee_account,
                &spl_token::id(),
                deposit_authority.as_ref().map(|x| x.pubkey()),
                epoch_fee,
                stake_withdrawal_fee,
                stake_deposit_fee,
                stake_referral_fee,
                max_validators,
            ),
        ],
        Some(&config.fee_payer.pubkey()),
    );
    let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    // Require rent for everything plus fees for BOTH transactions up front.
    check_fee_payer_balance(
        config,
        total_rent_free_balances
            + fee_calculator.calculate_fee(setup_transaction.message())
            + fee_calculator.calculate_fee(initialize_transaction.message()),
    )?;
    let mut setup_signers = vec![config.fee_payer.as_ref(), &mint_keypair, &reserve_keypair];
    unique_signers!(setup_signers);
    setup_transaction.sign(&setup_signers, recent_blockhash);
    send_transaction(config, setup_transaction)?;

    println!(
        "Creating stake pool {} with validator list {}",
        stake_pool_keypair.pubkey(),
        validator_list_keypair.pubkey()
    );
    let mut initialize_signers = vec![
        config.fee_payer.as_ref(),
        &stake_pool_keypair,
        &validator_list_keypair,
        config.manager.as_ref(),
    ];
    if let Some(deposit_authority) = deposit_authority {
        println!(
            "Deposits will be restricted to {} only, this can be changed using the set-funding-authority command.",
            deposit_authority.pubkey()
        );
        // The deposit authority must co-sign initialization; clone the
        // signer list so the borrow of `deposit_authority` stays local.
        let mut initialize_signers = initialize_signers.clone();
        initialize_signers.push(&deposit_authority);
        unique_signers!(initialize_signers);
        initialize_transaction.sign(&initialize_signers, recent_blockhash);
    } else {
        unique_signers!(initialize_signers);
        initialize_transaction.sign(&initialize_signers, recent_blockhash);
    }
    send_transaction(config, initialize_transaction)?;
    Ok(())
}
/// Add the validator identified by `vote_account` to the pool's validator
/// list, creating its canonical (PDA-derived) stake account.
///
/// Adding a validator that is already in the list is a no-op, not an error.
fn command_vsa_add(
    config: &Config,
    stake_pool_address: &Pubkey,
    vote_account: &Pubkey,
) -> CommandResult {
    let (stake_account_address, _) =
        find_stake_program_address(&spl_stake_pool::id(), vote_account, stake_pool_address);
    println!(
        "Adding stake account {}, delegated to {}",
        stake_account_address, vote_account
    );
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    if validator_list.contains(vote_account) {
        println!(
            "Stake pool already contains validator {}, ignoring",
            vote_account
        );
        return Ok(());
    }

    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }

    let add_instruction = spl_stake_pool::instruction::add_validator_to_pool_with_vote(
        &spl_stake_pool::id(),
        &stake_pool,
        stake_pool_address,
        &config.fee_payer.pubkey(),
        vote_account,
    );
    let mut signers = vec![config.fee_payer.as_ref(), config.staker.as_ref()];
    unique_signers!(signers);
    let transaction = checked_transaction_with_signers(config, &[add_instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Remove a validator from the pool, splitting its stake into a receiver
/// account whose authority becomes `new_authority` (staker by default).
///
/// If no `stake_receiver` is given, a fresh stake account is created in the
/// same transaction and its keypair added to the signer set.
fn command_vsa_remove(
    config: &Config,
    stake_pool_address: &Pubkey,
    vote_account: &Pubkey,
    new_authority: &Option<Pubkey>,
    stake_receiver: &Option<Pubkey>,
) -> CommandResult {
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let (stake_account_address, _) =
        find_stake_program_address(&spl_stake_pool::id(), vote_account, stake_pool_address);
    println!(
        "Removing stake account {}, delegated to {}",
        stake_account_address, vote_account
    );
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let mut instructions = vec![];
    let mut stake_keypair = None;
    // Default receiver: a brand-new stake account created in this same
    // transaction (balance 0; the split funds it). The closure also stashes
    // the keypair so it can co-sign below.
    let stake_receiver = stake_receiver.unwrap_or_else(|| {
        let new_stake_keypair = new_stake_account(
            &config.fee_payer.pubkey(),
            &mut instructions,
            /* stake_receiver_account_balance = */ 0,
        );
        let stake_pubkey = new_stake_keypair.pubkey();
        stake_keypair = Some(new_stake_keypair);
        stake_pubkey
    });
    let staker_pubkey = config.staker.pubkey();
    let new_authority = new_authority.as_ref().unwrap_or(&staker_pubkey);
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    let validator_stake_info = validator_list
        .find(vote_account)
        .ok_or("Vote account not found in validator list")?;
    let mut signers = vec![config.fee_payer.as_ref(), config.staker.as_ref()];
    if let Some(stake_keypair) = stake_keypair.as_ref() {
        signers.push(stake_keypair);
    }
    instructions.push(
        // Create new validator stake account address
        spl_stake_pool::instruction::remove_validator_from_pool_with_vote(
            &spl_stake_pool::id(),
            &stake_pool,
            stake_pool_address,
            vote_account,
            new_authority,
            validator_stake_info.transient_seed_suffix_start,
            &stake_receiver,
        ),
    );
    unique_signers!(signers);
    let transaction = checked_transaction_with_signers(config, &instructions, &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Move `amount` SOL of stake from the pool reserve onto the given
/// validator, via its transient stake account.
fn command_increase_validator_stake(
    config: &Config,
    stake_pool_address: &Pubkey,
    vote_account: &Pubkey,
    amount: f64,
) -> CommandResult {
    let lamports = native_token::sol_to_lamports(amount);
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    let validator_stake_info = validator_list
        .find(vote_account)
        .ok_or("Vote account not found in validator list")?;

    let increase_instruction = spl_stake_pool::instruction::increase_validator_stake_with_vote(
        &spl_stake_pool::id(),
        &stake_pool,
        stake_pool_address,
        vote_account,
        lamports,
        validator_stake_info.transient_seed_suffix_start,
    );
    let mut signers = vec![config.fee_payer.as_ref(), config.staker.as_ref()];
    unique_signers!(signers);
    let transaction =
        checked_transaction_with_signers(config, &[increase_instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Move `amount` SOL of stake off the given validator back toward the pool
/// reserve, via its transient stake account.
fn command_decrease_validator_stake(
    config: &Config,
    stake_pool_address: &Pubkey,
    vote_account: &Pubkey,
    amount: f64,
) -> CommandResult {
    let lamports = native_token::sol_to_lamports(amount);
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    let validator_stake_info = validator_list
        .find(vote_account)
        .ok_or("Vote account not found in validator list")?;

    let decrease_instruction = spl_stake_pool::instruction::decrease_validator_stake_with_vote(
        &spl_stake_pool::id(),
        &stake_pool,
        stake_pool_address,
        vote_account,
        lamports,
        validator_stake_info.transient_seed_suffix_start,
    );
    let mut signers = vec![config.fee_payer.as_ref(), config.staker.as_ref()];
    unique_signers!(signers);
    let transaction =
        checked_transaction_with_signers(config, &[decrease_instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Set — or clear, when `vote_address` is `None` — the pool's preferred
/// validator for deposits or withdrawals.
fn command_set_preferred_validator(
    config: &Config,
    stake_pool_address: &Pubkey,
    preferred_type: PreferredValidatorType,
    vote_address: Option<Pubkey>,
) -> CommandResult {
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let instruction = spl_stake_pool::instruction::set_preferred_validator(
        &spl_stake_pool::id(),
        stake_pool_address,
        &config.staker.pubkey(),
        &stake_pool.validator_list,
        preferred_type,
        vote_address,
    );
    let mut signers = vec![config.fee_payer.as_ref(), config.staker.as_ref()];
    unique_signers!(signers);
    let transaction = checked_transaction_with_signers(config, &[instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Return the associated token account for (`owner`, `mint`).
///
/// When the account does not exist yet, queue an instruction creating it
/// and add its rent-exempt balance to `rent_free_balances`.
fn add_associated_token_account(
    config: &Config,
    mint: &Pubkey,
    owner: &Pubkey,
    instructions: &mut Vec<Instruction>,
    rent_free_balances: &mut u64,
) -> Pubkey {
    let account = get_associated_token_address(owner, mint);
    match get_token_account(&config.rpc_client, &account, mint) {
        Err(_) => {
            println!("Creating associated token account {} to receive stake pool tokens of mint {}, owned by {}", account, mint, owner);
            let min_account_balance = config
                .rpc_client
                .get_minimum_balance_for_rent_exemption(spl_token::state::Account::LEN)
                .unwrap();
            instructions.push(create_associated_token_account(
                &config.fee_payer.pubkey(),
                owner,
                mint,
            ));
            *rent_free_balances += min_account_balance;
        }
        Ok(_) => {
            println!("Using existing associated token account {} to receive stake pool tokens of mint {}, owned by {}", account, mint, owner);
        }
    }
    account
}
/// Deposit an activated stake account into the pool in exchange for pool
/// tokens.
///
/// The stake must already be delegated to a validator that is in the pool's
/// validator list. If the pool has a private stake-deposit authority, it
/// must be supplied via `config.funding_authority` and is verified against
/// the on-chain value before the transaction is built.
fn command_deposit_stake(
    config: &Config,
    stake_pool_address: &Pubkey,
    stake: &Pubkey,
    withdraw_authority: Box<dyn Signer>,
    pool_token_receiver_account: &Option<Pubkey>,
    referrer_token_account: &Option<Pubkey>,
) -> CommandResult {
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let stake_state = get_stake_state(&config.rpc_client, stake)?;
    if config.verbose {
        println!("Depositing stake account {:?}", stake_state);
    }
    let vote_account = match stake_state {
        stake::state::StakeState::Stake(_, stake) => Ok(stake.delegation.voter_pubkey),
        _ => Err("Wrong stake account state, must be delegated to validator"),
    }?;
    // Check if this vote account has staking account in the pool
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    if !validator_list.contains(&vote_account) {
        return Err("Stake account for this validator does not exist in the pool.".into());
    }
    // Calculate validator stake account address linked to the pool
    let (validator_stake_account, _) =
        find_stake_program_address(&spl_stake_pool::id(), &vote_account, stake_pool_address);
    let validator_stake_state = get_stake_state(&config.rpc_client, &validator_stake_account)?;
    println!(
        "Depositing stake {} into stake pool account {}",
        stake, validator_stake_account
    );
    if config.verbose {
        println!("{:?}", validator_stake_state);
    }
    let mut instructions: Vec<Instruction> = vec![];
    // The depositing stake's withdraw authority must co-sign the transfer.
    let mut signers = vec![config.fee_payer.as_ref(), withdraw_authority.as_ref()];
    let mut total_rent_free_balances: u64 = 0;
    // Create token account if not specified
    let pool_token_receiver_account =
        pool_token_receiver_account.unwrap_or(add_associated_token_account(
            config,
            &stake_pool.pool_mint,
            &config.token_owner.pubkey(),
            &mut instructions,
            &mut total_rent_free_balances,
        ));
    // With no explicit referrer, referral fees go to the receiver itself.
    let referrer_token_account = referrer_token_account.unwrap_or(pool_token_receiver_account);
    let pool_withdraw_authority =
        find_withdraw_authority_program_address(&spl_stake_pool::id(), stake_pool_address).0;
    let mut deposit_instructions =
        if let Some(stake_deposit_authority) = config.funding_authority.as_ref() {
            signers.push(stake_deposit_authority.as_ref());
            if stake_deposit_authority.pubkey() != stake_pool.stake_deposit_authority {
                let error = format!(
                    "Invalid deposit authority specified, expected {}, received {}",
                    stake_pool.stake_deposit_authority,
                    stake_deposit_authority.pubkey()
                );
                return Err(error.into());
            }
            spl_stake_pool::instruction::deposit_stake_with_authority(
                &spl_stake_pool::id(),
                stake_pool_address,
                &stake_pool.validator_list,
                &stake_deposit_authority.pubkey(),
                &pool_withdraw_authority,
                stake,
                &withdraw_authority.pubkey(),
                &validator_stake_account,
                &stake_pool.reserve_stake,
                &pool_token_receiver_account,
                &stake_pool.manager_fee_account,
                &referrer_token_account,
                &stake_pool.pool_mint,
                &spl_token::id(),
            )
        } else {
            spl_stake_pool::instruction::deposit_stake(
                &spl_stake_pool::id(),
                stake_pool_address,
                &stake_pool.validator_list,
                &pool_withdraw_authority,
                stake,
                &withdraw_authority.pubkey(),
                &validator_stake_account,
                &stake_pool.reserve_stake,
                &pool_token_receiver_account,
                &stake_pool.manager_fee_account,
                &referrer_token_account,
                &stake_pool.pool_mint,
                &spl_token::id(),
            )
        };
    instructions.append(&mut deposit_instructions);
    let mut transaction =
        Transaction::new_with_payer(&instructions, Some(&config.fee_payer.pubkey()));
    let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    check_fee_payer_balance(
        config,
        total_rent_free_balances + fee_calculator.calculate_fee(transaction.message()),
    )?;
    unique_signers!(signers);
    transaction.sign(&signers, recent_blockhash);
    send_transaction(config, transaction)?;
    Ok(())
}
/// Deposit `amount` SOL into the pool in exchange for pool tokens.
///
/// The SOL is routed through a freshly generated ephemeral account so the
/// on-chain program sees a dedicated lamport source. When the pool has a
/// private SOL-deposit authority it must be supplied via
/// `config.funding_authority` and is verified against the on-chain value.
fn command_deposit_sol(
    config: &Config,
    stake_pool_address: &Pubkey,
    from: &Option<Keypair>,
    pool_token_receiver_account: &Option<Pubkey>,
    referrer_token_account: &Option<Pubkey>,
    amount: f64,
) -> CommandResult {
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let amount = native_token::sol_to_lamports(amount);

    // Check withdraw_from balance
    // The source defaults to the fee payer when no `from` keypair is given.
    let from_pubkey = from
        .as_ref()
        .map_or_else(|| config.fee_payer.pubkey(), |keypair| keypair.pubkey());
    let from_balance = config.rpc_client.get_balance(&from_pubkey)?;
    if from_balance < amount {
        return Err(format!(
            "Not enough SOL to deposit into pool: {}.\nMaximum deposit amount is {} SOL.",
            Sol(amount),
            Sol(from_balance)
        )
        .into());
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let mut instructions: Vec<Instruction> = vec![];

    // ephemeral SOL account just to do the transfer
    let user_sol_transfer = Keypair::new();
    let mut signers = vec![config.fee_payer.as_ref(), &user_sol_transfer];
    if let Some(keypair) = from.as_ref() {
        signers.push(keypair)
    }
    let mut total_rent_free_balances: u64 = 0;

    // Create the ephemeral SOL account
    instructions.push(system_instruction::transfer(
        &from_pubkey,
        &user_sol_transfer.pubkey(),
        amount,
    ));

    // Create token account if not specified
    let pool_token_receiver_account =
        pool_token_receiver_account.unwrap_or(add_associated_token_account(
            config,
            &stake_pool.pool_mint,
            &config.token_owner.pubkey(),
            &mut instructions,
            &mut total_rent_free_balances,
        ));
    // With no explicit referrer, referral fees go to the receiver itself.
    let referrer_token_account = referrer_token_account.unwrap_or(pool_token_receiver_account);
    let pool_withdraw_authority =
        find_withdraw_authority_program_address(&spl_stake_pool::id(), stake_pool_address).0;
    let deposit_instruction = if let Some(deposit_authority) = config.funding_authority.as_ref() {
        let expected_sol_deposit_authority = stake_pool.sol_deposit_authority.ok_or_else(|| {
            "SOL deposit authority specified in arguments but stake pool has none".to_string()
        })?;
        signers.push(deposit_authority.as_ref());
        if deposit_authority.pubkey() != expected_sol_deposit_authority {
            let error = format!(
                "Invalid deposit authority specified, expected {}, received {}",
                expected_sol_deposit_authority,
                deposit_authority.pubkey()
            );
            return Err(error.into());
        }
        spl_stake_pool::instruction::deposit_sol_with_authority(
            &spl_stake_pool::id(),
            stake_pool_address,
            &deposit_authority.pubkey(),
            &pool_withdraw_authority,
            &stake_pool.reserve_stake,
            &user_sol_transfer.pubkey(),
            &pool_token_receiver_account,
            &stake_pool.manager_fee_account,
            &referrer_token_account,
            &stake_pool.pool_mint,
            &spl_token::id(),
            amount,
        )
    } else {
        spl_stake_pool::instruction::deposit_sol(
            &spl_stake_pool::id(),
            stake_pool_address,
            &pool_withdraw_authority,
            &stake_pool.reserve_stake,
            &user_sol_transfer.pubkey(),
            &pool_token_receiver_account,
            &stake_pool.manager_fee_account,
            &referrer_token_account,
            &stake_pool.pool_mint,
            &spl_token::id(),
            amount,
        )
    };
    instructions.push(deposit_instruction);
    let mut transaction =
        Transaction::new_with_payer(&instructions, Some(&config.fee_payer.pubkey()));
    let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    check_fee_payer_balance(
        config,
        total_rent_free_balances + fee_calculator.calculate_fee(transaction.message()),
    )?;
    unique_signers!(signers);
    transaction.sign(&signers, recent_blockhash);
    send_transaction(config, transaction)?;
    Ok(())
}
/// Fetch the pool, its validator list, reserve and mint, and print a full
/// status report in the configured output format.
fn command_list(config: &Config, stake_pool_address: &Pubkey) -> CommandResult {
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let reserve_stake_account_address = stake_pool.reserve_stake.to_string();
    let total_lamports = stake_pool.total_lamports;
    let last_update_epoch = stake_pool.last_update_epoch;
    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;
    let max_number_of_validators = validator_list.header.max_validators;
    let current_number_of_validators = validator_list.validators.len();
    let pool_mint = get_token_mint(&config.rpc_client, &stake_pool.pool_mint)?;
    let epoch_info = config.rpc_client.get_epoch_info()?;
    let pool_withdraw_authority =
        find_withdraw_authority_program_address(&spl_stake_pool::id(), stake_pool_address).0;
    let reserve_stake = config.rpc_client.get_account(&stake_pool.reserve_stake)?;
    // Mirrors the reserve funding in pool creation: rent-exempt minimum + 1.
    let minimum_reserve_stake_balance = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(STAKE_STATE_LEN)?
        + 1;
    // One row per validator: canonical + transient stake addresses and
    // whether the entry still needs this epoch's update.
    let cli_stake_pool_stake_account_infos = validator_list
        .validators
        .iter()
        .map(|validator| {
            let (stake_account_address, _) = find_stake_program_address(
                &spl_stake_pool::id(),
                &validator.vote_account_address,
                stake_pool_address,
            );
            let (transient_stake_account_address, _) = find_transient_stake_program_address(
                &spl_stake_pool::id(),
                &validator.vote_account_address,
                stake_pool_address,
                validator.transient_seed_suffix_start,
            );
            let update_required = validator.last_update_epoch != epoch_info.epoch;
            CliStakePoolStakeAccountInfo {
                vote_account_address: validator.vote_account_address.to_string(),
                stake_account_address: stake_account_address.to_string(),
                validator_active_stake_lamports: validator.active_stake_lamports,
                validator_last_update_epoch: validator.last_update_epoch,
                validator_lamports: validator.stake_lamports(),
                validator_transient_stake_account_address: transient_stake_account_address
                    .to_string(),
                validator_transient_stake_lamports: validator.transient_stake_lamports,
                update_required,
            }
        })
        .collect();
    let total_pool_tokens =
        spl_token::amount_to_ui_amount(stake_pool.pool_token_supply, pool_mint.decimals);
    let mut cli_stake_pool = CliStakePool::from((
        *stake_pool_address,
        stake_pool,
        validator_list,
        pool_withdraw_authority,
    ));
    let update_required = last_update_epoch != epoch_info.epoch;
    let cli_stake_pool_details = CliStakePoolDetails {
        reserve_stake_account_address,
        reserve_stake_lamports: reserve_stake.lamports,
        minimum_reserve_stake_balance,
        stake_accounts: cli_stake_pool_stake_account_infos,
        total_lamports,
        total_pool_tokens,
        current_number_of_validators: current_number_of_validators as u32,
        max_number_of_validators,
        update_required,
    };
    cli_stake_pool.details = Some(cli_stake_pool_details);
    println!("{}", config.output_format.formatted_string(&cli_stake_pool));
    Ok(())
}
/// Bring the pool's balances up to date for the current epoch.
///
/// Sends the per-validator list-update instructions first (all but the last
/// fire-and-forget, the last one confirmed), then the final balance-update
/// instructions in a confirmed transaction. `force` updates even when the
/// pool already reflects this epoch; `no_merge` skips merging transient
/// stake accounts.
fn command_update(
    config: &Config,
    stake_pool_address: &Pubkey,
    force: bool,
    no_merge: bool,
) -> CommandResult {
    if config.no_update {
        println!("Update requested, but --no-update flag specified, so doing nothing");
        return Ok(());
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let epoch_info = config.rpc_client.get_epoch_info()?;

    if stake_pool.last_update_epoch == epoch_info.epoch {
        if force {
            println!("Update not required, but --force flag specified, so doing it anyway");
        } else {
            println!("Update not required");
            return Ok(());
        }
    }

    let validator_list = get_validator_list(&config.rpc_client, &stake_pool.validator_list)?;

    let (mut update_list_instructions, final_instructions) =
        spl_stake_pool::instruction::update_stake_pool(
            &spl_stake_pool::id(),
            &stake_pool,
            &validator_list,
            stake_pool_address,
            no_merge,
        );

    let update_list_instructions_len = update_list_instructions.len();
    if update_list_instructions_len > 0 {
        // Keep the final list-update instruction aside so we can confirm it.
        let last_instruction = update_list_instructions.split_off(update_list_instructions_len - 1);
        // send the first ones without waiting
        for instruction in update_list_instructions {
            let transaction = checked_transaction_with_signers(
                config,
                &[instruction],
                &[config.fee_payer.as_ref()],
            )?;
            send_transaction_no_wait(config, transaction)?;
        }

        // wait on the last one
        let transaction = checked_transaction_with_signers(
            config,
            &last_instruction,
            &[config.fee_payer.as_ref()],
        )?;
        send_transaction(config, transaction)?;
    }
    let transaction = checked_transaction_with_signers(
        config,
        &final_instructions,
        &[config.fee_payer.as_ref()],
    )?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// One stake account to withdraw from, and how many pool tokens to redeem
/// against it.
#[derive(PartialEq, Debug)]
struct WithdrawAccount {
    // Stake account (validator, transient, or reserve) to split from.
    stake_address: Pubkey,
    // Vote account the stake is delegated to; `None` for the reserve.
    vote_address: Option<Pubkey>,
    // Pool tokens to redeem against this account.
    pool_amount: u64,
}
/// Project each validator through `get_info` and order the results for
/// withdrawal: the preferred-withdraw validator first, then everything else
/// by descending lamport balance.
fn sorted_accounts<F>(
    validator_list: &ValidatorList,
    stake_pool: &StakePool,
    get_info: F,
) -> Vec<(Pubkey, u64, Option<Pubkey>)>
where
    F: Fn(&ValidatorStakeInfo) -> (Pubkey, u64, Option<Pubkey>),
{
    let preferred = stake_pool.preferred_withdraw_validator_vote_address;
    let mut accounts: Vec<(Pubkey, u64, Option<Pubkey>)> =
        validator_list.validators.iter().map(get_info).collect();
    accounts.sort_by(|a, b| {
        if a.2 == preferred {
            Ordering::Less
        } else if b.2 == preferred {
            Ordering::Greater
        } else {
            // Largest balances first.
            b.1.cmp(&a.1)
        }
    });
    accounts
}
/// Choose which stake accounts to withdraw `pool_amount` pool tokens from.
///
/// Candidates are ordered: active validator stakes first, then transient
/// stakes, then the reserve — each group preferred-validator-first, largest
/// balance first (see `sorted_accounts`). Accounts at or below the minimum
/// viable balance are skipped. Errors if the pool cannot cover the amount.
fn prepare_withdraw_accounts(
    rpc_client: &RpcClient,
    stake_pool: &StakePool,
    pool_amount: u64,
    stake_pool_address: &Pubkey,
    skip_fee: bool,
) -> Result<Vec<WithdrawAccount>, Error> {
    // An account must keep rent exemption + the minimum active stake.
    let min_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(STAKE_STATE_LEN)?
        .saturating_add(MINIMUM_ACTIVE_STAKE);
    let pool_mint = get_token_mint(rpc_client, &stake_pool.pool_mint)?;
    let validator_list: ValidatorList = get_validator_list(rpc_client, &stake_pool.validator_list)?;

    let mut accounts: Vec<(Pubkey, u64, Option<Pubkey>)> = Vec::new();

    // First preference: the validators' canonical (active) stake accounts.
    accounts.append(&mut sorted_accounts(
        &validator_list,
        stake_pool,
        |validator| {
            let (stake_account_address, _) = find_stake_program_address(
                &spl_stake_pool::id(),
                &validator.vote_account_address,
                stake_pool_address,
            );

            (
                stake_account_address,
                validator.active_stake_lamports,
                Some(validator.vote_account_address),
            )
        },
    ));

    // Second preference: transient stake accounts.
    accounts.append(&mut sorted_accounts(
        &validator_list,
        stake_pool,
        |validator| {
            let (transient_stake_account_address, _) = find_transient_stake_program_address(
                &spl_stake_pool::id(),
                &validator.vote_account_address,
                stake_pool_address,
                validator.transient_seed_suffix_start,
            );

            (
                transient_stake_account_address,
                validator.transient_stake_lamports,
                Some(validator.vote_account_address),
            )
        },
    ));

    // Last resort: the pool reserve.
    let reserve_stake = rpc_client.get_account(&stake_pool.reserve_stake)?;

    accounts.push((stake_pool.reserve_stake, reserve_stake.lamports, None));

    // Prepare the list of accounts to withdraw from
    let mut withdraw_from: Vec<WithdrawAccount> = vec![];
    let mut remaining_amount = pool_amount;
    let fee = stake_pool.stake_withdrawal_fee;
    // Fraction kept after the withdrawal fee: (denominator - numerator) / denominator.
    let inverse_fee = Fee {
        numerator: fee.denominator - fee.numerator,
        denominator: fee.denominator,
    };

    // Go through available accounts and withdraw from largest to smallest
    for (stake_address, lamports, vote_address_opt) in accounts {
        if lamports <= min_balance {
            continue;
        }

        let available_for_withdrawal_wo_fee =
            stake_pool.calc_pool_tokens_for_deposit(lamports).unwrap();

        // Gross the token amount up so that after the fee is taken the
        // account is still fully drainable.
        // NOTE(review): a 100% withdrawal fee (numerator == denominator)
        // would divide by zero here — confirm upstream validation forbids it.
        let available_for_withdrawal = if skip_fee {
            available_for_withdrawal_wo_fee
        } else {
            available_for_withdrawal_wo_fee * inverse_fee.denominator / inverse_fee.numerator
        };

        let pool_amount = u64::min(available_for_withdrawal, remaining_amount);

        // Those accounts will be withdrawn completely with `claim` instruction
        withdraw_from.push(WithdrawAccount {
            stake_address,
            vote_address: vote_address_opt,
            pool_amount,
        });
        remaining_amount -= pool_amount;

        if remaining_amount == 0 {
            break;
        }
    }

    // Not enough stake to withdraw the specified amount
    if remaining_amount > 0 {
        return Err(format!(
            "No stake accounts found in this pool with enough balance to withdraw {} pool tokens.",
            spl_token::amount_to_ui_amount(pool_amount, pool_mint.decimals)
        )
        .into());
    }

    Ok(withdraw_from)
}
/// Withdraw active stake from the pool in exchange for burning pool tokens.
///
/// Burns up to `pool_amount` (a UI amount) of pool tokens from
/// `pool_token_account` (default: the token owner's associated token account)
/// and receives stake split out of the pool's stake accounts. The source of
/// the stake is chosen by, in order of precedence:
/// - `use_reserve`: withdraw from the pool's reserve stake account;
/// - `vote_account_address`: withdraw from that validator's stake account;
/// - otherwise: withdraw from the largest accounts first via
///   `prepare_withdraw_accounts`.
///
/// `stake_receiver_param`, when `None`, causes a fresh stake account to be
/// created (funded by the fee payer) for every withdrawal instruction.
fn command_withdraw_stake(
    config: &Config,
    stake_pool_address: &Pubkey,
    use_reserve: bool,
    vote_account_address: &Option<Pubkey>,
    stake_receiver_param: &Option<Pubkey>,
    pool_token_account: &Option<Pubkey>,
    pool_amount: f64,
) -> CommandResult {
    // Bring pool balances up to date first unless the user disabled it.
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let pool_mint = get_token_mint(&config.rpc_client, &stake_pool.pool_mint)?;
    // Convert the user-facing amount into base units of the pool mint.
    let pool_amount = spl_token::ui_amount_to_amount(pool_amount, pool_mint.decimals);
    // Program-derived authority that signs for the pool's stake accounts.
    let pool_withdraw_authority =
        find_withdraw_authority_program_address(&spl_stake_pool::id(), stake_pool_address).0;
    // Default to the token owner's associated token account for the pool mint.
    let pool_token_account = pool_token_account.unwrap_or(get_associated_token_address(
        &config.token_owner.pubkey(),
        &stake_pool.pool_mint,
    ));
    let token_account = get_token_account(
        &config.rpc_client,
        &pool_token_account,
        &stake_pool.pool_mint,
    )?;
    // Rent-exempt minimum for a stake account; needed when creating receivers.
    let stake_account_rent_exemption = config
        .rpc_client
        .get_minimum_balance_for_rent_exemption(STAKE_STATE_LEN)?;
    // Check withdraw_from balance
    if token_account.amount < pool_amount {
        return Err(format!(
            "Not enough token balance to withdraw {} pool tokens.\nMaximum withdraw amount is {} pool tokens.",
            spl_token::amount_to_ui_amount(pool_amount, pool_mint.decimals),
            spl_token::amount_to_ui_amount(token_account.amount, pool_mint.decimals)
        )
        .into());
    }
    // Decide which stake accounts to withdraw from.
    let withdraw_accounts = if use_reserve {
        // Take the full amount from the pool's reserve stake account.
        vec![WithdrawAccount {
            stake_address: stake_pool.reserve_stake,
            vote_address: None,
            pool_amount,
        }]
    } else if let Some(vote_account_address) = vote_account_address {
        // Withdraw only from the stake account delegated to this validator.
        let (stake_account_address, _) = find_stake_program_address(
            &spl_stake_pool::id(),
            vote_account_address,
            stake_pool_address,
        );
        let stake_account = config.rpc_client.get_account(&stake_account_address)?;
        // Only lamports above the active-stake minimum plus rent exemption
        // are withdrawable from a validator stake account.
        let available_for_withdrawal = stake_pool
            .calc_lamports_withdraw_amount(
                stake_account
                    .lamports
                    .saturating_sub(MINIMUM_ACTIVE_STAKE)
                    .saturating_sub(stake_account_rent_exemption),
            )
            .unwrap();
        if available_for_withdrawal < pool_amount {
            return Err(format!(
                "Not enough lamports available for withdrawal from {}, {} asked, {} available",
                stake_account_address, pool_amount, available_for_withdrawal
            )
            .into());
        }
        vec![WithdrawAccount {
            stake_address: stake_account_address,
            vote_address: Some(*vote_account_address),
            pool_amount,
        }]
    } else {
        // Get the list of accounts to withdraw from
        // (largest balances first; fees skipped when withdrawing from the
        // manager's own fee account).
        prepare_withdraw_accounts(
            &config.rpc_client,
            &stake_pool,
            pool_amount,
            stake_pool_address,
            stake_pool.manager_fee_account == pool_token_account,
        )?
    };
    // Construct transaction to withdraw from withdraw_accounts account list
    let mut instructions: Vec<Instruction> = vec![];
    let user_transfer_authority = Keypair::new(); // ephemeral keypair just to do the transfer
    let mut signers = vec![
        config.fee_payer.as_ref(),
        config.token_owner.as_ref(),
        &user_transfer_authority,
    ];
    // Keypairs for any stake accounts created below; they must also sign.
    let mut new_stake_keypairs = vec![];
    instructions.push(
        // Approve spending token
        // (delegates pool_amount to the ephemeral authority so the token
        // owner's key is not used directly in the withdraw instructions)
        spl_token::instruction::approve(
            &spl_token::id(),
            &pool_token_account,
            &user_transfer_authority.pubkey(),
            &config.token_owner.pubkey(),
            &[],
            pool_amount,
        )?,
    );
    // Total lamports the fee payer must fund for newly created stake accounts.
    let mut total_rent_free_balances = 0;
    // Go through prepared accounts and withdraw/claim them
    for withdraw_account in withdraw_accounts {
        // Convert pool tokens amount to lamports
        let sol_withdraw_amount = stake_pool
            .calc_lamports_withdraw_amount(withdraw_account.pool_amount)
            .unwrap();
        if let Some(vote_address) = withdraw_account.vote_address {
            println!(
                "Withdrawing {}, or {} pool tokens, from stake account {}, delegated to {}",
                Sol(sol_withdraw_amount),
                spl_token::amount_to_ui_amount(withdraw_account.pool_amount, pool_mint.decimals),
                withdraw_account.stake_address,
                vote_address,
            );
        } else {
            println!(
                "Withdrawing {}, or {} pool tokens, from stake account {}",
                Sol(sol_withdraw_amount),
                spl_token::amount_to_ui_amount(withdraw_account.pool_amount, pool_mint.decimals),
                withdraw_account.stake_address,
            );
        }
        // Use separate mutable variable because withdraw might create a new account
        let stake_receiver = stake_receiver_param.unwrap_or_else(|| {
            // No receiver given: create a fresh stake account, funded by the
            // fee payer, to receive this withdrawal.
            let stake_keypair = new_stake_account(
                &config.fee_payer.pubkey(),
                &mut instructions,
                stake_account_rent_exemption,
            );
            let stake_pubkey = stake_keypair.pubkey();
            total_rent_free_balances += stake_account_rent_exemption;
            new_stake_keypairs.push(stake_keypair);
            stake_pubkey
        });
        instructions.push(spl_stake_pool::instruction::withdraw_stake(
            &spl_stake_pool::id(),
            stake_pool_address,
            &stake_pool.validator_list,
            &pool_withdraw_authority,
            &withdraw_account.stake_address,
            &stake_receiver,
            &config.staker.pubkey(),
            &user_transfer_authority.pubkey(),
            &pool_token_account,
            &stake_pool.manager_fee_account,
            &stake_pool.pool_mint,
            &spl_token::id(),
            withdraw_account.pool_amount,
        ));
    }
    let mut transaction =
        Transaction::new_with_payer(&instructions, Some(&config.fee_payer.pubkey()));
    let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    // The fee payer covers both the transaction fee and rent for new accounts.
    check_fee_payer_balance(
        config,
        total_rent_free_balances + fee_calculator.calculate_fee(transaction.message()),
    )?;
    for new_stake_keypair in &new_stake_keypairs {
        signers.push(new_stake_keypair);
    }
    unique_signers!(signers);
    transaction.sign(&signers, recent_blockhash);
    send_transaction(config, transaction)?;
    Ok(())
}
/// Withdraw SOL from the stake pool's reserve in exchange for burning pool
/// tokens.
///
/// Burns `pool_amount` (a UI amount) of pool tokens from `pool_token_account`
/// (default: the token owner's associated token account) and sends the
/// corresponding lamports from the pool reserve to `sol_receiver`.
///
/// If the pool has a SOL withdraw authority configured, the keypair passed
/// via `config.funding_authority` must match it and co-sign the transaction.
fn command_withdraw_sol(
    config: &Config,
    stake_pool_address: &Pubkey,
    pool_token_account: &Option<Pubkey>,
    sol_receiver: &Pubkey,
    pool_amount: f64,
) -> CommandResult {
    // Bring pool balances up to date first unless the user disabled it.
    if !config.no_update {
        command_update(config, stake_pool_address, false, false)?;
    }
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    let pool_mint = get_token_mint(&config.rpc_client, &stake_pool.pool_mint)?;
    // Convert the user-facing amount into base units of the pool mint.
    let pool_amount = spl_token::ui_amount_to_amount(pool_amount, pool_mint.decimals);
    // Default to the token owner's associated token account for the pool mint.
    let pool_token_account = pool_token_account.unwrap_or(get_associated_token_address(
        &config.token_owner.pubkey(),
        &stake_pool.pool_mint,
    ));
    let token_account = get_token_account(
        &config.rpc_client,
        &pool_token_account,
        &stake_pool.pool_mint,
    )?;
    // Check withdraw_from balance
    if token_account.amount < pool_amount {
        return Err(format!(
            "Not enough token balance to withdraw {} pool tokens.\nMaximum withdraw amount is {} pool tokens.",
            spl_token::amount_to_ui_amount(pool_amount, pool_mint.decimals),
            spl_token::amount_to_ui_amount(token_account.amount, pool_mint.decimals)
        )
        .into());
    }
    // Construct transaction to withdraw from withdraw_accounts account list
    let user_transfer_authority = Keypair::new(); // ephemeral keypair just to do the transfer
    let mut signers = vec![
        config.fee_payer.as_ref(),
        config.token_owner.as_ref(),
        &user_transfer_authority,
    ];
    let mut instructions = vec![
        // Approve spending token: delegate pool_amount to the ephemeral
        // authority so the token owner's key is not used in the withdraw
        // instruction itself.
        spl_token::instruction::approve(
            &spl_token::id(),
            &pool_token_account,
            &user_transfer_authority.pubkey(),
            &config.token_owner.pubkey(),
            &[],
            pool_amount,
        )?,
    ];
    // Program-derived authority that signs for the pool's reserve account.
    let pool_withdraw_authority =
        find_withdraw_authority_program_address(&spl_stake_pool::id(), stake_pool_address).0;
    let withdraw_instruction = if let Some(withdraw_authority) = config.funding_authority.as_ref() {
        // A SOL withdraw authority keypair was supplied: the pool must have
        // one configured, and the two must match. Validate before adding it
        // to the signer set.
        let expected_sol_withdraw_authority =
            stake_pool.sol_withdraw_authority.ok_or_else(|| {
                "SOL withdraw authority specified in arguments but stake pool has none".to_string()
            })?;
        if withdraw_authority.pubkey() != expected_sol_withdraw_authority {
            let error = format!(
                "Invalid sol withdraw authority specified, expected {}, received {}",
                expected_sol_withdraw_authority,
                withdraw_authority.pubkey()
            );
            return Err(error.into());
        }
        signers.push(withdraw_authority.as_ref());
        spl_stake_pool::instruction::withdraw_sol_with_authority(
            &spl_stake_pool::id(),
            stake_pool_address,
            &withdraw_authority.pubkey(),
            &pool_withdraw_authority,
            &user_transfer_authority.pubkey(),
            &pool_token_account,
            &stake_pool.reserve_stake,
            sol_receiver,
            &stake_pool.manager_fee_account,
            &stake_pool.pool_mint,
            &spl_token::id(),
            pool_amount,
        )
    } else {
        spl_stake_pool::instruction::withdraw_sol(
            &spl_stake_pool::id(),
            stake_pool_address,
            &pool_withdraw_authority,
            &user_transfer_authority.pubkey(),
            &pool_token_account,
            &stake_pool.reserve_stake,
            sol_receiver,
            &stake_pool.manager_fee_account,
            &stake_pool.pool_mint,
            &spl_token::id(),
            pool_amount,
        )
    };
    instructions.push(withdraw_instruction);
    let mut transaction =
        Transaction::new_with_payer(&instructions, Some(&config.fee_payer.pubkey()));
    let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;
    check_fee_payer_balance(config, fee_calculator.calculate_fee(transaction.message()))?;
    unique_signers!(signers);
    transaction.sign(&signers, recent_blockhash);
    send_transaction(config, transaction)?;
    Ok(())
}
/// Change the stake pool's manager and/or its fee-receiver token account.
/// Must be signed by the current manager; omitted arguments keep the pool's
/// existing values.
fn command_set_manager(
    config: &Config,
    stake_pool_address: &Pubkey,
    new_manager: &Option<Keypair>,
    new_fee_receiver: &Option<Pubkey>,
) -> CommandResult {
    let stake_pool = get_stake_pool(&config.rpc_client, stake_pool_address)?;
    // If new accounts are missing in the arguments use the old ones
    let (new_manager_pubkey, mut signers): (Pubkey, Vec<&dyn Signer>) = match new_manager {
        Some(keypair) => (keypair.pubkey(), vec![keypair]),
        None => (stake_pool.manager, vec![]),
    };
    let new_fee_receiver = match new_fee_receiver {
        Some(receiver) => {
            // The fee receiver must be a valid token account whose mint is
            // the stake pool's mint.
            let token_account =
                get_token_account(&config.rpc_client, receiver, &stake_pool.pool_mint)?;
            if token_account.mint != stake_pool.pool_mint {
                return Err("Fee receiver account belongs to a different mint"
                    .to_string()
                    .into());
            }
            *receiver
        }
        None => stake_pool.manager_fee_account,
    };
    signers.push(config.fee_payer.as_ref());
    signers.push(config.manager.as_ref());
    unique_signers!(signers);
    let instruction = spl_stake_pool::instruction::set_manager(
        &spl_stake_pool::id(),
        stake_pool_address,
        &config.manager.pubkey(),
        &new_manager_pubkey,
        &new_fee_receiver,
    );
    let transaction = checked_transaction_with_signers(config, &[instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Update the staker authority on a stake pool. The manager must sign,
/// alongside the fee payer.
fn command_set_staker(
    config: &Config,
    stake_pool_address: &Pubkey,
    new_staker: &Pubkey,
) -> CommandResult {
    // Collect required signers, deduplicating any repeated keypairs.
    let mut signers = vec![config.fee_payer.as_ref(), config.manager.as_ref()];
    unique_signers!(signers);
    let instruction = spl_stake_pool::instruction::set_staker(
        &spl_stake_pool::id(),
        stake_pool_address,
        &config.manager.pubkey(),
        new_staker,
    );
    // Build a single-instruction transaction, verifying the fee payer balance.
    let transaction = checked_transaction_with_signers(config, &[instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Set (or clear, when `new_authority` is `None`) one of the stake pool's
/// funding authorities. Must be signed by the manager.
fn command_set_funding_authority(
    config: &Config,
    stake_pool_address: &Pubkey,
    new_authority: Option<Pubkey>,
    funding_type: FundingType,
) -> CommandResult {
    // Collect required signers, deduplicating any repeated keypairs.
    let mut signers = vec![config.fee_payer.as_ref(), config.manager.as_ref()];
    unique_signers!(signers);
    let instruction = spl_stake_pool::instruction::set_funding_authority(
        &spl_stake_pool::id(),
        stake_pool_address,
        &config.manager.pubkey(),
        new_authority.as_ref(),
        funding_type,
    );
    // Build a single-instruction transaction, verifying the fee payer balance.
    let transaction = checked_transaction_with_signers(config, &[instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Update one of the stake pool's fees (epoch, deposit, or withdrawal,
/// depending on `new_fee`). Must be signed by the manager.
fn command_set_fee(
    config: &Config,
    stake_pool_address: &Pubkey,
    new_fee: FeeType,
) -> CommandResult {
    // Collect required signers, deduplicating any repeated keypairs.
    let mut signers = vec![config.fee_payer.as_ref(), config.manager.as_ref()];
    unique_signers!(signers);
    let instruction = spl_stake_pool::instruction::set_fee(
        &spl_stake_pool::id(),
        stake_pool_address,
        &config.manager.pubkey(),
        new_fee,
    );
    // Build a single-instruction transaction, verifying the fee payer balance.
    let transaction = checked_transaction_with_signers(config, &[instruction], &signers)?;
    send_transaction(config, transaction)?;
    Ok(())
}
/// Fetch every stake pool visible to the RPC endpoint and print them using
/// the configured output format.
fn command_list_all_pools(config: &Config) -> CommandResult {
    let pools: Vec<CliStakePool> = get_stake_pools(&config.rpc_client)?
        .into_iter()
        .map(CliStakePool::from)
        .collect();
    let output = config
        .output_format
        .formatted_string(&CliStakePools { pools });
    println!("{}", output);
    Ok(())
}
fn main() {
solana_logger::setup_with_default("solana=info");
let matches = App::new(crate_name!())
.about(crate_description!())
.version(crate_version!())
.setting(AppSettings::SubcommandRequiredElseHelp)
.arg({
let arg = Arg::with_name("config_file")
.short("C")
.long("config")
.value_name("PATH")
.takes_value(true)
.global(true)
.help("Configuration file to use");
if let Some(ref config_file) = *solana_cli_config::CONFIG_FILE {
arg.default_value(config_file)
} else {
arg
}
})
.arg(
Arg::with_name("verbose")
.long("verbose")
.short("v")
.takes_value(false)
.global(true)
.help("Show additional information"),
)
.arg(
Arg::with_name("output_format")
.long("output")
.value_name("FORMAT")
.global(true)
.takes_value(true)
.possible_values(&["json", "json-compact"])
.help("Return information in specified output format"),
)
.arg(
Arg::with_name("dry_run")
.long("dry-run")
.takes_value(false)
.global(true)
.help("Simulate transaction instead of executing"),
)
.arg(
Arg::with_name("no_update")
.long("no-update")
.takes_value(false)
.global(true)
.help("Do not automatically update the stake pool if needed"),
)
.arg(
Arg::with_name("json_rpc_url")
.long("url")
.value_name("URL")
.takes_value(true)
.validator(is_url)
.help("JSON RPC URL for the cluster. Default from the configuration file."),
)
.arg(
Arg::with_name("staker")
.long("staker")
.value_name("KEYPAIR")
.validator(is_valid_signer)
.takes_value(true)
.help("Stake pool staker. [default: cli config keypair]"),
)
.arg(
Arg::with_name("manager")
.long("manager")
.value_name("KEYPAIR")
.validator(is_valid_signer)
.takes_value(true)
.help("Stake pool manager. [default: cli config keypair]"),
)
.arg(
Arg::with_name("funding_authority")
.long("funding-authority")
.value_name("KEYPAIR")
.validator(is_valid_signer)
.takes_value(true)
.help("Stake pool funding authority for deposits or withdrawals. [default: cli config keypair]"),
)
.arg(
Arg::with_name("token_owner")
.long("token-owner")
.value_name("KEYPAIR")
.validator(is_valid_signer)
.takes_value(true)
.help("Owner of pool token account [default: cli config keypair]"),
)
.arg(
Arg::with_name("fee_payer")
.long("fee-payer")
.value_name("KEYPAIR")
.validator(is_valid_signer)
.takes_value(true)
.help("Transaction fee payer account [default: cli config keypair]"),
)
.subcommand(SubCommand::with_name("create-pool")
.about("Create a new stake pool")
.arg(
Arg::with_name("epoch_fee_numerator")
.long("epoch-fee-numerator")
.short("n")
.validator(is_parsable::<u64>)
.value_name("NUMERATOR")
.takes_value(true)
.required(true)
.help("Epoch fee numerator, fee amount is numerator divided by denominator."),
)
.arg(
Arg::with_name("epoch_fee_denominator")
.long("epoch-fee-denominator")
.short("d")
.validator(is_parsable::<u64>)
.value_name("DENOMINATOR")
.takes_value(true)
.required(true)
.help("Epoch fee denominator, fee amount is numerator divided by denominator."),
)
.arg(
Arg::with_name("withdrawal_fee_numerator")
.long("withdrawal-fee-numerator")
.validator(is_parsable::<u64>)
.value_name("NUMERATOR")
.takes_value(true)
.requires("withdrawal_fee_denominator")
.help("Withdrawal fee numerator, fee amount is numerator divided by denominator [default: 0]"),
).arg(
Arg::with_name("withdrawal_fee_denominator")
.long("withdrawal-fee-denominator")
.validator(is_parsable::<u64>)
.value_name("DENOMINATOR")
.takes_value(true)
.requires("withdrawal_fee_numerator")
.help("Withdrawal fee denominator, fee amount is numerator divided by denominator [default: 0]"),
)
.arg(
Arg::with_name("deposit_fee_numerator")
.long("deposit-fee-numerator")
.validator(is_parsable::<u64>)
.value_name("NUMERATOR")
.takes_value(true)
.requires("deposit_fee_denominator")
.help("Deposit fee numerator, fee amount is numerator divided by denominator [default: 0]"),
).arg(
Arg::with_name("deposit_fee_denominator")
.long("deposit-fee-denominator")
.validator(is_parsable::<u64>)
.value_name("DENOMINATOR")
.takes_value(true)
.requires("deposit_fee_numerator")
.help("Deposit fee denominator, fee amount is numerator divided by denominator [default: 0]"),
)
.arg(
Arg::with_name("referral_fee")
.long("referral-fee")
.validator(is_valid_percentage)
.value_name("FEE_PERCENTAGE")
.takes_value(true)
.help("Referral fee percentage, maximum 100"),
)
.arg(
Arg::with_name("max_validators")
.long("max-validators")
.short("m")
.validator(is_parsable::<u32>)
.value_name("NUMBER")
.takes_value(true)
.required(true)
.help("Max number of validators included in the stake pool"),
)
.arg(
Arg::with_name("deposit_authority")
.long("deposit-authority")
.short("a")
.validator(is_valid_signer)
.value_name("DEPOSIT_AUTHORITY_KEYPAIR")
.takes_value(true)
.help("Deposit authority required to sign all deposits into the stake pool"),
)
.arg(
Arg::with_name("pool_keypair")
.long("pool-keypair")
.short("p")
.validator(is_keypair_or_ask_keyword)
.value_name("PATH")
.takes_value(true)
.help("Stake pool keypair [default: new keypair]"),
)
.arg(
Arg::with_name("validator_list_keypair")
.long("validator-list-keypair")
.validator(is_keypair_or_ask_keyword)
.value_name("PATH")
.takes_value(true)
.help("Validator list keypair [default: new keypair]"),
)
.arg(
Arg::with_name("mint_keypair")
.long("mint-keypair")
.validator(is_keypair_or_ask_keyword)
.value_name("PATH")
.takes_value(true)
.help("Stake pool mint keypair [default: new keypair]"),
)
.arg(
Arg::with_name("reserve_keypair")
.long("reserve-keypair")
.validator(is_keypair_or_ask_keyword)
.value_name("PATH")
.takes_value(true)
.help("Stake pool reserve keypair [default: new keypair]"),
)
)
.subcommand(SubCommand::with_name("add-validator")
.about("Add validator account to the stake pool. Must be signed by the pool staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("vote_account")
.index(2)
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.required(true)
.help("The validator vote account that the stake is delegated to"),
)
)
.subcommand(SubCommand::with_name("remove-validator")
.about("Remove validator account from the stake pool. Must be signed by the pool staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("vote_account")
.index(2)
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.required(true)
.help("Vote account for the validator to remove from the pool"),
)
.arg(
Arg::with_name("new_authority")
.long("new-authority")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("New authority to set as Staker and Withdrawer in the stake account removed from the pool.
Defaults to the client keypair."),
)
.arg(
Arg::with_name("stake_receiver")
.long("stake-receiver")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Stake account to receive SOL from the stake pool. Defaults to a new stake account."),
)
)
.subcommand(SubCommand::with_name("increase-validator-stake")
.about("Increase stake to a validator, drawing from the stake pool reserve. Must be signed by the pool staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("vote_account")
.index(2)
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.required(true)
.help("Vote account for the validator to increase stake to"),
)
.arg(
Arg::with_name("amount")
.index(3)
.validator(is_amount)
.value_name("AMOUNT")
.takes_value(true)
.help("Amount in SOL to add to the validator stake account. Must be at least the rent-exempt amount for a stake plus 1 SOL for merging."),
)
)
.subcommand(SubCommand::with_name("decrease-validator-stake")
.about("Decrease stake to a validator, splitting from the active stake. Must be signed by the pool staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("vote_account")
.index(2)
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.required(true)
.help("Vote account for the validator to decrease stake from"),
)
.arg(
Arg::with_name("amount")
.index(3)
.validator(is_amount)
.value_name("AMOUNT")
.takes_value(true)
.help("Amount in SOL to remove from the validator stake account. Must be at least the rent-exempt amount for a stake."),
)
)
.subcommand(SubCommand::with_name("set-preferred-validator")
.about("Set the preferred validator for deposits or withdrawals. Must be signed by the pool staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("preferred_type")
.index(2)
.value_name("OPERATION")
.possible_values(&["deposit", "withdraw"]) // PreferredValidatorType enum
.takes_value(true)
.required(true)
.help("Operation for which to restrict the validator"),
)
.arg(
Arg::with_name("vote_account")
.long("vote-account")
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.help("Vote account for the validator that users must deposit into."),
)
.arg(
Arg::with_name("unset")
.long("unset")
.takes_value(false)
.help("Unset the preferred validator."),
)
.group(ArgGroup::with_name("validator")
.arg("vote_account")
.arg("unset")
.required(true)
)
)
.subcommand(SubCommand::with_name("deposit-stake")
.about("Deposit active stake account into the stake pool in exchange for pool tokens")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
)
.arg(
Arg::with_name("stake_account")
.index(2)
.validator(is_pubkey)
.value_name("STAKE_ACCOUNT_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake address to join the pool"),
)
.arg(
Arg::with_name("withdraw_authority")
.long("withdraw-authority")
.validator(is_valid_signer)
.value_name("KEYPAIR")
.takes_value(true)
.help("Withdraw authority for the stake account to be deposited. [default: cli config keypair]"),
)
.arg(
Arg::with_name("token_receiver")
.long("token-receiver")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Account to receive the minted pool tokens. \
Defaults to the token-owner's associated pool token account. \
Creates the account if it does not exist."),
)
.arg(
Arg::with_name("referrer")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Pool token account to receive the referral fees for deposits. \
Defaults to the token receiver."),
)
)
.subcommand(SubCommand::with_name("deposit-sol")
.about("Deposit SOL into the stake pool in exchange for pool tokens")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address"),
).arg(
Arg::with_name("amount")
.index(2)
.validator(is_amount)
.value_name("AMOUNT")
.takes_value(true)
.help("Amount in SOL to deposit into the stake pool reserve account."),
)
.arg(
Arg::with_name("from")
.long("from")
.validator(is_valid_signer)
.value_name("KEYPAIR")
.takes_value(true)
.help("Source account of funds. [default: cli config keypair]"),
)
.arg(
Arg::with_name("token_receiver")
.long("token-receiver")
.validator(is_pubkey)
.value_name("POOL_TOKEN_RECEIVER_ADDRESS")
.takes_value(true)
.help("Account to receive the minted pool tokens. \
Defaults to the token-owner's associated pool token account. \
Creates the account if it does not exist."),
)
.arg(
Arg::with_name("referrer")
.long("referrer")
.validator(is_pubkey)
.value_name("REFERRER_TOKEN_ADDRESS")
.takes_value(true)
.help("Account to receive the referral fees for deposits. \
Defaults to the token receiver."),
)
)
.subcommand(SubCommand::with_name("list")
.about("List stake accounts managed by this pool")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
)
.subcommand(SubCommand::with_name("update")
.about("Updates all balances in the pool after validator stake accounts receive rewards.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("force")
.long("force")
.takes_value(false)
.help("Update all balances, even if it has already been performed this epoch."),
)
.arg(
Arg::with_name("no_merge")
.long("no-merge")
.takes_value(false)
.help("Do not automatically merge transient stakes. Useful if the stake pool is in an expected state, but the balances still need to be updated."),
)
)
.subcommand(SubCommand::with_name("withdraw-stake")
.about("Withdraw active stake from the stake pool in exchange for pool tokens")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("amount")
.index(2)
.validator(is_amount)
.value_name("AMOUNT")
.takes_value(true)
.required(true)
.help("Amount of pool tokens to withdraw for activated stake."),
)
.arg(
Arg::with_name("pool_account")
.long("pool-account")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Pool token account to withdraw tokens from. Defaults to the token-owner's associated token account."),
)
.arg(
Arg::with_name("stake_receiver")
.long("stake-receiver")
.validator(is_pubkey)
.value_name("STAKE_ACCOUNT_ADDRESS")
.takes_value(true)
.requires("withdraw_from")
.help("Stake account from which to receive a stake from the stake pool. Defaults to a new stake account."),
)
.arg(
Arg::with_name("vote_account")
.long("vote-account")
.validator(is_pubkey)
.value_name("VOTE_ACCOUNT_ADDRESS")
.takes_value(true)
.help("Validator to withdraw from. Defaults to the largest validator stakes in the pool."),
)
.arg(
Arg::with_name("use_reserve")
.long("use-reserve")
.takes_value(false)
.help("Withdraw from the stake pool's reserve. Only possible if all validator stakes are at the minimum possible amount."),
)
.group(ArgGroup::with_name("withdraw_from")
.arg("use_reserve")
.arg("vote_account")
)
)
.subcommand(SubCommand::with_name("withdraw-sol")
.about("Withdraw SOL from the stake pool's reserve in exchange for pool tokens")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("sol_receiver")
.index(2)
.validator(is_valid_pubkey)
.value_name("SYSTEM_ACCOUNT_ADDRESS_OR_KEYPAIR")
.takes_value(true)
.required(true)
.help("System account to receive SOL from the stake pool. Defaults to the payer."),
)
.arg(
Arg::with_name("amount")
.index(3)
.validator(is_amount)
.value_name("AMOUNT")
.takes_value(true)
.required(true)
.help("Amount of pool tokens to withdraw for SOL."),
)
.arg(
Arg::with_name("pool_account")
.long("pool-account")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Pool token account to withdraw tokens from. Defaults to the token-owner's associated token account."),
)
)
.subcommand(SubCommand::with_name("set-manager")
.about("Change manager or fee receiver account for the stake pool. Must be signed by the current manager.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("new_manager")
.long("new-manager")
.validator(is_valid_signer)
.value_name("KEYPAIR")
.takes_value(true)
.help("Keypair for the new stake pool manager."),
)
.arg(
Arg::with_name("new_fee_receiver")
.long("new-fee-receiver")
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Public key for the new account to set as the stake pool fee receiver."),
)
.group(ArgGroup::with_name("new_accounts")
.arg("new_manager")
.arg("new_fee_receiver")
.required(true)
.multiple(true)
)
)
.subcommand(SubCommand::with_name("set-staker")
.about("Change staker account for the stake pool. Must be signed by the manager or current staker.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("new_staker")
.index(2)
.validator(is_pubkey)
.value_name("ADDRESS")
.takes_value(true)
.help("Public key for the new stake pool staker."),
)
)
.subcommand(SubCommand::with_name("set-funding-authority")
.about("Change one of the funding authorities for the stake pool. Must be signed by the manager.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(
Arg::with_name("funding_type")
.index(2)
.value_name("FUNDING_TYPE")
.possible_values(&["stake-deposit", "sol-deposit", "sol-withdraw"]) // FundingType enum
.takes_value(true)
.required(true)
.help("Funding type to be updated."),
)
.arg(
Arg::with_name("new_authority")
.index(3)
.validator(is_pubkey)
.value_name("AUTHORITY_ADDRESS")
.takes_value(true)
.help("Public key for the new stake pool funding authority."),
)
.arg(
Arg::with_name("unset")
.long("unset")
.takes_value(false)
.help("Unset the stake deposit authority. The program will use a program derived address.")
)
.group(ArgGroup::with_name("validator")
.arg("new_authority")
.arg("unset")
.required(true)
)
)
.subcommand(SubCommand::with_name("set-fee")
.about("Change the [epoch/withdraw/stake deposit/sol deposit] fee assessed by the stake pool. Must be signed by the manager.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(Arg::with_name("fee_type")
.index(2)
.value_name("FEE_TYPE")
.possible_values(&["epoch", "stake-deposit", "sol-deposit", "stake-withdrawal", "sol-withdrawal"]) // FeeType enum
.takes_value(true)
.required(true)
.help("Fee type to be updated."),
)
.arg(
Arg::with_name("fee_numerator")
.index(3)
.validator(is_parsable::<u64>)
.value_name("NUMERATOR")
.takes_value(true)
.required(true)
.help("Fee numerator, fee amount is numerator divided by denominator."),
)
.arg(
Arg::with_name("fee_denominator")
.index(4)
.validator(is_parsable::<u64>)
.value_name("DENOMINATOR")
.takes_value(true)
.required(true)
.help("Fee denominator, fee amount is numerator divided by denominator."),
)
)
.subcommand(SubCommand::with_name("set-referral-fee")
.about("Change the referral fee assessed by the stake pool for stake deposits. Must be signed by the manager.")
.arg(
Arg::with_name("pool")
.index(1)
.validator(is_pubkey)
.value_name("POOL_ADDRESS")
.takes_value(true)
.required(true)
.help("Stake pool address."),
)
.arg(Arg::with_name("fee_type")
.index(2)
.value_name("FEE_TYPE")
.possible_values(&["stake", "sol"]) // FeeType enum, kind of
.takes_value(true)
.required(true)
.help("Fee type to be updated."),
)
.arg(
Arg::with_name("fee")
.index(3)
.validator(is_valid_percentage)
.value_name("FEE_PERCENTAGE")
.takes_value(true)
.required(true)
.help("Fee percentage, maximum 100"),
)
)
.subcommand(SubCommand::with_name("list-all")
.about("List information about all stake pools")
)
.get_matches();
let mut wallet_manager = None;
let cli_config = if let Some(config_file) = matches.value_of("config_file") {
solana_cli_config::Config::load(config_file).unwrap_or_default()
} else {
solana_cli_config::Config::default()
};
let config = {
let json_rpc_url = value_t!(matches, "json_rpc_url", String)
.unwrap_or_else(|_| cli_config.json_rpc_url.clone());
let staker = get_signer(
&matches,
"staker",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
);
let funding_authority = if matches.is_present("funding_authority") {
Some(get_signer(
&matches,
"funding_authority",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
))
} else {
None
};
let manager = get_signer(
&matches,
"manager",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
);
let token_owner = get_signer(
&matches,
"token_owner",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
);
let fee_payer = get_signer(
&matches,
"fee_payer",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
);
let verbose = matches.is_present("verbose");
let output_format = matches
.value_of("output_format")
.map(|value| match value {
"json" => OutputFormat::Json,
"json-compact" => OutputFormat::JsonCompact,
_ => unreachable!(),
})
.unwrap_or(if verbose {
OutputFormat::DisplayVerbose
} else {
OutputFormat::Display
});
let dry_run = matches.is_present("dry_run");
let no_update = matches.is_present("no_update");
Config {
rpc_client: RpcClient::new_with_commitment(json_rpc_url, CommitmentConfig::confirmed()),
verbose,
output_format,
manager,
staker,
funding_authority,
token_owner,
fee_payer,
dry_run,
no_update,
}
};
let _ = match matches.subcommand() {
("create-pool", Some(arg_matches)) => {
let deposit_authority = keypair_of(arg_matches, "deposit_authority");
let e_numerator = value_t_or_exit!(arg_matches, "epoch_fee_numerator", u64);
let e_denominator = value_t_or_exit!(arg_matches, "epoch_fee_denominator", u64);
let w_numerator = value_t!(arg_matches, "withdrawal_fee_numerator", u64);
let w_denominator = value_t!(arg_matches, "withdrawal_fee_denominator", u64);
let d_numerator = value_t!(arg_matches, "deposit_fee_numerator", u64);
let d_denominator = value_t!(arg_matches, "deposit_fee_denominator", u64);
let referral_fee = value_t!(arg_matches, "referral_fee", u8);
let max_validators = value_t_or_exit!(arg_matches, "max_validators", u32);
let pool_keypair = keypair_of(arg_matches, "pool_keypair");
let validator_list_keypair = keypair_of(arg_matches, "validator_list_keypair");
let mint_keypair = keypair_of(arg_matches, "mint_keypair");
let reserve_keypair = keypair_of(arg_matches, "reserve_keypair");
command_create_pool(
&config,
deposit_authority,
Fee {
numerator: e_numerator,
denominator: e_denominator,
},
Fee {
numerator: w_numerator.unwrap_or(0),
denominator: w_denominator.unwrap_or(0),
},
Fee {
numerator: d_numerator.unwrap_or(0),
denominator: d_denominator.unwrap_or(0),
},
referral_fee.unwrap_or(0),
max_validators,
pool_keypair,
validator_list_keypair,
mint_keypair,
reserve_keypair,
)
}
("add-validator", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let vote_account_address = pubkey_of(arg_matches, "vote_account").unwrap();
command_vsa_add(&config, &stake_pool_address, &vote_account_address)
}
("remove-validator", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let vote_account = pubkey_of(arg_matches, "vote_account").unwrap();
let new_authority = pubkey_of(arg_matches, "new_authority");
let stake_receiver = pubkey_of(arg_matches, "stake_receiver");
command_vsa_remove(
&config,
&stake_pool_address,
&vote_account,
&new_authority,
&stake_receiver,
)
}
("increase-validator-stake", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let vote_account = pubkey_of(arg_matches, "vote_account").unwrap();
let amount = value_t_or_exit!(arg_matches, "amount", f64);
command_increase_validator_stake(&config, &stake_pool_address, &vote_account, amount)
}
("decrease-validator-stake", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let vote_account = pubkey_of(arg_matches, "vote_account").unwrap();
let amount = value_t_or_exit!(arg_matches, "amount", f64);
command_decrease_validator_stake(&config, &stake_pool_address, &vote_account, amount)
}
("set-preferred-validator", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let preferred_type = match arg_matches.value_of("preferred_type").unwrap() {
"deposit" => PreferredValidatorType::Deposit,
"withdraw" => PreferredValidatorType::Withdraw,
_ => unreachable!(),
};
let vote_account = pubkey_of(arg_matches, "vote_account");
let _unset = arg_matches.is_present("unset");
// since unset and vote_account can't both be set, if unset is set
// then vote_account will be None, which is valid for the program
command_set_preferred_validator(
&config,
&stake_pool_address,
preferred_type,
vote_account,
)
}
("deposit-stake", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let stake_account = pubkey_of(arg_matches, "stake_account").unwrap();
let token_receiver: Option<Pubkey> = pubkey_of(arg_matches, "token_receiver");
let referrer: Option<Pubkey> = pubkey_of(arg_matches, "referrer");
let withdraw_authority = get_signer(
arg_matches,
"withdraw_authority",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: false,
},
);
command_deposit_stake(
&config,
&stake_pool_address,
&stake_account,
withdraw_authority,
&token_receiver,
&referrer,
)
}
("deposit-sol", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let token_receiver: Option<Pubkey> = pubkey_of(arg_matches, "token_receiver");
let referrer: Option<Pubkey> = pubkey_of(arg_matches, "referrer");
let from = keypair_of(arg_matches, "from");
let amount = value_t_or_exit!(arg_matches, "amount", f64);
command_deposit_sol(
&config,
&stake_pool_address,
&from,
&token_receiver,
&referrer,
amount,
)
}
("list", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
command_list(&config, &stake_pool_address)
}
("update", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let no_merge = arg_matches.is_present("no_merge");
let force = arg_matches.is_present("force");
command_update(&config, &stake_pool_address, force, no_merge)
}
("withdraw-stake", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let vote_account = pubkey_of(arg_matches, "vote_account");
let pool_account = pubkey_of(arg_matches, "pool_account");
let pool_amount = value_t_or_exit!(arg_matches, "amount", f64);
let stake_receiver = pubkey_of(arg_matches, "stake_receiver");
let use_reserve = arg_matches.is_present("use_reserve");
command_withdraw_stake(
&config,
&stake_pool_address,
use_reserve,
&vote_account,
&stake_receiver,
&pool_account,
pool_amount,
)
}
("withdraw-sol", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let pool_account = pubkey_of(arg_matches, "pool_account");
let pool_amount = value_t_or_exit!(arg_matches, "amount", f64);
let sol_receiver = get_signer(
arg_matches,
"sol_receiver",
&cli_config.keypair_path,
&mut wallet_manager,
SignerFromPathConfig {
allow_null_signer: true,
},
)
.pubkey();
command_withdraw_sol(
&config,
&stake_pool_address,
&pool_account,
&sol_receiver,
pool_amount,
)
}
("set-manager", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let new_manager: Option<Keypair> = keypair_of(arg_matches, "new_manager");
let new_fee_receiver: Option<Pubkey> = pubkey_of(arg_matches, "new_fee_receiver");
command_set_manager(
&config,
&stake_pool_address,
&new_manager,
&new_fee_receiver,
)
}
("set-staker", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let new_staker = pubkey_of(arg_matches, "new_staker").unwrap();
command_set_staker(&config, &stake_pool_address, &new_staker)
}
("set-funding-authority", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let new_authority = pubkey_of(arg_matches, "new_authority");
let funding_type = match arg_matches.value_of("funding_type").unwrap() {
"sol-deposit" => FundingType::SolDeposit,
"stake-deposit" => FundingType::StakeDeposit,
"sol-withdraw" => FundingType::SolWithdraw,
_ => unreachable!(),
};
let _unset = arg_matches.is_present("unset");
command_set_funding_authority(&config, &stake_pool_address, new_authority, funding_type)
}
("set-fee", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let numerator = value_t_or_exit!(arg_matches, "fee_numerator", u64);
let denominator = value_t_or_exit!(arg_matches, "fee_denominator", u64);
let new_fee = Fee {
denominator,
numerator,
};
match arg_matches.value_of("fee_type").unwrap() {
"epoch" => command_set_fee(&config, &stake_pool_address, FeeType::Epoch(new_fee)),
"stake-deposit" => {
command_set_fee(&config, &stake_pool_address, FeeType::StakeDeposit(new_fee))
}
"sol-deposit" => {
command_set_fee(&config, &stake_pool_address, FeeType::SolDeposit(new_fee))
}
"stake-withdrawal" => command_set_fee(
&config,
&stake_pool_address,
FeeType::StakeWithdrawal(new_fee),
),
"sol-withdrawal" => command_set_fee(
&config,
&stake_pool_address,
FeeType::SolWithdrawal(new_fee),
),
_ => unreachable!(),
}
}
("set-referral-fee", Some(arg_matches)) => {
let stake_pool_address = pubkey_of(arg_matches, "pool").unwrap();
let fee = value_t_or_exit!(arg_matches, "fee", u8);
assert!(
fee <= 100u8,
"Invalid fee {}%. Fee needs to be in range [0-100]",
fee
);
let fee_type = match arg_matches.value_of("fee_type").unwrap() {
"sol" => FeeType::SolReferral(fee),
"stake" => FeeType::StakeReferral(fee),
_ => unreachable!(),
};
command_set_fee(&config, &stake_pool_address, fee_type)
}
("list-all", _) => command_list_all_pools(&config),
_ => unreachable!(),
}
.map_err(|err| {
eprintln!("{}", err);
exit(1);
});
}
| 37.66242 | 167 | 0.565504 |
084ee1a966348052b2df2c4624365c3ce4fa0b1a | 4,374 | use crate::error::Result;
use crate::expr::{Expression, Variable};
use std::fmt;
/// A single statement in the program representation: a comment, a variable
/// binding, an assertion, or an assumption.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
pub enum Node {
    /// A simple comment.
    Comment(String),
    /// Bind the expression to a variable.
    Let { var: Variable, expr: Expression },
    /// Assert that the condition is true.
    Assert { condition: Expression },
    /// Assume that the condition is true.
    Assume { condition: Expression },
}
impl Node {
    /// Create a new comment node from anything convertible to a `String`.
    pub fn comment<S>(text: S) -> Self
    where
        S: Into<String>,
    {
        Self::Comment(text.into())
    }
    /// Create a new variable binding, first checking that the expression's
    /// sort matches the variable's sort.
    pub fn assign(var: Variable, expr: Expression) -> Result<Self> {
        expr.sort().expect_sort(var.sort())?;
        Ok(Self::Let { var, expr })
    }
    /// Create a new assertion; the condition must be boolean-sorted.
    pub fn assert(condition: Expression) -> Result<Self> {
        condition.sort().expect_boolean()?;
        Ok(Self::Assert { condition })
    }
    /// Create a new assumption; the condition must be boolean-sorted.
    pub fn assume(condition: Expression) -> Result<Self> {
        condition.sort().expect_boolean()?;
        Ok(Self::Assume { condition })
    }
    /// Returns whether this node is a comment.
    pub fn is_comment(&self) -> bool {
        match self {
            Self::Comment(..) => true,
            _ => false,
        }
    }
    /// Returns whether this node is a variable binding.
    pub fn is_let(&self) -> bool {
        match self {
            Self::Let { .. } => true,
            _ => false,
        }
    }
    /// Returns whether this node is an assertion.
    pub fn is_assert(&self) -> bool {
        match self {
            Self::Assert { .. } => true,
            _ => false,
        }
    }
    /// Returns whether this node is an assumption.
    pub fn is_assume(&self) -> bool {
        match self {
            Self::Assume { .. } => true,
            _ => false,
        }
    }
    /// Get each `Variable` read by this `Node`.
    pub fn variables_used(&self) -> Vec<&Variable> {
        match self {
            Self::Comment(_) => Vec::new(),
            Self::Let { expr, .. } => expr.variables(),
            Self::Assert { condition } => condition.variables(),
            Self::Assume { condition } => condition.variables(),
        }
    }
    /// Get a mutable reference to each `Variable` used by this `Node`.
    pub fn variables_used_mut(&mut self) -> Vec<&mut Variable> {
        match self {
            Self::Comment(_) => Vec::new(),
            Self::Let { expr, .. } => expr.variables_mut(),
            Self::Assert { condition } => condition.variables_mut(),
            Self::Assume { condition } => condition.variables_mut(),
        }
    }
    /// Get each `Variable` defined (bound) by this `Node`.
    /// Only `Let` defines a variable.
    pub fn variables_defined(&self) -> Vec<&Variable> {
        if let Self::Let { var, .. } = self {
            vec![var]
        } else {
            Vec::new()
        }
    }
    /// Get a mutable reference to each `Variable` defined by this `Node`.
    pub fn variables_defined_mut(&mut self) -> Vec<&mut Variable> {
        if let Self::Let { var, .. } = self {
            vec![var]
        } else {
            Vec::new()
        }
    }
    /// Get each `Variable` referenced by this `Node`: the used variables
    /// followed by the defined ones.
    pub fn variables(&self) -> Vec<&Variable> {
        let mut vars = self.variables_used();
        vars.extend(self.variables_defined());
        vars
    }
    /// Get each `Expression` of this `Node`.
    pub fn expressions(&self) -> Vec<&Expression> {
        match self {
            Self::Comment(_) => Vec::new(),
            Self::Let { expr, .. } => vec![expr],
            Self::Assert { condition } => vec![condition],
            Self::Assume { condition } => vec![condition],
        }
    }
    /// Get a mutable reference to each `Expression` of this `Node`.
    pub fn expressions_mut(&mut self) -> Vec<&mut Expression> {
        match self {
            Self::Comment(_) => Vec::new(),
            Self::Let { expr, .. } => vec![expr],
            Self::Assert { condition } => vec![condition],
            Self::Assume { condition } => vec![condition],
        }
    }
}
impl fmt::Display for Node {
    /// Render the node in the textual syntax: `// c`, `let v = e`,
    /// `assert c`, or `assume c`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let rendered = match self {
            Self::Comment(text) => format!("// {}", text),
            Self::Let { var, expr } => format!("let {} = {}", var, expr),
            Self::Assert { condition } => format!("assert {}", condition),
            Self::Assume { condition } => format!("assume {}", condition),
        };
        f.write_str(&rendered)
    }
}
| 32.4 | 97 | 0.537723 |
acc109b9ed6a614c375389030093bb133487effb | 1,756 |
/// Icon component rendering an "RV hookup" SVG, styled via `crate::Props`.
pub struct IconRvHookup {
    props: crate::Props,
}
impl yew::Component for IconRvHookup {
    type Properties = crate::Props;
    type Message = ();
    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }
    // Messages are unit; any message simply triggers a re-render.
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }
    // Prop changes are ignored: never re-render on new props.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }
    fn view(&self) -> yew::prelude::Html
    {
        // Outer <svg> applies the configurable props (class, size, fill,
        // stroke); the inner, vendored <svg> carries the actual icon paths.
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
                <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M20 17v-6c0-1.1-.9-2-2-2H7V7l-3 3 3 3v-2h4v3H4v3c0 1.1.9 2 2 2h2c0 1.66 1.34 3 3 3s3-1.34 3-3h8v-2h-2zm-9 3c-.55 0-1-.45-1-1s.45-1 1-1 1 .45 1 1-.45 1-1 1zm7-6h-4v-3h4v3zM17 2v2H9v2h8v2l3-3-3-3z" opacity=".3"/><path d="M20 17v-6c0-1.1-.9-2-2-2H7V7l-3 3 3 3v-2h4v3H4v3c0 1.1.9 2 2 2h2c0 1.66 1.34 3 3 3s3-1.34 3-3h8v-2h-2zm-9 3c-.55 0-1-.45-1-1s.45-1 1-1 1 .45 1 1-.45 1-1 1zm7-6h-4v-3h4v3zM17 2v2H9v2h8v2l3-3-3-3z"/></svg>
            </svg>
        }
    }
}
| 38.173913 | 565 | 0.585991 |
de301b1fc499d9aa43048174657fc88cbdacc67d | 8,538 | use super::*;
use crate::symbol::Symbol;
use crate::source_map::{SourceMap, FilePathMapping};
use crate::parse::token;
use crate::with_default_globals;
use errors::{Handler, emitter::EmitterWriter};
use std::io;
use std::path::PathBuf;
use syntax_pos::{BytePos, Span};
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
let emitter = EmitterWriter::new(
Box::new(io::sink()),
Some(sm.clone()),
false,
false,
false,
None,
false,
);
ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
}
// Creates a string reader for the given string.
fn setup<'a>(sm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> {
    // The test string doubles as the (fake) file name of the source file.
    let name = PathBuf::from(teststr.clone());
    let source_file = sm.new_source_file(name.into(), teststr);
    StringReader::new(sess, source_file, None)
}
#[test]
fn t1() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        // The leading block comment and the space after it are single tokens.
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        let tok1 = string_reader.next_token();
        // `fn` occupies bytes 21..23 of the source.
        let tok2 = Token::new(
            mk_ident("fn"),
            Span::with_root_ctxt(BytePos(21), BytePos(23)),
        );
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // Read another token.
        let tok3 = string_reader.next_token();
        // After reading `main`, the reader sits at byte 28.
        assert_eq!(string_reader.pos.clone(), BytePos(28));
        let tok4 = Token::new(
            mk_ident("main"),
            Span::with_root_ctxt(BytePos(24), BytePos(28)),
        );
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);
        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos.clone(), BytePos(29))
    })
}
// Checks that the given reader produces the desired stream
// of tokens (stop checking after exhausting `expected`).
fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
    // Compare only as many tokens as `expected` holds; anything the reader
    // produces afterwards is ignored.
    for want in &expected {
        let got = string_reader.next_token();
        assert_eq!(&got, want);
    }
}
// Makes the identifier by looking up the string in the interner.
fn mk_ident(id: &str) -> TokenKind {
    // The `bool` flags raw identifiers (`r#ident`); plain idents use `false`.
    token::Ident(Symbol::intern(id), false)
}
// Builds a literal token, interning the text and the optional suffix (e.g. `u8`).
fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}
#[test]
fn doublecolon_parsing() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // "a b" lexes as ident, whitespace, ident.
        check_tokenization(
            setup(&sm, &sh, "a b".to_string()),
            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_2() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // "a::b": the lexer emits two individual `:` tokens.
        check_tokenization(
            setup(&sm, &sh, "a::b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_3() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // "a ::b": whitespace, then two colon tokens.
        check_tokenization(
            setup(&sm, &sh, "a ::b".to_string()),
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_4() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // "a:: b": two colon tokens, then whitespace.
        check_tokenization(
            setup(&sm, &sh, "a:: b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
        );
    })
}
#[test]
fn character_a() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // 'a' becomes a char literal whose symbol is just `a` (no quotes).
        assert_eq!(
            setup(&sm, &sh, "'a'".to_string()).next_token(),
            mk_lit(token::Char, "a", None),
        );
    })
}
#[test]
fn character_space() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // A single space is a valid char literal.
        assert_eq!(
            setup(&sm, &sh, "' '".to_string()).next_token(),
            mk_lit(token::Char, " ", None),
        );
    })
}
#[test]
fn character_escaped() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // The escape stays unprocessed: the symbol is the two chars `\` and `n`.
        assert_eq!(
            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
            mk_lit(token::Char, "\\n", None),
        );
    })
}
#[test]
fn lifetime_name() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // The lifetime token's symbol keeps the leading apostrophe.
        assert_eq!(
            setup(&sm, &sh, "'abc".to_string()).next_token(),
            token::Lifetime(Symbol::intern("'abc")),
        );
    })
}
#[test]
fn raw_string() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // r###"…"### with three hashes: inner quotes, `\` and the NUL byte are
        // preserved verbatim in the symbol; the hashes and delimiters are not.
        assert_eq!(
            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
        );
    })
}
#[test]
fn literal_suffixes() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // Checks each literal once with `suffix` attached and once separated
        // from it by a space (in which case no suffix is recorded).
        macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                assert_eq!(
                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                );
                // with a whitespace separator
                assert_eq!(
                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, None),
                );
            }}
        }
        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");
        // A nonstandard suffix like `us` still lexes as a suffix.
        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        // Suffixes also attach to raw and byte-raw strings.
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}
#[test]
fn line_doc_comments() {
    // Exactly three leading slashes (optionally followed by text) is a doc
    // comment; four or more is an ordinary comment.
    for case in &["///", "/// blah"] {
        assert!(is_doc_comment(case));
    }
    assert!(!is_doc_comment("////"));
}
#[test]
fn nested_block_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        // The whole nested block comment comes out as one Comment token.
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}
#[test]
fn crlf_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        // "// test" is 7 bytes; the span stops before the `\r`.
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
        assert_eq!(lexer.next_token(), token::Whitespace);
        // The doc comment's symbol likewise has the `\r` stripped.
        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
    })
}
| 31.622222 | 94 | 0.548021 |
9cd3a43b976f5f277948b06f55a5277dcae2815f | 1,825 | use libzeropool::{
fawkes_crypto::{borsh, BorshDeserialize, BorshSerialize},
native::{account::Account as NativeAccount, note::Note as NativeNote},
};
use libzeropool_rs::client::state::{State, Transaction as InnerTransaction};
use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*;
use crate::database::Database;
use crate::{utils, Fr, PoolParams, POOL_PARAMS};
/// Wasm-facing counterpart of `libzeropool_rs`'s `Transaction`, carrying
/// borsh and serde support (see the `From` impl below).
#[derive(Debug, PartialEq, BorshSerialize, BorshDeserialize, Serialize, Deserialize)]
pub enum Transaction {
    /// An account record.
    Account(NativeAccount<Fr>),
    /// A note record.
    Note(NativeNote<Fr>),
}
impl From<InnerTransaction<Fr>> for Transaction {
    /// Map the client-library transaction onto the wasm-facing enum,
    /// variant for variant.
    fn from(other: InnerTransaction<Fr>) -> Self {
        match other {
            InnerTransaction::Note(note) => Self::Note(note),
            InnerTransaction::Account(acc) => Self::Account(acc),
        }
    }
}
/// Wasm-exported handle to the client-side pool state.
#[wasm_bindgen]
pub struct UserState {
    // Hidden from JS (`skip`); accessed from Rust only.
    #[wasm_bindgen(skip)]
    pub inner: State<Database, PoolParams>,
}
#[wasm_bindgen]
impl UserState {
    #[allow(unused_variables)]
    #[wasm_bindgen]
    /// Construct the state, choosing the backing store by build target.
    pub async fn init(db_id: String) -> Self {
        utils::set_panic_hook();
        // Web/bundler builds persist state keyed by `db_id`; other targets use
        // a test store and leave `db_id` unused (hence the `allow` above).
        #[cfg(any(feature = "bundler", feature = "web"))]
        let state = State::init_web(db_id, POOL_PARAMS.clone()).await;
        #[cfg(not(any(feature = "bundler", feature = "web")))]
        let state = State::init_test(POOL_PARAMS.clone());
        UserState { inner: state }
    }
    #[wasm_bindgen(js_name = "earliestUsableIndex")]
    /// Returns the index of the earliest usable note.
    pub fn earliest_usable_index(&self) -> u64 {
        self.inner.earliest_usable_index()
    }
    #[wasm_bindgen(js_name = "totalBalance")]
    /// Returns user's total balance (account + available notes) as a string.
    pub fn total_balance(&self) -> String {
        self.inner.total_balance().to_string()
    }
}
| 29.918033 | 85 | 0.661918 |
e5fbf33b82cc6d539870ca18c4015a5e9ed2567f | 2,652 | #![allow(clippy::module_inception)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::ptr_arg)]
#![allow(clippy::large_enum_variant)]
#![doc = "generated by AutoRust 0.1.0"]
#[cfg(feature = "package-2018-09")]
pub mod package_2018_09;
#[cfg(all(feature = "package-2018-09", not(feature = "no-default-version")))]
pub use package_2018_09::{models, operations, operations::Error};
#[cfg(feature = "package-2020-01")]
pub mod package_2020_01;
#[cfg(all(feature = "package-2020-01", not(feature = "no-default-version")))]
pub use package_2020_01::{models, operations, operations::Error};
#[cfg(feature = "package-2020-06")]
pub mod package_2020_06;
use azure_core::setters;
#[cfg(all(feature = "package-2020-06", not(feature = "no-default-version")))]
pub use package_2020_06::{models, operations, operations::Error};
/// Start an `OperationConfigBuilder` with the required pieces; `base_path`
/// and `token_credential_resource` fall back to defaults in `build`.
pub fn config(
    http_client: std::sync::Arc<dyn azure_core::HttpClient>,
    token_credential: Box<dyn azure_core::TokenCredential>,
) -> OperationConfigBuilder {
    OperationConfigBuilder {
        base_path: None,
        token_credential_resource: None,
        http_client,
        token_credential,
    }
}
/// Builder for [`OperationConfig`]; created via [`config`].
pub struct OperationConfigBuilder {
    http_client: std::sync::Arc<dyn azure_core::HttpClient>,
    // Overrides the default base URL ("https://management.azure.com") when set.
    base_path: Option<String>,
    token_credential: Box<dyn azure_core::TokenCredential>,
    // Overrides the default credential resource when set.
    token_credential_resource: Option<String>,
}
impl OperationConfigBuilder {
    setters! { base_path : String => Some (base_path) , token_credential_resource : String => Some (token_credential_resource) , }
    /// Finalize the builder, filling any unset option with the Azure
    /// public-cloud default endpoint/resource.
    pub fn build(self) -> OperationConfig {
        let base_path = self
            .base_path
            .unwrap_or_else(|| "https://management.azure.com".to_owned());
        let token_credential_resource = self
            .token_credential_resource
            .unwrap_or_else(|| "https://management.azure.com/".to_owned());
        OperationConfig {
            http_client: self.http_client,
            base_path,
            token_credential: Some(self.token_credential),
            token_credential_resource,
        }
    }
}
/// Resolved configuration used by the generated operations.
pub struct OperationConfig {
    http_client: std::sync::Arc<dyn azure_core::HttpClient>,
    base_path: String,
    // `None` only if constructed without a credential; `build` always sets it.
    token_credential: Option<Box<dyn azure_core::TokenCredential>>,
    token_credential_resource: String,
}
impl OperationConfig {
    /// Borrow the HTTP client used for requests.
    pub fn http_client(&self) -> &dyn azure_core::HttpClient {
        &*self.http_client
    }
    /// The base URL requests are issued against.
    pub fn base_path(&self) -> &str {
        &self.base_path
    }
    /// Borrow the token credential, if one is configured.
    pub fn token_credential(&self) -> Option<&dyn azure_core::TokenCredential> {
        self.token_credential.as_ref().map(|cred| &**cred)
    }
    /// The resource the token credential authenticates against.
    pub fn token_credential_resource(&self) -> &str {
        &self.token_credential_resource
    }
}
| 38.434783 | 130 | 0.691176 |
116f2230fa07c4837c09b285423f8c2f9b50f2f1 | 3,811 | #[doc = r" Value read from the register"]
pub struct R {
    // Snapshot of the register's raw contents at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw bits staged to be written back to the register.
    bits: u32,
}
impl super::CPUIRQSEL36 {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot the register, let the caller edit a
        // writer seeded with the current value, then store the result.
        let bits = self.register.get();
        let r = R { bits };
        let mut w = W { bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the documented reset value,
        // not from the register's current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `EV`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EVR {
    // Decoded selector read from the EV field; values without a name are
    // preserved in `_Reserved` (see `_from`).
    #[doc = "UART1 combined interrupt, interrupt flags are found here UART1:MIS"]
    UART1_COMB,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl EVR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // `_Reserved` stores its raw value; the named variant maps back to 37.
        match *self {
            EVR::_Reserved(raw) => raw,
            EVR::UART1_COMB => 37,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> EVR {
        if value == 37 {
            EVR::UART1_COMB
        } else {
            EVR::_Reserved(value)
        }
    }
    #[doc = "Checks if the value of the field is `UART1_COMB`"]
    #[inline]
    pub fn is_uart1_comb(&self) -> bool {
        match *self {
            EVR::UART1_COMB => true,
            _ => false,
        }
    }
}
#[doc = "Values that can be written to the field `EV`"]
pub enum EVW {
    // Write-side counterpart of `EVR`: only named, valid selectors are listed.
    #[doc = "UART1 combined interrupt, interrupt flags are found here UART1:MIS"]
    UART1_COMB,
}
impl EVW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Sole variant: selector value 37 (UART1 combined interrupt).
        match *self {
            EVW::UART1_COMB => 37,
        }
    }
}
#[doc = r" Proxy"]
pub struct _EVW<'a> {
    // Borrow of the writer whose EV field this proxy mutates.
    w: &'a mut W,
}
impl<'a> _EVW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: EVW) -> &'a mut W {
        let raw = variant._bits();
        // SAFETY: `_bits` only yields selector values defined for this field.
        unsafe { self.bits(raw) }
    }
    #[doc = "UART1 combined interrupt, interrupt flags are found here UART1:MIS"]
    #[inline]
    pub fn uart1_comb(self) -> &'a mut W {
        self.variant(EVW::UART1_COMB)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u32 = 0x7f;
        const OFFSET: u32 = 0;
        // Clear the 7-bit field, then or-in the masked new value.
        let cleared = self.w.bits & !(MASK << OFFSET);
        self.w.bits = cleared | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:6 - 6:0\\] Read only selection value"]
    #[inline]
    pub fn ev(&self) -> EVR {
        // Extract the low 7 bits and decode them into the enum.
        const MASK: u32 = 0x7f;
        const OFFSET: u32 = 0;
        let field = ((self.bits >> OFFSET) & MASK) as u8;
        EVR::_from(field)
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // 0x25 (= 37) selects the UART1 combined interrupt.
        W { bits: 0x25 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:6 - 6:0\\] Read only selection value"]
    #[inline]
    pub fn ev(&mut self) -> _EVW {
        // Proxy that writes the EV field back into this writer.
        _EVW { w: self }
    }
}
| 25.238411 | 81 | 0.511939 |
9cc22d9d109b64b5a9e7f8ae63bdd728f6e2d7ae | 392 | mod keywords;
mod writer;
use std::fmt::Error as FmtError;
use thiserror::Error;
pub use writer::Writer;
/// Errors that can occur while emitting the textual output.
#[derive(Error, Debug)]
pub enum Error {
    /// Writing into the output string failed.
    #[error(transparent)]
    IoError(#[from] FmtError),
}
pub fn write_string(module: &crate::Module) -> Result<String, Error> {
let mut w = Writer::new(String::new());
w.write(module)?;
let output = w.finish();
Ok(output)
}
| 18.666667 | 70 | 0.647959 |
cc6e3e5483be93a1d2b533f079aacb62256f9335 | 3,311 | use yew::prelude::*;
use crate::components::{Icon, Link};
/// Visual size variants for [`Button`].
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ButtonSize {
    /// Larger text with standard vertical padding.
    Large,
    /// Standard padding (the default).
    Medium,
    /// Smaller text with reduced padding.
    Small,
}
impl Default for ButtonSize {
fn default() -> Self {
Self::Medium
}
}
/// Properties accepted by [`Button`].
#[derive(Debug, Clone, PartialEq, Properties)]
pub struct ButtonProps {
    /// Extra CSS classes appended to the computed class list.
    #[prop_or_default]
    pub classes: Classes,
    /// Click handler; defaults to a no-op.
    #[prop_or_else(Callback::noop)]
    pub on_click: Callback<MouseEvent>,
    /// Size variant (defaults to `Medium`).
    #[prop_or_default]
    pub size: ButtonSize,
    /// Optional icon classes; the icon renders before the text when present.
    #[prop_or_default]
    pub icon: Option<Classes>,
    /// Optional button label.
    #[prop_or_default]
    pub text: Option<String>,
    /// Render in the highlighted "active" state.
    #[prop_or(false)]
    pub active: bool,
    /// Disable the button (gets the `disabled` attribute and muted styling).
    #[prop_or(false)]
    pub disabled: bool,
    /// When set, the button is wrapped in a `Link` to this destination.
    #[prop_or_default]
    pub href: Option<String>,
}
/// Themed button (or link-wrapped button when `href` is set) with optional
/// icon and text, size variants, and active/disabled styling.
#[function_component(Button)]
pub fn button(props: &ButtonProps) -> Html {
    // Base classes shared by every button: theme-aware colors, borders, focus
    // ring, disabled styling, and first/last-child corner rounding for groups.
    // NOTE: the list's contents and order are rendered into the DOM verbatim.
    let mut classes = classes!(
        props.classes.clone(),
        "transition",
        "duration-200",
        "px-4",
        "bg-brand-bg",
        "dark:bg-brand-dark-bg",
        "text-brand-text",
        "dark:text-brand-dark-text",
        "disabled:cursor-not-allowed",
        "disabled:hover:bg-opacity-0",
        "disabled:hover:text-brand-text",
        "dark:disabled:hover:text-brand-dark-text",
        "disabled:opacity-50",
        "border",
        "border-brand-border",
        "dark:border-brand-dark-border",
        "hover:bg-brand-primary-hover",
        "dark:hover:bg-brand-dark-primary-hover",
        "hover:text-brand-text-primary-hover",
        "dark:hover:text-brand-dark-text-primary-hover",
        "focus:outline-none",
        "focus:ring",
        "focus:ring-brand-focus",
        "dark:focus:ring-brand-dark-focus",
        "focus:ring-opacity-50",
        "focus:border-brand-focus",
        "dark:focus:border-brand-dark-focus",
        "first:rounded-l-2xl",
        "first:border-r-0",
        "last:rounded-r-2xl",
        "last:border-l-0",
        "first:last:border",
    );
    // Size-dependent text size and padding.
    classes.push(match props.size {
        ButtonSize::Large => {
            classes!("text-lg", "py-2")
        }
        ButtonSize::Medium => {
            classes!("py-2")
        }
        ButtonSize::Small => {
            classes!("text-sm", "py-1")
        }
    });
    // Highlight styling when the button is marked active.
    classes.push(if props.active {
        classes!(
            "bg-brand-primary-active",
            "dark:bg-brand-dark-primary-active",
            "text-brand-text-primary-active",
            "dark:text-brand-dark-text-primary-active"
        )
    } else {
        classes!()
    });
    // Compose the inner content from the optional icon and text.
    let inner = match (&props.icon, &props.text) {
        (Some(icon), Some(text)) => html! {
            <>
                <Icon icon={icon.clone()} classes={classes!("mr-2")} />
                { text }
            </>
        },
        (Some(icon), None) => html! { <Icon icon={icon.clone()} /> },
        (None, Some(text)) => html! { text },
        (None, None) => html! {},
    };
    let button = html! {
        <button
            class={classes}
            onclick={props.on_click.clone()}
            disabled={props.disabled}
        >
            { inner }
        </button>
    };
    // Wrap the button in a router link when an href is supplied.
    match &props.href {
        Some(href) => html! {
            <Link href={href.clone()}>
                { button }
            </Link>
        },
        None => button,
    }
}
| 24.894737 | 71 | 0.520991 |
// NOTE(review): this module looks machine-generated (presumably by svd2rust
// from an MSP430 SVD file) — confirm before editing by hand; field order and
// the `#[repr(C)]` layout must match the hardware register map.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Interrupt Enable 1"]
    pub sfrie1: crate::Reg<sfrie1::SFRIE1_SPEC>,
    #[doc = "0x02 - Interrupt Flag 1"]
    pub sfrifg1: crate::Reg<sfrifg1::SFRIFG1_SPEC>,
    #[doc = "0x04 - RESET Pin Control Register"]
    pub sfrrpcr: crate::Reg<sfrrpcr::SFRRPCR_SPEC>,
}
#[doc = "SFRIE1 register accessor: an alias for `Reg<SFRIE1_SPEC>`"]
pub type SFRIE1 = crate::Reg<sfrie1::SFRIE1_SPEC>;
#[doc = "Interrupt Enable 1"]
pub mod sfrie1;
#[doc = "SFRIFG1 register accessor: an alias for `Reg<SFRIFG1_SPEC>`"]
pub type SFRIFG1 = crate::Reg<sfrifg1::SFRIFG1_SPEC>;
#[doc = "Interrupt Flag 1"]
pub mod sfrifg1;
#[doc = "SFRRPCR register accessor: an alias for `Reg<SFRRPCR_SPEC>`"]
pub type SFRRPCR = crate::Reg<sfrrpcr::SFRRPCR_SPEC>;
#[doc = "RESET Pin Control Register"]
pub mod sfrrpcr;
| 37.608696 | 70 | 0.690173 |
ff7675bc34c3868b0f4e14ec1130024935ad6835 | 738 | extern crate quire;
#[macro_use] extern crate serde_derive;
use quire::{parse_config, Options};
use quire::validate::{Structure, Scalar};
/// Shape of the YAML document loaded from `examples/config.yaml`.
#[derive(Deserialize)]
#[allow(dead_code)]
struct Config {
    // Required scalar (enforced by `validator()`).
    item1: String,
    // Optional scalar; may be absent from the file.
    item2: Option<String>,
}
/// Builds the validation schema matching `Config`: a mapping with a required
/// `item1` scalar and an optional `item2` scalar.
fn validator() -> Structure<'static> {
    let schema = Structure::new();
    schema
        .member("item1", Scalar::new())
        .member("item2", Scalar::new().optional())
}
/// Reports the parsed configuration (demonstration only).
fn work(cfg: &Config) {
    let first_item = &cfg.item1;
    println!("item1 is {}.", first_item);
    // `item2` is deliberately not printed — this is just demonstration code.
}
fn main() {
let cfg: Config;
cfg = parse_config("examples/config.yaml",
&validator(), &Options::default())
.expect("valid config");
work(&cfg)
}
| 23.0625 | 57 | 0.607046 |
f93602dbe1aa7a1fb0b49457f5a8fa0e65d4c9c1 | 11,191 | //! Parameter optimization
//!
//! The fit of the parameters is done by gradient descent (using the ADAM algorithm) on the gradient of the marginal log-likelihood
//! (which let us use all the data without bothering with cross-validation)
//!
//! If the kernel can be rescaled, we use ideas from [Fast methods for training Gaussian processes on large datasets](https://arxiv.org/pdf/1604.01250.pdf)
//! to rescale the kernel at each step with the optimal magnitude which has the effect of fitting the noise without computing its gradient.
//!
//! Otherwise we fit the noise in log-scale as its magnitude matters more than its precise value.
use super::GaussianProcess;
use crate::algebra::{make_cholesky_cov_matrix, make_gradient_covariance_matrices};
use crate::parameters::{kernel::Kernel, prior::Prior};
impl<KernelType: Kernel, PriorType: Prior> GaussianProcess<KernelType, PriorType> {
    //-------------------------------------------------------------------------------------------------
    // NON-SCALABLE KERNEL
    /// Computes the gradient of the marginal likelihood for the current value of each parameter.
    /// The produced vector contains the gradient per kernel parameter followed by the gradient for the noise parameter.
    fn gradient_marginal_likelihood(&self) -> Vec<f64> {
        // formula: 1/2 ( transpose(alpha) * dp * alpha - trace(K^-1 * dp) )
        // K = cov(train,train)
        // alpha = K^-1 * output
        // dp = gradient(K, parameter)
        // needed for the per parameter gradient computation
        let cov_inv = self.covmat_cholesky.inverse();
        let alpha = &cov_inv * self.training_outputs.as_vector();
        // loop on the gradient matrix for each parameter
        let mut results = vec![];
        for cov_gradient in
            make_gradient_covariance_matrices(&self.training_inputs.as_matrix(), &self.kernel)
        {
            // transpose(alpha) * cov_gradient * alpha
            let data_fit: f64 = cov_gradient
                .column_iter()
                .zip(alpha.iter())
                .map(|(col, alpha_col)| alpha.dot(&col) * alpha_col)
                .sum();
            // trace(cov_inv * cov_gradient), computed without materializing the product
            let complexity_penalty: f64 = cov_inv
                .row_iter()
                .zip(cov_gradient.column_iter())
                .map(|(c, d)| c.tr_dot(&d))
                .sum();
            results.push((data_fit - complexity_penalty) / 2.);
        }
        // adds the noise parameter
        // gradient(K, noise) = 2*noise*Id
        let data_fit = alpha.dot(&alpha);
        let complexity_penalty = cov_inv.trace();
        let noise_gradient = self.noise * (data_fit - complexity_penalty);
        results.push(noise_gradient);
        results
    }
    /// Fit parameters using a gradient descent algorithm.
    ///
    /// Runs for a maximum of `max_iter` iterations (100 is a good default value).
    /// Stops prematurely if all the components of the gradient go below `convergence_fraction` times the value of their respective parameter (0.05 is a good default value).
    ///
    /// The `noise` parameter is fitted in log-scale as its magnitude matters more than its precise value
    pub(super) fn optimize_parameters(&mut self, max_iter: usize, convergence_fraction: f64) {
        // use the ADAM gradient descent algorithm
        // see [optimizing-gradient-descent](https://ruder.io/optimizing-gradient-descent/)
        // for a good point on current gradient descent algorithms
        // constant parameters (standard ADAM defaults)
        let beta1 = 0.9;
        let beta2 = 0.999;
        let epsilon = 1e-8;
        let learning_rate = 0.1;
        let mut parameters: Vec<_> = self
            .kernel
            .get_parameters()
            .iter()
            .map(|&p| if p == 0. { epsilon } else { p }) // ensures no parameter is 0 (which would block the algorithm)
            .collect();
        parameters.push(self.noise.ln()); // adds noise in log-space
        // ADAM first/second moment accumulators, one entry per parameter
        let mut mean_grad = vec![0.; parameters.len()];
        let mut var_grad = vec![0.; parameters.len()];
        for i in 1..=max_iter {
            let mut gradients = self.gradient_marginal_likelihood();
            if let Some(noise_grad) = gradients.last_mut() {
                // corrects gradient of noise for log-space
                *noise_grad *= self.noise
            }
            let mut continue_search = false;
            for p in 0..parameters.len() {
                mean_grad[p] = beta1 * mean_grad[p] + (1. - beta1) * gradients[p];
                var_grad[p] = beta2 * var_grad[p] + (1. - beta2) * gradients[p].powi(2);
                let bias_corrected_mean = mean_grad[p] / (1. - beta1.powi(i as i32));
                let bias_corrected_variance = var_grad[p] / (1. - beta2.powi(i as i32));
                let delta = learning_rate * bias_corrected_mean
                    / (bias_corrected_variance.sqrt() + epsilon);
                continue_search |= delta.abs() > convergence_fraction;
                // multiplicative update: keeps each parameter's sign and scales the
                // step relative to the parameter's magnitude
                parameters[p] *= 1. + delta;
            }
            // sets parameters
            self.kernel.set_parameters(&parameters);
            if let Some(noise) = parameters.last() {
                // gets out of log-space before setting noise
                self.noise = noise.exp()
            }
            // fits model
            self.covmat_cholesky = make_cholesky_cov_matrix(
                &self.training_inputs.as_matrix(),
                &self.kernel,
                self.noise,
            );
            if !continue_search {
                //println!("Iterations:{}", i);
                break;
            };
        }
        /*println!("Fit done. likelihood:{} parameters:{:?} noise:{:e}",
        self.likelihood(),
        parameters,
        self.noise);*/
    }
    //-------------------------------------------------------------------------------------------------
    // SCALABLE KERNEL
    /// Returns a couple containing the optimal scale for the kernel+noise (which is used to optimize the noise)
    /// plus a vector containing the gradient per kernel parameter (but NOT the gradient for the noise parameter)
    ///
    /// see [Fast methods for training Gaussian processes on large datasets](https://arxiv.org/pdf/1604.01250.pdf)
    /// for the formula used to compute the scale and the modification to the gradient
    fn scaled_gradient_marginal_likelihood(&self) -> (f64, Vec<f64>) {
        // formula:
        // gradient = 1/2 ( transpose(alpha) * dp * alpha / scale - trace(K^-1 * dp) )
        // scale = transpose(output) * K^-1 * output / n
        // K = cov(train,train)
        // alpha = K^-1 * output
        // dp = gradient(K, parameter)
        // needed for the per parameter gradient computation
        let cov_inv = self.covmat_cholesky.inverse();
        let training_output = self.training_outputs.as_vector();
        let alpha = &cov_inv * training_output;
        // scaling for the kernel
        let scale = training_output.dot(&alpha) / (training_output.nrows() as f64);
        // loop on the gradient matrix for each parameter
        let mut results = vec![];
        for cov_gradient in
            make_gradient_covariance_matrices(&self.training_inputs.as_matrix(), &self.kernel)
        {
            // transpose(alpha) * cov_gradient * alpha / scale
            // NOTE: this quantity is divided by the scale which is not the case for the unscaled gradient
            let data_fit = cov_gradient
                .column_iter()
                .zip(alpha.iter())
                .map(|(col, alpha_col)| alpha.dot(&col) * alpha_col)
                .sum::<f64>()
                / scale;
            // trace(cov_inv * cov_gradient)
            let complexity_penalty: f64 = cov_inv
                .row_iter()
                .zip(cov_gradient.column_iter())
                .map(|(c, d)| c.tr_dot(&d))
                .sum();
            results.push((data_fit - complexity_penalty) / 2.);
        }
        // adds the noise parameter
        // gradient(K, noise) = 2*noise*Id
        // (kept for reference: the noise gradient is not needed because the
        // rescaling step fits the noise implicitly)
        /*let data_fit = alpha.dot(&alpha) / scale;
        let complexity_penalty = cov_inv.trace();
        let noise_gradient = self.noise * (data_fit - complexity_penalty);
        results.push(noise_gradient);*/
        (scale, results)
    }
    /// Fit parameters using a gradient descent algorithm.
    /// Additionally, at each step, the kernel and noise are rescaled using the optimal magnitude.
    ///
    /// Runs for a maximum of `max_iter` iterations (100 is a good default value).
    /// Stops prematurely if all the components of the gradient go below `convergence_fraction` times the value of their respective parameter (0.05 is a good default value).
    pub(super) fn scaled_optimize_parameters(
        &mut self,
        max_iter: usize,
        convergence_fraction: f64,
    ) {
        // use the ADAM gradient descent algorithm
        // see [optimizing-gradient-descent](https://ruder.io/optimizing-gradient-descent/)
        // for a good point on current gradient descent algorithms
        // constant parameters (standard ADAM defaults)
        let beta1 = 0.9;
        let beta2 = 0.999;
        let epsilon = 1e-8;
        let learning_rate = 0.1;
        let mut parameters: Vec<_> = self
            .kernel
            .get_parameters()
            .iter()
            .map(|&p| if p == 0. { epsilon } else { p }) // ensures no parameter is 0 (which would block the algorithm)
            .collect();
        // ADAM first/second moment accumulators, one entry per parameter
        let mut mean_grad = vec![0.; parameters.len()];
        let mut var_grad = vec![0.; parameters.len()];
        for i in 1..=max_iter {
            let (scale, gradients) = self.scaled_gradient_marginal_likelihood();
            let mut continue_search = false;
            for p in 0..parameters.len() {
                mean_grad[p] = beta1 * mean_grad[p] + (1. - beta1) * gradients[p];
                var_grad[p] = beta2 * var_grad[p] + (1. - beta2) * gradients[p].powi(2);
                let bias_corrected_mean = mean_grad[p] / (1. - beta1.powi(i as i32));
                let bias_corrected_variance = var_grad[p] / (1. - beta2.powi(i as i32));
                let delta = learning_rate * bias_corrected_mean
                    / (bias_corrected_variance.sqrt() + epsilon);
                continue_search |= delta.abs() > convergence_fraction;
                // multiplicative update: keeps each parameter's sign and scales the
                // step relative to the parameter's magnitude
                parameters[p] *= 1. + delta;
            }
            // set parameters
            self.kernel.set_parameters(&parameters);
            self.kernel.rescale(scale);
            self.noise *= scale;
            parameters = self.kernel.get_parameters(); // get parameters back as they have been rescaled
            // fits model
            self.covmat_cholesky = make_cholesky_cov_matrix(
                &self.training_inputs.as_matrix(),
                &self.kernel,
                self.noise,
            );
            if !continue_search {
                //println!("Iterations:{}", i);
                break;
            };
        }
        /*println!("Scaled fit done. likelihood:{} parameters:{:?} noise:{:e}",
        self.likelihood(),
        parameters,
        self.noise);*/
    }
}
| 42.877395 | 171 | 0.574301 |
fc51191451740c9fdf2832c7aa72530412073200 | 663 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_mut)]
#![allow(unused_variables)]
// pretty-expanded FIXME #23616
fn foo<'a, I>(mut it: I) where I: Iterator<Item=&'a isize> {}
fn main() {
    let values = [1, 2];
    foo(values.iter());
}
| 31.571429 | 68 | 0.704374 |
1e923d009b9dc1fd53448ea726769c30d5cbb607 | 2,687 | use std::cell::RefCell;
use std::iter::Iterator;
use std::sync::mpsc::{channel, Receiver, Sender};
use compression::Compression;
use error;
use frame::events::{
SchemaChange as FrameSchemaChange, ServerEvent as FrameServerEvent,
SimpleServerEvent as FrameSimpleServerEvent,
};
use frame::parser::parse_frame;
use std::error::Error;
use transport::CDRSTransport;
/// Full Server Event which includes all details about an occurred change.
pub type ServerEvent = FrameServerEvent;
/// Simplified Server event. It should be used to represent an event
/// which a consumer wants to listen to.
pub type SimpleServerEvent = FrameSimpleServerEvent;
/// Reexport of `FrameSchemaChange`.
pub type SchemaChange = FrameSchemaChange;
/// Factory function returning a `Listener` together with its `EventStream`.
///
/// `Listener` exposes a single `start` method that blocks the calling
/// thread, so it should be run on a dedicated thread to avoid blocking the
/// main one.
///
/// `EventStream` is an iterator yielding events as they arrive, similar to
/// `Receiver::iter`.
pub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {
    let (sender, receiver) = channel();
    let listener = Listener {
        transport: transport,
        tx: sender,
    };
    (listener, EventStream { rx: receiver })
}
/// `Listener` provides only one function, `start`, to start listening. It
/// blocks a thread, so it should be moved onto a separate one to avoid
/// blocking the main thread.
pub struct Listener<X> {
    // Transport that frames are read from.
    transport: X,
    // Sending half of the channel used to publish decoded server events.
    tx: Sender<ServerEvent>,
}
impl<X: CDRSTransport + 'static> Listener<RefCell<X>> {
/// It starts a process of listening to new events. Locks a frame.
pub fn start(self, compressor: &Compression) -> error::Result<()> {
loop {
let event_opt = try!(parse_frame(&self.transport, compressor))
.get_body()?
.into_server_event();
let event = if event_opt.is_some() {
// unwrap is safe as we've checked that event_opt.is_some()
event_opt.unwrap().event as ServerEvent
} else {
continue;
};
match self.tx.send(event) {
Err(err) => return Err(error::Error::General(err.description().to_string())),
_ => continue,
}
}
}
}
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub struct EventStream {
    // Receiving half of the channel fed by `Listener::start`.
    rx: Receiver<ServerEvent>,
}
impl Iterator for EventStream {
    type Item = ServerEvent;
    /// Blocks until the next event arrives; the stream ends once the
    /// sending side of the channel is dropped.
    fn next(&mut self) -> Option<Self::Item> {
        match self.rx.recv() {
            Ok(event) => Some(event),
            Err(_) => None,
        }
    }
}
| 30.885057 | 93 | 0.649423 |
017b9377adaa9b09729b176c84b6fcc2446194d0 | 962 | use std;
use trackable::error::{ErrorKind as TrackableErrorKind, ErrorKindExt, TrackableError};
use entity::object::ObjectVersion;
/// Crate-specific `Error` type: a thin newtype over `TrackableError<ErrorKind>`.
#[derive(Debug, Clone, TrackableError, Serialize, Deserialize)]
pub struct Error(TrackableError<ErrorKind>);
impl From<std::io::Error> for Error {
    fn from(f: std::io::Error) -> Self {
        // I/O failures have no dedicated kind; file them under `Other`.
        let tracked = ErrorKind::Other.cause(f);
        tracked.into()
    }
}
impl From<std::ffi::NulError> for Error {
    fn from(f: std::ffi::NulError) -> Self {
        // Interior-NUL failures have no dedicated kind; file them under `Other`.
        let tracked = ErrorKind::Other.cause(f);
        tracked.into()
    }
}
impl From<std::num::ParseIntError> for Error {
    fn from(f: std::num::ParseIntError) -> Self {
        // A number that fails to parse is malformed input from the caller.
        let tracked = ErrorKind::InvalidInput.cause(f);
        tracked.into()
    }
}
/// The kind of an error.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ErrorKind {
    InvalidInput,
    Unavailable,
    Timeout,
    NotLeader,
    // Carries the object version that was actually observed, if any.
    Unexpected(Option<ObjectVersion>),
    Other,
}
impl TrackableErrorKind for ErrorKind {}
| 26 | 86 | 0.674636 |
01fc1de4a5e061b0073b0b57702b2ba4fe281d35 | 6,688 | //! Holds definition to wrap and alter game.
use error::MaeveError;
use load::save;
use protos::master::Conditional;
use protos::master::Context;
use protos::master::Game;
use protos::master::State;
use protos::master::branch::Branch;
use protos::master::context::Content;
use protos::master::context::Scope;
use protos::master::game;
use screen::Interfaceable;
use std::collections::HashMap;
use std::rc::Rc;
/// Runtime wrapper around a `Game`: pairs the mutable game data with an
/// interface for I/O plus cached copies of the character's surroundings.
#[derive(Debug)]
pub struct Machine<'m, I: 'm + Interfaceable> {
    // Interface used for input/output (e.g. saving via `save(self.src, ...)`).
    pub src: &'m mut I,
    // The authoritative game data; mutated when state changes are applied.
    pub game: &'m mut Game,
    // Ideal version, these are all references, or pointers and not direct
    // objects.
    // Cached clone of the level the character is currently in.
    pub level: game::Level,
    // Items reachable by the character (inventory plus level items).
    pub items: HashMap<String, game::Item>,
    // Cached clone of the player character.
    pub person: game::Character,
}
/// Outcome of interpreting player input, consumed by `process_action`.
pub enum Action {
    // A concrete game action to evaluate.
    Act(Rc<game::Action>),
    // `NoOp` and `Undefined` both fall through to the default branch of
    // `process_action` ("Didn't do anything...").
    NoOp,
    Undefined,
    // Persist the game to disk.
    Save,
}
/// Builds a `Machine` for `game`, caching the character, its level and the
/// reachable items (inventory plus level items).
///
/// Returns an error when the game has no character or when the character's
/// level does not exist. Previously this cloned the entire `Game` up front;
/// now only the needed pieces are cloned.
pub fn create_machine<'m, I: Interfaceable>(
    src: &'m mut I,
    game: &'m mut Game,
) -> Result<Box<Machine<'m, I>>, MaeveError> {
    let person = match game.person.clone() {
        Some(person) => person,
        None => {
            return Err(MaeveError::from(
                "A Character was not specifying in the game...",
            ))
        }
    };
    let level = match game.levels.get(&person.level) {
        Some(level) => level.clone(),
        None => return Err(MaeveError::from("Level for character not found...")),
    };
    // Items visible to the character: inventory first, then the level's items.
    let mut items: HashMap<String, game::Item> = HashMap::new();
    items.extend(person.inventory.clone());
    items.extend(level.items.clone());
    Ok(Box::new(Machine {
        src: src,
        game: game,
        level: level,
        items: items,
        person: person,
    }))
}
impl<'m, I: Interfaceable> Machine<'m, I> {
    /* TODO: Create a mutable version of this such that state can be
     * directly set. This would also mean not copying game in extract phase
     * such that the reference can be directly set. */
    /// Resolves the `State` a `Context` points at, honouring its scope
    /// (character stat, current level, item, or global).
    ///
    /// NOTE(review): `?` applied to `get(...)` / `from_i32(...)` results
    /// here implies a crate-provided conversion from a missing value into
    /// `MaeveError` — confirm against the error module.
    fn extract_state(
        &'m self,
        context: &Context,
    ) -> Result<&'m State, MaeveError> {
        let state = match Scope::from_i32(context.scope)? {
            Scope::Character => self.person.stats.get(&context.id)?,
            Scope::Level => self.level.state.as_ref().unwrap(),
            Scope::Item => self.items.get(&context.id)?.state.as_ref().unwrap(),
            Scope::Global => self.game.globals.get(&context.id)?,
        };
        return Ok(state);
    }
    /// Applies the change described by `context` to the targeted state,
    /// writing the result back into `self.game` (not into the cached copies).
    fn change_state(&'m mut self, context: &Context) -> Result<(), MaeveError> {
        // Work on a clone of the current state, then store it back below.
        let mut state = self.extract_state(&context)?.clone();
        // TODO(madisetti): Allow for addition, deletion and replacement of
        // tags. I'm just feeling pretty lazy at the moment.
        // Technically only replace.
        match context.content.as_ref().unwrap() {
            &Content::Tags(ref tags) => {
                // Only the first tag slot is replaced.
                state.tags.as_mut_slice()[0] = tags.to_string()
            }
            &Content::Value(value) => state.value = value,
        }
        // TODO: If the reference version of this is created. Use the
        // references to directly set the values on the game object. As per
        // explained earlier, borrow hell, makes this difficult to achieve.
        match Scope::from_i32(context.scope)? {
            Scope::Character => {
                *self.game
                    .person
                    .as_mut()
                    .unwrap()
                    .stats
                    .get_mut(&context.id)? = state
            }
            Scope::Level => {
                self.game.levels.get_mut(&self.person.level)?.state =
                    Some(state)
            }
            // TODO: Only works for character levels for now. Fix.
            Scope::Item => {
                self.game
                    .levels
                    .get_mut(&self.person.level)?
                    .items
                    .get_mut(&context.id)?
                    .state = Some(state)
            }
            Scope::Global => *self.game.globals.get_mut(&context.id)? = state,
        }
        return Ok(());
    }
    /// Tests whether the targeted state currently satisfies `clause`
    /// (tag membership for `Tags`, equality for `Value`).
    fn check_context(&'m self, clause: &Context) -> Result<bool, MaeveError> {
        let state = self.extract_state(clause)?;
        return match clause.content.as_ref().unwrap() {
            &Content::Tags(ref tags) => Ok(state.tags.contains(&tags)),
            &Content::Value(value) => Ok(state.value == value),
        };
    }
    /// Walks a conditional tree: evaluates the clause, then follows the
    /// left branch on success or the right branch on failure. Forks recurse,
    /// changes are applied via `change_state`, and leaves only append text.
    fn evaluate_conditional(
        &'m mut self,
        conditional: &Conditional,
        description: &mut String,
    ) -> Result<(), MaeveError> {
        let branch =
            if self.check_context(&conditional.clause.as_ref().unwrap())? {
                &conditional.left
            } else {
                &conditional.right
            };
        if let &Some(ref branch) = branch {
            match branch.branch.as_ref().unwrap() {
                &Branch::Fork(ref fork) => {
                    return self.evaluate_conditional(&*fork, description)
                }
                &Branch::Change(ref change) => {
                    description.push_str(&change.comment);
                    return self.change_state(change.context.as_ref().unwrap());
                }
                &Branch::Leaf(ref leaf) => description.push_str(&leaf),
            }
        }
        return Ok(());
    }
    /// Applies an action's level transition (if any) and then evaluates its
    /// conditional tree, accumulating narration into `description`.
    fn evaluate(
        &'m mut self,
        maybe_conditional: &Option<&Conditional>,
        description: &mut String,
        resultant_level: &String,
    ) -> Result<(), MaeveError> {
        if ! resultant_level.is_empty() {
            self.game
                .person
                .as_mut()
                .unwrap()
                .level = resultant_level.clone();
            // NOTE(review): this appends the level identifier itself to the
            // narration — confirm that is the intended display text.
            description.push_str(resultant_level);
            description.push_str("\n---\n");
        }
        if let &Some(conditional) = maybe_conditional {
            return self.evaluate_conditional(conditional, description)
        }
        return Ok(());
    }
    /// Executes a parsed `Action` and returns the narration to display.
    /// `Save` persists the game; `NoOp`/`Undefined` produce a stock message.
    pub fn process_action(
        &'m mut self,
        game_action: Action,
    ) -> Result<String, MaeveError> {
        let mut description = String::from("");
        match game_action {
            Action::Act(action) => {
                description.push_str(action.description.as_ref());
                self.evaluate(
                    &action.conditional.as_ref(),
                    &mut description,
                    &action.resultant_level,
                )?;
            }
            Action::Save => save(self.src, &mut self.game)?,
            _ => description.push_str("Didn't do anything..."),
        }
        return Ok(description);
    }
}
| 33.108911 | 80 | 0.533343 |
ab459a96389b22614bb12f1bbaa547331e76cbdc | 1,213 | extern crate serde_json;
use std::net::TcpListener;
use std::io::{BufReader, BufRead};
use std::thread;
use chat::ChatMessage;
use chat::ReplyMessage;
use std::sync::mpsc::Sender;
pub fn listen(sender: Sender<ReplyMessage>) {
let listener = TcpListener::bind("127.0.0.1:2933").expect("Failed to bind port");
for stream in listener.incoming() {
match stream {
Ok(stream) => {
let sender = sender.clone();
thread::spawn(move || {
let reader = BufReader::new(stream);
for line in reader.lines() {
let message: Result<ChatMessage, _> = serde_json::from_str(line.unwrap().as_str());
match message {
Ok(message) => {
println!("{:?}", message);
let _ = sender.send(ReplyMessage(message, None));
},
Err(e) => println!("Error parsing json: {:?}", e),
}
}
});
}
Err(e) => {
println!("{:?}", e);
}
}
}
} | 35.676471 | 107 | 0.436109 |