blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
6739ec9e44e785f4320b4e0dbf7d67f6eee4ade4
Rust
shinh/pfds
/src/stream.rs
UTF-8
3,871
3.359375
3
[]
no_license
// 4.2 Stream
//
// Lazy streams (after Okasaki, "Purely Functional Data Structures", §4.2):
// a cons list whose tail is suspended in a `Thunk` and only evaluated on
// demand. `lazy!` is a project-local macro that builds a `Thunk`; whether
// thunks memoize their result is not visible here — see the `lazy` module.
use std::fmt::Debug;
use lazy::Thunk;

/// One cell of a stream: either the end (`Nil`) or a value plus a
/// suspended rest-of-stream.
#[derive(Clone, Debug)]
pub enum StreamNode<'a, T: 'a + Clone + Debug> {
    Nil,
    Cons(T, Thunk<'a, StreamNode<'a, T>>)
}

/// A stream is just a suspended head cell.
#[derive(Clone, Debug)]
pub struct Stream<'a, T: 'a + Clone + Debug> {
    head: Thunk<'a, StreamNode<'a, T>>
}

impl<'a, T: 'a + Clone + Debug> Iterator for Stream<'a, T> {
    type Item = T;

    // Forces the head cell; on `Cons`, advances `self` to the tail thunk
    // and yields a clone of the value.
    fn next(&mut self) -> Option<T> {
        match self.eval() {
            StreamNode::Nil => None,
            StreamNode::Cons(v, t) => {
                self.head = t.clone();
                Some(v.clone())
            }
        }
    }
}

impl<'a, T: 'a + Clone + Debug> Stream<'a, T> {
    /// Wraps an already-suspended head cell.
    pub fn new(t: Thunk<'a, StreamNode<'a, T>>) -> Self {
        Stream { head: t }
    }

    /// Forces the head thunk and returns the resulting cell.
    pub fn eval(&self) -> StreamNode<'a, T> {
        self.head.eval()
    }

    /// The empty stream.
    pub fn empty() -> Self {
        Stream::new(lazy!(StreamNode::Nil))
    }

    /// Returns a new stream with `v` prepended; `self` is unchanged
    /// (streams are persistent).
    pub fn push(&self, v: T) -> Self {
        let tail = self.head.clone();
        Stream::new(lazy!(StreamNode::Cons(v.clone(), tail.clone())))
    }

    /// Builds a stream from an iterator by pushing elements in reverse,
    /// which preserves the iterator's original order.
    pub fn make<I>(it: I) -> Self where I: DoubleEndedIterator<Item=T> {
        let mut r = Stream::empty();
        for v in it.rev() {
            r = r.push(v);
        }
        r
    }

    /// Cheap iteration handle: iterating the clone consumes the clone's
    /// cursor, leaving `self` usable.
    pub fn iter(&self) -> Self {
        self.clone()
    }

    /// First element, or an error for the empty stream. Forces the head.
    pub fn head(&self) -> Result<T, &str> {
        match self.eval() {
            StreamNode::Nil => Err("head for empty stream"),
            StreamNode::Cons(v, _) => Ok(v),
        }
    }

    /// Stream minus its first element, or an error for the empty stream.
    pub fn tail(&self) -> Result<Self, &str> {
        match self.eval() {
            StreamNode::Nil => Err("tail for empty stream"),
            StreamNode::Cons(_, t) => Ok(Stream::new(t)),
        }
    }

    /// `head` and `tail` in one forced step.
    pub fn pop(&self) -> Result<(T, Self), &str> {
        match self.eval() {
            StreamNode::Nil => Err("pop for empty stream"),
            StreamNode::Cons(v, t) => Ok((v, Stream::new(t))),
        }
    }

    /// Appends `t` after `self`.
    ///
    /// NOTE(review): `self.eval()` runs at call time, so the first cell of
    /// `self` is forced eagerly; only the rest of the concatenation is
    /// suspended inside the `lazy!` thunk.
    pub fn concat(&self, t: Self) -> Self {
        match self.eval() {
            StreamNode::Nil => t,
            StreamNode::Cons(v, s) => Stream::new(lazy!(
                StreamNode::Cons(
                    v.clone(),
                    Stream::new(s.clone()).concat(t.clone()).head)
            ))
        }
    }

    /// First `n` elements of the stream (fewer if the stream is shorter).
    ///
    /// NOTE(review): like `concat`, the current cell is forced eagerly;
    /// the recursive `take_n(n-1)` is deferred behind `lazy!`.
    pub fn take_n(&self, n: usize) -> Self {
        if n == 0 {
            return Self::empty();
        }
        match self.eval() {
            StreamNode::Nil => Self::empty(),
            StreamNode::Cons(v, s) => Stream::new(lazy!(
                StreamNode::Cons(v.clone(),
                                 Stream::new(s.clone()).take_n(n-1).head)
            ))
        }
    }

    // Accumulator-style reversal: walks `s`, consing each element onto the
    // accumulator `r`. Each recursive step is wrapped in
    // `lazy!(... .eval())`, so the remaining work is suspended behind the
    // returned thunk and performed only when that thunk is forced.
    fn reverse_impl(s: Thunk<'a, StreamNode<'a, T>>,
                    r: Thunk<'a, StreamNode<'a, T>>)
                    -> Thunk<'a, StreamNode<'a, T>> {
        match s.eval() {
            StreamNode::Nil => r,
            StreamNode::Cons(v, s) => {
                let c = lazy!(StreamNode::Cons(v.clone(), r.clone()));
                lazy!(Self::reverse_impl(s.clone(), c.clone()).eval())
            }
        }
    }

    /// Returns the stream reversed, starting from an empty accumulator.
    pub fn reverse(&self) -> Self {
        Stream::new(Self::reverse_impl(
            self.head.clone(),
            lazy!(StreamNode::Nil)))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_stream() {
        // Construction and iteration preserve source order.
        let s = Stream::make(1..4);
        assert_eq!(1, s.head().unwrap());
        assert_eq!(vec![1,2,3], s.iter().collect::<Vec<i32>>());
        // Concatenation keeps both halves in order.
        let s = Stream::make(1..4).concat(Stream::make(5..7));
        assert_eq!(vec![1,2,3,5,6], s.iter().collect::<Vec<i32>>());
        let s = Stream::make(1..4).reverse();
        assert_eq!(vec![3,2,1], s.iter().collect::<Vec<i32>>());
        // take_n stops after n elements of the concatenated stream.
        let s = Stream::make(1..4).concat(Stream::make(5..10));
        assert_eq!(vec![1,2,3,5,6], s.take_n(5).iter().collect::<Vec<i32>>());
    }
}
true
35363df128bb765cc7b1dd3bb70f0c443a6e828b
Rust
jaysonsantos/rust-jwt
/src/crypt.rs
UTF-8
767
2.765625
3
[ "MIT" ]
permissive
use crypto::digest::Digest;
use crypto::hmac::Hmac;
use crypto::mac::{Mac, MacResult};
use base64;

/// Computes the HMAC of `data` with `key` using `digest`, returned as
/// URL-safe, unpadded base64 (the JWT signature encoding).
pub fn sign<D: Digest>(data: &str, key: &[u8], digest: D) -> String {
    let mut mac = Hmac::new(digest, key);
    mac.input(data.as_bytes());
    let result = mac.result();
    base64::encode_config(result.code(), base64::URL_SAFE_NO_PAD)
}

/// Checks that `target` (URL-safe, unpadded base64) is the HMAC of `data`
/// under `key` with `digest`.
///
/// Returns `false` when `target` is not valid base64 or when the computed
/// MAC differs. The comparison happens between `MacResult` values rather
/// than raw byte slices.
pub fn verify<D: Digest>(target: &str, data: &str, key: &[u8], digest: D) -> bool {
    let decoded = match base64::decode_config(target, base64::URL_SAFE_NO_PAD) {
        Ok(bytes) => bytes,
        Err(_) => return false,
    };
    let expected = MacResult::new_from_owned(decoded);

    let mut mac = Hmac::new(digest, key);
    mac.input(data.as_bytes());
    mac.result() == expected
}
true
fc4286b156f976f0bbcefada5482dc2e6df20d1e
Rust
vedangj044/drogue-cloud
/console-frontend/src/backend/mod.rs
UTF-8
8,408
2.53125
3
[ "Apache-2.0" ]
permissive
use anyhow::Context;
use chrono::{DateTime, Utc};
use drogue_cloud_console_common::UserInfo;
use http::{Response, Uri};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::{sync::RwLock, time::Duration};
use url::Url;
use yew::{format::Text, prelude::*, services::fetch::*, utils::window};

/// Backend information
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct BackendInformation {
    // Base URL of the backend; per-request paths replace its path component.
    pub url: Url,
    // Optional note shown on the login screen; absent in older payloads.
    #[serde(default)]
    pub login_note: Option<String>,
}

impl BackendInformation {
    /// Builds an absolute URL by swapping the base URL's path for `path`.
    pub fn url<S: AsRef<str>>(&self, path: S) -> Url {
        let mut result = self.url.clone();
        result.set_path(path.as_ref());
        result
    }

    /// Same as [`Self::url`], re-parsed as an `http::Uri`.
    /// Panics (`unwrap`) if the serialized URL is not a valid `Uri`.
    pub fn uri<S: AsRef<str>>(&self, path: S) -> Uri {
        self.url(path).to_string().parse().unwrap()
    }

    /// Same as [`Self::url`], rendered as a `String`.
    pub fn url_str<S: AsRef<str>>(&self, path: S) -> String {
        self.url(path).into()
    }

    /// Issues an authenticated request with default [`RequestOptions`]
    /// (re-authentication enabled). See [`Self::request_with`].
    pub fn request<S, IN, OUT: 'static>(
        &self,
        method: http::Method,
        path: S,
        payload: IN,
        headers: Vec<(&str, &str)>,
        callback: Callback<Response<OUT>>,
    ) -> Result<FetchTask, anyhow::Error>
    where
        S: AsRef<str>,
        IN: Into<Text>,
        OUT: From<Text>,
    {
        self.request_with(method, path, payload, headers, Default::default(), callback)
    }

    /// Issues an authenticated request against the backend.
    ///
    /// Attaches the current access token as a `Bearer` header. If no valid
    /// token exists, triggers the re-authentication flow (unless
    /// `options.disable_reauth`) and returns an error. The returned
    /// `FetchTask` must be kept alive for the request to complete
    /// (yew fetch semantics).
    pub fn request_with<S, IN, OUT: 'static>(
        &self,
        method: http::Method,
        path: S,
        payload: IN,
        headers: Vec<(&str, &str)>,
        options: RequestOptions,
        callback: Callback<Response<OUT>>,
    ) -> Result<FetchTask, anyhow::Error>
    where
        S: AsRef<str>,
        IN: Into<Text>,
        OUT: From<Text>,
    {
        let request = http::request::Builder::new()
            .method(method)
            .uri(self.uri(path));

        // Missing/expired token: kick off re-auth (best effort, result
        // ignored) and bail out before sending anything.
        let token = match Backend::access_token() {
            Some(token) => token,
            None => {
                if !options.disable_reauth {
                    Backend::reauthenticate().ok();
                    return Err(anyhow::anyhow!("Performing re-auth"));
                }
                return Err(anyhow::anyhow!("Missing token"));
            }
        };

        let mut request = request.header("Authorization", format!("Bearer {}", token));
        for (k, v) in headers {
            request = request.header(k, v);
        }

        let request = request.body(payload).context("Failed to create request")?;

        let task = FetchService::fetch_with_options(
            request,
            FetchOptions {
                cache: Some(Cache::NoCache),
                credentials: Some(Credentials::Include),
                redirect: Some(Redirect::Follow),
                mode: Some(Mode::Cors),
                ..Default::default()
            },
            // Intercept every response to detect auth failures before
            // handing it to the caller's callback.
            callback.reform(move |response: Response<_>| {
                log::info!("Backend response code: {}", response.status().as_u16());
                match response.status().as_u16() {
                    401 | 403 | 408 if !options.disable_reauth => {
                        // 408 is "sent" by yew if the request fails, which it does when CORS is in play
                        Backend::reauthenticate().ok();
                    }
                    _ => {}
                };
                response
            }),
        )
        .map_err(|err| anyhow::anyhow!("Failed to fetch: {:?}", err))?;

        Ok(task)
    }
}

/// Per-request knobs for [`BackendInformation::request_with`].
#[derive(Clone, Debug, Default)]
pub struct RequestOptions {
    // When true, auth failures do not trigger the re-auth redirect.
    pub disable_reauth: bool,
}

/// The process-wide backend handle: endpoint info plus (optionally) the
/// current token. Stored in [`CONSOLE_BACKEND`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Backend {
    pub info: BackendInformation,
    token: Option<Token>,
}

/// OAuth-style token bundle received from the login flow.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Token {
    pub access_token: String,
    // `None` means "no known expiry"; such tokens never count as expired.
    pub expires: Option<DateTime<Utc>>,
    pub id_token: String,
    pub refresh_token: Option<String>,
    pub userinfo: Option<UserInfo>,
}

impl Token {
    /// True when less than 30 s of validity remain.
    ///
    /// NOTE(review): `valid_for()` returns `None` both for "no expiry" and
    /// (via the failing negative-duration `to_std()` conversion) for a
    /// token whose expiry has already passed — so an already-expired token
    /// reports `false` here. Confirm this is intended.
    pub fn is_expired(&self) -> bool {
        self.valid_for()
            .map_or(false, |timeout| timeout.as_secs() < 30)
    }

    /// Remaining validity, or `None` when there is no expiry or the
    /// expiry is already in the past (negative duration).
    pub fn valid_for(&self) -> Option<Duration> {
        self.expires
            .map(|expires| expires.signed_duration_since(Utc::now()))
            .and_then(|expires| expires.to_std().ok())
    }

    /// `Some(self)` while not expired, else `None` — convenient for
    /// `Option` chaining.
    pub fn if_valid(&self) -> Option<&Self> {
        if self.is_expired() {
            None
        } else {
            Some(self)
        }
    }
}

// Process-wide singleton holding the current backend; guarded by an RwLock
// so readers (most accesses) don't contend.
static CONSOLE_BACKEND: Lazy<RwLock<Option<Backend>>> = Lazy::new(|| RwLock::new(None));

impl Backend {
    /// Return the backend endpoint, or [`Option::None`].
    pub fn get() -> Option<Backend> {
        CONSOLE_BACKEND.read().unwrap().clone()
    }

    /// Absolute URL for `path` against the current backend, if set.
    pub fn url<S: AsRef<str>>(path: S) -> Option<Url> {
        Self::get().map(|backend| backend.info.url(path))
    }

    /// `http::Uri` for `path` against the current backend, if set.
    #[allow(dead_code)]
    pub fn uri<S: AsRef<str>>(path: S) -> Option<Uri> {
        Self::get().map(|backend| backend.info.uri(path))
    }

    /// String URL for `path` against the current backend, if set.
    pub fn url_str<S: AsRef<str>>(path: S) -> Option<String> {
        Self::get().map(|backend| backend.info.url_str(path))
    }

    /// Get the access token, if it is not expired yet
    pub fn access_token() -> Option<String> {
        Self::get()
            .and_then(|b| b.token)
            .as_ref()
            .and_then(|t| t.if_valid())
            .map(|token| token.access_token.clone())
    }

    /// Get full token information
    pub fn token() -> Option<Token> {
        Self::get().and_then(|b| b.token)
    }

    /// Replaces the global backend; the token is reset to `None`.
    pub(crate) fn set(info: Option<BackendInformation>) {
        *CONSOLE_BACKEND.write().unwrap() = info.map(|info| Backend { info, token: None });
    }

    // Mutates the global backend under the write lock; no-op when unset.
    fn update<F>(f: F)
    where
        F: FnOnce(&mut Backend),
    {
        let mut backend = CONSOLE_BACKEND.write().unwrap();
        if let Some(ref mut backend) = *backend {
            f(backend);
        }
    }

    /// Stores (or clears) the current token on the global backend.
    pub(crate) fn update_token(token: Option<Token>) {
        Self::update(|backend| backend.token = token);
    }

    /// Base URL of this backend instance, as a string.
    pub fn current_url(&self) -> String {
        self.info.url.to_string()
    }

    /// Static variant of [`BackendInformation::request`], using the global
    /// backend and no extra headers.
    pub fn request<S, IN, OUT: 'static>(
        method: http::Method,
        path: S,
        payload: IN,
        callback: Callback<Response<OUT>>,
    ) -> Result<FetchTask, anyhow::Error>
    where
        S: AsRef<str>,
        IN: Into<Text>,
        OUT: From<Text>,
    {
        Self::request_with(method, path, payload, Default::default(), callback)
    }

    /// Static variant of [`BackendInformation::request_with`], using the
    /// global backend; fails when no backend is configured.
    pub fn request_with<S, IN, OUT: 'static>(
        method: http::Method,
        path: S,
        payload: IN,
        options: RequestOptions,
        callback: Callback<Response<OUT>>,
    ) -> Result<FetchTask, anyhow::Error>
    where
        S: AsRef<str>,
        IN: Into<Text>,
        OUT: From<Text>,
    {
        Self::get()
            .ok_or_else(|| anyhow::anyhow!("Missing backend"))?
            .info
            .request_with(method, path, payload, vec![], options, callback)
    }

    /// Navigates the browser to the backend login endpoint.
    pub fn reauthenticate() -> Result<(), anyhow::Error> {
        Self::navigate_to(
            "/api/console/v1alpha1/ui/login",
            "Trigger re-authenticate flow",
        )
    }

    /// Navigates the browser to the backend logout endpoint.
    pub fn logout() -> Result<(), anyhow::Error> {
        Self::navigate_to("/api/console/v1alpha1/ui/logout", "Trigger logout flow")
    }

    // Full-page navigation to a backend path; `op` only labels the debug
    // log line. Panics (`unwrap`) if the browser rejects the assignment.
    fn navigate_to<S: AsRef<str>>(path: S, op: &str) -> Result<(), anyhow::Error> {
        let target = Backend::url_str(path).context("Backend information missing");
        log::debug!("{}: {:?}", op, target);
        window().location().set_href(&target?).unwrap();
        Ok(())
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use chrono::DateTime;

    // Kept as a manual toggle for verbose test logging.
    fn setup() {
        /*
        env_logger::builder()
            .filter_level(log::LevelFilter::Debug)
            .init();
        */
    }

    #[test]
    fn test_date_parser() {
        setup();
        let str = "2020-11-30T11:33:37.437915952Z";
        let date = DateTime::parse_from_rfc3339(str);
        assert!(date.is_ok());
    }

    #[test]
    fn test_valid_for() {
        setup();
        // A token valid for another 120 s must be neither expired nor
        // without remaining validity.
        let date = Utc::now() + chrono::Duration::seconds(120);
        let token = Token {
            access_token: String::new(),
            id_token: String::new(),
            refresh_token: None,
            expires: Some(date),
            userinfo: None,
        };
        assert!(!token.is_expired());
        assert!(token.valid_for().is_some());
    }
}
true
cd41d96144eaa3c5ce852d46ed2186de71830468
Rust
lilydjwg/pid_children
/src/util.rs
UTF-8
1,585
3.203125
3
[ "BSD-3-Clause" ]
permissive
use std::fs::{File,read_dir};
use std::io::{BufRead, BufReader};
use std::collections::HashMap;

/// Returns every transitive descendant of `pid`, children before their own
/// descendants.
///
/// Scans `/proc` once to build a parent -> children table, then walks it
/// recursively. Panics if `/proc` itself cannot be read.
pub fn get_all_children_for_pid(pid: &str) -> Vec<String> {
    let mut children_map: HashMap<String, Vec<String>> = HashMap::new();
    for entry in read_dir("/proc").unwrap() {
        let name = entry.unwrap().file_name().to_string_lossy().into_owned();
        // Only purely numeric /proc entries are processes.
        if name.parse::<usize>().is_err() {
            continue;
        }
        if let Some(parent) = get_ppid_for(&name) {
            children_map.entry(parent).or_insert_with(Vec::new).push(name);
        }
    }
    get_all_children_for_pid_from_map(&children_map, pid)
}

// Depth-first walk of the parent -> children table rooted at `pid`: each
// direct child is emitted immediately before its own descendants.
fn get_all_children_for_pid_from_map(map: &HashMap<String,Vec<String>>, pid: &str) -> Vec<String> {
    let mut result = Vec::new();
    if let Some(direct) = map.get(pid) {
        for child in direct {
            result.push(child.clone());
            result.extend(get_all_children_for_pid_from_map(map, child));
        }
    }
    result
}

// Extracts the `PPid:` field from `/proc/<pid>/status`. Returns `None`
// when the process has vanished or the file cannot be read.
fn get_ppid_for(pid: &str) -> Option<String> {
    let status_path = format!("/proc/{}/status", pid);
    let file = File::open(&status_path).ok()?;
    for line in BufReader::new(file).lines() {
        let line = line.ok()?;
        if line.starts_with("PPid:") {
            return Some(line.split_whitespace().nth(1).unwrap().to_string());
        }
    }
    None
}
true
213bcd5fa9ce26fa7b30865c679fa56a35403c89
Rust
MSDimos/commander-rust
/crates/commander-rust-core/tests/conversion_test.rs
UTF-8
9,132
2.859375
3
[ "MIT" ]
permissive
//! Integration tests for the `commander-rust-core` conversion layer:
//! turning parsed CLI `Arg`/`Args` values into Rust types via the
//! `FromArg`/`FromArgs` traits, and driving a full parse into an
//! `Application` converter.
#![feature(int_error_matching)]

use commander_rust_core::converters::{Arg, Args, Application};
use commander_rust_core::traits::{PushOptions, PushSubCommand};
use commander_rust_core::converters::{FromArgs, FromArg, Mixed};
use std::num::ParseIntError;
use std::path::PathBuf;
use commander_rust_core::parser::{Segment, SegmentWrapper};
use std::ffi::OsString;
use commander_rust_core::{Command, Options, SubCommand};
use std::collections::HashMap;

// Asserts that a single `Arg` converts to each listed integer type, both
// directly (`<$ty>::from_arg`) and wrapped in `Option`.
macro_rules! test_ints {
    ($src: expr => $($ty: ty = $val: expr; )*) => {
        $({
            let a: Result<$ty, ParseIntError> = <$ty>::from_arg(&$src);
            assert_eq!(a, Ok($val));
            let oa: Option<$ty> = Option::from_arg(&$src).unwrap();
            assert_eq!(oa, Some($val));
        })*
    };
}

// Asserts that an `Args` list converts to `Vec<$ty>` for each listed type.
macro_rules! test_ints_vec {
    ($src: expr => $($ty: ty = $val: expr; )*) => {
        $({
            let va: Vec<$ty> = Vec::from_args(&$src).unwrap();
            assert_eq!(va, $val);
        })*
    };
}

// Built-in conversions: integers (scalar and Vec), overflow failure,
// String and PathBuf pass-through.
#[test]
fn conversion_test() {
    let args = Args(vec![
        "1024".to_string(),
        "2048".to_string(),
        "9086".to_string()
    ]
    .into_iter()
    .map(|n| Arg(n))
    .collect()
    );
    let arg = Arg("123".to_string());

    // "123" fits every unsigned and signed width.
    test_ints! {arg =>
        u8 = 123;
        u16 = 123;
        u32 = 123;
        u64 = 123;
        u128 = 123;
    } ;
    test_ints! {arg =>
        i8 = 123;
        i16 = 123;
        i32 = 123;
        i64 = 123;
        i128 = 123;
    } ;
    // 9086 doesn't fit u8/i8, so those widths are omitted here.
    test_ints_vec! {args =>
        u16 = vec![1024, 2048, 9086];
        u32 = vec![1024, 2048, 9086];
        u64 = vec![1024, 2048, 9086];
        u128 = vec![1024, 2048, 9086];
    } ;
    test_ints_vec! {args =>
        i16 = vec![1024, 2048, 9086];
        i32 = vec![1024, 2048, 9086];
        i64 = vec![1024, 2048, 9086];
        i128 = vec![1024, 2048, 9086];
    } ;

    // 256 overflows u8, so the conversion must fail.
    let err_u8 = u8::from_arg(&Arg("256".to_string()));
    assert!(err_u8.is_err());

    let str = String::from_arg(&Arg("hello world!".to_string()));
    assert_eq!(Ok("hello world!".to_string()), str);

    let path_buf = PathBuf::from_arg(&Arg("+-*".to_string()));
    assert_eq!(Ok(PathBuf::from("+-*")), path_buf);
}

// A user-defined type can implement `FromArg` itself.
#[test]
fn customize_arg_conversion_test() {
    #[derive(Eq, PartialEq, Debug)]
    struct MyU8 {
        num: u8,
    };

    impl<'a> FromArg<'a> for MyU8 {
        type Error = ();

        fn from_arg(arg: &'a Arg) -> Result<Self, Self::Error> {
            // Delegate to the built-in u8 parse, discarding the error detail.
            match u8::from_arg(arg) {
                Ok(num) => Ok(MyU8 { num }),
                Err(_) => Err(()),
            }
        }
    }

    assert_eq!(
        Ok(MyU8 {
            num: 127,
        }),
        MyU8::from_arg(&Arg("127".to_string()))
    );
}

// A user-defined type can implement `FromArgs` over a whole argument list.
#[test]
fn customize_args_conversion_test() {
    #[derive(Eq, PartialEq, Debug)]
    struct Person {
        name: String,
        age: u8,
    }

    impl<'a> FromArgs<'a> for Person {
        type Error = ();

        // Expects exactly [name, age]; any other shape or parse failure
        // is collapsed into Err(()).
        fn from_args(args: &'a Args) -> Result<Self, Self::Error> {
            if args.len() != 2 {
                Err(())
            } else {
                let name = String::from_arg(&args[0]);
                let age = u8::from_arg(&args[1]);

                if name.is_ok() && age.is_ok() {
                    Ok(Person {
                        name: name.unwrap(),
                        age: age.unwrap(),
                    })
                } else {
                    Err(())
                }
            }
        }
    }

    assert_eq!(
        Ok(Person {
            name: "Jack".to_string(),
            age: 46,
        }),
        Person::from_args(&Args(vec![Arg("Jack".to_string()), Arg("46".to_string())])),
    );
}

// End-to-end: define a `net send` CLI, parse a simulated argv, and pull
// typed values (including a custom multi-arg `Headers` type) out of the
// resulting converter.
#[test]
fn converter_test() {
    // `net` command with a `send <..content>` sub-command and its options.
    let mut command = Command::from(r#"net"#);
    let mut sub_cmd = SubCommand::from(r#"net -> send <..content>"#);

    command.push_option(Options::from(r#"-s, --ssl, "using ssl"#));
    sub_cmd.push_option(Options::from(r#"-m, --method <method>, "method to use""#));
    sub_cmd.push_option(Options::from(r#"-u, --url <url>, "url of target""#));
    sub_cmd.push_option(Options::from(r#"-c, --content-type <content_type>, "content-type of sending message""#));
    sub_cmd.push_option(Options::from(r#"--max-size <size>, "max size(b) of message""#));
    sub_cmd.push_option(Options::from(r#"--headers [..headers], "headers of request""#));
    command.push_sub_command(sub_cmd);

    // Simulated argv; the leading binary path is deliberately omitted.
    let args_os: Vec<OsString> = vec![
        // "/path/of/cli", // ignore it
        "send",
        "--method", "post",
        "--url", "https://www.example.com",
        "--content-type", "text/plain",
        "--headers",
        "Accept=*/*",
        "Accept-Language=zh-CN; en-US",
        "User-Agent=Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0",
        "Connection=keep-alive",
        "--max-size", "1024",
        "--",
        "hello world!"
    ]
    .iter()
    .map(|s| OsString::from(s)).collect();

    let mut segments = SegmentWrapper(Segment::from_vec(args_os));
    let converter = Application::from_parser_result(&segments.parse_test(&command), &command).unwrap();

    #[derive(Eq, PartialEq, Debug)]
    struct Headers {
        accept: String,
        accept_language: String,
        user_agent: String,
        connection: String,
    }

    impl<'a> FromArgs<'a> for Headers {
        type Error = ();

        // Parses `Key=Value` pairs; all four known keys must be present
        // exactly, anything else is rejected.
        fn from_args(args: &'a Args) -> Result<Self, Self::Error> {
            let mut map = HashMap::new();
            let keys = ["Accept", "Accept-Language", "User-Agent", "Connection"];

            for arg in args.iter() {
                if let Ok(s) = String::from_arg(arg) {
                    let mut s: Vec<String> = s.split_terminator('=').into_iter().map(|s| s.to_string()).collect();

                    if s.len() != 2 {
                        return Err(());
                    } else if keys.contains(&s[0].as_str()) {
                        map.insert(s.remove(0), s.remove(0));
                    } else {
                        return Err(());
                    }
                } else {
                    return Err(());
                }
            }

            if map.len() != keys.len() {
                Err(())
            } else {
                Ok(Headers {
                    accept: map.remove("Accept").unwrap(),
                    accept_language: map.remove("Accept-Language").unwrap(),
                    user_agent: map.remove("User-Agent").unwrap(),
                    connection: map.remove("Connection").unwrap(),
                })
            }
        }
    }

    // The `--` argument lands in the sub-command's `<..content>` slot, not
    // in the top-level command args.
    assert!(converter.cmd_args.is_empty());

    if let Mixed::Multiply(args) = converter.sub_args.get("content").unwrap() {
        let contents: Vec<String> = Vec::from_args(args).unwrap_or(vec![]);
        assert_eq!(vec!["hello world!".to_string()], contents);
    } else {
        assert!(false);
    }

    assert_eq!(Some("send".to_string()), converter.sub_name);

    // Each option exposes a map keyed by its declared argument name.
    if let Some(map) = converter.get_opt("method") {
        if let Some(Mixed::Single(arg)) = map.get("method") {
            let tmp = String::from_arg(arg).unwrap_or(String::new());
            assert_eq!("post", tmp);
        } else {
            assert!(false);
        }
    } else {
        assert!(false);
    }

    if let Some(map) = converter.get_opt("url") {
        if let Some(Mixed::Single(arg)) = map.get("url") {
            let tmp = String::from_arg(arg).unwrap_or(String::new());
            assert_eq!("https://www.example.com", tmp);
        } else {
            assert!(false);
        }
    } else {
        assert!(false);
    }

    if let Some(map) = converter.get_opt("content-type") {
        if let Some(Mixed::Single(arg)) = map.get("content_type") {
            let tmp = String::from_arg(arg).unwrap_or(String::new());
            assert_eq!("text/plain", tmp);
        } else {
            assert!(false);
        }
    } else {
        assert!(false);
    }

    if let Some(map) = converter.get_opt("max-size") {
        if let Some(Mixed::Single(arg)) = map.get("size") {
            let tmp = u16::from_arg(arg).unwrap_or(0);
            assert_eq!(1024, tmp);
        } else {
            assert!(false);
        }
    } else {
        assert!(false);
    }

    // Variadic option ([..headers]) converts through the custom type above.
    if let Some(map) = converter.get_opt("headers") {
        if let Some(Mixed::Multiply(args)) = map.get("headers") {
            let tmp = <Headers as FromArgs>::from_args(args).unwrap();
            assert_eq!(Headers {
                accept: "*/*".to_string(),
                accept_language: "zh-CN; en-US".to_string(),
                user_agent: "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0".to_string(),
                connection: "keep-alive".to_string(),
            }, tmp);
        } else {
            assert!(false);
        }
    } else {
        assert!(false);
    }

    // `--ssl` was never passed; `-u/--url` was.
    assert!(!converter.contains_opt("ssl"));
    assert!(!converter.contains_opt("s"));
    assert!(converter.contains_opt("u"));
    assert!(converter.contains_opt("url"));
}
true
2c683e72cfc1e8ab5d8012bad9abf2e58dca4df5
Rust
vuapo-eth/bee
/bee-core/src/lib.rs
UTF-8
2,009
3
3
[]
no_license
//! Core functionality #![deny(bad_style, missing_docs, unsafe_code)] #![cfg_attr(release, deny(warnings))] pub mod constants; use constants::{NAME, VERSION}; use std::fmt; use std::io::stdout; use std::time::{Duration, Instant}; use log::info; use tokio::prelude::*; use tokio::runtime::Runtime; use tokio::timer::Interval; enum State { Starting, Running, Stopping, } impl fmt::Display for State { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { State::Starting => write!(f, "Starting..."), State::Running => write!(f, "Running..."), State::Stopping => write!(f, "Stopping..."), } } } /// The Bee node. pub struct Bee { runtime: Runtime, state: State, } impl Bee { /// Creates a new Bee node. /// /// # Example /// ``` /// use bee_core::Bee; /// /// let bee = Bee::new(); /// ``` pub fn new() -> Self { let state = State::Starting; info!("{}", state); Self { runtime: Runtime::new().expect("Couldn't create Tokio runtime."), state } } /// Starts the node. /// /// # Example /// ``` /// use bee_core::Bee; /// /// let mut bee = Bee::new(); /// bee.init(); /// ``` pub fn init(&mut self) { let processing = Interval::new(Instant::now(), Duration::from_millis(250)); self.runtime.spawn( processing .for_each(|_| { print!("."); stdout().flush().unwrap(); Ok(()) }) .map_err(|_| {}), ); } /// Starts the node. pub fn run(mut self) { self.state = State::Running; info!("{}", self.state); self.runtime.shutdown_on_idle().wait().unwrap(); self.state = State::Stopping; info!("{}", self.state); } } /// Returns a nice greeting. pub fn get_name() -> String { format!("{} v{}", NAME, VERSION) }
true
5048229b8481808c0599d3dbed02de0cc5f029e9
Rust
TheWestDevop/smor-user-rust-api
/src/user_handler.rs
UTF-8
20,859
2.515625
3
[]
no_license
use diesel::prelude::*; use diesel::PgConnection; use crate::models::*; use bcrypt::{DEFAULT_COST, hash,verify}; use rocket_contrib::json::{JsonValue}; use crate::auth::*; use crate::schema; use chrono::Local; use lettre::*; use smtp::{ConnectionReuseParameters, authentication::*}; use native_tls::{Protocol, TlsConnector}; use dotenv::dotenv; use std::env; pub fn send_mail2_user(email:String,subject:String,body:String) -> Result<lettre::smtp::response::Response, lettre::smtp::error::Error>{ dotenv().ok(); let mail_account = env::var("MAIL_ACCOUNT").expect("Error loading GMAIL_ACCOUNT. \n Company email is required!!! ."); let mail_password = env::var("MAIL_PASSWORD").expect("Error loading GMAIL_PASSWORD. \n Company email password is required!!! ."); let email = SendableEmail::new( Envelope::new( Some(EmailAddress::new("no-reply@bloomrydes.com".to_string()).unwrap()), vec![EmailAddress::new(email.to_string()).unwrap()], ).unwrap(), subject.to_string(), body.to_string().into_bytes(), ); let mut tls_builder = TlsConnector::builder(); tls_builder.min_protocol_version(Some(Protocol::Tlsv10)); let tls_parameters = ClientTlsParameters::new( "mail.bloomrydes.com".to_string(), tls_builder.build().unwrap() ); let mut mailer = SmtpClient::new( ("mail.bloomrydes.com", 587), ClientSecurity::Wrapper(tls_parameters) ).unwrap() .authentication_mechanism(Mechanism::Login) .credentials(Credentials::new( mail_account.to_string(), mail_password.to_string() )) .connection_reuse(ConnectionReuseParameters::ReuseUnlimited) .transport(); let result = mailer.send(email); mailer.close(); return result; // assert!(result.is_ok()); } pub fn login_user(con:PgConnection,user:String,password:String,app:String) -> JsonValue{ let clean_password = password.trim(); let clean_email = user.trim(); let clean_app = app.trim().to_lowercase(); if clean_password.is_empty() || clean_email.is_empty() || clean_app.is_empty() { json!({ "status":false, "message":"invalid email or password" }) }else{ use 
schema::smor_users::dsl::*; let results = smor_users.filter(email.eq(clean_email)) .load::<User>(&con).expect("Error unable to fetch user"); if results.is_empty() { json!({ "status":false, "message":"invalid email or password" }) }else { if !results[0].status { json!({ "status":false, "message":"Your account has been locked, contact customer care" }) }else{ let verify_admin = verify(clean_password, &results[0].password); match verify_admin { Ok(valid) => { match valid { true => { let iat = Local::now().to_string(); let user = format!("{}{}{}",results[0].name,results[0].email,results[0].user_id).to_string(); let u_role = &results[0].role.to_string(); let token = generate_token(&user,&iat,&u_role); match clean_app.as_str() { "user" => { let mail_user = send_mail2_user( results[0].email.to_string(), "Account Login".to_string(), "Your smorfarm account has been logged in".to_string() ); match mail_user { Ok(res) => println!("mail sent to user with response {:?}",res), Err(err) => println!("mail error {:?}",err) }; json!( { "status":true, "data":{ "user_id":results[0].user_id, "name":results[0].name, "avatar":results[0].avatar, "phone":results[0].phone, "email":results[0].email, "token":token } } ) }, "chef" => { use schema::smor_chef_profiles::dsl::*; let profile_result = smor_chef_profiles.filter(user_id.eq(&results[0].user_id)) .load::<Chef>(&con).expect("Error unable to fetch chef profile"); if profile_result.is_empty() { json!( { "status":true, "data":{ "id":results[0].id, "user_id":results[0].user_id, "name":results[0].name, "avatar":results[0].avatar, "phone":results[0].phone, "email":results[0].email, "role":results[0].role, "status":results[0].status, "token":token, "nickname":"", "dish":"", "dish_cost":"", "details":"", "rating":0, "experience":"", "next_of_kin_full_name":"", "next_of_kin_address":"", "next_of_kin_phone":"", "next_of_kin_relationship":"", "state":"", "lga":"", "icon":"", "availability_status":false, "verification_status":false, } } ) } else { 
json!( { "status":true, "data":{ "id":results[0].id, "user_id":results[0].user_id, "name":results[0].name, "avatar":results[0].avatar, "phone":results[0].phone, "email":results[0].email, "role":results[0].role, "status":results[0].status, "token":token, "nickname":profile_result[0].nickname, "dish":profile_result[0].dish, "dish_cost":profile_result[0].dish_cost, "details":profile_result[0].details, "rating":profile_result[0].rating, "experience":profile_result[0].experience, "next_of_kin_full_name":profile_result[0].next_of_kin_full_name, "next_of_kin_address":profile_result[0].next_of_kin_address, "next_of_kin_phone":profile_result[0].next_of_kin_phone, "next_of_kin_relationship":profile_result[0].next_of_kin_relationship, "state":profile_result[0].state, "lga":profile_result[0].lga, "icon":profile_result[0].icon, "availability_status":profile_result[0].availability_status, "verification_status":profile_result[0].verification_status, } } ) } }, _ => json!({ "status":false, "message":"Invalid app specified" }) } }, false => json!({ "status":false, "message":"Invalid email or password" }), } }, Err(_) => json!({ "status":false, "message":"Invalid email or password" }) } } } } } pub fn register_user(con:PgConnection,n_name:String,n_phone:String,n_email:String,pass:String)-> JsonValue { use schema::smor_users::dsl::*; let n_password = hash(pass, DEFAULT_COST).unwrap(); let results = smor_users.filter(email.eq(&n_email)) .load::<User>(&con).expect("Error unable to fetch user by email"); if results.len().eq(&0) { use schema::smor_users; let new_user = NewUser::new(n_name, n_phone, n_email, n_password); let result = diesel::insert_into(smor_users::table) .values(new_user) .get_result::<User>(&con) .expect("Error creating new user"); let iat = Local::now().to_string(); let user = format!("{}{}{}",result.name,result.email,result.user_id).to_string(); let u_role = &result.role.to_string(); let token = generate_token(&user,&iat,&u_role); let user = json!({ 
"user_id":result.user_id, "name":result.name, "phone":result.phone, "avatar":result.avatar, "email":result.email, "token":token, }); json!({ "status":true, "data":user }) }else{ json!({ "status":false, "message":"an account exist with given email" }) } } pub fn register_chef_detail(con:PgConnection,chef:NewChef) -> JsonValue { use schema::smor_chef_profiles; let result = diesel::insert_into(smor_chef_profiles::table) .values(chef) .get_result::<Chef>(&con); match result { Ok(r) => json!({ "status": true, "data":r }), Err(err) =>{ println!("Server Error {:?}", err); json!({ "status": false, "message":"Server Error, Kindly try again" }) } } } pub fn update_user_profile(con:PgConnection,user:UpdateUser) -> JsonValue { use schema::smor_users::dsl::*; let results = diesel::update(&user) .set(( name.eq(&user.name), phone.eq(&user.phone), email.eq(&user.email), update_at.eq(&user.update_at) )) .get_result::<User>(&con); match results { Ok(r) => json!({ "status": true, "data":{ "id":r.id, "user_id":r.user_id, "name":r.name, "avatar":r.avatar, "phone":r.phone, "email":r.email, "status":r.status, } }), Err(err) => { println!("Server Error {:?}", err); json!({ "status": false, "message":"Server Error, Kindly try again" }) } } } pub fn update_user_avatar(con:PgConnection,url:String,uid:String) -> JsonValue { use schema::smor_users::dsl::*; let result = diesel::update(smor_users.filter(user_id.eq(&uid))) .set( avatar.eq(&url), ) .execute(&con); match result { Ok(_r) => json!({ "status": true, "data":"Profile Picture Uploaded successfully" }), Err(err) => { println!("Server Error {:?}", err); json!({ "status": false, "message":"Server Error, Kindly try again" }) } } } pub fn update_chef_profile(con:PgConnection,chef:UpdateChef)-> JsonValue { use schema::smor_chef_profiles::dsl::*; let results = diesel::update(smor_chef_profiles.filter(user_id.eq(&chef.user_id))) .set(( nickname.eq(&chef.nickname), dish.eq(&chef.dish), details.eq(&chef.details), icon.eq(&chef.icon), 
experience.eq(&chef.experience), state.eq(&chef.state), lga.eq(&chef.lga), update_at.eq(&chef.update_at) )) .get_result::<Chef>(&con); // .expect("Error updating chef profile"); match results { Ok(r) => json!({ "status": true, "data":r }), Err(err) =>{ println!("Server Error {:?}", err); json!({ "status": false, "message":"Server Error, Kindly try again" }) } } } pub fn disable_enable_availability(con:PgConnection,uid:String,status:bool)-> JsonValue { use schema::smor_chef_profiles::dsl::*; diesel::update(smor_chef_profiles.filter(user_id.eq(&uid))) .set(( availability_status.eq(&status), )) .execute(&con) .expect("Error updating chef profile"); json!({ "status": true, "data":"Availability Status Updated" }) } pub fn rating_chef(con:PgConnection,n_rating:i32,uid:String) -> JsonValue { // use schema::smor_users::dsl::*; use schema::smor_chef_profiles::dsl::*; let p_rating = smor_chef_profiles.select(rating).filter(user_id.eq(&uid)) .load::<i32>(&con).expect("Error unable to fetch chef profile for rating"); let rate = 0.01 * n_rating as f32; let new_rating = rate + p_rating[0] as f32; diesel::update(smor_chef_profiles.filter(user_id.eq(&uid))) .set( rating.eq(new_rating as i32), ) .execute(&con) .expect("Error updating user profile"); json!({ "status": true, "data":"Chef rating was successfully" }) } pub fn get_user(con:PgConnection,uid:String) -> JsonValue { use schema::smor_users::dsl::*; let results = smor_users.filter(user_id.eq(&uid)) .load::<User>(&con).expect("Error unable to fetch user by user_id"); // print!("query result {:?}",results); return json!({ "status": true, "data":results[0] }) } pub fn get_chef(con:PgConnection,uid:String) -> JsonValue { use schema::smor_users::dsl::*; let results = smor_users.filter(schema::smor_users::dsl::user_id.eq(&uid)).load::<User>(&con).expect("Error unable to fetch user by user_id"); // print!("query result {:?}",results); use schema::smor_chef_profiles::dsl::*; let profile_result = 
smor_chef_profiles.filter(schema::smor_chef_profiles::dsl::user_id.eq(&uid)) .load::<Chef>(&con).expect("Error unable to fetch chef profile"); let iat = Local::now().to_string(); let user = format!("{}{}{}",results[0].name,results[0].email,results[0].user_id).to_string(); let u_role = &results[0].role.to_string(); let token = generate_token(&user,&iat,&u_role); return json!( { "status":true, "data":{ "id":results[0].id, "user_id":results[0].user_id, "name":results[0].name, "avatar":results[0].avatar, "phone":results[0].phone, "email":results[0].email, "token":token, "nickname":profile_result[0].nickname, "dish":profile_result[0].dish, "dish_cost":profile_result[0].dish_cost, "details":profile_result[0].details, "rating":profile_result[0].rating, "experience":profile_result[0].experience, "next_of_kin_full_name":profile_result[0].next_of_kin_full_name, "next_of_kin_address":profile_result[0].next_of_kin_address, "next_of_kin_phone":profile_result[0].next_of_kin_phone, "next_of_kin_relationship":profile_result[0].next_of_kin_relationship, "state":profile_result[0].state, "lga":profile_result[0].lga, "icon":profile_result[0].icon, "availability_status":profile_result[0].availability_status, "verification_status":profile_result[0].verification_status, } } ) } pub fn search(con:PgConnection,search:Search_Chef) -> JsonValue { use schema::smor_chef_profiles::dsl::*; let results = smor_chef_profiles.filter(state.eq(&search.state).and(lga.eq(&search.lga)).and(dish.ilike(&search.dish).and(verification_status.eq(true)).and(availability_status.eq(true)))) .order(rating.desc()).load::<Chef>(&con).expect("Error unable to fetch searched dish"); // print!("query result {:?}",results); return json!({ "status": true, "data":results }); } pub fn search_by_nickname(con:PgConnection,name:String) -> JsonValue { use schema::smor_chef_profiles::dsl::*; let results = smor_chef_profiles.filter(nickname.ilike(&name).and(verification_status.eq(true)).and(availability_status.eq(true))) 
.order(rating.desc()).load::<Chef>(&con).expect("Error unable to fetch searched dish"); // print!("query result {:?}",results); return json!({ "status": true, "data":results }); }
true
0170cca438e6de97d39a27a284f1b0a5a6453266
Rust
nickbabcock/highway-rs
/src/x86/avx.rs
UTF-8
9,563
2.546875
3
[ "MIT" ]
permissive
use super::{v2x64u::V2x64U, v4x64u::V4x64U}; use crate::internal::unordered_load3; use crate::internal::{HashPacket, PACKET_SIZE}; use crate::key::Key; use crate::traits::HighwayHash; use core::arch::x86_64::*; /// AVX empowered implementation that will only work on `x86_64` with avx2 enabled at the CPU /// level. #[derive(Debug, Default, Clone)] pub struct AvxHash { v0: V4x64U, v1: V4x64U, mul0: V4x64U, mul1: V4x64U, buffer: HashPacket, } impl HighwayHash for AvxHash { #[inline] fn append(&mut self, data: &[u8]) { unsafe { self.append(data); } } #[inline] fn finalize64(mut self) -> u64 { unsafe { Self::finalize64(&mut self) } } #[inline] fn finalize128(mut self) -> [u64; 2] { unsafe { Self::finalize128(&mut self) } } #[inline] fn finalize256(mut self) -> [u64; 4] { unsafe { Self::finalize256(&mut self) } } } impl AvxHash { /// Creates a new `AvxHash` while circumventing the runtime check for avx2. /// /// # Safety /// /// If called on a machine without avx2, a segfault will occur. Only use if you have /// control over the deployment environment and have either benchmarked that the runtime /// check is significant or are unable to check for avx2 capabilities #[must_use] #[target_feature(enable = "avx2")] pub unsafe fn force_new(key: Key) -> Self { let mul0 = V4x64U::new( 0x243f_6a88_85a3_08d3, 0x1319_8a2e_0370_7344, 0xa409_3822_299f_31d0, 0xdbe6_d5d5_fe4c_ce2f, ); let mul1 = V4x64U::new( 0x4528_21e6_38d0_1377, 0xbe54_66cf_34e9_0c6c, 0xc0ac_f169_b5f1_8a8c, 0x3bd3_9e10_cb0e_f593, ); let key = V4x64U::from(_mm256_load_si256(key.0.as_ptr().cast::<__m256i>())); AvxHash { v0: key ^ mul0, v1: key.rotate_by_32() ^ mul1, mul0, mul1, buffer: HashPacket::default(), } } /// Creates a new `AvxHash` if the avx2 feature is detected. 
#[must_use] pub fn new(key: Key) -> Option<Self> { #[cfg(feature = "std")] { if is_x86_feature_detected!("avx2") { Some(unsafe { Self::force_new(key) }) } else { None } } #[cfg(not(feature = "std"))] { let _key = key; None } } #[target_feature(enable = "avx2")] pub(crate) unsafe fn finalize64(&mut self) -> u64 { if !self.buffer.is_empty() { self.update_remainder(); } for _i in 0..4 { let permuted = AvxHash::permute(&self.v0); self.update(permuted); } let sum0 = V2x64U::from(_mm256_castsi256_si128((self.v0 + self.mul0).0)); let sum1 = V2x64U::from(_mm256_castsi256_si128((self.v1 + self.mul1).0)); let hash = sum0 + sum1; let mut result: u64 = 0; // Each lane is sufficiently mixed, so just truncate to 64 bits. _mm_storel_epi64(core::ptr::addr_of_mut!(result).cast::<__m128i>(), hash.0); result } #[target_feature(enable = "avx2")] pub(crate) unsafe fn finalize128(&mut self) -> [u64; 2] { if !self.buffer.is_empty() { self.update_remainder(); } for _i in 0..6 { let permuted = AvxHash::permute(&self.v0); self.update(permuted); } let sum0 = V2x64U::from(_mm256_castsi256_si128((self.v0 + self.mul0).0)); let sum1 = V2x64U::from(_mm256_extracti128_si256((self.v1 + self.mul1).0, 1)); let hash = sum0 + sum1; let mut result: [u64; 2] = [0; 2]; _mm_storeu_si128(result.as_mut_ptr().cast::<__m128i>(), hash.0); result } #[target_feature(enable = "avx2")] pub(crate) unsafe fn finalize256(&mut self) -> [u64; 4] { if !self.buffer.is_empty() { self.update_remainder(); } for _i in 0..10 { let permuted = AvxHash::permute(&self.v0); self.update(permuted); } let sum0 = self.v0 + self.mul0; let sum1 = self.v1 + self.mul1; let hash = AvxHash::modular_reduction(&sum1, &sum0); let mut result: [u64; 4] = [0; 4]; _mm256_storeu_si256(result.as_mut_ptr().cast::<__m256i>(), hash.0); result } #[inline] #[target_feature(enable = "avx2")] unsafe fn data_to_lanes(packet: &[u8]) -> V4x64U { V4x64U::from(_mm256_loadu_si256(packet.as_ptr().cast::<__m256i>())) } #[target_feature(enable = "avx2")] unsafe fn 
remainder(bytes: &[u8]) -> V4x64U { let size_mod32 = bytes.len(); let size256 = _mm256_broadcastd_epi32(_mm_cvtsi64_si128(size_mod32 as i64)); let size_mod4 = size_mod32 & 3; let size = _mm256_castsi256_si128(size256); if size_mod32 & 16 != 0 { let packetL = _mm_load_si128(bytes.as_ptr().cast::<__m128i>()); let int_mask = _mm_cmpgt_epi32(size, _mm_set_epi32(31, 27, 23, 19)); let int_lanes = _mm_maskload_epi32(bytes.as_ptr().offset(16).cast::<i32>(), int_mask); let remainder = &bytes[(size_mod32 & !3) + size_mod4 - 4..]; let last4 = i32::from_le_bytes([remainder[0], remainder[1], remainder[2], remainder[3]]); let packetH = _mm_insert_epi32(int_lanes, last4, 3); let packetL256 = _mm256_castsi128_si256(packetL); let packet = _mm256_inserti128_si256(packetL256, packetH, 1); V4x64U::from(packet) } else { let int_mask = _mm_cmpgt_epi32(size, _mm_set_epi32(15, 11, 7, 3)); let packetL = _mm_maskload_epi32(bytes.as_ptr().cast::<i32>(), int_mask); let remainder = &bytes[size_mod32 & !3..]; let last3 = unordered_load3(remainder); let packetH = _mm_cvtsi64_si128(last3 as i64); let packetL256 = _mm256_castsi128_si256(packetL); let packet = _mm256_inserti128_si256(packetL256, packetH, 1); V4x64U::from(packet) } } #[target_feature(enable = "avx2")] unsafe fn update_remainder(&mut self) { let size = self.buffer.len(); let size256 = _mm256_broadcastd_epi32(_mm_cvtsi64_si128(size as i64)); self.v0 += V4x64U::from(size256); let shifted_left = V4x64U::from(_mm256_sllv_epi32(self.v1.0, size256)); let tip = _mm256_broadcastd_epi32(_mm_cvtsi32_si128(32)); let shifted_right = V4x64U::from(_mm256_srlv_epi32(self.v1.0, _mm256_sub_epi32(tip, size256))); self.v1 = shifted_left | shifted_right; let packet = AvxHash::remainder(self.buffer.as_slice()); self.update(packet); } #[target_feature(enable = "avx2")] unsafe fn zipper_merge(v: &V4x64U) -> V4x64U { let hi = 0x0708_0609_0D0A_040B; let lo = 0x000F_010E_0502_0C03; v.shuffle(&V4x64U::new(hi, lo, hi, lo)) } #[target_feature(enable = "avx2")] 
unsafe fn update(&mut self, packet: V4x64U) { self.v1 += packet; self.v1 += self.mul0; self.mul0 ^= self.v1.mul_low32(&self.v0.shr_by_32()); self.v0 += self.mul1; self.mul1 ^= self.v0.mul_low32(&self.v1.shr_by_32()); self.v0 += AvxHash::zipper_merge(&self.v1); self.v1 += AvxHash::zipper_merge(&self.v0); } #[target_feature(enable = "avx2")] unsafe fn permute(v: &V4x64U) -> V4x64U { let indices = V4x64U::new( 0x0000_0002_0000_0003, 0x0000_0000_0000_0001, 0x0000_0006_0000_0007, 0x0000_0004_0000_0005, ); V4x64U::from(_mm256_permutevar8x32_epi32(v.0, indices.0)) } #[target_feature(enable = "avx2")] unsafe fn modular_reduction(x: &V4x64U, init: &V4x64U) -> V4x64U { let top_bits2 = V4x64U::from(_mm256_srli_epi64(x.0, 62)); let ones = V4x64U::from(_mm256_cmpeq_epi64(x.0, x.0)); let shifted1_unmasked = *x + *x; let top_bits1 = V4x64U::from(_mm256_srli_epi64(x.0, 63)); let upper_8bytes = V4x64U::from(_mm256_slli_si256(ones.0, 8)); let shifted2 = shifted1_unmasked + shifted1_unmasked; let upper_bit_of_128 = V4x64U::from(_mm256_slli_epi64(upper_8bytes.0, 63)); let zero = V4x64U::from(_mm256_setzero_si256()); let new_low_bits2 = V4x64U::from(_mm256_unpacklo_epi64(zero.0, top_bits2.0)); let shifted1 = shifted1_unmasked.and_not(&upper_bit_of_128); let new_low_bits1 = V4x64U::from(_mm256_unpacklo_epi64(zero.0, top_bits1.0)); *init ^ shifted2 ^ new_low_bits2 ^ shifted1 ^ new_low_bits1 } #[target_feature(enable = "avx2")] unsafe fn append(&mut self, data: &[u8]) { if self.buffer.is_empty() { let mut chunks = data.chunks_exact(PACKET_SIZE); for chunk in chunks.by_ref() { self.update(Self::data_to_lanes(chunk)); } self.buffer.set_to(chunks.remainder()); } else if let Some(tail) = self.buffer.fill(data) { self.update(Self::data_to_lanes(self.buffer.inner())); let mut chunks = tail.chunks_exact(PACKET_SIZE); for chunk in chunks.by_ref() { self.update(Self::data_to_lanes(chunk)); } self.buffer.set_to(chunks.remainder()); } } } impl_write!(AvxHash); impl_hasher!(AvxHash);
true
bff2ad6672ca0a72d971e6b0df3149dbad1431fa
Rust
ccqpein/Arithmetic-Exercises
/Word-Search-II/WSII.rs
UTF-8
4,635
3.34375
3
[ "Apache-2.0" ]
permissive
use std::collections::{HashMap, HashSet}; pub fn find_words(board: Vec<Vec<char>>, words: Vec<String>) -> Vec<String> { let mut table: HashMap<char, HashSet<(usize, usize)>> = HashMap::new(); for r in 0..board.len() { for c in 0..board.get(0).unwrap().len() { let e = table.entry(board[r][c]).or_insert(HashSet::new()); e.insert((r, c)); } } //let mut all_string: HashMap<(String, (usize, usize)), bool> = HashMap::new(); fn rec_helper( current: Option<(usize, usize)>, rest_words: &[char], table: &HashMap<char, HashSet<(usize, usize)>>, //all_record: &mut HashMap<(String, (usize, usize)), bool>, path_record: HashSet<(usize, usize)>, ) -> bool { // if let Some(v) = all_record.get(&(String::from_iter(rest_words), current.unwrap_or((0, 0)))) // { // return *v; // } if rest_words.len() == 0 { return true; } let next_around: HashSet<(usize, usize)> = match current { Some(c) => { if let Some(next_c) = table.get(&rest_words[0]) { let aaa = around(c); let aaa = aaa .difference(&path_record) .cloned() .collect::<HashSet<_>>(); aaa.intersection(next_c).cloned().collect() } else { return false; } } None => { if let Some(next_c) = table.get(&rest_words[0]) { next_c.iter().cloned().collect() } else { return false; } } }; for cc in next_around { let mut new_path_record = path_record.clone(); new_path_record.insert(cc); if rec_helper( Some(cc), &rest_words[1..], table, //all_record, new_path_record, ) { // if let Some(ccc) = current { // all_record // .entry((String::from_iter(&*rest_words), ccc)) // .or_insert(true); // } return true; } else { // if let Some(ccc) = current { // all_record // .entry((String::from_iter(&*rest_words), ccc)) // .or_insert(false); // } } } return false; } words .iter() .filter(|w| { rec_helper( None, &w.chars().collect::<Vec<_>>(), &table, //&mut all_string, HashSet::new(), ) }) .map(|s| s.clone()) .collect() } fn around(current: (usize, usize)) -> HashSet<(usize, usize)> { let mut result = HashSet::new(); if current.0 == 0 { result.insert((current.0 + 
1, current.1)); } else { result.insert((current.0 + 1, current.1)); result.insert((current.0 - 1, current.1)); } if current.1 == 0 { result.insert((current.0, current.1 + 1)); } else { result.insert((current.0, current.1 + 1)); result.insert((current.0, current.1 - 1)); } result } fn main() { dbg!(find_words( vec![ vec!['o', 'a', 'a', 'n'], vec!['e', 't', 'a', 'e'], vec!['i', 'h', 'k', 'r'], vec!['i', 'f', 'l', 'v'] ], [ "oath".to_string(), "pea".to_string(), "eat".to_string(), "rain".to_string() ] .to_vec() )); dbg!(find_words( vec![vec!['a', 'a'],], ["aaa".to_string()].to_vec() )); dbg!(find_words( vec![vec!['a', 'b'], vec!['c', 'd']], vec![ "ab".to_string(), "cb".to_string(), "bd".to_string(), "ac".to_string(), "ca".to_string(), "da".to_string(), "bc".to_string(), "db".to_string(), "abb".to_string(), "acb".to_string() ] )); dbg!(find_words( vec![vec!['a', 'b'], vec!['a', 'a']], vec![ "aba".to_string(), "baa".to_string(), "bab".to_string(), "aaab".to_string(), "aaa".to_string(), "aaaa".to_string(), "aaba".to_string() ] )); }
true
6eb0c7fd7261f32cd7c0ae33b8e8b6786e86f4ee
Rust
wasmerio/wapm-cli
/src/dataflow/find_command_result.rs
UTF-8
15,104
2.5625
3
[ "MIT" ]
permissive
use crate::config::Config; use crate::data::lock::lockfile::{Lockfile, LockfileError}; use crate::data::manifest::Manifest; use crate::dataflow::lockfile_packages::LockfileResult; use crate::dataflow::manifest_packages::ManifestResult; use std::path::{Path, PathBuf}; use thiserror::Error; use crate::graphql::execute_query; use graphql_client::*; #[derive(GraphQLQuery)] #[graphql( schema_path = "graphql/schema.graphql", query_path = "graphql/queries/get_package_by_command.graphql", response_derives = "Debug" )] struct GetPackageByCommandQuery; #[derive(Debug)] pub struct PackageInfoFromCommand { pub command: String, pub version: String, pub namespaced_package_name: String, } impl PackageInfoFromCommand { fn get_response( command_name: String, ) -> anyhow::Result<get_package_by_command_query::ResponseData> { let q = GetPackageByCommandQuery::build_query(get_package_by_command_query::Variables { command_name, }); execute_query(&q) } pub fn get(command_name: String) -> anyhow::Result<Self> { let response = Self::get_response(command_name)?; let response_val = response .get_command .ok_or_else(|| anyhow!("Error getting packages for given command from server"))?; Ok(Self { command: response_val.command, version: response_val.package_version.version, namespaced_package_name: response_val.package_version.package.display_name, }) } } #[derive(Clone, Debug, Error)] pub enum Error { #[error( "Command \"{command}\" was not found in the local directory or the global install directory." )] CommandNotFound { command: String, error: String, local_log: Vec<String>, global_log: Vec<String>, }, #[error( "Command \"{command}\" was neither found in the local nor in the global directory. {error}" )] CommandNotFoundInLocalDirectoryAndErrorReadingGlobalDirectory { command: String, error: String, local_log: Vec<String>, global_log: Vec<String>, }, #[error( "Could not get command \"{command}\" because there was a problem with the local package. 
{error}" )] ReadingLocalDirectory { command: String, error: String, local_log: Vec<String>, global_log: Vec<String>, }, #[error( "Command \"{0}\" exists in lockfile, but corresponding module \"{1}\" not found in lockfile.", )] CommandFoundButCorrespondingModuleIsMissing(String, String), #[error( "Failed to get command \"{0}\" because there was an error opening the global installation directory. {1}", )] CouldNotOpenGlobalsDirectory(String, String), } #[derive(Debug)] pub enum FindCommandResult { CommandNotFound { error: String, // Extended description of the error extended: Vec<String>, }, CommandFound { source: PathBuf, manifest_dir: PathBuf, args: Option<String>, module_name: String, prehashed_cache_key: Option<String>, }, Error(anyhow::Error), } impl From<LockfileError> for FindCommandResult { fn from(error: LockfileError) -> Self { match error { LockfileError::CommandNotFound(c) => FindCommandResult::CommandNotFound { error: c, extended: Vec::new(), }, _ => FindCommandResult::Error(error.into()), } } } impl FindCommandResult { fn find_command_in_manifest_and_lockfile<S: AsRef<str>>( command_name: S, manifest: Manifest, lockfile: Lockfile, directory: &Path, ) -> Self { match lockfile.get_command(command_name.as_ref()) { Err(e) => e.into(), Ok(lockfile_command) => { debug!("Command found in lockfile: {:?}", &lockfile_command); if lockfile_command.package_name == manifest.package.name { // this is a local module command let found_module = manifest.module.as_ref().and_then(|modules| { modules.iter().find(|m| m.name == lockfile_command.module) }); match found_module { Some(module) => FindCommandResult::CommandFound { source: module.source.clone(), manifest_dir: manifest.base_directory_path, args: lockfile_command.main_args.clone(), module_name: module.name.clone(), // don't use prehashed cache key for local modules prehashed_cache_key: None, }, None => FindCommandResult::Error( Error::CommandFoundButCorrespondingModuleIsMissing( command_name.as_ref().to_string(), 
lockfile_command.module.clone(), ) .into(), ), } } else { // this is a module being run as a dependency in a local context debug!( "Command's package name({}) and manifest's package name({}) are different", lockfile_command.package_name, manifest.package.name ); match lockfile.get_module( &lockfile_command.package_name, &lockfile_command.package_version, &lockfile_command.module, ) { Ok(lockfile_module) => { let path = lockfile_module .get_canonical_source_path_from_lockfile_dir(directory.into()); let manifest_dir = lockfile_module .get_canonical_manifest_path_from_lockfile_dir( directory.into(), true, ); FindCommandResult::CommandFound { source: path, manifest_dir, args: lockfile_command.main_args.clone(), module_name: lockfile_module.name.clone(), prehashed_cache_key: lockfile .get_prehashed_cache_key_from_command(lockfile_command), } } Err(e) => FindCommandResult::Error(e), } } } } } fn find_command_in_lockfile<S: AsRef<str>>( command_name: S, lockfile: Lockfile, directory: &Path, ) -> Self { let command_name = command_name.as_ref(); let mut error_lines = Vec::new(); // Look into the lockfile.commands to find the command by name first if let Ok(lockfile_command) = lockfile.get_command(command_name) { // If this fails, the package is corrupt match lockfile.get_module( &lockfile_command.package_name, &lockfile_command.package_version, &lockfile_command.module, ) { Ok(lockfile_module) => { let path = lockfile_module .get_canonical_source_path_from_lockfile_dir(directory.into()); let manifest_dir = lockfile_module .get_canonical_manifest_path_from_lockfile_dir(directory.into(), true); return FindCommandResult::CommandFound { source: path, manifest_dir, args: lockfile_command.main_args.clone(), module_name: lockfile_module.name.clone(), prehashed_cache_key: lockfile .get_prehashed_cache_key_from_command(lockfile_command), }; } Err(e) => { return FindCommandResult::CommandNotFound { error: command_name.to_string(), extended: vec![format!("{e}")], }; } } } if let 
Some(s) = lockfile .modules .keys() .find(|k| k.as_str().contains(command_name)) { error_lines.push(String::new()); error_lines.push("Note:".to_string()); error_lines.push(format!(" A package {s:?} seems to be installed locally")); error_lines.push(format!( " but the package {s:?} has no commands to execute" )); let all_commands = lockfile.commands.keys().cloned().collect::<Vec<_>>(); let nearest = all_commands .iter() .filter_map(|c| sublime_fuzzy::best_match(c, command_name).map(|_| c.clone())) .take(3) .collect::<Vec<_>>(); if !nearest.is_empty() { error_lines.push(String::new()); error_lines.push("Did you mean:".to_string()); for n in &nearest { error_lines.push(format!(" {n}")); } error_lines.push(String::new()); } } FindCommandResult::CommandNotFound { error: command_name.to_string(), extended: error_lines, } } pub fn find_command_in_directory<S: AsRef<str>>(directory: &Path, command_name: S) -> Self { let manifest_result = ManifestResult::find_in_directory(directory); let lockfile_result = LockfileResult::find_in_directory(directory); match (manifest_result, lockfile_result) { (ManifestResult::ManifestError(e), _) => return FindCommandResult::Error(e.into()), (_, LockfileResult::LockfileError(e)) => return FindCommandResult::Error(e.into()), (ManifestResult::NoManifest, LockfileResult::NoLockfile) => {} // continue (ManifestResult::NoManifest, LockfileResult::Lockfile(l)) => { debug!("Looking for local command in the lockfile"); return Self::find_command_in_lockfile(command_name, l, directory); } // the edge case of a manifest, but no lockfile would an invalid state. This function // should always be run after updating the lockfile with the latest manifest changes. // If that function were to fail so horribly that it did not error, and no lockfile was // generated, then we will get this panic. 
(ManifestResult::Manifest(_m), LockfileResult::NoLockfile) => { panic!("Manifest exists, but lockfile not found!") } (ManifestResult::Manifest(m), LockfileResult::Lockfile(l)) => { debug!("Looking for local command in the manifest and lockfile"); return Self::find_command_in_manifest_and_lockfile(command_name, m, l, directory); } }; FindCommandResult::CommandNotFound { error: command_name.as_ref().to_string(), extended: Vec::new(), } } } #[derive(Debug)] pub struct Command { // PathBuf, Option<String>, String, bool pub source: PathBuf, pub manifest_dir: PathBuf, pub args: Option<String>, pub module_name: String, /// whether the command was found in the global context pub is_global: bool, /// the prehashed module key pub prehashed_cache_key: Option<String>, } /// Get a command from anywhere, where anywhere is the set of packages in the local lockfile and the global lockfile. /// A flag indicating global run is also returned. Commands are found in local lockfile first. pub fn get_command_from_anywhere<S: AsRef<str>>(command_name: S) -> Result<Command, Error> { // look in the local directory, update if necessary let current_directory = crate::config::Config::get_current_dir().unwrap(); let local_command_result = FindCommandResult::find_command_in_directory(&current_directory, &command_name); let mut log = Vec::new(); match local_command_result { FindCommandResult::CommandNotFound { error: _, extended } => { log = extended; // not found, continue searching... 
} FindCommandResult::CommandFound { source, manifest_dir, args, module_name, prehashed_cache_key, } => { return Ok(Command { source, manifest_dir, args, module_name, is_global: false, prehashed_cache_key, }); } FindCommandResult::Error(e) => { return Err(Error::ReadingLocalDirectory { command: command_name.as_ref().to_string(), error: e.to_string(), local_log: log, global_log: Vec::new(), }); } }; trace!("Local command not found"); // look in the global directory let global_directory = Config::get_globals_directory().map_err(|e| { Error::CouldNotOpenGlobalsDirectory(command_name.as_ref().to_string(), e.to_string()) })?; let global_command_result = FindCommandResult::find_command_in_directory(&global_directory, &command_name); let mut global_log = Vec::new(); match global_command_result { FindCommandResult::CommandNotFound { error: _, extended } => { global_log = extended; // continue searching... } FindCommandResult::CommandFound { source, manifest_dir, args, module_name, prehashed_cache_key, } => { return Ok(Command { source, manifest_dir, args, module_name, is_global: true, prehashed_cache_key, }); } FindCommandResult::Error(e) => { return Err( Error::CommandNotFoundInLocalDirectoryAndErrorReadingGlobalDirectory { command: command_name.as_ref().to_string(), error: e.to_string(), local_log: log, global_log, }, ); } }; trace!("Global command not found"); return Err(Error::CommandNotFound { command: command_name.as_ref().to_string(), error: "Command not found in global or local directory".to_string(), local_log: log, global_log, }); }
true
f78fed866d28131638789c0a813016a102056c45
Rust
Bouzomgi/Rust-Unsafe-Analyzer
/tests/unions/test4.rs
UTF-8
287
3.03125
3
[]
no_license
// Declared union field accessing as a field of a struct fn main() { union SomeUnion { first: u8 } struct SomeStruct { primero: SomeUnion } let myUnion = SomeUnion{ first: 1 }; let myStruct = SomeStruct{ primero: myUnion }; unsafe { let a = myStruct.primero.first; } }
true
e0cbc132cf22acc52043f7fc93e3c45954d150d8
Rust
nfarnan/cs1666_examples
/rust/rs05_structs/src/main.rs
UTF-8
1,194
3.96875
4
[]
no_license
// Tuple struct struct Point(i32, i32); // Regular struct struct Rectangle { top_left: Point, width: i32, height: i32, // ^ last trailing comma optional } // Methods impl Rectangle { // Note that there is no special constructor name // Do not have use "new" fn new(top_left: Point, width: i32, height: i32) -> Rectangle { Rectangle { top_left, width, height, } } fn identify(&self) { println!("I am a Rectangle"); } } // Trait def trait Shape { fn area(&self) -> i32; fn contains(&self, p: Point) -> bool; } //TODO: Provide an implementation of the Shape trait for Rectangle //<Your code here> fn print_area<T: Shape> (some_shape: T) { println!("Shape's area: {}", some_shape.area()); } fn main() { // Using typical computer graphics coordinates, (0, 0) in the top left // y increases as it goes down, x increases as it goes right let r = Rectangle::new(Point(5, 5), 10, 20); r.identify(); print_area(r); let s = Rectangle::new(Point(0, 0), 10, 10); println!("s contains (5, 5): {}", s.contains(Point(5, 5))); println!("s contains (-1, -1): {}", s.contains(Point(-1, -1))); println!("s contains (7, 11): {}", s.contains(Point(7, 11))); }
true
f03a787991fc440499a753edd4fbd44eacc6cf0c
Rust
sychelsea/RustOJ
/cf-1198A.rs
UTF-8
2,482
3.078125
3
[]
no_license
#![allow(unused_imports)] use std::io::{self, Write}; use std::str; use std::vec::Vec; use std::cmp::min; struct Scanner<R> { reader: R, buf_str: Vec<u8>, buf_iter: str::SplitAsciiWhitespace<'static>, } impl<R: io::BufRead> Scanner<R> { fn new(reader: R) -> Self { Self { reader, buf_str: Vec::new(), buf_iter: "".split_ascii_whitespace() } } fn token<T: str::FromStr>(&mut self) -> T { loop { if let Some(token) = self.buf_iter.next() { return token.parse().ok().expect("Failed parse"); } self.buf_str.clear(); self.reader.read_until(b'\n', &mut self.buf_str).expect("Failed read"); self.buf_iter = unsafe { let slice = str::from_utf8_unchecked(&self.buf_str); std::mem::transmute(slice.split_ascii_whitespace()) } } } } fn mylog(x: i32) -> usize { let mut k: usize = 0; let mut temp: usize = 1; // 2^k while x as usize > temp { k += 1; temp *= 2 } return k; } fn main() { let stdin = io::stdin(); let mut scan = Scanner::new(stdin.lock()); let n = scan.token::<usize>(); let I = scan.token::<i32>(); let mut a = Vec::new(); for i in 0..n { let x = scan.token::<i32>(); a.push(x); } a.sort(); let mut m: i32 = 1; for i in 1..n { if a[i] != a[i-1] { m += 1; } } let mut pre_num: [i32; 400010] = [0; 400010]; let mut suf_num: [i32; 400010] = [0; 400010]; for i in 1..n { if a[i] == a[i-1] { pre_num[i] = pre_num[i-1]; } else { pre_num[i] = pre_num[i-1] + 1; } } for i in 0..n { suf_num[i] = m - pre_num[i] - 1; } let mut ans: usize = n + 1; for i in 0..n { if i > 0 && a[i] == a[i-1] { continue; } let mut l: usize = i; let mut r: usize = n - 1; while l <= r { let mut mid: usize = (l+r)/2; if n * mylog(m - pre_num[i] - suf_num[mid]) <= 8 * I as usize { if a[l] == a[r] { mid = r; } ans = min(ans, i + (n-mid-1)); l = mid + 1; } else { r = mid - 1; } } } println!("{}", ans); }
true
71ffd8aecc4103b711f90066e1ace4777e6d0e27
Rust
scizzorz/recipe-notation
/src/main.rs
UTF-8
1,826
2.796875
3
[]
no_license
use pest::iterators::Pair; use pest::iterators::Pairs; use pest::Parser; use pest_derive::Parser; use std::fs; use std::io; use std::io::prelude::*; // you're about to read some awful, hacky, PoC Rust code. // you've been warned. #[derive(Parser)] #[grammar = "recipe.pest"] struct RecipeParser; fn main() { let mut contents = String::new(); io::stdin().read_to_string(&mut contents).unwrap(); // let mut file = fs::File::open("recipe").unwrap(); // file.read_to_string(&mut contents).unwrap(); let mut steps = RecipeParser::parse(Rule::main, &contents).unwrap(); let mut ing_count = 0; println!("digraph {{"); println!(" rankdir=LR;"); for step in steps { if step.as_rule() == Rule::EOI { break; } let mut inner = step.into_inner(); let id = inner.next().map(|x| x.as_str().to_string()).unwrap(); let action = inner.next().map(|x| x.as_str().to_string()).unwrap(); println!(" step{}[shape=cds, label=\"{}\", style=filled, fillcolor=\"#FFFF99\"];", id, action); let ings = inner.next().unwrap().into_inner(); for ing in ings { let label = ing.as_str().to_string(); let inner = ing.into_inner().next().unwrap(); match inner.as_rule() { Rule::raw_ingredient => { ing_count += 1; println!(" ing{}[shape=box, label=\"{}\", style=filled, fillcolor=\"#9999FF\"];", ing_count, label); println!(" ing{} -> step{}", ing_count, id); } Rule::prev_ingredient => { println!(" step{} -> step{}", inner.into_inner().as_str().to_string(), id); } _ => panic!("fuc"), } } } println!("}}"); }
true
5ccdfd636ea5f06d63dd6c3794860500565a0b25
Rust
zhengshuxin/test
/rust/thread/thread5.rs
UTF-8
672
3.09375
3
[]
no_license
use std::sync::{Arc, Mutex}; use std::thread; use std::sync::mpsc; fn main() { let data = Arc::new(Mutex::new(0u32)); let (tx, rx) = mpsc::channel(); for _ in 0..10 { let (data, tx) = (data.clone(), tx.clone()); thread::spawn(move|| { let mut data = data.lock().unwrap(); *data += 1; tx.send(()).unwrap(); }); } for _ in 0..10 { rx.recv().unwrap(); } println!("all threads are finished!"); let d = data.lock().unwrap(); println!("d is {:?} {}, value is {}", d, d, *d); let n = 100; let mut n1 = n.clone(); n1 += 1; println!("n={}, n1={}", n, n1); }
true
ab981907b352f33292f6978dc8ebacd5e68da30f
Rust
brooks-builds/platformer_with_level_editor
/src/events/mod.rs
UTF-8
1,501
2.84375
3
[ "MIT" ]
permissive
use std::collections::HashMap; use crossbeam::channel::{Receiver, Sender}; use eyre::Result; use self::event::Event; pub mod event; pub struct EventManager { event_receiver: Receiver<Event>, event_sender: Sender<Event>, event_subscribers: HashMap<String, Vec<Sender<Event>>>, } impl EventManager { pub fn new() -> Self { let (event_sender, event_receiver) = crossbeam::channel::unbounded(); let event_subscribers = HashMap::new(); Self { event_receiver, event_sender, event_subscribers, } } pub fn register(&self) -> Sender<Event> { self.event_sender.clone() } pub fn process(&mut self) -> Result<()> { while let Ok(event) = self.event_receiver.try_recv() { if let Some(subscribers) = self.event_subscribers.get_mut(event.as_ref()) { subscribers .iter_mut() .try_for_each(|subscriber| subscriber.send(event.clone()))?; } } Ok(()) } pub fn subscribe(&mut self, events_to_subscribe_to: Vec<String>) -> Receiver<Event> { let (sender, receiver) = crossbeam::channel::unbounded(); events_to_subscribe_to .into_iter() .for_each(|stringified_event| { let subscribers = self.event_subscribers.entry(stringified_event).or_default(); subscribers.push(sender.clone()); }); receiver } }
true
a8b9165084334df1abab426044d4a9ddd7b8d7c8
Rust
nerdrew/slow_rust_read_to_end
/src/main.rs
UTF-8
1,824
2.71875
3
[]
no_license
#![feature(convert, path_ext)] extern crate flate2; use flate2::read::GzDecoder; use std::io::prelude::*; use std::fs::{File}; use std::path::PathBuf; use std::str::FromStr; fn main() { let path = PathBuf::from("test.gz"); match fast_gunzip_file(&path) { Ok(bytes) => { println!("{}", std::str::from_utf8(&bytes).unwrap()); }, Err(e) => { println!("IO error reading/ungzipping filename={} error={}", &path.to_str().unwrap(), e); } }; match slow_gunzip_file(&path) { Ok(bytes) => { println!("{}", std::str::from_utf8(&bytes).unwrap()); }, Err(e) => { println!("IO error reading/ungzipping filename={} error={}", &path.to_str().unwrap(), e); } }; } fn fast_gunzip_file(filename: &PathBuf) -> std::io::Result<Vec<u8>> { println!("fast gunzip filename={}", &filename.to_str().unwrap()); let size = match filename.metadata() { Ok(metadata) => metadata.len() as usize, Err(e) => return Err(e) }; let gz_file = try!(File::open(&filename)); let mut decoder = try!(GzDecoder::new(gz_file)); let mut bytes = Vec::new(); try!(decoder.read_to_end(&mut bytes)); println!("unzipped"); Ok(bytes) } fn slow_gunzip_file(filename: &PathBuf) -> std::io::Result<Vec<u8>> { println!("slow gunzip filename={}", &filename.to_str().unwrap()); let size = match filename.metadata() { Ok(metadata) => metadata.len() as usize, Err(e) => return Err(e) }; let gz_file = try!(File::open(&filename)); let mut decoder = try!(GzDecoder::new(gz_file)); let mut bytes = Vec::with_capacity(200_000_000); try!(decoder.read_to_end(&mut bytes)); println!("unzipped"); Ok(bytes) }
true
18e27cc27aa37c808a9a28db09024980ad07be5e
Rust
fschutt/beziercurve-wkt
/src/intersection.rs
UTF-8
44,198
3.015625
3
[ "MIT" ]
permissive
#![allow(non_snake_case)] //! Module for calculating curve-curve use crate::{Point, Line, QuadraticCurve, CubicCurve}; type OptionTuple<T> = (Option<T>, Option<T>, Option<T>); #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub struct BezierNormalVector { pub x: f64, pub y: f64 } #[derive(Debug, Clone, PartialEq, PartialOrd)] pub enum IntersectionResult { NoIntersection, FoundIntersection(Intersection), Infinite(InfiniteIntersections) } #[derive(Debug, Clone, PartialEq, PartialOrd)] pub enum Intersection { LineLine(LineLineIntersection), LineQuad(LineQuadIntersection), LineCubic(LineCubicIntersection), QuadLine(QuadLineIntersection), QuadQuad(Vec<QuadQuadIntersection>), QuadCubic(Vec<QuadCubicIntersection>), CubicLine(CubicLineIntersection), CubicQuad(Vec<CubicQuadIntersection>), CubicCubic(Vec<CubicCubicIntersection>), } /// When two curves are the same, they have infinite intersections /// Currently, this is treated the same as having no intersections #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub enum InfiniteIntersections { LineLine(Line, Line), QuadQuad(QuadraticCurve, QuadraticCurve), QuadCubic(QuadraticCurve, CubicCurve), CubicQuad(CubicCurve, QuadraticCurve), CubicCubic(CubicCurve, CubicCurve), } #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub struct LineLineIntersection { pub t1: f64, pub line1: Line, pub t2: f64, pub line2: Line, } impl LineLineIntersection { #[inline] pub fn get_intersection_point_1(&self) -> Point { // lerp(line.0, line.1, t1) let new_x = (1.0 - self.t1) * self.line1.0.x + self.t1 * self.line1.1.x; let new_y = (1.0 - self.t1) * self.line1.0.y + self.t1 * self.line1.1.y; Point::new(new_x, new_y) } #[inline] pub fn get_intersection_point_2(&self) -> Point { // lerp(line.0, line.1, t2) let new_x = (1.0 - self.t2) * self.line2.0.x + self.t2 * self.line2.1.x; let new_y = (1.0 - self.t2) * self.line2.0.y + self.t2 * self.line2.1.y; Point::new(new_x, new_y) } } macro_rules! 
cubic_line {($structname:ident, $curvetype:ident) => ( // A line-curve intersection can intersect in up to 3 points #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub enum $structname { Intersect1 { curve: $curvetype, line: Line, t_curve_1: f64, t_line_1: f64, }, Intersect2 { curve: $curvetype, line: Line, t_curve_1: f64, t_line_1: f64, t_curve_2: f64, t_line_2: f64, }, Intersect3 { curve: $curvetype, line: Line, t_curve_1: f64, t_line_1: f64, t_curve_2: f64, t_line_2: f64, t_curve_3: f64, t_line_3: f64, } } impl $structname { #[inline] pub fn get_curve_t1(&self) -> f64 { use self::$structname::*; match self { Intersect1 { t_curve_1, .. } => *t_curve_1, Intersect2 { t_curve_1, .. } => *t_curve_1, Intersect3 { t_curve_1, .. } => *t_curve_1, } } #[inline] pub fn get_curve_t2(&self) -> Option<f64> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { t_curve_2, .. } => Some(*t_curve_2), Intersect3 { t_curve_2, .. } => Some(*t_curve_2), } } #[inline] pub fn get_curve_t3(&self) -> Option<f64> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { .. } => None, Intersect3 { t_curve_3, .. } => Some(*t_curve_3), } } #[inline] pub fn get_line_t1(&self) -> f64 { use self::$structname::*; match self { Intersect1 { t_line_1, .. } => *t_line_1, Intersect2 { t_line_1, .. } => *t_line_1, Intersect3 { t_line_1, .. } => *t_line_1, } } #[inline] pub fn get_line_t2(&self) -> Option<f64> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { t_line_2, .. } => Some(*t_line_2), Intersect3 { t_line_2, .. } => Some(*t_line_2), } } #[inline] pub fn get_line_t3(&self) -> Option<f64> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { .. } => None, Intersect3 { t_line_3, .. } => Some(*t_line_3), } } #[inline] pub fn get_intersection_point_1(&self) -> Point { use self::$structname::*; match self { Intersect1 { line, t_line_1, .. 
} => lerp(line.0, line.1, *t_line_1), Intersect2 { line, t_line_1, .. } => lerp(line.0, line.1, *t_line_1), Intersect3 { line, t_line_1, .. } => lerp(line.0, line.1, *t_line_1), } } #[inline] pub fn get_intersection_point_2(&self) -> Option<Point> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { line, t_line_2, .. } => Some(lerp(line.0, line.1, *t_line_2)), Intersect3 { line, t_line_2, .. } => Some(lerp(line.0, line.1, *t_line_2)), } } #[inline] pub fn get_intersection_point_3(&self) -> Option<Point> { use self::$structname::*; match self { Intersect1 { .. } => None, Intersect2 { .. } => None, Intersect3 { line, t_line_3, .. } => Some(lerp(line.0, line.1, *t_line_3)), } } } )} cubic_line!(LineQuadIntersection, QuadraticCurve); cubic_line!(LineCubicIntersection, CubicCurve); macro_rules! cubic_cubic {($structname:ident, $curvetype:ident, $evaluate_fn:ident) => ( #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub struct $structname { pub t1: f64, pub curve1: $curvetype, pub t2: f64, pub curve2: $curvetype, } impl $structname { pub fn get_intersection_point_1(&self) -> Point { $evaluate_fn(self.curve1, self.t1) } pub fn get_intersection_point_2(&self) -> Point { $evaluate_fn(self.curve2, self.t2) } } )} cubic_line!(QuadLineIntersection, QuadraticCurve); cubic_cubic!(QuadQuadIntersection, QuadraticCurve, quadratic_evaluate); #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub struct QuadCubicIntersection { pub t1: f64, pub curve1: QuadraticCurve, pub t2: f64, pub curve2: CubicCurve, } impl QuadCubicIntersection { pub fn get_intersection_point_1(&self) -> Point { quadratic_evaluate(self.curve1, self.t1) } pub fn get_intersection_point_2(&self) -> Point { evaluate(self.curve2, self.t2) } } cubic_line!(CubicLineIntersection, CubicCurve); #[derive(Debug, Copy, Clone, PartialEq, PartialOrd)] pub struct CubicQuadIntersection { pub t1: f64, pub curve1: CubicCurve, pub t2: f64, pub curve2: QuadraticCurve, } impl CubicQuadIntersection 
{ pub fn get_intersection_point_1(&self) -> Point { evaluate(self.curve1, self.t1) } pub fn get_intersection_point_2(&self) -> Point { quadratic_evaluate(self.curve2, self.t2) } } cubic_cubic!(CubicCubicIntersection, CubicCurve, evaluate); pub(crate) fn line_line_intersect(line1: Line, line2: Line) -> IntersectionResult { use self::{Intersection::*, IntersectionResult::*}; if line1 == line2 { return Infinite(InfiniteIntersections::LineLine(line1, line2)); } if line1.0 == line1.1 || line2.0 == line2.1 { return NoIntersection; } match do_line_line_intersect(line1, line2) { None => NoIntersection, Some(s) => FoundIntersection(LineLine(s)), } } pub(crate) fn line_quad_intersect(line1: Line, curve1: QuadraticCurve) -> IntersectionResult { use self::{Intersection::*, LineQuadIntersection::*, IntersectionResult::*}; if line1.0 == line1.1 || (curve1.0 == curve1.1 && curve1.1 == curve1.2) { return NoIntersection; } match do_curve_line_intersect(quadratic_to_cubic_curve(curve1), line1) { (Some((t_line_1, t_curve_1)), None, None) => FoundIntersection(LineQuad(Intersect1 { curve: curve1, line: line1, t_curve_1, t_line_1, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), None) => FoundIntersection(LineQuad(Intersect2 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), Some((t_line_3, t_curve_3))) => FoundIntersection(LineQuad(Intersect3 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, t_curve_3, t_line_3, })), _ => NoIntersection, } } pub(crate) fn line_cubic_intersect(line1: Line, curve1: CubicCurve) -> IntersectionResult { use self::{Intersection::*, LineCubicIntersection::*, IntersectionResult::*}; if line1.0 == line1.1 || (curve1.0 == curve1.1 && curve1.1 == curve1.2 && curve1.2 == curve1.3) { return NoIntersection; } match do_curve_line_intersect(curve1, line1) { (Some((t_line_1, t_curve_1)), None, None) => FoundIntersection(LineCubic(Intersect1 { curve: 
curve1, line: line1, t_curve_1, t_line_1, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), None) => FoundIntersection(LineCubic(Intersect2 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), Some((t_line_3, t_curve_3))) => FoundIntersection(LineCubic(Intersect3 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, t_curve_3, t_line_3, })), _ => NoIntersection, } } pub(crate) fn quad_line_intersect(curve1: QuadraticCurve, line1: Line) -> IntersectionResult { use self::{Intersection::*, QuadLineIntersection::*, IntersectionResult::*}; if line1.0 == line1.1 || (curve1.0 == curve1.1 && curve1.1 == curve1.2) { return NoIntersection; } match do_curve_line_intersect(quadratic_to_cubic_curve(curve1), line1) { (Some((t_line_1, t_curve_1)), None, None) => FoundIntersection(QuadLine(Intersect1 { curve: curve1, line: line1, t_curve_1, t_line_1, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), None) => FoundIntersection(QuadLine(Intersect2 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), Some((t_line_3, t_curve_3))) => FoundIntersection(QuadLine(Intersect3 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, t_curve_3, t_line_3, })), _ => NoIntersection, } } pub(crate) fn quad_quad_intersect(curve1: QuadraticCurve, curve2: QuadraticCurve) -> IntersectionResult { use self::{Intersection::*, IntersectionResult::*}; if curve1 == curve2 { return Infinite(InfiniteIntersections::QuadQuad(curve1, curve2)); } if (curve1.0 == curve1.1 && curve1.1 == curve1.2) || (curve2.0 == curve2.1 && curve2.1 == curve2.2) { return NoIntersection; } match do_curve_curve_intersect(quadratic_to_cubic_curve(curve1), quadratic_to_cubic_curve(curve2)) { None => NoIntersection, Some(s) => { FoundIntersection(QuadQuad(s .into_iter() .map(|c| QuadQuadIntersection { curve1, 
curve2, t1: c.t1, t2: c.t2 }) .collect()) ) } } } pub(crate) fn quad_cubic_intersect(curve1: QuadraticCurve, curve2: CubicCurve) -> IntersectionResult { use self::{Intersection::*, IntersectionResult::*}; if (curve1.0 == curve1.1 && curve1.1 == curve1.2) || (curve2.0 == curve2.1 && curve2.1 == curve2.2 && curve2.2 == curve2.3) { return NoIntersection; } let curve1_new = quadratic_to_cubic_curve(curve1); if curve1_new == curve2 { return Infinite(InfiniteIntersections::QuadCubic(curve1, curve2)); } match do_curve_curve_intersect(curve1_new, curve2) { None => NoIntersection, Some(s) => { FoundIntersection(QuadCubic(s .into_iter() .map(|c| QuadCubicIntersection { curve1, curve2, t1: c.t1, t2: c.t2 }) .collect()) ) } } } pub(crate) fn cubic_line_intersect(curve1: CubicCurve, line1: Line) -> IntersectionResult { use self::{Intersection::*, CubicLineIntersection::*, IntersectionResult::*}; if line1.0 == line1.1 || (curve1.0 == curve1.1 && curve1.1 == curve1.2 && curve1.2 == curve1.3) { return NoIntersection; } match do_curve_line_intersect(curve1, line1) { (Some((t_line_1, t_curve_1)), None, None) => FoundIntersection(CubicLine(Intersect1 { curve: curve1, line: line1, t_curve_1, t_line_1, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), None) => FoundIntersection(CubicLine(Intersect2 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, })), (Some((t_line_1, t_curve_1)), Some((t_line_2, t_curve_2)), Some((t_line_3, t_curve_3))) => FoundIntersection(CubicLine(Intersect3 { curve: curve1, line: line1, t_curve_1, t_line_1, t_curve_2, t_line_2, t_curve_3, t_line_3, })), _ => NoIntersection, } } pub(crate) fn cubic_quad_intersect(curve1: CubicCurve, curve2: QuadraticCurve) -> IntersectionResult { use self::{Intersection::*, IntersectionResult::*}; if (curve1.0 == curve1.1 && curve1.1 == curve1.2 && curve1.2 == curve1.3) || (curve2.0 == curve2.1 && curve2.1 == curve2.2) { return NoIntersection; } let curve2_new = quadratic_to_cubic_curve(curve2); 
if curve2_new == curve1 { return Infinite(InfiniteIntersections::CubicQuad(curve1, curve2)); } match do_curve_curve_intersect(curve1, curve2_new) { None => NoIntersection, Some(s) => { FoundIntersection(CubicQuad(s .into_iter() .map(|c| CubicQuadIntersection { curve1, curve2, t1: c.t1, t2: c.t2 }) .collect()) ) } } } pub(crate) fn cubic_cubic_intersect(curve1: CubicCurve, curve2: CubicCurve) -> IntersectionResult { use self::{Intersection::*, IntersectionResult::*}; if curve1 == curve2 { return Infinite(InfiniteIntersections::CubicCubic(curve1, curve2)); } if (curve1.0 == curve1.1 && curve1.1 == curve1.2) || (curve2.0 == curve2.1 && curve2.1 == curve2.2) { return NoIntersection; } match do_curve_curve_intersect(curve1, curve2) { None => NoIntersection, Some(s) => FoundIntersection(CubicCubic(s)), } } #[inline] pub(crate) fn split_line(line: Line, t: f64) -> (Line, Line) { let t = t.max(0.0).min(1.0); let split_point = lerp(line.0, line.1, t); ( (line.0, split_point), (split_point, line.1), ) } #[inline] pub(crate) fn split_quad(curve: QuadraticCurve, t: f64) -> (QuadraticCurve, QuadraticCurve) { let t = t.max(0.0).min(1.0); let p = quad_hull_points(curve, t); ((p[0], p[3], p[5]), (p[5], p[4], p[2])) } #[inline] pub(crate) fn split_cubic(curve: CubicCurve, t: f64) -> (CubicCurve, CubicCurve) { let t = t.max(0.0).min(1.0); subdivide(curve, t) } // Determines the intersection point of the line defined by points A and B with the // line defined by points C and D. // // Returns YES if the intersection point was found, and stores that point in X,Y. // Returns NO if there is no determinable intersection point, in which case X,Y will // be unmodified. #[inline] fn do_line_line_intersect( (a, b): Line, (c, d): Line, ) -> Option<LineLineIntersection> { let (original_a, original_b) = (a, b); let (original_c, original_d) = (c, d); // (1) Translate the system so that point A is on the origin. 
let b = Point::new(b.x - a.x, b.y - a.y); let mut c = Point::new(c.x - a.x, c.y - a.y); let mut d = Point::new(d.x - a.x, d.y - a.y); // Get the length from a to b let dist_ab = (b.x*b.x + b.y*b.y).sqrt(); // Rotate the system so that point B is on the positive X axis. let cos_b = b.x / dist_ab; let sin_b = b.y / dist_ab; // Rotate c and d around b let new_x = c.x * cos_b + c.y * sin_b; c.y = c.y * cos_b - c.x * sin_b; c.x = new_x; let new_x = d.x * cos_b + d.y * sin_b; d.y = d.y * cos_b - d.x * sin_b; d.x = new_x; // Fail if the lines are parallel if c.y == d.y { return None; } // Calculate the position of the intersection point along line A-B. let t = d.x + (c.x - d.x) * d.y / (d.y - c.y); let new_x = original_a.x + t * cos_b; let new_y = original_a.y + t * sin_b; // The t projected onto the line a - b let t1 = ( ((new_x - original_a.x) / (original_b.x - original_a.x)) + ((new_y - original_a.y) / (original_b.y - original_a.y)) ) / 2.0; // The t projected onto the line b - c let t2 = ( ((new_x - original_c.x) / (original_d.x - original_c.x)) + ((new_y - original_c.y) / (original_d.y - original_c.y)) ) / 2.0; Some(LineLineIntersection { t1, line1: (original_a, original_b), t2, line2: (original_c, original_d), }) } /// Intersect a cubic curve with a line. 
/// /// Based on http://www.particleincell.com/blog/2013/cubic-line-intersection/ // https://jsbin.com/nawoxemopa/1/edit?js,console #[inline] fn do_curve_line_intersect( (a1, a2, a3, a4): CubicCurve, (b1, b2): Line, ) -> OptionTuple<(f64, f64)> { // If the numbers are below 10.0, the algorithm has // problems with precision, multiply by 100 // also, round to 3 decimals to avoid precision issues let numbers = [a1.x, a1.y, a2.x, a2.y, a3.x, a3.y, a4.x, a4.y, b1.x, b1.y, b2.x, b2.y]; let mut lowest_number = 0.0_f64; for n in &numbers { lowest_number = lowest_number.min(*n); } let smallest_number_abs = lowest_number.abs(); let multiplier = if smallest_number_abs != 0.0 { 100.0 / smallest_number_abs } else { 100.0 }; let a1 = Point::new(round_3(a1.x * multiplier), round_3(a1.y * multiplier)); let a2 = Point::new(round_3(a2.x * multiplier), round_3(a2.y * multiplier)); let a3 = Point::new(round_3(a3.x * multiplier), round_3(a3.y * multiplier)); let a4 = Point::new(round_3(a4.x * multiplier), round_3(a4.y * multiplier)); let b1 = Point::new(round_3(b1.x * multiplier), round_3(b1.y * multiplier)); let b2 = Point::new(round_3(b2.x * multiplier), round_3(b2.y * multiplier)); #[inline] fn round_3(input: f64) -> f64 { ((input * 1000.0_f64) as u64) as f64 / 1000.0_f64 } let A = b2.y - b1.y; // A = y2 - y1 let B = b1.x - b2.x; // B = x1 - x2 let C = b1.x * (b1.y - b2.y) + b1.y * (b2.x - b1.x); // C = x1*(y1-y2)+y1*(x2-x1) let bx = bezier_coeffs(a1.x, a2.x, a3.x, a4.x); let by = bezier_coeffs(a1.y, a2.y, a3.y, a4.y); let p_0 = A * bx.0 + B * by.0; // t^3 let p_1 = A * bx.1 + B * by.1; // t^2 let p_2 = A * bx.2 + B * by.2; // t let p_3 = A * bx.3 + B * by.3 + C; // 1 let r = cubic_roots(p_0, p_1, p_2, p_3); let mut intersections = (None, None, None); // for root in r macro_rules! 
unroll_loop {($index:tt) => ({ if let Some(t) = r.$index { let final_x = bx.0* t * t * t + bx.1 * t * t + bx.2 * t + bx.3; let final_y = by.0* t * t * t + by.1 * t * t + by.2 * t + by.3; // (final_x, final_y) is intersection point assuming infinitely long line segment, // make sure we are also in bounds of the line let x_dist = b2.x - b1.x; let y_dist = b2.y - b1.y; let t_line = if x_dist != 0.0 { // if not vertical line (final_x - b1.x) / x_dist } else { (final_y - b1.y) / y_dist }; intersections.$index = if !t.is_sign_positive() || t > 1.0 || !t_line.is_sign_positive() || t_line > 1.0 { None } else { Some((t_line as f64, t as f64)) } } })} unroll_loop!(0); unroll_loop!(1); unroll_loop!(2); intersections } // Intersect a quadratic with another quadratic curve fn do_curve_curve_intersect(a: CubicCurve, b: CubicCurve) -> Option<Vec<CubicCubicIntersection>> { let intersections = curve_intersections_inner(a, b, 0.0, 1.0, 0.0, 1.0, 1.0, false, 0, 32, 0.8); if intersections.is_empty() { None } else { Some(intersections) } } // --- helper functions // Generates all hull points, at all iterations, for an on-curve point // at the specified t-value. This generates a point[6], where the first iteration is // [0,1,2], the second iteration is [3,4], the third iteration is [5] // (the on-curve point) #[inline(always)] fn quad_hull_points(curve: QuadraticCurve, t: f64) -> [Point;6] { let (p0, p1, p2) = curve; // 2nd iteration let p3 = lerp(p0, p1, t); let p4 = lerp(p1, p2, t); // 3rd iteration let p5 = lerp(p3, p4, t); [p0, p1, p2, p3, p4, p5] } /// Calculates the normal vector at a certain point (perpendicular to the curve) #[inline] pub(crate) fn cubic_bezier_normal(curve: CubicCurve, t: f64) -> BezierNormalVector { // 1. 
Calculate the derivative of the bezier curve // // This means, we go from 4 control points to 3 control points and redistribute // the weights of the control points according to the formula: // // w'0 = 3(w1-w0) // w'1 = 3(w2-w1) // w'2 = 3(w3-w2) let weight_1_x = 3.0 * (curve.1.x - curve.0.x); let weight_1_y = 3.0 * (curve.1.y - curve.0.y); let weight_2_x = 3.0 * (curve.2.x - curve.1.x); let weight_2_y = 3.0 * (curve.2.y - curve.1.y); let weight_3_x = 3.0 * (curve.3.x - curve.2.x); let weight_3_y = 3.0 * (curve.3.y - curve.2.y); // The first derivative of a cubic bezier curve is a quadratic bezier curve // Luckily, the first derivative is also the tangent vector. So all we need to do // is to get the quadratic bezier let mut tangent = quadratic_evaluate(( Point { x: weight_1_x, y: weight_1_y }, Point { x: weight_2_x, y: weight_2_y }, Point { x: weight_3_x, y: weight_3_y }, ), t); // We normalize the tangent to have a lenght of 1 let tangent_length = (tangent.x.powi(2) + tangent.y.powi(2)).sqrt(); tangent.x /= tangent_length; tangent.y /= tangent_length; // The tangent is the vector that runs "along" the curve at a specific point. // To get the normal (to calcuate the rotation of the characters), we need to // rotate the tangent vector by 90 degrees. 
// // Rotating by 90 degrees is very simple, as we only need to flip the x and y axis BezierNormalVector { x: -tangent.y, y: tangent.x, } } /// Calculates the normal vector at a certain point (perpendicular to the curve) #[inline] pub(crate) fn quadratic_bezier_normal(curve: QuadraticCurve, t: f64) -> BezierNormalVector { cubic_bezier_normal(quadratic_to_cubic_curve(curve), t) } /// Calculates the normal vector at a certain point (perpendicular to the curve) #[inline] pub(crate) fn line_normal(line: Line, _t: f64) -> BezierNormalVector { // calculate the rise / run, then simply // inverse the axis to rotate by 90 degrees let diff_x = line.1.x - line.0.x; let diff_y = line.1.y - line.0.y; let line_length = (diff_x.powi(2) + diff_y.powi(2)).sqrt(); BezierNormalVector { x: -diff_y / line_length, y: diff_x / line_length, } } #[inline] fn quadratic_evaluate(curve: QuadraticCurve, t: f64) -> Point { let one_minus = 1.0 - t; let one_minus_square = one_minus.powi(2); let t_pow2 = t.powi(2); let x = one_minus_square * curve.0.x + 2.0 * one_minus * t * curve.1.x + 3.0 * t_pow2 * curve.2.x; let y = one_minus_square * curve.0.y + 2.0 * one_minus * t * curve.1.y + 3.0 * t_pow2 * curve.2.y; Point { x, y } } // based on http://mysite.verizon.net/res148h4j/javascript/script_exact_cubic.html#the%20source%20code #[inline(always)] fn cubic_roots(a: f64, b: f64, c: f64, d: f64) -> (Option<f64>, Option<f64>, Option<f64>) { use std::f64::consts::PI; // special case for linear and quadratic case if is_zero(a) { if is_zero(b) { // linear formula let p = -1.0 * (d / c); let ret = ( if !p.is_sign_positive() || p > 1.0 { None } else { Some(p) }, None, None ); let ret = sort_special(ret); return ret; } else { // quadratic discriminant let d_q = c.powi(2) - 4.0 * b * d; if d_q.is_sign_positive() { let d_q = d_q.sqrt(); let m = -1.0 * (d_q + c) / (2.0 * b); let n = (d_q - c) / (2.0 * b); let ret = ( if !m.is_sign_positive() || m > 1.0 { None } else { Some(m) }, if !n.is_sign_positive() || n > 
1.0 { None } else { Some(n) }, None, ); let ret = sort_special(ret); return ret; } } } let A = b / a; let B = c / a; let C = d / a; let Q = (3.0 * B - (A*A)) / 9.0; let R = (9.0 * A * B - 27.0 * C - 2.0 * (A*A*A)) / 54.0; let D = Q*Q*Q + R*R; // polynomial discriminant let ret = if D.is_sign_positive() { // complex or duplicate roots const ONE_THIRD: f64 = 1.0 / 3.0; let D_sqrt = D.sqrt(); let S = sign(R + D_sqrt) * (R + D_sqrt).abs().powf(ONE_THIRD); let T = sign(R - D_sqrt) * (R - D_sqrt).abs().powf(ONE_THIRD); let m = -A / 3.0 + (S + T); // real root let n = -A / 3.0 - (S + T) / 2.0; // real part of complex root let p = -A / 3.0 - (S + T) / 2.0; // real part of complex root let mut ret = ( if !m.is_sign_positive() || m > 1.0 { None } else { Some(m) }, if !n.is_sign_positive() || n > 1.0 { None } else { Some(n) }, if !p.is_sign_positive() || p > 1.0 { None } else { Some(p) }, ); let imaginary = (3.0_f64.sqrt() * (S - T) / 2.0).abs(); // complex part of root pair // discard complex roots if !is_zero(imaginary) { ret.1 = None; ret.2 = None; } ret } else { let th = (R / (-1.0 * Q.powi(3)).sqrt()).acos(); let minus_q_sqrt = (-1.0 * Q).sqrt(); let m = 2.0 * minus_q_sqrt * (th / 3.0).cos() - A / 3.0; let n = 2.0 * minus_q_sqrt * ((th + 2.0 * PI) / 3.0).cos() - A / 3.0; let p = 2.0 * minus_q_sqrt * ((th + 4.0 * PI) / 3.0).cos() - A / 3.0; // discard out of spec roots ( if !m.is_sign_positive() || m > 1.0 { None } else { Some(m) }, if !n.is_sign_positive() || n > 1.0 { None } else { Some(n) }, if !p.is_sign_positive() || p > 1.0 { None } else { Some(p) }, ) }; // sort but place None at the end let ret = sort_special(ret); ret } #[inline] fn sign(a: f64) -> f64 { if a.is_sign_positive() { 1.0 } else { -1.0 } } #[inline] fn bezier_coeffs(a: f64, b: f64, c: f64, d: f64) -> (f64, f64, f64, f64) { ( -a + 3.0*b + -3.0*c + d, 3.0*a - 6.0*b + 3.0*c, -3.0*a + 3.0*b, a ) } // Sort so that the None values are at the end #[inline] fn sort_special(a: OptionTuple<f64>) -> 
OptionTuple<f64> { match a { (None, None, None) => (None, None, None), (Some(a), None, None) | (None, Some(a), None) | (None, None, Some(a)) => (Some(a), None, None), (Some(a), Some(b), None) | (None, Some(a), Some(b)) | (Some(b), None, Some(a)) => (Some(a.min(b)), Some(a.max(b)), None), (Some(a), Some(b), Some(c)) => { let new_a = a.min(b).min(c); let new_b = if a < b && b < c { b } else if b < c && c < a { c } else { a }; let new_c = a.max(b).max(c); (Some(new_a), Some(new_b), Some(new_c)) } } } // Convert a quadratic bezier into a cubic bezier #[inline] fn quadratic_to_cubic_curve(c: QuadraticCurve) -> CubicCurve { const TWO_THIRDS: f64 = 2.0 / 3.0; let c1_x = c.0.x + TWO_THIRDS * (c.1.x - c.0.x); let c1_y = c.0.y + TWO_THIRDS * (c.1.y - c.0.y); let c2_x = c.2.x + TWO_THIRDS * (c.1.x - c.2.x); let c2_y = c.2.y + TWO_THIRDS * (c.1.y - c.2.y); (c.0, Point::new(c1_x, c1_y), Point::new(c2_x, c2_y), c.2) } /// Bezier curve intersection algorithm and utilities /// Directly extracted from PaperJS's implementation bezier curve fat-line clipping /// The original source code is available under the MIT license at /// /// https://github.com/paperjs/paper.js/ const TOLERANCE:f64 = 1e-5; const EPSILON: f64 = 1e-10; #[inline] fn is_zero(val: f64) -> bool { val.abs() <= EPSILON } /// Computes the signed distance of (x, y) between (px, py) and (vx, vy) #[inline] fn signed_distance(px: f64, py: f64, mut vx: f64, mut vy: f64, x: f64, y: f64) -> f64 { vx -= px; vy -= py; if is_zero(vx) { if vy.is_sign_positive() { px - x } else { x - px } } else if is_zero(vy) { if vx.is_sign_positive() { y - py } else { py - y } } else { (vx * (y - py) - vy * (x - px)) / (vx * vx + vy * vy).sqrt() } } /// Calculate the convex hull for the non-parametric bezier curve D(ti, di(t)). /// /// The ti is equally spaced across [0..1] — [0, 1/3, 2/3, 1] for /// di(t), [dq0, dq1, dq2, dq3] respectively. In other words our CVs for the /// curve are already sorted in the X axis in the increasing order. 
/// Calculating convex-hull is much easier than a set of arbitrary points. /// /// The convex-hull is returned as two parts [TOP, BOTTOM]. Both are in a /// coordinate space where y increases upwards with origin at bottom-left /// /// - TOP: The part that lies above the 'median' (line connecting end points of the curve) /// - BOTTOM: The part that lies below the median. #[inline] fn convex_hull(dq0: f64, dq1: f64, dq2: f64, dq3: f64) -> [Vec<[f64;2]>;2] { let p0 = [0.0, dq0]; let p1 = [1.0 / 3.0, dq1]; let p2 = [2.0 / 3.0, dq2]; let p3 = [1.0, dq3]; // Find signed distance of p1 and p2 from line [ p0, p3 ] let dist1 = signed_distance(0.0, dq0, 1.0, dq3, 1.0 / 3.0, dq1); let dist2 = signed_distance(0.0, dq0, 1.0, dq3, 2.0 / 3.0, dq2); // Check if p1 and p2 are on the same side of the line [ p0, p3 ] let (mut hull, flip) = if dist1 * dist2 < 0.0 { // p1 and p2 lie on different sides of [ p0, p3 ]. The hull is a // quadrilateral and line [ p0, p3 ] is NOT part of the hull so we // are pretty much done here. // The top part includes p1, // we will reverse it later if that is not the case let hull = [vec![p0, p1, p3], vec![p0, p2, p3]]; let flip = dist1 < 0.0; (hull, flip) } else { // p1 and p2 lie on the same sides of [ p0, p3 ]. The hull can be // a triangle or a quadrilateral and line [ p0, p3 ] is part of the // hull. Check if the hull is a triangle or a quadrilateral. // Also, if at least one of the distances for p1 or p2, from line // [p0, p3] is zero then hull must at most have 3 vertices. let (cross, pmax) = if dist1.abs() > dist2.abs() { // apex is dq3 and the other apex point is dq0 vector dqapex -> // dqapex2 or base vector which is already part of the hull. let cross = (dq3 - dq2 - (dq3 - dq0) / 3.0) * (2.0 * (dq3 - dq2) - dq3 + dq1) / 3.0; (cross, p1) } else { // apex is dq0 in this case, and the other apex point is dq3 // vector dqapex -> dqapex2 or base vector which is already part // of the hull. 
let cross = (dq1 - dq0 + (dq0 - dq3) / 3.0) * (-2.0 * (dq0 - dq1) + dq0 - dq2) / 3.0; (cross, p2) }; let distZero = is_zero(dist1) || is_zero(dist2); // Compare cross products of these vectors to determine if the point // is in the triangle [ p3, pmax, p0 ], or if it is a quadrilateral. let hull = if cross < 0.0 || distZero { [vec![p0, pmax, p3], vec![p0, p3]] } else { [vec![p0, p1, p2, p3], vec![p0, p3]] }; let flip = if is_zero(dist1) { !dist2.is_sign_positive() } else { !dist1.is_sign_positive() }; (hull, flip) }; if flip { hull.reverse(); } hull } /// Clips the convex-hull and returns [tMin, tMax] for the curve contained. #[inline] fn clip_convex_hull(hullTop: &[[f64;2]], hullBottom: &[[f64;2]], dMin: f64, dMax: f64) -> Option<f64> { if hullTop[0][1] < dMin { // Left of hull is below dMin, walk through the hull until it // enters the region between dMin and dMax clip_convex_hull_part(hullTop, true, dMin) } else if hullBottom[0][1] > dMax { // Left of hull is above dMax, walk through the hull until it // enters the region between dMin and dMax clip_convex_hull_part(hullBottom, false, dMax) } else { // Left of hull is between dMin and dMax, no clipping possible Some(hullTop[0][0]) } } #[inline] fn clip_convex_hull_part(part: &[[f64;2]], top: bool, threshold: f64) -> Option<f64> { let mut pxpy = part[0]; for [qx, qy] in part.iter().copied() { let [px, py] = pxpy; let a = if top { qy >= threshold } else { qy <= threshold }; if a { return Some(px + (threshold - py) * (qx - px) / (qy - py)); } pxpy = [qx, qy]; } // All points of hull are above / below the threshold None } /// Calculates the fat line of a curve and returns the maximum and minimum offset widths /// for the fatline of a curve #[inline] fn get_fatline((p0, p1, p2, p3): CubicCurve) -> (f64, f64) { // Calculate the fat-line L, for Q is the baseline l and two // offsets which completely encloses the curve P. 
let d1 = signed_distance(p0.x, p0.y, p3.x, p3.y, p1.x, p1.y); let d2 = signed_distance(p0.x, p0.y, p3.x, p3.y, p2.x, p2.y); let factor = if (d1 * d2).is_sign_positive() { 3.0 / 4.0 } else { 4.0 / 9.0 }; // Get a tighter fit let dMin = factor * 0.0_f64.min(d1).min(d2); let dMax = factor * 0.0_f64.max(d1).max(d2); // The width of the 'fatline' is |dMin| + |dMax| (dMin, dMax) } #[inline] fn subdivide((p1, c1, c2, p2): CubicCurve, t: f64) -> (CubicCurve, CubicCurve) { // Triangle computation, with loops unrolled. let u = 1.0 - t; // Interpolate from 4 to 3 points let p3x = u * p1.x + t * c1.x; let p3y = u * p1.y + t * c1.y; let p4x = u * c1.x + t * c2.x; let p4y = u * c1.y + t * c2.y; let p5x = u * c2.x + t * p2.x; let p5y = u * c2.y + t * p2.y; // Interpolate from 3 to 2 points let p6x = u * p3x + t * p4x; let p6y = u * p3y + t * p4y; let p7x = u * p4x + t * p5x; let p7y = u * p4y + t * p5y; // Interpolate from 2 points to 1 point let p8x = u * p6x + t * p7x; let p8y = u * p6y + t * p7y; // We now have all the values we need to build the sub-curves [left, right]: ( (p1, Point::new(p3x, p3y), Point::new(p6x, p6y), Point::new(p8x, p8y)), (Point::new(p8x, p8y), Point::new(p7x, p7y), Point::new(p5x, p5y), p2) ) } /// Returns the part of a curve between t1 and t2 #[inline] fn get_part(mut v: CubicCurve, t1: f64, t2: f64) -> CubicCurve { if t1.is_sign_positive() { v = subdivide(v, t1).1; // right } // Interpolate the parameter at 't2' in the new curve and cut there. if t2 < 1.0 { v = subdivide(v, (t2 - t1) / (1.0 - t1)).0; // left } v } /// Calculates the coordinates of the point on a bezier curve at a given t #[inline] fn evaluate((p1, c1, c2, p2): CubicCurve, t: f64) -> Point { // Handle special case at beginning / end of curve if t < TOLERANCE || t > (1.0 - TOLERANCE) { let is_zero = t < TOLERANCE; let x = if is_zero { p1.x } else { p2.x }; let y = if is_zero { p1.y } else { p2.y }; Point::new(x, y) } else { // Calculate the polynomial coefficients. 
let cx = 3.0 * (c1.x - p1.x); let bx = 3.0 * (c2.x - c1.x) - cx; let ax = p2.x - p1.x - cx - bx; let cy = 3.0 * (c1.y - p1.y); let by = 3.0 * (c2.y - c1.y) - cy; let ay = p2.y - p1.y - cy - by; // Calculate the curve point at parameter value t let x = ((ax * t + bx) * t + cx) * t + p1.x; let y = ((ay * t + by) * t + cy) * t + p1.y; Point::new(x, y) } } /// Computes the intersections of two bezier curves #[inline] fn curve_intersections_inner( mut v1: CubicCurve, v2: CubicCurve, tMin: f64, tMax: f64, uMin: f64, uMax: f64, oldTDiff: f64, reverse: bool, recursion: usize, recursionLimit: usize, tLimit: f64 ) -> Vec<CubicCubicIntersection> { // Avoid deeper recursion. // NOTE: @iconexperience determined that more than 20 recursions are // needed sometimes, depending on the tDiff threshold values further // below when determining which curve converges the least. He also // recommended a threshold of 0.5 instead of the initial 0.8 // See: https:#github.com/paperjs/paper.js/issues/565 if recursion > recursionLimit { return Vec::new(); } // Let P be the first curve and Q be the second // Calculate the fat-line L for Q is the baseline l and two // offsets which completely encloses the curve P. 
let (dMin, dMax) = get_fatline(v2); // Calculate non-parametric bezier curve D(ti, di(t)) - di(t) is the // distance of P from the baseline l of the fat-line, ti is equally // spaced in [0, 1] let dp0 = signed_distance(v2.0.x, v2.0.y, v2.3.x, v2.3.y, v1.0.x, v1.0.y); let dp1 = signed_distance(v2.0.x, v2.0.y, v2.3.x, v2.3.y, v1.1.x, v1.1.y); let dp2 = signed_distance(v2.0.x, v2.0.y, v2.3.x, v2.3.y, v1.2.x, v1.2.y); let dp3 = signed_distance(v2.0.x, v2.0.y, v2.3.x, v2.3.y, v1.3.x, v1.3.y); // NOTE: the recursion threshold of 4 is needed to prevent issue #571 // from occurring: https://github.com/paperjs/paper.js/issues/571 let (tMinNew, tMaxNew, tDiff) = if v2.0.x == v2.3.x && uMax - uMin <= EPSILON && recursion > 4 { // The fatline of Q has converged to a point, the clipping is not // reliable. Return the value we have even though we will miss the // precision. let tNew = (tMax + tMin) / 2.0; (tNew, tNew, 0.0) } else { // Get the top and bottom parts of the convex-hull let [mut top, mut bottom] = convex_hull(dp0, dp1, dp2, dp3); // Clip the convex-hull with dMin and dMax let tMinClip = clip_convex_hull(&top, &bottom, dMin, dMax); top.reverse(); bottom.reverse(); let tMaxClip = clip_convex_hull(&top, &bottom, dMin, dMax); // No intersections if one of the tvalues are null or 'undefined' let (tMinClip, tMaxClip) = match (tMinClip, tMaxClip) { (Some(min), Some(max)) => (min, max), _ => return Vec::new(), }; // Clip P with the fatline for Q v1 = get_part(v1, tMinClip, tMaxClip); // tMin and tMax are within the range (0, 1). We need to project it // to the original parameter range for v2. let tDiff = tMaxClip - tMinClip; let tMinNew = tMax * tMinClip + tMin * (1.0 - tMinClip); let tMaxNew = tMax * tMaxClip + tMin * (1.0 - tMaxClip); (tMinNew, tMaxNew, tDiff) }; // Check if we need to subdivide the curves if oldTDiff > tLimit && tDiff > tLimit { // Subdivide the curve which has converged the least. 
if tMaxNew - tMinNew > uMax - uMin { let parts = subdivide(v1, 0.5); let t = tMinNew + (tMaxNew - tMinNew) / 2.0; let mut intersections = Vec::new(); intersections.append(&mut curve_intersections_inner(v2, parts.0, uMin, uMax, tMinNew, t, tDiff, !reverse, recursion + 1, recursionLimit, tLimit)); intersections.append(&mut curve_intersections_inner(v2, parts.1, uMin, uMax, t, tMaxNew, tDiff, !reverse, recursion + 1, recursionLimit, tLimit)); intersections } else { let parts = subdivide(v2, 0.5); let t = uMin + (uMax - uMin) / 2.0; let mut intersections = Vec::new(); intersections.append(&mut curve_intersections_inner(parts.0, v1, uMin, t, tMinNew, tMaxNew, tDiff, !reverse, recursion + 1, recursionLimit, tLimit)); intersections.append(&mut curve_intersections_inner(parts.1, v1, t, uMax, tMinNew, tMaxNew, tDiff, !reverse, recursion + 1, recursionLimit, tLimit)); intersections } } else if (uMax - uMin).max(tMaxNew - tMinNew) < TOLERANCE { // We have isolated the intersection with sufficient precision let t1 = tMinNew + (tMaxNew - tMinNew) / 2.0; let t2 = uMin + (uMax - uMin) / 2.0; if reverse { vec![CubicCubicIntersection { t1: t2, curve1: v2, t2: t1, curve2: v1, }] } else { vec![CubicCubicIntersection { t1, curve1: v1, t2, curve2: v2, }] } } else { // Recurse curve_intersections_inner(v2, v1, uMin, uMax, tMinNew, tMaxNew, tDiff, !reverse, recursion + 1, recursionLimit, tLimit) } } #[inline(always)] fn lerp(p1: Point, p2: Point, t: f64) -> Point { let new_x = (1.0 - t) * p1.x + t * p2.x; let new_y = (1.0 - t) * p1.y + t * p2.y; Point::new(new_x, new_y) }
true
9091213e635f1dab7e13616501c5fc37e3baff35
Rust
rphmeier/tokio
/tokio-timer/src/lib.rs
UTF-8
1,459
3.078125
3
[ "MIT" ]
permissive
//! Utilities for scheduling work to happen after a period of time. //! //! This crate provides a number of utilities for working with periods of time: //! //! * [`Delay`]: A future that completes at a specified instant in time. //! //! * [`Interval`] A stream that yields at fixed time intervals. //! //! * [`Deadline`]: Wraps a future, requiring it to complete before a specified //! instant in time, erroring if the future takes too long. //! //! These three types are backed by a [`Timer`] instance. In order for //! [`Delay`], [`Interval`], and [`Deadline`] to function, the associated //! [`Timer`] instance must be running on some thread. //! //! [`Delay`]: struct.Delay.html //! [`Deadline`]: struct.Deadline.html //! [`Interval`]: struct.Interval.html //! [`Timer`]: timer/struct.Timer.html #![doc(html_root_url = "https://docs.rs/tokio-timer/0.2.4")] #![deny(missing_docs, warnings, missing_debug_implementations)] extern crate tokio_executor; #[macro_use] extern crate futures; pub mod clock; pub mod timer; mod atomic; mod deadline; mod delay; mod error; mod interval; use std::time::{Duration, Instant}; pub use self::deadline::{Deadline, DeadlineError}; pub use self::delay::Delay; pub use self::error::Error; pub use self::interval::Interval; pub use self::timer::{with_default, Timer}; /// Create a Future that completes in `duration` from now. pub fn sleep(duration: Duration) -> Delay { Delay::new(Instant::now() + duration) }
true
e5534835d90ff31f610952a4a55851d4fbf0a719
Rust
ialhamad/salal
/src/scanner.rs
UTF-8
6,595
3.03125
3
[]
no_license
use crate::tokens::{Token, TokenVariant};
use anyhow::Result;
use phf::phf_map;

/// Compile-time lookup table of reserved words.
///
/// Fixed: `"for"` and `"fun"` previously had their variants swapped
/// (`"for" => Fun`, `"fun" => For`), so both keywords were mis-tokenized.
static KEYWORDS: phf::Map<&'static str, TokenVariant> = phf_map! {
    "and" => TokenVariant::And,
    "class" => TokenVariant::Class,
    "else" => TokenVariant::Else,
    "false" => TokenVariant::False,
    "for" => TokenVariant::For,
    "fun" => TokenVariant::Fun,
    "if" => TokenVariant::If,
    "nil" => TokenVariant::Nil,
    "or" => TokenVariant::Or,
    "print" => TokenVariant::Print,
    "return" => TokenVariant::Return,
    "super" => TokenVariant::Super,
    "this" => TokenVariant::This,
    "true" => TokenVariant::True,
    "var" => TokenVariant::Var,
    "while" => TokenVariant::While,
};

/// Hand-written lexer: walks the source character by character and
/// produces a flat list of `Token`s.
pub struct Scanner {
    /// Source decomposed into `char`s for O(1) indexed access.
    source: Vec<char>,
    /// Index of the first char of the lexeme currently being scanned.
    start: usize,
    /// Index of the next unconsumed char.
    current: usize,
    /// Current line (1-based); recorded on every produced token.
    line: usize,
    /// Set when an unexpected character or unterminated string is seen.
    has_error: bool,
}

impl Scanner {
    /// Build a scanner over `source`, positioned at the first character.
    pub fn new(source: String) -> Self {
        let chars = source.chars().into_iter().collect::<Vec<_>>();
        Self {
            source: chars,
            current: 0,
            start: 0,
            line: 1,
            has_error: false,
        }
    }

    /// Scan the entire input, returning all tokens followed by an `Eof` marker.
    pub fn scan(&mut self) -> Result<Vec<Token>> {
        let mut tokens = vec![];
        while !self.is_at_end() {
            self.start = self.current;
            // `None` means the lexeme produced no token
            // (whitespace, comment, or erroneous input).
            if let Some(token) = self.scan_tokens() {
                tokens.push(token);
            }
        }
        tokens.push(Token::new(TokenVariant::Eof, String::new(), self.line));
        Ok(tokens)
    }

    /// Consume one lexeme and return its token, or `None` for
    /// whitespace / comments / erroneous input.
    pub fn scan_tokens(&mut self) -> Option<Token> {
        let c = self.advance();
        match c {
            '(' => self.make_token(TokenVariant::LeftParen),
            ')' => self.make_token(TokenVariant::RightParen),
            '{' => self.make_token(TokenVariant::LeftBrace),
            '}' => self.make_token(TokenVariant::RightBrace),
            ',' => self.make_token(TokenVariant::Comma),
            '.' => self.make_token(TokenVariant::Dot),
            '-' => self.make_token(TokenVariant::Minus),
            '+' => self.make_token(TokenVariant::Plus),
            ';' => self.make_token(TokenVariant::Semicolon),
            '*' => self.make_token(TokenVariant::Star),
            '!' => {
                if self.match_char('=') {
                    self.make_token(TokenVariant::BangEqual)
                } else {
                    self.make_token(TokenVariant::Bang)
                }
            }
            '=' => {
                if self.match_char('=') {
                    self.make_token(TokenVariant::EqualEqual)
                } else {
                    self.make_token(TokenVariant::Equal)
                }
            }
            '<' => {
                if self.match_char('=') {
                    self.make_token(TokenVariant::LessEqual)
                } else {
                    self.make_token(TokenVariant::Less)
                }
            }
            '>' => {
                if self.match_char('=') {
                    self.make_token(TokenVariant::GreaterEqual)
                } else {
                    self.make_token(TokenVariant::Greater)
                }
            }
            '"' => self.string(),
            _ if c.is_ascii_digit() => self.number(),
            _ if c.is_ascii_alphanumeric() => self.identifier(),
            // Fixed: the first '/' was already consumed by `advance()`, so the
            // second '/' of a line comment sits at `peek()`, not `peek_next()`.
            // The old guard compared the wrong char and never recognized
            // comments, sending them to the error arm below.
            '/' if self.peek() == '/' => {
                // Skip to (but not past) the newline so `line` is still bumped.
                while self.peek() != '\n' && !self.is_at_end() {
                    self.current += 1;
                }
                None
            }
            ' ' | '\r' | '\t' => None,
            '\n' => {
                self.line += 1;
                None
            }
            _ => {
                // Unexpected character. NOTE(review): a lone '/' (division)
                // also lands here — confirm whether `TokenVariant` has a
                // Slash variant that should be emitted instead.
                self.has_error = true;
                None
            }
        }
    }

    fn is_at_end(&self) -> bool {
        self.source.is_empty() || self.current >= self.source.len()
    }

    /// Consume and return the next char. At end of input this re-returns the
    /// last char without advancing (callers guard with `is_at_end`).
    pub fn advance(&mut self) -> char {
        if !self.is_at_end() {
            self.current += 1;
        }
        self.source[self.current - 1]
    }

    /// Wrap `variant` in a `Token` carrying the current lexeme and line.
    pub fn make_token(&self, variant: TokenVariant) -> Option<Token> {
        let lexeme = self.source[self.start..self.current].iter().collect();
        Some(Token::new(variant, lexeme, self.line))
    }

    /// Consume the next char only if it equals `c`.
    pub fn match_char(&mut self, c: char) -> bool {
        if !self.is_at_end() && self.source[self.current] == c {
            self.current += 1;
            return true;
        }
        false
    }

    /// Look at the next unconsumed char without consuming it.
    pub fn peek(&self) -> char {
        if self.is_at_end() {
            '\0'
        } else {
            self.source[self.current]
        }
    }

    /// Look two chars ahead without consuming.
    ///
    /// Fixed: the bound must cover `current + 1`, not just `current`;
    /// previously this indexed out of bounds (panic) when `current` was the
    /// last valid index.
    pub fn peek_next(&self) -> char {
        if self.current + 1 >= self.source.len() {
            '\0'
        } else {
            self.source[self.current + 1]
        }
    }

    /// Scan a double-quoted string literal (opening quote already consumed).
    /// Returns `None` (and flags `has_error`) if the string is unterminated.
    pub fn string(&mut self) -> Option<Token> {
        while self.peek() != '"' && !self.is_at_end() {
            // Multi-line strings are allowed; keep the line counter accurate.
            if self.peek() == '\n' {
                self.line += 1;
            }
            self.current += 1;
        }
        if self.is_at_end() {
            // Fixed: unterminated string now records the error instead of
            // being silently dropped.
            self.has_error = true;
            return None;
        }
        // Consume the closing quote.
        self.current += 1;
        // Trim the surrounding quotes from the stored value.
        let value = self.source[self.start + 1..self.current - 1]
            .iter()
            .collect::<String>();
        Some(Token::new(
            TokenVariant::String(value.clone()),
            value,
            self.line,
        ))
    }

    /// Scan a number literal: digits with an optional fractional part.
    pub fn number(&mut self) -> Option<Token> {
        // Fixed for consistency: use `is_ascii_digit` like the rest of the
        // scanner (`is_numeric` accepted Unicode digits that `parse::<f64>`
        // would reject anyway).
        while self.peek().is_ascii_digit() {
            self.current += 1;
        }
        // Only consume '.' when it is followed by a digit, so `1.foo`
        // still leaves the Dot token for the next lexeme.
        if self.peek() == '.' && self.peek_next().is_ascii_digit() {
            self.current += 1;
            while self.peek().is_ascii_digit() {
                self.current += 1;
            }
        }
        let value = self.source[self.start..self.current]
            .iter()
            .collect::<String>()
            .parse::<f64>();
        match value {
            Ok(num) => Some(Token::new(
                TokenVariant::Number(num),
                num.to_string(),
                self.line,
            )),
            // Unreachable for ASCII digit input; kept as a silent skip.
            Err(_) => None,
        }
    }

    /// Scan an identifier or keyword.
    /// TODO(review): underscores are not accepted in identifiers — confirm
    /// whether the language is meant to allow them.
    pub fn identifier(&mut self) -> Option<Token> {
        while self.peek().is_ascii_alphanumeric() {
            self.current += 1;
        }
        let value: String = self.source[self.start..self.current].iter().collect();
        if let Some(keyword) = KEYWORDS.get(value.as_str()) {
            Some(Token::new(keyword.clone(), value, self.line))
        } else {
            Some(Token::new(
                TokenVariant::Identifier(value.clone()),
                value,
                self.line,
            ))
        }
    }
}
true
d2e82fab6270326bd37cdacbaa72cb62d9e19ad0
Rust
placrosse/fluvio
/tests/runner/src/utils/test_meta/mod.rs
UTF-8
3,440
2.671875
3
[ "Apache-2.0" ]
permissive
//! Shared test metadata: test-case wrappers, CLI plumbing, a simple timer,
//! and the aggregated `TestResult` report.

pub mod derive_attr;
pub mod environment;

use std::any::Any;
use std::time::{Duration, Instant};
use std::fmt::{self, Debug, Display, Formatter};

use structopt::StructOpt;
use structopt::clap::AppSettings;
use prettytable::{table, row, cell};

use environment::EnvironmentSetup;
use dyn_clone::DynClone;

/// Options specific to one concrete test implementation.
///
/// `DynClone` makes `Box<dyn TestOption>` cloneable; `as_any` allows the
/// concrete option type to be recovered by downcasting.
pub trait TestOption: Debug + DynClone {
    fn as_any(&self) -> &dyn Any;
}

dyn_clone::clone_trait_object!(TestOption);

/// A runnable test: the shared environment setup plus the
/// test-specific options (type-erased behind `TestOption`).
#[derive(Debug, Clone)]
pub struct TestCase {
    pub environment: EnvironmentSetup,
    pub option: Box<dyn TestOption>,
}

impl TestCase {
    pub fn new(environment: EnvironmentSetup, option: Box<dyn TestOption>) -> Self {
        Self {
            environment,
            option,
        }
    }
}

/// Raw, unparsed arguments forwarded to the selected test's own CLI parser.
#[derive(Debug, Clone, StructOpt, PartialEq)]
pub enum TestCli {
    #[structopt(external_subcommand)]
    Args(Vec<String>),
}

impl Default for TestCli {
    fn default() -> Self {
        // No test-specific arguments by default.
        TestCli::Args(Vec::new())
    }
}

/// Top-level CLI: shared environment flags, plus an optional trailing
/// subcommand that is handed to the test verbatim (see `TestCli`).
#[derive(Debug, Clone, StructOpt, Default, PartialEq)]
#[structopt(
    name = "fluvio-test-runner",
    about = "Test fluvio platform",
    global_settings = &[AppSettings::ColoredHelp])]
pub struct BaseCli {
    #[structopt(flatten)]
    pub environment: EnvironmentSetup,

    #[structopt(subcommand)]
    pub test_cmd_args: Option<TestCli>,
}

/// Minimal stopwatch: `start()` then `stop()`, then read `duration()`.
#[derive(Debug, Clone)]
pub struct TestTimer {
    pub start_time: Instant,
    // `None` while the timer is still running.
    pub duration: Option<Duration>,
}

impl TestTimer {
    pub fn start() -> Self {
        TestTimer {
            start_time: Instant::now(),
            duration: None,
        }
    }

    pub fn stop(&mut self) {
        self.duration = Some(self.start_time.elapsed());
    }

    /// Elapsed time between `start()` and `stop()`.
    ///
    /// # Panics
    /// Panics if called before `stop()`.
    pub fn duration(&self) -> Duration {
        self.duration.expect("Timer is still running")
    }
}

/// Aggregated outcome of a test run.
///
/// NOTE(review): the `*_latency` fields are folded into `Duration` via
/// `Duration::from_nanos` in the `Display` impl below, so they are assumed
/// to be nanoseconds — confirm with the producers of these values.
#[derive(Debug, Clone, Default)]
pub struct TestResult {
    pub success: bool,
    pub duration: Duration,
    // stats
    pub bytes_produced: u64,
    pub produce_latency: u64,
    pub num_producers: u64,
    pub bytes_consumed: u64,
    pub consume_latency: u64,
    pub num_consumers: u64,
    pub num_topics: u64,
    pub topic_create_latency: u64,
}

impl TestResult {
    pub fn as_any(&self) -> &dyn Any {
        self
    }
}

// TODO: Parse the time scalars into Duration
impl Display for TestResult {
    /// Render the result as a human-readable prettytable.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let success_str = format!("{}", self.success);
        let duration_str = format!("{:?}", self.duration);
        // Latencies are interpreted as nanoseconds (see note on the struct).
        let producer_latency_str = format!("{:?}", Duration::from_nanos(self.produce_latency));
        let consumer_latency_str = format!("{:?}", Duration::from_nanos(self.consume_latency));
        let topic_create_latency_str =
            format!("{:?}", Duration::from_nanos(self.topic_create_latency));
        let table = table!(
            [b->"Test Results"],
            ["Pass?", b->success_str],
            ["Duration", duration_str],
            // ["# topics created", self.num_topics],
            ["topic create latency 99.9%", topic_create_latency_str],
            ["# producers created", self.num_producers],
            ["bytes produced", self.bytes_produced],
            ["producer latency 99.9%", producer_latency_str],
            ["# consumers created", self.num_consumers],
            ["bytes consumed", self.bytes_consumed],
            ["consumer latency 99.9%", consumer_latency_str]
        );
        write!(f, "{}", table)
    }
}
true
bd4639a78c79a433d334a22c6da1800caba353c7
Rust
erictapen/vdirsyncer
/rust/src/storage/mod.rs
UTF-8
3,978
2.84375
3
[ "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Storage backends and the traits they implement.

mod dav;
pub mod exports;
mod filesystem;
mod http;
mod singlefile;
mod utils;

use errors::{Error, Fallible};
use item::Item;
use serde::{Deserialize, Serialize};

// A parsed item paired with the etag the storage reported for it.
type ItemAndEtag = (Item, String);

pub trait StorageConfig: Clone + Serialize + Deserialize<'static> {
    /// Get the collection key of the object, if any.
    fn get_collection(&self) -> Option<&str>;
}

pub trait ConfigurableStorage: Storage + Sized {
    /// An instance of a configuration can be used to configure a storage and/or to discover
    /// storages.
    ///
    /// If a user configures a storage, the entire map in the configuration file is serialized into
    /// an instance of this type.
    type Config: StorageConfig;

    /// Load storage from configuration
    fn from_config(config: Self::Config) -> Fallible<Self>;

    /// Discover collections. Take a configuration like the user specified and yield configurations
    /// that actually point to valid storages.
    fn discover(config: Self::Config) -> Fallible<Box<Iterator<Item = Self::Config>>>;

    /// Create a new collection, honoring the `collection` value:
    ///
    /// * `collection == Some(_)` means that the storage should use the given name for the
    ///   collection. If it fails to do so, the `collection` in the return value may be different.
    /// * `collection = None` means that the configuration already points to a new storage
    ///   location. In that case the configuration can usually be returned as-is, but does not have
    ///   to be.
    ///
    /// The return value should have a non-`None` collection value.
    fn create(config: Self::Config) -> Fallible<Self::Config>;
}

pub trait Storage {
    /// returns an iterator of `(href, etag)`
    fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>>;

    /// Fetch a single item.
    ///
    /// :param href: href to fetch
    /// :returns: (item, etag)
    /// :raises: :exc:`vdirsyncer.exceptions.PreconditionFailed` if item can't be found.
    fn get(&mut self, href: &str) -> Fallible<ItemAndEtag>;

    /// Upload a new item.
    ///
    /// In cases where the new etag cannot be atomically determined (i.e. in the same
    /// "transaction" as the upload itself), this method may return `None` as etag. This
    /// special case only exists because of DAV. Avoid this situation whenever possible.
    ///
    /// Returns `(href, etag)`
    fn upload(&mut self, item: Item) -> Fallible<(String, String)>;

    /// Update an item.
    ///
    /// The etag may be none in some cases, see `upload`.
    ///
    /// Returns `etag`
    fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String>;

    /// Delete an item by href.
    fn delete(&mut self, href: &str, etag: &str) -> Fallible<()>;

    /// Enter buffered mode for storages that support it.
    ///
    /// Uploads, updates and deletions may not be effective until `flush` is explicitly called.
    ///
    /// Use this if you will potentially write a lot of data to the storage, it improves
    /// performance for storages that implement it.
    ///
    /// Default implementation is a no-op.
    fn buffered(&mut self) {}

    /// Write back all changes to the collection. Default is a no-op success.
    fn flush(&mut self) -> Fallible<()> {
        Ok(())
    }

    /// Get a metadata value. Defaults to an unsupported-operation error.
    fn get_meta(&mut self, _key: Metadata) -> Fallible<String> {
        Err(Error::MetadataUnsupported)?
    }

    /// Set a metadata value. Defaults to an unsupported-operation error.
    fn set_meta(&mut self, _key: Metadata, _value: &str) -> Fallible<()> {
        Err(Error::MetadataUnsupported)?
    }

    /// Attempt to delete collection. Defaults to an unsupported-operation error.
    fn delete_collection(&mut self) -> Fallible<()> {
        Err(Error::StorageDeletionUnsupported)?
    }
}

// `repr(C)` suggests this enum crosses an FFI boundary — see the C-facing
// exports module.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub enum Metadata {
    Color,
    Displayname,
}

/// Normalize a raw metadata value: empty / placeholder values become `""`,
/// anything else is whitespace-trimmed.
#[inline]
pub fn normalize_meta_value(value: &str) -> &str {
    // `None` is returned by iCloud for empty properties.
    if value.is_empty() || value == "None" {
        ""
    } else {
        value.trim()
    }
}
true
763f6b64dc94b8cdd2edb57f9f3071d7f75367f0
Rust
jqnatividad/qsv
/src/cmd/extsort.rs
UTF-8
5,076
2.78125
3
[ "MIT", "Unlicense" ]
permissive
static USAGE: &str = r#" Sort an arbitrarily large CSV/text file using a multithreaded external sort algorithm. This command is not specific to CSV data, it sorts any text file on a line-by-line basis. If sorting a non-CSV file, be sure to set --no-headers, otherwise, the first line will not be included in the external sort. Usage: qsv extsort [options] [<input>] [<output>] qsv extsort --help External sort option: --memory-limit <arg> The maximum amount of memory to buffer the on-disk hash table. This is a percentage of total memory. [default: 10] -j, --jobs <arg> The number of jobs to run in parallel. When not set, the number of jobs is set to the number of CPUs detected. Common options: -h, --help Display this message -n, --no-headers When set, the first row will not be interpreted as headers and will be sorted with the rest of the rows. Otherwise, the first row will always appear as the header row in the output. "#; use std::{ fs, io::{self, stdin, stdout, BufRead, Write}, path, }; use ext_sort::{buffer::mem::MemoryLimitedBufferBuilder, ExternalSorter, ExternalSorterBuilder}; use serde::Deserialize; use sysinfo::{System, SystemExt}; use crate::{config, util, CliResult}; #[derive(Deserialize)] struct Args { arg_input: Option<String>, arg_output: Option<String>, flag_jobs: Option<usize>, flag_memory_limit: Option<u8>, flag_no_headers: bool, } const MEMORY_LIMITED_BUFFER: u64 = 100 * 1_000_000; // 100 MB const RW_BUFFER_CAPACITY: usize = 1_000_000; // 1 MB pub fn run(argv: &[&str]) -> CliResult<()> { let args: Args = util::get_args(USAGE, argv)?; // memory buffer to use for external merge sort, // if we can detect the total memory, use 10% of it by default // and up to --memory-limit (capped at 50%), // otherwise, if we cannot detect the free memory use a default of 100 MB let mem_limited_buffer = if System::IS_SUPPORTED { let mut sys = System::new(); sys.refresh_memory(); (sys.total_memory() * 1000) / u8::min(args.flag_memory_limit.unwrap_or(10), 50) as u64 } else 
{ MEMORY_LIMITED_BUFFER }; log::info!("{mem_limited_buffer} bytes used for in memory mergesort buffer..."); let mut input_reader: Box<dyn BufRead> = match &args.arg_input { Some(input_path) => { if input_path.to_lowercase().ends_with(".sz") { return fail_incorrectusage_clierror!( "Input file cannot be a .sz file. Use 'qsv snappy decompress' first." ); } let file = fs::File::open(input_path)?; Box::new(io::BufReader::with_capacity( config::DEFAULT_RDR_BUFFER_CAPACITY, file, )) }, None => Box::new(io::BufReader::new(stdin().lock())), }; let mut output_writer: Box<dyn Write> = match &args.arg_output { Some(output_path) => { if output_path.to_lowercase().ends_with(".sz") { return fail_clierror!( "Output file cannot be a .sz file. Compress it after sorting with 'qsv snappy \ compress'." ); } Box::new(io::BufWriter::with_capacity( RW_BUFFER_CAPACITY, fs::File::create(output_path)?, )) }, None => Box::new(io::BufWriter::with_capacity( RW_BUFFER_CAPACITY, stdout().lock(), )), }; let sorter: ExternalSorter<String, io::Error, MemoryLimitedBufferBuilder> = match ExternalSorterBuilder::new() .with_tmp_dir(path::Path::new("./")) .with_buffer(MemoryLimitedBufferBuilder::new(mem_limited_buffer)) .with_rw_buf_size(RW_BUFFER_CAPACITY) .with_threads_number(util::njobs(args.flag_jobs)) .build() { Ok(sorter) => sorter, Err(e) => { return fail_clierror!("cannot create external sorter: {e}"); }, }; let mut header = String::new(); if !args.flag_no_headers { input_reader.read_line(&mut header)?; } let Ok(sorted) = sorter.sort(input_reader.lines()) else { return fail!("cannot do external sort"); }; if !header.is_empty() { output_writer.write_all(format!("{}\n", header.trim_end()).as_bytes())?; } for item in sorted.map(Result::unwrap) { output_writer.write_all(format!("{item}\n").as_bytes())?; } output_writer.flush()?; Ok(()) } #[test] fn test_mem_check() { // check to see if sysinfo return meminfo without segfaulting let mut sys = System::new(); sys.refresh_memory(); let mem10percent = 
(sys.total_memory() * 1000) / 10; // 10 percent of total memory assert!(mem10percent > 0); }
true
b794791901d86699bf7df994d9d17ea9015e1911
Rust
LanceEa/pgx
/pgx/src/datum/sql_entity_graph/postgres_hash.rs
UTF-8
2,277
2.546875
3
[ "MIT" ]
permissive
use super::{SqlGraphEntity, SqlGraphIdentifier, ToSql}; use std::cmp::Ordering; /// The output of a [`PostgresHash`](crate::datum::sql_entity_graph::PostgresHash) from `quote::ToTokens::to_tokens`. #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct PostgresHashEntity { pub name: &'static str, pub file: &'static str, pub line: u32, pub full_path: &'static str, pub module_path: &'static str, pub id: core::any::TypeId, } impl Ord for PostgresHashEntity { fn cmp(&self, other: &Self) -> Ordering { self.file .cmp(other.file) .then_with(|| self.file.cmp(other.file)) } } impl PartialOrd for PostgresHashEntity { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Into<SqlGraphEntity> for PostgresHashEntity { fn into(self) -> SqlGraphEntity { SqlGraphEntity::Hash(self) } } impl SqlGraphIdentifier for PostgresHashEntity { fn dot_identifier(&self) -> String { format!("hash {}", self.full_path) } fn rust_identifier(&self) -> String { self.full_path.to_string() } fn file(&self) -> Option<&'static str> { Some(self.file) } fn line(&self) -> Option<u32> { Some(self.line) } } impl ToSql for PostgresHashEntity { #[tracing::instrument(level = "debug", err, skip(self, _context), fields(identifier = %self.rust_identifier()))] fn to_sql(&self, _context: &super::PgxSql) -> eyre::Result<String> { let sql = format!("\n\ -- {file}:{line}\n\ -- {full_path}\n\ CREATE OPERATOR FAMILY {name}_hash_ops USING hash;\n\ CREATE OPERATOR CLASS {name}_hash_ops DEFAULT FOR TYPE {name} USING hash FAMILY {name}_hash_ops AS\n\ \tOPERATOR 1 = ({name}, {name}),\n\ \tFUNCTION 1 {name}_hash({name});\ ", name = self.name, full_path = self.full_path, file = self.file, line = self.line, ); tracing::debug!(%sql); Ok(sql) } }
true
d7d0d27ea37166a4b57a5464e4b7d3dfa4a0b8b2
Rust
tomlankhorst/advent-of-code-2020-rust
/src/day15-rambunctious-recitation.rs
UTF-8
1,185
3.265625
3
[]
no_license
use std::{env, io};
use std::fs::File;
use std::io::BufRead;
use std::collections::HashMap;

/// Play the "memory game" (AoC 2020 day 15): starting from the seed `seq`,
/// each subsequent turn speaks 0 if the previously spoken number was new,
/// otherwise the age of its previous occurrence. Returns the number spoken
/// on turn `to` (1-based).
///
/// IMPROVEMENT: the original kept every spoken number in a `Vec` reserved to
/// `to` elements (~240 MB of u64 for part 2), but only the *last* spoken
/// number is ever consulted. We now track just that plus a map of each
/// number's last-seen turn, producing identical results with O(distinct)
/// memory instead of O(to).
fn game(seq: Vec<u64>, to: u64) -> u64 {
    // birth[v] = 0-based turn index at which `v` was most recently spoken
    // *before* the current turn. Seed it with all starting numbers except
    // the last (the last one is "pending" and gets recorded as we go).
    let mut birth: HashMap<u64, u64> = HashMap::new();
    for (i, &s) in seq[..seq.len() - 1].iter().enumerate() {
        birth.insert(s, i as u64);
    }

    let mut last = *seq.last().unwrap();
    for turn in seq.len() as u64..to {
        // Record that `last` was spoken on the previous turn; `insert`
        // returns its prior occurrence (if any), which determines the age.
        last = match birth.insert(last, turn - 1) {
            Some(prev) => (turn - 1) - prev,
            None => 0,
        };
    }
    last
}

fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        panic!("Provide input file");
    }

    let file = File::open(&args[1])
        .expect("Couldn't read input file");

    let mut line = String::new();
    io::BufReader::new(file).read_line(&mut line)
        .expect("Could not read first line");

    // ROBUSTNESS: trim before splitting — `read_line` keeps the trailing
    // newline, which previously made the final `parse` panic on any input
    // file that ends with '\n'.
    let seq: Vec<u64> = line.trim().split(',')
        .map(|c| c.parse().unwrap())
        .collect();

    println!("Part 1: {}", game(seq.clone(), 2020));
    println!("Part 2: {}", game(seq, 30_000_000));
}
true
35d4126bceea83e66a11759cb5519bf4adec6d09
Rust
ralphtheninja/eyros
/src/ensure.rs
UTF-8
1,378
2.796875
3
[]
no_license
/// Like `assert_eq!`, but instead of panicking on mismatch, early-returns
/// `Err(_)` (built with `format_err!`) from the enclosing function.
///
/// For use in functions returning `Result<_, failure::Error>` (or whatever
/// error type `format_err!` produces in this crate).
#[macro_export]
macro_rules! ensure_eq {
    ($left:expr, $right:expr) => ({
        // Borrow both operands once via `match` so each expression is
        // evaluated exactly once (same trick the std `assert_eq!` uses).
        match (&$left, &$right) {
            (left_val, right_val) => {
                if !(*left_val == *right_val) {
                    return Err(format_err!(r#"assertion failed: `(left == right)`
  left: `{:?}`,
 right: `{:?}`"#, left_val, right_val));
                }
            }
        }
    });
    // Variant with a custom trailing message, formatted with the extra args.
    ($left:expr, $right:expr, $($arg:tt)*) => ({
        match (&($left), &($right)) {
            (left_val, right_val) => {
                if !(*left_val == *right_val) {
                    return Err(format_err!(r#"assertion failed: `(left == right)`
  left: `{:?}`,
 right: `{:?}`: {}"#, left_val, right_val,
                        format_args!($($arg)*)));
                }
            }
        }
    });
}

/// Same as `ensure_eq!`, but early-returns `Some(Err(_))` — for use inside
/// functions that return `Option<Result<..>>` (e.g. `Iterator::next`
/// implementations yielding fallible items).
#[macro_export]
macro_rules! ensure_eq_some {
    ($left:expr, $right:expr) => ({
        // Same single-evaluation borrow pattern as `ensure_eq!`.
        match (&$left, &$right) {
            (left_val, right_val) => {
                if !(*left_val == *right_val) {
                    return Some(Err(format_err!(r#"assertion failed: `(left == right)`
  left: `{:?}`,
 right: `{:?}`"#, left_val, right_val)));
                }
            }
        }
    });
    // Variant with a custom trailing message, formatted with the extra args.
    ($left:expr, $right:expr, $($arg:tt)*) => ({
        match (&($left), &($right)) {
            (left_val, right_val) => {
                if !(*left_val == *right_val) {
                    return Some(Err(format_err!(r#"assertion failed: `(left == right)`
  left: `{:?}`,
 right: `{:?}`: {}"#, left_val, right_val,
                        format_args!($($arg)*))));
                }
            }
        }
    });
}
true
135efe503bb040509094d4dcf74b284512ac71ee
Rust
openstax/adaptarr-server
/crates/rest-api/src/teams.rs
UTF-8
10,503
2.609375
3
[]
no_license
use actix_web::{
    HttpResponse, HttpRequest,
    web::{self, Json, Path, ServiceConfig},
    http::StatusCode,
};
use adaptarr_error::Error;
use adaptarr_models::{
    Invite, Model, Role, Team, TeamMember, TeamPermissions, TeamPublicParams, User,
    permissions::{
        AddMember, EditRole, PermissionBits, RemoveMember,
    },
};
use adaptarr_web::{
    Created, Database, FormOrJson, TeamScoped,
    session::{Elevated, Session},
};
use diesel::Connection as _;
use serde::Deserialize;

use crate::Result;

/// Configure routes.
pub fn configure(app: &mut ServiceConfig) {
    app
        .service(web::resource("/teams")
            .route(web::get().to(list_teams))
            .route(web::post().to(create_team))
        )
        .service(web::resource("/teams/{id}")
            .name("team")
            .route(web::get().to(get_team))
            .route(web::put().to(update_team))
        )
        .service(web::resource("/teams/{id}/roles")
            .route(web::get().to(list_roles))
            .route(web::post().to(create_role))
        )
        .service(web::resource("/teams/{id}/roles/{role}")
            .name("role")
            .route(web::get().to(get_role))
            .route(web::put().to(update_role))
            .route(web::delete().to(delete_role))
        )
        .service(web::resource("/teams/{id}/members")
            .route(web::get().to(list_members))
            .route(web::post().to(add_member))
        )
        .service(web::resource("/teams/{id}/members/{member}")
            .name("member")
            .route(web::get().to(get_member))
            .route(web::put().to(update_member))
            .route(web::delete().to(delete_member))
        )
    ;
}

/// Get list of all teams.
///
/// ## Method
///
/// ```
/// GET /teams
/// ```
fn list_teams(db: Database, session: Session)
-> Result<Json<Vec<<Team as Model>::Public>>> {
    // Elevated sessions see every team; ordinary users only their own.
    let teams = if session.is_elevated {
        Team::all(&db)?
    } else {
        session.user(&db)?.get_teams(&db)?
    };

    Ok(Json(teams.get_public_full(&db, &TeamPublicParams {
        include_role_permissions: session.is_elevated,
    })?))
}

/// Request body for `create_team`.
#[derive(Deserialize)]
struct NewTeam {
    name: String,
}

/// Create a new team.
///
/// ## Method
///
/// ```
/// POST /teams
/// ```
fn create_team(
    req: HttpRequest,
    db: Database,
    _: Session<Elevated>,
    data: FormOrJson<NewTeam>,
) -> Result<Created<String, Json<<Team as Model>::Public>>> {
    let team = Team::create(&db, &data.name)?;
    // Location of the newly created resource, for the `201 Created` header.
    let location = req.url_for("team", &[team.id().to_string()])?.to_string();

    Ok(Created(location, Json(team.get_public_full(&db, &TeamPublicParams {
        include_role_permissions: true,
    })?)))
}

/// Get a team by ID.
///
/// ## Method
///
/// ```text
/// GET /teams/:id
/// ```
fn get_team(db: Database, session: Session, team: TeamScoped<Team>, id: Path<i32>)
-> Result<Json<<Team as Model>::Public>> {
    // Role permissions are only exposed to elevated sessions or members who
    // can edit roles (non-short-circuiting `|` — both operands are cheap).
    Ok(Json(Team::by_id(&db, *id)?.get_public_full(&db, &TeamPublicParams {
        include_role_permissions: session.is_elevated
            | team.permissions().contains(TeamPermissions::EDIT_ROLE),
    })?))
}

/// Request body for `update_team`.
#[derive(Deserialize)]
struct TeamUpdate {
    name: String,
}

/// Modify a team.
///
/// ## Method
///
/// ```text
/// PUT /teams/:id
/// ```
fn update_team(
    db: Database,
    _: Session<Elevated>,
    _: TeamScoped<Team>,
    id: Path<i32>,
    update: FormOrJson<TeamUpdate>,
) -> Result<Json<<Team as Model>::Public>> {
    let mut team = Team::by_id(&db, *id)?;

    team.set_name(&db, &update.name)?;

    Ok(Json(team.get_public_full(&db, &TeamPublicParams {
        include_role_permissions: true,
    })?))
}

/// Get list of all roles in a team.
///
/// ## Method
///
/// ```text
/// GET /teams/:id/roles
/// ```
fn list_roles(db: Database, scope: TeamScoped<Team>, id: Path<i32>)
-> Result<Json<Vec<<Role as Model>::Public>>> {
    let show_permissions = scope.permissions().contains(TeamPermissions::EDIT_ROLE);

    Ok(Json(Team::by_id(&db, *id)?
        .get_roles(&db)?
        .get_public_full(&db, &show_permissions)?))
}

/// Request body for `create_role`; permissions default to none.
#[derive(Deserialize)]
struct NewRole {
    name: String,
    #[serde(default = "TeamPermissions::empty")]
    permissions: TeamPermissions,
}

/// Create a new role.
///
/// ## Method
///
/// ```text
/// POST /teams/:id/roles
/// ```
fn create_role(
    req: HttpRequest,
    db: Database,
    scope: TeamScoped<Team, EditRole>,
    data: Json<NewRole>,
) -> Result<Created<String, Json<<Role as Model>::Public>>> {
    let team = scope.resource();
    let role = Role::create(&db, team, &data.name, data.permissions)?;
    let location = req.url_for(
        "role", &[team.id().to_string(), role.id().to_string()])?.to_string();

    Ok(Created(location, Json(role.get_public_full(&db, &true)?)))
}

/// Get a role by ID.
///
/// ## Method
///
/// ```text
/// GET /teams/:id/roles/:role
/// ```
fn get_role(db: Database, member: TeamScoped<Team>, path: Path<(i32, i32)>)
-> Result<Json<<Role as Model>::Public>> {
    let (team_id, role_id) = path.into_inner();
    let show_permissions = member.permissions().contains(TeamPermissions::EDIT_ROLE);

    Ok(Json(Team::by_id(&db, team_id)?
        .get_role(&db, role_id)?
        .get_public_full(&db, &show_permissions)?))
}

/// Request body for `update_role`; omitted fields are left unchanged.
#[derive(Deserialize)]
struct RoleUpdate {
    name: Option<String>,
    permissions: Option<TeamPermissions>,
}

/// Update a role.
///
/// ## Method
///
/// ```text
/// PUT /teams/:id/roles/:role
/// ```
fn update_role(
    db: Database,
    _: TeamScoped<Team, EditRole>,
    path: Path<(i32, i32)>,
    update: Json<RoleUpdate>,
) -> Result<Json<<Role as Model>::Public>> {
    let (team_id, role_id) = path.into_inner();
    let mut role = Team::by_id(&db, team_id)?.get_role(&db, role_id)?;
    let db = &db;

    // Apply both partial updates atomically.
    db.transaction::<_, diesel::result::Error, _>(|| {
        if let Some(ref name) = update.name {
            role.set_name(db, name)?;
        }

        if let Some(permissions) = update.permissions {
            role.set_permissions(db, permissions)?;
        }

        Ok(())
    })?;

    Ok(Json(role.get_public_full(&db, &true)?))
}

/// Delete a role.
///
/// ## Method
///
/// ```text
/// DELETE /teams/:id/roles/:role
/// ```
fn delete_role(
    db: Database,
    _: TeamScoped<Team, EditRole>,
    path: Path<(i32, i32)>,
) -> Result<HttpResponse> {
    let (team_id, role_id) = path.into_inner();

    Team::by_id(&db, team_id)?.get_role(&db, role_id)?.delete(&db)?;

    Ok(HttpResponse::new(StatusCode::NO_CONTENT))
}

/// List all members in a team.
///
/// ## Method
///
/// ```text
/// GET /teams/:id/members
/// ```
fn list_members(db: Database, scope: TeamScoped<Team>)
-> Result<Json<Vec<<TeamMember as Model>::Public>>> {
    let show_permissions = scope.permissions().contains(TeamPermissions::EDIT_ROLE);

    Ok(Json(scope.resource()
        .get_members(&db)?
        .get_public_full(&db, &show_permissions)?))
}

/// Request body for `add_member`.
#[derive(Deserialize)]
struct NewMember {
    user: UserRef,
    permissions: TeamPermissions,
    role: Option<i32>,
}

/// A user reference: either a numeric ID or an email address
/// (distinguished by untagged deserialization).
#[derive(Deserialize)]
#[serde(untagged)]
enum UserRef {
    ById(i32),
    ByEmail(String),
}

/// Add a member to a team.
///
/// ## Method
///
/// ```text
/// POST /teams/:id/members
/// ```
fn add_member(
    db: Database,
    scope: TeamScoped<Team, AddMember>,
    new: FormOrJson<NewMember>,
) -> Result<HttpResponse> {
    let NewMember { user, permissions, role } = new.into_inner();

    let user = match user {
        UserRef::ById(id) => User::by_id(&db, id)?,
        UserRef::ByEmail(email) => User::by_email(&db, &email)?,
    };

    // A member can only be granted permissions the inviter holds themselves.
    let permissions = permissions & scope.permissions();
    let team = scope.into_resource();
    let role = role.map(|id| team.get_role(&db, id)).transpose()?;
    let locale = user.locale();

    // Membership is established via an invitation email, not immediately —
    // hence the 202 Accepted below.
    let invite = Invite::create_for_existing(
        &db, team, role.as_ref(), permissions, user)?;
    invite.do_send_mail(locale);

    Ok(HttpResponse::new(StatusCode::ACCEPTED))
}

/// Get a specific member of a team.
///
/// ## Method
///
/// ```text
/// GET /teams/:id/members/:member
/// ```
fn get_member(db: Database, scope: TeamScoped<Team>, path: Path<(i32, i32)>)
-> Result<Json<<TeamMember as Model>::Public>> {
    let (_, member_id) = path.into_inner();
    let user = User::by_id(&db, member_id)?;
    let member = scope.resource().get_member(&db, &user)?;
    let show_permissions = scope.permissions().contains(TeamPermissions::EDIT_ROLE);

    Ok(Json(member.get_public_full(&db, &show_permissions)?))
}

/// Request body for `update_member`.
#[derive(Deserialize)]
struct MemberUpdate {
    permissions: Option<TeamPermissions>,
    // `Option<Option<_>>` distinguishes "field absent" (no change) from an
    // explicit `null` (clear the role).
    #[serde(default, deserialize_with = "adaptarr_util::de_optional_null")]
    role: Option<Option<i32>>,
}

/// Update a member of a team.
///
/// ## Method
///
/// ```text
/// PUT /teams/:id/members/:member
/// ```
fn update_member(
    db: Database,
    scope: TeamScoped<Team>,
    path: Path<(i32, i32)>,
    update: FormOrJson<MemberUpdate>,
) -> Result<Json<<TeamMember as Model>::Public>> {
    let (_, member_id) = path.into_inner();
    let user = User::by_id(&db, member_id)?;
    let team = scope.resource();
    let mut member = team.get_member(&db, &user)?;

    db.transaction::<_, Error, _>(|| {
        if let Some(permissions) = update.permissions {
            scope.permissions().require(
                TeamPermissions::EDIT_MEMBER_PERMISSIONS)?;
            // The caller can only change bits within their own permission
            // scope: bits outside it are kept from the member's current set.
            member.set_permissions(
                &db,
                member.permissions() & !scope.permissions() | permissions,
            )?;
        }

        if let Some(role) = update.role {
            scope.permissions().require(TeamPermissions::ASSIGN_ROLE)?;
            let role = role.map(|id| team.get_role(&db, id)).transpose()?;
            member.set_role(&db, role)?;
        }

        Ok(())
    })?;

    let show_permissions = scope.permissions().contains(TeamPermissions::EDIT_ROLE);

    Ok(Json(member.get_public_full(&db, &show_permissions)?))
}

/// Remove a user from a team.
///
/// ## Method
///
/// ```text
/// DELETE /teams/:id/members/:member
/// ```
fn delete_member(
    db: Database,
    scope: TeamScoped<Team, RemoveMember>,
    path: Path<(i32, i32)>,
) -> Result<HttpResponse> {
    let (_, member_id) = path.into_inner();
    let user = User::by_id(&db, member_id)?;

    scope.resource().get_member(&db, &user)?.delete(&db)?;

    Ok(HttpResponse::new(StatusCode::NO_CONTENT))
}
true
6894b421cbd8b45bc694f89cdbb768c748400b3a
Rust
CryZe/libtww
/src/system/time/duration.rs
UTF-8
13,959
3.625
4
[ "MIT" ]
permissive
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// NOTE(review): this is a vendored copy of `std::time::Duration`,
// presumably for a no-std-like target — keep behavior identical to std.

use std::ops::{Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign};

const NANOS_PER_SEC: u32 = 1_000_000_000;
const NANOS_PER_MILLI: u32 = 1_000_000;
const MILLIS_PER_SEC: u64 = 1_000;

/// A duration type to represent a span of time, typically used for system
/// timeouts.
///
/// Each duration is composed of a number of seconds and nanosecond precision.
/// APIs binding a system timeout will typically round up the nanosecond
/// precision if the underlying system does not support that level of precision.
///
/// Durations implement many common traits, including `Add`, `Sub`, and other
/// ops traits. Currently a duration may only be inspected for its number of
/// seconds and its nanosecond precision.
///
/// # Examples
///
/// ```
/// use std::time::Duration;
///
/// let five_seconds = Duration::new(5, 0);
/// let five_seconds_and_five_nanos = five_seconds + Duration::new(0, 5);
///
/// assert_eq!(five_seconds_and_five_nanos.as_secs(), 5);
/// assert_eq!(five_seconds_and_five_nanos.subsec_nanos(), 5);
///
/// let ten_millis = Duration::from_millis(10);
/// ```
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Duration {
    secs: u64,
    nanos: u32, // Always 0 <= nanos < NANOS_PER_SEC
}

impl Duration {
    /// Creates a new `Duration` from the specified number of seconds and
    /// additional nanosecond precision.
    ///
    /// If the nanoseconds is greater than 1 billion (the number of nanoseconds
    /// in a second), then it will carry over into the seconds provided.
    ///
    /// # Panics
    ///
    /// This constructor will panic if the carry from the nanoseconds overflows
    /// the seconds counter.
    #[inline]
    pub fn new(secs: u64, nanos: u32) -> Duration {
        // Carry whole seconds out of `nanos` to maintain the invariant
        // `0 <= nanos < NANOS_PER_SEC`.
        let secs = secs.checked_add((nanos / NANOS_PER_SEC) as u64)
            .expect("overflow in Duration::new");
        let nanos = nanos % NANOS_PER_SEC;
        Duration {
            secs: secs,
            nanos: nanos,
        }
    }

    /// Creates a new `Duration` from the specified number of seconds.
    #[inline]
    pub fn from_secs(secs: u64) -> Duration {
        Duration {
            secs: secs,
            nanos: 0,
        }
    }

    /// Creates a new `Duration` from the specified number of milliseconds.
    #[inline]
    pub fn from_millis(millis: u64) -> Duration {
        let secs = millis / MILLIS_PER_SEC;
        let nanos = ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI;
        Duration {
            secs: secs,
            nanos: nanos,
        }
    }

    /// Returns the number of whole seconds represented by this duration.
    ///
    /// The extra precision represented by this duration is ignored (i.e. extra
    /// nanoseconds are not represented in the returned value).
    #[inline]
    pub fn as_secs(&self) -> u64 {
        self.secs
    }

    /// Returns the nanosecond precision represented by this duration.
    ///
    /// This method does **not** return the length of the duration when
    /// represented by nanoseconds. The returned number always represents a
    /// fractional portion of a second (i.e. it is less than one billion).
    #[inline]
    pub fn subsec_nanos(&self) -> u32 {
        self.nanos
    }

    /// Checked duration addition. Computes `self + other`, returning `None`
    /// if overflow occurred.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(duration_checked_ops)]
    ///
    /// use std::time::Duration;
    ///
    /// assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)), Some(Duration::new(0, 1)));
    /// assert_eq!(Duration::new(1, 0).checked_add(Duration::new(std::u64::MAX, 0)), None);
    /// ```
    #[inline]
    pub fn checked_add(self, rhs: Duration) -> Option<Duration> {
        if let Some(mut secs) = self.secs.checked_add(rhs.secs) {
            // Both operands satisfy nanos < NANOS_PER_SEC, so their sum can
            // carry at most one extra second.
            let mut nanos = self.nanos + rhs.nanos;
            if nanos >= NANOS_PER_SEC {
                nanos -= NANOS_PER_SEC;
                if let Some(new_secs) = secs.checked_add(1) {
                    secs = new_secs;
                } else {
                    return None;
                }
            }
            debug_assert!(nanos < NANOS_PER_SEC);
            Some(Duration {
                secs: secs,
                nanos: nanos,
            })
        } else {
            None
        }
    }

    /// Checked duration subtraction. Computes `self - other`, returning `None`
    /// if the result would be negative or if underflow occurred.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(duration_checked_ops)]
    ///
    /// use std::time::Duration;
    ///
    /// assert_eq!(Duration::new(0, 1).checked_sub(Duration::new(0, 0)), Some(Duration::new(0, 1)));
    /// assert_eq!(Duration::new(0, 0).checked_sub(Duration::new(0, 1)), None);
    /// ```
    #[inline]
    pub fn checked_sub(self, rhs: Duration) -> Option<Duration> {
        if let Some(mut secs) = self.secs.checked_sub(rhs.secs) {
            let nanos = if self.nanos >= rhs.nanos {
                self.nanos - rhs.nanos
            } else {
                // Borrow one second worth of nanoseconds from `secs`.
                if let Some(sub_secs) = secs.checked_sub(1) {
                    secs = sub_secs;
                    self.nanos + NANOS_PER_SEC - rhs.nanos
                } else {
                    return None;
                }
            };
            debug_assert!(nanos < NANOS_PER_SEC);
            Some(Duration {
                secs: secs,
                nanos: nanos,
            })
        } else {
            None
        }
    }

    /// Checked duration multiplication. Computes `self * other`, returning
    /// `None` if underflow or overflow occurred.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(duration_checked_ops)]
    ///
    /// use std::time::Duration;
    ///
    /// assert_eq!(Duration::new(0, 500_000_001).checked_mul(2), Some(Duration::new(1, 2)));
    /// assert_eq!(Duration::new(std::u64::MAX - 1, 0).checked_mul(2), None);
    /// ```
    #[inline]
    pub fn checked_mul(self, rhs: u32) -> Option<Duration> {
        // Multiply nanoseconds as u64, because it cannot overflow that way.
        let total_nanos = self.nanos as u64 * rhs as u64;
        let extra_secs = total_nanos / (NANOS_PER_SEC as u64);
        let nanos = (total_nanos % (NANOS_PER_SEC as u64)) as u32;
        if let Some(secs) = self.secs
            .checked_mul(rhs as u64)
            .and_then(|s| s.checked_add(extra_secs)) {
            debug_assert!(nanos < NANOS_PER_SEC);
            Some(Duration {
                secs: secs,
                nanos: nanos,
            })
        } else {
            None
        }
    }

    /// Checked duration division. Computes `self / other`, returning `None`
    /// if `other == 0` or the operation results in underflow or overflow.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(duration_checked_ops)]
    ///
    /// use std::time::Duration;
    ///
    /// assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
    /// assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
    /// assert_eq!(Duration::new(2, 0).checked_div(0), None);
    /// ```
    #[inline]
    pub fn checked_div(self, rhs: u32) -> Option<Duration> {
        if rhs != 0 {
            let secs = self.secs / (rhs as u64);
            // `carry` is the remainder of the seconds division; fold it into
            // the nanosecond part so no precision is lost.
            let carry = self.secs - secs * (rhs as u64);
            let extra_nanos = carry * (NANOS_PER_SEC as u64) / (rhs as u64);
            let nanos = self.nanos / rhs + (extra_nanos as u32);
            debug_assert!(nanos < NANOS_PER_SEC);
            Some(Duration {
                secs: secs,
                nanos: nanos,
            })
        } else {
            None
        }
    }
}

impl Add for Duration {
    type Output = Duration;

    fn add(self, rhs: Duration) -> Duration {
        self.checked_add(rhs).expect("overflow when adding durations")
    }
}

impl AddAssign for Duration {
    fn add_assign(&mut self, rhs: Duration) {
        *self = *self + rhs;
    }
}

impl Sub for Duration {
    type Output = Duration;

    fn sub(self, rhs: Duration) -> Duration {
        self.checked_sub(rhs).expect("overflow when subtracting durations")
    }
}

impl SubAssign for Duration {
    fn sub_assign(&mut self, rhs: Duration) {
        *self = *self - rhs;
    }
}

impl Mul<u32> for Duration {
    type Output = Duration;

    fn mul(self, rhs: u32) -> Duration {
        self.checked_mul(rhs).expect("overflow when multiplying duration by scalar")
    }
}

impl MulAssign<u32> for Duration {
    fn mul_assign(&mut self, rhs: u32) {
        *self = *self * rhs;
    }
}

impl Div<u32> for Duration {
    type Output = Duration;

    fn div(self, rhs: u32) -> Duration {
        self.checked_div(rhs).expect("divide by zero error when dividing duration by scalar")
    }
}

impl DivAssign<u32> for Duration {
    fn div_assign(&mut self, rhs: u32) {
        *self = *self / rhs;
    }
}

#[cfg(test)]
mod tests {
    use super::Duration;

    #[test]
    fn creation() {
        assert!(Duration::from_secs(1) != Duration::from_secs(0));
        assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
                   Duration::from_secs(3));
        assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
                   Duration::new(4, 10 * 1_000_000));
        assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
    }

    #[test]
    fn secs() {
        assert_eq!(Duration::new(0, 0).as_secs(), 0);
        assert_eq!(Duration::from_secs(1).as_secs(), 1);
        assert_eq!(Duration::from_millis(999).as_secs(), 0);
        assert_eq!(Duration::from_millis(1001).as_secs(), 1);
    }

    #[test]
    fn nanos() {
        assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
        assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
        assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
        assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
        assert_eq!(Duration::from_millis(999).subsec_nanos(), 999 * 1_000_000);
        assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1 * 1_000_000);
    }

    #[test]
    fn add() {
        assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
                   Duration::new(0, 1));
        assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
                   Duration::new(1, 1));
    }

    #[test]
    fn checked_add() {
        assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
                   Some(Duration::new(0, 1)));
        assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
                   Some(Duration::new(1, 1)));
        assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::u64::MAX, 0)), None);
    }

    #[test]
    fn sub() {
        assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
                   Duration::new(0, 1));
        assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
                   Duration::new(0, 1));
        assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
                   Duration::new(0, 999_999_999));
    }

    #[test]
    fn checked_sub() {
        let zero = Duration::new(0, 0);
        let one_nano = Duration::new(0, 1);
        let one_sec = Duration::new(1, 0);
        assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
        assert_eq!(one_sec.checked_sub(one_nano),
                   Some(Duration::new(0, 999_999_999)));
        assert_eq!(zero.checked_sub(one_nano), None);
        assert_eq!(zero.checked_sub(one_sec), None);
    }

    #[test]
    #[should_panic]
    fn sub_bad1() {
        Duration::new(0, 0) - Duration::new(0, 1);
    }

    #[test]
    #[should_panic]
    fn sub_bad2() {
        Duration::new(0, 0) - Duration::new(1, 0);
    }

    #[test]
    fn mul() {
        assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
        assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
        assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
        assert_eq!(Duration::new(0, 500_000_001) * 4000,
                   Duration::new(2000, 4000));
    }

    #[test]
    fn checked_mul() {
        assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
        assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
        assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
        assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
                   Some(Duration::new(2000, 4000)));
        assert_eq!(Duration::new(::u64::MAX - 1, 0).checked_mul(2), None);
    }

    #[test]
    fn div() {
        assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
        assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
        assert_eq!(Duration::new(99, 999_999_000) / 100,
                   Duration::new(0, 999_999_990));
    }

    #[test]
    fn checked_div() {
        assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
        assert_eq!(Duration::new(1, 0).checked_div(2),
                   Some(Duration::new(0, 500_000_000)));
        assert_eq!(Duration::new(2, 0).checked_div(0), None);
    }
}
true
bb7a8fd0c691f00e49659451d130d7a250c4efea
Rust
tor-legit/LRVM
/lrvm_tools/src/exceptions/native.rs
UTF-8
8,643
3.21875
3
[]
no_license
// NOTE(review): the text below was flattened onto a few physical lines by the dataset
// export; the `//` and `///` comments embedded mid-line swallow the code that follows
// them, so this block is not compilable as-is and is preserved byte-for-byte.
// Purpose (from the visible code): `NativeException` models VM fault codes with an
// optional 16-bit payload. `decode`/`decode_with_mode`/`decode_parts` map a code byte
// (plus payload) to a variant; `code`/`associated_data`/`encode` are the inverse, and
// `encode_with_mode` additionally sets bit 24 when the fault happened in supervisor mode.
// NOTE(review): `panic!` is never used here, but `Reg::from_code(*reg_id).unwrap()`
// inside the `fmt::Display` impl will panic for register codes `Reg` does not know —
// presumably unreachable for values produced by `decode`, but confirm before using
// `Display` on hand-built exceptions.
// NOTE(review): `0xA0 => HardwareException` round-trips through
// `AuxHwException::decode`/`encode`; semantics of that payload live in the
// `exceptions::aux` module, not visible here.
use crate::asm::Reg; use crate::exceptions::AuxHwException; use std::fmt; /// Describe a native exception pub enum NativeException { UnknownOpCode(u8), UnknownRegister(u8), ReadProtectedRegister(u8), WriteProtectedRegister(u8), UnalignedMemoryAddress { unalignment: u8 }, MmuRefusedRead(u16), MmuRefusedWrite(u16), MmuRefusedExec(u16), SupervisorReservedInstruction(u8), DivisionOrModByZero, OverflowingDivOrMod, InvalidCondFlag(u8), InvalidCondMode(u8), UnknownComponentId(u16), UnknownHardwareInformationCode(u8), ComponentNotMapped(u16), HardwareException(AuxHwException), Interruption(u8), } impl NativeException { /// Decode a native exception pub fn decode(ex: u32) -> Result<Self, ()> { Self::decode_with_mode(ex).map(|(ex, _)| ex) } /// Decode a native exception along with the supervisor status. /// If the error is indicated to have happened in supervisor mode, the second member of the returned tuple is set to `true`. /// If it's `false`, the error indicates to have happened in userland mode. pub fn decode_with_mode(ex: u32) -> Result<(Self, bool), ()> { let bytes = ex.to_be_bytes(); let code = bytes[1]; let associated = u16::from_be_bytes([bytes[2], bytes[3]]); Ok((Self::decode_parts(code, Some(associated))?, bytes[0] != 0)) } /// Decode a split exception pub fn decode_parts(code: u8, associated: Option<u16>) -> Result<Self, ()> { let data_or_err = associated.ok_or(()); match code { 0x01 => Ok(Self::UnknownOpCode(data_or_err? as u8)), 0x02 => Ok(Self::UnknownRegister(data_or_err? as u8)), 0x03 => Ok(Self::ReadProtectedRegister(data_or_err? as u8)), 0x04 => Ok(Self::WriteProtectedRegister(data_or_err? as u8)), 0x05 => Ok(Self::UnalignedMemoryAddress { unalignment: data_or_err? as u8, }), 0x06 => Ok(Self::MmuRefusedRead(data_or_err?)), 0x07 => Ok(Self::MmuRefusedWrite(data_or_err?)), 0x08 => Ok(Self::MmuRefusedExec(data_or_err?)), 0x09 => Ok(Self::SupervisorReservedInstruction(data_or_err? 
as u8)), 0x0A => Ok(Self::DivisionOrModByZero), 0x0B => Ok(Self::OverflowingDivOrMod), 0x0C => Ok(Self::InvalidCondFlag(data_or_err? as u8)), 0x0D => Ok(Self::InvalidCondMode(data_or_err? as u8)), 0x10 => Ok(Self::UnknownComponentId(data_or_err?)), 0x11 => Ok(Self::UnknownHardwareInformationCode(data_or_err? as u8)), 0x12 => Ok(Self::ComponentNotMapped(data_or_err?)), 0xA0 => Ok(Self::HardwareException(AuxHwException::decode( data_or_err?, )?)), 0xF0 => Ok(Self::Interruption(data_or_err? as u8)), _ => Err(()), } } /// Get the exception's code pub fn code(&self) -> u8 { match self { Self::UnknownOpCode(_) => 0x01, Self::UnknownRegister(_) => 0x02, Self::ReadProtectedRegister(_) => 0x03, Self::WriteProtectedRegister(_) => 0x04, Self::UnalignedMemoryAddress { unalignment: _ } => 0x05, Self::MmuRefusedRead(_) => 0x06, Self::MmuRefusedWrite(_) => 0x07, Self::MmuRefusedExec(_) => 0x08, Self::SupervisorReservedInstruction(_) => 0x09, Self::DivisionOrModByZero => 0x0A, Self::OverflowingDivOrMod => 0x0B, Self::InvalidCondFlag(_) => 0x0C, Self::InvalidCondMode(_) => 0x0D, Self::UnknownComponentId(_) => 0x10, Self::UnknownHardwareInformationCode(_) => 0x11, Self::ComponentNotMapped(_) => 0x12, Self::HardwareException(_) => 0xA0, Self::Interruption(_) => 0xF0, } } /// Get the exception's eventual associated data pub fn associated_data(&self) -> Option<u16> { match self { Self::UnknownOpCode(opcode) => Some((*opcode).into()), Self::UnknownRegister(reg_id) => Some((*reg_id).into()), Self::ReadProtectedRegister(reg_id) => Some((*reg_id).into()), Self::WriteProtectedRegister(reg_id) => Some((*reg_id).into()), Self::UnalignedMemoryAddress { unalignment } => Some((*unalignment).into()), Self::MmuRefusedRead(addr_lower) => Some(*addr_lower), Self::MmuRefusedWrite(addr_lower) => Some(*addr_lower), Self::MmuRefusedExec(addr_lower) => Some(*addr_lower), Self::SupervisorReservedInstruction(opcode) => Some((*opcode).into()), Self::DivisionOrModByZero => None, Self::OverflowingDivOrMod => 
None, Self::InvalidCondFlag(flag) => Some((*flag).into()), Self::InvalidCondMode(flag) => Some((*flag).into()), Self::UnknownComponentId(id_lower) => Some(*id_lower), Self::UnknownHardwareInformationCode(code) => Some((*code).into()), Self::ComponentNotMapped(id_lower) => Some(*id_lower), Self::HardwareException(hw_ex) => Some(hw_ex.encode()), Self::Interruption(code) => Some((*code).into()), } } /// Encode the exception on 24-bits pub fn encode(&self) -> u32 { ((self.code() as u32) << 16) + self.associated_data().unwrap_or(0) as u32 } /// Encode the exception with supervisor informations on 32-bits. /// `was_sv` indicates if the error occurred in supervisor mode (else it was on userland mode). pub fn encode_with_mode(&self, was_sv: bool) -> u32 { self.encode() + if was_sv { 1 << 24 } else { 0 } } } impl fmt::Display for NativeException { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match self { Self::UnknownOpCode(opcode) => format!("Unknown opcode {:#004X}", opcode), Self::UnknownRegister(reg_id) => format!("Unknown register code {:#004X}", reg_id), Self::ReadProtectedRegister(reg_id) => format!( "Register {} cannot be read in this mode", Reg::from_code(*reg_id).unwrap() ), Self::WriteProtectedRegister(reg_id) => format!( "Register {} cannot be written in this mode", Reg::from_code(*reg_id).unwrap() ), Self::UnalignedMemoryAddress { unalignment } => format!("Unaligned memory address (unalignment is {})", unalignment), Self::MmuRefusedRead(addr_lower) => format!( "Address cannot be read in this mode (address' weakest bits are {:#006X})", addr_lower ), Self::MmuRefusedWrite(addr_lower) => format!( "Address cannot be written in this mode (address' weakest bits are {:#006X})", addr_lower ), Self::MmuRefusedExec(addr_lower) => format!( "Address cannot be executed in this mode (address' weakest bits are {:#006X})", addr_lower ), Self::SupervisorReservedInstruction(opcode) => format!( "Instruction with opcode {:#004X} cannot be run in userland 
mode", opcode ), Self::DivisionOrModByZero => "Cannot perform a division or modulus by zero".to_string(), Self::OverflowingDivOrMod => "Cannot perform an overflowing division or modulus".to_string(), Self::InvalidCondFlag(flag) => format!("Invalid IF/IF2 flag provided: {:#004X}", flag), Self::InvalidCondMode(mode) => format!("Invalid IF2 condition mode provided: {:#004X}", mode), Self::UnknownComponentId(id_lower) => format!("Unknown component ID (weakest bits are {:#006X})", id_lower), Self::UnknownHardwareInformationCode(code) => format!("Unknown hardware information code {:#004X}", code), Self::ComponentNotMapped(id_lower) => format!("Component with ID {:#004X} is not mapped", id_lower), Self::HardwareException(hw_ex) => format!("Hardware exception: {}", hw_ex), Self::Interruption(code) => format!("Interruption (code {:#004X})", code), } ) } }
true
19ca3677489a0300922913b3261475d6a039aaba
Rust
rayman520/Untitled1
/src/utils/mod.rs
UTF-8
487
2.765625
3
[]
no_license
use std::process;

pub mod events;
pub mod keyboard;

/// Unwraps a `Result`, terminating the whole process on failure instead of
/// propagating the error.
pub trait CheckResult<T, E> {
    fn check_result(self) -> T;
}

impl<T, E: ::std::fmt::Debug> CheckResult<T, E> for Result<T, E> {
    /// Returns the `Ok` value; on `Err`, prints the error (via `Debug`) and
    /// exits the process with status 1.
    fn check_result(self) -> T {
        match self {
            Ok(value) => value,
            Err(cause) => {
                println!("Error: {:?}", cause);
                process::exit(1);
            }
        }
    }
}

/// Prints `err` and terminates the process with status 1.
pub fn exit(err: &str) {
    println!("{}", err);
    ::std::process::exit(1);
}
true
d3f19c10cf9bffa55f3bfc48a9358750d63310d1
Rust
jsdw/boundvariable
/src/common/broadcaster.rs
UTF-8
4,584
3.40625
3
[]
no_license
// NOTE(review): flattened by the dataset export — the mid-line `///` comments swallow
// the code after them, so this text is preserved byte-for-byte rather than restyled.
// Purpose (from the visible code): `Broadcaster` fans a stream of bytes out to a
// dynamic set of subscribed sinks. `Msg::Subscribe` registers a sink, `Msg::Broadcast`
// sends one byte to every registered sink (dropping sinks whose send errors), and
// `Msg::Close` closes the control channel; a `()` is sent on the returned receiver
// when the spawned task finishes.
// NOTE(review): this uses the pre-stabilization async ecosystem — `await!` macro,
// `tokio::spawn_async`, and futures 0.1 `Sink`/`mpsc` — so it only builds on the
// nightly toolchains of that era; porting to std async/await would be a rewrite,
// not a restyle.
use tokio::prelude::*; use futures::sync::mpsc; #[derive(Clone)] pub struct Broadcaster { sender: mpsc::UnboundedSender<Msg> } enum Msg { Subscribe(Box<dyn Sink<SinkItem=u8, SinkError=()> + Send + Sync + 'static>), Broadcast(u8), Close } /// This structure adds a convenient interface which you to /// subscribe and send messages to the broadcaster: impl Broadcaster { pub fn new() -> (Broadcaster, mpsc::Receiver<()>) { make_broadcaster() } pub async fn subscribe(&mut self, sink: impl Sink<SinkItem=u8, SinkError=()> + Send + Sync + 'static) -> () { let msg = Msg::Subscribe(Box::new(sink)); let _ = await!(self.sender.send_async(msg)); } pub async fn close(&mut self) -> () { let _ = await!(self.sender.send_async(Msg::Close)); } } /// Broadcaster is also a valid Sink, to avoid needing to consume the inner sink /// on every attempt to send a byte into it, and allow us to use `.forward` to /// stream bytes into it. impl Sink for Broadcaster { type SinkItem = u8; type SinkError = (); fn start_send(&mut self, byte: u8) -> Result<AsyncSink<u8>, Self::SinkError> { match self.sender.start_send(Msg::Broadcast(byte)) { Err(_) => Err(()), Ok(inner) => Ok(inner.map(|_| byte)) } } fn poll_complete(&mut self) -> Poll<(), Self::SinkError> { self.sender .poll_complete() .map_err(|_| ()) } } /// Create a new byte broadcaster (this will panic if it does not execute in the context /// of a tokio runtime). You can subscribe new Sinks and broadcast bytes to them. If a sink /// errors (eg it is no longer possible to send to it) it is no longer broadcasted to. 
fn make_broadcaster() -> (Broadcaster, mpsc::Receiver<()>) { let (send_broadcaster, mut recv_broadcaster) = mpsc::unbounded(); let (mut send_closed, recv_closed) = mpsc::channel::<()>(0); tokio::spawn_async(async move { let mut outputters: Vec<BoxedSink<u8,()>> = vec![]; while let Some(res) = await!(recv_broadcaster.next()) { let msg = match res { Ok(byte) => byte, Err(e) => { return eprintln!("Error receiving msg to broadcast: {:?}", e); } }; match msg { // Subscribe a Sink to being sent output: Msg::Subscribe(sink) => { // Subscribe a new sink to receive output. We have to newtype // the sink into our own struct since Sink isn't implemented // on Box<dyn Sink> for some reason: outputters.push(BoxedSink(sink)); }, // Get given some output to send: Msg::Broadcast(byte) => { // Send a message to each sink, recording any that failed: let mut errored = vec![]; for (i, sink) in outputters.iter_mut().enumerate() { if let Err(_) = await!(sink.send_async(byte)) { errored.push(i); } } // If sending to a sink fails, remove it from the vec: if errored.len() > 0 { outputters = outputters.into_iter().enumerate().filter_map(|(i,sink)| { if errored.iter().find(|&&n| i == n).is_some() { None } else { Some(sink) } }).collect(); } }, // Close the broadcaster so it can receive no more output Msg::Close => { recv_broadcaster.close(); } } } let _ = await!(send_closed.send_async(())); }); // return our interface: (Broadcaster { sender: send_broadcaster, }, recv_closed) } // This is necessary to make Boxed Sinks actually impl the Sink trait, // as for some reason they do not appear to at the moment: struct BoxedSink<I,E>(Box<dyn Sink<SinkItem=I, SinkError=E> + Send + Sync + 'static>); impl <I,E> Sink for BoxedSink<I,E> { type SinkItem = I; type SinkError = E; fn start_send(&mut self, input: Self::SinkItem) -> Result<AsyncSink<Self::SinkItem>, Self::SinkError> { self.0.start_send(input) } fn poll_complete(&mut self) -> Poll<(), Self::SinkError> { self.0.poll_complete() } }
true
ba036c8094d9f5e473ad22584d507888cebc6c6c
Rust
TianyiShi2001/nom-pdb
/src/primary_structure/modres.rs
UTF-8
4,599
2.65625
3
[ "MIT" ]
permissive
// Copyright (c) 2020 Tianyi Shi // // This software is released under the MIT License. // https://opensource.org/licenses/MIT //! The [MODRES](http://www.wwpdb.org/documentation/file-format-content/format33/sect3.html#MODRES) //! record provides descriptions of modifications (e.g., chemical or post-translational) to protein //! and nucleic acid residues. Included are correlations between residue names given in a PDB entry //! and standard residues. //! //! # Record Format //! //! | COLUMNS | DATA TYPE | FIELD | DEFINITION | //! | ------- | ------------ | -------- | ---------------------------------------- | //! | 1 - 6 | Record name | "MODRES" | | //! | 8 - 11 | IDcode | idCode | ID code of this entry. | //! | 13 - 15 | Residue name | resName | Residue name used in this entry. | //! | 17 | Character | chainID | Chain identifier. | //! | 19 - 22 | Integer | seqNum | Sequence number. | //! | 23 | AChar | iCode | Insertion code. | //! | 25 - 27 | Residue name | stdRes | Standard residue name. | //! | 30 - 70 | String | comment | Description of the residue modification. 
| // // * MODRES comes after SEQRES, thus non-standard residue names in SEQRES cannot be identified // directly use crate::{ common::parser::parse_right, types::{ ModifiedAminoAcid, ModifiedAminoAcidTable, ModifiedNucleotide, ModifiedNucleotideTable, StandardAminoAcid, StandardNucleotide, TryParseFw3, }, }; use nom::{ bytes::complete::take, character::complete::{anychar, line_ending}, IResult, }; pub struct ModresParser; // impl ModresParser { // fn parse_into_structure(inp: &[u8], structure: &mut Structure) -> IResult<&[u8], ()> { // let mut res = HashMap::new(); // let mut inp = inp; // loop { // let (i, _) = Self::parse_oneline(inp, &mut res)?; // if peek(take(6usize))(i)?.1 != "MODRES" { // return Ok((i, res)); // } // let (i, _) = take(6usize)(i)?; // inp = i; // } // } // } impl ModresParser { pub fn parse_into<'a>( inp: &'a [u8], modified_aa: &mut ModifiedAminoAcidTable, modified_nuc: &mut ModifiedNucleotideTable, ) -> IResult<&'a [u8], ()> { let inp = &inp[6..]; let (inp, name) = take(3usize)(inp)?; let name = unsafe { std::str::from_utf8_unchecked(name).to_owned() }; let inp = &inp[1..]; let (inp, _chain) = anychar(inp)?; let inp = &inp[1..]; let (inp, _sequence_number) = parse_right::<u32>(inp, 4usize)?; let (inp, _insertion_code) = anychar(inp)?; let inp = &inp[1..]; let (inp, standard_res) = take(3usize)(inp)?; let inp = &inp[2..]; let (inp, description) = take(51usize)(inp)?; let description = unsafe { std::str::from_utf8_unchecked(description) .trim_end() .to_owned() }; if let Some(standard) = StandardAminoAcid::try_parse_fw3(standard_res) { modified_aa.insert( name, ModifiedAminoAcid { standard, description, }, ); } else if let Some(standard) = StandardNucleotide::try_parse_fw3(standard_res) { modified_nuc.insert( name, ModifiedNucleotide { standard, description, }, ); } else { panic!(format!("Mapping modified residue to standard residue, but encountered invalid standard residue: {:?}", std::str::from_utf8(standard_res).unwrap())) } let (inp, _) = 
line_ending(inp)?; Ok((inp, ())) } } // #[cfg(test)] // mod tests { // use super::*; // #[test] // fn test_modres() { // let inp = " 1A8O MSE A 151 MET SELENOMETHIONINE // MODRES 1A8O MSE A 185 MET SELENOMETHIONINE // MODRES 1A8O FOO A 214 MET FOOBARBAZATONINE // MODRES 1A8O FOO A 215 MET FOOBARBAZATONINE // XXXXXX ..."; // let (i, modres) = ModresParser::parse(inp).unwrap(); // assert_eq!("XXXXXX ...", i); // assert_eq!(modres.get("FOO").unwrap().occurence.len(), 2usize); // assert_eq!(&modres.get("FOO").unwrap().description, "FOOBARBAZATONINE"); // } // }
true
c3e78f9d6414b26cb8978b1dd8332be8f7db1b74
Rust
flip1995/rust-clippy
/tests/ui/suspicious_to_owned.rs
UTF-8
1,926
2.6875
3
[ "MIT", "Apache-2.0" ]
permissive
// NOTE(review): this is a clippy UI-test fixture for the `suspicious_to_owned` and
// `implicit_clone` lints. Its value lies in its exact byte/line layout, which must
// match the recorded `.stderr` expectations in the clippy repository — in its original
// home, any edit (even an added comment) shifts diagnostic line numbers and requires
// regenerating the expected output. It is therefore preserved byte-for-byte here.
//@no-rustfix: overlapping suggestions #![warn(clippy::suspicious_to_owned)] #![warn(clippy::implicit_clone)] #![allow(clippy::redundant_clone)] use std::borrow::Cow; use std::ffi::{c_char, CStr}; fn main() { let moo = "Moooo"; let c_moo = b"Moooo\0"; let c_moo_ptr = c_moo.as_ptr() as *const c_char; let moos = ['M', 'o', 'o']; let moos_vec = moos.to_vec(); // we expect this to be linted let cow = Cow::Borrowed(moo); let _ = cow.to_owned(); // we expect no lints for this let cow = Cow::Borrowed(moo); let _ = cow.into_owned(); // we expect no lints for this let cow = Cow::Borrowed(moo); let _ = cow.clone(); // we expect this to be linted let cow = Cow::Borrowed(&moos); let _ = cow.to_owned(); // we expect no lints for this let cow = Cow::Borrowed(&moos); let _ = cow.into_owned(); // we expect no lints for this let cow = Cow::Borrowed(&moos); let _ = cow.clone(); // we expect this to be linted let cow = Cow::Borrowed(&moos_vec); let _ = cow.to_owned(); // we expect no lints for this let cow = Cow::Borrowed(&moos_vec); let _ = cow.into_owned(); // we expect no lints for this let cow = Cow::Borrowed(&moos_vec); let _ = cow.clone(); // we expect this to be linted let cow = unsafe { CStr::from_ptr(c_moo_ptr) }.to_string_lossy(); let _ = cow.to_owned(); // we expect no lints for this let cow = unsafe { CStr::from_ptr(c_moo_ptr) }.to_string_lossy(); let _ = cow.into_owned(); // we expect no lints for this let cow = unsafe { CStr::from_ptr(c_moo_ptr) }.to_string_lossy(); let _ = cow.clone(); // we expect no lints for these let _ = moo.to_owned(); let _ = c_moo.to_owned(); let _ = moos.to_owned(); // we expect implicit_clone lints for these let _ = String::from(moo).to_owned(); let _ = moos_vec.to_owned(); }
true
af0415350f522983220ea885c0cb1ce8f4e6024b
Rust
phozzy/mergesort
/src/main.rs
UTF-8
1,700
3.421875
3
[]
no_license
/// Recursive merge sort over a vector of `i32`.
///
/// Returns a new vector with the elements of `unsorted` in ascending order.
/// Slices of length 0 or 1 are returned as-is; this base case also fixes the
/// original unbounded recursion (splitting an empty vector at 0 recursed
/// forever and eventually panicked inside `merge`).
fn sort(unsorted: Vec<i32>) -> Vec<i32> {
    if unsorted.len() <= 1 {
        return unsorted;
    }
    let (left, right) = unsorted.split_at(unsorted.len() / 2);
    merge(sort(left.to_vec()), sort(right.to_vec()))
}

/// Merges two ascending-sorted vectors into one ascending-sorted vector.
///
/// Ties are resolved by taking the element from `second_sorted` first, which
/// matches the original `f < s` branch order. Unlike the original, empty
/// inputs are handled gracefully instead of panicking with "Empy array!".
fn merge(first_sorted: Vec<i32>, second_sorted: Vec<i32>) -> Vec<i32> {
    let mut output = Vec::with_capacity(first_sorted.len() + second_sorted.len());
    let mut first = first_sorted.iter().peekable();
    let mut second = second_sorted.iter().peekable();
    loop {
        match (first.peek(), second.peek()) {
            // Both sides still have elements: take the smaller one
            // (`second` wins ties, as in the original implementation).
            (Some(&&f), Some(&&s)) => {
                if f < s {
                    output.push(f);
                    first.next();
                } else {
                    output.push(s);
                    second.next();
                }
            }
            // Only one side left: drain it.
            (Some(&&f), None) => {
                output.push(f);
                first.next();
            }
            (None, Some(&&s)) => {
                output.push(s);
                second.next();
            }
            (None, None) => break,
        }
    }
    output
}

fn main() {
    let second_sorted = vec![3, 5, 9];
    let first_sorted = vec![2, 7];
    let output = merge(first_sorted, second_sorted);
    for it in output {
        println!("{}", it);
    }
    println!("sorted");
    let unsorted = vec![8, 2, 3, 1, 5];
    for it in sort(unsorted) {
        println!("{}", it);
    }
}
true
d556568a29a59314494de1e40e3ee87e30cfcbc2
Rust
linkerd/linkerd2-proxy
/linkerd/proxy/http/src/client_handle.rs
UTF-8
1,885
2.6875
3
[ "Apache-2.0" ]
permissive
use std::{ future::Future, net::SocketAddr, pin::Pin, sync::Arc, task::{Context, Poll}, }; use tokio::sync::Notify; /// A server-set extension that holds information about the client. #[derive(Clone, Debug)] pub struct ClientHandle { /// The peer address of the client. pub addr: SocketAddr, /// Notifies the client to shutdown its connection. pub close: Close, } /// A handle that signals the client connection to close. #[derive(Clone, Debug)] pub struct Close(Arc<Notify>); pub type Closed = Pin<Box<dyn Future<Output = ()> + Send + 'static>>; /// A middleware that adds a clone of the `ClientHandle` as an extension to each /// request. #[derive(Clone, Debug)] pub struct SetClientHandle<S> { inner: S, handle: ClientHandle, } // === Close === impl Close { pub fn close(&self) { self.0.notify_one() } } impl ClientHandle { pub fn new(addr: SocketAddr) -> (ClientHandle, Closed) { let notify = Arc::new(Notify::new()); let handle = ClientHandle { addr, close: Close(notify.clone()), }; let closed = Box::pin(async move { notify.notified().await; }); (handle, closed) } } // === SetClientHandle === impl<S> SetClientHandle<S> { pub fn new(handle: ClientHandle, inner: S) -> Self { Self { inner, handle } } } impl<B, S> tower::Service<http::Request<B>> for SetClientHandle<S> where S: tower::Service<http::Request<B>>, { type Response = S::Response; type Error = S::Error; type Future = S::Future; fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { self.inner.poll_ready(cx) } fn call(&mut self, mut req: http::Request<B>) -> Self::Future { req.extensions_mut().insert(self.handle.clone()); self.inner.call(req) } }
true
55544c660f10dc06b8d8d1cd31e83aa777780284
Rust
nguyenminhhieu12041996/casper-node
/types/src/block_time.rs
UTF-8
1,244
3.03125
3
[ "Apache-2.0" ]
permissive
use alloc::vec::Vec; use crate::bytesrepr::{Error, FromBytes, ToBytes, U64_SERIALIZED_LENGTH}; /// The number of bytes in a serialized [`BlockTime`]. pub const BLOCKTIME_SERIALIZED_LENGTH: usize = U64_SERIALIZED_LENGTH; /// A newtype wrapping a [`u64`] which represents the block time. #[derive(Clone, Copy, Default, Debug, PartialEq, Eq, PartialOrd)] pub struct BlockTime(u64); impl BlockTime { /// Constructs a `BlockTime`. pub fn new(value: u64) -> Self { BlockTime(value) } /// Saturating integer subtraction. Computes `self - other`, saturating at `0` instead of /// overflowing. pub fn saturating_sub(self, other: BlockTime) -> Self { BlockTime(self.0.saturating_sub(other.0)) } } impl From<BlockTime> for u64 { fn from(blocktime: BlockTime) -> Self { blocktime.0 } } impl ToBytes for BlockTime { fn to_bytes(&self) -> Result<Vec<u8>, Error> { self.0.to_bytes() } fn serialized_length(&self) -> usize { BLOCKTIME_SERIALIZED_LENGTH } } impl FromBytes for BlockTime { fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let (time, rem) = FromBytes::from_bytes(bytes)?; Ok((BlockTime::new(time), rem)) } }
true
ac9fedc1bd5db44eb06f12e5dc92693a795326c3
Rust
jDomantas/shroom
/spark-emu/src/vm.rs
UTF-8
13,057
3
3
[]
no_license
// NOTE(review): flattened by the dataset export; preserved byte-for-byte. The
// interpreter below is exact-statement-order dependent (flag updates, `Wrapping`
// arithmetic, stack discipline), so restyling the collapsed text would risk silent
// behaviour changes; only these notes are added.
// Purpose (from the visible code): `Vm` interprets a loaded `Exe` — a `CodeSection`
// starting at CODE_START and a `DataSection` that prepends a zero-initialized stack
// of STACK_SIZE bytes before the data image. `cycle` decodes one `Instr` at `rip` and
// `execute_instr` dispatches it; syscall 0 exits the process, 1 reads a byte from
// stdin (returns 256 on EOF, per `read_byte`), 2 writes a byte to stdout.
// NOTE(review): `execute_instr` validates `rsp % 8 == 0` only AFTER executing the
// instruction, and `DataSection::access` enforces 8-byte alignment on every data
// access — presumably intentional, but confirm against the spark-emu spec.
use std::fmt; use std::io::{self, Read, Write}; use std::iter::FromIterator; use std::num::Wrapping; use instruction::Instr; use executable::{Exe, CODE_START, DATA_START, STACK_START, STACK_SIZE}; #[derive(Debug)] pub enum LoadError { BadDataLength(usize), } impl fmt::Display for LoadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { LoadError::BadDataLength(len) => write!(f, "data section length must be divisible by 8, but is {}", len), } } } #[derive(Debug)] pub struct SmallByteSlice { bytes: [u8; 10], } impl FromIterator<u8> for SmallByteSlice { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = u8>, { let mut result = SmallByteSlice { bytes: [0; 10] }; for (index, byte) in iter.into_iter().take(10).enumerate() { result.bytes[index] = byte; } result } } impl fmt::LowerHex for SmallByteSlice { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut list = f.debug_list(); for &byte in &self.bytes { list.entry(&format_args!("{:>02x}", byte)); } list.finish() } } #[derive(Debug)] pub enum ExecError { MisalignedDataAccess(u64), BadDataAccess(u64), BadCodeRead(u64), MisalignedStack(u64), InvalidInstruction(SmallByteSlice), Io(io::Error), BadDivide, DivByZero, InvalidSyscall(u64), } impl fmt::Display for ExecError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ExecError::MisalignedDataAccess(addr) => write!(f, "misaligned data access at {:#x}", addr), ExecError::BadDataAccess(addr) => write!(f, "out of range data access at {:#x}", addr), ExecError::BadCodeRead(addr) => write!(f, "out of range code access at {:#x}", addr), ExecError::MisalignedStack(sp) => write!(f, "misaligned stack with rsp = {:#x}", sp), ExecError::InvalidInstruction(ref bytes) => write!(f, "cannot decode instruction from {:#x}", bytes), ExecError::Io(ref e) => write!(f, "{}", e), ExecError::BadDivide => write!(f, "attempted to divide with rdx != 0"), ExecError::DivByZero => write!(f, "attempted to divide by 0"), 
ExecError::InvalidSyscall(id) => write!(f, "unknown syscall id: {}", id), } } } impl From<io::Error> for ExecError { fn from(err: io::Error) -> ExecError { ExecError::Io(err) } } pub type ExecResult<T> = Result<T, ExecError>; #[derive(Clone)] struct DataSection { start_address: u64, data: Vec<u64>, } impl DataSection { fn new(data: Vec<u8>) -> Result<Self, LoadError> { assert_eq!(STACK_START + STACK_SIZE, DATA_START); assert_eq!(STACK_SIZE % 8, 0); let mut converted_data = Vec::new(); // zero initialize stack for _ in 0..(STACK_SIZE / 8) { converted_data.push(0u64); } if data.len() % 8 != 0 { return Err(LoadError::BadDataLength(data.len())); } let mut pos = 0; let mut curr = 0; let mut taken = 0; while pos < data.len() { let byte = u64::from(data[pos]); curr += byte << (taken * 8); taken += 1; pos += 1; if taken % 8 == 0 { converted_data.push(curr); taken = 0; curr = 0; } } Ok(DataSection { start_address: STACK_START, data: converted_data, }) } fn access(&mut self, addr: u64) -> ExecResult<&mut u64> { if self.data.len() == 0 { return Err(ExecError::BadDataAccess(addr)); } let last_address = self.start_address + (self.data.len() - 1) as u64 * 8; if addr < self.start_address || addr > last_address { return Err(ExecError::BadDataAccess(addr)); } let addr2 = addr - self.start_address; if addr2 % 8 != 0 { return Err(ExecError::MisalignedDataAccess(addr)); } Ok(&mut self.data[(addr2 / 8) as usize]) } } #[derive(Clone)] struct CodeSection { start_address: u64, data: Vec<u8>, } impl CodeSection { fn new(data: Vec<u8>) -> Self { CodeSection { start_address: CODE_START, data, } } fn load_slice(&self, addr: u64) -> ExecResult<&[u8]> { if addr < self.start_address || addr >= self.start_address + self.data.len() as u64 { return Err(ExecError::BadCodeRead(addr)); } let addr2 = addr - self.start_address; Ok(&self.data[addr2 as usize..]) } } pub struct Vm<'a> { rip: Wrapping<u64>, rax: Wrapping<u64>, rbx: Wrapping<u64>, rdx: Wrapping<u64>, rsp: Wrapping<u64>, rbp: Wrapping<u64>, 
below_flag: bool, zero_flag: bool, code: CodeSection, data: DataSection, stdin: &'a mut (Read + 'a), stdout: &'a mut (Write + 'a), have_pending_writes: bool, trace_instructions: bool, } impl<'a> Vm<'a> { pub fn new( exe: Exe, stdin: &'a mut (Read + 'a), stdout: &'a mut (Write + 'a), trace_instructions: bool, ) -> Result<Self, LoadError> { let code = CodeSection::new(exe.code); let data = DataSection::new(exe.data)?; Ok(Vm { rip: Wrapping(CODE_START), rax: Wrapping(0), rbx: Wrapping(0), rdx: Wrapping(0), rsp: Wrapping(STACK_START + STACK_SIZE), rbp: Wrapping(0), below_flag: false, zero_flag: false, code, data, stdin, stdout, have_pending_writes: false, trace_instructions, }) } pub fn cycle(&mut self) -> ExecResult<()> { let instr = { let code_view = self.code.load_slice(self.rip.0)?; if let Some(instr) = Instr::decode(code_view) { instr } else { let code = code_view.iter().cloned().take(10).collect(); return Err(ExecError::InvalidInstruction(code)); } }; self.execute_instr(instr) } fn execute_instr(&mut self, instr: Instr) -> ExecResult<()> { if self.trace_instructions { eprintln!("rip = {:#x}, instruction: {}", self.rip.0, instr); } self.rip += Wrapping(instr.len()); match instr { Instr::AddRaxRbx => { self.rax += self.rbx; } Instr::AddRsp(value) => { self.rsp += Wrapping(value); } Instr::Call(offset) => { let return_addr = self.rip.0; self.push(return_addr)?; self.rip += Wrapping(offset); } Instr::CmpRaxRbx => { self.below_flag = self.rax < self.rbx; self.zero_flag = self.rax == self.rbx; } Instr::DivRbx => { if self.rdx.0 != 0 { return Err(ExecError::BadDivide); } if self.rbx.0 == 0 { return Err(ExecError::DivByZero); } self.rdx = self.rax % self.rbx; self.rax = self.rax / self.rbx; } Instr::Jmp(offset) => { self.rip += Wrapping(offset); } Instr::Jnz(offset) => { if !self.zero_flag { self.rip += Wrapping(offset); } } Instr::Jz(offset) => { if self.zero_flag { self.rip += Wrapping(offset); } } Instr::LeaRaxRbpOffset(offset) => { self.rax = self.rbp + 
Wrapping(offset); } Instr::MovRax(val) => { self.rax = Wrapping(val); } Instr::MovRaxOffsetRbx(offset) => { let addr = (self.rax + Wrapping(offset)).0; *self.data.access(addr)? = self.rbx.0; } Instr::MovRaxQwordRsp => { let value = *self.data.access(self.rsp.0)?; self.rax = Wrapping(value); } Instr::MovRaxRspOffset(offset) => { let addr = (self.rsp + Wrapping(offset)).0; self.rax = Wrapping(*self.data.access(addr)?); } Instr::MovRbpRsp => { self.rbp = self.rsp; } Instr::MovRbxRspRaxOffset(offset) => { let addr = (self.rsp + self.rax + Wrapping(offset)).0; self.rbp = Wrapping(*self.data.access(addr)?); } Instr::MovRspOffsetRbx(offset) => { let addr = (self.rsp + Wrapping(offset)).0; *self.data.access(addr)? = self.rbx.0; } Instr::MulRbx => { self.rax *= self.rbx; } Instr::PopRax => { self.rax = Wrapping(self.pop()?); } Instr::PopRbp => { self.rbp = Wrapping(self.pop()?); } Instr::PopRbx => { self.rbx = Wrapping(self.pop()?); } Instr::PopRdx => { self.rdx = Wrapping(self.pop()?); } Instr::PushQwordRax => { let value = *self.data.access(self.rax.0)?; self.push(value)?; } Instr::PushQwordRaxOffset(offset) => { let addr = (self.rax + Wrapping(offset)).0; let value = *self.data.access(addr)?; self.push(value)?; } Instr::PushRax => { let rax = self.rax.0; self.push(rax)?; } Instr::PushRbp => { let rbp = self.rbp.0; self.push(rbp)?; } Instr::PushRbx => { let rbx = self.rbx.0; self.push(rbx)?; } Instr::PushRdx => { let rdx = self.rdx.0; self.push(rdx)?; } Instr::Ret => { self.rip = Wrapping(self.pop()?); } Instr::SetbDl => { self.rdx &= Wrapping(!0xFF); if self.below_flag { self.rdx |= Wrapping(1); } } Instr::SeteDl => { self.rdx &= Wrapping(!0xFF); if self.zero_flag { self.rdx |= Wrapping(1); } } Instr::SetneDl => { self.rdx &= Wrapping(!0xFF); if !self.zero_flag { self.rdx |= Wrapping(1); } } Instr::SubRaxRbx => { self.rax -= self.rbx; } Instr::SubRsp(x) => { self.rsp -= Wrapping(x); } Instr::TestRaxRax => { self.zero_flag = self.rax.0 == 0; } Instr::XorRaxRax => { 
self.rax = Wrapping(0); } Instr::XorRdxRdx => { self.rdx = Wrapping(0); } Instr::Syscall => { match self.rax.0 { 0 => { // exit let arg = self.rbx.0; ::std::process::exit(arg as i32); } 1 => { // read_byte if self.have_pending_writes { self.stdout.flush()?; } let value = self.read_byte()?; self.rbx = Wrapping(value); } 2 => { // write_byte let value = (self.rbx.0 & 0xFF) as u8; self.stdout.write(&[value])?; self.have_pending_writes = true; } other => { return Err(ExecError::InvalidSyscall(other)); } } } } if self.rsp.0 % 8 == 0 { Ok(()) } else { Err(ExecError::MisalignedStack(self.rsp.0)) } } fn push(&mut self, value: u64) -> ExecResult<()> { self.rsp -= Wrapping(8); *self.data.access(self.rsp.0)? = value; Ok(()) } fn pop(&mut self) -> ExecResult<u64> { let value = *self.data.access(self.rsp.0)?; self.rsp += Wrapping(8); Ok(value) } fn read_byte(&mut self) -> ExecResult<u64> { let mut buf = [0]; let amount_read = self.stdin.read(&mut buf)?; Ok(if amount_read == 0 { 256 } else { u64::from(buf[0]) }) } }
true
9899a295f63ab8b5f6dda6d3bba44d85445e1b43
Rust
my3157/btree
/src/wal_file.rs
UTF-8
5,148
2.875
3
[ "Apache-2.0" ]
permissive
// NOTE(review): flattened by the dataset export; preserved byte-for-byte. Uses the
// legacy `rustc_serialize` + bincode 0.x API (`encode`/`decode`, `SizeLimit`), so it
// only builds against those old crate versions; restyling is deferred.
// Purpose (from the visible code): `RecordFile` stores fixed-size key/value records —
// each `KeyValuePair` is bincode-encoded and zero-padded to exactly
// `key_size + value_size` bytes; `count` divides the file length by that record size,
// and the iterator reads records back with `read_exact` + `decode`, returning `None`
// on decode failure or EOF.
// NOTE(review): `RecordFileIterator::next` has a debug `println!` per record and the
// test ignores the `Result` of `fs::remove_file` — worth cleaning up when this is
// ported off rustc_serialize.
extern crate bincode; extern crate rustc_serialize; use bincode::SizeLimit; use bincode::rustc_serialize::{encode, decode}; use crate::{KeyType, ValueType}; use std::error::Error; use std::fs::{File, OpenOptions}; use std::io::{Read, Write, ErrorKind, Seek, SeekFrom}; use std::io::Error as IOError; use std::marker::PhantomData; use std::cmp::Ordering; #[derive(RustcEncodable, RustcDecodable, PartialEq)] pub struct KeyValuePair<K: KeyType, V: ValueType> { pub key: K, pub value: V, } impl <K: KeyType, V: ValueType> PartialOrd for KeyValuePair<K,V> { fn partial_cmp(&self, other: &KeyValuePair<K,V>) -> Option<Ordering> { if self.key == other.key { Some(self.value.cmp(&other.value)) } else { Some(self.key.cmp(&other.key)) } } } pub struct RecordFile<K: KeyType, V: ValueType> { fd: File, // the file key_size: usize, value_size: usize, _k_marker: PhantomData<K>, _v_marker: PhantomData<V> } pub struct RecordFileIterator<'a, K: KeyType + 'a, V: ValueType + 'a> { wal_file: &'a mut RecordFile<K,V>, // the file } impl <K: KeyType, V: ValueType> RecordFile<K,V> { pub fn new(wal_file_path: &String, key_size: usize, value_size: usize) -> Result<RecordFile<K,V>, Box<dyn Error>> { let wal_file = OpenOptions::new().read(true).write(true).create(true).open(wal_file_path)?; return Ok(RecordFile{fd: wal_file, key_size: key_size, value_size: value_size, _k_marker: PhantomData, _v_marker: PhantomData}); } pub fn is_new(&self) -> Result<bool, Box<dyn Error>> { Ok(self.fd.metadata()?.len() == 0) } /// Returns the number of records in the WAL file pub fn count(&self) -> Result<u64, Box<dyn Error>> { let file_size = self.fd.metadata()?.len(); let rec_size: u64 = (self.key_size + self.value_size) as u64; if file_size % rec_size != 0 { Err(From::from(IOError::new(ErrorKind::InvalidData, "File size is NOT a multiple of key size + value size"))) } else { Ok(file_size/rec_size) } } pub fn insert_record(&mut self, kv: &KeyValuePair<K,V>) -> Result<(), Box<dyn Error>> { // encode the record let 
record_size = self.key_size + self.value_size; let mut buff = encode(&kv, SizeLimit::Bounded(record_size as u64))?; // add it out to the max size if buff.len() > self.key_size + self.value_size { return Err(From::from(IOError::new(ErrorKind::InvalidData, "Key and value size are too large"))); } else { let diff = (self.key_size + self.value_size) - buff.len(); buff.extend(vec![0; diff]); } match self.fd.write_all(&buff) { Ok(_) => Ok( () ), Err(e) => Err(From::from(e)) } } } impl <'a, K: KeyType, V: ValueType> IntoIterator for &'a mut RecordFile<K,V> { type Item = KeyValuePair<K,V>; type IntoIter = RecordFileIterator<'a, K,V>; fn into_iter(self) -> Self::IntoIter { // seek back to the start self.fd.seek(SeekFrom::Start(0)).unwrap(); // create our iterator RecordFileIterator{wal_file: self} } } impl <'a, K: KeyType, V: ValueType> Iterator for RecordFileIterator<'a,K,V> { type Item = KeyValuePair<K,V>; fn next(&mut self) -> Option<Self::Item> { let total_size = self.wal_file.key_size + self.wal_file.value_size; let mut buff = vec![0; total_size]; println!("Creating buffer: {}", total_size); // attempt to read a buffer's worth and decode match self.wal_file.fd.read_exact(&mut buff) { Ok(_) => { match decode(&buff) { Ok(record) => Some(record), Err(_) => None } }, Err(e) => { println!("ERROR: {}", e); None } } } } #[cfg(test)] mod tests { use crate::tests::gen_temp_name; use std::fs; use crate::wal_file::{RecordFile, KeyValuePair}; #[test] fn test_iterator() { let temp_path = gen_temp_name(); let file_path = temp_path.to_owned() + ".wal"; // create a new blank file let mut wal_file = RecordFile::new(&file_path, 20, 20).unwrap(); assert!(wal_file.is_new().unwrap()); let kv1 = KeyValuePair{key: "hello".to_owned(), value: "world".to_owned()}; let kv2 = KeyValuePair{key: "foo".to_owned(), value: "bar".to_owned()}; wal_file.insert_record(&kv1).unwrap(); wal_file.insert_record(&kv2).unwrap(); assert!(wal_file.count().unwrap() == 2); let mut wal_it = wal_file.into_iter(); let 
it_kv1 = wal_it.next().unwrap(); assert!(kv1.key == it_kv1.key); assert!(kv1.value == it_kv1.value); let it_kv2 = wal_it.next().unwrap(); assert!(kv2.key == it_kv2.key); assert!(kv2.value == it_kv2.value); fs::remove_file(&file_path); } }
true
2e1dbbae47872fa05d34c4988ff9282356ddec00
Rust
furaf/profix
/src/factory.rs
UTF-8
1,167
2.515625
3
[ "MIT" ]
permissive
use std; use native_tls; use FixClient; #[derive(Debug)] pub enum ConnectionFailure { TlsError(native_tls::Error), TlsHandshakeError(native_tls::HandshakeError<std::net::TcpStream>), TcpStreamError(std::io::Error), } impl From<native_tls::Error> for ConnectionFailure { fn from(tls_error: native_tls::Error) -> ConnectionFailure { ConnectionFailure::TlsError(tls_error) } } impl From<native_tls::HandshakeError<std::net::TcpStream>> for ConnectionFailure { fn from(tls_error: native_tls::HandshakeError<std::net::TcpStream>) -> ConnectionFailure { ConnectionFailure::TlsHandshakeError(tls_error) } } impl From<std::io::Error> for ConnectionFailure { fn from(io_error: std::io::Error) -> ConnectionFailure { ConnectionFailure::TcpStreamError(io_error) } } #[derive(Clone)] pub struct CompIds { pub sender: String, pub target: String, } pub trait FixFactory<Handler> { //called everytime we need to establish connection. fn connection_factory(&self) -> Result<FixClient, ConnectionFailure>; //called everytime we established connection. fn handler_factory(&self) -> Handler; }
true
d509d6c7c050dfcd9c5cff248699c33e7a22d9fa
Rust
vinnyhoward/til
/rust/rust-sandbox/src/var.rs
UTF-8
352
3.859375
4
[ "MIT" ]
permissive
pub fn run() { let name = "Vince"; let mut age = 29; println!("My name is {} and I am {} years old", name, age); age = 30; // Define a constant const userId: i32 = 001; println!("User ID is {}", userId); // Assign multiple vars let (my_name, my_age) = ("Vince", 29); println!("{} is {}", my_name, my_age); }
true
7d3c42d20da5b9e342402a8a90c29d1e6f07c511
Rust
JosephLing/Mole
/src/parse.rs
UTF-8
10,467
3.171875
3
[ "MIT" ]
permissive
use chrono::{NaiveDate, NaiveDateTime, NaiveTime}; use std::path::PathBuf; type ErrorMessage = String; #[derive(PartialEq)] pub enum ParseError { InvalidKey(ErrorMessage), EmptyValue(ErrorMessage), InvalidValue(ErrorMessage), InvalidConfig(ErrorMessage), } impl std::fmt::Debug for ParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { ParseError::InvalidKey(s) => writeln!(f, "Invalid key: {}", s), ParseError::EmptyValue(s) => writeln!(f, "Empty value\n{}", s), ParseError::InvalidValue(s) => writeln!(f, "Invalid value: {}", s), ParseError::InvalidConfig(s) => writeln!(f, "Invalid configuration: {}", s), } } } pub fn parse_error_message( message: &str, path: &PathBuf, line: &str, start: usize, end: usize, lineno: i8, ) -> ErrorMessage { let spacing = if lineno < 99 { " " } else if lineno < 127 { " " } else { " " }; let mut underline = String::new(); for _i in 0..start { underline.push(' '); } for _i in start..end { underline.push('^'); } let msg : ErrorMessage = format!( "\n{s } --> {p} {n}:{start}\n{s } |\n{n:w$} | {line}\n{s } | {underline}\n{s } |\n{s }{m}", p = path.to_str().unwrap(), line = line, s = spacing, w = spacing.len(), underline = underline, n = lineno, start = start, m = message ); msg } pub fn parse_key<'a>( rest: &'a str, path: &PathBuf, line: &str, lineno: i8, ) -> Result<(&'a str, &'a str), ParseError> { if rest.is_empty() { return Err(ParseError::EmptyValue(parse_error_message( "expected name of key", path, line, line.len(), line.len() + 5, lineno, ))); } if let Some(index) = rest.find(':') { return Ok((&rest[0..index], &rest[index + 1..])); } Err(ParseError::InvalidKey(parse_error_message( "no semicolon found", path, line, line.len(), line.len() + 1, lineno, ))) } pub fn parse_value_string<'a>( rest: &'a str, path: &PathBuf, line: &str, lineno: i8, ) -> Result<&'a str, ParseError> { let rest = rest.trim(); if rest.is_empty() { return Err(ParseError::EmptyValue(parse_error_message( "empty value", path, line, 
line.len(), line.len() + 5, lineno, ))); } if rest.starts_with('"') { if !rest.ends_with('"') { return Err(ParseError::InvalidValue(parse_error_message( "string started with \" character but did not close string at the end", path, line, 0, line.len(), lineno, ))); } else { return Ok(&rest[1..rest.len() - 1]); } } if rest.starts_with('\'') { if !rest.ends_with('\'') { return Err(ParseError::InvalidValue(parse_error_message( "string started with \" character but did not close string at the end", path, line, 0, line.len(), lineno, ))); } else { return Ok(&rest[1..rest.len() - 1]); } } if rest == "---" { return Err(ParseError::InvalidValue(parse_error_message( "found '---' can't use configuration start and end identifier as a value", path, line, line.len() - 3, line.len(), lineno, ))); } Ok(rest) } pub fn parse_value_boolean( rest: &str, path: &PathBuf, line: &str, lineno: i8, ) -> Result<bool, ParseError> { match rest.parse::<bool>() { Ok(b) => Ok(b), Err(_) => Err(ParseError::InvalidValue(parse_error_message( "", path, line, line.len() - rest.len(), line.len(), lineno, ))), } } pub fn parse_value_time( rest: &str, path: &PathBuf, line: &str, lineno: i8, ) -> Result<NaiveDateTime, ParseError> { match NaiveDate::parse_from_str(rest, "%Y-%m-%d") { Ok(date) => Ok(date.and_time(NaiveTime::from_hms_milli(0, 0, 0, 0))), Err(_) => match NaiveDateTime::parse_from_str(rest, "%Y-%m-%d %H:%M") { Ok(date) => Ok(date), Err(err) => Err(ParseError::InvalidValue(parse_error_message( &("date error: ".to_owned() + &err.to_string() + " expected Y-m-d or Y-m-d h:m"), path, line, line.len() - rest.len(), line.len(), lineno, ))), }, } } pub fn parse_value_list( mut rest: &str, path: &PathBuf, line: &str, lineno: i8, ) -> Result<Vec<String>, ParseError> { rest = rest.trim(); if rest.is_empty() { return Err(ParseError::EmptyValue(parse_error_message( "empty", path, line, line.len(), line.len() + 5, lineno, ))); } let mut list: Vec<String> = Vec::new(); let mut prev = 0; let mut in_string = 
false; let mut in_string_lower = false; if rest.starts_with('[') { if rest.ends_with(']') { rest = rest.trim_start_matches('[').trim_end_matches(']'); } else { return Err(ParseError::InvalidValue(parse_error_message( "found opening square bracket for list but no opening bracket", path, line, 0, line.len(), lineno, ))); } } let bytes = rest.as_bytes(); for (i, &item) in bytes.iter().enumerate() { if item == b',' && !in_string && !in_string_lower { list.push(parse_value_string(&rest[prev..i], path, line, lineno)?.to_string()); prev = i + 1; } else if item == b'"' && !in_string_lower { in_string = !in_string; } else if item == b'\'' && !in_string { in_string_lower = !in_string_lower; } } if prev == rest.len() { return Err(ParseError::InvalidValue(parse_error_message( "value expected after semi-colon", path, line, line.len(), line.len() + 5, lineno, ))); } else if in_string { return Err(ParseError::InvalidValue(parse_error_message( "found a string but no closing \"", path, line, line.len() - 1, line.len(), lineno, ))); } else if in_string_lower { return Err(ParseError::InvalidValue(parse_error_message( "found a string but no closing \'", path, line, line.len() - 1, line.len(), lineno, ))); } else { list.push(parse_value_string(&rest[prev..], path, line, lineno)?.to_string()); } Ok(list) } #[cfg(test)] mod parse_tests { use super::*; use pretty_assertions::assert_eq; #[test] fn parse_key_test() { let line = "hello: world"; let (key, rest) = parse_key(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(key, "hello"); assert_eq!(rest, " world"); } #[test] fn parse_key_no_semicolon() { let line = "hello world"; let err = parse_key(line, &PathBuf::from("test.txt"), line, 1).err(); match err { Some(ParseError::InvalidKey(config)) => assert!( config.contains("no semicolon found"), "expected 'no semicolon found' in {}", config ), _ => assert!(false, "expected error"), } } #[test] fn parse_value_list_multi_spaced() { let line = "a, b, c, d"; let list = 
parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec!["a", "b", "c", "d"], list); } #[test] fn parse_value_list_single() { let line = "a"; let list = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec!["a"], list); } #[test] fn parse_value_list_double_no_spaced() { let line = "a, b"; let list = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec!["a", "b"], list); } #[test] fn parse_value_list_square_brackets() { let line = "[a, b]"; let list = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec!["a", "b"], list); } #[test] fn parse_value_list_sauare_brackets_err() { let line = "[a, b"; let err = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).err(); match err { Some(ParseError::InvalidValue(config)) => assert!( config.contains("found opening square bracket for list but no opening bracket"), "found opening square bracket for list but no opening bracket' in {}", config ), _ => assert!(false, "expected error"), } } #[test] fn parse_value_list_single_quote() { let line = "',a', 'b'"; let list = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec![",a", "b"], list); } #[test] fn parse_value_list_double_quote() { let line = "\",a\", \"b\""; let list = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).unwrap(); assert_eq!(vec![",a", "b"], list); } #[test] fn parse_value_list_err() { let line = "a, b,"; let err = parse_value_list(line, &PathBuf::from("test.txt"), line, 1).err(); match err { Some(ParseError::InvalidValue(config)) => assert!( config.contains("value expected after semi-colon"), "expected 'value expected after semi-colon' in {}", config ), _ => assert!(false, "expected error"), } } }
true
5e5bbab250543fc5c334141713da7601c6e508e8
Rust
glesica/alert-after
/main.rs
UTF-8
2,078
2.765625
3
[]
no_license
#[cfg(target_os = "macos")] extern crate mac_notification_sys; extern crate notify_rust; use std::{borrow, env, io, process}; use std::io::Write; #[cfg(target_os = "macos")] fn notify(msg_title: &str, msg_body: &str) { let bundle = mac_notification_sys::get_bundle_identifier("safari").unwrap(); mac_notification_sys::set_application(&bundle).unwrap(); mac_notification_sys::send_notification(msg_title, &None, msg_body, &None).unwrap(); } #[cfg(not(target_os = "macos"))] fn notify(msg_title: &str, msg_body: &str) { use notify_rust::Notification; Notification::new() .summary(msg_title) .body(msg_body) .show() .unwrap(); } fn main() { let mut args = env::args(); let _ = args.next().unwrap(); let program_name = match args.next() { Some(program_name) => program_name, None => { writeln!(io::stderr(), "usage: aa <program name and args>") .expect("could not write to stderr"); process::exit(1); } }; let mut command = process::Command::new(program_name.clone()); let args = args.collect::<Vec<_>>(); command.args(args.clone()); let mut child = match command.spawn() { Ok(child) => child, Err(e) => { writeln!(io::stderr(), "aa: Unknown command '{}': {}", program_name, e) .expect("could not write to stderr"); process::exit(1); } }; let exit_status = child.wait().expect("failed to wait on command"); let mut full_cmd = program_name; full_cmd.push_str(" "); full_cmd.push_str(&args.join(" ")); let cmd_success: borrow::Cow<str> = match exit_status.code() { Some(0) => "Command exited successfully".into(), Some(code) => format!("Command exited with status code {}", code).into(), None => "Command exited".into(), }; notify(&full_cmd, &cmd_success); if let Some(code) = exit_status.code() { process::exit(code); } }
true
4b97f110152f85e9e1ffdf6961c5d585fdc5e55b
Rust
wackywendell/eulerrust
/src/lib.rs
UTF-8
5,588
3.171875
3
[ "BSD-3-Clause" ]
permissive
/// Functions for use in euler projects #[warn(non_camel_case_types)] #[warn(non_snake_case)] #[warn(unused_qualifications)] #[warn(non_upper_case_globals)] #[warn(missing_docs)] extern crate primes; extern crate rand; use std::collections::HashSet; use std::collections::HashMap; use std::collections::hash_map::Entry; pub use primes::PrimeSet; const U64_BITS: usize = 64; /// Count the number of occurrences of each value in an iterator pub fn counter<K, I>(list: I) -> HashMap<K, u64> where K: Eq + std::hash::Hash, I: Iterator<Item = K> { let mut counter: HashMap<K, u64> = HashMap::new(); for key in list { match counter.entry(key) { Entry::Vacant(entry) => { entry.insert(1); } Entry::Occupied(entry) => { (*entry.into_mut()) += 1; } } } counter } pub fn isqrt_opt(n: u64) -> Option<u64> { if n <= 1 { return Some(n); } let mut x = n / 2; while x > 2u64.pow((U64_BITS / 2) as u32) { // Prevents overflows x = (x + n / x + 1) / 2; } let mut usedset = HashSet::new(); while x * x != n { usedset.insert(x); x = (x + n / x + 1) / 2; if usedset.contains(&x) { return None; } } Some(x) } pub fn isqrt(n: u64) -> u64 { if n <= 1 { return n; } let mut x = n / 2; while x > 2u64.pow((U64_BITS / 2) as u32) { // Prevents overflows x = (x + n / x + 1) / 2; } while x * x != n { let lastx = x; x = (x + n / x + 1) / 2; println!("new x: {}", x); if x == lastx { println!("inside if: {}", x); if x * x < n { return x; } else { return x - 1; } } println!("if passed: {}", x); } return x; } pub fn is_palindrome(n: u64) -> bool { let s = n.to_string(); // let s_rev = String::from_utf8(s.into_bytes().iter().rev().collect()); let s_rev = s.as_bytes().iter().rev(); let s_rev_vec: Vec<u8> = s_rev.map(|&b| b).collect(); let s_rev = String::from_utf8(s_rev_vec); match s_rev { Ok(s2) => s == s2, _ => false, } } #[derive(Clone)] pub struct Pairs<'a, T: 'a> { vector: &'a [T], first: usize, second: usize, } impl<'a, T> Iterator for Pairs<'a, T> { type Item=(&'a T, &'a T); fn next(&mut self) -> Option<(&'a T, 
&'a T)> { let l = self.vector.len(); if self.second >= (l - 1) && self.first >= (l - 2) { return None; } self.second += 1; if self.second >= l { self.first += 1; self.second = self.first + 1; } return unsafe { Some((self.vector.get_unchecked(self.first), self.vector.get_unchecked(self.second))) }; } } pub fn pairs<'a, T>(vec: &'a [T]) -> Pairs<'a, T> { Pairs { vector: vec, first: 0, second: 0, } } #[test] fn test_square() { assert_eq!(isqrt_opt(4), Some(2)); assert_eq!(isqrt_opt(5), None); let mut ntests = vec![1, 7, 8, 9, 10, 11, 12, 189654, 4294967295]; for _ in 0u64..1000 { let mut n = rand::random::<u64>(); n = n % 2u64.pow((U64_BITS / 2) as u32); ntests.push(n); } for &n in ntests.iter() { println!("n: {}, n*n: {}", n, n * n); assert!(n < 2u64.pow((U64_BITS / 2) as u32)); assert_eq!(isqrt_opt(n * n), Some(n)); if n > 1 { assert_eq!(isqrt_opt(n * n - 1), None); } else { assert_eq!(isqrt_opt(n * n - 1), Some(0)); } assert_eq!(isqrt_opt(n * n + 1), None); } } #[test] fn test_isqrt() { assert_eq!(isqrt(0), 0); let mut ntests: Vec<u64> = vec![1, 7, 8, 9, 10, 11, 12, 189654, 4294967295]; for i in 1u64..1001 { let mut n = rand::random::<u64>(); n = n % 2u64.pow((U64_BITS / 2) as u32); ntests.push(n); ntests.push(i); } for n in ntests { assert!(n < 2u64.pow((U64_BITS / 2) as u32)); let x: u64 = isqrt(n); assert!(x * x <= n); assert!((x + 1) * (x + 1) > n); assert_eq!(isqrt(n * n - 1), n - 1); assert_eq!(isqrt(n * n + 1), n); } } #[test] fn test_palindrome() { assert!(is_palindrome(1)); assert!(is_palindrome(2)); assert!(is_palindrome(8)); assert!(!is_palindrome(10)); assert!(!is_palindrome(12)); assert!(!is_palindrome(100)); assert!(!is_palindrome(11110)); assert!(!is_palindrome(21111)); assert!(!is_palindrome(12131)); assert!(is_palindrome(11)); assert!(is_palindrome(111)); assert!(is_palindrome(232)); assert!(is_palindrome(181)); } #[test] fn test_pairs() { let v = [1, 2, 5, 4u64]; let mut my_pairs = pairs(&v); assert_eq!(my_pairs.next(), Some((&v[0], &v[1]))); 
assert_eq!(my_pairs.next(), Some((&v[0], &v[2]))); assert_eq!(my_pairs.next(), Some((&v[0], &v[3]))); assert_eq!(my_pairs.next(), Some((&v[1], &v[2]))); assert_eq!(my_pairs.next(), Some((&v[1], &v[3]))); assert_eq!(my_pairs.next(), Some((&v[2], &v[3]))); // ~ assert_eq!(my_pairs.next(), Some((&1, &5))); // ~ assert_eq!(my_pairs.next(), Some((&1, &4))); // ~ assert_eq!(my_pairs.next(), Some((&2, &5))); // ~ assert_eq!(my_pairs.next(), Some((&2, &4))); // ~ assert_eq!(my_pairs.next(), Some((&5, &4))); assert_eq!(my_pairs.next(), None); assert_eq!(my_pairs.next(), None); }
true
32b36ce1b863fee67ac3877230e9921325534515
Rust
Lichthagel/trakt-rust
/src/models/stats.rs
UTF-8
2,146
2.890625
3
[ "MIT" ]
permissive
/// [Stats] of a user /// /// [Stats]: https://trakt.docs.apiary.io/#reference/users/stats/get-stats #[derive(Debug, Serialize, Deserialize)] pub struct UserStats { pub movies: UserMovieStats, pub shows: UserShowStats, pub seasons: UserSeasonStats, pub episodes: UserEpisodeStats, pub network: UserNetworkStats, pub ratings: UserRatingStats, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserMovieStats { pub plays: u64, pub watched: u64, pub minutes: u64, pub collected: u64, pub ratings: u64, pub comments: u64, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserShowStats { pub watched: u64, pub collected: u64, pub ratings: u64, pub comments: u64, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserSeasonStats { pub ratings: u64, pub comments: u64, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserEpisodeStats { pub plays: u64, pub watched: u64, pub minutes: u64, pub collected: u64, pub ratings: u64, pub comments: u64, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserNetworkStats { pub friends: u64, pub followers: u64, pub following: u64, } /// See [UserStats] /// /// [UserStats]: struct.UserStats.html #[derive(Debug, Serialize, Deserialize)] pub struct UserRatingStats { pub total: u64, pub distribution: [u64; 10], } /// Stats of a [movie] or [show] /// /// [movie]: https://trakt.docs.apiary.io/#reference/movies/stats/get-movie-stats /// [show]: https://trakt.docs.apiary.io/#reference/shows/stats/get-show-stats #[derive(Debug, Serialize, Deserialize)] pub struct MediaStats { pub watchers: u64, pub plays: u64, pub collectors: u64, pub collected_episodes: Option<u64>, pub comments: u64, pub lists: u64, pub votes: u64, }
true
80f1f7913da9317e4225554587eef1b2213b02e4
Rust
mxxo/iou
/src/registrar/mod.rs
UTF-8
12,736
3.203125
3
[ "Apache-2.0", "MIT" ]
permissive
//! Types related to registration and registered resources. //! //! The [`Registrar`] type can be used to register resources with the kernel that will be used with //! a particular [`IoUring`] instance. This can improve performance by avoiding the kernel from //! reallocating resources for each IO events performed against those resources. //! //! When file descriptors and buffers are registered with the kernel, an iterator of the type-safe //! [`Registered`] wrapper is returned. This wrapper makes it easier to correctly use //! pre-registered resources. By passing a [`RegisteredFd`] or the correct type of registered //! buffer to an [`SQE`][crate::SQE]'s prep methods, the SQE will be properly prepared to use the //! pre-registered object. mod registered; use std::fmt; use std::io; use std::marker::PhantomData; use std::ptr::NonNull; use std::os::unix::io::RawFd; use crate::{IoUring, Probe, resultify}; pub use registered::*; /// A `Registrar` creates ahead-of-time kernel references to files and user buffers. /// /// Preregistration significantly reduces per-IO overhead, so consider registering frequently /// used files and buffers. For file IO, preregistration lets the kernel skip the atomic acquire and /// release of a kernel-specific file descriptor. For buffer IO, the kernel can avoid mapping kernel /// memory for every operation. /// /// Beware that registration is relatively expensive and should be done before any performance /// sensitive code. /// /// If you want to register a file but don't have an open file descriptor yet, you can register /// a [placeholder](PLACEHOLDER_FD) descriptor and /// [update](crate::registrar::Registrar::update_registered_files) it later. 
/// ``` /// # use iou::{IoUring, Registrar, registrar::RegisteredFd}; /// # fn main() -> std::io::Result<()> { /// let mut ring = IoUring::new(8)?; /// let mut registrar: Registrar = ring.registrar(); /// # let fds = &[0, 1]; /// let registered_files: Vec<RegisteredFd> = registrar.register_files(fds)?.collect(); /// # Ok(()) /// # } /// ``` pub struct Registrar<'ring> { ring: NonNull<uring_sys::io_uring>, _marker: PhantomData<&'ring mut IoUring>, } impl<'ring> Registrar<'ring> { pub(crate) fn new(ring: &'ring IoUring) -> Registrar<'ring> { Registrar { ring: NonNull::from(&ring.ring), _marker: PhantomData, } } pub fn register_buffers(&self, buffers: Vec<Box<[u8]>>) -> io::Result<impl Iterator<Item = RegisteredBuf>> { let len = buffers.len(); let addr = buffers.as_ptr() as *const _; resultify(unsafe { uring_sys::io_uring_register_buffers(self.ring.as_ptr(), addr, len as _) })?; Ok(buffers .into_iter() .enumerate() .map(|(i, buf)| RegisteredBuf::new(i as u32, buf)) ) } pub fn register_buffers_by_ref<'a>(&self, buffers: &'a [&'a [u8]]) -> io::Result<impl Iterator<Item = RegisteredBufRef<'a>> + 'a> { let len = buffers.len(); let addr = buffers.as_ptr() as *const _; resultify(unsafe { uring_sys::io_uring_register_buffers(self.ring.as_ptr(), addr, len as _) })?; Ok(buffers .iter() .enumerate() .map(|(i, buf)| Registered::new(i as u32, &**buf)) ) } pub fn register_buffers_by_mut<'a>(&self, buffers: &'a mut [&'a mut [u8]]) -> io::Result<impl Iterator<Item = RegisteredBufMut<'a>> + 'a> { let len = buffers.len(); let addr = buffers.as_ptr() as *const _; resultify(unsafe { uring_sys::io_uring_register_buffers(self.ring.as_ptr(), addr, len as _) })?; Ok(buffers .iter_mut() .enumerate() .map(|(i, buf)| Registered::new(i as u32, &mut **buf)) ) } /// Unregister all currently registered buffers. An explicit call to this method is often unecessary, /// because all buffers will be unregistered automatically when the ring is dropped. 
pub fn unregister_buffers(&self) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_unregister_buffers(self.ring.as_ptr()) })?; Ok(()) } /// Register a set of files with the kernel. Registered files handle kernel fileset indexing /// behind the scenes and can often be used in place of raw file descriptors. /// /// # Errors /// Returns an error if /// * there is a preexisting set of registered files, /// * the `files` slice was empty, /// * the inner [`io_uring_register_files`](uring_sys::io_uring_register_files) call failed for /// another reason /// ```no_run /// # use iou::IoUring; /// # fn main() -> std::io::Result<()> { /// # let mut ring = IoUring::new(2)?; /// # let mut registrar = ring.registrar(); /// # let raw_fds = [1, 2]; /// # let bufs = &[std::io::IoSlice::new(b"hi")]; /// let fileset: Vec<_> = registrar.register_files(&raw_fds)?.collect(); /// let reg_file = fileset[0]; /// # let mut sqe = ring.prepare_sqe().unwrap(); /// unsafe { sqe.prep_write_vectored(reg_file, bufs, 0); } /// # Ok(()) /// # } /// ``` pub fn register_files<'a>(&self, files: &'a [RawFd]) -> io::Result<impl Iterator<Item = RegisteredFd> + 'a> { assert!(files.len() <= u32::MAX as usize); resultify(unsafe { uring_sys::io_uring_register_files( self.ring.as_ptr(), files.as_ptr() as *const _, files.len() as _ ) })?; Ok(files .iter() .enumerate() .map(|(i, &fd)| RegisteredFd::new(i as u32, fd)) ) } /// Update the currently registered kernel fileset. It is usually more efficient to reserve space /// for files before submitting events, because `IoUring` will wait until the submission queue is /// empty before registering files. 
/// # Errors /// Returns an error if /// * there isn't a registered fileset, /// * the `files` slice was empty, /// * `offset` is out of bounds, /// * the `files` slice was too large, /// * the inner [`io_uring_register_files_update`](uring_sys::io_uring_register_files_update) call /// failed for another reason pub fn update_registered_files<'a>(&mut self, offset: usize, files: &'a [RawFd]) -> io::Result<impl Iterator<Item = RegisteredFd> + 'a> { assert!(files.len() + offset <= u32::MAX as usize); resultify(unsafe { uring_sys::io_uring_register_files_update( self.ring.as_ptr(), offset as _, files.as_ptr() as *const _, files.len() as _, ) })?; Ok(files .iter() .enumerate() .map(move |(i, &fd)| RegisteredFd::new((i + offset) as u32, fd)) ) } /// Unregister all currently registered files. An explicit call to this method is often unecessary, /// because all files will be unregistered automatically when the ring is dropped. /// /// # Errors /// Returns an error if /// * there isn't a registered fileset, /// * the inner [`io_uring_unregister_files`](uring_sys::io_uring_unregister_files) call /// failed for another reason /// /// You can use this method to replace an existing fileset: /// ``` /// # use iou::IoUring; /// # fn main() -> std::io::Result<()> { /// # let mut ring = IoUring::new(2)?; /// # let mut registrar = ring.registrar(); /// let raw_fds = [0, 1]; /// let fds: Vec<_> = registrar.register_files(&raw_fds)?.collect(); /// assert_eq!(fds.len(), 2); /// /// registrar.unregister_files()?; /// /// let other_raw_fds = [0, 1, 2]; /// let new_fds: Vec<_> = registrar.register_files(&other_raw_fds)?.collect(); /// assert_eq!(new_fds.len(), 3); /// # Ok(()) /// # } /// ``` pub fn unregister_files(&self) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_unregister_files(self.ring.as_ptr()) })?; Ok(()) } pub fn register_eventfd(&self, eventfd: RawFd) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_register_eventfd(self.ring.as_ptr(), eventfd) })?; 
Ok(()) } pub fn register_eventfd_async(&self, eventfd: RawFd) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_register_eventfd_async(self.ring.as_ptr(), eventfd) })?; Ok(()) } pub fn unregister_eventfd(&self) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_unregister_eventfd(self.ring.as_ptr()) })?; Ok(()) } pub fn register_personality(&self) -> io::Result<Personality> { let id = resultify(unsafe { uring_sys::io_uring_register_personality(self.ring.as_ptr()) })?; debug_assert!(id < u16::MAX as u32); Ok(Personality { id: id as u16 }) } pub fn unregister_personality(&self, personality: Personality) -> io::Result<()> { resultify(unsafe { uring_sys::io_uring_unregister_personality(self.ring.as_ptr(), personality.id as _) })?; Ok(()) } pub fn probe(&self) -> io::Result<Probe> { Probe::for_ring(self.ring.as_ptr()) } } impl fmt::Debug for Registrar<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let fd = unsafe { self.ring.as_ref().ring_fd }; f.debug_struct(std::any::type_name::<Self>()).field("fd", &fd).finish() } } unsafe impl<'ring> Send for Registrar<'ring> { } unsafe impl<'ring> Sync for Registrar<'ring> { } #[derive(Debug, Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Copy)] pub struct Personality { pub(crate) id: u16, } impl From<u16> for Personality { fn from(id: u16) -> Personality { Personality { id } } } #[cfg(test)] mod tests { use super::*; use std::os::unix::io::AsRawFd; #[test] #[should_panic(expected = "Invalid argument")] fn register_empty_slice() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&[]).unwrap(); } #[test] #[should_panic(expected = "Bad file descriptor")] fn register_bad_fd() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&[-100]).unwrap(); } #[test] #[should_panic(expected = "Device or resource busy")] fn double_register() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&[1]).unwrap(); let _ = 
ring.registrar().register_files(&[1]).unwrap(); } #[test] #[should_panic(expected = "No such device or address")] fn empty_unregister_err() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().unregister_files().unwrap(); } #[test] #[should_panic(expected = "No such device or address")] fn empty_update_err() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().update_registered_files(0, &[1]).unwrap(); } #[test] #[should_panic(expected = "Invalid argument")] fn offset_out_of_bounds_update() { let raw_fds = [1, 2]; let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&raw_fds).unwrap(); let _ = ring.registrar().update_registered_files(2, &raw_fds).unwrap(); } #[test] #[should_panic(expected = "Invalid argument")] fn slice_len_out_of_bounds_update() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&[1, 1]).unwrap(); let _ = ring.registrar().update_registered_files(0, &[1, 1, 1]).unwrap(); } #[test] fn valid_fd_update() { let ring = IoUring::new(1).unwrap(); let file = std::fs::File::create("tmp.txt").unwrap(); let _ = ring.registrar().register_files(&[file.as_raw_fd()]).unwrap(); let new_file = std::fs::File::create("new_tmp.txt").unwrap(); let _ = ring.registrar().update_registered_files(0, &[new_file.as_raw_fd()]).unwrap(); let _ = std::fs::remove_file("tmp.txt"); let _ = std::fs::remove_file("new_tmp.txt"); } #[test] fn placeholder_update() { let ring = IoUring::new(1).unwrap(); let _ = ring.registrar().register_files(&[-1, -1, -1]).unwrap(); let file = std::fs::File::create("tmp.txt").unwrap(); let _ = ring.registrar().update_registered_files(0, &[file.as_raw_fd()]).unwrap(); let _ = std::fs::remove_file("tmp.txt"); } }
true
708cd56c2d9b6061732bedd263117762def569f5
Rust
yukoga/the-rust-programming-language
/1.2 hello world/hello_cargo/src/main.rs
UTF-8
77
2.59375
3
[ "Apache-2.0" ]
permissive
fn main() { println!("Hello, Rust world! - 日本語はどうだ!"); }
true
7b2218636d012c125e1c32eb93ac48163bbf7903
Rust
isabella232/lorawan-h3
/cli/src/main.rs
UTF-8
2,655
2.921875
3
[ "Apache-2.0" ]
permissive
mod regions; use geo_types::Coordinate; use h3ron::Index; use std::{env, process::exit, str::FromStr}; fn main() { let args: Vec<String> = env::args().into_iter().skip(1).collect(); let h3: Index = match args.as_slice() { [index_str] => u64::from_hex_dec_bin(index_str) .map_err(|_| "u64") .and_then(|index| { let index = Index::from(index); if index.is_valid() { Ok(index) } else { Err("H3 index") } }) .unwrap_or_else(|e| { eprintln!("{} is not a valid {}", index_str, e); exit(1) }), [lat_str, lon_str] => { let xy = &[lat_str, lon_str] .iter() .map(|lat_or_lon| f64::from_str(lat_or_lon)) .collect::<Result<Vec<f64>, _>>() .unwrap_or_else(|_| { eprintln!("{} {} are not valid coordinates", lat_str, lon_str); exit(1) }); Index::from_coordinate(&Coordinate { x: xy[1], y: xy[0] }, 12) } _ => { usage(); exit(1) } }; if let Some((region, parent_index)) = lookup(h3) { println!("{} @ {}", region, parent_index.to_string()); } else { exit(1); } } fn usage() { eprintln!("lwr <H3> | <LAT> <LON>"); } fn lookup(target_index: Index) -> Option<(&'static str, Index)> { for (region, indices) in regions::REGIONS { eprintln!("searching {} for {}", region, target_index.to_string()); if let Some(parent_index) = indices .iter() .map(|i| Index::new(*i)) .find(|i| i.contains(&target_index)) { return Some((region, parent_index)); } } None } trait FromHexDecBin: Sized { type Error; fn from_hex_dec_bin(s: &str) -> Result<Self, Self::Error>; } macro_rules! impl_from_hex_dec_bin { ($T:tt, $E:ty) => { impl FromHexDecBin for $T { type Error = $E; fn from_hex_dec_bin(s: &str) -> Result<$T, Self::Error> { if s.len() > 2 { match s.split_at(2) { ("0x", rest) => $T::from_str_radix(rest, 16), ("0b", rest) => $T::from_str_radix(rest, 2), _ => $T::from_str_radix(s, 10), } } else { $T::from_str_radix(s, 10) } } } }; } impl_from_hex_dec_bin!(u64, ::std::num::ParseIntError);
true
6209834f11361a2885ad1bc771f53c69f4933fb1
Rust
GeoffClements/slim-client-protocol-rs
/slimproto/src/buffer.rs
UTF-8
4,655
3.078125
3
[ "MIT" ]
permissive
/// Used to wrap around a reader. /// Keeps the associates status data updated use std::{ io::{BufRead, BufReader, Read}, sync::{Arc, Mutex}, }; use crate::status::StatusData; type MaybeCallback = Option<Box<dyn FnMut() + Send + Sync + 'static>>; pub struct SlimBuffer<R> { inner: BufReader<R>, status: Arc<Mutex<StatusData>>, threshold: u32, threshold_cb: MaybeCallback, prebuf: Vec<u8>, } impl<R> SlimBuffer<R> where R: Read, { pub fn new( inner: R, status: Arc<Mutex<StatusData>>, threshold: u32, threshold_cb: MaybeCallback, ) -> Self { let buf = BufReader::new(inner); if let Ok(mut status) = status.lock() { status.set_buffer_size(buf.capacity() as u32); } let mut this: SlimBuffer<R> = Self { inner: buf, status, threshold, threshold_cb, prebuf: Vec::with_capacity(255 * 1024), }; this.pre_buf(); this } pub fn with_capacity( capacity: usize, inner: R, status: Arc<Mutex<StatusData>>, threshold: u32, threshold_cb: MaybeCallback, ) -> Self { let buf = BufReader::with_capacity(capacity, inner); if let Ok(mut status) = status.lock() { status.set_buffer_size(buf.capacity() as u32); } let mut this: SlimBuffer<R> = Self { inner: buf, status, threshold, threshold_cb, prebuf: Vec::with_capacity(255 * 1024), }; this.pre_buf(); this } fn pre_buf(&mut self) { let mut buf = [0u8; 1024]; while self.prebuf.len() < self.threshold as usize { if let Ok(n) = self.inner.read(&mut buf) { if n == 0 { break; } self.prebuf.extend_from_slice(&buf[..n]); } else { break; } } if let Some(callback) = &mut self.threshold_cb { callback(); } } } impl<R> Read for SlimBuffer<R> where R: Read, { fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { let bytes_read = if self.prebuf.len() > 0 { let n_bytes = (&self.prebuf[..]).read(buf)?; self.prebuf.drain(..n_bytes); n_bytes } else { self.inner.read(buf)? 
}; if let Ok(mut status) = self.status.lock() { status.add_bytes_received(bytes_read as u64); status.set_fullness(self.inner.buffer().len() as u32); } Ok(bytes_read) } } impl<R> BufRead for SlimBuffer<R> where R: Read, { fn fill_buf(&mut self) -> std::io::Result<&[u8]> { self.inner.fill_buf() } fn consume(&mut self, amt: usize) { self.inner.consume(amt) } } #[cfg(test)] mod tests { use std::sync::RwLock; use super::*; #[test] fn prebuf() { const BUFLEN: usize = 1024; let status = Arc::new(Mutex::new(StatusData::default())); let source: Vec<u8> = (0u8..255).into_iter().cycle().take(BUFLEN).collect(); let sb = SlimBuffer::new(&source[..], status, 2, None); assert_eq!(sb.prebuf, source); assert!(sb.prebuf.len() == source.len()); } #[test] fn prebuf_overfill() { const BUFLEN: usize = 1024 * 2; let status = Arc::new(Mutex::new(StatusData::default())); let source: Vec<u8> = (0u8..255).into_iter().cycle().take(BUFLEN).collect(); let mut sb = SlimBuffer::new(&source[..], status, 2, None); let mut buf = vec![0u8; BUFLEN]; let n = sb.read(&mut buf).unwrap(); sb.read(&mut buf[n..]).unwrap(); assert_eq!(buf, source); assert!(sb.prebuf.len() == 0); } #[test] fn callback() { const BUFLEN: usize = 1024 * 2; let status = Arc::new(Mutex::new(StatusData::default())); let source: Vec<u8> = (0u8..255).into_iter().cycle().take(BUFLEN).collect(); let value = Arc::new(RwLock::new(0)); let value_ref = value.clone(); let mut sb = SlimBuffer::new( &source[..], status, 2, Some(Box::new(move || { if let Ok(mut value) = value_ref.write() { *value += 1; } })), ); let mut buf = vec![0u8; BUFLEN]; let n = sb.read(&mut buf).unwrap(); sb.read(&mut buf[n..]).unwrap(); let val = value.read().unwrap(); assert!(*val == 1); } }
true
e1d2bd66894cb68531b07e3887ee304953662291
Rust
kovrik/hexdumpr
/src/lib.rs
UTF-8
2,211
3.375
3
[]
no_license
use std::cmp; pub fn print_hexdump(data: &[u8], offset: usize, display: char, bytes: usize) { let mut address = 0; while address <= data.len() { let end = cmp::min(address + 16, data.len()); print_line(&data[address..end], address + offset, display, bytes); address = address + 16; } } fn print_line(line: &[u8], address: usize, display: char, bytes: usize) { // print address print!("\n{:08x}:", address); let words = match (line.len() % bytes) == 0 { true => line.len() / bytes, false => (line.len() / bytes) + 1, }; for b in 0..words { let word = match bytes { 1 => line[b] as u16, _ => match line.len() == bytes*b + 1 { true => u16::from_be(((line[bytes * b] as u16) << 8) + 0), false => u16::from_be(((line[bytes * b] as u16) << 8) + (line[bytes * b + 1] as u16)), }, }; match display { 'b' => print!(" {:03o}", word), 'c' => match ((word as u8) as char).is_control() { true => print!(" "), false => print!(" {:03}", (word as u8) as char), }, 'C' => print!(" {:02x}", word), 'x' => print!(" {:04x}", word), 'o' => print!(" {:06o} ", word), 'd' => print!(" {:05} ", word), _ => print!(" {:04x}", word), } } if display != 'c' { if (line.len() % 16) > 0 { // align let words_left = (16 - line.len()) / bytes; let word_size = match display { 'b' => 4, 'c' => 4, 'C' => 3, 'x' => 5, 'o' => 8, 'd' => 8, _ => 5, }; for _ in 0..word_size * words_left { print!(" "); } } print!(" "); for c in line { // replace all control chars with dots match (*c as char).is_control() { true => print!("."), false => print!("{}", (*c as char)), } } } }
true
d5e9beb69da2076d3608ae016284ebbb51a84c2f
Rust
Akagi201/learning-rust
/trait/dyn_trait/src/main.rs
UTF-8
519
3.25
3
[ "MIT" ]
permissive
trait MyTrait { fn method(&self); } struct Type1; impl MyTrait for Type1 { fn method(&self) { println!("Type1: method"); } } struct Type2; impl MyTrait for Type2 { fn method(&self) { println!("Type2: method"); } } struct Type3; impl MyTrait for Type3 { fn method(&self) { println!("Type3: method"); } } fn main() { let vec: Vec<Box<dyn MyTrait>> = vec![Box::new(Type1), Box::new(Type2), Box::new(Type3)]; for item in vec { item.method(); } }
true
5f84401d4268e6d2a7683b192275506dea51cfcf
Rust
kobby-pentangeli/merkle-tree-accumulator
/src/error.rs
UTF-8
271
2.890625
3
[ "MIT" ]
permissive
//! Error types use thiserror::Error; #[derive(Clone, Debug, Eq, Error, PartialEq)] pub enum Error { /// Serialization error #[error("Serialization error: _0")] HashSerialize(String), /// None error #[error("Option.None an error")] NoneError, }
true
7c33b8c111a629016242dbdfef6b37255fac01ff
Rust
robo9k/clap-socketaddr
/examples/bind.rs
UTF-8
403
2.625
3
[]
no_license
use clap::Clap; /// Example for socket address in command line arguments #[derive(Clap, Debug)] struct Cli { #[clap(flatten)] address: clap_socketaddr::SocketAddrArgs, } fn main() { let args = Cli::parse(); println!("args {:?}", args); let tcp_listener = std::net::TcpListener::bind::<std::net::SocketAddr>(args.address.into()); println!("tcp listener {:?}", tcp_listener); }
true
3c8fa8ee2b774b1b3d9b4f838bb0e5c531da6966
Rust
y-usuzumi/survive-the-course
/survive-the-course-rs/src/problems/leetcode/_560_Subarray_Sum_Equals_K.rs
UTF-8
595
3.03125
3
[ "BSD-3-Clause" ]
permissive
// https://leetcode.com/problems/subarray-sum-equals-k/description/ use std::collections::HashMap; pub struct Solution; impl Solution { pub fn subarray_sum(nums: Vec<i32>, k: i32) -> i32 { let mut h = HashMap::new(); h.insert(0, 1); let mut curr_sum = 0; let mut result = 0; for num in nums { curr_sum += num; let diff = curr_sum - k; result += *h.entry(diff).or_default(); *h.entry(curr_sum).or_default() += 1; } return result; } } #[cfg(test)] mod tests { use super::*; }
true
4af9599a5e0fcf8b93c1cd7a10ed31f733e159fb
Rust
mdashti/rust-tagless
/src/main.rs
UTF-8
7,202
3.0625
3
[]
no_license
#![feature(box_syntax)] #![feature(box_patterns)] #![feature(refcell_replace_swap)] use std::collections::HashMap; use std::cell::Cell; use std::rc::Rc; use std::any::Any; use std::cell::RefCell; use std::default::Default; use std::borrow::BorrowMut; trait Val { type Output; fn get(&self) -> Self::Output; } #[derive(Debug,Clone, Eq, Ord, PartialOrd, PartialEq, Default)] struct NumVal { v: i64, } impl Val for NumVal { type Output = i64; fn get(&self) -> Self::Output { self.v } } impl std::ops::Add for NumVal { type Output = Self; fn add(self, rhs: Self) -> Self::Output { Self { v: self.v + rhs.v } } } #[derive(Debug,Clone, Eq, Ord, PartialOrd, PartialEq, Default)] struct BoolVal { v: bool, } impl Val for BoolVal { type Output = bool; fn get(&self) -> Self::Output { self.v } } trait Exp { type Output; fn stage(&self) -> Box<StagedExp<Output=Self::Output>>; fn interpret(&self) -> Self::Output; } trait StagedExp { type Output; fn run(&self) -> Self::Output; } struct ConstantExp<T: 'static+Clone> { const_val: T, } struct ConstantStagedExp<T: 'static+Clone> { const_val: T, } impl<T: 'static+Clone> Exp for ConstantExp<T>{ type Output = T; fn stage(&self) -> Box<StagedExp<Output=Self::Output>> { box ConstantStagedExp { const_val: self.const_val.clone() } } fn interpret(&self) -> Self::Output { self.const_val.clone() } } impl<T: 'static+Clone> StagedExp for ConstantStagedExp<T>{ type Output = T; fn run(&self) -> Self::Output { self.const_val.clone() } } static mut var_counter: i32 = 0; #[derive(Debug,Clone)] struct VariableExp<T: 'static+Clone> { id: i32, var_val: Rc<RefCell<T>>, } impl<T: 'static+Clone+Default> VariableExp<T> { fn fresh() -> VariableExp<T> { VariableExp { id: { unsafe{ var_counter += 1; var_counter } }, var_val: Rc::new(RefCell::new(T::default())), } } } impl<T: 'static+Clone> VariableExp<T> { fn fresh_with_val(v: T) -> VariableExp<T> { VariableExp { id: { unsafe{ var_counter += 1; var_counter } }, var_val: Rc::new(RefCell::new(v)), } } } impl<T: 
'static+Clone> Exp for VariableExp<T>{ type Output = T; fn stage(&self) -> Box<StagedExp<Output=Self::Output>> { box self.clone() } fn interpret(&self) -> Self::Output { self.var_val.borrow().clone() } } impl<T: 'static+Clone> StagedExp for VariableExp<T>{ type Output = T; fn run(&self) -> Self::Output { self.var_val.borrow().clone() } } struct AddExp { exp1: Box<Exp<Output=NumVal>>, exp2: Box<Exp<Output=NumVal>>, } struct AddStagedExp { staged_exp1: Box<StagedExp<Output=NumVal>>, staged_exp2: Box<StagedExp<Output=NumVal>>, } impl Exp for AddExp{ type Output = NumVal; fn stage(&self) -> Box<StagedExp<Output=Self::Output>> { box AddStagedExp { staged_exp1: self.exp1.stage(), staged_exp2: self.exp2.stage(), } } fn interpret(&self) -> Self::Output { self.exp1.interpret() + self.exp2.interpret() } } impl StagedExp for AddStagedExp{ type Output = NumVal; fn run(&self) -> Self::Output { self.staged_exp1.run() + self.staged_exp2.run() } } struct LessThanExp { exp1: Box<Exp<Output=NumVal>>, exp2: Box<Exp<Output=NumVal>>, } struct LessThanStagedExp { staged_exp1: Box<StagedExp<Output=NumVal>>, staged_exp2: Box<StagedExp<Output=NumVal>>, } impl Exp for LessThanExp{ type Output = BoolVal; fn stage(&self) -> Box<StagedExp<Output=Self::Output>> { box LessThanStagedExp { staged_exp1: self.exp1.stage(), staged_exp2: self.exp2.stage(), } } fn interpret(&self) -> Self::Output { Self::Output { v: self.exp1.interpret() < self.exp2.interpret() } } } impl StagedExp for LessThanStagedExp{ type Output = BoolVal; fn run(&self) -> Self::Output { Self::Output { v: self.staged_exp1.run() < self.staged_exp2.run() } } } struct LetExp<T: 'static+Clone, U: 'static+Clone> { exp1: Box<Exp<Output=T>>, exp2: Box<Fn(VariableExp<T>) -> Box<Exp<Output=U>>> } struct LetStagedExp<T: 'static+Clone, U: 'static+Clone> { staged_exp1: Box<StagedExp<Output=T>>, staged_exp1_var: VariableExp<T>, staged_exp2: Box<StagedExp<Output=U>>, } impl<T: 'static+Clone+Default, U: 'static+Clone> Exp for LetExp<T,U>{ type 
Output = U; fn stage(&self) -> Box<StagedExp<Output=Self::Output>> { let exp1_var = VariableExp::fresh(); let staged_exp2 = (self.exp2)(exp1_var.clone()).stage(); box LetStagedExp { staged_exp1: self.exp1.stage(), staged_exp1_var: exp1_var, staged_exp2, } } fn interpret(&self) -> Self::Output { let exp1_var = VariableExp::fresh_with_val(self.exp1.interpret()); (self.exp2)(exp1_var).interpret() } } impl<T: 'static+Clone, U: 'static+Clone> StagedExp for LetStagedExp<T,U>{ type Output = U; fn run(&self) -> Self::Output { self.staged_exp1_var.var_val.replace( self.staged_exp1.run() ); self.staged_exp2.run() } } fn unit_exp<T: 'static+Clone>(const_val: T) -> ConstantExp<T> { ConstantExp { const_val } } fn add_exp(exp1: Box<Exp<Output=NumVal>>, exp2: Box<Exp<Output=NumVal>>) -> AddExp { AddExp { exp1, exp2 } } fn let_exp<T: 'static+Clone+Default, U: 'static+Clone>(exp1: Box<Exp<Output=T>>, exp2: Box<Fn(VariableExp<T>) -> Box<Exp<Output=U>>>) -> LetExp<T,U> { LetExp { exp1, exp2 } } fn main() { // let i = 1 { // while i < 1000 { // i = i + 1 // } // } // let expr = Expr::Let( // "i", // Type::Number, // box Expr::Constant(Value::Number(1)), // box Expr::While( // box Expr::LessThan(box Expr::Get("i"), box Expr::Constant(Value::Number(1000))), // box Expr::Set( // "i", // box Expr::Add(box Expr::Get("i"), box Expr::Constant(Value::Number(1))), // ), // ), // ); // // println!("{:?}", interpret(&HashMap::new(), &expr)); // if let Staged::Bool(bool) = stage(&HashMap::new(), &expr) { // println!("{:?}", bool()); // } let num1 = unit_exp(NumVal{ v: 1 }); let num2 = unit_exp(NumVal{ v: 2 }); let add_nums = add_exp(box num1, box num2); let let_nums = let_exp(box add_nums, box |v| { let num3 = unit_exp(NumVal{ v: 5 }); let add_nums2 = add_exp(box v, box num3); box add_nums2 }); println!("{:?}", let_nums.interpret()); let staged_expr = let_nums.stage(); println!("{:?}", staged_expr.run()); }
true
4095ca5e4c3ed21b6b6a8de99478bdfee55e73df
Rust
euclidr/leetcode
/examples/p958.rs
UTF-8
2,843
3.40625
3
[]
no_license
use std::rc::Rc; use std::cell::RefCell; // Definition for a binary tree node. #[derive(Debug, PartialEq, Eq)] pub struct TreeNode { pub val: i32, pub left: Option<Rc<RefCell<TreeNode>>>, pub right: Option<Rc<RefCell<TreeNode>>>, } impl TreeNode { #[inline] pub fn new(val: i32) -> Self { TreeNode { val, left: None, right: None } } } struct Solution; impl Solution { pub fn is_complete_tree(root: Option<Rc<RefCell<TreeNode>>>) -> bool { if root.is_none() { return true; } let root = root.unwrap(); let mut node = root.clone(); let mut height = 0; loop { let tmp = node; match tmp.borrow().left { Some(ref left) => { node = left.clone(); height = height + 1; }, None => break, }; }; let mut stack = vec![(root, 0)]; let mut finished = false; while !stack.is_empty() { let (node, level) = stack.pop().unwrap(); if level == height { if node.borrow().left.is_some() || node.borrow().right.is_some() { return false } continue; } if level < height - 1 { if node.borrow().left.is_none() || node.borrow().right.is_none() { return false; } else { if let Some(ref right) = node.borrow().right { stack.push((right.clone(), level+1)); }; if let Some(ref left) = node.borrow().left { stack.push((left.clone(), level+1)); }; } continue; } if level == height - 1 { if finished && (node.borrow().left.is_some() || node.borrow().right.is_some()) { return false; } if node.borrow().left.is_none() && node.borrow().right.is_some() { return false; } if node.borrow().left.is_some() { if node.borrow().right.is_none() { finished = true; } else { if let Some(ref right) = node.borrow().right { stack.push((right.clone(), level+1)); }; } if let Some(ref left) = node.borrow().left { stack.push((left.clone(), level+1)); }; continue; } finished = true; } } true } } fn main() { Solution::is_complete_tree(None); }
true
e73e12409bd5ee4a161393756511b4f4ecabb430
Rust
TimBednarzyk/conrod-android-test
/conrod/src/image.rs
UTF-8
3,072
3.640625
4
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! A type used to manage a user's image data and map them to `Image` widgets: //! //! - [Map](./struct.Map.html) use std; use widget; /// A type used to map the `widget::Id` of `Image` widgets to their associated `Img` data. /// /// The `image::Map` type is usually instantiated and loaded during the "setup" stage of the /// application before the main loop begins. A macro is provided to simplify the construction of /// maps with multiple images. /// /// ```ignore /// let image_map = image_map! { /// (RUST_LOGO, try!(image::open("rust-logo.png"))), /// (CAT_PIC, try!(image::open("floof.jpeg"))), /// }; /// ``` pub struct Map<Img> { map: HashMap<Img>, /// Whether or not the `image::Map` will trigger a redraw the next time `Ui::draw` is called. /// /// This is automatically set to `true` when any method that takes `&mut self` is called. pub trigger_redraw: std::cell::Cell<bool>, } /// The type of `std::collections::HashMap` used within the `image::Map`. pub type HashMap<Img> = std::collections::HashMap<widget::Id, Img>; impl<Img> std::ops::Deref for Map<Img> { type Target = HashMap<Img>; fn deref(&self) -> &Self::Target { &self.map } } impl<Img> Map<Img> { /// Construct a new, empty `image::Map`. pub fn new() -> Self { Map { map: std::collections::HashMap::new(), trigger_redraw: std::cell::Cell::new(true), } } // Calling any of the following methods will trigger a redraw when using `Ui::draw_if_changed`. /// Uniquely borrow the `Img` associated with the given widget. /// /// Note: Calling this will trigger a redraw the next time `Ui::draw_if_changed` is called. pub fn get_mut(&mut self, id: widget::Id) -> Option<&mut Img> { self.trigger_redraw.set(true); self.map.get_mut(&id) } /// Inserts the given widget-image pair into the map. /// /// If the map did not already have an image associated with this widget, `None` is returned. /// /// If the map did already have an image associated with this widget, the old value is removed /// from the map and returned. 
/// /// Note: Calling this will trigger a redraw the next time `Ui::draw_if_changed` is called. pub fn insert(&mut self, id: widget::Id, img: Img) -> Option<Img> { self.trigger_redraw.set(true); self.map.insert(id, img) } } impl<Img> std::iter::Extend<(widget::Id, Img)> for Map<Img> { fn extend<I>(&mut self, mappings: I) where I: IntoIterator<Item=(widget::Id, Img)>, { self.trigger_redraw.set(true); self.map.extend(mappings.into_iter().map(|(id, img)| (id, img))); } } /// A macro for simplifying the instantiation of an `image::Map`. /// /// See the [**Map**](./image/struct.Map.html) documentation for an example. #[macro_export] macro_rules! image_map { ($(($idx:expr, $img:expr)),* $(,)*) => {{ let mut map = $crate::image::Map::new(); $( map.insert($idx, $img); )* map }}; }
true
5bee70875688c531747f34e562ee0f1256d6db75
Rust
SCappella/riddler
/2019-09-13-states/states/src/main.rs
UTF-8
4,361
3.203125
3
[]
no_license
use std::fmt::Debug; struct Graph<T> { data: Vec<(T, Vec<usize>)>, } impl<T> Graph<T> { #[inline] fn new(nodes: Vec<T>) -> Self { Self { data: nodes.into_iter().map(|t| (t, Vec::new())).collect(), } } #[inline] fn add_edge(&mut self, start: usize, end: usize) { self.data[start].1.push(end) } fn len(&self) -> usize { self.data.len() } #[inline] fn get_neighbors(&self, start: usize) -> &[usize] { &self.data[start].1 } fn longest_simple_path(&self) -> Vec<usize> { let mut longest_path = Vec::new(); let mut tasks: Vec<Vec<usize>> = vec![(0..self.len()).rev().collect()]; let mut path = vec![]; let mut in_path = vec![false; self.len()]; loop { while let Some(next_node) = tasks.last_mut().unwrap().pop() { path.push(next_node); in_path[next_node] = true; tasks.push( self.get_neighbors(next_node) .iter() .rev() .filter(|&&node| !in_path[node]) .copied() .collect(), ); } if path.len() > longest_path.len() { longest_path = path.clone(); } if let Some(popped_node) = path.pop() { in_path[popped_node] = false; tasks.pop(); } else { break; } } longest_path } } impl<T: Debug> Graph<T> { fn longest_simple_path_debug(&self) { let mut longest_path = Vec::new(); let mut count = 1; let mut tasks: Vec<Vec<usize>> = vec![(0..self.len()).rev().collect()]; let mut path = vec![]; let mut in_path = vec![false; self.len()]; loop { while let Some(next_node) = tasks.last_mut().unwrap().pop() { path.push(next_node); in_path[next_node] = true; tasks.push( self.get_neighbors(next_node) .iter() .rev() .filter(|&&node| !in_path[node]) .copied() .collect(), ); } if path.len() >= longest_path.len() { if path.len() == longest_path.len() { count += 1; println!("{}", count); } else { count = 1; println!("{}", count); longest_path = path.clone(); } } if let Some(popped_node) = path.pop() { in_path[popped_node] = false; tasks.pop(); } else { break; } } println!( "{:?}", longest_path .into_iter() .map(|node| &self.data[node].0) .collect::<Vec<_>>() ); } } fn main() { let digraphs_all = [ "AK", "AL", "AR", 
"AS", "AZ", "CA", "CO", "CT", "DC", "DE", "FL", "FM", "GA", "GU", "HI", "IA", "ID", "IL", "IN", "KS", "KY", "LA", "MA", "MD", "ME", "MH", "MI", "MN", "MO", "MP", "MS", "MT", "NC", "ND", "NE", "NH", "NJ", "NM", "NV", "NY", "OH", "OK", "OR", "PA", "PR", "PW", "RI", "SC", "SD", "TN", "TX", "UT", "VA", "VI", "VT", "WA", "WI", "WV", "WY", ]; let digraphs_states = [ "AK", "AL", "AR", "AZ", "CA", "CO", "CT", "DE", "FL", "GA", "HI", "IA", "ID", "IL", "IN", "KS", "KY", "LA", "MA", "MD", "ME", "MI", "MN", "MO", "MS", "MT", "NC", "ND", "NE", "NH", "NJ", "NM", "NV", "NY", "OH", "OK", "OR", "PA", "RI", "SC", "SD", "TN", "TX", "UT", "VA", "VT", "WA", "WI", "WV", "WY", ]; // switch this let digraphs = digraphs_all; //let digraphs = digraphs_states; let mut graph = Graph::new(digraphs.iter().collect()); for (i, digraph_1) in digraphs.iter().enumerate() { for (j, digraph_2) in digraphs.iter().enumerate() { if digraph_1.as_bytes()[1] == digraph_2.as_bytes()[0] { graph.add_edge(i, j); } } } graph.longest_simple_path_debug(); }
true
491ba46bf42792df64db41cf61175888f6d5b3ca
Rust
dtynn/learning
/leetcode-rs/p0139_word_break/src/lib.rs
UTF-8
1,226
3.109375
3
[]
no_license
struct Solution {} impl Solution { pub fn word_break(s: String, word_dict: Vec<String>) -> bool { let bytes = s.as_bytes(); let mut can_dict = word_dict .into_iter() .fold(std::collections::HashMap::new(), |mut dic, word| { dic.insert(word.into_bytes(), true); dic }); Solution::can_break(&bytes[..], &mut can_dict) } fn can_break(b: &[u8], dict: &mut std::collections::HashMap<Vec<u8>, bool>) -> bool { if b.len() == 0 { return true; } if let Some(can) = dict.get(b).cloned() { return can; } for i in 1..b.len() { let left = &b[..i]; let left_can = Solution::can_break(left, dict); dict.insert(left.to_owned(), left_can); if !left_can { continue; } let right = &b[i..]; let right_can = Solution::can_break(right, dict); dict.insert(right.to_owned(), right_can); if right_can { return true; } } dict.insert(b.to_owned(), false); false } }
true
b66ca32d65ba7ec92f50d31dc27b0b5a0a210486
Rust
oxidecomputer/RustCrypto_traits
/elliptic-curve/tests/pkcs8.rs
UTF-8
2,139
2.71875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! PKCS#8 tests #![cfg(all(feature = "dev", feature = "pkcs8"))] use elliptic_curve::{ dev::{PublicKey, SecretKey}, sec1::ToEncodedPoint, }; use hex_literal::hex; use pkcs8::{FromPrivateKey, FromPublicKey}; /// DER-encoded PKCS#8 private key const PKCS8_PRIVATE_KEY_DER: &[u8; 138] = include_bytes!("examples/pkcs8-private-key.der"); /// DER-encoded PKCS#8 public key const PKCS8_PUBLIC_KEY_DER: &[u8; 91] = include_bytes!("examples/pkcs8-public-key.der"); /// PEM-encoded PKCS#8 private key #[cfg(feature = "pem")] const PKCS8_PRIVATE_KEY_PEM: &str = include_str!("examples/pkcs8-private-key.pem"); /// PEM-encoded PKCS#8 public key #[cfg(feature = "pem")] const PKCS8_PUBLIC_KEY_PEM: &str = include_str!("examples/pkcs8-public-key.pem"); #[test] fn decode_pkcs8_private_key_from_der() { let secret_key = SecretKey::from_pkcs8_der(&PKCS8_PRIVATE_KEY_DER[..]).unwrap(); let expected_scalar = hex!("69624171561A63340DE0E7D869F2A05492558E1A04868B6A9F854A866788188D"); assert_eq!(secret_key.to_bytes().as_slice(), &expected_scalar[..]); } #[test] fn decode_pkcs8_public_key_from_der() { let public_key = PublicKey::from_public_key_der(&PKCS8_PUBLIC_KEY_DER[..]).unwrap(); let expected_sec1_point = hex!("041CACFFB55F2F2CEFD89D89EB374B2681152452802DEEA09916068137D839CF7FC481A44492304D7EF66AC117BEFE83A8D08F155F2B52F9F618DD447029048E0F"); assert_eq!( public_key.to_encoded_point(false).as_bytes(), &expected_sec1_point[..] 
); } #[test] #[cfg(feature = "pem")] fn decode_pkcs8_private_key_from_pem() { let secret_key = PKCS8_PRIVATE_KEY_PEM.parse::<SecretKey>().unwrap(); // Ensure key parses equivalently to DER let der_key = SecretKey::from_pkcs8_der(&PKCS8_PRIVATE_KEY_DER[..]).unwrap(); assert_eq!(secret_key.to_bytes(), der_key.to_bytes()); } #[test] #[cfg(feature = "pem")] fn decode_pkcs8_public_key_from_pem() { let public_key = PKCS8_PUBLIC_KEY_PEM.parse::<PublicKey>().unwrap(); // Ensure key parses equivalently to DER let der_key = PublicKey::from_public_key_der(&PKCS8_PUBLIC_KEY_DER[..]).unwrap(); assert_eq!(public_key, der_key); }
true
ad6933747cd0bac7a0dba2246a9bb00bad49f270
Rust
Aelto/calco
/src/api/inherited_sheet.rs
UTF-8
5,841
2.890625
3
[ "MIT" ]
permissive
use crate::models::inherited_sheet::InheritedSheet; use crate::models::user::{UserRole}; use crate::models::sheet::Sheet; use crate::utils::req_auth::request_authentication; use serde::{Deserialize, Serialize}; use actix_web::{web, HttpRequest, HttpResponse, Result, http}; use chrono::prelude::*; #[derive(Serialize, Deserialize)] pub struct CreateInheritedSheetBody { pub date: String, pub sheet_id: i32, pub inherited_sheet_id: i32 } pub async fn create_inherited_sheet(req: HttpRequest, form: web::Form<CreateInheritedSheetBody>) -> Result<HttpResponse> { let auth_result = request_authentication(&req, UserRole::Guest); match auth_result { Ok(auth) => { if !auth.has_access() { return Ok( HttpResponse::NotFound() .content_type("text/plain") .body("HTTP 404: Not found"), ); } } Err(e) => { return Ok( HttpResponse::InternalServerError() .content_type("text/plain") .body(e), ) } }; if let Ok(date) = NaiveDate::parse_from_str(&form.date, "%Y-%m-%d") { let inherited_sheet = InheritedSheet::new(form.sheet_id, form.inherited_sheet_id, date.and_hms(0, 0, 0).timestamp()); inherited_sheet.insert() .map_err(|err| { println!("error when creating inherited_sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when inserting inherited_sheet into database") })?; let parent_sheet = Sheet::get_by_id(inherited_sheet.parent_sheet_id).map_err(|err| { println!("error when fetching parent sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when fetching parent sheet from database") })?; let child_sheet = Sheet::get_by_id(inherited_sheet.inherited_sheet_id).map_err(|err| { println!("error when fetching child sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when fetching child sheet from database") })?; match (parent_sheet, child_sheet) { (Some(mut parent_sheet), Some(child_sheet)) => { 
parent_sheet.add_to_cached_value(child_sheet.cached_value) .map_err(|err| { println!("error when updating parent sheet cached value {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when updating parent sheet cached value in database") })?; }, _ => {} }; Ok( HttpResponse::Found() .header(http::header::LOCATION, format!("/sheet/{}", form.sheet_id)) .content_type("text/plain") .body("created") ) } else { println!("error when parsing inherited_sheet date"); return Ok( HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when parsing inherited_sheet date") ); } } #[derive(Serialize, Deserialize)] pub struct DeleteInheritedSheetByIdBody { pub sheet_id: i32, pub inherited_sheet_id: i32 } pub async fn delete_inherited_sheet_by_id(req: HttpRequest, form: web::Form<DeleteInheritedSheetByIdBody>) -> Result<HttpResponse> { let auth_result = request_authentication(&req, UserRole::Guest); match auth_result { Ok(auth) => { if !auth.has_access() { return Ok( HttpResponse::NotFound() .content_type("text/plain") .body("HTTP 404: Not found"), ); } } Err(e) => { return Ok( HttpResponse::InternalServerError() .content_type("text/plain") .body(e), ) } }; let some_inherited_sheet = InheritedSheet::get_by_parent_and_inherited_id(form.sheet_id, form.inherited_sheet_id) .map_err(|err| { println!("error when fetching inherited sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when searching inherited sheet from database") })?; if let Some(inherited_sheet) = some_inherited_sheet { inherited_sheet.remove().map_err(|err| { println!("error when removing inherited sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when removing inherited sheet from database") })?; let parent_sheet = Sheet::get_by_id(inherited_sheet.parent_sheet_id).map_err(|err| { 
println!("error when fetching parent sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when fetching parent sheet from database") })?; let child_sheet = Sheet::get_by_id(inherited_sheet.inherited_sheet_id).map_err(|err| { println!("error when fetching child sheet {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when fetching child sheet from database") })?; match (parent_sheet, child_sheet) { (Some(mut parent_sheet), Some(child_sheet)) => { parent_sheet.remove_from_cached_value(child_sheet.cached_value) .map_err(|err| { println!("error when updating parent sheet cached value {}", err); HttpResponse::InternalServerError() .content_type("text/plain") .body("Internal server error: error when updating parent sheet cached value in database") })?; }, _ => {} }; } Ok( HttpResponse::Found() .header(http::header::LOCATION, format!("/sheet/{}", form.sheet_id)) .content_type("text/plain") .body("created") ) }
true
692e3b489951a9dd8ae61923a35e0d638b1ebd71
Rust
winksaville/fuchsia
/third_party/rust_crates/vendor/tempdir/src/lib.rs
UTF-8
13,007
3.25
3
[ "Apache-2.0", "MIT", "BSD-3-Clause" ]
permissive
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://www.rust-lang.org/favicon.ico", html_root_url = "https://docs.rs/tempdir/0.3.7")] #![cfg_attr(test, deny(warnings))] //! Temporary directories of files. //! //! The [`TempDir`] type creates a directory on the file system that //! is deleted once it goes out of scope. At construction, the //! `TempDir` creates a new directory with a randomly generated name //! and a prefix of your choosing. //! //! [`TempDir`]: struct.TempDir.html //! [`std::env::temp_dir()`]: https://doc.rust-lang.org/std/env/fn.temp_dir.html //! //! # Examples //! //! ``` //! extern crate tempdir; //! //! use std::fs::File; //! use std::io::{self, Write}; //! use tempdir::TempDir; //! //! fn main() { //! if let Err(_) = run() { //! ::std::process::exit(1); //! } //! } //! //! fn run() -> Result<(), io::Error> { //! // Create a directory inside of `std::env::temp_dir()`, named with //! // the prefix "example". //! let tmp_dir = TempDir::new("example")?; //! let file_path = tmp_dir.path().join("my-temporary-note.txt"); //! let mut tmp_file = File::create(file_path)?; //! writeln!(tmp_file, "Brian was here. Briefly.")?; //! //! // By closing the `TempDir` explicitly, we can check that it has //! // been deleted successfully. If we don't close it explicitly, //! // the directory will still be deleted when `tmp_dir` goes out //! // of scope, but we won't know whether deleting the directory //! // succeeded. //! drop(tmp_file); //! 
tmp_dir.close()?; //! Ok(()) //! } //! ``` extern crate rand; extern crate remove_dir_all; use std::env; use std::io::{self, Error, ErrorKind}; use std::fmt; use std::fs; use std::path::{self, PathBuf, Path}; use rand::{thread_rng, Rng}; use remove_dir_all::remove_dir_all; /// A directory in the filesystem that is automatically deleted when /// it goes out of scope. /// /// The [`TempDir`] type creates a directory on the file system that /// is deleted once it goes out of scope. At construction, the /// `TempDir` creates a new directory with a randomly generated name, /// and with a prefix of your choosing. /// /// The default constructor, [`TempDir::new`], creates directories in /// the location returned by [`std::env::temp_dir()`], but `TempDir` /// can be configured to manage a temporary directory in any location /// by constructing with [`TempDir::new_in`]. /// /// After creating a `TempDir`, work with the file system by doing /// standard [`std::fs`] file system operations on its [`Path`], /// which can be retrieved with [`TempDir::path`]. Once the `TempDir` /// value is dropped, the directory at the path will be deleted, along /// with any files and directories it contains. It is your responsibility /// to ensure that no further file system operations are attempted /// inside the temporary directory once it has been deleted. /// /// Various platform-specific conditions may cause `TempDir` to fail /// to delete the underlying directory. It's important to ensure that /// handles (like [`File`] and [`ReadDir`]) to files inside the /// directory are dropped before the `TempDir` goes out of scope. The /// `TempDir` destructor will silently ignore any errors in deleting /// the directory; to instead handle errors call [`TempDir::close`]. 
/// /// Note that if the program exits before the `TempDir` destructor is /// run, such as via [`std::process::exit`], by segfaulting, or by /// receiving a signal like `SIGINT`, then the temporary directory /// will not be deleted. /// /// [`File`]: http://doc.rust-lang.org/std/fs/struct.File.html /// [`Path`]: http://doc.rust-lang.org/std/path/struct.Path.html /// [`ReadDir`]: http://doc.rust-lang.org/std/fs/struct.ReadDir.html /// [`TempDir::close`]: struct.TempDir.html#method.close /// [`TempDir::new`]: struct.TempDir.html#method.new /// [`TempDir::new_in`]: struct.TempDir.html#method.new_in /// [`TempDir::path`]: struct.TempDir.html#method.path /// [`TempDir`]: struct.TempDir.html /// [`std::env::temp_dir()`]: https://doc.rust-lang.org/std/env/fn.temp_dir.html /// [`std::fs`]: http://doc.rust-lang.org/std/fs/index.html /// [`std::process::exit`]: http://doc.rust-lang.org/std/process/fn.exit.html pub struct TempDir { path: Option<PathBuf>, } // How many times should we (re)try finding an unused random name? It should be // enough that an attacker will run out of luck before we run out of patience. const NUM_RETRIES: u32 = 1 << 31; // How many characters should we include in a random file name? It needs to // be enough to dissuade an attacker from trying to preemptively create names // of that length, but not so huge that we unnecessarily drain the random number // generator of entropy. const NUM_RAND_CHARS: usize = 12; impl TempDir { /// Attempts to make a temporary directory inside of `env::temp_dir()` whose /// name will have the prefix, `prefix`. The directory and /// everything inside it will be automatically deleted once the /// returned `TempDir` is destroyed. /// /// # Errors /// /// If the directory can not be created, `Err` is returned. 
/// /// # Examples /// /// ``` /// use std::fs::File; /// use std::io::Write; /// use tempdir::TempDir; /// /// # use std::io; /// # fn run() -> Result<(), io::Error> { /// // Create a directory inside of `std::env::temp_dir()`, named with /// // the prefix, "example". /// let tmp_dir = TempDir::new("example")?; /// let file_path = tmp_dir.path().join("my-temporary-note.txt"); /// let mut tmp_file = File::create(file_path)?; /// writeln!(tmp_file, "Brian was here. Briefly.")?; /// /// // `tmp_dir` goes out of scope, the directory as well as /// // `tmp_file` will be deleted here. /// # Ok(()) /// # } /// ``` pub fn new(prefix: &str) -> io::Result<TempDir> { TempDir::new_in(&env::temp_dir(), prefix) } /// Attempts to make a temporary directory inside of `tmpdir` /// whose name will have the prefix `prefix`. The directory and /// everything inside it will be automatically deleted once the /// returned `TempDir` is destroyed. /// /// # Errors /// /// If the directory can not be created, `Err` is returned. /// /// # Examples /// /// ``` /// use std::fs::{self, File}; /// use std::io::Write; /// use tempdir::TempDir; /// /// # use std::io; /// # fn run() -> Result<(), io::Error> { /// // Create a directory inside of the current directory, named with /// // the prefix, "example". /// let tmp_dir = TempDir::new_in(".", "example")?; /// let file_path = tmp_dir.path().join("my-temporary-note.txt"); /// let mut tmp_file = File::create(file_path)?; /// writeln!(tmp_file, "Brian was here. 
Briefly.")?; /// # Ok(()) /// # } /// ``` pub fn new_in<P: AsRef<Path>>(tmpdir: P, prefix: &str) -> io::Result<TempDir> { let storage; let mut tmpdir = tmpdir.as_ref(); if !tmpdir.is_absolute() { let cur_dir = env::current_dir()?; storage = cur_dir.join(tmpdir); tmpdir = &storage; // return TempDir::new_in(&cur_dir.join(tmpdir), prefix); } let mut rng = thread_rng(); for _ in 0..NUM_RETRIES { let suffix: String = rng.gen_ascii_chars().take(NUM_RAND_CHARS).collect(); let leaf = if !prefix.is_empty() { format!("{}.{}", prefix, suffix) } else { // If we're given an empty string for a prefix, then creating a // directory starting with "." would lead to it being // semi-invisible on some systems. suffix }; let path = tmpdir.join(&leaf); match fs::create_dir(&path) { Ok(_) => return Ok(TempDir { path: Some(path) }), Err(ref e) if e.kind() == ErrorKind::AlreadyExists => {} Err(e) => return Err(e), } } Err(Error::new(ErrorKind::AlreadyExists, "too many temporary directories already exist")) } /// Accesses the [`Path`] to the temporary directory. /// /// [`Path`]: http://doc.rust-lang.org/std/path/struct.Path.html /// /// # Examples /// /// ``` /// use tempdir::TempDir; /// /// # use std::io; /// # fn run() -> Result<(), io::Error> { /// let tmp_path; /// /// { /// let tmp_dir = TempDir::new("example")?; /// tmp_path = tmp_dir.path().to_owned(); /// /// // Check that the temp directory actually exists. /// assert!(tmp_path.exists()); /// /// // End of `tmp_dir` scope, directory will be deleted /// } /// /// // Temp directory should be deleted by now /// assert_eq!(tmp_path.exists(), false); /// # Ok(()) /// # } /// ``` pub fn path(&self) -> &path::Path { self.path.as_ref().unwrap() } /// Unwraps the [`Path`] contained in the `TempDir` and /// returns it. This destroys the `TempDir` without deleting the /// directory represented by the returned `Path`. 
/// /// [`Path`]: http://doc.rust-lang.org/std/path/struct.Path.html /// /// # Examples /// /// ``` /// use std::fs; /// use tempdir::TempDir; /// /// # use std::io; /// # fn run() -> Result<(), io::Error> { /// let tmp_dir = TempDir::new("example")?; /// /// // Convert `tmp_dir` into a `Path`, destroying the `TempDir` /// // without deleting the directory. /// let tmp_path = tmp_dir.into_path(); /// /// // Delete the temporary directory ourselves. /// fs::remove_dir_all(tmp_path)?; /// # Ok(()) /// # } /// ``` pub fn into_path(mut self) -> PathBuf { self.path.take().unwrap() } /// Closes and removes the temporary directory, returing a `Result`. /// /// Although `TempDir` removes the directory on drop, in the destructor /// any errors are ignored. To detect errors cleaning up the temporary /// directory, call `close` instead. /// /// # Errors /// /// This function may return a variety of [`std::io::Error`]s that result from deleting /// the files and directories contained with the temporary directory, /// as well as from deleting the temporary directory itself. These errors /// may be platform specific. /// /// [`std::io::Error`]: http://doc.rust-lang.org/std/io/struct.Error.html /// /// # Examples /// /// ``` /// use std::fs::File; /// use std::io::Write; /// use tempdir::TempDir; /// /// # use std::io; /// # fn run() -> Result<(), io::Error> { /// // Create a directory inside of `std::env::temp_dir()`, named with /// // the prefix, "example". /// let tmp_dir = TempDir::new("example")?; /// let file_path = tmp_dir.path().join("my-temporary-note.txt"); /// let mut tmp_file = File::create(file_path)?; /// writeln!(tmp_file, "Brian was here. Briefly.")?; /// /// // By closing the `TempDir` explicitly we can check that it has /// // been deleted successfully. If we don't close it explicitly, /// // the directory will still be deleted when `tmp_dir` goes out /// // of scope, but we won't know whether deleting the directory /// // succeeded. 
/// drop(tmp_file); /// tmp_dir.close()?; /// # Ok(()) /// # } /// ``` pub fn close(mut self) -> io::Result<()> { let result = remove_dir_all(self.path()); // Prevent the Drop impl from removing the dir a second time. self.path = None; result } } impl AsRef<Path> for TempDir { fn as_ref(&self) -> &Path { self.path() } } impl fmt::Debug for TempDir { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("TempDir") .field("path", &self.path()) .finish() } } impl Drop for TempDir { fn drop(&mut self) { // Path is `None` if `close()` or `into_path()` has been called. if let Some(ref p) = self.path { let _ = remove_dir_all(p); } } } // the tests for this module need to change the path using change_dir, // and this doesn't play nicely with other tests so these unit tests are located // in src/test/run-pass/tempfile.rs
true
844ed518e15eef50ef5da6ae31fb85899850b15b
Rust
IanTayler/fbsim
/src/components/collision_box.rs
UTF-8
1,705
2.859375
3
[ "LicenseRef-scancode-warranty-disclaimer", "MIT" ]
permissive
use crate::rectangle; use amethyst::{ assets::PrefabData, core::{math, Transform}, derive::PrefabData, ecs::{Component, DenseVecStorage, Entity, WriteStorage}, Error, }; use serde::{Deserialize, Serialize}; #[derive(Debug, Deserialize, Copy, Clone, Serialize, PrefabData)] #[prefab(Component)] pub struct CollisionBox { /// Distance from the center to the upper left. /// Note: probably negative. pub upper_left_distance: math::Vector2<f32>, /// Distance from the center to the lower right. pub lower_right_distance: math::Vector2<f32>, } impl CollisionBox { pub fn rectangle(&self, transform: &Transform) -> math::Vector4<f32> { let translation = transform.translation().xy(); math::Vector4::new( translation.x + self.upper_left_distance.x, translation.y + self.upper_left_distance.y, translation.x + self.lower_right_distance.x, translation.y + self.lower_right_distance.y, ) } } impl Component for CollisionBox { type Storage = DenseVecStorage<Self>; } impl rectangle::Rectangle<f32> for CollisionBox { fn upper_left(&self) -> math::Vector2<f32> { self.upper_left_distance } fn lower_right(&self) -> math::Vector2<f32> { self.lower_right_distance } fn center(&self) -> math::Vector2<f32> { (self.upper_left_distance + self.lower_right_distance) / 2.0 } } pub fn are_colliding( collision1: &CollisionBox, transform1: &Transform, collision2: &CollisionBox, transform2: &Transform, ) -> bool { rectangle::overlap( collision1.rectangle(transform1), collision2.rectangle(transform2), ) }
true
4b4a7495d59181592d303a509895a186e98d5e6b
Rust
WillDeJs/ray_tracing
/src/tests.rs
UTF-8
1,033
3.25
3
[]
no_license
use crate::grfx::vector::Vec3D; #[test] fn vector_dot_product() { let lhs = Vec3D::new(1.0,2.0,3.0); let rhs = Vec3D::new(1.0,2.0,3.0); assert_eq!(Vec3D::dot(&lhs, &rhs), 14.0); } #[test] fn vector_cross_product () { let lhs = Vec3D::new(1.0,2.0,3.0); let rhs = Vec3D::new(3.0,4.0,5.0); assert_eq!(Vec3D::cross(&lhs, &rhs), Vec3D::new(-2.0, 4.0, -2.0)); } #[test] fn vector_cross_product_zero () { let lhs = Vec3D::new(1.0,2.0,3.0); let rhs = Vec3D::new(1.0,2.0,3.0); assert_eq!(Vec3D::cross(&lhs, &rhs), Vec3D::new(0.0, 0.0, 0.0)); } #[test] fn vector_multiply() { let lhs = Vec3D::new(1.0,2.0,3.0); let rhs = 2.0; assert_eq!(lhs * rhs, Vec3D::new(2.0, 4.0, 6.0)); } #[test] fn vector_divide() { let lhs = Vec3D::new(2.0,4.0,6.0); let rhs = 2.0; assert_eq!(lhs / rhs, Vec3D::new(1.0,2.0,3.0)); } #[test] fn vector_unit() { let lhs = Vec3D::new(5.0,5.0,5.0); assert_eq!(lhs.unit_vector(), Vec3D::new(1.0/3.0_f32.sqrt(),1.0/3.0_f32.sqrt(),1.0/3.0_f32.sqrt())); }
true
8a63f1da53ae58617e3a2493383455be0862a6cb
Rust
tirust/msp432e4
/src/gpioa/dr12r.rs
UTF-8
4,482
2.625
3
[ "BSD-3-Clause" ]
permissive
#[doc = r"Value read from the register"] pub struct R { bits: u32, } #[doc = r"Value to write to the register"] pub struct W { bits: u32, } impl super::DR12R { #[doc = r"Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); self.register.set(f(&R { bits }, &mut W { bits }).bits); } #[doc = r"Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r"Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { self.register.set( f(&mut W { bits: Self::reset_value(), }) .bits, ); } #[doc = r"Reset value of the register"] #[inline(always)] pub const fn reset_value() -> u32 { 0 } #[doc = r"Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.register.set(Self::reset_value()) } } #[doc = "Possible values of the field `GPIO_DR12R_DRV12`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum GPIO_DR12R_DRV12R { #[doc = "The corresponding GPIO pin has 12-mA drive. 
This encoding is only valid if the GPIOPP EDE bit is set and the appropriate GPIOPC EDM bit field is programmed to 0x3"] GPIO_DR12R_DRV12_12MA, #[doc = r"Reserved"] _Reserved(u8), } impl GPIO_DR12R_DRV12R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { match *self { GPIO_DR12R_DRV12R::GPIO_DR12R_DRV12_12MA => 1, GPIO_DR12R_DRV12R::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> GPIO_DR12R_DRV12R { match value { 1 => GPIO_DR12R_DRV12R::GPIO_DR12R_DRV12_12MA, i => GPIO_DR12R_DRV12R::_Reserved(i), } } #[doc = "Checks if the value of the field is `GPIO_DR12R_DRV12_12MA`"] #[inline(always)] pub fn is_gpio_dr12r_drv12_12ma(&self) -> bool { *self == GPIO_DR12R_DRV12R::GPIO_DR12R_DRV12_12MA } } #[doc = "Values that can be written to the field `GPIO_DR12R_DRV12`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum GPIO_DR12R_DRV12W { #[doc = "The corresponding GPIO pin has 12-mA drive. This encoding is only valid if the GPIOPP EDE bit is set and the appropriate GPIOPC EDM bit field is programmed to 0x3"] GPIO_DR12R_DRV12_12MA, } impl GPIO_DR12R_DRV12W { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { GPIO_DR12R_DRV12W::GPIO_DR12R_DRV12_12MA => 1, } } } #[doc = r"Proxy"] pub struct _GPIO_DR12R_DRV12W<'a> { w: &'a mut W, } impl<'a> _GPIO_DR12R_DRV12W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: GPIO_DR12R_DRV12W) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "The corresponding GPIO pin has 12-mA drive. 
This encoding is only valid if the GPIOPP EDE bit is set and the appropriate GPIOPC EDM bit field is programmed to 0x3"] #[inline(always)] pub fn gpio_dr12r_drv12_12ma(self) -> &'a mut W { self.variant(GPIO_DR12R_DRV12W::GPIO_DR12R_DRV12_12MA) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(255 << 0); self.w.bits |= ((value as u32) & 255) << 0; self.w } } impl R { #[doc = r"Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:7 - Output Pad 12-mA Drive Enable"] #[inline(always)] pub fn gpio_dr12r_drv12(&self) -> GPIO_DR12R_DRV12R { GPIO_DR12R_DRV12R::_from(((self.bits >> 0) & 255) as u8) } } impl W { #[doc = r"Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:7 - Output Pad 12-mA Drive Enable"] #[inline(always)] pub fn gpio_dr12r_drv12(&mut self) -> _GPIO_DR12R_DRV12W { _GPIO_DR12R_DRV12W { w: self } } }
true
afb99c2d0d5c5d5944a6f40fa46d53575cb5522d
Rust
cmhteixeira/todo-cli
/src/data.rs
UTF-8
4,428
2.96875
3
[]
no_license
use std::time::Instant; use serde::{Deserialize, Serialize}; use serde_json::Result; use serde_json::error::Category::Data; #[derive(Serialize, Deserialize, Debug)] pub enum Importance { One, Two, Three, Four, Five, } #[derive(Serialize, Deserialize, Debug)] pub struct Task<'a> { id: u32, #[serde(borrow)] description: &'a str, #[serde(borrow)] project: Option<&'a str>, #[serde(borrow)] context: Option<&'a str>, importance: Option<Importance>, time_stamp: u128, } impl Task<'_> { pub fn new<'a>(id: u32, description: &'a str, project: Option<&'a str>, context: Option<&'a str>, importance: Option<Importance>) -> Task<'a> { Task { id, description, project, context, importance, time_stamp: std::time::Instant::now().elapsed().as_millis(), } } } #[derive(Serialize, Deserialize, Debug)] pub struct DataPersisted<'b> { #[serde(borrow)] active: Vec<Task<'b>>, #[serde(borrow)] completed: Vec<Task<'b>>, } impl<'a> DataPersisted<'a> { pub fn empty() -> DataPersisted<'static> { DataPersisted { active: vec![], completed: vec![], } } pub fn add_active<'b: 'a>(&mut self, description: &'b str, project: Option<&'b str>, context: Option<&'b str>, importance: Option<Importance>) -> () { let mut next_id = 1; for i in 1..u32::MAX { if self.active.iter().any(|task| task.id == i) { continue; } else if self.completed.iter().any(|task| task.id == i) { continue; } else { next_id = i; break; } } self.active.append(&mut vec![Task::new(next_id as u32, description, project, context, importance)]) } pub fn mark_completed(&mut self, task_id: u32) -> () { let active = self.active.iter().position(|elem| elem.id == task_id); match active { None => (), // do nothing Some(task_index) => { let removed_task = self.active.remove(task_index); self.completed.append(&mut vec![removed_task]); } } } pub fn complete_tasks(&mut self, task_ids: Vec<u8>) -> () { for i in task_ids { self.mark_completed(i as u32) } } pub fn delete_task(&mut self, task_id: u32) -> () { let active = self.active.iter().position(|elem| 
elem.id == task_id); match active { None => (), // do nothing Some(task_index) => { self.active.remove(task_index); } } let completed = self.completed.iter().position(|elem| elem.id == task_id); match completed { None => (), // do nothing Some(task_index) => { self.completed.remove(task_index); } } } pub fn delete_tasks(&mut self, task_ids: Vec<u8>) -> () { for id in task_ids { self.delete_task(id as u32) } } pub fn print_tty(&self) -> String { let mut res = String::new(); res.push_str("\u{001b}[1;31mActive\u{001b}[0m \u{23F3}\n"); let greatest_size = self.active.iter().map(|task| task.id.to_string().len()).max(); for i in &self.active { res.push_str(DataPersisted::print_task(i, greatest_size.unwrap()).as_str()); } res.push_str("\n"); res.push_str("\n"); res.push_str("\u{001b}[1;31mCompleted\u{001b}[0m \u{2705}\n"); let greatest_size = self.completed.iter().map(|task| task.id.to_string().len()).max(); for i in &self.completed { res.push_str(DataPersisted::print_task(i, greatest_size.unwrap()).as_str()); } res } fn print_task<'b>(task: &'b Task<'b>, max_id: usize) -> String { format!("{:width$} {} {} {}\n", task.id, task.description, DataPersisted::format_project(task.project.unwrap_or_else(|| "")), DataPersisted::format_context(task.context.unwrap_or_else(|| "")), width = max_id) } fn format_project(project_name: &str) -> String { format!("\u{001b}[40;1m\u{001b}[33m{}\u{001b}[0m", project_name) } fn format_context(context_name: &str) -> String { format!("\u{001b}[40;1m\u{001b}[33m{}\u{001b}[0m", context_name) } }
true
d43833c9e36a02cd433643fb31c936be8556f046
Rust
facebook/hhvm
/hphp/hack/src/hackc/assemble/lexer.rs
UTF-8
29,608
3.203125
3
[ "PHP-3.01", "Zend-2.0", "MIT" ]
permissive
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::str::FromStr; use anyhow::anyhow; use anyhow::bail; use anyhow::ensure; use anyhow::Result; use crate::token::Line; use crate::token::Token; // We initially planned on using Logos, a crate for tokenizing really fast. // We chose not to use Logos because it doesn't support all regexes -- for instance, it can't // tokenize based on the regex "\"\"\".*\"\"\"". Here's the git issue: // https://github.com/maciejhirsz/logos/issues/246 #[derive(Clone)] pub(crate) struct Lexer<'a> { line: Line, pending: Option<Token<'a>>, // Cached next (non-newline) source: &'a [u8], } impl<'a> Iterator for Lexer<'a> { type Item = Token<'a>; /// Returns the next token, never returning a newline and instead advancing the lexer past them. fn next(&mut self) -> Option<Self::Item> { self.fill(); // If `pending` already has a token this does nothing self.pending.take() } } impl<'a> Lexer<'a> { pub(crate) fn error(&mut self, err: impl std::fmt::Display) -> anyhow::Error { if let Some(tok) = self.peek() { tok.error(err) } else { anyhow!("Error [end of file]: {err}") } } /// If `pending` is none, fills with the next non-newline token, or None of one does not exist. fn fill(&mut self) { if self.pending.is_none() { loop { (self.pending, self.source) = parse_token(self.source, self.line); match self.pending { None => { // Either a whitespace or EOF if self.source.is_empty() { return; } } Some(Token::Newline(_)) => { self.line.0 += 1; } Some(_) => break, } } } } pub(crate) fn is_empty(&mut self) -> bool { self.fill(); self.pending.is_none() } fn find_next_newline(&self) -> Option<usize> { // Find the next newline - but ignore it if it's in quotes. We handle // triple quotes as a trick - '"""' is no different than '""' followed // by '"'. 
let mut in_quotes = false; let mut escape = false; self.source.iter().copied().position(|ch| { if escape { escape = false; } else if !in_quotes && ch == b'\n' { return true; } else if ch == b'\"' { in_quotes = !in_quotes; } else if ch == b'\\' { escape = true; } false }) } /// Advances the lexer past its first non-leading newline, returning a mini-lexer of the tokens up until that newline. pub(crate) fn split_at_newline(&mut self) -> Option<Lexer<'a>> { self.fill(); if let Some(pending) = self.pending.take() { let idx = self.find_next_newline().unwrap_or(self.source.len()); let source; (source, self.source) = self.source.split_at(idx); Some(Lexer { line: self.line, pending: Some(pending), source, }) } else { None } } /// Similarly to `Lexer::next`, will not return a peek to a newline. Instead /// this `peek` returns a view to the first token that's not a newline, and modifies the lexer in that it strips /// it of leading newlines. pub(crate) fn peek(&mut self) -> Option<&Token<'a>> { self.fill(); self.pending.as_ref() } /// Peeks at the next token past the current one. Unlike peek() won't skip /// newlines. Doesn't cache the token so this call can be (relatively) /// expensive if used often. pub(crate) fn peek1(&mut self) -> Option<Token<'a>> { self.fill(); let (peek1, _) = parse_token(self.source, self.line); peek1 } pub(crate) fn expect_token(&mut self) -> Result<Token<'a>> { self.next() .ok_or_else(|| anyhow!("End of token stream sooner than expected")) } /// Returns f() applied to the next token without consuming the next token. pub(crate) fn peek_is<F>(&mut self, f: F) -> bool where F: Fn(&Token<'a>) -> bool, { self.peek().map_or(false, f) } /// Applies f() to the next token and then compares the result with the /// passed-in string. pub(crate) fn peek_is_str<F>(&mut self, f: F, s: &str) -> bool where F: Fn(&Token<'a>) -> bool, { self.peek_is(|t| f(t) && t.as_bytes() == s.as_bytes()) } /// Applies f() to the next token and consumes it if the predicate passes. 
/// Returns Some(token) if the predicate passed. pub(crate) fn next_if<F>(&mut self, f: F) -> Option<Token<'a>> where F: Fn(&Token<'a>) -> bool, { let tr = self.peek_is(f); if tr { self.next() } else { None } } /// Applies f() to the next token and consumes it if the predicate /// passes. Returns true if the predicate passed. pub(crate) fn next_is<F>(&mut self, f: F) -> bool where F: Fn(&Token<'a>) -> bool, { self.next_if(f).is_some() } /// Applies f() to the next token and consumes it if the predicate passes /// and matches the given string. pub(crate) fn next_is_str<F>(&mut self, f: F, s: &str) -> bool where F: Fn(&Token<'a>) -> bool, { let tr = self.peek_is_str(f, s); if tr { self.next(); } tr } /// Applies f() to the next token and consumes it if the predicate /// passes. Returns Ok(Token) if the predicate passed or Err() if the /// predicate failed. pub(crate) fn expect<F>(&mut self, f: F) -> Result<Token<'a>> where F: Fn(&Token<'a>) -> bool, { let tok = self.expect_token()?; if f(&tok) { Ok(tok) } else { Err(tok.error("Unexpected token")) } } /// Applies f() to the next token and returns the result. /// expected token (expected token specified by f) pub(crate) fn expect_with<T, F>(&mut self, f: F) -> Result<T> where F: FnOnce(Token<'a>) -> Result<T>, { f(self.expect_token()?) } /// Checks both the f() predicate and that the str matches. pub(crate) fn expect_str<F>(&mut self, f: F, s: &str) -> Result<Token<'a>> where F: FnOnce(&Token<'a>) -> bool, { let tok = self.expect_token()?; if f(&tok) && tok.as_bytes() == s.as_bytes() { Ok(tok) } else { Err(tok.error(format!("Expected {s:?} got {:?}", tok.as_bytes()))) } } /// Similar to `expect` but instead of returning a Result that usually contains a slice of u8, /// applies f to the `from_utf8 str` of the top token, bailing if the top token is not a number. pub(crate) fn expect_and_get_number<T: FromStr>(&mut self) -> Result<T> { let num = if self.peek_is(Token::is_dash) { self.expect_with(Token::into_dash)? 
// -INF } else if self.peek_is(Token::is_identifier) { self.expect_with(Token::into_identifier)? // NAN, INF, etc will parse as float constants } else { self.expect_with(Token::into_number)? }; // If num is a dash we expect an identifier to follow if num == b"-" { let mut num = num.to_vec(); num.extend_from_slice(self.expect_with(Token::into_identifier)?); FromStr::from_str(std::str::from_utf8(&num)?).map_err(|_| { anyhow!("Number-looking token in tokenizer that cannot be parsed into number") }) } else { FromStr::from_str(std::str::from_utf8(num)?).map_err(|_| { anyhow!("Number-looking token in tokenizer that cannot be parsed into number") }) // This std::str::from_utf8 will never bail; if it should bail, the above `expect` bails first. } } /// A var can be written in HHAS as $abc or "$abc". Only valid if a $ preceeds pub(crate) fn expect_var(&mut self) -> Result<Vec<u8>> { if self.peek_is(Token::is_str_literal) { let s = self.expect_with(Token::into_str_literal)?; if s.starts_with(b"\"$") { // Remove the "" b/c that's not part of the var name // also unescape ("$\340\260" etc is literal bytes) let s = escaper::unquote_slice(s); let s = escaper::unescape_literal_bytes_into_vec_bytes(s)?; Ok(s) } else { bail!("Var does not start with $: {:?}", s) } } else { Ok(self.expect_with(Token::into_variable)?.to_vec()) } } /// Bails if lexer is not empty, message contains the next token pub(crate) fn expect_end(&mut self) -> Result<()> { ensure!( self.is_empty(), "Expected end of token stream, see: {}", self.next().unwrap() ); Ok(()) } pub(crate) fn from_slice(source: &'a [u8], start_line: Line) -> Self { Lexer { line: start_line, source, pending: None, } } pub(crate) fn parse_list<T, F>(&mut self, mut callback: F) -> Result<Vec<T>> where F: FnMut(&mut Self) -> Result<Option<T>>, { // a b c let mut result = Vec::new(); while let Some(t) = callback(self)? 
{ result.push(t); } Ok(result) } pub(crate) fn parse_comma_list<T, F>( &mut self, expect_comma: bool, mut callback: F, ) -> Result<Vec<T>> where F: FnMut(&mut Self) -> Result<T>, { // a, b, c let mut result = Vec::new(); if !expect_comma { result.push(callback(self)?); } while self.next_is(Token::is_comma) { result.push(callback(self)?); } Ok(result) } } fn is_identifier_lead(ch: u8) -> bool { ch.is_ascii_alphabetic() || (ch >= 0x80) || (ch == b'_') || (ch == b'/') } fn is_identifier_tail(ch: u8) -> bool { // r"(?-u)[_/a-zA-Z\x80-\xff]([_/\\a-zA-Z0-9\x80-\xff\.\$#\-]|::)*" // // NOTE: identifiers can include "::" but this won't match that because it's // a multi-byte sequence. ch.is_ascii_alphanumeric() || (ch >= 0x80) || (ch == b'.') || (ch == b'$') || (ch == b'#') || (ch == b'-') || (ch == b'_') || (ch == b'/') || (ch == b'\\') } fn gather_identifier(source: &[u8]) -> (&[u8], &[u8]) { // This can't be easy because ':' isn't part of an identifier, but '::' // is... let mut len = 1; loop { len += source[len..] .iter() .copied() .take_while(|&c| is_identifier_tail(c)) .count(); if !source[len..].starts_with(b"::") { break; } len += 2; } source.split_at(len) } fn is_number_lead(ch: u8) -> bool { ch.is_ascii_digit() } fn is_number_tail(ch: u8) -> bool { // r"[-+]?[0-9]+\.?[0-9]*([eE][-+]?[0-9]+\.?[0-9]*)?" ch.is_ascii_digit() || (ch == b'.') } fn gather_number(source: &[u8]) -> (&[u8], &[u8]) { // The plus location (only after 'e') makes this tricky. let mut last_e = false; let len = source[1..] 
.iter() .copied() .take_while(|&c| { if is_number_tail(c) { last_e = false; } else if c == b'e' || c == b'E' { last_e = true; } else if (c == b'+' || c == b'-') && last_e { last_e = false; } else { return false; } true }) .count(); source.split_at(1 + len) } fn is_global_tail(ch: u8) -> bool { // r"(?-u)[\.@][_a-z/A-Z\x80-\xff][_/a-zA-Z/0-9\x80-\xff\-\.]*" ch.is_ascii_alphanumeric() || (ch >= 0x80) || (ch == b'.') || (ch == b'-') || (ch == b'_') } fn is_var_tail(ch: u8) -> bool { // r"(?-u)\$[_a-zA-Z0-9$\x80-\xff][_/a-zA-Z0-9$\x80-\xff]*" ch.is_ascii_alphanumeric() || (ch >= 0x80) || (ch == b'$') || (ch == b'_') } fn is_decl_tail(ch: u8) -> bool { // r"(?-u)[\.@][_a-z/A-Z\x80-\xff][_/a-zA-Z/0-9\x80-\xff\-\.]*", // Decl, global. (?-u) turns off utf8 check ch.is_ascii_alphanumeric() || (ch >= 0x80) || (ch == b'-') || (ch == b'.') || (ch == b'_') } fn is_non_newline_whitespace(ch: u8) -> bool { ch == b' ' || ch == b'\t' || ch == b'\r' } fn gather_tail<F>(source: &[u8], f: F) -> (&[u8], &[u8]) where F: Fn(u8) -> bool, { let len = source[1..].iter().copied().take_while(|c| f(*c)).count(); source.split_at(1 + len) } fn gather_quoted(source: &[u8], count: usize) -> (&[u8], &[u8]) { let mut quotes = 0; let mut escaped = false; let len = source[count..] 
.iter() .copied() .take_while(|&c| { if quotes == count { return false; } else if c == b'"' && !escaped { quotes += 1; } else if c == b'\\' && !escaped { escaped = true; quotes = 0; } else { escaped = false; quotes = 0; } true }) .count(); source.split_at(count + len) } fn parse_token(mut source: &[u8], line: Line) -> (Option<Token<'_>>, &[u8]) { let tok = if let Some(lead) = source.first() { match *lead { ch if is_identifier_lead(ch) => { // Identifier let tok; (tok, source) = gather_identifier(source); Some(Token::Identifier(tok, line)) } ch if is_number_lead(ch) => { // Number let tok; (tok, source) = gather_number(source); Some(Token::Number(tok, line)) } ch if is_non_newline_whitespace(ch) => { (_, source) = gather_tail(source, is_non_newline_whitespace); None } b'#' => { // Don't consume the newline. let len = source[1..] .iter() .copied() .take_while(|&c| c != b'\n') .count(); (_, source) = source.split_at(1 + len); None } b'\n' => { (_, source) = source.split_at(1); Some(Token::Newline(line)) } b'@' => { if source.len() > 1 { // Global let tok; (tok, source) = gather_tail(source, is_global_tail); Some(Token::Global(tok, line)) } else { // Error let tok = std::mem::take(&mut source); Some(Token::Error(tok, line)) } } b'$' => { if source.len() > 1 { // Var let tok; (tok, source) = gather_tail(source, is_var_tail); Some(Token::Variable(tok, line)) } else { // Error let tok = std::mem::take(&mut source); Some(Token::Error(tok, line)) } } b'"' => { if source.starts_with(b"\"\"\"") { // Triple string literal let tok; (tok, source) = gather_quoted(source, 3); Some(Token::TripleStrLiteral(tok, line)) } else { // Single string literal let tok; (tok, source) = gather_quoted(source, 1); Some(Token::StrLiteral(tok, line)) } } b'.' 
=> { if source.starts_with(b"...") { // Variadic (_, source) = source.split_at(3); Some(Token::Variadic(line)) } else if source.len() > 1 { // Decl let tok; (tok, source) = gather_tail(source, is_decl_tail); Some(Token::Decl(tok, line)) } else { // Error let tok = std::mem::take(&mut source); Some(Token::Error(tok, line)) } } b'-' => { if source.get(1).copied().map_or(false, is_number_lead) { // Negative number let tok; (tok, source) = gather_number(source); Some(Token::Number(tok, line)) } else { // Dash (_, source) = source.split_at(1); Some(Token::Dash(line)) } } b'+' => { // Positive number let tok; (tok, source) = gather_number(source); Some(Token::Number(tok, line)) } b';' => { (_, source) = source.split_at(1); Some(Token::Semicolon(line)) } b'{' => { (_, source) = source.split_at(1); Some(Token::OpenCurly(line)) } b'[' => { (_, source) = source.split_at(1); Some(Token::OpenBracket(line)) } b'(' => { (_, source) = source.split_at(1); Some(Token::OpenParen(line)) } b')' => { (_, source) = source.split_at(1); Some(Token::CloseParen(line)) } b']' => { (_, source) = source.split_at(1); Some(Token::CloseBracket(line)) } b'}' => { (_, source) = source.split_at(1); Some(Token::CloseCurly(line)) } b',' => { (_, source) = source.split_at(1); Some(Token::Comma(line)) } b'<' => { (_, source) = source.split_at(1); Some(Token::Lt(line)) } b'>' => { (_, source) = source.split_at(1); Some(Token::Gt(line)) } b'=' => { (_, source) = source.split_at(1); Some(Token::Equal(line)) } b':' => { (_, source) = source.split_at(1); Some(Token::Colon(line)) } _ => todo!("CH: {lead:?} ({})", *lead as char), } } else { None }; (tok, source) } #[cfg(test)] mod test { use bumpalo::Bump; use super::*; use crate::assemble; #[test] fn str_into_test() -> Result<()> { // Want to test that only tokens surrounded by "" are str_literals // Want to confirm the assumption that after any token_iter.expect_with(Token::into_str_literal) call, you can safely remove the first and last element in slice let 
s = br#"abc "abc" """abc""""#; let mut lex = Lexer::from_slice(s, Line(1)); assert!(lex.next_is(Token::is_identifier)); let sl = lex.expect_with(Token::into_str_literal)?; assert!(sl[0] == b'"' && sl[sl.len() - 1] == b'"'); let tsl = lex.expect_with(Token::into_triple_str_literal)?; assert!( tsl[0..3] == [b'"', b'"', b'"'] && tsl[tsl.len() - 3..tsl.len()] == [b'"', b'"', b'"'] ); Ok(()) } #[test] fn just_nl_is_empty() { let s = b"\n \n \n"; let mut lex = Lexer::from_slice(s, Line(1)); assert!(lex.split_at_newline().is_none()); assert!(lex.is_empty()); } #[test] fn splits_mult_newlines_go_away() { // Point of this test: want to make sure that 3 mini-lexers are spawned (multiple new lines don't do anything) let s = b"\n \n a \n \n \n b \n \n c \n"; let mut lex = Lexer::from_slice(s, Line(1)); let mut a = lex.split_at_newline().unwrap(); let mut b = lex.split_at_newline().unwrap(); let mut c = lex.split_at_newline().unwrap(); assert_eq!(lex.next(), None); assert_eq!(a.next().unwrap().into_identifier().unwrap(), b"a"); assert_eq!(a.next(), None); assert_eq!(b.next().unwrap().into_identifier().unwrap(), b"b"); assert_eq!(b.next(), None); assert_eq!(c.next().unwrap().into_identifier().unwrap(), b"c"); assert_eq!(c.next(), None); } #[test] fn no_trailing_newlines() { let s = b"a \n \n \n"; let mut lex = Lexer::from_slice(s, Line(1)); assert!(lex.next().is_some()); assert!(lex.is_empty()); } #[test] #[allow(unused)] fn splitting_multiple_lines() { let s = b".try { \n .srcloc 3:7, 3:22 \n String \"I'm i\\\"n the try\n\" \n Print \n PopC \n } .catch { \n Dup \n L1: \n Throw \n }"; let mut lex = Lexer::from_slice(s, Line(1)); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_decl().unwrap(), b".try"); assert!(sub.next().unwrap().is_open_curly()); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_decl().unwrap(), b".srcloc"); assert_eq!(sub.next().unwrap().into_number().unwrap(), 
b"3"); assert!(sub.next().unwrap().is_colon()); assert_eq!(sub.next().unwrap().into_number().unwrap(), b"7"); assert!(sub.next().unwrap().is_comma()); assert_eq!(sub.next().unwrap().into_number().unwrap(), b"3"); assert!(sub.next().unwrap().is_colon()); assert_eq!(sub.next().unwrap().into_number().unwrap(), b"22"); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"String"); assert_eq!( sub.next().unwrap().into_str_literal().unwrap(), b"\"I'm i\\\"n the try\n\"" ); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"Print"); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"PopC"); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert!(sub.next().unwrap().is_close_curly()); assert_eq!(sub.next().unwrap().into_decl().unwrap(), b".catch"); assert!(sub.next().unwrap().is_open_curly()); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"Dup"); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"L1"); assert!(sub.next().unwrap().is_colon()); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert_eq!(sub.next().unwrap().into_identifier().unwrap(), b"Throw"); assert_eq!(sub.next(), None); let mut sub = lex.split_at_newline().unwrap(); assert!(sub.next().unwrap().is_close_curly()); assert_eq!(sub.next(), None); assert_eq!(lex.next(), None); } #[test] fn peek_next_on_newlines() { let s = b"\n\na\n\n"; let mut lex = Lexer::from_slice(s, Line(1)); assert!(lex.peek().is_some()); assert!(lex.next().is_some()); assert!(lex.split_at_newline().is_none()); // Have consumed the a here -- "\n\n" was 
left and that's been consumed. } #[test] #[should_panic] fn no_top_level_shouldnt_parse() { // Is there a better way, maybe to verify the string in the bail? let s = b".srloc 3:7,3:22"; let alloc = Bump::default(); assert!(matches!(assemble::assemble_from_bytes(&alloc, s), Ok(_))) } #[test] #[should_panic] fn no_fpath_semicolon_shouldnt_parse() { let s = br#".filepath "aaaa""#; let alloc = Bump::default(); assert!(matches!(assemble::assemble_from_bytes(&alloc, s), Ok(_))) } #[test] #[should_panic] fn fpath_wo_file_shouldnt_parse() { let s = br#".filepath aaa"#; let alloc = Bump::default(); assert!(matches!(assemble::assemble_from_bytes(&alloc, s), Ok(_))) } #[test] fn difficult_strings() { let s = br#""\"0\"" "12345\\:2\\" "class_meth() expects a literal class name or ::class constant, followed by a constant string that refers to a static method on that class"; "#; let mut l: Lexer<'_> = Lexer::from_slice(s, Line(1)); // Expecting 3 string tokens let _st1 = l.next().unwrap(); let _by1 = str::as_bytes(r#""\"0\"""#); assert!(matches!(_st1, Token::StrLiteral(_by1, _))); let _st2 = l.next().unwrap(); let _by2 = str::as_bytes(r#""12345\\:2\\""#); assert!(matches!(_st1, Token::StrLiteral(_by2, _))); let _st3 = l.next().unwrap(); let _by3 = str::as_bytes( r#""class_meth() expects a literal class name or ::class constant, followed by a constant string that refers to a static method on that class""#, ); assert!(matches!(_st1, Token::StrLiteral(_by3, _))); } #[test] fn odd_unicode_test() { let s: &[u8] = b".\xA9\xEF\xB8\x8E $0\xC5\xA3\xB1\xC3 \xE2\x98\xBA\xE2\x98\xBA\xE2\x98\xBA @\xE2\x99\xA1\xE2\x99\xA4$"; let mut l: Lexer<'_> = Lexer::from_slice(s, Line(1)); // We are expecting an decl, a var, an identifier a global, and an error on the last empty variable let decl = l.next().unwrap(); assert!(matches!(decl, Token::Decl(..))); let var = l.next().unwrap(); assert!(matches!(var, Token::Variable(..))); let iden = l.next().unwrap(); assert!(matches!(iden, 
Token::Identifier(..))); let glob = l.next().unwrap(); assert!(matches!(glob, Token::Global(..))); let err = l.next().unwrap(); assert!(matches!(err, Token::Error(..))) } #[test] fn every_token_test() { let s = br#"@_global $0Var """tripleStrLiteral:)""" #hashtagComment .Decl "str!Literal" ... ;-{[( )]} =98 -98 +101. 43.2 , < > : _/identifier/ /filepath id$di aa:bb aa::bb ."#; // Expect glob var tsl decl strlit semicolon dash open_curly open_brack open_paren close_paren close_bracket // close_curly equal number number number number , < > : identifier identifier ERROR on the last . let mut l: Lexer<'_> = Lexer::from_slice(s, Line(1)); assert_eq!(l.next().unwrap().into_global().unwrap(), b"@_global"); assert_eq!(l.next().unwrap().into_variable().unwrap(), b"$0Var"); assert_eq!( l.next().unwrap().into_triple_str_literal().unwrap(), br#""""tripleStrLiteral:)""""# ); assert_eq!(l.next().unwrap().into_decl().unwrap(), b".Decl"); assert_eq!( l.next().unwrap().into_str_literal().unwrap(), br#""str!Literal""# ); assert!(l.next().unwrap().is_variadic()); assert!(l.next().unwrap().is_semicolon()); assert!(l.next().unwrap().is_dash()); assert!(l.next().unwrap().is_open_curly()); assert!(l.next().unwrap().is_open_bracket()); assert!(l.next().unwrap().is_open_paren()); assert!(l.next().unwrap().is_close_paren()); assert!(l.next().unwrap().is_close_bracket()); assert!(l.next().unwrap().is_close_curly()); assert!(l.next().unwrap().is_equal()); assert_eq!(l.next().unwrap().into_number().unwrap(), b"98"); assert_eq!(l.next().unwrap().into_number().unwrap(), b"-98"); assert_eq!(l.next().unwrap().into_number().unwrap(), b"+101."); assert_eq!(l.next().unwrap().into_number().unwrap(), b"43.2"); assert!(l.next().unwrap().is_comma()); assert!(l.next().unwrap().is_lt()); assert!(l.next().unwrap().is_gt()); assert!(l.next().unwrap().is_colon()); assert_eq!( l.next().unwrap().into_identifier().unwrap(), b"_/identifier/" ); assert_eq!(l.next().unwrap().into_identifier().unwrap(), 
b"/filepath"); assert_eq!(l.next().unwrap().into_identifier().unwrap(), b"id$di"); assert_eq!(l.next().unwrap().into_identifier().unwrap(), b"aa"); assert!(l.next().unwrap().is_colon()); assert_eq!(l.next().unwrap().into_identifier().unwrap(), b"bb"); assert_eq!(l.next().unwrap().into_identifier().unwrap(), b"aa::bb"); let err = l.next().unwrap(); assert!(matches!(err, Token::Error(..)), "failed to match {}", err); } }
true
832bc7637f068d4533b950801363440b93c44439
Rust
Jonahss/stl-rust
/src/mesh.rs
UTF-8
3,935
3.265625
3
[]
no_license
use std::collections::{HashMap, HashSet}; use std::fmt; use crate::stl::{Solid}; use crate::stl::datatypes::{Vertex}; // While Solid stores a surface as triangles, the way an STL file does, Mesh stores a list of vertices and edges. // Let's try storing them in an adjacency list type VertexId = usize; pub struct Mesh { vertices: HashMap<Vertex, VertexId>, vertex_lookup: HashMap<VertexId, Vertex>, adjacency_list: HashMap<VertexId, HashSet<VertexId>>, num_triangles: i32, } impl Mesh { fn add_triangle(& mut self, a: Vertex, b: Vertex, c: Vertex) { let a_id_option = self.vertices.get(&a); let a_id; match a_id_option { None => { let a_copy = a.clone(); self.vertices.insert(a, self.vertices.len()); a_id = self.vertices.len()-1; self.vertex_lookup.insert(a_id, a_copy); }, Some(x) => { a_id = *x; } } let b_id_option = self.vertices.get(&b); let b_id; match b_id_option { None => { let b_copy = b.clone(); self.vertices.insert(b, self.vertices.len()); b_id = self.vertices.len()-1; self.vertex_lookup.insert(b_id, b_copy); }, Some(x) => { b_id = *x; } } let c_id_option = self.vertices.get(&c); let c_id; match c_id_option { None => { let c_copy = c.clone(); self.vertices.insert(c, self.vertices.len()); c_id = self.vertices.len()-1; self.vertex_lookup.insert(c_id, c_copy); }, Some(x) => { c_id = *x; } } let a_neighbors = self.adjacency_list.entry(a_id).or_insert_with(HashSet::new); a_neighbors.insert(b_id); a_neighbors.insert(c_id); let b_neighbors = self.adjacency_list.entry(b_id).or_insert_with(HashSet::new); b_neighbors.insert(a_id); b_neighbors.insert(c_id); let c_neighbors = self.adjacency_list.entry(c_id).or_insert_with(HashSet::new); c_neighbors.insert(a_id); c_neighbors.insert(b_id); self.num_triangles += 1; } } impl Mesh { pub fn from_solid(solid: Solid) -> Mesh { let mut mesh = Mesh { vertices: HashMap::new(), vertex_lookup: HashMap::new(), adjacency_list: HashMap::new(), num_triangles: 0, }; for v in solid.triangles { mesh.add_triangle(v.0, v.1, v.2); } mesh } pub fn 
num_edges(&self) -> i32 { (self.adjacency_list.values().fold(0, |acc, x| acc + x.len()) / 2) as i32 // each edge is listed twice, once for each starting vertex } pub fn num_vertices(&self) -> i32 { self.vertices.len() as i32 } pub fn average_vertex_valence(&self) -> i32 { self.adjacency_list.values().map(|edges| edges.len()).sum::<usize>() as i32 / self.num_vertices() } pub fn genus(&self) -> i32 { let num_edges = self.num_edges(); let num_vertices = self.num_vertices(); 1 - ((num_vertices - num_edges + self.num_triangles) / 2) } } impl fmt::Display for Mesh { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let num_edges = self.num_edges(); let num_vertices = self.num_vertices(); let first = self.vertices.keys().next().expect("no vertices in mesh!"); let vertex_display = format!("{:?}", first); let first_edges = self.adjacency_list.get(&0).expect("no edges"); let first_edges: Vec<&Vertex> = first_edges.iter().map(|id| self.vertex_lookup.get(id).unwrap()).collect(); writeln!(f, "Mesh with {} vertices, {} edges, {} triangles", num_vertices, num_edges, self.num_triangles); writeln!(f, "F ≈ 2V ({} ≈ {})", self.num_triangles, 2 * num_vertices); writeln!(f, "E ≈ 3V ({} ≈ {})", self.num_edges(), 3 * num_vertices); writeln!(f, "Average Vertex Valence ≈ 6 ({} ≈ 6)", self.average_vertex_valence()); writeln!(f, "Euler formula: V - E + F = 2(1-g) (g = {})", self.genus()); writeln!(f, "a vertex: {:?}, {:?}", vertex_display, first_edges) } }
true
f7df5801dde86cb8220c98fe6ba206fed504f03d
Rust
AngryLawyer/uo-data-viewer
/src/hues_scene.rs
UTF-8
4,748
2.671875
3
[ "MIT" ]
permissive
use cgmath::Point2; use ggez::event::{KeyCode, KeyMods}; use ggez::graphics::{self, Canvas, Color, DrawParam, Text}; use ggez::{Context, GameResult}; use scene::{BoxedScene, Scene, SceneChangeEvent, SceneName}; use std::fs::File; use std::io::Error; use std::io::Result; use std::path::Path; use uorustlibs::color::Color as ColorTrait; use uorustlibs::hues::{Hue, HueGroup, HueReader}; static HEIGHT: f32 = 16.0; pub struct HuesScene { reader: Result<HueReader<File>>, index: u32, texture: Option<Canvas>, exiting: bool, } impl<'a> HuesScene { pub fn new(ctx: &mut Context) -> BoxedScene<'a, SceneName, ()> { let mut scene = Box::new(HuesScene { reader: HueReader::new(&Path::new("./assets/hues.mul")), texture: None, index: 0, exiting: false, }); scene.load_group(ctx).expect("Failed to create slice"); scene } fn load_group(&mut self, ctx: &mut Context) -> GameResult<()> { let dest = Canvas::with_window_size(ctx)?; let maybe_group = match self.reader { Ok(ref mut hue_reader) => hue_reader.read_hue_group(self.index), Err(ref x) => Err(Error::new(x.kind(), "Whoops")), }; match maybe_group { Ok(group) => { graphics::set_canvas(ctx, Some(&dest)); graphics::clear(ctx, graphics::BLACK); self.draw_hue_group(ctx, self.index, &group)?; graphics::set_canvas(ctx, None); } Err(_) => { graphics::set_canvas(ctx, Some(&dest)); graphics::clear(ctx, graphics::BLACK); graphics::set_canvas(ctx, None); } }; self.texture = Some(dest); Ok(()) } fn draw_hue_group( &self, ctx: &mut Context, group_idx: u32, group: &HueGroup, ) -> GameResult<()> { for (idx, hue) in group.entries.iter().enumerate() { self.draw_hue(ctx, &hue, idx as u32)?; } let label = Text::new(format!("Group {} - {}", group_idx, group.header)); graphics::draw( ctx, &label, (Point2::new(0.0, HEIGHT * 8.0 + 4.0), graphics::WHITE), )?; Ok(()) } fn draw_hue(&self, ctx: &mut Context, hue: &Hue, hue_idx: u32) -> GameResult<()> { for (col_idx, &color) in hue.color_table.iter().enumerate() { let (r, g, b, _) = color.to_rgba(); let rect = 
graphics::Rect::new(col_idx as f32 * 16.0, hue_idx as f32 * HEIGHT, 16.0, HEIGHT); let r1 = graphics::Mesh::new_rectangle( ctx, graphics::DrawMode::fill(), rect, Color::from_rgba(r, g, b, 255), )?; graphics::draw(ctx, &r1, DrawParam::default()).unwrap(); } let label_text = format!( "{}: {} - {}", if hue.name.trim().len() > 0 { &hue.name } else { "NONE" }, hue.table_start, hue.table_end ); let label = Text::new(format!("{}", label_text)); graphics::draw( ctx, &label, ( Point2::new(hue.color_table.len() as f32 * 16.0, hue_idx as f32 * HEIGHT), graphics::WHITE, ), )?; Ok(()) } } impl Scene<SceneName, ()> for HuesScene { fn draw(&mut self, ctx: &mut Context, _engine_data: &mut ()) -> GameResult<()> { match self.texture { Some(ref texture) => { graphics::draw(ctx, texture, DrawParam::default())?; } None => (), }; Ok(()) } fn update( &mut self, _ctx: &mut Context, _engine_data: &mut (), ) -> GameResult<Option<SceneChangeEvent<SceneName>>> { if self.exiting { Ok(Some(SceneChangeEvent::PopScene)) } else { Ok(None) } } fn key_down_event( &mut self, ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods, _repeat: bool, _engine_data: &mut (), ) { match keycode { KeyCode::Escape => self.exiting = true, KeyCode::Left => { if self.index > 0 { self.index -= 1; self.load_group(ctx).expect("Failed to create slice"); } } KeyCode::Right => { self.index += 1; self.load_group(ctx).expect("Failed to create slice"); } _ => (), } } }
true
32313760c95be34d4996e02ef6b3cb13e8815249
Rust
nikmikov/bitmex-recorder-rs
/src/main.rs
UTF-8
3,365
2.59375
3
[ "BSD-2-Clause" ]
permissive
#[macro_use] extern crate enum_display_derive; extern crate chrono; extern crate csv; extern crate env_logger; extern crate ws; extern crate uuid; mod wire; use wire::bitmex; use std::sync::mpsc; use std::thread; struct Client<T> { out: ws::Sender, tx: mpsc::Sender<T>, } struct CsvSink<W: std::io::Write> { writer: csv::Writer<W>, } trait StreamProcessor<T: serde::Serialize> { fn process(&mut self, msg: &T); } impl<W: std::io::Write> CsvSink<W> { fn new(write: W) -> CsvSink<W> { let writer = csv::WriterBuilder::new() .delimiter(b'|') .has_headers(false) .flexible(true) .from_writer(write); CsvSink { writer: writer } } fn serialize<T: serde::Serialize>(&mut self, message: T) { match self.writer.serialize(message) { Err(err) => log::error!("Serialize Error: {:?}", err), _ => self.writer.flush().unwrap(), } } } impl<W: std::io::Write> StreamProcessor<bitmex::Response> for CsvSink<W> { fn process(&mut self, msg: &bitmex::Response) { use bitmex::Response::*; match msg { Subscribe { subscribe, success } => { log::info!("Subscribed: {}: success: {}", subscribe, success) } i @ Info { .. } => log::info!("{:?}", i), e @ Error { .. 
} => log::error!("{:?}", e), TableData { table, action, data, } => { for row in data { self.serialize(bitmex::TableRowAction { table, action, row }) } } } } } impl<T> ws::Handler for Client<T> where T: serde::de::DeserializeOwned, { fn on_open(&mut self, _: ws::Handshake) -> ws::Result<()> { let subscribe = bitmex::Request::Subscribe { args: vec![ bitmex::Table::Trade, //bitmex::Table::OrderBookL2, //bitmex::Table::OrderBookL2_25, ], }; let ser = serde_json::to_string(&subscribe).unwrap(); log::info!("Sending subscribe command: {:?}", ser); self.out.send(ser) } fn on_message(&mut self, msg: ws::Message) -> ws::Result<()> { let payload: String = msg.into_text().unwrap(); //println!("{}", payload); let resp: Result<T, serde_json::error::Error> = serde_json::from_str(&payload); match resp { Ok(resp) => self.tx.send(resp).unwrap(), Err(err) => log::error!("{}", err), } Ok(()) // never fail } fn on_error(&mut self, err: ws::Error) { log::error!("On Error, {}", err) } } fn main() { env_logger::init(); let (tx, rx) = mpsc::channel::<bitmex::Response>(); let mut csv_sink = CsvSink::new(std::io::stdout()); thread::spawn(move || loop { match rx.recv() { Ok(resp) => csv_sink.process(&resp), Err(err) => log::error!("Channel receive error: {:?}", err), } }); let bitmex_addr = "wss://www.bitmex.com/realtime"; log::info!("Connecting to {}", bitmex_addr); if let Err(error) = ws::connect(bitmex_addr, |out| Client { out: out, tx: tx.clone(), }) { log::error!("Failed to create WebSocket due to: {:?}", error) } }
true
5511d797b13daa0accdbb8a2f6dd62893f36c819
Rust
TheBlueMatt/Angora
/fuzzer/src/cond_stmt/cond_stmt.rs
UTF-8
3,138
2.65625
3
[ "Apache-2.0" ]
permissive
use super::CondState; use crate::fuzz_type::FuzzType; use angora_common::{cond_stmt_base::CondStmtBase, defs, tag::TagSeg}; use std::hash::{Hash, Hasher}; #[derive(Debug, Default, Clone)] pub struct CondStmt { pub base: CondStmtBase, pub offsets: Vec<TagSeg>, pub offsets_opt: Vec<TagSeg>, pub variables: Vec<u8>, pub speed: u32, pub is_desirable: bool, // non-convex pub is_consistent: bool, pub fuzz_times: usize, pub state: CondState, pub num_minimal_optima: usize, pub linear: bool, } impl PartialEq for CondStmt { fn eq(&self, other: &CondStmt) -> bool { self.base == other.base } } impl Eq for CondStmt {} impl Hash for CondStmt { fn hash<H: Hasher>(&self, state: &mut H) { self.base.cmpid.hash(state); self.base.context.hash(state); self.base.order.hash(state); } } impl CondStmt { pub fn new() -> Self { let cond_base = Default::default(); Self { base: cond_base, offsets: vec![], offsets_opt: vec![], variables: vec![], speed: 0, is_consistent: true, is_desirable: true, fuzz_times: 0, state: CondState::default(), num_minimal_optima: 0, linear: false, } } pub fn from(cond_base: CondStmtBase) -> Self { let mut cond = Self::new(); cond.base = cond_base; cond } pub fn get_fuzz_type(&self) -> FuzzType { match self.base.op { defs::COND_AFL_OP => FuzzType::AFLFuzz, defs::COND_LEN_OP => FuzzType::LenFuzz, defs::COND_FN_OP => FuzzType::CmpFnFuzz, _ => { if self.base.is_explore() { FuzzType::ExploreFuzz } else if self.base.is_exploitable() { FuzzType::ExploitFuzz } else { FuzzType::OtherFuzz } }, } } pub fn is_tainted(&self) -> bool { self.offsets.len() > 0 } pub fn is_bool(&self) -> bool { (self.base.may_be_bool() && !self.is_desirable) || (self.base.op & defs::COND_BOOL_MASK) > 0 } pub fn mark_as_done(&mut self) { self.base.condition = defs::COND_DONE_ST; self.clear(); } pub fn clear(&mut self) { self.offsets = vec![]; self.offsets_opt = vec![]; self.variables = vec![]; } pub fn is_discarded(&self) -> bool { self.is_done() || self.state.is_unsolvable() || 
self.state.is_timeout() } pub fn is_first_time(&self) -> bool { self.fuzz_times == 1 } pub fn get_afl_cond(id: usize, speed: u32, edge_num: usize) -> Self { let mut afl_cond = Self::new(); afl_cond.speed = speed; afl_cond.base.op = defs::COND_AFL_OP; afl_cond.base.cmpid = id as u32; afl_cond.base.context = 0; afl_cond.base.order = 0; afl_cond.base.arg1 = edge_num as u64; afl_cond } pub fn is_done(&self) -> bool { self.base.is_done() } }
true
c0f52011df225fe247f455c2f603d6d63df99f97
Rust
stjepangolemac/halite3bdk
/src/ship.rs
UTF-8
544
3.203125
3
[ "MIT" ]
permissive
use crate::command::Command; use crate::direction::Direction; use crate::position::Position; #[derive(Debug, Clone)] pub struct Ship { pub id: u32, pub owner_id: u32, pub position: Position, pub halite: u32, } impl Ship { pub fn new(id: u32, owner_id: u32, position: Position, halite: u32) -> Self { Ship { id, owner_id, position, halite, } } pub fn r#move(&self, direction: Direction) -> Command { Command::Move(self.id, direction) } }
true
5d1139179ffb77ce928e637e3e5827a4e83c3df9
Rust
johnyenter-briars/gentle_intro_to_rust
/threads_networking_filesharing/refcell.rs
UTF-8
473
3.265625
3
[]
no_license
use std::cell::RefCell; fn main() { let greetings = RefCell::new("hello".to_string()); assert_eq!(*greetings.borrow(), "hello"); assert_eq!(greetings.borrow().len(), 5); //method calls will dereference for you *greetings.borrow_mut() = "hola".to_string(); assert_eq!(*greetings.borrow(), "hola"); let mut gr = greetings.borrow_mut(); *gr = "hola".to_string(); assert_eq!(*greetings.borrow(), "hola"); // will blow up - since we alreayd borrowed on line 13 }
true
cb9e27b718675cefb73f2572cabef3e706419762
Rust
myyrakle/beauty_alphabet
/src/to_beauty.rs
UTF-8
810
3.015625
3
[ "MIT" ]
permissive
const SMALL_INTERVAL: u32 = ('𝒶' as u32 -'a' as u32); const CAPITAL_INTERVAL: u32 = ('𝒜' as u32 -'A' as u32); fn to_beauty_char(c: char)-> char { let result = match c { 'o' => '𝑜', 'e' => '𝑒', 'g' => '𝑔', 'B' => 'ℬ', 'E' => 'ℰ', 'F' => 'ℱ', 'H' => 'ℋ', 'I' => 'ℐ', 'L' => 'ℒ', 'M' => 'ℳ', 'R' => 'ℛ', it @ 'a' ... 'z' => std::char::from_u32( it as u32 + SMALL_INTERVAL ).unwrap(), it @ 'A' ... 'Z' => std::char::from_u32( it as u32 + CAPITAL_INTERVAL ).unwrap(), _ => c, } as u32; return std::char::from_u32(result).unwrap(); } fn to_beauty_string(s: &str)-> String { s.chars().into_iter().map(|c|{ to_beauty_char(c) }).collect() }
true
3db7ccbd343f827d11d84025817e874cb1151809
Rust
phansch/rust-clippy
/clippy_lints/src/undropped_manually_drops.rs
UTF-8
1,994
2.859375
3
[ "MIT", "Apache-2.0" ]
permissive
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::is_type_lang_item;
use clippy_utils::{match_function_call, paths};
use rustc_hir::{lang_items, Expr};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};

declare_clippy_lint! {
    /// ### What it does
    /// Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`.
    ///
    /// ### Why is this bad?
    /// The safe `drop` function does not drop the inner value of a `ManuallyDrop`.
    ///
    /// ### Known problems
    /// Does not catch cases if the user binds `std::mem::drop`
    /// to a different name and calls it that way.
    ///
    /// ### Example
    /// ```rust
    /// struct S;
    /// drop(std::mem::ManuallyDrop::new(S));
    /// ```
    /// Use instead:
    /// ```rust
    /// struct S;
    /// unsafe {
    ///     std::mem::ManuallyDrop::drop(&mut std::mem::ManuallyDrop::new(S));
    /// }
    /// ```
    #[clippy::version = "1.49.0"]
    pub UNDROPPED_MANUALLY_DROPS,
    correctness,
    "use of safe `std::mem::drop` function to drop a std::mem::ManuallyDrop, which will not drop the inner value"
}

declare_lint_pass!(UndroppedManuallyDrops => [UNDROPPED_MANUALLY_DROPS]);

impl LateLintPass<'tcx> for UndroppedManuallyDrops {
    // Fires on every expression after type checking; we only care about
    // call expressions that resolve to `std::mem::drop`.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        // Match a call to `std::mem::drop` and bind its first argument.
        if let Some([arg_0, ..]) = match_function_call(cx, expr, &paths::DROP) {
            let ty = cx.typeck_results().expr_ty(arg_0);
            // Lint only when the dropped value's type is `ManuallyDrop`,
            // identified via its lang item rather than by path.
            if is_type_lang_item(cx, ty, lang_items::LangItem::ManuallyDrop) {
                span_lint_and_help(
                    cx,
                    UNDROPPED_MANUALLY_DROPS,
                    expr.span,
                    "the inner value of this ManuallyDrop will not be dropped",
                    None,
                    "to drop a `ManuallyDrop<T>`, use std::mem::ManuallyDrop::drop",
                );
            }
        }
    }
}
true
87645bdeda2c5cd4b1f4c356adfa5813d688b022
Rust
NathanHowell/opentelemetry-rust
/opentelemetry/src/global/trace.rs
UTF-8
10,853
3.015625
3
[ "Apache-2.0" ]
permissive
use crate::{trace, trace::TracerProvider, Context, KeyValue}; use std::fmt; use std::mem; use std::sync::{Arc, RwLock}; use std::time::SystemTime; /// Wraps the [`BoxedTracer`]'s [`Span`] so it can be used generically by /// applications without knowing the underlying type. /// /// [`BoxedTracer`]: struct.BoxedTracer.html /// [`Span`]: ../api/trace/span/trait.Span.html #[derive(Debug)] pub struct BoxedSpan(Box<DynSpan>); type DynSpan = dyn trace::Span + Send + Sync; impl trace::Span for BoxedSpan { /// Records events at a specific time in the context of a given `Span`. /// /// Note that the OpenTelemetry project documents certain ["standard event names and /// keys"](https://github.com/open-telemetry/opentelemetry-specification/tree/v0.5.0/specification/trace/semantic_conventions/README.md) /// which have prescribed semantic meanings. fn add_event_with_timestamp( &self, name: String, timestamp: SystemTime, attributes: Vec<KeyValue>, ) { self.0.add_event_with_timestamp(name, timestamp, attributes) } /// Returns the `SpanContext` for the given `Span`. fn span_context(&self) -> &trace::SpanContext { self.0.span_context() } /// Returns true if this `Span` is recording information like events with the `add_event` /// operation, attributes using `set_attributes`, status with `set_status`, etc. fn is_recording(&self) -> bool { self.0.is_recording() } /// Sets a single `Attribute` where the attribute properties are passed as arguments. /// /// Note that the OpenTelemetry project documents certain ["standard /// attributes"](https://github.com/open-telemetry/opentelemetry-specification/tree/v0.5.0/specification/trace/semantic_conventions/README.md) /// that have prescribed semantic meanings. fn set_attribute(&self, attribute: KeyValue) { self.0.set_attribute(attribute) } /// Sets the status of the `Span`. If used, this will override the default `Span` /// status, which is `Unset`. 
fn set_status(&self, code: trace::StatusCode, message: String) { self.0.set_status(code, message) } /// Updates the `Span`'s name. fn update_name(&self, new_name: String) { self.0.update_name(new_name) } /// Finishes the span with given timestamp. fn end_with_timestamp(&self, timestamp: SystemTime) { self.0.end_with_timestamp(timestamp); } } /// Wraps the [`GlobalProvider`]'s [`Tracer`] so it can be used generically by /// applications without knowing the underlying type. /// /// [`GlobalProvider`]: struct.GlobalProvider.html /// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html #[derive(Debug)] pub struct BoxedTracer(Box<dyn GenericTracer + Send + Sync>); impl trace::Tracer for BoxedTracer { /// Global tracer uses `BoxedSpan`s so that it can be a global singleton, /// which is not possible if it takes generic type parameters. type Span = BoxedSpan; /// Returns a span with an inactive `SpanContext`. Used by functions that /// need to return a default span like `get_active_span` if no span is present. fn invalid(&self) -> Self::Span { BoxedSpan(self.0.invalid_boxed()) } /// Starts a new `Span`. /// /// Each span has zero or one parent spans and zero or more child spans, which /// represent causally related operations. A tree of related spans comprises a /// trace. A span is said to be a _root span_ if it does not have a parent. Each /// trace includes a single root span, which is the shared ancestor of all other /// spans in the trace. fn start_with_context(&self, name: &str, cx: Context) -> Self::Span { BoxedSpan(self.0.start_with_context_boxed(name, cx)) } /// Creates a span builder /// /// An ergonomic way for attributes to be configured before the `Span` is started. 
fn span_builder(&self, name: &str) -> trace::SpanBuilder { trace::SpanBuilder::from_name(name.to_string()) } /// Create a span from a `SpanBuilder` fn build(&self, builder: trace::SpanBuilder) -> Self::Span { BoxedSpan(self.0.build_boxed(builder)) } } /// Allows a specific [`Tracer`] to be used generically by [`BoxedTracer`] /// instances by mirroring the interface and boxing the return types. /// /// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html /// [`BoxedTracer`]: struct.BoxedTracer.html pub trait GenericTracer: fmt::Debug + 'static { /// Create a new invalid span for use in cases where there are no active spans. fn invalid_boxed(&self) -> Box<DynSpan>; /// Returns a trait object so the underlying implementation can be swapped /// out at runtime. fn start_with_context_boxed(&self, name: &str, cx: Context) -> Box<DynSpan>; /// Returns a trait object so the underlying implementation can be swapped /// out at runtime. fn build_boxed(&self, builder: trace::SpanBuilder) -> Box<DynSpan>; } impl<S, T> GenericTracer for T where S: trace::Span + Send + Sync, T: trace::Tracer<Span = S>, { /// Create a new invalid span for use in cases where there are no active spans. fn invalid_boxed(&self) -> Box<DynSpan> { Box::new(self.invalid()) } /// Returns a trait object so the underlying implementation can be swapped /// out at runtime. fn start_with_context_boxed(&self, name: &str, cx: Context) -> Box<DynSpan> { Box::new(self.start_with_context(name, cx)) } /// Returns a trait object so the underlying implementation can be swapped /// out at runtime. fn build_boxed(&self, builder: trace::SpanBuilder) -> Box<DynSpan> { Box::new(self.build(builder)) } } /// Allows a specific [`TracerProvider`] to be used generically by the /// [`GlobalProvider`] by mirroring the interface and boxing the return types. 
/// /// [`TracerProvider`]: ../api/trace/provider/trait.TracerProvider.html /// [`GlobalProvider`]: struct.GlobalProvider.html pub trait GenericTracerProvider: fmt::Debug + 'static { /// Creates a named tracer instance that is a trait object through the underlying `TracerProvider`. fn get_tracer_boxed( &self, name: &'static str, version: Option<&'static str>, ) -> Box<dyn GenericTracer + Send + Sync>; } impl<S, T, P> GenericTracerProvider for P where S: trace::Span + Send + Sync, T: trace::Tracer<Span = S> + Send + Sync, P: trace::TracerProvider<Tracer = T>, { /// Return a boxed generic tracer fn get_tracer_boxed( &self, name: &'static str, version: Option<&'static str>, ) -> Box<dyn GenericTracer + Send + Sync> { Box::new(self.get_tracer(name, version)) } } /// Represents the globally configured [`TracerProvider`] instance for this /// application. This allows generic tracing through the returned /// [`BoxedTracer`] instances. /// /// [`TracerProvider`]: ../api/trace/provider/trait.TracerProvider.html /// [`BoxedTracer`]: struct.BoxedTracer.html #[derive(Clone, Debug)] pub struct GlobalTracerProvider { provider: Arc<dyn GenericTracerProvider + Send + Sync>, } impl GlobalTracerProvider { /// Create a new GlobalProvider instance from a struct that implements `TracerProvider`. fn new<P, T, S>(provider: P) -> Self where S: trace::Span + Send + Sync, T: trace::Tracer<Span = S> + Send + Sync, P: trace::TracerProvider<Tracer = T> + Send + Sync, { GlobalTracerProvider { provider: Arc::new(provider), } } } impl trace::TracerProvider for GlobalTracerProvider { type Tracer = BoxedTracer; /// Find or create a named tracer using the global provider. fn get_tracer(&self, name: &'static str, version: Option<&'static str>) -> Self::Tracer { BoxedTracer(self.provider.get_tracer_boxed(name, version)) } } lazy_static::lazy_static! { /// The global `Tracer` provider singleton. 
static ref GLOBAL_TRACER_PROVIDER: RwLock<GlobalTracerProvider> = RwLock::new(GlobalTracerProvider::new(trace::NoopTracerProvider::new())); } /// Returns an instance of the currently configured global [`TracerProvider`] through /// [`GlobalProvider`]. /// /// [`TracerProvider`]: ../api/trace/provider/trait.TracerProvider.html /// [`GlobalProvider`]: struct.GlobalProvider.html pub fn tracer_provider() -> GlobalTracerProvider { GLOBAL_TRACER_PROVIDER .read() .expect("GLOBAL_TRACER_PROVIDER RwLock poisoned") .clone() } /// Creates a named instance of [`Tracer`] via the configured [`GlobalProvider`]. /// /// If the name is an empty string, the provider will use a default name. /// /// This is a more convenient way of expressing `global::tracer_provider().get_tracer(name, None)`. /// /// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html /// [`GlobalProvider`]: struct.GlobalProvider.html pub fn tracer(name: &'static str) -> BoxedTracer { tracer_provider().get_tracer(name, None) } /// Creates a named instance of [`Tracer`] with version info via the configured [`GlobalProvider`] /// /// If the name is an empty string, the provider will use a default name. /// If the version is an empty string, it will be used as part of instrumentation library information. /// /// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html /// [`GlobalProvider`]: struct.GlobalProvider.html pub fn tracer_with_version(name: &'static str, version: &'static str) -> BoxedTracer { tracer_provider().get_tracer(name, Some(version)) } /// Restores the previous tracer provider on drop. /// /// This is commonly used to uninstall pipelines. As you can only have one active tracer provider, /// the previous provider is usually the default no-op provider. 
#[derive(Debug)]
pub struct TracerProviderGuard(Option<GlobalTracerProvider>);

impl Drop for TracerProviderGuard {
    // On drop, reinstall the provider that was active before this guard was
    // created (if any), taking the global write lock to swap it back in.
    fn drop(&mut self) {
        if let Some(previous) = self.0.take() {
            let mut global_provider = GLOBAL_TRACER_PROVIDER
                .write()
                .expect("GLOBAL_TRACER_PROVIDER RwLock poisoned");
            *global_provider = previous;
        }
    }
}

/// Sets the given [`TracerProvider`] instance as the current global provider.
///
/// Returns a guard that restores the previously installed provider when
/// dropped, which is why the result is `#[must_use]`.
///
/// [`TracerProvider`]: ../api/trace/provider/trait.TracerProvider.html
#[must_use]
pub fn set_tracer_provider<P, T, S>(new_provider: P) -> TracerProviderGuard
where
    S: trace::Span + Send + Sync,
    T: trace::Tracer<Span = S> + Send + Sync,
    P: trace::TracerProvider<Tracer = T> + Send + Sync,
{
    let mut tracer_provider = GLOBAL_TRACER_PROVIDER
        .write()
        .expect("GLOBAL_TRACER_PROVIDER RwLock poisoned");
    // Swap in the new provider and keep the old one so the returned guard
    // can put it back on drop.
    let previous = mem::replace(
        &mut *tracer_provider,
        GlobalTracerProvider::new(new_provider),
    );
    TracerProviderGuard(Some(previous))
}
true
041b340c921816be67ea16989fd70d7563fb0d16
Rust
erglabs/arti
/crates/tor-dirclient/src/response.rs
UTF-8
2,381
3.109375
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
//! Define a response type for directory requests.

use tor_proto::circuit::UniqId;

use crate::Error;

/// A successful (or at any rate, well-formed) response to a directory
/// request.
#[derive(Debug)]
pub struct DirResponse {
    /// An HTTP status code.
    status: u16,
    /// The decompressed output that we got from the directory cache.
    output: Vec<u8>,
    /// The error, if any, that caused us to stop getting this response early.
    error: Option<Error>,
    /// Information about the directory cache we used.
    source: Option<SourceInfo>,
}

/// Information about the source of a directory response.
///
/// We use this to remember when a request has failed, so we can
/// abandon the circuit.
///
/// (In the future, we will probably want to use this structure to
/// remember that the cache isn't working.)
#[derive(Debug, Clone)]
pub struct SourceInfo {
    /// Unique identifier for the circuit we're using
    circuit: UniqId,
}

impl DirResponse {
    /// Construct a new DirResponse from its parts
    pub(crate) fn new(
        status: u16,
        error: Option<Error>,
        output: Vec<u8>,
        source: Option<SourceInfo>,
    ) -> Self {
        DirResponse {
            status,
            output,
            error,
            source,
        }
    }

    /// Return the HTTP status code for this response.
    pub fn status_code(&self) -> u16 {
        self.status
    }

    /// Return true if this is an incomplete response (we stopped reading
    /// early; `error()` reports what went wrong).
    pub fn is_partial(&self) -> bool {
        self.error.is_some()
    }

    /// Return the error from this response, if any.
    pub fn error(&self) -> Option<&Error> {
        self.error.as_ref()
    }

    /// Return the output from this response.
    pub fn output(&self) -> &[u8] {
        &self.output
    }

    /// Consume this DirResponse and return the output in it.
    pub fn into_output(self) -> Vec<u8> {
        self.output
    }

    /// Return the source information about this response.
    pub fn source(&self) -> Option<&SourceInfo> {
        self.source.as_ref()
    }
}

impl SourceInfo {
    /// Construct a new SourceInfo
    pub(crate) fn new(circuit: UniqId) -> Self {
        SourceInfo { circuit }
    }

    /// Return the unique circuit identifier for the circuit on which
    /// we received this info.
    pub fn unique_circ_id(&self) -> &UniqId {
        &self.circuit
    }
}
true
35a30144c37a9b7eed0bd86aeae7b54204d56480
Rust
xcaptain/rust-algorithms
/leetcode/src/p59.rs
UTF-8
1,168
3.125
3
[]
no_license
// https://leetcode-cn.com/problems/spiral-matrix-ii/

/// Fills an `n x n` matrix with the values `1..=n*n` in clockwise spiral
/// order, starting at the top-left corner.
///
/// Walks one cell at a time, turning right whenever the next cell would
/// leave the grid or is already filled. Every written value is >= 1, so a
/// cell holding 0 means "not yet visited" — this replaces the separate
/// `seen` boolean matrix the original allocated.
pub fn generate_matrix(n: i32) -> Vec<Vec<i32>> {
    let size = n as usize;
    let mut matrix = vec![vec![0; size]; size];
    // Row/column deltas for the headings right, down, left, up (clockwise).
    const DR: [i32; 4] = [0, 1, 0, -1];
    const DC: [i32; 4] = [1, 0, -1, 0];
    let mut r = 0_usize;
    let mut c = 0_usize;
    let mut di = 0_usize; // index into DR/DC: current heading
    for i in 1..=n * n {
        matrix[r][c] = i;
        let cr = r as i32 + DR[di];
        let cc = c as i32 + DC[di];
        // Continue straight while the next cell is in bounds and unfilled;
        // otherwise turn clockwise and step.
        if cr >= 0 && cr < n && cc >= 0 && cc < n && matrix[cr as usize][cc as usize] == 0 {
            r = cr as usize;
            c = cc as usize;
        } else {
            di = (di + 1) % 4;
            r = (r as i32 + DR[di]) as usize;
            c = (c as i32 + DC[di]) as usize;
        }
    }
    matrix
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_p59() {
        assert_eq!(
            vec![vec![1, 2, 3], vec![8, 9, 4], vec![7, 6, 5]],
            generate_matrix(3)
        );
    }
}
true
521f5caef7913cd2042219ab2e96867fbc3b05ec
Rust
busyboredom/patina
/src/misc/misc_parameters.rs
UTF-8
251
3.015625
3
[ "MIT" ]
permissive
/// Grab-bag of miscellaneous tunable values.
pub struct MiscParameters {
    /// Age parameter, in years.
    pub age: i32,
}

impl MiscParameters {
    /// Builds the parameter set with its standard starting values.
    pub fn new() -> MiscParameters {
        let age = 30; // standard starting age
        MiscParameters { age }
    }
}

impl Default for MiscParameters {
    /// The default mirrors `MiscParameters::new`.
    fn default() -> Self {
        Self::new()
    }
}
true
41af979f3ce2577d3ed1f3527d47aa6e55e0309a
Rust
tweber12/rulac
/src/color/flow/chain.rs
UTF-8
12,566
2.65625
3
[]
no_license
const INITIAL_OFFSET: i64 = -10000; use crate::color::tensor::{AntiTripletIndex, ColorIndex, ColorTensor, TripletIndex}; use crate::color::ColorExpr; use crate::math_expr::parse::parse_math; use crate::math_expr::{BinaryOperator, Number}; use crate::ufo::UfoMath; use num_rational::Rational32; use num_traits::Zero; #[derive(Clone, Debug)] pub struct ChainBuilder { pub chains: Vec<Chain>, } impl ChainBuilder { pub fn from_expr(structure: &UfoMath) -> ChainBuilder { ChainBuilder { chains: vec![Chain::from_expr(structure)], } } pub fn from_chain(chain: Chain) -> ChainBuilder { ChainBuilder { chains: vec![chain], } } pub fn from_chains(chains: &[Chain]) -> ChainBuilder { ChainBuilder { chains: chains.to_vec(), } } pub fn empty() -> ChainBuilder { ChainBuilder { chains: vec![Chain::new(0, 0)], } } pub fn from_tensor(tensor: ColorTensor) -> ChainBuilder { let mut chains = ChainBuilder::from_chain(Chain::new(0, 1)); chains.append_all(tensor); chains } pub fn add_index<I: Into<ColorIndex> + Copy>(&mut self, index: I) { for chain in self.chains.iter_mut() { chain.add_index(index); } } pub fn add_factor<I: Into<Rational32>>(&mut self, factor: I) { let factor = factor.into(); for chain in self.chains.iter_mut() { chain.factor *= factor; } } pub fn append_all(&mut self, tensor: ColorTensor) { for chain in self.chains.iter_mut() { chain.append(tensor); } } pub fn replace_tensors<F>(self, function: &F) -> ChainBuilder where F: Fn(ColorTensor, i64) -> Option<ChainBuilder>, { let mut out = ChainBuilder::new(); for chain in self.chains { out.add_chains(chain.replace_tensors(function)); } out } pub fn replace_indices<P, F>(self, index_predicate: &P, function: &F) -> ChainBuilder where P: Fn(&ColorIndex) -> bool, F: Fn(&[ColorTensor]) -> ChainBuilder, { let mut out = ChainBuilder::new(); for chain in self.chains { out.add_chains(chain.replace_indices(index_predicate, function)); } out } pub fn reduce(mut self) -> ChainBuilder { for c in self.chains.iter_mut() { c.order_tensors(); } 
self.chains.sort_by(|a, b| a.tensors.cmp(&b.tensors)); let mut iter = self.chains.into_iter(); let mut first = match iter.next() { Some(n) => n, None => return ChainBuilder::empty(), }; let mut out = Vec::new(); for chain in iter { if chain.tensors != first.tensors { if !first.factor.is_zero() { out.push(first); } first = chain; } else { first.factor += chain.factor; } } if !first.factor.is_zero() { out.push(first); } ChainBuilder { chains: out } } pub fn combine_pairs<L, R, F>( self, predicate_left: &L, predicate_right: &R, function: &F, ) -> ChainBuilder where L: Fn(&ColorTensor) -> bool, R: Fn(&ColorTensor) -> bool, F: Fn(ColorTensor, ColorTensor) -> ChainBuilder, { let mut out = ChainBuilder::new(); for chain in self.chains { out.add_chains(chain.combine_pairs(predicate_left, predicate_right, function)); } out } fn new() -> ChainBuilder { ChainBuilder { chains: Vec::new() } } fn add_chains(&mut self, other: ChainBuilder) { self.chains.extend(other.chains); } fn join_all(&mut self, new_chain: &Chain) { for chain in self.chains.iter_mut() { chain.join(new_chain); } } fn join_all_multi(&mut self, chains: &[Chain]) { if chains.len() == 1 { return self.join_all(&chains[0]); } let temp = self.clone(); self.join_all(&chains[0]); for new_chain in &chains[1..] 
{ let mut new = temp.clone(); new.join_all(new_chain); self.add_chains(new); } } fn offset(&self) -> i64 { self.chains .iter() .map(|c| c.offset) .min() .unwrap_or(INITIAL_OFFSET) } } #[derive(Clone, Debug)] pub struct Chain { pub factor: Rational32, pub tensors: Vec<ColorTensor>, indices: Vec<ColorIndex>, offset: i64, } impl Chain { pub fn new_trace<T: Into<Rational32>>(offset: i64, indices: &[ColorIndex], factor: T) -> Chain { let mut chain = Chain::new(offset, factor); chain.add_trace(indices); chain } pub fn new_delta_chain<T: Into<Rational32>>( indices: &[(TripletIndex, AntiTripletIndex)], factor: T, ) -> Chain { let tensors = indices .iter() .map(|(i, j)| ColorTensor::new_kronecker_triplet(*i, *j)) .collect(); Chain { factor: factor.into(), tensors, indices: Vec::new(), offset: 0, } } fn new<T: Into<Rational32>>(offset: i64, factor: T) -> Chain { Chain { factor: factor.into(), tensors: Vec::new(), indices: Vec::new(), offset, } } fn join(&mut self, other: &Chain) { self.factor *= other.factor; self.tensors.extend(other.tensors.iter()); self.indices.extend(other.indices.iter()); self.offset = i64::min(self.offset, other.offset); } fn append(&mut self, tensor: ColorTensor) { self.tensors.push(tensor); } fn replace_tensors<F>(mut self, function: &F) -> ChainBuilder where F: Fn(ColorTensor, i64) -> Option<ChainBuilder>, { let tensors = self.tensors; self.tensors = Vec::new(); let mut offset = self.offset; let mut out = ChainBuilder::from_chain(self); for tensor in tensors { match function(tensor, offset) { Some(new) => out.join_all_multi(&new.chains), None => out.append_all(tensor), } offset = out.offset(); } out } fn replace_indices<P, F>(mut self, index_predicate: &P, function: &F) -> ChainBuilder where P: Fn(&ColorIndex) -> bool, F: Fn(&[ColorTensor]) -> ChainBuilder, { let (selected, other) = self.indices.into_iter().partition(index_predicate); self.indices = other; let mut out = ChainBuilder::from_chain(self); for index in 
selected.into_iter().filter(index_predicate) { let mut next = ChainBuilder::new(); for chain in out.chains { next.add_chains(chain.replace_index(index, function)); } out = next; } out } fn replace_index<F>(mut self, index: ColorIndex, function: &F) -> ChainBuilder where F: Fn(&[ColorTensor]) -> ChainBuilder, { let mut matches = Vec::new(); let tensors = self.tensors; self.tensors = Vec::new(); for tensor in tensors { if tensor.has_normalized_index(index) { matches.push(tensor); } else { self.tensors.push(tensor); } } let mut out = ChainBuilder::from_chain(self); out.join_all_multi(&function(&matches).chains); out } fn combine_pairs<L, R, F>( mut self, predicate_left: &L, predicate_right: &R, function: &F, ) -> ChainBuilder where L: Fn(&ColorTensor) -> bool, R: Fn(&ColorTensor) -> bool, F: Fn(ColorTensor, ColorTensor) -> ChainBuilder, { let (selected, rest) = self .tensors .into_iter() .partition(|t| predicate_left(t) || predicate_right(t)); self.tensors = rest; if selected.is_empty() { return ChainBuilder::from_chain(self); } let (mut left, mut right): (Vec<_>, Vec<_>) = selected.into_iter().partition(predicate_left); if left.is_empty() || right.is_empty() { self.tensors.extend(left); self.tensors.extend(right); return ChainBuilder::from_chain(self); } while left.len() > right.len() { self.tensors.push( left.pop() .expect("UNREACHABLE: left.len() > right.len() >= 0"), ); } while right.len() > left.len() { self.tensors.push( right .pop() .expect("UNREACHABLE: right.len() > left.len() >= 0"), ); } let mut result = ChainBuilder::from_chain(self); for (l, r) in left.into_iter().zip(right) { result.join_all_multi(&function(l, r).chains); } result.combine_pairs(predicate_left, predicate_right, function) } fn summation_index<T>(&mut self) -> T where T: From<i64>, { let next = self.offset; self.offset -= 1; next.into() } fn add_trace(&mut self, indices: &[ColorIndex]) { let start: TripletIndex = self.summation_index(); let mut sum = start; for (i, index) in 
indices.iter().enumerate() { let next: TripletIndex = if i != indices.len() - 1 { self.summation_index() } else { start }; self.add_index(next); match index { ColorIndex::Octet { index } => self.tensors.push(ColorTensor::FundamentalRep { a1: *index, i2: sum, jb3: next.bar(), }), ColorIndex::Sextet { index } => { self.tensors.push(ColorTensor::SextetClebschGordan { alpha1: *index, ib2: sum.bar(), jb3: next.bar(), }) } ColorIndex::AntiSextet { index } => { self.tensors.push(ColorTensor::AntiSextetClebschGordan { alphab1: *index, i2: sum, j3: next, }) } _ => panic!("BUG: Unexpected color index in `add_trace`!"), } sum = next; } } fn from_expr(structure: &UfoMath) -> Chain { let UfoMath(structure) = structure; let expr: ColorExpr = parse_math(structure).unwrap(); let mut structure = Chain::new(INITIAL_OFFSET, 1); structure.add_expr(expr); structure } fn add_expr(&mut self, expr: ColorExpr) { match expr { ColorExpr::BinaryOp { operator, left, right, } => { if operator != BinaryOperator::Mul { panic!("REPLACE: Unsupported operator"); } self.add_expr(*left); self.add_expr(*right); } ColorExpr::Tensor { tensor } => self.tensors.push(tensor), ColorExpr::Sum { expr, index } => { self.add_index(index); self.add_expr(*expr); } ColorExpr::Number { value: Number::Integer(i), } => { if i != 1 { panic!("REPLACE: Unsupported number"); } // Do nothing in this case, the color structure is trivial } _ => panic!("REPLACE: Unsupported expression"), } } fn add_index<I: Into<ColorIndex>>(&mut self, index: I) { self.indices.push(index.into()); } fn order_tensors(&mut self) { self.tensors.sort(); } } #[cfg(test)] mod test { use crate::color::tensor::ColorTensor; #[test] fn order_delta() { let d1 = ColorTensor::new_kronecker_triplet(2, 1); let d2 = ColorTensor::new_kronecker_triplet(1, 5); let d3 = ColorTensor::new_kronecker_triplet(1, 3); assert!(d1 > d2); assert!(d1 > d3); assert!(d2 > d3); } }
true
09a6ebe97eb99864329bb45ce1d5da2f72cc5d47
Rust
coriolinus/exercism-rs
/rna-transcription/src/lib.rs
UTF-8
2,482
3.359375
3
[]
no_license
use std::iter::FromIterator;

/// A single nucleic-acid base. `Thymine` occurs only in DNA and `Uracil`
/// only in RNA; the strand constructors below enforce that.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Nucleotide {
    Adenine,
    Cytosine,
    Guanine,
    Thymine,
    Uracil,
}

impl Nucleotide {
    /// Parses one base from its one-letter code; `None` for anything else.
    pub fn from_char(&c: &char) -> Option<Nucleotide> {
        match c {
            'A' => Some(Nucleotide::Adenine),
            'C' => Some(Nucleotide::Cytosine),
            'G' => Some(Nucleotide::Guanine),
            'T' => Some(Nucleotide::Thymine),
            'U' => Some(Nucleotide::Uracil),
            _ => None,
        }
    }

    /// Returns the one-letter code for this base.
    pub fn to_char(&self) -> char {
        match *self {
            Nucleotide::Adenine => 'A',
            Nucleotide::Cytosine => 'C',
            Nucleotide::Guanine => 'G',
            Nucleotide::Thymine => 'T',
            Nucleotide::Uracil => 'U',
        }
    }
}

// Newtypes so RNA and DNA strands cannot be mixed up at the type level.
#[derive(Debug, PartialEq)]
pub struct RibonucleicAcid(pub Vec<Nucleotide>);

#[derive(Debug, PartialEq)]
pub struct DeoxyribonucleicAcid(pub Vec<Nucleotide>);

// RNA impls
impl RibonucleicAcid {
    /// Parses an RNA strand; `None` on an invalid character or on Thymine,
    /// which does not occur in RNA.
    ///
    /// `Option::filter` replaces the original's manual
    /// `Some(n) => Some(n) / None => None` re-wrapping match.
    pub fn new(s: &str) -> Option<Self> {
        s.chars()
            .map(|c| Nucleotide::from_char(&c).filter(|&n| n != Nucleotide::Thymine))
            .collect()
    }
}

impl FromIterator<Nucleotide> for RibonucleicAcid {
    fn from_iter<I: IntoIterator<Item = Nucleotide>>(iter: I) -> Self {
        RibonucleicAcid(Vec::from_iter(iter))
    }
}

// DNA impls
impl DeoxyribonucleicAcid {
    /// Parses a DNA strand; `None` on an invalid character or on Uracil,
    /// which does not occur in DNA.
    pub fn new(s: &str) -> Option<Self> {
        s.chars()
            .map(|c| Nucleotide::from_char(&c).filter(|&n| n != Nucleotide::Uracil))
            .collect()
    }

    /// Transcribes this DNA strand into its RNA complement
    /// (G<->C, T->A, A->U); `None` if the strand somehow contains Uracil.
    pub fn to_rna(&self) -> Option<RibonucleicAcid> {
        self.0
            .iter()
            .map(|&n| match n {
                Nucleotide::Guanine => Some(Nucleotide::Cytosine),
                Nucleotide::Cytosine => Some(Nucleotide::Guanine),
                Nucleotide::Thymine => Some(Nucleotide::Adenine),
                Nucleotide::Adenine => Some(Nucleotide::Uracil),
                _ => None,
            })
            .collect()
    }
}

impl FromIterator<Nucleotide> for DeoxyribonucleicAcid {
    fn from_iter<I: IntoIterator<Item = Nucleotide>>(iter: I) -> Self {
        DeoxyribonucleicAcid(Vec::from_iter(iter))
    }
}
true
a1b52e5456649a8a4a4c841e093b574df8a57e10
Rust
aGiant/robust_trading.icml2019
/rstat/src/univariate/continuous/f_dist.rs
UTF-8
4,048
2.84375
3
[ "MIT", "BSD-3-Clause" ]
permissive
use crate::core::*; use rand::Rng; use spaces::continuous::PositiveReals; use std::fmt; #[derive(Debug, Clone, Copy)] pub struct FDist { pub d1: usize, pub d2: usize, } impl FDist { pub fn new(d1: usize, d2: usize) -> FDist { assert_natural!(d1); assert_natural!(d2); FDist { d1, d2 } } } impl Into<rand::distributions::FisherF> for FDist { fn into(self) -> rand::distributions::FisherF { rand::distributions::FisherF::new(self.d1 as f64, self.d2 as f64) } } impl Into<rand::distributions::FisherF> for &FDist { fn into(self) -> rand::distributions::FisherF { rand::distributions::FisherF::new(self.d1 as f64, self.d2 as f64) } } impl Distribution for FDist { type Support = PositiveReals; fn support(&self) -> PositiveReals { PositiveReals } fn cdf(&self, x: f64) -> Probability { use special_fun::FloatSpecial; let d1 = self.d1 as f64; let d2 = self.d2 as f64; let x = d1 * x / (d1 * x + d2); x.betainc(d1 / 2.0, d2 / 2.0).into() } fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> f64 { use rand::distributions::{FisherF as FisherFSampler, Distribution as DistSampler}; let sampler: FisherFSampler = self.into(); sampler.sample(rng) } } impl ContinuousDistribution for FDist { fn pdf(&self, x: f64) -> f64 { use special_fun::FloatSpecial; let d1 = self.d1 as f64; let d2 = self.d2 as f64; let numerator = ((d1 * x).powf(d1) * d2.powf(d2) / (d1 * x + d2).powf(d1 + d2)).sqrt(); let denominator = x * (d1 / 2.0).beta(d2 / 2.0); numerator / denominator } } impl UnivariateMoments for FDist { fn mean(&self) -> f64 { if self.d2 <= 2 { unimplemented!("Mean is undefined for values of d2 <= 2.") } let d2 = self.d2 as f64; d2 / (d2 - 2.0) } fn variance(&self) -> f64 { if self.d2 <= 4 { unimplemented!("Variance is undefined for values of d2 <= 4.") } let d1 = self.d1 as f64; let d2 = self.d2 as f64; let d2m2 = d2 - 2.0; 2.0 * d2 * d2 * (d1 + d2m2) / d1 / d2m2 / d2m2 / (d2 - 4.0) } fn skewness(&self) -> f64 { if self.d2 <= 6 { unimplemented!("Skewness is undefined for values of d2 <= 6.") } let d1 
= self.d1 as f64; let d2 = self.d2 as f64; let numerator = (2.0 * d1 + d2 - 2.0) * (8.0 * (d2 - 4.0)).sqrt(); let denominator = (d2 - 6.0) * (d1 * (d1 + d2 - 2.0)).sqrt(); numerator / denominator } fn excess_kurtosis(&self) -> f64 { if self.d2 <= 8 { unimplemented!("Kurtosis is undefined for values of d2 <= 8.") } let d1 = self.d1 as f64; let d2 = self.d2 as f64; let d2m2 = d2 - 2.0; let numerator = 12.0 * d1 * (5.0 * d2 - 22.0) * (d1 + d2m2) + (d2 - 4.0) * d2m2 * d2m2; let denominator = d1 * (d2 - 6.0) * (d2 - 8.0) * (d1 + d2m2); numerator / denominator } } impl Modes for FDist { fn modes(&self) -> Vec<f64> { if self.d1 <= 2 { unimplemented!("Mode is undefined for values of d1 <= 2.") } let d1 = self.d1 as f64; let d2 = self.d2 as f64; vec![(d1 - 2.0) / d1 * d2 / (d2 + 2.0)] } } impl Entropy for FDist { fn entropy(&self) -> f64 { use special_fun::FloatSpecial; let d1 = self.d1 as f64; let d2 = self.d2 as f64; let d1o2 = d1 / 2.0; let d2o2 = d2 / 2.0; d1o2.gamma().ln() + d2o2.gamma().ln() - ((d1 + d2) / 2.0).gamma().ln() + (1.0 - d1o2) * (1.0 + d1o2).digamma() - (1.0 - d2o2) * (1.0 + d2o2).digamma() + ((d1 + d2) / 2.0) * ((d1 + d2) / 2.0).digamma() + (d1 / d2).ln() } } impl fmt::Display for FDist { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "F({}, {})", self.d1, self.d2) } }
true
2a170257f19f1002c764cc4fc2ff3656feb1aa9d
Rust
suluke/monobo
/experiments/advent_of_code_2021/src/day_03.rs
UTF-8
3,165
3.109375
3
[]
no_license
use std::fs; use std::path::PathBuf; fn part1() -> std::io::Result<()> { let mut input_path = PathBuf::new(); input_path.push(env!("CARGO_MANIFEST_DIR")); input_path.push("input"); input_path.push("03.txt"); struct State { one_counts: Vec<usize>, total: usize, } let state = State { one_counts: Vec::with_capacity(12), total: 0, }; let state = fs::read_to_string(input_path) .unwrap() .lines() .fold(state, |mut state, line| { state.total += 1; for (idx, a_char) in line.chars().enumerate() { if state.one_counts.len() <= idx { state.one_counts.push(0); } state.one_counts[idx] += if a_char == '0' { 0 } else { 1 }; } state }); let gamma = state .one_counts .iter() .map(|val| -> usize { (*val > (state.total / 2)) as usize }) .fold(0, |acc, val| acc * 2 + val); let eps = state .one_counts .iter() .map(|val| -> usize { (*val < (state.total / 2)) as usize }) .fold(0, |acc, val| acc * 2 + val); println!("day3/pt1: {}", gamma * eps); Ok(()) } fn oxgen(lines: &[String]) -> usize { let mut rem = lines.to_vec(); let mut pos = 0; while rem.len() > 1 { let mc = if rem.iter().fold(0, |acc, val| { acc + (val.chars().nth(pos).unwrap() == '1') as usize }) * 2 >= rem.len() {'1'} else {'0'}; rem = rem.iter().filter(|str| {str.chars().nth(pos).unwrap() == mc}).cloned().collect(); pos += 1; } rem[0] .chars() .fold(0, |acc, bit| acc * 2 + (bit == '1') as usize) } fn coscrub(lines: &[String]) -> usize { let mut rem = lines.to_vec(); let mut pos = 0; while rem.len() > 1 { let mc = if rem.iter().fold(0, |acc, val| { acc + (val.chars().nth(pos).unwrap() == '0') as usize }) * 2 <= rem.len() {'0'} else {'1'}; rem = rem.iter().filter(|str| {str.chars().nth(pos).unwrap() == mc}).cloned().collect(); pos += 1; } rem[0] .chars() .fold(0, |acc, bit| acc * 2 + (bit == '1') as usize) } fn part2() -> std::io::Result<()> { let mut input_path = PathBuf::new(); input_path.push(env!("CARGO_MANIFEST_DIR")); input_path.push("input"); input_path.push("03.txt"); let lines: Vec<String> = 
fs::read_to_string(input_path) .unwrap() .lines() .map(str::to_owned) .collect(); let ox = oxgen(&lines); let co = coscrub(&lines); println!("day3/pt2: {}", ox * co); Ok(()) } pub fn solve() -> std::io::Result<()> { part1()?; part2() } #[cfg(test)] mod tests { use super::*; #[test] fn pt2() { let lines: Vec<String> = "00100\n11110\n10110\n10111\n10101\n01111\n00111\n11100\n10000\n11001\n00010\n01010" .lines() .map(str::to_owned) .collect(); let ox = oxgen(&lines); let co = coscrub(&lines); assert_eq!(ox, 23); assert_eq!(co, 10); } }
true
297e121d268f34c9a46545ac791974158f9e7e14
Rust
cryptopossum/gp-v2-services
/orderbook/src/api/get_markets.rs
UTF-8
5,993
2.546875
3
[]
no_license
use anyhow::{anyhow, Result}; use ethcontract::{H160, U256}; use model::order::OrderKind; use num::{BigInt, BigRational}; use serde::{Deserialize, Serialize}; use shared::{conversions::U256Ext, price_estimate::PriceEstimating}; use std::sync::Arc; use std::{convert::Infallible, str::FromStr}; use warp::{hyper::StatusCode, reply, Filter, Rejection, Reply}; #[derive(Clone, Debug, PartialEq)] struct AmountEstimateQuery { market: Market, amount: U256, kind: OrderKind, } #[derive(Deserialize, Serialize)] struct AmountEstimateResult { #[serde(with = "serde_with::rust::display_fromstr")] amount: BigInt, token: H160, } struct TokenAmount(U256); impl FromStr for TokenAmount { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self(U256::from_dec_str(s)?)) } } #[derive(Clone, Debug, PartialEq, Default)] struct Market { base_token: H160, quote_token: H160, } impl FromStr for Market { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let parts: Vec<&str> = s.split('-').collect(); if parts.len() != 2 { Err(anyhow!( "Market needs to be consist of two addresses separated by -" )) } else { Ok(Market { base_token: H160::from_str(parts[0])?, quote_token: H160::from_str(parts[1])?, }) } } } fn get_amount_estimate_request( ) -> impl Filter<Extract = (AmountEstimateQuery,), Error = Rejection> + Clone { warp::path!("markets" / Market / OrderKind / TokenAmount) .and(warp::get()) .map(|market, kind, amount: TokenAmount| AmountEstimateQuery { market, kind, amount: amount.0, }) } fn get_amount_estimate_response( result: Result<BigRational>, query: AmountEstimateQuery, ) -> impl Reply { match result { Ok(price) => { let (amount, token) = match query.kind { OrderKind::Buy => ( query.amount.to_big_rational() * price, query.market.base_token, ), OrderKind::Sell => ( query.amount.to_big_rational() / price, query.market.quote_token, ), }; reply::with_status( reply::json(&AmountEstimateResult { amount: amount.to_integer(), token, }), 
StatusCode::OK, ) } Err(_) => reply::with_status( super::error("NotFound", "No price estimate found"), StatusCode::NOT_FOUND, ), } } pub fn get_amount_estimate( price_estimator: Arc<dyn PriceEstimating>, ) -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone { get_amount_estimate_request().and_then(move |query: AmountEstimateQuery| { let price_estimator = price_estimator.clone(); async move { let result = price_estimator .estimate_price( query.market.base_token, query.market.quote_token, query.amount, query.kind, ) .await; Result::<_, Infallible>::Ok(get_amount_estimate_response(result, query)) } }) } #[cfg(test)] mod tests { use super::*; use crate::api::response_body; use warp::test::request; #[tokio::test] async fn test_get_amount_estimate_request() { let get_query = |path| async move { request() .path(path) .filter(&get_amount_estimate_request()) .await .unwrap() }; let request = get_query("/markets/0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2-0x6b175474e89094c44da98b954eedeac495271d0f/sell/100").await; assert_eq!( request, AmountEstimateQuery { market: Market { base_token: H160::from_str("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") .unwrap(), quote_token: H160::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .unwrap() }, kind: OrderKind::Sell, amount: 100.into() } ); let request = get_query("/markets/0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2-0x6b175474e89094c44da98b954eedeac495271d0f/buy/100").await; assert_eq!(request.kind, OrderKind::Buy); } #[tokio::test] async fn test_get_amount_estimate_response_ok() { let query = AmountEstimateQuery { market: Market { base_token: H160::from_low_u64_be(1), quote_token: H160::from_low_u64_be(2), }, amount: 100.into(), kind: OrderKind::Sell, }; // Sell Order let response = get_amount_estimate_response(Ok(BigRational::from_integer(2.into())), query.clone()) .into_response(); assert_eq!(response.status(), StatusCode::OK); let estimate: AmountEstimateResult = 
serde_json::from_slice(response_body(response).await.as_slice()).unwrap(); assert_eq!(estimate.amount, 50.into()); assert_eq!(estimate.token, query.market.quote_token); // Buy Order let response = get_amount_estimate_response( Ok(BigRational::from_integer(2.into())), AmountEstimateQuery { kind: OrderKind::Buy, ..query.clone() }, ) .into_response(); let estimate: AmountEstimateResult = serde_json::from_slice(response_body(response).await.as_slice()).unwrap(); assert_eq!(estimate.amount, 200.into()); assert_eq!(estimate.token, query.market.base_token); } }
true
e2314cbb3fcd38567772515ff1d7dd108dca7f55
Rust
JiWonOck/Rust_study
/placeholder.rs
UTF-8
672
3.609375
4
[]
no_license
/* The `_` pattern matches any possible value. By placing it after all the
   other arms, `_` matches every case we did not list explicitly. `()` is
   just the unit value, so nothing happens in the `_` arm. This lets us say
   that we want to do nothing for all the possible values we did not
   enumerate before the `_` placeholder. */
let some_u8_value = 0u8;
match some_u8_value {
    1 => println!("one"),
    3 => println!("three"),
    5 => println!("five"),
    7 => println!("seven"),
    // Catch-all: every other u8 value is deliberately ignored.
    _ => (),
}
true
e0cc558ba9fc3e560f24a804126c8f589e043186
Rust
d3zd3z/euler
/rust/src/pr017.rs
UTF-8
2,899
3.78125
4
[]
no_license
// Problem 17
//
// 17 May 2002
//
//
// If the numbers 1 to 5 are written out in words: one, two, three, four,
// five, then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
//
// If all the numbers from 1 to 1000 (one thousand) inclusive were written
// out in words, how many letters would be used?
//
//
// NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
// forty-two) contains 23 letters and 115 (one hundred and fifteen) contains
// 20 letters. The use of "and" when writing out numbers is in compliance
// with British usage.
//
// 21124

use std::mem::replace;

define_problem!(pr017, 17, 21124);

/// Sums the letter counts of the English spellings of 1..=1000.
fn pr017() -> u64 {
    let mut conv = Converter::new();
    let mut result = 0;
    for i in 1 .. 1001 {
        let text = conv.make_english(i);
        // println(fmt!("%4u '%s'", i, text));
        result += count_letters(&text[..]);
    }
    result
}

/// Counts only alphabetic characters; spaces and hyphens are skipped,
/// matching the problem statement.
fn count_letters(text: &str) -> u64 {
    let mut count = 0;
    for ch in text.chars() {
        if ch.is_alphabetic() {
            count += 1;
        }
    }
    count
}

/// Builds English number words into `buffer`. `add_space` records whether
/// the next appended fragment must be preceded by a space.
struct Converter {
    buffer: String,
    add_space: bool
}

impl Converter {
    /// Creates a converter with an empty buffer.
    pub fn new() -> Box<Converter> {
        Box::new(Converter { add_space: false, buffer: String::new() })
    }
}

impl Converter {
    /// Spells out `n` (1..=1000) in British English ("and" after the
    /// hundreds). Panics for n > 1000.
    fn make_english(&mut self, n: u64) -> String {
        self.add_space = false;
        self.buffer = String::new();
        let mut work = n;
        if work > 1000 {
            panic!("Number too large")
        }
        if work == 1000 {
            return "one thousand".to_string()
        }
        if work >= 100 {
            self.add_ones(work/100);
            self.add("hundred");
            work %= 100;
            if work > 0 {
                self.add("and");
            }
        }
        if work >= 20 {
            self.add_tens(work/10);
            work %= 10;
            if work > 0 {
                // Hyphenate with no surrounding spaces ("forty-two"):
                // clear the flag before "-" and again right after, since
                // add() re-sets it.
                self.add_space = false;
                self.add("-");
                self.add_space = false;
            }
        }
        if work >= 1 {
            // Handles 1..=19 here, including the teens (ONES has 19 entries).
            self.add_ones(work);
        }
        // let result = copy self.buffer;
        // Hand the built string to the caller and reset the buffer.
        replace(&mut self.buffer, String::new())
    }

    /// Appends the word for 1..=19 (1-based lookup into ONES).
    fn add_ones(&mut self, n: u64) {
        self.add(ONES[n as usize - 1]);
    }

    /// Appends the word for a tens digit 1..=9 (1-based lookup into TENS).
    fn add_tens(&mut self, n: u64) {
        self.add(TENS[n as usize - 1]);
    }

    /// Appends `text`, inserting a separating space first when the previous
    /// fragment requested one, and requests one for the next fragment.
    fn add(&mut self, text: &str) {
        if self.add_space {
            self.buffer.push(' ');
        }
        self.buffer.push_str(text);
        self.add_space = true;
    }
}

static ONES: &[&str] = &[
    "one", "two", "three", "four", "five", "six", "seven", "eight", "nine",
    "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen",
    "seventeen", "eighteen", "nineteen"
];

static TENS: &[&str] = &[
    "ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy",
    "eighty", "ninety"
];
true
d257598fc9ecf8dc365b7a826aea1329e3478e01
Rust
Becavalier/rust-by-example-cases
/error-handling/src/main.rs
UTF-8
9,427
3.609375
4
[ "MIT" ]
permissive
use std::num::ParseIntError; fn drink(beverage: &str) { if beverage == "lemonade" { panic!("AAAaaaaa!!!!"); } println!("Some refreshing {} is all I need.", beverage); } fn give_commoner(gift: Option<&str>) { match gift { Some("snake") => println!("Yuck! I'm putting this snake back in the forest."), Some(inner) => println!("{}? How nice.", inner), None => println!("No gift? Oh well."), } } fn give_royal(gift: Option<&str>) { let inside = gift.unwrap(); if inside == "snake" { panic!("AAAaaaaa!!!!"); } println!("I love {}s!!!!!", inside); } fn next_birthday(current_age: Option<u8>) -> Option<String> { let next_age: u8 = current_age?; Some(format!("Next year I will be {}", next_age)) } struct Person { job: Option<Job>, } #[derive(Clone, Copy)] struct Job { phone_number: Option<PhoneNumber>, } #[derive(Clone, Copy)] struct PhoneNumber { area_code: Option<u8>, number: u32, } impl Person { fn work_phone_area_code(&self) -> Option<u8> { self.job?.phone_number?.area_code } } #[derive(Debug)] enum Food { Apple, Carrot, Potato, } #[derive(Debug)] struct Peeled(Food); #[derive(Debug)] struct Chopped(Food); #[derive(Debug)] struct Cooked(Food); fn peel(food: Option<Food>) -> Option<Peeled> { match food { Some(food) => Some(Peeled(food)), None => None, } } fn chop(peeled: Option<Peeled>) -> Option<Chopped> { match peeled { Some(Peeled(food)) => Some(Chopped(food)), None => None, } } fn cook(chopped: Option<Chopped>) -> Option<Cooked> { chopped.map(|Chopped(food)| Cooked(food)) } fn process(food: Option<Food>) -> Option<Cooked> { food.map(|f| Peeled(f)) .map(|Peeled(f)| Chopped(f)) .map(|Chopped(f)| Cooked(f)) } fn eat(food: Option<Cooked>) { match food { Some(food) => println!("Mmm. I love {:?}", food), None => println!("Oh no! 
It wasn't edible."), } } fn multiply(first_number_str: &str, second_number_str: &str) -> i32 { let first_number = first_number_str.parse::<i32>().unwrap(); let second_number = second_number_str.parse::<i32>().unwrap(); first_number + second_number } /* Option<T, U>::and_then */ fn sq(x: u32) -> Option<u32> { Some(x * x) } fn nope(_: u32) -> Option<u32> { None } // define a generic alias for a `Result`. type AliasedResult<T> = Result<T, ParseIntError>; fn multiply_with_map(first_number_str: &str, second_number_str: &str) -> AliasedResult<i32> { first_number_str.parse::<i32>().and_then(|first_number| { second_number_str .parse::<i32>() .map(|second_number| first_number * second_number) }) } /* Early returns */ fn multiply_with_early_returns( first_number_str: &str, second_number_str: &str, ) -> AliasedResult<i32> { let first_number = match first_number_str.parse::<i32>() { Ok(first_number) => first_number, Err(e) => return Err(e), // early return. }; let second_number = match second_number_str.parse::<i32>() { Ok(second_number) => second_number, Err(e) => return Err(e), }; Ok(first_number * second_number) } fn print(result: AliasedResult<i32>) { match result { Ok(n) => println!("n is {}", n), Err(e) => println!("Error: {}", e), } } /* ? */ fn multiply_with_question_mark( first_number_str: &str, second_number_str: &str, ) -> AliasedResult<i32> { let first_number = first_number_str.parse::<i32>()?; let second_number = second_number_str.parse::<i32>()?; Ok(first_number * second_number) } /* Handle mixed error types */ fn double_first(vec: Vec<&str>) -> Result<Option<i32>, ParseIntError> { let opt = vec.first().map(|first| first.parse::<i32>().map(|n| 2 * n)); // Option<Result<i32, ParseIntError>> -> Result<Option<i32>, ParseIntError>. opt.map_or(Ok(None), |r| r.map(Some)) // `r.map(Some)` will be the return value. 
} /* Defining an error type */ use std::fmt; #[derive(Debug, Clone)] struct DoubleError; impl fmt::Display for DoubleError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "invalid first item to double.") } } fn double_first_x(vec: Vec<&str>) -> std::result::Result<i32, DoubleError> { vec.first() // change the error to our new type. .ok_or(DoubleError) .and_then(|s| { s.parse::<i32>() // update to the new error type here also. .map_err(|_| DoubleError) .map(|i| 2 * i) }) } /* Boxing errors */ use std::error; #[derive(Debug, Clone)] struct EmptyVec; impl fmt::Display for EmptyVec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "invalid first item to double.") } } impl error::Error for EmptyVec {} fn double_first_box(vec: Vec<&str>) -> std::result::Result<i32, Box<dyn error::Error>> { vec.first() // lazily evaluated, compared to `ok_or()`. .ok_or_else(|| EmptyVec.into()) // converts to Box, move value to heap. .and_then(|s| { s.parse::<i32>() .map_err(|e| e.into()) // converts to Box. .map(|i| 2 * i) }) } /* Other uses of ? */ #[derive(Debug)] struct EmptyVecB; impl fmt::Display for EmptyVecB { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "invalid first item to double.") } } impl error::Error for EmptyVecB {} fn double_first_question_box(vec: Vec<&str>) -> std::result::Result<i32, Box<dyn error::Error>> { // use `?` to get the inner value out immediately. // use ? where the error is convertible to the return type, it will convert automatically. // `Box::<EmptyVec>::from(EmptyVec)`. 
let first = vec.first().ok_or(EmptyVec)?; let parsed = first.parse::<i32>()?; Ok(2 * parsed) } /* Wrapping errors */ use std::error::Error as _; #[derive(Debug)] enum DoubleErrorB { EmptyVec, Parse(ParseIntError), } impl fmt::Display for DoubleErrorB { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DoubleErrorB::EmptyVec => write!(f, "please use a vector with at least one element"), DoubleErrorB::Parse(ref e) => { write!(f, "the provided string could not be parsed as int") } } } } impl error::Error for DoubleErrorB { fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { DoubleErrorB::EmptyVec => None, DoubleErrorB::Parse(ref e) => Some(e), } } } impl From<ParseIntError> for DoubleErrorB { fn from(err: ParseIntError) -> DoubleErrorB { DoubleErrorB::Parse(err) } } fn double_first_with_wrapping(vec: Vec<&str>) -> std::result::Result<i32, DoubleErrorB> { let first = vec.first().ok_or(DoubleErrorB::EmptyVec)?; let parsed = first.parse::<i32>()?; Ok(2 * parsed) } fn main() -> Result<(), ParseIntError> { drink("water"); // drink("lemonade"); let food = Some("cabbage"); let snake = Some("snake"); let void = None; give_commoner(food); give_commoner(snake); give_commoner(void); let bird = Some("robin"); // let nothing = None; give_royal(bird); // give_royal(nothing); let p = Person { job: Some(Job { phone_number: Some(PhoneNumber { area_code: Some(61), number: 439222222, }), }), }; assert_eq!(p.work_phone_area_code(), Some(61)); let apple = Some(Food::Apple); let carrot = Some(Food::Carrot); let potato = None; let cooked_apple = cook(chop(peel(apple))); let cooked_carrot = cook(chop(peel(carrot))); let cooked_potato = process(potato); eat(cooked_apple); eat(cooked_carrot); eat(cooked_potato); assert_eq!(Some(2).and_then(sq).and_then(sq), Some(16)); assert_eq!(Some(2).and_then(sq).and_then(nope), None); let twenty = multiply("10", "2"); println!("double is {}", twenty); // let tt = multiply("t", "2"); // panic. 
// println!("double is {}", tt); let number_str = "10"; let number = match number_str.parse::<i32>() { Ok(number) => number, Err(e) => return Err(e), }; print(multiply_with_early_returns("10", "2")); print(multiply_with_early_returns("t", "2")); let strings = vec!["tofu", "93", "18"]; println!("{:?}", double_first_x(strings)); /* Iterating over Results */ let strings = vec!["tofu", "93", "18"]; let numbers: Vec<_> = strings .iter() .filter_map(|s| s.parse::<i32>().ok()) .collect(); println!("Results: {:?}", numbers); let numbers: Result<Vec<_>, _> = strings.iter().map(|s| s.parse::<i32>()).collect(); println!("Results: {:?}", numbers); let (numbers, errors): (Vec<_>, Vec<_>) = strings .into_iter() .map(|s| s.parse::<i32>()) .partition(Result::is_ok); println!("Numbers: {:?}", numbers); println!("Errors: {:?}", errors); // `collect()` - transforms an iterator into a collection. let numbers: Vec<_> = numbers.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); println!("Numbers: {:?}", numbers); println!("Errors: {:?}", errors); Ok(()) }
true
54e83fc60805daab2424bcde6f5bafbc6d28cdab
Rust
Monadic-Cat/breadrisc
/src/eight/base.rs
UTF-8
5,803
3.078125
3
[]
no_license
use log::info; use std::convert::TryFrom; use std::error::Error; use std::fmt; use std::fmt::{Display, Formatter}; use std::marker::PhantomData; use typenum::{Unsigned,U2, U3, IsGreater}; #[derive(Debug)] pub(crate) struct RegisterAddressOverflow {} impl Display for RegisterAddressOverflow { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "Too large") } } impl Error for RegisterAddressOverflow {} impl From<RegisterAddressOverflow> for &str { fn from(_: RegisterAddressOverflow) -> &'static str { "Too large" } } #[derive(Debug, Clone, Copy)] pub(crate) struct NRegisterAddress<N: Unsigned> { index: u8, _bound: PhantomData<N>, } impl<N: Unsigned> TryFrom<u8> for NRegisterAddress<N> { type Error = RegisterAddressOverflow; fn try_from(p: u8) -> Result<Self, Self::Error> { if p <= ((1 << N::to_u8()) - 1) { Ok(NRegisterAddress { index: p, _bound: PhantomData, }) } else { Err(RegisterAddressOverflow {}) } } } impl<N: Unsigned> From<NRegisterAddress<N>> for u8 { fn from(r: NRegisterAddress<N>) -> u8 { r.index } } impl<N: Unsigned> From<NRegisterAddress<N>> for usize { fn from(r: NRegisterAddress<N>) -> usize { r.index as usize } } /// DO NOT LET THIS GET OUTSIDE THE CRATE. /// SCREAMING NO OH GOD THAT'D BE TERRIBLE /// AND CONFUSING. 
pub(crate) trait MyFrom<T> { fn my_from(_: T) -> Self; } impl<N: Unsigned, M> MyFrom<NRegisterAddress<N>> for NRegisterAddress<M> where M: Unsigned + IsGreater<N>, { fn my_from(r: NRegisterAddress<N>) -> Self { Self { index: r.index, _bound: PhantomData } } } pub(crate) type RegisterAddress = NRegisterAddress<U3>; // #[derive(Debug, Clone, Copy)] // pub(crate) struct RegisterAddress(u8); // impl RegisterAddress { // pub(crate) fn new(p: u8) -> Result<RegisterAddress, RegisterAddressOverflow> { // if p <= 7 { // Ok(RegisterAddress(p)) // } else { // Err(RegisterAddressOverflow {}) // } // } // } // impl From<RegisterAddress> for usize { // fn from(a: RegisterAddress) -> Self { // let a = a.0; // a as Self // } // } // impl From<NRegisterAddress<U3>> for RegisterAddress { // fn from(r: NRegisterAddress<U3>) -> Self { // Self(r.index) // } // } #[derive(Debug, Clone, Copy)] pub(crate) enum IO { Read, Write, } /// Add instruction. Uses wrapping arithmetic. #[derive(Debug, Clone, Copy)] pub(crate) struct Add { pub(crate) destination: RegisterAddress, pub(crate) source: RegisterAddress, } /// Bitwise NAND instruction. #[derive(Debug, Clone, Copy)] pub(crate) struct Nand { pub(crate) destination: RegisterAddress, pub(crate) source: RegisterAddress, } /// Read/Write between memory and registers. #[derive(Debug, Clone, Copy)] pub(crate) struct Mem { pub(crate) destination: RegisterAddress, pub(crate) source: NRegisterAddress<U2>, pub(crate) direction: IO, } /// Move If Zero instruction. #[derive(Debug, Clone, Copy)] pub(crate) struct Miz { pub(crate) destination: RegisterAddress, pub(crate) source: RegisterAddress, } #[derive(Debug, Clone, Copy)] pub(crate) enum Instruction { Add(Add), Nand(Nand), Mem(Mem), Miz(Miz), } /// Need this for the VM. 
impl TryFrom<u8> for Instruction { type Error = &'static str; fn try_from(code: u8) -> Result<Self, Self::Error> { info!("Instruction Code: b{:08b}", code); let op = (code & (0b11 << 6)) >> 6; let op = if op == 0 { Instruction::Add(Add { destination: RegisterAddress::try_from((code & (0b111 << 3)) >> 3)?, source: RegisterAddress::try_from(code & 0b111)?, }) } else if op == 1 { Instruction::Nand(Nand { destination: RegisterAddress::try_from((code & (0b111 << 3)) >> 3)?, source: RegisterAddress::try_from(code & 0b111)?, }) } else if op == 2 { Instruction::Mem(Mem { destination: RegisterAddress::try_from((code & (0b111 << 3)) >> 3)?, direction: if (code & (0b1 << 2)) >> 2 == 1 { IO::Write } else { IO::Read }, source: NRegisterAddress::<U2>::try_from(code & 0b11)?, }) } else if op == 3 { Instruction::Miz(Miz { destination: RegisterAddress::try_from((code & (0b111 << 3)) >> 3)?, source: RegisterAddress::try_from(code & 0b111)?, }) } else { panic!("Something went terribly wrong.") }; Ok(op) } } /// Need this for the assembler. impl From<Instruction> for u8 { fn from(inst: Instruction) -> u8 { match inst { Instruction::Add(Add { destination, source, }) => 0 << 6 | destination.index << 3 | source.index, Instruction::Nand(Nand { destination, source, }) => 1 << 6 | destination.index << 3 | source.index, Instruction::Mem(Mem { destination, source, direction, }) => { 2 << 6 | destination.index << 3 | match direction { IO::Read => 0, IO::Write => 1, } << 2 | source.index } Instruction::Miz(Miz { destination, source, }) => 3 << 6 | destination.index << 3 | source.index, } } }
true
ed9d1a5626018160d9d9a307213c3ed12e830897
Rust
vinnyhoward/til
/rust/toy-problems/arrays/remove_first_and_last_char.rs
UTF-8
980
4.21875
4
[ "MIT" ]
permissive
// Remove First and Last Character
// It's pretty straightforward. Your goal is to create a
// function that removes the first and last characters of
// a string. You're given one parameter, the original string.
// You don't have to worry with strings with less than two
// characters.

/// Returns `s` without its first and its last character.
///
/// Operates on `char` boundaries, so multi-byte UTF-8 input is handled
/// correctly. Lifted out of `main` so it is reusable and testable; for
/// strings shorter than two characters (which the kata guarantees never
/// occur) the result is simply empty.
pub fn remove_char(s: &str) -> String {
    let mut chars = s.chars();
    chars.next();      // skip over the first character
    chars.next_back(); // skip over the last character
    // `Chars` is already an iterator, so collect it directly
    // (the original `.into_iter()` was a no-op).
    chars.collect()
}

fn main() {
    println!("{}", remove_char("eloquent")); // "loquen"
    println!("{}", remove_char("country")); // "ountr"
    println!("{}", remove_char("person")); // "erso"
}
true
99305428b2941dac733602fba3c2fb509f2f3a4e
Rust
Mandragorian/kitap
/src/hash.rs
UTF-8
549
2.53125
3
[]
no_license
use sha3::{Digest, Sha3_512};
use generic_array::GenericArray;

/// Digest produced by [`KitapHasher`]: a SHA3-512 output array.
pub type KitapHash = GenericArray<u8, <Sha3_512 as Digest>::OutputSize>;

/// Size of a [`KitapHash`] in bytes (SHA3-512 = 512 bits).
pub const HASH_SIZE: usize = 64;

/// Thin wrapper around an incremental SHA3-512 state.
pub struct KitapHasher {
    hasher: Sha3_512,
}

impl KitapHasher {
    /// Creates a hasher with a fresh SHA3-512 state.
    pub fn new() -> KitapHasher {
        Self { hasher: Sha3_512::new() }
    }

    /// Feeds `data` into the running digest; may be called repeatedly.
    pub fn input<D: AsRef<[u8]>>(&mut self, data: D) {
        self.hasher.input(data);
    }

    /// Consumes the hasher and returns the finalized digest.
    pub fn result(self) -> KitapHash {
        self.hasher.result().into()
    }
}
true
ccd4e20605cfddcd830cbdb5d8f4824aa0077117
Rust
CNife/leetcode
/rust/finished/src/degree_of_an_array.rs
UTF-8
1,591
3.484375
3
[]
no_license
use std::collections::HashMap;

/// Returns the length of the shortest contiguous subarray of `nums` whose
/// degree (maximum element frequency) equals the degree of `nums` itself.
///
/// Single O(n) pass: for every value record its count, first index, and
/// last index; the answer is the shortest `last - first + 1` span among
/// the values whose count equals the degree. This replaces the previous
/// count-map + BinaryHeap + per-key full rescan approach, which re-walked
/// the whole array once for every max-count key.
pub fn find_shortest_sub_array(nums: Vec<i32>) -> i32 {
    if nums.len() < 2 {
        // Empty -> 0, single element -> 1, same as before.
        return nums.len() as i32;
    }
    // value -> (count, first index, last index)
    let mut info: HashMap<i32, (usize, usize, usize)> = HashMap::new();
    for (i, &num) in nums.iter().enumerate() {
        // First occurrence initializes first == last == i; every later
        // occurrence bumps the count and pushes the last index forward.
        let entry = info.entry(num).or_insert((0, i, i));
        entry.0 += 1;
        entry.2 = i;
    }
    // `unwrap` is safe: nums has at least two elements here.
    let degree = info.values().map(|&(count, _, _)| count).max().unwrap();
    info.values()
        .filter(|&&(count, _, _)| count == degree)
        .map(|&(_, first, last)| last - first + 1)
        .min()
        .unwrap() as i32
}

#[test]
fn test() {
    let cases = vec![(vec![1, 2, 2, 3, 1], 2), (vec![1, 2, 2, 3, 1, 4, 2], 6)];
    for (nums, expect) in cases {
        assert_eq!(find_shortest_sub_array(nums), expect);
    }
}
true
d125cec268c813441c198c2228ea3f82ebbc5d6a
Rust
Frederik-Baetens/axum
/axum/src/add_extension.rs
UTF-8
1,821
2.75
3
[ "MIT" ]
permissive
// this is vendored from tower-http to reduce public dependencies use http::Request; use std::task::{Context, Poll}; use tower_layer::Layer; use tower_service::Service; /// [`Layer`] for adding some shareable value to [request extensions]. /// /// See [Sharing state with handlers](index.html#sharing-state-with-handlers) /// for more details. /// /// [request extensions]: https://docs.rs/http/latest/http/struct.Extensions.html #[derive(Clone, Copy, Debug)] pub struct AddExtensionLayer<T> { value: T, } impl<T> AddExtensionLayer<T> { /// Create a new [`AddExtensionLayer`]. pub fn new(value: T) -> Self { AddExtensionLayer { value } } } impl<S, T> Layer<S> for AddExtensionLayer<T> where T: Clone, { type Service = AddExtension<S, T>; fn layer(&self, inner: S) -> Self::Service { AddExtension { inner, value: self.value.clone(), } } } /// Middleware for adding some shareable value to [request extensions]. /// /// See [Sharing state with handlers](index.html#sharing-state-with-handlers) /// for more details. /// /// [request extensions]: https://docs.rs/http/latest/http/struct.Extensions.html #[derive(Clone, Copy, Debug)] pub struct AddExtension<S, T> { inner: S, value: T, } impl<ResBody, S, T> Service<Request<ResBody>> for AddExtension<S, T> where S: Service<Request<ResBody>>, T: Clone + Send + Sync + 'static, { type Response = S::Response; type Error = S::Error; type Future = S::Future; #[inline] fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { self.inner.poll_ready(cx) } fn call(&mut self, mut req: Request<ResBody>) -> Self::Future { req.extensions_mut().insert(self.value.clone()); self.inner.call(req) } }
true
b76396856659793f16dcbc69001da08fa83444d9
Rust
marco-c/gecko-dev-wordified
/third_party/rust/tempfile/tests/tempfile.rs
UTF-8
2,084
2.6875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
// NOTE(review): this chunk was whitespace-mangled in transit (every token
// space-separated and all commas stripped), making it invalid Rust. The
// code below reconstructs the tempfile integration-test suite from the
// surviving tokens; behavior is unchanged from the evident intent.
#![deny(rust_2018_idioms)]

use std::fs;
use std::io::{Read, Seek, SeekFrom, Write};
use std::sync::mpsc::{sync_channel, TryRecvError};
use std::thread;

/// An anonymous temp file supports write, seek-to-start, and read-back.
#[test]
fn test_basic() {
    let mut tmpfile = tempfile::tempfile().unwrap();
    write!(tmpfile, "abcde").unwrap();
    tmpfile.seek(SeekFrom::Start(0)).unwrap();
    let mut buf = String::new();
    tmpfile.read_to_string(&mut buf).unwrap();
    assert_eq!("abcde", buf);
}

/// Dropping a temp file leaves no entry behind in its directory.
#[test]
fn test_cleanup() {
    let tmpdir = tempfile::tempdir().unwrap();
    {
        let mut tmpfile = tempfile::tempfile_in(&tmpdir).unwrap();
        write!(tmpfile, "abcde").unwrap();
    }
    let num_files = fs::read_dir(&tmpdir).unwrap().count();
    assert!(num_files == 0);
}

// Only run this test on Linux. MacOS doesn't like us creating so many files apparently.
#[cfg(target_os = "linux")]
#[test]
fn test_pathological_cleaner() {
    let tmpdir = tempfile::tempdir().unwrap();
    let (tx, rx) = sync_channel(0);
    // Background thread that aggressively deletes everything in the temp
    // dir until the channel is closed, racing against file creation below.
    let cleaner_thread = thread::spawn(move || {
        let tmp_path = rx.recv().unwrap();
        while rx.try_recv() == Err(TryRecvError::Empty) {
            let files = fs::read_dir(&tmp_path).unwrap();
            for f in files {
                // skip errors
                if f.is_err() {
                    continue;
                }
                let f = f.unwrap();
                let _ = fs::remove_file(f.path());
            }
        }
    });

    // block until cleaner_thread makes progress
    tx.send(tmpdir.path().to_owned()).unwrap();

    // need 40-400 iterations to encounter race with cleaner on original system
    for _ in 0..10000 {
        let mut tmpfile = tempfile::tempfile_in(&tmpdir).unwrap();
        write!(tmpfile, "abcde").unwrap();
        tmpfile.seek(SeekFrom::Start(0)).unwrap();
        let mut buf = String::new();
        tmpfile.read_to_string(&mut buf).unwrap();
        assert_eq!("abcde", buf);
    }

    // close the channel to make cleaner_thread exit
    drop(tx);
    cleaner_thread.join().expect("The cleaner thread failed");
}
true
a04251700e704b7744feb2a3a63fd288793e2e94
Rust
traviskaufman/serde_bencode
/bencode/src/read.rs
UTF-8
2,232
3.140625
3
[ "MIT" ]
permissive
use std::io; use serde::iter; use super::error::Result; pub trait Read { fn next_char(&mut self) -> Option<Result<u8>>; fn peek_char(&self) -> Option<u8>; fn position(&self) -> usize; } pub struct IteratorRead<I> where I: Iterator<Item = io::Result<u8>> { iter: iter::LineColIterator<I>, ch: Option<u8>, } impl<I> IteratorRead<I> where I: Iterator<Item = io::Result<u8>> { pub fn new(raw_iter: I) -> Self { IteratorRead { iter: iter::LineColIterator::new(raw_iter), ch: None, } } } impl<I> Read for IteratorRead<I> where I: Iterator<Item = io::Result<u8>> { fn next_char(&mut self) -> Option<Result<u8>> { match self.iter.next() { Some(Ok(t)) => { self.ch = Some(t); Some(Ok(t)) } Some(err_res) => Some(err_res.map_err(From::from)), _ => None, } } fn peek_char(&self) -> Option<u8> { self.ch } fn position(&self) -> usize { self.iter.col() } } pub struct SliceRead<'a> { slice: &'a [u8], pos: usize, } impl<'a> SliceRead<'a> { pub fn new(slice: &'a [u8]) -> Self { SliceRead { slice: slice, pos: 0, } } } impl<'a> Read for SliceRead<'a> { fn next_char(&mut self) -> Option<Result<u8>> { if let Some(ch) = self.peek_char() { self.pos += 1; Some(Ok(ch)) } else { None } } fn peek_char(&self) -> Option<u8> { if self.pos == self.slice.len() { return None; } Some(self.slice[self.pos]) } fn position(&self) -> usize { self.pos } } pub struct StringRead<'a> { slice_read: SliceRead<'a>, } impl<'a> StringRead<'a> { pub fn new(s: &'a String) -> Self { StringRead { slice_read: SliceRead::new(s.as_bytes()) } } } impl<'a> Read for StringRead<'a> { fn next_char(&mut self) -> Option<Result<u8>> { self.slice_read.next_char() } fn peek_char(&self) -> Option<u8> { self.slice_read.peek_char() } fn position(&self) -> usize { self.slice_read.position() } }
true
8b6c97f80061ebb7c777ea48410a672526c0af09
Rust
paulfariello/aoc-2020
/aoc-2020-03/src/main.rs
UTF-8
2,766
3.421875
3
[]
no_license
#[macro_use]
extern crate log;

use std::fs::File;
use std::io::{Read, BufRead, BufReader};

/// A single grid cell: open ground (`.`) or a tree (`#`).
#[derive(Debug, Clone)]
enum Place {
    Free,
    Tree,
}

/// The toboggan map. Rows repeat infinitely to the right, modelled by
/// taking the column index modulo the row width.
#[derive(Debug, Clone)]
struct Map {
    data: Vec<Vec<Place>>,
    width: Option<usize>,
    height: usize,
}

impl Map {
    pub fn new() -> Self {
        Self {
            data: Vec::new(),
            width: None,
            height: 0,
        }
    }

    /// Parse one input line of '.'/'#' and append it as a row.
    /// Panics on any other character; asserts every row shares a width.
    pub fn add_row(&mut self, line: String) {
        let mut row = Vec::new();
        for c in line.chars() {
            row.push(match c {
                '.' => Place::Free,
                '#' => Place::Tree,
                _ => unreachable!(),
            });
        }
        match self.width {
            None => self.width = Some(row.len()),
            Some(width) => assert_eq!(width, row.len()),
        }
        self.data.push(row);
        self.height += 1;
    }

    /// Cell at row `x`, column `y`; wraps horizontally.
    pub fn place(&self, x: usize, y: usize) -> Place {
        self.data[x][y % self.width.unwrap()].clone()
    }

    /// Count trees hit while descending with slope (right, down) until
    /// the bottom of the map is passed.
    pub fn count_tree(&self, right: usize, down: usize) -> u64 {
        let (mut x, mut y) = (0, 0);
        let mut count = 0;
        while x < self.height {
            count += match self.place(x, y) {
                Place::Free => 0,
                Place::Tree => 1,
            };
            x += down;
            y += right;
        }
        count
    }
}

/// Read the whole puzzle input into a `Map`, one row per line.
fn load_map(input: &mut dyn Read) -> Map {
    let reader = BufReader::new(input);
    let mut map = Map::new();
    for line in reader.lines() {
        map.add_row(line.unwrap());
    }
    map
}

/// Part 1: trees hit on the (3 right, 1 down) slope.
fn solve_1(input_file: &str) -> Result<u64, &str> {
    // Fixed: the log label said "2020-01" in this day-03 solution.
    info!("AoC 2020-03 - part 1");
    let mut input = File::open(input_file).unwrap();
    let map = load_map(&mut input);
    Ok(map.count_tree(3, 1))
}

/// Part 2: product of trees hit on the five required slopes.
fn solve_2(input_file: &str) -> Result<u64, &str> {
    // Fixed: the log label said "2020-01" in this day-03 solution.
    info!("AoC 2020-03 - part 2");
    let mut input = File::open(input_file).unwrap();
    let map = load_map(&mut input);
    Ok(map.count_tree(1, 1)
        * map.count_tree(3, 1)
        * map.count_tree(5, 1)
        * map.count_tree(7, 1)
        * map.count_tree(1, 2))
}

fn main() {
    let logger = flexi_logger::Logger::with_env_or_str("debug");
    if let Err(e) = logger.start() {
        panic!("Cannot start logger: {}", e);
    }

    if let Ok(result) = solve_1("input.txt") {
        info!("Part 1: {}", result);
    }

    if let Ok(result) = solve_2("input.txt") {
        info!("Part 2: {}", result);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_1() {
        let result = solve_1("tests.txt");
        assert_eq!(result, Ok(7));
    }

    #[test]
    fn test_2() {
        let result = solve_2("tests.txt");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 336);
    }
}
true
adfd486ca7153b0e00ba78e6e82c6af6ef33655c
Rust
ANLAB-KAIST/rust-static-config
/tests/test_static_config.rs
UTF-8
1,931
2.6875
3
[]
no_license
extern crate static_config; use std::convert::TryInto; #[test] fn test_static_args() { assert!(true == static_config::config("level1.bool").try_into().unwrap()); assert!(-128i8 == static_config::config("level1.i8").try_into().unwrap()); assert!(-32768i16 == static_config::config("level1.i16").try_into().unwrap()); assert!(-2147483648i32 == static_config::config("level1.i32").try_into().unwrap()); assert!(-9223372036854775808i64 == static_config::config("level1.i64").try_into().unwrap()); assert!("string value with escape \"!!\"" .eq(static_config::config("level1.string").try_into().unwrap())); assert!( false == static_config::config("level1.level2.bool") .try_into() .unwrap() ); assert!( 255u8 == static_config::config("level1.level2.u8") .try_into() .unwrap() ); assert!( 65535u16 == static_config::config("level1.level2.u16") .try_into() .unwrap() ); assert!( 4294967295u32 == static_config::config("level1.level2.u32") .try_into() .unwrap() ); assert!( 9223372036854775807u64 == static_config::config("level1.level2.u64") .try_into() .unwrap() ); assert!( "string value with escape \"\\\"".eq(static_config::config("level1.level2.string") .try_into() .unwrap()) ); let _array: [u8; static_config::CONST_USIZE.LEVEL1_LEVEL2_U8]; assert!(255usize == static_config::CONST_USIZE.LEVEL1_LEVEL2_U8); assert!(65535usize == static_config::CONST_USIZE.LEVEL1_LEVEL2_U16); assert!(4294967295usize == static_config::CONST_USIZE.LEVEL1_LEVEL2_U32); assert!(9223372036854775807usize == static_config::CONST_USIZE.LEVEL1_LEVEL2_U64); }
true
006c3b93d4f2f3543207e1e035fd2f103998570b
Rust
kiskoza/30days-of-rust
/day12/src/person/person.rs
UTF-8
314
2.96875
3
[]
no_license
pub trait Person { fn new(first_name: String, last_name: String, id: i32) -> Self; fn print_person(&self) { println!("Name: {}, {}", self.last_name(), self.first_name()); println!("ID: {}", self.id()); } fn last_name(&self) -> &String; fn first_name(&self) -> &String; fn id(&self) -> &i32; }
true
3f4979954d390d07a8e82ffacb6738e5d289e777
Rust
zsytssk/cha
/src/core/lexer/token/token_data.rs
UTF-8
1,085
3.375
3
[ "MIT" ]
permissive
use super::super::config::keyword::Keyword; use super::super::config::punc::Punc; #[derive(Debug)] pub enum TokenData { Bool(bool), Keyword(Keyword), /** 变量 */ Identifier(String), /** 字符串 */ r#String(String), /** 标点符号 */ Punc(Punc), EOL, SPACE, EOF, } impl TokenData { pub fn from_str(val: &str) -> Option<TokenData> { match val { "true" => Some(TokenData::Bool(true)), "false" => Some(TokenData::Bool(false)), _ => { if let Some(keyword) = Keyword::from_str(val) { Some(TokenData::Keyword(keyword)) } else { Some(TokenData::Identifier(val.to_owned())) } } } } pub fn from_punc(val: &str) -> Option<TokenData> { match Punc::from_str(val) { Some(punc) => Some(TokenData::Punc(punc)), _ => None, } } pub fn from_string(val: &str) -> Option<TokenData> { Some(TokenData::r#String(val.to_owned())) } }
true
788f90764183c5f130b6e1150958e554a945247f
Rust
oxidecomputer/serde_tokenstream
/src/lib.rs
UTF-8
1,700
2.796875
3
[ "Apache-2.0" ]
permissive
// Copyright 2022 Oxide Computer Company //! This is a [`serde::Deserializer`] implementation for //! [`proc_macro2::TokenStream`]. It is intended for proc_macro builders who //! want rich configuration in their custom attributes. //! //! If you'd like the consumers of your macro use it like this: //! //! ```ignore //! #[my_macro { //! settings = { //! reticulate_splines = true, //! normalizing_power = false, //! }, //! disaster = "pandemic", //! }] //! ``` //! //! Your macro will start like this: //! //! ```ignore //! #[proc_macro_attribute] //! pub fn my_macro( //! attr: proc_macro::TokenStream, //! item: proc_macro::TokenStream, //! ) -> proc_macro::TokenStream { //! // ... //! # } //! ``` //! //! Use `serde_tokenstream` to deserialize `attr` into a structure with the //! `Deserialize` trait (typically via a `derive` macro): //! //! ``` //! # use proc_macro2::TokenStream; //! # use serde_tokenstream::from_tokenstream; //! # use serde::Deserialize; //! # #[derive(Deserialize)] //! # struct Config; //! # pub fn my_macro( //! # attr: proc_macro2::TokenStream, //! # item: proc_macro2::TokenStream, //! # ) -> proc_macro2::TokenStream { //! let config = match from_tokenstream::<Config>(&TokenStream::from(attr)) { //! Ok(c) => c, //! Err(err) => return err.to_compile_error().into(), //! }; //! # item //! # } //! ``` mod ibidem; mod ordered_map; mod serde_tokenstream; pub use crate::ibidem::ParseWrapper; pub use crate::ibidem::TokenStreamWrapper; pub use crate::ordered_map::OrderedMap; pub use crate::serde_tokenstream::from_tokenstream; pub use crate::serde_tokenstream::Error; pub use crate::serde_tokenstream::Result;
true
611ad8fb373654726ce5846b252c33a231867520
Rust
arielgabizon/sapling-crypto
/src/redjubjub.rs
UTF-8
7,164
2.8125
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
//! Implementation of RedJubjub, a specialization of RedDSA to the Jubjub curve.
//! See section 5.4.6 of the Sapling protocol specification.

use pairing::{Field, PrimeField, PrimeFieldRepr};
use rand::Rng;
use std::io::{self, Read, Write};

use jubjub::{FixedGenerators, JubjubEngine, JubjubParams, Unknown, edwards::Point};
use util::{hash_to_scalar};

// Deserialize a little-endian scalar, rejecting encodings that do not
// represent an element of the scalar field Fs.
fn read_scalar<E: JubjubEngine, R: Read>(reader: R) -> io::Result<E::Fs> {
    let mut s_repr = <E::Fs as PrimeField>::Repr::default();
    s_repr.read_le(reader)?;

    match E::Fs::from_repr(s_repr) {
        Ok(s) => Ok(s),
        Err(_) => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "scalar is not in field",
        )),
    }
}

// Serialize a scalar in its little-endian representation.
fn write_scalar<E: JubjubEngine, W: Write>(s: &E::Fs, writer: W) -> io::Result<()> {
    s.into_repr().write_le(writer)
}

// H*(a || b): the RedJubjub hash-to-scalar with its fixed
// personalization string.
fn h_star<E: JubjubEngine>(a: &[u8], b: &[u8]) -> E::Fs {
    hash_to_scalar::<E>(b"Zcash_RedJubjubH", a, b)
}

/// A RedJubjub signature: the 32-byte encodings of the point R ("rbar")
/// and the scalar S ("sbar").
pub struct Signature {
    rbar: [u8; 32],
    sbar: [u8; 32],
}

/// Signing key: a scalar of the Jubjub scalar field.
pub struct PrivateKey<E: JubjubEngine>(E::Fs);

/// Verification key: a Jubjub point of unknown subgroup (`Unknown`).
pub struct PublicKey<E: JubjubEngine>(pub Point<E, Unknown>);

impl Signature {
    /// Read a 64-byte signature as Rbar followed by Sbar.
    pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
        let mut rbar = [0u8; 32];
        let mut sbar = [0u8; 32];
        reader.read_exact(&mut rbar)?;
        reader.read_exact(&mut sbar)?;
        Ok(Signature { rbar, sbar })
    }

    /// Write the signature as Rbar followed by Sbar (64 bytes total).
    pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
        writer.write_all(&self.rbar)?;
        writer.write_all(&self.sbar)
    }
}

impl<E: JubjubEngine> PrivateKey<E> {
    /// Re-randomize the key: sk' = sk + alpha. Pairs with
    /// `PublicKey::randomize` using the same alpha.
    pub fn randomize(&self, alpha: E::Fs) -> Self {
        let mut tmp = self.0;
        tmp.add_assign(&alpha);
        PrivateKey(tmp)
    }

    /// Deserialize a private key (a single scalar).
    pub fn read<R: Read>(reader: R) -> io::Result<Self> {
        let pk = read_scalar::<E, R>(reader)?;
        Ok(PrivateKey(pk))
    }

    /// Serialize the private key (a single scalar).
    pub fn write<W: Write>(&self, writer: W) -> io::Result<()> {
        write_scalar::<E, W>(&self.0, writer)
    }

    /// Produce a RedDSA signature over `msg`, using the fixed generator
    /// selected by `p_g`. The nonce r is derived from fresh randomness T
    /// and the message via H*.
    pub fn sign<R: Rng>(
        &self,
        msg: &[u8],
        rng: &mut R,
        p_g: FixedGenerators,
        params: &E::Params,
    ) -> Signature {
        // T = (l_H + 128) bits of randomness
        // For H*, l_H = 512 bits
        let mut t = [0u8; 80];
        rng.fill_bytes(&mut t[..]);

        // r = H*(T || M)
        let r = h_star::<E>(&t[..], msg);

        // R = r . P_G
        let r_g = params.generator(p_g).mul(r, params);
        let mut rbar = [0u8; 32];
        r_g.write(&mut rbar[..])
            .expect("Jubjub points should serialize to 32 bytes");

        // S = r + H*(Rbar || M) . sk
        let mut s = h_star::<E>(&rbar[..], msg);
        s.mul_assign(&self.0);
        s.add_assign(&r);
        let mut sbar = [0u8; 32];
        write_scalar::<E, &mut [u8]>(&s, &mut sbar[..])
            .expect("Jubjub scalars should serialize to 32 bytes");

        Signature { rbar, sbar }
    }
}

impl<E: JubjubEngine> PublicKey<E> {
    /// Derive the verification key vk = sk . P_G.
    pub fn from_private(privkey: &PrivateKey<E>, p_g: FixedGenerators, params: &E::Params) -> Self {
        let res = params.generator(p_g).mul(privkey.0, params).into();
        PublicKey(res)
    }

    /// Re-randomize the key: vk' = vk + alpha . P_G. Pairs with
    /// `PrivateKey::randomize` using the same alpha.
    pub fn randomize(&self, alpha: E::Fs, p_g: FixedGenerators, params: &E::Params) -> Self {
        let res: Point<E, Unknown> = params.generator(p_g).mul(alpha, params).into();
        let res = res.add(&self.0, params);
        PublicKey(res)
    }

    /// Deserialize a public key (a curve point).
    pub fn read<R: Read>(reader: R, params: &E::Params) -> io::Result<Self> {
        let p = Point::read(reader, params)?;
        Ok(PublicKey(p))
    }

    /// Serialize the public key (a curve point).
    pub fn write<W: Write>(&self, writer: W) -> io::Result<()> {
        self.0.write(writer)
    }

    /// Verify `sig` over `msg`: checks S . P_G == R + H*(Rbar || M) . vk.
    /// Returns false (rather than erroring) on malformed R or S.
    pub fn verify(
        &self,
        msg: &[u8],
        sig: &Signature,
        p_g: FixedGenerators,
        params: &E::Params,
    ) -> bool {
        // c = H*(Rbar || M)
        let c = h_star::<E>(&sig.rbar[..], msg);

        // Signature checks:
        // R != invalid
        let r = match Point::read(&sig.rbar[..], params) {
            Ok(r) => r,
            Err(_) => return false,
        };
        // S < order(G)
        // (E::Fs guarantees its representation is in the field)
        let s = match read_scalar::<E, &[u8]>(&sig.sbar[..]) {
            Ok(s) => s,
            Err(_) => return false,
        };
        // S . P_G = R + c . vk
        self.0.mul(c, params).add(&r, params)
            == params.generator(p_g).mul(s, params).into()
    }
}

#[cfg(test)]
mod tests {
    use pairing::bls12_381::Bls12;
    use rand::thread_rng;

    use jubjub::JubjubBls12;

    use super::*;

    // Keys and signatures must survive a serialize/deserialize round
    // trip, and deserialized keys must still verify.
    #[test]
    fn round_trip_serialization() {
        let rng = &mut thread_rng();
        let p_g = FixedGenerators::SpendingKeyGenerator;
        let params = &JubjubBls12::new();

        for _ in 0..1000 {
            let sk = PrivateKey::<Bls12>(rng.gen());
            let vk = PublicKey::from_private(&sk, p_g, params);
            let msg = b"Foo bar";
            let sig = sk.sign(msg, rng, p_g, params);

            let mut sk_bytes = [0u8; 32];
            let mut vk_bytes = [0u8; 32];
            let mut sig_bytes = [0u8; 64];
            sk.write(&mut sk_bytes[..]).unwrap();
            vk.write(&mut vk_bytes[..]).unwrap();
            sig.write(&mut sig_bytes[..]).unwrap();

            let sk_2 = PrivateKey::<Bls12>::read(&sk_bytes[..]).unwrap();
            let vk_2 = PublicKey::from_private(&sk_2, p_g, params);
            let mut vk_2_bytes = [0u8; 32];
            vk_2.write(&mut vk_2_bytes[..]).unwrap();
            assert!(vk_bytes == vk_2_bytes);

            let vk_2 = PublicKey::<Bls12>::read(&vk_bytes[..], params).unwrap();
            let sig_2 = Signature::read(&sig_bytes[..]).unwrap();
            assert!(vk.verify(msg, &sig_2, p_g, params));
            assert!(vk_2.verify(msg, &sig, p_g, params));
            assert!(vk_2.verify(msg, &sig_2, p_g, params));
        }
    }

    // Signatures verify for the signed message only, including after
    // matching key re-randomization.
    #[test]
    fn random_signatures() {
        let rng = &mut thread_rng();
        let p_g = FixedGenerators::SpendingKeyGenerator;
        let params = &JubjubBls12::new();

        for _ in 0..1000 {
            let sk = PrivateKey::<Bls12>(rng.gen());
            let vk = PublicKey::from_private(&sk, p_g, params);

            let msg1 = b"Foo bar";
            let msg2 = b"Spam eggs";

            let sig1 = sk.sign(msg1, rng, p_g, params);
            let sig2 = sk.sign(msg2, rng, p_g, params);

            assert!(vk.verify(msg1, &sig1, p_g, params));
            assert!(vk.verify(msg2, &sig2, p_g, params));
            assert!(!vk.verify(msg1, &sig2, p_g, params));
            assert!(!vk.verify(msg2, &sig1, p_g, params));

            let alpha = rng.gen();
            let rsk = sk.randomize(alpha);
            let rvk = vk.randomize(alpha, p_g, params);

            let sig1 = rsk.sign(msg1, rng, p_g, params);
            let sig2 = rsk.sign(msg2, rng, p_g, params);

            assert!(rvk.verify(msg1, &sig1, p_g, params));
            assert!(rvk.verify(msg2, &sig2, p_g, params));
            assert!(!rvk.verify(msg1, &sig2, p_g, params));
            assert!(!rvk.verify(msg2, &sig1, p_g, params));
        }
    }
}
true
39894cd8bc40bb11f6f012b8fc1e833254c72701
Rust
mkeeter/advent-of-code
/2019/18/src/main.rs
UTF-8
4,418
2.875
3
[]
no_license
use smallvec::{smallvec, SmallVec};
use std::collections::{HashMap, HashSet, VecDeque};
use std::io::BufRead;

////////////////////////////////////////////////////////////////////////////////

// Grid of (x, y) -> raw input character.
type Map = HashMap<(i32, i32), char>;
// Positions of up to four robots (part 2 uses all four slots).
type Bots = SmallVec<[(i32, i32); 4]>;

// Search state: robot positions plus a bitmask of keys held
// (bit i set <=> key ('a' + i) collected).
#[derive(Clone, Hash, Eq, PartialEq)]
struct State {
    bots: Bots,
    keys: u32,
}

type Cache = HashMap<State, u32>;

// Edge from a position to a key tile: the doors crossed on the way
// (required_keys), the key gained (new_key), and the step count.
struct Edge {
    target_x: i32,
    target_y: i32,
    required_keys: u32,
    new_key: u32,
    steps: u32,
}

type Edges = HashMap<(i32, i32), Vec<Edge>>;

////////////////////////////////////////////////////////////////////////////////

// BFS from (x, y) to every reachable key, recording for each one the
// distance and the set of doors passed through en route.
fn explore(x: i32, y: i32, map: &Map) -> Vec<Edge> {
    let mut todo = VecDeque::new();
    todo.push_back((x, y, 0, 0));

    let mut found = Vec::new();
    let mut seen = HashSet::new();
    while let Some((tx, ty, keys, step)) = todo.pop_front() {
        if !seen.insert((tx, ty)) {
            continue;
        }
        // Off-map cells read as walls.
        let c = *map.get(&(tx, ty)).unwrap_or(&'#');
        let mut door = 0;

        // Found a key :D
        if char::is_lowercase(c) && (tx != x || ty != y) {
            let key = 1 << ((c as u8) - b'a') as u32;
            found.push(Edge {
                target_x: tx,
                target_y: ty,
                required_keys: keys,
                new_key: key,
                steps: step,
            });

        // Found a wall :(
        } else if c == '#' {
            continue;

        // Found a door :)
        } else if char::is_uppercase(c) {
            door = 1 << ((c as u8) - b'A') as u32;
        }

        // Take new steps
        for (dx, dy) in &[(0, 1), (0, -1), (1, 0), (-1, 0)] {
            todo.push_back((tx + dx, ty + dy, keys | door, step + 1));
        }
    }
    found
}

// Minimum steps to collect all remaining keys from `state`, memoized on
// (robot positions, keys held). For each robot, tries every key edge
// whose doors are already openable and whose key is not yet held.
fn solve(state: State, target: u32, edges: &Edges, cache: &mut Cache) -> u32 {
    if state.keys == target {
        return 0;
    } else if let Some(c) = cache.get(&state) {
        return *c;
    }

    let r = state
        .bots
        .iter()
        .enumerate()
        .flat_map(|(i, b)| edges.get(&(b.0, b.1)).unwrap().iter().map(move |e| (i, e)))
        .filter(|(_i, e)| {
            (e.required_keys & state.keys) == e.required_keys && (e.new_key & state.keys) == 0
        })
        .map(|(i, e)| {
            // Move robot i to the key and recurse.
            let mut next = state.clone();
            next.bots[i].0 = e.target_x;
            next.bots[i].1 = e.target_y;
            next.keys |= e.new_key;
            e.steps + solve(next, target, edges, cache)
        })
        .min()
        .unwrap();
    cache.insert(state, r);
    r
}

// Precompute key-to-key edges from every key tile and every robot
// starting position.
fn build_graph(bots: &Bots, tiles: &Map) -> Edges {
    tiles
        .iter()
        .filter(|(_k, v)| char::is_lowercase(**v))
        .map(|(k, _v)| k)
        .chain(bots.iter())
        .map(|p| (*p, explore(p.0, p.1, &tiles)))
        .collect::<Edges>()
}

fn main() {
    // Parse the grid from stdin, collecting the full key bitmask and
    // the '@' start position along the way.
    let mut tiles: Map = HashMap::new();
    let mut target = 0;
    let mut start = (0, 0);
    for (y, line) in std::io::stdin().lock().lines().enumerate() {
        for (x, c) in line.unwrap().chars().enumerate() {
            tiles.insert((x as i32, y as i32), c);
            if char::is_lowercase(c) {
                let key = 1 << ((c as u8) - b'a') as u32;
                target |= key;
            } else if c == '@' {
                start = (x as i32, y as i32);
            }
        }
    }

    ////////////////////////////////////////////////////////////////////////////
    // Part 1
    let mut cache = Cache::new();
    let bots = smallvec![(start.0, start.1)];
    let edges = build_graph(&bots, &tiles);
    let state = State { bots, keys: 0 };
    println!("Part 1: {}", solve(state, target, &edges, &mut cache));

    ////////////////////////////////////////////////////////////////////////////
    // Part 2
    let mut cache = Cache::new();
    let mut tiles = tiles;

    // Modify the map to add a cross pattern at the bot's original location
    for (dx, dy) in &[(0, 0), (1, 0), (-1, 0), (0, 1), (0, -1)] {
        tiles.insert((start.0 + dx, start.1 + dy), '#');
    }

    // We're now running four bots simultaneously
    let bots = [(1, 1), (1, -1), (-1, 1), (-1, -1)]
        .iter()
        .map(|(dx, dy)| (start.0 + dx, start.1 + dy))
        .collect();
    let edges = build_graph(&bots, &tiles);
    let state = State { bots, keys: 0 };
    println!("Part 2: {}", solve(state, target, &edges, &mut cache));
}
true