blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
227a0db5a406c657952a2365aa47f10317f34f46
Rust
rethab/rab
/src/http.rs
UTF-8
3,156
3.203125
3
[ "Apache-2.0" ]
permissive
use url::{Position, Url}; pub struct Response { pub status: u16, pub server: Option<String>, // Server header pub body_length: Option<usize>, } impl Response { pub fn parse(resp: &[u8], status_only: bool) -> Result<Self, String> { let ascii_num = |c: u8| (c - 48) as u16; if let [a, b, c] = resp[9..12] { let status = ascii_num(a) * 100 + ascii_num(b) * 10 + ascii_num(c); let (server, body_length) = if status_only { (None, None) } else { (parse_server(resp), body_length(resp)) }; Ok(Response { status, server, body_length, }) } else { Err(format!( "Cannot parse as HTTP header: {}", String::from_utf8_lossy(resp) )) } } } fn parse_server(resp: &[u8]) -> Option<String> { String::from_utf8_lossy(resp) .split("\r\n") .find_map(|line| { // TODO this copies the string if line.to_ascii_lowercase().starts_with("server:") { Some(line[8..].to_owned()) } else { None } }) } fn body_length(resp: &[u8]) -> Option<usize> { String::from_utf8_lossy(resp) .split("\r\n\r\n") .last() .map(|b| b.len()) } pub fn create_request(url: &Url, use_head: bool) -> String { let host = url.host_str().expect("Missing host"); let path = &url[Position::BeforePath..]; let method = if use_head { "HEAD" } else { "GET" }; format!( "{} {} HTTP/1.0\r\nHost: {}\r\n{}\r\n\r\n", method, path, host, "Accept: */*" ) } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_status_code() { assert_eq!( 200, Response::parse("HTTP/1.1 200 OK".as_bytes(), true) .unwrap() .status ); } #[test] fn test_parse_server() { let google_response = "HTTP/1.1 200 OK\r\n\ Date: Thu, 18 Mar 2021 19:24:37 GMT\r\n\ P3P: CP=\"This is not a P3P policy! 
See g.co/p3phelp for more info.\"\r\n\ Server: gws\r\n\ "; assert_eq!( Some("gws".to_owned()), Response::parse(google_response.as_bytes(), false) .unwrap() .server ); let google_response_simple = "HTTP/1.1 200 OK\r\nServer: gws\r\n"; assert_eq!( Some("gws".to_owned()), Response::parse(google_response_simple.as_bytes(), false) .unwrap() .server ); let no_server_response = "HTTP/1.1 404 Not Found\r\nContent-Type: application/json;charset=UTF-8\r\n"; assert_eq!( None, Response::parse(no_server_response.as_bytes(), false) .unwrap() .server ); assert_eq!( None, Response::parse(google_response.as_bytes(), true) .unwrap() .server ); } }
true
0eea93913571e88f49d542b16e9b72cd83863100
Rust
Leonti/rust-socket-server
/src/sensors/encoder.rs
UTF-8
1,825
2.9375
3
[]
no_license
use futures::sync::mpsc; use std::sync::{Arc, Mutex}; use std::thread; use sysfs_gpio::{Direction, Edge, Pin}; use crate::event::{EncoderEvent, Event, TimedEvent, Wheel}; type Tx = mpsc::UnboundedSender<TimedEvent>; pub struct Encoder { tx: Arc<Mutex<Tx>>, } fn port_listen(pin_number: u64, wheel: Wheel, tx: Arc<Mutex<Tx>>) -> sysfs_gpio::Result<()> { let input = Pin::new(pin_number); input.with_exported(|| { input.set_direction(Direction::In)?; input.set_edge(Edge::RisingEdge)?; let mut poller = input.get_poller()?; loop { match poller.poll(1000)? { Some(_val) => { let encoder_event = EncoderEvent { wheel: wheel.clone(), }; let event = Event::Encoder { event: encoder_event, }; let s_tx = tx.lock().unwrap(); match s_tx.unbounded_send(TimedEvent::new(event)) { Ok(_) => (), Err(e) => println!("encoder send error = {:?}", e), } } None => (), } } }) } impl Encoder { pub fn new(tx: Arc<Mutex<Tx>>) -> Encoder { Encoder { tx } } pub fn run(self) -> () { let left_tx = self.tx.clone(); thread::spawn(move || match port_listen(23, Wheel::Left, left_tx) { Ok(_) => (), Err(e) => println!("Interrupt failed on pin {} {}", 23, e), }); let right_tx = self.tx.clone(); thread::spawn(move || match port_listen(22, Wheel::Right, right_tx) { Ok(_) => (), Err(e) => println!("Interrupt failed on pin {} {}", 22, e), }); () } }
true
b43155dc727bb9ec61ab03b43296c172788d328a
Rust
DrSloth/Rocket
/core/lib/src/fairing/fairings.rs
UTF-8
4,307
2.6875
3
[ "Apache-2.0", "MIT" ]
permissive
use crate::{Rocket, Request, Response, Data, Build, Orbit}; use crate::fairing::{Fairing, Info, Kind}; use crate::log::PaintExt; use yansi::Paint; #[derive(Default)] pub struct Fairings { all_fairings: Vec<Box<dyn Fairing>>, failures: Vec<Info>, // Index into `attach` of last run attach fairing. last_launch: usize, // The vectors below hold indices into `all_fairings`. launch: Vec<usize>, liftoff: Vec<usize>, request: Vec<usize>, response: Vec<usize>, } macro_rules! iter { ($_self:ident . $kind:ident) => ({ let all_fairings = &$_self.all_fairings; $_self.$kind.iter().filter_map(move |i| all_fairings.get(*i).map(|f| &**f)) }) } impl Fairings { #[inline] pub fn new() -> Fairings { Fairings::default() } pub fn add(&mut self, fairing: Box<dyn Fairing>) -> &dyn Fairing { let kind = fairing.info().kind; let index = self.all_fairings.len(); self.all_fairings.push(fairing); if kind.is(Kind::Ignite) { self.launch.push(index); } if kind.is(Kind::Liftoff) { self.liftoff.push(index); } if kind.is(Kind::Request) { self.request.push(index); } if kind.is(Kind::Response) { self.response.push(index); } &*self.all_fairings[index] } pub fn append(&mut self, others: &mut Fairings) { for fairing in others.all_fairings.drain(..) { self.add(fairing); } } pub async fn handle_ignite(mut rocket: Rocket<Build>) -> Rocket<Build> { while rocket.fairings.last_launch < rocket.fairings.launch.len() { // We're going to move `rocket` while borrowing `fairings`... let mut fairings = std::mem::replace(&mut rocket.fairings, Fairings::new()); for fairing in iter!(fairings.launch).skip(fairings.last_launch) { let info = fairing.info(); rocket = match fairing.on_ignite(rocket).await { Ok(rocket) => rocket, Err(rocket) => { fairings.failures.push(info); rocket } }; fairings.last_launch += 1; } // Note that `rocket.fairings` may now be non-empty since launch // fairings could have added more fairings! Move them to the end. 
fairings.append(&mut rocket.fairings); rocket.fairings = fairings; } rocket } #[inline(always)] pub async fn handle_liftoff(&self, rocket: &Rocket<Orbit>) { let liftoff_futures = iter!(self.liftoff).map(|f| f.on_liftoff(rocket)); futures::future::join_all(liftoff_futures).await; } #[inline(always)] pub async fn handle_request(&self, req: &mut Request<'_>, data: &mut Data) { for fairing in iter!(self.request) { fairing.on_request(req, data).await } } #[inline(always)] pub async fn handle_response<'r>(&self, request: &'r Request<'_>, response: &mut Response<'r>) { for fairing in iter!(self.response) { fairing.on_response(request, response).await; } } pub fn audit(&self) -> Result<(), &[Info]> { match self.failures.is_empty() { true => Ok(()), false => Err(&self.failures) } } pub fn pretty_print(&self) { if !self.all_fairings.is_empty() { launch_info!("{}{}:", Paint::emoji("📡 "), Paint::magenta("Fairings")); } for fairing in &self.all_fairings { launch_info_!("{} ({})", Paint::default(fairing.info().name).bold(), Paint::blue(fairing.info().kind).bold()); } } } impl std::fmt::Debug for Fairings { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn debug_info<'a>(iter: impl Iterator<Item = &'a dyn Fairing>) -> Vec<Info> { iter.map(|f| f.info()).collect() } f.debug_struct("Fairings") .field("launch", &debug_info(iter!(self.launch))) .field("liftoff", &debug_info(iter!(self.liftoff))) .field("request", &debug_info(iter!(self.request))) .field("response", &debug_info(iter!(self.response))) .finish() } }
true
da9a419f84d096612893ea4f8c1b6997e517dffc
Rust
jonfast565/price-tracker
/price-scraper/src/utilities.rs
UTF-8
342
2.546875
3
[]
no_license
extern crate reqwest; use substring::Substring; pub async fn simple_get_request(url: &str) -> Result<String, Box<dyn std::error::Error>> { let resp = reqwest::get(url).await?; assert!(resp.status().is_success()); let resp_text = resp.text().await?; logger::info(format!("{}", resp_text.substring(0, 50))); Ok(resp_text) }
true
a78be912e7954841ea2f74777b339ffc732e1f73
Rust
bddap/anagram-solver
/src/solver.rs
UTF-8
6,310
3.59375
4
[]
no_license
use enum_map::EnumMap; use std::ops::Index; #[derive(Enum, Copy, Clone, Debug)] pub enum Letter { A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z, } impl Letter { pub fn from_byte(cha: &u8) -> Option<Letter> { let mut cha = *cha; if cha >= b'A' && cha <= b'Z' { cha = cha - b'A' + b'a'; } match cha { b'a' => Some(Letter::A), b'b' => Some(Letter::B), b'c' => Some(Letter::C), b'd' => Some(Letter::D), b'e' => Some(Letter::E), b'f' => Some(Letter::F), b'g' => Some(Letter::G), b'h' => Some(Letter::H), b'i' => Some(Letter::I), b'j' => Some(Letter::J), b'k' => Some(Letter::K), b'l' => Some(Letter::L), b'm' => Some(Letter::M), b'n' => Some(Letter::N), b'o' => Some(Letter::O), b'p' => Some(Letter::P), b'q' => Some(Letter::Q), b'r' => Some(Letter::R), b's' => Some(Letter::S), b't' => Some(Letter::T), b'u' => Some(Letter::U), b'v' => Some(Letter::V), b'w' => Some(Letter::W), b'x' => Some(Letter::X), b'y' => Some(Letter::Y), b'z' => Some(Letter::Z), _ => None, } } pub fn from_bytes(other: &[u8]) -> Option<Vec<Letter>> { other.iter().map(Letter::from_byte).collect() } pub fn inc(self) -> Option<Letter> { match self { Letter::A => Some(Letter::B), Letter::B => Some(Letter::C), Letter::C => Some(Letter::D), Letter::D => Some(Letter::E), Letter::E => Some(Letter::F), Letter::F => Some(Letter::G), Letter::G => Some(Letter::H), Letter::H => Some(Letter::I), Letter::I => Some(Letter::J), Letter::J => Some(Letter::K), Letter::K => Some(Letter::L), Letter::L => Some(Letter::M), Letter::M => Some(Letter::N), Letter::N => Some(Letter::O), Letter::O => Some(Letter::P), Letter::P => Some(Letter::Q), Letter::Q => Some(Letter::R), Letter::R => Some(Letter::S), Letter::S => Some(Letter::T), Letter::T => Some(Letter::U), Letter::U => Some(Letter::V), Letter::V => Some(Letter::W), Letter::W => Some(Letter::X), Letter::X => Some(Letter::Y), Letter::Y => Some(Letter::Z), Letter::Z => None, } } } pub struct LetterCounts(EnumMap<Letter, usize>); impl LetterCounts { pub 
fn count_letters(word: &[Letter]) -> LetterCounts { let mut ret = enum_map!{ Letter::A => 0, _ => 0, }; for letter in word { ret[*letter] += 1; } LetterCounts(ret) } pub fn checked_sub(&self, other: &LetterCounts) -> Option<LetterCounts> { let mut ret = EnumMap::new(); for (key, val) in self.0 { ret[key] = val.checked_sub(other.0[key])?; } Some(LetterCounts(ret)) } } impl Default for LetterCounts { fn default() -> LetterCounts { LetterCounts(enum_map!{_ => 0}) } } impl Index<Letter> for LetterCounts { type Output = usize; fn index(&self, index: Letter) -> &usize { &self.0[index] } } pub struct CountedWord { word: Vec<Letter>, counts: LetterCounts, } impl CountedWord { pub fn from_word(word: Vec<Letter>) -> CountedWord { let counts = LetterCounts::count_letters(&word); CountedWord { word, counts } } pub fn from_words(mut words: Vec<Vec<Letter>>) -> Vec<CountedWord> { words .drain(..) .map(|word| CountedWord::from_word(word)) .collect() } } pub struct WordList<'a> { sorted_lists: EnumMap<Letter, Vec<&'a CountedWord>>, } impl<'a> WordList<'a> { pub fn new(words: &'a [CountedWord]) -> WordList<'a> { let sorted_lists = enum_map!{ letter => WordList::sorted_list_for(words, letter), }; WordList { sorted_lists, } } fn sorted_list_for(words: &'a [CountedWord], letter: Letter) -> Vec<&'a CountedWord> { let mut ret: Vec<&CountedWord> = words.iter().filter(|cw| cw.counts[letter] != 0).collect(); ret.sort_by(|a, b| a.counts[letter].cmp(&b.counts[letter])); ret } } pub fn find<'a>(wl: &WordList, target: LetterCounts) -> Option<Vec<Vec<Letter>>> { _find(wl, Letter::A, target).map(|vec| { vec.iter() .map(|counted_word| counted_word.word.clone()) .collect() }) } fn _find<'a>( wl: &'a WordList, letter: Letter, target: LetterCounts, ) -> Option<Vec<&'a CountedWord>> { if target[letter] == 0 { // go to next letter // incrementing past Z returns none // none means we found an anagram return match letter.inc() { Some(next_letter) => _find(wl, next_letter, target), None => 
Some(Vec::new()), }; } for word in wl.sorted_lists[letter] .iter() .take_while(|w| w.counts[letter] <= target[letter]) { match target.checked_sub(&word.counts) { Some(new_target) => match _find(wl, letter, new_target) { Some(mut vec) => { vec.push(word); return Some(vec); } None => {} }, None => {} } } None } #[cfg(test)] mod tests { use super::{find, CountedWord, Letter, LetterCounts, WordList}; use english_words::get_words; #[test] fn find_word() { let words = get_words(); let counted_words = CountedWord::from_words(words); let word_list = WordList::new(&counted_words); let letters = Letter::from_bytes(b"racecarracecar").unwrap(); let letter_counts = LetterCounts::count_letters(&letters); find(&word_list, letter_counts).unwrap(); } }
true
6f710ca9d6e0572710fc55041f5c82eca9c48d53
Rust
waywardmonkeys/limn
/src/widgets/button.rs
UTF-8
5,721
2.671875
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
use glutin; use text_layout::Align; use cassowary::strength::*; use layout::constraint::*; use event::EventArgs; use widget::WidgetBuilder; use widget::property::Property; use widget::property::states::*; use widgets::text::TextBuilder; use input::mouse::{WidgetMouseButton, ClickEvent}; use draw::rect::{RectState, RectStyle}; use draw::text::TextStyle; use geometry::Size; use color::*; static COLOR_BUTTON_DEFAULT: Color = GRAY_80; static COLOR_BUTTON_PRESSED: Color = GRAY_60; static COLOR_BUTTON_ACTIVATED: Color = GRAY_40; static COLOR_BUTTON_ACTIVATED_PRESSED: Color = GRAY_30; static COLOR_BUTTON_INACTIVE: Color = GRAY_90; static COLOR_BUTTON_TEXT_INACTIVE: Color = GRAY_70; static BUTTON_BORDER: (f32, Color) = (1.0, GRAY_40); static BUTTON_BORDER_INACTIVE: (f32, Color) = (1.0, GRAY_70); lazy_static! { pub static ref STYLE_BUTTON: Vec<RectStyle> = { style!( RectStyle::BackgroundColor: selector!(COLOR_BUTTON_DEFAULT, ACTIVATED_PRESSED: COLOR_BUTTON_ACTIVATED_PRESSED, ACTIVATED: COLOR_BUTTON_ACTIVATED, PRESSED: COLOR_BUTTON_PRESSED, INACTIVE: COLOR_BUTTON_INACTIVE), RectStyle::CornerRadius: Some(5.0), RectStyle::Border: selector!(Some(BUTTON_BORDER), INACTIVE: Some(BUTTON_BORDER_INACTIVE)) ) }; pub static ref STYLE_BUTTON_TEXT: Vec<TextStyle> = { style!(TextStyle::TextColor: selector!(BLACK, INACTIVE: COLOR_BUTTON_TEXT_INACTIVE)) }; } // show whether button is held down or not fn button_handle_mouse_down(event: &WidgetMouseButton, mut args: EventArgs) { if !args.widget.props().contains(&Property::Inactive) { let &WidgetMouseButton(state, _) = event; match state { glutin::ElementState::Pressed => args.widget.add_prop(Property::Pressed), glutin::ElementState::Released => args.widget.remove_prop(Property::Pressed), } } } pub enum ToggleEvent { On, Off, } // show whether toggle button is activated fn toggle_button_handle_mouse(event: &WidgetMouseButton, mut args: EventArgs) { if let WidgetMouseButton(glutin::ElementState::Released, _) = *event { let activated = 
args.widget.props().contains(&Property::Activated); if activated { args.widget.event(ToggleEvent::Off); args.widget.remove_prop(Property::Activated); } else { args.widget.event(ToggleEvent::On); args.widget.add_prop(Property::Activated); } } } pub struct ToggleButtonBuilder { pub widget: WidgetBuilder, } widget_wrapper!(ToggleButtonBuilder); impl ToggleButtonBuilder { pub fn new() -> Self { let mut widget = WidgetBuilder::new("toggle_button"); widget .set_draw_state_with_style(RectState::new(), STYLE_BUTTON.clone()) .add_handler_fn(button_handle_mouse_down) .add_handler_fn(toggle_button_handle_mouse); widget.layout().add(constraints![ min_size(Size::new(70.0, 30.0)), shrink(), ]); ToggleButtonBuilder { widget: widget } } pub fn set_text(&mut self, on_text: &'static str, off_text: &'static str) -> &mut Self { let style = style!(parent: STYLE_BUTTON_TEXT, TextStyle::Text: selector!(off_text.to_owned(), ACTIVATED: on_text.to_owned()), TextStyle::Align: Align::Middle); let mut button_text_widget = TextBuilder::new_with_style(style); button_text_widget.set_name("button_text"); button_text_widget.layout().add(constraints![ bound_left(&self.widget).padding(20.0), bound_right(&self.widget).padding(20.0), bound_top(&self.widget).padding(10.0), bound_bottom(&self.widget).padding(10.0), center(&self.widget), ]); self.widget.add_child(button_text_widget); self } pub fn on_toggle<F>(&mut self, callback: F) -> &mut Self where F: Fn(&ToggleEvent, EventArgs) + 'static { self.widget.add_handler_fn(callback); self } } pub struct PushButtonBuilder { pub widget: WidgetBuilder, } widget_wrapper!(PushButtonBuilder); impl PushButtonBuilder { pub fn new() -> Self { let mut widget = WidgetBuilder::new("push_button"); widget .set_draw_state_with_style(RectState::new(), STYLE_BUTTON.clone()) .add_handler_fn(button_handle_mouse_down); widget.layout().add(constraints![ min_size(Size::new(100.0, 50.0)).strength(STRONG), shrink(), ]); PushButtonBuilder { widget: widget } } pub fn set_text(&mut 
self, text: &'static str) -> &mut Self { let style = style!(parent: STYLE_BUTTON_TEXT, TextStyle::Text: text.to_owned(), TextStyle::Align: Align::Middle); let mut button_text_widget = TextBuilder::new_with_style(style); button_text_widget.set_name("button_text"); button_text_widget.layout().add(constraints![ bound_left(&self.widget).padding(20.0), bound_right(&self.widget).padding(20.0), bound_top(&self.widget).padding(10.0), bound_bottom(&self.widget).padding(10.0), center(&self.widget), ]); self.widget.add_child(button_text_widget); self } } impl WidgetBuilder { pub fn on_click<F>(&mut self, on_click: F) -> &mut Self where F: Fn(&ClickEvent, &mut EventArgs) + 'static { self.add_handler_fn(move |event, mut args| { (on_click)(event, &mut args); *args.handled = true; }) } }
true
830de5f601e360e0bdba4e5f69f38c0f39cc8bd0
Rust
sagiegurari/duckscript
/duckscript_sdk/src/sdk/std/random/range/mod.rs
UTF-8
2,152
2.75
3
[ "Apache-2.0", "LicenseRef-scancode-free-unknown" ]
permissive
use crate::utils::pckg; use duckscript::types::command::{Command, CommandResult}; use rand::{thread_rng, Rng}; #[cfg(test)] #[path = "./mod_test.rs"] mod mod_test; #[derive(Clone)] pub(crate) struct CommandImpl { package: String, } impl Command for CommandImpl { fn name(&self) -> String { pckg::concat(&self.package, "Range") } fn aliases(&self) -> Vec<String> { vec!["random_range".to_string(), "rand_range".to_string()] } fn help(&self) -> String { include_str!("help.md").to_string() } fn clone_and_box(&self) -> Box<dyn Command> { Box::new((*self).clone()) } fn run(&self, arguments: Vec<String>) -> CommandResult { if arguments.len() < 2 { CommandResult::Error("Missing random min/max values.".to_string()) } else { match arguments[0].parse() { Ok(min) => match arguments[1].parse() { Ok(max) => { if min > max { CommandResult::Error( format!("Min value: {} bigger than max value: {}", min, max) .to_string(), ) } else { let mut rng = thread_rng(); let min_128: i128 = min; let max_128: i128 = max; let rand_value: i128 = rng.gen_range(min_128..max_128); CommandResult::Continue(Some(rand_value.to_string())) } } Err(_) => CommandResult::Error( format!("Non numeric max value: {} provided.", &arguments[1]).to_string(), ), }, Err(_) => CommandResult::Error( format!("Non numeric min value: {} provided.", &arguments[0]).to_string(), ), } } } } pub(crate) fn create(package: &str) -> Box<dyn Command> { Box::new(CommandImpl { package: package.to_string(), }) }
true
c048c0dc0e39681bd1162bc3c363f3a6ff6d80e9
Rust
qlurkin/qlurkin.github.io
/docs/courses/gpu/introrust/7_move_fn.rs
UTF-8
155
2.921875
3
[]
no_license
fn display(msg: String) { println!("{}", msg); } fn main() { let a: String = String::from("Hello"); display(a); println!("{}", a); }
true
d8eb2b976663c8c13abc1bdf0c10505d832c874d
Rust
Evan-Coleman/rust_practice
/10.3_lifetime/src/main.rs
UTF-8
576
3.84375
4
[]
no_license
use std::fmt::Display; fn main() { let x = String::from("Hello this is a longer string!"); let y = "This is short!"; let ann = "CONGRATS!"; println!("{}", longest_with_an_announcement(&x, &y, ann)); } // This example has a lifetime and generic in the function definition // The return type is a reference with a lifetime and a bound trait of Display fn longest_with_an_announcement<'a, T>(x: &'a str, y: &'a str, ann: T) -> &'a str where T: Display { println!("Announcement! {}", ann); if x.len() > y.len() { x } else { y } }
true
217883932bf6d3255cd58775e063008959cfe6b9
Rust
apeverse/oe4
/crates/runtime/src/agent/mod.rs
UTF-8
2,626
2.53125
3
[ "Apache-2.0" ]
permissive
// Copyright 2021 The OpenEthereum Authors. // Licensed under the Apache License, Version 2.0. mod local; mod remote; use async_trait::async_trait; use std::{ sync::{ atomic::{AtomicBool, Ordering}, Arc, }, time::Duration, }; use tokio::{ task::{self, JoinHandle}, time, }; /// Specifies the amount of time the agent runtime needs to wait /// before invoking the next step of the runloop pub enum Repeat { Auto, Never, After(Duration), } #[async_trait] pub trait Runloop: Send + Sync + Sized + Clone + 'static { /// Gets invoked in a loop for the duration of the lifetime of the /// agent until it is aborted or returns async fn step(&self) -> Repeat { Repeat::Never } /// Optional setup code that is executed once before the first step /// of the runloop is invoked async fn setup(&self) {} /// Optional cleanup code that runs once when an abort is requested. async fn teardown(&self) {} } #[derive(Clone)] pub struct Agent<Impl> where Impl: Runloop, { inner: Arc<Impl>, worker: Arc<JoinHandle<()>>, aborted: Arc<AtomicBool>, } impl<Impl> Agent<Impl> where Impl: Runloop, { pub async fn new(instance: Impl) -> Result<Self, Box<dyn std::error::Error>> { let inner = Arc::new(instance); let aborted = Arc::new(AtomicBool::new(false)); let inner_worker = inner.clone(); let aborted_worker = aborted.clone(); let worker = Arc::new(task::spawn(async move { inner_worker.setup().await; while !aborted_worker.load(Ordering::SeqCst) { match inner_worker.step().await { Repeat::Never => break, Repeat::Auto => task::yield_now().await, Repeat::After(duration) => time::sleep(duration).await, } } })); Ok(Agent { inner: inner.clone(), worker: worker, aborted: aborted, }) } pub async fn abort(self) { if !self.aborted.load(Ordering::Relaxed) { self.inner.teardown().await; self.aborted.store(true, Ordering::SeqCst); self.worker.abort(); } } } impl<Impl> std::ops::Deref for Agent<Impl> where Impl: Runloop + Send + Sync, { type Target = Impl; fn deref(&self) -> &Self::Target { &self.inner } } impl<Impl> 
std::future::Future for Agent<Impl> where Impl: Runloop, { type Output = (); fn poll( self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Self::Output> { match self.aborted.load(Ordering::Relaxed) { true => std::task::Poll::Ready(()), false => { cx.waker().wake_by_ref(); std::task::Poll::Pending } } } }
true
656fc988bca464ef25b2dd09065ad4663f2edaa6
Rust
Mark-Simulacrum/json-rust
/src/parser.rs
UTF-8
18,023
3.109375
3
[ "MIT" ]
permissive
use std::{ str, char, f64 }; use std::collections::BTreeMap; use { JsonValue, JsonError, JsonResult }; const MAX_PRECISION: u64 = 576460752303423500; struct Position { pub line: usize, pub column: usize, } struct Parser<'a> { source: &'a str, byte_ptr: *const u8, index: usize, length: usize, } macro_rules! expect_byte { ($parser:ident) => ({ if $parser.is_eof() { return Err(JsonError::UnexpectedEndOfJson); } let ch = $parser.read_byte(); $parser.bump(); ch }) } macro_rules! sequence { ($parser:ident, $( $ch:pat ),*) => { $( match expect_byte!($parser) { $ch => {}, ch => return $parser.unexpected_character(ch), } )* } } macro_rules! read_num { ($parser:ident, $num:ident, $then:expr) => { loop { if $parser.is_eof() { break; } let ch = $parser.read_byte(); match ch { b'0' ... b'9' => { $parser.bump(); let $num = ch - b'0'; $then; }, _ => break } } } } macro_rules! consume_whitespace { ($parser:ident, $ch:ident) => { match $ch { // whitespace 9 ... 13 | 32 => { loop { match expect_byte!($parser) { 9 ... 13 | 32 => {}, ch => { $ch = ch; break } } } }, _ => {} } } } macro_rules! expect { ($parser:ident, $byte:expr) => ({ let mut ch = expect_byte!($parser); consume_whitespace!($parser, ch); if ch != $byte { return $parser.unexpected_character(ch) } }); {$parser:ident $(, $byte:pat => $then:expr )*} => ({ let mut ch = expect_byte!($parser); consume_whitespace!($parser, ch); match ch { $( $byte => $then, )* _ => return $parser.unexpected_character(ch) } }) } const QU: bool = false; // double quote 0x22 const BS: bool = false; // backslash 0x5C const CT: bool = false; // control character 0x00 ... 0x1F const __: bool = true; // Look up table that marks which characters are allowed in their raw // form in a string. 
static ALLOWED: [bool; 256] = [ // 0 1 2 3 4 5 6 7 8 9 A B C D E F CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, // 0 CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, CT, // 1 __, __, QU, __, __, __, __, __, __, __, __, __, __, __, __, __, // 2 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 3 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 4 __, __, __, __, __, __, __, __, __, __, __, __, BS, __, __, __, // 5 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 6 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 7 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 8 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 9 __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // A __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // B __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // C __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // D __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // E __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // F ]; macro_rules! expect_string { ($parser:ident) => ({ let result: String;// = unsafe { mem::uninitialized() }; let start = $parser.index; loop { let ch = expect_byte!($parser); if ALLOWED[ch as usize] { continue; } if ch == b'"' { result = (&$parser.source[start .. 
$parser.index - 1]).into(); break; } if ch == b'\\' { result = try!($parser.read_complex_string(start)); break; } return $parser.unexpected_character(ch); } result }) } fn exponent_to_power(e: i32) -> f64 { static POWERS: [f64; 22] = [ 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, 1e20, 1e21, 1e22 ]; static NEG_POWERS: [f64; 22] = [ 1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-8, 1e-9, 1e-10, 1e-11, 1e-12, 1e-13, 1e-14, 1e-15, 1e-16, 1e-17, 1e-18, 1e-19, 1e-20, 1e-21, 1e-22 ]; let index = (e.abs() - 1) as usize; // index=0 is e=1 if index < 22 { if e < 0 { NEG_POWERS[index] } else { POWERS[index] } } else { // powf is more accurate 10f64.powf(e as f64) } } fn make_float(num: u64, e: i32) -> f64 { (num as f64) * exponent_to_power(e) } macro_rules! expect_number { ($parser:ident, $first:ident) => ({ let mut num = ($first - b'0') as u64; let result: f64; // Cap on how many iterations we do while reading to u64 // in order to avoid an overflow. loop { if num >= 576460752303423500 { result = try!($parser.read_big_number(num)); break; } if $parser.is_eof() { result = num as f64; break; } let ch = $parser.read_byte(); match ch { b'0' ... b'9' => { $parser.bump(); // Avoid multiplication with bitshifts and addition num = (num << 1) + (num << 3) + (ch - b'0') as u64; }, b'.' | b'e' | b'E' => { result = try!($parser.read_number_with_fraction(num, 0)); break; }, _ => { result = num as f64; break; } } } result }) } macro_rules! expect_value { {$parser:ident $(, $byte:pat => $then:expr )*} => ({ let mut ch = expect_byte!($parser); consume_whitespace!($parser, ch); match ch { $( $byte => $then, )* b'[' => JsonValue::Array(try!($parser.read_array())), b'{' => JsonValue::Object(try!($parser.read_object())), b'"' => JsonValue::String(expect_string!($parser)), b'0' => { let num = try!($parser.read_number_with_fraction(0, 0)); JsonValue::Number(num) }, b'1' ... 
b'9' => { let num = expect_number!($parser, ch); JsonValue::Number(num) }, b'-' => { let ch = expect_byte!($parser); let num = match ch { b'0' => try!($parser.read_number_with_fraction(0, 0)), b'1' ... b'9' => expect_number!($parser, ch), _ => return $parser.unexpected_character(ch) }; JsonValue::Number(-num) } b't' => { sequence!($parser, b'r', b'u', b'e'); JsonValue::Boolean(true) }, b'f' => { sequence!($parser, b'a', b'l', b's', b'e'); JsonValue::Boolean(false) }, b'n' => { sequence!($parser, b'u', b'l', b'l'); JsonValue::Null }, _ => return $parser.unexpected_character(ch) } }) } impl<'a> Parser<'a> { pub fn new(source: &'a str) -> Self { Parser { source: source, byte_ptr: source.as_ptr(), index: 0, length: source.len(), } } #[inline(always)] fn is_eof(&mut self) -> bool { self.index == self.length } #[inline(always)] fn read_byte(&mut self) -> u8 { unsafe { *self.byte_ptr.offset(self.index as isize) } } #[inline(always)] fn bump(&mut self) { self.index += 1; } fn source_position_from_index(&self, index: usize) -> Position { let (bytes, _) = self.source.split_at(index-1); Position { line: bytes.lines().count(), column: bytes.lines().last().map_or(1, |line| { line.chars().count() + 1 }) } } fn unexpected_character<T: Sized>(&mut self, byte: u8) -> JsonResult<T> { let pos = self.source_position_from_index(self.index); let ch = if byte & 0x80 != 0 { let mut buf = [byte,0,0,0]; let mut len = 0usize; if byte & 0xE0 == 0xCE { // 2 bytes, 11 bits len = 2; buf[1] = expect_byte!(self); } else if byte & 0xF0 == 0xE0 { // 3 bytes, 16 bits len = 3; buf[1] = expect_byte!(self); buf[2] = expect_byte!(self); } else if byte & 0xF8 == 0xF0 { // 4 bytes, 21 bits len = 4; buf[1] = expect_byte!(self); buf[2] = expect_byte!(self); buf[3] = expect_byte!(self); } let slice = try!( str::from_utf8(&buf[0..len]) .map_err(|_| JsonError::FailedUtf8Parsing) ); slice.chars().next().unwrap() } else { // codepoints < 128 are safe ASCII compatibles unsafe { char::from_u32_unchecked(byte as 
u32) } }; Err(JsonError::UnexpectedCharacter { ch: ch, line: pos.line, column: pos.column, }) } fn read_hexdec_digit(&mut self) -> JsonResult<u32> { let ch = expect_byte!(self); Ok(match ch { b'0' ... b'9' => (ch - b'0'), b'a' ... b'f' => (ch + 10 - b'a'), b'A' ... b'F' => (ch + 10 - b'A'), ch => return self.unexpected_character(ch), } as u32) } fn read_hexdec_codepoint(&mut self) -> JsonResult<u32> { Ok( try!(self.read_hexdec_digit()) << 12 | try!(self.read_hexdec_digit()) << 8 | try!(self.read_hexdec_digit()) << 4 | try!(self.read_hexdec_digit()) ) } fn read_codepoint(&mut self, buffer: &mut Vec<u8>) -> JsonResult<()> { let mut codepoint = try!(self.read_hexdec_codepoint()); match codepoint { 0x0000 ... 0xD7FF => {}, 0xD800 ... 0xDBFF => { codepoint -= 0xD800; codepoint <<= 10; sequence!(self, b'\\', b'u'); let lower = try!(self.read_hexdec_codepoint()); if let 0xDC00 ... 0xDFFF = lower { codepoint = (codepoint | lower - 0xDC00) + 0x010000; } else { return Err(JsonError::FailedUtf8Parsing) } }, 0xE000 ... 0xFFFF => {}, _ => return Err(JsonError::FailedUtf8Parsing) } match codepoint { 0x0000 ... 0x007F => buffer.push(codepoint as u8), 0x0080 ... 0x07FF => buffer.extend_from_slice(&[ (((codepoint >> 6) as u8) & 0x1F) | 0xC0, ((codepoint as u8) & 0x3F) | 0x80 ]), 0x0800 ... 0xFFFF => buffer.extend_from_slice(&[ (((codepoint >> 12) as u8) & 0x0F) | 0xE0, (((codepoint >> 6) as u8) & 0x3F) | 0x80, ((codepoint as u8) & 0x3F) | 0x80 ]), 0x10000 ... 0x10FFFF => buffer.extend_from_slice(&[ (((codepoint >> 18) as u8) & 0x07) | 0xF0, (((codepoint >> 12) as u8) & 0x3F) | 0x80, (((codepoint >> 6) as u8) & 0x3F) | 0x80, ((codepoint as u8) & 0x3F) | 0x80 ]), _ => return Err(JsonError::FailedUtf8Parsing) } Ok(()) } fn read_complex_string(&mut self, start: usize) -> JsonResult<String> { let mut buffer = Vec::new(); let mut ch = b'\\'; buffer.extend_from_slice(self.source[start .. 
self.index - 1].as_bytes()); loop { if ALLOWED[ch as usize] { buffer.push(ch); ch = expect_byte!(self); continue; } match ch { b'"' => break, b'\\' => { let escaped = expect_byte!(self); let escaped = match escaped { b'u' => { try!(self.read_codepoint(&mut buffer)); ch = expect_byte!(self); continue; }, b'"' | b'\\' | b'/' => escaped, b'b' => 0x8, b'f' => 0xC, b't' => b'\t', b'r' => b'\r', b'n' => b'\n', _ => return self.unexpected_character(escaped) }; buffer.push(escaped); }, _ => return self.unexpected_character(ch) } ch = expect_byte!(self); } // Since the original source is already valid UTF-8, and `\` // cannot occur in front of a codepoint > 127, this is safe. Ok(unsafe { String::from_utf8_unchecked(buffer) }) } fn read_big_number(&mut self, num: u64) -> JsonResult<f64> { // Attempt to continue reading digits that would overflow // u64 into freshly converted f64 let mut e = 0i32; loop { if self.is_eof() { return Ok(make_float(num, e)); } match self.read_byte() { b'0' ... b'9' => { self.bump(); e += 1; }, _ => break } } self.read_number_with_fraction(num, e) } fn read_number_with_fraction(&mut self, mut num: u64, mut e: i32) -> JsonResult<f64> { if self.is_eof() { return Ok(make_float(num, e)); } let mut ch = self.read_byte(); if ch == b'.' { self.bump(); loop { if self.is_eof() { return Ok(make_float(num, e)); } ch = self.read_byte(); match ch { b'0' ... b'9' => { self.bump(); if num < MAX_PRECISION { num = (num << 3) + (num << 1) + (ch - b'0') as u64; e -= 1; } }, _ => break } } } if ch == b'e' || ch == b'E' { self.bump(); ch = expect_byte!(self); let sign = match ch { b'-' => { ch = expect_byte!(self); -1 }, b'+' => { ch = expect_byte!(self); 1 }, _ => 1 }; let num = make_float(num, e); let mut e = match ch { b'0' ... 
b'9' => (ch - b'0') as i32, _ => return self.unexpected_character(ch), }; read_num!(self, digit, e = (e << 3) + (e << 1) + digit as i32); return Ok(num * exponent_to_power(e * sign)); } Ok(make_float(num, e)) } fn read_object(&mut self) -> JsonResult<BTreeMap<String, JsonValue>> { let mut object = BTreeMap::new(); let key = expect!{ self, b'}' => return Ok(object), b'\"' => expect_string!(self) }; expect!(self, b':'); object.insert(key, expect_value!(self)); loop { let key = expect!{ self, b'}' => break, b',' => { expect!(self, b'"'); expect_string!(self) } }; expect!(self, b':'); object.insert(key, expect_value!(self)); } Ok(object) } fn read_array(&mut self) -> JsonResult<Vec<JsonValue>> { let first = expect_value!{ self, b']' => return Ok(Vec::new()) }; let mut array = Vec::with_capacity(20); array.push(first); loop { expect!{ self, b']' => break, b',' => { let value = expect_value!(self); array.push(value); } }; } Ok(array) } fn ensure_end(&mut self) -> JsonResult<()> { while !self.is_eof() { match self.read_byte() { 9 ... 13 | 32 => self.bump(), ch => { self.bump(); return self.unexpected_character(ch); } } } Ok(()) } fn value(&mut self) -> JsonResult<JsonValue> { Ok(expect_value!(self)) } } pub fn parse(source: &str) -> JsonResult<JsonValue> { let mut parser = Parser::new(source); let value = try!(parser.value()); try!(parser.ensure_end()); Ok(value) }
true
3039db0058702fb10b3bbb05d37f1e51fa78a31c
Rust
jasl/bitvec-sgx
/src/lib.rs
UTF-8
3,473
3.109375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
/*! `bitvec` – `[bool]` in overdrive. This crate provides views into slices of bits that are truly `[u1]`. Each bit in the data segment is used, unlike `[bool]` which ignores seven bits out of every byte. `bitvec`’s data structures provide strong guarantees about, and fine-grained control of, the bit-level representation of a sequence of memory. The user is empowered to choose the fundamental type underlying the store – `u8`, `u16`, `u32`, or `u64` – and the order in which each primitive is traversed – big-endian, from the most significant bit to the least, or little-endian, from the least significant bit to the most. This level of control is not necessary for most use cases where users just want to put bits in a sequence, but it is critically important for users making packets that leave main memory and hit some external device like a peripheral controller or a network socket. In order to provide convencienc to users for whom the storage details do not matter, `bitvec` types default to using big-endian bit order on `u8`. This means that the bits you would write down on paper match up with the bits as they are stored in memory. For example, the bit sequence `[0, 1, 1, 0, 1, 0, 0, 1]` inserted into `bitvec` structures with no extra type specification will produce the `<BigEndian, u8>` variant, so the bits in memory are `0b01101001`. With little-endian bit order, the memory value would be `0b10010110` (reversed order!). In addition to providing compact, efficient, and powerful storage and manipulation of bits in memory, the `bitvec` structures are capable of acting as a queue, set, or stream of bits. They implement the bit-wise operators for Boolean arithmetic, arithmetic operators for 2’s-complement numeric arithmetic, read indexing, bit shifts, and access to the underlying storage fundamental elements as a slice. (Write indexing is impossible in Rust semantics.) 
!*/ #![cfg_attr(not(feature = "std"), no_std)] #[cfg(feature = "alloc")] extern crate alloc; #[cfg(feature = "std")] extern crate core; #[cfg(feature = "serde")] extern crate serde; #[cfg(all(test, feature = "serde"))] extern crate serde_test; #[macro_use] mod macros; mod access; pub mod bits; pub mod cursor; mod domain; pub mod indices; mod pointer; pub mod prelude; pub mod slice; pub mod store; #[cfg(feature = "alloc")] pub mod boxed; #[cfg(feature = "alloc")] pub mod vec; #[cfg(feature = "serde")] mod serdes; /// Expose crate internals for use in doctests and external tests. #[cfg(feature = "testing")] pub mod testing { pub use crate::{ atomic::*, bits::*, boxed::*, cursor::*, domain::*, macros::*, pointer::*, slice::*, store::*, vec::*, }; } /** Perform single-bit ripple-carry addition. This function performs carry-aware binary addition on single bits of each addend. It is used in multiple places throughout the library, and so is pulled here for deduplication. # Parameters - `a: bool`: One bit of addend. - `b: bool`: One bit of addend. - `c: bool`: The carry-bit input. # Returns - `.0: bool`: The sum of `a + b + c`. - `.1: bool`: The carry-out of `a + b + c`. **/ #[inline] fn rca1(a: bool, b: bool, c: bool) -> (bool, bool) { // Ripple-carry addition is a reduction operation from three bits of input // (a, b, carry-in) to two outputs (sum, carry-out). // Compute the sum from left, right and carry-in let yz = a as u8 + b as u8 + c as u8; // Split them (yz & 0b01 != 0, yz & 0b10 != 0) }
true
23782fcdd34e34db62b1f869071dbbfe2c555461
Rust
james-lomax/AdventofCode2019
/day_13/src/game.rs
UTF-8
3,859
2.859375
3
[]
no_license
use std::sync::mpsc::{Receiver, Sender}; use std::collections::HashMap; use std::time::{Duration, Instant}; use quicksilver::{ Result, geom::{Circle, Line, Rectangle, Transform, Triangle, Vector}, graphics::{Background::Col, Color}, lifecycle::{Settings, State, Window, run_with}, input::{Key, ButtonState} }; const WALL: i32 = 1; const BLOCK: i32 = 2; const PADDLE: i32 = 3; const BALL: i32 = 4; pub struct DrawGeometry { itx: Sender<i32>, orx: Receiver<(i32, i32, i32)>, screen: HashMap<(i32, i32), i32>, score: i32, last_in_time: Instant, last_input: i32, auto: bool, update_counter: i32 } impl DrawGeometry { fn with_rx(itx: Sender<i32>, orx: Receiver<(i32, i32, i32)>) -> Result<Self> { Ok(Self { itx: itx, orx: orx, screen: HashMap::new(), score: 0, last_in_time: Instant::now(), last_input: 100, auto: false, update_counter: 0 }) } fn send_input(&mut self, j: i32) { let elapsed = Instant::now() - self.last_in_time; if j != self.last_input || elapsed > Duration::from_millis(250) { self.last_in_time = Instant::now(); self.last_input = j; self.itx.send(j).unwrap(); } } fn find(&mut self, block: i32) -> Vec<(i32, i32)> { self.screen.iter() .filter(|(_, b)| **b == block) .map(|(v, _)| *v) .collect() } } impl State for DrawGeometry { fn new() -> Result<Self> { Err(quicksilver::Error::ContextError("Ehajd".to_string())) } fn draw(&mut self, window: &mut Window) -> Result<()> { // Receive updated positions while let Ok((x, y, b)) = self.orx.try_recv() { if x < 0 { self.score = b; println!("Score = {}", self.score); } else if y < 0 { window.close(); } else { self.screen.insert((x, y), b); if b == BALL || b == PADDLE { self.update_counter += 1; } } } // Check inputs and send to the machine if window.keyboard()[Key::Left] == ButtonState::Held { self.send_input(-1); } else if window.keyboard()[Key::Right] == ButtonState::Held { self.send_input(1); } else if window.keyboard()[Key::Space] == ButtonState::Held { self.send_input(0); } else if window.keyboard()[Key::A] == 
ButtonState::Held { self.auto = true; } // Automatically play the program if self.auto && self.update_counter >= 2 { let ball = self.find(BALL)[0]; let paddle = self.find(PADDLE)[0]; if ball.0 > paddle.0 { self.itx.send(1).unwrap(); } else if ball.0 < paddle.0 { self.itx.send(-1).unwrap(); } else { self.itx.send(0).unwrap(); } self.update_counter = 0; } // 20 pixel unit (block length) size let unit = 15; window.clear(Color::BLACK)?; // Draw screen for ((x, y), b) in self.screen.iter() { let col = match *b { WALL => Color::WHITE, BLOCK => Color::GREEN, PADDLE => Color::BLUE, BALL => Color::RED, _ => Color::BLACK }; window.draw(&Rectangle::new((x * unit, y * unit), (unit, unit)), Col(col)); } Ok(()) } } pub fn start(itx: Sender<i32>, orx: Receiver<(i32, i32, i32)>) { run_with("Draw", Vector::new(800, 800), Settings::default(), move || { DrawGeometry::with_rx(itx, orx) }); }
true
276c38b41096ce82713f15c107617ed1fe48b331
Rust
UP934378/SoftEngProject_Pegasus
/src/request.rs
UTF-8
4,456
2.984375
3
[ "Apache-2.0" ]
permissive
//! Request data information from data probe connected to the network use log::{debug, error, log_enabled, info, Level}; use tokio::runtime::Runtime; /// Read data url from ssdp client response pub fn get_data_url(rt: &Runtime, response: &ssdp_client::SearchResponse) -> Option<String> { let http_client = reqwest::Client::new(); // Request schema from probe let schema_tree = match reqwest::Url::parse(response.location()) { Ok(schema_url) => { match rt.block_on(http_client.get(schema_url).send()) { Ok(a) => { match rt.block_on(a.text()) { Ok(schema_text) => { match xmltree::Element::parse(schema_text.as_bytes()){ Ok(xml) => Some(xml), // TODO: Add error handling Err(e) => { error!("{}", e); None } } }, // TODO: Add error handling Err(e) => { error!("{}", e); None } } }, // TODO: Add error handling Err(e) => { error!("{}", e); None } } }, // TODO: Add error handling Err(e) => { error!("{}", e); None } }; match schema_tree { Some(ref st) => parse_presentation_url(st), None => None } } /// Extract data url from XML tree fn parse_presentation_url(schema: &xmltree::Element) -> Option<String> { let base_url = match schema.get_child("URLBase") { Some(base) => match base.get_text() { Some(url) => (*url).to_string(), None => return None }, None => return None }; match schema.get_child("device") { Some(device) => { match device.get_child("presentationURL") { Some(presentation_url) => match presentation_url.get_text() { Some(url) => Some(base_url + &*url), None => None }, None => None } }, None => None } } /// Make data request from URL pub fn make_request(url: &str, rt: &Runtime) -> Result<String, Box<dyn std::error::Error>> { let http_client = reqwest::Client::new(); let request_url = match reqwest::Url::parse(url) { Ok(r) => r, Err(e) => { error!("parsing url: {} produced error: {}", url, e); return Err(Box::new(e)); } }; let response = match rt.block_on(http_client.get(request_url).send()){ Ok(r) => r, Err(e) => { error!("{}", e); return Err(Box::new(e)); } }; let 
json_string = match rt.block_on(response.text()) { Ok(r) => r, Err(e) => { error!("{}", e); return Err(Box::new(e)); } }; Ok(json_string.clone()) } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_presentation_url() -> Result<(), xmltree::ParseError> { let case1 = xmltree::Element::parse("<root></root>".as_bytes())?; let case2 = xmltree::Element::parse("<root><notdevice><child1></child1><presentationURL>sometext</presentationURL></notdevice></root>".as_bytes())?; let case3 = xmltree::Element::parse("<root><device><child1></child1><child2>sometext</child2></device></root>".as_bytes())?; let case4 = xmltree::Element::parse("<root><device><child1></child1><presentationURL>sometext</presentationURL></device></root>".as_bytes())?; let case5 = xmltree::Element::parse("<root><URLBase>URLBasetext</URLBase><device><child1></child1><presentationURL>sometext</presentationURL></device></root>".as_bytes())?; let text: Option<String> = Some("URLBasetextsometext".to_string()); assert_eq!(None, parse_presentation_url(&case1)); assert_eq!(None, parse_presentation_url(&case2)); assert_eq!(None, parse_presentation_url(&case3)); assert_eq!(None, parse_presentation_url(&case4)); assert_eq!(text, parse_presentation_url(&case5)); Ok(()) } }
true
887de7b79b46a2392fd5f6b7576e457d1f615c9a
Rust
katharostech/parry
/src/query/clip/clip_halfspace_polygon.rs
UTF-8
1,559
3.140625
3
[ "Apache-2.0" ]
permissive
use crate::math::{Point, Real, Vector}; use crate::query::{self, Ray}; /// Cuts a polygon with the given half-space. /// /// Given the half-space `center` and outward `normal`, /// this computes the intersecting between the half-space and /// the polygon. (Note that a point `pt` is considered as inside of /// the half-space if `normal.dot(&(pt - center)) <= 0.0`. pub fn clip_halfspace_polygon( center: &Point<Real>, normal: &Vector<Real>, polygon: &[Point<Real>], result: &mut Vec<Point<Real>>, ) { result.clear(); if polygon.is_empty() { return; } let keep_point = |pt: &Point<Real>| (pt - center).dot(normal) <= 0.0; let last_pt = polygon.last().unwrap(); let mut last_keep = keep_point(last_pt); if last_keep { result.push(*last_pt); } for i in 0..polygon.len() { let pt = &polygon[i]; let keep = keep_point(pt); if keep != last_keep { // We crossed the plane, so we need // to cut the edge. let prev_i = if i == 0 { polygon.len() - 1 } else { i - 1 }; let prev_pt = &polygon[prev_i]; let ray = Ray::new(*prev_pt, pt - prev_pt); if let Some(toi) = query::details::ray_toi_with_halfspace(&center, normal, &ray) { if toi > 0.0 && toi < 1.0 { result.push(ray.origin + ray.dir * toi) } } last_keep = keep; } if keep && i != polygon.len() - 1 { result.push(*pt); } } }
true
b4d05bdd6215133a75c042a86f0d17abf243887e
Rust
oberien/logitech-g910-handler-rs
/src/heatmap.rs
UTF-8
3,724
2.953125
3
[ "Apache-2.0", "MIT" ]
permissive
use std::collections::HashMap; use libusb::Result as UsbResult; use g910::*; pub struct HeatmapHandler { heatmap: Heatmap, } impl HeatmapHandler { pub fn new() -> HeatmapHandler { HeatmapHandler { heatmap: Heatmap::new(), } } fn init(&mut self, keyboard: &mut Keyboard) -> UsbResult<()> { keyboard.set_all_colors(Color::new(0, 0, 0)) } fn accept_key(&self, evt: &KeyEvent) -> bool { match evt { // we can't set colors of media keys &KeyEvent::KeyPressed(Key::Media(_)) => false, &KeyEvent::KeyPressed(_) => true, _ => false } } fn handle_key(&mut self, evt: &KeyEvent, keyboard: &mut Keyboard) -> UsbResult<()> { let key = match evt { &KeyEvent::KeyPressed(ref key) => key, _ => unreachable!() }; self.heatmap.increment(key); keyboard.set_key_colors(self.heatmap.colors()) } } impl From<HeatmapHandler> for Handler { fn from(handler: HeatmapHandler) -> Handler { HandlerBuilder::new(handler) .init_fn(|handler, keyboard| handler.init(keyboard)) .accept_key_fn(|handler, evt| handler.accept_key(evt)) .handle_key_fn(|handler, evt, keyboard| handler.handle_key(evt, keyboard)) .build() } } const GRADIENT: [Color; 6] = [ Color { red: 0, green: 0, blue: 0 }, Color { red: 0, green: 0, blue: 255 }, Color { red: 0, green: 255, blue: 255 }, Color { red: 0, green: 255, blue: 0 }, Color { red: 255, green: 255, blue: 0 }, Color { red: 255, green: 0, blue: 0 }, ]; pub struct Heatmap { data: HashMap<Key, u64>, } impl Heatmap { pub fn new() -> Heatmap { let mut data = HashMap::new(); for key in Key::values() { match key { // we can't set the color of media keys Key::Media(_) => {}, // and we don't want to set in for Logos Key::Logo(_) => {}, k => { data.insert(k, 0); }, } } Heatmap { data: data, } } pub fn increment(&mut self, key: &Key) { match self.data.get_mut(&key) { Some(mut count) => *count += 1, None => unreachable!() } } /// Six Color Gradient: /// (1) black, (2) blue, (3) cyan, (4) green, (5) yellow, (6) red /// (http://www.andrewnoske.com/wiki/Code_-_heatmaps_and_color_gradients) pub 
fn colors<'a>(&'a self) -> Vec<KeyColor> { let max = match self.data.iter().map(|(_, v)| v).max() { Some(max) => max, None => unreachable!() }; self.data.iter().map(|(k, v)| { let color; let v_scaled = *v as f64 / *max as f64; if v_scaled <= 0f64 { color = GRADIENT[0]; } else if v_scaled >= 1f64 { color = GRADIENT[GRADIENT.len()-1]; } else { let idx = (v_scaled * (GRADIENT.len()-1) as f64) as usize; let diff = (v_scaled * (GRADIENT.len()-1) as f64) - idx as f64; color = Color::new( ((((GRADIENT[idx+1].red as i16 - GRADIENT[idx].red as i16) as f64) * diff) as i16 + GRADIENT[idx].red as i16) as u8, ((((GRADIENT[idx+1].green as i16 - GRADIENT[idx].green as i16) as f64) * diff) as i16 + GRADIENT[idx].green as i16) as u8, ((((GRADIENT[idx+1].blue as i16 - GRADIENT[idx].blue as i16) as f64) * diff) as i16 + GRADIENT[idx].blue as i16) as u8, ); } KeyColor::new(k.clone(), color) }).collect() } }
true
69579ee73c3a29ebad38e779e5dad2ef2e05b1cd
Rust
iha2/advent-of-code-2019
/rust-solutions/src/main.rs
UTF-8
586
3.078125
3
[]
no_license
use std::fs;

/// Fuel required to lift a module of the given mass: mass / 3 - 2,
/// computed as a real quotient.
///
/// NOTE(review): Advent of Code day 1 floors the quotient before
/// subtracting 2; this keeps the original fractional behaviour so the
/// function's results are unchanged — confirm which is intended.
fn calculate_fuel(mass: i32) -> f64 {
    f64::from(mass) / 3.0 - 2.0
}

/// Reads the puzzle input (one module mass per line), prints the fuel
/// needed per module and the running total.
///
/// Rewritten: the original body did not compile — it called `map` and
/// `remove` on a `Vec`, and iterated over an undefined `files`
/// binding — and never used `calculate_fuel`.
fn main() {
    let file_contents =
        fs::read_to_string("inputs/input-1-2019.txt").expect("File import does not work");

    let mut total = 0.0;
    // Skip blank lines (e.g. a trailing newline) before parsing.
    for line in file_contents.lines().filter(|l| !l.trim().is_empty()) {
        match line.trim().parse::<i32>() {
            Ok(mass) => {
                let fuel = calculate_fuel(mass);
                total += fuel;
                println!("{}", fuel);
            }
            Err(e) => println!("{}", e),
        }
    }
    println!("Total fuel: {}", total);
}
true
d88bd0fb43b2653e94a8a2f804402400011e4c78
Rust
jbro885/domain
/src/bits/name/iter.rs
UTF-8
11,757
3.25
3
[ "MIT" ]
permissive
//! Iterators for domain names. use std::mem; use std::borrow::Cow; use std::collections::VecDeque; use super::label::Label; use super::{DNameBuf, DNameSlice, Labelette, LabelIter, ParsedDName}; //------------ NameLabels ---------------------------------------------------- /// An iterator over the labels in a domain name. /// /// This type can work with both compressed and uncompressed domain names. /// It forms the foundation of the [`DName`] trait. /// /// [`DName`]: trait.DName.html #[derive(Clone, Debug)] pub struct NameLabels<'a> { inner: Flavor<'a>, } /// What sort of iterator are we? #[derive(Clone, Debug)] enum Flavor<'a> { /// Iterating forward only over a domain name slice. Slice(&'a DNameSlice), /// Iterating forward only over a parsed domain name. Parsed(ParsedDName<'a>), /// Iterating both ways over a domain name slice. DoubleSlice { /// The remaining slice. /// /// We only keep it for the `to_cow()` method. By storing the raw /// bytes we can shorten it quickly by using the length of the labels /// only. bytes: &'a [u8], /// The labels of the remaining name slice. labels: VecDeque<&'a Label>, }, /// Iterating both ways over a parsed domain name. /// /// This will only be used if the parsed name contains compressed labels. /// If it does not, starting two-way operation will transform it into /// the `DoubleSlice` flavor. DoubleParsed(VecDeque<&'a Label>), /// Done iterating. Empty } impl<'a> NameLabels<'a> { /// Creates an iterator for a domain name slice. pub fn from_slice(slice: &'a DNameSlice) -> Self { NameLabels{inner: Flavor::Slice(slice)} } /// Creates an iterator for a parsed domain name. pub fn from_parsed(name: ParsedDName<'a>) -> Self { NameLabels{inner: Flavor::Parsed(name)} } /// Returns a cow of the remaining labels in the domain name. 
pub fn to_cow(&self) -> Cow<'a, DNameSlice> { match self.inner { Flavor::Slice(slice) => { Cow::Borrowed(slice) } Flavor::Parsed(ref name) => { name.unpack() } Flavor::DoubleSlice{bytes, ..} => { Cow::Borrowed(unsafe { DNameSlice::from_bytes_unsafe(bytes) }) } Flavor::DoubleParsed(ref labels) => { Cow::Owned(DNameBuf::try_from_iter(labels.iter().map(|x| *x)) .unwrap()) } Flavor::Empty => Cow::Borrowed(DNameSlice::empty()) } } /// Ensures the iterator is ready for double ended iterating. fn ensure_double(&mut self) { let new_inner = match self.inner { Flavor::Slice(slice) => { Flavor::DoubleSlice { bytes: slice.as_bytes(), labels: slice.labels().collect(), } } Flavor::Parsed(ref name) => { if let Some(slice) = name.as_slice() { Flavor::DoubleSlice { bytes: slice.as_bytes(), labels: slice.labels().collect(), } } else { Flavor::DoubleParsed(name.labels().collect()) } } _ => return }; self.inner = new_inner; } } //--- Iterator impl<'a> Iterator for NameLabels<'a> { type Item = &'a Label; fn next(&mut self) -> Option<Self::Item> { let (res, new_inner) = match self.inner { Flavor::Slice(ref mut slice) => { match slice.split_first() { Some((label, name)) => { *slice = name; return Some(label) } None => (None, Flavor::Empty) } } Flavor::Parsed(ref mut name) => { let (res, new_name) = name.split_first().unwrap(); if let Some(new_name) = new_name { *name = new_name; return Some(res) } (Some(res), Flavor::Empty) } Flavor::DoubleSlice{ref mut bytes, ref mut labels} => { match labels.pop_front() { Some(label) => { *bytes = &bytes[label.len()..]; return Some(label) } None => (None, Flavor::Empty) } } Flavor::DoubleParsed(ref mut labels) => { match labels.pop_front() { Some(label) => return Some(label), None => (None, Flavor::Empty) } } Flavor::Empty => return None, }; self.inner = new_inner; res } } //--- DoubleEndedIterator impl<'a> DoubleEndedIterator for NameLabels<'a> { fn next_back(&mut self) -> Option<Self::Item> { self.ensure_double(); let (res, new_inner) = match 
self.inner { Flavor::Slice(..) | Flavor::Parsed(..) => unreachable!(), Flavor::DoubleSlice{ref mut bytes, ref mut labels} => { match labels.pop_back() { Some(label) => { *bytes = &bytes[..bytes.len() - label.len()]; return Some(label) } None => (None, Flavor::Empty) } } Flavor::DoubleParsed(ref mut labels) => { match labels.pop_back() { Some(label) => return Some(label), None => (None, Flavor::Empty) } } Flavor::Empty => return None, }; self.inner = new_inner; res } } //------------ NameLabelettes ------------------------------------------------ /// An iterator over the labelettes of a domain name. /// /// See [`Labelette`] for a discussion what these ominous labelettes are. /// /// [`Labelette`]: struct.Labelette.html #[derive(Clone, Debug)] pub struct NameLabelettes<'a> { name: NameLabels<'a>, label: DoubleLabels<'a>, } impl<'a> NameLabelettes<'a> { /// Creates a new labelette iterator from a label iterator. pub fn new(iter: NameLabels<'a>) -> Self { NameLabelettes{name: iter, label: DoubleLabels::new()} } /// Creates a domain name with the remaining labelettes. 
pub fn to_name(&self) -> Cow<'a, DNameSlice> { if let Some(cow) = self.label.front().and_then(|iter| iter.to_name()) { let mut name = cow.into_owned(); name.append_iter(self.name.clone()).unwrap(); self.label.back() .map(|iter| iter.push_name(&mut name).unwrap()); Cow::Owned(name) } else { let res = self.name.to_cow(); if let Some(iter) = self.label.back() { let mut res = res.into_owned(); iter.push_name(&mut res).unwrap(); Cow::Owned(res) } else { res } } } } impl<'a> Iterator for NameLabelettes<'a> { type Item = Labelette<'a>; fn next(&mut self) -> Option<Self::Item> { loop { if let Some(ref mut label) = self.label.front_mut() { if let Some(x) = label.next() { return Some(x) } } if !self.label.next(self.name.next()) { return None } } } } impl<'a> DoubleEndedIterator for NameLabelettes<'a> { fn next_back(&mut self) -> Option<Self::Item> { loop { if let Some(ref mut label) = self.label.back_mut() { if let Some(x) = label.next_back() { return Some(x) } } if !self.label.next_back(self.name.next_back()) { return None } } } } //------------ DoubleLabels -------------------------------------------------- /// The labels the labelette iterator operates currently on. #[derive(Clone, Debug)] enum DoubleLabels<'a> { /// Neither front nor back None, /// Front label, no back label Front(LabelIter<'a>), /// No front label but a back label Back(LabelIter<'a>), /// Both but different Both(LabelIter<'a>, LabelIter<'a>), /// Both on the same label Same(LabelIter<'a>), } impl<'a> DoubleLabels<'a> { fn new() -> Self { DoubleLabels::None } fn front(&self) -> Option<&LabelIter<'a>> { match *self { DoubleLabels::None | DoubleLabels::Back(..) => None, DoubleLabels::Front(ref front) | DoubleLabels::Both(ref front, _) => Some(front), DoubleLabels::Same(ref same) => Some(same) } } fn front_mut(&mut self) -> Option<&mut LabelIter<'a>> { match *self { DoubleLabels::None | DoubleLabels::Back(..) 
=> None, DoubleLabels::Front(ref mut front) | DoubleLabels::Both(ref mut front, _) => Some(front), DoubleLabels::Same(ref mut same) => Some(same) } } fn back(&self) -> Option<&LabelIter<'a>> { match *self { DoubleLabels::None | DoubleLabels::Front(..) => None, DoubleLabels::Back(ref back) | DoubleLabels::Both(_, ref back) => Some(back), DoubleLabels::Same(ref same) => Some(same) } } fn back_mut(&mut self) -> Option<&mut LabelIter<'a>> { match *self { DoubleLabels::None | DoubleLabels::Front(..) => None, DoubleLabels::Back(ref mut back) | DoubleLabels::Both(_, ref mut back) => Some(back), DoubleLabels::Same(ref mut same) => Some(same) } } fn next(&mut self, front: Option<&'a Label>) -> bool { if let Some(front) = front { let front = front.iter(); *self = match mem::replace(self, DoubleLabels::None) { DoubleLabels::None | DoubleLabels::Front(_) => DoubleLabels::Front(front), DoubleLabels::Back(back) | DoubleLabels::Both(_, back) => DoubleLabels::Both(front, back), DoubleLabels::Same(_) => unreachable!(), }; true } else { let (res, new) = match mem::replace(self, DoubleLabels::None) { DoubleLabels::Both(_, back) => (true, DoubleLabels::Same(back)), _ => (false, DoubleLabels::None) }; *self = new; res } } fn next_back(&mut self, back: Option<&'a Label>) -> bool { if let Some(back) = back { let back = back.iter(); *self = match mem::replace(self, DoubleLabels::None) { DoubleLabels::None | DoubleLabels::Back(_) => DoubleLabels::Back(back), DoubleLabels::Front(front) | DoubleLabels::Both(front, _) => DoubleLabels::Both(front, back), DoubleLabels::Same(_) => unreachable!() }; true } else { let (res, new) = match mem::replace(self, DoubleLabels::None) { DoubleLabels::Both(front, _) => (true, DoubleLabels::Same(front)), _ => (false, DoubleLabels::None) }; *self = new; res } } }
true
d7988afd7999d85283300355cdd3495800dcf767
Rust
bouzuya/rust-atcoder
/cargo-atcoder/contests/abc143/src/bin/b.rs
UTF-8
302
2.65625
3
[]
no_license
use proconio::input; fn main() { input! { n: usize, d: [i64; n], }; let mut sum = 0_i64; for i in 0..n { let x = d[i]; for j in i + 1..n { let y = d[j]; sum += x * y; } } let ans = sum; println!("{}", ans); }
true
ba0991009f8c1016cf0417a0d968d7b80c2b723c
Rust
alekratz/Cursive
/src/views/stack_view.rs
UTF-8
8,011
3.1875
3
[ "MIT" ]
permissive
use Printer; use With; use direction::Direction; use event::{Event, EventResult}; use std::any::Any; use std::ops::Deref; use theme::ColorStyle; use vec::Vec2; use view::{Offset, Position, Selector, View}; use views::{Layer, ShadowView}; /// Simple stack of views. /// Only the top-most view is active and can receive input. pub struct StackView { layers: Vec<Child>, last_size: Vec2, } enum Placement { Floating(Position), Fullscreen, } impl Placement { pub fn compute_offset<S, A, P>( &self, size: S, available: A, parent: P ) -> Vec2 where S: Into<Vec2>, A: Into<Vec2>, P: Into<Vec2>, { match *self { Placement::Floating(ref position) => { position.compute_offset(size, available, parent) } Placement::Fullscreen => Vec2::zero(), } } } struct Child { view: Box<View>, size: Vec2, placement: Placement, // We cannot call `take_focus` until we've called `layout()` // (for instance, a textView must know it will scroll to be focusable). // So we want to call `take_focus` right after the first call to `layout`. // This flag remembers when we've done that. virgin: bool, } new_default!(StackView); impl StackView { /// Creates a new empty StackView pub fn new() -> Self { StackView { layers: Vec::new(), last_size: Vec2::zero(), } } /// Adds a new full-screen layer on top of the stack. /// /// Fullscreen layers have no shadow. pub fn add_fullscreen_layer<T>(&mut self, view: T) where T: 'static + View, { self.layers.push(Child { view: Box::new(Layer::new(view)), size: Vec2::zero(), placement: Placement::Fullscreen, virgin: true, }); } /// Adds new view on top of the stack in the center of the screen. pub fn add_layer<T>(&mut self, view: T) where T: 'static + View, { self.add_layer_at(Position::center(), view); } /// Adds new view on top of the stack in the center of the screen. /// /// Chainable variant. pub fn layer<T>(self, view: T) -> Self where T: 'static + View, { self.with(|s| s.add_layer(view)) } /// Adds a new full-screen layer on top of the stack. /// /// Chainable variant. 
pub fn fullscreen_layer<T>(self, view: T) -> Self where T: 'static + View, { self.with(|s| s.add_fullscreen_layer(view)) } /// Adds a view on top of the stack. pub fn add_layer_at<V, T>(&mut self, position: Position, view: T) where V: View + 'static, T: Into<Box<V>>, { self.layers.push(Child { // Skip padding for absolute/parent-placed views view: Box::new( ShadowView::new(Layer::new(*view.into())) .top_padding(position.y == Offset::Center) .left_padding(position.x == Offset::Center), ), size: Vec2::new(0, 0), placement: Placement::Floating(position), virgin: true, }); } /// Adds a view on top of the stack. /// /// Chainable variant. pub fn layer_at<T>(self, position: Position, view: T) -> Self where T: 'static + View, { self.with(|s| s.add_layer_at(position, view)) } /// Attempts to remove the top-most layer, returning it if there was one. pub fn pop_layer(&mut self) -> Option<Box<View>> { self.layers.pop() .map(|child| { child.view }) } /// Computes the offset of the current top view. pub fn offset(&self) -> Vec2 { let mut previous = Vec2::zero(); for layer in &self.layers { let offset = layer.placement.compute_offset( layer.size, self.last_size, previous, ); previous = offset; } previous } /// Returns the size for each layer in this view. 
pub fn layer_sizes(&self) -> Vec<Vec2> { self.layers.iter().map(|layer| layer.size).collect() } } struct StackPositionIterator<R: Deref<Target = Child>, I: Iterator<Item = R>> { inner: I, previous: Vec2, total_size: Vec2, } impl<R: Deref<Target = Child>, I: Iterator<Item = R>> StackPositionIterator<R, I> { /// Returns a new StackPositionIterator pub fn new(inner: I, total_size: Vec2) -> Self { let previous = Vec2::zero(); StackPositionIterator { inner, previous, total_size, } } } impl<R: Deref<Target = Child>, I: Iterator<Item = R>> Iterator for StackPositionIterator<R, I> { type Item = (R, Vec2); fn next(&mut self) -> Option<(R, Vec2)> { self.inner.next().map(|v| { let offset = v.placement.compute_offset( v.size, self.total_size, self.previous, ); self.previous = offset; // eprintln!("{:?}", offset); (v, offset) }) } } impl View for StackView { fn draw(&self, printer: &Printer) { let last = self.layers.len(); printer.with_color(ColorStyle::Primary, |printer| { for (i, (v, offset)) in StackPositionIterator::new(self.layers.iter(), printer.size) .enumerate() { v.view .draw(&printer.sub_printer(offset, v.size, i + 1 == last)); } }); } fn on_event(&mut self, event: Event) -> EventResult { // Use the stack position iterator to get the offset of the top layer. // TODO: save it instead when drawing? match StackPositionIterator::new( self.layers.iter_mut(), self.last_size, ).last() { None => EventResult::Ignored, Some((v, offset)) => v.view.on_event(event.relativized(offset)), } } fn layout(&mut self, size: Vec2) { self.last_size = size; // The call has been made, we can't ask for more space anymore. // Let's make do with what we have. for layer in &mut self.layers { // Give each guy what he asks for, within the budget constraints. let size = Vec2::min(size, layer.view.required_size(size)); layer.size = size; layer.view.layout(layer.size); // We need to call `layout()` on the view before giving it focus // for the first time. Otherwise it will not be properly set up. 
// Ex: examples/lorem.rs: the text view takes focus because it's // scrolling, but it only knows that after a call to `layout()`. if layer.virgin { layer.view.take_focus(Direction::none()); layer.virgin = false; } } } fn required_size(&mut self, size: Vec2) -> Vec2 { // The min size is the max of all children's self.layers .iter_mut() .map(|layer| layer.view.required_size(size)) .fold(Vec2::new(1, 1), Vec2::max) } fn take_focus(&mut self, source: Direction) -> bool { match self.layers.last_mut() { None => false, Some(v) => v.view.take_focus(source), } } fn call_on_any<'a>( &mut self, selector: &Selector, mut callback: Box<FnMut(&mut Any) + 'a>, ) { for layer in &mut self.layers { layer .view .call_on_any(selector, Box::new(|any| callback(any))); } } fn focus_view(&mut self, selector: &Selector) -> Result<(), ()> { for layer in &mut self.layers { if layer.view.focus_view(selector).is_ok() { return Ok(()); } } Err(()) } }
true
61dbcee77e5b04fb4f0303988fa7628ba762b033
Rust
mikedilger/pemmican
/src/plugins/session.rs
UTF-8
4,096
3.015625
3
[ "Apache-2.0", "MIT" ]
permissive
use futures::Future; use crate::plugins::{Plugin, PluginData}; use hyper::header::Cookie as CookieHeader; use hyper::header::SetCookie; use cookie::Cookie; use textnonce::TextNonce; header! { (Dnt, "Dnt") => [String] } header! { (Tk, "Tk") => [String] } /// This plugin implements sessions. Sessions associate subsequent requests with /// earlier requests. Sessions are maintained automatically by always setting a /// cookie initially and finding it again on subsequent requests. /// /// This plugin only manages the cookie and maintains PluginData.session_id. /// Associating data with that session_id is left up to the consumer of this library /// (hint: Store it in your shared state, the S type parameter on Pemmican, perhaps /// with a CHashMap) /// /// Plug this in before main content handling plugins pub struct Session { cookie_name: String, secure: bool, http_only: bool, respect_dnt_ad_absurdum: bool, } impl Session { /// Create the Session plugin. /// /// `cookie_name` is the name of the cookie (e.g. PHP uses PHP_SESS_ID) /// /// `secure` is whether or not to allow transmission of the cookie over HTTP (without SSL) /// /// `http_only` is whether or not to restrict the cookie to the HTTP protocol (or else /// allow javascript to access it) pub fn new(cookie_name: String, secure: bool, http_only: bool) -> Session { Session { cookie_name: cookie_name, secure: secure, http_only: http_only, respect_dnt_ad_absurdum: false, } } /// If you set this, then clients setting the "DNT: 1" HTTP header will be unable /// to get sessions (using a cookie and checking it later is, strictly speaking, /// tracking). 
pub fn respect_dnt_ad_absurdum(&mut self) { self.respect_dnt_ad_absurdum = true; } } impl<S,E> Plugin<S,E> for Session where S: 'static, E: 'static { fn handle(&self, mut data: PluginData<S>) -> Box<dyn Future<Item = PluginData<S>, Error = E>> { if self.respect_dnt_ad_absurdum { // Respect Dnt let mut dnt = false; if let Some(header) = data.request.headers().get::<Dnt>() { match *header { Dnt(ref s) => { if &*s != "0" { dnt = true; } }, } } if dnt { // The user has requested Do Not Track. We strictly comply by removing // any existing session and refusing to start one while this header is // present data.session_id = None; // Set the Tk header, informing them that we are not tracking data.response.headers_mut().set(Tk("N".to_owned())); return Box::new(::futures::future::ok(data)); } } let mut maybe_key: Option<String> = None; if let Some(cookie_header) = data.request.headers().get::<CookieHeader>() { if let Some(cookie_value) = cookie_header.get(&*self.cookie_name) { maybe_key = Some(cookie_value.to_owned()); } } if let Some(key) = maybe_key { // Associate existing session data.session_id = Some(key.to_owned()); return Box::new(::futures::future::ok(data)); } // Create new session let key = TextNonce::new().into_string(); data.session_id = Some(key.clone()); // Create the cookie let mut cookie = Cookie::new(self.cookie_name.clone(), key); // expiry defaults to 'on close' // max_age defaults to None cookie.set_path("/"); // force a root path cookie.set_secure(self.secure); cookie.set_http_only(self.http_only); // Set the cookie data.response.headers_mut().set( SetCookie(vec![ cookie.to_string() ])); // Pass data on through Box::new(::futures::future::ok(data)) } }
true
018748601b3f7c72f6e8ec0d47a40eb2768eea64
Rust
lclarkmichalek/rust-sg3
/src/sg/error.rs
UTF-8
1,439
2.96875
3
[]
no_license
use std::result; use std::error; use std::io; use std::fmt; use image; pub type Result<T> = result::Result<T, Error>; #[derive(Debug)] pub enum Error { IoError(io::Error), ImageError(image::ImageError), MalformedFile(String), MalformedImage(), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Error::IoError(ref e) => e.fmt(f), &Error::ImageError(ref e) => e.fmt(f), &Error::MalformedFile(ref e) => write!(f, "File was malformed: {:?}", e), &Error::MalformedImage() => write!(f, "Image byte stream was malformed"), } } } impl error::Error for Error { fn description(&self) -> &str { match *self { Error::IoError(..) => &"IO error", Error::ImageError(..) => &"Image error", Error::MalformedFile(..) => &"Malformed file", Error::MalformedImage() => &"Malformed image", } } fn cause(&self) -> Option<&error::Error> { match *self { Error::IoError(ref err) => Some(err), Error::ImageError(ref err) => Some(err), _ => None, } } } impl From<io::Error> for Error { fn from(err: io::Error) -> Error { Error::IoError(err) } } impl From<image::ImageError> for Error { fn from(err: image::ImageError) -> Error { Error::ImageError(err) } }
true
1392e43b061f8bda58b055e5dda032c89f1447b6
Rust
prisma/prisma-engines
/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_8265.rs
UTF-8
2,205
2.546875
3
[ "Apache-2.0" ]
permissive
use query_engine_tests::*; #[test_suite(schema(schema))] mod mongodb { use indoc::indoc; use query_engine_tests::{run_query_json, Runner}; fn schema() -> String { let schema = indoc! { r#" model Order { #id(id, String, @id) order_lines OrderLine[] } model OrderLine { #id(id, String, @id) external_id String created_at DateTime @default(now()) updated_at DateTime @default(now()) @updatedAt order_id String order Order @relation(fields: [order_id], references: [id]) } "# }; schema.to_owned() } #[connector_test] async fn nested_update_many_timestamps(runner: Runner) -> TestResult<()> { let resp = run_query_json!( runner, r#"mutation { createOneOrder(data: { id: "order_1" order_lines: { create: { id: "order_line_1" external_id: "1" } } }) { order_lines { updated_at } }}"# ); let updated_at = &resp["data"]["createOneOrder"]["order_lines"][0]["updated_at"]; std::thread::sleep(std::time::Duration::from_millis(1000)); let updated = run_query_json!( runner, r#"mutation { updateOneOrder( where: { id: "order_1" } data: { order_lines: { updateMany: { where: { external_id: { not: { equals: "something" }}} data: { external_id: { set: "changed" }} } } } ) { order_lines { updated_at } } } "# ); let changed_updated_at = &updated["data"]["updateOneOrder"]["order_lines"][0]["updated_at"]; assert_ne!(updated_at, changed_updated_at); Ok(()) } }
true
deff8e4d7170c61eeaae3bed451a09ab8ed78ca7
Rust
LMckim/rust-mini-feed
/src/fh_tools/database.rs
UTF-8
1,967
2.796875
3
[]
no_license
use super::parser::Message; use mysql::prelude::Queryable; use mysql::*; pub struct FhDb { host: String, user: String, password: String, pool: Option<Pool>, conn: Option<PooledConn>, insert_limit: i32, } impl FhDb { pub fn new(host: String, user: String, password: String) -> FhDb { return FhDb { host: host, user: user, password: password, pool: None, conn: None, insert_limit: 10, }; } pub fn connect(&mut self) { let url = format!("mysql://{}:{}@{}:3306", self.user, self.password, self.host); self.pool = Some(Pool::new(url).expect("Could not connect")); match &self.pool { Some(x) => self.conn = Some(x.get_conn().expect("Error during connection")), None => println!("Error during connect"), } } pub fn insert_vals(&mut self, table_name: String, msgs: Vec<Message>) { let head = format!( "INSERT INTO {}(SEQ,DATE,TIME,SYMBOL,VOLUME,PRICE,VENUE) VALUES", table_name ); let mut query = head.clone(); let mut inserts = 0; for msg in msgs.iter() { query += &msg.make_sql_val_string(); query += ","; inserts += 1; if inserts >= self.insert_limit { self.execute_insertion(&mut query); inserts = 0; query = head.clone(); } } if inserts > 0 { self.execute_insertion(&mut query); } } fn execute_insertion(&mut self, query: &mut String) -> bool { query.pop(); match &mut self.conn { Some(x) => { x.query_drop(query.to_string()).expect(query); return true; } None => { println!("Issue during insert"); return false; } } } }
true
77b8d6e172d41e439e3eaae159b3604116cdde8b
Rust
quark-zju/cov
/cov/src/error.rs
UTF-8
5,909
2.859375
3
[ "MIT" ]
permissive
//! Errors related to the `cov` crate. //! //! Please see documentation of the [`error-chain` crate](https://docs.rs/error-chain/0.10.0/error_chain/) for detailed //! usage. #![allow(renamed_and_removed_lints, unused_doc_comments)] // ^ remove a release with https://github.com/rust-lang-nursery/error-chain/pull/247 is published. use raw::{Ident, Type, Version}; use std::{fmt, io}; use std::error::Error as StdError; use std::path::PathBuf; use std::result::Result as StdResult; error_chain! { foreign_links { Io(io::Error) /** Wrapper of standard I/O error. */; Json(::serde_json::Error) #[cfg(feature="serde_json")] /** Wrapper of JSON error. */; } errors { /// Trying to read a file which is not GCNO/GCDA format. UnknownFileType(magic: u32) { description("unknown file type") display("unknown file type, magic 0x{:08x} not recognized", magic) } /// Version of a [`Gcov`] does not match that of the [`Graph`] when using [`merge()`]. /// /// [`Gcov`]: ../raw/struct.Gcov.html /// [`Graph`]: ../graph/struct.Graph.html /// [`merge()`]: ../graph/struct.Graph.html#method.merge VersionMismatch(expected: Version, actual: Version) { description("version mismatch") display("version mismatch, existing graph has \"{}\", incoming file has \"{}\"", expected, actual) } /// Reached the end of a record when reading. Usually not fatal. Eof { description("encountered EOF record") } /// Encountered an unknown record. UnknownTag(tag: u32) { description("unknown record") display("unknown record, tag 0x{:08x} not recognized", tag) } /// Encountered an unknown block/arc flag. UnsupportedAttr(kind: &'static str, raw_flag: u32) { description("unsupported flags") display("unsupported {} flags 0x{:x}", kind, raw_flag) } /// The GCNO/GCDA is created for a GCC version that is not recognized by the `cov` crate. 
UnsupportedVersion(version: u32) { description("unsupported gcov version") display("unsupported gcov version 0x{:08x}", version) } /// The GCDA provides statistics of a function which cannot be found from the [`Graph`]. This error typically /// arises when merging a GCDA before its corresponding GCNO, or running an outdated version of program after /// the code has been recompiled (which generates a new GCNO). /// /// [`Graph`]: ../graph/struct.Graph.html MissingFunction(file_checksum: u32, ident: Ident) { description("missing function") display("function from *.gcda cannot be found in the *.gcno (checksum: {}, ident: {})", file_checksum, ident) } /// Encountered a GCNO record without the corresponding function. This means the GCNO file is corrupt. RecordWithoutFunction { description("encountered a record without the corresponding function") } /// The expected number of profilable arcs on the GCDA and GCNO differs. CountsMismatch(kind: &'static str, ty: Type, expected: usize, actual: usize) { description("counts mismatch") display("{0} counts mismatch on *.{3}, expecting {1} {0}, received {2} {0}", kind, expected, actual, ty) } } } //---------------------------------------------------------------------------------------------------------------------- /// The location where an error happened. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum Location { /// No source location. None, /// Cursor position in a file. Cursor(u64), /// Record index in a GCNO/GCDA file. RecordIndex(usize), /// Path to a GCNO/GCDA file. File(PathBuf), } impl Location { /// Annotates the result with context information. pub fn wrap<T, E: Into<Error>, F: FnOnce() -> StdResult<T, E>>(self, f: F) -> Result<T> { f().map_err(|e| self.wrap_error(e)) } /// Annotates the error with context information. 
pub fn wrap_error<E: Into<Error>>(self, e: E) -> Error { let mut error = e.into(); if self != Location::None { let cause = Box::new(AtError { location: self, cause: error.1.next_error, }); error.1.next_error = Some(cause); } error } } #[derive(Debug)] struct AtError { location: Location, cause: Option<Box<StdError + Send + 'static>>, } impl fmt::Display for AtError { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self.location { Location::Cursor(cursor) => write!(fmt, "reading at file position {0} (0x{0:x})", cursor), Location::RecordIndex(index) => write!(fmt, "parsing record index #{}", index), Location::File(ref path) => write!(fmt, "parsing file {}", path.display()), Location::None => write!(fmt, "nothing"), } } } impl StdError for AtError { fn description(&self) -> &str { "<error context>" } fn cause(&self) -> Option<&StdError> { self.cause.as_ref().map(|e| -> &StdError { &**e }) } } /// A trait to check if an error is an EOF error. pub trait IsEof { /// Checks whether the error is caused by an unexpected EOF. fn is_eof(&self) -> bool; } impl IsEof for ErrorKind { fn is_eof(&self) -> bool { match *self { ErrorKind::Io(ref e) => e.is_eof(), ErrorKind::Eof => true, _ => false, } } } impl IsEof for Error { fn is_eof(&self) -> bool { self.kind().is_eof() } } impl IsEof for io::Error { fn is_eof(&self) -> bool { self.kind() == io::ErrorKind::UnexpectedEof } }
true
4e35922ccf72820cc27dc11ccbce09b3e989e24f
Rust
kowaalczyk/latte
/src/util/mapper.rs
UTF-8
2,663
2.890625
3
[ "MIT" ]
permissive
use std::fmt::Debug; use crate::frontend::ast::{Block, Class, ClassItem, Expression, Function, Program, Reference, Statement}; pub trait AstMapper<FromMeta, ToMeta, ErrT> { /// reference to a variable or object property fn map_var_reference(&mut self, r: &Reference<FromMeta>) -> Result<Reference<ToMeta>, Vec<ErrT>>; /// reference to function or object method fn map_func_reference(&mut self, r: &Reference<FromMeta>) -> Result<Reference<ToMeta>, Vec<ErrT>>; fn map_block(&mut self, block: &Block<FromMeta>) -> Result<Block<ToMeta>, Vec<ErrT>>; fn map_expression(&mut self, expr: &Expression<FromMeta>) -> Result<Expression<ToMeta>, Vec<ErrT>>; fn map_statement(&mut self, stmt: &Statement<FromMeta>) -> Result<Statement<ToMeta>, Vec<ErrT>>; fn map_class(&mut self, class: &Class<FromMeta>) -> Result<Class<ToMeta>, Vec<ErrT>>; fn map_function(&mut self, function: &Function<FromMeta>) -> Result<Function<ToMeta>, Vec<ErrT>>; /// main ast mapper function, default implementation fn map_program( &mut self, program: &Program<FromMeta>, ) -> Result<Program<ToMeta>, Vec<ErrT>> where ToMeta: Debug + Clone + Sized, ErrT: Debug { // map all functions, collect errors let (mut mapped_func, mut errors): (Vec<_>, Vec<_>) = program.functions .values() .map(|func| self.map_function(func)) .partition(Result::is_ok); let mut errors: Vec<ErrT> = errors .into_iter() .map(Result::unwrap_err) .flatten() .collect(); let mut mapped_func: Vec<Function<ToMeta>> = mapped_func .into_iter() .map(Result::unwrap) .collect(); // map all classes, collect errors let (mut mapped_cls, mut cls_errors): (Vec<_>, Vec<_>) = program.classes .values() .map(|cls| self.map_class(cls)) .partition(Result::is_ok); let mut cls_errors: Vec<ErrT> = cls_errors .into_iter() .map(Result::unwrap_err) .flatten() .collect(); let mut mapped_cls: Vec<Class<ToMeta>> = mapped_cls .into_iter() .map(Result::unwrap) .collect(); errors.append(&mut cls_errors); if errors.is_empty() { if let Ok(prog) = Program::new(&mut mapped_cls, &mut 
mapped_func) { Ok(prog) } else { // not possible to fail program creation when re-constructed from previously valid program unreachable!() } } else { Err(errors) } } }
true
cbe49d7a43f1d393193dab3833e0c03078bbeaa0
Rust
MatchaChoco010/vulkan-ash-tutorial-rust-memo
/techlabxe_vulkan_book_2/src/animation/pmd_loader.rs
UTF-8
22,820
2.609375
3
[]
no_license
#![allow(dead_code)] use std::{ ffi::CString, fs::File, io::{prelude::*, BufReader, Read, SeekFrom}, mem, path::Path, }; use cgmath::{BaseFloat, Vector2, Vector3, Vector4}; use encoding::{all::WINDOWS_31J, DecoderTrap, EncoderTrap, Encoding}; /// bytesからf32の値を読み込む関数。 fn read_f32(reader: &mut impl Read) -> f32 { let mut buf = [0_u8; 4]; reader .read_exact(&mut buf) .expect("Failed to read f32 from bytes."); unsafe { mem::transmute::<[u8; 4], f32>(buf) } } /// bytesからu8の値を読み込む関数。 fn read_u8(reader: &mut impl Read) -> u8 { let mut buf = [0_u8; 1]; reader .read_exact(&mut buf) .expect("Failed to read u8 from bytes."); buf[0] } /// bytesからu16の値を読み込む関数。 fn read_u16(reader: &mut impl Read) -> u16 { let mut buf = [0_u8; 2]; reader .read_exact(&mut buf) .expect("Failed to read u16 from bytes."); unsafe { mem::transmute::<[u8; 2], u16>(buf) } } /// bytesからu32の値を読み込む関数。 fn read_u32(reader: &mut impl Read) -> u32 { let mut buf = [0_u8; 4]; reader .read_exact(&mut buf) .expect("Failed to read u32 from bytes."); unsafe { mem::transmute::<[u8; 4], u32>(buf) } } /// bytesからVector2<f32>の値を読み込む関数。 fn read_vector2(reader: &mut impl Read) -> Vector2<f32> { let x = read_f32(reader); let y = read_f32(reader); Vector2::new(x, y) } /// bytesからVector3<f32>の値を読み込む関数。 fn read_vector3(reader: &mut impl Read) -> Vector3<f32> { let x = read_f32(reader); let y = read_f32(reader); let z = read_f32(reader); Vector3::new(x, y, z) } /// bytesからVector4<f32>の値を読み込む関数。 fn read_vector4(reader: &mut impl Read) -> Vector4<f32> { let x = read_f32(reader); let y = read_f32(reader); let z = read_f32(reader); let w = read_f32(reader); Vector4::new(x, y, z, w) } /// flip_to_rhを生やすための拡張トレイト。 trait Vector3Ext { fn flip_to_rh(&self) -> Self; } impl<T: BaseFloat> Vector3Ext for Vector3<T> { fn flip_to_rh(&self) -> Self { Vector3::new(self.x, self.y, -self.z) } } /// flip_to_rhを生やすための拡張トレイト。 trait Vector4Ext { fn flip_to_rh(&self) -> Self; } impl<T: BaseFloat> Vector4Ext for Vector4<T> { fn flip_to_rh(&self) -> 
Self { Vector4::new(self.x, self.y, -self.z, -self.w) } } /// PMDHeader情報の構造体。 struct PMDHeader { magic: [u8; 3], version: f32, name: String, comment: String, } impl PMDHeader { fn load(reader: &mut impl Read) -> Self { let mut buf = [0_u8; 3]; reader .read_exact(&mut buf) .expect("Failed to read magic from bytes."); let magic = buf; let version = read_f32(reader); let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert name to String"); let mut buf = [0_u8; 256]; reader .read_exact(&mut buf) .expect("Failed to read comment from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let comment = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert comment to String"); Self { magic, version, name, comment, } } } /// PMDファイルの頂点情報を保持する構造体。 pub struct PMDVertex { position: Vector3<f32>, normal: Vector3<f32>, texcoord: Vector2<f32>, bone_num: [u16; 2], bone_weight: u8, edge_flag: u8, } impl PMDVertex { pub fn new( position: Vector3<f32>, normal: Vector3<f32>, texcoord: Vector2<f32>, bone_num: [u16; 2], bone_weight: u8, edge_flag: u8, ) -> Self { Self { position, normal, texcoord, bone_num, bone_weight, edge_flag, } } pub fn position(&self) -> Vector3<f32> { self.position } pub fn normal(&self) -> Vector3<f32> { self.normal } pub fn texcoord(&self) -> Vector2<f32> { self.texcoord } pub fn bone_index(&self, index: usize) -> u16 { self.bone_num[index] } pub fn bone_weight(&self, index: usize) -> f32 { if index == 0 { self.bone_weight as f32 / 100.0 } else { (100 - self.bone_weight) as f32 / 100.0 } } pub fn edge_flag(&self) -> u8 { self.edge_flag } fn load(reader: &mut impl Read) -> Self { let position = read_vector3(reader).flip_to_rh(); let normal = read_vector3(reader).flip_to_rh(); 
let texcoord = read_vector2(reader); let bone_num: [u16; 2] = [read_u16(reader), read_u16(reader)]; let bone_weight = read_u8(reader); let edge_flag = read_u8(reader); Self { position, normal, texcoord, bone_num, bone_weight, edge_flag, } } } /// PMDのマテリアル情報を保持する構造体。 pub struct PMDMaterial { diffuse: Vector3<f32>, alpha: f32, shininess: f32, specular: Vector3<f32>, ambient: Vector3<f32>, toon_id: u8, edge_flag: u8, number_of_polygons: u32, texture: String, } impl PMDMaterial { pub fn diffuse(&self) -> Vector3<f32> { self.diffuse } pub fn ambient(&self) -> Vector3<f32> { self.ambient } pub fn alpha(&self) -> f32 { self.alpha } pub fn shininess(&self) -> f32 { self.shininess } pub fn specular(&self) -> Vector3<f32> { self.specular } pub fn texture(&self) -> &str { &self.texture } pub fn edge_flag(&self) -> u8 { self.edge_flag } pub fn number_of_polygons(&self) -> u32 { self.number_of_polygons } fn load(reader: &mut impl Read) -> Self { let diffuse = read_vector3(reader); let alpha = read_f32(reader); let shininess = read_f32(reader); let specular = read_vector3(reader); let ambient = read_vector3(reader); let toon_id = read_u8(reader); let edge_flag = read_u8(reader); let number_of_polygons = read_u32(reader); let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read texture file name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let texture = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert texture filename to String"); Self { diffuse, alpha, shininess, specular, ambient, toon_id, edge_flag, number_of_polygons, texture, } } } /// PMDのボーン情報を保持する構造体。 pub struct PMDBone { name: String, parent: u16, child: u16, ty: u8, target_bone: u16, position: Vector3<f32>, } impl PMDBone { pub fn name(&self) -> &str { &self.name } pub fn parent(&self) -> u16 { self.parent } pub fn target(&self) -> u16 { self.target_bone } pub fn position(&self) -> Vector3<f32> { self.position 
} fn load(reader: &mut impl Read) -> Self { let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read bone name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert bone name to String"); let parent = read_u16(reader); let child = read_u16(reader); let ty = read_u8(reader); let target_bone = read_u16(reader); let position = read_vector3(reader).flip_to_rh(); Self { name, parent, child, ty, target_bone, position, } } } /// PMDのIk情報を保持する構造体。 pub struct PMDIk { bone_index: u16, bone_target: u16, num_chains: u8, num_iterations: u16, angle_limit: f32, ik_bones: Vec<u16>, } impl PMDIk { pub fn target_bone_id(&self) -> u16 { self.bone_target } pub fn bone_eff(&self) -> u16 { self.bone_index } pub fn chains(&self) -> &Vec<u16> { &self.ik_bones } pub fn iterations(&self) -> u16 { self.num_iterations } pub fn angle_limit(&self) -> f32 { self.angle_limit } fn load(reader: &mut impl Read) -> Self { let bone_index = read_u16(reader); let bone_target = read_u16(reader); let num_chains = read_u8(reader); let num_iterations = read_u16(reader); let angle_limit = read_f32(reader) * std::f32::consts::PI; let mut ik_bones = vec![]; for _ in 0..num_chains { ik_bones.push(read_u16(reader)); } Self { bone_index, bone_target, num_chains, num_iterations, angle_limit, ik_bones, } } } /// PMDFace pub enum FaceType { BASE, EYEBROW, EYE, LIP, OTHER, } impl FaceType { fn from_u8(id: u8) -> Self { match id { 0 => FaceType::BASE, 1 => FaceType::EYEBROW, 2 => FaceType::EYE, 3 => FaceType::LIP, _ => FaceType::OTHER, } } } pub struct PMDFace { name: String, num_vertices: u32, face_type: FaceType, face_indices: Vec<u32>, face_vertices: Vec<Vector3<f32>>, } impl PMDFace { pub fn name(&self) -> &str { &self.name } pub fn ty(&self) -> &FaceType { &self.face_type } pub fn vertex_count(&self) -> u32 { self.face_vertices.len() as u32 } pub fn 
index_count(&self) -> u32 { self.face_indices.len() as u32 } pub fn face_vertices(&self) -> &Vec<Vector3<f32>> { &self.face_vertices } pub fn face_indices(&self) -> &Vec<u32> { &self.face_indices } fn load(reader: &mut impl Read) -> Self { let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read face name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert face name to String"); let num_vertices = read_u32(reader); let face_type = FaceType::from_u8(read_u8(reader)); let mut face_vertices = vec![]; let mut face_indices = vec![]; for _ in 0..num_vertices { face_indices.push(read_u32(reader)); face_vertices.push(read_vector3(reader).flip_to_rh()); } Self { name, num_vertices, face_type, face_indices, face_vertices, } } } /// PMDRigidParam pub enum ShapeType { SPHERE, BOX, CAPSULE, } impl ShapeType { fn from_u8(id: u8) -> Self { match id { 0 => ShapeType::SPHERE, 1 => ShapeType::BOX, 2 => ShapeType::CAPSULE, _ => unreachable!(), } } } pub enum RigidBodyType { BONE, PHYSICS, #[allow(non_camel_case_types)] PHYSICS_BONE_CORRECT, } impl RigidBodyType { fn from_u8(id: u8) -> Self { match id { 0 => RigidBodyType::BONE, 1 => RigidBodyType::PHYSICS, 2 => RigidBodyType::PHYSICS_BONE_CORRECT, _ => unreachable!(), } } } pub struct PMDRigidParam { name: String, bone_id: u16, group_id: u8, group_mask: u16, shape_type: ShapeType, body_type: RigidBodyType, shape_w: f32, shape_h: f32, shape_d: f32, position: Vector3<f32>, rotation: Vector3<f32>, weight: f32, attenuation_pos: f32, attenuation_rot: f32, recoil: f32, friction: f32, } impl PMDRigidParam { fn load(reader: &mut impl Read) -> Self { let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read rigid param name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J 
.decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert rigid name to String"); let bone_id = read_u16(reader); let group_id = read_u8(reader); let group_mask = read_u16(reader); let shape_type = ShapeType::from_u8(read_u8(reader)); let shape_w = read_f32(reader); let shape_h = read_f32(reader); let shape_d = read_f32(reader); let position = read_vector3(reader); let rotation = read_vector3(reader); let weight = read_f32(reader); let attenuation_pos = read_f32(reader); let attenuation_rot = read_f32(reader); let recoil = read_f32(reader); let friction = read_f32(reader); let body_type = RigidBodyType::from_u8(read_u8(reader)); Self { name, bone_id, group_id, group_mask, shape_type, shape_w, shape_h, shape_d, position, rotation, weight, attenuation_pos, attenuation_rot, recoil, friction, body_type, } } } /// PMDJointParam pub struct PMDJointParam { name: String, target_rigid_bodies: [u32; 2], position: Vector3<f32>, rotation: Vector3<f32>, constraint_pos: [Vector3<f32>; 2], constraint_rot: [Vector3<f32>; 2], spring_pos: Vector3<f32>, spring_rot: Vector3<f32>, } impl PMDJointParam { fn load(reader: &mut impl Read) -> Self { let mut buf = [0_u8; 20]; reader .read_exact(&mut buf) .expect("Failed to read joint name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert joint name to String"); let target_rigid_bodies: [u32; 2] = [read_u32(reader), read_u32(reader)]; let position = read_vector3(reader); let rotation = read_vector3(reader); let constraint_pos: [_; 2] = [read_vector3(reader), read_vector3(reader)]; let constraint_rot: [_; 2] = [read_vector3(reader), read_vector3(reader)]; let spring_pos = read_vector3(reader); let spring_rot = read_vector3(reader); Self { name, target_rigid_bodies, position, rotation, constraint_pos, constraint_rot, spring_pos, spring_rot, } } } /// PMDFile pub struct PMDFile { version: 
f32, name: String, comment: String, vertices: Vec<PMDVertex>, indices: Vec<u16>, materials: Vec<PMDMaterial>, bones: Vec<PMDBone>, iks: Vec<PMDIk>, faces: Vec<PMDFace>, toon_textures: Vec<String>, rigid_bodies: Vec<PMDRigidParam>, joints: Vec<PMDJointParam>, } impl PMDFile { pub fn name(&self) -> &str { &self.name } pub fn version(&self) -> f32 { self.version } pub fn comment(&self) -> &str { &self.comment } pub fn vertex_count(&self) -> usize { self.vertices.len() } pub fn index_count(&self) -> usize { self.indices.len() } pub fn material_count(&self) -> usize { self.materials.len() } pub fn bone_count(&self) -> usize { self.bones.len() } pub fn ik_count(&self) -> usize { self.iks.len() } pub fn face_count(&self) -> usize { self.faces.len() } pub fn rigid_body_count(&self) -> usize { self.rigid_bodies.len() } pub fn joint_count(&self) -> usize { self.joints.len() } pub fn vertices(&self) -> &Vec<PMDVertex> { &self.vertices } pub fn indices(&self) -> &Vec<u16> { &self.indices } pub fn material(&self, index: usize) -> &PMDMaterial { &self.materials[index] } pub fn bone(&self, index: usize) -> &PMDBone { &self.bones[index] } pub fn ik(&self, index: usize) -> &PMDIk { &self.iks[index] } pub fn face(&self, index: usize) -> &PMDFace { &self.faces[index] } pub fn face_base(&self) -> &PMDFace { for face in self.faces.iter() { match face.face_type { FaceType::BASE => return face, _ => continue, } } unreachable!() } pub fn new(reader: &mut (impl Read + Seek)) -> Self { let header = PMDHeader::load(reader); let version = header.version; let name = header.name; let comment = header.comment; let vertex_count = read_u32(reader); let mut vertices = Vec::with_capacity(vertex_count as usize); for _ in 0..vertex_count { vertices.push(PMDVertex::load(reader)); } let index_count = read_u32(reader); let polygon_count = index_count / 3; let mut indices = Vec::with_capacity(index_count as usize); for _ in 0..polygon_count { let idx0 = read_u16(reader); let idx2 = read_u16(reader); let 
idx1 = read_u16(reader); indices.push(idx0); indices.push(idx1); indices.push(idx2); } let material_count = read_u32(reader); let mut materials = Vec::with_capacity(material_count as usize); for _ in 0..material_count { materials.push(PMDMaterial::load(reader)); } let bone_count = read_u16(reader); let mut bones = Vec::with_capacity(bone_count as usize); for _ in 0..bone_count { bones.push(PMDBone::load(reader)); } let ik_list_count = read_u16(reader); let mut iks = Vec::with_capacity(ik_list_count as usize); for _ in 0..ik_list_count { iks.push(PMDIk::load(reader)); } let face_count = read_u16(reader); let mut faces = Vec::with_capacity(face_count as usize); for _ in 0..face_count { faces.push(PMDFace::load(reader)); } // 表情枠。Skip let face_disp_count = read_u8(reader); reader .seek(SeekFrom::Current( face_disp_count as i64 * mem::size_of::<u16>() as i64, )) .expect("Failed to seek face disp"); // ボーン枠名前。Skip let bone_disp_name_count = read_u8(reader); reader .seek(SeekFrom::Current( bone_disp_name_count as i64 * mem::size_of::<[u8; 50]>() as i64, )) .expect("Failed to seek bone disp name."); // ボーン枠。Skip let bone_disp_count = read_u32(reader); reader .seek(SeekFrom::Current( bone_disp_count as i64 * mem::size_of::<[u8; 3]>() as i64, )) .expect("Failed to seek bone disp."); // 英語名ヘッダ。Skip let eng_name_count = read_u8(reader); reader .seek(SeekFrom::Current( eng_name_count as i64 * mem::size_of::<[u8; 20 + 256]>() as i64, )) .expect("Failed to seek eng name"); // 英語名ボーン。Skip reader .seek(SeekFrom::Current( (bones.len() * mem::size_of::<[u8; 20]>()) as i64, )) .expect("Failed to seek eng bone name"); // 英語名表情リスト。Skip reader .seek(SeekFrom::Current( ((faces.len() - 1) * mem::size_of::<[u8; 20]>()) as i64, )) .expect("Failed to seek eng face"); // 英語名ボーン枠。Skip reader .seek(SeekFrom::Current( bone_disp_name_count as i64 * mem::size_of::<[u8; 50]>() as i64, )) .expect("Failed to seek eng bone disp name"); // トゥーンテクスチャリスト。 let mut toon_textures = Vec::with_capacity(10); 
for _ in 0..10 { let mut buf = [0_u8; 100]; reader .read_exact(&mut buf) .expect("Failed to read toon texture name from bytes."); let end = buf .iter() .position(|&u| u == 0_u8) .expect("Failed to find null char."); let name = WINDOWS_31J .decode(&buf[0..end], DecoderTrap::Ignore) .expect("Failed to convert toon texture name to String"); toon_textures.push(name); } // 物理演算・剛体。 let rigid_body_count = read_u32(reader); let mut rigid_bodies = Vec::with_capacity(rigid_body_count as usize); for _ in 0..rigid_body_count { rigid_bodies.push(PMDRigidParam::load(reader)); } // 物理演算・ジョイント。 let joint_count = read_u32(reader); let mut joints = Vec::with_capacity(joint_count as usize); for _ in 0..joint_count { joints.push(PMDJointParam::load(reader)); } Self { version, name, comment, vertices, indices, materials, bones, iks, faces, toon_textures, rigid_bodies, joints, } } }
true
376593aaef298cf255239733ac38d5d69a3f35c4
Rust
libdist-rs/libchatter-rs
/consensus/libp2p-experiment/client/error.rs
UTF-8
1,142
3.0625
3
[]
no_license
use std::io; #[derive(Debug)] pub enum Error { ConnectFailed, ConnectionClosed, BlockReadFailed(io::Error), TxWriteFailed(io::Error), } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Error::ConnectFailed => write!(f, "Connection Failed"), Error::ConnectionClosed => write!(f, "Connection Closed"), Error::BlockReadFailed(e) => write!(f, "Failed to read and parse blocks with error[{}]", e), Error::TxWriteFailed(e) => write!(f, "Failed to parse/write the transactions to the server with error [{}]", e), } } } impl std::error::Error for Error { fn cause(&self) -> Option<&dyn std::error::Error> { match self { _ => None, } } fn description(&self) -> &str { match self { Error::ConnectFailed => "Connection failed", Error::ConnectionClosed => "Connection closed", Error::TxWriteFailed(_) => "Failed to write tx", Error::BlockReadFailed(_) => "Failed to read block", } } }
true
750134de3df8f8b3c62129ceb97d632e8e6c1e74
Rust
iThinkyouknow/exercism
/bracket-push/src/lib.rs
UTF-8
445
3.296875
3
[]
no_license
pub fn brackets_are_balanced(string: &str) -> bool { let mut stack = Vec::new(); !string.chars().any(|c| match c { ']' | ')' | '}' => stack.pop() != Some(c), '[' => { stack.push(']'); false } '(' => { stack.push(')'); false } '{' => { stack.push('}'); false } _ => false, }) && stack.is_empty() }
true
50f4f0a86baa2f68a7fcd2382253f79d3e2d3504
Rust
sumeet/twitchchat
/src/runner/status.rs
UTF-8
299
2.625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
/// Status of the client after running #[derive(Debug, Copy, Clone, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Status { /// The connection timedout Timeout, /// It ran to completion Eof, /// It was cancelled Canceled, }
true
0484e090319fa00907634e8f13668528651a06bd
Rust
OAyomide/paystack-rs
/src/paystack/payment_pages.rs
UTF-8
4,510
2.859375
3
[]
no_license
use chrono::{DateTime, Local}; use reqwest::blocking::Response; use serde::Serialize; use serde_json::Value as JSON; use crate::utils::{make_get_request, make_request, REQUEST}; #[derive(Debug, Default)] /// The Payment Pages API provides a quick and secure way to collect payment for products. pub struct PaymentPages { pub(crate) bearer_auth: String, } #[derive(Debug, Serialize)] pub struct CreatePaymentPagesBody<'a> { /// Name of page pub name: &'a str, /// A description for this page pub description: Option<&'a str>, /// Amount should be in kobo if currency is NGN, pesewas, if currency is GHS, and cents, if currency is ZAR pub amount: Option<i128>, /// URL slug you would like to be associated with this page. Page will be accessible at https://paystack.com/pay/[slug] pub slug: Option<String>, /// Extra data to configure the payment page including subaccount, logo image, transaction charge pub metadata: Option<JSON>, /// If you would like Paystack to redirect someplace upon successful payment, specify the URL here. pub redirect_url: Option<&'a str>, /// If you would like to accept custom fields, specify them here. pub custom_fields: Option<Vec<JSON>>, } #[derive(Debug, Serialize)] pub struct ListPagesParams { /// Specify how many records you want to retrieve per page. If not specify we use a default value of 50. #[serde(rename = "perPage")] pub per_page: Option<i128>, /// Specify exactly what page you want to retrieve. If not specify we use a default value of 1. pub page: Option<i128>, /// A timestamp from which to start listing product e.g. 2016-09-24T00:00:05.000Z, 2016-09-21 pub from: Option<DateTime<Local>>, /// A timestamp at which to stop listing product e.g. 2016-09-24T00:00:05.000Z, 2016-09-21 pub to: Option<DateTime<Local>>, } #[derive(Debug, Serialize)] pub struct UpdatePageBody<'a> { /// Name of page pub name: &'a str, /// A description for this page pub description: Option<&'a str>, /// Default amount you want to accept using this page. 
If none is set, customer is free to provide any amount of their choice. The latter scenario is useful for accepting donations pub amount: Option<i128>, /// Set to false to deactivate page url pub active: Option<bool>, } #[derive(Debug, Serialize)] pub struct AddProductsBody { /// Ids of all the products pub product: Vec<i128>, } const PAYMENT_PAGES_URL: &str = "https://api.paystack.co/page"; impl PaymentPages { /// Create a payment page on your integration pub fn create_pages(&self, body: CreatePaymentPagesBody) -> Result<Response, String> { let res = make_request( &self.bearer_auth, PAYMENT_PAGES_URL, Some(body), REQUEST::POST, ); return res; } /// List payment pages available on your integration. pub fn list_pages(&self, params: Option<ListPagesParams>) -> Result<Response, String> { let res = make_get_request(&self.bearer_auth, PAYMENT_PAGES_URL, params); return res; } /// Get details of a payment page on your integration. /// id_or_slug: The page ID or slug you want to fetch. pub fn fetch_page(&self, id_or_slug: &str) -> Result<Response, String> { let url = format!("{}/{}", PAYMENT_PAGES_URL, id_or_slug); let res = make_get_request(&self.bearer_auth, &url, None::<String>); return res; } /// Update a payment page details on your integration pub fn update_page(&self, id_or_slug: &str, body: UpdatePageBody) -> Result<Response, String> { let url = format!("{}/{}", PAYMENT_PAGES_URL, id_or_slug); let res = make_request(&self.bearer_auth, &url, Some(body), REQUEST::POST); return res; } /// Check the availability of a slug for a payment page. 
/// slug: URL slug to be confirmed pub fn check_slug_availability(&self, slug: &str) -> Result<Response, String> { let url = format!("{}/check_slug_availability/{}", PAYMENT_PAGES_URL, slug); let res = make_get_request(&self.bearer_auth, &url, None::<String>); return res; } /// Add products to a payment page pub fn add_products(&self, id: i128, body: AddProductsBody) -> Result<Response, String> { let url = format!("{}/{}/product", PAYMENT_PAGES_URL, id); let res = make_request(&self.bearer_auth, &url, Some(body), REQUEST::POST); return res; } }
true
fece2a3f8cdd6015687e12b3678fa6a6982c9c4f
Rust
a-bakos/rust-playground
/archived-learning/rust-prog-for-beginners/tests-integration/rusty_pizza/tests/pizza.rs
UTF-8
1,115
3.390625
3
[]
no_license
use rusty_pizza::Pizza; #[test] fn can_make_pepperoni_pizza() { let pizza = Pizza::pepperoni(12); assert_eq!(pizza.topping, "pepperoni"); assert_eq!(pizza.inches, 12); } #[test] fn can_make_mozzarella_pizza() { let pizza = Pizza::mozzarella(16); assert_eq!(pizza.topping, "mozzarella"); assert_eq!(pizza.inches, 16); } // Now that our integration tests setup is done, we can execute the cargo test // command to see the results // Inspecting the output reveals that Rust places each kind of test in a // different section: unit tests, integration tests, and documentation tests. // In the integration tests section, we can see that our two tests inside the // tests/pizzas.rs file were collected and executed by the test suite. // Only library crates can be tested via integration tests because binary crates // don't expose any functionality that other crates can use. But, many Rust // crates that provide a binary have a succinct src/main.rs file that calls // logic that lives in a src/lib.rs file. Using that structure, integration // tests can test the important parts of the code.
true
7ead3f09aa6e3ace3ecfcdca0d77404f261f90c5
Rust
mnts26/aws-sdk-rust
/sdk/marketplaceentitlement/src/model.rs
UTF-8
10,280
2.890625
3
[ "Apache-2.0" ]
permissive
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// <p>An entitlement represents capacity in a product owned by the customer. For example, a /// customer might own some number of users or seats in an SaaS application or some amount of /// data capacity in a multi-tenant database.</p> #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct Entitlement { /// <p>The product code for which the given entitlement applies. Product codes are provided by /// AWS Marketplace when the product listing is created.</p> pub product_code: std::option::Option<std::string::String>, /// <p>The dimension for which the given entitlement applies. Dimensions represent categories of /// capacity in a product and are specified when the product is listed in AWS /// Marketplace.</p> pub dimension: std::option::Option<std::string::String>, /// <p>The customer identifier is a handle to each unique customer in an application. Customer /// identifiers are obtained through the ResolveCustomer operation in AWS Marketplace Metering /// Service.</p> pub customer_identifier: std::option::Option<std::string::String>, /// <p>The EntitlementValue represents the amount of capacity that the customer is entitled to /// for the product.</p> pub value: std::option::Option<crate::model::EntitlementValue>, /// <p>The expiration date represents the minimum date through which this entitlement is /// expected to remain valid. For contractual products listed on AWS Marketplace, the expiration date /// is the date at which the customer will renew or cancel their contract. 
Customers who are opting /// to renew their contract will still have entitlements with an expiration date.</p> pub expiration_date: std::option::Option<smithy_types::Instant>, } impl std::fmt::Debug for Entitlement { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("Entitlement"); formatter.field("product_code", &self.product_code); formatter.field("dimension", &self.dimension); formatter.field("customer_identifier", &self.customer_identifier); formatter.field("value", &self.value); formatter.field("expiration_date", &self.expiration_date); formatter.finish() } } /// See [`Entitlement`](crate::model::Entitlement) pub mod entitlement { /// A builder for [`Entitlement`](crate::model::Entitlement) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) product_code: std::option::Option<std::string::String>, pub(crate) dimension: std::option::Option<std::string::String>, pub(crate) customer_identifier: std::option::Option<std::string::String>, pub(crate) value: std::option::Option<crate::model::EntitlementValue>, pub(crate) expiration_date: std::option::Option<smithy_types::Instant>, } impl Builder { /// <p>The product code for which the given entitlement applies. Product codes are provided by /// AWS Marketplace when the product listing is created.</p> pub fn product_code(mut self, input: impl Into<std::string::String>) -> Self { self.product_code = Some(input.into()); self } pub fn set_product_code(mut self, input: std::option::Option<std::string::String>) -> Self { self.product_code = input; self } /// <p>The dimension for which the given entitlement applies. 
Dimensions represent categories of /// capacity in a product and are specified when the product is listed in AWS /// Marketplace.</p> pub fn dimension(mut self, input: impl Into<std::string::String>) -> Self { self.dimension = Some(input.into()); self } pub fn set_dimension(mut self, input: std::option::Option<std::string::String>) -> Self { self.dimension = input; self } /// <p>The customer identifier is a handle to each unique customer in an application. Customer /// identifiers are obtained through the ResolveCustomer operation in AWS Marketplace Metering /// Service.</p> pub fn customer_identifier(mut self, input: impl Into<std::string::String>) -> Self { self.customer_identifier = Some(input.into()); self } pub fn set_customer_identifier( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.customer_identifier = input; self } /// <p>The EntitlementValue represents the amount of capacity that the customer is entitled to /// for the product.</p> pub fn value(mut self, input: crate::model::EntitlementValue) -> Self { self.value = Some(input); self } pub fn set_value( mut self, input: std::option::Option<crate::model::EntitlementValue>, ) -> Self { self.value = input; self } /// <p>The expiration date represents the minimum date through which this entitlement is /// expected to remain valid. For contractual products listed on AWS Marketplace, the expiration date /// is the date at which the customer will renew or cancel their contract. 
Customers who are opting /// to renew their contract will still have entitlements with an expiration date.</p> pub fn expiration_date(mut self, input: smithy_types::Instant) -> Self { self.expiration_date = Some(input); self } pub fn set_expiration_date( mut self, input: std::option::Option<smithy_types::Instant>, ) -> Self { self.expiration_date = input; self } /// Consumes the builder and constructs a [`Entitlement`](crate::model::Entitlement) pub fn build(self) -> crate::model::Entitlement { crate::model::Entitlement { product_code: self.product_code, dimension: self.dimension, customer_identifier: self.customer_identifier, value: self.value, expiration_date: self.expiration_date, } } } } impl Entitlement { /// Creates a new builder-style object to manufacture [`Entitlement`](crate::model::Entitlement) pub fn builder() -> crate::model::entitlement::Builder { crate::model::entitlement::Builder::default() } } #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub enum EntitlementValue { /// <p>The BooleanValue field will be populated with a boolean value when the entitlement is a /// boolean type. Otherwise, the field will not be set.</p> BooleanValue(bool), /// <p>The DoubleValue field will be populated with a double value when the entitlement is a /// double type. Otherwise, the field will not be set.</p> DoubleValue(f64), /// <p>The IntegerValue field will be populated with an integer value when the entitlement is an /// integer type. Otherwise, the field will not be set.</p> IntegerValue(i32), /// <p>The StringValue field will be populated with a string value when the entitlement is a /// string type. 
Otherwise, the field will not be set.</p> StringValue(std::string::String), } impl EntitlementValue { pub fn as_boolean_value(&self) -> std::result::Result<&bool, &Self> { if let EntitlementValue::BooleanValue(val) = &self { Ok(&val) } else { Err(&self) } } pub fn is_boolean_value(&self) -> bool { self.as_boolean_value().is_ok() } pub fn as_double_value(&self) -> std::result::Result<&f64, &Self> { if let EntitlementValue::DoubleValue(val) = &self { Ok(&val) } else { Err(&self) } } pub fn is_double_value(&self) -> bool { self.as_double_value().is_ok() } pub fn as_integer_value(&self) -> std::result::Result<&i32, &Self> { if let EntitlementValue::IntegerValue(val) = &self { Ok(&val) } else { Err(&self) } } pub fn is_integer_value(&self) -> bool { self.as_integer_value().is_ok() } pub fn as_string_value(&self) -> std::result::Result<&std::string::String, &Self> { if let EntitlementValue::StringValue(val) = &self { Ok(&val) } else { Err(&self) } } pub fn is_string_value(&self) -> bool { self.as_string_value().is_ok() } } #[non_exhaustive] #[derive( std::clone::Clone, std::cmp::Eq, std::cmp::Ord, std::cmp::PartialEq, std::cmp::PartialOrd, std::fmt::Debug, std::hash::Hash, )] pub enum GetEntitlementFilterName { CustomerIdentifier, Dimension, /// Unknown contains new variants that have been added since this code was generated. 
Unknown(String), } impl std::convert::From<&str> for GetEntitlementFilterName { fn from(s: &str) -> Self { match s { "CUSTOMER_IDENTIFIER" => GetEntitlementFilterName::CustomerIdentifier, "DIMENSION" => GetEntitlementFilterName::Dimension, other => GetEntitlementFilterName::Unknown(other.to_owned()), } } } impl std::str::FromStr for GetEntitlementFilterName { type Err = std::convert::Infallible; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(GetEntitlementFilterName::from(s)) } } impl GetEntitlementFilterName { pub fn as_str(&self) -> &str { match self { GetEntitlementFilterName::CustomerIdentifier => "CUSTOMER_IDENTIFIER", GetEntitlementFilterName::Dimension => "DIMENSION", GetEntitlementFilterName::Unknown(s) => s.as_ref(), } } pub fn values() -> &'static [&'static str] { &["CUSTOMER_IDENTIFIER", "DIMENSION"] } } impl AsRef<str> for GetEntitlementFilterName { fn as_ref(&self) -> &str { self.as_str() } }
true
18104a20cc786d424fec512cf29d677d338077f2
Rust
ChrisRG/AOC2020
/day16/src/main.rs
UTF-8
2,677
3.15625
3
[]
no_license
// // Advent of Code: Day 16 // #![feature(str_split_once)] use std::time::Instant; fn main() { let now = Instant::now(); let (rules, my_ticket, near_tickets) = parse_data("input.txt"); println!("Ticket scanning error rate: {}", invalid_fields(&near_tickets, &rules)); println!("Time: {}ms", now.elapsed().as_millis()); } #[derive(Debug)] struct Rule { name: String, range1: (u32, u32), range2: (u32, u32), } // Part 1 fn invalid_fields(tickets: &Vec<Vec<u32>>, rules: &[Rule]) -> u32 { let mut invalid_fields: Vec<u32> = Vec::new(); for tick in tickets { for field in tick { if !is_valid(*field, &rules) { invalid_fields.push(*field) } } } return invalid_fields.iter().sum::<u32>(); } fn is_valid(field: u32, rules: &[Rule]) -> bool { for rule in rules { if (rule.range1.0..=rule.range1.1).contains(&field) || (rule.range2.0..=rule.range2.1).contains(&field) { return true; } } false } fn parse_data(filename: &str) -> (Vec<Rule>, Vec<u32>, Vec<Vec<u32>>) { let input = std::fs::read_to_string(filename).unwrap(); let mut rules: Vec<Rule> = Vec::new(); let mut near_tickets: Vec<Vec<u32>> = Vec::new(); let splitted: Vec<&str> = input.split("\n\n").collect(); for rule in splitted[0].lines() { let rname_split: Vec<&str> = rule.trim().split(':').collect(); let rname = rname_split[0]; let r_split = rname_split[1].split_whitespace().collect::<Vec<_>>(); let r1 = r_split[0].trim().split_once('-').unwrap(); let r2 = r_split[2].trim().split_once('-').unwrap(); rules.push(Rule { name: rname[..rname.len()-1].to_string(), range1: (r1.0.parse::<u32>().unwrap(), r1.1.parse::<u32>().unwrap()), range2: (r2.0.parse::<u32>().unwrap(), r2.1.parse::<u32>().unwrap()), }); } let my_ticket = splitted[1].lines().nth(1).unwrap().split(',') .map(|c| c.parse::<u32>().unwrap()) .collect::<Vec<u32>>(); for line in splitted[2].lines().skip(1) { let ticket = line.split(',') .map(|c| c.parse::<u32>().unwrap()) .collect::<Vec<u32>>(); near_tickets.push(ticket); } (rules, my_ticket, near_tickets) } #[cfg(test)] 
mod tests { use super::*; #[test] fn test_part1() { let (rules, _, near_tickets) = parse_data("input_test.txt"); assert_eq!(invalid_fields(&near_tickets, &rules), 71); } #[test] fn test_part2() { let (rules, my_ticket, near_tickets) = parse_data("input_test.txt"); // TODO } }
true
4910f5bcf6e6a0af59fc6e2a6a2ab69b2fe579ff
Rust
Telixia/leetcode-3
/Medium/1658-Minimum Operations to Reduce X to Zero/Two Pointers.rs
UTF-8
603
2.875
3
[]
no_license
impl Solution { pub fn min_operations(nums: Vec<i32>, x: i32) -> i32 { let mut l = nums.len(); let mut r = 0; let mut sum = nums.iter().sum::<i32>(); let mut ret = -1; while r < nums.len() { if sum == x && (ret == -1 || l + r < ret as usize) { ret = (l + r) as i32; } if (sum > x && l > 0) || l + r >= nums.len() { l -= 1; sum -= nums[l]; } else { r += 1; sum += nums[nums.len() - r]; } } ret } }
true
ef605e9c41853e245d2b8c41c23a782619d10b47
Rust
MattiasBuelens/advent-of-code-2020
/src/day19.rs
UTF-8
3,160
3.5
4
[ "MIT" ]
permissive
use std::collections::{HashMap, HashSet};
use std::str::FromStr;

/// One grammar rule: either a literal character, or a union of alternative
/// sequences of sub-rule ids (e.g. `4 1 5 | 3 2`).
#[derive(Debug, Clone)]
pub enum Rule {
    Single(char),
    Union(Vec<Vec<usize>>),
}

impl FromStr for Rule {
    type Err = ();

    // Parses the right-hand side of a rule line: a quoted char becomes
    // `Single`; otherwise `|`-separated, space-separated id sequences.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(s) = s.strip_prefix('"') {
            Ok(Rule::Single(s.chars().next().unwrap()))
        } else {
            let options = s
                .split(" | ")
                .map(|option| option.split(' ').map(|id| id.parse().unwrap()).collect())
                .collect();
            Ok(Rule::Union(options))
        }
    }
}

type Input = (HashMap<usize, Rule>, Vec<String>);

// Input format: "id: rule" lines, a blank line, then one message per line.
#[aoc_generator(day19)]
pub fn input_generator(input: &str) -> Input {
    let mut lines = input.lines();
    let rules = lines
        .by_ref()
        .take_while(|line| !line.is_empty())
        .map(|line| {
            let mut parts = line.split(": ");
            let id = parts.next().unwrap().parse().unwrap();
            let rule = parts.next().unwrap().parse().unwrap();
            (id, rule)
        })
        .collect();
    let messages = lines.map(|line| line.to_owned()).collect();
    (rules, messages)
}

// NFA-style matching: a "state" is the yet-unconsumed remainder of the input
// string. Matching a rule against one state yields the set of possible
// remainders, which keeps the part-2 recursive rules (8 and 11) tractable.
fn match_rule_in_state<'a>(
    rule: &Rule,
    rules: &HashMap<usize, Rule>,
    state: &'a str,
) -> HashSet<&'a str> {
    match rule {
        // A literal consumes exactly one char; empty set if it doesn't match.
        Rule::Single(c) => state.strip_prefix(*c).into_iter().collect(),
        Rule::Union(options) => {
            // Try all of the options
            options
                .iter()
                .flat_map(|sequence| {
                    // Match all sub rules in sequence
                    sequence
                        .iter()
                        .fold(Some(state).into_iter().collect(), |states, sub_rule| {
                            match_rule(*sub_rule, rules, &states)
                        })
                })
                .collect()
        }
    }
}

// Matches `rule_id` against every candidate state and unions the results.
fn match_rule<'a>(
    rule_id: usize,
    rules: &HashMap<usize, Rule>,
    states: &HashSet<&'a str>,
) -> HashSet<&'a str> {
    if states.is_empty() {
        return HashSet::new();
    }
    let rule = rules.get(&rule_id).unwrap();
    // Advance through all possible states simultaneously
    states
        .into_iter()
        .flat_map(|&state| match_rule_in_state(rule, rules, state))
        .collect()
}

fn match_rule_complete(rule_id: usize, rules: &HashMap<usize, Rule>, s: &str) -> bool {
    let final_states = match_rule(rule_id, rules, &Some(s).into_iter().collect());
    // At least one match must have consumed the entire string
    final_states.contains(&"")
}

#[aoc(day19, part1)]
pub fn part1(input: &Input) -> usize {
    let (rules, messages) = input;
    messages
        .iter()
        .filter(|message| match_rule_complete(0, rules, message))
        .count()
}

// Part 2 replaces rules 8 and 11 with (right-recursive) looping variants;
// the set-of-states matcher handles the recursion without special-casing.
#[aoc(day19, part2)]
pub fn part2(input: &Input) -> usize {
    let (rules, messages) = input;
    let mut rules = rules.clone();
    rules.insert(8, "42 | 42 8".parse().unwrap());
    rules.insert(11, "42 31 | 42 11 31".parse().unwrap());
    messages
        .iter()
        .filter(|message| match_rule_complete(0, &rules, message))
        .count()
}
true
3b9136241b8e7ea01b7ccc43c00bd037a3221be1
Rust
yurapyon/maru.rs
/src/state_machine.rs
UTF-8
3,433
3.578125
4
[ "MIT" ]
permissive
/// A transition requested by the active state at the end of a frame.
pub enum Transition<I, E> {
    /// Remove the active state and resume the one beneath it.
    Pop,
    /// Pause the active state and activate a new one on top of it.
    Push(Box<dyn State<I, E>>),
    /// Stop the active state and replace it with a new one.
    Switch(Box<dyn State<I, E>>),
}

/// A pushdown stack of `State`s; only the top state is "active", the rest
/// receive the `*_hidden` callbacks each frame.
pub struct StateMachine<I, E> {
    states: Vec<Box<dyn State<I, E>>>,
    /// Transition produced by the last `frame`; applied by
    /// `maybe_do_transition`.
    transition: Option<Transition<I, E>>,
}

// note:
// doesnt pop to empty
// no outside way to push a state, eveything has to be handled by states
impl<I, E> StateMachine<I, E> {
    pub fn new(initial_state: Box<dyn State<I, E>>) -> Self {
        Self {
            states: vec![initial_state],
            transition: None,
        }
    }

    /// Notify the top state that it has become active.
    pub fn start(&mut self, immut_data: &I) {
        self.states
            .last_mut()
            .expect("state stack underflow")
            .on_start(immut_data);
    }

    /// Apply the transition requested by the most recent `frame`, if any.
    pub fn maybe_do_transition(&mut self, immut_data: &I) {
        if let Some(tr) = self.transition.take() {
            match tr {
                Transition::Pop => self.pop(immut_data),
                Transition::Push(st) => self.push(st, immut_data),
                Transition::Switch(st) => self.switch(st, immut_data),
            }
        }
    }

    /// Forward an event to the active (top) state only.
    pub fn handle_event(&mut self, immut_data: &I, ev: &E) {
        self.states
            .last_mut()
            .expect("state stack underflow")
            .handle_event(immut_data, ev);
    }

    /// Run one variable-timestep frame: hidden states get `frame_hidden`,
    /// the top state gets `frame` and may request a transition.
    pub fn frame(&mut self, immut_data: &I) {
        let (last, hidden) = self
            .states
            .split_last_mut()
            .expect("state stack underflow");
        hidden.iter_mut().for_each(|s| s.frame_hidden(immut_data));
        self.transition = last.frame(immut_data);
    }

    /// Run one fixed-timestep frame; fixed frames cannot request transitions.
    pub fn fixed_frame(&mut self, immut_data: &I) {
        let (last, hidden) = self
            .states
            .split_last_mut()
            .expect("state stack underflow");
        hidden
            .iter_mut()
            .for_each(|s| s.fixed_frame_hidden(immut_data));
        last.fixed_frame(immut_data);
    }

    // Stop and discard the top state, resuming the one below. The machine
    // never pops to empty by design.
    fn pop(&mut self, immut_data: &I) {
        // `is_empty()` instead of `len() == 0` (clippy::len_zero).
        if self.states.is_empty() {
            panic!("state stack underflow");
        } else if self.states.len() == 1 {
            panic!("attempting to .pop() last state on stack");
        }

        let mut prev_state = self.states.pop().unwrap();
        prev_state.on_stop(immut_data);

        self.states.last_mut().unwrap().on_resume(immut_data);
    }

    // Pause the top state and activate `next` above it.
    // (`mut next` in the parameter list replaces the redundant
    // `let mut next = next;` rebinding of the original.)
    fn push(&mut self, mut next: Box<dyn State<I, E>>, immut_data: &I) {
        self.states
            .last_mut()
            .expect("state stack underflow")
            .on_pause(immut_data);

        next.on_start(immut_data);
        self.states.push(next);
    }

    // Stop the top state and replace it with `next` in one step.
    fn switch(&mut self, mut next: Box<dyn State<I, E>>, immut_data: &I) {
        let mut state = self.states.pop().expect("state stack underflow");
        state.on_stop(immut_data);

        next.on_start(immut_data);
        self.states.push(next);
    }
}

/// Lifecycle callbacks for a state. Every method has a no-op default except
/// `frame`, which may return a `Transition` request.
pub trait State<I, E> {
    fn on_start(&mut self, _immut_data: &I) {}
    fn on_stop(&mut self, _immut_data: &I) {}

    fn on_pause(&mut self, _immut_data: &I) {}
    fn on_resume(&mut self, _immut_data: &I) {}

    fn handle_event(&mut self, _immut_data: &I, _ev: &E) {}

    fn frame(&mut self, _immut_data: &I) -> Option<Transition<I, E>> {
        None
    }
    fn fixed_frame(&mut self, _immut_data: &I) {}

    fn frame_hidden(&mut self, _immut_data: &I) {}
    fn fixed_frame_hidden(&mut self, _immut_data: &I) {}
}
true
b7c789c21daa0a2264e265cbf2e459ec6830ce75
Rust
alvarobasia/fm-rust-CLI
/exceptions/src/lib.rs
UTF-8
718
3.453125
3
[ "MIT" ]
permissive
use std::fmt; use std::convert; use std::io; #[derive(Debug)] pub struct Error<'a>{ name : &'a str, err: Option<io::Error> } impl fmt::Display for Error<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f,"{}", self.name) } } impl convert::From<io::Error> for Error<'_> { fn from(error: io::Error) -> Self { Self {name: "error" , err : Some(error) } } } impl<'a> Error<'a> { pub fn new(name: &'a str) -> Self { Error { name, err: None } } pub fn print_error(&self){ eprint!("{}", &self) } } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
true
8e4e7b87dd8f1d61e7c1cda51cd510709745cde5
Rust
mirrexagon/lmarkov
/src/lib.rs
UTF-8
5,845
3.265625
3
[ "MIT", "Apache-2.0" ]
permissive
// An order-N Markov chain over whitespace-separated words, with optional
// JSON (de)serialization behind the "serialization" feature.

#[cfg(test)]
mod tests;

use std::collections::HashMap;
#[cfg(feature = "serialization")]
use std::fmt;

use rand::seq::SliceRandom;

#[cfg(feature = "serialization")]
use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};

// String encoding of a ChainKey (used as a JSON map key): entries are
// separated by a space; a lone newline stands for the "no word" (None) slot.
#[cfg(feature = "serialization")]
const KEY_NO_WORD: &str = "\n";
#[cfg(feature = "serialization")]
const KEY_SEPARATOR: &str = " ";

/// A sequence of words, used as the key in a `Chain`'s map.
#[derive(Debug, Hash, Clone, PartialEq, Eq)]
pub struct ChainKey(Vec<Option<String>>);

impl ChainKey {
    /// Key of `order` `None`s: the state at the boundary of a training input.
    pub fn blank(order: usize) -> Self {
        ChainKey(vec![None; order])
    }

    pub fn from_vec(vec: Vec<Option<String>>) -> Self {
        ChainKey(vec)
    }

    pub fn to_vec(self) -> Vec<Option<String>> {
        self.0
    }

    /// Slide the window forward by one word: drop the oldest entry and
    /// append `next_word`.
    pub fn advance(&mut self, next_word: &Option<String>) {
        self.0 = self.0[1..self.0.len()].to_vec();
        self.0.push(next_word.clone());
    }

    // Encode the key as a single string (see KEY_NO_WORD / KEY_SEPARATOR).
    // NOTE(review): a word containing a space would break round-tripping;
    // training splits on whitespace, so such words shouldn't occur — confirm.
    #[cfg(feature = "serialization")]
    fn to_string(&self) -> String {
        let mut result = String::new();
        let mut first = true;

        for word in &self.0 {
            if first {
                first = false;
            } else {
                result.push_str(KEY_SEPARATOR);
            }

            if let Some(word) = word {
                result.push_str(&word);
            } else {
                result.push_str(KEY_NO_WORD);
            }
        }

        result
    }

    /// TODO: Check input for correctness.
    #[cfg(feature = "serialization")]
    fn from_str(string: &str) -> Self {
        let mut result = Vec::new();

        for word in string.split(KEY_SEPARATOR) {
            if word == KEY_NO_WORD {
                result.push(None);
            } else {
                result.push(Some(word.to_string()));
            }
        }

        ChainKey(result)
    }
}

// Serialize a ChainKey as its string form so it can be a JSON object key.
#[cfg(feature = "serialization")]
impl Serialize for ChainKey {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}

#[cfg(feature = "serialization")]
struct ChainKeyVisitor;

#[cfg(feature = "serialization")]
impl<'de> Visitor<'de> for ChainKeyVisitor {
    type Value = ChainKey;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a string")
    }

    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(ChainKey::from_str(value))
    }
}

#[cfg(feature = "serialization")]
impl<'de> Deserialize<'de> for ChainKey {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_str(ChainKeyVisitor)
    }
}

/// A Markov chain.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serialization", derive(Serialize, Deserialize))]
pub struct Chain {
    /// A map from `order` words to the possible following words.
    map: HashMap<ChainKey, Vec<Option<String>>>,
    order: usize,
}

impl Chain {
    pub fn new(order: usize) -> Self {
        Chain {
            map: HashMap::new(),
            order,
        }
    }

    /// Feed one training string into the chain. Words are whitespace-split;
    /// `None` padding marks the start and end of the input.
    pub fn train(&mut self, string: &str) {
        // Create a Vec that starts with `self.order` `None`s, then all the
        // words in the string wrapped in `Some()`, then a single `None`.
        let mut words = vec![None; self.order];
        for word in string.split_whitespace() {
            words.push(Some(word.to_string()));
        }
        words.push(None);

        // Now slide a window over `words` to produce slices where the last
        // element is the resulting word and the rest is the key to that word.
        for window in words.windows(self.order + 1) {
            let key = &window[..self.order];
            let word = &window[self.order];

            let map_entry = self
                .map
                .entry(ChainKey::from_vec(key.to_vec()))
                .or_insert(Vec::new());
            map_entry.push(word.clone());
        }
    }

    // Generate a string.
    pub fn generate(&self) -> Option<String> {
        // Start with a key of all `None` to match starting from the start of
        // one of the training inputs.
        let seed = ChainKey::blank(self.order);
        self.generate_from_seed(&seed)
    }

    /// Generate a string based on some seed words.
    /// Returns `None` if there is no way to start a generated string with
    /// that seed, eg. it is longer than `self.order`.
    pub fn generate_from_seed(&self, seed: &ChainKey) -> Option<String> {
        if !self.map.contains_key(seed) {
            return None;
        }

        let mut rng = rand::thread_rng();
        let mut result: Vec<String> = Vec::new();
        let mut cursor = seed.clone();

        loop {
            let possible_words = &self.map[&cursor];

            // Any entry in the map is guaranteed to have at least one word in
            // it, so this unwrap is okay.
            let next_word = possible_words.choose(&mut rng).unwrap();

            if let Some(next_word) = next_word {
                result.push(next_word.clone());
            } else {
                // Terminator word.
                break;
            }

            // Advance the cursor along by popping the front and appending the
            // new word on the end.
            cursor.advance(next_word);
        }

        Some(result.join(" "))
    }

    /// Serialize this chain to JSON.
    #[cfg(feature = "serialization")]
    pub fn to_json(&self) -> serde_json::Result<String> {
        serde_json::to_string(self)
    }

    /// Load a chain from JSON.
    #[cfg(feature = "serialization")]
    pub fn from_json(json: &str) -> serde_json::Result<Self> {
        serde_json::from_str(json)
    }
}
d6fd3712f891c1cb6d934da5b720f31534c4a396
Rust
ltfschoen/collateral
/runtime/src/debt/test.rs
UTF-8
7,736
2.5625
3
[ "Unlicense" ]
permissive
// Mock-runtime unit tests for the debt module (Substrate). A `Test` runtime
// is assembled from the system, balances, timestamp, erc721 and debt modules,
// then exercised via `with_externalities`.

#[cfg(test)] // allows us to compile code, based on the "test" flag.
use super::*;
use support::{impl_outer_origin};
use runtime_io::with_externalities;
use primitives::{H256, Blake2Hasher}; //called substrate_primitives as primitives
use support::{assert_ok, assert_noop};
use runtime_primitives::{
    BuildStorage,
    traits::{IdentityLookup, BlakeTwo256}, // Test wrapper for this specific type/ looks up the identity; returns Result
    testing::{Digest, DigestItem, Header}
};

// impl outer origin
impl_outer_origin! {
    pub enum Origin for Test {}
}

// For testing the module, we construct most of a mock runtime. This means
// first constructing a configuration type (`Test`) which `impl`s each of the
// configuration traits of modules we want to use.
#[derive(Clone, Eq, PartialEq)]
pub struct Test;

impl system::Trait for Test {
    // We are just aliasing the types with the type, or some easier abstration!!
    type Origin = Origin;
    type Index = u64;
    type BlockNumber = u64;
    type Hash = H256;
    type Hashing = BlakeTwo256;
    type Digest = Digest;
    type AccountId = u64;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = ();
    type Log = DigestItem;
}

// code above inherits but still have to declare it in test
impl balances::Trait for Test {
    type Balance = u64; // aliasing u64 as "balance" to mock the balance
    type OnFreeBalanceZero = ();
    type OnNewAccount = ();
    type Event = ();
    type TransactionPayment = ();
    type TransferPayment = ();
    type DustRemoval = ();
}

impl timestamp::Trait for Test {
    type Moment = u64;
    type OnTimestampSet = ();
}

impl erc721::Trait for Test{
    type Event = ();
    // type Currency = Balance;
}

// this module, implements the traits.
impl Trait for Test {
    type Event = ();
    type Currency = balances::Module<Test>;
    // any custom traits from this module?
}

// Alias the module names for easy usage
type Debt = Module<Test>;
type Balance = balances::Module<Test>;
type Timestamp = timestamp::Module<Test>;
type ERC = erc721::Module<Test>;

// This function basically just builds a genesis storage key/value store according to
// our desired mockup.
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
    let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap().0;
    t.extend(balances::GenesisConfig::<Test>{
        balances: vec![(0, 100),(1, 100),(2, 100)], //initializes some accts with balances
        transaction_base_fee: 0,
        transaction_byte_fee: 0,
        transfer_fee: 0,
        creation_fee: 0,
        existential_deposit: 0,
        vesting: vec![],
    }.build_storage().unwrap().0);
    t.extend(timestamp::GenesisConfig::<Test>{
        minimum_period: 5,
    }.build_storage().unwrap().0);
    t.into() // what does this do?
    // NOTE(review): `.into()` converts the raw storage map into
    // `TestExternalities` — presumably via a `From` impl in runtime_io; confirm.
}

// UNIT Tests
#[test]
fn should_create_debt_request() {
    with_externalities(&mut new_test_ext(), || {
        // uses the Alias
        assert_ok!(Debt::borrow(Origin::signed(0), 0, 1, 100, 1, 1, 2));
        // Timestamp hasn't incremented, so hash should stay the time
        assert_noop!(Debt::borrow( Origin::signed(0), 0, 1, 100, 1, 1, 2), "Error: Debt request already exists");
    });
}

#[test]
fn should_fulfill_request() {
    with_externalities(&mut new_test_ext(), || {
        // set up
        ERC::create_token(Origin::signed(0));
        let token_id = ERC::token_by_index(0);

        // uses the aliasing 5%
        assert_ok!(Debt::borrow(Origin::signed(0), 0, 1, 100, 500, 1, 3));
        let debt_id = Debt::get_debt_id(0);

        // Debt isn't collateralized yet
        assert!(Debt::fulfill(Origin::signed(1), debt_id).is_err());

        // should be able to fulfill debt
        assert_ok!(ERC::collateralize_token(Origin::signed(0), token_id, debt_id));
        assert!(Debt::fulfill(Origin::signed(1), debt_id).is_ok());
        // creditor (acct 1) paid out their 100; borrower (acct 0) now holds 200
        assert_eq!(0, Balance::free_balance(&1));
        assert_eq!(200, Balance::free_balance(&0));

        // 3rd person cannot fulfill debt... bc creditor exists now.
        assert!(Debt::fulfill(Origin::signed(1), debt_id).is_err());
    });
}

#[test]
fn can_repay() {
    with_externalities(&mut new_test_ext(), || {
        // SETUP... is there a way to refactor this
        ERC::create_token(Origin::signed(1));
        let token_id = ERC::token_by_index(0);
        Debt::borrow(Origin::signed(1), 1, 1, 100, 5, 1, 3);
        let debt_id = Debt::get_debt_id(0);
        ERC::collateralize_token(Origin::signed(1), token_id, debt_id);
        Debt::fulfill(Origin::signed(2), debt_id).is_ok();

        // repay should clear debt, return collateral
        assert_ok!(Debt::repay(Origin::signed(1), debt_id, 50));
        assert_eq!(50, Balance::free_balance(&2));
    });
}

#[test]
fn can_repay_interest_first() {
    with_externalities(&mut new_test_ext(), || {
        ERC::create_token(Origin::signed(1));
        let token_id = ERC::token_by_index(0);
        Debt::borrow(Origin::signed(1), 1, 1, 100, 1000, 1, 3); //100 loan, 10%, 1 period
        let debt_id = Debt::get_debt_id(0);
        ERC::collateralize_token(Origin::signed(1), token_id, debt_id);
        Debt::fulfill(Origin::signed(2), debt_id).is_ok();

        assert_ok!(Debt::repay(Origin::signed(1), debt_id, 0));
        assert_eq!(Debt::get_debt(debt_id).principal, 100);

        assert_ok!(Debt::repay(Origin::signed(1), debt_id, 10)); // repaying before interest accrual
        assert_eq!(Debt::get_debt(debt_id).principal, 90);
        assert_eq!(Debt::get_debt(debt_id).interest, 0);

        Timestamp::set_timestamp(2);
        assert_ok!(Debt::repay(Origin::signed(1), debt_id, 10)); //paying partial
        assert_eq!(Debt::get_debt(debt_id).principal, 90);
        assert_eq!(Debt::get_debt(debt_id).interest, 8);

        // uses remainder to pay principal
        assert_ok!(Debt::repay(Origin::signed(1), debt_id, 19)); //paying partial
        assert_eq!(Debt::get_debt(debt_id).principal, 79);
        assert_eq!(Debt::get_debt(debt_id).interest, 0);
    });
}

#[test]
fn can_seize() {
    with_externalities(&mut new_test_ext(), || {
        ERC::create_token(Origin::signed(1));
        let token_id = ERC::token_by_index(0);
        Debt::borrow(Origin::signed(1), 1, 1, 100, 500, 1, 3); //term length is 3, int period is 1
        let debt_id = Debt::get_debt_id(0);
        ERC::collateralize_token(Origin::signed(1), token_id, debt_id);
        Debt::fulfill(Origin::signed(2), debt_id).is_ok(); // term start is 0

        // should accurately increment time and update debts
        assert!(Debt::seize(Origin::signed(2), debt_id).is_err()); //should fail
        Timestamp::set_timestamp(6);
        assert!(Debt::seize(Origin::signed(2), debt_id).is_ok()); //should work
    });
}

#[test]
fn can_simple_interest() {
    with_externalities(&mut new_test_ext(), || {
        ERC::create_token(Origin::signed(1));
        let token_id = ERC::token_by_index(0);

        // 10% interest per period
        // 10: interest period, every 10 seconds interest is compounded
        // 500 seconds before collat is seized
        Debt::borrow(Origin::signed(1), 1, 1, 100, 1000, 10, 500);
        let debt_id = Debt::get_debt_id(0);
        ERC::collateralize_token(Origin::signed(1), token_id, debt_id);
        Debt::fulfill(Origin::signed(2), debt_id).is_ok(); // term start is 0

        // balance should be 100, interest should be 0
        Timestamp::set_timestamp(6);
        assert!(Debt::update_balance(debt_id).is_ok());
        assert_eq!(Debt::get_debt(debt_id).principal, 100);
        assert_eq!(Debt::get_debt(debt_id).interest, 0);

        // balance should be 110, interest should be 10
        Timestamp::set_timestamp(10);
        assert!(Debt::update_balance(debt_id).is_ok());
        assert_eq!(Debt::get_debt(debt_id).interest, 10);

        // balance should be 121, interest should be 21
        Timestamp::set_timestamp(21);
        assert!(Debt::update_balance(debt_id).is_ok());
        assert_eq!(Debt::get_debt(debt_id).interest, 20);

        Timestamp::set_timestamp(59);
        assert!(Debt::update_balance(debt_id).is_ok());
        assert_eq!(Debt::get_debt(debt_id).interest, 50);
    });
}
b8cd2bef982f12555b17425a13aea9e22906776e
Rust
FrostyHitoshura/woxar
/src/main.rs
UTF-8
34,929
2.515625
3
[ "MIT" ]
permissive
//! Might and Magic: World of Xeen Archiver. Implemented based on documentation available at: //! //! https://xeen.fandom.com/wiki/CC_File_Format use anyhow::{anyhow, bail, ensure, Context}; use bitvec::{prelude::*, vec::BitVec}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use clap::{ App, Arg, ArgMatches, Error as ClapError, ErrorKind::{HelpDisplayed, VersionDisplayed}, SubCommand, }; use smallvec::SmallVec; use std::{ collections::{btree_map::Entry::Vacant, BTreeMap}, convert::TryFrom, env, ffi::OsStr, fmt::Display, fs::File, io::{ stdin, stderr, stdout, Cursor, Error as IoError, Read, Seek, SeekFrom, Write, }, path::Path, process::exit, str, u16, }; use thiserror::Error; use vfs::{PhysicalFS, VfsPath}; use woxar::name::{WoxName, WoxHashedName, WoxReverseDictionary, ReadWoxReverseDictionary, WoxNameError}; const VERSION: &str = env!("CARGO_PKG_VERSION"); const AUTHOR: &str = env!("CARGO_PKG_AUTHORS"); #[derive(Debug, Error)] enum WoxError { #[error("Invalid file name")] InvalidFileName(#[from] WoxNameError), #[error("Found invalid data in the table of contents")] ReadToc, #[error("Can't encode file '{0}' as hash {1:#06x} is already in use")] GenerateToc(String, WoxHashedName), #[error("Hit a race condition")] Race, #[error("The former has {0} file(s) while the latter has {1} file(s)")] DifferentFileCount(usize, usize), #[error("The table of contents differs")] TocDiffers, #[error("One or more file content differs")] ContentDiffers, #[error("Failed to find file hash {0} in archive")] NoHash(WoxHashedName), #[error("Requires 2 or more archives to compare")] Requires2PlusFiles, #[error("No subcommand provided")] NoSubcommand, #[error("Archives '{0}' and '{1}' differ")] ArchivesDiffer(String, String), } const ROTATE_ADD_INITIAL: u8 = 0xac; // None for Crypt: Used for the initial 2 bytes of the file #[derive(Clone)] enum Crypt { RotateAdd(u8), // Used for the table of contents Xor, // Used for the file contents } type FileCount = u16; type FileSize = 
u16; struct Contents { data: Vec<u8>, list: WoxReverseDictionary, } #[derive(PartialEq, Debug)] enum Direction { Read, Write, } struct Decrypt<S> { cursor: S, crypt: Option<Crypt>, } struct Encrypt<S> { sink: S, crypt: Option<Crypt>, } #[derive(Copy, Clone)] struct TocEntry { id: WoxHashedName, offset: u32, // XXX: Actually an u24... len: FileSize, // In the file... padding: u8 which is expected to be always 0 } struct TocIter<'a> { cursor: Decrypt<Cursor<&'a [u8]>>, idx: usize, total: usize, // A bit array where we keep track of all seen hashes. verify: BitVec, } struct PayloadIter<'a> { toc: TocIter<'a>, contents_crypt: Option<Crypt>, contents: &'a Contents, } struct PayloadBufferedIter<'a> { tocs: Vec<TocEntry>, idx: usize, contents: &'a Contents, contents_crypt: Option<Crypt>, } impl Contents { fn new(data: Vec<u8>, list: WoxReverseDictionary) -> Contents { Contents { data, list } } fn find_entry(&self, hash: WoxHashedName) -> Option<&WoxName> { self.list.inner().get(&hash) } fn entry_name(&self, entry: &TocEntry) -> String { match self.find_entry(entry.id) { Some(entry) => entry.inner().to_owned(), None => format!("{}", entry.id), } } fn read_cursor_at( &self, offset: u64, crypt: Option<Crypt>, ) -> Result<Decrypt<Cursor<&[u8]>>, IoError> { let mut decrypt = Decrypt::new(Cursor::new(self.data.as_slice()), crypt); decrypt.seek(SeekFrom::Start(offset))?; Ok(decrypt) } fn read_cursor(&self) -> Decrypt<Cursor<&[u8]>> { Decrypt::new(Cursor::new(&self.data), None) } fn file_count(&self) -> Result<FileCount, IoError> { self.read_cursor().read_u16::<LittleEndian>() } fn toc_iter(&self) -> Result<TocIter, IoError> { let mut cursor = self.read_cursor(); // XXX: Remplace with self.file_count() let total = cursor.read_u16::<LittleEndian>()? 
as usize; cursor.crypt = Some(Crypt::RotateAdd(ROTATE_ADD_INITIAL)); Ok(TocIter { cursor, idx: 0, total, verify: bitvec![0; WoxHashedName::MAX as usize], }) } fn payload_iter(&self, contents_crypt: Option<Crypt>) -> Result<PayloadIter, anyhow::Error> { Ok(PayloadIter { toc: self.toc_iter()?, contents_crypt, contents: self, }) } fn payload_filtered_ordered_iter( &self, hashes: &[WoxHashedName], contents_crypt: Option<Crypt>, ) -> Result<PayloadBufferedIter, anyhow::Error> { // Read the whole toc and save the entries we are interested for // XXX: This is suboptimal: if we found all the entries, we can stop reading the toc // XXX: We clone TocEntry 2 times in this function, looks excessive let mut acc: Vec<Option<TocEntry>> = vec![None; hashes.len()]; for entry_result in self.toc_iter()? { let entry = entry_result?; hashes.iter().enumerate().for_each(|(idx, hash)| { if entry.id == *hash { acc[idx] = Some(entry); } }); } let results = acc .iter() .enumerate() .map(|(idx, optional_entry)| { if let Some(entry) = optional_entry { Ok(*entry) } else { // XXX: Improve error message Err(WoxError::NoHash(hashes[idx])) } }) .collect::<Result<Vec<TocEntry>, WoxError>>()?; Ok(PayloadBufferedIter { tocs: results, idx: 0, contents: self, contents_crypt, }) } fn fetch_payload( &self, entry: &TocEntry, crypt: Option<Crypt>, ) -> Result<Vec<u8>, anyhow::Error> { let mut payload = vec![0; entry.len as usize]; self.read_cursor_at(entry.offset as u64, crypt)? 
.read_exact(&mut payload)?; Ok(payload) } } impl Crypt { fn crypt_byte(&mut self, direction: Direction, byte: u8) -> u8 { const ROTATE: u32 = 6; const ADD: u8 = 0x67; match (direction, self) { (Direction::Read, Crypt::RotateAdd(ref mut state)) => { let decrypted = u8::wrapping_add(byte.rotate_right(ROTATE), *state); *state = u8::wrapping_add(*state, ADD); decrypted } (Direction::Write, Crypt::RotateAdd(ref mut state)) => { let crypted = u8::wrapping_sub(byte, *state).rotate_left(ROTATE); *state = u8::wrapping_add(*state, ADD); crypted } (_, Crypt::Xor) => byte ^ 0x35, } } } fn crypt(optional_crypt: &mut Option<Crypt>, direction: Direction, byte: u8) -> u8 { optional_crypt .as_mut() .map_or(byte, |crypt| crypt.crypt_byte(direction, byte)) } impl<S> Decrypt<S> { fn new(cursor: S, crypt: Option<Crypt>) -> Self { Self { cursor, crypt } } } impl<S> Read for Decrypt<S> where S: Read, { fn read(&mut self, buf: &mut [u8]) -> Result<usize, IoError> { let bytes_read = self.cursor.read(buf)?; buf[..bytes_read] .iter_mut() .for_each(|byte| *byte = crypt(&mut self.crypt, Direction::Read, *byte)); Ok(bytes_read) } } impl<S> Seek for Decrypt<S> where S: Seek, { fn seek(&mut self, pos: SeekFrom) -> Result<u64, IoError> { self.cursor.seek(pos) } } impl<S> Encrypt<S> { fn new(sink: S, crypt: Option<Crypt>) -> Self { Self { sink, crypt } } } impl<S> Write for Encrypt<S> where S: Write, { fn write(&mut self, buf: &[u8]) -> Result<usize, IoError> { let crypted = buf .iter() .map(|byte| crypt(&mut self.crypt, Direction::Write, *byte)) .collect::<Vec<u8>>(); self.sink.write_all(&crypted)?; Ok(buf.len()) } fn flush(&mut self) -> Result<(), IoError> { self.sink.flush() } } impl TocEntry { fn new<S>(source: &mut S) -> Result<TocEntry, anyhow::Error> where S: Read, { let entry = TocEntry { id: WoxHashedName::from(source.read_u16::<LittleEndian>()?), offset: source.read_u24::<LittleEndian>()?, len: source.read_u16::<LittleEndian>()?, }; // Ensure that the padding byte is set to 0 if 
source.read_u8()? != 0 { bail!(WoxError::ReadToc) } else { Ok(entry) } } fn write<S>(&self, sink: &mut S) -> Result<(), IoError> where S: Write, { sink.write_u16::<LittleEndian>(self.id.raw())?; sink.write_u24::<LittleEndian>(self.offset)?; sink.write_u16::<LittleEndian>(self.len)?; sink.write_u8(0) } } impl<'a> Iterator for TocIter<'a> { type Item = Result<TocEntry, anyhow::Error>; fn next(&mut self) -> Option<Self::Item> { self.idx += 1; if self.idx <= self.total { Some(TocEntry::new(&mut self.cursor).and_then(|entry| { let bit = entry.id.raw() as usize; if self.verify[bit] { Err(anyhow!(WoxError::ReadToc)) } else { self.verify.set(bit, true); Ok(entry) } })) } else { None } } } impl<'a> Iterator for PayloadIter<'a> { type Item = Result<(TocEntry, Vec<u8>), anyhow::Error>; fn next(&mut self) -> Option<Self::Item> { self.toc.next().map(|entry_result| { entry_result.and_then(|entry| { self.contents .fetch_payload(&entry, self.contents_crypt.clone()) .map(|decrypted| (entry, decrypted)) }) }) } } impl<'a> Iterator for PayloadBufferedIter<'a> { type Item = Result<(TocEntry, Vec<u8>), anyhow::Error>; fn next(&mut self) -> Option<Self::Item> { if self.idx < self.tocs.len() { let toc = self.tocs[self.idx]; self.idx += 1; Some( self.contents .fetch_payload(&toc, self.contents_crypt.clone()) .map(|decrypted| (toc, decrypted)), ) } else { None } } } fn extract_cc_file<A, S, L>( stdout: &mut S, archive_stream: &mut A, list_stream: L, root_directory: &VfsPath, hashes_to_extract: &[WoxHashedName], contents_crypt: Option<Crypt>, ) -> Result<(), anyhow::Error> where A: Read, S: Write, L: Read, { let mut data = Vec::<u8>::new(); archive_stream.read_to_end(&mut data)?; let contents = Contents::new(data, WoxReverseDictionary::try_from(ReadWoxReverseDictionary(list_stream))?); root_directory.create_dir_all()?; if !hashes_to_extract.is_empty() { // Extract specific files arm: writing order is important to respect the order the user set // by the user on the command line contents 
.payload_filtered_ordered_iter(hashes_to_extract, contents_crypt)? .try_for_each( |payload_result: Result<(TocEntry, Vec<u8>), anyhow::Error>| -> Result<(), anyhow::Error> { let (_entry, contents) = payload_result?; stdout.write_all(&contents)?; Ok(()) }, ) } else { // Extract all files arm: writing order isn't important contents.payload_iter(contents_crypt)?.try_for_each( |payload_result: Result<(TocEntry, Vec<u8>), anyhow::Error>| -> Result<(), anyhow::Error> { let payload = payload_result?; let mut file = root_directory.join(&contents.entry_name(&payload.0))?.create_file()?; file.write_all(&payload.1)?; Ok(()) }, ) } } struct FilePayload { entry: TocEntry, payload: Vec<u8>, } fn create_cc_file<W: Write>( archive_writer: W, root_directory: &VfsPath, contents_crypt: Option<Crypt>, ) -> Result<(), anyhow::Error> { const TOC_START: usize = 2; const TOC_EACH_SIZE: usize = 8; // sizeof(TocEntry) + 1 let mut cache: BTreeMap<WoxHashedName, FilePayload> = BTreeMap::new(); // Starts with a u16 about the number of files present in the archive let mut archive_size = TOC_START; root_directory .read_dir()? .try_for_each(|path| -> Result<(), anyhow::Error> { let mut file_reader = path.open_file()?; let mut payload = vec![]; file_reader.read_to_end(&mut payload)?; // The hashing algorithm used only considers the file name. Even if the archive doesn't // support directories, all VfsPath are prefixed by a '/' that we need to remove. // Consider anything after the last slash to be the file name. let full_path_str = path.as_str(); let path_str = &full_path_str[full_path_str.rfind('/').unwrap() + 1..]; let toc = TocEntry { // If "extract_cc_file" doesn't know the file name, it will output the hash in // decimal as file name. So first try to parse the file name as a u16 and it // works, then assume it's the hash. Otherwise, it's a real file name and // compute the hash from it. 
id: path_str .parse::<u16>() .map(WoxHashedName::from) .unwrap_or_else(|_| WoxHashedName::from(path_str.as_bytes())), offset: 0, // Will be filled later len: FileSize::try_from(payload.len())?, }; // Make sure the file we add doesn't clash hash-wise with a file we already cached if let Vacant(slot) = cache.entry(toc.id) { let file_payload = FilePayload { entry: toc, payload, }; // Make sure that the size we read from the directory entry matches with what we // read from the actual file... if file_payload.entry.len as usize == file_payload.payload.len() { archive_size += TOC_EACH_SIZE + file_payload.payload.len(); slot.insert(file_payload); Ok(()) } else { // Race condition between readdir and when we actually read the file? Err(anyhow!(WoxError::Race)) } } else { Err(anyhow!(WoxError::GenerateToc( path.as_str().to_string(), toc.id ))) } })?; let archive_files = u16::try_from(cache.len())?; let mut encrypt = Encrypt::new(archive_writer, None); let mut payload_offset = TOC_START + TOC_EACH_SIZE * archive_files as usize; // Step 1: Write the number of files in this archive encrypt.write_u16::<LittleEndian>(archive_files)?; // Step 2: Get ready and write the table of contents encrypt.crypt = Some(Crypt::RotateAdd(ROTATE_ADD_INITIAL)); cache .values_mut() .try_for_each(|file_payload| -> Result<(), anyhow::Error> { // Modify the value in the hash since we will use this information in step 3 // XXX: Incorrect, offset is actually an u24... file_payload.entry.offset = u32::try_from(payload_offset)?; payload_offset += file_payload.payload.len(); Ok(file_payload.entry.write(&mut encrypt)?) })?; // Step 3: Get ready and write all the contents of the archive encrypt.crypt = contents_crypt; cache .values() .try_for_each(|file_payload| encrypt.write_all(&file_payload.payload))?; // Step 4: Actually write the file on disk Ok(encrypt.flush()?) 
} fn full_read<R>(reader: &mut R) -> Result<Vec<u8>, IoError> where R: Read, { let mut buf = vec![]; reader.read_to_end(&mut buf)?; Ok(buf) } fn compare_cc_files<A, B>(a: &mut A, b: &mut B) -> Result<(), anyhow::Error> where A: Read, B: Read, { type Toc = BTreeMap<WoxHashedName, TocEntry>; // We don't care about the crypto used for the file contents, use the least expensive one let contents_crypt = None; // Step 1: Load archives data from disk let contents = [ Contents::new(full_read(a)?, WoxReverseDictionary::default()), Contents::new(full_read(b)?, WoxReverseDictionary::default()), ]; // Step 2: If there's a difference in file count, then the archive are different let file_counts = contents .iter() .map(|content| content.file_count()) .collect::<Result<SmallVec<[FileSize; 2]>, IoError>>()?; ensure!( file_counts[0] == file_counts[1], WoxError::DifferentFileCount(file_counts[0].into(), file_counts[1].into()), ); let tocs = contents .iter() .map(|content| { content .toc_iter()? .map(|toc_result| toc_result.map(|toc| (toc.id, toc))) .collect::<Result<Toc, anyhow::Error>>() }) .collect::<Result<SmallVec<[Toc; 2]>, anyhow::Error>>()?; // Step 3: If the TOC is different, then the archives are different. 
ensure!( tocs[0] .values() .zip(tocs[1].values()) .all(|(a_entry, b_entry)| a_entry.id == b_entry.id && a_entry.len == b_entry.len), WoxError::TocDiffers ); // Step 4: Last and more expensive check: make sure that the file contents is the same for (a_entry, b_entry) in tocs[0].values().zip(tocs[1].values()) { let a_payload = contents[0].fetch_payload(a_entry, contents_crypt.clone())?; let b_payload = contents[1].fetch_payload(b_entry, contents_crypt.clone())?; ensure!(a_payload == b_payload, WoxError::ContentDiffers); } Ok(()) } fn open_file_or_stdin(path: &OsStr) -> Result<Box<dyn Read>, anyhow::Error> { if path == "-" { Ok(Box::new(stdin())) } else { Ok(Box::new(File::open(path)?)) } } fn create_file_or_stdout(path: &OsStr) -> Result<Box<dyn Write>, anyhow::Error> { if path == "-" { Ok(Box::new(stdout())) } else { Ok(Box::new(File::create(path)?)) } } trait Job<S> where S: Write, { fn name(&self) -> &'static str; fn subcommand(&self) -> App; fn execute(&self, args: &ArgMatches, stdout: &mut S) -> Result<(), anyhow::Error>; } fn new_subcommand<'a>(name: &'a str, about: &'a str) -> App<'a, 'a> { SubCommand::with_name(name) .about(about) .version(VERSION) .author(AUTHOR) } struct Extract {} impl Extract { fn execute_with_dictionary<S, D>( &self, matches: &ArgMatches, stdout: &mut S, dictionary: D, ) -> Result<(), anyhow::Error> where S: Write, D: Read, { let optional_hashes = matches.values_of("file").map(|files_iter| { files_iter .map(|file| { // If it's a number, it's already a hash and use it as is file.parse::<WoxHashedName>() .unwrap_or_else(|_| WoxHashedName::from(file.as_bytes())) }) .collect::<Vec<_>>() }); extract_cc_file( stdout, &mut open_file_or_stdin(matches.value_of_os("archive").unwrap())?, dictionary, &VfsPath::new(PhysicalFS::new( Path::new( matches .value_of_os("root") .unwrap_or_else(|| OsStr::new(".")), ) .to_path_buf(), )), if let Some(ref hashes) = optional_hashes { hashes } else { &[] }, if matches.is_present("disable-contents-crypt") { None } 
else { Some(Crypt::Xor) }, ) } } impl<S> Job<S> for Extract where S: Write, { fn name(&self) -> &'static str { "extract" } fn subcommand(&self) -> App { new_subcommand(<Self as Job<S>>::name(self), "Extract an archive to a new directory") .arg( Arg::with_name("archive") .long("archive") .short("a") .required(true) .value_name("FILE") .help("Archive file to extract, use '-' for stdin"), ) .arg( Arg::with_name("dictionary") .long("dictionary") .value_name("FILE") .help("Archived files dictionary file"), ) .arg( Arg::with_name("root") .long("root") .short("C") .value_name("DIRECTORY") .help("Directory to extract to, if not provided, extract to current directory"), ) .arg( Arg::with_name("file") .long("file") .short("f") .multiple(true) .value_name("ARCHIVED_FILE") .help("File name from the archive to extract, written to stdout. If a number is provided, it's assumed to be a file hash."), ) .arg( Arg::with_name("disable-contents-crypt") .long("disable-contents-crypt") .required(false) .takes_value(false) .help("Don't decrypt the file contents"), ) } fn execute(&self, matches: &ArgMatches, stdout: &mut S) -> Result<(), anyhow::Error> { if let Some(dict) = matches.value_of_os("dictionary") { self.execute_with_dictionary(matches, stdout, &mut File::open(dict)?) 
} else { self.execute_with_dictionary(matches, stdout, &mut Cursor::new(&[])) } } } struct Create {} impl<S> Job<S> for Create where S: Write, { fn name(&self) -> &'static str { "create" } fn subcommand(&self) -> App { new_subcommand( <Self as Job<S>>::name(self), "Create an archive from an existing directory", ) .arg( Arg::with_name("archive") .long("archive") .short("a") .required(true) .value_name("FILE") .help("Archive file to create, use '-' for stdout"), ) .arg( Arg::with_name("root") .long("root") .short("C") .required(true) .value_name("DIRECTORY") .help("Directory containing the files to archive"), ) .arg( Arg::with_name("disable-contents-crypt") .long("disable-contents-crypt") .required(false) .takes_value(false) .help("Don't decrypt the file contents"), ) } fn execute(&self, matches: &ArgMatches, _stdout: &mut S) -> Result<(), anyhow::Error> { create_cc_file( create_file_or_stdout(matches.value_of_os("archive").unwrap())?, &VfsPath::new(PhysicalFS::new( Path::new(matches.value_of_os("root").unwrap()).to_path_buf(), )), if matches.is_present("disable-contents-crypt") { None } else { Some(Crypt::Xor) }, ) } } struct Compare {} impl<S> Job<S> for Compare where S: Write, { fn name(&self) -> &'static str { "compare" } fn subcommand(&self) -> App { new_subcommand(<Self as Job<S>>::name(self), "Compare two or more archives") .arg(Arg::with_name("archives").multiple(true)) } fn execute(&self, matches: &ArgMatches, _stdout: &mut S) -> Result<(), anyhow::Error> { let mut iter = matches.values_of("archives").unwrap(); let reference_path = iter.next().ok_or(WoxError::Requires2PlusFiles)?; let mut reference = File::open(Path::new(reference_path))?; let mut did_compare = false; for comparee_path in iter { // // When we compare more than 2 files, rewind the first file (we call it "reference" // here) and compare it with the third (or fourth, etc...) file. It's not the best // way to do this because: // // 1) It implies that "reference" implements Seek. 
// 2) We are going to extract "reference" more than once. // if did_compare { reference.seek(SeekFrom::Start(0))?; } compare_cc_files(&mut reference, &mut File::open(Path::new(comparee_path))?) .with_context(|| { WoxError::ArchivesDiffer(reference_path.into(), comparee_path.into()) })?; did_compare = true; } ensure!(did_compare, WoxError::Requires2PlusFiles); Ok(()) } } struct Hash {} impl<S> Job<S> for Hash where S: Write, { fn name(&self) -> &'static str { "hash" } fn subcommand(&self) -> App { new_subcommand( <Self as Job<S>>::name(self), "Compute the hash of a file name and output it on stdout", ) .arg(Arg::with_name("name").required(true)) } fn execute(&self, matches: &ArgMatches, stdout: &mut S) -> Result<(), anyhow::Error> { Ok(writeln!( stdout, "{}", WoxHashedName::from(matches.value_of("name").unwrap().as_bytes()) )?) } } fn build_known_jobs<S>() -> [Box<dyn Job<S>>; 4] where S: Write, { [ Box::new(Extract {}), Box::new(Create {}), Box::new(Compare {}), Box::new(Hash {}), ] } fn exec_cmdline<A, S>(args: &[A], stdout: &mut S) -> Result<(), anyhow::Error> where A: AsRef<str> + AsRef<OsStr>, S: Write, String: From<A>, { let jobs = build_known_jobs::<S>(); let mut app = App::new("woxar") .version(VERSION) .author(AUTHOR) .about("Might and Magic: World of Xeen Archiver"); for job in jobs.iter() { app = app.subcommand(job.subcommand()); } let matches = app.get_matches_from_safe(&*args)?; if let Some((found, submatches)) = jobs.iter().find_map(|job| { matches .subcommand_matches(job.name()) .map(|submatches| (job, submatches)) }) { found.execute(submatches, stdout)?; stdout.flush()?; Ok(()) } else { bail!(WoxError::NoSubcommand) } } fn exec_cmdline_manage_errors<'a, A, S, E>(args: &[A], stdout: &mut S, stderr: &mut E) -> bool where A: AsRef<str> + Display + From<&'a str> + AsRef<OsStr>, S: Write, E: Write, String: From<A>, { if let Err(err) = exec_cmdline(args, stdout) { match err.downcast_ref::<ClapError>() { Some(ClapError { kind, message, .. 
}) if matches!(*kind, HelpDisplayed | VersionDisplayed) => { writeln!(stdout, "{}", message).unwrap(); true } _ => { writeln!( stderr, "{}: ERROR: {}", args.get(0).unwrap_or(&"<unknown>".into()), err ) .unwrap(); false } } } else { true } } fn main() { let args: Vec<String> = env::args().collect(); let mut stdout = stdout(); let mut stderr = stderr(); exit( if exec_cmdline_manage_errors(&args, &mut stdout, &mut stderr) { 0 } else { 1 }, ) } #[cfg(test)] mod tests { use super::*; use std::io::empty; use vfs::MemoryFS; #[test] fn rotate_add_crypt() { type Buf = [u8; 8]; const PLAINTEXT: Buf = [0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]; const EXPECTED_CRYPT: Buf = [176, 159, 143, 126, 110, 93, 77, 60]; let mut state = Some(Crypt::RotateAdd(ROTATE_ADD_INITIAL)); let crypted = PLAINTEXT .iter() .map(|byte| crypt(&mut state, Direction::Read, *byte)) .collect::<SmallVec<Buf>>(); assert_eq!(&crypted[..], EXPECTED_CRYPT); let mut state = Some(Crypt::RotateAdd(ROTATE_ADD_INITIAL)); let decrypted = crypted .iter() .map(|byte| crypt(&mut state, Direction::Write, *byte)) .collect::<SmallVec<Buf>>(); assert_eq!(PLAINTEXT, &decrypted[..]); } fn cmdline_expect(subcmd: Option<&str>, arg: &str, on_stdout: bool) { let mut stdout = Vec::<u8>::new(); let mut stderr = Vec::<u8>::new(); let mut cmdline = SmallVec::<[&str; 3]>::new(); cmdline.push("unit-test"); if let Some(subcmd_str) = subcmd { cmdline.push(subcmd_str); } cmdline.push(arg); assert!(exec_cmdline_manage_errors( &cmdline, &mut stdout, &mut stderr )); if on_stdout { assert!(!stdout.is_empty()); } assert!(stderr.is_empty()); } #[test] fn cmdline_help_n_version() { let jobs = build_known_jobs::<Vec<u8>>(); // Note: clap behavior is different for --help and --version. The former will write the // message in the generated Error while the latter will print directly to stdout, bypassing // our output capture strategy. No idea if this behavior is intended by clap or not... but // it's annoying for unit testing purposes. 
for arg in [ ("--help", true), ("-h", true), ("--version", false), ("-V", false), ] .iter() { cmdline_expect(None, arg.0, arg.1); for job in &jobs { cmdline_expect(Some(job.name()), arg.0, arg.1); } } } #[test] fn compare_archives() { assert!(compare_cc_files(&mut empty(), &mut empty()).is_err()); const ARCHIVE_WITH_NO_FILES: [u8; 2] = [0, 0]; compare_cc_files( &mut Cursor::new(&ARCHIVE_WITH_NO_FILES), &mut Cursor::new(&ARCHIVE_WITH_NO_FILES), ) .unwrap(); } fn fs_write(root: &VfsPath, name: &str, contents: &[u8]) { let path = root.join(name).unwrap(); let mut file = path.create_file().unwrap(); file.write_all(contents).unwrap(); } fn build_memory_fs_with_b_name(b_name: &str) -> VfsPath { let root = VfsPath::new(MemoryFS::new()); fs_write(&root, "A.TXT", b"A"); fs_write(&root, b_name, b"BB"); fs_write(&root, "C.TXT", b"CCC"); root } fn build_memory_fs() -> VfsPath { build_memory_fs_with_b_name("B.TXT") } // List of files in the file system created by build_memory_fs(). fn archive_dictionary() -> Cursor<&'static [u8]> { Cursor::new(b"A.TXT\nB.TXT\nC.TXT\n") } // An archived version of the file system created by build_memory_fs(). 
const ARCHIVE_CONTENTS: &[u8] = &[ 3, 0, 130, 132, 40, 199, 46, 148, 186, 224, 184, 182, 90, 249, 32, 198, 172, 210, 174, 168, 204, 235, 18, 57, 158, 196, 116, 119, 119, 118, 118, 118 ]; fn fs_as_tree(root: &VfsPath) -> BTreeMap<String, Vec<u8>> { root.read_dir().unwrap().map(|path| { let mut reader = path.open_file().unwrap(); let mut contents = vec![]; reader.read_to_end(&mut contents).unwrap(); (path.as_str().into(), contents) }).collect() } fn compare_fs(a: &VfsPath, b: &VfsPath) -> bool { fs_as_tree(a) == fs_as_tree(b) } #[test] fn create_archive() { let root = build_memory_fs(); let mut archive = vec![]; create_cc_file(&mut archive, &root, Some(Crypt::Xor)).unwrap(); assert_eq!( ARCHIVE_CONTENTS, archive.as_slice() ); } #[test] fn extract_whole_archive() { let root = VfsPath::new(MemoryFS::new()); let mut stdout = vec![]; extract_cc_file(&mut stdout, &mut Cursor::new(ARCHIVE_CONTENTS), archive_dictionary(), &root, &[], Some(Crypt::Xor)).unwrap(); // When no hashes are provided, the contents of the archive will be written to the provided // file system. assert!(stdout.is_empty()); assert!(compare_fs(&root, &build_memory_fs())); } #[test] fn extract_with_partial_dictionary() { let root = VfsPath::new(MemoryFS::new()); let partial_dictionary = Cursor::new(b"A.TXT\nC.TXT\n"); let b_hashed_name = format!("{}", WoxHashedName::from("B.TXT".as_bytes()).raw()); let mut stdout = vec![]; extract_cc_file(&mut stdout, &mut Cursor::new(ARCHIVE_CONTENTS), partial_dictionary, &root, &[], Some(Crypt::Xor)).unwrap(); // In this test, we don't have "B.TXT" in the dictionary. As a result, that file will have // the hashed value as name. 
assert!(stdout.is_empty()); assert!(compare_fs(&root, &build_memory_fs_with_b_name(&b_hashed_name))); } #[test] fn extract_selected_hash_archive() { let root = VfsPath::new(MemoryFS::new()); let hashes = [ WoxHashedName::from("A.TXT".as_bytes()), WoxHashedName::from("C.TXT".as_bytes()), WoxHashedName::from("B.TXT".as_bytes()), ]; let mut stdout = vec![]; extract_cc_file(&mut stdout, &mut Cursor::new(ARCHIVE_CONTENTS), archive_dictionary(), &root, &hashes, Some(Crypt::Xor)).unwrap(); // With provided hashes, the selected hashes of the archive will be written to the provided // "stdout" stream. Ordering of the hashes is expected to be reflected in the output. assert_eq!(&stdout, b"ACCCBB"); assert_eq!(root.read_dir().unwrap().count(), 0); } }
true
bbf337cf98fcf4b0d16b25bd25bc08b17b8a5b8d
Rust
beamiter/leetcode-rust
/src/dd_challenge/s00161_one_edit_distance.rs
UTF-8
1,839
3.5
4
[]
no_license
use crate::dd_challenge::Solution; use std::cmp::min; /// /// 对比两个字符串对应位置上的字符,如果遇到不同的时候,这时看两个字符串的长度关系, /// 如果相等,则比较当前位置后的字串是否相同,如果s的长度大,那么比较s的下一个位置开始的子串, /// 和t的当前位置开始的子串是否相同,反之如果t的长度大,则比较t的下一个位置开始的子串, /// 和s的当前位置开始的子串是否相同。如果循环结束,都没有找到不同的字符, /// 那么此时看两个字符串的长度是否相差1 /// impl Solution { pub fn is_one_edit_distance(s: String, t: String) -> bool { let s_bytes = s.as_bytes(); let t_bytes = t.as_bytes(); let s_len = s_bytes.len(); let t_len = t_bytes.len(); let loop_len = min(s_len, t_len); for idx in 0..loop_len { if s_bytes[idx] != t_bytes[idx] { return if s_len == t_len { s_bytes[(idx + 1)..] == t_bytes[(idx + 1)..] } else if s_len > t_len { s_bytes[(idx + 1)..] == t_bytes[idx..] } else { s_bytes[idx..] == t_bytes[(idx + 1)..] } } } return (s_len as i32 - t_len as i32).abs() == 1; } } #[cfg(test)] mod tests { use crate::dd_challenge::Solution; #[test] fn test_is_one_edit_distance() { assert!(Solution::is_one_edit_distance("ab".to_string(), "abc".to_string())); assert!(!Solution::is_one_edit_distance("ab".to_string(), "abcd".to_string())); assert!(!Solution::is_one_edit_distance("cab".to_string(), "ad".to_string())); assert!(Solution::is_one_edit_distance("1203".to_string(), "1213".to_string())); } }
true
297c85b2771f0ab44ff61a8e851080591209edcc
Rust
zhaoshenglong/Leetcode
/1143_medimum_longest_common_subsequence/main.rs
UTF-8
1,179
3.15625
3
[]
no_license
struct Solution; impl Solution { pub fn longest_common_subsequence(text1: String, text2: String) -> i32 { let m = text1.len(); let n = text2.len(); let mut dp: Vec<Vec<i32>> = Vec::new(); dp.resize_with(m, || {let mut v = Vec::new(); v.resize(n, 0); v}); dp[0][0] = (text1[0..1] == text2[0..1]) as i32; for i in 1..m { dp[i][0] = (text1[i..i + 1] == text2[0..1]) as i32; dp[i][0] = dp[i][0].max(dp[i - 1][0]); } for j in 1..n { dp[0][j] = (text1[0..1] == text2[j..j + 1]) as i32; dp[0][j] = dp[0][j].max(dp[0][j - 1]); } for i in 1..m { for j in 1..n { if text1[i..i + 1] == text2[j..j + 1] { dp[i][j] = 1 + dp[i - 1][j - 1]; } else { dp[i][j] = dp[i - 1][j].max(dp[i][j - 1]).max(dp[i - 1][j - 1]); } } } println!("{:?}", &dp); dp[m - 1][n - 1] } } fn main() { let t1 = String::from("abcde"); let t2 = String::from("ace"); println!("{}\n", Solution::longest_common_subsequence(t1, t2)); }
true
f6fabf1c2a0fff998909dee7093a5c9aa44f0350
Rust
astral-sh/ruff
/crates/ruff/src/rules/flake8_gettext/settings.rs
UTF-8
2,004
2.640625
3
[ "BSD-3-Clause", "0BSD", "LicenseRef-scancode-free-unknown", "GPL-1.0-or-later", "MIT", "Apache-2.0" ]
permissive
use serde::{Deserialize, Serialize}; use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions}; #[derive( Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions, )] #[serde( deny_unknown_fields, rename_all = "kebab-case", rename = "Flake8GetTextOptions" )] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct Options { #[option( default = r#"["_", "gettext", "ngettext"]"#, value_type = "list[str]", example = r#"function-names = ["_", "gettext", "ngettext", "ugettetxt"]"# )] /// The function names to consider as internationalization calls. pub function_names: Option<Vec<String>>, #[option( default = r#"[]"#, value_type = "list[str]", example = r#"extend-function-names = ["ugettetxt"]"# )] /// Additional function names to consider as internationalization calls, in addition to those /// included in `function-names`. pub extend_function_names: Option<Vec<String>>, } #[derive(Debug, CacheKey)] pub struct Settings { pub functions_names: Vec<String>, } fn default_func_names() -> Vec<String> { vec![ "_".to_string(), "gettext".to_string(), "ngettext".to_string(), ] } impl Default for Settings { fn default() -> Self { Self { functions_names: default_func_names(), } } } impl From<Options> for Settings { fn from(options: Options) -> Self { Self { functions_names: options .function_names .unwrap_or_else(default_func_names) .into_iter() .chain(options.extend_function_names.unwrap_or_default()) .collect(), } } } impl From<Settings> for Options { fn from(settings: Settings) -> Self { Self { function_names: Some(settings.functions_names), extend_function_names: Some(Vec::new()), } } }
true
a1d573e4c3d83fb64aa8b744a9e318a8373bba1a
Rust
Chanakya888/Rust-rest
/src/vectors.rs
UTF-8
339
3.59375
4
[]
no_license
pub fn run(){ let mut number : Vec<i32> = vec![1,2,3,4]; number.push(3); println!("numbers are {:?}",number); //loop through values for value in number.iter(){ println!("{}",value) } //loop and mutate values for value in number.iter_mut(){ *value *= 2 } println!("{:?}",number) }
true
56520314284bce05b924f370abffa0ef8c0071e0
Rust
KISSMonX/happyrust
/result/src/main.rs
UTF-8
749
3.078125
3
[ "Apache-2.0" ]
permissive
use core::panic; use std::fs::File; use std::io::ErrorKind; fn main() { let f = File::open("/dev/null"); let f = match f { Ok(file) => file, Err(err) => match err.kind(){ ErrorKind::NotFound => match File::create("result rust demo.txt") { Ok(fc) => fc, Err(e) => panic!("Error craeting file: {:?}", e), }, other_err =>panic!("Error opening file: {:?}", other_err), } }; println!("Hello, world!"); } fn gaussian(mean: f64, variance: f64) -> impl Fn(f64) -> f64 { let std_dev = variance.sqrt(); move |x| { (1.0 / (std_dev * 2.0 * std::f64::consts::PI).sqrt()) * (-1.0 * (x - mean).powi(2) / (2.0 * variance)).exp() } }
true
e2e622af44eb51bbdaf57b8b662792fda05bbdbe
Rust
frankhart2018/rust-learn
/ch-4-understanding-ownership/function_ownership/src/main.rs
UTF-8
460
4.03125
4
[]
no_license
fn main() { let s = String::from("hello"); takes_ownership(s); // Same as variable ownership, s will be moved to some_string // This will throw an error // println!("{}", s); let x = 5; makes_copy(x); // Same as variable ownership, x will be copied (not moved) to some_integer } fn takes_ownership(some_string: String) { println!("{}", some_string); } fn makes_copy(some_integer: i32) { println!("{}", some_integer); }
true
3ef1a75fadde22b54adc693bdd6e3bb40425aff3
Rust
DarinM223/ctci-rs
/src/chapter8/robot_in_grid.rs
UTF-8
2,422
3.328125
3
[]
no_license
pub type Point = (usize, usize); #[derive(Clone, Copy)] pub struct Dim { pub rows: usize, pub cols: usize, } pub fn get_path(start: Point, end: Point, dim: Dim, offlimits: &[Vec<bool>]) -> Option<Vec<Point>> { if start == end { Some(vec![start]) } else if out_of_bounds(start, dim) || off_limits(offlimits, start) { None } else { get_path((start.0 + 1, start.1), end, dim, offlimits) .or_else(|| get_path((start.0, start.1 + 1), end, dim, offlimits)) .map(|mut path| { path.push(start); path }) } } pub fn get_path_memo( start: Point, end: Point, dim: Dim, offlimits: &[Vec<bool>], cache: &mut Vec<Vec<Option<Option<Vec<Point>>>>>, ) -> Option<Vec<Point>> { if start == end { Some(vec![start]) } else if out_of_bounds(start, dim) || off_limits(offlimits, start) { None } else if let Some(cached) = cache[start.0][start.1].take() { // If there is a cached value return the cached value. cached } else { let result = get_path_memo((start.0 + 1, start.1), end, dim, offlimits, cache) .or_else(|| get_path_memo((start.0, start.1 + 1), end, dim, offlimits, cache)) .map(|mut path| { path.push(start); path }); // Cache the computation. cache[start.0][start.1] = Some(result.clone()); result } } fn out_of_bounds(point: Point, dim: Dim) -> bool { point.0 >= dim.rows || point.1 >= dim.cols } fn off_limits(offlimits: &[Vec<bool>], pos: Point) -> bool { offlimits.get(pos.0).and_then(|row| row.get(pos.1)) == Some(&true) } #[cfg(test)] mod tests { use super::*; #[test] fn test_robot() { let dim = Dim { rows: 4, cols: 4 }; let mut offlimits = vec![vec![false; dim.cols]; dim.rows]; offlimits[2][0] = true; offlimits[1][1] = true; offlimits[2][3] = true; let mut cache = vec![vec![None; dim.cols]; dim.rows]; assert_eq!( get_path((0, 0), (3, 3), dim, &offlimits), Some(vec![(3, 3), (3, 2), (2, 2), (1, 2), (0, 2), (0, 1), (0, 0)]) ); assert_eq!( get_path_memo((0, 0), (3, 3), dim, &offlimits, &mut cache), Some(vec![(3, 3), (3, 2), (2, 2), (1, 2), (0, 2), (0, 1), (0, 0)]) ); } }
true
fda5ca7d48b8ac1fafd1ae65af188610ee50b7f0
Rust
iCodeIN/flapy-bird-bevy
/src/components.rs
UTF-8
295
2.59375
3
[]
no_license
use crate::prelude::*; pub struct Velocity (pub Vec2); #[derive(Debug, Default)] pub struct Damage { pub value: u32, } pub struct Collider { pub width: f32, pub height: f32, } impl Collider { pub fn as_vec2(&self) -> Vec2 { Vec2::new(self.width, self.height) } }
true
f95601f9e5e59a5d1455db08c06998c079b15f1e
Rust
lerouxrgd/rsgen-avro
/tests/schemas/nullable_logical_dates.rs
UTF-8
2,319
2.859375
3
[ "MIT" ]
permissive
/// Date type #[derive(Debug, PartialEq, Eq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct DateLogicalType { #[serde(deserialize_with = "nullable_datelogicaltype_birthday")] #[serde(serialize_with = "chrono::naive::serde::ts_seconds::serialize")] pub birthday: chrono::NaiveDateTime, pub meeting_time: Option<chrono::NaiveDateTime>, #[serde(deserialize_with = "nullable_datelogicaltype_release_datetime_micro")] #[serde(serialize_with = "chrono::naive::serde::ts_microseconds::serialize")] pub release_datetime_micro: chrono::NaiveDateTime, } #[inline(always)] fn nullable_datelogicaltype_birthday<'de, D>(deserializer: D) -> Result<chrono::NaiveDateTime, D::Error> where D: serde::Deserializer<'de>, { use serde::Deserialize; #[derive(serde::Deserialize)] struct Wrapper(#[serde(with = "chrono::naive::serde::ts_seconds")] chrono::NaiveDateTime); let opt = Option::<Wrapper>::deserialize(deserializer)?.map(|w| w.0); Ok(opt.unwrap_or_else(|| default_datelogicaltype_birthday() )) } #[inline(always)] fn nullable_datelogicaltype_release_datetime_micro<'de, D>(deserializer: D) -> Result<chrono::NaiveDateTime, D::Error> where D: serde::Deserializer<'de>, { use serde::Deserialize; #[derive(serde::Deserialize)] struct Wrapper(#[serde(with = "chrono::naive::serde::ts_microseconds")] chrono::NaiveDateTime); let opt = Option::<Wrapper>::deserialize(deserializer)?.map(|w| w.0); Ok(opt.unwrap_or_else(|| default_datelogicaltype_release_datetime_micro() )) } #[inline(always)] fn default_datelogicaltype_birthday() -> chrono::NaiveDateTime { chrono::NaiveDateTime::from_timestamp_opt(1681601653, 0).unwrap() } #[inline(always)] fn default_datelogicaltype_meeting_time() -> Option<chrono::NaiveDateTime> { None } #[inline(always)] fn default_datelogicaltype_release_datetime_micro() -> chrono::NaiveDateTime { chrono::NaiveDateTime::from_timestamp_micros(1570903062000000).unwrap() } impl Default for DateLogicalType { fn default() -> DateLogicalType { DateLogicalType { 
birthday: default_datelogicaltype_birthday(), meeting_time: default_datelogicaltype_meeting_time(), release_datetime_micro: default_datelogicaltype_release_datetime_micro(), } } }
true
f4643a73043e8646c9cc9f408963d12637dd5326
Rust
garethkcjones/rays
/src/hittable/constantmedium.rs
UTF-8
3,150
2.765625
3
[]
no_license
use super::{Aabb, HitRecord, Hittable}; use crate::{Isotropic, Material, Ray, Texture, Vec3}; use rand::prelude::*; use std::{ops::Range, sync::Arc}; /** * Type for an isotropic medium. */ #[derive(Debug)] pub struct ConstantMedium { boundary: Arc<dyn Hittable>, phase_function: Arc<dyn Material>, neg_inv_density: f64, } impl ConstantMedium { #[must_use] pub fn new( boundary: Arc<dyn Hittable>, density: f64, albedo: impl Into<Arc<dyn Texture>>, ) -> Self { let phase_function = Isotropic::new_material(albedo); let neg_inv_density = -density.recip(); Self { boundary, phase_function, neg_inv_density, } } #[must_use] pub fn new_hittable( boundary: Arc<dyn Hittable>, density: f64, albedo: impl Into<Arc<dyn Texture>>, ) -> Arc<dyn Hittable> { Arc::new(Self::new(boundary, density, albedo)) } } impl Hittable for ConstantMedium { fn hit(&self, r: &Ray, tr: Range<f64>) -> Option<HitRecord> { #![allow(clippy::many_single_char_names)] // Print occasional samples when debugging. To enable, set to `true`. const ENABLE_DEBUG: bool = false; let debugging = ENABLE_DEBUG && thread_rng().gen::<f64>() < 0.00001; if let Some(rec1) = self.boundary.hit(r, -f64::INFINITY..f64::INFINITY) { if let Some(rec2) = self.boundary.hit(r, (rec1.t() + 0.0001)..f64::INFINITY) { if debugging { eprint!("\nt_min = {:?}, t_max = {:?}\n", rec1.t(), rec2.t()); } let t1 = rec1.t().max(tr.start); let t2 = rec2.t().min(tr.end); if t1 < t2 { let t1 = t1.max(0.0); let ray_length = r.direction().length(); let distance_inside_boundary = (t2 - t1) * ray_length; let hit_distance = self.neg_inv_density * thread_rng().gen::<f64>().ln(); if hit_distance <= distance_inside_boundary { let t = t1 + hit_distance / ray_length; let p = r.at(t); if debugging { eprintln!("hit_distance = {:?}", hit_distance); eprintln!("rec.t = {:?}", t); eprintln!("rec.p = {:?}", p); } let normal = Vec3(1.0, 0.0, 0.0); // Arbitrary. let u = 0.0; // Arbitrary. let v = 0.0; // Arbitrary. 
return Some(HitRecord::new( r, p, normal, t, u, v, Arc::clone(&self.phase_function), )); } } } } None } fn bounding_box(&self, tr: Range<f64>) -> Aabb { self.boundary.bounding_box(tr) } }
true
4600d14e98c2bec2a6b3bc0d34b050c6ec6ae6a0
Rust
icanrealizeum/manjarred
/system/Z575/OSes/manjaro/filesystem_now/home/emacs/projects/rust_book/ordering/src/main.rs
UTF-8
1,249
4.1875
4
[ "CC0-1.0", "Unlicense", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-public-domain" ]
permissive
use std::cmp::Ordering; use std::cmp::Ordering::Greater; fn cmp(a: i32, b: i32) -> Ordering { if a < b { Ordering::Less } else if a > b { Greater } else { Ordering::Equal } // if a + b == 12 { Ordering::Greater } //the actual error is that this entire line should be unreachable(when uncommented) but it's understandable why it isn't } fn main() { let x = 5; let y = 10; /* let ordering = cmp(x, y); // ordering: Ordering if ordering == Ordering::Less { println!("less"); } else if ordering == Greater { println!("greater"); } else if ordering == Ordering::Equal { println!("equal"); }*/ //using match instead: match cmp(x, y) { Ordering::Less => println!("less"), Greater => println!("greater"), Ordering::Equal => println!("equal"), } /*This version has way less noise, and it also checks exhaustively to make sure that we have * covered all possible variants of Ordering. With our if/else version, if we had forgotten the * Greater case, for example, our program would have happily compiled. If we forget in the * match, it will not. Rust helps us make sure to cover all of our bases. * Exactly how I want it!*/ }
true
50011ff69e7f82506d771a164afea16587221f5f
Rust
nicholasbishop/ipjdb
/src/lock.rs
UTF-8
894
3.21875
3
[ "Apache-2.0" ]
permissive
use crate::error::Error; use fs2::FileExt; use std::fs; use std::path::Path; pub struct FileLock { file: fs::File, is_locked: bool, } impl FileLock { pub fn exclusive(path: &Path) -> Result<FileLock, Error> { let file = fs::File::open(path)?; file.lock_exclusive()?; Ok(FileLock { file, is_locked: true, }) } pub fn shared(path: &Path) -> Result<FileLock, Error> { let file = fs::File::open(path)?; file.lock_shared()?; Ok(FileLock { file, is_locked: true, }) } pub fn unlock(&mut self) -> Result<(), Error> { self.file.unlock()?; self.is_locked = false; Ok(()) } } impl Drop for FileLock { fn drop(&mut self) { if self.is_locked { self.unlock().expect("failed to unlock file"); } } }
true
363fab8d55ec95341a7759bf978af8a8b84aac7d
Rust
fernandezvara/gin
/crates/gin-macros/src/message_macro/enum_item.rs
UTF-8
3,226
2.71875
3
[]
no_license
use proc_macro2::{Literal, Span, TokenStream}; use quote::{quote, quote_spanned}; use syn::{ parse_quote, punctuated::{Pair, Punctuated}, token::Eq, Fields, ItemEnum, Variant, }; pub(in crate::message_macro) fn enum_item(item: syn::ItemEnum) -> TokenStream { if let Some(span) = non_unit_field(&item) { return quote_spanned! { span => compile_error!("Gin currently only supports enums if all variants are unit-style"); }; } let ItemEnum { attrs, vis, enum_token, ident, generics, brace_token, variants, } = item; let attrs = { let mut updated = vec![parse_quote! { #[derive(Debug, Copy, Clone, ::prost::Enumeration)] }]; updated.extend(attrs); updated }; let variants = map_punctuated(variants, add_discriminant); let enum_name = ident.clone(); let item = ItemEnum { attrs, vis, enum_token, ident, generics, brace_token, variants, }; quote! { #item impl ::prost::Message for #enum_name { fn encode_raw<B>(&self, buf: &mut B) where B: prost::bytes::BufMut, Self: Sized, { i32::from(*self).encode_raw(buf) } fn merge_field<B>( &mut self, tag: u32, wire_type: prost::encoding::WireType, buf: &mut B, ctx: prost::encoding::DecodeContext, ) -> Result<(), prost::DecodeError> where B: prost::bytes::Buf, Self: Sized, { i32::from(*self).merge_field(tag, wire_type, buf, ctx) } fn encoded_len(&self) -> usize { i32::from(*self).encoded_len() } fn clear(&mut self) { *self = Self::default() } } } } fn non_unit_field(item: &syn::ItemEnum) -> Option<Span> { item.variants .iter() .find(|v| !matches!(v.fields, Fields::Unit)) .map(|v| syn::spanned::Spanned::span(&v)) } /// Add the [prost::Enumeration] attribute + a discriminant, if necessary fn add_discriminant( Variant { attrs, ident, fields, discriminant, }: Variant, index: usize, ) -> Variant { let index = Literal::usize_unsuffixed(index); let discriminant = match discriminant { Some(d) => d, None => (Eq(Span::call_site()), parse_quote![#index]), }; Variant { attrs, ident, fields, discriminant: Some(discriminant), } } fn map_punctuated<Item, 
MappedItem, Punctuation>( punctuated: Punctuated<Item, Punctuation>, mapper: impl Fn(Item, usize) -> MappedItem, ) -> Punctuated<MappedItem, Punctuation> { punctuated .into_pairs() .enumerate() .map(|(i, pair)| match pair { Pair::Punctuated(item, punctuation) => Pair::Punctuated(mapper(item, i), punctuation), Pair::End(item) => Pair::End(mapper(item, i)), }) .collect() }
true
6761bc9b5dd43feb31db64482fa4762f38a77d70
Rust
scottschroeder/advent_of_code_2019
/src/display.rs
UTF-8
6,112
3.34375
3
[ "MIT" ]
permissive
/*! Display for terminal Coordinates are integers (that may be negative) The size can be unknown */ use std::fmt; use std::ops::{Add, Sub}; #[derive(Default)] pub struct VON; #[derive(Default)] pub struct VOF; pub trait VerticalOrientation: Default { fn offset(ymin: i32, ymax: i32, y: i32) -> i32; fn absolute(ymin: i32, ymax: i32, dy: i32) -> i32; } impl VerticalOrientation for VON { #[inline] fn offset(_: i32, ymax: i32, y: i32) -> i32 { ymax - y } #[inline] fn absolute(_: i32, ymax: i32, dy: i32) -> i32 { ymax - dy } } impl VerticalOrientation for VOF { #[inline] fn offset(ymin: i32, _: i32, y: i32) -> i32 { y - ymin } #[inline] fn absolute(ymin: i32, _: i32, dy: i32) -> i32 { dy + ymin } } #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Point { pub x: i32, pub y: i32, } impl Add for Point { type Output = Point; fn add(self, rhs: Self) -> Self::Output { Point { x: self.x + rhs.x, y: self.y + rhs.y, } } } impl Sub for Point { type Output = Point; fn sub(self, rhs: Self) -> Self::Output { Point { x: self.x - rhs.x, y: self.y - rhs.y, } } } impl fmt::Debug for Point { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self) } } impl fmt::Display for Point { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "({}, {})", self.x, self.y) } } impl From<(i32, i32)> for Point { fn from(p: (i32, i32)) -> Self { Point { x: p.0, y: p.1 } } } impl From<&(i32, i32)> for Point { fn from(p: &(i32, i32)) -> Self { Point { x: p.0, y: p.1 } } } impl From<&Point> for Point { fn from(p: &Point) -> Self { *p } } impl Point { pub fn new(x: i32, y: i32) -> Point { Point { x, y } } pub fn step_dist(&self, other: &Point) -> i32 { (self.x - other.x).abs() + (self.y - other.y).abs() } } pub type ImageNormal<T> = Image<T, VON>; pub type ImageFlip<T> = Image<T, VOF>; pub struct Image<T, V> { frame: Frame, grid: bool, pub data: Vec<Option<T>>, _v: std::marker::PhantomData<V>, } impl<T, V> Image<T, V> { pub fn width(&self) -> 
usize { self.frame.width() as usize } } impl<T: Clone, V: VerticalOrientation> Image<T, V> { pub fn display_grid(&mut self, enable: bool) { self.grid = enable; } pub fn create<'a, I, P>(iter: &'a I) -> Image<T, V> where &'a I: IntoIterator<Item = (&'a P, &'a T)>, &'a P: Into<Point>, T: 'a, P: 'a, { let frame = size_frame(iter); let mut img = Image { frame, grid: false, data: Vec::new(), _v: std::marker::PhantomData::default(), }; img.update(iter); img } pub fn update<'a, I, P>(&mut self, iter: &'a I) where &'a I: IntoIterator<Item = (&'a P, &'a T)>, &'a P: Into<Point>, T: 'a, P: 'a, { self.data = vec![None; self.frame.len()]; for (p, t) in iter { let pt = p.into(); self.data[self.frame.index::<V>(pt)] = Some(t.clone()); } } } impl<T: fmt::Display, V: VerticalOrientation> fmt::Display for Image<T, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let w = self.width() as usize; for (idx, v) in self.data.iter().enumerate() { if idx % w == 0 { let p = self.frame.point::<V>(idx); if self.grid { write!(f, "\n{}\t", p.y)?; } else { writeln!(f, "")?; } } if let Some(t) = v { write!(f, "{}", t)?; } else { write!(f, " ")?; } } if self.grid { write!(f, "\n\t")?; for x in self.frame.min_x..(self.frame.max_x + 1) { write!(f, "{}", (x % 10).abs())?; } writeln!(f, "")?; } Ok(()) } } pub(crate) struct Frame { min_x: i32, max_x: i32, min_y: i32, max_y: i32, } impl Frame { #[inline] fn width(&self) -> i32 { self.max_x + 1 - self.min_x } #[inline] fn height(&self) -> i32 { self.max_y + 1 - self.min_y } #[inline] fn len(&self) -> usize { (self.width() * self.height()) as usize } #[inline] fn index<V: VerticalOrientation>(&self, p: Point) -> usize { let w = self.width(); let dx = p.x - self.min_x; let dy = V::offset(self.min_y, self.max_y, p.y); let idx = (dx + w * dy) as usize; idx } #[inline] fn point<V: VerticalOrientation>(&self, idx: usize) -> Point { let idx = idx as i32; let w = self.width(); let x = idx % w; let y = V::absolute(self.min_y, self.max_y, idx / w); 
Point { x, y } } } pub(crate) fn size_frame<'a, I, T: 'a, P>(iter: &'a I) -> Frame where &'a I: IntoIterator<Item = (&'a P, &'a T)>, P: 'a, &'a P: Into<Point>, { let mut min_x = None; let mut max_x = None; let mut min_y = None; let mut max_y = None; for p in iter { let (p, _) = p; let Point { x, y } = p.into(); min_x = Some(if let Some(mx) = min_x { std::cmp::min(mx, x) } else { x }); max_x = Some(if let Some(mx) = max_x { std::cmp::max(mx, x) } else { x }); min_y = Some(if let Some(my) = min_y { std::cmp::min(my, y) } else { y }); max_y = Some(if let Some(my) = max_y { std::cmp::max(my, y) } else { y }); } Frame { min_x: min_x.unwrap(), max_x: max_x.unwrap(), min_y: min_y.unwrap(), max_y: max_y.unwrap(), } }
true
1e790aaf91fcd0eed47b5a73858355ec887c01f1
Rust
remexre/nihctfplat
/src/logic/mod.rs
UTF-8
663
2.765625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Business logic. //! //! > **Logic** is the "business (or domain) logic" of the application. The router will pull the //! > necessary information out of the HTTP request, and call into this module as quickly as //! > possible to do all the actual work. pub mod auth; use crate::dal::DB; use failure::Error; use futures::Future; use uuid::Uuid; /// Creates a team. pub fn create_team(db: DB, user: i32, name: String) -> impl Future<Item = (), Error = Error> { db.create_team(user, name).map(|_| ()) } /// Joins a team. pub fn join_team(db: DB, user: i32, team: Uuid) -> impl Future<Item = (), Error = Error> { db.join_team(user, team).map(|_| ()) }
true
3b01bd9599863fe4af4f37b3e0819ef14b1b0c16
Rust
kryo4096/rust-game
/src/components.rs
UTF-8
1,779
2.984375
3
[]
no_license
use math::*; use specs::*; use mesh::*; #[derive(Copy, Clone, Component)] #[component(VecStorage)] pub struct Transform { pos: Vec3, scale: f32, rot: Quat, } impl Transform { pub fn new() -> Self{ Self {pos: Vec3::zero(), scale: 1., rot: Quat::zero()} } pub fn mov(&mut self, v: Vec3) { self.pos += v; } pub fn mov_to(&mut self, v: Vec3) { self.pos = v; } pub fn rot(&mut self, q: Quat) { self.rot = q * self.rot; } pub fn rot_to(&mut self, q: Quat) { self.rot = q; } pub fn rot_axes(&mut self, x: f32, y: f32, z: f32) { let rot = Quat::from_angle_x(Deg(x)) * Quat::from_angle_x(Deg(x)) * Quat::from_angle_x(Deg(x)); self.rot = rot * self.rot; } pub fn rot_to_axes(&mut self, x: f32, y: f32, z: f32) { let rot = Quat::from_angle_x(Deg(x)) * Quat::from_angle_x(Deg(x)) * Quat::from_angle_x(Deg(x)); self.rot = rot; } pub fn mov_rel(&mut self, v: Vec3) { self.pos += self.rot * v; } pub fn scale(&mut self, s: f32) { self.scale *= s; } pub fn set_scale(&mut self, s: f32) { self.scale = s; } pub fn model_m(&self) -> Mat4 { let scale_m = Mat4::from_scale(self.scale); let rotation_m = Mat4::from(self.rot); let translation_m = Mat4::from_translation(self.pos); translation_m * rotation_m * scale_m } } pub struct Camera { pub perspective: Perspective, } #[derive(Copy, Clone, Component)] #[component(VecStorage)] impl Camera { pub fn perspective_m(&self) -> Mat4 { self.perspective.into() } } #[derive(Copy, Clone, Component)] #[component(VecStorage)] pub struct MeshCarrier { pub mesh: Mesh, }
true
6e383e9366a0a6b78bb76ff7d06b3aa5d8cb349a
Rust
cambricorp/rust-ruter
/src/main.rs
UTF-8
2,414
2.6875
3
[]
no_license
#![recursion_limit = "1024"] #[macro_use] extern crate error_chain; extern crate reqwest; #[macro_use] extern crate serde_derive; extern crate serde_json; extern crate clap; extern crate chrono; extern crate regex; mod errors; mod api; mod stop; use std::collections::HashSet; use clap::{App, Arg}; use chrono::{UTC}; use regex::Regex; use errors::*; use stop::Stop; quick_main!(run); fn run() -> Result<()> { let app = App::new("Ruter API Command Line Client") .version("1.0") .author("Sindre I. Johansen <sindre@sindrejohansen.no>") .arg(Arg::with_name("STOP") .help("Select the stop to search for") .required(true) .index(1)); let matches = app.get_matches(); let stop_name = matches.value_of("STOP").unwrap(); let stop = Stop::find_by_name(stop_name)?; let departures = stop.fetch_departures()?; println!("Avganger fra {}", stop.name); let direction_regex = Regex::new(r"^(\d*) \(([^()]*)\)$").unwrap(); let directions:HashSet<_> = departures.iter().map(|dep| { direction_regex.captures(&dep.platform) .map(|captures| captures.get(2).unwrap().as_str()) }).collect(); for direction in directions { println!(""); if let Some(dirname) = direction { println!("{}", dirname); } let departures = departures.iter() .filter(|dep| { let dep_direction = direction_regex.captures(&dep.platform) .map(|captures| captures.get(2).unwrap().as_str()); dep_direction == direction }); for departure in departures.take(10) { println!("{:>4} {:25} {:>7} {}", departure.line_number, departure.destination, pretty_time(departure.arrival_time), direction_regex.captures(&departure.platform) .map(|captures| captures.get(1).unwrap().as_str()) .unwrap_or(&departure.platform), ); } } Ok(()) } fn pretty_time(time: chrono::DateTime<UTC>) -> String { let duration = time - UTC::now(); let seconds = duration.num_seconds(); let minutes = seconds / 60; let seconds = seconds % 60; if minutes > 10 { return format!("{}", time.with_timezone(&chrono::Local).format("%H:%M")) } format!("{:>2}m {:>2}s", minutes, seconds) }
true
d351d0af8cedc92208b1a7b9f63c18993f25f84b
Rust
RobertZ2011/yith
/sothoth/kernel/common/src/mutex.rs
UTF-8
306
2.640625
3
[]
no_license
use spin::*; pub unsafe fn force_acquire<T: ?Sized>(mutex: &Mutex<T>) -> MutexGuard<T> { let mut tried = mutex.try_lock(); if !tried.is_some() { while !tried.is_some() { mutex.force_unlock(); tried = mutex.try_lock(); } } return tried.unwrap(); }
true
f9bcd379fbe51952c0fb30639875c175ba1e5ce1
Rust
mgottschlag/f4
/src/led.rs
UTF-8
926
3
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! User LEDs use stm32f429::{GPIOG, RCC}; /// All the user LEDs pub static LEDS: [Led; 2] = [ Led { i: 13 }, Led { i: 14 }, ]; /// An LED pub struct Led { i: u8, } impl Led { /// Turns off the LED pub fn off(&self) { // NOTE(safe) atomic write unsafe { (*GPIOG.get()).bsrr.write(|w| w.bits(1 << (self.i + 16))) } } /// Turns on the LED pub fn on(&self) { // NOTE(safe) atomic write unsafe { (*GPIOG.get()).bsrr.write(|w| w.bits(1 << self.i)) } } } /// Initializes all the user LEDs pub fn init(gpiog: &GPIOG, rcc: &RCC) { // Power up peripherals rcc.ahb1enr.modify(|_, w| w.gpiogen().set_bit()); // Configure pins 8-15 as outputs gpiog .moder .modify( |_, w| { w.moder13() .bits(1) .moder14() .bits(1) }, ); }
true
14abefb88d22077695a8c66609f8e538dc202e1b
Rust
PaulB86UK/PP_ART.github.io
/RUST/Image_combiner/main.rs
UTF-8
5,161
3.421875
3
[]
no_license
mod arc; use arc::Args; use image::ImageError; use image::{io::Reader, ImageFormat, DynamicImage, ImageBuffer, GenericImageView, imageops::FilterType::Triangle}; use std::{io::BufReader, fs::File}; use std::convert::TryInto; #[derive(Debug)] enum ImageDataErrors { DifferentImageFormats, BuffertooSmall, UnableToReadImageFromPath(std::io::Error), //This one exists in the standard library UnableToFormatImage(String), UnableToDecodeImage(ImageError), UnableToSaveImage(ImageError) } struct FloatingImage { width: u32, height: u32, data: Vec<u8>, name: String, } impl FloatingImage { fn new(width: u32, height: u32, name: String) -> Self { let buffer_capacity = height * width * 4; let buffer = Vec::with_capacity(buffer_capacity.try_into().unwrap()); FloatingImage { width, height, data: buffer, name, } } fn set_data(&mut self, data:Vec<u8>) -> Result<(),ImageDataErrors> { if data.len() > self.data.capacity() { //check if we can store return Err(ImageDataErrors::BuffertooSmall); } self.data = data; Ok(()) } } fn main() -> Result<(),ImageDataErrors>{ let args = Args::new(); let (image_1,image_format_1) = find_image_from_path(args.image_1)?; //Since we added the errors the ? propagates the error into the function, its the error propagation operator!! let (image_2,image_format_2) = find_image_from_path(args.image_2)?; if image_format_1 != image_format_2 { return Err(ImageDataErrors::DifferentImageFormats); } let (image_1,image_2) = standarized_size(image_1, image_2); let mut output = FloatingImage::new(image_1.width(),image_1.height(), args.output); let combined_data = combine_image(image_1, image_2); output.set_data(combined_data)?; // check question functiionality "?" error propagation operator? 
if let Err(e) = image::save_buffer_with_format(output.name,&output.data, output.width, output.height, image::ColorType::Rgba8, image_format_1) { Err(ImageDataErrors::UnableToSaveImage(e)) } else { Ok(()) } } fn find_image_from_path(path: String) -> Result<(DynamicImage, ImageFormat), ImageDataErrors>{ //let image_reader: Reader<BufReader<File>> = Reader::open(path).unwrap(); //let image_format: ImageFormat = image_reader.format().unwrap(); //let image: DynamicImage = image_reader.decode().unwrap(); //The following is the translation of the above adding proper error handling. match Reader::open(&path) { Ok(image_reader) => { //2nd Line if let Some(image_format) = image_reader.format() { //3rd Line match image_reader.decode() { Ok(image) => Ok((image, image_format)), Err(e) => Err(ImageDataErrors::UnableToDecodeImage(e)) //e pass error as is; } } else { return Err(ImageDataErrors::UnableToFormatImage(path)); } }, Err(e) => Err(ImageDataErrors::UnableToReadImageFromPath(e)) } } fn get_smallest_dimension(dim_1: (u32,u32), dim_2: (u32,u32)) -> (u32,u32) { let pix_1 = dim_1.0 * dim_1.1; let pix_2 = dim_2.0 * dim_2.1; return if pix_1 < pix_2 {dim_1} else {dim_2}; } fn standarized_size(image_1: DynamicImage, image_2: DynamicImage) -> (DynamicImage,DynamicImage) { let (width,height) = get_smallest_dimension(image_1.dimensions(), image_2.dimensions()); println!("width: {}, height: {}\n", width,height); if image_2.dimensions() == (width,height) { (image_1.resize_exact(width,height,Triangle), image_2) } else { (image_1, image_2.resize_exact(width , height, Triangle )) } } fn combine_image(image_1: DynamicImage, image_2: DynamicImage) -> Vec<u8> { let vec_1 = image_1.to_rgba8().into_vec(); let vec_2 = image_2.to_rgba8().into_vec(); alternate_pixels(vec_1,vec_2) } fn alternate_pixels(vec_1: Vec<u8>, vec_2: Vec<u8>) -> Vec<u8>{ //if vec_1.len() == 5, then the vec macro create a vector like [0,0,0,0,0] (5) let mut combined_data = vec![0u8; vec_1.len()]; //while loop to go on each 
pixel+ /// need to declare i (this is not python) let mut i = 0; //obviusly its mut if not it wont change its value while i < vec_1.len() { if i % 8 == 0 { combined_data.splice(i..= i +3,set_rgba(&vec_1,i,i+3)); //i..= i +3 this means from i to i + 3 } else { combined_data.splice(i..=i+3,set_rgba(&vec_2,i,i+3)); //not fully understand why this time is else since i=9 will replace preivous points? } i += 4; //increase by 4 because we are using rgb (3+1) } combined_data } fn set_rgba(vec:&Vec<u8>, start: usize, end: usize) -> Vec<u8> { let mut rgba = Vec::new(); for i in start..=end { let val = match vec.get(i) { Some(d) => *d, None => panic!("Index out of bounds"), }; rgba.push(val); } rgba }
true
934ede031dcaf0de4a9e722a37603723cdd243b5
Rust
featureenvy/rust-embedded
/src/driver/switch.rs
UTF-8
1,282
3.390625
3
[]
no_license
use hal::gpio; pub struct Switch { pin: gpio::DigitalPin, direction: gpio::Logic, } pub enum InternalSwitch { Switch0, Switch1, } impl Switch { pub fn new(port: gpio::Port, pin_num: gpio::Pins, direction: gpio::Logic) -> Switch { let pin = gpio::DigitalPin::new_input(port, pin_num); Switch { pin: pin, direction: direction } } pub fn new_internal(switch: InternalSwitch) -> Switch { let switch = match switch { InternalSwitch::Switch0 => Switch::new(gpio::Port::PortF, gpio::Pins::Pin4, gpio::Logic::Positive), InternalSwitch::Switch1 => Switch::new(gpio::Port::PortF, gpio::Pins::Pin0, gpio::Logic::Positive), }; switch.enable_pull_up(); switch } pub fn enable_pull_up(&self) { self.pin.enable_pull_up(); } pub fn wait_until_on(&self) { while self.is_off() { }; } pub fn wait_until_off(&self) { while self.is_on() { }; } pub fn is_on(&self) -> bool { match self.direction { gpio::Logic::Positive => self.pin.read() == 0, gpio::Logic::Negative => self.pin.read() != 0, } } pub fn is_off(&self) -> bool { !self.is_on() } }
true
c6743a0aceba9009cf08c18791b4664da64cebcc
Rust
cwruRobotics/BMC-CAN-Fw
/src/error_codes.rs
UTF-8
256
2.65625
3
[]
no_license
use defmt::Format; #[derive(Format, PartialEq, Eq)] #[repr(u8)] pub enum ErrorCode { None = 0, MotorDriverFault = 1, CanError = 2, Other = 3, } impl From<ErrorCode> for u8 { fn from(code: ErrorCode) -> u8 { code as u8 } }
true
fb56554b0c1913f8ceb7ff179b3d7f18abd8312f
Rust
Qix-/rose-tools
/rose-info/src/main.rs
UTF-8
3,157
3
3
[ "MIT" ]
permissive
use roselib::files::{HIM, TIL, ZON}; use roselib::io::RoseFile; use std::collections::HashMap; use std::path::{Path, PathBuf}; fn main() { let mut args: Vec<String> = std::env::args().skip(1).collect(); if args.len() < 2 { println!("Usage: rose-info <command> <paths...>"); println!("Commands: til_brushes, zon_brushes"); ::std::process::exit(1); } let cmd = args.remove(0); match cmd.as_str() { "him_range" => him_range(args.as_slice()), "til_brushes" => til_brush_info(args.as_slice()), "zon_brushes" => zon_brush_info(args.as_slice()), _ => { eprintln!("Command not recognized: {0}", cmd); ::std::process::exit(1); } } } fn him_range(paths: &[String]) { let mut min_height = 9999999.0; let mut max_height = -9999999.0; for him_path in paths { let him_path = Path::new(him_path); let him = HIM::from_path(&him_path).expect("Invalid him file"); if him.min_height < min_height { min_height = him.min_height; } if him.max_height > max_height { max_height = him.max_height; } } println!("Min height: {}", min_height); println!("Max height: {}", max_height); } fn til_brush_info(paths: &[String]) { let mut map_brushes: HashMap<&Path, Vec<u8>> = HashMap::new(); let mut max_map_path = PathBuf::new(); let mut max_map_brush_count = 0; for til_path in paths { let til_path = Path::new(til_path); let til_parent = til_path.parent().unwrap(); let til = TIL::from_path(&til_path).expect("Invalid TIL file"); for tile_row in til.tiles { for tile in tile_row { if map_brushes.contains_key(til_parent) { if let Some(v) = map_brushes.get_mut(til_parent) { v.push(tile.brush_id); } } else { map_brushes.insert(til_parent, vec![tile.brush_id]); } } } } for (map_path, brushes) in map_brushes.iter_mut() { brushes.sort_unstable(); brushes.dedup(); let brush_count = brushes.len(); if brush_count > max_map_brush_count { max_map_brush_count = brush_count; max_map_path = map_path.to_path_buf(); } println!( "{} unique brushes used in {}", brush_count, map_path.display() ); } println!( "MAX: {} unique brushes used 
in {}", max_map_brush_count, max_map_path.display(), ); } fn zon_brush_info(paths: &[String]) { let mut max_brushes = 0; let mut max_brush_path = ""; for zon_path in paths { let zon = ZON::from_path(Path::new(&zon_path)).expect("Invalid ZON file"); let brushes = zon.tiles.len(); println!("{} brushes in {}", brushes, &zon_path); if brushes > max_brushes { max_brushes = brushes; max_brush_path = zon_path; } } println!("MAX: {} brushes in {}", max_brushes, max_brush_path); }
true
ff0349fafb26130e1a4c14202d0402e7d7660299
Rust
Zach41/webdemo
/examples/time.rs
UTF-8
1,023
2.625
3
[]
no_license
extern crate webdemo; extern crate time; extern crate env_logger; use webdemo::prelude::*; use time::precise_time_ns; struct ResponseTime; impl types::Key for ResponseTime { type Value = u64; } impl BeforeMiddleware for ResponseTime { fn before(&self, req: &mut Request) -> WebResult<()> { req.extensions.insert::<ResponseTime>(precise_time_ns()); Ok(()) } } impl AfterMiddleware for ResponseTime { fn after(&self, req: &mut Request, res: Response) -> WebResult<Response> { let delta = precise_time_ns() - *req.extensions.get::<ResponseTime>().unwrap(); println!("Request took: {} ms", (delta as f64) / 1000000.0); Ok(res) } } fn main() { let _ = env_logger::init(); let mut chain = Chain::new(|req: &mut Request| { println!("{:?}", req.url.path()); Ok(Response::with((StatusCode::Ok, "Hello, World!"))) }); chain.before(ResponseTime); chain.after(ResponseTime); Web::new(chain).http("0.0.0.0:8080").unwrap(); }
true
bfc43a4561467722e2e2564782067dc5778bedb5
Rust
quodlibetor/cargo-readme
/src/cargo_info.rs
UTF-8
1,156
3.109375
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Read crate information from `Cargo.toml` use std::fs::File; use std::io::Read; use std::path::Path; use toml; /// Cargo.toml crate information #[derive(Clone, Deserialize)] pub struct Cargo { pub package: CargoPackage, pub lib: Option<CargoLib>, pub bin: Option<Vec<CargoLib>>, } /// Cargo.toml crate package information #[derive(Clone, Deserialize)] pub struct CargoPackage { pub name: String, pub license: Option<String>, } /// Cargo.toml crate lib information #[derive(Clone, Deserialize)] pub struct CargoLib { pub path: String, } /// Try to get crate name and license from Cargo.toml pub fn get_cargo_info(project_root: &Path) -> Result<Cargo, String> { let mut cargo_toml = match File::open(project_root.join("Cargo.toml")) { Ok(file) => file, Err(e) => return Err(format!("Could not read Cargo.toml: {}", e)), }; let mut buf = String::new(); match cargo_toml.read_to_string(&mut buf) { Err(e) => return Err(format!("{}", e)), Ok(_) => {} } match toml::from_str(&buf) { Err(e) => return Err(format!("{}", e)), Ok(cargo) => Ok(cargo), } }
true
a93f155208dd49ee5c863f1311baabb3b0a34ecf
Rust
upanshu21/rust_concepts
/structs.rs
UTF-8
1,351
4.09375
4
[]
no_license
//similar to tuple // can be imagined as custom data type with user defined fields // we can use structs by creating instances of it. #[derive(Debug)] //this is only applied to structs, enums and union. struct Car { //multiple instance of CAR can be made making it reusable. name: String, model: String, colour: String, year: u32, status: bool, } struct Rectangle { number1: u32, number2: u32, } fn main() { let honda = Car { name: String::from("city"), model: String::from("xzi"), colour: String::from("Golden"), year: 2007, status: true, }; let maruti = Car { name: String::from("swift"), model: String::from("xv"), ..honda //using remaining values defined in honda // colour: honda.colour, // year: honda.year, // status: honda.status, }; // let car_name = &honda.name; // println!("{:?}", honda); println!("{:?}", maruti); //tuples as structs let tuple_one = (1,2,3,4); let result = tuple_one.0 + tuple_one.1; println!("{}", result); //Rectangle struct example let rec_fig = Rectangle { number1: 1, number2: 5, }; println!("print: {}",multiply(&rec_fig)); } fn multiply(fig: &Rectangle) -> u32 { fig.number1 * fig.number2 }
true
94e05b3a6630421387ba25f56531446ae348f0d9
Rust
lygstate/rudy
/examples/removal.rs
UTF-8
570
2.65625
3
[ "Apache-2.0", "MIT" ]
permissive
extern crate rudy; use rudy::rudymap::RudyMap; fn main() { let mut map = RudyMap::new(); let low = 0u32; let high = 10000u32; for i in low..high { map.insert(i, i + 1); } for i in low..high { assert_eq!(map.get(i).cloned(), Some(i + 1)); } println!("{}", map.len()); for i in low..high { assert_eq!(map.remove(i), Some(i + 1)); } for i in low..high { assert_eq!(map.get(i), None); } for i in 5..7 { println!("{} => {:?}", i, map.get(i)); } println!("{}", map.len()); }
true
56112ea0b63553c3ddf4cdcc3aada1bb02fe5edd
Rust
gevious/monopoly
/backend/src/publisher.rs
UTF-8
6,028
2.921875
3
[ "MIT" ]
permissive
use std::fs; use async_process::Command; use super::game::{Game}; use super::square::Square; const TEMP_FILE :&str = "/tmp/index.html"; /// Print the game summary // Prints out stats for each player fn print_summary(game: &Game) { println!("==== Summary ===="); for p_ref in game.players().iter() { let p = p_ref.borrow(); let board = game.board(); let occupying_square = board.get(p.position()) .expect("Player is not on the board"); print!("{} ", p.name()); if p.left_game() { println!("\t Has left the game"); continue; } match p.is_in_jail() { true => println!("\t is IN JAIL 🚧, but still has ${}", p.cash()), false => println!("\t is on {} with ${}", occupying_square.name(), p.cash()) }; if p.num_get_out_of_jail_cards() > 0 { println!("\t has {} get-out-of-jail cards", p.num_get_out_of_jail_cards()); } let board = game.board(); let owned_streets = board.iter() .filter(|&x| { match x.asset.borrow().owner() { None => false, Some(owner_idx) => owner_idx == p.turn_idx() } }) .collect::<Vec<&Square>>(); match owned_streets.len() { 0 => println!("\t owns nothing :("), _ => { println!("\t owns {} assets:", owned_streets.len()); for s in owned_streets.iter() { let mut x = s.name().to_string(); let a = s.asset.borrow(); if a.is_mortgaged() { x.push_str(" (mortgaged)"); } else if a.has_hotel() { x.push_str(" (🏨)"); } else if a.house_num() > 0 { match a.house_num() { 1 => x.push_str(&format!(" ({} 🏠)", a.house_num())), 2 => x.push_str(&format!(" ({} 🏡)", a.house_num())), 3 => x.push_str(&format!(" ({} 🏘️)", a.house_num())), _ => x.push_str(&format!(" ({} 🏘️)", a.house_num())) } } match s.get_street_details().unwrap().get_suburb() { Some(s) => { println!("\t\t {} ({:?})", x, s.color()); }, None => { println!("\t\t {}", x); } } } } }; } println!("================="); } /// Publish game summary to www.gevious.com/monopoly pub fn publish(game: &Game) { print_summary(game); let mut sb = String::from("<h1>Monopoly</h1>"); for p_ref in game.players().iter() { let p = p_ref.borrow(); 
let board = game.board(); let occupying_square = board.get(p.position()) .expect("Player is not on the board"); if p.left_game() { sb.push_str(&format!("{} has left the game", p.name())); continue; } sb.push_str(&format!("{} : ${}", p.name(), p.cash())); sb.push_str("<ul>"); match p.is_in_jail() { true => sb.push_str(&format!("<li> is IN JAIL 🚧, but still has ${}</li>", p.cash())), false => sb.push_str(&format!("<li>is on {} with ${}</li>", occupying_square.name(), p.cash())) }; if p.num_get_out_of_jail_cards() > 0 { sb.push_str(&format!("<li>has {} get-out-of-jail cards</li>", p.num_get_out_of_jail_cards())); } let board = game.board(); let owned_streets = board.iter() .filter(|&x| { match x.asset.borrow().owner() { None => false, Some(owner_idx) => owner_idx == p.turn_idx() }}) .collect::<Vec<&Square>>(); match owned_streets.len() { 0 => sb.push_str(&format!("<li>owns nothing :(</li>")), _ => { sb.push_str(&format!("<li>owns: <ul>")); for s in owned_streets.iter() { let mut x = s.name().to_string(); let a = s.asset.borrow(); if a.is_mortgaged() { x.push_str(" (mortgaged)"); } else if a.has_hotel() { x.push_str(" (🏨)"); } else if a.house_num() > 0 { match a.house_num() { 1 => x.push_str(&format!(" ({} 🏠)", a.house_num())), 2 => x.push_str(&format!(" ({} 🏡)", a.house_num())), 3 => x.push_str(&format!(" ({} 🏘️)", a.house_num())), _ => x.push_str(&format!(" ({} 🏘️)", a.house_num())) } } match s.get_street_details().unwrap().get_suburb() { Some(s) => { sb.push_str(&format!("<li>{} ({:?})</li>", x, s.color())); }, None => { sb.push_str(&format!("<li>{}</li>", x)); } } } sb.push_str("</ul></li>"); } }; sb.push_str("</ul>"); } let summary = format!("<!DOCTYPE html><html><head><meta charset=\"UTF-8\"></head><body>{}</body></html>", sb); fs::write(TEMP_FILE, summary); upload(); } /// Upload summary to S3 fn upload() { // For now, i'm just calling a CLI command. 
// TODO: Implement AWS SDK to make this more robust Command::new("./src/upload.sh") .output(); } #[cfg(test)] mod tests { }
true
a4216aa0c9d9feb75562f8ed5b62de2ab681f848
Rust
amling/r4
/ars/ds/src/bit_state.rs
UTF-8
1,700
3.46875
3
[]
no_license
use std::fmt::Debug; use std::hash::Hash; pub trait Bits: Copy + Send + Sync + Hash + Eq + Debug { fn zero() -> Self; fn size() -> usize; fn get_bit(&self, n: usize) -> bool; fn set_bit(&mut self, n: usize, v: bool); fn or(&self, rhs: &Self) -> Self; } // I would love not to have to macro this mess, but rust's num traits suck so unbelievably badly. macro_rules! uxx_bits_impl { ($t:ty, $n:expr) => { impl Bits for $t { fn zero() -> Self { 0 } fn size() -> usize { $n } fn get_bit(&self, n: usize) -> bool { ((*self >> n) & 1) != 0 } fn set_bit(&mut self, n: usize, v: bool) { *self &= !(1 << n); if v { *self |= 1 << n; } } fn or(&self, rhs: &Self) -> Self { *self | *rhs } } } } uxx_bits_impl!(u32, 32); uxx_bits_impl!(u64, 64); uxx_bits_impl!(u128, 128); impl<A: Bits, B: Bits> Bits for (A, B) { fn zero() -> Self { (A::zero(), B::zero()) } fn size() -> usize { A::size() + B::size() } fn get_bit(&self, n: usize) -> bool { if n < A::size() { self.0.get_bit(n) } else { self.1.get_bit(n - A::size()) } } fn set_bit(&mut self, n: usize, v: bool) { if n < A::size() { self.0.set_bit(n, v) } else { self.1.set_bit(n - A::size(), v) } } fn or(&self, rhs: &Self) -> Self { (self.0.or(&rhs.0), self.1.or(&rhs.1)) } }
true
17198fd3b6dc26d404a8ae30245f2cc4a27a07f5
Rust
nikdeapen/address
/src/url/path.rs
UTF-8
1,786
3.53125
4
[]
no_license
/// A URL path. #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] pub struct Path<'a> { path: &'a str, } impl<'a> Path<'a> { //! Construction /// Creates a new URL path. pub(in crate::url) fn new(path: &'a str) -> Self { debug_assert!(!path.is_empty()); debug_assert_eq!(path.as_bytes()[0], b'/'); Self { path } } } impl<'a> Path<'a> { //! Properties /// Gets the path string. (this will always start with a '/') pub const fn as_str(&self) -> &'a str { self.path } /// Gets the number of path segments. (at least one) pub fn segment_count(&self) -> usize { self.path.as_bytes().iter().filter(|c| **c == b'/').count() } } impl<'a> Path<'a> { //! Iteration /// Creates a new iterator for the path segments. pub fn iter(&self) -> PathIterator<'a> { PathIterator { path: self.path } } } /// Responsible for iterating over path segments. #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] pub struct PathIterator<'a> { path: &'a str, } impl<'a> Iterator for PathIterator<'a> { type Item = &'a str; fn next(&mut self) -> Option<Self::Item> { if self.path.is_empty() { None } else { self.path = &self.path[1..]; match self.path.as_bytes().iter().position(|c| *c == b'/') { Some(slash) => { let result: &'a str = &self.path[..slash]; self.path = &self.path[slash..]; Some(result) } None => { let result: &'a str = self.path; self.path = ""; Some(result) } } } } }
true
73eea8d7b95fd089e19a2668e4101247ab2d51b0
Rust
yanshiyason/cob-rust
/src/config.rs
UTF-8
277
2.59375
3
[]
no_license
use serde_derive::Deserialize;

/// Top-level application configuration, deserialized via serde
/// (the concrete source — file/env — is not visible from this module).
#[derive(Deserialize, Clone)]
pub struct Config {
    // Prefix string used by the application; exact purpose is defined by
    // the callers of this config, not here.
    pub prefix: String,
    // GitHub-related settings, see `GithubConfig`.
    pub github: GithubConfig,
}

/// GitHub account settings.
///
/// NOTE(review): both `password` and `auth_token` are carried here; which one
/// is actually used for authentication is not visible from this file —
/// confirm against the GitHub client code.
#[derive(Deserialize, Clone)]
pub struct GithubConfig {
    pub username: String,
    pub password: String,
    pub auth_token: String,
}
true
71410756c2063b454bdcde6af67120b12dd41a50
Rust
Trolldemorted/isla
/isla-lib/src/ir/linearize.rs
UTF-8
16,364
2.640625
3
[ "BSD-2-Clause" ]
permissive
// BSD 2-Clause License // // Copyright (c) 2020 Alasdair Armstrong // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //! This module provides a function [linearize()] that converts IR //! from function bodies containing loops and other IR, into a linear //! sequence of instructions without any control flow. //! //! The way this works is as follows: //! //! ```text //! A A: declare x; if b ... //! / \ B: then { x = f(x) } //! B C C: else { x = g(x) } //! \ / D: return x //! D //! ``` //! //! This is then converted into SSA form, like: //! //! ```text //! A A: declare x/1; if b //! / \ B: then { x/2 = f(x/1) } //! B C C: else { x/3 = g(x/1) } //! \ / D: x/4 = φ(x/2, x/3); return x/4 //! D //! ``` //! //! 
Finally, we come out of SSA form by placing the control flow graph //! into topological order, and replacing the phi functions with `ite` //! functions that map directly to the `ite` construct in the SMT //! solver. //! //! ```text //! A A: declare x/1; //! | B: declare x/2; //! B x/2 = f(x/1); //! | C: declare x/3; //! C x/3 = g(x/1); //! | D: declare x/4; //! D x/4 = ite(b, x/2, x/3); //! return x/4 //! ``` //! //! The obvious limitations of this are that the function in question //! needs to be pure (it can only read architectural state), and its //! control flow graph must be acyclic so it can be placed into a //! topological order. use petgraph::algo; use petgraph::graph::{EdgeIndex, NodeIndex}; use petgraph::Direction; use std::cmp; use std::ops::{BitAnd, BitOr}; use super::ssa::{unssa_ty, BlockInstr, Edge, SSAName, Terminator, CFG}; use super::*; use crate::config::ISAConfig; use crate::primop::{binary_primops, variadic_primops}; /// The reachability of a node in an SSA graph is determined by a /// boolean formula over edges which can be taken to reach that node. 
// A reachability condition is kept symbolic (a small formula AST over CFG
// edges) so it can be simplified eagerly and only lowered to an SMT `Exp`
// on demand via `Reachability::exp`.
#[derive(Clone)]
enum Reachability {
    True,
    False,
    Edge(EdgeIndex),
    And(Box<Reachability>, Box<Reachability>),
    Or(Box<Reachability>, Box<Reachability>),
}

/// Translates a block terminator plus the edge actually taken into the
/// symbolic condition under which that edge is followed.
///
/// Panics if the terminator/edge combination cannot occur in well-formed SSA.
fn terminator_reachability_exp(terminator: &Terminator, edge: &Edge) -> Exp<SSAName> {
    match (terminator, edge) {
        // Unconditional edges are always taken.
        (Terminator::Continue, Edge::Continue) => Exp::Bool(true),
        (Terminator::Goto(_), Edge::Goto) => Exp::Bool(true),
        // Conditional jump: the "true" edge is guarded by the jump condition
        // itself, the "false" edge by its negation.
        (Terminator::Jump(exp, _, _), Edge::Jump(true)) => exp.clone(),
        (Terminator::Jump(exp, _, _), Edge::Jump(false)) => Exp::Call(Op::Not, vec![exp.clone()]),
        (_, _) => panic!("Bad terminator/edge pair in SSA"),
    }
}

impl Reachability {
    /// Lowers this reachability condition into an expression for the SMT
    /// solver, resolving each `Edge` atom against the CFG it came from.
    fn exp<B: BV>(&self, cfg: &CFG<B>) -> Exp<SSAName> {
        use Reachability::*;
        match self {
            True => Exp::Bool(true),
            False => Exp::Bool(false),
            Edge(edge) => {
                if let Some((pred, _)) = cfg.graph.edge_endpoints(*edge) {
                    terminator_reachability_exp(&cfg.graph[pred].terminator, &cfg.graph[*edge])
                } else {
                    panic!("Edge in reachability condition does not exist!")
                }
            }
            And(lhs, rhs) => Exp::Call(Op::And, vec![lhs.exp(cfg), rhs.exp(cfg)]),
            Or(lhs, rhs) => Exp::Call(Op::Or, vec![lhs.exp(cfg), rhs.exp(cfg)]),
        }
    }
}

impl BitOr for Reachability {
    type Output = Self;

    // Disjunction, eagerly simplifying the True/False cases so formulas
    // stay small.
    fn bitor(self, rhs: Self) -> Self::Output {
        use Reachability::*;
        match (self, rhs) {
            (True, _) => True,
            (_, True) => True,
            (False, rhs) => rhs,
            (lhs, False) => lhs,
            (lhs, rhs) => Or(Box::new(lhs), Box::new(rhs)),
        }
    }
}

impl BitAnd for Reachability {
    type Output = Self;

    // Conjunction, eagerly simplifying the True/False cases.
    fn bitand(self, rhs: Self) -> Self::Output {
        use Reachability::*;
        match (self, rhs) {
            (True, rhs) => rhs,
            (lhs, True) => lhs,
            (False, _) => False,
            (_, False) => False,
            (lhs, rhs) => And(Box::new(lhs), Box::new(rhs)),
        }
    }
}

/// Computes the reachability condition for each node in an acyclic graph.
// Walks nodes in topological order, so every predecessor's condition is
// already computed when a node is visited. A node is reachable if any
// predecessor is reachable and the connecting edge is taken.
fn compute_reachability<B: BV>(cfg: &CFG<B>, topo_order: &[NodeIndex]) -> HashMap<NodeIndex, Reachability> {
    let mut reachability: HashMap<NodeIndex, Reachability> = HashMap::new();

    for ix in topo_order {
        // Only the root is unconditionally reachable.
        let mut r = if *ix == cfg.root { Reachability::True } else { Reachability::False };

        for pred in cfg.graph.neighbors_directed(*ix, Direction::Incoming) {
            let edge = cfg.graph.find_edge(pred, *ix).unwrap();
            let (pred, _) = cfg.graph.edge_endpoints(edge).unwrap();
            let pred_r = reachability.get(&pred).unwrap().clone();
            r = r | (pred_r & Reachability::Edge(edge))
        }
        reachability.insert(*ix, r);
    }

    reachability
}

// Rebuilds a location out of SSA form, mapping each SSA name to a fresh
// plain name via `names` (see `SSAName::unssa`).
fn unssa_loc(loc: &Loc<SSAName>, symtab: &mut Symtab, names: &mut HashMap<SSAName, Name>) -> Loc<Name> {
    use Loc::*;
    match loc {
        Id(id) => Id(id.unssa(symtab, names)),
        Field(loc, field) => Field(Box::new(unssa_loc(loc, symtab, names)), field.unssa(symtab, names)),
        Addr(loc) => Addr(Box::new(unssa_loc(loc, symtab, names))),
    }
}

// Rebuilds an expression out of SSA form; purely structural recursion.
fn unssa_exp(exp: &Exp<SSAName>, symtab: &mut Symtab, names: &mut HashMap<SSAName, Name>) -> Exp<Name> {
    use Exp::*;
    match exp {
        Id(id) => Id(id.unssa(symtab, names)),
        Ref(r) => Ref(r.unssa(symtab, names)),
        Bool(b) => Bool(*b),
        Bits(bv) => Bits(*bv),
        String(s) => String(s.clone()),
        Unit => Unit,
        I64(n) => I64(*n),
        I128(n) => I128(*n),
        Undefined(ty) => Undefined(unssa_ty(ty)),
        Struct(s, fields) => Struct(
            s.unssa(symtab, names),
            fields.iter().map(|(field, exp)| (field.unssa(symtab, names), unssa_exp(exp, symtab, names))).collect(),
        ),
        Kind(ctor, exp) => Kind(ctor.unssa(symtab, names), Box::new(unssa_exp(exp, symtab, names))),
        Unwrap(ctor, exp) => Unwrap(ctor.unssa(symtab, names), Box::new(unssa_exp(exp, symtab, names))),
        Field(exp, field) => Field(Box::new(unssa_exp(exp, symtab, names)), field.unssa(symtab, names)),
        Call(op, args) => Call(*op, args.iter().map(|arg| unssa_exp(arg, symtab, names)).collect()),
    }
}

// Rebuilds a single basic-block instruction out of SSA form.
fn unssa_block_instr<B: BV>(
    instr: &BlockInstr<B>,
    symtab: &mut Symtab,
    names: &mut HashMap<SSAName, Name>,
) -> Instr<Name, B> {
    use BlockInstr::*;
    match instr {
        Decl(v, ty) => Instr::Decl(v.unssa(symtab, names), unssa_ty(ty)),
        Init(v, ty, exp) => Instr::Init(v.unssa(symtab, names), unssa_ty(ty), unssa_exp(exp, symtab, names)),
        Copy(loc, exp) => Instr::Copy(unssa_loc(loc, symtab, names), unssa_exp(exp, symtab, names)),
        Monomorphize(v) => Instr::Monomorphize(v.unssa(symtab, names)),
        Call(loc, ext, f, args) => Instr::Call(
            unssa_loc(loc, symtab, names),
            *ext,
            *f,
            args.iter().map(|arg| unssa_exp(arg, symtab, names)).collect(),
        ),
        PrimopUnary(loc, fptr, exp) => {
            Instr::PrimopUnary(unssa_loc(loc, symtab, names), *fptr, unssa_exp(exp, symtab, names))
        }
        PrimopBinary(loc, fptr, exp1, exp2) => Instr::PrimopBinary(
            unssa_loc(loc, symtab, names),
            *fptr,
            unssa_exp(exp1, symtab, names),
            unssa_exp(exp2, symtab, names),
        ),
        PrimopVariadic(loc, fptr, args) => Instr::PrimopVariadic(
            unssa_loc(loc, symtab, names),
            *fptr,
            args.iter().map(|arg| unssa_exp(arg, symtab, names)).collect(),
        ),
    }
}

// Attaches a pending label to the instruction. `take()` ensures the label is
// emitted at most once — only the first instruction of a block is labeled.
fn apply_label<B: BV>(label: &mut Option<usize>, instr: Instr<Name, B>) -> LabeledInstr<B> {
    if let Some(label) = label.take() {
        LabeledInstr::Labeled(label, instr)
    } else {
        LabeledInstr::Unlabeled(instr)
    }
}

// Emits a chain of `ite` primops implementing a phi function:
//   id = ite(path_conds[i], first, ite(path_conds[i+1], ...))
// Each recursive step declares a fresh temporary `gs` to hold the tail of
// the chain.
#[allow(clippy::too_many_arguments)]
fn ite_chain<B: BV>(
    label: &mut Option<usize>,
    i: usize,
    path_conds: &[Exp<SSAName>],
    id: Name,
    first: SSAName,
    rest: &[SSAName],
    ty: &Ty<Name>,
    names: &mut HashMap<SSAName, Name>,
    symtab: &mut Symtab,
    linearized: &mut Vec<LabeledInstr<B>>,
) {
    let ite = *variadic_primops::<B>().get("ite").unwrap();

    if let Some((second, rest)) = rest.split_first() {
        let gs = symtab.gensym();
        linearized.push(apply_label(label, Instr::Decl(gs, ty.clone())));
        ite_chain(label, i + 1, path_conds, gs, *second, rest, ty, names, symtab, linearized);
        linearized.push(apply_label(
            label,
            Instr::PrimopVariadic(
                Loc::Id(id),
                ite,
                vec![unssa_exp(&path_conds[i], symtab, names), Exp::Id(first.unssa(symtab, names)), Exp::Id(gs)],
            ),
        ))
    } else {
        // Single remaining argument: no condition needed, just copy it.
        linearized.push(apply_label(label, Instr::Copy(Loc::Id(id), Exp::Id(first.unssa(symtab, names)))))
    }
}

// Replaces one phi function by an ite chain selecting among its arguments,
// guarded by the path condition of each incoming edge.
#[allow(clippy::too_many_arguments)]
fn linearize_phi<B: BV>(
    label: &mut Option<usize>,
    id: SSAName,
    args: &[SSAName],
    n: NodeIndex,
    cfg: &CFG<B>,
    reachability: &HashMap<NodeIndex, Reachability>,
    names: &mut HashMap<SSAName, Name>,
    types: &HashMap<Name, Ty<Name>>,
    symtab: &mut Symtab,
    linearized: &mut Vec<LabeledInstr<B>>,
) {
    // One path condition per incoming edge: predecessor reachable AND edge taken.
    // NOTE(review): this relies on `neighbors_directed` visiting predecessors in
    // the same order as the phi arguments — confirm against the SSA construction.
    let mut path_conds = Vec::new();
    for pred in cfg.graph.neighbors_directed(n, Direction::Incoming) {
        let edge = cfg.graph.find_edge(pred, n).unwrap();
        let cond = reachability[&pred].clone() & Reachability::Edge(edge);
        path_conds.push(cond.exp(cfg))
    }

    // A phi function with no arguments has been explicitly pruned, so
    // we do nothing in that case.
    if let Some((first, rest)) = args.split_first() {
        let ty = &types[&id.base_name()];
        ite_chain(label, 0, &path_conds, id.unssa(symtab, names), *first, rest, ty, names, symtab, linearized)
    }
}

// Emits one basic block: first its phi functions (as declarations plus ite
// chains), then its ordinary instructions (each preceded by a declaration if
// it writes a not-yet-declared SSA name).
fn linearize_block<B: BV>(
    n: NodeIndex,
    cfg: &CFG<B>,
    reachability: &HashMap<NodeIndex, Reachability>,
    names: &mut HashMap<SSAName, Name>,
    types: &HashMap<Name, Ty<Name>>,
    symtab: &mut Symtab,
    linearized: &mut Vec<LabeledInstr<B>>,
) {
    let block = cfg.graph.node_weight(n).unwrap();
    let mut label = block.label;

    for (id, args) in &block.phis {
        let ty = &types[&id.base_name()];
        linearized.push(apply_label(&mut label, Instr::Decl(id.unssa(symtab, names), ty.clone())));
        // We never have to insert ites for phi functions with unit
        // types, and in fact cannot because unit is always concrete.
        match ty {
            Ty::Unit => (),
            _ => linearize_phi(&mut label, *id, args, n, cfg, reachability, names, types, symtab, linearized),
        }
    }

    for instr in &block.instrs {
        if let Some(id) = instr.write_ssa() {
            if instr.declares().is_none() {
                let ty = types[&id.base_name()].clone();
                linearized.push(apply_label(&mut label, Instr::Decl(id.unssa(symtab, names), ty)))
            }
        }
        linearized.push(apply_label(&mut label, unssa_block_instr(instr, symtab, names)))
    }
}

/// Linearizes a function body (see the module documentation): converts it to
/// SSA, and if the CFG is acyclic, emits the blocks in topological order with
/// phi functions replaced by `ite` primops, finishing with a copy of the
/// highest-numbered SSA version of RETURN into RETURN. If the CFG contains a
/// cycle the body is returned unchanged.
pub fn linearize<B: BV>(instrs: Vec<Instr<Name, B>>, ret_ty: &Ty<Name>, symtab: &mut Symtab) -> Vec<Instr<Name, B>> {
    use LabeledInstr::*;

    let labeled = prune_labels(label_instrs(instrs));
    let mut cfg = CFG::new(&labeled);
    cfg.ssa();

    if let Ok(topo_order) = algo::toposort(&cfg.graph, None) {
        let reachability = compute_reachability(&cfg, &topo_order);
        let types = cfg.all_vars_typed(ret_ty);
        let mut linearized = Vec::new();
        let mut names = HashMap::new();

        // Find the highest SSA number ever assigned to RETURN, across both
        // ordinary writes and phi functions; -1 means RETURN is never written.
        let mut last_return = -1;
        for ix in cfg.graph.node_indices() {
            let node = &cfg.graph[ix];
            for instr in &node.instrs {
                if let Some(id) = instr.write_ssa() {
                    if id.base_name() == RETURN {
                        last_return = cmp::max(id.ssa_number(), last_return)
                    }
                }
            }
            for (id, _) in &node.phis {
                if id.base_name() == RETURN {
                    last_return = cmp::max(id.ssa_number(), last_return)
                }
            }
        }

        for ix in &topo_order {
            linearize_block(*ix, &cfg, &reachability, &mut names, &types, symtab, &mut linearized)
        }

        // Copy the final SSA version of RETURN back into the plain RETURN name.
        if last_return >= 0 {
            linearized.push(Unlabeled(Instr::Copy(
                Loc::Id(RETURN),
                Exp::Id(SSAName::new_ssa(RETURN, last_return).unssa(symtab, &mut names)),
            )))
        }

        linearized.push(Unlabeled(Instr::End));

        unlabel_instrs(linearized)
    } else {
        // Cyclic CFG: cannot be topologically ordered, leave the body as-is.
        unlabel_instrs(labeled)
    }
}

/// Test that a rewritten function body is equivalent to the original
/// body by constructing a symbolic execution problem that proves
/// this. Note that this function should be called with an uninitialized
/// architecture.
#[allow(clippy::too_many_arguments)]
pub fn self_test<'ir, B: BV>(
    num_threads: usize,
    mut arch: Vec<Def<Name, B>>,
    mut symtab: Symtab<'ir>,
    isa_config: &ISAConfig<B>,
    args: &[Name],
    arg_tys: &[Ty<Name>],
    ret_ty: &Ty<Name>,
    instrs1: Vec<Instr<Name, B>>,
    instrs2: Vec<Instr<Name, B>>,
) -> bool {
    use crate::executor;
    use crate::init::{initialize_architecture, Initialized};
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;

    // Register both bodies as fresh functions with identical signatures.
    let fn1 = symtab.intern("self_test_fn1#");
    let fn2 = symtab.intern("self_test_fn2#");
    let comparison = symtab.intern("self_test_compare#");

    arch.push(Def::Val(fn1, arg_tys.to_vec(), ret_ty.clone()));
    arch.push(Def::Fn(fn1, args.to_vec(), instrs1));

    arch.push(Def::Val(fn2, arg_tys.to_vec(), ret_ty.clone()));
    arch.push(Def::Fn(fn2, args.to_vec(), instrs2));

    // Comparison function: call both on the same (symbolic) arguments and
    // return whether the results are equal.
    arch.push(Def::Val(comparison, arg_tys.to_vec(), Ty::Bool));
    arch.push(Def::Fn(comparison, args.to_vec(), {
        use super::Instr::*;
        let x = symtab.gensym();
        let y = symtab.gensym();
        let eq_anything = *binary_primops::<B>().get("eq_anything").unwrap();
        vec![
            Decl(x, ret_ty.clone()),
            Call(Loc::Id(x), false, fn1, args.iter().map(|id| Exp::Id(*id)).collect()),
            Decl(y, ret_ty.clone()),
            Call(Loc::Id(y), false, fn2, args.iter().map(|id| Exp::Id(*id)).collect()),
            PrimopBinary(Loc::Id(RETURN), eq_anything, Exp::Id(x), Exp::Id(y)),
            End,
        ]
    }));

    let Initialized { regs, lets, shared_state } =
        initialize_architecture(&mut arch, symtab, isa_config, AssertionMode::Optimistic);

    let (args, _, instrs) = shared_state.functions.get(&comparison).unwrap();
    let task = executor::LocalFrame::new(comparison, args, None, instrs).add_lets(&lets).add_regs(&regs).task(0);
    let result = Arc::new(AtomicBool::new(true));

    // Equivalence holds iff every path is unsatisfiable for "results differ".
    executor::start_multi(num_threads, None, vec![task], &shared_state, result.clone(), &executor::all_unsat_collector);
    result.load(Ordering::Acquire)
}
true
5fd309d30b472f14dac96fc577ff3718cf98a6b7
Rust
HappyStoic/lazydb
/src/ui.rs
UTF-8
5,164
2.640625
3
[]
no_license
use tui::{
    layout::{Constraint, Direction, Layout},
    style::{Color, Modifier, Style},
    widgets::{Block, Borders, Cell, Row, Table},
    Terminal, Frame,
    text::{Span, Spans},
};
use crate::app::{StatefulTable, App};
use tui::backend::{TermionBackend, Backend};
use std::io;
use termion::{input::MouseTerminal, raw::RawTerminal, raw::IntoRawMode, screen::AlternateScreen};
use std::io::Stdout;
use tui::layout::Rect;
use crate::CliArgs;
use tui::widgets::{Paragraph, Wrap, BorderType};

// TODO add cache for generated widgets so they are not recomputed every frame?
// TODO Does it TUI library itself or not?

/// Owns the terminal handle and renders the entire UI each frame.
pub struct View {
    // Termion backend stacked with raw mode, mouse support and an alternate
    // screen so the user's shell is restored on exit.
    terminal: Terminal<TermionBackend<AlternateScreen<MouseTerminal<RawTerminal<Stdout>>>>>, //TODO redo this somehow to generics
}

impl View {
    /// Sets up the terminal (raw mode, mouse, alternate screen) and wraps it
    /// in a tui `Terminal`.
    ///
    /// NOTE(review): all setup errors are `unwrap`ped — a failure here
    /// panics before the UI starts.
    pub fn new() -> View {
        // Terminal initialization
        let stdout = io::stdout().into_raw_mode().unwrap();
        let stdout = MouseTerminal::from(stdout);
        let stdout = AlternateScreen::from(stdout);
        let backend = TermionBackend::new(stdout);
        let terminal = Terminal::new(backend).unwrap();
        View {
            terminal
        }
    }

    /// Draws one frame: a 20% info/controls column on the left and the data
    /// table filling the remaining 80%.
    ///
    /// NOTE(review): the `Result` returned by `terminal.draw` is discarded —
    /// render errors are silently ignored.
    pub fn draw(&mut self, table: &mut StatefulTable, args: &CliArgs) {
        self.terminal.draw(|f| {
            let chunks = Layout::default()
                .direction(Direction::Horizontal)
                .constraints(
                    [
                        Constraint::Percentage(20),
                        Constraint::Percentage(80),
                    ]
                    .as_ref(),
                )
                .split(f.size());

            draw_left_tab(f, args, chunks[0]);
            draw_table(f, table, chunks[1]);
        });
    }
}

/// Renders the left column: connection information (top 40%) above the
/// controls help (bottom 60%).
fn draw_left_tab<B>(f: &mut Frame<B>, args: &CliArgs, area: Rect)
where
    B: Backend,
{
    let chunks = Layout::default()
        .constraints(
            [
                Constraint::Percentage(40),
                Constraint::Percentage(60),
            ]
            .as_ref(),
        )
        .split(area);
    draw_information(f, args, chunks[0]);
    draw_controls(f, chunks[1]);
}

/// Renders the static key-binding help ("q -> quit").
fn draw_controls<B>(f: &mut Frame<B>, area: Rect)
where
    B: Backend,
{
    let text = vec![
        Spans::from(vec![
            Span::styled("q", Style::default().add_modifier(Modifier::BOLD)),
            Span::raw(" -> "),
            Span::styled("quit", Style::default().add_modifier(Modifier::ITALIC)),
        ]),
    ];

    let block = new_block("Controls");
    let paragraph = Paragraph::new(text).block(block).wrap(Wrap { trim: true });
    f.render_widget(paragraph, area);
}

/// Renders the connection parameters taken from the CLI arguments; missing
/// optional values are shown as "not-provided".
fn draw_information<B>(f: &mut Frame<B>, args: &CliArgs, area: Rect)
where
    B: Backend,
{
    let default = String::from("not-provided"); //TODO make default view with distinguishable style
    let text = vec![
        Spans::from(vec![
            Span::styled("Host", Style::default().fg(Color::Blue)),
            Span::raw(": "),
            Span::raw(&args.host),
        ]),
        Spans::from(vec![
            Span::styled("Port", Style::default().fg(Color::Blue)),
            Span::raw(": "),
            Span::raw((&args.port).to_string()),
        ]),
        Spans::from(vec![
            Span::styled("Username", Style::default().fg(Color::Blue)),
            Span::raw(": "),
            Span::raw((args.username.as_ref()).unwrap_or(&default)),
        ]),
        Spans::from(vec![
            Span::styled("Password", Style::default().fg(Color::Blue)),
            Span::raw(": "),
            Span::raw((args.password.as_ref()).unwrap_or(&default)),
        ]),
    ];

    let block = new_block("Information");
    let paragraph = Paragraph::new(text).block(block).wrap(Wrap { trim: true });
    f.render_widget(paragraph, area);
}

/// Renders the key/value data table with alternating row shading and a
/// highlighted selection driven by `table.state`.
fn draw_table<B>(f: &mut Frame<B>, table: &mut StatefulTable, area: Rect)
where
    B: Backend,
{
    // Alternate row styling: every other row dimmed.
    let mut odd = false;
    let items: Vec<Row> = table
        .items
        .iter()
        .map(|(k,v)|{
            let cells = vec![
                Cell::from(k.as_str()),
                Cell::from(v.as_str()),
            ];
            let style = if odd {
                Style::default()
            } else {
                Style::default().fg(Color::DarkGray)
            };
            odd = !odd;
            Row::new(cells).style(style)
        }).collect();

    let selected_style = Style::default().add_modifier(Modifier::REVERSED);
    let header_cells = ["Key", "Value"]
        .iter()
        .map(|h| Cell::from(*h).style(Style::default().fg(Color::Red)));
    let header = Row::new(header_cells);

    // NOTE(review): only two 1/3-width constraints are given, leaving a third
    // of the width unallocated — confirm whether that is intentional.
    let table_block = Table::new(items)
        .header(header)
        .highlight_style(selected_style)
        .highlight_symbol(">> ")
        .block(new_block("Data"))
        .widths(&[
            Constraint::Ratio(1, 3),
            Constraint::Ratio(1, 3),
        ]);
    f.render_stateful_widget(table_block, area, &mut table.state);
}

/// Builds the standard bordered block with a bold cyan title used by every
/// panel in this UI.
fn new_block(title: &str) -> Block {
    Block::default().borders(Borders::ALL).title(Span::styled(
        title,
        Style::default()
            .fg(Color::Cyan)
            .add_modifier(Modifier::BOLD),
    ))
}
true
50faae2fea99b35ea373b797aa86e13575ab2859
Rust
stackcats/leetcode
/algorithms/easy/hamming_distance.rs
UTF-8
217
2.625
3
[ "MIT" ]
permissive
impl Solution {
    /// Returns the Hamming distance between `x` and `y`: the number of bit
    /// positions in which the two numbers differ.
    ///
    /// Implemented as a popcount of `x ^ y` via `count_ones`, which replaces
    /// the manual shift-and-mask loop. Besides being the idiomatic (and
    /// hardware-accelerated) form, it is also correct when `x ^ y` is
    /// negative: the original `while z > 0` loop exited immediately on a set
    /// sign bit and wrongly returned 0. The result is at most 32, so the
    /// `as i32` cast cannot overflow.
    pub fn hamming_distance(x: i32, y: i32) -> i32 {
        (x ^ y).count_ones() as i32
    }
}
true
1242dc480b89382aaf7c075c8ebf6d38b6952038
Rust
ebarnard/carproject
/rust_controller/ui/src/colors.rs
UTF-8
643
3.015625
3
[]
no_license
pub type Color = [f32; 4];

// NOTE(review): (0.9, 0.9, 0.0) is a yellow-ish color, not magenta — the
// constant's name may be misleading; confirm with the UI code that uses it.
pub const WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0];
pub const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
pub const RED: [f32; 4] = [1.0, 0.0, 0.0, 1.0];
pub const BLUE: [f32; 4] = [0.4, 0.4, 1.0, 1.0];
pub const MAGENTA: [f32; 4] = [0.9, 0.9, 0.0, 1.0];

/// Packs an `[R, G, B, A]` float color into a single `0xAARRGGBB` integer.
///
/// Each channel is clamped to `[0, 1]` and scaled to 8 bits (truncating,
/// not rounding).
pub(crate) fn pack_color(color: Color) -> u32 {
    // Clamp to [0, 1] and widen to an 8-bit channel value.
    fn channel(value: f32) -> u32 {
        if value <= 0.0 {
            0
        } else if value > 1.0 {
            255
        } else {
            (value * 255.0) as u32
        }
    }

    // input: [R G B A] -> output byte layout: A | R | G | B
    let [r, g, b, a] = color;
    (channel(a) << 24) | (channel(r) << 16) | (channel(g) << 8) | channel(b)
}
true
888fd73091f8da058d8432bfb13407c245198906
Rust
yozeff/Rust
/prime_numbers/src/main.rs
UTF-8
2,302
3.84375
4
[]
no_license
//Joseph Harrison 2019
//factorise a number into its primes
use std::io;

/// Primality test by trial division.
///
/// n < 2 (including 0 and negatives) is not prime; 2 is prime; other even
/// numbers are composite; otherwise odd candidates up to and including √n
/// are tried.
///
/// BUG FIX: the original loop used `while i < r`, which never tested the
/// square root itself, so perfect squares of odd primes (9, 25, 49, ...)
/// were wrongly reported prime. The bound is now inclusive, with +1 slack
/// to guard against `f64::sqrt` rounding down on large inputs (testing a
/// couple of extra candidates is harmless).
fn is_prime(n: i64) -> bool {
    if n < 2 {
        // accounts for 1, 0 and negative integers
        return false;
    }
    if n == 2 {
        // two is prime despite being even
        return true;
    }
    if n % 2 == 0 {
        // any other even number is composite
        return false;
    }
    let r = (n as f64).sqrt() as i64 + 1;
    let mut i = 3;
    while i <= r {
        // if i divides n, n is composite
        if n % i == 0 {
            return false;
        }
        // only test odd factors
        i += 2;
    }
    // no factor up to √n found, so n is prime
    true
}

/// Returns the prime factors of `n` (with multiplicity, in ascending order).
///
/// For `n <= 1` the result is empty. Improved from the original O(n) factor
/// search to O(√n): once `i * i > n`, whatever remains of `n` is itself
/// prime. The redundant `is_prime(i)` check was dropped — the smallest
/// divisor > 1 of `n` is always prime. Output is identical to the original.
fn prime_factorise(mut n: i64) -> Vec<i64> {
    let mut factors: Vec<i64> = Vec::new();
    // Strip factors of 2 first so only odd candidates are needed below.
    while n > 1 && n % 2 == 0 {
        n /= 2;
        factors.push(2);
    }
    let mut i = 3;
    while i * i <= n {
        while n % i == 0 {
            n /= i;
            factors.push(i);
        }
        i += 2;
    }
    // Remaining cofactor (if any) is a prime larger than √(original n).
    if n > 1 {
        factors.push(n);
    }
    factors
}

/// Reads an integer from stdin, reports whether it is prime, and prints its
/// prime factorisation (negative inputs are factorised as -1 times the
/// factorisation of their absolute value).
fn main() {
    println!("number:");
    let mut n = String::new();
    //get number input
    io::stdin().read_line(&mut n)
               .expect("failed read");
    //parse and raise errors
    let n: i64 = n.trim().parse()
                  .expect("failed parse");

    if is_prime(n) {
        println!("{} is prime", n);
    } else {
        println!("{} is composite", n);
    }

    println!("prime factors:");
    if n != 0 {
        if n < 0 {
            //handle negatives
            let mut factors = prime_factorise(n * -1);
            factors.push(-1);
            for i in &factors {
                println!("{}", i);
            }
        } else {
            let factors = prime_factorise(n);
            for i in &factors {
                println!("{}", i);
            }
        }
    } else {
        println!("0 has no prime factorisation");
    }
}
true
a93823c3999d6fe899e2899ddb1c3546fe005e0c
Rust
derekdreery/pdf
/src/filter.rs
UTF-8
3,567
2.796875
3
[ "Apache-2.0" ]
permissive
//! Filter types and functionality to filter streams use {Result}; use pdf_par_ser::primitive::{Name, Dictionary}; use pdf_par_ser::util::is_whitespace; /// A list of the possible filters, and logic for decoding pub enum Filter { ASCIIHexDecode, ASCII85Decode, LZWDecode, FlateDecode, RunLengthDecode, CCITTFaxDecode, JBIG2Decode, DCTDecode, JPXDecode, Crypt } impl Filter { fn parse_from(name: Name, params: Dictionary) -> Result<Filter> { match &name[..] { b"ASCIIHexDecode" => Ok(Filter::ASCIIHexDecode), b"ASCII85Decode" => Ok(Filter::ASCII85Decode), b"LZWDecode" => Ok(Filter::LZWDecode), b"FlateDecode" => Ok(Filter::FlateDecode), b"RunLengthDecode" => Ok(Filter::RunLengthDecode), b"CCITTFaxDecode" => Ok(Filter::CCITTFaxDecode), b"JBIG2Decode" => Ok(Filter::JBIG2Decode), b"DCTDecode" => Ok(Filter::DCTDecode), b"JPXDecode" => Ok(Filter::JPXDecode), b"Crypt" => Ok(Filter::Crypt), _ => bail!("Unknown filter type: {}", String::from_utf8_lossy(&name[..])), } } pub fn decode<I>(&self, input: I) -> FilterDecoder<I> where I: Iterator<Item=u8> { unimplemented!(); } } pub struct FilterDecoder<I> where I: Iterator<Item=u8> { input: I, filter: Filter, } pub struct ASCIIHexDecode<I> where I: Iterator<Item=u8> { inner: I, } pub struct ASCII85Decode<I> where I: Iterator<Item=u8> { inner: I, } pub struct LZWDecode<I> where I: Iterator<Item=u8> { inner: I, } pub struct FlateDecode<I> where I: Iterator<Item=u8> { inner: I, } pub struct RunLengthDecode; pub struct CCITTFaxDecode; pub struct JBIG2Decode; pub struct DCTDecode; pub struct JPXDecode; pub struct Crypt; impl<I> Iterator for ASCIIHexDecode<I> where I: Iterator<Item=u8> { type Item = u8; fn next(&mut self) -> Option<u8> { let ch1 = loop { match self.inner.next() { Some(ch) => match ch { b'0' ... b'9' => break ch - b'0', b'a' ... b'f' => break ch - b'a' + 10, b'A' ... 
b'F' => break ch - b'A' + 10, w if is_whitespace(w) => (), b'>' => { return None }, _ => { return None; } // currently we don't handle errors }, None => { return None; } }; }; let ch2 = loop { match self.inner.next() { Some(ch) => match ch { b'0' ... b'9' => break ch - b'0', b'a' ... b'f' => break ch - b'a' + 10, b'A' ... b'F' => break ch - b'A' + 10, w if is_whitespace(w) => (), b'>' => { break 0 }, _ => { return None; } // currently we don't handle errors }, None => { break 0 } }; }; Some(ch1 << 4 + ch2) } } impl<I> Iterator for ASCII85Decode<I> where I: Iterator<Item=u8> { type Item = u8; fn next(&mut self) -> Option<u8> { unimplemented!(); } } impl<I> Iterator for LZWDecode<I> where I: Iterator<Item=u8> { type Item = u8; fn next(&mut self) -> Option<u8> { unimplemented!(); } } impl<I> Iterator for FlateDecode<I> where I: Iterator<Item=u8> { type Item = u8; fn next(&mut self) -> Option<u8> { unimplemented!(); } }
true
72970a256f3a378fadfe064651e69ef9be08d3ea
Rust
cbarrete/keyell
/src/math.rs
UTF-8
282
3.265625
3
[]
no_license
use std::f64::consts::PI; use crate::types::Vec3; pub fn dot(v1: &Vec3, v2: &Vec3) -> f64 { v1.x * v2.x + v1.y * v2.y + v1.z * v2.z } pub fn deg_to_radians(d: f64) -> f64 { (PI * d) / 180. } pub fn same_orientation(v1: &Vec3, v2: &Vec3) -> bool { dot(v1, v2) > 0. }
true
c0c78bd6ed852604b020e0124ff3b7e0c86e014c
Rust
asubmissions/hedgecut
/src/tree.rs
UTF-8
24,464
2.53125
3
[]
no_license
#![allow(deprecated)] // TODO use rand_xorshift crate to remove this... use rand::{Rng, SeedableRng, XorShiftRng}; use rand::seq::SliceRandom; use rayon::prelude::*; use std::marker::Sync; use std::borrow::Cow; use hashbrown::HashMap; use crate::scan::{scan, scan_simd_numerical, scan_simd_categorical}; use crate::utils::as_bytes; use crate::split_stats::{SplitStats, is_robust, gini_impurity}; use crate::dataset::{Dataset, Sample, AttributeType}; #[derive(Eq,PartialEq,Clone,Debug)] pub enum Split { Numerical { attribute_index: u8, cut_off: u8 }, Categorical { attribute_index: u8, subset: u64 } } impl Split { pub fn new_numerical(attribute_index: u8, cut_off: u8) -> Split { Split::Numerical { attribute_index, cut_off } } pub fn new_categorical(attribute_index: u8, subset: u64) -> Split { Split::Categorical { attribute_index, subset } } pub fn attribute_index(&self) -> u8 { match self { Split::Numerical { attribute_index, cut_off: _ } => *attribute_index, Split::Categorical { attribute_index, subset: _ } => *attribute_index, } } } pub struct ExtremelyRandomizedTrees { trees: Vec<Tree>, } impl ExtremelyRandomizedTrees { pub fn fit<D, S>( dataset: &D, samples: Vec<S>, seed: u64, num_trees: usize, min_leaf_size: usize, max_tries_per_split: usize, ) -> ExtremelyRandomizedTrees where D: Dataset + Sync, S: Sample + Sync { let epsilon = 1.0 / 1000.0; ExtremelyRandomizedTrees::fit_with_epsilon( dataset, samples, seed, num_trees, min_leaf_size, max_tries_per_split, epsilon ) } pub fn fit_with_epsilon<D, S>( dataset: &D, samples: Vec<S>, seed: u64, num_trees: usize, min_leaf_size: usize, max_tries_per_split: usize, epsilon: f64, ) -> ExtremelyRandomizedTrees where D: Dataset + Sync, S: Sample + Sync { let num_attributes_to_try_per_split = (dataset.num_attributes() as f64).sqrt().round() as usize; let target_robustness = ((dataset.num_records() as f64) * epsilon).round() as usize; // eprintln!( // "Fitting {} trees on {} records with num_attributes_to_try_per_split={}, \ // 
target_robustness={}, max_tries_per_split={}", // num_trees, // dataset.num_records(), // num_attributes_to_try_per_split, // target_robustness, // max_tries_per_split // ); let trees: Vec<Tree> = (0..num_trees) .into_par_iter() .map(|tree_index| Tree::fit( dataset, samples.clone().as_mut_slice(), seed, tree_index as u64, min_leaf_size, num_attributes_to_try_per_split, target_robustness, max_tries_per_split )) .collect(); ExtremelyRandomizedTrees { trees } } pub fn predict<S>( &self, sample: &S ) -> bool where S: Sample + Sync { let num_plus: usize = self.trees .par_iter() .filter(|tree| tree.predict(sample)) .count(); num_plus * 2 > self.trees.len() } pub fn forget<S>(&mut self, sample: &S) where S: Sample + Sync { self.trees.par_iter_mut().for_each(|tree| { tree.forget(sample); }) } } #[derive(Eq,PartialEq,Debug)] enum TreeElement { Node { split: Split }, Leaf { num_samples: u32, num_plus: u32 } } struct Tree { index: usize, rng: XorShiftRng, tree_elements: HashMap<u64, TreeElement>, alternative_subtrees: HashMap<u64, Vec<AlternativeTree>>, min_leaf_size: usize, num_attributes_to_try_per_split: usize, max_tries_per_split: usize, } struct AlternativeTree { split: Split, split_stats: SplitStats, tree: Tree, } impl Tree { fn fit<D: Dataset, S: Sample>( dataset: &D, samples: &mut [S], seed: u64, tree_index: u64, min_leaf_size: usize, num_attributes_to_try_per_split: usize, target_robustness: usize, max_tries_per_split: usize ) -> Tree { let rng = XorShiftRng::from_seed(as_bytes(seed, tree_index)); let mut tree = Tree { index: tree_index as usize, rng, tree_elements: HashMap::new(), alternative_subtrees: HashMap::new(), min_leaf_size, num_attributes_to_try_per_split, max_tries_per_split, }; let gini_initial = gini_impurity(dataset.num_plus(), dataset.num_records()); let mut constant_attribute_indexes: Cow<[u8]> = Cow::from(Vec::new()); tree.determine_split( gini_initial, target_robustness, samples, dataset, 1, 0, &mut constant_attribute_indexes ); return tree; } fn 
leaf(num_samples: u32, num_plus: u32) -> TreeElement { TreeElement::Leaf { num_samples, num_plus } } fn node(split: Split) -> TreeElement { TreeElement::Node { split } } fn predict<S: Sample>(&self, sample: &S) -> bool { let mut current_tree = self; let mut element_id = 1; loop { let element = current_tree.tree_elements.get(&element_id); match element { Some(TreeElement::Node { split }) => { if sample.is_left_of(split) { element_id = element_id * 2; } else { element_id = (element_id * 2) + 1; } } Some(TreeElement::Leaf { num_samples, num_plus }) => { return (*num_plus * 2) > *num_samples; } None => { let alternative_trees = current_tree.alternative_subtrees.get(&element_id).unwrap(); // First tree in this list is the current best one by definition current_tree = &alternative_trees.first().unwrap().tree; } } } } fn forget<S: Sample>(&mut self, sample: &S) { Tree::forget_from(self, sample, 1); } fn forget_from<S: Sample>(tree: &mut Tree, sample: &S, element_id_to_start: u64) { let mut element_id = element_id_to_start; loop { let element = tree.tree_elements.get(&element_id); match element { Some(TreeElement::Node { split }) => { if sample.is_left_of(split) { element_id = element_id * 2; } else { element_id = (element_id * 2) + 1; } } Some(TreeElement::Leaf { num_samples, num_plus }) => { let new_num_samples = num_samples - 1; let new_num_plus = if sample.true_label() { *num_plus - 1 } else { *num_plus }; let updated_leaf = Tree::leaf(new_num_samples, new_num_plus); tree.tree_elements.insert(element_id, updated_leaf); break; } None => { // We hit a non-robust node //eprintln!("Hit a non-robust node!"); // First we have to update the split stats let alternative_trees = &mut *tree.alternative_subtrees.get_mut(&element_id).unwrap(); alternative_trees.iter_mut().for_each(|alternative_tree| { let stats = &mut alternative_tree.split_stats; if sample.is_left_of(&alternative_tree.split) { if sample.true_label() { stats.num_plus_left -= 1; } else { stats.num_minus_left -= 1; } 
} else { if sample.true_label() { stats.num_plus_right -= 1; } else { stats.num_minus_right -= 1; } } stats.update_score_and_impurity_before(); }); // TODO alternative_trees could be a heap, but it probably does not matter // Make sure the split with the highest score is in the first position alternative_trees.sort_by(|tree_a, tree_b| { tree_b.split_stats.score.cmp(&tree_a.split_stats.score) }); // Afterwards, we invoke the forgetting procedure on the alternative trees alternative_trees.iter_mut().for_each(|alternative_tree| { Tree::forget_from(&mut alternative_tree.tree, sample, element_id); }); break; } } } } fn generate_candidate_splits<D: Dataset>( &mut self, dataset: &D, constant_attribute_indexes: &Cow<[u8]> ) -> Vec<Split> { let mut attribute_indexes: Vec<u8> = (0..dataset.num_attributes()) // TODO This searches linearly, but does it matter here? .filter(|attribute_index| !constant_attribute_indexes.contains(attribute_index)) .collect(); attribute_indexes.shuffle(&mut self.rng); // TODO can we allocate once and reuse the vec somehow? 
let split_candidates: Vec<Split> = attribute_indexes.iter() .take(self.num_attributes_to_try_per_split) .map(|attribute_index| generate_random_split(&mut self.rng, dataset, *attribute_index)) .collect(); split_candidates } fn determine_split<D: Dataset, S: Sample>( &mut self, impurity_before: f64, target_robustness: usize, samples: &mut [S], dataset: &D, current_id: u64, num_tries: usize, constant_attribute_indexes: &mut Cow<[u8]> ) { // All attributes are constant, we create a leaf now if constant_attribute_indexes.len() == dataset.num_attributes() as usize { let num_plus = samples.iter().filter(|sample| sample.true_label()).count(); let num_samples = samples.len(); let leaf = Tree::leaf(num_samples as u32, num_plus as u32); self.tree_elements.insert(current_id, leaf); return; } let candidate_splits = self.generate_candidate_splits(dataset, &constant_attribute_indexes); let split_stats = compute_split_stats( impurity_before, &samples, dataset, &candidate_splits ); let maybe_best_split_stats = split_stats.iter().enumerate() .filter(|(_, stats)| stats.score >= 0) .max_by(|(_, stats1), (_, stats2)| stats1.score.cmp(&stats2.score)); if maybe_best_split_stats.is_none() { if num_tries < self.max_tries_per_split { self.determine_split( impurity_before, target_robustness, samples, dataset, current_id, num_tries + 1, constant_attribute_indexes ); return; } else { // We only need stats that are indepent of the split let some_stats = split_stats.first().unwrap(); let num_plus = some_stats.num_plus_left + some_stats.num_plus_right; let num_samples = some_stats.num_minus_left + some_stats.num_minus_right + num_plus; let leaf = Tree::leaf(num_samples, num_plus); self.tree_elements.insert(current_id, leaf); return; } } let (index_of_best_stats, best_split_stats) = maybe_best_split_stats.unwrap(); let best_split_candidate = candidate_splits.get(index_of_best_stats).unwrap(); let mut at_least_one_non_robust = false; let mut _num_removals_required = 0; for (index, stats) in 
split_stats.iter().enumerate() { if index != index_of_best_stats { let (is_robust_split, num_removals_evaluated) = is_robust(best_split_stats, stats, target_robustness); if !is_robust_split { at_least_one_non_robust = true; _num_removals_required = num_removals_evaluated; break; } } } if at_least_one_non_robust && num_tries < self.max_tries_per_split { //println!("Non-robust split found, retrying..."); self.determine_split( impurity_before, target_robustness, samples, dataset, current_id, num_tries + 1, constant_attribute_indexes ); } else { if at_least_one_non_robust { let mut alternative_splits: Vec<(usize, usize)> = split_stats.iter() .enumerate() .filter(|(index, _)| *index != index_of_best_stats) .filter_map(|(index, stats)| { let (is_robust_split, num_removals_required_to_break_split) = is_robust(best_split_stats, stats, target_robustness); if is_robust_split { None } else { Some((index, num_removals_required_to_break_split)) } }) .collect(); // eprintln!( // "Non-robust split ({}) on {} records with {} alternatives \ // for element_id {} in tree {}.", // _num_removals_required, // samples.len(), // alternative_splits.len(), // current_id, // self.index // ); alternative_splits.push((index_of_best_stats, 0)); let mut alternative_trees: Vec<AlternativeTree> = Vec::with_capacity(alternative_splits.len()); for (index, num_removals_required_to_break_split) in alternative_splits { let alternative_target_robustness = target_robustness - num_removals_required_to_break_split; let mut copy_of_samples = samples.to_vec(); let replacement_tree = Tree { index: self.index, rng: self.rng.clone(), tree_elements: HashMap::new(), alternative_subtrees: HashMap::new(), min_leaf_size: self.min_leaf_size, num_attributes_to_try_per_split: self.num_attributes_to_try_per_split, max_tries_per_split: self.max_tries_per_split }; let alternative_candidate_split = candidate_splits.get(index).unwrap(); let alternative_split_stats = split_stats.get(index).unwrap(); let mut alternative_tree 
= AlternativeTree { split: alternative_candidate_split.clone(), split_stats: alternative_split_stats.clone(), tree: replacement_tree }; alternative_tree.tree.split_and_continue( alternative_target_robustness, copy_of_samples.as_mut_slice(), dataset, current_id, &mut constant_attribute_indexes.clone(), alternative_candidate_split, alternative_split_stats ); alternative_trees.push(alternative_tree); } // TODO alternative_trees could be a heap, but it probably does not matter // Make sure the split with the highest score is in the first position alternative_trees.sort_by(|tree_a, tree_b| { tree_b.split_stats.score.cmp(&tree_a.split_stats.score) }); self.alternative_subtrees.insert(current_id, alternative_trees); } else { self.split_and_continue( target_robustness, samples, dataset, current_id, constant_attribute_indexes, best_split_candidate, best_split_stats ); } } } fn split_and_continue<D: Dataset, S: Sample>( &mut self, target_robustness: usize, samples: &mut [S], dataset: &D, current_id: u64, constant_attribute_indexes: &mut Cow<[u8]>, best_split: &Split, best_split_stats: &SplitStats ) { let (samples_left, constant_on_the_left, samples_right, constant_on_the_right) = split(samples, best_split); let node = Tree::node(best_split.clone()); self.tree_elements.insert(current_id, node); let left_child_id = current_id * 2; let label_constant_on_the_left = best_split_stats.num_minus_left == 0 || best_split_stats.num_plus_left == 0; if samples_left.len() <= self.min_leaf_size || label_constant_on_the_left { //println!("Building leaf for {} records", record_ids_left.len()); let leaf = Tree::leaf( best_split_stats.num_plus_left + best_split_stats.num_minus_left, best_split_stats.num_plus_left ); self.tree_elements.insert(left_child_id, leaf); } else { let mut constant_attribute_indexes_left = constant_attribute_indexes.clone(); if constant_on_the_left { //println!("Constant attribute found in {} records", record_ids_left.len()); let attribute_index = 
best_split.attribute_index(); constant_attribute_indexes_left.to_mut().push(attribute_index); } self.determine_split( best_split_stats.impurity_left, target_robustness, samples_left, dataset, left_child_id, 0, &mut constant_attribute_indexes_left ); } let right_child_id = (current_id * 2) + 1; let label_constant_on_the_right = best_split_stats.num_minus_right == 0 || best_split_stats.num_plus_right == 0; if samples_right.len() <= self.min_leaf_size || label_constant_on_the_right { //println!("Building leaf for {} records", record_ids_right.len()); let leaf = Tree::leaf( best_split_stats.num_plus_right + best_split_stats.num_minus_right, best_split_stats.num_plus_right ); self.tree_elements.insert(right_child_id, leaf); } else { let mut constant_attribute_indexes_right = constant_attribute_indexes.clone(); if constant_on_the_right { //println!("Constant attribute found in {} records", record_ids_right.len()); let attribute_index = best_split.attribute_index(); constant_attribute_indexes_right.to_mut().push(attribute_index); } self.determine_split( best_split_stats.impurity_right, target_robustness, samples_right, dataset, right_child_id, 0, &mut constant_attribute_indexes_right ); } } } fn compute_split_stats<S: Sample, D: Dataset>( impurity_before: f64, samples: &[S], dataset: &D, candidate_splits: &Vec<Split>, ) -> Vec<SplitStats> { let mut all_stats: Vec<SplitStats> = Vec::with_capacity(candidate_splits.len()); for candidate in candidate_splits { let mut stats = match candidate { Split::Numerical { attribute_index: _, cut_off: _ } => { scan_simd_numerical(samples, candidate) }, Split::Categorical { attribute_index, subset: _ } => { // TODO we also need a SIMD version here let (_, range) = dataset.attribute_range(*attribute_index); if range <= 16 { scan_simd_categorical(samples, candidate) } else { scan(samples, &candidate) } }, }; stats.update_score(impurity_before); all_stats.push(stats); } all_stats } fn split<'a, S: Sample>( samples: &'a mut [S], split: &Split 
) -> (&'a mut [S], bool, &'a mut [S], bool) { let mut cursor = 0; let mut cursor_end = samples.len(); let mut constant_on_the_left = true; let mut first_value_on_the_left: Option<u8> = None; let mut constant_on_the_right = true; let mut first_value_on_the_right: Option<u8> = None; loop { // TODO Maybe remove boundary checks here later let sample = samples.get(cursor).unwrap(); let attribute_value: u8 = sample.attribute_value(split.attribute_index()); if sample.is_left_of(&split) { if constant_on_the_left { if first_value_on_the_left.is_none() { first_value_on_the_left = Some(attribute_value); } else if attribute_value != first_value_on_the_left.unwrap() { constant_on_the_left = false; } } cursor += 1; } else { if constant_on_the_right { if first_value_on_the_right.is_none() { first_value_on_the_right = Some(attribute_value); } else if attribute_value != first_value_on_the_right.unwrap() { constant_on_the_right = false; } } cursor_end -= 1; //println!("Swapping {} and {} with record {}({}), cursor_end is now {}", // cursor, cursor_end, record_id, value, cursor_end); samples.swap(cursor, cursor_end); } if cursor == cursor_end - 1 { break; } } let (samples_left, samples_right) = samples.split_at_mut(cursor); (samples_left, constant_on_the_left, samples_right, constant_on_the_right) } fn generate_random_split<D: Dataset>( rng: &mut XorShiftRng, dataset: &D, attribute_index: u8 ) -> Split { match dataset.attribute_type(attribute_index) { AttributeType::Numerical => { let (min_value, max_value) = dataset.attribute_range(attribute_index); let random_cut_off = rng.gen_range(min_value, max_value + 1); Split::new_numerical(attribute_index, random_cut_off) }, AttributeType::Categorical => { let (_, cardinality) = dataset.attribute_range(attribute_index); let how_many = rng.gen_range(0, cardinality + 1); // TODO lets get rid of the allocation here... 
let mut possible_values: Vec<u8> = (0..(cardinality + 1)).collect(); possible_values.shuffle(rng); let mut subset: u64 = 0; for bit_to_set in possible_values.iter().take(how_many as usize) { subset |= 1_u64 << *bit_to_set as u64 } Split::new_categorical(attribute_index, subset) } } }
true
a9252f1a1dfb71d68e90f0904f13fbbc90f7a503
Rust
iptq/rosu-v2
/src/request/ranking.rs
UTF-8
8,463
2.78125
3
[ "MIT" ]
permissive
use crate::{
    model::{
        ranking::{ChartRankings, CountryRankings, RankingType, Rankings, Spotlight},
        GameMode,
    },
    request::{Pending, Query, Request},
    routing::Route,
    Osu,
};

use futures::future::TryFutureExt;
use serde::Deserialize;

/// Get a [`ChartRankings`](crate::model::ranking::ChartRankings) struct
/// containing a [`Spotlight`](crate::model::ranking::Spotlight), its
/// [`Beatmapset`](crate::model::beatmap::Beatmapset)s, and participating
/// [`UserCompact`](crate::model::user::UserCompact).
///
/// The mapsets will have their `maps` option filled.
///
/// The user statistics contain specific, spotlight related data.
/// All fields depend only on scores on maps of the spotlight.
/// The statistics vector is ordered by `ranked_score`.
/// The `user` option is filled.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct GetChartRankings<'a> {
    // In-flight request future; presumably populated on first poll by the
    // `poll_req!` expansion calling `start()` — TODO confirm against the macro.
    fut: Option<Pending<'a, ChartRankings>>,
    osu: &'a Osu,
    mode: GameMode,
    // Optional spotlight id filter; `None` means "latest spotlight".
    spotlight: Option<u32>,
}

impl<'a> GetChartRankings<'a> {
    /// Create the request builder; nothing is sent until the future is polled.
    #[inline]
    pub(crate) fn new(osu: &'a Osu, mode: GameMode) -> Self {
        Self {
            fut: None,
            osu,
            mode,
            spotlight: None,
        }
    }

    /// Specify the spotlight id. If none is given,
    /// the latest spotlight will be returned.
    #[inline]
    pub fn spotlight(mut self, spotlight_id: u32) -> Self {
        self.spotlight.replace(spotlight_id);

        self
    }

    /// Assemble the query string and kick off the HTTP request.
    fn start(&mut self) -> Pending<'a, ChartRankings> {
        #[cfg(feature = "metrics")]
        self.osu.metrics.chart_rankings.inc();

        let mut query = Query::new();

        // Only send the parameter when a spotlight was explicitly requested.
        if let Some(spotlight) = self.spotlight {
            query.push("spotlight", &spotlight);
        }

        let route = Route::GetRankings {
            mode: self.mode,
            ranking_type: RankingType::Charts,
        };

        let req = Request::with_query(route, query);

        Box::pin(self.osu.inner.request(req))
    }
}

poll_req!(GetChartRankings => ChartRankings);

/// Get a [`CountryRankings`](crate::model::ranking::CountryRankings) struct
/// containing a vec of [`CountryRanking`](crate::model::ranking::CountryRanking)s
/// which will be sorted by the country's total pp.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct GetCountryRankings<'a> {
    fut: Option<Pending<'a, CountryRankings>>,
    osu: &'a Osu,
    mode: GameMode,
    // Result page to request; `None` lets the API return the first page.
    page: Option<u32>,
}

impl<'a> GetCountryRankings<'a> {
    /// Create the request builder; nothing is sent until the future is polled.
    #[inline]
    pub(crate) fn new(osu: &'a Osu, mode: GameMode) -> Self {
        Self {
            fut: None,
            osu,
            mode,
            page: None,
        }
    }

    /// Specify the page of the country ranking to retrieve.
    #[inline]
    pub fn page(mut self, page: u32) -> Self {
        self.page.replace(page);

        self
    }

    /// Assemble the query string and kick off the HTTP request.
    fn start(&mut self) -> Pending<'a, CountryRankings> {
        #[cfg(feature = "metrics")]
        self.osu.metrics.country_rankings.inc();

        let mut query = Query::new();

        if let Some(page) = self.page {
            query.push("cursor[page]", &page);
        }

        let route = Route::GetRankings {
            mode: self.mode,
            ranking_type: RankingType::Country,
        };

        let req = Request::with_query(route, query);

        Box::pin(self.osu.inner.request(req))
    }
}

poll_req!(GetCountryRankings => CountryRankings);

/// Get a [`Rankings`](crate::model::ranking::Rankings) struct whose
/// [`UserCompact`](crate::model::user::UserCompact)s are sorted
/// by their pp, i.e. the current pp leaderboard.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct GetPerformanceRankings<'a> {
    fut: Option<Pending<'a, Rankings>>,
    osu: &'a Osu,
    mode: GameMode,
    country: Option<String>,
    // Key-count variant filter ("4k"/"7k"); only sent for osu!mania (see start()).
    variant: Option<&'static str>,
    page: Option<u32>,
}

impl<'a> GetPerformanceRankings<'a> {
    /// Create the request builder; nothing is sent until the future is polled.
    #[inline]
    pub(crate) fn new(osu: &'a Osu, mode: GameMode) -> Self {
        Self {
            fut: None,
            osu,
            mode,
            country: None,
            variant: None,
            page: None,
        }
    }

    /// Specify a country code.
    #[inline]
    pub fn country(mut self, country: impl Into<String>) -> Self {
        self.country.replace(country.into());

        self
    }

    /// Consider only 4K scores. Only relevant for osu!mania.
    #[inline]
    pub fn variant_4k(mut self) -> Self {
        self.variant.replace("4k");

        self
    }

    /// Consider only 7K scores. Only relevant for osu!mania.
    #[inline]
    pub fn variant_7k(mut self) -> Self {
        self.variant.replace("7k");

        self
    }

    /// Pages range from 1 to 200.
    #[inline]
    pub fn page(mut self, page: u32) -> Self {
        self.page.replace(page);

        self
    }

    /// Assemble the query string, send the request, and stamp the resulting
    /// [`Rankings`] with the mode and ranking type it was requested for.
    fn start(&mut self) -> Pending<'a, Rankings> {
        #[cfg(feature = "metrics")]
        self.osu.metrics.performance_rankings.inc();

        let mode = self.mode;
        let mut query = Query::new();

        if let Some(ref country) = self.country {
            query.push("country", country);
        }

        // ! Adjust filter once there are non-mania variants
        if let Some(variant) = self.variant.filter(|_| mode == GameMode::MNA) {
            query.push("variant", &variant);
        }

        if let Some(page) = self.page {
            query.push("cursor[page]", &page);
        }

        let route = Route::GetRankings {
            mode,
            ranking_type: RankingType::Performance,
        };

        let req = Request::with_query(route, query);

        let fut = self
            .osu
            .inner
            .request(req)
            .map_ok(move |mut rankings: Rankings| {
                // The API response does not carry these back; fill them in so
                // callers know what the rankings describe.
                rankings.mode.replace(mode);
                rankings.ranking_type.replace(RankingType::Performance);

                rankings
            });

        Box::pin(fut)
    }
}

poll_req!(GetPerformanceRankings => Rankings);

/// Get a [`Rankings`](crate::model::ranking::Rankings) struct whose
/// [`UserCompact`](crate::model::user::UserCompact)s are sorted
/// by their ranked score, i.e. the current ranked score leaderboard.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct GetScoreRankings<'a> {
    fut: Option<Pending<'a, Rankings>>,
    osu: &'a Osu,
    mode: GameMode,
    page: Option<u32>,
}

impl<'a> GetScoreRankings<'a> {
    /// Create the request builder; nothing is sent until the future is polled.
    #[inline]
    pub(crate) fn new(osu: &'a Osu, mode: GameMode) -> Self {
        Self {
            fut: None,
            osu,
            mode,
            page: None,
        }
    }

    /// Pages range from 1 to 200.
    #[inline]
    pub fn page(mut self, page: u32) -> Self {
        self.page.replace(page);

        self
    }

    /// Assemble the query string, send the request, and stamp the resulting
    /// [`Rankings`] with the mode and ranking type it was requested for.
    fn start(&mut self) -> Pending<'a, Rankings> {
        #[cfg(feature = "metrics")]
        self.osu.metrics.score_rankings.inc();

        let mode = self.mode;
        let mut query = Query::new();

        if let Some(page) = self.page {
            query.push("cursor[page]", &page);
        }

        let route = Route::GetRankings {
            mode,
            ranking_type: RankingType::Score,
        };

        let req = Request::with_query(route, query);

        let fut = self
            .osu
            .inner
            .request(req)
            .map_ok(move |mut rankings: Rankings| {
                rankings.mode.replace(mode);
                rankings.ranking_type.replace(RankingType::Score);

                rankings
            });

        Box::pin(fut)
    }
}

poll_req!(GetScoreRankings => Rankings);

/// Get a vec of [`Spotlight`](crate::model::ranking::Spotlight)s.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct GetSpotlights<'a> {
    fut: Option<Pending<'a, Vec<Spotlight>>>,
    osu: &'a Osu,
}

impl<'a> GetSpotlights<'a> {
    /// Create the request builder; nothing is sent until the future is polled.
    #[inline]
    pub(crate) fn new(osu: &'a Osu) -> Self {
        Self { fut: None, osu }
    }

    /// Send the request and unwrap the JSON envelope into the spotlight list.
    fn start(&mut self) -> Pending<'a, Vec<Spotlight>> {
        #[cfg(feature = "metrics")]
        self.osu.metrics.spotlights.inc();

        let req = Request::new(Route::GetSpotlights);

        let fut = self
            .osu
            .inner
            .request(req)
            .map_ok(|s: Spotlights| s.spotlights);

        Box::pin(fut)
    }
}

poll_req!(GetSpotlights => Vec<Spotlight>);

// Private deserialization wrapper matching the `{ "spotlights": [...] }`
// envelope the endpoint responds with.
#[derive(Deserialize)]
struct Spotlights {
    spotlights: Vec<Spotlight>,
}
true
d6da8ffe576d645eb39f64d6321e5be8aaec2e8c
Rust
BonsaiDen/shooter-rs
/client/src/renderer/allegro/traits.rs
UTF-8
4,132
2.65625
3
[ "MIT" ]
permissive
// External Dependencies ------------------------------------------------------
use allegro::{
    Core, Display, DisplayOption, DisplayOptionImportance, EventQueue, OPENGL
};

// Internal Dependencies ------------------------------------------------------
use shared::Lithium::{
    Client, ClientHandler, EntityState, EntityRegistry, Event, BaseLevel, Renderer
};
use super::AllegroRenderer;

// Allegro Renderer Trait Implementation --------------------------------------
impl Renderer for AllegroRenderer {

    // Statics ----------------------------------------------------------------

    /// Entry point: sets up Allegro (keyboard, OpenGL display, event queue),
    /// hands a renderer to the client, then runs the frame/tick mainloop until
    /// the renderer stops running. `client.destroy` is invoked on exit.
    fn run<
        H: ClientHandler<Self, G, L, E, S>,
        E: Event,
        S: EntityState,
        L: BaseLevel<S>,
        G: EntityRegistry<S, L, Self>

    >(mut client: Client<H, Self, G, L, E, S>) where Self: Sized {

        // Init Allegro
        let mut core = Core::init().unwrap();
        let q = EventQueue::new(&core).unwrap();

        // Keyboard
        core.install_keyboard().unwrap();
        q.register_event_source(core.get_keyboard_event_source());

        // Create Display (multisampling is only *suggested*, not required)
        core.set_new_display_flags(OPENGL);
        core.set_new_display_option(
            DisplayOption::SampleBuffers, 2, DisplayOptionImportance::Suggest
        );
        core.set_new_display_option(
            DisplayOption::Samples, 16, DisplayOptionImportance::Suggest
        );

        let disp = Display::new(
            &core, 256, 256

        ).ok().expect("Failed to create OPENGL context.");

        q.register_event_source(disp.get_event_source());

        // Create renderer
        let mut renderer = AllegroRenderer::new(core, disp, q);

        // Init callback
        client.init(&mut renderer);

        // Mainloop: ticks happen every `fps / tick_rate` frames; in between,
        // frames only interpolate (`delta_u`) and draw.
        let mut last_tick_time = 0.0;
        let mut last_frame_time = 0.0;
        let mut frames_per_tick = 0;

        while renderer.running() {

            if renderer.should_draw() {

                let frame_time = renderer.time();
                let tick_rate = renderer.tick_rate();

                // Run a logic tick once the previous tick's frame budget is
                // used up; the budget is only (re)armed if tick() returns true.
                if frames_per_tick == 0 {
                    if client.tick(&mut renderer) {
                        frames_per_tick = renderer.fps() / tick_rate;
                        last_tick_time = frame_time;
                    }
                }

                renderer.set_delta_time((frame_time - last_frame_time) as f32);

                // NOTE(review): `1.0 / (1.0 / tick_rate)` simplifies to just
                // `tick_rate`; this looks like a leftover — confirm whether the
                // intended factor was `tick_rate` or its reciprocal before
                // simplifying.
                renderer.set_delta_u(
                    1.0 / (1.0 / tick_rate as f32) * (frame_time - last_tick_time) as f32
                );

                client.draw(&mut renderer);

                renderer.draw();

                last_frame_time = frame_time;

                // TODO handle this more nicely?
                if frames_per_tick > 0 {
                    frames_per_tick -= 1;
                }

            }

            renderer.events();

        }

        client.destroy(&mut renderer);

    }

    // Time Related -----------------------------------------------------------

    // Current wall time of the renderer, in seconds (f64).
    fn time(&self) -> f64 {
        self.time
    }

    fn set_time(&mut self, time: f64) {
        self.time = time;
    }

    // Seconds elapsed between the last two drawn frames.
    fn delta_time(&self) -> f32 {
        self.dt
    }

    fn set_delta_time(&mut self, dt: f32) {
        self.dt = dt;
    }

    // Interpolation factor between the last tick and the current frame.
    fn delta_u(&self) -> f32 {
        self.u
    }

    fn set_delta_u(&mut self, u: f32) {
        self.u = u;
    }

    // Frame / Tick Rate ------------------------------------------------------

    fn fps(&self) -> u32 {
        self.frame_rate
    }

    // Also reprograms the frame timer so draws happen at the new rate.
    fn set_fps(&mut self, frame_rate: u32) {
        self.frame_rate = frame_rate;
        self.timer.set_speed(1.0 / frame_rate as f64);
    }

    fn tick_rate(&self) -> u32 {
        self.tick_rate
    }

    fn set_tick_rate(&mut self, tick_rate: u32) {
        self.tick_rate = tick_rate;
    }

    // Interpolation ----------------------------------------------------------

    fn interpolation_ticks(&self) -> usize {
        self.interpolation_ticks
    }

    fn set_interpolation_ticks(&mut self, ticks: usize) {
        self.interpolation_ticks = ticks;
    }

}
true
84a7f140488dfa3453658edf1fc21c33cb9e3694
Rust
batduck27/micro-http
/fuzz/src/main.rs
UTF-8
1,050
2.640625
3
[ "Apache-2.0" ]
permissive
#[macro_use] extern crate afl; use micro_http::{HttpServer, Response, StatusCode}; use std::io::prelude::*; fn main() { fuzz!(|data: &[u8]| { let path_to_socket = "/tmp/test.sock"; std::fs::remove_file(path_to_socket).unwrap_or_default(); // Start the server. let mut server = HttpServer::new(path_to_socket).unwrap(); server.start_server().unwrap(); // Connect a client to the server so it doesn't block in our example. let mut socket = std::os::unix::net::UnixStream::connect(path_to_socket).unwrap(); socket.write_all(data).unwrap(); let mut i = 0; // Server loop processing requests. loop { for request in server.requests().unwrap() { let response = request.process(|request| { // Your code here. Response::new(request.http_version(), StatusCode::NoContent) }); server.respond(response); std::process::exit(0); } i = i + 1; if i >= 2 { break; } } socket.shutdown(std::net::Shutdown::Both).unwrap(); std::fs::remove_file(path_to_socket).unwrap_or_default(); }); }
true
fb92d6a37aef4990cdbf000c69479d2461324813
Rust
Vidrochka/anthill-game-engine
/anthill-game-engine/src/core/engine_core.rs
UTF-8
2,588
2.6875
3
[ "MIT" ]
permissive
use crate::utils::{ logger_builder::LoggerBuilder, time::TimeTracker }; use crate::gui::{ //types::render_api::RenderApi, //window::{WindowSystem, WindowBuildOptions, WindowBuilder, Window} }; use crate::config::CoreConfig; use std::sync::{Arc,Mutex}; use anthill_di::{DiError, Injection}; use anthill_window_lib::window::window::Window; use anthill_window_lib::window::window_build_options::WindowBuildOptionsBuilder; use anthill_window_lib::window::window_system::WindowSystem; pub struct EngineCore { time_tracker: Arc<Mutex<TimeTracker>>, window_system: Arc<Mutex<WindowSystem>>, window: Arc<Mutex<Window>>, config: CoreConfig, log_handle: log4rs::Handle, is_closed: bool, } impl Injection for EngineCore { fn build_injection(injector: &mut anthill_di::Injector) -> Result<Self, anthill_di::DiError> { let log_handler = injector.get_new_instance()?; let window_system = injector.get_singletone::<WindowSystem>()?; let window = window_system.lock() .map_err(|e| DiError::CustomInjectTimeError(e.to_string()))? .new_window( WindowBuildOptionsBuilder::default() .lable("Anthill game engine".to_string()) .height(600) .width(800) .build().map_err(|e|DiError::CustomInjectTimeError(e.to_string()))? ); window.lock().map_err(|e|DiError::CustomInjectTimeError(e.to_string()))? .set_screen(injector.get_new_instance()?); window.lock().map_err(|e|DiError::CustomInjectTimeError(e.to_string()))? 
.show(); let core = Self { log_handle: log_handler, window_system: window_system, window: window, time_tracker: injector.get_singletone()?, config: injector.get_new_instance()?, is_closed: false }; log::info!("Core created"); Ok(core) } } impl EngineCore { pub fn run(&mut self) -> Result<(), String> { loop { self.time_tracker.lock().map_err(|e|e.to_string())?.new_step(); let time_tracker = self.time_tracker.as_ref().lock().unwrap(); //log::info!("{}", time_tracker.get_fps()); self.window_system.lock().map_err(|e|e.to_string())?.swap_all_render_context(); if self.is_closed() { break; } } log::info!("Engine shuting down"); Result::Ok(()) } pub fn is_closed(&self) -> bool { self.is_closed } }
true
9589f0c87970dd1241c12bebc18abaa00d018e4f
Rust
wcwaterbender/buoy_scraper
/src/main.rs
UTF-8
1,356
2.90625
3
[]
no_license
use scraper::{Html, Selector}; #[tokio::main] async fn main() -> Result<(), reqwest::Error> { let res = reqwest::get("https://wavecast.com/buoys/").await?; println!("Status: {}", res.status()); let body = res.text().await?; let document = Html::parse_document(&body); let selector = Selector::parse("img").unwrap(); let mut images = vec!(); //get the wave plot images only for element in document.select(&selector) { match element.value().attr("src") { Some(x) if x.contains("plot_wave") => images.push(x), _x => drop(_x), } } let mut counter = 1; //make requests for the byte data and save as images locally for imgurl in images { let img_bytes = reqwest::get(imgurl).await?; let bytes = img_bytes.bytes().await?; match image::load_from_memory(&bytes){ Ok(x) => { let mut filename = String::from("imgs/buoy_"); filename+=&counter.to_string(); filename += &String::from(".png"); match x.save(filename){ Ok(_) => println!("Saved Image"), Err(_) => println!("failed to save img"), }; counter+=1; }, Err(x) => println!("no image {}",x), } } Ok(()) }
true
fc42aaa27e708cecb2cede7a14921157cb7e10cc
Rust
AndrewMendezLacambra/rust-programming-contest-solutions
/atcoder/arc059_c.rs
UTF-8
2,071
3.015625
3
[]
no_license
const MOD: usize = 1e9 as usize + 7; fn main() { let s = std::io::stdin(); let mut sc = Scanner { stdin: s.lock() }; let n = sc.read(); let c: usize = sc.read(); let a: Vec<usize> = sc.vec(n); let b: Vec<usize> = sc.vec(n); let max_b: usize = *b.iter().max().unwrap(); let mut pow = vec![vec![0; c + 1]; max_b + 1]; for x in 0..(max_b + 1) { pow[x][0] = 1; for i in 0..c { pow[x][i + 1] = (pow[x][i] * x) % MOD; } } let mut sum_pow = vec![vec![0; c + 1]; max_b + 2]; for i in 0..(c + 1) { for x in 0..(max_b + 1) { sum_pow[x + 1][i] = (sum_pow[x][i] + pow[x][i]) % MOD; } } let mut dp = vec![0; c + 1]; dp[0] = 1; for i in 0..n { let (a, b) = (a[i], b[i]); let mut next = vec![0; c + 1]; for cur in 0..(c + 1) { if dp[cur] == 0 { continue; } for add in 0..(c + 1) { if cur + add > c { break; } next[cur + add] += dp[cur] * (sum_pow[b + 1][add] + MOD - sum_pow[a][add]); next[cur + add] %= MOD; } } dp = next; } println!("{}", dp[c]); } pub struct Scanner<R> { stdin: R, } impl<R: std::io::Read> Scanner<R> { pub fn read<T: std::str::FromStr>(&mut self) -> T { use std::io::Read; let buf = self .stdin .by_ref() .bytes() .map(|b| b.unwrap()) .skip_while(|&b| b == b' ' || b == b'\n' || b == b'\r') .take_while(|&b| b != b' ' && b != b'\n' && b != b'\r') .collect::<Vec<_>>(); unsafe { std::str::from_utf8_unchecked(&buf) } .parse() .ok() .expect("Parse error.") } pub fn vec<T: std::str::FromStr>(&mut self, n: usize) -> Vec<T> { (0..n).map(|_| self.read()).collect() } pub fn chars(&mut self) -> Vec<char> { self.read::<String>().chars().collect() } }
true
f838782f36b385c3e50c30c95f1aa39e229fb8be
Rust
aki-ks/mineroute
/src/net/handshake/handshake.rs
UTF-8
1,091
2.859375
3
[]
no_license
use crate::net::{Protocol, Packet, PacketCodec}; use crate::net::buffer::{Buffer, BufferMut}; /// The first packet that is ever send to the server. /// It indicates whether the client wants to join or view the server status. #[derive(Debug, Clone)] pub struct HandshakePacket { pub protocol_version: i32, pub server_address: String, pub server_port: u16, pub next_protocol: Protocol, } impl Packet for HandshakePacket {} impl PacketCodec for HandshakePacket { fn decode<B: Buffer>(buf: &mut B) -> Result<Self, ()> { Ok(HandshakePacket { protocol_version: buf.read_var_int()?, server_address: buf.read_string()?, server_port: buf.read_u16()?, next_protocol: Protocol::from_int(buf.read_var_int()?).ok_or(())? }) } fn encode<B: BufferMut>(&self, buf: &mut B) -> Result<(), ()> { buf.write_var_int(self.protocol_version); buf.write_string(&self.server_address); buf.write_u16(self.server_port); buf.write_var_int(self.next_protocol.to_int()); Ok(()) } }
true
358a2b71f83bb7f87efb497a6eefd30022838df4
Rust
CRefice/prayer
/src/geom/aabb.rs
UTF-8
2,567
2.890625
3
[ "MIT" ]
permissive
use crate::ray::Ray;
use crate::vec::{self, glm, Vec3};

/// Axis-aligned bounding box described by its minimum and maximum corners.
#[derive(Clone)]
pub struct AABB {
    pub min: Vec3,
    pub max: Vec3,
}

/// Anything that can report an axis-aligned bounding box enclosing itself.
pub trait Bounds {
    fn bounds(&self) -> AABB;
}

impl AABB {
    /// Ray/box overlap test via the slab method: intersect the ray's
    /// parameter interval with each axis slab and check the intervals overlap.
    /// Uses the ray's precomputed reciprocal direction (`inv_dir`).
    pub fn intersects(&self, r: &Ray) -> bool {
        let tx1 = (self.min.x - r.origin.x) * r.inv_dir.x;
        let tx2 = (self.max.x - r.origin.x) * r.inv_dir.x;
        let ty1 = (self.min.y - r.origin.y) * r.inv_dir.y;
        let ty2 = (self.max.y - r.origin.y) * r.inv_dir.y;
        let tz1 = (self.min.z - r.origin.z) * r.inv_dir.z;
        let tz2 = (self.max.z - r.origin.z) * r.inv_dir.z;

        let (txmin, txmax) = (f32::min(tx1, tx2), f32::max(tx1, tx2));
        let (tymin, tymax) = (f32::min(ty1, ty2), f32::max(ty1, ty2));
        let (tzmin, tzmax) = (f32::min(tz1, tz2), f32::max(tz1, tz2));

        let tmin = f32::max(f32::max(txmin, tymin), tzmin);
        let tmax = f32::min(f32::min(txmax, tymax), tzmax);

        // Hit iff the per-axis intervals overlap and the box is not entirely
        // behind the ray origin.
        tmax >= 0.0 && tmin <= tmax
    }

    /// Total surface area of the box — presumably used for SAH-style split
    /// scoring; confirm against the caller.
    pub fn surface_area(&self) -> f32 {
        let width = self.max.x - self.min.x;
        let height = self.max.y - self.min.y;
        let depth = self.max.z - self.min.z;
        2.0 * ((width * height) + (height * depth) + (width * depth))
    }

    /// Smallest AABB that encloses both `self` and `other`.
    pub fn union(&self, other: &AABB) -> AABB {
        let (min, max) = vec::component_minmax((self.min, self.max), &other.min);
        let (min, max) = vec::component_minmax((min, max), &other.max);
        AABB { min, max }
    }

    /// Splits the box at coordinate `x` along the axis with index `dimension`,
    /// returning the (left, right) halves. The halves share the split plane.
    pub fn split_dimension(&self, x: f32, dimension: usize) -> (AABB, AABB) {
        let mut left_max = self.max;
        left_max.data[dimension] = x;
        let mut right_min = self.min;
        right_min.data[dimension] = x;
        let left = AABB {
            min: self.min,
            max: left_max,
        };
        let right = AABB {
            min: right_min,
            max: self.max,
        };
        (left, right)
    }
}

impl Default for AABB {
    // NOTE(review): the default box is degenerate (min == max == origin), not
    // an identity element for `union` — unioning it always includes the origin.
    fn default() -> Self {
        AABB {
            min: glm::zero(),
            max: glm::zero(),
        }
    }
}

impl Bounds for AABB {
    fn bounds(&self) -> AABB {
        self.clone()
    }
}

impl<'a, I> From<I> for AABB
where
    I: IntoIterator<Item = &'a Vec3>,
{
    /// Tightest box around a set of points; an empty iterator yields the
    /// degenerate zero box.
    fn from(it: I) -> AABB {
        let (min, max) = component_minmax(it.into_iter()).unwrap_or((glm::zero(), glm::zero()));
        AABB { min, max }
    }
}

// Componentwise (min, max) over an iterator of points, or `None` when the
// iterator is empty. Deliberately distinct from `vec::component_minmax`, which
// folds one point into an accumulated (min, max) pair.
fn component_minmax<'a, I: Iterator<Item = &'a Vec3>>(mut it: I) -> Option<(Vec3, Vec3)> {
    let a = *it.next()?;
    let minmax = (a, a);
    Some(it.fold(minmax, vec::component_minmax))
}
true
46e04b5e02b24a7beffa01f1b6b11c5928dee6e0
Rust
glugg23/rox
/tests/limit.rs
UTF-8
541
2.609375
3
[ "MIT" ]
permissive
use std::process::Command; use std::str; #[test] fn loop_too_large() { let result = Command::new("cargo") .args(&[ "run", "-q", "--release", "--", "tests/resources/limit/loop_too_large.lox", ]) .output() .expect("Error while running limit/loop_too_large()"); assert_eq!( str::from_utf8(&result.stderr).unwrap(), "[line 2351] Error at '}': Loop body too large.\n" ); assert_eq!(result.status.code().unwrap(), 65); }
true
59d947e4806f74875a97278706afd96c9293d727
Rust
brooks-builds/jungle
/src/lib.rs
UTF-8
3,055
2.625
3
[ "MIT" ]
permissive
pub mod config; mod draw_systems; mod game_objects; mod handle_input; mod images; pub mod initialize; mod life_systems; mod physics_systems; mod scenes; use config::Config; use ggez::event::EventHandler; use ggez::{graphics, Context, GameResult}; use ggez::{graphics::BLACK, timer}; use handle_input::HandleInput; use images::Images; use scenes::{ end_scene::EndScene, main_scene::MainScene, pause_scene::PauseScene, start_scene::StartScene, ActiveScene, }; pub struct GameState { active_scene: ActiveScene, starting_scene: StartScene, main_scene: MainScene, pause_scene: PauseScene, end_scene: EndScene, handle_input: HandleInput, config: Config, images: Images, } impl GameState { pub fn new(config: Config, context: &mut Context) -> GameResult<Self> { let active_scene = ActiveScene::Start; let starting_scene = StartScene::new(&config, context); let mut images = Images::new(context, &config)?; let main_scene = MainScene::new(&config, context, &mut images)?; let pause_scene = PauseScene::new(); let end_scene = EndScene::new(); let handle_input = HandleInput::new(&config)?; Ok(Self { active_scene, starting_scene, main_scene, pause_scene, end_scene, handle_input, config, images, }) } } impl EventHandler for GameState { fn update(&mut self, context: &mut Context) -> GameResult { while timer::check_update_time(context, 30) { let command = self.handle_input.run(&self.active_scene); match self.active_scene { ActiveScene::Start => self .starting_scene .update(command, &mut self.active_scene)?, ActiveScene::Main => { self.main_scene .update(command, &self.config, &mut self.images, context)? 
} ActiveScene::Pause => self.pause_scene.update()?, ActiveScene::End => self.end_scene.update()?, } } Ok(()) } fn draw(&mut self, context: &mut Context) -> GameResult { graphics::clear(context, BLACK); match self.active_scene { ActiveScene::Start => self.starting_scene.draw(context)?, ActiveScene::Main => self .main_scene .draw(context, &self.config, &mut self.images)?, ActiveScene::Pause => self.pause_scene.draw(context)?, ActiveScene::End => self.end_scene.draw(context)?, } graphics::present(context) } } #[cfg(test)] mod test { use super::*; #[test] fn test_create_game_state() { let config = config::load("config.json").unwrap(); let (context, _) = &mut initialize::initialize(&config).unwrap(); let game_state = GameState::new(config, context).unwrap(); assert_eq!(game_state.active_scene, ActiveScene::Start); } }
true
bfbe61d777e1e79e61e6e72e4a3fe8c2289def1e
Rust
sbechet/mspacmab
/video-offset-to-xy/src/main.rs
UTF-8
743
3.125
3
[]
no_license
mod reverse; use reverse::Reverse; use std::env; fn help() { println!("usage: video-offset-to-xy [0x]<offset-in-hex> -- Return (x,y) video coords"); } fn main() -> Result<(), core::convert::Infallible> { let args: Vec<String> = env::args().collect(); match args.len() { 1 => help(), 2 => { let arg_without_prefix = &args[1].trim_start_matches("0x"); match i64::from_str_radix(arg_without_prefix, 16) { /* modulo 0x400 so 0x4000 and 0x4400 video offset ok */ Ok(n) => println!("{:?}", Reverse::reverse((n % 0x400) as usize)), Err(_e) => help(), } }, _ => { help(); } } Ok( () ) }
true
3aaf3047b0a84c9a277a6722e89392fb74a8b688
Rust
prototype-A/csci3055u-project-template
/basic-syntax/declaring_variables.rs
UTF-8
869
3.78125
4
[]
no_license
fn main() { // Variables can always by type annotated let my_boolean = true; let my_annotated_boolean: bool = true; // Annotated // The two below applies to unsigned integers and floating-point values as well let my_integer: i32 = 5; // Default annotation let my_suffix_annot_integer = 5i32; // Suffix annotation // Variables must be declared mutable in order to be able to change their value let mut my_mutable_variable = 10; println!("My mutable variable is {}", my_mutable_variable); // Starts out as 10 my_mutable_variable = 11; println!("My mutable variable is now {}", my_mutable_variable); // Now 11 // Variable type can be changed with "shadowing" let my_mutable_variable = true; println!("And now, my mutable variable is now {}", my_mutable_variable); // Now a boolean with the value of true }
true
16db021592a4782447c68493505812865acb155e
Rust
TayWeiChong/lta-rs
/lta_utils_commons/src/lib.rs
UTF-8
10,118
2.78125
3
[ "MIT" ]
permissive
//! Utilities for transforming data and other misc #[macro_use] extern crate lazy_static; pub use chrono; pub use reqwest; pub use serde; use serde::Serialize; use std::fmt::Debug; /// Result type for lta-rs pub type LTAResult<T> = reqwest::Result<T>; /// Error type for lta-rs pub type LTAError = reqwest::Error; /// Regex patterns pub mod regex { use regex::Regex; lazy_static! { pub static ref BUS_FREQ_RE: Regex = Regex::new(r"^(\d{1,3})?-?(\d{1,3})?$").unwrap(); pub static ref CARPARK_COORDS_RE: Regex = Regex::new(r"^([+-]?([0-9]*[.])?[0-9]+) ([+-]?([0-9]*[.])?[0-9]+)$").unwrap(); pub static ref SPEED_BAND_RE: Regex = Regex::new(r"^([+-]?([0-9]*[.])?[0-9]+) ([+-]?([0-9]*[.])?[0-9]+) ([+-]?([0-9]*[.])?[0-9]+) ([+-]?([0-9]*[.])?[0-9]+)$") .unwrap(); } } /// Utils for date types pub mod serde_date { pub mod ymd_hms_option { use chrono::{DateTime, TimeZone, Utc}; use serde::{Deserialize, Deserializer, Serializer}; const FORMAT: &str = "%Y-%m-%d %H:%M:%S"; pub fn serialize<S>(date: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { match date { Some(time) => { let s = format!("{}", time.format(FORMAT)); serializer.serialize_str(&s) } None => serializer.serialize_str("-"), } } pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; Utc.datetime_from_str(&s, FORMAT) .map(Some) .map_err(serde::de::Error::custom) } } pub mod str_time_option { use chrono::{NaiveTime, Timelike}; use serde::{Deserialize, Deserializer, Serializer}; pub fn ser_str_time_opt<S>( opt_time: &Option<NaiveTime>, serializer: S, ) -> Result<S::Ok, S::Error> where S: Serializer, { match opt_time { Some(time) => { let hr = time.hour(); let min = time.minute(); let mut sec_str = String::with_capacity(1); sec_str.push_str("0"); let s = [hr.to_string(), min.to_string(), sec_str].join(":"); serializer.serialize_str(&s) } None => serializer.serialize_none(), } } pub fn 
de_str_time_opt_erp<'de, D>(deserializer: D) -> Result<Option<NaiveTime>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; if s.eq("-") { return Ok(None); } let hr = &mut s[0..1].parse().map_err(serde::de::Error::custom)?; let min = &s[3..4].parse().map_err(serde::de::Error::custom)?; if *hr == 24 { *hr = 0 } let time = NaiveTime::from_hms_opt(*hr, *min, 0); Ok(time) } pub fn de_str_time_opt_br<'de, D>(deserializer: D) -> Result<Option<NaiveTime>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; if s.eq("-") { return Ok(None); } let hr = &mut s[0..1].parse().map_err(serde::de::Error::custom)?; let min = &s[2..3].parse().map_err(serde::de::Error::custom)?; if *hr == 24 { *hr = 0 } let time = NaiveTime::from_hms_opt(*hr, *min, 0); Ok(time) } } pub mod str_date { use chrono::NaiveDate; use serde::{Deserialize, Deserializer, Serializer}; const FORMAT: &str = "%Y-%m-%d"; pub fn serialize<S>(date: &NaiveDate, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let s = format!("{}", date.format(FORMAT)); serializer.serialize_str(&s) } pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveDate, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; NaiveDate::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom) } } } /// Deserialisation utils pub mod de { use std::fmt; use std::fmt::Display; use std::iter::FromIterator; use std::marker::PhantomData as Phantom; use std::str::FromStr; use crate::{regex::*, Coordinates, Location}; use serde::de::{self, Visitor}; use serde::export::Formatter; use serde::{Deserialize, Deserializer}; use serde_json::Value; /// Error for wrapped data pub struct WrapErr; /// Separator trait pub trait Sep { fn delimiter() -> &'static str; } impl Display for WrapErr { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "StringWrapErr") } } /// If error, return None pub fn treat_error_as_none<'de, T, 
D>(deserializer: D) -> Result<Option<T>, D::Error> where T: Deserialize<'de>, D: Deserializer<'de>, { let value: Value = Deserialize::deserialize(deserializer)?; Ok(T::deserialize(value).ok()) } /// Simple conversion of Y and N to boolean pub fn from_str_to_bool<'de, D>(deserializer: D) -> Result<bool, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; match s.as_ref() { "Y" | "Yes" => Ok(true), "N" | "No" => Ok(false), _ => Ok(false), } } /// To be used when coordinates are space separated /// in a string and you would like to convert them to a Coordinates /// structure. pub fn from_str_to_coords<'de, D>(deserializer: D) -> Result<Option<Coordinates>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; if s.is_empty() || !CARPARK_COORDS_RE.is_match(s.as_str()) { return Ok(None); } let caps = CARPARK_COORDS_RE.captures(&s).unwrap(); let lat: f64 = caps.get(1).map_or(0.0, |m| m.as_str().parse().unwrap()); let long: f64 = caps.get(3).map_or(0.0, |m| m.as_str().parse().unwrap()); Ok(Some(Coordinates::new(lat, long))) } pub fn from_str_loc_to_loc<'de, D>(deserializer: D) -> Result<Option<Location>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; if s.is_empty() || !SPEED_BAND_RE.is_match(s.as_str()) { return Ok(None); } let caps = SPEED_BAND_RE.captures(&s).unwrap(); let lat_start = caps.get(1).map_or(0.0, |m| m.as_str().parse().unwrap()); let long_start = caps.get(3).map_or(0.0, |m| m.as_str().parse().unwrap()); let lat_end = caps.get(5).map_or(0.0, |m| m.as_str().parse().unwrap()); let long_end = caps.get(7).map_or(0.0, |m| m.as_str().parse().unwrap()); Ok(Some(Location::new( lat_start, long_start, lat_end, long_end, ))) } pub fn from_str<'de, T, D>(deserializer: D) -> Result<T, D::Error> where T: FromStr, T::Err: Display, D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; T::from_str(&s).map_err(de::Error::custom) } pub fn delimited<'de, V, T, 
D>(deserializer: D) -> Result<V, D::Error> where V: FromIterator<T>, T: FromStr + Sep, T::Err: Display, D: Deserializer<'de>, { struct DelimitedBy<V, T>(Phantom<V>, Phantom<T>); impl<'de, V, T> Visitor<'de> for DelimitedBy<V, T> where V: FromIterator<T>, T: FromStr + Sep, T::Err: Display, { type Value = V; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string containing / separated elements") } fn visit_str<E>(self, s: &str) -> Result<Self::Value, E> where E: de::Error, { let iter = s.split(T::delimiter()).map(FromStr::from_str); Result::from_iter(iter).map_err(de::Error::custom) } } let visitor = DelimitedBy(Phantom, Phantom); deserializer.deserialize_str(visitor) } } /// A `Client` to make requests with /// The `Client` holds a connection pool internally, so it is advised that you create one and reuse it pub trait Client<C, RB> { /// General constructor fn new(api_key: Option<String>, client: C) -> Self; /// This method not assign the `api_key` in struct if the provided key is empty or whitespaces /// Instead, assign `None` fn with_api_key<S>(api_key: S) -> Self where S: Into<String>; /// Make sure that you check that the `api_key` is not `None`! fn get_req_builder(&self, url: &str) -> RB; } /// Starting and ending location #[derive(Debug, Clone, PartialEq, Serialize)] pub struct Location { pub start: Coordinates, pub end: Coordinates, } impl Location { pub fn new(start_lat: f64, start_lang: f64, end_lat: f64, end_lang: f64) -> Self { Location { start: Coordinates::new(start_lat, start_lang), end: Coordinates::new(end_lat, end_lang), } } pub fn from_coords(start: Coordinates, end: Coordinates) -> Self { Location { start, end } } } /// Coordinate on the map #[derive(Debug, Clone, PartialEq, Serialize)] pub struct Coordinates { pub lat: f64, pub long: f64, } impl Coordinates { pub fn new(lat: f64, long: f64) -> Self { Coordinates { lat, long } } }
true
dda55d3cff9948391ee5b2c1d954014f2663b1cd
Rust
shreve/cryptopals
/src/encrypt.rs
UTF-8
285
2.6875
3
[]
no_license
use crate::Bytes; pub fn repeating_key_cipher(input: &Bytes, key: &Bytes) -> Bytes { let keylen = key.len(); let msglen = input.len(); let mut out = Bytes::new(); for i in 0..msglen { let j = i % keylen; out.push(input[i] ^ key[j]); } out }
true
39eac92a67602dd102a98005a3718b13c2cf21ab
Rust
rwaldvogel/grib-rs
/src/bin/gribber/commands/inspect.rs
UTF-8
3,813
2.84375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
use clap::{App, Arg, ArgMatches, SubCommand}; use console::{Style, Term}; use grib::context::{SectionInfo, TemplateInfo}; use crate::cli; pub fn cli() -> App<'static, 'static> { SubCommand::with_name("inspect") .about("Inspects and describes the data structure") .arg( Arg::with_name("sections") .help("Prints sections constructing the GRIB message") .short("s") .long("sections"), ) .arg( Arg::with_name("templates") .help("Prints templates used in the GRIB message") .short("t") .long("templates"), ) .arg(Arg::with_name("file").required(true)) .after_help( "\ This subcommand is mainly targeted at (possible) developers and engineers, who wants to understand the data structure for the purpose of debugging, enhancement, and education.\ ", ) } pub fn exec(args: &ArgMatches<'static>) -> Result<(), cli::CliError> { let file_name = args.value_of("file").unwrap(); let grib = cli::grib(file_name)?; let mut view = InspectView::new(); if args.is_present("sections") { view.add(InspectItem::Sections(grib.sections())); } if args.is_present("templates") { let tmpls = grib.list_templates(); view.add(InspectItem::Templates(tmpls)); } if view.items.len() == 0 { view.add(InspectItem::Sections(grib.sections())); let tmpls = grib.list_templates(); view.add(InspectItem::Templates(tmpls)); } let user_attended = console::user_attended(); let term = Term::stdout(); let (height, _width) = term.size(); if view.num_lines() > height.into() { cli::start_pager(); } if user_attended { console::set_colors_enabled(true); } let with_header = view.with_headers(); let mut items = view.items.into_iter().peekable(); loop { let item = match items.next() { None => break, Some(i) => i, }; if with_header { let yellow = Style::new().yellow().bold(); let s = format!("{}:", item.title()); println!("{}", yellow.apply_to(s)); } match item { InspectItem::Sections(sects) => { for sect in sects.iter() { println!("{}", sect); } } InspectItem::Templates(tmpls) => { for tmpl in tmpls.iter() { println!("{}", tmpl); } } } 
if let Some(_) = items.peek() { println!(""); } } Ok(()) } struct InspectView<'i> { items: Vec<InspectItem<'i>>, } impl<'i> InspectView<'i> { fn new() -> Self { Self { items: Vec::new() } } fn add(&mut self, item: InspectItem<'i>) { self.items.push(item); } fn with_headers(&self) -> bool { !(self.items.len() < 2) } fn num_lines(&self) -> usize { let mut count = 0; for item in self.items.iter() { if self.with_headers() { count += 1; } count += item.len(); } count += self.items.len() - 1; // empty lines count } } enum InspectItem<'i> { Sections(&'i Box<[SectionInfo]>), Templates(Vec<TemplateInfo>), } impl<'i> InspectItem<'i> { fn title(&self) -> &'static str { match self { InspectItem::Sections(_) => "Sections", InspectItem::Templates(_) => "Templates", } } fn len(&self) -> usize { match self { InspectItem::Sections(sects) => sects.len(), InspectItem::Templates(tmpls) => tmpls.len(), } } }
true
e6bc1baaaf25a14844adf422c78b744105f899c9
Rust
barudisshu/dive-into-rust
/dive_into_rust/src/chap25/mod.rs
UTF-8
9,296
4.15625
4
[]
no_license
//! //! //! | 容器 | 描述 | //! |:---------------------:|:-------------------------------------------:| //! | `Vec` | 可变长数组,连续存储 | //! | `VecDeque` | 双向队列,适用于从头部和尾部插入删除数据 | //! | `LinkedList` | 双向链表,非连续存储 | //! | `HashMap` | 基于`Hash`算法存储一系列键值对 | //! | `BTreeMap` | 基于`B`树存储一系列键值对 | //! | `HashSet` | 基于`Hash`算法的集合,相当于没有值的`HashMap` | //! | `BTreeSet` | 基于`B`树的集合,相当于没有值的`BTreeMap` | //! | `BinaryHeap` | 基于二叉堆实现的优先级队列 | //! //! use std::collections::vec_deque::VecDeque; use std::collections::btree_map::BTreeMap; /// /// 一个Vec中能存储的元素个数最多为`std::usize::MAX`个,超过了会发生panic。因为它记录元素个数, /// 用的就是usize类型》如果我们指定元素的类型是0大小的类型,那么,这个Vec根本不需要在堆上分配任何空间。 /// /// #[test] fn _25_01_01_collections() { // 常见的几种构造Vec的方式 // 1. `new()` 方法与 `default()` 方法一样,构造一个空的`Vec` let v1 = Vec::<i32>::default(); // 2. `with_capacity()`方法可以预先分配一个较大空间,避免插入数据的时候动态扩容 let v2: Vec<String> = Vec::with_capacity(1000); // 3. 利用宏来初始化,语法跟数组初始化类似 let v3 = vec![1, 2, 3]; // 插入数据 let mut v4 = Vec::new(); // 多种插入数据的方式 v4.push(1); v4.extend_from_slice(&[10, 20, 30, 40, 50]); v4.insert(2, 100); println!("capacity: {} length: {}", v4.capacity(), v4.len()); // 访问数据 // 调用 IndexMut 运算符,可以写入数据 v4[5] = 5; let i = v4[5]; println!("{}", i); // Index 运算符直接访问,如果越界则会造成panic,而get方法不会,因为它返回一个`Option<T>` if let Some(i) = v4.get(6) { println!("{}", i); } // Index 运算符支持使用各种 Range 作为索引 let slice = &v4[4..]; println!("{:?}", slice); } /// 另外,因为Vec里面存在一个指向堆上的指针,它永远是非空的状态,编译器可以据此做优化,使得 /// `size_of::<Option<Vec<T>>>() == size_of::<Vec<T>>()`。 #[test] fn _23_01_02_collections() { struct ZeroSized {} let mut v = Vec::<ZeroSized>::new(); println!("capacity: {} length: {}", v.capacity(), v.len()); v.push(ZeroSized {}); v.push(ZeroSized {}); println!("capacity: {} length: {}", v.capacity(), v.len()); // p 永远指向 `align_of::<ZeroSized>()`,不需要调用 allocator let p = v.as_ptr(); println!("ptr:{:p}", p); let size1 = std::mem::size_of::<Vec<i32>>(); let size2 = std::mem::size_of::<Option<Vec<i32>>>(); println!("size of Vec: {} size of option vec: {}", size1, 
size2); } /// /// `VecDeque` /// VecDeque是一个双向队列。在它的头部或者尾部执行添加或者删除操作,都是效率很高的。它的用法和Vec非常相似, /// 主要是多了`pop_front()` `push_front()`等方法。 /// #[test] fn _23_02_01_collections() { use std::collections::VecDeque; let mut queue = VecDeque::with_capacity(64); // 向尾部书序插入一堆数据 for i in 1..10 { queue.push_back(i); } // 从头部按顺序一个个取出来 while let Some(i) = queue.pop_front() { println!("{}", i); } } /// /// `HashMap` /// Rust中的HashMap要求,key要满足Eq+Hash的约束, /// /// HashMap的查找、插入、删除操作的平均时间复杂度都是O(1)。 /// #[test] fn _23_03_01_collections() { use std::collections::HashMap; #[derive(Hash, Eq, PartialEq, Debug)] struct Person { first_name: String, last_name: String, } impl Person { fn new(first: &str, last: &str) -> Self { Person { first_name: first.to_string(), last_name: last.to_string(), } } } let mut book = HashMap::new(); book.insert(Person::new("John", "Smith"), "521-8976"); book.insert(Person::new("Sandra", "Dee"), "521-9655"); book.insert(Person::new("Ted", "Baker"), "418-4165"); let p = Person::new("John", "Smith"); // 查找键对应的值 if let Some(phone) = book.get(&p) { println!("Phone number found: {}", phone); } // 删除 book.remove(&p); // 查询是否存在 println!("Find key: {}", book.contains_key(&p)); } /// /// /// HashMap里面,key存储的位置跟它本身的值密切相关,如果key本身变了,那么它存放的位置 /// 也需要相应变化。所以,HashMap设计的各种API中,指向key的借用一般是只读借用,防止用户 /// 修改它。但是,只读借用并不能完全保证它不被修改,读者应该能想到,只读借用依然可以改变具备 /// 内部可变性特点的类型。 /// #[test] fn _23_03_02_collections() { use std::hash::{Hash, Hasher}; use std::collections::HashMap; use std::cell::Cell; #[derive(Eq, PartialEq)] struct BadKey { value: Cell<i32>, } impl BadKey { fn new(v: i32) -> Self { BadKey { value: Cell::new(v) } } } impl Hash for BadKey { fn hash<H: Hasher>(&self, state: &mut H) { self.value.get().hash(state); } } let mut map = HashMap::new(); map.insert(BadKey::new(1), 100); map.insert(BadKey::new(2), 200); for key in map.keys() { key.value.set(key.value.get() * 2); } println!("Find key 1:{:?}", map.get(&BadKey::new(1))); println!("Find key 2:{:?}", map.get(&BadKey::new(2))); 
println!("Find key 4:{:?}", map.get(&BadKey::new(4))); // 这里设计了一个具备内部可变性的类型作为key。然后直接在容器内部把它的值改变,接下来继续做查找 // 可以看到,我们再也找不到这几个key了,不论是用修改前的key值,还是用修改后的key值,都找不到。这属于逻辑错误 } /// /// `BTreeMap` /// /// BTreeMap对key的要求是满足Ord约束,即具备“全序”特征。 /// /// #[test] fn _23_03_03_collections() { use std::collections::BTreeMap; #[derive(Ord, PartialOrd, PartialEq, Eq, Debug, Default)] struct Person{ first_name: String, last_name: String, } impl Person { fn new(first: &str, last: &str) -> Self { Person { first_name: first.to_string(), last_name: last.to_string(), } } } let mut book = BTreeMap::new(); book.insert(Person::new("John", "Smith"), "521-8976"); book.insert(Person::new("Sandra", "Dee"), "521-9655"); book.insert(Person::new("Ted", "Baker"), "418-4165"); let p = Person::new("John", "Smith"); // 查找键对应的值 if let Some(phone) = book.get(&p) { println!("Phone number found: {}", phone); } // 删除 book.remove(&p); // 查询是否存在 println!("Find key: {}", book.contains_key(&p)); } /// /// BTreeMap比HashMap多的一项功能是,它不仅可以查询单个key的结果,还可以查询一个区间的结果 /// #[test] fn _25_03_04_collections() { use std::collections::BTreeMap; let mut map = BTreeMap::new(); map.insert(3, "a"); map.insert(5, "b"); map.insert(8, "c"); for (k, v) in map.range(2..6) { println!("{} : {}", k, v); } } /// /// 迭代器 /// /// #[test] fn _25_04_01_collections() { use std::iter::Iterator; struct Seq { current: i32, } impl Seq { fn new() -> Self { Seq { current: 0 } } } impl Iterator for Seq { type Item = i32; fn next(&mut self) -> Option<i32> { if self.current < 100 { self.current += 1; return Some(self.current); } else { return None; } } } let mut seq = Seq::new(); while let Some(i) = seq.next() { println!("{}", i); } } /// /// Rust迭代器的强大之处在于可以组合,组合的形式由: /// /// producer + adapter + consumer /// /// #[test] fn _25_04_02_collections() { let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; let mut iter = v.iter() .take(5) .filter(|&x| x % 2 == 0) .map(|&x| x * x) .enumerate(); while let Some((i, v)) = iter.next() { println!("{} {}", i, v); } } /// /// 
for循环 ,它实际上是对IntoIterator trait的语法糖 /// #[test] fn _25_05_01_collections() { use std::collections::HashMap; let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9]; for i in v { println!("{}", i); } let map: HashMap<i32, char> = [].iter().cloned().collect(); for (k, v) in &map { println!("{} : {}", k, v); } }
true
a1f744bed170a1c508137b77fcc2ba3163ed9d3b
Rust
andir/shutdown
/src/hass/attributes.rs
UTF-8
546
3.125
3
[]
no_license
use serde_json::Value; use std::collections::HashMap; #[derive(Serialize, Deserialize, Debug)] pub struct Attributes(HashMap<String, Value>); impl Attributes { pub fn new() -> Self { Attributes(HashMap::new()) } pub fn set(mut self, key: impl AsRef<str>, value: impl Into<Value>) -> Self { self.0.insert(key.as_ref().to_string(), value.into()); self } } #[cfg(test)] mod tests { use super::*; #[test] fn test_attribute() { Attributes::new().set("foo", 1).set("baz", "foo"); } }
true
7eaf47137e476d833dac328842e9b8cd9ad90801
Rust
pinusc/batnet
/src/runner.rs
UTF-8
1,809
2.65625
3
[]
no_license
use std::process::Command; use peer; use std::io::Write; use crypto_hash::{Algorithm, hex_digest}; pub fn handle_command(peer: &mut peer::Peer, msg: peer::Msg) { if let Some(stream) = peer.stream { match msg.command.as_ref() { "close" => { peer.stream = None; }, "auth" => { if let Some(pass) = msg.argument { let digest = hex_digest(Algorithm::SHA256, pass.as_bytes().to_vec()); if digest == "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae" { peer.authenticated = true; let _ = stream.write("{\"result\":\"Authenticated!\"}".as_bytes()); } else { let _ = stream.write("{\"result\":\"Not authenticated!\"}".as_bytes()); } } }, "ping" => { let _ = stream.write("pong".as_bytes()); }, "cmd" => { if let Some(arg) = msg.argument { peer.output = Command::new("/bin/sh") .arg("-c") .arg(&arg) .output() .ok(); peer.running = Some(arg); } }, "output" => { if let Some(ref out) = peer.output { let _ = stream.write(&out.stdout); } else { let _ = stream.write("ERROR: There is no output!".as_bytes()); } }, "knock" => { let _ = stream.write("{\"command\":\"knock-back\"}".as_bytes()); }, _ => println!("invalid command") }; } }
true
a992cc2bc62931e54675d744778f02e931df081f
Rust
ywatanabee/learning-systems-programming-in-rust
/chapter9/src/9_5_4/main.rs
UTF-8
648
3.234375
3
[]
no_license
use lib::path::*; use std::path::Path; fn main() -> Result<(), PathError> { //存在しないパスを指定した場合、エラーを返す // パスをそのままクリーンにする let path = Path::new("./chapter9/src/../src/9_5_4/main.rs").clean()?; println!("{:?}", path); // パスを絶対パスに整形 let abs_path = Path::new("./chapter9/src/9_5_4/main.rs").canonicalize()?; println!("{:?}", abs_path); // パスを相対パスに整形 let abs_path = Path::new("/usr/local/go/bin/go"); let rel_path = abs_path.strip_prefix("/usr/local/go")?; println!("{:?}", rel_path); Ok(()) }
true
54ddbf81af133afbe1813dd8f51cecc799e0f66b
Rust
cedrickcooke/litx
/src/unslice.rs
UTF-8
1,568
3.25
3
[]
no_license
use std; pub unsafe trait Unslice { fn unslice<'a>(&'a self, next: &'a Self) -> Option<&'a Self> { if self.is_adjacent(next) { unsafe { return Some(self.unslice_unchecked(next)); } } None } fn is_adjacent(&self, next: &Self) -> bool; unsafe fn unslice_unchecked<'a>(&'a self, next: &'a Self) -> &'a Self; } unsafe impl Unslice for str { fn is_adjacent(&self, next: &Self) -> bool { unsafe { self.as_ptr().offset(self.len() as isize) == next.as_ptr() } } unsafe fn unslice_unchecked<'a>(&'a self, next: &'a Self) -> &'a Self { let slice = std::slice::from_raw_parts(self.as_ptr(), self.len() + next.len()); std::str::from_utf8_unchecked(slice) } } unsafe impl <T> Unslice for [T] { fn is_adjacent(&self, next: &Self) -> bool { unsafe { self.as_ptr().offset(self.len() as isize) == next.as_ptr() } } unsafe fn unslice_unchecked<'a>(&'a self, next: &'a Self) -> &'a Self { std::slice::from_raw_parts(self.as_ptr(), self.len() + next.len()) } } #[cfg(test)] mod test { use super::Unslice; const SRC: &'static str = "FOO BAR"; fn slice() -> (&'static str, &'static str, &'static str) { (&SRC[0..3], &SRC[3..4], &SRC[4..7]) } #[test] #[should_panic] fn unslice_foo_bar() { let (foo, spc, bar) = slice(); foo.unslice(bar).unwrap(); } #[test] fn unslice() { let (foo, spc, bar) = slice(); assert!(foo.unslice(spc).unwrap().unslice(bar).unwrap() == SRC); } }
true