Columns (name, dtype, length/value range):

  blob_id            stringlengths    40 .. 40
  language           stringclasses    1 value
  repo_name          stringlengths    5 .. 140
  path               stringlengths    5 .. 183
  src_encoding       stringclasses    6 values
  length_bytes       int64            12 .. 5.32M
  score              float64          2.52 .. 4.94
  int_score          int64            3 .. 5
  detected_licenses  listlengths      0 .. 47
  license_type       stringclasses    2 values
  text               stringlengths    12 .. 5.32M
  download_success   bool             1 class
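Each record that follows fills these twelve columns in order (blob hash, language, repo, path, encoding, size, scores, licenses, license type, file text, download flag). As a minimal sketch only — assuming each row deserializes as a flat JSON object, and assuming the `serde`/`serde_json` crates with serde's `derive` feature, none of which the dump itself specifies — a row could be modeled in Rust like this; the sample literal reuses the first record below with its `text` field truncated:

use serde::Deserialize;

/// One row of the dump; field names mirror the column schema above.
#[derive(Debug, Deserialize)]
struct Record {
    blob_id: String,            // 40-character blob hash
    language: String,           // single class: "Rust"
    repo_name: String,
    path: String,
    src_encoding: String,       // e.g. "UTF-8"
    length_bytes: i64,
    score: f64,
    int_score: i64,
    detected_licenses: Vec<String>,
    license_type: String,       // "permissive" or "no_license"
    text: String,               // full source file contents
    download_success: bool,
}

fn main() -> Result<(), serde_json::Error> {
    // Illustrative row built from the first record below; `text` is truncated here.
    let row = r#"{
        "blob_id": "25c4944b010dd7d4dda4a34b83ef4af3bcfff5d6",
        "language": "Rust",
        "repo_name": "ryanobeirne/advent_of_code_2018",
        "path": "/bin/day07.rs",
        "src_encoding": "UTF-8",
        "length_bytes": 5387,
        "score": 3.0,
        "int_score": 3,
        "detected_licenses": [],
        "license_type": "no_license",
        "text": "use std::collections::BTreeMap;",
        "download_success": true
    }"#;

    let record: Record = serde_json::from_str(row)?;
    println!("{}{} ({} bytes)", record.repo_name, record.path, record.length_bytes);
    Ok(())
}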
25c4944b010dd7d4dda4a34b83ef4af3bcfff5d6
Rust
ryanobeirne/advent_of_code_2018
/bin/day07.rs
UTF-8
5,387
3
3
[]
no_license
use std::collections::BTreeMap;
use std::io::Read;
use std::result;
use std::str::FromStr;
use std::error::Error;
use std::fmt;

type Result<T> = result::Result<T, Box<Error>>;

fn main() -> Result<()> {
    let mut input = String::new();
    std::io::stdin().read_to_string(&mut input)?;

    let instructions: Vec<Instruction> = input.lines()
        .map(|line| Instruction::from_str(line))
        .filter_map(|o| o.ok())
        .collect();

    let answer1 = part1(instructions);
    println!("Day 7, Part 1:\t{}", answer1);
    if answer1 != "LAPFCRGHVZOTKWENBXIMSUDJQY" {
        println!("WRONG! Answer:\tLAPFCRGHVZOTKWENBXIMSUDJQY");
        std::process::exit(1);
    }

    Ok(())
}

fn part1(instructions: Vec<Instruction>) -> String {
    let mut order = Vec::<Name>::new();
    // let mut order_queue = Vec::<Vec<Name>>::new();
    let prereq_map = Instruction::prereq_map(instructions);
    let mut status_map = StatusMap::new(&prereq_map);

    // Get initial instructions without prerequisites
    for prereqs in prereq_map.values() {
        for p in prereqs {
            if ! prereq_map.contains_key(&p) && ! order.contains(&p) {
                status_map.mark_done(p);
                order.push(*p);
            }
        }
    }

    for (key, value) in &prereq_map {
        println!("{}:\n{:?}\n", key, value);
    }

    assert!( ! prereq_map.values().all(|v| v.is_empty()) );

    while status_map.is_not_done() {
        for (name, prereqs) in &prereq_map {
            if status_map.satisfied(prereqs) && !order.contains(name) {
                status_map.mark_done(name);
                order.push(*name);
            }
        }
    }

    order.iter().collect()
}

type Name = char;
type PreReq = char;

#[derive(Debug, Ord, PartialOrd, Eq, PartialEq)]
struct Instruction {
    name: Name,
    prereq: PreReq,
}

type PrereqMap = BTreeMap<Name, Vec<PreReq>>;

impl Instruction {
    fn prereq_map(instructions: Vec<Instruction>) -> PrereqMap {
        let mut prereq_map = PrereqMap::new();

        for i in instructions {
            prereq_map
                .entry(i.name)
                .or_insert(Vec::new())
                .push(i.prereq);
        }

        for prereqs in prereq_map.values_mut() {
            prereqs.sort();
        }

        prereq_map
    }
}

impl FromStr for Instruction {
    type Err = Box<Error>;
    fn from_str(s: &str) -> Result<Instruction> {
        let split = s.split_whitespace().collect::<Vec<&str>>();
        let name = split[7].chars().collect::<Vec<char>>()[0];
        let prereq = split[1].chars().collect::<Vec<char>>()[0];
        Ok( Instruction { name, prereq } )
    }
}

#[derive(Debug, Eq, PartialEq)]
enum Progress {
    NotStarted,
    // InProgress,
    Done,
}

#[derive(Debug)]
struct StatusMap(BTreeMap<Name, Progress>);

impl StatusMap {
    fn new(prereq_map: &PrereqMap) -> StatusMap {
        let mut status_map = BTreeMap::new();

        for (name, prereqs) in prereq_map {
            status_map.insert(*name, Progress::NotStarted);
            for p in prereqs {
                status_map.insert(*p, Progress::NotStarted);
            }
        }

        StatusMap(status_map)
    }

    fn mark_done(&mut self, name: &Name) {
        if self.0.contains_key(name) {
            self.0.insert(*name, Progress::Done);
        } else {
            panic!("Status map does not contain key '{}'", name);
        }
    }

    fn is_done(&self) -> bool {
        self.0.values().all(|s| *s == Progress::Done)
    }

    fn is_not_done(&self) -> bool {
        !self.is_done()
    }

    fn did(&self, name: &Name) -> bool {
        self.0.contains_key(name)
            && *self.0.get(name)
                .expect("StatusMap does not contain key. Cannot check if done")
                == Progress::Done
    }

    #[allow(dead_code)]
    fn did_not_do(&self, name: &Name) -> bool {
        !self.did(name)
    }

    fn satisfied(&self, names: &Vec<PreReq>) -> bool {
        names.iter().all(|n| self.did(n))
    }

    #[allow(dead_code)]
    fn unsatisfied(&self, names: &Vec<PreReq>) -> bool {
        !self.satisfied(names)
    }
}

impl fmt::Display for StatusMap {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut format = String::new();
        for (key, value) in &self.0 {
            format.push_str(format!("\t{}: {:?}\n", key, value).as_str());
        }
        write!(f, "\nStatusMap{{\n{}}}\n", format)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn all_done() {
        let prereqs = &vec!['A', 'B', 'C'];
        let mut status_map = StatusMap(BTreeMap::new());
        status_map.0.insert('A', Progress::Done);
        status_map.0.insert('B', Progress::Done);
        status_map.0.insert('C', Progress::Done);
        assert!(status_map.is_done());
        assert!(status_map.satisfied(prereqs));

        let prereqs = &vec!['1', '2', '3'];
        let mut status_map = StatusMap(BTreeMap::new());
        status_map.0.insert('1', Progress::Done);
        status_map.0.insert('2', Progress::NotStarted);
        status_map.0.insert('3', Progress::Done);
        assert!(status_map.unsatisfied(prereqs));
        assert!(status_map.is_not_done());
        status_map.mark_done(&'2');
        assert!(status_map.satisfied(prereqs));
        assert!(status_map.is_done());
    }
}
true
fa12b779ab7bd92caa1a87b76233e2c98411d447
Rust
silvadanilo/stubborn-delivery
/src/bin/fake-client.rs
UTF-8
1,055
2.6875
3
[ "MIT" ]
permissive
use std::env;
use std::{thread, time};
use std::io::Write;
use std::net::TcpStream;
use std::fs::OpenOptions;

fn main() {
    let mut stream = TcpStream::connect("127.0.0.1:12345").expect("StubbornSink seems down");

    for argument in env::args().skip(1) {
        let v = argument.splitn(2, ':').collect::<Vec<_>>();
        let action: (&str, &str) = (v[0], v[1]);
        match action {
            (key, value) => {
                match key {
                    "sleep" => {
                        thread::sleep(time::Duration::from_millis(value.parse::<u64>().unwrap()));
                    },
                    "send" => {
                        let _ = stream.write((value.to_string() + "\n").as_bytes());
                        let mut file = OpenOptions::new().append(true).open("/tmp/fake-client.txt").unwrap();
                        file.write_all((value.to_string() + "\n").as_bytes());
                    }
                    x => panic!("command `{}` is not supported", x),
                }
            }
        }
    }
}
true
a422609d87b3a289a6fc2c350ac5aedf2026a876
Rust
dipique/rust_book
/10_generic_types_traits_and_lifetimes/src/traits.rs
UTF-8
3,094
3.921875
4
[]
no_license
pub trait Summary {
    fn summarize(&self) -> String;
}

// traits look like interfaces!
pub struct NewsArticle {
    pub headline: String,
    pub location: String,
    pub author: String,
    pub content: String,
}

impl Summary for NewsArticle {
    fn summarize(&self) -> String {
        format!("{}, by {} ({})", self.headline, self.author, self.location)
    }
}

pub struct Tweet {
    pub username: String,
    pub content: String,
    pub reply: bool,
    pub retweet: bool,
}

impl Summary for Tweet {
    fn summarize(&self) -> String {
        format!("{}: {}", self.username, self.content)
    }
}

// create a default implementation
pub trait DefaultedSummary {
    fn summarize_author(&self) -> String;
    fn dsummarize(&self) -> String {
        format!("(Read more from {}...)", self.summarize_author())
    }
}

impl DefaultedSummary for NewsArticle {
    fn summarize_author(&self) -> String {
        self.author.to_string()
    }
    fn dsummarize(&self) -> String {
        self.summarize()
    }
}

// use default implementation
impl DefaultedSummary for Tweet {
    fn summarize_author(&self) -> String {
        format!("@{}", self.username)
    }
}

// require parameter to have a trait
pub fn notify(item: &impl DefaultedSummary) {
    println!("Breaking news! {}", item.dsummarize());
}

// notify is syntactic sugar for a longer form called
// a "trait bound":
pub fn dnotify<T: DefaultedSummary>(item: &T) {
    println!("Breaking news! {}", item.dsummarize());
}

// this also allows us to create multiple parameters that
// we force to be the same type
pub fn mnotify<T: DefaultedSummary>(first: T, second: T) {
    println!("{} {}", first.summarize_author(), second.summarize_author())
}

// with multiple traits:
// fn some_function<T: Display + Clone, U: Display + Clone>(t: &T, u: &U) -> i32 {}
// we can also use a where clause to express the same restrictions:
fn some_function<T, U>(t: &T, u: &U) -> i32
where
    T: Summary + Clone,
    U: Summary + Clone,
{
    1
}

// to return a type with a trait
fn returns_summarizable() -> impl Summary {
    Tweet {
        username: "".to_string(),
        content: String::from(""),
        reply: true,
        retweet: false,
    }
}
// however, you can only return one type from a function, even if
// the different types implement the same trait :(

//// conditionally applied traits
use std::fmt::Display;

struct Pair<T> {
    x: T,
    y: T,
}

impl<T> Pair<T> {
    fn new(x: T, y: T) -> Self {
        // hey! cool!
        Self { x, y }
    }
}

// this function is only available if T implements these traits
impl<T: Display + PartialOrd> Pair<T> {
    fn cmp_display(&self) {
        if self.x >= self.y {
            println!("The largest member is x = {}", self.x);
        } else {
            println!("The largest member is y = {}", self.y);
        }
    }
}

// we can also implement a function for any type that implements a trait
// impl<T: Display> ToString for T { }
true
5073f26e9b9f758770f325230b3ca403824ece55
Rust
awto-rs/awto
/awto-cli/src/main.rs
UTF-8
2,572
2.546875
3
[ "Apache-2.0", "MIT" ]
permissive
//! <div align="center">
//! <h1>awto</h1>
//!
//! <p>
//! <strong>Awtomate your 🦀 microservices with awto</strong>
//! </p>
//!
//! </div>
//!
//! # awto-cli
//!
//! Command-line-interface for compiling projects built with [`awto`](https://docs.rs/awto).
//!
//! See more on the [repository](https://github.com/awto-rs/awto).

use std::io::Write;

use anyhow::Result;
use async_trait::async_trait;
use clap::Parser;
use colored::Colorize;
use compile::Compile;
use log::{error, Level, LevelFilter};

mod compile;
mod macros;
mod util;

/// Awto cli
#[derive(Parser)]
struct Opts {
    #[clap(subcommand)]
    pub subcmd: SubCommand,
}

#[derive(Parser)]
enum SubCommand {
    Compile(Compile),
}

#[tokio::main]
async fn main() {
    let opts: Opts = Opts::parse();

    let mut cmd = match opts.subcmd {
        SubCommand::Compile(compile) => match compile.subcmd {
            Some(compile::SubCommand::Database(database)) => runnable_cmd!(database),
            Some(compile::SubCommand::Protobuf(protobuf)) => runnable_cmd!(protobuf),
            None => runnable_cmd!(compile),
        },
    };

    let log_level = if cmd.is_verbose() {
        LevelFilter::Debug
    } else {
        LevelFilter::Info
    };

    env_logger::Builder::new()
        .filter_level(log_level)
        .format(|buf, record| {
            let prefix = match record.level() {
                Level::Error => "error".red(),
                Level::Warn => "warn".yellow(),
                Level::Info => "info".blue(),
                Level::Debug => "debug".purple(),
                Level::Trace => "trace".cyan(),
            }
            .bold();

            writeln!(buf, "{} {}", prefix, record.args())
        })
        .init();

    if let Err(err) = cmd.run().await {
        error!("{}", err);
        if cmd.is_verbose() {
            let err_chain = err.chain().skip(1);
            if err_chain.clone().next().is_some() {
                eprintln!("{}", "\nCaused by:".italic().truecolor(190, 190, 190));
            }
            err_chain
                .for_each(|cause| eprintln!(" - {}", cause.to_string().truecolor(190, 190, 190)));
        }

        #[cfg(not(debug_assertions))]
        eprintln!(
            "\nIf the problem persists, please submit an issue on the Github repository.\n{}",
            "https://github.com/awto-rs/awto/issues/new".underline()
        );

        std::process::exit(1);
    }
}

#[async_trait]
pub trait Runnable {
    async fn run(&mut self) -> Result<()>;

    fn is_verbose(&self) -> bool {
        false
    }
}
true
ffe60db34cb265ad7283901b5be4e42e3358532c
Rust
AustinHaugerud/oxidsys
/src/language/operations/troop/troop_add_items.rs
UTF-8
794
2.5625
3
[ "MIT" ]
permissive
use language::operations::{make_param_doc, Operation, ParamInfo};

pub struct TroopAddItemsOp;

const DOC: &str = "Adds multiple items of specified type to the troop.";

pub const OP_CODE: u32 = 1535;
pub const IDENT: &str = "troop_add_items";

impl Operation for TroopAddItemsOp {
    fn op_code(&self) -> u32 {
        OP_CODE
    }

    fn documentation(&self) -> &'static str {
        DOC
    }

    fn identifier(&self) -> &'static str {
        IDENT
    }

    fn param_info(&self) -> ParamInfo {
        ParamInfo {
            num_required: 3,
            num_optional: 0,
            param_docs: vec![
                make_param_doc("<troop_id>", ""),
                make_param_doc("<item_id>", ""),
                make_param_doc("<number>", ""),
            ],
        }
    }
}
true
616b993d372a95c452229a1111a8c8f865eb6f46
Rust
Binbiubiubiu/design-pattern-in-rust
/examples/single.rs
UTF-8
1,150
3.34375
3
[]
no_license
use std::mem::MaybeUninit;
use std::sync::{Mutex, Once};
use std::thread;

#[derive(Debug)]
struct Config {
    config_str: String,
}

// Singleton function:
// first, there is only ever one instance;
// second, MaybeUninit reserves the uninitialized memory that will hold it.
fn single_config() -> &'static Mutex<Config> {
    static mut CONFIG: MaybeUninit<Mutex<Config>> = MaybeUninit::uninit();
    static ONCE: Once = Once::new();

    ONCE.call_once(|| unsafe {
        CONFIG.as_mut_ptr().write(Mutex::new(Config {
            config_str: "test config".to_string(),
        }));
    });

    unsafe { &*CONFIG.as_ptr() }
}

fn main() {
    let config1 = single_config();
    let config2 = single_config();
    println!("{:?}", config1);
    println!("{:?}", config2);

    {
        let mut conf = config1.lock().unwrap();
        conf.config_str = "config1".to_string();
    }

    println!("{:?}", config1);
    println!("{:?}", config2);

    let handle = thread::spawn(move || {
        let mut conf = single_config().lock().unwrap();
        conf.config_str = "thread change".to_string();
    });
    handle.join().unwrap();

    println!("{:?}", config1);
    println!("{:?}", config2);
}
true
2938a548e6a4b174ae43335c40fd219af461336c
Rust
ytausky/gbemu-core
/src/cpu/tests/alu.rs
UTF-8
9,774
2.8125
3
[]
no_license
use super::*; #[test] fn add() { for test_case in ADDITION_TEST_CASES { if !test_case.input.carry_in { test_adder_for_all_r(&encode_add_a_r, test_case); test_add_deref_hl(test_case) } } } fn encode_add_a_r(r: R) -> Vec<u8> { vec![0b10_000_000 | r.code()] } fn test_add_deref_hl(test_case: &AluTestCase) { const ADD_DEREF_HL: &[u8] = &[0x86]; test_addition_deref_hl(ADD_DEREF_HL, test_case) } fn test_addition_deref_hl(opcode: &[u8], test_case: &AluTestCase) { let mut cpu = Cpu::default(); cpu.data.a = test_case.input.x; cpu.data.f.cy = test_case.input.carry_in; cpu.data.h = 0x12; cpu.data.l = 0x34; cpu.test_simple_instr( opcode, &[ (input!(), output!(bus: bus_read(cpu.data.hl()))), (input!(data: test_case.input.y), output!()), ], ); assert_eq!(cpu.data.a, test_case.expected.result); assert_eq!(cpu.data.f, test_case.expected.flags) } #[test] fn adc_a_r() { for test_case in ADDITION_TEST_CASES { test_adder_for_all_r(&encode_adc_a_r, test_case); test_adc_deref_hl(test_case) } } fn encode_adc_a_r(r: R) -> Vec<u8> { vec![0b10_001_000 | r.code()] } fn test_adc_deref_hl(test_case: &AluTestCase) { const ADC_A_DEREF_HL: &[u8] = &[0x8e]; test_addition_deref_hl(ADC_A_DEREF_HL, test_case) } fn test_adder_for_all_r<F: Fn(R) -> Vec<u8>>(encoder: &F, test_case: &AluTestCase) { if test_case.is_applicable_for_a() { test_adder(R::A, encoder, test_case) } for &r in &[R::B, R::C, R::D, R::E, R::H, R::L] { test_adder(r, encoder, test_case) } } fn test_adder<F: Fn(R) -> Vec<u8>>(r: R, encoder: &F, test_case: &AluTestCase) { let mut cpu = Cpu::default(); cpu.data.a = test_case.input.x; cpu.data.write(r, test_case.input.y); cpu.data.f.cy = test_case.input.carry_in; cpu.test_simple_instr(&encoder(r), &[]); assert_eq!(cpu.data.a, test_case.expected.result); assert_eq!(cpu.data.f, test_case.expected.flags) } struct AluTestCase { input: AluInput, expected: AluOutput, } struct AluInput { x: u8, y: u8, carry_in: bool, } impl AluTestCase { fn is_applicable_for_a(&self) -> bool { self.input.x == self.input.y } } const ADDITION_TEST_CASES: &[AluTestCase] = &[ AluTestCase { input: AluInput { x: 0x08, y: 0x08, carry_in: false, }, expected: AluOutput { result: 0x10, flags: flags!(h), }, }, AluTestCase { input: AluInput { x: 0x80, y: 0x80, carry_in: false, }, expected: AluOutput { result: 0x00, flags: flags!(z, cy), }, }, AluTestCase { input: AluInput { x: 0x12, y: 0x34, carry_in: false, }, expected: AluOutput { result: 0x46, flags: flags!(), }, }, AluTestCase { input: AluInput { x: 0x0f, y: 0x01, carry_in: false, }, expected: AluOutput { result: 0x10, flags: flags!(h), }, }, AluTestCase { input: AluInput { x: 0xf0, y: 0xf0, carry_in: false, }, expected: AluOutput { result: 0xe0, flags: flags!(cy), }, }, AluTestCase { input: AluInput { x: 0xf0, y: 0x10, carry_in: false, }, expected: AluOutput { result: 0x00, flags: flags!(z, cy), }, }, AluTestCase { input: AluInput { x: 0xff, y: 0x00, carry_in: true, }, expected: AluOutput { result: 0x00, flags: flags!(z, h, cy), }, }, ]; #[test] fn sub_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x07; cpu.test_simple_instr(&encode_sub_r(R::A), &[]); assert_eq!(cpu.data.a, 0); assert_eq!(cpu.data.f, flags!(z, n)) } #[test] fn sub_b() { let mut cpu = Cpu::default(); cpu.data.b = 0x01; cpu.test_simple_instr(&encode_sub_r(R::B), &[]); assert_eq!(cpu.data.a, 0xff); assert_eq!(cpu.data.f, flags!(n, h, cy)) } #[test] fn sub_c() { let mut cpu = Cpu::default(); cpu.data.c = 0x10; cpu.test_simple_instr(&encode_sub_r(R::C), &[]); assert_eq!(cpu.data.a, 0xf0); assert_eq!(cpu.data.f, flags!(n, cy)) } 
#[test] fn sub_d() { let mut cpu = Cpu::default(); cpu.data.a = 0x10; cpu.data.d = 0x01; cpu.test_simple_instr(&encode_sub_r(R::D), &[]); assert_eq!(cpu.data.a, 0x0f); assert_eq!(cpu.data.f, flags!(n, h)) } fn encode_sub_r(r: R) -> Vec<u8> { vec![0b10_010_000 | r.code()] } #[test] fn sub_n() { let mut cpu = Cpu::default(); cpu.data.a = 0x07; cpu.test_simple_instr(&[0b11_010_110, 0x05], &[]); assert_eq!(cpu.data.a, 0x02); assert_eq!(cpu.data.f, flags!(n)) } #[test] fn sbc_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x07; cpu.test_simple_instr(&encode_sbc_r(R::A), &[]); assert_eq!(cpu.data.a, 0); assert_eq!(cpu.data.f, flags!(z, n)) } #[test] fn sbc_b() { let mut cpu = Cpu::default(); cpu.data.a = 0x07; cpu.data.b = 0x07; cpu.data.f.cy = true; cpu.test_simple_instr(&encode_sbc_r(R::B), &[]); assert_eq!(cpu.data.a, 0xff); assert_eq!(cpu.data.f, flags!(n, h, cy)) } fn encode_sbc_r(r: R) -> Vec<u8> { vec![0b10_011_000 | r.code()] } #[test] fn and_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x42; cpu.test_simple_instr(&encode_and_r(R::A), &[]); assert_eq!(cpu.data.a, 0x42); assert_eq!(cpu.data.f, flags!(h)) } #[test] fn and_b() { let mut cpu = Cpu::default(); cpu.data.a = 0x0f; cpu.data.b = 0x55; cpu.test_simple_instr(&encode_and_r(R::B), &[]); assert_eq!(cpu.data.a, 0x05); assert_eq!(cpu.data.f, flags!(h)) } #[test] fn and_c() { let mut cpu = Cpu::default(); cpu.data.a = 0x0f; cpu.data.b = 0xf0; cpu.test_simple_instr(&encode_and_r(R::C), &[]); assert_eq!(cpu.data.a, 0x00); assert_eq!(cpu.data.f, flags!(z, h)) } fn encode_and_r(r: R) -> Vec<u8> { vec![0b10_100_000 | r.code()] } #[test] fn xor_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x42; cpu.test_simple_instr(&encode_xor_r(R::A), &[]); assert_eq!(cpu.data.a, 0x00); assert_eq!(cpu.data.f, flags!(z)) } #[test] fn xor_b() { let mut cpu = Cpu::default(); cpu.data.a = 0x55; cpu.data.b = 0xaa; cpu.test_simple_instr(&encode_xor_r(R::B), &[]); assert_eq!(cpu.data.a, 0xff); assert_eq!(cpu.data.f, flags!()) } fn encode_xor_r(r: R) -> Vec<u8> { vec![0b10_101_000 | r.code()] } #[test] fn or_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x55; cpu.test_simple_instr(&encode_or_r(R::A), &[]); assert_eq!(cpu.data.a, 0x55); assert_eq!(cpu.data.f, flags!()) } #[test] fn or_b() { let mut cpu = Cpu::default(); cpu.data.a = 0x05; cpu.data.b = 0x55; cpu.test_simple_instr(&encode_or_r(R::B), &[]); assert_eq!(cpu.data.a, 0x55); assert_eq!(cpu.data.f, flags!()) } #[test] fn or_c() { let mut cpu = Cpu::default(); cpu.data.a = 0x05; cpu.data.c = 0x54; cpu.test_simple_instr(&encode_or_r(R::C), &[]); assert_eq!(cpu.data.a, 0x55); assert_eq!(cpu.data.f, flags!()) } #[test] fn or_d() { let mut cpu = Cpu::default(); cpu.test_simple_instr(&encode_or_r(R::D), &[]); assert_eq!(cpu.data.a, 0x00); assert_eq!(cpu.data.f, flags!(z)) } fn encode_or_r(r: R) -> Vec<u8> { vec![0b10_110_000 | r.code()] } #[test] fn cp_a() { let mut cpu = Cpu::default(); cpu.data.a = 0x07; cpu.test_simple_instr(&encode_cp_r(R::A), &[]); assert_eq!(cpu.data.a, 0x07); assert_eq!(cpu.data.f, flags!(z, n)) } #[test] fn cp_b() { let mut cpu = Cpu::default(); cpu.data.b = 0x01; cpu.test_simple_instr(&encode_cp_r(R::B), &[]); assert_eq!(cpu.data.a, 0x00); assert_eq!(cpu.data.f, flags!(n, h, cy)) } #[test] fn cp_c() { let mut cpu = Cpu::default(); cpu.data.c = 0x10; cpu.test_simple_instr(&encode_cp_r(R::C), &[]); assert_eq!(cpu.data.a, 0x00); assert_eq!(cpu.data.f, flags!(n, cy)) } #[test] fn cp_d() { let mut cpu = Cpu::default(); cpu.data.a = 0x10; cpu.data.d = 0x01; 
cpu.test_simple_instr(&encode_cp_r(R::D), &[]); assert_eq!(cpu.data.a, 0x10); assert_eq!(cpu.data.f, flags!(n, h)) } fn encode_cp_r(r: R) -> Vec<u8> { vec![0b10_111_000 | r.code()] } #[test] fn inc_a() { test_inc_r(R::A) } #[test] fn inc_b() { test_inc_r(R::B) } #[test] fn inc_c() { test_inc_r(R::C) } #[test] fn inc_d() { test_inc_r(R::D) } #[test] fn inc_e() { test_inc_r(R::E) } #[test] fn inc_h() { test_inc_r(R::H) } #[test] fn inc_l() { test_inc_r(R::L) } fn test_inc_r(r: R) { let mut cpu = Cpu::default(); cpu.data.write(r, 0xff); cpu.test_simple_instr(&encode_inc_r(r), &[]); assert_eq!(cpu.data.read(r), 0x00); assert_eq!(cpu.data.f, flags!(z, h)) } fn encode_inc_r(r: R) -> Vec<u8> { vec![0b00_000_100 | r.code() << 3] } #[test] fn inc_deref_hl() { let mut cpu = Cpu::default(); cpu.data.h = 0x12; cpu.data.l = 0x34; cpu.test_simple_instr( &[0b00_110_100], &[ (input!(), output!(bus: bus_read(0x1234))), (input!(data: 0x01), output!()), (input!(), output!(bus: bus_write(0x1234, 0x02))), (input!(), output!()), ], ); assert_eq!(cpu.data.f, flags!()) }
true
d83bb7b6361595ea4c3013a70a702fd64c7a86bd
Rust
filipstefansson/cargo-semver
/tests/integration.rs
UTF-8
4,636
2.765625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use assert_cmd::prelude::*;
use predicates::prelude::*;
use std::process::Command;
use std::{fs, io::Write};
use tempfile::NamedTempFile;

#[cfg(test)]
fn setup_command(file: &mut NamedTempFile, version: &str, command: Vec<&str>) -> (Command, String) {
    writeln!(
        file,
        "[package]\nversion = \"{}\"\n\n[dependencies]\nversion = \"{}\"",
        version, version,
    )
    .unwrap();
    let path = file.path().to_str().unwrap();

    let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap();
    cmd.args(vec!["semver"]);
    cmd.arg("--config").arg(path);
    cmd.args(command);

    (cmd, path.to_string())
}

#[test]
fn get() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["get"]);
    cmd.assert().success().stdout("1.0.0\n");

    let contains = predicate::str::contains("1.0.0");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn patch() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "patch"]);
    cmd.assert().success().stdout("1.0.1\n");

    let contains = predicate::str::contains("1.0.1");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn minor() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "minor"]);
    cmd.assert().success().stdout("1.1.0\n");

    let contains = predicate::str::contains("1.1.0");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn major() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "major"]);
    cmd.assert().success().stdout("2.0.0\n");

    let contains = predicate::str::contains("2.0.0");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn major_pre() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "major", "alpha"]);
    cmd.assert().success().stdout("2.0.0-alpha.1\n");

    let contains = predicate::str::contains("2.0.0-alpha.1");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn pre() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "pre", "alpha"]);
    cmd.assert().success().stdout("1.0.0-alpha.1\n");

    let contains = predicate::str::contains("1.0.0-alpha.1");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));

    // run again without `alpha`
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "pre"]);
    cmd.assert().success().stdout("1.0.0-alpha.2\n");

    let contains = predicate::str::contains("1.0.0-alpha.2");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));

    // change to `beta`
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "pre", "beta"]);
    cmd.assert().success().stdout("1.0.0-beta.1\n");

    let contains = predicate::str::contains("1.0.0-beta.1");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn keep_dependency_version() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "major"]);
    cmd.assert().success().stdout("2.0.0\n");

    let contains = predicate::str::contains("version = \"2.0.0\"");
    let contains_dep = predicate::str::contains("version = \"1.0.0\"");
    let content = &fs::read_to_string(&path).unwrap();
    assert_eq!(true, contains.eval(&content));
    assert_eq!(true, contains_dep.eval(&content));
}

#[test]
fn bad_input() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["set", "1.0"]);
    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("expected more input"));

    let contains = predicate::str::contains("1.0.0");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}

#[test]
fn missing_pre_version() {
    let mut file = NamedTempFile::new().unwrap();
    let (mut cmd, path) = setup_command(&mut file, "1.0.0", vec!["bump", "pre"]);
    cmd.assert().failure().stderr(predicate::str::contains(
        "run `cargo-semver pre [alpha|beta]` first to add a new pre-release version.",
    ));

    let contains = predicate::str::contains("1.0.0");
    assert_eq!(true, contains.eval(&fs::read_to_string(path).unwrap()));
}
true
9ce84b57c8611f263555c04a4d81042f2a19057d
Rust
antbern/dsaclk
/firmware/src/panel.rs
UTF-8
10,707
2.890625
3
[]
no_license
#![allow(dead_code)] use crate::display::Display; use crate::SharedState; const STR_DECIMAL_10: [&str; 10] = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]; const STR_DECIMAL_60: [&str; 60] = [ "00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", ]; pub enum Panels { Time, Alarm, } #[derive(PartialEq)] pub enum CursorState { Off, Underline(u8, u8), Blinking(u8, u8), } pub trait Panel<D: Display> { fn next(&mut self, state: &mut SharedState); fn previous(&mut self, state: &mut SharedState); fn enter(&mut self, state: &mut SharedState); fn leave(&mut self, state: &mut SharedState); fn display(&self, disp: &mut D, state: &mut SharedState) -> Result<(), D::Error>; fn get_cursor_state(&self, state: &SharedState) -> CursorState; fn is_editing(&self) -> bool; } pub mod time { use super::{DecF, MonthF, OnOffF, WeekdayF}; use crate::display::Display; use crate::SharedState; use super::CursorState; enum SelectedField { Hour, Minute, Second, Weekday, Day, Month, Year, AlarmHour, AlarmMinute, AlarmEnabled, } pub struct TimePanel { in_edit: bool, selected: SelectedField, } impl TimePanel { pub fn new() -> Self { TimePanel { in_edit: false, selected: SelectedField::Hour, } } } impl<D: Display> crate::panel::Panel<D> for TimePanel { fn enter(&mut self, _state: &mut SharedState) { self.in_edit = !self.in_edit; } fn leave(&mut self, _state: &mut SharedState) { self.in_edit = false; } fn next(&mut self, state: &mut SharedState) { if self.in_edit { match self.selected { SelectedField::Hour => DecF::next(&mut state.clock.hour, 0, 23), SelectedField::Minute => DecF::next(&mut state.clock.minute, 0, 59), SelectedField::Second => DecF::next(&mut state.clock.second, 0, 59), SelectedField::Weekday => WeekdayF::next(&mut state.clock.weekday), SelectedField::Day => DecF::next(&mut state.clock.day, 1, 31), SelectedField::Month => MonthF::next(&mut state.clock.month), SelectedField::Year => DecF::next(&mut state.clock.year, 0, 40), SelectedField::AlarmHour => DecF::next(&mut state.alarm.hour, 0, 23), SelectedField::AlarmMinute => DecF::next(&mut state.alarm.minute, 0, 59), SelectedField::AlarmEnabled => OnOffF::next(&mut state.alarm.enabled), } } else { self.selected = match self.selected { SelectedField::Hour => SelectedField::Minute, SelectedField::Minute => SelectedField::Second, SelectedField::Second => SelectedField::Weekday, SelectedField::Weekday => SelectedField::Day, SelectedField::Day => SelectedField::Month, SelectedField::Month => SelectedField::Year, SelectedField::Year => SelectedField::AlarmHour, SelectedField::AlarmHour => SelectedField::AlarmMinute, SelectedField::AlarmMinute => SelectedField::AlarmEnabled, SelectedField::AlarmEnabled => SelectedField::Hour, } } } fn previous(&mut self, state: &mut SharedState) { if self.in_edit { match self.selected { SelectedField::Hour => DecF::previous(&mut state.clock.hour, 0, 23), SelectedField::Minute => DecF::previous(&mut state.clock.minute, 0, 59), SelectedField::Second => DecF::previous(&mut state.clock.second, 0, 59), SelectedField::Weekday => WeekdayF::previous(&mut state.clock.weekday), SelectedField::Day => DecF::previous(&mut state.clock.day, 1, 31), SelectedField::Month => MonthF::previous(&mut state.clock.month), SelectedField::Year => 
DecF::previous(&mut state.clock.year, 0, 40), SelectedField::AlarmHour => DecF::previous(&mut state.alarm.hour, 0, 23), SelectedField::AlarmMinute => DecF::previous(&mut state.alarm.minute, 0, 59), SelectedField::AlarmEnabled => OnOffF::previous(&mut state.alarm.enabled), } } else { self.selected = match self.selected { SelectedField::Hour => SelectedField::AlarmEnabled, SelectedField::Minute => SelectedField::Hour, SelectedField::Second => SelectedField::Minute, SelectedField::Weekday => SelectedField::Second, SelectedField::Day => SelectedField::Weekday, SelectedField::Month => SelectedField::Day, SelectedField::Year => SelectedField::Month, SelectedField::AlarmHour => SelectedField::Year, SelectedField::AlarmMinute => SelectedField::AlarmHour, SelectedField::AlarmEnabled => SelectedField::AlarmMinute, } } } fn display(&self, disp: &mut D, state: &mut SharedState) -> Result<(), D::Error> { disp.set_cursor_position(0, 0)?; disp.write(b"Time")?; disp.set_cursor_position(0, 7)?; disp.write(DecF::get_str(state.clock.hour, 0, 23).as_bytes())?; disp.write(b":")?; disp.write(DecF::get_str(state.clock.minute, 0, 59).as_bytes())?; disp.write(b":")?; disp.write(DecF::get_str(state.clock.second, 0, 59).as_bytes())?; disp.set_cursor_position(1, 0)?; disp.write(b"Date")?; disp.set_cursor_position(1, 5)?; disp.write(WeekdayF::get_str(state.clock.weekday).as_bytes())?; disp.set_cursor_position(1, 9)?; disp.write(DecF::get_str(state.clock.day, 1, 31).as_bytes())?; disp.set_cursor_position(1, 12)?; disp.write(MonthF::get_str(state.clock.month).as_bytes())?; disp.set_cursor_position(1, 16)?; disp.write(b"20")?; disp.write(DecF::get_str(state.clock.year, 0, 40).as_bytes())?; disp.set_cursor_position(2, 0)?; disp.write(b"Alarm")?; disp.set_cursor_position(2, 7)?; disp.write(DecF::get_str(state.alarm.hour, 0, 23).as_bytes())?; disp.write(b":")?; disp.write(DecF::get_str(state.alarm.minute, 0, 59).as_bytes())?; disp.set_cursor_position(2, 13)?; disp.write(OnOffF::get_str(state.alarm.enabled).as_bytes())?; Ok(()) } fn get_cursor_state(&self, _state: &SharedState) -> CursorState { use SelectedField::*; let row = match self.selected { Hour | Minute | Second => 0, Weekday | Day | Month | Year => 1, AlarmHour | AlarmMinute | AlarmEnabled => 2, }; let col = match self.selected { Hour => 8, Minute => 11, Second => 14, Weekday => 7, Day => 10, Month => 14, Year => 19, AlarmHour => 8, AlarmMinute => 11, AlarmEnabled => 15, }; match self.in_edit { true => CursorState::Blinking(row, col), false => CursorState::Underline(row, col), } } fn is_editing(&self) -> bool { self.in_edit } } } // empty struct only containing static methods for dealing with on/off values struct OnOffF {} impl OnOffF { fn next(state: &mut bool) { *state = !*state } fn previous(state: &mut bool) { *state = !*state } fn get_str(state: bool) -> &'static str { match state { true => " ON", false => "OFF", } } } struct DecF {} impl DecF { fn next(state: &mut u8, min: u8, max: u8) { if *state >= max - 1 { *state = min } else { *state += 1 } } fn previous(state: &mut u8, min: u8, max: u8) { if *state <= min { *state = max } else { *state -= 1 } } fn get_str(state: u8, min: u8, max: u8) -> &'static str { if max < 10 { match state { n if min <= n && n <= max => STR_DECIMAL_10[n as usize], _ => "?", } } else { match state { n if min <= n && n <= max && max < 60 => STR_DECIMAL_60[n as usize], _ => "??", } } } } struct WeekdayF {} impl WeekdayF { fn next(state: &mut u8) { if *state >= 7 { *state = 1 } else { *state += 1 } } fn previous(state: &mut u8) { if 
*state <= 1 { *state = 7 } else { *state -= 1 } } fn get_str(state: u8) -> &'static str { match state { 1 => "MON", 2 => "TUE", 3 => "WED", 4 => "THU", 5 => "FRI", 6 => "SAT", 7 => "SUN", _ => "XXX", } } } struct MonthF {} impl MonthF { fn next(state: &mut u8) { if *state >= 12 { *state = 1 } else { *state += 1 } } fn previous(state: &mut u8) { if *state <= 1 { *state = 12 } else { *state -= 1 } } fn get_str(state: u8) -> &'static str { match state { 1 => "JAN", 2 => "FEB", 3 => "MAR", 4 => "APR", 5 => "MAY", 6 => "JUN", 7 => "JUL", 8 => "AUG", 9 => "SEP", 10 => "OCT", 11 => "NOV", 12 => "DEC", _ => "XXX", } } }
true
a9dc596e10b53919f8c43aac45e790d97931e771
Rust
IThawk/rust-project
/rust-master/src/test/ui/substs-ppaux.rs
UTF-8
1,882
3.015625
3
[ "MIT", "LicenseRef-scancode-other-permissive", "Apache-2.0", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
//
// revisions: verbose normal
//
//[verbose] compile-flags: -Z verbose

trait Foo<'b, 'c, S=u32> {
    fn bar<'a, T>() where T: 'a {}
    fn baz() {}
}

impl<'a,'b,T,S> Foo<'a, 'b, S> for T {}

fn main() {}

fn foo<'z>() where &'z (): Sized {
    let x: () = <i8 as Foo<'static, 'static, u8>>::bar::<'static, char>;
    //[verbose]~^ ERROR mismatched types
    //[verbose]~| expected type `()`
    //[verbose]~| found type `fn() {<i8 as Foo<ReStatic, ReStatic, u8>>::bar::<ReStatic, char>}`
    //[normal]~^^^^ ERROR mismatched types
    //[normal]~| expected type `()`
    //[normal]~| found type `fn() {<i8 as Foo<'static, 'static, u8>>::bar::<'static, char>}`

    let x: () = <i8 as Foo<'static, 'static, u32>>::bar::<'static, char>;
    //[verbose]~^ ERROR mismatched types
    //[verbose]~| expected type `()`
    //[verbose]~| found type `fn() {<i8 as Foo<ReStatic, ReStatic>>::bar::<ReStatic, char>}`
    //[normal]~^^^^ ERROR mismatched types
    //[normal]~| expected type `()`
    //[normal]~| found type `fn() {<i8 as Foo<'static, 'static>>::bar::<'static, char>}`

    let x: () = <i8 as Foo<'static, 'static, u8>>::baz;
    //[verbose]~^ ERROR mismatched types
    //[verbose]~| expected type `()`
    //[verbose]~| found type `fn() {<i8 as Foo<ReStatic, ReStatic, u8>>::baz}`
    //[normal]~^^^^ ERROR mismatched types
    //[normal]~| expected type `()`
    //[normal]~| found type `fn() {<i8 as Foo<'static, 'static, u8>>::baz}`

    let x: () = foo::<'static>;
    //[verbose]~^ ERROR mismatched types
    //[verbose]~| expected type `()`
    //[verbose]~| found type `fn() {foo::<ReStatic>}`
    //[normal]~^^^^ ERROR mismatched types
    //[normal]~| expected type `()`
    //[normal]~| found type `fn() {foo::<'static>}`

    <str as Foo<u8>>::bar;
    //[verbose]~^ ERROR the size for values of type
    //[normal]~^^ ERROR the size for values of type
}
true
90bbc6e14e8178812b4514afdbc40ef79531346a
Rust
geo-engine/geoengine
/operators/src/processing/circle_merging_quadtree/quadtree.rs
UTF-8
12,621
2.78125
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use std::collections::VecDeque; use geoengine_datatypes::primitives::{BoundingBox2D, Circle, Coordinate2D, TimeInterval}; use super::{circle_of_points::CircleOfPoints, circle_radius_model::CircleRadiusModel, node::Node}; #[derive(Debug)] pub struct CircleMergingQuadtree<C: CircleRadiusModel> { head: Box<Node>, circle_radius_model: C, max_items_per_node: usize, } pub struct IntoIter { stack: Vec<Node>, } pub struct Iter<'t> { stack: Vec<&'t Node>, output: Vec<CircleOfPoints>, } pub struct RectangleIter<'a> { stack: VecDeque<&'a Node>, } impl<C> CircleMergingQuadtree<C> where C: CircleRadiusModel, { pub fn new(bounds: BoundingBox2D, circle_radius_model: C, max_items_per_node: usize) -> Self { CircleMergingQuadtree { head: Box::new(Node::new(bounds)), circle_radius_model, max_items_per_node, } } pub fn insert(&mut self, coordinate: &Coordinate2D) { let new_circle = CircleOfPoints::new_with_one_point( Circle::from_coordinate(coordinate, self.circle_radius_model.min_radius()), TimeInterval::default(), Default::default(), // TODO: allow inserting attribute data ); self.insert_circle(new_circle); } /// Use the output of a tree as input to another one. /// /// This is useful to build a lower resolution tree upon existing circles. pub fn insert_tree(&mut self, tree: CircleMergingQuadtree<C>) { for circle in tree { self.insert_circle(circle); } } pub fn insert_circle(&mut self, new_circle: CircleOfPoints) { let mut insert_attempt = self.head.try_insert( new_circle, self.circle_radius_model.delta(), self.max_items_per_node, ); while let Err((circle1, circle2)) = insert_attempt { let mut new_circle = circle1; new_circle.merge(&circle2, &self.circle_radius_model); insert_attempt = self.head.try_insert( new_circle, self.circle_radius_model.delta(), self.max_items_per_node, ); } } pub fn iter_rectangles(&self) -> RectangleIter { let mut stack = VecDeque::new(); stack.push_back(&*self.head); RectangleIter { stack } } } impl<C> IntoIterator for CircleMergingQuadtree<C> where C: CircleRadiusModel, { type Item = CircleOfPoints; type IntoIter = IntoIter; fn into_iter(self) -> Self::IntoIter { IntoIter { stack: vec![*self.head], } } } impl Iterator for IntoIter { type Item = CircleOfPoints; fn next(&mut self) -> Option<Self::Item> { let mut top_node = self.stack.pop(); while let Some(mut node) = top_node { match node.circles.pop() { Some(circle) => { self.stack.push(node); return Some(circle); } None => { if let Some(link) = node.link { self.stack.append(&mut (link as Box<[_]>).into_vec()); } top_node = self.stack.pop(); } } } None } } impl<'t, C> IntoIterator for &'t CircleMergingQuadtree<C> where C: CircleRadiusModel, { type Item = CircleOfPoints; type IntoIter = Iter<'t>; fn into_iter(self) -> Self::IntoIter { Iter { stack: vec![self.head.as_ref()], output: Vec::new(), } } } impl<'t> Iterator for Iter<'t> { type Item = CircleOfPoints; fn next(&mut self) -> Option<Self::Item> { if let Some(circle) = self.output.pop() { return Some(circle); } let mut top_node = self.stack.pop(); while let Some(node) = top_node { self.output.append(&mut node.circles.clone()); if let Some(ref quad_reference) = node.link { self.stack .extend_from_slice(&quad_reference.iter().collect::<Vec<&Node>>()); } if self.output.is_empty() { top_node = self.stack.pop(); } else { return self.output.pop(); } } None } } impl<'a> Iterator for RectangleIter<'a> { type Item = &'a BoundingBox2D; fn next(&mut self) -> Option<Self::Item> { self.stack.pop_front().map(|node| { if let Some(ref link) = node.link { for child_node in link.iter() { 
self.stack.push_back(child_node); } } &node.rectangle }) } } #[cfg(test)] mod test { use crate::processing::circle_merging_quadtree::circle_radius_model::LogScaledRadius; use super::*; use std::{cmp::Ordering, num::NonZeroUsize}; #[test] fn insert_single_item() { let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new( BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(), circle_radius_model, 1, ); let p1 = Coordinate2D::new(0.0, 0.0); tree.insert(&p1); let mut iter = tree.into_iter(); assert_eq!( Some( CircleOfPoints::new( Circle::new(0.0, 0.0, 5.0), 1, TimeInterval::default(), Default::default() ) .unwrap() ), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn insert_two_items_merge() { let points = vec![Coordinate2D::new(50.0, 50.0), Coordinate2D::new(50.0, 50.0)]; let points_size = NonZeroUsize::new(points.len()).unwrap(); let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new( BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(), circle_radius_model, 1, ); for p in &points { tree.insert(p); } let mut iter = tree.into_iter(); assert_eq!( Some( CircleOfPoints::new( Circle::new( 50.0, 50.0, circle_radius_model.calculate_radius(points_size) ), points_size.get(), TimeInterval::default(), Default::default() ) .unwrap() ), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn insert_two_items_merge_with_move() { let points = vec![Coordinate2D::new(48.0, 48.0), Coordinate2D::new(50.0, 50.0)]; let points_size = NonZeroUsize::new(points.len()).unwrap(); let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new( BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(), circle_radius_model, 1, ); for p in &points { tree.insert(p); } let mut iter = tree.into_iter(); assert_eq!( Some( CircleOfPoints::new( Circle::new( 49.0, 49.0, circle_radius_model.calculate_radius(points_size) ), points_size.get(), TimeInterval::default(), Default::default() ) .unwrap() ), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn insert_five_items_merge() { let points = vec![ Coordinate2D::new(20.0, 50.0), Coordinate2D::new(20.0, 50.0), Coordinate2D::new(20.0, 50.0), Coordinate2D::new(20.0, 50.0), Coordinate2D::new(20.0, 50.0), ]; let points_size = points.len(); let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new( BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(), circle_radius_model, 1, ); for p in &points { tree.insert(p); } let mut iter = tree.into_iter(); assert_eq!( Some( CircleOfPoints::new( Circle::new( 20.0, 50.0, circle_radius_model.calculate_radius(NonZeroUsize::new(5).unwrap()) ), points_size, TimeInterval::default(), Default::default() ) .unwrap() ), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn insert_two_items_no_merge() { let points = vec![Coordinate2D::new(50.0, 50.0), Coordinate2D::new(75.0, 75.0)]; let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new( BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(), circle_radius_model, 1, ); for p in &points { tree.insert(p); } let mut results = Vec::with_capacity(2); for circle in tree { results.push(circle); } results.sort_by(|a, b| match a.circle.x().partial_cmp(&b.circle.x()) { 
Some(Ordering::Greater | Ordering::Less) => Ordering::Greater, _ => a .circle .y() .partial_cmp(&b.circle.y()) .unwrap_or(Ordering::Equal), }); let mut iter = results.into_iter(); assert_eq!( Some( CircleOfPoints::new( Circle::new(50.0, 50.0, 5.0), 1, TimeInterval::default(), Default::default() ) .unwrap(), ), iter.next() ); assert_eq!( Some( CircleOfPoints::new( Circle::new(75.0, 75.0, 5.0), 1, TimeInterval::default(), Default::default() ) .unwrap(), ), iter.next() ); assert_eq!(None, iter.next()); } #[test] fn bounding_rectangle() { let bounds = BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(); let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let tree = CircleMergingQuadtree::new(bounds, circle_radius_model, 1); let mut iter = tree.iter_rectangles(); assert_eq!(Some(&bounds), iter.next()); assert_eq!(None, iter.next()); } #[test] fn bounding_rectangle_after_split() { let bounds = BoundingBox2D::new_from_center(Coordinate2D::new(100.0, 100.0), 100.0, 100.0).unwrap(); let points = vec![ Coordinate2D::new(150.0, 150.0), Coordinate2D::new(50.0, 50.0), ]; let circle_radius_model = LogScaledRadius::new(5.0, 1.0).unwrap(); let mut tree = CircleMergingQuadtree::new(bounds, circle_radius_model, 1); for point in &points { tree.insert(point); } let bounding_boxes = tree.iter_rectangles().collect::<Vec<_>>(); assert_eq!( bounding_boxes, vec![ &bounds, &BoundingBox2D::new_from_center(Coordinate2D::new(50.0, 50.0), 50.0, 50.0).unwrap(), &BoundingBox2D::new_from_center(Coordinate2D::new(150.0, 50.0), 50.0, 50.0) .unwrap(), &BoundingBox2D::new_from_center(Coordinate2D::new(50.0, 150.0), 50.0, 50.0) .unwrap(), &BoundingBox2D::new_from_center(Coordinate2D::new(150.0, 150.0), 50.0, 50.0) .unwrap(), ] ); } }
true
081db5b968cf2d178fb3a77e2acfdd8c95e50954
Rust
lukki15/butterfly-effect
/src/main.rs
UTF-8
28,844
2.5625
3
[]
no_license
use bevy::core::FixedTimestep; use bevy::prelude::*; use bevy::render::pass::ClearColor; const SCORE_BOARD_HEIGHT: u32 = 2; const ARENA_HEIGHT: u32 = 16; const ARENA_WIDTH: u32 = 24; const SPRITE_HEIGHT: u32 = 32; const SPRITE_WIDTH: u32 = 32; const WINDOW_HEIGHT: u32 = (ARENA_HEIGHT + SCORE_BOARD_HEIGHT) * SPRITE_HEIGHT; const WINDOW_WIDTH: u32 = ARENA_WIDTH * SPRITE_WIDTH; const MAX_TURNS: u32 = 10; #[derive(SystemLabel, Debug, Hash, PartialEq, Eq, Clone)] pub enum RocketMovement { Input, Movement, Reset, Target, Loading, Path, } #[derive(Default, Copy, Clone, Eq, PartialEq, Hash)] struct Position { x: i32, y: i32, } struct Size { width: f32, height: f32, } impl Size { pub fn square(x: f32) -> Self { Self { width: x, height: x, } } } struct Rocket { direction: Direction, turns_left: u32, } #[derive(Default)] struct RocketPath(Vec<Position>, Vec<Entity>); struct Wall {} struct Target {} struct TargetEvent(); struct FindPathEvent(); struct ResetEvent(); struct NextLevelEvent(); struct GameOverEvent(); #[derive(Default)] struct LevelInfo { current_level: usize, counter_completion: u32, } #[derive(PartialEq, Copy, Clone)] enum Direction { Left, Up, Right, Down, StandStill, } impl Direction { fn opposite(self) -> Self { match self { Self::Left => Self::Right, Self::Right => Self::Left, Self::Up => Self::Down, Self::Down => Self::Up, Self::StandStill => Self::StandStill, } } } fn spawn_wall( commands: &mut Commands, materials: &mut ResMut<Assets<ColorMaterial>>, asset_server: &Res<AssetServer>, wall_position: Position, ) -> Entity { let texture_handle = asset_server.load("LunarLander/Moon Tiles/MoonTile_square.png"); commands .spawn_bundle(SpriteBundle { material: materials.add(texture_handle.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Wall {}) .insert(wall_position) .insert(Size::square(0.9)) .id() } fn _spawn_debris( commands: &mut Commands, materials: &mut ResMut<Assets<ColorMaterial>>, asset_server: &Res<AssetServer>, wall_position: Position, index: usize, ) { let path = format!("LunarLander/Space Background/debris_{}.png", index % 6); let texture_handle = asset_server.load(&path[..]); commands .spawn_bundle(SpriteBundle { material: materials.add(texture_handle.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Wall {}) .insert(wall_position) .insert(Size::square(0.9)); } fn spawn_target( commands: &mut Commands, materials: &mut ResMut<Assets<ColorMaterial>>, wall_position: Position, ) { commands .spawn_bundle(SpriteBundle { material: materials.add(Color::rgb(1.0, 0.8, 0.0).into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Target {}) .insert(wall_position) .insert(Size::square(0.9)); } fn spawn_border( commands: &mut Commands, materials: &mut ResMut<Assets<ColorMaterial>>, asset_server: &Res<AssetServer>, ) { for y in 0..ARENA_HEIGHT as i32 { spawn_wall(commands, materials, asset_server, Position { x: 0, y }); spawn_wall( commands, materials, asset_server, Position { x: ARENA_WIDTH as i32 - 1, y, }, ); } for x in 1..ARENA_WIDTH as i32 - 1 { spawn_wall(commands, materials, asset_server, Position { x, y: 0 }); spawn_wall( commands, materials, asset_server, Position { x, y: ARENA_HEIGHT as i32 - 1, }, ); } } fn load_game_over( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, asset_server: Res<AssetServer>, wall_query: Query<Entity, With<Wall>>, target_query: Query<Entity, 
With<Target>>, mut reader: EventReader<GameOverEvent>, ) { if reader.iter().next().is_some() { // unload all walls for wall in wall_query.iter() { commands.entity(wall).despawn(); } for target in target_query.iter() { commands.entity(target).despawn(); } let game_over_data = vec![ " ".to_string(), " WWW WWW W W WWW ".to_string(), " W W W WW WW W ".to_string(), " W WW WWW W W W WWW ".to_string(), " W W W W W W W ".to_string(), " WWWW W W W W WWW ".to_string(), " ".to_string(), " WWW W W WWW WWW ".to_string(), " W W W W W W W ".to_string(), " W W W W WWW WWW ".to_string(), " W W W W W WW ".to_string(), " WWW WW WWW W W ".to_string(), " ".to_string(), " ".to_string(), ]; load_level_from_data( &mut commands, &mut materials, &asset_server, &game_over_data, ); } } fn load_level( commands: &mut Commands, materials: &mut ResMut<Assets<ColorMaterial>>, asset_server: &Res<AssetServer>, current_level: usize, ) { let level_data_0: Vec<String> = vec![ "WWWWWWWWWWW WWWWWWWWWT".to_string(), "WWWWWWWWWW WWWWWWWW ".to_string(), "WWWWWWWWW ".to_string(), "WWWWWWWWWW WWWWWWWW ".to_string(), "WWWWWWWWWWW WWWWWWWWW ".to_string(), "WWWWWWWWWWW WWWWWWWW ".to_string(), " ".to_string(), " WWWWWWWWW WWWWWWWWWW".to_string(), " WWWWWWWWWW WWWWWWWWWW".to_string(), " WWWWWWWWW WWWWWWWWWW".to_string(), " WWWWWWWWWW".to_string(), " WWWWWWWWW WWWWWWWWWW".to_string(), " WWWWWWWWWW WWWWWWWWWW".to_string(), "SWWWWWWWWWWWWWWWWWWWWW".to_string(), ]; let level_data_1: Vec<String> = vec![ " WWWW T".to_string(), " WWWW WWWW ".to_string(), " WWWW WWWW ".to_string(), " WWWW WWWW ".to_string(), " WWWW WWWW ".to_string(), " WWWW WWWW WWWWW".to_string(), " WWWW WWWW WWWWW".to_string(), " WWWW WWWW WWWWW".to_string(), " WWWW WWWW WWWWW".to_string(), " WWWW WWWW WWWWW".to_string(), " WWWW WWWWW".to_string(), " WWWW WWWWW".to_string(), " WWWW WWWWW".to_string(), "s WWWWW WWWWW".to_string(), ]; let level_data_2: Vec<String> = vec![ " ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWW W WWWWWWWW ".to_string(), " ".to_string(), " WWWWWWWWW T WWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), " WWWWWWWWWW WWWWWWWWW ".to_string(), "S ".to_string(), ]; let level_data_3: Vec<String> = vec![ " WW ".to_string(), " WW ".to_string(), " WTTW ".to_string(), " W W ".to_string(), "W W W W W W".to_string(), " TW W W W W WT ".to_string(), " W W W W WW W W W W ".to_string(), " W WW W W W W WW W ".to_string(), " W WW W W WW W ".to_string(), " W W WW W W ".to_string(), " WW WW ".to_string(), " WWWWW WWWWW ".to_string(), " WW WW ".to_string(), "S ".to_string(), ]; let you_won = vec![ " T".to_string(), " W W WWW W W ".to_string(), " W W W W W W ".to_string(), " WWW W W W W ".to_string(), " W W W W W ".to_string(), " W WWW WWW ".to_string(), " ".to_string(), " ".to_string(), " W W WWW W W ".to_string(), " W W W W WW W ".to_string(), " W W W W W W W ".to_string(), " W W W W W W WW ".to_string(), " W W WWW W W ".to_string(), " ".to_string(), ]; let levels = vec![&level_data_0, &level_data_1, &level_data_2, &level_data_3]; let current_level_data = levels.get(current_level); let level_data: &Vec<String>; if let Some(data) = current_level_data { level_data = data; } else { level_data = &you_won; } load_level_from_data(commands, materials, asset_server, level_data); } fn load_level_from_data( commands: &mut Commands, 
materials: &mut ResMut<Assets<ColorMaterial>>, asset_server: &Res<AssetServer>, level_data: &Vec<String>, ) { for (y, line_data) in level_data.iter().rev().enumerate() { for (x, c) in line_data.chars().enumerate() { let pos = Position { x: x as i32 + 1, y: y as i32 + 1, }; if c == 'W' { spawn_wall(commands, materials, asset_server, pos); } else if c == 'T' { spawn_target(commands, materials, pos); } } } } fn load_next_level( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, asset_server: Res<AssetServer>, wall_query: Query<Entity, With<Wall>>, target_query: Query<Entity, With<Target>>, mut level_info: ResMut<LevelInfo>, mut reader: EventReader<NextLevelEvent>, ) { if reader.iter().next().is_some() { // unload all walls for wall in wall_query.iter() { commands.entity(wall).despawn(); } for target in target_query.iter() { commands.entity(target).despawn(); } level_info.current_level += 1; level_info.counter_completion = 0; spawn_border(&mut commands, &mut materials, &asset_server); load_level( &mut commands, &mut materials, &asset_server, level_info.current_level, ); } } fn setup_scoreboard( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, asset_server: Res<AssetServer>, ) { let control_w = asset_server.load("controls_w.png"); let control_a = asset_server.load("controls_a.png"); let control_s = asset_server.load("controls_s.png"); let control_d = asset_server.load("controls_d.png"); let control_r = asset_server.load("controls_r.png"); commands .spawn_bundle(SpriteBundle { material: materials.add(control_w.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 4 * 3, y: ARENA_HEIGHT as i32 + 1, }) .insert(Size::square(0.8)); commands .spawn_bundle(SpriteBundle { material: materials.add(control_a.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 4 * 3 - 1, y: ARENA_HEIGHT as i32, }) .insert(Size::square(0.8)); commands .spawn_bundle(SpriteBundle { material: materials.add(control_s.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 4 * 3, y: ARENA_HEIGHT as i32, }) .insert(Size::square(0.8)); commands .spawn_bundle(SpriteBundle { material: materials.add(control_d.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 4 * 3 + 1, y: ARENA_HEIGHT as i32, }) .insert(Size::square(0.8)); commands .spawn_bundle(SpriteBundle { material: materials.add(control_r.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 4 * 3 + 2, y: ARENA_HEIGHT as i32 + 1, }) .insert(Size::square(0.8)); commands .spawn_bundle(Text2dBundle { text: Text { sections: vec![TextSection { value: "turns left: ".to_string(), style: TextStyle { font: asset_server.load("fonts/press-start/prstart.ttf"), font_size: 20.0, color: Color::rgb(0.125, 0.164, 0.266), }, }], ..Default::default() }, ..Default::default() }) .insert(Position { x: ARENA_WIDTH as i32 / 3 + 1, y: ARENA_HEIGHT as i32, }); } fn _setup_statusbar( mut commands: Commands, asset_server: Res<AssetServer>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ){ let texture_handle = asset_server.load("status_bar.png"); let texture_atlas = 
TextureAtlas::from_grid(texture_handle, Vec2::new(32.0, 8.0), 4, 1); let texture_atlas_handle = texture_atlases.add(texture_atlas); commands .spawn_bundle(SpriteSheetBundle { texture_atlas: texture_atlas_handle, sprite: TextureAtlasSprite { index: 0, ..Default::default() }, ..Default::default() }).insert(Position { x: ARENA_WIDTH as i32 / 2, y: ARENA_HEIGHT as i32 + 1 , }); } fn setup( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, asset_server: Res<AssetServer>, mut level_info: ResMut<LevelInfo>, ) { commands.spawn_bundle(OrthographicCameraBundle::new_2d()); // Add border walls level_info.current_level = 0; level_info.counter_completion = 0; spawn_border(&mut commands, &mut materials, &asset_server); load_level( &mut commands, &mut materials, &asset_server, level_info.current_level, ); } fn spawn_rocket( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, mut rocket_path: ResMut<RocketPath>, ) { let texture_handle = asset_server.load("LunarLander/Ships/Spaceships_green_4.png"); let start_position = Position { x: 1, y: 1 }; commands .spawn_bundle(SpriteBundle { material: materials.add(texture_handle.into()), sprite: Sprite::new(Vec2::new(SPRITE_WIDTH as f32, SPRITE_HEIGHT as f32)), ..Default::default() }) .insert(Rocket { direction: Direction::StandStill, turns_left: MAX_TURNS, }) .insert(start_position.clone()) .insert(Size::square(0.8)) .id(); rocket_path.0 = vec![start_position.clone()]; rocket_path.1 = vec![]; } fn rocket_movement_input(keyboard_input: Res<Input<KeyCode>>, mut rockets: Query<&mut Rocket>) { let right = keyboard_input.pressed(KeyCode::Right) || keyboard_input.pressed(KeyCode::D); let left = keyboard_input.pressed(KeyCode::Left) || keyboard_input.pressed(KeyCode::A); let up = keyboard_input.pressed(KeyCode::Up) || keyboard_input.pressed(KeyCode::W); let down = keyboard_input.pressed(KeyCode::Down) || keyboard_input.pressed(KeyCode::S); if let Some(mut rocket) = rockets.iter_mut().next() { if rocket.turns_left <= 0 { return; } let old_dir = rocket.direction.clone(); let dir: Direction = if left { Direction::Left } else if down { Direction::Down } else if up { Direction::Up } else if right { Direction::Right } else { rocket.direction }; if dir != rocket.direction.opposite() { rocket.direction = dir; } if rocket.direction != old_dir { rocket.turns_left -= 1; } } } fn rocket_movement( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, mut rocket_query: Query<(&Rocket, &mut Position)>, collider_query: Query<&Transform, With<Wall>>, target_query: Query<&Transform, With<Target>>, windows: Res<Windows>, mut target_writer: EventWriter<TargetEvent>, mut rocket_path: ResMut<RocketPath>, asset_server: Res<AssetServer>, mut level_info: ResMut<LevelInfo>, ) { let window = windows.get_primary().unwrap(); if let Some((rocket, mut rocket_pos)) = rocket_query.iter_mut().next() { let mut next_position = rocket_pos.clone(); match &rocket.direction { Direction::Left => { if rocket_pos.x > 0 { next_position.x -= 1; } } Direction::Right => { if rocket_pos.x < ARENA_WIDTH as i32 - 1 { next_position.x += 1; } } Direction::Up => { if rocket_pos.y < ARENA_HEIGHT as i32 - 1 { next_position.y += 1; } } Direction::Down => { if rocket_pos.y > 0 { next_position.y -= 1; } } Direction::StandStill => {} }; let mut no_collision = true; for wall_transform in collider_query.iter() { let next_transform_x = convert_x(next_position.x, window.width()); let next_transform_y = convert_y(next_position.y, window.height()); if 
wall_transform.translation.x == next_transform_x && wall_transform.translation.y == next_transform_y { no_collision = false; } } if no_collision && rocket.direction != Direction::StandStill { rocket_pos.x = next_position.x; rocket_pos.y = next_position.y; rocket_path.0.push(rocket_pos.clone()); if rocket_path.0.len() >= 3 { let previous = rocket_path.0[rocket_path.0.len() - 3]; let middle = rocket_path.0[rocket_path.0.len() - 2]; let next = rocket_path.0[rocket_path.0.len() - 1]; if previous.x != next.x && previous.y != next.y { rocket_path.1.push(spawn_wall( &mut commands, &mut materials, &asset_server, middle, )); } } } for target_transform in target_query.iter() { let next_transform_x = convert_x(next_position.x, window.width()); let next_transform_y = convert_y(next_position.y, window.height()); if target_transform.translation.x == next_transform_x && target_transform.translation.y == next_transform_y { level_info.counter_completion += 1; target_writer.send(TargetEvent {}); } } } } fn size_scaling(windows: Res<Windows>, mut q: Query<(&Size, &mut Sprite)>) { let window = windows.get_primary().unwrap(); for (sprite_size, mut sprite) in q.iter_mut() { sprite.size = Vec2::new( sprite_size.width / ARENA_WIDTH as f32 * window.width() as f32, sprite_size.height / (ARENA_HEIGHT + SCORE_BOARD_HEIGHT) as f32 * window.height() as f32, ); } } fn convert(pos: f32, bound_window: f32, bound_game: f32) -> f32 { let tile_size = bound_window / bound_game; pos / bound_game * bound_window - (bound_window / 2.) + (tile_size / 2.) } fn convert_x(pos: i32, window_width: f32) -> f32 { convert(pos as f32, window_width, ARENA_WIDTH as f32) } fn convert_y(pos: i32, window_height: f32) -> f32 { convert( pos as f32, window_height, (ARENA_HEIGHT + SCORE_BOARD_HEIGHT) as f32, ) } fn position_translation(windows: Res<Windows>, mut q: Query<(&Position, &mut Transform)>) { let window = windows.get_primary().unwrap(); for (pos, mut transform) in q.iter_mut() { transform.translation = Vec3::new( convert_x(pos.x, window.width()), convert_y(pos.y, window.height()), 0.0, ); } } fn rotation_translation(mut q: Query<(&mut Transform, &Rocket)>) { for (mut transform, rocket) in q.iter_mut() { transform.rotation = match rocket.direction { Direction::Right => Quat::from_rotation_z(0.0), Direction::Down => Quat::from_rotation_z(-std::f32::consts::PI * 0.5), Direction::Left => Quat::from_rotation_z(std::f32::consts::PI), Direction::Up => Quat::from_rotation_z(std::f32::consts::PI * 0.5), _ => Quat::from_rotation_z(0.0), }; } } fn reached_target( mut reader: EventReader<TargetEvent>, mut rocket_query: Query<(&mut Rocket, &mut Position)>, mut segments: ResMut<RocketPath>, mut next_level_writer: EventWriter<NextLevelEvent>, level_info: Res<LevelInfo>, mut find_path_event: EventWriter<FindPathEvent>, ) { if reader.iter().next().is_some() { //TODO: update score if let Some((mut rocket, mut rocket_pos)) = rocket_query.iter_mut().next() { rocket.direction = Direction::StandStill; rocket.turns_left = MAX_TURNS; rocket_pos.x = 1; rocket_pos.y = 1; segments.0.clear(); segments.0.push(rocket_pos.clone()); segments.1.clear(); if level_info.counter_completion > 2 { next_level_writer.send(NextLevelEvent {}); } else { find_path_event.send(FindPathEvent {}); } } } } fn scoreboard_system(mut rocket_query: Query<&Rocket>, mut query: Query<&mut Text>) { if let Some(rocket) = rocket_query.iter_mut().next() { let mut text = query.single_mut().unwrap(); text.sections[0].value = format!("turns left: {}", rocket.turns_left); } } fn 
reset_input(keyboard_input: Res<Input<KeyCode>>, mut reset_writer: EventWriter<ResetEvent>) { if keyboard_input.pressed(KeyCode::R) { reset_writer.send(ResetEvent {}); } } fn reset_last_one( mut commands: Commands, mut reader: EventReader<ResetEvent>, mut target_writer: EventWriter<TargetEvent>, mut rocket_path: ResMut<RocketPath>, ) { if reader.iter().next().is_some() { for i in 0..rocket_path.1.len() { let wall = rocket_path.1[i]; commands.entity(wall).despawn(); } rocket_path.1.clear(); target_writer.send(TargetEvent {}); } } use petgraph::algo::dijkstra; use petgraph::graph::{NodeIndex, UnGraph}; fn path_finder( wall_query: Query<&Position, With<Wall>>, target_query: Query<&Position, With<Target>>, mut reader: EventReader<FindPathEvent>, mut game_over_writer: EventWriter<GameOverEvent>, ) { if reader.iter().next().is_some() { for target_position in target_query.iter() { let target_node: u32 = target_position.x as u32 * ARENA_WIDTH + target_position.y as u32; let mut array = [[true; ARENA_HEIGHT as usize]; ARENA_WIDTH as usize]; for wall_position in wall_query.iter() { array[wall_position.x as usize][wall_position.y as usize] = false; } let mut edges: Vec<(u32, u32)> = vec![]; for x in 1..ARENA_WIDTH - 1 { for y in 1..ARENA_HEIGHT - 1 { if array[x as usize][y as usize] && array[x as usize + 1][y as usize] { edges.push((x * ARENA_WIDTH + y, (x + 1) * ARENA_WIDTH + y)); } if array[x as usize][y as usize] && array[x as usize][y as usize + 1] { edges.push((x * ARENA_WIDTH + y, x * ARENA_WIDTH + y + 1)); } } } let g = UnGraph::<i32, ()>::from_edges(&edges); // Find the shortest path from source to tarte using `1` as the cost for every edge. let node_map = dijkstra( &g, (ARENA_WIDTH + 1).into(), Some(target_node.into()), |_| 1, ); if node_map.contains_key(&NodeIndex::new(target_node as usize)) { return; } } game_over_writer.send(GameOverEvent {}); } } #[cfg(not(target_arch = "wasm32"))] fn audio_system( asset_server: Res<AssetServer>, audio: Res<Audio>, ) { let music_handle = asset_server.load("parallel_universes.mp3"); audio.play(music_handle); } fn main() { let mut app = App::build(); app.insert_resource(ClearColor(Color::rgb(0.04, 0.04, 0.04))) .insert_resource(WindowDescriptor { title: "BUTTERFLY EFFECT".to_string(), width: WINDOW_WIDTH as f32, height: WINDOW_HEIGHT as f32, ..Default::default() }) .insert_resource(RocketPath::default()) .insert_resource(LevelInfo::default()) .add_startup_system(setup.system()) .add_startup_system(setup_scoreboard.system()) .add_startup_stage("game_setup", SystemStage::single(spawn_rocket.system())) .add_system(scoreboard_system.system()) .add_system( rocket_movement_input .system() .label(RocketMovement::Input) .before(RocketMovement::Movement), ) .add_system_set( SystemSet::new() .with_run_criteria(FixedTimestep::step(0.10)) .with_system(rocket_movement.system().label(RocketMovement::Movement)), ) .add_system( reached_target .system() .label(RocketMovement::Target) .after(RocketMovement::Movement), ) .add_system( reset_input .system() .label(RocketMovement::Reset) .after(RocketMovement::Movement), ) .add_system(reset_last_one.system().after(RocketMovement::Reset)) .add_system( load_next_level .system() .label(RocketMovement::Loading) .after(RocketMovement::Target), ) .add_system( path_finder .system() .label(RocketMovement::Path) .after(RocketMovement::Loading) .after(RocketMovement::Reset), ) .add_system(load_game_over.system().after(RocketMovement::Path)) .add_system_set_to_stage( CoreStage::PostUpdate, SystemSet::new() 
.with_system(position_translation.system()) .with_system(rotation_translation.system()) .with_system(size_scaling.system()), ) .add_event::<TargetEvent>() .add_event::<ResetEvent>() .add_event::<NextLevelEvent>() .add_event::<FindPathEvent>() .add_event::<GameOverEvent>() .add_plugins(DefaultPlugins); #[cfg(target_arch = "wasm32")] app.add_plugin(bevy_webgl2::WebGL2Plugin); #[cfg(not(target_arch = "wasm32"))] app.add_startup_system(audio_system.system()); app.run(); }
true
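The Bevy game above converts integer grid positions into window pixel coordinates through its convert helper before rendering. A standalone, dependency-free sketch of that arithmetic follows; the 10-tile arena and 500 px window are made-up numbers for illustration, not values from the game.

// Maps a tile index in [0, bound_game) to a pixel coordinate centered on that tile,
// with the window origin in the middle of the screen (as a 2D orthographic camera assumes).
fn convert(pos: f32, bound_window: f32, bound_game: f32) -> f32 {
    let tile_size = bound_window / bound_game;
    pos / bound_game * bound_window - (bound_window / 2.) + (tile_size / 2.)
}

fn main() {
    // Hypothetical 10-tile arena rendered into a 500 px wide window.
    let (window, arena) = (500.0_f32, 10.0_f32);
    // Tile 0 sits half a tile right of the left edge: -250 + 25 = -225.
    assert!((convert(0.0, window, arena) - (-225.0)).abs() < 1e-3);
    // The middle tile of an even-sized arena is half a tile right of center.
    assert!((convert(5.0, window, arena) - 25.0).abs() < 1e-3);
    // The last tile mirrors the first.
    assert!((convert(9.0, window, arena) - 225.0).abs() < 1e-3);
    println!("grid-to-window conversion checks passed");
}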
d3fe7d1f3654674ee6a5d7a2194e80c0b52f287e
Rust
mztikk/rget
/src/main.rs
UTF-8
3,469
3.09375
3
[]
no_license
use indicatif::{HumanBytes, ProgressBar, ProgressStyle}; use reqwest::blocking::Response; use std::fs::File; use structopt::StructOpt; /// Downloads the given uri #[derive(StructOpt)] struct Cli { /// URI to download uri: String, /// Optional filename, otherwise will be taken from response or uri #[structopt(short = "f", long = "filename")] filename: Option<String>, } fn filename_from_headers(resp: &Response) -> Result<String, String> { match resp.headers().get("Content-Disposition") { Some(header) => match header.to_str() { Ok(content_disposition) => match content_disposition.rfind("filename=") { Some(filename_index) => { Ok(content_disposition[filename_index + "filename=".len()..].to_string()) } None => Err(format!( "Couldn't read filename from Content-Disposition: {}", content_disposition )), }, Err(e) => Err(e.to_string()), }, None => Err("No Content-Disposition Header".to_string()), } } fn filename_from_uri(resp: &Response) -> Result<String, String> { let uri = resp.url().to_string(); match uri.rfind('/') { Some(last_slash) => { let remaining = &uri[last_slash + "/".len()..uri.len()]; if !remaining.is_empty() { Ok(remaining.to_string()) } else { Err(format!("URI has no trailing filename '{}'", uri)) } } None => Err(format!("URI has no trailing filename '{}'", uri)), } } fn write_line(str: String) { println!("{}", str); } fn main() -> Result<(), Box<dyn std::error::Error>> { let args = Cli::from_args(); let uri = if !args.uri.starts_with("http://") && !args.uri.starts_with("https://") { format!("http://{}", args.uri) } else { args.uri.to_string() }; println!("Sending request to '{}'", &uri); match reqwest::blocking::get(&uri) { Ok(mut resp) => { let filename_getters = [filename_from_headers, filename_from_uri]; let filename = args.filename.unwrap_or_else(|| { filename_getters .iter() .find_map(|f| f(&resp).map_err(write_line).ok()) .unwrap_or_else(|| "index.html".to_string()) }); println!("Filename set to: '{}'", filename); let mut file = File::create(filename)?; let n_bytes = resp.content_length().unwrap_or(0); if n_bytes != 0 { println!("Download size is: {}", HumanBytes(n_bytes)); } else { println!("No size for download found"); } let pb = ProgressBar::new(n_bytes); // pb.set_style(ProgressStyle::default_bar().template("{spinner:.green} [{elapsed_precise}] [{wide_bar.cyan/blue}] {bytes}/{total_bytes} ({eta})").progress_chars("#>-")); pb.set_style(ProgressStyle::default_bar() .template("{msg}{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})") .progress_chars("#>-")); std::io::copy(&mut resp, &mut pb.wrap_write(&mut file))?; pb.finish_with_message("downloaded"); } Err(e) => println!("Failed to GET URI: '{}' ({})", uri, e), } Ok(()) }
true
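rget's main resolves the output filename by trying a list of getters in order (Content-Disposition header first, then the URL path) and falling back to index.html. Below is a simplified, network-free sketch of that find_map fallback chain, with plain strings standing in for the reqwest Response; the getter bodies are illustrative approximations, not the crate's exact functions.

// Each getter mimics the shape used above: it either produces a filename or an error string.
fn from_header(resp: &str) -> Result<String, String> {
    resp.rfind("filename=")
        .map(|i| resp[i + "filename=".len()..].to_string())
        .ok_or_else(|| "No Content-Disposition Header".to_string())
}

fn from_uri(resp: &str) -> Result<String, String> {
    match resp.rfind('/') {
        Some(i) if i + 1 < resp.len() => Ok(resp[i + 1..].to_string()),
        _ => Err(format!("URI has no trailing filename '{}'", resp)),
    }
}

fn main() {
    let getters: [fn(&str) -> Result<String, String>; 2] = [from_header, from_uri];
    // Try each getter in order, log failures, and fall back to a default name.
    let resolve = |resp: &str| {
        getters
            .iter()
            .find_map(|f| f(resp).map_err(|e| eprintln!("{}", e)).ok())
            .unwrap_or_else(|| "index.html".to_string())
    };
    assert_eq!(resolve("attachment; filename=report.pdf"), "report.pdf");
    assert_eq!(resolve("https://example.com/archive.tar.gz"), "archive.tar.gz");
    assert_eq!(resolve("https://example.com/"), "index.html");
}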
4b5175cda0479cf71f53e0cb268d603fa190da5a
Rust
k124k3n/competitive-programming-answer
/leetcode/minimum-index-sum-of-two-lists.rs
UTF-8
802
2.625
3
[ "MIT" ]
permissive
impl Solution {
    pub fn find_restaurant(list1: Vec<String>, list2: Vec<String>) -> Vec<String> {
        let mut uni: std::collections::HashMap<String, (usize, usize)> =
            std::collections::HashMap::new();
        for (i, i_) in list1.iter().enumerate() {
            for (j, j_) in list2.iter().enumerate() {
                if i_ == j_ {
                    uni.insert(j_.clone(), (i, j));
                }
            }
        }
        let mut min = std::usize::MAX;
        let mut name = Vec::new();
        for (k, v) in uni.iter() {
            if v.0 + v.1 < min {
                min = v.0 + v.1;
            }
        }
        for (k, v) in uni.iter() {
            if v.0 + v.1 == min {
                name.push(k.to_string());
            }
        }
        name
    }
}
true
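The accepted submission above matches the two lists with a nested loop. The same least-index-sum idea can also be expressed with a single HashMap over the first list; the sketch below is a free-standing variant for comparison, not the LeetCode submission itself.

use std::collections::HashMap;

fn find_restaurant(list1: &[&str], list2: &[&str]) -> Vec<String> {
    // Map each restaurant in list1 to its index for O(1) lookups.
    let index1: HashMap<&str, usize> = list1.iter().enumerate().map(|(i, s)| (*s, i)).collect();
    let mut best = usize::MAX;
    let mut names = Vec::new();
    for (j, name) in list2.iter().enumerate() {
        if let Some(&i) = index1.get(name) {
            let sum = i + j;
            if sum < best {
                best = sum;
                names = vec![name.to_string()];
            } else if sum == best {
                names.push(name.to_string());
            }
        }
    }
    names
}

fn main() {
    let a = ["Shogun", "Tapioca Express", "Burger King", "KFC"];
    let b = ["KFC", "Shogun", "Burger King"];
    assert_eq!(find_restaurant(&a, &b), vec!["Shogun".to_string()]);
}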
bb016c819b939dd1e84a47db9f86e2766a6926ff
Rust
rustbunker/mme
/src/user/test.rs
UTF-8
2,804
2.96875
3
[ "MIT" ]
permissive
use super::*; use std::io::{Error, ErrorKind}; const INPUT_BLUE_FIXED_COLOR: &str = "blue"; const INPUT_GREEN_FIXED_COLOR: &str = "green"; const INPUT_WHITE_TEXT_COLOR: &str = "white"; const INPUT_BLACK_TEXT_COLOR: &str = "black"; const INPUT_YELLOW_HIGH_COLOR: &str = "yellow"; const INPUT_CYAN_HIGH_COLOR: &str = "cyan"; const INPUT_HOME_PATH: &str = "~"; const INPUT_ROOT_PATH: &str = "/"; #[test] fn no_previous_preferences_no_path() { assert_eq!( Config::get_preferences( Err(PreferencesError::Io(Error::new(ErrorKind::Other, "oh no!"))), "" ), Err("Path not configured yet".to_string()) ); } #[test] fn no_previous_preferences_no_path_but_colors() { let mut preferences_with_colors: HashMap<String, String> = HashMap::new(); preferences_with_colors.insert(FIXED_COLOR.to_string(), INPUT_BLUE_FIXED_COLOR.to_string()); assert_eq!( Config::get_preferences(Ok(preferences_with_colors), ""), Err("Path not configured yet, you also need a path".to_string()) ); } #[test] fn change_all_configurations() { let mut all_preferences: HashMap<String, String> = HashMap::new(); all_preferences.insert(PATH.to_string(), INPUT_ROOT_PATH.to_string()); all_preferences.insert(FIXED_COLOR.to_string(), INPUT_BLUE_FIXED_COLOR.to_string()); all_preferences.insert(TEXT_COLOR.to_string(), INPUT_WHITE_TEXT_COLOR.to_string()); all_preferences.insert(HIGH_COLOR.to_string(), INPUT_YELLOW_HIGH_COLOR.to_string()); Config::change_preference_if_new(&INPUT_HOME_PATH.to_string(), &mut all_preferences, PATH); Config::change_preference_if_new( &INPUT_GREEN_FIXED_COLOR.to_string(), &mut all_preferences, FIXED_COLOR, ); Config::change_preference_if_new( &INPUT_BLACK_TEXT_COLOR.to_string(), &mut all_preferences, TEXT_COLOR, ); Config::change_preference_if_new( &INPUT_CYAN_HIGH_COLOR.to_string(), &mut all_preferences, HIGH_COLOR, ); assert_eq!( all_preferences.get(PATH), Some(&INPUT_HOME_PATH.to_string()) ); assert_eq!( all_preferences.get(FIXED_COLOR), Some(&INPUT_GREEN_FIXED_COLOR.to_string()) ); assert_eq!( all_preferences.get(TEXT_COLOR), Some(&INPUT_BLACK_TEXT_COLOR.to_string()) ); assert_eq!( all_preferences.get(HIGH_COLOR), Some(&INPUT_CYAN_HIGH_COLOR.to_string()) ); } #[test] fn change_path_configuration_by_creation() { let mut path_preferences: HashMap<String, String> = HashMap::new(); Config::change_preference_if_new(&INPUT_HOME_PATH.to_string(), &mut path_preferences, PATH); assert_eq!( path_preferences.get(PATH), Some(&INPUT_HOME_PATH.to_string()) ); }
true
d924757dc59e70c193b5787c38edd5bc6f880b89
Rust
pradovic/fastuuid-rs
/src/lib.rs
UTF-8
9,567
3.5625
4
[ "MIT" ]
permissive
//!`fastuuid` provides fast UUID generation of guessable and unique 192-bit universally unique identifiers and simple support for 128-bit RFC-4122 V4 UUID. //!Generated UUIDs are not unguessable as every generated UUID is adjacent to the previously generated UUID. //! //!It avoids generating reading 192 bit from rand on each UUID generation, and offers a API to fetch 128-bit string reference, //!with or without allocating a new heap string object as well, with both safe and unsafe versions of the same function. //! //!Benchmarks are included. On my machine generation of a 192-bit UUID takes ~7n, while generating the 128-bit string //!without and with additional heap allocation (unsafe version) takes <20ns & ~95ns respectively. Safe versions take additional ~10ns. //! //!It can be depended on with: //! //!```toml //![dependencies] //!fastuuid = "0.3.0" //!``` //! //!## Examples //!#### 192-bit UUID //!```rust //!use fastuuid::Generator; //! //!fn main() { //! let generator = Generator::new(); //! let uuid:[u8;24] = generator.next(); //!} //!``` //! //!#### 128-bit UUID //!```rust //!// with new string allocation //!use fastuuid::Generator; //! fn main() { //! let generator = Generator::new(); //! let uuid = generator.hex128_as_string().unwrap(); //!} //!``` //!```rust //! // without new string allocation //!use fastuuid::Generator; //! //!fn main() { //! let generator = Generator::new(); //! let mut buffer: [u8; 36] = [0; 36]; //! let uuid = generator.hex128_as_str(&mut buffer).unwrap(); //!} //!``` //! //!Note: there is also an unsafe version of both functions, which uses unsafe cast to string from utf8, making them a bit faster. //!It is ok to use all of those concurrently. extern crate faster_hex; extern crate rand; use rand::Rng; use std::convert::TryInto; use std::error::Error; use std::sync::atomic::{AtomicUsize, Ordering}; // Generator is a uuid generator that generates unique and guessable 192-bit UUIDs, starting from a random sequence. pub struct Generator { // The constant (random) 192-bit seed. // the first 8 bytes are stored in the counter and used for generating new UUIDs seed: [u8; 24], counter: AtomicUsize, } impl Generator { #[allow(dead_code)] pub fn new() -> Generator { let seed = rand::thread_rng().gen::<[u8; 24]>(); Generator { seed, counter: AtomicUsize::new( u64::from_le_bytes(seed[0..8].try_into().unwrap()) .try_into() .unwrap(), ), } } // Next returns the next UUID from the generator. // Only the first 8 bytes differ from the previous one. // It can be used concurrently. 
pub fn next(&self) -> [u8; 24] { let current = self.counter.fetch_add(1, Ordering::SeqCst); let mut uuid: [u8; 24] = Default::default(); uuid[..8].copy_from_slice(&current.to_le_bytes()); uuid[8..].copy_from_slice(&self.seed[8..]); return uuid; } // hex128_as_str returns hex128(Generator::next()) as &str (without heap allocation of the result) pub fn hex128_as_str<'a>(&self, buffer: &'a mut [u8; 36]) -> Result<&'a str, Box<dyn Error>> { match std::str::from_utf8(Generator::hex128_from_bytes(&self.next(), buffer)) { Ok(res) => Ok(res), Err(err) => Err(Box::new(err)), } } // hex128_as_str_unchecked returns hex128(Generator::next()) as &str (without heap allocation of the result) // Uses unsafe cast to string from utf8 pub unsafe fn hex128_as_str_unchecked<'a>(&self, buffer: &'a mut [u8; 36]) -> &'a str { std::str::from_utf8_unchecked(Generator::hex128_from_bytes(&self.next(), buffer)) } // hex128_as_string returns hex128(Generator::next()) as boxed String value pub unsafe fn hex128_as_string_unchecked(&self) -> String { let mut buffer: [u8; 36] = [0; 36]; std::str::from_utf8_unchecked(Generator::hex128_from_bytes(&self.next(), &mut buffer)) .to_owned() } // hex128_as_string returns hex128(Generator::next()) as boxed String value pub fn hex128_as_string(&self) -> Result<String, Box<dyn Error>> { let mut buffer: [u8; 36] = [0; 36]; match std::str::from_utf8(Generator::hex128_from_bytes(&self.next(), &mut buffer)) { Ok(res) => Ok(res.to_owned()), Err(err) => Err(Box::new(err)), } } // Hex128 returns an RFC4122 V4 representation of the // first 128 bits of the given UUID, with hyphens. // // Example: 11febf98-c108-4383-bb1e-739ffcd44341 // // Before encoding, it swaps bytes 6 and 9 // so that all the varying bits of Generator.next() // are reflected in the resulting UUID. // // Note: If you want unpredictable UUIDs, you might want to consider // hashing the uuid (using SHA256, for example) before passing it // to Hex128. fn hex128_from_bytes<'a>(uuid: &[u8; 24], buffer: &'a mut [u8; 36]) -> &'a [u8] { let mut temp_uuid: [u8; 24] = [0; 24]; temp_uuid.copy_from_slice(uuid); temp_uuid.swap(6, 9); // V4 temp_uuid[6] = (temp_uuid[6] & 0x0f) | 0x40; // RFC4122 temp_uuid[8] = temp_uuid[8] & 0x3f | 0x80; faster_hex::hex_encode(&temp_uuid[0..16], &mut buffer[0..32]).unwrap(); buffer.copy_within(20..32, 24); // needs rust stable 1.37.0!! buffer.copy_within(16..20, 19); buffer.copy_within(12..16, 14); buffer.copy_within(8..12, 9); buffer[8] = b'-'; buffer[13] = b'-'; buffer[18] = b'-'; buffer[23] = b'-'; &buffer[..] 
} // Returns true if provided string is a valid 128-bit UUID pub fn is_valid_hex128(uuid: &str) -> bool { let uuid_bytes = uuid.as_bytes(); if uuid.len() != 36 || uuid_bytes.len() != 36 || uuid_bytes[8] != b'-' || uuid_bytes[13] != b'-' || uuid_bytes[18] != b'-' || uuid_bytes[23] != b'-' { return false; } return Generator::valid_hex(&uuid[..8]) && Generator::valid_hex(&uuid[9..13]) && Generator::valid_hex(&uuid[14..18]) && Generator::valid_hex(&uuid[19..23]) && Generator::valid_hex(&uuid[24..]); } fn valid_hex(hex: &str) -> bool { hex.chars() .all(|c| '0' <= c && c <= '9' || 'a' <= c && c <= 'f') } } #[cfg(test)] mod tests { use crate::Generator; use std::thread; use std::collections::HashMap; use std::sync::{RwLock, Arc}; #[test] fn next() { let generator = Generator::new(); let mut first = generator.next(); for _ in 0..10 { let second = generator.next(); assert_eq!( first.len(), second.len(), "Arrays don't have the same length" ); first = second; } } #[test] fn hex128() { let generator = Generator::new(); let mut buffer: [u8; 36] = [0; 36]; assert!( Generator::is_valid_hex128(&generator.hex128_as_str(&mut buffer).unwrap()[..]), "should be valid hex" ); } #[test] fn uniqueness() { let mut uuids: HashMap<String, bool> = HashMap::new(); let generator = Generator::new(); let mut buffer: [u8; 36] = [0; 36]; for _ in 0..100000 { let next = generator.hex128_as_str(&mut buffer).unwrap(); assert!(!uuids.contains_key(&next.to_string()), "duplicate found"); uuids.insert(next.to_string(), true); } } #[test] fn uniqueness_concurrent() { let generator = Arc::new(Generator::new()); let data = Arc::new(RwLock::new(HashMap::new())); let threads: Vec<_> = (0..100) .map(|_| { let data = Arc::clone(&data); let generator = generator.clone(); thread::spawn( move || { let mut map = data.write().unwrap(); map.insert(generator.hex128_as_string().unwrap(), true); })}) .collect(); for t in threads { t.join().expect("Thread panicked"); } let map = data.read().unwrap(); assert_eq!(map.len(), 100, "generated non-unique uuids"); } #[test] fn valid_hex() { // valid v4 uuid assert!( Generator::is_valid_hex128("11febf98-c108-4383-bb1e-739ffcd44341"), "should be valid hex" ); // invalid uuid assert!( !Generator::is_valid_hex128("11febf98-c108-4383-bb1e-739ffcd4434"), "should be invalid hex" ); assert!( !Generator::is_valid_hex128("11febf98-c108-4383-bb1e-739ffcd443412"), "should be invalid hex" ); assert!( !Generator::is_valid_hex128("11febf98c1-08-4383-bb1e-739ffcd44341"), "should be invalid hex" ); assert!( !Generator::is_valid_hex128("11febf98-c1084-383-bb1e-739ffcd44341"), "should be invalid hex" ); assert!( !Generator::is_valid_hex128("11febf98-c108-4383bb-1e-739ffcd44341"), "should be invalid hex" ); assert!( !Generator::is_valid_hex128("11febf98-c108-4383-bb1e7-39ffcd44341"), "should be invalid hex" ); } }
true
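The core of fastuuid is that only the first 8 bytes of each 24-byte UUID change between calls: they come from an atomic counter seeded once from random bytes, while the tail stays fixed. A dependency-free sketch of just that counter mechanism follows; the hard-coded seed stands in for the rand::thread_rng call in the real crate.

use std::sync::atomic::{AtomicU64, Ordering};

struct Generator {
    seed_tail: [u8; 16], // bytes 8..24 never change
    counter: AtomicU64,  // initialised from bytes 0..8 of the seed
}

impl Generator {
    fn new(seed: [u8; 24]) -> Self {
        let mut head = [0u8; 8];
        head.copy_from_slice(&seed[..8]);
        let mut tail = [0u8; 16];
        tail.copy_from_slice(&seed[8..]);
        Generator {
            seed_tail: tail,
            counter: AtomicU64::new(u64::from_le_bytes(head)),
        }
    }

    fn next(&self) -> [u8; 24] {
        // Each call only rewrites the first 8 bytes from the incremented counter.
        let n = self.counter.fetch_add(1, Ordering::SeqCst);
        let mut uuid = [0u8; 24];
        uuid[..8].copy_from_slice(&n.to_le_bytes());
        uuid[8..].copy_from_slice(&self.seed_tail);
        uuid
    }
}

fn main() {
    // Fixed seed only for the demo; the real crate draws it from a CSPRNG.
    let g = Generator::new([7u8; 24]);
    let (a, b) = (g.next(), g.next());
    assert_ne!(a[..8], b[..8]); // counter part differs
    assert_eq!(a[8..], b[8..]); // random tail is shared
}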
8616addd7aa5790e3c1e21ff2cfcf74da9807531
Rust
jamesrweb/rust-coursework
/08 - Structures/06 - Associated Functions/main.rs
UTF-8
276
3.453125
3
[]
no_license
#[derive(Debug)]
struct Rectangle {
    width: u32,
    height: u32
}

impl Rectangle {
    fn build(width: u32, height: u32) -> Rectangle {
        return Rectangle { width: width, height: height };
    }
}

fn main() {
    let rect = Rectangle::build(100, 50);
    println!("{:?}", rect);
}
true
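For contrast with the associated build function above, the same struct could also expose a method that takes &self; the difference is only in how each is called. The area method in this sketch is an illustrative addition, not part of the original exercise.

#[derive(Debug)]
struct Rectangle {
    width: u32,
    height: u32,
}

impl Rectangle {
    // Associated function: no self, called as Rectangle::build(..).
    fn build(width: u32, height: u32) -> Rectangle {
        Rectangle { width, height }
    }

    // Method: takes &self, called on an instance.
    fn area(&self) -> u32 {
        self.width * self.height
    }
}

fn main() {
    let rect = Rectangle::build(100, 50);
    println!("{:?} has area {}", rect, rect.area());
}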
eacc5473bc68bca7355ec52b59602428f37ee98b
Rust
andreyferriyan/pcap-analyzer
/libpcap-analyzer/src/ip6_defrag.rs
UTF-8
3,854
3
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
use pnet_macros_support::types::{u13be, u16be, u32be}; use pnet_packet::ip::IpNextHeaderProtocol; #[derive(PartialEq)] /// A structure enabling manipulation of on the wire packets pub struct IPv6FragmentPacket<'p> { packet: ::pnet_macros_support::packet::PacketData<'p>, } impl<'a> IPv6FragmentPacket<'a> { /// Constructs a new Ipv6Packet. If the provided buffer is less than the minimum required /// packet size, this will return None. #[inline] pub fn new(packet: &[u8]) -> Option<IPv6FragmentPacket> { if packet.len() >= IPv6FragmentPacket::minimum_packet_size() { use ::pnet_macros_support::packet::PacketData; Some(IPv6FragmentPacket { packet: PacketData::Borrowed(packet), }) } else { None } } /// The minimum size (in bytes) a packet of this type can be. It's based on the total size /// of the fixed-size fields. #[inline] pub fn minimum_packet_size() -> usize { 8 } #[inline] #[allow(trivial_numeric_casts)] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] pub fn get_next_header(&self) -> IpNextHeaderProtocol { #[inline(always)] #[allow(trivial_numeric_casts)] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] fn get_arg0(_self: &IPv6FragmentPacket) -> u8 { let co = 0; (_self.packet[co] as u8) } IpNextHeaderProtocol::new(get_arg0(&self)) } /// Get the identification field. This field is always stored big-endian /// within the struct, but this accessor returns host order. #[inline] #[allow(trivial_numeric_casts)] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] pub fn get_identification(&self) -> u32be { let _self = self; let co = 4; let b0 = u32be::from(_self.packet[co]) << 24; let b1 = u32be::from(_self.packet[co + 1]) << 16; let b2 = u32be::from(_self.packet[co + 2]) << 8; let b3 = u32be::from(_self.packet[co + 3]); b0 | b1 | b2 | b3 } /// Get the fragment_offset field. This field is always stored big-endian /// within the struct, but this accessor returns host order. #[inline] #[allow(trivial_numeric_casts)] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] pub fn get_fragment_offset(&self) -> u13be { let _self = self; let co = 2; let b0 = u16be::from(_self.packet[co]) << 8; let b1 = u16::from(_self.packet[co + 1]); ((b0 | b1) >> 3) as u13be } #[inline] #[allow(trivial_numeric_casts)] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] pub fn more_fragments(&self) -> bool { let _self = self; let co = 3; let b0 = _self.packet[co] & 0x1; b0 != 0 } } impl<'a> ::pnet_macros_support::packet::Packet for IPv6FragmentPacket<'a> { #[inline] fn packet(&self) -> &[u8] { &self.packet[..] } #[inline] #[cfg_attr(feature = "clippy", allow(used_underscore_binding))] fn payload(&self) -> &[u8] { let _self = self; let start = 8; let end = _self.packet.len(); if _self.packet.len() <= start { return &[]; } &_self.packet[start..end] } } #[cfg(test)] mod tests { use super::IPv6FragmentPacket; use pnet_packet::ip::IpNextHeaderProtocols; const DATA: &[u8] = b"\x11\x00\x00\x01\xf8\x8e\xb4\x66"; #[test] fn ipv6fragment_test() { let packet = IPv6FragmentPacket::new(DATA).expect("IPv6FragmentPacket"); assert_eq!(packet.get_next_header(), IpNextHeaderProtocols::Udp); assert_eq!(packet.more_fragments(), true); assert_eq!(packet.get_identification(), 0xf88e_b466); } }
true
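The accessors above extract each field of the IPv6 fragment extension header from raw bytes with shifts and masks. The same arithmetic can be checked without pnet, using the very bytes from the module's own unit test; the sketch below is a plain-std reimplementation for illustration only.

// Fragment extension header layout: next-header (1 byte), reserved (1 byte),
// fragment-offset/flags (2 bytes, big-endian, offset in the top 13 bits,
// more-fragments in the lowest bit), identification (4 bytes, big-endian).
fn parse(p: &[u8; 8]) -> (u8, u16, bool, u32) {
    let next_header = p[0];
    let off_flags = u16::from_be_bytes([p[2], p[3]]);
    let fragment_offset = off_flags >> 3;
    let more_fragments = off_flags & 0x1 != 0;
    let identification = u32::from_be_bytes([p[4], p[5], p[6], p[7]]);
    (next_header, fragment_offset, more_fragments, identification)
}

fn main() {
    // Same bytes as the module's unit test.
    let data: [u8; 8] = *b"\x11\x00\x00\x01\xf8\x8e\xb4\x66";
    let (nh, off, more, id) = parse(&data);
    assert_eq!(nh, 17); // 0x11 = UDP
    assert_eq!(off, 0); // first fragment
    assert!(more); // more fragments follow
    assert_eq!(id, 0xf88e_b466);
    println!("fragment header parsed as expected");
}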
ea7fab428ce76c5d6072d8b31bade6dff78db7bc
Rust
lambdabear/ws-2-ch-rs485-hat
/src/lib.rs
UTF-8
1,799
2.96875
3
[]
no_license
use rppal::gpio::{Gpio, OutputPin};
use rppal::uart::{Parity, Queue, Uart};
use std::error::Error;

const CHANNEL1_EN: u8 = 27;
const CHANNEL2_EN: u8 = 22;

pub struct Ws2ChRs485Hat {
    ch_1: Uart,
    ch_2: Uart,
    en1: OutputPin,
    en2: OutputPin,
}

impl Ws2ChRs485Hat {
    pub fn new(ch1_baud_rate: u32, ch2_baud_rate: u32) -> Result<Self, Box<dyn Error>> {
        let ch_1 = Uart::with_path("/dev/ttySC0", ch1_baud_rate, Parity::None, 8, 1)?;
        let ch_2 = Uart::with_path("/dev/ttySC1", ch2_baud_rate, Parity::None, 8, 1)?;
        let mut en1 = Gpio::new()?.get(CHANNEL1_EN)?.into_output();
        let mut en2 = Gpio::new()?.get(CHANNEL2_EN)?.into_output();

        // default: receive enabled (both enable pins low)
        en1.set_low();
        en2.set_low();

        Ok(Self {
            ch_1,
            ch_2,
            en1,
            en2,
        })
    }

    pub fn ch_1_read(&mut self, buffer: &mut [u8]) -> Result<usize, rppal::uart::Error> {
        self.en1.set_low();
        self.ch_1.read(buffer)
    }

    pub fn ch_2_read(&mut self, buffer: &mut [u8]) -> Result<usize, rppal::uart::Error> {
        self.en2.set_low();
        self.ch_2.read(buffer)
    }

    pub fn ch_1_write(&mut self, buffer: &[u8]) -> Result<usize, rppal::uart::Error> {
        self.en1.set_high();
        self.ch_1.write(buffer)
    }

    pub fn ch_2_write(&mut self, buffer: &[u8]) -> Result<usize, rppal::uart::Error> {
        self.en2.set_high();
        self.ch_2.write(buffer)
    }

    pub fn ch_1_flush(&self) -> Result<(), rppal::uart::Error> {
        self.ch_1.flush(Queue::Both)
    }

    pub fn ch_2_flush(&self) -> Result<(), rppal::uart::Error> {
        self.ch_2.flush(Queue::Both)
    }
}

// the tests are in examples
#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {}
}
true
df05103cd5b5e32b318366fdda91cec4869c4db8
Rust
fasidOnGit/rust-first-look
/src/stack_heap.rs
UTF-8
686
3.328125
3
[]
no_license
#![allow(dead_code)]

//! This module demonstrates how values end up on the stack vs. the heap.
//!
//! # Example
//! ```ignore
//! stack_heap::stack_and_heap();
//! ```

use std::mem::size_of_val;

struct Point {
    x: f64,
    y: f64
}

fn origin() -> Point {
    Point{x: 0.0, y:0.0}
}

/// Stack heap!
///
/// In this case it's our `stack_and_heap()` function
pub fn stack_and_heap() {
    let p1 = origin();
    let p2 = Box::new(origin());

    println!("p1 takes up {} bytes", size_of_val(&p1));  // --> stacked value
    println!("p2 takes up {} bytes", size_of_val(&p2));  // --> stacked value (the Box pointer itself)
    println!("p2 takes up {} bytes", size_of_val(&*p2)); // --> heaped value
}
true
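The three sizes printed above are worth pinning down: the stack value is two f64 fields, the Box binding is just a pointer, and dereferencing it reaches the heap allocation. A small check follows; the 8-byte pointer size is an assumption that holds on 64-bit targets.

use std::mem::size_of_val;

struct Point { x: f64, y: f64 }

fn main() {
    let p1 = Point { x: 0.0, y: 0.0 };
    let p2 = Box::new(Point { x: 0.0, y: 0.0 });
    assert_eq!(size_of_val(&p1), 16);  // two f64 fields, stored on the stack
    assert_eq!(size_of_val(&p2), 8);   // the Box itself is one pointer (64-bit targets assumed)
    assert_eq!(size_of_val(&*p2), 16); // dereferencing reaches the 16-byte heap allocation
    let _ = (p1.x, p1.y, p2.x, p2.y);  // read the fields to keep the example warning-free
}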
1a81aab6043a68d83e3ac3c6d16b7182cac246bd
Rust
darksv/libsyntax2
/crates/libsyntax2/tests/test/main.rs
UTF-8
3,785
2.625
3
[ "Apache-2.0", "MIT" ]
permissive
extern crate libsyntax2; #[macro_use] extern crate test_utils; extern crate walkdir; use std::{ fs, path::{Path, PathBuf}, fmt::Write, }; use libsyntax2::{ File, utils::{dump_tree, check_fuzz_invariants}, }; #[test] fn lexer_tests() { dir_tests(&["lexer"], |text| { let tokens = libsyntax2::tokenize(text); dump_tokens(&tokens, text) }) } #[test] fn parser_tests() { dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| { let file = File::parse(text); dump_tree(file.syntax()) }) } #[test] fn parser_fuzz_tests() { for (_, text) in collect_tests(&["parser/fuzz-failures"]) { check_fuzz_invariants(&text) } } /// Read file and normalize newlines. /// /// `rustc` seems to always normalize `\r\n` newlines to `\n`: /// /// ``` /// let s = " /// "; /// assert_eq!(s.as_bytes(), &[10]); /// ``` /// /// so this should always be correct. fn read_text(path: &Path) -> String { fs::read_to_string(path).unwrap().replace("\r\n", "\n") } pub fn dir_tests<F>(paths: &[&str], f: F) where F: Fn(&str) -> String, { for (path, input_code) in collect_tests(paths) { let parse_tree = f(&input_code); let path = path.with_extension("txt"); if !path.exists() { println!("\nfile: {}", path.display()); println!("No .txt file with expected result, creating...\n"); println!("{}\n{}", input_code, parse_tree); fs::write(&path, parse_tree).unwrap(); panic!("No expected result") } let expected = read_text(&path); let expected = expected.as_str(); let parse_tree = parse_tree.as_str(); assert_equal_text(expected, parse_tree, &path); } } const REWRITE: bool = false; fn assert_equal_text(expected: &str, actual: &str, path: &Path) { if expected == actual { return; } let dir = project_dir(); let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); if expected.trim() == actual.trim() { println!("whitespace difference, rewriting"); println!("file: {}\n", pretty_path.display()); fs::write(path, actual).unwrap(); return; } if REWRITE { println!("rewriting {}", pretty_path.display()); fs::write(path, actual).unwrap(); return; } assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); } fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> { paths .iter() .flat_map(|path| { let path = test_data_dir().join(path); test_from_dir(&path).into_iter() }) .map(|path| { let text = read_text(&path); (path, text) }) .collect() } fn test_from_dir(dir: &Path) -> Vec<PathBuf> { let mut acc = Vec::new(); for file in fs::read_dir(&dir).unwrap() { let file = file.unwrap(); let path = file.path(); if path.extension().unwrap_or_default() == "rs" { acc.push(path); } } acc.sort(); acc } fn project_dir() -> PathBuf { let dir = env!("CARGO_MANIFEST_DIR"); PathBuf::from(dir) .parent() .unwrap() .parent() .unwrap() .to_owned() } fn test_data_dir() -> PathBuf { project_dir().join("crates/libsyntax2/tests/data") } fn dump_tokens(tokens: &[libsyntax2::Token], text: &str) -> String { let mut acc = String::new(); let mut offset = 0; for token in tokens { let len: u32 = token.len.into(); let len = len as usize; let token_text = &text[offset..offset + len]; offset += len; write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap() } acc }
true
5fe7ec3da368de0b91066084381c3561bd94f192
Rust
ttaubert/rust-cryptopals
/challenge15/src/lib.rs
UTF-8
1,441
3.015625
3
[]
no_license
pub trait PKCS7Unpad {
    fn pkcs7_unpad(&self) -> Option<Vec<u8>>;
}

impl PKCS7Unpad for [u8] {
    fn pkcs7_unpad(&self) -> Option<Vec<u8>> {
        let len = self.len();
        if len == 0 {
            return None;
        }

        let pad = self[len - 1] as usize;
        if pad == 0 || pad > len {
            return None;
        }

        if (len - pad..len - 1).any(|i| self[i] != self[len - 1]) {
            return None;
        }

        Some(self[..len - pad].to_vec())
    }
}

#[cfg(test)]
mod test {
    use PKCS7Unpad;

    #[test]
    fn test() {
        assert_eq!(b"YELLOW SUBMARINE\x01".pkcs7_unpad(), Some(b"YELLOW SUBMARINE".to_vec()));
        assert_eq!(b"YELLOW SUBMARINE\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10".pkcs7_unpad(), Some(b"YELLOW SUBMARINE".to_vec()));
        assert_eq!(b"YELLOW SUBMARINE\x01".pkcs7_unpad(), Some(b"YELLOW SUBMARINE".to_vec()));
        assert_eq!(b"YELLOW SUBMARINE\x04\x04\x04\x04".pkcs7_unpad(), Some(b"YELLOW SUBMARINE".to_vec()));
        assert_eq!(b"ICE ICE BABY\x04\x04\x04\x04".pkcs7_unpad(), Some(b"ICE ICE BABY".to_vec()));
        assert_eq!(b"\x05\x05\x05\x05\x05".pkcs7_unpad(), Some(b"".to_vec()));
    }

    #[test]
    fn test_fail() {
        assert_eq!(b"ICE ICE BABY\x05\x05\x05\x05".pkcs7_unpad(), None);
        assert_eq!(b"ICE ICE BABY\x01\x02\x03\x04".pkcs7_unpad(), None);
        assert_eq!(b"ICE ICE BABY\xff\xff\xff\xff".pkcs7_unpad(), None);
        assert_eq!(b"\x00".pkcs7_unpad(), None);
        assert_eq!(b"".pkcs7_unpad(), None);
    }
}
true
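pkcs7_unpad above is the inverse of a padding step that this challenge does not include. A minimal pad sketch (block sizes and inputs chosen to line up with the tests above) makes the round trip visible; it is an illustrative companion, not code from the repository.

fn pkcs7_pad(data: &[u8], block: u8) -> Vec<u8> {
    // Pad length is 1..=block, so a full extra block is appended when the data
    // already ends on a block boundary; that is what keeps unpadding unambiguous.
    let pad = block - (data.len() % block as usize) as u8;
    let mut out = data.to_vec();
    out.extend(std::iter::repeat(pad).take(pad as usize));
    out
}

fn main() {
    assert_eq!(pkcs7_pad(b"YELLOW SUBMARINE", 20), b"YELLOW SUBMARINE\x04\x04\x04\x04".to_vec());
    assert_eq!(pkcs7_pad(b"YELLOW SUBMARINE", 16).len(), 32); // a full block of 0x10 is added
    assert_eq!(pkcs7_pad(b"", 5), b"\x05\x05\x05\x05\x05".to_vec());
}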
1552874066c6a03214f34d6cb4032c9860b5c9a7
Rust
maxdobeck/gimme
/src/hyperlinks.rs
UTF-8
871
3.265625
3
[ "MIT" ]
permissive
extern crate linkify;
use linkify::{LinkFinder, LinkKind};

/// `find_links` will search the source str for any
/// hypertext links (URLs)
pub fn find_links(source: &str) -> Vec<String> {
    let mut link_finder = LinkFinder::new();
    link_finder.kinds(&[LinkKind::Url]);
    let linkify_urls: Vec<_> = link_finder.links(source).collect();
    let mut urls: Vec<String> = linkify_urls.iter().map(|link| link.as_str().to_string()).collect();
    urls.sort();
    urls.dedup();
    urls
}

#[cfg(test)]
mod tests {
    #[test]
    fn should_return_one_url() {
        let link = super::find_links("https://en.wikipedia.org/wiki/Link_(The_Legend_of_Zelda)");
        assert_eq!(link.len(), 1);
    }

    #[test]
    fn no_duplicate_urls() {
        let link = super::find_links("example.com, example.com, https://example.com");
        assert_eq!(link.len(), 1)
    }
}
true
c35fad09b7b2dfa430d2fedf14bc119381673907
Rust
pengt/defi_projects
/layer2/zksync/core/bin/zksync_api/src/api_server/rest/v1/operations.rs
UTF-8
4,104
2.703125
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Operations part of API implementation. // Built-in uses // External uses use actix_web::{ web::{self, Json}, Scope, }; use serde::{Deserialize, Serialize}; // Workspace uses use zksync_storage::{ConnectionPool, QueryResult}; use zksync_types::BlockNumber; // Local uses use super::{ blocks::BlockInfo, client::{Client, ClientError}, transactions::TxReceipt, Error as ApiError, JsonResult, }; /// Shared data between `api/v1/operations` endpoints. #[derive(Debug, Clone)] struct ApiOperationsData { pool: ConnectionPool, } impl ApiOperationsData { pub fn new(pool: ConnectionPool) -> Self { Self { pool } } pub async fn priority_op(&self, serial_id: u64) -> QueryResult<Option<PriorityOpReceipt>> { let mut storage = self.pool.access_storage().await?; let executed_op = storage .chain() .operations_schema() .get_executed_priority_operation(serial_id as u32) .await?; let executed_op = if let Some(executed_op) = executed_op { executed_op } else { return Ok(None); }; let blocks = storage .chain() .block_schema() .load_block_range(executed_op.block_number as BlockNumber, 1) .await?; let block_info = blocks.into_iter().next().map(BlockInfo::from); let status = match block_info { None => TxReceipt::Pending, Some(info) if info.verify_tx_hash.is_some() => TxReceipt::Verified { block: info.block_number, }, Some(info) if info.commit_tx_hash.is_some() => TxReceipt::Committed { block: info.block_number, }, Some(_) => TxReceipt::Executed, }; Ok(Some(PriorityOpReceipt { status, index: executed_op.block_index as u64, })) } } // Data transfer objects. #[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] #[serde(rename_all = "camelCase")] pub struct PriorityOpReceipt { #[serde(flatten)] pub status: TxReceipt, pub index: u64, } // Client implementation /// Operations API part. impl Client { /// Gets priority operation receipt. pub async fn priority_op( &self, serial_id: u64, ) -> Result<Option<PriorityOpReceipt>, ClientError> { self.get(&format!("operations/priority_op/{}", serial_id)) .send() .await } } // Server implementation async fn priority_op( data: web::Data<ApiOperationsData>, web::Path(serial_id): web::Path<u64>, ) -> JsonResult<Option<PriorityOpReceipt>> { let receipt = data .priority_op(serial_id) .await .map_err(ApiError::internal)?; Ok(Json(receipt)) } pub fn api_scope(pool: ConnectionPool) -> Scope { let data = ApiOperationsData::new(pool); web::scope("operations") .data(data) .route("priority_op/{id}", web::get().to(priority_op)) } #[cfg(test)] mod tests { use super::{ super::test_utils::{TestServerConfig, COMMITTED_OP_SERIAL_ID, VERIFIED_OP_SERIAL_ID}, *, }; #[actix_rt::test] async fn test_operations_scope() -> anyhow::Result<()> { let cfg = TestServerConfig::default(); cfg.fill_database().await?; let (client, server) = cfg.start_server(|cfg| api_scope(cfg.pool.clone())); let requests = vec![ ( VERIFIED_OP_SERIAL_ID, Some(PriorityOpReceipt { index: 2, status: TxReceipt::Verified { block: 2 }, }), ), ( COMMITTED_OP_SERIAL_ID, Some(PriorityOpReceipt { index: 1, status: TxReceipt::Committed { block: 4 }, }), ), ]; for (serial_id, expected_op) in requests { assert_eq!(client.priority_op(serial_id).await?, expected_op); } server.stop().await; Ok(()) } }
true
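The interesting detail in priority_op above is the precedence when a block is mapped to a receipt status: a verify hash wins over a commit hash, and a block with neither is merely executed. The sketch below isolates that match with a simplified, hypothetical BlockInfo stand-in rather than the real zksync types.

#[derive(Debug, PartialEq)]
enum Receipt {
    Pending,
    Executed,
    Committed { block: u32 },
    Verified { block: u32 },
}

// Simplified stand-in for the API's BlockInfo: just the two optional tx hashes.
struct BlockInfo {
    block_number: u32,
    commit_tx_hash: Option<String>,
    verify_tx_hash: Option<String>,
}

fn status(block_info: Option<&BlockInfo>) -> Receipt {
    match block_info {
        None => Receipt::Pending,
        Some(info) if info.verify_tx_hash.is_some() => Receipt::Verified { block: info.block_number },
        Some(info) if info.commit_tx_hash.is_some() => Receipt::Committed { block: info.block_number },
        Some(_) => Receipt::Executed,
    }
}

fn main() {
    let verified = BlockInfo { block_number: 2, commit_tx_hash: Some("c".to_string()), verify_tx_hash: Some("v".to_string()) };
    let committed = BlockInfo { block_number: 4, commit_tx_hash: Some("c".to_string()), verify_tx_hash: None };
    assert_eq!(status(None), Receipt::Pending);
    assert_eq!(status(Some(&verified)), Receipt::Verified { block: 2 });
    assert_eq!(status(Some(&committed)), Receipt::Committed { block: 4 });
}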
c051ab0ea674f83c8f1d12c638cafc99d018a017
Rust
critter-mj/akochan-reviewer
/convlog/src/conv.rs
UTF-8
19,018
2.625
3
[ "Apache-2.0" ]
permissive
use crate::mjai; use crate::tenhou; use crate::Pai; use std::str; use thiserror::Error; #[derive(Debug, Error)] pub enum ConvertError { #[error("invalid naki string: {0:?}")] InvalidNaki(String), #[error("invalid pai string")] InvalidPai(#[source] <u8 as std::str::FromStr>::Err), #[error("insufficient dora indicators: at kyoku={kyoku} honba={honba}")] InsufficientDoraIndicators { kyoku: u8, honba: u8 }, #[error("insufficient takes sequence size: at kyoku={kyoku} honba={honba} for actor={actor}")] InsufficientTakes { kyoku: u8, honba: u8, actor: u8 }, #[error( "insufficient discards sequence size: at kyoku={kyoku} honba={honba} for actor={actor}" )] InsufficientDiscards { kyoku: u8, honba: u8, actor: u8 }, } pub type Result<T> = std::result::Result<T, ConvertError>; /// Transform a tenhou.net/6 format log into mjai format. pub fn tenhou_to_mjai(log: &tenhou::Log) -> Result<Vec<mjai::Event>> { let mut events = vec![]; events.push(mjai::Event::StartGame { kyoku_first: log.game_length as u8, aka_flag: log.has_aka, names: log.names.clone(), }); for kyoku in &log.kyokus { tenhou_kyoku_to_mjai_events(&mut events, kyoku)?; } events.push(mjai::Event::EndGame); Ok(events) } fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Kyoku) -> Result<()> { // first of all, transform all takes and discards to events. let mut take_events: Vec<_> = (0..4) .map(|i| { take_action_to_events(i, &kyoku.action_tables[i as usize].takes) .map(|ev| ev.into_iter().peekable()) }) .collect::<Result<Vec<_>>>()?; let mut discard_events: Vec<_> = (0..4) .map(|i| { discard_action_to_events(i, &kyoku.action_tables[i as usize].discards) .map(|ev| ev.into_iter().peekable()) }) .collect::<Result<Vec<_>>>()?; // then emit the events in order. let oya = kyoku.meta.kyoku_num % 4; let mut dora_feed = kyoku.dora_indicators.clone().into_iter(); let mut reach_flag: Option<usize> = None; let mut last_tsumo = Pai(0); let mut last_dahai = Pai(0); let mut need_new_dora = false; events.push(mjai::Event::StartKyoku { bakaze: Pai(41 + kyoku.meta.kyoku_num / 4), kyoku: kyoku.meta.kyoku_num % 4 + 1, honba: kyoku.meta.honba, kyotaku: kyoku.meta.kyotaku, dora_marker: dora_feed .next() .ok_or(ConvertError::InsufficientDoraIndicators { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, })?, oya, scores: kyoku.scoreboard, tehais: [ kyoku.action_tables[0].haipai, kyoku.action_tables[1].haipai, kyoku.action_tables[2].haipai, kyoku.action_tables[3].haipai, ], }); let mut actor = oya as usize; loop { // start to process a take event. let take = take_events[actor] .next() .ok_or(ConvertError::InsufficientTakes { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, actor: actor as u8, })?; // record the pai so that it can be filled in tsumogiri dahai. if let mjai::Event::Tsumo { pai, .. } = take { last_tsumo = pai; } // if a reach event was emitted before, set it as accepted now. if let Some(actor) = reach_flag.take() { events.push(mjai::Event::ReachAccepted { actor: actor as u8 }); } // skip one discard if the take is daiminkan. // and then immediately consume the next take event from the same actor. if let mjai::Event::Daiminkan { .. } = take { events.push(take); discard_events[actor].next(); // cannot use .skip(1) here as the types do not match need_new_dora = true; continue; } // emit the take event. events.push(take); // check if the kyoku ends here, can be ryukyoku (九種九牌) or tsumo. // here it simply checks if there is no more discard for current actor. 
if discard_events[actor].peek().is_none() { end_kyoku(events, kyoku); break; } // start to process a discard event. let discard = discard_events[actor] .next() .ok_or(ConvertError::InsufficientDiscards { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, actor: actor as u8, })? .fill_possible_tsumogiri(last_tsumo); // record the pai to check if someone naki it. if let mjai::Event::Dahai { pai, .. } = discard { last_dahai = pai; } // emit the discard event. events.push(discard.clone()); // process previous minkan. if need_new_dora { events.push(mjai::Event::Dora { dora_marker: dora_feed .next() .ok_or(ConvertError::InsufficientDoraIndicators { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, })?, }); need_new_dora = false; } // process reach declare. // a reach declare actually consists of two events (reach + dahai). if let mjai::Event::Reach { .. } = discard { reach_flag = Some(actor); let dahai = discard_events[actor] .next() .ok_or(ConvertError::InsufficientDiscards { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, actor: actor as u8, })? .fill_possible_tsumogiri(last_tsumo); if let mjai::Event::Dahai { pai, .. } = dahai { last_dahai = pai; } events.push(dahai); } // check if the kyoku ends here, can be ryukyoku or ron. // here it simply checks if there is no more take for every single actor. if (0..4).all(|i| take_events[i].peek().is_none()) { end_kyoku(events, kyoku); break; } // check if the last discard was ankan or kakan. // for kan, it will immediately consume the next take event from the same actor. match discard { mjai::Event::Ankan { .. } => { // ankan triggers a dora event immediately. events.push(mjai::Event::Dora { dora_marker: dora_feed.next().ok_or( ConvertError::InsufficientDoraIndicators { kyoku: kyoku.meta.kyoku_num, honba: kyoku.meta.honba, }, )?, }); continue; } mjai::Event::Kakan { .. } => { need_new_dora = true; continue; } _ => (), } // decide who is the next actor. // if someone takes taki of the previous discard, then it will be him, // otherwise it will be the shimocha. actor = (0..4) .filter(|&i| i != actor) .find(|&i| { if let Some(take) = take_events[i].peek() { if let Some((target, pai)) = take.naki_info() { return target == actor as u8 && pai == last_dahai; } } false }) .unwrap_or((actor + 1) % 4); } Ok(()) } fn take_action_to_events(actor: u8, takes: &[tenhou::ActionItem]) -> Result<Vec<mjai::Event>> { takes .iter() .map(|take| match take { &tenhou::ActionItem::Pai(pai) => Ok(mjai::Event::Tsumo { actor, pai }), tenhou::ActionItem::Naki(naki_string) => { let naki = naki_string.as_bytes(); if naki.contains(&b'c') { // chi // you can only chi from kamicha right...? if naki_string.len() != 7 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } // e.g. "c275226" => chi 7p with 06p from kamicha Ok(mjai::Event::Chi { actor, target: (actor + 3) % 4, pai: pai_from_bytes(&naki[1..3])?, consumed: mjai::Consumed2([ pai_from_bytes(&naki[3..5])?, pai_from_bytes(&naki[5..7])?, ]), }) } else if let Some(idx) = naki_string.find('p') { // pon if naki_string.len() != 7 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } match idx { // from kamicha // e.g. "p252525" => pon 5p from kamicha 0 => Ok(mjai::Event::Pon { actor, target: (actor + 3) % 4, pai: pai_from_bytes(&naki[1..3])?, consumed: mjai::Consumed2([ pai_from_bytes(&naki[3..5])?, pai_from_bytes(&naki[5..7])?, ]), }), // from toimen // e.g. 
"12p1212" => pon 2m from toimen 2 => Ok(mjai::Event::Pon { actor, target: (actor + 2) % 4, pai: pai_from_bytes(&naki[3..5])?, consumed: mjai::Consumed2([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[5..7])?, ]), }), // from shimocha // e.g. "3737p37" => pon 7s from shimocha 4 => Ok(mjai::Event::Pon { actor, target: (actor + 1) % 4, pai: pai_from_bytes(&naki[5..7])?, consumed: mjai::Consumed2([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[2..4])?, ]), }), // ??? _ => Err(ConvertError::InvalidNaki(naki_string.clone())), } } else if let Some(idx) = naki_string.find('m') { // daiminkan if naki_string.len() != 9 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } match idx { // from kamicha // e.g. "m39393939" => kan 9s from kamicha 0 => Ok(mjai::Event::Daiminkan { actor, target: (actor + 3) % 4, pai: pai_from_bytes(&naki[1..3])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[3..5])?, pai_from_bytes(&naki[5..7])?, pai_from_bytes(&naki[7..9])?, ]), }), // from toimen // e.g. "26m262626" => kan 6p from toimen 2 => Ok(mjai::Event::Daiminkan { actor, target: (actor + 2) % 4, pai: pai_from_bytes(&naki[3..5])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[5..7])?, pai_from_bytes(&naki[7..9])?, ]), }), // from shimocha // e.g. "131313m13" => kan 3m from shimocha 6 => Ok(mjai::Event::Daiminkan { actor, target: (actor + 1) % 4, pai: pai_from_bytes(&naki[7..9])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[2..4])?, pai_from_bytes(&naki[4..6])?, ]), }), // ??? _ => Err(ConvertError::InvalidNaki(naki_string.clone())), } } else { Err(ConvertError::InvalidNaki(naki_string.clone())) } } }) .collect() } fn discard_action_to_events( actor: u8, discards: &[tenhou::ActionItem], ) -> Result<Vec<mjai::Event>> { let mut ret = vec![]; for discard in discards { match discard { &tenhou::ActionItem::Pai(pai) => { let ev = mjai::Event::Dahai { actor, pai, // must be filled later if it is tsumogiri tsumogiri: pai.0 == 60, }; ret.push(ev); } tenhou::ActionItem::Naki(naki_string) => { let naki = naki_string.as_bytes(); // only ankan, kakan and reach are possible if let Some(idx) = naki_string.find('k') { // kakan if naki_string.len() != 9 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } let ev = match idx { // previously pon from toimen // e.g. "k16161616" => pon 6m from kamicha then kan 0 => mjai::Event::Kakan { actor, pai: pai_from_bytes(&naki[1..3])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[3..5])?, pai_from_bytes(&naki[5..7])?, pai_from_bytes(&naki[7..9])?, ]), }, // previously pon from toimen // e.g. "41k414141" => pon 1z from toimen then kan 2 => mjai::Event::Kakan { actor, pai: pai_from_bytes(&naki[3..5])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[5..7])?, pai_from_bytes(&naki[7..9])?, ]), }, // previously pon from shimocha // e.g. "4646k4646" => pon 6z from shimocha then kan 4 => mjai::Event::Kakan { actor, pai: pai_from_bytes(&naki[5..7])?, consumed: mjai::Consumed3([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[2..4])?, pai_from_bytes(&naki[7..9])?, ]), }, // ??? _ => { return Err(ConvertError::InvalidNaki(naki_string.clone())); } }; ret.push(ev); } else if naki.contains(&b'a') { // ankan // for ankan, 'a' can only appear at [6] // e.g. 
"424242a42" => ankan 2z if naki_string.len() != 9 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } let pai = pai_from_bytes(&naki[7..9])?; let ev = mjai::Event::Ankan { actor, consumed: mjai::Consumed4([ pai_from_bytes(&naki[0..2])?, pai_from_bytes(&naki[2..4])?, pai_from_bytes(&naki[4..6])?, pai, ]), }; ret.push(ev); } else { // reach // e.g. "r35" => discard 5s to reach if naki_string.len() != 3 { return Err(ConvertError::InvalidNaki(naki_string.clone())); } let pai = pai_from_bytes(&naki[1..3])?; ret.push(mjai::Event::Reach { actor }); ret.push(mjai::Event::Dahai { actor, pai, // must be filled later if it is tsumogiri tsumogiri: pai.0 == 60, }); } } }; } Ok(ret) } fn end_kyoku(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Kyoku) { match &kyoku.end_status { tenhou::kyoku::EndStatus::Hora { details } => { events.extend(details.iter().map(|detail| mjai::Event::Hora { actor: detail.who, target: detail.target, deltas: Some(detail.score_deltas), })); } tenhou::kyoku::EndStatus::Ryukyoku { score_deltas } => { events.push(mjai::Event::Ryukyoku { deltas: Some(*score_deltas), }); } }; events.push(mjai::Event::EndKyoku); } #[inline] fn pai_from_bytes(b: &[u8]) -> Result<Pai> { let s = unsafe { str::from_utf8_unchecked(b) }; let pai = Pai(s.parse().map_err(ConvertError::InvalidPai)?); Ok(pai) } impl mjai::Event { #[inline] fn fill_possible_tsumogiri(self, last_tsumo: Pai) -> Self { match self { mjai::Event::Dahai { actor, tsumogiri: true, .. } => mjai::Event::Dahai { actor, pai: last_tsumo, tsumogiri: true, }, _ => self, } } }
true
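In the tenhou naki strings handled above, the position of the marker letter inside the string encodes which seat the tile was called from (for a pon, index 0 is kamicha, 2 is toimen, 4 is shimocha). The sketch below isolates just that index-to-seat arithmetic; it is a simplified illustration, not the converter's actual code path.

// Returns the absolute seat a pon was called from, given the caller's seat and
// the index of 'p' in the 7-character naki string (0, 2 or 4 in tenhou logs).
fn pon_target(actor: u8, p_index: usize) -> Option<u8> {
    let offset = match p_index {
        0 => 3, // kamicha (left)
        2 => 2, // toimen (across)
        4 => 1, // shimocha (right)
        _ => return None,
    };
    Some((actor + offset) % 4)
}

fn main() {
    // "p252525": seat 2 pons from its kamicha, which is seat 1.
    assert_eq!(pon_target(2, "p252525".find('p').unwrap()), Some(1));
    // "3737p37": seat 0 pons from its shimocha, which is seat 1.
    assert_eq!(pon_target(0, "3737p37".find('p').unwrap()), Some(1));
    assert_eq!(pon_target(0, 5), None); // malformed naki string
}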
22bfff5c80043335fc249ea16459215b46ca306c
Rust
huseyinyilmaz/datastructures
/rust/linkedlist/src/linkedlist.rs
UTF-8
3,525
3.84375
4
[]
no_license
use std::rc::Rc; use std::iter::FromIterator; use std::iter::DoubleEndedIterator; #[derive(Debug)] struct Node<T> { value: T, next: Option<Rc<Node<T>>>, } #[derive(Debug)] pub struct LinkedList<T> { root: Option<Rc<Node<T>>> } impl<T> LinkedList<T> { pub fn new() -> Self { LinkedList { root: None } } pub fn add(&mut self, value: T) -> Self { LinkedList { root: Some(Rc::new(Node { value: value, next: self.root.as_ref().map(|rc| rc.clone()), } )) } } pub fn head(&self) -> Option<&T> { self.root.as_ref().map(|node|&node.value) } pub fn tail(&self) -> Option<LinkedList<T>> { self.root.as_ref().map(|root| { LinkedList{ root: root.next.clone() } }) } } impl<T> FromIterator<T> for LinkedList<T> { // fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> Self { fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> Self { // we put everything in a vector so we can use rfold. // rfold is on DoubleEndedIterator Vec::from_iter(iter).into_iter().rfold(Self::new(), |mut acc, v| acc.add(v)) } } impl<T> IntoIterator for LinkedList<T> { type Item = T; type IntoIter = std::vec::IntoIter<T>; // fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> Self { fn into_iter(self) -> Self::IntoIter { // we put everything in a vector so we can use rfold. // rfold is on DoubleEndedIterator Vec::from(self).into_iter() } } // impl<T> From<Vec<T>> for LinkedList<T> { // fn from(vec: Vec<T>) -> Self { // } // } impl<T> From<LinkedList<T>> for Vec<T> { fn from(mut list: LinkedList<T>) -> Vec<T> { let mut result = Vec::new(); let mut root = list.root.take(); while let Some(node) = root { if let Ok(mut node) = Rc::try_unwrap(node) { result.push(node.value); root = node.next.take(); } else { break; } } result } } impl<T> Drop for LinkedList<T> { fn drop(&mut self) { let mut root = self.root.take(); while let Some(node) = root { if let Ok(mut node) = Rc::try_unwrap(node) { root = node.next.take(); } else { break; } } } } mod test { use super::LinkedList; use std::iter::FromIterator; #[test] fn head() { let list: LinkedList<i32> = LinkedList::from_iter((0..100).into_iter()); // println!("{:?}", list); assert_eq!(list.head(), Some(&0)); } #[test] fn tail() { let list: LinkedList<i32> = LinkedList::from_iter((0..100).into_iter()); // println!("{:?}", list); assert_eq!(list.tail().unwrap().head(), Some(&1)); } #[test] fn drop_test_big_list() { LinkedList::from_iter((0..100000).into_iter()); } #[test] fn from_vec() { let list = Vec::from(LinkedList::from_iter((0..3).into_iter())); let target = vec![0, 1, 2]; assert_eq!(list, target); } #[test] fn into_iter() { let mut result = Vec::new(); let list = LinkedList::from_iter((0..3).into_iter()); // for loop using into_iter(); for i in list.into_iter() { result.push(i); } assert_eq!(result, vec![0, 1, 2]); } }
true
a43c0f5a243a236dc3b668cdc94e83dca2fb7608
Rust
dtrain157/blip
/blip/src/events/mouse_event.rs
UTF-8
2,518
2.890625
3
[ "MIT" ]
permissive
use super::event::{Event, EventCategory, EventType}; use std::fmt; /***********************************************************/ pub struct MouseMovedEvent { pub is_handled: bool, pub mouse_x: f64, pub mouse_y: f64, } impl Event for MouseMovedEvent { fn get_event_type(&self) -> EventType { EventType::MouseMoved } fn get_category_flags(&self) -> EventCategory { EventCategory::MOUSE | EventCategory::INPUT } } impl fmt::Display for MouseMovedEvent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Mouse Moved Event: ({}, {})", self.mouse_x, self.mouse_y) } } /***********************************************************/ pub struct MouseScrolledEvent { pub is_handled: bool, pub mouse_x_offset: f64, pub mouse_y_offset: f64, } impl Event for MouseScrolledEvent { fn get_event_type(&self) -> EventType { EventType::MouseScrolled } fn get_category_flags(&self) -> EventCategory { EventCategory::MOUSE | EventCategory::INPUT } } impl fmt::Display for MouseScrolledEvent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Mouse Scrolled Event: ({}, {})", self.mouse_x_offset, self.mouse_y_offset) } } /***********************************************************/ pub struct MouseButtonPressedEvent { pub is_handled: bool, pub mouse_button: u8, } impl Event for MouseButtonPressedEvent { fn get_event_type(&self) -> EventType { EventType::MouseButtonPressed } fn get_category_flags(&self) -> EventCategory { EventCategory::MOUSE | EventCategory::INPUT } } impl fmt::Display for MouseButtonPressedEvent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Mouse Button Pressed Event: {}", self.mouse_button) } } /***********************************************************/ pub struct MouseButtonReleasedEvent { pub is_handled: bool, pub mouse_button: u8, } impl Event for MouseButtonReleasedEvent { fn get_event_type(&self) -> EventType { EventType::MouseButtonReleased } fn get_category_flags(&self) -> EventCategory { EventCategory::MOUSE | EventCategory::INPUT } } impl fmt::Display for MouseButtonReleasedEvent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Mouse Button Released Event: {}", self.mouse_button) } } /***********************************************************/
true
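get_category_flags above ORs two category flags together, and membership tests then reduce to a mask check. The sketch below shows that pattern with plain u32 constants; the flag names and values are hypothetical, since the real EventCategory type lives in the parent event module.

// Hypothetical flag values; the real ones are defined by the event module.
const CATEGORY_INPUT: u32 = 1 << 0;
const CATEGORY_MOUSE: u32 = 1 << 1;
const CATEGORY_KEYBOARD: u32 = 1 << 2;

fn is_in_category(event_flags: u32, category: u32) -> bool {
    event_flags & category != 0
}

fn main() {
    let mouse_moved_flags = CATEGORY_MOUSE | CATEGORY_INPUT;
    assert!(is_in_category(mouse_moved_flags, CATEGORY_MOUSE));
    assert!(is_in_category(mouse_moved_flags, CATEGORY_INPUT));
    assert!(!is_in_category(mouse_moved_flags, CATEGORY_KEYBOARD));
}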
abc8985e34324f14f19d0686baf6a3addc7c4b9d
Rust
kvnallsn/motd-rs
/src/commands/linux/netlink/nlsocket.rs
UTF-8
4,185
2.546875
3
[ "BSD-3-Clause" ]
permissive
//! Rust wrapper around libc socket, send/recv

use crate::commands::linux::netlink::NetlinkRequest;
use std::{io::Error, ops::Drop, os::unix::io::RawFd};

/// Don't send any flags
const FLAGS: i32 = 0;

/// Represents the various different kernel modules that we can
/// interact with.
#[allow(dead_code)]
pub enum NetlinkFamily {
    /// Routing/Device hook
    Route = libc::NETLINK_ROUTE as isize,
    /// Reserved for user mode socket protocols
    UserSock = libc::NETLINK_USERSOCK as isize,
    /// Unused number, formerly ip_queue
    Firewall = libc::NETLINK_FIREWALL as isize,
    /// Socket monitoring
    SockDiag = libc::NETLINK_SOCK_DIAG as isize,
    /// Netfilter/iptables ULOG
    NFLog = libc::NETLINK_NFLOG as isize,
    /// IPSec
    XFRM = libc::NETLINK_XFRM as isize,
    /// SELinux event notifications
    SELinux = libc::NETLINK_SELINUX as isize,
    /// Open-iSCSI
    ISCSI = libc::NETLINK_ISCSI as isize,
    /// Auditing
    Audit = libc::NETLINK_AUDIT as isize,
    FibLookup = libc::NETLINK_FIB_LOOKUP as isize,
    Connector = libc::NETLINK_CONNECTOR as isize,
    /// Netfilter subsystem
    Netfilter = libc::NETLINK_NETFILTER as isize,
    Ip6Fw = libc::NETLINK_IP6_FW as isize,
    /// DECnet Routing Messages
    DNRTMSG = libc::NETLINK_DNRTMSG as isize,
    /// Kernel messages to userspace
    KObjectUevent = libc::NETLINK_KOBJECT_UEVENT as isize,
    Generic = libc::NETLINK_GENERIC as isize,
    /// SCSI Transports
    ScsiTransport = libc::NETLINK_SCSITRANSPORT as isize,
    ECryptFs = libc::NETLINK_ECRYPTFS as isize,
    Rdma = libc::NETLINK_RDMA as isize,
    /// Crypto Layer
    Crypto = libc::NETLINK_CRYPTO as isize,
}

/// Represents a NETLINK socket that can send and receive NETLINK messages
pub struct NetlinkSocket(RawFd);

impl Drop for NetlinkSocket {
    fn drop(&mut self) {
        unsafe {
            libc::close(self.0);
        }
    }
}

impl NetlinkSocket {
    /// Creates a new NETLINK socket with the specified netlink family. The netlink family
    /// is responsible for selecting the correct kernel module or resource to communicate with
    /// after the socket is opened. If an error occurs, a std::io::Error will be returned. On
    /// success, the opened NetlinkSocket will be returned.
    ///
    /// Note: There is no need to call close(), the socket will automatically be closed when
    /// the reference to this struct is dropped.
    ///
    /// # Arguments
    ///
    /// * `family` - Kernel module/resource to communicate with (e.g., SockDiag)
    pub fn new(family: NetlinkFamily) -> Result<NetlinkSocket, Error> {
        let fd = unsafe { libc::socket(libc::AF_NETLINK, libc::SOCK_DGRAM, family as i32) };
        if fd == -1 {
            Err(Error::last_os_error())
        } else {
            Ok(NetlinkSocket(fd))
        }
    }

    /// Sends a message through the opened socket, returning the number of bytes sent.
    /// The parameter `msg` must implement NetlinkRequest and it *must* have the
    /// #[repr(C)] attribute. A reference to the struct will be cast as c_void ptr
    /// and then passed to send() in an unsafe call. If the structure of `msg` does
    /// not exactly match the structure in the appropriate manpage then the call will
    /// most likely fail
    ///
    /// # Arguments
    ///
    /// * `msg` - A struct that implements a NetlinkRequest
    pub fn send<M: NetlinkRequest>(&self, msg: &M) -> Result<usize, Error> {
        let len = std::mem::size_of::<M>();
        let buffer: *const M = msg;
        let sent = unsafe { libc::send(self.0, buffer as *const _, len as usize, FLAGS) };
        if sent < 0 {
            Err(Error::last_os_error())
        } else {
            Ok(sent as usize)
        }
    }

    /// Receives a message sent from the kernel module/resource
    pub fn recv(&self, buffer: &mut [u8]) -> Result<usize, Error> {
        let len = buffer.len();
        let received =
            unsafe { libc::recv(self.0, buffer.as_mut_ptr() as *mut _, len as usize, FLAGS) };
        if received < 0 {
            Err(Error::last_os_error())
        } else {
            Ok(received as usize)
        }
    }
}
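// --- Editorial addition (not part of the original file): usage sketch. ---
// Assumes only `NetlinkSocket`/`NetlinkFamily` above plus some caller-defined
// #[repr(C)] request type `MyRequest` implementing `NetlinkRequest`; that type
// and the 8 KiB buffer size are hypothetical, so the sketch is left commented.
//
// fn query_kernel(req: &MyRequest) -> Result<Vec<u8>, Error> {
//     let sock = NetlinkSocket::new(NetlinkFamily::SockDiag)?;
//     sock.send(req)?;                   // the struct is sent as raw bytes
//     let mut buf = vec![0u8; 8192];     // arbitrary receive buffer size
//     let n = sock.recv(&mut buf)?;      // blocks until the kernel replies
//     buf.truncate(n);
//     Ok(buf)
// }                                      // socket closed here via Drop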
true
55e9c5d3548c02b1442478c40dbb43624989ec09
Rust
pikajude/prattle-rs
/src/errors.rs
UTF-8
3,754
3.0625
3
[ "MIT" ]
permissive
// errors.rs - MIT License
// MIT License
// Copyright (c) 2018 Tyler Laing (ZerothLaw)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

//! # Errors Module
//!
//! Contains the utilitarian ParseError enum that wraps useful information
//! of what exactly went wrong during parsing.
//!
//! Generally, your rules shouldn't manually return these errors - the parser will
//! return these errors where they make the best sense.

use std::marker::{Send, Sync};

use node::SimpleNode;
use token::Token;

/// # ParseError
/// ## Explanation
/// This enum implements failure::Fail, which in turn requires
/// Debug + Display + Send + Sync + 'static to ensure the errors
/// can be sent to other threads and referenced from other threads.
///
/// This in turn forces types that deal with ParseError to be
/// Send + Sync + 'static as well as Token implementations.
///
/// Deriving Fail implies implementation of std::error::Error trait.
///
#[derive(Clone, Debug, Eq, Fail, Hash, Ord, PartialEq, PartialOrd)]
pub enum ParseError<T: Token + Send + Sync + 'static> {
    /// # ParseError::MalformedSyntax
    /// Represents parser context when a syntax rule fails.
    /// Not returned by the general parser implementation.
    /// Usage: during a syntax rule, if this error is to be
    /// returned, use *node* for the current node passed to
    /// the syntax rule, and *token* for the token that lead to
    /// the error to be returned.
    #[fail(
        display = "incorrect syntax, failed on node: {} with token: {}",
        node, token
    )]
    MalformedSyntax { node: SimpleNode<T>, token: T },
    /// Returned by the parser when a rule is not found for a specific token.
    /// Generally only should be seen during development of a language spec.
    #[fail(display = "missing a {} syntax rule for: {}", ty, token)]
    MissingRule { token: T, ty: String },
    /// Expected more input than was available. Returned by the parser.
    #[fail(display = "token iteration ended before parsing context finished")]
    Incomplete,
    /// <P as Parser<T>>::consume(end_token: T) was called, and the required
    /// token was not found as the next token(returned by peek/next_token).
    #[fail(
        display = "parser.consume(end_token: {}) didn't find expected token, instead found: {}.",
        expected, found
    )]
    ConsumeFailed { expected: T, found: T },
}

#[cfg(test)]
mod test {
    use super::*;

    //Catch Send/Sync changes
    #[test]
    fn test_parseerror_send() {
        fn assert_send<T: Send>() {}
        assert_send::<ParseError<String>>();
    }

    #[test]
    fn test_parseerror_sync() {
        fn assert_sync<T: Sync>() {}
        assert_sync::<ParseError<String>>();
    }
}
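// --- Editorial addition (not part of the original file): display sketch. ---
// Assumes `String` implements the crate's `Token` trait, as the tests above
// already do; the "led" rule-type string and "+" token are arbitrary examples,
// shown only to illustrate the #[fail(display = ...)] messages.
//
// let err: ParseError<String> = ParseError::MissingRule {
//     token: "+".to_string(),
//     ty: "led".to_string(),
// };
// assert_eq!(err.to_string(), "missing a led syntax rule for: +");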
true
2a38d072a9027e3faa9f533334402a34bde6a46a
Rust
yjhmelody/lua-rs
/src/compiler/parser.rs
UTF-8
31,091
2.9375
3
[]
no_license
#![allow(dead_code)] use std::process::exit; use crate::compiler::ast::*; use crate::compiler::error::*; use crate::compiler::lexer::*; use crate::compiler::token::*; use crate::number::parser::*; /// parse gets a lexer and returns a Lua Block which is Lua AST pub fn parse_block(lexer: &mut impl Lex) -> Result<Block> { let stats = parse_stats(lexer)?; let res = parse_ret_exps(lexer); let ret_exps; match res { Ok(res) => { ret_exps = Some(res); } Err(Error::NoReturnValue) => { ret_exps = None; } Err(err) => { return Err(err); } } let last_line = lexer.current_line(); Ok(Block::new( stats, ret_exps, last_line, )) } fn parse_stats(lexer: &mut impl Lex) -> Result<Vec<Stat>> { let mut stats = vec![]; while !_is_return_or_block_end(lexer.look_ahead()) { let stat = parse_stat(lexer)?; match stat { Stat::Empty => {} stat => { stats.push(stat); } } } Ok(stats) } fn parse_ret_exps(lexer: &mut impl Lex) -> Result<Vec<Exp>> { match lexer.look_ahead() { Ok(Token::KwReturn) => {} _ => return Err(Error::NoReturnValue), }; // skip `return` lexer.skip_next_token(); match lexer.look_ahead() { Err(Error::EOF { line: _ }) | Ok(Token::KwElse) | Ok(Token::KwElseIf) | Ok(Token::KwEnd) | Ok(Token::KwUntil) => Ok(vec![]), Ok(Token::SepSemi) => { lexer.skip_next_token(); Ok(vec![]) } _ => { let exps = parse_exp_list(lexer); if let Ok(Token::SepSemi) = lexer.look_ahead() { lexer.skip_next_token(); } exps } } } fn parse_exp_list(lexer: &mut impl Lex) -> Result<Vec<Exp>> { // exp {, exp} let mut exp_list = vec![]; exp_list.push(parse_exp(lexer)?); while let Ok(Token::SepComma) = lexer.look_ahead() { lexer.skip_next_token(); exp_list.push(parse_exp(lexer)?); } Ok(exp_list) } /********************* Parse Statement **********************/ fn parse_stat(lexer: &mut impl Lex) -> Result<Stat> { match lexer.look_ahead()? 
{ // deal with `;` Token::SepSemi => parse_empty_stat(lexer), Token::KwBreak => parse_break_stat(lexer), Token::SepLabel => parse_label_stat(lexer), Token::KwGoto => parse_goto_stat(lexer), Token::KwDo => parse_do_stat(lexer), Token::KwWhile => parse_while_stat(lexer), Token::KwIf => parse_if_stat(lexer), Token::KwRepeat => parse_repeat_stat(lexer), Token::KwFor => parse_for_stat(lexer), Token::KwFunction => parse_fn_def_stat(lexer), Token::KwLocal => parse_local_assign_or_fn_def_stat(lexer), _ => parse_assign_or_fn_call_stat(lexer), } } fn parse_empty_stat(lexer: &mut impl Lex) -> Result<Stat> { lexer.skip_next_token(); Ok(Stat::Empty) } fn parse_break_stat(lexer: &mut impl Lex) -> Result<Stat> { lexer.skip_next_token(); Ok(Stat::Break(lexer.current_line())) } fn parse_label_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `::` lexer.skip_next_token(); let name = lexer.next_ident()?; // check `::` if lexer.check_next_token(Token::SepLabel) { Ok(Stat::Label(name)) } else { Err(Error::IllegalStat { line: lexer.current_line() }) } } fn parse_goto_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `goto` lexer.skip_next_token(); let name = lexer.next_ident()?; Ok(Stat::Goto(name)) } fn parse_do_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `do` lexer.skip_next_token(); let block = Box::new(parse_block(lexer)?); match lexer.next_token() { Ok(Token::KwEnd) => Ok(Stat::Do(block)), _ => Err(Error::IllegalStat { line: lexer.current_line() }), } } fn parse_while_stat(lexer: &mut impl Lex) -> Result<Stat> { lexer.skip_next_token(); let exp = parse_exp(lexer)?; match lexer.next_token() { Ok(Token::KwDo) => { let block = Box::new(parse_block(lexer)?); if lexer.check_next_token(Token::KwEnd) { Ok(Stat::While(exp, block)) } else { Err(Error::IllegalStat { line: lexer.current_line() }) } } _ => Err(Error::IllegalStat { line: lexer.current_line() }), } } fn parse_repeat_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `repeat` lexer.skip_next_token(); let block = Box::new(parse_block(lexer)?); match lexer.next_token() { Ok(Token::KwUntil) => { let exp = parse_exp(lexer)?; Ok(Stat::Repeat(exp, block)) } _ => Err(Error::IllegalStat { line: lexer.current_line() }), } } fn parse_if_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `if` lexer.skip_next_token(); let mut exps = vec![]; let mut blocks = vec![]; exps.push(parse_exp(lexer)?); // skip `then` if lexer.check_next_token(Token::KwThen) { blocks.push(parse_block(lexer)?); } else { return Err(Error::IllegalStat { line: lexer.current_line() }); } // elseif while let Ok(Token::KwElseIf) = lexer.look_ahead() { lexer.skip_next_token(); exps.push(parse_exp(lexer)?); if lexer.check_next_token(Token::KwThen) { blocks.push(parse_block(lexer)?); } else { return Err(Error::IllegalStat { line: lexer.current_line() }); } } // else -> elseif true if let Ok(Token::KwElse) = lexer.look_ahead() { lexer.skip_next_token(); exps.push(Exp::True(lexer.current_line())); // demo: if false then elseif false then else end blocks.push(parse_block(lexer)?); } lexer.skip_next_token(); Ok(Stat::Condition(exps, blocks)) } fn parse_for_stat(lexer: &mut impl Lex) -> Result<Stat> { lexer.skip_next_token(); let line_of_for = lexer.current_line(); let name = lexer.next_ident()?; if let Ok(Token::OpAssign) = lexer.look_ahead() { // `=` _parse_for_num_stat(lexer, line_of_for, name) } else { // `in` _parse_for_in_stat(lexer, name) } } fn _parse_for_num_stat(lexer: &mut impl Lex, line_of_for: Line, var_name: String) -> Result<Stat> { lexer.skip_next_token(); let init_exp = 
parse_exp(lexer)?; let limit_exp = match lexer.look_ahead() { Ok(Token::SepComma) => { lexer.skip_next_token(); parse_exp(lexer)? } _ => { return Err(Error::IllegalStat { line: lexer.current_line() }); } }; // optional exp, default to 1 let step_exp = match lexer.look_ahead() { Ok(Token::SepComma) => { lexer.skip_next_token(); parse_exp(lexer)? } _ => Exp::Integer(1, lexer.current_line()), }; let line_of_do = match lexer.next_token() { Ok(Token::KwDo) => lexer.current_line(), _ => { return Err(Error::IllegalStat { line: lexer.current_line() }); } }; let block = Box::new(parse_block(lexer)?); if !lexer.check_next_token(Token::KwEnd) { return Err(Error::IllegalStat { line: lexer.current_line() }); } Ok(Stat::ForNum(ForNum::new(var_name, init_exp, limit_exp, step_exp, block, line_of_for, line_of_do))) } fn _parse_for_in_stat(lexer: &mut impl Lex, name: String) -> Result<Stat> { let name_list = _parse_name_list(lexer, name)?; match lexer.next_token() { Ok(Token::KwIn) => { let exp_list = parse_exp_list(lexer)?; let line_of_do = match lexer.next_token() { Ok(Token::KwDo) => lexer.current_line(), _ => { return Err(Error::IllegalStat { line: lexer.current_line() }); } }; let block = Box::new(parse_block(lexer)?); match lexer.next_token() { Ok(Token::KwEnd) => Ok(Stat::ForIn(ForIn::new(name_list, exp_list, block), line_of_do)), _ => Err(Error::IllegalStat { line: lexer.current_line() }), } } _ => Err(Error::IllegalStat { line: lexer.current_line() }), } } fn parse_local_assign_or_fn_def_stat(lexer: &mut impl Lex) -> Result<Stat> { lexer.skip_next_token(); match lexer.look_ahead() { Ok(Token::KwFunction) => _parse_local_fn_def_stat(lexer), _ => _parse_local_var_decl_stat(lexer), } } fn _parse_local_fn_def_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `function` lexer.skip_next_token(); let name = lexer.next_ident()?; let exp = parse_fn_def_exp(lexer)?; match exp { Exp::FnDef(fn_def) => { Ok(Stat::LocalFnDef(name, fn_def)) } _ => unreachable!() } } fn _parse_local_var_decl_stat(lexer: &mut impl Lex) -> Result<Stat> { let name0 = lexer.next_ident()?; let name_list = _parse_name_list(lexer, name0)?; let exp_list = if let Ok(Token::OpAssign) = lexer.look_ahead() { lexer.skip_next_token(); parse_exp_list(lexer)? 
} else { vec![] }; let last_line = lexer.current_line(); Ok(Stat::LocalVarDecl( name_list, exp_list, last_line, )) } fn parse_assign_or_fn_call_stat(lexer: &mut impl Lex) -> Result<Stat> { let prefix_exp = parse_prefix_exp(lexer); match prefix_exp { Ok(Exp::FnCall(fn_call)) => { Ok(Stat::FnCall(fn_call)) } _ => { parse_assign_stat(lexer, prefix_exp.unwrap()) } } } fn parse_assign_stat(lexer: &mut impl Lex, var0: Exp) -> Result<Stat> { let var_list = _parse_var_list(lexer, var0)?; if lexer.check_next_token(Token::OpAssign) { let exp_list = parse_exp_list(lexer)?; let last_line = lexer.current_line(); Ok(Stat::Assign(var_list, exp_list, last_line)) } else { Err(Error::MissingAssignment { line: lexer.current_line() }) } } fn parse_fn_def_stat(lexer: &mut impl Lex) -> Result<Stat> { // skip `function` lexer.skip_next_token(); let mut has_colon = false; let fn_name = _parse_fn_name(lexer, &mut has_colon)?; let mut fn_body = parse_fn_def_exp(lexer)?; // v:name(args) => v.name(self, args) // insert `self` to the first arg // todo: refactor if let Exp::FnDef(ref mut fn_def) = fn_body { if has_colon { fn_def.par_list.params.reverse(); fn_def.par_list.params.push("self".to_string()); fn_def.par_list.params.reverse(); } // transfer function definition to assignment let last_line = fn_def.last_line; return Ok(Stat::Assign(vec![fn_name], vec![fn_body], last_line)); } unreachable!() } fn _parse_var_list(lexer: &mut impl Lex, var0: Exp) -> Result<Vec<Exp>> { let mut var_list = vec![]; if _is_var_exp(&var0) { var_list.push(var0); } else { return Err(Error::NotVarExpression { line: lexer.current_line() }); } while let Ok(Token::SepComma) = lexer.look_ahead() { lexer.skip_next_token(); let exp = parse_prefix_exp(lexer)?; var_list.push(exp); } Ok(var_list) } fn _parse_name_list(lexer: &mut impl Lex, name0: String) -> Result<Vec<String>> { let mut name_list = vec![name0]; while let Ok(Token::SepComma) = lexer.look_ahead() { lexer.skip_next_token(); let name = lexer.next_ident()?; name_list.push(name); } Ok(name_list) } fn _parse_fn_name(lexer: &mut impl Lex, has_colon: &mut bool) -> Result<Exp> { // fn_name ::= Name {`.` Name} [`:` Name] let name = lexer.next_ident()?; let line = lexer.current_line(); let mut exp = Box::new(Exp::Name(name, line)); while let Ok(Token::SepDot) = lexer.look_ahead() { lexer.skip_next_token(); let name = lexer.next_ident()?; let line = lexer.current_line(); let key = Box::new(Exp::String(name, line)); exp = Box::new(Exp::TableAccess(exp, key, line)); } // check `:` if let Ok(Token::SepColon) = lexer.look_ahead() { lexer.skip_next_token(); let name = lexer.next_ident()?; let line = lexer.current_line(); *has_colon = true; let key = Box::new(Exp::String(name, line)); exp = Box::new(Exp::TableAccess(exp, key, line)); } Ok(*exp) } /******************* Parse Expression *************************/ fn parse_exp(lexer: &mut impl Lex) -> Result<Exp> { parse_exp12(lexer) } fn parse_exp12(lexer: &mut impl Lex) -> Result<Exp> { // x or y let mut exp = Box::new(parse_exp11(lexer)?); while let Ok(Token::OpOr) = lexer.look_ahead() { let op = lexer.next_token().or(Err(Error::NotOperator { line: lexer.current_line() }))?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp11(lexer)?), line)); } Ok(*exp) } fn parse_exp11(lexer: &mut impl Lex) -> Result<Exp> { // x and y let mut exp = Box::new(parse_exp10(lexer)?); while let Ok(Token::OpAnd) = lexer.look_ahead() { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, 
Box::new(parse_exp10(lexer)?), line)); } Ok(*exp) } fn parse_exp10(lexer: &mut impl Lex) -> Result<Exp> { // x `cmp` y let mut exp = Box::new(parse_exp9(lexer)?); loop { match lexer.look_ahead() { Ok(Token::OpGe) | Ok(Token::OpGt) | Ok(Token::OpLe) | Ok(Token::OpLt) | Ok(Token::OpNe) | Ok(Token::OPEq) => { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp9(lexer)?), line)); } _ => { break; } } } Ok(*exp) } fn parse_exp9(lexer: &mut impl Lex) -> Result<Exp> { // x | y let mut exp = Box::new(parse_exp8(lexer)?); while let Ok(Token::OpBitOr) = lexer.look_ahead() { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp8(lexer)?), line)); } Ok(*exp) } fn parse_exp8(lexer: &mut impl Lex) -> Result<Exp> { // x ~ y let mut exp = Box::new(parse_exp7(lexer)?); while let Ok(Token::OpWave) = lexer.look_ahead() { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp7(lexer)?), line)); } Ok(*exp) } fn parse_exp7(lexer: &mut impl Lex) -> Result<Exp> { // x & y let mut exp = Box::new(parse_exp6(lexer)?); while let Ok(Token::OpBitAnd) = lexer.look_ahead() { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp6(lexer)?), line)); } Ok(*exp) } fn parse_exp6(lexer: &mut impl Lex) -> Result<Exp> { // x >>/<< y let mut exp = Box::new(parse_exp5(lexer)?); loop { match lexer.look_ahead() { Ok(Token::OpShl) | Ok(Token::OpShr) => { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp5(lexer)?), line)); } _ => { break; } } } Ok(*exp) } fn parse_exp5(lexer: &mut impl Lex) -> Result<Exp> { // x .. 
y let exp = parse_exp4(lexer)?; match lexer.look_ahead() { Ok(Token::OpConcat) => { let mut line = 0; let mut exps = vec![]; while let Ok(Token::OpConcat) = lexer.look_ahead() { lexer.skip_next_token(); line = lexer.current_line(); exps.push(parse_exp4(lexer)?); } Ok(Exp::Concat(exps, line)) } _ => { Ok(exp) } } } fn parse_exp4(lexer: &mut impl Lex) -> Result<Exp> { // x +/- y let mut exp = Box::new(parse_exp3(lexer)?); loop { match lexer.look_ahead() { Ok(Token::OpAdd) | Ok(Token::OpMinus) => { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp3(lexer)?), line)); } _ => { break; } } } Ok(*exp) } fn parse_exp3(lexer: &mut impl Lex) -> Result<Exp> { // * % / // let mut exp = Box::new(parse_exp2(lexer)?); loop { match lexer.look_ahead() { Ok(Token::OpMul) | Ok(Token::OpDiv) | Ok(Token::OpIDiv) | Ok(Token::OpMod) => { let op = lexer.next_token()?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp2(lexer)?), line)); } _ => { break; } } } Ok(*exp) } fn parse_exp2(lexer: &mut impl Lex) -> Result<Exp> { // unary ops: not # - ~ match lexer.look_ahead() { Ok(Token::OpNot) | Ok(Token::OpLen) | Ok(Token::OpWave) | Ok(Token::OpMinus) => { let op = lexer.next_token()?; let line = lexer.current_line(); let exp = Box::new(parse_exp2(lexer)?); Ok(Exp::Unop(op, exp, line)) } _ => Ok(parse_exp1(lexer)?), } } fn parse_exp1(lexer: &mut impl Lex) -> Result<Exp> { // x ^ y let mut exp = Box::new(parse_exp0(lexer)?); if let Ok(Token::OpPow) = lexer.look_ahead() { let op = lexer.next_token().or(Err(Error::NotOperator { line: lexer.current_line() }))?; let line = lexer.current_line(); exp = Box::new(Exp::Binop(exp, op, Box::new(parse_exp2(lexer)?), line)); } Ok(*exp) } fn parse_exp0(lexer: &mut impl Lex) -> Result<Exp> { // primary match lexer.look_ahead() { Ok(Token::VarArg) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(Exp::Vararg(line)) } Ok(Token::KwNil) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(Exp::Nil(line)) } Ok(Token::KwTrue) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(Exp::True(line)) } Ok(Token::KwFalse) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(Exp::False(line)) } Ok(Token::String(val)) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(Exp::String(val, line)) } Ok(Token::Number(_)) => parse_number_exp(lexer), // followings are recursive Ok(Token::SepLcurly) => parse_table_constructor_exp(lexer), Ok(Token::KwFunction) => { lexer.skip_next_token(); parse_fn_def_exp(lexer) } _ => parse_prefix_exp(lexer), } } /******************* Parse Primary *************************/ fn parse_number_exp(lexer: &mut impl Lex) -> Result<Exp> { let num = lexer.next_token(); let line = lexer.current_line(); if let Ok(Token::Number(val)) = num { match parse_integer(val.clone()) { Err(_) => { let num = parse_float(val)?; Ok(Exp::Float(num, line)) } Ok(num) => Ok(Exp::Integer(num, line)) } } else { Err(Error::IllegalNumLiteral { line: lexer.current_line() }) } } fn parse_table_constructor_exp(lexer: &mut impl Lex) -> Result<Exp> { // `{` if !lexer.check_next_token(Token::SepLcurly) { return Err(Error::IllegalExpression { line: lexer.current_line() }); } let line = lexer.current_line(); // [fieldlist] let fields = _parse_field_list(lexer)?; // `}` if !lexer.check_next_token(Token::SepRcurly) { return Err(Error::IllegalExpression { line: lexer.current_line() }); } Ok(Exp::TableConstructor(fields, line)) } fn 
parse_fn_def_exp(lexer: &mut impl Lex) -> Result<Exp> { // it has skip `function` keyword let line = lexer.current_line(); if !lexer.check_next_token(Token::SepLparen) { return Err(Error::IllegalToken { line, }); } let mut is_vararg = false; let par_list = _parse_par_list(lexer, &mut is_vararg)?; let line = lexer.current_line(); if !lexer.check_next_token(Token::SepRparen) { return Err(Error::IllegalToken { line, }); } let block = Box::new(parse_block(lexer)?); let line = lexer.current_line(); if !lexer.check_next_token(Token::KwEnd) { return Err(Error::IllegalToken { line, }); } let last_line = lexer.current_line(); Ok(Exp::FnDef(FnDef::new(ParList::new(par_list, is_vararg), block, line, last_line))) } fn parse_prefix_exp(lexer: &mut impl Lex) -> Result<Exp> { let exp; if let Ok(Token::Identifier(val)) = lexer.look_ahead() { lexer.skip_next_token(); let line = lexer.current_line(); exp = Exp::Name(val, line); } else { // `(` exp `)` exp = parse_parens_exp(lexer)?; } let mut exp = Box::new(exp); loop { match lexer.look_ahead() { Ok(Token::SepLbrack) => { // `[` exp `]` lexer.skip_next_token(); let key = Box::new(parse_exp(lexer)?); if !lexer.check_next_token(Token::SepRbrack) { let line = lexer.current_line(); return Err(Error::NotMatchBrackets { line }); } let last_line = lexer.current_line(); exp = Box::new(Exp::TableAccess(exp, key, last_line)); } Ok(Token::SepDot) => { lexer.skip_next_token(); let name = lexer.next_ident()?; let line = lexer.current_line(); let key = Box::new(Exp::String(name, line)); let last_line = line; exp = Box::new(Exp::TableAccess(exp, key, last_line)); } Ok(Token::SepColon) | Ok(Token::SepLparen) | Ok(Token::SepLcurly) | Ok(Token::String(_)) => { // [`:` Name] args exp = Box::new(_parse_fn_call_exp(lexer, exp)?); } _ => { return Ok(*exp); } } } } fn parse_parens_exp(lexer: &mut impl Lex) -> Result<Exp> { if !lexer.check_next_token(Token::SepLparen) { return Err(Error::IllegalExpression { line: lexer.current_line() }); } let exp = parse_exp(lexer)?; if !lexer.check_next_token(Token::SepRparen) { let line = lexer.current_line(); return Err(Error::NotMatchBrackets { line }); } // The semantics of vararg and fn call will be changed by parens let exp = match exp { exp @ Exp::Vararg(_) => Exp::Parens(Box::new(exp)), exp @ Exp::FnCall(_) => Exp::Parens(Box::new(exp)), exp @ Exp::Name(_, _) => Exp::Parens(Box::new(exp)), exp @ Exp::TableAccess(_, _, _) => Exp::Parens(Box::new(exp)), _ => exp, }; Ok(exp) } fn _parse_fn_call_exp(lexer: &mut impl Lex, prefix_exp: Box<Exp>) -> Result<Exp> { // [`:` Name] let name_exp = _parse_fn_name_exp(lexer).ok(); let line = lexer.current_line(); // args let args = _parse_fn_call_args(lexer)?; let last_line = lexer.current_line(); Ok(Exp::FnCall(FnCall::new(prefix_exp, name_exp, args, line, last_line))) } fn _parse_fn_name_exp(lexer: &mut impl Lex) -> Result<Box<Exp>> { if let Ok(Token::SepColon) = lexer.look_ahead() { lexer.skip_next_token(); let val = lexer.next_ident()?; let line = lexer.current_line(); Ok(Box::new(Exp::String(val, line))) } else { // just represent a option token Err(Error::NoMoreTokens { line: lexer.current_line() }) } } fn _parse_fn_call_args(lexer: &mut impl Lex) -> Result<Vec<Exp>> { match lexer.look_ahead() { // (arg1, arg2 ...) 
Ok(Token::SepLparen) => { lexer.skip_next_token(); if let Ok(Token::SepRparen) = lexer.look_ahead() { lexer.skip_next_token(); Ok(vec![]) } else { let exp = parse_exp_list(lexer); if !lexer.check_next_token(Token::SepRparen) { let line = lexer.current_line(); Err(Error::NotMatchBrackets { line }) } else { exp } } } // function print_prices(table) // print("The clothes costs " .. table.medium) //end Ok(Token::SepLcurly) => { Ok(vec![parse_table_constructor_exp(lexer)?]) } // LiteralString: print "2" "3" "3" Ok(Token::String(val)) => { lexer.skip_next_token(); let line = lexer.current_line(); Ok(vec![Exp::String(val, line)]) } _ => { Err(Error::IllegalFnCall { line: lexer.current_line() }) } } } fn _parse_field_list(lexer: &mut impl Lex) -> Result<Vec<Field>> { let mut fields = vec![]; if let Ok(Token::SepRcurly) = lexer.look_ahead() { return Ok(fields); } let (k, v) = _parse_field(lexer)?; fields.push(Field::new(k, v)); while _is_field_sep(lexer.look_ahead()) { lexer.skip_next_token(); // when meet `}` match lexer.look_ahead() { Ok(Token::SepRcurly) => { break; } _ => { let (k, v) = _parse_field(lexer)?; fields.push(Field::new(k, v)); } } } Ok(fields) } fn _parse_field(lexer: &mut impl Lex) -> Result<(Option<Exp>, Exp)> { // field ::= `[` exp `]` `=` exp | Name `=` exp | exp if let Ok(Token::SepLbrack) = lexer.look_ahead() { lexer.skip_next_token(); let key = parse_exp(lexer)?; if !lexer.check_next_token(Token::SepRbrack) { let line = lexer.current_line(); return Err(Error::NotMatchBrackets { line }); } if !lexer.check_next_token(Token::OpAssign) { return Err(Error::MissingAssignment { line: lexer.current_line() }); } let val = parse_exp(lexer)?; Ok((Some(key), val)) } else { // `key` or `value` let exp = parse_exp(lexer)?; if let Exp::Name(ref val, line) = exp { if let Ok(Token::OpAssign) = lexer.look_ahead() { lexer.skip_next_token(); let key = Exp::String(val.to_string(), line); let val = parse_exp(lexer)?; return Ok((Some(key), val)); } } Ok((None, exp)) } } fn _parse_par_list(lexer: &mut impl Lex, is_vararg: &mut bool) -> Result<Vec<String>> { let mut params = vec![]; match lexer.look_ahead() { Ok(Token::SepRparen) => { return Ok(params); } Ok(Token::VarArg) => { lexer.skip_next_token(); *is_vararg = true; return Ok(params); } _ => {} } params.push(lexer.next_ident()?); while let Ok(Token::SepComma) = lexer.look_ahead() { lexer.skip_next_token(); match lexer.look_ahead() { Ok(Token::Identifier(s)) => { params.push(s); lexer.skip_next_token(); } Ok(Token::VarArg) => { *is_vararg = true; break; } _ => { return Err(Error::IllegalFunction { line: lexer.current_line() }); } } } Ok(params) } #[inline] fn _is_return_or_block_end(tok: Result<Token>) -> bool { match tok { Err(Error::EOF { line: _ }) | Ok(Token::KwReturn) | Ok(Token::KwEnd) | Ok(Token::KwElse) | Ok(Token::KwElseIf) | Ok(Token::KwUntil) => true, _ => false, } } #[inline] fn _is_var_exp(exp: &Exp) -> bool { match exp { Exp::Name(_, _) => true, Exp::TableAccess(_, _, _) => true, _ => false, } } #[inline] fn _is_field_sep(tok: Result<Token>) -> bool { match tok { Ok(Token::SepComma) | Ok(Token::SepSemi) => true, _ => false, } } #[cfg(test)] mod tests { use std::string::ToString; use super::*; #[test] fn test_parse() { let s = r##" local g = { a = 1, b = {} } -- comment local a = true and false or false or not true local b = ((1 | 2) & 3) >> 1 << 1 local c = (3 + 2 - 1) * (5 % 2) // 2 / 2 ^ 2 local d = not not not not not false local e = - - - - -1 local f = ~ ~ ~ ~ ~1 function preloadSearcher(modname) if package.preload[modname] ~= 
nil then return package.preload[modname] else return 1 end end package.preload.mymod = function(modname) local loader = function(modname, extra) print("loading") end return loader, "" end co = function () print("hello") end function hello() function world() end end "##.to_string(); let mut lexer = Lexer::from_iter(s.into_bytes(), "test".to_string()); parse_block(&mut lexer).expect("parse error"); let s = r##" local g = { a = 1, b = {} }"##.to_string(); let mut lexer = Lexer::from_iter(s.into_bytes(), "test".to_string()); let block = parse_block(&mut lexer).expect("parse error"); // println!("{:#?}", block); } }
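// --- Editorial addition (not part of the original file): usage sketch. ---
// Mirrors the pattern already used in this module's own tests: build a Lexer
// over the source bytes and hand it to parse_block to obtain the Lua AST.
// The chunk name "chunk" and source string are arbitrary examples.
//
// let src = "local x = 1 + 2".to_string();
// let mut lexer = Lexer::from_iter(src.into_bytes(), "chunk".to_string());
// let block = parse_block(&mut lexer).expect("parse error");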
true
9a4ae97f4565411958be131400bbd2bbe58f1f41
Rust
apache/thrift
/lib/rs/src/server/mod.rs
UTF-8
4,210
2.625
3
[ "Apache-2.0", "MIT", "FSFAP", "LicenseRef-scancode-public-domain-disclaimer", "BSD-3-Clause" ]
permissive
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

//! Types used to implement a Thrift server.

use crate::protocol::{TInputProtocol, TMessageIdentifier, TMessageType, TOutputProtocol};
use crate::{ApplicationError, ApplicationErrorKind};

mod multiplexed;
mod threaded;

pub use self::multiplexed::TMultiplexedProcessor;
pub use self::threaded::TServer;

/// Handles incoming Thrift messages and dispatches them to the user-defined
/// handler functions.
///
/// An implementation is auto-generated for each Thrift service. When used by a
/// server (for example, a `TSimpleServer`), it will demux incoming service
/// calls and invoke the corresponding user-defined handler function.
///
/// # Examples
///
/// Create and start a server using the auto-generated `TProcessor` for
/// a Thrift service `SimpleService`.
///
/// ```no_run
/// use thrift::protocol::{TInputProtocol, TOutputProtocol};
/// use thrift::server::TProcessor;
///
/// //
/// // auto-generated
/// //
///
/// // processor for `SimpleService`
/// struct SimpleServiceSyncProcessor;
/// impl SimpleServiceSyncProcessor {
///     fn new<H: SimpleServiceSyncHandler>(processor: H) -> SimpleServiceSyncProcessor {
///         unimplemented!();
///     }
/// }
///
/// // `TProcessor` implementation for `SimpleService`
/// impl TProcessor for SimpleServiceSyncProcessor {
///     fn process(&self, i: &mut dyn TInputProtocol, o: &mut dyn TOutputProtocol) -> thrift::Result<()> {
///         unimplemented!();
///     }
/// }
///
/// // service functions for SimpleService
/// trait SimpleServiceSyncHandler {
///     fn service_call(&self) -> thrift::Result<()>;
/// }
///
/// //
/// // user-code follows
/// //
///
/// // define a handler that will be invoked when `service_call` is received
/// struct SimpleServiceHandlerImpl;
/// impl SimpleServiceSyncHandler for SimpleServiceHandlerImpl {
///     fn service_call(&self) -> thrift::Result<()> {
///         unimplemented!();
///     }
/// }
///
/// // instantiate the processor
/// let processor = SimpleServiceSyncProcessor::new(SimpleServiceHandlerImpl {});
///
/// // at this point you can pass the processor to the server
/// // let server = TServer::new(..., processor);
/// ```
pub trait TProcessor {
    /// Process a Thrift service call.
    ///
    /// Reads arguments from `i`, executes the user's handler code, and writes
    /// the response to `o`.
    ///
    /// Returns `()` if the handler was executed; `Err` otherwise.
    fn process(&self, i: &mut dyn TInputProtocol, o: &mut dyn TOutputProtocol) -> crate::Result<()>;
}

/// Convenience function used in generated `TProcessor` implementations to
/// return an `ApplicationError` if thrift message processing failed.
pub fn handle_process_result(
    msg_ident: &TMessageIdentifier,
    res: crate::Result<()>,
    o_prot: &mut dyn TOutputProtocol,
) -> crate::Result<()> {
    if let Err(e) = res {
        let e = match e {
            crate::Error::Application(a) => a,
            _ => ApplicationError::new(ApplicationErrorKind::Unknown, format!("{:?}", e)),
        };

        let ident = TMessageIdentifier::new(
            msg_ident.name.clone(),
            TMessageType::Exception,
            msg_ident.sequence_number,
        );

        o_prot.write_message_begin(&ident)?;
        crate::Error::write_application_error_to_out_protocol(&e, o_prot)?;
        o_prot.write_message_end()?;
        o_prot.flush()
    } else {
        Ok(())
    }
}
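// --- Editorial addition (not part of the original file): usage sketch. ---
// Rough shape of how generated code could wrap a handler call with
// `handle_process_result`; `SimpleServiceSyncHandler` is borrowed from the
// doc example above and `process_service_call` is a hypothetical name, so
// the sketch is left commented.
//
// fn process_service_call<H: SimpleServiceSyncHandler>(
//     handler: &H,
//     msg_ident: &TMessageIdentifier,
//     o: &mut dyn TOutputProtocol,
// ) -> crate::Result<()> {
//     let res = handler.service_call();        // invoke the user's handler
//     handle_process_result(msg_ident, res, o) // on Err, write an Exception reply
// }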
true
43562648e7a367b5836474fb3d9813e2953c3268
Rust
EmperorYP7/rust-fractal
/src/main.rs
UTF-8
2,766
2.84375
3
[ "Apache-2.0" ]
permissive
use error_chain::error_chain;
use image::{ImageBuffer, Pixel, Rgb};
use num::complex::Complex;
use std::sync::mpsc::{channel, RecvError};
use threadpool::ThreadPool;

error_chain! {
    foreign_links {
        MpscRecv(RecvError);
        Io(std::io::Error);
    }
}

// Function converting intensity values to RGB
// Based on http://www.efg2.com/Lab/ScienceAndEngineering/Spectra.htm
fn wavelength_to_rgb(wavelength: u32) -> Rgb<u8> {
    let wave = wavelength as f32;

    let (r, g, b) = match wavelength {
        380..=439 => ((440. - wave) / (440. - 380.), 0.0, 1.0),
        440..=489 => (0.0, (wave - 440.) / (490. - 440.), 1.0),
        490..=509 => (0.0, 1.0, (510. - wave) / (510. - 490.)),
        510..=579 => ((wave - 510.) / (580. - 510.), 1.0, 0.0),
        580..=644 => (1.0, (645. - wave) / (645. - 580.), 0.0),
        645..=780 => (1.0, 0.0, 0.0),
        _ => (0.0, 0.0, 0.0),
    };

    let factor = match wavelength {
        380..=419 => 0.3 + 0.7 * (wave - 380.) / (420. - 380.),
        701..=780 => 0.3 + 0.7 * (780. - wave) / (780. - 700.),
        _ => 1.0,
    };

    let (r, g, b) = (
        normalize(r, factor),
        normalize(g, factor),
        normalize(b, factor),
    );
    Rgb::from_channels(r, g, b, 0)
}

// Maps Julia set distance estimation to intensity values
fn julia(c: Complex<f32>, x: u32, y: u32, width: u32, height: u32, max_iter: u32) -> u32 {
    let width = width as f32;
    let height = height as f32;

    let mut z = Complex {
        // scale and translate the point to image coordinates
        re: 3.0 * (x as f32 - 0.5 * width) / width,
        im: 2.0 * (y as f32 - 0.5 * height) / height,
    };

    let mut i = 0;
    for t in 0..max_iter {
        if z.norm() >= 2.0 {
            break;
        }
        z = z * z + c;
        i = t;
    }
    i
}

// Normalizes color intensity values within RGB range
fn normalize(color: f32, factor: f32) -> u8 {
    ((color * factor).powf(0.8) * 255.) as u8
}

fn main() -> Result<()> {
    let (width, height) = (1920, 1080);
    let mut img = ImageBuffer::new(width, height);
    let iterations = 300;

    let c = Complex::new(-0.8, 0.156);

    let pool = ThreadPool::new(num_cpus::get());
    let (tx, rx) = channel();

    for y in 0..height {
        let tx = tx.clone();
        pool.execute(move || {
            for x in 0..width {
                let i = julia(c, x, y, width, height, iterations);
                let pixel = wavelength_to_rgb(380 + i * 400 / iterations);
                tx.send((x, y, pixel)).expect("Could not send data!");
            }
        });
    }

    for _ in 0..(width * height) {
        let (x, y, pixel) = rx.recv()?;
        img.put_pixel(x, y, pixel);
    }

    let _ = img.save("output.png");
    Ok(())
}
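// --- Editorial addition (not part of the original file): sanity-check sketch. ---
// Shows the intent of the wavelength -> colour mapping in isolation; exact
// channel values depend on the `image` crate version and the gamma applied in
// `normalize`, so the sketch is left commented.
//
// let red = wavelength_to_rgb(700);       // long wavelengths map to pure red
// let ch = red.channels();                // Pixel::channels() -> &[u8; ..]
// assert!(ch[0] > 0 && ch[1] == 0 && ch[2] == 0);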
true
a3c9c2bcb0cae37604a27c78f11a35014038db04
Rust
hustfisher/starcoin
/config/src/sync_config.rs
UTF-8
2,324
2.640625
3
[ "Apache-2.0" ]
permissive
use crate::{BaseConfig, ChainNetwork, ConfigModule, StarcoinOpt};
use anyhow::{format_err, Result};
use serde::{Deserialize, Serialize};
use starcoin_logger::prelude::*;
use std::fmt::{Display, Formatter};
use std::str::FromStr;

#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(default, deny_unknown_fields)]
pub struct SyncConfig {
    sync_mode: SyncMode,
}

impl SyncConfig {
    pub fn is_state_sync(&self) -> bool {
        self.sync_mode == SyncMode::FAST
    }

    pub fn is_light(&self) -> bool {
        self.sync_mode == SyncMode::LIGHT
    }

    //just for test
    pub fn fast_sync_mode(&mut self) {
        self.sync_mode = SyncMode::FAST;
    }

    pub fn full_sync_mode(&mut self) {
        self.sync_mode = SyncMode::FULL;
    }
}

impl ConfigModule for SyncConfig {
    fn default_with_net(net: ChainNetwork) -> Self {
        SyncConfig {
            sync_mode: if net.is_dev() {
                SyncMode::FULL
            } else {
                SyncMode::FAST
            },
        }
    }

    fn load(&mut self, base: &BaseConfig, opt: &StarcoinOpt) -> Result<()> {
        self.sync_mode = if base.net.is_dev() {
            SyncMode::FULL
        } else {
            opt.sync_mode.clone()
        };
        info!("sync mode : {:?} : {:?}", opt.sync_mode, self.sync_mode);
        Ok(())
    }
}

impl Default for SyncConfig {
    fn default() -> Self {
        SyncConfig::default_with_net(ChainNetwork::default())
    }
}

#[allow(non_camel_case_types)]
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "type")]
pub enum SyncMode {
    LIGHT,
    FAST,
    FULL,
}

impl FromStr for SyncMode {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "light" => Ok(SyncMode::LIGHT),
            "fast" => Ok(SyncMode::FAST),
            "full" => Ok(SyncMode::FULL),
            _ => Err(format_err!("")),
        }
    }
}

impl Display for SyncMode {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            SyncMode::LIGHT => write!(f, "light"),
            SyncMode::FAST => write!(f, "fast"),
            SyncMode::FULL => write!(f, "full"),
        }
    }
}

impl Default for SyncMode {
    fn default() -> Self {
        SyncMode::FULL
    }
}
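// --- Editorial addition (not part of the original file): a minimal sketch
// exercising only the FromStr/Display pair defined above.
#[cfg(test)]
mod sync_mode_round_trip_sketch {
    use super::*;

    #[test]
    fn parse_and_display_round_trip() {
        let mode: SyncMode = "fast".parse().expect("known mode string");
        assert_eq!(mode, SyncMode::FAST);
        assert_eq!(mode.to_string(), "fast");
        assert!("turbo".parse::<SyncMode>().is_err()); // unknown modes are rejected
    }
}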
true
0303c37e9c69b97bbf68f62b5c52ee862f43e2eb
Rust
ciaran2/feo3boy
/web-debugger/src/main.rs
UTF-8
15,041
2.515625
3
[ "MIT" ]
permissive
use std::collections::BTreeMap; use std::num::NonZeroU32; use std::rc::Rc; use feo3boy::gb::Gb; use feo3boy::memdev::{BiosRom, Cartridge}; use gloo::storage::errors::StorageError; use gloo::storage::{LocalStorage, Storage}; use gloo::timers::callback::Timeout; use log::warn; use owning_ref::RcRef; use serde::{Deserialize, Serialize}; use wasm_bindgen::JsCast; use web_sys::HtmlInputElement; use yew::prelude::*; use breakpoints::{Breakpoint, Breakpoints}; use derefs::{ComputedDerefs, Derefs}; use memview::Memview; use regs::Regs; use romload::{RomFile, RomLoader, SavedRom}; use serial::Serial; use speedctl::SpeedCtl; mod breakpoints; mod bytesup; mod derefs; mod instrs; mod memview; mod regs; mod romload; mod serial; mod speedctl; trait CompareAssign { /// "Not Equals - Assign". Apply a change to self. If `self == val`, return `false` /// (no change applied), otherwise set self to `val` and return `true` (change made). fn ne_assign(&mut self, val: Self) -> bool; } impl<T: PartialEq> CompareAssign for T { fn ne_assign(&mut self, val: Self) -> bool { if *self != val { *self = val; true } else { false } } } const SAVED_STATE_KEY: &str = "feo3boy.webdebugger.savestate-v1"; /// Saved debugger state. #[derive(Default, Serialize, Deserialize)] struct SaveState { #[serde(default)] bios: Option<SavedRom>, #[serde(default)] cart: Option<SavedRom>, #[serde(default)] breakpoints: BTreeMap<u16, Breakpoint>, } /// Tick settings controls how fast the emulator runs when auto-ticking. #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum AutoTickSpeed { /// Delay sets a delay between Delay { ms: NonZeroU32, }, Single, Repeat { iterations: NonZeroU32, }, } impl AutoTickSpeed { /// Number of milliseconds to delay between ticks at this speed. fn delay_ms(self) -> u32 { match self { Self::Delay { ms } => ms.get(), _ => 0, } } /// Number of iterations to run per tick at this speed. fn iterations(self) -> u32 { match self { Self::Repeat { iterations } => iterations.get(), _ => 1, } } } impl Default for AutoTickSpeed { fn default() -> Self { Self::Single } } enum Msg { /// Set the bios rom to use. Resets the GB. SetBios { bios: Option<RomFile<BiosRom>>, }, /// Set the cartridge rom to use. Resets the GB. SetCart { cart: Option<RomFile<Cartridge>>, }, /// Start or stop automatic ticking. SetRunning { should_run: bool, }, /// Run one tick. If singlestep is true, the `running` property will be ignored (and /// cleared if set). Tick { singlestep: bool, }, Reset, /// Sets the iteration speed. SetSpeed { speed: AutoTickSpeed, }, /// Add a breakpoint at the specified address. If one is set, replace it. AddBreakpoint { addr: u16, name: String, enabled: bool, }, /// Delete a breakpoint at the specified address. DeleteBreakpoint { addr: u16, }, /// Set the enabled state of a breakpoint at the specified address. ToggleBreakpoint { addr: u16, new_enabled: bool, }, } struct App { /// Bios to load into the GB. bios: Option<RomFile<BiosRom>>, /// Cartridge to load into the GB. cart: Option<RomFile<Cartridge>>, /// GB instance being run. gb: Rc<Gb>, /// Next pending tick. pending_tick: Option<Timeout>, /// Breakpoints for the program. breakpoints: Rc<BTreeMap<u16, Breakpoint>>, /// Serial data output by the program. serial: Rc<String>, /// Speed to run auto-ticking at. auto_tick_speed: AutoTickSpeed, } impl App { /// Rebuild the GB for new roms. 
fn reset_roms(&mut self) { self.gb = Rc::new(Gb::new(self.bios(), self.cart())); self.serial = Rc::new(String::new()); } /// Clone the selected bios rom or get the default bios. fn bios(&self) -> BiosRom { self.bios .as_ref() .map(|bios| bios.rom.clone()) .unwrap_or_default() } /// Clone the selected cartridge rom or get the default cartridge. fn cart(&self) -> Cartridge { self.cart .as_ref() .map_or_else(|| Cartridge::None, |cart| cart.rom.clone()) } fn save_state(&self) { let saved_state = SaveState { bios: self.bios.as_ref().map(|bios| bios.clone_for_save()), cart: self.cart.as_ref().map(|cart| cart.clone_for_save()), breakpoints: self.breakpoints.as_ref().clone(), }; if let Err(e) = LocalStorage::set(SAVED_STATE_KEY, &saved_state) { warn!("Unable to save database: {}", e); } } /// Schedule an auto-tick on self. fn schedule_auto_tick(&mut self, ctx: &Context<Self>) { let step = ctx.link().callback(|_| Msg::Tick { singlestep: false }); self.pending_tick = Some(Timeout::new(self.auto_tick_speed.delay_ms(), move || { step.emit(()) })); } } impl Component for App { type Message = Msg; type Properties = (); fn create(_ctx: &Context<Self>) -> Self { let saved_state: SaveState = LocalStorage::get(SAVED_STATE_KEY).unwrap_or_else(|e| { if !matches!(e, StorageError::KeyNotFound(_)) { warn!("Failed to load database: {}", e); } Default::default() }); let mut app = Self { bios: saved_state .bios .and_then(|bios| match RomFile::<BiosRom>::try_from(bios) { Ok(rom) => Some(rom), Err(e) => { warn!("Failed to parse saved rom: {}", e); None } }), cart: saved_state .cart .and_then(|bios| match RomFile::<Cartridge>::try_from(bios) { Ok(rom) => Some(rom), Err(e) => { warn!("Failed to parse saved rom: {}", e); None } }), gb: Rc::new(Gb::new(BiosRom::default(), Cartridge::None)), pending_tick: None, breakpoints: Rc::new(saved_state.breakpoints), serial: Rc::new(String::new()), auto_tick_speed: Default::default(), }; app.reset_roms(); app } fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool { match msg { Msg::SetBios { bios } => { self.bios = bios; self.save_state(); self.reset_roms(); self.pending_tick.take().map(|tick| tick.cancel()); true } Msg::SetCart { cart } => { self.cart = cart; self.save_state(); self.reset_roms(); self.pending_tick.take().map(|tick| tick.cancel()); true } Msg::SetRunning { should_run } => { if should_run && self.pending_tick.is_none() { self.schedule_auto_tick(ctx); true } else if !should_run && self.pending_tick.is_some() { self.pending_tick.take().unwrap().cancel(); true } else { false } } Msg::Tick { singlestep: true } => { self.pending_tick.take().map(|tick| tick.cancel()); let gb = Rc::make_mut(&mut self.gb); gb.tick(); let serial_out = gb.serial.stream.receive_bytes(); if serial_out.len() != 0 { let serial = Rc::make_mut(&mut self.serial); serial.extend(serial_out.map(|b| b as char)); } true } Msg::Tick { singlestep: false } => { { // For performance (to avoid the overhead of continuously cloning the // gb state and rerendering the whole page), run multiple ticks per // web timer step at high auto-tick speeds. 
let gb = Rc::make_mut(&mut self.gb); for _ in 0..self.auto_tick_speed.iterations() { gb.tick(); match self.breakpoints.get(&gb.cpustate.regs.pc) { Some(breakpoint) if breakpoint.enabled => break, _ => {} } } let serial_out = gb.serial.stream.receive_bytes(); if serial_out.len() != 0 { let serial = Rc::make_mut(&mut self.serial); serial.extend(serial_out.map(|b| b as char)); } } match self.breakpoints.get(&self.gb.cpustate.regs.pc) { Some(breakpoint) if breakpoint.enabled => self.pending_tick = None, _ => self.schedule_auto_tick(ctx), } true } Msg::Reset => { self.pending_tick.take().map(|tick| tick.cancel()); self.reset_roms(); true } Msg::SetSpeed { speed } => { self.auto_tick_speed = speed; if let Some(tick) = self.pending_tick.take() { tick.cancel(); self.schedule_auto_tick(ctx); } true } Msg::AddBreakpoint { addr, name, enabled, } => { let breakpoints = Rc::make_mut(&mut self.breakpoints); breakpoints.insert(addr, Breakpoint { name, enabled }); self.save_state(); true } Msg::DeleteBreakpoint { addr } => { if Rc::make_mut(&mut self.breakpoints).remove(&addr).is_some() { self.save_state(); true } else { false } } Msg::ToggleBreakpoint { addr, new_enabled } => { if let Some(bp) = Rc::make_mut(&mut self.breakpoints).get_mut(&addr) { if bp.enabled != new_enabled { bp.enabled = new_enabled; self.save_state(); true } else { false } } else { false } } } } fn view(&self, ctx: &yew::Context<Self>) -> yew::Html { let link = ctx.link(); let setbios = link.callback(|bios| Msg::SetBios { bios }); let setcart = link.callback(|cart| Msg::SetCart { cart }); let run = link.callback(|_| Msg::SetRunning { should_run: true }); let pause = link.callback(|_| Msg::SetRunning { should_run: false }); let step = link.callback(|_| Msg::Tick { singlestep: true }); let reset = link.callback(|_| Msg::Reset); let changespeed = link.callback(|speed| Msg::SetSpeed { speed }); let add_breakpoint = link.callback(|(addr, name, enabled)| Msg::AddBreakpoint { addr, name, enabled, }); let delete_breakpoint = link.callback(|addr| Msg::DeleteBreakpoint { addr }); let toggle_breakpoint = link.callback(|(addr, new_enabled)| Msg::ToggleBreakpoint { addr, new_enabled }); html! 
{ <div class="App"> <div class="header"> <h1>{"feo3boy debugger"}</h1> </div> <div class="body row"> <div class="column scroll main"> <div class="romselect column smallgap"> <h3>{"ROM Selection"}</h3> <div class="row biggap"> <RomLoader<BiosRom> onchange={setbios} input_id="bios-load" label="BIOS" current={self.bios.as_ref().map(RomFile::info)} /> <RomLoader<Cartridge> onchange={setcart} input_id="cart-load" label="Cartridge" current={self.cart.as_ref().map(RomFile::info)} /> </div> </div> <div class="row"> <div class="column"> <div class="runcontrols"> <div class="playbuttons"> <button onclick={reset}>{"Reset"}</button> if self.pending_tick.is_none() { <button onclick={run}>{"Run"}</button> <button onclick={step}>{"Step"}</button> } else { <button onclick={pause}>{"Pause"}</button> <button disabled=true>{"Step"}</button> } </div> <SpeedCtl speed={self.auto_tick_speed} {changespeed} /> </div> <Regs regs={self.gb.cpustate.regs.clone()} ime={self.gb.cpustate.interrupt_master_enable} /> </div> <div class="column"> <Derefs derefs={ComputedDerefs::from(&*self.gb)} /> </div> <div class="column"> <Breakpoints breakpoints={self.breakpoints.clone()} {add_breakpoint} {delete_breakpoint} {toggle_breakpoint} /> </div> </div> <Serial serial_output={self.serial.clone()} /> </div> <div class="column scroll main"> <Memview mem={RcRef::new(self.gb.clone()).map(|gb| gb.mmu.as_ref())} /> </div> </div> </div> } } } fn get_value_from_input_event(e: InputEvent) -> String { let target: HtmlInputElement = e.target().unwrap().dyn_into().unwrap(); target.value() } fn main() { console_log::init_with_level(log::Level::Info).expect("Unable to init logger"); yew::Renderer::<App>::new().render(); }
true
3a5cdb3d909fcc8683102d377f1e7f8448a90106
Rust
ravendyg/rust-playground
/src/_3main.rs
UTF-8
1,177
3.546875
4
[]
no_license
struct Foo<'a> {
    x: &'a mut i32,
}

impl<'a> Foo<'a> {
    fn x(&self) -> &i32 {
        self.x
    }
}

fn main() {
    println!("working");

    let mut v = vec![1, 2, 3];
    v.push(10);

    let r = refff(&v);
    let m = borrow(&mut v);

    for i in &mut v {
        *i += 1;
    }

    for i in &v {
        println!("{}", i);
    }

    let mut x = 1;
    {
        let y = &mut x;
        *y = 10;
        // println!("x: {}, y: {}", x, y); // won't work with a borrowed binding
        println!("y: {}", y);
    }
    // *y = 10;
    println!("x: {}", x);

    let i = 1;
    let r;
    {
        r = &i;
    }
    println!("{}", r);

    let line = "lang:en=Hello World!";
    let lang = "en";

    let v;
    {
        let p = format!("lang:{}=", lang); // -+ p goes into scope
        v = skip_prefix(line, p.as_str()); //  |
    }                                      // -+ p goes out of scope
    println!("{}", v);

    let y = &mut 18;
    let f = Foo { x: y };
    *f.x += 1;
    // *y += 1;
    println!("{}", f.x());
}

fn skip_prefix<'a, 'b>(line: &'a str, prefix: &'b str) -> &'a str {
    line
}

fn refff(v: &Vec<i32>) -> i32 {
    42
}

fn borrow(v: &mut Vec<i32>) -> i32 {
    v[1] = 42;
    42
}

fn take(v: Vec<i32>) {
}
true
a635f47b9c1ecae458b8fe6f6b90e81028aeaf4d
Rust
webern/exile
/src/parser/pi.rs
UTF-8
3,842
3.03125
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::parser::error::Result;
use crate::parser::Iter;
use crate::Pi;

/// The iter should be pointing to the opening `<` of a processing instruction.
pub(crate) fn parse_pi_logic(iter: &mut Iter<'_>) -> Result<(String, String)> {
    expect!(iter, '<')?;
    iter.advance_or_die()?;
    expect!(iter, '?')?;
    iter.advance_or_die()?;

    // handle the special case <??>
    if iter.is('?') {
        iter.advance_or_die()?;
        expect!(iter, '>')?;
        iter.advance();
        return Ok(("".into(), "".into()));
    }

    let target = parse_pi_target(iter)?;
    let mut data = String::new();
    loop {
        if iter.is('?') && iter.peek_is('>') {
            iter.advance_or_die()?;
            iter.advance();
            break;
        }
        data.push(iter.st.c);
        iter.advance_or_die()?;
    }
    Ok((target, data))
}

/// Must be a valid name terminated by whitespace.
fn parse_pi_target(iter: &mut Iter<'_>) -> Result<String> {
    if !iter.is_name_start_char() {
        return parse_err!(iter, "expected name start char, found '{}'", iter.st.c);
    }
    let mut name = String::new();
    name.push(iter.st.c);
    iter.advance_or_die()?;
    loop {
        if iter.is_whitespace() {
            iter.advance_or_die()?;
            break;
        } else if iter.is('?') {
            // e.g. <?target??
            break;
        } else if !iter.is_name_char() {
            return parse_err!(iter, "expected name char, found '{}'", iter.st.c);
        } else {
            name.push(iter.st.c);
        }
        iter.advance_or_die()?;
    }
    Ok(name)
}

/// The iter should be pointing to the opening `<` of a processing instruction.
pub(crate) fn parse_pi(iter: &mut Iter<'_>) -> Result<Pi> {
    let (target, data) = parse_pi_logic(iter)?;
    Ok(Pi::new_unchecked(target, data))
}

////////////////////////////////////////////////////////////////////////////////////////////////////

#[test]
fn parse_pi_easy() {
    let pi_str = "<?target data?>";
    let mut iter = Iter::new(pi_str).unwrap();
    let pi = parse_pi(&mut iter).unwrap();
    assert_eq!("target", pi.target());
    assert_eq!("data", pi.data());
    assert!(!iter.advance());
}

#[test]
fn parse_pi_peasy() {
    let pi_str = "<?target data?>X";
    let mut iter = Iter::new(pi_str).unwrap();
    let pi = parse_pi(&mut iter).unwrap();
    assert_eq!("target", pi.target());
    assert_eq!("data", pi.data());
    assert!(iter.is('X'));
}

#[test]
fn parse_pi_funky_1() {
    let pi_str = "<?pi some data ? >";
    let mut iter = Iter::new(pi_str).unwrap();
    let parse_result = parse_pi(&mut iter);
    assert!(parse_result.is_err());
}

#[test]
fn parse_pi_funky_2() {
    let pi_str = "<??>";
    let mut iter = Iter::new(pi_str).unwrap();
    let pi = parse_pi(&mut iter).unwrap();
    assert_eq!("", pi.target());
    assert!(pi.data().is_empty());
}

#[test]
fn parse_pi_funky_3() {
    // established as not-well-formed by jclark_not_wf_sa_003.xml
    let pi_str = "<? ?>";
    let mut iter = Iter::new(pi_str).unwrap();
    let parse_result = parse_pi(&mut iter);
    assert!(parse_result.is_err());
}

#[test]
fn parse_pi_funky_4() {
    let pi_str = "< ? ? >";
    let mut iter = Iter::new(pi_str).unwrap();
    let parse_result = parse_pi(&mut iter);
    assert!(parse_result.is_err());
}

#[test]
fn parse_pi_funky_5() {
    let pi_str = "<?bones?>";
    let mut iter = Iter::new(pi_str).unwrap();
    let pi = parse_pi(&mut iter).unwrap();
    assert_eq!("bones", pi.target());
    assert!(pi.data().is_empty());
}

#[test]
fn parse_pi_funky_6() {
    // this is from jclark_valid_sa_017.xml
    let pi_str = "<?pi some data ? > <??>";
    let mut iter = Iter::new(pi_str).unwrap();
    let pi = parse_pi(&mut iter).unwrap();
    assert_eq!("pi", pi.target());
    assert_eq!("some data ? > <?", pi.data());
}
true
07404b6eecd093771d631fae49088ea898ee897b
Rust
TrustTheRust/rust-actix-realworld-example-app
/src/mdl/article.rs
UTF-8
1,272
2.53125
3
[]
no_license
use chrono::NaiveDateTime;

use crate::schema::{articles, favorite_articles};

#[derive(Debug, Queryable)]
pub struct Article {
    pub id: i32,
    pub author_id: i32,
    pub slug: String,
    pub title: String,
    pub description: String,
    pub body: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

#[derive(Debug, Insertable)]
#[table_name = "articles"]
pub struct NewArticle {
    pub author_id: i32,
    pub slug: String,
    pub title: String,
    pub description: String,
    pub body: String,
}

#[cfg(test)]
impl Default for NewArticle {
    fn default() -> Self {
        NewArticle {
            author_id: 0,
            slug: String::new(),
            title: String::new(),
            description: String::new(),
            body: String::new(),
        }
    }
}

// XXX: One can create an ArticleChange with title but without slug.
// It should be avoided.
#[derive(Debug, AsChangeset)]
#[table_name = "articles"]
pub struct ArticleChange {
    pub slug: Option<String>,
    pub title: Option<String>,
    pub description: Option<String>,
    pub body: Option<String>,
}

#[derive(Debug, Insertable)]
#[table_name = "favorite_articles"]
pub struct NewFavoriteArticle {
    pub user_id: i32,
    pub article_id: i32,
}
true
e66601c6668f88ff93d8455b14a1f5bb5328e0fa
Rust
2color/prisma-engines
/libs/prisma-models/src/order_by.rs
UTF-8
1,083
2.78125
3
[ "Apache-2.0" ]
permissive
use crate::{ModelRef, RelationFieldRef, ScalarFieldRef};
use std::string::ToString;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct OrderBy {
    pub field: ScalarFieldRef,
    pub path: Vec<RelationFieldRef>,
    pub sort_order: SortOrder,
}

impl OrderBy {
    pub fn new(field: ScalarFieldRef, path: Vec<RelationFieldRef>, sort_order: SortOrder) -> Self {
        Self {
            field,
            path,
            sort_order,
        }
    }
}

pub trait IntoOrderBy {
    fn into_order_by(self, model: ModelRef) -> OrderBy;
}

#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash)]
pub enum SortOrder {
    Ascending,
    Descending,
}

impl ToString for SortOrder {
    fn to_string(&self) -> String {
        match self {
            SortOrder::Ascending => String::from("ASC"),
            SortOrder::Descending => String::from("DESC"),
        }
    }
}

impl From<ScalarFieldRef> for OrderBy {
    fn from(field: ScalarFieldRef) -> Self {
        Self {
            field,
            path: vec![],
            sort_order: SortOrder::Ascending,
        }
    }
}
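// --- Editorial addition (not part of the original file): behaviour sketch. ---
// `ScalarFieldRef` comes from the surrounding crate, so this is illustrative
// only: `OrderBy::from(field)` yields an ascending ordering with an empty
// relation path, and the SQL keywords produced are:
//
// assert_eq!(SortOrder::Ascending.to_string(), "ASC");
// assert_eq!(SortOrder::Descending.to_string(), "DESC");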
true
9adb72239553438f8a7de5d63eda71ab55fbec67
Rust
yossan/pdfbat
/src/parser.rs
UTF-8
4,516
2.96875
3
[]
no_license
use std::collections::HashMap;

use crate::stream::{Stream, ReadSeek};
use crate::lexer::{Lexer};
use crate::primitives::Primitives::{self, Null, Int, Str, HexStr, Real, Array, Dict, Ref, Cmd, EOF};
use crate::primitives::Name;
use crate::error::Error;

macro_rules! primitive {
    ($token:expr) => {
        $token.ok_or_else(|| Error::ParserError)
    };
}

pub struct Parser<T> {
    lexer: Lexer<T>,
    allow_streams: bool,
    buf1: Option<Primitives>,
    buf2: Option<Primitives>,
}

impl<T: ReadSeek> Parser<T> {
    pub fn new(lexer: Lexer<T>, allow_streams: bool) -> Self {
        let mut p = Parser {
            lexer: lexer,
            allow_streams: allow_streams,
            buf1: None,
            buf2: None,
        };
        p.refill();
        p
    }

    pub fn buf1(&self) -> Option<Primitives> {
        self.buf1.clone()
    }

    pub fn buf2(&self) -> Option<Primitives> {
        self.buf2.clone()
    }

    fn refill(&mut self) {
        self.buf1 = self.lexer.get_obj().ok();
        self.buf2 = self.lexer.get_obj().ok();
    }

    fn shift(&mut self) -> Option<Primitives> {
        let gone = self.buf1.take();
        if self.buf2 == Primitives::cmd("ID") {
            self.buf1 = self.buf2.take();
            self.buf2 = None;
        } else {
            self.buf1 = self.buf2.take();
            self.buf2 = self.lexer.get_obj().ok();
        }
        gone
    }

    /*
    fn trye_shift() {
    }
    */

    pub fn get_obj(&mut self) -> Result<Primitives, Error> {
        let buf1 = primitive!(self.shift())?;

        if let Cmd(ref cmd) = buf1 {
            /*
            if cmd == b"BI" {
                // inline image
                returns self.make_inline_image();
            }*/
            if cmd == b"[" {
                // array
                let mut array = Vec::new();
                while self.buf1 != Primitives::cmd("]") && self.buf1 != EOF {
                    array.push(self.get_obj()?);
                }
                if self.buf1 == EOF {
                    eprintln!("End of file inside array");
                    return Ok(Array(array));
                }
                self.shift();
                return Ok(Array(array));
            } else if cmd == b"<<" {
                let mut dict = HashMap::<Name, Primitives>::new();
                let mut i = 0;
                while self.buf1 != Primitives::cmd(">>") && self.buf1 != EOF {
                    if primitive!(self.buf1.as_ref())?.is_name() {
                        if let Primitives::Name(name) = primitive!(self.buf1.take())? {
                            self.shift();
                            if self.buf1 == EOF {
                                break;
                            }
                            dict.insert(Name(name.0), self.get_obj()?);
                        }
                    } else {
                        eprintln!("Malformed dictionary: key must be a name object");
                        self.shift();
                        continue;
                    }
                }
                if self.buf1 == EOF {
                    eprintln!("End of file inside dictionary");
                    return Ok(Dict(dict));
                }

                // Stream objects are not allowed inside content streams or object streams.
                if self.buf2.as_ref().unwrap().is_cmd("stream") {
                    if self.allow_streams {
                        return self.make_stream(dict)
                    } else {
                        return Ok(Dict(dict));
                    }
                }
                self.shift();
                return Ok(Dict(dict));
            } else {
                return Ok(buf1);
            }
        }

        if let Some(num1) = buf1.get_integer() {
            if primitive!(self.buf1.as_ref())?.is_integer() && primitive!(self.buf2.as_ref())?.is_cmd("R") {
                if let Int(num2) = primitive!(self.buf1.take())? {
                    self.shift();
                    self.shift();
                    return Ok(Ref(num1 as u32, num2 as u32));
                }
            }
            return Ok(Int(num1));
        }

        if buf1.is_string() {
            // if (cipher_transform) {
            //     // cipherTransform.decrypt_string(buf1)
            // }
            return Ok(buf1);
        }

        // simple object
        Ok(buf1)
    }

    fn make_stream(&self, dict: HashMap<Name, Primitives>) -> Result<Primitives, Error> {
        Err(Error::ParserError)
    }
}
true
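The parser in the record above keeps a two-token lookahead, with `buf1` holding the current object and `buf2` the next one, so it can recognize patterns such as `num num R` references and stop refilling after an `ID` command. Here is a toy sketch of just that buffering scheme; the `TinyLexer` is a stand-in for `crate::lexer::Lexer`, which is not part of this record.

```rust
// Stand-in lexer: yields pre-tokenized Strings instead of PDF primitives.
struct TinyLexer {
    tokens: std::vec::IntoIter<String>,
}

impl TinyLexer {
    fn get_obj(&mut self) -> Option<String> {
        self.tokens.next()
    }
}

// Mirrors Parser::refill()/shift(): buf1 = current token, buf2 = peeked token.
struct TinyParser {
    lexer: TinyLexer,
    buf1: Option<String>,
    buf2: Option<String>,
}

impl TinyParser {
    fn new(mut lexer: TinyLexer) -> Self {
        let buf1 = lexer.get_obj(); // refill()
        let buf2 = lexer.get_obj();
        TinyParser { lexer, buf1, buf2 }
    }

    fn shift(&mut self) -> Option<String> {
        let gone = self.buf1.take();      // hand back the current token
        self.buf1 = self.buf2.take();     // promote the lookahead
        self.buf2 = self.lexer.get_obj(); // pull one more token
        gone
    }
}

fn main() {
    let lexer = TinyLexer {
        tokens: vec!["<<".into(), "/Type".into(), "/Catalog".into(), ">>".into()].into_iter(),
    };
    let mut parser = TinyParser::new(lexer);
    while let Some(tok) = parser.shift() {
        // At every step one token of lookahead stays available in buf1,
        // which is how the real parser spots "n n R" indirect references.
        println!("consumed {:10} lookahead {:?}", tok, parser.buf1);
    }
}
```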
84bbe6ffd6ba2012e1d503d35711c8784799891e
Rust
isgasho/dbcrossbar
/dbcrossbarlib/src/drivers/gs/mod.rs
UTF-8
2,905
2.734375
3
[ "MIT", "Apache-2.0" ]
permissive
//! Support for Google Cloud Storage. use std::{fmt, str::FromStr}; use crate::common::*; use crate::drivers::bigquery::BigQueryLocator; mod local_data; mod prepare_as_destination; mod write_local_data; mod write_remote_data; use local_data::local_data_helper; pub(crate) use prepare_as_destination::prepare_as_destination_helper; use write_local_data::write_local_data_helper; use write_remote_data::write_remote_data_helper; /// Locator scheme for Google Cloud Storage. pub(crate) const GS_SCHEME: &str = "gs:"; #[derive(Clone, Debug)] pub(crate) struct GsLocator { url: Url, } impl GsLocator { /// Access the `gs://` URL in this locator. pub(crate) fn as_url(&self) -> &Url { &self.url } } impl fmt::Display for GsLocator { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.url.fmt(f) } } impl FromStr for GsLocator { type Err = Error; fn from_str(s: &str) -> Result<Self> { if s.starts_with(GS_SCHEME) { let url = s .parse::<Url>() .with_context(|_| format!("cannot parse {}", s))?; if !url.path().starts_with('/') { Err(format_err!("{} must start with gs://", url)) } else if !url.path().ends_with('/') { Err(format_err!("{} must end with a '/'", url)) } else { Ok(GsLocator { url }) } } else { Err(format_err!("expected {} to begin with gs://", s)) } } } impl Locator for GsLocator { fn as_any(&self) -> &dyn Any { self } fn local_data( &self, ctx: Context, _schema: Table, _temporary_storage: TemporaryStorage, ) -> BoxFuture<Option<BoxStream<CsvStream>>> { local_data_helper(ctx, self.url.clone()).into_boxed() } fn write_local_data( &self, ctx: Context, schema: Table, data: BoxStream<CsvStream>, _temporary_storage: TemporaryStorage, if_exists: IfExists, ) -> BoxFuture<BoxStream<BoxFuture<()>>> { write_local_data_helper(ctx, self.url.clone(), schema, data, if_exists) .into_boxed() } fn supports_write_remote_data(&self, source: &dyn Locator) -> bool { // We can only do `write_remote_data` if `source` is a `BigQueryLocator`. // Otherwise, we need to do `write_local_data` like normal. source.as_any().is::<BigQueryLocator>() } fn write_remote_data( &self, ctx: Context, schema: Table, source: BoxLocator, temporary_storage: TemporaryStorage, if_exists: IfExists, ) -> BoxFuture<()> { write_remote_data_helper( ctx, schema, source, self.to_owned(), temporary_storage, if_exists, ) .into_boxed() } }
true
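`GsLocator::from_str` above accepts a locator only if it uses the `gs:` scheme and its path ends with a `/`. The following standalone sketch applies the same two shape checks to plain strings; it deliberately leaves out the crate's `Url` and error types, so it is a simplification for illustration, not the crate's actual parser.

```rust
/// Validate the shape rules enforced by GsLocator::from_str, on a plain &str.
fn validate_gs_locator(s: &str) -> Result<&str, String> {
    const GS_SCHEME: &str = "gs:";
    if !s.starts_with(GS_SCHEME) {
        return Err(format!("expected {} to begin with gs://", s));
    }
    if !s.ends_with('/') {
        return Err(format!("{} must end with a '/'", s));
    }
    Ok(s)
}

fn main() {
    assert!(validate_gs_locator("gs://my-bucket/tmp/").is_ok());
    assert!(validate_gs_locator("gs://my-bucket/tmp").is_err()); // no trailing '/'
    assert!(validate_gs_locator("s3://my-bucket/tmp/").is_err()); // wrong scheme
    println!("gs:// locator shape checks passed");
}
```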
0fc3cf2800aa30a713ad371f2430872ef50dc37e
Rust
jlopezscala/rust-book
/chapter4_ownership/src/the_slice.rs
UTF-8
4,072
4.5
4
[]
no_license
/* SLICE: - Does not have ownership - Let you reference a CONTIGUOUS sequence of element in a collection String slice for example would be a reference to part of a String. String literals (stored inside the binary) are Slices let s = "Hello, world!"; s is &str, a slice pointing that specific point of the binary &str being an immutable reference */ // EXAMPLE /* Write a function that takes a string and returns the first word it finds in that string. If the function doesn’t find a space in the string, the whole string must be one word, so the entire string should be returned. */ fn first_world_main() { // The bad one { let mut s = String::from("hello world"); let word = first_world_example1(&s); // word will get the value 5 s.clear(); // this empties the String, making it equal to "" // word still has the value 5 here, but there's no more string that // we could meaningfully use the value 5 with. word is now totally invalid! // THIS COMPILES -.- !! (of course, word is not connected to the state of s at all!)} } // Using slices help you prevent the above: // BOOM, compile error. { let mut s = String::from("hello world"); let word = first_word_example2_slices(&s); // immutable borrow here s.clear(); // mutable borrow here println!("the first word is: {}", word); // immutable borrow used here // BOOM, compile error. } // The actual way of doing it.. // Now first_word takes a string slice not a reference to a String { let my_string = String::from("hello world"); // first_word works on slices of `String`s let word = first_word_example3_slices(&my_string[..]); let my_string_literal = "hello world"; // first_word works on slices of string literals let word = first_word_example3_slices(&my_string_literal[..]); // Because string literals *are* string slices already, // this works too, without the slice syntax! let word = first_word_example3_slices(my_string_literal); } } fn first_world_example1(s: &String) -> usize { // Returns the index of the end of the word let word_in_bytes = s.as_bytes(); // Converts a string to an array of bytes /* .iter() creates an iterator that returns each of the elements in a collection .enumerate() returns a tuple that look like (index, &element_at_index) Tuple can be unpacked like (i, &item) .enumerate returns references of the items in the collection we use & */ for (i, &item) in word_in_bytes.iter().enumerate() { if item == b' ' { return i; } } s.len() } fn first_word_example2_slices(s: &String) -> &str { let bytes_from_s = s.as_bytes(); for (i, &item) in bytes_from_s.iter().enumerate() { if item == b' ' { return &s[0..i]; } } &s[..] // Much better! } fn first_word_example3_slices(s: &str) -> &str { // Signature change to receive /* Now if we have a string slice we can pass it directly Better to define a function to take a string slice instead of a reference to a string */ let bytes_from_s = s.as_bytes(); for (i, &item) in bytes_from_s.iter().enumerate() { if item == b' ' { return &s[0..i]; } } &s[..] // Even better!! } fn some_slice_examples() { let s = String::from("hello world"); let hello = &s[0..5]; // Reference to a part of s let world = &s[6..11]; // 6 is index to start and 11 to include until 10 (yes, one less) // Following two examples are equals let s = String::from("hello"); let slice = &s[0..2]; let slice = &s[..2]; // Another example let s = String::from("hello"); let len = s.len(); let slice = &s[3..len]; let slice = &s[3..]; let slice = &s[0..len]; // Or take all of it let slice = &s[..]; // with this }
true
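The third variant in the record above is the one to remember: taking `&str` lets the same function serve owned `String`s, slices of them, and string literals alike. A compact, runnable distillation:

```rust
/// Return the first space-delimited word of `s`, or all of `s` if it has no space.
fn first_word(s: &str) -> &str {
    let bytes = s.as_bytes();
    for (i, &item) in bytes.iter().enumerate() {
        if item == b' ' {
            return &s[0..i];
        }
    }
    &s[..]
}

fn main() {
    let owned = String::from("hello world");
    let literal = "hello world";

    assert_eq!(first_word(&owned[..]), "hello"); // explicit slice of a String
    assert_eq!(first_word(&owned), "hello");     // deref coercion: &String -> &str
    assert_eq!(first_word(literal), "hello");    // literals are already &str
    println!("first word: {}", first_word(literal));
}
```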
a49f78a25ab1cbbc63e90dc4e4b1abaf49eedb4f
Rust
malthe/blend2d-rs
/src/font.rs
UTF-8
17,274
2.609375
3
[ "CC-BY-4.0", "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use core::{fmt, slice, str}; use std::{ffi::CString, path::Path}; use crate::{ array::Array, error::{errcode_to_result, Result}, font_defs::*, glyph_buffer::GlyphBuffer, variant::{BlVariantImpl, WrappedBlCore}, DataAccessFlags, Tag, }; /// Font Data #[repr(transparent)] pub struct FontData { core: ffi::BLFontDataCore, } unsafe impl WrappedBlCore for FontData { type Core = ffi::BLFontDataCore; const IMPL_TYPE_INDEX: usize = crate::variant::ImplType::FontData as usize; fn from_core(core: Self::Core) -> Self { FontData { core } } } impl FontData { pub fn list_tags(&self) -> Result<Array<Tag>> { unsafe { let mut arr = Array::<Tag>::new(); errcode_to_result((self.impl_().virt().listTags.unwrap())( self.impl_(), arr.core_mut(), )) .map(|_| arr) } } // FIXME figure out how query tables works /*pub fn query_tables(&self, tags: &[Tag]) -> (FontTable, usize) { unsafe { let mut dst = FontTable { data: ptr::null(), size: 0, }; let n = (self.impl_().virt().queryTables.unwrap())( self.impl_(), &mut dst as *mut _ as *mut _, tags.as_ptr() as *const _ as *const _, tags.len(), ); (dst, n) } }*/ } impl PartialEq for FontData { fn eq(&self, other: &Self) -> bool { unsafe { ffi::blFontDataEquals(self.core(), other.core()) } } } impl Drop for FontData { #[inline] fn drop(&mut self) { unsafe { ffi::blFontDataReset(&mut self.core) }; } } impl fmt::Debug for FontData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FontData").finish() } } /// Font Loader #[repr(transparent)] pub struct FontLoader { core: ffi::BLFontLoaderCore, } unsafe impl WrappedBlCore for FontLoader { type Core = ffi::BLFontLoaderCore; const IMPL_TYPE_INDEX: usize = crate::variant::ImplType::FontLoader as usize; fn from_core(core: Self::Core) -> Self { FontLoader { core } } } impl FontLoader { /// Creates a new font by reading a file at the given path. pub fn from_path<P: AsRef<Path>>(path: P, read_flags: DataAccessFlags) -> Result<Self> { let mut this = Self::from_core(*Self::none()); let path = CString::new(path.as_ref().to_string_lossy().into_owned().into_bytes()).unwrap(); unsafe { errcode_to_result(ffi::blFontLoaderCreateFromFile( this.core_mut(), path.as_ptr(), read_flags.bits(), )) .map(|_| this) } } /// Creates a new font from the given [`Array`]. pub fn from_data_array(data: &Array<u8>) -> Result<Self> { let mut this = Self::from_core(*Self::none()); unsafe { errcode_to_result(ffi::blFontLoaderCreateFromDataArray( this.core_mut(), data.core(), )) .map(|_| this) } } // FIXME lifetimes /*pub fn from_data<R: AsRef<[u8]>>(data: R) -> Result<Self> { let mut this = Self::from_core(*Self::none()); unsafe { errcode_to_result(ffi::blFontLoaderCreateFromData( this.core_mut(), data.as_ref().as_ptr() as *const _, data.as_ref().len(), None, ptr::null_mut(), )) .map(|_| this) } }*/ #[inline] pub fn create_font_face(&self, index: u32) -> Result<FontFace> { FontFace::from_loader(self, index) } #[inline] pub fn data_by_face_index(&mut self, idx: u32) -> FontData { FontData::from_core(ffi::BLFontDataCore { impl_: unsafe { (self.impl_().virt().dataByFaceIndex.unwrap())(self.impl_mut(), idx) }, }) } /// Type of font-face of the loader content. /// /// It doesn't matter if the content is a single font or a collection. In /// any case `face_type` would always return the type of the font-face /// that will be created by /// [`FontFace::from_loader`](struct.FontFace.html#method.from_loader). 
#[inline] pub fn face_type(&self) -> FontFaceType { (self.impl_().faceType as u32).into() } /// Returns the number of faces this loader provides. /// /// If the loader is initialized to a single font it would be 1, and if the /// loader is initialized to a font collection then the return would /// correspond to the number of font-faces within that collection. #[inline] pub fn face_count(&self) -> u32 { self.impl_().faceCount } /// Returns the [`FontLoaderFlags`]. #[inline] pub fn loader_flags(&self) -> FontLoaderFlags { FontLoaderFlags::from_bits_truncate(self.impl_().loaderFlags) } } impl PartialEq for FontLoader { #[inline] fn eq(&self, other: &Self) -> bool { unsafe { ffi::blFontLoaderEquals(self.core(), other.core()) } } } impl Clone for FontLoader { fn clone(&self) -> Self { Self::from_core(self.init_weak()) } } impl Drop for FontLoader { #[inline] fn drop(&mut self) { unsafe { ffi::blFontLoaderReset(&mut self.core) }; } } impl fmt::Debug for FontLoader { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FontLoader").finish() } } /// Font Face #[repr(transparent)] pub struct FontFace { pub(in crate) core: ffi::BLFontFaceCore, } unsafe impl WrappedBlCore for FontFace { type Core = ffi::BLFontFaceCore; const IMPL_TYPE_INDEX: usize = crate::variant::ImplType::FontFace as usize; #[inline] fn from_core(core: Self::Core) -> Self { FontFace { core } } } impl FontFace { /// Creates a new FontFace from a given path. pub fn from_path<P: AsRef<Path>>(path: P, read_flags: DataAccessFlags) -> Result<Self> { let mut this = Self::from_core(*Self::none()); let path = CString::new(path.as_ref().to_string_lossy().into_owned().into_bytes()).unwrap(); unsafe { errcode_to_result(ffi::blFontFaceCreateFromFile( this.core_mut(), path.as_ptr(), read_flags.bits(), )) .map(|_| this) } } /// Creates a new FontFace from the given [`FontLoader`]. pub fn from_loader(loader: &FontLoader, face_index: u32) -> Result<Self> { let mut this = Self::from_core(*Self::none()); unsafe { errcode_to_result(ffi::blFontFaceCreateFromLoader( this.core_mut(), loader.core(), face_index, )) .map(|_| this) } } /// Creates a new [`Font`] from this FontFace. pub fn create_font(&self, size: f32) -> Result<Font> { Font::from_face(self, size) } /// Returns the [`FontFaceInfo`]. #[inline] pub fn face_info(&self) -> &FontFaceInfo { unsafe { &*(&self.impl_().faceInfo as *const _ as *const _) } } /// Returns the [`FontFaceType`]. #[inline] pub fn face_type(&self) -> FontFaceType { self.face_info().face_type } /// Returns the [`FontFaceFlags`]. #[inline] pub fn face_flags(&self) -> FontFaceFlags { self.face_info().face_flags } /// Returns a zero-based index of this font-face. /// /// NOTE: Face index does only make sense if this face is part of a TrueType /// or OpenType font collection. In that case the returned value would be /// the index of this face in that collection. If the face is not part of a /// collection then the returned value would always be zero. #[inline] pub fn face_index(&self) -> u32 { self.face_info().face_index } /// Returns the [`FontOutlineType`]. #[inline] pub fn outline_type(&self) -> FontOutlineType { self.face_info().outline_type } /// Returns the number of glyphs the face provides. #[inline] pub fn glyph_count(&self) -> u32 { self.face_info().glyph_count } /// Returns the [`FontFaceDiagFlags`]. #[inline] pub fn diag_flags(&self) -> FontFaceDiagFlags { self.face_info().diag_flags } /// Returns a unique identifier describing this FontFace. 
#[inline] pub fn face_unique_id(&self) -> u64 { self.impl_().faceUniqueId } /// Returns the [`FontWeight`]. #[inline] pub fn weight(&self) -> FontWeight { (self.impl_().weight as u32).into() } /// Returns the [`FontStretch`]. #[inline] pub fn stretch(&self) -> FontStretch { (self.impl_().stretch as u32).into() } /// Returns the [`FontStyle`]. #[inline] pub fn style(&self) -> FontStyle { (self.impl_().style as u32).into() } /// Returns the [`FontData`] associated with this font-face. #[inline] pub fn data(&self) -> &FontData { unsafe { &*(&self.impl_().data as *const _ as *const _) } } /// Returns the [`FontLoader`] associated with this font-face. #[inline] pub fn loader(&self) -> &FontLoader { unsafe { &*(&self.impl_().loader as *const _ as *const _) } } /// Returns the design metrics of this [`FontFace`]. #[inline] pub fn design_metrics(&self) -> &FontDesignMetrics { unsafe { &*(&self.impl_().designMetrics as *const _ as *const _) } } /// Returns the units per em, which are part of font's design metrics. #[inline] pub fn units_per_em(&self) -> i32 { self.design_metrics().units_per_em } // TODO panose #[inline] pub fn unicode_coverage(&self) -> &FontUnicodeCoverage { unsafe { &*(&self.impl_().unicodeCoverage as *const _ as *const _) } } /// Returns the full name. #[inline] pub fn full_name(&self) -> &str { bl_string_to_str(&self.impl_().fullName) } /// Returns the family name. #[inline] pub fn family_name(&self) -> &str { bl_string_to_str(&self.impl_().familyName) } /// Returns the subfamily name. #[inline] pub fn subfamily_name(&self) -> &str { bl_string_to_str(&self.impl_().subfamilyName) } /// Returns the post script name. #[inline] pub fn post_script_name(&self) -> &str { bl_string_to_str(&self.impl_().postScriptName) } } #[inline] fn bl_string_to_str(bl_string: &ffi::BLStringCore) -> &str { unsafe { let ffi_slice = (*bl_string.impl_).__bindgen_anon_1.__bindgen_anon_1; str::from_utf8_unchecked(slice::from_raw_parts(ffi_slice.data as _, ffi_slice.size)) } } impl PartialEq for FontFace { fn eq(&self, other: &Self) -> bool { unsafe { ffi::blFontFaceEquals(self.core(), other.core()) } } } impl Clone for FontFace { fn clone(&self) -> Self { Self::from_core(self.init_weak()) } } impl Drop for FontFace { #[inline] fn drop(&mut self) { unsafe { ffi::blFontFaceReset(&mut self.core) }; } } impl fmt::Debug for FontFace { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FontFace").finish() } } /// Font #[repr(transparent)] pub struct Font { core: ffi::BLFontCore, } unsafe impl WrappedBlCore for Font { type Core = ffi::BLFontCore; const IMPL_TYPE_INDEX: usize = crate::variant::ImplType::Font as usize; #[inline] fn from_core(core: Self::Core) -> Self { Font { core } } } impl Font { /// Creates a new font from the given [`FontFace`]. pub fn from_face(face: &FontFace, size: f32) -> Result<Self> { let mut this = Self::from_core(*Self::none()); unsafe { errcode_to_result(ffi::blFontCreateFromFace( this.core_mut(), face.core(), size, )) .map(|_| this) } } /// Returns a font-face of the font. /// /// Returns the same font-face, which was passed to /// [`from_face`](struct.Font.html#method.from_face). pub fn face(&self) -> &FontFace { unsafe { &*(&self.impl_().face as *const _ as *const _) } } /// Returns the [`FontFaceType`] of the font. pub fn face_type(&self) -> FontFaceType { self.face().face_type() } /// Returns the [`FontFaceFlags`] of the font. 
pub fn face_flags(&self) -> FontFaceFlags { self.face().face_flags() } /// Returns the "units per em" (UPEM) of the font's associated font-face. pub fn units_per_em(&self) -> i32 { self.face().units_per_em() } /// Returns the size of the font (as float). pub fn size(&self) -> f32 { self.impl_().metrics.size } /// Returns the font-features used by this font. pub fn features(&self) -> &Array<FontFeature> { unsafe { &*(&self.impl_().features as *const _ as *const _) } } /// Returns the font-variations used by this font. pub fn variations(&self) -> &Array<FontVariation> { unsafe { &*(&self.impl_().variations as *const _ as *const _) } } /// Returns the weight of the font. #[inline] pub fn weight(&self) -> FontWeight { (self.impl_().weight as u32).into() } /// Returns the stretch of the font. #[inline] pub fn stretch(&self) -> FontStretch { (self.impl_().stretch as u32).into() } /// Returns the style of the font. #[inline] pub fn style(&self) -> FontStyle { (self.impl_().style as u32).into() } /// Returns a 2x2 matrix of the font. /// /// The returned [`FontMatrix`] is used to scale fonts from design units /// into user units. The matrix usually has a negative value at the 3rd /// index of the internal array as fonts use a different coordinate /// system than Blend2D. #[inline] pub fn font_matrix(&self) -> &FontMatrix { unsafe { &*(&self.impl_().matrix as *const _ as *const _) } } /// Returns a scaled metrics of this font. /// /// The returned metrics is a scale of design metrics that match the font /// size and its options. #[inline] pub fn font_metrics(&self) -> &FontMetrics { unsafe { &*(&self.impl_().metrics as *const _ as *const _) } } /// Returns a design metrics of this font. /// /// The returned metrics is compatible with the metrics of [FontFace] /// associated with this font. 
#[inline] pub fn design_metrics(&self) -> &FontDesignMetrics { self.face().design_metrics() } #[inline] pub fn shape(&self, buf: &mut GlyphBuffer) -> Result<()> { unsafe { errcode_to_result(ffi::blFontShape(self.core(), &mut buf.core)) } } #[inline] pub fn map_text_to_glyphs(&self, buf: &mut GlyphBuffer) -> Result<GlyphMappingState> { let mut state = GlyphMappingState { glyph_count: 0, undefined_first: 0, undefined_count: 0, }; unsafe { errcode_to_result(ffi::blFontMapTextToGlyphs( self.core(), &mut buf.core, &mut state as *mut _ as *mut _, )) .map(|_| state) } } // TODO positionGlyphs #[inline] pub fn apply_kerning(&self, buf: &mut GlyphBuffer) -> Result<()> { unsafe { errcode_to_result(ffi::blFontApplyKerning(self.core(), &mut buf.core)) } } #[inline] pub fn apply_g_sub(&self, buf: &mut GlyphBuffer, index: usize, lookups: usize) -> Result<()> { unsafe { errcode_to_result(ffi::blFontApplyGSub( self.core(), &mut buf.core, index, lookups, )) } } #[inline] pub fn apply_g_pos(&self, buf: &mut GlyphBuffer, index: usize, lookups: usize) -> Result<()> { unsafe { errcode_to_result(ffi::blFontApplyGPos( self.core(), &mut buf.core, index, lookups, )) } } #[inline] pub fn get_text_metrics(&self, buf: &mut GlyphBuffer) -> Result<TextMetrics> { let mut metrics = TextMetrics::default(); unsafe { errcode_to_result(ffi::blFontGetTextMetrics( self.core(), &mut buf.core, &mut metrics as *mut _ as *mut _, )) .map(|_| metrics) } } //TODO getGlyphBounds //TODO getGlyphAdvances //TODO getGlyphOutlines //TODO getGlyphRunOutlines } impl PartialEq for Font { fn eq(&self, other: &Self) -> bool { unsafe { ffi::blFontEquals(self.core(), other.core()) } } } impl Clone for Font { fn clone(&self) -> Self { Self::from_core(self.init_weak()) } } impl Drop for Font { #[inline] fn drop(&mut self) { unsafe { ffi::blFontReset(&mut self.core) }; } } impl fmt::Debug for Font { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Font").finish() } }
true
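A hedged sketch of how the `FontFace`/`Font` API in the record above is typically driven. The import paths, the `DataAccessFlags::READ` flag name, and the font file path are assumptions not confirmed by this record; treat them as placeholders.

```rust
use blend2d::font::FontFace;  // assumed re-export path
use blend2d::DataAccessFlags; // the READ flag name below is a placeholder

fn main() {
    let face = FontFace::from_path("assets/NotoSans-Regular.ttf", DataAccessFlags::READ)
        .expect("could not load the font face");

    println!(
        "family: {}, glyphs: {}, units per em: {}",
        face.family_name(),
        face.glyph_count(),
        face.units_per_em()
    );

    // A FontFace is size-agnostic; a Font is that face instantiated at a size.
    let font = face.create_font(20.0).expect("could not create a sized font");
    println!("instantiated at size {}", font.size());
}
```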
03e6ba2231d697ea3310d4c92ed40acd9ba3c875
Rust
renatolond/advent_of_code
/2019/day_6.rs
UTF-8
3,617
3.1875
3
[]
no_license
use std::process; use std::io; use std::io::prelude::*; use std::convert::TryInto; use std::collections::VecDeque; struct Planet { name: String, orbiting: Option<usize>, orbiters: Vec<usize>, accum: i32 } fn main() { let mut planets : Vec<Planet> = Vec::new(); let mut max_orbiters = 0; let mut start : usize = 999; for line in io::stdin().lock().lines() { let line_str: String = line.unwrap(); let orbit_desc : Vec<&str> = line_str.split(')').collect(); let planet_center = orbit_desc[0].to_string(); let planet_orbiter = orbit_desc[1].to_string(); let mut planet_center_idx : Option<usize> = None; let mut planet_orbiter_idx : Option<usize> = None; for i in 0..planets.len() { if planets[i].name == planet_center { planet_center_idx = i.try_into().unwrap(); } if planets[i].name == planet_orbiter { planet_orbiter_idx = i.try_into().unwrap(); } } if planet_center_idx == None { let idx = planets.len(); planet_center_idx = Some(idx.try_into().unwrap()); planets.push(Planet { name: planet_center, orbiting: None, orbiters: Vec::new(), accum: 0 }); } if planet_orbiter_idx == None { let idx = planets.len(); planet_orbiter_idx = Some(idx.try_into().unwrap()); planets.push( Planet { name: planet_orbiter, orbiting: None, orbiters: Vec::new(), accum: 0 }); } { let ref mut planet_center_s : Planet; planet_center_s = &mut planets[planet_center_idx.unwrap()]; planet_center_s.orbiters.push(planet_orbiter_idx.unwrap().try_into().unwrap()); if planet_center_s.orbiters.len() > max_orbiters { max_orbiters = planet_center_s.orbiters.len(); } if planet_center_s.name == "COM" { start = planet_center_idx.unwrap(); } } { let ref mut planet_orbiter_s : Planet; planet_orbiter_s = &mut planets[planet_orbiter_idx.unwrap()]; planet_orbiter_s.orbiting = planet_center_idx; } } let mut visited = vec![false; planets.len()]; let mut curr_idx = start; let mut to_visit : VecDeque<usize> = VecDeque::new(); loop { if visited[curr_idx] { println!("Oh noes."); process::exit(1) } println!("Looking at {}", planets[curr_idx].name); let accum; match planets[curr_idx].orbiting { Some(idx) => { accum = planets[idx].accum + 1} None => { accum = 0 } } { let ref mut curr_planet : Planet = planets[curr_idx]; for i in curr_planet.orbiters.iter() { to_visit.push_back(*i); } curr_planet.accum = accum; } visited[curr_idx] = true; if to_visit.is_empty() { break } curr_idx = to_visit.pop_front().unwrap(); } let mut orbits = 0; for i in 0..planets.len() { orbits += planets[i].accum; println!("{} {}", planets[i].name, planets[i].accum); } println!("{}", orbits); // let none = "NONE".to_string(); // let ref planet; // if orbits[i] == -1 { // planet = &none; // } else { // planet = &planets[orbits[i] as usize]; // } // println!("{}: {} {}", planets[i], planet, pointing[i]); //} //println!("{}", planets.len()); }
true
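The BFS in the record above gives every planet an `accum` equal to its depth below `COM`, and the part 1 answer is the sum of those depths. The same idea fits in a few lines with a child-to-parent map; the hard-coded pairs below are the puzzle's published sample, whose depths sum to 42.

```rust
use std::collections::HashMap;

// Depth below COM = number of direct plus indirect orbits a planet contributes.
fn depth(planet: &str, parent_of: &HashMap<&str, &str>) -> i32 {
    match parent_of.get(planet) {
        Some(parent) => 1 + depth(parent, parent_of),
        None => 0, // COM orbits nothing
    }
}

fn main() {
    // Advent of Code 2019, day 6 sample map as (child, parent) pairs.
    let pairs = [
        ("B", "COM"), ("C", "B"), ("D", "C"), ("E", "D"), ("F", "E"),
        ("G", "B"), ("H", "G"), ("I", "D"), ("J", "E"), ("K", "J"), ("L", "K"),
    ];
    let parent_of: HashMap<&str, &str> = pairs.iter().copied().collect();

    let total: i32 = parent_of.keys().map(|&child| depth(child, &parent_of)).sum();
    println!("total direct + indirect orbits: {}", total); // 42 for this sample
}
```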
cfca1503339c3150bb41f49f5fe835a69d2be38a
Rust
woodgear/kvs
/src/bin/kvs.rs
UTF-8
1,036
2.9375
3
[]
no_license
use structopt;
use structopt::StructOpt;

use kvs::{KvStore, Result};

#[derive(Debug, StructOpt)]
#[structopt(name = "kvs", about = "a simple in memory kv db")]
struct Opt {
    #[structopt(subcommand)]
    subcmd: SubCmd,
}

#[derive(Debug, StructOpt)]
enum SubCmd {
    Get { key: String },
    Set { key: String, val: String },
    Rm { key: String },
}

fn main() -> Result<()> {
    let opt = Opt::from_args();
    let mut store = KvStore::open(std::env::current_dir()?)?;
    match opt.subcmd {
        SubCmd::Get { key } => match store.get(key)? {
            Some(v) => {
                println!("{}", v);
            }
            None => {
                println!("Key not found");
            }
        },
        SubCmd::Set { key, val } => {
            store.set(key, val)?;
        }
        SubCmd::Rm { key } => match store.remove(key) {
            Ok(_) => {}
            Err(_) => {
                println!("Key not found");
                std::process::exit(-1);
            }
        },
    };
    Ok(())
}
true
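A hedged sketch of using `KvStore` as a library instead of through the CLI, built only from the calls visible in `main()` above (`open`, `set`, `get`, `remove`). The temporary-directory location is an assumption; any writable directory the store accepts would do.

```rust
use kvs::{KvStore, Result};

fn main() -> Result<()> {
    // Assumption: open() accepts any writable directory, as it does current_dir() above.
    let mut store = KvStore::open(std::env::temp_dir())?;

    store.set("lang".to_owned(), "rust".to_owned())?;

    match store.get("lang".to_owned())? {
        Some(v) => println!("lang = {}", v),
        None => println!("Key not found"),
    }

    store.remove("lang".to_owned())?;
    Ok(())
}
```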
95ea66807baa6e7db2fa1efe7ad40ba434633bc9
Rust
kalkyl/postcard
/src/ser/flavors.rs
UTF-8
12,865
3.328125
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! # Flavors - Plugins for `postcard` //! //! "Flavors" in `postcard` are used as modifiers to the serialization //! process. Flavors typically modify one or both of the following: //! //! 1. The output medium of the serialization, e.g. whether the data is serialized to a `[u8]` slice, or a `heapless::Vec`. //! 2. The format of the serialization, such as encoding the serialized output in a COBS format, performing CRC32 checksumming while serializing, etc. //! //! Flavors are implemented using the `SerFlavor` trait, which acts as a "middleware" for receiving the bytes as serialized by `serde`. //! Multiple flavors may be combined to obtain a desired combination of behavior and storage. //! When flavors are combined, it is expected that the storage flavor (such as `Slice` or `HVec`) is the innermost flavor. //! //! Custom flavors may be defined by users of the `postcard` crate, however some commonly useful flavors have been provided in //! this module. If you think your custom flavor would be useful to others, PRs adding flavors are very welcome! //! //! ## Usability //! //! Flavors may not always be convenient to use directly, as they may expose some implementation details of how the //! inner workings of the flavor behaves. It is typical to provide a convenience method for using a flavor, to prevent //! the user from having to specify generic parameters, setting correct initialization values, or handling the output of //! the flavor correctly. See `postcard::to_vec()` for an example of this. //! //! It is recommended to use the [`serialize_with_flavor()`](../fn.serialize_with_flavor.html) method for serialization. See it's documentation for information //! regarding usage and generic type parameters. //! //! ## Examples //! //! ### Using a single flavor //! //! In the first example, we use the `Slice` flavor, to store the serialized output into a mutable `[u8]` slice. //! No other modification is made to the serialization process. //! //! ```rust //! use postcard::{ //! serialize_with_flavor, //! flavors::Slice, //! }; //! //! let mut buf = [0u8; 32]; //! //! let data: &[u8] = &[0x01, 0x00, 0x20, 0x30]; //! let buffer = &mut [0u8; 32]; //! let res = serialize_with_flavor::<[u8], Slice, &mut [u8]>( //! data, //! Slice::new(buffer) //! ).unwrap(); //! //! assert_eq!(res, &[0x04, 0x01, 0x00, 0x20, 0x30]); //! ``` //! //! ### Using combined flavors //! //! In the second example, we mix `Slice` with `Cobs`, to cobs encode the output while //! the data is serialized. Notice how `Slice` (the storage flavor) is the innermost flavor used. //! //! ```rust //! use postcard::{ //! serialize_with_flavor, //! flavors::{Cobs, Slice}, //! }; //! //! let mut buf = [0u8; 32]; //! //! let data: &[u8] = &[0x01, 0x00, 0x20, 0x30]; //! let buffer = &mut [0u8; 32]; //! let res = serialize_with_flavor::<[u8], Cobs<Slice>, &mut [u8]>( //! data, //! Cobs::try_new(Slice::new(buffer)).unwrap(), //! ).unwrap(); //! //! assert_eq!(res, &[0x03, 0x04, 0x01, 0x03, 0x20, 0x30, 0x00]); //! ``` use crate::error::{Error, Result}; use crate::varint::VarintUsize; use cobs::{EncoderState, PushResult}; use core::ops::Index; use core::ops::IndexMut; #[cfg(feature = "heapless")] pub use heapless_vec::*; #[cfg(feature = "use-std")] pub use std_vec::*; #[cfg(feature = "alloc")] pub use alloc_vec::*; /// The SerFlavor trait acts as a combinator/middleware interface that can be used to pass bytes /// through storage or modification flavors. See the module level documentation for more information /// and examples. 
pub trait SerFlavor { /// The `Output` type is what this flavor "resolves" to when the serialization is complete. /// For storage flavors, this is typically a concrete type. For modification flavors, this is /// typically a generic parameter for the storage flavor they are wrapped around. type Output; /// The try_extend() trait method can be implemented when there is a more efficient way of processing /// multiple bytes at once, such as copying a slice to the output, rather than iterating over one byte /// at a time. fn try_extend(&mut self, data: &[u8]) -> core::result::Result<(), ()> { data.iter() .try_for_each(|d| self.try_push(*d)) .map_err(|_| ()) } /// The try_push() trait method can be used to push a single byte to be modified and/or stored fn try_push(&mut self, data: u8) -> core::result::Result<(), ()>; /// The try_push_varint_usize() trait method can be used to push a `VarintUsize`. The default /// implementation uses try_extend() to process the encoded `VarintUsize` bytes, which is likely /// the desired behavior for most circumstances. fn try_push_varint_usize(&mut self, data: &VarintUsize) -> core::result::Result<(), ()> { let mut buf = VarintUsize::new_buf(); let used_buf = data.to_buf(&mut buf); self.try_extend(used_buf) } /// The release() trait method finalizes the modification or storage operation, and resolved into /// the type defined by `SerFlavor::Output` associated type. fn release(self) -> core::result::Result<Self::Output, ()>; } //////////////////////////////////////////////////////////////////////////////// // Storage Flavors //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////// // Slice //////////////////////////////////////// /// The `Slice` flavor is a storage flavor, storing the serialized (or otherwise modified) bytes into a plain /// `[u8]` slice. The `Slice` flavor resolves into a sub-slice of the original slice buffer. pub struct Slice<'a> { buf: &'a mut [u8], idx: usize, } impl<'a> Slice<'a> { /// Create a new `Slice` flavor from a given backing buffer pub fn new(buf: &'a mut [u8]) -> Self { Slice { buf, idx: 0 } } } impl<'a> SerFlavor for Slice<'a> { type Output = &'a mut [u8]; fn try_extend(&mut self, data: &[u8]) -> core::result::Result<(), ()> { let len = data.len(); if (len + self.idx) > self.buf.len() { return Err(()); } self.buf[self.idx..self.idx + len].copy_from_slice(data); self.idx += len; Ok(()) } fn try_push(&mut self, data: u8) -> core::result::Result<(), ()> { if self.idx >= self.buf.len() { return Err(()); } self.buf[self.idx] = data; self.idx += 1; Ok(()) } fn release(self) -> core::result::Result<Self::Output, ()> { let (used, _unused) = self.buf.split_at_mut(self.idx); Ok(used) } } impl<'a> Index<usize> for Slice<'a> { type Output = u8; fn index(&self, idx: usize) -> &u8 { &self.buf[idx] } } impl<'a> IndexMut<usize> for Slice<'a> { fn index_mut(&mut self, idx: usize) -> &mut u8 { &mut self.buf[idx] } } #[cfg(feature = "heapless")] mod heapless_vec { use heapless::Vec; use super::SerFlavor; use super::Index; use super::IndexMut; //////////////////////////////////////// // HVec //////////////////////////////////////// /// The `HVec` flavor is a wrapper type around a `heapless::Vec`. This is a stack /// allocated data structure, with a fixed maximum size and variable amount of contents. 
pub struct HVec<const B: usize>(Vec<u8, B>); impl<'a, const B: usize> SerFlavor for HVec<B> { type Output = Vec<u8, B>; #[inline(always)] fn try_extend(&mut self, data: &[u8]) -> core::result::Result<(), ()> { self.0.extend_from_slice(data) } #[inline(always)] fn try_push(&mut self, data: u8) -> core::result::Result<(), ()> { self.0.push(data).map_err(|_| ()) } fn release(self) -> core::result::Result<Vec<u8, B>, ()> { Ok(self.0) } } impl<const B: usize> Index<usize> for HVec<B> { type Output = u8; fn index(&self, idx: usize) -> &u8 { &self.0[idx] } } impl<const B: usize> IndexMut<usize> for HVec<B> { fn index_mut(&mut self, idx: usize) -> &mut u8 { &mut self.0[idx] } } impl<const B: usize> Default for HVec<B> { fn default() -> Self { Self(Vec::new()) } } } #[cfg(feature = "use-std")] mod std_vec { extern crate std; use std::vec::Vec; use super::SerFlavor; use super::Index; use super::IndexMut; /// The `StdVec` flavor is a wrapper type around a `std::vec::Vec`. /// /// This type is only available when the (non-default) `use-std` feature is active pub struct StdVec(pub Vec<u8>); impl SerFlavor for StdVec { type Output = Vec<u8>; #[inline(always)] fn try_extend(&mut self, data: &[u8]) -> core::result::Result<(), ()> { self.0.extend_from_slice(data); Ok(()) } #[inline(always)] fn try_push(&mut self, data: u8) -> core::result::Result<(), ()> { self.0.push(data); Ok(()) } fn release(self) -> core::result::Result<Self::Output, ()> { Ok(self.0) } } impl Index<usize> for StdVec { type Output = u8; fn index(&self, idx: usize) -> &u8 { &self.0[idx] } } impl IndexMut<usize> for StdVec { fn index_mut(&mut self, idx: usize) -> &mut u8 { &mut self.0[idx] } } } #[cfg(feature = "alloc")] mod alloc_vec { extern crate alloc; use alloc::vec::Vec; use super::SerFlavor; use super::Index; use super::IndexMut; /// The `AllocVec` flavor is a wrapper type around an `alloc::vec::Vec`. /// /// This type is only available when the (non-default) `alloc` feature is active pub struct AllocVec(pub Vec<u8>); impl SerFlavor for AllocVec { type Output = Vec<u8>; #[inline(always)] fn try_extend(&mut self, data: &[u8]) -> core::result::Result<(), ()> { self.0.extend_from_slice(data); Ok(()) } #[inline(always)] fn try_push(&mut self, data: u8) -> core::result::Result<(), ()> { self.0.push(data); Ok(()) } fn release(self) -> core::result::Result<Self::Output, ()> { Ok(self.0) } } impl Index<usize> for AllocVec { type Output = u8; fn index(&self, idx: usize) -> &u8 { &self.0[idx] } } impl IndexMut<usize> for AllocVec { fn index_mut(&mut self, idx: usize) -> &mut u8 { &mut self.0[idx] } } } //////////////////////////////////////////////////////////////////////////////// // Modification Flavors //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////// // COBS //////////////////////////////////////// /// The `Cobs` flavor implements [Consistent Overhead Byte Stuffing] on /// the serialized data. The output of this flavor includes the termination/sentinel /// byte of `0x00`. /// /// This protocol is useful when sending data over a serial interface without framing such as a UART /// /// [Consistent Overhead Byte Stuffing]: https://en.wikipedia.org/wiki/Consistent_Overhead_Byte_Stuffing pub struct Cobs<B> where B: SerFlavor + IndexMut<usize, Output = u8>, { flav: B, cobs: EncoderState, } impl<B> Cobs<B> where B: SerFlavor + IndexMut<usize, Output = u8>, { /// Create a new Cobs modifier Flavor. 
If there is insufficient space /// to push the leading header byte, the method will return an Error pub fn try_new(mut bee: B) -> Result<Self> { bee.try_push(0).map_err(|_| Error::SerializeBufferFull)?; Ok(Self { flav: bee, cobs: EncoderState::default(), }) } } impl<'a, B> SerFlavor for Cobs<B> where B: SerFlavor + IndexMut<usize, Output = u8>, { type Output = <B as SerFlavor>::Output; #[inline(always)] fn try_push(&mut self, data: u8) -> core::result::Result<(), ()> { use PushResult::*; match self.cobs.push(data) { AddSingle(n) => self.flav.try_push(n), ModifyFromStartAndSkip((idx, mval)) => { self.flav[idx] = mval; self.flav.try_push(0) } ModifyFromStartAndPushAndSkip((idx, mval, nval)) => { self.flav[idx] = mval; self.flav.try_push(nval)?; self.flav.try_push(0) } } } fn release(mut self) -> core::result::Result<Self::Output, ()> { let (idx, mval) = self.cobs.finalize(); self.flav[idx] = mval; self.flav.try_push(0)?; self.flav.release() } }
true
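The module docs above invite user-defined flavors, so here is a hedged sketch of a minimal modification flavor: it XORs every byte with a fixed mask before handing it to the inner storage flavor. It relies only on items shown in this record (`SerFlavor`, `Slice`, `serialize_with_flavor`) and assumes the same crate version.

```rust
use postcard::{
    flavors::{SerFlavor, Slice},
    serialize_with_flavor,
};

/// Modification flavor: XOR each serialized byte with `mask`, then forward it.
struct XorMask<B: SerFlavor> {
    flav: B,
    mask: u8,
}

impl<B: SerFlavor> SerFlavor for XorMask<B> {
    type Output = B::Output;

    fn try_push(&mut self, data: u8) -> Result<(), ()> {
        self.flav.try_push(data ^ self.mask)
    }

    fn release(self) -> Result<Self::Output, ()> {
        self.flav.release()
    }
}

fn main() {
    let data: &[u8] = &[0x01, 0x00, 0x20, 0x30];
    let buffer = &mut [0u8; 32];

    let out = serialize_with_flavor::<[u8], XorMask<Slice>, &mut [u8]>(
        data,
        XorMask { flav: Slice::new(buffer), mask: 0xFF },
    )
    .unwrap();

    // Same bytes as the plain Slice example (0x04 length prefix + payload), XORed with 0xFF.
    assert_eq!(out, &[0xFB, 0xFE, 0xFF, 0xDF, 0xCF]);
}
```

Because `try_extend` and `try_push_varint_usize` have default implementations that funnel into `try_push`, overriding only `try_push` and `release` is enough for a byte-wise transformation like this.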
20cf09179632a485201c9cc91eb1d6108365e906
Rust
juarezr/rsfbclient
/examples/select.rs
UTF-8
2,299
3.046875
3
[ "MIT" ]
permissive
//! //! Rust Firebird Client //! //! Example of select //! //! You need create a database with this table: //! create table test (col_a int generated by default as identity, col_b float, col_c varchar(10)); //! //! You can use the insert example to populate //! the database ;) //! #![allow(unused_variables, unused_mut)] use rsfbclient::{prelude::*, FbError}; fn main() -> Result<(), FbError> { #[cfg(feature = "linking")] let mut conn = rsfbclient::builder_native() .with_dyn_link() .with_remote() .host("localhost") .db_name("examples.fdb") .user("SYSDBA") .pass("masterkey") .transaction(TransactionConfiguration { lock_resolution: TrLockResolution::NoWait, ..TransactionConfiguration::default() }) .connect()?; #[cfg(feature = "dynamic_loading")] let mut conn = rsfbclient::builder_native() .with_dyn_load("./fbclient.lib") .with_remote() .host("localhost") .db_name("examples.fdb") .user("SYSDBA") .pass("masterkey") .transaction(TransactionConfiguration { lock_resolution: TrLockResolution::NoWait, ..TransactionConfiguration::default() }) .connect()?; #[cfg(feature = "pure_rust")] let mut conn = rsfbclient::builder_pure_rust() .host("localhost") .db_name("examples.fdb") .user("SYSDBA") .pass("masterkey") .transaction(TransactionConfiguration { lock_resolution: TrLockResolution::NoWait, ..TransactionConfiguration::default() }) .connect()?; // `query_iter` for large quantities of rows, will allocate space for one row at a time let rows = conn.query_iter("select col_a, col_b, col_c from test", ())?; println!("| col_a | col_b | col_c |"); println!("| ----- | ----- | ------- |"); for row in rows { let (col_a, col_b, col_c): (i32, f32, String) = row?; println!("| {:^5} | {:^5} | {:7} |", col_a, col_b, col_c); } // `query` for small quantities of rows, will allocate a vector with all rows let rows: Vec<(i32, f32, String)> = conn.query("select col_a, col_b, col_c from test", ())?; println!("{:?}", rows); Ok(()) }
true
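A hedged variant of the select above that binds a positional parameter. Two assumptions not confirmed by this record: the builder also works without the optional `.transaction(...)` override, and tuples such as `(1,)` implement the crate's parameter trait (the `()` argument used above suggests they do). The pure-Rust backend is assumed to be enabled.

```rust
use rsfbclient::{prelude::*, FbError};

fn main() -> Result<(), FbError> {
    let mut conn = rsfbclient::builder_pure_rust()
        .host("localhost")
        .db_name("examples.fdb")
        .user("SYSDBA")
        .pass("masterkey")
        .connect()?;

    // Assumption: a one-element tuple binds the single positional `?`.
    let filtered: Vec<(i32, f32, String)> =
        conn.query("select col_a, col_b, col_c from test where col_a = ?", (1,))?;
    println!("{:?}", filtered);

    Ok(())
}
```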
7a818f3bc5ceab7925ccf8ed06b91da14628335d
Rust
pkgw/stund
/tokio-pty-process/src/lib.rs
UTF-8
24,825
2.640625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
// Copyright 2018-2019, 2023 Peter Williams <peter@newton.cx> // Licensed under both the MIT License and the Apache-2.0 license. #![deny(missing_docs)] #![doc(html_root_url = "https://docs.rs/tokio-pty-process/0.4.0")] //! Spawn a child process under a pseudo-TTY, interacting with it //! asynchronously using Tokio. //! //! A [pseudo-terminal](https://en.wikipedia.org/wiki/Pseudoterminal) (or //! “pseudo-TTY” or “PTY”) is a special Unix file handle that models the kind //! of text terminal through which users used to interact with computers. A //! PTY enables a specialized form of bidirectional interprocess communication //! that a variety of user-facing Unix programs take advantage of. //! //! The basic way to use this crate is: //! //! 1. Create a Tokio [Reactor](https://docs.rs/tokio/*/tokio/reactor/struct.Reactor.html) //! that will handle all of your asynchronous I/O. //! 2. Create an `AsyncPtyMaster` that represents your ownership of //! an OS pseudo-terminal. //! 3. Use your master and the `spawn_pty_async` or `spawn_pty_async_raw` //! functions of the `CommandExt` extension trait, which extends //! `std::process::Command`, to launch a child process that is connected to //! your master. //! 4. Optionally control the child process (e.g. send it signals) through the //! `Child` value returned by that function. //! //! This crate only works on Unix since pseudo-terminals are a Unix-specific //! concept. //! //! The `Child` type is largely copied from Alex Crichton’s //! [tokio-process](https://github.com/alexcrichton/tokio-process) crate. use futures::future::FlattenStream; use futures::{try_ready, Async, Future, Poll, Stream}; use libc::{c_int, c_ushort}; use mio::event::Evented; use mio::unix::{EventedFd, UnixReady}; use mio::{PollOpt, Ready, Token}; use std::ffi::{CStr, OsStr, OsString}; use std::fmt; use std::fs::{File, OpenOptions}; use std::io::{self, Read, Write}; use std::mem; use std::os::unix::prelude::*; use std::os::unix::process::CommandExt as StdUnixCommandExt; use std::process::{self, ExitStatus}; use tokio::io::{AsyncRead, AsyncWrite}; use tokio::reactor::PollEvented2; use tokio_signal::unix::Signal; use tokio_signal::IoFuture; mod split; pub use split::{AsyncPtyMasterReadHalf, AsyncPtyMasterWriteHalf}; // First set of hoops to jump through: a read-write pseudo-terminal master // with full async support. As far as I can tell, we need to create an inner // wrapper type to implement Evented on a type that we can then wrap in a // PollEvented. Lame. #[derive(Debug)] struct AsyncPtyFile(File); impl AsyncPtyFile { pub fn new(inner: File) -> Self { AsyncPtyFile(inner) } } impl Read for AsyncPtyFile { fn read(&mut self, bytes: &mut [u8]) -> io::Result<usize> { self.0.read(bytes) } } impl Write for AsyncPtyFile { fn write(&mut self, bytes: &[u8]) -> io::Result<usize> { self.0.write(bytes) } fn flush(&mut self) -> io::Result<()> { self.0.flush() } } impl Evented for AsyncPtyFile { fn register( &self, poll: &mio::Poll, token: Token, interest: Ready, opts: PollOpt, ) -> io::Result<()> { EventedFd(&self.0.as_raw_fd()).register(poll, token, interest | UnixReady::hup(), opts) } fn reregister( &self, poll: &mio::Poll, token: Token, interest: Ready, opts: PollOpt, ) -> io::Result<()> { EventedFd(&self.0.as_raw_fd()).reregister(poll, token, interest | UnixReady::hup(), opts) } fn deregister(&self, poll: &mio::Poll) -> io::Result<()> { EventedFd(&self.0.as_raw_fd()).deregister(poll) } } /// A handle to a pseudo-TTY master that can be interacted with /// asynchronously. 
/// /// This type implements both `AsyncRead` and `AsyncWrite`. pub struct AsyncPtyMaster(PollEvented2<AsyncPtyFile>); impl AsyncPtyMaster { /// Open a pseudo-TTY master. /// /// This function performs the C library calls `posix_openpt()`, /// `grantpt()`, and `unlockpt()`. It also sets the resulting pseudo-TTY /// master handle to nonblocking mode. pub fn open() -> Result<Self, io::Error> { let inner = unsafe { // On MacOS, O_NONBLOCK is not documented as an allowed option to // posix_openpt(), but it is in fact allowed and functional, and // trying to add it later with fcntl() is forbidden. Meanwhile, on // FreeBSD, O_NONBLOCK is *not* an allowed option to // posix_openpt(), and the only way to get a nonblocking PTY // master is to add the nonblocking flag with fcntl() later. So, // we have to jump through some #[cfg()] hoops. const APPLY_NONBLOCK_AFTER_OPEN: bool = cfg!(target_os = "freebsd"); let fd = if APPLY_NONBLOCK_AFTER_OPEN { libc::posix_openpt(libc::O_RDWR | libc::O_NOCTTY) } else { libc::posix_openpt(libc::O_RDWR | libc::O_NOCTTY | libc::O_NONBLOCK) }; if fd < 0 { return Err(io::Error::last_os_error()); } if libc::grantpt(fd) != 0 { return Err(io::Error::last_os_error()); } if libc::unlockpt(fd) != 0 { return Err(io::Error::last_os_error()); } if APPLY_NONBLOCK_AFTER_OPEN { let flags = libc::fcntl(fd, libc::F_GETFL, 0); if flags < 0 { return Err(io::Error::last_os_error()); } if libc::fcntl(fd, libc::F_SETFL, flags | libc::O_NONBLOCK) == -1 { return Err(io::Error::last_os_error()); } } File::from_raw_fd(fd) }; Ok(AsyncPtyMaster(PollEvented2::new(AsyncPtyFile::new(inner)))) } /// Split the AsyncPtyMaster into an AsyncPtyReadHalf implementing `Read` and /// and `AsyncRead` as well as an `AsyncPtyWriteHalf` implementing /// `AsyncPtyWrite`. pub fn split(self) -> (AsyncPtyMasterReadHalf, AsyncPtyMasterWriteHalf) { split::split(self) } /// Open a pseudo-TTY slave that is connected to this master. /// /// The resulting file handle is *not* set to non-blocking mode. fn open_sync_pty_slave(&self) -> Result<File, io::Error> { let mut buf: [libc::c_char; 512] = [0; 512]; let fd = self.as_raw_fd(); #[cfg(not(any(target_os = "macos", target_os = "freebsd")))] { if unsafe { libc::ptsname_r(fd, buf.as_mut_ptr(), buf.len()) } != 0 { return Err(io::Error::last_os_error()); } } #[cfg(any(target_os = "macos", target_os = "freebsd"))] unsafe { let st = libc::ptsname(fd); if st.is_null() { return Err(io::Error::last_os_error()); } libc::strncpy(buf.as_mut_ptr(), st, buf.len()); } let ptsname = OsStr::from_bytes(unsafe { CStr::from_ptr(&buf as _) }.to_bytes()); OpenOptions::new().read(true).write(true).open(ptsname) } } impl AsRawFd for AsyncPtyMaster { fn as_raw_fd(&self) -> RawFd { self.0.get_ref().0.as_raw_fd() } } impl Read for AsyncPtyMaster { fn read(&mut self, bytes: &mut [u8]) -> io::Result<usize> { self.0.read(bytes) } } impl AsyncRead for AsyncPtyMaster {} impl Write for AsyncPtyMaster { fn write(&mut self, bytes: &[u8]) -> io::Result<usize> { self.0.write(bytes) } fn flush(&mut self) -> io::Result<()> { self.0.flush() } } impl AsyncWrite for AsyncPtyMaster { fn shutdown(&mut self) -> Poll<(), io::Error> { self.0.shutdown() } } // Now, the async-ified child process framework. /// A child process that can be interacted with through a pseudo-TTY. 
#[must_use = "futures do nothing unless polled"] pub struct Child { inner: process::Child, kill_on_drop: bool, reaped: bool, sigchld: FlattenStream<IoFuture<Signal>>, } impl fmt::Debug for Child { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("Child") .field("pid", &self.inner.id()) .field("inner", &self.inner) .field("kill_on_drop", &self.kill_on_drop) .field("reaped", &self.reaped) .field("sigchld", &"..") .finish() } } impl Child { fn new(inner: process::Child) -> Child { Child { inner: inner, kill_on_drop: true, reaped: false, sigchld: Signal::new(libc::SIGCHLD).flatten_stream(), } } /// Returns the OS-assigned process identifier associated with this child. pub fn id(&self) -> u32 { self.inner.id() } /// Forces the child to exit. /// /// This is equivalent to sending a SIGKILL on unix platforms. pub fn kill(&mut self) -> io::Result<()> { if self.reaped { Ok(()) } else { self.inner.kill() } } /// Drop this `Child` without killing the underlying process. /// /// Normally a `Child` is killed if it's still alive when dropped, but this /// method will ensure that the child may continue running once the `Child` /// instance is dropped. pub fn forget(mut self) { self.kill_on_drop = false; } /// Check whether this `Child` has exited yet. pub fn poll_exit(&mut self) -> Poll<ExitStatus, io::Error> { assert!(!self.reaped); loop { if let Some(e) = self.try_wait()? { self.reaped = true; return Ok(e.into()); } // If the child hasn't exited yet, then it's our responsibility to // ensure the current task gets notified when it might be able to // make progress. // // As described in `spawn` above, we just indicate that we can // next make progress once a SIGCHLD is received. if self.sigchld.poll()?.is_not_ready() { return Ok(Async::NotReady); } } } fn try_wait(&self) -> io::Result<Option<ExitStatus>> { let id = self.id() as c_int; let mut status = 0; loop { match unsafe { libc::waitpid(id, &mut status, libc::WNOHANG) } { 0 => return Ok(None), n if n < 0 => { let err = io::Error::last_os_error(); if err.kind() == io::ErrorKind::Interrupted { continue; } return Err(err); } n => { assert_eq!(n, id); return Ok(Some(ExitStatus::from_raw(status))); } } } } } impl Future for Child { type Item = ExitStatus; type Error = io::Error; fn poll(&mut self) -> Poll<ExitStatus, io::Error> { self.poll_exit() } } impl Drop for Child { fn drop(&mut self) { if self.kill_on_drop { drop(self.kill()); } } } /// A Future for getting the Pty file descriptor. 
/// /// # Example /// /// ``` /// extern crate tokio; /// extern crate tokio_pty_process; /// /// use tokio_pty_process::{AsyncPtyMaster, AsyncPtyFd}; /// use tokio::prelude::*; /// /// fn main() { /// let master = AsyncPtyMaster::open() /// .expect("Could not open the PTY"); /// /// let fd = AsyncPtyFd::from(master).wait() /// .expect("Could not get the File descriptor"); /// } /// ``` pub struct AsyncPtyFd<T: AsAsyncPtyFd>(T); impl<T: AsAsyncPtyFd> AsyncPtyFd<T> { /// Construct a new AsyncPtyFd future pub fn from(inner: T) -> Self { AsyncPtyFd(inner) } } impl<T: AsAsyncPtyFd> Future for AsyncPtyFd<T> { type Item = RawFd; type Error = io::Error; fn poll(&mut self) -> Poll<RawFd, io::Error> { self.0.as_async_pty_fd() } } /// Trait to asynchronously get the `RawFd` of the master side of the PTY pub trait AsAsyncPtyFd { /// Return a `Poll` containing the RawFd fn as_async_pty_fd(&self) -> Poll<RawFd, io::Error>; } impl AsAsyncPtyFd for AsyncPtyMaster { fn as_async_pty_fd(&self) -> Poll<RawFd, io::Error> { Ok(Async::Ready(self.as_raw_fd())) } } /// Trait containing generalized methods for PTYs pub trait PtyMaster { /// Return the full pathname of the slave device counterpart /// /// # Example /// /// ``` /// extern crate tokio; /// extern crate tokio_pty_process; /// /// use std::ffi::OsString; /// use tokio::prelude::*; /// use tokio_pty_process::{AsyncPtyMaster, PtyMaster}; /// /// struct PtsName<T: PtyMaster>(T); /// /// impl<T: PtyMaster> Future for PtsName<T> { /// type Item = OsString; /// type Error = std::io::Error; /// /// fn poll(&mut self) -> Poll<Self::Item, Self::Error> { /// self.0.ptsname() /// } /// } /// /// fn main() { /// let master = AsyncPtyMaster::open().expect("Could not open the PTY"); /// /// let ptsname = PtsName(master).wait().expect("Could not get the ptsname"); /// /// println!("PTS name: {}", ptsname.to_string_lossy()); /// } /// ``` fn ptsname(&self) -> Poll<OsString, io::Error>; /// Resize the PTY /// /// # Example /// /// ``` /// extern crate tokio; /// extern crate tokio_pty_process; /// extern crate libc; /// /// use tokio_pty_process::{AsyncPtyMaster, PtyMaster, CommandExt}; /// use tokio::prelude::*; /// use std::ffi::OsString; /// use libc::c_ushort; /// struct Resize<T: PtyMaster> { /// pty: T, /// rows: c_ushort, /// cols: c_ushort, /// } /// /// impl<T: PtyMaster> Future for Resize<T> { /// type Item = (); /// type Error = std::io::Error; /// /// fn poll(&mut self) -> Poll<Self::Item, Self::Error> { /// self.pty.resize(self.rows, self.cols) /// } /// } /// /// fn main() { /// let master = AsyncPtyMaster::open().expect("Could not open the PTY"); /// /// // On macos, it's only possible to resize a PTY with a child spawned /// // On it, so let's just do that: /// #[cfg(target_os="macos")] /// let mut child = std::process::Command::new("cat") /// .spawn_pty_async(&master) /// .expect("Could not spawn child"); /// /// Resize { /// pty: master, /// cols: 80, /// rows: 50, /// } /// .wait() /// .expect("Could not resize the PTY"); /// /// #[cfg(target_os="macos")] /// child.kill().expect("Could not kill child"); /// } /// ``` fn resize(&self, rows: c_ushort, cols: c_ushort) -> Poll<(), io::Error>; /// Get the PTY size /// /// # Example /// /// ``` /// extern crate tokio; /// extern crate tokio_pty_process; /// extern crate libc; /// /// use tokio_pty_process::{AsyncPtyMaster, PtyMaster, CommandExt}; /// use tokio::prelude::*; /// use std::ffi::OsString; /// use libc::c_ushort; /// /// struct GetSize<'a, T: PtyMaster> (&'a T); /// impl<'a, T: PtyMaster> Future 
for GetSize<'a, T> { /// type Item = (c_ushort, c_ushort); /// type Error = std::io::Error; /// fn poll(&mut self) -> Poll<Self::Item, Self::Error> { /// self.0.winsize() /// } /// } /// /// fn main() { /// let master = AsyncPtyMaster::open().expect("Could not open the PTY"); /// /// // On macos, it's only possible to resize a PTY with a child spawned /// // On it, so let's just do that: /// #[cfg(target_os="macos")] /// let mut child = std::process::Command::new("cat") /// .spawn_pty_async(&master) /// .expect("Could not spawn child"); /// /// let (rows, cols) = GetSize(&master) /// .wait() /// .expect("Could not get PTY size"); /// /// #[cfg(target_os="macos")] /// child.kill().expect("Could not kill child"); /// } /// ``` fn winsize(&self) -> Poll<(c_ushort, c_ushort), io::Error>; } impl<T: AsAsyncPtyFd> PtyMaster for T { fn ptsname(&self) -> Poll<OsString, io::Error> { let mut buf: [libc::c_char; 512] = [0; 512]; let fd = try_ready!(self.as_async_pty_fd()); #[cfg(not(any(target_os = "macos", target_os = "freebsd")))] { if unsafe { libc::ptsname_r(fd, buf.as_mut_ptr(), buf.len()) } != 0 { return Err(io::Error::last_os_error()); } } #[cfg(any(target_os = "macos", target_os = "freebsd"))] unsafe { let st = libc::ptsname(fd); if st.is_null() { return Err(io::Error::last_os_error()); } libc::strncpy(buf.as_mut_ptr(), st, buf.len()); } let ptsname = OsStr::from_bytes(unsafe { CStr::from_ptr(&buf as _) }.to_bytes()); Ok(Async::Ready(ptsname.to_os_string())) } fn winsize(&self) -> Poll<(c_ushort, c_ushort), io::Error> { let fd = try_ready!(self.as_async_pty_fd()); let mut winsz: libc::winsize = unsafe { std::mem::zeroed() }; if unsafe { libc::ioctl(fd, libc::TIOCGWINSZ.into(), &mut winsz) } != 0 { return Err(io::Error::last_os_error()); } Ok(Async::Ready((winsz.ws_row, winsz.ws_col))) } fn resize(&self, rows: c_ushort, cols: c_ushort) -> Poll<(), io::Error> { let fd = try_ready!(self.as_async_pty_fd()); let winsz = libc::winsize { ws_row: rows, ws_col: cols, ws_xpixel: 0, ws_ypixel: 0, }; if unsafe { libc::ioctl(fd, libc::TIOCSWINSZ.into(), &winsz) } != 0 { return Err(io::Error::last_os_error()); } Ok(Async::Ready(())) } } /// A private trait for the extending `std::process::Command`. trait CommandExtInternal { fn spawn_pty_async_full(&mut self, ptymaster: &AsyncPtyMaster, raw: bool) -> io::Result<Child>; } impl CommandExtInternal for process::Command { fn spawn_pty_async_full(&mut self, ptymaster: &AsyncPtyMaster, raw: bool) -> io::Result<Child> { let master_fd = ptymaster.as_raw_fd(); let slave = ptymaster.open_sync_pty_slave()?; let slave_fd = slave.as_raw_fd(); self.stdin(slave.try_clone()?); self.stdout(slave.try_clone()?); self.stderr(slave); // XXX any need to close slave handles in the parent process beyond // what's done here? unsafe { self.pre_exec(move || { if raw { let mut attrs: libc::termios = mem::zeroed(); if libc::tcgetattr(slave_fd, &mut attrs as _) != 0 { return Err(io::Error::last_os_error()); } libc::cfmakeraw(&mut attrs as _); if libc::tcsetattr(slave_fd, libc::TCSANOW, &attrs as _) != 0 { return Err(io::Error::last_os_error()); } } // This is OK even though we don't own master since this process is // about to become something totally different anyway. 
if libc::close(master_fd) != 0 { return Err(io::Error::last_os_error()); } if libc::setsid() < 0 { return Err(io::Error::last_os_error()); } if libc::ioctl(0, libc::TIOCSCTTY.into(), 1) != 0 { return Err(io::Error::last_os_error()); } Ok(()) }); } Ok(Child::new(self.spawn()?)) } } /// An extension trait for the `std::process::Command` type. /// /// This trait provides new `spawn_pty_async` and `spawn_pty_async_raw` /// methods that allow one to spawn a new process that is connected to the /// current process through a pseudo-TTY. pub trait CommandExt { /// Spawn a subprocess that connects to the current one through a /// pseudo-TTY in canonical (“cooked“, not “raw”) mode. /// /// This function creates the necessary PTY slave and uses /// `std::process::Command::before_exec` to do the neccessary setup before /// the child process is spawned. In particular, it calls `setsid()` to /// launch a new TTY sesson. /// /// The child process’s standard input, standard output, and standard /// error are all connected to the pseudo-TTY slave. fn spawn_pty_async(&mut self, ptymaster: &AsyncPtyMaster) -> io::Result<Child>; /// Spawn a subprocess that connects to the current one through a /// pseudo-TTY in raw (“non-canonical”, not “cooked”) mode. /// /// This function creates the necessary PTY slave and uses /// `std::process::Command::before_exec` to do the neccessary setup before /// the child process is spawned. In particular, it sets the slave PTY /// handle to raw mode and calls `setsid()` to launch a new TTY sesson. /// /// The child process’s standard input, standard output, and standard /// error are all connected to the pseudo-TTY slave. fn spawn_pty_async_raw(&mut self, ptymaster: &AsyncPtyMaster) -> io::Result<Child>; } impl CommandExt for process::Command { fn spawn_pty_async(&mut self, ptymaster: &AsyncPtyMaster) -> io::Result<Child> { self.spawn_pty_async_full(ptymaster, false) } fn spawn_pty_async_raw(&mut self, ptymaster: &AsyncPtyMaster) -> io::Result<Child> { self.spawn_pty_async_full(ptymaster, true) } } #[cfg(test)] mod tests { extern crate errno; extern crate libc; use super::*; /// Test that the PTY master file descriptor is in nonblocking mode. We do /// this in a pretty hacky and dumb way, by creating the AsyncPtyMaster /// and then just snarfing its FD and seeing whether a Unix `read(2)` call /// errors out with EWOULDBLOCK (instead of blocking forever). In /// principle it would be nice to actually spawn a subprogram and test /// reading through the whole Tokio I/O subsystem, but that's annoying to /// implement and can actually muddy the picture. Namely: if you try to /// `master.read()` inside a Tokio event loop here, on Linux you'll get an /// ErrorKind::WouldBlock I/O error from Tokio without it even attempting /// the underlying `read(2)` system call, because Tokio uses epoll to test /// the FD's readiness in a way that works orthogonal to whether it's set /// to non-blocking mode. 
#[test] fn basic_nonblocking() { let master = AsyncPtyMaster::open().unwrap(); let fd = master.as_raw_fd(); let mut buf = [0u8; 128]; let rval = unsafe { libc::read(fd, buf.as_mut_ptr() as *mut libc::c_void, 128) }; let errno: i32 = errno::errno().into(); assert_eq!(rval, -1); assert_eq!(errno, libc::EWOULDBLOCK as i32); } struct GetSize<'a, T: PtyMaster>(&'a T); impl<'a, T: PtyMaster> Future for GetSize<'a, T> { type Item = (c_ushort, c_ushort); type Error = std::io::Error; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { self.0.winsize() } } struct Resize<'a, T: PtyMaster> { pty: &'a T, rows: c_ushort, cols: c_ushort, } impl<'a, T: PtyMaster> Future for Resize<'a, T> { type Item = (); type Error = std::io::Error; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { self.pty.resize(self.rows, self.cols) } } #[test] fn test_winsize() { let master = AsyncPtyMaster::open().expect("Could not open the PTY"); // On macos, it's only possible to resize a PTY with a child spawned // On it, so let's just do that: #[cfg(target_os = "macos")] let mut child = std::process::Command::new("cat") .spawn_pty_async(&master) .expect("Could not spawn child"); // Set the size Resize { pty: &master, cols: 80, rows: 50, } .wait() .expect("Could not resize the PTY"); let (rows, cols) = GetSize(&master).wait().expect("Could not get PTY size"); assert_eq!(cols, 80); assert_eq!(rows, 50); #[cfg(target_os = "macos")] child.kill().expect("Could not kill child"); } }
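As an illustrative sketch (not part of the crate above): the `PtyMaster` trait can be driven through a small hand-rolled future, the same way the doc example and tests wrap `winsize`. This assumes `futures` 0.1 (whose `Poll`/`Async` types the code above uses) and the `AsyncPtyMaster` type defined earlier in the file; `PtsName` and `show_slave_path` are hypothetical helpers.

```rust
use futures::{Future, Poll};
use std::ffi::OsString;

// Tiny future that resolves to the slave device path (e.g. "/dev/pts/3").
struct PtsName<'a, T: PtyMaster>(&'a T);

impl<'a, T: PtyMaster> Future for PtsName<'a, T> {
    type Item = OsString;
    type Error = std::io::Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        self.0.ptsname()
    }
}

fn show_slave_path() -> Result<(), Box<dyn std::error::Error>> {
    // Allocate the master end of the PTY and register it with the reactor.
    let master = AsyncPtyMaster::open()?;
    // Block on the future; in a real program this would run on the event loop.
    let name = PtsName(&master).wait()?;
    println!("slave side lives at {:?}", name);
    Ok(())
}
```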
true
1d8f78c5b9f3bf13614396bd6fb1de5a5ac854bf
Rust
shaipe/china-areas
/src/error.rs
UTF-8
4,851
3.515625
4
[]
no_license
//! copyright © ecdata.cn 2020 - present
//! Custom error handling
//! created by shaipe

use std::convert::Into;
use std::error::Error as StdError;
use std::fmt;

#[derive(Debug, PartialEq, Eq)]
pub enum LogLevel {
    // Informational output
    Info,
    // Error
    Error,
    // Warning
    Warn,
    // Watch (monitoring)
    Watch,
}

impl LogLevel {
    /// Returns the string form of the log level
    pub fn as_str(&self) -> &str {
        match *self {
            LogLevel::Error => "error",
            LogLevel::Warn => "warn",
            LogLevel::Watch => "watch",
            _ => "info",
        }
    }
}

#[derive(Debug)]
pub enum ErrorKind {
    Msg(String),
    Io(::std::io::Error),
    Custom { code: i32, msg: String },
}

/// The Error type
#[derive(Debug)]
pub struct Error {
    /// Kind of error
    pub kind: ErrorKind,
    pub source: Option<Box<dyn StdError>>,
}

unsafe impl Sync for Error {}
unsafe impl Send for Error {}

/// Implements the standard error trait
impl StdError for Error {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        let source = self.source.as_ref().map(|c| &**c);
        if source.is_none() {
            match self.kind {
                // ErrorKind::Custom(ref err) => source = err.source(),
                _ => (),
            };
        }
        source
    }
}

/// Display formatting
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.kind {
            ErrorKind::Msg(ref message) => write!(f, "{}", message),
            ErrorKind::Custom { code, ref msg } => {
                write!(f, "custom error code: {}, message: {}", code, msg)
            }
            ErrorKind::Io(ref e) => write!(f, "{}", e),
        }
    }
}

impl Error {
    /// Returns the error code
    pub fn get_code(&self) -> i32 {
        match self.kind {
            ErrorKind::Msg(ref _message) => 3000,
            ErrorKind::Custom { code, ref msg } => {
                let _ = msg;
                code
            }
            ErrorKind::Io(ref _e) => 4000,
        }
    }

    /// Returns the message carried by the error
    pub fn get_message(&self) -> String {
        match self.kind {
            ErrorKind::Msg(ref msg) => msg.to_string(),
            ErrorKind::Custom { code, ref msg } => {
                let _ = code;
                msg.to_string()
            }
            ErrorKind::Io(ref e) => format!("{}", e),
        }
    }

    /// Creates generic error
    pub fn msg(value: impl ToString) -> Self {
        Self {
            kind: ErrorKind::Msg(value.to_string()),
            source: None,
        }
    }

    /// Creates generic error with a cause
    pub fn chain(value: impl ToString, source: impl Into<Box<dyn StdError>>) -> Self {
        Self {
            kind: ErrorKind::Msg(value.to_string()),
            source: Some(source.into()),
        }
    }

    /// Custom error
    pub fn custom(code: i32, msg: impl ToString) -> Self {
        Self {
            kind: ErrorKind::Custom {
                code,
                msg: format!("{}", msg.to_string()),
            },
            source: None,
        }
    }

    // pub fn error_log(code: i32, msg: impl ToString, log: impl ToString) -> Self {
    //     // Write the error message to the log
    //     Self::write_to_file(
    //         LogLevel::Error,
    //         format!("error: {}; log: {}", msg.to_string(), log.to_string()),
    //     );
    //     Self {
    //         kind: ErrorKind::Custom {
    //             code,
    //             msg: format!("{}", msg.to_string()),
    //         },
    //         source: None,
    //     }
    // }

    /// Custom error with a source
    pub fn custom_err(code: i32, msg: impl ToString, source: impl Into<Box<dyn StdError>>) -> Self {
        Self {
            kind: ErrorKind::Custom {
                code,
                msg: msg.to_string(),
            },
            source: Some(source.into()),
        }
    }

    /// Returns error trace information
    pub fn trace_info() -> String {
        return "".to_owned();
        // The macros below have no meaning when expanded here
        // let file_paths = file!()
        //     .split("/src/")
        //     .map(|s| s.to_owned())
        //     .collect::<Vec<_>>();
        // let file_path = if file_paths.len() > 1 {
        //     file_paths[1].replace(".rs", "")
        // } else {
        //     "".to_owned()
        // };
        // format!("{}::{}:{}", module_path!(), file_path, line!())
    }
}

impl From<&str> for Error {
    fn from(e: &str) -> Self {
        Self::msg(e)
    }
}

impl From<String> for Error {
    fn from(e: String) -> Self {
        Self::msg(e)
    }
}

impl From<::std::io::Error> for Error {
    fn from(e: ::std::io::Error) -> Self {
        Self {
            kind: ErrorKind::Io(e),
            source: None,
        }
    }
}
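A short, hypothetical sketch of how another function in this module might use the error type above. The function name, the file path, and the error code 1001 are made up for illustration; the `?` conversion relies on the `From<std::io::Error>` impl at the bottom of the file.

```rust
fn load_area_file(path: &str) -> Result<String, Error> {
    // std::io::Error converts into ErrorKind::Io via the From impl above.
    let raw = std::fs::read_to_string(path)?;
    if raw.trim().is_empty() {
        // Application-defined code plus message.
        return Err(Error::custom(1001, "area file is empty"));
    }
    Ok(raw)
}

fn report(err: &Error) {
    // get_code(): 3000 for Msg, 4000 for Io, or the code stored in Custom.
    println!(
        "[{}] {} ({})",
        LogLevel::Error.as_str(),
        err.get_message(),
        err.get_code()
    );
}
```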
true
6f1cb9b8b9e1240e09b4a49e82f192249e4416c2
Rust
winksaville/fuchsia
/src/connectivity/bluetooth/tools/bt-hci-emulator/src/main.rs
UTF-8
1,862
2.5625
3
[ "BSD-3-Clause" ]
permissive
// Copyright 2018 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { failure::Error, fuchsia_bluetooth::hci_emulator::Emulator, futures::future::pending, rand::{self, Rng}, }; fn usage(appname: &str) { eprintln!("usage: {}", appname); eprintln!(" {} DEVICE_NAME", appname); eprintln!(" {} --help", appname); eprintln!(""); eprintln!("Instantiate and manipulate a new bt-hci device emulator"); eprintln!( "examples: {} - Instantiates a new emulator device with a random ID", appname ); eprintln!( "examples: {} my-device-name - Instantiates a new emulator device named \"my-device-name\"", appname ); } // TODO(armansito): Add ways to pass controller settings. #[fuchsia_async::run_singlethreaded] async fn main() -> Result<(), Error> { let args: Vec<_> = std::env::args().collect(); let appname = &args[0]; let device_name = match args.len() { 1 => { let mut rng = rand::thread_rng(); format!("bt-hci-emulator-{:X}", rng.gen::<u32>()) } 2 => { let arg = &args[1]; if arg == "--help" { usage(appname); return Ok(()); } arg.clone() } _ => { usage(appname); return Ok(()); } }; let _emulator = Emulator::create_and_publish(&device_name).await?; eprintln!("Instantiated emulator named {}", device_name); // TODO(armansito): Instantiate a REPL here. For now we await forever to make sure that the // emulator device remains alive until the user terminates this program (it will be removed when // `emulator` drops). pending().await }
true
4643723d89d009afb2ef4232a3a78f6e570cdfb9
Rust
mneumann/toyforth
/src/main.rs
UTF-8
11,403
3.0625
3
[ "MIT" ]
permissive
use std::io::{self, BufRead}; use std::str::FromStr; #[derive(Copy, Clone, Debug)] enum CompiledInstruction { DUP, DROP, SWAP, ADD, SUB, MUL, DIV, CALL, JUMP, RET, NOP, // Used to push data onto the data stack IMM(usize), PRINT, } struct Word { name: String, inline_iseq: Vec<CompiledInstruction>, } #[derive(Copy, Clone)] enum CompileMode { TopLevel, Definition, DefinitionBody, } struct VM { data_stack: Vec<usize>, call_stack: Vec<usize>, instruction_memory: Vec<CompiledInstruction>, instruction_pointer: usize, compile_mode: CompileMode, words: Vec<Word>, in_comment: bool, } #[derive(Debug, Clone)] enum VMErr { StackUnderflow, InvalidToken(String), } impl VM { fn new() -> VM { VM { data_stack: vec![], call_stack: vec![], instruction_memory: Vec::new(), instruction_pointer: 0, compile_mode: CompileMode::TopLevel, in_comment: false, words: vec![ Word { name: String::from("DUP"), inline_iseq: vec![CompiledInstruction::DUP], }, Word { name: String::from("DROP"), inline_iseq: vec![CompiledInstruction::DROP], }, Word { name: String::from("SWAP"), inline_iseq: vec![CompiledInstruction::SWAP], }, Word { name: String::from("+"), inline_iseq: vec![CompiledInstruction::ADD], }, Word { name: String::from("-"), inline_iseq: vec![CompiledInstruction::SUB], }, Word { name: String::from("*"), inline_iseq: vec![CompiledInstruction::MUL], }, Word { name: String::from("/"), inline_iseq: vec![CompiledInstruction::DIV], }, Word { name: String::from("CALL"), inline_iseq: vec![CompiledInstruction::CALL], }, Word { name: String::from("JUMP"), inline_iseq: vec![CompiledInstruction::JUMP], }, Word { name: String::from(";"), inline_iseq: vec![CompiledInstruction::RET], }, Word { name: String::from("."), inline_iseq: vec![CompiledInstruction::PRINT], }, ], } } fn exec_ins(&mut self, ins: CompiledInstruction) -> Result<(), VMErr> { match ins { CompiledInstruction::NOP => { // No operation } CompiledInstruction::DUP => { let tos = self.pop_data_stack()?; self.data_stack.push(tos); self.data_stack.push(tos); } CompiledInstruction::DROP => { let _tos = self.pop_data_stack()?; } CompiledInstruction::SWAP => { let b = self.pop_data_stack()?; let a = self.pop_data_stack()?; self.data_stack.push(b); self.data_stack.push(a); } CompiledInstruction::ADD => { let b = self.pop_data_stack()?; let a = self.pop_data_stack()?; self.data_stack.push(a + b); } CompiledInstruction::SUB => { let b = self.pop_data_stack()?; let a = self.pop_data_stack()?; self.data_stack.push(a - b); } CompiledInstruction::MUL => { let b = self.pop_data_stack()?; let a = self.pop_data_stack()?; self.data_stack.push(a * b); } CompiledInstruction::DIV => { let b = self.pop_data_stack()?; let a = self.pop_data_stack()?; self.data_stack.push(a / b); } CompiledInstruction::IMM(n) => { self.data_stack.push(n); } CompiledInstruction::CALL => { self.call_stack.push(self.instruction_pointer); self.instruction_pointer = self.pop_data_stack()?; } CompiledInstruction::JUMP => { self.instruction_pointer = self.pop_data_stack()?; } CompiledInstruction::RET => { self.instruction_pointer = self.pop_call_stack()?; } CompiledInstruction::PRINT => { let tos = self.pop_data_stack()?; print!(" {}", tos); } } Ok(()) } fn pop_data_stack(&mut self) -> Result<usize, VMErr> { match self.data_stack.pop() { Some(n) => Ok(n), None => Err(VMErr::StackUnderflow), } } fn pop_call_stack(&mut self) -> Result<usize, VMErr> { match self.call_stack.pop() { Some(n) => Ok(n), None => Err(VMErr::StackUnderflow), } } // Places `ins_seq` somewhere in the instruction_memory and execute it. 
pub fn run(&mut self, ins_seq: &[CompiledInstruction]) -> Result<(), VMErr> { self.instruction_pointer = self.instruction_memory.len(); let old_imem_len = self.instruction_memory.len(); for &ins in ins_seq { self.instruction_memory.push(ins); } loop { if self.instruction_pointer == self.instruction_memory.len() { // restore original instruction memory self.instruction_memory.truncate(old_imem_len); return Ok(()); } let ins = *self.instruction_memory .get(self.instruction_pointer) .unwrap(); // XXX self.instruction_pointer += 1; if let Err(err) = self.exec_ins(ins) { // restore original instruction memory self.instruction_memory.truncate(old_imem_len); return Err(err); } } } pub fn in_compile_mode(&self) -> bool { match self.compile_mode { CompileMode::TopLevel => false, CompileMode::Definition => true, CompileMode::DefinitionBody => true, } } // Compiles `line` into a sequence of instructions which is appended to `ins_seq`. // As a side-effect, when a ":" definition is occured, this will add a // word to the dictionary. pub fn compile_line( &mut self, line: &str, ins_seq: &mut Vec<CompiledInstruction>, ) -> Result<(), VMErr> { let mut remainder: &str = line; loop { match remainder.find(char::is_whitespace) { None => { if remainder.len() > 0 { let _ = self.compile_token(remainder, ins_seq)?; } return Ok(()); } Some(pos) => { if pos > 0 { // if pos == 0, then we found a whitespace at the beginning let (token, rest) = remainder.split_at(pos); let _ = self.compile_token(token, ins_seq)?; remainder = rest; } else { let (_token, rest) = remainder.split_at(1); remainder = rest; } } } } } fn compile_token( &mut self, token: &str, ins_seq: &mut Vec<CompiledInstruction>, ) -> Result<(), VMErr> { // process comments if self.in_comment { if token == ")" { self.in_comment = false; } return Ok(()); } else { if token == "(" { self.in_comment = true; return Ok(()); } } let compile_mode = self.compile_mode; match compile_mode { CompileMode::TopLevel => { match token { ":" => { // starts a definition self.compile_mode = CompileMode::Definition; } _ => { for ins in self.token_to_instruction_seq(token)? { ins_seq.push(ins); } } } } CompileMode::Definition => { self.words.push(Word { name: token.into(), inline_iseq: vec![ CompiledInstruction::IMM(self.instruction_memory.len()), CompiledInstruction::CALL, ], }); self.compile_mode = CompileMode::DefinitionBody; } CompileMode::DefinitionBody => { for ins in self.token_to_instruction_seq(token)? { self.instruction_memory.push(ins); } if token == ";" { // ends a definition self.compile_mode = CompileMode::TopLevel; } } } Ok(()) } fn token_to_instruction_seq(&self, token: &str) -> Result<Vec<CompiledInstruction>, VMErr> { match self.lookup_word(token) { None => { // it's not a word. 
it might be a number, or an invalid token match usize::from_str(token) { Ok(num) => { return Ok(vec![CompiledInstruction::IMM(num)]); } Err(_) => { return Err(VMErr::InvalidToken(token.into())); } } } Some(word) => { return Ok(word.inline_iseq.clone()); } } } fn lookup_word(&self, token: &str) -> Option<&Word> { self.words.iter().find(|w| w.name == token) } } fn read_line() -> String { let stdin = io::stdin(); let mut iterator = stdin.lock().lines(); return iterator.next().unwrap().unwrap(); } fn main() { println!("ToyForth started"); let mut vm = VM::new(); let mut ins_seq = Vec::new(); loop { let line = read_line(); ins_seq.clear(); match vm.compile_line(&line, &mut ins_seq) { Ok(()) => { match vm.run(&ins_seq) { Ok(()) => { if vm.in_compile_mode() { println!(" compiled"); } else { println!(" ok"); } } Err(err) => { println!("Error: {:?}", err); } } } Err(err) => { println!("Error: {:?}", err); } } } }
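If the compile/run cycle above is driven directly instead of through the stdin REPL, defining and calling a colon word looks like the sketch below. `run_source` and `demo` are hypothetical helpers placed alongside the code above (the types are crate-private); `.` pops and prints the top of the data stack.

```rust
fn run_source(vm: &mut VM, line: &str) -> Result<(), VMErr> {
    let mut ins_seq = Vec::new();
    vm.compile_line(line, &mut ins_seq)?;
    vm.run(&ins_seq)
}

fn demo() -> Result<(), VMErr> {
    let mut vm = VM::new();
    // The definition body (DUP ADD RET) is appended to instruction_memory.
    run_source(&mut vm, ": DOUBLE DUP + ;")?;
    // Pushes 21, CALLs the definition, then prints the result: " 42".
    run_source(&mut vm, "21 DOUBLE .")?;
    Ok(())
}
```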
true
7a71fa67e11e0d98c8993a09156922ad3c61a2f2
Rust
fuchsnj/lading
/lading_common/src/block.rs
UTF-8
2,421
2.859375
3
[ "MIT" ]
permissive
use crate::payload::{self, Serialize}; use metrics::gauge; use rand::prelude::SliceRandom; use rand::Rng; use std::convert::TryInto; use std::num::NonZeroU32; #[derive(Debug)] pub enum Error { Payload(payload::Error), Empty, } impl From<payload::Error> for Error { fn from(error: payload::Error) -> Self { Error::Payload(error) } } #[derive(Debug)] pub struct Block { pub total_bytes: NonZeroU32, pub lines: u64, pub bytes: Vec<u8>, } #[inline] fn total_newlines(input: &[u8]) -> u64 { bytecount::count(input, b'\n') as u64 } pub fn chunk_bytes<R>(rng: &mut R, total_bytes: usize, block_byte_sizes: &[usize]) -> Vec<usize> where R: Rng + Sized, { assert!(!block_byte_sizes.is_empty()); let mut chunks = Vec::new(); let mut bytes_remaining = total_bytes; let minimum = *block_byte_sizes.iter().min().unwrap(); let maximum = *block_byte_sizes.iter().max().unwrap(); while bytes_remaining > minimum { let bytes_max = std::cmp::min(maximum, bytes_remaining); let block_bytes = block_byte_sizes.choose(rng).unwrap(); if *block_bytes > bytes_max { continue; } chunks.push(*block_bytes); bytes_remaining = bytes_remaining.saturating_sub(*block_bytes); } chunks } #[allow(clippy::ptr_arg)] #[allow(clippy::cast_precision_loss)] pub fn construct_block_cache<S>( serializer: &S, block_chunks: &[usize], labels: &Vec<(String, String)>, ) -> Vec<Block> where S: Serialize, { let mut block_cache: Vec<Block> = Vec::with_capacity(block_chunks.len()); for block_size in block_chunks { let mut block: Vec<u8> = Vec::with_capacity(*block_size); let rng = rand::thread_rng(); serializer.to_bytes(rng, *block_size, &mut block).unwrap(); block.shrink_to_fit(); // For unknown reasons this fails. Will need to start property testing // this library. // assert!(!block.is_empty()); if block.is_empty() { continue; } let total_bytes = NonZeroU32::new(block.len().try_into().unwrap()).unwrap(); let newlines = total_newlines(&block); block_cache.push(Block { total_bytes, lines: newlines, bytes: block, }); } assert!(!block_cache.is_empty()); gauge!("block_construction_complete", 1.0, labels); block_cache }
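A minimal sketch of exercising `chunk_bytes` with a seeded RNG; it assumes the `rand` 0.7/0.8-style `StdRng`/`SeedableRng` API that the imports above suggest, and the test-module name is arbitrary.

```rust
#[cfg(test)]
mod chunking_tests {
    use super::*;
    use rand::rngs::StdRng;
    use rand::SeedableRng;

    #[test]
    fn chunks_respect_allowed_sizes_and_budget() {
        let mut rng = StdRng::seed_from_u64(7);
        let sizes = [1_024, 4_096, 16_384];
        let budget = 1_024 * 1_024;

        // Split ~1 MiB of payload into randomly chosen block sizes.
        let chunks = chunk_bytes(&mut rng, budget, &sizes);

        // Every chunk is one of the allowed sizes and the budget is never exceeded.
        assert!(chunks.iter().all(|c| sizes.contains(c)));
        assert!(chunks.iter().sum::<usize>() <= budget);
    }
}
```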
true
e48d065814d3fd24b334570b72a9c1c64c163a3d
Rust
togatoga/procon-archive
/atcoder.jp/abc117/abc117_c/Main.rs
UTF-8
2,159
3.25
3
[]
no_license
fn solve() { let s = std::io::stdin(); let mut sc = Scanner { stdin: s.lock() }; let n: usize = sc.read(); let m: usize = sc.read(); let mut xs: Vec<i32> = sc.vec(m); xs.sort(); if n == 1 { let mut sum: u64 = 0; for i in 0..m - 1 { let tmp = xs[i + 1] - xs[i]; sum += tmp as u64; } println!("{}", sum); return ; } let n = n - 2; let mut ys: Vec<u64> = Vec::new(); for i in 0..m - 1 { let tmp = xs[i + 1] - xs[i]; ys.push(tmp as u64); } ys.sort_by(|x, y| y.cmp(x)); let mut result = 0; for i in n+1..ys.len() { result += ys[i]; } println!("{}", result); } fn main() { std::thread::Builder::new() .stack_size(64 * 1024 * 1024) // 64MB .spawn(|| solve()) .unwrap() .join() .unwrap(); } //snippet from kenkoooo pub struct Scanner<R> { stdin: R, } impl<R: std::io::Read> Scanner<R> { pub fn read<T: std::str::FromStr>(&mut self) -> T { use std::io::Read; let buf = self.stdin .by_ref() .bytes() .map(|b| b.unwrap()) .skip_while(|&b| b == b' ' || b == b'\n' || b == b'\r') .take_while(|&b| b != b' ' && b != b'\n' && b != b'\r') .collect::<Vec<_>>(); unsafe { std::str::from_utf8_unchecked(&buf) } .parse() .ok() .expect("Parse error.") } pub fn read_line(&mut self) -> String { use std::io::Read; let buf = self.stdin .by_ref() .bytes() .map(|b| b.unwrap()) .skip_while(|&b| b == b'\n' || b == b'\r') .take_while(|&b| b != b'\n' && b != b'\r') .collect::<Vec<_>>(); unsafe { std::str::from_utf8_unchecked(&buf) } .parse() .ok() .expect("Parse error.") } pub fn vec<T: std::str::FromStr>(&mut self, n: usize) -> Vec<T> { (0..n).map(|_| self.read()).collect() } pub fn chars(&mut self) -> Vec<char> { self.read::<String>().chars().collect() } }
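Because `Scanner` is generic over any `std::io::Read`, it can be exercised against an in-memory buffer instead of stdin. A sketch, written as a test in the same file since the `stdin` field is private:

```rust
#[cfg(test)]
mod scanner_tests {
    use super::*;
    use std::io::Cursor;

    #[test]
    fn reads_tokens_from_a_buffer() {
        let mut sc = Scanner {
            stdin: Cursor::new("3 4\n10 20 30 40\n"),
        };
        let n: usize = sc.read();
        let m: usize = sc.read();
        let xs: Vec<i64> = sc.vec(m);
        assert_eq!((n, m), (3, 4));
        assert_eq!(xs, vec![10, 20, 30, 40]);
    }
}
```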
true
6d05e0433278d99f585985707a0851b56a474fc7
Rust
clouddra/connected_component
/src/main.rs
UTF-8
1,388
3.078125
3
[]
no_license
use std::io;

use ndarray::{Array2, Axis};

fn main() {
    // The first line fixes the matrix dimension; each line is one adjacency row.
    let mut input = read_line().unwrap();
    let size = input.len();
    let mut matrix: Array2<bool> = Array2::<bool>::default((size, size));
    let mut visited: Vec<bool> = vec![false; size];
    populate_matrix(&mut matrix, &input, 0);
    for i in 1..size {
        input = read_line().unwrap();
        populate_matrix(&mut matrix, &input, i);
    }
    // Each DFS from an unvisited root discovers exactly one connected component.
    let component_count = (0..size)
        .fold(0, |acc, root| acc + dfs(&mut matrix, &mut visited, root));
    println!("{}", component_count);
}

fn read_line() -> Result<String, io::Error> {
    let mut input = String::new();
    io::stdin()
        .read_line(&mut input)
        .and_then(|_| Ok(input.trim_end().to_string()))
}

fn populate_matrix(matrix: &mut Array2<bool>, line: &str, from: usize) {
    for (to, character) in line.chars().enumerate() {
        match character {
            '1' => { matrix[[from, to]] = true }
            _ => { matrix[[from, to]] = false }
        };
    }
}

fn dfs(matrix: &Array2<bool>, visited: &mut Vec<bool>, root: usize) -> u32 {
    if visited[root] {
        return 0;
    }
    visited[root] = true;
    let col_count = matrix.len_of(Axis(1));
    for neighbor in 0..col_count {
        if matrix[[root, neighbor]] {
            dfs(matrix, visited, neighbor);
        }
    }
    return 1;
}
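A minimal sketch of the component counting without going through stdin, written as a test in the same file; the matrix literal is an arbitrary 3x3 adjacency matrix chosen for illustration.

```rust
#[cfg(test)]
mod tests {
    use super::*;
    use ndarray::Array2;

    #[test]
    fn counts_two_components() {
        let rows = ["110", "110", "001"];
        let size = rows.len();
        let mut matrix: Array2<bool> = Array2::default((size, size));
        let mut visited = vec![false; size];
        for (i, row) in rows.iter().enumerate() {
            populate_matrix(&mut matrix, row, i);
        }
        let count = (0..size).fold(0, |acc, root| acc + dfs(&matrix, &mut visited, root));
        // Vertices 0 and 1 form one component; vertex 2 is isolated.
        assert_eq!(count, 2);
    }
}
```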
true
aec108a6e6626b194b5266f2f7e1c9a64869a151
Rust
kjempelodott/adventofcode
/aoc2020/src/bin/day14.rs
UTF-8
2,048
3.109375
3
[]
no_license
extern crate adventofcode2020; use adventofcode2020::read_from_stdin; extern crate parse_display; use parse_display::{Display,FromStr}; use std::collections::HashMap; #[derive(Debug,FromStr,Display)] enum Program { #[display("mask = {0}")] Mask(String), #[display("mem[{0}] = {1}")] MemSet(u64,u64) } #[derive(Default)] struct BitMask { m: u64, c: u64, fl: Vec<u64> } impl BitMask { fn mask(&self, value: u64) -> u64 { self.c | self.m & value } fn update(&mut self, s: &str) { self.m = u64::from_str_radix({ &s.chars() .map(|c| if c == 'X' { '1' } else { '0' }) .collect::<String>() },2).unwrap(); self.c = u64::from_str_radix({ &s.chars() .map(|c| if c == 'X' { '0' } else { c }) .collect::<String>() },2).unwrap(); self.fl = (0..36).map(|i| (1 << i) & self.m) .filter(|v| *v != 0) .collect(); } fn superpositions(&self, addr: u64) -> Vec<u64> { let mut space = vec![(self.c | self.m | addr)^self.m]; for bit in self.fl.iter() { let new: Vec<u64> = space.iter() .map(|addr| addr | bit) .collect(); space.extend_from_slice(&new); } space } } fn main() { let mut mask = BitMask::default(); let mut mem: HashMap<u64,u64> = HashMap::new(); let mut mem_v2: HashMap<u64,u64> = HashMap::new(); read_from_stdin().lines() .map(|l| l.parse::<Program>().unwrap()) .for_each(|p| { match p { Program::Mask(s) => mask.update(&s), Program::MemSet(i,n) => { mem.insert(i, mask.mask(n)); for a in mask.superpositions(i) { mem_v2.insert(a, n); } } } }); println!("{}", mem.values().sum::<u64>()); println!("{}", mem_v2.values().sum::<u64>()); }
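A sketch of the two masking modes driven directly, written as a test in the same file since `BitMask`'s methods are private to the binary. The mask is shortened to six characters for readability; real input lines carry 36.

```rust
#[cfg(test)]
mod mask_tests {
    use super::*;

    #[test]
    fn example_mask() {
        let mut mask = BitMask::default();
        mask.update("X1001X");

        // Part-one value masking: '1'/'0' bits are forced, 'X' bits pass through.
        assert_eq!(mask.mask(0b101010), 0b110010);

        // Part-two address decoding: floating bits 0 and 5 expand 42 into four addresses.
        let mut addrs = mask.superpositions(42);
        addrs.sort_unstable();
        assert_eq!(addrs, vec![26, 27, 58, 59]);
    }
}
```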
true
204aef0dc33e5d4acc87a34df3f4ba30758a588f
Rust
sigp/lighthouse
/watch/src/block_packing/database.rs
UTF-8
4,317
2.6875
3
[ "Apache-2.0" ]
permissive
use crate::database::{ schema::{beacon_blocks, block_packing}, watch_types::{WatchHash, WatchSlot}, Error, PgConn, MAX_SIZE_BATCH_INSERT, }; use diesel::prelude::*; use diesel::{Insertable, Queryable}; use log::debug; use serde::{Deserialize, Serialize}; use std::time::Instant; #[derive(Debug, Queryable, Insertable, Serialize, Deserialize)] #[diesel(table_name = block_packing)] pub struct WatchBlockPacking { pub slot: WatchSlot, pub available: i32, pub included: i32, pub prior_skip_slots: i32, } /// Insert a batch of values into the `block_packing` table. /// /// On a conflict, it will do nothing, leaving the old value. pub fn insert_batch_block_packing( conn: &mut PgConn, packing: Vec<WatchBlockPacking>, ) -> Result<(), Error> { use self::block_packing::dsl::*; let mut count = 0; let timer = Instant::now(); for chunk in packing.chunks(MAX_SIZE_BATCH_INSERT) { count += diesel::insert_into(block_packing) .values(chunk) .on_conflict_do_nothing() .execute(conn)?; } let time_taken = timer.elapsed(); debug!("Block packing inserted, count: {count}, time taken: {time_taken:?}"); Ok(()) } /// Selects the row from the `block_packing` table where `slot` is minimum. pub fn get_lowest_block_packing(conn: &mut PgConn) -> Result<Option<WatchBlockPacking>, Error> { use self::block_packing::dsl::*; let timer = Instant::now(); let result = block_packing .order_by(slot.asc()) .limit(1) .first::<WatchBlockPacking>(conn) .optional()?; let time_taken = timer.elapsed(); debug!("Block packing requested: lowest, time_taken: {time_taken:?}"); Ok(result) } /// Selects the row from the `block_packing` table where `slot` is maximum. pub fn get_highest_block_packing(conn: &mut PgConn) -> Result<Option<WatchBlockPacking>, Error> { use self::block_packing::dsl::*; let timer = Instant::now(); let result = block_packing .order_by(slot.desc()) .limit(1) .first::<WatchBlockPacking>(conn) .optional()?; let time_taken = timer.elapsed(); debug!("Block packing requested: highest, time_taken: {time_taken:?}"); Ok(result) } /// Selects a single row of the `block_packing` table corresponding to a given `root_query`. pub fn get_block_packing_by_root( conn: &mut PgConn, root_query: WatchHash, ) -> Result<Option<WatchBlockPacking>, Error> { use self::beacon_blocks::dsl::{beacon_blocks, root}; use self::block_packing::dsl::*; let timer = Instant::now(); let join = beacon_blocks.inner_join(block_packing); let result = join .select((slot, available, included, prior_skip_slots)) .filter(root.eq(root_query)) .first::<WatchBlockPacking>(conn) .optional()?; let time_taken = timer.elapsed(); debug!("Block packing requested: {root_query}, time_taken: {time_taken:?}"); Ok(result) } /// Selects a single row of the `block_packing` table corresponding to a given `slot_query`. pub fn get_block_packing_by_slot( conn: &mut PgConn, slot_query: WatchSlot, ) -> Result<Option<WatchBlockPacking>, Error> { use self::block_packing::dsl::*; let timer = Instant::now(); let result = block_packing .filter(slot.eq(slot_query)) .first::<WatchBlockPacking>(conn) .optional()?; let time_taken = timer.elapsed(); debug!("Block packing requested: {slot_query}, time_taken: {time_taken:?}"); Ok(result) } /// Selects `slot` from all rows of the `beacon_blocks` table which do not have a corresponding /// row in `block_packing`. 
#[allow(dead_code)] pub fn get_unknown_block_packing( conn: &mut PgConn, slots_per_epoch: u64, ) -> Result<Vec<Option<WatchSlot>>, Error> { use self::beacon_blocks::dsl::{beacon_blocks, root, slot}; use self::block_packing::dsl::block_packing; let join = beacon_blocks.left_join(block_packing); let result = join .select(slot) .filter(root.is_null()) // Block packing cannot be retrieved for epoch 0 so we need to exclude them. .filter(slot.ge(slots_per_epoch as i32)) .order_by(slot.desc()) .nullable() .load::<Option<WatchSlot>>(conn)?; Ok(result) }
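A hypothetical helper showing how the getters above compose; it assumes a `PgConn` established elsewhere in the `watch` crate and only illustrates the call pattern, so the function name is made up and it is not runnable without a configured database.

```rust
pub fn log_known_packing_range(conn: &mut PgConn) -> Result<(), Error> {
    let lowest = get_lowest_block_packing(conn)?;
    let highest = get_highest_block_packing(conn)?;
    if let (Some(low), Some(high)) = (lowest, highest) {
        // WatchBlockPacking derives Debug, so the whole rows can be logged.
        debug!("block packing present between {:?} and {:?}", low, high);
    }
    Ok(())
}
```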
true
a28bf498a0d48c44c702f7dba26fb9b5add48d4a
Rust
animanga/vm-rs
/vmm/kvm/kvm-sys/src/structs/mem.rs
UTF-8
1,439
3.109375
3
[ "MIT", "Apache-2.0" ]
permissive
//! Structures related to memory management.

/// A guest physical memory slot.
///
/// Memory regions in the same address space must not overlap.
///
/// The physical and virtual addresses should be aligned on 2 MiB boundaries,
/// for maximum efficiency.
#[derive(Debug, Default, Copy, Clone)]
#[repr(C)]
pub struct MemoryRegion {
    /// The memory slot to be added or changed.
    ///
    /// Must be less than the `MaxMemSlots` capability.
    pub slot: u16,

    /// The address space of this region,
    /// if multiple address spaces are supported.
    ///
    /// The maximum number of address spaces is returned
    /// by the capability check.
    pub address_space: u16,

    /// Flags describing the properties of this region.
    pub flags: Flags,

    // Reserved, used internally by KVM.
    _padding: u16,

    /// The address in the guest's physical address space.
    pub guest_phys_addr: u64,

    /// Size in bytes of this memory region.
    ///
    /// If `Flags::READ_ONLY` is set, this must be 0.
    pub size: u64,

    /// Starting address of the host virtual memory region.
    pub host_virt_addr: u64,
}

bitflags! {
    #[derive(Default)]
    pub struct Flags: u16 {
        /// If set, KVM will keep track of writes to this slot.
        const LOG_DIRTY_PAGES = 1 << 0;

        /// This slot will be made read-only;
        /// any writes will be reported to userspace.
        const READ_ONLY = 1 << 1;
    }
}
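A sketch of filling in a slot description for, say, 128 MiB of guest RAM. The values and the helper name are illustrative only; because `_padding` is private, the struct is built through its `Default` impl plus field assignment rather than a struct literal.

```rust
fn guest_ram_slot(host_virt_addr: u64) -> MemoryRegion {
    // Private `_padding` prevents construction via a struct literal outside the module.
    let mut region = MemoryRegion::default();
    region.slot = 0;
    region.address_space = 0;
    region.flags = Flags::LOG_DIRTY_PAGES;
    region.guest_phys_addr = 0;
    region.size = 128 << 20; // 128 MiB
    region.host_virt_addr = host_virt_addr;
    region
}
```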
true
22dbaf75e0b32ecc7e2ba2d861dc85d2074fe145
Rust
toumorokoshi/greyhawk-language
/src/lexer/stringreader.rs
UTF-8
1,258
3
3
[]
no_license
use super::tokenizer::Tokenizer; use super::token::TokenType; use super::symboltree::TokenDef; use super::symboltree::FinalNode; use super::symboltree; pub const SYMBOLS: &'static [TokenDef] = &[ TokenDef{path: "if", token: TokenType::If}, TokenDef{path: "else", token: TokenType::Else}, TokenDef{path: "return", token: TokenType::Return}, ]; pub struct StringReader { string: String, keywords: FinalNode, } impl StringReader { pub fn new() -> StringReader { return StringReader{ string: String::new(), keywords: symboltree::generate_tree(SYMBOLS) }; } } impl Tokenizer for StringReader { fn reset(&mut self) { self.string = String::new(); } fn read(&mut self, c: char) -> bool { match c { 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' => { self.string.push(c); true }, _ => false, } } fn publish(&mut self) -> Vec<TokenType> { let mut tokens = Vec::new(); tokens.push(match self.keywords.find(&self.string) { Some(t) => t, None => TokenType::Symbol(self.string.clone()), }); self.reset(); return tokens; } }
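A sketch of feeding characters through the tokenizer one at a time, written as a test in the same module; it assumes the `TokenType::Return` and `TokenType::Symbol(String)` variants that the code above already uses.

```rust
#[cfg(test)]
mod tests {
    use super::*;

    fn scan(reader: &mut StringReader, word: &str) -> Vec<TokenType> {
        for c in word.chars() {
            assert!(reader.read(c));
        }
        // publish() also resets the reader for the next word.
        reader.publish()
    }

    #[test]
    fn keywords_and_symbols() {
        let mut reader = StringReader::new();

        let toks = scan(&mut reader, "return");
        assert!(matches!(toks.as_slice(), [TokenType::Return]));

        let toks = scan(&mut reader, "foo_1");
        assert!(matches!(toks.as_slice(), [TokenType::Symbol(s)] if s.as_str() == "foo_1"));
    }
}
```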
true
88974c414eb8613fe6459a17e7913551ea32945b
Rust
dmvict/wTools
/rust/test/former/all/basic.rs
UTF-8
5,948
2.953125
3
[ "MIT" ]
permissive
#[ allow( unused_imports ) ] use super::*; only_for_wtools! { #[ allow( unused_imports ) ] use wtools::meta::*; #[ allow( unused_imports ) ] use wtools::former::Former; } only_for_local_module! { #[ allow( unused_imports ) ] use meta_tools::*; #[ allow( unused_imports ) ] use former::Former; } use std::collections::HashMap; use std::collections::HashSet; #[derive( Debug, PartialEq, Former )] pub struct Struct1 { pub int_1 : i32, string_1 : String, int_optional_1 : Option< i32 >, string_optional_1 : Option< String >, vec_1 : Vec< String >, hashmap_strings_1 : HashMap< String, String >, hashset_strings_1 : HashSet< String >, } // include!( "basic_only_test.rs" ); // // output : // // impl Struct1 // { // pub fn former() -> Struct1Former // { // Struct1Former // { // int_1 : core::option::Option::None, // string_1 : core::option::Option::None, // int_optional_1 : core::option::Option::None, // string_optional_1 : core::option::Option::None, // vec_1 : core::option::Option::None, // hashmap_strings_1 : core::option::Option::None, // hashset_strings_1 : core::option::Option::None, // } // } // } // // // // // #[derive( Debug )] // pub struct Struct1Former // { // pub int_1 : core::option::Option< i32 >, // pub string_1 : core::option::Option< String >, // pub int_optional_1 : core::option::Option< i32 >, // pub string_optional_1 : core::option::Option< String >, // pub vec_1 : core::option::Option< Vec< String > >, // pub hashmap_strings_1 : core::option::Option< std::collections::HashMap< String, String > >, // pub hashset_strings_1 : core::option::Option< std::collections::HashSet< String > >, // } // // // // // impl Struct1Former // { // fn form( mut self ) -> Struct1 // { // // let int_1 = if self.int_1.is_some() // { // self.int_1.take().unwrap() // } // else // { // let val : i32 = Default::default(); // val // }; // // let string_1 = if self.string_1.is_some() // { // self.string_1.take().unwrap() // } // else // { // let val : String = Default::default(); // val // }; // // let int_optional_1 = if self.int_optional_1.is_some() // { // Some( self.int_optional_1.take().unwrap() ) // } // else // { // None // }; // // let string_optional_1 = if self.string_optional_1.is_some() // { // Some( self.string_optional_1.take().unwrap() ) // } // else // { // None // }; // // let vec_1 = if self.vec_1.is_some() // { // self.vec_1.take().unwrap() // } // else // { // let val : Vec< String > = Default::default(); // val // }; // // let hashmap_strings_1 = if self.hashmap_strings_1.is_some() // { // self.hashmap_strings_1.take().unwrap() // } // else // { // let val : std::collections::HashMap< String, String > = Default::default(); // val // }; // // let hashset_strings_1 = if self.hashset_strings_1.is_some() // { // self.hashset_strings_1.take().unwrap() // } // else // { // let val : std::collections::HashSet< String > = Default::default(); // val // }; // // Struct1 // { // int_1, // string_1, // int_optional_1, // string_optional_1, // vec_1, // hashmap_strings_1, // hashset_strings_1, // } // // } // // pub fn int_1< Src >( mut self, src : Src ) -> Self // where Src : core::convert::Into< i32 >, // { // debug_assert!( self.int_1.is_none() ); // self.int_1 = Some( src.into() ); // self // } // // pub fn string_1< Src >( mut self, src : Src ) -> Self // where Src : core::convert::Into< String >, // { // debug_assert!( self.string_1.is_none() ); // self.string_1 = Some( src.into() ); // self // } // // pub fn string_optional_1< Src >( mut self, src : Src ) -> Self // where Src : 
core::convert::Into< String > // { // debug_assert!( self.string_optional_1.is_none() ); // self.string_optional_1 = Some( src.into() ); // self // } // // pub fn vec_1( mut self ) -> former::runtime::VectorFormer // < // String, // Vec< String >, // Struct1Former, // impl Fn( &mut Struct1Former, core::option::Option< Vec< String > > ) // > // { // let container = self.vec_1.take(); // let on_end = | former : &mut Struct1Former, container : core::option::Option< Vec< String > > | // { // former.vec_1 = container; // }; // former::runtime::VectorFormer::new( self, container, on_end ) // } // // pub fn hashmap_strings_1( mut self ) -> former::runtime::HashMapFormer // < // String, // String, // std::collections::HashMap< String, String >, // Struct1Former, // impl Fn( &mut Struct1Former, core::option::Option< std::collections::HashMap< String, String > > ) // > // { // let container = self.hashmap_strings_1.take(); // let on_end = | former : &mut Struct1Former, container : core::option::Option< std::collections::HashMap< String, String > > | // { // former.hashmap_strings_1 = container; // }; // former::runtime::HashMapFormer::new( self, container, on_end ) // } // // pub fn hashset_strings_1( mut self ) -> former::runtime::HashSetFormer // < // String, // std::collections::HashSet< String >, // Struct1Former, // impl Fn( &mut Struct1Former, core::option::Option< std::collections::HashSet< String > > ) // > // { // let container = self.hashset_strings_1.take(); // let on_end = | former : &mut Struct1Former, container : core::option::Option< std::collections::HashSet< String > > | // { // former.hashset_strings_1 = container; // }; // former::runtime::HashSetFormer::new( self, container, on_end ) // } // // }
true
4ee48c6e0c0f712b324a2fc2b99f775d5d14c790
Rust
maxblee/clipivot
/src/errors.rs
UTF-8
4,314
3.53125
4
[ "MIT", "Apache-2.0" ]
permissive
//! The module for describing recoverable errors in my CSV command-line tools. //! //! > *Note:* All of the error handling in this module is structured from //! > [this error handling guide](https://blog.burntsushi.net/rust-error-handling) //! > and from the source code of the [csv crate](https://github.com/BurntSushi/rust-csv) //! > in Rust. If you're hoping to implement you're own library or binary in Rust, //! > I highly recommend looking at both (and, especiialy, the guide). //! //! You can characterize all four error types in two general categories: //! errors configuring the CSV reader and errors parsing individual lines. //! For errors relating to configuration, my goal is simply to be as specific //! and clear as possible about the nature of a given error. For errors relating to //! parsing, however, I also think it's important to display record numbers to help //! users debug errors they run into. Currently, this refers to the 0-indexed number in //! which a record appears in a CSV document. So record 5 of a CSV would be the seventh line //! of a CSV with a header row and the sixth line of a CSV without a header row. //! //! This indexing plan is meant to interact nicely with the `xsv slice` subcommand in the //! [`xsv`](https://github.com/BurntSushi/xsv) toolkit. So if you run into an error, you can type: //! ```shell //! $ xsv slice YOUR_FILENAME -i <RECORD_NUMBER> //! ``` //! to see the full line that caused you to run into an error. //! extern crate csv; use std::fmt; use std::io; use std::result; /// An alias for CsvCliError // from https://github.com/BurntSushi/rust-csv/blob/master/src/error.rs pub type CsvCliResult<T> = result::Result<T, CsvCliError>; /// The type of CSV error #[derive(Debug)] pub enum CsvCliError { /// Errors from reading a CSV file. /// /// This should be limited to inconsistencies in the number of lines appearing in a given row /// or errors parsing data as UTF-8. CsvError(csv::Error), /// Errors in the initial configuration from command-line arguments. /// /// This error likely occurs most frequently because of problems in how fields are named /// but can also occur because of errors parsing delimiters as single UTF-8 characters. InvalidConfiguration(String), /// A standard IO error. Typically from trying to read a file that does not exist Io(io::Error), /// Errors trying to parse a new value. /// The way in which `clipivot` parses values depends on the aggregation function /// and command-line flags, but all errors in converting the string records in the values /// column into a particular data type result in a `ParsingError`. ParsingError { /// The current line number. This conforms with the way that `xsv slice` operates /// so you can easily find the row that failed by running `xsv slice`. line_num: usize, /// The string that failed to parse. This allows you to avoid having to run operations /// like `xsv slice` in most cases str_to_parse: String, /// The general error message. This is specific to the type of error, so failures to parse /// data as datetimes will tell you they failed to parse datetimes, etc. 
err: String, }, } impl fmt::Display for CsvCliError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CsvCliError::CsvError(ref err) => err.fmt(f), CsvCliError::InvalidConfiguration(ref err) => { write!(f, "Could not properly configure the aggregator: {}", err) } CsvCliError::Io(ref err) => err.fmt(f), // adapted from https://github.com/BurntSushi/rust-csv/blob/master/src/error.rs CsvCliError::ParsingError { ref line_num, ref str_to_parse, ref err, } => write!( f, "Could not parse record `{}` with index {}: {}", str_to_parse, line_num, err ), } } } impl From<io::Error> for CsvCliError { fn from(err: io::Error) -> CsvCliError { CsvCliError::Io(err) } } impl From<csv::Error> for CsvCliError { fn from(err: csv::Error) -> CsvCliError { CsvCliError::CsvError(err) } }
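A hypothetical call site elsewhere in the crate, showing how a `ParsingError` is built and what its `Display` output carries; the function name and record index are made up for illustration.

```rust
fn parse_cell(line_num: usize, raw: &str) -> CsvCliResult<f64> {
    raw.parse::<f64>().map_err(|e| CsvCliError::ParsingError {
        line_num,
        str_to_parse: raw.to_string(),
        err: e.to_string(),
    })
}

#[test]
fn parsing_errors_carry_context() {
    let err = parse_cell(5, "not-a-number").unwrap_err();
    let msg = err.to_string();
    // Matches the Display impl above: record text plus the 0-indexed record number.
    assert!(msg.contains("`not-a-number`"));
    assert!(msg.contains("index 5"));
}
```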
true
58b7f1a763a806329e2268cb03d8676278514f43
Rust
hidva/waitforgraph
/src/intern.rs
UTF-8
1,283
2.6875
3
[ "Apache-2.0" ]
permissive
/* Copyright 2020 <盏一 w@hidva.com> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::borrow::Borrow; use std::collections::HashSet; use std::hash::Hash; // implement an Iterator to iterate Internment.0. for now, just make Internment.0 public. pub struct Internment<T>(pub HashSet<Box<T>>); impl<T> Default for Internment<T> { fn default() -> Self { Self(HashSet::new()) } } impl<T> Internment<T> where T: Eq + Hash, { // How to define a newtype Id for *const T? like: type Id = *const T; pub fn intern(&mut self, t: T) -> *const T { if let Some(v) = self.0.get(&t) { v.borrow() as *const T } else { let b = Box::new(t); let ret = b.borrow() as *const T; self.0.insert(b); ret } } }
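A small test sketch of the interning behaviour: equal values map to one stable address, distinct values to different ones. The test-module name is arbitrary.

```rust
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn equal_values_intern_to_the_same_address() {
        let mut strings: Internment<String> = Internment::default();

        let a = strings.intern("deadlock".to_string());
        let b = strings.intern("deadlock".to_string());
        let c = strings.intern("livelock".to_string());

        // Identical values share one stable address; distinct values do not.
        assert_eq!(a, b);
        assert_ne!(a, c);
        // Only two boxes were actually stored.
        assert_eq!(strings.0.len(), 2);
    }
}
```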
true
9b9fa062dfe28fc19af49fb05e16349624b8df53
Rust
ctfhacker/adventofcode-2019
/day04/src/main.rs
UTF-8
3,851
3.859375
4
[]
no_license
use std::collections::HashSet; /// Check if the number passwords is valid for Stage 1 fn is_password_1(input: &str) -> bool { let mut prev = 0; let mut repeated = false; for i in input.chars() { // Digit's hex value is also increasing like the digit itself, so this // conversion is still valid without having to parse the exact digit let curr = i as u8; // If we are starting, just set the first character as prev and continue if prev == 0 { prev = curr; continue; } // Quick return false if the string is not in increasing order of digits if prev > curr { return false; } if prev == curr { repeated = true; } // Set the prev element to the current for the next iteration prev = curr; } repeated } /// Check if the number password is valid for Stage 2 fn is_password_2(input: &str) -> bool { let mut prev = 0; let mut repeated = HashSet::new(); let mut curr_count = 1; for i in input.chars() { // Digit's hex value is also increasing like the digit itself, so this // conversion is still valid without having to parse the exact digit let curr = i as u8; // If we are starting, just set the first character as prev and continue if prev == 0 { prev = curr; continue; } // Quick return false if the string is not in increasing order of digits if prev > curr { return false; } if prev != curr { // If the count of the previous digit is more than a double (2) it is // invalid, so remove it from the repeated HashSet. if curr_count > 2 { repeated.remove(&prev); } curr_count = 1; } else { // Current element is the same as previous, increase the current seen count curr_count += 1; repeated.insert(curr); } // Set the prev element to the current for the next iteration prev = curr; } // Need to check this one more time just in case the last characters were an // odd contiguous amount if curr_count > 2 { // If the count of the previous digit is more than a double (2) it is // invalid, so remove it from the repeated HashSet. repeated.remove(&prev); } // Only return true if we have seen at least one repeated digit repeated.len() > 0 } fn main() { let passwords = (246540..787419) .filter(|num| is_password_1(&format!("{}", num))) .count(); print!("Stage 1: {}\n", passwords); let passwords = (246540..787419) .filter(|num| is_password_2(&format!("{}", num))) .count(); print!("Stage 2: {}\n", passwords); } #[cfg(test)] mod tests { use super::*; #[test] fn test_num_check_1() { assert_eq!(is_password_1(&"111111"), true); assert_eq!(is_password_1(&"223450"), false); assert_eq!(is_password_1(&"123789"), false); } #[test] fn test_num_check_2() { assert_eq!(is_password_2(&"112233"), true); assert_eq!(is_password_2(&"123444"), false); assert_eq!(is_password_2(&"134445"), false); assert_eq!(is_password_2(&"344456"), false); assert_eq!(is_password_2(&"444567"), false); assert_eq!(is_password_2(&"134456"), true); assert_eq!(is_password_2(&"111122"), true); assert_eq!(is_password_2(&"111123"), false); assert_eq!(is_password_2(&"111111"), false); assert_eq!(is_password_2(&"111115"), false); assert_eq!(is_password_2(&"223450"), false); assert_eq!(is_password_2(&"223455"), true); } }
true
338bc2bb06f33bc0e3b91d0474fd243d1de3439f
Rust
marco-c/gecko-dev-comments-removed
/third_party/rust/itertools/src/free.rs
UTF-8
3,371
2.75
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#[cfg(feature = "use_alloc")] use std::fmt::Display; use std::iter::{self, Zip}; #[cfg(feature = "use_alloc")] type VecIntoIter<T> = alloc::vec::IntoIter<T>; #[cfg(feature = "use_alloc")] use alloc::{ string::String, }; use crate::Itertools; use crate::intersperse::{Intersperse, IntersperseWith}; pub use crate::adaptors::{ interleave, merge, put_back, }; #[cfg(feature = "use_alloc")] pub use crate::put_back_n_impl::put_back_n; #[cfg(feature = "use_alloc")] pub use crate::multipeek_impl::multipeek; #[cfg(feature = "use_alloc")] pub use crate::peek_nth::peek_nth; #[cfg(feature = "use_alloc")] pub use crate::kmerge_impl::kmerge; pub use crate::zip_eq_impl::zip_eq; pub use crate::merge_join::merge_join_by; #[cfg(feature = "use_alloc")] pub use crate::rciter_impl::rciter; pub fn intersperse<I>(iterable: I, element: I::Item) -> Intersperse<I::IntoIter> where I: IntoIterator, <I as IntoIterator>::Item: Clone { Itertools::intersperse(iterable.into_iter(), element) } pub fn intersperse_with<I, F>(iterable: I, element: F) -> IntersperseWith<I::IntoIter, F> where I: IntoIterator, F: FnMut() -> I::Item { Itertools::intersperse_with(iterable.into_iter(), element) } pub fn enumerate<I>(iterable: I) -> iter::Enumerate<I::IntoIter> where I: IntoIterator { iterable.into_iter().enumerate() } pub fn rev<I>(iterable: I) -> iter::Rev<I::IntoIter> where I: IntoIterator, I::IntoIter: DoubleEndedIterator { iterable.into_iter().rev() } #[deprecated(note="Use [std::iter::zip](https://doc.rust-lang.org/std/iter/fn.zip.html) instead", since="0.10.4")] pub fn zip<I, J>(i: I, j: J) -> Zip<I::IntoIter, J::IntoIter> where I: IntoIterator, J: IntoIterator { i.into_iter().zip(j) } pub fn chain<I, J>(i: I, j: J) -> iter::Chain<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter> where I: IntoIterator, J: IntoIterator<Item = I::Item> { i.into_iter().chain(j) } pub fn cloned<'a, I, T: 'a>(iterable: I) -> iter::Cloned<I::IntoIter> where I: IntoIterator<Item=&'a T>, T: Clone, { iterable.into_iter().cloned() } pub fn fold<I, B, F>(iterable: I, init: B, f: F) -> B where I: IntoIterator, F: FnMut(B, I::Item) -> B { iterable.into_iter().fold(init, f) } pub fn all<I, F>(iterable: I, f: F) -> bool where I: IntoIterator, F: FnMut(I::Item) -> bool { iterable.into_iter().all(f) } pub fn any<I, F>(iterable: I, f: F) -> bool where I: IntoIterator, F: FnMut(I::Item) -> bool { iterable.into_iter().any(f) } pub fn max<I>(iterable: I) -> Option<I::Item> where I: IntoIterator, I::Item: Ord { iterable.into_iter().max() } pub fn min<I>(iterable: I) -> Option<I::Item> where I: IntoIterator, I::Item: Ord { iterable.into_iter().min() } #[cfg(feature = "use_alloc")] pub fn join<I>(iterable: I, sep: &str) -> String where I: IntoIterator, I::Item: Display { iterable.into_iter().join(sep) } #[cfg(feature = "use_alloc")] pub fn sorted<I>(iterable: I) -> VecIntoIter<I::Item> where I: IntoIterator, I::Item: Ord { iterable.into_iter().sorted() }
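A sketch of calling a few of these free functions from a downstream crate, assuming they are re-exported at the itertools crate root as in released versions of the library.

```rust
use itertools::{chain, join, sorted};

fn main() {
    let evens = vec![2, 4, 6];
    let odds = vec![1, 3, 5];

    // chain() accepts anything IntoIterator with matching item types.
    let all: Vec<i32> = chain(&evens, &odds).copied().collect();
    assert_eq!(all, vec![2, 4, 6, 1, 3, 5]);

    // sorted() collects into a Vec internally and hands back its IntoIter.
    let ordered: Vec<i32> = sorted(all).collect();
    assert_eq!(ordered, vec![1, 2, 3, 4, 5, 6]);

    // join() uses the items' Display impls.
    assert_eq!(join(&ordered, ", "), "1, 2, 3, 4, 5, 6");
}
```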
true
9ec7973e43e5675725b08be39bc65b523d8a0ffe
Rust
christianscott/rusty-lox
/src/parse.rs
UTF-8
13,260
3.3125
3
[]
no_license
use crate::stmt::{Expr, Stmt}; use crate::token::{Literal, Token, TokenKind}; pub fn parse(tokens: Vec<Token>) -> Vec<Stmt> { Parser { tokens, current: 0 }.parse() } pub struct ParseErr { token: Token, message: String, } #[derive(Debug)] struct Parser { tokens: Vec<Token>, current: usize, } macro_rules! check { ($self:ident, $($p:pat),+) => { match $self.peek() { $(Token { kind: $p, .. }) |+ => true, _ => false, } }; } macro_rules! eat { ($self:ident, $($p:pat),+) => { match $self.peek() { $(Token { kind: $p, .. }) |+ => Some($self.advance()), _ => None, } }; } macro_rules! did_eat { ($self:ident, $($p:pat),+) => { match $self.peek() { $(Token { kind: $p, .. }) |+ => { $self.advance(); true }, _ => false, } }; } macro_rules! consume { ($self:ident, $p:pat, $message:literal) => { if let Some(tok) = eat!($self, $p) { Ok(tok) } else { Err(ParseErr { message: $message.to_string(), token: $self.peek(), }) } }; } impl Parser { fn parse(&mut self) -> Vec<Stmt> { let mut statments = Vec::new(); while !self.is_at_end() { match self.declaration() { Ok(statement) => statments.push(statement), Err(ParseErr { message, token }) => { self.synchronize(); println!("parse error at {:?}: {}", token.kind, message) } } } statments } fn declaration(&mut self) -> Result<Stmt, ParseErr> { if eat!(self, TokenKind::Var).is_some() { self.var_declaration() } else { self.statement() } } fn var_declaration(&mut self) -> Result<Stmt, ParseErr> { let name = consume!(self, TokenKind::Identifier, "Expect variable name.")?; let initializer = if eat!(self, TokenKind::Equal).is_some() { Some(self.expression()?) } else { None }; consume!( self, TokenKind::Semicolon, "Expect ';' after variable declaration." )?; Ok(Stmt::Var { name, initializer }) } fn statement(&mut self) -> Result<Stmt, ParseErr> { if did_eat!(self, TokenKind::For) { self.for_statement() } else if did_eat!(self, TokenKind::If) { self.if_statement() } else if did_eat!(self, TokenKind::Print) { self.print_statement() } else if did_eat!(self, TokenKind::While) { self.while_statement() } else if did_eat!(self, TokenKind::LeftBrace) { Ok(Stmt::Block { statements: self.block()?, }) } else { self.expression_statement() } } /// for statements are de-sugared into while loops fn for_statement(&mut self) -> Result<Stmt, ParseErr> { consume!(self, TokenKind::LeftParen, "Expect '(' after 'for'.")?; let initializer = if did_eat!(self, TokenKind::Semicolon) { None } else if did_eat!(self, TokenKind::Var) { Some(self.var_declaration()?) } else { Some(self.expression_statement()?) }; let condition = if check!(self, TokenKind::Semicolon) { Expr::Literal { val: Literal::Bool(true), } } else { self.expression()? }; consume!( self, TokenKind::Semicolon, "Expect ';' after loop condition." )?; let increment = if check!(self, TokenKind::RightParen) { None } else { Some(self.expression()?) }; consume!(self, TokenKind::RightParen, "Expect ')' after for clauses.")?; let mut body = self.statement()?; if let Some(increment) = increment { body = Stmt::Block { statements: vec![body, Stmt::Expression { expr: increment }], }; } body = Stmt::While { body: Box::new(body), condition, }; if let Some(initializer) = initializer { body = Stmt::Block { statements: vec![initializer, body], } } Ok(body) } fn if_statement(&mut self) -> Result<Stmt, ParseErr> { consume!(self, TokenKind::LeftParen, "Expect '(' after 'if'.")?; let condition = self.expression()?; consume!( self, TokenKind::RightParen, "Expect ')' after if condition." 
)?; let then_branch = Box::new(self.statement()?); let else_branch = if eat!(self, TokenKind::Else).is_some() { Some(Box::new(self.statement()?)) } else { None }; Ok(Stmt::If { condition, else_branch, then_branch, }) } fn print_statement(&mut self) -> Result<Stmt, ParseErr> { let expr = self.expression()?; consume!(self, TokenKind::Semicolon, "Expect ';' after value.")?; Ok(Stmt::Print { expr }) } fn while_statement(&mut self) -> Result<Stmt, ParseErr> { consume!(self, TokenKind::LeftParen, "Expect '(' after 'while'.")?; let condition = self.expression()?; consume!( self, TokenKind::RightParen, "Expect ')' after while condition." )?; let body = self.statement()?; Ok(Stmt::While { condition, body: Box::new(body), }) } fn block(&mut self) -> Result<Vec<Stmt>, ParseErr> { let mut statements = Vec::new(); while !check!(self, TokenKind::RightBrace) && !self.is_at_end() { statements.push(self.declaration()?); } consume!(self, TokenKind::RightBrace, "Expect '}' after block.")?; Ok(statements) } fn expression_statement(&mut self) -> Result<Stmt, ParseErr> { let expr = self.expression()?; consume!(self, TokenKind::Semicolon, "Expect ';' after value.")?; Ok(Stmt::Expression { expr }) } fn expression(&mut self) -> Result<Expr, ParseErr> { self.assignment() } fn assignment(&mut self) -> Result<Expr, ParseErr> { let expr = self.or()?; if let Some(equals) = eat!(self, TokenKind::Equal) { let value = self.assignment()?; if let Expr::Variable { name } = expr { return Ok(Expr::Assign { name, value: Box::new(value), }); } return Err(ParseErr { token: equals, message: "Invalid assignment target.".to_string(), }); } Ok(expr) } fn or(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.and()?; while let Some(operator) = eat!(self, TokenKind::Or) { let right = self.and()?; expr = Expr::Logical { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn and(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.equality()?; while let Some(operator) = eat!(self, TokenKind::And) { let right = self.equality()?; expr = Expr::Logical { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn equality(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.comparison()?; while let Some(operator) = eat!(self, TokenKind::BangEqual, TokenKind::EqualEqual) { let right = self.comparison()?; expr = Expr::Binary { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn comparison(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.addition()?; use TokenKind::*; while let Some(operator) = eat!(self, Greater, GreaterEqual, Less, LessEqual) { let right = self.addition()?; expr = Expr::Binary { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn addition(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.multiplication()?; while let Some(operator) = eat!(self, TokenKind::Minus, TokenKind::Plus) { let right = self.multiplication()?; expr = Expr::Binary { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn multiplication(&mut self) -> Result<Expr, ParseErr> { let mut expr = self.unary()?; while let Some(operator) = eat!(self, TokenKind::Slash, TokenKind::Star) { let right = self.unary()?; expr = Expr::Binary { left: Box::new(expr), operator, right: Box::new(right), }; } Ok(expr) } fn unary(&mut self) -> Result<Expr, ParseErr> { if let Some(operator) = eat!(self, TokenKind::Bang, TokenKind::Minus) { let right = self.unary()?; Ok(Expr::Unary { operator, right: Box::new(right), }) } else { self.primary() } } 
fn primary(&mut self) -> Result<Expr, ParseErr> { let tok = self.advance(); use TokenKind::*; match tok.kind { False => Ok(Expr::Literal { val: Literal::Bool(false), }), True => Ok(Expr::Literal { val: Literal::Bool(true), }), Nil => Ok(Expr::Literal { val: Literal::Nil }), Number(val) => Ok(Expr::Literal { val: Literal::Number(val), }), Str(contents) => Ok(Expr::Literal { val: Literal::Str(contents), }), LeftParen => { let expr = self.expression()?; consume!(self, TokenKind::RightParen, "Expect ')' after expression.")?; Ok(Expr::Grouping { expr: Box::new(expr), }) } Identifier => Ok(Expr::Variable { name: tok }), _ => Err(ParseErr { token: tok, message: "Expect expression.".to_string(), }), } } fn synchronize(&mut self) { self.advance(); while !self.is_at_end() { if let TokenKind::Semicolon = self.previous().kind { return; } match self.peek().kind { TokenKind::Class | TokenKind::Fun | TokenKind::Var | TokenKind::For | TokenKind::If | TokenKind::While | TokenKind::Print | TokenKind::Return => return, _ => { self.advance(); } } } } fn advance(&mut self) -> Token { if !self.is_at_end() { self.current += 1; } self.previous() } fn is_at_end(&self) -> bool { if let Token { kind: TokenKind::Eof, .. } = self.peek() { true } else { false } } fn peek(&self) -> Token { self.peek_nth(0) } fn previous(&self) -> Token { self.peek_nth(-1) } fn peek_nth(&self, n: i16) -> Token { self.tokens[((self.current as i16) + n) as usize].clone() // TODO: avoidable?? } } #[cfg(test)] mod test { use super::*; use crate::token::{Range, Source, Token, TokenKind, TokenKind::*}; use std::rc::Rc; fn new_token_factory() -> impl Fn(TokenKind) -> Token { let source = Rc::new(Source::new("For testing".to_string(), vec![])); move |kind| Token { kind, line: 1, span: Range(0, 0), source: Rc::clone(&source), } } #[test] fn test_eof() { let token = new_token_factory(); assert_eq!(parse(vec![token(Eof)]), vec![],); } #[test] fn test_var_with_init() { let token = new_token_factory(); assert_eq!( parse(vec![ token(Var), token(Identifier), token(Equal), token(Number(0f64)), token(Semicolon), token(Eof), ]), vec![Stmt::Var { name: token(Identifier), initializer: Some(Expr::Literal { val: crate::token::Literal::Number(0.0f64) }), }], ); } }
true
58abd5f7315c920fda9bfdb72e3ef3602b562cd5
Rust
randomPoison/cs-bindgen
/cs-bindgen/src/exports.rs
UTF-8
2,299
3.03125
3
[]
no_license
//! Shared functionality that needs to be exported in the built dylib. //! //! In order for the generated dylib to work on all platforms, you MUST invoke the //! [`export`] macro once at the root of your crate: //! //! ``` //! cs_bindgen::export!(); //! ``` //! //! Ideally users of this crate shouldn't need to do anything to re-export these //! symbols, cs_bindgen should be able to handle this automatically. In practice, it //! seems like on Linux the symbols are not exported. See https://github.com/rust-lang/rfcs/issues/2771 //! for more information. //! //! [`export`]: ../macro.export.html use crate::abi::{self, Abi, RawSlice, RawString, RawVec}; macro_rules! drop_vec { ( $( $prim:ty => [$drop_fn:ident, $convert_fn:ident], )* ) => { $( pub unsafe fn $drop_fn(raw: RawVec<$prim>) { let _ = raw.into_vec(); } pub unsafe fn $convert_fn(raw: RawSlice<<$prim as Abi>::Abi>) -> RawVec<$prim> { abi::convert_list(raw) } )* } } drop_vec! { u8 => [__cs_bindgen_drop_vec_u8, __cs_bindgen_convert_vec_u8], u16 => [__cs_bindgen_drop_vec_u16, __cs_bindgen_convert_vec_u16], u32 => [__cs_bindgen_drop_vec_u32, __cs_bindgen_convert_vec_u32], u64 => [__cs_bindgen_drop_vec_u64, __cs_bindgen_convert_vec_u64], usize => [__cs_bindgen_drop_vec_usize, __cs_bindgen_convert_vec_usize], i8 => [__cs_bindgen_drop_vec_i8, __cs_bindgen_convert_vec_i8], i16 => [__cs_bindgen_drop_vec_i16, __cs_bindgen_convert_vec_i16], i32 => [__cs_bindgen_drop_vec_i32, __cs_bindgen_convert_vec_i32], i64 => [__cs_bindgen_drop_vec_i64, __cs_bindgen_convert_vec_i64], isize => [__cs_bindgen_drop_vec_isize, __cs_bindgen_convert_vec_isize], f32 => [__cs_bindgen_drop_vec_f32, __cs_bindgen_convert_vec_f32], f64 => [__cs_bindgen_drop_vec_f64, __cs_bindgen_convert_vec_f64], bool => [__cs_bindgen_drop_vec_bool, __cs_bindgen_convert_vec_bool], char => [__cs_bindgen_drop_vec_char, __cs_bindgen_convert_vec_char], } /// Converts a C# string (i.e. a UTF-16 slice) into a Rust string. pub unsafe fn __cs_bindgen_string_from_utf16(raw: RawSlice<u16>) -> RawString { raw.into_string() .expect("Failed to convert C# string to Rust string") .into() }
true
c0e150a8c15451d01a372fa09bc1af8f2ff9b904
Rust
gitter-badger/mode
/src/mode.rs
UTF-8
14,590
3.375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
// Copyright 2019 Andrew Thomas Christensen // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the // MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. This file may not be copied, // modified, or distributed except according to those terms. use crate::Family; /// Trait that defines the transition behavior of a state within an `Automaton`. /// /// Every `Automaton` contains a single `Mode` instance that represents the active state of the state machine. An /// `Automaton<F>` can **only** switch between `Mode`s with the same `Family` type `F`. The `Automaton` only allows the /// active `Mode` to be accessed as a `F::Base` reference, so only functions exposed on the `Base` type are callable on /// the `Mode` from outside the `Automaton`. /// /// See [`Automaton`](struct.Automaton.html) for more details. /// /// # Usage /// ``` /// use mode::*; /// /// struct MyFamily; /// impl Family for MyFamily { /// type Base = dyn MyMode; /// type Mode = Box<dyn MyMode>; /// type Input = (); /// type Output = Box<dyn MyMode>; /// } /// /// trait MyMode : boxed::Mode<Family = MyFamily> { /// // TODO: Define some common interface for ModeA and ModeB. /// } /// /// struct ModeA; // TODO: Add fields. /// impl MyMode for ModeA { } /// /// impl boxed::Mode for ModeA { /// type Family = MyFamily; /// fn swap(self : Box<Self>, _input : ()) -> Box<dyn MyMode> { /// // Transition to ModeB. ModeA can swap to ModeB because both share the same Family. /// Box::new(ModeB) /// } /// } /// /// struct ModeB; // TODO: Add fields. /// impl MyMode for ModeB { } /// /// impl boxed::Mode for ModeB { /// type Family = MyFamily; /// fn swap(self : Box<Self>, _input : ()) -> Box<dyn MyMode> { self } // Returning self means don't transition. /// } /// ``` /// /// # Transitioning /// `Mode`s can choose to transition to any other `Mode` with the same `Family` associated `type`. This is accomplished /// by returning a new `Mode` from the `swap()` function, which will cause the parent `Automaton` to switch to this /// `Mode` immediately. Since /// /// See [`Automaton::next()`](struct.Automaton.html#method.next) for more details. /// /// # The `Family` parameter /// You will notice from the [example](#usage) above that `ModeA` and `ModeB` implement `Mode` and `MyMode` separately, /// but the `MyMode` trait itself does **not** extend `Mode`, i.e. is defined as `trait MyMode` as opposed to /// `trait MyMode : Mode<Base = MyMode>`. We want to use `MyMode` as the `Base` type for `ModeA` and `ModeB`, but /// unfortunately having `MyMode` extend `Mode<Base = MyMode>` would create a circular dependency between the two types, /// and would cause a compile error. Hence, while it is possible to cast `ModeA` or `ModeB` to `MyMode` or `Mode`, /// casting between `MyMode` and `Mode` is not allowed. /// /// # Returning a value from `Mode::swap()` /// It is possible to output a value in addition to the `Mode` that is returned from `swap()`. In order to do this, the /// `Output` type of the `Family` for this `Mode` should be given a tuple containing `Family::Mode` as the first /// parameter and some other type as the second, which will become the return type for `Mode::swap()`. The /// `Automaton::next_with_output()` function will interpret the first parameter as the new `Mode` to switch in, and the /// second parameter will be returned as a result. 
/// /// **NOTE:** If you do this, you will be required to use `Automaton::next_with_output()` or /// `Automaton::next_with_input_and_output()`, instead of `Automaton::next()` or `Automaton::next_with_input()`, due to /// the `impl` bounds on these functions. /// /// # Passing context into `Mode::swap()` /// The `Mode::swap()` function takes a single `input` parameter that can be used in situations where some context is /// necessary in order to allow the current `Mode` to swap itself. This parameter is of type `Family::Input`, and is /// passed into the `swap()` function by value. If no context is necessary to switch `Mode`s, the value can effectively /// be ignored by setting `Family::Input` to the empty tuple type, `()`. /// /// **NOTE:** When a non-empty `Family::Input` type is used, you will be required to use `Automaton::next_with_input()` /// or `Automaton::next_with_input_and_output()`, instead of `Automaton::next()` or `Automaton::next_with_output()`, due /// to the `impl` bounds on these functions. /// /// # Alternative `trait Mode`s for pointer types /// When storing `Mode`s with a large amount of data or that should be accessed through some `dyn Trait` reference, it /// is desirable to have the `Automaton` operate on a **pointer** to a `Mode`, as opposed to storing the current `Mode` /// in place. This is possible by setting the `Family::Mode` type to a pointer type wrapping a `Family::Base`, e.g. /// /// ``` /// use mode::Family; /// # use mode::boxed::Mode; /// # /// # trait SomeTrait : Mode<Family = FamilyWithPointerMode> { } /// /// struct FamilyWithPointerMode; /// impl Family for FamilyWithPointerMode { /// type Base = dyn SomeTrait; /// type Mode = Box<dyn SomeTrait>; // All Modes in this Family will be stored as a Box<dyn SomeTrait> internally. /// type Input = (); /// type Output = Box<dyn SomeTrait>; /// } /// ``` /// /// However, when doing so, the responsibility for swapping in the next `Mode` needs to be delegated to the /// type **stored** in the pointer, not the pointer itself. /// /// Hence, this module defines a number of other `trait Mode`s that are meant to be extended **in place of** /// `mode::Mode` when a `std` pointer type, e.g. `Box` or `Arc`, is being used. These are all stored in separate /// submodules that rougly correspond to the path of the pointer type under `std`, e.g. `mode::boxed::Mode` wraps a /// `std::boxed::Box`, and `mode::sync::Mode` wraps a `std::sync::Arc`. These define a slightly different `swap()` /// function that accepts the **pointer** type as `self`, e.g. `self : Box<Self>`. There are multiple advantages to /// this, but the main one is that the `Mode` implementation can return its own pointer from the `swap()` function when /// it wants to remain active, instead of returning a new pointer wrapping itself. Moving a pointer into and out of the /// `swap()` function can be **much** cheaper than moving the object itself around, especially for `Mode`s that store /// large amounts of data. /// /// When writing an `impl` for a `struct` in a `Family` that stores a pointer type, the corresponding `Mode` /// implementation (e.g. `mode::boxed::Mode`) should be used **instead of** `mode::Mode` itself. The crate provides auto /// `impl mode::Mode`s for each of these, allowing them to be used in the `Automaton`. (See example below.) 
/// /// ``` /// use mode::{sync, Family}; /// use std::sync::Arc; /// /// trait SomeTrait : sync::Mode<Family = FamilyWithArcMode> { } /// /// struct FamilyWithArcMode; /// impl Family for FamilyWithArcMode { /// type Base = dyn SomeTrait; /// type Mode = Arc<dyn SomeTrait>; // All Modes in this Family will be stored as an Arc<dyn SomeTrait> internally. /// type Input = (); /// type Output = Arc<dyn SomeTrait>; /// } /// /// struct SomeMode; /// impl SomeTrait for SomeMode { } // TODO /// /// // Note that we ONLY impl sync::Mode for SomeMode. There is an auto-impl of mode::Mode for Arc<T : sync::Mode>, so /// // we don't need to implement mode::Mode ourselves. /// // /// impl sync::Mode for SomeMode { /// type Family = FamilyWithArcMode; /// fn swap(self : Arc<Self>, _input : ()) -> Arc<dyn SomeTrait> { /// // TODO: Insert logic here to switch states by returning a different Arc. /// self /// } /// } /// ``` /// pub trait Mode { /// The `Family` type to which this `Mode` implementation belongs. `Mode` implementations are **only** allowed /// to return another `Mode`s from the `swap()` method if it has the exact same `Family` type as itself. /// Swapping between `Mode`s with different `Family` types is **not** allowed, even if the associated `type`s in /// two separate `Family` implementations are identical to each other. This is because reusing the same /// `Base` interface or `Mode` type between `Mode` implementations does not *necessarily* imply that both states /// are meant to represent states in the same state machine. /// /// See [`Family`](trait.Family.html) for more details. /// type Family : Family + ?Sized; /// Every time one of the `Automaton::next*()` functions is called, the `Automaton` will call this function on the /// current `Mode` to determine whether it wants another `Mode` to become active. If this function returns `self`, /// the current `Mode` will remain active. However, if it returns another object implementing `Mode` with the same /// `Family` type, the `Automaton` will make the `Mode` that was returned active immediately after the `swap()` /// function returns, consuming the `Mode` that was previously active. Since the original `Mode` is consumed, it is /// possible for the current `Mode` to move state out of itself and into the new `Mode` being created. /// /// This function returns `Self::Family::Output`, which can either be the `Self::Family::Mode` type or some /// `(mode, result)` tuple, where `mode` represents the new `Self::Family::Mode` to switch in as active and /// `result` represents some value that should be returned to the caller. Regardless of the `Self::Family::Output` /// type, the `Mode` **must** return a `Self::Family::Mode` type to transition in. If `self` is returned from this /// function, the current `Mode` will remain active. /// /// See [`Automaton::next()`](struct.Automaton.html#method.next) and /// [`Automaton::next_with_output()`](struct.Automaton.html#method.next_with_output) for more details. /// fn swap(self, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output; } /// Defines types that can be used to set up an `Automaton` that stores a `Box<Mode>` instead of a `Mode` in place. /// pub mod boxed { use crate::Family; /// Alternate `trait Mode` that takes a `Box<Mode>` as the `self` parameter instead of `Mode`. /// /// For more on how to use this `trait`, see `mode::Mode`. /// pub trait Mode { /// The `Family` type to which this `Mode` implementation belongs. 
In order to use the `boxed::Mode` trait, this /// `Family` should be a `Box<T>` where `T : boxed::Mode`. /// /// See `mode::Mode` for more details. /// type Family : Family + ?Sized; /// Will be called on the current `Mode` by `Automaton::next()` or `Automaton::next_with_output()` in order to /// determine whether it wants the `Automaton` to transition to another `Mode`. Note that this `trait`'s /// `swap()` function takes a `Box<Self>` instead of just `self`. /// /// See `mode::Mode` for more details. /// fn swap(self : Box<Self>, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output; } impl<T, F> crate::Mode for Box<T> where F : Family + ?Sized, T : self::Mode<Family = F> + ?Sized, { type Family = F; fn swap(self, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output { self.swap(input) } } } /// Defines types that can be used to set up an `Automaton` that stores an `Rc<Mode>` instead of a `Mode` in place. /// pub mod rc { use crate::Family; use std::rc::Rc; /// Alternate `trait Mode` that takes an `Rc<Mode>` as the `self` parameter instead of `Mode`. /// /// For more on how to use this `trait`, see `mode::Mode`. /// pub trait Mode { /// The `Family` type to which this `Mode` implementation belongs. In order to use the `rc::Mode` trait, this /// `Family` should be an `Rc<T>` where `T : rc::Mode`. /// /// See `mode::Mode` for more details. /// type Family : Family + ?Sized; /// Will be called on the current `Mode` by `Automaton::next()` or `Automaton::next_with_output()` in order to /// determine whether it wants the `Automaton` to transition to another `Mode`. Note that this `trait`'s /// `swap()` function takes an `Rc<Self>` instead of just `self`. /// /// See `mode::Mode` for more details. /// fn swap(self : Rc<Self>, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output; } impl<T, F> crate::Mode for Rc<T> where F : Family + ?Sized, T : self::Mode<Family = F> + ?Sized, { type Family = F; fn swap(self, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output { self.swap(input) } } } /// Defines types that can be used to set up an `Automaton` that stores an `Arc<Mode>` instead of a `Mode` in place. /// pub mod sync { use crate::Family; use std::sync::Arc; /// Alternate `trait Mode` that takes an `Arc<Mode>` as the `self` parameter instead of `Mode`. /// /// For more on how to use this `trait`, see `mode::Mode`. /// pub trait Mode { /// The `Family` type to which this `Mode` implementation belongs. In order to use the `sync::Mode` trait, this /// `Family` should be an `Arc<T>` where `T : sync::Mode`. /// /// See `mode::Mode` for more details. /// type Family : Family + ?Sized; /// Will be called on the current `Mode` by `Automaton::next()` or `Automaton::next_with_output()` in order to /// determine whether it wants the `Automaton` to transition to another `Mode`. Note that this `trait`'s /// `swap()` function takes an `Arc<Self>` instead of just `self`. /// /// See `mode::Mode` for more details. /// fn swap(self : Arc<Self>, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output; } impl<T, F> crate::Mode for Arc<T> where F : Family + ?Sized, T : self::Mode<Family = F> + ?Sized, { type Family = F; fn swap(self, input : <Self::Family as Family>::Input) -> <Self::Family as Family>::Output { self.swap(input) } } }
true
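The doc comment above on returning a value from `Mode::swap()` can be made concrete with a small sketch. This is not part of the crate; it is a hypothetical `CounterFamily`/`CounterMode` pair, invented for illustration, written only against the `Family` and `boxed::Mode` traits defined above. Whether `Automaton::next_with_output()` consumes exactly this shape should be checked against the crate's own documentation.

use mode::{boxed, Family};

// Hypothetical family: swap() hands back the next Mode plus a u32 result.
struct CounterFamily;
impl Family for CounterFamily {
    type Base = dyn CounterMode;
    type Mode = Box<dyn CounterMode>;
    type Input = ();
    // Output is a (Mode, value) tuple, as described in the doc comment above.
    type Output = (Box<dyn CounterMode>, u32);
}

trait CounterMode: boxed::Mode<Family = CounterFamily> {}

struct Counting {
    count: u32,
}
impl CounterMode for Counting {}

impl boxed::Mode for Counting {
    type Family = CounterFamily;
    fn swap(mut self: Box<Self>, _input: ()) -> (Box<dyn CounterMode>, u32) {
        self.count += 1;
        let current = self.count;
        // Stay in the same Mode, but also report the updated count to the caller.
        (self, current)
    }
}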
bd28f8c2fb1a357d94654bd4ca9303e17154bdb5
Rust
clojure-rs/ClojureRS
/src/rust_core/gte.rs
UTF-8
2,665
3.453125
3
[ "Apache-2.0" ]
permissive
use crate::error_message; use crate::ifn::IFn; use crate::value::{ToValue, Value}; use std::rc::Rc; /// (gte x y) /// x >= y #[derive(Debug, Clone)] pub struct GteFn {} impl ToValue for GteFn { fn to_value(&self) -> Value { Value::IFn(Rc::new(self.clone())) } } impl IFn for GteFn { fn invoke(&self, args: Vec<Rc<Value>>) -> Value { if args.len() != 2 { return error_message::wrong_arg_count(2, args.len()); } match args.get(0).unwrap().to_value() { Value::I32(a) => match args.get(1).unwrap().to_value() { Value::I32(b) => Value::Boolean(a >= b), Value::F64(b) => Value::Boolean(a as f64 >= b), b_ => Value::Condition(format!( // TODO: what error message should be returned regarding using typetags? "Type mismatch; Expecting: (i32 | i64 | f32 | f64), Found: {}", b_.type_tag() )), }, Value::F64(a) => match args.get(0).unwrap().to_value() { Value::I32(b) => Value::Boolean(a >= b as f64), Value::F64(b) => Value::Boolean(a >= b), b_ => Value::Condition(format!( // TODO: what error message should be returned regarding using typetags? "Type mismatch; Expecting: (i32 | i64 | f32 | f64), Found: {}", b_.type_tag() )), }, a_ => Value::Condition(format!( // TODO: what error message should be returned regarding using typetags? "Type mismatch; Expecting: (i32 | i64 | f32 | f64), Found: {}", a_.type_tag() )), } } } #[cfg(test)] mod tests { mod gte_tests { use crate::ifn::IFn; use crate::rust_core::GteFn; use crate::value::Value; use std::rc::Rc; #[test] fn one_is_greater_than_zero() { let gte = GteFn {}; let args = vec![Rc::new(Value::I32(1)), Rc::new(Value::I32(0))]; assert_eq!(Value::Boolean(true), gte.invoke(args)); } #[test] fn one_is_gte_than_one() { let gte = GteFn {}; let args = vec![Rc::new(Value::I32(1)), Rc::new(Value::I32(1))]; assert_eq!(Value::Boolean(true), gte.invoke(args)); } #[test] fn one_is_not_gte_than_one_and_fractions() { let gte = GteFn {}; let args = vec![Rc::new(Value::I32(1)), Rc::new(Value::F64(1.00001))]; assert_eq!(Value::Boolean(false), gte.invoke(args)); } } }
true
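A brief usage sketch in the style of the tests above, showing the i32/f64 path of `GteFn::invoke`; the imports mirror the existing test module and are assumed to resolve the same way inside the crate. Note that the `Value::F64(a)` arm above re-reads `args.get(0)` for its right-hand operand, so a call like `(gte 1.5 2)` would end up comparing 1.5 with itself; the sketch below sticks to the i32-first path.

use crate::ifn::IFn;
use crate::rust_core::GteFn;
use crate::value::Value;
use std::rc::Rc;

fn gte_mixed_demo() {
    let gte = GteFn {};
    // (gte 2 1.5) => true; the I32 operand is cast to f64 before the comparison.
    let args = vec![Rc::new(Value::I32(2)), Rc::new(Value::F64(1.5))];
    assert_eq!(Value::Boolean(true), gte.invoke(args));
}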
e6c2adff9d6de9aa66d9a0968febc831ca34fcf2
Rust
cdown/mack
/src/types.rs
UTF-8
1,581
2.96875
3
[ "MIT" ]
permissive
use clap::Parser; use id3::Tag; use std::path::PathBuf; pub struct Track { pub path: PathBuf, pub tag: Tag, } #[derive(Debug, PartialEq, Eq)] pub struct TrackFeat { pub title: String, pub featured_artists: Vec<String>, pub original_title: String, } #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] pub struct Config { #[arg( long, short = 'n', help = "Don't actually rename or tag files, only display what would happen" )] pub dry_run: bool, #[arg( long, short, help = "Ignore .lastmack timestamp, run on all files present regardless" )] pub force: bool, #[arg( long, short, help = "Use a different output directory (by default, it's the same as the input dir)" )] pub output_dir: Option<PathBuf>, /// The format to apply to files, excluding the extension. /// /// Substitutions can be applied inside curly brackets, for example with {artist} to get the /// track artist. Any formats returning data with "/" will have it transformed to "_". /// /// Available formats: /// /// TAG: /// /// artist /// album /// track (width: 2) /// title /// /// LITERAL: /// /// {{ and }} indicate literal brackets. #[arg( long, verbatim_doc_comment, default_value = "{artist}/{album}/{track} {title}" )] pub fmt: String, #[arg(help = "Directories to find music files in.")] pub paths: Option<Vec<PathBuf>>, }
true
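A small sketch of how the `Config` above can be exercised without touching the real command line, using clap's `Parser::try_parse_from` (available in the clap 4 derive API that the `#[command]`/`#[arg]` attributes imply). The argument values are made up for illustration.

use clap::Parser;

fn parse_demo() {
    // Equivalent to: mack -n --output-dir /tmp/out /home/user/Music
    let config = Config::try_parse_from([
        "mack",
        "-n",
        "--output-dir",
        "/tmp/out",
        "/home/user/Music",
    ])
    .expect("arguments should parse");

    assert!(config.dry_run);
    assert_eq!(config.fmt, "{artist}/{album}/{track} {title}"); // default value
    assert_eq!(config.paths.as_ref().map(|p| p.len()), Some(1));
}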
e7680eb3211f1ed9d412beeb4c6de569926b7b50
Rust
Nemo157/paw
/examples/structopt.rs
UTF-8
692
3.015625
3
[ "MIT", "Apache-2.0" ]
permissive
use std::io::prelude::*;
use std::net::TcpListener;

// With the "paw" feature enabled in structopt
#[derive(structopt::StructOpt)]
struct Args {
    /// Port to listen on.
    #[structopt(short = "p", long = "port", env = "PORT", default_value = "8080")]
    port: u16,

    /// Address to listen on.
    #[structopt(short = "a", long = "address", default_value = "127.0.0.1")]
    address: String,
}

#[paw::main]
fn main(args: Args) -> Result<(), std::io::Error> {
    let listener = TcpListener::bind((&*args.address, args.port))?;
    println!("listening on {}", listener.local_addr()?);
    for stream in listener.incoming() {
        stream?.write(b"hello world!")?;
    }
    Ok(())
}
true
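For completeness, a hedged sketch showing that the `Args` struct above can also be built directly from an iterator via `StructOpt::from_iter`, which is handy in tests; the port value here is arbitrary.

use structopt::StructOpt;

fn args_from_iter_demo() {
    // The first element plays the role of argv[0].
    let args = Args::from_iter(vec!["structopt-example", "--port", "9000"]);
    assert_eq!(args.port, 9000);
    assert_eq!(args.address, "127.0.0.1"); // falls back to the default_value
}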
5a3d580bd0b3e3d53cdc7bcbacb3a117856e3a91
Rust
gaoxiaojun/kline
/src/util.rs
UTF-8
7,805
2.5625
3
[]
no_license
use crate::bar::Bar; use crate::candle::Candle; use crate::fractal::{FractalType, Fractal}; use crate::time::*; use chrono::{DateTime, NaiveDateTime, Utc}; use std::env; use std::error::Error; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::vec::Vec; pub fn read_file_content(filename: &str)-> std::io::Result<String> { let mut file = File::open(filename)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; Ok(contents) } pub fn load_bar_from_csv(filename: &str) -> std::io::Result<Vec<Bar>> { let mut file = File::open(filename)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; Ok(load_csv_from_str(contents.as_str())) } fn parse_time(timestr: &str) -> Time { let dt = NaiveDateTime::parse_from_str(timestr, "%Y.%m.%d %H:%M:%S").unwrap(); let datetime: DateTime<Utc> = DateTime::from_utc(dt, Utc); datetime.timestamp_millis() } fn load_csv_from_str(csv: &str) -> Vec<Bar> { let mut bars: Vec<Bar> = Vec::new(); let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_reader(csv.as_bytes()); for record in reader.records() { let record = record.unwrap(); let timestr: &str = AsRef::<str>::as_ref(&record[0]); let time = parse_time(timestr); let open = AsRef::<str>::as_ref(&record[1]).parse::<f64>().unwrap(); let close = AsRef::<str>::as_ref(&record[4]).parse::<f64>().unwrap(); let high = AsRef::<str>::as_ref(&record[2]).parse::<f64>().unwrap(); let low = AsRef::<str>::as_ref(&record[3]).parse::<f64>().unwrap(); let vol = AsRef::<str>::as_ref(&record[5]).parse::<f64>().unwrap(); let bar = Bar::new(time, open, high, low, close, vol); bars.push(bar); } bars } /* pub fn load_fx_from_csv(filename: &str) -> std::io::Result<Vec<Fx>> { let mut file = File::open(filename)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; Ok(load_fx_from_str(contents.as_str())) } fn load_fx_from_str(csv: &str) -> Vec<Fractal> { let mut fxs: Vec<Fractal> = Vec::new(); let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_reader(csv.as_bytes()); let mut index: u64 = 0; for record in reader.records() { let record = record.unwrap(); let time = parse_time(AsRef::<str>::as_ref(&record[0])); let fx_mark_str = AsRef::<str>::as_ref(&record[1]); let fx_mark = if fx_mark_str == "g" { FractalType::Top } else { FractalType::Bottom }; let fx = AsRef::<str>::as_ref(&record[2]).parse::<f64>().unwrap(); let start_dt = parse_time(AsRef::<str>::as_ref(&record[3])); let end_dt = parse_time(AsRef::<str>::as_ref(&record[4])); let fx_high = AsRef::<str>::as_ref(&record[5]).parse::<f64>().unwrap(); let fx_low = AsRef::<str>::as_ref(&record[6]).parse::<f64>().unwrap(); let fx = if fx_mark == FractalType::Top { Fractal::new(time, fx_mark, fx, start_dt, end_dt, fx, fx_low, fx_high, fx_low, index) }else { Fractal::new(time, fx_mark, fx, start_dt, end_dt, fx_high, fx,fx_high, fx_low, index) }; index += 1; fxs.push(fx); } fxs }*/ /* pub fn load_bi_from_csv(filename: &str) -> std::io::Result<Vec<Fx>> { let mut file = File::open(filename)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; Ok(load_bi_from_str(contents.as_str())) } fn load_bi_from_str(csv: &str) -> Vec<Fx> { let mut fxs: Vec<Fx> = Vec::new(); let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_reader(csv.as_bytes()); let mut index: u64 = 0; for record in reader.records() { let record = record.unwrap(); let time = parse_time(AsRef::<str>::as_ref(&record[0])); let fx_mark_str = AsRef::<str>::as_ref(&record[1]); let fx_mark = if fx_mark_str == 
"g" { FractalType::Top } else { FractalType::Bottom }; let fx = AsRef::<str>::as_ref(&record[6]).parse::<f64>().unwrap(); let start_dt = parse_time(AsRef::<str>::as_ref(&record[2])); let end_dt = parse_time(AsRef::<str>::as_ref(&record[3])); let fx_high = AsRef::<str>::as_ref(&record[4]).parse::<f64>().unwrap(); let fx_low = AsRef::<str>::as_ref(&record[5]).parse::<f64>().unwrap(); let fx = if fx_mark == FractalType::Top { Fx::new(time, fx_mark, fx, start_dt, end_dt, fx, fx_low, fx_high, fx_low, index) }else { Fx::new(time, fx_mark, fx, start_dt, end_dt, fx_high, fx,fx_high, fx_low, index) }; index += 1; fxs.push(fx); } fxs } pub fn dump_bi_to_csv(filename: &str, bis: &Vec<Fractal>) -> Result<(), Box<dyn Error>> { let file = File::create(filename)?; let mut wtr = csv::Writer::from_writer(file); // write header wtr.write_record(&[ "datetime", "fx_mark", "fx", "start_dt", "end_st", "fx_high", "fx_low", ])?; for record in bis { let dt_str = time_to_str(record.time); let start_str = time_to_str(record.start); let end_str = time_to_str(record.end); let mark_str = if record.fractal_type() == FractalType::Top { "g".to_string() } else { "d".to_string() }; let price_str = format!("{}", record.price); let high_str = format!("{}", record.high); let low_str = format!("{}", record.low); wtr.write_record(&[ dt_str, mark_str, price_str, start_str, end_str, high_str, low_str, ])?; } wtr.flush()?; Ok(()) } */ pub fn dump_fx_to_csv(filename: &str, fxs: &Vec<Fractal>) -> Result<(), Box<dyn Error>> { let file = File::create(filename)?; let mut wtr = csv::Writer::from_writer(file); // write header wtr.write_record(&[ "datetime", "type","price","high", "low" ])?; for record in fxs { let dt_str = time_to_str(record.time()); let mark_str = if record.fractal_type() == FractalType::Top { "Top".to_string() } else { "Bottom".to_string() }; let price_str = format!("{}", record.price()); let high_str = format!("{}", record.high()); let low_str = format!("{}", record.low()); wtr.write_record(&[ dt_str, mark_str, price_str, high_str, low_str, ])?; } wtr.flush()?; Ok(()) } pub fn dump_candle_to_csv(filename: &str, candles: &Vec<Candle>) -> Result<(), Box<dyn Error>> { let file = File::create(filename)?; let mut wtr = csv::Writer::from_writer(file); // write header wtr.write_record(&[ "datatime","open","high","low","close","volume" ])?; for record in candles { let dt_str = time_to_str(record.bar.time); let open_str = format!("{}", record.bar.open); let high_str = format!("{}", record.bar.high); let low_str = format!("{}", record.bar.low); let close_str = format!("{}", record.bar.close); let vol_str = format!("{}", record.bar.vol); wtr.write_record(&[ dt_str, open_str, high_str, low_str,close_str, vol_str ])?; } wtr.flush()?; Ok(()) } pub fn cargo_path(join_path:Option<&str>) -> PathBuf { let mut path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); if join_path.is_some() { path = path.join(join_path.unwrap()) } path } pub fn write_content_to_file(filename:&str, contents: &str) -> std::io::Result<()> { let mut file = File::create(filename)?; file.write_all(contents.as_bytes())?; file.flush()?; Ok(()) }
true
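The timestamp handling above is easy to get wrong, so here is a small test-style sketch (it would have to live inside this module, since parse_time is private) pinning down the expected input format and the millisecond output; the sample date is arbitrary.

#[cfg(test)]
mod parse_time_tests {
    use super::*;

    #[test]
    fn parses_dot_separated_utc_timestamps() {
        // Format is "%Y.%m.%d %H:%M:%S", interpreted as UTC,
        // and the result is a millisecond timestamp.
        let t = parse_time("2021.01.02 03:04:05");
        assert_eq!(t, 1_609_556_645_000);
    }
}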
cc5b9de4ed319c5f328815cd805e8f1160e5cb02
Rust
sitetester/recipe-stats-calculator-rs
/src/recipe/ExpectedOutput.rs
UTF-8
4,178
2.90625
3
[]
no_license
use std::collections::HashMap; use serde::Deserialize; use serde::Serialize; use crate::recipe::calculator::CustomPostcodeDeliveryTime; #[derive(Debug, Serialize, Deserialize)] pub struct ExpectedOutput { #[serde(rename(serialize = "unique_recipe_count"))] uniqueRecipeCount: i32, #[serde(rename(serialize = "count_per_recipe"))] sortedRecipesCount: Vec<CountPerRecipe>, #[serde(rename(serialize = "busiest_postcode"))] busiestPostcode: BusiestPostcode, #[serde(rename(serialize = "count_per_postcode_and_time"))] countPerPostcodeAndTime: CountPerPostcodeAndTime, #[serde(rename(serialize = "match_by_name"))] sortedRecipeNames: Vec<String>, #[serde(rename(serialize = "total_json_objects"))] totalObjects: i64, } #[derive(Debug, Serialize, Deserialize)] struct BusiestPostcode { #[serde(rename(serialize = "postcode"))] postcode: String, #[serde(rename(serialize = "delivery_count"))] deliveryCount: i32, } #[derive(Debug, Serialize, Deserialize)] struct CountPerRecipe { recipe: String, count: i32, } #[derive(Debug, Serialize, Deserialize)] struct CountPerPostcodeAndTime { postcode: String, #[serde(rename(serialize = "from"))] fromAM: String, #[serde(rename(serialize = "to"))] toPM: String, #[serde(rename(serialize = "delivery_count"))] deliveryCount: i32, } pub fn getExpectedOutput( countPerRecipe: &HashMap<String, i32>, countPerPostcode: &HashMap<String, i32>, deliveriesCountPerPostcode: &HashMap<String, i32>, customPostcodeDeliveryTime: &CustomPostcodeDeliveryTime, filteredRecipeNames: &mut Vec<String>, totalObjects: i64, ) -> ExpectedOutput { filteredRecipeNames.sort(); let deliveryCount = if deliveriesCountPerPostcode.contains_key(&customPostcodeDeliveryTime.postcode) { deliveriesCountPerPostcode[&customPostcodeDeliveryTime.postcode] } else { 0 }; let expectedOutput = ExpectedOutput { uniqueRecipeCount: getUniqueRecipeCount(&countPerRecipe), sortedRecipesCount: getSortedRecipeCount(&countPerRecipe), busiestPostcode: getBusiestPostcode(countPerPostcode), countPerPostcodeAndTime: getDeliveriesCountForPostCode( customPostcodeDeliveryTime, deliveryCount, ), sortedRecipeNames: filteredRecipeNames.to_owned(), totalObjects, }; expectedOutput } // counts the number of unique recipe names fn getUniqueRecipeCount(countPerRecipe: &HashMap<String, i32>) -> i32 { let mut uniqueRecipeCount = 0; for (_, count) in countPerRecipe { if *count == 1 as i32 { uniqueRecipeCount += 1; } } uniqueRecipeCount } // counts the number of occurrences for each unique recipe name (alphabetically ordered by recipe name) fn getSortedRecipeCount(countPerRecipe: &HashMap<String, i32>) -> Vec<CountPerRecipe> { let mut v: Vec<_> = countPerRecipe.into_iter().collect(); v.sort_by(|x, y| x.0.cmp(&y.0)); let vCountPerRecipe: Vec<CountPerRecipe> = v .into_iter() .map(|(recipe, &count)| CountPerRecipe { recipe: recipe.to_string(), count, }) .collect(); vCountPerRecipe } // finds the postcode with most delivered recipes fn getBusiestPostcode(countPerPostcode: &HashMap<String, i32>) -> BusiestPostcode { let mut v: Vec<_> = countPerPostcode.into_iter().collect(); v.sort_by(|x, y| x.1.cmp(&y.1)); let last = v.pop().unwrap(); BusiestPostcode { postcode: last.0.to_string(), deliveryCount: *last.1, } } // counts the number of deliveries to postcode `10120` that lie within the delivery time between `10AM` and `3PM` fn getDeliveriesCountForPostCode( customPostcodeDeliveryTime: &CustomPostcodeDeliveryTime, deliveryCount: i32, ) -> CountPerPostcodeAndTime { CountPerPostcodeAndTime { postcode: customPostcodeDeliveryTime.postcode.to_string(), fromAM: 
format!("{}{}", customPostcodeDeliveryTime.from, "AM"), toPM: format!("{}{}", customPostcodeDeliveryTime.to, "PM"), deliveryCount, } }
true
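A test-style sketch of getBusiestPostcode and getSortedRecipeCount (placed inside this module, since both functions are private); the postcode and recipe names are made up.

#[cfg(test)]
mod expected_output_tests {
    use super::*;
    use std::collections::HashMap;

    #[test]
    fn picks_postcode_with_most_deliveries() {
        let mut count_per_postcode = HashMap::new();
        count_per_postcode.insert("10120".to_string(), 3);
        count_per_postcode.insert("10115".to_string(), 1);

        let busiest = getBusiestPostcode(&count_per_postcode);
        assert_eq!(busiest.postcode, "10120");
        assert_eq!(busiest.deliveryCount, 3);
    }

    #[test]
    fn orders_recipe_counts_alphabetically() {
        let mut count_per_recipe = HashMap::new();
        count_per_recipe.insert("Tex-Mex Tilapia".to_string(), 3);
        count_per_recipe.insert("Creamy Dill Chicken".to_string(), 9);

        let sorted = getSortedRecipeCount(&count_per_recipe);
        assert_eq!(sorted[0].recipe, "Creamy Dill Chicken");
        assert_eq!(sorted[0].count, 9);
    }
}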
cfa41566a700cf504db7d8b45f464c9d0f13d11c
Rust
BenoitZugmeyer/RustyAdventOfCode
/2015/src/bin/day02.rs
UTF-8
1,008
3.359375
3
[]
no_license
use std::io;
use std::io::BufRead;

fn compute_surface_and_length(line: &str) -> (i32, i32) {
    let mut numbers = line
        .split('x')
        .map(|n| n.parse::<i32>().unwrap_or(0))
        .collect::<Vec<i32>>();
    numbers.sort();
    let surface = match numbers[..] {
        [l, w, h] => {
            let surfaces = vec![l * w, l * h, w * h];
            surfaces.iter().sum::<i32>() * 2 + surfaces.iter().min().unwrap()
        }
        _ => 0,
    };
    let length = numbers.iter().product::<i32>() + numbers.iter().take(2).sum::<i32>() * 2;
    (surface, length)
}

fn main() {
    let (surface, length) = io::stdin()
        .lock()
        .lines()
        .filter_map(|line| line.ok())
        .fold((0, 0), |(total_surface, total_length), ref line| {
            let (surface, length) = compute_surface_and_length(line);
            (total_surface + surface, total_length + length)
        });
    println!("Total surface: {}", surface);
    println!("Total length: {}", length);
}
true
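The arithmetic in compute_surface_and_length matches the Advent of Code 2015 day 2 examples, which makes for a convenient sanity-check sketch alongside the function:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn matches_known_examples() {
        // 2x3x4: paper = 2*(6+12+8) + 6 = 58, ribbon = 2+2+3+3 + 24 = 34
        assert_eq!(compute_surface_and_length("2x3x4"), (58, 34));
        // 1x1x10: paper = 2*(1+10+10) + 1 = 43, ribbon = 1+1+1+1 + 10 = 14
        assert_eq!(compute_surface_and_length("1x1x10"), (43, 14));
    }
}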
9fdac81bfba2a61137db649f82210501db69affc
Rust
mandx/urlpick
/src/main.rs
UTF-8
1,551
2.71875
3
[ "MIT" ]
permissive
extern crate dialoguer;
extern crate failure;
extern crate open;
extern crate url;

use std::collections::VecDeque;
use std::io;

use dialoguer::Select;
use failure::Error;
use url::Url;

fn main() -> Result<(), Error> {
    let mut urls: Vec<Url> = Vec::new();
    let mut queue: VecDeque<Url> = VecDeque::new();

    std::env::args()
        .skip(1)
        .for_each(|arg| match Url::parse(&arg) {
            Ok(url) => queue.push_back(url),
            Err(_) => println!("Could not parse {:?} as URL", arg),
        });

    while let Some(url) = queue.pop_front() {
        for (_, value) in url.query_pairs() {
            if let Ok(url) = Url::parse(&value) {
                queue.push_back(url);
            }
        }
        urls.push(url);
    }

    if urls.is_empty() {
        eprintln!("No URLs detected");
        return Err(io::Error::from_raw_os_error(1).into());
    }

    let options = urls.iter().map(|url| url.as_str()).collect::<Vec<_>>();
    let selected_index = Select::new().items(&options).interact()?;

    match options.get(selected_index) {
        Some(selection) => open::that(selection)
            .and_then(|status| match status.code() {
                Some(code) => if code == 0 {
                    Ok(())
                } else {
                    Err(io::Error::from_raw_os_error(code))
                },
                None => Err(io::Error::from_raw_os_error(1)),
            })
            .or_else(|error| Err(error.into())),
        None => Err(io::Error::from_raw_os_error(1).into()),
    }
}
true
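The loop in main above discovers URLs nested inside query parameters; the following small sketch isolates that behaviour with an invented redirect-style URL, using only the url crate calls already imported:

use url::Url;

fn nested_url_demo() {
    // A percent-encoded URL hidden in a query parameter, as produced by many redirectors.
    let outer = Url::parse("https://out.example/redirect?u=https%3A%2F%2Ffoo.example%2Fpage").unwrap();

    // query_pairs() percent-decodes the value, so the inner string parses as a URL again.
    let inner: Vec<Url> = outer
        .query_pairs()
        .filter_map(|(_, value)| Url::parse(&value).ok())
        .collect();

    assert_eq!(inner.len(), 1);
    assert_eq!(inner[0].as_str(), "https://foo.example/page");
}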
50d20853688588c921350e6ef19a7f52ca75d4c6
Rust
sux2mfgj/nslfmt
/src/ast.rs
UTF-8
18,344
2.734375
3
[ "MIT" ]
permissive
use std::collections::LinkedList; use std::fmt; use token; macro_rules! not_implemented { () => { panic!("not implemented yet. at line {} in {}.", line!(), file!()) }; } macro_rules! get_top { ($t:ident) => { $t.generate().pop_front().unwrap() }; } #[derive(Debug, Clone, PartialEq)] pub enum ASTClass { Identifire(String), Number(String), String(String), Simulation, BitSlice(Box<ASTNode>, Option<Box<ASTNode>>), Operator(token::Operator), UnaryOperator(token::UnaryOperator), /* * block * e.g. * { * input hello[12]; * func_out ok() : hello; * } */ Block(Vec<Box<ASTNode>>), // identifire, block Declare(Box<ASTNode>, Box<ASTNode>, bool), // <id(struct name)>, (<id(member name)>, <number(bit width)>) Struct(Box<ASTNode>, Vec<(Box<ASTNode>, Option<Box<ASTNode>>)>), // identifire, inputs, output FuncIn(Box<ASTNode>, Vec<Box<ASTNode>>, Option<Box<ASTNode>>), // identifire, outputs, input FuncOut(Box<ASTNode>, Vec<Box<ASTNode>>, Option<Box<ASTNode>>), // identifire, inputs, output FuncSelf(Box<ASTNode>, Vec<Box<ASTNode>>, Option<Box<ASTNode>>), /* * identifire, expression or Identifire * e.g. * input hello[A_WIDTH / 2]; * input hello[B_WIDTH]; * input hello[3]; */ Input(Box<ASTNode>, Option<Box<ASTNode>>), Output(Box<ASTNode>, Option<Box<ASTNode>>), InOut(Box<ASTNode>, Option<Box<ASTNode>>), // ----- Module ------ // identifire, block Module(Box<ASTNode>, Box<ASTNode>), // <id(submodule name)>, { <id> [<expr>] }* ; // e.g. // test in1, in2[2], in3; Submodule(Box<ASTNode>, Vec<(Box<ASTNode>, Option<Box<ASTNode>>)>), //MacroSubModule(Vec<token::Token>), // id, , args ProcName(Box<ASTNode>, Vec<Box<ASTNode>>), StateName(Vec<Box<ASTNode>>), // id ,[12] , [12] , initial value Mem( Vec<( Box<ASTNode>, Box<ASTNode>, Option<Box<ASTNode>>, Option<Vec<Box<ASTNode>>>, )>, ), // id , expression Assign(Box<ASTNode>, Box<ASTNode>), RegAssign(Box<ASTNode>, Box<ASTNode>), // id , block Func(Box<ASTNode>, Option<Box<ASTNode>>, Box<ASTNode>), // expression , block Any(Vec<(Box<ASTNode>, Box<ASTNode>)>), Return(Box<ASTNode>), Goto(Box<ASTNode>), Else, // <id(submodule)>, <id(port)> ModulePort(Box<ASTNode>, Box<ASTNode>), FuncCall(Box<ASTNode>, Vec<Box<ASTNode>>, Option<Box<ASTNode>>), // state name, block State(Box<ASTNode>, Box<ASTNode>), // if (<expression>) <block>, <else_node> If(Box<ASTNode>, Box<ASTNode>, Option<Box<ASTNode>>), // ----- Macros ------ MacroInclude(Box<ASTNode>), MacroUndef(Box<ASTNode>), MacroIfdef(Box<ASTNode>), MacroIfndef(Box<ASTNode>), MacroElse, MacroEndif, MacroDefine(Box<ASTNode>, Option<String>), // wire enable, data[12]; // id , width Wire(Vec<(Box<ASTNode>, Option<Box<ASTNode>>)>), // id , width , initial_value Reg(Vec<(Box<ASTNode>, Option<Box<ASTNode>>, Option<Box<ASTNode>>)>), // operand , operation , operand, is required parances Expression(Box<ASTNode>, Box<ASTNode>, Box<ASTNode>), // expr , bitslice BitslicedExpr(Box<ASTNode>, Box<ASTNode>), // unary operator, expression UnaryOperation(Box<ASTNode>, Box<ASTNode>), CPPStyleComment(String), CStyleComment(Vec<String>), // Newline, EndOfProgram, } #[derive(Debug, Clone, PartialEq)] pub struct ASTNode { pub class: ASTClass, pub position: usize, } impl ASTNode { pub fn new(class: ASTClass, position: usize) -> ASTNode { ASTNode { class: class, position: position, } } pub fn generate(&self) -> LinkedList<String> { let mut list = LinkedList::new(); match self.class { ASTClass::Declare(ref id, ref block, ref is_sim) => { if *is_sim { list.push_back(format!("declare {} simulation", id)); } else { list.push_back(format!("declare {}", id)); } 
list.append(&mut block.generate()); } ASTClass::Module(ref id, ref block) => { list.push_back(format!("module {}", id)); list.append(&mut block.generate()); } ASTClass::Struct(ref id, ref member_info) => { list.push_back(format!("struct {}", id)); let mut struct_members = LinkedList::new(); for c in member_info { if let Some(ref width) = c.1 { struct_members.push_back(format!("{}[{}]", c.0, width)); } else { struct_members.push_back(format!("{}", c.0)); } } list.push_back("{".to_string()); list.append( &mut struct_members .iter() .map(|c| format!(" {}", c)) .collect(), ); list.push_back("}".to_string()); } ASTClass::Block(ref contents) => { for c in contents { match c.class { ASTClass::Any(_) => { list.append(&mut c.generate()); } ASTClass::Func(_, _, _) => { list.append(&mut c.generate()); } ASTClass::If(_, _, _) => { list.append(&mut c.generate()); } ASTClass::State(_, _) => { list.append(&mut c.generate()); } ASTClass::CPPStyleComment(_) => { list.append(&mut c.generate()); } //TODO _ => { list.push_back(format!("{};", get_top!(c))); } } } let mut nm: LinkedList<String> = list.iter().map(|c| format!(" {}", c)).collect(); nm.push_front("{".to_string()); nm.push_back("}".to_string()); return nm; } ASTClass::Any(ref contents) => { for (expr, block) in contents { let expr_str = if let Some(top) = expr.generate().pop_front() { top } else { panic!(); }; list.push_back(format!("{}:", expr_str)); list.append(&mut block.generate()); } let mut nm: LinkedList<String> = list.iter().map(|c| " ".to_string() + c).collect(); nm.push_front("{".to_string()); nm.push_back("}".to_string()); nm.push_front("any".to_string()); return nm; } ASTClass::Else => { list.push_back("else".to_string()); } ASTClass::Expression(ref operand1, ref operator, ref operand2) => { list.push_back(format!( // "({} {} {})", "{} {} {}", get_top!(operand1), operator, get_top!(operand2) )); } ASTClass::Identifire(ref id) => { list.push_back(format!("{}", id)); } ASTClass::ModulePort(ref id, ref port) => { list.push_back(format!("{}.{}", id, port)); } ASTClass::FuncCall(ref id, ref args, ref second_some) => { let arg_str = args .iter() .map(|id| format!("{}", get_top!(id))) .collect::<Vec<String>>() .join(", "); if let Some(second) = second_some { list.push_back(format!("{}.{}({})", id, second, arg_str)); } else { list.push_back(format!("{}({})", id, arg_str)); } } ASTClass::Number(ref num) => { list.push_back(format!("{}", num)); } ASTClass::String(ref _id) => { not_implemented!(); } ASTClass::Submodule(ref submodule, ref contents) => { let l: Vec<String> = contents .iter() .map(|ref r| { let mut def = format!("{}", r.0); if let Some(ref width) = r.1 { def.push_str(&format!("[{}]", get_top!(width))); } return def; }) .collect(); list.push_back(format!("{} {}", submodule, l.join(", "))); } ASTClass::BitSlice(ref msb, ref some_lsb) => { let m = get_top!(msb); if let Some(lsb) = some_lsb { list.push_back(format!("{}:{}", m, get_top!(lsb))); } else { list.push_back(format!("{}", m)); } } ASTClass::BitslicedExpr(ref expr, ref bitslice) => { list.push_back(format!("{}[{}]", get_top!(expr), get_top!(bitslice))); } ASTClass::FuncIn(ref id, ref args, ref result) => { let arg_str = args .iter() .map(|id| format!("{}", id)) .collect::<Vec<String>>() .join(", "); if let Some(return_port) = result { list.push_back(format!( "func_in {}({}) : {}", id, arg_str, return_port )); } else { list.push_back(format!("func_in {}({})", id, arg_str)); } } ASTClass::FuncOut(ref _id, ref _args, ref _result) => { not_implemented!(); } ASTClass::FuncSelf(ref id, 
ref args, ref result) => { let arg_str = args .iter() .map(|id| format!("{}", id)) .collect::<Vec<String>>() .join(", "); if let Some(return_port) = result { list.push_back(format!( "func_self {}({}) : {}", id, arg_str, return_port )); } else { list.push_back(format!("func_self {}({})", id, arg_str)); } } ASTClass::Input(ref id, ref some_expr) => { if let Some(expr) = some_expr { list.push_back(format!("input {}[{}]", id, get_top!(expr))); } else { list.push_back(format!("input {}", id)); } } ASTClass::Output(ref id, ref some_expr) => { if let Some(expr) = some_expr { list.push_back(format!("output {}[{}]", id, get_top!(expr))); } else { list.push_back(format!("output {}", id)); } } ASTClass::InOut(ref id, ref some_expr) => { if let Some(expr) = some_expr { list.push_back(format!("inout {}[{}]", id, get_top!(expr))); } else { list.push_back(format!("inout {}", id)); } } ASTClass::Mem(ref _contents) => { not_implemented!(); } ASTClass::Wire(ref contents) => { let l: Vec<String> = contents .iter() .map(|ref r| { let mut def = format!("{}", r.0); if let Some(ref width) = r.1 { def.push_str(&format!("[{}]", get_top!(width))); } return def; }) .collect(); list.push_back(format!("wire {}", l.join(", "))); } ASTClass::Reg(ref contents) => { let l: Vec<String> = contents .iter() .map(|ref r| { let mut define = format!("{}", r.0); if let Some(ref width) = r.1 { define.push_str(&format!("[{}]", get_top!(width))) } if let Some(ref init) = r.2 { define.push_str(&format!(" = {}", init)); } return define; }) .collect(); list.push_back(format!("reg {}", l.join(", "))); } // ASTClass::Newline => { // not_implemented!() // } ASTClass::CPPStyleComment(ref comment) => { list.push_back(format!("//{}", comment)); } ASTClass::CStyleComment(ref comments) => { list.push_back(format!("/*{}*/", comments.join("\n"))); } ASTClass::ProcName(ref _id, ref _args) => { not_implemented!(); } ASTClass::StateName(ref ids) => { let ids_str = ids .iter() .map(|id_node| format!("{}", id_node)) .collect::<Vec<String>>() .join(", "); list.push_back(format!("state_name {}", ids_str)); } ASTClass::Assign(ref id, ref expr) => { list.push_back(format!("{} = {}", get_top!(id), get_top!(expr))); } ASTClass::RegAssign(ref id, ref expr) => { list.push_back(format!("{} := {}", id, get_top!(expr))); } ASTClass::Func(ref id, ref func, ref block) => { if let Some(fname) = func { list.push_back(format!("func {}.{}", id, fname)); } else { list.push_back(format!("func {}", id)); } list.append(&mut block.generate()); } ASTClass::Return(ref value) => { list.push_back(format!("return {}", get_top!(value))); } ASTClass::Goto(ref id) => { list.push_back(format!("goto {}", id)); } ASTClass::State(ref id, ref block) => { list.push_back(format!("state {}", id)); list.append(&mut block.generate()); } ASTClass::If(ref expr, ref if_block, ref else_block) => { list.push_back(format!("if ({})", get_top!(expr))); list.append(&mut if_block.generate()); if let Some(block) = else_block { list.push_back(format!("else")); list.append(&mut block.generate()); } } ASTClass::InOut(ref _id, ref _expr) => { not_implemented!(); } ASTClass::Operator(ref _op) => { not_implemented!(); } ASTClass::UnaryOperator(ref op) => { list.push_back(format!("{}", op)); } ASTClass::UnaryOperation(ref a, ref b) => { list.push_back(format!("{}{}", get_top!(a), get_top!(b))); } ASTClass::MacroDefine(ref id, ref value) => { if let Some(v) = value { list.push_back(format!("#define {} {}", id, v)); } else { list.push_back(format!("#define {}", id)); } } ASTClass::MacroInclude(ref path) => { 
list.push_back(format!("#include {}", path)); } ASTClass::MacroIfdef(ref id) => { list.push_back(format!("#ifdef {}", id)); } ASTClass::MacroIfndef(ref id) => { list.push_back(format!("#ifndef {}", id)); } ASTClass::MacroElse => { list.push_back(format!("#else")); } ASTClass::MacroEndif => { list.push_back(format!("#endif")); } ASTClass::MacroUndef(ref id) => { list.push_back(format!("#undef {}", id)); } ASTClass::EndOfProgram => { not_implemented!(); } ASTClass::Simulation => { list.push_back("simulation".to_string()); } } list } } impl fmt::Display for ASTNode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.class { ASTClass::Identifire(ref s) => { return write!(f, "{}", s); } ASTClass::Number(ref num) => { return write!(f, "{}", num); } ASTClass::String(ref path) => { return write!(f, "\"{}\"", path); } ASTClass::Operator(ref op) => write!(f, "{}", op), ASTClass::UnaryOperator(ref uop) => write!(f, "{}", uop), ASTClass::UnaryOperation(ref a, ref b) => write!(f, "{}{}", a, b), _ => { panic!( "For the node {:?}, fmt::Display does not implemented yet.", self ); } } } }
true
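A short sketch of what generate() produces for a single declaration-level node, using only constructors visible in this file; the position values are arbitrary:

fn input_node_demo() {
    let id = ASTNode::new(ASTClass::Identifire("data".to_string()), 0);
    let width = ASTNode::new(ASTClass::Number("12".to_string()), 0);
    let input = ASTNode::new(ASTClass::Input(Box::new(id), Some(Box::new(width))), 0);

    // generate() yields the formatted line(s) front-to-back.
    assert_eq!(input.generate().pop_front().unwrap(), "input data[12]");
}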
3db8196a6a17e3d7d33c1e6ba0f228dd8cbce9ae
Rust
Aleru/tokio
/tokio-executor/tests/current_thread.rs
UTF-8
19,629
2.578125
3
[ "MIT" ]
permissive
#![warn(rust_2018_idioms)] use tokio::sync::oneshot; use tokio_executor::current_thread::{self, block_on_all, CurrentThread, TaskExecutor}; use tokio_executor::TypedExecutor; use std::any::Any; use std::cell::{Cell, RefCell}; use std::future::Future; use std::pin::Pin; use std::rc::Rc; use std::task::{Context, Poll}; use std::thread; use std::time::Duration; mod from_block_on_all { use super::*; fn test<F: Fn(Pin<Box<dyn Future<Output = ()>>>) + 'static>(spawn: F) { let cnt = Rc::new(Cell::new(0)); let c = cnt.clone(); let msg = block_on_all(async move { c.set(1 + c.get()); // Spawn! spawn(Box::pin(async move { c.set(1 + c.get()); })); "hello" }); assert_eq!(2, cnt.get()); assert_eq!(msg, "hello"); } #[test] fn spawn() { test(current_thread::spawn) } #[test] fn execute() { test(|f| { TaskExecutor::current().spawn(f).unwrap(); }); } } #[test] fn block_waits() { let (tx, rx) = oneshot::channel(); thread::spawn(|| { thread::sleep(Duration::from_millis(1000)); tx.send(()).unwrap(); }); let cnt = Rc::new(Cell::new(0)); let cnt2 = cnt.clone(); block_on_all(async move { rx.await.unwrap(); cnt.set(1 + cnt.get()); }); assert_eq!(1, cnt2.get()); } #[test] fn spawn_many() { const ITER: usize = 200; let cnt = Rc::new(Cell::new(0)); let mut tokio_current_thread = CurrentThread::new(); for _ in 0..ITER { let cnt = cnt.clone(); tokio_current_thread.spawn(async move { cnt.set(1 + cnt.get()); }); } tokio_current_thread.run().unwrap(); assert_eq!(cnt.get(), ITER); } mod does_not_set_global_executor_by_default { use super::*; fn test<F: Fn(Pin<Box<dyn Future<Output = ()> + Send>>) -> Result<(), E> + 'static, E>( spawn: F, ) { block_on_all(async { spawn(Box::pin(async {})).unwrap_err(); }); } #[test] fn spawn() { test(|f| tokio_executor::DefaultExecutor::current().spawn(f)) } } mod from_block_on_future { use super::*; fn test<F: Fn(Pin<Box<dyn Future<Output = ()>>>)>(spawn: F) { let cnt = Rc::new(Cell::new(0)); let cnt2 = cnt.clone(); let mut tokio_current_thread = CurrentThread::new(); tokio_current_thread.block_on(async move { let cnt3 = cnt2.clone(); spawn(Box::pin(async move { cnt3.set(1 + cnt3.get()); })); }); tokio_current_thread.run().unwrap(); assert_eq!(1, cnt.get()); } #[test] fn spawn() { test(current_thread::spawn); } #[test] fn execute() { test(|f| { current_thread::TaskExecutor::current().spawn(f).unwrap(); }); } } mod outstanding_tasks_are_dropped_when_executor_is_dropped { use super::*; async fn never(_rc: Rc<()>) { loop { yield_once().await; } } fn test<F, G>(spawn: F, dotspawn: G) where F: Fn(Pin<Box<dyn Future<Output = ()>>>) + 'static, G: Fn(&mut CurrentThread, Pin<Box<dyn Future<Output = ()>>>), { let mut rc = Rc::new(()); let mut tokio_current_thread = CurrentThread::new(); dotspawn(&mut tokio_current_thread, Box::pin(never(rc.clone()))); drop(tokio_current_thread); // Ensure the daemon is dropped assert!(Rc::get_mut(&mut rc).is_some()); // Using the global spawn fn let mut rc = Rc::new(()); let rc2 = rc.clone(); let mut tokio_current_thread = CurrentThread::new(); tokio_current_thread.block_on(async move { spawn(Box::pin(never(rc2))); }); drop(tokio_current_thread); // Ensure the daemon is dropped assert!(Rc::get_mut(&mut rc).is_some()); } #[test] fn spawn() { test(current_thread::spawn, |rt, f| { rt.spawn(f); }) } #[test] fn execute() { test( |f| { current_thread::TaskExecutor::current().spawn(f).unwrap(); }, // Note: `CurrentThread` doesn't currently implement // `futures::Executor`, so we'll call `.spawn(...)` rather than // `.execute(...)` for now. 
If `CurrentThread` is changed to // implement Executor, change this to `.execute(...).unwrap()`. |rt, f| { rt.spawn(f); }, ); } } #[test] #[should_panic] fn nesting_run() { block_on_all(async { block_on_all(async {}); }); } mod run_in_future { use super::*; #[test] #[should_panic] fn spawn() { block_on_all(async { current_thread::spawn(async { block_on_all(async {}); }); }); } #[test] #[should_panic] fn execute() { block_on_all(async { current_thread::TaskExecutor::current() .spawn(async { block_on_all(async {}); }) .unwrap(); }); } } #[test] fn tick_on_infini_future() { let num = Rc::new(Cell::new(0)); async fn infini(num: Rc<Cell<usize>>) { loop { num.set(1 + num.get()); yield_once().await } } CurrentThread::new() .spawn(infini(num.clone())) .turn(None) .unwrap(); assert_eq!(1, num.get()); } mod tasks_are_scheduled_fairly { use super::*; async fn spin(state: Rc<RefCell<[i32; 2]>>, idx: usize) { loop { // borrow_mut scope { let mut state = state.borrow_mut(); if idx == 0 { let diff = state[0] - state[1]; assert!(diff.abs() <= 1); if state[0] >= 50 { return; } } state[idx] += 1; if state[idx] >= 100 { return; } } yield_once().await; } } fn test<F: Fn(Pin<Box<dyn Future<Output = ()>>>)>(spawn: F) { let state = Rc::new(RefCell::new([0, 0])); block_on_all(async move { spawn(Box::pin(spin(state.clone(), 0))); spawn(Box::pin(spin(state, 1))); }); } #[test] fn spawn() { test(current_thread::spawn) } #[test] fn execute() { test(|f| { current_thread::TaskExecutor::current().spawn(f).unwrap(); }) } } mod and_turn { use super::*; fn test<F, G>(spawn: F, dotspawn: G) where F: Fn(Pin<Box<dyn Future<Output = ()>>>) + 'static, G: Fn(&mut CurrentThread, Pin<Box<dyn Future<Output = ()>>>), { let cnt = Rc::new(Cell::new(0)); let c = cnt.clone(); let mut tokio_current_thread = CurrentThread::new(); // Spawn a basic task to get the executor to turn dotspawn(&mut tokio_current_thread, Box::pin(async {})); // Turn once... tokio_current_thread.turn(None).unwrap(); dotspawn( &mut tokio_current_thread, Box::pin(async move { c.set(1 + c.get()); // Spawn! spawn(Box::pin(async move { c.set(1 + c.get()); })); }), ); // This does not run the newly spawned thread tokio_current_thread.turn(None).unwrap(); assert_eq!(1, cnt.get()); // This runs the newly spawned thread tokio_current_thread.turn(None).unwrap(); assert_eq!(2, cnt.get()); } #[test] fn spawn() { test(current_thread::spawn, |rt, f| { rt.spawn(f); }) } #[test] fn execute() { test( |f| { current_thread::TaskExecutor::current().spawn(f).unwrap(); }, // Note: `CurrentThread` doesn't currently implement // `futures::Executor`, so we'll call `.spawn(...)` rather than // `.execute(...)` for now. If `CurrentThread` is changed to // implement Executor, change this to `.execute(...).unwrap()`. 
|rt, f| { rt.spawn(f); }, ); } } mod in_drop { use super::*; struct OnDrop<F: FnOnce()>(Option<F>); impl<F: FnOnce()> Drop for OnDrop<F> { fn drop(&mut self) { (self.0.take().unwrap())(); } } async fn noop(_data: Box<dyn Any>) {} fn test<F, G>(spawn: F, dotspawn: G) where F: Fn(Pin<Box<dyn Future<Output = ()>>>) + 'static, G: Fn(&mut CurrentThread, Pin<Box<dyn Future<Output = ()>>>), { let mut tokio_current_thread = CurrentThread::new(); let (tx, rx) = oneshot::channel(); dotspawn( &mut tokio_current_thread, Box::pin(noop(Box::new(OnDrop(Some(move || { spawn(Box::pin(async move { tx.send(()).unwrap(); })); }))))), ); tokio_current_thread.block_on(rx).unwrap(); tokio_current_thread.run().unwrap(); } #[test] fn spawn() { test(current_thread::spawn, |rt, f| { rt.spawn(f); }) } #[test] fn execute() { test( |f| { current_thread::TaskExecutor::current().spawn(f).unwrap(); }, // Note: `CurrentThread` doesn't currently implement // `futures::Executor`, so we'll call `.spawn(...)` rather than // `.execute(...)` for now. If `CurrentThread` is changed to // implement Executor, change this to `.execute(...).unwrap()`. |rt, f| { rt.spawn(f); }, ); } } /* #[test] fn hammer_turn() { use futures::sync::mpsc; const ITER: usize = 100; const N: usize = 100; const THREADS: usize = 4; for _ in 0..ITER { let mut ths = vec![]; // Add some jitter for _ in 0..THREADS { let th = thread::spawn(|| { let mut tokio_current_thread = CurrentThread::new(); let (tx, rx) = mpsc::unbounded(); tokio_current_thread.spawn({ let cnt = Rc::new(Cell::new(0)); let c = cnt.clone(); rx.for_each(move |_| { c.set(1 + c.get()); Ok(()) }) .map_err(|e| panic!("err={:?}", e)) .map(move |v| { assert_eq!(N, cnt.get()); v }) }); thread::spawn(move || { for _ in 0..N { tx.unbounded_send(()).unwrap(); thread::yield_now(); } }); while !tokio_current_thread.is_idle() { tokio_current_thread.turn(None).unwrap(); } }); ths.push(th); } for th in ths { th.join().unwrap(); } } } */ #[test] fn turn_has_polled() { let mut tokio_current_thread = CurrentThread::new(); // Spawn oneshot receiver let (sender, receiver) = oneshot::channel::<()>(); tokio_current_thread.spawn(async move { let _ = receiver.await; }); // Turn once... 
let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); // Should've polled the receiver once, but considered it not ready assert!(res.has_polled()); // Turn another time let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); // Should've polled nothing, the receiver is not ready yet assert!(!res.has_polled()); // Make the receiver ready sender.send(()).unwrap(); // Turn another time let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); // Should've polled the receiver, it's ready now assert!(res.has_polled()); // Now the executor should be empty assert!(tokio_current_thread.is_idle()); let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); // So should've polled nothing assert!(!res.has_polled()); } // Our own mock Park that is never really waiting and the only // thing it does is to send, on request, something (once) to a oneshot // channel struct MyPark { sender: Option<oneshot::Sender<()>>, send_now: Rc<Cell<bool>>, } struct MyUnpark; impl tokio_executor::park::Park for MyPark { type Unpark = MyUnpark; type Error = (); fn unpark(&self) -> Self::Unpark { MyUnpark } fn park(&mut self) -> Result<(), Self::Error> { // If called twice with send_now, this will intentionally panic if self.send_now.get() { self.sender.take().unwrap().send(()).unwrap(); } Ok(()) } fn park_timeout(&mut self, _duration: Duration) -> Result<(), Self::Error> { self.park() } } impl tokio_executor::park::Unpark for MyUnpark { fn unpark(&self) {} } #[test] fn turn_fair() { let send_now = Rc::new(Cell::new(false)); let (sender, receiver) = oneshot::channel::<()>(); let (sender_2, receiver_2) = oneshot::channel::<()>(); let (sender_3, receiver_3) = oneshot::channel::<()>(); let my_park = MyPark { sender: Some(sender_3), send_now: send_now.clone(), }; let mut tokio_current_thread = CurrentThread::new_with_park(my_park); let receiver_1_done = Rc::new(Cell::new(false)); let receiver_1_done_clone = receiver_1_done.clone(); // Once an item is received on the oneshot channel, it will immediately // immediately make the second oneshot channel ready tokio_current_thread.spawn(async move { receiver.await.unwrap(); sender_2.send(()).unwrap(); receiver_1_done_clone.set(true); }); let receiver_2_done = Rc::new(Cell::new(false)); let receiver_2_done_clone = receiver_2_done.clone(); tokio_current_thread.spawn(async move { receiver_2.await.unwrap(); receiver_2_done_clone.set(true); }); // The third receiver is only woken up from our Park implementation, it simulates // e.g. 
a socket that first has to be polled to know if it is ready now let receiver_3_done = Rc::new(Cell::new(false)); let receiver_3_done_clone = receiver_3_done.clone(); tokio_current_thread.spawn(async move { receiver_3.await.unwrap(); receiver_3_done_clone.set(true); }); // First turn should've polled both and considered them not ready let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); assert!(res.has_polled()); // Next turn should've polled nothing let res = tokio_current_thread .turn(Some(Duration::from_millis(0))) .unwrap(); assert!(!res.has_polled()); assert!(!receiver_1_done.get()); assert!(!receiver_2_done.get()); assert!(!receiver_3_done.get()); // After this the receiver future will wake up the second receiver future, // so there are pending futures again sender.send(()).unwrap(); // Now the first receiver should be done, the second receiver should be ready // to be polled again and the socket not yet let res = tokio_current_thread.turn(None).unwrap(); assert!(res.has_polled()); assert!(receiver_1_done.get()); assert!(!receiver_2_done.get()); assert!(!receiver_3_done.get()); // Now let our park implementation know that it should send something to sender 3 send_now.set(true); // This should resolve the second receiver directly, but also poll the socket // and read the packet from it. If it didn't do both here, we would handle // futures that are woken up from the reactor and directly unfairly and would // favour the ones that are woken up directly. let res = tokio_current_thread.turn(None).unwrap(); assert!(res.has_polled()); assert!(receiver_1_done.get()); assert!(receiver_2_done.get()); assert!(receiver_3_done.get()); // Don't send again send_now.set(false); // Now we should be idle and turning should not poll anything assert!(tokio_current_thread.is_idle()); let res = tokio_current_thread.turn(None).unwrap(); assert!(!res.has_polled()); } #[test] fn spawn_from_other_thread() { let mut current_thread = CurrentThread::new(); let handle = current_thread.handle(); let (sender, receiver) = oneshot::channel::<()>(); thread::spawn(move || { handle .spawn(async move { sender.send(()).unwrap(); }) .unwrap(); }); let _ = current_thread.block_on(receiver).unwrap(); } #[test] fn spawn_from_other_thread_unpark() { use std::sync::mpsc::channel as mpsc_channel; let mut current_thread = CurrentThread::new(); let handle = current_thread.handle(); let (sender_1, receiver_1) = oneshot::channel::<()>(); let (sender_2, receiver_2) = mpsc_channel::<()>(); thread::spawn(move || { let _ = receiver_2.recv().unwrap(); handle .spawn(async move { sender_1.send(()).unwrap(); }) .unwrap(); }); // Ensure that unparking the executor works correctly. It will first // check if there are new futures (there are none), then execute the // lazy future below which will cause the future to be spawned from // the other thread. 
Then the executor will park but should be woken // up because *now* we have a new future to schedule let _ = current_thread.block_on(async move { // inlined 'lazy' async move { sender_2.send(()).unwrap(); } .await; receiver_1.await.unwrap(); }); } #[test] fn spawn_from_executor_with_handle() { let mut current_thread = CurrentThread::new(); let handle = current_thread.handle(); let (tx, rx) = oneshot::channel(); current_thread.spawn(async move { handle .spawn(async move { tx.send(()).unwrap(); }) .unwrap(); }); current_thread.block_on(rx).unwrap(); } #[test] fn handle_status() { let current_thread = CurrentThread::new(); let handle = current_thread.handle(); assert!(handle.status().is_ok()); drop(current_thread); assert!(handle.spawn(async { () }).is_err()); assert!(handle.status().is_err()); } #[test] fn handle_is_sync() { let current_thread = CurrentThread::new(); let handle = current_thread.handle(); let _box: Box<dyn Sync> = Box::new(handle); } async fn yield_once() { YieldOnce(false).await } struct YieldOnce(bool); impl Future for YieldOnce { type Output = (); fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> { if self.0 { Poll::Ready(()) } else { self.0 = true; // Push to the back of the executor's queue cx.waker().wake_by_ref(); Poll::Pending } } }
true
7dde63ada01a7a5492e74079cf600d29ec0921f5
Rust
fossabot/necsim-rust
/rustcoalescence/algorithms/monolithic/src/arguments.rs
UTF-8
2,814
2.78125
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT" ]
permissive
use serde::Deserialize; use serde_state::DeserializeState; use necsim_core_bond::{Partition, PositiveF64}; #[derive(Debug)] #[allow(clippy::module_name_repetitions)] pub struct MonolithicArguments { pub parallelism_mode: ParallelismMode, } impl<'de> DeserializeState<'de, Partition> for MonolithicArguments { fn deserialize_state<D>(partition: &mut Partition, deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de>, { let raw = MonolithicArgumentsRaw::deserialize_state(partition, deserializer)?; let parallelism_mode = match raw.parallelism_mode { Some(parallelism_mode) => parallelism_mode, None => { if partition.partitions().get() > 1 { ParallelismMode::OptimisticLockstep } else { ParallelismMode::Monolithic } }, }; Ok(MonolithicArguments { parallelism_mode }) } } #[derive(Default, Debug, DeserializeState)] #[serde(default, deny_unknown_fields)] #[serde(deserialize_state = "Partition")] struct MonolithicArgumentsRaw { #[serde(deserialize_state)] parallelism_mode: Option<ParallelismMode>, } #[derive(Debug, Deserialize)] pub struct OptimisticParallelismMode { pub delta_sync: PositiveF64, } #[derive(Debug, Deserialize)] pub struct AveragingParallelismMode { pub delta_sync: PositiveF64, } #[derive(Debug, Deserialize)] pub enum ParallelismMode { Monolithic, Optimistic(OptimisticParallelismMode), Lockstep, OptimisticLockstep, Averaging(AveragingParallelismMode), } impl<'de> DeserializeState<'de, Partition> for ParallelismMode { fn deserialize_state<D>(partition: &mut Partition, deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de>, { use serde::de::Error; let parallelism_mode = ParallelismMode::deserialize(deserializer)?; match parallelism_mode { ParallelismMode::Monolithic if partition.partitions().get() > 1 => { Err(D::Error::custom(format!( "parallelism_mode {:?} is incompatible with non-monolithic partitioning.", parallelism_mode ))) }, ParallelismMode::Optimistic(..) | ParallelismMode::Lockstep | ParallelismMode::OptimisticLockstep | ParallelismMode::Averaging(..) if partition.partitions().get() == 1 => { Err(D::Error::custom(format!( "parallelism_mode {:?} is incompatible with monolithic partitioning.", parallelism_mode ))) }, partition_mode => Ok(partition_mode), } } }
true
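The record above derives a default parallelism mode from the partition count when the config omits one. A minimal sketch of that defaulting rule, assuming a plain u32 partition count stands in for Partition::partitions().get():

    // Hypothetical helper mirroring the fallback in the DeserializeState impl above:
    // a single partition runs Monolithic, several default to OptimisticLockstep.
    fn default_parallelism_mode(partition_count: u32) -> ParallelismMode {
        if partition_count > 1 {
            ParallelismMode::OptimisticLockstep
        } else {
            ParallelismMode::Monolithic
        }
    }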
78ad525addd4fbf18fd686d7d53ba6a98191c03a
Rust
JackWFinlay/advent_of_code_2020
/day_02/password_philosophy/src/main.rs
UTF-8
3,035
3.484375
3
[]
no_license
mod input; use std::collections::HashMap; struct Policy { lower_bound: u32, upper_bound: u32, letter: u8 } fn main() { let data = &input::get_input(); let validate_passwords_method_one = |data: &str| validate_password_1(data); let valid_passwords_1 = validate_passwords(validate_passwords_method_one, data); let validate_passwords_method_two = |data: &str| validate_password_2(data); let valid_passwords_2 = validate_passwords(validate_passwords_method_two, data); println!("Number of valid passwords with method1: {}", valid_passwords_1); println!("Number of valid passwords with method2: {}", valid_passwords_2); } fn validate_passwords<F>(f:F, data: &Vec<&str>) -> u32 where F: Fn(&str) -> bool { let mut valid_password_count = 0u32; for input in data { if f(input) { valid_password_count += 1; } } return valid_password_count; } fn validate_password_1(input: &str) -> bool { let (policy_str, password) = extract_policy_and_password(input); let policy = parse_policy(policy_str); let mut map = HashMap::<&u8,u32>::new(); for k in password.as_bytes() { let count = map.entry(k).or_insert(0); *count += 1; } if map.contains_key(&policy.letter) { let count = match map.get(&policy.letter) { Some(v) => v, None => panic!("This shouldn't be possible!") }; if *count >= policy.lower_bound && *count <= policy.upper_bound { return true; } } return false; } fn validate_password_2(input: &str) -> bool { let (policy_str, password) = extract_policy_and_password(input); let policy = parse_policy(policy_str); let password_bytes = password.as_bytes(); let first_pos = password_bytes[(policy.lower_bound - 1) as usize]; let second_pos = password_bytes[(policy.upper_bound - 1) as usize]; println!("{} {} {}", policy.letter, first_pos, second_pos); let result = (first_pos == policy.letter && second_pos != policy.letter) || (second_pos == policy.letter && first_pos != policy.letter); return result; } fn extract_policy_and_password(input: &str) -> (&str, &str) { let split = input.split(":"); let vec = split.collect::<Vec<&str>>(); return (vec[0], &vec[1][1..]); } fn parse_policy(policy_str: &str) -> Policy { let split = policy_str.split(" ") .collect::<Vec<&str>>(); let range = split[0]; let letter = split[1].as_bytes()[0]; let range = range.split("-") .collect::<Vec<&str>>(); let lower = range[0].trim() .parse() .expect("lower bound wasn't an integer"); let upper = range[1].trim() .parse() .expect("upper bound wasn't an integer"); return Policy { lower_bound: lower, upper_bound: upper, letter: letter }; }
true
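A hypothetical test for the two validators in the record above, using the well-known Advent of Code 2020 day 2 sample lines; it assumes the test module sits in the same main.rs so the private functions are in scope:

    #[cfg(test)]
    mod tests {
        use super::*;

        #[test]
        fn sample_lines() {
            // Policy 1 counts occurrences of the letter within the bounds.
            assert!(validate_password_1("1-3 a: abcde"));
            assert!(!validate_password_1("1-3 b: cdefg"));
            // Policy 2 requires the letter at exactly one of the two 1-based positions.
            assert!(validate_password_2("1-3 a: abcde"));
            assert!(!validate_password_2("2-9 c: ccccccccc"));
        }
    }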
6ebda9bdbca18e297373bc425f5af26aa6a0d722
Rust
sunli829/xengine
/xphysics/src/collision/shapes/chain.rs
UTF-8
4,259
2.8125
3
[]
no_license
use crate::collision::distance::DistanceProxy; use crate::settings; use crate::{MassData, RayCastInput, RayCastOutput, Shape, ShapeEdge, ShapeType}; use std::borrow::Cow; use xmath::{Multiply, Real, Transform, Vector2, AABB}; pub struct ShapeChain<T> { pub(crate) vertices: Vec<Vector2<T>>, pub(crate) prev_vertex: Option<Vector2<T>>, pub(crate) next_vertex: Option<Vector2<T>>, } impl<T: Real> ShapeChain<T> { fn check_vertices(vertices: &Vec<Vector2<T>>) { for i in 1..vertices.len() { let v1 = vertices[i - 1]; let v2 = vertices[i]; assert!( v1.distance_squared(&v2) > settings::linear_slop::<T>() * settings::linear_slop::<T>() ); } } pub fn create_loop<I, V>(vertices: I) -> ShapeChain<T> where I: IntoIterator<Item = V>, V: Into<Vector2<T>>, { let mut vertices = vertices.into_iter().map(|v| v.into()).collect::<Vec<_>>(); assert!(vertices.len() >= 3); Self::check_vertices(&vertices); vertices.push(vertices[0]); let prev_vertex = Some(vertices[vertices.len() - 2]); let next_vertex = Some(vertices[1]); ShapeChain { vertices, prev_vertex, next_vertex, } } pub fn create_chain<I, V>(vertices: I) -> ShapeChain<T> where I: IntoIterator<Item = V>, V: Into<Vector2<T>>, { let vertices = vertices.into_iter().map(|v| v.into()).collect::<Vec<_>>(); assert!(vertices.len() >= 2); Self::check_vertices(&vertices); ShapeChain { vertices, prev_vertex: None, next_vertex: None, } } pub fn get_child_edge(&self, index: usize) -> ShapeEdge<T> { assert!(index < self.vertices.len() - 1); ShapeEdge { vertex1: self.vertices[index + 0], vertex2: self.vertices[index + 1], vertex0: if index > 0 { Some(self.vertices[index - 1]) } else { self.prev_vertex }, vertex3: if index < self.vertices.len() - 2 { Some(self.vertices[index + 2]) } else { self.next_vertex }, } } } impl<T: Real> Shape<T> for ShapeChain<T> { fn shape_type(&self) -> ShapeType { ShapeType::Chain } fn radius(&self) -> T { settings::polygon_radius() } fn child_count(&self) -> usize { self.vertices.len() - 1 } fn test_point(&self, _xf: &Transform<T>, _p: &Vector2<T>) -> bool { false } fn ray_cast( &self, input: &RayCastInput<T>, xf: &Transform<T>, child_index: usize, ) -> Option<RayCastOutput<T>> { assert!(child_index < self.vertices.len()); let i1 = child_index; let mut i2 = child_index + 1; if i2 == self.vertices.len() { i2 = 0; } let edge = ShapeEdge::new(self.vertices[i1], self.vertices[i2]); edge.ray_cast(input, xf, 0) } fn compute_aabb(&self, xf: &Transform<T>, child_index: usize) -> AABB<T> { assert!(child_index < self.vertices.len()); let i1 = child_index; let mut i2 = child_index + 1; if i2 == self.vertices.len() { i2 = 0; } let v1 = xf.multiply(self.vertices[i1]); let v2 = xf.multiply(self.vertices[i2]); AABB { lower_bound: v1.min(v2), upper_bound: v1.max(v2), } } fn compute_mass(&self, _density: T) -> MassData<T> { MassData { mass: T::zero(), center: Vector2::zero(), i: T::zero(), } } fn distance_proxy(&self, index: usize) -> DistanceProxy<'_, T> { assert!(index < self.vertices.len()); let v1 = self.vertices[index]; let v2 = if index + 1 < self.vertices.len() { self.vertices[index + 1] } else { self.vertices[0] }; let vertices = vec![v1, v2]; DistanceProxy { vertices: Cow::Owned(vertices), radius: self.radius(), } } }
true
4d823b11dd8cbe5956d171c8c79af1b4a48ecdb3
Rust
xenith-studios/ataxia
/ataxia-engine/src/bin/engine.rs
UTF-8
3,125
2.640625
3
[ "Apache-2.0", "MIT", "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Binary source for the game engine //! There should be minimal functionality in this file. It exists mainly to set up the engine and //! call out to the library code. #![deny( trivial_casts, trivial_numeric_casts, unsafe_code, unused_import_braces, unused_qualifications, clippy::all, clippy::pedantic, clippy::perf, clippy::style )] // Include this file to get access to the datetime of the last time we compiled include!(concat!(env!("OUT_DIR"), "/version.rs")); use std::fs::File; use std::io::Write; use std::path::PathBuf; use std::process; use log::{error, info}; use simplelog::{ ColorChoice, CombinedLogger, Config, LevelFilter, TermLogger, TerminalMode, WriteLogger, }; fn main() -> Result<(), anyhow::Error> { // Load settings from config file while allowing command-line overrides let config = ataxia_core::Config::new().unwrap_or_else(|err| { eprintln!("Unable to load the configuration file: {err}"); std::process::exit(1); }); // Initialize logging subsystem CombinedLogger::init(vec![ TermLogger::new( if config.debug() { LevelFilter::Debug } else if config.verbose() { LevelFilter::Info } else { LevelFilter::Warn }, Config::default(), TerminalMode::Mixed, ColorChoice::Auto, ), WriteLogger::new( if config.debug() { LevelFilter::Debug } else { LevelFilter::Info }, Config::default(), File::create(config.log_file())?, ), ])?; info!("Loading Ataxia Engine, compiled on {}", ATAXIA_COMPILED); // TODO: Figure out a system for catching/handling signals (SIGINT, SIGQUIT, SIGHUP) // Clean up from previous unclean shutdown if necessary // Write PID to file // TODO: Acquire lock on PID file as additional method of insuring only a single instance is running? let pid_file = PathBuf::from(config.pid_file()); // FIXME: Remove once we have a startup/supervisor system in place to handle unclean shutdown if pid_file.exists() { std::fs::remove_file(&pid_file)?; } File::create(&pid_file)?.write_all(format!("{}", process::id()).as_ref())?; // Initialize support subsystems // Environment // Queues // Database // Initialize engine subsystem let server = ataxia_engine::Engine::new(config).unwrap_or_else(|err| { error!("Unable to initialize the engine: {}", err); std::process::exit(1); }); // Initialize async networking subsystem in a dedicated thread // Start main game loop if let Err(e) = server.run() { error!("Unresolved system error: {}", e); std::process::exit(1); } // If the game loop exited without an error, we have a clean shutdown // Flush pending database writes and close database connection // Remove the PID file if pid_file.exists() { std::fs::remove_file(&pid_file)?; } Ok(()) }
true
131de5ec89521a201c1820fb5634b1cb055e9876
Rust
apognu/knox
/libknox/src/util/git.rs
UTF-8
5,384
2.546875
3
[ "MIT" ]
permissive
use std::error::Error; use git2::{Commit, Config, Cred, IndexAddOption, ObjectType, PushOptions, RemoteCallbacks, Repository, Signature}; use crate::{util::VaultError, VaultContext}; pub fn exists(vault: &VaultContext) -> bool { vault.has_pack(".git") } fn last_commit(repo: &Repository) -> Result<Commit, git2::Error> { let object = repo.head()?.resolve()?.peel(ObjectType::Commit)?; object.into_commit().map_err(|_| git2::Error::from_str("could not find latest commit")) } pub(crate) fn init(vault: &VaultContext) -> Result<(), Box<dyn Error>> { match Repository::init(&vault.path) { Ok(_) => commit(&vault, "Initialized knox repository."), Err(_) => Err(VaultError::throw("could not init git repository")), } } pub(crate) fn commit(vault: &VaultContext, message: &str) -> Result<(), Box<dyn Error>> { if !exists(&vault) { return Ok(()); } match Repository::open(&vault.path) { Ok(repo) => { let (name, email) = Config::open_default()? .snapshot() .map(|c| { ( c.get_string("user.name").unwrap_or_else(|_| "Knox".to_string()), c.get_string("user.email").unwrap_or_else(|_| "N/A".to_string()), ) }) .unwrap_or(("Knox".to_string(), "N/A".to_string())); let last_commit = last_commit(&repo).ok(); let parent = match last_commit { Some(ref commit) => vec![commit], None => vec![], }; let mut index = repo.index()?; index.add_all(&["*"], IndexAddOption::DEFAULT, None)?; let tree = repo.find_tree(index.write_tree()?)?; let author = Signature::now(&name, &email)?; repo.commit(Some("HEAD"), &author, &author, message, &tree, &parent)?; index.write()?; } Err(_) => { return Err(VaultError::throw("could not open git repository, data was still written locally")); } } Ok(()) } pub(crate) fn set_origin(vault: &VaultContext, origin: &str) -> Result<(), Box<dyn Error>> { if !exists(&vault) { return Err(VaultError::throw("local git repository does not exist")); } let repo = Repository::open(&vault.path)?; repo.remote_set_url("origin", origin)?; Ok(()) } pub(crate) fn push(vault: &VaultContext) -> Result<(), Box<dyn Error>> { if !exists(&vault) { return Err(VaultError::throw("local git repository does not exist")); } let repo = Repository::open(&vault.path)?; let mut remote = repo.find_remote("origin")?; let mut retry = false; let mut callbacks = RemoteCallbacks::new(); callbacks.credentials(|_, user, credentials| { if retry { return Err(git2::Error::from_str("ssh agent did not provide valid public key")); } if credentials.contains(git2::CredentialType::USERNAME) { return Cred::username(user.unwrap_or("git")); } retry = true; Cred::ssh_key_from_agent("git") }); let mut options = PushOptions::new(); options.remote_callbacks(callbacks); remote.push(&["refs/heads/master:refs/heads/master"], Some(&mut options))?; Ok(()) } #[cfg(test)] mod tests { use git2::{Repository, Sort}; use knox_testing::spec; use crate::*; #[test] fn init() { let tmp = spec::setup(); let context = VaultContext { path: tmp.path().to_str().unwrap().to_string(), vault: Vault::new(), }; git::init(&context).expect("could not initialize git repository"); let repo = Repository::open(tmp.path()).expect("could not open repository"); let mut revwalk = repo.revwalk().expect("could not get revwalk"); revwalk.push_head().expect("could not find HEAD"); revwalk.set_sorting(Sort::REVERSE).expect("could not set sort"); let log: Vec<_> = revwalk.collect(); assert_eq!(log.len(), 1); for rev in log { let commit = repo.find_commit(rev.unwrap()).expect("could not find root commit"); assert_eq!(commit.summary().unwrap_or(""), "Initialized knox repository."); } } #[test] fn
commit() { let tmp = spec::setup(); let mut context = crate::spec::get_test_vault(tmp.path()).expect("could not get vault"); context.git_init().expect("could not create local git repository"); context.write_entry("a", &Entry::default()).expect("could not write entry"); context.commit("abcdef").expect("could not commit changes"); let repo = Repository::open(tmp.path()).expect("could not open repository"); let mut revwalk = repo.revwalk().expect("could not get revwalk"); revwalk.push_head().expect("could not find HEAD"); revwalk.set_sorting(Sort::REVERSE).expect("could not get GPG key"); for (i, rev) in revwalk.enumerate() { if i == 1 { let commit = repo.find_commit(rev.unwrap()).expect("could not find root commit"); assert_eq!(commit.summary().unwrap_or(""), "abcdef"); } } } #[test] fn set_remote() { let tmp = spec::setup(); let context = crate::spec::get_test_vault(tmp.path()).expect("could not get vault"); context.git_init().expect("could not create local git repository"); let repo = Repository::open(tmp.path()).expect("could not open repository"); repo.remote("testremote", "https://git.example.com").expect("could not set remote URL"); let remote = repo.find_remote("testremote").expect("could not find created remote"); assert_eq!(remote.url().unwrap_or(""), "https://git.example.com"); } }
true
1fad3df3777b56a92940eb3283e652110f156caf
Rust
clitetailor/cursortanium
/src/test/test.rs
UTF-8
1,484
2.671875
3
[ "MIT" ]
permissive
use crate::cursor::Cursor; use crate::test::capture_result::CaptureResult; use crate::utils::parse_label; pub struct Test { pub no_label: bool, pub prefix: String, } impl Test { pub fn new() -> Test { Test { no_label: true, prefix: String::from("🧀"), } } pub fn capture(&self, input: &str) -> CaptureResult { let mut doc: String = String::from(""); let mut indices: Vec<(String, usize)> = vec![]; let mut offset: usize = 0; let mut cursor = Cursor::from(input); let mut last_index = cursor.get_index(); let prefix_len = self.prefix.chars().count(); while !cursor.is_eof() { if cursor.starts_with(&self.prefix) { doc.push_str(cursor.read_from(&last_index)); last_index = cursor.get_index(); cursor.next(&prefix_len); let label = if self.no_label { String::from("") } else { parse_label(&mut cursor) }; offset = offset + cursor.get_index() - last_index; indices .push((label, cursor.get_index() - offset)); last_index = cursor.get_index(); } else { cursor.next(&1); }; } doc.push_str(cursor.read_from(&last_index)); CaptureResult { doc, indices } } }
true
9164f24ded06d2200c0ed614eef2237280fd3cf5
Rust
supervitas/path-tracer
/src/renderer/scene.rs
UTF-8
1,115
3.15625
3
[]
no_license
use crate::renderables::renderable::Renderable; use crate::renderer::light::Light; use crate::math::color::Color; use crate::gl::obj_loader::load_obj; pub struct Scene { background: Color, lights: Vec<Light>, renderables: Vec<Box<dyn Renderable + Send + 'static>> } impl Scene { pub fn new(background: Color) -> Self { Scene { background, lights: Vec::new(), renderables: Vec::new(), } } pub fn get_renderables(&self) -> &Vec<Box<dyn Renderable + Send + 'static >> { &self.renderables } pub fn get_lights(&self) -> &Vec<Light>{ &self.lights } pub fn add_light(&mut self, light: Light) {self.lights.push(light)} pub fn get_background(&self) -> &Color { &self.background } pub fn add_renderable(&mut self, renderable: Box<dyn Renderable + Send + 'static>) { self.renderables.push(renderable); } pub fn load_model(&mut self, path: String) { let meshes = load_obj(&path); for mesh in meshes { self.add_renderable(Box::new(mesh)); } } }
true
cd62f9ac0b40683e983b41db83f5eeb86035fff3
Rust
regonn/AOJ
/ITP1/11/A.rs
UTF-8
1,741
3.4375
3
[]
no_license
use std::io::*; use std::str::FromStr; fn read<T: FromStr>() -> T { let stdin = stdin(); let stdin = stdin.lock(); let token: String = stdin .bytes() .map(|c| c.expect("failed to read char") as char) .skip_while(|c| c.is_whitespace()) .take_while(|c| !c.is_whitespace()) .collect(); token.parse().ok().expect("failed to parse token") } fn reverse_index(dice_index: usize) -> usize { match dice_index { 0 => return 5, 1 => return 4, 2 => return 3, 3 => return 2, 4 => return 1, 5 => return 0, _ => return 0, } } fn update_dice_index( dice_top: usize, dice_front: usize, dice_right: usize, direct: char, ) -> (usize, usize, usize) { match direct { 'N' => return (dice_front, reverse_index(dice_top), dice_right), 'W' => return (dice_right, dice_front, reverse_index(dice_top)), 'S' => return (reverse_index(dice_front), dice_top, dice_right), 'E' => return (reverse_index(dice_right), dice_front, dice_top), _ => return (0, 0, 0), } } fn main() { let mut dice: Vec<u8> = vec![]; let mut last_dice_top: usize = 0; let mut last_dice_front: usize = 1; let mut last_dice_right: usize = 2; for _ in 0..6 { let number: u8 = read(); dice.push(number); } let directs: String = read(); for direct in directs.chars() { let (dice_top, dice_front, dice_right) = update_dice_index(last_dice_top, last_dice_front, last_dice_right, direct); last_dice_top = dice_top; last_dice_front = dice_front; last_dice_right = dice_right; } println!("{}", dice[last_dice_top]); }
true
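Because update_dice_index in the record above is a pure function, the rolling logic can be checked without reading stdin. A hypothetical test for a sample roll sequence (faces 1 2 4 8 16 32, commands "SE"):

    #[test]
    fn rolls_south_then_east() {
        let dice = [1u8, 2, 4, 8, 16, 32];
        // Start with top/front/right at indices 0/1/2, as main() does.
        let (t, f, r) = update_dice_index(0, 1, 2, 'S');
        let (t, _f, _r) = update_dice_index(t, f, r, 'E');
        assert_eq!(dice[t], 8);
    }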
b2bdf0681c5312ef9ea59ccc7d62b3df73ce53ec
Rust
tokio-rs/tokio
/tokio/src/loom/std/atomic_u32.rs
UTF-8
1,214
3.125
3
[ "MIT" ]
permissive
use std::cell::UnsafeCell; use std::fmt; use std::ops::Deref; /// `AtomicU32` providing an additional `unsync_load` function. pub(crate) struct AtomicU32 { inner: UnsafeCell<std::sync::atomic::AtomicU32>, } unsafe impl Send for AtomicU32 {} unsafe impl Sync for AtomicU32 {} impl AtomicU32 { pub(crate) const fn new(val: u32) -> AtomicU32 { let inner = UnsafeCell::new(std::sync::atomic::AtomicU32::new(val)); AtomicU32 { inner } } /// Performs an unsynchronized load. /// /// # Safety /// /// All mutations must have happened before the unsynchronized load. /// Additionally, there must be no concurrent mutations. pub(crate) unsafe fn unsync_load(&self) -> u32 { core::ptr::read(self.inner.get() as *const u32) } } impl Deref for AtomicU32 { type Target = std::sync::atomic::AtomicU32; fn deref(&self) -> &Self::Target { // safety: it is always safe to access `&self` fns on the inner value as // we never perform unsafe mutations. unsafe { &*self.inner.get() } } } impl fmt::Debug for AtomicU32 { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { self.deref().fmt(fmt) } }
true
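A short in-crate sketch of how the wrapper above is used: ordinary atomic operations go through Deref to the std type, while unsync_load skips synchronization and is therefore unsafe.

    // Hypothetical usage inside the tokio crate (the type is pub(crate)).
    let a = AtomicU32::new(7);
    a.store(8, std::sync::atomic::Ordering::Relaxed); // via Deref<Target = std AtomicU32>
    // Safe here only because nothing else can be mutating `a` concurrently.
    let v = unsafe { a.unsync_load() };
    assert_eq!(v, 8);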
ac427e6fef193d6b10c04fcdbc82a5e750b02400
Rust
BurNiinTRee/ticktacktoe
/src/ttt.rs
UTF-8
3,314
3.21875
3
[]
no_license
use std::io; use std::io::prelude::*; use super::ttterror::TttError; const WINNERS: [[u8; 3]; 8] = [ [0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 3, 6], [2, 4, 7], [3, 5, 8], [0, 4, 8], [2, 4, 6] ]; #[derive(Debug, Copy, Clone)] pub struct Ttt { pub board: [Field; 9] } impl Ttt { pub fn new() -> Ttt { Ttt { board: [Field::Empty; 9] } } pub fn tick(&mut self, field: usize, player: Field) -> Result<(), TttError> { if field > 9 || field < 1 { return Err(TttError::InvalidField); } if let Field::Empty = player { return Err(TttError::NonPlayer); } if self.board[field-1] == Field::Empty { self.board[field-1] = player; Ok(()) } else { Err(TttError::OccupiedField) } } pub fn line(&self, line: usize) -> String { let mut output: String = String::with_capacity(7); output.push('|'); for i in 0..3 { output.push(match self.board[i+line*3] { Field::Empty => (i+line*3+1).to_string().chars().nth(0).unwrap(), Field::Circle => 'O', Field::Cross => 'X', }); output.push('|'); } output } pub fn read_input(&mut self, player: Field) -> (Field, usize) { println!("Player {}: Enter field to tick", player); let mut buffer = String::new(); let stdin = io::stdin(); let mut stdin = stdin.lock(); if stdin.read_line(&mut buffer).is_ok(){ if let Ok(num) = buffer.trim().parse::<usize>() { return (player, num) } } drop(stdin); return self.read_input(player); } pub fn is_won(&self) -> Field { let mut winner = 0; let players = [Field::Cross, Field::Circle]; for player in players.into_iter() { for win in WINNERS.into_iter() { for field in win.into_iter() { match self.board[*field as usize] == *player { true => { winner += 1; continue; }, false => { winner = 0; break; } }; } if winner == 3 { return *player; } }; } return Field::Empty; } } impl ::std::fmt::Display for Ttt { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { fn seperator() -> String { "|-----|".to_string() } write!(f, "{}\n{}\n{}\n{}\n{}\n{}\n{}\n", seperator(), self.line(0), seperator(), self.line(1), seperator(), self.line(2), seperator()) } } #[derive(Debug, Copy, Clone, PartialEq)] pub enum Field { Empty, Cross, Circle } impl ::std::fmt::Display for Field { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { match *self { Field::Empty => write!(f, "Empty Field"), Field::Cross => write!(f, "Cross"), Field::Circle => write!(f, "Circle"), } } }
true
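A hypothetical in-crate test of the Ttt board above: three crosses across the top row (fields 1, 2, 3) should be reported as a win for Cross.

    #[test]
    fn top_row_wins_for_cross() {
        let mut game = Ttt::new();
        assert!(game.tick(1, Field::Cross).is_ok());
        assert!(game.tick(5, Field::Circle).is_ok());
        assert!(game.tick(2, Field::Cross).is_ok());
        assert!(game.tick(9, Field::Circle).is_ok());
        assert!(game.tick(3, Field::Cross).is_ok());
        assert_eq!(game.is_won(), Field::Cross);
    }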
9228f1dcb5e56eb72912da36c460bfc4afde4366
Rust
dmexe/todo-demo-rs
/src/common/response_ext.rs
UTF-8
745
2.640625
3
[]
no_license
use serde::Serialize; use serde_json; use hyper::{Response, StatusCode}; use hyper::header::{ContentLength, ContentType}; pub trait ResponseExt { fn json<T>(self, value: &T) -> Response where T: Serialize + 'static; fn not_found(self) -> Response; } impl ResponseExt for Response { fn json<T>(self, value: &T) -> Response where T: Serialize + 'static, { let body = serde_json::to_string(&value).expect("json serialization cannot be fail"); let len = body.len(); self .with_body(body) .with_header(ContentType::json()) .with_header(ContentLength(len as u64)) } fn not_found(self) -> Response { self .with_status(StatusCode::NotFound) .with_header(ContentLength(0)) } }
true
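A hypothetical handler fragment showing the extension trait above in use; it assumes the hyper 0.11-era Response::new() constructor this code was written against:

    // 200 with a JSON body plus Content-Type and Content-Length headers.
    let ok = Response::new().json(&vec![1, 2, 3]);
    // 404 with an explicitly empty body.
    let missing = Response::new().not_found();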
3621d9b625bd5d6be4c9c5d1f6a190297eb000e6
Rust
SpectralSequences/sseq
/ext/examples/filtration_one.rs
UTF-8
964
2.578125
3
[ "Apache-2.0", "MIT" ]
permissive
//! This computes all available filtration one products for a module. This only works at the prime //! 2 for the moment. //! //! We omit outputs where the target bidegree is zero (or not yet computed) use ext::chain_complex::{ChainComplex, FreeChainComplex}; use ext::utils::query_module; fn main() -> anyhow::Result<()> { let resolution = query_module(None, false)?; assert_eq!(*resolution.prime(), 2); for (s, n, t) in resolution.iter_stem() { let mut i = 0; while resolution.has_computed_bidegree(s + 1, t + (1 << i)) { // TODO: This doesn't work with the reordered Adams basis let products = resolution.filtration_one_product(1 << i, 0, s, t).unwrap(); for (idx, row) in products.into_iter().enumerate() { if !row.is_empty() { println!("h_{i} x_({n}, {s}, {idx}) = {row:?}"); } } i += 1; } } Ok(()) }
true
bc74773363de6894ccc78d812a1436cba652677c
Rust
saethlin/omnichat
/slack/src/http/api.rs
UTF-8
448
2.6875
3
[ "Apache-2.0" ]
permissive
use std::collections::HashMap; /// Checks API calling code. /// /// Wraps https://api.slack.com/methods/api.test #[derive(Debug, Clone, Serialize, new)] pub struct TestRequest<'a> { /// Error response to return #[new(default)] error: Option<&'a str>, /// example property to return #[new(default)] foo: Option<&'a str>, } #[derive(Debug, Clone, Deserialize)] pub struct TestResponse { args: HashMap<String, String>, }
true
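A sketch of building and serializing the request above, assuming derive-new's #[new(default)] leaves both fields out of the generated constructor:

    let req = TestRequest::new(); // both fields default to None
    let body = serde_json::to_string(&req).unwrap();
    // Options serialize as null, in declaration order.
    assert_eq!(body, r#"{"error":null,"foo":null}"#);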
d7e2a17a3c8ac88be39fb50ed74ab09e6fcfcfa5
Rust
guihao-liang/numeric
/src/tensor/mul_floats.rs
UTF-8
2,426
2.5625
3
[ "MIT" ]
permissive
use std::ops::Mul; use tensor::Tensor; use blas; macro_rules! add_impl { ($t:ty, $tbmv:ident, $sbmv:ident, $scal:ident) => ( impl Mul<Tensor<$t>> for Tensor<$t> { type Output = Tensor<$t>; fn mul(mut self, rhs: Tensor<$t>) -> Tensor<$t> { assert_eq!(self.shape, rhs.shape); if cfg!(noblas) { for i in 0..self.size() { self.data[i] *= rhs.data[i]; } } else { blas::$tbmv(b'L', b'T', b'N', self.size(), 0, &rhs.data, 1, &mut self.data, 1); } self } } impl<'a> Mul<&'a Tensor<$t>> for Tensor<$t> { type Output = Tensor<$t>; fn mul(mut self, rhs: &Tensor<$t>) -> Tensor<$t> { assert_eq!(self.shape, rhs.shape); if cfg!(noblas) { for i in 0..self.size() { self.data[i] *= rhs.data[i]; } } else { blas::$tbmv(b'L', b'T', b'N', self.size(), 0, &rhs.data, 1, &mut self.data, 1); } self } } // TODO: Change to separate lifetimes impl<'a> Mul<&'a Tensor<$t>> for &'a Tensor<$t> { type Output = Tensor<$t>; fn mul(self, rhs: &Tensor<$t>) -> Tensor<$t> { assert_eq!(self.shape, rhs.shape); let mut t = self.clone(); if cfg!(noblas) { for i in 0..self.size() { t.data[i] *= rhs.data[i]; } } else { blas::$sbmv(b'L', self.size(), 0, 1.0, &self.data, 1, &rhs.data, 1, 0.0, &mut t.data, 1); } t } } // T * S impl Mul<$t> for Tensor<$t> { type Output = Tensor<$t>; fn mul(mut self, rhs: $t) -> Tensor<$t> { if cfg!(noblas) { for i in 0..self.size() { self.data[i] *= rhs; } } else { blas::$scal(self.size(), rhs, &self.data, 1); } self } } ) } add_impl!(f32, stbmv, ssbmv, sscal); add_impl!(f64, dtbmv, dsbmv, dscal);
true
07e07ac2d1ee6e6b1beffe37617549077a2cdef7
Rust
ltoddy/leetcode
/algorithms/merge-intervals.rs
UTF-8
1,344
3.171875
3
[ "MIT" ]
permissive
// Definition for an interval. // #[derive(Debug, PartialEq, Eq)] // pub struct Interval { // pub start: i32, // pub end: i32, // } // // impl Interval { // #[inline] // pub fn new(start: i32, end: i32) -> Self { // Interval { // start, // end // } // } // } impl Solution { pub fn merge(intervals: Vec<Interval>) -> Vec<Interval> { if intervals.is_empty() { return intervals; } let mut intervals = intervals; intervals.sort_by(|prev, cur| { if (prev.start < cur.start) || (prev.start == cur.start && prev.end < cur.end) { std::cmp::Ordering::Less } else { std::cmp::Ordering::Greater } }); let mut start = intervals[0].start; let mut end = intervals[0].end; let mut result: Vec<Interval> = Vec::with_capacity(intervals.len()); for interval in intervals.iter().skip(1) { if end >= interval.start { if end < interval.end { end = interval.end; } } else { result.push(Interval::new(start, end)); start = interval.start; end = interval.end; } } result.push(Interval::new(start, end)); result } }
true
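A hypothetical check of the merge above against the classic example, assuming the LeetCode harness supplies the Solution struct and the commented-out Interval definition:

    let input = vec![
        Interval::new(1, 3),
        Interval::new(2, 6),
        Interval::new(8, 10),
        Interval::new(15, 18),
    ];
    // Overlapping [1,3] and [2,6] collapse into [1,6]; the rest stay as-is.
    assert_eq!(
        Solution::merge(input),
        vec![Interval::new(1, 6), Interval::new(8, 10), Interval::new(15, 18)]
    );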
33ee2756bcbff5dad89eb0a04663d5ec09012cc8
Rust
hellodk/dumpyard
/rust_examples/tra/src/trait_inheritance.rs
UTF-8
928
3.671875
4
[]
no_license
// trait_inheritance.rs trait Vehicle { fn get_price(&self) -> u64; } trait Car: Vehicle { // dependent on Vehicle trait // convenient type alias for the implementing type within the trait's impl blocks fn model(&self) -> String; } struct TeslaRoadster { // This is a car model: String, release_date: u16 } impl TeslaRoadster { // method for the struct fn new(model: &str, release_date: u16) -> Self { //self as the return type Self { model: model.to_string(), release_date } } } impl Car for TeslaRoadster { fn model(&self) -> String { "Tesla Roadster I".to_string() } } // /* impl Vehicle for TeslaRoadster { fn get_price(&self) -> u64 { 200_000 } } // */ fn main() { let my_roadster = TeslaRoadster::new("Tesla Roadster II", 2020); println!("{} {} is priced at ${}", my_roadster.release_date, my_roadster.model, my_roadster.get_price()); }
true
a57307046e84e2f4b91464d0e765d2ddc135d4fd
Rust
plopgrizzly/afi_docf
/src/lib.rs
UTF-8
1,524
2.921875
3
[ "MIT" ]
permissive
// Aldaron's Format Interface / Aldaron's Document Format (docf) // Copyright (c) 2017 Plop Grizzly, Jeron Lau <jeron.lau@plopgrizzly.com> // Licensed under the MIT LICENSE // // src/lib.rs //! Aldaron's Format Interface / docf is a library developed by Plop Grizzly for //! reading and writing docf (Aldaron's Document Format) files. #![no_std] #![warn(missing_docs)] #![doc(html_logo_url = "http://plopgrizzly.com/afi_docf/icon.png", html_favicon_url = "http://plopgrizzly.com/afi_docf/icon.png", html_root_url = "http://plopgrizzly.com/afi_docf/")] /// Text alignment #[repr(u8)] #[derive(PartialEq, Copy, Clone)] pub enum Align { /// Left aligned Left = 0u8, /// Horizontally centered Centered = 1u8, /// Right aligned Right = 2u8, /// Justified Justified = 3u8, } /// Text emphasis #[repr(u8)] #[derive(PartialEq, Copy, Clone)] pub enum Emphasis { /// Regular None = 0b_0000_0000_u8, /// Strikethrough StrikeOut = 0b_0000_0001_u8, /// Overline Overline = 0b_0000_0010_u8, /// Underline Continuous Underline = 0b_0000_0100_u8, /// Underline Discontinuous UnderlineDC = 0b_0000_1000_u8, /// Double Underline UnderlineX2 = 0b_0001_0000_u8, /// Invert Colors InvertColor = 0b_0010_0000_u8, /// Bold Bold = 0b_0100_0000_u8, /// Italic Italic = 0b_1000_0000_u8, } /// Text color #[repr(u8)] #[derive(PartialEq, Copy, Clone)] pub enum FontColor { /// Black on light background, or white on dark background Default, /// RGBA 32 bits RgbaInt(u8, u8, u8, u8), /// RGBA Floating Point RgbaFloat(f32, f32, f32, f32), }
true
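The Emphasis variants above are laid out as bit masks, so styles can be combined as plain u8 flags; a small sketch:

    // Bold | Underline: each variant occupies its own bit.
    let style = Emphasis::Bold as u8 | Emphasis::Underline as u8;
    assert_eq!(style, 0b0100_0100);
    // Checking a flag is a mask-and-compare.
    assert!(style & Emphasis::Bold as u8 != 0);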
a36b64f5594adef3f46eabf6fab0b38d853fbb1b
Rust
tamamu/gmml
/src/parse.rs
UTF-8
21,889
3.25
3
[ "MIT" ]
permissive
use std::collections::HashMap; use std::convert::From; use std::fs::File; use std::io; use std::io::prelude::*; #[derive(PartialEq, Debug, Clone)] pub enum Symbol { LeftBracket, RightBracket, LeftParen, RightParen, LeftBrace, RightBrace, Comma, Point, Semicolon, Colon, Minus, Lt, Bt, Equal, } #[derive(PartialEq, Debug, Clone)] pub enum Token { Identifier(String), Number(f64), String(String), Symbol(Symbol), Arrow, Newline, Whitespace, } #[derive(Clone)] pub struct Scanner { filename: String, buf: Vec<char>, pos: usize, sym: Vec<String>, } impl Scanner { pub fn new(filename: String) -> Self { let buf = open(&filename).expect("error when reading the file"); Self { filename: filename, buf: buf.chars().collect(), pos: 0, sym: Vec::new(), } } fn lex_string(&mut self) -> Option<(Token, usize)> { let start = self.pos + 1; let mut end = self.pos + 1; while let Some(&c) = self.buf.get(end) { if c == '"' { break; } else { end += 1; } } if end > self.buf.len() { None } else { let range = start..end; Some(( Token::String(self.buf[range].iter().collect()), end - start + 2, )) } } fn lex_number(&mut self) -> Option<(Token, usize)> { let start = self.pos; let mut end = self.pos; let mut point_count = 0; while self.pos < self.buf.len() { match self.buf.get(end) { Some('.') => { if point_count > 1 { break; } else { point_count += 1; } end += 1; } Some(c) if c.is_numeric() => { end += 1; } _ => { break; } } } if end >= self.buf.len() { None } else { let range = start..end; let tmp: String = self.buf[range].iter().collect(); Some((Token::Number(tmp.parse::<f64>().unwrap()), end - start)) } } fn lex_identifier(&mut self) -> Option<(Token, usize)> { let start = self.pos; let mut end = self.pos; while self.pos < self.buf.len() { match self.buf.get(end) { Some(c) if c.is_alphabetic() || c.is_numeric() || c == &'_' => { end += 1; } _ => { break; } } } if end >= self.buf.len() { None } else { let range = start..end; Some(( Token::Identifier(self.buf[range].iter().collect()), end - start, )) } } } impl Iterator for Scanner { type Item = Token; fn next(&mut self) -> Option<Token> { if self.pos >= self.buf.len() { return None; } let c = self.buf.get(self.pos).unwrap().clone(); match c { ' ' | '\t' => { self.pos += 1; while self.pos < self.buf.len() { let d = self.buf.get(self.pos).unwrap(); if d == &' ' || d == &'\t' { self.pos += 1; } else { break; } } Some(Token::Whitespace) } '"' => { let (tok, len) = self.lex_string().expect("parsing error: string start"); self.pos += len; Some(tok) } ':' => { self.pos += 1; Some(Token::Symbol(Symbol::Colon)) } ';' => { while self.pos < self.buf.len() && self.buf.get(self.pos).unwrap() != &'\n' { self.pos += 1; } Some(Token::Whitespace) } ',' => { self.pos += 1; Some(Token::Symbol(Symbol::Comma)) } '.' 
=> { self.pos += 1; Some(Token::Symbol(Symbol::Point)) } '(' => { self.pos += 1; Some(Token::Symbol(Symbol::LeftParen)) } ')' => { self.pos += 1; Some(Token::Symbol(Symbol::RightParen)) } '[' => { self.pos += 1; Some(Token::Symbol(Symbol::LeftBracket)) } ']' => { self.pos += 1; Some(Token::Symbol(Symbol::RightBracket)) } '{' => { self.pos += 1; Some(Token::Symbol(Symbol::LeftBrace)) } '}' => { self.pos += 1; Some(Token::Symbol(Symbol::RightBrace)) } '-' => { self.pos += 1; if self.pos < self.buf.len() && self.buf[self.pos] == '>' { self.pos += 1; Some(Token::Arrow) } else { Some(Token::Symbol(Symbol::Minus)) } } '>' => { self.pos += 1; Some(Token::Symbol(Symbol::Lt)) } '=' => { self.pos += 1; Some(Token::Symbol(Symbol::Equal)) } '\n' => { self.pos += 1; Some(Token::Newline) } c if c.is_alphabetic() => { let (tok, len) = self.lex_identifier().expect("parsing error: identifier"); self.pos += len; Some(tok) } c if c.is_numeric() => { let (tok, len) = self.lex_number().expect("parsing error: number"); self.pos += len; Some(tok) } c => { println!("unmatch: {}", c); None } } } } fn open(path: &str) -> io::Result<String> { let mut f = File::open(path)?; let mut contents = String::new(); f.read_to_string(&mut contents)?; Ok(contents) } #[derive(Debug, Clone)] pub enum AST { Block { name: String, content: Vec<AST> }, LeafDef { target: Box<AST>, stmt: Box<AST> }, Edge { from: Box<AST>, to: Box<AST> }, EdgeDef { target: Box<AST>, stmt: Box<AST> }, Struct(Vec<AST>), Message { name: String, args: Vec<AST> }, String(String), Number(f64), Symbol(String), List(Vec<AST>), } pub struct Parser { cur: usize, toks: Vec<Token>, } impl Parser { pub fn new(toks: Vec<Token>) -> Self { Self { cur: 0, toks: toks } } fn skip_blank(&mut self) { for i in self.cur..self.toks.len() { match self.toks[i] { Token::Newline => {} Token::Whitespace => {} _ => { self.cur = i; break; } } } } fn skip_whitespace(&mut self) { for i in self.cur..self.toks.len() { match self.toks[i] { Token::Whitespace => {} _ => { self.cur = i; break; } } } } fn parse_block(&mut self) -> Result<AST, String> { self.skip_blank(); let first = self.toks[self.cur].clone(); match first { Token::Symbol(Symbol::LeftBracket) => { self.cur += 1; } _ => { panic!("parsing error: expect ["); } } let second = self.toks[self.cur].clone(); self.cur += 1; let name = match second { Token::Identifier(name) => Some(name), _ => None, }; let name = name.expect("parsing error: expect identifier"); let third = self.toks[self.cur].clone(); self.cur += 1; match third { Token::Symbol(Symbol::RightBracket) => { self.skip_whitespace(); let fourth = self.toks[self.cur].clone(); self.cur += 1; match fourth { Token::Newline => { let content = self.parse_content()?; let block = AST::Block { name: name, content: content, }; Ok(block) } _ => panic!("parsing error: expect newline"), } } _ => { panic!("parsing error: expect ]"); } } } fn parse_content(&mut self) -> Result<Vec<AST>, String> { let mut content: Vec<AST> = Vec::new(); while self.cur < self.toks.len() { let head = self.toks[self.cur].clone(); match head { Token::Symbol(Symbol::LeftBracket) => { break; } Token::Newline => { self.cur += 1; break; } Token::Identifier(_) | Token::Number(_) | Token::String(_) => { let first = self.parse_target()?; self.skip_whitespace(); let second = self.toks[self.cur].clone(); self.cur += 1; match second { Token::Newline => { content.push(first); } Token::Symbol(Symbol::Colon) => match &first { AST::Edge { from: _, to: _ } => { self.skip_blank(); let third = self.parse_value()?; 
self.skip_whitespace(); let fourth = self.toks[self.cur].clone(); self.cur += 1; match fourth { Token::Newline => { let target = first.clone(); let stmt = third.clone(); content.push(AST::EdgeDef { target: Box::new(target), stmt: Box::new(stmt), }); } _ => panic!("parsing error: expect newline"), } } _ => panic!("parsing error: edge : stmt ?"), }, Token::Symbol(Symbol::Equal) => match &first { AST::Symbol(_) | AST::String(_) | AST::Number(_) => { self.skip_blank(); let third = self.parse_value()?; self.skip_whitespace(); let fourth = self.toks[self.cur].clone(); self.cur += 1; match fourth { Token::Newline => { let target = first.clone(); let stmt = third.clone(); content.push(AST::LeafDef { target: Box::new(target), stmt: Box::new(stmt), }); } _ => panic!("parsing error: expect newline"), } } _ => panic!("parsing error: leaf = stmt ?"), }, _ => panic!("parsing error: expect : or newline"), } } _ => panic!("parsing error: expect newline or identifier"), } } Ok(content) } fn parse_key(&mut self) -> Result<AST, String> { let first = self.toks[self.cur].clone(); self.cur += 1; match first { Token::Identifier(name) => Ok(AST::Symbol(name.to_string())), Token::String(string) => Ok(AST::String(string.to_string())), Token::Number(number) => Ok(AST::Number(number)), _ => panic!("parsing error: expect identifier or string or number"), } } fn parse_target(&mut self) -> Result<AST, String> { let left = self.parse_key()?; let cur = self.cur; self.skip_blank(); let second = self.toks[self.cur].clone(); self.cur += 1; match second { Token::Arrow => { self.skip_blank(); let right = self.parse_key()?; Ok(AST::Edge { from: Box::new(left), to: Box::new(right), }) } _ => { self.cur = cur; Ok(left) } } } fn parse_value(&mut self) -> Result<AST, String> { let first = self.toks[self.cur].clone(); match first { Token::Symbol(Symbol::LeftBrace) => self.parse_struct(), Token::Symbol(Symbol::LeftParen) => self.parse_list(), Token::String(string) => { self.cur += 1; Ok(AST::String(string)) } Token::Number(number) => { self.cur += 1; Ok(AST::Number(number)) } Token::Identifier(_) => self.parse_message(), _ => panic!("parsing error: expect {, string, number, or identifier"), } } fn parse_pair(&mut self) -> Result<AST, String> { let pair_left = self.parse_key()?; self.skip_blank(); let second = self.toks[self.cur].clone(); self.cur += 1; match second { Token::Symbol(Symbol::Colon) => {} _ => panic!("parsing error: expect :"), } self.skip_blank(); Ok(AST::LeafDef { target: Box::new(pair_left), stmt: Box::new(self.parse_value()?), }) } fn parse_list(&mut self) -> Result<AST, String> { let first = self.toks[self.cur].clone(); self.cur += 1; match first { Token::Symbol(Symbol::LeftParen) => {} _ => panic!("parsing error: expect ("), } let mut content: Vec<AST> = Vec::new(); self.skip_blank(); while self.cur < self.toks.len() { let second = self.toks[self.cur].clone(); match second { Token::Symbol(Symbol::RightParen) => { return Ok(AST::List(content)); } _ => {} } let value = self.parse_value()?; content.push(value); self.skip_blank(); let comma = self.toks[self.cur].clone(); match comma { Token::Symbol(Symbol::Comma) => { self.cur += 1; self.skip_blank(); } _ => { break; } } } let third = self.toks[self.cur].clone(); self.cur += 1; match third { Token::Symbol(Symbol::RightParen) => Ok(AST::List(content)), _ => panic!("parsing error: expect )"), } } fn parse_struct(&mut self) -> Result<AST, String> { let first = self.toks[self.cur].clone(); self.cur += 1; match first { Token::Symbol(Symbol::LeftBrace) => {} _ => 
panic!("parsing error: expect {"), } let mut content: Vec<AST> = Vec::new(); self.skip_blank(); while self.cur < self.toks.len() { let second = self.toks[self.cur].clone(); match second { Token::Symbol(Symbol::RightBrace) => { return Ok(AST::List(content)); } _ => {} } let pair = self.parse_pair()?; content.push(pair); self.skip_blank(); let comma = self.toks[self.cur].clone(); match comma { Token::Symbol(Symbol::Comma) => { self.cur += 1; self.skip_blank(); } _ => { break; } } } let third = self.toks[self.cur].clone(); self.cur += 1; match third { Token::Symbol(Symbol::RightBrace) => Ok(AST::Struct(content)), _ => panic!("parsing error: expect }"), } } fn parse_message(&mut self) -> Result<AST, String> { let first = self.toks[self.cur].clone(); self.cur += 1; let message_name: String; match first { Token::Identifier(name) => { message_name = name; } _ => panic!("parsing error: expect identifier"), } let second = self.toks[self.cur].clone(); match second { Token::Symbol(Symbol::LeftParen) => Ok(AST::Message { name: message_name, args: self.parse_args()?, }), _ => Ok(AST::Symbol(message_name)), } } fn parse_args(&mut self) -> Result<Vec<AST>, String> { let first = self.toks[self.cur].clone(); self.cur += 1; match first { Token::Symbol(Symbol::LeftParen) => {} _ => panic!("parsing error: expect ("), } let mut content: Vec<AST> = Vec::new(); self.skip_blank(); while self.cur < self.toks.len() { let second = self.toks[self.cur].clone(); match second { Token::Symbol(Symbol::RightParen) => { break; } _ => {} } let value = self.parse_value()?; content.push(value); self.skip_blank(); let comma = self.toks[self.cur].clone(); match comma { Token::Symbol(Symbol::Comma) => { self.cur += 1; self.skip_blank(); } _ => { break; } } } let third = self.toks[self.cur].clone(); self.cur += 1; match third { Token::Symbol(Symbol::RightParen) => Ok(content), _ => panic!("parsing error: expect )"), } } fn get_ast(&mut self) -> Result<Vec<AST>, String> { let mut blocks: Vec<AST> = Vec::new(); while self.cur < self.toks.len() { let block = self.parse_block()?; blocks.push(block); } Ok(blocks) } pub fn parse(&mut self) -> Result<HashMap<String, GValue>, String> { let root_ast = self.get_ast()?; let mut root: HashMap<String, GValue> = HashMap::new(); let blocks: Vec<GValue> = root_ast.into_iter().map(GValue::from).collect(); for block in blocks { match block { GValue::Pair(key, value) => match *key { GValue::String(key_string) => { root.entry(key_string).or_insert(*value); } _ => panic!("convert error: key should be String"), }, _ => panic!("convert error: invalid block syntax"), } } Ok(root) } } #[derive(Debug)] pub enum GValue { String(String), Number(f64), Symbol(String), Message(String, Vec<GValue>), Edge(Box<GValue>, Box<GValue>), Vec(Vec<GValue>), Pair(Box<GValue>, Box<GValue>), } impl From<AST> for GValue { fn from(ast: AST) -> Self { match ast { AST::String(string) => GValue::String(string.to_string()), AST::Number(number) => GValue::Number(number), AST::Symbol(name) => GValue::Symbol(name.to_string()), AST::LeafDef { target, stmt } => GValue::Pair( Box::new(GValue::from(*target)), Box::new(GValue::from(*stmt)), ), AST::Edge { from, to } => { GValue::Edge(Box::new(GValue::from(*from)), Box::new(GValue::from(*to))) } AST::EdgeDef { target, stmt } => GValue::Pair( Box::new(GValue::from(*target)), Box::new(GValue::from(*stmt)), ), AST::Message { name, args } => { GValue::Message(name, args.into_iter().map(GValue::from).collect()) } AST::Struct(content) => 
GValue::Vec(content.into_iter().map(GValue::from).collect()), AST::List(content) => GValue::Vec(content.into_iter().map(GValue::from).collect()), AST::Block { name, content } => GValue::Pair( Box::new(GValue::String(name.to_string())), Box::new(GValue::Vec(content.into_iter().map(GValue::from).collect())), ), } } }
true
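A hypothetical end-to-end use of the scanner and parser above; the input path is made up, and Scanner::new will panic if the file cannot be read:

    let scanner = Scanner::new("scene.gmml".to_string()); // hypothetical input file
    let tokens: Vec<Token> = scanner.collect();           // Scanner is an Iterator<Item = Token>
    let mut parser = Parser::new(tokens);
    let root = parser.parse().expect("parse error");      // HashMap<String, GValue>
    for (block, value) in &root {
        println!("[{}] -> {:?}", block, value);
    }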
ce7418e73977e0bb0e983f6999eccc6455caf4b5
Rust
nixpulvis/hell
/take5/take5-remote/src/starting_hand/mod.rs
UTF-8
2,269
3.453125
3
[]
no_license
use take5::Configuration; use wrapper::Card; /// A hand which must have `take5::Configuration::turn_count()` cards /// in it. #[derive(Debug, Serialize)] pub struct StartingHand(Vec<Card>); impl StartingHand { pub fn new(cards: Vec<Card>) -> Result<Self, err::Error> { if cards.len() != Configuration::turn_count() { Err(err::Error::CardLength(cards.len())) } else { Ok(StartingHand(cards)) } } pub fn take(self) -> Vec<Card> { self.0 } } mod err; mod de; #[cfg(test)] mod tests { use super::*; use serde_json as json; use wrapper::Card; #[test] fn test_new() { let mut cards = Card::deck(|_| 3); cards.split_off(10); let hand = StartingHand::new(cards); assert!(hand.is_ok()); } #[test] fn test_new_too_few() { let mut cards = Card::deck(|_| 3); cards.split_off(9); let hand = StartingHand::new(cards); assert!(hand.is_err()); } #[test] fn test_new_too_many() { let mut cards = Card::deck(|_| 3); cards.split_off(11); let hand = StartingHand::new(cards); assert!(hand.is_err()); } #[test] fn test_encode_starting_hand() { let mut cards = Card::deck(|_| 2); cards.split_off(10); let hand = StartingHand::new(cards).unwrap(); let json = json::to_string(&hand).unwrap(); assert_eq!("[[1,2],[2,2],[3,2],[4,2],[5,2],[6,2],[7,2],[8,2],[9,2],[10,2]]", json); } #[test] fn test_decode_starting_hand() { let json = "[[1,2],[2,2],[3,2],[4,2],[5,2],[6,2],[7,2],[8,2],[9,2],[10,2]]"; let hand = json::from_str::<StartingHand>(json).unwrap(); assert_eq!(10, hand.take().len()); } #[test] fn test_decode_starting_hand_too_few() { let json = "[[1,2],[2,2],[3,2],[4,2],[5,2],[6,2],[7,2],[8,2],[9,2]]"; let result = json::from_str::<StartingHand>(json); assert!(result.is_err()); } #[test] fn test_decode_starting_hand_too_many() { let json = "[[1,2],[2,2],[3,2],[4,2],[5,2],[6,2],[7,2],[8,2],[9,2],[10,2],[11,2]]"; let result = json::from_str::<StartingHand>(json); assert!(result.is_err()); } }
true
6e4335621105da221f66a3103a550f06068d3910
Rust
matthewmturner/Bradfield-Distributed-Systems
/blue/src/client/handler.rs
UTF-8
2,500
2.921875
3
[]
no_license
use std::io::{self, BufRead, BufReader, ErrorKind, Stdin}; use super::super::ipc::message; use super::super::ipc::message::request::Command; pub fn read_client_request(stdin: &mut Stdin) -> io::Result<String> { let mut reader = BufReader::new(stdin); let mut line = String::new(); reader.read_line(&mut line)?; Ok(line) } pub fn parse_request(input: String) -> io::Result<message::Request> { let tokens: Vec<&str> = input.split(' ').collect(); let command = extract_command(tokens)?; // println!("{:?}", command); Ok(message::Request { command: Some(command), }) } fn extract_command(tokens: Vec<&str>) -> io::Result<Command> { let command = match tokens[0].trim() { "get" | "Get" | "GET" => Ok(get_handler(&tokens)?), "set" | "Set" | "SET " => Ok(set_handler(&tokens)?), // "backup" | "Backup" | "BACKUP " => Ok(backup_handler(&tokens)?), _ => Err(io::Error::new(ErrorKind::InvalidData, "Invalid command")), }; command } fn get_handler(tokens: &[&str]) -> io::Result<Command> { match tokens.len() { 1 => Ok(Command::Get(message::Get::default())), 2 => Ok(Command::Get(message::Get { key: tokens[1].trim().to_string(), write_to_wal: false, })), _ => Err(io::Error::new( ErrorKind::InvalidData, "Too many tokens for get command", )), } } fn set_handler(tokens: &[&str]) -> io::Result<Command> { match tokens.len() { 2 => { let pairs: Vec<&str> = tokens[1].split('=').collect(); Ok(Command::Set(message::Set { key: pairs[0].to_string(), value: pairs[1].trim().to_string(), write_to_wal: true, })) } _ => Err(io::Error::new( ErrorKind::InvalidData, "Too many tokens for get command", )), } } // fn backup_handler(tokens: &Vec<&str>) -> io::Result<Command> { // println!("{:?}", tokens); // match tokens.len() { // 2 => { // let addr = // SocketAddr::from_str(tokens[1].trim()).expect("TODO: Handle poorly formatted addr"); // Ok(Command::InitiateBackup(message::InitiateBackup { // addr: addr.to_string(), // })) // } // _ => Err(io::Error::new( // ErrorKind::InvalidData, // "Too many tokens for get command", // )), // } // }
true
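A hypothetical check of the request parser above; it assumes the generated message::Set fields are public, as they are already constructed by name in set_handler:

    let req = parse_request("set foo=bar\n".to_string()).unwrap();
    match req.command {
        Some(Command::Set(set)) => {
            assert_eq!(set.key, "foo");
            assert_eq!(set.value, "bar");
            assert!(set.write_to_wal); // sets are written to the WAL
        }
        _ => panic!("expected a Set command"),
    }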
8b32a06a510d56837c12c1fa99843aeb20f24cd9
Rust
purplg/orrient
/src/cli.rs
UTF-8
4,873
3.109375
3
[ "MIT" ]
permissive
use std::path::PathBuf; use chrono::Duration; use clap::{App, Arg}; /// Contains all the possible arguments passed from the command line. #[derive(Debug)] pub struct Options { pub config_path: Option<PathBuf>, pub gateway: Option<String>, pub apikey: Option<String>, pub offline: bool, pub verbose: bool, pub cache_path: Option<String>, pub cache_age: Option<Duration>, pub cache_compression: bool, pub starting_tab: Option<usize>, } impl Default for Options { fn default() -> Self { Options { config_path: None, gateway: None, apikey: None, offline: false, verbose: false, cache_path: None, cache_age: None, cache_compression: false, starting_tab: None, } } } impl Options { // Automatically grabs, parses, and returns an Options object with all the selected user options pub fn new() -> Self { #[rustfmt::skip] let matches = App::new("Orrient") .version("0.1.0") .about("Keep track of dailies, achievements, crafting, etc in Guild Wars 2") .args(&vec![ Arg::with_name("config") .short("c") .long("config") .value_name("FILE") .help("Specify a config file to use") .takes_value(true), Arg::with_name("gateway") .short("g") .long("gateway") .value_name("URL") .help("Specify a different API gateway to use") .takes_value(true), Arg::with_name("apikey") .short("k") .long("apikey") .value_name("API KEY") .help("Specify an API key to use for requests that require it") .takes_value(true), Arg::with_name("offline") .short("o") .long("offline") .alias("dryrun") .help("Only use the local cache. Do not query GW2 API"), Arg::with_name("dryrun") .long("dryrun") .alias("offline") .help("Same thing as --offline"), Arg::with_name("verbose") .short("v") .long("verbose") .help("Enable verbose/debug logging"), Arg::with_name("cache-path") .short("h") .long("cache-path") .help("The location to store the cache") .takes_value(true), Arg::with_name("cache-age") .short("a") .long("cache-age") .help("The maximum age of cached items (in seconds) before they'll be refetched") .takes_value(true), Arg::with_name("cache-compression") .short("z") .long("cache-compress") .help("Compress the cache file"), Arg::with_name("starting-tab") .short("t") .long("starting-tab") .value_name("TAB_NUMBER") .takes_value(true) .help("The tab number to open on."), ]) .get_matches(); let mut options = Options::default(); if let Some(config_path) = matches.value_of("config") { options.config_path = Some(PathBuf::from(config_path.to_string())); } else { options.config_path = config_path(); } if let Some(cache_path) = matches.value_of("cache-file") { options.cache_path = Some(cache_path.to_string()); } if let Some(cache_age) = matches.value_of("cache-age") { if let Ok(cache_age) = cache_age.parse::<i64>() { options.cache_age = Some(Duration::seconds(cache_age)); } } options.gateway = matches.value_of("gateway").map(ToOwned::to_owned); options.apikey = matches.value_of("apikey").map(ToOwned::to_owned); options.offline = matches.is_present("offline"); options.verbose = matches.is_present("verbose"); options.cache_compression = matches.is_present("cache-compression"); options.starting_tab = matches .value_of("starting-tab") .map(|starting_tab| starting_tab.parse::<usize>().ok()) .flatten(); options } } pub fn config_path() -> Option<PathBuf> { if let Some(mut config_dir) = dirs::config_dir() { config_dir.push("orrient/config.yaml"); Some(config_dir) } else { None } }
true
c678dd0ae5f36835349e90cbb6df9b3370796f60
Rust
JefersonBatista/random-writer
/src/markov_chain.rs
UTF-8
1,554
3.59375
4
[]
no_license
use std::collections::HashMap; pub fn build(text: &str, k: usize) -> HashMap<&str, String> { let mut chain = start_building(&text, k); let first_state_byte_count = after_last_char_pos(&text, 0, k); let mut state = &text[..first_state_byte_count]; let rest_of_text = &text[first_state_byte_count..]; let mut begin = 0; let mut end = first_state_byte_count; for ch in rest_of_text.chars() { begin = after_last_char_pos(&text, begin, 1); end = after_last_char_pos(&text, end, 1); let next_state = &text[begin..end]; chain.entry(state).and_modify(|edges| edges.push(ch)); chain.entry(next_state).or_insert_with(String::new); state = next_state; } chain } fn start_building(text: &str, k: usize) -> HashMap<&str, String> { let mut chain = HashMap::new(); chain.insert(&text[..after_last_char_pos(&text, 0, k)], String::new()); chain } /** * Rust slices str by bytes, not by chars. * To deal with it, this function receives a text (&str), * a initial position (for bytes) and the quantity of * desired chars, and returns the position of the next byte * after the last byte of the last char. The init must be * char boundary for text. */ pub fn after_last_char_pos(text: &str, init: usize, qtd: usize) -> usize { if qtd < 1 { init } else { let mut pos = init; for _ in 0..qtd { pos += 1; while !text.is_char_boundary(pos) { pos += 1; } } pos } }
true
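A small worked example of build above: with order k = 2 on the text "abab", each two-character state maps to the characters that follow it.

    let chain = build("abab", 2);
    assert_eq!(chain.get("ab").map(String::as_str), Some("a"));
    assert_eq!(chain.get("ba").map(String::as_str), Some("b"));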
d95d905f564a99e5c10af4754eee67262ceadb9d
Rust
surfingtomchen/rust-exercism
/sieve/src/lib.rs
UTF-8
364
2.734375
3
[]
no_license
pub fn primes_up_to(upper_bound: u64) -> Vec<u64> { let mut v: Vec<u64> = (0..=upper_bound).collect(); for i in 2..=(upper_bound as f64).sqrt() as usize { if v[i] == 0 { continue; } for j in (i * 2..=upper_bound as usize).step_by(i) { v[j] = 0; } } v.iter().skip(2).filter(|x| **x != 0).map(|x| *x).collect() }
true
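A quick check of the sieve above:

    // 4, 6, 8, 9 and 10 are crossed out; indices 0 and 1 are skipped outright.
    assert_eq!(primes_up_to(10), vec![2, 3, 5, 7]);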
c162ec16a863b0e56c524cda9665047545affd34
Rust
ErinBailey/tic-tac-toe
/src/board.rs
UTF-8
4,363
3.9375
4
[]
no_license
#![allow(dead_code)] use std::fmt; #[derive(Debug, Default)] pub struct Board { player: Player, rows: [[Option<Player>; 3]; 3] } impl Board { pub fn new() -> Board { Default::default() } pub fn make_move(&mut self, position: usize) { let column = (position-1)%3; let row = (position-1)/3; self.rows[row][column] = Some(self.player); self.player = self.player.opponent(); } pub fn calculate_winner(&self) -> Option<Player> { for row in self.rows.iter() { if row.iter().all(|x| x == &Some(Player::X)) { return Some(Player::X); } } let columns = (0..3).map(|column| { (0..3).map(move |row| { self.rows[row][column] }) }); for mut column in columns { if column.all(|x| x == Some(Player::X)) { return Some(Player::X); } } None } } impl fmt::Display for Board { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let mut count = 0; let board = self.rows.into_iter().map(|row| { row.into_iter().map(|cell| { count += 1; match *cell { None => count.to_string(), Some(Player::X) => "X".to_owned(), Some(Player::O) => "O".to_owned(), } }).collect::<Vec<String>>().join("|") }).collect::<Vec<String>>().join("\n-----\n"); write!(formatter, "{}", board) } } #[derive(Debug, Clone, Copy, PartialEq)] pub enum Player { X, O } impl Player { fn opponent(self) -> Player { match self { Player::O => Player::X, Player::X => Player::O } } } impl Default for Player { fn default() -> Player { Player::X } } #[cfg(test)] mod tests { use super::*; #[test] fn it_can_create_a_board() { let board = Board::new(); let printed_board = format!("{}", board); assert_eq!(printed_board, vec![ "1|2|3", "-----", "4|5|6", "-----", "7|8|9", ].join("\n")) } #[test] fn it_can_make_player1_move() { let mut board = Board::new(); board.make_move(3); let printed_board = format!("{}", board); assert_eq!(printed_board, vec![ "1|2|X", "-----", "4|5|6", "-----", "7|8|9", ].join("\n")) } #[test] fn it_can_make_player2_move() { let mut board = Board::new(); board.make_move(1); board.make_move(5); let printed_board = format!("{}", board); assert_eq!(printed_board, vec![ "X|2|3", "-----", "4|O|6", "-----", "7|8|9", ].join("\n")) } #[test] fn it_can_make_multiple_moves() { let mut board = Board::new(); board.make_move(2); board.make_move(6); board.make_move(8); let printed_board = format!("{}", board); assert_eq!(printed_board, vec![ "1|X|3", "-----", "4|5|O", "-----", "7|X|9", ].join("\n")) } #[test] fn there_is_no_winner_when_no_moves_are_made() { test_calculate_winner(vec![], None) } #[test] fn there_is_no_winner_mid_game() { test_calculate_winner(vec![1, 2, 3, 4, 5], None) } #[test] fn player_x_wins_horizontally_1_2_3() { test_calculate_winner(vec![1, 5, 2, 7, 3], Some(Player::X)) } #[test] fn player_x_wins_horizontally_4_5_6() { test_calculate_winner(vec![4, 8, 5, 7, 6], Some(Player::X)) } #[test] fn player_x_wins_horizontally_7_8_9() { test_calculate_winner(vec![7, 3, 8, 5, 9], Some(Player::X)) } #[test] fn player_x_wins_vertically_1_4_7() { test_calculate_winner(vec![1, 2, 4, 5, 7], Some(Player::X)) } fn test_calculate_winner(moves: Vec<usize>, player: Option<Player>) { let mut board = Board::new(); for position in moves { board.make_move(position); } assert_eq!(board.calculate_winner(), player); } }
true
9f447c0766ed83acfbe1b276658b498bcddf2a6f
Rust
ruma/ruma
/crates/ruma-macros/src/serde/attr.rs
UTF-8
1,464
2.65625
3
[ "MIT" ]
permissive
use syn::{ parse::{Parse, ParseStream}, LitStr, Token, }; use super::case::RenameRule; mod kw { syn::custom_keyword!(alias); syn::custom_keyword!(rename); syn::custom_keyword!(rename_all); } #[derive(Default)] pub struct EnumAttrs { pub rename: Option<LitStr>, pub aliases: Vec<LitStr>, } pub enum Attr { Alias(LitStr), Rename(LitStr), } impl Parse for Attr { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let lookahead = input.lookahead1(); if lookahead.peek(kw::alias) { let _: kw::alias = input.parse()?; let _: Token![=] = input.parse()?; Ok(Self::Alias(input.parse()?)) } else if lookahead.peek(kw::rename) { let _: kw::rename = input.parse()?; let _: Token![=] = input.parse()?; Ok(Self::Rename(input.parse()?)) } else { Err(lookahead.error()) } } } pub struct RenameAllAttr(RenameRule); impl RenameAllAttr { pub fn into_inner(self) -> RenameRule { self.0 } } impl Parse for RenameAllAttr { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _: kw::rename_all = input.parse()?; let _: Token![=] = input.parse()?; let s: LitStr = input.parse()?; Ok(Self( s.value() .parse() .map_err(|_| syn::Error::new_spanned(s, "invalid value for rename_all"))?, )) } }
true
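A small driver makes `Parse` impls like the ones above easier to follow. The following standalone sketch is not part of the crate above: the single-variant `Attr` enum and the `"m.example"` input are hypothetical, and it assumes syn with its default features. It shows the usual way such attribute parsers are exercised, by feeding the tokens found inside an attribute to `Punctuated::parse_terminated` through the `Parser` trait.

// Minimal standalone sketch (hypothetical types and input); mirrors the shape
// of the `Parse` impls in the file above without reproducing them exactly.
use syn::{
    parse::{Parse, ParseStream, Parser},
    punctuated::Punctuated,
    LitStr, Token,
};

mod kw {
    syn::custom_keyword!(rename);
}

enum Attr {
    Rename(LitStr),
}

impl Parse for Attr {
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        // Accepts `rename = "..."`, in the same style as the parser above.
        let _: kw::rename = input.parse()?;
        let _: Token![=] = input.parse()?;
        Ok(Self::Rename(input.parse()?))
    }
}

fn main() -> syn::Result<()> {
    // Tokens as they would appear inside an attribute list,
    // e.g. `#[some_attr(rename = "m.example")]`.
    let attrs = Punctuated::<Attr, Token![,]>::parse_terminated
        .parse_str(r#"rename = "m.example""#)?;
    for attr in attrs {
        let Attr::Rename(lit) = attr;
        assert_eq!(lit.value(), "m.example");
    }
    Ok(())
}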
cb4f76bcddc840aefe1e05d16b611e0dbe3afe4a
Rust
oshunter/fuchsia
/third_party/rust_crates/vendor/tokio/src/task/blocking.rs
UTF-8
5,238
3.59375
4
[ "MIT", "BSD-3-Clause" ]
permissive
use crate::task::JoinHandle;

cfg_rt_threaded! {
    /// Runs the provided blocking function on the current thread without
    /// blocking the executor.
    ///
    /// In general, issuing a blocking call or performing a lot of compute in a
    /// future without yielding is not okay, as it may prevent the executor from
    /// driving other futures forward. This function runs the closure on the
    /// current thread by having the thread temporarily cease from being a core
    /// thread, and turns it into a blocking thread. See the [CPU-bound tasks
    /// and blocking code][blocking] section for more information.
    ///
    /// Although this function avoids starving other independently spawned
    /// tasks, any other code running concurrently in the same task will be
    /// suspended during the call to `block_in_place`. This can happen e.g. when
    /// using the [`join!`] macro. To avoid this issue, use [`spawn_blocking`]
    /// instead.
    ///
    /// Note that this function can only be used on the [threaded scheduler].
    ///
    /// Code running behind `block_in_place` cannot be cancelled. When you shut
    /// down the executor, it will wait indefinitely for all blocking operations
    /// to finish. You can use [`shutdown_timeout`] to stop waiting for them
    /// after a certain timeout. Be aware that this will still not cancel the
    /// tasks — they are simply allowed to keep running after the method
    /// returns.
    ///
    /// [blocking]: ../index.html#cpu-bound-tasks-and-blocking-code
    /// [threaded scheduler]: fn@crate::runtime::Builder::threaded_scheduler
    /// [`spawn_blocking`]: fn@crate::task::spawn_blocking
    /// [`join!`]: ../macro.join.html
    /// [`thread::spawn`]: fn@std::thread::spawn
    /// [`shutdown_timeout`]: fn@crate::runtime::Runtime::shutdown_timeout
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio::task;
    ///
    /// # async fn docs() {
    /// task::block_in_place(move || {
    ///     // do some compute-heavy work or call synchronous code
    /// });
    /// # }
    /// ```
    #[cfg_attr(docsrs, doc(cfg(feature = "blocking")))]
    pub fn block_in_place<F, R>(f: F) -> R
    where
        F: FnOnce() -> R,
    {
        crate::runtime::thread_pool::block_in_place(f)
    }
}

cfg_blocking! {
    /// Runs the provided closure on a thread where blocking is acceptable.
    ///
    /// In general, issuing a blocking call or performing a lot of compute in a
    /// future without yielding is not okay, as it may prevent the executor from
    /// driving other futures forward. This function runs the provided closure
    /// on a thread dedicated to blocking operations. See the [CPU-bound tasks
    /// and blocking code][blocking] section for more information.
    ///
    /// Tokio will spawn more blocking threads when they are requested through
    /// this function until the upper limit configured on the [`Builder`] is
    /// reached. This limit is very large by default, because `spawn_blocking` is
    /// often used for various kinds of IO operations that cannot be performed
    /// asynchronously. When you run CPU-bound code using `spawn_blocking`, you
    /// should keep this large upper limit in mind; to run your CPU-bound
    /// computations on only a few threads, you should use a separate thread
    /// pool such as [rayon] rather than configuring the number of blocking
    /// threads.
    ///
    /// This function is intended for non-async operations that eventually
    /// finish on their own. If you want to spawn an ordinary thread, you should
    /// use [`thread::spawn`] instead.
    ///
    /// Closures spawned using `spawn_blocking` cannot be cancelled. When you
    /// shut down the executor, it will wait indefinitely for all blocking
    /// operations to finish. You can use [`shutdown_timeout`] to stop waiting
    /// for them after a certain timeout. Be aware that this will still not
    /// cancel the tasks — they are simply allowed to keep running after the
    /// method returns.
    ///
    /// Note that if you are using the [basic scheduler], this function will
    /// still spawn additional threads for blocking operations. The basic
    /// scheduler's single thread is only used for asynchronous code.
    ///
    /// [`Builder`]: struct@crate::runtime::Builder
    /// [blocking]: ../index.html#cpu-bound-tasks-and-blocking-code
    /// [rayon]: https://docs.rs/rayon
    /// [basic scheduler]: fn@crate::runtime::Builder::basic_scheduler
    /// [`thread::spawn`]: fn@std::thread::spawn
    /// [`shutdown_timeout`]: fn@crate::runtime::Runtime::shutdown_timeout
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio::task;
    ///
    /// # async fn docs() -> Result<(), Box<dyn std::error::Error>>{
    /// let res = task::spawn_blocking(move || {
    ///     // do some compute-heavy work or call synchronous code
    ///     "done computing"
    /// }).await?;
    ///
    /// assert_eq!(res, "done computing");
    /// # Ok(())
    /// # }
    /// ```
    pub fn spawn_blocking<F, R>(f: F) -> JoinHandle<R>
    where
        F: FnOnce() -> R + Send + 'static,
        R: Send + 'static,
    {
        crate::runtime::spawn_blocking(f)
    }
}
true