Dataset schema: blob_id (string, length 40) | language (string, 1 value) | repo_name (string, length 5-140) | path (string, length 5-183) | src_encoding (string, 6 values) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, length 0-47) | license_type (string, 2 values) | text (string, length 12-5.32M) | download_success (bool, 1 value)
blob_id: 3da47e82ef4a92204426305d0954e78c45a7d084 | language: Rust | repo_name: ISibboI/go_ai | path: /src/main.rs | src_encoding: UTF-8 | length_bytes: 7,817 | score: 2.78125 | int_score: 3 | detected_licenses: [] | license_type: no_license | text:
use ggez::{Context, ContextBuilder, GameResult};
use ggez::event::{self, EventHandler, MouseButton};
use ggez::graphics;
use go::GoGame;
use go::ai::{GoAI, voronoi_ai::VoronoiAI};
use go::board::{GoStone, GoCoordinates};
use ui::button::Button;
use ui::label::Label;
pub mod go;
pub mod ui;
const BACKGROUND_COLOR: graphics::Color = graphics::Color::new(0.831, 0.776, 0.509, 1.0);
const BOARD_LINE_COLOR: graphics::Color = graphics::Color::new(0.2, 0.1, 0.1, 1.0);
const WHITE_STONE_COLOR: graphics::Color = graphics::Color::new(0.9, 0.9, 0.9, 1.0);
const BLACK_STONE_COLOR: graphics::Color = graphics::Color::new(0.1, 0.1, 0.1, 1.0);
const NO_TEINT: graphics::Color = graphics::Color::WHITE;
const GHOST_TEINT: graphics::Color = graphics::Color::new(1.0, 1.0, 1.0, 0.5);
const WINDOW_WIDTH: u16 = 800;
const WINDOW_HEIGHT: u16 = 600;
fn main() {
// Make a Context and an EventLoop.
let (mut ctx, event_loop) =
ContextBuilder::new("Go AI", "Sebastian Schmidt")
.build()
.unwrap();
// Create an instance of your event handler.
// Usually, you should provide it with the Context object
// so it can load resources like images during setup.
let my_game = MyGame::new(&mut ctx, VoronoiAI::new());
// Run!
/*match event::run(ctx, event_loop, my_game) {
Ok(_) => println!("Exited cleanly."),
Err(_) => println!("Error"), //println!("Error occured: {}", e)
}*/
event::run(ctx, event_loop, my_game)
}
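// UI state for a 9x9 game: `grid` holds the pixel center of each board line, the human plays
// black with the mouse, and the AI answers as white from `update`.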
struct MyGame<AI> {
grid: [f32; 9],
grid_box_len: f32,
game: GoGame,
mouse_x: f32,
mouse_y: f32,
undo_button: Button,
black_captures_label: Label,
white_captures_label: Label,
ai: AI,
}
impl<AI: GoAI> MyGame<AI> {
pub fn new(_ctx: &mut Context, mut ai: AI) -> Self {
// Load/create resources here: images, fonts, sounds, etc.
let mut grid = [0.0; 9];
let square_side_len = WINDOW_WIDTH.min(WINDOW_HEIGHT) as f32;
let grid_box_len = square_side_len / 9.0;
for i in 0..9 {
grid[i] = (i as f32 + 0.5) * grid_box_len;
}
let undo_button = Button::new(Label::new("Undo", [610.0, 10.0].into()));
let black_captures_label = Label::new("Black captures: 0", [610.0, 30.0].into());
let white_captures_label = Label::new("White captures: 0", [610.0, 50.0].into());
let game = GoGame::new();
ai.set_game(game.clone());
MyGame { grid, grid_box_len, game, mouse_x: -1.0, mouse_y: -1.0, undo_button, black_captures_label, white_captures_label, ai }
}
}
impl<AI: GoAI> EventHandler for MyGame<AI> {
fn mouse_motion_event(
&mut self,
_ctx: &mut Context,
x: f32,
y: f32,
_dx: f32,
_dy: f32
) {
self.mouse_x = x;
self.mouse_y = y;
}
fn mouse_button_up_event(
&mut self,
ctx: &mut Context,
button: MouseButton,
x: f32,
y: f32
) {
if button == MouseButton::Left && x >= 0.0 && y >= 0.0 && x <= self.grid_box_len * 9.0 && y <= self.grid_box_len * 9.0 && self.game.current_turn() == GoStone::BLACK {
let x = ((x / self.grid_box_len).max(0.0) as usize).min(8);
let y = ((y / self.grid_box_len).max(0.0) as usize).min(8);
match self.game.play_stone(GoCoordinates::new_usize(x, y)) {
Ok(_) => self.ai.set_game(self.game.clone()),
Err(_) => println!("Could not play stone"),
}
}
self.undo_button.mouse_button_up_event(ctx, button, x, y);
}
fn update(&mut self, _ctx: &mut Context) -> GameResult<()> {
if self.undo_button.consume_was_clicked() {
match self.game.undo() {
Ok(_) => self.ai.set_game(self.game.clone()),
Err(_) => println!("Could not undo"),
}
}
if self.game.current_turn() == GoStone::WHITE {
if let Some(coordinates) = self.ai.best_move() {
match self.game.play_stone(coordinates) {
Ok(_) => self.ai.set_game(self.game.clone()),
Err(_) => println!("AI move {}, {} is invalid", coordinates.x(), coordinates.y()),
}
}
}
self.black_captures_label.set_text(&format!("Black captures: {}", self.game.black_captures()));
self.white_captures_label.set_text(&format!("White captures: {}", self.game.white_captures()));
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {
graphics::clear(ctx, BACKGROUND_COLOR);
let mut board = graphics::MeshBuilder::new();
for i in 0..9 {
board.line(&[graphics::mint::Point2::from([self.grid[0], self.grid[i]]), graphics::mint::Point2::from([self.grid[8], self.grid[i]])], 2.0, BOARD_LINE_COLOR)?;
board.line(&[graphics::mint::Point2::from([self.grid[i], self.grid[0]]), graphics::mint::Point2::from([self.grid[i], self.grid[8]])], 2.0, BOARD_LINE_COLOR)?;
}
board.circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([self.grid[2], self.grid[2]]), 6.0, 1.0, BOARD_LINE_COLOR)?;
board.circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([self.grid[2], self.grid[6]]), 6.0, 1.0, BOARD_LINE_COLOR)?;
board.circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([self.grid[6], self.grid[2]]), 6.0, 1.0, BOARD_LINE_COLOR)?;
board.circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([self.grid[6], self.grid[6]]), 6.0, 1.0, BOARD_LINE_COLOR)?;
let board = board.build(ctx)?;
graphics::draw(ctx, &board, (graphics::mint::Point2::from([0.0, 0.0]), 0.0, NO_TEINT))?;
let black_stone = graphics::MeshBuilder::new().circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([0.0, 0.0]), self.grid_box_len / 2.0, 0.5, BLACK_STONE_COLOR)?.build(ctx)?;
let white_stone = graphics::MeshBuilder::new().circle(graphics::DrawMode::fill(), graphics::mint::Point2::from([0.0, 0.0]), self.grid_box_len / 2.0, 0.5, WHITE_STONE_COLOR)?.build(ctx)?;
for x in 0..9 {
for y in 0..9 {
let c = GoCoordinates::new_usize(x, y);
match self.game.current_board().get_stone(c) {
GoStone::BLACK => graphics::draw(ctx, &black_stone, (graphics::mint::Point2::from([self.grid[x], self.grid[y]]), 0.0, NO_TEINT))?,
GoStone::WHITE => graphics::draw(ctx, &white_stone, (graphics::mint::Point2::from([self.grid[x], self.grid[y]]), 0.0, NO_TEINT))?,
GoStone::NONE => {
let x_f32 = x as f32;
let y_f32 = y as f32;
if self.grid_box_len * x_f32 < self.mouse_x && self.mouse_x < self.grid_box_len * (x_f32 + 1.0) &&
self.grid_box_len * y_f32 < self.mouse_y && self.mouse_y < self.grid_box_len * (y_f32 + 1.0) {
match self.game.current_turn() {
GoStone::BLACK => graphics::draw(ctx, &black_stone, (graphics::mint::Point2::from([self.grid[x], self.grid[y]]), 0.0, GHOST_TEINT))?,
GoStone::WHITE => graphics::draw(ctx, &white_stone, (graphics::mint::Point2::from([self.grid[x], self.grid[y]]), 0.0, GHOST_TEINT))?,
GoStone::NONE => {}
}
}
}
}
}
}
self.undo_button.draw(ctx)?;
self.black_captures_label.draw(ctx)?;
self.white_captures_label.draw(ctx)?;
graphics::draw_queued_text(ctx, graphics::DrawParam::default(), None, graphics::FilterMode::Nearest)?;
graphics::present(ctx)
}
}
download_success: true

blob_id: 8480ece67f1dcba5cd5ee0942c8b76193604bed4 | language: Rust | repo_name: ThePerkinrex/ArmesCPU_old_RUST | path: /armes_cpu_wasm/src/lib.rs | src_encoding: UTF-8 | length_bytes: 2,525 | score: 2.578125 | int_score: 3 | detected_licenses: [] | license_type: no_license | text:
extern crate wasm_bindgen;
extern crate console_error_panic_hook;
use std::{panic, thread};
use wasm_bindgen::prelude::*;
use std::ops::Deref;
mod wasm_memory;
use wasm_memory::WasmMemory;
mod configloader;
extern crate armes_cpu_lib;
use armes_cpu_lib::{compile, Config, ConnectedRegister, Bus};
use std::collections::HashMap;
#[wasm_bindgen]
extern {
fn alert(s: &str);
fn printToCLI(s: &str);
#[wasm_bindgen(js_namespace = console)]
fn log(s: &str);
}
macro_rules! console_log {
// Note that this is using the `log` function imported above during
// `bare_bones`
($($t:tt)*) => (log(&format_args!($($t)*).to_string()))
}
struct Logger {}
impl armes_cpu_lib::Logger for Logger {
fn log(&self, s: &String) {
log(s);
}
}
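// Output device backed by a register: it latches a value from the bus and prints it to the
// JavaScript CLI via `printToCLI`.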
struct Output {
reg: armes_cpu_lib::ConnectedRegister
}
impl Output {
pub fn new(length: usize) -> Output {
Output {
reg: ConnectedRegister::new(length)
}
}
}
impl armes_cpu_lib::Output for Output {
fn in_from_bus(&mut self, bus: &Bus) {
self.reg.get_from_bus(bus);
}
fn show(&self) {
printToCLI(&format!("OUT >> {}", self.reg._get()));
}
}
//let mut ram: armes_cpu_lib::Memory;
//#[wasm_bindgen]
//trait wasm_abi: wasm_bindgen::convert::IntoWasmAbi {}
//impl wasm_bindgen::convert::IntoWasmAbi for Config {}
#[wasm_bindgen]
pub fn compile_asm(s: &str, conf_s: &str) -> WasmMemory {
let conf = configloader::load_cfg(conf_s);
let mem = compile::asm(String::from(s), conf);
wasm_memory::memory_to_wasm(mem)
}
#[wasm_bindgen]
pub fn compile_rom(s: &str, conf_s: &str) -> WasmMemory {
let conf = configloader::load_cfg(conf_s);
let mem = compile::rom(String::from(s), conf);
wasm_memory::memory_to_wasm(mem)
}
#[wasm_bindgen]
pub fn load_instructions(cfg: &str) -> String {
format!("{:?}",configloader::load_cfg(cfg).instructions)
}
#[wasm_bindgen]
pub fn print_mem(m: WasmMemory) {
console_log!("{}", wasm_memory::wasm_to_memory(m));
}
#[wasm_bindgen]
pub fn init_wasm() {
panic::set_hook(Box::new(console_error_panic_hook::hook));
//console_log!("Hello, wasm!\nFormat {} t #{}", "this", 2);
}
#[wasm_bindgen]
pub fn run_cpu(cfg: &str, ram: WasmMemory, rom: WasmMemory) {
let conf = configloader::load_cfg(cfg);
let mut out = Output::new(conf.data_length);
armes_cpu_lib::run_cpu(conf, wasm_memory::wasm_to_memory(ram), wasm_memory::wasm_to_memory(rom), Logger{}, &mut out);
//printToCLI("Thread started");
}
download_success: true

blob_id: 7c88cbb901a1e45f37ae3f597de48cd69108d0dd | language: Rust | repo_name: zhangwuqiao/pdf | path: /pdf/examples/text.rs | src_encoding: UTF-8 | length_bytes: 4,013 | score: 2.953125 | int_score: 3 | detected_licenses: ["MIT"] | license_type: permissive | text:
extern crate pdf;
use std::env::args;
use std::collections::HashMap;
use std::convert::TryInto;
use pdf::file::File;
use pdf::content::*;
use pdf::primitive::Primitive;
use pdf::font::*;
use pdf::parser::Lexer;
use pdf::parser::parse_with_lexer;
use pdf::object::{Resolve, NoResolve, RcRef};
use pdf::encoding::BaseEncoding;
use pdf::error::PdfError;
struct FontInfo {
font: RcRef<Font>,
cmap: ToUnicodeMap,
}
struct Cache {
fonts: HashMap<String, FontInfo>
}
impl Cache {
fn new() -> Self {
Cache {
fonts: HashMap::new()
}
}
fn add_font(&mut self, name: impl Into<String>, font: RcRef<Font>) {
println!("add_font({:?})", font);
if let Some(to_unicode) = font.to_unicode() {
self.fonts.insert(name.into(), FontInfo { font, cmap: to_unicode.unwrap() });
}
}
fn get_font(&self, name: &str) -> Option<&FontInfo> {
self.fonts.get(name)
}
}
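// Decode the raw bytes of a text-showing operator into `out` using the font's ToUnicode cmap:
// two-byte codes for Identity-H encodings, single bytes otherwise.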
fn add_string(data: &[u8], out: &mut String, info: &FontInfo) {
if let Some(encoding) = info.font.encoding() {
match encoding.base {
BaseEncoding::IdentityH => {
for w in data.windows(2) {
let cp = u16::from_be_bytes(w.try_into().unwrap());
if let Some(s) = info.cmap.get(cp) {
out.push_str(s);
}
}
}
_ => {
for &b in data {
if let Some(s) = info.cmap.get(b as u16) {
out.push_str(s);
} else {
out.push(b as char);
}
}
}
};
}
}
fn main() -> Result<(), PdfError> {
let path = args().nth(1).expect("no file given");
println!("read: {}", path);
let file = File::<Vec<u8>>::open(&path).unwrap();
let mut out = String::new();
for page in file.pages() {
let page = page?;
let resources = page.resources.as_ref().unwrap();
let mut cache = Cache::new();
// make sure all fonts are in the cache, so we can reference them
for (name, &font) in &resources.fonts {
cache.add_font(name, file.get(font)?);
}
for gs in resources.graphics_states.values() {
if let Some((font, _)) = gs.font {
let font = file.get(font)?;
if let Some(font_name) = &font.name {
cache.add_font(font_name.clone(), font);
}
}
}
let mut current_font = None;
let contents = page.contents.as_ref().unwrap();
for op in contents.operations(&file)?.iter() {
match op {
Op::GraphicsState { name } => {
let gs = resources.graphics_states.get(name).unwrap();
if let Some((font, _)) = gs.font {
let font = file.get(font)?;
if let Some(font_name) = &font.name{
current_font = cache.get_font(font_name.as_str());
}
}
}
// text font
Op::TextFont { name, .. } => {
current_font = cache.get_font(name);
}
Op::TextDraw { text } => if let Some(font) = current_font {
add_string(&text.data, &mut out, font);
}
Op::TextDrawAdjusted { array } => if let Some(font) = current_font {
for data in array {
if let TextDrawAdjusted::Text(text) = data {
add_string(&text.data, &mut out, font);
}
}
}
Op::TextNewline => {
out.push('\n');
}
_ => {}
}
}
}
println!("{}", out);
Ok(())
}
download_success: true

blob_id: b65128ed81f4b6bd12b4c764adfd5725a03bd82b | language: Rust | repo_name: JoNil/strfmt | path: /src/tests/strfmt.rs | src_encoding: UTF-8 | length_bytes: 3,118 | score: 3.3125 | int_score: 3 | detected_licenses: ["MIT"] | license_type: permissive | text:
use std::collections::HashMap;
use super::super::{strfmt, Fmt};
#[test]
fn test_values() {
let mut vars: HashMap<String, String> = HashMap::new();
let too_long = "toooloooong".to_string();
vars.insert("x".to_string(), "X".to_string());
vars.insert("long".to_string(), too_long.clone()); // len=10
vars.insert("hi".to_string(), "hi".to_string());
// format, expected, error
let values = vec![
// simple positioning
("{x}", "X", false),
("{x:}", "X", false),
("{x:3}", " X", false),
("{x:>3}", " X", false),
("{x:<3}", "X ", false),
("{x:^3}", " X ", false),
("{x:^4}", " X ", false),
// extra text
(" {x}yz", " Xyz", false),
(" hi {x:^4}-you rock", " hi X -you rock", false),
// fill confusion
("{x:10}", " X", false),
("{long:.3}", "too", false),
("{long:<5.3}", "too ", false),
("{long:5.3}", " too", false),
("{long:5.7}", "toooloo", false),
("{long:<5.7}", "toooloo", false),
("{long:^5.7}", "toooloo", false),
("{long:<}", &too_long, false),
("{long:<<}", &too_long, false),
("{long:<<5}", &too_long, false),
// fun
("{x:<>}", "X", false),
("{x:<>3}", "<<X", false),
("{{}}", "{}", false),
("{{{x}}}", "{X}", false),
("{{{x}{{{{{{", "{X{{{", false),
("{x}}}}", "X}}", false),
// invalid
("{}", "", true),
("{:3}", "", true),
("{x:*}", "", true),
("{x::}", "", true),
("{x:<<<}", "", true),
("{xxx: <88.3}", "", true),
// escape
("{{}}", "{}", false),
("{{long}}", "{long}", false),
("{{{x}}}", "{X}", false),
];
for (fmtstr, expected, expect_err) in values {
let result = strfmt(fmtstr, &vars);
let mut err = expect_err != result.is_err();
if !err && !expect_err {
err = match &result {
&Ok(ref r) => r != expected,
_ => unreachable!(),
};
}
if err {
let ex_type = if expect_err {
"fail"
} else {
"pass"
};
let fmt = Fmt::from_str(fmtstr);
println!("FAIL: expected {}", ex_type);
println!(" input: {:?}", (fmtstr, expected, expect_err));
println!(" fmt: {:?}", fmt);
if !expect_err {
println!(" expected: {:?}", expected);
}
match result {
Ok(v) => println!(" got: {:?}", v),
Err(v) => println!(" got: {:?}", v),
}
assert!(false);
}
}
}
// #[bench]
// fn bench_strfmt(b: &mut Bencher) {
// let mut vars: HashMap<String, String> = HashMap::new();
// let too_long = "toooloooong".to_string();
// vars.insert("x".to_string(), "X".to_string());
// let fmtstr = "short: {x:*^10.3} long: {long:%<14.9}";
// b.iter(|| strfmt(fmtstr, &vars));
// }
download_success: true

blob_id: 64d24f373fb43b5b543d4e0c4b4ee1a144ccea5d | language: Rust | repo_name: Axect/Peroxide_Gallery | path: /Numeric/lm_with_weight/src/main.rs | src_encoding: UTF-8 | length_bytes: 1,012 | score: 2.75 | int_score: 3 | detected_licenses: [] | license_type: no_license | text:
#[macro_use]
extern crate peroxide;
use peroxide::fuga::*;
fn main() {
// Generate Data
let x = seq(1, 10, 1);
let eps_true = rnorm!(10, 0, 1);
let y = x.fmap(|t| 2f64 * t + 1f64).add_v(&eps_true);
let data = cbind(x.into(), y.into());
// Generate Normal error (eps ~ N(0, 1^2))
let eps_guess = rnorm!(10, 0, 1);
// Optimize with new error
let mut opt = Optimizer::new(data, |x, w| linear_regression(x, w, &eps_guess));
let p = opt.set_init_param(c!(1, 1))
.set_max_iter(50)
.set_method(LevenbergMarquardt)
.optimize();
p.print();
opt.get_error().print();
}
// Linear regression with error: y_i = w_0 + w_1 * x_i + epsilon_i
fn linear_regression(x: &Vec<f64>, w: Vec<Number>, epsilon: &Vec<f64>) -> Option<Vec<Number>> {
Some(
x.iter()
.map(|&t| Number::from_f64(t))
.zip(epsilon.iter().map(|&t| Number::from_f64(t)))
.map(|(t, eps)| w[0] + w[1] * t + eps)
.collect()
)
}
download_success: true

blob_id: 18cb29c9e6c194faa13e3bda48f56f2b684cadaf | language: Rust | repo_name: xfbs/euler | path: /src/018-maximum-path-sum-i/rust/src/lib.rs | src_encoding: UTF-8 | length_bytes: 1,259 | score: 3.34375 | int_score: 3 | detected_licenses: ["MIT", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive | text:
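// Bottom-up dynamic programming: starting from the last row, each row is folded into the one
// above it by keeping the larger of the two adjacent partial sums, so cur[0] ends up holding
// the maximum path sum.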
fn max_path_sum(triangle: &Vec<Vec<u8>>) -> u32 {
if triangle.len() == 1 {
triangle[0][0] as u32
} else {
let mut pos = 1;
let mut cur: Vec<u32> = triangle[triangle.len() - pos]
.iter()
.map(|n| *n as u32)
.collect();
let mut prv: Vec<u32>;
while triangle.len() > pos {
pos += 1;
prv = cur;
cur = triangle[triangle.len() - pos]
.iter()
.map(|n| *n as u32)
.collect();
for i in 0..cur.len() {
if prv[i] > prv[i + 1] {
cur[i] = cur[i] + prv[i];
} else {
cur[i] = cur[i] + prv[i + 1];
}
}
}
cur[0]
}
}
pub fn solve(triangle: &Vec<Vec<u8>>) -> u32 {
max_path_sum(triangle)
}
#[test]
fn test_max_path_sum() {
let vecs = vec![vec![1], vec![2, 3]];
assert_eq!(max_path_sum(&vecs), 1 + 3);
let vecs = vec![vec![1], vec![3, 3], vec![2, 3, 4]];
assert_eq!(max_path_sum(&vecs), 1 + 3 + 4);
}
#[test]
fn test_solve() {
let vecs = vec![vec![1], vec![2, 3], vec![4, 5, 6], vec![7, 7, 7, 7]];
assert_eq!(solve(&vecs), 1 + 3 + 6 + 7);
}
download_success: true

blob_id: 16b62a7b4b4a908fd0c54e70d1dc67373eaecc05 | language: Rust | repo_name: johnrayn/hackaway | path: /books/TRPL/cp4_ownership/slice.rs | src_encoding: UTF-8 | length_bytes: 435 | score: 3.734375 | int_score: 4 | detected_licenses: ["BSD-2-Clause"] | license_type: permissive | text:
// slice is a view of the real data
// slice can be used as a parameter
// slice .......
fn main() {
let a = [1, 2, 3, 4, 5];
let slice = &a[1..3];
println!("the slice of a is {:?}", slice);
let s = "haha";
let len = length(s);
println!("the length of s in {}", len);
println!("Yes, {} is still accessible", s);
}
// slice as parameter
fn length(s: &str) -> usize {
return s.len();
}
download_success: true

blob_id: 499af2793f0427d37eb02e61e8f3e3bcf5d3608e | language: Rust | repo_name: alexzhenyu/rust-gpu-tools | path: /src/program.rs | src_encoding: UTF-8 | length_bytes: 11,452 | score: 3.359375 | int_score: 3 | detected_licenses: ["MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive | text:
#[cfg(feature = "cuda")]
use crate::cuda;
use crate::error::GPUError;
#[cfg(feature = "opencl")]
use crate::opencl;
/// Abstraction for running programs on CUDA or OpenCL.
pub enum Program {
/// CUDA program.
#[cfg(feature = "cuda")]
Cuda(cuda::Program),
/// OpenCL program.
#[cfg(feature = "opencl")]
Opencl(opencl::Program),
}
impl Program {
/// Run some code in the context of the program.
///
/// There is an implementation for OpenCL and for CUDA. Both use different Rust types, but
/// [`opencl::Program`] and [`cuda::Program`] implement the same API. This means that the
/// same code can be used to run on either of them. The only difference is the type of the
/// `Program`.
///
/// You need to pass in two closures, one for OpenCL, one for CUDA, both get their
/// corresponding program type as parameter. For convenience there is the [`program_closures`]
/// macro, which can help reduce code duplication by creating two closures out of
/// a single one.
///
/// CUDA and OpenCL support can be enabled/disabled by the `opencl` and `cuda` features. If
/// one of them is disabled, you still need to pass in two closures. This way the API stays
/// the same, but you can disable things at compile-time.
///
/// The second parameter is a single arbitrary argument, which will be passed on into the
/// closure. This is useful when you e.g. need to pass in a mutable reference. Such a reference
/// cannot be shared between closures, hence we pass it on, so that the compiler knows that it
/// is used at most once.
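/// As a rough usage sketch (an illustrative assumption, not an example taken from this crate's
/// documentation): given an already-built `program: Program` and the `program_closures!` macro
/// defined at the bottom of this file, a call could look like this.
///
/// ```ignore
/// let closures = program_closures!(|_program, arg: u8| -> Result<bool, GPUError> {
///     // Both generated closures simply inspect the extra argument.
///     Ok(arg > 0)
/// });
/// let result: Result<bool, GPUError> = program.run(closures, 42u8);
/// ```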
#[cfg(all(feature = "cuda", feature = "opencl"))]
pub fn run<F1, F2, R, E, A>(&self, fun: (F1, F2), arg: A) -> Result<R, E>
where
E: From<GPUError>,
F1: FnOnce(&cuda::Program, A) -> Result<R, E>,
F2: FnOnce(&opencl::Program, A) -> Result<R, E>,
{
match self {
Self::Cuda(program) => program.run(fun.0, arg),
Self::Opencl(program) => program.run(fun.1, arg),
}
}
/// Run some code in the context of the program.
///
/// There is an implementation for OpenCL and for CUDA. Both use different Rust types, but
/// [`opencl::Program`] and [`cuda::Program`] implement the same API. This means that the
/// same code can be used to run on either of them. The only difference is the type of the
/// `Program`.
///
/// You need to pass in two closures, one for OpenCL, one for CUDA, both get their
/// corresponding program type as parameter. For convenience there is the [`program_closures`]
/// macro, which can help reduce code duplication by creating two closures out of
/// a single one.
///
/// CUDA and OpenCL support can be enabled/disabled by the `opencl` and `cuda` features. If
/// one of them is disabled, you still need to pass in two closures. This way the API stays
/// the same, but you can disable things at compile-time.
///
/// The second parameter is a single arbitrary argument, which will be passed on into the
/// closure. This is useful when you e.g. need to pass in a mutable reference. Such a reference
/// cannot be shared between closures, hence we pass it on, so that the compiler knows that it
/// is used at most once.
#[cfg(all(feature = "cuda", not(feature = "opencl")))]
pub fn run<F1, F2, R, E, A>(&self, fun: (F1, F2), arg: A) -> Result<R, E>
where
E: From<GPUError>,
F1: FnOnce(&cuda::Program, A) -> Result<R, E>,
{
match self {
Self::Cuda(program) => program.run(fun.0, arg),
}
}
/// Run some code in the context of the program.
///
/// There is an implementation for OpenCL and for CUDA. Both use different Rust types, but
/// [`opencl::Program`] and [`cuda::Program`] implement the same API. This means that the
/// same code can be used to run on either of them. The only difference is the type of the
/// `Program`.
///
/// You need to pass in two closures, one for OpenCL, one for CUDA, both get their
/// corresponding program type as parameter. For convenience there is the [`program_closures`]
/// macro, which can help reduce code duplication by creating two closures out of
/// a single one.
///
/// CUDA and OpenCL support can be enabled/disabled by the `opencl` and `cuda` features. If
/// one of them is disabled, you still need to pass in two closures. This way the API stays
/// the same, but you can disable things at compile-time.
///
/// The second parameter is a single arbitrary argument, which will be passed on into the
/// closure. This is useful when you e.g. need to pass in a mutable reference. Such a reference
/// cannot be shared between closures, hence we pass it on, so that the compiler knows that it
/// is used at most once.
#[cfg(all(not(feature = "cuda"), feature = "opencl"))]
pub fn run<F1, F2, R, E, A>(&self, fun: (F1, F2), arg: A) -> Result<R, E>
where
E: From<GPUError>,
F2: FnOnce(&opencl::Program, A) -> Result<R, E>,
{
match self {
Self::Opencl(program) => program.run(fun.1, arg),
}
}
/// Returns the name of the GPU, e.g. "GeForce RTX 3090".
pub fn device_name(&self) -> &str {
match self {
#[cfg(feature = "cuda")]
Self::Cuda(program) => program.device_name(),
#[cfg(feature = "opencl")]
Self::Opencl(program) => program.device_name(),
}
}
}
/// Creates two closures, one for CUDA, one for OpenCL for the given one.
///
/// This macro is used to interact with rust-gpu-tools with unified code for both CUDA and
/// OpenCL, without the need to repeat the code. The input parameter is a `program` and
/// it will be mapped to [`&cuda::Program`] and [`&opencl::Program`].
///
/// The second parameter is a single arbitrary argument, which will be passed on into the closure.
/// This is useful when you e.g. need to pass in a mutable reference. Such a reference cannot be
/// shared between closures, hence we pass it on, so that the compiler knows that it is used at
/// most once.
///
/// Depending on whether the `cuda` and/or `opencl` feature is enabled, it will do the correct
/// thing and not specify one of them if it is appropriate.
///
/// ### Example
///
/// ```
/// use rust_gpu_tools::{cuda, opencl, program_closures};
///
/// let closures = program_closures!(|program, arg: u8| -> bool {
/// true
/// });
///
/// // Generates
/// let closures = (
/// |program: &cuda::Program, arg: u8| { true },
/// |program: &opencl::Program, arg: u8| { true },
/// );
///
/// // If e.g. the `cuda` feature is disabled, it would generate
/// let closures_without_cuda = (
/// (),
/// |program: &opencl::Program, arg: u8| { true },
/// );
/// ```
#[cfg(all(feature = "cuda", feature = "opencl"))]
#[macro_export]
macro_rules! program_closures {
// Additional argument without a type
(|$program:ident, $arg:ident| -> $ret:ty $body:block) => {
(
|$program: &$crate::cuda::Program, $arg| -> $ret { $body },
|$program: &$crate::opencl::Program, $arg| -> $ret { $body },
)
};
// Additional argument with a type
(|$program:ident, $arg:ident: $arg_type:ty| -> $ret:ty $body:block) => {
(
|$program: &$crate::cuda::Program, $arg: $arg_type| -> $ret { $body },
|$program: &$crate::opencl::Program, $arg: $arg_type| -> $ret { $body },
)
};
}
/// Creates two closures, one for CUDA, one for OpenCL for the given one.
///
/// This macro is used to interact with rust-gpu-tools with unified code for both CUDA and
/// OpenCL, without the need to repeat the code. The input parameter is a `program` and
/// it will be mapped to [`&cuda::Program`] and [`&opencl::Program`].
///
/// The second parameter is a single arbitrary argument, which will be passed on into the closure.
/// This is useful when you e.g. need to pass in a mutable reference. Such a reference cannot be
/// shared between closures, hence we pass it on, so that the compiler knows that it is used at
/// most once.
///
/// Depending on whether the `cuda` and/or `opencl` feature is enabled, it will do the correct
/// thing and not specify one of them if it is appropriate.
///
/// ### Example
///
/// ```
/// use rust_gpu_tools::{cuda, opencl, program_closures};
///
/// let closures = program_closures!(|program, arg: u8| -> bool {
/// true
/// });
///
/// // Generates
/// let closures = (
/// |program: &cuda::Program, arg: u8| { true },
/// |program: &opencl::Program, arg: u8| { true },
/// );
///
/// // If e.g. the `cuda` feature is disabled, it would generate
/// let closures_without_cuda = (
/// (),
/// |program: &opencl::Program, arg: u8| { true },
/// );
/// ```
#[macro_export]
#[cfg(all(feature = "cuda", not(feature = "opencl")))]
macro_rules! program_closures {
// Additional argument without a type
(|$program:ident, $arg:ident| -> $ret:ty $body:block) => {
(
|$program: &$crate::cuda::Program, $arg| -> $ret { $body },
(),
)
};
// Additional argument with a type
(|$program:ident, $arg:ident: $arg_type:ty| -> $ret:ty $body:block) => {
(
|$program: &$crate::cuda::Program, $arg: $arg_type| -> $ret { $body },
(),
)
};
}
/// Creates two closures, one for CUDA, one for OpenCL for the given one.
///
/// This macro is used to interact with rust-gpu-tools with unified code for both CUDA and
/// OpenCL, without the need to repeat the code. The input parameter is a `program` and
/// it will be mapped to [`&cuda::Program`] and [`&opencl::Program`].
///
/// The second parameter is a single arbitrary argument, which will be passed on into the closure.
/// This is useful when you e.g. need to pass in a mutable reference. Such a reference cannot be
/// shared between closures, hence we pass it on, so that the compiler knows that it is used at
/// most once.
///
/// Depending on whether the `cuda` and/or `opencl` feature is enabled, it will do the correct
/// thing and not specify one of them if it is appropriate.
///
/// ### Example
///
/// ```
/// use rust_gpu_tools::{cuda, opencl, program_closures};
///
/// let closures = program_closures!(|program, arg: u8| -> bool {
/// true
/// });
///
/// // Generates
/// let closures = (
/// |program: &cuda::Program, arg: u8| { true },
/// |program: &opencl::Program, arg: u8| { true },
/// );
///
/// // If e.g. the `cuda` feature is disabled, it would generate
/// let closures_without_cuda = (
/// (),
/// |program: &opencl::Program, arg: u8| { true },
/// );
/// ```
#[macro_export]
#[cfg(all(not(feature = "cuda"), feature = "opencl"))]
macro_rules! program_closures {
// Additional argument without a type
(|$program:ident, $arg:ident| -> $ret:ty $body:block) => {
((), |$program: &$crate::opencl::Program, $arg| -> $ret {
$body
})
};
// Additional argument with a type
(|$program:ident, $arg:ident: $arg_type:ty| -> $ret:ty $body:block) => {
(
(),
|$program: &$crate::opencl::Program, $arg: $arg_type| -> $ret { $body },
)
};
}
download_success: true

blob_id: e2cec2b376063eeed0584162a36866b62884ac24 | language: Rust | repo_name: nathankot/plaid-rust | path: /src/api/product/connect.rs | src_encoding: UTF-8 | length_bytes: 2,504 | score: 3.140625 | int_score: 3 | detected_licenses: ["MIT"] | license_type: permissive | text:
//! Connect is a product that Plaid offers. It allows you to retrieve account balance
//! and transaction history data.
//!
//! ## Endpoint example
//!
//! ```
//! # #[macro_use(http_stub)] extern crate plaid;
//! # #[macro_use] extern crate yup_hyper_mock as hyper_mock;
//! # extern crate hyper;
//! #
//! # fn main() {
//! #
//! # http_stub!(StubPolicy, 200, include_str!("fixtures/post_connect_success.json"));
//! #
//! # let hyper = hyper::Client::with_connector(StubPolicy::default());
//! #
//! use plaid::api::client::{ Client, Response, Payload };
//! use plaid::api::product;
//! use plaid::api::types::*;
//! use plaid::api::user::{ User };
//!
//! let client = Client { endpoint: "https://tartan.plaid.com",
//! client_id: "testclient",
//! secret: "testsecret",
//! hyper: &hyper };
//!
//! let user = User { access_token: "testaccesstoken".to_string() };
//!
//! let response = client.request(
//! product::Connect,
//! Payload::FetchData(client, user, None))
//! .unwrap();
//! #
//! # match response {
//! # Response::ProductData(ref data) => {
//! # assert_eq!(data.accounts[0].current_balance, 742.93 as Amount);
//! # assert_eq!(data.accounts[1].current_balance, 100030.32 as Amount);
//! # assert_eq!(data.transactions[0].amount, -700 as Amount);
//! # assert_eq!(data.transactions[1].id, "testtransactionid2".to_string());
//! # },
//! # _ => panic!("Expected product data")
//! # };
//! # }
//! ```
use api::product::{ Product };
use api::account::Account;
use api::transaction::Transaction;
use api::client::Payload;
/// `Connect` is the endpoint you need to fetch transactions for a `User`
#[derive(Debug)]
pub struct Connect;
/// Representation of data that is retrieved from the `Connect` product.
#[derive(Debug, RustcDecodable)]
pub struct ConnectData {
/// List of accounts associated with the user
pub accounts: Vec<Account>,
/// List of transactions associated with the user
pub transactions: Vec<Transaction>
}
impl Product for Connect {
type Data = ConnectData;
fn description<'a>(&self) -> &'a str { "Connect" }
fn endpoint<'a, 'b>(&self, payload: &'b Payload) -> &'a str {
match *payload {
Payload::StepMFA(..) => "/connect/step",
Payload::FetchData(..) => "/connect/get",
Payload::Upgrade(..) => "/upgrade?upgrade_to=connect",
_ => "/connect"
}
}
}
download_success: true

blob_id: 686d8395db581872f56f9a51dac81d15704f5d25 | language: Rust | repo_name: pythonesque/rusty-lisp | path: /src/main.rs | src_encoding: UTF-8 | length_bytes: 22,646 | score: 2.6875 | int_score: 3 | detected_licenses: [] | license_type: no_license | text:
#![feature(box_patterns)]
//extern crate rusty_lisp;
extern crate parser;
use parser::{Checkable, Inferable, Name};
use std::collections::{HashMap, VecDeque};
use std::rc::Rc;
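// Values are the result of evaluation: lambdas and Pi types carry Rust closures, while `Neutral`
// represents stuck terms (free variables and eliminators applied to them).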
#[derive(Clone)]
pub enum Value {
Lam(Rc<Fn(Value) -> Value>),
Star,
Pi(Box<Info>, Rc<Fn(Value) -> Value>),
Neutral(Neutral),
Nat,
Zero,
Succ(Box<Value>),
Vec(Box<Value>, Box<Value>),
Nil(Box<Value>),
Cons(Box<Value>, Box<Value>, Box<Value>, Box<Value>),
}
#[derive(Clone)]
pub enum Neutral {
Free(Name),
App(Box<Neutral>, Box<Value>),
NatElim(Box<Value>, Box<Value>, Box<Value>, Box<Neutral>),
VecElim(Box<Value>, Box<Value>, Box<Value>, Box<Value>, Box<Value>, Box<Neutral>),
}
fn vfree(name: Name) -> Value {
Value::Neutral(Neutral::Free(name))
}
type Env = VecDeque<Value>;
fn eval_up(term: Inferable, d: Env) -> Value {
use parser::Inferable::*;
match term {
Ann(e, _) => eval_down(e, d),
Star => Value::Star,
Pi(t, t_) => {
Value::Pi(Box::new(eval_down(t, d.clone())), Rc::new(move |x| {
let mut d = d.clone();
d.push_front(x);
eval_down(t_.clone(), d)
}))
},
Free(x) => vfree(x),
Bound(i) => d[i].clone(),
App(box e, e_) => {
let e = eval_up(e, d.clone());
let e_ = eval_down(e_, d);
vapp(e, e_)
},
Nat => Value::Nat,
Zero => Value::Zero,
Succ(k) => Value::Succ(Box::new(eval_down(k, d))),
NatElim(m, mz, ms, k) => {
let mz = eval_down(mz, d.clone());
let ms = eval_down(ms, d.clone());
fn rec(k: Value, d: Env, m: Checkable, mz: Value, ms: Value) -> Value {
match k {
Value::Zero => mz,
Value::Succ(box l) => vapp(vapp(ms.clone(), l.clone()), rec(l, d, m, mz, ms)),
Value::Neutral(k) =>
Value::Neutral(Neutral::NatElim(Box::new(eval_down(m, d)), Box::new(mz),
Box::new(ms), Box::new(k))),
_ => panic!("internal: eval natElim"),
}
}
rec(eval_down(k, d.clone()), d, m, mz, ms)
},
Vec(a, n) => Value::Vec(Box::new(eval_down(a, d.clone())), Box::new(eval_down(n, d))),
Nil(a) => Value::Nil(Box::new(eval_down(a, d))),
Cons(a, k, x, xs) => Value::Cons(Box::new(eval_down(a, d.clone())), Box::new(eval_down(k, d.clone())),
Box::new(eval_down(x, d.clone())), Box::new(eval_down(xs, d))),
VecElim(a, m, mn, mc, k, xs) => {
let mn = eval_down(mn, d.clone());
let mc = eval_down(mc, d.clone());
fn rec(k: Value, xs: Value, d: Env, a: Checkable, m: Checkable, mn: Value, mc: Value) -> Value {
match xs {
Value::Nil(_) => mn,
Value::Cons(_, box l, box x, box xs) =>
vec![l.clone(), x, xs.clone(), rec(l, xs, d.clone(), a, m, mn, mc.clone())].into_iter().fold(mc, vapp),
Value::Neutral(n) => Value::Neutral(
Neutral::VecElim(Box::new(eval_down(a, d.clone())),
Box::new(eval_down(m, d)), Box::new(mn), Box::new(mc),
Box::new(k), Box::new(n))),
_ => panic!("internal: eval vecElim"),
}
}
rec(eval_down(k, d.clone()), eval_down(xs, d.clone()), d, a, m, mn, mc)
},
}
}
fn vapp(value: Value, v: Value) -> Value {
match value {
Value::Lam(f) => f(v),
Value::Neutral(n) => Value::Neutral(Neutral::App(Box::new(n), Box::new(v))),
_ => panic!("Should only apply Lam and Neutral values!")
}
}
fn eval_down(term: Checkable, d: Env) -> Value {
use parser::Checkable::*;
match term {
Inf(box i) => eval_up(i, d),
Lam(box e) => {
Value::Lam(Rc::new(move |x| {
let mut d = d.clone();
d.push_front(x);
eval_down(e.clone(), d)
}))
},
}
}
macro_rules! throw_error ( ($s:expr) => {{ return Err($s.into()) }} );
fn type_up_0(ctx: Context, term: Inferable) -> Result<Info> {
type_up(0, ctx, term)
}
fn type_up(i: usize, mut ctx: Context, term: Inferable) -> Result<Info> {
use parser::Inferable::*;
match term {
Ann(e, p) => {
try!(type_down(i, ctx.clone(), p.clone(), Value::Star));
let t = eval_down(p, Env::new());
try!(type_down(i, ctx, e, t.clone()));
Ok(t)
},
Star => Ok(Value::Star),
Pi(p, p_) => {
try!(type_down(i, ctx.clone(), p.clone(), Value::Star));
let t = eval_down(p, Env::new());
ctx.push_front((Name::Local(i), t));
try!(type_down(i + 1, ctx, subst_down(0, Inferable::Free(Name::Local(i)), p_), Value::Star));
Ok(Value::Star)
},
Free(ref x) => match ctx.into_iter().find(|&(ref name, _)| name == x) {
Some((_, t)) => Ok(t),
None => throw_error!("unknown identifier"),
},
App(box e, e_) => {
let o = try!(type_up(i, ctx.clone(), e));
match o {
Value::Pi(box t, t_) => {
try!(type_down(i, ctx, e_.clone(), t));
Ok(t_(eval_down(e_, Env::new())))
},
_ => throw_error!("illegal application")
}
},
Bound(_) => panic!("Should never see a bound variable here, as this should be taken care of in the type rule for lambda abstraction."),
Nat => Ok(Value::Star),
Zero => Ok(Value::Nat),
Succ(k) => {
try!(type_down(i, ctx, k, Value::Nat));
Ok(Value::Nat)
},
NatElim(m, mz, ms, k)=> {
try!(type_down(i, ctx.clone(), m.clone(), Value::Pi(Box::new(Value::Nat), Rc::new(move |_| Value::Star))));
let m = eval_down(m, Env::new());
try!(type_down(i, ctx.clone(), mz, vapp(m.clone(), Value::Zero)));
let m_ = m.clone();
try!(type_down(i, ctx.clone(), ms, Value::Pi(Box::new(Value::Nat), Rc::new(move |l| {
let m_ = m.clone();
let l_ = l.clone();
Value::Pi(Box::new(vapp(m.clone(), l.clone())), Rc::new(move |_|
vapp(m_.clone(), Value::Succ(Box::new(l_.clone())))))
}))));
try!(type_down(i, ctx.clone(), k.clone(), Value::Nat));
let k = eval_down(k, Env::new());
Ok(vapp(m_, k))
},
Vec(a, k) => {
try!(type_down(i, ctx.clone(), a, Value::Star));
try!(type_down(i, ctx, k, Value::Nat));
Ok(Value::Star)
},
Nil(a) => {
try!(type_down(i, ctx, a.clone(), Value::Star));
let a = eval_down(a, Env::new());
Ok(Value::Vec(Box::new(a), Box::new(Value::Zero)))
},
Cons(a, k, x, xs) => {
try!(type_down(i, ctx.clone(), a.clone(), Value::Star));
let a = eval_down(a, Env::new());
try!(type_down(i, ctx.clone(), k.clone(), Value::Nat));
let k = eval_down(k, Env::new());
try!(type_down(i, ctx.clone(), x, a.clone()));
try!(type_down(i, ctx, xs, Value::Vec(Box::new(a.clone()), Box::new(k.clone()))));
Ok(Value::Vec(Box::new(a), Box::new(Value::Succ(Box::new(k)))))
},
VecElim(a, m, mn, mc, k, vs) => {
try!(type_down(i, ctx.clone(), a.clone(), Value::Star));
let a = eval_down(a, Env::new());
let a_ = a.clone();
try!(type_down(i, ctx.clone(), m.clone(),
Value::Pi(Box::new(Value::Nat), Rc::new(move |k|
Value::Pi(Box::new(Value::Vec(Box::new(a_.clone()), Box::new(k))), Rc::new(move |_|
Value::Star))))));
let m = eval_down(m, Env::new());
try!(type_down(i, ctx.clone(), mn, vec![Value::Zero, Value::Nil(Box::new(a.clone()))]
.into_iter().fold(m.clone(), vapp)));
let m_ = m.clone();
let a_ = a.clone();
try!(type_down(i, ctx.clone(), mc, Value::Pi(Box::new(Value::Nat), Rc::new(move |l| {
let a_ = a_.clone();
let m_ = m_.clone();
Value::Pi(Box::new(a_.clone()), Rc::new(move |y| {
let a_ = a_.clone();
let m_ = m_.clone();
let l_ = l.clone();
Value::Pi(Box::new(Value::Vec(Box::new(a_.clone()), Box::new(l_.clone()))), Rc::new(move |ys| {
let a_ = a_.clone();
let m_ = m_.clone();
let l_ = l_.clone();
let y_ = y.clone();
Value::Pi(Box::new(vec![l_.clone(), ys.clone()].into_iter().fold(m_.clone(), vapp)), Rc::new(move |_|
vec![Value::Succ(Box::new(l_.clone())),
Value::Cons(Box::new(a_.clone()), Box::new(l_.clone()), Box::new(y_.clone()),
Box::new(ys.clone()))].into_iter().fold(m_.clone(), vapp)))
}))}))}))));
try!(type_down(i, ctx.clone(), k.clone(), Value::Nat));
let k = eval_down(k, Env::new());
try!(type_down(i, ctx, vs.clone(), Value::Vec(Box::new(a), Box::new(k.clone()))));
let vs = eval_down(vs, Env::new());
Ok(vec![k, vs].into_iter().fold(m, vapp))
},
}
}
fn type_down(i: usize, mut ctx: Context, term: Checkable, ty: Info) -> Result<()> {
use parser::Checkable::*;
match (term, ty) {
(Inf(box e), v) => {
let v_ = try!(type_up(i, ctx.clone(), e));
if quote_0(v) != quote_0(v_) { throw_error!("type mismatch"); }
Ok(())
},
(Lam(box e), Value::Pi(box t, t_)) => {
//let mut ctx = ctx.clone();
ctx.push_front((Name::Local(i), t));
type_down(i + 1, ctx, subst_down(0, Inferable::Free(Name::Local(i)), e), t_(vfree(Name::Local(i))))
},
_ => throw_error!("type mismatch")
}
}
fn subst_up(i: usize, r: Inferable, term: Inferable) -> Inferable {
use parser::Inferable::*;
match term {
Ann(e, t) => Ann(subst_down(i, r.clone(), e), subst_down(i, r, t)),
Star => Star,
Pi(t, t_) => Pi(subst_down(i, r.clone(), t), subst_down(i + 1, r, t_)),
Bound(j) => if i == j { r } else { Bound(j) },
Free(y) => Free(y),
App(box e, e_) => {
let term = subst_down(i, r.clone(), e_);
App(Box::new(subst_up(i, r, e)), term)
},
Nat => Nat,
Zero => Zero,
Succ(k) => Succ(subst_down(i, r, k)),
NatElim(m, mz, ms, k) =>
NatElim(subst_down(i, r.clone(), m), subst_down(i, r.clone(), mz),
subst_down(i, r.clone(), ms), subst_down(i, r, k)),
Vec(a, n) => Vec(subst_down(i, r.clone(), a), subst_down(i, r, n)),
Nil(a) => Nil(subst_down(i, r, a)),
Cons(a, k, x, xs) =>
Cons(subst_down(i, r.clone(), a), subst_down(i, r.clone(), k),
subst_down(i, r.clone(), x), subst_down(i, r.clone(), xs)),
VecElim(a, m, mn, mc, k, vs) =>
VecElim(subst_down(i, r.clone(), a), subst_down(i, r.clone(), m),
subst_down(i, r.clone(), mn), subst_down(i, r.clone(), mc),
subst_down(i, r.clone(), k), subst_down(i, r, vs)),
}
}
fn subst_down(i: usize, r: Inferable, term: Checkable) -> Checkable {
use parser::Checkable::*;
match term {
Inf(box e) => Inf(Box::new(subst_up(i, r, e))),
Lam(box e) => Lam(Box::new(subst_down(i + 1, r, e))),
}
}
fn quote_0(value: Value) -> Checkable {
quote(0, value)
}
fn quote(i: usize, value: Value) -> Checkable {
match value {
Value::Lam(f) => Checkable::Lam(Box::new(quote(i + 1, f(vfree(Name::Quote(i)))))),
Value::Star => Checkable::Inf(Box::new(Inferable::Star)),
Value::Pi(box v, f) => Checkable::Inf(Box::new(Inferable::Pi(quote(i, v), quote(i + 1, f(vfree(Name::Quote(i))))))),
Value::Neutral(n) => Checkable::Inf(Box::new(neutral_quote(i, n))),
Value::Nat => Checkable::Inf(Box::new(Inferable::Nat)),
Value::Zero => Checkable::Inf(Box::new(Inferable::Zero)),
Value::Succ(box v) => Checkable::Inf(Box::new(Inferable::Succ(quote(i, v)))),
Value::Vec(box a, box n) => Checkable::Inf(Box::new(Inferable::Vec(quote(i, a), quote(i, n)))),
Value::Nil(box a) => Checkable::Inf(Box::new(Inferable::Nil(quote(i, a)))),
Value::Cons(box a, box k, box x, box xs) =>
Checkable::Inf(Box::new(Inferable::Cons(quote(i, a), quote(i, k), quote(i, x),
quote(i, xs)))),
}
}
fn neutral_quote(i: usize, neutral: Neutral) -> Inferable {
match neutral {
Neutral::Free(x) => boundfree(i, x),
Neutral::App(box n, box v) => Inferable::App(Box::new(neutral_quote(i, n)), quote(i, v)),
Neutral::NatElim(box m, box mz, box ms, box k) =>
Inferable::NatElim(quote(i, m), quote(i, mz), quote(i, ms),
Checkable::Inf(Box::new(neutral_quote(i, k)))),
Neutral::VecElim(box a, box m, box mn, box mc, box k, box vs) =>
Inferable::VecElim(quote(i, a), quote(i, m), quote(i, mn), quote(i, mc), quote(i, k),
Checkable::Inf(Box::new(neutral_quote(i, vs)))),
}
}
fn boundfree(i: usize, name: Name) -> Inferable {
match name {
Name::Quote(k) => Inferable::Bound(i - k - 1),
x => Inferable::Free(x),
}
}
fn global_sub_up(r: &Bindings, term: Inferable) -> Inferable {
use parser::Inferable::*;
match term {
Ann(e, t) => Ann(global_sub_down(r, e), global_sub_down(r, t)),
Star => Star,
Pi(t, t_) => Pi(global_sub_down(r, t), global_sub_down(r, t_)),
Bound(j) => Bound(j),
Free(Name::Global(y)) => match r.get(&y) {
Some(term) => term.clone(),
None => Free(Name::Global(y))
},
Free(n) => Free(n),
App(box e, e_) => {
let term = global_sub_down(r, e_);
App(Box::new(global_sub_up(r, e)), term)
},
Nat => Nat,
Zero => Zero,
Succ(k) => Succ(global_sub_down(r, k)),
NatElim(m, mz, ms, k) =>
NatElim(global_sub_down(r, m), global_sub_down(r, mz),
global_sub_down(r, ms), global_sub_down(r, k)),
Vec(a, n) => Vec(global_sub_down(r, a), global_sub_down(r, n)),
Nil(a) => Nil(global_sub_down(r, a)),
Cons(a, k, x, xs) =>
Cons(global_sub_down(r, a), global_sub_down(r, k),
global_sub_down(r, x), global_sub_down(r, xs)),
VecElim(a, m, mn, mc, k, vs) =>
VecElim(global_sub_down(r, a), global_sub_down(r, m),
global_sub_down(r, mn), global_sub_down(r, mc),
global_sub_down(r, k), global_sub_down(r, vs)),
}
}
fn global_sub_down(r: &Bindings, term: Checkable) -> Checkable {
use parser::Checkable::*;
match term {
Inf(box e) => Inf(Box::new(global_sub_up(r, e))),
Lam(box e) => Lam(Box::new(global_sub_down(r, e))),
}
}
fn find_up(i: &Name, term: &Inferable) -> bool {
use parser::Inferable::*;
match *term {
Ann(ref e, ref t) => find_down(i, e) || find_down(i, t),
Star => false,
Pi(ref t, ref t_) => find_down(i, t) || find_down(i, t_),
Bound(_) => false,
Free(ref y) => y == i,
App(box ref e, ref e_) => find_down(i, e_) || find_up(i, e),
Nat => false,
Zero => false,
Succ(ref k) => find_down(i, k),
NatElim(ref m, ref mz, ref ms, ref k) =>
find_down(i, m) || find_down(i, mz) ||
find_down(i, ms) || find_down(i, k),
Vec(ref a, ref n) => find_down(i, a) || find_down(i, n),
Nil(ref a) => find_down(i, a),
Cons(ref a, ref k, ref x, ref xs) =>
find_down(i, a) || find_down(i, k) ||
find_down(i, x) || find_down(i, xs),
VecElim(ref a, ref m, ref mn, ref mc, ref k, ref vs) =>
find_down(i, a) || find_down(i, m) ||
find_down(i, mn) || find_down(i, mc) ||
find_down(i, k) || find_down(i, vs),
}
}
fn find_down(i: &Name, term: &Checkable) -> bool {
use parser::Checkable::*;
match *term {
Inf(box ref e) => find_up(i, e),
Lam(box ref e) => find_down(i, e),
}
}
fn bound_name(term: &Checkable, d: &mut VecDeque<usize>) -> usize {
let mut x = d.iter().next().map(|&x|x).unwrap_or(d.len()) + 1;
while find_down(&Name::Global(format!("v{}", x)), term) { x = x + 1; }
d.push_front(x);
x
}
#[derive(Clone,Copy)] enum Assoc { Left, Right, }
fn print_up(term: Inferable, mut d: VecDeque<usize>, assoc: Assoc) -> String {
use parser::Inferable::*;
match term {
Ann(e, t) => format!("{} : {}", print_down(e, d.clone(), Assoc::Left), print_down(t, d, Assoc::Right)),
Star => "*".into(),
Pi(t, t_) => {
let t = print_down(t, d.clone(), Assoc::Right);
let x = bound_name(&t_, &mut d);
match assoc {
Assoc::Left => format!("(Π (v{} : {}) → {})", x, t, print_down(t_, d, Assoc::Left)),
Assoc::Right => format!("Π (v{} : {}) → {}", x, t, print_down(t_, d, Assoc::Left)),
}
},
Free(Name::Global(x)) => x,
Free(n) => panic!("Did not expect {:?} during print_up", n),
Bound(i) => format!("v{}", d[i]),
App(box e, e_) => match assoc {
Assoc::Left => format!("{} {}", print_up(e, d.clone(), Assoc::Left), print_down(e_, d, Assoc::Left)),
Assoc::Right => format!("({} {})", print_up(e, d.clone(), Assoc::Left), print_down(e_, d, Assoc::Left)),
},
Nat => "Nat".into(),
Zero => "0".into(),
Succ(k) => {
let mut n = 1;
let mut k_ = k.clone();
while let Checkable::Inf(box Succ(k)) = k_ {
n += 1;
k_ = k;
}
match k_ {
Checkable::Inf(box Zero) => format!("{}", n),
_ => print_up(App(Box::new(Free(Name::Global("Succ".into()))), k), d, Assoc::Left),
}
},
NatElim(m, mz, ms, k) => print_up(App(
Box::new(App(Box::new(App(Box::new(App(Box::new(Free(Name::Global("natElim".into()))),
m)), mz)), ms)), k), d, Assoc::Left),
Vec(a, n) => print_up(App(Box::new(App(Box::new(Free(Name::Global("Vec".into()))), a)), n),
d, Assoc::Left),
Nil(a) => print_up(App(Box::new(Free(Name::Global("Nil".into()))), a), d, Assoc::Left),
Cons(a, k, x, xs) => print_up(App(
Box::new(App(Box::new(App(Box::new(App(Box::new(Free(Name::Global("Cons".into()))),
a)), k)), x)), xs), d, Assoc::Left),
VecElim(a, m, mn, mc, k, vs) => print_up(App(
Box::new(App(Box::new(App(
Box::new(App(Box::new(App(Box::new(App(Box::new(Free(Name::Global("vecElim".into()))),
a)), m)), mn)), mc)), k)), vs), d, Assoc::Left),
}
}
fn print_down(term: Checkable, mut d: VecDeque<usize>, assoc:Assoc) -> String {
use parser::Checkable::*;
match term {
Inf(box i) => format!("{}", print_up(i, d, Assoc::Right)),
Lam(box e) => {
let x = bound_name(&e, &mut d);
match assoc {
Assoc::Right => format!("λ v{} → {}", x, print_down(e, d, Assoc::Right)),
Assoc::Left => format!("(λ v{} → {})", x, print_down(e, d, Assoc::Right)),
}
},
}
}
pub type Info = Value;
pub type Context = VecDeque<(Name, Info)>;
pub type Bindings = HashMap<String, Inferable>;
type Result<A> = ::std::result::Result<A, String>;
pub fn parse(s: &str, ctx: &mut Context, bindings: &mut Bindings) -> ::std::result::Result<Option<Inferable>, ()> {
use parser::Stmt::*;
match parser::parse(s) {
Some(res) => res.map( |inf| match inf {
Decl(d) => {
for (v, c) in d {
let c = global_sub_down(bindings, c);
if type_down(0, ctx.clone(), c.clone(), Value::Star).is_ok() {
ctx.push_front((v, eval_down(c, VecDeque::new())));
}
}
None
},
Expr(e) => {
let e = global_sub_up(bindings, e);
Some(e)
},
Bind(v, e) => {
let e = global_sub_up(bindings, e);
Some(match type_up_0(ctx.clone(), e.clone()) {
Ok(ty) => {
bindings.insert(v.clone(), e.clone());
ctx.push_front((Name::Global(v.clone()), ty));
Inferable::Free(Name::Global(v))
},
Err(_) => e
})
},
}),
None => Ok(None)
}
}
fn main() {
use std::collections::HashMap;
use std::io::{self, BufRead, Write};
let mut ctx = Context::new();
let mut bindings = HashMap::new();
let stdin = io::stdin();
let mut stdout = io::stdout();
let _ = write!(stdout, "≫ ");
let _ = stdout.flush();
for line in stdin.lock().lines() {
match line {
Ok(line) => match parse(&line, &mut ctx, &mut bindings) {
Ok(Some(term)) => {
match type_up_0(ctx.clone(), term.clone()) {
Ok(ty) => println!("{}", print_up(Inferable::Ann(quote_0(eval_up(term, Env::new())), quote_0(ty)), VecDeque::new(), Assoc::Right)),
Err(e) => println!("Type error: {} {}", print_up(term, VecDeque::new(), Assoc::Left), e)
}
},
Ok(None) => (),
Err(()) => println!("Parse error.")
},
Err(e) => println!("I/O error: {}", e),
}
let _ = write!(stdout, "≫ ");
let _ = stdout.flush();
}
}
download_success: true

blob_id: c93fa4bb2c84a2c7368233a21cd4b4405c70d0b2 | language: Rust | repo_name: IThawk/rust-project | path: /rust-master/src/test/ui/maybe-bounds-where.rs | src_encoding: UTF-8 | length_bytes: 963 | score: 3.390625 | int_score: 3 | detected_licenses: ["MIT", "LicenseRef-scancode-other-permissive", "Apache-2.0", "BSD-3-Clause", "BSD-2-Clause", "NCSA"] | license_type: permissive | text:
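// Compiler UI test: each `//~^ ERROR` / `//~| WARN` annotation states the diagnostic expected
// on the line above it.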
struct S1<T>(T) where (T): ?Sized;
//~^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared
struct S2<T>(T) where u8: ?Sized;
//~^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared
struct S3<T>(T) where &'static T: ?Sized;
//~^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared
trait Trait<'a> {}
struct S4<T>(T) where for<'a> T: ?Trait<'a>;
//~^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared
struct S5<T>(*const T) where T: ?Trait<'static> + ?Sized;
//~^ ERROR type parameter has more than one relaxed default bound
//~| WARN default bound relaxed for a type parameter
impl<T> S1<T> {
fn f() where T: ?Sized {}
//~^ ERROR `?Trait` bounds are only permitted at the point where a type parameter is declared
}
fn main() {
let u = vec![1, 2, 3];
let _s: S5<[u8]> = S5(&u[..]); // OK
}
download_success: true

blob_id: 7d3c48b838f57a39bd57d4d8b18a38f1b137f081 | language: Rust | repo_name: infinityb/rust-irc-bot | path: /src/command_mapper/format.rs | src_encoding: UTF-8 | length_bytes: 19,892 | score: 3.3125 | int_score: 3 | detected_licenses: ["Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive | text:
use std::collections::BTreeMap;
use super::Token;
#[derive(Debug, PartialEq, Eq)]
pub enum FormatParseError {
EmptyFormat,
InvalidAtom(String),
BrokenFormat,
}
pub type FormatResult<T> = Result<T, FormatParseError>;
#[derive(Debug, PartialEq, Eq)]
pub enum ValueParseError {
Mismatch(&'static str),
MessageTooShort,
MessageTooLong,
}
pub type ValueResult<T> = Result<T, ValueParseError>;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum AtomType {
String,
WholeNumeric
}
#[derive(Debug, PartialEq, Eq, Clone)]
// TODO: remove pub
pub enum Atom {
// Literal(value)
Literal(String),
// Formatted(name, kind)
Formatted(String, AtomType),
// Rest(name)
Rest(String),
Whitespace,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Value {
Literal(String),
String(String),
WholeNumeric(String)
}
impl Value {
fn parse(kind: AtomType, input: &str) -> ValueResult<Value> {
match kind {
AtomType::String => Ok(Value::String(input.to_string())),
AtomType::WholeNumeric => {
// TODO: check if it is a numberish thing
Ok(Value::WholeNumeric(input.to_string()))
}
}
}
}
fn consume_token<'a>(from: &'a str) -> ValueResult<(&'a str, &'a str)> {
match from.find(' ') {
Some(idx) => Ok((&from[..idx], &from[idx..])),
None => Ok((from, ""))
}
}
fn consume_literal<'a>(from: &'a str, literal: &str) -> ValueResult<(&'a str, &'a str)> {
let from_s = from.to_lowercase();
if from_s.starts_with(literal) {
let length = literal.len();
Ok((&from[..length], &from[length..]))
} else {
Err(ValueParseError::Mismatch("literal mismatch"))
}
}
fn consume_whitespace<'a>(from: &'a str) -> (&'a str, &'a str) {
let mut idx = 0;
while from[idx..].starts_with(" ") {
idx += 1;
}
(&from[..idx], &from[idx..])
}
impl Atom {
fn consume<'a>(&self, input: &'a str) -> ValueResult<(Option<Value>, &'a str)> {
match *self {
Atom::Literal(ref val) => {
let (lit, rest) = try!(consume_literal(input, &val));
let value = Value::Literal(lit.to_string());
Ok((Some(value), rest))
},
Atom::Formatted(_, kind) => {
let (lit, rest) = try!(consume_token(input));
let value = try!(Value::parse(kind, lit));
Ok((Some(value), rest))
},
Atom::Rest(_) => {
let value = try!(Value::parse(AtomType::String, input));
Ok((Some(value), ""))
},
Atom::Whitespace => {
let (whitespace, rest) = consume_whitespace(input);
if whitespace.len() == 0 {
return Err(ValueParseError::Mismatch("Missing whitespace"));
}
Ok((None, rest))
}
}
}
}
#[derive(Debug)]
pub struct Format {
atoms: Vec<Atom>
}
#[derive(Debug, Clone)]
pub struct CommandPhrase {
pub token: Token,
pub command: String,
pub original_command: String,
args: BTreeMap<String, Value>
}
impl CommandPhrase {
pub fn get<T: ValueExtract>(&self, key: &str) -> Option<T> {
match self.args.get(&key.to_string()) {
Some(value) => ValueExtract::value_extract(value),
None => None
}
}
}
pub trait ValueExtract: Sized {
fn value_extract(val: &Value) -> Option<Self>;
}
impl ValueExtract for String {
fn value_extract(val: &Value) -> Option<String> {
match *val {
Value::String(ref str_val) => Some(str_val.clone()),
_ => None
}
}
}
impl ValueExtract for u64 {
fn value_extract(val: &Value) -> Option<u64> {
match *val {
Value::WholeNumeric(ref str_val) => str_val.parse().ok(),
_ => None
}
}
}
impl Format {
pub fn from_str(definition: &str) -> FormatResult<Format> {
match atom_parser::parse_atoms(definition) {
Ok(atoms) => {
match atoms[0] {
Atom::Literal(_) => Ok(Format { atoms: atoms }),
_ => return Err(FormatParseError::InvalidAtom(
"first atom must be literal".to_string()))
}
},
Err(err) => Err(err)
}
}
pub fn parse(&self, token: Token, input: &str) -> ValueResult<CommandPhrase> {
let original_input: &str = input;
let input: &str = input;
let mut args_map: BTreeMap<String, Value> = BTreeMap::new();
let command = match self.atoms[0] {
Atom::Literal(ref literal) => literal.to_string(),
_ => return Err(ValueParseError::Mismatch("first atom must be literal"))
};
let mut remaining = input;
for atom in self.atoms.iter() {
if remaining == "" {
return Err(ValueParseError::MessageTooShort)
}
let value = match atom.consume(remaining) {
Ok((Some(value), tmp)) => {
remaining = tmp;
value
},
Ok((None, tmp)) => {
remaining = tmp;
continue;
},
Err(err) => return Err(err)
};
let name = match *atom {
Atom::Literal(_) => continue,
Atom::Whitespace => continue,
Atom::Formatted(ref name, _) => name.clone(),
Atom::Rest(ref name) => name.clone(),
};
match value {
Value::Literal(_) => (),
Value::String(_) | Value::WholeNumeric(_) => {
args_map.insert(name, value);
},
};
}
if !remaining.bytes().all(|x| x == b' ') {
return Err(ValueParseError::MessageTooLong)
}
let cmd_phrase = CommandPhrase {
token: token,
command: command.trim_right_matches(' ').to_lowercase(),
original_command: original_input.to_string(),
args: args_map,
};
println!("{:?} - {:?} is consuming on {:?}", token, input, cmd_phrase);
Ok(cmd_phrase)
}
}
// use self::atom_parser::parse_atom;
pub mod atom_parser {
use super::{Atom, AtomType, FormatResult, FormatParseError};
static ASCII_ALPHANUMERIC: [u8; 62] = [
b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9',
b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J',
b'K', b'L', b'M', b'N', b'O', b'P', b'Q', b'R', b'S', b'T',
b'U', b'V', b'W', b'X', b'Y', b'Z',
b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j',
b'k', b'l', b'm', b'n', b'o', b'p', b'q', b'r', b's', b't',
b'u', b'v', b'w', b'x', b'y', b'z'
];
#[inline]
fn is_ascii_alphanumeric(target: u8) -> bool {
for &allowed in ASCII_ALPHANUMERIC.iter() {
if target == allowed {
return true;
}
}
false
}
fn parse_var_atom(atom: &str) -> FormatResult<Atom> {
let (name, format_spec) = match atom.find(':') {
Some(idx) => (&atom[..idx], Some(&atom[1 + idx ..])),
None => (atom, None)
};
let format_kind = match format_spec {
Some("") => return Err(FormatParseError::InvalidAtom(
"atom has empty format specifier".to_string())),
Some("s") => AtomType::String,
Some("d") => AtomType::WholeNumeric,
Some(spec) => return Err(FormatParseError::InvalidAtom(
format!("atom has unknown format specifier `{}'", spec))),
None => AtomType::String
};
Ok(Atom::Formatted(name.to_string(), format_kind))
}
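// Byte-by-byte state machine for parsing a format definition: literal words, `{name}` or
// `{name:spec}` variables, a final `{*rest}` capture, and runs of whitespace between atoms.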
#[derive(Clone, Copy)]
enum State {
Zero,
InLiteral,
InWhitespace,
InVariable,
InRestVariable,
ForceEnd,
Errored,
}
struct AtomParser {
byte_idx: usize,
atoms: Vec<Atom>,
state: State,
cur_atom: Vec<u8>,
error: Option<FormatParseError>
}
impl AtomParser {
fn new() -> AtomParser {
AtomParser {
byte_idx: 0,
atoms: Vec::new(),
state: State::Zero,
cur_atom: Vec::new(),
error: None,
}
}
fn finalize_literal(&mut self) {
{
// These should be fine unless we break parse_atom ...
let string = String::from_utf8_lossy(self.cur_atom.as_slice());
self.atoms.push(Atom::Rest(string.into_owned()));
}
self.cur_atom.clear();
}
fn push_byte(&mut self, byte: u8) {
use self::State::{
Zero, InLiteral, InVariable, InRestVariable, ForceEnd, Errored, InWhitespace
};
let new_state = match (self.state, byte) {
(Zero, b'{') => InVariable,
(Zero, b' ') => InWhitespace,
(Zero, cur_byte) => {
self.cur_atom.push(cur_byte);
InLiteral
}
(InVariable, b'}') => {
let atom_res = {
// These should be fine unless we break parse_atom ...
let string = String::from_utf8_lossy(self.cur_atom.as_slice());
parse_var_atom(&string)
};
match atom_res {
Ok(atom) => {
self.atoms.push(atom);
self.cur_atom.clear();
Zero
},
Err(err) => {
self.error = Some(err);
Errored
}
}
},
(InVariable, b'*') if self.cur_atom.len() == 0 => {
InRestVariable
},
(InVariable, b'*') => Errored,
(InVariable, b':') if self.cur_atom.len() > 0 => {
self.cur_atom.push(b':');
InVariable
},
(InVariable, b':') => Errored,
(InVariable, cur_byte) if is_ascii_alphanumeric(cur_byte) => {
self.cur_atom.push(cur_byte);
InVariable
},
(InVariable, _) => {
self.error = Some(FormatParseError::BrokenFormat);
Errored
},
(InRestVariable, b'}') => {
self.finalize_literal();
ForceEnd
},
(InRestVariable, cur_byte) if is_ascii_alphanumeric(cur_byte) => {
self.cur_atom.push(cur_byte);
InRestVariable
},
(InRestVariable, _) => {
self.error = Some(FormatParseError::BrokenFormat);
Errored
},
(InWhitespace, b' ') => {
InWhitespace
},
(InWhitespace, b'{') => {
assert_eq!(self.cur_atom.len(), 0);
self.atoms.push(Atom::Whitespace);
InVariable
},
(InWhitespace, cur_byte) => {
assert_eq!(self.cur_atom.len(), 0);
self.atoms.push(Atom::Whitespace);
self.cur_atom.push(cur_byte);
InLiteral
},
(InLiteral, b' ') => {
{
// These should be fine unless we break parse_atom ...
let string = String::from_utf8_lossy(self.cur_atom.as_slice());
self.atoms.push(Atom::Literal(string.into_owned()));
}
self.cur_atom.clear();
InWhitespace
},
(InLiteral, b'{') => {
{
// These should be fine unless we break parse_atom ...
let string = String::from_utf8_lossy(self.cur_atom.as_slice());
self.atoms.push(Atom::Literal(string.into_owned()));
}
self.cur_atom.clear();
InVariable
},
(InLiteral, cur_byte) => {
self.cur_atom.push(cur_byte);
InLiteral
},
(Errored, _) => Errored,
(ForceEnd, _) => {
self.error = Some(FormatParseError::BrokenFormat);
Errored
},
};
self.byte_idx += 1;
self.state = new_state;
}
fn finish(mut self) -> FormatResult<Vec<Atom>> {
match self.state {
State::InLiteral => {
let string = String::from_utf8_lossy(self.cur_atom.as_slice());
self.atoms.push(Atom::Literal(string.into_owned()));
Ok(self.atoms)
},
State::InWhitespace => {
self.atoms.pop();
Ok(self.atoms)
},
State::Zero | State::ForceEnd => Ok(self.atoms),
State::Errored => Err(self.error.unwrap()),
State::InVariable => Err(FormatParseError::BrokenFormat),
State::InRestVariable => Err(FormatParseError::BrokenFormat),
}
}
}
pub fn parse_atoms(atom: &str) -> FormatResult<Vec<Atom>> {
let mut parser = AtomParser::new();
for &byte in atom.as_bytes().iter() {
parser.push_byte(byte);
}
match parser.finish() {
Ok(vec) => {
if vec.len() == 0 {
return Err(FormatParseError::EmptyFormat)
}
Ok(vec)
},
Err(err) => Err(err),
}
}
#[cfg(test)]
mod tests {
use super::parse_atoms;
use super::super::{Atom, AtomType};
#[test]
fn test_basics() {
let atoms = parse_atoms("deer").ok().unwrap();
assert_eq!(atoms, vec!(Atom::Literal("deer".to_string())));
let atoms = parse_atoms("deer{a}").ok().unwrap();
assert_eq!(atoms, vec!(
Atom::Literal("deer".to_string()),
Atom::Formatted("a".to_string(), AtomType::String),
));
let atoms = parse_atoms("deer {a}").ok().unwrap();
assert_eq!(atoms, vec!(
Atom::Literal("deer".to_string()),
Atom::Whitespace,
Atom::Formatted("a".to_string(), AtomType::String),
));
let atoms = parse_atoms("deer {a} {*b}").ok().unwrap();
assert_eq!(atoms, vec!(
Atom::Literal("deer".to_string()),
Atom::Whitespace,
Atom::Formatted("a".to_string(), AtomType::String),
Atom::Whitespace,
Atom::Rest("b".to_string()),
));
assert!(parse_atoms("deer {a} {*b}xxx").is_err());
match parse_atoms("deer {a:s} {*b}") {
Ok(_ok) => (),
                Err(err) => panic!("{:?}", err)
};
}
}
}
#[test]
fn cons_the_basics() {
{
let fmt_str = "articles {foo} {category:s} {id:d}";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
assert_eq!(fmt.atoms.len(), 7);
assert_eq!(fmt.atoms[0], Atom::Literal("articles".to_string()));
assert_eq!(fmt.atoms[1], Atom::Whitespace);
assert_eq!(
fmt.atoms[2],
Atom::Formatted("foo".to_string(), AtomType::String));
assert_eq!(fmt.atoms[3], Atom::Whitespace);
assert_eq!(
fmt.atoms[4],
Atom::Formatted("category".to_string(), AtomType::String));
assert_eq!(fmt.atoms[5], Atom::Whitespace);
assert_eq!(
fmt.atoms[6],
Atom::Formatted("id".to_string(), AtomType::WholeNumeric));
}
match Format::from_str("") {
Ok(_) => panic!("empty string must not succeed"),
Err(FormatParseError::EmptyFormat) => (),
Err(err) => panic!("wrong error for empty: {:?}", err),
};
match Format::from_str("{category:s} articles") {
Ok(_) => panic!("first atom must be literal"),
Err(_) => ()
};
{
let fmt_str = "articles {foo} {*rest}";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
let cmdlet = match fmt.parse(Token(0), "articles bar test article argument") {
Ok(cmdlet) => cmdlet,
Err(err) => panic!("parse failure: {:?}", err)
};
assert_eq!(&cmdlet.command, "articles");
assert_eq!(
cmdlet.args["foo"],
Value::String("bar".to_string()));
assert_eq!(
cmdlet.args["rest"],
Value::String("test article argument".to_string()));
}
}
#[test]
fn simple001() {
let cmd_str = "articles my_bar my_category 1234";
let fmt_str = "articles {foo} {category:s} {id:d}";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
assert!(fmt.parse(Token(0), "articles").is_err());
let cmdlet = match fmt.parse(Token(0), cmd_str) {
Ok(cmdlet) => cmdlet,
Err(err) => panic!("parse failure: {:?}", err)
};
assert_eq!(&cmdlet.command, "articles");
assert_eq!(
cmdlet.get::<String>("foo"),
Some("my_bar".to_string()));
assert_eq!(
cmdlet.get::<String>("category"),
Some("my_category".to_string()));
assert_eq!(cmdlet.get::<u64>("id"), Some(1234));
}
#[test]
fn simple002() {
match Format::from_str("") {
Ok(_) => panic!("empty string must not succeed"),
Err(FormatParseError::EmptyFormat) => (),
Err(err) => panic!("wrong error for empty: {:?}", err),
};
}
#[test]
fn simple003() {
let cmd_str = "articles ";
let fmt_str = "articles";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
if let Err(err) = fmt.parse(Token(0), cmd_str) {
panic!("Error processing {:?} with {:?}: {:?}", cmd_str, fmt_str, err);
}
}
#[test]
fn simple004() {
let cmd_str = "articlestest";
let fmt_str = "articles {foo}";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
match fmt.parse(Token(0), cmd_str) {
Err(ValueParseError::Mismatch(_)) => (),
p @ _ => panic!("{:?} should not parse. Got {:?}", cmd_str, p),
};
}
#[test]
fn simple005() {
let cmd_str = "irc-colors more";
let fmt_str = "irc-colors more";
let fmt = match Format::from_str(fmt_str) {
Ok(fmt) => fmt,
Err(err) => panic!("parse failure: {:?}", err)
};
println!("{:?}", fmt);
let cmdlet = match fmt.parse(Token(0), cmd_str) {
Ok(cmdlet) => cmdlet,
Err(err) => panic!("parse failure: {:?}", err)
};
assert_eq!(&cmdlet.command, "irc-colors");
}
| true
|
9de418c099c72f8cf8351822d094687c1be4b976
|
Rust
|
mhintz/half-edge-mesh-rs
|
/src/edge.rs
|
UTF-8
| 3,752
| 3.1875
| 3
|
[] |
no_license
|
use std::hash;
use ptr::{Ptr, EdgePtr, VertPtr, FacePtr, EdgeRc, VertRc, FaceRc};
use iterators::*;
#[derive(Debug)]
pub struct Edge {
pub next: EdgePtr,
pub pair: EdgePtr,
pub origin: VertPtr,
pub face: FacePtr,
pub id: u32,
}
// TODO: change the name of set_*_rc to just set_*, and change the current set_* to set_*_ptr
// because set_*_rc is used way more than set_* at the moment.
impl Edge {
pub fn empty(id: u32) -> Edge {
Edge {
id: id,
next: EdgePtr::empty(),
pair: EdgePtr::empty(),
origin: VertPtr::empty(),
face: FacePtr::empty(),
}
}
pub fn with_origin(id: u32, origin: VertPtr) -> Edge {
Edge {
id: id,
next: EdgePtr::empty(),
pair: EdgePtr::empty(),
origin: origin,
face: FacePtr::empty(),
}
}
pub fn take_next(&mut self, next: EdgePtr) { self.next = next; }
pub fn set_next(&mut self, next: & EdgePtr) { self.next = next.clone(); }
pub fn set_next_rc(&mut self, next: & EdgeRc) { self.next = Ptr::new(next); }
pub fn take_pair(&mut self, pair: EdgePtr) { self.pair = pair; }
pub fn set_pair(&mut self, pair: & EdgePtr) { self.pair = pair.clone(); }
pub fn set_pair_rc(&mut self, pair: & EdgeRc) { self.pair = Ptr::new(pair); }
pub fn take_origin(&mut self, origin: VertPtr) { self.origin = origin; }
pub fn set_origin(&mut self, origin: & VertPtr) { self.origin = origin.clone(); }
pub fn set_origin_rc(&mut self, origin: & VertRc) { self.origin = Ptr::new(origin); }
pub fn set_face(&mut self, face: & FacePtr) { self.face = face.clone(); }
pub fn take_face(&mut self, face: FacePtr) { self.face = face; }
pub fn set_face_rc(&mut self, face: & FaceRc) { self.face = Ptr::new(face); }
// The tests in this function are in order of "subjective likeliness of being invalid"
pub fn is_valid(& self) -> bool { self.pair.is_valid() && self.face.is_valid() && self.origin.is_valid() && self.next.is_valid() }
pub fn get_next(& self) -> Option<EdgeRc> { self.next.upgrade() }
pub fn get_pair(& self) -> Option<EdgeRc> { self.pair.upgrade() }
pub fn get_origin(& self) -> Option<VertRc> { self.origin.upgrade() }
pub fn get_face(& self) -> Option<FaceRc> { self.face.upgrade() }
pub fn get_next_next(& self) -> Option<EdgeRc> { self.get_next().and_then(|n| n.borrow().get_next()) }
pub fn get_next_pair(& self) -> Option<EdgeRc> { self.get_next().and_then(|n| n.borrow().get_pair()) }
pub fn get_target(& self) -> Option<VertRc> { self.get_next().and_then(|n| n.borrow().get_origin()) }
pub fn get_pair_face(& self) -> Option<FaceRc> { self.get_pair().and_then(|p| p.borrow().get_face()) }
/// Yields edge.origin, then edge.next.origin
/// Gives you first the source of the half-edge, and then its target
pub fn adjacent_verts(& self) -> EdgeAdjacentVertIterator {
EdgeAdjacentVertIterator::new(self)
}
/// Gives you the edges connected to the source of the half-edge first (in *clockwise* order)
/// and then the edges connected to the target of the half-edge (also *clockwise* order)
pub fn adjacent_edges(& self) -> EdgeAdjacentEdgeIterator {
EdgeAdjacentEdgeIterator::new(self)
}
/// Yields edge.face, then edge.pair.face
/// Gives you the "left" face to the half edge, and then the "right" face
/// Note that the "right" face is not connected to this edge, but to its pair
pub fn adjacent_faces(& self) -> EdgeAdjacentFaceIterator {
EdgeAdjacentFaceIterator::new(self)
}
}
impl PartialEq<Edge> for Edge {
fn eq(& self, other: & Edge) -> bool { self.id == other.id }
}
impl Eq for Edge {}
impl hash::Hash for Edge {
fn hash<H>(& self, state: &mut H) where H: hash::Hasher {
        state.write_u32(self.id);
}
}
| true
|
a5c97b7aa1eed27a9f5ab336638044ff022aa208
|
Rust
|
redox-os/ion
|
/members/ranges/src/index.rs
|
UTF-8
| 1,493
| 3.890625
| 4
|
[
"MIT"
] |
permissive
|
use std::fmt::Display;
/// Index into a vector-like object
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum Index {
/// Index starting from the beginning of the vector, where `Forward(0)`
/// is the first element
Forward(usize),
/// Index starting from the end of the vector, where `Backward(0)` is the
    /// last element.
Backward(usize),
}
impl Display for Index {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Index::Forward(index) => write!(f, "{}", index),
Index::Backward(actual_index) => {
let minus_index = actual_index + 1;
write!(f, "-{}", minus_index)
}
}
}
}
impl Index {
pub fn resolve(&self, vector_length: usize) -> Option<usize> {
match *self {
Index::Forward(n) => Some(n),
Index::Backward(n) if n >= vector_length => None,
Index::Backward(n) => Some(vector_length - (n + 1)),
}
}
    /// Construct an index using the following conventions:
    /// - A positive value `n` represents `Forward(n)`
    /// - A negative value `-n` represents `Backward(n - 1)` such that:
/// ```ignore,rust
/// assert_eq!(Index::new(-1), Index::Backward(0))
/// ```
pub fn new(input: isize) -> Index {
if input < 0 {
Index::Backward((input.abs() as usize) - 1)
} else {
Index::Forward(input.abs() as usize)
}
}
}
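// A minimal usage sketch of the conventions documented above: `Index::new`
// maps positive inputs to `Forward` and negative inputs to `Backward`, and
// `resolve` turns either variant into a plain offset (or `None` when the
// vector is too short).
#[cfg(test)]
mod tests {
    use super::Index;
    #[test]
    fn new_and_resolve_follow_the_documented_conventions() {
        assert_eq!(Index::new(2), Index::Forward(2));
        assert_eq!(Index::new(-1), Index::Backward(0));
        // In a vector of length 5, index -1 resolves to the last element, offset 4.
        assert_eq!(Index::new(-1).resolve(5), Some(4));
        // `Backward(5)` is out of range for a vector of length 5.
        assert_eq!(Index::Backward(5).resolve(5), None);
    }
}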
| true
|
082ad2d9c0e6db4335aae1edaadc26d8095625b7
|
Rust
|
AntonHermann/volume-control
|
/src/main.rs
|
UTF-8
| 1,636
| 3.046875
| 3
|
[] |
no_license
|
use std::io::Read;
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
use std::error::Error;
pub type Result<T> = std::result::Result<T, String>;
fn main() {
run().unwrap();
}
fn run() -> Result<()> {
println!("Welcome to system_monitor.rs");
let name = "Master";
let mut monitor = Command::new("sh")
.args(&["-c", "stdbuf -oL alsactl monitor"])
.stdout(Stdio::piped())
.spawn()
.expect("Failed to start alsactl monitor")
.stdout
.expect("Failed to pipe alsactl monitor output");
let mut buffer = [0; 1024];
loop {
if let Ok(_) = monitor.read(&mut buffer) {
print_sound_info(name)?;
}
thread::sleep(Duration::new(0,250_000_000))
}
}
fn print_sound_info(name: &str) -> Result<()> {
let output: String = Command::new("sh")
.args(&["-c", format!("amixer get {}", name).as_str()])
.output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_owned())
.map_err(|e| e.description().to_owned())?;
eprintln!("{}", output);
    let last = output.lines().last().ok_or("couldn't get left channel")?;
const FILTER_PATTERN: &[char] = &['[', ']', '%'];
let mut els = last.split_whitespace().filter(|x| x.starts_with('['))
.map(|s| s.trim_matches(FILTER_PATTERN));
    let vol = els.next().ok_or("couldn't read volume")?.parse::<u32>()
.map_err(|_| "failed parsing volume")?;
let muted = els.next().ok_or("couldn't get muted state")
.map(|s| s == "off")?;
println!("Volume: {}, Muted: {}", vol, muted);
Ok(())
}
| true
|
63f4351ecdad951613cf591cc864ca699086121c
|
Rust
|
tlinford/raytracer-challenge-rs
|
/raytracer/src/geometry/mod.rs
|
UTF-8
| 4,382
| 3.0625
| 3
|
[] |
no_license
|
pub mod intersection;
pub mod shape;
use crate::{
bounding_box::BoundingBox, material::Material, matrix::Matrix, point::Point, ray::Ray,
vector::Vector,
};
use std::{any::Any, fmt::Debug, ptr};
use self::intersection::Intersection;
#[derive(Debug, PartialEq)]
pub struct BaseShape {
transform: Matrix,
pub transform_inverse: Matrix,
transform_inverse_transpose: Matrix,
pub material: Material,
bounding_box: BoundingBox,
shadow: bool,
}
impl Default for BaseShape {
fn default() -> Self {
let transform = Matrix::identity(4, 4);
let transform_inverse = Matrix::identity(4, 4);
let transform_inverse_transpose = Matrix::identity(4, 4);
Self {
transform,
transform_inverse,
transform_inverse_transpose,
material: Material::default(),
bounding_box: BoundingBox::default(),
shadow: true,
}
}
}
pub trait Shape: Debug + Send + Sync {
fn get_base(&self) -> &BaseShape;
fn get_base_mut(&mut self) -> &mut BaseShape;
fn local_intersect(&self, ray: &Ray) -> Vec<Intersection>;
fn local_normal_at(&self, point: Point, intersection: &Intersection) -> Vector;
fn as_any(&self) -> &dyn Any;
fn equals(&self, other: &dyn Shape) -> bool;
fn intersect(&self, ray: &Ray) -> Vec<Intersection> {
let local_ray = ray.transform(&self.get_base().transform_inverse);
self.local_intersect(&local_ray)
}
fn normal_at(&self, point: Point, intersection: &Intersection) -> Vector {
let local_point = &self.get_base().transform_inverse * point;
let local_normal = self.local_normal_at(local_point, intersection);
let world_normal = &self.get_base().transform_inverse_transpose * local_normal;
world_normal.normalize()
}
fn material(&self) -> &Material {
&self.get_base().material
}
fn material_mut(&mut self) -> &mut Material {
&mut self.get_base_mut().material
}
fn set_material(&mut self, material: Material) {
self.get_base_mut().material = material;
}
fn transform(&self) -> &Matrix {
&self.get_base().transform
}
fn set_transform(&mut self, transform: Matrix) {
self.get_base_mut().bounding_box = self
.get_bounds()
.transform(&self.get_base().transform_inverse);
let inverse = transform.inverse();
let inverse_transpose = inverse.transpose();
self.get_base_mut().transform = transform;
self.get_base_mut().transform_inverse = inverse;
self.get_base_mut().transform_inverse_transpose = inverse_transpose;
self.get_base_mut().bounding_box = self.get_bounds().transform(self.transform());
}
fn includes(&self, other: &dyn Shape) -> bool {
ptr::eq(self.get_base(), other.get_base())
}
fn get_bounds(&self) -> &BoundingBox {
&self.get_base().bounding_box
}
fn parent_space_bounds(&self) -> BoundingBox {
self.get_bounds().transform(&Matrix::identity(4, 4))
}
fn divide(&mut self, _threshold: usize) {}
fn has_shadow(&self) -> bool {
self.get_base().shadow
}
fn no_shadow(&mut self) {
self.get_base_mut().shadow = false;
}
}
impl<'a, 'b> PartialEq<dyn Shape + 'b> for dyn Shape + 'a {
fn eq(&self, other: &dyn Shape) -> bool {
self.equals(other)
}
}
#[cfg(test)]
mod tests {
use std::f64::consts::PI;
use shape::Sphere;
use crate::transform::{rotation_y, scaling, translation};
use super::{shape::Group, *};
#[test]
fn normal_on_child_object() {
let mut g1 = Group::default();
g1.set_transform(rotation_y(PI / 2.0));
let mut g2 = Box::new(Group::default());
g2.set_transform(scaling(1, 2, 3));
let mut s = Box::new(Sphere::default());
s.set_transform(translation(5, 0, 0));
g2.add_child(s);
g1.add_child(g2);
let g2: &Group = (g1.children[0])
.as_ref()
.as_any()
.downcast_ref::<Group>()
.unwrap();
let s = &g2.children[0];
let n = s.normal_at(
Point::new(1.7321, 1.1547, -5.5774),
&Intersection::new(-100.0, s.as_ref()),
);
assert_eq!(n, Vector::new(0.2857, 0.42854, -0.85716));
}
}
| true
|
2e0aecba0237ecaa9cf7f63409f9e4bbbe2bdf1f
|
Rust
|
ByteUnits/ByteUnitsExamples
|
/rust_hello_world/Hello.rs
|
UTF-8
| 83
| 2.765625
| 3
|
[] |
no_license
|
fn main(){
let hello_string = "TEST";
println!("Hello {}", hello_string);
}
| true
|
5771f8bf467ef94ed482966398ddc8381b577c7e
|
Rust
|
harpsword/tex-rs
|
/src/tex_the_program/section_0276.rs
|
UTF-8
| 1,553
| 2.5625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! @ To save a value of |eqtb[p]| that was established at level |l|, we
//! can use the following subroutine.
//
// @p procedure eq_save(@!p:pointer;@!l:quarterword); {saves |eqtb[p]|}
/// saves `eqtb[p]`
#[allow(unused_variables)]
#[cfg_attr(feature = "trace", tracing::instrument(level = "trace"))]
pub(crate) fn eq_save(globals: &mut TeXGlobals, p: pointer, l: quarterword) {
// begin check_full_save_stack;
check_full_save_stack!(globals);
// if l=level_zero then save_type(save_ptr):=restore_zero
if l == level_zero {
save_type!(globals, globals.save_ptr) = restore_zero;
}
// else begin save_stack[save_ptr]:=eqtb[p]; incr(save_ptr);
else {
globals.save_stack[globals.save_ptr] = globals.eqtb[p];
incr!(globals.save_ptr);
// save_type(save_ptr):=restore_old_value;
save_type!(globals, globals.save_ptr) = restore_old_value;
// end;
}
// save_level(save_ptr):=l; save_index(save_ptr):=p; incr(save_ptr);
save_level!(globals, globals.save_ptr) = l;
save_index!(globals, globals.save_ptr) = p;
incr!(globals.save_ptr);
// end;
}
use crate::section_0004::TeXGlobals;
use crate::section_0016::incr;
use crate::section_0113::quarterword;
use crate::section_0115::pointer;
use crate::section_0221::level_zero;
use crate::section_0268::restore_old_value;
use crate::section_0268::restore_zero;
use crate::section_0268::save_index;
use crate::section_0268::save_level;
use crate::section_0268::save_type;
use crate::section_0273::check_full_save_stack;
| true
|
8c5d83010d7661e551fa1692dcc72ce953dcb043
|
Rust
|
himazin19990927/lilacc
|
/ast/src/token.rs
|
UTF-8
| 1,250
| 3.734375
| 4
|
[] |
no_license
|
use crate::lit::*;
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
/// A literal token: `1`, `true`
Lit(Lit),
/// An identity token: `x`, `foo`
Ident(String),
/// The `=` token
Eq,
/// The `<` token
Lt,
/// The `<=` token
Le,
/// The `==` token
EqEq,
/// The `!=` token
Ne,
/// The `>=` token
Ge,
/// The `>` token
Gt,
/// The `&&` token
AndAnd,
/// The `||` token
OrOr,
/// The `!` token
Not,
/// The `+` token
Plus,
/// The `-` token
Minus,
/// The `*` token
Star,
/// The `/` token
Slash,
/// The `&` token
And,
/// The `|` token
Or,
/// The `(` token
OpenParen,
/// The `)` token
CloseParen,
/// The `[` token
OpenBracket,
/// The `]` token
CloseBracket,
/// The `{` token
OpenBrace,
/// The `}` token
CloseBrace,
/// The '.' token
Dot,
/// The `,` token
Comma,
/// The `;` token
Semi,
/// The `:` token
Colon,
/// The `->` token
RArrow,
/// The `<-` token
LArrow,
/// The `let` token
Let,
/// The `fn` token
Fn,
/// The `return` token
Return,
}
| true
|
7be03cee7e4dd5ef77070382fac7aab9ca231e5b
|
Rust
|
renestein/winmd-rs
|
/src/error.rs
|
UTF-8
| 838
| 2.875
| 3
|
[
"MIT",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
use std::io;
#[derive(Debug)]
pub enum Error {
Io(io::Error),
ParseError(ParseError),
}
impl std::convert::From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::Io(error)
}
}
impl std::convert::From<ParseError> for Error {
fn from(error: ParseError) -> Self {
Error::ParseError(error)
}
}
pub type ParseResult<T> = Result<T, ParseError>;
#[derive(Debug)]
pub enum ParseError {
Io(io::Error),
InvalidData(&'static str),
}
pub(crate) fn unexpected_eof() -> ParseError {
ParseError::Io(io::Error::from(io::ErrorKind::UnexpectedEof))
}
pub(crate) fn unsupported_blob() -> ParseError {
ParseError::InvalidData("Unsupported blob")
}
impl std::convert::From<io::Error> for ParseError {
fn from(error: io::Error) -> Self {
ParseError::Io(error)
}
}
| true
|
276034a2ae7f4803402af9ca18d0e6ac96930c7c
|
Rust
|
open-marketplace-applications/marketplace-api
|
/src/user/handler.rs
|
UTF-8
| 2,053
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
use crate::database::Pool;
use crate::errors::ServiceError;
use crate::user::model::{LoggedUser, SlimUser, UserData};
use crate::user::service as user;
use actix_identity::{Identity, RequestIdentity};
use actix_web::dev::Payload;
use actix_web::{web, Error, FromRequest, HttpRequest, HttpResponse};
impl FromRequest for LoggedUser {
type Error = Error;
type Future = futures::future::Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
let identity = req.get_identity();
let slim_user = if let Some(identity) = identity {
match serde_json::from_str::<SlimUser>(&identity) {
Err(e) => return futures::future::err(e.into()),
Ok(y) => Ok(Some(y)),
}
} else {
Ok(None)
};
futures::future::ready(slim_user.map(LoggedUser))
}
}
pub async fn register(
user_data: web::Json<UserData>,
pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
user::register(user_data.into_inner(), pool).map(|res| HttpResponse::Ok().json(&res))
}
#[derive(Debug, Deserialize)]
pub(super) struct LoginQuery {
pub email: String,
pub password: String,
}
pub(super) async fn login(
auth_data: web::Json<LoginQuery>,
id: Identity,
pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
user::login(&auth_data.email, &auth_data.password, pool).and_then(|res| {
let user_string =
serde_json::to_string(&res).map_err(|_| ServiceError::InternalServerError)?;
debug!("user_string={}", user_string);
id.remember(user_string);
Ok(HttpResponse::Ok().json(res))
})
}
pub fn me(logged_user: LoggedUser) -> HttpResponse {
match logged_user.0 {
None => HttpResponse::Unauthorized().json(ServiceError::Unauthorized),
Some(user) => HttpResponse::Ok().json(user),
}
}
pub fn logout(id: Identity) -> HttpResponse {
id.forget();
HttpResponse::Ok().finish()
}
| true
|
5553eb8db3c48599f89d94c5deb3d75b30e056ec
|
Rust
|
rsb007/rust_user_service
|
/user_service_impl/src/env_setup/table.rs
|
UTF-8
| 661
| 2.59375
| 3
|
[] |
no_license
|
use cdrs::query::QueryExecutor;
use crate::constants::queries::{EVENT_TABLE, STATE_TABLE};
use crate::env_setup::connection::CurrentSession;
/// create_table takes the current session, creates the event and state
/// tables in the database, and returns a status string.
pub fn create_table(session: &CurrentSession) -> &'static str {
session.query(EVENT_TABLE).expect("Event Table creation error");
session.query(STATE_TABLE).expect("State Table creation error");
"Tables created successfully"
}
#[test]
fn test_create_table() {
use crate::env_setup::connection::connect;
assert_eq!("Tables created successfully",
create_table(&connect()));
}
| true
|
737a0a48ae9c9faa1d67fd7d9fa12f9132ea408e
|
Rust
|
mikejquinn/rust-tetris
|
/src/main.rs
|
UTF-8
| 16,474
| 3.09375
| 3
|
[] |
no_license
|
extern crate libc;
extern crate rand;
mod util;
mod display;
mod terminal;
use display::Display;
use std::thread;
use std::sync::mpsc;
use std::time::Duration;
use util::*;
const BOARD_WIDTH: u32 = 10;
const BOARD_HEIGHT: u32 = 20;
const HIDDEN_ROWS: u32 = 2;
enum Key {
Up,
Down,
Left,
Right,
Space,
CtrlC,
Char(char),
}
enum GameUpdate {
KeyPress(Key),
Tick,
}
#[derive(Debug, Copy, Clone)]
struct Point {
x: i32,
y: i32,
}
struct Board {
cells: [[Option<Color>; BOARD_WIDTH as usize]; BOARD_HEIGHT as usize],
}
impl Board {
pub fn render(&self, display: &mut Display) {
for y in HIDDEN_ROWS..BOARD_HEIGHT {
display.set_text("|", 0, y, Color::Red, Color::Black);
display.set_text("|", BOARD_WIDTH * 2 + 1, y, Color::Red, Color::Black);
}
for x in 0..(BOARD_WIDTH * 2 + 1) {
display.set_text("-", x, BOARD_HEIGHT, Color::Red, Color::Black);
}
for row in 0..BOARD_HEIGHT {
for col in 0..BOARD_WIDTH {
match self.cells[row as usize][col as usize] {
Some(color) => {
let c = 1 + (col * 2);
display.set_text(" ", c, row, color, color);
display.set_text(" ", c + 1, row, color, color);
},
None => ()
}
}
}
}
pub fn lock_piece(&mut self, piece: &Piece, origin: Point) {
piece.each_point(&mut |row, col| {
let x = origin.x + (col as i32);
let y = origin.y + (row as i32);
self.cells[y as usize][x as usize] = Some(piece.color);
});
}
pub fn collision_test(&self, piece: &Piece, origin: Point) -> bool {
let mut found = false;
piece.each_point(&mut |row, col| {
if !found {
let x = origin.x + col;
let y = origin.y + row;
if x < 0 || x >= (BOARD_WIDTH as i32) || y < 0 || y >= (BOARD_HEIGHT as i32) ||
self.cells[y as usize][x as usize] != None {
found = true;
}
}
});
found
}
/// Clears the board of any complete lines, shifting down rows to take their place.
/// Returns the total number of lines that were cleared.
fn clear_lines(&mut self) -> u32 {
let mut cleared_lines: usize = 0;
for row in (0..self.cells.len()).rev() {
if (row as i32) - (cleared_lines as i32) < 0 {
break;
}
if cleared_lines > 0 {
self.cells[row] = self.cells[row - cleared_lines];
self.cells[row - cleared_lines] = [None; BOARD_WIDTH as usize];
}
while !self.cells[row].iter().any(|x| *x == None) {
cleared_lines += 1;
self.cells[row] = self.cells[row - cleared_lines];
self.cells[row - cleared_lines] = [None; BOARD_WIDTH as usize];
}
}
cleared_lines as u32
}
}
struct Piece {
color: Color,
shape: Vec<Vec<u8>>,
}
impl Clone for Piece {
fn clone(&self) -> Piece {
let mut p = Piece{
color: self.color,
shape: Vec::with_capacity(self.shape.len())
};
for row in &self.shape {
p.shape.push(row.clone());
}
p
}
}
impl Piece {
pub fn new_o() -> Piece {
Piece{
color: Color::Cyan,
shape: vec![vec![1, 1],
vec![1, 1]]
}
}
pub fn new_l() -> Piece {
Piece{
color: Color::Orange,
shape: vec![vec![0, 0, 1],
vec![1, 1, 1],
vec![0, 0, 0]]
}
}
pub fn new_j() -> Piece {
Piece{
color: Color::Blue,
shape: vec![vec![1, 0, 0],
vec![1, 1, 1],
vec![0, 0, 0]]
}
}
pub fn new_t() -> Piece {
Piece{
color: Color::Purple,
shape: vec![vec![0, 1, 0],
vec![1, 1, 1],
vec![0, 0, 0]]
}
}
pub fn new_s() -> Piece {
Piece{
color: Color::Green,
shape: vec![vec![0, 1, 1],
vec![1, 1, 0],
vec![0, 0, 0]]
}
}
pub fn new_z() -> Piece {
Piece{
color: Color::Red,
shape: vec![vec![1, 1, 0],
vec![0, 1, 1],
vec![0, 0, 0]]
}
}
pub fn new_i() -> Piece {
Piece{
color: Color::Cyan,
shape: vec![vec![0, 0, 0, 0],
vec![1, 1, 1, 1],
vec![0, 0, 0, 0],
vec![0, 0, 0, 0]]
}
}
fn rotate(&mut self, direction: Direction) {
let size = self.shape.len();
for row in 0..size/2 {
for col in row..(size - row - 1) {
let t = self.shape[row][col];
match direction {
Direction::Left => {
self.shape[row][col] = self.shape[col][size - row - 1];
self.shape[col][size - row - 1] = self.shape[size - row - 1][size - col - 1];
self.shape[size - row - 1][size - col - 1] = self.shape[size - col - 1][row];
self.shape[size - col - 1][row] = t;
},
Direction::Right => {
self.shape[row][col] = self.shape[size - col - 1][row];
self.shape[size - col - 1][row] = self.shape[size - row - 1][size - col - 1];
self.shape[size - row - 1][size - col - 1] = self.shape[col][size - row - 1];
self.shape[col][size - row - 1] = t;
}
}
}
}
}
    fn each_point(&self, callback: &mut dyn FnMut(i32, i32)) {
let piece_width = self.shape.len() as i32;
for row in 0..piece_width {
for col in 0..piece_width {
if self.shape[row as usize][col as usize] != 0 {
callback(row, col);
}
}
}
}
}
/// Implements a queue of randomized tetrominoes.
///
/// Instead of a purely random stream of tetromino types, this queue generates a random ordering of all
/// possible types and ensures all of those pieces are used before re-generating a new random set. This helps
/// avoid pathological cases where purely random generation provides the same piece type repeatedly in a row,
/// or fails to provide a required piece for a very long time.
struct PieceBag {
pieces: Vec<Piece>
}
impl PieceBag {
fn new() -> PieceBag {
let mut p = PieceBag{
pieces: Vec::new()
};
p.fill_bag();
p
}
/// Removes and returns the next piece in the queue.
fn pop(&mut self) -> Piece {
let piece = self.pieces.remove(0);
if self.pieces.is_empty() {
self.fill_bag();
}
piece
}
/// Returns a copy of the next piece in the queue.
fn peek(&self) -> Piece {
match self.pieces.first() {
Some(p) => p.clone(),
None => panic!("No next piece in piece bag")
}
}
/// Generates a random ordering of all possible pieces and adds them to the piece queue.
fn fill_bag(&mut self) {
use rand::Rng;
let mut pieces: Vec<Piece> = vec![
Piece::new_o(),
Piece::new_l(),
Piece::new_j(),
Piece::new_t(),
Piece::new_s(),
Piece::new_z(),
Piece::new_i()
];
let mut rng = rand::thread_rng();
while !pieces.is_empty() {
let i = rng.gen::<usize>() % pieces.len();
self.pieces.push(pieces.swap_remove(i));
}
}
}
struct Game {
board: Board,
piece_bag: PieceBag,
piece: Piece,
piece_position: Point,
}
impl Game {
fn new() -> Game {
let mut piece_bag = PieceBag::new();
let piece = piece_bag.pop();
let mut game = Game {
board: Board{
cells: [[None; BOARD_WIDTH as usize]; BOARD_HEIGHT as usize]
},
piece_bag: piece_bag,
piece: piece,
piece_position: Point{ x: 0, y: 0 }
};
game.place_new_piece();
game
}
/// Returns the new position of the current piece if it were to be dropped.
fn find_dropped_position(&self) -> Point {
let mut origin = self.piece_position;
while !self.board.collision_test(&self.piece, origin) {
origin.y += 1;
}
origin.y -= 1;
origin
}
/// Draws the game to the display.
fn render(&self, display: &mut Display) {
// Render the board
self.board.render(display);
// Render the level
let left_margin = BOARD_WIDTH * 2 + 5;
display.set_text("Level: 1", left_margin, 3, Color::Red, Color::Black);
// Render the currently falling piece
let x = 1 + (2 * self.piece_position.x);
self.render_piece(display, &self.piece, Point{ x: x, y: self.piece_position.y });
// Render a ghost piece
let ghost_position = self.find_dropped_position();
self.render_piece(display, &self.piece, Point{ x: x, y: ghost_position.y });
// Render the next piece
display.set_text("Next piece:", left_margin, 7, Color::Red, Color::Black);
let next_piece = self.piece_bag.peek();
self.render_piece(display, &next_piece, Point{ x: (left_margin as i32) + 2, y: 9 });
}
fn render_piece(&self, display: &mut Display, piece: &Piece, origin: Point) {
let color = piece.color;
piece.each_point(&mut |row, col| {
let x = (origin.x + 2 * col) as u32;
let y = (origin.y + row) as u32;
display.set_text(" ", x, y, color, color);
display.set_text(" ", x + 1, y, color, color);
});
}
/// Moves the current piece in the specified direction. Returns true if the piece could be moved and
/// didn't collide.
fn move_piece(&mut self, x: i32, y: i32) -> bool {
let new_position = Point{
x: self.piece_position.x + x,
y: self.piece_position.y + y,
};
if self.board.collision_test(&self.piece, new_position) {
false
} else {
self.piece_position = new_position;
true
}
}
/// Rotates the current piece in the specified direction. Returns true if the piece could be rotated
/// without any collisions.
fn rotate_piece(&mut self, direction: Direction) -> bool {
let mut new_piece = self.piece.clone();
new_piece.rotate(direction);
if self.board.collision_test(&new_piece, self.piece_position) {
false
} else {
self.piece = new_piece;
true
}
}
/// Positions the current piece at the top of the board. Returns true if the piece can be placed without
/// any collisions.
fn place_new_piece(&mut self) -> bool {
let origin = Point{
x: ((BOARD_WIDTH - (self.piece.shape.len() as u32)) / 2) as i32,
y: 0,
};
if self.board.collision_test(&self.piece, origin) {
false
} else {
self.piece_position = origin;
true
}
}
/// Advances the game by moving the current piece down one step. If the piece cannot move down, the piece
/// is locked and the game is set up to drop the next piece. Returns true if the game could be advanced,
/// false if the player has lost.
fn advance_game(&mut self) -> bool {
if !self.move_piece(0, 1) {
self.board.lock_piece(&self.piece, self.piece_position);
self.board.clear_lines();
self.piece = self.piece_bag.pop();
if !self.place_new_piece() {
return false;
}
}
true
}
/// Drops the current piece to the lowest spot on the board where it fits without collisions and
/// advances the game.
fn drop_piece(&mut self) -> bool {
while self.move_piece(0, 1) {}
self.advance_game()
}
fn keypress(&mut self, key: Key) {
match key {
Key::Left => self.move_piece(-1, 0),
Key::Right => self.move_piece(1, 0),
Key::Down => self.advance_game(),
Key::Up => self.rotate_piece(Direction::Left),
Key::Space => self.drop_piece(),
Key::Char('q') => self.rotate_piece(Direction::Left),
Key::Char('e') => self.rotate_piece(Direction::Right),
_ => false,
};
}
fn play(&mut self, display: &mut Display) {
let (tx_event, rx_event) = mpsc::channel();
// Spawn a thread which sends periodic game ticks to advance the piece
{
let tx_event = tx_event.clone();
thread::spawn(move || {
loop {
thread::sleep(Duration::from_millis(500));
tx_event.send(GameUpdate::Tick).unwrap();
};
});
}
// Spawn a thread which listens for keyboard input
{
let tx_event = tx_event.clone();
thread::spawn(move || {
let stdin = &mut std::io::stdin();
loop {
match get_input(stdin) {
Some(k) => tx_event.send(GameUpdate::KeyPress(k)).unwrap(),
None => ()
}
}
});
}
// Main game loop. The loop listens and responds to timer and keyboard updates received on a channel
// as sent by the threads spawned above.
loop {
display.clear_buffer();
self.render(display);
display.render();
match rx_event.recv() {
Ok(update) => {
match update {
GameUpdate::KeyPress(key) => {
match key {
Key::Char('z') | Key::CtrlC => break,
k => { self.keypress(k); }
};
},
GameUpdate::Tick => { self.advance_game(); }
};
},
                Err(err) => panic!("{}", err)
}
}
}
}
fn get_input(stdin: &mut std::io::Stdin) -> Option<Key> {
use std::io::Read;
let c = &mut [0u8];
match stdin.read(c) {
Ok(_) => {
match std::str::from_utf8(c) {
Ok("w") => Some(Key::Up),
Ok("a") => Some(Key::Left),
Ok("s") => Some(Key::Down),
Ok("d") => Some(Key::Right),
Ok(" ") => Some(Key::Space),
Ok("\x03") => Some(Key::CtrlC),
// Escape sequence started - must read two more bytes.
Ok("\x1b") => {
let code = &mut [0u8; 2];
match stdin.read(code) {
Ok(_) => {
match std::str::from_utf8(code) {
Ok("[A") => Some(Key::Up),
Ok("[B") => Some(Key::Down),
Ok("[C") => Some(Key::Right),
Ok("[D") => Some(Key::Left),
_ => None
}
},
                            Err(msg) => panic!("could not read from standard in: {}", msg)
}
},
Ok(n) => Some(Key::Char(n.chars().next().unwrap())),
_ => None
}
},
        Err(msg) => panic!("could not read from standard in: {}", msg)
}
}
fn main() {
let display = &mut Display::new(BOARD_WIDTH * 2 + 100, BOARD_HEIGHT + 2);
let game = &mut Game::new();
let _restorer = terminal::set_terminal_raw_mode();
game.play(display);
}
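// Small test sketches for the behaviour documented above, using only items
// defined in this file: `clear_lines` should collapse a fully occupied row,
// and rotating the L piece a quarter turn to the right should produce the
// expected 3x3 layout.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn clear_lines_removes_a_full_row() {
        let mut board = Board {
            cells: [[None; BOARD_WIDTH as usize]; BOARD_HEIGHT as usize],
        };
        // Fill the bottom row completely with one colour.
        let color = Piece::new_o().color;
        let bottom = (BOARD_HEIGHT - 1) as usize;
        for col in 0..BOARD_WIDTH as usize {
            board.cells[bottom][col] = Some(color);
        }
        // Exactly one line is cleared and the row ends up empty again.
        assert_eq!(board.clear_lines(), 1);
        assert!(board.cells[bottom].iter().all(|cell| cell.is_none()));
    }
    #[test]
    fn rotating_the_l_piece_right_gives_the_expected_shape() {
        let mut piece = Piece::new_l();
        piece.rotate(Direction::Right);
        assert_eq!(piece.shape, vec![vec![0, 1, 0],
                                     vec![0, 1, 0],
                                     vec![0, 1, 1]]);
    }
}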
| true
|
faa618fb6923228947b6c6c968ef4eddf858e08a
|
Rust
|
Qqwy/Jux
|
/jux_rust/src/main.rs
|
UTF-8
| 209
| 2.921875
| 3
|
[] |
no_license
|
fn main() {
    println!("Hello, world!");
}
struct Token;
/// Placeholder interpreter state.
struct State;
/// Stub: read the next token from the input.
fn read_token() -> Token {
    Token
}
/// Stub: run the whole function stack against the given state.
fn execute_function_stack(state: State) -> State {
    state
}
/// Stub: execute only the topmost entry of the function stack.
fn execute_top_of_function_stack(state: State) -> State {
    state
}
| true
|
f8579330f787f95ba170e8b75bca4d4a84740b27
|
Rust
|
shybyte/rusty-synth
|
/src/main.rs
|
UTF-8
| 4,462
| 2.546875
| 3
|
[] |
no_license
|
#[macro_use]
extern crate chan;
extern crate sdl2;
extern crate portmidi as pm;
extern crate chan_signal;
use std::f32;
use chan_signal::Signal;
use std::thread;
use std::sync::mpsc;
use pm::{PortMidi};
use sdl2::audio::{AudioCallback, AudioSpecDesired};
use std::time::Duration;
#[derive(Debug)]
enum Command {
NoteOn(f32),
NoteOff()
}
struct SquareWave {
commands: mpsc::Receiver<Command>,
freq: f32,
phase: f32,
volume: f32,
spec_freq: f32,
is_on: bool
}
impl AudioCallback for SquareWave {
type Channel = f32;
fn callback(&mut self, out: &mut [f32]) {
// Generate a square wave
for command in self.commands.try_iter() {
println!("command = {:?}, {:?}", command, out.len());
match command {
Command::NoteOn(freq) => {
self.freq = freq;
self.is_on = true;
}
Command::NoteOff() => {
self.is_on = false;
}
}
}
let phase_inc: f32 = self.freq / self.spec_freq;
for x in out.iter_mut() {
if !self.is_on {
*x = 0.0;
continue;
}
            *x = match self.phase {
                p if p <= 0.5 => self.volume,
                _ => -self.volume,
            };
self.phase = (self.phase + phase_inc) % 1.0;
}
}
}
fn print_devices(pm: &PortMidi) {
for dev in pm.devices().unwrap() {
println!("{}", dev);
}
}
const BUF_LEN: usize = 1024;
fn main() {
println!("Starting Rusty Synth...");
let context = pm::PortMidi::new().unwrap();
print_devices(&context);
let in_devices: Vec<pm::DeviceInfo> = context.devices()
.unwrap()
.into_iter()
.filter(|dev| dev.is_input())
.collect();
let in_ports: Vec<pm::InputPort> = in_devices.into_iter()
.filter_map(|dev| {
context.input_port(dev, BUF_LEN)
.ok()
})
.collect();
let os_signal = chan_signal::notify(&[Signal::INT, Signal::TERM]);
let (tx, rx) = chan::sync(0);
thread::spawn(move || {
let timeout = Duration::from_millis(10);
loop {
for port in &in_ports {
if let Ok(Some(events)) = port.read_n(BUF_LEN) {
tx.send((port.device(), events));
}
}
thread::sleep(timeout);
}
});
let sdl_context = sdl2::init().unwrap();
let audio_subsystem = sdl_context.audio().unwrap();
let desired_spec = AudioSpecDesired {
freq: Some(44100),
        channels: Some(1), // mono
        samples: None,     // default sample size
};
let (tx_audio, rx_audio) = mpsc::channel();
let device = audio_subsystem.open_playback(None, &desired_spec, |spec| {
// Show obtained AudioSpec
println!("{:?}", spec);
SquareWave {
commands: rx_audio,
freq: 220.0,
spec_freq: spec.freq as f32,
phase: 0.0,
volume: 0.25,
is_on: false
}
}).unwrap();
// Start playback
device.resume();
loop {
chan_select! {
rx.recv() -> midi_events => {
let (_device, events) = midi_events.unwrap();
for event in events {
match event.message.status {
248 => continue,
192 => {
println!("program change {:?}", event.message);
},
0x90 => {
let midi_note = event.message.data1;
let f = (2.0 as f32).powf((midi_note as f32 - 57.0) / 12.0) * 220.0;
tx_audio.send(Command::NoteOn(f)).unwrap();
},
0x80 => {
tx_audio.send(Command::NoteOff()).unwrap();
}
_ => {
println!("event = {:?}", event);
}
}
}
},
os_signal.recv() -> os_sig => {
println!("received os signal: {:?}", os_sig);
if os_sig == Some(Signal::INT) {
break;
}
}
}
}
}
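// A small sketch of the phase-accumulator square wave above: with the
// oscillator switched on, the first sample of a period sits at +volume and
// samples in the second half of the period sit at -volume; with the
// oscillator off, the callback writes silence. The 441 Hz / 44100 Hz figures
// are arbitrary test values, not taken from the original source.
#[cfg(test)]
mod tests {
    use super::*;
    fn make_wave(is_on: bool) -> SquareWave {
        // The sender side is dropped immediately; `try_iter` then simply
        // yields nothing, which is fine for these checks.
        let (_tx, rx) = mpsc::channel();
        SquareWave {
            commands: rx,
            freq: 441.0,
            phase: 0.0,
            volume: 0.25,
            spec_freq: 44100.0,
            is_on: is_on,
        }
    }
    #[test]
    fn square_wave_alternates_between_plus_and_minus_volume() {
        let mut wave = make_wave(true);
        let mut out = [0.0f32; 200];
        wave.callback(&mut out);
        // Period is 100 samples: the first sample is +volume, and the second
        // half of each period produces -volume.
        assert_eq!(out[0], 0.25);
        assert!(out.iter().any(|&sample| sample == -0.25));
    }
    #[test]
    fn square_wave_is_silent_when_off() {
        let mut wave = make_wave(false);
        let mut out = [0.0f32; 32];
        wave.callback(&mut out);
        assert!(out.iter().all(|&sample| sample == 0.0));
    }
}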
| true
|
d21c1ddc83a92c2fc7c678535a3465653d8133c3
|
Rust
|
fanciful-marmot/rt-weekend
|
/src/geometry/bvh.rs
|
UTF-8
| 3,239
| 3.0625
| 3
|
[] |
no_license
|
use rand::Rng;
use std::cmp::Ordering;
use crate::geometry::{Hit, Hittable, AABB};
use crate::math::Ray;
pub struct BVHNode {
aabb: Option<AABB>,
children: (Box<dyn Hittable>, Option<Box<dyn Hittable>>),
}
impl BVHNode {
pub fn new(objects: Vec<Box<dyn Hittable>>) -> BVHNode {
let mut objects = objects;
let axis = rand::thread_rng().gen_range(0, 3);
let comparator = |a: &Box<dyn Hittable>, b: &Box<dyn Hittable>| {
let (min_a, min_b) = (
a.bounding_box().unwrap().min[axis],
b.bounding_box().unwrap().min[axis],
);
if min_a < min_b {
Ordering::Less
} else if min_a > min_b {
Ordering::Greater
} else {
Ordering::Equal
}
};
let (left, right): (Box<dyn Hittable>, Option<Box<dyn Hittable>>) = match objects.len() {
1 => (objects.remove(0), None),
2 => {
if comparator(&objects[0], &objects[1]) == Ordering::Less {
(objects.remove(0), Some(objects.remove(0)))
} else {
(objects.remove(1), Some(objects.swap_remove(0)))
}
}
_ => {
objects.sort_unstable_by(comparator);
let mid = objects.len() / 2;
let mut left = objects;
let right = left.split_off(mid);
(
Box::new(BVHNode::new(left)),
Some(Box::new(BVHNode::new(right))),
)
}
};
let aabb1 = left.bounding_box();
let aabb2 = match &right {
Some(hittable) => hittable.bounding_box(),
None => None,
};
let aabb = match (aabb1, aabb2) {
(Some(box1), Some(box2)) => Some(AABB::merge(box1, box2)),
(Some(box1), None) => Some(box1.clone()),
(None, Some(box2)) => Some(box2.clone()),
(None, None) => None,
};
BVHNode {
aabb,
children: (left, right),
}
}
}
impl Hittable for BVHNode {
fn intersects_ray(&self, ray: &Ray, t_range: (f32, f32)) -> Option<Hit> {
let hit = match &self.aabb {
Some(aabb) => aabb.hit(&ray, t_range),
None => true,
};
if hit {
let left_hit = self.children.0.intersects_ray(&ray, t_range);
let t_max = match &left_hit {
Some(hit) => hit.t,
None => t_range.1,
};
let right_hit = match &self.children.1 {
Some(hittable) => hittable.intersects_ray(&ray, (t_range.0, t_max)),
None => None,
};
if right_hit.is_some() {
right_hit
} else {
left_hit
}
} else {
None
}
}
fn bounding_box(&self) -> Option<&AABB> {
match &self.aabb {
Some(aabb) => Some(&aabb),
None => None,
}
}
}
| true
|
a1d0819f172ecba5dd2164284f70488ef6f21c37
|
Rust
|
FenrirWolf/megaton-hammer
|
/megaton-hammer/src/kernel/session.rs
|
UTF-8
| 1,068
| 2.578125
| 3
|
[] |
no_license
|
use ipc::{Request, Response};
use kernel::svc::send_sync_request;
use kernel::{FromKObject, KObject};
use error::*;
use tls::TlsStruct;
#[derive(Debug)]
pub struct Session(KObject);
impl Session {
pub unsafe fn from_raw(handle: KObject) -> Session {
Session(handle)
}
// TODO: This is basically CMIF, instead of being a true low-level session.
pub fn send<T: Clone, Y: Clone>(&self, req: Request<T>) -> Result<Response<Y>> {
let mut tls = TlsStruct::borrow_mut();
let ipc_buf = &mut tls.ipc_buf;
req.pack(ipc_buf);
let err = unsafe { send_sync_request((self.0).0) };
if err != 0 {
return Err(Error(err));
}
Response::unpack(&mut ipc_buf[..])
}
}
impl AsRef<KObject> for Session {
fn as_ref(&self) -> &KObject {
&self.0
}
}
// TODO: Impl from instead
impl Into<KObject> for Session {
fn into(self) -> KObject {
self.0
}
}
impl FromKObject for Session {
unsafe fn from_kobject(obj: KObject) -> Session {
Session(obj)
}
}
| true
|
b872f5d07618d4f9f8073635a03ccae82dd9d737
|
Rust
|
isgasho/simple-log
|
/examples/toml_log.rs
|
UTF-8
| 888
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
//! `cargo run --example toml_log`
//!
//! With Output
//! ```bash
//! 2020-12-12 17:16:02:340877000 [INFO] <toml_log:37>:info toml simple_log
//! 2020-12-12 17:16:02:341504000 [WARN] <toml_log:38>:warn toml simple_log
//! 2020-12-12 17:16:02:341569000 [ERROR] <toml_log:39>:error toml simple_log
//! ```
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
use simple_log::LogConfig;
#[derive(Deserialize)]
struct LogConfigWrap {
log_config: LogConfig,
}
fn main() {
let config = r#"
[log_config]
path = "./log/tmp.log"
level = "debug"
size = 10
out_kind = ["console","file"]
roll_count = 10
"#;
let wrap: LogConfigWrap = toml::from_str(config).unwrap();
simple_log::new(wrap.log_config).unwrap(); //init log
info!("info toml simple_log");
warn!("warn toml simple_log");
error!("error toml simple_log");
}
| true
|
3e9998a55b1e41873b052ba69f57ff28a5676cca
|
Rust
|
alvarogonzalezsotillo/pruebas-rust
|
/src/main.rs
|
UTF-8
| 4,169
| 2.578125
| 3
|
[] |
no_license
|
// https://www.quantamagazine.org/the-map-of-mathematics-20200213/
use std::env;
pub mod crossteaser;
pub mod ravioli;
pub mod search;
use crate::crossteaser::crossteaser_search::*;
use crate::search::astar::*;
use crate::search::*;
fn estados_posibles() {
let posiciones_de_una_pieza = 6.0 * 4.0;
let mut piezas_en_8_sitios = 1.0;
for _x in 1..8 {
piezas_en_8_sitios = piezas_en_8_sitios * posiciones_de_una_pieza
}
println!("Estados posibles: {}", piezas_en_8_sitios)
}
fn aproxima<'a>(goal: Board<'a>, board: Board<'a>, changes: u8, max_level: u64 ) -> Option<(Vec<Direction>,Board<'a>)> {
let search_some_changes = BoardSearchSomeChanges {
goal: goal,
max_depth: Some(max_level),
changes: changes,
};
let (found, _, _) = a_star_search(board, &search_some_changes);
match found{
None => {
None
}
Some(found) => {
let to_root = root_path_state(&found);
println!("APROXIMACION ENCONTRADA:\n");
to_root.iter().for_each( |b| println!("{}\n\n",b.ascii_art_string()) );
let moves = Board::infer_moves_to_empty_position(to_root);
let ret_board = found.borrow().state.clone_with_pieceset(goal.piece_set);
Some(
(
moves,
ret_board
)
)
}
}
}
#[allow(dead_code)]
fn soluciona_por_pasos<'a>(goal: Board<'a>, board: Board<'a>) -> bool {
println!("GOAL:");
println!("{}\n\n\n\n", goal.ascii_art_string());
println!("BOARD:");
println!("{}\n\n\n\n", board.ascii_art_string());
let max_level = 28;
let aproximacion = aproxima(goal,board,3,max_level);
if aproximacion.is_none(){
println!("No hay aproximación a 3");
return false;
}
let aproximacion = aproximacion.unwrap();
println!("Aproximación a 3:{:?}", aproximacion.0 );
println!("{}\n\n", aproximacion.1.ascii_art_string() );
let aproximacion = aproxima(goal,aproximacion.1,2,max_level);
if aproximacion.is_none(){
println!("No hay aproximación a 2");
return false;
}
let aproximacion = aproximacion.unwrap();
println!("Aproximación a 2:{:?}", aproximacion.0 );
println!("{}\n\n", aproximacion.1.ascii_art_string() );
let diffs = goal.compute_difs(&aproximacion.1);
let moves = moves_for_changes(diffs.clone(), max_level);
if moves.is_none(){
println!("No hay movimientos para diferencias finales: {:?}", diffs );
return false;
}
let moves = moves.unwrap();
let board_copy = aproximacion.1.clone();
let mut current = board_copy.apply_moves_to_empty_position(&moves).
unwrap().
last().
unwrap().clone();
println!("Aplico rotación para diferencias finales: {:?}", moves );
while current != board_copy{
current = current.apply_moves_to_empty_position(&moves).
unwrap().
last().
unwrap().clone();
println!("Aplico rotación para diferencias finales: {:?}", moves );
if current == goal{
return true;
}
}
println!("La rotación final no ha tenido éxito" );
return false;
}
fn main() {
let args: Vec<String> = env::args().collect();
println!("Los argumentos son: {:?}", args);
estados_posibles();
#[allow(unused_imports)]
use crate::crossteaser::crossteaser_search::Color::{B, G, O, P, R, Y};
let piece_set = PieceSet::from_piece(&Piece::seed());
for piece_index in 1..piece_set.get_number_of_pieces() {
println!("Probando con pieza número:{}", piece_index );
let _goal = Board::from_piece(&piece_set, piece_index);
let colors_original: [Option<[Color; 2]>; 9] = [
Some([O, P]),
Some([R, O]),
Some([O, B]),
Some([B, G]),
None,
Some([B, P]),
Some([O, R]),
Some([Y, B]),
Some([Y, R]),
];
let _original = Board::from_colors(&piece_set, colors_original);
return;
}
}
| true
|
08b31b506d7f612aff5b4338c36926604c116cde
|
Rust
|
prisma/prisma-engines
|
/query-engine/connectors/sql-query-connector/src/error.rs
|
UTF-8
| 14,950
| 2.765625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use connector_interface::{error::*, Filter};
use prisma_models::prelude::DomainError;
use quaint::error::ErrorKind as QuaintKind;
use std::{any::Any, string::FromUtf8Error};
use thiserror::Error;
use user_facing_errors::query_engine::DatabaseConstraint;
pub(crate) enum RawError {
IncorrectNumberOfParameters {
expected: usize,
actual: usize,
},
QueryInvalidInput(String),
ConnectionClosed,
Database {
code: Option<String>,
message: Option<String>,
},
UnsupportedColumnType {
column_type: String,
},
}
impl From<RawError> for SqlError {
fn from(re: RawError) -> SqlError {
match re {
RawError::IncorrectNumberOfParameters { expected, actual } => {
Self::IncorrectNumberOfParameters { expected, actual }
}
RawError::QueryInvalidInput(message) => Self::QueryInvalidInput(message),
RawError::UnsupportedColumnType { column_type } => Self::RawError {
code: String::from("N/A"),
message: format!(
r#"Failed to deserialize column of type '{column_type}'. If you're using $queryRaw and this column is explicitly marked as `Unsupported` in your Prisma schema, try casting this column to any supported Prisma type such as `String`."#
),
},
RawError::ConnectionClosed => Self::ConnectionClosed,
RawError::Database { code, message } => Self::RawError {
code: code.unwrap_or_else(|| String::from("N/A")),
message: message.unwrap_or_else(|| String::from("N/A")),
},
}
}
}
impl From<quaint::error::Error> for RawError {
fn from(e: quaint::error::Error) -> Self {
match e.kind() {
quaint::error::ErrorKind::IncorrectNumberOfParameters { expected, actual } => {
Self::IncorrectNumberOfParameters {
expected: *expected,
actual: *actual,
}
}
quaint::error::ErrorKind::ConnectionClosed => Self::ConnectionClosed,
quaint::error::ErrorKind::UnsupportedColumnType { column_type } => Self::UnsupportedColumnType {
column_type: column_type.to_owned(),
},
quaint::error::ErrorKind::QueryInvalidInput(message) => Self::QueryInvalidInput(message.to_owned()),
_ => Self::Database {
code: e.original_code().map(ToString::to_string),
message: e.original_message().map(ToString::to_string),
},
}
}
}
// Catching the panics from the database driver for better error messages.
impl From<Box<dyn Any + Send>> for RawError {
fn from(e: Box<dyn Any + Send>) -> Self {
Self::Database {
code: None,
message: Some(*e.downcast::<String>().unwrap()),
}
}
}
#[derive(Debug, Error)]
pub enum SqlError {
#[error("Unique constraint failed: {:?}", constraint)]
UniqueConstraintViolation { constraint: DatabaseConstraint },
#[error("Null constraint failed: {:?}", constraint)]
NullConstraintViolation { constraint: DatabaseConstraint },
#[error("Foreign key constraint failed")]
ForeignKeyConstraintViolation { constraint: DatabaseConstraint },
#[error("Record does not exist.")]
RecordDoesNotExist,
#[error("Table {} does not exist", _0)]
TableDoesNotExist(String),
#[error("Column {} does not exist", _0)]
ColumnDoesNotExist(String),
#[error("Error creating a database connection. ({})", _0)]
ConnectionError(QuaintKind),
#[error("Error querying the database: {}", _0)]
QueryError(Box<dyn std::error::Error + Send + Sync>),
#[error("Invalid input provided to query: {}", _0)]
QueryInvalidInput(String),
#[error("The column value was different from the model")]
ColumnReadFailure(Box<dyn std::error::Error + Send + Sync>),
#[error("Field cannot be null: {}", field)]
FieldCannotBeNull { field: String },
#[error("{}", _0)]
DomainError(DomainError),
#[error("Record not found: {:?}", _0)]
RecordNotFoundForWhere(Filter),
#[error(
"Violating a relation {} between {} and {}",
relation_name,
model_a_name,
model_b_name
)]
RelationViolation {
relation_name: String,
model_a_name: String,
model_b_name: String,
},
#[error(
"The relation {} has no record for the model {} connected to a record for the model {} on your write path.",
relation_name,
parent_name,
child_name
)]
RecordsNotConnected {
relation_name: String,
parent_name: String,
// parent_where: Option<Box<RecordFinderInfo>>,
child_name: String,
// child_where: Option<Box<RecordFinderInfo>>,
},
#[error("Conversion error: {0}")]
ConversionError(anyhow::Error),
#[error("Database error. error code: {}, error message: {}", code, message)]
RawError { code: String, message: String },
#[error(
"Incorrect number of parameters given to a statement. Expected {}: got: {}.",
expected,
actual
)]
IncorrectNumberOfParameters { expected: usize, actual: usize },
#[error("Server terminated the connection.")]
ConnectionClosed,
#[error("{}", _0)]
TransactionAlreadyClosed(String),
#[error("{}", _0)]
InvalidIsolationLevel(String),
#[error("Transaction write conflict")]
TransactionWriteConflict,
#[error("ROLLBACK statement has no corresponding BEGIN statement")]
RollbackWithoutBegin,
#[error("Query parameter limit exceeded error: {0}.")]
QueryParameterLimitExceeded(String),
#[error("Cannot find a fulltext index to use for the search")]
MissingFullTextSearchIndex,
#[error("External connector error")]
ExternalError(i32),
}
impl SqlError {
pub(crate) fn into_connector_error(self, connection_info: &quaint::prelude::ConnectionInfo) -> ConnectorError {
match self {
SqlError::UniqueConstraintViolation { constraint } => {
ConnectorError::from_kind(ErrorKind::UniqueConstraintViolation { constraint })
}
SqlError::NullConstraintViolation { constraint } => {
ConnectorError::from_kind(ErrorKind::NullConstraintViolation { constraint })
}
SqlError::ForeignKeyConstraintViolation { constraint } => {
ConnectorError::from_kind(ErrorKind::ForeignKeyConstraintViolation { constraint })
}
SqlError::RecordDoesNotExist => ConnectorError::from_kind(ErrorKind::RecordDoesNotExist),
SqlError::TableDoesNotExist(table) => ConnectorError::from_kind(ErrorKind::TableDoesNotExist { table }),
SqlError::ColumnDoesNotExist(column) => ConnectorError::from_kind(ErrorKind::ColumnDoesNotExist { column }),
SqlError::ConnectionError(e) => ConnectorError {
user_facing_error: user_facing_errors::quaint::render_quaint_error(&e, connection_info),
kind: ErrorKind::ConnectionError(e.into()),
transient: false,
},
SqlError::ColumnReadFailure(e) => ConnectorError::from_kind(ErrorKind::ColumnReadFailure(e)),
SqlError::FieldCannotBeNull { field } => ConnectorError::from_kind(ErrorKind::FieldCannotBeNull { field }),
SqlError::DomainError(e) => ConnectorError::from_kind(ErrorKind::DomainError(e)),
SqlError::RecordNotFoundForWhere(info) => {
ConnectorError::from_kind(ErrorKind::RecordNotFoundForWhere(info))
}
SqlError::RelationViolation {
relation_name,
model_a_name,
model_b_name,
} => ConnectorError::from_kind(ErrorKind::RelationViolation {
relation_name,
model_a_name,
model_b_name,
}),
SqlError::RecordsNotConnected {
relation_name,
parent_name,
child_name,
} => ConnectorError::from_kind(ErrorKind::RecordsNotConnected {
relation_name,
parent_name,
child_name,
}),
SqlError::ConversionError(e) => ConnectorError::from_kind(ErrorKind::ConversionError(e)),
SqlError::QueryInvalidInput(e) => ConnectorError::from_kind(ErrorKind::QueryInvalidInput(e)),
SqlError::IncorrectNumberOfParameters { expected, actual } => {
ConnectorError::from_kind(ErrorKind::IncorrectNumberOfParameters { expected, actual })
}
SqlError::QueryError(e) => {
let quaint_error: Option<&QuaintKind> = e.downcast_ref();
match quaint_error {
Some(quaint_error) => ConnectorError {
user_facing_error: user_facing_errors::quaint::render_quaint_error(
quaint_error,
connection_info,
),
kind: ErrorKind::QueryError(e),
transient: false,
},
None => ConnectorError::from_kind(ErrorKind::QueryError(e)),
}
}
SqlError::RawError { code, message } => {
ConnectorError::from_kind(ErrorKind::RawDatabaseError { code, message })
}
SqlError::ConnectionClosed => ConnectorError::from_kind(ErrorKind::ConnectionClosed),
SqlError::TransactionAlreadyClosed(message) => {
ConnectorError::from_kind(ErrorKind::TransactionAlreadyClosed { message })
}
SqlError::TransactionWriteConflict => ConnectorError::from_kind(ErrorKind::TransactionWriteConflict),
SqlError::RollbackWithoutBegin => ConnectorError::from_kind(ErrorKind::RollbackWithoutBegin),
SqlError::QueryParameterLimitExceeded(e) => {
ConnectorError::from_kind(ErrorKind::QueryParameterLimitExceeded(e))
}
SqlError::MissingFullTextSearchIndex => ConnectorError::from_kind(ErrorKind::MissingFullTextSearchIndex),
SqlError::InvalidIsolationLevel(msg) => ConnectorError::from_kind(ErrorKind::InternalConversionError(msg)),
SqlError::ExternalError(error_id) => ConnectorError::from_kind(ErrorKind::ExternalError(error_id)),
}
}
}
impl From<prisma_models::ConversionFailure> for SqlError {
fn from(e: prisma_models::ConversionFailure) -> Self {
Self::ConversionError(e.into())
}
}
impl From<quaint::error::Error> for SqlError {
fn from(e: quaint::error::Error) -> Self {
match QuaintKind::from(e) {
QuaintKind::RawConnectorError { status, reason } => Self::RawError {
code: status,
message: reason,
},
QuaintKind::QueryError(qe) => Self::QueryError(qe),
QuaintKind::QueryInvalidInput(qe) => Self::QueryInvalidInput(qe),
e @ QuaintKind::IoError(_) => Self::ConnectionError(e),
QuaintKind::NotFound => Self::RecordDoesNotExist,
QuaintKind::UniqueConstraintViolation { constraint } => Self::UniqueConstraintViolation {
constraint: constraint.into(),
},
QuaintKind::NullConstraintViolation { constraint } => Self::NullConstraintViolation {
constraint: constraint.into(),
},
QuaintKind::ForeignKeyConstraintViolation { constraint } => Self::ForeignKeyConstraintViolation {
constraint: constraint.into(),
},
QuaintKind::MissingFullTextSearchIndex => Self::MissingFullTextSearchIndex,
e @ QuaintKind::ConnectionError(_) => Self::ConnectionError(e),
QuaintKind::ColumnReadFailure(e) => Self::ColumnReadFailure(e),
QuaintKind::ColumnNotFound { column } => SqlError::ColumnDoesNotExist(format!("{column}")),
QuaintKind::TableDoesNotExist { table } => SqlError::TableDoesNotExist(format!("{table}")),
QuaintKind::ConnectionClosed => SqlError::ConnectionClosed,
QuaintKind::InvalidIsolationLevel(msg) => Self::InvalidIsolationLevel(msg),
QuaintKind::TransactionWriteConflict => Self::TransactionWriteConflict,
QuaintKind::RollbackWithoutBegin => Self::RollbackWithoutBegin,
QuaintKind::ExternalError(error_id) => Self::ExternalError(error_id),
e @ QuaintKind::UnsupportedColumnType { .. } => SqlError::ConversionError(e.into()),
e @ QuaintKind::TransactionAlreadyClosed(_) => SqlError::TransactionAlreadyClosed(format!("{e}")),
e @ QuaintKind::IncorrectNumberOfParameters { .. } => SqlError::QueryError(e.into()),
e @ QuaintKind::ConversionError(_) => SqlError::ConversionError(e.into()),
e @ QuaintKind::ResultIndexOutOfBounds { .. } => SqlError::QueryError(e.into()),
e @ QuaintKind::ResultTypeMismatch { .. } => SqlError::QueryError(e.into()),
e @ QuaintKind::LengthMismatch { .. } => SqlError::QueryError(e.into()),
e @ QuaintKind::ValueOutOfRange { .. } => SqlError::QueryError(e.into()),
e @ QuaintKind::UUIDError(_) => SqlError::ConversionError(e.into()),
e @ QuaintKind::DatabaseUrlIsInvalid { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::DatabaseDoesNotExist { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::AuthenticationFailed { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::DatabaseAccessDenied { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::DatabaseAlreadyExists { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::InvalidConnectionArguments => SqlError::ConnectionError(e),
e @ QuaintKind::ConnectTimeout => SqlError::ConnectionError(e),
e @ QuaintKind::SocketTimeout => SqlError::ConnectionError(e),
e @ QuaintKind::PoolTimeout { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::PoolClosed { .. } => SqlError::ConnectionError(e),
e @ QuaintKind::TlsError { .. } => Self::ConnectionError(e),
}
}
}
impl From<DomainError> for SqlError {
fn from(e: DomainError) -> SqlError {
SqlError::DomainError(e)
}
}
impl From<serde_json::error::Error> for SqlError {
fn from(e: serde_json::error::Error) -> SqlError {
SqlError::ConversionError(e.into())
}
}
impl From<uuid::Error> for SqlError {
fn from(e: uuid::Error) -> SqlError {
SqlError::ColumnReadFailure(e.into())
}
}
impl From<FromUtf8Error> for SqlError {
fn from(e: FromUtf8Error) -> SqlError {
SqlError::ColumnReadFailure(e.into())
}
}
| true
28d8a5e10d9e4bf0ccc36b3b63948ebe425a9fc5 | Rust | samcday/exercism | /rust/bowling/src/lib.rs | UTF-8 | 3,225 | 3.296875 | 3 | [] | no_license |
#![deny(clippy::all, clippy::pedantic)]
#[derive(Debug, PartialEq)]
pub enum Error {
NotEnoughPinsLeft,
GameComplete,
}
#[derive(Clone, Copy, Debug)]
enum Frame {
Strike,
Spare(u16, u16),
Open(u16, u16),
Last(u16, u16, u16),
}
impl Frame {
fn score<I: Iterator<Item = Self>>(self, following: I) -> u16 {
let following = following.flat_map(|frame| match frame {
Frame::Strike => vec![10],
Frame::Spare(first, second) | Frame::Open(first, second) | Frame::Last(first, second, _) => {
vec![first, second]
}
});
match self {
Frame::Strike => 10 + following.take(2).sum::<u16>(),
Frame::Spare(_, _) => 10 + following.take(1).sum::<u16>(),
Frame::Open(first, second) => first + second,
Frame::Last(first, second, third) => first + second + third,
}
}
}
#[derive(Debug)]
pub struct BowlingGame {
frames: Vec<Frame>,
rolls: Vec<u16>,
}
impl Default for BowlingGame {
fn default() -> Self {
Self::new()
}
}
impl BowlingGame {
pub fn new() -> Self {
Self {
frames: vec![],
rolls: vec![],
}
}
fn record_frame(&mut self, frame: Frame) {
self.frames.push(frame);
self.rolls.clear();
}
pub fn roll(&mut self, pins: u16) -> Result<(), Error> {
if self.frames.len() == 10 {
return Err(Error::GameComplete);
}
if pins > 10 {
return Err(Error::NotEnoughPinsLeft);
}
self.rolls.push(pins);
let frame_total = self.rolls.iter().sum::<u16>();
let frame_rolls = self.rolls.len();
if self.frames.len() == 9 {
let first_two = self.rolls.iter().take(2).sum::<u16>();
let last_two = self.rolls.iter().skip(1).take(2).sum::<u16>();
if (first_two != 10 && first_two != 20) && last_two > 10 {
return Err(Error::NotEnoughPinsLeft);
}
if frame_rolls == 3 {
self.record_frame(Frame::Last(self.rolls[0], self.rolls[1], self.rolls[2]))
} else if frame_rolls == 2 && frame_total < 10 {
self.record_frame(Frame::Last(self.rolls[0], self.rolls[1], 0))
}
} else {
if frame_total > 10 && self.frames.len() < 9 {
return Err(Error::NotEnoughPinsLeft);
}
if frame_total == 10 && frame_rolls == 1 {
self.record_frame(Frame::Strike);
} else if frame_rolls == 2 {
self.record_frame(if frame_total == 10 {
Frame::Spare(self.rolls[0], self.rolls[1])
} else {
Frame::Open(self.rolls[0], self.rolls[1])
});
}
}
Ok(())
}
pub fn score(&self) -> Option<u16> {
if self.frames.len() == 10 {
let mut score = 0;
for (idx, frame) in self.frames.iter().enumerate() {
score += frame.score(self.frames[idx..].iter().skip(1).cloned());
}
Some(score)
} else {
None
}
}
}
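// Editor's sketch (not part of the original exercise solution): a minimal usage
// example of `BowlingGame`. Twelve consecutive strikes make a perfect game,
// which should score 300 under the rules implemented above.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn perfect_game_scores_300() {
        let mut game = BowlingGame::new();
        // Nine strike frames plus three bonus rolls in the tenth frame.
        for _ in 0..12 {
            game.roll(10).unwrap();
        }
        assert_eq!(game.score(), Some(300));
    }
}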
| true
737038ecd3b3244c79d3849d6896835b93beb740 | Rust | elsid/CodeBall | /src/game.rs | UTF-8 | 420 | 2.59375 | 3 | ["MIT"] | permissive |
use crate::model::Game;
impl Game {
pub fn opposite(&self) -> Game {
Game {
current_tick: self.current_tick,
players: self.players.iter().map(|v| v.opposite()).collect(),
robots: self.robots.iter().map(|v| v.opposite()).collect(),
nitro_packs: self.nitro_packs.iter().map(|v| v.opposite()).collect(),
ball: self.ball.opposite(),
}
}
}
| true
86cfa1bd4fd9577e2fe8eaee0c0ac21c26dde3e8 | Rust | hug-dev/rust-spiffe-1 | /src/bundle/mod.rs | UTF-8 | 1,510 | 2.625 | 3 | ["Apache-2.0"] | permissive |
//! X.509 bundle and JWT bundle types.
use crate::spiffe_id::TrustDomain;
use std::error::Error;
pub mod jwt;
pub mod x509;
/// Represents a collection of public keys.
pub trait Bundle {}
/// Represents a source of bundles queryable by [`TrustDomain`].
pub trait BundleRefSource {
/// The type of the bundles provided by the source.
type Item: Bundle;
/// Returns a reference to the bundle (the set of public key authorities) associated with the [`TrustDomain`].
/// If no bundle is associated with the trust domain, it returns `Ok(None)`.
/// If the source fails while fetching the bundle, it returns an `Err(Box<dyn Error + Send + Sync + 'static>)`.
fn get_bundle_for_trust_domain(
&self,
trust_domain: &TrustDomain,
) -> Result<Option<&Self::Item>, Box<dyn Error + Send + Sync + 'static>>;
}
/// Represents a source of bundles queryable by [`TrustDomain`].
pub trait BundleSource {
/// The type of the bundles provided by the source.
type Item: Bundle;
/// Returns an owned bundle (the set of public key authorities) associated with the [`TrustDomain`].
/// If no bundle is associated with the trust domain, it returns `Ok(None)`.
/// If the source fails while fetching the bundle, it returns an `Err(Box<dyn Error + Send + Sync + 'static>)`.
fn get_bundle_for_trust_domain(
&self,
trust_domain: &TrustDomain,
) -> Result<Option<Self::Item>, Box<dyn Error + Send + Sync + 'static>>;
}
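// Editor's sketch (not part of this crate): a trivial `BundleRefSource`
// implementation showing the documented contract — returning `Ok(None)` when no
// bundle is known for the requested trust domain. `EmptyBundle` and
// `EmptySource` are invented here purely for illustration.
#[cfg(test)]
#[allow(dead_code)]
mod source_sketch {
    use super::{Bundle, BundleRefSource};
    use crate::spiffe_id::TrustDomain;
    use std::error::Error;

    struct EmptyBundle;
    impl Bundle for EmptyBundle {}

    struct EmptySource;

    impl BundleRefSource for EmptySource {
        type Item = EmptyBundle;

        fn get_bundle_for_trust_domain(
            &self,
            _trust_domain: &TrustDomain,
        ) -> Result<Option<&Self::Item>, Box<dyn Error + Send + Sync + 'static>> {
            // This source never holds a bundle, so it reports "not found".
            Ok(None)
        }
    }
}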
| true
8ef8f46060438375346850037e28fb9dbe250240 | Rust | k0pernicus/aoc2019 | /aoc_12/src/lib/system.rs | UTF-8 | 1,361 | 2.96875 | 3 | [] | no_license |
use super::moon::{Moon, Velocity};
use std::default::Default;
use std::str::FromStr;
pub struct System {
pub moons: Vec<Moon>,
pub velocities: Vec<Velocity>,
}
impl Default for System {
fn default() -> System {
System {
moons: Vec::new(),
velocities: Vec::new(),
}
}
}
impl System {
pub fn add_moon(&mut self, s: &str) {
self.moons.push(Moon::from_str(s).unwrap());
self.velocities.push(Velocity::default());
}
pub fn compute_step(&mut self) -> Vec<Moon> {
let clones: Vec<Moon> = self.moons.clone();
let mut velocities: Vec<Velocity> = Vec::with_capacity(clones.len());
for (i, m) in self.moons.iter_mut().enumerate() {
            // Gather every moon except the one currently being updated.
            let mut o: Vec<Moon> = Vec::with_capacity(clones.len() - 1);
for x in 0..i {
o.push(clones[x].clone());
}
for x in i + 1..clones.len() {
o.push(clones[x].clone());
}
let c_v = m.compute_velocity(o, &self.velocities[i]);
self.velocities[i] = c_v.clone();
velocities.push(c_v);
}
        // Once all velocities have been computed, apply them to the moons.
let z = self.moons.iter().zip(velocities.iter());
z.map(|(m, v)| (*m).apply_velocity(v))
.collect::<Vec<Moon>>()
}
}
| true
e617d5b742df558d4180750200c95808dc01bc98 | Rust | AngelOfSol/weiss-schwarz-dsl | /rules/src/model/model_ref.rs | UTF-8 | 2,395 | 3.03125 | 3 | ["MIT"] | permissive |
use crate::model::{Card, CardId, Game, Player, ZoneId, ZoneVec};
pub trait GameDeref {
type Target;
fn get<'game>(&self, game: &'game Game) -> &'game Self::Target;
fn get_mut<'game>(&self, game: &'game mut Game) -> &'game mut Self::Target;
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PlayerRef {
player: usize,
}
impl PlayerRef {
pub fn new(player: usize) -> Self {
Self { player }
}
pub fn zone_ref(&self, zone: ZoneId) -> ZoneRef {
ZoneRef {
player: *self,
zone,
}
}
}
impl GameDeref for PlayerRef {
type Target = Player;
fn get<'game>(&self, game: &'game Game) -> &'game Self::Target {
&game.players[self.player]
}
fn get_mut<'game>(&self, game: &'game mut Game) -> &'game mut Self::Target {
&mut game.players[self.player]
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ZoneRef {
player: PlayerRef,
zone: ZoneId,
}
impl ZoneRef {
pub fn id(&self) -> ZoneId {
self.zone
}
pub fn owner(&self) -> PlayerRef {
self.player
}
pub fn card_ref(&self, card_id: CardId, index: usize) -> CardRef {
CardRef {
zone: *self,
card_id,
index,
}
}
}
impl GameDeref for ZoneRef {
type Target = ZoneVec;
fn get<'game>(&self, game: &'game Game) -> &'game Self::Target {
let player = self.player.get(game);
&player.zones[&self.zone]
}
fn get_mut<'game>(&self, game: &'game mut Game) -> &'game mut Self::Target {
let player = self.player.get_mut(game);
player.zones.get_mut(&self.zone).unwrap()
}
}
pub struct CardRef {
zone: ZoneRef,
card_id: CardId,
index: usize,
}
impl CardRef {
pub fn owner(&self) -> PlayerRef {
self.zone.owner()
}
pub fn zone(&self) -> ZoneRef {
self.zone
}
pub fn id(&self) -> CardId {
self.card_id
}
}
impl GameDeref for CardRef {
type Target = Card;
fn get<'game>(&self, game: &'game Game) -> &'game Self::Target {
&self.zone.owner().get(game).cards[&self.zone.get(game)[self.index]]
}
fn get_mut<'game>(&self, game: &'game mut Game) -> &'game mut Self::Target {
let key = self.zone.get(game)[self.index];
self.zone.owner().get_mut(game).cards.get_mut(&key).unwrap()
}
}
| true
04725027d4afa8a8d27231c6b47a726d44ec0d55 | Rust | silverdrake11/aoc_2020 | /src/day18.rs | UTF-8 | 2,079 | 3.375 | 3 | [] | no_license |
use std::fs;
const RADIX: u32 = 10;
// Returns the tokens of the first balanced parenthesised group in `ex`,
// with the outer '(' and ')' stripped off.
fn simplify_bracket(ex: &[Vec<char>]) -> Vec<Vec<char>> {
let mut new: Vec<Vec<char>> = Vec::new();
let mut first: Vec<char> = ex[0].clone();
first.remove(0);
new.push(first);
let mut count = 0;
for (i,op) in ex.iter().enumerate() {
for &c in op {
if c == '(' {
count += 1;
}
if c == ')' {
count -= 1;
if count == 0 {
let mut last: Vec<char> = ex[i].clone();
last.pop();
new.push(last);
return new;
}
}
}
if i != 0 {
new.push(op.clone());
}
}
return new;
}
// Evaluates the tokenized expression strictly left to right, treating '+' and
// '*' with equal precedence and recursing into parenthesised sub-expressions.
fn hello(ex: &Vec<Vec<char>>) -> usize {
let mut total: usize = 0;
let mut cursor: usize = 0;
while cursor < ex.len() {
let cur_char = ex[cursor][0];
let next_char = ex[cursor+1][0];
if cur_char == '*' || cur_char == '+' {
let next_exp: usize;
let next_len: usize;
if next_char == '(' {
let v = simplify_bracket(&ex[cursor+1..]);
next_len = v.len()+1;
next_exp = hello(&v);
} else {
next_exp = next_char.to_digit(RADIX).unwrap() as usize;
next_len = 2;
}
if cur_char == '*' {
total *= next_exp;
} else {
total += next_exp;
}
cursor += next_len;
} else if cur_char == '(' {
let cur_ex = simplify_bracket(&ex[cursor..]);
cursor += cur_ex.len();
total = hello(&cur_ex);
} else { // Note it will only be a digit at the beginning of expression
total = cur_char.to_digit(RADIX).unwrap() as usize;
cursor += 1;
}
}
return total;
}
pub fn advent() {
let filename: String = "18.txt".to_string();
let text = fs::read_to_string(filename).unwrap();
let mut values: usize = 0;
for line in text.lines() {
let splitted: Vec<&str> = line.split_whitespace().collect();
let mut init: Vec<Vec<char>> = Vec::new();
for item in splitted {
init.push(item.chars().collect());
}
values += hello(&init);
}
println!("{:?}", values);
}
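// Editor's sketch (not in the original file): the evaluator applies '+' and '*'
// with equal precedence, strictly left to right. The two expressions below are
// the well-known worked examples for that rule (71 and 26).
#[cfg(test)]
mod left_to_right_tests {
    use super::*;

    fn eval(line: &str) -> usize {
        let tokens: Vec<Vec<char>> = line
            .split_whitespace()
            .map(|item| item.chars().collect())
            .collect();
        hello(&tokens)
    }

    #[test]
    fn evaluates_left_to_right() {
        assert_eq!(eval("1 + 2 * 3 + 4 * 5 + 6"), 71);
        assert_eq!(eval("2 * 3 + (4 * 5)"), 26);
    }
}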
| true
d1d526bb8e139fb62b25ec8e94b955bd95b1ec4b | Rust | Juhlinus/languages | /compiled_starters/sqlite-starter-rust/src/schema.rs | UTF-8 | 828 | 3.21875 | 3 | ["MIT"] | permissive |
#[derive(Debug)]
pub struct Schema {
kind: String,
name: String,
table_name: String,
root_page: u8,
sql: String,
}
impl Schema {
/// Parses a record into a schema
pub fn parse(record: Vec<Vec<u8>>) -> Option<Self> {
let mut items = record.into_iter();
let kind = items.next()?;
let name = items.next()?;
let table_name = items.next()?;
let root_page = *items.next()?.get(0)?;
let sql = items.next()?;
let schema = Self {
kind: String::from_utf8_lossy(&kind).to_string(),
name: String::from_utf8_lossy(&name).to_string(),
table_name: String::from_utf8_lossy(&table_name).to_string(),
root_page,
sql: String::from_utf8_lossy(&sql).to_string(),
};
Some(schema)
}
}
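// Editor's sketch (not part of the starter code): `parse` consumes five columns
// in order (kind, name, table name, root page, SQL), matching the struct above.
// The record below is fabricated purely to illustrate the expected shape.
#[cfg(test)]
mod parse_sketch {
    use super::*;

    #[test]
    fn parses_a_five_column_record() {
        let record: Vec<Vec<u8>> = vec![
            b"table".to_vec(),
            b"apples".to_vec(),
            b"apples".to_vec(),
            vec![2],
            b"CREATE TABLE apples (id INTEGER PRIMARY KEY)".to_vec(),
        ];
        assert!(Schema::parse(record).is_some());
    }
}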
| true
b181911bb2b6cebe058cc159c1ea62a5dabb930e | Rust | ItsHoff/Rusty | /src/pt_renderer/tracers/bdpt/vertex.rs | UTF-8 | 18,253 | 2.75 | 3 | ["MIT"] | permissive |
use cgmath::prelude::*;
use cgmath::{Point3, Vector3};
use crate::camera::PTCamera;
use crate::color::Color;
use crate::config::*;
use crate::consts;
use crate::float::*;
use crate::intersect::{Interaction, Ray};
use crate::light::Light;
use crate::pt_renderer::PathType;
use crate::sample;
use crate::scene::Scene;
fn dir_and_dist(from: &dyn Vertex, to: &dyn Vertex) -> (Vector3<Float>, Float) {
let to_next = to.pos() - from.pos();
let dist = to_next.magnitude();
let dir = to_next / dist;
(dir, dist)
}
/// Get the area pdf of scattering v1 -> v2 -> v3
/// Return None if pdf is a delta distribution
pub fn pdf_scatter(v1: &dyn Vertex, v2: &SurfaceVertex, v3: &dyn Vertex) -> Option<Float> {
if v2.delta_dir() {
return None;
}
let (wo, _) = dir_and_dist(v2, v1);
let (wi, dist) = dir_and_dist(v2, v3);
let pdf_dir = v2.isect.pdf(wo, wi);
Some(sample::to_area_pdf(
pdf_dir,
dist.powi(2),
v3.cos_g(wi).abs(),
))
}
/// Get the area pdf of forward and backward scattering v1 -> v2 -> v3
/// Return None if pdf is a delta distribution
/// This is used for scattering along pure paths (only camera or only light, not mixed),
/// which means that the rays that generated v2 and v3 contain the correct directions
/// and lengths. This is more efficient than computing them again, but more importantly
/// recomputing the direction will cause subtle errors since the computed direction won't
/// match the real sampled direction due to ray origin shift.
/// r_
/// exaggerated | \_
/// visualization | \_ != x -----> o
/// x \-> o
pub fn pdf_precompute(
v1: &dyn Vertex,
v2: &SurfaceVertex,
v3: &SurfaceVertex,
) -> (Option<Float>, Option<Float>) {
if v2.delta_dir() {
return (None, None);
}
let wo = -v2.ray.dir;
let wi = v3.ray.dir;
let mut pdf_fwd = v2.isect.pdf(wo, wi);
let mut pdf_rev = v2.isect.pdf(wi, wo);
pdf_fwd = sample::to_area_pdf(pdf_fwd, v3.ray.length.powi(2), v3.cos_g(wi).abs());
pdf_rev = sample::to_area_pdf(pdf_rev, v2.ray.length.powi(2), v1.cos_g(wo).abs());
(Some(pdf_fwd), Some(pdf_rev))
}
pub struct BDPath<'a> {
light_vertex: &'a LightVertex<'a>,
light_path: &'a [SurfaceVertex<'a>],
light_pdf_fwd: Vec<Option<Float>>,
light_pdf_rev: Vec<Option<Float>>,
camera_vertex: &'a CameraVertex<'a>,
camera_path: &'a [SurfaceVertex<'a>],
camera_pdf_fwd: Vec<Option<Float>>,
camera_pdf_rev: Vec<Option<Float>>,
config: &'a RenderConfig,
}
impl<'a> BDPath<'a> {
pub fn new(
light_vertex: &'a LightVertex<'a>,
light_path: &'a [SurfaceVertex<'a>],
camera_vertex: &'a CameraVertex,
camera_path: &'a [SurfaceVertex<'a>],
config: &'a RenderConfig,
) -> Self {
// Precompute fwd and rev pdfs
// None pdf corresponds to a delta distribution
// TODO: handle delta distributions already in primitives and not just here
let mut light_pdf_fwd = Vec::new();
let mut light_pdf_rev = Vec::new();
for i in 0..=light_path.len() {
if i == 0 {
if light_vertex.light.delta_pos() {
light_pdf_fwd.push(None);
} else {
light_pdf_fwd.push(Some(light_vertex.pdf_pos));
}
} else if i == 1 {
if light_vertex.delta_dir() {
light_pdf_fwd.push(None);
} else {
light_pdf_fwd.push(Some(light_vertex.pdf_next(&light_path[0])));
}
} else {
let v_prev: &dyn Vertex = if i == 2 {
light_vertex
} else {
&light_path[i - 3]
};
let v_mid = &light_path[i - 2];
let v_next = &light_path[i - 1];
let (pdf_fwd, pdf_rev) = pdf_precompute(v_prev, v_mid, v_next);
light_pdf_fwd.push(pdf_fwd);
light_pdf_rev.push(pdf_rev);
}
}
let mut camera_pdf_fwd = Vec::new();
let mut camera_pdf_rev = Vec::new();
for i in 0..=camera_path.len() {
if i == 0 {
// Pinhole camera
camera_pdf_fwd.push(None);
} else if i == 1 {
if camera_vertex.delta_dir() {
camera_pdf_fwd.push(None);
} else {
camera_pdf_fwd.push(Some(camera_vertex.pdf_next(&camera_path[0])));
}
} else {
let v_prev: &dyn Vertex = if i == 2 {
camera_vertex
} else {
&camera_path[i - 3]
};
let v_mid = &camera_path[i - 2];
let v_next = &camera_path[i - 1];
let (pdf_fwd, pdf_rev) = pdf_precompute(v_prev, v_mid, v_next);
camera_pdf_fwd.push(pdf_fwd);
camera_pdf_rev.push(pdf_rev);
}
}
Self {
light_vertex,
light_path,
light_pdf_fwd,
light_pdf_rev,
camera_vertex,
camera_path,
camera_pdf_fwd,
camera_pdf_rev,
config,
}
}
/// Get a subpath with s light vertices and t camera vertices
/// Will panic if (s, t) is not a valid subpath
pub fn subpath(&self, s: usize, t: usize) -> SubPath {
let bounces = s + t - 2;
assert!(
bounces <= self.config.max_bounces,
"Path contains {} bounces but it can't contain more than {} bounces!",
bounces,
self.config.max_bounces,
);
assert!(
s <= self.light_path.len() + 1,
"Cannot construct sub path with {} light vertices when there are only {}!",
s,
self.light_path.len() + 1,
);
assert!(
t <= self.camera_path.len() + 1,
"Cannot construct sub path with {} camera vertices when there are only {}!",
t,
self.camera_path.len() + 1,
);
assert!(
s != 0 || self.camera_path[t - 2].isect.tri.is_emissive(),
"Sub path ({}, {}) does not end at a emissive vertex!",
s,
t,
);
SubPath {
path: self,
s,
t,
tmp_light_vertex: None,
}
}
/// Get a sub path with only camera vertices which ends at light_vertex
pub fn subpath_with_light(&self, light_vertex: LightVertex<'a>, t: usize) -> SubPath {
let mut subpath = self.subpath(0, t);
subpath.tmp_light_vertex = Some(light_vertex);
subpath
}
}
pub struct SubPath<'a> {
path: &'a BDPath<'a>,
s: usize,
t: usize,
tmp_light_vertex: Option<LightVertex<'a>>,
}
impl SubPath<'_> {
/// Compute the weight for the radiance that is transported along this path
pub fn weight(&self) -> Float {
let bounces = self.s + self.t - 2;
if bounces == 0 {
1.0
} else if !self.path.config.mis {
1.0 / (bounces + 2).to_float()
} else {
let power = 2; // for power heuristic
let mut sum = 1.0;
let mut light_ratio = 1.0;
for si in (0..self.s).rev() {
light_ratio *= (self.camera_pdf(si + 1).unwrap_or(1.0)
/ self.light_pdf(si + 1).unwrap_or(1.0))
.powi(power);
let delta_light = if si == 0 {
// No need to care about the tmp_light_vertex, since if it exists
// then self.s is always 0, and this branch is not evaluated.
self.path.light_vertex.light.delta_pos()
} else {
self.get_vertex(si).delta_dir()
};
if !delta_light && !self.get_vertex(si + 1).delta_dir() {
sum += light_ratio;
}
}
let mut camera_ratio = 1.0;
for ti in (2..=self.t).rev() {
let si = self.t_to_s(ti);
camera_ratio *= (self.light_pdf(si).unwrap_or(1.0)
/ self.camera_pdf(si).unwrap_or(1.0))
.powi(power);
if !self.get_vertex(si).delta_dir() && !self.get_vertex(si + 1).delta_dir() {
sum += camera_ratio;
}
}
1.0 / sum
}
}
/// Map camera side index t to light side index s
fn t_to_s(&self, t: usize) -> usize {
self.s + self.t - t + 1
}
/// Map light side index s to camera side index t
fn s_to_t(&self, s: usize) -> usize {
self.s + self.t - s + 1
}
/// Get the vertex s of the path
fn get_vertex(&self, s: usize) -> &dyn Vertex {
if s == 1 {
if let Some(light) = &self.tmp_light_vertex {
light
} else {
self.path.light_vertex
}
} else if self.s_to_t(s) == 1 {
self.path.camera_vertex
} else {
self.get_surface(s)
}
}
/// Get the s:th surface vertex on the path
/// Will panic if the vertex does not exist
fn get_surface(&self, s: usize) -> &SurfaceVertex {
if s <= self.s {
&self.path.light_path[s - 2]
} else {
&self.path.camera_path[self.s_to_t(s) - 2]
}
}
/// Get the pdf of sampling vertex s from direction of the light
fn light_pdf(&self, s: usize) -> Option<Float> {
let mut pdf = if s <= self.s {
self.path.light_pdf_fwd[s - 1]?
} else {
let t = self.s_to_t(s);
// Connection vertex interpreted as light
if self.s == 0 && t == self.t {
let light = self.tmp_light_vertex.as_ref().unwrap();
light.pdf_pos
// Sampling emitted light from connection vertex
} else if self.s == 0 && t == self.t - 1 {
let light = self.tmp_light_vertex.as_ref().unwrap();
light.pdf_next(self.get_surface(s))
// Connection vertex sampled from the light
} else if self.s == 1 && t == self.t {
self.path.light_vertex.pdf_next(self.get_surface(s))
// Scattering from the light direction for the connection vertices.
} else if t >= self.t - 1 {
let v1 = self.get_vertex(s - 2);
let v2 = self.get_surface(s - 1);
let v3 = self.get_vertex(s);
if v2.delta_dir() {
return None;
} else {
pdf_scatter(v1, v2, v3)?
}
// Backwards scattering along the light path
} else {
self.path.camera_pdf_rev[t - 1]?
}
};
// Check if russian roulette bounce was needed to sample the vertex
if let RussianRoulette::Static(rr_prob) = self.path.config.russian_roulette {
if s > 2 && s - 2 > self.path.config.pre_rr_bounces {
pdf *= rr_prob;
}
}
Some(pdf)
}
/// Get the pdf of sampling vertex s from direction of the camera
fn camera_pdf(&self, s: usize) -> Option<Float> {
let t = self.s_to_t(s);
let mut pdf = if s > self.s {
// Regular sampling of the camera path
self.path.camera_pdf_fwd[t - 1]?
} else {
// Connection vertex sampled from the camera
if self.t == 1 && s == self.s {
self.path.camera_vertex.pdf_next(self.get_surface(s))
// Scattering from the camera direction for the connection vertices.
} else if s >= self.s - 1 {
let v1 = self.get_vertex(s + 2);
let v2 = self.get_surface(s + 1);
let v3 = self.get_vertex(s);
if v2.delta_dir() {
return None;
} else {
pdf_scatter(v1, v2, v3)?
}
// Backwards scattering along the light path
} else {
self.path.light_pdf_rev[s - 1]?
}
};
// Check if russian roulette bounce was needed to sample the vertex
if let RussianRoulette::Static(rr_prob) = self.path.config.russian_roulette {
if t > 2 && t - 2 > self.path.config.pre_rr_bounces {
pdf *= rr_prob;
}
}
Some(pdf)
}
}
pub trait Vertex: std::fmt::Debug {
/// Get the position of the vertex
fn pos(&self) -> Point3<Float>;
/// Get the shadow ray origin for dir
fn shadow_origin(&self, dir: Vector3<Float>) -> Point3<Float>;
/// Geometric cosine
fn cos_g(&self, dir: Vector3<Float>) -> Float;
/// Shading cosine
fn cos_s(&self, dir: Vector3<Float>) -> Float;
/// Is the directional distribution a delta distribution
fn delta_dir(&self) -> bool;
/// Evaluate the throughput for a path continuing in dir
fn path_throughput(&self, dir: Vector3<Float>) -> Color;
/// Connect vertex to a surface vertex.
/// Return the shadow ray and total path throughput.
/// Will panic if other is not a surface vertex.
fn connect_to(&self, other: &dyn Vertex) -> (Ray, Color) {
let ray = Ray::shadow(self.shadow_origin(other.pos() - self.pos()), other.pos());
let beta = self.path_throughput(ray.dir) * other.path_throughput(-ray.dir);
let g = (self.cos_s(ray.dir) * other.cos_s(ray.dir) / ray.length.powi(2)).abs();
(ray, g * beta)
}
}
#[derive(Debug)]
pub struct CameraVertex<'a> {
pub camera: &'a PTCamera,
ray: Ray,
}
impl<'a> CameraVertex<'a> {
pub fn new(camera: &'a PTCamera, ray: Ray) -> Self {
Self { camera, ray }
}
pub fn sample_next(&self) -> (Color, Ray) {
// This is the real value but it always equals to 1.0
// let dir = self.ray.dir;
// let beta = self.camera.we(dir) * self.camera.cos_s(dir).abs() / self.camera.pdf(dir);
let beta = Color::white();
(beta, self.ray.clone())
}
pub fn pdf_next(&self, next: &SurfaceVertex) -> Float {
let (dir, dist) = dir_and_dist(self, next);
let pdf_dir = self.camera.pdf_dir(dir);
sample::to_area_pdf(pdf_dir, dist.powi(2), next.cos_g(dir).abs())
}
}
impl Vertex for CameraVertex<'_> {
fn pos(&self) -> Point3<Float> {
self.camera.pos
}
fn shadow_origin(&self, _dir: Vector3<Float>) -> Point3<Float> {
// There is no physical camera so no need to care for self shadowing
self.camera.pos
}
fn cos_g(&self, dir: Vector3<Float>) -> Float {
self.camera.cos_g(dir)
}
fn cos_s(&self, dir: Vector3<Float>) -> Float {
self.cos_g(dir)
}
fn delta_dir(&self) -> bool {
false
}
fn path_throughput(&self, dir: Vector3<Float>) -> Color {
self.camera.we(dir)
}
}
#[derive(Clone, Debug)]
pub struct LightVertex<'a> {
light: &'a dyn Light,
pos: Point3<Float>,
pdf_pos: Float,
}
impl<'a> LightVertex<'a> {
pub fn new(light: &'a dyn Light, pos: Point3<Float>, pdf_pos: Float) -> Self {
Self {
light,
pos,
pdf_pos,
}
}
pub fn sample_next(&self) -> (Color, Ray) {
let (le, dir, dir_pdf) = self.light.sample_dir();
let ray = Ray::from_dir(self.pos + consts::EPSILON * dir, dir);
let beta = le * self.cos_s(ray.dir).abs() / (self.pdf_pos * dir_pdf);
(beta, ray)
}
pub fn pdf_next(&self, next: &SurfaceVertex) -> Float {
let (dir, dist) = dir_and_dist(self, next);
let pdf_dir = self.light.pdf_dir(dir);
sample::to_area_pdf(pdf_dir, dist.powi(2), next.cos_g(dir).abs())
}
}
impl Vertex for LightVertex<'_> {
fn pos(&self) -> Point3<Float> {
self.pos
}
fn shadow_origin(&self, _dir: Vector3<Float>) -> Point3<Float> {
panic!("Shadow rays starting from lights not implemented!");
}
fn cos_g(&self, dir: Vector3<Float>) -> Float {
self.light.cos_g(dir)
}
fn cos_s(&self, dir: Vector3<Float>) -> Float {
self.cos_g(dir)
}
fn delta_dir(&self) -> bool {
false
}
fn path_throughput(&self, dir: Vector3<Float>) -> Color {
self.light.le(dir) / self.pdf_pos
}
}
#[derive(Clone, Debug)]
pub struct SurfaceVertex<'a> {
/// Ray that generated this vertex
pub ray: Ray,
/// Attenuation for radiance scattered from this vertex
beta: Color,
path_type: PathType,
pub isect: Interaction<'a>,
}
impl<'a> SurfaceVertex<'a> {
pub fn new(ray: Ray, beta: Color, path_type: PathType, isect: Interaction<'a>) -> Self {
Self {
ray,
beta,
isect,
path_type,
}
}
/// Radiance along the path ending at this vertex
pub fn path_radiance(&self) -> Color {
self.beta * self.isect.le(-self.ray.dir)
}
/// Attempt to convert the vertex to a light vertex
pub fn to_light_vertex(&self, scene: &Scene) -> Option<LightVertex> {
let tri = self.isect.tri;
if tri.is_emissive() {
let pdf_light = scene.pdf_light(tri);
let pdf_pos = tri.pdf_pos();
Some(LightVertex::new(tri, self.isect.p, pdf_light * pdf_pos))
} else {
None
}
}
}
impl Vertex for SurfaceVertex<'_> {
fn pos(&self) -> Point3<Float> {
self.isect.p
}
fn shadow_origin(&self, dir: Vector3<Float>) -> Point3<Float> {
self.isect.ray_origin(dir)
}
fn cos_g(&self, dir: Vector3<Float>) -> Float {
self.isect.cos_g(dir)
}
fn cos_s(&self, dir: Vector3<Float>) -> Float {
self.isect.cos_s(dir)
}
fn delta_dir(&self) -> bool {
self.isect.is_specular()
}
fn path_throughput(&self, dir: Vector3<Float>) -> Color {
self.beta * self.isect.bsdf(-self.ray.dir, dir, self.path_type)
}
}
| true
c078fe1a6c5a4e890071403c041ebf52936e436a | Rust | elektropay/arboric | /src/arboric/config/mod.rs | UTF-8 | 5,396 | 3.03125 | 3 | ["Apache-2.0"] | permissive |
//! The arboric::config module holds the structures and functions
//! for Arboric's configuration model
use crate::abac::PDP;
use hyper::Uri;
use std::env;
use std::net::{IpAddr, SocketAddr};
mod listener_builder;
pub use listener_builder::ListenerBuilder;
pub mod yaml;
/// The 'root' level configuration
#[derive(Debug)]
pub struct Configuration {
pub listeners: Vec<Listener>,
}
impl Configuration {
    /// Creates a new, empty [Configuration](arboric::config::Configuration)
pub fn new() -> Configuration {
Configuration {
listeners: Vec::new(),
}
}
pub fn listener<F>(&mut self, f: F)
where
F: FnOnce(ListenerBuilder) -> ListenerBuilder,
{
let listener_builder = f(ListenerBuilder::new());
self.listeners.push(listener_builder.build());
}
pub fn add_listener(&mut self, listener: Listener) {
self.listeners.push(listener);
}
}
/// A [KeyEncoding](arboric::config::KeyEncoding) just tells us whether the value is encoded as
/// hex or base64
#[derive(Debug, Clone)]
pub enum KeyEncoding {
Bytes,
Hex,
Base64,
}
/// A [JwtSigningKeySource](arboric::config::JwtSigningKeySource) defines
/// where and how to retrieve the signing key used to validate JWT bearer tokens.
/// It can be one of
///
/// * a hard-coded `Value`,
/// * an environment variable, or
/// * a file
///
/// And in any of the above cases, the value can be either be:
///
/// * the string value or file contents taken as 'raw' bytes,
/// * a hex encoded value, or
/// * a base64 encoded value
#[derive(Debug, Clone)]
pub enum JwtSigningKeySource {
Value(String, KeyEncoding),
FromEnv {
key: String,
encoding: KeyEncoding,
},
FromFile {
filename: String,
encoding: KeyEncoding,
},
}
impl JwtSigningKeySource {
pub fn hex(s: String) -> JwtSigningKeySource {
JwtSigningKeySource::Value(s, KeyEncoding::Hex)
}
pub fn base64(s: String) -> JwtSigningKeySource {
JwtSigningKeySource::Value(s, KeyEncoding::Base64)
}
pub fn hex_from_env(key: String) -> JwtSigningKeySource {
JwtSigningKeySource::FromEnv {
key: key,
encoding: KeyEncoding::Hex,
}
}
pub fn base64_from_env(key: String) -> JwtSigningKeySource {
JwtSigningKeySource::FromEnv {
key: key,
encoding: KeyEncoding::Base64,
}
}
pub fn get_secret_key_bytes(&self) -> crate::Result<Vec<u8>> {
match self {
JwtSigningKeySource::Value(secret, encoding) => match encoding {
KeyEncoding::Hex => Ok(hex::decode(&secret)?),
KeyEncoding::Base64 => Ok(base64::decode(&secret)?),
x => Err(crate::ArboricError::general(format!(
"Not yet implemented: {:?}!",
x
))),
},
JwtSigningKeySource::FromEnv { key, encoding } => {
let secret = env::var(key)?;
match encoding {
KeyEncoding::Hex => Ok(hex::decode(&secret)?),
KeyEncoding::Base64 => Ok(base64::decode(&secret)?),
x => Err(crate::ArboricError::general(format!(
"Not yet implemented: {:?}!",
x
))),
}
}
x => Err(crate::ArboricError::general(format!(
"{:?} not yet implemented!",
x
))),
}
}
}
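// Editor's sketch (not part of the crate's own tests): a hard-coded hex source
// decodes to the expected raw bytes. The key value here is an arbitrary
// illustration, not a real secret.
#[cfg(test)]
mod jwt_signing_key_source_sketch {
    use super::JwtSigningKeySource;

    #[test]
    fn hex_value_source_decodes_to_bytes() {
        let source = JwtSigningKeySource::hex("deadbeef".to_string());
        assert_eq!(
            source.get_secret_key_bytes().ok(),
            Some(vec![0xde_u8, 0xad, 0xbe, 0xef])
        );
    }
}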
/// An [Listener](arboric::config::Listener) defines:
///
/// * an inbound endpoint, comprising:
/// * a 'bind' IP address
/// * an optional 'path' or prefix, e.g. `"/graphql"`
/// * a back-end API URL
/// * an `arboric::abac::PDP` or set of ABAC policies
#[derive(Debug, Clone)]
pub struct Listener {
pub listener_address: SocketAddr,
pub listener_path: Option<String>,
pub api_uri: Uri,
pub jwt_signing_key_source: Option<JwtSigningKeySource>,
pub pdp: crate::abac::PDP,
}
impl Listener {
/// Construct a [Listener](arboric::config::Listener) that binds to the given
/// [IpAddr](std::net::IpAddr), port, and forwards to the API at the given [Uri](hyper::Uri)
pub fn ip_addr_and_port(ip_addr: IpAddr, port: u16, api_uri: &Uri) -> Listener {
Listener {
listener_address: SocketAddr::new(ip_addr, port),
listener_path: None,
api_uri: api_uri.clone(),
jwt_signing_key_source: None,
pdp: PDP::default(),
}
}
}
#[cfg(test)]
mod tests {
// Import names from outer (for mod tests) scope.
use super::*;
use std::net::{Ipv4Addr, SocketAddrV4};
#[test]
fn test_config_builder() {
let mut configuration = Configuration::new();
assert!(configuration.listeners.is_empty());
configuration.listener(|listener| {
listener
.localhost()
.port(4000)
.proxy("http://localhost:3000/graphql".parse::<Uri>().unwrap())
});
assert!(!configuration.listeners.is_empty());
assert_eq!(1, configuration.listeners.iter().count());
assert_eq!(
SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::LOCALHOST, 4000)),
configuration.listeners.first().unwrap().listener_address
);
}
}
| true
911f7e128ea3740fe09c33e4d9fdcafcb16a4300 | Rust | rust-skia/rust-skia | /skia-bindings/build_support/skia/config.rs | UTF-8 | 13,859 | 2.640625 | 3 | ["MIT"] | permissive |
//! Full build support for the Skia library.
use crate::build_support::{
binaries_config,
cargo::{self, Target},
features,
platform::{self, prelude::*},
};
use std::{
env,
path::{Path, PathBuf},
process::{Command, Stdio},
};
/// The build configuration for Skia.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BuildConfiguration {
/// Do we build _on_ a Windows OS?
pub on_windows: bool,
/// Set the optimization level (0-3, s or z). Clang and GCC use the same notation
/// as Rust, so we just pass this option through from Cargo.
pub opt_level: Option<String>,
/// Build Skia in a debug configuration?
pub skia_debug: bool,
/// The Skia feature set to compile.
pub features: features::Features,
/// C compiler to use
pub cc: String,
/// C++ compiler to use
pub cxx: String,
/// The target (arch-vendor-os-abi)
pub target: Target,
}
/// Builds a Skia configuration from a Features set.
impl BuildConfiguration {
pub fn from_features(features: features::Features, skia_debug: bool) -> Self {
// Yocto SDKs set CLANGCC/CLANGCXX, which is a better choice to determine clang,
// as CC/CXX are likely referring to gcc.
let cc = cargo::env_var("CLANGCC")
.or_else(|| cargo::env_var("CC"))
.unwrap_or_else(|| "clang".to_string());
let cxx = cargo::env_var("CLANGCXX")
.or_else(|| cargo::env_var("CXX"))
.unwrap_or_else(|| "clang++".to_string());
// It's possible that the provided command line for the compiler already includes --target.
        // We assume that it's the most specific/appropriate, so we extract and use it. It might, for example, include
// a vendor infix, while cargo targets usually don't.
let target = cc
.find("--target=")
.map(|target_option_offset| {
let target_tail = &cc[(target_option_offset + "--target=".len())..];
let target_str = target_tail
.split_once(' ')
.map_or(target_tail, |(target_str, ..)| target_str);
cargo::parse_target(target_str)
})
.unwrap_or_else(cargo::target);
BuildConfiguration {
on_windows: cargo::host().is_windows(),
// `OPT_LEVEL` is set by Cargo itself.
opt_level: cargo::env_var("OPT_LEVEL"),
features,
skia_debug,
cc,
cxx,
target,
}
}
}
/// This is the final, low level build configuration.
#[derive(Debug)]
pub struct FinalBuildConfiguration {
/// The Skia source directory.
pub skia_source_dir: PathBuf,
/// Arguments passed to GN.
pub gn_args: Vec<(String, String)>,
/// Whether to use system libraries or not.
pub use_system_libraries: bool,
/// The target (arch-vendor-os-abi)
pub target: Target,
/// An optional target sysroot
pub sysroot: Option<String>,
}
impl FinalBuildConfiguration {
pub fn from_build_configuration(
build: &BuildConfiguration,
use_system_libraries: bool,
skia_source_dir: &Path,
) -> FinalBuildConfiguration {
let features = &build.features;
// `SDKROOT` is the environment variable used on macOS to specify the sysroot.
// `SDKTARGETSYSROOT` is the environment variable set in Yocto Linux SDKs when
// cross-compiling.
let sysroot = cargo::env_var("SDKTARGETSYSROOT").or_else(|| cargo::env_var("SDKROOT"));
let mut builder = GnArgsBuilder::new(&build.target, use_system_libraries);
let gn_args = {
builder
.arg("is_official_build", yes_if(!build.skia_debug))
.arg("is_debug", yes_if(build.skia_debug))
.arg("skia_enable_svg", yes_if(features.svg))
.arg("skia_enable_gpu", yes_if(features.gpu()))
.arg("skia_enable_skottie", no());
// Always enable PDF document support, because it gets switched off for WASM builds.
// See <https://github.com/rust-skia/rust-skia/issues/694>
builder
.arg("skia_enable_pdf", yes())
.arg("skia_use_gl", yes_if(features.gl))
.arg("skia_use_egl", yes_if(features.egl))
.arg("skia_use_x11", yes_if(features.x11))
.arg("skia_use_system_libpng", yes_if(use_system_libraries))
.arg("skia_use_libwebp_encode", yes_if(features.webp_encode))
.arg("skia_use_libwebp_decode", yes_if(features.webp_decode))
.arg("skia_use_system_zlib", yes_if(use_system_libraries))
.arg("skia_use_xps", no())
.arg("skia_use_dng_sdk", yes_if(features.dng))
.arg("cc", quote(&build.cc))
.arg("cxx", quote(&build.cxx));
if features.vulkan {
builder
.arg("skia_use_vulkan", yes())
.arg("skia_enable_spirv_validation", no());
}
if features.metal {
builder.arg("skia_use_metal", yes());
}
if features.d3d {
builder.arg("skia_use_direct3d", yes());
}
// further flags that limit the components of Skia debug builds.
if build.skia_debug {
builder
.arg("skia_enable_spirv_validation", no())
.arg("skia_enable_tools", no())
.arg("skia_enable_vulkan_debug_layers", no())
.arg("skia_use_libheif", no())
.arg("skia_use_lua", no());
}
if features.text_layout {
builder
.arg("skia_enable_skshaper", yes())
.arg("skia_use_icu", yes())
.arg("skia_use_system_icu", yes_if(use_system_libraries))
.arg("skia_use_harfbuzz", yes())
.arg("skia_pdf_subset_harfbuzz", yes())
.arg("skia_use_system_harfbuzz", yes_if(use_system_libraries))
.arg("skia_use_sfntly", no())
.arg("skia_enable_skparagraph", yes());
// note: currently, tests need to be enabled, because modules/skparagraph
// is not included in the default dependency configuration.
// ("paragraph_tests_enabled", no()),
} else {
builder
.arg("skia_use_icu", no())
.arg("skia_use_harfbuzz", no());
}
if features.webp_encode || features.webp_decode {
builder.arg("skia_use_system_libwebp", yes_if(use_system_libraries));
}
if features.embed_freetype {
builder.arg("skia_use_system_freetype2", no());
}
// target specific gn args.
let target = &build.target;
if let Some(sysroot) = &sysroot {
builder.cflag(format!("--sysroot={sysroot}"));
}
let jpeg_sys_cflags: Vec<String>;
if cfg!(feature = "use-system-jpeg-turbo") {
let paths = cargo::env_var("DEP_JPEG_INCLUDE").expect("mozjpeg-sys include path");
jpeg_sys_cflags = std::env::split_paths(&paths)
.map(|arg| format!("-I{}", arg.display()))
.collect();
builder.cflags(jpeg_sys_cflags);
builder.arg("skia_use_system_libjpeg_turbo", yes());
} else {
builder.arg(
"skia_use_system_libjpeg_turbo",
yes_if(use_system_libraries),
);
}
if let Some(opt_level) = &build.opt_level {
/* LTO generates corrupt libraries on the host platforms when building with --release
if opt_level.parse::<usize>() != Ok(0) {
builder.skia_cflag("-flto");
}
*/
// When targeting windows `-O` isn't supported.
if !target.is_windows() {
builder.cflag(format!("-O{opt_level}"));
}
}
// Always compile expat
builder.arg("skia_use_expat", yes());
builder.arg("skia_use_system_expat", yes_if(use_system_libraries));
// Add platform specific args
platform::gn_args(build, builder)
};
FinalBuildConfiguration {
skia_source_dir: skia_source_dir.into(),
gn_args,
use_system_libraries,
target: build.target.clone(),
sysroot,
}
}
}
/// Orchestrates the entire build of Skia based on the arguments provided.
pub fn build(
build: &FinalBuildConfiguration,
config: &binaries_config::BinariesConfiguration,
ninja_command: Option<PathBuf>,
gn_command: Option<PathBuf>,
offline: bool,
) {
let python = &prerequisites::locate_python3_cmd();
println!("Python 3 found: {python:?}");
let ninja = ninja_command.unwrap_or_else(|| {
env::current_dir()
.unwrap()
.join("depot_tools")
.join(ninja::default_exe_name())
});
if !offline && !build.use_system_libraries {
println!("Synchronizing Skia dependencies");
#[cfg(feature = "binary-cache")]
crate::build_support::binary_cache::resolve_dependencies();
assert!(
Command::new(python)
// Explicitly providing `GIT_SYNC_DEPS_PATH` fixes a problem with `git-sync-deps`
// accidentally resolving an absolute directory for `GIT_SYNC_DEPS_PATH` when MingW
// Python 3 runs on Windows under MSys.
.env("GIT_SYNC_DEPS_PATH", "skia/DEPS")
.arg("skia/tools/git-sync-deps")
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status()
.unwrap()
.success(),
"`skia/tools/git-sync-deps` failed"
);
}
configure_skia(build, config, python, gn_command.as_deref());
build_skia(config, &ninja);
}
/// Configures Skia by calling gn
pub fn configure_skia(
build: &FinalBuildConfiguration,
config: &binaries_config::BinariesConfiguration,
python: &Path,
gn_command: Option<&Path>,
) {
let gn_args = build
.gn_args
.iter()
.map(|(name, value)| name.clone() + "=" + value)
.collect::<Vec<String>>()
.join(" ");
let gn_command = gn_command
.map(|p| p.to_owned())
.unwrap_or_else(|| build.skia_source_dir.join("bin").join("gn"));
println!("Skia args: {}", &gn_args);
let output = Command::new(gn_command)
.args([
"gen",
config.output_directory.to_str().unwrap(),
&format!("--script-executable={}", python.to_str().unwrap()),
&format!("--args={gn_args}"),
])
.envs(env::vars())
.current_dir(&build.skia_source_dir)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.output()
.expect("gn error");
assert!(
output.status.code() == Some(0),
"{:?}",
String::from_utf8(output.stdout).unwrap()
);
}
/// Builds Skia.
///
/// This function assumes that all prerequisites are in place and that the output directory
/// contains a fully configured Skia source tree generated by gn.
pub fn build_skia(config: &binaries_config::BinariesConfiguration, ninja_command: &Path) {
let ninja_status = Command::new(ninja_command)
// Order of arguments do matter here: See <https://github.com/rust-skia/rust-skia/pull/643>
// for details.
.args(["-C", config.output_directory.to_str().unwrap()])
.args(&config.ninja_built_libraries)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status();
assert!(
ninja_status
.expect("failed to run `ninja`, does the directory depot_tools/ exist?")
.success(),
"`ninja` returned an error, please check the output for details."
);
}
mod prerequisites {
use std::path::PathBuf;
use std::process::Command;
/// Resolves the full path
pub fn locate_python3_cmd() -> PathBuf {
const PYTHON_CMDS: [&str; 2] = ["python", "python3"];
for python in PYTHON_CMDS.as_ref() {
println!("Probing '{python}'");
if let Some(true) = is_python_version_3(python) {
return python.into();
}
}
panic!(">>>>> Probing for Python 3 failed, please make sure that it's available in PATH, probed executables are: {PYTHON_CMDS:?} <<<<<");
}
/// Returns `true` if the given python executable identifies itself as a python version 3
/// executable. Returns `None` if the executable was not found.
fn is_python_version_3(exe: impl AsRef<str>) -> Option<bool> {
Command::new(exe.as_ref())
.arg("--version")
.output()
.map(|output| {
let mut str = String::from_utf8(output.stdout).unwrap();
if str.is_empty() {
// Python2 seems to push the version to stderr.
str = String::from_utf8(output.stderr).unwrap()
}
// Don't parse version output, for example output
// might be "Python 2.7.15+"
str.starts_with("Python 3.")
})
.ok()
}
}
mod ninja {
use std::path::PathBuf;
pub fn default_exe_name() -> PathBuf {
if cfg!(windows) { "ninja.exe" } else { "ninja" }.into()
}
}
| true
e8319f17d9c523a81da163608b0cd9906ed7a190 | Rust | quintenpalmer/learn-rust-calculator | /lib/calclogic/src/parse.rs | UTF-8 | 1,237 | 2.9375 | 3 | [] | no_license |
use std::slice;
use models::{Result, Error, Token, Operation, Value};
/// Parses a token stream in parenthesised prefix notation: an expression is
/// either a number or `( <operation> <expr> <expr> )`.
pub fn parse_tokens(tokens: Vec<Token>) -> Result<Value> {
    parse_expr(&mut tokens.iter())
}
fn parse_expr(tokens: &mut slice::Iter<Token>) -> Result<Value> {
    match next_token(tokens)? {
        Token::Open => {
            let op = parse_operation(tokens)?;
            let v1 = parse_expr(tokens)?;
            let v2 = parse_expr(tokens)?;
            match next_token(tokens)? {
                Token::Close => (),
                token => return Err(Error::WrongToken("Close", token.clone())),
            }
            Ok(Value::Operate(op, Box::new(v1), Box::new(v2)))
        }
        Token::Num(n) => Ok(Value::Num(n)),
        token => Err(Error::WrongToken("Open or Num", token.clone())),
    }
}
fn parse_operation(tokens: &mut slice::Iter<Token>) -> Result<Operation> {
    match next_token(tokens)? {
        Token::Operation(operation) => Ok(operation),
        token => Err(Error::WrongToken("Operation", token.clone())),
    }
}
fn next_token(tokens: &mut slice::Iter<Token>) -> Result<Token> {
    tokens.next().map(|v| *v).ok_or(Error::UnexpectedEOF)
}
| true
741e074da432fd0fabb119c82828f187e630afec | Rust | iCodeIN/slabmap | /tests/proptests.rs | UTF-8 | 4,083 | 3.21875 | 3 | ["MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference"] | permissive |
use proptest::prelude::*;
use slabmap::*;
use std::collections::HashMap;
use std::fmt::Debug;
struct Tester<T> {
slab: SlabMap<T>,
m: HashMap<usize, T>,
log: bool,
}
impl<T: Clone + Eq + PartialEq + Debug + PartialOrd + Ord> Tester<T> {
pub fn new(log: bool) -> Self {
Self {
slab: SlabMap::new(),
m: HashMap::new(),
log,
}
}
pub fn insert(&mut self, value: T) {
let key = self.slab.insert(value.clone());
self.m.insert(key, value.clone());
if self.log {
eprintln!("insert({:?}) -> {}", value, key);
}
}
pub fn remove(&mut self, key: usize) {
let l = self.slab.remove(key);
let r = self.m.remove(&key);
assert_eq!(l, r, "remove {}", key);
if self.log {
eprintln!("remove({}) -> {:?}", key, l);
}
}
pub fn clear(&mut self) {
self.slab.clear();
self.m.clear();
if self.log {
eprintln!("clear");
}
}
pub fn optimize(&mut self) {
self.slab.optimize();
if self.log {
eprintln!("optimize()");
}
}
pub fn reserve(&mut self, additional: usize) {
self.slab.reserve(additional);
if self.log {
eprintln!("reserve({})", additional);
}
assert!(self.slab.capacity() >= self.slab.len() + additional);
}
pub fn check(&mut self) {
assert_eq!(self.slab.len(), self.m.len(), "len");
let mut l: Vec<_> = self
.slab
.iter()
.map(|(key, value)| (key, value.clone()))
.collect();
let mut l_mut: Vec<_> = self
.slab
.iter_mut()
.map(|(key, value)| (key, value.clone()))
.collect();
let mut r: Vec<_> = self
.m
.iter()
.map(|(key, value)| (*key, value.clone()))
.collect();
l.sort();
l_mut.sort();
r.sort();
assert_eq!(l, r, "items");
assert_eq!(l_mut, r, "items mut");
if self.log {
eprintln!("{:?}", l);
}
}
}
#[derive(Debug, Clone)]
enum Action {
Insert,
Remove(usize),
Clear,
Optimize,
Reserve(usize),
}
fn do_actions(actions: &[Action], log: bool) {
let mut t = Tester::new(log);
let mut c = 0;
for a in actions {
match a {
Action::Insert => t.insert(c),
Action::Remove(key) => t.remove(*key % (c + 2)),
Action::Clear => t.clear(),
Action::Optimize => t.optimize(),
Action::Reserve(additional) => t.reserve(*additional),
}
t.check();
c += 1;
}
}
fn make_action(max_key: usize) -> impl Strategy<Value = Action> {
prop_oneof![
5 => Just(Action::Insert),
5 => (0..max_key).prop_map(Action::Remove),
1 => Just(Action::Clear),
1 => Just(Action::Optimize),
1 => (0..100usize).prop_map(Action::Reserve)
]
}
fn make_actions() -> impl Strategy<Value = (usize, Vec<Action>)> {
(1..100usize).prop_flat_map(|n| (Just(n), prop::collection::vec(make_action(n), n)))
}
proptest! {
#[test]
fn test_random(ref actions in make_actions()) {
do_actions(&actions.1, false);
}
}
#[test]
fn test_x1() {
use Action::*;
let actions = vec![
Insert,
Insert,
Insert,
Insert,
Insert,
Remove(3),
Remove(1),
Remove(2),
Remove(0),
Insert,
Insert,
Insert,
Insert,
Insert,
];
do_actions(&actions, false);
}
#[test]
fn test_x2() {
use Action::*;
let actions = vec![Insert, Insert, Insert, Remove(0), Remove(1)];
do_actions(&actions, false);
}
#[test]
fn test_xx() {
// use Action::*;
// let actions = vec![Insert];
// do_actions(&actions, true);
}
#[test]
fn debug() {
let mut s = SlabMap::new();
s.insert(5);
s.insert(10);
assert_eq!(format!("{:?}", s), "{0: 5, 1: 10}");
}
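// Editor's sketch (not in the original test file): the basic contract that the
// property tests above rely on — `insert` returns a key, `remove` yields the
// value once and `None` afterwards, and `len` tracks live entries.
#[test]
fn insert_then_remove_round_trip() {
    let mut s = SlabMap::new();
    let key = s.insert("a");
    assert_eq!(s.len(), 1);
    assert_eq!(s.remove(key), Some("a"));
    assert_eq!(s.remove(key), None);
    assert_eq!(s.len(), 0);
}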
| true
196293c223f4b7f73cc382673a70da7267b45196 | Rust | rizakrko/cifar-10-loader-rs | /src/dataset.rs | UTF-8 | 10,177 | 2.8125 | 3 | ["MIT"] | permissive |
extern crate itertools;
extern crate regex;
extern crate walkdir;
#[cfg(feature = "rayon")]
extern crate rayon;
///CifarDataset is the top-level struct of cifar_10_loader.
///
///CifarDataset holds the Cifar10 labels, the training and test datasets, and their counts.
///
pub struct CifarDataset {
///Cifar10 labels.
///
/// ["airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse", "ship", "truck"]
pub labels: Vec<String>,
/// Count of training images.
///
/// 50000 images
pub train_count: usize,
/// Count of test images.
///
/// 10000 images
pub test_count: usize,
/// Dataset for training.
pub train_dataset: Vec<super::CifarImage>,
/// Dataset for testing.
pub test_dataset: Vec<super::CifarImage>,
}
/// Internal helper struct that groups the discovered file paths.
struct CifarFilePaths {
meta_data_paths: Vec<::std::path::PathBuf>,
train_data_paths: Vec<::std::path::PathBuf>,
test_data_paths: Vec<::std::path::PathBuf>,
}
impl CifarDataset {
///Creates a new CifarDataset.
pub fn new<P: AsRef<::std::path::Path>>(path: P) -> Result<Self, String> {
use std::thread::spawn;
let CifarFilePaths {
meta_data_paths,
train_data_paths,
test_data_paths,
} = CifarDataset::get_file_paths(path)?;
let meta_data_handle = spawn(move || CifarDataset::get_meta_data(&meta_data_paths));
let train_images_handle = spawn(move || {
CifarDataset::get_images(CifarDataset::get_byte_datas(&train_data_paths)?)
});
let test_images_handle = spawn(move || {
CifarDataset::get_images(CifarDataset::get_byte_datas(&test_data_paths)?)
});
let labels = CifarDataset::for_cifardataset_join_thread(meta_data_handle)?;
let train_images = CifarDataset::for_cifardataset_join_thread(train_images_handle)?;
let test_images = CifarDataset::for_cifardataset_join_thread(test_images_handle)?;
let cifar_dataset = CifarDataset {
labels: labels,
train_count: train_images.len() as usize,
train_dataset: train_images,
test_count: test_images.len() as usize,
test_dataset: test_images,
};
Ok(cifar_dataset)
}
fn for_cifardataset_join_thread<T>(
p: ::std::thread::JoinHandle<Result<T, ::std::io::Error>>,
) -> Result<T, String> {
p.join()
.map(|content| content.map_err(|err| err.to_string()))
.map_err(|_| "thread panicked".to_string())?
}
fn get_file_paths<P: AsRef<::std::path::Path>>(path: P) -> Result<CifarFilePaths, String> {
use self::regex::Regex;
let paths = &walkdir::WalkDir::new(path)
.into_iter()
.flat_map(|x| x.map(|x| x.path().to_path_buf()))
.filter(|x| x.is_file())
.collect::<Vec<::std::path::PathBuf>>();
let train_data_path_re = Regex::new("data_batch_[1-5].bin").map_err(|err| err.to_string())?;
let test_data_path_re = Regex::new("test_batch.bin").map_err(|err| err.to_string())?;
let cifar_file_paths = CifarFilePaths {
meta_data_paths: CifarDataset::get_meta_data_paths(paths)?,
train_data_paths: CifarDataset::get_paths_regex(paths, &train_data_path_re)?,
test_data_paths: CifarDataset::get_paths_regex(paths, &test_data_path_re)?,
};
Ok(cifar_file_paths)
}
#[cfg(feature = "rayon")]
fn get_meta_data_paths(
paths: &[::std::path::PathBuf],
) -> Result<Vec<::std::path::PathBuf>, String> {
use self::rayon::prelude::*;
use std::path::{Path, PathBuf};
let meta_data_file_name = Path::new("batches.meta.txt");
let fpaths: Vec<PathBuf> = paths
.into_par_iter()
.filter(|path| {
path.file_name()
.map(|file_name| file_name == meta_data_file_name)
.unwrap_or(false)
})
.cloned()
.collect();
if fpaths.is_empty() {
Err("Can't Find Meta Data Files!!".to_string())
} else {
Ok(fpaths)
}
}
#[cfg(not(feature = "rayon"))]
fn get_meta_data_paths(
paths: &[::std::path::PathBuf],
) -> Result<Vec<::std::path::PathBuf>, String> {
use std::path::{Path, PathBuf};
let meta_data_file_name = Path::new("batches.meta.txt");
let fpaths: Vec<PathBuf> = paths
.into_iter()
.filter(|path| {
path.file_name()
.map(|file_name| file_name == meta_data_file_name)
.unwrap_or(false)
})
.cloned()
.collect();
if fpaths.is_empty() {
Err("Can't Find Meta Data Files!!".to_string())
} else {
Ok(fpaths)
}
}
#[cfg(feature = "rayon")]
fn get_paths_regex(
paths: &[::std::path::PathBuf],
re: &self::regex::Regex,
) -> Result<Vec<::std::path::PathBuf>, String> {
use self::rayon::prelude::*;
use std::path::PathBuf;
let fpaths: Vec<PathBuf> = paths
.par_iter()
.filter(|path| {
path.file_name()
.map(|file_name| {
let file_name = file_name.to_string_lossy();
re.is_match(file_name.as_ref())
})
.unwrap_or(false)
})
.cloned()
.collect();
if fpaths.is_empty() {
Err("Can't Find Train Data Files!!".to_string())
} else {
Ok(fpaths)
}
}
#[cfg(not(feature = "rayon"))]
fn get_paths_regex(
paths: &[::std::path::PathBuf],
re: &self::regex::Regex,
) -> Result<Vec<::std::path::PathBuf>, String> {
use std::path::PathBuf;
let fpaths: Vec<PathBuf> = paths
.iter()
.filter(|path| {
path.file_name()
.map(|file_name| {
let file_name = file_name.to_string_lossy();
re.is_match(file_name.as_ref())
})
.unwrap_or(false)
})
.cloned()
.collect();
if fpaths.is_empty() {
Err("Can't Find Train Data Files!!".to_string())
} else {
Ok(fpaths)
}
}
fn get_meta_data(paths: &[::std::path::PathBuf]) -> Result<Vec<String>, ::std::io::Error> {
use std::io::Read;
paths
.iter()
.map(|meta_path| -> Result<String, ::std::io::Error> {
::std::fs::File::open(meta_path).and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents).map(|_| contents)
})
})
.map(|lines| -> Result<Vec<String>, ::std::io::Error> {
lines.map(|l| -> Vec<String> {
l.lines()
.filter(|x| !x.is_empty())
.map(|x| x.into())
.collect()
})
})
.collect::<Result<Vec<Vec<String>>, ::std::io::Error>>()
.map(|v| v.concat())
}
#[cfg(feature = "rayon")]
fn get_byte_datas(paths: &[::std::path::PathBuf]) -> Result<Vec<Vec<u8>>, ::std::io::Error> {
use std::io::{BufReader, Read};
use self::rayon::prelude::*;
paths
.par_iter()
.map(|file_path| -> Result<Vec<u8>, ::std::io::Error> {
::std::fs::File::open(file_path).and_then(|file| {
let mut byte_data: Vec<u8> = Vec::new();
BufReader::new(file)
.read_to_end(&mut byte_data)
.map(|_| byte_data)
})
})
.map(|byte_data| -> Result<Vec<Vec<u8>>, ::std::io::Error> {
byte_data.map(|b| -> Vec<Vec<u8>> {
b.chunks(3073)
.map(|byte_img| -> Vec<u8> { byte_img.to_vec() })
.collect()
})
})
.collect::<Result<Vec<Vec<Vec<u8>>>, ::std::io::Error>>()
.map(|v| v.concat())
}
#[cfg(not(feature = "rayon"))]
fn get_byte_datas(paths: &[::std::path::PathBuf]) -> Result<Vec<Vec<u8>>, ::std::io::Error> {
use std::io::{BufReader, Read};
paths
.iter()
.map(|file_path| -> Result<Vec<u8>, ::std::io::Error> {
::std::fs::File::open(file_path).and_then(|file| {
let mut byte_data: Vec<u8> = Vec::new();
BufReader::new(file)
.read_to_end(&mut byte_data)
.map(|_| byte_data)
})
})
.map(|byte_data| -> Result<Vec<Vec<u8>>, ::std::io::Error> {
byte_data.map(|b| -> Vec<Vec<u8>> {
b.chunks(3073)
.map(|byte_img| -> Vec<u8> { byte_img.to_vec() })
.collect()
})
})
.collect::<Result<Vec<Vec<Vec<u8>>>, ::std::io::Error>>()
.map(|v| v.concat())
}
#[cfg(feature = "rayon")]
fn get_images(byte_datas: Vec<Vec<u8>>) -> Result<Vec<super::CifarImage>, ::std::io::Error> {
use self::rayon::prelude::*;
byte_datas
.into_par_iter()
.map(|byte_img| {
use super::CifarImageTrait;
super::CifarImage::new(&byte_img)
})
.collect::<Result<Vec<super::CifarImage>, ::std::io::Error>>()
}
#[cfg(not(feature = "rayon"))]
fn get_images(byte_datas: Vec<Vec<u8>>) -> Result<Vec<super::CifarImage>, ::std::io::Error> {
byte_datas
.into_iter()
.map(|byte_img| {
use super::CifarImageTrait;
super::CifarImage::new(&byte_img)
})
.collect::<Result<Vec<super::CifarImage>, ::std::io::Error>>()
}
}
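// Editor's sketch (not part of the crate): loading the extracted CIFAR-10
// binary directory. The path is an assumption chosen for illustration, the test
// is ignored by default because it needs the dataset on disk, and the expected
// counts come from the struct documentation above.
#[cfg(test)]
mod usage_sketch {
    use super::CifarDataset;

    #[test]
    #[ignore]
    fn loads_the_binary_dataset() {
        let dataset = CifarDataset::new("cifar-10-batches-bin").expect("dataset directory");
        assert_eq!(dataset.labels.len(), 10);
        assert_eq!(dataset.train_count, 50_000);
        assert_eq!(dataset.test_count, 10_000);
    }
}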
| true
0e8345676d2b23d6aacd9da1b3814de09484c480 | Rust | jakule/yew-library | /src/main.rs | UTF-8 | 4,866 | 3.15625 | 3 | [] | no_license |
use serde::{Deserialize, Serialize};
use yew::prelude::*;
use yew_router::prelude::*;
#[derive(Routable, PartialEq, Clone, Debug)]
pub enum Route {
#[at("/posts/:id")]
Post { id: u64 },
#[at("/posts")]
Posts,
#[at("/authors/:id")]
Author { id: u64 },
#[at("/authors")]
Authors,
#[at("/")]
Home,
#[not_found]
#[at("/404")]
NotFound,
}
enum Msg {
AddOne,
SubtractOne,
}
struct Model {
value: i64,
}
impl Component for Model {
type Message = Msg;
type Properties = ();
fn create(_ctx: &Context<Self>) -> Self {
Self { value: 0 }
}
fn update(&mut self, _ctx: &Context<Self>, msg: Self::Message) -> bool {
match msg {
Msg::AddOne => {
self.value += 1;
// the value has changed so we need to
// re-render for it to appear on the page
true
}
Msg::SubtractOne => {
self.value -= 1;
true
}
}
}
fn view(&self, ctx: &Context<Self>) -> Html {
html! {
<div>
<h1>{ "Hello World!" }</h1>
<span class="subtitle">{ "from Yew with " }<i class="heart" /></span>
<button onclick={ctx.link().callback(|_| Msg::AddOne)}>{ "+1" }</button>
<button onclick={ctx.link().callback(|_| Msg::SubtractOne)}>{ "-1" }</button>
<p>{ self.value }</p>
<TestReq/>
</div>
}
}
}
#[derive(Serialize, Deserialize, Debug)]
struct Book {
id: i32,
title: String,
authors: Vec<String>,
publication_date: chrono::NaiveDate,
}
enum TestReqMsg {
SetFetchState(Vec<Book>),
Fetch,
}
struct TestReq {
response: Vec<Book>,
}
impl From<Vec<Book>> for TestReqMsg {
fn from(s: Vec<Book>) -> Self {
TestReqMsg::SetFetchState(s)
}
}
fn get_current_url() -> String {
let window = web_sys::window().unwrap();
let location = window.location();
let host = location.host().unwrap();
let protocol = location.protocol().unwrap();
// `Location::protocol()` returns e.g. "https:", so the "//" separator has to be added here.
return format!("{}//{}", protocol, host);
}
async fn fetch_data() -> Vec<Book> {
let url = get_current_url();
let resp = reqwest::get(&format!("{}/api/books", url)).await;
return resp.unwrap().json().await.unwrap();
}
impl Component for TestReq {
type Message = TestReqMsg;
type Properties = ();
fn create(ctx: &Context<Self>) -> Self {
ctx.link().send_message(TestReqMsg::Fetch);
Self { response: vec![] }
}
fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
match msg {
TestReqMsg::Fetch => {
ctx.link().send_future(async { fetch_data().await });
false
}
TestReqMsg::SetFetchState(val) => {
self.response = val;
true
}
}
}
fn view(&self, ctx: &Context<Self>) -> Html {
html! {
<div>
<button type="button" class="btn btn-primary" onclick={ctx.link().callback(|_| TestReqMsg::Fetch)}>{"Get Books"}</button>
<p>{"REST response:"}</p>
<table class="table">
<thead>
<tr>
<th scope="col">{"#"}</th>
<th scope="col">{"Title"}</th>
<th scope="col">{"Authors"}</th>
<th scope="col">{"Publication Date"}</th>
</tr>
</thead>
<tbody>
{
for self.response.iter().map(
|e| html! {
<tr>
<th scope="row">{e.id}</th>
<th>{&e.title}</th>
<th>{format!("{:?}", e.authors)}</th>
<th>{format!("{}", e.publication_date)}</th>
</tr>
}
)
}
</tbody>
</table>
</div>
}
}
}
fn switch(routes: &Route) -> Html {
// let onclick_callback = Callback::from(|_: Route| yew_router::push_route(Route::Home));
match routes {
Route::Home => html! { <Model/> },
// Route::Secure => html! {
// <div>
// <h1>{ "Secure" }</h1>
// <button onclick={onclick_callback}>{ "Go Home" }</button>
// </div>
// },
Route::NotFound => html! { <h1>{ "404" }</h1> },
_ => html! { <></> },
}
}
struct Home {}
impl Component for Home {
type Message = ();
type Properties = ();
fn create(_ctx: &Context<Self>) -> Self {
Home {}
}
fn view(&self, _ctx: &Context<Self>) -> Html {
html! {
<Router<Route> render={Router::render(switch)} />
}
}
}
fn main() {
web_sys::console::log_1(&"Start".into());
yew::start_app::<Home>();
}
| true
|
58ac1603603a94f7f955457bc325da5a71f394ab
|
Rust
|
pvdrz/pacman
|
/src/agents/greedy_agent.rs
|
UTF-8
| 967
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
use game::Agent;
use game::Direction;
use game::GameState;
use rand::thread_rng;
use rand::Rng;
pub struct GreedyAgent;
impl Agent for GreedyAgent {
fn get_action(&mut self, state: &GameState, index: usize) -> Direction {
let mut actions = state.legal_actions(index);
// Drop the Stop action (stable replacement for the unstable `Vec::remove_item`).
actions.retain(|action| *action != Direction::Stop);
let mut best_score = isize::min_value();
let mut best_actions = vec![Direction::Stop];
for action in actions {
if let Ok(score) = state.gen_successor(index, action).map(|s| s.score()) {
if score > best_score {
best_score = score;
best_actions.clear();
best_actions.push(action);
} else if score == best_score {
best_actions.push(action);
}
}
}
*thread_rng()
.choose(&best_actions)
.unwrap_or(&Direction::Stop)
}
}
| true
|
ecf91f13f473871dc7328991cc40efda890ebf53
|
Rust
|
bddap/map
|
/src/main.rs
|
UTF-8
| 3,680
| 3.3125
| 3
|
[] |
no_license
|
#[macro_use]
extern crate clap;
use clap::{AppSettings::TrailingVarArg, Arg};
use std::io::BufRead;
use std::process::exit;
use std::process::{Command, Stdio};
use std::str::from_utf8;
fn main() {
let matches = app_from_crate!()
.arg(
Arg::with_name("separator")
.short("s")
.long("separator")
.help("Single character delimiter beween input values.")
.default_value("\\n")
.takes_value(true),
).arg(
Arg::with_name("argname")
.help("Arbitrary string to be inserted into command")
.required(true),
).setting(TrailingVarArg)
.arg(
Arg::with_name("command")
.help("Command template to be run for every input")
.multiple(true)
.required(true),
).get_matches();
let sepb = {
let sep = matches.value_of("separator").expect("Checked by clap.");
match unescape_delimiter(sep) {
Ok(del) => del,
Err(_) => {
eprintln!("couldn't interpret delimiter as single byte character, try something like \"\\n\" or \",\"");
exit(1);
}
}
};
let argname = matches.value_of("argname").expect("Checked by clap.");
let command = matches
.values_of("command")
.expect("Checked by clap.")
.collect::<Vec<&str>>();
exit(match run(sepb, argname, &command) {
Ok(exit_code) => exit_code,
Err(maperr) => {
eprintln!("{}", maperr.0);
1
}
});
}
fn run(separator: u8, argname: &str, command: &[&str]) -> Result<i32, MapErr> {
let stdin = std::io::stdin();
for value in stdin.lock().split(separator) {
let value = value.map_err(|_| MapErr("io err"))?;
let value =
from_utf8(&value).map_err(|_| MapErr("received invalid utf8 as argument on stdin"))?;
let command: Vec<String> = command
.iter()
.map(|s| s.to_string().replace(argname, value.into()))
.collect();
let (prog, args) = command.split_first().ok_or(MapErr("no command supplied"))?;
let status = Command::new(prog)
.args(args)
.stdin(Stdio::null())
.status()
.map_err(|_| MapErr("couldn't execute command"))?;
if !status.success() {
return Ok(status.code().unwrap_or(1));
}
}
Ok(0)
}
#[derive(PartialEq, Debug)]
struct MapErr(&'static str);
fn unescape_delimiter(input: &str) -> Result<u8, UnescapeErr> {
if input.as_bytes().len() == 1 {
return Ok(input.as_bytes()[0]);
}
let result = match input {
"\\a" => 7,
"\\b" => 8,
"\\f" => 0xc,
"\\n" => b'\n',
"\\r" => b'\r',
"\\t" => b'\t',
"\\v" => 0x0b,
"\\\\" => b'\\',
"\\'" => b'\'',
"\\\"" => b'"',
"\\?" => b'?',
"\\e" => 0x1b,
"\\0" => 0,
_ => return Err(UnescapeErr),
};
return Ok(result);
}
#[derive(PartialEq, Debug)]
struct UnescapeErr;
#[cfg(test)]
mod test {
use unescape_delimiter;
#[test]
fn unescape() {
unescape_delimiter("").unwrap_err();
unescape_delimiter("\\n\\n").unwrap_err();
unescape_delimiter(" ").unwrap_err();
unescape_delimiter("aa").unwrap_err();
assert_eq!(unescape_delimiter("\\n"), Ok(b'\n'));
assert_eq!(unescape_delimiter(" "), Ok(b' '));
assert_eq!(unescape_delimiter("a"), Ok(b'a'));
assert_eq!(unescape_delimiter("\\0"), Ok(b'\0'));
}
}
| true
|
9854a2151a2ecb8e8276e5025e6c9b64cea309e5
|
Rust
|
flier/tokio-kafka
|
/src/consumer/protocol.rs
|
UTF-8
| 7,060
| 2.5625
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::borrow::Cow;
use std::collections::HashMap;
use serde::{de, ser};
use consumer::{Assignment, Subscription};
use protocol::Nullable;
const CONSUMER_PROTOCOL_V0: i16 = 0;
pub const CONSUMER_PROTOCOL: &str = "consumer";
pub struct ConsumerProtocol {}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConsumerProtocolHeader {
version: i16,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubscriptionSchema {
header: ConsumerProtocolHeader,
topics: Vec<String>,
user_data: Nullable<Vec<u8>>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TopicAssignment {
topics: String,
partitions: Vec<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssignmentSchema {
header: ConsumerProtocolHeader,
topic_partitions: Vec<TopicAssignment>,
user_data: Nullable<Vec<u8>>,
}
impl<'a> ser::Serialize for Subscription<'a> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
let mut schema = SubscriptionSchema {
header: ConsumerProtocolHeader {
version: CONSUMER_PROTOCOL_V0,
},
topics: self.topics
.iter()
.map(|topic_name| String::from(topic_name.to_owned()))
.collect(),
user_data: self.user_data.as_ref().map(|user_data| user_data.to_vec()).into(),
};
schema.topics.sort();
schema.serialize(serializer)
}
}
impl<'a, 'de> de::Deserialize<'de> for Subscription<'a> {
fn deserialize<D>(deserializer: D) -> Result<Subscription<'a>, D::Error>
where
D: de::Deserializer<'de>,
{
let SubscriptionSchema {
header,
topics,
user_data,
} = SubscriptionSchema::deserialize(deserializer)?;
if header.version < CONSUMER_PROTOCOL_V0 {
Err(de::Error::custom(format!(
"unsupported subscription version: {}",
header.version
)))
} else {
Ok(Subscription {
topics: topics.into_iter().map(Cow::Owned).collect(),
user_data: user_data.into_raw().map(Cow::Owned),
})
}
}
}
impl<'a> ser::Serialize for Assignment<'a> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
let mut topic_partitions = HashMap::new();
for tp in &self.partitions {
topic_partitions
.entry(tp.topic_name.to_owned())
.or_insert_with(Vec::new)
.push(tp.partition_id);
}
let mut schema = AssignmentSchema {
header: ConsumerProtocolHeader {
version: CONSUMER_PROTOCOL_V0,
},
topic_partitions: topic_partitions
.into_iter()
.map(|(topic_name, partitions)| TopicAssignment {
topics: String::from(topic_name.to_owned()),
partitions,
})
.collect(),
user_data: self.user_data.as_ref().map(|user_data| user_data.to_vec()).into(),
};
schema.topic_partitions.sort_by(|lhs, rhs| lhs.topics.cmp(&rhs.topics));
schema.serialize(serializer)
}
}
impl<'a, 'de> de::Deserialize<'de> for Assignment<'a> {
fn deserialize<D>(deserializer: D) -> Result<Assignment<'a>, D::Error>
where
D: de::Deserializer<'de>,
{
let AssignmentSchema {
header,
topic_partitions,
user_data,
} = AssignmentSchema::deserialize(deserializer)?;
if header.version < CONSUMER_PROTOCOL_V0 {
Err(de::Error::custom(format!(
"unsupported assignment version: {}",
header.version
)))
} else {
let partitions = topic_partitions
.iter()
.flat_map(|assignment| {
let topic_name = assignment.topics.to_owned();
assignment
.partitions
.iter()
.map(move |&partition| topic_partition!(topic_name.clone(), partition))
})
.collect();
Ok(Assignment {
partitions,
user_data: user_data.into_raw().map(Cow::Owned),
})
}
}
}
#[cfg(test)]
mod tests {
use std::io::Cursor;
use super::*;
use protocol::Schema;
lazy_static! {
static ref TEST_SUBSCRIPTION: Subscription<'static> = Subscription {
topics: vec!["t0".into(), "t1".into()],
user_data: Some(b"data".to_vec().into()),
};
static ref TEST_SUBSCRIPTION_DATA: Vec<u8> = vec![
// SubscriptionSchema
// header: ConsumerProtocolHeader
0, 0, // version
// topic_partitions: [&str]
0, 0, 0, 2,
0, 2, b't', b'0',
0, 2, b't', b'1',
// user_data
0, 0, 0, 4, b'd', b'a', b't', b'a',
];
static ref TEST_ASSIGNMENT: Assignment<'static> = Assignment {
partitions: vec![
topic_partition!("t0", 0),
topic_partition!("t0", 1),
topic_partition!("t1", 0),
topic_partition!("t1", 1)
],
user_data: Some(b"data".to_vec().into()),
};
static ref TEST_ASSIGNMENT_DATA: Vec<u8> = vec![
// AssignmentSchema
// header: ConsumerProtocolHeader
0, 0, // version
// partitions: [TopicAssignment]
0, 0, 0, 2,
// TopicAssignment
0, 2, b't', b'0', // topics
0, 0, 0, 2, // partitions
0, 0, 0, 0,
0, 0, 0, 1,
// TopicAssignment
0, 2, b't', b'1', // topics
0, 0, 0, 2, // partitions
0, 0, 0, 0,
0, 0, 0, 1,
// user_data
0, 0, 0, 4, b'd', b'a', b't', b'a',
];
}
#[test]
fn test_subscription_serializer() {
assert_eq!(Schema::serialize(&*TEST_SUBSCRIPTION).unwrap(), *TEST_SUBSCRIPTION_DATA);
}
#[test]
fn test_subscription_deserializer() {
let subscription: Subscription = Schema::deserialize(Cursor::new(TEST_SUBSCRIPTION_DATA.clone())).unwrap();
assert_eq!(subscription, *TEST_SUBSCRIPTION);
}
#[test]
fn test_assignment_serializer() {
assert_eq!(Schema::serialize(&*TEST_ASSIGNMENT).unwrap(), *TEST_ASSIGNMENT_DATA);
}
#[test]
fn test_assignment_deserializer() {
let assignment: Assignment = Schema::deserialize(Cursor::new(TEST_ASSIGNMENT_DATA.clone())).unwrap();
assert_eq!(assignment, *TEST_ASSIGNMENT);
}
}
| true
|
67e662f39e3a6bf242344b8de9fff6ae74705acf
|
Rust
|
poyeker/simple_rand
|
/src/lib.rs
|
UTF-8
| 4,342
| 2.8125
| 3
|
[] |
no_license
|
use std::ops::{Deref, DerefMut};
use itertools::*;
pub use rand::prelude::*;
pub use rand::rngs::StdRng;
use rand::seq::SliceRandom;
pub use rand_distr::*;
pub use rand_pcg::{Pcg32, Pcg64, Pcg64Mcg};
pub use rand_xorshift::XorShiftRng;
pub use rand_xoshiro::*;
pub struct Rand<R: SeedableRng + Rng> {
rng: R,
}
impl<R: SeedableRng + Rng> Default for Rand<R> {
fn default() -> Self {
Rand {
rng: R::from_rng(thread_rng()).unwrap(),
}
}
}
impl<R: SeedableRng + Rng> Deref for Rand<R> {
type Target = R;
fn deref(&self) -> &Self::Target {
&self.rng
}
}
impl<R: SeedableRng + Rng> DerefMut for Rand<R> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.rng
}
}
impl<R: SeedableRng + Rng> Rand<R> {
pub fn new(rng: R) -> Self {
Rand { rng }
}
/// Returns `true` with probability `p`.
pub fn rand_bool(&mut self, p: f64) -> bool {
self.gen_bool(p)
}
/// Returns a uniformly distributed integer in the half-open range `[low, high)`.
pub fn rand_int(&mut self, low: i64, high: i64) -> i64 {
self.gen_range(low, high)
}
/// Returns a uniformly distributed float in the half-open range `[low, high)`.
pub fn rand_float(&mut self, low: f64, high: f64) -> f64 {
self.gen_range(low, high)
}
pub fn rand_exp(&mut self, mean: f64) -> f64 {
let distr = Exp::new(1. / mean).unwrap();
self.sample(distr)
}
pub fn rand_normal(&mut self, mean: f64, std: f64) -> f64 {
let distr = Normal::new(mean, std).unwrap();
self.sample(distr)
}
pub fn rand_gamma(&mut self, shape: f64, scale: f64) -> f64 {
let distr = Gamma::new(shape, scale).unwrap();
self.sample(distr)
}
pub fn one_of<T>(&mut self, slice: &[T]) -> T
where
T: Clone,
{
slice.choose(&mut self.rng).unwrap().clone()
}
pub fn n_of<T>(&mut self, slice: &[T], amount: usize) -> Vec<T>
where
T: Clone,
{
slice
.choose_multiple(&mut self.rng, amount)
.cloned()
.collect()
}
pub fn one_of_weighted<X, T, W>(&mut self, slice: &[T], weights: W) -> T
where
T: Clone,
W: IntoIterator,
W::Item: rand::distributions::uniform::SampleBorrow<X>,
X: rand::distributions::uniform::SampleUniform
+ PartialOrd
+ for<'a> ::core::ops::AddAssign<&'a X>
+ Clone
+ Default,
{
let w = rand::distributions::WeightedIndex::new(weights).unwrap();
slice[self.rng.sample(w)].clone()
}
pub fn one_of_weighted_by_key<T, K, F>(&mut self, slice: &[T], key: F) -> T
where
T: Clone,
K: Ord
+ Clone
+ Default
+ rand_distr::uniform::SampleUniform
+ for<'a> std::ops::AddAssign<&'a K>,
F: FnMut(&T) -> K,
{
let weights = slice.iter().map(key);
let w = rand::distributions::WeightedIndex::new(weights).unwrap();
slice[self.rng.sample(w)].clone()
}
pub fn n_of_weighted_by_key<T, K, F>(&mut self, slice: &[T], amount: usize, key: F) -> Vec<T>
where
T: Clone,
K: Ord
+ Clone
+ Default
+ rand_distr::uniform::SampleUniform
+ for<'a> std::ops::AddAssign<&'a K>,
F: FnMut(&T) -> K,
{
let weights = slice.iter().map(key);
let w = rand::distributions::WeightedIndex::new(weights).unwrap();
(0..amount)
.map(|_| slice[self.sample(&w)].clone())
.collect()
}
pub fn n_of_weighted<X, T, W>(&mut self, slice: &[T], weights: W, amount: usize) -> Vec<T>
where
T: Clone,
W: IntoIterator,
W::Item: rand::distributions::uniform::SampleBorrow<X>,
X: rand::distributions::uniform::SampleUniform
+ PartialOrd
+ for<'a> ::core::ops::AddAssign<&'a X>
+ Clone
+ Default,
{
let w = rand::distributions::WeightedIndex::new(weights).unwrap();
(0..amount)
.map(|_| slice[self.sample(&w)].clone())
.collect()
}
pub fn shuffle<T>(&mut self, slice: &[T]) -> Vec<T>
where
T: Clone,
{
let mut idx: Vec<_> = (0..slice.len()).collect();
idx.shuffle(&mut self.rng);
let sorted_tuples = idx.iter().zip(slice.iter()).sorted_by_key(|x| *x.0);
sorted_tuples.map(|x| x.1.clone()).collect()
}
}
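// A hedged usage sketch, not part of the original crate: it only exercises the
// wrapper methods defined above (`rand_int`, `rand_float`, `one_of`,
// `one_of_weighted`), so every assertion holds for any single draw. The module
// and test names are invented for illustration.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn basic_sampling() {
        // Any `SeedableRng + Rng` works; `StdRng` is re-exported above.
        let mut rng: Rand<StdRng> = Rand::default();

        let roll = rng.rand_int(1, 7);
        assert!(roll >= 1 && roll < 7); // `gen_range` upper bound is exclusive

        let p = rng.rand_float(0.0, 1.0);
        assert!(p >= 0.0 && p < 1.0);

        let picked = rng.one_of(&[1, 2, 3]);
        assert!([1, 2, 3].contains(&picked));

        // Weighted draw: "c" is most likely, but any of the three is valid for one draw.
        let weighted = rng.one_of_weighted(&["a", "b", "c"], vec![1u32, 2, 7]);
        assert!(["a", "b", "c"].contains(&weighted));
    }
}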
| true
|
ce27b57684199451ceb48fb732de568af800b3a3
|
Rust
|
jamesmunns/postcard
|
/postcard-derive/src/max_size.rs
|
UTF-8
| 3,938
| 2.9375
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use proc_macro2::{Span, TokenStream};
use quote::{quote, quote_spanned};
use syn::{
parse_macro_input, parse_quote, spanned::Spanned, Data, DeriveInput, Fields, GenericParam,
Generics,
};
pub fn do_derive_max_size(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(item as DeriveInput);
let span = input.span();
let name = input.ident;
// Add a bound `T: MaxSize` to every type parameter T.
let generics = add_trait_bounds(input.generics);
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let sum = max_size_sum(&input.data, span).unwrap_or_else(syn::Error::into_compile_error);
let expanded = quote! {
impl #impl_generics ::postcard::experimental::max_size::MaxSize for #name #ty_generics #where_clause {
const POSTCARD_MAX_SIZE: usize = #sum;
}
};
expanded.into()
}
/// Add a bound `T: MaxSize` to every type parameter T.
fn add_trait_bounds(mut generics: Generics) -> Generics {
for param in &mut generics.params {
if let GenericParam::Type(ref mut type_param) = *param {
type_param
.bounds
.push(parse_quote!(::postcard::experimental::max_size::MaxSize));
}
}
generics
}
/// Generate a constant expression that sums up the maximum size of the type.
fn max_size_sum(data: &Data, span: Span) -> Result<TokenStream, syn::Error> {
match data {
Data::Struct(data) => Ok(sum_fields(&data.fields)),
Data::Enum(data) => {
let variant_count = data.variants.len();
let recurse = data.variants.iter().map(|v| sum_fields(&v.fields));
let discriminant_size = varint_size_discriminant(variant_count as u32) as usize;
// Generate a tree of max expressions.
let max = recurse.fold(quote!(0), |acc, x| {
quote! {
{
let lhs = #acc;
let rhs = #x;
if lhs > rhs {
lhs
} else {
rhs
}
}
}
});
Ok(quote! {
#discriminant_size + #max
})
}
Data::Union(_) => Err(syn::Error::new(
span,
"unions are not supported by `postcard::MaxSize`",
)),
}
}
fn sum_fields(fields: &Fields) -> TokenStream {
match fields {
syn::Fields::Named(fields) => {
// Expands to an expression like
//
// 0 + <Field1Type>::POSTCARD_MAX_SIZE + <Field2Type>::POSTCARD_MAX_SIZE + ...
//
// but using fully qualified syntax.
let recurse = fields.named.iter().map(|f| {
let ty = &f.ty;
quote_spanned! { f.span() => <#ty as ::postcard::experimental::max_size::MaxSize>::POSTCARD_MAX_SIZE }
});
quote! {
0 #(+ #recurse)*
}
}
syn::Fields::Unnamed(fields) => {
let recurse = fields.unnamed.iter().map(|f| {
let ty = &f.ty;
quote_spanned! { f.span() => <#ty as ::postcard::experimental::max_size::MaxSize>::POSTCARD_MAX_SIZE }
});
quote! {
0 #(+ #recurse)*
}
}
syn::Fields::Unit => quote!(0),
}
}
fn varint_size_discriminant(max_n: u32) -> u32 {
const BITS_PER_BYTE: u32 = 8;
const BITS_PER_VARINT_BYTE: u32 = 7;
// How many data bits do we need for `max_n`.
let bits = core::mem::size_of::<u32>() as u32 * BITS_PER_BYTE - max_n.leading_zeros();
let roundup_bits = bits + (BITS_PER_VARINT_BYTE - 1);
// Apply division, using normal "round down" integer division
roundup_bits / BITS_PER_VARINT_BYTE
}
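// A hedged illustration, not part of the original crate: a small test of the
// discriminant-size helper above. The boundaries follow from the varint layout
// (7 payload bits per byte), so up to 127 variants need 1 byte and 128..=16383
// need 2. The module name is invented for this sketch.
#[cfg(test)]
mod varint_size_tests {
    use super::varint_size_discriminant;

    #[test]
    fn discriminant_byte_boundaries() {
        assert_eq!(varint_size_discriminant(1), 1);
        assert_eq!(varint_size_discriminant(127), 1);
        assert_eq!(varint_size_discriminant(128), 2);
        assert_eq!(varint_size_discriminant(16383), 2);
        assert_eq!(varint_size_discriminant(16384), 3);
    }
}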
| true
|
ed6687c63c6726cb4b32dc4f73211d403d5c325f
|
Rust
|
jelford/rustup.rs
|
/src/rustup-utils/src/tty.rs
|
UTF-8
| 1,420
| 2.53125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copied from rustc. atty crate did not work as expected
#[cfg(unix)]
pub fn stderr_isatty() -> bool {
use libc;
unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
// FIXME: Unfortunately this doesn't detect msys terminals so rustup
// is always colorless there (just like rustc and cargo).
#[cfg(windows)]
pub fn stderr_isatty() -> bool {
type DWORD = u32;
type BOOL = i32;
type HANDLE = *mut u8;
const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
extern "system" {
fn GetStdHandle(which: DWORD) -> HANDLE;
fn GetConsoleMode(hConsoleHandle: HANDLE,
lpMode: *mut DWORD) -> BOOL;
}
unsafe {
let handle = GetStdHandle(STD_ERROR_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}
#[cfg(unix)]
pub fn stdout_isatty() -> bool {
use libc;
unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 }
}
#[cfg(windows)]
pub fn stdout_isatty() -> bool {
type DWORD = u32;
type BOOL = i32;
type HANDLE = *mut u8;
const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD;
extern "system" {
fn GetStdHandle(which: DWORD) -> HANDLE;
fn GetConsoleMode(hConsoleHandle: HANDLE,
lpMode: *mut DWORD) -> BOOL;
}
unsafe {
let handle = GetStdHandle(STD_OUTPUT_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}
| true
|
4aaaf1fdc18695834a5d2fc5e79fe5f89612f548
|
Rust
|
tommady/finmind-rs
|
/src/schema.rs
|
UTF-8
| 10,872
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
use chrono::NaiveDate;
use serde::Deserialize;
pub type Result<T> = std::result::Result<T, FinmindError>;
// Trading_Volume: number of shares traded
// Trading_money: total traded value
// Trading_turnover: turnover rate; a higher turnover rate means the stock trades more actively
// close: closing price
// date: trading date
// max: daily high
// min: daily low
// open: opening price
// spread: daily price spread
// stock_id: stock ticker symbol
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockPrice {
#[serde(alias = "Trading_Volume")]
pub trading_volume: u64,
#[serde(alias = "Trading_money")]
pub trading_money: u64,
pub open: f64,
pub max: f64,
pub min: f64,
pub close: f64,
pub spread: f64,
#[serde(alias = "Trading_turnover")]
pub trading_turnover: f64,
pub date: NaiveDate,
pub stock_id: String,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockInstitutionalInvestorsBuySell {
pub buy: u64,
pub name: String,
pub sell: u64,
pub date: NaiveDate,
pub stock_id: String,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockTotalInstitutionalInvestors {
pub buy: u64,
pub name: String,
pub sell: u64,
pub date: NaiveDate,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockShareholding {
pub date: NaiveDate,
pub stock_id: String,
pub stock_name: String,
#[serde(alias = "InternationalCode")]
pub international_code: String,
#[serde(alias = "ForeignInvestmentRemainingShares")]
pub foreign_investment_remaining_shares: u64,
#[serde(alias = "ForeignInvestmentShares")]
pub foreign_investment_shares: u64,
#[serde(alias = "ForeignInvestmentRemainRatio")]
pub foreign_investment_remain_ratio: f64,
#[serde(alias = "ForeignInvestmentSharesRatio")]
pub foreign_investment_shares_ratio: f64,
#[serde(alias = "ForeignInvestmentUpperLimitRatio")]
pub foreign_investment_upper_limit_ratio: f64,
#[serde(alias = "ChineseInvestmentUpperLimitRatio")]
pub chinese_investment_upper_limit_ratio: f64,
#[serde(alias = "NumberOfSharesIssued")]
pub number_of_shares_issued: u64,
#[serde(alias = "RecentlyDeclareDate")]
pub recently_declare_date: NaiveDate,
#[serde(skip_deserializing)]
pub note: String,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockMarginPurchaseShortSale {
pub date: NaiveDate,
pub stock_id: String,
#[serde(alias = "MarginPurchaseBuy")]
pub margin_purchase_buy: u64,
#[serde(alias = "MarginPurchaseCashRepayment")]
pub margin_purchase_cash_repayment: u64,
#[serde(alias = "MarginPurchaseLimit")]
pub margin_purchase_limit: u64,
#[serde(alias = "MarginPurchaseSell")]
pub margin_purchase_sell: u64,
#[serde(alias = "MarginPurchaseTodayBalance")]
pub margin_purchase_today_balance: u64,
#[serde(alias = "MarginPurchaseYesterdayBalance")]
pub margin_purchase_yesterday_balance: u64,
#[serde(skip_deserializing, alias = "Note")]
pub note: String,
#[serde(alias = "OffsetLoanAndShort")]
pub offset_loan_and_short: u64,
#[serde(alias = "ShortSaleBuy")]
pub short_sale_buy: u64,
#[serde(alias = "ShortSaleCashRepayment")]
pub short_sale_cash_repayment: u64,
#[serde(alias = "ShortSaleLimit")]
pub short_sale_limit: u64,
#[serde(alias = "ShortSaleSell")]
pub short_sale_sell: u64,
#[serde(alias = "ShortSaleTodayBalance")]
pub short_sale_today_balance: u64,
#[serde(alias = "ShortSaleYesterdayBalance")]
pub short_sale_yesterday_balance: u64,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct TaiwanStockMonthRevenue {
pub date: NaiveDate,
pub stock_id: String,
pub country: String,
pub revenue: u64,
pub revenue_month: u32,
pub revenue_year: u32,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "snake_case", untagged)]
pub enum Data {
TaiwanStockPrice(TaiwanStockPrice),
TaiwanStockInstitutionalInvestorsBuySell(TaiwanStockInstitutionalInvestorsBuySell),
TaiwanStockTotalInstitutionalInvestors(TaiwanStockTotalInstitutionalInvestors),
TaiwanStockShareholding(TaiwanStockShareholding),
TaiwanStockMarginPurchaseShortSale(TaiwanStockMarginPurchaseShortSale),
TaiwanStockMonthRevenue(TaiwanStockMonthRevenue),
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct Response {
pub msg: String,
pub status: usize,
pub data: Vec<Data>,
}
pub enum Dataset {
Unknown,
TaiwanStockPrice,
TaiwanStockTotalInstitutionalInvestors,
TaiwanStockInstitutionalInvestorsBuySell,
TaiwanStockShareholding,
TaiwanStockMarginPurchaseShortSale,
TaiwanStockMonthRevenue,
}
impl std::fmt::Display for Dataset {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
Dataset::Unknown => write!(f, "Unknown"),
Dataset::TaiwanStockPrice => write!(f, "TaiwanStockPrice"),
Dataset::TaiwanStockTotalInstitutionalInvestors => {
write!(f, "TaiwanStockTotalInstitutionalInvestors")
}
Dataset::TaiwanStockInstitutionalInvestorsBuySell => {
write!(f, "TaiwanStockInstitutionalInvestorsBuySell")
}
Dataset::TaiwanStockShareholding => write!(f, "TaiwanStockShareholding"),
Dataset::TaiwanStockMarginPurchaseShortSale => {
write!(f, "TaiwanStockMarginPurchaseShortSale")
}
Dataset::TaiwanStockMonthRevenue => write!(f, "TaiwanStockMonthRevenue"),
}
}
}
pub struct Args {
pub dataset: Dataset,
pub data_id: String,
pub start_date: NaiveDate,
pub end_date: NaiveDate,
pub token: String,
}
impl Default for Args {
fn default() -> Self {
Args {
dataset: Dataset::Unknown,
data_id: "".to_owned(),
start_date: chrono::offset::Utc::today().naive_utc(),
end_date: chrono::offset::Utc::today().naive_utc(),
token: "".to_owned(),
}
}
}
impl From<()> for Args {
fn from(_: ()) -> Self {
Self::default()
}
}
impl From<Dataset> for Args {
fn from(dataset: Dataset) -> Self {
Self {
dataset: dataset,
..Self::default()
}
}
}
impl From<(Dataset, NaiveDate, NaiveDate)> for Args {
fn from((dataset, start_date, end_date): (Dataset, NaiveDate, NaiveDate)) -> Self {
Self {
dataset: dataset,
start_date: start_date,
end_date: end_date,
..Self::default()
}
}
}
impl From<(Dataset, String, NaiveDate, NaiveDate)> for Args {
fn from(
(dataset, data_id, start_date, end_date): (Dataset, String, NaiveDate, NaiveDate),
) -> Self {
Self {
dataset: dataset,
data_id: data_id,
start_date: start_date,
end_date: end_date,
..Self::default()
}
}
}
impl From<(Dataset, NaiveDate, NaiveDate, String)> for Args {
fn from(
(dataset, start_date, end_date, token): (Dataset, NaiveDate, NaiveDate, String),
) -> Self {
Self {
dataset: dataset,
start_date: start_date,
end_date: end_date,
token: token,
..Self::default()
}
}
}
impl From<(Dataset, String, NaiveDate, NaiveDate, String)> for Args {
fn from(
(dataset, stock_id, start_date, end_date, token): (
Dataset,
String,
NaiveDate,
NaiveDate,
String,
),
) -> Self {
Self {
dataset: dataset,
data_id: stock_id,
start_date: start_date,
end_date: end_date,
token: token,
}
}
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct ErrorResponse {
pub status: usize,
pub msg: String,
}
impl std::fmt::Display for ErrorResponse {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"FinmindAPI: {{ status:{}, msg:{} }}",
self.status, self.msg,
)
}
}
impl std::error::Error for ErrorResponse {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
#[derive(Debug)]
pub enum FinmindError {
// error from serde_json lib
SerdeJson(serde_json::Error),
// error from reqwest lib
Reqwest(reqwest::Error),
// Url parsing error
Url(url::ParseError),
// errors from http response status
// 402 response status
RateLimitReached,
// 400 response status
BadRequest,
// unknown error
Unknown(ErrorResponse),
}
impl std::fmt::Display for FinmindError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
FinmindError::Url(ref e) => write!(f, "Url Parse error: {}", e),
FinmindError::SerdeJson(ref e) => write!(f, "Serde_json Lib error: {}", e),
FinmindError::Reqwest(ref e) => write!(f, "Reqwest Lib error: {}", e),
FinmindError::RateLimitReached => write!(f, "Rate limit reached"),
FinmindError::BadRequest => write!(f, "Bad Request"),
FinmindError::Unknown(ref e) => write!(f, "Unknown error: {}", e),
}
}
}
impl std::error::Error for FinmindError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
FinmindError::Url(ref e) => Some(e),
FinmindError::SerdeJson(ref e) => Some(e),
FinmindError::Reqwest(ref e) => Some(e),
FinmindError::RateLimitReached => None,
FinmindError::BadRequest => None,
FinmindError::Unknown(ref _e) => None,
}
}
}
impl From<url::ParseError> for FinmindError {
fn from(err: url::ParseError) -> FinmindError {
FinmindError::Url(err)
}
}
impl From<serde_json::Error> for FinmindError {
fn from(err: serde_json::Error) -> FinmindError {
FinmindError::SerdeJson(err)
}
}
impl From<reqwest::Error> for FinmindError {
fn from(err: reqwest::Error) -> FinmindError {
FinmindError::Reqwest(err)
}
}
impl From<String> for FinmindError {
fn from(err: String) -> FinmindError {
FinmindError::Unknown(ErrorResponse {
status: 500,
msg: err,
})
}
}
impl From<ErrorResponse> for FinmindError {
fn from(err: ErrorResponse) -> FinmindError {
match err.status {
400 => FinmindError::BadRequest,
402 => FinmindError::RateLimitReached,
_ => FinmindError::Unknown(err),
}
}
}
| true
|
0c63692cee195d04ff9ff7ff55a02d9c985b4129
|
Rust
|
jz4o/codingames
|
/rust/practice/classic_puzzle/medium/carmichael-numbers.rs
|
UTF-8
| 1,407
| 3.515625
| 4
|
[] |
no_license
|
use std::io;
macro_rules! parse_input {
($x:expr, $t:ident) => ($x.trim().parse::<$t>().unwrap())
}
/**
* Auto-generated code below aims at helping you parse
* the standard input according to the problem statement.
**/
fn main() {
let mut input_line = String::new();
io::stdin().read_line(&mut input_line).unwrap();
let n = parse_input!(input_line, i32);
// Write an answer using println!("message...");
// To debug: eprintln!("Debug message...");
// Korselt's criterion: n is a Carmichael number iff it is composite, squarefree,
// and (p - 1) divides (n - 1) for every prime factor p of n.
let prime_factors = prime_division(n);
let mut is_carmichael: bool = !is_prime(n);
for (i, &p) in prime_factors.iter().enumerate() {
// A repeated prime factor means n is not squarefree.
if (i > 0 && prime_factors[i - 1] == p) || (n - 1) % (p - 1) != 0 {
is_carmichael = false;
break;
}
}
let result: &str = if is_carmichael { "YES" } else { "NO" };
// println!("YES|NO");
println!("{result}");
}
fn prime_division(num: i32) -> Vec<i32> {
let mut result: Vec<i32> = Vec::new();
let mut temp_num: i32 = num;
while temp_num % 2 == 0 {
result.push(2);
temp_num /= 2;
}
let mut i: i32 = 3;
while temp_num != 1 {
if temp_num % i == 0 {
result.push(i);
temp_num /= i;
} else {
i += 2;
}
}
return result;
}
fn is_prime(num: i32) -> bool {
let mut i: i32 = 2;
while i < num {
if num % i == 0 {
return false;
}
i += 1;
}
return true;
}
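// A hedged worked example, not part of the original submission: 561 = 3 * 11 * 17
// is the smallest Carmichael number, and Korselt's criterion holds because 560 is
// divisible by 2, 10 and 16. The test only exercises the helpers defined above.
#[cfg(test)]
mod tests {
    use super::{is_prime, prime_division};

    #[test]
    fn korselt_criterion_holds_for_561() {
        assert!(!is_prime(561));
        let factors = prime_division(561);
        assert_eq!(factors, vec![3, 11, 17]);
        for p in factors {
            assert_eq!((561 - 1) % (p - 1), 0);
        }
    }
}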
| true
|
50f07a9070e616efdb11f99d61bbe4907d42f892
|
Rust
|
JustinK77/RustBlackjack
|
/src/main.rs
|
UTF-8
| 6,375
| 3.46875
| 3
|
[] |
no_license
|
use std::io;
use std::fmt;
use rand::Rng;
use std::cmp::Ordering;
//use std::vec;
use std::{thread, time};
struct Card{
writable: String,
value: u32
}
impl fmt::Display for Card{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.writable)
}
}
//TO-DO:
//Split
//Deck logic?
fn main() {
println!("~~~Welcome to the table!~~~");
let mut money : f32 = 100.0;
loop{
if money <= 0.0{
println!("Out of money, adding 10 dollars");
money = 10.0;
}
println!("You have {}, enter bet amount: ", money);
let mut bet_string = String::new();
io::stdin()
.read_line(&mut bet_string)
.expect("Please input valid amount.");
let mut bet : f32 = match bet_string.trim().parse(){
Ok(num) => num,
Err(_) => continue,
};
if bet > money{
println!("You don't have that much!");
continue;
}
let mut dealer_hand: Vec<Card> = Vec::new();
dealer_hand.push(get_card());
let mut dealer_hand_value = dealer_hand[0].value;
println!("Dealer showing {}, total {}", dealer_hand[0].writable, dealer_hand[0].value);
let five_hundred_millis = time::Duration::from_millis(500);
thread::sleep(five_hundred_millis);
let mut hand: Vec<Card> = Vec::new();
let mut hand_val = 0;
hand.push(get_card());
hand.push(get_card());
for card in &hand{
println!("{}", card);
thread::sleep(five_hundred_millis);
hand_val += card.value;
}
if hand_val == 21{
println!("Blackjack!");
money += bet * 1.5;
continue;
}
if hand_val == 22{
hand_val = 12;
hand.remove(0);
}
println!("Current hand value: {}", hand_val);
while hand_val < 21{
println!("Hit (H), Stand (S), or Double Down (D)?");
let mut action = String::new();
io::stdin()
.read_line(&mut action)
.expect("Please input H or S.");
let action : &str = &action[..].trim();
match action{
"H" | "h" => {
thread::sleep(five_hundred_millis);
let drawn_card : Card = get_card();
hand_val += drawn_card.value;
hand.push(drawn_card);
println!("Drew {}", hand[hand.len() - 1].writable);
// Convert a soft ace (counted as 11) to 1. Clearing the hand is a crude way to mark
// the ace as used up, but it also forgets any remaining aces still in the hand.
if hand_val > 21 && has_ace(&hand){
hand_val -= 10;
hand.clear();
}
println!("Current hand value: {}", hand_val);
}
"D" | "d" => {
bet *= 1.5;
println!("New bet: {}", bet);
thread::sleep(five_hundred_millis);
let drawn_card : Card = get_card();
hand_val += drawn_card.value;
hand.push(drawn_card);
println!("Drew {}", hand[hand.len() - 1].writable);
if hand_val > 21 && has_ace(&hand){
hand_val -= 10;
hand.clear();
}
println!("Current hand value: {}", hand_val);
break;
}
"S" | "s" => {
println!("Standing at {}", hand_val);
thread::sleep(five_hundred_millis);
break;
}
_ => break
}
}
if hand_val > 21{
println!("Bust!");
money -= bet;
}
else{
while dealer_hand_value < 17{
dealer_hand.push(get_card());
dealer_hand_value += dealer_hand[dealer_hand.len() - 1].value;
print!("Dealer draws {}, ", dealer_hand[dealer_hand.len() - 1].writable);
if dealer_hand_value > 21 && has_ace(&dealer_hand){
dealer_hand_value -= 10;
dealer_hand.clear();
}
println!("total {}", dealer_hand_value);
thread::sleep(five_hundred_millis);
}
if dealer_hand_value > 21{
println!("Dealer busts, you win!");
money += bet;
}
else{
match hand_val.cmp(&dealer_hand_value){
Ordering::Less => {
println!("Lose!");
money -= bet;
}
Ordering::Equal => println!("Push!"),
Ordering::Greater => {
println!("Win!");
money += bet;
}
}
}
}
println!();
println!("~~~NEW HAND~~~");
}
}
fn get_card() -> Card{
let mut value = rand::thread_rng().gen_range(1..=13);
let suit = rand::thread_rng().gen_range(1..=4);
let mut writable = String::new();
match value{
1 => writable.push_str("Ace of "),
2 => writable.push_str("Two of "),
3 => writable.push_str("Three of "),
4 => writable.push_str("Four of "),
5 => writable.push_str("Five of "),
6 => writable.push_str("Six of "),
7 => writable.push_str("Seven of "),
8 => writable.push_str("Eight of "),
9 => writable.push_str("Nine of "),
10 => writable.push_str("Ten of "),
11 => writable.push_str("Jack of "),
12 => writable.push_str("Queen of "),
13 => writable.push_str("King of "),
_ => writable.push_str("INVALID")
}
match suit{
1 => writable.push_str("Diamonds"),
2 => writable.push_str("Hearts"),
3 => writable.push_str("Spades"),
4 => writable.push_str("Clubs"),
_ => writable.push_str("INVALID")
}
if value > 10{
value = 10;
}
if value == 1{
value = 11;
}
Card{
writable: writable,
value: value
}
}
fn has_ace(hand : &Vec<Card>) -> bool{
for card in hand{
if card.value == 11{
return true;
}
}
false
}
| true
|
d1fb1faa5c9aa9d9511a15b88c6b950aa451ab8b
|
Rust
|
akerber47/knoxide
|
/src/bin/mixxd.rs
|
UTF-8
| 2,582
| 2.859375
| 3
|
[] |
no_license
|
extern crate knoxide;
use knoxide::mix_fmt;
use std::io::prelude::*;
fn main() {
let args: Vec<String> = std::env::args().collect();
let mut flag_r = false;
let mut flag_h = false;
let mut flag_d = false;
let mut infile: Option<&String> = None;
let mut outfile: Option<&String> = None;
// Poor man's getopt
for arg in args[1..].iter() {
if arg == "-h" || arg == "--help" {
flag_h = true;
}
else if arg == "-r" || arg == "--reverse" {
flag_r = true;
}
else if arg == "-d" || arg == "--debug" {
flag_d = true;
}
else if infile == None {
infile = Some(arg);
} else if outfile == None {
outfile = Some(arg);
} else {
eprintln!("mixxd: Too many arguments!");
std::process::exit(1);
}
}
if flag_h {
println!("Usage: mixxd [-r] [-v] [infile] [outfile]");
println!("-r: reverse operation (convert dump to binary)");
println!("-v: verbose operation");
std::process::exit(2);
}
// If '-' is passed as infile or outfile, use stdin/stdout instead.
if let Some(s) = infile {
if s == "-" {
infile = None;
}
}
if let Some(s) = outfile {
if s == "-" {
outfile = None;
}
}
if flag_r {
panic!("Not yet implemented");
} else {
let mut in_bytes: Vec<u8> = vec![];
match infile {
Some(s) => {
let mut f = std::fs::File::open(s).expect(
"mixxd: Failed to open file!");
if let Err(e) = f.read_to_end(&mut in_bytes) {
eprintln!("mixxd: read failed! {}", e);
}
},
None => {
if let Err(e) = std::io::stdin().read_to_end(&mut in_bytes) {
eprintln!("mixxd: read failed! {}", e);
}
},
};
let dump = mix_fmt::fmt_words(&in_bytes);
match outfile {
Some(s) => {
let mut f = std::fs::File::create(s).expect(
"mixxd: Failed to open file!");
if let Err(e) = write!(f, "{}", dump) {
eprintln!("mixxd: write failed! {}", e);
}
},
None => {
if let Err(e) = write!(std::io::stdout(), "{}", dump) {
eprintln!("mixxd: write failed! {}", e);
}
},
};
}
}
| true
|
49bc1e76c0f7983ddb939cffe89a869af083bc5e
|
Rust
|
KristonCosta/four-am
|
/src/error.rs
|
UTF-8
| 387
| 2.703125
| 3
|
[] |
no_license
|
use rusttype::Error as RTError;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
Io(std::io::Error),
Font(RTError),
}
impl From<RTError> for Error {
fn from(other: RTError) -> Self {
Error::Font(other)
}
}
impl From<std::io::Error> for Error {
fn from(other: std::io::Error) -> Self {
Error::Io(other)
}
}
| true
|
fe01ebdc8c97fd018824f1add5bd04823fd34915
|
Rust
|
ysndr/doorman
|
/src/interfaces/services.rs
|
UTF-8
| 3,447
| 2.796875
| 3
|
[] |
no_license
|
use std::{collections::HashMap, error::Error, fmt::Debug, time::Duration};
use async_trait::async_trait;
pub trait ServiceError: Error + std::fmt::Debug + Send + Sync {}
#[async_trait]
pub trait Detector {
type Device: Debug;
type DetectorError: ServiceError;
/// Detect a device asynchronously
async fn wait_for_device(&self) -> Result<&Self::Device, Self::DetectorError>;
}
pub trait Registry {
type Ident;
type Device;
type RegistryError: ServiceError;
/// Register a new device
fn register_device_with(
&mut self,
ident: Self::Ident,
device: Self::Device,
) -> Result<(), Self::RegistryError>;
/// Imports a map of devices
fn from_map<I: Into<Self::Ident>, D: Into<Self::Device> + Clone>(
&mut self,
devices: HashMap<I, D>,
) -> Result<(), Self::RegistryError> {
for (ident, device) in devices {
self.register_device_with(ident.into(), device.into())?
}
Ok(())
}
/// Register a new device, deriving the identifier from the device.
/// Prefer using this method if there are no naming conflicts.
fn register_device<D: Into<Self::Ident> + Into<Self::Device> + Clone>(
&mut self,
device: D,
) -> Result<(), Self::RegistryError> {
self.register_device_with(device.clone().into(), device.into())
}
/// Imports a list of devices
/// This works as long as an identifier can be derived from the Device
fn from_list<D: Into<Self::Ident> + Into<Self::Device> + Clone>(
&mut self,
devices: impl IntoIterator<Item = D>,
) -> Result<(), Self::RegistryError> {
for device in devices.into_iter() {
self.register_device(device)?
}
Ok(())
}
/// Unregisters an existing device with a given ident
/// Returns an error if the device is unknown
fn unregister_device(&mut self, ident: &Self::Ident) -> Result<(), Self::RegistryError>;
/// Checks whether devices is registered
/// returns the device or None
fn check(&self, ident: &Self::Ident) -> Option<&Self::Device>;
/// List all registered devices
fn list(&self) -> Vec<&Self::Device>;
}
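// A hedged illustration, not part of the original crate: a minimal in-memory
// registry backed by a `HashMap`, mainly to show how the provided helpers
// (`from_map`, `from_list`, `register_device`) all funnel into
// `register_device_with`. `MemoryRegistry` and `MemoryRegistryError` are
// invented names for this sketch.
#[derive(Debug)]
pub struct MemoryRegistryError(String);

impl std::fmt::Display for MemoryRegistryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "registry error: {}", self.0)
    }
}

impl Error for MemoryRegistryError {}
impl ServiceError for MemoryRegistryError {}

#[derive(Default)]
pub struct MemoryRegistry {
    devices: HashMap<String, String>,
}

impl Registry for MemoryRegistry {
    type Ident = String;
    type Device = String;
    type RegistryError = MemoryRegistryError;

    fn register_device_with(
        &mut self,
        ident: Self::Ident,
        device: Self::Device,
    ) -> Result<(), Self::RegistryError> {
        if self.devices.contains_key(&ident) {
            return Err(MemoryRegistryError(format!("{} already registered", ident)));
        }
        self.devices.insert(ident, device);
        Ok(())
    }

    fn unregister_device(&mut self, ident: &Self::Ident) -> Result<(), Self::RegistryError> {
        self.devices
            .remove(ident)
            .map(|_| ())
            .ok_or_else(|| MemoryRegistryError(format!("{} is not registered", ident)))
    }

    fn check(&self, ident: &Self::Ident) -> Option<&Self::Device> {
        self.devices.get(ident)
    }

    fn list(&self) -> Vec<&Self::Device> {
        self.devices.values().collect()
    }
}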
pub trait RegistryKnownType: Registry {
fn import_list(&mut self, devices: Vec<Self::Device>) -> Result<(), Self::RegistryError>;
}
impl<R> RegistryKnownType for R
where
Self::Device: From<Self::Device> + Clone,
Self::Ident: From<Self::Device>,
R: Registry,
{
fn import_list(&mut self, devices: Vec<Self::Device>) -> Result<(), Self::RegistryError> {
self.from_list(devices)
}
}
#[derive(Debug)]
pub enum AuthenticateResult {
Allow,
Deny,
}
#[async_trait]
pub trait Authenticate {
type Device;
type AuthenticateError: ServiceError;
/// Request an authentication for the given device
async fn authenticate(
&self,
device: &Self::Device,
timeout: Option<Duration>,
) -> Result<AuthenticateResult, Self::AuthenticateError>;
}
pub trait Actuator {
type ActuatorError: ServiceError;
/// Actuate the opening mechanism
fn open(&mut self) -> Result<(), Self::ActuatorError>;
}
#[async_trait]
pub trait Locker {
type LockerError: ServiceError;
/// Await engagement of the mechanism
async fn wait_for_lock(&self) -> Result<(), Self::LockerError>;
/// Confirm lock to the user
async fn confirm_lock(&self) -> Result<(), Self::LockerError>;
}
| true
|
2d22e231ccf20dfabb5a12658849ea44449081e1
|
Rust
|
SpacialCircumstances/minipython-compiler
|
/minipython/src/compiler.rs
|
UTF-8
| 1,225
| 2.9375
| 3
|
[] |
no_license
|
use std::path::Path;
use std::fs;
use crate::parser;
use crate::ir;
use crate::codegen;
use std::fs::File;
use std::io::{BufWriter, Write};
pub struct CompilerInstance<'a> {
input_file: &'a Path,
output_file: &'a Path,
}
impl<'a> CompilerInstance<'a> {
pub fn new(input_file: &'a Path, output_file: &'a Path) -> Result<CompilerInstance<'a>, String> {
if input_file.exists() {
Ok(CompilerInstance {
input_file,
output_file
})
} else {
Err(format!("Input file {} does not exist", input_file.display()))
}
}
pub fn run(&mut self) -> Result<(), String> {
let code = fs::read_to_string(self.input_file).map_err(|e| format!("{}", e))?;
let (name_store, ast_res) = parser::parse_program(&code);
let ast = ast_res?;
let ir = ir::convert_program_to_ir(&ast, &name_store)?;
let file = File::create(self.output_file).map_err(|e| format!("{}", e))?;
let mut writer = BufWriter::new(&file);
codegen::compile_to_c(&ir, &name_store, &mut writer).map_err(|e| format!("{}", e))?;
writer.flush().map_err(|e| format!("{}", e))?;
Ok(())
}
}
| true
|
c04824609e406c1295d1fae5911a8b50956baab5
|
Rust
|
joseluis/raytracing_series
|
/src/render.rs
|
UTF-8
| 954
| 2.921875
| 3
|
[] |
no_license
|
use crate::{Ray, Vec3};
/// Returns the ray parameter `t` at which `ray` first hits the sphere, or `-1.0` if it misses.
pub fn hit_sphere(center: &Vec3, radius: f32, ray: &Ray) -> f32 {
let oc: Vec3 = ray.origin() - center;
let a: f32 = ray.direction().dot(ray.direction());
let b: f32 = oc.dot(ray.direction()) * 2.0;
let c: f32 = oc.dot(&oc) - radius * radius;
let discriminant: f32 = b * b - 4. * a * c;
if discriminant < 0. {
-1.0
} else {
(-b - discriminant.sqrt()) / (2. * a)
}
}
/// Computes the color seen along `ray`: a sphere hit is shaded by its surface normal, otherwise white and blue are blended by ray height.
pub fn color(ray: &Ray) -> Vec3 {
let t: f32 = hit_sphere(&Vec3(0., 0., -1.), 0.5, ray);
if t > 0. {
// The normal is the hit point minus the sphere center at (0., 0., -1.).
let n: Vec3 = (ray.point_at_parameter(t) - Vec3(0., 0., -1.)).unit_vector();
return Vec3(n.x() + 1., n.y() + 1., n.z() + 1.) * 0.5;
}
let unit_direction: Vec3 = ray.direction().unit_vector();
let t: f32 = 0.5 * (unit_direction.y() + 1.0);
// blends white and blue, depending on the up/downess of the y
Vec3(1., 1., 1.) * (1.0 - t) + Vec3(0.5, 0.7, 1.0) * t
}
| true
|
41c81e7a529d240bcacc1aab8607631391abb254
|
Rust
|
adamnemecek/emui
|
/src/widgets/progress_bar.rs
|
UTF-8
| 2,241
| 2.875
| 3
|
[] |
no_license
|
use crate::layout;
use crate::theme;
use crate::util;
use crate::widget;
use embedded_graphics::egrectangle;
#[derive(Debug, Default)]
pub struct State {
layout: layout::State,
}
#[derive(Debug)]
pub struct ProgressBar<'a> {
state: &'a mut State,
value: u32,
max: u32,
}
impl<'a> ProgressBar<'a> {
pub fn new(state: &'a mut State, value: u32, max: u32) -> Self {
Self { state, value, max }
}
}
impl<'a, T> widget::Node<'a, T> for ProgressBar<'a>
where
T: theme::Theme<'a>,
{
fn visit<V>(&self, visitor: V) -> V::Output
where
V: widget::Visitor<'a, T>,
{
visitor.accept_leaf(self)
}
fn visit_mut<V>(&mut self, visitor: V) -> V::Output
where
V: widget::MutVisitor<'a, T>,
{
visitor.accept_leaf_mut(self)
}
}
impl<'a, T> widget::Widget<'a, T> for ProgressBar<'a>
where
T: theme::Theme<'a>,
{
type Output = core::iter::Chain<util::RectIter<T::Color>, util::RectIter<T::Color>>;
fn draw(&self, context: &widget::DrawContext<T>) -> Self::Output {
let x1 = context.position.x;
let y1 = context.position.y;
let xval = x1 + ((self.value * (context.size.width - 1)) / self.max) as i32;
let x2 = x1 + context.size.width as i32 - 1;
let y2 = y1 + context.size.height as i32 - 1;
egrectangle!(
(x1, y1),
(xval, y2),
fill = Some(context.theme.fill_color())
)
.into_iter()
.chain(egrectangle!(
(x1, y1),
(x2, y2),
stroke = Some(context.theme.border_color())
))
}
fn layout_style(&self, theme: &T) -> layout::Style {
layout::Style {
flex_shrink: 1.0,
flex_grow: 1.0,
flex_basis: stretch::style::Dimension::Auto,
min_size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(20.0),
height: stretch::style::Dimension::Points(3.0),
},
..theme.base_style()
}
}
fn layout_state(&self) -> &layout::State {
&self.state.layout
}
fn layout_state_mut(&mut self) -> &mut layout::State {
&mut self.state.layout
}
}
| true
|
0efd04023c6a8d713e19c113e3c17316f68c17bd
|
Rust
|
Drumato/Depth
|
/src/compile/frontend/token/token.rs
|
UTF-8
| 3,434
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
#[derive(Eq, PartialEq, Clone)]
pub enum Token {
/* symbol */
PLUS,
MINUS,
STAR,
SLASH,
PERCENT,
ASSIGN,
LPAREN,
RPAREN,
LBRACE,
RBRACE,
LBRACKET,
RBRACKET,
LSHIFT,
LT,
GT,
LTEQ,
GTEQ,
EQ,
NTEQ,
SEMICOLON,
COLON,
DOUBLECOLON,
COMMA,
DOT,
AMPERSAND,
/* keyword */
FUNC,
TYPE,
RETURN,
IF,
ELSE,
CONDLOOP,
LET,
MUT,
GOTO,
STRUCT,
I64,
COMPINT,
POINTER(Box<Token>),
ARRAY(Box<Token>, Box<Token>),
INFORMATION(String),
/* etc */
INTEGER(i128),
IDENT(String),
EOF,
BLANK,
LF,
COMMENT,
HASH,
}
impl Token {
pub fn name(&self) -> Option<String> {
if let Self::IDENT(name) = self {
return Some(name.to_string());
}
None
}
pub fn string(&self) -> String {
match self {
Token::INTEGER(int) => format!("INTEGER<{}>", int),
Token::PLUS => "PLUS".to_string(),
Token::MINUS => "MINUS".to_string(),
Token::STAR => "STAR".to_string(),
Token::SLASH => "SLASH".to_string(),
Token::PERCENT => "PERCENT".to_string(),
Token::ASSIGN => "ASSIGN".to_string(),
Token::LPAREN => "LPAREN".to_string(),
Token::RPAREN => "RPAREN".to_string(),
Token::LBRACE => "LBRACE".to_string(),
Token::RBRACE => "RBRACE".to_string(),
Token::LBRACKET => "LBRACKET".to_string(),
Token::RBRACKET => "RBRACKET".to_string(),
Token::LSHIFT => "LSHIFT".to_string(),
Token::LT => "LESSTHAN".to_string(),
Token::GT => "GREATERTHAN".to_string(),
Token::LTEQ => "LESSTHANEQUAL".to_string(),
Token::GTEQ => "GREATERTHANEQUAL".to_string(),
Token::EQ => "EQUAL".to_string(),
Token::NTEQ => "NOTEQUAL".to_string(),
Token::SEMICOLON => "SEMICOLON".to_string(),
Token::COLON => "COLON".to_string(),
Token::DOUBLECOLON => "DOUBLECOLON".to_string(),
Token::COMMA => "COMMA".to_string(),
Token::DOT => "DOT".to_string(),
Token::AMPERSAND => "AMPERSAND".to_string(),
Token::RETURN => "RETURN".to_string(),
Token::EOF => "EOF".to_string(),
Token::FUNC => "FUNCTION".to_string(),
Token::TYPE => "TYPE".to_string(),
Token::IDENT(name) => format!("IDENTIFIER<{}>", name),
Token::IF => "IF".to_string(),
Token::ELSE => "ELSE".to_string(),
Token::CONDLOOP => "CONDLOOP".to_string(),
Token::LET => "LET".to_string(),
Token::MUT => "MUTABLE".to_string(),
Token::STRUCT => "STRUCT".to_string(),
Token::I64 => "i64".to_string(),
Token::COMPINT => "compint".to_string(),
Token::POINTER(ptr_to) => format!("POINTER<{}>", ptr_to.string()),
Token::ARRAY(elem_type, ary_size) => {
format!("ARRAY<{},{}>", elem_type.string(), ary_size.string())
}
Token::INFORMATION(_) => "@info".to_string(),
_ => "".to_string(),
}
}
pub fn should_ignore(&self) -> bool {
match self {
Token::BLANK | Token::LF | Token::COMMENT | Token::HASH => true,
_ => false,
}
}
}
| true
|
d3c3564c185d19a7efa9d44f89c2ea474442c8c4
|
Rust
|
ito-hiroki/rust_to_do_app
|
/src/to_do/structs/traits/edit.rs
|
UTF-8
| 224
| 2.71875
| 3
|
[] |
no_license
|
pub trait Edit {
fn set_to_done(&self, title: &str) {
println!("{} is being set to done", title);
}
fn set_to_pending(&self, title: &str) {
println!("{} is being set to pending", title);
}
}
| true
|
1755f5ad219d3d46a6a152b1340a7f3be899d0eb
|
Rust
|
zerosign/consul-client
|
/src/lib.rs
|
UTF-8
| 1,149
| 2.609375
| 3
|
[] |
no_license
|
// GET http://127.0.0.1:8500/v1/agent/members
//
//
extern crate env_logger;
extern crate hyper;
extern crate log;
extern crate serde;
extern crate serde_json;
extern crate tokio;
use serde::Deserialize;
use std::{collections::BTreeMap, fmt, time::Instant};
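// NOTE: the test module below refers to `Member` and `FetchError`, but neither is
// defined in the file as captured. The definitions here are a hedged sketch rather
// than the original author's types: the `Member` fields are a guess at the usual
// `/v1/agent/members` response shape, and `FetchError` is a minimal wrapper so that
// `.from_err::<FetchError>()` and the `?` on `serde_json::from_slice` have the
// conversions they need.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Member {
    pub name: String,
    pub addr: String,
    pub port: u16,
    pub status: u32,
    #[serde(default)]
    pub tags: BTreeMap<String, String>,
}

#[derive(Debug)]
pub enum FetchError {
    Http(hyper::Error),
    Json(serde_json::Error),
}

impl fmt::Display for FetchError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            FetchError::Http(e) => write!(f, "http error: {}", e),
            FetchError::Json(e) => write!(f, "json error: {}", e),
        }
    }
}

impl From<hyper::Error> for FetchError {
    fn from(err: hyper::Error) -> Self {
        FetchError::Http(err)
    }
}

impl From<serde_json::Error> for FetchError {
    fn from(err: serde_json::Error) -> Self {
        FetchError::Json(err)
    }
}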
#[cfg(test)]
mod tests {
use super::{FetchError, Member};
use crate::env_logger;
use crate::hyper::{
rt::{self, Future, Stream},
Client, Uri,
};
use crate::log::debug;
use crate::serde::Deserialize;
use crate::tokio::runtime::Runtime;
#[test]
fn it_works() {
env_logger::init();
let client = Client::new();
let task = client
.get(Uri::from_static("http://127.0.0.1:8500/v1/agent/members"))
.and_then(|res| res.into_body().concat2())
.from_err::<FetchError>()
.and_then(|body| {
let members: Vec<Member> = serde_json::from_slice(&body)?;
Ok(members)
});
let mut runtime = Runtime::new().expect("create this thread runtime");
let result = runtime.block_on(task);
debug!("{:?}", result);
runtime.shutdown_on_idle();
}
}
| true
|
7bbd77e9f2aaec52f028eda9e66c896b88f7b8c5
|
Rust
|
naomijub/nfl
|
/src/model/json.rs
|
UTF-8
| 6,771
| 2.90625
| 3
|
[] |
no_license
|
use juniper::GraphQLObject;
use serde::Serialize;
use serde_json::Value;
use crate::model::error::Error;
#[derive(Debug, PartialEq, Clone, Serialize, GraphQLObject)]
pub struct Player {
#[serde(rename(serialize = "Name"))]
pub name: String,
#[serde(rename(serialize = "Team"))]
pub team: String,
#[serde(rename(serialize = "Position"))]
pub position: String,
#[serde(rename(serialize = "Rushing Attempts Per Game Average"))]
pub avg_attempts_per_game: f64,
#[serde(rename(serialize = "Rushing Attempts"))]
pub attemps: i32,
#[serde(rename(serialize = "Total Rushing Yards"))]
pub total_rushing_yards: i32,
#[serde(rename(serialize = "Rushing Average Yards Per Attempt"))]
pub average_rushing_yards_per_attemp: f64,
#[serde(rename(serialize = "Rushing Yards Per Game"))]
pub rushing_yards_per_game: f64,
#[serde(rename(serialize = "Total Rushing Touchdowns"))]
pub total_rushing_touchdowns: i32,
#[serde(rename(serialize = "Longest Rush"))]
pub longest_rush: String,
#[serde(rename(serialize = "Rushing First Downs"))]
pub rushing_first_downs: f64,
#[serde(rename(serialize = "Rushing First Down Percentage"))]
pub rushing_first_downs_percentage: f64,
#[serde(rename(serialize = "Rushing 20+ Yards Each"))]
pub rushing_20_yards: f64,
#[serde(rename(serialize = "Rushing 40+ Yards Each"))]
pub rushing_40_yards: f64,
#[serde(rename(serialize = "Rushing Fumbles"))]
pub rushing_fumbles: i32,
}
impl Player {
pub fn from_value(object: &Value) -> Result<Self, Error> {
if let Value::Object(obj) = object {
Ok(Player {
name: obj["Player"].as_str().unwrap().to_string(),
team: obj["Team"].as_str().unwrap().to_string(),
position: obj["Pos"].as_str().unwrap().to_string(),
avg_attempts_per_game: match &obj["Att/G"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("Att/G".to_string())),
},
attemps: match &obj["Att"] {
Value::Number(n) if n.is_i64() => n.as_i64().unwrap() as i32,
Value::Number(n) => n.to_string().parse::<i32>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<i32>().unwrap(),
_ => return Err(Error::AttributeParseError("Att".to_string())),
},
total_rushing_yards: match &obj["Yds"] {
Value::Number(n) if n.is_i64() => n.as_i64().unwrap() as i32,
Value::Number(n) => n.to_string().parse::<i32>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<i32>().unwrap(),
_ => return Err(Error::AttributeParseError("Yds".to_string())),
},
average_rushing_yards_per_attemp: match &obj["Avg"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("Avg".to_string())),
},
rushing_yards_per_game: match &obj["Yds/G"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("Yds/G".to_string())),
},
total_rushing_touchdowns: match &obj["TD"] {
Value::Number(n) if n.is_u64() => n.as_i64().unwrap() as i32,
Value::Number(n) => n.to_string().parse::<i32>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<i32>().unwrap(),
_ => return Err(Error::AttributeParseError("TD".to_string())),
},
longest_rush: obj["Lng"].as_str().unwrap_or("-1").to_string(),
rushing_first_downs: match &obj["1st"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("1st".to_string())),
},
rushing_first_downs_percentage: match &obj["1st%"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("1st%".to_string())),
},
rushing_20_yards: match &obj["20+"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("20+".to_string())),
},
rushing_40_yards: match &obj["40+"] {
Value::Number(n) if n.is_f64() => n.as_f64().unwrap(),
Value::Number(n) => n.to_string().parse::<f64>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<f64>().unwrap(),
_ => return Err(Error::AttributeParseError("40+".to_string())),
},
rushing_fumbles: match &obj["FUM"] {
Value::Number(n) if n.is_u64() => n.as_i64().unwrap() as i32,
Value::Number(n) => n.to_string().parse::<i32>().unwrap(),
Value::String(n) => n.replace(",", "").parse::<i32>().unwrap(),
_ => return Err(Error::AttributeParseError("FUM".to_string())),
},
})
} else {
Err(Error::JsonReaderError)
}
}
pub fn longest_rush(&self) -> i64 {
let td = self.longest_rush.clone();
if td.contains('T') {
// A trailing "T" marks a touchdown run; stripping it and adding 100 presumably
// makes touchdown runs compare ahead of plain runs of the same length.
let new_td = td.replace("T", "");
let td_val = new_td.parse::<i64>().unwrap();
td_val + 100
} else {
td.parse::<i64>().unwrap()
}
}
}
| true
|
509a3ea6790b7409ee4d6a35dcc06d84fbe48656
|
Rust
|
DanielHZhang/rusty-avl
|
/src/tree.rs
|
UTF-8
| 14,419
| 3.453125
| 3
|
[] |
no_license
|
use std::{cmp::Ordering, collections::VecDeque, iter::FromIterator};
use super::{
iter::{IterInorder, IterPostorder, IterPreorder},
node::{Branch, Extract, Node},
};
/// An AVL tree implemented using purely iterative lookups. Stores a pointer to the root node and
/// the current number of unique nodes within the tree.
#[derive(Debug)]
pub struct AvlTree<K, V> {
root: Branch<K, V>,
size: usize,
}
impl<K, V> Default for AvlTree<K, V> {
fn default() -> Self {
Self {
root: None,
size: 0,
}
}
}
impl<K: Ord, V: PartialEq> FromIterator<(K, V)> for AvlTree<K, V> {
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let mut tree = Self::default();
for (key, value) in iter {
tree.insert(key, value);
}
tree
}
}
impl<K: Ord + PartialEq + Clone> FromIterator<K> for AvlTree<K, K> {
fn from_iter<T: IntoIterator<Item = K>>(iter: T) -> Self {
let mut tree = Self::default();
for key in iter {
tree.insert(key.clone(), key);
}
tree
}
}
impl<K: Ord, V: PartialEq> AvlTree<K, V> {
/// Creates a new AVL tree with an empty root.
pub fn new(root: Node<K, V>) -> Self {
Self {
root: Some(Box::new(root)),
size: 1,
}
}
/// Returns true if the AVL tree contains no nodes.
pub fn is_empty(&self) -> bool {
self.root.is_none()
}
/// Returns true if the AVL tree contains a node with the provided key.
pub fn contains(&self, key: &K) -> bool {
self.get(&key).is_some()
}
/// Clears the AVL tree, removing all nodes.
pub fn clear(&mut self) {
self.root.take();
self.size = 0;
}
/// Returns the number of unique nodes within the AVL tree.
pub fn len(&self) -> usize {
self.size
}
/// Returns a reference to the node with the provided key.
pub fn get(&self, target: &K) -> Option<&Node<K, V>> {
self
.root
.as_ref()
.map(|root| {
let mut cur = root;
loop {
if target < &cur.key {
cur = match &cur.left {
Some(node) => node,
None => return None,
};
} else if target > &cur.key {
cur = match &cur.right {
Some(node) => node,
None => return None,
};
} else {
return Some(cur.as_ref());
}
}
})
.unwrap_or(None)
}
/// Returns a mutable reference to the node with the provided key.
pub fn get_mut(&mut self, target: &K) -> Option<&mut Node<K, V>> {
self
.root
.as_mut()
.map(|root| {
let mut cur = root;
loop {
if target < &cur.key {
cur = match &mut cur.left {
Some(node) => node,
None => return None,
};
} else if target > &cur.key {
cur = match &mut cur.right {
Some(node) => node,
None => return None,
};
} else {
return Some(cur.as_mut());
}
}
})
.unwrap_or(None)
}
/// Inserts a key-value pair into the tree. If the tree did not previously contain the given key,
/// None is returned, otherwise the old value associated with the key is returned.
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
let mut visited: Vec<*mut Node<K, V>> = Vec::new(); // Store raw mut pointers
let mut cur = &mut self.root;
while let Some(ref mut node) = cur {
visited.push(node.as_mut());
match key.cmp(&node.key) {
Ordering::Less => cur = &mut node.left,
Ordering::Greater => cur = &mut node.right,
Ordering::Equal => {
let old = std::mem::replace(&mut node.value, value);
return Some(old);
}
};
}
*cur = Some(Box::new(Node::new(key, value)));
self.size += 1;
// Trace backwards through visited parents, updating their heights
for parent in visited.into_iter().rev() {
let node = unsafe { &mut *parent };
node.update_height();
node.rebalance();
}
None
}
/// Removes the Node with the given key from the tree, returning its value if a Node with the key
/// was previously in the tree.
pub fn remove(&mut self, key: &K) -> Option<V> {
let mut visited = Vec::<*mut Node<K, V>>::new(); // Store raw mut pointers
let mut target = &mut self.root;
while let Some(node) = target.as_ref() {
match key.cmp(&node.key) {
Ordering::Less => {
let node = target.as_deref_mut().unwrap();
visited.push(node);
target = &mut node.left;
}
Ordering::Greater => {
let node = target.as_deref_mut().unwrap();
visited.push(node);
target = &mut node.right;
}
Ordering::Equal => {
break;
}
}
}
if target.is_none() {
return None;
}
self.size -= 1;
let mut node = target.take().unwrap();
match (node.left.as_mut(), node.right.as_mut()) {
(None, None) => *target = None,
(Some(_), None) => *target = node.left.take(),
(None, Some(_)) => *target = node.right.take(),
(Some(_), Some(_)) => {
let mut extracted = node.right.extract_min();
if let Some(ref mut root) = extracted {
root.left = node.left;
root.right = node.right;
root.update_height();
root.rebalance();
}
*target = extracted;
}
};
for parent in visited.into_iter().rev() {
let node = unsafe { &mut *parent };
node.update_height();
node.rebalance();
}
Some(node.value)
}
/// Returns a reference to the minimum Node in the tree.
pub fn min(&self) -> Option<&Node<K, V>> {
self.root.as_ref().map(|root| root.min()).unwrap_or(None)
}
  /// Returns a mutable reference to the minimum Node in the tree.
pub fn min_mut(&mut self) -> Option<&mut Node<K, V>> {
self
.root
.as_mut()
.map(|root| root.min_mut())
.unwrap_or(None)
}
/// Returns a reference to the maximum Node in the tree.
pub fn max(&self) -> Option<&Node<K, V>> {
self.root.as_ref().map(|root| root.max()).unwrap_or(None)
}
  /// Returns a mutable reference to the maximum Node in the tree.
pub fn max_mut(&mut self) -> Option<&mut Node<K, V>> {
self
.root
.as_mut()
.map(|root| root.max_mut())
.unwrap_or(None)
}
/// Returns a reference to the successor Node of the given key. The successor is defined as the
/// Node with the minimum key value that is larger than the provided key.
pub fn successor(&mut self, key: &K) -> Option<&Node<K, V>> {
self
.root
.as_deref()
.map(|root| {
let mut visited = Vec::from([root]);
while let Some(node) = visited.last() {
match key.cmp(&node.key) {
Ordering::Less => match node.left.as_deref() {
Some(left) => visited.push(left),
None => break,
},
Ordering::Greater => match node.right.as_deref() {
Some(right) => visited.push(right),
None => break,
},
Ordering::Equal => match node.right.as_deref() {
Some(right) => return right.min().or(Some(right)),
None => {
// Trace backwards through visited parents, until encountering successor
return visited.into_iter().rev().find(|parent| &parent.key > key);
}
},
};
}
None
})
.unwrap_or(None)
}
/// Returns a reference to the predecessor Node of the given key. The predecessor is defined as
/// the Node with the maximum key value that is smaller than the provided key.
pub fn predecessor(&mut self, key: &K) -> Option<&Node<K, V>> {
self
.root
.as_deref()
.map(|root| {
let mut visited = Vec::from([root]);
while let Some(node) = visited.last() {
match key.cmp(&node.key) {
Ordering::Less => match node.left.as_deref() {
Some(node) => visited.push(node),
None => break,
},
Ordering::Greater => match node.right.as_deref() {
Some(node) => visited.push(node),
None => break,
},
Ordering::Equal => match node.left.as_deref() {
Some(left) => return left.max().or(Some(left)),
None => {
                // Trace backwards through visited parents, until encountering predecessor
return visited.into_iter().rev().find(|parent| &parent.key < key);
}
},
}
}
None
})
.unwrap_or(None)
}
/// Returns the maximum node count (not edge count) from the root node to a leaf node.
pub fn height(&self) -> usize {
self
.root
.as_deref()
.map(|root| {
let mut height = 0;
let mut queue = Vec::from([root])
.into_iter()
.collect::<VecDeque<&Node<K, V>>>();
while !queue.is_empty() {
let mut size = queue.len();
while size > 0 {
let front = queue.pop_front().unwrap();
if let Some(node) = front.left.as_deref() {
queue.push_back(node);
}
if let Some(node) = front.right.as_deref() {
queue.push_back(node);
}
size -= 1;
}
height += 1;
}
height
})
.unwrap_or(0)
}
/// Returns an iterator that performs a pre-order traversal of the tree
pub fn iter_preorder(&self) -> IterPreorder<K, V> {
IterPreorder::new(self.root.as_deref())
}
/// Returns an iterator that performs an in-order traversal of the tree
pub fn iter_inorder(&self) -> IterInorder<K, V> {
IterInorder::new(self.root.as_deref())
}
/// Returns an iterator that performs a post-order traversal of the tree
pub fn iter_postorder(&self) -> IterPostorder<K, V> {
IterPostorder::new(self.root.as_deref())
}
}
#[cfg(test)]
mod test {
use super::AvlTree;
use super::Node;
#[test]
fn new() {
let avl = AvlTree::<i32, i32>::default();
assert!(avl.root.is_none());
assert!(avl.is_empty());
assert_eq!(avl.len(), 0);
let avl = AvlTree::new(Node::new("key", "value"));
assert!(avl.root.is_some());
assert!(!avl.is_empty());
assert_eq!(avl.len(), 1);
}
#[test]
fn get() {
let mut avl = AvlTree::new(Node::new(2, 2));
avl.insert(4, 4);
assert!(avl.get(&0).is_none(), "non-existent key returns None");
let found = avl.get(&2).unwrap();
assert_eq!(found.key, 2);
assert_eq!(found.value, 2);
let found = avl.get_mut(&4).unwrap();
assert_eq!(found.key, 4);
assert_eq!(found.value, 4);
}
#[test]
fn contains() {
let avl = AvlTree::new(Node::new(5, 2));
assert!(avl.contains(&5));
assert!(!avl.contains(&10));
}
#[test]
fn insert() {
let mut avl = AvlTree::default();
for key in [1, 2, 3, 4, 5] {
let result = avl.insert(key, key);
assert!(
result.is_none(),
"inserting unique key returned {:?}",
result
);
}
assert_eq!(avl.len(), 5, "length is updated");
let root = avl.root.as_ref().unwrap();
assert_eq!(root.key, 2, "rebalancing of root node");
let right = root.right.as_ref().unwrap();
assert_eq!(right.key, 4, "rebalancing of right node");
assert_eq!(root.height, 3, "height is correct after rebalancing");
assert_eq!(
avl.insert(2, 12),
Some(2),
"inserting existing key returns previous value"
);
}
#[test]
fn remove() {
/* Tree after insertion rebalancing:
5
2 12
1 3 8 15
10
*/
let mut avl = Vec::from([5, 2, 12, 1, 3, 8, 15, 10])
.into_iter()
.collect::<AvlTree<_, _>>();
assert!(avl.remove(&20).is_none(), "non-existent key");
assert_eq!(avl.remove(&5), Some(5), "remove root key 5");
assert_eq!(avl.root.as_ref().unwrap().key, 8, "new root is correct");
assert_eq!(avl.remove(&8), Some(8), "remove root key 8");
assert_eq!(avl.root.as_ref().unwrap().key, 10, "new root is correct");
assert_eq!(avl.remove(&15), Some(15), "remove leaf with no children");
assert_eq!(
avl.remove(&10),
Some(10),
"remove root key 10, causing rebalance"
);
assert_eq!(avl.root.as_ref().unwrap().key, 2, "new root is correct");
assert_eq!(
avl.remove(&1),
Some(1),
"remove root key 1, causing rebalance"
);
assert_eq!(avl.root.as_ref().unwrap().key, 3, "new root is correct");
assert_eq!(avl.remove(&3), Some(3));
assert_eq!(avl.remove(&12), Some(12));
assert_eq!(avl.remove(&2), Some(2));
assert!(avl.root.is_none());
assert_eq!(avl.len(), 0);
}
#[test]
fn smallest() {
let mut avl = AvlTree::default();
avl.insert(5, "five");
avl.insert(2, "two");
avl.insert(1, "one");
let smallest_mut = avl.min_mut();
assert_eq!(smallest_mut.unwrap().key, 1);
let smallest = avl.min();
assert_eq!(smallest.unwrap().key, 1);
}
#[test]
fn largest() {
let mut avl = AvlTree::default();
avl.insert(5, "five");
avl.insert(10, "ten");
avl.insert(16, "sixteen");
let largest_mut = avl.max_mut();
assert_eq!(largest_mut.unwrap().key, 16);
let largest = avl.max();
assert_eq!(largest.unwrap().key, 16);
}
const TEST_NODES: [i32; 8] = [5, 2, 1, 3, 4, 7, 6, 8];
#[test]
fn successor() {
let mut avl = Vec::from(TEST_NODES).into_iter().collect::<AvlTree<_, _>>();
let mut key = 1;
for expected in [2, 3, 4, 5, 6, 7, 8] {
let suc = avl
.successor(&key)
.unwrap_or_else(|| panic!("Missing successor of {}", key));
assert_eq!(suc.key, expected);
key = suc.key;
}
assert!(avl.successor(&8).is_none());
}
#[test]
fn predecessor() {
let mut avl = Vec::from(TEST_NODES).into_iter().collect::<AvlTree<_, _>>();
let mut key = 8;
for expected in [7, 6, 5, 4, 3, 2, 1] {
let pre = avl
.predecessor(&key)
.unwrap_or_else(|| panic!("Missing predecessor of {}", key));
assert_eq!(pre.key, expected);
key = pre.key;
}
assert!(avl.predecessor(&1).is_none());
}
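  // A minimal end-to-end sketch of the map-style API exercised above
  // (insert/get/contains/remove/len); expected behaviour is taken only from
  // the methods defined in this file.
  #[test]
  fn usage_example() {
    let mut avl: AvlTree<i32, &str> = AvlTree::default();
    assert!(avl.insert(1, "one").is_none());
    assert!(avl.insert(2, "two").is_none());
    assert_eq!(avl.insert(2, "TWO"), Some("two"), "replacing returns the old value");
    assert!(avl.contains(&1));
    assert_eq!(avl.get(&2).map(|node| node.value), Some("TWO"));
    assert_eq!(avl.remove(&1), Some("one"));
    assert_eq!(avl.len(), 1);
  }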
}
| true
|
5d75b86f6718e5aff1b8c7011b279aa80a314f34
|
Rust
|
Jemimacat/occlum
|
/src/libos/src/net/socket_file/recv.rs
|
UTF-8
| 4,677
| 2.6875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
use super::*;
impl SocketFile {
    // TODO: need sockaddr type to implement send/sendto
/*
pub fn recv(&self, buf: &mut [u8], flags: MsgFlags) -> Result<usize> {
let (bytes_recvd, _) = self.recvfrom(buf, flags, None)?;
Ok(bytes_recvd)
}
pub fn recvfrom(&self, buf: &mut [u8], flags: MsgFlags, src_addr: Option<&mut [u8]>) -> Result<(usize, usize)> {
let (bytes_recvd, src_addr_len, _, _) = self.do_recvmsg(
&mut buf[..],
flags,
src_addr,
None,
)?;
Ok((bytes_recvd, src_addr_len))
}*/
pub fn recvmsg<'a, 'b>(&self, msg: &'b mut MsgHdrMut<'a>, flags: MsgFlags) -> Result<usize> {
        // Allocate a single data buffer that is big enough for all iovecs of msg.
// This is a workaround for the OCall that takes only one data buffer.
let mut data_buf = {
let data_buf_len = msg.get_iovs().total_bytes();
let data_vec = vec![0; data_buf_len];
data_vec.into_boxed_slice()
};
let (bytes_recvd, namelen_recvd, controllen_recvd, flags_recvd) = {
let data = &mut data_buf[..];
// Acquire mutable references to the name and control buffers
let (name, control) = msg.get_name_and_control_mut();
// Fill the data, the name, and the control buffers
self.do_recvmsg(data, flags, name, control)?
};
// Update the lengths and flags
msg.set_name_len(namelen_recvd)?;
msg.set_control_len(controllen_recvd)?;
msg.set_flags(flags_recvd);
let recv_data = &data_buf[..bytes_recvd];
// TODO: avoid this one extra copy due to the intermediate data buffer
msg.get_iovs_mut().scatter_copy_from(recv_data);
Ok(bytes_recvd)
}
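    /// Issues the recvmsg OCall and returns
    /// `(bytes_recvd, namelen_recvd, controllen_recvd, flags_recvd)`.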
fn do_recvmsg(
&self,
data: &mut [u8],
flags: MsgFlags,
mut name: Option<&mut [u8]>,
mut control: Option<&mut [u8]>,
) -> Result<(usize, usize, usize, MsgFlags)> {
// Prepare the arguments for OCall
// Host socket fd
let host_fd = self.host_fd;
// Name
let (msg_name, msg_namelen) = name.get_mut_ptr_and_len();
let msg_name = msg_name as *mut c_void;
let mut msg_namelen_recvd = 0_u32;
// Data
let msg_data = data.as_mut_ptr();
let msg_datalen = data.len();
// Control
let (msg_control, msg_controllen) = control.get_mut_ptr_and_len();
let msg_control = msg_control as *mut c_void;
let mut msg_controllen_recvd = 0;
// Flags
let flags = flags.to_u32() as i32;
let mut msg_flags_recvd = 0;
// Do OCall
let retval = try_libc!({
let mut retval = 0_isize;
let status = ocall_recvmsg(
&mut retval as *mut isize,
host_fd,
msg_name,
msg_namelen as u32,
&mut msg_namelen_recvd as *mut u32,
msg_data,
msg_datalen,
msg_control,
msg_controllen,
&mut msg_controllen_recvd as *mut usize,
&mut msg_flags_recvd as *mut i32,
flags,
);
assert!(status == sgx_status_t::SGX_SUCCESS);
// TODO: what if retval < 0 but buffers are modified by the
// untrusted OCall? We reset the potentially tampered buffers.
retval
});
// Check values returned from outside the enclave
let bytes_recvd = {
            // Guaranteed by try_libc!
debug_assert!(retval >= 0);
let retval = retval as usize;
// Check bytes_recvd returned from outside the enclave
assert!(retval <= data.len());
retval
};
let msg_namelen_recvd = msg_namelen_recvd as usize;
assert!(msg_namelen_recvd <= msg_namelen);
assert!(msg_controllen_recvd <= msg_controllen);
let flags_recvd = MsgFlags::from_u32(msg_flags_recvd as u32)?;
Ok((
bytes_recvd,
msg_namelen_recvd,
msg_controllen_recvd,
flags_recvd,
))
}
}
extern "C" {
fn ocall_recvmsg(
ret: *mut ssize_t,
fd: c_int,
msg_name: *mut c_void,
msg_namelen: libc::socklen_t,
msg_namelen_recv: *mut libc::socklen_t,
msg_data: *mut u8,
        msg_datalen: size_t,
msg_control: *mut c_void,
msg_controllen: size_t,
msg_controllen_recv: *mut size_t,
msg_flags: *mut c_int,
flags: c_int,
) -> sgx_status_t;
}
| true
|
21fa8eed61a5a70755d66150d3d141543c32aeb7
|
Rust
|
makoConstruct/TinyTemplate
|
/src/lib.rs
|
UTF-8
| 8,460
| 3.734375
| 4
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! ## TinyTemplate
//!
//! TinyTemplate is a minimal templating library originally designed for use in [Criterion.rs].
//! It deliberately does not provide all of the features of a full-power template engine, but in
//! return it provides a simple API, clear templating syntax, decent performance and very few
//! dependencies.
//!
//! ## Features
//!
//! The most important features are as follows (see the [syntax](syntax/index.html) module for full
//! details on the template syntax):
//!
//! * Rendering values - `{ myvalue }`
//! * Conditionals - `{{ if foo }}Foo is true{{ else }}Foo is false{{ endif }}`
//! * Loops - `{{ for value in row }}{value}{{ endfor }}`
//! * Customizable value formatters `{ value | my_formatter }`
//! * Macros `{{ call my_template with foo }}`
//!
//! ## Restrictions
//!
//! TinyTemplate was designed with the assumption that the templates are available as static strings,
//! either using string literals or the `include_str!` macro. Thus, it borrows `&str` slices from the
//! template text itself and uses them during the rendering process. Although it is possible to use
//! TinyTemplate with template strings loaded at runtime, this is not recommended.
//!
//! Additionally, TinyTemplate can only render templates into Strings. If you need to render a
//! template directly to a socket or file, TinyTemplate may not be right for you.
//!
//! ## Example
//!
//! ```
//! #[macro_use]
//! extern crate serde_derive;
//! extern crate tinytemplate;
//!
//! use tinytemplate::TinyTemplate;
//! use std::error::Error;
//!
//! #[derive(Serialize)]
//! struct Context {
//! name: String,
//! }
//!
//! static TEMPLATE : &'static str = "Hello {name}!";
//!
//! pub fn main() -> Result<(), Box<Error>> {
//! let mut tt = TinyTemplate::new();
//! tt.add_template("hello", TEMPLATE)?;
//!
//! let context = Context {
//! name: "World".to_string(),
//! };
//!
//! let rendered = tt.render("hello", &context)?;
//! # assert_eq!("Hello World!", &rendered);
//! println!("{}", rendered);
//!
//! Ok(())
//! }
//! ```
//!
//! [Criterion.rs]: https://github.com/bheisler/criterion.rs
//!
extern crate serde;
extern crate serde_json;
#[cfg(test)]
#[cfg_attr(test, macro_use)]
extern crate serde_derive;
mod compiler;
pub mod error;
mod instruction;
pub mod syntax;
mod template;
use error::*;
use serde::Serialize;
use serde_json::Value;
use std::collections::HashMap;
use std::fmt::Write;
use template::Template;
/// Type alias for closures which can be used as value formatters.
pub type ValueFormatter = dyn Fn(&Value, &mut String) -> Result<()>;
/// Appends `value` to `output`, performing HTML-escaping in the process.
pub fn escape(value: &str, output: &mut String) {
// Algorithm taken from the rustdoc source code.
let value_str = value;
let mut last_emitted = 0;
for (i, ch) in value.bytes().enumerate() {
match ch as char {
'<' | '>' | '&' | '\'' | '"' => {
output.push_str(&value_str[last_emitted..i]);
let s = match ch as char {
'>' => ">",
'<' => "<",
'&' => "&",
'\'' => "'",
'"' => """,
_ => unreachable!(),
};
output.push_str(s);
last_emitted = i + 1;
}
_ => {}
}
}
if last_emitted < value_str.len() {
output.push_str(&value_str[last_emitted..]);
}
}
/// The format function is used as the default value formatter for all values unless the user
/// specifies another. It is provided publicly so that it can be called as part of custom formatters.
/// Values are formatted as follows:
///
/// * `Value::Null` => the empty string
/// * `Value::Bool` => true|false
/// * `Value::Number` => the number, as formatted by `serde_json`.
/// * `Value::String` => the string, HTML-escaped
///
/// Arrays and objects are not formatted, and attempting to do so will result in a rendering error.
pub fn format(value: &Value, output: &mut String) -> Result<()> {
match value {
Value::Null => Ok(()),
Value::Bool(b) => {
write!(output, "{}", b)?;
Ok(())
}
Value::Number(n) => {
write!(output, "{}", n)?;
Ok(())
}
Value::String(s) => {
escape(s, output);
Ok(())
}
_ => Err(unprintable_error()),
}
}
/// Identical to [`format`](fn.format.html) except that this does not perform HTML escaping.
pub fn format_unescaped(value: &Value, output: &mut String) -> Result<()> {
match value {
Value::Null => Ok(()),
Value::Bool(b) => {
write!(output, "{}", b)?;
Ok(())
}
Value::Number(n) => {
write!(output, "{}", n)?;
Ok(())
}
Value::String(s) => {
output.push_str(s);
Ok(())
}
_ => Err(unprintable_error()),
}
}
/// The TinyTemplate struct is the entry point for the TinyTemplate library. It contains the
/// template and formatter registries and provides functions to render templates as well as to
/// register templates and formatters.
pub struct TinyTemplate<'template> {
templates: HashMap<&'template str, Template<'template>>,
formatters: HashMap<&'template str, Box<ValueFormatter>>,
default_formatter: &'template ValueFormatter,
}
impl<'template> TinyTemplate<'template> {
/// Create a new TinyTemplate registry. The returned registry contains no templates, and has
/// [`format_unescaped`](fn.format_unescaped.html) registered as a formatter named "unescaped".
pub fn new() -> TinyTemplate<'template> {
let mut tt = TinyTemplate {
templates: HashMap::default(),
formatters: HashMap::default(),
default_formatter: &format,
};
tt.add_formatter("unescaped", format_unescaped);
tt
}
/// Parse and compile the given template, then register it under the given name.
pub fn add_template(&mut self, name: &'template str, text: &'template str) -> Result<()> {
let template = Template::compile(text)?;
self.templates.insert(name, template);
Ok(())
}
    /// Changes the default formatter from [`format`](fn.format.html) to `formatter`. Useful in combination with [`format_unescaped`](fn.format_unescaped.html) to deactivate HTML escaping.
pub fn set_default_formatter<F>(&mut self, formatter: &'template F)
where
F: 'static + Fn(&Value, &mut String) -> Result<()>,
{
self.default_formatter = formatter;
}
/// Register the given formatter function under the given name.
pub fn add_formatter<F>(&mut self, name: &'template str, formatter: F)
where
F: 'static + Fn(&Value, &mut String) -> Result<()>,
{
self.formatters.insert(name, Box::new(formatter));
}
/// Render the template with the given name using the given context object. The context
/// object must implement `serde::Serialize` as it will be converted to `serde_json::Value`.
pub fn render<C>(&self, template: &str, context: &C) -> Result<String>
where
C: Serialize,
{
let value = serde_json::to_value(context)?;
match self.templates.get(template) {
Some(tmpl) => tmpl.render(
&value,
&self.templates,
&self.formatters,
self.default_formatter,
),
None => Err(Error::GenericError {
msg: format!("Unknown template '{}'", template),
}),
}
}
}
impl<'template> Default for TinyTemplate<'template> {
fn default() -> TinyTemplate<'template> {
TinyTemplate::new()
}
}
#[cfg(test)]
mod test {
use super::*;
#[derive(Serialize)]
struct Context {
name: String,
}
static TEMPLATE: &'static str = "Hello {name}!";
#[test]
pub fn test_set_default_formatter() {
let mut tt = TinyTemplate::new();
tt.add_template("hello", TEMPLATE).unwrap();
tt.set_default_formatter(&format_unescaped);
let context = Context {
name: "<World>".to_string(),
};
let rendered = tt.render("hello", &context).unwrap();
assert_eq!(rendered, "Hello <World>!")
}
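    // A quick check of the `escape` helper; the expected output follows directly
    // from the replacement table in `escape` above, nothing else is assumed.
    #[test]
    pub fn test_escape() {
        let mut output = String::new();
        escape("a < b & \"c\"", &mut output);
        assert_eq!(output, "a < b & "c"");
    }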
}
| true
|
a1c2911ad4d44c357e0efc9ba4995d22b14a1649
|
Rust
|
Leinnan/chrustos
|
/src/color.rs
|
UTF-8
| 623
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#[allow(unused)]
#[derive(Copy, Clone)]
#[repr(u8)]
pub enum Color {
Black = 0x0,
White = 0xF,
Blue = 0x1,
BrightBlue = 0x9,
Green = 0x2,
BrightGreen = 0xA,
Cyan = 0x3,
BrightCyan = 0xB,
Red = 0x4,
BrightRed = 0xC,
Magenta = 0x5,
BrightMagenta = 0xD,
Brown = 0x6,
Yellow = 0xE,
Gray = 0x7,
DarkGray = 0x8,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(transparent)]
pub struct ColorCode(u8);
impl ColorCode {
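    /// Packs the two colours into a single attribute byte:
    /// background in the high nibble, foreground in the low nibble.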
pub fn new(foreground: Color, background: Color) -> ColorCode {
ColorCode((background as u8) << 4 | (foreground as u8))
}
}
| true
|
f94c6f40bc44f6c1c637d9238939a51b51cad1f8
|
Rust
|
xd009642/tarpaulin
|
/tests/data/loops/src/lib.rs
|
UTF-8
| 244
| 2.609375
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#![allow(dead_code)]
#[test]
fn it_works() {
let mut x = 0i32;
loop {
x += 1;
if x > 10 {
break;
}
}
while x > 0 {
x -= 1;
}
for _y in 0..10 {
x += 1;
}
}
| true
|
bbd26778d77930c9ec126492fb3d08c36a14899c
|
Rust
|
piotrek-szczygiel/klocki
|
/src/settings.rs
|
UTF-8
| 10,975
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
use std::{fs, path::PathBuf};
use dirs;
use ggez::{conf::NumSamples, graphics::Image, Context, GameResult};
use imgui::{self, im_str, ComboBox, FontId, ImStr, ImString, Slider, Ui};
use serde::{Deserialize, Serialize};
use toml;
use crate::utils;
#[derive(Serialize, Deserialize)]
pub struct Settings {
pub graphics: Graphics,
pub gameplay: Gameplay,
pub audio: Audio,
pub input: Input,
}
#[derive(Serialize, Deserialize)]
pub struct Graphics {
pub window_size: (u32, u32),
pub fullscreen: bool,
pub vsync: bool,
pub animated_background: bool,
pub hide_menu: bool,
pub multi_sampling: NumSamples,
}
#[derive(Serialize, Deserialize)]
pub struct Gameplay {
pub block_size: i32,
pub ghost_piece: u32,
pub entry_delay: u32,
pub lock_delay: u32,
pub clear_delay: u32,
pub skin: String,
pub stack_grid: bool,
pub stack_outline: bool,
}
#[derive(Serialize, Deserialize)]
pub struct Audio {
pub music_volume: u32,
pub sfx_volume: u32,
}
#[derive(Serialize, Deserialize)]
pub struct Input {
pub das: u32,
pub arr: u32,
}
#[derive(Default)]
pub struct SettingsState {
pub skins: Vec<PathBuf>,
pub skins_imstr: Vec<ImString>,
pub skin_id: usize,
pub skin_switched: bool,
pub restart: bool,
}
static SAMPLINGS: [NumSamples; 6] = [
NumSamples::Zero,
NumSamples::One,
NumSamples::Two,
NumSamples::Four,
NumSamples::Eight,
NumSamples::Sixteen,
];
impl Settings {
pub fn new() -> Settings {
if let Some(settings) = Settings::load() {
settings
} else {
Settings {
graphics: Graphics {
window_size: (800, 800),
fullscreen: false,
multi_sampling: NumSamples::Zero,
vsync: true,
animated_background: true,
hide_menu: false,
},
gameplay: Gameplay {
block_size: 43,
ghost_piece: 10,
entry_delay: 0,
lock_delay: 500,
clear_delay: 250,
skin: String::from("nblox.png"),
stack_grid: true,
stack_outline: true,
},
audio: Audio {
music_volume: 50,
sfx_volume: 50,
},
input: Input { das: 133, arr: 33 },
}
}
}
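    /// Path of the on-disk configuration file: `<local data dir>/klocki/config.toml`.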
fn path() -> PathBuf {
let mut path = dirs::data_local_dir().unwrap_or_default();
path.push("klocki");
path.push("config.toml");
path
}
pub fn save(&self) {
let toml = toml::to_string(self).unwrap();
let path = Settings::path();
fs::write(&path, toml).unwrap_or_else(|e| panic!("Unable to save settings: {:?}", e));
log::info!("Saved settings to: {:?}", &path);
}
fn load() -> Option<Settings> {
let path = Settings::path();
if let Ok(contents) = fs::read_to_string(&path) {
if let Ok(settings) = toml::from_str(&contents) {
log::info!("Loaded settings from: {:?}", &path);
return Some(settings);
} else {
log::error!("Error while reading config file");
}
} else {
log::warn!("Unable to find config file");
}
None
}
pub fn tileset(&self, ctx: &mut Context, state: &SettingsState) -> GameResult<Image> {
Image::new(
ctx,
utils::path(ctx, state.skins[state.skin_id].to_str().unwrap()),
)
}
pub fn draw(&mut self, state: &mut SettingsState, ui: &Ui, bold: FontId) {
let pos = 120.0;
let header_color = [0.6, 0.8, 1.0, 1.0];
if let Some(menu) = ui.begin_menu(im_str!("Settings"), true) {
ui.separator();
let id = ui.push_font(bold);
ui.text_colored(header_color, im_str!("Graphics"));
id.pop(&ui);
ui.separator();
{
let mut restart_popup = false;
ui.text(im_str!("Fullscreen"));
ui.same_line(pos);
ui.checkbox(im_str!("<F11>"), &mut self.graphics.fullscreen);
let mut sampling_id = SAMPLINGS
.iter()
.position(|&s| s == self.graphics.multi_sampling)
.unwrap();
ui.text(im_str!("V-Sync"));
ui.same_line(pos);
let id = ui.push_id(im_str!("vsync"));
if ui.checkbox(im_str!(""), &mut self.graphics.vsync) {
restart_popup = true;
}
id.pop(&ui);
ui.text(im_str!("Background"));
ui.same_line(pos);
let id = ui.push_id(im_str!("background"));
ui.checkbox(im_str!(""), &mut self.graphics.animated_background);
id.pop(&ui);
ui.text(im_str!("Hide menu"));
ui.same_line(pos);
if ui.checkbox(im_str!("<Left Alt>"), &mut self.graphics.hide_menu) {
ui.open_popup(im_str!("Menu visibility information"));
}
ui.text(im_str!("Sampling"));
ui.same_line(pos);
let id = ui.push_id(im_str!("sampling"));
if ComboBox::new(im_str!("")).build_simple_string(
&ui,
&mut sampling_id,
&[
im_str!("Off"),
im_str!("1x"),
im_str!("2x"),
im_str!("4x"),
im_str!("8x"),
im_str!("16x"),
],
) {
self.graphics.multi_sampling = SAMPLINGS[sampling_id];
restart_popup = true;
}
id.pop(&ui);
if restart_popup {
ui.open_popup(im_str!("Restart needed"));
}
}
ui.separator();
let id = ui.push_font(bold);
ui.text_colored(header_color, im_str!("Gameplay"));
id.pop(&ui);
ui.separator();
{
ui.text(im_str!("Ghost piece"));
ui.same_line(pos);
let id = ui.push_id(im_str!("ghost_piece"));
Slider::new(im_str!(""), 0..=100).build(&ui, &mut self.gameplay.ghost_piece);
id.pop(&ui);
ui.text(im_str!("Block size"));
ui.same_line(pos);
let id = ui.push_id(im_str!("block_size"));
Slider::new(im_str!(""), 24..=43).build(&ui, &mut self.gameplay.block_size);
id.pop(&ui);
ui.text(im_str!("Entry delay"));
ui.same_line(pos);
let id = ui.push_id(im_str!("entry_delay"));
Slider::new(im_str!(""), 0..=500).build(&ui, &mut self.gameplay.entry_delay);
id.pop(&ui);
ui.text(im_str!("Lock delay"));
ui.same_line(pos);
let id = ui.push_id(im_str!("lock_delay"));
Slider::new(im_str!(""), 0..=1000).build(&ui, &mut self.gameplay.lock_delay);
id.pop(&ui);
ui.text(im_str!("Clear delay"));
ui.same_line(pos);
let id = ui.push_id(im_str!("clear_delay"));
Slider::new(im_str!(""), 0..=500).build(&ui, &mut self.gameplay.clear_delay);
id.pop(&ui);
ui.text(im_str!("Skin"));
ui.same_line(pos);
let skins: Vec<&ImStr> = state.skins_imstr.iter().map(|s| s.as_ref()).collect();
let id = ui.push_id(im_str!("skins"));
if ComboBox::new(im_str!("")).build_simple_string(&ui, &mut state.skin_id, &skins) {
state.skin_switched = true;
self.gameplay.skin = String::from(
state
.skins_imstr
.get(state.skin_id)
.unwrap_or(&ImString::new(""))
.to_str(),
);
}
id.pop(&ui);
ui.text(im_str!("Stack grid"));
ui.same_line(pos);
let id = ui.push_id(im_str!("stack_grid"));
ui.checkbox(im_str!(""), &mut self.gameplay.stack_grid);
id.pop(&ui);
ui.text(im_str!("Stack outline"));
ui.same_line(pos);
let id = ui.push_id(im_str!("stack_outline"));
ui.checkbox(im_str!(""), &mut self.gameplay.stack_outline);
id.pop(&ui);
}
ui.separator();
let id = ui.push_font(bold);
ui.text_colored(header_color, im_str!("Audio"));
id.pop(&ui);
ui.separator();
{
ui.text(im_str!("Music"));
ui.same_line(pos);
let id = ui.push_id(im_str!("music"));
Slider::new(im_str!(""), 0..=100).build(&ui, &mut self.audio.music_volume);
id.pop(&ui);
ui.text(im_str!("SFX"));
ui.same_line(pos);
let id = ui.push_id(im_str!("sfx"));
Slider::new(im_str!(""), 0..=100).build(&ui, &mut self.audio.sfx_volume);
id.pop(&ui);
}
ui.separator();
let id = ui.push_font(bold);
ui.text_colored(header_color, im_str!("Input"));
id.pop(&ui);
ui.separator();
{
ui.text(im_str!("DAS"));
ui.same_line(pos);
let id = ui.push_id(im_str!("das"));
Slider::new(im_str!(""), 100..=500).build(&ui, &mut self.input.das);
id.pop(&ui);
ui.text(im_str!("ARR"));
ui.same_line(pos);
let id = ui.push_id(im_str!("arr"));
Slider::new(im_str!(""), 5..=200).build(&ui, &mut self.input.arr);
id.pop(&ui);
}
ui.popup_modal(im_str!("Restart needed")).build(|| {
ui.text(im_str!(
"You need to restart the game to apply these settings"
));
ui.separator();
if ui.button(im_str!("Cancel"), [0.0, 0.0]) {
ui.close_current_popup();
}
ui.same_line_with_spacing(0.0, 10.0);
if ui.button(im_str!("Restart the game"), [0.0, 0.0]) {
state.restart = true;
}
});
menu.end(ui);
}
}
}
| true
|
f6bff8769d63be056fb1c3dc44c8af3ce86d7ec4
|
Rust
|
0e4ef622/aoc
|
/2021/day02/src/solution.rs
|
UTF-8
| 955
| 2.8125
| 3
|
[] |
no_license
|
use std::collections::*;
use rand::random;
use serde_scan::scan as s;
use util::*;
pub fn part1(input: &str) -> impl std::fmt::Display {
let mut depth = 0;
let mut pos = 0;
for line in input.lines() {
let mut w = line.split_whitespace();
let d = w.next().unwrap();
let s = w.next().unwrap().parse::<i64>().unwrap();
match d {
"forward" => pos += s,
"down" => depth += s,
_ => depth -= s,
}
}
depth*pos
}
pub fn part2(input: &str) -> impl std::fmt::Display {
let mut depth = 0;
let mut pos = 0;
let mut aim = 0;
for line in input.lines() {
let mut w = line.split_whitespace();
let d = w.next().unwrap();
let s = w.next().unwrap().parse::<i64>().unwrap();
match d {
"forward" => {pos += s; depth += aim*s; }
"down" => aim += s,
_ => aim -= s,
}
}
depth*pos
}
| true
|
3831ad38df7dea170773f157aad87a4dceb88629
|
Rust
|
bluejekyll/tokio
|
/tokio/src/tests/backoff.rs
|
UTF-8
| 721
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
pub(crate) struct Backoff(usize, bool);
pub(crate) fn backoff(n: usize) -> impl Future<Output = ()> {
Backoff(n, false)
}
/// Back off, but clone the waker each time
pub(crate) fn backoff_clone(n: usize) -> impl Future<Output = ()> {
Backoff(n, true)
}
impl Future for Backoff {
type Output = ();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
if self.0 == 0 {
return Poll::Ready(());
}
self.0 -= 1;
if self.1 {
cx.waker().clone().wake();
} else {
cx.waker().wake_by_ref();
}
Poll::Pending
}
}
| true
|
25360c12d03ec445cbc49b56bd03aa50baf1ca65
|
Rust
|
busarovalex/accounting
|
/src/persistence/migrate.rs
|
UTF-8
| 3,397
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
use failure::Error as FailureError;
use serde_json::Value;
use uuid::Uuid;
use std::fs::File;
use std::io::{Read, Write};
use std::path::PathBuf;
use persistence::error::PersistenceError;
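/// A migration applied to every JSON object line of a table file: remove a field,
/// add a field with a fixed value, or generate a fresh UUID under the given field name.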
#[derive(Debug)]
pub enum Migration {
RemoveField(String),
AddField(String, Value),
GenerateUid(String),
}
pub fn migrate(table_file_path: PathBuf, migration: Migration) -> Result<(), FailureError> {
info!(
"migrating table {:?} with migration: {:?}",
&table_file_path, &migration
);
let migration_data = migrate_to_string(table_file_path.clone(), migration)?;
let mut overridden = File::create(table_file_path)?;
overridden.write_all(migration_data.as_bytes())?;
Ok(())
}
fn migrate_to_string(
table_file_path: PathBuf,
migration: Migration,
) -> Result<String, FailureError> {
let mut table_file = File::open(&table_file_path)?;
let mut content = String::with_capacity(2048);
table_file.read_to_string(&mut content)?;
let mut result = String::with_capacity(2048);
for line in content.lines() {
let entry: Value = ::serde_json::from_str(line)?;
trace!("{:?}", &entry);
let migrated_entry = migration.apply(entry)?;
let json_serialized = ::serde_json::to_string(&migrated_entry)?;
result.push_str(&json_serialized);
result.push('\n');
}
Ok(result)
}
impl Migration {
pub fn remove(field_name: String) -> Migration {
Migration::RemoveField(field_name)
}
pub fn add_from_str(field_name: String, value: &str) -> Result<Migration, FailureError> {
let value: Value = ::serde_json::from_str(value)?;
Ok(Migration::AddField(field_name, value))
}
pub fn generate_uid(field_name: String) -> Migration {
Migration::GenerateUid(field_name)
}
fn apply(&self, value: Value) -> Result<Value, FailureError> {
let migrated = match value {
Value::Object(mut key_value_map) => match self {
&Migration::RemoveField(ref field_name) => {
                    if key_value_map.remove(field_name).is_none() {
return Err(PersistenceError::NoSuchKeyInJsonValue.into());
}
Value::Object(key_value_map)
}
&Migration::AddField(ref field_name, ref field_value) => {
if key_value_map
.insert(field_name.clone(), field_value.clone())
.is_some()
{
return Err(PersistenceError::KeyWasAlreadyInObject.into());
}
Value::Object(key_value_map)
}
&Migration::GenerateUid(ref field_name) => {
let uid = format!("{}", Uuid::new_v4());
if key_value_map
.insert(field_name.clone(), Value::String(uid))
.is_some()
{
return Err(PersistenceError::KeyWasAlreadyInObject.into());
}
Value::Object(key_value_map)
}
},
_ => {
error!("json value {:?} is not an object", &value);
return Err(PersistenceError::JsonValueIsNotObject.into());
}
};
Ok(migrated)
}
}
| true
|
2c47c5cb07b7e7b086a0ba2062298ae0c83fe5b0
|
Rust
|
deps-rs/deps.rs
|
/libs/badge/badge.rs
|
UTF-8
| 7,998
| 2.546875
| 3
|
[
"Bitstream-Vera",
"LicenseRef-scancode-public-domain",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
//! Simple badge generator
use base64::display::Base64Display;
use once_cell::sync::Lazy;
use rusttype::{point, Font, Point, PositionedGlyph, Scale};
use serde::Deserialize;
const FONT_DATA: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/DejaVuSans.ttf"));
const FONT_SIZE: f32 = 11.;
const SCALE: Scale = Scale {
x: FONT_SIZE,
y: FONT_SIZE,
};
/// Badge style name.
///
/// Default style is "flat".
///
/// Matches style names from shields.io.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum BadgeStyle {
Flat,
FlatSquare,
ForTheBadge,
}
impl Default for BadgeStyle {
fn default() -> Self {
Self::Flat
}
}
#[derive(Debug, Clone)]
pub struct BadgeOptions {
/// Subject will be displayed on the left side of badge
pub subject: String,
/// Status will be displayed on the right side of badge
pub status: String,
/// HTML color of badge
pub color: String,
/// Style of badge.
pub style: BadgeStyle,
}
impl Default for BadgeOptions {
fn default() -> BadgeOptions {
BadgeOptions {
subject: "build".to_owned(),
status: "passing".to_owned(),
color: "#4c1".to_owned(),
style: BadgeStyle::Flat,
}
}
}
struct BadgeStaticData {
font: Font<'static>,
scale: Scale,
offset: Point<f32>,
}
static DATA: Lazy<BadgeStaticData> = Lazy::new(|| {
let font = Font::try_from_bytes(FONT_DATA).expect("failed to parse font collection");
let v_metrics = font.v_metrics(SCALE);
let offset = point(0.0, v_metrics.ascent);
BadgeStaticData {
font,
scale: SCALE,
offset,
}
});
pub struct Badge {
options: BadgeOptions,
}
impl Badge {
pub fn new(options: BadgeOptions) -> Badge {
Badge { options }
}
pub fn to_svg_data_uri(&self) -> String {
format!(
"data:image/svg+xml;base64,{}",
Base64Display::with_config(self.to_svg().as_bytes(), base64::STANDARD)
)
}
pub fn to_svg(&self) -> String {
match self.options.style {
BadgeStyle::Flat => self.to_flat_svg(),
BadgeStyle::FlatSquare => self.to_flat_square_svg(),
BadgeStyle::ForTheBadge => self.to_for_the_badge_svg(),
}
}
pub fn to_flat_svg(&self) -> String {
let left_width = self.calculate_width(&self.options.subject) + 6;
let right_width = self.calculate_width(&self.options.status) + 6;
let total_width = left_width + right_width;
let left_center = left_width / 2;
let right_center = left_width + (right_width / 2);
let color = &self.options.color;
let subject = &self.options.subject;
let status = &self.options.status;
let svg = format!(
r###"<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="{total_width}" height="20">
<linearGradient id="smooth" x2="0" y2="100%">
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
<stop offset="1" stop-opacity=".1"/>
</linearGradient>
<mask id="round">
<rect width="{total_width}" height="20" rx="3" fill="#fff"/>
</mask>
<g mask="url(#round)">
<rect width="{left_width}" height="20" fill="#555"/>
<rect width="{right_width}" height="20" x="{left_width}" fill="{color}"/>
<rect width="{total_width}" height="20" fill="url(#smooth)"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11" text-rendering="geometricPrecision">
<text x="{left_center}" y="15" fill="#010101" fill-opacity=".3">{subject}</text>
<text x="{left_center}" y="14">{subject}</text>
<text x="{right_center}" y="15" fill="#010101" fill-opacity=".3">{status}</text>
<text x="{right_center}" y="14">{status}</text>
</g>
</svg>"###
);
svg
}
pub fn to_flat_square_svg(&self) -> String {
let left_width = self.calculate_width(&self.options.subject) + 6;
let right_width = self.calculate_width(&self.options.status) + 6;
let total_width = left_width + right_width;
let left_center = left_width / 2;
let right_center = left_width + (right_width / 2);
let color = &self.options.color;
let subject = &self.options.subject;
let status = &self.options.status;
let svg = format!(
r###"<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="{total_width}" height="20" text-rendering="geometricPrecision">
<g>
<rect width="{left_width}" height="20" fill="#555"/>
<rect width="{right_width}" height="20" x="{left_width}" fill="{color}"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
<text x="{left_center}" y="14">{subject}</text>
<text x="{right_center}" y="14">{status}</text>
</g>
</svg>
"###,
);
svg
}
pub fn to_for_the_badge_svg(&self) -> String {
let left_width = self.calculate_width(&self.options.subject) + 38;
let right_width = self.calculate_width(&self.options.status) + 38;
let total_width = left_width + right_width;
let left_center = left_width / 2;
let right_center = left_width + (right_width / 2);
let color = &self.options.color;
let subject = self.options.subject.to_uppercase();
let status = self.options.status.to_uppercase();
let svg = format!(
r###"<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="{total_width}" height="28">
<g>
<rect width="{left_width}" height="28" fill="#555"/>
<rect width="{right_width}" height="28" x="{left_width}" fill="{color}"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" font-size="10" text-rendering="geometricPrecision">
<text x="{left_center}" y="18" letter-spacing="1">{subject}</text>
<text x="{right_center}" y="18" font-weight="bold" letter-spacing="1">{status}</text>
</g>
</svg>
"###,
);
svg
}
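    /// Estimates the rendered width of `text` in pixels by laying out its glyphs with the
    /// bundled DejaVu Sans font, plus a small per-character letter-spacing allowance.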
fn calculate_width(&self, text: &str) -> u32 {
let glyphs: Vec<PositionedGlyph> =
DATA.font.layout(text, DATA.scale, DATA.offset).collect();
let width = glyphs
.iter()
.rev()
.filter_map(|g| {
g.pixel_bounding_box()
.map(|b| b.min.x as f32 + g.unpositioned().h_metrics().advance_width)
})
.next()
.unwrap_or(0.0);
(width + ((text.len() as f32 - 1f32) * 1.3)).ceil() as u32
}
}
#[cfg(test)]
mod tests {
use super::*;
fn options() -> BadgeOptions {
BadgeOptions::default()
}
#[test]
fn test_calculate_width() {
let badge = Badge::new(options());
assert_eq!(badge.calculate_width("build"), 29);
assert_eq!(badge.calculate_width("passing"), 44);
}
#[test]
#[ignore]
fn test_to_svg() {
use std::fs::File;
use std::io::Write;
let mut file = File::create("test.svg").unwrap();
let options = BadgeOptions {
subject: "latest".to_owned(),
status: "v4.0.0-beta.21".to_owned(),
style: BadgeStyle::ForTheBadge,
color: "#fe7d37".to_owned(),
};
let badge = Badge::new(options);
file.write_all(badge.to_svg().as_bytes()).unwrap();
}
#[test]
fn deserialize_badge_style() {
#[derive(Debug, Deserialize)]
struct Foo {
style: BadgeStyle,
}
let style = serde_urlencoded::from_str::<Foo>("style=flat").unwrap();
assert_eq!(style.style, BadgeStyle::Flat);
let style = serde_urlencoded::from_str::<Foo>("style=flat-square").unwrap();
assert_eq!(style.style, BadgeStyle::FlatSquare);
}
}
| true
|
1e94508c3c59bb349803b408f6f21d93d303f9ca
|
Rust
|
usmansaleem/filecoin_wallet_exporter
|
/src/main.rs
|
UTF-8
| 1,159
| 2.953125
| 3
|
[] |
no_license
|
use anyhow::{anyhow, Result};
use serde_json::Value;
use std::fs;
use std::path::PathBuf;
use structopt::StructOpt;
extern crate base64;
extern crate hex;
#[derive(StructOpt, Debug)]
#[structopt(
name = "fc_wallet_exporter",
about = "Export Private Key from FileCoin Wallet"
)]
struct Cli {
/// The path to the filecoin wallet file to read
#[structopt(parse(from_os_str))]
wallet_path: PathBuf,
}
fn main() -> Result<()> {
let cli = Cli::from_args();
let encoded_wallet = fs::read_to_string(cli.wallet_path)?;
println!("Encoded Wallet: {}", encoded_wallet);
let decoded_bytes = hex::decode(&encoded_wallet)?;
let value: Value = serde_json::from_slice(&decoded_bytes)?;
println!("JSON: {}", value);
// the private key is base64 encoded in little endian order
if let Some(private_key_encoded) = value["PrivateKey"].as_str() {
let mut pk = base64::decode(private_key_encoded)?;
// reverse bytes to make it big endian
pk.reverse();
println!("Private Key (HEX): {}", hex::encode(pk));
Ok(())
} else {
Err(anyhow!("PrivateKey in Json not found"))
}
}
| true
|
978258b26f04e1d18576305675f02a088bd819fe
|
Rust
|
pengguoguo/protobuf
|
/rust/lib.rs
|
UTF-8
| 3,068
| 2.59375
| 3
|
[
"LicenseRef-scancode-protobuf"
] |
permissive
|
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//! Rust Protobuf Runtime
#[cfg(cpp_kernel)]
pub extern crate cpp as __runtime;
#[cfg(upb_kernel)]
pub extern crate upb as __runtime;
pub use __runtime::Arena;
use std::ops::Deref;
use std::ptr::NonNull;
use std::slice;
/// Represents serialized Protobuf wire format data. It's typically produced by
/// `<Message>.serialize()`.
pub struct SerializedData {
data: NonNull<u8>,
len: usize,
arena: *mut Arena,
}
impl SerializedData {
pub unsafe fn from_raw_parts(arena: *mut Arena, data: NonNull<u8>, len: usize) -> Self {
SerializedData { arena, data, len }
}
}
impl Deref for SerializedData {
type Target = [u8];
fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.data.as_ptr() as *const _, self.len) }
}
}
impl Drop for SerializedData {
fn drop(&mut self) {
unsafe { Arena::free(self.arena) };
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_serialized_data_roundtrip() {
let arena = unsafe { Arena::new() };
let original_data = b"Hello world";
let len = original_data.len();
let serialized_data = unsafe {
SerializedData::from_raw_parts(
arena,
NonNull::new(original_data as *const _ as *mut _).unwrap(),
len,
)
};
assert_eq!(&*serialized_data, b"Hello world");
}
}
| true
|
3389993c74b5b07a999a9815467471a4decd5354
|
Rust
|
jmagnuson/rust_core_bluetooth
|
/src/uuid.rs
|
UTF-8
| 8,244
| 3.109375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use objc::*;
use objc::runtime::*;
use static_assertions::assert_impl_all;
use std::fmt;
use std::ops::{Deref, DerefMut};
use crate::platform::*;
use std::str::FromStr;
const BASE_UUID_BYTES: [u8; 16] = [0, 0, 0, 0, 0, 0, 0x10, 0, 0x80, 0, 0, 0x80, 0x5F, 0x9B, 0x34, 0xFB];
/// Bluetooth-tailored UUID.
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Uuid([u8; 16]);
impl Uuid {
/// Returns UUID with all bytes set to zero.
pub const fn zeroed() -> Self {
Self([0; 16])
}
/// Returns the Base UUID (`00000000-0000-1000-8000-00805F9B34FB`) as defined by the specs.
pub const fn base() -> Self {
Self(BASE_UUID_BYTES)
}
/// Constructs instance from the specified bytes.
pub const fn from_bytes(bytes: [u8; 16]) -> Self {
Self(bytes)
}
/// Constructs instance from the specified slice of variable length.
/// The supported lengths are 2 for `uuid16`, 4 for `uuid32` and 16 for a standard UUID.
///
/// # Panics
///
/// Panics if `bytes` length is not 2, 4 or 16.
pub fn from_slice(bytes: &[u8]) -> Self {
Self(match bytes.len() {
2 => {
let mut r = BASE_UUID_BYTES;
r[2] = bytes[0];
r[3] = bytes[1];
r
}
4 => {
let mut r = BASE_UUID_BYTES;
r[0] = bytes[0];
r[1] = bytes[1];
r[2] = bytes[2];
r[3] = bytes[3];
r
}
16 => {
let mut r = [0; 16];
r.copy_from_slice(bytes);
r
}
_ => panic!("invalid slice len {}, expected 2, 4 or 16 bytes", bytes.len()),
})
}
/// Returns inner bytes array.
pub fn bytes(&self) -> [u8; 16] {
self.0
}
    /// Returns the shortest possible UUID that is equivalent to this UUID.
pub fn shorten(&self) -> &[u8] {
if self.0[4..] == BASE_UUID_BYTES[4..] {
if self.0[0..2] == BASE_UUID_BYTES[0..2] {
&self.0[2..4]
} else {
&self.0[..4]
}
} else {
&self.0
}
}
}
assert_impl_all!(Uuid: Send, Sync);
impl Deref for Uuid {
type Target = [u8];
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for Uuid {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl fmt::Display for Uuid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"{:02x}{:02x}{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}",
self.0[0], self.0[1], self.0[2], self.0[3],
self.0[4], self.0[5], self.0[6], self.0[7],
self.0[8], self.0[9], self.0[10], self.0[11],
self.0[12], self.0[13], self.0[14], self.0[15])
}
}
impl fmt::Debug for Uuid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Uuid({})", self)
}
}
impl From<[u8; 16]> for Uuid {
fn from(v: [u8; 16]) -> Self {
Self::from_bytes(v)
}
}
impl From<&[u8]> for Uuid {
fn from(v: &[u8]) -> Self {
Self::from_slice(v)
}
}
impl FromStr for Uuid {
type Err = UuidParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.as_bytes();
if s.len() != 36 {
return Err(UuidParseError(()));
}
const PARTS: [(usize, usize); 4] = [(8, 4), (13, 6), (18, 8), (23, 10)];
if s[PARTS[0].0] != b'-'
|| s[PARTS[1].0] != b'-'
|| s[PARTS[2].0] != b'-'
|| s[PARTS[3].0] != b'-'
{
return Err(UuidParseError(()));
}
fn decode(src: &[u8], dst: &mut [u8]) -> Result<(), UuidParseError> {
debug_assert_eq!(src.len() % 2, 0);
debug_assert_eq!(dst.len(), src.len() / 2);
fn dig(c: u8) -> Result<u8, UuidParseError> {
Ok(match c {
b'0'..=b'9' => c - b'0',
b'a'..=b'f' => c - b'a' + 10,
b'A'..=b'F' => c - b'A' + 10,
_ => return Err(UuidParseError(())),
})
}
for (s, d) in src.chunks(2).zip(dst.iter_mut()) {
*d = (dig(s[0])? << 4) | dig(s[1])?;
}
Ok(())
}
let mut buf = [0; 16];
decode(&s[..PARTS[0].0], &mut buf[..PARTS[0].1])?;
decode(&s[PARTS[0].0 + 1..PARTS[1].0], &mut buf[PARTS[0].1..PARTS[1].1])?;
decode(&s[PARTS[1].0 + 1..PARTS[2].0], &mut buf[PARTS[1].1..PARTS[2].1])?;
decode(&s[PARTS[2].0 + 1..PARTS[3].0], &mut buf[PARTS[2].1..PARTS[3].1])?;
decode(&s[PARTS[3].0 + 1..], &mut buf[PARTS[3].1..])?;
Ok(buf.into())
}
}
#[derive(Debug)]
pub struct UuidParseError(());
impl fmt::Display for UuidParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "invalid UUID string")
}
}
impl std::error::Error for UuidParseError {}
object_ptr_wrapper!(NSUUID);
impl NSUUID {
pub fn from_uuid(uuid: Uuid) -> StrongPtr<Self> {
unsafe {
let mut r: *mut Object = msg_send![class!(NSUUID), alloc];
r = msg_send![r, initWithUUIDBytes:uuid.as_ptr()];
StrongPtr::wrap(Self::wrap(r))
}
}
pub fn to_uuid(&self) -> Uuid {
unsafe {
let mut r = Uuid::zeroed();
let _: () = msg_send![self.as_ptr(), getUUIDBytes:r.as_mut_ptr()];
r
}
}
}
object_ptr_wrapper!(CBUUID);
impl CBUUID {
pub fn from_uuid(uuid: Uuid) -> Self {
unsafe {
let data = NSData::from_bytes(uuid.shorten());
let r: *mut Object = msg_send![class!(CBUUID), UUIDWithData:data];
Self::wrap(r)
}
}
pub fn array_from_uuids(uuids: &[Uuid]) -> NSArray {
NSArray::from_iter(uuids.iter().copied().map(CBUUID::from_uuid))
}
pub fn to_uuid(&self) -> Uuid {
let data = unsafe {
let data: *mut Object = msg_send![self.as_ptr(), data];
NSData::wrap(data)
};
Uuid::from_slice(data.as_bytes())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn shorten() {
fn base(prefix: &[u8]) -> [u8; 16] {
let mut r = BASE_UUID_BYTES;
r[..prefix.len()].copy_from_slice(&prefix);
r
}
let data = &[
(base(&[0, 0, 0, 0]), &[0, 0][..]),
(base(&[0, 0, 0, 1]), &[0, 1][..]),
(base(&[0, 0, 0xff, 0xff]), &[0xff, 0xff][..]),
(base(&[0, 1, 0, 0]), &[0, 1, 0, 0][..]),
(base(&[0xff, 0xff, 0xff, 0xff]), &[0xff, 0xff, 0xff, 0xff][..]),
(base(&[0xff, 0xff, 0xff, 0xff]), &[0xff, 0xff, 0xff, 0xff][..]),
(base(&[0, 0, 0, 0, 1]), &base(&[0, 0, 0, 0, 1])[..]),
];
for &(inp, exp) in data {
assert_eq!(Uuid::from_bytes(inp).shorten(), exp);
}
}
#[test]
fn parse_ok() {
let data = &[
("00000000-0000-0000-0000-000000000000", Uuid::zeroed()),
("12345678-9AbC-Def0-1234-56789aBCDEF0", Uuid::from_bytes(
[0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC,
0xDE, 0xF0])),
("00000000-0000-1000-8000-00805F9B34FB", Uuid::base()),
];
for &(inp, exp) in data {
let act = inp.parse::<Uuid>().unwrap();
assert_eq!(act, exp);
assert_eq!(inp.to_ascii_lowercase(), act.to_string());
}
}
#[test]
fn parse_fail() {
let data = &[
"",
"0",
"00000000_0000-0000-0000-000000000000",
"00000000-0000*0000-0000-000000000000",
"00000000-0000-0000+0000-000000000000",
"00000000-0000-0000-0000~000000000000",
"00000000-0000-00z0-0000-000000000000",
"00000000-0000-0000-0000-_00000000000",
];
for &inp in data {
assert!(inp.parse::<Uuid>().is_err());
}
}
}
| true
|
925ad994aee0622710c3b23f3d4653302af60122
|
Rust
|
doodlewind/quickjs-rs
|
/src/bindings/droppable_value.rs
|
UTF-8
| 817
| 3.5625
| 4
|
[
"MIT"
] |
permissive
|
/// A small wrapper that frees resources that have to be freed
/// automatically when they go out of scope.
pub struct DroppableValue<T, F>
where
F: FnMut(&mut T),
{
value: T,
drop_fn: F,
}
impl<T, F> DroppableValue<T, F>
where
F: FnMut(&mut T),
{
pub fn new(value: T, drop_fn: F) -> Self {
Self { value, drop_fn }
}
}
impl<T, F> Drop for DroppableValue<T, F>
where
F: FnMut(&mut T),
{
fn drop(&mut self) {
(self.drop_fn)(&mut self.value);
}
}
impl<T, F> std::ops::Deref for DroppableValue<T, F>
where
F: FnMut(&mut T),
{
type Target = T;
fn deref(&self) -> &T {
&self.value
}
}
impl<T, F> std::ops::DerefMut for DroppableValue<T, F>
where
F: FnMut(&mut T),
{
fn deref_mut(&mut self) -> &mut T {
&mut self.value
}
}
| true
|
d281cddf47b67ebe24e8d24f8af7ea58d36e5d27
|
Rust
|
jonlamb-gh/rpi3-rust-workspace
|
/bcm2837-hal/src/mailbox/msg/get_temp.rs
|
UTF-8
| 5,362
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
use crate::mailbox::{Error, Msg, MsgEmitter, ReqRespCode, Result, Tag, TagId, LAST_TAG_SIZE};
const TAG: TagId = TagId::GetTemperature;
const REQ_LEN: usize = 2;
const REQ_SIZE: usize = REQ_LEN * 4;
const RESP_LEN: usize = 2;
const RESP_SIZE: usize = RESP_LEN * 4;
#[derive(Debug, PartialEq)]
pub struct Req<T: AsRef<[u32]>> {
buffer: T,
}
impl<T: AsRef<[u32]>> Req<T> {
pub fn new_unchecked(buffer: T) -> Req<T> {
Req { buffer }
}
pub fn new_checked(buffer: T) -> Result<Req<T>> {
let req = Self::new_unchecked(buffer);
req.check_len()?;
Ok(req)
}
pub fn check_len(&self) -> Result<()> {
let len = self.buffer.as_ref().len();
if len < REQ_LEN {
Err(Error::Truncated)
} else {
Ok(())
}
}
pub fn into_inner(self) -> T {
self.buffer
}
#[inline]
pub fn sensor_id(&self) -> u32 {
let data = self.buffer.as_ref();
data[0]
}
#[inline]
pub fn temp(&self) -> u32 {
let data = self.buffer.as_ref();
data[1]
}
}
impl<T: AsRef<[u32]> + AsMut<[u32]>> Req<T> {
#[inline]
pub fn set_sensor_id(&mut self, id: u32) {
let data = self.buffer.as_mut();
data[0] = id;
}
#[inline]
pub fn set_temp(&mut self, temp: u32) {
let data = self.buffer.as_mut();
data[1] = temp;
}
}
impl<T: AsRef<[u32]>> AsRef<[u32]> for Req<T> {
fn as_ref(&self) -> &[u32] {
self.buffer.as_ref()
}
}
#[derive(Debug, PartialEq)]
pub struct Resp<T: AsRef<[u32]>> {
buffer: T,
}
impl<T: AsRef<[u32]>> Resp<T> {
pub fn new_unchecked(buffer: T) -> Resp<T> {
Resp { buffer }
}
pub fn new_checked(buffer: T) -> Result<Resp<T>> {
let req = Self::new_unchecked(buffer);
req.check_len()?;
Ok(req)
}
pub fn check_len(&self) -> Result<()> {
let len = self.buffer.as_ref().len();
if len < RESP_LEN {
Err(Error::Truncated)
} else {
Ok(())
}
}
pub fn into_inner(self) -> T {
self.buffer
}
#[inline]
pub fn sensor_id(&self) -> u32 {
let data = self.buffer.as_ref();
data[0]
}
#[inline]
pub fn temp(&self) -> u32 {
let data = self.buffer.as_ref();
data[1]
}
}
impl<T: AsRef<[u32]>> AsRef<[u32]> for Resp<T> {
fn as_ref(&self) -> &[u32] {
self.buffer.as_ref()
}
}
/// A high-level representation of a GetTemp command/response
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct Repr {
sensor_id: u32,
/// Temperature of the SoC in thousandths of a degree C
temp: u32,
}
/// A default GetTemp request
impl Default for Repr {
fn default() -> Repr {
Repr {
sensor_id: 0,
temp: 0,
}
}
}
impl Repr {
pub fn new(sensor_id: u32) -> Self {
Repr { sensor_id, temp: 0 }
}
pub fn sensor_id(&self) -> u32 {
self.sensor_id
}
/// Temperature of the SoC in thousandths of a degree C
pub fn temp(&self) -> u32 {
self.temp
}
/// Temperature of the SoC in degrees C
pub fn temp_real(&self) -> f32 {
self.temp as f32 / 1000.0
}
pub fn parse_response<T: AsRef<[u32]> + ?Sized>(msg: &Msg<&T>) -> Result<Repr> {
if msg.buffer_size()
!= (Msg::<&T>::header_size() + Tag::<&T>::header_size() + RESP_SIZE + LAST_TAG_SIZE)
{
return Err(Error::Malformed);
}
if msg.reqresp_code() != ReqRespCode::ResponseSuccess {
return Err(Error::Malformed);
}
let tag = Tag::new_checked(msg.payload())?;
if tag.tag_id()? != TAG {
return Err(Error::Malformed);
}
if tag.response_size() != RESP_SIZE {
return Err(Error::Malformed);
}
let resp = Resp::new_checked(tag.payload())?;
Ok(Repr {
sensor_id: resp.sensor_id(),
temp: resp.temp(),
})
}
/// Return the size of a packet that will be emitted from this high-level
/// representation
pub fn buffer_size(&self) -> usize {
// Request and response are the same size/shape
RESP_SIZE
}
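    /// Fills `msg` with a GetTemperature mailbox request (tag, sizes, sensor id) for this sensor.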
pub fn emit_request<T: AsRef<[u32]> + AsMut<[u32]>>(&self, msg: &mut Msg<T>) -> Result<()> {
msg.set_buffer_size(
Msg::<&T>::header_size() + Tag::<&T>::header_size() + REQ_SIZE + LAST_TAG_SIZE,
);
msg.set_reqresp_code(ReqRespCode::Request);
let mut tag = Tag::new_unchecked(msg.payload_mut());
tag.set_tag_id(TAG);
tag.set_request_size(REQ_SIZE);
tag.set_response_size(RESP_SIZE);
tag.check_len()?;
let mut req = Req::new_unchecked(tag.payload_mut());
req.set_sensor_id(self.sensor_id());
req.set_temp(0);
req.check_len()?;
msg.fill_last_tag()?;
msg.check_len()?;
Ok(())
}
}
impl MsgEmitter for Repr {
fn msg_size(&self) -> usize {
Msg::<&dyn AsRef<[u32]>>::header_size()
+ Tag::<&dyn AsRef<[u32]>>::header_size()
+ RESP_SIZE
+ LAST_TAG_SIZE
}
fn emit_msg<T: AsRef<[u32]> + AsMut<[u32]>>(&self, msg: &mut Msg<T>) -> Result<()> {
self.emit_request(msg)
}
}
| true
|
e45cdbffa63953f7a7c8dab93e57609e74d9b7a8
|
Rust
|
kryo4096/rust-game
|
/src/generator.rs
|
UTF-8
| 2,357
| 2.953125
| 3
|
[] |
no_license
|
use mesh::*;
use noise::*;
use math::*;
pub const CHUNK_SIZE : u32 = 128;
pub struct Chunk {
chunk_x: i32,
chunk_z: i32,
mesh: Mesh,
noise: TerrainNoise,
}
impl Chunk {
pub fn generate(chunk_x: i32, chunk_z: i32, noise: TerrainNoise) -> Self {
let mut chunk = Self {chunk_x, chunk_z, noise, mesh: Mesh::empty()};
chunk.build_mesh();
chunk
}
fn build_mesh(&mut self) {
self.mesh = Mesh::empty();
for x in 0..CHUNK_SIZE {
for z in 0..CHUNK_SIZE {
let normal;
let this;
{
let x = x as i32;
let z = z as i32;
this = self.noise.get(x,z);
let left = self.noise.get(x-1,z);
let right = self.noise.get(x+1,z);
let front = self.noise.get(x,z -1);
let back = self.noise.get(x, z + 1);
normal = Vec3::new(left-right,2.,front-back).normalize();
}
self.mesh.push_vertex( Vertex{
position: [
x as f32,
this,
z as f32
],
normal: normal.into(),
});
if x < CHUNK_SIZE -1 && z < CHUNK_SIZE - 1 {
let q = z * CHUNK_SIZE + x;
self.mesh.push_index(q);
self.mesh.push_index(q+CHUNK_SIZE);
self.mesh.push_index(q+CHUNK_SIZE+1);
self.mesh.push_index(q);
self.mesh.push_index(q+1);
self.mesh.push_index(q+CHUNK_SIZE+1);
}
}
}
}
pub fn mesh(&self) -> &Mesh {
&self.mesh
}
pub fn model_m(&self) -> Mat4{
Mat4::from_translation(Vec3::new(self.chunk_x as f32 * CHUNK_SIZE as f32, 0., self.chunk_z as f32 * CHUNK_SIZE as f32 ))
}
}
pub struct TerrainNoise {
noise : Fbm<f32>,
}
impl TerrainNoise {
pub fn new() -> Self{
let noise = Fbm::new();
let noise = noise.set_frequency(0.01);
Self {noise}
}
pub fn get(&self, x: i32, z: i32) -> f32 {
((self.noise.get([x as f32, z as f32])*2.).tanh()+1.)/2.*60.
}
}
| true
|
683f9a1a7e879f224b945e85f13a6ee7555fd42e
|
Rust
|
danshiovitz/adventofcode
|
/2021/day11/src/main.rs
|
UTF-8
| 2,612
| 2.875
| 3
|
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
use std::collections::HashSet;
extern crate common;
use common::framework::{parse_grid, run_day, BaseDay, InputReader};
use common::grid::{eight_neighbors, print_grid, Coord, Grid};
struct Day11 {
vals: Grid<i32>,
}
fn print_one(_c: &Coord, maybe_val: Option<&i32>) -> String {
let val = *maybe_val.unwrap();
if val > 9 {
return "*".to_owned();
}
return val.to_string().to_owned();
}
fn run_flashes(grid: &Grid<i32>, pt1: bool, verbose: bool) -> i32 {
let mut cur_grid = grid.coords.clone();
let mut flash_count = 0;
if verbose {
println!("Initial");
print_grid(&grid, &mut print_one);
println!();
}
let num_steps = if pt1 { 100 } else { 10000 };
for step in 0..num_steps {
let mut should_flash: Vec<Coord> = Vec::new();
let mut has_flashed = HashSet::new();
for (k, v) in cur_grid.iter_mut() {
*v += 1;
if *v > 9 {
should_flash.push(*k);
}
}
while let Some(cur) = should_flash.pop() {
if has_flashed.contains(&cur) {
continue;
}
flash_count += 1;
has_flashed.insert(cur);
for ngh in eight_neighbors(&cur) {
if let Some(nv) = cur_grid.get_mut(&ngh) {
*nv += 1;
if *nv > 9 {
should_flash.push(ngh);
}
}
}
}
for cur in has_flashed.iter() {
cur_grid.insert(*cur, 0);
}
if verbose {
let grid_copy = Grid { min: grid.min, max: grid.max, coords: cur_grid.clone() };
println!("Step {}", step + 1);
print_grid(&grid_copy, &mut print_one);
println!();
}
if !pt1 && has_flashed.len() == 100 {
return step + 1;
}
}
return flash_count;
}
impl BaseDay for Day11 {
fn parse(&mut self, input: &mut InputReader) {
fn parse_coord(c: char, _coord: &Coord) -> Option<i32> {
let val = c.to_digit(10).unwrap() as i32;
Some(val)
}
self.vals = parse_grid(input, &mut parse_coord);
}
fn pt1(&mut self) -> String {
let flash_count = run_flashes(&self.vals, true, false);
return flash_count.to_string();
}
fn pt2(&mut self) -> String {
let sync_step = run_flashes(&self.vals, false, false);
return sync_step.to_string();
}
}
fn main() {
let mut day = Day11 { vals: Grid::new() };
run_day(&mut day);
}
| true
|
8e867451873a4e00f68d3699d94016f5eaa27c6d
|
Rust
|
ssachtleben/battleship-rs
|
/src/models/point.rs
|
UTF-8
| 1,549
| 3.921875
| 4
|
[] |
no_license
|
use utils::stringutil as Stringutil;
pub struct Point {
x: usize,
y: usize
}
impl Point {
pub fn new(x: usize, y: usize) -> Point {
Point {
x: x,
y: y
}
}
pub fn get_x(&self) -> usize {
self.x
}
pub fn get_y(&self) -> usize {
self.y
}
pub fn from_string(string: &str) -> Option<Point> {
if string.len() < 2 || string.len() > 3 {
println!("Invalid input");
return None;
}
let first_char = String::from(string).to_uppercase().chars().next().unwrap();
match Stringutil::get_position_in_alphabet(first_char) {
Some(x) => {
match string.split_at(1).1.parse::<usize>() {
Ok(y) => return Some(Point::new(x, y)),
Err(_) => return None
}
}
None => return None
}
}
}
#[cfg(test)]
mod tests {
use models::point::Point as Point;
#[test]
fn get_x() {
assert_eq!(3, Point::new(3, 5).get_x());
}
#[test]
fn get_y() {
assert_eq!(5, Point::new(3, 5).get_y());
}
#[test]
fn from_string() {
assert_eq!(true, Point::from_string("C5").is_some());
assert_eq!(true, Point::from_string("CC").is_none());
assert_eq!(true, Point::from_string("55").is_none());
let point: Point = Point::from_string("C5").unwrap();
assert_eq!(3, point.get_x());
assert_eq!(5, point.get_y());
}
}
| true
|
9997d699f1bf88e7e4ea02026ffb3c4cc2e1584d
|
Rust
|
hegza/daily-planner
|
/src/dom/timebox.rs
|
UTF-8
| 1,084
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use crate::time::Duration;
use super::Activity;
use super::TimeSlotKind;
/// A time box is an activity container with the option for a time slot.
///
/// A time box can be done or not done. The adjust policy determines how the time
/// box moves when its own time, or the time of its surroundings, changes.
#[derive(Clone, Debug)]
pub struct TimeBox {
pub time: Option<TimeSlotKind>,
pub activity: Activity,
pub done: bool,
pub adjust_policy: AdjustPolicy,
}
#[derive(Clone, Debug, PartialEq)]
pub enum AdjustPolicy {
Normal,
/// This time box does not move unless it is moved as the primary item
Fixed,
}
impl Default for TimeBox {
fn default() -> Self {
TimeBox {
time: None,
activity: Activity::default(),
done: false,
adjust_policy: AdjustPolicy::Normal,
}
}
}
impl TimeBox {
pub fn adjust_absolute(&mut self, adjust_duration: &Duration, adjust_start: bool) {
if let Some(time) = &mut self.time {
time.adjust_absolute(adjust_duration, adjust_start)
}
}
}
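// Hedged usage sketch: a freshly created time box matches the description in the doc
// comment above -- no time slot, not done, and the Normal adjust policy.
#[cfg(test)]
mod timebox_default_sketch {
    use super::{AdjustPolicy, TimeBox};

    #[test]
    fn default_time_box_is_untimed_and_not_done() {
        let time_box = TimeBox::default();
        assert!(time_box.time.is_none());
        assert!(!time_box.done);
        assert_eq!(time_box.adjust_policy, AdjustPolicy::Normal);
    }
}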
| true
|
5d2b41c20058aab396bbc9b6a7c1c4388cb395e2
|
Rust
|
NangiDev/AdventOfCode2020
|
/tests/day18.rs
|
UTF-8
| 4,246
| 3.171875
| 3
|
[] |
no_license
|
#[cfg(test)]
mod day_18 {
use adventofcode_2020::day18::{
calculate, calculate_ordered, calculate_with_nested_expressions,
calculate_with_nested_expressions_ordered,
};
#[test]
fn calc_ordered_expression_to_be_231() {
let expression = "1 + 2 * 3 + 4 * 5 + 6".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_ordered(exp);
assert_eq!(result, 231);
}
#[test]
fn calc_ordered_expression_to_be_51() {
let expression = "1 + (2 * 3) + (4 * (5 + 6))".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions_ordered(exp);
assert_eq!(result, 51);
}
#[test]
fn calc_ordered_expression_to_be_46() {
let expression = "2 * 3 + (4 * 5)".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions_ordered(exp);
assert_eq!(result, 46);
}
#[test]
fn calc_ordered_expression_to_be_1445() {
let expression = "5 + (8 * 3 + 9 + 3 * 4 * 3)".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions_ordered(exp);
assert_eq!(result, 1445);
}
#[test]
fn calc_ordered_expression_to_be_669060() {
let expression = "5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions_ordered(exp);
assert_eq!(result, 669060);
}
#[test]
fn calc_ordered_expression_to_be_23340() {
let expression = "((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions_ordered(exp);
assert_eq!(result, 23340);
}
#[test]
fn calc_expression_to_be_71() {
let expression = "1 + 2 * 3 + 4 * 5 + 6".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate(exp);
assert_eq!(result, 71);
}
#[test]
fn calc_expression_to_be_51() {
let expression = "1 + (2 * 3) + (4 * (5 + 6))".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 51);
}
#[test]
fn calc_expression_to_be_26() {
let expression = "2 * 3 + (4 * 5)".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 26);
}
#[test]
fn calc_expression_to_be_437() {
let expression = "5 + (8 * 3 + 9 + 3 * 4 * 3)".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 437);
}
#[test]
fn calc_expression_to_be_12240() {
let expression = "5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 12240);
}
#[test]
fn calc_expression_to_be_13632() {
let expression = "((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 13632);
}
#[test]
fn calc_expression_to_be_81() {
let expression = "1 + (2 * (3 + 4)) * 5 + 6".to_string();
let exp: Vec<String> = expression.split(' ').map(|s| s.to_string()).collect();
let result = calculate_with_nested_expressions(exp);
assert_eq!(result, 81);
}
}
| true
|
c30d5b87eefd6cae6280de386d1028c94b7341e9
|
Rust
|
msiglreith/glace
|
/glace/src/microfacet/fresnel.rs
|
UTF-8
| 280
| 3.1875
| 3
|
[] |
no_license
|
pub struct Schlick {
t: f32,
}
impl Schlick {
pub fn new(cos_theta: f32) -> Self {
Schlick {
t: (1.0 - cos_theta).powf(5.0)
}
}
/// Spectral operation: the Schlick Fresnel approximation f0 + (f90 - f0) * (1 - cos_theta)^5
pub fn eval(&self, f0: f32, f90: f32) -> f32 {
f0 + (f90 - f0) * self.t
}
}
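// Hedged sketch: at grazing incidence (cos_theta = 0) the Schlick term (1 - cos_theta)^5
// equals 1, so `eval` returns f90; at normal incidence (cos_theta = 1) it returns f0.
#[cfg(test)]
mod schlick_sketch {
    use super::Schlick;

    #[test]
    fn grazing_and_normal_incidence() {
        let grazing = Schlick::new(0.0);
        assert!((grazing.eval(0.04, 1.0) - 1.0).abs() < 1e-6);

        let normal = Schlick::new(1.0);
        assert!((normal.eval(0.04, 1.0) - 0.04).abs() < 1e-6);
    }
}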
| true
|
1237b2c223327c7d5f9dd40e2291ad1e6932fc76
|
Rust
|
inerte/ctci-rust
|
/src/chapter_1/question_4.rs
|
UTF-8
| 1,176
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
fn char_frequency(string: String) -> HashMap<char, usize> {
let mut frequency = HashMap::new();
for c in string.chars() {
if !c.is_whitespace() {
let counter = frequency.entry(c).or_insert(0);
*counter += 1;
}
}
return frequency;
}
pub fn is_permutation_of_palindrome(string: &str) -> bool {
let string = string.to_lowercase();
let char_frequency = char_frequency(string);
let mut odd_found = false;
for frequency in char_frequency.values() {
if frequency % 2 == 1 {
if odd_found {
return false;
}
odd_found = true;
}
}
return true;
}
#[test]
fn example_is_permutation_of_palindrome() {
assert!(is_permutation_of_palindrome("Tact Coa"));
}
#[test]
fn one_character_is_permutation_of_palindrome() {
assert!(is_permutation_of_palindrome("a"));
}
#[test]
fn a_palindrome_is_a_permutation_of_palindrome() {
assert!(is_permutation_of_palindrome("abba"));
}
#[test]
fn my_name_is_not_a_permutation_of_palindrome() {
assert_eq!(is_permutation_of_palindrome("Julio"), false);
}
| true
|
bf25ca934b652a2ebbae1ab1703f4bf2d656dc21
|
Rust
|
gcnhax/bmd-rs
|
/src/sections/shp1.rs
|
UTF-8
| 1,769
| 2.703125
| 3
|
[] |
no_license
|
use byteorder::{ReadBytesExt, BE};
use std::io::{Read, Seek, SeekFrom};
use crate::error::Error;
use crate::util::SeekExt;
pub struct Shp1 {}
#[derive(Debug)]
pub struct Offsets {
batch_table: u32,
index_table: u32,
attrib_table: u32,
matrix_table: u32,
primitive_data: u32,
matrix_data: u32,
packet_table: u32,
}
impl Offsets {
fn parse<R>(r: &mut R) -> Result<Offsets, Error>
where R: Read + Seek {
let batch_table = r.read_u32::<BE>()?;
let index_table = r.read_u32::<BE>()?;
r.seek(SeekFrom::Current(0x4))?;
let attrib_table = r.read_u32::<BE>()?;
let matrix_table = r.read_u32::<BE>()?;
let primitive_data = r.read_u32::<BE>()?;
let matrix_data = r.read_u32::<BE>()?;
let packet_table = r.read_u32::<BE>()?;
Ok(Offsets {
batch_table,
index_table,
attrib_table,
matrix_table,
primitive_data,
matrix_data,
packet_table,
})
}
}
impl Shp1 {
pub fn parse<R>(r: &mut R) -> Result<Shp1, Error>
where
R: Read + Seek,
{
let section_begin_offset = r.whereami()?;
// assert that we're starting in the right place
if &{
let mut x = [0u8; 4];
r.read_exact(&mut x)?;
x
} != b"SHP1"
{
return Err(Error::InvalidMagic);
}
let section_size = r.read_u32::<BE>()?;
let batch_count = r.read_u16::<BE>()?;
r.seek(SeekFrom::Current(0x2))?;
let offsets = Offsets::parse(r)?;
println!("offsets: {:?}", offsets);
r.seek(SeekFrom::Start(section_begin_offset + section_size as u64))?;
Ok(Shp1 {})
}
}
| true
|
ddbbedba69f52483c7e5d66973de7185a93763e2
|
Rust
|
pcein/rust-gecskp
|
/examples/all/16.rs
|
UTF-8
| 119
| 3.078125
| 3
|
[] |
no_license
|
/* Integer range / for loop */
fn main() {
let r = 0 .. 10;
for x in r {
println!("{}", x);
}
}
| true
|
ef1176afb160d24aaa029ed3b1a40661003856a3
|
Rust
|
Rustinante/math
|
/src/sample.rs
|
UTF-8
| 3,079
| 3.34375
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! # Blanket implementations for online sampling algorithms
use rand::distributions::{Distribution, Uniform};
use crate::{
set::traits::Finite,
traits::{Collecting, ToIterator},
};
pub mod trait_impl;
pub trait Sample<'a, I: Iterator<Item = E>, E, O: Collecting<E> + Default>:
Finite + ToIterator<'a, I, E> {
/// samples `size` elements without replacement
/// `size`: the number of samples to be drawn
/// returns Err if `size` is larger than the population size
fn sample_subset_without_replacement<'s: 'a>(
&'s self,
size: usize,
) -> Result<O, String> {
let mut remaining = self.size();
if size > remaining {
return Err(format!(
"desired sample size {} > population size {}",
size, remaining
));
}
let mut samples = O::default();
let mut needed = size;
let mut rng = rand::thread_rng();
let uniform = Uniform::new(0., 1.);
for element in self.to_iter() {
if uniform.sample(&mut rng) <= (needed as f64 / remaining as f64) {
samples.collect(element);
needed -= 1;
}
remaining -= 1;
}
Ok(samples)
}
fn sample_with_replacement<'s: 'a>(
&'s self,
size: usize,
) -> Result<O, String> {
let population_size = self.size();
if population_size == 0 {
return Err(
"cannot sample from a population of 0 elements".to_string()
);
}
let mut samples = O::default();
let mut rng = rand::thread_rng();
let uniform = Uniform::new(0., population_size as f64);
for _ in 0..size {
samples.collect(
self.to_iter()
.nth(uniform.sample(&mut rng) as usize)
.unwrap(),
);
}
Ok(samples)
}
}
#[cfg(test)]
mod tests {
use crate::set::{
contiguous_integer_set::ContiguousIntegerSet,
ordered_integer_set::OrderedIntegerSet, traits::Finite,
};
use super::Sample;
#[test]
fn test_sampling_without_replacement() {
let interval = ContiguousIntegerSet::new(0, 100);
let num_samples = 25;
let samples = interval
.sample_subset_without_replacement(num_samples)
.unwrap();
assert_eq!(samples.size(), num_samples);
let set =
OrderedIntegerSet::from_slice(&[[-89, -23], [-2, 100], [300, 345]]);
let num_samples = 18;
let samples =
set.sample_subset_without_replacement(num_samples).unwrap();
assert_eq!(samples.size(), num_samples);
}
#[test]
fn test_sampling_with_replacement() {
let num_samples = 25;
let v = vec![1];
let samples = v.sample_with_replacement(num_samples);
assert_eq!(samples, Ok(vec![1; num_samples]));
assert!(Vec::<f32>::new()
.sample_with_replacement(num_samples)
.is_err());
}
}
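// Hedged standalone sketch (does not use the trait above): the same selection sampling
// loop as `sample_subset_without_replacement`, written against a plain range so the
// "exactly `size` elements are kept" invariant is easy to check.
#[cfg(test)]
mod selection_sampling_sketch {
    use rand::distributions::{Distribution, Uniform};

    #[test]
    fn keeps_exactly_the_requested_number_of_elements() {
        let size = 100usize;
        let mut remaining = 1000usize;
        let mut needed = size;
        let mut rng = rand::thread_rng();
        let uniform = Uniform::new(0., 1.);
        let mut samples = Vec::new();
        for element in 0..1000u32 {
            // keep each element with probability needed / remaining
            if needed > 0 && uniform.sample(&mut rng) <= (needed as f64 / remaining as f64) {
                samples.push(element);
                needed -= 1;
            }
            remaining -= 1;
        }
        assert_eq!(samples.len(), size);
    }
}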
| true
|
301e886fa4c81a09a7924556399b6ac68b84788b
|
Rust
|
itaibn/scheme
|
/src/read/lexer.rs
|
UTF-8
| 23,097
| 3.140625
| 3
|
[] |
no_license
|
/// A Scheme lexer following the description in Section 7.1 of R7RS. Currently
/// incomplete and doesn't support Unicode.
use std::str::FromStr;
use lazy_static::lazy_static;
use nom::{
self,
IResult,
branch::alt,
bytes::complete::{tag, tag_no_case, take_till, take_while},
character::complete::{anychar, digit1, hex_digit1, multispace1, none_of,
oct_digit1, one_of},
combinator::{eof, flat_map, map, map_opt, opt, peek, recognize, value,
success},
multi::{fold_many0, many0},
regexp::str::{re_find, re_capture},
sequence::{delimited, preceded, pair, terminated, tuple},
};
use num::{self, BigRational, FromPrimitive, ToPrimitive};
use crate::number::{self, Exactness};
use crate::scheme::Scheme;
/*
lazy_static! {
static ref IDENTIFIER: Regex = Regex::new(
//"^[[:alpha:]!$%&*:/<=>?@^_~][[:alnum:]!$%&*:/<=>?@^_~]*"
"^[[:alpha:]!$%&*:/<=>?^_~][[:alnum:]!$%&*:/<=>?^_~+-.@]*"
).unwrap();
// static ref STRING: Regex = Regex::new(
// "^(:?[^\"\\\\]|\\\\(:?[\"\\\\|abtnr]))*\""
}
*/
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
LeftParen,
LeftVector,
LeftBytevector,
RightParen,
Dot,
DatumComment,
FoldCase(bool),
PrefixOp(&'static str),
Identifier(String),
Boolean(bool),
Number(Number),
Character(char),
String(Vec<char>),
}
#[derive(Debug, Clone, PartialEq)]
pub struct Number(number::Number);
#[derive(Debug)]
pub struct Lexer<'a>(&'a str);
type LexerError = &'static str;
/*
/// Check whether a character is a delimiter. Unicode whitespace is not supported.
fn is_delimiter(c: char) -> bool {
is_scheme_whitespace(c) || "|()\";".contains(c)
}
/// Check whether a character is whitespace according to the definition in
/// Section 7.1 of R7RS. Unicode is not supported.
fn is_scheme_whitespace(c: char) -> bool {
" \t\n\r".contains(c)
}
/// Match <initial> pattern. Unicode not supported.
fn is_scheme_identifier_initial(c: char) -> bool {
c.is_ascii_alphabetic() || "!$%&*:/<=>?^_~".contains(c)
}
/// Match <subsequence> pattern. Unicode not supported.
fn is_scheme_identifier_subsequent(c: char) -> bool {
is_scheme_identifier_initial(c) || c.is_digit(10) || "+-.@".contains(c)
}
*/
impl Number {
/// If self is an exact u8 return it, otherwise return Nothing.
pub fn as_u8(&self) -> Option<u8> {
/*
if self.exactness == Exactness::Inexact || !self.value.is_integer() {
None
} else {
self.value.to_integer().to_u8()
}
*/
if self.0.is_exact() {self.0.to_u8()} else {None}
}
/// Convert a number into a Scheme value. Implementation is currently
/// incomplete.
pub fn to_scheme(&self) -> Scheme {
Scheme::number(self.0.clone())
}
fn big_rational(r: BigRational, exact: Exactness) -> Number {
let exact_num = number::Number::from_exact_complex(r.into());
Number(exact_num.to_exactness(exact))
}
/// Rational i64 with exactness. Used in testing
#[cfg(test)]
fn rational_i64(num: i64, den: i64, exact: Exactness) -> Number {
let rat = BigRational::from_i64(num).unwrap() /
BigRational::from_i64(den).unwrap();
Number::big_rational(rat, exact)
}
}
// Unsound and probably unnecessary:
impl Eq for Number {}
impl Token {
#[cfg(test)]
fn from_i64(x: i64) -> Token {
let rat = BigRational::from_i64(x).unwrap();
let num = Number::big_rational(rat, Exactness::Exact);
Token::Number(num)
}
#[cfg(test)]
fn from_str(s: &str) -> Token {
Token::String(s.chars().collect())
}
}
/// Parser which recognizes a delimiter without consuming it.
fn delimiter(inp: &str) -> IResult<&str, ()> {
peek(alt((
value((), one_of(" \t\n\r|()\";")),
value((), eof)
)))(inp)
}
fn radix(inp: &str) -> IResult<&str, u32> {
map(preceded(tag("#"), one_of("bBoOdDxX")), |c| match c {
'b' | 'B' => 2u32,
'o' | 'O' => 8,
'd' | 'D' => 10,
'x' | 'X' => 16,
_ => unreachable!(),
})(inp)
}
fn exactness(inp: &str) -> IResult<&str, Exactness> {
map(preceded(tag("#"), one_of("eEiI")), |c| match c {
'e' | 'E' => Exactness::Exact,
'i' | 'I' => Exactness::Inexact,
_ => unreachable!(),
})(inp)
}
fn prefix(inp: &str) -> IResult<&str, (u32, Option<Exactness>)> {
alt((
map(pair(radix, exactness), |(b, e)| (b, Some(e))),
map(pair(exactness, radix), |(e, b)| (b, Some(e))),
map(radix, |b| (b, None)),
map(exactness, |e| (10, Some(e))),
success((10, None)),
))(inp)
}
fn real<'inp>(base: u32, exactness: Option<Exactness>, input: &'inp str) ->
IResult<&'inp str, Token> {
use num::{BigInt, Num, Zero};
let uinteger_raw = move |inp: &'inp str| match base {
2 => re_find(nom::regex::Regex::new("^[01]+").unwrap())(inp),
8 => oct_digit1(inp),
10 => digit1(inp),
16 => hex_digit1(inp),
_ => unreachable!(),
};
let uinteger = map_opt(uinteger_raw, move |digits|
BigInt::from_str_radix(digits, base).ok());
/*
let mut decimal = map_opt(re_capture(nom::regex::Regex::new(
r"^([0-9]*)(\.([0-9])*)?([eE]([+-]?[0-9]+))?"
).unwrap()), |s| f64::from_str(s).ok()
.and_then(BigRational::from_f64).map(|n|
Number {
exactness: Exactness::Inexact,
value: n
}
)
);
*/
let float_re = nom::regex::Regex::new(
r"^([0-9]*)(\.([0-9]*))?([eE]([+-]?[0-9]+))?").unwrap();
// This is really messy.
let decimal = map_opt(re_capture(float_re.clone()), move |c_raw|
{
let c = float_re.captures(c_raw[0]).unwrap();
if
base != 10 ||
c.get(1).unwrap().as_str().len() == 0 &&
c.get(3).map_or(true, |m| m.as_str().len() == 0) ||
c.get(2).is_none() && c.get(4).is_none()
{
None
} else {
let integral_str = c.get(1).unwrap().as_str();
let mut integral = if integral_str.len() > 0
{BigInt::from_str_radix(integral_str, 10).unwrap()}
else {BigInt::zero()};
let fractional = c.get(3).map_or(
BigInt::zero(),
|s| if s.as_str().len() > 0 {
BigInt::from_str_radix(s.as_str(), 10).unwrap()}
else {BigInt::zero()}
);
let frac_len = c.get(3).map_or(0, |s| s.end() -
s.start());
integral = integral * BigInt::from_u32(10)?
.pow(frac_len.to_u32()?) + fractional;
let exponent = c.get(5).map_or(0,
|s| isize::from_str(s.as_str()).unwrap());
let total_exponent = exponent.to_i32()?
.checked_sub(frac_len.to_i32()?)?;
let rational = BigRational::from_integer(integral) *
BigRational::from_u32(10)?.pow(total_exponent);
Some((Exactness::Inexact, rational))
}
});
let ureal = alt((
decimal,
map(uinteger,
|n| (Exactness::Exact, n.into())
),
));
let sign = map(opt(one_of("+-")), |s| match s {
Some('+') | None => 1i32,
Some('-') => -1i32,
_ => unreachable!(),
});
let real = map(pair(sign, ureal), |(s, (e, mut value))| {
value = value * BigRational::from_integer(s.into());
(e, value)
});
map(real, move |mut n| {
n.0 = exactness.unwrap_or(n.0);
let num = Number::big_rational(n.1, n.0);
Token::Number(num)
})(input)
}
/// nom parser for a number.
///
/// As a hack, also parses the identifiers `+` or `-`. Currently only supports
/// integers (at any base) and floats. Supports exactness specifiers.
fn number<'inp>(inp: &'inp str) -> IResult<&'inp str, Token> {
use num::{BigInt, Num, Zero};
let num = flat_map(prefix, |(base, exactness)| move |inp| real(base,
exactness, inp));
// For compatibility with old version of number lexer incorporate sign
// identifier
terminated(alt((
num,
map(one_of("+-"), |s| Token::Identifier(s.to_string())),
)), delimiter)(inp)
}
/// Shared escape codes used in both pipe identifiers and string literals.
/// Corresponds to <inline hex escape> or <mnemonic escape>, as well as \" and
/// \| and \\ as described in R7RS errata.
fn escape_code(inp: &str) -> IResult<&str, char> {
alt((
value('\u{7}', tag("\\a")),
value('\u{8}', tag("\\b")),
value('\u{9}', tag("\\t")),
value('\u{a}', tag("\\n")),
value('\u{d}', tag("\\r")),
value('\\', tag("\\\\")),
value('|', tag("\\|")),
delimited(tag("\\x"),
map_opt(hex_digit1,
|esc| u32::from_str_radix(esc, 16).ok()
.and_then(std::char::from_u32)),
tag(";"))
))(inp)
}
fn string_literal(inp: &str) -> IResult<&str, Token> {
map(delimited(
tag("\""),
fold_many0(alt((
map(none_of("\r\n\\\""), Some),
value(Some('\n'), alt((tag("\n"), tag("\r\n"), tag("\r")))),
map(escape_code, Some),
value(None, tuple((
tag("\\"),
take_while(|c| c == ' ' || c == '\t'),
alt((tag("\n"), tag("\r\n"), tag("\r"))),
take_while(|c| c == ' ' || c == '\t')))),
)),
Vec::new(),
|mut v, maybe_c| {
maybe_c.map(|c| v.push(c));
v
}
),
tag("\"")
), |v_char| Token::String(v_char))(inp)
}
/// nom parser which consumes whitespace and comments.
///
/// This is like the <intertoken space> category in the standard, except it
/// doesn't support datum comments, and directives are counted as tokens.
///
/// TODO: Improve the type signature.
fn intertoken_space(inp: &str) -> IResult<&str, Vec<&str>> {
lazy_static! {
// A non-nesting part of a nesting comment
static ref NON_NESTING_COMMENT: nom::regex::Regex =
nom::regex::Regex::new(r"^([^|#]|#+([^|]|$)|\|+([^#]|$))+")
.unwrap();
}
// A custom version of not_line_ending necessary since the nom version
// doesn't recognize a lone '\r' (cf. nom issue #1273)
let not_line_ending = take_till(|c| c == '\n' || c == '\r');
// Strictly speaking this differs from the standard which would
// recognize ";<line>\r\n" as a single <comment>, whereas with this
// definition ";<line>\r" would match <comment> and then "\n" would
// match as a second <atmosphere>. Since this function matches an entire
// <intertoken space> this distinction is irrelevant.
let line_comment = recognize(tuple((tag(";"), not_line_ending,
one_of("\n\r"))));
fn nested_comment(inp: &str) -> IResult<&str, &str> {
let non_nesting_comment = re_find(NON_NESTING_COMMENT.clone());
recognize(tuple((
tag("#|"),
many0(alt((non_nesting_comment, nested_comment))),
tag("|#")
)))(inp)
}
many0(alt((line_comment, multispace1, nested_comment)))(inp)
}
fn simple_token(inp: &str) -> IResult<&str, Token> {
alt((
value(Token::LeftParen, tag("(")),
value(Token::LeftVector, tag("#(")),
value(Token::LeftBytevector, tag_no_case("#u8(")),
value(Token::RightParen, tag(")")),
value(Token::DatumComment, tag("#;")),
value(Token::PrefixOp("quote"), tag("'")),
value(Token::PrefixOp("quasiquote"), tag("`")),
value(Token::PrefixOp("unquote-splicing"), tag(",@")),
value(Token::PrefixOp("unquote"), tag(",")),
terminated(alt((
value(Token::Dot, tag(".")),
value(Token::Boolean(true), tag_no_case("#true")),
value(Token::Boolean(true), tag_no_case("#t")),
value(Token::Boolean(false), tag_no_case("#false")),
value(Token::Boolean(false), tag_no_case("#f")),
value(Token::FoldCase(true), tag_no_case("#!fold-case")),
value(Token::FoldCase(false), tag_no_case("#!no-fold-case")),
)), delimiter)
))(inp)
}
fn character(inp: &str) -> IResult<&str, Token> {
delimited(
tag("#\\"),
map(alt((
value('\u{7}', tag_no_case("alarm")),
value('\u{8}', tag_no_case("backspace")),
value('\u{7f}', tag_no_case("delete")),
value('\u{1b}', tag_no_case("escape")),
value('\u{a}', tag_no_case("newline")),
value('\u{0}', tag_no_case("null")),
value('\u{d}', tag_no_case("return")),
value(' ', tag_no_case("space")),
value('\u{9}', tag_no_case("tab")),
map_opt(preceded(tag_no_case("x"), hex_digit1),
|hex| u32::from_str_radix(hex, 16).ok()
.and_then(std::char::from_u32)),
anychar
)), Token::Character),
delimiter
)(inp)
}
fn simple_identifier(inp: &str) -> IResult<&str, Token> {
lazy_static! {
static ref IDENTIFIER: nom::regex::Regex = nom::regex::Regex::new(
//"^[[:alpha:]!$%&*/:<=>?@^_~][[:alnum:]!$%&*/:<=>?@^_~]*"
"^[[:alpha:]!$%&*/:<=>?^_~][[:alnum:]!$%&*/:<=>?^_~+-.@]*"
).unwrap();
}
map(
terminated(re_find(IDENTIFIER.clone()), delimiter),
|ident| Token::Identifier(ident.to_string())
)(inp)
}
fn token(inp: &str) -> IResult<&str, Token> {
preceded(intertoken_space,
alt((
simple_token,
number,
string_literal,
character,
simple_identifier
)))(inp)
}
// TODO: Comprehensive todo list, datum comments, full number support, peculiar
// identifiers, strings, pipe notation for identifiers, verifying agreement with
// lexer specifications in Section 7.1.1
impl<'a> Lexer<'a> {
pub fn from_str(input_str: &'a str) -> Lexer<'a> {
Lexer(input_str)
}
/// Either consume and output a single token from the input stream, or
/// output None while consuming an unspecified numbere of characters from
/// the input stream.
pub fn get_token(&mut self) -> Option<Token> {
let (rest, tok) = token(self.0).ok()?;
self.0 = rest;
Some(tok)
}
}
impl Iterator for Lexer<'_> {
type Item = Result<Token, LexerError>;
// TODO: Distinguish EOF and error
fn next(&mut self) -> Option<Self::Item> {
self.get_token().map(Ok)
}
}
#[cfg(test)]
fn test_lexer(inp: &str, out: Token) {
assert_eq!(Lexer::from_str(inp).get_token(), Some(out));
}
#[cfg(test)]
fn test_lexer_fail(inp: &str) {
assert_eq!(Lexer::from_str(inp).get_token(), None)
}
#[ignore]
#[test]
fn test_ident() {
let ident_strs = &["a", "z", "A", "Z", "let*", "!as0", "+", "-", "+@", "+$",
"+a", "+.+", "..."];
for ident_str in ident_strs {
test_lexer(ident_str, Token::Identifier(ident_str.to_string()));
}
}
#[ignore]
#[test]
fn test_pipe_ident() {
test_lexer("| 0-!@\"5\\\\*\\|\\x100;\\a|",
Token::Identifier(" 0-!@\"5\\*|\u{100}\u{7}".to_string()));
}
#[test]
fn test_simple_tokens() {
test_lexer("(", Token::LeftParen);
test_lexer(")", Token::RightParen);
test_lexer("#(", Token::LeftVector);
test_lexer("#u8(", Token::LeftBytevector);
test_lexer("#U8(", Token::LeftBytevector);
test_lexer("#t", Token::Boolean(true));
test_lexer("#true", Token::Boolean(true));
test_lexer("#tRUe", Token::Boolean(true));
test_lexer("#TRUE", Token::Boolean(true));
test_lexer("#f", Token::Boolean(false));
test_lexer("#F", Token::Boolean(false));
test_lexer("#false", Token::Boolean(false));
test_lexer("'", Token::PrefixOp("quote"));
test_lexer("`", Token::PrefixOp("quasiquote"));
test_lexer(",", Token::PrefixOp("unquote"));
test_lexer(",@", Token::PrefixOp("unquote-splicing"));
test_lexer(".", Token::Dot);
test_lexer("#;", Token::DatumComment);
test_lexer("#!fold-case", Token::FoldCase(true));
test_lexer("#!no-fold-case", Token::FoldCase(false));
test_lexer("#!NO-fOLD-cAse", Token::FoldCase(false));
}
#[test]
fn test_boolean_double_consume() {
test_lexer_fail("#true#t");
test_lexer_fail("#false#f");
test_lexer_fail("#t#true");
test_lexer_fail("#f#false");
}
#[test]
fn test_whitespace() {
test_lexer(" \t\n1", Token::from_i64(1));
test_lexer("\r\n1", Token::from_i64(1));
test_lexer("\r1", Token::from_i64(1));
}
#[test]
fn test_comment() {
test_lexer("; blah ; 10!)#!fold-case \n 1", Token::from_i64(1));
test_lexer("; 123\r123", Token::from_i64(123));
test_lexer(";\n3", Token::from_i64(3));
test_lexer("#| simple intra-line comment |# 1", Token::from_i64(1));
test_lexer("#| | # #| a |# 22 |# 1", Token::from_i64(1));
test_lexer("#| #| |#| |# 1 ;|# 2", Token::from_i64(1));
test_lexer("#|# 1 |# 2", Token::from_i64(2));
test_lexer("#|# |# 1", Token::from_i64(1));
}
#[test]
fn test_character() {
test_lexer(r"#\ ", Token::Character(' '));
test_lexer(r"#\a", Token::Character('a'));
test_lexer(r"#\A", Token::Character('A'));
test_lexer(r"#\⅋", Token::Character('⅋'));
// Unsure of this one
test_lexer(r"#\x", Token::Character('x'));
// Test that characters require a delimiter
test_lexer_fail(r"#\f12");
test_lexer_fail(r"#\uident");
}
#[test]
fn test_character_escape() {
test_lexer(r"#\x0", Token::Character('\u{0}'));
test_lexer(r"#\x61", Token::Character('a'));
test_lexer(r"#\X61", Token::Character('a'));
test_lexer(r"#\x062", Token::Character('b'));
test_lexer(r"#\x214b", Token::Character('⅋'));
test_lexer(r"#\X1d538", Token::Character('𝔸'));
test_lexer(r"#\x100000", Token::Character('\u{100000}'));
test_lexer(r"#\x000000061", Token::Character('a'));
}
#[test]
fn test_character_name() {
test_lexer(r"#\alarm", Token::Character('\u{7}'));
test_lexer(r"#\backspace", Token::Character('\u{8}'));
test_lexer(r"#\delete", Token::Character('\u{7f}'));
test_lexer(r"#\escape", Token::Character('\u{1b}'));
test_lexer(r"#\newline", Token::Character('\n'));
test_lexer(r"#\null", Token::Character('\u{0}'));
test_lexer(r"#\return", Token::Character('\r'));
test_lexer(r"#\space", Token::Character(' '));
test_lexer(r"#\tab", Token::Character('\t'));
}
#[test]
fn test_string() {
test_lexer("\"\"", Token::from_str(""));
test_lexer("\"Hello, world!\"", Token::from_str("Hello, world!"));
test_lexer_fail("\"xx");
}
#[ignore]
#[test]
fn test_string_escapes() {
test_lexer("\"x\\\"x\"", Token::from_str("x\"x"));
test_lexer_fail("\"\\\"");
test_lexer("\"\\\\\"", Token::from_str("\\"));
test_lexer("\"\\au\"", Token::from_str("\u{7}u"));
// Mnemonic escapes are case-sensitive, cf. R7RS p. 61
test_lexer_fail("\"\\A\"");
test_lexer("\"s\\bs\"", Token::from_str("s\u{8}s"));
test_lexer("\"4\\tt\"", Token::from_str("4\tt"));
test_lexer("\" \\n\\n\"", Token::from_str(" \n\n"));
test_lexer("\"\\r\\n\"", Token::from_str("\r\n"));
test_lexer("\"\\||\"", Token::from_str("||"));
test_lexer("\"\\x61;\"", Token::from_str("a"));
test_lexer("\"\\X0100000;\"", Token::from_str("\u{100000}"));
test_lexer("\"a\\\nb\"", Token::from_str("ab"));
test_lexer("\"a\\ \n\tb\"", Token::from_str("ab"));
test_lexer("\"a\\\t \r b\"", Token::from_str("ab"));
test_lexer("\"a\\\t \r\n b\"", Token::from_str("ab"));
test_lexer("\"a\\ \n\nb\"", Token::from_str("a\nb"));
}
#[test]
// Any newline in the string literal corresponds to a '\n' in the
// corresponding string, see R7RS p. 46.
fn test_string_newline() {
test_lexer("\"a\\ \r\rb\"", Token::from_str("a\nb"));
test_lexer("\"a\\ \r\r\nc\"", Token::from_str("a\nc"));
test_lexer("\"a\\ \n\r\nd\"", Token::from_str("a\nd"));
test_lexer("\"\n\"", Token::from_str("\n"));
test_lexer("\"\r\"", Token::from_str("\n"));
test_lexer("\"\r\n\"", Token::from_str("\n"));
}
#[cfg(test)]
fn test_rational(inp: &str, num: i64, den: i64, exact: Exactness) {
test_lexer(inp, Token::Number(Number::rational_i64(num, den, exact)))
}
#[cfg(test)]
use self::Exactness::*;
#[test]
fn test_simple_int() {
test_lexer("13", Token::from_i64(13));
test_lexer("-4", Token::from_i64(-4));
test_lexer("+ 20", Token::Identifier("+".to_string()));
}
#[test]
fn test_radix_int() {
test_lexer("123", Token::from_i64(123));
test_lexer("#d123", Token::from_i64(123));
test_lexer("#x123", Token::from_i64(0x123));
test_lexer("#o123", Token::from_i64(0o123));
test_lexer("#b101", Token::from_i64(0b101));
test_lexer_fail("#bxxx101");
}
#[test]
fn test_prefixes() {
test_lexer("#e123", Token::from_i64(123));
test_rational("#i123", 123, 1, Inexact);
test_lexer("#e#x10", Token::from_i64(16));
test_lexer("#x#e10", Token::from_i64(16));
test_rational("#i#x10", 16, 1, Inexact);
test_rational("#x#i10", 16, 1, Inexact);
}
#[ignore]
#[test]
fn test_fraction() {
test_rational("1/2", 1, 2, Exact);
test_rational("#x1/10", 1, 16, Exact);
test_rational("#i1/2", 1, 2, Inexact);
test_lexer_fail("1.0/2");
test_lexer_fail("1/2.0");
}
#[test]
fn test_float() {
test_rational("1.234e2", 1234, 10, Inexact);
test_lexer("#e1e2", Token::from_i64(100));
test_lexer("#e1e0", Token::from_i64(1));
test_lexer("#e10e-1", Token::from_i64(1));
test_rational(".10", 1, 10, Inexact);
test_rational("1e2", 100, 1, Inexact);
test_rational("13e-10", 13, 10_000_000_000, Inexact);
test_rational("1.", 1, 1, Inexact);
test_rational("-1e1", -10, 1, Inexact);
test_lexer_fail("1e1e1");
}
#[test]
// Ensure an exact decimal literal is not passed through a precision-losing f64 conversion
fn test_exact_float() {
assert_eq!(1.000000000000000001, 1.0);
test_rational("#e1.000000000000000001",
1000000000000000001,
1000000000000000000,
Exact);
}
#[ignore]
#[test]
fn test_infnan() {
//test_lexer("+inf.0", ...);
//test_lexer("-inf.0", ...);
//test_lexer("+nan.0", ...);
//test_lexer("-nan.0", ...);
}
#[ignore]
#[test]
fn test_complex() {
//test_lexer("1@-2.0", ...);
//test_lexer("-2+3i", ...);
//test_lexer("1-1e0i", ...);
//test_lexer("#x10+i", ...);
//test_lexer("1/2-i", ...);
//test_lexer("+i", ...);
//test_lexer("-i", ...);
//test_lexer("+1.0i", ...);
//test_lexer("#o-11/10i", ...);
//test_lexer("+inf.0i", ...);
//test_lexer("-inf.0i", ...);
//test_lexer("+nan.0i", ...);
//test_lexer("-nan.0i", ...);
//test_lexer("1.1e1+12/3i". ...);
}
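// Hedged usage sketch: tokenizing a small Scheme form through the public Lexer iterator;
// the token sequence below follows directly from the parsers above.
#[test]
fn test_lexer_iterator_small_form() {
    let tokens: Vec<Token> = Lexer::from_str("(+ 1 2)").map(|t| t.unwrap()).collect();
    assert_eq!(
        tokens,
        vec![
            Token::LeftParen,
            Token::Identifier("+".to_string()),
            Token::from_i64(1),
            Token::from_i64(2),
            Token::RightParen,
        ]
    );
}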
| true
|
d9198837b51c38ec38820def50d91ecf2d767448
|
Rust
|
sharkAndshark/build_pbf_glyphs
|
/src/main.rs
|
UTF-8
| 6,525
| 2.71875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
//! This binary crate provides a CLI utility for batch converting a directory of fonts into
//! signed distance fields, encoded in a protocol buffer for renderers such as Mapbox GL. This
//! isn't really anything novel; it's just a frontend to
//! [pbf_font_tools](https://github.com/stadiamaps/pbf_font_tools) that behaves similar to
//! [node-fontnik](https://github.com/mapbox/node-fontnik), but is faster and (in our opinion)
//! a bit easier to use since it doesn't depend on node and all its headaches, or C++ libraries
//! that need to be built from scratch (this depends on FreeType, but that's widely available on
//! nearly any *nix-based system).
//!
//! Check out
//! [sdf_glyph_renderer](https://github.com/stadiamaps/sdf_glyph_renderer) for more technical
//! details on how this works.
//!
//! NOTE: This requires you to have FreeType installed on your system. We recommend using
//! FreeType 2.10 or newer. Everything will still work against many older 2.x versions, but
//! the glyph generation improves over time so things will generally look better with newer
//! versions.
//!
//! ## Usage
//!
//! This tool will create `out_dir` if necessary, and will put each range (of 256 glyphs, for
//! compatibility with Mapbox fontstack convention) in a new subdirectory bearing the font name.
//! **Any existing glyphs will be overwritten in place.**
//!
//! ```
//! $ build_pbf_glyphs /path/to/font_dir /path/to/out_dir
//! ```
use std::fs::{create_dir_all, read_dir, File};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
use clap::{app_from_crate, crate_authors, crate_description, crate_name, crate_version, Arg};
use freetype::{Face, Library};
use protobuf::CodedOutputStream;
use protobuf::Message;
use spmc::{channel, Receiver};
use std::time::Instant;
static TOTAL_GLYPHS_RENDERED: AtomicUsize = AtomicUsize::new(0);
fn worker(
base_out_dir: String,
radius: usize,
cutoff: f64,
rx: Receiver<Option<(PathBuf, PathBuf)>>,
) {
let lib = Library::init().expect("Unable to initialize FreeType");
while let Ok(Some((path, stem))) = rx.recv() {
let out_dir =
Path::new(&base_out_dir).join(stem.to_str().expect("Unable to extract file stem"));
create_dir_all(&out_dir).expect("Unable to create output directory");
println!("Processing {}", path.to_str().unwrap());
// Load the font once to save useless I/O
let face = lib.new_face(&path, 0).expect("Unable to load font");
let num_faces = face.num_faces() as usize;
let faces: Vec<Face> = (0..num_faces)
.map(|face_index| {
lib.new_face(&path, face_index as isize)
.expect("Unable to load face")
})
.collect();
let mut start = 0;
let mut end = 255;
let mut glyphs_rendered = 0;
let path_str = path
.to_str()
.expect("Unable to convert path to a valid UTF-8 string.");
while start < 65536 {
let mut glyphs = pbf_font_tools::glyphs::glyphs::new();
for (face_index, face) in faces.iter().enumerate() {
if let Ok(stack) = pbf_font_tools::generate::glyph_range_for_face(
face, start, end, 24, radius, cutoff,
) {
glyphs_rendered += stack.glyphs.len();
glyphs.mut_stacks().push(stack);
} else {
println!(
"ERROR: Failed to render fontstack for face {} in {}",
face_index, path_str
)
}
}
let mut file = File::create(out_dir.join(format!("{}-{}.pbf", start, end)))
.expect("Unable to create file");
let mut cos = CodedOutputStream::new(&mut file);
glyphs.write_to(&mut cos).expect("Unable to write");
cos.flush().expect("Unable to flush");
start += 256;
end += 256;
}
println!(
"Found {} valid glyphs across {} face(s) in {}",
glyphs_rendered, num_faces, path_str
);
TOTAL_GLYPHS_RENDERED.fetch_add(glyphs_rendered, Ordering::Relaxed);
}
}
fn main() {
let matches = app_from_crate!()
.arg(Arg::with_name("FONT_DIR")
.help("Sets the source directory to be scanned for fonts")
.required(true)
.index(1))
.arg(Arg::with_name("OUT_DIR")
.help("Sets the output directory in which the PBF glyphs will be placed (each font will be placed in a new subdirectory with appropriately named PBF files)")
.required(true)
.index(2))
.get_matches();
let font_dir = Path::new(matches.value_of("FONT_DIR").unwrap());
let out_dir = matches.value_of("OUT_DIR").unwrap();
let (mut tx, rx) = channel();
let mut join_handles = Vec::new();
let num_threads = num_cpus::get();
println!("Starting {} worker threads...", num_threads);
for _ in 0..num_threads {
let rx = rx.clone();
let out_dir = String::from(out_dir);
join_handles.push(thread::spawn(move || worker(out_dir, 8, 0.25, rx)));
}
let render_start = Instant::now();
for entry in read_dir(font_dir).expect("Unable to open font directory") {
if let Ok(dir_entry) = entry {
let path = dir_entry.path();
if let (Some(stem), Some(extension)) = (path.file_stem(), path.extension()) {
if path.is_file()
&& (extension == "otf" || extension == "ttf" || extension == "ttc")
{
tx.send(Some((path.clone(), PathBuf::from(stem))))
.expect("Unable to push job to thread worker");
}
}
}
}
for _ in 0..num_threads {
// Sentinel value to signal the end of the work pool for each thread
tx.send(None)
.expect("Unable to push completion job to thread worker");
}
for handle in join_handles {
handle.join().unwrap();
}
let total_glyphs_rendered = TOTAL_GLYPHS_RENDERED.load(Ordering::Relaxed);
let render_duration = render_start.elapsed();
let duration_per_glyph = render_duration / total_glyphs_rendered as u32;
println!(
"Done. Rendered {} glyph(s) in {:?} ({:?}/glyph)",
total_glyphs_rendered, render_duration, duration_per_glyph
);
}
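// Hedged sketch: the range loop in `worker` always walks the Basic Multilingual Plane
// in 256-glyph blocks, so each font directory ends up with 256 files named
// "0-255.pbf" through "65280-65535.pbf".
#[cfg(test)]
mod range_naming_sketch {
    #[test]
    fn ranges_cover_the_bmp_in_256_glyph_blocks() {
        let mut names = Vec::new();
        let mut start = 0u32;
        let mut end = 255u32;
        while start < 65536 {
            names.push(format!("{}-{}.pbf", start, end));
            start += 256;
            end += 256;
        }
        assert_eq!(names.len(), 256);
        assert_eq!(names[0], "0-255.pbf");
        assert_eq!(names[255], "65280-65535.pbf");
    }
}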
| true
|
cb568453c9a548bc32f37554124247de4d36a82e
|
Rust
|
ne0ndrag0n/reskit
|
/src/reskit/tileset.rs
|
UTF-8
| 4,539
| 2.6875
| 3
|
[] |
no_license
|
use crate::reskit::utility;
use std::process::exit;
use std::fs;
use std::fs::File;
use std::io::Write;
use image::{ GenericImageView, DynamicImage };
fn color_to_palette( r: u16, g: u16, b: u16, palette: &mut [u16; 16] ) -> u32 {
let final_val =
( ( r & 0x00F0 ) >> 4 ) |
( g & 0x00F0 ) |
( ( b & 0x00F0 ) << 4 );
// Does the color already exist?
for i in 0..palette.len() {
if palette[ i ] == final_val {
return i as u32;
}
}
// Place the colour in the next open slot
for i in 1..palette.len() {
if palette[ i ] == 0 {
palette[ i ] = final_val;
return i as u32;
}
}
utility::print_error( "image contains greater than 15 colours, exiting..." );
exit( 3 );
}
fn get_pixel( image: &DynamicImage, palette: &mut [u16; 16], x: u32, y: u32 ) -> u32 {
let ( max_x, max_y ) = image.dimensions();
if x >= max_x || y >= max_y {
return 0;
}
let pixel = image.get_pixel( x, y );
color_to_palette( pixel[ 0 ].into(), pixel[ 1 ].into(), pixel[ 2 ].into(), palette )
}
fn output_bin( image_filename: &str, output_filename: &str, palette: [u16; 16], body: Vec<u8> ) {
let mut output_palette: Vec< u8 > = Vec::new();
for i in 0..palette.len() {
let bytes = palette[ i ].to_be_bytes();
for i in 0..2 {
output_palette.push( bytes[ i ] );
}
}
let output_try = File::create( output_filename );
if let Ok( mut output_file ) = output_try {
output_file.write( &output_palette ).unwrap();
output_file.write( &body ).unwrap();
utility::print_good( format!( "converted file {}", image_filename ).as_str() );
} else {
utility::print_error( format!( "could not open filename for output {}", output_filename ).as_str() );
}
}
fn output_inc( image_filename: &str, output_filename: &str, palette: [u16; 16], body: Vec<u8> ) {
let mut output_palette: Vec< u8 > = Vec::new();
for i in 0..palette.len() {
let bytes = palette[ i ].to_be_bytes();
for i in 0..2 {
output_palette.push( bytes[ i ] );
}
}
let mut output_c: String = String::new();
let mut output_h: String = String::new();
// Build the header (output_h) declarations based on output_filename
output_h += "#pragma once\n\n";
output_h += &format!( "extern const unsigned char {}[];\n", output_filename );
output_h += &format!( "extern const unsigned int {}_len;\n", output_filename );
let mut row_counter = 12;
// Spray palette
output_c += &format!( "const unsigned char {}[] = {{\n", output_filename );
for i in 0..32 {
if row_counter == 0 {
row_counter = 12;
output_c += "\n";
} else {
row_counter = row_counter - 1;
}
output_c += &format!( "0x{:X},", output_palette[ i ] );
}
for i in 0..body.len() {
if row_counter == 0 {
row_counter = 12;
output_c += "\n";
} else {
row_counter = row_counter - 1;
}
output_c += &format!( "0x{:X}", body[ i ] );
if i != ( body.len() - 1 ) {
output_c += ",";
}
}
output_c += "\n};\n";
output_c += &format!( "const unsigned int {}_len = {}\n", output_filename, output_palette.len() + body.len() );
fs::write( output_filename.to_string() + ".h", output_h ).expect( "Could not write header file" );
fs::write( output_filename.to_string() + ".c", output_c ).expect( "Could not write source file" );
utility::print_good( format!( "converted file {}", image_filename ).as_str() );
}
pub fn generate( image_filename: &str, output_filename: &str, output_mode: &str ) {
let img = image::open( image_filename );
if let Ok( img ) = img {
let ( mut max_x, mut max_y ) = img.dimensions();
if max_x % 8 != 0 { max_x = ( 8 * ( max_x / 8 ) ) + ( 8 - ( max_x % 8 ) ); }
if max_y % 8 != 0 { max_y = ( 8 * ( max_y / 8 ) ) + ( 8 - ( max_y % 8 ) ); }
let mut palette: [u16; 16] = [ 0; 16 ];
let mut body: Vec< u8 > = Vec::new();
for y in ( 0..max_y ).step_by( 8 ) {
for x in ( 0..max_x ).step_by( 8 ) {
for cell_y in 0..8 {
let mut series: u32 = 0;
for cell_x in 0..8 {
let nibble: u32 = get_pixel( &img, &mut palette, cell_x + x, cell_y + y ) << ( ( 7 - cell_x ) * 4 );
series = series | nibble;
}
let bytes = series.to_be_bytes();
for i in 0..4 {
body.push( bytes[ i ] );
}
}
}
}
if output_mode == "bin" {
output_bin( image_filename, output_filename, palette, body );
} else if output_mode == "inc" {
output_inc( image_filename, output_filename, palette, body );
} else {
utility::print_error( format!( "invalid output mode {}", output_mode ).as_str() );
}
} else {
utility::print_error( format!( "could not open filename {}", image_filename ).as_str() );
}
}
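// Hedged sketch: `color_to_palette` keeps only the high nibble of each 8-bit channel
// and packs the result as a BGR nibble layout (0x0BGR), so pure red becomes 0x000F,
// pure green 0x00F0 and pure blue 0x0F00.
#[cfg(test)]
mod palette_packing_sketch {
    fn pack(r: u16, g: u16, b: u16) -> u16 {
        ((r & 0x00F0) >> 4) | (g & 0x00F0) | ((b & 0x00F0) << 4)
    }

    #[test]
    fn packs_high_nibbles_as_bgr() {
        assert_eq!(pack(0xFF, 0x00, 0x00), 0x000F);
        assert_eq!(pack(0x00, 0xFF, 0x00), 0x00F0);
        assert_eq!(pack(0x00, 0x00, 0xFF), 0x0F00);
    }
}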
| true
|
9a25138e0b4170bf1ec4aab2e84688d04835684c
|
Rust
|
clpi/voile-rs
|
/voile-util/src/tags.rs
|
UTF-8
| 930
| 3.34375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::fmt::{Display, Error, Formatter};
/// Row-polymorphic types.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Ord, PartialOrd, Hash)]
pub enum VarRec {
Variant,
Record,
}
impl Display for VarRec {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
f.write_str(match self {
VarRec::Variant => "Sum",
VarRec::Record => "Rec",
})
}
}
/// Various kinds of dependent types
#[derive(Debug, PartialEq, Eq, Copy, Clone, Ord, PartialOrd, Hash)]
pub enum PiSig {
Pi,
Sigma,
}
impl Display for PiSig {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.write_str(match self {
PiSig::Pi => "\u{03A0}",
PiSig::Sigma => "\u{03A3}",
})
}
}
/// Visibility of a parameter -- it can be explicit or implicit
#[derive(Debug, PartialEq, Eq, Copy, Clone, Ord, PartialOrd, Hash)]
pub enum Plicit {
Ex,
Im,
}
| true
|
1fac40e7aeca830b78fe2cfcf0808ba1b7e20b6e
|
Rust
|
ToruNiina/rust-practice-project-euler
|
/problem024/src/main.rs
|
UTF-8
| 375
| 2.75
| 3
|
[] |
no_license
|
extern crate permutohedron;
use permutohedron::LexicalPermutation;
fn main() {
let mut data = [0,1,2,3,4,5,6,7,8,9];
let mut permutations = Vec::new();
loop {
permutations.push(data.to_vec());
if !data.next_permutation() {
break;
}
}
permutations.sort();
println!("the answer is {:?}", permutations[999999]);
}
| true
|
a1507c0d3fcb6b2aac3131db5532b91b5e0779a9
|
Rust
|
sybila/biodivine-lib-param-bn
|
/src/_impl_fn_update.rs
|
UTF-8
| 29,343
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
use crate::symbolic_async_graph::SymbolicContext;
use crate::FnUpdate::*;
use crate::_aeon_parser::FnUpdateTemp;
use crate::{BinaryOp, BooleanNetwork, ExtendedBoolean, FnUpdate, ParameterId, Space, VariableId};
use biodivine_lib_bdd::{Bdd, BddPartialValuation, BddVariable};
use std::collections::{HashMap, HashSet};
/// Constructor and destructor utility methods. These mainly avoid unnecessary boxing
/// and spare callers from exhaustive pattern matching where it is not needed.
impl FnUpdate {
/// Create a `true` formula.
pub fn mk_true() -> FnUpdate {
Const(true)
}
/// Create a `false` formula.
pub fn mk_false() -> FnUpdate {
Const(false)
}
/// Create an `x` formula where `x` is a Boolean variable.
pub fn mk_var(id: VariableId) -> FnUpdate {
Var(id)
}
/// Create a `p(x_1, ..., x_k)` formula where `p` is a parameter function and `x_1` through
/// `x_k` are its arguments.
pub fn mk_param(id: ParameterId, args: &[VariableId]) -> FnUpdate {
Param(id, args.to_vec())
}
/// Create a `!phi` formula, where `phi` is an inner `FnUpdate`.
pub fn mk_not(inner: FnUpdate) -> FnUpdate {
Not(Box::new(inner))
}
/// Create a `phi 'op' psi` where `phi` and `psi` are arguments of `op` operator.
pub fn mk_binary(op: BinaryOp, left: FnUpdate, right: FnUpdate) -> FnUpdate {
Binary(op, Box::new(left), Box::new(right))
}
/// Negate this function.
pub fn negation(self) -> FnUpdate {
FnUpdate::mk_not(self)
}
/// Create a conjunction.
pub fn and(self, other: FnUpdate) -> FnUpdate {
FnUpdate::mk_binary(BinaryOp::And, self, other)
}
/// Create a disjunction.
pub fn or(self, other: FnUpdate) -> FnUpdate {
FnUpdate::mk_binary(BinaryOp::Or, self, other)
}
/// Create an exclusive or.
pub fn xor(self, other: FnUpdate) -> FnUpdate {
FnUpdate::mk_binary(BinaryOp::Xor, self, other)
}
/// Create an implication.
pub fn implies(self, other: FnUpdate) -> FnUpdate {
FnUpdate::mk_binary(BinaryOp::Imp, self, other)
}
/// Create an equivalence.
pub fn iff(self, other: FnUpdate) -> FnUpdate {
FnUpdate::mk_binary(BinaryOp::Iff, self, other)
}
/// If `Const`, return the value, otherwise return `None`.
pub fn as_const(&self) -> Option<bool> {
match self {
Const(value) => Some(*value),
_ => None,
}
}
/// If `Var`, return the id, otherwise return `None`.
pub fn as_var(&self) -> Option<VariableId> {
match self {
Var(value) => Some(*value),
_ => None,
}
}
/// If `Param`, return the id and args, otherwise return `None`.
pub fn as_param(&self) -> Option<(ParameterId, &[VariableId])> {
match self {
Param(id, args) => Some((*id, args)),
_ => None,
}
}
/// If `Not`, return the inner function, otherwise return `None`.
pub fn as_not(&self) -> Option<&FnUpdate> {
match self {
Not(inner) => Some(inner),
_ => None,
}
}
/// If `Binary`, return the operator and left/right formulas, otherwise return `None`.
pub fn as_binary(&self) -> Option<(&FnUpdate, BinaryOp, &FnUpdate)> {
match self {
Binary(op, l, r) => Some((l, *op, r)),
_ => None,
}
}
}
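// Hedged sketch: building `!(x_0 & x_1)` with the constructor helpers above and taking
// it apart again with the destructor helpers. The variable indices are illustrative only.
#[cfg(test)]
mod constructor_helpers_sketch {
    use crate::{BinaryOp, FnUpdate, VariableId};

    #[test]
    fn build_and_inspect_a_small_formula() {
        let x0 = FnUpdate::mk_var(VariableId::from_index(0));
        let x1 = FnUpdate::mk_var(VariableId::from_index(1));
        let formula = x0.clone().and(x1).negation();
        let inner = formula.as_not().unwrap();
        let (left, op, _right) = inner.as_binary().unwrap();
        assert!(op == BinaryOp::And);
        assert!(left == &x0);
    }
}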
/// Other utility methods.
impl FnUpdate {
/// Try to parse an update function from a string expression using the provided `network`
/// as context.
pub fn try_from_str(expression: &str, network: &BooleanNetwork) -> Result<FnUpdate, String> {
let tmp = FnUpdateTemp::try_from(expression)?;
let update = tmp.into_fn_update(network)?;
Ok(*update)
}
/// Build an update function from an instantiated `Bdd`.
///
/// The support set of the `Bdd` must be a subset of the state variables, i.e. the `Bdd`
/// can only depend on the network variables. Note that it should be possible to also build
/// a variant of this function where this requirement is lifted, but it's a bit more
/// complicated and so far we are ok with only building fully instantiated update functions.
///
/// The function produces a DNF representation based on all satisfying clauses. This is far
/// from minimal, but appears to be slightly more concise than the default translation in
/// lib-bdd.
pub fn build_from_bdd(context: &SymbolicContext, bdd: &Bdd) -> FnUpdate {
if bdd.is_true() {
return FnUpdate::mk_true();
}
if bdd.is_false() {
return FnUpdate::mk_false();
}
let state_variables: HashMap<BddVariable, VariableId> = context
.state_variables()
.iter()
.enumerate()
.map(|(i, v)| (*v, VariableId::from_index(i)))
.collect();
let support = bdd.support_set();
for k in &support {
if !state_variables.contains_key(k) {
panic!("Non-state variables found in the provided BDD.")
}
}
// Because the BDD isn't constant, there must be at least one clause and each clause
// must have at least one literal.
fn build_clause(
map: &HashMap<BddVariable, VariableId>,
clause: BddPartialValuation,
) -> FnUpdate {
fn build_literal(
map: &HashMap<BddVariable, VariableId>,
literal: (BddVariable, bool),
) -> FnUpdate {
let var = FnUpdate::mk_var(*map.get(&literal.0).unwrap());
if literal.1 {
var
} else {
FnUpdate::mk_not(var)
}
}
let mut literals = clause.to_values().into_iter();
let mut clause = build_literal(map, literals.next().unwrap());
for literal in literals {
let literal = build_literal(map, literal);
clause = FnUpdate::mk_binary(BinaryOp::And, clause, literal);
}
clause
}
let mut clauses = bdd.sat_clauses();
let mut result = build_clause(&state_variables, clauses.next().unwrap());
for clause in clauses {
let clause = build_clause(&state_variables, clause);
result = FnUpdate::mk_binary(BinaryOp::Or, result, clause);
}
result
}
/// Return a sorted vector of all variables that are actually used as inputs in this function.
pub fn collect_arguments(&self) -> Vec<VariableId> {
fn r_arguments(function: &FnUpdate, args: &mut HashSet<VariableId>) {
match function {
Const(_) => (),
Var(id) => {
args.insert(*id);
}
Param(_, p_args) => {
for id in p_args {
args.insert(*id);
}
}
Not(inner) => r_arguments(inner, args),
Binary(_, l, r) => {
r_arguments(l, args);
r_arguments(r, args);
}
};
}
let mut args = HashSet::new();
r_arguments(self, &mut args);
let mut result: Vec<VariableId> = args.into_iter().collect();
result.sort();
result
}
/// Return a sorted vector of all parameters (i.e. uninterpreted functions) that are used
/// in this update function.
pub fn collect_parameters(&self) -> Vec<ParameterId> {
fn r_parameters(function: &FnUpdate, params: &mut HashSet<ParameterId>) {
match function {
Const(_) => (),
Var(_) => (),
Param(id, _) => {
params.insert(*id);
}
Not(inner) => r_parameters(inner, params),
Binary(_, l, r) => {
r_parameters(l, params);
r_parameters(r, params);
}
};
}
let mut params = HashSet::new();
r_parameters(self, &mut params);
let mut result: Vec<ParameterId> = params.into_iter().collect();
result.sort();
result
}
/// Convert this update function to a string, taking names from the provided `BooleanNetwork`.
pub fn to_string(&self, context: &BooleanNetwork) -> String {
match self {
Const(value) => value.to_string(),
Var(id) => context.get_variable_name(*id).to_string(),
Not(inner) => format!("!{}", inner.to_string(context)),
Binary(op, l, r) => {
format!("({} {} {})", l.to_string(context), op, r.to_string(context))
}
Param(id, args) => {
if args.is_empty() {
context[*id].get_name().to_string()
} else {
let mut arg_string = format!("({}", context.get_variable_name(args[0]));
for arg in args.iter().skip(1) {
arg_string = format!("{}, {}", arg_string, context.get_variable_name(*arg));
}
format!("{}{})", context[*id].get_name(), arg_string)
}
}
}
}
/// If possible, evaluate this function using the given network variable valuation.
///
/// Note that this only works when the function output does not depend on parameters, and
/// all necessary variable values are part of the valuation. Otherwise, the function
/// returns `None`, as the value cannot be determined.
///
/// However, note that in some cases, even a partially specified function can be evaluated.
/// For example, `A & f(X, Y)` is false whenever `A = false`, regardless of uninterpreted
/// function `f`. In such cases, this method may still output the correct result.
///
/// In other words, the meaning of this method should be interpreted as "if it is possible
/// to unambiguously evaluate this function using the provided valuation, do it; otherwise
/// return `None`".
pub fn evaluate(&self, values: &HashMap<VariableId, bool>) -> Option<bool> {
match self {
Const(value) => Some(*value),
Var(id) => values.get(id).cloned(),
Param(_, _) => None,
Not(inner) => inner.evaluate(values).map(|it| !it),
Binary(op, left, right) => {
let left = left.evaluate(values);
let right = right.evaluate(values);
match op {
BinaryOp::And => match (left, right) {
(Some(false), _) => Some(false),
(_, Some(false)) => Some(false),
(Some(true), Some(true)) => Some(true),
_ => None,
},
BinaryOp::Or => match (left, right) {
(Some(true), _) => Some(true),
(_, Some(true)) => Some(true),
(Some(false), Some(false)) => Some(false),
_ => None,
},
BinaryOp::Iff => match (left, right) {
(Some(left), Some(right)) => Some(left == right),
_ => None,
},
BinaryOp::Xor => match (left, right) {
(Some(left), Some(right)) => Some(left != right),
_ => None,
},
BinaryOp::Imp => match (left, right) {
(Some(false), _) => Some(true),
(_, Some(true)) => Some(true),
(Some(true), Some(false)) => Some(false),
_ => None,
},
}
}
}
}
/// Test that this update function is a syntactic specialisation of the provided `FnUpdate`.
///
/// Syntactic specialisation is a function that has the same abstract syntax tree, except that
/// some occurrences of parameters can be substituted for more concrete Boolean functions.
///
/// Note that this is not entirely bulletproof, as it does not check for usage of multiple
/// parameters within the same function, which could influence the semantics of the main
/// function, but does not influence the specialisation.
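    ///
    /// For instance (a sketch mirroring the test below), `!(a => b) | ((a <=> b) & g(b))`
    /// is a specialisation of `!(a => b) | f(a, b)`, because the parameter term `f(a, b)`
    /// is replaced by a concrete function that only uses the declared arguments `a` and `b`.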
pub fn is_specialisation_of(&self, other: &FnUpdate) -> bool {
match other {
Const(_) => self == other,
Var(_) => self == other,
Not(inner) => {
if let Some(self_inner) = self.as_not() {
self_inner.is_specialisation_of(inner)
} else {
false
}
}
Binary(op, left, right) => {
if let Some((self_left, self_op, self_right)) = self.as_binary() {
self_op == *op
&& self_left.is_specialisation_of(left)
&& self_right.is_specialisation_of(right)
} else {
false
}
}
Param(_, args) => {
// Every argument in this sub-tree must be declared in the parameter.
self.collect_arguments()
.iter()
.all(|arg| args.contains(arg))
}
}
}
/// Allows us to iterate through all nodes of the abstract syntax tree of this function
/// in post-order.
///
/// Note that this is a preliminary version of the API. A more robust implementation should
/// provide a standard iterator interface.
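    ///
    /// A minimal usage sketch (hypothetical `update: FnUpdate`; not a compiled doc-test):
    ///
    /// ```ignore
    /// let mut node_count = 0;
    /// update.walk_postorder(&mut |_node| node_count += 1);
    /// ```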
pub fn walk_postorder<F>(&self, action: &mut F)
where
F: FnMut(&FnUpdate),
{
match self {
Const(_) => action(self),
Param(_, _) => action(self),
Var(_) => action(self),
Not(inner) => {
inner.walk_postorder(action);
action(self);
}
Binary(_, left, right) => {
left.walk_postorder(action);
right.walk_postorder(action);
action(self);
}
}
}
/// Create a copy of this function which replaces every occurrence of every
/// `VariableId` with a new one supplied by the provided vector (original `VariableId`
/// is the index into the vector). Similarly replaces every `ParameterId`.
pub fn substitute(&self, vars: &[VariableId], params: &[ParameterId]) -> FnUpdate {
match self {
Const(_) => self.clone(),
Param(id, args) => {
let new_args = args.iter().map(|it| vars[it.0]).collect();
Param(params[id.0], new_args)
}
Var(id) => FnUpdate::mk_var(vars[id.0]),
Not(inner) => {
let inner = inner.substitute(vars, params);
FnUpdate::mk_not(inner)
}
Binary(op, left, right) => {
let left = left.substitute(vars, params);
let right = right.substitute(vars, params);
FnUpdate::mk_binary(*op, left, right)
}
}
}
/// Returns true if this update function uses the given parameter.
pub fn contains_parameter(&self, parameter: ParameterId) -> bool {
let mut result = false;
let mut is_param = |it: &FnUpdate| {
if let Param(id, _) = it {
result = result || (*id == parameter);
}
};
self.walk_postorder(&mut is_param);
result
}
/// Returns true if this update function uses the given variable.
pub fn contains_variable(&self, variable: VariableId) -> bool {
let mut result = false;
let mut is_var = |it: &FnUpdate| match it {
Var(id) => result = result || (*id == variable),
Param(_, args) => result = result || args.contains(&variable),
_ => {}
};
self.walk_postorder(&mut is_var);
result
}
/// Perform a syntactic transformation of this update function which eliminates all binary
/// operators except for `&` and `|`. Negation is also preserved.
///
    /// Note that the result is neither a conjunctive nor a disjunctive normal form; it just
    /// eliminates all operators other than conjunction and disjunction.
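    ///
    /// As a quick sketch of the rewrites performed: `a => b` becomes `!a | b`,
    /// `a ^ b` becomes `(a | b) & !(a & b)`, and `a <=> b` becomes `(a & b) | (!a & !b)`.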
pub fn to_and_or_normal_form(&self) -> FnUpdate {
match self {
Const(_) | Var(_) | Param(_, _) => self.clone(),
Not(inner) => inner.to_and_or_normal_form().negation(),
Binary(op, left, right) => {
let left = left.to_and_or_normal_form();
let right = right.to_and_or_normal_form();
match op {
BinaryOp::And | BinaryOp::Or => FnUpdate::mk_binary(*op, left, right),
BinaryOp::Imp => {
// !left | right
left.negation().or(right)
}
BinaryOp::Xor => {
// (left | right) & !(left & right)
let both = left.clone().and(right.clone());
                        let one = left.or(right);
one.and(both.negation())
}
BinaryOp::Iff => {
// (left & right) | (!left & !right)
let both = left.clone().and(right.clone());
let neither = left.negation().and(right.negation());
both.or(neither)
}
}
}
}
}
/// Perform a syntactic transformation which pushes every negation to literals (constants,
/// variables, and parameter terms).
///
/// Note that constants will be automatically negated (true => false, false => true). Also,
/// keep in mind that this will rewrite binary operators (and => or, iff => xor, etc.), so
/// don't expect the function to look the same afterwards.
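    ///
    /// For example (a sketch of the rewrites below): `!(a & b)` becomes `!a | !b`,
    /// `!(a => b)` becomes `a & !b`, and `!(a <=> b)` becomes `a ^ b`.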
pub fn distribute_negation(&self) -> FnUpdate {
fn recursion(update: &FnUpdate, invert: bool) -> FnUpdate {
match update {
Const(value) => Const(*value != invert),
Var(var) => {
if invert {
Var(*var).negation()
} else {
update.clone()
}
}
Param(id, args) => {
if invert {
Param(*id, args.clone()).negation()
} else {
update.clone()
}
}
Not(inner) => recursion(inner, !invert),
Binary(op, left, right) => {
if !invert {
// If we are not inverting, just propagate the result.
FnUpdate::mk_binary(*op, recursion(left, false), recursion(right, false))
} else {
// Otherwise we must do magic.
match op {
BinaryOp::And => {
// !(left & right) = (!left | !right)
let left = recursion(left, true);
let right = recursion(right, true);
left.or(right)
}
BinaryOp::Or => {
// !(left | right) = (!left & !right)
let left = recursion(left, true);
let right = recursion(right, true);
left.and(right)
}
BinaryOp::Imp => {
// !(left => right) = (left & !right)
let left = recursion(left, false);
let right = recursion(right, true);
left.and(right)
}
BinaryOp::Xor => {
// !(left ^ right) = (left <=> right)
let left = recursion(left, false);
let right = recursion(right, false);
left.iff(right)
}
BinaryOp::Iff => {
// !(left <=> right) = (left ^ right)
let left = recursion(left, false);
let right = recursion(right, false);
left.xor(right)
}
}
}
}
}
}
recursion(self, false)
}
/// Perform partial evaluation of this function using extended Boolean values in the given
/// `Space`.
pub fn eval_in_space(&self, space: &Space) -> ExtendedBoolean {
match self {
Const(value) => {
if *value {
ExtendedBoolean::One
} else {
ExtendedBoolean::Zero
}
}
Var(var) => space[*var],
Param(_, _) => {
// We assume that a parameter can evaluate to anything.
ExtendedBoolean::Any
}
Not(inner) => inner.eval_in_space(space).negate(),
Binary(op, left, right) => {
let left = left.eval_in_space(space);
let right = right.eval_in_space(space);
match op {
BinaryOp::Or => left.or(right),
BinaryOp::And => left.and(right),
BinaryOp::Imp => left.implies(right),
BinaryOp::Iff => left.iff(right),
BinaryOp::Xor => left.xor(right),
}
}
}
}
}
#[cfg(test)]
mod tests {
use crate::symbolic_async_graph::SymbolicContext;
use crate::{BinaryOp, BooleanNetwork, FnUpdate, VariableId};
use biodivine_lib_bdd::bdd;
use std::collections::HashMap;
use std::convert::TryFrom;
#[test]
fn fn_update_specialisation_test() {
let bn = BooleanNetwork::try_from(
r"
a -> c1
b -> c1
a -> c2
b -> c2
a -> c3
b -> c3
$c1: !(a => b) | f(a, b)
$c2: !(a => b) | ((a <=> b) & g(b))
$c3: (a => b) | f(a, b)
",
)
.unwrap();
let c1 = bn.as_graph().find_variable("c1").unwrap();
let c2 = bn.as_graph().find_variable("c2").unwrap();
let c3 = bn.as_graph().find_variable("c3").unwrap();
let fn_c1 = bn.get_update_function(c1).as_ref().unwrap();
let fn_c2 = bn.get_update_function(c2).as_ref().unwrap();
let fn_c3 = bn.get_update_function(c3).as_ref().unwrap();
assert!(fn_c2.is_specialisation_of(fn_c1));
assert!(!fn_c1.is_specialisation_of(fn_c2));
assert!(!fn_c3.is_specialisation_of(fn_c1));
assert!(!fn_c3.is_specialisation_of(fn_c2));
assert!(fn_c3.is_specialisation_of(fn_c3));
}
#[test]
fn fn_update_eval_test() {
let bn = BooleanNetwork::try_from(
r"
a -> c
b -| c
$c: true & (!a | (a & b) | f(b))
",
)
.unwrap();
// This will not test all possible branches, but should cover the decisions
// reasonably well...
let a = bn.as_graph().find_variable("a").unwrap();
let b = bn.as_graph().find_variable("b").unwrap();
let c = bn.as_graph().find_variable("c").unwrap();
let fun = bn.get_update_function(c).as_ref().unwrap();
let mut vals = HashMap::new();
assert_eq!(None, fun.evaluate(&vals));
vals.insert(a, false);
assert_eq!(Some(true), fun.evaluate(&vals));
vals.insert(a, true);
vals.insert(b, true);
assert_eq!(Some(true), fun.evaluate(&vals));
vals.insert(a, true);
vals.insert(b, false);
assert_eq!(None, fun.evaluate(&vals));
}
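    // An extra sketch (not part of the original test suite): the syntactic normal-form
    // transformations should preserve the truth table of a parameter-free function.
    // As in the test below, the function does not have to respect the regulation
    // monotonicity in order to parse.
    #[test]
    fn fn_update_normal_form_sketch() {
        let bn = BooleanNetwork::try_from(
            r"
            a -> c
            b -| c
            $c: !(a <=> b)
        ",
        )
        .unwrap();
        let a = bn.as_graph().find_variable("a").unwrap();
        let b = bn.as_graph().find_variable("b").unwrap();
        let c = bn.as_graph().find_variable("c").unwrap();
        let fun = bn.get_update_function(c).as_ref().unwrap();
        // Eliminate `<=>` and push the resulting negation down to the literals.
        let rewritten = fun.to_and_or_normal_form().distribute_negation();
        for &a_val in &[true, false] {
            for &b_val in &[true, false] {
                let mut vals = HashMap::new();
                vals.insert(a, a_val);
                vals.insert(b, b_val);
                assert_eq!(fun.evaluate(&vals), rewritten.evaluate(&vals));
            }
        }
    }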
#[test]
fn basic_fn_update_test() {
// Note that ids here are used dangerously (across different networks), but they work
// because everything has the same variables and parameters.
let bn = BooleanNetwork::try_from(
r"
a -> c
b -| c
            # Note that this is not really a `valid` function in terms of the regulatory graph.
            # But syntactically it is ok and should go through the parser.
$c: a & (a | (a ^ (a => (a <=> !(f(a, b) | (true | false))))))
# Another function just for comparisons.
c -| b
$b: !c
",
)
.unwrap();
let a = bn.as_graph().find_variable("a").unwrap();
let b = bn.as_graph().find_variable("b").unwrap();
let c = bn.as_graph().find_variable("c").unwrap();
let f = bn.find_parameter("f").unwrap();
let fun = bn.get_update_function(c).as_ref().unwrap();
let fun_string = fun.to_string(&bn);
let fun_parse = FnUpdate::try_from_str(
"a & (a | (a ^ (a => (a <=> !(f(a, b) | (true | false))))))",
&bn,
)
.unwrap();
assert_eq!(fun, &fun_parse);
assert_eq!(vec![a, b], fun.collect_arguments());
assert_eq!(
vec![bn.find_parameter("f").unwrap()],
fun.collect_parameters()
);
assert!(fun.contains_variable(a));
assert!(fun.contains_variable(b));
assert!(!fun.contains_variable(c));
assert!(fun.contains_parameter(f));
let fun_b = bn.get_update_function(b).as_ref().unwrap();
assert!(!fun_b.contains_variable(a));
assert!(!fun_b.contains_variable(b));
assert!(fun_b.contains_variable(c));
assert!(!fun_b.contains_parameter(f));
let mut bn = BooleanNetwork::try_from(
r"
a -> c
b -| c
",
)
.unwrap();
let id_f = bn.add_parameter("f", 2).unwrap();
bn.add_string_update_function("c", fun_string.as_str())
.unwrap();
assert_eq!(fun, bn.get_update_function(c).as_ref().unwrap());
// Construct a FnUpdate
let f_a_b = FnUpdate::mk_param(id_f, &vec![a, b]);
let f_a = FnUpdate::mk_var(a);
let mut fun_2 = f_a_b.or(FnUpdate::mk_true().or(FnUpdate::mk_false()));
fun_2 = f_a.clone().iff(fun_2.negation());
fun_2 = f_a.clone().implies(fun_2);
fun_2 = f_a.clone().xor(fun_2);
fun_2 = f_a.clone().or(fun_2);
fun_2 = f_a.clone().and(fun_2);
assert_eq!(fun, &fun_2);
// Destruct a FnUpdate
let (_, op, r) = fun_2.as_binary().unwrap();
assert_eq!(BinaryOp::And, op);
let (_, op, r) = r.as_binary().unwrap();
assert_eq!(BinaryOp::Or, op);
let (_, op, r) = r.as_binary().unwrap();
assert_eq!(BinaryOp::Xor, op);
let (_, op, r) = r.as_binary().unwrap();
assert_eq!(BinaryOp::Imp, op);
let (l, op, r) = r.as_binary().unwrap();
assert_eq!(BinaryOp::Iff, op);
assert_eq!(a, l.as_var().unwrap());
let inner = r.as_not().unwrap();
let (l, _, r) = inner.as_binary().unwrap();
assert_eq!((id_f, vec![a, b].as_slice()), l.as_param().unwrap());
let (l, _, r) = r.as_binary().unwrap();
assert!(l.as_const().unwrap());
assert!(!r.as_const().unwrap());
}
#[test]
pub fn test_symbolic_instantiation() {
let bn = BooleanNetwork::try_from(
"
a -> b
b -> a
b -| b
",
)
.unwrap();
let ctx = SymbolicContext::new(&bn).unwrap();
let vars = ctx.bdd_variable_set();
let var_a = &FnUpdate::mk_var(VariableId(0));
let var_b = &FnUpdate::mk_var(VariableId(1));
let not_var_a = &FnUpdate::mk_not(var_a.clone());
let not_var_b = &FnUpdate::mk_not(var_b.clone());
let bdd = bdd!(vars, "a");
assert_eq!(
FnUpdate::mk_var(VariableId(0)),
FnUpdate::build_from_bdd(&ctx, &bdd)
);
let bdd = bdd!(vars, "a" & "b");
assert_eq!(
FnUpdate::mk_binary(BinaryOp::And, var_a.clone(), var_b.clone()),
FnUpdate::build_from_bdd(&ctx, &bdd)
);
let bdd = bdd!(vars, "a" <=> "b");
let a_and_b = FnUpdate::mk_binary(BinaryOp::And, var_a.clone(), var_b.clone());
let not_a_and_b = FnUpdate::mk_binary(BinaryOp::And, not_var_a.clone(), not_var_b.clone());
assert_eq!(
FnUpdate::mk_binary(BinaryOp::Or, not_a_and_b, a_and_b),
FnUpdate::build_from_bdd(&ctx, &bdd)
);
}
}
| true
|
71e18c91bf2a7849219efcaa2acbc12c1028e908
|
Rust
|
shuymn-sandbox/trpl
|
/first_edition/4_syntax-and-semantics/primitive-types/src/main.rs
|
UTF-8
| 498
| 3.1875
| 3
|
[] |
no_license
|
fn main() {
let a1 = [1, 2, 3];
println!("a has {} elements", a1.len());
let names = ["Graydon", "Brian", "Niko"];
println!("This second name is: {}", names[1]);
let _a2 = [0, 1, 2, 3, 4];
let _complete = &_a2[..]; // [0, 1, 2, 3, 4]
let _middle = &_a2[1..4]; // [1, 2, 3]
let (x1, _y1, _z1) = (1, 2, 3);
println!("x1 is {}", x1);
let tuple = (1, 2, 3);
let x2 = tuple.0;
let _y2 = tuple.1;
let _z2 = tuple.2;
println!("x2 is {}", x2);
}
| true
|
3ddef6be709d064e7b8e8f251615e176af54453f
|
Rust
|
floatingmountain/harmony
|
/src/application.rs
|
UTF-8
| 10,322
| 2.859375
| 3
|
[
"Zlib"
] |
permissive
|
use std::{sync::Arc, time::Instant};
use winit::{
event::Event,
event_loop::{ControlFlow, EventLoop},
};
use legion::prelude::*;
use crate::{
core::input::Input,
graphics::{
self,
material::Skybox,
pipelines::{PBRPipelineDesc, SkyboxPipelineDesc, UnlitPipelineDesc},
RenderGraph, Renderer,
},
scene::Scene,
AssetManager, TransformCount,
};
use graphics::resources::GPUResourceManager;
pub trait AppState {
/// Is called after the engine has loaded an assets.
fn load(&mut self, _app: &mut Application) {}
/// Called to update app state.
fn update(&mut self, _app: &mut Application) {}
/// Called when the window resizes
fn resize(&mut self, _app: &mut Application) {}
}
pub struct Application {
pub renderer: Renderer,
clock: Instant,
fixed_timestep: f32,
elapsed_time: f32,
pub frame_time: f32,
pub delta_time: f32,
pub input: Input,
pub current_scene: Scene,
pub render_schedule: Schedule,
pub resources: Resources,
}
impl Application {
/// Creates a new application.
/// # Arguments
///
    /// * `window_builder` - The winit WindowBuilder that harmony can use to set up the window for rendering.
/// * `event_loop` - A reference to winit's event loop.
/// * `asset_path` - Path to the asset folder.
///
/// *Note*: This returns a new instance of Application.
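    ///
    /// A rough usage sketch (hypothetical caller code, not checked against this crate):
    ///
    /// ```ignore
    /// let event_loop = winit::event_loop::EventLoop::new();
    /// let window_builder = winit::window::WindowBuilder::new().with_title("demo");
    /// let mut app = Application::new(window_builder, &event_loop, "assets", vec![]);
    /// ```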
pub fn new<T>(
window_builder: winit::window::WindowBuilder,
event_loop: &EventLoop<()>,
asset_path: T,
mut render_systems: Vec<Box<dyn Schedulable>>,
) -> Self
where
T: Into<String>,
{
let scene = Scene::new(None, None);
let window = window_builder.build(event_loop).unwrap();
let size = window.inner_size();
let surface = wgpu::Surface::create(&window);
// Add resources
let mut resources = Resources::default();
resources.insert(crate::scene::resources::DeltaTime(0.05));
let renderer =
futures::executor::block_on(Renderer::new(window, size, surface, &mut resources));
let asset_manager = AssetManager::new(asset_path.into());
let mut render_schedule_builder = Schedule::builder()
.add_system(graphics::systems::skybox::create())
.add_system(graphics::systems::mesh::create());
        for system in render_systems.drain(..) {
            render_schedule_builder = render_schedule_builder.add_system(system);
        }
let render_schedule = render_schedule_builder
.flush()
.add_thread_local_fn(graphics::systems::render::create())
.build();
resources.insert(asset_manager);
resources.insert(TransformCount(0));
Application {
renderer,
clock: Instant::now(),
fixed_timestep: 1.0 / 60.0,
elapsed_time: 0.0,
frame_time: 0.0,
delta_time: 0.0,
input: Input::new(),
current_scene: scene,
resources,
render_schedule,
}
}
    /// Sets the current scene that harmony will use for rendering. Consider this a convenient place to store our ECS world.
/// # Arguments
///
/// * `current_scene` - The current scene.
///
/// *Note*: Once you've set the current scene you can access it using: `app.current_scene`.
pub fn set_scene(&mut self, current_scene: Scene) {
self.current_scene = current_scene;
}
/// A function to help get the actual screen size as a LogicalSize<f32>
pub fn get_window_actual_size(&self) -> winit::dpi::LogicalSize<f32> {
let size = self.renderer.window.inner_size();
winit::dpi::LogicalSize {
width: size.width as f32,
height: size.height as f32,
}
}
    /// Loads the entire application. This also calls `asset_manager.load` and creates some default rendering pipelines.
/// # Arguments
///
/// * `app_state` - The app state you created which should implement the AppState trait.
///
pub fn load<T>(&mut self, app_state: &mut T)
where
T: AppState,
{
{
let mut asset_manager = self.resources.get_mut::<AssetManager>().unwrap();
let device = self.resources.get::<wgpu::Device>().unwrap();
let mut queue = self.resources.get_mut::<wgpu::Queue>().unwrap();
asset_manager.load(&device, &mut queue);
}
{
let render_graph = RenderGraph::new(&mut self.resources, true);
self.resources.insert(render_graph);
}
{
let asset_manager = self.resources.get_mut::<AssetManager>().unwrap();
let mut render_graph = self.resources.get_mut::<RenderGraph>().unwrap();
let mut resource_manager = self.resources.get_mut::<GPUResourceManager>().unwrap();
let device = self.resources.get::<wgpu::Device>().unwrap();
let sc_desc = self.resources.get::<wgpu::SwapChainDescriptor>().unwrap();
// Skybox pipeline
let skybox_pipeline_desc = SkyboxPipelineDesc::default();
render_graph.add(
&asset_manager,
&device,
&sc_desc,
&mut resource_manager,
"skybox",
skybox_pipeline_desc,
vec![],
false,
None,
false,
);
// Unlit pipeline
let unlit_pipeline_desc = UnlitPipelineDesc::default();
render_graph.add(
&asset_manager,
&device,
&sc_desc,
&mut resource_manager,
"unlit",
unlit_pipeline_desc,
vec!["skybox"],
true,
None,
false,
);
// PBR pipeline
let pbr_pipeline_desc = PBRPipelineDesc::default();
render_graph.add(
&asset_manager,
&device,
&sc_desc,
&mut resource_manager,
"pbr",
pbr_pipeline_desc,
vec!["skybox"],
true,
None,
false,
);
}
app_state.load(self);
// Once materials have been created we need to create more info for them.
{
let mut asset_manager = self.resources.get_mut::<AssetManager>().unwrap();
let device = self.resources.get::<wgpu::Device>().unwrap();
let mut resource_manager = self.resources.get_mut::<GPUResourceManager>().unwrap();
asset_manager.load_materials(&device, &mut resource_manager);
}
{
let mut resource_manager = self.resources.get_mut::<GPUResourceManager>().unwrap();
let query = <(Write<Skybox>,)>::query();
for (mut skybox,) in query.iter_mut(&mut self.current_scene.world) {
let device = self.resources.get::<wgpu::Device>().unwrap();
{
let material_layout = resource_manager.get_bind_group_layout("skybox_material");
skybox.create_bind_group2(&device, material_layout);
}
let bound_group = {
let pbr_bind_group_layout = resource_manager.get_bind_group_layout("skybox_pbr_material");
skybox.create_bind_group(&device, pbr_bind_group_layout)
};
resource_manager.add_single_bind_group("skybox_pbr_material", bound_group);
}
}
}
    /// Runs the application, which does two things:
    /// 1. Updates all internal state and calls `app_state.update()`.
    /// 2. Draws all rendering data to the current screen.
///
/// # Arguments
///
/// * `app_state` - The app state you created which should implement the AppState trait.
/// * `event` - The event data as a reference from winit.
/// * `control_flow` - a mutable reference to winit's control flow.
///
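    /// A hedged sketch of the intended call site (the closure shape follows winit's
    /// `EventLoop::run`; `app` and `app_state` are assumed to exist):
    ///
    /// ```ignore
    /// event_loop.run(move |event, _, control_flow| {
    ///     app.run(&mut app_state, &event, control_flow);
    /// });
    /// ```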
pub fn run<T>(
&mut self,
app_state: &mut T,
event: &Event<'_, ()>,
_control_flow: &mut ControlFlow, // TODO: Figure out if we actually will use this...
) where
T: AppState,
{
self.input.update_events(event);
match event {
Event::MainEventsCleared => {
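                // Fixed-timestep update loop: consume the elapsed wall-clock time in steps
                // of at most `fixed_timestep` seconds, updating the scene once per step.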
let mut frame_time = self.clock.elapsed().as_secs_f32() - self.elapsed_time;
while frame_time > 0.0 {
self.delta_time = f32::min(frame_time, self.fixed_timestep);
self.current_scene
.update(self.delta_time, &mut self.resources);
self.input.clear();
frame_time -= self.delta_time;
self.elapsed_time += self.delta_time;
}
// Store current frame buffer.
{
let output = Arc::new(self.renderer.render());
self.resources.insert(output);
}
                // Renders the scene.
self.render_schedule
.execute(&mut self.current_scene.world, &mut self.resources);
self.renderer.window.request_redraw();
}
Event::WindowEvent {
event: winit::event::WindowEvent::Resized(size),
..
} => {
{
let device = self.resources.get::<wgpu::Device>().unwrap();
let mut sc_desc = self
.resources
.get_mut::<wgpu::SwapChainDescriptor>()
.unwrap();
sc_desc.width = size.width;
sc_desc.height = size.height;
self.renderer.size = *size;
self.renderer.swap_chain =
device.create_swap_chain(&self.renderer.surface, &sc_desc);
}
app_state.resize(self);
}
_ => (),
}
}
}
| true
|
e689474fa4c24d2e052c791ab925731d05f0df41
|
Rust
|
jburell/trivia
|
/src/res/trivia.rs
|
UTF-8
| 1,040
| 2.921875
| 3
|
[] |
no_license
|
use actix_web::{HttpResponse, Query, Result};
use askama::Template;
use std::collections::HashMap;
#[derive(Template)]
#[template(path = "index.html")]
struct IndexTemplate<'a> {
name: &'a str,
}
#[derive(Deserialize, Debug)]
struct TriviaResponse {
response_code: u32,
results: Vec<TriviaSpec>,
}
#[derive(Deserialize, Debug)]
struct TriviaSpec {
category: String,
#[serde(rename = "type")]
ty: String,
difficulty: String,
question: String,
correct_answer: String,
incorrect_answers: Vec<String>,
}
pub fn index(query: Query<HashMap<String, String>>) -> Result<HttpResponse> {
let fallback_name = &"world".to_string();
let name = query.get("name").unwrap_or(fallback_name);
let trivia_response: TriviaResponse = reqwest::get("https://opentdb.com/api.php?amount=1")
.unwrap()
.json()
.unwrap();
println!("{:?}", trivia_response);
let s = IndexTemplate { name: name }.render().unwrap();
Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
| true
|
ba2e4046a9ebb4b777ffd0dbeb4352d828a21fd2
|
Rust
|
animatedlew/rox
|
/src/tokens.rs
|
UTF-8
| 506
| 3.15625
| 3
|
[] |
no_license
|
#[cfg_attr(rustfmt, rustfmt_skip)]
#[derive(Debug, Copy, Clone)]
pub enum TokenType {
// Single-character tokens.
LeftParen, RightParen, LeftBrace, RightBrace, Comma, Dot, Minus, Plus, Semicolon, Slash, Star,
// One or two character tokens.
Bang, BangEqual, Equal, EqualEqual, Greater, GreaterEqual, Less, LessEqual, Comment,
// Literals.
Identifier, String, Number,
// Keywords.
And, Class, Else, False, Fun, For, If, Nil, Or, Print, Return, Super, This, True, Var, While,
Eof,
}
| true
|
b63e3d5d744300cb8e0ce83702df8f4f3ca9118a
|
Rust
|
augustuswm/mockito
|
/src/server.rs
|
UTF-8
| 3,867
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
use std::thread;
use std::io::Write;
use std::net::{TcpListener, TcpStream};
use serde_json;
use {Mock, SERVER_ADDRESS, Request};
pub fn try_start() {
if is_listening() {
return;
}
start()
}
fn start() {
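    // The server runs on a background thread; `start` only returns once the listener
    // is actually accepting connections (checked via `is_listening` below).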
thread::spawn(move || {
let mut mocks: Vec<Mock> = vec![];
let listener = TcpListener::bind(SERVER_ADDRESS).unwrap();
for stream in listener.incoming() {
match stream {
Ok(mut stream) => {
let request = Request::from(&mut stream);
if request.is_ok() {
handle_request(&mut mocks, request, stream);
} else {
stream
.write("HTTP/1.1 422 Unprocessable Entity\r\n\r\n".as_bytes())
.unwrap();
}
}
Err(_) => {}
}
}
});
while !is_listening() {}
}
fn is_listening() -> bool {
TcpStream::connect(SERVER_ADDRESS).is_ok()
}
fn handle_request(mocks: &mut Vec<Mock>, request: Request, stream: TcpStream) {
match (&*request.method, &*request.path) {
("POST", "/mocks") => handle_create_mock(mocks, request, stream),
("DELETE", "/mocks") => handle_delete_mock(mocks, request, stream),
_ => handle_match_mock(mocks, request, stream),
}
}
fn handle_create_mock(mocks: &mut Vec<Mock>, request: Request, mut stream: TcpStream) {
match serde_json::from_slice(&request.body) {
Ok(mock) => {
mocks.push(mock);
stream.write("HTTP/1.1 200 OK\r\n\r\n".as_bytes()).unwrap();
}
Err(err) => {
let message = err.to_string();
let response = format!("HTTP/1.1 422 Unprocessable Entity\r\ncontent-length: {}\r\n\r\n{}",
message.len(),
message);
stream.write(response.as_bytes()).unwrap();
}
}
}
fn handle_delete_mock(mocks: &mut Vec<Mock>, request: Request, mut stream: TcpStream) {
match request
.headers
.iter()
.find(|&(ref field, _)| field.to_lowercase() == "x-mock-id") {
// Remove the element with x-mock-id
Some((_, value)) => {
match mocks.iter().position(|mock| &mock.id == value) {
Some(pos) => {
mocks.remove(pos);
}
None => {}
};
}
// Remove all elements
None => {
mocks.clear();
}
}
stream.write("HTTP/1.1 200 OK\r\n\r\n".as_bytes()).unwrap();
}
fn handle_match_mock(mocks: &mut Vec<Mock>, request: Request, mut stream: TcpStream) {
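    // Mocks are matched in reverse registration order, so the most recently created
    // mock that matches the request wins; unmatched requests get a 501 response.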
match mocks.iter().rev().find(|mock| mock.matches(&request)) {
Some(mock) => {
let mut headers = String::new();
for &(ref key, ref value) in &mock.response.headers {
headers.push_str(key);
headers.push_str(": ");
headers.push_str(value);
headers.push_str("\r\n");
}
headers.push_str("x-mockito-matches: ");
headers.push_str((*mock.match_count.borrow()).to_string().as_str());
headers.push_str("\r\n");
            let body = &mock.response.body;
let response = format!("HTTP/1.1 {}\r\ncontent-length: {}\r\n{}\r\n{}",
mock.response.status,
body.len(),
headers,
body);
stream.write(response.as_bytes()).unwrap();
}
None => {
stream
.write("HTTP/1.1 501 Not Implemented\r\n\r\n".as_bytes())
.unwrap();
}
}
}
| true
|
13a8b147c07eec13f91f804fd6f58c2003cb0524
|
Rust
|
aatifsyed/monzo-objects
|
/src/lib.rs
|
UTF-8
| 3,954
| 2.5625
| 3
|
[] |
no_license
|
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use url;
#[derive(Debug, Deserialize, Serialize)]
pub struct WhoAmI {
authenticated: bool,
client_id: String,
user_id: String,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct WebhookInner {
account_id: String,
id: String,
url: url::Url,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Webhook {
webhook: WebhookInner,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Webhooks {
webhooks: Vec<WebhookInner>,
}
/// Monzo gives us `type` and `data` keys in json for webhooks, presumably for expandability.
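/// With `#[serde(tag = "type", content = "data")]` this is an adjacently tagged enum, so a
/// payload such as `{"type": "transaction.created", "data": {...}}` deserializes into the
/// matching variant (see `example_objects::TRANSACTION_CREATED` below).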
#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "type", content = "data")]
#[non_exhaustive]
pub enum WebhookEvent {
#[serde(rename = "transaction.created")]
TransactionCreated(TransactionCreated),
}
#[derive(Debug, Deserialize, Serialize)]
pub struct TransactionCreated {
account_id: String,
amount: isize,
created: DateTime<Utc>,
currency: String,
description: String,
id: String,
category: String,
is_load: bool,
settled: DateTime<Utc>,
merchant: Merchant,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Address {
address: String,
city: String,
country: String,
latitude: f64,
longitude: f64,
postcode: String,
region: String,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Merchant {
address: Address,
created: DateTime<Utc>,
group_id: String,
id: String,
logo: String,
emoji: char,
name: String,
category: String,
}
/// From the Monzo Docs
pub mod example_objects {
pub const TRANSACTION_CREATED: &str = r#"{
"type": "transaction.created",
"data": {
"account_id": "acc_00008gju41AHyfLUzBUk8A",
"amount": -350,
"created": "2015-09-04T14:28:40Z",
"currency": "GBP",
"description": "Ozone Coffee Roasters",
"id": "tx_00008zjky19HyFLAzlUk7t",
"category": "eating_out",
"is_load": false,
"settled": "2015-09-05T14:28:40Z",
"merchant": {
"address": {
"address": "98 Southgate Road",
"city": "London",
"country": "GB",
"latitude": 51.54151,
"longitude": -0.08482400000002599,
"postcode": "N1 3JD",
"region": "Greater London"
},
"created": "2015-08-22T12:20:18Z",
"group_id": "grp_00008zIcpbBOaAr7TTP3sv",
"id": "merch_00008zIcpbAKe8shBxXUtl",
"logo": "https://pbs.twimg.com/profile_images/527043602623389696/68_SgUWJ.jpeg",
"emoji": "🍞",
"name": "The De Beauvoir Deli Co.",
"category": "eating_out"
}
}
}"#;
pub const REGISTER_WEBHOOK: &str = r#"{
"webhook": {
"account_id": "account_id",
"id": "webhook_id",
"url": "http://example.com"
}
}"#;
pub const LIST_WEBHOOKS: &str = r#"{
"webhooks": [
{
"account_id": "acc_000091yf79yMwNaZHhHGzp",
"id": "webhook_000091yhhOmrXQaVZ1Irsv",
"url": "http://example.com/callback"
},
{
"account_id": "acc_000091yf79yMwNaZHhHGzp",
"id": "webhook_000091yhhzvJSxLYGAceC9",
"url": "http://example2.com/anothercallback"
}
]
}"#;
}
#[cfg(test)]
mod tests {
use super::*;
    use super::example_objects;
#[test]
fn transaction_created() {
serde_json::from_str::<WebhookEvent>(example_objects::TRANSACTION_CREATED).unwrap();
}
#[test]
fn webhooks() {
serde_json::from_str::<Webhook>(example_objects::REGISTER_WEBHOOK).unwrap();
serde_json::from_str::<Webhooks>(example_objects::LIST_WEBHOOKS).unwrap();
}
}
| true
|