| blob_id (string, 40 chars) | language (string, 1 class) | repo_name (string, 5-140 chars) | path (string, 5-183 chars) | src_encoding (string, 6 classes) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, 0-47 entries) | license_type (string, 2 classes) | text (string, 12-5.32M chars) | download_success (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|
56198fc85b4b6d3b146b234e5400ea87ee0111a0
|
Rust
|
wezm/steno-lookup
|
/src/dictionary.rs
|
UTF-8
| 2,270
| 3.390625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::collections::HashMap;
use std::fmt;
use std::fs::File;
use std::path::Path;
use serde::{Deserialize, Serialize};
use crate::error::Error;
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Stroke(String);
#[derive(Debug, Deserialize, PartialEq, Eq, Hash)]
pub struct Translation(String);
#[derive(Debug, Deserialize)]
pub struct Dictionary(HashMap<Stroke, Translation>);
#[derive(Debug, PartialEq, Eq)]
pub struct InvertedDictionary(HashMap<Translation, Vec<Stroke>>);
impl fmt::Display for Stroke {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl Dictionary {
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, Error> {
let file = File::open(path)?;
serde_json::from_reader(file).map_err(Error::from)
}
pub fn invert(self) -> InvertedDictionary {
let dict = self.0;
let inverse = dict.into_iter().fold(
HashMap::new(),
|mut inverse: HashMap<_, Vec<_>>, (stroke, translation)| {
inverse.entry(translation).or_default().push(stroke);
inverse
},
);
InvertedDictionary(inverse)
}
}
impl InvertedDictionary {
pub fn get(&self, translation: String) -> Option<&Vec<Stroke>> {
self.0.get(&Translation(translation))
}
}
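// A minimal usage sketch (not part of the original crate): load a JSON steno
// dictionary (stroke -> translation), invert it, and look up the strokes for a
// translation. The file name "dict.json" is hypothetical.
//
//     let strokes = Dictionary::load("dict.json")?.invert();
//     if let Some(entries) = strokes.get("test".to_string()) {
//         for stroke in entries {
//             println!("{}", stroke);
//         }
//     }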
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_invert() {
let dict = Dictionary(
vec![
(Stroke("TEFT".to_string()), Translation("test".to_string())),
(Stroke("TEF".to_string()), Translation("test".to_string())),
]
.into_iter()
.collect(),
);
let expected = InvertedDictionary(
vec![(
Translation("test".to_string()),
vec![Stroke("TEF".to_string()), Stroke("TEFT".to_string())],
)]
.into_iter()
.collect(),
);
// Ensure order of items is predictable for comparison in assert_eq
let mut inverted = dict.invert();
inverted
.0
.iter_mut()
.for_each(|(_, strokes)| strokes.sort());
assert_eq!(expected, inverted);
}
}
| true
|
f24af9cc5f295ec463c6c7bc7c14316fbd85b89c
|
Rust
|
asrcpq/eyhv
|
/src/cannon/laser_locker.rs
|
UTF-8
| 3,857
| 2.828125
| 3
|
[] |
no_license
|
use std::collections::VecDeque;
use rand::Rng;
use rand::SeedableRng;
use crate::algebra::{Mat2x2f, Point2f};
use crate::bullet::{bullet_graphic_objects, Bullet, SimpleBullet};
use crate::cannon::{CannonControllerInterface, CannonGeneratorInterface};
use crate::random_tools::simple_try;
const TRY_TIMES: u32 = 10;
#[derive(Clone)]
pub struct LaserLocker {
// relative to moving object
p: Point2f,
// Durations, phase = fire + cd
fire_duration: f32,
cycle_duration: f32,
// phase_timer takes a value from 0 to cycle_duration, then resets
phase_timer: f32,
// interval between bullets fired during the fire phase
fire_interval: f32,
// timer between intervals
fire_cd: f32,
// direction, opening angle and bullet count
// bullets are uniformly distributed across the opening angle
// and are fired together
theta: f32,
bullet_speed: f32,
// status
switch: bool, // on/off
}
impl CannonGeneratorInterface for LaserLocker {
fn generate(seed: u64, difficulty: f32, correlation: f32) -> LaserLocker {
// difficulty expression
// difficulty = fire_duration * (bullet_speed / fire_interval)
// fire_freq = fd(cd * (0.2 - 1)) / cd(1 - 3) / fi(infer)
let mut rng = rand_pcg::Pcg64Mcg::seed_from_u64(seed);
let cycle_duration: f32 = rng.gen_range(1., 2.);
// k(fd / cd) * bs_ff^2
let result = simple_try(
TRY_TIMES,
|x| x[0] * x[1].powi(2),
vec![(0.01, 0.5), (0.02, 3.)], // 0.05-40
correlation,
difficulty,
rng.gen::<u64>(),
);
let (fire_duration, bs_ff) = (cycle_duration * result[0], result[1]);
let mut bullet_speed = bs_ff.sqrt();
let fire_interval = 0.05 * bullet_speed / bs_ff;
bullet_speed *= 600.;
LaserLocker {
p: Point2f::new(),
fire_duration,
cycle_duration,
fire_interval,
fire_cd: fire_interval,
theta: 0., // uninitialized
switch: true,
bullet_speed,
phase_timer: 0.,
}
}
}
impl LaserLocker {
fn update_theta(&mut self, player_p: Point2f, self_p: Point2f) {
// r points to player
let r = player_p - self_p - self.p;
self.theta = r.y.atan2(r.x);
}
}
impl CannonControllerInterface for LaserLocker {
#[inline]
fn switch(&mut self, switch: bool) {
if self.switch && !switch {
self.switch = false;
self.phase_timer = 0.;
self.fire_cd = self.fire_interval;
} else if switch {
self.switch = true;
}
}
fn tick(
&mut self,
host_p: Point2f,
player_p: Point2f,
mut dt: f32,
) -> VecDeque<Box<dyn Bullet>> {
self.update_theta(player_p, host_p);
let mut bullet_queue = VecDeque::new();
const BULLET_RADIUS: f32 = 3.;
'cycle: loop {
if self.phase_timer > self.fire_duration {
// note that fire_cd must be re-initialized somewhere
// (either when entering the cd phase (here) or when entering the fire phase):
// if phase_timer < self.fire_duration and fire_cd > dt,
// phase_timer is advanced, leaving fire_cd with a stale value
// once phase_timer has moved out of the fire phase
if self.phase_timer + dt < self.cycle_duration {
self.phase_timer += dt;
break 'cycle bullet_queue;
} else {
dt -= self.cycle_duration - self.phase_timer;
self.phase_timer = 0.;
// self.fire_cd = self.fire_interval;
// fire immediately to fire 1 bullet at least
self.fire_cd = 0.;
}
}
while self.phase_timer < self.fire_duration {
if self.fire_cd > dt {
self.fire_cd -= dt;
self.phase_timer += dt;
break 'cycle bullet_queue;
}
dt -= self.fire_cd;
let normed_vec2f = Point2f::from_theta(self.theta);
bullet_queue.push_back(Box::new(SimpleBullet::new(
self.p + host_p,
normed_vec2f * self.bullet_speed,
Point2f::new(),
dt,
BULLET_RADIUS,
bullet_graphic_objects::LASER_BAR
.rotate(Mat2x2f::from_normed_vec2f(normed_vec2f)),
)));
self.fire_cd = self.fire_interval;
}
}
}
fn set_p(&mut self, p: Point2f) {
self.p = p;
}
}
| true
|
5ce6de10269e832ff23bbc8c8a3ca91a91c0c6fe
|
Rust
|
RustWorks/openshift-openapi-codegen
|
/src/v4_3/api/image/v1/image_layer_data.rs
|
UTF-8
| 3,683
| 2.953125
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Generated from definition com.github.openshift.api.image.v1.ImageLayerData
/// ImageLayerData contains metadata about an image layer.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct ImageLayerData {
/// MediaType of the referenced object.
pub media_type: String,
/// Size of the layer in bytes as defined by the underlying store. This field is optional if the necessary information about size is not available.
pub size: i64,
}
impl<'de> serde::Deserialize<'de> for ImageLayerData {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_media_type,
Key_size,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"mediaType" => Field::Key_media_type,
"size" => Field::Key_size,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = ImageLayerData;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("ImageLayerData")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_media_type: Option<String> = None;
let mut value_size: Option<i64> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_media_type => value_media_type = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_size => value_size = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(ImageLayerData {
media_type: value_media_type.ok_or_else(|| serde::de::Error::missing_field("mediaType"))?,
size: value_size.ok_or_else(|| serde::de::Error::missing_field("size"))?,
})
}
}
deserializer.deserialize_struct(
"ImageLayerData",
&[
"mediaType",
"size",
],
Visitor,
)
}
}
impl serde::Serialize for ImageLayerData {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"ImageLayerData",
2,
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "mediaType", &self.media_type)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "size", &self.size)?;
serde::ser::SerializeStruct::end(state)
}
}
| true
|
f19ebd42de6a0811125f66cecb3f0eafd46f8e06
|
Rust
|
mc738/dmap
|
/src/map.rs
|
UTF-8
| 4,542
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::{File, read_dir};
use std::io::{Read, Write};
use crypto::sha2::Sha256;
use crypto::digest::Digest;
use std::path::{Path};
use std::io;
use crate::common::{DirectoryInfo, FileInfo, InputType};
use crate::common;
use std::collections::HashMap;
use rlog::Log;
pub struct DMap {
base_path: String,
dir: DirectoryInfo,
//signature: String,
}
impl DMap {
pub fn create_from_input(input: InputType) -> Result<DMap, &'static str> {
match input {
InputType::Directory(p) => DMap::create(p),
InputType::Map(p) => DMap::load(p)
}
}
pub fn create(path: &Path) -> Result<DMap, &'static str> {
// TODO handle this better!
let base_path = path.display().to_string();
match map_directory(path, path) {
Ok(dir) => Ok(DMap {
base_path,
dir,
}),
Err(_) => Err("Could not create map")
}
}
pub fn load(path: &Path) -> Result<DMap, &'static str> {
let base_path = path.display().to_string();
// TODO Remove unwrap.
let mut file = File::open(path).unwrap();
let mut json = String::new();
match file.read_to_string(&mut json) {
Ok(_) => {
// TODO Remove unwrap.
let dir: DirectoryInfo = serde_json::from_str(json.as_str()).unwrap();
Ok(DMap {
base_path,
dir,
})
}
Err(_) => Err("Could not parse map")
}
}
pub fn get_hash(&self) -> String {
self.dir.hash.clone()
}
pub fn get_base_path(&self) -> String { self.base_path.clone() }
pub fn flatten(&self) -> HashMap<String, String> {
self.dir.flatten()
}
pub fn save(&self, path: &Path) -> Result<(), &'static str> {
let json = serde_json::to_string(&self.dir).unwrap();
// TODO remove unwrap.
let mut output = File::create(path).unwrap();
match output.write_all(json.as_ref()) {
Ok(_) => {
Log::print_success(String::from("dmap"), String::from("Mapped successfully"));
Ok(())
}
Err(_) => Err("Could not save map.")
}
}
}
fn map_directory(path: &Path, base_path: &Path) -> io::Result<DirectoryInfo> {
let mut entries = read_dir(path)?
.map(|res| res.map(|e| e.path()))
.collect::<Result<Vec<_>, io::Error>>()?;
// The order in which `read_dir` returns entries is not guaranteed. If reproducible
// ordering is required the entries should be explicitly sorted.
// This may not be needed, tests pass without it, but it makes the map nicer.
entries.sort();
let mut files: Vec<FileInfo> = Vec::new();
let mut children: Vec<DirectoryInfo> = Vec::new();
for entry in entries {
let file = File::open(&entry);
match file {
Ok(mut f) => {
let mut data: Vec<u8> = Vec::new();
let metadata = f.metadata()?;
if metadata.is_file() {
match f.read_to_end(&mut data) {
Ok(_) => {
let hash = common::create_hash(data);
// TODO clean up this, or make a helper.
let fi = FileInfo::create(entry.strip_prefix(base_path).expect("").to_str().expect("").parse().unwrap(), hash);
files.push(fi);
}
Err(_) => {}
}
} else {
let dir = map_directory(entry.as_path(), base_path)?;
children.push(dir);
}
}
Err(_) => ()
};
//hash_file(entry.as_path());
};
let hash = hash_directory(&children, &files);
Ok(DirectoryInfo::create(path.to_string_lossy().parse().unwrap(), hash, children, files))
}
fn hash_directory(children: &Vec<DirectoryInfo>, files: &Vec<FileInfo>) -> String {
let mut buffer = String::new();
let mut hasher = Sha256::new();
// Append all directory hashes.
for c in children {
buffer.push_str(&c.hash);
};
// Append all file hashes.
for f in files {
buffer.push_str(&f.hash);
};
hasher.input_str(&*buffer);
hasher.result_str()
}
| true
|
5866414869fa5be449d7fb86e503168edec1a1c3
|
Rust
|
giTonyx/advent-of-code-2019
|
/src/solutions/day07.rs
|
UTF-8
| 2,775
| 2.84375
| 3
|
[] |
no_license
|
use crate::intcode::{read_input, IntCode};
use crate::solver::Solver;
use permutator::Permutation;
use std::io;
pub struct Problem;
impl Solver for Problem {
type Input = Vec<i64>;
type Output1 = i64;
type Output2 = i64;
fn parse_input<R: io::Read>(&self, r: R) -> Vec<i64> {
read_input(r)
}
fn solve_first(&self, input: &Self::Input) -> Self::Output1 {
let mut max_thrust = 0i64;
let mut phases = [0, 1, 2, 3, 4];
phases.permutation().for_each(|p| {
let thrust = run_async_chain(input, &p);
if thrust > max_thrust {
max_thrust = thrust;
}
});
max_thrust
}
fn solve_second(&self, input: &Self::Input) -> Self::Output2 {
let mut max_thrust = 0i64;
let mut phases = [5, 6, 7, 8, 9];
phases.permutation().for_each(|p| {
let thrust = run_async_chain(input, &p);
if thrust > max_thrust {
max_thrust = thrust;
}
});
max_thrust
}
}
fn run_async_chain(program: &Vec<i64>, phases: &Vec<i64>) -> i64 {
let mut int_code_a = IntCode::new(program);
int_code_a.input.push(phases[0]);
int_code_a.input.push(0);
let mut int_code_b = IntCode::new(program);
int_code_b.input.push(phases[1]);
let mut int_code_c = IntCode::new(program);
int_code_c.input.push(phases[2]);
let mut int_code_d = IntCode::new(program);
int_code_d.input.push(phases[3]);
let mut int_code_e = IntCode::new(program);
int_code_e.input.push(phases[4]);
while !int_code_e.finished {
int_code_a.advance(&mut int_code_b.input);
int_code_b.advance(&mut int_code_c.input);
int_code_c.advance(&mut int_code_d.input);
int_code_d.advance(&mut int_code_e.input);
int_code_e.advance(&mut int_code_a.input);
}
int_code_e.last_output
}
#[test]
fn test_run_async_chain() {
let program: Vec<i64> = vec![
3, 52, 1001, 52, -5, 52, 3, 53, 1, 52, 56, 54, 1007, 54, 5, 55, 1005, 55, 26, 1001, 54, -5,
54, 1105, 1, 12, 1, 53, 54, 53, 1008, 54, 0, 55, 1001, 55, 1, 55, 2, 53, 55, 53, 4, 53,
1001, 56, -1, 56, 1005, 56, 6, 99, 0, 0, 0, 0, 10,
];
let phase: Vec<i64> = vec![9, 7, 8, 5, 6];
let output = run_async_chain(&program, &phase);
println!("{}", output);
assert!(output == 18216);
let program: Vec<i64> = vec![
3, 26, 1001, 26, -4, 26, 3, 27, 1002, 27, 2, 27, 1, 27, 26, 27, 4, 27, 1001, 28, -1, 28,
1005, 28, 6, 99, 0, 0, 5,
];
let phase: Vec<i64> = vec![9, 8, 7, 5, 6];
let output = run_async_chain(&program, &phase);
println!("{}", output);
assert!(output == 138547328); // number on web page seems wrong
}
| true
|
667009a2b0a0590d913f8038581761d1332c6bbd
|
Rust
|
rjfranco/sd2snes-lttp-rando-tracker
|
/src/lttp/mod.rs
|
UTF-8
| 7,187
| 2.734375
| 3
|
[] |
no_license
|
mod item;
use failure;
use std::convert::TryFrom;
use self::item::{
Armor,
BigKey,
Boomerang,
Bottle,
Bow,
Crystal,
FluteShovel,
Gloves,
Magic,
Pendant,
Shield,
ShroomPowder,
Sword,
};
#[derive(Debug, Default, Copy, Clone, Serialize, Deserialize)]
pub struct GameState {
// Items
pub bow: Bow,
pub boomerang: Boomerang,
pub hook_shot: bool,
pub bomb: u8,
pub shroom_powder: ShroomPowder,
pub fire_rod: bool,
pub ice_rod: bool,
pub bombos_medallion: bool,
pub ether_medallion: bool,
pub quake_medallion: bool,
pub lantern: bool,
pub hammer: bool,
pub flute_shovel: FluteShovel,
pub net: bool,
pub book: bool,
pub bottle: bool,
pub cane_somaria: bool,
pub cane_byrna: bool,
pub cape: bool,
pub mirror: bool,
// Abilities
pub gloves: Gloves,
pub boots: bool,
pub flippers: bool,
pub moon_pearl: bool,
// Weapon & Armor Progression
pub sword_level: Sword,
pub shield_level: Shield,
pub armor_level: Armor,
// Bottle content
pub bottle_content1: Bottle,
pub bottle_content2: Bottle,
pub bottle_content3: Bottle,
pub bottle_content4: Bottle,
pub rupees: u16,
pub heart_quarters: u8,
pub bomb_capacity: u8,
pub hearts: u8,
pub max_hearts: u8,
pub arrows: u8,
pub arrow_capacity: u8,
pub magic_progression: Magic,
pub small_keys: u8,
pub big_key: BigKey,
pub pendant: Pendant,
pub crystal: Crystal,
}
impl TryFrom<Vec<u8>> for GameState {
type Error = failure::Error;
fn try_from(response: Vec<u8>) -> Result<GameState, Self::Error> {
Ok(GameState {
bow: Bow::try_from(response[0x00])?,
boomerang: Boomerang::try_from(response[0x01])?,
hook_shot: response[0x02] > 0,
bomb: response[0x03],
shroom_powder: ShroomPowder::try_from(response[0x04])?,
fire_rod: response[0x05] > 0,
ice_rod: response[0x06] > 0,
bombos_medallion: response[0x07] > 0,
ether_medallion: response[0x08] > 0,
quake_medallion: response[0x09] > 0,
lantern: response[0x0A] > 0,
hammer: response[0x0B] > 0,
flute_shovel: FluteShovel::try_from(response[0x0C])?,
net: response[0x0D] > 0,
book: response[0x0E] > 0,
bottle: response[0x0F] > 0,
cane_somaria: response[0x10] > 0,
cane_byrna: response[0x11] > 0,
cape: response[0x12] > 0,
mirror: response[0x13] > 0,
gloves: Gloves::try_from(response[0x14])?,
boots: response[0x15] > 0,
flippers: response[0x16] > 0,
moon_pearl: response[0x17] > 0,
sword_level: Sword::try_from(response[0x19])?,
shield_level: Shield::try_from(response[0x1A])?,
armor_level: Armor::try_from(response[0x1B])?,
bottle_content1: Bottle::try_from(response[0x1C])?,
bottle_content2: Bottle::try_from(response[0x1D])?,
bottle_content3: Bottle::try_from(response[0x1E])?,
bottle_content4: Bottle::try_from(response[0x1F])?,
// Rupees are spread across two bytes, as the randomizer lifted the
// 255 Rupee limit, and it's stored little-endian.
rupees: ((response[0x23] as u16) << 8) + response[0x22] as u16,
heart_quarters: response[0x2B],
bomb_capacity: response[0x30] + 10,
hearts: response[0x2D],
max_hearts: response[0x2C],
arrows: response[0x37],
arrow_capacity: response[0x31] + 30,
magic_progression: Magic::try_from(response[0x3B])?,
small_keys: if response[0x2F] == 0xFF { 0 } else { response[0x2F] },
big_key: BigKey {
// BigKey1: 0x366
// Skull Woods
// |Ice Palace
// ||Tower of Hera
// |||Gargoyle's Domain
// ||||Turtle Rock
// |||||Gannon's Tower
// ||||||x
// |||||||x
// vvvvvvvv
// |--------|
// Bit: 7 0
gannons_tower: response[0x26] & 0b00000100 > 0,
turtle_rock: response[0x26] & 0b00001000 > 0,
thieves_town: response[0x26] & 0b00010000 > 0,
tower_of_hera: response[0x26] & 0b00100000 > 0,
ice_palace: response[0x26] & 0b01000000 > 0,
skull_woods: response[0x26] & 0b10000000 > 0,
// BigKey2: 0x367
// X
// |X
// ||Eastern Palace
// |||Desert Palace
// ||||X
// |||||Swamp Palace
// ||||||Dark Palace
// |||||||Misery Mire
// vvvvvvvv
// |--------|
// Bit: 7 0
misery_mire: response[0x27] & 0b00000001 > 0,
desert_palace: response[0x27] & 0b00000010 > 0,
swamp_palace: response[0x27] & 0b00000100 > 0,
palace_of_darkness: response[0x27] & 0b00010000 > 0,
eastern_palace: response[0x27] & 0b00100000 > 0,
},
// 0x374 -> Pendants (Bitmask)
// 1 - Red
// 2 - Blue
// 4 - Green
pendant: Pendant {
red: response[0x34] & 0b0001 > 0,
blue: response[0x34] & 0b0010 > 0,
green: response[0x34] & 0b0100 > 0,
},
// 0x37A -> Crystals (Bitmask)
// 1 - Misery Mire
// 2 - Dark Palace
// 4 - Ice Palace
// 8 - Turtle Rock
// 16 - Swamp Palace
// 32 - Gargoyle's Domain
// 64 - Skull Woods
crystal: Crystal {
one: response[0x3A] & 0b00000001 > 0,
three: response[0x3A] & 0b00000010 > 0,
five: response[0x3A] & 0b00000100 > 0,
four: response[0x3A] & 0b00001000 > 0,
two: response[0x3A] & 0b00010000 > 0,
six: response[0x3A] & 0b00100000 > 0,
seven: response[0x3A] & 0b01000000 > 0,
},
})
}
}
| true
|
6a2646f01d9184aec5fc616b1a63b5646a0f6b97
|
Rust
|
GambuzX/AdventOfCode
|
/2022/Day 03/main.rs
|
UTF-8
| 2,517
| 3.5625
| 4
|
[] |
no_license
|
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::collections::HashSet;
// The output is wrapped in a Result to allow matching on errors
// Returns an Iterator to the Reader of the lines of the file.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where P: AsRef<Path>, {
let file = File::open(filename)?;
Ok(io::BufReader::new(file).lines())
}
fn priority(c: char) -> u32 {
if c.is_lowercase() {
return c as u32 - 'a' as u32 + 1;
}
return c as u32 - 'A' as u32 + 27;
}
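// For reference (added note): this maps 'a'..='z' to 1..=26 and 'A'..='Z' to
// 27..=52, e.g. priority('a') == 1, priority('z') == 26, priority('A') == 27.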
fn star1() {
let mut total = 0u32;
if let Ok(lines) = read_lines("./input.txt") {
for res in lines {
if let Ok(line) = res {
let l = line.len();
let mut first_compartment = HashSet::new();
for i in 0..l/2 {
first_compartment.insert(line.chars().nth(i).unwrap());
}
for i in l/2..l {
let c = line.chars().nth(i).unwrap();
//println!("{}", c);
if first_compartment.contains(&c) {
total += priority(c);
break;
}
}
}
}
}
println!("Star 1: {}", total);
}
fn star2() {
let mut total = 0u32;
let mut first_group = HashSet::new();
let mut second_group = HashSet::new();
let mut iter = 0;
if let Ok(lines) = read_lines("./input.txt") {
for res in lines {
if let Ok(line) = res {
if iter % 3 == 0 {
for c in line.chars() {
first_group.insert(c);
}
}
else if iter % 3 == 1 {
for c in line.chars() {
if first_group.contains(&c) {
second_group.insert(c);
}
}
}
else if iter % 3 == 2 {
for c in line.chars() {
if second_group.contains(&c) {
total += priority(c);
break;
}
}
first_group.clear();
second_group.clear();
}
iter += 1;
}
}
}
println!("Star 2: {}", total);
}
fn main() {
star1();
star2();
}
| true
|
79f28ad487a092c1cc605f4c96c4baca5eab2ccf
|
Rust
|
eliovir/rust-examples
|
/api-std-fs-file.rs
|
UTF-8
| 1,086
| 3.109375
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
//! <http://doc.rust-lang.org/std/fs/struct.File.html>
//!
//! @license MIT license <http://www.opensource.org/licenses/mit-license.php>
use std::io::BufReader;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
fn main() {
let mut f = File::open("foo.txt").ok().expect("foo.txt not open");
let mut buffer = Vec::new();
let result = f.read_to_end(&mut buffer);
println!("size: {}", result.ok().expect("foo.txt not read"));
/*
* Read a complete file
*/
let path = Path::new("inifile/src/data/config.ini");
let mut hw_file = File::open(&path).ok().expect("file not open");
let mut data = Vec::new();
match hw_file.read_to_end(&mut data) {
Ok(s) => println!("size: {}, data: {:?}", s, data),
Err(e) => panic!("error while reading {} : {}", path.to_str().unwrap(), e)
}
/*
* Iterate over the lines of a file
*/
let hw_file = File::open(&path).ok().expect("file not open");
let file = BufReader::new(hw_file);
for line in file.lines() {
match line {
Ok(nread) => println!("{}", nread),
Err(e) => println!("error reading: {}", e)
}
}
}
| true
|
9a70194724b6e71de1c0ecbe13cae4227d2d286e
|
Rust
|
ikanago/qz
|
/src/method.rs
|
UTF-8
| 900
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
use crate::status::StatusCode;
use std::{convert::TryFrom, fmt};
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Method {
Get,
Post,
Options,
}
impl Default for Method {
fn default() -> Self {
Self::Get
}
}
impl fmt::Display for Method {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Method::Get => write!(f, "GET"),
Method::Post => write!(f, "POST"),
Method::Options => write!(f, "OPTIONS"),
}
}
}
impl TryFrom<&[u8]> for Method {
type Error = StatusCode;
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match std::str::from_utf8(value) {
Ok("GET") => Ok(Method::Get),
Ok("POST") => Ok(Method::Post),
Ok("OPTONS") => Ok(Method::Options),
_ => Err(StatusCode::MethodNotAllowed),
}
}
}
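// A minimal usage sketch of the TryFrom impl above (not part of the original
// file); the raw byte strings are illustrative request-method tokens.
#[cfg(test)]
mod try_from_sketch {
use super::*;
use std::convert::TryFrom;
#[test]
fn parses_method_bytes() {
assert!(matches!(Method::try_from(&b"GET"[..]), Ok(Method::Get)));
assert!(matches!(Method::try_from(&b"OPTIONS"[..]), Ok(Method::Options)));
assert!(Method::try_from(&b"DELETE"[..]).is_err());
}
}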
| true
|
b9a6c41140cdab5593ba992bf63d39aed18465fc
|
Rust
|
Geal/proust
|
/src/parser/errors.rs
|
UTF-8
| 935
| 3.203125
| 3
|
[] |
no_license
|
pub enum InputError {
ParserError,
NotImplemented,
InvalidRequestSize,
InvalidMessageSize,
InvalidMessageSetSize,
InvalidMessage
}
impl InputError {
#[inline]
pub fn to_int(&self) -> u32 {
match *self {
InputError::ParserError => 1,
InputError::NotImplemented => 2,
InputError::InvalidRequestSize => 3,
InputError::InvalidMessageSetSize => 4,
InputError::InvalidMessageSize => 5,
InputError::InvalidMessage => 6
}
}
}
#[inline]
pub fn from_int(code: u32) -> Option<InputError> {
match code {
1 => Option::Some(InputError::ParserError),
2 => Option::Some(InputError::NotImplemented),
3 => Option::Some(InputError::InvalidRequestSize),
4 => Option::Some(InputError::InvalidMessageSetSize),
5 => Option::Some(InputError::InvalidMessageSize),
6 => Option::Some(InputError::InvalidMessage),
_ => Option::None
}
}
| true
|
e6a3ae0081034f0aef57c6bec4c9397cd91becd9
|
Rust
|
schedutron/ion-1
|
/src/lib/builtins/exists.rs
|
UTF-8
| 15,516
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#[cfg(test)]
use smallstring::SmallString;
#[cfg(test)]
use smallvec::SmallVec;
use std::fs;
use std::os::unix::fs::PermissionsExt;
use shell::Shell;
#[cfg(test)]
use shell::flow_control::{Function, Statement};
#[cfg(test)]
use shell;
pub(crate) fn exists(args: &[&str], shell: &Shell) -> Result<bool, String> {
let arguments = &args[1..];
evaluate_arguments(arguments, shell)
}
fn evaluate_arguments(arguments: &[&str], shell: &Shell) -> Result<bool, String> {
match arguments.first() {
Some(&s) if s.starts_with("--") => {
let (_, option) = s.split_at(2);
// If no argument was given, return `SUCCESS`, as this means a string starting
// with a dash was given
arguments.get(1).map_or(Ok(true), {
|arg|
// Match the correct function to the associated flag
Ok(match_option_argument(option, arg, shell))
})
}
Some(&s) if s.starts_with("-") => {
// Access the second character in the flag string: this will be the type of the
// flag. If no flag was given, return `SUCCESS`, as this means a
// string with value "-" was checked.
s.chars().nth(1).map_or(Ok(true), |flag| {
// If no argument was given, return `SUCCESS`, as this means a string starting
// with a dash was given
arguments.get(1).map_or(Ok(true), {
|arg|
// Match the correct function to the associated flag
Ok(match_flag_argument(flag, arg, shell))
})
})
}
Some(string) => Ok(string_is_nonzero(string)),
None => Ok(false),
}
}
/// Matches flag arguments to their respective functionality when the `-`
/// character is detected.
fn match_flag_argument(flag: char, argument: &str, shell: &Shell) -> bool {
match flag {
'a' => array_var_is_not_empty(argument, shell),
'b' => binary_is_in_path(argument, shell),
'd' => path_is_directory(argument),
'f' => path_is_file(argument),
's' => string_var_is_not_empty(argument, shell),
_ => false,
}
}
// Matches option arguments to their respective functionality
fn match_option_argument(option: &str, argument: &str, shell: &Shell) -> bool {
match option {
"fn" => function_is_defined(argument, &shell),
_ => false,
}
}
/// Returns true if the file is a regular file
fn path_is_file(filepath: &str) -> bool {
fs::metadata(filepath)
.ok()
.map_or(false, |metadata| metadata.file_type().is_file())
}
/// Returns true if the file is a directory
fn path_is_directory(filepath: &str) -> bool {
fs::metadata(filepath)
.ok()
.map_or(false, |metadata| metadata.file_type().is_dir())
}
/// Returns true if the binary is found in path (and is executable)
fn binary_is_in_path(binaryname: &str, shell: &Shell) -> bool {
// TODO: Maybe this function should reflect the logic for spawning new processes.
// TODO: Right now they use an entirely different logic, which means that it
//       *might* be possible that `exists` reports a binary to be in the path
//       while the shell cannot find it, or vice-versa.
if let Some(path) = shell.get_var("PATH") {
for dir in path.split(":") {
let fname = format!("{}/{}", dir, binaryname);
if let Ok(metadata) = fs::metadata(&fname) {
if metadata.is_file() && file_has_execute_permission(&fname) {
return true;
}
}
}
};
false
}
/// Returns true if the file has execute permissions. This function is rather low level because
/// Rust currently does not have a higher level abstraction for obtaining non-standard file modes.
/// To extract the permissions from the mode, the bitwise AND operator will be used and compared
/// with the respective execute bits.
/// Note: This function is 1:1 the same as src/builtins/test.rs:file_has_execute_permission
/// If you change the following function, please also update the one in src/builtins/test.rs
fn file_has_execute_permission(filepath: &str) -> bool {
const USER: u32 = 0b1000000;
const GROUP: u32 = 0b1000;
const GUEST: u32 = 0b1;
// Collect the mode of permissions for the file
fs::metadata(filepath).map(|metadata| metadata.permissions().mode()).ok()
// If the mode is equal to any of the above, return `SUCCESS`
.map_or(false, |mode| mode & (USER + GROUP + GUEST) != 0)
}
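// For reference (added note): USER, GROUP and GUEST above are the execute bits
// 0o100, 0o010 and 0o001, so e.g. a file with mode 0o755 passes the check while
// one with mode 0o644 does not.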
/// Returns true if the string is not empty
fn string_is_nonzero(string: &str) -> bool { !string.is_empty() }
/// Returns true if the variable is an array and the array is not empty
fn array_var_is_not_empty(arrayvar: &str, shell: &Shell) -> bool {
match shell.variables.get_array(arrayvar) {
Some(array) => !array.is_empty(),
None => false,
}
}
/// Returns true if the variable is a string and the string is not empty
fn string_var_is_not_empty(stringvar: &str, shell: &Shell) -> bool {
match shell.get_var(stringvar) {
Some(string) => !string.is_empty(),
None => false,
}
}
/// Returns true if a function with the given name is defined
fn function_is_defined(function: &str, shell: &Shell) -> bool {
match shell.functions.get(function) {
Some(_) => true,
None => false,
}
}
#[test]
fn test_evaluate_arguments() {
use parser::assignments::{KeyBuf, Primitive};
let mut shell = shell::ShellBuilder::new().as_library();
// assert_eq!(evaluate_arguments(&[], &mut sink, &shell), Ok(false));
// no parameters
assert_eq!(evaluate_arguments(&[], &shell), Ok(false));
// multiple arguments
// ignores all but the first argument
assert_eq!(evaluate_arguments(&["foo", "bar"], &shell), Ok(true));
// check `exists STRING`
assert_eq!(evaluate_arguments(&[""], &shell), Ok(false));
assert_eq!(evaluate_arguments(&["string"], &shell), Ok(true));
assert_eq!(evaluate_arguments(&["string with space"], &shell), Ok(true));
assert_eq!(evaluate_arguments(&["-startswithdash"], &shell), Ok(true));
// check `exists -a`
// no argument means we treat it as a string
assert_eq!(evaluate_arguments(&["-a"], &shell), Ok(true));
shell
.variables
.set_array("emptyarray", SmallVec::from_vec(Vec::new()));
assert_eq!(evaluate_arguments(&["-a", "emptyarray"], &shell), Ok(false));
let mut vec = Vec::new();
vec.push("element".to_owned());
shell.variables.set_array("array", SmallVec::from_vec(vec));
assert_eq!(evaluate_arguments(&["-a", "array"], &shell), Ok(true));
shell.variables.unset_array("array");
assert_eq!(evaluate_arguments(&["-a", "array"], &shell), Ok(false));
// check `exists -b`
// TODO: see test_binary_is_in_path()
// no argument means we treat it as a string
assert_eq!(evaluate_arguments(&["-b"], &shell), Ok(true));
let oldpath = shell.get_var("PATH").unwrap_or("/usr/bin".to_owned());
shell.set_var("PATH", "testing/");
assert_eq!(
evaluate_arguments(&["-b", "executable_file"], &shell),
Ok(true)
);
assert_eq!(evaluate_arguments(&["-b", "empty_file"], &shell), Ok(false));
assert_eq!(
evaluate_arguments(&["-b", "file_does_not_exist"], &shell),
Ok(false)
);
// restore original PATH. Not necessary for the currently defined test cases
// but this might change in the future? Better safe than sorry!
shell.set_var("PATH", &oldpath);
// check `exists -d`
// no argument means we treat it as a string
assert_eq!(evaluate_arguments(&["-d"], &shell), Ok(true));
assert_eq!(evaluate_arguments(&["-d", "testing/"], &shell), Ok(true));
assert_eq!(
evaluate_arguments(&["-d", "testing/empty_file"], &shell),
Ok(false)
);
assert_eq!(
evaluate_arguments(&["-d", "does/not/exist/"], &shell),
Ok(false)
);
// check `exists -f`
// no argument means we treat it as a string
assert_eq!(evaluate_arguments(&["-f"], &shell), Ok(true));
assert_eq!(evaluate_arguments(&["-f", "testing/"], &shell), Ok(false));
assert_eq!(
evaluate_arguments(&["-f", "testing/empty_file"], &shell),
Ok(true)
);
assert_eq!(
evaluate_arguments(&["-f", "does-not-exist"], &shell),
Ok(false)
);
// check `exists -s`
// no argument means we treat it as a string
assert_eq!(evaluate_arguments(&["-s"], &shell), Ok(true));
shell.set_var("emptyvar", "");
assert_eq!(evaluate_arguments(&["-s", "emptyvar"], &shell), Ok(false));
shell.set_var("testvar", "foobar");
assert_eq!(evaluate_arguments(&["-s", "testvar"], &shell), Ok(true));
shell.variables.unset_var("testvar");
assert_eq!(evaluate_arguments(&["-s", "testvar"], &shell), Ok(false));
// also check that it doesn't trigger on arrays
let mut vec = Vec::new();
vec.push("element".to_owned());
shell.variables.unset_var("array");
shell.variables.set_array("array", SmallVec::from_vec(vec));
assert_eq!(evaluate_arguments(&["-s", "array"], &shell), Ok(false));
// check `exists --fn`
let name_str = "test_function";
let name = SmallString::from_str(name_str);
let mut args = Vec::new();
args.push(KeyBuf {
name: "testy".into(),
kind: Primitive::Any,
});
let mut statements = Vec::new();
statements.push(Statement::End);
let description = "description".to_owned();
shell.functions.insert(
name.clone(),
Function::new(Some(description), name, args, statements),
);
assert_eq!(evaluate_arguments(&["--fn", name_str], &shell), Ok(true));
shell.functions.remove(name_str);
assert_eq!(evaluate_arguments(&["--fn", name_str], &shell), Ok(false));
// check invalid flags / parameters (should all be treated as strings and
// therefore succeed)
assert_eq!(evaluate_arguments(&["--foo"], &shell), Ok(true));
assert_eq!(evaluate_arguments(&["-x"], &shell), Ok(true));
}
#[test]
fn test_match_flag_argument() {
let shell = shell::ShellBuilder::new().as_library();
// we don't really care about the passed values, as long as both sides return
// the same value
assert_eq!(
match_flag_argument('a', "ARRAY", &shell),
array_var_is_not_empty("ARRAY", &shell)
);
assert_eq!(
match_flag_argument('b', "binary", &shell),
binary_is_in_path("binary", &shell)
);
assert_eq!(
match_flag_argument('d', "path", &shell),
path_is_directory("path")
);
assert_eq!(
match_flag_argument('f', "file", &shell),
path_is_file("file")
);
assert_eq!(
match_flag_argument('s', "STR", &shell),
string_var_is_not_empty("STR", &shell)
);
// Any flag which is not implemented
assert_eq!(match_flag_argument('x', "ARG", &shell), false);
}
#[test]
fn test_match_option_argument() {
let shell = shell::ShellBuilder::new().as_library();
// we don't really care about the passed values, as long as both sides return
// the same value
assert_eq!(
match_option_argument("fn", "FUN", &shell),
function_is_defined("FUN", &shell)
);
// Any option which is not implemented
assert_eq!(match_option_argument("foo", "ARG", &shell), false);
}
#[test]
fn test_path_is_file() {
assert_eq!(path_is_file("testing/empty_file"), true);
assert_eq!(path_is_file("this-does-not-exist"), false);
}
#[test]
fn test_path_is_directory() {
assert_eq!(path_is_directory("testing"), true);
assert_eq!(path_is_directory("testing/empty_file"), false);
}
#[test]
fn test_binary_is_in_path() {
let mut shell = shell::ShellBuilder::new().as_library();
// TODO: We should probably also test with more complex PATH-variables:
// TODO: multiple/:directories/
// TODO: PATH containing directories which do not exist
// TODO: PATH containing directories without read permission (for user)
// TODO: PATH containing directories without execute ("enter") permission (for
// user) TODO: empty PATH?
shell.set_var("PATH", "testing/");
assert_eq!(binary_is_in_path("executable_file", &shell), true);
assert_eq!(binary_is_in_path("empty_file", &shell), false);
assert_eq!(binary_is_in_path("file_does_not_exist", &shell), false);
}
#[test]
fn test_file_has_execute_permission() {
assert_eq!(file_has_execute_permission("testing/executable_file"), true);
assert_eq!(file_has_execute_permission("testing"), true);
assert_eq!(file_has_execute_permission("testing/empty_file"), false);
assert_eq!(file_has_execute_permission("this-does-not-exist"), false);
}
#[test]
fn test_string_is_nonzero() {
assert_eq!(string_is_nonzero("NOT ZERO"), true);
assert_eq!(string_is_nonzero(""), false);
}
#[test]
fn test_array_var_is_not_empty() {
let mut shell = shell::ShellBuilder::new().as_library();
shell
.variables
.set_array("EMPTY_ARRAY", SmallVec::from_vec(Vec::new()));
assert_eq!(array_var_is_not_empty("EMPTY_ARRAY", &shell), false);
let mut not_empty_vec = Vec::new();
not_empty_vec.push("array not empty".to_owned());
shell
.variables
.set_array("NOT_EMPTY_ARRAY", SmallVec::from_vec(not_empty_vec));
assert_eq!(array_var_is_not_empty("NOT_EMPTY_ARRAY", &shell), true);
// test for array which does not even exist
shell.variables.unset_array("NOT_EMPTY_ARRAY");
assert_eq!(array_var_is_not_empty("NOT_EMPTY_ARRAY", &shell), false);
// array_var_is_not_empty should NOT match for non-array variables with the
// same name
shell.set_var("VARIABLE", "notempty-variable");
assert_eq!(array_var_is_not_empty("VARIABLE", &shell), false);
}
#[test]
fn test_string_var_is_not_empty() {
let mut shell = shell::ShellBuilder::new().as_library();
shell.set_var("EMPTY", "");
assert_eq!(string_var_is_not_empty("EMPTY", &shell), false);
shell.set_var("NOT_EMPTY", "notempty");
assert_eq!(string_var_is_not_empty("NOT_EMPTY", &shell), true);
// string_var_is_not_empty should NOT match for arrays with the same name
let mut vec = Vec::new();
vec.push("not-empty".to_owned());
shell
.variables
.set_array("ARRAY_NOT_EMPTY", SmallVec::from_vec(vec));
assert_eq!(string_var_is_not_empty("ARRAY_NOT_EMPTY", &shell), false);
// test for a variable which does not even exist
shell.variables.unset_var("NOT_EMPTY");
assert_eq!(string_var_is_not_empty("NOT_EMPTY", &shell), false);
}
#[test]
fn test_function_is_defined() {
use parser::assignments::{KeyBuf, Primitive};
let mut shell = shell::ShellBuilder::new().as_library();
// create a simple dummy function
let name_str = "test_function";
let name = SmallString::from_str(name_str);
let mut args = Vec::new();
args.push(KeyBuf {
name: "testy".into(),
kind: Primitive::Any,
});
let mut statements = Vec::new();
statements.push(Statement::End);
let description = "description".to_owned();
shell.functions.insert(
name.clone(),
Function::new(Some(description), name, args, statements),
);
assert_eq!(function_is_defined(name_str, &shell), true);
shell.functions.remove(name_str);
assert_eq!(function_is_defined(name_str, &shell), false);
}
| true
|
45763ce41469016ecffeaf07bf1642a4ee5fce1c
|
Rust
|
rust-accel/nvptx
|
/src/error.rs
|
UTF-8
| 2,763
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
use failure::Fail;
use std::{io, process};
#[derive(Debug, Clone, Copy)]
pub enum Step {
Install,
Ready,
Link,
Build,
Convert,
Load,
}
#[derive(Fail, Debug)]
pub enum CompileError {
#[fail(
display = "External command {} failed during {:?} step. Return code: {}",
command, step, error_code
)]
CommandFailure {
step: Step,
command: String,
error_code: i32,
},
#[fail(
display = "External command {} failed during {:?}. Please ensure it is installed.",
command, step
)]
CommandIOFailure {
step: Step,
command: String,
error: io::Error,
},
#[fail(
display = "Error during {:?} step: {:?}, error: {:?}",
step, comment, error
)]
OtherError {
step: Step,
comment: String,
error: failure::Error,
},
}
pub fn err_msg(step: Step, comment: &str) -> CompileError {
CompileError::OtherError {
step,
comment: comment.to_owned(),
error: failure::err_msg(comment.to_owned()),
}
}
pub type Result<T> = ::std::result::Result<T, CompileError>;
pub type ResultAny<T> = ::std::result::Result<T, failure::Error>;
pub trait Logging {
type T;
fn log_unwrap(self, step: Step) -> Result<Self::T>;
fn log(self, step: Step, comment: &str) -> Result<Self::T>;
}
impl<T, E: Into<failure::Error>> Logging for ::std::result::Result<T, E> {
type T = T;
fn log_unwrap(self, step: Step) -> Result<Self::T> {
self.log(step, "Unknown IO Error")
}
fn log(self, step: Step, comment: &str) -> Result<Self::T> {
self.map_err(|e| {
CompileError::OtherError {
step,
comment: comment.to_owned(),
error: e.into(),
}
.into()
})
}
}
pub trait CheckRun {
fn check_run(&mut self, step: Step) -> Result<()>;
}
impl CheckRun for process::Command {
fn check_run(&mut self, step: Step) -> Result<()> {
let st = self.status().map_err(|error| {
let command = format!("{:?}", self);
CompileError::CommandIOFailure {
step,
command,
error,
}
})?;
match st.code() {
Some(error_code) => {
if error_code != 0 {
let command = format!("{:?}", self);
Err(CompileError::CommandFailure {
step,
command,
error_code,
}
.into())
} else {
Ok(())
}
}
None => Ok(()),
}
}
}
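// A minimal usage sketch (not part of the original file): running an external
// command through the CheckRun helper above. The command name here is
// hypothetical.
//
//     use std::process::Command;
//
//     Command::new("ptxas")
//         .arg("--version")
//         .check_run(Step::Build)?;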
| true
|
29dfe66240eea242adde6a373a512606d8197e80
|
Rust
|
emrsmsrli/phosphorus
|
/src/hittable.rs
|
UTF-8
| 1,793
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
pub use nalgebra::{Point3, Vector3};
pub use super::ray::Ray;
pub use super::material::Material;
pub struct HitRecord {
pub location: Point3<f64>,
pub normal: Vector3<f64>,
pub t: f64, // todo rename to distance
pub front_face: bool,
pub material: Material,
}
impl HitRecord {
pub fn new(ray: &Ray, location: Point3<f64>, normal: Vector3<f64>, t: f64, material: Material) -> Self {
let front_face = ray.direction.dot(&normal) < 0.0;
HitRecord { location, normal: if front_face { normal } else { -normal }, t, front_face, material }
}
}
pub trait Hittable {
fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;
}
pub enum Object {
Sphere { material: Material, center: Point3<f64>, radius: f64 }
}
impl Hittable for Object {
fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
match self {
Object::Sphere { material, center, radius } => {
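// Ray-sphere intersection (added note): substituting P(t) = origin + t * direction
// into |P - center|^2 = radius^2 gives a*t^2 + 2*half_b*t + c = 0, whose roots are
// t = (-half_b ± sqrt(half_b^2 - a*c)) / a; those roots are tested below.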
let oc = &ray.origin - center;
let a = ray.direction.norm_squared();
let half_b = oc.dot(&ray.direction);
let c = oc.norm_squared() - radius * radius;
let discriminant = half_b * half_b - a * c;
if discriminant > 0.0 {
let disc_sqrt = discriminant.sqrt();
for disc in [-disc_sqrt, disc_sqrt].iter() {
let root = (-half_b + disc) / a;
if t_min < root && root < t_max {
let pos = ray.at(root);
return Some(HitRecord::new(&ray, pos, (pos - center) / *radius, root, *material));
}
}
}
None
}
}
}
}
| true
|
13aa2e5d48a86fde7fa19ab04fbf4004276df85f
|
Rust
|
ishanjain28/raytracing-the-next-week
|
/src/aabb.rs
|
UTF-8
| 843
| 3.125
| 3
|
[] |
no_license
|
use crate::types::{Ray, Vec3};
#[derive(Debug, Copy, Clone)]
pub struct Aabb {
pub min: Vec3,
pub max: Vec3,
}
impl Aabb {
pub const fn new(min: Vec3, max: Vec3) -> Self {
Self { min, max }
}
pub fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> bool {
let min = (self.min - ray.origin) / ray.direction;
let max = (self.max - ray.origin) / ray.direction;
let mins = min.min(max);
let maxs = min.max(max);
let tmin = mins.max_element(t_min);
let tmax = maxs.min_element(t_max);
tmax > tmin
}
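// Added note: hit() above is the "slab" intersection test, intersecting the ray
// with each axis-aligned pair of planes; the box is hit when the largest
// per-axis entry parameter is smaller than the smallest exit parameter.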
pub fn surrounding_box(box0: Aabb, box1: Aabb) -> Self {
let smol_box = Vec3::min(box0.min, box1.min);
let big_box = Vec3::max(box0.max, box1.max);
Self {
min: smol_box,
max: big_box,
}
}
}
| true
|
54a62cbf108a4e39bebff812839b86c1a1d94020
|
Rust
|
ytausky/gbemu-core
|
/src/cpu/tests/mod.rs
|
UTF-8
| 4,533
| 2.625
| 3
|
[] |
no_license
|
use super::*;
macro_rules! input {
() => {
Input { data: None, r#if: 0x00 }
};
($field:ident $($tokens:tt)*) => {
{
let mut input = input!();
input!(@ input, $field $($tokens)*);
input
}
};
(@ $input:ident, $field:ident: $value:expr, $($tokens:tt)*) => {
input!(@ $input, $field: $value);
input!(@ $input, $($tokens)*)
};
(@ $input:ident, data: $data:expr) => {
$input.data = Some($data)
};
(@ $input:ident, if: $if:expr) => {
$input.r#if = $if
};
}
macro_rules! output {
() => {
Output { bus: None, ack: 0x00 }
};
($field:ident $($tokens:tt)*) => {
{
let mut output = output!();
output!(@ output, $field $($tokens)*);
output
}
};
(@ $output:ident, bus: $bus:expr) => {
$output.bus = Some($bus);
};
(@ $output:ident, ack: $ack:expr) => {
$output.ack = $ack;
};
}
mod alu;
mod branch;
mod interrupt;
mod ld;
impl R {
fn code(self) -> u8 {
match self {
R::A => 0b111,
R::B => 0b000,
R::C => 0b001,
R::D => 0b010,
R::E => 0b011,
R::H => 0b100,
R::L => 0b101,
}
}
}
impl Dd {
fn encode(self) -> u8 {
match self {
Dd::Bc => 0b00,
Dd::De => 0b01,
Dd::Hl => 0b10,
Dd::Sp => 0b11,
}
}
}
impl Qq {
fn encode(self) -> u8 {
match self {
Qq::Bc => 0b00,
Qq::De => 0b01,
Qq::Hl => 0b10,
Qq::Af => 0b11,
}
}
}
const RET: u8 = 0xc9;
struct TestBench {
cpu: Cpu,
r#if: u8,
trace: CpuTrace,
expected: CpuTrace,
}
type CpuTrace = Vec<(Input, Output)>;
impl Default for TestBench {
fn default() -> Self {
let mut cpu = Cpu::default();
cpu.data.sp = 0xd000;
cpu.data.ie = 0x1f;
cpu.data.ime = true;
Self {
cpu,
r#if: 0x00,
trace: Default::default(),
expected: Default::default(),
}
}
}
impl TestBench {
fn trace_nop(&mut self) {
self.trace_fetch(self.cpu.data.pc, &[NOP])
}
fn trace_ret(&mut self, addr: u16) {
let sp = self.cpu.data.sp;
self.trace_fetch(self.cpu.data.pc, &[RET]);
self.trace_bus_read(sp, low_byte(addr));
self.trace_bus_read(sp.wrapping_add(1), high_byte(addr));
self.trace_bus_no_op()
}
fn trace_fetch(&mut self, pc: u16, encoding: &[u8]) {
for (i, byte) in encoding.iter().enumerate() {
self.trace_bus_read(pc.wrapping_add(i as u16), *byte)
}
}
fn trace_bus_no_op(&mut self) {
self.trace_step(None, output!());
self.trace_step(None, output!())
}
fn trace_bus_read(&mut self, addr: u16, data: u8) {
self.trace_step(None, output!(bus: bus_read(addr)));
self.trace_step(Some(data), output!())
}
fn trace_bus_write(&mut self, addr: u16, data: u8) {
self.trace_step(None, output!(bus: bus_write(addr, data)));
self.trace_step(None, output!())
}
fn trace_step(&mut self, data: Option<u8>, output: Output) {
let input = Input {
data,
r#if: self.r#if,
};
self.trace.push((input.clone(), self.cpu.step(&input)));
self.expected.push((input, output))
}
}
impl Cpu {
fn test_simple_instr<'a, I>(&mut self, opcode: &[u8], steps: I)
where
I: IntoIterator<Item = &'a (Input, Output)>,
{
let steps: Vec<_> = steps
.into_iter()
.cloned()
.chain(vec![
(
input!(),
output!(bus: bus_read(self.data.pc + opcode.len() as u16)),
),
(input!(data: 0x00), output!()),
])
.collect();
self.test_opcode(opcode, &steps);
}
fn test_opcode<'a, I>(&mut self, opcode: &[u8], steps: I)
where
I: IntoIterator<Item = &'a (Input, Output)>,
{
let pc = self.data.pc;
for (i, byte) in opcode.iter().enumerate() {
assert_eq!(self.step(&input!()), output!(bus: bus_read(pc + i as u16)));
assert_eq!(self.step(&input!(data: *byte)), output!());
}
for (input, output) in steps {
assert_eq!(self.step(input), *output)
}
}
}
| true
|
210c708668919620317a4e18003b310a0159330a
|
Rust
|
beagleknight/roguelike-ecs
|
/src/item.rs
|
UTF-8
| 6,754
| 3.046875
| 3
|
[] |
no_license
|
use rand::{
distributions::{IndependentSample, Weighted, WeightedChoice},
Rng,
};
use specs::prelude::*;
use crate::components::{Equipable, Object, Pickable, Position, Usable};
use crate::game::colors;
use crate::map::{Map, Transition};
#[derive(Clone, Copy, PartialEq)]
pub enum ItemKind {
HealthPotion,
Sword,
Dagger,
Helmet,
}
#[derive(Clone, Copy, PartialEq)]
pub enum SlotKind {
LeftHand,
RightHand,
Head,
}
impl std::fmt::Display for SlotKind {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
SlotKind::LeftHand => write!(f, "left hand"),
SlotKind::RightHand => write!(f, "right hand"),
SlotKind::Head => write!(f, "head"),
}
}
}
pub struct Item {
pub kind: ItemKind,
pub position: Position,
}
impl Item {
pub fn place_items(map: &mut Map) -> Vec<Item> {
let max_items = Map::from_dungeon_level(
&[
Transition { level: 1, value: 1 },
Transition { level: 4, value: 2 },
],
map.level,
);
let mut items = vec![];
for room in &map.rooms {
let num_items = rand::thread_rng().gen_range(0, max_items + 1);
let item_chances = &mut [
Weighted {
weight: 35,
item: ItemKind::HealthPotion,
},
Weighted {
weight: Map::from_dungeon_level(
&[Transition {
level: 2,
value: 10,
}],
map.level,
),
item: ItemKind::Dagger,
},
Weighted {
weight: Map::from_dungeon_level(
&[Transition {
level: 3,
value: 10,
}],
map.level,
),
item: ItemKind::Helmet,
},
Weighted {
weight: Map::from_dungeon_level(
&[Transition { level: 4, value: 5 }],
map.level,
),
item: ItemKind::Sword,
},
];
let item_choice = WeightedChoice::new(item_chances);
for _ in 0..num_items {
let x = rand::thread_rng().gen_range(room.x1 + 1, room.x2);
let y = rand::thread_rng().gen_range(room.y1 + 1, room.y2);
if !map.is_occupied(x, y) {
items.push(Item {
kind: item_choice.ind_sample(&mut rand::thread_rng()),
position: Position { x, y },
});
map.occupied_places.push(Position { x, y });
}
}
}
items
}
pub fn build_entities(items: Vec<Item>, world: &mut World) {
Item::clean_entities(world);
for item in &items {
match item.kind {
ItemKind::HealthPotion => {
world
.create_entity()
.with(Object {
name: String::from("healing potion"),
color: colors::VIOLET,
character: '!',
})
.with(item.position.clone())
.with(Pickable)
.with(Usable {
kind: ItemKind::HealthPotion,
})
.build();
}
ItemKind::Sword => {
world
.create_entity()
.with(Object {
name: String::from("sword"),
color: colors::SKY,
character: '/',
})
.with(item.position.clone())
.with(Pickable)
.with(Equipable {
max_hp_bonus: 0,
power_bonus: 3,
defense_bonus: 0,
slot: SlotKind::RightHand,
})
.build();
}
ItemKind::Dagger => {
Item::create_dagger_entity(world, Some(item.position.clone()));
}
ItemKind::Helmet => {
Item::create_helmet_entity(world, Some(item.position.clone()));
}
}
}
}
pub fn clean_entities(world: &mut World) {
let pickables = world.read_storage::<Pickable>();
let positions = world.read_storage::<Position>();
let entities = world.entities();
for (entity, _, _) in (&entities, &pickables, &positions).join() {
entities.delete(entity).unwrap();
}
}
pub fn create_dagger_entity(
world: &mut World,
position: Option<Position>,
) -> (Entity, Equipable) {
let equipable = Equipable {
max_hp_bonus: 0,
power_bonus: 1,
defense_bonus: 0,
slot: SlotKind::RightHand,
};
let entity_builder = world
.create_entity()
.with(Object {
name: String::from("dagger"),
color: colors::SKY,
character: '-',
})
.with(Pickable)
.with(equipable);
let entity_builder = if let Some(position) = position {
entity_builder.with(position)
} else {
entity_builder
};
(entity_builder.build(), equipable)
}
pub fn create_helmet_entity(
world: &mut World,
position: Option<Position>,
) -> (Entity, Equipable) {
let equipable = Equipable {
max_hp_bonus: 0,
power_bonus: 0,
defense_bonus: 1,
slot: SlotKind::Head,
};
let entity_builder = world
.create_entity()
.with(Object {
name: String::from("helmet"),
color: colors::DARKER_ORANGE,
character: 'c',
})
.with(Pickable)
.with(equipable);
let entity_builder = if let Some(position) = position {
entity_builder.with(position)
} else {
entity_builder
};
(entity_builder.build(), equipable)
}
}
| true
|
c94ffe9b2b7b257c1ea10a3cb9e1596c61612bda
|
Rust
|
mma1979/wind-lang-rs
|
/src/scanner.rs
|
UTF-8
| 8,466
| 3.109375
| 3
|
[] |
no_license
|
use crate::{
error::{ScannerError, WindError},
token::{Token, TokenType},
types::LiteralType,
};
pub struct Scanner {
source: String,
tokens: Vec<Token>,
start: usize,
current: usize,
line: i32,
}
impl Scanner {
pub fn new(source: String) -> Scanner {
Scanner {
source,
tokens: Vec::new(),
start: 0,
current: 0,
line: 1,
}
}
pub fn scan_tokens(&mut self) -> Vec<Token> {
while !self.is_at_end() {
self.start = self.current;
match self.scan_token() {
Err(e) => {
e.report();
}
_ => (),
}
}
self.add_token(TokenType::EOF, LiteralType::Nil);
self.tokens.to_owned()
}
fn scan_token(&mut self) -> Result<(), ScannerError> {
let current_char = self.advance();
match current_char {
'(' => self.add_token(TokenType::LeftParen, LiteralType::Nil),
')' => self.add_token(TokenType::RightParen, LiteralType::Nil),
'{' => self.add_token(TokenType::LeftBrace, LiteralType::Nil),
'}' => self.add_token(TokenType::RightBrace, LiteralType::Nil),
',' => self.add_token(TokenType::Comma, LiteralType::Nil),
'.' => {
if self.match_char('.') {
self.add_token(TokenType::DotDot, LiteralType::Nil);
} else {
self.add_token(TokenType::Dot, LiteralType::Nil);
}
}
';' => self.add_token(TokenType::Semicolon, LiteralType::Nil),
'-' => {
if self.match_char('=') {
self.add_token(TokenType::MinusEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Minus, LiteralType::Nil);
}
}
'+' => {
if self.match_char('=') {
self.add_token(TokenType::PlusEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Plus, LiteralType::Nil);
}
}
'*' => {
if self.match_char('=') {
self.add_token(TokenType::StarEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Star, LiteralType::Nil);
}
}
'%' => {
if self.match_char('=') {
self.add_token(TokenType::PercentEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Percent, LiteralType::Nil);
}
}
'/' => {
if self.match_char('/') {
while !self.is_at_end() && self.peak() != '\n' {
self.advance();
}
} else if self.match_char('=') {
self.add_token(TokenType::SlashEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Slash, LiteralType::Nil);
}
}
'!' => {
if self.match_char('=') {
self.add_token(TokenType::BangEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Bang, LiteralType::Nil);
}
}
'=' => {
if self.match_char('=') {
self.add_token(TokenType::EqualEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Equal, LiteralType::Nil);
}
}
'<' => {
if self.match_char('=') {
self.add_token(TokenType::LessEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Less, LiteralType::Nil);
}
}
'>' => {
if self.match_char('=') {
self.add_token(TokenType::GreaterEqual, LiteralType::Nil);
} else {
self.add_token(TokenType::Greater, LiteralType::Nil);
}
}
'"' => self.scan_string()?,
' ' | '\r' | '\t' => {}
'\n' => {
self.line += 1;
}
_ => {
if current_char.is_numeric() {
self.scan_number()?;
} else if current_char.is_alphabetic() {
self.scan_identifier();
} else {
return Err(ScannerError::new(
self.line,
format!("unexpected character '{}'", current_char),
));
}
}
}
Ok(())
}
fn advance(&mut self) -> char {
let current_char = self.source.chars().nth(self.current).unwrap();
self.current += 1;
current_char
}
fn scan_string(&mut self) -> Result<(), ScannerError> {
while !self.is_at_end() && self.peak() != '"' {
if self.peak() == '\n' {
self.line += 1;
}
self.advance();
}
if self.is_at_end() {
return Err(ScannerError::new(
self.line,
"unterminated string".to_owned(),
));
}
self.advance(); // "
let string = self.source[self.start + 1..self.current - 1]
.to_owned()
.replace("\\n", "\n");
self.add_token(TokenType::String, LiteralType::String(string));
Ok(())
}
fn scan_number(&mut self) -> Result<(), ScannerError> {
while self.peak().is_numeric() {
self.advance();
}
if self.peak() == '.' && self.peak_next().is_numeric() {
self.advance();
while self.peak().is_numeric() {
self.advance();
}
}
let literal = self.source[self.start..self.current].to_owned();
let float: f32 = match literal.parse() {
Ok(v) => v,
Err(_) => {
return Err(ScannerError::new(
self.line,
"cannot parse number".to_owned(),
));
}
};
self.add_token(TokenType::Number, LiteralType::Number(float));
Ok(())
}
fn scan_identifier(&mut self) {
while self.peak().is_alphanumeric() || self.peak() == '_' {
self.advance();
}
let text = self.source[self.start..self.current].to_owned();
let token_type = self.match_keyword(&text);
self.add_token(token_type, LiteralType::Nil);
}
fn add_token(&mut self, t_type: TokenType, literal: LiteralType) {
let text = self.source[self.start..self.current].to_owned();
self.tokens
.push(Token::new(t_type, text, Box::new(literal), self.line));
}
fn is_at_end(&self) -> bool {
self.current >= self.source.len()
}
fn match_char(&mut self, expected: char) -> bool {
if self.is_at_end() || self.peak() != expected {
return false;
} else {
self.advance();
return true;
}
}
fn peak(&self) -> char {
if self.is_at_end() {
return '\0';
}
self.source.chars().nth(self.current).unwrap()
}
fn peak_next(&self) -> char {
if self.is_at_end() {
return '\0';
}
self.source.chars().nth(self.current + 1).unwrap()
}
fn match_keyword(&mut self, name: &str) -> TokenType {
match name {
"and" => TokenType::And,
"class" => TokenType::Class,
"else" => TokenType::Else,
"true" => TokenType::True,
"false" => TokenType::False,
"for" => TokenType::For,
"while" => TokenType::While,
"fun" => TokenType::Fun,
"if" => TokenType::If,
"nil" => TokenType::Nil,
"or" => TokenType::Or,
"return" => TokenType::Return,
"super" => TokenType::Super,
"this" => TokenType::This,
"var" => TokenType::Var,
"in" => TokenType::In,
_ => TokenType::Identifier,
}
}
}
| true
|
d4c068b9ac9cc7202a7bab4b059b65fff594a55e
|
Rust
|
Bruno-Messias/Rust-Learning
|
/owner/src/main.rs
|
UTF-8
| 2,558
| 4.09375
| 4
|
[
"MIT"
] |
permissive
|
fn main() {
let s1 = String::from("hello"); //Declre to use the heap
let s2 = s1; //Move -> invalid the last variable
let s3 = s2.clone(); //Clone -> do not invalid s2 (its expensive to do)
println!("{}, world!", s2); //can't print s1-> invalided
println!("{}, world!", s3); //can print s3-> its valid (but expensive)
//Ownership:
let s = String::from("hello");
println!("{}", s);
println!("{}", s);
owner(s);
    //println!("{}",s); //Can't print s anymore (it was moved into the function and dropped there)
let z1 = gives_ownership(); // receiver ownership(in scope)
let z2 = String::from("hello"); // z2 comes into scope
let z3 = takes_and_gives_back(z2); //z3 moved, z2 dropped
println!("{}",z1);
println!("{}",z3);
    //Multiple outputs (tuples)
    let s1 = String::from("hello");
    let (s2, len) = calculate_length(s1); //s1 is moved in; the String comes back as s2 so it can be used again
println!("The length of '{}' is {}.", s2, len);
//* Pass by reference
let s1 = String::from("hello");
    let len = calculate_length_pointer(&s1); //Pass by reference: s1 is borrowed, not dropped (and cannot be modified)
println!("The length of '{}' is {}.", s1, len);
//* Mutable references
let mut s = String::from("hello");
    {
        let r1 = &mut s;
        //let r2 = &mut s; // ERROR: can only have one mutable reference at a time
        change(r1);
        println!("{}", r1);
    } //r1 is dropped, so we can take a new mutable reference
let r2 = &mut s;
println!("{}",r2);
}
fn owner(string_in: String){
    println!("{}",string_in); //Calling the function moved the String in; it is dropped when this scope ends
//Drop the string
}
fn gives_ownership() -> String { // gives_ownership will move its
// return value into the function
// that calls it
let some_string = String::from("hello"); // some_string comes into scope
some_string // some_string is returned and
// moves out to the calling
// function
}
// takes_and_gives_back will take a String and return one
fn takes_and_gives_back(a_string: String) -> String { // a_string comes into
// scope
a_string // a_string is returned and moves out to the calling function
}
fn calculate_length(s: String) -> (String, usize) {
let length = s.len(); // len() returns the length of a String
(s, length)
}
fn calculate_length_pointer(s: &String) -> usize {
s.len()
}
fn change(some_string: &mut String) {
some_string.push_str(", world");
}
// Don't return a pointer, return the string itself
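// A minimal sketch of the note above (the helper name `no_dangle` is illustrative and not
// from the original file): returning a reference to a local String would dangle, so hand
// the owned String back to the caller instead.
#[allow(dead_code)]
fn no_dangle() -> String {
    let s = String::from("hello");
    s // ownership moves out to the caller, so nothing is left behind to dangle
}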
| true
|
8bc83fcdb2f2a4b6a446b73859c0b24c8e15c66a
|
Rust
|
jethrodaniel/exercism
|
/rust/grains/src/lib.rs
|
UTF-8
| 660
| 3.546875
| 4
|
[
"MIT"
] |
permissive
|
// The total number of grains on tile s is
//
// Tile (s): 1 2 3 4 5 6 ... s
// Grains : 1 2 4 8 16 32 ... 2^(s - 1)
//
pub fn square(s: u32) -> u64 {
if s < 1 || s > 64 {
panic!("Square must be between 1 and 64")
}
2u64.pow(s - 1)
}
// The total number of grains on the board when k tiles are filled is
//
// Tiles filled (k): 1 2 3 4  5  6 ... k
//
// Number of Grains: 1 3 7 15 31 63 ... 2^k - 1
//
//            k
// 2^k - 1 =  ∑  2^(n - 1)
//           n=1
//
// so the full board (k = 64) holds 2^64 - 1 grains.
//
pub fn total() -> u64 {
(1..64 + 1).fold(0, |sum, n| sum + square(n))
}
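// Illustrative sanity checks (not part of the original exercise file): the first tile holds
// a single grain and the full board holds 2^64 - 1 grains, i.e. the largest u64 value.
#[cfg(test)]
mod doubling_checks {
    use super::*;
    #[test]
    fn first_square_holds_one_grain() {
        assert_eq!(square(1), 1);
    }
    #[test]
    fn full_board_total_is_u64_max() {
        assert_eq!(total(), u64::max_value()); // 2^64 - 1
    }
}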
| true
|
d0ff762d95d65b856b634d857daccdf2096c0f19
|
Rust
|
duck1123/huber
|
/src/procmacro/src/lib.rs
|
UTF-8
| 975
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
#[proc_macro]
pub fn process_lock(_item: TokenStream) -> TokenStream {
let result = quote! {
use huber_common::model::config::Config;
use std::fs::File;
use fs2::FileExt;
use log::{error, info};
let lock_path = Config::new().lock_file().unwrap();
let f = if !lock_path.exists() {
File::create(&lock_path)
} else {
File::open(&lock_path)
}.unwrap();
let r = f.try_lock_exclusive();
match r {
Ok(_) => {
info!("{}: {:?}", "Locking the operation", lock_path);
},
Err(e) => {
error!("{:?}: {:?}", lock_path, e);
                return Err(anyhow!("another huber process is already running and holds the exclusive lock for this operation. Please retry after it finishes. {:?}", e))
}
}
};
result.into()
}
| true
|
58d063d685107234d313020ae1716e17e22c321a
|
Rust
|
ahmed-masud/yew-bulma
|
/src/forms/text_input.rs
|
UTF-8
| 4,335
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
use std::{collections::HashMap, rc::Rc, str::FromStr};
use web_sys::HtmlInputElement;
use yew::prelude::*;
use crate::forms::FormField;
use super::storage::FormStorage;
pub struct TextInput<T, V>
where
T: FormField,
V: Clone + FromStr + ToString + std::fmt::Debug + PartialEq + 'static,
{
props: Props<T, V>,
text_value: String,
input: NodeRef,
link: ComponentLink<Self>,
}
#[derive(Clone, Properties)]
pub struct Props<T, V>
where
T: FormField,
V: Clone + FromStr + ToString + std::fmt::Debug + PartialEq + 'static,
{
#[prop_or_default]
pub on_value_changed: Callback<Option<V>>,
pub storage: FormStorage<Option<V>>,
pub field: T,
pub errors: Option<Rc<HashMap<T, Vec<Rc<Html>>>>>,
#[prop_or_default]
pub placeholder: String,
#[prop_or_default]
pub disabled: bool,
#[prop_or_default]
pub readonly: bool,
#[prop_or_default]
pub autofocus: bool,
}
pub enum Message {
KeyPressed,
}
impl<T, V> Component for TextInput<T, V>
where
T: FormField,
V: Clone + FromStr + ToString + std::fmt::Debug + PartialEq + 'static,
{
type Message = Message;
type Properties = Props<T, V>;
fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
let text_value = props
.storage
.value()
.unwrap_or(None)
.map(|v| v.to_string())
.unwrap_or_default();
TextInput {
props,
link,
input: NodeRef::default(),
text_value,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
Message::KeyPressed => {
if let Some(input) = self.input.cast::<HtmlInputElement>() {
self.text_value = input.value();
if self.text_value.is_empty() {
self.props.storage.update_with_invalid_hint(None, false);
self.props.storage.update_invalid_hint(false);
self.props.on_value_changed.emit(None);
} else if let Ok(value) = V::from_str(&self.text_value) {
self.props
.storage
.update_with_invalid_hint(Some(value.clone()), false);
self.props.storage.update_invalid_hint(false);
self.props.on_value_changed.emit(Some(value));
} else {
self.props.storage.update_invalid_hint(true);
self.props
.on_value_changed
.emit(self.props.storage.value().unwrap_or_default());
}
}
}
}
false
}
fn view(&self) -> Html {
let errors = self
.props
.errors
.as_ref()
.map(|errors| errors.get(&self.props.field).cloned());
let css_class = match &errors {
Some(errors) => match errors {
Some(_) => "input is-danger",
None => "input",
},
None => "input",
};
html! {
<div class="control">
<input
id=self.props.field.form_id()
class=css_class
ref=self.input.clone()
type="text"
value=self.text_value
placeholder=&self.props.placeholder
onchange=self.link.callback(|_| Message::KeyPressed)
oninput=self.link.callback(|_| Message::KeyPressed)
disabled=self.props.disabled
readonly=self.props.readonly />
</div>
}
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
self.text_value = props
.storage
.unchecked_value()
.map(|v| v.to_string())
.unwrap_or_default();
self.props = props;
true
}
fn rendered(&mut self, first_render: bool) {
if first_render && self.props.autofocus {
if let Some(input) = self.input.cast::<HtmlInputElement>() {
let _ = input.focus();
}
}
}
}
| true
|
769bad1d594badb05a6f04cbc31a19b4f86bb57a
|
Rust
|
amikhalev/rinklers
|
/src/util.rs
|
UTF-8
| 12,350
| 3.46875
| 3
|
[] |
no_license
|
//! Contains various utilites that are used in the rest of the program
use chrono;
use time;
use std::cmp::{PartialOrd, Ordering};
use std::time::Duration;
use std::sync::mpsc::{Receiver, RecvTimeoutError};
use std::sync::{Condvar, Mutex, MutexGuard, LockResult, PoisonError, Arc};
use std::ops::{Deref, DerefMut};
use std::fmt;
use serde::{de, Deserialize};
/// Represents a time to wait for when waiting for an event to occur.
#[derive(Clone, PartialEq, Eq)]
pub enum WaitPeriod {
/// Just wait for the next occurence of the event
Wait,
/// Wait at most for the specified duration (wait for a timeout)
AtMost(Duration),
/// Don't wait at all and return immediately
None,
}
impl PartialOrd for WaitPeriod {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for WaitPeriod {
fn cmp(&self, other: &Self) -> Ordering {
use self::WaitPeriod::*;
match (self.clone(), other.clone()) {
(Wait, Wait) | (None, None) => Ordering::Equal,
(Wait, _) | (_, None) => Ordering::Greater,
(_, Wait) | (None, _) => Ordering::Less,
(AtMost(ref dur1), AtMost(ref dur2)) => dur1.cmp(dur2),
}
}
}
impl fmt::Debug for WaitPeriod {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::WaitPeriod::*;
match *self {
Wait => "Wait".fmt(f),
AtMost(ref dur) => write!(f, "AtMost({})", duration_string(dur)),
None => "None".fmt(f),
}
}
}
/// Receives from the specified `Receiver`, while also waiting for whatever amount `period` is.
///
/// Returns None if `period` was None or if the timeout was reached. Returns Some if a message was
/// received. Panics if the receive failed for an unspecified reason.
pub fn wait_receiver<T>(receiver: &Receiver<T>, period: &WaitPeriod) -> Option<T> {
use self::WaitPeriod::*;
match *period {
Wait => Some(receiver.recv().unwrap()),
AtMost(ref dur) => {
match receiver.recv_timeout(*dur) {
Ok(recv) => Some(recv),
Err(RecvTimeoutError::Timeout) => Option::None,
e @ Err(_) => {
e.unwrap();
unreachable!()
}
}
}
None => Option::None,
}
}
/// Waits on `condvar` for `period`, returning a `MutexGuard` for `mutex` if successful
pub fn wait_condvar<'a, T>(condvar: &Condvar,
mutex: &'a Mutex<T>,
period: &WaitPeriod)
-> LockResult<MutexGuard<'a, T>> {
use self::WaitPeriod::*;
let lock = mutex.lock();
match *period {
Wait => lock.and_then(|guard| condvar.wait(guard)),
AtMost(ref dur) => {
lock.and_then(|guard| {
condvar.wait_timeout(guard, *dur)
.map(|res| res.0)
.map_err(|err| PoisonError::new(err.into_inner().0))
})
}
None => lock,
}
}
/// Gets a human-readable string representation of a `chrono::Duration`
pub fn chrono_duration_string(dur: &::chrono::Duration) -> String {
if dur.is_zero() {
return "0".into();
}
let (dur, neg): (::chrono::Duration, bool) = {
if dur < &::chrono::Duration::zero() {
(-(*dur), true)
} else {
(*dur, false)
}
};
let weeks = dur.num_weeks();
let days = dur.num_days() % 7;
let hours = dur.num_hours() % 24;
let minutes = dur.num_minutes() % 60;
let secs = dur.num_seconds() % 60;
let millis = dur.num_milliseconds() % 1000;
let micros = dur.num_microseconds();
let nanos = dur.num_nanoseconds();
let mut s = String::new();
if neg {
s.push_str("-")
}
if weeks > 0 {
s.push_str(&(weeks.to_string() + "w"))
}
if days > 0 {
s.push_str(&(days.to_string() + "d"))
}
if hours > 0 {
s.push_str(&(hours.to_string() + "h"))
}
if minutes > 0 {
s.push_str(&(minutes.to_string() + "m"))
}
if secs > 0 {
s.push_str(&(secs.to_string() + "s"))
}
if millis > 0 {
s.push_str(&(millis.to_string() + "ms"))
}
if let Some(micros) = micros {
let micros = micros % 1000;
if micros > 0 {
s.push_str(&(micros.to_string() + "us"))
}
}
if let Some(nanos) = nanos {
let nanos = nanos % 1000;
if nanos > 0 {
s.push_str(&(nanos.to_string() + "ns"))
}
}
s
}
quick_error! {
/// An error that can occur when converting a `String` to a `Duration`
#[derive(Debug)]
pub enum DurationFromStrError {
/// There was an error converting a `chrono::Duration` to a `std::time::Duration`
DurationToStd(err: time::OutOfRangeError) {
description("chrono::Duration out of range of std::time::Duration")
from()
}
/// There was an unexpected character when parsing a Duration
UnexpectedChar(c: char) {
description("There was an unexpected character when parsing a Duration")
display("expected number, whitespace or postfix. found {}", c)
}
}
}
/// Converts a `&str` to a `chrono::Duration`. The string is a sequence of `<number><unit>`
/// components, e.g. `"2m30s"`, with units `w`, `d`, `h`, `m`, `s`, `ms`, `us` and `ns`.
pub fn chrono_duration_from_str(s: &str) -> Result<chrono::Duration, DurationFromStrError> {
let mut dur = chrono::Duration::zero();
let mut num: i64 = 0;
let mut iter = s.chars()
.flat_map(|c| c.to_lowercase())
.peekable();
while let Some(c) = iter.next() {
if c.is_whitespace() {
continue;
}
if let Some(digit) = c.to_digit(10) {
num = num * 10 + digit as i64;
continue;
}
match (c, iter.peek().cloned()) {
('w', _) => dur = dur + chrono::Duration::weeks(num),
('d', _) => dur = dur + chrono::Duration::days(num),
('h', _) => dur = dur + chrono::Duration::hours(num),
('s', _) => dur = dur + chrono::Duration::seconds(num),
('m', Some('s')) => dur = dur + chrono::Duration::milliseconds(num),
('m', _) => dur = dur + chrono::Duration::minutes(num),
('u', Some('s')) => dur = dur + chrono::Duration::microseconds(num),
('n', Some('s')) => dur = dur + chrono::Duration::nanoseconds(num),
_ => return Err(DurationFromStrError::UnexpectedChar(c)),
}
num = 0;
}
Ok(dur)
}
/// Converts a `&str` to a `std::time::Duration`
pub fn duration_from_str(s: &str) -> Result<Duration, DurationFromStrError> {
chrono_duration_from_str(s).and_then(|dur| {
dur.to_std()
.map_err(|err| err.into())
})
}
/// Deserializes a `std::time::Duration`. Converts it to a string and then uses
/// [`duration_from_str`](#fn.duration_from_str).
pub fn deserialize_duration<D>(d: &mut D) -> Result<Duration, D::Error>
where D: de::Deserializer
{
let s = try!(String::deserialize(d));
duration_from_str(&s).map_err(|err| de::Error::custom(format!("{}", err)))
}
/// Gets a string representation of a `std::time::Duration`
pub fn duration_string(duration: &::std::time::Duration) -> String {
chrono_duration_string(&::chrono::Duration::from_std(*duration).unwrap())
}
/// A guard returned by
/// [`LockCondvarGuard.lock_condvar`](trait.LockCondvarGuard.html#fn.lock_condvar).
/// It `Deref`s and `DerefMut`s to the underlying `MutexGuard`. It notifies on the `Condvar` when
/// it is `Drop`ed.
pub struct CondvarGuard<'a, T>
where T: 'a
{
mutex_guard: MutexGuard<'a, T>,
condvar: &'a Condvar,
}
impl<'a, T> CondvarGuard<'a, T> {
fn new(mutex_guard: MutexGuard<'a, T>, condvar: &'a Condvar) -> Self {
CondvarGuard {
mutex_guard: mutex_guard,
condvar: condvar,
}
}
}
impl<'mutex, T> Deref for CondvarGuard<'mutex, T> {
type Target = T;
fn deref(&self) -> &T {
self.mutex_guard.deref()
}
}
impl<'mutex, T> DerefMut for CondvarGuard<'mutex, T> {
fn deref_mut(&mut self) -> &mut T {
self.mutex_guard.deref_mut()
}
}
impl<'a, T> Drop for CondvarGuard<'a, T> {
fn drop(&mut self) {
self.condvar.notify_one();
// this should immediately drop self and self.mutex_guard, which will unlock the mutex
}
}
/// For objects that can be locked with a `Condvar`
pub trait LockCondvarGuard<T> {
/// Locks `self`, notifying the `Convar` when the returned `CondvarGuard` is dropped and
/// unlocks `self`
fn lock_condvar<'a>(&'a self, condvar: &'a Condvar) -> LockResult<CondvarGuard<'a, T>>;
}
impl<T> LockCondvarGuard<T> for Mutex<T> {
fn lock_condvar<'a>(&'a self, condvar: &'a Condvar) -> LockResult<CondvarGuard<'a, T>> {
let guard = self.lock();
guard.map(|guard| CondvarGuard::new(guard, condvar))
.map_err(|poison_err| {
PoisonError::new(CondvarGuard::new(poison_err.into_inner(), condvar))
})
}
}
/// The state for a generic "runner" object. Specifically, this stores multi thread state in with a
/// `Mutex`, that when updated will notify a thread using a `Condvar`.
pub struct RunnerState<D> {
data: Mutex<D>,
condvar: Condvar,
}
impl<D> RunnerState<D> {
/// Creates a new `RunnerState` containing the specified data
pub fn new(data: D) -> Self {
RunnerState {
data: Mutex::new(data),
condvar: Condvar::new(),
}
}
/// Creates a new `RunnerState` in a `Box`
pub fn new_box(data: D) -> Box<Self> {
Box::new(Self::new(data))
}
/// Creates a new `RunnerState` in an `Arc`
pub fn new_arc(data: D) -> Arc<Self> {
Arc::new(Self::new(data))
}
/// Begins an update to the `RunnerState`, where changes to the underlying data can be made.
/// When the `CondvarGuard` is dropped, it will notify the runner thread of the update.
pub fn update(&self) -> CondvarGuard<D> {
self.data.lock_condvar(&self.condvar).unwrap()
}
/// Notifies the runner thread of an update
pub fn notify_update(&self) {
self.condvar.notify_one();
}
/// Waits for the state to update. Returns a `MutexGuard` on the data stored in the state.
pub fn wait_update(&self) -> LockResult<MutexGuard<D>> {
let guard = try!(self.data.lock());
self.condvar.wait(guard)
}
/// Waits for the state to update for `period`. Returns a `MutexGuard` on the data stored in
/// the state.
/// See [WaitPeriod](enum.WaitPeriod.html)
pub fn wait_update_for_period(&self, period: &WaitPeriod) -> LockResult<MutexGuard<D>> {
wait_condvar(&self.condvar, &self.data, period)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_chrono_duration_string() {
use ::chrono::Duration;
let cases: Vec<(Duration, &str)> = vec![(Duration::seconds(1), "1s"),
(Duration::seconds(150), "2m30s"),
(Duration::weeks(1), "1w"),
(Duration::days(1), "1d"),
(Duration::hours(1), "1h"),
(Duration::minutes(1), "1m"),
(Duration::milliseconds(1), "1ms"),
(Duration::microseconds(1), "1us"),
(Duration::nanoseconds(1), "1ns"),
(Duration::seconds(31449599) +
Duration::nanoseconds(999999999),
"51w6d23h59m59s999ms999us999ns")];
for &(ref dur, ref expected_string) in &cases {
let expected_string = expected_string.to_string();
let string = chrono_duration_string(dur);
assert_eq!(string, expected_string);
}
for &(ref expected_dur, ref string) in &cases {
let dur: Duration = chrono_duration_from_str(string).unwrap();
assert_eq!(dur, *expected_dur);
}
}
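    // Illustrative check (not in the original suite): `WaitPeriod::Wait` orders above any
    // bounded wait, `WaitPeriod::None` below it, and `AtMost` values order by their duration.
    #[test]
    fn test_wait_period_ordering() {
        use std::time::Duration;
        assert!(WaitPeriod::Wait > WaitPeriod::AtMost(Duration::from_secs(5)));
        assert!(WaitPeriod::AtMost(Duration::from_secs(5)) > WaitPeriod::AtMost(Duration::from_secs(1)));
        assert!(WaitPeriod::AtMost(Duration::from_secs(1)) > WaitPeriod::None);
        assert!(WaitPeriod::None < WaitPeriod::Wait);
    }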
}
| true
|
57b2a6d553b1f7d77b17f479f94de13693e884bc
|
Rust
|
mkhan45/bouncy_ball
|
/rust/src/main.rs
|
UTF-8
| 2,125
| 2.890625
| 3
|
[] |
no_license
|
extern crate sdl2;
use sdl2::pixels::Color;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::gfx::primitives::DrawRenderer;
use std::time::Duration;
extern crate rand;
use rand::Rng;
const SPEED: i16 = 10;
const RADIUS: i16 = 10;
const SCREEN_WIDTH: i16 = 800;
const SCREEN_HEIGHT: i16 = 800;
struct Point{ //usually I'd use a tuple but I want it to be similar to the C version
pub x: i16,
pub y: i16,
}
struct Ball{
pub pos: Point,
pub vel: Point,
}
pub fn main() {
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let window = video_subsystem.window("rust-sdl2 demo", SCREEN_WIDTH as u32, SCREEN_HEIGHT as u32)
.position_centered()
.build()
.unwrap();
let mut canvas = window.into_canvas().build().unwrap();
let mut rng = rand::thread_rng();
let mut ball = Ball{
pos: Point{
x: SCREEN_WIDTH/2,
y: SCREEN_HEIGHT/2 },
vel: Point{
x: rng.gen_range(0, SPEED) - SPEED/2,
y: rng.gen_range(0, SPEED) - SPEED/2 },
};
canvas.set_draw_color(Color::RGB(0, 0, 0));
canvas.clear();
canvas.present();
let mut event_pump = sdl_context.event_pump().unwrap();
'running: loop {
canvas.set_draw_color(Color::RGB(0, 0, 0));
canvas.clear();
for event in event_pump.poll_iter() {
match event {
Event::Quit {..} |
Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
break 'running
},
_ => {}
}
}
if ball.pos.x + RADIUS/2 >= SCREEN_WIDTH || ball.pos.x - RADIUS/2 <= 0 {ball.vel.x *= -1;}
if ball.pos.y + RADIUS/2 >= SCREEN_HEIGHT || ball.pos.y - RADIUS/2 <= 0 {ball.vel.y *= -1;}
canvas.filled_circle::<Color>(ball.pos.x, ball.pos.y, RADIUS, Color::RGB(255, 255, 255));
ball.pos.x += ball.vel.x;
ball.pos.y += ball.vel.y;
canvas.present();
::std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 60));
}
}
| true
|
a4e690fd941b372a7851ff4ede17ceb2fbc4c1f4
|
Rust
|
mblonyox/adventofcode
|
/2021/src/day13.rs
|
UTF-8
| 3,383
| 3.484375
| 3
|
[] |
no_license
|
use std::{collections::HashSet, str::FromStr};
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Dot(i32, i32);
struct DotParseError;
impl FromStr for Dot {
type Err = DotParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split(',');
Ok(Dot(
parts.next().unwrap().parse().unwrap(),
parts.next().unwrap().parse().unwrap(),
))
}
}
impl Dot {
fn flip(&self, ins: &Instruction) -> Dot {
match *ins {
Instruction::X(n) => {
if self.0 > n {
Dot((2 * n) - self.0, self.1)
} else {
Dot(self.0, self.1)
}
}
Instruction::Y(n) => {
if self.1 > n {
Dot(self.0, (2 * n) - self.1)
} else {
Dot(self.0, self.1)
}
}
}
}
}
enum Instruction {
X(i32),
Y(i32),
}
struct InstructionParseError;
impl FromStr for Instruction {
type Err = InstructionParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match &s[11..12] {
"x" => Ok(Instruction::X(s[13..].parse().unwrap())),
"y" => Ok(Instruction::Y(s[13..].parse().unwrap())),
_ => Err(InstructionParseError),
}
}
}
pub struct Input {
dots: Vec<Dot>,
instructions: Vec<Instruction>,
}
#[aoc_generator(day13)]
fn parse(input: &str) -> Input {
let mut parts = input.split("\n\n");
let dots = parts
.next()
.unwrap()
.lines()
.map(|l| l.parse().ok().unwrap())
.collect();
let instructions = parts
.next()
.unwrap()
.lines()
.map(|l| l.parse().ok().unwrap())
.collect();
Input { dots, instructions }
}
#[aoc(day13, part1)]
pub fn part1(input: &Input) -> i32 {
let ins = input.instructions.first().unwrap();
input
.dots
.iter()
.map(|d| d.flip(ins))
.collect::<HashSet<Dot>>()
.iter()
.count() as i32
}
#[aoc(day13, part2)]
pub fn part2(input: &Input) -> String {
let mut set = input.dots.iter().map(|d| *d).collect::<HashSet<Dot>>();
for ins in &input.instructions {
set = set.iter().map(|d| d.flip(ins)).collect();
}
let cols = set.iter().map(|d| d.0).max().unwrap();
let rows = set.iter().map(|d| d.1).max().unwrap();
"\n".to_owned() +
&(0..=rows)
.map(|y| {
(0..=cols)
.map(|x| if set.contains(&Dot(x, y)) { '#' } else { '.' })
.collect::<String>()
})
.collect::<Vec<String>>()
.join("\n")
}
#[cfg(test)]
mod tests {
use super::*;
static SAMPLE_INPUT: &str = r#"6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0

fold along y=7
fold along x=5"#;
#[test]
fn sample_parse() {
let input = parse(SAMPLE_INPUT);
assert_eq!(input.dots.len(), 18);
assert_eq!(input.instructions.len(), 2);
}
#[test]
fn sample_part1() {
let input = parse(SAMPLE_INPUT);
assert_eq!(part1(&input), 17);
}
#[test]
fn sample_part2() {
let input = parse(SAMPLE_INPUT);
assert_eq!(part2(&input), r#"
#####
#...#
#...#
#...#
#####"#);
}
}
| true
|
2ddeb9d2192fa511bfbde25a8d76bd66ade2c0f1
|
Rust
|
chrigu/rustgpx
|
/src/lib.rs
|
UTF-8
| 6,544
| 3.203125
| 3
|
[] |
no_license
|
use quick_xml::Reader;
use quick_xml::events::Event;
use std::fs;
use std::str;
use term_size;
use chrono::{DateTime, FixedOffset};
use chrono::format::ParseError;
pub fn run(config: Config) {
let contents = fs::read_to_string(config.filename)
.expect("Something went wrong reading the file");
let trackpoints = find_lat_lon(&contents);
let min_max = find_min_max(trackpoints);
println!("Max elevation:\n{}", min_max.max_elevation);
println!("Min elevation:\n{}", min_max.min_elevation);
    println!("Terminal height:\n{}", config.terminal_height);
}
fn find_lat_lon(contents: &String) -> Vec<TrackPoint> {
let mut reader = Reader::from_str(contents);
reader.trim_text(true);
// let mut txt = Vec::new();
let mut buf = Vec::new();
let mut state = XmlState::START;
let mut trackpoints:Vec<TrackPoint> = Vec::new();
let mut lon:f32 = 0.0;
let mut lat:f32 = 0.0;
let mut elevation:f32 = 0.0;
let mut datetime = String::from("");
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref e)) => {
match e.name() {
b"trkpt" => {
state = XmlState::TRACKPT;
let attributes = e.attributes().map(|a| a.unwrap().value).collect::<Vec<_>>();
lat = extract_float_from_attribute(&attributes[0]);
lon = extract_float_from_attribute(&attributes[1]);
// lat = str::from_utf8(&attributes[0]).unwrap();
// lat = &lat[1..lat.len() -1 ]
// let lon = str::from_utf8(&attributes[0]).unwrap();
// println!("{:?} {:?}", lat, lon);
// println!("attributes values: {:?}",
// e.attributes().map(|a| a.unwrap().value).collect::<Vec<_>>())
},
b"ele" => {
state = XmlState::ELEVATION;
},
b"time" => {
// println!("state {:?}", state);
match state {
XmlState::START => continue,
_ => state = XmlState::TIME
};
},
_ => (),
}
},
Ok(Event::End(ref e)) => {
match e.name() {
b"trkpt" => {
state = XmlState::TRACKPT;
let trackpoint = TrackPoint::new(elevation, DateTime::parse_from_rfc3339(&datetime).unwrap(), lat, lon);
trackpoints.push(trackpoint);
},
_ => (),
}
},
Ok(Event::Text(e)) => {
match state {
XmlState::ELEVATION => {
// println!("elevation {}", e.unescape_and_decode(&reader).unwrap()
                    elevation = e.unescape_and_decode(&reader).unwrap().parse::<f32>().expect("Could not read float");
},
XmlState::TIME => {
// println!("time {}", e.unescape_and_decode(&reader).unwrap())
datetime = e.unescape_and_decode(&reader).unwrap();
// println!("time: {:?}", datetime);
},
_ => ()
}
// println!("{}", e.unescape_and_decode(&reader).unwrap());
},
Ok(Event::Eof) => break, // exits the loop when reaching end of file
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (), // There are several other `Event`s we do not consider here
}
// println!("{}", str::from_utf8(&buf).unwrap());
// if we don't keep a borrow elsewhere, we can clear the buffer to keep memory usage low
buf.clear();
}
trackpoints
}
fn extract_float_from_attribute(attribute:&[u8]) -> f32 {
let attribute = str::from_utf8(attribute).unwrap();
return attribute.parse().expect("No valid number");
}
fn find_min_max(trackpoints:Vec<TrackPoint>) -> MinMax {
    // Start from the opposite extremes so the first trackpoint always replaces them;
    // starting min/max at 0.0 would be wrong for purely positive or negative coordinates.
    let mut min_elevation = std::f32::MAX;
    let mut max_elevation = std::f32::MIN;
    let mut min_lat = std::f32::MAX;
    let mut max_lat = std::f32::MIN;
    let mut min_lon = std::f32::MAX;
    let mut max_lon = std::f32::MIN;
for trackpoint in &trackpoints {
min_elevation = test_min_value(trackpoint.elevation, min_elevation);
max_elevation = test_max_value(trackpoint.elevation, max_elevation);
min_lat = test_min_value(trackpoint.lat, min_lat);
max_lat = test_max_value(trackpoint.lat, max_lat);
min_lon = test_min_value(trackpoint.lon, min_lon);
max_lon = test_max_value(trackpoint.lon, max_lon);
}
MinMax {
min_elevation,
max_elevation,
min_lat,
max_lat,
min_lon,
max_lon
}
}
fn test_min_value(test_value:f32, min_value:f32) -> f32 {
if test_value < min_value {
return test_value
}
min_value
}
fn test_max_value(test_value:f32, max_value:f32) -> f32 {
if test_value > max_value {
return test_value
}
max_value
}
#[derive(Debug)]
enum XmlState {
START,
TRACKPT,
ELEVATION,
TIME
}
#[derive(Debug)]
struct TrackPoint {
elevation: f32,
datetime: DateTime<FixedOffset>,
lat: f32,
lon: f32
}
impl TrackPoint {
fn new (elevation: f32, datetime: DateTime<FixedOffset>, lat: f32, lon: f32) -> TrackPoint {
TrackPoint {
// elevation: elevation.parse::<f32>().expect("Float expected"),
elevation,
datetime,
lat,
lon
}
}
}
pub struct Config {
filename: String,
terminal_width: usize,
terminal_height: usize
}
impl Config {
pub fn new(args: &[String], ) -> Result<Config, &'static str> {
if args.len() < 2 {
return Err("not enough arguments");
};
let (terminal_width, terminal_height) = match term_size::dimensions() {
Some((w, h)) => (w, h),
None => return Err("Could not read terminal size")
};
let filename = args[1].clone();
Ok(Config { filename, terminal_height, terminal_width })
}
}
struct MinMax {
min_elevation: f32,
max_elevation: f32,
min_lat: f32,
max_lat: f32,
min_lon: f32,
max_lon: f32
}
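// Illustrative checks (not in the original file) for the small helpers used by find_min_max.
#[cfg(test)]
mod tests {
    use super::{test_max_value, test_min_value};
    #[test]
    fn keeps_the_smaller_value() {
        assert_eq!(test_min_value(1.0, 2.0), 1.0);
        assert_eq!(test_min_value(3.0, 2.0), 2.0);
    }
    #[test]
    fn keeps_the_larger_value() {
        assert_eq!(test_max_value(3.0, 2.0), 3.0);
        assert_eq!(test_max_value(1.0, 2.0), 2.0);
    }
}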
| true
|
4c3764081c427b2f0cadfcae37b3c69589ce4c09
|
Rust
|
ChoppyThing/Voila
|
/src/form/post.rs
|
UTF-8
| 418
| 2.671875
| 3
|
[] |
no_license
|
use chrono::NaiveDateTime;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Category {
pub id: i32,
pub slug: String,
pub name: String,
pub created_at: NaiveDateTime,
}
#[derive(Debug, FromFormValue, Serialize, Deserialize)]
pub enum Categories {
A, B, C
}
#[derive(Debug, FromForm)]
pub struct FormInput {
pub title: String,
pub post: String,
pub category: i32,
}
| true
|
78de4861858f4ba8a1c37bc836ae100786a7a57e
|
Rust
|
petitviolet/rust_ex
|
/bsort/src/first.rs
|
UTF-8
| 4,324
| 3.21875
| 3
|
[] |
no_license
|
use crate::SortOrder;
use std::cmp::Ordering;
use futures::executor::block_on;
use futures::future::{FutureExt, LocalBoxFuture};
pub fn sort<T: Clone + Ord>(x: &[T], order: &SortOrder) -> Result<Vec<T>, String> {
    if x.len().is_power_of_two() {
        let mut vec = x.to_vec();
        // Drive the boxed future to completion; without this the sort would never run.
        match *order {
            SortOrder::Ascending => {
                block_on(_sort(&mut vec, false, &|a, b| a.cmp(b)));
            },
            SortOrder::Descending => {
                block_on(_sort(&mut vec, true, &|a, b| a.cmp(b)));
            }
        };
        return Ok(vec);
    } else {
        return Err(format!("Length of x({}) is invalid", x.len()));
    }
}
pub fn sort_by<T: Clone, F>(x: &[T], comparator: &F) -> Result<Vec<T>, String>
    where F: Fn(&T, &T) -> Ordering
{
    if x.len().is_power_of_two() {
        let mut vec = x.to_vec();
        block_on(_sort(&mut vec, false, comparator));
        return Ok(vec);
    } else {
        return Err(format!("Length of x({}) is invalid", x.len()));
    }
}
// Bitonic sort over futures. The recursive halves are returned as boxed futures because a
// plain `async fn` cannot recurse without boxing; `block_on` in the public entry points
// drives the whole tree to completion.
fn _sort<'a, T, F>(x: &'a mut [T], reverse: bool, comparator: &'a F) -> LocalBoxFuture<'a, ()>
    where F: Fn(&T, &T) -> Ordering
{
    async move {
        if x.len() > 1 {
            let mid_point = mid_point(x);
            let (former, latter) = x.split_at_mut(mid_point);
            // Sort the first half ascending and the second half descending, then combine.
            futures::join!(
                _sort(former, false, comparator),
                _sort(latter, true, comparator)
            );
            sub_sort(x, reverse, comparator).await;
        }
    }
    .boxed_local()
}
fn sub_sort<'a, T, F>(x: &'a mut [T], reverse: bool, comparator: &'a F) -> LocalBoxFuture<'a, ()>
    where F: Fn(&T, &T) -> Ordering
{
    async move {
        if x.len() > 1 {
            compare_and_swap(x, reverse, comparator);
            let mid_point = mid_point(x);
            let (former, latter) = x.split_at_mut(mid_point);
            futures::join!(
                sub_sort(former, reverse, comparator),
                sub_sort(latter, reverse, comparator)
            );
        }
    }
    .boxed_local()
}
fn compare_and_swap<T, F>(x: &mut [T], reverse: bool, comparator: &F)
where F: Fn(&T, &T) -> Ordering
{
let mid_point = mid_point(x);
let ordering = if reverse { Ordering::Less } else { Ordering::Greater };
for i in 0..mid_point {
if comparator(&x[i], &x[mid_point + i]) == ordering {
x.swap(i, mid_point + i);
}
}
}
fn mid_point<A>(x: &[A]) -> usize {
return x.len() / 2;
}
#[cfg(test)]
mod tests {
use super::sort;
use super::sort_by;
use super::SortOrder;
use crate::utils::{is_sorted, new_u32_vec};
fn assert_sort_result<T: std::fmt::Debug + PartialEq>(result: Result<Vec<T>, String>, expected: Vec<T>) {
match result {
Ok(actual) => {
assert_eq!(actual, expected);
},
Err(err) =>
assert!(false, "should not get Err({})", err),
}
}
#[test]
fn sort_u32_ascending() {
let x = vec![10, 39, 11, 20, 4, 330, 21, 110];
assert_sort_result(sort(&x, &SortOrder::Ascending), vec![4, 10, 11, 20, 21, 39, 110, 330]);
}
#[test]
fn sort_u32_decending() {
let x = vec![10, 39, 11, 20, 4, 330, 21, 110];
assert_sort_result(sort(&x, &SortOrder::Descending), vec![330, 110, 39, 21, 20, 11, 10, 4]);
}
#[test]
fn sort_str_decending() {
let x = vec!["a", "c", "f", "e", "g", "b", "d", "h"];
assert_sort_result(sort(&x, &SortOrder::Descending), vec!["h", "g", "f", "e", "d", "c", "b", "a"]);
}
#[derive(PartialEq, Debug, Clone)]
struct User {
name: String,
age: u8,
}
impl User {
fn new(name: &str, age: u8) -> Self {
Self {
name: name.to_string(),
age: age,
}
}
}
#[test]
fn sort_users_descending() {
let alice = User::new("alice", 10);
let bob = User::new("bob", 40);
let charlie = User::new("charlie", 20);
let dave = User::new("dave", 50);
let users = vec![&alice, &dave, &charlie, &bob];
assert_sort_result(sort_by(&users, &|u1, u2| u1.age.cmp(&u2.age)), vec![&alice, &charlie, &bob, &dave]);
}
#[test]
fn sort_huge_number_arrays() {
let arr = new_u32_vec(65536);
match sort(&arr, &SortOrder::Ascending) {
Ok(res) =>
assert!(is_sorted(&res, &SortOrder::Ascending)),
Err(err) =>
assert!(false, "should not get Err({})", err),
}
match sort(&arr, &SortOrder::Descending) {
Ok(res) =>
assert!(is_sorted(&res, &SortOrder::Descending)),
Err(err) =>
assert!(false, "should not get Err({})", err),
}
}
}
| true
|
400ea6f1e8aa864ad1c5db46eb7d87d4a1ee5d8b
|
Rust
|
bengles/top-gun
|
/src/input.rs
|
UTF-8
| 2,494
| 3.015625
| 3
|
[] |
no_license
|
use super::*;
use std::collections::HashMap;
#[derive(Clone)]
pub struct Input {
pub keys_pressed: HashMap<Key, bool>,
pub keys_down: HashMap<Key, bool>,
pub keys_up: HashMap<Key, bool>,
pub mouse_position: Vector2,
pub dt: f32,
pub world_size: Vector2,
}
impl Default for Input {
fn default() -> Input {
let mut keys_pressed = HashMap::new();
keys_pressed.insert(Key::W, false);
keys_pressed.insert(Key::A, false);
keys_pressed.insert(Key::S, false);
keys_pressed.insert(Key::D, false);
keys_pressed.insert(Key::Space, false);
keys_pressed.insert(Key::Mouse1, false);
keys_pressed.insert(Key::Mouse2, false);
let mut keys_down = HashMap::new();
keys_down.insert(Key::W, false);
keys_down.insert(Key::A, false);
keys_down.insert(Key::S, false);
keys_down.insert(Key::D, false);
keys_down.insert(Key::Space, false);
keys_down.insert(Key::Mouse1, false);
keys_down.insert(Key::Mouse2, false);
let mut keys_up = HashMap::new();
keys_up.insert(Key::W, false);
keys_up.insert(Key::A, false);
keys_up.insert(Key::S, false);
keys_up.insert(Key::D, false);
keys_up.insert(Key::Space, false);
keys_up.insert(Key::Mouse1, false);
keys_up.insert(Key::Mouse2, false);
Input {
keys_pressed: keys_pressed,
keys_down: keys_down,
keys_up: keys_up,
mouse_position: Vector2::zeros(),
dt: 0.0,
world_size: Vector2::zeros(),
}
}
}
impl Input {
pub fn reset(&mut self) {
self.keys_down.insert(Key::W, false);
self.keys_down.insert(Key::A, false);
self.keys_down.insert(Key::S, false);
self.keys_down.insert(Key::D, false);
self.keys_down.insert(Key::Space, false);
self.keys_down.insert(Key::Mouse1, false);
self.keys_down.insert(Key::Mouse2, false);
self.keys_up.insert(Key::W, false);
self.keys_up.insert(Key::A, false);
self.keys_up.insert(Key::S, false);
self.keys_up.insert(Key::D, false);
self.keys_up.insert(Key::Space, false);
self.keys_up.insert(Key::Mouse1, false);
self.keys_up.insert(Key::Mouse2, false);
// do not reset mouse.
}
}
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum Key {
W,
A,
S,
D,
Space,
Mouse1,
Mouse2,
}
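// Illustrative check (not in the original file): a freshly constructed Input has every
// tracked key registered and released in all three maps.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn default_input_has_all_keys_released() {
        let input = Input::default();
        let keys = [Key::W, Key::A, Key::S, Key::D, Key::Space, Key::Mouse1, Key::Mouse2];
        for key in keys.iter() {
            assert_eq!(input.keys_pressed.get(key), Some(&false));
            assert_eq!(input.keys_down.get(key), Some(&false));
            assert_eq!(input.keys_up.get(key), Some(&false));
        }
    }
}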
| true
|
1bd5e4820585b24aaf7e5e701604321b1c584800
|
Rust
|
DDRBoxman/blinkstick-rs
|
/src/lib.rs
|
UTF-8
| 2,437
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
extern crate hidapi;
use std::vec::Vec;
use hidapi::HidApi;
use hidapi::HidError;
use hidapi::HidResult;
const BLINKSTICK_VENDOR_ID: u16 = 0x20A0;
const BLINKSTICK_PRODUCT_ID: u16 = 0x41E5;
const BLINKSTICK_INDEXED_LED_MSG_PACKET_SIZE: usize = 6;
pub struct BlinkStickDevice {
device: hidapi::HidDevice,
}
impl BlinkStickDevice {
    pub fn open_first() -> Result<BlinkStickDevice, HidError> {
        // Connect to the first device matching the BlinkStick VID and PID
        let api = HidApi::new()?;
        let device = api.open(BLINKSTICK_VENDOR_ID, BLINKSTICK_PRODUCT_ID)?;
        Ok(BlinkStickDevice { device })
    }
    pub fn open(serial: &str) -> Result<BlinkStickDevice, HidError> {
        // Connect to the device with the given serial number
        let api = HidApi::new()?;
        let device = api.open_serial(BLINKSTICK_VENDOR_ID, BLINKSTICK_PRODUCT_ID, serial)?;
        Ok(BlinkStickDevice { device })
    }
    pub fn get_serials() -> Result<Vec<String>, HidError> {
        let api = HidApi::new()?;
        let mut vec: Vec<String> = Vec::new();
        for device in api.devices() {
            if device.product_id == BLINKSTICK_PRODUCT_ID && device.vendor_id == BLINKSTICK_VENDOR_ID {
                if let Some(serial) = &device.serial_number {
                    vec.push(serial.to_owned());
                }
            }
        }
        Ok(vec)
    }
pub fn off(&self, channel: u8, index: u8) -> HidResult<()> {
return self.set_color(channel, index, 0, 0, 0);
}
pub fn set_color(&self, channel: u8, index: u8, r: u8, g: u8, b: u8) -> HidResult<()> {
let mut buf: [u8; BLINKSTICK_INDEXED_LED_MSG_PACKET_SIZE] =
[0; BLINKSTICK_INDEXED_LED_MSG_PACKET_SIZE];
buf[0] = 0x05;
buf[1] = channel;
buf[2] = index;
buf[3] = r;
buf[4] = g;
buf[5] = b;
return self.device.send_feature_report(&buf);
}
}
| true
|
09143ff14d7a9deee36992636e304d461ba1f090
|
Rust
|
kshkss/easyopt
|
/dual/src/elementary.rs
|
UTF-8
| 551
| 2.625
| 3
|
[
"MIT"
] |
permissive
|
use super::Dual;
/*
use num_traits::{Float, Num};
impl<T, const N: usize> Num for Dual<T, N>
where
T: Num + Copy,
{
type FromStrRadixErr = T::FromStrRadixErr;
fn from_str_radix(s: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> {
Ok(Self {
x: T::from_str_radix(s, radix)?,
dx: [T::zero(); N],
})
}
}
impl<T, const N: usize> Float for Dual<T, N>
where
T: Float,
{
fn nan() -> Self {
Self {
x: T::nan(),
dx: [T::zero(); N],
}
}
}
*/
| true
|
c0c06a3d846c3b6318aa1eaf87801fff685d5fbe
|
Rust
|
philschmid/rust-hf-hub-loader
|
/src/hf_uri.rs
|
UTF-8
| 3,739
| 3.203125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use url::{ParseError, Position, Url};
#[derive(Debug)]
pub struct Repository {
pub name: String,
pub url: String,
pub filter: Option<String>,
}
pub fn parse(hf_uri: &str) -> Result<Repository, ParseError> {
let parsed_hf_uri = Url::parse(hf_uri)?;
    match parsed_hf_uri.scheme() {
        "hf" => {}
        _ => panic!(
            "Scheme {} is not supported; you need to provide an hf uri",
            parsed_hf_uri.scheme()
        ),
    }
let uri_without_schema: &str = &parsed_hf_uri[Position::BeforeHost..];
let repository: Repository = match uri_without_schema.contains("//") {
true => {
let url_split: Vec<&str> = uri_without_schema.split("//").collect();
Repository {
name: String::from(url_split[0]),
url: UrlCreater::create_remote_repository_url(&url_split[0]),
filter: Some(String::from(url_split[1])),
}
}
_ => Repository {
name: String::from(uri_without_schema),
url: UrlCreater::create_remote_repository_url(&uri_without_schema),
filter: None,
},
};
Ok(repository)
}
pub struct UrlCreater {}
impl UrlCreater {
const BASE_HF_URL: &'static str = "https://huggingface.co";
pub fn create_remote_repository_url(repository: &str) -> String {
format!(
"{base_url}/api/models/{repo}",
base_url = UrlCreater::BASE_HF_URL,
repo = repository,
)
}
pub fn create_remote_file_url(repository: &str, file_path: &str) -> String {
format!(
"{base_url}/{repo}/resolve/main/{file_path}",
base_url = UrlCreater::BASE_HF_URL,
repo = repository,
file_path = file_path,
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[should_panic]
fn test_uri_schema() {
let hf_uri = "xs://philschmid/infinity-sentiment";
match parse(hf_uri) {
Ok(res) => println!("{:?}", res),
Err(err) => panic!("{}",err),
}
}
#[test]
fn test_uri_parser_without_filter() {
let hf_uri = "hf://philschmid/infinity-sentiment";
match parse(hf_uri) {
Ok(repository) => {
assert_eq!(
repository.name,
String::from("philschmid/infinity-sentiment")
);
assert_eq!(
repository.url,
String::from("https://huggingface.co/api/models/philschmid/infinity-sentiment")
);
match repository.filter {
None => {}
_ => panic!("should be None in the test"),
}
}
Err(err) => panic!("{}", err),
}
}
#[test]
fn test_uri_parser_with_filter() {
let hf_uri = "hf://philschmid/infinity-sentiment//infinity/config.json";
match parse(hf_uri) {
Ok(repository) => {
assert_eq!(
repository.name,
String::from("philschmid/infinity-sentiment")
);
assert_eq!(
repository.url,
String::from("https://huggingface.co/api/models/philschmid/infinity-sentiment")
);
match repository.filter {
None => {}
Some(filter) => assert_eq!(filter, "infinity/config.json"),
}
}
Err(err) => panic!("{}", err),
}
}
#[test]
fn test_create_remote_repository_url() {
let test_url = "https://huggingface.co/api/models/philschmid/infinity-sentiment";
let created_url = UrlCreater::create_remote_repository_url("philschmid/infinity-sentiment");
assert_eq!(test_url, created_url)
}
#[test]
fn test_create_remote_file_url() {
let test_url =
"https://huggingface.co/philschmid/infinity-sentiment/resolve/main/infinity/config.json";
let created_url =
UrlCreater::create_remote_file_url("philschmid/infinity-sentiment", "infinity/config.json");
assert_eq!(test_url, created_url)
}
}
| true
|
fbdef10ff40bdcedba8b023df3881f408c9f094d
|
Rust
|
pborzenkov/daily-coding-problem
|
/p0027/src/lib.rs
|
UTF-8
| 639
| 3.40625
| 3
|
[
"MIT"
] |
permissive
|
pub fn p0027(input: &str) -> bool {
let mut stack = Vec::new();
for c in input.chars() {
match c {
'(' => stack.push(')'),
'{' => stack.push('}'),
'[' => stack.push(']'),
')' | '}' | ']' => match stack.pop() {
Some(b) if b == c => (),
_ => return false,
},
_ => (),
}
}
    stack.is_empty()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn example() {
        assert!(p0027("([])[]({})"));
        assert!(!p0027("([)]"));
        assert!(!p0027("((()"));
}
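    // Extra illustrative cases (not in the original problem statement): non-bracket characters
    // are ignored, the empty string is balanced, and a closer without an opener fails.
    #[test]
    fn extra_cases() {
        assert!(p0027(""));
        assert!(p0027("fn main() { let v = vec![1, 2]; }"));
        assert!(!p0027("}{"));
    }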
}
| true
|
c7d766f0d9ea3d1642825b046136eff288434b6d
|
Rust
|
iCodeIN/twang
|
/examples/wav/mod.rs
|
UTF-8
| 2,110
| 2.75
| 3
|
[
"Apache-2.0",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Minimal WAV file writer.
//!
//! http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html
use fon::{chan::Ch16, stereo::Stereo16, Audio, Frame};
use std::convert::TryInto;
use std::{fs, io, mem::size_of};
/// Write a 16-bit PCM WAV file
pub(super) fn write<F: Frame>(audio: Audio<F>, filename: &str) -> io::Result<()>
where
Ch16: From<F::Chan>,
{
let audio =
Audio::<Stereo16>::with_stream(audio.sample_rate().floor(), &audio);
let mut buf = vec![];
write_header(&mut buf, &audio);
write_fmt_header(&mut buf, &audio);
write_audio_data(&mut buf, &audio);
fs::write(filename, buf)
}
fn write_header(buf: &mut Vec<u8>, audio: &Audio<Stereo16>) {
    // Predict size of the WAV subchunks (4 bytes per frame: 2 channels * i16).
    let data_bytes: u32 = (4 * audio.len()).try_into().unwrap();
    // RIFF Chunk: ckID
    buf.extend(b"RIFF");
    // RIFF Chunk: cksize (everything after this field: "WAVE" + fmt chunk + data chunk)
    buf.extend(&(36u32 + data_bytes).to_le_bytes());
    // RIFF Chunk: WAVEID
    buf.extend(b"WAVE");
}
fn write_fmt_header(buf: &mut Vec<u8>, audio: &Audio<Stereo16>) {
// RIFF Subchunk: "fmt "
buf.extend(b"fmt ");
// Chunk size: 16, 18 or 40
buf.extend(&(16u32).to_le_bytes());
// 0: WAVE_FORMAT_PCM
buf.extend(&(0x0001u16).to_le_bytes());
// 2: Stereo
buf.extend(&(2u16).to_le_bytes());
// 4: Sampling Rate
buf.extend(&(audio.sample_rate() as u32).to_le_bytes());
// 8: Bytes per second (i16 * 2 * sample rate)
buf.extend(&(4 * audio.sample_rate() as u32).to_le_bytes());
// 12. Data block size (bytes: i16 * 2)
buf.extend(&(size_of::<u16>() as u16 * 2u16).to_le_bytes());
// 14. Bits per sample
buf.extend(&(16u16).to_le_bytes());
}
fn write_audio_data(buf: &mut Vec<u8>, audio: &Audio<Stereo16>) {
// RIFF Subchunk: "data"
buf.extend(b"data");
// cksize (Bytes): Stereo (2) * i16 (2) * Frame Length
buf.extend(&(4 * audio.len() as u32).to_le_bytes());
// Sampled data
for sample in audio {
for channel in sample.channels().iter().cloned() {
let channel: i16 = channel.into();
buf.extend(&channel.to_le_bytes());
}
}
}
| true
|
bf0c31719d334147dc6ade17a1338e403ee4194f
|
Rust
|
kyleoneill/LeagueApp
|
/leagueapp_backend/src/model.rs
|
UTF-8
| 4,254
| 2.96875
| 3
|
[] |
no_license
|
use rusqlite::{Connection, Result};
use serde::{Serialize, Deserialize};
//champion, counter, items, runes
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct Champion {
name: String,
win_rate: f64,
pick_rate: f64,
ban_rate: f64,
human_readable_name: String,
title: String
}
impl Champion {
fn query_one_row(conn: &Connection, champ_name: &str) -> Result<Self> {
let mut stmt = conn.prepare("SELECT * FROM Champions WHERE champName = :champion")?;
let mut rows = stmt.query_named(&[(":champion", &champ_name)])?;
let champion: Self;
if let Some(Ok(row)) = rows.next() {
champion = Self {
name: row.get(1),
win_rate: row.get(2),
pick_rate: row.get(3),
ban_rate: row.get(4),
human_readable_name: row.get(5),
title: row.get(6)
};
}
else {
return Err(rusqlite::Error::QueryReturnedNoRows);
}
Ok(champion)
}
}
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct Counter {
name: String,
strong_against: String,
weak_against: String
}
impl Counter {
fn query_one_row(conn: &Connection, champ_name: &str) -> Result<Self> {
let mut stmt = conn.prepare("SELECT * FROM Counters WHERE counterChampionName = :champion")?;
let mut rows = stmt.query_named(&[(":champion", &champ_name)])?;
let counter: Self;
if let Some(Ok(row)) = rows.next() {
counter = Self {
name: row.get(1),
strong_against: row.get(2),
weak_against: row.get(3),
};
}
else {
return Err(rusqlite::Error::QueryReturnedNoRows);
}
Ok(counter)
}
}
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct Items {
name: String,
starting: String,
mythic_core: String,
fourth: String,
fifth: String,
sixth: String
}
impl Items {
fn query_one_row(conn: &Connection, champ_name: &str) -> Result<Self> {
let mut stmt = conn.prepare("SELECT * FROM Items WHERE itemChampionName = :champion")?;
let mut rows = stmt.query_named(&[(":champion", &champ_name)])?;
let items: Self;
if let Some(Ok(row)) = rows.next() {
items = Self {
name: row.get(1),
starting: row.get(2),
mythic_core: row.get(3),
fourth: row.get(4),
fifth: row.get(5),
sixth: row.get(6),
};
}
else {
return Err(rusqlite::Error::QueryReturnedNoRows);
}
Ok(items)
}
}
#[derive(Debug)]
#[derive(Serialize, Deserialize)]
pub struct Runes {
name: String,
rune_primary: String,
rune_secondary: String,
rune_tertiary: String
}
impl Runes {
fn query_one_row(conn: &Connection, champ_name: &str) -> Result<Self> {
let mut stmt = conn.prepare("SELECT * FROM Runes WHERE runeChampionName = :champion")?;
let mut rows = stmt.query_named(&[(":champion", &champ_name)])?;
let runes: Self;
if let Some(Ok(row)) = rows.next() {
runes = Self {
name: row.get(1),
rune_primary: row.get(2),
rune_secondary: row.get(3),
rune_tertiary: row.get(4),
};
}
else {
return Err(rusqlite::Error::QueryReturnedNoRows);
}
Ok(runes)
}
}
#[derive(Serialize, Deserialize)]
pub struct ChampionBuild {
champ_stats: Champion,
counters: Counter,
items: Items,
runes: Runes
}
impl ChampionBuild {
pub fn generate_build_from_champ_name(conn: &Connection, champ_name: &str) -> Result<Self> {
let champ_stats = Champion::query_one_row(conn, champ_name).unwrap();
let counters = Counter::query_one_row(conn, champ_name).unwrap();
let items = Items::query_one_row(conn, champ_name).unwrap();
let runes = Runes::query_one_row(conn, champ_name).unwrap();
Ok(Self {
champ_stats,
counters,
items,
runes
})
}
}
| true
|
45cce3006d11758e6634ad0b239604a6f9312312
|
Rust
|
blue68/bitflyer-rs
|
/src/api/get_bank_accounts.rs
|
UTF-8
| 886
| 2.515625
| 3
|
[] |
no_license
|
use crate::api;
use crate::api::ApiResponseError;
use std::collections::HashMap;
const PATH : &'static str = "/v1/me/getbankaccounts";
type GetBankAccountsResponse = Vec<BankAccountInfo>;
#[derive(Deserialize, Debug)]
pub struct BankAccountInfo {
pub id: u32,
pub is_verified: bool,
pub bank_name: String,
pub branch_name: String,
pub account_type: String,
pub account_name: String,
pub account_number: String,
}
pub async fn get_bank_accounts() -> Result<GetBankAccountsResponse, ApiResponseError> {
api::get::<GetBankAccountsResponse>(&PATH).await
}
#[cfg(test)]
mod tests {
use crate::test_api;
use crate::api::get_bank_accounts::get_bank_accounts;
#[tokio::test]
async fn get_bank_accounts_test() {
test_api!(get_bank_accounts());
}
}
| true
|
63320a3fb0e9adb39f8d72436df11429dfa51381
|
Rust
|
Swatinem/rust-analyzer
|
/xtask/src/codegen/gen_lint_completions.rs
|
UTF-8
| 3,919
| 2.6875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Generates descriptors structure for unstable feature from Unstable Book
use std::fmt::Write;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use xshell::{cmd, read_file};
use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
pub(crate) fn generate_lint_completions() -> Result<()> {
if !project_root().join("./target/rust").exists() {
cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
}
let mut contents = String::from("use crate::completions::attribute::LintCompletion;\n\n");
generate_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
contents.push('\n');
cmd!("curl http://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?;
let contents = reformat(&contents)?;
let destination =
project_root().join("crates/ide_completion/src/generated_lint_completions.rs");
ensure_file_contents(destination.as_path(), &contents)?;
Ok(())
}
fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
buf.push_str(r#"pub(super) const FEATURES: &[LintCompletion] = &["#);
buf.push('\n');
["language-features", "library-features"]
.iter()
.flat_map(|it| WalkDir::new(src_dir.join(it)))
.filter_map(|e| e.ok())
.filter(|entry| {
// Get all `.md ` files
entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
})
.for_each(|entry| {
let path = entry.path();
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
let doc = read_file(path).unwrap();
push_lint_completion(buf, &feature_ident, &doc);
});
buf.push_str("];\n");
Ok(())
}
#[derive(Default)]
struct ClippyLint {
help: String,
id: String,
}
fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
let file_content = read_file(path)?;
let mut clippy_lints: Vec<ClippyLint> = vec![];
for line in file_content.lines().map(|line| line.trim()) {
if line.starts_with(r#""id":"#) {
let clippy_lint = ClippyLint {
id: line
.strip_prefix(r#""id": ""#)
.expect("should be prefixed by id")
.strip_suffix(r#"","#)
.expect("should be suffixed by comma")
.into(),
help: String::new(),
};
clippy_lints.push(clippy_lint)
} else if line.starts_with(r#""What it does":"#) {
            // Typical line to strip: "What it does": "Here is my useful content",
let prefix_to_strip = r#""What it does": ""#;
let suffix_to_strip = r#"","#;
let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
clippy_lint.help = line
.strip_prefix(prefix_to_strip)
.expect("should be prefixed by what it does")
.strip_suffix(suffix_to_strip)
.expect("should be suffixed by comma")
.into();
}
}
buf.push_str(r#"pub(super) const CLIPPY_LINTS: &[LintCompletion] = &["#);
buf.push('\n');
clippy_lints.into_iter().for_each(|clippy_lint| {
let lint_ident = format!("clippy::{}", clippy_lint.id);
let doc = clippy_lint.help;
push_lint_completion(buf, &lint_ident, &doc);
});
buf.push_str("];\n");
Ok(())
}
fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
writeln!(
buf,
r###" LintCompletion {{
label: "{}",
description: r##"{}"##
}},"###,
label, description
)
.unwrap();
}
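// Illustrative check (not part of the original xtask): push_lint_completion emits one
// LintCompletion initializer containing the label and the description.
#[cfg(test)]
mod tests {
    use super::push_lint_completion;
    #[test]
    fn push_lint_completion_emits_label_and_description() {
        let mut buf = String::new();
        push_lint_completion(&mut buf, "clippy::foo", "Checks for foo.");
        assert!(buf.contains(r#"label: "clippy::foo""#));
        assert!(buf.contains("Checks for foo."));
    }
}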
| true
|
e14d4ca017bbb9bf1ce1fdb29669d488bd986bfc
|
Rust
|
tkyc/GoodNyte
|
/src/main.rs
|
UTF-8
| 883
| 2.8125
| 3
|
[] |
no_license
|
use std::error::Error;
use lambda_runtime::{error::HandlerError, lambda, Context};
use log::{self, error};
use serde_derive::{Deserialize, Serialize};
use simple_error::bail;
use simple_logger;
//TODO: Testing lambda events with rust -- refactor later
#[derive(Deserialize)]
struct CustomEvent {
#[serde(rename = "message")]
message: String,
}
#[derive(Serialize)]
struct CustomOutput {
message: String,
}
fn main() -> Result<(), Box<dyn Error>> {
simple_logger::init_with_level(log::Level::Debug)?;
lambda!(my_handler);
Ok(())
}
fn my_handler(e: CustomEvent, c: Context) -> Result<CustomOutput, HandlerError> {
if e.message == "" {
error!("Bad event parameters -- {:?}", c.aws_request_id);
        bail!("Bad event parameters");
}
Ok(CustomOutput {
message: format!("Hello from rust lambda runtime!"),
})
}
| true
|
ea720906fbff0237763d7f4b4e252cbd3c00de48
|
Rust
|
learnrust/apis
|
/mcp/src/options/process.rs
|
UTF-8
| 5,029
| 2.6875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use super::output_formats;
use clap::{App, AppSettings, Arg, ArgSettings};
use glob;
pub fn new<'a, 'b>() -> App<'a, 'b> {
App::new("process")
.setting(AppSettings::ColoredHelp)
.alias("show")
.alias("merge")
        .about(
            "Merge JSON or YAML files from standard input or from the specified files. \
             Multi-document YAML files are supported. \
             Merging a single file is explicitly valid and can be used to check for syntax errors.",
)
.arg(
Arg::with_name("select")
.set(ArgSettings::RequireEquals)
.alias("from")
.long("select")
.short("s")
.takes_value(true)
.value_name("pointer")
.required(false)
.multiple(true)
.help("Use a JSON pointer to specify which sub-value to use. \
This affects only the next following --environment or <path>. \
Valid specifications are for example '0/a/b/4' or 'a.b.0', and they must point to a valid value. \
If it is specified last, without a following merged value, a sub-value is selected from the aggregated value."
)
)
.arg(
Arg::with_name("no-stdin")
.long("no-stdin")
.required(false)
.help("If set, we will not try to read structured data from standard input. This may be required \
in some situations where we are blockingly reading from a standard input which is attached \
to a pseudo-terminal.")
)
.arg(
Arg::with_name("at")
.set(ArgSettings::RequireEquals)
.alias("to")
.long("at")
.short("a")
.takes_value(true)
.value_name("pointer")
.required(false)
.multiple(true)
.help("Use a JSON pointer to specify an existing mapping at which the next merged value should be placed. \
This affects only the next following --environment or <path>. \
Valid specifications are for example '0/a/b/4' or 'a.b.0'. \
If it is specified last, without a following merged value, the entire aggregated value so far is moved."
)
)
.arg(
Arg::with_name("environment")
.set(ArgSettings::RequireEquals)
.long("environment")
.short("e")
.takes_value(true)
.default_value("*")
.value_name("filter")
.required(false)
.multiple(true)
.validator(|v| glob::Pattern::new(&v).map(|_| ()).map_err(|err| format!("{}", err)))
.help("Import all environment variables matching the given filter. If no filter is set, all variables are imported. \
                       Otherwise it is applied as a glob, e.g. 'FOO*' includes 'FOO_BAR', but not 'BAZ_BAR'. \
Other valid meta characters are '?' to find any character, e.g. 'FO?' matches 'FOO'.")
)
.arg(
Arg::with_name("no-overwrite")
.alias("no-override")
.long("no-overwrite")
.takes_value(false)
.required(false)
.multiple(true)
                .help("If set, values in the merged document may not overwrite values already present. This is enabled by default, \
and can be explicitly turned off with --overwrite."),
)
.arg(
Arg::with_name("overwrite")
.alias("override")
.long("overwrite")
.takes_value(false)
.required(false)
.multiple(true)
                .help("If set, values in the merged document can overwrite values already present. This is disabled by default, \
and can be explicitly turned off with --no-overwrite."),
)
.arg(
Arg::with_name("output")
.set(ArgSettings::RequireEquals)
.short("o")
.long("output")
.takes_value(true)
.required(false)
.value_name("mode")
.default_value("json")
.possible_values(output_formats())
.case_insensitive(true)
.help("Specifies how the merged result should be serialized."),
)
.arg(
Arg::with_name("path")
.value_name("path-or-value")
.takes_value(true)
.required(false)
.multiple(true)
.help(
"The path to the file to include, or '-' to read from standard input. It must be in a format that can be output using the --output flag. \
Alternatively it can be a value assignment like 'a=42' or a.b.c=value.",
),
)
}
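// A small illustrative check (not part of the original crate): the subcommand accepts
// free-form path-or-value arguments and defaults --output to json.
#[cfg(test)]
mod tests {
    use super::new;
    #[test]
    fn parses_a_basic_invocation() {
        let matches = new().get_matches_from(vec!["process", "a=42"]);
        assert_eq!(matches.value_of("path"), Some("a=42"));
        assert_eq!(matches.value_of("output"), Some("json"));
        assert!(!matches.is_present("overwrite"));
    }
}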
| true
|
08bd3497e37bb2861a03292b0c4c37221b064a83
|
Rust
|
CircuitCoder/tp
|
/src/main.rs
|
UTF-8
| 3,373
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
use lazy_static::lazy_static;
use actix_web::{http, server, App, Path, Responder, Json, HttpResponse};
use serde::{Serialize, Deserialize};
use std::sync::{Mutex, Arc};
use leveldb::database::Database;
use leveldb::options::{Options, WriteOptions, ReadOptions};
use leveldb::kv::KV;
use uuid::Uuid;
use serde_json;
use db_key::Key;
use rand::{self, Rng};
use rand::distributions::Alphanumeric;
#[derive(Clone)]
struct DBKey(String);
impl Key for DBKey {
fn from_u8(key: &[u8]) -> Self {
DBKey(String::from_utf8_lossy(key).to_string())
}
fn as_slice<T, F: Fn(&[u8]) -> T>(&self, f: F) -> T {
f(self.0.as_bytes())
}
}
lazy_static! {
static ref MASTER_KEY: Option<String> = std::env::var("MASTER_KEY").ok();
static ref DB: Arc<Mutex<Database<DBKey>>> = {
let mut opts = Options::new();
opts.create_if_missing = true;
let inner = Database::open(std::path::Path::new("./db"), opts).unwrap();
Arc::new(Mutex::new(inner))
};
}
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Payload {
key: Option<String>,
target: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Resp {
key: String,
slug: String,
}
fn redirect(path: Path<(String, )>) -> impl Responder {
let guard = DB.lock().unwrap();
match guard.get(ReadOptions::new(), DBKey(path.0.clone())).unwrap() {
Some(cont) => {
let payload: Payload = serde_json::from_slice(&cont).unwrap();
HttpResponse::Found().header(http::header::LOCATION, payload.target).finish()
},
None => HttpResponse::NotFound().finish(),
}
}
fn create(mut payload: Json<Payload>) -> impl Responder {
if MASTER_KEY.is_some() && *MASTER_KEY != payload.key {
return HttpResponse::Forbidden().finish();
}
let dbkey = DBKey(Uuid::new_v4().to_hyphenated().to_string());
let mut rng = rand::thread_rng();
let key = std::iter::repeat(())
.map(|()| rng.sample(Alphanumeric))
.take(32)
.collect();
let resp = Resp{
slug: dbkey.0.clone(),
key,
};
payload.key = Some(resp.key.clone());
let guard = DB.lock().unwrap();
guard.put(WriteOptions::new(), dbkey, &serde_json::to_vec(&*payload).unwrap()).unwrap();
HttpResponse::Created().json(resp)
}
fn edit(path: Path<(String, )>, mut payload: Json<Payload>) -> impl Responder {
let guard = DB.lock().unwrap();
let dbkey = DBKey(path.0.clone());
let original: Payload = match guard.get(ReadOptions::new(), dbkey.clone()).unwrap() {
Some(cont) => {
serde_json::from_slice(&cont).unwrap()
},
None => return HttpResponse::NotFound().finish(),
};
if original.key != payload.key &&
(MASTER_KEY.is_none() || *MASTER_KEY != payload.key) {
return HttpResponse::Forbidden().finish();
}
payload.key = original.key;
guard.put(WriteOptions::new(), dbkey, &serde_json::to_vec(&*payload).unwrap()).unwrap();
HttpResponse::Created().finish()
}
fn main() {
let server = server::new(|| App::new()
.route("/edit", http::Method::POST, create)
.route("/edit/{id}", http::Method::PUT, edit)
.resource("/{id}", |r| r.with(redirect)));
let bind = server.bind("127.0.0.1:7103").unwrap();
bind.run();
}
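// Hedged usage sketch (the curl commands below are illustrative additions, not from
// the original repo): with the server bound to 127.0.0.1:7103 as above, creating and
// then following a short link might look like:
//
//   curl -X POST http://127.0.0.1:7103/edit \
//        -H 'Content-Type: application/json' \
//        -d '{"target":"https://example.com"}'
//   => 201 Created, body: {"key":"<edit-key>","slug":"<uuid>"}
//
//   curl -i http://127.0.0.1:7103/<uuid>
//   => 302 Found, Location: https://example.com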
| true
|
b1dbcf84cb6c231764c4ab323a1bc41d343e57a3
|
Rust
|
pnaranja/rustbook
|
/ownership/src/main.rs
|
UTF-8
| 3,372
| 4.1875
| 4
|
[] |
no_license
|
fn main() {
// Will not compile because s1 was moved to s2
// let s1 = String::from("Paul");
// let s2 = s1;
// println!("{}", s1)
// Use clone to make a deep copy - copy the heap data
let s1 = String::from("Paul");
let s2 = s1.clone();
println!("s1: {}, s2: {}", s1, s2);
let x : &str = "hi";
let y = x;
let x2 = 10;
let y2 = x2;
println!("x: {}, y: {}", x, y);
println!("x2: {}, y2: {}", x2, y2);
ownership_test();
// calculate_length borrows s1 (it takes an immutable reference)
let s1_length = calculate_length(&s1);
println!("Length of String {} is {}", s1, s1_length);
let mut m1 = String::from("Paul");
let m1_length = calculate_length_mutate(&mut m1);
println!("Length of String {} is {}", m1, m1_length);
// Can't mix mut and immutable refs
let m2 = &m1;
let m3 = &m1;
//let m4 = &mut m1;
println!("{} is {}", m2, m3);
// No dangling refs
let good_ref = get_ref();
println!("{}",good_ref);
let p : &str = "asfs";
let d = (1,2);
// Slices
// Return the first word in a string
// Range using <a>..<b> where inclusive a -> exclusive b
let mut f = String::from("Hello World");
let hello = &f[0..5]; println!("{}", hello);
let world = &f[6..11]; println!("{}", world);
let hello2 = &f[..5]; println!("{}", hello2);
let world2 = &f[6..]; println!("{}", world2);
let hello_world = &f[..]; println!("{}", hello_world);
println!("First word of 'Hello World': {}",get_first_word(&f));
let firstword = get_first_word(&f);
// Cannot take a mutable ref since there's an immutable reference in the same scope
// f.clear();
// String literals are immutable references (slices)
let g = String::from("Hello World");
println!("{}", &g[..]);
let h = "Hello World";
println!("{}", h);
}
fn ownership_test()
{
let s = String::from("hello");
let s2 = s + " paul";
take_ownership(s2);
// This will do a compile error
// let s3 = s + + s2 + "asf";
let y = 45;
function_copy(y)
}
fn take_ownership(s : String)
{
println!("I have the string: {}", s)
}
fn function_copy(i : i32)
{
let a = i + 3;
println!("Copied the number {} and added 3 to get {}", i, a)
}
fn calculate_length(s: &String) -> usize
{
// Cannot mutate a reference
// s.push_str(" and hello");
s.len()
}
fn calculate_length_mutate(s : &mut String) -> usize
{
// Can mutate a mutable reference
s.push_str(" Orange");
s.len()
}
fn get_ref() -> String
{
let s = String::from("Bye!");
s
}
fn get_first_word(s : &String) -> &str
{
// as_bytes returns an array of bytes (8 bits)
let bytes : &[u8] = s.as_bytes();
// enumerate will return a tuple (index, item_ref)
for (i, &item) in bytes.iter().enumerate()
{
// look for a byte space
if item == b' '
{
return &s[..i]
}
}
&s[..]
}
fn get_first_word2(s : &str) -> &str
{
// as_bytes returns an array of bytes (8 bits)
let bytes : &[u8] = s.as_bytes();
// enumerate will return a tuple (index, item_ref)
for (i, &item) in bytes.iter().enumerate()
{
// look for a byte space
if item == b' '
{
return &s[..i]
}
}
&s[..]
}
| true
|
39af4d708921ec7d0cef450b1e236bfa42c45e65
|
Rust
|
imorph/vector
|
/lib/vrl/stdlib/src/to_unix_timestamp.rs
|
UTF-8
| 4,366
| 3.09375
| 3
|
[
"MPL-2.0"
] |
permissive
|
use std::str::FromStr;
use vrl::prelude::*;
#[derive(Clone, Copy, Debug)]
pub struct ToUnixTimestamp;
impl Function for ToUnixTimestamp {
fn identifier(&self) -> &'static str {
"to_unix_timestamp"
}
fn examples(&self) -> &'static [Example] {
&[
Example {
title: "default (seconds)",
source: "to_unix_timestamp(t'2000-01-01T00:00:00Z')",
result: Ok("946684800"),
},
Example {
title: "milliseconds",
source: r#"to_unix_timestamp(t'2010-01-01T00:00:00Z', unit: "milliseconds")"#,
result: Ok("1262304000000"),
},
Example {
title: "nanoseconds",
source: r#"to_unix_timestamp(t'2020-01-01T00:00:00Z', unit: "nanoseconds")"#,
result: Ok("1577836800000000000"),
},
]
}
fn parameters(&self) -> &'static [Parameter] {
&[
Parameter {
keyword: "value",
kind: kind::TIMESTAMP,
required: true,
},
Parameter {
keyword: "unit",
kind: kind::BYTES,
required: false,
},
]
}
fn compile(&self, mut arguments: ArgumentList) -> Compiled {
let value = arguments.required("value");
let unit = arguments
.optional_enum("unit", Unit::all_value().as_slice())?
.map(|s| {
Unit::from_str(&s.try_bytes_utf8_lossy().expect("unit not bytes"))
.expect("validated enum")
})
.unwrap_or_default();
Ok(Box::new(ToUnixTimestampFn { value, unit }))
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Unit {
Seconds,
Milliseconds,
Nanoseconds,
}
impl Unit {
fn all_value() -> Vec<Value> {
use Unit::*;
vec![Seconds, Milliseconds, Nanoseconds]
.into_iter()
.map(|u| u.as_str().into())
.collect::<Vec<_>>()
}
const fn as_str(self) -> &'static str {
use Unit::*;
match self {
Seconds => "seconds",
Milliseconds => "milliseconds",
Nanoseconds => "nanoseconds",
}
}
}
impl Default for Unit {
fn default() -> Self {
Unit::Seconds
}
}
impl FromStr for Unit {
type Err = &'static str;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
use Unit::*;
match s {
"seconds" => Ok(Seconds),
"milliseconds" => Ok(Milliseconds),
"nanoseconds" => Ok(Nanoseconds),
_ => Err("unit not recognized"),
}
}
}
#[derive(Debug, Clone)]
struct ToUnixTimestampFn {
value: Box<dyn Expression>,
unit: Unit,
}
impl Expression for ToUnixTimestampFn {
fn resolve(&self, ctx: &mut Context) -> Resolved {
let ts = self.value.resolve(ctx)?.try_timestamp()?;
let time = match self.unit {
Unit::Seconds => ts.timestamp(),
Unit::Milliseconds => ts.timestamp_millis(),
Unit::Nanoseconds => ts.timestamp_nanos(),
};
Ok(time.into())
}
fn type_def(&self, _: &state::Compiler) -> TypeDef {
TypeDef::new().infallible().integer()
}
}
#[cfg(test)]
mod test {
use super::*;
use chrono::TimeZone;
test_function![
to_unix_timestamp => ToUnixTimestamp;
seconds {
args: func_args![value: chrono::Utc.ymd(2021, 1, 1).and_hms_milli(0, 0, 0, 0),
unit: "seconds"
],
want: Ok(1609459200i64),
tdef: TypeDef::new().infallible().integer(),
}
milliseconds {
args: func_args![value: chrono::Utc.ymd(2021, 1, 1).and_hms_milli(0, 0, 0, 0),
unit: "milliseconds"
],
want: Ok(1609459200000i64),
tdef: TypeDef::new().infallible().integer(),
}
nanoseconds {
args: func_args![value: chrono::Utc.ymd(2021, 1, 1).and_hms_milli(0, 0, 0, 0),
unit: "nanoseconds"
],
want: Ok(1609459200000000000i64),
tdef: TypeDef::new().infallible().integer(),
}
];
}
| true
|
8b7be507af8a91483c8bb56465c7ecfabb7f7e3d
|
Rust
|
radu-matei/wal
|
/src/parser.rs
|
UTF-8
| 20,142
| 3.25
| 3
|
[] |
no_license
|
use crate::{
ast::{
BlockStatement, CallExpression, Expression, FunctionLiteral, IfExpression, InfixExpression,
LetStatement, PrefixExpression, Program, ReturnStatement, Statement,
},
lexer::{Lexer, LexerError},
token::Token,
};
use std::mem;
#[derive(Debug)]
pub struct Parser<'a> {
lexer: Lexer<'a>,
current: Token,
peek: Token,
}
impl<'a> Parser<'a> {
pub fn new(l: Lexer<'a>) -> Result<Self, ParserError> {
let mut p = Parser {
lexer: l,
current: Token::EOF,
peek: Token::EOF,
};
p.next_token()?;
p.next_token()?;
return Ok(p);
}
pub fn parse(&mut self) -> Result<Program, ParserError> {
let mut p = Program::new();
while self.current != Token::EOF {
p.statements.push(self.parse_statement()?);
self.next_token()?;
}
Ok(p)
}
fn parse_statement(&mut self) -> Result<Statement, ParserError> {
match &self.current {
Token::LET => return self.parse_let_statement(),
Token::RETURN => return self.parse_return_statement(),
_ => return self.parse_expression_statement(),
}
}
fn parse_expression_statement(&mut self) -> Result<Statement, ParserError> {
let exp = self.parse_expression(Precedence::Lowest)?;
if self.peek == Token::SEMICOLON {
self.next_token()?;
}
Ok(Statement::Expression(exp))
}
fn parse_expression(&mut self, pr: Precedence) -> Result<Expression, ParserError> {
let mut left = self.prefix_parse()?;
let prec_val = pr as u32;
while self.peek != Token::SEMICOLON && prec_val < (precedence(&self.peek) as u32) {
// TODO: fix the clone call
match self.infix_parse(left.clone()) {
Some(infix) => left = infix?,
None => {
return Ok(left);
}
};
}
Ok(left)
}
fn parse_prefix_expression(&mut self) -> Result<Expression, ParserError> {
let op = self.current.clone();
self.next_token()?;
let exp = self.parse_expression(Precedence::Prefix)?;
Ok(Expression::Prefix(PrefixExpression {
operator: op,
right: Box::new(exp),
}))
}
fn parse_infix_expression(&mut self, left: Expression) -> Result<Expression, ParserError> {
self.next_token()?;
let op = self.current.clone();
let prec = precedence(&self.current);
self.next_token()?;
let right = Box::new(self.parse_expression(prec)?);
Ok(Expression::Infix(InfixExpression {
left: Box::new(left),
operator: op,
right: right,
}))
}
fn parse_groupped_expression(&mut self) -> Result<Expression, ParserError> {
self.next_token()?;
let exp = self.parse_expression(Precedence::Lowest)?;
if !self.expect_peek(&Token::RPAREN)? {
return Err(ParserError::InvalidNextToken(String::from(format!(
"expected next token to be {}, got {}",
Token::RPAREN,
self.current
))));
}
return Ok(exp);
}
fn parse_if_expression(&mut self) -> Result<Expression, ParserError> {
self.expect_peek(&Token::LPAREN)?;
self.next_token()?;
let condition = Box::new(self.parse_expression(Precedence::Lowest)?);
self.expect_peek(&Token::RPAREN)?;
self.expect_peek(&Token::LBRACE)?;
let consequence = if let Statement::Block(block) = self.parse_block_statement()? {
block
} else {
return Err(ParserError::InvalidNextToken(String::from(format!(
"expected {}, got {}",
"if block statement", self.current
))));
};
if self.peek != Token::ELSE {
return Ok(Expression::If(IfExpression {
condition,
consequence,
alternative: None,
}));
}
self.next_token()?;
self.expect_peek(&Token::LBRACE)?;
let alternative = if let Statement::Block(block) = self.parse_block_statement()? {
Some(block)
} else {
return Err(ParserError::InvalidNextToken(String::from(format!(
"expected {}, got {}",
"if block statement", self.current
))));
};
Ok(Expression::If(IfExpression {
condition,
consequence,
alternative,
}))
}
fn parse_block_statement(&mut self) -> Result<Statement, ParserError> {
self.next_token()?;
let mut statements = vec![];
while self.current != Token::RBRACE && self.current != Token::EOF {
statements.push(self.parse_statement()?);
self.next_token()?;
}
Ok(Statement::Block(BlockStatement { statements }))
}
fn parse_function_literal(&mut self) -> Result<Expression, ParserError> {
self.expect_peek(&Token::LPAREN)?;
let parameters = self.parse_function_params()?;
self.expect_peek(&Token::LBRACE)?;
let body = if let Statement::Block(block) = self.parse_block_statement()? {
block
} else {
return Err(ParserError::InvalidNextToken(String::from(format!(
"expected {}, got {}",
"if block statement", self.current
))));
};
Ok(Expression::Function(FunctionLiteral { parameters, body }))
}
fn parse_function_params(&mut self) -> Result<Vec<String>, ParserError> {
let mut params = vec![];
if self.peek == Token::RPAREN {
self.next_token()?;
return Ok(params);
}
self.next_token()?;
params.push(self.parse_identifier()?);
while self.peek == Token::COMMA {
self.next_token()?;
self.next_token()?;
params.push(self.parse_identifier()?);
}
self.expect_peek(&Token::RPAREN)?;
Ok(params)
}
fn parse_call_expression(&mut self, left: Expression) -> Result<Expression, ParserError> {
self.next_token()?;
if self.peek == Token::RPAREN {
self.next_token()?;
return Ok(Expression::Call(CallExpression {
function: Box::new(left.clone()),
arguments: vec![],
}));
}
let args = self.parse_expression_list(Token::RPAREN)?;
Ok(Expression::Call(CallExpression {
function: Box::new(left.clone()),
arguments: args,
}))
}
fn parse_expression_list(&mut self, end: Token) -> Result<Vec<Expression>, ParserError> {
if self.peek == end {
self.next_token()?;
return Ok(vec![]);
}
let mut exp = vec![];
self.next_token()?;
exp.push(self.parse_expression(Precedence::Lowest)?);
while self.peek == Token::COMMA {
self.next_token()?;
self.next_token()?;
exp.push(self.parse_expression(Precedence::Lowest)?);
}
self.expect_peek(&end)?;
Ok(exp)
}
fn parse_let_statement(&mut self) -> Result<Statement, ParserError> {
self.next_token()?;
let name = self.parse_identifier()?;
self.expect_peek(&Token::ASSIGN)?;
self.next_token()?;
let value = self.parse_expression(Precedence::Lowest)?;
if self.peek == Token::SEMICOLON {
self.next_token()?;
}
Ok(Statement::Let(LetStatement { name, value }))
}
fn parse_return_statement(&mut self) -> Result<Statement, ParserError> {
self.next_token()?;
let val = self.parse_expression(Precedence::Lowest)?;
if self.peek == Token::SEMICOLON {
self.next_token()?;
}
Ok(Statement::Return(ReturnStatement { value: val }))
}
fn parse_identifier(&mut self) -> Result<String, ParserError> {
if let Token::IDENTIFIER(ident) = &self.current {
Ok(ident.to_string())
} else {
        Err(ParserError::InvalidIdentifier(format!("{:?}", self.current)))
}
}
fn expect_peek(&mut self, tok: &Token) -> Result<bool, ParserError> {
match self.peek_token_is(&tok) {
true => {
self.next_token()?;
Ok(true)
}
false => Err(ParserError::InvalidNextToken(String::from(format!(
"expected next token to be {:?}, got {:?}",
tok, self.peek
)))),
}
}
fn peek_token_is(&self, tok: &Token) -> bool {
        tok == &self.peek
}
fn next_token(&mut self) -> Result<(), ParserError> {
mem::swap(&mut self.current, &mut self.peek);
self.peek = self.lexer.next_token()?;
Ok(())
}
fn prefix_parse(&mut self) -> Result<Expression, ParserError> {
match &self.current {
Token::IDENTIFIER(_) => Ok(Expression::Identifier(self.parse_identifier()?)),
Token::INTEGER(i) => Ok(Expression::Integer(*i)),
Token::TRUE => Ok(Expression::Boolean(true)),
Token::FALSE => Ok(Expression::Boolean(false)),
Token::STRING(s) => Ok(Expression::String(s.to_string())),
Token::LPAREN => self.parse_groupped_expression(),
Token::IF => self.parse_if_expression(),
Token::FUNCTION => self.parse_function_literal(),
Token::BANG | Token::MINUS => self.parse_prefix_expression(),
_ => Err(ParserError::InvalidNextToken(String::from(format!(
"{:?}",
self.current
)))),
}
}
fn infix_parse(&mut self, left: Expression) -> Option<Result<Expression, ParserError>> {
match self.peek {
Token::PLUS
| Token::MINUS
| Token::ASTERISK
| Token::SLASH
| Token::EQ
| Token::NE
| Token::LT
| Token::GT => Some(self.parse_infix_expression(left)),
Token::LPAREN => Some(self.parse_call_expression(left)),
// Token::LBRACKET => Some(self.parse_index_expression(left)),
_ => None,
}
}
}
pub enum Precedence {
Lowest,
Equals,
LessGreater,
Sum,
Product,
Prefix,
Call,
Index,
}
fn precedence(tok: &Token) -> Precedence {
match tok {
Token::EQ => Precedence::Equals,
Token::NE => Precedence::Equals,
Token::LT => Precedence::LessGreater,
Token::GT => Precedence::LessGreater,
Token::PLUS => Precedence::Sum,
Token::MINUS => Precedence::Sum,
Token::SLASH => Precedence::Product,
Token::ASTERISK => Precedence::Product,
Token::LPAREN => Precedence::Call,
Token::LBRACE => Precedence::Index,
_ => Precedence::Lowest,
}
}
#[derive(Debug)]
pub enum ParserError {
LexerError(LexerError),
InvalidIdentifier(String),
InvalidNextToken(String),
UnknownStatement(String),
}
impl From<LexerError> for ParserError {
fn from(err: LexerError) -> ParserError {
ParserError::LexerError(err)
}
}
#[test]
fn test_let_statement() {
let input = r#"
let five = 46;
let ten = 10;
let foobar = 1234;
"#;
let st = vec![
LetStatement {
name: String::from("five"),
value: Expression::Integer(46),
},
LetStatement {
name: String::from("ten"),
value: Expression::Integer(10),
},
LetStatement {
name: String::from("foobar"),
value: Expression::Integer(1234),
},
];
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let pr = p.parse().unwrap();
assert_eq!(pr.statements.len(), 3);
for i in 0..pr.statements.len() {
assert_eq!(
is_expected_let_ident(
pr.statements.iter().nth(i).unwrap(),
st.iter().nth(i).unwrap()
),
true
);
}
}
#[cfg(test)]
fn is_expected_let_ident(s: &Statement, exp: &LetStatement) -> bool {
match s {
Statement::Let(ls) => return ls.name == exp.name && ls.value == exp.value,
_ => return false,
};
}
#[test]
fn test_return_statement() {
let input = r#"
return 3;
return x;
"#;
let st = vec![
ReturnStatement {
value: Expression::Integer(3),
},
ReturnStatement {
value: Expression::Identifier(String::from("x")),
},
];
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let pr = p.parse().unwrap();
assert_eq!(pr.statements.len(), 2);
for i in 0..pr.statements.len() {
assert_eq!(
is_expected_return(
pr.statements.iter().nth(i).unwrap(),
st.iter().nth(i).unwrap()
),
true
);
}
}
#[cfg(test)]
fn is_expected_return(s: &Statement, exp: &ReturnStatement) -> bool {
match s {
Statement::Return(ls) => return ls.value == exp.value,
_ => return false,
};
}
#[test]
fn test_identifier_expression() {
let input = r#"
foobar;
"#;
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let pr = p.parse().unwrap();
assert_eq!(pr.statements.len(), 1);
let exp = pr.statements.iter().nth(0).unwrap();
match exp {
Statement::Expression(Expression::Identifier(t)) => assert_eq!(t, "foobar"),
_ => panic!("expected identifier expression, got {:?}", exp),
}
}
#[test]
fn test_prefix_expression() {
let input = r#"
!5;
-15;
!true;
!false;
-x;
"#;
let exp = vec![
PrefixExpression {
operator: Token::BANG,
right: Box::new(Expression::Integer(5)),
},
PrefixExpression {
operator: Token::MINUS,
right: Box::new(Expression::Integer(15)),
},
PrefixExpression {
operator: Token::BANG,
right: Box::new(Expression::Boolean(true)),
},
PrefixExpression {
operator: Token::BANG,
right: Box::new(Expression::Boolean(false)),
},
PrefixExpression {
operator: Token::MINUS,
right: Box::new(Expression::Identifier(String::from("x"))),
},
];
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let pr = p.parse().unwrap();
for i in 0..pr.statements.len() {
assert_eq!(
is_expected_prefix_expression(
pr.statements.iter().nth(i).unwrap(),
exp.iter().nth(i).unwrap()
),
true
);
}
}
#[cfg(test)]
fn is_expected_prefix_expression(s: &Statement, exp: &PrefixExpression) -> bool {
match s {
Statement::Expression(Expression::Prefix(es)) => {
println!("expected {:?} to equal {:?}", es, exp);
return es.operator == exp.operator && es.right == exp.right;
}
_ => return false,
};
}
#[test]
fn test_infix_expression() {
let input = r#"
5+5;
5-5;
5*5;
5/5;
5>5;
5<5;
5==5;
5!=5;
"#;
let exp = vec![
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::PLUS,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::MINUS,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::ASTERISK,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::SLASH,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::GT,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::LT,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::EQ,
right: Box::new(Expression::Integer(5)),
},
InfixExpression {
left: Box::new(Expression::Integer(5)),
operator: Token::NE,
right: Box::new(Expression::Integer(5)),
},
];
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let pr = p.parse().unwrap();
for i in 0..pr.statements.len() {
assert_eq!(
is_expected_infix_expression(
pr.statements.iter().nth(i).unwrap(),
exp.iter().nth(i).unwrap()
),
true
);
}
}
#[cfg(test)]
fn is_expected_infix_expression(s: &Statement, exp: &InfixExpression) -> bool {
match s {
Statement::Expression(Expression::Infix(ie)) => {
println!("expected {:?} to equal {:?}", ie, exp);
            return ie.left == exp.left && ie.operator == exp.operator && ie.right == exp.right;
}
_ => return false,
};
}
#[test]
fn test_operator_precedence() {
let tests = vec![
("-a * b", "((-a) * b)"),
("!-a", "(!(-a))"),
("a + b + c", "((a + b) + c)"),
("a + b - c", "((a + b) - c)"),
("a * b * c", "((a * b) * c)"),
("a * b / c", "((a * b) / c)"),
("a + b / c", "(a + (b / c))"),
("a + b * c + d / e - f", "(((a + (b * c)) + (d / e)) - f)"),
("3 + 4; -5 * 5", "(3 + 4)((-5) * 5)"),
("5 > 4 == 3 < 4", "((5 > 4) == (3 < 4))"),
("5 < 4 != 3 > 4", "((5 < 4) != (3 > 4))"),
(
"3 + 4 * 5 == 3 * 1 + 4 * 5",
"((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))",
),
("true", "true"),
("false", "false"),
("3 > 5 == false", "((3 > 5) == false)"),
("3 < 5 == true", "((3 < 5) == true)"),
("1 + (2 + 3) + 4", "((1 + (2 + 3)) + 4)"),
("(5 + 5) * 2", "((5 + 5) * 2)"),
("2 / (5 + 5)", "(2 / (5 + 5))"),
("-(5 + 5)", "(-(5 + 5))"),
("!(true == true)", "(!(true == true))"),
// (
// "a * [1, 2, 3, 4][b * c] * d",
// "((a * ([1, 2, 3, 4][(b * c)])) * d)",
// ),
// (
// "add(a * b[2], b[1], 2 * [1, 2][1])",
// "add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))",
// ),
];
for (input, want) in tests {
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let got = format!("{}", p.parse().unwrap());
assert_eq!(want, got);
}
}
#[test]
fn parse_if_expression() {
let tests = vec!["if (x < y) { x }", "if (x > y) { x } else { y }"];
for input in tests {
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let got = format!("{}", p.parse().unwrap());
assert_eq!(input, got);
}
}
#[test]
fn test_parse_call_expressions() {
let tests = vec![
("add(1, 2 * 3, 4 + 5);", "add(1, (2 * 3), (4 + 5))"),
("a + add(b * c) + d", "((a + add((b * c))) + d)"),
(
"add(a, b, 1, 2 * 3, 4 + 5, add(6, 7 * 8))",
"add(a, b, 1, (2 * 3), (4 + 5), add(6, (7 * 8)))",
),
];
for (input, want) in tests {
let l = Lexer::new(input).unwrap();
let mut p = Parser::new(l).unwrap();
let got = format!("{}", p.parse().unwrap());
assert_eq!(want, got);
}
}
| true
|
4686659b7d9ceeccd236260e57f23a7e34bd9fa8
|
Rust
|
canpok1/atcoder-rust
|
/contests/abc184/src/bin/b.rs
|
UTF-8
| 733
| 3
| 3
|
[] |
no_license
|
fn main() {
let (n, x) = {
let mut line = String::new();
std::io::stdin().read_line(&mut line).unwrap();
let mut ws = line.trim_end().split_whitespace();
let n1: isize = ws.next().unwrap().parse().unwrap();
let n2: isize = ws.next().unwrap().parse().unwrap();
(n1, n2)
};
let s: Vec<char> = {
let mut line = String::new();
std::io::stdin().read_line(&mut line).unwrap();
let s = line.trim_end().to_owned();
s.chars().collect()
};
let mut p = x;
(0_usize..(n as usize)).for_each(|index| {
if s[index] == 'o' {
p += 1;
} else if p > 0 {
p -= 1;
}
});
println!("{}", p);
}
| true
|
6fbd2b85d17797410f41226cab32e9ff4c3f49ad
|
Rust
|
rusty-ecma/hash-chain
|
/src/imutable.rs
|
UTF-8
| 12,319
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
use std::{
borrow::Borrow,
collections::{hash_map::RandomState},
hash::{BuildHasher, Hash},
mem::take,
ops::Index,
};
use im_rc::{Vector, HashMap};
#[derive(Clone)]
pub struct LockedChainMap<K, V, S = RandomState> {
pub(crate) maps: Vector<HashMap<K, V, S>>,
}
impl<K, V, S: BuildHasher> LockedChainMap<K, V, S>
where
K: Hash + Eq + Clone,
V: Clone,
S: BuildHasher + Clone,
{
pub fn new(map: HashMap<K, V, S>) -> Self {
Self { maps: Vector::unit(map) }
}
/// Inserts a key-value pair into the map.
/// If the map did not have this key present, None is returned.
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
let map = self.maps.get_mut(self.maps.len() - 1)?;
map.insert(key, value)
}
pub fn insert_at(&mut self, idx: usize, key: K, value: V) -> Result<Option<V>, crate::Error> {
if let Some(map) = self.maps.get_mut(idx) {
Ok(map.insert(key, value))
} else {
Err(crate::Error::IndexOutOfRange)
}
}
/// Returns the key-value pair corresponding to the supplied key.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// `Hash` and `Eq` on the borrowed form *must* match those for
/// the key type.
pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
for map in self.maps.iter().rev() {
if let Some(v) = map.get(key) {
return Some(v);
}
}
None
}
/// Returns a mutable reference to the value corresponding to the key.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// `Hash` and `Eq` on the borrowed form *must* match those for
/// the key type.
pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
for map in self.maps.iter_mut().rev() {
if let Some(v) = map.get_mut(key) {
return Some(v);
}
}
None
}
pub fn get_before<Q: ?Sized>(&self, idx: usize, key: &Q) -> Option<&V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
for i in (0..idx).rev() {
if let Some(map) = self.maps.get(i) {
if let Some(v) = map.get(key) {
return Some(v)
}
}
}
None
}
pub fn get_before_mut<Q: ?Sized>(&mut self, idx: usize, key: &Q) -> Option<&mut V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
for (i, map) in self.maps.iter_mut().enumerate().rev() {
            if i >= idx {
continue;
}
if let Some(v) = map.get_mut(key) {
return Some(v)
}
}
None
}
pub fn new_child_with(&mut self, map: HashMap<K, V, S>) {
self.maps.push_back(map);
}
pub fn last_has<Q: ?Sized>(&self, key: &Q) -> bool
where
K: Borrow<Q>,
Q: Hash + Eq,
{
self.has_at(self.maps.len() - 1, key)
}
pub fn has_at<Q: ?Sized>(&self, idx: usize, key: &Q) -> bool
where
K: Borrow<Q>,
Q: Hash + Eq,
{
if let Some(map) = self.maps.get(idx) {
map.contains_key(key)
} else {
false
}
}
pub fn child_len(&self) -> usize {
self.maps.len()
}
pub fn get_last_index<Q: ?Sized>(&self, key: &Q) -> Option<usize>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
for (i, map) in self.maps.iter().enumerate().rev() {
if map.contains_key(key) {
return Some(i);
}
}
None
}
pub fn split_off(&mut self, idx: usize) -> Self {
let maps = self.maps.split_off(idx);
Self {
maps,
}
}
pub fn append(&mut self, other: Self) {
self.maps.append(other.maps);
}
}
impl<K: Hash + Eq + Clone, V: Clone, S: BuildHasher + Default + Clone> LockedChainMap<K, V, S> {
pub fn new_child(&mut self) {
self.maps.push_back(HashMap::default());
}
pub fn remove_child(&mut self) -> Option<HashMap<K, V, S>> {
if self.maps.len() == 1 {
let ret = take(&mut self.maps[0]);
Some(ret)
} else {
self.maps.pop_back()
}
}
}
impl<K, V> Default for LockedChainMap<K, V>
where
K: Hash + Eq + Clone,
V: Clone
{
fn default() -> Self {
let maps = Vector::unit(HashMap::new());
Self {
maps,
}
}
}
impl<K, Q: ?Sized, V: Clone, S> Index<&Q> for LockedChainMap<K, V, S>
where
K: Eq + Hash + Borrow<Q> + Clone,
Q: Eq + Hash + Clone,
S: BuildHasher + Clone,
{
type Output = V;
/// Returns a reference to the value corresponding to the supplied key.
///
/// # Panics
///
/// Panics if the key is not present in the `HashMap`.
#[inline]
fn index(&self, key: &Q) -> &V {
self.get(key).expect("no entry found for key")
}
}
impl<K, V, S> PartialEq for LockedChainMap<K, V, S>
where
K: Eq + Hash + Clone,
V: PartialEq + Clone,
S: std::hash::BuildHasher + Clone,
{
fn eq(&self, other: &LockedChainMap<K, V, S>) -> bool {
self.maps == other.maps
}
}
impl<K, V, S> Eq for LockedChainMap<K, V, S>
where
K: Eq + Hash + Clone,
V: Eq + Clone,
S: BuildHasher + Clone,
{
}
impl<K, V, S> core::fmt::Debug for LockedChainMap<K, V, S>
where
K: Eq + Hash + core::fmt::Debug + Clone,
V: core::fmt::Debug + Clone,
S: BuildHasher + Clone,
{
fn fmt(&self, f: &mut core::fmt::Formatter) -> std::fmt::Result {
f.debug_struct("LockedChainMap")
.field("maps", &self.maps)
.finish()
}
}
#[cfg(test)]
mod test {
use super::*;
use std::default::Default;
#[test]
fn initialization() {
let mut test_map = HashMap::new();
test_map.insert("test", 1);
let chain_map = LockedChainMap::new(test_map);
assert!(chain_map.maps.len() > 0);
assert_eq!(chain_map.maps[0].get("test"), Some(&1));
}
#[test]
fn initialization_default() {
let chain_map: LockedChainMap<(), ()> = LockedChainMap::default();
assert!(chain_map.maps.len() > 0);
assert!(chain_map.maps[0].is_empty());
}
#[test]
fn insert() {
let mut chain_map = LockedChainMap::default();
assert!(chain_map.insert("test", 1).is_none());
assert_eq!(chain_map.maps[0].get("test"), Some(&1));
}
#[test]
fn insert_at() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("banana", "milk");
chain_map.new_child();
chain_map.insert_at(0, "strawberry", "soda").unwrap();
assert_eq!(chain_map.maps[0].get("strawberry"), Some(&"soda"));
assert_eq!(chain_map.maps[1].get("strawberry"), None);
}
#[test]
#[should_panic = "IndexOutOfRange"]
fn insert_at_out_of_bounds() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("banana", "milk");
chain_map.new_child();
chain_map.insert_at(37, "strawberry", "soda").unwrap();
}
#[test]
fn get() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
assert_eq!(chain_map.get(&"test"), Some(&1));
}
#[test]
fn get_none() {
let chain_map: LockedChainMap<&str, ()> = LockedChainMap::default();
assert_eq!(chain_map.get(&"test"), None);
}
#[test]
fn get_mut() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
let test_value = chain_map.get_mut(&"test");
assert_eq!(test_value, Some(&mut 1));
*test_value.unwrap() += 1;
let changed = chain_map.get(&"test");
assert_eq!(changed, Some(&2));
}
#[test]
fn get_mut_outer() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("outer", 1);
chain_map.new_child();
chain_map.insert("inner", 2);
let ret = chain_map.get_mut("outer").unwrap();
*ret += 9000;
let changed = chain_map.get(&"outer");
assert_eq!(changed, Some(&9001));
}
#[test]
fn index() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
assert_eq!(chain_map[&"test"], 1);
}
#[test]
fn new_child() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
chain_map.new_child();
assert!(chain_map.maps.len() > 1);
}
#[test]
fn scopes() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
chain_map.insert("y", 2);
chain_map.new_child();
chain_map.insert("x", 1);
assert_eq!(chain_map.get("x"), Some(&1));
assert_eq!(chain_map.get("y"), Some(&2));
}
#[test]
fn remove_child() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
chain_map.insert("y", 2);
chain_map.new_child();
chain_map.insert("x", 1);
let ret = chain_map.remove_child().unwrap();
assert_eq!(ret.get("x"), Some(&1));
assert_eq!(chain_map.get("x"), Some(&0));
}
#[test]
fn remove_child_length_1() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
let _ = chain_map.remove_child();
assert_eq!(chain_map.get("x"), None);
assert!(chain_map.maps.len() == 1);
}
#[test]
fn has_at_exists() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
assert!(chain_map.has_at(0, &"x"));
}
#[test]
fn has_at_doesnt_exist() {
let chain_map: LockedChainMap<&str, ()> = LockedChainMap::default();
assert!(!chain_map.has_at(11, &"x"));
}
#[test]
fn last_has_true() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
chain_map.new_child();
chain_map.insert("y", 1);
assert!(chain_map.last_has(&"y"));
}
#[test]
fn last_has_false() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("x", 0);
chain_map.new_child();
chain_map.insert("y", 1);
assert!(!chain_map.last_has(&"x"));
}
#[test]
fn child_len() {
let mut chain_map: LockedChainMap<&str, ()> = LockedChainMap::default();
assert_eq!(chain_map.child_len(), 1);
for i in 2..100 {
chain_map.new_child();
assert_eq!(chain_map.child_len(), i);
}
}
#[test]
fn get_before_exists() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
chain_map.new_child();
chain_map.insert("test", 2);
assert_eq!(chain_map.get_before(1, &"test"), Some(&1));
}
#[test]
fn get_before_mut_exists() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test", 1);
chain_map.new_child();
chain_map.insert("test", 2);
let test_value = chain_map.get_before_mut(1, &"test");
assert_eq!(test_value, Some(&mut 1));
*test_value.unwrap() += 2;
let changed = chain_map.get_before(1, &"test");
assert_eq!(changed, Some(&3));
let child = chain_map.get("test");
assert_eq!(child, Some(&2));
}
#[test]
fn get_last_index_exists() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test1", 1);
chain_map.new_child();
chain_map.insert("test2", 2);
assert_eq!(chain_map.get_last_index("test1"), Some(0));
assert_eq!(chain_map.get_last_index("test2"), Some(1));
}
#[test]
fn get_last_index_doesnt_exist() {
let mut chain_map = LockedChainMap::default();
chain_map.insert("test1", 1);
chain_map.new_child();
chain_map.insert("test2", 2);
assert_eq!(chain_map.get_last_index("shmee"), None);
}
}
| true
|
41f6cb35e4e83c46660a85f0eb28b16caffe5aa3
|
Rust
|
JonathonFry/AdventOfCode2017
|
/src/util.rs
|
UTF-8
| 313
| 2.953125
| 3
|
[] |
no_license
|
use std::fs::File;
use std::io::prelude::*;
pub fn read(name: String) -> String {
let mut data = String::new();
let mut f = File::open(format!("{}{}{}", "./files/", name, ".txt"))
.expect("Unable to open file");
f.read_to_string(&mut data).expect("Unable to read string");
return data;
}
| true
|
16d91a5295dc1dd22da915fc23d832061e5156b8
|
Rust
|
Hexilee/roa
|
/roa/src/logger.rs
|
UTF-8
| 4,863
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
//! This module provides a middleware `logger`.
//!
//! ### Example
//!
//! ```rust
//! use roa::logger::logger;
//! use roa::preload::*;
//! use roa::App;
//! use roa::http::StatusCode;
//! use tokio::task::spawn;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//! pretty_env_logger::init();
//! let app = App::new()
//! .gate(logger)
//! .end("Hello, World");
//! let (addr, server) = app.run()?;
//! spawn(server);
//! let resp = reqwest::get(&format!("http://{}", addr)).await?;
//! assert_eq!(StatusCode::OK, resp.status());
//! Ok(())
//! }
//! ```
use std::pin::Pin;
use std::time::Instant;
use std::{io, mem};
use bytes::Bytes;
use bytesize::ByteSize;
use futures::task::{self, Poll};
use futures::{Future, Stream};
use roa_core::http::{Method, StatusCode};
use tracing::{error, info};
use crate::http::Uri;
use crate::{Context, Executor, JoinHandle, Next, Result};
/// A finite-state machine to log success information in each successful response.
enum StreamLogger<S> {
/// Polling state, as a body stream.
Polling { stream: S, task: LogTask },
/// Logging state, as a logger future.
Logging(JoinHandle<()>),
/// Complete, as a empty stream.
Complete,
}
/// A task structure to log when polling is complete.
#[derive(Clone)]
struct LogTask {
counter: u64,
method: Method,
status_code: StatusCode,
uri: Uri,
start: Instant,
exec: Executor,
}
impl LogTask {
#[inline]
fn log(&self) -> JoinHandle<()> {
let LogTask {
counter,
method,
status_code,
uri,
start,
exec,
} = self.clone();
exec.spawn_blocking(move || {
info!(
"<-- {} {} {}ms {} {}",
method,
uri,
start.elapsed().as_millis(),
ByteSize(counter),
status_code,
)
})
}
}
impl<S> Stream for StreamLogger<S>
where
    S: 'static + Send + Unpin + Stream<Item = io::Result<Bytes>>,
{
type Item = io::Result<Bytes>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Option<Self::Item>> {
match &mut *self {
StreamLogger::Polling { stream, task } => {
match futures::ready!(Pin::new(stream).poll_next(cx)) {
Some(Ok(bytes)) => {
task.counter += bytes.len() as u64;
Poll::Ready(Some(Ok(bytes)))
}
None => {
let handler = task.log();
*self = StreamLogger::Logging(handler);
self.poll_next(cx)
}
err => Poll::Ready(err),
}
}
StreamLogger::Logging(handler) => {
futures::ready!(Pin::new(handler).poll(cx));
*self = StreamLogger::Complete;
self.poll_next(cx)
}
StreamLogger::Complete => Poll::Ready(None),
}
}
}
/// A middleware to log information about request and response.
///
/// Based on crate `log`, the log level must be at least `INFO` to log all information,
/// and at least `ERROR` when you only need error information.
pub async fn logger<S>(ctx: &mut Context<S>, next: Next<'_>) -> Result {
info!("--> {} {}", ctx.method(), ctx.uri().path());
let start = Instant::now();
let mut result = next.await;
let method = ctx.method().clone();
let uri = ctx.uri().clone();
let exec = ctx.exec.clone();
match &mut result {
Err(status) => {
let status_code = status.status_code;
let message = if status.expose {
status.message.clone()
} else {
// set expose to true; then root status_handler won't log this status.
status.expose = true;
// take unexposed message
mem::take(&mut status.message)
};
ctx.exec
.spawn_blocking(move || {
error!("<-- {} {} {}\n{}", method, uri, status_code, message,);
})
.await
}
Ok(_) => {
let status_code = ctx.status();
// logging when body polling complete.
let logger = StreamLogger::Polling {
stream: mem::take(&mut ctx.resp.body),
task: LogTask {
counter: 0,
method,
uri,
status_code,
start,
exec,
},
};
ctx.resp.write_stream(logger);
}
}
result
}
| true
|
609277f528b5bcc11ad72d986a801beb84eef7eb
|
Rust
|
tock/book
|
/imix/src/components/led.rs
|
UTF-8
| 1,086
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
//! Component for imix board LEDs.
//!
//! This provides one Component, LedComponent, which implements
//! a userspace syscall interface to the two imix on-board LEDs.
//!
//! Usage
//! -----
//! ```rust
//! let led = LedComponent::new().finalize();
//! ```
// Author: Philip Levis <pal@cs.stanford.edu>
// Last modified: 6/20/2018
#![allow(dead_code)] // Components are intended to be conditionally included
use capsules::led;
use kernel::component::Component;
use kernel::static_init;
pub struct LedComponent {}
impl LedComponent {
pub fn new() -> LedComponent {
LedComponent {}
}
}
impl Component for LedComponent {
type Output = &'static led::LED<'static, sam4l::gpio::GPIOPin>;
unsafe fn finalize(&mut self) -> Self::Output {
let led_pins = static_init!(
[(&'static sam4l::gpio::GPIOPin, led::ActivationMode); 1],
[(&sam4l::gpio::PC[10], led::ActivationMode::ActiveHigh),]
);
let led = static_init!(
led::LED<'static, sam4l::gpio::GPIOPin>,
led::LED::new(led_pins)
);
led
}
}
| true
|
4ac629ac99da2a6be131e96ad25ebdd9aed43fe8
|
Rust
|
mahkoh/gesund
|
/ui/textbox/mod.rs
|
UTF-8
| 2,481
| 3.046875
| 3
|
[] |
no_license
|
use cairo::{Surface, pango};
use cairo::pango::{FontDescription};
use utils::{CopyMut};
use colors;
pub static PADDING: f64 = 10f64;
macro_rules! scale {
($x:expr) => { self.scale.get() * $x }
}
pub struct Textbox {
pub text: String,
pub cursor: uint,
pub scroll: f64,
pub empty_text: Option<String>,
pub height: f64,
pub scale: CopyMut<f64>,
}
impl Textbox {
pub fn new(height: f64, scale: CopyMut<f64>) -> Textbox {
Textbox {
text: String::new(),
cursor: 0,
scroll: 0.0,
empty_text: None,
height: height,
scale: scale,
}
}
/*
pub fn click(&self, x: f64, y: f64) {
let mut font = FontDescription::new();
font.set_family("sans");
font.set_weight(pango::WeightNormal);
font.set_absolute_size(scale!(12.0));
let mut layout = cx.create_pango_layout();
layout.set_font_description(&font);
layout.set_text(self.text.as_slice());
layout.set_width(width - scale!(2*PADDING));
}
*/
pub fn draw(&self, width: f64, surface: &mut Surface) {
{
let mut cx = surface.create();
// Background
cx.set_source_rgb(colors::WHITE);
cx.paint();
// Border
cx.rectangle(scale!(1.0), scale!(1.0), width - scale!(2.0),
scale!(self.height - 2.0));
cx.set_source_rgb(colors::LIGHT_GREY);
cx.set_line_width(scale!(2.0));
cx.stroke();
}
{
let mut surface = surface.create_for_rectangle(scale!(PADDING),
scale!(PADDING),
width - scale!(2.0*PADDING),
scale!(self.height - 2.0*PADDING));
let mut cx = surface.create();
// Text
let mut font = FontDescription::new();
font.set_family("sans");
font.set_weight(pango::WeightNormal);
font.set_absolute_size(scale!(12.0));
let mut layout = cx.create_pango_layout();
layout.set_font_description(&font);
layout.set_text(self.text.as_slice());
layout.set_width(width - scale!(2.0*PADDING));
cx.set_source_rgb(colors::BLACK);
cx.show_pango_layout(&layout);
}
}
}
| true
|
15fc915f6ff8019fd0fa28ae285fce79bf4516b4
|
Rust
|
WillChilds-Klein/advent
|
/2020/src/bin/05.rs
|
UTF-8
| 3,108
| 3.4375
| 3
|
[] |
no_license
|
use std::{env, fs, io};
use std::io::BufRead;
use std::cmp;
#[derive(Debug)]
enum Direction {
Back,
Front,
Left,
Right,
}
impl std::str::FromStr for Direction {
type Err = String;
fn from_str(s: &str) -> Result<Direction, Self::Err> {
match s {
"B" => Ok(Direction::Back),
"F" => Ok(Direction::Front),
"L" => Ok(Direction::Left),
"R" => Ok(Direction::Right),
_ => Err("Unsupported Direction enum value!".to_string())
}
}
}
fn main() {
if env::args().count() < 3 {
panic!("Usage: cargo run --bin <bin> <vers> <input_file>")
}
let vers = env::args().nth(1).unwrap();
let path = env::args().nth(2).unwrap();
let infile = fs::File::open(path).unwrap();
let reader = io::BufReader::new(infile);
let seats: Vec<Vec<Direction>> = reader.lines()
.into_iter()
.map(|line|
line.unwrap()
.split("")
.into_iter()
.filter(|s| !s.is_empty())
.map(str::parse::<Direction>)
.map(Result::unwrap)
.collect()
)
.collect();
match vers.as_str() {
"01" => one(seats),
"02" => two(seats),
_ => panic!("Unsupported vers: {}", vers)
}
}
fn one(seats: Vec<Vec<Direction>>) {
let mut max_seat_id = -1;
for seat in seats {
let mut row_lo = 0;
let mut row_hi = 127;
let mut col_lo = 0;
let mut col_hi = 7;
for direction in seat {
match direction {
Direction::Back => row_lo += (row_hi-row_lo)/2+1,
Direction::Front => row_hi -= (row_hi-row_lo)/2+1,
Direction::Left => col_hi -= (col_hi-col_lo)/2+1,
Direction::Right => col_lo += (col_hi-col_lo)/2+1,
};
}
let row = cmp::min(row_lo, row_hi);
let col = cmp::min(col_lo, col_hi);
let seat_id = 8*row + col;
if seat_id > max_seat_id {
max_seat_id = seat_id;
}
}
println!("{}", max_seat_id);
}
fn two(seats: Vec<Vec<Direction>>) {
const N_ROWS: usize = 128;
const N_COLS: usize = 8;
let mut map = [false; N_COLS*N_ROWS];
for seat in seats {
let mut row_lo = 0;
let mut row_hi = N_ROWS-1;
let mut col_lo = 0;
let mut col_hi = N_COLS-1;
for direction in seat {
match direction {
Direction::Back => row_lo += (row_hi-row_lo)/2+1,
Direction::Front => row_hi -= (row_hi-row_lo)/2+1,
Direction::Left => col_hi -= (col_hi-col_lo)/2+1,
Direction::Right => col_lo += (col_hi-col_lo)/2+1,
};
}
let row = cmp::min(row_lo, row_hi);
let col = cmp::min(col_lo, col_hi);
map[8*row+col] = true;
}
let mut seat_id: i32 = -1;
for ii in 1..map.len()-2 {
if map[ii-1] && !map[ii] && map[ii+1] {
seat_id = ii as i32;
break;
}
}
println!("{}", seat_id);
}
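// Hedged aside (the helper and test below are additions, not part of the original
// solution): the ten characters form a binary number where B/R are 1 and F/L are 0,
// so 8*row + col is just that number read directly; the AoC example "FBFBBFFRLR"
// decodes to seat ID 357.
#[cfg(test)]
mod seat_id_sketch {
    fn seat_id(s: &str) -> u32 {
        // Shift in one bit per character: B and R contribute 1, F and L contribute 0.
        s.chars()
            .fold(0, |acc, c| (acc << 1) | matches!(c, 'B' | 'R') as u32)
    }
    #[test]
    fn example_seat() {
        assert_eq!(seat_id("FBFBBFFRLR"), 357);
    }
}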
| true
|
b310336a8cd2e761a59fb8a387612b004badc03a
|
Rust
|
jsoverson/wasm3-provider
|
/examples/demo.rs
|
UTF-8
| 1,519
| 2.6875
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate wapc;
use std::fs::File;
use std::io::prelude::*;
use std::time::Instant;
use wapc::WapcHost;
use wasm3_provider::Wasm3EngineProvider;
fn load_file(path: &str) -> Vec<u8> {
println!("{}", path);
let mut f = File::open(path).unwrap();
let mut buf = Vec::new();
f.read_to_end(&mut buf).unwrap();
buf
}
pub fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
env_logger::init();
let n = Instant::now();
let module_bytes = load_file(&std::env::args().skip(1).next().unwrap());
let engine = Wasm3EngineProvider::new(&module_bytes);
let host = WapcHost::new(Box::new(engine), host_callback)?;
let func = std::env::args().skip(2).next().unwrap();
// hello.wasm - operation is wapc:sample!Hello (use ' quotes for linux CLI)
// hello_as.wasm - operation is hello
// hello_tinygo.wasm - operation is hello
// hello_zig.wasm - operation is hello
println!("Calling guest (wasm) function");
let res = host.call(&func, b"this is a test")?;
println!("Result - {}", ::std::str::from_utf8(&res).unwrap());
println!("Elapsed - {}ms", n.elapsed().as_millis());
Ok(())
}
fn host_callback(
id: u64,
bd: &str,
ns: &str,
op: &str,
payload: &[u8],
) -> Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> {
println!(
"Guest {} invoked '{}->{}:{}' with payload of {}",
id,
bd,
ns,
op,
::std::str::from_utf8(payload).unwrap()
);
Ok(vec![])
}
| true
|
b5e6aaa4f7a1cb3bfbcd4fece7c7eaff2f7348f5
|
Rust
|
tov/succinct-rs
|
/src/rank/traits.rs
|
UTF-8
| 1,281
| 3.953125
| 4
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
/// Supports fast rank queries.
///
/// Associated type `Over` gives the type that we can query about. For
/// example, `RankSupport<Over=bool>` lets us rank `0` and `1`, whereas
/// `RankSupport<Over=u8>` will rank arbitrary bytes.
pub trait RankSupport {
/// The type of value to rank.
type Over: Copy;
/// Returns the rank of the given value at a given position.
///
/// This is the number of occurrences of `value` up to and including
/// that position.
///
/// # Panics
///
/// Panics if `position >= self.limit()`.
fn rank(&self, position: u64, value: Self::Over) -> u64;
/// The size of the vector being ranked.
fn limit(&self) -> u64;
}
/// Supports fast rank queries over `bool`s.
pub trait BitRankSupport: RankSupport<Over = bool> {
/// Returns the rank of 1 at the given position.
///
/// This is the number of occurrences of 1 up to and including that
/// position.
fn rank1(&self, position: u64) -> u64 {
self.rank(position, true)
}
/// Returns the rank of 0 at the given position.
///
/// This is the number of occurrences of 0 up to and including that
/// position.
fn rank0(&self, position: u64) -> u64 {
position + 1 - self.rank1(position)
}
}
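// A minimal sketch (an addition, not part of this crate's public API): implementing
// `BitRankSupport` for a single machine word, counting ones with `count_ones` over the
// low bits. The `Word` wrapper type is hypothetical and only exists to illustrate the
// trait contract described above.
#[cfg(test)]
mod sketch {
    use super::*;
    struct Word(u64);
    impl RankSupport for Word {
        type Over = bool;
        fn rank(&self, position: u64, value: bool) -> u64 {
            assert!(position < self.limit());
            // Count 1-bits among bits 0..=position, then derive the rank of 0s from it.
            let ones = (self.0 & (u64::MAX >> (63 - position))).count_ones() as u64;
            if value { ones } else { position + 1 - ones }
        }
        fn limit(&self) -> u64 {
            64
        }
    }
    impl BitRankSupport for Word {}
    #[test]
    fn rank_of_0b1011() {
        let w = Word(0b1011);
        assert_eq!(w.rank1(0), 1); // bit 0 is 1
        assert_eq!(w.rank1(3), 3); // bits 0..=3 contain three 1s
        assert_eq!(w.rank0(3), 1); // and one 0
    }
}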
| true
|
99a85c9d4efec456e33fcc249d98a978988c2d32
|
Rust
|
scottyla19/rust-book
|
/functions/src/main.rs
|
UTF-8
| 1,135
| 3.859375
| 4
|
[] |
no_license
|
fn main() {
let c: f32 = convert_f_to_c(37.0);
println!("The temp in C is {}", c);
let n = 9;
let fibs: i32 = fib(n);
println!("The {}th term is {}", n, fibs);
xmas();
}
fn convert_f_to_c(f: f32) -> f32 {
let c: f32 = (f - 32.0) * (5.0 / 9.0);
c
}
fn fib(n: i32) -> i32 {
let mut a = 0;
let mut b = 1;
let mut c = 1;
    if n == 0 {
return a;
}
    for _val in 1..n {
c = a + b;
a = b;
b = c;
}
return b;
}
fn xmas() {
let a = [
"A partridge in a pear tree",
"Two turtle doves",
"Three french hens",
];
let v = ["first", "second", "third"];
for (i, item) in a.iter().enumerate() {
if i == 0 {
println!(
"On the {} day of Christmas my true love sent to me {}",
v[i], item
);
} else {
let sliced_a = &a[0..i + 1];
let mystr = sliced_a.join(", ");
println!(
"On the {} day of Christmas my true love sent to me {}",
v[i], mystr
);
}
}
}
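// Hedged check (this test module is an addition, not part of the original exercise):
// the formula C = (F - 32) * 5/9 should map the boiling point 212 F to 100 C and the
// freezing point 32 F to 0 C.
#[cfg(test)]
mod conversion_tests {
    use super::convert_f_to_c;
    #[test]
    fn known_points() {
        assert!((convert_f_to_c(212.0) - 100.0).abs() < 1e-4);
        assert!(convert_f_to_c(32.0).abs() < 1e-4);
    }
}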
| true
|
fe7d7a80b8340ed08a3df098e7682e6a212467ac
|
Rust
|
Gasper/AdventOfCode2019
|
/day09/nine.rs
|
UTF-8
| 10,153
| 3.171875
| 3
|
[] |
no_license
|
use std::convert::From;
use std::fs::read;
use std::collections::HashMap;
const FINISH: i64 = 99;
const ADD: i64 = 1;
const MULTIPLY: i64 = 2;
const INPUT: i64 = 3;
const OUTPUT: i64 = 4;
const JMP_TRUE: i64 = 5;
const JMP_FALSE: i64 = 6;
const LESS_THAN: i64 = 7;
const EQUALS: i64 = 8;
const ADJUST_BASE: i64 = 9;
#[derive(PartialEq, Debug)]
enum ParameterMode {
PositionMode,
ImmediateMode,
RelativeMode,
}
impl From<i64> for ParameterMode {
fn from(number: i64) -> Self {
match number {
0 => ParameterMode::PositionMode,
1 => ParameterMode::ImmediateMode,
            2 => ParameterMode::RelativeMode,
_ => panic!("Invalid parameter mode"),
}
}
}
struct Instruction {
opcode: i64,
par1mode: ParameterMode,
par2mode: ParameterMode,
par3mode: ParameterMode,
}
fn main() {
let raw_input = match read("input.txt") {
Err(_) => panic!("Can't read input.txt!"),
Ok(file) => file,
};
let input_string = String::from_utf8_lossy(&raw_input);
let input_program = get_program(input_string.to_string());
let diagnostic_input = vec![1];
let (ip, output) = run_program(&input_program, &diagnostic_input, 0);
if ip.is_none() {
println!("Output was: {:?}", output);
}
else {
panic!("Program was missing some input");
}
}
fn get_program(input: String) -> Vec<i64> {
return input.split(',').map(|c| match (*c).parse::<i64>() {
Err(_) => panic!("Couldn't parse number {}", c),
Ok(num) => num,
}).collect();
}
fn run_program(program: &Vec<i64>, input_param: &Vec<i64>, ip: usize) -> (Option<usize>, Vec<i64>) {
let mut program = program.clone();
let mut input = input_param.clone();
let mut output = Vec::<i64>::new();
let mut virtual_memory = HashMap::new();
let mut relative_base: usize = 0;
let mut pic: usize = ip;
while program[pic] != FINISH {
let instruction = parse_instruction(program[pic]);
match instruction.opcode {
ADD => {
let (param1, param2, dest) = load_three_params(&program, &virtual_memory, pic, relative_base, instruction);
write_memory(&mut program, &mut virtual_memory, dest as usize, param1 + param2);
pic += 4;
},
MULTIPLY => {
let (param1, param2, dest) = load_three_params(&program, &virtual_memory, pic, relative_base, instruction);
write_memory(&mut program, &mut virtual_memory, dest as usize, param1 * param2);
pic += 4;
},
INPUT => {
let input_number: i64 = match input.pop() {
Some(num) => num,
None => {
// If there is no input available, switch to different program
return (Some(pic), output);
},
};
let position1 = read_memory(&program, &virtual_memory, pic + 1);
let dest = match instruction.par1mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position1 as usize),
ParameterMode::ImmediateMode => position1,
ParameterMode::RelativeMode => (relative_base as i64 + position1),
};
write_memory(&mut program, &mut virtual_memory, dest as usize, input_number);
pic += 2;
},
OUTPUT => {
let param1 = load_one_param(&program, &virtual_memory, pic, relative_base, instruction);
output.push(param1);
pic += 2;
},
JMP_TRUE => {
let (param1, param2) = load_two_params(&program, &virtual_memory, pic, relative_base, instruction);
if param1 != 0 {
pic = param2 as usize;
}
else {
pic += 3;
}
},
JMP_FALSE => {
let (param1, param2) = load_two_params(&program, &virtual_memory, pic, relative_base, instruction);
if param1 == 0 {
pic = param2 as usize;
}
else {
pic += 3;
}
},
LESS_THAN => {
let (param1, param2, dest) = load_three_params(&program, &virtual_memory, pic, relative_base, instruction);
if param1 < param2 {
write_memory(&mut program, &mut virtual_memory, dest as usize, 1);
}
else {
write_memory(&mut program, &mut virtual_memory, dest as usize, 0);
}
pic += 4;
},
EQUALS => {
let (param1, param2, dest) = load_three_params(&program, &virtual_memory, pic, relative_base, instruction);
if param1 == param2 {
write_memory(&mut program, &mut virtual_memory, dest as usize, 1);
}
else {
write_memory(&mut program, &mut virtual_memory, dest as usize, 0);
}
pic += 4;
}
ADJUST_BASE => {
let param1 = load_one_param(&program, &virtual_memory, pic, relative_base, instruction);
relative_base = (relative_base as i64 + param1) as usize;
pic += 2;
}
_ => panic!("Unknown opcode: {}", instruction.opcode),
};
}
return (None, output);
}
fn parse_instruction(code: i64) -> Instruction {
return Instruction {
opcode: code % 100,
par1mode: ParameterMode::from((code / 100) % 10),
par2mode: ParameterMode::from((code / 1000) % 10),
par3mode: ParameterMode::from((code / 10000) % 10),
};
}
fn load_one_param(program: &Vec<i64>, virtual_memory: &HashMap<usize, i64>, pic: usize, relative_base: usize, instruction: Instruction) -> i64 {
let position1 = read_memory(&program, &virtual_memory, pic + 1);
return match instruction.par1mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position1 as usize),
ParameterMode::ImmediateMode => position1,
ParameterMode::RelativeMode => read_memory(&program, &virtual_memory, (relative_base as i64 + position1) as usize),
};
}
fn load_two_params(program: &Vec<i64>, virtual_memory: &HashMap<usize, i64>, pic: usize, relative_base: usize, instruction: Instruction) -> (i64, i64) {
let position1 = read_memory(&program, &virtual_memory, pic + 1);
let param1 = match instruction.par1mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position1 as usize),
ParameterMode::ImmediateMode => position1,
ParameterMode::RelativeMode => read_memory(&program, &virtual_memory, (relative_base as i64 + position1) as usize),
};
let position2 = read_memory(&program, &virtual_memory, pic + 2);
let param2 = match instruction.par2mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position2 as usize),
ParameterMode::ImmediateMode => position2,
ParameterMode::RelativeMode => read_memory(&program, &virtual_memory, (relative_base as i64 + position2) as usize),
};
return (param1, param2);
}
fn load_three_params(program: &Vec<i64>, virtual_memory: &HashMap<usize, i64>, pic: usize, relative_base: usize, instruction: Instruction) -> (i64, i64, i64) {
let position1 = read_memory(&program, &virtual_memory, pic + 1);
let param1 = match instruction.par1mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position1 as usize),
ParameterMode::ImmediateMode => position1,
ParameterMode::RelativeMode => read_memory(&program, &virtual_memory, (relative_base as i64 + position1) as usize),
};
let position2 = read_memory(&program, &virtual_memory, pic + 2);
let param2 = match instruction.par2mode {
ParameterMode::PositionMode => read_memory(&program, &virtual_memory, position2 as usize),
ParameterMode::ImmediateMode => position2,
ParameterMode::RelativeMode => read_memory(&program, &virtual_memory, (relative_base as i64 + position2) as usize),
};
let position3 = read_memory(&program, &virtual_memory, pic + 3);
let param3 = match instruction.par3mode {
ParameterMode::PositionMode => position3,
ParameterMode::ImmediateMode => position3,
ParameterMode::RelativeMode => (relative_base as i64 + position3),
};
return (param1, param2, param3);
}
fn read_memory(program: &Vec<i64>, virtual_memory: &HashMap<usize, i64>, location: usize) -> i64 {
if location < program.len() {
return program[location];
}
else {
return match virtual_memory.get(&location) {
Some(value) => *value,
None => 0,
};
}
}
fn write_memory(program: &mut Vec<i64>, virtual_memory: &mut HashMap<usize, i64>, location: usize, value: i64) {
if location < program.len() {
program[location] = value;
} else {
virtual_memory.insert(location, value);
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_large_numbers() {
let input = vec![];
let mut program = vec![1102,34915192,34915192,7,4,7,99,0];
assert_eq!(run_program(&mut program, &input, 0), (None, vec![1219070632396864]));
}
#[test]
fn test_self_copy() {
let input = vec![];
let mut program = vec![109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99];
assert_eq!(run_program(&mut program, &input, 0), (None, vec![109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99]));
}
#[test]
fn test_large_number() {
let input = vec![];
let mut program = vec![104,1125899906842624,99];
assert_eq!(run_program(&mut program, &input, 0), (None, vec![1125899906842624]));
}
}
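// Hedged demonstration (this module is an addition, not part of the original solution):
// parse_instruction splits an opcode word such as 1002 into the two low digits for the
// operation (02 = MULTIPLY) and one parameter-mode digit per argument.
#[cfg(test)]
mod decode_demo {
    use super::*;
    #[test]
    fn decode_1002() {
        let instr = parse_instruction(1002);
        assert_eq!(instr.opcode, MULTIPLY);
        assert_eq!(instr.par1mode, ParameterMode::PositionMode);
        assert_eq!(instr.par2mode, ParameterMode::ImmediateMode);
        assert_eq!(instr.par3mode, ParameterMode::PositionMode);
    }
}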
| true
|
91cd87d302583a1ae5b867bc1e51f44ce540bbf5
|
Rust
|
marco-c/gecko-dev-wordified
|
/third_party/rust/warp/src/filters/compression.rs
|
UTF-8
| 8,280
| 2.65625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Compression Filters
//!
//! Filters that compress the body of a response.

#[cfg(feature = "compression-brotli")]
use async_compression::tokio::bufread::BrotliEncoder;

#[cfg(feature = "compression-gzip")]
use async_compression::tokio::bufread::{DeflateEncoder, GzipEncoder};

use http::header::HeaderValue;
use hyper::{
    header::{CONTENT_ENCODING, CONTENT_LENGTH},
    Body,
};
use tokio_util::io::{ReaderStream, StreamReader};

use crate::filter::{Filter, WrapSealed};
use crate::reject::IsReject;
use crate::reply::{Reply, Response};

use self::internal::{CompressionProps, WithCompression};

enum CompressionAlgo {
    #[cfg(feature = "compression-brotli")]
    BR,
    #[cfg(feature = "compression-gzip")]
    DEFLATE,
    #[cfg(feature = "compression-gzip")]
    GZIP,
}

impl From<CompressionAlgo> for HeaderValue {
    #[inline]
    fn from(algo: CompressionAlgo) -> Self {
        HeaderValue::from_static(match algo {
            #[cfg(feature = "compression-brotli")]
            CompressionAlgo::BR => "br",
            #[cfg(feature = "compression-gzip")]
            CompressionAlgo::DEFLATE => "deflate",
            #[cfg(feature = "compression-gzip")]
            CompressionAlgo::GZIP => "gzip",
        })
    }
}

/// Compression
#[derive(Clone, Copy, Debug)]
pub struct Compression<F> {
    func: F,
}

// TODO: The implementation of gzip(), deflate() and brotli() could be replaced with
// generics or a macro

/// Create a wrapping filter that compresses the Body of a [`Response`](crate::reply::Response)
/// using gzip, adding `content-encoding: gzip` to the Response's [`HeaderMap`](hyper::HeaderMap)
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::get()
///     .and(warp::path::end())
///     .and(warp::fs::file("./README.md"))
///     .with(warp::compression::gzip());
/// ```
#[cfg(feature = "compression-gzip")]
pub fn gzip() -> Compression<impl Fn(CompressionProps) -> Response + Copy> {
    let func = move |mut props: CompressionProps| {
        let body = Body::wrap_stream(ReaderStream::new(GzipEncoder::new(StreamReader::new(
            props.body,
        ))));
        props
            .head
            .headers
            .append(CONTENT_ENCODING, CompressionAlgo::GZIP.into());
        props.head.headers.remove(CONTENT_LENGTH);
        Response::from_parts(props.head, body)
    };
    Compression { func }
}

/// Create a wrapping filter that compresses the Body of a [`Response`](crate::reply::Response)
/// using deflate, adding `content-encoding: deflate` to the Response's [`HeaderMap`](hyper::HeaderMap)
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::get()
///     .and(warp::path::end())
///     .and(warp::fs::file("./README.md"))
///     .with(warp::compression::deflate());
/// ```
#[cfg(feature = "compression-gzip")]
pub fn deflate() -> Compression<impl Fn(CompressionProps) -> Response + Copy> {
    let func = move |mut props: CompressionProps| {
        let body = Body::wrap_stream(ReaderStream::new(DeflateEncoder::new(StreamReader::new(
            props.body,
        ))));
        props
            .head
            .headers
            .append(CONTENT_ENCODING, CompressionAlgo::DEFLATE.into());
        props.head.headers.remove(CONTENT_LENGTH);
        Response::from_parts(props.head, body)
    };
    Compression { func }
}

/// Create a wrapping filter that compresses the Body of a [`Response`](crate::reply::Response)
/// using brotli, adding `content-encoding: br` to the Response's [`HeaderMap`](hyper::HeaderMap)
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::get()
///     .and(warp::path::end())
///     .and(warp::fs::file("./README.md"))
///     .with(warp::compression::brotli());
/// ```
#[cfg(feature = "compression-brotli")]
pub fn brotli() -> Compression<impl Fn(CompressionProps) -> Response + Copy> {
    let func = move |mut props: CompressionProps| {
        let body = Body::wrap_stream(ReaderStream::new(BrotliEncoder::new(StreamReader::new(
            props.body,
        ))));
        props
            .head
            .headers
            .append(CONTENT_ENCODING, CompressionAlgo::BR.into());
        props.head.headers.remove(CONTENT_LENGTH);
        Response::from_parts(props.head, body)
    };
    Compression { func }
}

impl<FN, F> WrapSealed<F> for Compression<FN>
where
    FN: Fn(CompressionProps) -> Response + Clone + Send,
    F: Filter + Clone + Send,
    F::Extract: Reply,
    F::Error: IsReject,
{
    type Wrapped = WithCompression<FN, F>;

    fn wrap(&self, filter: F) -> Self::Wrapped {
        WithCompression {
            filter,
            compress: self.clone(),
        }
    }
}

mod internal {
    use std::future::Future;
    use std::pin::Pin;
    use std::task::{Context, Poll};

    use bytes::Bytes;
    use futures_util::{ready, Stream, TryFuture};
    use hyper::Body;
    use pin_project::pin_project;

    use crate::filter::{Filter, FilterBase, Internal};
    use crate::reject::IsReject;
    use crate::reply::{Reply, Response};

    use super::Compression;

    /// A wrapper around any type that implements [`Stream`](futures::Stream) to be
    /// compatible with async_compression's Stream based encoders
    #[pin_project]
    #[derive(Debug)]
    pub struct CompressableBody<S, E>
    where
        E: std::error::Error,
        S: Stream<Item = Result<Bytes, E>>,
    {
        #[pin]
        body: S,
    }

    impl<S, E> Stream for CompressableBody<S, E>
    where
        E: std::error::Error,
        S: Stream<Item = Result<Bytes, E>>,
    {
        type Item = std::io::Result<Bytes>;

        fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
            use std::io::{Error, ErrorKind};

            let pin = self.project();
            S::poll_next(pin.body, cx).map_err(|_| Error::from(ErrorKind::InvalidData))
        }
    }

    impl From<Body> for CompressableBody<Body, hyper::Error> {
        fn from(body: Body) -> Self {
            CompressableBody { body }
        }
    }

    /// Compression Props
    #[derive(Debug)]
    pub struct CompressionProps {
        pub(super) body: CompressableBody<Body, hyper::Error>,
        pub(super) head: http::response::Parts,
    }

    impl From<http::Response<Body>> for CompressionProps {
        fn from(resp: http::Response<Body>) -> Self {
            let (head, body) = resp.into_parts();
            CompressionProps {
                body: body.into(),
                head,
            }
        }
    }

    #[allow(missing_debug_implementations)]
    pub struct Compressed(pub(super) Response);

    impl Reply for Compressed {
        #[inline]
        fn into_response(self) -> Response {
            self.0
        }
    }

    #[allow(missing_debug_implementations)]
    #[derive(Clone, Copy)]
    pub struct WithCompression<FN, F> {
        pub(super) compress: Compression<FN>,
        pub(super) filter: F,
    }

    impl<FN, F> FilterBase for WithCompression<FN, F>
    where
        FN: Fn(CompressionProps) -> Response + Clone + Send,
        F: Filter + Clone + Send,
        F::Extract: Reply,
        F::Error: IsReject,
    {
        type Extract = (Compressed,);
        type Error = F::Error;
        type Future = WithCompressionFuture<FN, F::Future>;

        fn filter(&self, _: Internal) -> Self::Future {
            WithCompressionFuture {
                compress: self.compress.clone(),
                future: self.filter.filter(Internal),
            }
        }
    }

    #[allow(missing_debug_implementations)]
    #[pin_project]
    pub struct WithCompressionFuture<FN, F> {
        compress: Compression<FN>,
        #[pin]
        future: F,
    }

    impl<FN, F> Future for WithCompressionFuture<FN, F>
    where
        FN: Fn(CompressionProps) -> Response,
        F: TryFuture,
        F::Ok: Reply,
        F::Error: IsReject,
    {
        type Output = Result<(Compressed,), F::Error>;

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
            let pin = self.as_mut().project();
            let result = ready!(pin.future.try_poll(cx));
            match result {
                Ok(reply) => {
                    let resp = (self.compress.func)(reply.into_response().into());
                    Poll::Ready(Ok((Compressed(resp),)))
                }
                Err(reject) => Poll::Ready(Err(reject)),
            }
        }
    }
}
| true
|
d2dd9dcfc301957ce57dbbe4ff9e1dfcc7ba5433
|
Rust
|
gnoliyil/fuchsia
|
/src/sys/pkg/lib/far/rust/src/async_utf8_reader.rs
|
UTF-8
| 4,042
| 2.75
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::error::Error,
fuchsia_fs::file::{AsyncGetSize, AsyncReadAt},
};
/// A struct to open and read a FAR-formatted archive asynchronously.
/// Requires that all paths are valid UTF-8.
#[derive(Debug)]
pub struct AsyncUtf8Reader<T>
where
T: AsyncReadAt + AsyncGetSize + Unpin,
{
reader: crate::async_read::AsyncReader<T>,
}
impl<T> AsyncUtf8Reader<T>
where
T: AsyncReadAt + AsyncGetSize + Unpin,
{
/// Create a new AsyncUtf8Reader for the provided source.
pub async fn new(source: T) -> Result<Self, Error> {
let ret = Self { reader: crate::async_read::AsyncReader::new(source).await? };
let () = ret.try_list().try_for_each(|r| r.map(|_| ()))?;
Ok(ret)
}
/// Return a list of the items in the archive.
/// Individual items will error if their paths are not valid UTF-8.
fn try_list(&self) -> impl ExactSizeIterator<Item = Result<crate::Utf8Entry<'_>, Error>> {
self.reader.list().map(|e| {
Ok(crate::Utf8Entry {
path: std::str::from_utf8(e.path).map_err(|err| Error::PathDataInvalidUtf8 {
source: err,
path: e.path.into(),
})?,
offset: e.offset,
length: e.length,
})
})
}
/// Return a list of the items in the archive.
pub fn list(&self) -> impl ExactSizeIterator<Item = crate::Utf8Entry<'_>> {
self.try_list().map(|r| {
r.expect("AsyncUtf8Reader::new only succeeds if try_list succeeds for every element")
})
}
/// Read the entire contents of an entry with the specified path.
/// O(log(# directory entries))
pub async fn read_file(&mut self, path: &str) -> Result<Vec<u8>, Error> {
self.reader.read_file(path.as_bytes()).await
}
}
#[cfg(test)]
mod tests {
use {
super::*, assert_matches::assert_matches, fuchsia_async as fasync,
fuchsia_fs::file::Adapter, futures::io::Cursor,
};
#[fasync::run_singlethreaded(test)]
async fn new_rejects_non_utf8_path() {
let mut far_bytes = vec![];
let () = crate::write::write(
&mut far_bytes,
std::collections::BTreeMap::from_iter([(
b"\xff",
(0, Box::new("".as_bytes()) as Box<dyn std::io::Read>),
)]),
)
.unwrap();
assert_matches!(
AsyncUtf8Reader::new(Adapter::new(Cursor::new(far_bytes))).await,
Err(crate::Error::PathDataInvalidUtf8{source: _, path}) if path == b"\xff".to_vec()
);
}
#[fasync::run_singlethreaded(test)]
async fn list_does_not_panic() {
let mut far_bytes = vec![];
let () = crate::write::write(
&mut far_bytes,
std::collections::BTreeMap::from_iter([(
"valid-utf8",
(0, Box::new("".as_bytes()) as Box<dyn std::io::Read>),
)]),
)
.unwrap();
itertools::assert_equal(
AsyncUtf8Reader::new(Adapter::new(Cursor::new(far_bytes))).await.unwrap().list(),
[crate::Utf8Entry { path: "valid-utf8", offset: 4096, length: 0 }],
);
}
#[fasync::run_singlethreaded(test)]
async fn read_file() {
let mut far_bytes = vec![];
let () = crate::write::write(
&mut far_bytes,
std::collections::BTreeMap::from_iter([(
"valid-utf8",
(12, Box::new("test-content".as_bytes()) as Box<dyn std::io::Read>),
)]),
)
.unwrap();
assert_eq!(
AsyncUtf8Reader::new(Adapter::new(Cursor::new(far_bytes)))
.await
.unwrap()
.read_file("valid-utf8")
.await
.unwrap(),
b"test-content".to_vec()
);
}
}
| true
|
37da22c28c8bbaa6a86779cd8d1da5b449ddb771
|
Rust
|
johnolos/sbanken-cli
|
/src/cli.rs
|
UTF-8
| 7,342
| 2.75
| 3
|
[] |
no_license
|
use clap::{App, Arg, ArgGroup, SubCommand};
pub const VERSION: &str = "0.4.0";
pub fn build_cli() -> App<'static, 'static> {
App::new("sbanken-cli")
.version(VERSION)
.about("Your personal bank right in your favorite terminal")
.author("John-Olav Storvold")
.arg(
Arg::with_name("v")
.short("v")
.multiple(true)
.help("Sets level of verbosity"),
)
.arg(
Arg::with_name("color")
.short("c")
.long("color")
.help("Allows for colored output. Equal to SBANKEN_COLOR=1."),
)
.subcommand(
SubCommand::with_name("account")
.about("See account details")
.group(
ArgGroup::with_name("mode")
.args(&["account", "interactive"])
.required(false),
)
.arg(
Arg::with_name("account")
.short("a")
.long("account")
.required(false)
.help("Retrieve details for a specified account")
.takes_value(true),
)
.arg(
Arg::with_name("interactive")
.short("i")
.long("interactive")
.required(false)
.help("Interactively select which account to details"),
)
.arg(
Arg::with_name("list")
.short("l")
.long("list")
.required(false)
.help("Retrieve accounts as a list"),
)
.display_order(1),
)
.subcommand(
SubCommand::with_name("customer")
.about("display customer information")
.display_order(2),
)
.subcommand(
SubCommand::with_name("transaction")
.about("See transactions made on your accounts")
.groups(&[
ArgGroup::with_name("mode")
.args(&["account", "interactive"])
.required(true),
ArgGroup::with_name("optional_args")
.args(&["from", "to", "length"])
.multiple(true)
.requires("mode"),
])
.arg(
Arg::with_name("account")
.short("a")
.long("account")
.help("List transactions made on your account")
.takes_value(true),
)
.arg(
Arg::with_name("from")
.short("f")
.long("from")
.help(
"An start date, yyyy-mm-dd, to be used to narrow the results.\n\
Defaults to current time and date minus 30 days.",
)
.takes_value(true),
)
.arg(
Arg::with_name("interactive")
.short("i")
.long("interactive")
.required(false)
.help("Interactively select accounts to transfer"),
)
.arg(
Arg::with_name("to")
.short("t")
.long("to")
.help(
"An end date, yyyy-mm-dd, to be used to narrow the results.\n\
Defaults to current time and date.",
)
.takes_value(true),
)
.arg(
Arg::with_name("length")
.short("l")
.long("length")
.help("Number of transactions to be displayed")
.default_value("20")
.takes_value(true),
)
.display_order(3),
)
.subcommand(
SubCommand::with_name("transfer")
.about("Transfer between your accounts")
.arg(
Arg::with_name("amount")
.short("a")
.long("amount")
.takes_value(true)
.required(true)
.requires_all(&["from", "message", "to"])
.conflicts_with("interactive")
.help("Amount to transfer between accounts"),
)
.arg(
Arg::with_name("from")
.short("f")
.long("from")
.takes_value(true)
.required(true)
.requires_all(&["amount", "message", "to"])
.conflicts_with("interactive")
.help("From account you want to withdraw money from"),
)
.arg(
Arg::with_name("interactive")
.short("i")
.long("interactive")
.conflicts_with_all(&["amount", "from", "message", "to"])
.help("Interactively select accounts to transfer"),
)
.arg(
Arg::with_name("message")
.short("m")
.long("message")
.takes_value(true)
.required(true)
.requires_all(&["amount", "from", "to"])
.conflicts_with("interactive")
.help("Message to be recorded"),
)
.arg(
Arg::with_name("to")
.short("t")
.long("to")
.takes_value(true)
.required(true)
.requires_all(&["amount", "from", "message"])
.conflicts_with("interactive")
.help("To account you want to deposit money into"),
)
.display_order(4),
)
.subcommand(
SubCommand::with_name("generate-bash-completions")
.about("Generate completion script for bash")
.display_order(5),
)
.subcommand(
SubCommand::with_name("generate-zsh-completions")
.about("Generate completion script for zsh")
.display_order(6),
)
.subcommand(
SubCommand::with_name("generate-fish-completions")
.about("Generate completion script for fish")
.display_order(7),
)
.subcommand(
SubCommand::with_name("generate-powershell-completions")
.about("Generate completion script for PowerShell")
.display_order(8),
)
}
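
// A minimal usage sketch (an illustrative addition, not part of the original crate):
// how `build_cli` is typically consumed by a binary with clap 2.x. The subcommand and
// flag names mirror those declared above; the match arms are placeholders.
#[allow(dead_code)]
fn example_main() {
    let matches = build_cli().get_matches();
    if matches.is_present("color") {
        // enable colored output, equivalent to SBANKEN_COLOR=1
    }
    match matches.subcommand() {
        ("account", Some(sub)) => {
            let _list = sub.is_present("list");
            // dispatch to the account handler here
        }
        ("customer", _) => {
            // dispatch to the customer handler here
        }
        _ => {}
    }
}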
| true
|
b293bdf770e96468d3d1dc12716cfe74ee23056f
|
Rust
|
dylanNew/ralloc
|
/src/micro.rs
|
UTF-8
| 2,505
| 3
| 3
|
[
"MIT"
] |
permissive
|
//! Micro slots for caching small allocations.
// TODO needs tests and documentation.
use prelude::*;
use core::{marker, mem};
const CACHE_LINE_SIZE: usize = 128;
const CACHE_LINES: usize = 32;
/// A "microcache".
///
/// A microcache consists of some number of equal sized slots, whose state is stored as bitflags.
pub struct MicroCache {
free: u32,
lines: [CacheLine; CACHE_LINES],
}
impl MicroCache {
pub const fn new() -> MicroCache {
MicroCache {
free: !0,
lines: [CacheLine::new(); CACHE_LINES],
}
}
pub fn alloc(&mut self, size: usize, align: usize) -> Result<Block, ()> {
if size <= CACHE_LINE_SIZE && self.free != 0 {
let ind = self.free.trailing_zeros();
let line = &mut self.lines[ind as usize];
let res = unsafe { line.take(size) };
if res.aligned_to(align) {
self.free ^= 1u32.wrapping_shl(ind);
return Ok(res);
} else {
line.reset();
}
}
Err(())
}
pub fn free(&mut self, mut block: Block) -> Result<(), Block> {
let res = block.pop();
let ptr: Pointer<u8> = block.into();
let ind = (*ptr as usize - &self.lines as *const CacheLine as usize) / mem::size_of::<Block>();
if let Some(line) = self.lines.get_mut(ind) {
line.used -= res.size();
if line.used == 0 {
debug_assert!(self.free & 1u32.wrapping_shl(ind as u32) == 0, "Freeing a block \
already marked as free.");
self.free ^= 1u32.wrapping_shl(ind as u32);
}
Ok(())
} else {
Err(res)
}
}
}
#[derive(Clone, Copy)]
struct CacheLine {
/// The cache line's data.
///
/// We use `u32` as a hack to be able to derive `Copy`.
data: [u32; CACHE_LINE_SIZE / 4],
used: usize,
_static: marker::PhantomData<&'static mut [u8]>,
}
impl CacheLine {
pub const fn new() -> CacheLine {
CacheLine {
data: [0; CACHE_LINE_SIZE / 4],
used: 0,
_static: marker::PhantomData,
}
}
fn reset(&mut self) {
self.used = 0;
}
unsafe fn take(&mut self, size: usize) -> Block {
debug_assert!(self.used == 0, "Block not freed!");
self.used = size;
Block::from_raw_parts(Pointer::new(&mut self.data[0] as *mut u32 as *mut u8), size)
}
}
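
// A minimal usage sketch of the microcache (illustrative only; `Block` and `Pointer`
// come from the crate's prelude, so this is shown as a comment rather than a test):
//
//     let mut cache = MicroCache::new();
//     if let Ok(block) = cache.alloc(64, 8) {
//         // ... hand the block out to the caller, then take it back ...
//         let _ = cache.free(block);
//     }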
| true
|
0e8008ba5ac23383c294e6627990e3577dc2244d
|
Rust
|
mtib/hnfen
|
/src/types.rs
|
UTF-8
| 12,743
| 3
| 3
|
[] |
no_license
|
use std::convert::TryInto;
use crate::moves::{in_board, is_castle, is_corner, Direction, Move, Position};
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Board {
pub ranks: [Rank; 11],
pub next: Player,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Rank {
pub fields: [Option<Piece>; 11],
}
const WHITE: &str = "h";
const BLACK: &str = "a";
const KING: &str = "K";
const RANK_SEP: &str = "/";
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Piece {
Normal(Player),
King,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Player {
/// Starts, is attacker
Black,
/// Second, is defender
White,
}
pub trait Hnfen: Sized {
fn as_hnfen(&self) -> String;
fn from_hnfen(hnfen: &str) -> Option<Self>;
}
impl Hnfen for Player {
fn as_hnfen(&self) -> String {
match self {
Player::Black => BLACK,
Player::White => WHITE,
}
.to_string()
}
fn from_hnfen(hnfen: &str) -> Option<Self> {
match hnfen {
BLACK => Some(Player::Black),
WHITE => Some(Player::White),
_ => None,
}
}
}
impl Player {
pub fn opposite(&self) -> Player {
match self {
Player::Black => Player::White,
Player::White => Player::Black,
}
}
}
impl Hnfen for Piece {
fn as_hnfen(&self) -> String {
match self {
Piece::Normal(Player::Black) => BLACK,
Piece::Normal(Player::White) => WHITE,
Piece::King => KING,
}
.to_owned()
}
fn from_hnfen(hnfen: &str) -> Option<Self> {
Some(match hnfen {
BLACK => Piece::Normal(Player::Black),
WHITE => Piece::Normal(Player::White),
KING => Piece::King,
_ => return None,
})
}
}
impl Piece {
pub fn color(&self) -> Player {
match self {
Piece::Normal(c) => *c,
Piece::King => Player::White,
}
}
}
impl Hnfen for Rank {
fn as_hnfen(&self) -> String {
let mut empty_prec = 0;
let mut buf = String::new();
for k in self.fields.iter() {
if let Some(p) = k {
if empty_prec > 0 {
buf.push_str(&format!("{}", empty_prec));
empty_prec = 0;
}
buf.push_str(&p.as_hnfen());
} else {
empty_prec += 1;
}
}
if empty_prec > 0 {
buf.push_str(&format!("{}", empty_prec));
}
buf
}
fn from_hnfen(hnfen: &str) -> Option<Self> {
// NOTE this is when I realized that using multi-digit numbers makes the language context-sensitive.
let mut rank = Rank { fields: [None; 11] };
enum C {
Number(usize),
Character(Piece),
}
let mut groups: Vec<C> = Vec::new();
for k in hnfen.chars() {
match k {
_ if k.is_numeric() => {
if let Some(C::Number(c)) = groups.last_mut() {
*c = *c * 10 + k.to_digit(10).unwrap() as usize
} else {
groups.push(C::Number(k.to_digit(10).unwrap() as usize))
}
}
_ => groups.push(C::Character(Piece::from_hnfen(&k.to_string())?)),
};
}
let mut c_index = 0;
for group in groups.into_iter() {
match group {
C::Number(k) => {
c_index += k;
}
C::Character(p) => {
rank.fields[c_index] = Some(p);
c_index += 1;
}
}
}
if c_index != 11 {
None
} else {
Some(rank)
}
}
}
impl Default for Rank {
fn default() -> Self {
Rank { fields: [None; 11] }
}
}
impl Rank {
pub fn pretty(&self) -> String {
let mut buf = String::new();
for f in self.fields.iter() {
match f {
Some(p) => buf.push_str(&p.as_hnfen()),
None => buf.push(' '),
}
}
buf
}
}
impl Board {
pub fn get(&self, pos: &Position) -> Option<Piece> {
let (x, y) = pos.to_indices();
self.ranks[y].fields[x]
}
pub fn set(&mut self, pos: &Position, piece: &Option<Piece>) {
let (x, y) = pos.to_indices();
self.ranks[y].fields[x] = *piece;
}
pub fn pieces(&self, color: Player) -> Vec<Position> {
let mut pos = Vec::new();
for (y, rank) in self.ranks.iter().enumerate() {
for (x, piece) in rank.fields.iter().enumerate() {
match piece {
Some(Piece::Normal(c)) if *c == color => pos.push(Position::from_indices(x, y)),
Some(Piece::King) if color == Player::White => {
pos.push(Position::from_indices(x, y))
}
_ => {}
}
}
}
pos
}
pub fn king(&self) -> Option<Position> {
for (y, rank) in self.ranks.iter().enumerate() {
for (x, piece) in rank.fields.iter().enumerate() {
if let Some(Piece::King) = piece {
return Some(Position::from_indices(x, y));
}
}
}
None
}
pub fn king_escaped(&self) -> bool {
if let Some(pos) = self.king() {
let (x, y) = pos.to_indices();
is_corner(x, y)
} else {
false
}
}
/// Returns true if a king at position pos *would* be captured
pub fn is_king_capture(&self, pos: &Position) -> bool {
//println!("Potential king capture with board\n{}", self.pretty());
let pos = pos.to_indices();
for dir in Direction::card().iter() {
let dir_diff = dir.vector(1);
let check_place = (pos.0 as isize + dir_diff.0, pos.1 as isize + dir_diff.1);
if !in_board(check_place.0, check_place.1) {
return false; // not captured
}
let check_place = (check_place.0 as usize, check_place.1 as usize);
if is_castle(check_place.0, check_place.1) {
continue; // captured in this direction
}
if let Some(p) = self.get(&Position::from_indices(check_place.0, check_place.1)) {
if p.color() == Player::White {
return false; // not captured
} else {
continue; // captured in this direction
}
} else {
return false; // not captured
}
}
true
}
pub fn apply(&mut self, mov: &Move) {
let (x, y) = mov.from.to_indices();
let piece = if let Some(p) = self.ranks[y].fields[x] {
p
} else {
// Probably a nop move
return;
};
let move_color = piece.color();
self.ranks[y].fields[x] = None;
let (x, y) = mov.to.to_indices();
self.ranks[y].fields[x] = Some(piece);
for dir in Direction::card().iter() {
let dir_diff = dir.vector(1);
let check_place = (x as isize + dir_diff.0, y as isize + dir_diff.1);
if !in_board(check_place.0, check_place.1) {
continue;
}
let other_place = (check_place.0 as usize, check_place.1 as usize);
let other_is_king =
match self.get(&Position::from_indices(other_place.0, other_place.1)) {
Some(Piece::Normal(c)) if c != move_color => {
// Potential take of other_piece
false
}
Some(Piece::King) if move_color == Player::Black => {
// Potential take of king!
true
}
_ => {
// Nothing here to take, continue with next direction
continue;
}
};
let opposite_place = (
other_place.0 as isize + dir_diff.0,
other_place.1 as isize + dir_diff.1,
);
if !in_board(opposite_place.0, opposite_place.1) {
continue;
}
let opposite_place = (opposite_place.0 as usize, opposite_place.1 as usize);
if other_is_king {
if self.is_king_capture(&Position::from_indices(other_place.0, other_place.1)) {
// Took the king, that's pretty cool
self.set(&Position::from_indices(other_place.0, other_place.1), &None);
} else {
// Not taking the king
continue;
}
} else if let Some(p) =
self.get(&Position::from_indices(opposite_place.0, opposite_place.1))
{
// Is surrounded by other piece of move_color
if p.color() == move_color {
self.set(&Position::from_indices(other_place.0, other_place.1), &None);
}
}
}
self.next = move_color.opposite();
}
pub fn pretty(&self) -> String {
let mut pp = "╔═══════════╗\n".to_string();
pp.push_str(
&self
.ranks
.iter()
.map(|r| format!("║{}║", r.pretty()))
.collect::<Vec<String>>()
.join("\n"),
);
pp.push_str("\n╚═══════════╝");
pp
}
}
impl Hnfen for Board {
fn as_hnfen(&self) -> String {
let mut buf = String::new();
buf.push_str(
&self
.ranks
.iter()
.map(Rank::as_hnfen)
.collect::<Vec<String>>()
.join(RANK_SEP),
);
buf.push(' ');
buf.push_str(&self.next.as_hnfen());
buf
}
fn from_hnfen(hnfen: &str) -> Option<Self> {
let splits: Vec<&str> = hnfen.split_whitespace().collect();
Some(Board {
ranks: splits[0]
.split(RANK_SEP)
.map(Rank::from_hnfen)
.collect::<Option<Vec<Rank>>>()?
.try_into()
.unwrap(),
next: if let Some(s) = splits.get(1) {
Player::from_hnfen(s)?
} else {
Player::Black
},
})
}
}
impl Default for Board {
fn default() -> Self {
Board::from_hnfen(crate::DEFAULT_START_HNFEN).expect("default map should be good")
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn default_board() {
Board::default();
}
#[test]
fn test_rank_to_hnfen() {
let mut rank = Rank { fields: [None; 11] };
assert_eq!(rank.as_hnfen(), "11");
rank.fields[10] = Some(Piece::King);
assert_eq!(rank.as_hnfen(), "10K");
rank.fields[0] = Some(Piece::Normal(Player::Black));
assert_eq!(rank.as_hnfen(), "a9K");
rank.fields[5] = Some(Piece::Normal(Player::White)); // a....h....K
assert_eq!(rank.as_hnfen(), "a4h4K");
}
#[test]
fn test_rank_from_hnfen() {
let mut expected_success_cases = vec!["11", "10K", "K10", "a9K", "a4h4K"];
expected_success_cases.extend(
crate::DEFAULT_START_HNFEN
.split_ascii_whitespace()
.next()
.unwrap()
.split(RANK_SEP),
);
for case in expected_success_cases.into_iter() {
assert_eq!(Rank::from_hnfen(case).unwrap().as_hnfen(), case);
}
assert_eq!(Rank::from_hnfen("00011").unwrap().as_hnfen(), "11");
assert_eq!(Rank::from_hnfen("0a09a0").unwrap().as_hnfen(), "a9a");
}
#[test]
fn test_board_from_hnfen() {
assert_eq!(
Board::from_hnfen(crate::DEFAULT_START_HNFEN)
.unwrap()
.as_hnfen(),
crate::DEFAULT_START_HNFEN
);
}
#[test]
fn get_pieces_amount() {
let board = Board::default();
assert_eq!(board.pieces(Player::White).len(), 13);
assert_eq!(board.pieces(Player::Black).len(), 24);
}
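    #[test]
    fn default_board_has_king_off_corners() {
        // Illustrative addition: the starting position must contain a king, and the
        // king must not already sit on an escape corner.
        let board = Board::default();
        assert!(board.king().is_some());
        assert!(!board.king_escaped());
    }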
}
| true
|
68650989c0a226e06780c3383e31778204255054
|
Rust
|
boa-dev/boa
|
/boa_parser/src/error/tests.rs
|
UTF-8
| 4,765
| 3.140625
| 3
|
[
"MIT",
"Unlicense"
] |
permissive
|
use super::*;
#[test]
fn context() {
let result: ParseResult<String> = ParseResult::Err(Error::expected(
["testing".to_owned()],
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 1)),
"before",
));
assert_eq!(result.context(), Some("before"));
let result = result.set_context("after");
assert_eq!(result.context(), Some("after"));
let error = result.unwrap_err();
if let Error::Expected {
expected,
found,
span,
context,
} = error
{
assert_eq!(expected.as_ref(), &["testing".to_owned()]);
assert_eq!(found, "nottesting".into());
assert_eq!(span, Span::new(Position::new(1, 1), Position::new(1, 1)));
assert_eq!(context, "after");
} else {
unreachable!();
}
let err = Error::AbruptEnd;
assert!(err.context().is_none());
let err = err.set_context("ignored");
assert!(err.context().is_none());
}
#[test]
fn from_lex_error() {
let lex_err = LexError::syntax("testing", Position::new(1, 1));
let parse_err: Error = lex_err.into();
assert!(matches!(parse_err, Error::Lex { .. }));
let lex_err = LexError::syntax("testing", Position::new(1, 1));
let parse_err = Error::lex(lex_err);
assert!(matches!(parse_err, Error::Lex { .. }));
}
#[test]
fn misplaced_function_declaration() {
let err = Error::misplaced_function_declaration(Position::new(1, 1), false);
if let Error::General { message, position } = err {
assert_eq!(
message.as_ref(),
"functions can only be declared at the top level or inside a block."
);
assert_eq!(position, Position::new(1, 1));
} else {
unreachable!()
}
let err = Error::misplaced_function_declaration(Position::new(1, 1), true);
if let Error::General { message, position } = err {
assert_eq!(
message.as_ref(),
"in strict mode code, functions can only be declared at the top level or inside a block."
);
assert_eq!(position, Position::new(1, 1));
} else {
unreachable!()
}
}
#[test]
fn wrong_labelled_function_declaration() {
let err = Error::wrong_labelled_function_declaration(Position::new(1, 1));
if let Error::General { message, position } = err {
assert_eq!(
message.as_ref(),
"labelled functions can only be declared at the top level or inside a block"
);
assert_eq!(position, Position::new(1, 1));
} else {
unreachable!()
}
}
#[test]
fn display() {
let err = Error::expected(
["testing".to_owned()],
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 1)),
"context",
);
assert_eq!(
err.to_string(),
"expected token 'testing', got 'nottesting' in context at line 1, col 1"
);
let err = Error::expected(
["testing".to_owned(), "more".to_owned()],
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 3)),
"context",
);
assert_eq!(
err.to_string(),
"expected one of 'testing' or 'more', got 'nottesting' in context at line 1, col 1"
);
let err = Error::expected(
["testing".to_owned(), "more".to_owned(), "tokens".to_owned()],
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 3)),
"context",
);
assert_eq!(
err.to_string(),
"expected one of 'testing', 'more' or 'tokens', got 'nottesting' in context at line 1, col 1"
);
let err = Error::expected(
[
"testing".to_owned(),
"more".to_owned(),
"tokens".to_owned(),
"extra".to_owned(),
],
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 3)),
"context",
);
assert_eq!(
err.to_string(),
"expected one of 'testing', 'more', 'tokens' or 'extra', got 'nottesting' in context at line 1, col 1"
);
let err = Error::unexpected(
"nottesting",
Span::new(Position::new(1, 1), Position::new(1, 3)),
"error message",
);
assert_eq!(
err.to_string(),
"unexpected token 'nottesting', error message at line 1, col 1"
);
let err = Error::general("this is a general error message", Position::new(1, 1));
assert_eq!(
err.to_string(),
"this is a general error message at line 1, col 1"
);
let err = Error::AbruptEnd;
assert_eq!(err.to_string(), "abrupt end");
let lex_err = LexError::syntax("testing", Position::new(1, 1));
let err = Error::lex(lex_err);
assert_eq!(err.to_string(), "testing at line 1, col 1");
}
| true
|
3784501971bcb5ead64ae71e2a371c42828255cc
|
Rust
|
lopuhin/rust-broad-crawl
|
/src/downloader.rs
|
UTF-8
| 3,923
| 2.796875
| 3
|
[] |
no_license
|
use std::io;
use std::sync::mpsc;
use std::time::Duration;
use hyper;
use hyper::client::{Client, Request as HyperRequest, Response as HyperResponse,
DefaultTransport as HttpStream};
use hyper::header::{Connection, ContentType};
use hyper::{Decoder, Encoder, Next};
use hyper::status::StatusCode;
use hyper::header::{Headers, UserAgent};
use mime::Mime;
use mime::TopLevel::Text;
use mime::SubLevel::Html;
use request::Request;
use response::Response;
pub type ResultSender = mpsc::Sender<(Request, Option<Response>)>;
#[derive(Debug)]
pub struct Handler {
request: Request,
response: Option<Response>,
sender: ResultSender,
timeout: u64,
user_agent: String,
}
pub fn make_request(request: Request, client: &Client<Handler>, tx: ResultSender,
timeout: u64, user_agent: &str) {
let url = request.url.clone();
let handler = Handler {
request: request,
response: None,
sender: tx,
timeout: timeout,
user_agent: user_agent.to_owned(),
};
client.request(url, handler).unwrap();
}
fn is_html(headers: &Headers) -> bool {
match headers.get::<ContentType>() {
Some(content_type) => match content_type {
&ContentType(Mime(Text, Html, _)) => true,
_ => false
},
None => false
}
}
impl Handler {
fn read(&self) -> Next {
Next::read().timeout(Duration::from_secs(self.timeout))
}
fn return_response(&self) -> Next {
self.send_result();
Next::end()
}
fn send_result(&self) {
self.sender.send((self.request.clone(), self.response.clone())).unwrap();
}
}
impl hyper::client::Handler<HttpStream> for Handler {
fn on_request(&mut self, req: &mut HyperRequest) -> Next {
let mut headers = req.headers_mut();
headers.set(Connection::close());
headers.set(UserAgent(self.user_agent.clone()));
self.read()
}
fn on_request_writable(&mut self, _encoder: &mut Encoder<HttpStream>) -> Next {
self.read()
}
fn on_response(&mut self, response: HyperResponse) -> Next {
let status = response.status();
let headers = response.headers();
debug!("Got {} for {}", status, self.request.url);
self.response = Some(Response {
status: status.clone(),
headers: headers.clone(),
body: None
});
match status {
&StatusCode::Ok => {
if is_html(headers) {
self.read()
} else {
self.return_response()
}
},
_ => self.return_response()
}
}
fn on_response_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
let mut read_result = None;
if let Some(ref mut response) = self.response {
if response.body.is_none() {
response.body = Some(Vec::new());
}
if let Some(ref mut body) = response.body {
// TODO - check that this really appends data, not overrides
read_result = Some(io::copy(decoder, body));
}
}
if let Some(read_result) = read_result {
match read_result {
Ok(0) => self.return_response(),
Ok(_) => self.read(),
Err(e) => match e.kind() {
io::ErrorKind::WouldBlock => Next::read(),
_ => {
info!("Response read error for {}: {}", self.request.url, e);
self.return_response()
}
}
}
} else {
panic!();
}
}
fn on_error(&mut self, err: hyper::Error) -> Next {
info!("Http error for {}: {}", self.request.url, err);
self.send_result();
Next::remove()
}
}
| true
|
2610210e447b52eb395b594e7e3ade7e9990bdc3
|
Rust
|
bodoni/postscript
|
/src/compact1/encoding.rs
|
UTF-8
| 19,655
| 3.171875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! The glyph encodings.
use crate::compact1::{GlyphID, StringID};
use crate::{Result, Tape, Value};
/// A glyph encoding.
#[derive(Clone, Debug)]
pub enum Encoding {
Standard,
Expert,
Format0(Encoding0),
Format1(Encoding1),
FormatSupplemental(EncodingSupplemental),
}
table! {
#[doc = "An encoding in format 0."]
pub Encoding0 { // Format 0
format (u8) = { 0 }, // format
code_count (u8), // nCodes
codes (Vec<u8>) |this, tape| { // code
tape.take_given(this.code_count as usize)
},
}
}
table! {
#[doc = "An encoding in format 1."]
pub Encoding1 { // Format 1
format (u8) = { 1 }, // format
range_count (u8), // nRanges
ranges (Vec<Range1>) |this, tape| { // Range1
tape.take_given(this.range_count as usize)
},
}
}
table! {
#[doc = "An encoding in the supplemental format."]
pub EncodingSupplemental { // Supplemental Encoding Data
format (u8),
supplement_count (u8), // nSups
supplements (Vec<Supplement>) |this, tape| { // Supplement
tape.take_given(this.supplement_count as usize)
},
}
}
table! {
#[doc = "A range of an encoding in format 1."]
#[derive(Copy)]
pub Range1 {
first_code (u8), // first
left_count (u8), // nLeft
}
}
table! {
#[doc = "A supplement of an encoding in the supplemental format."]
#[derive(Copy)]
pub Supplement {
code (u8 ), // code
glyph (StringID), // glyph
}
}
impl Encoding {
/// Return the string identifier of a glyph.
pub fn get(&self, glyph_id: GlyphID) -> Option<StringID> {
match self {
Encoding::Standard => get_standard(glyph_id),
Encoding::Expert => get_expert(glyph_id),
Encoding::Format0(ref encoding) => encoding.get(glyph_id),
Encoding::Format1(ref encoding) => encoding.get(glyph_id),
Encoding::FormatSupplemental(ref encoding) => encoding.get(glyph_id),
}
}
}
impl Value for Encoding {
fn read<T: Tape>(tape: &mut T) -> Result<Self> {
Ok(match tape.peek::<u8>()? {
0 => Encoding::Format0(tape.take()?),
1 => Encoding::Format1(tape.take()?),
format if format & 0x80 > 0 => Encoding::FormatSupplemental(tape.take()?),
format => raise!("found an unknown format of encodings ({format})"),
})
}
}
impl Encoding0 {
#[inline]
fn get(&self, _: GlyphID) -> Option<StringID> {
None
}
}
impl Encoding1 {
#[inline]
fn get(&self, _: GlyphID) -> Option<StringID> {
None
}
}
impl EncodingSupplemental {
#[inline]
fn get(&self, _: GlyphID) -> Option<StringID> {
None
}
}
macro_rules! get(
($one:ident { $($glyph_id:pat => $string_id:expr => $name:expr,)+ }) => (
Some(match $one {
$($glyph_id => $string_id,)+
_ => return None,
})
);
);
fn get_standard(glyph_id: GlyphID) -> Option<StringID> {
get!(glyph_id {
0 => 0 => ".notdef",
1 => 0 => ".notdef",
2 => 0 => ".notdef",
3 => 0 => ".notdef",
4 => 0 => ".notdef",
5 => 0 => ".notdef",
6 => 0 => ".notdef",
7 => 0 => ".notdef",
8 => 0 => ".notdef",
9 => 0 => ".notdef",
10 => 0 => ".notdef",
11 => 0 => ".notdef",
12 => 0 => ".notdef",
13 => 0 => ".notdef",
14 => 0 => ".notdef",
15 => 0 => ".notdef",
16 => 0 => ".notdef",
17 => 0 => ".notdef",
18 => 0 => ".notdef",
19 => 0 => ".notdef",
20 => 0 => ".notdef",
21 => 0 => ".notdef",
22 => 0 => ".notdef",
23 => 0 => ".notdef",
24 => 0 => ".notdef",
25 => 0 => ".notdef",
26 => 0 => ".notdef",
27 => 0 => ".notdef",
28 => 0 => ".notdef",
29 => 0 => ".notdef",
30 => 0 => ".notdef",
31 => 0 => ".notdef",
32 => 1 => "space",
33 => 2 => "exclam",
34 => 3 => "quotedbl",
35 => 4 => "numbersign",
36 => 5 => "dollar",
37 => 6 => "percent",
38 => 7 => "ampersand",
39 => 8 => "quoteright",
40 => 9 => "parenleft",
41 => 10 => "parenright",
42 => 11 => "asterisk",
43 => 12 => "plus",
44 => 13 => "comma",
45 => 14 => "hyphen",
46 => 15 => "period",
47 => 16 => "slash",
48 => 17 => "zero",
49 => 18 => "one",
50 => 19 => "two",
51 => 20 => "three",
52 => 21 => "four",
53 => 22 => "five",
54 => 23 => "six",
55 => 24 => "seven",
56 => 25 => "eight",
57 => 26 => "nine",
58 => 27 => "colon",
59 => 28 => "semicolon",
60 => 29 => "less",
61 => 30 => "equal",
62 => 31 => "greater",
63 => 32 => "question",
64 => 33 => "at",
65 => 34 => "A",
66 => 35 => "B",
67 => 36 => "C",
68 => 37 => "D",
69 => 38 => "E",
70 => 39 => "F",
71 => 40 => "G",
72 => 41 => "H",
73 => 42 => "I",
74 => 43 => "J",
75 => 44 => "K",
76 => 45 => "L",
77 => 46 => "M",
78 => 47 => "N",
79 => 48 => "O",
80 => 49 => "P",
81 => 50 => "Q",
82 => 51 => "R",
83 => 52 => "S",
84 => 53 => "T",
85 => 54 => "U",
86 => 55 => "V",
87 => 56 => "W",
88 => 57 => "X",
89 => 58 => "Y",
90 => 59 => "Z",
91 => 60 => "bracketleft",
92 => 61 => "backslash",
93 => 62 => "bracketright",
94 => 63 => "asciicircum",
95 => 64 => "underscore",
96 => 65 => "quoteleft",
97 => 66 => "a",
98 => 67 => "b",
99 => 68 => "c",
100 => 69 => "d",
101 => 70 => "e",
102 => 71 => "f",
103 => 72 => "g",
104 => 73 => "h",
105 => 74 => "i",
106 => 75 => "j",
107 => 76 => "k",
108 => 77 => "l",
109 => 78 => "m",
110 => 79 => "n",
111 => 80 => "o",
112 => 81 => "p",
113 => 82 => "q",
114 => 83 => "r",
115 => 84 => "s",
116 => 85 => "t",
117 => 86 => "u",
118 => 87 => "v",
119 => 88 => "w",
120 => 89 => "x",
121 => 90 => "y",
122 => 91 => "z",
123 => 92 => "braceleft",
124 => 93 => "bar",
125 => 94 => "braceright",
126 => 95 => "asciitilde",
127 => 0 => ".notdef",
128 => 0 => ".notdef",
129 => 0 => ".notdef",
130 => 0 => ".notdef",
131 => 0 => ".notdef",
132 => 0 => ".notdef",
133 => 0 => ".notdef",
134 => 0 => ".notdef",
135 => 0 => ".notdef",
136 => 0 => ".notdef",
137 => 0 => ".notdef",
138 => 0 => ".notdef",
139 => 0 => ".notdef",
140 => 0 => ".notdef",
141 => 0 => ".notdef",
142 => 0 => ".notdef",
143 => 0 => ".notdef",
144 => 0 => ".notdef",
145 => 0 => ".notdef",
146 => 0 => ".notdef",
147 => 0 => ".notdef",
148 => 0 => ".notdef",
149 => 0 => ".notdef",
150 => 0 => ".notdef",
151 => 0 => ".notdef",
152 => 0 => ".notdef",
153 => 0 => ".notdef",
154 => 0 => ".notdef",
155 => 0 => ".notdef",
156 => 0 => ".notdef",
157 => 0 => ".notdef",
158 => 0 => ".notdef",
159 => 0 => ".notdef",
160 => 0 => ".notdef",
161 => 96 => "exclamdown",
162 => 97 => "cent",
163 => 98 => "sterling",
164 => 99 => "fraction",
165 => 100 => "yen",
166 => 101 => "florin",
167 => 102 => "section",
168 => 103 => "currency",
169 => 104 => "quotesingle",
170 => 105 => "quotedblleft",
171 => 106 => "guillemotleft",
172 => 107 => "guilsinglleft",
173 => 108 => "guilsinglright",
174 => 109 => "fi",
175 => 110 => "fl",
176 => 0 => ".notdef",
177 => 111 => "endash",
178 => 112 => "dagger",
179 => 113 => "daggerdbl",
180 => 114 => "periodcentered",
181 => 0 => ".notdef",
182 => 115 => "paragraph",
183 => 116 => "bullet",
184 => 117 => "quotesinglbase",
185 => 118 => "quotedblbase",
186 => 119 => "quotedblright",
187 => 120 => "guillemotright",
188 => 121 => "ellipsis",
189 => 122 => "perthousand",
190 => 0 => ".notdef",
191 => 123 => "questiondown",
192 => 0 => ".notdef",
193 => 124 => "grave",
194 => 125 => "acute",
195 => 126 => "circumflex",
196 => 127 => "tilde",
197 => 128 => "macron",
198 => 129 => "breve",
199 => 130 => "dotaccent",
200 => 131 => "dieresis",
201 => 0 => ".notdef",
202 => 132 => "ring",
203 => 133 => "cedilla",
204 => 0 => ".notdef",
205 => 134 => "hungarumlaut",
206 => 135 => "ogonek",
207 => 136 => "caron",
208 => 137 => "emdash",
209 => 0 => ".notdef",
210 => 0 => ".notdef",
211 => 0 => ".notdef",
212 => 0 => ".notdef",
213 => 0 => ".notdef",
214 => 0 => ".notdef",
215 => 0 => ".notdef",
216 => 0 => ".notdef",
217 => 0 => ".notdef",
218 => 0 => ".notdef",
219 => 0 => ".notdef",
220 => 0 => ".notdef",
221 => 0 => ".notdef",
222 => 0 => ".notdef",
223 => 0 => ".notdef",
224 => 0 => ".notdef",
225 => 138 => "AE",
226 => 0 => ".notdef",
227 => 139 => "ordfeminine",
228 => 0 => ".notdef",
229 => 0 => ".notdef",
230 => 0 => ".notdef",
231 => 0 => ".notdef",
232 => 140 => "Lslash",
233 => 141 => "Oslash",
234 => 142 => "OE",
235 => 143 => "ordmasculine",
236 => 0 => ".notdef",
237 => 0 => ".notdef",
238 => 0 => ".notdef",
239 => 0 => ".notdef",
240 => 0 => ".notdef",
241 => 144 => "ae",
242 => 0 => ".notdef",
243 => 0 => ".notdef",
244 => 0 => ".notdef",
245 => 145 => "dotlessi",
246 => 0 => ".notdef",
247 => 0 => ".notdef",
248 => 146 => "lslash",
249 => 147 => "oslash",
250 => 148 => "oe",
251 => 149 => "germandbls",
252 => 0 => ".notdef",
253 => 0 => ".notdef",
254 => 0 => ".notdef",
255 => 0 => ".notdef",
})
}
fn get_expert(glyph_id: GlyphID) -> Option<StringID> {
get!(glyph_id {
0 => 0 => ".notdef",
1 => 0 => ".notdef",
2 => 0 => ".notdef",
3 => 0 => ".notdef",
4 => 0 => ".notdef",
5 => 0 => ".notdef",
6 => 0 => ".notdef",
7 => 0 => ".notdef",
8 => 0 => ".notdef",
9 => 0 => ".notdef",
10 => 0 => ".notdef",
11 => 0 => ".notdef",
12 => 0 => ".notdef",
13 => 0 => ".notdef",
14 => 0 => ".notdef",
15 => 0 => ".notdef",
16 => 0 => ".notdef",
17 => 0 => ".notdef",
18 => 0 => ".notdef",
19 => 0 => ".notdef",
20 => 0 => ".notdef",
21 => 0 => ".notdef",
22 => 0 => ".notdef",
23 => 0 => ".notdef",
24 => 0 => ".notdef",
25 => 0 => ".notdef",
26 => 0 => ".notdef",
27 => 0 => ".notdef",
28 => 0 => ".notdef",
29 => 0 => ".notdef",
30 => 0 => ".notdef",
31 => 0 => ".notdef",
32 => 1 => "space",
33 => 229 => "exclamsmall",
34 => 230 => "Hungarumlautsmall",
35 => 0 => ".notdef",
36 => 231 => "dollaroldstyle",
37 => 232 => "dollarsuperior",
38 => 233 => "ampersandsmall",
39 => 234 => "Acutesmall",
40 => 235 => "parenleftsuperior",
41 => 236 => "parenrightsuperior",
42 => 237 => "twodotenleader",
43 => 238 => "onedotenleader",
44 => 13 => "comma",
45 => 14 => "hyphen",
46 => 15 => "period",
47 => 99 => "fraction",
48 => 239 => "zerooldstyle",
49 => 240 => "oneoldstyle",
50 => 241 => "twooldstyle",
51 => 242 => "threeoldstyle",
52 => 243 => "fouroldstyle",
53 => 244 => "fiveoldstyle",
54 => 245 => "sixoldstyle",
55 => 246 => "sevenoldstyle",
56 => 247 => "eightoldstyle",
57 => 248 => "nineoldstyle",
58 => 27 => "colon",
59 => 28 => "semicolon",
60 => 249 => "commasuperior",
61 => 250 => "threequartersemdash",
62 => 251 => "periodsuperior",
63 => 252 => "questionsmall",
64 => 0 => ".notdef",
65 => 253 => "asuperior",
66 => 254 => "bsuperior",
67 => 255 => "centsuperior",
68 => 256 => "dsuperior",
69 => 257 => "esuperior",
70 => 0 => ".notdef",
71 => 0 => ".notdef",
72 => 0 => ".notdef",
73 => 258 => "isuperior",
74 => 0 => ".notdef",
75 => 0 => ".notdef",
76 => 259 => "lsuperior",
77 => 260 => "msuperior",
78 => 261 => "nsuperior",
79 => 262 => "osuperior",
80 => 0 => ".notdef",
81 => 0 => ".notdef",
82 => 263 => "rsuperior",
83 => 264 => "ssuperior",
84 => 265 => "tsuperior",
85 => 0 => ".notdef",
86 => 266 => "ff",
87 => 109 => "fi",
88 => 110 => "fl",
89 => 267 => "ffi",
90 => 268 => "ffl",
91 => 269 => "parenleftinferior",
92 => 0 => ".notdef",
93 => 270 => "parenrightinferior",
94 => 271 => "Circumflexsmall",
95 => 272 => "hyphensuperior",
96 => 273 => "Gravesmall",
97 => 274 => "Asmall",
98 => 275 => "Bsmall",
99 => 276 => "Csmall",
100 => 277 => "Dsmall",
101 => 278 => "Esmall",
102 => 279 => "Fsmall",
103 => 280 => "Gsmall",
104 => 281 => "Hsmall",
105 => 282 => "Ismall",
106 => 283 => "Jsmall",
107 => 284 => "Ksmall",
108 => 285 => "Lsmall",
109 => 286 => "Msmall",
110 => 287 => "Nsmall",
111 => 288 => "Osmall",
112 => 289 => "Psmall",
113 => 290 => "Qsmall",
114 => 291 => "Rsmall",
115 => 292 => "Ssmall",
116 => 293 => "Tsmall",
117 => 294 => "Usmall",
118 => 295 => "Vsmall",
119 => 296 => "Wsmall",
120 => 297 => "Xsmall",
121 => 298 => "Ysmall",
122 => 299 => "Zsmall",
123 => 300 => "colonmonetary",
124 => 301 => "onefitted",
125 => 302 => "rupiah",
126 => 303 => "Tildesmall",
127 => 0 => ".notdef",
128 => 0 => ".notdef",
129 => 0 => ".notdef",
130 => 0 => ".notdef",
131 => 0 => ".notdef",
132 => 0 => ".notdef",
133 => 0 => ".notdef",
134 => 0 => ".notdef",
135 => 0 => ".notdef",
136 => 0 => ".notdef",
137 => 0 => ".notdef",
138 => 0 => ".notdef",
139 => 0 => ".notdef",
140 => 0 => ".notdef",
141 => 0 => ".notdef",
142 => 0 => ".notdef",
143 => 0 => ".notdef",
144 => 0 => ".notdef",
145 => 0 => ".notdef",
146 => 0 => ".notdef",
147 => 0 => ".notdef",
148 => 0 => ".notdef",
149 => 0 => ".notdef",
150 => 0 => ".notdef",
151 => 0 => ".notdef",
152 => 0 => ".notdef",
153 => 0 => ".notdef",
154 => 0 => ".notdef",
155 => 0 => ".notdef",
156 => 0 => ".notdef",
157 => 0 => ".notdef",
158 => 0 => ".notdef",
159 => 0 => ".notdef",
160 => 0 => ".notdef",
161 => 304 => "exclamdownsmall",
162 => 305 => "centoldstyle",
163 => 306 => "Lslashsmall",
164 => 0 => ".notdef",
165 => 0 => ".notdef",
166 => 307 => "Scaronsmall",
167 => 308 => "Zcaronsmall",
168 => 309 => "Dieresissmall",
169 => 310 => "Brevesmall",
170 => 311 => "Caronsmall",
171 => 0 => ".notdef",
172 => 312 => "Dotaccentsmall",
173 => 0 => ".notdef",
174 => 0 => ".notdef",
175 => 313 => "Macronsmall",
176 => 0 => ".notdef",
177 => 0 => ".notdef",
178 => 314 => "figuredash",
179 => 315 => "hypheninferior",
180 => 0 => ".notdef",
181 => 0 => ".notdef",
182 => 316 => "Ogoneksmall",
183 => 317 => "Ringsmall",
184 => 318 => "Cedillasmall",
185 => 0 => ".notdef",
186 => 0 => ".notdef",
187 => 0 => ".notdef",
188 => 158 => "onequarter",
189 => 155 => "onehalf",
190 => 163 => "threequarters",
191 => 319 => "questiondownsmall",
192 => 320 => "oneeighth",
193 => 321 => "threeeighths",
194 => 322 => "fiveeighths",
195 => 323 => "seveneighths",
196 => 324 => "onethird",
197 => 325 => "twothirds",
198 => 0 => ".notdef",
199 => 0 => ".notdef",
200 => 326 => "zerosuperior",
201 => 150 => "onesuperior",
202 => 164 => "twosuperior",
203 => 169 => "threesuperior",
204 => 327 => "foursuperior",
205 => 328 => "fivesuperior",
206 => 329 => "sixsuperior",
207 => 330 => "sevensuperior",
208 => 331 => "eightsuperior",
209 => 332 => "ninesuperior",
210 => 333 => "zeroinferior",
211 => 334 => "oneinferior",
212 => 335 => "twoinferior",
213 => 336 => "threeinferior",
214 => 337 => "fourinferior",
215 => 338 => "fiveinferior",
216 => 339 => "sixinferior",
217 => 340 => "seveninferior",
218 => 341 => "eightinferior",
219 => 342 => "nineinferior",
220 => 343 => "centinferior",
221 => 344 => "dollarinferior",
222 => 345 => "periodinferior",
223 => 346 => "commainferior",
224 => 347 => "Agravesmall",
225 => 348 => "Aacutesmall",
226 => 349 => "Acircumflexsmall",
227 => 350 => "Atildesmall",
228 => 351 => "Adieresissmall",
229 => 352 => "Aringsmall",
230 => 353 => "AEsmall",
231 => 354 => "Ccedillasmall",
232 => 355 => "Egravesmall",
233 => 356 => "Eacutesmall",
234 => 357 => "Ecircumflexsmall",
235 => 358 => "Edieresissmall",
236 => 359 => "Igravesmall",
237 => 360 => "Iacutesmall",
238 => 361 => "Icircumflexsmall",
239 => 362 => "Idieresissmall",
240 => 363 => "Ethsmall",
241 => 364 => "Ntildesmall",
242 => 365 => "Ogravesmall",
243 => 366 => "Oacutesmall",
244 => 367 => "Ocircumflexsmall",
245 => 368 => "Otildesmall",
246 => 369 => "Odieresissmall",
247 => 370 => "OEsmall",
248 => 371 => "Oslashsmall",
249 => 372 => "Ugravesmall",
250 => 373 => "Uacutesmall",
251 => 374 => "Ucircumflexsmall",
252 => 375 => "Udieresissmall",
253 => 376 => "Yacutesmall",
254 => 377 => "Thornsmall",
255 => 378 => "Ydieresissmall",
})
}
| true
|
0475b793a6c48ee579924cfbd07182ce13eb06ba
|
Rust
|
The-Peso-G/yarte
|
/yarte_hir/src/visit_partial.rs
|
UTF-8
| 3,082
| 2.828125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::{collections::BTreeMap, mem};
use quote::quote;
use syn::visit::Visit;
use super::{is_tuple_index, validator};
pub fn visit_partial(e: &[syn::Expr]) -> (BTreeMap<String, &syn::Expr>, Option<&syn::Expr>) {
PartialBuilder::default().build(e)
}
struct PartialBuilder<'a> {
ident: String,
ctx: BTreeMap<String, &'a syn::Expr>,
scope: Option<&'a syn::Expr>,
}
impl<'a> Default for PartialBuilder<'a> {
fn default() -> Self {
Self {
ident: String::new(),
ctx: BTreeMap::new(),
scope: None,
}
}
}
macro_rules! panic_some {
($some:expr) => {
if $some.is_some() {
panic!("Not available in a template expression");
}
};
}
impl<'a> PartialBuilder<'a> {
fn build(
mut self,
e: &'a [syn::Expr],
) -> (BTreeMap<String, &'a syn::Expr>, Option<&'a syn::Expr>) {
debug_assert_ne!(e.len(), 0);
use syn::Expr::*;
match &e[0] {
Assign(assign) => self.visit_expr_assign(&assign),
e @ Path(..) => self.scope = Some(e),
_ => panic!("Not available in partial argument:\n{}", quote!(#(#e ,)*)),
}
for i in (&e[1..]).iter() {
match i {
Assign(assign) => self.visit_expr_assign(&assign),
Path(..) => panic!("place scope argument `{}` at first position", quote!(#i)),
_ => panic!("Not available in partial argument:\n{}", quote!(#i)),
}
}
(self.ctx, self.scope)
}
}
impl<'a> Visit<'a> for PartialBuilder<'a> {
fn visit_expr(&mut self, i: &'a syn::Expr) {
use syn::Expr::*;
match *i {
Path(ref e) => {
panic_some!(e.qself);
panic_some!(e.path.leading_colon);
if !self.ident.is_empty() {
panic!("Empty buffer before");
}
if e.path.segments.len() != 1 {
panic!(
"Not available Rust expression in partial scope argument:\n{}",
quote!(#i)
)
}
let ident = e.path.segments[0].ident.to_string();
if RESERVED_WORDS.contains(&ident.as_str()) || is_tuple_index(ident.as_bytes()) {
panic!(
"Reserved word `{}` in partial assign argument:\n{}",
ident,
quote!(#i)
);
}
self.ident = ident;
}
_ => panic!(
"Not available Rust expression in partial argument:\n{}",
quote!(#i)
),
}
}
fn visit_expr_assign(&mut self, i: &'a syn::ExprAssign) {
validator::partial_assign(&i.right);
self.visit_expr(&i.left);
panic_some!(self
.ctx
.insert(mem::replace(&mut self.ident, String::new()), &i.right));
}
}
static RESERVED_WORDS: &[&str; 2] = &["self", "super"];
| true
|
605ed4786b8ef29b1e1c8a53fe0e5af9f09be748
|
Rust
|
sugyan/leetcode
|
/others/june-leetcoding-challenge/week-3/3362/lib.rs
|
UTF-8
| 1,215
| 3.4375
| 3
|
[] |
no_license
|
pub struct Solution {}
impl Solution {
pub fn valid_ip_address(ip: String) -> String {
let s4: Vec<&str> = ip.split('.').collect();
let s6: Vec<&str> = ip.split(':').collect();
if s4.len() == 4
&& s4
.iter()
.all(|s| (s == &"0" || !s.starts_with('0')) && s.parse::<u8>().ok().is_some())
{
return "IPv4".to_string();
}
if s6.len() == 8
&& s6
.iter()
.all(|s| s.len() <= 4 && u16::from_str_radix(&s, 16).ok().is_some())
{
return "IPv6".to_string();
}
"Neither".to_string()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn example_1() {
assert_eq!(
"IPv4",
Solution::valid_ip_address("172.16.254.1".to_string())
);
}
#[test]
fn example_2() {
assert_eq!(
"IPv6",
Solution::valid_ip_address("2001:0db8:85a3:0:0:8A2E:0370:7334".to_string())
);
}
#[test]
fn example_3() {
assert_eq!(
"Neither",
Solution::valid_ip_address("256.256.256.256".to_string())
);
}
}
| true
|
f639ee28838831a217ad079f86d656a8d87c073b
|
Rust
|
TechGuard/advent_of_code_2018
|
/day08/src/main.rs
|
UTF-8
| 1,531
| 3.4375
| 3
|
[] |
no_license
|
use std::io::{self, Read};
use std::slice::Iter;
fn main() {
let mut input = String::new();
io::stdin()
.read_to_string(&mut input)
.expect("Expected input");
// Parse input
let data = input
.split(' ')
.map(|s| s.parse::<u32>().unwrap())
.collect::<Vec<u32>>();
// Read all nodes
let root = read_node(&mut data.iter());
println!("1st Answer = {}", root.get_answer1());
println!("2st Answer = {}", root.get_answer2());
}
#[derive(Debug, Default)]
struct Node {
children: Vec<Node>,
metadata: Vec<u32>,
}
impl Node {
fn get_answer1(&self) -> u32 {
let total = self.children.iter().map(|c| c.get_answer1()).sum::<u32>();
total + self.metadata.iter().sum::<u32>()
}
fn get_answer2(&self) -> u32 {
if self.children.is_empty() {
return self.metadata.iter().sum();
}
let mut total = 0;
for &idx in &self.metadata {
if idx > 0 && idx <= self.children.len() as u32 {
total += self.children[(idx - 1) as usize].get_answer2();
}
}
return total;
}
}
fn read_node(mut data: &mut Iter<u32>) -> Node {
let child_count = data.next().unwrap();
let meta_count = data.next().unwrap();
let mut children = vec![];
for _ in 0..*child_count {
children.push(read_node(&mut data));
}
Node {
children: children,
metadata: data.take(*meta_count as usize).map(|x| *x).collect(),
}
}
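
// Illustrative test (an addition; the sample tree and the expected answers 138 and 66
// come from the Advent of Code 2018 day 8 problem statement):
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn sample_tree() {
        let data: Vec<u32> = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"
            .split(' ')
            .map(|s| s.parse().unwrap())
            .collect();
        let root = read_node(&mut data.iter());
        assert_eq!(root.get_answer1(), 138);
        assert_eq!(root.get_answer2(), 66);
    }
}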
| true
|
e125f7a809eec8d07b8dcd229425d0154d5b4eae
|
Rust
|
A-Team-Rowan-University/a-team-website
|
/backend/webdev_lib/src/users/models.rs
|
UTF-8
| 4,892
| 2.5625
| 3
|
[] |
no_license
|
use diesel::Queryable;
use rouille::router;
use serde::Deserialize;
use serde::Serialize;
use url::form_urlencoded;
use log::warn;
use super::schema::users;
use crate::permissions::models::Permission;
use crate::errors::Error;
use crate::errors::ErrorKind;
use crate::search::Search;
#[derive(Queryable, Debug)]
pub struct RawUser {
pub id: u64,
pub first_name: String,
pub last_name: String,
pub banner_id: u32,
pub email: String,
}
#[derive(Insertable, Debug)]
#[table_name = "users"]
pub struct NewRawUser {
pub first_name: String,
pub last_name: String,
pub banner_id: u32,
pub email: String,
}
#[derive(Queryable, Debug)]
pub struct JoinedUser {
pub user: RawUser,
pub permission: Option<Permission>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct User {
pub id: u64,
pub first_name: String,
pub last_name: String,
pub banner_id: u32,
pub email: String,
pub permissions: Vec<Permission>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct NewUser {
pub first_name: String,
pub last_name: String,
pub banner_id: u32,
pub email: String,
pub permissions: Vec<u64>,
}
#[derive(Debug, AsChangeset, Serialize, Deserialize)]
#[table_name = "users"]
pub struct PartialUser {
pub first_name: Option<String>,
pub last_name: Option<String>,
pub banner_id: Option<u32>,
pub email: Option<String>,
}
#[derive(Debug)]
pub struct SearchUser {
pub first_name: Search<String>,
pub last_name: Search<String>,
pub banner_id: Search<u32>,
pub email: Search<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct UserList {
pub users: Vec<User>,
}
#[derive(Debug)]
pub enum UserRequest {
SearchUsers(SearchUser),
Current,
GetUser(u64),
CreateUser(NewUser),
UpdateUser(u64, PartialUser),
DeleteUser(u64),
}
impl UserRequest {
pub fn from_rouille(request: &rouille::Request) -> Result<UserRequest, Error> {
let url_queries = form_urlencoded::parse(request.raw_query_string().as_bytes());
router!(request,
(GET) (/) => {
let mut first_name_search = Search::NoSearch;
let mut last_name_search = Search::NoSearch;
let mut banner_id_search = Search::NoSearch;
let mut email_search = Search::NoSearch;
for (field, query) in url_queries {
match field.as_ref() {
"first_name" => first_name_search =
Search::from_query(query.as_ref())?,
"last_name" => last_name_search =
Search::from_query(query.as_ref())?,
"banner_id" => banner_id_search =
Search::from_query(query.as_ref())?,
"email" => email_search =
Search::from_query(query.as_ref())?,
_ => return Err(Error::new(ErrorKind::Url)),
}
}
Ok(UserRequest::SearchUsers(SearchUser {
first_name: first_name_search,
last_name: last_name_search,
banner_id: banner_id_search,
email: email_search,
}))
},
(GET) (/current) => {
Ok(UserRequest::Current)
},
(GET) (/{id: u64}) => {
Ok(UserRequest::GetUser(id))
},
(POST) (/) => {
let request_body = request.data()
.ok_or(Error::new(ErrorKind::Body))?;
let new_user: NewUser =
serde_json::from_reader(request_body)?;
Ok(UserRequest::CreateUser(new_user))
},
(PUT) (/{id: u64}) => {
let request_body = request.data()
.ok_or(Error::new(ErrorKind::Body))?;
let update_user: PartialUser
= serde_json::from_reader(request_body)?;
Ok(UserRequest::UpdateUser(id, update_user))
},
(DELETE) (/{id: u64}) => {
Ok(UserRequest::DeleteUser(id))
},
_ => {
warn!("Could not create a user request for the given rouille request");
Err(Error::new(ErrorKind::NotFound))
}
)
}
}
#[derive(Debug)]
pub enum UserResponse {
OneUser(User),
ManyUsers(UserList),
NoResponse,
}
impl UserResponse {
pub fn to_rouille(self) -> rouille::Response {
match self {
UserResponse::OneUser(user) => rouille::Response::json(&user),
UserResponse::ManyUsers(users) => rouille::Response::json(&users),
UserResponse::NoResponse => rouille::Response::empty_204(),
}
}
}
| true
|
e9a81df6614e378f5d1f74bdb683b029adf5f3e8
|
Rust
|
cffi-rs/cffi
|
/impl/src/ext.rs
|
UTF-8
| 3,512
| 2.609375
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::fmt::Display;
use quote::quote;
pub trait ForeignArgExt {
fn to_foreign_param(&self) -> Result<syn::PatType, syn::Error>;
fn to_foreign_arg(&self) -> Result<syn::Pat, syn::Error>;
}
impl ForeignArgExt for syn::PatType {
fn to_foreign_param(&self) -> Result<syn::PatType, syn::Error> {
Ok(syn::PatType {
ty: Box::new(self.ty.to_foreign_type()?),
..self.clone()
}
.into())
}
fn to_foreign_arg(&self) -> Result<syn::Pat, syn::Error> {
Ok(*self.pat.clone())
}
}
impl ForeignArgExt for syn::Receiver {
fn to_foreign_param(&self) -> Result<syn::PatType, syn::Error> {
Ok(syn::PatType {
attrs: vec![],
pat: syn::parse2(quote! { __handle }).unwrap(),
colon_token: <syn::Token![:]>::default(),
ty: Box::new(syn::parse2(quote! { *const ::std::ffi::c_void }).unwrap()),
})
}
fn to_foreign_arg(&self) -> Result<syn::Pat, syn::Error> {
Ok(syn::parse2(quote! { __handle }).unwrap())
}
}
impl ForeignArgExt for syn::FnArg {
fn to_foreign_param(&self) -> Result<syn::PatType, syn::Error> {
match self {
syn::FnArg::Typed(arg) => arg.to_foreign_param(),
syn::FnArg::Receiver(receiver) => receiver.to_foreign_param(),
}
}
fn to_foreign_arg(&self) -> Result<syn::Pat, syn::Error> {
match self {
syn::FnArg::Typed(arg) => arg.to_foreign_arg(),
syn::FnArg::Receiver(receiver) => receiver.to_foreign_arg(),
}
}
}
pub trait ForeignTypeExt {
fn to_foreign_type(&self) -> Result<syn::Type, syn::Error>;
}
impl ForeignTypeExt for syn::Type {
fn to_foreign_type(&self) -> Result<syn::Type, syn::Error> {
match &self {
syn::Type::Path(..) | syn::Type::Reference(..) => {}
syn::Type::BareFn(bare_fn) => {
if bare_fn.abi.is_some() {
// This one is safe to pass through.
return Ok(self.clone());
}
return Err(syn::Error::new_spanned(
self,
"Non-extern-C fn parameters not supported",
));
}
syn::Type::Tuple(..) => {
return Err(syn::Error::new_spanned(
self,
"Tuple parameters not supported",
))
}
_ => {
return Err(syn::Error::new_spanned(
self,
"Unknown parameters not supported",
))
}
}
// Special case for bool
let bool_ty: syn::Type = syn::parse_str("bool").unwrap();
if self == &bool_ty {
return Ok(syn::parse2(quote! { /* bool */ u8 }).unwrap());
}
match crate::is_passthrough_type(self) {
true => Ok(self.clone()),
false => {
let c_ty: syn::Type = syn::parse2(quote! { *const ::std::ffi::c_void }).unwrap();
Ok(c_ty)
}
}
}
}
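// Illustrative check (added sketch, not part of the original crate): the special
// case above maps `bool` to the C-compatible `u8` in the generated foreign type.
#[cfg(test)]
mod bool_mapping_tests {
    use super::*;

    #[test]
    fn bool_becomes_u8() {
        let ty: syn::Type = syn::parse_str("bool").unwrap();
        let foreign = ty.to_foreign_type().unwrap();
        assert_eq!(quote!(#foreign).to_string(), "u8");
    }
}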
pub trait ErrorExt<T> {
fn context(self, msg: impl Display) -> Result<T, syn::Error>;
}
impl<T> ErrorExt<T> for Result<T, syn::Error> {
fn context(self, msg: impl Display) -> Self {
match self {
Err(err) => Err(syn::Error::new(
err.span(),
format!("{}: {}", msg, err.to_string()),
)),
x => x,
}
}
}
| true
|
475e2b72975caa2bab8ffdd8f3141431fcbfb129
|
Rust
|
Roughsketch/advent2018
|
/lib/day4/src/entry.rs
|
UTF-8
| 1,305
| 3.640625
| 4
|
[] |
no_license
|
use std::cmp::Ordering;
use crate::time::Time;
#[derive(Debug, Eq, PartialEq)]
pub enum Event {
Shift(i32),
Sleep,
Wake,
}
#[derive(Debug, Eq, PartialEq)]
pub struct LogEntry {
pub time: Time,
pub event: Event,
}
impl Ord for LogEntry {
fn cmp(&self, other: &LogEntry) -> Ordering {
self.time.cmp(&other.time)
}
}
impl PartialOrd for LogEntry {
fn partial_cmp(&self, other: &LogEntry) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl LogEntry {
pub fn new(entry: &str) -> Self {
let split = entry.split(|c: char| !c.is_numeric())
.filter(|s| !s.is_empty())
.map(|num| num.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
let time = Time::new(
split[0], // year
split[1], // month
split[2], // day
split[3], // hour
split[4], // minute
);
let event = if split.len() == 6 {
Event::Shift(split[5])
} else if entry.contains("wakes up") {
Event::Wake
} else if entry.contains("falls asleep") {
Event::Sleep
} else {
unreachable!("Invalid event: {:?}", entry);
};
Self {
time, event
}
}
}
| true
|
4b6a25525982b9d0e019cab13a8ad3c112cc541d
|
Rust
|
foleyj2/rust-examples
|
/rust-book/10-generics/src/traits.rs
|
UTF-8
| 5,628
| 3.515625
| 4
|
[
"MIT"
] |
permissive
|
use std::fmt::Display;
pub fn traits_main() {
//testtweet();
test_article_default();
// testtweet2();
// largest_test();
}
// Kristjan's question: enums with traits?
pub enum MyEnum {
X,
Y,
}
impl Summary for MyEnum {
fn summarize(&self) -> String {
match self {
MyEnum::X => format!("I'm a MyEnum enum X"),
MyEnum::Y => format!("I'm a MyEnum enum Y"),
}
}
}
pub trait Summary {
fn summarize(&self) -> String {
String::from("(Read more...)")
}
}
pub struct NewsArticle {
pub headline: String,
pub location: String,
pub author: String,
pub content: String,
}
impl Summary for NewsArticle {
fn summarize(&self) -> String {
format!("{}, by {} ({})", self.headline, self.author, self.location)
}
}
//impl Summary for NewsArticle {}//Use default implementation
pub struct Tweet {
pub username: String,
pub content: String,
pub reply: bool,
pub retweet: bool,
}
impl Summary for Tweet {
fn summarize(&self) -> String {
format!("{}: {}", self.username, self.content)
}
}
pub fn testtweet() {
let tweet = Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably know, people"),
reply: false,
retweet: false,
};
println!("1 new tweet: {}", tweet.summarize());
}
pub trait Summary2 {
fn summarize_author(&self) -> String; // needs to be implemented
fn summarize(&self) -> String {
// default implementations
format!("(Read more from {}...)", self.summarize_author())
}
}
pub fn test_article_default() {
let article = NewsArticle {
headline: String::from("Penguins win the Stanley Cup Championship!"),
location: String::from("Pittsburgh, PA, USA"),
author: String::from("Iceburgh"),
content: String::from(
"The Pittsburgh Penguins once again are the best \
hockey team in the NHL.",
),
};
println!("New article available! {}", article.summarize());
}
pub struct Tweet2 {
pub username: String,
pub content: String,
pub reply: bool,
pub retweet: bool,
}
impl Summary2 for Tweet2 {
fn summarize_author(&self) -> String {
format!("@{}", self.username)
}
}
pub fn testtweet2() {
let tweet = Tweet2 {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably know, people"),
reply: false,
retweet: false,
};
println!("1 new tweet: {}", tweet.summarize());
}
pub fn _notify(item: &impl Summary) {
// syntactic sugar: <T: Summary>(item: &T)
println!("Breaking news! {}", item.summarize());
}
fn _returns_summarizable() -> impl Summary {
Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably already know, people"),
reply: false,
retweet: false,
}
// can't return multiple types with this trait though
}
pub fn _notify_bound<T: Summary>(item: &T) {
// // Trait Bound Syntax
// pub fn notify(item1: &impl Summary, item2: &impl Summary) {
// // Multiple trait bounds
// pub fn notify(item: &(impl Summary + Display)) {
// pub fn notify<T: Summary + Display>(item: &T) {
println!("Breaking news! {}", item.summarize());
}
// // Where clauses can make traits clearer
//fn some_function<T: Display + Clone, U: Clone + Debug>(t: &T, u: &U) -> i32 { }
//fn some_function<T, U>(t: &T, u: &U) -> i32
// where T: Display + Clone,
// U: Clone + Debug
//{}
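// A minimal compilable version of the `where` form above (added for illustration,
// not part of the original chapter code).
fn _describe_pair<T, U>(t: &T, u: &U) -> String
where
    T: Display + Clone,
    U: Clone + std::fmt::Debug,
{
    format!("{} and {:?}", t, u)
}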
// // return types that implement traits
fn _returns_summarizable2() -> impl Summary {
Tweet {
username: String::from("horse_ebooks"),
content: String::from("of course, as you probably already know, people"),
reply: false,
retweet: false,
}
}
// fn _broken_returns_summarizable(switch: bool) -> impl Summary {
// if switch {
// NewsArticle {
// headline: String::from("Penguins win the Stanley Cup Championship!"),
// location: String::from("Pittsburgh, PA, USA"),
// author: String::from("Iceburgh"),
// content: String::from(
// "The Pittsburgh Penguins once again are the best \
// hockey team in the NHL.",
// ),
// }
// } else {
// Tweet {
// username: String::from("horse_ebooks"),
// content: String::from("of course, as you probably already know, people"),
// reply: false,
// retweet: false,
// }
// }
// }
fn largest_using_traits<T: PartialOrd + Copy>(list: &[T]) -> T {
let mut largest = list[0];
for &item in list {
if item > largest {
largest = item;
}
}
largest
}
fn largest_test() {
let number_list = vec![34, 50, 25, 100, 65];
let result = largest_using_traits(&number_list);
println!("The largest number is {}", result);
let char_list = vec!['y', 'm', 'a', 'q'];
let result = largest_using_traits(&char_list);
println!("The largest char is {}", result);
}
// Conditional trait bounds
struct _Pair<T> {
x: T,
y: T,
}
impl<T> _Pair<T> {
fn _new(x: T, y: T) -> Self {
Self { x, y }
}
}
impl<T: Display + PartialOrd> _Pair<T> {
fn _cmp_display(&self) {
if self.x >= self.y {
println!("The largest member is x = {}", self.x);
} else {
println!("The largest member is y = {}", self.y);
}
}
}
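// Illustrative usage of the conditional bound above (added; not part of the
// original chapter code): `_cmp_display` is only callable because `i32`
// implements both `Display` and `PartialOrd`.
fn _conditional_bound_demo() {
    let pair = _Pair::_new(5, 10);
    pair._cmp_display();
}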
| true
|
b1d5956ed35bdb90a0a2657bc66f38c0b1da499b
|
Rust
|
yuanguohuo/rust-examples
|
/src/test_match/test_enum_match.rs
|
UTF-8
| 3,934
| 3.921875
| 4
|
[] |
no_license
|
use std::fmt;
pub enum MyOption<T> {
MyNone,
MySome(T),
}
impl<T: fmt::Display> fmt::Display for MyOption<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"fmt {}",
match self {
MyOption::MyNone => "MyNone".to_string(),
MyOption::MySome(v) => format!("MySome({})", v),
}
)
}
}
impl<T: fmt::Display> MyOption<T> {
pub fn dump(&self) {
match self {
MyOption::MyNone => println!("dump MyNone"),
MyOption::MySome(v) => println!("dump MySome({})", v),
}
}
}
#[cfg(test)]
mod test {
use super::MyOption::MyNone;
use super::MyOption::MySome;
use crate::test_match::test_struct_match::Foo;
#[test]
fn match_val_by_val() {
let i: i32 = 3;
let ri = &i;
let o1 = MySome(ri);
match o1 {
MyNone => println!("match MyNone"),
MySome(v) => println!("match MySome({})", v),
}
println!("{}", o1); //o1 is not moved;
o1.dump(); //o1 is not moved;
println!("{}", ri); //ri is not moved;
let mut j: i32 = 4;
let rj = &mut j;
let o2 = MySome(rj);
match o2 {
MyNone => println!("match MyNone"),
MySome(v) => println!("match MySome({})", v),
}
//println!("{}", o2); //o2 is moved;
//o2.dump();
//println!("{}", rj); //rj is moved;
let mut i: i32 = 1;
let mut s: String = "a".to_string();
let foo = Foo::new(i, &mut i, "a".to_string(), &mut s);
println!("{}", foo);
let o3 = MySome(foo);
match o3 {
MyNone => println!("match MyNone"),
MySome(v) => println!("match MySome({})", v),
}
//println!("{}", o3); //o3 is moved;
//println!("{}", foo); //foo is moved;
}
#[test]
fn match_ref_by_ref() {
let i: i32 = 333;
let o1 = MySome(i);
let o1_ref = &o1;
match o1_ref {
&MyNone => println!("match MyNone"),
&MySome(v) => println!("match MySome({})", v),
}
println!("{}", o1); //o1 is not moved;
o1.dump(); //o1 is not moved;
let mut i: i32 = 1;
let mut s: String = "a".to_string();
let o2 = MySome(Foo::new(i, &mut i, "a".to_string(), &mut s));
let o2_ref = &o2;
match o2_ref {
&MyNone => println!("match MyNone"),
            //this would be fine if all members of MySome were Copy. It can be thought of this way:
            //rust would create a brand new MySome from o2_ref; as a result, all members would be
            //moved (unless they are Copy) out of o2_ref; but o2_ref is a reference, and nothing
            //can be moved out of it. So this will not compile if any member of MySome is not Copy.
            //See o1 above: because i32 is Copy, it is OK;
//&MySome(v) => println!("match MySome({})", v),
&MySome(ref v) => println!("match MySome({})", v),
}
println!("{}", o2);
println!("{}", o2_ref);
}
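    // Added illustration of the point above (not in the original tests): when the
    // payload is Copy (here i32), matching through a reference with `&MySome(v)`
    // works, because binding `v` copies the value instead of moving it.
    #[test]
    fn match_ref_copy_payload() {
        let o = MySome(41);
        let o_ref = &o;
        match o_ref {
            &MyNone => println!("match MyNone"),
            &MySome(v) => assert_eq!(v, 41),
        }
        o.dump(); // o was not moved
    }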
#[test]
fn match_ref_by_val() {
let mut i: i32 = 1;
let mut s: String = "a".to_string();
let o1 = MySome(Foo::new(i, &mut i, "a".to_string(), &mut s));
let o1_ref = &o1;
match o1_ref {
MyNone => println!("match MyNone"),
MySome(v) => println!("match MySome({})", v),
}
println!("{}", o1);
println!("{}", o1_ref);
}
/*
#[test]
fn match_val_by_ref() {
let mut i: i32 = 1;
let mut s: String = "a".to_string();
let o1 = MySome(Foo::new(i, &mut i, "a".to_string(), &mut s));
match o1 {
&MyNone => println!("match MyNone"),
&MySome(v) => println!("match MySome({})", v),
}
}
*/
}
| true
|
66639c5c5f809a74f99c59a416d7cdbbc0a5b6bd
|
Rust
|
l0calh05t/poly-rs
|
/examples/basic.rs
|
UTF-8
| 593
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
use num::Complex;
use poly::{coefficients, Polynomial};
fn main() {
let a = Polynomial::new(coefficients![1f32, 2.0, 3.0, 0.0]);
let b = Polynomial::new(coefficients![1f32, 0.0, 1.0]);
let (q, r) = a.div_rem(&b);
println!(
"({0}) / ({1}) = ({1}) * ({2}) + {3}",
a.to_display("ω"),
b.to_display("ω"),
q.to_display("ω"),
r.to_display("ω")
);
let x = Complex::new(0f32, 1.0);
let e = a.eval(x);
println!("{} = {} for x = {}", a.to_display("x"), e, x);
let d = a.eval_der(1f32, 2);
println!("({})'' = {} for z = {}", a.to_display("z"), d, 1f32);
}
| true
|
98cfe6944b98ea926507eab068988fa40c28a166
|
Rust
|
armyhaylenko/brainfuck_compiler
|
/src/main.rs
|
UTF-8
| 305
| 2.71875
| 3
|
[] |
no_license
|
fn main() -> std::io::Result<()> {
let args: Vec<String> = std::env::args().collect();
let file = args.get(1).expect("File location not specified");
let code = std::fs::read_to_string(file).expect("Brainfuck source not found");
println!("{}", brainfuck_compiler::run(code));
Ok(())
}
| true
|
207296d8c4173a25f50c3ac47cdad76b80ab352a
|
Rust
|
davidgjordan/rustc
|
/factorial.rs
|
UTF-8
| 2,601
| 3.890625
| 4
|
[] |
no_license
|
fn factorial(n: i32) -> i32 {
    if n <= 1 {
        1
    } else {
        n * factorial(n - 1)
    }
}
// prints whether n is even or odd
fn esPar(n: i32) {
    if n % 2 == 0 {
        println!("Is even");
    } else {
        println!("Is odd");
    }
}
// validates a date encoded as YYYYMMDD
fn isValidDate(date: i32) -> bool {
    let mut auxDate = date;
    let anio = auxDate / 10000;
    auxDate = auxDate % 10000;
    let mes = auxDate / 100;
    auxDate = auxDate % 100;
    let dia = auxDate;
    println!("Year:{}", anio);
    println!("Month:{}", mes);
    println!("Day:{}", dia);
    // valid if we have a correct year and a correct month according to its days
    isValidYear(anio) && isValidMonth(dia, mes, anio)
}
// if the year is a multiple of 4, February gets one extra day (handled below);
// here we only check that the year is in the supported range
fn isValidYear(a: i32) -> bool {
    a > 1900 && a < 2018
}
fn isValidMonth(d: i32, m: i32, a: i32) -> bool {
    // if it is a valid month
    if m > 0 && m <= 12 {
        // months with 31 days
        if m == 1 || m == 3 || m == 5 || m == 7 || m == 8 || m == 10 || m == 12 {
            isValidDay(d, 31)
        // the remaining months, except month 2, have only 30 days
        } else if m != 2 {
            isValidDay(d, 30)
        // month 2: check whether the year is a leap year
        } else {
            // leap year: February has up to 29 days
            if isYearBiciesto(a) {
                isValidDay(d, 29)
            // otherwise February has up to 28 days
            } else {
                isValidDay(d, 28)
            }
        }
    // not a valid month
    } else {
        false
    }
}
// A day is valid if it is between 1 and the month's maximum number of days; for
// month two the maximum depends on whether the year is a leap year (29) or not (28).
fn isValidDay(d: i32, maxDias: i32) -> bool {
    d > 0 && d <= maxDias
}
// A year is a leap year if it is a multiple of 4, except for multiples of 100
// that are not also multiples of 400 (Gregorian rule).
fn isYearBiciesto(a: i32) -> bool {
    (a % 4 == 0 && a % 100 != 0) || a % 400 == 0
}
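// Small self-check added for illustration (not in the original source): exercises
// the Gregorian leap-year rule above with a few known years.
fn _leap_year_examples() {
    assert!(isYearBiciesto(2016)); // divisible by 4, not by 100
    assert!(!isYearBiciesto(1900)); // divisible by 100 but not by 400
    assert!(isYearBiciesto(2000)); // divisible by 400
}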
fn main() {
    // `let` variables are like `auto` in C++ -> `let` forces me to initialize my variables
    //let r = factorial(8);
    //let p = esPar(6);
    //println!("Factorial:{:?}",r );
    //println!("Hello:{}",r );
    let date = 20170229;
    if isValidDate(date) {
        let mut auxDate = date;
        let anio = auxDate / 10000;
        auxDate = auxDate % 10000;
        let mes = auxDate / 100;
        auxDate = auxDate % 100;
        let dia = auxDate;
        println!("Valid date:");
        println!("Year:{}", anio);
        println!("Month:{}", mes);
        println!("Day:{}", dia);
    } else {
        println!("Invalid date");
    }
}
| true
|
e009939477b5d8d0d09d6a9938ba47f5f3f1fb9b
|
Rust
|
novakov-alexey/pdf-generator
|
/src/handlebars_ext.rs
|
UTF-8
| 3,345
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
extern crate handlebars;
use std::collections::HashMap;
use std::collections::HashSet;
use std::iter::FromIterator;
use handlebars::*;
pub fn array_length_helper(
h: &Helper,
_: &Handlebars,
_: &Context,
_: &mut RenderContext,
out: &mut dyn Output,
) -> Result<(), RenderError> {
let length = h
.param(0)
.as_ref()
.and_then(|v| v.value().as_array())
.map(|arr| arr.len())
.ok_or_else(|| RenderError::new(
"Param 0 with 'array' type is required for array_length helper",
))?;
out.write(length.to_string().as_ref())?;
Ok(())
}
// Check whether arrays have non-empty intersection based on given property name
//
// Example: (contains "category" ../master_data ["sensible"])
// return true: if array A (2nd parameter) contains any element from array B (3rd parameter).
// 1st parameter is property name to check at every element of the array A.
// return false: otherwise
pub fn contains_helper(
h: &Helper,
_: &Handlebars,
_: &Context,
_: &mut RenderContext,
out: &mut dyn Output,
) -> Result<(), RenderError> {
let property = h.param(0)
.as_ref()
.and_then(|v| v.value().as_str())
.ok_or_else(|| RenderError::new(
"Param 0 with 'string' type is required for 'contains' helper",
))?;
let data: Vec<&str> = h
.param(1)
.as_ref()
.and_then(|v| v.value()
.as_array()
.map(|v|
v.iter()
.flat_map(|e|
e.as_object()
.and_then(|o| o.get(property)
.and_then(|vv| vv.as_str()))
)
.collect())
)
.ok_or_else(|| RenderError::new(
"Param 1 with 'array' type is required for 'contains' helper",
))?;
let keys: Vec<&str> = h
.param(2)
.as_ref()
.and_then(|v| v.value().as_array()
.map(|v| v.iter()
.flat_map(|e| e.as_str())
.collect())
)
.ok_or_else(|| RenderError::new(
"Param 2 with 'array' type is required for 'contains' helper",
))?;
let set: HashSet<&str> = HashSet::from_iter(data);
let found = keys.iter().any(|k| set.contains(k));
if found {
out.write("true".to_string().as_ref())?
}
Ok(())
}
pub struct I18Helper(pub HashMap<String, String>);
impl HelperDef for I18Helper {
fn call<'reg: 'rc, 'rc>(
&self,
h: &Helper<'reg, 'rc>,
_: &'reg Handlebars,
_: &'rc Context,
_: &mut RenderContext<'reg>,
out: &mut dyn Output,
) -> HelperResult {
let read_param = |i| h.param(i)
.as_ref()
.and_then(|v| v.value().as_str())
.ok_or_else(|| RenderError::new(
format!("Param {:?} with 'string' type is required for i18 helper", i),
));
let key = read_param(0)?;
let res = match self.0.get(key) {
Some(v) => out.write(v),
None => out.write(key)
};
res.map_err(|e|
RenderError::new(format!("Failed to write into the Template output: {}",
e.to_string())))
}
}
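// Hedged usage sketch (added; not in the original source): wiring the I18Helper
// into a Handlebars registry. The helper name and translation contents here are
// assumptions for illustration only.
#[allow(dead_code)]
fn register_i18_helper(registry: &mut Handlebars) {
    let mut translations = HashMap::new();
    translations.insert("greeting".to_string(), "Hello".to_string());
    registry.register_helper("i18", Box::new(I18Helper(translations)));
}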
| true
|
8b52d2063ce5811f4dacc101ac636ca8262f4b6a
|
Rust
|
mortonar/rustsysinfo
|
/src/bin.rs
|
UTF-8
| 842
| 2.625
| 3
|
[] |
no_license
|
#![feature(libc)]
extern crate rustsysinfo;
extern crate libc;
use rustsysinfo::sysinfo;
use libc::c_char;
use std::str::from_utf8_unchecked;
use std::ffi::CStr;
pub fn main() {
match sysinfo() {
Ok(output) => {
print_field(output.sysname);
print_field(output.nodename);
print_field(output.release);
print_field(output.version);
print_field(output.machine);
print_field(output.__domainname);
},
Err(e) => eprintln!("error getting sysinfo: {}", e)
}
}
fn print_field(field: [c_char; 65usize]) {
println!("{:?}",to_str(&(&field as *const c_char ) as *const *const c_char));
}
fn to_str<'a>(s: *const *const c_char) -> &'a str {
unsafe {
let res = CStr::from_ptr(*s).to_bytes();
from_utf8_unchecked(res)
}
}
| true
|
8708cd579cee8c049683dc6054b1504826ad9ff3
|
Rust
|
Redrield/aoc2020
|
/src/day7.rs
|
UTF-8
| 5,083
| 3.09375
| 3
|
[] |
no_license
|
use petgraph::graphmap::DiGraphMap;
use std::collections::HashMap;
use regex::Regex;
use petgraph::dot::Dot;
pub fn main() {
let contents = std::fs::read_to_string("inputs/day7").unwrap().lines().map(ToString::to_string).collect::<Vec<_>>();
part1(&contents);
part2(&contents);
}
fn part1(contents: &Vec<String>) {
let mut graph = DiGraphMap::new();
// Mapping of bag name to graph id, needed because the node types must impl Copy, and trying to get &str lifetimes functional here would be a nightmare
let mut names: HashMap<String, usize> = HashMap::new();
// Rolling count of next node id
let mut node = 1;
// depluralify bags to normalize their names
let re = Regex::new("s$").unwrap();
for line in contents {
let mut line = line.split(" contain ");
let this_node = line.next().unwrap();
let this_node = *names.entry(re.replace(this_node, "").to_string())
.or_insert_with(|| {
let x = node;
node += 1;
x
});
let connections = line.next().unwrap().split(", ").map(|s| &s[2..])
.map(|s| {
// normalize with no periods and in the singular
re.replace(&s.replace(".", ""), "").to_string()
})
// Find existing node id or create one with running id
.map(|s| *names.entry(s).or_insert_with(|| {
let x = node;
node += 1;
x
}))
.collect::<Vec<_>>();
for con in connections {
// con -> this_node matters because it's building a DAG.
// outgoing nodes (a->b) imply that b can contain a, needed to properly traverse the graph for part 1
graph.add_edge(con, this_node, 1);
}
}
let id = *names.get("shiny gold bag").unwrap();
println!("{}", traverse_graph_part1(id, &graph, &mut vec![]).len());
}
fn part2(contents: &Vec<String>) {
let mut graph = DiGraphMap::new();
// Mapping of bag name to graph id, needed because the node types must impl Copy, and trying to get &str lifetimes functional here would be a nightmare
let mut names: HashMap<String, usize> = HashMap::new();
// Rolling count of next node id
let mut node = 1;
// depluralify bags to normalize their names
let re = Regex::new("s$").unwrap();
for line in contents {
let mut line = line.split(" contain ");
let this_node = line.next().unwrap();
let this_node = *names.entry(re.replace(this_node, "").to_string())
.or_insert_with(|| {
let x = node;
node += 1;
x
});
let connections = line.next().unwrap().split(", ").map(|s| {
(s[..2].trim().parse::<usize>().unwrap_or(0), &s[2..])
})
.map(|(weight, s)| {
// normalize with no periods and in the singular
(weight, re.replace(&s.replace(".", ""), "").to_string())
})
// Find existing node id or create one with running id
.map(|(weight, s)| (weight, *names.entry(s).or_insert_with(|| {
let x = node;
node += 1;
x
})))
.collect::<Vec<_>>();
for (weight, con) in connections {
// con -> this_node matters because it's building a DAG.
// outgoing nodes (a->b) imply that b can contain a, needed to properly traverse the graph for part 1
graph.add_edge(this_node, con, weight);
}
}
let id = *names.get("shiny gold bag").unwrap();
println!("{}", traverse_graph_part2(id, &graph) - 1); // alg with count the shiny gold bag by default
}
/// A fairly naive approach to finding all nodes accessible from a start node
/// Go through all neighbours, and keep track of nodes that have already been seen to stop duplicates
/// keep doing this until there are no more outgoing edges
/// Directed edge here should mean for a->b, a is contained within b
pub fn traverse_graph_part1<'a>(start: usize, graph: &DiGraphMap<usize, u8>, seen: &'a mut Vec<usize>) -> &'a mut Vec<usize> {
// Starting at `start`, recursively descend every path through every neighbouring node until there aren't any unique outgoing edges to observe anymore
for neighbor in graph.neighbors(start) {
// make sure that values aren't duplicated
if seen.contains(&neighbor) {
continue;
}
// record this value to make sure this node doesn't get double counted, and continue
seen.push(neighbor);
traverse_graph_part1(neighbor, graph, seen);
}
seen
}
pub fn traverse_graph_part2(start: usize, graph: &DiGraphMap<usize, usize>) -> usize {
let mut total_weight = 1; // Weight starts at 1 to count the parent bag in this tree
for (_, neighbor, weight) in graph.edges(start) {
let total = traverse_graph_part2(neighbor, graph);
total_weight += total * weight;
}
total_weight
}
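// Hedged sketch (added; not part of the original solution): a tiny graph that
// illustrates the part-1 traversal. Node ids and edges are made up; an edge
// a -> b means "a can be contained in b".
#[cfg(test)]
mod traversal_tests {
    use super::*;

    #[test]
    fn finds_all_containing_nodes() {
        let mut graph = DiGraphMap::new();
        graph.add_edge(1usize, 2usize, 1u8); // 1 can be contained in 2
        graph.add_edge(2, 3, 1); // 2 can be contained in 3
        let mut seen = vec![];
        traverse_graph_part1(1, &graph, &mut seen);
        assert_eq!(seen.len(), 2); // both 2 and 3 can (transitively) contain 1
        assert!(seen.contains(&2) && seen.contains(&3));
    }
}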
| true
|
0077976222dcd31614ef96bc0ba4d9ea9d26dd43
|
Rust
|
yamash723/til
|
/atcoder/BeginnersSelection/05.ABC083B - Some Sums/rust/main.rs
|
UTF-8
| 735
| 3.296875
| 3
|
[] |
no_license
|
fn main() {
let configure = read_vec::<u32>();
let max = &configure[0];
let a = &configure[1];
let b = &configure[2];
let mut total_sum = 0;
for n in 1..(max + 1) {
let mut j = n;
let mut sum = 0;
while j != 0 {
sum += j % 10;
j = j / 10;
}
if a <= &sum && &sum <= b {
total_sum += n;
}
}
println!("{}", total_sum);
}
fn read<T: std::str::FromStr>() -> T {
let mut s = String::new();
std::io::stdin().read_line(&mut s).ok();
s.trim().parse().ok().unwrap()
}
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
read::<String>().split_whitespace()
.map(|e| e.parse().ok().unwrap()).collect()
}
| true
|
a1bb9abae231636cbb8debbb63546c8ef9269af4
|
Rust
|
TimonPeng/sixtyfourgame
|
/program/src/instruction.rs
|
UTF-8
| 2,736
| 3.140625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use solana_program::program_error::ProgramError;
use solana_program::pubkey::Pubkey;
use std::convert::TryInto;
use crate::error::SixtyFourGameError::InvalidInstruction;
pub enum SixtyFourGameInstruction {
    /// Bid - amount and pubkey - adds BidEntry to AuctionList
    Bid {
        amount: u64,
        pubkey: Pubkey,
    },
    /// CancelBid - pubkey - removes BidEntry from AuctionList
    CancelBid {
        pubkey: Pubkey,
    },
    /// MintNFT - bidEntryNumber - creates NFT after auction
    MintNFT {
        bidEntryNumber: u64
    },
    /// InitiatePlay - square and pubkey - allows player to attack
    InitiatePlay {
        square: u64,
        pubkey: Pubkey,
    },
    /// EndPlay - square and pubkey - withdraws NFT to owner (can't attack)
    EndPlay {
        square: u64,
        pubkey: Pubkey,
    },
    /// Attack - from/to squares and fromPubkey - attacks neighboring square
    Attack {
        fromSquare: u64,
        toSquare: u64,
        fromPubkey: Pubkey,
    },
}
impl SixtyFourGameInstruction {
/// Unpacks a byte buffer into a SixtyFourGameInstruction
pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
let (tag, rest) = input.split_first().ok_or(InvalidInstruction)?;
Ok(match tag {
0 => Self::Bid {
amount: Self::unpack_amount(rest)?,
pubkey: Self::unpack_pubkey(rest)?,
},
1 => Self::CancelBid {
pubkey: Self::unpack_pubkey(rest)?,
},
2 => Self::MintNFT {
bidEntryNumber: Self::unpack_amount(rest)?,
},
3 => Self::InitiatePlay {
square: Self::unpack_amount(rest)?,
pubkey: Self::unpack_pubkey(rest)?,
},
4 => Self::EndPlay {
square: Self::unpack_amount(rest)?,
pubkey: Self::unpack_pubkey(rest)?,
},
5 => Self::Attack {
fromSquare: Self::unpack_amount(rest)?,
toSquare: Self::unpack_amount(rest)?,
fromPubkey: Self::unpack_pubkey(rest)?,
},
_ => return Err(InvalidInstruction.into()),
})
}
fn unpack_amount(input: &[u8]) -> Result<u64, ProgramError> {
let amount = input
.get(..8)
.and_then(|slice| slice.try_into().ok())
.map(u64::from_le_bytes)
.ok_or(InvalidInstruction)?;
Ok(amount)
}
    fn unpack_amount_32(input: &[u8]) -> Result<u32, ProgramError> {
        let amount = input
            .get(..4)
            .and_then(|slice| slice.try_into().ok())
            .map(u32::from_le_bytes)
            .ok_or(InvalidInstruction)?;
        Ok(amount)
    }
    // Assumed helper: the original file calls `unpack_pubkey` but never defines it.
    // This sketch reads a 32-byte public key from the start of the input slice.
    fn unpack_pubkey(input: &[u8]) -> Result<Pubkey, ProgramError> {
        let pubkey = input
            .get(..32)
            .and_then(|slice| slice.try_into().ok())
            .map(Pubkey::new_from_array)
            .ok_or(InvalidInstruction)?;
        Ok(pubkey)
    }
}
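// Hedged sketch (added; not in the original program): unpacking a MintNFT
// instruction from raw bytes. Tag byte 2 selects MintNFT and the next 8 bytes
// are the little-endian bid entry number.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn unpacks_mint_nft() {
        let mut data = vec![2u8];
        data.extend_from_slice(&7u64.to_le_bytes());
        match SixtyFourGameInstruction::unpack(&data).unwrap() {
            SixtyFourGameInstruction::MintNFT { bidEntryNumber } => {
                assert_eq!(bidEntryNumber, 7)
            }
            _ => panic!("expected MintNFT"),
        }
    }
}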
| true
|
399ec9ee1fbf03bcbf64793455adeb47c04606f5
|
Rust
|
edgedb/edgedb-cli
|
/src/cli/migrate.rs
|
UTF-8
| 12,804
| 2.5625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::env;
use std::path::{Path, PathBuf};
use std::io::{self, Write};
use anyhow::Context;
use edgedb_cli_derive::EdbClap;
use fs_err as fs;
use fn_error_context::context;
use crate::cli::install::{get_rc_files, no_dir_in_path};
use crate::commands::ExitCode;
use crate::credentials;
use crate::platform::binary_path;
use crate::platform::{home_dir, tmp_file_path, symlink_dir, config_dir};
use crate::portable::project;
use crate::print;
use crate::print_markdown;
use crate::question;
#[derive(EdbClap, Clone, Debug)]
pub struct CliMigrate {
    /// Increase output verbosity
    #[clap(short='v', long)]
    pub verbose: bool,
    /// Dry run: do not actually move anything (use with increased verbosity)
    #[clap(short='n', long)]
    pub dry_run: bool,
}
#[derive(Clone, Debug)]
enum ConfirmOverwrite {
Yes,
Skip,
Quit,
}
pub fn main(options: &CliMigrate) -> anyhow::Result<()> {
let base = home_dir()?.join(".edgedb");
if base.exists() {
migrate(&base, options.dry_run)
} else {
log::warn!("Directory {:?} does not exist. Nothing to do.", base);
Ok(())
}
}
fn file_is_non_empty(path: &Path) -> anyhow::Result<bool> {
match fs::metadata(path) {
Ok(meta) => Ok(meta.len() > 0),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(false),
Err(e) => Err(e.into()),
}
}
fn dir_is_non_empty(path: &Path) -> anyhow::Result<bool> {
match fs::read_dir(path) {
Ok(mut dir) => Ok(dir.next().is_some()),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(false),
Err(e) => Err(e.into()),
}
}
fn move_file(src: &Path, dest: &Path, dry_run: bool) -> anyhow::Result<()> {
use ConfirmOverwrite::*;
if file_is_non_empty(&dest)? {
if dry_run {
log::warn!("File {:?} exists in both locations, \
will prompt for overwrite", dest);
return Ok(());
}
let mut q = question::Choice::new(format!(
"Attempting to move {:?} -> {:?}, but \
destination file exists. Do you want to overwrite?",
src, dest));
q.option(Yes, &["y"], "overwrite the destination file");
q.option(Skip, &["s"],
"skip, keep the destination file, remove the source");
q.option(Quit, &["q"], "quit now without overwriting");
match q.ask()? {
Yes => {},
Skip => return Ok(()),
Quit => anyhow::bail!("Cancelled by user"),
}
} else {
if dry_run {
log::info!("Would move {:?} -> {:?}", src, dest);
return Ok(());
}
}
let tmp = tmp_file_path(&dest);
fs::copy(src, &tmp)?;
fs::rename(tmp, dest)?;
fs::remove_file(src)?;
Ok(())
}
fn move_dir(src: &Path, dest: &Path, dry_run: bool) -> anyhow::Result<()> {
use ConfirmOverwrite::*;
if dir_is_non_empty(&dest)? {
if dry_run {
log::warn!("Directory {:?} exists in both locations, \
will prompt for overwrite", dest);
return Ok(());
}
let mut q = question::Choice::new(format!(
"Attempting to move {:?} -> {:?}, but \
destination directory exists. Do you want to overwrite?",
src, dest));
q.option(Yes, &["y"], "overwrite the destination dir");
q.option(Skip, &["s"],
"skip, keep the destination dir, remove the source");
q.option(Quit, &["q"], "quit now without overwriting");
match q.ask()? {
Yes => {},
Skip => return Ok(()),
Quit => anyhow::bail!("Cancelled by user"),
}
} else {
if dry_run {
log::info!("Would move {:?} -> {:?}", src, dest);
return Ok(());
}
}
fs::create_dir_all(dest)?;
for item in fs::read_dir(src)? {
let item = item?;
let ref dest_path = dest.join(item.file_name());
match item.file_type()? {
typ if typ.is_file() => {
let tmp = tmp_file_path(dest_path);
fs::copy(item.path(), &tmp)?;
fs::rename(&tmp, dest_path)?;
}
#[cfg(unix)]
typ if typ.is_symlink() => {
let path = fs::read_link(item.path())?;
symlink_dir(path, dest_path)
.map_err(|e| {
log::info!(
"Error symlinking project at {:?}: {}",
dest_path, e);
}).ok();
}
_ => {
log::warn!("Skipping {:?} of unexpected type", item.path());
}
}
}
fs::remove_dir_all(src)?;
Ok(())
}
fn try_move_bin(exe_path: &Path, bin_path: &Path) -> anyhow::Result<()> {
let bin_dir = bin_path.parent().unwrap();
if !bin_dir.exists() {
fs::create_dir_all(&bin_dir)?;
}
fs::rename(&exe_path, &bin_path)?;
Ok(())
}
#[context("error updating {:?}", path)]
fn replace_line(path: &PathBuf, old_line: &str, new_line: &str)
-> anyhow::Result<bool>
{
if !path.exists() {
return Ok(false);
}
let text = fs::read_to_string(path)
.context("cannot read file")?;
if let Some(idx) = text.find(old_line) {
log::info!("File {:?} contains old path, replacing", path);
let mut file = fs::File::create(path)?;
file.write(text[..idx].as_bytes())?;
file.write(new_line.as_bytes())?;
file.write(text[idx+old_line.len()..].as_bytes())?;
Ok(true)
} else {
log::info!("File {:?} has no old path, skipping", path);
return Ok(false);
}
}
fn update_path(base: &Path, new_bin_path: &Path) -> anyhow::Result<()> {
log::info!("Updating PATH");
let old_bin_dir = base.join("bin");
let new_bin_dir = new_bin_path.parent().unwrap();
#[cfg(windows)] {
use std::env::join_paths;
let mut modified = false;
crate::cli::install::windows_augment_path(|orig_path| {
if orig_path.iter().any(|p| p == new_bin_dir) {
return None;
}
Some(join_paths(
orig_path.iter()
.map(|x| {
if x == &old_bin_dir {
modified = true;
new_bin_dir
} else {
x.as_ref()
}
})
).expect("paths can be joined"))
})?;
if modified && no_dir_in_path(&new_bin_dir) {
print::success("The `edgedb` executable has moved!");
print_markdown!("\
\n\
We've updated your environment configuration to have\n\
`${dir}` in your `PATH` environment variable. You\n\
may need to reopen the terminal for this change to\n\
take effect, and for the `edgedb` command to become\n\
available.\
",
dir=new_bin_dir.display(),
);
}
}
if cfg!(unix) {
let rc_files = get_rc_files()?;
let old_line = format!(
"\nexport PATH=\"{}:$PATH\"\n",
old_bin_dir.display(),
);
let new_line = format!(
"\nexport PATH=\"{}:$PATH\"\n",
new_bin_dir.display(),
);
let mut modified = false;
for path in &rc_files {
if replace_line(&path, &old_line, &new_line)? {
modified = true;
}
}
let cfg_dir = config_dir()?;
let env_file = cfg_dir.join("env");
fs::create_dir_all(&cfg_dir)
.with_context(
|| format!("failed to create {:?}", cfg_dir))?;
fs::write(&env_file, &(new_line + "\n"))
.with_context(
|| format!("failed to write env file {:?}", env_file))?;
if modified && no_dir_in_path(&new_bin_dir) {
print::success("The `edgedb` executable has moved!");
print_markdown!("\
\n\
We've updated your shell profile to have ${dir} in your\n\
`PATH` environment variable. Next time you open the terminal\n\
it will be configured automatically.\n\
\n\
For this session please run:\n\
```\n\
source \"${env_path}\"\n\
```\n\
Depending on your shell type you might also need \
to run `rehash`.\
",
dir=new_bin_dir.display(),
env_path=env_file.display(),
);
}
}
Ok(())
}
pub fn migrate(base: &Path, dry_run: bool) -> anyhow::Result<()> {
if let Ok(exe_path) = env::current_exe() {
if exe_path.starts_with(base) {
let new_bin_path = binary_path()?;
try_move_bin(&exe_path, &new_bin_path)
.map_err(|e| {
print::error("Cannot move executable to the new location.");
eprintln!(" Try `edgedb cli upgrade` instead.");
e
})?;
update_path(base, &new_bin_path)?;
}
}
let source = base.join("credentials");
let target = credentials::base_dir()?;
if source.exists() {
if !dry_run {
fs::create_dir_all(&target)?;
}
for item in fs::read_dir(&source)? {
let item = item?;
move_file(&item.path(), &target.join(item.file_name()), dry_run)?;
}
if !dry_run {
fs::remove_dir(&source)
.map_err(|e| log::warn!("Cannot remove {:?}: {}", source, e))
.ok();
}
}
let source = base.join("projects");
let target = project::stash_base()?;
if source.exists() {
if !dry_run {
fs::create_dir_all(&target)?;
}
for item in fs::read_dir(&source)? {
let item = item?;
if item.metadata()?.is_dir() {
move_dir(&item.path(),
&target.join(item.file_name()), dry_run)?;
}
}
if !dry_run {
fs::remove_dir(&source)
.map_err(|e| log::warn!("Cannot remove {:?}: {}", source, e))
.ok();
}
}
let source = base.join("config");
let target = config_dir()?;
if source.exists() {
if !dry_run {
fs::create_dir_all(&target)?;
}
for item in fs::read_dir(&source)? {
let item = item?;
move_file(&item.path(), &target.join(item.file_name()), dry_run)?;
}
if !dry_run {
fs::remove_dir(&source)
.map_err(|e| log::warn!("Cannot remove {:?}: {}", source, e))
.ok();
}
}
remove_file(&base.join("env"), dry_run)?;
remove_dir_all(&base.join("bin"), dry_run)?;
remove_dir_all(&base.join("run"), dry_run)?;
remove_dir_all(&base.join("logs"), dry_run)?;
remove_dir_all(&base.join("cache"), dry_run)?;
if !dry_run && dir_is_non_empty(&base)? {
eprintln!("\
Directory {:?} is not used by EdgeDB tools any more and must be \
removed to finish migration. But there are some files or \
directories left after all known files moved to the locations. \
This might be because third party tools left some files there. \
", base);
let q = question::Confirm::new(format!(
"Do you want to remove all files and directories within {:?}?",
base,
));
if !q.ask()? {
print::error("Cancelled by user.");
print_markdown!("\
When all files are backed up, run either of:\n\
```\n\
rm -rf ~/.edgedb\n\
edgedb cli migrate\n\
```\
");
return Err(ExitCode::new(2).into());
}
}
remove_dir_all(&base, dry_run)?;
print::success("Directory layout migration successful!");
Ok(())
}
fn remove_file(path: &Path, dry_run: bool) -> anyhow::Result<()> {
if !path.exists() {
return Ok(())
}
if dry_run {
log::info!("Would remove {:?}", path);
return Ok(());
}
log::info!("Removing {:?}", path);
fs::remove_file(path)?;
Ok(())
}
fn remove_dir_all(path: &Path, dry_run: bool) -> anyhow::Result<()> {
if !path.exists() {
return Ok(())
}
if dry_run {
log::info!("Would remove dir {:?} recursively", path);
return Ok(());
}
log::info!("Removing dir {:?}", path);
fs::remove_dir_all(path)?;
Ok(())
}
| true
|
20ba655a24b80b49b9df005d4e4d09fd2c0839c8
|
Rust
|
estshorter/ray-tracing-in-one-weekend-rust
|
/one_weekend/src/hittable.rs
|
UTF-8
| 1,006
| 3.15625
| 3
|
[] |
no_license
|
use crate::material::*;
use crate::ray::Ray;
use crate::vec3::*;
pub struct HitRecord<'a> {
pub p: Point3,
pub normal: Vec3,
pub material: &'a dyn Material,
pub t: f64,
pub front_face: bool,
}
impl<'a> HitRecord<'a> {
pub fn new(
p: Point3,
t: f64,
material: &'a dyn Material,
r: &Ray,
outward_normal: Vec3,
) -> Self {
let (front_face, normal) = calc_face_normal(r, outward_normal);
Self {
p,
normal,
material,
t,
front_face,
}
}
}
pub trait Hittable: Send + Sync {
fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;
}
fn calc_face_normal(r: &Ray, outward_normal: Vec3) -> (bool, Vec3) {
let front_face = dot(r.direction(), &outward_normal) < 0.0;
let normal = if front_face {
outward_normal
} else {
-outward_normal
};
(front_face, normal)
}
| true
|
e83071fbcd93078c8633ab6ac5d10e0f9f948c9f
|
Rust
|
d3an/mariobros
|
/src/mariobros.rs
|
UTF-8
| 4,357
| 2.828125
| 3
|
[] |
no_license
|
use amethyst::{
assets::{AssetStorage, Loader, Handle},
core::timing::Time,
core::transform::Transform,
ecs::prelude::{Component, DenseVecStorage, Entity},
prelude::*,
renderer::{Camera, ImageFormat, SpriteRender, SpriteSheet, SpriteSheetFormat, Texture},
ui::{Anchor, TtfFormat, UiText, UiTransform},
};
pub const ARENA_HEIGHT: f32 = 100.0;
pub const ARENA_WIDTH: f32 = 100.0;
pub const PLAYER_HEIGHT: f32 = 25.0;
pub const PLAYER_WIDTH: f32 = 18.0;
pub const BRICK_HEIGHT: f32 = 18.0;
/*
=================================================================
====================== Static Map Structs =======================
=================================================================
*/
pub struct BrickFloor {
pub length: f32,
pub height: f32,
}
// Consider adding neighbour fields
pub struct FloatingFloor {
pub top: f32,
pub left: f32,
}
pub struct LargePOW {}
pub struct MediumPOW {}
pub struct SmallPOW {}
pub struct LargePipe {}
pub struct SmallPipe {}
/*
=================================================================
====================== MarioBros Struct =========================
=================================================================
*/
#[derive(Default)]
pub struct MarioBros;
impl SimpleState for MarioBros {
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        let world = data.world;
        world.register::<Player>();
        let sprite_sheet_handle = load_sprite_sheet(world);
        initialize_players(world, sprite_sheet_handle);
        initialize_camera(world);
    }
}
/*
=================================================================
===================== Mario Player Struct =======================
=================================================================
*/
#[derive(PartialEq, Eq)]
pub enum Character {
Mario,
Luigi,
}
pub struct Player {
pub character: Character,
pub width: f32,
pub height: f32,
}
// Player Implementation
impl Player {
fn new(character: Character) -> Player {
Player {
character,
width: PLAYER_WIDTH,
height: PLAYER_HEIGHT,
}
}
}
// Player Component Implementation
impl Component for Player {
type Storage = DenseVecStorage<Self>;
}
/*
=================================================================
===================== Additional Functions ======================
=================================================================
*/
// Initializes world view camera
fn initialize_camera(world: &mut World) {
    // Set up the camera so that the screen covers the whole arena and (0, 0) is in the bottom left.
let mut transform = Transform::default();
transform.set_translation_xyz(ARENA_WIDTH * 0.5, ARENA_HEIGHT * 0.5, 1.0);
world
.create_entity()
.with(Camera::standard_2d(ARENA_WIDTH, ARENA_HEIGHT))
.with(transform)
.build();
}
// Initializes arena for default game
fn initialize_map(world: &mut World, sprite_sheet: Handle<SpriteSheet>) {
    let brick_floor = SpriteRender {
        sprite_sheet: sprite_sheet.clone(),
        sprite_number: 0, // assumed index of the brick floor sprite in the sheet
    };
    world
        .create_entity()
        .with(brick_floor.clone())
        .build();
}
// Add Luigi entity if needed
fn initialize_players(world: &mut World, sprite_sheet: Handle<SpriteSheet>) {
let mut mario_transform = Transform::default();
// Correctly position the player
let top = ARENA_HEIGHT - BRICK_HEIGHT;
mario_transform.set_translation_xyz(ARENA_WIDTH * 0.5 - PLAYER_WIDTH * 0.5, top, 0.0);
let sprite_render = SpriteRender {
sprite_sheet: sprite_sheet.clone(),
sprite_number: 11, // Default mario is at index 11 in .ron
};
// Create a Mario entity
world
.create_entity()
.with(sprite_render.clone())
        .with(Player::new(Character::Mario))
.with(mario_transform)
.build();
}
fn load_sprite_sheet(world: &mut World) -> Handle<SpriteSheet> {
// Load the sprite sheet necessary to render the graphics.
// The texture is the pixel data
// `texture_handle` is a cloneable reference to the texture
let texture_handle = {
let loader = world.read_resource::<Loader>();
let texture_storage = world.read_resource::<AssetStorage<Texture>>();
loader.load(
"texture/mario_bros_nes_sprite_sheet.png",
ImageFormat::default(),
(),
&texture_storage,
)
};
let loader = world.read_resource::<Loader>();
    let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
loader.load(
"texture/mario_bros_spritesheet.ron",
SpriteSheetFormat(texture_handle),
(),
&sprite_sheet_store,
)
}
| true
|
2da88abeb2eb2a5ecfdd688fcde627d408180ab7
|
Rust
|
rhinterberger/advent
|
/2021/day5/src/main.rs
|
UTF-8
| 2,073
| 3.546875
| 4
|
[] |
no_license
|
use std::cmp::max;
use std::fs;
const X_SIZE:usize = 1000;
const Y_SIZE:usize = 1000;
fn main() {
let lines = read_input("input.txt");
let mut board = vec![0; X_SIZE*Y_SIZE];
for line in &lines {
if line.is_vertical() || line.is_horizontal() {
draw(&mut board, line);
}
}
let sum = board.into_iter().filter(|value| *value >= 2).count();
println!("{}", sum);
let mut board = vec![0; X_SIZE*Y_SIZE];
for line in &lines {
draw(&mut board, line);
}
let sum = board.into_iter().filter(|value| *value >= 2).count();
println!("{}", sum);
}
fn read_input(path: &str) -> Vec<Line> {
fs::read_to_string(path)
.expect(&format!("Cannot open [{}]", path.to_string()))
.lines()
.map(|line| parse_lines(line))
.collect::<Vec<Line>>()
}
fn parse_lines(line: &str) -> Line {
let coordinates = line
.replace(" -> ",",")
.split(",")
.map(|value| value.parse::<i32>().unwrap())
.collect::<Vec<i32>>();
Line::new(coordinates)
}
fn draw(board: &mut Vec<i32>, line: &Line) {
let length = max(line.length.x, line.length.y);
for i in 0..length+1 {
let x = line.start.x + i*line.delta.x;
let y = line.start.y + i*line.delta.y;
board[x as usize + y as usize * Y_SIZE] += 1;
}
}
#[derive(Debug)]
struct Point {
x: i32,
y: i32
}
#[derive(Debug)]
struct Line {
start: Point,
end: Point,
delta: Point,
length: Point
}
impl Line {
fn new(coords: Vec<i32>) -> Line {
let start = Point {x: coords[0], y: coords[1]};
let end = Point {x: coords[2], y: coords[3]};
let dx = end.x - start.x;
let dy = end.y - start.y;
let delta = Point {x: dx.signum(), y:dy.signum()};
let length = Point {x: dx.abs(), y:dy.abs()};
Line { start, end, delta, length }
}
fn is_horizontal(&self) -> bool {
self.start.y == self.end.y
}
fn is_vertical(&self) -> bool {
self.start.x == self.end.x
}
}
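// Small added check (illustrative, not in the original solution): a diagonal line
// from (0,0) to (2,2) gets unit delta (1,1) and per-axis length 2.
#[cfg(test)]
mod line_tests {
    use super::*;

    #[test]
    fn diagonal_line_delta_and_length() {
        let line = Line::new(vec![0, 0, 2, 2]);
        assert_eq!((line.delta.x, line.delta.y), (1, 1));
        assert_eq!((line.length.x, line.length.y), (2, 2));
        assert!(!line.is_horizontal());
        assert!(!line.is_vertical());
    }
}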
| true
|
601cd3539d26a2fe7faf6bd44ee8228d95dd1f4b
|
Rust
|
isgasho/doukutsu-rs
|
/src/npc/mimiga_village.rs
|
UTF-8
| 4,688
| 2.65625
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use num_traits::clamp;
use crate::common::Direction;
use crate::ggez::GameResult;
use crate::npc::NPC;
use crate::player::Player;
use crate::shared_game_state::SharedGameState;
impl NPC {
pub(crate) fn tick_n071_chinfish(&mut self, state: &mut SharedGameState) -> GameResult {
if self.action_num == 0 {
self.action_num = 1;
self.target_x = self.x;
self.target_y = self.y;
self.vel_y = 0x80;
}
if self.action_num == 1 {
if self.target_y < self.y {
self.vel_y -= 8;
} else if self.target_y > self.y {
self.vel_y += 8;
}
self.vel_y = clamp(self.vel_y, -0x100, 0x100);
}
self.x += self.vel_x;
self.y += self.vel_y;
self.anim_counter += 1;
if self.anim_counter > 4 {
self.anim_counter = 0;
self.anim_num += 1;
}
if self.anim_num > 1 {
self.anim_num = 0;
}
if self.shock > 0 {
self.anim_num = 2;
}
if self.direction == Direction::Left {
self.anim_rect = state.constants.npc.n071_chinfish[self.anim_num as usize];
} else {
self.anim_rect = state.constants.npc.n071_chinfish[self.anim_num as usize + 3];
}
Ok(())
}
pub(crate) fn tick_n075_kanpachi(&mut self, state: &mut SharedGameState, player: &Player) -> GameResult {
if self.action_num == 0 {
self.action_num = 1;
self.anim_num = 0;
self.anim_counter = 0;
}
if self.action_num == 1 {
if (self.x - (48 * 0x200) < player.x) && (self.x + (48 * 0x200) > player.x)
&& (self.y - (48 * 0x200) < player.y) && (self.y + (48 * 0x200) > player.y) {
self.anim_num = 1;
} else {
self.anim_num = 0;
}
}
self.anim_rect = state.constants.npc.n075_kanpachi[self.anim_num as usize];
Ok(())
}
pub(crate) fn tick_n077_yamashita(&mut self, state: &mut SharedGameState) -> GameResult {
if self.action_num == 0 {
self.action_num = 1;
self.anim_num = 0;
self.anim_counter = 0;
}
match self.action_num {
1 => {
if state.game_rng.range(0..120) == 10 {
self.action_num = 2;
self.action_counter = 0;
self.anim_num = 1;
}
}
2 => {
self.action_counter += 1;
if self.action_counter > 8 {
self.action_num = 1;
self.anim_num = 0;
}
}
_ => {}
}
if self.direction == Direction::Left {
self.anim_rect = state.constants.npc.n077_yamashita[self.anim_num as usize];
} else {
self.anim_rect = state.constants.npc.n077_yamashita[2];
}
Ok(())
}
pub(crate) fn tick_n079_mahin(&mut self, state: &mut SharedGameState, player: &Player) -> GameResult {
match self.action_num {
0 => {
self.action_num = 1;
self.anim_num = 2;
self.anim_counter = 0;
}
2 => {
self.anim_num = 0;
if state.game_rng.range(0..120) == 10 {
self.action_num = 3;
self.action_counter = 0;
self.anim_num = 1;
}
if (self.x - (32 * 0x200) < player.x) && (self.x + (32 * 0x200) > player.x)
&& (self.y - (32 * 0x200) < player.y) && (self.y + (16 * 0x200) > player.y) {
if self.x > player.x {
self.direction = Direction::Left;
} else {
self.direction = Direction::Right;
}
}
}
3 => {
self.action_counter += 1;
if self.action_counter > 8 {
self.action_num = 2;
self.anim_num = 0;
}
}
_ => {}
}
self.vel_y += 0x40;
if self.vel_y > 0x5ff {
self.vel_y = 0x5ff;
}
self.y += self.vel_y;
if self.direction == Direction::Left {
self.anim_rect = state.constants.npc.n079_mahin[self.anim_num as usize];
} else {
self.anim_rect = state.constants.npc.n079_mahin[self.anim_num as usize + 3];
}
Ok(())
}
}
| true
|
ac7d1cb5c6f5181d5996984f80a8ccdf19b7a4d7
|
Rust
|
mythmon/advent-of-code
|
/year2017/src/day18/part1.rs
|
UTF-8
| 4,913
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
use advent_lib::cases::{GenericPuzzleCase, PuzzleCase, PuzzleRunner};
use indoc::indoc;
use std::collections::HashMap;
use std::str::FromStr;
#[derive(Debug)]
pub struct Part1;
impl PuzzleRunner for Part1 {
type Input = &'static str;
type Output = i64;
fn name(&self) -> String {
"2017-D18-P1".to_owned()
}
fn cases(&self) -> Result<Vec<Box<dyn PuzzleCase>>, Box<dyn std::error::Error>> {
Ok(GenericPuzzleCase::<Self, _, _>::build_set()
.case(
"Example",
indoc!(
"
set a 1
add a 2
mul a a
mod a 5
snd a
set a 0
rcv a
jgz a -1
set a 1
jgz a -2"
),
4,
)
.case("Solution", include_str!("input"), 1_187)
.collect())
}
fn run_puzzle(input: Self::Input) -> Self::Output {
let mut machine: Machine = input.parse().unwrap();
machine.run_until_recover()
}
}
#[derive(Debug)]
struct Machine {
instructions: Vec<Instr>,
registers: HashMap<char, i64>,
last_sound: Option<i64>,
program_counter: usize,
}
impl Machine {
fn run_until_recover(&mut self) -> i64 {
loop {
let instr = self.instructions[self.program_counter];
match instr {
Instr::Snd(a) => {
self.last_sound = Some(self.value(&a));
}
Instr::Set(r, a) => {
let v = self.value(&a);
self.registers.insert(r, v);
}
Instr::Add(r, a) => {
*(self.registers.entry(r).or_insert(0)) += self.value(&a);
}
Instr::Mul(r, a) => {
*(self.registers.entry(r).or_insert(0)) *= self.value(&a);
}
Instr::Mod(r, a) => {
*(self.registers.entry(r).or_insert(0)) %= self.value(&a);
}
Instr::Rcv(a) => {
if self.value(&a) != 0 {
break;
}
}
Instr::Jgz(a1, a2) => {
if self.value(&a1) > 0 {
// subtract 1 since 1 will be added at the end of the loop
self.program_counter =
((self.program_counter as i64) + self.value(&a2) - 1) as usize;
}
}
}
self.program_counter += 1;
assert!(self.program_counter < self.instructions.len());
}
self.last_sound.unwrap()
}
fn value(&self, arg: &Arg) -> i64 {
match arg {
Arg::Value(v) => *v,
Arg::Register(r) => *(self.registers.get(r).unwrap_or(&0)),
}
}
}
impl FromStr for Machine {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
let instructions: Vec<Instr> = input.trim().lines().map(|l| l.parse().unwrap()).collect();
Ok(Self {
instructions,
registers: HashMap::new(),
last_sound: None,
program_counter: 0,
})
}
}
#[derive(Debug, Clone, Copy)]
enum Instr {
Snd(Arg),
Set(char, Arg),
Add(char, Arg),
Mul(char, Arg),
Mod(char, Arg),
Rcv(Arg),
Jgz(Arg, Arg),
}
impl FromStr for Instr {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
let parts: Vec<&str> = input.split_whitespace().collect();
match &parts[..] {
["snd", x] => Ok(Instr::Snd(x.parse()?)),
["set", x, y] => Ok(Instr::Set(x.parse().unwrap(), y.parse().unwrap())),
["add", x, y] => Ok(Instr::Add(x.parse().unwrap(), y.parse().unwrap())),
["mul", x, y] => Ok(Instr::Mul(x.parse().unwrap(), y.parse().unwrap())),
["mod", x, y] => Ok(Instr::Mod(x.parse().unwrap(), y.parse().unwrap())),
["rcv", x] => Ok(Instr::Rcv(x.parse().unwrap())),
["jgz", x, y] => Ok(Instr::Jgz(x.parse().unwrap(), y.parse().unwrap())),
_ => Err(format!("Could not parse instruction: {}", input)),
}
}
}
#[derive(Debug, Clone, Copy)]
enum Arg {
Value(i64),
Register(char),
}
impl FromStr for Arg {
type Err = String;
fn from_str(input: &str) -> Result<Self, Self::Err> {
match input.parse::<i64>() {
Ok(v) => Ok(Arg::Value(v)),
Err(e) => {
if input.len() == 1 {
Ok(Arg::Register(input.chars().next().unwrap()))
} else {
Err(format!("Could not parse number: '{}'", e))
}
}
}
}
}
| true
|
ad9348461a7d12d5d40c75f3a0f8d109f3422e18
|
Rust
|
BroderickCarlin/embedded-graphics
|
/tinytga/src/footer.rs
|
UTF-8
| 3,363
| 3.015625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use core::num::NonZeroUsize;
use nom::{bytes::complete::tag, combinator::map, number::complete::le_u32, IResult, Needed};
/// TGA footer length in bytes
const TGA_FOOTER_LENGTH: usize = 26;
/// TGA footer structure, referenced from <http://tfc.duke.free.fr/coding/tga_specs.pdf>
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
pub(crate) struct TgaFooter {
/// Footer start offset
footer_start: usize,
/// Extension area offset
extension_area_offset: Option<NonZeroUsize>,
/// Developer directory
developer_directory_offset: Option<NonZeroUsize>,
}
impl TgaFooter {
/// Parses the TGA footer.
///
/// Returns `None` if the file doesn't contain a valid footer.
pub fn parse(image_data: &[u8]) -> Option<Self> {
parse_footer(image_data).ok().map(|(_, footer)| footer)
}
/// Returns the length of the footer section of the TGA file.
///
/// The length includes the footer, extension area and developer directory.
pub fn length(&self, image_data: &[u8]) -> usize {
let mut length = TGA_FOOTER_LENGTH;
if let Some(offset) = self.extension_area_offset {
length = length.max(image_data.len() - offset.get());
}
if let Some(offset) = self.developer_directory_offset {
length = length.max(image_data.len() - offset.get());
}
length
}
/// Returns the extension area.
///
/// Returns `None` if the file doesn't contain an extension area.
pub fn extension_area<'a>(&self, image_data: &'a [u8]) -> Option<&'a [u8]> {
self.extension_area_offset
.map(NonZeroUsize::get)
.and_then(|start| {
let end = self
.developer_directory_offset
.map(NonZeroUsize::get)
.filter(|offset| *offset > start)
.unwrap_or(self.footer_start);
image_data.get(start..end)
})
}
/// Returns the developer directory.
///
/// Returns `None` if the file doesn't contain a developer directory.
pub fn developer_directory<'a>(&self, image_data: &'a [u8]) -> Option<&'a [u8]> {
self.developer_directory_offset
.map(NonZeroUsize::get)
.and_then(|start| {
let end = self
.extension_area_offset
.map(NonZeroUsize::get)
.filter(|offset| *offset > start)
.unwrap_or(self.footer_start);
image_data.get(start..end)
})
}
}
fn offset(input: &[u8]) -> IResult<&[u8], Option<NonZeroUsize>> {
map(le_u32, |offset| NonZeroUsize::new(offset as usize))(input)
}
fn parse_footer<'a>(input: &'a [u8]) -> IResult<&[u8], TgaFooter> {
let footer_start = input
.len()
.checked_sub(TGA_FOOTER_LENGTH)
.ok_or(nom::Err::Incomplete(Needed::Size(TGA_FOOTER_LENGTH)))?;
let input = &input[footer_start..input.len()];
let (input, extension_area_offset) = offset(input)?;
let (input, developer_directory_offset) = offset(input)?;
let (input, _) = tag("TRUEVISION-XFILE.\0")(input)?;
Ok((
input,
TgaFooter {
footer_start,
extension_area_offset,
developer_directory_offset,
},
))
}
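// Hedged sketch (added; not in the original crate): a minimal "bare footer" with
// no extension area or developer directory parses to a footer of exactly
// TGA_FOOTER_LENGTH bytes.
#[cfg(test)]
mod footer_tests {
    use super::*;

    #[test]
    fn parses_bare_footer() {
        let mut data = vec![0u8; 10]; // fake image data before the footer
        data.extend_from_slice(&0u32.to_le_bytes()); // extension area offset = 0 (absent)
        data.extend_from_slice(&0u32.to_le_bytes()); // developer directory offset = 0 (absent)
        data.extend_from_slice(b"TRUEVISION-XFILE.\0");
        let footer = TgaFooter::parse(&data).expect("footer should parse");
        assert_eq!(footer.length(&data), TGA_FOOTER_LENGTH);
        assert!(footer.extension_area(&data).is_none());
        assert!(footer.developer_directory(&data).is_none());
    }
}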
| true
|
13c95b13ecbf6ff431377feecea25c6a9a4fad71
|
Rust
|
mdaffin/lazaretto
|
/iced-quicksilver-renderer/src/renderer.rs
|
UTF-8
| 4,008
| 2.640625
| 3
|
[
"MIT"
] |
permissive
|
use std::sync::Arc;
use crate::{text, Defaults, Primitive, Settings};
#[cfg(any(feature = "image", feature = "svg"))]
use crate::image::{self, Image};
use iced_native::{self, MouseCursor};
use quicksilver::{
geom::{Rectangle, Vector},
graphics::{Background, Color, Font},
lifecycle::Asset,
lifecycle::Window,
};
mod widget;
/// A [`quicksilver`] renderer for `[iced]`.
///
/// [`iced`]: https://github.com/hecrj/iced
/// [`quicksilver`]: https://github.com/ryanisaacg/quicksilver
#[derive(Derivative)]
#[derivative(Debug)]
pub struct Renderer {
text_pipeline: text::Pipeline,
}
impl Renderer {
/// Creates a new [`Renderer`].
///
/// [`Renderer`]: struct.Renderer.html
pub fn new(settings: Settings) -> Self {
let text_pipeline = text::Pipeline::new(settings.default_font);
Self { text_pipeline }
}
    /// Draws the provided primitives in the given `Window`.
    ///
    /// The text provided as overlay will be rendered on top of the primitives.
    /// This is useful for rendering debug information.
pub fn draw<T: AsRef<str>>(
&mut self,
window: &mut Window,
(primitive, mouse_cursor): &(Primitive, MouseCursor),
overlay: &[T],
) -> MouseCursor {
self.draw_primitive(
window, // Vector::new(0.0, 0.0),
primitive,
);
*mouse_cursor
}
fn draw_primitive<'a>(
&mut self,
window: &mut Window,
// translation: Vector,
primitive: &'a Primitive,
) {
match primitive {
Primitive::None => {}
Primitive::Group { primitives } => {
// TODO: Inspect a bit and regroup (?)
for primitive in primitives {
self.draw_primitive(window, primitive)
}
}
Primitive::Text {
content,
bounds,
color,
size,
font,
horizontal_alignment,
vertical_alignment,
} => {
let image = self.text_pipeline.to_image(
content,
iced_col_to_qs(color),
*size,
*font,
);
window.draw(&iced_rect_to_qs(bounds), Background::Img(&image));
}
Primitive::Quad {
bounds,
background,
border_width,
border_color,
} => {
let bounds = iced_rect_to_qs(bounds);
if *border_width > 0 {
window.draw(
&Rectangle {
pos: bounds.pos - Vector::ONE * *border_width,
size: bounds.size + Vector::ONE * (*border_width * 2),
},
Background::Col(iced_col_to_qs(border_color)),
);
}
let background_color = match background {
iced_native::Background::Color(col) => col,
};
window.draw(&bounds, Background::Col(iced_col_to_qs(background_color)));
}
}
}
}
impl iced_native::Renderer for Renderer {
type Output = (Primitive, MouseCursor);
type Defaults = Defaults;
fn layout<'a, Message>(
&mut self,
element: &iced_native::Element<'a, Message, Self>,
limits: &iced_native::layout::Limits,
) -> iced_native::layout::Node {
let node = element.layout(self, limits);
node
}
}
fn iced_rect_to_qs(rect: &iced_native::Rectangle) -> Rectangle {
Rectangle {
pos: Vector::new(rect.x, rect.y),
size: Vector::new(rect.width, rect.height),
}
}
fn iced_col_to_qs(col: &iced_native::Color) -> Color {
let &iced_native::Color { r, g, b, a } = col;
Color { r, g, b, a }
}
| true
|
1c0871c44f38df300ddb53a917d10449c1834744
|
Rust
|
marmistrz/rustyline
|
/src/highlight.rs
|
UTF-8
| 11,707
| 3.0625
| 3
|
[
"MIT"
] |
permissive
|
//! Syntax highlighting
use crate::config::CompletionType;
use memchr::memchr;
use std::borrow::Cow::{self, Borrowed, Owned};
use std::cell::Cell;
pub struct PromptInfo<'a> {
pub(crate) default: bool,
pub(crate) offset: usize,
pub(crate) cursor: Option<usize>,
pub(crate) input: &'a str,
pub(crate) line: &'a str,
pub(crate) line_no: usize,
}
/// Syntax highlighter with [ANSI color](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_(Select_Graphic_Rendition)_parameters).
/// Rustyline will try to handle escape sequences for ANSI color on Windows
/// when they are not supported natively (Windows < 10).
///
/// Currently, the highlighted version *must* have the same display width as
/// the original input.
pub trait Highlighter {
/// Takes the currently edited `line` with the cursor `pos`ition and
/// returns the highlighted version (with ANSI color).
///
/// For example, you can implement
/// [blink-matching-paren](https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html).
fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> {
let _ = pos;
Borrowed(line)
}
/// Takes the `prompt` and
/// returns the highlighted version (with ANSI color).
fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
&'s self,
prompt: &'p str,
info: PromptInfo<'_>,
) -> Cow<'b, str> {
let _ = info;
Borrowed(prompt)
}
/// Returns `true` if prompt is rectangular rather than being present only
/// on the first line of input
fn has_continuation_prompt(&self) -> bool {
false
}
/// Takes the `hint` and
/// returns the highlighted version (with ANSI color).
fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
Borrowed(hint)
}
/// Takes the completion `candidate` and
/// returns the highlighted version (with ANSI color).
///
/// Currently, used only with `CompletionType::List`.
fn highlight_candidate<'c>(
&self,
candidate: &'c str,
completion: CompletionType,
) -> Cow<'c, str> {
let _ = completion;
Borrowed(candidate)
}
/// Tells if `line` needs to be highlighted when a specific char is typed or
/// when cursor is moved under a specific char.
///
/// Used to optimize refresh when a character is inserted or the cursor is
/// moved.
fn highlight_char(&self, line: &str, pos: usize) -> bool {
let _ = (line, pos);
false
}
}
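// Illustrative custom highlighter (a sketch, not part of rustyline itself):
//
//     struct DimHints;
//     impl Highlighter for DimHints {
//         fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
//             Owned(format!("\x1b[2m{}\x1b[0m", hint)) // dim the hint text
//         }
//     }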
impl Highlighter for () {}
impl<'r, H: ?Sized + Highlighter> Highlighter for &'r H {
fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> {
(**self).highlight(line, pos)
}
fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
&'s self,
prompt: &'p str,
info: PromptInfo<'_>,
) -> Cow<'b, str> {
(**self).highlight_prompt(prompt, info)
}
fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
(**self).highlight_hint(hint)
}
fn highlight_candidate<'c>(
&self,
candidate: &'c str,
completion: CompletionType,
) -> Cow<'c, str> {
(**self).highlight_candidate(candidate, completion)
}
fn highlight_char(&self, line: &str, pos: usize) -> bool {
(**self).highlight_char(line, pos)
}
}
const OPENS: &[u8; 3] = b"{[(";
const CLOSES: &[u8; 3] = b"}])";
#[derive(Default)]
pub struct MatchingBracketHighlighter {
bracket: Cell<Option<(u8, usize)>>, // memorize the character to search...
}
impl MatchingBracketHighlighter {
pub fn new() -> Self {
Self {
bracket: Cell::new(None),
}
}
}
impl Highlighter for MatchingBracketHighlighter {
fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
if line.len() <= 1 {
return Borrowed(line);
}
// highlight matching brace/bracket/parenthesis if it exists
if let Some((bracket, pos)) = self.bracket.get() {
if let Some((matching, idx)) = find_matching_bracket(line, pos, bracket) {
let mut copy = line.to_owned();
copy.replace_range(idx..=idx, &format!("\x1b[1;34m{}\x1b[0m", matching as char));
return Owned(copy);
}
}
Borrowed(line)
}
fn highlight_char(&self, line: &str, pos: usize) -> bool {
// will highlight matching brace/bracket/parenthesis if it exists
self.bracket.set(check_bracket(line, pos));
self.bracket.get().is_some()
}
}
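// Illustrative behaviour (see the tests at the end of this file): when the
// cursor sits on the '(' of "(a + b)", `highlight_char` memorizes that
// bracket, and the following `highlight` call wraps the matching ')' in the
// bold-blue escape "\x1b[1;34m...\x1b[0m".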
fn find_matching_bracket(line: &str, pos: usize, bracket: u8) -> Option<(u8, usize)> {
let matching = matching_bracket(bracket);
let mut idx;
let mut unmatched = 1;
if is_open_bracket(bracket) {
// forward search
idx = pos + 1;
let bytes = &line.as_bytes()[idx..];
for b in bytes {
if *b == matching {
unmatched -= 1;
if unmatched == 0 {
debug_assert_eq!(matching, line.as_bytes()[idx]);
return Some((matching, idx));
}
} else if *b == bracket {
unmatched += 1;
}
idx += 1;
}
debug_assert_eq!(idx, line.len());
} else {
// backward search
idx = pos;
let bytes = &line.as_bytes()[..idx];
for b in bytes.iter().rev() {
if *b == matching {
unmatched -= 1;
if unmatched == 0 {
debug_assert_eq!(matching, line.as_bytes()[idx - 1]);
return Some((matching, idx - 1));
}
} else if *b == bracket {
unmatched += 1;
}
idx -= 1;
}
debug_assert_eq!(idx, 0);
}
None
}
// check under or before the cursor
fn check_bracket(line: &str, pos: usize) -> Option<(u8, usize)> {
if line.is_empty() {
return None;
}
let mut pos = pos;
if pos >= line.len() {
pos = line.len() - 1; // before cursor
let b = line.as_bytes()[pos]; // previous byte
if is_close_bracket(b) {
Some((b, pos))
} else {
None
}
} else {
let mut under_cursor = true;
loop {
let b = line.as_bytes()[pos];
if is_close_bracket(b) {
if pos == 0 {
return None;
} else {
return Some((b, pos));
}
} else if is_open_bracket(b) {
if pos + 1 == line.len() {
return None;
} else {
return Some((b, pos));
}
} else if under_cursor && pos > 0 {
under_cursor = false;
pos -= 1; // or before cursor
} else {
return None;
}
}
}
}
fn matching_bracket(bracket: u8) -> u8 {
match bracket {
b'{' => b'}',
b'}' => b'{',
b'[' => b']',
b']' => b'[',
b'(' => b')',
b')' => b'(',
b => b,
}
}
fn is_open_bracket(bracket: u8) -> bool {
memchr(bracket, OPENS).is_some()
}
fn is_close_bracket(bracket: u8) -> bool {
memchr(bracket, CLOSES).is_some()
}
pub(crate) fn split_highlight(src: &str, offset: usize) -> (Cow<'_, str>, Cow<'_, str>) {
let mut style_buffer = String::with_capacity(32);
let mut iter = src.char_indices();
let mut non_escape_idx = 0;
while let Some((idx, c)) = iter.next() {
if c == '\x1b' {
match iter.next() {
Some((_, '[')) => {}
_ => continue, // unknown escape, skip
}
while let Some((end_idx, c)) = iter.next() {
match c {
'm' => {
let slice = &src[idx..end_idx + 1];
if slice == "\x1b[0m" {
style_buffer.clear();
} else {
style_buffer.push_str(slice);
}
break;
}
';' | '0'..='9' => continue,
_ => break, // unknown escape, skip
}
}
continue;
}
if non_escape_idx >= offset {
if style_buffer.is_empty() {
return (src[..idx].into(), src[idx..].into());
} else {
let mut left = String::with_capacity(idx + 4);
left.push_str(&src[..idx]);
left.push_str("\x1b[0m");
let mut right = String::with_capacity(src.len() - idx + style_buffer.len());
right.push_str(&style_buffer);
right.push_str(&src[idx..]);
return (left.into(), right.into());
}
}
non_escape_idx += c.len_utf8();
}
(src.into(), "".into())
}
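// Worked example (illustrative, not taken from the tests below): splitting a
// red-highlighted string at display offset 1,
//
//     split_highlight("\x1b[31mabc\x1b[0m", 1)
//
// yields ("\x1b[31ma\x1b[0m", "\x1b[31mbc\x1b[0m"): the left half is closed
// with a reset and the pending style is re-applied to the right half.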
impl PromptInfo<'_> {
/// Returns true if this is the default prompt
pub fn default(&self) -> bool {
self.default
}
/// Returns the byte offset where prompt is shown in the initial text
///
/// This is a position right after the newline of the previous line
pub fn line_offset(&self) -> usize {
self.offset
}
/// Returns the byte position of the cursor relative to `line_offset` if
/// the cursor is in the current line
pub fn cursor(&self) -> Option<usize> {
self.cursor
}
/// Returns the zero-based line number of the current prompt line
pub fn line_no(&self) -> usize {
self.line_no
}
/// Returns the line contents shown after the prompt
pub fn line(&self) -> &str {
self.line
}
/// Returns the whole input (equal to `line` if input is the single line)
pub fn input(&self) -> &str {
self.input
}
}
#[cfg(test)]
mod tests {
#[test]
pub fn find_matching_bracket() {
use super::find_matching_bracket;
assert_eq!(find_matching_bracket("(...", 0, b'('), None);
assert_eq!(find_matching_bracket("...)", 3, b')'), None);
assert_eq!(find_matching_bracket("()..", 0, b'('), Some((b')', 1)));
assert_eq!(find_matching_bracket("(..)", 0, b'('), Some((b')', 3)));
assert_eq!(find_matching_bracket("..()", 3, b')'), Some((b'(', 2)));
assert_eq!(find_matching_bracket("(..)", 3, b')'), Some((b'(', 0)));
assert_eq!(find_matching_bracket("(())", 0, b'('), Some((b')', 3)));
assert_eq!(find_matching_bracket("(())", 3, b')'), Some((b'(', 0)));
}
#[test]
pub fn check_bracket() {
use super::check_bracket;
assert_eq!(check_bracket(")...", 0), None);
assert_eq!(check_bracket("(...", 2), None);
assert_eq!(check_bracket("...(", 3), None);
assert_eq!(check_bracket("...(", 4), None);
assert_eq!(check_bracket("..).", 4), None);
assert_eq!(check_bracket("(...", 0), Some((b'(', 0)));
assert_eq!(check_bracket("(...", 1), Some((b'(', 0)));
assert_eq!(check_bracket("...)", 3), Some((b')', 3)));
assert_eq!(check_bracket("...)", 4), Some((b')', 3)));
}
#[test]
pub fn matching_bracket() {
use super::matching_bracket;
assert_eq!(matching_bracket(b'('), b')');
assert_eq!(matching_bracket(b')'), b'(');
}
#[test]
pub fn is_open_bracket() {
use super::is_close_bracket;
use super::is_open_bracket;
assert!(is_open_bracket(b'('));
assert!(is_close_bracket(b')'));
}
}
| true
|
42020d25d63132b52b14d72f69e90d3f2dea98fe
|
Rust
|
rariyama/imitation_interpreter
|
/src/object.rs
|
UTF-8
| 3,232
| 3.203125
| 3
|
[] |
no_license
|
use std::fmt;
use std::collections::BTreeMap;
use super::evaluator::{Environment};
use super::ast::{Expression, Statement};
use super::errors::{Errors};
#[derive(Debug,PartialEq, Clone, Eq, Ord, PartialOrd)]
pub enum Object {
Identifier(String),
String(String),
Integer(i32),
Boolean(bool),
Return(Box<Object>),
Let(Box<Object>),
Array(Vec<Object>),
Hash(BTreeMap<Box<HashKey>, Box<HashPair>>),
Function{params: Vec<Expression>,
body: Statement,
env: Environment
},
Builtin{
func: fn(Vec<Object>) -> Object
},
Error(Errors),
Null,
Default
}
impl fmt::Display for Object {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Object::Identifier(value) => write!(f, "{}", value),
Object::String(value) => write!(f, "{}", value),
Object::Integer(value) => write!(f, "{}", value),
Object::Boolean(value) => write!(f, "{}", value),
Object::Return(value) => write!(f, "{}", value),
Object::Let(value) => write!(f, "{}", value),
Object::Hash(tree) => {
write!(f, "{{{}}}", tree.iter().map(|(key, value)| format!("{}: {}", key, value)).collect::<Vec<_>>().join(", "))
},
Object::Array(value) => write!(f, "[{}]", value.iter().map(|expression| format!("{}", &expression)).collect::<Vec<_>>().join(", ")),
Object::Function{params, body, env} => write!(f, "{} {} {:?}", params.iter().map(|expression| format!("{}", &expression)).collect::<Vec<_>>().join(", ")
, body
, env
),
Object::Builtin{func: _} => write!(f, "builtin functions"),
Object::Null => write!(f, ""),
Object::Default => write!(f, "default"),
Object::Error(value) => write!(f, "{}", value)
}
}
}
#[derive(Debug,PartialEq, Clone, Eq, Ord, PartialOrd)]
pub struct HashPair {
pub key: Object,
pub value: Object,
}
impl fmt::Display for HashPair {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.value)
}
}
#[derive(Debug,PartialEq, Clone, Eq, Ord, PartialOrd)]
pub enum HashKey {
Integer(i32),
String(String),
Boolean(bool),
Null
}
impl fmt::Display for HashKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
HashKey::Integer(value) => write!(f, "{}", value),
HashKey::String(value) => write!(f, "{}", value),
HashKey::Boolean(value) => write!(f, "{}", value),
HashKey::Null => write!(f, "null"),
}
}
}
impl HashKey {
pub fn get_hashkey(key: &Object) -> Self {
match key {
Object::Integer(key) => HashKey::Integer(*key),
Object::String(key) => HashKey::String(key.clone()),
Object::Boolean(key) => HashKey::Boolean(*key),
_ => HashKey::Null
}
}
}
| true
|
b7fb696884f7955b7a1cae4e411db3c460d7c63a
|
Rust
|
niklasha/adventofcode2019
|
/src/day15.rs
|
UTF-8
| 14,336
| 2.703125
| 3
|
[] |
no_license
|
use simple_error::bail;
use evmap;
use std::error;
use std::io;
use std::io::BufRead;
use std::sync::mpsc;
use std::thread;
use crate::day;
pub type BoxResult<T> = Result<T, Box<dyn error::Error>>;
struct Intcode {
p: Vec<i64>,
base: i64,
}
impl Intcode {
fn new(p: &[i64]) -> Self { Self { p: p.to_vec(), base: 0 } }
fn op(&self, c: i64) -> i64 { c % 100 }
fn get(&mut self, a: usize) -> i64 {
if a >= self.p.len() { self.p.resize(a + 1, 0); }
self.p[a]
}
fn put(&mut self, a: usize, v: i64) {
// eprintln!("put @{} {}", a, v);
if a >= self.p.len() { self.p.resize(a + 1, 0); }
self.p[a] = v;
}
fn addr(&mut self, ip: usize, i: usize) -> usize {
let a = self.get(ip + i);
let v = match self.get(ip) / vec![100, 1000, 10000][i - 1] % 10 {
0 => a as usize,
2 => (a + self.base) as usize,
_ => 0, // XXX
};
v
}
fn val(&mut self, ip: usize, i: usize) -> i64 {
let a = self.get(ip + i);
let v = match self.get(ip) / vec![100, 1000, 10000][i - 1] % 10 {
1 => a,
_ => {
let addr = self.addr(ip, i);
self.get(addr)
},
};
// eprintln!("{} {} {} {} {} {}", ip, self.get(ip), i, a, self.base, v);
v
}
fn run(&mut self, sender: mpsc::Sender<i64>, receiver: mpsc::Receiver<i64>,
request: mpsc::Sender<()>, ack: mpsc::Receiver<()>) -> BoxResult<i64> {
let mut ip = 0;
let mut o = None;
while { let op = self.get(ip); self.op(op) != 99 } {
// eprintln!("{}: {} {} {} {}", ip, self.p[ip], self.p[ip + 1], self.p[ip + 2], self.p[ip + 3]);
match self.op(self.p[ip]) {
1 => {
let a = self.val(ip, 1);
let b = self.val( ip, 2);
let c = self.addr(ip, 3);
self.put(c, a + b);
ip += 4;
},
2 => {
let a = self.val(ip, 1);
let b = self.val(ip, 2);
let c = self.addr(ip, 3);
self.put(c, a * b);
ip += 4;
},
3 => {
let a = self.addr(ip, 1);
// eprintln!(">recv {}", a);
request.send(())?;
self.put(a as usize, receiver.recv()?);
// eprintln!("<recv {}", self.get(a as usize));
ip += 2;
},
4 => {
let a = self.val(ip, 1);
o = Some(a);
// eprintln!(">send {}", a);
sender.send(a)?;
// eprintln!("<send");
ack.recv()?;
ip += 2;
},
5 => {
let a = self.val(ip, 1);
let b = self.val(ip, 2) as usize;
ip = if a != 0 { b } else { ip + 3 };
},
6 => {
let a = self.val(ip, 1);
let b = self.val(ip, 2) as usize;
ip = if a == 0 { b } else { ip + 3 };
},
7 => {
let a = self.val(ip, 1);
let b = self.val(ip, 2);
let c = self.addr(ip, 3);
self.put(c, if a < b { 1 } else { 0 });
ip += 4;
},
8 => {
let a = self.val(ip, 1);
let b = self.val(ip, 2);
let c = self.addr(ip,3);
self.put(c, if a == b { 1 } else { 0 });
ip += 4;
},
9 => {
self.base += self.val(ip, 1);
// eprintln!("<base {}", self.base);
ip += 2;
}
_ => bail!("unknown opcode {}: {}", ip, self.op(self.p[ip])),
};
}
if o.is_none() { bail!("no output"); }
Ok(o.unwrap())
}
#[allow(dead_code)]
fn arg(&self, ip: usize, offset: usize) -> String {
let a = self.p[ip + offset].to_string();
match self.p[ip] / vec![100, 1000, 10000][offset - 1] % 10 {
0 => format!("@{}", a),
1 => a,
2 => format!("+{}", a),
_ => String::from(""), // XXX
}
}
#[allow(dead_code)]
fn disassemble(&self) {
let mut ip = 0;
while ip < self.p.len() {
match self.op(self.p[ip]) {
1 => {
println!("{}: add {} {} {}", ip, self.arg(ip, 1), self.arg(ip, 2), self.arg(ip, 3));
ip += 4;
},
2 => {
println!("{}: mul {} {} {}", ip, self.arg(ip, 1), self.arg(ip, 2), self.arg(ip, 3));
ip += 4;
},
3 => {
println!("{}: in {}", ip, self.arg(ip, 1));
ip += 2;
},
4 => {
println!("{}: out {}", ip, self.arg(ip, 1));
ip += 2;
},
5 => {
println!("{}: jnz {} {}", ip, self.arg(ip, 1), self.arg(ip, 2));
ip += 3;
},
6 => {
println!("{}: jz {} {}", ip, self.arg(ip, 1), self.arg(ip, 2));
ip += 3;
},
7 => {
println!("{}: testlt {} {} {}", ip, self.arg(ip, 1), self.arg(ip, 2), self.arg(ip, 3));
ip += 4;
},
8 => {
println!("{}: testeq {} {} {}", ip, self.arg(ip, 1), self.arg(ip, 2), self.arg(ip, 3));
ip += 4;
},
9 => {
println!("{}: base {}", ip, self.arg(ip, 1));
ip += 2;
},
99 => {
println!("{}: halt", ip);
ip += 1;
},
_ => {
println!("{}: data ({})", ip, self.p[ip]);
ip += 1;
},
};
}
}
}
pub struct Day15 {}
impl day::Day for Day15 {
fn tag(&self) -> &str { "15" }
fn part1(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
let reader = io::BufReader::new(input());
let p = reader.split(b',')
.map(|v| String::from_utf8(v.unwrap()).unwrap())
.map(|s| s.trim_end().parse::<i64>().unwrap())
.collect::<Vec<_>>();
println!("{:?}", self.part1_impl(p));
}
fn part2(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
let reader = io::BufReader::new(input());
let p = reader.split(b',')
.map(|v| String::from_utf8(v.unwrap()).unwrap())
.map(|s| s.trim_end().parse::<i64>().unwrap())
.collect::<Vec<_>>();
println!("{:?}", self.part2_impl(p));
}
}
impl Day15 {
fn part1_impl(self: &Self, p: Vec<i64>) -> BoxResult<usize> {
let (input_sender, input_receiver) = mpsc::channel::<i64>();
let (output_sender, output_receiver) = mpsc::channel::<i64>();
let (request_sender, request_receiver) = mpsc::channel::<()>();
let (ack_sender, ack_receiver) = mpsc::channel::<()>();
let _cpu = thread::spawn(move || {
let mut ic = Intcode::new(&p);
ic.run(output_sender, input_receiver, request_sender, ack_receiver)
.unwrap_or(0);
});
let (grid_r, mut grid_w) = evmap::new();
let dirs: Vec<(i64, i64)> = vec![(0, -1), (0, 1), (-1, 0), (1, 0)];
let to_move = |(dx, dy): (i64, i64)|
dirs.iter().position(|&(x, y)| dx == x && dy == y).unwrap() as i64
+ 1;
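// `to_move` turns a (dx, dy) step into the movement command 1-4 expected by
// the intcode program; the numbering simply follows the order of `dirs`.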
let origin = (0i64, 0i64);
let mut pos = vec![origin];
grid_w.insert(origin, (1, vec![]));
grid_w.refresh();
let mut track = vec![];
let mut peek = |(px, py), (dx, dy), track: &mut Vec<_>| {
request_receiver.recv().unwrap();
input_sender.send(to_move((dx, dy))).unwrap();
let v = output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
if v != 0 {
request_receiver.recv().unwrap();
input_sender.send(to_move((-dx, -dy))).unwrap();
output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
};
track.push((dx, dy).clone());
grid_w.update((px + dx, py + dy), (v, track.clone()));
grid_w.refresh();
track.pop();
grid_w.refresh();
// eprintln!("peek ({}, {}): {}", px + dx, py + dy, v);
v
};
let step = |(dx, dy): (i64, i64), forward, track: &mut Vec<_>| {
let (dx, dy) = if forward { (dx, dy) } else { (-dx, -dy) };
request_receiver.recv().unwrap();
input_sender.send(to_move((dx, dy))).unwrap();
output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
if forward { track.push((dx, dy).clone()); } else { track.pop(); };
};
let mut i = 0;
let mut found = false;
while !found {
i += 1;
// eprintln!("{}: {:?}", i, pos);
let r = pos.iter().fold((vec![], false), |(mut pos, found), &(px, py)| {
let (_, t) = grid_r.get_and(&(px, py), |x| x[0].clone()).unwrap();
// eprintln!("({}, {}): {} {:?}", px, py, v, t);
for m in &t { step(*m, true, &mut track); }
let moves = dirs.iter().filter(|&(dx, dy)|
grid_r.get_and(&(px + *dx, py + *dy), |_| ()) == None);
let moves: Vec<_> = moves
.map(|&(dx, dy)| ((px + dx, py + dy), peek((px, py), (dx, dy), &mut track)))
.filter(|&(_, x)| x != 0).collect();
for (p, _) in &moves { pos.push(*p); }
for m in t.iter().rev() { step(*m, false, &mut track) };
(pos, found || moves.iter().any(|(_, x)| *x == 2))
});
pos = r.0;
found = r.1;
}
Ok(i)
}
fn part2_impl(self: &Self, p: Vec<i64>) -> BoxResult<i64> {
let (input_sender, input_receiver) = mpsc::channel::<i64>();
let (output_sender, output_receiver) = mpsc::channel::<i64>();
let (request_sender, request_receiver) = mpsc::channel::<()>();
let (ack_sender, ack_receiver) = mpsc::channel::<()>();
let _cpu = thread::spawn(move || {
let mut ic = Intcode::new(&p);
ic.run(output_sender, input_receiver, request_sender, ack_receiver)
.unwrap_or(0);
});
let (grid_r, mut grid_w) = evmap::new();
let dirs: Vec<(i64, i64)> = vec![(0, -1), (0, 1), (-1, 0), (1, 0)];
let to_move = |(dx, dy): (i64, i64)|
dirs.iter().position(|&(x, y)| dx == x && dy == y).unwrap() as i64
+ 1;
let origin = (0i64, 0i64);
let mut pos = vec![origin];
grid_w.insert(origin, (1, vec![]));
grid_w.refresh();
let mut track = vec![];
let mut peek = |(px, py), (dx, dy), track: &mut Vec<_>| {
request_receiver.recv().unwrap();
input_sender.send(to_move((dx, dy))).unwrap();
let v = output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
if v != 0 {
request_receiver.recv().unwrap();
input_sender.send(to_move((-dx, -dy))).unwrap();
output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
};
track.push((dx, dy).clone());
grid_w.update((px + dx, py + dy), (v, track.clone()));
grid_w.refresh();
track.pop();
grid_w.refresh();
// eprintln!("peek ({}, {}): {}", px + dx, py + dy, v);
v
};
let step = |(dx, dy): (i64, i64), forward, track: &mut Vec<_>| {
let (dx, dy) = if forward { (dx, dy) } else { (-dx, -dy) };
request_receiver.recv().unwrap();
input_sender.send(to_move((dx, dy))).unwrap();
output_receiver.recv().unwrap();
ack_sender.send(()).unwrap();
if forward { track.push((dx, dy).clone()); } else { track.pop(); };
};
let mut oxygen = None;
while !pos.is_empty() {
let (p, o) = pos.iter().fold((vec![], None), |(mut pos, _oxygen), &(px, py)| {
let (_, t) = grid_r.get_and(&(px, py), |x| x[0].clone()).unwrap();
for m in &t { step(*m, true, &mut track); }
let moves = dirs.iter().filter(|&(dx, dy)|
grid_r.get_and(&(px + *dx, py + *dy), |_| ()) == None);
let moves: Vec<_> = moves
.map(|&(dx, dy)| ((px + dx, py + dy), peek((px, py), (dx, dy), &mut track)))
.filter(|&(_, x)| x != 0).collect();
for (p, _) in &moves { pos.push(*p); }
for m in t.iter().rev() { step(*m, false, &mut track) };
(pos,
moves.iter().filter(|(_, x)| *x == 2).next().map(|(p, _)| *p))
});
pos = p;
if o != None { oxygen = o; }
}
let mut t = 0;
pos = vec![oxygen.unwrap()];
while !pos.is_empty() {
// eprintln!("{}: {:?}", t, pos);
pos = pos.iter().fold(vec![], |mut pos, &(px, py)| {
for (dx, dy) in dirs.iter().filter(|&(dx, dy)|
grid_r.get_and(&(px + *dx, py + *dy), |x| x[0].clone()).unwrap().0 == 1) {
let (x, y) = (px + *dx, py + *dy);
grid_w.update((x, y), (2, vec![]));
grid_w.refresh();
pos.push((x, y));
}
pos
});
t += 1;
}
Ok(t - 1)
}
}
| true
|
e335d5b5f0c298ab26cf0a2d8c8e0c650218a2b7
|
Rust
|
fluffypony/tari
|
/base_layer/p2p/src/proto/tari.p2p.message_type.rs
|
UTF-8
| 700
| 2.546875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
/// A tari message type is an immutable 32-bit signed integer indicating the type of message being received or sent
/// over the network.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum TariMessageType {
None = 0,
// -- NetMessages --
PingPong = 1,
// -- Blockchain messages --
NewTransaction = 65,
NewBlock = 66,
SenderPartialTransaction = 67,
ReceiverPartialTransactionReply = 68,
BaseNodeRequest = 69,
BaseNodeResponse = 70,
MempoolRequest = 71,
MempoolResponse = 72,
// -- DAN Messages --
TransactionFinalized = 73,
// -- Extended --
Text = 225,
TextAck = 226,
}
| true
|
6283a0d13eba316a8b6b8ff693bd295ed6c22816
|
Rust
|
Kerollmops/corewar-rs
|
/compiler/src/var_instr/variable/var_dir_ind.rs
|
UTF-8
| 1,560
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use pest::Error;
use var_instr::variable::{Variable, AsComplete, LabelNotFound};
use var_instr::variable::FromPair;
use machine::instruction::mem_size::MemSize;
use machine::instruction::parameter::{Direct, Indirect, DirInd};
use label::Label;
#[derive(Debug)]
pub enum VarDirInd {
Direct(Variable<Direct>),
Indirect(Variable<Indirect>),
}
impl MemSize for VarDirInd {
fn mem_size(&self) -> usize {
match *self {
VarDirInd::Direct(ref direct) => direct.mem_size(),
VarDirInd::Indirect(ref indirect) => indirect.mem_size(),
}
}
}
impl FromPair for VarDirInd {
fn from_pair(pair: ::AsmPair) -> Result<Self, ::AsmError> {
match pair.as_rule() {
::Rule::direct => Ok(VarDirInd::Direct(Variable::from_pair(pair)?)),
::Rule::indirect => Ok(VarDirInd::Indirect(Variable::from_pair(pair)?)),
_ => Err(Error::CustomErrorSpan {
message: format!("expected direct, indirect found {:?}", pair.as_rule()),
span: pair.clone().into_span(),
}),
}
}
}
impl AsComplete<DirInd> for VarDirInd {
fn as_complete(&self, offset: usize, label_offsets: &HashMap<Label, usize>) -> Result<DirInd, LabelNotFound> {
use self::VarDirInd::*;
match *self {
Direct(ref direct) => Ok(DirInd::Direct(direct.as_complete(offset, label_offsets)?)),
Indirect(ref indirect) => Ok(DirInd::Indirect(indirect.as_complete(offset, label_offsets)?)),
}
}
}
| true
|
fe76fd4a705a4b0020414fd842ddfb7da017a1a7
|
Rust
|
Kasama/NovaROMarketBot
|
/src/web.rs
|
UTF-8
| 4,420
| 2.859375
| 3
|
[] |
no_license
|
pub mod selectors;
use mdo::option::*;
use crate::item::{ItemInfo,ItemType,IdInfo};
use std::string::String;
use scraper::{Html,Selector,ElementRef};
pub fn get_ids_by_name(item_name: &str) -> Vec<IdInfo> {
let name = item_name.replace(" ", "+").to_lowercase();
let url = format!("https://www.novaragnarok.com/?module=item&action=index&type=&name={}", name);
let maybe_vec = reqwest::get(&url)
.and_then(|mut response| response.text())
.and_then(|body: String| {
let document = Html::parse_document(&body);
let rows_selector = Selector::parse("#itemtable > tbody > tr").unwrap();
let id_selector = Selector::parse("td:nth-child(1)").unwrap();
let name_selector = Selector::parse("td:nth-child(3) > a").unwrap();
let type_selector = Selector::parse("td:nth-child(4)").unwrap();
let item_iterator = document.select(&rows_selector);
let items: Vec<IdInfo> = item_iterator.map(|item_element: ElementRef| {
let maybe_id = item_element.select(&id_selector).nth(0)
.map(|i| i.inner_html())
.map(|str_id: String| str_id.trim().parse::<i32>().unwrap_or(0));
let maybe_name = item_element.select(&name_selector).nth(0)
.map(|i| i.inner_html());
let maybe_type = item_element.select(&type_selector).nth(0)
.map(|i| String::from(i.inner_html().trim()));
mdo! {
id =<< maybe_id;
name =<< maybe_name;
item_type =<< maybe_type;
ret ret(IdInfo{
id, name, item_type
})
}
}).flatten().collect();
Ok(items)
});
maybe_vec.unwrap_or(vec![])
}
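// Illustrative usage (a sketch; the item name is made up):
//
//     let matches = get_ids_by_name("awakening potion");
//     // every IdInfo carries the id, name and item type scraped from the
//     // NovaRO item search results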
pub fn get_market_entries(item_id: i32) -> Vec<ItemInfo> {
let url = format!("https://www.novaragnarok.com/?module=vending&action=item&id={}", item_id);
let maybe_body = reqwest::get(&url)
.and_then(|mut response| {
response.text()
});
match maybe_body {
Ok(ref body) => get_items(body),
Err(_) => vec![],
}
}
fn get_items(body: &str) -> Vec<ItemInfo> {
let document = Html::parse_document(&body);
let rows_selector = Selector::parse("#itemtable > tbody > tr").unwrap();
let differentiation_selector = Selector::parse("#itemtable > thead > tr > th:nth-child(2)").unwrap();
let name_selector = Selector::parse(".tooltip > a:nth-child(1)").unwrap();
let price_selector = Selector::parse("td:nth-child(1)").unwrap();
let amount_selector = Selector::parse("td:nth-child(2)").unwrap();
let refine_selector = Selector::parse("td:nth-child(2)").unwrap();
let properties_selector = Selector::parse("td:nth-child(3)").unwrap();
let get_number_from_table = |el: ElementRef| el.value().attr("data-order").unwrap_or("0").parse::<i32>().unwrap_or(0);
let items_iterator = document.select(&rows_selector);
let item_name = document.select(&name_selector).next().map(|name_el: ElementRef| { name_el.inner_html() });
let diff = document.select(&differentiation_selector).next();
let is_item = diff.map(|label: ElementRef| label.inner_html().to_lowercase() == "qty");
if let None = is_item { return vec![] };
let mut items: Vec<ItemInfo> = items_iterator.map(|item_element: ElementRef| {
let name = item_name.clone();
match is_item.unwrap() {
true => ItemInfo{
name: name,
item_type: ItemType::Item,
price: item_element.select(&price_selector).nth(0).map(get_number_from_table),
amount: item_element.select(&amount_selector).nth(0).map(get_number_from_table),
refine: None,
properties: None,
},
false => ItemInfo{
name: name,
item_type: ItemType::Equip,
price: item_element.select(&price_selector).nth(0).map(get_number_from_table),
refine: item_element.select(&refine_selector).nth(0).map(get_number_from_table),
properties: item_element.select(&properties_selector).nth(0).map(|el| el.inner_html()),
amount: None,
},
}
}).collect();
items.sort();
return items;
}
| true
|
f6dbebdb7382159af0707ac80c50d2b101b05aca
|
Rust
|
snarkyboojum/sha_hash
|
/src/sha512.rs
|
UTF-8
| 10,578
| 3.046875
| 3
|
[
"Apache-2.0"
] |
permissive
|
/*
This implementation only works for SHA-512 currently. Other algorithms
may be added later on. See https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
for implementation details.
The algorithm consists of two main stages:
1. Preprocessing
- Padding
- Parsing into m-bit blocks
- Setting initialisation values to be used in the hash computation
2. Hash computation
- generate message schedule
- iteratively generate hash values using the message schedule etc
SHA-512 details:
- Message size < 2 ^ 128 bits (m-bits)
- Block size (1024 bits / 16 x 64-bit words)
- Word size 64 bits / u64
- Message digest size (512 bits / 8 x 64 bit words)
Big-endian byte order is used throughout.
*/
// Pads the message so its total length becomes a multiple of 1024 bits:
// append a single 1 bit, then the smallest number of 0 bits that makes the
// length congruent to 896 mod 1024, and finally the original message length
// as a 128-bit big-endian integer (FIPS 180-4, section 5.1.2).
fn pad_message(msg: &[u8]) -> Vec<u8> {
let num_blocks = (msg.len() * 8 + 128 + 1) / 1024;
let min_msg_bits = msg.len() * 8 % 1024 + 1;
let mut num_zero_bits = 0;
use std::cmp::Ordering;
match min_msg_bits.cmp(&896) {
Ordering::Less => {
num_zero_bits = 896 - (msg.len() * 8 % 1024 + 1);
}
Ordering::Greater => {
if num_blocks > 1 {
num_zero_bits = 1024 - ((msg.len() * 8 + 1 + 128) % 1024) + 1024 * num_blocks;
} else {
num_zero_bits = 1024 - ((msg.len() * 8 + 1 + 128) % 1024);
}
}
Ordering::Equal => {}
}
let buffer_size = (msg.len() * 8) + 1 + num_zero_bits + 128;
// 128 bit representation of the length of the message
let length_128: u128 = (msg.len() * 8) as u128;
use bytes::{BufMut, BytesMut};
let mut buffer = BytesMut::with_capacity(buffer_size / 8);
buffer.put(msg);
if num_zero_bits > 0 {
buffer.put_u8(0x80);
for _ in 0..(num_zero_bits / 8) {
buffer.put_u8(0x00);
}
} else {
// TODO: not sure how to handle this
}
buffer.put_u128(length_128);
buffer.to_vec()
}
// Functions to be used during the hash computation
#[allow(dead_code)]
fn rotl(n: u64, x: u64) -> u64 {
(x << n) | (x >> (64 - n))
}
fn rotr(n: u64, x: u64) -> u64 {
(x >> n) | (x << (64 - n))
}
fn shr(n: u64, x: u64) -> u64 {
x >> n
}
fn ch(x: u64, y: u64, z: u64) -> u64 {
(x & y) ^ (!x & z)
}
fn maj(x: u64, y: u64, z: u64) -> u64 {
(x & y) ^ (x & z) ^ (y & z)
}
fn s_sigma1_512(word: u64) -> u64 {
rotr(19, word) ^ rotr(61, word) ^ shr(6, word)
}
fn s_sigma0_512(word: u64) -> u64 {
rotr(1, word) ^ rotr(8, word) ^ shr(7, word)
}
fn b_sigma1_512(word: u64) -> u64 {
rotr(14, word) ^ rotr(18, word) ^ rotr(41, word)
}
fn b_sigma0_512(word: u64) -> u64 {
rotr(28, word) ^ rotr(34, word) ^ rotr(39, word)
}
pub fn hash(msg: &[u8]) -> Option<[u64; 8]> {
if msg.is_empty() {
None
} else {
let padded_message = pad_message(msg);
//println!("Padded message: {:#x?}", padded_message);
//println!("Length of padded message: {} bytes", padded_message.len());
// we only take n * 1024 bit messages
assert_eq!((padded_message.len() * 8) % 1024, 0);
// parse into 1024 bit blocks (128 bytes), using 64 bit words (8 bytes)
// see 6.4.1 and 6.4.2 on p24 of
// https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
use byteorder::{BigEndian, ByteOrder};
let mut hashes: [u64; 8] = SHA_512_INIT;
//println!("Initial hashes: {:#x?}", hashes);
for (_i, block) in padded_message.chunks(128).enumerate() {
let mut t = 0;
let mut msg_schedule: [u64; 80] = [0u64; 80];
// build message schedule
for word in block.chunks(8) {
if t < 16 {
msg_schedule[t] = BigEndian::read_u64(word);
}
t += 1;
}
for t in 16..80 {
msg_schedule[t] = s_sigma1_512(msg_schedule[t - 2])
.wrapping_add(msg_schedule[t - 7])
.wrapping_add(s_sigma0_512(msg_schedule[t - 15]))
.wrapping_add(msg_schedule[t - 16]);
}
/*
println!("Message schedule for block: {}", i);
for m in msg_schedule.iter() {
print!("{:#x?} ", m);
}
println!("");
*/
let mut a = hashes[0];
let mut b = hashes[1];
let mut c = hashes[2];
let mut d = hashes[3];
let mut e = hashes[4];
let mut f = hashes[5];
let mut g = hashes[6];
let mut h = hashes[7];
for t in 0..80 {
//print!("t={}: ", t);
let t1 = h
.wrapping_add(b_sigma1_512(e))
.wrapping_add(ch(e, f, g))
.wrapping_add(SHA_512[t])
.wrapping_add(msg_schedule[t]);
let t2 = b_sigma0_512(a).wrapping_add(maj(a, b, c));
h = g;
g = f;
f = e;
e = d.wrapping_add(t1);
d = c;
c = b;
b = a;
a = t1.wrapping_add(t2);
/*
print!(
"A: {:#x?} B: {:#x?} C: {:#x?} D: {:#x?} E: {:#x?} F: {:#x?} G: {:#x?} H: {:#x?}",
a, b, c, d, e, f, g, h
);
println!("");
*/
}
hashes[0] = hashes[0].wrapping_add(a);
hashes[1] = hashes[1].wrapping_add(b);
hashes[2] = hashes[2].wrapping_add(c);
hashes[3] = hashes[3].wrapping_add(d);
hashes[4] = hashes[4].wrapping_add(e);
hashes[5] = hashes[5].wrapping_add(f);
hashes[6] = hashes[6].wrapping_add(g);
hashes[7] = hashes[7].wrapping_add(h);
}
Some(hashes)
}
}
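// Illustrative usage (the expected word comes from the "abc" test vector in
// the test module below):
//
//     let digest = hash(b"abc").expect("input is non-empty");
//     assert_eq!(digest[0], 0xddaf35a193617aba);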
#[cfg(test)]
mod tests {
#[test]
fn test_sha512_hash() {
use std::collections::HashMap;
let mut message_hashes: HashMap<&str, [u64; 8]> = HashMap::new();
message_hashes.insert(
"",
[
0xcf83e1357eefb8bd,
0xf1542850d66d8007,
0xd620e4050b5715dc,
0x83f4a921d36ce9ce,
0x47d0d13c5d85f2b0,
0xff8318d2877eec2f,
0x63b931bd47417a81,
0xa538327af927da3e,
],
);
message_hashes.insert(
"abc",
[
0xddaf35a193617aba,
0xcc417349ae204131,
0x12e6fa4e89a97ea2,
0x0a9eeee64b55d39a,
0x2192992a274fc1a8,
0x36ba3c23a3feebbd,
0x454d4423643ce80e,
0x2a9ac94fa54ca49f,
],
);
message_hashes.insert(
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
[
0x204a8fc6dda82f0a,
0x0ced7beb8e08a416,
0x57c16ef468b228a8,
0x279be331a703c335,
0x96fd15c13b1b07f9,
0xaa1d3bea57789ca0,
0x31ad85c7a71dd703,
0x54ec631238ca3445,
],
);
message_hashes.insert(
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
[0x8e959b75dae313da, 0x8cf4f72814fc143f, 0x8f7779c6eb9f7fa1, 0x7299aeadb6889018, 0x501d289e4900f7e4, 0x331b99dec4b5433a, 0xc7d329eeb6dd2654, 0x5e96e55b874be909]
);
for (msg, hash) in message_hashes.iter() {
// `hash` returns `None` for an empty message, so the "" entry above is
// effectively skipped by this loop.
if let Some(test_hash) = super::hash(msg.as_bytes()) {
for i in 0..8 {
assert_eq!(hash[i], test_hash[i]);
}
}
}
}
}
// the initial hash value consists of the following eight 64-bit words (i.e. 512 bits)
const SHA_512_INIT: [u64; 8] = [
0x6a09_e667_f3bc_c908,
0xbb67_ae85_84ca_a73b,
0x3c6e_f372_fe94_f82b,
0xa54f_f53a_5f1d_36f1,
0x510e_527f_ade6_82d1,
0x9b05_688c_2b3e_6c1f,
0x1f83_d9ab_fb41_bd6b,
0x5be0_cd19_137e_2179,
];
// SHA-384, SHA-512, SHA-512/224 and SHA-512/256 use the same sequence of
// eighty constant 64-bit words
const SHA_512: [u64; 80] = [
0x428a_2f98_d728_ae22,
0x7137_4491_23ef_65cd,
0xb5c0_fbcf_ec4d_3b2f,
0xe9b5_dba5_8189_dbbc,
0x3956_c25b_f348_b538,
0x59f1_11f1_b605_d019,
0x923f_82a4_af19_4f9b,
0xab1c_5ed5_da6d_8118,
0xd807_aa98_a303_0242,
0x1283_5b01_4570_6fbe,
0x2431_85be_4ee4_b28c,
0x550c_7dc3_d5ff_b4e2,
0x72be_5d74_f27b_896f,
0x80de_b1fe_3b16_96b1,
0x9bdc_06a7_25c7_1235,
0xc19b_f174_cf69_2694,
0xe49b_69c1_9ef1_4ad2,
0xefbe_4786_384f_25e3,
0x0fc1_9dc6_8b8c_d5b5,
0x240c_a1cc_77ac_9c65,
0x2de9_2c6f_592b_0275,
0x4a74_84aa_6ea6_e483,
0x5cb0_a9dc_bd41_fbd4,
0x76f9_88da_8311_53b5,
0x983e_5152_ee66_dfab,
0xa831_c66d_2db4_3210,
0xb003_27c8_98fb_213f,
0xbf59_7fc7_beef_0ee4,
0xc6e0_0bf3_3da8_8fc2,
0xd5a7_9147_930a_a725,
0x06ca_6351_e003_826f,
0x1429_2967_0a0e_6e70,
0x27b7_0a85_46d2_2ffc,
0x2e1b_2138_5c26_c926,
0x4d2c_6dfc_5ac4_2aed,
0x5338_0d13_9d95_b3df,
0x650a_7354_8baf_63de,
0x766a_0abb_3c77_b2a8,
0x81c2_c92e_47ed_aee6,
0x9272_2c85_1482_353b,
0xa2bf_e8a1_4cf1_0364,
0xa81a_664b_bc42_3001,
0xc24b_8b70_d0f8_9791,
0xc76c_51a3_0654_be30,
0xd192_e819_d6ef_5218,
0xd699_0624_5565_a910,
0xf40e_3585_5771_202a,
0x106a_a070_32bb_d1b8,
0x19a4_c116_b8d2_d0c8,
0x1e37_6c08_5141_ab53,
0x2748_774c_df8e_eb99,
0x34b0_bcb5_e19b_48a8,
0x391c_0cb3_c5c9_5a63,
0x4ed8_aa4a_e341_8acb,
0x5b9c_ca4f_7763_e373,
0x682e_6ff3_d6b2_b8a3,
0x748f_82ee_5def_b2fc,
0x78a5_636f_4317_2f60,
0x84c8_7814_a1f0_ab72,
0x8cc7_0208_1a64_39ec,
0x90be_fffa_2363_1e28,
0xa450_6ceb_de82_bde9,
0xbef9_a3f7_b2c6_7915,
0xc671_78f2_e372_532b,
0xca27_3ece_ea26_619c,
0xd186_b8c7_21c0_c207,
0xeada_7dd6_cde0_eb1e,
0xf57d_4f7f_ee6e_d178,
0x06f0_67aa_7217_6fba,
0x0a63_7dc5_a2c8_98a6,
0x113f_9804_bef9_0dae,
0x1b71_0b35_131c_471b,
0x28db_77f5_2304_7d84,
0x32ca_ab7b_40c7_2493,
0x3c9e_be0a_15c9_bebc,
0x431d_67c4_9c10_0d4c,
0x4cc5_d4be_cb3e_42b6,
0x597f_299c_fc65_7e2a,
0x5fcb_6fab_3ad6_faec,
0x6c44_198c_4a47_5817,
];
| true
|
74f1a438424f1c4cde77c244e734b3c7b963dc10
|
Rust
|
J-AugustoManzano/livro_Rust
|
/ExerciciosAprendizagem/Cap07/c07ex04/src/main.rs
|
UTF-8
| 1,122
| 3.203125
| 3
|
[] |
no_license
|
use std::io;
use std::io::prelude::*;
fn main() {
let mut nota: [[f32; 10]; 4] = [[0.; 10]; 4];
let mut valor = String::new();
println!("Leitura e apresentação de notas escolares.\n");
println!("Entrada dos dados.\n");
// Read in the grades
for i in 0..10 {
println!("Informa as notas do {:2}o. aluno:\n", i + 1);
for j in 0..4 {
print!("Nota {}: ", j + 1);
io::stdout().flush().unwrap();
io::stdin().read_line(&mut valor).unwrap();
nota[j][i] = valor.trim().parse::<f32>().unwrap();
valor.clear();
}
println!();
}
println!("Saída dos dados.\n");
// Display the grades
for i in 0..10 {
println!("As notas do {:2}o. aluno são:\n", i + 1);
for j in 0..4 {
println!("Nota {} = {:6.2}", j + 1, nota[j][i]);
}
println!();
}
println!();
print!("Tecle <Enter> para encerrar...");
io::stdout().flush().unwrap();
io::stdin().read(&mut [0u8]).unwrap();
}
| true
|
ae66720f512546e98ea4a1f05d50138f2b58c8e7
|
Rust
|
cottonguard/kyopro-rs-bundler
|
/src/elimination.rs
|
UTF-8
| 2,511
| 2.609375
| 3
|
[] |
no_license
|
use std::collections::{HashMap, HashSet, VecDeque, hash_map::Entry};
use crate::{
mod_tree::{ModTree, Node},
path::{ModulePath, ModulePathBuf},
};
pub fn flatten_mods(tree: ModTree) -> HashMap<ModulePathBuf, Node> {
let mut res = HashMap::new();
flatten_mods_rec(&mut res, ModulePath::new(""), tree);
res
}
fn flatten_mods_rec(res: &mut HashMap<ModulePathBuf, Node>, path: &ModulePath, tree: ModTree) {
let mut inner = |path: ModulePathBuf, mut node: Node| {
// node.items.drain_filter()
let mut i = 0;
while i < node.items.len() {
if matches!(&node.items[i], ModTree::Mod(_, _)) {
let child_mod = node.items.remove(i);
flatten_mods_rec(res, &*path, child_mod);
} else {
i += 1;
}
}
match res.entry(path) {
Entry::Vacant(e) => {
e.insert(node);
}
Entry::Occupied(mut e) => {
let x = e.get_mut();
x.items.extend(node.items);
x.accesses.extend(node.accesses);
}
}
};
match tree {
ModTree::File(node) => {
let path = ModulePathBuf::from("crate");
inner(path, node);
}
ModTree::Mod(name, node) => {
let path = path.join(name);
inner(path, node);
}
_ => panic!("the tree is not File or Mod"),
}
}
pub fn collect_reachablity(mods: &HashMap<ModulePathBuf, Node>) {
let mut que: VecDeque<_> = Some(ModulePathBuf::from("crate")).into_iter().collect();
let mut reached: HashSet<_> = Some(ModulePathBuf::from("crate")).into_iter().collect();
while let Some(name) = que.pop_front() {
let node = mods.get(&name).expect("mod is not found");
for item in &node.items {
match item {
ModTree::Block(inner) | ModTree::Generics(inner) => {}
_ => {}
}
}
}
}
fn proc_mod(node: Node, scopes: &mut Vec<Scope>) {
let mut aliases = HashMap::new();
for item in node.items {
match item {
ModTree::Use(list) => {
for (name, path) in list {
aliases.insert(name, path);
}
}
_ => {}
}
}
// let mut resolved = HashMap::new();
// let mut stack = vec![];
}
struct Scope {
types: HashSet<String>,
aliases: HashMap<String, ModulePathBuf>,
}
| true
|
a90aa190af39cf030bffe981d709f3c9d87a6329
|
Rust
|
ry/tokio
|
/src/clock.rs
|
UTF-8
| 666
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
//! A configurable source of time.
//!
//! This module provides the [`now`][n] function, which returns an `Instant`
//! representing "now". The source of time used by this function is configurable
//! (via the [`tokio-timer`] crate) and allows mocking out the source of time in
//! tests or performing caching operations to reduce the number of syscalls.
//!
//! Note that, because the source of time is configurable, it is possible to
//! observe non-monotonic behavior when calling [`now`][n] from different
//! executors.
//!
//! [n]: fn.now.html
//! [`tokio-timer`]: https://docs.rs/tokio-timer/0.2/tokio_timer/clock/index.html
pub use tokio_timer::clock::now;
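// Illustrative usage: `let start = tokio::clock::now();` returns an `Instant`
// taken from whichever clock source is currently configured.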
| true
|
3920e9334eff6c183ec87a7751429c6d9d5ad4c4
|
Rust
|
shmuga/super-http
|
/src/indexer/handlers/start_indexing.rs
|
UTF-8
| 2,147
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
use crate::indexer::links_storage::Link;
use crate::indexer::{fs, parser, HtmlIndexer};
use crate::prelude::*;
use crate::ParsedFile;
use actix::prelude::*;
use actix::Message;
use log::info;
use tantivy::Document;
#[derive(Message)]
#[rtype(result = "Result<()>")]
pub struct StartIndexing(pub String);
impl Handler<StartIndexing> for HtmlIndexer {
type Result = Result<()>;
fn handle(&mut self, message: StartIndexing, _: &mut Self::Context) -> Self::Result {
info!("Indexing started");
let path = message.0;
let files = fs::walk(path.clone())?;
let inter = &mut self.0;
let mut index_writer = inter.index.writer(30_000_000)?;
info!("Found {} files to be indexed", files.len());
for file in files.iter() {
let content = fs::file(&file)?;
let ParsedFile {
body,
title,
tags,
headers,
links,
} = parser::parse_content(content)?;
let mut doc = Document::new();
let no_basedir = file.to_string().replace(&path, "");
doc.add_text(inter.filename_field, no_basedir.clone());
// TODO: replace multiple consecutive spaces in body
doc.add_text(inter.body_field, body);
doc.add_text(inter.title_field, title.clone());
for tag in tags.iter() {
doc.add_text(inter.tags_field, tag);
}
let file_link = Link::new(Some("/".to_string()), no_basedir.clone(), title.clone());
let links: Vec<Link> = links
.iter()
.map(|(href, title)| Link::new(Some(no_basedir.clone()), href.to_owned(), title.to_owned()))
.collect();
inter.links_storage.update_file(file_link, links);
for header in headers.iter() {
doc.add_text(inter.title_field, header);
}
index_writer.add_document(doc);
}
println!("{:?}", inter.links_storage);
index_writer.commit()?;
self.0.is_indexing = false;
Ok(())
}
}
| true
|
6917a94295b7dee3ccb564bf0a427cf436254776
|
Rust
|
Concordium/wasm-tools
|
/fuzz/fuzz_targets/incremental-parse.rs
|
UTF-8
| 7,006
| 2.828125
| 3
|
[
"LLVM-exception",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#![no_main]
use libfuzzer_sys::*;
use wasmparser::*;
use Payload::*;
// Simulate receiving chunks of data by fuzzing over a `Vec<Vec<u8>>` where each
// element of the outer vec is a chunk of data we received.
//
// The assertion here is that parsing everything in one go should always produce
// the exact same results as an incremental parse.
fuzz_target!(|data: Vec<Vec<u8>>| {
drop(env_logger::try_init());
// Concatenate everything together, create our expected iterator of
// payloads, and then write out `input.wasm` if debugging is enabled.
let everything = data.iter().flat_map(|a| a).copied().collect::<Vec<_>>();
let mut expected = Parser::new(0).parse_all(&everything);
if log::log_enabled!(log::Level::Debug) {
std::fs::write("input.wasm", &everything).unwrap();
}
// Create our parser as well as a stack of nested parsers for parsing nested
// modules.
let mut stack = Vec::new();
let mut parser = Parser::new(0);
// We'll be parsing data from `buf` starting at `pos`, and we translate
// `data` into an iterator of chunks so when requested we'll take another
// chunk of data and feed it in.
let mut pos = 0;
let mut buf = Vec::new();
let mut data = data.into_iter().peekable();
loop {
log::debug!("parsing {}..{}", pos, buf.len());
let payload = match parser.parse(&buf[pos..], data.peek().is_none()) {
// If more data is requested then we're guaranteed that `data`
// should have another element in its iterator, so pull that off and
// add it to the end of the `buf`.
Ok(Chunk::NeedMoreData(_n)) => {
buf.extend(data.next().unwrap());
continue;
}
Ok(Chunk::Parsed { consumed, payload }) => {
log::debug!("parsed {} bytes", consumed);
pos += consumed;
payload
}
// On failure we should receive the same failure as if we did a full
// parse.
Err(actual) => {
let expected = expected
.next()
.expect("full parse stopped early")
.err()
.expect("full parse didn't return an error");
assert_eq!(expected.offset(), actual.offset());
assert_eq!(expected.message(), actual.message());
break;
}
};
log::debug!("parsed payload {:?}", payload);
let expected_payload = expected
.next()
.expect("full parse stopped early")
.expect("full parse failed but incremental succeeded");
match (payload, expected_payload) {
(End, End) => match stack.pop() {
Some(p) => parser = p,
None => {
log::debug!("no more parsers");
assert!(expected.next().is_none());
break;
}
},
(Version { num: a, range: ar }, Version { num: b, range: br }) => {
assert_eq!(a, b);
assert_eq!(ar, br);
}
(TypeSection(a), TypeSection(b)) => assert_eq!(a.range(), b.range()),
(ImportSection(a), ImportSection(b)) => assert_eq!(a.range(), b.range()),
(AliasSection(a), AliasSection(b)) => assert_eq!(a.range(), b.range()),
(InstanceSection(a), InstanceSection(b)) => assert_eq!(a.range(), b.range()),
(FunctionSection(a), FunctionSection(b)) => assert_eq!(a.range(), b.range()),
(TableSection(a), TableSection(b)) => assert_eq!(a.range(), b.range()),
(MemorySection(a), MemorySection(b)) => assert_eq!(a.range(), b.range()),
(GlobalSection(a), GlobalSection(b)) => assert_eq!(a.range(), b.range()),
(ExportSection(a), ExportSection(b)) => assert_eq!(a.range(), b.range()),
(EventSection(a), EventSection(b)) => assert_eq!(a.range(), b.range()),
(StartSection { func: a, range: ar }, StartSection { func: b, range: br }) => {
assert_eq!(a, b);
assert_eq!(ar, br);
}
(ElementSection(a), ElementSection(b)) => assert_eq!(a.range(), b.range()),
(
DataCountSection {
count: a,
range: ar,
},
DataCountSection {
count: b,
range: br,
},
) => {
assert_eq!(a, b);
assert_eq!(ar, br);
}
(DataSection(a), DataSection(b)) => assert_eq!(a.range(), b.range()),
(
CustomSection {
name: a,
data_offset: ado,
data: ad,
},
CustomSection {
name: b,
data_offset: bdo,
data: bd,
},
) => {
assert_eq!(a, b);
assert_eq!(ad, bd);
assert_eq!(ado, bdo);
}
(
CodeSectionStart {
count: a,
range: ar,
size: asz,
},
CodeSectionStart {
count: b,
range: br,
size: bsz,
},
)
| (
ModuleSectionStart {
count: a,
range: ar,
size: asz,
},
ModuleSectionStart {
count: b,
range: br,
size: bsz,
},
) => {
assert_eq!(a, b);
assert_eq!(ar, br);
assert_eq!(asz, bsz);
}
(CodeSectionEntry(a), CodeSectionEntry(b)) => {
assert_eq!(a.get_binary_reader().range(), b.get_binary_reader().range());
}
(
ModuleSectionEntry {
range: ar,
parser: ap,
},
ModuleSectionEntry { range: br, .. },
) => {
assert_eq!(ar, br);
stack.push(parser);
parser = ap;
}
(
UnknownSection {
id: a,
contents: ac,
range: ar,
},
UnknownSection {
id: b,
contents: bc,
range: br,
},
) => {
assert_eq!(a, b);
assert_eq!(ar, br);
assert_eq!(ac, bc);
}
(a, b) => {
panic!("expected {:?}\ngot {:?}", b, a);
}
}
}
});
| true
|
292c2c25cc1beb162dcecca8cbff08cc91bb6591
|
Rust
|
GaloisInc/mir-verifier
|
/lib/libcore/benches/iter.rs
|
UTF-8
| 7,977
| 3.25
| 3
|
[] |
permissive
|
use core::iter::*;
use test::{black_box, Bencher};
#[bench]
fn bench_rposition(b: &mut Bencher) {
let it: Vec<usize> = (0..300).collect();
b.iter(|| {
it.iter().rposition(|&x| x <= 150);
});
}
#[bench]
fn bench_skip_while(b: &mut Bencher) {
b.iter(|| {
let it = 0..100;
let mut sum = 0;
it.skip_while(|&x| {
sum += x;
sum < 4000
})
.all(|_| true);
});
}
#[bench]
fn bench_multiple_take(b: &mut Bencher) {
let mut it = (0..42).cycle();
b.iter(|| {
let n = it.next().unwrap();
for _ in 0..n {
it.clone().take(it.next().unwrap()).all(|_| true);
}
});
}
fn scatter(x: i32) -> i32 {
(x * 31) % 127
}
#[bench]
fn bench_max_by_key(b: &mut Bencher) {
b.iter(|| {
let it = 0..100;
it.map(black_box).max_by_key(|&x| scatter(x))
})
}
// http://www.reddit.com/r/rust/comments/31syce/using_iterators_to_find_the_index_of_the_min_or/
#[bench]
fn bench_max_by_key2(b: &mut Bencher) {
fn max_index_iter(array: &[i32]) -> usize {
array.iter().enumerate().max_by_key(|&(_, item)| item).unwrap().0
}
let mut data = vec![0; 1638];
data[514] = 9999;
b.iter(|| max_index_iter(&data));
}
#[bench]
fn bench_max(b: &mut Bencher) {
b.iter(|| {
let it = 0..100;
it.map(black_box).map(scatter).max()
})
}
pub fn copy_zip(xs: &[u8], ys: &mut [u8]) {
for (a, b) in ys.iter_mut().zip(xs) {
*a = *b;
}
}
pub fn add_zip(xs: &[f32], ys: &mut [f32]) {
for (a, b) in ys.iter_mut().zip(xs) {
*a += *b;
}
}
#[bench]
fn bench_zip_copy(b: &mut Bencher) {
let source = vec![0u8; 16 * 1024];
let mut dst = black_box(vec![0u8; 16 * 1024]);
b.iter(|| copy_zip(&source, &mut dst))
}
#[bench]
fn bench_zip_add(b: &mut Bencher) {
let source = vec![1.; 16 * 1024];
let mut dst = vec![0.; 16 * 1024];
b.iter(|| add_zip(&source, &mut dst));
}
/// `Iterator::for_each` implemented as a plain loop.
fn for_each_loop<I, F>(iter: I, mut f: F)
where
I: Iterator,
F: FnMut(I::Item),
{
for item in iter {
f(item);
}
}
/// `Iterator::for_each` implemented with `fold` for internal iteration.
/// (except when `by_ref()` effectively disables that optimization.)
fn for_each_fold<I, F>(iter: I, mut f: F)
where
I: Iterator,
F: FnMut(I::Item),
{
iter.fold((), move |(), item| f(item));
}
#[bench]
fn bench_for_each_chain_loop(b: &mut Bencher) {
b.iter(|| {
let mut acc = 0;
let iter = (0i64..1000000).chain(0..1000000).map(black_box);
for_each_loop(iter, |x| acc += x);
acc
});
}
#[bench]
fn bench_for_each_chain_fold(b: &mut Bencher) {
b.iter(|| {
let mut acc = 0;
let iter = (0i64..1000000).chain(0..1000000).map(black_box);
for_each_fold(iter, |x| acc += x);
acc
});
}
#[bench]
fn bench_for_each_chain_ref_fold(b: &mut Bencher) {
b.iter(|| {
let mut acc = 0;
let mut iter = (0i64..1000000).chain(0..1000000).map(black_box);
for_each_fold(iter.by_ref(), |x| acc += x);
acc
});
}
/// Helper to benchmark `sum` for iterators taken by value which
/// can optimize `fold`, and by reference which cannot.
macro_rules! bench_sums {
($bench_sum:ident, $bench_ref_sum:ident, $iter:expr) => {
#[bench]
fn $bench_sum(b: &mut Bencher) {
b.iter(|| -> i64 { $iter.map(black_box).sum() });
}
#[bench]
fn $bench_ref_sum(b: &mut Bencher) {
b.iter(|| -> i64 { $iter.map(black_box).by_ref().sum() });
}
};
}
bench_sums! {
bench_flat_map_sum,
bench_flat_map_ref_sum,
(0i64..1000).flat_map(|x| x..x+1000)
}
bench_sums! {
bench_flat_map_chain_sum,
bench_flat_map_chain_ref_sum,
(0i64..1000000).flat_map(|x| once(x).chain(once(x)))
}
bench_sums! {
bench_enumerate_sum,
bench_enumerate_ref_sum,
(0i64..1000000).enumerate().map(|(i, x)| x * i as i64)
}
bench_sums! {
bench_enumerate_chain_sum,
bench_enumerate_chain_ref_sum,
(0i64..1000000).chain(0..1000000).enumerate().map(|(i, x)| x * i as i64)
}
bench_sums! {
bench_filter_sum,
bench_filter_ref_sum,
(0i64..1000000).filter(|x| x % 3 == 0)
}
bench_sums! {
bench_filter_chain_sum,
bench_filter_chain_ref_sum,
(0i64..1000000).chain(0..1000000).filter(|x| x % 3 == 0)
}
bench_sums! {
bench_filter_map_sum,
bench_filter_map_ref_sum,
(0i64..1000000).filter_map(|x| x.checked_mul(x))
}
bench_sums! {
bench_filter_map_chain_sum,
bench_filter_map_chain_ref_sum,
(0i64..1000000).chain(0..1000000).filter_map(|x| x.checked_mul(x))
}
bench_sums! {
bench_fuse_sum,
bench_fuse_ref_sum,
(0i64..1000000).fuse()
}
bench_sums! {
bench_fuse_chain_sum,
bench_fuse_chain_ref_sum,
(0i64..1000000).chain(0..1000000).fuse()
}
bench_sums! {
bench_inspect_sum,
bench_inspect_ref_sum,
(0i64..1000000).inspect(|_| {})
}
bench_sums! {
bench_inspect_chain_sum,
bench_inspect_chain_ref_sum,
(0i64..1000000).chain(0..1000000).inspect(|_| {})
}
bench_sums! {
bench_peekable_sum,
bench_peekable_ref_sum,
(0i64..1000000).peekable()
}
bench_sums! {
bench_peekable_chain_sum,
bench_peekable_chain_ref_sum,
(0i64..1000000).chain(0..1000000).peekable()
}
bench_sums! {
bench_skip_sum,
bench_skip_ref_sum,
(0i64..1000000).skip(1000)
}
bench_sums! {
bench_skip_chain_sum,
bench_skip_chain_ref_sum,
(0i64..1000000).chain(0..1000000).skip(1000)
}
bench_sums! {
bench_skip_while_sum,
bench_skip_while_ref_sum,
(0i64..1000000).skip_while(|&x| x < 1000)
}
bench_sums! {
bench_skip_while_chain_sum,
bench_skip_while_chain_ref_sum,
(0i64..1000000).chain(0..1000000).skip_while(|&x| x < 1000)
}
bench_sums! {
bench_take_while_chain_sum,
bench_take_while_chain_ref_sum,
(0i64..1000000).chain(1000000..).take_while(|&x| x < 1111111)
}
bench_sums! {
bench_cycle_take_sum,
bench_cycle_take_ref_sum,
(0i64..10000).cycle().take(1000000)
}
// Checks whether Skip<Zip<A,B>> is as fast as Zip<Skip<A>, Skip<B>>, from
// https://users.rust-lang.org/t/performance-difference-between-iterator-zip-and-skip-order/15743
#[bench]
fn bench_zip_then_skip(b: &mut Bencher) {
let v: Vec<_> = (0..100_000).collect();
let t: Vec<_> = (0..100_000).collect();
b.iter(|| {
let s = v
.iter()
.zip(t.iter())
.skip(10000)
.take_while(|t| *t.0 < 10100)
.map(|(a, b)| *a + *b)
.sum::<u64>();
assert_eq!(s, 2009900);
});
}
#[bench]
fn bench_skip_then_zip(b: &mut Bencher) {
let v: Vec<_> = (0..100_000).collect();
let t: Vec<_> = (0..100_000).collect();
b.iter(|| {
let s = v
.iter()
.skip(10000)
.zip(t.iter().skip(10000))
.take_while(|t| *t.0 < 10100)
.map(|(a, b)| *a + *b)
.sum::<u64>();
assert_eq!(s, 2009900);
});
}
#[bench]
fn bench_filter_count(b: &mut Bencher) {
b.iter(|| (0i64..1000000).map(black_box).filter(|x| x % 3 == 0).count())
}
#[bench]
fn bench_filter_ref_count(b: &mut Bencher) {
b.iter(|| (0i64..1000000).map(black_box).by_ref().filter(|x| x % 3 == 0).count())
}
#[bench]
fn bench_filter_chain_count(b: &mut Bencher) {
b.iter(|| (0i64..1000000).chain(0..1000000).map(black_box).filter(|x| x % 3 == 0).count())
}
#[bench]
fn bench_filter_chain_ref_count(b: &mut Bencher) {
b.iter(|| {
(0i64..1000000).chain(0..1000000).map(black_box).by_ref().filter(|x| x % 3 == 0).count()
})
}
#[bench]
fn bench_partial_cmp(b: &mut Bencher) {
b.iter(|| (0..100000).map(black_box).partial_cmp((0..100000).map(black_box)))
}
#[bench]
fn bench_lt(b: &mut Bencher) {
b.iter(|| (0..100000).map(black_box).lt((0..100000).map(black_box)))
}
| true
|
be6dfea464639ae6ef7fe1c541a32597102acbbd
|
Rust
|
tonlabs/ton-labs-node-storage
|
/src/dynamic_boc_db.rs
|
UTF-8
| 3,766
| 2.828125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::sync::{Arc, RwLock, Weak};
use fnv::FnvHashMap;
use ton_types::{Cell, Result};
use crate::cell_db::CellDb;
use crate::dynamic_boc_diff_writer::{DynamicBocDiffFactory, DynamicBocDiffWriter};
use crate::types::{CellId, StorageCell};
#[derive(Debug)]
pub struct DynamicBocDb {
db: Arc<CellDb>,
cells: Arc<RwLock<FnvHashMap<CellId, Weak<StorageCell>>>>,
diff_factory: DynamicBocDiffFactory,
}
impl DynamicBocDb {
/// Constructs new instance using in-memory key-value collection
pub fn in_memory() -> Self {
Self::with_db(CellDb::in_memory())
}
/// Constructs new instance using RocksDB with given path
pub fn with_path<P: AsRef<Path>>(path: P) -> Self {
Self::with_db(CellDb::with_path(path))
}
/// Constructs new instance using given key-value collection implementation
pub(crate) fn with_db(db: CellDb) -> Self {
let db = Arc::new(db);
Self {
db: Arc::clone(&db),
cells: Arc::new(RwLock::new(FnvHashMap::default())),
diff_factory: DynamicBocDiffFactory::new(db),
}
}
pub fn cell_db(&self) -> &Arc<CellDb> {
&self.db
}
pub fn cells_map(&self) -> Arc<RwLock<FnvHashMap<CellId, Weak<StorageCell>>>> {
Arc::clone(&self.cells)
}
/// Converts tree of cells into DynamicBoc
pub fn save_as_dynamic_boc(self: &Arc<Self>, root_cell: Cell) -> Result<usize> {
let diff_writer = self.diff_factory.construct();
let written_count = self.save_tree_of_cells_recursive(
root_cell.clone(),
Arc::clone(&self.db),
&diff_writer)?;
diff_writer.apply()?;
Ok(written_count)
}
/// Gets root cell from key-value storage
pub fn load_dynamic_boc(self: &Arc<Self>, root_cell_id: &CellId) -> Result<Cell> {
let storage_cell = self.load_cell(root_cell_id)?;
Ok(Cell::with_cell_impl_arc(storage_cell))
}
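// Illustrative round trip (a sketch, not taken from the crate's tests):
//
//     let db = Arc::new(DynamicBocDb::in_memory());
//     let written_cells = db.save_as_dynamic_boc(root_cell.clone())?;
//     let restored = db.load_dynamic_boc(&CellId::new(root_cell.repr_hash()))?;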
pub(crate) fn diff_factory(&self) -> &DynamicBocDiffFactory {
&self.diff_factory
}
pub(crate) fn load_cell(self: &Arc<Self>, cell_id: &CellId) -> Result<Arc<StorageCell>> {
if let Some(cell) = self.cells.read()
.expect("Poisoned RwLock")
.get(&cell_id)
{
if let Some(ref cell) = Weak::upgrade(&cell) {
return Ok(Arc::clone(cell));
}
// Even if the cell is disposed, we will load and store it later,
// so we don't need to remove garbage here.
}
let storage_cell = Arc::new(
CellDb::get_cell(&*self.db, &cell_id, Arc::clone(self))?
);
self.cells.write()
.expect("Poisoned RwLock")
.insert(cell_id.clone(), Arc::downgrade(&storage_cell));
Ok(storage_cell)
}
fn save_tree_of_cells_recursive(
self: &Arc<Self>,
cell: Cell,
cell_db: Arc<CellDb>,
diff_writer: &DynamicBocDiffWriter
) -> Result<usize> {
let cell_id = CellId::new(cell.repr_hash());
if cell_db.contains(&cell_id)? {
return Ok(0);
}
diff_writer.add_cell(cell_id, cell.clone());
let mut count = 1;
for i in 0..cell.references_count() {
count += self.save_tree_of_cells_recursive(
cell.reference(i)?,
Arc::clone(&cell_db),
diff_writer
)?;
}
Ok(count)
}
}
impl Deref for DynamicBocDb {
type Target = Arc<CellDb>;
fn deref(&self) -> &Self::Target {
&self.db
}
}
impl DerefMut for DynamicBocDb {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.db
}
}
| true
|
79cb13e9bdd4a69b20c947342681576bee5db0c7
|
Rust
|
PolySignInc/cosmos-rust
|
/cosmos-sdk-rs/src/decimal.rs
|
UTF-8
| 1,356
| 3.203125
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Decimal type with equivalent semantics to the [Cosmos `sdk.Dec`][1] type.
//!
//! [1]: https://pkg.go.dev/github.com/cosmos/cosmos-sdk/types#Dec
use crate::Result;
use std::{
fmt,
ops::{Add, AddAssign},
str::FromStr,
};
/// Decimal type which follows the [Cosmos `sdk.Dec`][1] conventions.
///
/// [1]: https://pkg.go.dev/github.com/cosmos/cosmos-sdk/types#Dec
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub struct Decimal(u64);
impl FromStr for Decimal {
type Err = eyre::Report;
fn from_str(s: &str) -> Result<Self> {
Ok(s.parse().map(Self)?)
}
}
impl fmt::Display for Decimal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl Add for Decimal {
type Output = Decimal;
#[inline]
fn add(self, rhs: Decimal) -> Decimal {
Decimal(self.0 + rhs.0)
}
}
impl AddAssign for Decimal {
#[inline]
fn add_assign(&mut self, rhs: Decimal) {
self.0 += rhs.0;
}
}
macro_rules! impl_from_primitive_int_for_decimal {
($($int:ty),+) => {
$(impl From<$int> for Decimal {
fn from(num: $int) -> Decimal {
#[allow(trivial_numeric_casts)]
Decimal(num.into())
}
})+
};
}
impl_from_primitive_int_for_decimal!(u8, u16, u32, u64);
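// Illustrative usage (a sketch; note that `Decimal` currently wraps a plain
// u64 and displays it verbatim):
//
//     let a = Decimal::from(3u32);
//     let b: Decimal = "4".parse()?;
//     assert_eq!((a + b).to_string(), "7");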
| true
|