blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
41a7e276233534295bf65df308a27a0c54a69255
|
Rust
|
gnoliyil/fuchsia
|
/third_party/rust_crates/vendor/uuid-1.1.2/examples/uuid_macro.rs
|
UTF-8
| 557
| 3.453125
| 3
|
[
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Using the `uuid!` macro.
//!
//! `uuid!` will parse encoded UUIDs at compile time instead of at runtime.
//! If you've got a fixed UUID string handy then consider using `uuid!` instead
//! of `Uuid::parse_str` or `str::parse`.
//!
//! If you enable the `macro-diagnostics` feature, you can see much better
//! error messages.
/// `uuid!` validates and parses the literal during compilation, so a
/// malformed string is a compile error rather than a runtime failure.
#[test]
fn parse_uuid_at_compile_time() {
    use uuid::uuid;
    let id = uuid!("67e55044-10b1-426f-9247-bb680e5fe0c8");
    // The literal above is a version-4 (random) UUID.
    assert_eq!(id.get_version(), Some(uuid::Version::Random));
}

fn main() {}
| true
|
91e2c2f031631b3bf645c24445410004c3cab8b7
|
Rust
|
Pana/rs-data-structures
|
/linked_list/src/lib.rs
|
UTF-8
| 301
| 2.703125
| 3
|
[] |
no_license
|
pub mod linked_list;
/// Common interface for the linked-list implementations in this crate.
pub trait LinkedList<T> {
    /// Returns `true` when the list contains no elements.
    fn is_empty(&self) -> bool;
    /// Removes all elements from the list.
    // NOTE(review): takes `&self`, not `&mut self` — implementors would need
    // interior mutability to actually clear; confirm this is intentional.
    fn clear(&self);
    /// Returns the number of elements currently stored.
    fn len(&self) -> usize;
    /// Prints the list contents (format is implementation-defined).
    fn print(&self);
    /// Inserts `val` at position `index`; returns `true` on success.
    // presumably `false` when `index` is out of bounds — verify against implementors
    fn insert(&mut self, val: T, index: usize) -> bool;
    /// Removes an element; returns `true` on success.
    // NOTE(review): takes both `val` and `index` — unclear which one drives
    // the removal; confirm against the `linked_list` module's implementation.
    fn remove(&mut self, val: T, index: usize) -> bool;
    /// Returns `true` if `val` occurs in the list.
    fn search(&self, val: T) -> bool;
}
| true
|
903b9e4b9498e8c378fee12a9ec9aed476ebf8bf
|
Rust
|
rick68/rust-trace
|
/libcore/num/mod/f32/from_str.rs
|
UTF-8
| 5,487
| 3.578125
| 4
|
[
"MIT"
] |
permissive
|
#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
    // Tests for `f32::from_str` written against the pre-1.0-era unstable
    // `core` APIs (`#![feature(core)]`): `Float` supplies `infinity()` /
    // `neg_infinity()`, and `ParseFloatError::__description` is a hidden,
    // doc(hidden) accessor.
    use core::num::ParseFloatError;
    use core::num::Float;
    use core::str::FromStr;

    // Reference copy of the library items under test, kept for context:
    // pub struct ParseFloatError {
    //     #[doc(hidden)]
    //     pub __kind: FloatErrorKind
    // }

    // #[derive(Debug, Clone, PartialEq)]
    // pub enum FloatErrorKind {
    //     Empty,
    //     Invalid,
    // }

    // impl ParseFloatError {
    //     #[doc(hidden)]
    //     pub fn __description(&self) -> &str {
    //         match self.__kind {
    //             FloatErrorKind::Empty => "cannot parse float from empty string",
    //             FloatErrorKind::Invalid => "invalid float literal",
    //         }
    //     }
    // }

    // macro_rules! from_str_float_impl {
    //     ($t:ty) => {
    //         #[stable(feature = "rust1", since = "1.0.0")]
    //         impl FromStr for $t {
    //             type Err = ParseFloatError;
    //
    //             /// Converts a string in base 10 to a float.
    //             /// Accepts an optional decimal exponent.
    //             ///
    //             /// This function accepts strings such as
    //             ///
    //             /// * '3.14'
    //             /// * '-3.14'
    //             /// * '2.5E10', or equivalently, '2.5e10'
    //             /// * '2.5E-10'
    //             /// * '.' (understood as 0)
    //             /// * '5.'
    //             /// * '.5', or, equivalently, '0.5'
    //             /// * 'inf', '-inf', 'NaN'
    //             ///
    //             /// Leading and trailing whitespace represent an error.
    //             ///
    //             /// # Arguments
    //             ///
    //             /// * src - A string
    //             ///
    //             /// # Return value
    //             ///
    //             /// `Err(ParseFloatError)` if the string did not represent a valid
    //             /// number. Otherwise, `Ok(n)` where `n` is the floating-point
    //             /// number represented by `src`.
    //             #[inline]
    //             #[allow(deprecated)]
    //             fn from_str(src: &str) -> Result<Self, ParseFloatError> {
    //                 Self::from_str_radix(src, 10)
    //             }
    //         }
    //     }
    // }
    // from_str_float_impl!(f32);

    // NOTE(review): the expected values below (e.g. `3.1399999` for "3.14")
    // pin the rounding behaviour of the old `from_str_radix`-based parser
    // this file was written against; a modern correctly-rounded parser may
    // produce a different nearest f32 — confirm before porting.
    #[test]
    fn from_str_test1() {
        let src: &'static str = "3.14";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(3.1399999));
    }

    #[test]
    fn from_str_test2() {
        let src: &'static str = "-3.14";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(-3.1399999));
    }

    // Positive decimal exponent, uppercase 'E'.
    #[test]
    fn from_str_test3() {
        let src: &'static str = "2.5E10";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(25000000000f32));
    }

    // Same value, lowercase 'e'.
    #[test]
    fn from_str_test4() {
        let src: &'static str = "2.5e10";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(25000000000f32));
    }

    // Negative decimal exponent, uppercase 'E'.
    #[test]
    fn from_str_test5() {
        let src: &'static str = "2.5E-10";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(0.00000000025));
    }

    // Negative decimal exponent, lowercase 'e'.
    #[test]
    fn from_str_test6() {
        let src: &'static str = "2.5e-10";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(0.00000000025));
    }

    // A bare '.' is understood as zero by this parser.
    #[test]
    fn from_str_test7() {
        let src: &'static str = ".";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(0f32));
    }

    // Trailing decimal point with no fractional digits.
    #[test]
    fn from_str_test8() {
        let src: &'static str = "5.";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(5f32));
    }

    // Leading decimal point with no integer digits.
    #[test]
    fn from_str_test9() {
        let src: &'static str = ".5";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(0.5f32));
    }

    #[test]
    fn from_str_test10() {
        let src: &'static str = "0.5";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(0.5f32));
    }

    // "inf" parses to positive infinity (via the unstable `Float` trait).
    #[test]
    fn from_str_test11() {
        let src: &'static str = "inf";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(f32::infinity()));
    }

    #[test]
    fn from_str_test12() {
        let src: &'static str = "-inf";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result, Ok::<f32, ParseFloatError>(f32::neg_infinity()));
    }

    // NaN != NaN, so the result is checked with `is_nan` instead of equality.
    #[test]
    fn from_str_test13() {
        let src: &'static str = "NaN";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        match result {
            Ok(v) => assert_eq!(v.is_nan(), true),
            Err(_) => assert!(false)
        }
    }

    // Empty input produces the `Empty` error kind.
    #[test]
    fn from_str_test14() {
        let src: &'static str = "";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result.unwrap_err().__description(), "cannot parse float from empty string");
    }

    // Non-numeric input produces the `Invalid` error kind.
    #[test]
    fn from_str_test15() {
        let src: &'static str = "Hello, World!";
        let result: Result<f32, ParseFloatError> = f32::from_str(src);
        assert_eq!(result.unwrap_err().__description(), "invalid float literal");
    }
}
| true
|
ecb43194a0f07d8c3e709ac5ea82ae665e8f3d03
|
Rust
|
kyle-mccarthy/alpine
|
/src/error/arrow_error.rs
|
UTF-8
| 1,300
| 2.671875
| 3
|
[
"Apache-2.0"
] |
permissive
|
use arrow::error::ArrowError as ArrowErr;
use snafu::Snafu;
/// Local mirror of `arrow::error::ArrowError` with owned `String` payloads,
/// declared here so `Snafu` error contexts can be derived for it.
#[derive(Debug, Snafu)]
pub enum ArrowError {
    MemoryError { inner: String },
    ParseError { inner: String },
    ComputeError { inner: String },
    DivideByZero,
    CsvError { inner: String },
    JsonError { inner: String },
    IoError { inner: String },
    InvalidArgumentError { inner: String },
}
impl From<ArrowErr> for ArrowError {
fn from(error: ArrowErr) -> ArrowError {
match error {
ArrowErr::MemoryError(inner) => ArrowError::MemoryError { inner },
ArrowErr::ParseError(inner) => ArrowError::ParseError { inner },
ArrowErr::ComputeError(inner) => ArrowError::ComputeError { inner },
ArrowErr::DivideByZero => ArrowError::DivideByZero,
ArrowErr::CsvError(inner) => ArrowError::CsvError { inner },
ArrowErr::JsonError(inner) => ArrowError::JsonError { inner },
ArrowErr::IoError(inner) => ArrowError::IoError { inner },
ArrowErr::InvalidArgumentError(inner) => ArrowError::InvalidArgumentError { inner },
}
}
}
impl From<ArrowErr> for super::Error {
fn from(error: ArrowErr) -> super::Error {
let source = Into::<ArrowError>::into(error);
super::Error::Arrow { source }
}
}
| true
|
8cb66e49bfcbf6d54c3a267715503f039e40d83d
|
Rust
|
asny/tri-mesh
|
/src/mesh/traversal.rs
|
UTF-8
| 11,943
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
use crate::mesh::connectivity_info::{ConnectivityInfo, HalfEdge};
use crate::mesh::*;
/// # Traversal
/// Methods to construct a [Walker] which is used for easy and efficient traversal of the mesh.
/// See [Walker] for more information and examples.
/// Also see [Connectivity](#connectivity) for common connectivity utility functionality.
impl Mesh {
    /// Creates an 'empty' [Walker], i.e. one that is not yet positioned on
    /// any half-edge.
    pub(super) fn walker(&self) -> Walker {
        Walker::new(&self.connectivity_info)
    }

    /// Creates a [Walker] at the half-edge pointed to by the given vertex.
    pub fn walker_from_vertex(&self, vertex_id: VertexID) -> Walker {
        Walker::new(&self.connectivity_info).into_vertex_halfedge_walker(vertex_id)
    }

    /// Creates a [Walker] at the given half-edge.
    pub fn walker_from_halfedge(&self, halfedge_id: HalfEdgeID) -> Walker {
        Walker::new(&self.connectivity_info).into_halfedge_walker(halfedge_id)
    }

    /// Creates a [Walker] at the half-edge pointed to by the given face.
    pub fn walker_from_face(&self, face_id: FaceID) -> Walker {
        Walker::new(&self.connectivity_info).into_face_halfedge_walker(face_id)
    }
}
///
/// Used for easy and efficient traversal of the mesh.
/// Also see [Connectivity](Mesh#connectivity) for common connectivity utility functionality.
///
/// Use [Mesh::walker_from_vertex], [Mesh::walker_from_halfedge] or [Mesh::walker_from_face] to construct a walker
/// and the examples below for instructions on how to use a walker.
///
/// **Note:** If you walk outside the mesh at some point, no error will be returned,
/// instead, all methods to extract an ID will return `None`.
///
/// # Examples
///
/// ## \# 1
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// # let halfedge_id = mesh.halfedge_iter().next().unwrap();
/// // Find the id of the vertex pointed to by a half-edge.
/// let vertex_id = mesh.walker_from_halfedge(halfedge_id).vertex_id().unwrap();
/// ```
///
/// ## \# 2
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// # let halfedge_id = mesh.halfedge_iter().next().unwrap();
/// let mut walker = mesh.walker_from_halfedge(halfedge_id);
/// // Walk around the three sides of a face..
/// let result_halfedge_id = walker.as_next().as_next().next_id().unwrap();
/// // .. ending up at the same half-edge
/// assert_eq!(halfedge_id, result_halfedge_id);
/// ```
/// ## \# 3
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// # let face_id = mesh.face_iter().next().unwrap();
/// // Find one neighbouring face to the given face
/// let neighbour_face_id = mesh.walker_from_face(face_id).into_twin().face_id().unwrap();
/// ```
///
/// ## \# 4
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// # let face_id = mesh.face_iter().next().unwrap();
/// // Find the circumference of a face
/// let mut walker = mesh.walker_from_face(face_id);
/// let mut circumference = mesh.edge_length(walker.halfedge_id().unwrap());
/// walker.as_next();
/// circumference += mesh.edge_length(walker.halfedge_id().unwrap());
/// circumference += mesh.edge_length(walker.next_id().unwrap());
/// ```
///
/// ## \# 5
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// # let halfedge_id = mesh.halfedge_iter().next().unwrap();
/// // Check if the half-edge is on the boundary of the mesh
/// let mut walker = mesh.walker_from_halfedge(halfedge_id);
/// let is_on_boundary = walker.face_id().is_none() || walker.as_twin().face_id().is_none();
/// # assert!(!is_on_boundary);
/// ```
///
/// ## \# 6
///
/// ```
/// # use tri_mesh::*;
/// # let mesh: Mesh = three_d_asset::TriMesh::sphere(4).into();
/// // Compute the average edge length
/// let mut avg_edge_length = 0.0f64;
/// for halfedge_id in mesh.edge_iter()
/// {
/// let mut walker = mesh.walker_from_halfedge(halfedge_id);
/// let p0 = mesh.vertex_position(walker.vertex_id().unwrap());
/// let p1 = mesh.vertex_position(walker.as_twin().vertex_id().unwrap());
/// avg_edge_length += (p0 - p1).magnitude();
/// }
/// avg_edge_length /= mesh.no_edges() as f64;
/// ```
///
#[derive(Clone, Debug)]
pub struct Walker<'a> {
    // Shared connectivity data of the mesh being traversed.
    connectivity_info: &'a ConnectivityInfo,
    // Id of the half-edge the walker currently sits on; `None` once the
    // walker has stepped outside the mesh.
    current: Option<HalfEdgeID>,
    // Cached record for `current`, refreshed by `set_current`.
    current_info: Option<HalfEdge>,
}
impl<'a> Walker<'a> {
    /// Creates a walker that is not yet positioned on any half-edge.
    pub(super) fn new(connectivity_info: &'a ConnectivityInfo) -> Self {
        Walker {
            connectivity_info,
            current: None,
            current_info: None,
        }
    }

    /// Jumps to the half-edge pointed to by the given vertex.
    pub(super) fn into_vertex_halfedge_walker(mut self, vertex_id: VertexID) -> Self {
        self.as_vertex_halfedge_walker(vertex_id);
        self
    }

    /// Jumps to the given half-edge.
    pub(super) fn into_halfedge_walker(mut self, halfedge_id: HalfEdgeID) -> Self {
        self.as_halfedge_walker(halfedge_id);
        self
    }

    /// Jumps to the half-edge pointed to by the given face.
    pub(super) fn into_face_halfedge_walker(mut self, face_id: FaceID) -> Self {
        self.as_face_halfedge_walker(face_id);
        self
    }

    /// Jumps to the half-edge pointed to by the given vertex.
    pub(super) fn as_vertex_halfedge_walker(&mut self, vertex_id: VertexID) -> &mut Self {
        let target = self.connectivity_info.vertex_halfedge(vertex_id);
        self.set_current(target);
        self
    }

    /// Jumps to the given half-edge.
    pub(super) fn as_halfedge_walker(&mut self, halfedge_id: HalfEdgeID) -> &mut Self {
        self.set_current(Some(halfedge_id));
        self
    }

    /// Jumps to the half-edge pointed to by the given face.
    pub(super) fn as_face_halfedge_walker(&mut self, face_id: FaceID) -> &mut Self {
        let target = self.connectivity_info.face_halfedge(face_id);
        self.set_current(target);
        self
    }

    /// Walk to the next half-edge in the adjacent face, consuming the walker.
    pub fn into_next(mut self) -> Self {
        self.as_next();
        self
    }

    /// Walk to the previous half-edge in the adjacent face, consuming the walker.
    pub fn into_previous(mut self) -> Self {
        self.as_previous();
        self
    }

    /// Walk to the twin half-edge, consuming the walker.
    pub fn into_twin(mut self) -> Self {
        self.as_twin();
        self
    }

    /// Walk to the next half-edge in the adjacent face.
    pub fn as_next(&mut self) -> &mut Self {
        let next = self.current_info.as_ref().and_then(|info| info.next);
        self.set_current(next);
        self
    }

    /// Walk to the previous half-edge in the adjacent face.
    /// In a triangle, the previous half-edge is reached by stepping forward twice.
    pub fn as_previous(&mut self) -> &mut Self {
        self.as_next().as_next()
    }

    /// Walk to the twin half-edge.
    pub fn as_twin(&mut self) -> &mut Self {
        let twin = self.current_info.as_ref().and_then(|info| info.twin);
        self.set_current(twin);
        self
    }

    /// Returns the id of the vertex pointed to by the current half-edge or
    /// `None` if the walker has walked outside of the mesh at some point.
    pub fn vertex_id(&self) -> Option<VertexID> {
        self.current_info.as_ref().and_then(|info| info.vertex)
    }

    /// Returns the id of the next half-edge in the adjacent face or `None` if
    /// the half-edge is at the boundary of the mesh or if the walker has
    /// walked outside of the mesh at some point.
    pub fn next_id(&self) -> Option<HalfEdgeID> {
        self.current_info.as_ref().and_then(|info| info.next)
    }

    /// Returns the id of the previous half-edge in the adjacent face or
    /// `None` if the half-edge is at the boundary of the mesh or if the
    /// walker has walked outside of the mesh at some point.
    pub fn previous_id(&self) -> Option<HalfEdgeID> {
        // previous == next of next in a triangle; resolved with a scratch walker.
        self.next_id().and_then(|next_id| {
            Walker::new(self.connectivity_info)
                .into_halfedge_walker(next_id)
                .next_id()
        })
    }

    /// Returns the id of the twin half-edge to the current half-edge or
    /// `None` if the walker has walked outside of the mesh at some point.
    pub fn twin_id(&self) -> Option<HalfEdgeID> {
        self.current_info.as_ref().and_then(|info| info.twin)
    }

    /// Returns the id of the current half-edge or `None` if the walker has
    /// walked outside of the mesh at some point.
    pub fn halfedge_id(&self) -> Option<HalfEdgeID> {
        self.current
    }

    /// Returns the id of the adjacent face or `None` if the half-edge is at
    /// the boundary of the mesh or if the walker has walked outside of the
    /// mesh at some point.
    pub fn face_id(&self) -> Option<FaceID> {
        self.current_info.as_ref().and_then(|info| info.face)
    }

    // Repositions the walker, refreshing the cached half-edge record.
    fn set_current(&mut self, halfedge_id: Option<HalfEdgeID>) {
        self.current_info = halfedge_id.and_then(|id| self.connectivity_info.halfedge(id));
        self.current = halfedge_id;
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Walks a single free-standing triangle and checks each traversal step
    /// against the expected connectivity.
    #[test]
    fn test_one_face_connectivity() {
        let mesh: Mesh = crate::test_utility::triangle();
        let f1 = mesh.face_iter().next().unwrap();
        // The three vertices of the face, in half-edge order.
        let v1 = mesh.walker_from_face(f1).vertex_id().unwrap();
        let v2 = mesh.walker_from_face(f1).as_next().vertex_id().unwrap();
        let v3 = mesh.walker_from_face(f1).as_previous().vertex_id().unwrap();
        let t1 = mesh.walker_from_vertex(v1).vertex_id();
        assert_eq!(t1, Some(v2));
        // The twin of a vertex's half-edge points back at the vertex itself.
        let t2 = mesh.walker_from_vertex(v1).as_twin().vertex_id();
        assert_eq!(t2, Some(v1));
        // Two `next` steps complete the triangle loop.
        let t3 = mesh.walker_from_vertex(v2).as_next().as_next().vertex_id();
        assert_eq!(t3, Some(v2));
        // Crossing the boundary: the twin half-edge has no adjacent face ...
        let t4 = mesh.walker_from_face(f1).as_twin().face_id();
        assert!(t4.is_none());
        // ... and no next half-edge either.
        let t5 = mesh.walker_from_face(f1).as_twin().next_id();
        assert!(t5.is_none());
        // Twin of a twin returns to the original face.
        let t6 = mesh
            .walker_from_face(f1)
            .as_previous()
            .as_previous()
            .as_twin()
            .as_twin()
            .face_id();
        assert_eq!(t6, Some(f1));
        // Three `next` steps end up at the starting half-edge.
        let t7 = mesh.walker_from_vertex(v2).as_next().as_next().next_id();
        assert_eq!(t7, mesh.walker_from_vertex(v2).halfedge_id());
        let t8 = mesh.walker_from_vertex(v3).face_id();
        assert_eq!(t8, Some(f1));
        mesh.is_valid().unwrap();
    }

    /// On a subdivided triangle, walks one full ring around an interior
    /// vertex and checks the walk returns to its starting half-edge.
    #[test]
    fn test_three_face_connectivity() {
        let mesh = crate::test_utility::subdivided_triangle();
        // Find an interior vertex: every surrounding half-edge has a face.
        let mut id = None;
        for vertex_id in mesh.vertex_iter() {
            let mut round = true;
            for halfedge_id in mesh.vertex_halfedge_iter(vertex_id) {
                if mesh.walker_from_halfedge(halfedge_id).face_id().is_none() {
                    round = false;
                    break;
                }
            }
            if round {
                id = Some(vertex_id);
                break;
            }
        }
        let mut walker = mesh.walker_from_vertex(id.unwrap());
        let start_edge = walker.halfedge_id().unwrap();
        // previous + twin three times circles the vertex once.
        let one_round_edge = walker
            .as_previous()
            .as_twin()
            .as_previous()
            .as_twin()
            .as_previous()
            .twin_id()
            .unwrap();
        assert_eq!(start_edge, one_round_edge);
    }
}
| true
|
e0dad9c65dd7b614309ef2e93d719742912bde15
|
Rust
|
sgaflv/euler
|
/src/solve18.rs
|
UTF-8
| 1,714
| 3.15625
| 3
|
[] |
no_license
|
use core::cmp;
// Project Euler problem 18 input: a 15-row triangle of numbers, one row per
// line. The `\` after the opening quote swallows the first newline so the
// string begins directly with "75".
static NUMBERS_STR: &str = "\
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
";
/// Triangle of numbers addressed as `numbers[row][col]`.
type Numbers = Vec<Vec<u64>>;

/// Maximum-path-sum solver over a number triangle (Project Euler 18).
pub struct Solver {
    size: usize,
    numbers: Numbers,
    best: Numbers,
}

/// Allocates a `size` x `size` grid filled with zeros.
fn init_numbers(size: usize) -> Numbers {
    (0..size).map(|_| vec![0u64; size]).collect()
}

/// Parses the whitespace-separated triangle text into a square grid;
/// cells outside the triangle keep their zero fill.
fn get_numbers(size: usize, numbers_str: &str) -> Numbers {
    let mut grid = init_numbers(size);
    for (row, line) in numbers_str.lines().enumerate() {
        for (col, token) in line.split_whitespace().enumerate() {
            grid[row][col] = token.parse().unwrap();
        }
    }
    grid
}

impl Solver {
    /// Builds a solver for a triangle with `size` rows.
    pub fn new(size: usize, numbers_str: &str) -> Solver {
        Solver {
            size,
            numbers: get_numbers(size, numbers_str),
            best: init_numbers(size),
        }
    }

    /// Returns the maximum path sum from cell `(x, y)` down to the base of
    /// the triangle, memoized in `self.best`.
    ///
    /// Coordinates outside the triangle contribute 0. Note that 0 doubles as
    /// the "not yet computed" sentinel, which is safe while every triangle
    /// entry is positive.
    pub fn solve(&mut self, x: usize, y: usize) -> u64 {
        // Right of the diagonal or past the last row: outside the triangle.
        if x > y || x >= self.size || y >= self.size {
            return 0;
        }
        if self.best[y][x] == 0 {
            let down = self.solve(x, y + 1);
            let diagonal = self.solve(x + 1, y + 1);
            self.best[y][x] = self.numbers[y][x] + down.max(diagonal);
        }
        self.best[y][x]
    }
}
/// Solves Project Euler problem 18 for the embedded 15-row triangle.
pub fn solve18() -> u64 {
    Solver::new(15, NUMBERS_STR).solve(0, 0)
}
| true
|
7be76049d4fdfb4cecd1a6b70d91fb1fcf92e57e
|
Rust
|
vojtechkral/jxlrate
|
/src/utils.rs
|
UTF-8
| 248
| 3.0625
| 3
|
[] |
no_license
|
use std::ops;
/// A rational number `numerator / denominator` over `u32`
/// (`Rational(n, d)` represents `n / d`).
#[derive(Clone, Copy, Debug)]
pub struct Rational(pub u32, pub u32);

impl ops::Mul<u32> for Rational {
    type Output = u32;

    /// Scales `rhs` by the rational: computes `rhs * n / d`.
    ///
    /// The product is formed in `u64` so the intermediate value cannot
    /// overflow; only the final quotient is narrowed back to `u32` (which
    /// still truncates if the quotient itself exceeds `u32::MAX`).
    fn mul(self, rhs: u32) -> Self::Output {
        // Bug fix: `as` binds tighter than `/`, so the original
        // `(rhs as u64 * self.0 as u64) as u32 / self.1` truncated the u64
        // product to u32 *before* dividing, defeating the widening.
        // Divide in u64 first, then narrow.
        ((rhs as u64 * self.0 as u64) / self.1 as u64) as u32
    }
}
| true
|
b08fb3fc7d1810c21986b05662a8b4301c24cbf7
|
Rust
|
Havvy/voroni
|
/src/voronoi_map.rs
|
UTF-8
| 20,615
| 2.890625
| 3
|
[] |
no_license
|
use std::cell::Cell;
use std::cmp::Ordering;
use std::collections::binary_heap::BinaryHeap;
use std::collections::{BTreeMap, HashSet};
use std::f64::{NEG_INFINITY, INFINITY};
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::ops::Index;
use std::rc::Rc;
use ordered_float::{NotNaN};
use half_edge::{ConnectivityKernel};
use ::geometry::{LineSegment, Point, Triangle, TriangleOrientation};
use self::Event::{Site, Circle};
// NOTE(review): presumably indicates which way a breakpoint's traced edge
// grows along the beachline — confirm against its use in `Event::handle`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum EdgeDirection{Left, Right}

/// FIXME(Havvy): Transient struct? Should use the ConnectivityKernel instead?
// Placeholder for a Voronoi diagram edge; currently carries no data.
// NOTE(review): the type name is spelled "Voroni" throughout this file.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
struct VoroniEdge;
// Current y-coordinate of the Fortune's-algorithm sweep line, wrapped in a
// `Cell` so it can be advanced through shared references.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Sweepline(Cell<NotNaN<f64>>);

impl Sweepline {
    /// Creates a sweepline at height `y`.
    fn new(y: NotNaN<f64>) -> Sweepline {
        Sweepline(Cell::new(y))
    }

    /// Moves the sweepline to `y`.
    fn set(&self, y: NotNaN<f64>) {
        self.0.set(y);
    }

    /// Returns the current sweepline height.
    fn get(&self) -> NotNaN<f64> {
        self.0.get()
    }
}
/// The breakpoint of two coinciding arcs in the beachline
///
/// The struct only contains the information for calculating the breakpoint,
/// and the actual breakpoint is on the `point` method.
// TODO(Havvy): Derive PartialEq and Eq myself and ignore the cache.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
struct ArcBreakpoint<'sl> {
    /// The site of the arc to the left on the breakpoint
    site_left: Point,
    /// The site of the arc to the right on the breakpoint
    site_right: Point,
    /// ??? NOTE(review): presumably the Voronoi edge this breakpoint traces
    /// out as the sweep advances — confirm.
    e: VoroniEdge,
    /// ??? NOTE(review): presumably which way the traced edge grows — confirm.
    direction: EdgeDirection,
    /// ??? Set to `point()` at construction time (see `new`), i.e. where the
    /// traced edge starts.
    edge_begin: Point,
    /// Where the sweep location is at all time. (This gets mutated despite being a non-mutable borrow.)
    sweep_location: &'sl Sweepline,
    /// Last location of the sweep location last time the breakpoint was queried.
    sweep_location_cache: Cell<NotNaN<f64>>,
    /// The last point returned last time the breakpoint was queried.
    point_cache: Cell<Point>
}
impl<'sl> ArcBreakpoint<'sl> {
    /// Creates a breakpoint between the arcs of `site_left` and `site_right`,
    /// recording the breakpoint's current position as the traced edge's start.
    fn new(site_left: Point, site_right: Point, e: VoroniEdge, direction: EdgeDirection, sweep_location: &Sweepline) -> ArcBreakpoint {
        let zero = NotNaN::new(0f64).expect("0 is not NaN");
        let mut breakpoint = ArcBreakpoint {
            site_left, site_right, e, direction, sweep_location,
            edge_begin: Point { x: zero, y: zero },
            sweep_location_cache: Cell::new(zero),
            point_cache: Cell::new(Point { x: zero, y: zero })
        };
        // The edge begins wherever the breakpoint sits right now.
        breakpoint.edge_begin = breakpoint.point();
        breakpoint
    }

    /// Calculate the location of the breakpoint at the current sweep location.
    ///
    /// Results are memoized per sweep position via `point_cache`; the general
    /// (non-equal-y) case is not implemented yet and panics.
    fn point(&self) -> Point {
        let sweep_location = self.sweep_location.get();
        // Cache hit: the sweepline has not moved since the last query.
        // TODO(Havvy): Do with interior mutability?
        if sweep_location == self.sweep_location_cache.get() {
            return self.point_cache.get();
        }
        self.sweep_location_cache.set(sweep_location);
        // TODO(Havvy): Learn math and refactor.
        // TODO(Havvy): What happens if sweep location and y values are all the same when?
        let two = NotNaN::new(2f64).expect("Two is not NaN.");
        self.point_cache.set(if self.site_left.y == self.site_right.y {
            // Vertical line case.
            let x = (self.site_left.x + self.site_right.x) / two;
            let y_num = NotNaN::new((x - self.site_left.x).powi(2) + self.site_left.y.powi(2) - sweep_location.powi(2)).expect("Squaring cannot produce NaN.");
            let y_den = two * (self.site_left.y - *sweep_location);
            let y = y_num / y_den;
            Point { x, y }
        } else {
            // This method works by intersecting the line of the edge with the parabola of one of the higher site point.
            let site = if self.site_left > self.site_right { self.site_left } else { self.site_right };
            // TODO(Havvy): Math into geometry module
            let _d = two * (site.y - *sweep_location);
            // Ported-from-Java reference implementation of the general case:
            // double px = (s1.y > s2.y) ? s1.x : s2.x;
            // double py = (s1.y > s2.y) ? s1.y : s2.y;
            // double m = e.m;
            // double b = e.b;
            // double d = 2*(py - l);
            // // Straight up quadratic formula
            // double A = 1;
            // double B = -2*px - d*m;
            // double C = sq(px) + sq(py) - sq(l) - d*b;
            // int sign = (s1.y > s2.y) ? -1 : 1;
            // double det = sq(B) - 4 * A * C;
            // // When rounding leads to a very very small negative determinant, fix it
            // if (det <= 0) {
            //     x = -B / (2 * A);
            // }
            // else {
            //     x = (-B + sign * Math.sqrt(det)) / (2 * A);
            // }
            // y = m*x + b;
            unimplemented!()
        });
        // Recursive call that grabs the updated cached version.
        self.point()
    }
}
impl<'sl> Hash for ArcBreakpoint<'sl> {
    /// Hashes the identity-determining fields in declaration order; the two
    /// caches and the sweepline reference are not part of the hash.
    // FIXME(Havvy): Some of these fields change?
    fn hash<H>(&self, hasher: &mut H) where H: Hasher {
        // A tuple hashes its elements in sequence, producing the same byte
        // stream as hashing each field individually.
        (
            &self.site_left,
            &self.site_right,
            &self.e,
            &self.direction,
            &self.edge_begin,
        )
            .hash(hasher);
    }
}
/// The set of live breakpoints, shared via `Rc` with the arcs that use them.
struct Breakpoints<'sl>(HashSet<Rc<ArcBreakpoint<'sl>>>);

impl<'sl> Breakpoints<'sl> {
    /// Creates an empty breakpoint set.
    fn new() -> Breakpoints<'sl> {
        Breakpoints(HashSet::new())
    }

    /// Takes ownership of `breakpoint`, stores a shared handle in the set,
    /// and returns another handle for the caller.
    fn insert(&mut self, breakpoint: ArcBreakpoint<'sl>) -> Rc<ArcBreakpoint<'sl>> {
        let shared = Rc::new(breakpoint);
        self.0.insert(Rc::clone(&shared));
        shared
    }

    /// Drops the set's handle to `breakpoint`.
    fn remove(&mut self, breakpoint: Rc<ArcBreakpoint<'sl>>) {
        self.0.remove(&breakpoint);
    }
}
/// An arc on the beachline.
///
/// The arc consists of the site that created it and the breakpoints with the arcs next to it.
///
/// The first arc cannot have breakpoints as no other arcs to break against.
/// Afterwards, only the leftmost arc's left breakpoint and rightmost arc's right breakpoint
/// will be None.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Arc<'sl> {
    /// The location of the site event that created the arc.
    site: Point,
    /// Breakpoint shared with the arc to the left; `None` for the leftmost arc.
    left: Option<Rc<ArcBreakpoint<'sl>>>,
    /// Breakpoint shared with the arc to the right; `None` for the rightmost arc.
    right: Option<Rc<ArcBreakpoint<'sl>>>
}
impl<'sl> Arc<'sl> {
    /// Creates the very first arc of the beachline; it has no neighbours and
    /// therefore no breakpoints.
    fn first(site: Point) -> Arc<'sl> {
        Arc { site, left: None, right: None }
    }

    /// Creates an arc from its breakpoints, inferring the site from whichever
    /// breakpoint is present (the left breakpoint's right site, or the right
    /// breakpoint's left site).
    ///
    /// Panics if both breakpoints are `None`; use [`Arc::first`] for the
    /// initial arc instead.
    fn new(left: Option<Rc<ArcBreakpoint<'sl>>>, right: Option<Rc<ArcBreakpoint<'sl>>>) -> Arc<'sl> {
        let site = match (&left, &right) {
            (&Some(ref bp), _) => bp.site_right,
            (_, &Some(ref bp)) => bp.site_left,
            _ => { panic!("At least one breakpoint must exist."); }
        };
        Arc { left, right, site }
    }

    /// Whether or not the point is located under the arc.
    fn is_point_under(&self, point: &Point) -> bool {
        point.x >= self.left_breakpoint().x && point.x <= self.right_breakpoint().x
    }

    /// Current position of the left breakpoint; negative x-infinity for the
    /// leftmost arc, which has no left neighbour.
    fn left_breakpoint(&self) -> Point {
        match self.left {
            Some(ref left) => left.point(),
            None => Point::new_unwrap(NEG_INFINITY, INFINITY)
        }
    }

    /// Current position of the right breakpoint; positive x-infinity for the
    /// rightmost arc, which has no right neighbour.
    fn right_breakpoint(&self) -> Point {
        // Bug fix: this previously matched on `self.left`, returning the
        // *left* breakpoint as the right one — which also broke
        // `is_point_under` (its span collapsed to a single x).
        match self.right {
            Some(ref right) => right.point(),
            None => Point::new_unwrap(INFINITY, INFINITY)
        }
    }

    /// Get the center of the circle event's circle, if it exists.
    ///
    /// The outer arcs in the beachline do not have circle events.
    ///
    /// Furthermore, the location of the arc's site w.r.t the adjacent sites
    /// can mean there is no circle event.
    fn circle_event_center(&self) -> Option<Point> {
        match *self {
            Arc { left: None, .. } => None,
            Arc { right: None, .. } => None,
            Arc { left: Some(ref left), right: Some(ref right), site } => {
                if Triangle(left.site_left, site, right.site_right).orientation() != TriangleOrientation::Counterclockwise {
                    None
                } else {
                    // FIXME(Havvy): return (this.left.getEdge().intersection(this.right.getEdge()));
                    unimplemented!()
                }
            }
        }
    }

    /// Builds the circle event for this arc, if [`Arc::circle_event_center`]
    /// yields one. The event's site sits one circle-radius below the center.
    fn get_circle_event(&self) -> Option<Event<'sl>> {
        self.circle_event_center().map(|center| {
            let radius = (LineSegment { from: self.site, to: center }).length();
            Circle {
                site: Point { x: center.x, y: center.y - radius },
                arc: (*self).clone(),
                vertex: center
            }
        })
    }
}
impl<'sl> PartialOrd for Arc<'sl> {
    /// Delegates to [`Ord`]; arcs are always comparable here.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
// FIXME(Havvy): These are _false_ impls. Arcs cannot be ordered alone.
impl<'sl> Ord for Arc<'sl> {
    /// Orders arcs by their horizontal span on the beachline at the current
    /// sweep position: identical spans are equal, disjoint spans compare by
    /// x position, and overlapping spans fall back to comparing midpoints.
    fn cmp(&self, other: &Self) -> Ordering {
        let my_left = self.left_breakpoint();
        let my_right = self.right_breakpoint();
        let other_left = other.left_breakpoint();
        let other_right = other.right_breakpoint();
        if my_left.x == other_left.x && my_right.x == other_right.x {
            Ordering::Equal
        } else if my_left.x >= other_right.x {
            Ordering::Greater
        } else if my_right.x <= other_left.x {
            Ordering::Less
        } else {
            // Overlapping spans: compare midpoints.
            // NOTE(review): relies on `Point`'s own ordering — confirm it
            // orders by x first for this to behave as a beachline order.
            let my_midpoint = (LineSegment { from: my_left, to: my_right }).midpoint();
            let other_midpoint = (LineSegment { from: other_left, to: other_right }).midpoint();
            my_midpoint.cmp(&other_midpoint)
        }
    }
}
/// The beachline is a map from Arcs to their associated Circle Events, if they have one.
#[derive(Debug)]
struct Beachline<'sl>(BTreeMap<Arc<'sl>, Option<Event<'sl>>>);

impl<'sl> Beachline<'sl> {
    /// Creates an empty beachline.
    fn new() -> Beachline<'sl> {
        Beachline(BTreeMap::new())
    }

    /// Iterates the arcs in beachline order (the `BTreeMap` key order).
    fn arcs<'bl>(&'bl self) -> ::std::collections::btree_map::Keys<'bl, Arc<'sl>, Option<Event<'sl>>> {
        self.0.keys()
    }

    /// Removes the arcs immediately left and right of `arc`, cancelling any
    /// circle events they had queued, and returns them. Only valid while
    /// handling a circle event for `arc`, when both neighbours must exist.
    fn remove_adjacent_arcs(&mut self, arc: &Arc<'sl>, event_queue: &mut EventQueue<'sl>) -> (Arc<'sl>, Arc<'sl>) {
        // The last arc ordered before `arc` is its left neighbour.
        let arc_left = self
            .arcs()
            .filter(|beacharc| beacharc < &arc)
            .last()
            .expect("Only called with a circle event arc, so adjacent arcs must exist.")
            .clone();
        // The first arc ordered after `arc` is its right neighbour.
        let arc_right = self
            .arcs()
            .filter(|beacharc| beacharc > &arc)
            .next()
            .expect("Only called with a circle event arc, so adjacent arcs must exist.")
            .clone();
        self.remove(&arc_left, event_queue);
        self.remove(&arc_right, event_queue);
        (arc_left, arc_right)
    }

    /// Put a new arc into the beachline with its associated circle event.
    /// When there aren't enough points for a circle event (less than 3?),
    /// then put None for the circle event.
    fn insert(&mut self, arc: Arc<'sl>, event: Option<Event<'sl>>) {
        self.0.insert(arc, event);
    }

    /// Removes `arc`, also cancelling its pending circle event (if any)
    /// from the event queue.
    fn remove(&mut self, arc: &Arc<'sl>, event_queue: &mut EventQueue<'sl>) {
        let maybe_event = self.0.remove(arc).expect("Removed arc is in the beachline");
        if let Some(circle_event) = maybe_event {
            event_queue.remove(circle_event);
        }
    }

    /// Removes `arc` without touching the event queue.
    fn remove_only(&mut self, arc: &Arc<'sl>) {
        self.0.remove(arc);
    }
}
impl<'a, 'sl> Index<&'a Arc<'sl>> for Beachline<'sl> {
    type Output = Option<Event<'sl>>;

    /// Looks up the circle-event slot for `arc`.
    /// Panics (via `BTreeMap` indexing) if `arc` is not on the beachline.
    fn index(&self, arc: &'a Arc<'sl>) -> &Option<Event<'sl>> {
        &self.0[arc]
    }
}
/// An event to be processed in Fortune's Algorithm.
///
/// Events are compared where the lowest `y` value is the greatest.
// NOTE(review): the ordering impl implementing that comparison is not in this
// part of the file; confirm it matches the description above.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Event<'sl> {
    /// The sweepline is passing this point.
    Site(Point),
    /// An arc collapsed into a single point.
    Circle {
        /// The site of the circle's event.
        site: Point,
        /// The arc that has collapsed into a point when the site is handled.
        arc: Arc<'sl>,
        /// The point the arc collapsed at.
        vertex: Point
    }
}
impl<'sl> Event<'sl> {
    /// Advances Fortune's algorithm by one step for this event.
    ///
    /// A `Site` event splits the arc above the new site into three arcs joined
    /// by two fresh breakpoints (which will trace out a new Voronoi edge).
    /// A `Circle` event removes the collapsed arc; that branch is still a stub
    /// and currently panics via `unimplemented!()`.
    fn handle(self, event_queue: &mut EventQueue<'sl>, beachline: &mut Beachline<'sl>, sweep_location: &'sl Sweepline, breakpoints: &mut Breakpoints<'sl>, _voroni: &mut ConnectivityKernel) {
        match self {
            Site(site) => {
                // Every non-initial site lies under exactly one existing arc;
                // that arc is the one that gets split.
                let arc_above_site = beachline
                    .arcs()
                    .find(|arc| arc.is_point_under(&site))
                    .expect("There is always an arc above every non-initial site.")
                    .clone();

                // Deal with the degenerate case where the first two points are at the same y value
                // ???(Havvy): Is this actually needed? The if check is wrong because size would never be 0.
                // ???(Havvy): If it is needed, should this be done outside of handle, perhaps in an e.g. handle_second?
                // if (arcs.size() == 0 && arcAbove.site.y == cur.p.y) {
                //     VoronoiEdge newEdge = new VoronoiEdge(arcAbove.site, cur.p);
                //     newEdge.p1 = new Point((cur.p.x + arcAbove.site.x)/2, Double.POSITIVE_INFINITY);
                //     BreakPoint newBreak = new BreakPoint(arcAbove.site, cur.p, newEdge, false, this);
                //     breakPoints.add(newBreak);
                //     this.edgeList.add(newEdge);
                //     Arc arcLeft = new Arc(null, newBreak, this);
                //     Arc arcRight = new Arc(newBreak, null, this);
                //     arcs.remove(arcAbove);
                //     arcs.put(arcLeft, null);
                //     arcs.put(arcRight, null);
                //     return;
                // }

                // Remove the circle event associated with this arc if there is one.
                // Also remove the arc from the beachline, to be replaced by three other arcs below.
                beachline.remove(&arc_above_site, event_queue);

                // Create and insert 3 arcs to replace the arc removed earlier.
                // The center arc has 0 width at the current sweep line, but will grow out
                // towards the left and right arc as the sweep line moves. The left and right
                // arcs go from the respective left and right breakpoints of the removed arc to
                // the center arc.
                let left_arc_left_breakpoint = arc_above_site.left.clone();
                let right_arc_right_breakpoint = arc_above_site.right;

                // NOTE(review): `VoroniEdge` is constructed as a unit value here;
                // the actual edge construction is still pending (see the ported
                // Java sketch just below).
                let edge = VoroniEdge;
                // VoronoiEdge newEdge = new VoronoiEdge(arcAbove.site, cur.p);
                // this.edgeList.add(newEdge);

                let left_arc_right_breakpoint = ArcBreakpoint::new(arc_above_site.site, site, edge, EdgeDirection::Left, sweep_location);
                let left_arc_right_breakpoint = breakpoints.insert(left_arc_right_breakpoint);
                let left_arc_right_breakpoint = Some(left_arc_right_breakpoint);

                let right_arc_left_breakpoint = ArcBreakpoint::new(arc_above_site.site, site, edge, EdgeDirection::Right, sweep_location);
                let right_arc_left_breakpoint = breakpoints.insert(right_arc_left_breakpoint);
                let right_arc_left_breakpoint = Some(right_arc_left_breakpoint);

                let arc_left = Arc::new(left_arc_left_breakpoint, left_arc_right_breakpoint.clone());
                let arc_center = Arc::new(left_arc_right_breakpoint, right_arc_left_breakpoint.clone());
                let arc_right = Arc::new(right_arc_left_breakpoint, right_arc_right_breakpoint);

                // Schedule circle events for the outer arcs, if they have any.
                let arc_left_event = arc_left.get_circle_event();
                if let Some(ref event) = arc_left_event {
                    event_queue.insert(event.clone())
                }
                beachline.insert(arc_left, arc_left_event);

                // The brand-new zero-width center arc cannot have a circle event yet.
                beachline.insert(arc_center, None);

                let arc_right_event = arc_right.get_circle_event();
                if let Some(ref event) = arc_right_event {
                    event_queue.insert(event.clone())
                }
                beachline.insert(arc_right, arc_right_event);
            },
            Circle { arc, site: _site, vertex: _vertex } => {
                // Remove the collapsed arc, its neighbours' circle events, and
                // its two breakpoints.
                let (arc_left, arc_right) = beachline.remove_adjacent_arcs(&arc, event_queue);
                beachline.remove_only(&arc);
                breakpoints.remove(arc.left.expect("Left arc exists in circle event."));
                breakpoints.remove(arc.right.expect("Right arc exists in circle event."));
                // Remaining work, sketched as the original Java it is being ported from:
                // VoronoiEdge e = new VoronoiEdge(ce.arc.left.site_left, ce.arc.right.site_right);
                // edgeList.add(e);
                // // Here we're trying to figure out if the Voronoi vertex we've found is the left
                // // or right point of the new edge.
                // // If the edges being traces out by these two beachline take a right turn then we know
                // // that the vertex is going to be above the current point
                // boolean turnsLeft = Point.ccw(arcLeft.right.edgeBegin, ce.p, arcRight.left.edgeBegin) == 1;
                // // So if it turns left, we know the next vertex will be below this vertex
                // // so if it's below and the slow is negative then this vertex is the left point
                // boolean isLeftPoint = (turnsLeft) ? (e.m < 0) : (e.m > 0);
                // if (isLeftPoint) {
                //     e.p1 = ce.vert;
                // }
                // else {
                //     e.p2 = ce.vert;
                // }
                // BreakPoint newBP = new BreakPoint(ce.arc.left.s1, ce.arc.right.s2, e, !isLeftPoint, this);
                // breakPoints.add(newBP);
                // arcRight.left = newBP;
                // arcLeft.right = newBP;
                // checkForCircleEvent(arcLeft);
                // checkForCircleEvent(arcRight);
                unimplemented!()
            }
        };
    }

    /// The y coordinate at which the sweepline reaches this event.
    fn y(&self) -> NotNaN<f64> {
        match *self {
            Site(Point{y, ..}) => y,
            Circle { site: Point { y, .. }, .. } => y
        }
    }
}
impl<'sl> PartialOrd for Event<'sl> {
    /// Events are totally ordered; simply defer to the `Ord` implementation.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl<'sl> Ord for Event<'sl> {
    /// Orders events by *descending* `y`.
    ///
    /// `BinaryHeap` is a max-heap, so reversing the comparison makes
    /// `EventQueue::pop` return the event with the smallest `y` first —
    /// exactly the order the sweepline needs.
    /// (The original comment said "Note the ! operand", but the inversion is
    /// actually done with `.reverse()`.)
    fn cmp(&self, other: &Self) -> Ordering {
        self.y().cmp(&other.y()).reverse()
    }
}
impl<'sl> From<Point> for Event<'sl> {
fn from(p: Point) -> Event<'sl> {
Site(p)
}
}
/// Priority queue of pending sweepline events, with lazy deletion.
struct EventQueue<'sl> {
    /// Heap of events; `Event`'s reversed `Ord` makes this pop lowest-`y` first.
    queue: BinaryHeap<Event<'sl>>,
    /// Events logically removed but still physically present in `queue`;
    /// they are skipped (and forgotten) when popped.
    removed_events: HashSet<Event<'sl>>
}
impl<'sl> EventQueue<'sl> {
    /// Builds a queue pre-populated with the given events.
    fn new<I>(events: I) -> EventQueue<'sl> where I: Iterator<Item=Event<'sl>> {
        EventQueue {
            queue: BinaryHeap::from_iter(events),
            removed_events: HashSet::new(),
        }
    }

    /// Schedules a new event.
    fn insert(&mut self, event: Event<'sl>) {
        self.queue.push(event);
    }

    /// Pops the next live event, silently discarding any event that was
    /// removed via `remove` in the meantime.
    fn pop(&mut self) -> Option<Event<'sl>> {
        while let Some(event) = self.queue.pop() {
            if !self.removed_events.remove(&event) {
                return Some(event);
            }
        }
        None
    }

    /// Lazily removes `event`: it stays in the heap, but `pop` will skip it.
    fn remove(&mut self, event: Event<'sl>) {
        self.removed_events.insert(event);
    }
}
/// Runs Fortune's sweepline algorithm over `sites`.
///
/// NOTE(review): construction of the final diagram is incomplete — circle
/// events hit `unimplemented!()` in `Event::handle` — so the returned kernel
/// is not yet a full Voronoi diagram.
pub fn fortune(sites: &[Point]) -> ConnectivityKernel {
    // A single shared sweepline position: `set` is called through a shared
    // reference below, so `Sweepline` uses interior mutability. Arcs and
    // breakpoints created during the sweep hold this same reference.
    let sweep_location: &Sweepline = &Sweepline::new(NotNaN::new(0f64).expect("Zero is not NaN"));
    let mut voroni = ConnectivityKernel::new();
    {
        let mut event_queue = EventQueue::new(sites.into_iter().map(|&site| Site(site)));
        let mut breakpoints = Breakpoints::new();
        let mut beachline = Beachline::new();

        // Deal with the first point specially: it seeds the beachline with a
        // single arc instead of splitting an existing one.
        match event_queue.pop() {
            Some(Site(site)) => {
                sweep_location.set(site.y);
                beachline.insert(Arc::first(site), None);
            },
            None => {
                // No points were given.
                return voroni;
            },
            _ => panic!("First event must be a Site event if it exists.")
        };

        // Main loop: events come out of the queue in order of increasing `y`
        // (thanks to the reversed `Ord` on `Event`).
        while let Some(event) = event_queue.pop() {
            sweep_location.set(event.y());
            event.handle(&mut event_queue, &mut beachline, &sweep_location, &mut breakpoints, &mut voroni);
        }
    }
    voroni
}
| true
|
4259714c495e76442e5db351e33cce7604805dda
|
Rust
|
oxidecomputer/cio
|
/cio/src/features.rs
|
UTF-8
| 249
| 2.671875
| 3
|
[
"Apache-2.0"
] |
permissive
|
/// Namespace for feature-flag helpers.
pub struct Features;

impl Features {
    /// Returns `true` when the environment variable named by `feature` is set
    /// to the string `"true"` (case-insensitive).
    ///
    /// Unset variables, non-UTF-8 values, and any other value (including
    /// `"1"`) count as disabled.
    pub fn is_enabled<S>(feature: S) -> bool
    where
        S: AsRef<str>,
    {
        // `map_or` collapses the `map(..).unwrap_or(..)` two-step flagged by clippy.
        std::env::var(feature.as_ref()).map_or(false, |value| value.to_lowercase() == "true")
    }
}
| true
|
4c6f0afa38ae5141558db8c3b0957d2362507517
|
Rust
|
siddMahen/deep-work
|
/src/main.rs
|
UTF-8
| 7,773
| 2.5625
| 3
|
[] |
no_license
|
use std::fs::{OpenOptions, remove_file};
use std::error::Error;
use std::path::Path;
use std::fmt::Display;
use std::env;
use ansi_term::Colour;
use chrono::prelude::*;
use chrono::TimeZone;
use clap::{Arg, App, SubCommand};
use csv::{ReaderBuilder, Writer, StringRecord};
/// Clock format used when printing start/stop times (e.g. `14:03:59`).
static TIME_FMT: &str = "%H:%M:%S";
/// Date format used for the summary header (e.g. `Monday, March  4, 2024`).
static DATE_FMT: &str = "%A, %B %e, %Y";
/// Append-only CSV log of completed sessions, stored under `$HOME`.
static DW_LOG: &str = ".dw.csv";
/// CSV marker file for the in-progress session, stored under `$HOME`.
static DW_TMP: &str = ".dw.tmp";
/// ANSI 256-colour palette index used to highlight values in output.
static TXT_COLOUR: u8 = 13;
/// Entry point: parses the CLI and dispatches to the matching sub-command
/// handler (`start`, `stop`, `status`, or `summary`).
fn main() -> Result<(), Box<dyn Error>> {
    let matches = App::new("Deep Work Tracker")
        .version("0.1.0")
        .author("Siddharth Mahendraker <siddharth.mahen@gmail.com>")
        .about("A simple deep work time management tool")
        .subcommand(SubCommand::with_name("start")
            .about("Start tracking a deep work session")
            .arg(Arg::with_name("description")
                .required(false)
                .takes_value(true)
                .short("d")
                .long("desc")
                .default_value("")
                .hide_default_value(true)
                .help("Description attached to this deep work session"))
            .arg(Arg::with_name("tags")
                .required(false)
                .multiple(true)
                .takes_value(true)
                .short("t")
                .long("tag")
                .default_value("")
                .visible_alias("tags")
                .hide_default_value(true)
                .help("Tag(s) attached to this deep work session")))
        .subcommand(SubCommand::with_name("stop")
            .about("Stop tracking the current deep work session"))
        .subcommand(SubCommand::with_name("status")
            .about("Get the status of the current deep work session"))
        .subcommand(SubCommand::with_name("summary")
            .about("Summarize today's deep work"))
        .get_matches();

    // All state lives in two well-known files under the user's home directory.
    let home = env::var("HOME")
        .expect("Failed to access HOME environment variable");
    let log_path = Path::new(&home).join(DW_LOG);
    let tmp_path = Path::new(&home).join(DW_TMP);
    let log_path_str = log_path.to_str()
        .expect("Failed to convert log path to string");
    let tmp_path_str = tmp_path.to_str()
        .expect("Failed to convert tmp path to string");

    if let Some(start) = matches.subcommand_matches("start") {
        // Both args have defaults, so `unwrap` cannot fail here.
        let desc = start.value_of("description").unwrap();
        let tags: Vec<_> = start.values_of("tags").unwrap().collect();
        handle_start(tmp_path_str, desc, tags)?;
    } else if matches.subcommand_matches("stop").is_some() {
        // `.is_some()` replaces the `if let Some(_) = ...` anti-pattern.
        handle_stop(log_path_str, tmp_path_str)?;
    } else if matches.subcommand_matches("status").is_some() {
        handle_status(tmp_path_str)?;
    } else if matches.subcommand_matches("summary").is_some() {
        handle_summary(log_path_str)?;
    }
    Ok(())
}
/// Prints the total deep-work time logged today, summed from the CSV log
/// at `log_path` (column 0 = RFC 3339 start time, column 2 = seconds).
///
/// Returns an error instead of panicking on malformed rows (the original
/// `unwrap`ped the duration parse).
fn handle_summary(log_path: &str) -> Result<(), Box<dyn Error>> {
    let file = OpenOptions::new()
        .read(true)
        .open(log_path)?;
    let mut reader = ReaderBuilder::new()
        .has_headers(false)
        .from_reader(file);

    // Hoisted out of the loop: "today" is loop-invariant, and calling
    // `Local::now()` per row was wasted work (and could straddle midnight).
    let today = Local::now().date();

    let mut total_dw_time = 0;
    for sess in reader.records() {
        let record = sess?;
        let start = DateTime::parse_from_rfc3339(&record[0])?;
        // Column 2 holds the session length in whole seconds (written by handle_stop).
        let duration: i32 = record[2].parse()?;
        if start.date() == today {
            total_dw_time += duration;
        }
    }

    let hrs = total_dw_time / 3600;
    let minutes = (total_dw_time / 60) - 60 * hrs;
    let seconds = total_dw_time - 60 * minutes - 3600 * hrs;

    println!("Deep work summary for {}:", today.format(DATE_FMT).to_string());
    // "second(s)" fixes the original "seconds(s)" typo in the output.
    println!("{} hour(s) {} minute(s) {} second(s)",
        Colour::Fixed(TXT_COLOUR).paint(hrs.to_string()),
        Colour::Fixed(TXT_COLOUR).paint(minutes.to_string()),
        Colour::Fixed(TXT_COLOUR).paint(seconds.to_string()));
    Ok(())
}
/// Begins a new deep-work session by writing a one-row CSV marker file at
/// `tmp_path` (start time, description, space-joined tags).
fn handle_start(tmp_path: &str, desc: &str, tags: Vec<&str>) -> Result<(), Box<dyn Error>> {
    // An existing marker file means a session is already running.
    if Path::new(tmp_path).is_file() {
        println!("Another deep work session is active");
        return Ok(());
    }

    let start = Local::now();
    let file = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(tmp_path)?;
    let mut writer = Writer::from_writer(file);
    writer.write_record(&[start.to_rfc3339(), desc.to_string(), tags.join(" ")])?;
    writer.flush()?;

    println!("Begin deep work!");
    print_start_time(start);
    print_description(desc);
    Ok(())
}
/// Reads the final CSV record from the file at `path`.
///
/// Panics if the file cannot be opened or holds no parseable record.
fn datetime_from_last_entry(path: &str) -> StringRecord {
    let file = OpenOptions::new()
        .read(true)
        .open(path)
        .expect("Failed to read temporary file");
    let mut reader = ReaderBuilder::new()
        .has_headers(false)
        .from_reader(file);
    reader.records().last().unwrap().unwrap()
}
/// Finishes the active session: appends a completed row to the log at
/// `log_path`, prints a report, and deletes the marker file at `tmp_path`.
fn handle_stop(log_path: &str, tmp_path: &str) -> Result<(), Box<dyn Error>> {
    if !Path::new(tmp_path).is_file() {
        println!("No active deep work session");
        return Ok(());
    }

    let stop = Local::now();
    let record = datetime_from_last_entry(tmp_path);
    let start = DateTime::parse_from_rfc3339(&record[0])?;
    let (desc, tags) = (&record[1], &record[2]);

    let log_file = OpenOptions::new()
        .write(true)
        .create(true)
        .append(true)
        .open(log_path)?;
    let mut writer = Writer::from_writer(log_file);
    let elapsed = stop.signed_duration_since(start);
    writer.write_record(&[
        start.to_rfc3339(),
        stop.to_rfc3339(),
        elapsed.num_seconds().to_string(),
        desc.to_string(),
        tags.to_string(),
    ])?;
    writer.flush()?;

    println!("Deep work complete!");
    print_start_time(start);
    print_stop_time(stop);
    print_elapsed_time(start, stop);
    print_description(desc);
    print_tags(tags);

    remove_file(tmp_path)?;
    Ok(())
}
/// Prints a highlighted session start time using the shared clock format.
fn print_start_time<T: TimeZone>(time: DateTime<T>) where
    T::Offset: Display
{
    let formatted = time.format(TIME_FMT).to_string();
    println!("Start: {}", Colour::Fixed(TXT_COLOUR).paint(formatted));
}
/// Prints a highlighted session stop time using the shared clock format.
fn print_stop_time<T: TimeZone>(time: DateTime<T>) where
    T::Offset: Display
{
    let formatted = time.format(TIME_FMT).to_string();
    println!("Stop: {}", Colour::Fixed(TXT_COLOUR).paint(formatted));
}
/// Prints the span between `start` and `stop` broken down into hours,
/// minutes, and seconds, each highlighted.
fn print_elapsed_time<S: TimeZone, T: TimeZone>(start: DateTime<S>, stop: DateTime<T>) {
    let elapsed = stop.signed_duration_since(start);
    let hours = elapsed.num_hours();
    let minutes = elapsed.num_minutes() - hours * 60;
    let seconds = elapsed.num_seconds() - hours * 3600 - minutes * 60;
    let paint = |n: i64| Colour::Fixed(TXT_COLOUR).paint(n.to_string());
    println!("Time Elapsed: {} hour(s), {} minute(s), {} second(s)",
        paint(hours), paint(minutes), paint(seconds));
}
/// Prints the session description, if one was provided.
fn print_description(desc: &str) {
    // `is_empty` is the idiomatic form of the original `len() > 0`.
    if !desc.is_empty() {
        println!("Description: {}", desc);
    }
}
/// Prints the session tags, if any were provided.
fn print_tags(tags: &str) {
    // `is_empty` is the idiomatic form of the original `len() > 0`.
    if !tags.is_empty() {
        println!("Tags: {}", tags);
    }
}
/// Reports on the session currently in progress, if any: start time,
/// elapsed time so far, description, and tags.
fn handle_status(tmp_path: &str) -> Result<(), Box<dyn Error>> {
    if !Path::new(tmp_path).is_file() {
        println!("No active deep work session");
        return Ok(());
    }

    let now = Local::now();
    let record = datetime_from_last_entry(tmp_path);
    let start = DateTime::parse_from_rfc3339(&record[0])?;

    print_start_time(start);
    print_elapsed_time(start, now);
    print_description(&record[1]);
    print_tags(&record[2]);
    Ok(())
}
| true
|
a7a06454f82c7dccdbf39cff4e20cff118f36d3c
|
Rust
|
chrstsmth/ciphertools-rs
|
/cipher-lib/src/candidate/mod.rs
|
UTF-8
| 1,864
| 3.15625
| 3
|
[] |
no_license
|
use std::fmt;
use crate::key::Key;
use crate::key::any_key::AnyKey;
#[derive(Clone, PartialEq, PartialOrd)]
pub struct Candidate {
    // Fitness of this decryption attempt. It is the first field, so it
    // dominates the derived ordering; `Candidates::present_candidate` keeps
    // the highest-scoring entries, i.e. higher is better.
    // NOTE(review): an NaN score would make the `partial_cmp().unwrap()` sort
    // in `present_candidate` panic — presumably scores are never NaN.
    score: f64,
    // Key that produced this candidate plaintext.
    key: AnyKey,
    // The decrypted text itself.
    text: String,
}
/// A capacity-bounded, best-first list of top-scoring candidates; kept sorted
/// in descending order by `present_candidate`.
#[derive(Clone)]
pub struct Candidates(Vec<Candidate>);
impl Candidate {
    /// Bundles a score, the key that produced it, and the resulting plaintext.
    pub fn new<K: Key>(score: f64, key: K, text: String) -> Self {
        Candidate {
            score,
            // Method syntax reads better than the original UFCS `K::into(key)`;
            // the target type is fixed by the field, so inference is unambiguous.
            key: key.into(),
            text,
        }
    }

    /// A clone of the key behind this candidate.
    pub fn key(&self) -> AnyKey {
        self.key.clone()
    }

    /// The candidate's fitness score.
    pub fn score(&self) -> f64 {
        self.score
    }
}
impl fmt::Display for Candidate
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} {}", self.score, self.key, self.text)
}
}
/// Iterator adaptor that drives a candidate stream and yields a snapshot of
/// the current top-`n` list each time a new candidate improves it.
pub struct CandidatesIntermediates<I> where
    I: Iterator<Item = Candidate>
{
    // Underlying stream of candidates.
    it: I,
    // Running best-candidates accumulator.
    cs: Candidates,
}
impl<I> Iterator for CandidatesIntermediates<I> where
    I: Iterator<Item = Candidate>
{
    type Item = Vec<Candidate>;

    /// Pulls candidates until one improves the top list, then yields a copy
    /// of the updated list. `None` once the underlying stream is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        for c in &mut self.it {
            if self.cs.present_candidate(&c) {
                return Some(self.cs.0.clone());
            }
        }
        // Upstream exhausted without any further improvement; a trailing
        // expression replaces the original `return None;`.
        None
    }
}
impl Candidates {
    /// Creates an empty collection that retains roughly the top `n` candidates.
    ///
    /// NOTE(review): the limit is enforced via `Vec::capacity`, which is only
    /// guaranteed to be *at least* `n`, so slightly more than `n` entries may
    /// be retained. Fixing that properly would require storing `n` in the
    /// struct itself.
    fn with_length(n: usize) -> Candidates
    {
        Candidates(Vec::with_capacity(n))
    }

    /// Adapts `it` into an iterator of successive top-`n` snapshots.
    pub fn intermediates<I>(n: usize, it: I) -> CandidatesIntermediates<I> where
        I: Iterator<Item = Candidate>
    {
        CandidatesIntermediates {
            it,
            cs: Candidates::with_length(n)
        }
    }

    /// Offers `c` to the collection; returns `true` iff the collection changed.
    ///
    /// The list is kept sorted best-first (descending score). Comparison uses
    /// `partial_cmp().unwrap()`, so NaN scores would panic.
    pub fn present_candidate(&mut self, c: &Candidate) -> bool
    {
        // Bug fix: with a zero-capacity collection the original fell through
        // to `last().unwrap()` on an empty vec and panicked. Nothing can ever
        // be accepted, so report "unchanged" instead.
        if self.0.capacity() == 0 {
            return false;
        }
        if self.0.len() < self.0.capacity() {
            // Below the limit: accept unconditionally (duplicates included,
            // matching the original behaviour).
            self.0.push(c.clone());
        } else if self.0.last().unwrap() < c && !self.0.contains(&c) {
            // Full: replace the current worst only with a strictly better,
            // previously unseen candidate.
            self.0.pop();
            self.0.push(c.clone());
        } else {
            return false;
        }
        self.0.sort_by(|a, b| b.partial_cmp(a).unwrap()); // Reverse order sort (descending)
        true
    }
}
impl fmt::Display for Candidates {
    /// Writes one candidate per line, best first.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0
            .iter()
            .try_for_each(|c| writeln!(f, "{}", c))
    }
}
| true
|
4f0125643eeb9d09dd411edf0fcea9df96bdfbd3
|
Rust
|
Gachapen/lsystem
|
/crates/abnf/src/core.rs
|
UTF-8
| 1,299
| 3.609375
| 4
|
[
"MIT"
] |
permissive
|
//! ABNF core rules.
//!
//! Note that because the rules, e.g. `ALPHA`, and the grammar `GRAMMAR` are lazily initialized
//! static variables, there are some things to consider:
//!
//! * You might want to initialize them manually with `initialize`.
//! * Each time a variable is used, it has to do an atomic check, so it is recommended to keep a
//! reference to the variable if it is used multiple times in the same scope:
//!
//! ```
//! use abnf::Grammar;
//!
//! // It is important to specify the &Grammar type, otherwise it will be a reference to the
//! // `LazyStatic` struct, which will still require the atomic check when it is dereferenced.
//! let grammar: &Grammar = &abnf::core::GRAMMAR;
//! ```
use lazy_static;
use syntax::{Content, Grammar, Item, List, Symbol};
lazy_static! {
    /// Core rule `ALPHA`: an upper- or lower-case ASCII letter
    /// (`%x41-5A / %x61-7A` in ABNF terms).
    pub static ref ALPHA: List = List::Alternatives(vec![
        Item::new(Content::Range('A', 'Z')),
        Item::new(Content::Range('a', 'z')),
    ]);
    /// Grammar bundling every core rule defined above (currently only `ALPHA`).
    pub static ref GRAMMAR: Grammar = Grammar::from_rules(vec![(Symbol::from("ALPHA"), ALPHA.clone())]);
}
/// Initialize the core grammar and all of its rules.
///
/// Not necessary as they are lazily initialized, but useful if a concrete
/// point of initialization is desired (e.g. to front-load the cost before a
/// latency-sensitive section).
pub fn initialize() {
    lazy_static::initialize(&GRAMMAR);
}
| true
|
1d2ef83e3939cee3f54c3096d0b6353f2157daa5
|
Rust
|
leun4m/colorful
|
/src/models.rs
|
UTF-8
| 409
| 3.203125
| 3
|
[
"MIT"
] |
permissive
|
use std::fmt::{Debug, Display};
/// The HSV color model
pub mod hsv;
/// The RGB color model
pub mod rgb;
/// Collection of basic methods every color (regardless of model) should have
/// Collection of basic methods every color (regardless of model) should have.
pub trait Color: Clone + PartialEq + Debug + Display + Default {
    /// Returns `true` if this color is absolute white.
    fn is_white(&self) -> bool;
    /// Returns `true` if this color is absolute black.
    fn is_black(&self) -> bool;
}
| true
|
cf682a7a0f442c2ce435e7eca4b8979119b477fb
|
Rust
|
dxx/leetcode-note
|
/code-rust/src/binary_tree_inorder_traversal/main.rs
|
UTF-8
| 3,447
| 4.15625
| 4
|
[] |
no_license
|
// Binary Tree Inorder Traversal
//
// Given a binary tree, return its inorder traversal (left, root, right).
// Follow-up: the recursive solution is trivial — can you do it iteratively?
//
// Example:
//   Input: [1,null,2,3]
//     1
//      \
//       2
//      /
//     3
//   Output: [1,3,2]
use std::cell::RefCell;
use std::rc::Rc;

/// A binary-tree node, linked through shared mutable handles.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

impl TreeNode {
    /// Creates a leaf node holding `val`.
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode { val, left: None, right: None }
    }
}

pub struct Solution {}

impl Solution {
    /// Returns the inorder traversal of the tree rooted at `root`.
    pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
        // Iterative version.
        Solution::inorder_no_recursion(root)
        // Recursive version:
        // Solution::inorder_recursion(root)
    }

    /// Iterative inorder traversal using an explicit stack.
    ///
    /// Descend as far left as possible while pushing every node passed; then
    /// pop a node, record its value, and continue in its right subtree. This
    /// replaces the original sentinel trick (pushing a dummy `TreeNode::new(-1)`
    /// to suppress re-descending) with the standard `current`/stack loop, and
    /// removes the repeated `as_ref().unwrap()` calls on `Option` handles.
    fn inorder_no_recursion(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
        let mut output = Vec::new();
        let mut stack: Vec<Rc<RefCell<TreeNode>>> = Vec::new();
        let mut current = root;
        while current.is_some() || !stack.is_empty() {
            // Walk down the left spine, remembering every node on the way.
            while let Some(node) = current {
                current = node.borrow().left.clone();
                stack.push(node);
            }
            // The most recently pushed node is the next one in inorder.
            let node = stack.pop().expect("stack is non-empty: checked by the loop guard");
            output.push(node.borrow().val);
            // The right subtree comes immediately after the node itself.
            current = node.borrow().right.clone();
        }
        output
    }

    #[allow(dead_code)]
    /// Recursive inorder traversal: left subtree, then the node, then right.
    fn inorder_recursion(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
        let root = match root {
            Some(root) => root,
            None => return Vec::new(),
        };
        let mut nodes = Vec::new();
        nodes.append(&mut Solution::inorder_recursion(root.borrow().left.clone()));
        nodes.push(root.borrow().val);
        nodes.append(&mut Solution::inorder_recursion(root.borrow().right.clone()));
        nodes
    }
}
/// Builds the example tree [1,null,2,3] and checks its inorder traversal.
#[test]
fn test_inorder_traversal() {
    let mut head = Some(Rc::new(RefCell::new(TreeNode::new(1))));
    let mut node2 = Some(Rc::new(RefCell::new(TreeNode::new(2))));
    let node3 = Some(Rc::new(RefCell::new(TreeNode::new(3))));
    node2.as_mut().unwrap().borrow_mut().left = node3;
    head.as_mut().unwrap().borrow_mut().right = node2;
    let results = Solution::inorder_traversal(head);
    // Assert the expected order: the original only printed the result, so the
    // test could never actually fail.
    assert_eq!(results, vec![1, 3, 2]);
}
| true
|
9a68e16b0f45d067d827633bb170befc3685d805
|
Rust
|
sbeckeriv/rust-algorithms
|
/chapter-2/select-sort/src/main.rs
|
UTF-8
| 1,112
| 3.28125
| 3
|
[] |
no_license
|
mod timer;
mod sort;
use std::env;
use std::path::Path;
use std::io::prelude::*;
use std::fs::File;
/// Reads the file at `file_string` and returns its lines as owned strings.
///
/// Panics with a descriptive message if the file cannot be read (the original
/// used bare `unwrap`s, which hide which path failed).
fn read_file_lines(file_string: String) -> Vec<String> {
    // `fs::read_to_string` replaces the manual open + read_to_string dance.
    let buffer = std::fs::read_to_string(&file_string)
        .unwrap_or_else(|err| panic!("failed to read {}: {}", file_string, err));
    buffer.lines().map(|line| line.to_string()).collect()
}
/// Reads the file at `file_string` and returns its whitespace-separated
/// tokens as owned strings.
///
/// Panics with a descriptive message if the file cannot be read.
fn read_file_chars(file_string: String) -> Vec<String> {
    // `fs::read_to_string` replaces the manual open + read_to_string dance.
    let buffer = std::fs::read_to_string(&file_string)
        .unwrap_or_else(|err| panic!("failed to read {}: {}", file_string, err));
    buffer.split_whitespace().map(|token| token.to_string()).collect()
}
/// Reads whitespace-separated tokens from the file named by the *last* CLI
/// argument, sorts them with `sort::Algo`, and reports the result plus the
/// time spent sorting.
fn main() {
    let mut arguments: Vec<String> = env::args().collect();
    // argv always contains at least the program name, so `pop` cannot fail.
    // NOTE(review): with no arguments the program name itself becomes the
    // input path, so the binary would try to read itself — confirm whether a
    // usage message is wanted here instead.
    let file_string = arguments.pop().expect("argv is never empty");
    // `mut` dropped: the vector is moved into the sorter, never mutated here.
    let vec = read_file_chars(file_string);
    let mut sorter = sort::Algo::new(vec);
    let spent = timer::record(|| {
        sorter.sort();
    });
    println!("{:?}", sorter);
    println!("{:?}", sorter.is_sorted());
    println!("{:?}", spent);
}
| true
|
b6566d89296bc4801b22d962c1cdda375051a13f
|
Rust
|
backup22143/monolith
|
/src/tests/html/get_charset.rs
|
UTF-8
| 2,554
| 2.796875
| 3
|
[
"CC0-1.0"
] |
permissive
|
// ██████╗ █████╗ ███████╗███████╗██╗███╗ ██╗ ██████╗
// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
// ██████╔╝███████║███████╗███████╗██║██╔██╗ ██║██║ ███╗
// ██╔═══╝ ██╔══██║╚════██║╚════██║██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║███████║███████║██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod passing {
    use crate::html;

    // Charset declared via <meta http-equiv="content-type" content="...;charset=...">.
    #[test]
    fn meta_content_type() {
        let html = "<!doctype html>
<html>
<head>
<meta http-equiv=\"content-type\" content=\"text/html;charset=GB2312\" />
</head>
<body>
</body>
</html>";
        let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
        assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
    }

    // Charset declared via the shorthand <meta charset="..."> form.
    #[test]
    fn meta_charset() {
        let html = "<!doctype html>
<html>
<head>
<meta charset=\"GB2312\" />
</head>
<body>
</body>
</html>";
        let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
        assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
    }

    // When both forms are present and disagree, the first declaration in
    // document order wins (here: <meta charset>).
    #[test]
    fn multiple_conflicting_meta_charset_first() {
        let html = "<!doctype html>
<html>
<head>
<meta charset=\"utf-8\" />
<meta http-equiv=\"content-type\" content=\"text/html;charset=GB2312\" />
</head>
<body>
</body>
</html>";
        let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
        assert_eq!(html::get_charset(&dom.document), Some(str!("utf-8")));
    }

    // Same conflict, reversed order: the http-equiv declaration comes first
    // and therefore wins.
    #[test]
    fn multiple_conflicting_meta_content_type_first() {
        let html = "<!doctype html>
<html>
<head>
<meta http-equiv=\"content-type\" content=\"text/html;charset=GB2312\" />
<meta charset=\"utf-8\" />
</head>
<body>
</body>
</html>";
        let dom = html::html_to_dom(&html.as_bytes().to_vec(), str!());
        assert_eq!(html::get_charset(&dom.document), Some(str!("GB2312")));
    }
}
| true
|
76ae81282468bdef8c6c1f61066cec662a1831c7
|
Rust
|
HenryZou1/random_number
|
/src/main.rs
|
UTF-8
| 837
| 2.59375
| 3
|
[] |
no_license
|
use rand::Rng;
/// Assembles eight "magic" bits into one byte (most significant bit first),
/// prints it, then prints a uniform random value in `[0, 231)` OR-ed with the
/// final magic bit.
fn main() {
    // Loop replaces eight copy-pasted OR/shift stanzas. The original pattern
    // was: OR in a bit, then shift left — seven shifts total, none after the
    // last OR. That is exactly `number = (number << 1) | bit` per iteration.
    let mut number: u8 = 0;
    let mut num = magic_num::magic_number();
    number |= num;
    for _ in 0..7 {
        num = magic_num::magic_number();
        number = (number << 1) | num;
    }
    println!("{}", number);
    // Preserved from the original: mix the last magic bit into a random value.
    number = rand::thread_rng().gen_range(0, 231) | num;
    println!("{}", number);
}
| true
|
ad40460e7f4e7c15e4d3856f0d9777bb1a576c90
|
Rust
|
IThawk/rust-project
|
/rust-master/src/librustc_target/abi/call/arm.rs
|
UTF-8
| 3,243
| 2.609375
| 3
|
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
use crate::abi::call::{Conv, FnType, ArgType, Reg, RegKind, Uniform};
use crate::abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
use crate::spec::HasTargetSpec;
/// Returns the uniform (single-register-kind) representation of `arg` if it
/// qualifies as a homogeneous aggregate for the ARM calling convention.
///
/// An aggregate qualifies only if it has at most four uniquely addressable
/// members and its unit kind is a float, or a 64-/128-bit vector; integer
/// units are explicitly rejected.
fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                       -> Option<Uniform>
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
    arg.layout.homogeneous_aggregate(cx).unit().and_then(|unit| {
        let size = arg.layout.size;

        // Ensure we have at most four uniquely addressable members.
        if size > unit.size.checked_mul(4, cx).unwrap() {
            return None;
        }

        let valid_unit = match unit.kind {
            RegKind::Integer => false,
            RegKind::Float => true,
            RegKind::Vector => size.bits() == 64 || size.bits() == 128
        };

        if valid_unit {
            Some(Uniform {
                unit,
                total: size
            })
        } else {
            None
        }
    })
}
/// Classifies the return value `ret` for the ARM calling convention.
///
/// Non-aggregates are extended to at least 32 bits. On VFP (hard-float)
/// targets, homogeneous aggregates are returned directly via a register cast.
/// Other aggregates of up to 32 bits are returned in a single integer
/// register of the smallest sufficient width; anything larger is returned
/// indirectly through memory.
fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, vfp: bool)
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
    if !ret.layout.is_aggregate() {
        // Scalars: just widen small integers to 32 bits.
        ret.extend_integer_width_to(32);
        return;
    }

    if vfp {
        if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {
            ret.cast_to(uniform);
            return;
        }
    }

    let size = ret.layout.size;
    let bits = size.bits();
    if bits <= 32 {
        // Small aggregate: pick the narrowest integer register that fits.
        let unit = if bits <= 8 {
            Reg::i8()
        } else if bits <= 16 {
            Reg::i16()
        } else {
            Reg::i32()
        };
        ret.cast_to(Uniform {
            unit,
            total: size
        });
        return;
    }
    // Too large for registers: return through memory (indirect).
    ret.make_indirect();
}
/// Classifies an argument `arg` for the ARM calling convention.
///
/// Non-aggregates are extended to at least 32 bits. On VFP (hard-float)
/// targets, homogeneous aggregates are passed via a register cast. All other
/// aggregates are cast to a uniform of i32 units (or i64 units when the
/// type's alignment exceeds 4 bytes).
fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, vfp: bool)
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
    if !arg.layout.is_aggregate() {
        // Scalars: just widen small integers to 32 bits.
        arg.extend_integer_width_to(32);
        return;
    }

    if vfp {
        if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {
            arg.cast_to(uniform);
            return;
        }
    }

    // Unit width follows the aggregate's ABI alignment.
    let align = arg.layout.align.abi.bytes();
    let total = arg.layout.size;
    arg.cast_to(Uniform {
        unit: if align <= 4 { Reg::i32() } else { Reg::i64() },
        total
    });
}
/// Computes the ARM ABI classification for every argument and the return
/// value of `fty`, mutating it in place.
///
/// Whether VFP (hard-float) register passing applies is decided from the
/// target triple (ends with "hf"), the calling convention (not the explicit
/// soft-float `aapcs`), and variadicity (variadic functions never use VFP).
pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
{
    // If this is a target with a hard-float ABI, and the function is not explicitly
    // `extern "aapcs"`, then we must use the VFP registers for homogeneous aggregates.
    let vfp = cx.target_spec().llvm_target.ends_with("hf")
        && fty.conv != Conv::ArmAapcs
        && !fty.c_variadic;

    if !fty.ret.is_ignore() {
        classify_ret_ty(cx, &mut fty.ret, vfp);
    }

    for arg in &mut fty.args {
        if arg.is_ignore() { continue; }
        classify_arg_ty(cx, arg, vfp);
    }
}
| true
|
36ee47937c8175e40fdecb00d91731599f7c24b0
|
Rust
|
polyhorn/polyhorn-cli
|
/src/commands/run.rs
|
UTF-8
| 1,792
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
use ansi_term::Colour::Red;
use clap::Clap;
use path_absolutize::Absolutize;
use std::path::Path;
use super::Platform;
use crate::spec::{Error, Spec};
/// Runs the app on a device or simulator.
#[derive(Clap)]
pub struct Run {
    // Target platform backend (Android or iOS), chosen via sub-command.
    // (Plain `//` on purpose: a `///` doc comment here would become clap's
    // CLI help text and change user-visible output.)
    #[clap(subcommand)]
    platform: Platform,
}
impl Run {
    /// Implementation of the `polyhorn run` command that delegates its work to
    /// one of the platform-specific implementations.
    ///
    /// Loads the spec from `manifest_path`, derives the manifest directory and
    /// a sibling `target` directory, and hands a `Config` to the backend.
    /// Any failure to read the manifest prints a diagnostic and terminates.
    pub fn main(&self, manifest_path: &Path) {
        let spec = match Spec::open(manifest_path) {
            Ok(spec) => spec,
            Err(Error::TOML(error)) => {
                // Manifest found but unparseable: show the TOML error.
                eprintln!(
                    "{}: could not read manifest: {}",
                    Red.bold().paint("error"),
                    error
                );
                // NOTE(review): `abort` exits abnormally and skips destructors;
                // confirm whether `std::process::exit(1)` was intended instead.
                std::process::abort();
            }
            Err(Error::IO(_)) => {
                // Manifest missing/unreadable: show the absolute path we tried.
                eprintln!(
                    "{}: could not find file: {:?}",
                    Red.bold().paint("error"),
                    manifest_path
                        .absolutize_virtually(std::env::current_dir().unwrap())
                        .unwrap(),
                );
                std::process::abort();
            }
        };

        // Canonical manifest path, its directory, and `<dir>/target`.
        let manifest_path = std::fs::canonicalize(manifest_path).unwrap();

        let mut manifest_dir = manifest_path.clone();
        manifest_dir.pop();

        let mut target_dir = manifest_dir.clone();
        target_dir.push("target");

        let config = crate::Config {
            manifest_path,
            manifest_dir,
            target_dir,
            spec,
        };

        // Dispatch to the selected platform backend.
        match self.platform {
            Platform::Android => crate::android::commands::run(config),
            Platform::IOS => crate::ios::commands::run(config),
        }
    }
}
| true
|
eb05eb780fe316e4ccf61cf9c55550959ae6efd2
|
Rust
|
SirGFM/rusty-but-fast
|
/src/bin/timer_client.rs
|
UTF-8
| 1,251
| 2.953125
| 3
|
[
"Zlib"
] |
permissive
|
use rusty_but_fast::timer;
/// Executes the timer operation named by `opt` over `conn`.
///
/// Returns `true` if `opt` named a known operation, `false` otherwise.
/// Panics if the underlying timer call fails.
fn do_opt(opt: &str, prefix: &[u8], conn: &mut std::net::TcpStream) -> bool {
    match opt {
        "start" => timer::start(prefix, conn).unwrap(),
        "stop" => timer::stop(prefix, conn).unwrap(),
        "reset" => timer::reset(prefix, conn).unwrap(),
        "get" => {
            let d = match timer::get(prefix, conn) {
                Ok(ok) => ok,
                Err(err) => panic!("Failed to get the current time: {}", err),
            };
            println!("Current time: {}.{}", d.as_secs(), d.subsec_nanos());
        },
        // Unrecognised option: report "not handled" to the caller.
        _ => return false,
    }
    // Trailing expression replaces the original `return true;`.
    true
}
/// Connects to the timer server at 127.0.0.1:8080 and executes each CLI
/// argument as a timer operation; prints usage if none matched.
fn main() {
    let bin_name = std::env::current_exe().expect("Couldn't get the filename");
    let bin_name = bin_name.to_str().expect("Couldn't get the filename");

    println!("Running the client!");

    let empty: [u8; 0] = [];
    let empty = &empty[..];

    let conn = std::net::TcpStream::connect("127.0.0.1:8080");
    let conn = &mut conn.expect("Couldn't connect to the server...");

    // A bool replaces the original `found |= 1` integer bitmask, and argv[0]
    // (the binary's own path) is skipped since it is never a real option.
    let mut found = false;
    for arg in std::env::args().skip(1) {
        if do_opt(&arg, empty, conn) {
            found = true;
        }
    }
    if !found {
        println!("Usage: {} [start|stop|reset|get]*", bin_name);
    }
}
| true
|
0d8c1bcb837577ca16372c97463204d9ff827480
|
Rust
|
prz23/zinc
|
/zargo/src/project/src/circuit.rs
|
UTF-8
| 2,109
| 3.359375
| 3
|
[
"Apache-2.0"
] |
permissive
|
//!
//! The circuit `main.zn` file.
//!
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;

use anyhow::Context;
use serde::Deserialize;
///
/// The circuit source code entry point file representation.
///
/// Holds only the project name, which gets interpolated into the generated
/// `main.zn` source template.
///
#[derive(Deserialize)]
pub struct Circuit {
    /// The circuit project name.
    pub name: String,
}
impl Circuit {
///
/// Creates a new file representation instance.
///
pub fn new(name: &str) -> Self {
Self {
name: name.to_owned(),
}
}
///
/// Checks if the file exists in the project at the given `path`.
///
pub fn exists_at(path: &PathBuf) -> bool {
Self::append_default(path).exists()
}
///
/// Creates the file in the project at the given `path`.
///
pub fn write_to(self, path: &PathBuf) -> anyhow::Result<()> {
let path = Self::append_default(path);
let mut file = File::create(&path).with_context(|| path.to_string_lossy().to_string())?;
file.write_all(self.template().as_bytes())
.with_context(|| path.to_string_lossy().to_string())?;
Ok(())
}
///
/// If the path is a directory, appends the missing elements by default.
///
fn append_default(path: &PathBuf) -> PathBuf {
let mut path = path.to_owned();
if path.is_dir() {
if !path.ends_with(zinc_const::directory::SOURCE) {
path.push(PathBuf::from(zinc_const::directory::SOURCE));
}
path.push(PathBuf::from(Self::file_name()));
}
path
}
///
/// The circuit main file template function.
///
fn template(&self) -> String {
format!(
r#"//!
//! The '{}' circuit entry.
//!
fn main(witness: u8) -> u8 {{
dbg!("Zello, World!");
witness
}}
"#,
self.name
)
}
///
/// Creates a string with the default file name.
///
fn file_name() -> String {
format!(
"{}.{}",
zinc_const::file_name::APPLICATION_ENTRY,
zinc_const::extension::SOURCE,
)
}
}
| true
|
1f5796ba7b644167ba82dd678e22af63fcdcf50c
|
Rust
|
shybyte/rust-midi-patcher
|
/src/absolute_sleep.rs
|
UTF-8
| 2,240
| 3.640625
| 4
|
[] |
no_license
|
use std::time::{Duration, Instant};
use std::thread;
/// Sleeps in *absolute* time: each `sleep` call targets `start_time` plus the
/// running sum of all requested durations, so scheduling drift does not
/// accumulate across successive calls.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct AbsoluteSleep {
    /// The moment this schedule was anchored.
    start_time: Instant,
    /// Total time requested so far; the next wake-up target is
    /// `start_time + duration_sum`.
    duration_sum: Duration
}

impl AbsoluteSleep {
    /// Starts a new absolute-time schedule anchored at "now".
    pub fn new() -> AbsoluteSleep {
        AbsoluteSleep { start_time: Instant::now(), duration_sum: Duration::new(0, 0) }
    }

    /// Sleeps until the absolute target `start_time + duration_sum + duration`
    /// rather than for `duration` from now.
    ///
    /// If the target is already in the past, does not sleep and reports how
    /// far behind schedule we are.
    pub fn sleep(&mut self, duration: Duration) {
        self.duration_sum += duration;
        let now = Instant::now();
        // `checked_sub` is `None` when the target time has already passed.
        match self.duration_sum.checked_sub(now - self.start_time) {
            Some(sleep_time) => thread::sleep(sleep_time),
            None => {
                // BUG FIX: the original error branch recomputed
                // `duration_sum - elapsed` — the very subtraction that
                // `checked_sub` just said would underflow — so falling behind
                // schedule panicked. Report the non-negative overshoot instead.
                let behind = (now - self.start_time) - self.duration_sum;
                eprintln!("Timing Problem = {:?}", behind);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use std::time::{Duration, Instant};
    use crate::absolute_sleep::AbsoluteSleep;
    use std::thread;
    // (Removed the unused `use std::i64;`.)

    /// A single sleep should last very close to the requested 100 ms.
    #[test]
    fn sleep() {
        let mut abs_sleep = AbsoluteSleep::new();
        let time1 = Instant::now();
        abs_sleep.sleep(Duration::from_millis(100));
        let sleep_time: i64 = (Instant::now() - time1).subsec_nanos() as i64 - 100 * 1000_000;
        // `assert!` takes format arguments directly; wrapping them in
        // `format!` (as before) is a hard error from Rust 2021 onwards.
        assert!(sleep_time.abs() < 1000_000, "sleep right time {}", sleep_time);
    }

    /// Two consecutive sleeps should total very close to 200 ms.
    #[test]
    fn sleep_more() {
        let mut abs_sleep = AbsoluteSleep::new();
        let time1 = Instant::now();
        abs_sleep.sleep(Duration::from_millis(100));
        abs_sleep.sleep(Duration::from_millis(100));
        let sleep_time: i64 = (Instant::now() - time1).subsec_nanos() as i64 - 200 * 1000_000;
        assert!(sleep_time.abs() < 1000_000, "sleep right time {}", sleep_time);
    }

    /// Time already elapsed since `new` counts against the sleep target.
    #[test]
    fn sleep_less_if_time_has_elapsed() {
        let mut abs_sleep = AbsoluteSleep::new();
        thread::sleep(Duration::from_millis(75));
        let time1 = Instant::now();
        abs_sleep.sleep(Duration::from_millis(100));
        let sleep_time: i64 = (Instant::now() - time1).subsec_nanos() as i64 - 25 * 1000_000;
        assert!(sleep_time.abs() < 2000_000, "sleep right time {}", sleep_time);
    }
}
| true
|
c4da5b69781b1c1844036b5122647dcb41d5bbe2
|
Rust
|
jonhoo/rust-imap
|
/src/extensions/metadata.rs
|
UTF-8
| 17,978
| 2.90625
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! Adds support for the IMAP METADATA extension specified in [RFC
//! 5464](https://tools.ietf.org/html/rfc5464).
//!
//! Mailboxes or the server as a whole may have zero or more annotations associated with them. An
//! annotation contains a uniquely named entry, which has a value. Annotations can be added to
//! mailboxes when a mailbox name is provided as the first argument to
//! [`set_metadata`](Session::set_metadata), or to the server as a whole when the first argument is
//! `None`.
//!
//! For example, a general comment being added to a mailbox may have an entry name of "/comment"
//! and a value of "Really useful mailbox".
use crate::client::*;
use crate::error::{Error, ParseError, Result};
use crate::parse::try_handle_unilateral;
use crate::types::*;
use imap_proto::types::{MailboxDatum, Metadata, Response, ResponseCode};
use std::io::{Read, Write};
use std::sync::mpsc;
// for intra-doc links
#[allow(unused_imports)]
use crate::error::No;
/// Formatting of a [`Metadata`] entry/value pair as one item of the
/// parenthesized list sent with the `SETMETADATA` command.
trait CmdListItemFormat {
    /// Render this item as `"<entry> <value>"`; `item_index` (0-based) is
    /// only used to build validation error messages (reported 1-based).
    fn format_as_cmd_list_item(&self, item_index: usize) -> Result<String>;
}
impl CmdListItemFormat for Metadata {
    /// Render this annotation as `<entry> <value>` for `SETMETADATA`,
    /// validating both strings; a missing value is serialized as `NIL`.
    fn format_as_cmd_list_item(&self, item_index: usize) -> Result<String> {
        let synopsis = "SETMETADATA";
        let entry = validate_str(
            synopsis,
            &format!("entry#{}", item_index + 1),
            self.entry.as_str(),
        )?;
        let value = match self.value.as_ref() {
            Some(v) => {
                validate_str(synopsis, &format!("value#{}", item_index + 1), v.as_str())?
            }
            None => "NIL".to_string(),
        };
        Ok(format!("{} {}", entry, value))
    }
}
/// Represents variants of the `DEPTH` parameter for the `GETMETADATA` command.
///
/// When a non-zero depth is specified with the `GETMETADATA` command, it extends the list of entry
/// values returned by the server. For each entry name specified in the `GETMETADATA` command, the
/// server returns the value of the specified entry name (if it exists), plus all entries below the
/// entry name up to the specified `DEPTH`.
///
/// The [`Default`] implementation yields [`MetadataDepth::Zero`].
///
/// See also [RFC 5464, section 4.2.2](https://tools.ietf.org/html/rfc5464#section-4.2.2).
#[derive(Debug, Copy, Clone)]
pub enum MetadataDepth {
    /// No entries below the specified entry are returned.
    Zero,
    /// Only entries immediately below the specified entry are returned.
    ///
    /// Thus, a depth of one for an entry `/a` will match `/a` as well as its children entries
    /// (e.g., `/a/b`), but will not match grandchildren entries (e.g., `/a/b/c`).
    One,
    /// All entries below the specified entry are returned
    Infinity,
}
impl Default for MetadataDepth {
fn default() -> Self {
Self::Zero
}
}
impl MetadataDepth {
    /// Wire representation of this depth for the `DEPTH` option of the
    /// `GETMETADATA` command.
    fn depth_str<'a>(self) -> &'a str {
        match self {
            MetadataDepth::Zero => "0",
            MetadataDepth::One => "1",
            MetadataDepth::Infinity => "infinity",
        }
    }
}
/// Drain `lines` of IMAP responses, collecting solicited METADATA
/// entry/value pairs.
///
/// Unilateral (unsolicited) responses encountered along the way are forwarded
/// on the `unsolicited` channel; unparseable input aborts with `Error::Parse`.
fn parse_metadata<'a>(
    mut lines: &'a [u8],
    unsolicited: &'a mut mpsc::Sender<UnsolicitedResponse>,
) -> Result<Vec<Metadata>> {
    let mut res: Vec<Metadata> = Vec::new();
    loop {
        if lines.is_empty() {
            break Ok(res);
        }
        match imap_proto::parser::parse_response(lines) {
            Ok((rest, resp)) => {
                // Continue parsing from whatever the parser did not consume.
                lines = rest;
                match resp {
                    // Solicited metadata for our GETMETADATA: accumulate it.
                    Response::MailboxData(MailboxDatum::MetadataSolicited {
                        mailbox: _,
                        mut values,
                    }) => {
                        res.append(&mut values);
                    }
                    _ => {
                        // Anything else may be a unilateral server update;
                        // forward it, or bail out if it cannot be handled.
                        if let Some(unhandled) = try_handle_unilateral(resp, unsolicited) {
                            break Err(unhandled.into());
                        }
                    }
                }
            }
            Err(_) => {
                return Err(Error::Parse(ParseError::Invalid(lines.to_vec())));
            }
        }
    }
}
impl<T: Read + Write> Session<T> {
    /// Retrieve server or mailbox annotations.
    ///
    /// This uses the `GETMETADATA` command defined in the METADATA extension of the IMAP protocol.
    /// See [RFC 5464, section 4.2](https://tools.ietf.org/html/rfc5464#section-4.2) for more
    /// details. Server support for the extension is indicated by the `METADATA` capability.
    ///
    /// When the mailbox name is empty, this command retrieves server annotations. Otherwise,
    /// this command retrieves annotations on the specified mailbox. If the `METADATA-SERVER`
    /// capability is present, server metadata is supported, but not mailbox metadata.
    ///
    /// The `entries` list specifies which annotations should be fetched. The RFC defines a number
    /// of standard names in [Section 3.2.1](https://tools.ietf.org/html/rfc5464#section-3.2.1):
    ///
    /// - Server entries (when `mailbox` is `None`):
    ///   - `/shared/comment`: A comment or note that is associated with the server and that is
    ///     shared with authorized users of the server.
    ///   - `/shared/admin`: Indicates a method for contacting the server administrator. The value
    ///     MUST be a URI (e.g., a `mailto:` or `tel:` URL). This entry is always read-only --
    ///     clients cannot change it. It is visible to authorized users of the system.
    ///   - `/shared/vendor/<vendor-token>`: Defines the top level of shared entries associated
    ///     with the server, as created by a particular product of some vendor. This entry can be
    ///     used by vendors to provide server- or client-specific annotations. The vendor-token
    ///     MUST be registered with IANA, using the Application Configuration Access Protocol
    ///     (ACAP) [RFC2244] vendor subtree registry.
    ///   - `/private/vendor/<vendor-token>`: Defines the top level of private entries associated
    ///     with the server, as created by a particular product of some vendor. This entry can be
    ///     used by vendors to provide server- or client-specific annotations. The vendor-token
    ///     MUST be registered with IANA, using the ACAP [RFC2244] vendor subtree registry.
    /// - Mailbox entries (when `mailbox` is `Some`):
    ///   - `/shared/comment`: Defines a shared comment or note associated with a mailbox.
    ///   - `/private/comment`: Defines a private (per-user) comment or note associated with a
    ///     mailbox.
    ///   - `/shared/vendor/<vendor-token>`: Defines the top level of shared entries associated
    ///     with a specific mailbox, as created by a particular product of some vendor. This entry
    ///     can be used by vendors to provide client-specific annotations. The vendor-token MUST
    ///     be registered with IANA, using the ACAP [RFC2244] vendor subtree registry.
    ///   - `/private/vendor/<vendor-token>`: Defines the top level of private entries associated
    ///     with a specific mailbox, as created by a particular product of some vendor. This entry
    ///     can be used by vendors to provide client- specific annotations. The vendor-token MUST
    ///     be registered with IANA, using the ACAP [RFC2244] vendor subtree registry.
    ///
    /// [RFC2244]: https://tools.ietf.org/html/rfc2244
    ///
    /// The `depth` argument dictates whether metadata on children of the requested entity are
    /// returned. See [`MetadataDepth`] for details
    ///
    /// When `maxsize` is specified, it restricts which entry values are returned by the server.
    /// Only entries that are less than or equal in octet size to the specified `maxsize` are
    /// returned. If there are any entries with values larger than `maxsize`, this method also
    /// returns the size of the biggest entry requested by the client that exceeded `maxsize`.
    pub fn get_metadata(
        &mut self,
        mailbox: Option<&str>,
        entries: &[impl AsRef<str>],
        depth: MetadataDepth,
        maxsize: Option<usize>,
    ) -> Result<(Vec<Metadata>, Option<u64>)> {
        let synopsis = "GETMETADATA";
        // Validate every entry name up front so errors can point at the
        // offending 1-based position before anything is sent.
        let v: Vec<String> = entries
            .iter()
            .enumerate()
            .map(|(i, e)| validate_str(synopsis, format!("entry#{}", i + 1), e.as_ref()))
            .collect::<Result<_>>()?;
        let s = v.as_slice().join(" ");
        let mut command = format!("GETMETADATA (DEPTH {}", depth.depth_str());
        if let Some(size) = maxsize {
            command.push_str(format!(" MAXSIZE {}", size).as_str());
        }
        // An absent mailbox selects server annotations, encoded as "".
        command.push_str(
            format!(
                ") {} ({})",
                mailbox
                    .map(|mbox| validate_str(synopsis, "mailbox", mbox))
                    .unwrap_or_else(|| Ok("\"\"".to_string()))?,
                s
            )
            .as_str(),
        );
        let (lines, ok) = self.run(command)?;
        let meta = parse_metadata(&lines[..ok], &mut self.unsolicited_responses_tx)?;
        // When MAXSIZE was requested, the tagged OK line may carry a
        // METADATA LONGENTRIES code with the size of the largest value that
        // was skipped for being too big.
        let missed = if maxsize.is_some() {
            if let Ok((_, Response::Done { code, .. })) =
                imap_proto::parser::parse_response(&lines[ok..])
            {
                match code {
                    None => None,
                    Some(ResponseCode::MetadataLongEntries(v)) => Some(v),
                    Some(_) => None,
                }
            } else {
                unreachable!("already parsed as Done by Client::run");
            }
        } else {
            None
        };
        Ok((meta, missed))
    }
    /// Set annotations.
    ///
    /// This command sets the specified list of entries by adding or replacing the specified values
    /// provided, on the specified existing mailboxes or on the server (if the mailbox argument is
    /// `None`). Clients can use `None` for the value of entries it wants to remove.
    ///
    /// If the server is unable to set an annotation because the size of its value is too large,
    /// this command will fail with a [`Error::No`] and its [status code](No::code) will be
    /// [`ResponseCode::MetadataMaxSize`] where the contained value is the maximum octet count that
    /// the server is willing to accept.
    ///
    /// If the server is unable to set a new annotation because the maximum number of allowed
    /// annotations has already been reached, this command will fail with an [`Error::No`] and its
    /// [status code](No::code) will be [`ResponseCode::MetadataTooMany`].
    ///
    /// If the server is unable to set a new annotation because it does not support private
    /// annotations on one of the specified mailboxes, you guess it, you'll get an [`Error::No`] with
    /// a [status code](No::code) of [`ResponseCode::MetadataNoPrivate`].
    ///
    /// When any one annotation fails to be set and [`Error::No`] is returned, the server will not
    /// change the values for other annotations specified.
    ///
    /// See [RFC 5464, section 4.3](https://tools.ietf.org/html/rfc5464#section-4.3)
    pub fn set_metadata(&mut self, mbox: impl AsRef<str>, annotations: &[Metadata]) -> Result<()> {
        // Each annotation is rendered (and validated) as an "<entry> <value>"
        // pair; see `CmdListItemFormat`.
        let v: Vec<String> = annotations
            .iter()
            .enumerate()
            .map(|(i, metadata)| metadata.format_as_cmd_list_item(i))
            .collect::<Result<_>>()?;
        let s = v.as_slice().join(" ");
        let command = format!(
            "SETMETADATA {} ({})",
            validate_str("SETMETADATA", "mailbox", mbox.as_ref())?,
            s
        );
        self.run_command_and_check_ok(command)
    }
}
#[cfg(test)]
mod tests {
    //! Tests for GETMETADATA response parsing, plus CRLF-injection
    //! validation of mailbox names, entry names, and values.
    use crate::extensions::metadata::*;
    use crate::mock_stream::MockStream;
    use crate::*;
    #[test]
    fn test_getmetadata() {
        // Canned server dialog: login OK, one METADATA response with three
        // literal values, then the tagged completion line.
        let response = "a1 OK Logged in.\r\n* METADATA \"\" (/shared/vendor/vendor.coi/a {3}\r\nAAA /shared/vendor/vendor.coi/b {3}\r\nBBB /shared/vendor/vendor.coi/c {3}\r\nCCC)\r\na2 OK GETMETADATA Completed\r\n";
        let mock_stream = MockStream::new(response.as_bytes().to_vec());
        let client = Client::new(mock_stream);
        let mut session = client.login("testuser", "pass").unwrap();
        let r = session.get_metadata(
            None,
            &["/shared/vendor/vendor.coi", "/shared/comment"],
            MetadataDepth::Infinity,
            Option::None,
        );
        match r {
            Ok((v, missed)) => {
                // No MAXSIZE was given, so no "missed" size is reported.
                assert_eq!(missed, None);
                assert_eq!(v.len(), 3);
                assert_eq!(v[0].entry, "/shared/vendor/vendor.coi/a");
                assert_eq!(v[0].value.as_ref().expect("None is not expected"), "AAA");
                assert_eq!(v[1].entry, "/shared/vendor/vendor.coi/b");
                assert_eq!(v[1].value.as_ref().expect("None is not expected"), "BBB");
                assert_eq!(v[2].entry, "/shared/vendor/vendor.coi/c");
                assert_eq!(v[2].value.as_ref().expect("None is not expected"), "CCC");
            }
            Err(e) => panic!("Unexpected error: {:?}", e),
        }
    }
    use crate::client::testutils::assert_validation_error_session;
    #[test]
    fn test_getmetadata_validation_entry1() {
        assert_validation_error_session(
            |mut session| {
                session.get_metadata(
                    None,
                    &[
                        "/shared/vendor\n/vendor.coi",
                        "/shared/comment",
                        "/some/other/entry",
                    ],
                    MetadataDepth::Infinity,
                    None,
                )
            },
            "GETMETADATA",
            "entry#1",
            '\n',
        )
    }
    #[test]
    fn test_getmetadata_validation_entry2() {
        assert_validation_error_session(
            |mut session| {
                session.get_metadata(
                    Some("INBOX"),
                    &["/shared/vendor/vendor.coi", "/\rshared/comment"],
                    MetadataDepth::Infinity,
                    None,
                )
            },
            "GETMETADATA",
            "entry#2",
            '\r',
        )
    }
    #[test]
    fn test_getmetadata_validation_mailbox() {
        assert_validation_error_session(
            |mut session| {
                session.get_metadata(
                    Some("INB\nOX"),
                    &["/shared/vendor/vendor.coi", "/shared/comment"],
                    MetadataDepth::Infinity,
                    None,
                )
            },
            "GETMETADATA",
            "mailbox",
            '\n',
        );
    }
    #[test]
    fn test_setmetadata_validation_mailbox() {
        assert_validation_error_session(
            |mut session| {
                session.set_metadata(
                    "INB\nOX",
                    &[
                        Metadata {
                            entry: "/shared/vendor/vendor.coi".to_string(),
                            value: None,
                        },
                        Metadata {
                            entry: "/shared/comment".to_string(),
                            value: Some("value".to_string()),
                        },
                    ],
                )
            },
            "SETMETADATA",
            "mailbox",
            '\n',
        );
    }
    #[test]
    fn test_setmetadata_validation_entry1() {
        assert_validation_error_session(
            |mut session| {
                session.set_metadata(
                    "INBOX",
                    &[
                        Metadata {
                            entry: "/shared/\nvendor/vendor.coi".to_string(),
                            value: None,
                        },
                        Metadata {
                            entry: "/shared/comment".to_string(),
                            value: Some("value".to_string()),
                        },
                    ],
                )
            },
            "SETMETADATA",
            "entry#1",
            '\n',
        );
    }
    #[test]
    fn test_setmetadata_validation_entry2_key() {
        assert_validation_error_session(
            |mut session| {
                session.set_metadata(
                    "INBOX",
                    &[
                        Metadata {
                            entry: "/shared/vendor/vendor.coi".to_string(),
                            value: None,
                        },
                        Metadata {
                            entry: "/shared\r/comment".to_string(),
                            value: Some("value".to_string()),
                        },
                    ],
                )
            },
            "SETMETADATA",
            "entry#2",
            '\r',
        );
    }
    #[test]
    fn test_setmetadata_validation_entry2_value() {
        assert_validation_error_session(
            |mut session| {
                session.set_metadata(
                    "INBOX",
                    &[
                        Metadata {
                            entry: "/shared/vendor/vendor.coi".to_string(),
                            value: None,
                        },
                        Metadata {
                            entry: "/shared/comment".to_string(),
                            value: Some("va\nlue".to_string()),
                        },
                    ],
                )
            },
            "SETMETADATA",
            "value#2",
            '\n',
        );
    }
    #[test]
    fn test_setmetadata_validation_entry() {
        assert_validation_error_session(
            |mut session| {
                session.set_metadata(
                    "INBOX",
                    &[Metadata {
                        entry: "/shared/\nvendor/vendor.coi".to_string(),
                        value: None,
                    }],
                )
            },
            "SETMETADATA",
            "entry#1",
            '\n',
        );
    }
}
| true
|
def99e1cdb496354da21cc6fbbac537113b66e01
|
Rust
|
satvikshukla/algorithmplus
|
/src/search/binary_search.rs
|
UTF-8
| 1,289
| 4.15625
| 4
|
[
"MIT"
] |
permissive
|
/// Locate `target` in a sorted slice using binary search.
///
/// # Parameters
///
/// - `target`: The element to find
/// - `arr`: A sorted vector/slice to search the element in
///
/// # Type parameters
///
/// - `T`: A type that can be checked for equality and ordering e.g. a `i32`, a
/// `u8`, or a `f32`.
///
/// # Examples
///
/// ```rust
/// use algorithmplus::search::binary_search;
///
/// let ls = vec![1, 7, 9, 11, 12];
/// let idx = binary_search(&7, &ls).unwrap_or_default();
///
/// assert_eq!(idx, 1);
/// ```
///
/// ```rust
/// use algorithmplus::search::binary_search;
///
/// let ls = vec![1, 7, 9, 11, 12];
/// let idx = binary_search(&8, &ls);
///
/// assert_eq!(idx, None);
/// ```
pub fn binary_search<T: PartialEq + PartialOrd>(target: &T, arr: &[T]) -> Option<usize> {
    // An empty slice cannot contain the target.
    if arr.is_empty() {
        return None;
    }
    let (mut lo, mut hi) = (0, arr.len() - 1);
    // Targets outside [first, last] cannot be present. This guard also keeps
    // `hi - 1` from underflowing below: when `arr[mid] > target` we know
    // `mid > 0`, because `arr[0] <= target` holds past this point.
    if &arr[lo] > target || &arr[hi] < target {
        return None;
    }
    while lo <= hi {
        let mid = lo + (hi - lo) / 2;
        let probe = &arr[mid];
        if probe < target {
            lo = mid + 1;
        } else if probe > target {
            hi = mid - 1;
        } else {
            return Some(mid);
        }
    }
    None
}
| true
|
899e5079af96b1e5e9d6e75b8aae4ea78f16c768
|
Rust
|
nvzqz/byte-set-rs
|
/src/iter.rs
|
UTF-8
| 3,042
| 3.3125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::{chunk, ByteSet};
use core::iter;
/// An iterator over a [`ByteSet`].
///
/// [`ByteSet`]: struct.ByteSet.html
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Iter {
    /// The set being iterated over. It is mutated in-place as bits are popped
    /// from each chunk.
    byte_set: ByteSet,
    /// The current chunk index when iterating forwards.
    ///
    /// Never decreases: chunks below it have already been drained.
    forward_index: usize,
    /// The current chunk index when iterating backwards.
    ///
    /// Never increases: chunks above it have already been drained.
    backward_index: usize,
}
impl Iter {
    /// Creates an iterator positioned at both ends of `byte_set`.
    #[inline]
    pub(crate) const fn new(byte_set: ByteSet) -> Self {
        Self {
            byte_set,
            forward_index: 0,
            backward_index: ByteSet::LAST_SLOT_INDEX,
        }
    }
    /// Returns the underlying [`ByteSet`].
    ///
    /// Note that iteration mutates the byteset in-place: bytes already
    /// yielded from either end are no longer present in the returned set.
    #[inline]
    pub const fn into_byte_set(self) -> ByteSet {
        self.byte_set
    }
}
impl From<ByteSet> for Iter {
    /// Equivalent to [`Iter::new`].
    #[inline]
    fn from(byte_set: ByteSet) -> Self {
        Self::new(byte_set)
    }
}
impl Iterator for Iter {
    type Item = u8;
    fn next(&mut self) -> Option<u8> {
        // Resume at the first chunk that may still contain set bits.
        let range = self.forward_index..ByteSet::NUM_SLOTS;
        for index in range {
            self.forward_index = index;
            let chunk = &mut self.byte_set.0[index];
            // Pop the least-significant set bit and translate the in-chunk
            // bit position into the absolute byte value.
            if let Some(lsb) = chunk::pop_lsb(chunk) {
                return Some(lsb + (index * chunk::INDEX_OFFSET) as u8);
            }
        }
        None
    }
    // Specialized override: drains every chunk directly and skips updating
    // `forward_index`, since `self` is consumed.
    fn for_each<F>(mut self, mut f: F)
    where
        F: FnMut(u8),
    {
        (0..ByteSet::NUM_SLOTS).for_each(|index| {
            let chunk = &mut self.byte_set.0[index];
            while let Some(lsb) = chunk::pop_lsb(chunk) {
                f(lsb + (index * chunk::INDEX_OFFSET) as u8);
            }
        });
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The exact remaining count is known, so both bounds are tight.
        let len = self.len();
        (len, Some(len))
    }
    #[inline]
    fn count(self) -> usize {
        self.len()
    }
    #[inline]
    fn last(mut self) -> Option<u8> {
        self.next_back()
    }
    #[inline]
    fn min(mut self) -> Option<u8> {
        // Bits are popped LSB-first in ascending chunk order, so the first
        // value yielded is the smallest.
        self.next()
    }
    #[inline]
    fn max(self) -> Option<u8> {
        // Symmetrically, the last value (popped from the back) is the largest.
        self.last()
    }
}
impl DoubleEndedIterator for Iter {
    fn next_back(&mut self) -> Option<u8> {
        // `Range` (`a..b`) is faster than `InclusiveRange` (`a..=b`).
        let range = 0..(self.backward_index + 1);
        for index in range.rev() {
            self.backward_index = index;
            // SAFETY: `index` is drawn from `0..=backward_index`, which
            // starts at `ByteSet::LAST_SLOT_INDEX` and only ever shrinks, so
            // it stays a valid slot index for `byte_set.0`. This invariant is
            // also covered by tests.
            let chunk = unsafe { self.byte_set.0.get_unchecked_mut(index) };
            if let Some(msb) = chunk::pop_msb(chunk) {
                return Some(msb + (index * chunk::INDEX_OFFSET) as u8);
            }
        }
        None
    }
}
impl ExactSizeIterator for Iter {
    /// Remaining length: the number of bytes still present in the internal
    /// set, which shrinks as values are popped from either end.
    #[inline]
    fn len(&self) -> usize {
        self.byte_set.len()
    }
}
// `Iter` does not produce more values after `None` is reached: both
// directions pop bits out of `byte_set` in place, so an exhausted set stays
// exhausted.
impl iter::FusedIterator for Iter {}
| true
|
413cbacc70b93ba6ac189d2e971b794fa97052f9
|
Rust
|
bowlofeggs/average
|
/src/traits.rs
|
UTF-8
| 3,713
| 3.453125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
/// Estimate a statistic of a sequence of numbers ("population").
///
/// Implementors accumulate observations via [`add`](Estimate::add) and report
/// the current value of the statistic via [`estimate`](Estimate::estimate).
pub trait Estimate {
    /// Add an observation sampled from the population.
    fn add(&mut self, x: f64);
    /// Estimate the statistic of the population.
    fn estimate(&self) -> f64;
}
/// Merge another sample into this one.
pub trait Merge {
    /// Combine the state accumulated in `other` into `self`.
    fn merge(&mut self, other: &Self);
}
/// Calculate the multinomial variance of a single bin count. Relevant for
/// histograms.
///
/// For a bin with count `n` out of `n_tot` total samples (passed as the
/// reciprocal `n_tot_inv = 1 / n_tot`), this is `n * (1 - n / n_tot)`.
///
/// NOTE(review): the name looks like a typo for `multinomial_variance`, but
/// renaming would touch every caller in this file, so it is kept as-is.
#[inline(always)]
fn multinomal_variance(n: f64, n_tot_inv: f64) -> f64 {
    let fraction = n * n_tot_inv;
    n * (1. - fraction)
}
/// Get the bins and ranges from a histogram.
///
/// Implementors must be iterable (by shared reference) as a sequence of
/// `((left_edge, right_edge), count)` bin triples.
pub trait Histogram:
    where for<'a> &'a Self: IntoIterator<Item = ((f64, f64), u64)>
{
    /// Return the bins of the histogram.
    fn bins(&self) -> &[u64];
    /// Estimate the variance for the given bin.
    ///
    /// The square root of this estimates the error of the bin count.
    ///
    /// Note: this recomputes the total count on every call; prefer
    /// [`variances`](Histogram::variances) when querying many bins.
    #[inline]
    fn variance(&self, bin: usize) -> f64 {
        let count = self.bins()[bin];
        let sum: u64 = self.bins().iter().sum();
        multinomal_variance(count as f64, 1./(sum as f64))
    }
    /// Return an iterator over the bins normalized by the bin widths.
    #[inline]
    fn normalized_bins(&self) -> IterNormalized<<&Self as IntoIterator>::IntoIter> {
        IterNormalized { histogram_iter: self.into_iter() }
    }
    /// Return an iterator over the bin widths.
    #[inline]
    fn widths(&self) -> IterWidths<<&Self as IntoIterator>::IntoIter> {
        IterWidths { histogram_iter: self.into_iter() }
    }
    /// Return an iterator over the bin centers.
    #[inline]
    fn centers(&self) -> IterBinCenters<<&Self as IntoIterator>::IntoIter> {
        IterBinCenters { histogram_iter: self.into_iter() }
    }
    /// Return an iterator over the bin variances.
    ///
    /// This is more efficient than calling `variance()` for each bin,
    /// because the total count is computed only once.
    #[inline]
    fn variances(&self) -> IterVariances<<&Self as IntoIterator>::IntoIter> {
        let sum: u64 = self.bins().iter().sum();
        IterVariances {
            histogram_iter: self.into_iter(),
            sum_inv: 1./(sum as f64)
        }
    }
}
/// Iterate over the bins normalized by bin width.
pub struct IterNormalized<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    histogram_iter: T,
}
impl<T> Iterator for IterNormalized<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    type Item = f64;
    /// Yield `count / width` for the next bin, or `None` when exhausted.
    #[inline]
    fn next(&mut self) -> Option<f64> {
        let ((left, right), count) = self.histogram_iter.next()?;
        Some(count as f64 / (right - left))
    }
}
/// Iterate over the widths of the bins.
pub struct IterWidths<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    histogram_iter: T,
}
impl<T> Iterator for IterWidths<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    type Item = f64;
    /// Yield `right - left` for the next bin, or `None` when exhausted.
    #[inline]
    fn next(&mut self) -> Option<f64> {
        let ((left, right), _count) = self.histogram_iter.next()?;
        Some(right - left)
    }
}
/// Iterate over the bin centers.
pub struct IterBinCenters<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    histogram_iter: T,
}
impl<T> Iterator for IterBinCenters<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    type Item = f64;
    /// Yield the midpoint of the next bin, or `None` when exhausted.
    #[inline]
    fn next(&mut self) -> Option<f64> {
        let ((left, right), _count) = self.histogram_iter.next()?;
        Some(0.5 * (left + right))
    }
}
/// Iterate over the variances.
pub struct IterVariances<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    histogram_iter: T,
    sum_inv: f64,
}
impl<T> Iterator for IterVariances<T>
    where T: Iterator<Item = ((f64, f64), u64)>
{
    type Item = f64;
    /// Yield the multinomial variance of the next bin's count, using the
    /// precomputed reciprocal of the total count.
    #[inline]
    fn next(&mut self) -> Option<f64> {
        let (_edges, count) = self.histogram_iter.next()?;
        Some(multinomal_variance(count as f64, self.sum_inv))
    }
}
| true
|
6dcfe82932ab604637d2b680a2d0fcb92837b4f9
|
Rust
|
tostaylo/tostaylo.github.io
|
/src/content.rs
|
UTF-8
| 8,168
| 2.796875
| 3
|
[
"MIT"
] |
permissive
|
use crate::about::about;
use crate::handle;
use crate::posts::posts;
use crate::site_info::site_info;
use std::cell::RefCell;
use std::rc::Rc;
use web_sys::ScrollToOptions;
/// The views this component can display, plus external link targets.
#[derive(Debug, Clone, PartialEq)]
pub enum ContentType {
    /// Landing view; renders navigation only (see `render`).
    Home,
    About,
    Posts,
    SiteInfo,
    /// External link — selecting it triggers no state change.
    Github,
    /// External link — selecting it triggers no state change.
    LinkedIn,
}
impl Default for ContentType {
fn default() -> Self {
ContentType::Home
}
}
/// Messages handled by `reduce_state` on `Handle<Content>`.
#[derive(Debug, Clone, PartialEq)]
pub enum Actions {
    /// Switch the main view to the given content type (also closes the nav).
    ContentType(ContentType),
    /// Open the mobile navigation overlay.
    ShowNav,
    /// Close the mobile navigation overlay.
    HideNav,
}
impl Default for Actions {
    /// Default message: navigate to the home view.
    fn default() -> Self {
        Self::ContentType(ContentType::Home)
    }
}
/// Mutable UI state for the [`Content`] component.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct ContentState {
    /// Which view is currently rendered.
    content: ContentType,
    /// Whether the (mobile) navigation overlay is shown.
    is_nav: bool,
}
/// Root content component of the site.
#[derive(Debug, Default, Clone)]
pub struct Content {
    /// DOM id of the rendered section; used as the re-render target.
    id: String,
    /// Current view/navigation state.
    state: ContentState,
}
impl Content {
    /// Build the shared, ref-counted handle that owns this component's state,
    /// starting on the home view with the navigation closed.
    pub fn create() -> handle::Handle<Self> {
        let state = ContentState {
            content: ContentType::Home,
            is_nav: false,
        };
        let content = Content {
            id: String::from("content"),
            state,
        };
        handle::Handle(Rc::new(RefCell::new(content)))
    }
}
impl rust_fel::Component for handle::Handle<Content> {
    type Properties = Option<String>;
    type Message = Actions;
    type State = ContentState;
    // This component takes no external props.
    fn add_props(&mut self, _props: Self::Properties) {}
    fn reduce_state(&mut self, message: Self::Message) {
        let window = web_sys::window().expect("no global `window` exists");
        // Scroll back to the top of the page before applying the transition.
        let mut opts = ScrollToOptions::new();
        opts.top(0.0);
        window.scroll_with_scroll_to_options(&opts);
        match message {
            // Selecting a view also closes the mobile navigation overlay.
            Actions::ContentType(x) => {
                self.0.borrow_mut().state.content = x;
                self.0.borrow_mut().state.is_nav = false;
            }
            Actions::ShowNav => self.0.borrow_mut().state.is_nav = true,
            Actions::HideNav => self.0.borrow_mut().state.is_nav = false,
        }
        rust_fel::re_render(self.render(), Some(self.0.borrow().id.clone()));
    }
    fn render(&self) -> rust_fel::Element {
        let borrow = self.0.borrow_mut();
        let state = borrow.state.clone();
        let mut content_type_vec = vec![
            ContentType::About,
            ContentType::SiteInfo,
            ContentType::Posts,
            ContentType::Github,
            ContentType::LinkedIn,
        ];
        // "Home" only appears in the nav when we are not already on it.
        if state.content != ContentType::Home {
            content_type_vec.push(ContentType::Home)
        }
        // One <li> per nav target; external links render as anchors and get
        // no click handler.
        let nav_items: Vec<rust_fel::Element> = content_type_vec
            .iter()
            .map(|content_type| {
                let mut clone = self.clone();
                let (label, html_type) = match content_type {
                    ContentType::Home => ("<span>Home</span>", "li"),
                    ContentType::Posts => ("<span | data-cy=nav-posts |>Posts</span>", "li"),
                    ContentType::SiteInfo => {
                        ("<span | data-cy=nav-site-info |>Site Info</span>", "li")
                    }
                    ContentType::About => ("<span | data-cy=nav-about |>About</span>", "li"),
                    ContentType::Github => {
                        ("<a | href=https://github.com/tostaylo |>Github</a>", "li")
                    }
                    ContentType::LinkedIn => (
                        "<a | href=https://www.linkedin.com/in/taylortorre |>LinkedIn</a>",
                        "li",
                    ),
                };
                let owned_content_type = content_type.to_owned();
                let on_click = match content_type {
                    ContentType::Github => None,
                    ContentType::LinkedIn => None,
                    _ => Some(Box::new(move || {
                        clone.reduce_state(Actions::ContentType(owned_content_type.clone()))
                    }) as rust_fel::ClosureProp),
                };
                // Highlight the item for the currently-active view.
                let nav_item_class_name = if content_type == &state.content.clone() {
                    Some("nav-item nav-item-active".to_owned())
                } else {
                    Some("nav-item".to_owned())
                };
                rust_fel::Element::new(
                    html_type.to_owned(),
                    rust_fel::Props {
                        on_click,
                        children: Some(vec![rust_fel::html(label.to_owned())]),
                        class_name: nav_item_class_name,
                        ..Default::default()
                    },
                )
            })
            .collect();
        // Wrap the nav items in <nav><ul>…</ul></nav> with the given class.
        fn navigation(list_items: Vec<rust_fel::Element>, class_name: String) -> rust_fel::Element {
            let ul = rust_fel::Element::new(
                "ul".to_owned(),
                rust_fel::Props {
                    children: Some(list_items),
                    ..Default::default()
                },
            );
            rust_fel::Element::new(
                "nav".to_owned(),
                rust_fel::Props {
                    children: Some(vec![ul]),
                    class_name: Some(class_name),
                    ..Default::default()
                },
            )
        }
        // While the mobile nav is open, lock body scrolling with an injected
        // <style> block; otherwise inject an empty style element.
        let (menu_button_action, nav_toggle_classname, body_lock) = match state.is_nav {
            true => {
                let body_lock = rust_fel::html(
                    "<style>@media screen and (max-width: 900px){{body{{position:fixed; overflow:hidden;}}}}</style>".to_owned()
                );
                (Actions::HideNav, "show-nav", body_lock)
            }
            false => (
                Actions::ShowNav,
                "hide-nav",
                // NOTE(review): "</style" is missing its closing '>' —
                // confirm that rust_fel::html tolerates this.
                rust_fel::html("<style></style".to_owned()),
            ),
        };
        let mut clone_for_menu_button = self.clone();
        let menu_button_onclick =
            Some(
                Box::new(move || clone_for_menu_button.reduce_state(menu_button_action.clone()))
                    as rust_fel::ClosureProp,
            );
        let menu = rust_fel::html("<span |class=menu|></span>".to_owned());
        let menu_button_mobile = rust_fel::Element::new(
            "span".to_owned(),
            rust_fel::Props {
                class_name: Some("menu-button".to_owned()),
                on_click: menu_button_onclick,
                children: Some(vec![menu]),
                data_cy: Some("menu-button".to_owned()),
                ..Default::default()
            },
        );
        let content_footer = rust_fel::html(
            "<div |class=content-footer|><span |class=content-footer-underline|></span></div>"
                .to_owned(),
        );
        // Each non-home view is wrapped with the shared nav, mobile menu
        // button, footer, and scroll-lock style element; home gets nav only.
        let content_children = match borrow.state.content {
            ContentType::About => Some(vec![
                navigation(
                    nav_items,
                    format!("non-home-navigation {}", nav_toggle_classname),
                ),
                menu_button_mobile,
                about(),
                content_footer,
                body_lock,
            ]),
            ContentType::SiteInfo => Some(vec![
                navigation(
                    nav_items,
                    format!("non-home-navigation {}", nav_toggle_classname),
                ),
                menu_button_mobile,
                site_info(),
                content_footer,
                body_lock,
            ]),
            ContentType::Posts => Some(vec![
                navigation(
                    nav_items,
                    format!("non-home-navigation {}", nav_toggle_classname),
                ),
                menu_button_mobile,
                posts(),
                content_footer,
                body_lock,
            ]),
            _ => Some(vec![navigation(nav_items, "home-navigation".to_owned())]),
        };
        rust_fel::Element::new(
            "section".to_owned(),
            rust_fel::Props {
                id: Some(borrow.id.clone()),
                class_name: Some("content".to_owned()),
                children: content_children,
                ..Default::default()
            },
        )
    }
}
| true
|
dfd1ab14e74b42b571ed54e76c579976e9ad932d
|
Rust
|
PhilboBaggins/check-project
|
/src/main.rs
|
UTF-8
| 4,158
| 2.65625
| 3
|
[] |
no_license
|
extern crate cargo_toml;
extern crate toml;
extern crate clap;
extern crate ini;
use std::fs;
use cargo_toml::Manifest;
use std::fs::read;
use clap::{App, Arg};
use std::path::Path;
use ini::Ini;
#[cfg(test)]
mod tests {
    /// Smoke test that keeps the test harness wired up for this binary crate.
    #[test]
    fn it_works() {
        let five = 5;
        // `assert_eq!` reports both sides on failure, unlike `assert!(a == b)`.
        assert_eq!(five, 5);
    }
}
/// Report on a single optional manifest field.
///
/// Prints the field when its value still contains a `'?'` placeholder, or a
/// "Not present" line when the field is missing; completed fields are silent.
fn check_field_string(field: Option<&String>, friendly_name: &str) {
    match field {
        // A '?' in the value marks an unfinished placeholder worth reporting.
        // (char pattern avoids the needless `&str` pattern allocation path)
        Some(value) if value.contains('?') => println!("{}: {}", friendly_name, value),
        Some(_) => {}
        None => println!("{}: Not present", friendly_name),
    }
}
/// Print a mismatch report when two optional string fields differ.
///
/// A missing field compares as the literal string "Not present"; equal
/// fields produce no output.
fn compare_fields_string(field_1: Option<&String>, field_2: Option<&String>,
    friendly_name_1: &str, friendly_name_2: &str) {
    let not_present = "Not present".to_owned();
    let lhs = field_1.unwrap_or(&not_present);
    let rhs = field_2.unwrap_or(&not_present);
    if lhs == rhs {
        return;
    }
    println!("{} and {} do not match", friendly_name_1, friendly_name_2);
    println!("    {}: {}", friendly_name_1, lhs);
    println!("    {}: {}", friendly_name_2, rhs);
}
/// Check a Cargo project's metadata for placeholder or inconsistent values.
///
/// Reads `<proj_path>/Cargo.toml`, reports fields that are missing or still
/// contain `?` placeholders, and compares the `repository` field against the
/// git `origin` remote URL from `<proj_path>/.git/config` when available.
fn check_cargo_project(proj_path: &str, _verbose: u64) {
    let cargo_toml_path = Path::new(proj_path).join("Cargo.toml");
    // NOTE(review): these unwraps abort the process if Cargo.toml is missing
    // or malformed — confirm whether a friendlier error is wanted here.
    let cargo_toml_data = Manifest::<toml::Value>::from_slice_with_metadata(&read(cargo_toml_path).unwrap()).unwrap();
    let cargo_toml_package = cargo_toml_data.package.as_ref().unwrap();
    // Check Cargo.toml fields
    // TODO: &cargo_toml_package.edition
    check_field_string(Some(&cargo_toml_package.version), "Cargo.toml version field");
    check_field_string(cargo_toml_package.description.as_ref(), "Cargo.toml description field");
    check_field_string(cargo_toml_package.repository.as_ref(), "Cargo.toml repository field");
    check_field_string(cargo_toml_package.license.as_ref(), "Cargo.toml license field");
    // Cross-check the manifest's repository URL against the git remote.
    let git_config_path = Path::new(proj_path).join(".git").join("config");
    if let Ok(conf) = Ini::load_from_file(git_config_path) {
        if let Some(remote_origin_section) = conf.section(Some("remote \"origin\"".to_owned())) {
            compare_fields_string(
                cargo_toml_package.repository.as_ref(),
                remote_origin_section.get("url"),
                "Cargo.toml repository field",
                ".git/config remote origin URL");
        }
    }
    if let Some(_license_info) = &cargo_toml_package.license {
        // TODO: Check for license files that match cargo_toml_package.license
    }
    // TODO: Add check for CI badge:
    // * https://github.com/sodiumoxide/sodiumoxide/commit/9a9ab1d4347ad15ae545019cd2355cda723938c5
    // * https://doc.rust-lang.org/cargo/reference/manifest.html
}
/// Entry point: parse command-line options and run checks on the project at
/// the given path.
fn main() {
    let matches = App::new("check-project")
        .version("1.0")
        // NOTE(review): the about text below is still a "???" placeholder.
        .about("???????????????????????????????????????")
        .author("Phil B.")
        .arg(Arg::with_name("path")
            .help("Path to project")
            .takes_value(true)
            .required(true))
        .arg(Arg::with_name("verbose")
            .short("v")
            .long("verbose")
            .multiple(true)
            .help("Enable verbose output"))
        .get_matches();
    // "path" is marked required above, so this unwrap cannot fail.
    let proj_path = matches.value_of("path").unwrap();
    let verbose = matches.occurrences_of("verbose");
    // TODO: Check for .git dir, .gitignore file, etc
    let projects_top_level_contents = fs::read_dir(proj_path).unwrap_or_else(|error| {
        eprintln!("{}", error.to_string());
        ::std::process::exit(1);
    });
    // Dispatch per project file found at the top level of the directory.
    for path in projects_top_level_contents {
        // TODO: Consider files and directories differently
        // TODO: Get rid of some of these unwrap() calls
        if let Some(file_name) = path.unwrap().path().file_name() {
            match file_name.to_str() {
                Some("Cargo.toml") => check_cargo_project(proj_path, verbose),
                Some(file_name) if verbose >= 2 => {
                    println!("Looking for project files ... ignoring {}", file_name)
                },
                _ => {
                    // ????
                }
            }
        }
    }
}
| true
|
2c3eca984bb31584978072fb4e2339fa8dd3ab09
|
Rust
|
MarimeGui/my_gltf
|
/src/materials.rs
|
UTF-8
| 895
| 2.671875
| 3
|
[] |
no_license
|
/// Convenience alias: the full list of materials in a document.
pub type Materials = Vec<Material>;
#[derive(Serialize, Deserialize)]
pub struct Material {
    /// Physically-based rendering parameters; omitted from serialized output
    /// when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename = "pbrMetallicRoughness")]
    pub pbr_metallic_roughness: Option<PbrMetallicRoughness>,
}
#[derive(Serialize, Deserialize)]
pub struct PbrMetallicRoughness {
    /// Base color texture reference; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename = "baseColorTexture")]
    pub base_color_texture: Option<BaseColorTexture>,
    /// Metalness scale factor; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename = "metallicFactor")]
    pub metallic_factor: Option<f64>,
    /// Roughness scale factor; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename = "roughnessFactor")]
    pub roughness_factor: Option<f64>,
}
#[derive(Serialize, Deserialize)]
pub struct BaseColorTexture {
    /// Index of the referenced texture; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub index: Option<usize>,
}
| true
|
75e37c61b70ab9879445106994e621d1cb2737fe
|
Rust
|
KomodoPlatform/trie
|
/trie-db/src/triedb.rs
|
UTF-8
| 10,435
| 2.59375
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2017, 2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use hash_db::{HashDBRef, Prefix, EMPTY_PREFIX};
use crate::nibble::NibbleSlice;
use crate::iterator::TrieDBNodeIterator;
use crate::rstd::boxed::Box;
use super::node::{NodeHandle, Node, OwnedNode, decode_hash};
use super::lookup::Lookup;
use super::{Result, DBValue, Trie, TrieItem, TrieError, TrieIterator, Query,
TrieLayout, CError, TrieHash};
use super::nibble::NibbleVec;
#[cfg(feature = "std")]
use crate::rstd::{fmt, vec::Vec};
/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher`
/// implementation to generate keys and a `NodeCodec` implementation to encode/decode
/// the nodes.
///
/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object.
/// Use `get` and `contains` to query values associated with keys in the trie.
///
/// # Example
/// ```ignore
/// use hash_db::Hasher;
/// use reference_trie::{RefTrieDBMut, RefTrieDB, Trie, TrieMut};
/// use trie_db::DBValue;
/// use keccak_hasher::KeccakHasher;
/// use memory_db::*;
///
/// let mut memdb = MemoryDB::<KeccakHasher, HashKey<_>, _>::default();
/// let mut root = Default::default();
/// RefTrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
/// let t = RefTrieDB::new(&memdb, &root).unwrap();
/// assert!(t.contains(b"foo").unwrap());
/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar".to_vec());
/// ```
pub struct TrieDB<'db, L>
where
    L: TrieLayout,
{
    /// Read-only handle to the backend holding the encoded trie nodes.
    db: &'db dyn HashDBRef<L::Hash, DBValue>,
    /// Hash of the root node; every lookup starts from here.
    root: &'db TrieHash<L>,
    /// The number of hashes performed so far in operations on this trie.
    hash_count: usize,
}

impl<'db, L> TrieDB<'db, L>
where
    L: TrieLayout,
{
    /// Create a new trie with the backing database `db` and `root`
    /// Returns an error if `root` does not exist
    pub fn new(
        db: &'db dyn HashDBRef<L::Hash, DBValue>,
        root: &'db TrieHash<L>
    ) -> Result<Self, TrieHash<L>, CError<L>> {
        // Fail fast when the backend has no node under the root hash.
        if !db.contains(root, EMPTY_PREFIX) {
            Err(Box::new(TrieError::InvalidStateRoot(*root)))
        } else {
            Ok(TrieDB {db, root, hash_count: 0})
        }
    }

    /// Get the backing database.
    pub fn db(&'db self) -> &'db dyn HashDBRef<L::Hash, DBValue> { self.db }

    /// Given some node-describing data `node`, and node key return the actual node RLP.
    /// This could be a simple identity operation in the case that the node is sufficiently small,
    /// but may require a database lookup.
    ///
    /// Return value is the node data and the node hash if the value was looked up in the database
    /// or None if it was returned raw.
    ///
    /// `partial_key` is encoded nibble slice that addresses the node.
    pub(crate) fn get_raw_or_lookup(
        &self,
        parent_hash: TrieHash<L>,
        node_handle: NodeHandle,
        partial_key: Prefix,
    ) -> Result<(OwnedNode<DBValue>, Option<TrieHash<L>>), TrieHash<L>, CError<L>> {
        let (node_hash, node_data) = match node_handle {
            NodeHandle::Hash(data) => {
                // Hash handle: decode the hash, then fetch the encoded node
                // from the backing database.
                let node_hash = decode_hash::<L::Hash>(data)
                    .ok_or_else(|| Box::new(TrieError::InvalidHash(parent_hash, data.to_vec())))?;
                let node_data = self.db
                    .get(&node_hash, partial_key)
                    .ok_or_else(|| {
                        // A missing root is reported distinctly from a missing
                        // interior node so callers can tell the cases apart.
                        if partial_key == EMPTY_PREFIX {
                            Box::new(TrieError::InvalidStateRoot(node_hash))
                        } else {
                            Box::new(TrieError::IncompleteDatabase(node_hash))
                        }
                    })?;
                (Some(node_hash), node_data)
            }
            // Inline handles carry the node encoding directly; no DB access.
            NodeHandle::Inline(data) => (None, data.to_vec()),
        };
        let owned_node = OwnedNode::new::<L::Codec>(node_data)
            .map_err(|e| Box::new(TrieError::DecoderError(node_hash.unwrap_or(parent_hash), e)))?;
        Ok((owned_node, node_hash))
    }
}
impl<'db, L> Trie<L> for TrieDB<'db, L>
where
    L: TrieLayout,
{
    fn root(&self) -> &TrieHash<L> { self.root }

    // Queries a key, feeding the found value (if any) through `query`.
    fn get_with<'a, 'key, Q: Query<L::Hash>>(
        &'a self,
        key: &'key [u8],
        query: Q,
    ) -> Result<Option<Q::Item>, TrieHash<L>, CError<L>>
        where 'a: 'key,
    {
        // Delegate to the shared lookup routine, starting from the root hash.
        Lookup::<L, Q> {
            db: self.db,
            query,
            hash: *self.root,
        }.look_up(NibbleSlice::new(key))
    }

    fn iter<'a>(&'a self)-> Result<
        Box<dyn TrieIterator<L, Item=TrieItem<TrieHash<L>, CError<L>>> + 'a>,
        TrieHash<L>,
        CError<L>,
    > {
        // Box the concrete iterator so callers only see the trait object.
        TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
    }
}
#[cfg(feature="std")]
// This is for pretty debug output only
struct TrieAwareDebugNode<'db, 'a, L>
where
    L: TrieLayout,
{
    /// Trie used to resolve child handles while formatting.
    trie: &'db TrieDB<'db, L>,
    /// Handle (hash or inline) of the node being printed.
    node_key: NodeHandle<'a>,
    /// Nibble path from the root down to this node.
    partial_key: NibbleVec,
    /// Child slot index within the parent branch, if any.
    index: Option<u8>,
}

#[cfg(feature="std")]
impl<'db, 'a, L> fmt::Debug for TrieAwareDebugNode<'db, 'a, L>
where
    L: TrieLayout,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Resolve the handle into a decoded node; failures are rendered inline
        // (as BROKEN_NODE) instead of aborting the whole debug dump.
        match self.trie.get_raw_or_lookup(
            <TrieHash<L>>::default(),
            self.node_key,
            self.partial_key.as_prefix()
        ) {
            Ok((owned_node, _node_hash)) => match owned_node.node() {
                Node::Leaf(slice, value) =>
                    // Only children of branches carry an index; print it when set.
                    match (f.debug_struct("Node::Leaf"), self.index) {
                        (ref mut d, Some(i)) => d.field("index", &i),
                        (ref mut d, _) => d,
                    }
                    .field("slice", &slice)
                    .field("value", &value)
                    .finish(),
                Node::Extension(slice, item) => {
                    match (f.debug_struct("Node::Extension"), self.index) {
                        (ref mut d, Some(i)) => d.field("index", &i),
                        (ref mut d, _) => d,
                    }
                    .field("slice", &slice)
                    // Recurse into the single child, extending the path by this
                    // extension's partial slice.
                    .field("item", &TrieAwareDebugNode {
                        trie: self.trie,
                        node_key: item,
                        partial_key: self.partial_key
                            .clone_append_optional_slice_and_nibble(Some(&slice), None),
                        index: None,
                    })
                    .finish()
                },
                Node::Branch(ref nodes, ref value) => {
                    // Collect occupied children, remembering each slot index.
                    let nodes: Vec<TrieAwareDebugNode<L>> = nodes.into_iter()
                        .enumerate()
                        .filter_map(|(i, n)| n.map(|n| (i, n)))
                        .map(|(i, n)| TrieAwareDebugNode {
                            trie: self.trie,
                            index: Some(i as u8),
                            node_key: n,
                            partial_key: self.partial_key
                                .clone_append_optional_slice_and_nibble(None, Some(i as u8)),
                        })
                        .collect();
                    match (f.debug_struct("Node::Branch"), self.index) {
                        (ref mut d, Some(ref i)) => d.field("index", i),
                        (ref mut d, _) => d,
                    }
                    .field("nodes", &nodes)
                    .field("value", &value)
                    .finish()
                },
                Node::NibbledBranch(slice, nodes, value) => {
                    // Same as Branch, but the path also grows by the node's own
                    // partial slice.
                    let nodes: Vec<TrieAwareDebugNode<L>> = nodes.iter()
                        .enumerate()
                        .filter_map(|(i, n)| n.map(|n| (i, n)))
                        .map(|(i, n)| TrieAwareDebugNode {
                            trie: self.trie,
                            index: Some(i as u8),
                            node_key: n,
                            partial_key: self.partial_key
                                .clone_append_optional_slice_and_nibble(Some(&slice), Some(i as u8)),
                        }).collect();
                    match (f.debug_struct("Node::NibbledBranch"), self.index) {
                        (ref mut d, Some(ref i)) => d.field("index", i),
                        (ref mut d, _) => d,
                    }
                    .field("slice", &slice)
                    .field("nodes", &nodes)
                    .field("value", &value)
                    .finish()
                },
                Node::Empty => f.debug_struct("Node::Empty").finish(),
            },
            // Keep dumping even when a node fails to load or decode.
            Err(e) => f.debug_struct("BROKEN_NODE")
                .field("index", &self.index)
                .field("key", &self.node_key)
                .field("error", &format!("ERROR fetching node: {}", e))
                .finish(),
        }
    }
}
#[cfg(feature="std")]
impl<'db, L> fmt::Debug for TrieDB<'db, L>
where
    L: TrieLayout,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Renders the whole trie by recursively formatting from the root node.
        f.debug_struct("TrieDB")
            .field("hash_count", &self.hash_count)
            .field("root", &TrieAwareDebugNode {
                trie: self,
                node_key: NodeHandle::Hash(self.root().as_ref()),
                partial_key: NibbleVec::new(),
                index: None,
            })
            .finish()
    }
}
/// Iterator for going through all values in the trie in pre-order traversal order.
pub struct TrieDBIterator<'a, L: TrieLayout> {
    /// Node-level iterator; this wrapper filters it down to key/value pairs.
    inner: TrieDBNodeIterator<'a, L>,
}

impl<'a, L: TrieLayout> TrieDBIterator<'a, L> {
    /// Create a new iterator.
    pub fn new(db: &'a TrieDB<L>) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
        let inner = TrieDBNodeIterator::new(db)?;
        Ok(TrieDBIterator { inner })
    }

    /// Create a new iterator, but limited to a given prefix.
    pub fn new_prefixed(db: &'a TrieDB<L>, prefix: &[u8]) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
        let mut inner = TrieDBNodeIterator::new(db)?;
        inner.prefix(prefix)?;
        Ok(TrieDBIterator {
            inner,
        })
    }

    /// Create a new iterator, but limited to a given prefix.
    /// It then do a seek operation from prefixed context (using `seek` lose
    /// prefix context by default).
    pub fn new_prefixed_then_seek(
        db: &'a TrieDB<L>,
        prefix: &[u8],
        start_at: &[u8],
    ) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
        let mut inner = TrieDBNodeIterator::new(db)?;
        inner.prefix_then_seek(prefix, start_at)?;
        Ok(TrieDBIterator {
            inner,
        })
    }
}

impl<'a, L: TrieLayout> TrieIterator<L> for TrieDBIterator<'a, L> {
    /// Position the iterator on the first element with key >= `key`
    fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash<L>, CError<L>> {
        TrieIterator::seek(&mut self.inner, key)
    }
}

impl<'a, L: TrieLayout> Iterator for TrieDBIterator<'a, L> {
    type Item = TrieItem<'a, TrieHash<L>, CError<L>>;

    fn next(&mut self) -> Option<Self::Item> {
        // Walk nodes in pre-order and surface only those carrying a value.
        while let Some(item) = self.inner.next() {
            match item {
                Ok((mut prefix, _, node)) => {
                    let maybe_value = match node.node() {
                        Node::Leaf(partial, value) => {
                            // The leaf's partial slice completes the full key.
                            prefix.append_partial(partial.right());
                            Some(value)
                        }
                        Node::Branch(_, value) => value,
                        Node::NibbledBranch(partial, _, value) => {
                            prefix.append_partial(partial.right());
                            value
                        }
                        _ => None,
                    };
                    if let Some(value) = maybe_value {
                        let (key_slice, maybe_extra_nibble) = prefix.as_prefix();
                        let key = key_slice.to_vec();
                        // A dangling half-byte means the key cannot map to
                        // bytes — the trie encoding is corrupt.
                        if let Some(extra_nibble) = maybe_extra_nibble {
                            return Some(Err(Box::new(
                                TrieError::ValueAtIncompleteKey(key, extra_nibble)
                            )));
                        }
                        return Some(Ok((key, value.to_vec())));
                    }
                },
                Err(err) => return Some(Err(err)),
            }
        }
        None
    }
}
| true
|
099508d9f3e15c034320d5e0ae67cc819697e79a
|
Rust
|
thebracket/bracket-ui
|
/src/widgets/filler.rs
|
UTF-8
| 899
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
use crate::element::{ElementId, UiElement};
use crate::mouse_coverage::MouseCoverage;
use bracket_lib::prelude::*;
/// Leaf widget that floods its parent's bounds with a single glyph/color.
pub struct Filler {
    /// Glyph drawn into every cell of the parent rectangle.
    glyph: FontCharType,
    /// Color pair applied to every cell.
    color: ColorPair,
    /// Stable identifier used for lookups via `find`.
    id: ElementId,
}
impl UiElement for Filler {
    /// Unique identifier of this element.
    fn id(&self) -> ElementId {
        self.id
    }

    /// A filler has no children, so lookup succeeds only on its own id.
    fn find(&mut self, id: ElementId) -> Option<&mut dyn UiElement> {
        if id == self.id {
            Some(self)
        } else {
            None
        }
    }

    /// Paints every cell of the parent's bounds with the fill glyph/color.
    fn render(
        &mut self,
        parent_bounds: Rect,
        batch: &mut DrawBatch,
        _mouse_coverage: &mut MouseCoverage,
    ) {
        parent_bounds.for_each(|point| {
            batch.set(point, self.color, self.glyph);
        });
    }
}
impl Filler {
    /// Builds a boxed filler that repeats `glyph` in `color`, with a fresh id.
    pub fn new(glyph: FontCharType, color: ColorPair) -> Box<Self> {
        let filler = Self {
            glyph,
            color,
            id: ElementId::new(),
        };
        Box::new(filler)
    }
}
| true
|
8f0bc7c78d73cd3b662b62a842c05669f9cbb561
|
Rust
|
yangzhe1990/conflux-rust
|
/core/src/vm/env.rs
|
UTF-8
| 2,479
| 2.640625
| 3
|
[
"GPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LGPL-2.1-or-later",
"LicenseRef-scancode-other-copyleft"
] |
permissive
|
// Copyright 2019-2020 Conflux Foundation. All rights reserved.
// Conflux is free software and distributed under GNU General Public License.
// See http://www.gnu.org/licenses/
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
// Copyright 2019 Conflux Foundation. All rights reserved.
// Conflux is free software and distributed under GNU General Public License.
// See http://www.gnu.org/licenses/
//! Environment information for transaction execution.
use cfx_types::{Address, H256, U256};
use primitives::BlockNumber;
/// Information concerning the execution environment for a
/// message-call/contract-creation.
#[derive(Debug, Clone, Default)]
pub struct Env {
    /// The block number.
    pub number: BlockNumber,
    /// The block author.
    pub author: Address,
    /// The block timestamp.
    pub timestamp: u64,
    /// The block difficulty.
    pub difficulty: U256,
    /// The block gas limit.
    pub gas_limit: U256,
    /// The last block hash.
    pub last_hash: H256,
    /// The total gas used in the block following execution of the transaction.
    pub accumulated_gas_used: U256,
    /// The epoch height.
    pub epoch_height: u64,
    /// The transaction_epoch_bound used to verify if a transaction has
    /// expired.
    pub transaction_epoch_bound: u64,
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_can_be_created_as_default() {
        // `#[derive(Default)]` should zero all numeric fields and use each
        // type's own default for addresses and hashes.
        let default_env = Env::default();
        assert_eq!(default_env.number, 0);
        assert_eq!(default_env.author, Address::default());
        assert_eq!(default_env.timestamp, 0);
        assert_eq!(default_env.difficulty, 0.into());
        assert_eq!(default_env.gas_limit, 0.into());
        assert_eq!(default_env.last_hash, H256::zero());
        assert_eq!(default_env.accumulated_gas_used, 0.into());
    }
}
| true
|
d6e0baefed2984bdf5103fecf6aada15b43a8115
|
Rust
|
qeedquan/challenges
|
/poj/3183-stump-removal.rs
|
UTF-8
| 2,097
| 3.484375
| 3
|
[
"MIT"
] |
permissive
|
/*
Description
Always thinking of the cows' grazing experience, FJ has found that he must remove N (1 <= N <= 50,000) unsightly stumps from the pasture. The stumps are conveniently arranged in a straight line and numbered 1..N with each stump having some height H_i (1 <= H_i <= 10,000).
FJ will use the traditional high explosives to destroy the stumps. These high explosives are formulated to destroy adjacent stumps as long as those adjacent stumps are strictly shorter than the nearest stump being destroyed. The blast can continue past the closest adjacent stump to the next adjacent stump if it is even shorter than the nearest stump just destroyed. As soon as a stump encountered by the blast wave is not shorter, though, no more destruction occurs on that side of the target stump (the other side follows the same rules with whatever stumps might appear there).
Consider a line of nine stumps with these heights:
1 2 5 4 3 3 6 6 2
If FJ blows up the third stump (with height 5), then the second stump will also be destroyed (height 2) and the first stump (height 1) will also be destroyed. Likewise, the fourth stump (height 4) and fifth stump (height 3) will be destroyed since they are successively shorter, leaving the line like this:
* * * * * 3 6 6 2
Two more explosives (at stumps 7 and 8) will destroy the rest.
Help FJ determine the minimum number of explosive charges he needs to destroy the stumps.
Input
Line 1: A single integer, N
Lines 2..N+1: Line i+1 contains H_i
Output
Lines 1..?: Each line contains one integer which is the index of a stump to blow up. The indices must be listed in increasing order.
Sample Input
9
1
2
5
4
3
3
6
6
2
Sample Output
3
7
8
Source
USACO 2006 January Bronze
*/
fn main() {
    // Sample case from the problem statement.
    let heights = vec![1, 2, 5, 4, 3, 3, 6, 6, 2];
    assert_eq!(explosives(&heights), vec![3, 7, 8]);
}
/// Returns the 1-based indices of stumps that need their own explosive charge,
/// in increasing order.
///
/// A blast destroys runs of strictly shorter adjacent stumps, so the minimum
/// set of charges is exactly the set of "weak local maxima": stumps at least
/// as tall as both neighbors (a missing neighbor counts as shorter). The
/// original version skipped indices 0 and n-1, missing maxima at either end
/// of the line (e.g. [5, 1] must blow stump 1).
///
/// Accepts any slice (callers passing `&Vec<isize>` still work via deref
/// coercion). An empty input yields an empty answer.
fn explosives(a: &[isize]) -> Vec<usize> {
    let n = a.len();
    let mut charges = Vec::new();
    for i in 0..n {
        // Out-of-range neighbors are treated as infinitely short.
        let ge_left = i == 0 || a[i] >= a[i - 1];
        let ge_right = i + 1 == n || a[i] >= a[i + 1];
        if ge_left && ge_right {
            charges.push(i + 1); // problem uses 1-based stump indices
        }
    }
    charges
}
| true
|
99140f23f0f65cdb16ca396af9cfede7c6937f41
|
Rust
|
afprusin/leetcode-rust
|
/src/bin/7_reverse_integer.rs
|
UTF-8
| 509
| 3.09375
| 3
|
[] |
no_license
|
pub struct Solution {}
impl Solution {
pub fn reverse(x: i32) -> i32 {
let mut to_reverse: i32 = x;
let mut reversed: i32 = 0;
while to_reverse != 0 {
reversed = match reversed.checked_mul(10) {
Some(val) => val,
None => { return 0; },
};
reversed += to_reverse % 10;
to_reverse /= 10;
}
return reversed;
}
}
fn main() {
    // True reverse (9646324351) overflows i32, so this prints 0.
    let reversed = Solution::reverse(1534236469);
    println!("{}", reversed);
}
| true
|
239184c262b8df81d5da3fe2f688b9fc516c6269
|
Rust
|
yokljo/modelone
|
/uione/src/resources.rs
|
UTF-8
| 6,173
| 2.703125
| 3
|
[] |
no_license
|
use crate::image_data::ImageData;
use crate::rect::Rect;
use std;
use std::fmt::Debug;
use std::sync::atomic::Ordering;
use std::any::Any;
use std::sync::{Arc, Mutex};
use std::sync::atomic::AtomicUsize;
use std::cell::{RefCell, Ref, RefMut};
use std::ops::{Deref, DerefMut};
use lazy_static::lazy_static;
// Monotonic counter handing out unique resource-handle ids.
static NEXT_RESOURCE_INDEX: AtomicUsize = AtomicUsize::new(0);

lazy_static! {
    /// Human-readable resource names, indexed by handle id.
    static ref RESOURCE_NAMES: Mutex<Vec<String>> = Mutex::new(vec![]);
}

/// Opaque id identifying one resource *type*; doubles as a slot index.
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
pub struct GraphicResourceHandle(usize);

impl GraphicResourceHandle {
    /// Raw index of this handle, used to address slots and names.
    pub fn get_id(self) -> usize {
        self.0
    }
}
/// Call this ONCE per type of resource and store the resource handle for that type for the rest of
/// time.
pub fn get_unique_resource_handle(name: String) -> GraphicResourceHandle {
    let mut names = RESOURCE_NAMES.lock().unwrap();
    names.push(name);
    // NOTE: the `names` mutex guard is still alive at this point, so the
    // counter is bumped while the lock is held and ids stay in step with
    // the name vector's indices even under concurrent callers.
    GraphicResourceHandle(NEXT_RESOURCE_INDEX.fetch_add(1, Ordering::SeqCst))
}
/// Looks up the human-readable name registered for `handle`.
pub fn get_resource_name(handle: GraphicResourceHandle) -> String {
    RESOURCE_NAMES.lock().unwrap()[handle.get_id()].clone()
}
/// A custom resource system that is shared between all items in a scene. This can be used to store
/// for example an image or font cache that all items can use when rendering, so the same thing
/// doesn't have to be loaded multiple times. The resources are shared per display context.
pub trait GraphicResource: Debug {
    /// Upcast used to downcast to the concrete resource type.
    fn as_any(&mut self) -> &mut Any;
    /// Handle identifying this resource's type.
    fn get_handle(&self) -> GraphicResourceHandle;
}

/// Errors returned when fetching a resource from the manager.
#[derive(Debug, Clone)]
pub enum GraphicResourceError {
    /// Nothing is registered under the requested handle.
    NoSuchResource,
    /// The resource is already mutably borrowed elsewhere.
    ResourceAlreadyInUse,
    /// The resource exists but is not of the requested concrete type.
    ResourceHasWrongType,
}
/// Typed view over a `GraphicResource`, keeping the `RefMut` borrow alive.
pub struct GraphicResourceRef<'m, T: 'static> {
    graphic_resource_refmut: RefMut<'m, Box<GraphicResource>>,
    // As long as graphic_resource_refmut is alive, this pointer to the typed resource will be
    // alive, so it is safe to dereference it during that time.
    typed_resource: *mut T,
}

impl<'m, T: 'static> GraphicResourceRef<'m, T> {
    /// Downcasts the boxed resource to `T`, capturing a raw pointer to it.
    /// Fails with `ResourceHasWrongType` when the resource is not a `T`.
    pub fn from_graphic_resource(graphic_resource_refmut: RefMut<'m, Box<GraphicResource>>) -> Result<GraphicResourceRef<'m, T>, GraphicResourceError> {
        let mut res = GraphicResourceRef {
            graphic_resource_refmut,
            // Placeholder; replaced below once the downcast succeeds.
            typed_resource: std::ptr::null_mut(),
        };
        {
            // Inner scope so the borrow of `res` ends before `res` is returned.
            let typed_resource_result = res.graphic_resource_refmut.deref_mut().as_any().downcast_mut::<T>();
            match typed_resource_result {
                Some(typed_resource) => {
                    res.typed_resource = typed_resource;
                }
                None => {
                    return Err(GraphicResourceError::ResourceHasWrongType);
                }
            }
        }
        Ok(res)
    }
}

impl<'m, T> Deref for GraphicResourceRef<'m, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: per the struct invariant, the pointee is owned by
        // `graphic_resource_refmut`, which outlives this borrow of `self`.
        unsafe {
            &*self.typed_resource
        }
    }
}

impl<'m, T> DerefMut for GraphicResourceRef<'m, T> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: same invariant as `deref`; `&mut self` guarantees exclusivity.
        unsafe {
            &mut *self.typed_resource
        }
    }
}
/// Declares the `GraphicResource` plumbing for a resource type: a lazily
/// initialised unique handle (`$handle_name`), the trait impl, and a typed
/// getter (`$getter_name`) that borrows it from a `GraphicResourceManager`.
#[macro_export] macro_rules! uione_graphic_resource {
    (
        $resource_type:ty, $getter_name:ident, $handle_name:ident
    ) => {
        lazy_static::lazy_static! {
            pub static ref $handle_name: $crate::resources::GraphicResourceHandle = $crate::resources::get_unique_resource_handle(stringify!($resource_type).into());
        }

        impl $crate::resources::GraphicResource for $resource_type {
            fn as_any(&mut self) -> &mut ::std::any::Any {
                self
            }

            fn get_handle(&self) -> $crate::resources::GraphicResourceHandle {
                *$handle_name
            }
        }

        pub fn $getter_name<'m>(resource_manager: &'m $crate::resources::GraphicResourceManager) -> Result<$crate::resources::GraphicResourceRef<'m, $resource_type>, $crate::resources::GraphicResourceError> {
            let res = resource_manager.get_resource(*$handle_name)?;
            $crate::resources::GraphicResourceRef::from_graphic_resource(res)
            //res.as_any().downcast_mut::<$resource_type>().ok_or($crate::resources::GraphicResourceError::ResourceHasWrongType)
        }
    }
}
/// Owns all registered resources, keyed by their handle id.
pub struct GraphicResourceManager {
    // One slot per handle id; `None` until a resource with that id registers.
    resources: Vec<Option<RefCell<Box<GraphicResource>>>>,
}

impl GraphicResourceManager {
    /// Creates an empty manager with no registered resources.
    pub fn new() -> GraphicResourceManager {
        GraphicResourceManager {
            resources: vec![],
        }
    }

    /// Registers `resource` under its own handle id.
    /// Returns the resource back as `Err` when that id is already occupied.
    pub fn register_resource(&mut self, resource: Box<GraphicResource>) -> Result<(), Box<GraphicResource>> {
        let id = resource.get_handle().get_id();
        // Grow the slot table until `id` is addressable.
        while self.resources.len() <= id {
            self.resources.push(None);
        }
        if self.resources[id].is_some() {
            // The resource is already registered.
            return Err(resource);
        }
        println!("Register resource {:?} ({})", get_resource_name(resource.get_handle()), resource.get_handle().get_id());
        self.resources[id] = Some(RefCell::new(resource));
        Ok(())
    }

    /*fn make_texture(&self, image_data: &image_data::ImageData) -> Arc<TextureResource> {
        Arc::new(GlTextureResource::new(image_data).unwrap())
    }*/

    /// Mutably borrows the resource registered under `handle`.
    pub fn get_resource(&self, handle: GraphicResourceHandle) -> Result<RefMut<Box<GraphicResource>>, GraphicResourceError> {
        let id = handle.get_id();
        if let Some(Some(ref resource_cell)) = self.resources.get(id) {
            // Dynamic borrow: a second simultaneous borrow is reported, not panicked.
            if let Ok(resource) = resource_cell.try_borrow_mut() {
                Ok(resource)
            } else {
                Err(GraphicResourceError::ResourceAlreadyInUse)
            }
        } else {
            Err(GraphicResourceError::NoSuchResource)
        }
    }

    /*pub fn get_resources<'l>(&mut self, requests: &mut [(GraphicResourceHandle, Result<RefMut<Box<GraphicResource>>, GraphicResourceError>)]) {
        for (handle, result) in &mut requests {
            if let Some(Some(ref resource_cell)) = self.resources.get(id) {
                if let Ok(resource) = resource_cell.try_borrow_mut() {
                    Ok(resource)
                } else {
                    Err(GraphicResourceError::ResourceAlreadyInUse)
                }
            } else {
                Err(GraphicResourceError::NoSuchResource)
            }
        }
    }*/
}
/*pub struct GraphicResourceQueryResult {
manager: &mut GraphicResourceManager,
results: &mut [(GraphicResourceHandle, Result<RefMut<Box<GraphicResource>>, GraphicResourceError>)]
}*/
/*pub trait ResourceManager {
fn make_texture(&self, image_data: &ImageData) -> Arc<TextureResource>;
fn get_custom_resource(&mut self, handle: GraphicResourceHandle) -> Result<&mut GraphicResource, GraphicResourceError>;
//fn get_custom_resources(&mut self, resources: &mut [(GraphicResourceHandle, Option<&mut Any>)]);
}*/
| true
|
d2df278f4097a2f9ea00910a240387797898d5e1
|
Rust
|
futursolo/fl-www-examples
|
/first-yew-app/src/main.rs
|
UTF-8
| 992
| 3.125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use yew::prelude::*;
/// Messages the root component reacts to.
#[derive(Debug)]
pub enum AppMsg {
    /// Bump the counter by one.
    Increment,
}

/// Root component: a simple click counter.
pub struct App {
    /// Number of times the button has been pressed.
    counter: u64,
    /// Link used to create callbacks targeting this component.
    link: ComponentLink<Self>,
}
impl Component for App {
    type Message = AppMsg;
    type Properties = ();

    /// Initialises the counter at zero.
    fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self { counter: 0, link }
    }

    /// Handles a message; returning `true` triggers a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            AppMsg::Increment => {
                self.counter += 1;
            }
        }
        true
    }

    /// No props on this component, so prop changes never force a re-render.
    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        false
    }

    /// Renders the counter value plus an increment button.
    fn view(&self) -> Html {
        let increment = self.link.callback(|_| AppMsg::Increment);
        html! {
            <div>
                {format!("Current Counter: {}", self.counter)}
                <br />
                <button onclick=increment>{"Increment"}</button>
            </div>
        }
    }
}
fn main() {
    // Mount the root component onto the document body.
    yew::start_app::<App>();
}
| true
|
ac6986edbf5edc965b7e537eba5cc8b04ab24c80
|
Rust
|
Terkwood/4d-labyrinth
|
/graph4d/src/geometry/vector.rs
|
UTF-8
| 6,499
| 3.421875
| 3
|
[] |
no_license
|
use std::cmp::PartialEq;
use std::ops;
/// Homogeneous 4D vector: four spatial coordinates plus a projective weight
/// in `coords[4]`. Accessors divide by the weight, so the stored
/// representation may be unnormalised.
#[derive(Clone, Copy)]
pub struct Vector {
    coords: [f64; 5],
}
impl Vector {
    /// Builds a vector from cartesian coordinates (weight set to 1).
    pub fn new(x: f64, y: f64, z: f64, w: f64) -> Vector {
        Vector {
            coords: [x, y, z, w, 1.0],
        }
    }

    /// Builds a vector directly from `[x, y, z, w, weight]`.
    pub fn from_array(arr: [f64; 5]) -> Vector {
        Vector { coords: arr }
    }

    /// Rescales the representation so the projective weight becomes 1.
    pub fn projective_normalize(&mut self) {
        if self.coords[4] == 1.0 {
            return;
        }
        self.coords[0] /= self.coords[4];
        self.coords[1] /= self.coords[4];
        self.coords[2] /= self.coords[4];
        self.coords[3] /= self.coords[4];
        self.coords[4] = 1.0;
    }

    // The accessors divide by the weight, so they are correct even when the
    // stored representation is unnormalised.
    #[inline]
    pub fn x(&self) -> f64 {
        self.coords[0] / self.coords[4]
    }

    #[inline]
    pub fn y(&self) -> f64 {
        self.coords[1] / self.coords[4]
    }

    #[inline]
    pub fn z(&self) -> f64 {
        self.coords[2] / self.coords[4]
    }

    #[inline]
    pub fn w(&self) -> f64 {
        self.coords[3] / self.coords[4]
    }

    /// Raw coordinate at index `i`. NOTE: unlike `x()`..`w()`, this is *not*
    /// divided by the projective weight.
    #[inline]
    pub fn coord(&self, i: usize) -> f64 {
        self.coords[i]
    }

    /// Euclidean dot product of the weight-corrected 4D coordinates.
    pub fn dot(&self, other: Vector) -> f64 {
        self.x() * other.x() + self.y() * other.y() + self.z() * other.z() + self.w() * other.w()
    }

    /// Euclidean length.
    #[inline]
    pub fn len(&self) -> f64 {
        self.dot(*self).sqrt()
    }

    /// Returns a unit-length copy of this vector.
    pub fn normalized(&self) -> Vector {
        *self / self.len()
    }

    /// Scales this vector to unit length in place.
    // Dividing only coords[0..4] suffices: the accessors divide by the
    // weight, so each component shrinks by exactly 1/len.
    pub fn normalize(&mut self) {
        let len = self.len();
        self.coords[0] /= len;
        self.coords[1] /= len;
        self.coords[2] /= len;
        self.coords[3] /= len;
    }

    /// 3D cross product of the x/y/z parts; the w component is set to 0.
    pub fn cross3(arg1: Vector, arg2: Vector) -> Vector {
        Vector {
            coords: [
                arg1.y() * arg2.z() - arg1.z() * arg2.y(),
                arg1.z() * arg2.x() - arg1.x() * arg2.z(),
                arg1.x() * arg2.y() - arg1.y() * arg2.x(),
                0.0,
                1.0,
            ],
        }
    }

    /// 4D cross product: the vector orthogonal to all three arguments,
    /// written out as the cofactor expansion of a 4x4 determinant.
    pub fn cross4(arg1: Vector, arg2: Vector, arg3: Vector) -> Vector {
        Vector {
            coords: [
                arg1.y() * arg2.z() * arg3.w()
                    + arg1.z() * arg2.w() * arg3.y()
                    + arg1.w() * arg2.y() * arg3.z()
                    - arg1.y() * arg2.w() * arg3.z()
                    - arg1.z() * arg2.y() * arg3.w()
                    - arg1.w() * arg2.z() * arg3.y(),
                arg1.z() * arg2.w() * arg3.x()
                    + arg1.w() * arg2.x() * arg3.z()
                    + arg1.x() * arg2.z() * arg3.w()
                    - arg1.z() * arg2.x() * arg3.w()
                    - arg1.w() * arg2.z() * arg3.x()
                    - arg1.x() * arg2.w() * arg3.z(),
                arg1.w() * arg2.x() * arg3.y()
                    + arg1.x() * arg2.y() * arg3.w()
                    + arg1.y() * arg2.w() * arg3.x()
                    - arg1.w() * arg2.y() * arg3.x()
                    - arg1.x() * arg2.w() * arg3.y()
                    - arg1.y() * arg2.x() * arg3.w(),
                arg1.x() * arg2.y() * arg3.z()
                    + arg1.y() * arg2.z() * arg3.x()
                    + arg1.z() * arg2.x() * arg3.y()
                    - arg1.x() * arg2.z() * arg3.y()
                    - arg1.y() * arg2.x() * arg3.z()
                    - arg1.z() * arg2.y() * arg3.x(),
                1.0,
            ],
        }
    }
}
impl ops::Add<Vector> for Vector {
    type Output = Vector;

    /// Component-wise sum, normalising the left operand's weight first.
    fn add(mut self, other: Vector) -> Vector {
        self.projective_normalize();
        for i in 0..4 {
            self.coords[i] += other.coords[i] / other.coords[4];
        }
        self
    }
}
impl ops::Sub<Vector> for Vector {
    type Output = Vector;

    /// Component-wise difference, normalising the left operand's weight first.
    fn sub(mut self, other: Vector) -> Vector {
        self.projective_normalize();
        for i in 0..4 {
            self.coords[i] -= other.coords[i] / other.coords[4];
        }
        self
    }
}
impl ops::Mul<f64> for Vector {
    type Output = Vector;

    /// Uniform scaling of the four spatial components.
    fn mul(mut self, other: f64) -> Vector {
        self.projective_normalize();
        for i in 0..4 {
            self.coords[i] *= other;
        }
        self
    }
}
impl ops::Div<f64> for Vector {
    type Output = Vector;

    /// Uniform down-scaling of the four spatial components.
    fn div(mut self, other: f64) -> Vector {
        self.projective_normalize();
        for i in 0..4 {
            self.coords[i] /= other;
        }
        self
    }
}
/// Absolute tolerance used for approximate component comparison.
const EPSILON: f64 = 0.0001;

impl PartialEq for Vector {
    /// Approximate equality: every weight-corrected component differs by
    /// less than `EPSILON`.
    fn eq(&self, rhs: &Vector) -> bool {
        [
            self.x() - rhs.x(),
            self.y() - rhs.y(),
            self.z() - rhs.z(),
            self.w() - rhs.w(),
        ]
        .iter()
        .all(|delta| delta.abs() < EPSILON)
    }
}
#[cfg(test)]
mod test {
    use super::Vector;

    // Arithmetic tests use weight-1 vectors, so the exact-equality asserts on
    // the accessors are well defined.

    #[test]
    fn test_add_vectors() {
        let a = Vector::new(0.0, 1.0, 2.0, 3.0);
        let b = Vector::new(2.0, 3.0, 8.0, 7.0);
        let c = a + b;
        assert_eq!(c.x(), 2.0);
        assert_eq!(c.y(), 4.0);
        assert_eq!(c.z(), 10.0);
        assert_eq!(c.w(), 10.0);
    }

    #[test]
    fn test_sub_vectors() {
        let a = Vector::new(0.0, 1.0, 2.0, 3.0);
        let b = Vector::new(2.0, 3.0, 8.0, 7.0);
        let c = a - b;
        assert_eq!(c.x(), -2.0);
        assert_eq!(c.y(), -2.0);
        assert_eq!(c.z(), -6.0);
        assert_eq!(c.w(), -4.0);
    }

    #[test]
    fn test_mul_vec_f64() {
        let a = Vector::new(0.0, 1.0, 2.0, 3.0);
        let c = a * 3.0;
        assert_eq!(c.x(), 0.0);
        assert_eq!(c.y(), 3.0);
        assert_eq!(c.z(), 6.0);
        assert_eq!(c.w(), 9.0);
    }

    #[test]
    fn test_div_vec_f64() {
        let a = Vector::new(0.0, 1.0, 2.0, 3.0);
        let c = a / 2.0;
        assert_eq!(c.x(), 0.0);
        assert_eq!(c.y(), 0.5);
        assert_eq!(c.z(), 1.0);
        assert_eq!(c.w(), 1.5);
    }

    #[test]
    fn test_dot_product() {
        let a = Vector::new(0.0, 1.0, 2.0, 3.0);
        let b = Vector::new(2.0, 3.0, 8.0, 7.0);
        let c = a.dot(b);
        assert_eq!(c, 40.0);
    }
}
| true
|
1a59ab19a10f0038c394477b01d07f16766bd182
|
Rust
|
seandewar/challenge-solutions
|
/leetcode/medium/determine-if-two-strings-are-close.rs
|
UTF-8
| 1,340
| 3.625
| 4
|
[] |
no_license
|
// https://leetcode.com/problems/determine-if-two-strings-are-close
//
// Complexity: runtime O(n), space O(1).
impl Solution {
    /// Two words are "close" iff they use exactly the same set of letters
    /// and their multisets of per-letter frequencies match.
    pub fn close_strings(word1: String, word2: String) -> bool {
        // Different lengths can never be close.
        if word1.len() != word2.len() {
            return false;
        }

        // Frequency table per word; operation 1 (swaps) means positions are
        // irrelevant, so counts are all the information we need.
        let tally = |word: &str| {
            let mut freqs = [0i32; 26];
            for c in word.chars() {
                freqs[c as usize - 'a' as usize] += 1;
            }
            freqs
        };
        let (mut freqs1, mut freqs2) = (tally(&word1), tally(&word2));

        // Operation 2 only exchanges frequencies between letters already
        // present, so both words must use the same character set.
        if freqs1
            .iter()
            .zip(freqs2.iter())
            .any(|(&f1, &f2)| (f1 == 0) != (f2 == 0))
        {
            return false;
        }

        // With the same character set, the words are close exactly when the
        // sorted frequency profiles agree.
        freqs1.sort_unstable();
        freqs2.sort_unstable();
        freqs1 == freqs2
    }
}
| true
|
0d7f391f33b65bbff284bae8abd2107af25efd0f
|
Rust
|
yes7rose/async-graphql
|
/async-graphql-actix-web/src/pubsub.rs
|
UTF-8
| 2,094
| 2.734375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use actix::{Actor, Context, Handler, Recipient, Supervised, SystemService};
use async_graphql::Result;
use slab::Slab;
use std::any::Any;
use std::sync::Arc;
/// Message pushed to every subscribed client when something is published.
#[derive(Message)]
#[rtype(result = "std::result::Result<(), ()>")]
pub struct PushMessage(pub Arc<dyn Any + Sync + Send>);

/// Registers a new client; the service responds with its slab id.
#[derive(Message)]
#[rtype(result = "usize")]
struct NewClient {
    recipient: Recipient<PushMessage>,
}

/// Unregisters the client with the given slab id.
#[derive(Message)]
#[rtype(result = "()")]
struct RemoveClient {
    id: usize,
}

/// Broadcast request fanned out to all registered clients.
#[derive(Message)]
#[rtype(result = "()")]
struct PubMessage(Arc<dyn Any + Sync + Send>);

/// Book-keeping for one subscribed client.
struct ClientInfo {
    recipient: Recipient<PushMessage>,
}

/// Actor holding the set of subscribed clients.
#[derive(Default)]
struct PubSubService {
    clients: Slab<ClientInfo>,
}
impl Actor for PubSubService {
    type Context = Context<Self>;
}

impl Handler<NewClient> for PubSubService {
    type Result = usize;

    // Stores the client's recipient and returns its slab key as the id.
    fn handle(&mut self, msg: NewClient, _ctx: &mut Context<Self>) -> Self::Result {
        self.clients.insert(ClientInfo {
            recipient: msg.recipient,
        })
    }
}

impl Handler<RemoveClient> for PubSubService {
    type Result = ();

    fn handle(&mut self, msg: RemoveClient, _ctx: &mut Context<Self>) -> Self::Result {
        self.clients.remove(msg.id);
    }
}

impl Handler<PubMessage> for PubSubService {
    type Result = ();

    // Fans the payload out to every client; delivery errors are ignored so one
    // dead recipient cannot block the broadcast.
    fn handle(&mut self, msg: PubMessage, _ctx: &mut Context<Self>) -> Self::Result {
        for (_, client) in &self.clients {
            client.recipient.do_send(PushMessage(msg.0.clone())).ok();
        }
    }
}

// Registered as a singleton in the actix system registry.
impl Supervised for PubSubService {}
impl SystemService for PubSubService {}
/// Registers `recipient` with the pub/sub service and returns its client id.
pub async fn new_client(recipient: Recipient<PushMessage>) -> Result<usize> {
    let id = PubSubService::from_registry()
        .send(NewClient { recipient })
        .await?;
    Ok(id)
}

/// Removes a previously registered client by id (fire-and-forget).
pub fn remove_client(id: usize) {
    PubSubService::from_registry().do_send(RemoveClient { id });
}

/// Publish a message that will be pushed to all subscribed clients.
pub fn publish_message<T: Any + Send + Sync + Sized>(msg: T) {
    PubSubService::from_registry().do_send(PubMessage(Arc::new(msg)));
}
| true
|
46579cf5ab4c2f06dbde017bbf0d7c9ac8e7c2fa
|
Rust
|
gnoliyil/fuchsia
|
/third_party/rust_crates/vendor/xml-rs-0.8.0/src/reader/parser/outside_tag.rs
|
UTF-8
| 5,825
| 2.640625
| 3
|
[
"BSD-2-Clause",
"MIT"
] |
permissive
|
use common::is_whitespace_char;
use reader::events::XmlEvent;
use reader::lexer::Token;
use super::{
Result, PullParser, State, ClosingTagSubstate, OpeningTagSubstate,
ProcessingInstructionSubstate, DEFAULT_VERSION, DEFAULT_ENCODING, DEFAULT_STANDALONE
};
impl PullParser {
    /// Dispatches a single lexer token while the parser is outside of any
    /// markup construct (i.e. in character-data / document-level position).
    ///
    /// Character data is accumulated in `self.buf`; when a markup token
    /// arrives, the buffer is flushed as a `Characters`/`Whitespace` event
    /// (subject to configuration) before switching to the state that handles
    /// that markup. Note that the order of the match arms matters: the
    /// guarded arms for depth 0 and whitespace must run before the generic
    /// character-data arms.
    pub fn outside_tag(&mut self, t: Token) -> Option<Result> {
        match t {
            Token::ReferenceStart =>
                self.into_state_continue(State::InsideReference(Box::new(State::OutsideTag))),
            Token::Whitespace(_) if self.depth() == 0 => None,  // skip whitespace outside of the root element
            _ if t.contains_char_data() && self.depth() == 0 =>
                Some(self_error!(self; "Unexpected characters outside the root element: {}", t)),
            // Configured to trim whitespace and nothing buffered yet: drop it.
            Token::Whitespace(_) if self.config.trim_whitespace && !self.buf_has_data() => None,
            Token::Whitespace(c) => {
                if !self.buf_has_data() {
                    self.push_pos();
                }
                self.append_char_continue(c)
            }
            _ if t.contains_char_data() => {  // Non-whitespace char data
                if !self.buf_has_data() {
                    self.push_pos();
                }
                self.inside_whitespace = false;
                t.push_to_string(&mut self.buf);
                None
            }
            Token::ReferenceEnd => { // Semi-colon in a text outside an entity
                self.inside_whitespace = false;
                Token::ReferenceEnd.push_to_string(&mut self.buf);
                None
            }
            Token::CommentStart if self.config.coalesce_characters && self.config.ignore_comments => {
                // We need to switch the lexer into a comment mode inside comments
                self.lexer.inside_comment();
                self.into_state_continue(State::InsideComment)
            }
            Token::CDataStart if self.config.coalesce_characters && self.config.cdata_to_characters => {
                if !self.buf_has_data() {
                    self.push_pos();
                }
                // We need to disable lexing errors inside CDATA
                self.lexer.disable_errors();
                self.into_state_continue(State::InsideCData)
            }
            _ => {
                // Encountered some markup event, flush the buffer as characters
                // or a whitespace
                let mut next_event = if self.buf_has_data() {
                    let buf = self.take_buf();
                    if self.inside_whitespace && self.config.trim_whitespace {
                        None
                    } else if self.inside_whitespace && !self.config.whitespace_to_characters {
                        Some(Ok(XmlEvent::Whitespace(buf)))
                    } else if self.config.trim_whitespace {
                        Some(Ok(XmlEvent::Characters(buf.trim_matches(is_whitespace_char).into())))
                    } else {
                        Some(Ok(XmlEvent::Characters(buf)))
                    }
                } else { None };
                self.inside_whitespace = true;  // Reset inside_whitespace flag
                self.push_pos();
                match t {
                    Token::ProcessingInstructionStart =>
                        self.into_state(State::InsideProcessingInstruction(ProcessingInstructionSubstate::PIInsideName), next_event),
                    Token::DoctypeStart if !self.encountered_element => {
                        // We don't have a doctype event so skip this position
                        // FIXME: update when we have a doctype event
                        self.next_pos();
                        self.lexer.disable_errors();
                        self.into_state(State::InsideDoctype, next_event)
                    }
                    Token::OpeningTagStart => {
                        // If declaration was not parsed and we have encountered an element,
                        // emit this declaration as the next event.
                        if !self.parsed_declaration {
                            self.parsed_declaration = true;
                            let sd_event = XmlEvent::StartDocument {
                                version: DEFAULT_VERSION,
                                encoding: DEFAULT_ENCODING.into(),
                                standalone: DEFAULT_STANDALONE
                            };
                            // next_event is always none here because we're outside of
                            // the root element
                            next_event = Some(Ok(sd_event));
                            self.push_pos();
                        }
                        self.encountered_element = true;
                        self.nst.push_empty();
                        self.into_state(State::InsideOpeningTag(OpeningTagSubstate::InsideName), next_event)
                    }
                    Token::ClosingTagStart if self.depth() > 0 =>
                        self.into_state(State::InsideClosingTag(ClosingTagSubstate::CTInsideName), next_event),
                    Token::CommentStart => {
                        // We need to switch the lexer into a comment mode inside comments
                        self.lexer.inside_comment();
                        self.into_state(State::InsideComment, next_event)
                    }
                    Token::CDataStart => {
                        // We need to disable lexing errors inside CDATA
                        self.lexer.disable_errors();
                        self.into_state(State::InsideCData, next_event)
                    }
                    _ => Some(self_error!(self; "Unexpected token: {}", t))
                }
            }
        }
    }
}
| true
|
74d39f3bf79e112f1f8f93ac49eecb71a6fc9a00
|
Rust
|
reinterpretcat/vrp
|
/rosomaxa/src/evolution/mod.rs
|
UTF-8
| 1,182
| 2.703125
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Contains functionality to run evolution simulation.
use crate::prelude::*;
mod config;
pub use self::config::*;
mod simulator;
pub use self::simulator::*;
pub mod telemetry;
pub use self::telemetry::*;
pub mod strategies;
/// Defines evolution result type.
///
/// On success, carries the solutions produced by the run together with
/// optionally collected telemetry metrics.
pub type EvolutionResult<S> = Result<(Vec<S>, Option<TelemetryMetrics>), GenericError>;
/// Provides the way to preprocess context before using it.
pub trait HeuristicContextProcessing {
    /// A heuristic context type.
    type Context: HeuristicContext<Objective = Self::Objective, Solution = Self::Solution>;
    /// A heuristic objective type.
    type Objective: HeuristicObjective<Solution = Self::Solution>;
    /// A solution type.
    type Solution: HeuristicSolution;
    /// Preprocess a context in order to replace usages of a given context with a new one.
    ///
    /// Implementations receive ownership of the context and may return it
    /// modified or return an entirely new context.
    fn pre_process(&self, context: Self::Context) -> Self::Context;
}
/// Provides the way to modify solution before returning it.
pub trait HeuristicSolutionProcessing {
    /// A solution type.
    type Solution: HeuristicSolution;
    /// Post processes solution.
    ///
    /// Implementations receive ownership of the solution and return the
    /// (possibly modified or replaced) solution.
    fn post_process(&self, solution: Self::Solution) -> Self::Solution;
}
| true
|
77f3a8a72316e2fa714dbbcffb4dea27324dfd63
|
Rust
|
aeyakovenko/appendvec
|
/benches/appendvec.rs
|
UTF-8
| 2,473
| 2.78125
| 3
|
[] |
no_license
|
#![cfg_attr(feature = "unstable", feature(test))]
extern crate appendvec;
extern crate rand;
extern crate test;
use appendvec::appendvec::{Account, AppendVec};
use rand::{thread_rng, Rng};
use std::sync::Arc;
use std::thread::sleep;
use std::thread::spawn;
use std::time::Duration;
use test::Bencher;
/// Builds a deterministic test account for index `ix`: `lamports` mirrors the
/// index and `data` holds `ix % 256` bytes, each equal to that length.
fn test_account(ix: usize) -> Account {
    let len = ix % 256;
    Account {
        lamports: ix as u64,
        data: (0..len).map(|_| len as u8).collect(),
    }
}
/// Benchmarks raw append throughput into a fresh 2 GiB backing file.
#[bench]
fn append(bencher: &mut Bencher) {
    let store = AppendVec::new("/tmp/appendvec/bench_append", 2 * 1024 * 1024 * 1024);
    bencher.iter(|| {
        let account = test_account(0);
        assert!(store.append_account(&account).is_some());
    });
}
/// Benchmarks reading accounts back in the order they were appended.
#[bench]
fn sequential_read(bencher: &mut Bencher) {
    let store = AppendVec::new("/tmp/appendvec/bench_ra", 128 * 1024 * 1024);
    let size = 1_000;
    let mut entries = Vec::new();
    for ix in 0..size {
        let account = test_account(ix);
        let position = store.append_account(&account).unwrap();
        entries.push((ix, position));
    }
    bencher.iter(|| {
        // Pop an entry, verify the stored account round-trips, and push it
        // back so that every iteration has work to do.
        let (ix, position) = entries.pop().unwrap();
        let stored = store.get_account(position);
        let expected = test_account(ix);
        assert_eq!(*stored, expected);
        entries.push((ix, position));
    });
}
/// Benchmarks reading accounts back at randomly chosen positions.
#[bench]
fn random_read(bencher: &mut Bencher) {
    let store = AppendVec::new("/tmp/appendvec/bench_rax", 128 * 1024 * 1024);
    let size = 1_000;
    let mut positions = Vec::new();
    for ix in 0..size {
        let account = test_account(ix);
        let position = store.append_account(&account).unwrap();
        positions.push(position);
    }
    bencher.iter(|| {
        // Positions were pushed in index order, so `positions[picked]`
        // corresponds to the account built from index `picked`.
        let picked: usize = thread_rng().gen_range(0, positions.len());
        let position = &positions[picked];
        let stored = store.get_account(*position);
        let expected = test_account(picked);
        assert_eq!(*stored, expected);
    });
}
/// Benchmarks iterating over stored accounts while a detached writer thread
/// keeps appending until the backing storage is full.
#[bench]
fn concurrent_lock_append_read(bencher: &mut Bencher) {
    let store = Arc::new(AppendVec::new(
        "/tmp/appendvec/bench_lock_append_read",
        1024 * 1024,
    ));
    let writer = store.clone();
    spawn(move || loop {
        let account = test_account(0);
        if writer.append_account(&account).is_none() {
            break;
        }
    });
    // Wait until at least one account is visible before benchmarking reads.
    while store.len() == 0 {
        sleep(Duration::from_millis(100));
    }
    bencher.iter(|| {
        // `test_account(0)` produces an empty data payload.
        for account in store.accounts(0) {
            assert_eq!(account.data.len(), 0);
        }
    });
}
| true
|
50695474ddac0fdffa855f4a613d415550526ab7
|
Rust
|
BrandonSchaefer/rust-playground
|
/stack/stack.rs
|
UTF-8
| 1,493
| 3.546875
| 4
|
[] |
no_license
|
mod ds {
    /// Fixed-capacity LIFO stack backed by a ten-element array.
    pub struct Stack {
        // Number of occupied slots; the top element lives at `data[size - 1]`.
        size: usize,
        data: [i32; 10],
    }

    impl Stack {
        /// Creates an empty stack.
        pub fn new() -> Stack {
            Stack { size: 0, data: [0; 10] }
        }

        /// Pushes `elem` onto the stack; panics when the stack is at capacity.
        pub fn push(&mut self, elem: i32) {
            if self.size >= 10 {
                panic!("Stack full!")
            }
            self.data[self.size] = elem;
            self.size += 1;
        }

        /// Removes and returns the top element; panics when the stack is empty.
        pub fn pop(&mut self) -> i32 {
            if self.size == 0 {
                panic!("Stack is empty!");
            }
            self.size -= 1;
            self.data[self.size]
        }

        /// Returns the top element without removing it; panics when empty.
        pub fn top(&self) -> i32 {
            if self.size == 0 {
                panic!("Stack is empty!");
            }
            self.data[self.size - 1]
        }

        /// Returns `true` when no elements are stored.
        pub fn empty(&self) -> bool {
            self.size == 0
        }

        /// Returns the number of elements currently stored.
        pub fn size(&self) -> usize {
            self.size
        }
    }
}
fn main() {
    let mut stack = ds::Stack::new();

    // A fresh stack is empty.
    println!("Empty: {}", stack.empty());
    println!("Size: {}", stack.size());

    for value in [3, 2, 1].iter() {
        stack.push(*value);
    }

    println!("Empty: {}", stack.empty());
    println!("Size: {}", stack.size());

    // Drain the stack, printing each element in LIFO order.
    while !stack.empty() {
        println!("Top: {}", stack.top());
        stack.pop();
    }

    println!("Empty: {}", stack.empty());
}
| true
|
3d15a274fcfc58d56c75d14865d94acd240bab36
|
Rust
|
Aloso/unidok
|
/crates/unidok-parser/src/blocks/lists.rs
|
UTF-8
| 3,111
| 2.5625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use aho_corasick::AhoCorasick;
use unidok_repr::ast::blocks::{Bullet, ListAst};
use crate::parsing_mode::ParsingMode;
use crate::state::ParsingState;
use crate::utils::{ParseLineBreak, ParseLineEnd, ParseNSpaces, ParseSpacesU8, While};
use crate::{Context, Indents, Parse};
use super::ParseBlock;
/// Parser for a bullet or ordered list block.
pub(crate) struct ParseList<'a> {
    // Current indentation context; extended per item via `push_indent`.
    pub ind: Indents<'a>,
    // Optional parsing-mode override inherited from the caller.
    pub mode: Option<ParsingMode>,
    // Shared Aho-Corasick automaton, passed through to nested block parsing.
    pub ac: &'a AhoCorasick,
}
impl Parse for ParseList<'_> {
    type Output = ListAst;
    // Parses the first bullet, then repeatedly parses one block of item
    // content followed by a line break and another bullet of the same kind.
    // Parsing stops at the first line that does not continue the list.
    fn parse(&mut self, input: &mut crate::Input) -> Option<Self::Output> {
        let mut input = input.start();
        let (mut indent_spaces, bullet) = input.parse(ParseBullet { first: true })?;
        let mut items = Vec::new();
        loop {
            // Item content is parsed with the indentation implied by the
            // bullet, so continuation lines belong to the same item.
            let ind = self.ind.push_indent(indent_spaces);
            let content_parser =
                ParseBlock::new_multi(self.mode, ParsingState::new(ind, Context::Global, self.ac));
            items.push(input.parse(content_parser)?);
            if input.parse(ParseLineBreak(self.ind)).is_none() {
                break;
            }
            // Tentatively parse the next bullet; only commit (`apply`) when
            // it is of the same kind, so a different bullet starts a new list.
            let mut input2 = input.start();
            if let Some((is, b)) = input2.parse(ParseBullet { first: false }) {
                if b.kind() == bullet.kind() {
                    indent_spaces = is;
                    input2.apply();
                    continue;
                }
            }
            break;
        }
        input.apply();
        Some(ListAst { indent_spaces, bullet, items })
    }
}
/// Parses a single list bullet (`-`, `+`, `*`, or a number followed by `.` or
/// `)`), yielding the content indentation in spaces together with the bullet.
struct ParseBullet {
    #[allow(unused)]
    first: bool,
}
impl Parse for ParseBullet {
    type Output = (u8, Bullet);
    fn parse(&mut self, input: &mut crate::Input) -> Option<Self::Output> {
        let mut input = input.start();
        let indent = input.parse(ParseSpacesU8)?;
        // Reject excessive indentation so that the `+ 2` / `+ num.len()`
        // additions below cannot overflow the `u8` result.
        if indent > (u8::MAX - 16) {
            return None;
        }
        let result = match input.peek_char() {
            Some('-') => {
                input.bump(1);
                (indent + 2, Bullet::Dash)
            }
            Some('+') => {
                input.bump(1);
                (indent + 2, Bullet::Plus)
            }
            Some('*') => {
                input.bump(1);
                (indent + 2, Bullet::Star)
            }
            Some('0'..='9') => {
                let num = input.parse_i(While(|c: char| c.is_ascii_digit()));
                // Limit to 9 digits so the number always fits in a `u32`.
                if num.len() > 9 {
                    return None;
                }
                let start = num.to_str(&input.text).parse::<u32>().unwrap();
                let bullet = if input.parse('.').is_some() {
                    Bullet::Dot { start }
                } else if input.parse(')').is_some() {
                    Bullet::Paren { start }
                } else {
                    return None;
                };
                (indent + num.len() as u8 + 2, bullet)
            }
            _ => return None,
        };
        // A bullet must be followed by a space or the end of the line.
        if input.parse(ParseNSpaces(1)).is_none() && !input.can_parse(ParseLineEnd) {
            return None;
        }
        input.apply();
        Some(result)
    }
}
| true
|
9cb8aea78b4a4bc77b4c659b7210109ca2b668c6
|
Rust
|
TheCowKingmoo/Learning_Rust
|
/dice_roller/src/user_input.rs
|
UTF-8
| 354
| 4.125
| 4
|
[] |
no_license
|
use std::io;
/// Prompts the user for dice parameters and returns the raw line read from
/// stdin.
///
/// The returned string still contains the trailing newline; callers are
/// expected to trim and parse it themselves.
///
/// # Panics
/// Panics if reading from stdin fails.
pub fn user_input() -> String {
    let mut input = String::new();
    println!("Usage: side on dice, num of times to roll EX: 6, 10 --> rolls 10 six sided dice\n");
    println!("num of sides, num of times to roll");
    // `.ok().expect(...)` discarded the underlying io::Error; calling
    // `expect` on the Result keeps the error in the panic message.
    io::stdin()
        .read_line(&mut input)
        .expect("Couldn't read line\n");
    input
}
| true
|
727031bd3281772e1c280c2cba808525791460b0
|
Rust
|
PhilipAnderson/theon
|
/src/query.rs
|
UTF-8
| 30,409
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
//! Spatial queries.
//!
//! This module provides types and traits for performing spatial queries.
use approx::abs_diff_eq;
use decorum::cmp::IntrinsicOrd;
use decorum::Infinite;
use num::{Bounded, Signed, Zero};
use std::fmt::{self, Debug, Formatter};
use std::ops::Neg;
use typenum::type_operators::Cmp;
use typenum::{Greater, U0, U1, U2};
use crate::adjunct::{Fold, ZipMap};
use crate::ops::Dot;
use crate::space::{
Basis, EuclideanSpace, FiniteDimensional, InnerSpace, Scalar, Vector, VectorSpace,
};
// Intersections are implemented for types with a lesser lexographical order.
// For example, `Intersection` is implemented for `Aabb` before `Plane`, with
// `Plane` having a trivial symmetric implementation.
/// Intersection of geometric objects.
///
/// Determines if a pair of objects intersects and produces data describing the
/// intersection. Each set of objects produces its own intersection data as the
/// `Output` type.
///
/// A symmetrical implementation is provided for heterogeneous pairs:
///
/// ```rust
/// # extern crate nalgebra;
/// # extern crate theon;
/// #
/// # use nalgebra::Point3;
/// # use theon::query::{Intersection, Line, LinePlane, Plane, Unit};
/// # use theon::space::EuclideanSpace;
/// #
/// # type E3 = Point3<f64>;
/// #
/// # let line = Line::<E3> {
/// # origin: EuclideanSpace::origin(),
/// # direction: Unit::x(),
/// # };
/// # let plane = Plane::<E3> {
/// # origin: EuclideanSpace::from_xyz(1.0, 0.0, 0.0),
/// # normal: Unit::x(),
/// # };
/// // These queries are equivalent.
/// if let Some(LinePlane::TimeOfImpact(t)) = line.intersection(&plane) { /* ... */ }
/// if let Some(LinePlane::TimeOfImpact(t)) = plane.intersection(&line) { /* ... */ }
/// ```
///
/// # Examples
///
/// Testing for intersection of an axis-aligned bounding box and a ray:
///
/// ```rust
/// # extern crate nalgebra;
/// # extern crate theon;
/// #
/// use nalgebra::Point2;
/// use theon::query::{Aabb, Intersection, Ray, Unit};
/// use theon::space::{EuclideanSpace, VectorSpace};
///
/// type E2 = Point2<f64>;
///
/// let aabb = Aabb::<E2> {
/// origin: EuclideanSpace::from_xy(1.0, -1.0),
/// extent: VectorSpace::from_xy(2.0, 2.0),
/// };
/// let ray = Ray::<E2> {
/// origin: EuclideanSpace::origin(),
/// direction: Unit::x(),
/// };
/// if let Some((min, max)) = ray.intersection(&aabb) {
/// // ...
/// }
/// ```
pub trait Intersection<T> {
    /// Data describing the intersection of `Self` and `T`.
    type Output;
    /// Returns intersection data, or `None` if the objects do not intersect.
    fn intersection(&self, other: &T) -> Option<Self::Output>;
}
macro_rules! impl_symmetrical_intersection {
    // Unary form: given `$a<S>: Intersection<S>`, also implement
    // `Intersection<$a<S>>` for the point type `S` by delegation.
    ($a:ident $(,)?) => {
        /// Symmetrical intersection.
        impl<S> Intersection<$a<S>> for S
        where
            S: EuclideanSpace,
            $a<S>: Intersection<S>,
        {
            type Output = <$a<S> as Intersection<S>>::Output;
            fn intersection(&self, other: &$a<S>) -> Option<Self::Output> {
                other.intersection(self)
            }
        }
    };
    // Binary form: given `$a<S>: Intersection<$b<S>>`, also implement
    // `Intersection<$a<S>>` for `$b<S>` by delegation.
    ($a:ident, $b:ident $(,)?) => {
        /// Symmetrical intersection.
        impl<S> Intersection<$a<S>> for $b<S>
        where
            S: EuclideanSpace,
            $a<S>: Intersection<$b<S>>,
        {
            type Output = <$a<S> as Intersection<$b<S>>>::Output;
            fn intersection(&self, other: &$a<S>) -> Option<Self::Output> {
                other.intersection(self)
            }
        }
    };
}
/// Unit vector.
///
/// Primarily represents a direction within an `InnerSpace`.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Unit<S>
where
    S: InnerSpace,
{
    // Invariant: `inner` always has unit magnitude (enforced by constructors).
    inner: S,
}
impl<S> Unit<S>
where
    S: InnerSpace,
{
    // Caller must guarantee that `inner` is already normalized.
    fn from_inner_unchecked(inner: S) -> Self {
        Unit { inner }
    }
    /// Creates a `Unit` from a non-zero magnitude vector.
    ///
    /// The given vector is normalized. If the vector's magnitude is zero, then
    /// `None` is returned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # extern crate nalgebra;
    /// # extern crate theon;
    /// #
    /// use nalgebra::Vector3;
    /// use theon::query::Unit;
    /// use theon::space::Basis;
    ///
    /// type R3 = Vector3<f64>;
    ///
    /// let unit = Unit::<R3>::try_from_inner(Basis::i()).unwrap();
    /// ```
    pub fn try_from_inner(inner: S) -> Option<Self> {
        inner.normalize().map(|inner| Unit { inner })
    }
    /// Consumes the `Unit` and returns the underlying (normalized) vector.
    pub fn into_inner(self) -> S {
        self.inner
    }
    /// Unit vector along the first canonical basis vector ($x$).
    pub fn x() -> Self
    where
        S: Basis + FiniteDimensional,
        S::N: Cmp<U0, Output = Greater>,
    {
        Self::from_inner_unchecked(Basis::i())
    }
    /// Unit vector along the second canonical basis vector ($y$).
    pub fn y() -> Self
    where
        S: Basis + FiniteDimensional,
        S::N: Cmp<U1, Output = Greater>,
    {
        Self::from_inner_unchecked(Basis::j())
    }
    /// Unit vector along the third canonical basis vector ($z$).
    pub fn z() -> Self
    where
        S: Basis + FiniteDimensional,
        S::N: Cmp<U2, Output = Greater>,
    {
        Self::from_inner_unchecked(Basis::k())
    }
    /// Borrows the underlying vector.
    pub fn get(&self) -> &S {
        self.as_ref()
    }
    /// Replaces the underlying vector with the normalization of `inner`.
    ///
    /// Returns `None` (leaving the previous value intact) if `inner` cannot
    /// be normalized (i.e. it has zero magnitude).
    #[must_use]
    pub fn try_set(&mut self, inner: S) -> Option<&S> {
        if let Some(inner) = inner.normalize() {
            self.inner = inner;
            Some(&self.inner)
        }
        else {
            None
        }
    }
    /// Negates the direction of the unit vector.
    pub fn reverse(self) -> Self {
        // TODO: This assumes that the `Neg` implementation does not affect
        // magnitude.
        let Unit { inner, .. } = self;
        Self::from_inner_unchecked(-inner)
    }
}
impl<S> AsRef<S> for Unit<S>
where
    S: InnerSpace,
{
    fn as_ref(&self) -> &S {
        &self.inner
    }
}
impl<S> Default for Unit<S>
where
    S: Basis + InnerSpace,
{
    // Defaults to the first canonical basis vector.
    fn default() -> Self {
        Unit {
            inner: S::canonical_basis_component(0).unwrap(),
        }
    }
}
impl<S> Neg for Unit<S>
where
    S: InnerSpace,
{
    type Output = Self;
    fn neg(self) -> Self::Output {
        self.reverse()
    }
}
/// Line.
///
/// Describes a line containing an _origin_ point and a _direction_. Lines
/// extend infinitely from their origin along their direction $\hat{u}$. Unlike
/// `Ray`, the direction component of `Line` extends in both the positive and
/// negative.
///
/// This representation is typically known as the _vector form_ $P_0 +
/// t\hat{u}$ where $t$ is some non-zero _time of impact_.
#[derive(Clone, Copy, PartialEq)]
pub struct Line<S>
where
    S: EuclideanSpace,
{
    /// The origin or contained point of the line.
    pub origin: S,
    /// The unit direction(s) in which the line extends from its origin.
    pub direction: Unit<Vector<S>>,
}
impl<S> Line<S>
where
    S: EuclideanSpace,
{
    /// The $x$-axis: a line through the origin along the first basis vector.
    pub fn x() -> Self
    where
        S: FiniteDimensional,
        S::N: Cmp<U0, Output = Greater>,
    {
        Line {
            origin: S::origin(),
            direction: Unit::x(),
        }
    }
    /// The $y$-axis: a line through the origin along the second basis vector.
    pub fn y() -> Self
    where
        S: FiniteDimensional,
        S::N: Cmp<U1, Output = Greater>,
    {
        Line {
            origin: S::origin(),
            direction: Unit::y(),
        }
    }
    /// The $z$-axis: a line through the origin along the third basis vector.
    pub fn z() -> Self
    where
        S: FiniteDimensional,
        S::N: Cmp<U2, Output = Greater>,
    {
        Line {
            origin: S::origin(),
            direction: Unit::z(),
        }
    }
    /// Converts the line into a ray with the same origin and direction.
    pub fn into_ray(self) -> Ray<S> {
        let Line { origin, direction } = self;
        Ray { origin, direction }
    }
}
// TODO: Provide higher dimensional intercepts, such as the xy-intercept in
// three dimensions.
impl<S> Line<S>
where
    S: EuclideanSpace + FiniteDimensional<N = U2>,
{
    /// The slope of the line in two dimensions.
    ///
    /// Returns `None` for vertical lines (zero $x$ component of direction).
    pub fn slope(&self) -> Option<Scalar<S>> {
        let (x, y) = self.direction.get().into_xy();
        if x.is_zero() {
            None
        }
        else {
            Some(y / x)
        }
    }
    /// The $x$ coordinate at which the line crosses the $x$-axis, if any.
    pub fn x_intercept(&self) -> Option<Scalar<S>> {
        self.intersection(&Line::x())
            .and_then(|embedding| match embedding {
                LineLine::Point(point) => Some(point.into_xy().0),
                _ => None,
            })
    }
    /// The $y$ coordinate at which the line crosses the $y$-axis, if any.
    pub fn y_intercept(&self) -> Option<Scalar<S>> {
        self.intersection(&Line::y())
            .and_then(|embedding| match embedding {
                LineLine::Point(point) => Some(point.into_xy().1),
                _ => None,
            })
    }
}
impl<S> Debug for Line<S>
where
    S: Debug + EuclideanSpace,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        formatter
            .debug_struct("Line")
            .field("origin", &self.origin)
            .field("direction", &self.direction)
            .finish()
    }
}
impl<S> Default for Line<S>
where
    S: EuclideanSpace,
{
    // Defaults to the origin point and the default (first basis) direction.
    fn default() -> Self {
        Line {
            origin: S::origin(),
            direction: Unit::default(),
        }
    }
}
/// Intersection of lines.
#[derive(Clone, Copy, PartialEq)]
pub enum LineLine<S>
where
    S: EuclideanSpace,
{
    // Lines and rays typically produce times of impact for point intersections,
    // but this implementation computes the point. While this is a bit
    // inconsistent, it avoids needing to know from which line the time of
    // impact applies.
    /// The lines cross at a single point.
    Point(S),
    /// The lines are coincident and share every point.
    Line(Line<S>),
}
impl<S> LineLine<S>
where
    S: EuclideanSpace,
{
    /// Returns the intersection point, or `None` for the coincident case.
    pub fn into_point(self) -> Option<S> {
        match self {
            LineLine::Point(point) => Some(point),
            _ => None,
        }
    }
    /// Returns the shared line, or `None` for the point case.
    pub fn into_line(self) -> Option<Line<S>> {
        match self {
            LineLine::Line(line) => Some(line),
            _ => None,
        }
    }
}
impl<S> Debug for LineLine<S>
where
    S: Debug + EuclideanSpace,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        match *self {
            LineLine::Point(point) => write!(formatter, "Point({:?})", point),
            LineLine::Line(line) => write!(formatter, "Line({:?})", line),
        }
    }
}
// TODO: Though higher dimensional intersections are probably less useful,
// consider a more general implementation. This could use projection into
// two dimensions followed by confirmation in the higher dimension.
/// Intersection of lines in two dimensions.
impl<S> Intersection<Line<S>> for Line<S>
where
    S: EuclideanSpace + FiniteDimensional<N = U2>,
{
    type Output = LineLine<S>;
    fn intersection(&self, other: &Line<S>) -> Option<Self::Output> {
        let (x1, y1) = if (self.origin - other.origin).is_zero() {
            // Detect like origins and avoid zeroes in the numerator by
            // translating the origin.
            (self.origin + *self.direction.get()).into_xy()
        }
        else {
            self.origin.into_xy()
        };
        let (u1, v1) = self.direction.get().into_xy();
        let (x2, y2) = other.origin.into_xy();
        let (u2, v2) = other.direction.get().into_xy();
        // The denominator is the two-dimensional cross product of the
        // direction vectors; it is zero exactly when the lines are parallel.
        let numerator = (u2 * (y1 - y2)) - (v2 * (x1 - x2));
        let denominator = (v2 * u1) - (u2 * v1);
        match (numerator.is_zero(), denominator.is_zero()) {
            // Parallel and coincident: the intersection is the line itself.
            (true, true) => Some(LineLine::Line(*self)),
            // Parallel but distinct: no intersection.
            (false, true) => None,
            // Non-parallel: solve for the point of intersection.
            _ => {
                let quotient = numerator / denominator;
                Some(LineLine::Point(S::from_xy(
                    x1 + (quotient * u1),
                    y1 + (quotient * v1),
                )))
            }
        }
    }
}
/// Intersection of a line and a plane.
#[derive(Clone, Copy, PartialEq)]
pub enum LinePlane<S>
where
    S: EuclideanSpace,
{
    /// The line crosses the plane at a single point, at the given distance
    /// from the line's origin.
    TimeOfImpact(Scalar<S>),
    /// The line lies entirely within the plane.
    Line(Line<S>),
}
impl<S> LinePlane<S>
where
    S: EuclideanSpace,
{
    /// Returns the time of impact, or `None` for the in-plane case.
    pub fn into_time_of_impact(self) -> Option<Scalar<S>> {
        match self {
            LinePlane::TimeOfImpact(time) => Some(time),
            _ => None,
        }
    }
    /// Returns the in-plane line, or `None` for the point case.
    pub fn into_line(self) -> Option<Line<S>> {
        match self {
            LinePlane::Line(line) => Some(line),
            _ => None,
        }
    }
}
impl<S> Debug for LinePlane<S>
where
    S: Debug + EuclideanSpace,
    Scalar<S>: Debug,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        match *self {
            LinePlane::TimeOfImpact(x) => write!(formatter, "TimeOfImpact({:?})", x),
            LinePlane::Line(line) => write!(formatter, "Line({:?})", line),
        }
    }
}
/// Intersection of a line and a plane.
impl<S> Intersection<Plane<S>> for Line<S>
where
    S: EuclideanSpace + FiniteDimensional,
    <S as FiniteDimensional>::N: Cmp<U2, Output = Greater>,
{
    /// The _time of impact_ of a point intersection or the line if it lies
    /// within the plane.
    ///
    /// The time of impact $t$ describes the distance from the line's origin
    /// point at which the intersection occurs.
    type Output = LinePlane<S>;
    /// Determines if a line intersects a plane at a point or lies within the
    /// plane. Computes the _time of impact_ of a `Line` for a point
    /// intersection.
    ///
    /// Given a line formed from an origin $P_0$ and a unit direction
    /// $\hat{u}$, the point of intersection with the plane is $P_0 +
    /// t\hat{u}$.
    fn intersection(&self, plane: &Plane<S>) -> Option<Self::Output> {
        let line = self;
        let direction = *line.direction.get();
        let normal = *plane.normal.get();
        // Dot product of two unit vectors; zero means the line is
        // perpendicular to the normal, i.e. parallel to the plane.
        let orientation = direction.dot(normal);
        if abs_diff_eq!(orientation, Zero::zero()) {
            // The line and plane are parallel.
            if abs_diff_eq!((plane.origin - line.origin).dot(normal), Zero::zero()) {
                // The line's origin lies on the plane, so the entire line does.
                Some(LinePlane::Line(*line))
            }
            else {
                None
            }
        }
        else {
            // The line and plane are not parallel and must intersect at a
            // point.
            Some(LinePlane::TimeOfImpact(
                (plane.origin - line.origin).dot(normal) / orientation,
            ))
        }
    }
}
impl_symmetrical_intersection!(Line, Plane);
/// Ray or half-line.
///
/// Describes a decomposed line with an _origin_ or _initial point_ and a
/// _direction_. Rays extend infinitely from their origin. The origin $P_0$ and
/// the point $P_0 + \hat{u}$ (where $\hat{u}$ is the direction of the ray)
/// form a half-line originating from $P_0$.
#[derive(Clone, Copy, PartialEq)]
pub struct Ray<S>
where
    S: EuclideanSpace,
{
    /// The origin or initial point of the ray.
    pub origin: S,
    /// The unit direction in which the ray extends from its origin.
    pub direction: Unit<Vector<S>>,
}
impl<S> Ray<S>
where
    S: EuclideanSpace,
{
    /// Converts the ray into the full line sharing its origin and direction.
    pub fn into_line(self) -> Line<S> {
        let Ray { origin, direction } = self;
        Line { origin, direction }
    }
    /// Reverses the direction of the ray.
    ///
    /// Reversing a ray yields its _opposite_, with the same origin and the
    /// opposing half-line.
    pub fn reverse(self) -> Self {
        let Ray { origin, direction } = self;
        Ray {
            origin,
            direction: Unit::from_inner_unchecked(-direction.into_inner()),
        }
    }
}
impl<S> Debug for Ray<S>
where
    S: Debug + EuclideanSpace,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        formatter
            .debug_struct("Ray")
            .field("origin", &self.origin)
            .field("direction", &self.direction)
            .finish()
    }
}
impl<S> Default for Ray<S>
where
    S: EuclideanSpace,
{
    // Defaults to the origin point and the default (first basis) direction.
    fn default() -> Self {
        Ray {
            origin: S::origin(),
            direction: Unit::default(),
        }
    }
}
impl<S> Neg for Ray<S>
where
    S: EuclideanSpace,
{
    type Output = Self;
    // Negation is equivalent to `reverse`.
    fn neg(self) -> Self::Output {
        self.reverse()
    }
}
/// Axis-aligned bounding box.
///
/// Represents an $n$-dimensional volume along each basis vector of a Euclidean
/// space. The bounding box is defined by the region between its _origin_ and
/// _endpoint_.
#[derive(Clone, Copy, PartialEq)]
pub struct Aabb<S>
where
    S: EuclideanSpace,
{
    /// The _origin_ of the bounding box.
    ///
    /// The origin does **not** necessarily represent the lower or upper bound
    /// of the `Aabb`. See `lower_bound` and `upper_bound`.
    pub origin: S,
    /// The _extent_ of the bounding box.
    ///
    /// The extent describes the endpoint as a translation from the origin. The
    /// endpoint $P_E$ is formed by $P_0 + \vec{v}$, where $P_0$ is the origin
    /// and $\vec{v}$ is the extent.
    pub extent: Vector<S>,
}

impl<S> Aabb<S>
where
    S: EuclideanSpace,
{
    /// Creates an `Aabb` from a set of points.
    ///
    /// The bounding box is formed from the lower and upper bounds of the
    /// points. If the set of points is empty, then the `Aabb` will sit at the
    /// origin with zero volume.
    pub fn from_points<I>(points: I) -> Self
    where
        I: IntoIterator<Item = S>,
        Scalar<S>: IntrinsicOrd,
    {
        // Seed the bounds with the first point rather than the origin;
        // otherwise the resulting box would always (incorrectly) contain the
        // origin even when no input point is near it.
        let mut points = points.into_iter();
        let (mut min, mut max) = match points.next() {
            Some(point) => (point, point),
            _ => (S::origin(), S::origin()),
        };
        for point in points {
            min = min.per_item_min_or_undefined(point);
            max = max.per_item_max_or_undefined(point);
        }
        Aabb {
            origin: min,
            extent: max - min,
        }
    }

    /// Returns the _endpoint_ of the box: the origin translated by the extent.
    pub fn endpoint(&self) -> S {
        self.origin + self.extent
    }

    /// Returns the per-component upper bound of the box.
    pub fn upper_bound(&self) -> S
    where
        Scalar<S>: IntrinsicOrd,
    {
        self.origin.per_item_max_or_undefined(self.endpoint())
    }

    /// Returns the per-component lower bound of the box.
    pub fn lower_bound(&self) -> S
    where
        Scalar<S>: IntrinsicOrd,
    {
        self.origin.per_item_min_or_undefined(self.endpoint())
    }

    /// Gets the Lebesgue measure ($n$-dimensional volume) of the bounding box.
    ///
    /// This value is analogous to _length_, _area_, and _volume_ in one, two,
    /// and three dimensions, respectively.
    pub fn volume(&self) -> Scalar<S> {
        self.origin
            .zip_map(self.endpoint(), |a, b| (a - b).abs())
            .product()
    }

    /// Returns the smallest `Aabb` containing both `self` and `aabb`.
    pub fn union(&self, aabb: &Self) -> Self
    where
        Scalar<S>: IntrinsicOrd,
    {
        let origin = self
            .lower_bound()
            .per_item_min_or_undefined(aabb.lower_bound());
        let extent = self
            .upper_bound()
            .per_item_max_or_undefined(aabb.upper_bound())
            - origin;
        Aabb { origin, extent }
    }
}

impl<S> Debug for Aabb<S>
where
    S: Debug + EuclideanSpace,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        formatter
            .debug_struct("Aabb")
            .field("origin", &self.origin)
            .field("extent", &self.extent)
            .finish()
    }
}

impl<S> Default for Aabb<S>
where
    S: EuclideanSpace,
{
    // Defaults to a zero-volume box at the origin.
    fn default() -> Self {
        Aabb {
            origin: S::origin(),
            extent: Vector::<S>::zero(),
        }
    }
}
/// Intersection of an axis-aligned bounding box and a point.
impl<S> Intersection<S> for Aabb<S>
where
    S: EuclideanSpace,
    Scalar<S>: IntrinsicOrd + Signed,
{
    /// The translation of the point from the box's lower bound.
    type Output = Vector<S>;
    fn intersection(&self, point: &S) -> Option<Self::Output> {
        let aabb = self;
        // Clamp the point against both bounds: the two clamped points agree
        // exactly when the point lies within the box on every axis.
        let lower = aabb.lower_bound().per_item_max_or_undefined(*point);
        let upper = aabb.upper_bound().per_item_min_or_undefined(*point);
        if lower == upper {
            Some(*point - aabb.lower_bound())
        }
        else {
            None
        }
    }
}
impl_symmetrical_intersection!(Aabb);
/// Intersection of axis-aligned bounding boxes.
impl<S> Intersection<Aabb<S>> for Aabb<S>
where
    S: EuclideanSpace,
    Scalar<S>: IntrinsicOrd + Signed,
{
    /// The overlapping region of the boxes, as an `Aabb`.
    type Output = Self;
    fn intersection(&self, other: &Aabb<S>) -> Option<Self::Output> {
        // The overlap (if any) spans from the greatest lower bound to the
        // least upper bound of the two boxes.
        let max_lower_bound = self
            .lower_bound()
            .per_item_max_or_undefined(other.lower_bound());
        let min_upper_bound = self
            .upper_bound()
            .per_item_min_or_undefined(other.upper_bound());
        let difference = min_upper_bound - max_lower_bound;
        // A strictly positive, well-defined extent is required along every
        // axis; otherwise the boxes are disjoint (or touch only at a border).
        if difference.all(|x| (!x.is_undefined()) && x.is_positive()) {
            Some(Aabb {
                origin: max_lower_bound,
                extent: difference,
            })
        }
        else {
            None
        }
    }
}
/// Intersection of an axis-aligned bounding box and a ray.
impl<S> Intersection<Ray<S>> for Aabb<S>
where
    S: EuclideanSpace,
    Scalar<S>: Bounded + Infinite + IntrinsicOrd + Signed,
{
    /// The minimum and maximum _times of impact_ of the intersection.
    ///
    /// The times of impact $t_{min}$ and $t_{max}$ describe the distance along
    /// the half-line from the ray's origin at which the intersection occurs.
    type Output = (Scalar<S>, Scalar<S>);
    /// Determines the minimum and maximum _times of impact_ of a `Ray`
    /// intersection with an `Aabb`.
    ///
    /// Given a ray formed by an origin $P_0$ and a unit direction $\hat{u}$,
    /// the nearest point of intersection is $P_0 + t_{min}\hat{u}$.
    ///
    /// # Examples
    ///
    /// Determine the point of impact between a ray and axis-aligned bounding box:
    ///
    /// ```rust
    /// # extern crate nalgebra;
    /// # extern crate theon;
    /// #
    /// use nalgebra::Point2;
    /// use theon::space::{EuclideanSpace, VectorSpace};
    /// use theon::query::{Aabb, Intersection, Ray, Unit};
    ///
    /// type E2 = Point2<f64>;
    ///
    /// let aabb = Aabb::<E2> {
    ///     origin: EuclideanSpace::from_xy(1.0, -1.0),
    ///     extent: VectorSpace::from_xy(2.0, 2.0),
    /// };
    /// let ray = Ray::<E2> {
    ///     origin: EuclideanSpace::origin(),
    ///     direction: Unit::x(),
    /// };
    /// let (min, _) = ray.intersection(&aabb).unwrap();
    /// let point = ray.origin + (ray.direction.get() * min);
    /// ```
    fn intersection(&self, ray: &Ray<S>) -> Option<Self::Output> {
        // Avoid computing `NaN`s. Note that multiplying by the inverse (instead
        // of dividing) avoids dividing zero by zero, but does not avoid
        // multiplying zero by infinity.
        let pdiv = |a: Scalar<S>, b: Scalar<S>| {
            if abs_diff_eq!(a, Zero::zero()) {
                a
            }
            else {
                a / b
            }
        };
        let aabb = self;
        let direction = *ray.direction.get();
        // Per-axis times at which the ray crosses the planes through the
        // box's origin and endpoint.
        let origin = (aabb.origin - ray.origin).zip_map(direction, pdiv);
        let endpoint = ((aabb.endpoint()) - ray.origin).zip_map(direction, pdiv);
        // The entry time is the latest per-axis minimum; the exit time is the
        // earliest per-axis maximum.
        let min = origin
            .per_item_min_or_undefined(endpoint)
            .max_or_undefined();
        let max = origin
            .per_item_max_or_undefined(endpoint)
            .min_or_undefined();
        // Reject intersections entirely behind the ray and empty or
        // undefined overlap intervals.
        if max.is_negative() || min > max || min.is_undefined() || max.is_undefined() {
            None
        }
        else {
            Some((min, max))
        }
    }
}
impl_symmetrical_intersection!(Aabb, Ray);
//impl<S> PartialEq for Aabb<S>
//where
// S: EuclideanSpace,
//{
//}
/// Plane.
///
/// Describes a plane by a contained point (`origin`) and a unit `normal`.
#[derive(Clone)]
pub struct Plane<S>
where
    S: EuclideanSpace,
{
    /// A point contained by the plane.
    pub origin: S,
    /// The unit normal of the plane.
    pub normal: Unit<Vector<S>>,
}
impl<S> Copy for Plane<S>
where
    S: EuclideanSpace,
    Vector<S>: Copy,
{
}
impl<S> Debug for Plane<S>
where
    S: Debug + EuclideanSpace,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        formatter
            .debug_struct("Plane")
            .field("origin", &self.origin)
            .field("normal", &self.normal)
            .finish()
    }
}
/// Intersection of a plane and a ray.
#[derive(Clone, Copy, PartialEq)]
pub enum PlaneRay<S>
where
    S: EuclideanSpace,
{
    /// The ray hits the plane at a single point, at the given distance from
    /// the ray's origin.
    TimeOfImpact(Scalar<S>),
    /// The ray lies entirely within the plane.
    Ray(Ray<S>),
}
impl<S> PlaneRay<S>
where
    S: EuclideanSpace,
{
    /// Returns the time of impact, or `None` for the in-plane case.
    pub fn into_time_of_impact(self) -> Option<Scalar<S>> {
        match self {
            PlaneRay::TimeOfImpact(time) => Some(time),
            _ => None,
        }
    }
    /// Returns the in-plane ray, or `None` for the point case.
    pub fn into_ray(self) -> Option<Ray<S>> {
        match self {
            PlaneRay::Ray(ray) => Some(ray),
            _ => None,
        }
    }
}
impl<S> Debug for PlaneRay<S>
where
    S: Debug + EuclideanSpace,
    Scalar<S>: Debug,
    Vector<S>: Debug,
{
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        match *self {
            PlaneRay::TimeOfImpact(x) => write!(formatter, "TimeOfImpact({:?})", x),
            PlaneRay::Ray(ray) => write!(formatter, "Ray({:?})", ray),
        }
    }
}
/// Intersection of a plane and a ray.
impl<S> Intersection<Ray<S>> for Plane<S>
where
    S: EuclideanSpace + FiniteDimensional,
    <S as FiniteDimensional>::N: Cmp<U2, Output = Greater>,
    Scalar<S>: Signed,
{
    /// The _time of impact_ of a point intersection or the ray if it lies
    /// within the plane.
    ///
    /// The time of impact $t$ describes the distance along the half-line from
    /// the ray's origin at which the intersection occurs.
    type Output = PlaneRay<S>;
    /// Determines if a ray intersects a plane at a point or lies within the
    /// plane. Computes the _time of impact_ of a `Ray` for a point
    /// intersection.
    ///
    /// Given a ray formed by an origin $P_0$ and a unit direction $\hat{u}$,
    /// the point of intersection with the plane is $P_0 + t\hat{u}$.
    fn intersection(&self, ray: &Ray<S>) -> Option<Self::Output> {
        let plane = self;
        // Delegate to the line-plane query and keep only hits on the ray's
        // half-line: a strictly positive time of impact. A hit exactly at
        // the ray's origin ($t = 0$) yields `None`.
        ray.into_line()
            .intersection(plane)
            .and_then(|embedding| match embedding {
                LinePlane::TimeOfImpact(t) => {
                    if t.is_positive() {
                        Some(PlaneRay::TimeOfImpact(t))
                    }
                    else {
                        None
                    }
                }
                LinePlane::Line(_) => Some(PlaneRay::Ray(*ray)),
            })
    }
}
impl_symmetrical_intersection!(Plane, Ray);
// Intersection tests; only built when the nalgebra backend is enabled, since
// the concrete `Point2`/`Point3` spaces come from nalgebra.
#[cfg(all(test, feature = "geometry-nalgebra"))]
mod tests {
    use decorum::N64;
    use nalgebra::{Point2, Point3};
    use crate::adjunct::Converged;
    use crate::query::{Aabb, Intersection, Line, LineLine, Plane, PlaneRay, Ray, Unit};
    use crate::space::{EuclideanSpace, Vector, VectorSpace};
    // Shorthand for the 2- and 3-dimensional Euclidean test spaces.
    type E2 = Point2<f64>;
    type E3 = Point3<f64>;
    #[test]
    fn aabb_aabb_intersection_e2() {
        let aabb1 = Aabb::<E2> {
            origin: EuclideanSpace::origin(),
            extent: Converged::converged(2.0),
        };
        let aabb2 = Aabb::<E2> {
            origin: Converged::converged(1.0),
            extent: Converged::converged(2.0),
        };
        // Self-intersection returns the box itself.
        assert_eq!(Some(aabb1), aabb1.intersection(&aabb1));
        assert_eq!(
            Some(Aabb::<E2> {
                origin: Converged::converged(1.0),
                extent: Converged::converged(1.0),
            }),
            aabb1.intersection(&aabb2),
        );
        // Disjoint boxes do not intersect.
        let aabb2 = Aabb::<E2> {
            origin: Converged::converged(-3.0),
            extent: Converged::converged(2.0),
        };
        assert_eq!(None, aabb1.intersection(&aabb2));
    }
    #[test]
    fn aabb_point_intersection_e2() {
        let aabb = Aabb::<E2> {
            origin: EuclideanSpace::origin(),
            extent: Converged::converged(2.0),
        };
        let point = E2::converged(1.0);
        assert_eq!(
            Some(Vector::<E2>::converged(1.0)),
            aabb.intersection(&point),
        );
        let point = E2::converged(3.0);
        assert_eq!(None, aabb.intersection(&point));
    }
    #[test]
    fn aabb_ray_intersection_e2() {
        let aabb = Aabb::<E2> {
            origin: EuclideanSpace::origin(),
            extent: Converged::converged(1.0),
        };
        let ray = Ray::<E2> {
            origin: EuclideanSpace::from_xy(-1.0, 0.5),
            direction: Unit::x(),
        };
        // Enters the box at t = 1 and exits at t = 2.
        assert_eq!(Some((1.0, 2.0)), ray.intersection(&aabb));
        assert_eq!(None, ray.reverse().intersection(&aabb));
    }
    #[test]
    fn aabb_ray_intersection_e3() {
        let aabb = Aabb::<E3> {
            origin: EuclideanSpace::origin(),
            extent: Converged::converged(1.0),
        };
        let ray = Ray::<E3> {
            origin: EuclideanSpace::from_xyz(-1.0, 0.5, 0.5),
            direction: Unit::x(),
        };
        assert_eq!(Some((1.0, 2.0)), ray.intersection(&aabb));
        assert_eq!(None, ray.reverse().intersection(&aabb));
    }
    // Ensure that certain values do not produce `NaN`s when querying the
    // intersection of `Aabb` and `Ray`.
    #[test]
    fn aabb_ray_intersection_nan() {
        let aabb = Aabb::<Point2<N64>> {
            origin: EuclideanSpace::origin(),
            extent: Converged::converged(1.0.into()),
        };
        let ray = Ray::<Point2<N64>> {
            origin: EuclideanSpace::origin(),
            direction: Unit::x(),
        };
        assert_eq!(Some((0.0.into(), 1.0.into())), ray.intersection(&aabb));
    }
    #[test]
    fn line_line_intersection_e2() {
        let line = Line::<E2>::x();
        // Perpendicular axes meet at the origin; a line intersected with
        // itself yields the whole line.
        assert_eq!(
            Some(LineLine::Point(E2::origin())),
            line.intersection(&Line::y()),
        );
        assert_eq!(Some(LineLine::Line(line)), line.intersection(&Line::x()));
        let line1 = Line::<E2> {
            origin: E2::origin(),
            direction: Unit::try_from_inner(Converged::converged(1.0)).unwrap(),
        };
        let line2 = Line::<E2> {
            origin: E2::from_xy(2.0, 0.0),
            direction: Unit::try_from_inner(Vector::<E2>::from_xy(-1.0, 1.0)).unwrap(),
        };
        assert_eq!(
            Some(LineLine::Point(Converged::converged(1.0))),
            line1.intersection(&line2),
        );
        // Parallel but distinct lines never intersect.
        let line1 = Line::<E2>::x();
        let line2 = Line::<E2> {
            origin: E2::from_xy(0.0, 1.0),
            direction: Unit::x(),
        };
        assert_eq!(None, line1.intersection(&line2));
    }
    #[test]
    fn plane_ray_intersection_e3() {
        let plane = Plane::<E3> {
            origin: EuclideanSpace::from_xyz(0.0, 0.0, 1.0),
            normal: Unit::z(),
        };
        let ray = Ray::<E3> {
            origin: EuclideanSpace::origin(),
            direction: Unit::z(),
        };
        assert_eq!(Some(PlaneRay::TimeOfImpact(1.0)), ray.intersection(&plane));
        // A ray pointing away from the plane misses it.
        assert_eq!(None, ray.reverse().intersection(&plane));
    }
}
| true
|
2ae171557e7b7d0aa83e2c53989d3cdf1ae8436a
|
Rust
|
sourcefrog/aoc2018
|
/src/bin/aoc06b.rs
|
UTF-8
| 3,031
| 3.59375
| 4
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! https://adventofcode.com/2018/day/6
//!
//! For every point, calculate the distance to every landing, stopping if
//! we get above the limit. If we complete before getting to the limit,
//! that point counts.
use std::io;
use std::io::prelude::*;
pub fn main() {
    // Parse one landing-point coordinate per stdin line, formatted "x, y".
    let pts: Vec<Point> = io::stdin()
        .lock()
        .lines()
        .map(Result::unwrap)
        .map(|s| Point::from_string(&s))
        .collect();
    let m = Map::from_points(pts);
    // Puzzle parameter: count cells whose summed distance to every landing
    // stays below 10 000.
    const N: i32 = 10_000;
    println!("largest within {}: {}", N, m.count_within_distance(N));
}
/// Grid coordinates are small signed integers.
type Coord = i32;
/// A location on the grid: either a landing point or a probed cell.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Point {
    x: Coord,
    y: Coord,
}
/// The bounding grid plus the list of landing points.
#[derive(Clone, Debug, PartialEq)]
struct Map {
    // For simplicity addressing is zero-based even though that may leave
    // some empty space to the top-left.
    w: Coord,
    h: Coord,
    ls: Vec<Point>,
}
impl Point {
    /// Parses an `"x, y"` pair as found in the puzzle input.
    pub fn from_string(s: &str) -> Point {
        let mut fields = s.split(", ").map(|field| field.parse().unwrap());
        let x = fields.next().unwrap();
        let y = fields.next().unwrap();
        Point { x, y }
    }
    /// Manhattan distance between two points.
    fn abs_difference(&self, other: &Point) -> Coord {
        let dx = (self.x - other.x).abs();
        let dy = (self.y - other.y).abs();
        dx + dy
    }
}
impl Map {
    /// Make a new map that will fit all these points.
    pub fn from_points(points: Vec<Point>) -> Map {
        let max_x = points.iter().map(|p| p.x).max().unwrap();
        let max_y = points.iter().map(|p| p.y).max().unwrap();
        Map {
            w: max_x + 2,
            h: max_y + 2,
            ls: points,
        }
    }
    /// Counts grid cells whose total Manhattan distance to every landing is
    /// strictly below `limit`.
    fn count_within_distance(&self, limit: i32) -> u32 {
        let (w, h) = (self.w, self.h);
        (0..h)
            .flat_map(|y| (0..w).map(move |x| Point { x, y }))
            .filter(|cell| {
                let mut total = 0;
                for landing in &self.ls {
                    total += cell.abs_difference(landing);
                    if total > limit {
                        // Bail out early: the running sum can only grow.
                        return false;
                    }
                }
                total < limit
            })
            .count() as u32
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // The worked example from the day-6 puzzle statement: with a limit of 32,
    // exactly 16 cells are close enough to all six landings.
    #[test]
    fn simple() {
        let pts: Vec<_> = [(1, 1), (1, 6), (8, 3), (3, 4), (5, 5), (8, 9)]
            .iter()
            .map(|(x, y)| Point { x: *x, y: *y })
            .collect();
        let m = Map::from_points(pts);
        println!("{:?}", &m);
        assert_eq!(m.count_within_distance(32), 16);
    }
}
}
| true
|
d544074eea6308d565f0da34af60aec83a2ed6ea
|
Rust
|
Xetera/ginkou-api
|
/src/language.rs
|
UTF-8
| 9,575
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::File;
use std::io;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::string::FromUtf8Error;
extern crate dirs;
#[macro_use]
use rusqlite::params;
use rusqlite::Connection;
use structopt;
use structopt::StructOpt;
use mecab;
use mecab::Tagger;
const DAKUTEN_BYTES: [u8; 3] = [227, 128, 130];
const SQL_ADD_SENTENCE: &'static str = include_str!("sql/add_sentence.sql");
const SQL_ADD_WORD_JUNCTION: &'static str = include_str!("sql/add_word_junction.sql");
const SQL_ADD_WORD: &'static str = include_str!("sql/add_word.sql");
const SQL_ALL_WORD_SENTENCES: &'static str = include_str!("sql/all_word_sentences.sql");
const SQL_BEST_WORD_SENTENCES: &'static str = include_str!("sql/best_word_sentences.sql");
const SQL_SETUP: &'static str = include_str!("sql/setup.sql");
/// Error raised while extracting sentences from a byte stream: either the
/// collected bytes were not valid UTF-8, or the underlying reader failed.
#[derive(Debug)]
enum SentenceError {
    Utf8(FromUtf8Error),
    IO(io::Error),
}
// Enables `?`/`.into()` on UTF-8 conversion failures inside the iterator.
impl From<FromUtf8Error> for SentenceError {
    fn from(err: FromUtf8Error) -> Self {
        SentenceError::Utf8(err)
    }
}
// Enables `?`/`.into()` on reader errors inside the iterator.
impl From<io::Error> for SentenceError {
    fn from(err: io::Error) -> Self {
        SentenceError::IO(err)
    }
}
/// Iterator that splits a byte stream into `。`-terminated sentences.
struct Sentences<R> {
    bytes: io::Bytes<R>, // remaining input, consumed one byte at a time
    done: bool,          // set once the reader is exhausted
}
impl<B: io::BufRead> Iterator for Sentences<B> {
    type Item = Result<String, SentenceError>;
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }
        let mut buf = Vec::new();
        // Scan raw bytes until the 3-byte UTF-8 sequence in DAKUTEN_BYTES
        // (E3 80 82, i.e. `。`, the ideographic full stop — the const name
        // "DAKUTEN" is a misnomer) has been matched in full.
        let mut match_index = 0;
        while match_index < 3 {
            let byte = match self.bytes.next() {
                None => break,
                Some(Err(e)) => return Some(Err(e.into())),
                Some(Ok(b)) => b,
            };
            buf.push(byte);
            if byte == DAKUTEN_BYTES[match_index] {
                match_index += 1;
            } else {
                // Restart the pattern match. This does not re-test the
                // current byte against position 0, which is safe for valid
                // UTF-8 because E3 never appears as a continuation byte.
                match_index = 0;
            }
        }
        // Nothing read at all: the stream is exhausted.
        if buf.len() == 0 {
            self.done = true;
            return None;
        }
        // Validate the collected bytes and strip all whitespace (the corpus
        // wraps sentences across lines).
        let next = String::from_utf8(buf).map_err(SentenceError::from);
        Some(next.map(|x| x.replace(|x: char| x.is_whitespace(), "")))
    }
}
/// Adapts a buffered reader into an iterator over `。`-terminated sentences.
fn sentences<R: io::BufRead>(reader: R) -> Sentences<R> {
    let bytes = reader.bytes();
    Sentences { done: false, bytes }
}
/// Applies the bundled schema to a freshly created database.
fn create_tables(conn: &Connection) -> rusqlite::Result<()> {
    conn.execute_batch(SQL_SETUP)
}
/// Opens (and, on first use, initializes) the database at `path`.
pub fn conn_from_disk<P: AsRef<Path>>(path: P) -> rusqlite::Result<Connection> {
    // Check existence before opening: SQLite creates the file on open, so
    // this is how we know whether the schema still needs to be applied.
    let existed = path.as_ref().exists();
    let conn = Connection::open(path)?;
    if !existed {
        create_tables(&conn)?;
    }
    Ok(conn)
}
/// Opens a fresh in-memory database with the schema applied (used by tests).
fn conn_from_memory() -> rusqlite::Result<Connection> {
    let conn = Connection::open_in_memory()?;
    create_tables(&conn)?;
    Ok(conn)
}
/// Inserts a sentence and returns its freshly assigned row id.
fn add_sentence(conn: &Connection, sentence: &str) -> rusqlite::Result<u32> {
    conn.execute(SQL_ADD_SENTENCE, params![sentence])?;
    Ok(conn.last_insert_rowid() as u32)
}
/// Records a word and links it to the sentence it occurred in via the
/// junction table.
fn add_word(conn: &Connection, word: &str, sentence_id: u32) -> rusqlite::Result<()> {
    conn.execute(SQL_ADD_WORD, params![word])?;
    conn.execute(SQL_ADD_WORD_JUNCTION, params![word, sentence_id])?;
    Ok(())
}
/// Returns the sentences containing `word`.
///
/// With `all` set, every matching sentence is returned; otherwise the
/// restricted `SQL_BEST_WORD_SENTENCES` query is used (per the CLI help,
/// the shortest 200 results).
pub fn matching_word(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<Vec<String>> {
    let query = if all {
        SQL_ALL_WORD_SENTENCES
    } else {
        SQL_BEST_WORD_SENTENCES
    };
    let mut stmt = conn.prepare_cached(query)?;
    // Collect straight into a Result, short-circuiting on the first row error
    // instead of pushing into a manual buffer.
    let rows = stmt.query_map(params![word], |row| row.get(0))?;
    rows.collect()
}
/// Prints each sentence containing `word` on its own line to stdout.
///
/// This will ignore broken pipes, to support unix piping into things like
/// `head`; any other write error still panics.
fn print_matching_words(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<()> {
    let query = if all {
        SQL_ALL_WORD_SENTENCES
    } else {
        SQL_BEST_WORD_SENTENCES
    };
    let mut stmt = conn.prepare_cached(query)?;
    let results = stmt.query_map(params![word], |row| row.get(0))?;
    for r in results {
        let r: String = r?;
        if let Err(e) = write!(io::stdout(), "{}\n", r) {
            if e.kind() != io::ErrorKind::BrokenPipe {
                // Fixed: `panic!(e)` with a non-string payload is rejected in
                // Rust 2021 (and linted long before); format the error.
                panic!("{}", e);
            }
        }
    }
    Ok(())
}
/// Stores `trimmed` as a sentence and indexes each word MeCab finds in it,
/// keyed by the feature the code treats as the word's root form.
fn consume_trimmed(conn: &Connection, trimmed: &str) -> rusqlite::Result<()> {
    let sentence_id = add_sentence(conn, trimmed)?;
    let mut tagger = Tagger::new("");
    tagger.parse_nbest_init(trimmed);
    let mecab_out = tagger.next().unwrap();
    for l in mecab_out.lines() {
        // MeCab terminates its analysis with a bare "EOS" line.
        if l == "EOS" {
            break;
        }
        // Each line is "<surface>\t<comma-separated features>".
        let tab_index = l.find('\t').unwrap();
        let (_, rest) = l.split_at(tab_index);
        // Remove the leading tab
        let rest = &rest[1..];
        // Fixed idiom: `.skip(6).next()` -> `.nth(6)` (clippy::iter_skip_next).
        let root = rest.split(',').nth(6).unwrap();
        add_word(conn, root, sentence_id)?;
    }
    Ok(())
}
/// Reads every sentence from `reader` and stores it. Malformed sentences are
/// reported to stdout and skipped rather than aborting the whole import;
/// database errors still propagate.
fn consume_sentences<R: io::BufRead>(conn: &Connection, reader: R) -> rusqlite::Result<()> {
    // 1-based counter used purely for progress/error reporting.
    let mut i = 0;
    for sentence in sentences(reader) {
        i += 1;
        if sentence.is_err() {
            println!("Err on #{}: {:?}", i, sentence);
            continue;
        };
        let sentence = sentence.unwrap();
        println!("#{}: {}", i, sentence);
        consume_trimmed(conn, &sentence)?;
    }
    Ok(())
}
// Command-line interface, parsed by structopt into one of two subcommands.
#[derive(Debug, StructOpt)]
#[structopt(name = "ginkou", about = "Japanese sentence bank")]
enum Ginkou {
    /// Add new sentences to the database.
    #[structopt(name = "add")]
    Add {
        /// The file to read sentences from.
        ///
        /// If no file is given, sentences will be read from stdin.
        #[structopt(long, short = "f", parse(from_os_str))]
        file: Option<PathBuf>,
        /// The database to use.
        #[structopt(long = "database", short = "d", parse(from_os_str))]
        db: Option<PathBuf>,
    },
    /// Search for all sentences containing a given word.
    #[structopt(name = "get")]
    Get {
        /// The word to search for in the database.
        word: String,
        /// Show all results instead of shortest 200
        #[structopt(long = "allwords", short = "a")]
        all: bool,
        /// The database to use.
        #[structopt(long = "database", short = "d", parse(from_os_str))]
        db: Option<PathBuf>,
    },
}
/// Location of the sentence database: `~/.ginkoudb` when a home directory is
/// known, otherwise `.ginkoudb` relative to the working directory.
fn default_db_path() -> PathBuf {
    // An empty default PathBuf plus a push yields plain ".ginkoudb",
    // matching the fallback behavior.
    let mut path = dirs::home_dir().unwrap_or_default();
    path.push(".ginkoudb");
    path
}
fn main() {
    // NOTE(review): the whole CLI driver is commented out, so the binary
    // currently does nothing. Restoring it (with a `Result` return type on
    // `main`) would enable the `add`/`get` subcommands defined above.
    // let opt = Ginkou::from_args();
    // match opt {
    //     Ginkou::Get { word, all, db } => {
    //         let db_path = db.unwrap_or(default_db_path());
    //         let mut conn = conn_from_disk(&db_path)?;
    //         print_matching_words(&mut conn, &word, all)?;
    //     }
    //     Ginkou::Add { file, db } => {
    //         let db_path = db.unwrap_or(default_db_path());
    //         let mut conn = conn_from_disk(&db_path)?;
    //         let tx = conn.transaction()?;
    //         match file {
    //             None => {
    //                 consume_sentences(&tx, io::BufReader::new(io::stdin()))?;
    //             }
    //             Some(path) => {
    //                 let file_res = File::open(&path);
    //                 if let Err(e) = file_res {
    //                     println!("Couldn't open {}:\n  {}", path.as_path().display(), e);
    //                     return Ok(());
    //                 }
    //                 let file = file_res.unwrap();
    //                 consume_sentences(&tx, io::BufReader::new(file))?;
    //             }
    //         };
    //         tx.commit()?;
    //     }
    // };
    // Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn sentences_works_correctly() {
        let string = "A。\n B。\n\n  XXC。";
        let mut iter = sentences(std::io::BufReader::new(string.as_bytes()));
        let a = iter.next();
        assert_eq!(String::from("A。"), a.unwrap().unwrap());
        let b = iter.next();
        assert_eq!(String::from("B。"), b.unwrap().unwrap());
        let c = iter.next();
        assert_eq!(String::from("XXC。"), c.unwrap().unwrap());
    }
    #[test]
    fn bank_lookup_works_correctly() -> rusqlite::Result<()> {
        let conn = conn_from_memory()?;
        let sentence1 = String::from("A B");
        let sentence2 = String::from("A B C");
        let s1 = add_sentence(&conn, &sentence1)?;
        add_word(&conn, "A", s1)?;
        add_word(&conn, "B", s1)?;
        let s2 = add_sentence(&conn, &sentence2)?;
        add_word(&conn, "A", s2)?;
        add_word(&conn, "B", s2)?;
        add_word(&conn, "C", s2)?;
        // Fixed: `matching_word` takes an `all` flag; these calls previously
        // passed only two arguments and did not compile. Request all matches.
        let a_sentences = vec![sentence1.clone(), sentence2.clone()];
        assert_eq!(Ok(a_sentences), matching_word(&conn, "A", true));
        let c_sentences = vec![sentence2.clone()];
        assert_eq!(Ok(c_sentences), matching_word(&conn, "C", true));
        Ok(())
    }
    #[test]
    fn sentences_can_be_consumed() -> rusqlite::Result<()> {
        let conn = conn_from_memory()?;
        let sentence1 = "猫を見た";
        let sentence2 = "犬を見る";
        consume_trimmed(&conn, sentence1)?;
        consume_trimmed(&conn, sentence2)?;
        let a_sentences = vec![sentence1.into(), sentence2.into()];
        assert_eq!(Ok(a_sentences), matching_word(&conn, "見る", true));
        let b_sentences = vec![sentence2.into()];
        assert_eq!(Ok(b_sentences), matching_word(&conn, "犬", true));
        let c_sentences = vec![sentence1.into()];
        assert_eq!(Ok(c_sentences), matching_word(&conn, "猫", true));
        Ok(())
    }
}
| true
|
d7faafd271e6b5a543f240272ff3810ed1a35300
|
Rust
|
Adiguna7/semester5
|
/mobileprogramming/tugas1/sumfile2/sumfile.rs
|
UTF-8
| 828
| 3.65625
| 4
|
[] |
no_license
|
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
/// Return the sum of all the integers found in the file at `path`. Each integer
/// must be on its own line.
/// Return the sum of all the integers found in the file at `path`. Each
/// integer must be on its own line. Panics (with the same messages as
/// before) on open, read, or parse failures.
fn sum_file(path: &str) -> i64 {
    let file = File::open(path).unwrap_or_else(|e| panic!("couldn't open {}: {}", path, e));
    BufReader::new(file)
        .lines()
        .map(|readline| {
            let line =
                readline.unwrap_or_else(|e| panic!("couldn't read from {}: {}", path, e));
            line.trim()
                .parse::<i64>()
                .unwrap_or_else(|_| panic!("invalid integer in {}: {}", path, line))
        })
        .sum()
}
fn main() {
    // Expects a file named `numbers.txt` in the current working directory.
    println!("Sum: {}", sum_file("numbers.txt"));
}
| true
|
dafa2d218358a2fbb8d1da09ac109034d8f73d67
|
Rust
|
runarberg/math-text-transform
|
/src/variants/bold_italic.rs
|
UTF-8
| 3,574
| 3.953125
| 4
|
[
"MIT"
] |
permissive
|
/// Transform a character to it's mathematical bold italic equivalent.
///
/// Contiguous alphabet runs (Latin A-Z/a-z, Greek Α-Ρ, Σ-Ω, α-ω) map onto
/// equally contiguous runs in the Mathematical Alphanumeric Symbols block
/// (U+1D400–U+1D7FF), so those are computed arithmetically; the remaining
/// irregular symbols are listed explicitly.
pub fn math_bold_italic(c: char) -> Option<char> {
    // Shift `c` by its distance from `first` into the target run starting at
    // code point `base`. All targets are valid assigned characters.
    let shift = |first: char, base: u32| char::from_u32(base + (c as u32 - first as u32));
    match c {
        // Latin capital and small letters: 𝑨..𝒁 and 𝒂..𝒛.
        'A'..='Z' => shift('A', 0x1D468),
        'a'..='z' => shift('a', 0x1D482),
        // Greek capitals: Α..Ρ -> 𝜜..𝜬, then Σ..Ω -> 𝜮..𝜴
        // (U+03A2 is unassigned and correctly falls outside both ranges).
        'Α'..='Ρ' => shift('Α', 0x1D71C),
        'Σ'..='Ω' => shift('Σ', 0x1D72E),
        // Greek small letters including final sigma: α..ω -> 𝜶..𝝎.
        'α'..='ω' => shift('α', 0x1D736),
        // Irregular symbols interleaved in the Unicode block.
        'ϴ' => Some('𝜭'),
        '∇' => Some('𝜵'),
        '∂' => Some('𝝏'),
        'ϵ' => Some('𝝐'),
        'ϑ' => Some('𝝑'),
        'ϰ' => Some('𝝒'),
        'ϕ' => Some('𝝓'),
        'ϱ' => Some('𝝔'),
        'ϖ' => Some('𝝕'),
        // No equivalence.
        _ => None,
    }
}
| true
|
34fac6370d78514816f419c00f03186175469db9
|
Rust
|
AlexTugarev/nushell
|
/crates/nu-cli/src/commands/if_.rs
|
UTF-8
| 6,167
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
use crate::command_registry::CommandRegistry;
use crate::commands::classified::block::run_block;
use crate::commands::WholeStreamCommand;
use crate::evaluate::evaluate_baseline_expr;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{
hir::Block, hir::ClassifiedCommand, Scope, Signature, SyntaxShape, UntaggedValue,
};
/// The `if` command: evaluates a condition per input value and runs one of
/// two blocks.
pub struct If;
/// Deserialized positional arguments of the `if` command.
#[derive(Deserialize)]
pub struct IfArgs {
    condition: Block, // must contain exactly one expression
    then_case: Block, // run when the condition evaluates to true
    else_case: Block, // run when the condition evaluates to false
}
#[async_trait]
impl WholeStreamCommand for If {
    fn name(&self) -> &str {
        "if"
    }
    // Three required positionals: a math-shaped condition and two blocks.
    fn signature(&self) -> Signature {
        Signature::build("if")
            .required(
                "condition",
                SyntaxShape::Math,
                "the condition that must match",
            )
            .required(
                "then_case",
                SyntaxShape::Block,
                "block to run if condition is true",
            )
            .required(
                "else_case",
                SyntaxShape::Block,
                "block to run if condition is false",
            )
    }
    fn usage(&self) -> &str {
        "Run blocks if a condition is true or false."
    }
    // Delegates all of the work to the free function below.
    async fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        if_command(args, registry).await
    }
    // These examples are also executed by the test at the bottom of the file.
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Run a block if a condition is true",
                example: "echo 10 | if $it > 5 { echo 'greater than 5' } { echo 'less than or equal to 5' }",
                result: Some(vec![UntaggedValue::string("greater than 5").into()]),
            },
            Example {
                description: "Run a block if a condition is false",
                example: "echo 1 | if $it > 5 { echo 'greater than 5' } { echo 'less than or equal to 5' }",
                result: Some(vec![UntaggedValue::string("less than or equal to 5").into()]),
            },
        ]
    }
}
/// Implementation of `if`: validates that the condition block is a single
/// expression, then evaluates it once per input value, running the matching
/// branch with `$it` bound to that value.
async fn if_command(
    raw_args: CommandArgs,
    registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
    let registry = Arc::new(registry.clone());
    let scope = raw_args.call_info.scope.clone();
    let tag = raw_args.call_info.name_tag.clone();
    let context = Arc::new(EvaluationContext::from_raw(&raw_args, &registry))
    let (
        IfArgs {
            condition,
            then_case,
            else_case,
        },
        input,
    ) = raw_args.process(&registry).await?;
    // Unwrap the condition block down to its single expression; anything else
    // (empty block, multiple commands, non-expression) is a usage error.
    let condition = {
        if condition.block.len() != 1 {
            return Err(ShellError::labeled_error(
                "Expected a condition",
                "expected a condition",
                tag,
            ));
        }
        match condition.block[0].list.get(0) {
            Some(item) => match item {
                ClassifiedCommand::Expr(expr) => expr.clone(),
                _ => {
                    return Err(ShellError::labeled_error(
                        "Expected a condition",
                        "expected a condition",
                        tag,
                    ));
                }
            },
            None => {
                return Err(ShellError::labeled_error(
                    "Expected a condition",
                    "expected a condition",
                    tag,
                ));
            }
        }
    };
    // For each input value: bind it as $it, evaluate the condition, and run
    // the corresponding branch; errors become single-error output streams.
    Ok(input
        .then(move |input| {
            let condition = condition.clone();
            let then_case = then_case.clone();
            let else_case = else_case.clone();
            let registry = registry.clone();
            let scope = Scope::append_var(scope.clone(), "$it", input);
            let mut context = context.clone();
            async move {
                //FIXME: should we use the scope that's brought in as well?
                let condition = evaluate_baseline_expr(&condition, &*registry, scope.clone()).await;
                match condition {
                    Ok(condition) => match condition.as_bool() {
                        Ok(b) => {
                            if b {
                                match run_block(
                                    &then_case,
                                    Arc::make_mut(&mut context),
                                    InputStream::empty(),
                                    scope,
                                )
                                .await
                                {
                                    Ok(stream) => stream.to_output_stream(),
                                    Err(e) => futures::stream::iter(vec![Err(e)].into_iter())
                                        .to_output_stream(),
                                }
                            } else {
                                match run_block(
                                    &else_case,
                                    Arc::make_mut(&mut context),
                                    InputStream::empty(),
                                    scope,
                                )
                                .await
                                {
                                    Ok(stream) => stream.to_output_stream(),
                                    Err(e) => futures::stream::iter(vec![Err(e)].into_iter())
                                        .to_output_stream(),
                                }
                            }
                        }
                        Err(e) => {
                            futures::stream::iter(vec![Err(e)].into_iter()).to_output_stream()
                        }
                    },
                    Err(e) => futures::stream::iter(vec![Err(e)].into_iter()).to_output_stream(),
                }
            }
        })
        .flatten()
        .to_output_stream())
}
#[cfg(test)]
mod tests {
    use super::If;
    use super::ShellError;
    // Runs the command's declared `examples()` through the shared harness.
    #[test]
    fn examples_work_as_expected() -> Result<(), ShellError> {
        use crate::examples::test as test_examples;
        Ok(test_examples(If {})?)
    }
}
| true
|
9a20b32ced3b48516131b60e17ba8ad0b91933c5
|
Rust
|
Lasia98/Filecoin
|
/mirrors.ustc.edu.cn-b63e9dae659fc205/positioned-io-0.2.2/src/unix.rs
|
UTF-8
| 936
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::File;
use std::io::{Result, Write, Error};
use std::os::unix::io::AsRawFd;
use super::{ReadAt, WriteAt};
extern crate libc;
use self::libc::{pread, pwrite, c_void, off_t, size_t, ssize_t};
fn err(e: ssize_t) -> Result<usize> {
if e == -1 as ssize_t {
Err(Error::last_os_error())
} else {
Ok(e as usize)
}
}
// Positioned read for files via POSIX `pread(2)` (does not use or move the
// file cursor).
impl ReadAt for File {
    fn read_at(&self, pos: u64, buf: &mut [u8]) -> Result<usize> {
        let fd = self.as_raw_fd();
        // SAFETY: `buf` is a valid, writable region of exactly `buf.len()`
        // bytes for the duration of the call, as `pread` requires.
        err(unsafe {
            pread(fd, buf.as_mut_ptr() as *mut c_void, buf.len() as size_t, pos as off_t)
        })
    }
}
// Positioned write for files via POSIX `pwrite(2)`.
impl WriteAt for File {
    fn write_at(&mut self, pos: u64, buf: &[u8]) -> Result<usize> {
        let fd = self.as_raw_fd();
        // SAFETY: `buf` is a valid, readable region of exactly `buf.len()`
        // bytes for the duration of the call, as `pwrite` requires.
        err(unsafe {
            pwrite(fd, buf.as_ptr() as *const c_void, buf.len() as size_t, pos as off_t)
        })
    }
    fn flush(&mut self) -> Result<()> {
        // Defer to the ordinary `Write` flush.
        Write::flush(self)
    }
}
| true
|
cec586085e2c4e33ecece7f95c12e7d27ab2bc37
|
Rust
|
CraftSpider/nanowrimo-rs
|
/src/client.rs
|
UTF-8
| 13,218
| 2.953125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use super::data::*;
use super::error::Error;
use super::kind::NanoKind;
use std::collections::HashMap;
use std::cell::RefCell;
use reqwest::{Client, Method, StatusCode};
use serde::Serialize;
use serde::de::DeserializeOwned;
#[cfg(test)]
mod tests;
/// Appends an `include=a,b,c` query parameter listing the requested linked
/// kinds; does nothing when no kinds were requested.
fn add_included(data: &mut Vec<(String, String)>, include: &[NanoKind]) {
    if include.is_empty() {
        return;
    }
    let joined = include
        .iter()
        .map(|kind| kind.api_name())
        .collect::<Vec<&str>>()
        .join(",");
    data.push(("include".to_string(), joined));
}
/// A client with which to connect to the Nano site. Can be used with or without login.
#[derive(Debug)]
pub struct NanoClient {
    client: Client,
    username: String,
    password: String,
    // Session token from `users/sign_in`; `None` while logged out. A RefCell
    // lets `&self` methods refresh it (single-threaded interior mutability).
    token: RefCell<Option<String>>,
}
impl NanoClient {
const BASE_URL: &'static str = "https://api.nanowrimo.org/";
fn new(user: &str, pass: &str) -> NanoClient {
NanoClient {
client: Client::new(),
username: user.to_string(),
password: pass.to_string(),
token: RefCell::new(None),
}
}
/// Create a new client with the 'anonymous' or 'guest' user, not logged in
pub fn new_anon() -> NanoClient {
NanoClient::new("", "")
}
/// Create a new client that is automatically logged in as a specific user
pub async fn new_user(user: &str, pass: &str) -> Result<NanoClient, Error> {
let client = NanoClient::new(user, pass);
client.login().await?;
Ok(client)
}
async fn make_request<T, U>(&self, path: &str, method: Method, data: &T) -> Result<U, Error>
where
T: Serialize + ?Sized,
U: DeserializeOwned + std::fmt::Debug
{
let mut query = None;
let mut json = None;
match method {
Method::GET => query = Some(data),
_ => json = Some(data)
}
let mut req = self.client.request(method, &format!("{}{}", NanoClient::BASE_URL, path));
if let Some(token) = &*self.token.borrow() {
req = req.header("Authorization", token)
}
if let Some(query) = query {
req = req.query(query);
}
if let Some(json) = json {
req = req.json(json)
}
let resp = req.send()
.await?;
let status = resp.status();
match status {
StatusCode::INTERNAL_SERVER_ERROR => return Err(
Error::SimpleNanoError(status, "Internal Server Error".to_string())
),
StatusCode::NOT_FOUND => return Err(
Error::SimpleNanoError(status, "Page Not Found".to_string())
),
_ => ()
}
let nano_resp = resp
.json()
.await?;
match nano_resp {
NanoResponse::Success(val) => Ok(val),
NanoResponse::Error(err) => {
match err {
NanoError::SimpleError { error } => Err(Error::SimpleNanoError(status, error)),
NanoError::ErrorList { errors } => Err(Error::NanoErrors(errors))
}
},
NanoResponse::Unknown(val) => panic!("Couldn't parse valid JSON as NanoResponse:\n{}", val)
}
}
async fn retry_request<T, U>(&self, path: &str, method: Method, data: &T) -> Result<U, Error>
where
T: Serialize + ?Sized,
U: DeserializeOwned + std::fmt::Debug
{
let res = self.make_request(path, method.clone(), data).await;
match res {
Err(Error::SimpleNanoError(code, _)) if code == StatusCode::UNAUTHORIZED && self.is_logged_in() => {
self.login().await?;
self.make_request(path, method, data).await
},
_ => res
}
}
/// Check whether this client is currently logged in
pub fn is_logged_in(&self) -> bool {
self.token.borrow().is_none()
}
/// Log in this client, without logging out
pub async fn login(&self) -> Result<(), Error> {
let mut map = HashMap::new();
map.insert("identifier", &self.username);
map.insert("password", &self.password);
let res = self.make_request::<_, LoginResponse>("users/sign_in", Method::POST, &map)
.await?;
self.token.replace(Some(res.auth_token));
Ok(())
}
/// Log out this client, without checking if it's logged in
pub async fn logout(&self) -> Result<(), Error> {
self.make_request::<_, ()>("users/logout", Method::POST, &()).await?;
self.token.replace(None);
Ok(())
}
/// Change the current user of the client. Logs out if necessary, and either logs in if provided
/// with username/password, or stays logged out and shifts to the 'guest' user
pub async fn change_user(&mut self, user: Option<&str>, pass: Option<&str>) -> Result<(), Error> {
if self.is_logged_in() {
self.logout().await?;
}
if user.is_some() && pass.is_some() {
self.username = user.unwrap().to_string();
self.password = pass.unwrap().to_string();
self.login().await?;
} else if user.is_none() && pass.is_none() {
self.username = "".to_string();
self.password = "".to_string();
self.token.replace(None);
} else {
panic!("Either both user and pass must be provided, or neither")
}
Ok(())
}
// Commands
/// Get information about the Nano fundometer
pub async fn fundometer(&self) -> Result<Fundometer, Error> {
self.retry_request("fundometer", Method::GET, &()).await
}
/// Search for users by username
pub async fn search(&self, name: &str) -> Result<CollectionResponse<UserObject>, Error> {
self.retry_request("search", Method::GET, &[("q", name)]).await
}
/// Get a random sponsor offer
pub async fn random_offer(&self) -> Result<ItemResponse<PostObject>, Error> {
self.retry_request("random_offer", Method::GET, &()).await
}
/// Get a list of all store items
pub async fn store_items(&self) -> Result<Vec<StoreItem>, Error> {
self.retry_request("store_items", Method::GET, &()).await
}
/// Get a list of all current sponsor offers
pub async fn offers(&self) -> Result<Vec<ItemResponse<PostObject>>, Error> {
self.retry_request("offers", Method::GET, &()).await
}
/// Get the currently logged in user, with included linked items
pub async fn current_user_include(&self, include: &[NanoKind]) -> Result<ItemResponse<UserObject>, Error> {
let mut data = Vec::new();
add_included(&mut data, include);
self.retry_request("users/current", Method::GET, &data).await
}
/// Get the currently logged in user
pub async fn current_user(&self) -> Result<ItemResponse<UserObject>, Error> {
self.current_user_include(&[]).await
}
/// Get info about a specific set of pages. Known valid values include:
///
/// - `"what-is-camp-nanowrimo"`
/// - `"nano-prep-101"`
/// - `"pep-talks"`
/// - `"dei"`
/// - `"come-write-in"`
/// - `"about-nano"`
/// - `"staff"`
/// - `"board-of-directors"`
/// - `"writers-board"`
/// - `"terms-and-conditions"`
/// - `"writers-board"`
/// - `"brought-to-you-by"`
///
/// If you know of other valid values, please open an issue with the values to add to this list!
pub async fn pages(&self, page: &str) -> Result<ItemResponse<PageObject>, Error> {
self.retry_request(&format!("pages/{}", page), Method::GET, &()).await
}
/// Get the list of notifications for the current user
pub async fn notifications(&self) -> Result<CollectionResponse<NotificationObject>, Error> {
self.retry_request("notifications", Method::GET, &()).await
}
/// Get a set of all the challenges this user has access to (Possibly all they can make
/// projects in)
pub async fn available_challenges(&self) -> Result<CollectionResponse<ChallengeObject>, Error> {
self.retry_request("challenges/available", Method::GET, &()).await
}
/// Get the daily aggregates for a given ProjectChallenge
/// ProjectChallenge is the common link between a project and a challenge it was part of,
/// thus providing info for counts on given days
pub async fn daily_aggregates(&self, id: u64) -> Result<CollectionResponse<DailyAggregateObject>, Error> {
self.retry_request(&format!("project-challenges/{}/daily-aggregates", id), Method::GET, &()).await
}
// Type queries
/// Get all accessible items of a specific kind, with included linked items and filtering to
/// certain related IDs.
///
/// 'includes' will add more items in the response as part of an 'includes' list,
/// so one request can get more items
///
/// 'filter' will filter certain types of objects by IDs of other objects related to them.
///
/// **Warning**: Many filter combinations are invalid, and the rules are not currently fully
/// understood.
pub async fn get_all_include_filtered(&self, ty: NanoKind, include: &[NanoKind], filter: &[(&str, u64)]) -> Result<CollectionResponse, Error> {
let mut data = Vec::new();
for i in filter {
data.push(
(format!("filter[{}]", i.0), i.1.to_string())
)
}
add_included(&mut data, include);
self.retry_request(ty.api_name(), Method::GET, &data).await
}
/// Get all accessible items of a specific kind, with filtering to certain related IDs
/// (See [`Self::get_all_include_filtered`])
pub async fn get_all_filtered(&self, ty: NanoKind, filter: &[(&str, u64)]) -> Result<CollectionResponse, Error> {
self.get_all_include_filtered(ty, &[], filter).await
}
/// Get all accessible items of a specific kind, with included linked items
/// (See [`Self::get_all_include_filtered`])
pub async fn get_all_include(&self, ty: NanoKind, include: &[NanoKind]) -> Result<CollectionResponse, Error> {
self.get_all_include_filtered(ty, include, &[]).await
}
/// Get all accessible items of a specific kind, neither filtering nor including linked items
/// (See [`Self::get_all_include_filtered`])
pub async fn get_all(&self, ty: NanoKind) -> Result<CollectionResponse, Error> {
self.get_all_include_filtered(ty, &[], &[]).await
}
/// Get an item of a specific type and ID, with included linked items
pub async fn get_id_include(&self, ty: NanoKind, id: u64, include: &[NanoKind]) -> Result<ItemResponse, Error> {
let mut data = Vec::new();
add_included(&mut data, include);
self.retry_request(&format!("{}/{}", ty.api_name(), id), Method::GET, &data).await
}
/// Get an item of a specific type and ID, with no included items.
/// (See [`Self::get_id_include`])
pub async fn get_id(&self, ty: NanoKind, id: u64) -> Result<ItemResponse, Error> {
self.get_id_include(ty, id, &[]).await
}
/// Get an item of a specific type and slug, with included items.
/// A slug is a unique text identifier for an object, not all types have one.
pub async fn get_slug_include(&self, ty: NanoKind, slug: &str, include: &[NanoKind]) -> Result<ItemResponse, Error> {
let mut data = Vec::new();
add_included(&mut data, include);
self.retry_request(&format!("{}/{}", ty.api_name(), slug), Method::GET, &data).await
}
    /// Get an item of a specific type and slug, with no included items.
    /// A slug is a unique text identifier for an object, not all types have one.
    ///
    /// Convenience wrapper around [`Self::get_slug_include`] with an empty `include` list.
    pub async fn get_slug(&self, ty: NanoKind, slug: &str) -> Result<ItemResponse, Error> {
        self.get_slug_include(ty, slug, &[]).await
    }
    /// Get all items from a given RelationLink, a tie from one object to object(s) of a specific
    /// type that are related to it.
    ///
    /// **Warning**: Not all RelationLinks can be retrieved, some will return a 404 due to the
    /// way Nano handle them on its end, if you know ahead of time that you will need the relations,
    /// it's better to use [`Self::get_id_include`] or [`Self::get_all_include`]
    ///
    /// # Panics
    ///
    /// Panics if `rel` is a single-relation link. The check is heuristic:
    /// to-many relation URLs are assumed to end in `s`.
    pub async fn get_all_related(&self, rel: &RelationLink) -> Result<CollectionResponse, Error> {
        if !rel.related.ends_with("s") {
            panic!("get_all_related can only get many-relation links")
        }
        self.retry_request(&rel.related, Method::GET, &()).await
    }
    /// Get a single item from a given RelationLink, a tie from one object to object(s) of a
    /// specific type that are related to it. Single relations tend to not have the same pitfalls as
    /// multiple relations, so this is less dangerous than [`Self::get_all_related`]
    ///
    /// # Panics
    ///
    /// Panics if `rel` is a many-relation link (heuristic: its URL ends in `s`).
    pub async fn get_unique_related(&self, rel: &RelationLink) -> Result<ItemResponse, Error> {
        if rel.related.ends_with("s") {
            panic!("get_unique_related can only get single-relation links")
        }
        self.retry_request(&rel.related, Method::GET, &()).await
    }
}
| true
|
69f1d60370f32e66e43c9bc9965260d0f2c0108a
|
Rust
|
m4tx/offs
|
/liboffs/src/store/id_generator.rs
|
UTF-8
| 1,647
| 2.9375
| 3
|
[
"MIT"
] |
permissive
|
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use itertools::Itertools;
use rand::distributions::Standard;
use rand::Rng;
/// Strategy for producing string IDs for newly created objects.
pub trait IdGenerator: Clone {
    /// Produce the next ID.
    fn generate_id(&mut self) -> String;
    /// Reset any internal state so IDs start over; a no-op by default.
    fn reset_generator(&mut self) {}
}
/// Stateless [`IdGenerator`] producing random 32-character hex IDs
/// (16 random bytes, hex-encoded).
#[derive(Clone)]
pub struct RandomHexIdGenerator;
impl RandomHexIdGenerator {
    /// Create a new generator; the type carries no state.
    pub fn new() -> Self {
        Self {}
    }
}
impl IdGenerator for RandomHexIdGenerator {
    fn generate_id(&mut self) -> String {
        // 16 bytes from the thread-local RNG, hex-encoded into a
        // 32-character string.
        hex::encode(
            &rand::thread_rng()
                .sample_iter(&Standard)
                .take(16)
                .collect_vec(),
        )
    }
}
/// [`IdGenerator`] that hands out sequential, zero-padded temporary IDs of
/// the form `temp-<n>`.
#[derive(Clone)]
pub struct LocalTempIdGenerator {
    // Next sequence number; wrapped in Arc so clones share one counter.
    pub next_id: Arc<AtomicUsize>,
}
const LOCAL_PREFIX: &str = "temp-";
impl LocalTempIdGenerator {
    /// Create a generator whose shared counter starts at 0.
    pub fn new() -> Self {
        Self {
            next_id: Arc::new(AtomicUsize::new(0)),
        }
    }

    /// Render the `n`-th temporary ID, e.g. `temp-00000000000000000042`.
    pub fn get_nth_id(n: usize) -> String {
        format!("{}{:020}", LOCAL_PREFIX, n)
    }

    /// Extract the sequence number from an ID produced by [`Self::get_nth_id`].
    ///
    /// # Panics
    ///
    /// Panics if the suffix after `temp-` is not a valid number.
    pub fn get_n(id: &str) -> usize {
        debug_assert!(Self::is_local_id(id));
        id[LOCAL_PREFIX.len()..]
            .parse()
            // `unwrap_or_else` builds the panic message only on failure;
            // the previous `expect(&format!(..))` allocated it on every call.
            .unwrap_or_else(|_| panic!("Invalid temporary ID assigned: {}", id))
    }

    /// Whether `id` was produced by this generator (starts with `temp-`).
    pub fn is_local_id(id: &str) -> bool {
        id.starts_with(LOCAL_PREFIX)
    }
}
impl IdGenerator for LocalTempIdGenerator {
    fn generate_id(&mut self) -> String {
        // Claim the sequence number and advance the counter in a single
        // atomic step. The previous separate `load` + `fetch_add` pair
        // could hand the same ID to two clones racing on the shared
        // `Arc<AtomicUsize>` counter.
        let n = self.next_id.fetch_add(1, Ordering::Relaxed);
        Self::get_nth_id(n)
    }

    fn reset_generator(&mut self) {
        // Restart numbering from temp-…0.
        self.next_id.store(0, Ordering::Relaxed);
    }
}
| true
|
a4a5e0f9f0b2f7d5b31686961f1202b73326ba58
|
Rust
|
juggernaut09/cosmwasm
|
/packages/vm/src/backends/singlepass.rs
|
UTF-8
| 4,123
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#![cfg(any(feature = "singlepass", feature = "default-singlepass"))]
use wasmer_middleware_common::metering;
use wasmer_runtime_core::{
backend::Compiler,
codegen::{MiddlewareChain, StreamingCompiler},
compile_with,
module::Module,
vm::Ctx,
};
use wasmer_singlepass_backend::ModuleCodeGenerator as SinglePassMCG;
use crate::errors::VmResult;
use crate::middleware::DeterministicMiddleware;
/// In Wasmer, the gas limit is set on modules during compilation and is included in the cached modules.
/// This causes issues when trying to instantiate the same compiled module with a different gas limit.
/// A fix for this is proposed here: https://github.com/wasmerio/wasmer/pull/996.
///
/// To work around this limitation, we set the gas limit of all Wasmer instances to this very high value,
/// assuming users won't request more than this amount of gas. In order to set the real gas limit, we pretend
/// to consume the difference between the two in `set_gas_left` ("points used" in the metering middleware).
/// Since we observed overflow behaviour in the points used, we ensure both MAX_GAS_LIMIT and points used stay
/// far below u64::MAX.
const MAX_GAS_LIMIT: u64 = u64::MAX / 2;
pub const BACKEND_NAME: &str = "singlepass";
/// Compile Wasm bytecode to a Wasmer module using the singlepass backend,
/// with the deterministic and metering middlewares from [`compiler`].
pub fn compile(code: &[u8]) -> VmResult<Module> {
    let module = compile_with(code, compiler().as_ref())?;
    Ok(module)
}
/// Build the singlepass streaming compiler: the determinism middleware runs
/// first in the chain, followed by gas metering capped at `MAX_GAS_LIMIT`.
pub fn compiler() -> Box<dyn Compiler> {
    let c: StreamingCompiler<SinglePassMCG, _, _, _, _> = StreamingCompiler::new(move || {
        let mut chain = MiddlewareChain::new();
        chain.push(DeterministicMiddleware::new());
        chain.push(metering::Metering::new(MAX_GAS_LIMIT));
        chain
    });
    Box::new(c)
}
/// Set the amount of gas units that can be used in the context.
///
/// Implemented by pretending `MAX_GAS_LIMIT - amount` metering points were
/// already used (see the `MAX_GAS_LIMIT` note above).
///
/// # Panics
///
/// Panics if `amount` exceeds `MAX_GAS_LIMIT`.
pub fn set_gas_left(ctx: &mut Ctx, amount: u64) {
    if amount > MAX_GAS_LIMIT {
        panic!(
            "Attempted to set gas limit larger than max gas limit (got: {}; maximum: {}).",
            amount, MAX_GAS_LIMIT
        );
    } else {
        let used = MAX_GAS_LIMIT - amount;
        metering::set_points_used_ctx(ctx, used);
    }
}
/// Get how many more gas units can be used in the context.
///
/// Inverse of [`set_gas_left`]: converts metering "points used" back into
/// remaining gas, saturating at 0 once the limit is exhausted.
pub fn get_gas_left(ctx: &Ctx) -> u64 {
    let used = metering::get_points_used_ctx(ctx);
    // when running out of gas, get_points_used can exceed MAX_GAS_LIMIT
    MAX_GAS_LIMIT.saturating_sub(used)
}
#[cfg(test)]
mod test {
use super::*;
use wasmer_runtime_core::{imports, Instance as WasmerInstance};
fn instantiate(code: &[u8]) -> WasmerInstance {
let module = compile(code).unwrap();
let import_obj = imports! { "env" => {}, };
module.instantiate(&import_obj).unwrap()
}
#[test]
fn get_gas_left_defaults_to_constant() {
let wasm = wat::parse_str("(module)").unwrap();
let instance = instantiate(&wasm);
let gas_left = get_gas_left(instance.context());
assert_eq!(gas_left, MAX_GAS_LIMIT);
}
#[test]
fn set_gas_left_works() {
let wasm = wat::parse_str("(module)").unwrap();
let mut instance = instantiate(&wasm);
let limit = 3456789;
set_gas_left(instance.context_mut(), limit);
assert_eq!(get_gas_left(instance.context()), limit);
let limit = 1;
set_gas_left(instance.context_mut(), limit);
assert_eq!(get_gas_left(instance.context()), limit);
let limit = 0;
set_gas_left(instance.context_mut(), limit);
assert_eq!(get_gas_left(instance.context()), limit);
let limit = MAX_GAS_LIMIT;
set_gas_left(instance.context_mut(), limit);
assert_eq!(get_gas_left(instance.context()), limit);
}
#[test]
#[should_panic(
expected = "Attempted to set gas limit larger than max gas limit (got: 9223372036854775808; maximum: 9223372036854775807)."
)]
fn set_gas_left_panic_for_values_too_large() {
let wasm = wat::parse_str("(module)").unwrap();
let mut instance = instantiate(&wasm);
let limit = MAX_GAS_LIMIT + 1;
set_gas_left(instance.context_mut(), limit);
}
}
| true
|
87fe3506863b8bf98b656763cd181ae6df984919
|
Rust
|
wycats/crates.io
|
/src/model.rs
|
UTF-8
| 589
| 2.53125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use pg;
use db::Connection;
use util::{CargoResult, Require};
use util::errors::NotFound;
/// Minimal ORM-style trait for types that map one-to-one onto a database
/// table row. (Pre-1.0 Rust idioms — `try!`, `as_slice`, `Option<Self>` for
/// impl selection — are intentionally left untouched.)
pub trait Model {
    /// Build an instance from a query result row.
    fn from_row(row: &pg::Row) -> Self;
    /// Name of the backing table; the unused `Option<Self>` argument only
    /// selects which impl to dispatch to (called as `None::<T>`).
    fn table_name(_: Option<Self>) -> &'static str;
    /// Look up a row by primary key; yields `NotFound` if no row matches.
    fn find(conn: &Connection, id: i32) -> CargoResult<Self> {
        let sql = format!("SELECT * FROM {} WHERE id = $1",
                          Model::table_name(None::<Self>));
        let stmt = try!(conn.prepare(sql.as_slice()));
        let mut rows = try!(stmt.query(&[&id]));
        let row = try!(rows.next().require(|| NotFound));
        Ok(Model::from_row(&row))
    }
}
| true
|
e150139fba6c2c3cdf218bb7efd470fdaf1ee073
|
Rust
|
vraisamis/AtCoder
|
/ABC121/c/src/main.rs
|
UTF-8
| 867
| 2.953125
| 3
|
[] |
no_license
|
#[allow(dead_code)]
// Read one line from stdin, trim it, and parse the whole line as `T`.
// Panics on parse failure; read errors are silently ignored (`.ok()`).
fn read<T: std::str::FromStr>() -> T {
    let mut s = String::new();
    std::io::stdin().read_line(&mut s).ok();
    s.trim().parse().ok().unwrap()
}
#[allow(dead_code)]
// Read one line and parse each whitespace-separated token as `T`.
// Panics if any token fails to parse.
fn readv<T: std::str::FromStr>() -> Vec<T> {
    read::<String>().split_whitespace()
        .map(|e| e.parse().ok().unwrap()).collect()
}
#[allow(dead_code)]
// Read `n` lines, parsing each as a row of `T` values (an n-row matrix).
fn readvv<T: std::str::FromStr>(n: usize) -> Vec<Vec<T>> {
    (0..n).map(|_| readv()).collect()
}
fn main() {
    // n = number of (price, stock) rows; m = total units to buy.
    let vs: Vec<usize> = readv();
    let (n, m) = (vs[0], vs[1]);
    let mut mat: Vec<Vec<u64>> = readvv(n);
    // Greedy: buy from the cheapest sellers first.
    mat.sort_by_key(|ab| ab[0]);
    let mut monay = 0; // total cost accumulated so far
    let mut k = 0; // units bought so far
    for ab in mat {
        // ab[0] = unit price, ab[1] = units available from this seller.
        if k + ab[1] > m as u64 {
            // Only part of this seller's stock is needed to reach m.
            monay += ab[0] * ((m as u64) - k);
            break;
        }
        k += ab[1];
        monay += ab[0] * ab[1]
    }
    println!("{}", monay);
}
| true
|
4fc582e835dec2d9ae617efb2bb38c27d3de079e
|
Rust
|
Sollimann/viewercloud
|
/src/annotation.rs
|
UTF-8
| 4,386
| 3.0625
| 3
|
[
"MIT"
] |
permissive
|
//! KITTI Annotation file parsing.
use anyhow::Result;
use std::ffi::OsStr;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::path::Path;
/// Struct to reprensent every KITTI annotation.
#[derive(Debug, Default, PartialEq)]
pub struct KittiAnnotation {
/// Describes the type of object:
/// 'Car', 'Van', 'Truck',
/// 'Pedestrian', 'Person_sitting',
/// 'Cyclist', 'Tram',
/// 'Misc' or 'DontCare'.
pub category: String,
/// Float from 0 (non-truncated)
/// to 1 (truncated), where truncated refers
/// to the object leaving image boundaries
pub truncation: f32,
/// Integer (0,1,2,3) indicating occlusion state:
/// 0 = fully visible,
/// 1 = partly occluded,
/// 2 = largely occluded,
/// 3 = unknown
pub occlusion: i16,
/// Observation angle of object, ranging [-pi..pi]
pub alpha: f32,
/// 2D bounding box xmin of object in the image
pub xmin: f32,
/// 2D bounding box ymin of object in the image
pub ymin: f32,
/// 2D bounding box xmax of object in the image
pub xmax: f32,
/// 2D bounding box ymax of object in the image
pub ymax: f32,
/// 3D object dimensions height(in meters)
pub h: f32,
/// 3D object dimensions width (in meters)
pub w: f32,
/// 3D object dimensions length (in meters)
pub l: f32,
/// 3D object location x in camera coordinates (in meters)
pub x: f32,
/// 3D object location y in camera coordinates (in meters)
pub y: f32,
/// 3D object location z in camera coordinates (in meters)
pub z: f32,
/// Rotation ry around Y-axis in camera coordinates [-pi..pi]
pub ry: f32,
/// Score indicating confidence in detection [0..1]
/// If coming from gound truth score is 1.0
pub score: f32,
}
impl KittiAnnotation {
    /// Create a KittiAnnotation from the individual KITTI label fields.
    /// Field meanings match the struct documentation above.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        category: String,
        truncation: f32,
        occlusion: i16,
        alpha: f32,
        xmin: f32,
        ymin: f32,
        xmax: f32,
        ymax: f32,
        height: f32,
        width: f32,
        length: f32,
        xc: f32,
        yc: f32,
        zc: f32,
        ry: f32,
        score: f32,
    ) -> Self {
        Self {
            category,
            truncation,
            occlusion,
            alpha,
            xmin,
            ymin,
            xmax,
            ymax,
            h: height,
            w: width,
            l: length,
            x: xc,
            y: yc,
            z: zc,
            ry,
            score,
        }
    }
    /// Return the 2D BoundingBox in image coordinates system
    /// as `[xmin, ymin, xmax, ymax]`. Note: consumes `self`.
    pub fn get_2d_bounding_box(self) -> [f32; 4] {
        [self.xmin, self.ymin, self.xmax, self.ymax]
    }
    /// Return the 3D BoundingBox in the Lidar coordinates system
    /// as `[x, y, z, h, w, l, ry]`. Note: consumes `self`.
    pub fn get_3d_bounding_box(self) -> [f32; 7] {
        [self.x, self.y, self.z, self.h, self.w, self.l, self.ry]
    }
}
/// Parse a KITTI annotation file describe in the DevKit
pub fn read_annotation_file(kitti_annotations_path: String) -> Result<Vec<KittiAnnotation>> {
let extension = Path::new(&kitti_annotations_path).extension();
if extension != Some(OsStr::new("txt")) {
panic!(
"KITTI annotation file are in txt format and it received an got {:?}.",
extension
);
}
let file = File::open(kitti_annotations_path).expect("This file does not exist");
let file = BufReader::new(file);
let mut annotation: Vec<KittiAnnotation> = vec![];
for line in file.lines() {
let line = line?;
let data: Vec<&str> = line.split_whitespace().collect();
if data[0] != "DontCare" {
let anno = KittiAnnotation::new(
data[0].to_string(),
data[1].parse()?,
data[2].parse()?,
data[3].parse()?,
data[4].parse()?,
data[5].parse()?,
data[6].parse()?,
data[7].parse()?,
data[8].parse()?,
data[9].parse()?,
data[10].parse()?,
data[11].parse()?,
data[12].parse()?,
data[13].parse()?,
data[14].parse()?,
1.0,
);
annotation.push(anno);
}
}
Ok(annotation)
}
| true
|
c4ae43d498639e9aa4e163735db4b355fd941281
|
Rust
|
rcdexta/rust-playground
|
/src/strings.rs
|
UTF-8
| 315
| 3.609375
| 4
|
[] |
no_license
|
// Small tour of owned-String construction, concatenation and iteration.
fn main() {
    // Two equivalent ways to build an owned String from a literal.
    let s1 = "new String".to_string();
    let s2 = String::from("Another String");
    println!("{}, {}", s1, s2);
    // Concatenate without consuming either operand.
    let s3 = format!("{}{}", s1, s2);
    let s = format!("{} && {}", s2, s3);
    println!("{} + {} = {}", s2, s3, s);
    println!("By iterating as chars!:");
    for ch in s.chars() {
        print!("{}", ch);
    }
}
| true
|
33b51ac6c4e0ddd613ea25e1567cb6a3584a66ef
|
Rust
|
wilfreddenton/parity_problem
|
/src/network/network_layer/neuron/mod.rs
|
UTF-8
| 1,488
| 2.890625
| 3
|
[] |
no_license
|
extern crate rand;
use std::f32::consts::E;
use self::rand::distributions::{IndependentSample, Range};
/// A single sigmoid unit.
#[derive(Debug)]
pub struct Neuron {
    // One weight per input, plus the bias weight stored at index 0
    // (see `new` and `activation_potential`).
    pub weights: Vec<f32>,
}
impl Neuron {
    // Weighted sum v = Σ w_i · x_i, with a constant 1.0 prepended to the
    // inputs so weights[0] acts as the bias. Clones `inputs` to do so.
    fn activation_potential(&self, inputs: &Vec<f32>) -> f32 {
        let mut inputs: Vec<f32> = inputs.to_owned();
        let mut v = 0.0;
        // add an input for the bias
        inputs.insert(0, 1.0);
        for (w, x) in self.weights.iter().zip(inputs.iter()) {
            v = v + w * x;
        }
        v
    }
    // Logistic sigmoid: 1 / (1 + e^(-v)).
    fn activation_function(&self, activation_potential: f32) -> f32 {
        1.0 / (1.0 + E.powf(-activation_potential))
    }
    /// Build a neuron with `num_inputs + 1` weights (bias included),
    /// each drawn uniformly from [-1, 1).
    pub fn new(num_inputs: usize) -> Neuron {
        let between = Range::new(-1.0, 1.0);
        let mut rng = rand::thread_rng();
        let mut weights: Vec<f32> = Vec::new();
        // add 1 to include the bias as w0
        for _ in 0..(num_inputs + 1) {
            weights.push(between.ind_sample(&mut rng));
        }
        Neuron {
            weights: weights,
        }
    }
    /// Apply a weight update: w_i += Δw_i, element-wise.
    pub fn update_weights(&mut self, delta_weights: &Vec<f32>) {
        let mut weights: Vec<f32> = Vec::new();
        for (dw, w) in delta_weights.iter().zip(self.weights.iter()) {
            weights.push(*w + *dw);
        }
        self.weights = weights;
    }
    /// Forward pass: sigmoid of the activation potential for `inputs`.
    pub fn signal(&self, inputs: &Vec<f32>) -> f32 {
        let v = self.activation_potential(inputs);
        let phi_prime: f32 = self.activation_function(v);
        phi_prime
    }
}
| true
|
440bf044e6b8e2a3157e7552d2479e57c9ccec5d
|
Rust
|
Alexhuszagh/bdb
|
/src/db/mass_spectra/fullms_mgf.rs
|
UTF-8
| 9,255
| 2.609375
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Utilities to load and save Pava FullMS MGF files.
use std::io::prelude::*;
use std::io::Lines;
use traits::*;
use util::*;
use super::mgf::MgfRecordIter;
use super::peak::Peak;
use super::re::*;
use super::record::Record;
// SIZE
/// Estimate the serialized size in bytes of a Pava FullMS MGF record.
#[inline]
pub(crate) fn estimate_fullms_mgf_record_size(record: &Record) -> usize {
    // Actual size is ~100 with a lot of extra size for the scan,
    // and the peptide RT, average m/z and intensity.
    const MGF_VOCABULARY_SIZE: usize = 175;
    // Estimated average is ~20 characters per line, assume slightly above.
    const MGF_PEAK_SIZE: usize = 25;
    // Fixed header overhead plus a per-peak-line estimate.
    MGF_VOCABULARY_SIZE + MGF_PEAK_SIZE * record.peaks.len()
}
// WRITER
#[inline(always)]
fn to_mgf<'a, T: Write>(writer: &mut T, record: &'a Record)
-> Result<()>
{
record_to_fullms_mgf(writer, record)
}
#[inline(always)]
fn export_scan<T: Write>(writer: &mut T, record: &Record)
-> Result<()>
{
let num = to_bytes(&record.num)?;
write_alls!(writer, b"Scan#: ", num.as_slice(), b"\n")?;
Ok(())
}
#[inline(always)]
fn export_rt<T: Write>(writer: &mut T, record: &Record)
-> Result<()>
{
let rt = to_bytes(&record.rt)?;
write_alls!(writer, b"Ret.Time: ", rt.as_slice(), b"\n")?;
Ok(())
}
#[inline(always)]
fn export_basepeak<T: Write>(writer: &mut T, record: &Record)
-> Result<()>
{
// Export the basepeak m/z and intensity, which is the m/z
// and intensity for the **most intense** peak in the peaklist.
match record.base_peak() {
None => {
write_alls!(writer, b"BasePeakMass: 0.0\nBasePeakIntensity: 0.0\n")?;
},
Some(v) => {
let mz = to_bytes(&v.mz)?;
let intensity = to_bytes(&v.intensity)?;
write_alls!(
writer,
b"BasePeakMass: ", mz.as_slice(),
b"\nBasePeakIntensity: ", intensity.as_slice(),
b"\n"
)?;
}
}
Ok(())
}
#[inline(always)]
fn export_spectra<T: Write>(writer: &mut T, record: &Record)
-> Result<()>
{
for peak in record.peaks.iter() {
let mz = to_bytes(&peak.mz)?;
let intensity = to_bytes(&peak.intensity)?;
write_alls!(writer, mz.as_slice(), b"\t", intensity.as_slice(), b"\n")?;
}
Ok(())
}
/// Export record to Pava FullMS MGF.
///
/// Writes, in order: scan number, retention time, placeholder
/// ion-injection-time / total-ion-current lines (these values are not
/// stored on `Record`), base-peak m/z and intensity, one `mz\tintensity`
/// line per peak, then two newlines as a record separator.
pub(crate) fn record_to_fullms_mgf<T: Write>(writer: &mut T, record: &Record)
    -> Result<()>
{
    export_scan(writer, record)?;
    export_rt(writer, record)?;
    // Export null values, since we don't store this information.
    writer.write_all(b"IonInjectionTime(ms): 0.0\nTotalIonCurrent: 0\n")?;
    export_basepeak(writer, record)?;
    export_spectra(writer, record)?;
    writer.write_all(b"\n\n")?;
    Ok(())
}
// WRITER -- DEFAULT
#[inline(always)]
fn init_cb<T: Write>(writer: &mut T, delimiter: u8)
-> Result<TextWriterState<T>>
{
Ok(TextWriterState::new(writer, delimiter))
}
#[inline(always)]
fn export_cb<'a, T: Write>(writer: &mut TextWriterState<T>, record: &'a Record)
-> Result<()>
{
writer.export(record, &to_mgf)
}
#[inline(always)]
fn dest_cb<T: Write>(_: &mut TextWriterState<T>)
-> Result<()>
{
Ok(())
}
/// Default exporter from a non-owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn reference_iterator_to_fullms_mgf<'a, Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = &'a Record>
{
reference_iterator_export(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
/// Default exporter from an owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn value_iterator_to_fullms_mgf<Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = Result<Record>>
{
value_iterator_export(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
// WRITER -- STRICT
/// Strict exporter from a non-owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn reference_iterator_to_fullms_mgf_strict<'a, Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = &'a Record>
{
reference_iterator_export_strict(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
/// Strict exporter from an owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn value_iterator_to_fullms_mgf_strict<Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = Result<Record>>
{
value_iterator_export_strict(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
// WRITER -- LENIENT
/// Lenient exporter from a non-owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn reference_iterator_to_fullms_mgf_lenient<'a, Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = &'a Record>
{
reference_iterator_export_lenient(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
/// Lenient exporter from an owning iterator to Pava FullMS MGF.
#[inline(always)]
pub(crate) fn value_iterator_to_fullms_mgf_lenient<Iter, T>(writer: &mut T, iter: Iter)
-> Result<()>
where T: Write,
Iter: Iterator<Item = Result<Record>>
{
value_iterator_export_lenient(writer, iter, b'\n', &init_cb, &export_cb, &dest_cb)
}
// READER
/// Parse the title header line.
#[inline(always)]
fn parse_scan_line<T: BufRead>(lines: &mut Lines<T>, record: &mut Record)
-> Result<()>
{
type Scan = FullMsMgfScanRegex;
// Verify and parse the scan line.
let line = none_to_error!(lines.next(), InvalidInput)?;
let captures = none_to_error!(Scan::extract().captures(&line), InvalidInput);
let num = capture_as_str(&captures, Scan::NUM_INDEX);
record.num = from_string(num)?;
Ok(())
}
/// Parse the RT header line.
#[inline(always)]
fn parse_rt_line<T: BufRead>(lines: &mut Lines<T>, record: &mut Record)
-> Result<()>
{
type Rt = FullMsMgfRtRegex;
// Verify and parse the RT line.
let line = none_to_error!(lines.next(), InvalidInput)?;
let captures = none_to_error!(Rt::extract().captures(&line), InvalidInput);
let rt = capture_as_str(&captures, Rt::RT_INDEX);
record.rt = from_string(rt)?;
Ok(())
}
/// Parse the ion injection time line.
#[inline(always)]
fn parse_ion_injection_time_line<T: BufRead>(lines: &mut Lines<T>, _: &mut Record)
-> Result<()>
{
// Verify the ion injection time line.
let line = none_to_error!(lines.next(), InvalidInput)?;
bool_to_error!(line.starts_with("IonInjectionTime(ms): "), InvalidInput);
Ok(())
}
/// Parse the total ion current line.
#[inline(always)]
fn parse_total_ion_current_line<T: BufRead>(lines: &mut Lines<T>, _: &mut Record)
-> Result<()>
{
// Verify the total ion current line.
let line = none_to_error!(lines.next(), InvalidInput)?;
bool_to_error!(line.starts_with("TotalIonCurrent: "), InvalidInput);
Ok(())
}
/// Parse the basepeak mass line.
#[inline(always)]
fn parse_basepeak_mass_line<T: BufRead>(lines: &mut Lines<T>, _: &mut Record)
-> Result<()>
{
// Verify the basepeak mass line.
let line = none_to_error!(lines.next(), InvalidInput)?;
bool_to_error!(line.starts_with("BasePeakMass: "), InvalidInput);
Ok(())
}
/// Parse the basepeak intensity line.
#[inline(always)]
fn parse_basepeak_intensity_line<T: BufRead>(lines: &mut Lines<T>, _: &mut Record)
-> Result<()>
{
// Verify the basepeak intensity line.
let line = none_to_error!(lines.next(), InvalidInput)?;
bool_to_error!(line.starts_with("BasePeakIntensity: "), InvalidInput);
Ok(())
}
/// Parse the peak list: one tab-separated `mz\tintensity` pair per line,
/// terminated by an empty line. Charge is not present in FullMS data, so
/// every peak gets `z = 0`. (Previous comment incorrectly described this
/// as parsing the charge header line.)
#[inline(always)]
fn parse_spectra<T: BufRead>(lines: &mut Lines<T>, record: &mut Record)
    -> Result<()>
{
    for result in lines {
        let line = result?;
        if line.is_empty() {
            break;
        }
        // Parse the line data: exactly two tab-separated fields.
        let mut items = line.split('\t');
        let mz = none_to_error!(items.next(), InvalidInput);
        let intensity = none_to_error!(items.next(), InvalidInput);
        // A third field is a format error.
        bool_to_error!(items.next().is_none(), InvalidInput);
        record.peaks.push(Peak {
            mz: from_string(mz)?,
            intensity: from_string(intensity)?,
            z: 0,
        });
    }
    Ok(())
}
/// Import record from MGF.
///
/// Expects the exact header order produced by `record_to_fullms_mgf`:
/// scan, retention time, ion injection time, total ion current, base-peak
/// mass, base-peak intensity, then the peak list.
pub(crate) fn record_from_fullms_mgf<T: BufRead>(reader: &mut T)
    -> Result<Record>
{
    let mut lines = reader.lines();
    // 50 peaks is a capacity guess to limit reallocation while parsing.
    let mut record = Record::with_peak_capacity(50);
    parse_scan_line(&mut lines, &mut record)?;
    parse_rt_line(&mut lines, &mut record)?;
    parse_ion_injection_time_line(&mut lines, &mut record)?;
    parse_total_ion_current_line(&mut lines, &mut record)?;
    parse_basepeak_mass_line(&mut lines, &mut record)?;
    parse_basepeak_intensity_line(&mut lines, &mut record)?;
    parse_spectra(&mut lines, &mut record)?;
    record.peaks.shrink_to_fit();
    Ok(record)
}
// READER -- DEFAULT
/// Create default record iterator from reader.
///
/// Records are delimited by lines starting with `Scan#:`.
#[inline(always)]
pub(crate) fn iterator_from_fullms_mgf<T: BufRead>(reader: T)
    -> MgfRecordIter<T>
{
    MgfRecordIter::new(reader, b"Scan#:", MgfKind::FullMs)
}
| true
|
1dca907132a08f17ed3fdab6e7b12a306fdf7339
|
Rust
|
stanciua/exercism
|
/rust/rectangles/src/lib.rs
|
UTF-8
| 3,592
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
extern crate itertools;
use std::collections::HashSet;
use itertools::Itertools;
type Point = (i32, i32);
// Count the ASCII-art rectangles in the diagram: corners are '+',
// horizontal edges '-' or '+', vertical edges '|' or '+'.
pub fn count(lines: &[&str]) -> i32 {
    if lines.is_empty() {
        return 0;
    }
    // Collect the (row, col) coordinates of every '+' — the only
    // characters that can act as rectangle corners.
    let rectangle_corners = lines.iter().enumerate().fold(HashSet::new(),
    |acc, (outer_idx, val)| {
        val.chars()
            .enumerate()
            .filter(|&(_, ch)| ch == '+')
            .fold(acc, |mut acc, (inner_idx, _)| {
                acc.insert((outer_idx as i32, inner_idx as i32));
                acc
            })
    });
    // Get the list of total points from the diagram, then grab all the
    // right corners and feed each to a function that enumerates every
    // candidate (up-left, down-right) pair ending at that corner.
    let rectangles = (0i32..lines.len() as i32)
        .cartesian_product(0i32..lines[0].len() as i32)
        .filter(|&(x, y)| x != 0 && y != 0)
        .map(|corner| get_list_of_rects_from_right_corner(corner))
        .fold(HashSet::<(Point, Point)>::new(), |mut acc, val| {
            acc.extend(val.iter());
            acc
        });
    count_rectangles(&rectangle_corners, &rectangles, lines)
}
// Enumerate every candidate rectangle ending at `point` as its down-right
// corner: all (up_left, down_right) pairs with up_left strictly above and
// to the left of `point`.
fn get_list_of_rects_from_right_corner(point: Point) -> HashSet<(Point, Point)> {
    let mut pairs = HashSet::new();
    // Every cell strictly inside the quadrant above-left of `point`
    // is a possible up-left corner.
    for row in 0..point.0 {
        for col in 0..point.1 {
            pairs.insert(((row, col), point));
        }
    }
    pairs
}
// Validate that the rectangle defined by its up-left and down-right
// corners is drawn with legal ASCII characters:
//   - horizontal edges: '+' or '-'
//   - vertical edges:   '+' or '|'
fn does_rectangle_contains_valid_chars(up_left: Point, right_down: Point, lines: &[&str]) -> bool {
    let top = up_left.0 as usize;
    let bottom = right_down.0 as usize;
    let left = up_left.1 as usize;
    let right = right_down.1 as usize;
    // Check one horizontal edge (a byte-slice of a single row).
    let horizontal_ok = |row: usize| {
        lines.iter()
            .skip(row)
            .take(1)
            .next()
            .unwrap()[left..right + 1]
            .chars()
            .all(|c| c == '+' || c == '-')
    };
    // Check one vertical edge (the `col`-th char of each spanned row).
    let vertical_ok = |col: usize| {
        lines.iter()
            .skip(top)
            .take(bottom - top + 1)
            .map(|row| row.chars().skip(col).take(1).next().unwrap())
            .all(|c| c == '+' || c == '|')
    };
    // Same short-circuit order as before: top, bottom, left, right.
    horizontal_ok(top) && horizontal_ok(bottom) && vertical_ok(left) && vertical_ok(right)
}
// Count the candidate corner pairs that form real rectangles: all four
// corners must be '+' points, and all four edges must use valid characters.
fn count_rectangles(rectangle_points: &HashSet<Point>,
                    from: &HashSet<(Point, Point)>,
                    lines: &[&str])
                    -> i32 {
    from.iter()
        .filter(|&&(up_left, right_down)| {
            rectangle_points.contains(&up_left)
                && rectangle_points.contains(&right_down)
                // down-left and up-right corners, derived from the pair
                && rectangle_points.contains(&(right_down.0, up_left.1))
                && rectangle_points.contains(&(up_left.0, right_down.1))
                && does_rectangle_contains_valid_chars(up_left, right_down, lines)
        })
        .count() as i32
}
| true
|
f7c2702c632273269d2feea5c70861a1bb5ed29b
|
Rust
|
pipi32167/LeetCode
|
/rust/src/problem_01_06.rs
|
UTF-8
| 821
| 3.4375
| 3
|
[] |
no_license
|
#[derive(Debug)]
struct Solution {}

impl Solution {
    /// Run-length compress `s` (e.g. "aabccc" -> "a2b1c3"), returning the
    /// compressed form only when it is strictly shorter than the input;
    /// otherwise the original string is returned unchanged.
    pub fn compress_string(s: String) -> String {
        // Nothing to gain for empty or single-character input.
        if s.len() <= 1 {
            return s;
        }
        let mut compressed = String::new();
        let mut run_char: Option<char> = None;
        let mut run_len = 0;
        for c in s.chars() {
            match run_char {
                // Current run continues.
                Some(prev) if prev == c => run_len += 1,
                // Run ended: flush it and start a new one.
                Some(prev) => {
                    compressed.push(prev);
                    compressed.push_str(&run_len.to_string());
                    run_char = Some(c);
                    run_len = 1;
                }
                // Very first character.
                None => {
                    run_char = Some(c);
                    run_len = 1;
                }
            }
        }
        // Flush the final run (always present, since s is non-empty here).
        if let Some(prev) = run_char {
            compressed.push(prev);
            compressed.push_str(&run_len.to_string());
        }
        if compressed.len() < s.len() {
            compressed
        } else {
            s
        }
    }
}
// Covers one string that compresses and one that is returned unchanged.
#[test]
fn test_compress_string() {
    assert_eq!(Solution::compress_string("aabcccccaaa".to_string()), "a2b1c5a3".to_string());
    assert_eq!(Solution::compress_string("abbccd".to_string()), "abbccd".to_string());
}
| true
|
7f69ab9c2a50a1cfe7b090610772bd014efbfc5c
|
Rust
|
bpglaser/advent
|
/2017/src/day10_part01/src/main.rs
|
UTF-8
| 1,056
| 3.1875
| 3
|
[] |
no_license
|
use std::env::args;
use std::fs::File;
use std::io::Read;
const MAX: u32 = 256;
fn main() {
    // Input file path is the first CLI argument.
    let path = args().nth(1).unwrap();
    let lengths = load_input(&path);
    // Circular list 0..255 plus the two cursors of the twist algorithm
    // (Advent of Code 2017 day 10, part 1).
    let mut list: Vec<_> = (0..MAX).collect();
    let mut current_position = 0;
    let mut skip_size = 0;
    for length in lengths {
        let mut i = current_position;
        // Copy out the wrapped-around segment of `length` elements
        // starting at the current position...
        let mut temp: Vec<_> = list.iter()
            .cloned()
            .cycle()
            .skip(current_position)
            .take(length)
            .collect();
        temp.reverse();
        // ...and write it back reversed, wrapping at the end of the list.
        for n in temp {
            list[i] = n;
            i = (i + 1) % list.len();
        }
        // Advance by the segment length plus an ever-growing skip size.
        current_position = (current_position + length + skip_size) % list.len();
        skip_size += 1;
    }
    println!("Product of first two digits: {}", list[0] * list[1]);
}
// Read the puzzle input: a single line of comma-separated lengths.
// Panics if the file cannot be read or a field fails to parse.
fn load_input(path: &str) -> Vec<usize> {
    // `fs::read_to_string` replaces the manual File::open + read_to_string
    // dance with the idiomatic one-call equivalent.
    let buf = std::fs::read_to_string(path).unwrap();
    buf.trim().split(',').map(|s| s.parse().unwrap()).collect()
}
| true
|
fdd32ef25b64a9fd522af8b32fbd8e75ed09c288
|
Rust
|
sym233/leetcode_problems
|
/684. Redundant Connection/684. Redundant Connection.rs
|
UTF-8
| 962
| 2.671875
| 3
|
[] |
no_license
|
impl Solution {
    /// Find the edge whose removal turns the graph back into a tree:
    /// union-find over 1-based nodes, where the first edge joining two
    /// nodes that already share a representative is the redundant one.
    pub fn find_redundant_connection(edges: Vec<Vec<i32>>) -> Vec<i32> {
        let n = edges.len();
        // union[i] == 0 is the sentinel for "node i not seen yet"
        // (node indices are 1-based, so slot 0 is unused).
        let mut union = vec![0usize; n + 1];
        for edge in edges {
            let a = edge[0] as usize;
            let b = edge[1] as usize;
            if union[a] == 0usize {
                // First sighting of `a`: it becomes its own root.
                union[a] = a;
            }
            if union[b] == 0 {
                // First sighting of `b`: attach it under `a`'s root.
                union[b] = Self::find_parent(&union, a);
            } else {
                let pa = Self::find_parent(&union, a);
                let pb = Self::find_parent(&union, b);
                if pa == pb {
                    // Same component already — this edge closes a cycle.
                    return edge;
                }
                // Merge b's tree into a's.
                union[b] = pa;
                union[pb] = pa;
            }
        }
        // Unreachable for valid inputs: one redundant edge always exists.
        return Vec::new();
    }
    // Follow parent links to the representative of `p`'s set.
    // Returns 0 if `p` was never initialised. No path compression.
    fn find_parent(u: &Vec<usize>, p: usize) -> usize {
        if u[p] == p || u[p] == 0 {
            return u[p];
        }
        return Self::find_parent(u, u[p]);
    }
}
| true
|
0374c5357af9140a56eb9ed27641fc3198af1a1e
|
Rust
|
xieren58/hs-probe-firmware
|
/firmware/src/swd.rs
|
UTF-8
| 5,631
| 2.53125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2019-2020 Adam Greig
// Dual licensed under the Apache 2.0 and MIT licenses.
use crate::bsp::{gpio::Pins, spi::SPI};
use num_enum::IntoPrimitive;
/// Errors arising from an SWD transaction.
#[derive(Copy, Clone, Debug)]
pub enum Error {
    /// Read data failed its parity check.
    BadParity,
    /// Target kept answering WAIT beyond the configured retry budget.
    AckWait,
    /// Target answered with the FAULT acknowledgement.
    AckFault,
    /// Target gave the protocol-error (all-ones, 0b111) response.
    AckProtocol,
    /// Unrecognised 3-bit acknowledgement value.
    AckUnknown(u8),
}
pub type Result<T> = core::result::Result<T, Error>;
#[repr(u8)]
#[derive(Copy, Clone, Debug, IntoPrimitive)]
pub enum DPRegister {
DPIDR = 0,
CTRLSTAT = 1,
SELECT = 2,
RDBUFF = 3,
}
pub struct SWD<'a> {
spi: &'a SPI,
pins: &'a Pins<'a>,
wait_retries: usize,
}
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
pub enum APnDP {
DP = 0,
AP = 1,
}
impl From<bool> for APnDP {
    fn from(x: bool) -> APnDP {
        // true selects the Access Port, false the Debug Port.
        match x {
            true => APnDP::AP,
            false => APnDP::DP,
        }
    }
}
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
enum RnW {
W = 0,
R = 1,
}
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
enum ACK {
OK = 0b001,
WAIT = 0b010,
FAULT = 0b100,
PROTOCOL = 0b111,
}
impl ACK {
    /// Map a raw 3-bit acknowledgement value to `Ok(())` for OK, or the
    /// matching [`Error`] variant otherwise.
    pub fn try_ok(ack: u8) -> Result<()> {
        match ack {
            v if v == (ACK::OK as u8) => Ok(()),
            v if v == (ACK::WAIT as u8) => Err(Error::AckWait),
            v if v == (ACK::FAULT as u8) => Err(Error::AckFault),
            v if v == (ACK::PROTOCOL as u8) => Err(Error::AckProtocol),
            _ => Err(Error::AckUnknown(ack)),
        }
    }
}
impl<'a> SWD<'a> {
pub fn new(spi: &'a SPI, pins: &'a Pins) -> Self {
SWD {
spi,
pins,
wait_retries: 8,
}
}
pub fn set_clock(&self, max_frequency: u32) -> bool {
if let Some(prescaler) = self.spi.calculate_prescaler(max_frequency) {
self.spi.set_prescaler(prescaler);
true
} else {
false
}
}
pub fn spi_enable(&self) {
self.spi.setup_swd();
}
pub fn spi_disable(&self) {
self.spi.disable();
}
pub fn set_wait_retries(&mut self, wait_retries: usize) {
self.wait_retries = wait_retries;
}
pub fn idle_low(&self) {
self.spi.tx4(0x0);
}
pub fn read_dp(&self, a: u8) -> Result<u32> {
self.read(APnDP::DP, a)
}
pub fn write_dp(&self, a: u8, data: u32) -> Result<()> {
self.write(APnDP::DP, a, data)
}
pub fn read_ap(&self, a: u8) -> Result<u32> {
self.read(APnDP::AP, a)
}
pub fn read(&self, apndp: APnDP, a: u8) -> Result<u32> {
for _ in 0..self.wait_retries {
match self.read_inner(apndp, a) {
Err(Error::AckWait) => continue,
x => return x,
}
}
Err(Error::AckWait)
}
pub fn write(&self, apndp: APnDP, a: u8, data: u32) -> Result<()> {
for _ in 0..self.wait_retries {
match self.write_inner(apndp, a, data) {
Err(Error::AckWait) => continue,
x => return x,
}
}
Err(Error::AckWait)
}
fn read_inner(&self, apndp: APnDP, a: u8) -> Result<u32> {
let req = Self::make_request(apndp, RnW::R, a);
self.spi.tx8(req);
self.spi.wait_busy();
self.spi.drain();
self.pins.swd_rx();
// 1 clock for turnaround and 3 for ACK
let ack = self.spi.rx4() >> 1;
match ACK::try_ok(ack as u8) {
Ok(_) => (),
Err(e) => {
// On non-OK ACK, target has released the bus but
// is still expecting a turnaround clock before
// the next request, and we need to take over the bus.
self.pins.swd_tx();
self.idle_low();
return Err(e);
}
}
// Read 8x4=32 bits of data and 8x1=8 bits for parity+turnaround+trailing.
// Doing a batch of 5 8-bit reads is the quickest option as we keep the FIFO hot.
let (data, parity) = self.spi.swd_rdata_phase(self.pins);
let parity = (parity & 1) as u32;
// Back to driving SWDIO to ensure it doesn't float high
self.pins.swd_tx();
if parity == (data.count_ones() & 1) {
Ok(data)
} else {
Err(Error::BadParity)
}
}
    /// Performs a single SWD write transaction (no WAIT retry).
    fn write_inner(&self, apndp: APnDP, a: u8, data: u32) -> Result<()> {
        let req = Self::make_request(apndp, RnW::W, a);
        // Even parity over the 32 data bits, transmitted after the data phase.
        let parity = data.count_ones() & 1;
        self.spi.tx8(req);
        self.spi.wait_busy();
        self.spi.drain();
        // Release SWDIO for the target's ACK phase.
        self.pins.swd_rx();
        // 1 clock for turnaround and 3 for ACK and 1 for turnaround
        let ack = (self.spi.rx5() >> 1) & 0b111;
        self.pins.swd_tx();
        match ACK::try_ok(ack as u8) {
            Ok(_) => (),
            Err(e) => return Err(e),
        }
        // Write 8x4=32 bits of data and 8x1=8 bits for parity+trailing idle.
        // This way we keep the FIFO full and eliminate delays between words,
        // even at the cost of more trailing bits. We can't change DS to 4 bits
        // until the FIFO is empty, and waiting for that costs more time overall.
        // Additionally, many debug ports require a couple of clock cycles after
        // the parity bit of a write transaction to make the write effective.
        self.spi.swd_wdata_phase(data, parity as u8);
        self.spi.wait_busy();
        Ok(())
    }
    /// Builds the 8-bit SWD request header per the ARM Debug Interface spec:
    /// bit0 start (1), bit1 APnDP, bit2 RnW, bits3-4 A[2:3], bit5 parity,
    /// bit6 stop (0), bit7 park (1).
    fn make_request(apndp: APnDP, rnw: RnW, a: u8) -> u8 {
        let req = 1 | ((apndp as u8) << 1) | ((rnw as u8) << 2) | (a << 3) | (1 << 7);
        // Parity formally covers bits 1-4 only, but counting ones over the
        // whole byte is equivalent: the start and park bits add exactly two
        // (an even number of) extra ones.
        let parity = (req.count_ones() & 1) as u8;
        req | (parity << 5)
    }
}
| true
|
62d31982a748af4f9bbdecda9744638fb7eb2121
|
Rust
|
nakat-t/nlp100
|
/q12/src/main.rs
|
UTF-8
| 1,348
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
use std::error::Error;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
/// nlp100 exercise 12: split each whitespace-separated line read from stdin
/// into its first two columns and write them to `col1.txt` and `col2.txt`
/// respectively (the equivalent of `cut -f1` / `cut -f2`).
///
/// Panics if either output file cannot be created, if a write fails, or if an
/// input line has fewer than two columns (unchanged from the original).
fn main() {
    let col1_path = Path::new("col1.txt");
    let col2_path = Path::new("col2.txt");

    // Buffer the writers so we pay one syscall per flushed buffer, not per line.
    // `io::Error` implements `Display`, so we print it directly instead of
    // going through the deprecated `Error::description`.
    let mut col1 = io::BufWriter::new(match File::create(&col1_path) {
        Err(why) => panic!("couldn't create {}: {}", col1_path.display(), why),
        Ok(file) => file,
    });
    let mut col2 = io::BufWriter::new(match File::create(&col2_path) {
        Err(why) => panic!("couldn't create {}: {}", col2_path.display(), why),
        Ok(file) => file,
    });

    loop {
        let mut line = String::new();
        match io::stdin().read_line(&mut line) {
            // `Ok(0)` signals end of input.
            Ok(0) => break,
            Ok(_) => {
                // Borrowed slices are enough here; no need to allocate a
                // String per column.
                let cols: Vec<&str> = line.split_whitespace().collect();
                // `unwrap_or_else` defers building the panic message until a
                // write actually fails (the old `expect(&format!(..))`
                // allocated on every line).
                writeln!(col1, "{}", cols[0])
                    .unwrap_or_else(|e| panic!("write failed to {}: {}", col1_path.display(), e));
                writeln!(col2, "{}", cols[1])
                    .unwrap_or_else(|e| panic!("write failed to {}: {}", col2_path.display(), e));
            }
            Err(err) => {
                println!("error: {}", err);
                break;
            }
        }
    }
}
| true
|
f00804bf30194130a96908ae313051e918e5a04e
|
Rust
|
nolanderc/washem
|
/interpret/src/ast/types.rs
|
UTF-8
| 1,859
| 2.9375
| 3
|
[] |
no_license
|
/// Size in bytes of one WebAssembly linear-memory page (64 KiB).
pub const PAGE_SIZE: usize = 1 << 16;
/// The four WebAssembly MVP value types.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValueType {
    I32,
    I64,
    F32,
    F64,
}
/// Result type of a block or function: at most one value in the wasm MVP.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResultType {
    /// `None` means no value is produced.
    pub types: Option<ValueType>,
}
/// Signature of a function: its parameter and result value types.
#[derive(Debug, Clone, PartialEq)]
pub struct FunctionType {
    pub parameters: Vec<ValueType>,
    pub results: Vec<ValueType>,
}
/// Size bounds for memories (in pages) and tables (in elements).
#[derive(Debug, Copy, Clone)]
pub struct Limits {
    /// Minimum size; always present.
    pub lower: u32,
    /// Optional maximum size; `None` means unbounded.
    pub upper: Option<u32>,
}
/// Type of a linear memory: just its page limits.
#[derive(Debug, Copy, Clone)]
pub struct MemoryType {
    pub limits: Limits,
}
/// Type of a table: element kind plus size limits.
#[derive(Debug, Copy, Clone)]
pub struct TableType {
    pub element: ElementType,
    pub limits: Limits,
}
/// Kinds of table elements; the wasm MVP only has function references.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ElementType {
    FunctionReference,
}
/// Type of a global: its value type plus mutability.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct GlobalType {
    pub ty: ValueType,
    pub mutability: Mutability,
}
/// Whether a global may be written after instantiation.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Mutability {
    Constant,
    Variable,
}
impl FunctionType {
    /// Function types are compatible for import linking only when exactly equal.
    pub fn matches(&self, other: &Self) -> bool {
        self == other
    }
}
impl TableType {
    /// A table satisfies a required table type when its limits are compatible
    /// and the element kinds are identical.
    pub fn matches(&self, other: &Self) -> bool {
        self.limits.matches(&other.limits) && self.element == other.element
    }
}
impl MemoryType {
    /// A memory satisfies a required memory type when its limits are compatible.
    pub fn matches(&self, other: &Self) -> bool {
        self.limits.matches(&other.limits)
    }
}
impl GlobalType {
    /// Global types match only when value type and mutability are identical.
    pub fn matches(self, other: Self) -> bool {
        self == other
    }
}
impl Limits {
    /// Import-matching check: `self` satisfies `other` when its minimum is at
    /// least as large, and — if `other` imposes a maximum — `self` also has a
    /// maximum that does not exceed it.
    pub fn matches(&self, other: &Self) -> bool {
        if self.lower < other.lower {
            return false;
        }
        match other.upper {
            // No upper bound required: anything is acceptable.
            None => true,
            Some(required_max) => match self.upper {
                Some(actual_max) => actual_max <= required_max,
                // `other` is bounded but `self` is not: incompatible.
                None => false,
            },
        }
    }
}
impl ResultType {
    /// Number of result values: wasm MVP result types hold at most one.
    pub fn arity(&self) -> u32 {
        match self.types {
            Some(_) => 1,
            None => 0,
        }
    }
}
| true
|
5ab269ef0953e1953ec3037b3f273f80c515a3ed
|
Rust
|
AuroransSolis/project-euler
|
/problem-10/src/main.rs
|
UTF-8
| 516
| 3.15625
| 3
|
[] |
no_license
|
const MAX: usize = 2_000_000;

/// Returns every prime strictly below `limit`, in increasing order, using
/// trial division by the primes found so far (checking divisors only up to
/// sqrt(n) for each candidate).
fn primes_below(limit: usize) -> Vec<usize> {
    // Below 3 the only candidate, 2, is excluded by "strictly below".
    if limit <= 2 {
        return Vec::new();
    }
    // Rough upper bound on the prime count (prime number theorem) so the
    // vector rarely needs to regrow.
    let mut primes = Vec::with_capacity(((limit as f64) / (limit as f64).ln()) as usize);
    primes.push(2);
    // Even numbers above 2 are never prime, so step over them entirely.
    for n in (3..limit).step_by(2) {
        let is_composite = primes
            .iter()
            .take_while(|&&p| p * p <= n)
            .any(|&p| n % p == 0);
        if !is_composite {
            primes.push(n);
        }
    }
    primes
}

/// Project Euler problem 10: sum of all primes below two million.
fn main() {
    let primes = primes_below(MAX);
    println!("number of primes: {}", primes.len());
    let summation = primes.into_iter().sum::<usize>();
    println!("p10: {}", summation);
}
| true
|
9c4b54bf5d152de6db8193844b0df918cfcddbd3
|
Rust
|
mmmpa/rust_simple_bbs
|
/src/url_separation.rs
|
UTF-8
| 4,869
| 3.203125
| 3
|
[] |
no_license
|
use std::collections::HashMap;
/// A trie of URL segments mapping route patterns (with `:name` placeholders)
/// to values of type `T`.
pub struct Matcher<T: Clone + Send + Sync> {
    /// Value returned when a path terminates exactly at this node.
    matching: Option<T>,
    /// Names of the `:placeholder` segments in the order they appear in the
    /// registered pattern; zipped with captured values on a successful match.
    matching_params: Option<Vec<String>>,
    /// Child nodes keyed by literal segment (or `PLACEHOLDER` for wildcards).
    children: Option<HashMap<String, Matcher<T>>>,
}

/// Trie key used for `:name` wildcard segments.
const PLACEHOLDER: &str = ":";

impl<T: Clone + Send + Sync> Matcher<T> {
    pub fn new(matching: Option<T>) -> Self {
        Matcher {
            matching,
            matching_params: None,
            children: None,
        }
    }

    /// Registers `matching` under the route pattern `base`,
    /// e.g. `"threads/:thread_id/posts"`.
    pub fn add(&mut self, base: &str, matching: T) {
        let (levels, keys) = separate(base);
        self.add_h(&levels, keys, matching);
    }

    fn add_h(&mut self, levels: &[String], keys: Vec<String>, matching: T) {
        // End of the pattern: this node owns the value and its key names.
        if levels.is_empty() {
            self.matching = Some(matching);
            self.matching_params = Some(keys);
            return;
        }

        // Lazily create the child map, then descend (creating nodes as needed).
        let children = self.children.get_or_insert_with(HashMap::new);
        let child = children
            .entry(levels[0].to_string())
            .or_insert_with(|| Matcher::new(None));

        child.add_h(&levels[1..], keys, matching);
    }

    /// Looks up `paths`; on success returns the stored value plus a map of
    /// placeholder name -> captured segment.
    pub fn pick(&self, paths: &[&str]) -> Option<(T, HashMap<String, String>)> {
        self.pick_r(paths, &mut Vec::with_capacity(2))
    }

    fn pick_r(&self, paths: &[&str], values: &mut Vec<String>) -> Option<(T, HashMap<String, String>)> {
        if paths.is_empty() {
            // A pattern must terminate exactly here to count as a match.
            let matching = self.matching.as_ref()?.clone();
            return match &self.matching_params {
                None => Some((matching, HashMap::new())),
                Some(keys) => Some((matching, build(keys, values))),
            };
        }

        // Prefer an exact segment match; otherwise fall back to a placeholder
        // child and record the concrete segment as a captured value.
        // (The original called `.clone()` here, which only cloned the
        // *reference* to the map — a no-op; a plain borrow is what happens.)
        let children = self.children.as_ref()?;
        let next = match children.get(paths[0]) {
            Some(next) => next,
            None => {
                let next = children.get(PLACEHOLDER)?;
                values.push(paths[0].to_string());
                next
            }
        };

        next.pick_r(&paths[1..], values)
    }
}

/// Splits a route pattern into its literal/placeholder segments plus the
/// ordered list of placeholder names:
/// `"aaa/:abc/ddd"` -> (`["aaa", ":", "ddd"]`, `["abc"]`).
fn separate(base: &str) -> (Vec<String>, Vec<String>) {
    let mut levels = vec![];
    let mut keys = vec![];
    let mut in_key = false;
    let mut head = 0;

    // Ensure the scanned text ends with '/' so the final segment is flushed
    // by the '/' arm below; slicing still happens against the original `base`.
    let checker = match base.chars().last() {
        Some(last) if last != '/' => {
            let mut padded = base.to_string();
            padded.push('/');
            padded
        }
        _ => base.to_string(),
    };

    // `char_indices` yields byte offsets, so slicing `base[head..i]` is safe
    // even with multi-byte UTF-8 segments (the original used `enumerate`'s
    // char counts as byte indices, which panics on non-ASCII patterns).
    for (i, c) in checker.char_indices() {
        match c {
            ':' => {
                // Start of a placeholder; skip the ':' itself when slicing.
                in_key = true;
                head += 1;
            }
            '/' => {
                let segment = base[head..i].to_string();
                head = i + 1;
                if in_key {
                    keys.push(segment);
                    levels.push(PLACEHOLDER.to_string());
                    in_key = false;
                } else {
                    levels.push(segment);
                }
            }
            _ => (),
        }
    }

    (levels, keys)
}

/// Zips placeholder names with the captured values into a lookup map.
fn build(keys: &[String], values: &[String]) -> HashMap<String, String> {
    keys.iter()
        .cloned()
        .zip(values.iter().cloned())
        .collect()
}
#[cfg(test)]
mod tests {
    use crate::url_separation::Matcher;
    use std::collections::HashMap;
    use std::sync::Arc;
    // Routing with a literal segment and a `:abc` placeholder segment.
    #[test]
    fn test_string() {
        let mut m = Matcher::new(Some("".to_string()));
        m.add("aaa/:abc/ddd", "matching DDD".to_string());
        m.add("aaa/bbb", "matching BBB".to_string());
        // Exact match with no placeholders yields an empty params map.
        let (matching, map) = m.pick(&vec!["aaa", "bbb"]).unwrap();
        assert_eq!(matching, "matching BBB".to_string());
        assert_eq!(map, HashMap::new());
        // Placeholder match: "DDD" is captured under the key "abc".
        let (matching, map) = m.pick(&vec!["aaa", "DDD", "ddd"]).unwrap();
        let mut expected = HashMap::new();
        expected.insert("abc".to_string(), "DDD".to_string());
        assert_eq!(matching, "matching DDD".to_string());
        assert_eq!(map, expected);
        // Unknown first segment: no route matches.
        assert!(m.pick(&vec!["b", "DDD", "ddd"]).is_none());
    }
    // The stored value can be any Clone + Send + Sync type, e.g. a callback.
    #[test]
    fn test_callback() {
        let mut m = Matcher::new(None);
        m.add("aaa/bbb", Arc::new(|| "test".to_string()));
        let (matching, _) = m.pick(&vec!["aaa", "bbb"]).unwrap();
        assert_eq!(matching(), "test".to_string());
    }
}
| true
|
74c99018e2583958a0974feb8bafffee710f2281
|
Rust
|
arnau/hextermbiest
|
/src/error.rs
|
UTF-8
| 538
| 2.640625
| 3
|
[
"MIT"
] |
permissive
|
use std::string::ParseError;
use std::num::ParseIntError;
use std::{result, str};
/// Crate-wide result alias using [`Error`].
pub type Result<T> = result::Result<T, Error>;
/// Errors produced while parsing or decoding input.
#[derive(Debug)]
pub enum Error {
    /// Any failure to parse text or numbers.
    Parse,
    /// Input bytes were not valid UTF-8.
    Utf8Error,
    /// The requested functionality does not exist yet.
    NotImplemented,
}
// The `From` impls below let callers apply `?` directly to the std error
// types, folding them into this crate's `Error`.
impl From<ParseIntError> for Error {
    fn from(_: ParseIntError) -> Error {
        Error::Parse
    }
}
impl From<ParseError> for Error {
    fn from(_: ParseError) -> Error {
        Error::Parse
    }
}
impl From<str::Utf8Error> for Error {
    fn from(_: str::Utf8Error) -> Error {
        Error::Utf8Error
    }
}
| true
|
873c09777a7d96d498a30a59490251120b33c646
|
Rust
|
curvelogic/eucalypt
|
/src/eval/stg/tags.rs
|
UTF-8
| 3,027
| 3.453125
| 3
|
[
"MIT"
] |
permissive
|
//! Predefined data tags and arities
use std::convert::TryFrom;
/// Datatype tag
pub type Tag = u8;
/// Predefined data type tags
#[derive(Copy, Clone)]
pub enum DataConstructor {
    Unit = 0,
    BoolTrue = 1,
    BoolFalse = 2,
    BoxedNumber = 3,
    BoxedSymbol = 4,
    BoxedString = 5,
    /// Empty list
    ListNil = 6,
    /// Propert list cons cell
    ListCons = 7,
    /// Default blocks are constructed as, but LOOKUP is polymorphic
    /// and works on alternative structures too.
    ///
    /// BLOCK (LIST_CONS (BLOCK_PAIR x y) (LIST_CONS (BLOCK_PAIR x y) LIST_NIL))
    Block = 8,
    /// BLOCK_PAIR is a pair of *unboxed* symbol and value
    BlockPair = 9,
    /// BLOCK_KV_LIST marks a list of which the first two elements are
    /// interpreted as KV
    BlockKvList = 10,
    /// Boxed zoned datetime
    BoxedZdt = 11,
}
impl DataConstructor {
    /// Raw tag value identifying this constructor in heap objects.
    pub fn tag(self) -> Tag {
        self as Tag
    }
    /// Number of arguments this constructor carries; this table must stay in
    /// sync with the variants above (and with `TryFrom<Tag>` below).
    pub fn arity(self) -> usize {
        match self {
            DataConstructor::Unit => 0,
            DataConstructor::BoolTrue => 0,
            DataConstructor::BoolFalse => 0,
            DataConstructor::BoxedNumber => 1,
            DataConstructor::BoxedSymbol => 1,
            DataConstructor::BoxedString => 1,
            DataConstructor::ListNil => 0,
            DataConstructor::ListCons => 2,
            DataConstructor::Block => 1,
            DataConstructor::BlockPair => 2,
            DataConstructor::BlockKvList => 2,
            DataConstructor::BoxedZdt => 1,
        }
    }
}
// Maps a raw tag back to its constructor; `Err(())` for unknown tags.
// Guard patterns (`value if value == Variant as Tag`) are used because enum
// discriminants cannot appear directly as match patterns without hard-coding
// their numeric values.
impl TryFrom<Tag> for DataConstructor {
    type Error = ();
    fn try_from(value: Tag) -> Result<Self, Self::Error> {
        match value {
            value if value == DataConstructor::Unit as Tag => Ok(DataConstructor::Unit),
            value if value == DataConstructor::BoolTrue as Tag => Ok(DataConstructor::BoolTrue),
            value if value == DataConstructor::BoolFalse as Tag => Ok(DataConstructor::BoolFalse),
            value if value == DataConstructor::BoxedNumber as Tag => {
                Ok(DataConstructor::BoxedNumber)
            }
            value if value == DataConstructor::BoxedSymbol as Tag => {
                Ok(DataConstructor::BoxedSymbol)
            }
            value if value == DataConstructor::BoxedString as Tag => {
                Ok(DataConstructor::BoxedString)
            }
            value if value == DataConstructor::ListNil as Tag => Ok(DataConstructor::ListNil),
            value if value == DataConstructor::ListCons as Tag => Ok(DataConstructor::ListCons),
            value if value == DataConstructor::Block as Tag => Ok(DataConstructor::Block),
            value if value == DataConstructor::BlockPair as Tag => Ok(DataConstructor::BlockPair),
            value if value == DataConstructor::BlockKvList as Tag => {
                Ok(DataConstructor::BlockKvList)
            }
            value if value == DataConstructor::BoxedZdt as Tag => Ok(DataConstructor::BoxedZdt),
            _ => Err(()),
        }
    }
}
| true
|
e99d7fec41a3325960d4bbbd2d965a37a10dfb38
|
Rust
|
andor44/valico
|
/src/json_schema/keywords/maxmin_length.rs
|
UTF-8
| 3,493
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
use rustc_serialize::json;
use super::super::schema;
use super::super::validators;
// Generates a keyword compiler for an integer-valued JSON Schema string
// keyword (`maxLength` / `minLength`): the schema value must be a
// non-negative integer, otherwise compilation reports a malformed schema.
macro_rules! kw_minmax_integer{
    ($name:ident, $keyword:expr) => {
        #[allow(missing_copy_implementations)]
        pub struct $name;
        impl super::Keyword for $name {
            fn compile(&self, def: &json::Json, ctx: &schema::WalkContext) -> super::KeywordResult {
                let length = keyword_key_exists!(def, $keyword);
                if length.is_number() {
                    let length_val = length.as_f64().unwrap();
                    // Accept only non-negative values with no fractional part.
                    if length_val >= 0f64 && length_val.fract() == 0f64 {
                        Ok(Some(Box::new(validators::$name {
                            length: length_val as u64
                        })))
                    } else {
                        Err(schema::SchemaError::Malformed {
                            // NOTE(review): `connect` is the pre-Rust-1.3 name
                            // of `join`; modern toolchains want `join("/")`.
                            path: ctx.fragment.connect("/"),
                            detail: "The value MUST be a positive integer or zero".to_string()
                        })
                    }
                } else {
                    Err(schema::SchemaError::Malformed {
                        path: ctx.fragment.connect("/"),
                        detail: "The value MUST be a positive integer or zero".to_string()
                    })
                }
            }
        }
    }
}
kw_minmax_integer!(MaxLength, "maxLength");
kw_minmax_integer!(MinLength, "minLength");
#[cfg(test)] use super::super::scope;
#[cfg(test)] use jsonway;
#[cfg(test)] use super::super::builder;
#[cfg(test)] use rustc_serialize::json::{ToJson};
// Boundary behaviour: a string of exactly `maxLength` characters is valid.
#[test]
fn validate_max_length() {
    let mut scope = scope::Scope::new();
    let schema = scope.compile_and_return(builder::schema(|s| {
        s.max_length(5u64);
    }).into_json(), true).ok().unwrap();;
    assert_eq!(schema.validate(&"1234".to_json()).is_valid(), true);
    assert_eq!(schema.validate(&"12345".to_json()).is_valid(), true);
    assert_eq!(schema.validate(&"123456".to_json()).is_valid(), false);
}
// Negative, non-numeric, and fractional keyword values must be rejected
// when the schema itself is compiled.
#[test]
fn malformed_max_length() {
    let mut scope = scope::Scope::new();
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("maxLength", (-1).to_json());
    }).unwrap(), true).is_err());
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("maxLength", "".to_json());
    }).unwrap(), true).is_err());
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("maxLength", (1.1).to_json());
    }).unwrap(), true).is_err());
}
// Boundary behaviour: a string of exactly `minLength` characters is valid.
#[test]
fn validate_min_length() {
    let mut scope = scope::Scope::new();
    let schema = scope.compile_and_return(builder::schema(|s| {
        s.min_length(5u64);
    }).into_json(), true).ok().unwrap();;
    assert_eq!(schema.validate(&"1234".to_json()).is_valid(), false);
    assert_eq!(schema.validate(&"12345".to_json()).is_valid(), true);
    assert_eq!(schema.validate(&"123456".to_json()).is_valid(), true);
}
// Same malformed-value rejections for `minLength`.
#[test]
fn malformed_min_length() {
    let mut scope = scope::Scope::new();
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("minLength", (-1).to_json());
    }).unwrap(), true).is_err());
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("minLength", "".to_json());
    }).unwrap(), true).is_err());
    assert!(scope.compile_and_return(jsonway::object(|schema| {
        schema.set("minLength", (1.1).to_json());
    }).unwrap(), true).is_err());
}
| true
|
90add3720838b2984a802639c28092c6cfa747a4
|
Rust
|
Giovan/lumen
|
/liblumen_alloc/src/erts/term/port.rs
|
UTF-8
| 2,592
| 2.8125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use core::cmp;
use core::fmt::{self, Debug, Display};
use core::hash::{Hash, Hasher};
use crate::borrow::CloneToProcess;
use crate::erts::exception::system::Alloc;
use crate::erts::{HeapAlloc, Node};
use super::{AsTerm, Term};
/// A port local to the current node, identified by its raw value only.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct Port(usize);
impl Port {
    /// Reifies a raw port value (as a `usize`) into a `Port`.
    #[inline]
    pub unsafe fn from_raw(port: usize) -> Self {
        Self(port)
    }
}
unsafe impl AsTerm for Port {
    #[inline]
    unsafe fn as_term(&self) -> Term {
        Term::make_port(self.0)
    }
}
impl Display for Port {
    fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
        unimplemented!()
    }
}
// A local port is never considered equal to an external (remote-node) port.
impl PartialEq<ExternalPort> for Port {
    #[inline]
    fn eq(&self, _other: &ExternalPort) -> bool {
        false
    }
}
impl PartialOrd<ExternalPort> for Port {
    #[inline]
    fn partial_cmp(&self, other: &ExternalPort) -> Option<cmp::Ordering> {
        // NOTE(review): this compares only the raw port values, so it can
        // return `Some(Ordering::Equal)` while `eq` above returns `false`,
        // which violates the PartialOrd/PartialEq consistency contract —
        // confirm the intended cross-type ordering semantics.
        self.partial_cmp(&other.port)
    }
}
/// A port that lives on another node.
pub struct ExternalPort {
    // Boxed-term header word.
    header: Term,
    // The node this port belongs to.
    node: Node,
    // NOTE(review): the purpose of `next` is not evident from this file —
    // it looks like an intrusive-list link; confirm before relying on it.
    next: *mut u8,
    // The port value local to that node.
    port: Port,
}
unsafe impl AsTerm for ExternalPort {
    #[inline]
    unsafe fn as_term(&self) -> Term {
        Term::make_boxed(self)
    }
}
impl CloneToProcess for ExternalPort {
    fn clone_to_heap<A: HeapAlloc>(&self, _heap: &mut A) -> Result<Term, Alloc> {
        unimplemented!()
    }
}
impl Debug for ExternalPort {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("ExternalPort")
            .field("header", &format_args!("{:#b}", &self.header.as_usize()))
            .field("node", &self.node)
            .field("next", &self.next)
            .field("port", &self.port)
            .finish()
    }
}
impl Display for ExternalPort {
    fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result {
        unimplemented!()
    }
}
// Hash agrees with PartialEq below: both consider (node, port) only.
impl Hash for ExternalPort {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.node.hash(state);
        self.port.hash(state);
    }
}
impl PartialEq<ExternalPort> for ExternalPort {
    #[inline]
    fn eq(&self, other: &ExternalPort) -> bool {
        self.node == other.node && self.port == other.port
    }
}
impl PartialOrd<ExternalPort> for ExternalPort {
    #[inline]
    fn partial_cmp(&self, other: &ExternalPort) -> Option<cmp::Ordering> {
        use cmp::Ordering;
        // Order by node first, then by port within the same node.
        match self.node.partial_cmp(&other.node) {
            Some(Ordering::Equal) => self.port.partial_cmp(&other.port),
            result => result,
        }
    }
}
| true
|
ae41a20f27754b6656d744b55bbf3298f232a1f1
|
Rust
|
oldwomanjosiah/twitch-clip-downloader
|
/src/state.rs
|
UTF-8
| 1,634
| 3.109375
| 3
|
[] |
no_license
|
use log::{info, trace};
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::path::PathBuf;
/// Cursor for resuming a paginated API listing in either direction.
#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum Pagination {
    Forwards(String),
    Backwards(String),
}
/// Persisted application state, serialized to/from a JSON file on disk.
#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)]
pub struct State {
    /// Authentication token, if one has been obtained.
    pub auth_token: Option<String>,
    /// When the token expires.
    pub auth_timeout: Option<time::OffsetDateTime>,
    /// Where to resume listing if a previous run stopped part-way.
    pub pagination: Option<Pagination>,
}
// NOTE(review): every field defaults to `None`, so `#[derive(Default)]` on
// `State` would make this manual impl unnecessary.
impl Default for State {
    fn default() -> Self {
        Self {
            auth_token: None,
            auth_timeout: None,
            pagination: None,
        }
    }
}
/// Attempts to load persisted [`State`] from `path` (or the default
/// location), returning `None` if the file is missing or cannot be parsed.
pub fn load(path: Option<PathBuf>) -> Option<State> {
    trace!("Trying to read state file");
    // `unwrap_or_else` avoids constructing the default PathBuf when `path`
    // is already `Some` (clippy: or_fun_call).
    let path = path.unwrap_or_else(|| PathBuf::from(crate::DEFAULT_STATE_LOCATION));
    if let Ok(file) = File::open(path) {
        trace!("File opened");
        let buf_reader = BufReader::new(file);
        match serde_json::from_reader(buf_reader) {
            Ok(read) => Some(read),
            Err(_) => {
                info!("Could not parse state file");
                None
            }
        }
    } else {
        info!("Could not open state file, may not exist yet");
        None
    }
}
/// Persists `state` as pretty-printed JSON to `path` (or the default
/// location).
///
/// Panics if the file cannot be created or the state fails to serialize.
pub fn save(state: &State, path: Option<PathBuf>) {
    trace!("Attempting to save state file");
    let path = path.unwrap_or_else(|| PathBuf::from(crate::DEFAULT_STATE_LOCATION));
    let file = File::create(path).expect("Could not write to State file");
    let buf_writer = BufWriter::new(file);
    serde_json::to_writer_pretty(buf_writer, state).expect("Could not serialize state");
}
| true
|
4a7cbabc8455dd8d2e08d23de8a43452950bc313
|
Rust
|
lovesegfault/leetcode
|
/course-scheduler-ii/src/main.rs
|
UTF-8
| 3,000
| 3.53125
| 4
|
[] |
no_license
|
pub struct Solution;

impl Solution {
    /// LeetCode "Course Schedule II": returns an order in which all
    /// `num_courses` courses can be taken, given `prerequisites` pairs
    /// `[a, b]` meaning "b must be taken before a", or an empty vector if
    /// the dependency graph contains a cycle.
    ///
    /// Kahn's algorithm: repeatedly schedule a course with no remaining
    /// prerequisites and decrement its dependents' counts. O(V + E).
    pub fn find_order(num_courses: i32, prerequisites: Vec<Vec<i32>>) -> Vec<i32> {
        let num_courses = num_courses as usize;
        // in_degree[c]: number of prerequisites course c still has.
        let mut in_degree = vec![0usize; num_courses];
        // dependents[c]: courses that list c as a prerequisite.
        // (The original used `vec![Vec::with_capacity(n); n]`, but `Vec`'s
        // `Clone` does not preserve capacity, so that reservation was lost.)
        let mut dependents: Vec<Vec<i32>> = vec![Vec::new(); num_courses];

        for req in &prerequisites {
            dependents[req[1] as usize].push(req[0]);
            in_degree[req[0] as usize] += 1;
        }

        // Seed with every course that has no prerequisites. A stack (not a
        // queue) is fine: any processing order of zero-in-degree nodes yields
        // a valid topological order.
        let mut ready: Vec<usize> = (0..num_courses).filter(|&c| in_degree[c] == 0).collect();

        let mut order = Vec::with_capacity(num_courses);
        while let Some(course) = ready.pop() {
            order.push(course as i32);
            // "Prune" this course from the graph: each dependent loses one
            // outstanding prerequisite.
            for &dep in &dependents[course] {
                let dep = dep as usize;
                in_degree[dep] -= 1;
                // All prerequisites satisfied: it can now be scheduled.
                if in_degree[dep] == 0 {
                    ready.push(dep);
                }
            }
        }

        // If some course was never scheduled, the graph has a cycle and no
        // valid ordering exists.
        if order.len() == num_courses {
            order
        } else {
            Vec::new()
        }
    }
}
/*
O(m + n)
L ← Empty list that will contain the sorted nodes
while exists nodes without a permanent mark do
select an unmarked node n
visit(n)
function visit(node n)
if n has a permanent mark then
return
if n has a temporary mark then
stop (not a DAG)
mark n with a temporary mark
for each node m with an edge from n to m do
visit(m)
remove temporary mark from n
mark n with a permanent mark
add n to head of L
*/
// Placeholder entry point; `Solution::find_order` is what LeetCode invokes.
fn main() {
    println!("Hello, world!");
}
| true
|
c742c9c57111ede76fa855173c3697f2d470c252
|
Rust
|
code0100fun/rs-vr
|
/lib/hid_rs/src/usb/device/info/iter.rs
|
UTF-8
| 2,742
| 2.703125
| 3
|
[] |
no_license
|
use winapi::um::setupapi::{
HDEVINFO,
};
use std::io;
use super::{HIDDeviceInfo};
use super::sys::{
get_device_info_set,
get_device_info,
};
/// Iterator over HID devices found via a Win32 SetupAPI device info set.
pub struct HIDDeviceInfoIter {
    /// Index of the next device interface to examine.
    pub index: u32,
    /// Lazily-created SetupAPI device info set handle.
    pub device_info_set: Option<HDEVINFO>,
}
impl Iterator for HIDDeviceInfoIter {
    type Item = io::Result<HIDDeviceInfo>;
    /// Yields the next HID device, skipping non-HID interfaces. Errors while
    /// querying an individual device are yielded as `Err` items so iteration
    /// can continue past them.
    fn next(&mut self) -> Option<io::Result<HIDDeviceInfo>> {
        match self.device_info_set() {
            Ok(device_info_set) => {
                match get_next_hid_device_info(device_info_set, self.index) {
                    (false, _, _) => {
                        None // no more results
                    }
                    (_, index, Some(Ok(device_info))) => {
                        self.index = index + 1; // may have skipped over non-HID devices
                        Some(Ok(device_info))
                    },
                    (_, index, Some(Err(error))) => {
                        self.index = index + 1; // may have skipped over non-HID devices
                        Some(Err(error)) // an error, return it but keep going
                    },
                    // get_next_hid_device_info never returns (true, _, None).
                    _ => unreachable!(),
                }
            }
            Err(_) => None, // can't iterate if we can't get this
        }
    }
}
impl HIDDeviceInfoIter {
    /// Returns the device info set handle, creating and caching it on first
    /// use; subsequent calls reuse the cached handle.
    fn device_info_set(&mut self) -> io::Result<HDEVINFO> {
        match self.device_info_set {
            None => {
                match get_device_info_set() {
                    Ok(device_info_set) => {
                        self.device_info_set = Some(device_info_set);
                        // fetch succeess
                        Ok(device_info_set)
                    }
                    Err(err) => Err(err),
                }
            }
            Some(device_info_set) => Ok(device_info_set) // cache success
        }
    }
}
/// Scans forward from `index` for the next HID device interface.
///
/// Returns `(more, index, result)`: `more` is `false` once the set is
/// exhausted; on success `index` is the position of the device found and
/// `result` carries its info (or the error hit while querying it). The
/// combination `(true, _, None)` is never produced.
fn get_next_hid_device_info(device_info_set: HDEVINFO, index: u32) -> (bool, u32, Option<io::Result<HIDDeviceInfo>>) {
    // get device at index
    // open and get info
    // is it the correct vendor/product
    //   no then skip and try again
    //   yes then build result
    let mut curr_index = index;
    let result;
    loop {
        match get_device_info(device_info_set, curr_index) {
            (true, None) => {
                curr_index += 1; // not a HID device so go to the next one
            }
            (true, Some(ok_or_error)) => {
                result = (true, curr_index, Some(ok_or_error)); // we found a HID device
                break;
            }
            (false, None) => {
                result = (false, curr_index, None); // no more devices
                break;
            }
            _ => unreachable!(),
        }
    }
    result
}
| true
|
2565dbf2e378a389c7bc17b319ccf8d8aa810b42
|
Rust
|
ascii-dresden/asciii
|
/examples/git_describe.rs
|
UTF-8
| 928
| 2.671875
| 3
|
[] |
no_license
|
use std::process::{Command, Output};
/// Runs `git --no-pager <command> <args...>` against the repository in the
/// current directory and returns the raw process output.
///
/// Panics (message "git_failed") if the `git` process cannot be spawned.
fn execute_git(command: &str, args: &[&str]) -> Output {
    let workdir = ".";
    let gitdir = "./.git";
    Command::new("git")
        .arg("--no-pager")
        .args(["--work-tree", workdir])
        .args(["--git-dir", gitdir])
        .arg(command)
        .args(args)
        .output()
        // `expect` replaces the clippy::expect_fun_call-style
        // `unwrap_or_else(|_| panic!(..))` and also reports the underlying
        // spawn error.
        .expect("git_failed")
}
use std::fs::File;
use std::io::Write;
/// Builds a build-description string ("build <commit count> (<short hash>)")
/// from `git log` and writes it to `most_recent_commit.txt`.
fn main() {
    let git_log =
        String::from_utf8(execute_git("log", &["--oneline", r##"--format=%h"##]).stdout).unwrap();
    // One short hash per line, so the line count equals the commit count.
    let count = git_log.lines().count().to_string();
    // NOTE(review): panics on a repository with zero commits (empty log).
    let last_commit = git_log.lines().next().unwrap().to_string();
    let description = format!("build {} ({})", count.trim(), last_commit.trim());
    println!("description string= {description:?}");
    let mut f = File::create("most_recent_commit.txt").unwrap();
    f.write_all(description.as_bytes()).unwrap();
}
| true
|
d802f3dfa1a8a47f0abe2e62a76922e7eacca424
|
Rust
|
ElectricRCAircraftGuy/eRCaGuy_hello_world
|
/rust/hello_world.rs
|
UTF-8
| 1,230
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
///usr/bin/env rustc "$0" -o /tmp/a && /tmp/a "$@"; exit
// For the line just above, see my answer here: https://stackoverflow.com/a/75491834/4561887
/*
This file is part of eRCaGuy_hello_world: https://github.com/ElectricRCAircraftGuy/eRCaGuy_hello_world
(description)
STATUS: (status)
keywords to easily grep or ripgrep in this repo for this program and what it teaches
KEYWORDS:
To compile and run (assuming you've already `cd`ed into this dir):
```bash
./hello_world.rs
# OR
rustc hello_world.rs --out-dir bin && bin/hello_world
```
References:
1. This code was originally learned from the official "Rust By Example" online book here:
https://doc.rust-lang.org/stable/rust-by-example/hello.html
1.
*/
// line comment
/*
block
comment
*/
// This is the main function.
fn main()
{
    // Statements here are executed when the compiled binary is called.
    // Print text to the console via the `println!()` macro.
    // (`println!` appends a trailing newline; use `print!` to omit it.)
    println!("Hello World!");
    println!("I'm a Rustacean!");
}
/*
SAMPLE OUTPUT:
eRCaGuy_hello_world/rust$ rustc hello_world.rs --out-dir bin && bin/hello_world
Hello World!
I'm a Rustacean!
eRCaGuy_hello_world/rust$ ./hello_world.rs
Hello World!
I'm a Rustacean!
*/
| true
|
314304afab4bf36bf2319806a1f8ea9ca54eb0a1
|
Rust
|
x0rz3q/advent-of-code-2015
|
/day05/src/main.rs
|
UTF-8
| 1,526
| 3.296875
| 3
|
[] |
no_license
|
use colored::*;
use itertools::Itertools;
use regex::Regex;
use std::{
cmp,
collections::{HashMap, HashSet},
time::Instant,
};
/// 2-D integer grid coordinate.
///
/// NOTE(review): currently unused by `silver`/`gold` in this file; kept for
/// compatibility with code that may rely on it.
// Deriving PartialEq/Eq replaces the original hand-written impls, which
// compared exactly the same fields (x, y) — and keeps Hash consistent with
// equality.
#[derive(Hash, Clone, Debug, Copy, PartialEq, Eq)]
struct Coordinate {
    x: i64,
    y: i64,
}

impl Coordinate {
    /// Convenience constructor.
    fn new(x: i64, y: i64) -> Coordinate {
        Coordinate { x, y }
    }
}
/// AoC 2015 day 5 part one: counts "nice" strings — at least three vowels,
/// at least one doubled letter, and none of the substrings ab/cd/pq/xy.
fn silver(input: &Vec<String>) -> usize {
    fn is_nice(s: &str) -> bool {
        const FORBIDDEN: [&str; 4] = ["ab", "cd", "pq", "xy"];
        // Any forbidden pair disqualifies the string outright.
        if FORBIDDEN.iter().any(|bad| s.contains(bad)) {
            return false;
        }
        // Need three or more vowels (repeats count).
        let vowel_count = s.chars().filter(|c| "aeiou".contains(*c)).count();
        if vowel_count < 3 {
            return false;
        }
        // Need some letter that appears doubled somewhere in the string.
        s.chars().any(|c| s.contains(&format!("{}{}", c, c)))
    }

    input.iter().filter(|line| is_nice(line)).count()
}
/// AoC 2015 day 5 part two: counts strings with (a) a letter that repeats
/// with exactly one character between (like "xyx"), and (b) a two-letter
/// pair appearing at least twice without overlapping. O(len^2) candidate
/// pairs are tried per string.
fn gold(input: &Vec<String>) -> usize {
    input
        .iter()
        .filter(|x| {
            // (a) some (a, b) forms an "aba" sandwich somewhere in x.
            x.chars()
                .cartesian_product(x.chars())
                .any(|(a, b)| x.matches(&format!("{}{}{}", a, b, a)).count() > 0)
                &&
            // (b) some pair occurs twice; `str::matches` is non-overlapping,
            // so "aaa" correctly does not count "aa" twice.
            x.chars()
                .cartesian_product(x.chars())
                .any(|(a, b)| x.matches(&format!("{}{}", a, b)).count() > 1)
        })
        .count()
}
/// Reads the puzzle input baked in at compile time and prints both answers
/// along with the elapsed wall time.
fn main() {
    let now = Instant::now();
    let input: Vec<String> = include_str!("input")
        .trim()
        .split('\n')
        .map(|x| x.to_string())
        .collect();
    // Sanity checks taken from the puzzle statement's examples.
    assert_eq!(1, silver(&vec!["ugknbfddgicrmopn".to_string()]));
    assert_eq!(0, silver(&vec!["jchzalrnumimnmhp".to_string()]));
    println!("Silver: {}", silver(&input));
    println!("Gold: {}", gold(&input));
    println!("Time: {}ms", now.elapsed().as_millis());
}
| true
|
13624494177f44cda0b881c1db57cb54a1f4f17c
|
Rust
|
Smithay/smithay
|
/src/wayland/viewporter/mod.rs
|
UTF-8
| 16,014
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
//! Utilities for handling the `wp_viewporter` protocol
//!
//! ## How to use it
//!
//! ### Initialization
//!
//! To initialize this implementation, create [`ViewporterState`], store it in your `State` struct and
//! implement the required traits, as shown in this example:
//!
//! ```
//! use smithay::wayland::viewporter::ViewporterState;
//! use smithay::delegate_viewporter;
//!
//! # struct State;
//! # let mut display = wayland_server::Display::<State>::new().unwrap();
//!
//! // Create the viewporter state:
//! let viewporter_state = ViewporterState::new::<State>(
//! &display.handle(), // the display
//! );
//!
//! // implement Dispatch for the Viewporter types
//! delegate_viewporter!(State);
//!
//! // You're now ready to go!
//! ```
//!
//! ### Use the viewport state
//!
//! The [`viewport state`](ViewportCachedState) is double-buffered and
//! can be accessed by using the [`with_states`] function
//!
//! ```no_compile
//! let viewport = with_states(surface, |states| {
//! states.cached_state.current::<ViewportCachedState>();
//! });
//! ```
//!
//! Before accessing the state you should call [`ensure_viewport_valid`]
//! to ensure the viewport is valid.
//!
//! Note: If you already hand over buffer management to smithay by using
//! [`on_commit_buffer_handler`](crate::backend::renderer::utils::on_commit_buffer_handler)
//! the implementation will already call [`ensure_viewport_valid`] for you.
use std::cell::RefCell;
use tracing::trace;
use wayland_protocols::wp::viewporter::server::{wp_viewport, wp_viewporter};
use wayland_server::{
backend::GlobalId, protocol::wl_surface, Dispatch, DisplayHandle, GlobalDispatch, Resource, Weak,
};
use crate::utils::{Logical, Rectangle, Size};
use super::compositor::{self, with_states, Cacheable, SurfaceData};
/// State of the wp_viewporter Global
#[derive(Debug)]
pub struct ViewporterState {
global: GlobalId,
}
impl ViewporterState {
/// Create new [`wp_viewporter`](wayland_protocols::viewporter::server::wp_viewporter) global.
///
/// It returns the viewporter state, which you can drop to remove these global from
/// the event loop in the future.
pub fn new<D>(display: &DisplayHandle) -> ViewporterState
where
D: GlobalDispatch<wp_viewporter::WpViewporter, ()>
+ Dispatch<wp_viewporter::WpViewporter, ()>
+ Dispatch<wp_viewport::WpViewport, ViewportState>
+ 'static,
{
ViewporterState {
global: display.create_global::<D, wp_viewporter::WpViewporter, ()>(1, ()),
}
}
/// Returns the viewporter global.
pub fn global(&self) -> GlobalId {
self.global.clone()
}
}
impl<D> GlobalDispatch<wp_viewporter::WpViewporter, (), D> for ViewporterState
where
    D: GlobalDispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewport::WpViewport, ViewportState>,
{
    // Binding is trivial: the viewporter global carries no per-client state.
    fn bind(
        _state: &mut D,
        _handle: &DisplayHandle,
        _client: &wayland_server::Client,
        resource: wayland_server::New<wp_viewporter::WpViewporter>,
        _global_data: &(),
        data_init: &mut wayland_server::DataInit<'_, D>,
    ) {
        data_init.init(resource, ());
    }
}
impl<D> Dispatch<wp_viewporter::WpViewporter, (), D> for ViewporterState
where
    D: GlobalDispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewport::WpViewport, ViewportState>,
{
    fn request(
        _state: &mut D,
        _client: &wayland_server::Client,
        _resource: &wp_viewporter::WpViewporter,
        request: <wp_viewporter::WpViewporter as wayland_server::Resource>::Request,
        _data: &(),
        _dhandle: &DisplayHandle,
        data_init: &mut wayland_server::DataInit<'_, D>,
    ) {
        match request {
            wp_viewporter::Request::GetViewport { id, surface } => {
                // The protocol allows at most one wp_viewport per surface;
                // a second request is a `viewport_exists` protocol error.
                let already_has_viewport = with_states(&surface, |states| {
                    states
                        .data_map
                        .get::<RefCell<Option<ViewportMarker>>>()
                        .map(|v| v.borrow().is_some())
                        .unwrap_or(false)
                });
                if already_has_viewport {
                    surface.post_error(
                        wp_viewporter::Error::ViewportExists as u32,
                        "the surface already has a viewport object associated".to_string(),
                    );
                    return;
                }
                // Store only a weak surface handle so the viewport does not
                // keep the surface alive.
                let viewport = data_init.init(
                    id,
                    ViewportState {
                        surface: surface.downgrade(),
                    },
                );
                let initial = with_states(&surface, |states| {
                    let inserted = states
                        .data_map
                        .insert_if_missing(|| RefCell::new(Some(ViewportMarker(viewport.downgrade()))));
                    // if we did not insert the marker it will be None as
                    // checked in already_has_viewport and we have to update
                    // it now
                    if !inserted {
                        *states
                            .data_map
                            .get::<RefCell<Option<ViewportMarker>>>()
                            .unwrap()
                            .borrow_mut() = Some(ViewportMarker(viewport.downgrade()));
                    }
                    inserted
                });
                // only add the pre-commit hook once for the surface
                if initial {
                    compositor::add_pre_commit_hook::<D, _>(&surface, viewport_commit_hook);
                }
            }
            wp_viewporter::Request::Destroy => {
                // All is already handled by our destructor
            }
            _ => unreachable!(),
        }
    }
}
// Request dispatch for a single wp_viewport object: validates and stages
// the double-buffered crop (src) and scale (dst) state of its surface.
impl<D> Dispatch<wp_viewport::WpViewport, ViewportState, D> for ViewportState
where
    D: GlobalDispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewporter::WpViewporter, ()>,
    D: Dispatch<wp_viewport::WpViewport, ViewportState>,
{
    fn request(
        _state: &mut D,
        _client: &wayland_server::Client,
        resource: &wp_viewport::WpViewport,
        request: <wp_viewport::WpViewport as wayland_server::Resource>::Request,
        data: &ViewportState,
        _dhandle: &DisplayHandle,
        _data_init: &mut wayland_server::DataInit<'_, D>,
    ) {
        match request {
            wp_viewport::Request::Destroy => {
                // Remove the marker and reset the pending viewport state so
                // the surface is presented unmodified again (only if the
                // surface itself is still alive).
                if let Ok(surface) = data.surface.upgrade() {
                    with_states(&surface, |states| {
                        states
                            .data_map
                            .get::<RefCell<Option<ViewportMarker>>>()
                            .unwrap()
                            .borrow_mut()
                            .take();
                        *states.cached_state.pending::<ViewportCachedState>() =
                            ViewportCachedState::default();
                    });
                }
            }
            wp_viewport::Request::SetSource { x, y, width, height } => {
                // If all of x, y, width and height are -1.0, the source rectangle is unset instead.
                // Any other set of values where width or height are zero or negative,
                // or x or y are negative, raise the bad_value protocol error.
                let is_unset = x == -1.0 && y == -1.0 && width == -1.0 && height == -1.0;
                let is_valid_src = x >= 0.0 && y >= 0.0 && width > 0.0 && height > 0.0;
                if !is_unset && !is_valid_src {
                    resource.post_error(
                        wp_viewport::Error::BadValue as u32,
                        "negative or zero values in width or height or negative values in x or y".to_string(),
                    );
                    return;
                }
                // If the wl_surface associated with the wp_viewport is destroyed,
                // all wp_viewport requests except 'destroy' raise the protocol error no_surface.
                let Ok(surface) = data.surface.upgrade() else {
                    resource.post_error(
                        wp_viewport::Error::NoSurface as u32,
                        "the wl_surface was destroyed".to_string(),
                    );
                    return;
                };
                // Stage the new source rectangle; it only takes effect on
                // the next surface commit (double-buffered state).
                with_states(&surface, |states| {
                    let mut viewport_state = states.cached_state.pending::<ViewportCachedState>();
                    let src = if is_unset {
                        None
                    } else {
                        let src = Rectangle::from_loc_and_size((x, y), (width, height));
                        trace!(surface = ?surface, src = ?src, "setting surface viewport src");
                        Some(src)
                    };
                    viewport_state.src = src;
                });
            }
            wp_viewport::Request::SetDestination { width, height } => {
                // If width is -1 and height is -1, the destination size is unset instead.
                // Any other pair of values for width and height that contains zero or
                // negative values raises the bad_value protocol error.
                let is_unset = width == -1 && height == -1;
                let is_valid_size = width > 0 && height > 0;
                if !is_unset && !is_valid_size {
                    resource.post_error(
                        wp_viewport::Error::BadValue as u32,
                        "negative or zero values in width or height".to_string(),
                    );
                    return;
                }
                // If the wl_surface associated with the wp_viewport is destroyed,
                // all wp_viewport requests except 'destroy' raise the protocol error no_surface.
                let Ok(surface) = data.surface.upgrade() else {
                    resource.post_error(
                        wp_viewport::Error::NoSurface as u32,
                        "the wl_surface was destroyed".to_string(),
                    );
                    return;
                };
                // Stage the new destination size; applied on the next commit.
                with_states(&surface, |states| {
                    let mut viewport_state = states.cached_state.pending::<ViewportCachedState>();
                    let size = if is_unset {
                        None
                    } else {
                        let dst = Size::from((width, height));
                        trace!(surface = ?surface, size = ?dst, "setting surface viewport destination size");
                        Some(dst)
                    };
                    viewport_state.dst = size;
                });
            }
            _ => unreachable!(),
        }
    }
}
/// State of a single viewport attached to a surface
#[derive(Debug)]
pub struct ViewportState {
    // Weak handle to the extended surface; upgraded on each request so a
    // destroyed surface can be detected (no_surface protocol error).
    surface: Weak<wl_surface::WlSurface>,
}
// Marker stored in the surface's data map: its presence means the surface
// already has a viewport. Holds a weak handle to the viewport so protocol
// errors can be posted on it from surface-side hooks.
struct ViewportMarker(Weak<wp_viewport::WpViewport>);
// Pre-commit hook: validates the staged viewport state right before the
// surface state is applied.
fn viewport_commit_hook<D: 'static>(_state: &mut D, _dh: &DisplayHandle, surface: &wl_surface::WlSurface) {
    with_states(surface, |states| {
        states
            .data_map
            .insert_if_missing(|| RefCell::new(Option::<ViewportMarker>::None));
        let viewport = states
            .data_map
            .get::<RefCell<Option<ViewportMarker>>>()
            .unwrap()
            .borrow();
        if let Some(viewport) = &*viewport {
            let viewport_state = states.cached_state.pending::<ViewportCachedState>();
            // If src_width or src_height are not integers and destination size is not set,
            // the bad_size protocol error is raised when the surface state is applied.
            let src_size = viewport_state.src.map(|src| src.size);
            // Fractionality check: compare the f64 source size against its
            // integer truncation; any difference means a non-integer size.
            if viewport_state.dst.is_none()
                && src_size != src_size.map(|s| Size::from((s.w as i32, s.h as i32)).to_f64())
            {
                // Post the error on the viewport object if it still exists.
                if let Ok(viewport) = viewport.0.upgrade() {
                    viewport.post_error(
                        wp_viewport::Error::BadSize as u32,
                        "destination size is not integer".to_string(),
                    );
                }
            }
        }
    });
}
/// Ensures that the viewport, if any, is valid accordingly to the protocol specification.
///
/// If the viewport violates any protocol checks a protocol error will be raised and `false`
/// is returned.
pub fn ensure_viewport_valid(states: &SurfaceData, buffer_size: Size<i32, Logical>) -> bool {
    states
        .data_map
        .insert_if_missing(|| RefCell::new(Option::<ViewportMarker>::None));
    let viewport = states
        .data_map
        .get::<RefCell<Option<ViewportMarker>>>()
        .unwrap()
        .borrow();
    if let Some(viewport) = &*viewport {
        let state = states.cached_state.pending::<ViewportCachedState>();
        // An unset source defaults to the whole buffer, which is trivially
        // contained; an explicit source must lie fully inside the buffer.
        let buffer_rect = Rectangle::from_loc_and_size((0.0, 0.0), buffer_size.to_f64());
        let src = state.src.unwrap_or(buffer_rect);
        let valid = buffer_rect.contains_rect(src);
        if !valid {
            // Post the error on the viewport object if it still exists.
            if let Ok(viewport) = viewport.0.upgrade() {
                viewport.post_error(
                    wp_viewport::Error::OutOfBuffer as u32,
                    "source rectangle extends outside of the content area".to_string(),
                );
            }
        }
        valid
    } else {
        // No viewport attached: nothing to validate.
        true
    }
}
/// Represents the double-buffered viewport
/// state of a [`WlSurface`](wl_surface::WlSurface)
// `None` in both fields means the surface is presented unmodified.
#[derive(Debug, Default, Clone, Copy)]
pub struct ViewportCachedState {
    /// Defines the source [`Rectangle`] of the [`WlSurface`](wl_surface::WlSurface) in [`Logical`]
    /// coordinates used for cropping.
    pub src: Option<Rectangle<f64, Logical>>,
    /// Defines the destination [`Size`] of the [`WlSurface`](wl_surface::WlSurface) in [`Logical`]
    /// coordinates used for scaling.
    pub dst: Option<Size<i32, Logical>>,
}
impl ViewportCachedState {
    /// Gets the actual size the [`WlSurface`](wl_surface::WlSurface) should have on screen in
    /// [`Logical`] coordinates.
    ///
    /// The explicit destination size wins; otherwise the truncated size of
    /// the source rectangle is used. If neither is set, `None` is returned.
    pub fn size(&self) -> Option<Size<i32, Logical>> {
        if let Some(dst) = self.dst {
            return Some(dst);
        }
        let src = self.src?;
        Some(Size::from((src.size.w as i32, src.size.h as i32)))
    }
}
impl Cacheable for ViewportCachedState {
    fn commit(&mut self, _dh: &DisplayHandle) -> Self {
        // ViewportCachedState is `Copy`: committing is a plain bitwise copy.
        *self
    }
    fn merge_into(self, into: &mut Self, _dh: &DisplayHandle) {
        // The pending state fully replaces the target state.
        *into = self;
    }
}
#[allow(missing_docs)] // TODO
#[macro_export]
macro_rules! delegate_viewporter {
    ($(@<$( $lt:tt $( : $clt:tt $(+ $dlt:tt )* )? ),+>)? $ty: ty) => {
        // Route the wp_viewporter global and its requests to ViewporterState,
        // and every wp_viewport object to its per-object ViewportState.
        $crate::reexports::wayland_server::delegate_global_dispatch!($(@< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? $ty: [
            $crate::reexports::wayland_protocols::wp::viewporter::server::wp_viewporter::WpViewporter: ()
        ] => $crate::wayland::viewporter::ViewporterState);
        $crate::reexports::wayland_server::delegate_dispatch!($(@< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? $ty: [
            $crate::reexports::wayland_protocols::wp::viewporter::server::wp_viewporter::WpViewporter: ()
        ] => $crate::wayland::viewporter::ViewporterState);
        $crate::reexports::wayland_server::delegate_dispatch!($(@< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? $ty: [
            $crate::reexports::wayland_protocols::wp::viewporter::server::wp_viewport::WpViewport: $crate::wayland::viewporter::ViewportState
        ] => $crate::wayland::viewporter::ViewportState);
    };
}
| true
|
0a0b4455143db8f8975f5f50449fbc7091dcba47
|
Rust
|
jkarns275/cc-rs
|
/src/parse_tree/function.rs
|
UTF-8
| 1,369
| 3.109375
| 3
|
[] |
no_license
|
use crate::parse_tree::*;
/// A single item in a function body: either a local declaration or a statement.
pub enum DeclOrStmnt {
    Decl(Declaration),
    Stmnt(Statement),
}
impl DeclOrStmnt {
    /// Pretty-prints this item into `buf`, resolving interned names via `si`.
    pub fn pretty_print(&self, buf: &mut String, si: &Interner<String>) {
        match self {
            DeclOrStmnt::Decl(d) => d.pretty_print(buf, si),
            DeclOrStmnt::Stmnt(s) => s.pretty_print(buf, si),
        }
    }
}
/// A parsed C function definition: declaration specifiers, declarator,
/// parameter declarations (if any) and the body items.
pub struct Function {
    pub specs: DeclSpec,
    pub decl: Declarator,
    // Declarations appearing between the declarator and the body
    // (K&R-style parameter declarations); empty for prototype-style
    // definitions.
    pub type_specs: Box<[Declaration]>,
    pub body: Box<[DeclOrStmnt]>,
}
impl Function {
    /// Bundles the pieces of a function definition without validation.
    pub fn new(specs: DeclSpec, decl: Declarator, type_specs: Box<[Declaration]>, body: Box<[DeclOrStmnt]>) -> Self {
        Function { specs, decl, type_specs, body }
    }
    /// Pretty-prints the whole definition into `buf`, resolving interned
    /// names via `si`.
    pub fn pretty_print(&self, buf: &mut String, si: &Interner<String>) {
        self.specs.pretty_print(buf, si);
        buf.push(' ');
        self.decl.pretty_print(buf, si);
        if !self.type_specs.is_empty() {
            for decl in self.type_specs.iter() {
                buf.push_str(" ");
                decl.pretty_print(buf, si);
                buf.push_str(";\n");
            }
            // Drop the trailing "\n" and ";" added by the last iteration so
            // the opening brace follows the final declaration directly.
            // NOTE(review): this also strips the final semicolon itself —
            // confirm that is the intended output format.
            buf.pop();
            buf.pop();
        }
        buf.push_str(" {\n");
        for s in self.body.iter() {
            buf.push_str(" ");
            s.pretty_print(buf, si);
            buf.push_str(";\n")
        }
        buf.push_str("}\n");
    }
}
| true
|
d6fac70bb1c30a95abd40eee18d8799535f6e0ca
|
Rust
|
madmax28/aoc2018
|
/day20/src/util.rs
|
UTF-8
| 961
| 3.515625
| 4
|
[] |
no_license
|
use std::fmt;
use std::ops::{Add, AddAssign};
/// A 2D grid position / step vector with integer coordinates.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct Point {
    x: i32,
    y: i32,
}
impl Point {
    /// Creates a point from explicit coordinates.
    pub fn new(x: i32, y: i32) -> Self {
        Point { x, y }
    }
    /// Maps a direction character (N/E/S/W) to a unit step.
    /// Y grows downwards, so 'N' is (0, -1).
    ///
    /// # Panics
    /// Panics on any character other than 'N', 'E', 'S' or 'W'.
    pub fn from_char(c: char) -> Self {
        match c {
            'N' => Point::new(0, -1),
            'E' => Point::new(1, 0),
            'S' => Point::new(0, 1),
            'W' => Point::new(-1, 0),
            // Previously a bare panic!() with no message; include the
            // offending character to ease debugging.
            _ => panic!("invalid direction character: {:?}", c),
        }
    }
}
impl Add for Point {
    type Output = Point;
    fn add(self, other: Point) -> Self::Output {
        Point::new(self.x + other.x, self.y + other.y)
    }
}
impl AddAssign for Point {
    fn add_assign(&mut self, other: Point) {
        *self = Point {
            x: self.x + other.x,
            y: self.y + other.y,
        };
    }
}
impl fmt::Debug for Point {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fixed: the format string used to contain a stray escaped closing
        // brace (" }}"), printing e.g. "(1, 2) }".
        write!(f, "({}, {})", self.x, self.y)
    }
}
| true
|
bb18a6cfc2efb409c52d9c03720a6c8a6935e3f6
|
Rust
|
NovaDenizen/bitwise
|
/src/word/mask_trailing_ones.rs
|
UTF-8
| 706
| 3.359375
| 3
|
[
"MIT"
] |
permissive
|
use word::Word;
/// Returns mask with the trailing 1's of `self` set.
///
/// If `x` is zero, returns `0`.
///
/// # Intrinsics:
/// - TBM: t1mskc, not.
///
/// # Examples
///
/// ```
/// use bitwise::word::*;
///
/// assert_eq!(0b0101_1111u8.mask_trailing_ones(), 0b0001_1111u8);
/// assert_eq!(mask_trailing_ones(0), 0);
/// ```
#[inline]
pub fn mask_trailing_ones<T: Word>(x: T) -> T {
    // `t1mskc` yields the complement of the trailing-ones mask; a final NOT
    // gives the mask itself (maps to TBM `t1mskc` + `not` on x86).
    !x.t1mskc()
}
/// Method version of [`mask_trailing_ones`](fn.mask_trailing_ones.html).
pub trait MaskTrailingOnes {
    #[inline]
    fn mask_trailing_ones(self) -> Self;
}
// Blanket impl: every `Word` gets the method form for free.
impl<T: Word> MaskTrailingOnes for T {
    #[inline]
    fn mask_trailing_ones(self) -> Self {
        mask_trailing_ones(self)
    }
}
| true
|
e0bc11dad65ff2bb63c34a4bdebfaf30026a0c63
|
Rust
|
sorz/sd-journal-rs
|
/src/entry.rs
|
UTF-8
| 3,979
| 2.8125
| 3
|
[] |
no_license
|
use crate::{journal::Journal,
journal_sys::{
ENOENT,
sd_journal_get_data,
sd_journal_enumerate_data,
sd_journal_restart_data,
},
};
use std::{
borrow::Cow,
collections::HashMap,
ffi::{c_void, CString},
fmt,
};
/// A single journal entry field, i.e. a `NAME=data` pair.
///
/// Borrows from the journal's internal buffer by default; use
/// [`FieldData::into_owned`] to detach it.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct FieldData<'e> {
    pub name: Cow<'e, str>,
    /// Raw field payload; journal data is not guaranteed to be UTF-8.
    pub data: Cow<'e, [u8]>,
}
impl<'a> FieldData<'a> {
    /// Splits a raw `NAME=data` buffer as returned by sd-journal.
    ///
    /// # Panics
    /// Panics if the buffer contains no `=` separator, or if the name part
    /// is not valid UTF-8.
    fn from_raw(data: &'a [u8]) -> Self {
        // splitn(2, ..) keeps any further '=' characters inside the data.
        let mut name_data = data.splitn(2, |c| *c == b'=');
        // splitn always yields at least one (possibly empty) piece.
        let name = name_data.next().unwrap();
        // Fixed message: what is missing here is the '=' separator (and
        // thus the data half), not the field name.
        let data = name_data.next().expect("missing '=' separator in field data");
        let name = std::str::from_utf8(name).expect("invalid utf-8 field name");
        Self {
            name: Cow::Borrowed(name),
            data: Cow::Borrowed(data),
        }
    }
    /// Converts into a `'static` field by cloning any borrowed parts.
    pub fn into_owned(self) -> FieldData<'static> {
        FieldData {
            name: Cow::Owned(self.name.into_owned()),
            data: Cow::Owned(self.data.into_owned()),
        }
    }
}
impl fmt::Display for FieldData<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The payload is rendered lossily since it may hold arbitrary bytes.
        write!(f, "{}={}", self.name, String::from_utf8_lossy(&self.data))
    }
}
/// Accessor for the fields of the entry the [`Journal`] cursor currently
/// points at.
pub struct Entry<'j> {
    pub(crate) journal: &'j Journal,
}
impl<'j> Entry<'j> {
    /// Looks up a single field by name, or `None` if the entry lacks it.
    ///
    /// # Panics
    /// Panics on any sd-journal error other than `ENOENT`.
    pub fn field<S: AsRef<str>>(&mut self, name: S) -> Option<FieldData> {
        let c_name = CString::new(name.as_ref()).unwrap();
        let mut buf = 0 as *const u8;
        let mut size = 0usize;
        // NOTE(review): relies on sd_journal_get_data filling `buf`/`size`
        // with a pointer/length that stays valid while we borrow the
        // journal — confirm against the sd-journal API lifetime contract.
        let ret = unsafe {
            sd_journal_get_data(
                self.journal.ret,
                c_name.as_ptr(),
                &mut buf as *mut _ as *mut *const c_void,
                &mut size,
            )
        };
        if ret >= 0 {
            let buf = unsafe { std::slice::from_raw_parts(buf, size) };
            let field = FieldData::from_raw(buf);
            // Sanity check: the returned buffer starts with the requested name.
            assert_eq!(name.as_ref(), field.name);
            Some(field)
        } else if ret == -(ENOENT as i32) {
            // Field not present in this entry.
            None
        } else {
            panic!("error on get field data: {}", ret);
        }
    }
    /// Iterates over all fields of the entry (borrowed from the journal).
    pub fn fields<'e>(&'e mut self) -> Fields<'e, 'j> {
        Fields(self)
    }
    /// Collects every field into an owned name -> raw-data map.
    pub fn all_fields(&mut self) -> HashMap<String, Vec<u8>> {
        let mut kvs = HashMap::new();
        for field in self.fields() {
            kvs.insert(field.name.into_owned(), field.data.into_owned());
        }
        kvs
    }
}
// Iterator over an entry's fields; borrows the entry so the journal cursor
// cannot move while iterating.
pub struct Fields<'e, 'j: 'e>(&'e Entry<'j>);
impl Drop for Fields<'_, '_> {
    fn drop(&mut self) {
        // Reset sd-journal's internal field enumeration so a later
        // `fields()` call starts from the first field again.
        unsafe { sd_journal_restart_data(self.0.journal.ret) }
    }
}
impl<'e, 'j> Iterator for Fields<'e, 'j> {
    type Item = FieldData<'e>;
    fn next(&mut self) -> Option<Self::Item> {
        let mut buf = 0 as *const u8;
        let mut size = 0usize;
        let ret = unsafe {
            sd_journal_enumerate_data(
                self.0.journal.ret,
                &mut buf as *mut _ as *mut *const c_void,
                &mut size,
            )
        };
        // Return convention: 0 = end of data, negative = error,
        // positive = one more field fetched into buf/size.
        match ret {
            0 => None,
            e if e < 0 => panic!("enumerate fail: {}", e),
            _ => {
                let buf = unsafe { std::slice::from_raw_parts(buf, size) };
                Some(FieldData::from_raw(buf))
            }
        }
    }
}
// NOTE(review): both tests below require a readable system journal with at
// least one entry at runtime; they are integration tests, not unit tests.
#[test]
fn test_nonexist_field() {
    use crate::flags::OpenFlags;
    use crate::journal::Seek;
    let mut journal = Journal::open(OpenFlags::empty()).unwrap();
    journal.seek(Seek::Head).unwrap();
    assert!(journal.next().unwrap());
    let mut entry = journal.entry();
    // A field name that should never exist yields None instead of panicking.
    assert!(entry.field("NON_EXIST__").is_none())
}
#[test]
fn test_enumerate_fields() {
    use crate::flags::OpenFlags;
    use crate::journal::Seek;
    let mut journal = Journal::open(OpenFlags::empty()).unwrap();
    journal.seek(Seek::Head).unwrap();
    assert!(journal.next().unwrap());
    let mut entry = journal.entry();
    // Smoke test: enumeration and Display formatting must not panic.
    for field in entry.fields() {
        println!("field {}", field);
    }
}
| true
|
9a25af77c9f03b2570bddf43b7afc5ac60d9cdc3
|
Rust
|
tc-lang/mk
|
/mk-parser/src/combinators/all.rs
|
UTF-8
| 18,570
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
// Due to a bug in Clippy, warnings are produced from calling `impl_tup_any`
// below when there shouldn't be. We're disabling them here because it isn't
// currently possible to add attributes to macro expansions
#![allow(clippy::identity_op)]
// Most of the goings-on of this module are just macro expansions - there's no
// reason to expose that mess.
//
// That being said, there are a few items that are made public through
// `combinators`:
// * IntoAll
// * Chain
// * all
// * DynAll
// As such, their documentation comments reflect that they are being used above
#![doc(hidden)]
use crate::{source::Source, utils::LazyString, DynParser, ParseResult, Parser};
#[cfg(feature = "bnf-syntax")]
use std::ops::{Add, BitOr};
/// Constructs a parser that matches only when each given parser matches in
/// succession.
///
/// [`IntoAll`] is implemented for n-tuples of parsers\* so that this function
/// can be called in a similar fashion to the example below:
///
/// \* `IntoAll` is only implemented on n-tuples up to n=16.
///
/// # Examples
///
/// ```
/// use mk_parser::{DynParser, Parser, source::Source};
/// use mk_parser::basics::{StrLit, char_lit};
/// use mk_parser::combinators::all;
///
/// let s = "foo☺bar";
/// let mut src = Source::new(s.as_bytes());
///
/// // Note that this trivial example is intentionally convoluted in order to
/// // demonstrate the flexibility of types allowed here.
/// let psr = all((StrLit("foo"), char_lit('☺').map(|_| "☺"), StrLit("bar")));
///
/// assert_eq!(psr.parse(&mut src, false).unwrap().0, ("foo", "☺", "bar"));
/// ```
///
/// [`IntoAll`]: trait.IntoAll.html
pub fn all<A: IntoAll>(into_all: A) -> A::Output {
    // Pure delegation: the tuple-to-parser conversion lives in `IntoAll`.
    into_all.into()
}
/// A trait that is implemented on n-tuples of parsers, up to n=16
///
/// This is essentially a clone of [`IntoAny`].
///
/// Information about its usage can be found in the function [`all`]. The rest
/// of the writing here is meant for people who like to abuse the crates they
/// import.
///
/// It may be useful to know that `IntoAll` isn't implemented on singleton
/// tuples (of the form `(P,)`) - it is instead implement for the types itself,
/// thus it is implemented on every parser as well. Calling [`all`] with a
/// single parser (which might be necessary if you're designing macros
/// yourself) could be done like:
/// ```no_run
/// # use mk_parser::{combinators::all, basics::StrLit};
/// # let _ = {
/// all(StrLit("foo"))
/// # };
/// ```
/// These details are thankfully likely not relevant to "typical" users of this
/// crate.
///
/// [`IntoAny`]: trait.IntoAny.html
/// [`all`]: fn.all.html
pub trait IntoAll: sealed::Sealed {
    /// The concrete combinator parser produced by [`all`] for this input.
    type Output: Parser;
    /// Consumes the tuple (or single parser) and builds the combinator.
    fn into(self) -> Self::Output;
}
pub mod sealed {
    // Sealing trait: prevents downstream crates from implementing `IntoAll`.
    pub trait Sealed {}
    // Recursively implements `Sealed` for every tuple arity, peeling one
    // type parameter per step; the base case covers single parser types.
    macro_rules! impl_sealed {
        ( $p:ident, $($ps:ident),+ ) => {
            impl<$p, $($ps),+> Sealed for ($p, $($ps),+)
            where
                $p: $crate::Parser,
                $($ps: $crate::Parser),+
            {}
            impl_sealed!( $($ps),+ );
        };
        ( $p:ident ) => {
            impl<P: $crate::Parser> Sealed for P {}
        }
    }
    impl_sealed! {
        P16, P15, P14, P13, P12, P11, P10, P9,
        P8, P7, P6, P5, P4, P3, P2, P1
    }
}
/// Parser combinator that matches on one parser followed by another
///
/// This parser is typically constructed with the parser method [`Parser::and`],
/// but is also the resulting parser from calling [`all`] with two inputs.
///
/// The output type of this parser is a tuple where the first element is the
/// output from the first parser's match and the second is the output from the
/// second.
///
/// # Examples
///
/// ```
/// use mk_parser::{DynParser, Parser, source::Source};
/// use mk_parser::basics::StrLit;
///
/// let s = "foo-bar";
/// let mut src = Source::new(s.as_bytes());
///
/// let psr = StrLit("foo").and(StrLit("-bar"));
///
/// assert_eq!(psr.parse(&mut src, false).unwrap().0, ("foo", "-bar"));
/// ```
///
/// For matching many parsers in sequence, it is recommended to use [`all`] -
/// or [`DynAll`] if that sequence is not known at compile-time.
///
/// [`Parser::and`]: trait.Parser.html#method.and
/// [`all`]: fn.all.html
/// [`DynAll`]: struct.DynAll.html
pub struct Chain<P1, P2>
where
    P1: Parser,
    P2: Parser,
{
    // Matched in order: `first`, then `second`; both must succeed.
    pub(crate) first: P1,
    pub(crate) second: P2,
}
// A two-tuple becomes the dedicated `Chain` combinator (the same parser
// that `Parser::and` constructs) rather than a generated `All2`.
impl<P1: Parser, P2: Parser> IntoAll for (P1, P2) {
    type Output = Chain<P1, P2>;
    fn into(self) -> Chain<P1, P2> {
        Chain {
            first: self.0,
            second: self.1,
        }
    }
}
impl<P1: Parser, P2: Parser> DynParser for Chain<P1, P2> {
    type Output = (P1::Output, P2::Output);
    fn parse(&self, src: &mut Source, msg_hint: bool) -> ParseResult<Self::Output> {
        // Failures report the chain's starting position, not the position
        // where the failing sub-parser stopped.
        let pos = src.pos();
        let first = match self.first.parse(src, msg_hint) {
            ParseResult::Error(e) => return ParseResult::Error(e),
            ParseResult::Success(o, _) => o,
            ParseResult::Fail(p, lvl, m) => {
                // Build the failure message lazily, and only when a message
                // exists or one was explicitly requested via `msg_hint`.
                let msg = if m.is_some() && (lvl != 0 || msg_hint) {
                    Some(LazyString::new(move || {
                        format!(
                            "{} at {}:\n{}",
                            "Failed to match first parser",
                            p,
                            String::from(m.unwrap())
                        )
                    }))
                } else if msg_hint {
                    Some(LazyString::new(move || {
                        format!(
                            "{} at {} without message",
                            "Failed to match first parser", p
                        )
                    }))
                } else {
                    None
                };
                return ParseResult::Fail(pos, lvl, msg);
            }
        };
        // Note: `src` is not rewound here if `second` fails — presumably the
        // caller handles resetting via the reported start position; verify.
        let second = match self.second.parse(src, msg_hint) {
            ParseResult::Error(e) => return ParseResult::Error(e),
            ParseResult::Success(o, _) => o,
            ParseResult::Fail(p, lvl, m) => {
                let msg = if m.is_some() && (lvl != 0 || msg_hint) {
                    Some(LazyString::new(move || {
                        format!(
                            "{} at {}:\n{}",
                            "Failed to match second parser",
                            p,
                            String::from(m.unwrap())
                        )
                    }))
                } else if msg_hint {
                    Some(LazyString::new(move || {
                        format!(
                            "{} at {} without message",
                            "Failed to match second parser", p
                        )
                    }))
                } else {
                    None
                };
                return ParseResult::Fail(pos, lvl, msg);
            }
        };
        ParseResult::Success((first, second), pos)
    }
}
// No additional items are required beyond the `DynParser` impl.
impl<P1, P2> Parser for Chain<P1, P2>
where
    P1: Parser,
    P2: Parser,
{
}
// BNF-style operator sugar (feature "bnf-syntax"); `|` body is supplied by
// the `impl_bitor!` macro, requiring both sides to share an output type.
#[cfg(feature = "bnf-syntax")]
impl<P1, P2, P3> BitOr<P3> for Chain<P1, P2>
where
    P1: Parser,
    P2: Parser,
    P3: Parser<Output = (P1::Output, P2::Output)>,
{
    impl_bitor!(P3);
}
// `chain + p3` widens a two-parser chain into an `All3` over the three
// parsers, preserving their order.
#[cfg(feature = "bnf-syntax")]
impl<P1, P2, P3> Add<P3> for Chain<P1, P2>
where
    P1: Parser,
    P2: Parser,
    P3: Parser,
{
    type Output = All3<P1, P2, P3>;
    fn add(self, rhs: P3) -> All3<P1, P2, P3> {
        All3 {
            inner: (self.first, self.second, rhs),
        }
    }
}
// Helper used by `impl_tup_all!` to emit the `Add` (i.e. `+`) impl that
// grows an `AllN` into an `AllN+1`; skipped for the largest arity.
macro_rules! special_impl_add {
    (
        Top:
        $all_head:ident, $all:ident, $($all_tail:ident),+ @
        $p:ident, $($ps:ident),+ @
        $oid:ident, $idx:tt @ $($oids:ident, $idx_tail:tt)@+
    ) => {
        // We don't want to implement it for the highest number, because that
        // could lead to obscure errors if someone (for **SOME** reason) were
        // to try to chain more than 16 parsers together.
    };
    (
        Middle:
        $all_head:ident, $all:ident, $($all_tail:ident),+ @
        $p:ident, $($ps:ident),+ @
        $oid:ident, $idx:tt @ $($oids:ident, $idx_tail:tt)@+
    ) => {
        #[cfg(feature = "bnf-syntax")]
        impl<P, $p, $($ps),+> Add<P> for $all<$p, $($ps),+>
        where
            $p: Parser,
            $($ps: Parser,)+
            P: Parser<Output = <Self as DynParser>::Output>,
        {
            type Output = $all_head<$p, $($ps),+, P>;
            fn add(self, rhs: P) -> Self::Output {
                // The tuple is stored reversed; un-reverse it, then append
                // `rhs` to build the next-larger All struct.
                let rev_tup = (self.inner.$idx, $(self.inner.$idx_tail),+);
                $all_head {
                    inner: (rev_tup.$idx, $(rev_tup.$idx_tail),+, rhs),
                }
            }
        }
    };
}
// Generates, for each tuple arity, the `AllN` struct plus its `IntoAll`,
// `DynParser`, `Parser`, `BitOr` and `Add` impls; recurses by peeling one
// parser per step and stops at the two-parser base arm (which emits
// nothing — pairs are handled by `Chain`).
macro_rules! impl_tup_all {
    (
        $TOP:ident:
        $all_head:ident, $all:ident, $all_tail:ident @
        $p:ident, $ps:ident @
        $oid:ident, $idx:tt @ $oids:ident, $idx_tail:tt
    ) => {};
    (
        $TOP:ident:
        $all_head:ident, $all:ident, $($all_tail:ident),+ @
        $p:ident, $($ps:ident),+ @
        $oid:ident, $idx:tt @ $($oids:ident, $idx_tail:tt)@+
    ) => {
        pub struct $all<$p: Parser, $($ps: Parser),+> {
            inner: ($p, $($ps),+),
        }
        impl<$p: Parser, $($ps: Parser),+> IntoAll for ($p,$($ps),+) {
            type Output = $all<$p, $($ps),+>;
            fn into(self) -> Self::Output {
                $all {
                    inner: self,
                }
            }
        }
        impl<$p, $($ps),+> DynParser for $all<$p, $($ps),+>
        where
            $p: Parser,
            $($ps: Parser),+
        {
            type Output = ($p::Output, $($ps::Output),+);
            fn parse(&self, src: &mut Source, msg_hint: bool) -> ParseResult<Self::Output> {
                let pos = src.pos();
                // reverse the parsers so that we can have everything in the
                // right order
                let rev_psrs = (&self.inner.$idx, $(&self.inner.$idx_tail),+);
                let $oid = match rev_psrs.$idx.parse(src, msg_hint) {
                    ParseResult::Error(e) => return ParseResult::Error(e),
                    ParseResult::Success(o,_) => o,
                    ParseResult::Fail(p,lvl,m) => {
                        let msg = if m.is_some() && (lvl != 0 || msg_hint) {
                            Some(LazyString::new(move || {
                                format!("{} #0 at {}:\n{}",
                                        "Failed to match parser",
                                        p, String::from(m.unwrap()))
                            }))
                        } else if msg_hint {
                            Some(LazyString::new(move || {
                                format!("{} #0 at {} without message",
                                        "Failed to match parser", p)
                            }))
                        } else {
                            None
                        };
                        return ParseResult::Fail(pos, lvl, msg);
                    },
                };
                $(let $oids = match rev_psrs.$idx_tail.parse(src, msg_hint) {
                    ParseResult::Error(e) => return ParseResult::Error(e),
                    ParseResult::Success(o,_) => o,
                    ParseResult::Fail(p,lvl,m) => {
                        let msg = if m.is_some() && (lvl != 0 || msg_hint) {
                            Some(LazyString::new(move || {
                                // Fixed argument order: the parser index
                                // ($idx-$idx_tail) belongs after '#' and the
                                // position `p` after 'at' — previously they
                                // were swapped (cf. the '#0' arm above and
                                // the msg_hint arm below).
                                format!("{} #{:?} at {}:\n{}",
                                        "Failed to match parser",
                                        $idx-$idx_tail, p, String::from(m.unwrap()))
                            }))
                        } else if msg_hint {
                            Some(LazyString::new(move || {
                                format!("{} #{:?} at {} without message",
                                        "Failed to match parser",
                                        $idx-$idx_tail, p)
                            }))
                        } else {
                            None
                        };
                        return ParseResult::Fail(pos, lvl, msg);
                    },
                };)+
                return ParseResult::Success(($oid, $($oids),+), pos);
            }
        }
        impl<$p, $($ps),+> Parser for $all<$p, $($ps),+>
        where
            $p: Parser,
            $($ps: Parser),+
        {}
        #[cfg(feature = "bnf-syntax")]
        impl<P, $p, $($ps),+> BitOr<P> for $all<$p, $($ps),+>
        where
            $p: Parser,
            $($ps: Parser,)+
            P: Parser<Output = <Self as DynParser>::Output>,
        {
            impl_bitor!(P);
        }
        special_impl_add! {
            $TOP:
            $all_head, $all, $($all_tail),+ @
            $p, $($ps),+ @
            $oid, $idx @ $($oids, $idx_tail)@+
        }
        impl_tup_all! {
            Middle:
            $all, $($all_tail),+ @
            $($ps),+ @
            $($oids, $idx_tail)@+
        }
    };
}
// Expand the All* combinators for the tuple arities; the index/ident lists
// are given largest-first because the macro peels from the front.
impl_tup_all! {
    Top:
    All_Pad,
    All16, All15, All14, All13, All12, All11, All10, All9,
    All8, All7, All6, All5, All4, All3, All2, All1 @
    P15, P14, P13, P12,
    P11, P10, P9, P8,
    P7, P6, P5, P4,
    P3, P2, P1, P0 @
    o15, 15 @ o14, 14 @ o13, 13 @ o12, 12 @
    o11, 11 @ o10, 10 @ o9, 9 @ o8, 8 @
    o7, 7 @ o6, 6 @ o5, 5 @ o4, 4 @
    o3, 3 @ o2, 2 @ o1, 1 @ o0, 0
}
// Base case: a lone parser is already the "all" of itself, so `all(p)`
// simply returns `p` unchanged.
impl<P: Parser> IntoAll for P {
    type Output = Self;
    fn into(self) -> Self {
        self
    }
}
/// Parser that matches when an entire runtime sequence of parsers matches
///
/// Unlike the function [`all`], this parser allows the calculation of the
/// sequence of parsers to be deferred until runtime. It *will* be slower than
/// one computed at compile-time, so this type should be used exclusively for
/// cases where the combinator **must** be built at runtime. For other uses,
/// see [`all`].
///
/// # Examples
///
/// For this case, the types can't be computed at compile-time. If you need to,
/// it should be simple enough to extend this idea to cases where the length of
/// the sequence isn't known at compile-time either.
/// ```
/// use mk_parser::{
/// Parser, DynParser,
/// source::Source,
/// basics::{StrLit, char_lit, StringLit},
/// combinators::DynAll,
/// };
///
/// fn gen_psr(i: i32) -> Box<dyn DynParser<Output = String>> {
/// match i {
/// 0 => Box::new(StrLit("zero").map(String::from)),
/// 1 => Box::new(char_lit(' ').map(|_| String::from(" "))),
/// _ => Box::new(StringLit(String::from("two")).name("Special number two!")),
/// }
/// }
///
/// let s = "zero two";
/// let mut src = Source::new(s.as_bytes());
///
/// let psr = DynAll(vec![gen_psr(0), gen_psr(1), gen_psr(2)]);
/// assert_eq!(psr.parse(&mut src, false).unwrap().0, &["zero", " ", "two"]);
/// ```
///
/// [`all`]: fn.all.html
// Boxed trait objects permit a heterogeneous, runtime-built sequence; all
// parsers must share the single output type `T`.
pub struct DynAll<T>(pub Vec<Box<dyn DynParser<Output = T>>>);
impl<T> DynParser for DynAll<T> {
    type Output = Vec<T>;
    fn parse(&self, src: &mut Source, msg_hint: bool) -> ParseResult<Vec<T>> {
        // Failures report the sequence's starting position and the index of
        // the sub-parser that failed.
        let pos = src.pos();
        let mut outs = Vec::with_capacity(self.0.len());
        for (i, psr) in self.0.iter().enumerate() {
            let t = match psr.parse(src, msg_hint) {
                ParseResult::Error(e) => return ParseResult::Error(e),
                ParseResult::Success(t, _) => t,
                ParseResult::Fail(p, lvl, m) => {
                    // Message construction is deferred via LazyString and
                    // skipped entirely unless requested or already present.
                    let msg = if m.is_some() && (lvl != 0 || msg_hint) {
                        Some(LazyString::new(move || {
                            format!(
                                "{} #{} at {}:\n{}",
                                "Failed to match dyn parser",
                                i,
                                p,
                                String::from(m.unwrap())
                            )
                        }))
                    } else if msg_hint {
                        Some(LazyString::new(move || {
                            format!(
                                "{} #{} at {} without message",
                                "Failed to match dyn parser", i, p
                            )
                        }))
                    } else {
                        None
                    };
                    return ParseResult::Fail(pos, lvl, msg);
                }
            };
            outs.push(t);
        }
        ParseResult::Success(outs, pos)
    }
}
// No additional items are required beyond the `DynParser` impl.
impl<T> Parser for DynAll<T> {}
// BNF-style operator sugar (feature "bnf-syntax"); bodies are supplied by
// the `impl_bitor!`/`impl_add!` macros.
#[cfg(feature = "bnf-syntax")]
impl<T, P: Parser<Output = Vec<T>>> BitOr<P> for DynAll<T> {
    impl_bitor!(P);
}
#[cfg(feature = "bnf-syntax")]
impl<T, P: Parser<Output = Vec<T>>> Add<P> for DynAll<T> {
    impl_add!(P);
}
#[cfg(test)]
mod tests {
use crate::{basics::StrLit, source::Source, DynParser, Parser};
#[test]
fn chain() {
// TODO: More comprehensive test
let s = "foobarbazfoobar";
let mut src = Source::new(s.as_bytes());
let psr = StrLit("foo").and(StrLit("bar"));
assert_eq!(psr.parse(&mut src, false).unwrap().0, ("foo", "bar"));
assert!(psr.parse(&mut src, false).is_fail());
assert!(psr.parse(&mut src, false).is_success());
}
#[test]
fn all() {
// NOTE: This is a partial test - we need to adequately test failure as
// well.
// TODO
let s = "foobarbaz";
let mut src = Source::new(s.as_bytes());
// The first couple few are simply checking that it compiles properly
let _psr = super::all(StrLit("foo"));
let _psr = super::all((StrLit("foo"), StrLit("bar")));
let psr = super::all((StrLit("foo"), StrLit("bar"), StrLit("baz")));
// let psr = StrLit("foo") + StrLit("bar") + StrLit("baz");
assert_eq!(
psr.parse(&mut src, false).unwrap().0,
("foo".into(), "bar".into(), "baz".into())
);
}
#[test]
fn dyn_all() {
// TODO: More comprehensive test
let s = "foo-bar-baz";
let mut src = Source::new(s.as_bytes());
let psr = super::DynAll(vec![
Box::new(StrLit("foo")),
Box::new(StrLit("-bar")),
Box::new(StrLit("-baz")),
]);
assert_eq!(
psr.parse(&mut src, false).unwrap().0,
["foo", "-bar", "-baz"]
);
}
}
| true
|
f127b5d5f212660fb501e6f1f8a62f799878fdbb
|
Rust
|
xcaptain/rust-algorithms
|
/leetcode/src/p88.rs
|
UTF-8
| 1,055
| 3.421875
| 3
|
[] |
no_license
|
// https://leetcode-cn.com/problems/merge-sorted-array/
/// Merges the first `n` elements of `nums2` into `nums1`, whose first `m`
/// elements are sorted and whose total length is at least `m + n`.
///
/// Works backwards from the end of `nums1`, so no temporary buffer is
/// needed: O(1) extra space instead of the previous O(m) copy of `nums1`.
pub fn merge(nums1: &mut [i32], m: i32, nums2: &mut [i32], n: i32) {
    let mut p1 = m as usize; // elements of nums1 still to place
    let mut p2 = n as usize; // elements of nums2 still to place
    let mut p = p1 + p2; // next write position (exclusive)
    // Once nums2 is exhausted, the remaining nums1 prefix is already in
    // its final position, so we only loop while p2 > 0.
    while p2 > 0 {
        p -= 1;
        if p1 > 0 && nums1[p1 - 1] > nums2[p2 - 1] {
            nums1[p] = nums1[p1 - 1];
            p1 -= 1;
        } else {
            nums1[p] = nums2[p2 - 1];
            p2 -= 1;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example from the problem statement: merge [2,5,6] into [1,2,3,_,_,_].
    #[test]
    fn test_merge() {
        let mut nums1 = vec![1, 2, 3, 0, 0, 0];
        let mut nums2 = vec![2, 5, 6];
        merge(&mut nums1, 3, &mut nums2, 3);
        assert_eq!(vec![1, 2, 2, 3, 5, 6], nums1);
    }
}
| true
|
88c4d54f9548fc45e605858b31b07bbb3c720e81
|
Rust
|
luisgabrielroldan/gamebrust
|
/core/src/memory/mmu.rs
|
UTF-8
| 5,799
| 2.609375
| 3
|
[] |
no_license
|
use crate::io::timer::Timer;
use crate::io::joypad::{Joypad, JoypadAdapter};
use crate::memory::Memory;
use crate::memory::Ram;
use crate::memory::bootrom::DMG1;
use crate::cartridge::Cartridge;
use crate::ppu::PPU;
use crate::Display;
/// Bookkeeping for an in-flight OAM DMA transfer.
struct OAMDma {
    /// Whether a transfer is currently running.
    active: bool,
    /// Source base address: the page byte written to the DMA register,
    /// shifted left by 8.
    from: u16,
    /// Number of bytes already copied.
    index: u16,
}
impl OAMDma {
    /// Creates an idle (inactive) transfer state.
    pub fn new() -> Self {
        OAMDma {
            active: false,
            from: 0,
            index: 0,
        }
    }
    /// Begins a transfer from `from << 8`, resetting the byte counter.
    pub fn start(&mut self, from: u8) {
        self.from = u16::from(from) << 8;
        self.index = 0;
        self.active = true;
    }
}
/// Memory management unit: routes CPU reads/writes to the cartridge, PPU,
/// RAM banks and memory-mapped IO registers.
pub struct MMU {
    // Pending interrupt flags (IF, 0xFF0F); upper 3 bits forced high in step().
    intfs: u8,
    // Interrupt enable register (IE, 0xFFFF).
    inte: u8,
    // While true, reads below 0x100 are served from the boot ROM (DMG1).
    bootrom: bool,
    cartridge: Cartridge,
    timer: Timer,
    joypad: Joypad,
    ppu: PPU,
    wram: Ram,
    zram: Ram,
    // Serial transfer data byte (0xFF01); echoed to stdout on 0xFF02 = 0x81.
    sb: u8,
    // State for the cycle-accurate OAM DMA path (currently unused — writes
    // to 0xFF46 take the fast_oam_dma path instead).
    oam_dma: OAMDma,
}
#[allow(dead_code)]
impl MMU {
    /// Builds the memory map around a cartridge and a display backend.
    pub fn new(cartridge: Cartridge, display: Box<dyn Display>, bootrom: bool) -> Self {
        Self {
            intfs: 0,
            inte: 0,
            bootrom: bootrom,
            cartridge: cartridge,
            joypad: Joypad::new(),
            timer: Timer::new(),
            ppu: PPU::new(display),
            wram: Ram::new(0x8000),
            zram: Ram::new(0x7F),
            sb: 0,
            oam_dma: OAMDma::new(),
        }
    }
    /// Advances the peripherals by `ticks` clock cycles and accumulates
    /// their interrupt requests into IF.
    pub fn step(&mut self, ticks: u32) {
        self.handle_oam_dma(ticks);
        self.intfs |= self.timer.step(ticks);
        self.intfs |= self.ppu.step(ticks);
        self.intfs |= self.joypad.step();
        // The unused upper 3 bits of IF always read as 1.
        self.intfs |= 0xE0;
    }
    /// Exposes the joypad so the frontend can feed button state in.
    pub fn get_joypad_adapter(&mut self) -> &mut dyn JoypadAdapter {
        &mut self.joypad
    }
    // Copies the whole 0xA0-byte OAM block from `value << 8` in one shot
    // (instead of spreading the copy over cycles like the handler below).
    fn fast_oam_dma(&mut self, value: u8) {
        let base = (value as u16) << 8;
        for i in 0 .. 0xA0 {
            let b = self.read(base + i);
            self.write(0xFE00 + i, b);
        }
    }
    // Cycle-spread OAM DMA: copies one byte per machine cycle (4 ticks).
    // Currently unused — 0xFF46 writes call fast_oam_dma (see io_write).
    // NOTE(review): this path stops at 0x8F bytes while fast_oam_dma copies
    // 0xA0; verify the bound before re-enabling it.
    fn handle_oam_dma(&mut self, ticks: u32) {
        if !self.oam_dma.active { return; }
        let cycles = (ticks / 4) as u16;
        // Clamp so we never copy past the end of the transfer.
        let count =
            if 0x8F - self.oam_dma.index > cycles {
                cycles
            } else {
                0x8F - self.oam_dma.index
            };
        for i in 0..count {
            let v = self.read(self.oam_dma.from + self.oam_dma.index + i);
            self.write(0xFE00 + self.oam_dma.index + i, v);
        }
        self.oam_dma.index += count;
        if self.oam_dma.index == 0x8F {
            self.oam_dma.active = false;
        }
    }
    // Reads from the 0xFF00-0xFFFF IO register window.
    fn io_read(&self, addr: u16) -> u8 {
        match addr {
            0xFF00 => self.joypad.read(),
            // Serial registers read back as open bus.
            0xFF01..=0xFF02 => 0xFF,
            0xFF04 => self.timer.get_div(),
            0xFF05 => self.timer.get_tima(),
            0xFF06 => self.timer.get_tma(),
            0xFF07 => self.timer.get_tac(),
            0xFF0F => self.intfs,
            0xFF10..=0xFF3F => 0, // TODO: Implement sound someday...
            0xFF40..=0xFF4F => self.ppu.read(addr),
            0xFF50 => { if self.bootrom { 1 } else { 0 } }
            // 0xFF51..=0xFF55 => 0, // TODO DMA
            0xFF68..=0xFF6B => self.ppu.read(addr),
            0xFFFF => self.inte,
            _ => { println!("Warning: Attempt to READ from unmapped IO area: 0x{:04X}", addr); 0xFF }
        }
    }
    // Writes to the 0xFF00-0xFFFF IO register window.
    fn io_write(&mut self, addr: u16, v: u8) {
        match addr {
            0xFF00 => self.joypad.write(v),
            0xFF01 => { self.sb = v; }
            // Writing 0x81 starts a serial transfer; the data byte is
            // echoed to stdout (useful for test-ROM output).
            0xFF02 => { if v == 0x81 { print!("{}", self.sb as char) } }
            0xFF04 => self.timer.set_div(v),
            0xFF05 => self.timer.set_tima(v),
            0xFF06 => self.timer.set_tma(v),
            0xFF07 => self.timer.set_tac(v),
            0xFF0F => { self.intfs = v; }
            0xFF10..=0xFF3F => {} // TODO: Implement sound someday...
            // OAM DMA trigger — performed instantly, not cycle-accurate.
            0xFF46 => { self.fast_oam_dma(v); } //self.oam_dma.start(v),
            0xFF40..=0xFF4F => self.ppu.write(addr, v),
            // Writing bit 0 of 0xFF50 unmaps the boot ROM permanently.
            0xFF50 => { if (v & 1) == 1 { self.bootrom = false } }
            // 0xFF51..=0xFF55 => {} // DMA CGB
            0xFF68..=0xFF6B => self.ppu.write(addr, v),
            0xFFFF => self.inte = v,
            _ => { println!("Warning: Attempt to WRITE on unmapped IO area: 0x{:04X}", addr); }
        }
    }
}
impl Memory for MMU {
    // Full 16-bit address decode for CPU reads.
    fn read(&self, addr: u16) -> u8 {
        match addr {
            0x000..=0x7FFF => {
                // Boot ROM shadows the first 0x100 bytes until disabled
                // via register 0xFF50.
                if self.bootrom && addr < 0x100 {
                    DMG1[addr as usize]
                } else {
                    self.cartridge.read(addr)
                }
            }
            0x8000..=0x9FFF => self.ppu.read(addr),
            0xA000..=0xBFFF => self.cartridge.read(addr),
            // 0xE000-0xFDFF mirrors work RAM (echo RAM).
            0xC000..=0xCFFF | 0xE000..=0xEFFF => self.wram.read(addr & 0x0FFF),
            0xD000..=0xDFFF | 0xF000..=0xFDFF => self.wram.read(0x1000 | (addr & 0x0FFF)),
            0xFE00..=0xFE9F => self.ppu.read(addr),
            0xFEA0..=0xFEFF => 0xFF, // Not Used
            0xFF00..=0xFF7F => self.io_read(addr),
            0xFF80..=0xFFFE => self.zram.read(addr - 0xFF80),
            0xFFFF => self.io_read(addr),
        }
    }
    // Full 16-bit address decode for CPU writes.
    fn write(&mut self, addr: u16, v: u8) {
        match addr {
            // Writes into ROM space are forwarded to the cartridge
            // (presumably mapper control — handled inside Cartridge).
            0x000..=0x7FFF => self.cartridge.write(addr, v),
            0x8000..=0x9FFF => self.ppu.write(addr, v),
            0xA000..=0xBFFF => self.cartridge.write(addr, v),
            0xC000..=0xCFFF | 0xE000..=0xEFFF => self.wram.write(addr & 0x0FFF, v),
            0xD000..=0xDFFF | 0xF000..=0xFDFF => self.wram.write(0x1000 | (addr & 0x0FFF), v),
            0xFE00..=0xFE9F => self.ppu.write(addr, v),
            0xFEA0..=0xFEFF => { /* Not Used */ }
            0xFF00..=0xFF7F => self.io_write(addr, v),
            0xFF80..=0xFFFE => self.zram.write(addr - 0xFF80, v),
            0xFFFF => self.io_write(addr, v),
        };
    }
}
| true
|
0264879cde8ac54800a086cf16bd9b1b1ea18a6c
|
Rust
|
Lacaranian/hvif-rs
|
/src/parser/shape.rs
|
UTF-8
| 4,679
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
//! Parser for HVIF shapes
use types::*;
use nom::*;
use parser::util::*;
// Layout: type tag (always 0x0a), style index, counted path-index list,
// a flag byte, then the flag-selected modifiers in a fixed order.
named_attr!(#[doc = "Parses an HVIF shape"], pub hvif_shape<&[u8], HVIFShape>,
  do_parse!(
    tag!(&[0x0a]) >> // There is only one shape type, SHAPE_TYPE_PATH_SOURCE - should always be this!
    style_index: be_u8 >>
    // A count byte followed by that many path indices.
    path_indices: length_count!(be_u8, be_u8) >>
    shape_flags: be_u8 >>
    // The flag byte selects which optional modifiers follow.
    shape_modifiers: apply!(hvif_shape_modifier_parser_from_flags, shape_flags) >>
    (HVIFShape {
      style_index: style_index,
      path_indices: path_indices,
      modifiers: shape_modifiers
    })
  )
);
/// Runs, in a fixed order, the modifier sub-parsers whose flag bits are set
/// in `flags`, collecting the parsed modifiers.
///
/// The table order matters: serialized modifiers appear in the input stream
/// in exactly this order.
fn hvif_shape_modifier_parser_from_flags(input: &[u8], flags: u8) -> IResult<&[u8], Vec<HVIFShapeModifier>>
{
  let mut cur_input = input;
  let mut cur_modifiers = Vec::new();
  let parsers_per_flags : Vec<(HVIFFlag, fn(&[u8]) -> IResult<&[u8], HVIFShapeModifier>)> = vec![
    // Hinting consumes no input, so its position is irrelevant.
    (HVIF_SHAPE_FLAG_HINTING         , hvif_shape_modifier_hinting),
    // These parsers consume bytes, so their relative order is significant!
    (HVIF_SHAPE_FLAG_TRANSFORM       , hvif_shape_modifier_transform),
    (HVIF_SHAPE_FLAG_TRANSLATION     , hvif_shape_modifier_translation),
    (HVIF_SHAPE_FLAG_LOD_SCALE       , hvif_shape_modifier_lod_scale),
    (HVIF_SHAPE_FLAG_HAS_TRANSFORMERS, hvif_shape_modifier_has_transformers),
  ];
  // Run the modifier parser for each set flag, threading the input through.
  for (flag, parser) in parsers_per_flags {
    if flag.is_set_on(flags) {
      let (rem_input, new_mod) = try_parse!(cur_input, parser);
      cur_input = rem_input;
      cur_modifiers.push(new_mod);
    }
  }
  IResult::Done(cur_input, cur_modifiers)
}
// Hinting carries no payload; the flag alone marks the shape as hinted.
named!(hvif_shape_modifier_hinting<&[u8], HVIFShapeModifier>,
  do_parse!(
    (HVIFShapeModifier::HVIFHinting)
  )
);

// A full affine transform matrix.
named!(hvif_shape_modifier_transform<&[u8], HVIFShapeModifier>,
  do_parse!(
    matrix: hvif_shape_matrix >>
    (HVIFShapeModifier::HVIFTransformMatrix(
      matrix
    ))
  )
);

// A pure translation, stored as a single point.
named!(hvif_shape_modifier_translation<&[u8], HVIFShapeModifier>,
  do_parse!(
    point: hvif_point >>
    (HVIFShapeModifier::HVIFTranslation(
      point
    ))
  )
);

// Level-of-detail scale range: each byte is fixed point with divisor
// 63.75 (= 255/4), mapping 0..=255 onto the scale range 0.0..=4.0.
named!(hvif_shape_modifier_lod_scale<&[u8], HVIFShapeModifier>,
  do_parse!(
    min_int: be_u8 >>
    max_int: be_u8 >>
    (HVIFShapeModifier::HVIFLODScale {
      min: (min_int as f32) / 63.75,
      max: (max_int as f32) / 63.75,
    })
  )
);

// A count byte followed by that many transformers.
named!(hvif_shape_modifier_has_transformers<&[u8], HVIFShapeModifier>,
  do_parse!(
    transformers: length_count!(be_u8, hvif_shape_modifier_transformer) >>
    (HVIFShapeModifier::HVIFTransformerList(transformers))
  )
);

// A transformer: one-byte type tag, then a type-specific body.
named!(hvif_shape_modifier_transformer<&[u8], HVIFTransformer>,
  do_parse!(
    transformer_type: be_u8 >>
    transformer: apply!(hvif_shape_modifier_transformer_parser_from_flags,transformer_type) >>
    (transformer)
  )
);
fn hvif_shape_modifier_transformer_parser_from_flags(input: &[u8], transformer_type: u8) -> IResult<&[u8], HVIFTransformer>
{
let maybe_parser: Option<fn(&[u8]) -> IResult<&[u8], HVIFTransformer>> = match transformer_type {
20 => { // Affine matrix
Some(hvif_shape_modifier_transformer_matrix)
},
21 => { // Contour
Some(hvif_shape_modifier_transformer_contour)
},
22 => { // Perspective (unused?)
Some(hvif_shape_modifier_transformer_perspective)
},
23 => { // Stroke
Some(hvif_shape_modifier_transformer_stroke)
},
_ => None
};
let result = match maybe_parser {
Some(parser) => {
let (rem_input, transformer) = try_parse!(input, parser);
IResult::Done(rem_input, transformer)
}
None => IResult::Error(ErrorKind::Custom(1))
};
return result
}
// Affine-matrix transformer (type 20).
named!(hvif_shape_modifier_transformer_matrix<&[u8], HVIFTransformer>,
  do_parse!(
    matrix: hvif_shape_matrix >>
    (HVIFTransformer::Affine(matrix))
  )
);

// Contour transformer (type 21): width byte biased by 128, then a
// line-join byte and a miter limit.
named!(hvif_shape_modifier_transformer_contour<&[u8], HVIFTransformer>,
  do_parse!(
    width_int: be_u8 >>
    lj: be_u8 >>
    ml : be_u8 >>
    (HVIFTransformer::Contour {
      width: (width_int as f32) - 128.0,
      line_join: lj,
      miter_limit: ml
    })
  )
);

// Perspective transformer (type 22) carries no payload here (unused?).
named!(hvif_shape_modifier_transformer_perspective<&[u8], HVIFTransformer>,
  do_parse!(
    (HVIFTransformer::Perspective)
  )
);
// Stroke transformer (type 23): width byte biased by 128, a packed
// line-options byte (low nibble = line join, high nibble = line cap),
// and a miter limit.
named!(hvif_shape_modifier_transformer_stroke<&[u8], HVIFTransformer>,
  do_parse!(
    width_int: be_u8 >>
    line_opts: be_u8 >>
    ml : be_u8 >>
    (HVIFTransformer::Stroke {
      width: (width_int as f32) - 128.0,
      line_join: line_opts & 15,
      // Fix: the cap is stored in the HIGH nibble; the previous `<< 4`
      // discarded it and produced the join bits shifted upward instead.
      line_cap: line_opts >> 4,
      miter_limit: ml
    })
  )
);
| true
|
75f6d470975217ceda901b9b2cb58d515394d008
|
Rust
|
Dr-Horv/Advent-of-Code-2019
|
/src/day10/mod.rs
|
UTF-8
| 6,601
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use crate::lib::{Solver, Position, real_distance};
use std::f64::consts::PI;
// An asteroid as seen from the candidate monitoring station: its position
// relative to the station, its original map position, and its
// clockwise-from-straight-up angle (used to order the laser sweep).
struct Asteroid {
    pub relative_pos: Position,
    pub original_pos: Position,
    pub angle: f64,
}

// Marker type implementing `Solver` for day 10.
pub(crate) struct Day10Solver {}
/// True when no third asteroid sits exactly between `p1` and `p2`.
/// Collinearity is tested by comparing `atan2` angles within an epsilon,
/// and "between" by requiring the blocker to be closer to both endpoints
/// than they are to each other.
fn has_line_of_sight(p1: &Position, p2: &Position, asteroid_map: &HashMap<Position, bool>) -> bool {
    let relative_pos = Position{x: p1.x - p2.x, y: p1.y - p2.y};
    !asteroid_map.iter()
        .filter( |&(k,_)| p1 != k && p2 != k)
        .any(|(p, _)| {
            let relative_pos_a = Position{x: p1.x - p.x, y: p1.y - p.y};
            // Angle p1->p2 vs angle p1->candidate blocker.
            let a = (relative_pos.y as f64).atan2(relative_pos.x as f64);
            let b = (relative_pos_a.y as f64).atan2(relative_pos_a.x as f64);
            ((a - b).abs() < 0.001) && (real_distance(p, p1) < real_distance(p1, p2) && real_distance(p, p2) < real_distance(p1, p2))
        })
}
impl Solver for Day10Solver {
    /// Part one: find the asteroid that can see the most other asteroids.
    /// Part two: from that station, vaporize asteroids in a clockwise laser
    /// sweep and report the 200th one as x*100 + y.
    fn solve(&self, lines: Vec<String>, part_two: bool) -> String {
        let mut asteroid_map = HashMap::new();
        let mut asteroid_map_vision = HashMap::new();
        let height = lines.len();
        let width = lines[0].len();
        // Parse the grid: every '#' is an asteroid keyed by its position.
        for y in 0..lines.len() {
            let line = &lines[y];
            let mut chars = line.chars();
            for x in 0..line.len() {
                let c = chars.next().unwrap();
                if c == '#' {
                    let p = Position{x: x as i32, y: y as i32};
                    asteroid_map.insert(p, true);
                }
            }
        }
        // Count pairwise visibility to find the best station (O(n^2) pairs,
        // each with an O(n) line-of-sight scan).
        let mut max = 0;
        let mut best = &Position{x: 0,y: 0};
        for (p1, _) in &asteroid_map {
            let mut count = 0;
            for (p2, _) in &asteroid_map {
                if p1 == p2 {
                    continue
                }
                if has_line_of_sight(p1, p2, &asteroid_map) {
                    count += 1
                }
            }
            asteroid_map_vision.insert(p1, count);
            if count > max {
                max = count;
                best = p1;
            }
        }
        // Debug rendering of the per-cell visibility counts (built but only
        // the summary line below is printed).
        let mut map = String::new();
        for y in 0..height {
            map.push_str("\n");
            for x in 0..width {
                let s = match asteroid_map_vision.get(&Position{x: x as i32, y: y as i32}) {
                    Some(t) => t.to_string(),
                    None => String::from(".")
                };
                map.push_str(s.as_str())
            }
        }
        println!("Best={},{} can see {}", best.x, best.y, max);
        if !part_two {
            return max.to_string();
        }
        // Part two: compute each asteroid's sweep angle. atan2 is rotated by
        // -PI/2 and wrapped so 0 points straight up and angles grow clockwise.
        let mut asteroids: Vec<Asteroid> = asteroid_map.iter()
            .map(|(&a,_)| {
                let relative_pos = Position{x: best.x - a.x, y: best.y - a.y};
                let pos = Position{x: a.x, y: a.y};
                let a = (relative_pos.y as f64).atan2(relative_pos.x as f64);
                let c = a - PI/2.0;
                let d = if c < 0.0 { PI*2.0+c } else { c };
                let angle = d;
                return Asteroid{relative_pos, original_pos: pos, angle };
            }).collect();
        asteroids.sort_by(|a,b| {
            return a.angle.partial_cmp(&b.angle).unwrap()
        });
        // Each outer loop is one full laser rotation: everything currently
        // visible is vaporized (in angle order), then removed from the map.
        let mut a_map_copy = asteroid_map.clone();
        let mut count = 0;
        let mut asteroids_to_remove = Vec::new();
        loop {
            for a in asteroids.iter() {
                if a.original_pos != *best && has_line_of_sight(best, &a.original_pos, &a_map_copy) {
                    count += 1;
                    println!("{} Asteroid={},{} angle {}", count, a.original_pos.x, a.original_pos.y, a.angle);
                    asteroids_to_remove.push(a);
                    if count == 200 {
                        println!("Asteroid={},{} angle {}", a.original_pos.x, a.original_pos.y, a.angle);
                        return (a.original_pos.x * 100 + a.original_pos.y).to_string();
                    }
                }
            }
            for a in asteroids_to_remove.iter_mut() {
                a_map_copy.remove(&a.original_pos);
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::lib::test_solver;
#[test]
fn test_part_one() {
let solver = Day10Solver {};
test_solver(&solver, false, &[".#..#", ".....", "#####", "....#", "...##"], "8");
test_solver(&solver, false, &["......#.#.",
"#..#.#....",
"..#######.",
".#.#.###..",
".#..#.....",
"..#....#.#",
"#..#....#.",
".##.#..###",
"##...#..#.",
".#....####"
], "33");
test_solver(&solver, false, &["#.#...#.#.",
".###....#.",
".#....#...",
"##.#.#.#.#",
"....#.#.#.",
".##..###.#",
"..#...##..",
"..##....##",
"......#...",
".####.###."], "35");
test_solver(&solver, false, &[".#..##.###...#######",
"##.############..##.",
".#.######.########.#",
".###.#######.####.#.",
"#####.##.#.##.###.##",
"..#####..#.#########",
"####################",
"#.####....###.#.#.##",
"##.#################",
"#####.##.###..####..",
"..######..##.#######",
"####.##.####...##..#",
".#####..#.######.###",
"##...#.##########...",
"#.##########.#######",
".####.#.###.###.#.##",
"....##.##.###..#####",
".#.#.###########.###",
"#.#.#.#####.####.###",
"###.##.####.##.#..##"], "210");
}
#[test]
fn test_part_two() {
let solver = Day10Solver {};
test_solver(&solver, true, &[".#..##.###...#######",
"##.############..##.",
".#.######.########.#",
".###.#######.####.#.",
"#####.##.#.##.###.##",
"..#####..#.#########",
"####################",
"#.####....###.#.#.##",
"##.#################",
"#####.##.###..####..",
"..######..##.#######",
"####.##.####...##..#",
".#####..#.######.###",
"##...#.##########...",
"#.##########.#######",
".####.#.###.###.#.##",
"....##.##.###..#####",
".#.#.###########.###",
"#.#.#.#####.####.###",
"###.##.####.##.#..##"], "802");
}
}
| true
|
074a766c46da2f530441a78f8bcd3a9d3cb7c187
|
Rust
|
Undin/fall
|
/fall_tree/src/node/mod.rs
|
UTF-8
| 2,010
| 2.671875
| 3
|
[] |
no_license
|
use std::time::Duration;
use {Text, TextRange, NodeType, Language};
mod imp;
mod immutable;
pub use self::imp::NodeChildren;
pub use self::immutable::INode;
/// A single text edit: replace the text in the `delete` range with `insert`.
pub struct Edit {
    pub delete: TextRange,
    pub insert: String,
}

/// A parsed file: the internal representation plus the immutable tree
/// (`INode`) it was built from.
pub struct File {
    imp: imp::FileImpl,
    inode: INode,
}
impl File {
    /// Builds a `File` from a parse result, keeping both the internal
    /// representation and the immutable tree it came from.
    pub fn new(lang: Language, text: String, stats: FileStats, node: INode) -> File {
        File {
            imp: imp::new_file(lang, text, stats, &node),
            inode: node,
        }
    }

    /// The language this file was parsed with.
    pub fn language(&self) -> &Language {
        &self.imp.lang
    }

    /// The root node of the syntax tree.
    pub fn root(&self) -> Node {
        self.imp.root()
    }

    /// The full source text.
    pub fn text(&self) -> Text {
        self.imp.text()
    }

    /// Timing statistics collected during parsing.
    pub fn stats(&self) -> FileStats {
        self.imp.stats()
    }

    /// A clone of the immutable tree backing this file.
    pub fn inode(&self) -> INode {
        self.inode.clone()
    }

    /// Applies `edit`, producing a new `File` via incremental reparse.
    pub fn edit(&self, edit: &Edit) -> File {
        self.language().reparse(self, edit)
    }
}
/// A lightweight handle to a syntax-tree node; cheap to copy and tied to
/// the lifetime `'f` of its `File`.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Node<'f>(imp::NodeImpl<'f>);

impl<'f> ::std::fmt::Debug for Node<'f> {
    // Delegates to the internal representation's debug rendering.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        self.0.debug(f)
    }
}

impl<'f> Node<'f> {
    /// The node's syntactic type.
    pub fn ty(&self) -> NodeType {
        self.0.ty()
    }
    /// The range of source text this node covers.
    pub fn range(&self) -> TextRange {
        self.0.range()
    }
    /// The source text this node covers.
    pub fn text(&self) -> Text<'f> {
        self.0.text()
    }
    /// The parent node, or `None` for the root.
    pub fn parent(&self) -> Option<Node<'f>> {
        self.0.parent()
    }
    /// Iterator over the node's direct children.
    pub fn children(&self) -> NodeChildren<'f> {
        self.0.children()
    }
}
/// Timing and bookkeeping information collected while (re)parsing a file.
#[derive(Clone, Copy, Debug)]
pub struct FileStats {
    pub lexing_time: Duration,
    pub parsing_time: Duration,
    pub parsing_ticks: u64,
    // The sub-range of the file that was actually re-parsed.
    pub reparsed_region: TextRange,
}
impl FileStats {
pub fn new() -> FileStats {
FileStats {
lexing_time: Default::default(),
parsing_time: Default::default(),
parsing_ticks: Default::default(),
reparsed_region: TextRange::empty()
}
}
}
| true
|
ab22796c478bf59562fd3a5c409b8117d9460755
|
Rust
|
ddziaduch/codewars
|
/rust/src/leap_years.rs
|
UTF-8
| 384
| 3.046875
| 3
|
[] |
no_license
|
// https://www.codewars.com/kata/leap-years/train/rust
// A year is a leap year when divisible by 4, except century years,
// which must also be divisible by 400.
fn is_leap_year(year: i32) -> bool {
    if year % 100 == 0 {
        year % 400 == 0
    } else {
        year % 4 == 0
    }
}
pub fn verify() {
assert_eq!(is_leap_year(1234), false);
assert_eq!(is_leap_year(1984), true);
assert_eq!(is_leap_year(2000), true);
assert_eq!(is_leap_year(2010), false);
assert_eq!(is_leap_year(2013), false);
}
| true
|
9a338902a9553e4c4099b216baecf87dc495d8cf
|
Rust
|
sunhuachuang/study-demo
|
/rust/rust_primer/variable.rs
|
UTF-8
| 3,897
| 3.5
| 4
|
[] |
no_license
|
#![feature(inclusive_range_syntax)]
fn main() {
    // Type inference gives a1 the same type as the annotated a2 (i32).
    let a1 = 5;
    let a2: i32 = 5;
    assert_eq!(a1, a2);
    let b1: u32 = 5;
    //assert_eq!(a2, b1); //mismatch types
    println!("{}, {}, {}", a1, a2, b1);
    let mut m1: f64 = 1.0;
    println!("{}", m1); //warning: value assigned to `m1` is never read
    m1 = 3.0;
    println!("{}", m1);
    // A literal suffix pins the type: 4.0f32 is an f32.
    let b2 = 4.0f32;
    println!("{}", b2);
    //int
    /*
     * Fixed-size integers
     * u8  (unsigned)  i8  (signed)
     * u16             i16
     * u32             i32 (the default)
     * u64             i64
     *
     * Pointer-sized integers
     * usize isize
     *
     * Floating point
     * f32   f64 (f64 is the default)
     */
    let f1 = 0.2;
    let f2 = 0.2f32;
    assert_eq!(f1, f2);
    let f3 = 0.1;
    // Classic binary floating-point demo: 0.1 + 0.2 is not exactly 0.3.
    if f1 + f3 == 0.3 {
        println!("0.1 + 0.2 == 0.3");
    } else {
        println!("0.1 + 0.2 != 0.3");
    }
    // `let` destructures patterns, e.g. tuples.
    let (a, b) = (1, 2);
    println!("a: {}, b: {}", a, b);
    let (t, mut f): (bool, bool) = (true, false);
    println!("t: {}, f: {}", t, f);
    f = !f;
    assert_eq!(t, f);
    // Built-in primitive types
    /*
     * bool       boolean: true, false
     * char       character: one Unicode scalar value, stored in 4 bytes
     * int(float) numeric: signed/unsigned integers and floats
     * str(String) strings: the underlying unsized str is usually used via
     *            the slice &str (&'static str) or heap-allocated String;
     *            slices are immutable, String is growable
     * array      fixed size, single element type: [T; N]
     * slice      a borrowed view into part of an array, no copy: &[T]
     * tuple      fixed-size ordered list, mixed types allowed; values are
     *            read by destructuring or by index
     * pointer    raw pointers *const T and *mut T; dereferencing requires
     *            an unsafe block
     * function   function type: essentially a function pointer
     * unit       the unit type (), whose only value is ()
     */
    let t = true;
    let f: bool = false;
    assert_eq!(t, !f);
    let c = 'c';
    println!("{}", c as i32);
    // Underscores are readability separators in numeric literals.
    let x = 123_456;
    assert_eq!(x, 123456);
    let xx: f64 = 1.23e+2;
    //let zero = z.abs_sub(123.4);
    // Binary, octal and hex literals.
    let bin = 0b1111_0000;
    let oct = 0o7320_1546;
    let hex = 0xf23a_b09;
    println!("{}, {}, {}, {}", xx, bin, oct, hex);
    let str = "hello";
    let sstr: &'static str = "hello";
    assert_eq!(str, sstr);
    let mut string = str.to_string();
    string += " world!";
    println!("{}, {}", str, string);
    let arr = [1, 2, 3, 4, 5];
    println!("arr1: {:?}, {}", arr, arr[1]);
    // [0; 10] repeats the element 0 ten times.
    let arr2 = [0; 10];
    println!("{:?}, arr1.slice: {:?}, {:?}",
             arr2,
             &arr[1..3],
             &arr[1...3]); //#![feature(inclusive_range_syntax)]
    // arr1: [1, 2, 3, 4, 5], 2
    // [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], arr1.slice: [2, 3], [2, 3, 4]
    let tup = (1, "hello");
    let (_, say) = tup;
    println!("{}, tuple.1 : {}", say, tup.1);
    let x = 5;
    let raw = &x as *const i32;
    // Dereferencing a raw pointer must happen inside `unsafe`.
    let points_at = unsafe { *raw };
    println!("{}", points_at);
    fn foo(x: i32) -> i32 {
        return x * 2;
    }
    // Plain functions coerce to function-pointer values.
    let bar: fn(i32) -> i32 = foo;
    println!("{}", bar(3));
    /*
     * Byte literals b'a' / b"hello"; raw strings r#"hello"#
     * Converting String -> &str with & is cheap; the reverse (to_string)
     *   allocates, so it is not cheap.
     * Growable arrays: the vec! macro
     * Arrays of at most 32 elements and tuples of at most 12 elements are
     *   automatically copied when passed by value.
     */
    type Point = (i32, i32);
    let p: Point = (1, 2);
    println!("{:?}", p);
    let s = "hello".to_string();
    use_str(&*s); //string to &str
    let say = "我很好".to_string();
    // Iterate the UTF-8 bytes, then the Unicode scalar values.
    for i in say.as_bytes() {
        println!("{}", i);
    }
    for i in say.chars() {
        println!("{}", i);
    }
    println!("{:?}", say.chars().nth(2));
}
/// Prints a string slice; demonstrates accepting `&str` rather than `&String`.
fn use_str(s: &str) {
    println!("{}", s);
}
| true
|
417979c5cbfc41e51662e53f0556f0880fcd9206
|
Rust
|
Cloudxtreme/omud
|
/src/player.rs
|
UTF-8
| 4,208
| 2.96875
| 3
|
[] |
no_license
|
use std::net::{TcpStream, Shutdown};
use std::io::{Read, Write};
use std::sync::mpsc::Sender;
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::Values;
use world::World;
use command::Command;
use room::Room;
use entity::{self, Id, Tick, Describable, Container, Living};
use item::Item;
use output;
/// A connected player: identity, health, the client socket, and inventory.
pub struct Player {
    id: usize,
    name: String,
    health: f32,
    connected: bool,
    // RefCell gives &self methods interior-mutable access to the socket.
    stream: RefCell<TcpStream>,
    inventory: HashMap<usize, Item>,
}
impl Id for Player {
    fn get_id(&self) -> usize {
        self.id
    }
    fn set_id(&mut self, id: usize) {
        self.id = id
    }
}

impl Tick for Player {
    /// Per-tick update: read any pending client input and, if a non-empty
    /// line arrived, execute it as a command.
    fn tick(&self, room: &Room, world: &World, sender: Sender<Command>) {
        let line = self.read_line().trim().to_string();
        if line != "" {
            self.handle_command(room, world, sender, &line);
        }
    }
}
impl Describable for Player {
    /// The player's display name.
    fn get_name(&self) -> String {
        self.name.clone()
    }

    /// A short human-readable description, e.g. "player Bob".
    fn get_description(&self) -> String {
        // format! instead of `"player ".to_string() + &self.name`.
        format!("player {}", self.name)
    }
}
impl<'a> Container<'a> for Player {
    type Iter = Values<'a, usize, Item>;

    fn has_item(&self, id: usize) -> bool {
        self.inventory.contains_key(&id)
    }

    // Inserts keyed by the item's own id.
    fn add_item(&mut self, item: Item) {
        self.inventory.insert(item.get_id(), item);
    }

    fn get_item(&self, id: usize) -> Option<&Item> {
        self.inventory.get(&id)
    }

    fn get_item_mut(&mut self, id: usize) -> Option<&mut Item> {
        self.inventory.get_mut(&id)
    }

    // Removes and returns the item, if present.
    fn remove_item(&mut self, id: usize) -> Option<Item> {
        self.inventory.remove(&id)
    }

    fn item_count(&self) -> usize {
        self.inventory.len()
    }

    fn inventory_iter(&'a self) -> Self::Iter {
        self.inventory.values()
    }
}

impl Living for Player {
    fn get_health(&self) -> f32 {
        self.health
    }
}
impl Player {
    /// Creates a new player wrapping the given client connection.
    pub fn new(stream: TcpStream) -> Player {
        Player {
            id: entity::generate_id(),
            name: "Bob".to_string(),
            health: 100.0,
            connected: false,
            stream: RefCell::new(stream),
            inventory: HashMap::new(),
        }
    }

    /// Parses one input line into a verb (+ optional argument) and executes
    /// the matching game command; unknown verbs are reported to the client.
    fn handle_command(&self, room: &Room, world: &World, sender: Sender<Command>, command: &str) {
        // splitn(2, ' '): parts[0] is the verb, parts[1] (if any) the rest.
        let parts: Vec<&str> = command.splitn(2, ' ').collect();
        match parts[0] {
            "echo" => {
                output::echo(&mut*self.stream.borrow_mut(), &parts);
            },
            "status" => {
                output::status(&mut*self.stream.borrow_mut(), self);
            }
            // Close the connection and ask the world to remove this player.
            "exit" | "quit" => {
                self.writeln("Goodbye");
                self.stream.borrow_mut().shutdown(Shutdown::Both).unwrap();
                sender.send(Command::Remove{id: self.get_id(), location: room.get_id().to_string()}).unwrap();
            },
            "look" => {
                output::look(&mut*self.stream.borrow_mut(), self, room);
            },
            // NOTE(review): "go" with no argument indexes parts[1] and
            // panics -- confirm input is validated upstream.
            "go" => {
                if let Some(id) = room.get_exit(parts[1]) {
                    sender.send(Command::Move{id: self.get_id(), from: room.get_id().to_string(), to: id}).unwrap();
                } else {
                    self.writeln(&format!("There is no exit named {}.", parts[1]));
                }
            }
            "i" | "inventory" => {
                self.writeln("Inventory: ");
                for i in self.inventory_iter() {
                    self.writeln(&format!("Item: {}", &i.get_name()));
                }
            }
            _ => {
                output::unknown_command(&mut*self.stream.borrow_mut(), command);
            }
        }
    }

    /// Reads pending input from the client into a string.
    /// NOTE(review): `read_to_string` reads until EOF, so on a plain
    /// blocking socket this would stall the tick; presumably the stream is
    /// configured non-blocking elsewhere -- confirm. Read errors are
    /// swallowed and the (possibly partial) buffer is returned either way.
    fn read_line(&self) -> String {
        let mut line = String::new();
        match self.stream.borrow_mut().read_to_string(&mut line) {
            Ok(_) => line,
            Err(_) => line
        }
    }

    // Sends `line` to the client; panics if the socket write fails.
    fn write(&self, line: &str) {
        self.stream.borrow_mut().write(line.as_bytes()).unwrap();
    }

    // Sends `line` plus CRLF to the client; panics if the socket write fails.
    fn writeln(&self, line: &str) {
        self.stream.borrow_mut().write(line.as_bytes()).unwrap();
        self.stream.borrow_mut().write(b"\r\n").unwrap();
    }
}
| true
|
3d9887cc66c34f915deb60e312784093c30f6168
|
Rust
|
ke-pan/leetcode-rust
|
/src/n386_lexicographical_numbers.rs
|
UTF-8
| 1,855
| 3.640625
| 4
|
[] |
no_license
|
/*
* @lc app=leetcode id=386 lang=rust
*
* [386] Lexicographical Numbers
*
* https://leetcode.com/problems/lexicographical-numbers/description/
*
* algorithms
* Medium (45.35%)
* Total Accepted: 38.5K
* Total Submissions: 84.8K
* Testcase Example: '13'
*
* Given an integer n, return 1 - n in lexicographical order.
*
* For example, given 13, return: [1,10,11,12,13,2,3,4,5,6,7,8,9].
*
* Please optimize your algorithm to use less time and space. The input size
* may be as large as 5,000,000.
*
*/
// Empty marker type that namespaces the solution methods (LeetCode style).
struct Solution {}

impl Solution {
    /// Returns 1..=n in lexicographical order by iteratively walking the
    /// implicit 10-ary trie of decimal prefixes.
    pub fn lexical_order(n: i32) -> Vec<i32> {
        let mut cur = 1;
        let mut out: Vec<i32> = Vec::new();
        // Exactly n values are produced.
        while out.len() < n as usize {
            out.push(cur);
            if cur * 10 <= n {
                // Descend to the first child: 1 -> 10.
                cur *= 10;
            } else if cur < n && cur % 10 < 9 {
                // Step to the next sibling: 12 -> 13.
                cur += 1;
            } else {
                // Climb out of exhausted 9-suffixes, then advance: 199 -> 2.
                while cur / 10 % 10 == 9 {
                    cur /= 10;
                }
                cur = cur / 10 + 1;
            }
        }
        out
    }

    /// Same ordering, produced by an explicit depth-first traversal
    /// rooted at each leading digit 1..=9.
    pub fn lexical_order_dfs(n: i32) -> Vec<i32> {
        let mut out: Vec<i32> = Vec::new();
        for root in 1..10 {
            Solution::dfs(root, n, &mut out)
        }
        out
    }

    /// Pushes `curr`, then recurses into its decimal children `curr*10 + d`.
    fn dfs(curr: i32, n: i32, res: &mut Vec<i32>) {
        if res.len() == n as usize {
            return;
        }
        res.push(curr);
        for d in 0..10 {
            if curr * 10 + d <= n {
                Solution::dfs(curr * 10 + d, n, res)
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;

    // Both implementations must reproduce the problem statement's
    // example ordering for n = 13.
    #[test]
    fn lexical_order() {
        assert_eq!(
            Solution::lexical_order(13),
            [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9]
        );
        assert_eq!(
            Solution::lexical_order_dfs(13),
            [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9]
        );
    }
}
| true
|
84a35dbf21bd7e6ca93b33c9e686baaea3ae949a
|
Rust
|
nfukasawa/rust_study
|
/thread-pool/src/lib.rs
|
UTF-8
| 2,030
| 3.21875
| 3
|
[] |
no_license
|
use std::error::Error;
use std::sync::mpsc;
use std::sync::{Arc, Mutex};
use std::thread;
pub type ThreadPoolError = Box<dyn Error>;
/// A fixed-size pool of worker threads fed jobs through a bounded channel.
pub struct ThreadPool {
    workers: Vec<Worker>,
    // Bounded sender: `dispatch` blocks when the queue is full.
    sender: mpsc::SyncSender<Message>,
}
impl ThreadPool {
    /// Creates a pool with a bounded job queue of `queue_size` slots and
    /// `number_of_threads` worker threads.
    ///
    /// # Panics
    /// Panics if either argument is zero.
    pub fn new(queue_size: usize, number_of_threads: usize) -> Self {
        assert_ne!(queue_size, 0);
        assert_ne!(number_of_threads, 0);

        let (sender, receiver) = mpsc::sync_channel(queue_size);
        // All workers pull from one receiver guarded by a mutex.
        let receiver = Arc::new(Mutex::new(receiver));

        let workers = (0..number_of_threads)
            .map(|_| Worker::new(Arc::clone(&receiver)))
            .collect();

        Self { workers, sender }
    }

    /// Enqueues `f` to run on a worker thread, blocking while the queue
    /// is full.
    ///
    /// # Errors
    /// Returns an error if the worker side of the channel has hung up.
    pub fn dispatch<F>(&self, f: F) -> Result<(), ThreadPoolError>
    where
        F: FnOnce() + Send + 'static,
    {
        // SendError implements Error, so `?` boxes it into ThreadPoolError.
        self.sender.send(Message::Dispatch(Box::new(f)))?;
        Ok(())
    }
}
impl Drop for ThreadPool {
    /// Sends one `Terminate` per worker, then joins them all, so queued
    /// jobs finish before the pool is destroyed.
    fn drop(&mut self) {
        for _ in 0..self.workers.len() {
            self.sender.send(Message::Terminate).unwrap();
        }
        while let Some(worker) = self.workers.pop() {
            worker.join();
        }
    }
}
/// Handle for one worker thread in the pool.
struct Worker {
    thread: thread::JoinHandle<()>,
}

impl Worker {
    /// Spawns a thread that loops: take the next message from the shared
    /// queue, then run the job or break on `Terminate`.
    fn new(receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {
        let thread = thread::spawn(move || loop {
            // The lock guard is a temporary dropped at the end of this
            // statement, so the job below runs WITHOUT holding the lock.
            let msg = receiver.lock().unwrap().recv().unwrap();
            match msg {
                Message::Dispatch(job) => job.call_box(),
                Message::Terminate => break,
            }
        });
        Self { thread }
    }

    /// Blocks until the worker thread has exited.
    fn join(self) {
        self.thread.join().unwrap();
    }
}
/// Control messages sent from the pool to its workers.
enum Message {
    Dispatch(Job),
    Terminate,
}

/// A boxed, sendable unit of work.
type Job = Box<dyn FnBox + Send + 'static>;

/// Workaround for invoking a boxed closure by value (older Rust could not
/// call a `Box<dyn FnOnce()>` directly).
trait FnBox {
    fn call_box(self: Box<Self>);
}

impl<F: FnOnce()> FnBox for F {
    fn call_box(self: Box<F>) {
        (*self)()
    }
}
| true
|
e6fe84d2a4ecb4d25f280f743fdc745a226d7309
|
Rust
|
mikialex/rendiation
|
/components/texture/core/src/iter.rs
|
UTF-8
| 1,174
| 2.890625
| 3
|
[] |
no_license
|
use rendiation_algebra::vector;
use crate::Texture2D;
/// Immutable iterator over the pixels of a `Texture2D`, in row-major order.
pub struct TexturePixels<'a, T> {
  pub(crate) texture: &'a T,
  // Linear index of the next pixel to yield.
  pub(crate) current: usize,
  // Total number of pixels to yield.
  pub(crate) all: usize,
}

impl<'a, T: Texture2D> Iterator for TexturePixels<'a, T> {
  type Item = (&'a T::Pixel, (usize, usize));

  fn next(&mut self) -> Option<Self::Item> {
    if self.current == self.all {
      return None;
    }
    // Convert the linear index back into (x, y).
    let width = self.texture.size().width;
    let x = self.current % width;
    let y = self.current / width;
    self.current += 1;
    Some((self.texture.get(vector!(x, y)), (x, y)))
  }
}
/// Mutable iterator over the pixels of a `Texture2D`, in row-major order.
pub struct TexturePixelsMut<'a, T> {
  pub(crate) texture: &'a mut T,
  // Linear index of the next pixel to yield.
  pub(crate) current: usize,
  // Total number of pixels to yield.
  pub(crate) all: usize,
}

impl<'a, T: Texture2D> Iterator for TexturePixelsMut<'a, T> {
  type Item = (&'a mut T::Pixel, (usize, usize));

  fn next(&mut self) -> Option<Self::Item> {
    if self.current == self.all {
      return None;
    }
    let width = self.texture.size().width;
    let x = self.current % width;
    let y = self.current / width;
    self.current += 1;
    // SAFETY concern: this transmute launders the borrow of `self.texture`
    // into lifetime 'a so the &mut pixel can outlive this `next` call.
    // That is only sound because `current` strictly increases (each (x, y)
    // is yielded at most once) AND `get_mut` returns distinct pixels for
    // distinct coordinates -- confirm the latter for all Texture2D impls.
    let pixel = unsafe { std::mem::transmute(self.texture.get_mut(vector!(x, y))) };
    Some((pixel, (x, y)))
  }
}
| true
|
63a1485dd196119abddcc9e82a15ec5253ccfd9c
|
Rust
|
prisma/prisma-engines
|
/psl/psl/tests/config/generators.rs
|
UTF-8
| 11,637
| 2.703125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::common::*;
#[test]
fn serialize_generators_to_cmf() {
let schema: &str = indoc! {r#"
generator js1 {
provider = "javascript"
output = "../../js"
engineType = "binary"
}
generator go {
provider = "go"
binaryTargets = ["a", "b"]
}
"#};
let expected = expect![[r#"
[
{
"name": "js1",
"provider": {
"fromEnvVar": null,
"value": "javascript"
},
"output": {
"fromEnvVar": null,
"value": "../../js"
},
"config": {
"engineType": "binary"
},
"binaryTargets": [],
"previewFeatures": []
},
{
"name": "go",
"provider": {
"fromEnvVar": null,
"value": "go"
},
"output": null,
"config": {},
"binaryTargets": [
{
"fromEnvVar": null,
"value": "a"
},
{
"fromEnvVar": null,
"value": "b"
}
],
"previewFeatures": []
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn preview_features_setting_must_work() {
// make sure both single value and array syntax work
let schema = indoc! {r#"
generator js {
provider = "javascript"
previewFeatures = "connectOrCreate"
}
generator go {
provider = "go"
previewFeatures = ["connectOrCreate", "transactionApi"]
}
"#};
let expected = expect![[r#"
[
{
"name": "js",
"provider": {
"fromEnvVar": null,
"value": "javascript"
},
"output": null,
"config": {},
"binaryTargets": [],
"previewFeatures": [
"connectOrCreate"
]
},
{
"name": "go",
"provider": {
"fromEnvVar": null,
"value": "go"
},
"output": null,
"config": {},
"binaryTargets": [],
"previewFeatures": [
"connectOrCreate",
"transactionApi"
]
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn hidden_preview_features_setting_must_work() {
let schema = indoc! {r#"
generator go {
provider = "go"
previewFeatures = ["fullTextIndex"]
}
"#};
let expected = expect![[r#"
[
{
"name": "go",
"provider": {
"fromEnvVar": null,
"value": "go"
},
"output": null,
"config": {},
"binaryTargets": [],
"previewFeatures": [
"fullTextIndex"
]
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn back_slashes_in_providers_must_work() {
let schema = indoc! {r#"
generator mygen {
provider = "../folder\twith\ttabs/my\tgenerator.js"
}
"#};
let expected = expect![[r#"
[
{
"name": "mygen",
"provider": {
"fromEnvVar": null,
"value": "../folder\twith\ttabs/my\tgenerator.js"
},
"output": null,
"config": {},
"binaryTargets": [],
"previewFeatures": []
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn new_lines_in_generator_must_work() {
let schema = indoc! {r#"
generator go {
provider = "go"
binaryTargets = ["b", "c"]
}
"#};
let expected = expect![[r#"
[
{
"name": "go",
"provider": {
"fromEnvVar": null,
"value": "go"
},
"output": null,
"config": {},
"binaryTargets": [
{
"fromEnvVar": null,
"value": "b"
},
{
"fromEnvVar": null,
"value": "c"
}
],
"previewFeatures": []
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::get_config::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn fail_to_load_generator_with_options_missing() {
let schema = indoc! {r#"
generator js1 {
no_provider = "javascript"
output = "../../js"
}
"#};
let error = psl::parse_configuration(schema)
.map(drop)
.map_err(|diag| diag.to_pretty_string("schema.prisma", schema))
.unwrap_err();
let expectation = expect![[r#"
[1;91merror[0m: [1mArgument "provider" is missing in generator block "js1".[0m
[1;94m-->[0m [4mschema.prisma:1[0m
[1;94m | [0m
[1;94m | [0m
[1;94m 1 | [0m[1;91mgenerator js1 {[0m
[1;94m 2 | [0m no_provider = "javascript"
[1;94m 3 | [0m output = "../../js"
[1;94m 4 | [0m}
[1;94m | [0m
"#]];
expectation.assert_eq(&error)
}
#[test]
fn nice_error_for_unknown_generator_preview_feature() {
let schema = indoc! {r#"
generator client {
provider = "prisma-client-js"
previewFeatures = ["foo"]
}
"#};
let error = psl::parse_configuration(schema)
.map(drop)
.map_err(|diag| diag.to_pretty_string("schema.prisma", schema))
.unwrap_err();
let expectation = expect![[r#"
[1;91merror[0m: [1mThe preview feature "foo" is not known. Expected one of: deno, fullTextIndex, fullTextSearch, metrics, multiSchema, postgresqlExtensions, tracing, views[0m
[1;94m-->[0m [4mschema.prisma:3[0m
[1;94m | [0m
[1;94m 2 | [0m provider = "prisma-client-js"
[1;94m 3 | [0m previewFeatures = [1;91m["foo"][0m
[1;94m | [0m
"#]];
expectation.assert_eq(&error)
}
#[test]
fn binary_targets_from_env_var_should_work() {
let schema = indoc! {r#"
datasource db {
provider = "mysql"
url = env("DATABASE_URL")
}
generator client {
provider = "prisma-client-js"
binaryTargets = env("BINARY_TARGETS")
}
model User {
id Int @id
}
"#};
let expected = expect![[r#"
[
{
"name": "client",
"provider": {
"fromEnvVar": null,
"value": "prisma-client-js"
},
"output": null,
"config": {},
"binaryTargets": [
{
"fromEnvVar": "BINARY_TARGETS",
"value": null
}
],
"previewFeatures": []
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::get_config::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn retain_env_var_definitions_in_generator_block() {
let schema = indoc! {r#"
generator js1 {
provider = env("PROVIDER")
output = env("OUTPUT")
}
"#};
let expected = expect![[r#"
[
{
"name": "js1",
"provider": {
"fromEnvVar": "PROVIDER",
"value": null
},
"output": {
"fromEnvVar": "OUTPUT",
"value": null
},
"config": {},
"binaryTargets": [],
"previewFeatures": []
}
]"#]];
let config = parse_configuration(schema);
let rendered = psl::get_config::generators_to_json(&config.generators);
expected.assert_eq(&rendered);
}
#[test]
fn env_in_preview_features_must_be_rejected() {
let schema_1 = indoc! {r#"
generator client {
provider = "prisma-client-js"
previewFeatures = [env("MY_PREVIEW_FEATURE")]
}
"#};
let schema_2 = indoc! {r#"
generator client {
provider = "prisma-client-js"
previewFeatures = env("MY_PREVIEW_FEATURE")
}
"#};
let expect_1 = expect![[r#"
[1;91merror[0m: [1mExpected a string value, but received functional value `env("MY_PREVIEW_FEATURE")`.[0m
[1;94m-->[0m [4mschema.prisma:3[0m
[1;94m | [0m
[1;94m 2 | [0m provider = "prisma-client-js"
[1;94m 3 | [0m previewFeatures = [[1;91menv("MY_PREVIEW_FEATURE")[0m]
[1;94m | [0m
"#]];
let expect_2 = expect![[r#"
[1;91merror[0m: [1mExpected a string value, but received functional value `env("MY_PREVIEW_FEATURE")`.[0m
[1;94m-->[0m [4mschema.prisma:3[0m
[1;94m | [0m
[1;94m 2 | [0m provider = "prisma-client-js"
[1;94m 3 | [0m previewFeatures = [1;91menv("MY_PREVIEW_FEATURE")[0m
[1;94m | [0m
"#]];
expect_1.assert_eq(&parse_unwrap_err(schema_1));
expect_2.assert_eq(&parse_unwrap_err(schema_2));
}
#[test]
fn empty_preview_features_array_should_work() {
let schema = r#"
datasource db {
provider = "postgresql"
url = env("DBURL")
}
generator js {
provider = "prisma-client-js"
previewFeatures = []
}
"#;
let schema = psl::parse_schema(schema).unwrap();
assert!(schema.configuration.preview_features().is_empty());
}
#[test]
fn empty_preview_features_array_with_empty_space_should_work() {
let schema = r#"
datasource db {
provider = "postgresql"
url = env("DBURL")
}
generator js {
provider = "prisma-client-js"
previewFeatures = [ ]
}
"#;
let schema = psl::parse_schema(schema).unwrap();
assert!(schema.configuration.preview_features().is_empty());
}
#[test]
fn engine_type_must_be_a_string() {
let with_string = indoc! {r#"
generator client {
provider = "prisma-client-js"
engineType = "binary"
}
"#};
assert_valid(with_string);
let with_array = indoc! {r#"
generator client {
provider = "prisma-client-js"
engineType = ["binary"]
}
"#};
let expect = expect![[r#"
[1;91merror[0m: [1mExpected a String value, but received array value `["binary"]`.[0m
[1;94m-->[0m [4mschema.prisma:3[0m
[1;94m | [0m
[1;94m 2 | [0m provider = "prisma-client-js"
[1;94m 3 | [0m engineType = [1;91m["binary"][0m
[1;94m | [0m
"#]];
let error = psl::parse_configuration(with_array)
.map(drop)
.map_err(|diag| diag.to_pretty_string("schema.prisma", with_array))
.unwrap_err();
expect.assert_eq(&error);
}
| true
|
b2f63281688c2a82dc419a3038c3dfa1ed7f86de
|
Rust
|
qqq-tech/braiins
|
/open/bosminer/bosminer-antminer/src/utils.rs
|
UTF-8
| 5,150
| 2.890625
| 3
|
[] |
no_license
|
// Copyright (C) 2020 Braiins Systems s.r.o.
//
// This file is part of Braiins Open-Source Initiative (BOSI).
//
// BOSI is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
// Please, keep in mind that we may also license BOSI or any part thereof
// under a proprietary license. For more information on the terms and conditions
// of such proprietary license or if you have any other questions, please
// contact us at opensource@braiins.com.
//! Utilities to calculate baudrates, register packing
use crate::error::{self, ErrorKind};
use packed_struct::prelude::*;
/// Helper method that calculates baud rate clock divisor value for the specified baud rate.
///
/// The calculation follows the same scheme for the hashing chips as well as for the FPGA IP core.
///
/// * `baud_rate` - requested baud rate (must be non-zero, or this function panics on
///   division by zero)
/// * `base_clock_hz` - base clock for the UART peripheral
/// * `base_clock_div` - divisor for the base clock (must be non-zero)
///
/// Returns `(baud_div, actual_baud_rate)` on success, or an error when the achievable
/// baud rate deviates from the request by more than `MAX_BAUD_RATE_ERR_PERC` percent.
pub fn calc_baud_clock_div(
    baud_rate: usize,
    base_clock_hz: usize,
    base_clock_div: usize,
) -> error::Result<(usize, usize)> {
    // Maximum tolerated deviation of the actual baud rate from the request, in percent.
    const MAX_BAUD_RATE_ERR_PERC: usize = 5;
    // The actual calculation is:
    // base_clock_hz / (base_clock_div * baud_rate) - 1
    // We have to mathematically round the calculated divisor in fixed point arithmetic:
    // scale the quotient by 10, add 5 (i.e. +0.5), and divide back down.
    let baud_div = (10 * base_clock_hz / (base_clock_div * baud_rate) + 5) / 10 - 1;
    // Baud rate actually produced by the integer divisor chosen above.
    let actual_baud_rate = base_clock_hz / (base_clock_div * (baud_div + 1));
    // Absolute difference between requested and achievable baud rate
    // (computed branch-wise to avoid unsigned underflow).
    let baud_rate_diff = if actual_baud_rate > baud_rate {
        actual_baud_rate - baud_rate
    } else {
        baud_rate - actual_baud_rate
    };
    // The baud rate has to be within a few percent of the request.
    if baud_rate_diff > (MAX_BAUD_RATE_ERR_PERC * baud_rate / 100) {
        // Explicit early return instead of the obscure `Err(..)?` idiom
        // (clippy::try_err); `.into()` performs the same `From` conversion
        // that the `?` operator would.
        return Err(ErrorKind::BaudRate(format!(
            "requested {} baud, resulting {} baud",
            baud_rate, actual_baud_rate
        ))
        .into());
    }
    Ok((baud_div, actual_baud_rate))
}
/// Utility trait that lets packed-struct types be converted directly
/// to/from raw 32-bit hardware register values.
pub trait PackedRegister: Sized {
    /// Decode a structured value from the raw register word `reg`.
    fn from_reg(reg: u32) -> Self;
    /// Encode this value back into a raw register word.
    fn to_reg(&self) -> u32;
}
// Blanket implementation: any type that packs into exactly 4 bytes gets the
// register conversion for free.
impl<T> PackedRegister for T
where
    T: PackedStruct<[u8; 4]>,
{
    /// Take register and unpack (as big endian).
    ///
    /// Panics with "unpacking error" if the packed-struct layout rejects the
    /// raw bytes — presumably impossible for well-formed register
    /// definitions (TODO confirm against `packed_struct` semantics).
    fn from_reg(reg: u32) -> Self {
        Self::unpack(&u32::to_be_bytes(reg)).expect("unpacking error")
    }
    /// Pack into big-endian register
    fn to_reg(&self) -> u32 {
        u32::from_be_bytes(self.pack())
    }
}
/// Compute the absolute difference between two `usize` values.
///
/// Subtracting the smaller operand from the larger one avoids unsigned
/// underflow for any pair of inputs.
pub fn distance(x: usize, y: usize) -> usize {
    x.max(y) - x.min(y)
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::bm1387;
    use crate::io;

    /// Oscillator clock of the hashing chips.
    const CHIP_OSC_CLK_HZ: usize = 25_000_000;

    #[test]
    fn test_calc_baud_div_correct_baud_rate_bm1387() {
        // These are sample baud rates for communicating with BM1387 chips,
        // paired with the divisor each one should yield.
        let expected: [(usize, usize); 4] = [
            (115_200, 26),
            (460_800, 6),
            (1_500_000, 1),
            (3_000_000, 0),
        ];
        for &(requested, divisor) in expected.iter() {
            let (calculated_div, actual) = calc_baud_clock_div(
                requested,
                CHIP_OSC_CLK_HZ,
                bm1387::CHIP_OSC_CLK_BASE_BAUD_DIV,
            )
            .unwrap();
            assert_eq!(
                calculated_div, divisor,
                "Calculated baud divisor doesn't match, requested: {} baud, actual: {} baud",
                requested, actual
            )
        }
    }

    #[test]
    fn test_calc_baud_div_correct_baud_rate_fpga() {
        // These are baudrates commonly used with UART on FPGA.
        let expected = [(115_740usize, 53usize), (1_562_500, 3), (3_125_000, 1)];
        for &(requested, divisor) in expected.iter() {
            let (calculated_div, _actual) =
                calc_baud_clock_div(requested, io::F_CLK_SPEED_HZ, io::F_CLK_BASE_BAUD_DIV)
                    .expect("failed to calculate divisor");
            assert_eq!(calculated_div, divisor);
        }
    }

    /// Test higher baud rate than supported
    #[test]
    fn test_calc_baud_div_over_baud_rate_bm1387() {
        assert!(
            calc_baud_clock_div(
                3_500_000,
                CHIP_OSC_CLK_HZ,
                bm1387::CHIP_OSC_CLK_BASE_BAUD_DIV,
            )
            .is_err(),
            "Baud clock divisor unexpectedly calculated!"
        );
    }
}
| true
|
48a19e4c55efda293cbdfa9240c20765c0858ded
|
Rust
|
mocyuto/leetcode
|
/rust/src/solution20.rs
|
UTF-8
| 989
| 3.75
| 4
|
[] |
no_license
|
struct Solution20;

impl Solution20 {
    /// LeetCode 20: return true when every bracket in `s` is correctly
    /// matched and properly nested, and false otherwise (including when
    /// `s` contains any non-bracket character).
    pub fn is_valid(s: String) -> bool {
        // Stack of the openers seen so far; closers must match in LIFO order.
        let mut openers: Vec<char> = Vec::new();
        for ch in s.chars() {
            let ok = match ch {
                '(' | '{' | '[' => {
                    openers.push(ch);
                    true
                }
                ')' | '}' | ']' => validate(&mut openers, ch),
                _ => false,
            };
            if !ok {
                return false;
            }
        }
        // Every opener must have been consumed by a matching closer.
        openers.is_empty()
    }
}

/// Pop the most recent opener and check that `c` is its matching closer.
/// Returns false when the stack is empty or the pair does not match.
fn validate(stack: &mut Vec<char>, c: char) -> bool {
    match stack.pop() {
        Some('(') => c == ')',
        Some('{') => c == '}',
        Some('[') => c == ']',
        _ => false,
    }
}
pub fn main(){
assert_eq!(Solution20::is_valid("[".to_string()),false);
assert_eq!(Solution20::is_valid("()".to_string()),true);
assert_eq!(Solution20::is_valid("()[]{}".to_string()),true);
assert_eq!(Solution20::is_valid("(]".to_string()),false);
assert_eq!(Solution20::is_valid("([)]".to_string()),false);
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.